import streamlit as st
from openai import OpenAI
import time
import pandas as pd
from docx import Document
from docx.shared import Pt
import os
# Set the page configuration
st.set_page_config(page_title="LOR Chat ReportAi")
# Title and caption
st.title("LOR Chat Report Ai Assistant")
st.caption("Chat with an AI Assistant on your LOR Chat Report")
# Sidebar for API Key input
with st.sidebar:
    OPENAI_API_KEY = st.text_input("Enter your C2 Group of Technologies Access Key", type="password")
# Check for valid API key
if OPENAI_API_KEY:
    client = OpenAI(api_key=OPENAI_API_KEY)
else:
    st.error("Please enter your C2 Group of Technologies Access Key to continue.")
    st.stop()
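# ID of a pre-configured assistant in the OpenAI Assistants API (specific to this account)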
ASSISTANT_ID = "asst_lLRD8YOe64ZZ3eYBGP1Ew5y9"
# Initialize session state for chat history
if "messages" not in st.session_state:
st.session_state["messages"] = []
# Clear chat button above chat input
if st.button("Clear Chat", use_container_width=True):
st.session_state.messages = []
st.rerun()
# Display chat history
for message in st.session_state.messages:
    role, content = message["role"], message["content"]
    st.chat_message(role).write(content)
# Upload the LOR chat report as an Excel file
uploaded_file = st.file_uploader("Upload Excel file", type=["xlsx"])
if uploaded_file:
    # Read the Excel file and show its contents
    df = pd.read_excel(uploaded_file)
    st.write("Data from uploaded Excel file:")
    st.write(df)
    # Build the analysis prompt from the uploaded data and record it as a user turn
    prompt = f"Please analyze the following data: {df.head()}"
    st.session_state.messages.append({"role": "user", "content": prompt})
    st.chat_message("user").write(prompt)
    try:
        # Create a new thread for the conversation
        thread = client.beta.threads.create()
        thread_id = thread.id
        # Send the user message to the OpenAI Assistants API
        client.beta.threads.messages.create(
            thread_id=thread_id,
            role="user",
            content=prompt
        )
        # Run the assistant to generate a response
        run = client.beta.threads.runs.create(
            thread_id=thread_id,
            assistant_id=ASSISTANT_ID
        )
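        # The run executes asynchronously on OpenAI's side; its status moves from
        # "queued" through "in_progress" to a terminal state such as "completed" or "failed".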
        # Poll until the run reaches a terminal state
        while True:
            run_status = client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run.id)
            if run_status.status == "completed":
                break
            if run_status.status in ("failed", "cancelled", "expired"):
                raise RuntimeError(f"Assistant run ended with status: {run_status.status}")
            time.sleep(1)
        # Retrieve the assistant's response (messages are listed newest first)
        messages = client.beta.threads.messages.list(thread_id=thread_id)
        assistant_message = messages.data[0].content[0].text.value
        # Display the assistant's response
        st.chat_message("assistant").write(assistant_message)
        # Store it in session state
        st.session_state.messages.append({"role": "assistant", "content": assistant_message})
        # Generate a Word (DOCX) file for download
        doc = Document()
        # Add a title and style
        title = doc.add_heading('AI Assistant Report', 0)
        title.alignment = 1  # Center align the title
        # Add Executive Summary
        doc.add_heading('Executive Summary', level=1)
        doc.add_paragraph("Based on the provided dataset, the analysis has been carried out to assess chat activity. "
                          "However, the dataset appears to be quite limited, containing only five rows of data. "
                          "Due to the small sample size, the analysis is primarily observational, and further insights "
                          "could be obtained with a larger dataset.")
        # Step 1: Data Processing
        doc.add_heading('Step 1: Data Processing', level=1)
        doc.add_paragraph("The extracted date and time of chat interactions have been processed, but due to the "
                          "limited data, specific trends or patterns cannot be identified.")
        # Step 2: Data Summary
        doc.add_heading('Step 2: Data Summary', level=1)
        doc.add_paragraph("Here are the key observations from the data provided:")
        doc.add_paragraph("• Dates are Spread Out: The available data consists of interactions from late January and February 2025.")
        doc.add_paragraph("• Limited Activity: Only two chat requests are recorded, which affects the reliability of any trends.")
        # Add a table (Chat Requests by Day of the Week)
        doc.add_heading('Chat Requests by Day of the Week', level=2)
        table = doc.add_table(rows=1, cols=3)
        hdr_cells = table.rows[0].cells
        hdr_cells[0].text = 'Day'
        hdr_cells[1].text = 'Total Requests'
        hdr_cells[2].text = 'Notes'
        # Populate the table
        days_data = [
            ("Monday", 0, "No data"),
            ("Tuesday", 0, "No data"),
            ("Wednesday", 0, "No data"),
            ("Thursday", 0, "No data"),
            ("Friday", 0, "No data"),
            ("Saturday", 1, "24 Feb 2025"),
            ("Sunday", 1, "20 Feb 2025")
        ]
        for day, req, note in days_data:
            row = table.add_row().cells
            row[0].text = day
            row[1].text = str(req)
            row[2].text = note
        # Add observations section
        doc.add_paragraph("Observations:")
        doc.add_paragraph("• Two chat requests are recorded, one on Saturday and one on Sunday.")
        doc.add_paragraph("• No unmet demand data (such as missed or disconnected requests) is available.")
        # Add Step 3: Recommendations
        doc.add_heading('Step 3: Recommendations', level=1)
        doc.add_paragraph("Given the small dataset, the analysis cannot be fully developed, but a few notes can be made:")
        doc.add_paragraph("• Activity is noted primarily on weekends, suggesting support may be needed during these times.")
        doc.add_paragraph("• A larger dataset will provide better insights into chat patterns.")
        # Save the DOCX file
        word_filename = "AI_Report_Formatted.docx"
        doc.save(word_filename)
        # Provide a download button
        with open(word_filename, "rb") as file:
            st.download_button(
                label="Download the Report",
                data=file.read(),
                file_name=word_filename,
                mime="application/vnd.openxmlformats-officedocument.wordprocessingml.document"
            )
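        # Optional cleanup (an addition, not in the original script): the DOCX bytes were
        # already read into memory above, so the temporary file can be removed from disk,
        # which also puts the otherwise unused `os` import to use.
        os.remove(word_filename)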
    except Exception as e:
        st.error(f"Error processing the chat: {str(e)}")