Update app.py
app.py
CHANGED
@@ -1,46 +1,69 @@
 import streamlit as st
-
-import nest_asyncio
+import openai
 import asyncio
-import
+import re

-#
-
-tavily_api_key = st.secrets["TAVILY_API_KEY"]
+# Apply the asyncio patch if required
+import nest_asyncio

-# Apply the asyncio patch from nest_asyncio if required
 nest_asyncio.apply()

-#
-
+# OpenAI Client Initialization with API Key from Streamlit secrets
+client = openai.OpenAI(api_key=st.secrets["OPENAI_API_KEY"])

-#
-
+# Assistant ID
+ASSISTANT_ID = "asst_jNEWFnROZxSI8ZnL9WDI2yCp"


-# Define the asynchronous function to
-async def fetch_report(query: str
+# Define the asynchronous function to interact with the OpenAI assistant
+async def fetch_report(query: str) -> str:
     """
-
-
+    Interact with OpenAI Assistant to generate a report based on the provided query.
+    Clean the text by removing source annotations.
     """
     try:
-
-
+        # Create a Thread with an initial user message
+        thread = client.beta.threads.create(
+            messages=[{"role": "user", "content": query}]
+        )
+
+        # Start the Assistant
+        run = client.beta.threads.runs.create(
+            thread_id=thread.id, assistant_id=ASSISTANT_ID
         )
-
-
-
+
+        # Wait for the run to complete
+        while run.status != "completed":
+            run = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id)
+            await asyncio.sleep(5)  # Delay to prevent excessive polling
+
+        # Retrieve the Messages added by the Assistant to the Thread
+        thread_messages = client.beta.threads.messages.list(thread.id)
+
+        # Initialize an empty string to collect the cleaned report
+        report = []
+        for message in thread_messages.data:
+            if message.role == "assistant":
+                for content_block in message.content:
+                    if "text" in dir(content_block) and "value" in dir(
+                        content_block.text
+                    ):
+                        # Remove source citations
+                        cleaned_text = re.sub(
+                            r"【\d+:\d+†source】", "", content_block.text.value
+                        )
+                        report.append(cleaned_text)
+        return "\n".join(report)
     except Exception as e:
         return f"Error during research: {str(e)}"


-def run_report_generation(query
+def run_report_generation(query):
     """
     Helper function to run async fetch_report function.
     """
     loop = asyncio.get_event_loop()
-    report = loop.run_until_complete(fetch_report(query
+    report = loop.run_until_complete(fetch_report(query))
     return report


@@ -60,8 +83,8 @@ if st.button("Generate Report"):
         st.warning("Please enter a query to generate a report.")
     else:
         with st.spinner("Generating report..."):
-            report = run_report_generation(query
-            if
+            report = run_report_generation(query)
+            if report:
                 st.success("Report generated successfully!")
                 st.write(report)  # Display the report in the app
                 # Create a download button for the report
@@ -72,4 +95,4 @@ if st.button("Generate Report"):
                     mime="text/plain",
                 )
             else:
-                st.error(
+                st.error("Failed to generate report or report is empty.")
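For reference, the source-annotation cleanup introduced in the new fetch_report comes down to a single regular expression; a minimal standalone sketch (using a made-up sample string, not actual assistant output) behaves like this:

import re

# Hypothetical sample text containing an Assistants-style citation marker
sample = "Quantum computing is advancing rapidly【12:3†source】 across industries."

# Same pattern used in fetch_report: strip markers of the form 【N:M†source】
cleaned = re.sub(r"【\d+:\d+†source】", "", sample)
print(cleaned)  # Quantum computing is advancing rapidly across industries.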
|