Update app.py
app.py CHANGED
@@ -1,45 +1,36 @@
 import streamlit as st
 from gpt_researcher import GPTResearcher
-import asyncio
-import nest_asyncio
 import os
 
 # Access secrets
 openai_api_key = st.secrets["OPENAI_API_KEY"]
 tavily_api_key = st.secrets["TAVILY_API_KEY"]
 
-# Apply the asyncio patch from nest_asyncio if required
-nest_asyncio.apply()
-
 # Set the document path environment variable
 os.environ["DOC_PATH"] = "./local" # Path to the folder with documents
 
 # Constants
 REPORT_TYPE = "research_report"
 
-
-
-async def fetch_report(query, report_type):
+# Define the function to fetch the report
+def fetch_report(query, report_type):
     """
     Fetch a research report based on the provided query and report type.
-    Research is conducted on a local document
+    Research is conducted on a local document.
     """
     try:
         researcher = GPTResearcher(
             query=query, report_type=report_type, report_source="local"
         )
-
-        return
+        researcher.conduct_research()
+        return researcher.write_report()
     except Exception as e:
         return f"Error during research: {str(e)}"
 
-
-
-def
-
-    asyncio.set_event_loop(loop)
-    return loop.run_until_complete(coroutine)
-
+# Cache the report generation function to avoid redundant computations
+@st.cache(suppress_st_warning=True, show_spinner=False)
+def cached_fetch_report(query, report_type):
+    return fetch_report(query, report_type)
 
 # Streamlit interface
 st.title("Google Leak Reporting Tool")
@@ -57,10 +48,7 @@ if st.button("Generate Report"):
         st.warning("Please enter a query to generate a report.")
     else:
         with st.spinner("Generating report..."):
-
-            future = executor.submit(run_async, fetch_report(query, REPORT_TYPE))
-            # Wait for the result
-            report = future.result()
+            report = cached_fetch_report(query, REPORT_TYPE)
         # Display the report or error message
         if report and not report.startswith("Error"):
             st.success("Report generated successfully!")
@@ -73,4 +61,4 @@ if st.button("Generate Report"):
                 mime="text/plain",
             )
         else:
-            st.error(report) # Show the error message if any
+            st.error(report) # Show the error message if any
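For context on the caching pattern the new version introduces, here is a minimal sketch of the same idea. The `slow_fetch` function and its arguments are hypothetical stand-ins for `fetch_report`; the sketch uses `st.cache_data`, the current replacement for the older `st.cache` decorator that this commit applies, so on older Streamlit releases the `@st.cache(...)` form shown in the diff would be used instead.

import streamlit as st

# Hypothetical stand-in for an expensive call such as fetch_report().
@st.cache_data(show_spinner=False)
def slow_fetch(query: str, report_type: str) -> str:
    # Streamlit memoizes the return value by argument values, so reruns of
    # the script with the same query reuse the cached result instead of
    # executing this body again.
    return f"Report for {query!r} ({report_type})"

query = st.text_input("Query")
if st.button("Generate"):
    st.write(slow_fetch(query, "research_report"))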