Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -2,31 +2,32 @@ import os
|
|
2 |
import streamlit as st
|
3 |
import asyncio
|
4 |
import nest_asyncio
|
|
|
|
|
5 |
from gpt_researcher import GPTResearcher
|
6 |
from dotenv import load_dotenv
|
7 |
|
8 |
-
# Apply for async event loop (required for Streamlit + asyncio)
|
9 |
nest_asyncio.apply()
|
10 |
load_dotenv()
|
11 |
|
12 |
-
#
|
13 |
os.environ["TAVILY_API_KEY"] = "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"
|
14 |
|
15 |
-
# Streamlit UI config
|
16 |
st.set_page_config(page_title="π§ Super Deep Research Agent", layout="wide")
|
17 |
st.title("π GPT-Powered Super Deep Research Assistant")
|
18 |
|
19 |
-
# Sidebar
|
20 |
with st.sidebar:
|
21 |
-
st.header("π Research
|
22 |
query = st.text_input("π Research Topic", "Is AI a threat to creative jobs?")
|
23 |
report_type = st.selectbox("π Report Type", ["research_report", "summary", "detailed_report"])
|
24 |
tone = st.selectbox("π£οΈ Tone", ["objective", "persuasive", "informative"])
|
25 |
source_type = st.selectbox("π‘ Source Scope", ["web", "arxiv", "semantic-scholar", "hybrid"])
|
26 |
output_format = st.selectbox("π Output Format", ["markdown", "text"])
|
|
|
27 |
start = st.button("π Start Deep Research")
|
28 |
|
29 |
-
# Async
|
30 |
async def run_research_with_logs(query, report_type, source, tone, fmt, log_callback):
|
31 |
agent = GPTResearcher(
|
32 |
query=query,
|
@@ -34,6 +35,7 @@ async def run_research_with_logs(query, report_type, source, tone, fmt, log_call
|
|
34 |
report_source=source,
|
35 |
report_format=fmt,
|
36 |
tone=tone,
|
|
|
37 |
)
|
38 |
await agent.conduct_research()
|
39 |
report = await agent.write_report()
|
@@ -42,51 +44,74 @@ async def run_research_with_logs(query, report_type, source, tone, fmt, log_call
|
|
42 |
images = agent.get_research_images()
|
43 |
return report, context, sources, images
|
44 |
|
45 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
46 |
if start and query:
|
47 |
-
st.info("π€ Running super deep research
|
|
|
48 |
log_placeholder = st.empty()
|
|
|
49 |
|
50 |
-
|
51 |
-
|
52 |
-
log_text
|
53 |
-
|
54 |
-
def stream_log(msg):
|
55 |
-
nonlocal log_text
|
56 |
-
log_text += f"π’ {msg}\n"
|
57 |
-
log_placeholder.code(log_text, language="text")
|
58 |
-
|
59 |
-
return asyncio.run(
|
60 |
-
run_research_with_logs(
|
61 |
-
query,
|
62 |
-
report_type,
|
63 |
-
source_type,
|
64 |
-
tone,
|
65 |
-
output_format,
|
66 |
-
log_callback=stream_log
|
67 |
-
)
|
68 |
-
)
|
69 |
|
70 |
-
|
71 |
-
|
|
|
72 |
|
73 |
-
st.success("β
|
74 |
|
75 |
-
# Display report
|
76 |
st.subheader("π Final Report")
|
77 |
st.markdown(report, unsafe_allow_html=True)
|
78 |
|
79 |
-
|
80 |
-
|
81 |
-
st.
|
82 |
-
for s in sources:
|
83 |
-
st.markdown(f"- [{s.get('title', 'Untitled')}]({s.get('url', '#')})")
|
84 |
|
85 |
-
# Display images
|
86 |
if images:
|
87 |
-
st.subheader("πΌοΈ
|
88 |
for img in images:
|
89 |
st.image(img, use_column_width=True)
|
90 |
|
91 |
-
#
|
92 |
-
st.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2 |
import streamlit as st
|
3 |
import asyncio
|
4 |
import nest_asyncio
|
5 |
+
import base64
|
6 |
+
import tempfile
|
7 |
from gpt_researcher import GPTResearcher
|
8 |
from dotenv import load_dotenv
|
9 |
|
|
|
10 |
# --- Runtime setup ---
# Streamlit's script thread already owns an event loop; patch it so that
# asyncio.run() below can re-enter it without "loop already running" errors.
nest_asyncio.apply()
load_dotenv()

# SECURITY: a real-looking Tavily API key is committed in this file — it
# should be rotated and moved to the environment / .env. Using setdefault()
# instead of plain assignment means a key supplied via the environment (or
# loaded by load_dotenv() above) is no longer silently overwritten; the
# literal remains only as a last-resort fallback for backward compatibility.
os.environ.setdefault("TAVILY_API_KEY", "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")

# Streamlit page chrome.
st.set_page_config(page_title="π§ Super Deep Research Agent", layout="wide")
st.title("π GPT-Powered Super Deep Research Assistant")
18 |
|
19 |
+
# --- Sidebar UI ---
# Collects all research parameters up front; every widget value below is a
# module-level global read by the main-execution section and export_file.
with st.sidebar:
    st.header("π Setup Research Agent")
    # Free-text research question (pre-filled example topic).
    query = st.text_input("π Research Topic", "Is AI a threat to creative jobs?")
    # Values are passed straight through to GPTResearcher's constructor.
    report_type = st.selectbox("π Report Type", ["research_report", "summary", "detailed_report"])
    tone = st.selectbox("π£οΈ Tone", ["objective", "persuasive", "informative"])
    source_type = st.selectbox("π‘ Source Scope", ["web", "arxiv", "semantic-scholar", "hybrid"])
    output_format = st.selectbox("π Output Format", ["markdown", "text"])
    # Download format handled locally by export_file() (not by the researcher).
    export_format = st.selectbox("π€ Export Format", ["Markdown", "PDF", "LaTeX"])
    # Button is True only on the rerun triggered by the click.
    start = st.button("π Start Deep Research")
|
29 |
|
30 |
+
# Async runner
|
31 |
async def run_research_with_logs(query, report_type, source, tone, fmt, log_callback):
|
32 |
agent = GPTResearcher(
|
33 |
query=query,
|
|
|
35 |
report_source=source,
|
36 |
report_format=fmt,
|
37 |
tone=tone,
|
38 |
+
log_fn=log_callback
|
39 |
)
|
40 |
await agent.conduct_research()
|
41 |
report = await agent.write_report()
|
|
|
44 |
images = agent.get_research_images()
|
45 |
return report, context, sources, images
|
46 |
|
47 |
+
# File Export Utility
def export_file(content, export_format, topic=None):
    """Serialize *content* for download in the chosen format.

    Parameters
    ----------
    content : str
        The report text to export.
    export_format : str
        One of "Markdown", "PDF", "LaTeX"; anything else yields Nones.
    topic : str, optional
        Text used to build the download file name. Defaults to the
        module-level ``query`` global, preserving the original behavior of
        reading it implicitly.

    Returns
    -------
    tuple
        ``(data, filename, mime_type)`` — ``data`` is ``str`` for
        Markdown/LaTeX and ``bytes`` for PDF; ``(None, None, None)`` on an
        unknown format or a missing PDF dependency.
    """
    # Backward compatible: the original read the global `query` directly.
    if topic is None:
        topic = query
    filename_base = f"deep_research_{topic.replace(' ', '_')}"

    if export_format == "Markdown":
        return content, f"{filename_base}.md", "text/markdown"

    if export_format == "LaTeX":
        # Minimal standalone document wrapping the raw report text.
        latex_content = (
            "\\documentclass{article}\n\\begin{document}\n"
            f"{content}\n\\end{{document}}"
        )
        return latex_content, f"{filename_base}.tex", "text/plain"

    if export_format == "PDF":
        try:
            from fpdf import FPDF
        except ImportError:
            st.error("Please install `fpdf` to enable PDF export: `pip install fpdf`")
            return None, None, None

        pdf = FPDF()
        pdf.add_page()
        pdf.set_font("Arial", size=12)
        for line in content.split('\n'):
            # Classic FPDF core fonts are latin-1 only; the report contains
            # emoji/Unicode, which would make multi_cell raise
            # UnicodeEncodeError — degrade unsupported chars to '?' instead.
            pdf.multi_cell(0, 10, line.encode("latin-1", "replace").decode("latin-1"))

        # tempfile.mktemp() (used previously) is deprecated and race-prone,
        # and the temp file was never removed; use NamedTemporaryFile and
        # clean up after reading the bytes back.
        with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as tmp:
            temp_path = tmp.name
        try:
            pdf.output(temp_path)
            with open(temp_path, "rb") as f:
                return f.read(), f"{filename_base}.pdf", "application/pdf"
        finally:
            os.remove(temp_path)

    # Unknown export format.
    return None, None, None
|
77 |
+
|
78 |
+
# --- Main Execution ---
if start and query:
    st.info("π€ Running super deep research...")

    log_placeholder = st.empty()
    log_text = ""

    def stream_log(msg):
        # BUG FIX: this function is defined at module scope, so the original
        # `nonlocal log_text` was a SyntaxError ("no binding for nonlocal")
        # that prevented the whole script from loading. `global` is the
        # correct way to rebind the module-level accumulator here.
        global log_text
        log_text += f"π’ {msg}\n"
        log_placeholder.code(log_text, language="text")

    # nest_asyncio.apply() (at module top) makes asyncio.run() safe inside
    # Streamlit's already-running event loop.
    report, context, sources, images = asyncio.run(
        run_research_with_logs(query, report_type, source_type, tone, output_format, log_callback=stream_log)
    )

    st.success("β Report generation complete!")

    st.subheader("π Final Report")
    st.markdown(report, unsafe_allow_html=True)

    st.subheader("π Sources")
    for s in sources:
        # sources entries are dicts — .get() guards missing title/url keys.
        st.markdown(f"- [{s.get('title', 'Untitled')}]({s.get('url', '#')})")

    if images:
        st.subheader("πΌοΈ Relevant Images")
        for img in images:
            # NOTE(review): use_column_width is deprecated in newer Streamlit
            # (use_container_width) — kept as-is for compatibility with the
            # version this app was written against; confirm before upgrading.
            st.image(img, use_column_width=True)

    # --- Export Section ---
    st.subheader("π€ Export Report")
    file_data, filename, mime = export_file(report, export_format)
    if file_data:
        st.download_button(
            label=f"πΎ Download {export_format}",
            data=file_data,
            file_name=filename,
            mime=mime
        )
|