Ani14 committed
Commit 8ad1db4 · verified · 1 Parent(s): d2b3f7c

Update app.py

Files changed (1)
  1. app.py +65 -40
app.py CHANGED
@@ -2,31 +2,32 @@ import os
 import streamlit as st
 import asyncio
 import nest_asyncio
+import base64
+import tempfile
 from gpt_researcher import GPTResearcher
 from dotenv import load_dotenv
 
-# Apply for async event loop (required for Streamlit + asyncio)
 nest_asyncio.apply()
 load_dotenv()
 
-# Set the Tavily API key (used internally by gpt-researcher)
+# Inject Tavily API key
 os.environ["TAVILY_API_KEY"] = "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"
 
-# Streamlit UI config
 st.set_page_config(page_title="🧠 Super Deep Research Agent", layout="wide")
 st.title("📚 GPT-Powered Super Deep Research Assistant")
 
-# Sidebar configuration
+# --- Sidebar UI ---
 with st.sidebar:
-    st.header("🔍 Research Configuration")
+    st.header("🔍 Setup Research Agent")
     query = st.text_input("📌 Research Topic", "Is AI a threat to creative jobs?")
     report_type = st.selectbox("📄 Report Type", ["research_report", "summary", "detailed_report"])
     tone = st.selectbox("🗣️ Tone", ["objective", "persuasive", "informative"])
     source_type = st.selectbox("📑 Source Scope", ["web", "arxiv", "semantic-scholar", "hybrid"])
     output_format = st.selectbox("📝 Output Format", ["markdown", "text"])
+    export_format = st.selectbox("📤 Export Format", ["Markdown", "PDF", "LaTeX"])
     start = st.button("🚀 Start Deep Research")
 
-# Async logic
+# Async runner
 async def run_research_with_logs(query, report_type, source, tone, fmt, log_callback):
     agent = GPTResearcher(
         query=query,
@@ -34,6 +35,7 @@ async def run_research_with_logs(query, report_type, source, tone, fmt, log_call
         report_source=source,
         report_format=fmt,
         tone=tone,
+        log_fn=log_callback
     )
     await agent.conduct_research()
     report = await agent.write_report()
@@ -42,51 +44,74 @@ async def run_research_with_logs(query, report_type, source, tone, fmt, log_call
     images = agent.get_research_images()
     return report, context, sources, images
 
-# Main research execution
+# File Export Utility
+def export_file(content, export_format):
+    filename_base = f"deep_research_{query.replace(' ', '_')}"
+
+    if export_format == "Markdown":
+        return content, f"{filename_base}.md", "text/markdown"
+
+    elif export_format == "LaTeX":
+        latex_content = f"\\documentclass{{article}}\n\\begin{{document}}\n{content}\n\\end{{document}}"
+        return latex_content, f"{filename_base}.tex", "text/plain"
+
+    elif export_format == "PDF":
+        try:
+            from fpdf import FPDF
+        except ImportError:
+            st.error("Please install `fpdf` to enable PDF export: `pip install fpdf`")
+            return None, None, None
+
+        pdf = FPDF()
+        pdf.add_page()
+        pdf.set_font("Arial", size=12)
+        for line in content.split('\n'):
+            pdf.multi_cell(0, 10, line)
+
+        temp_path = tempfile.mktemp(suffix=".pdf")
+        pdf.output(temp_path)
+        with open(temp_path, "rb") as f:
+            return f.read(), f"{filename_base}.pdf", "application/pdf"
+
+    return None, None, None
+
+# --- Main Execution ---
 if start and query:
-    st.info("🤖 Running super deep research agent...")
+    st.info("🤖 Running super deep research...")
+
     log_placeholder = st.empty()
+    log_text = ""
 
-    # Enclosing function to use nonlocal log_text
-    def run_loggable_research():
-        log_text = ""
-
-        def stream_log(msg):
-            nonlocal log_text
-            log_text += f"🟢 {msg}\n"
-            log_placeholder.code(log_text, language="text")
-
-        return asyncio.run(
-            run_research_with_logs(
-                query,
-                report_type,
-                source_type,
-                tone,
-                output_format,
-                log_callback=stream_log
-            )
-        )
+    def stream_log(msg):
+        nonlocal log_text
+        log_text += f"🟢 {msg}\n"
+        log_placeholder.code(log_text, language="text")
 
-    # Run it
-    report, context, sources, images = run_loggable_research()
+    report, context, sources, images = asyncio.run(
+        run_research_with_logs(query, report_type, source_type, tone, output_format, log_callback=stream_log)
+    )
 
-    st.success("✅ Research Complete!")
+    st.success("✅ Report generation complete!")
 
-    # Display report
     st.subheader("📄 Final Report")
     st.markdown(report, unsafe_allow_html=True)
 
-    # Display sources
-    if sources:
-        st.subheader("📚 Sources")
-        for s in sources:
-            st.markdown(f"- [{s.get('title', 'Untitled')}]({s.get('url', '#')})")
+    st.subheader("📚 Sources")
+    for s in sources:
+        st.markdown(f"- [{s.get('title', 'Untitled')}]({s.get('url', '#')})")
 
-    # Display images
     if images:
-        st.subheader("🖼️ Related Images")
+        st.subheader("🖼️ Relevant Images")
         for img in images:
             st.image(img, use_column_width=True)
 
-    # Download button
-    st.download_button("💾 Download Report", report, file_name="deep_research.md", mime="text/markdown")
+    # --- Export Section ---
+    st.subheader("📤 Export Report")
+    file_data, filename, mime = export_file(report, export_format)
+    if file_data:
+        st.download_button(
            label=f"💾 Download {export_format}",
            data=file_data,
            file_name=filename,
            mime=mime
        )
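A note on the streaming-log wiring added in this commit: `nonlocal log_text` is only valid inside a function nested within another function, so declaring it in `stream_log` at the top level of the script raises a SyntaxError. A minimal sketch of an equivalent top-level pattern that appends to a mutable list instead (the `make_stream_logger` helper name is illustrative, not part of the commit):

import streamlit as st

def make_stream_logger(placeholder):
    # Collect log lines in a list so the nested callback needs no nonlocal.
    lines = []

    def stream_log(msg):
        lines.append(f"🟢 {msg}")
        placeholder.code("\n".join(lines), language="text")

    return stream_log

# Sketch of usage inside the `if start and query:` block:
# log_placeholder = st.empty()
# stream_log = make_stream_logger(log_placeholder)
# stream_log("Research started")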
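On the PDF export branch: FPDF's built-in core fonts such as "Arial" only cover Latin-1, so a report containing characters outside that range can raise a UnicodeEncodeError inside `multi_cell`. A hedged sketch of registering a Unicode TrueType font first, assuming classic `fpdf` (the package installed by `pip install fpdf`) and a DejaVuSans.ttf file shipped alongside app.py (that font path is an assumption, not part of the commit):

from fpdf import FPDF

report_text = "Summary with non-Latin-1 text: α and β coefficients."

pdf = FPDF()
pdf.add_page()
# Assumption: DejaVuSans.ttf sits next to app.py; any Unicode TTF works here.
pdf.add_font("DejaVu", "", "DejaVuSans.ttf", uni=True)
pdf.set_font("DejaVu", size=12)
for line in report_text.split("\n"):
    pdf.multi_cell(0, 10, line)
pdf.output("deep_research.pdf")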