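"""
OpenAI Deep Research Agent - a Streamlit app built on the OpenAI Agents SDK.

Two-stage pipeline:
  1. research_agent gathers sources via Firecrawl's deep-research endpoint
     (exposed to the agent as the deep_research function tool below).
  2. elaboration_agent expands the initial report into a more detailed version.

Launch with `streamlit run` pointing at this file (requires the streamlit,
openai-agents, and firecrawl-py packages, plus OpenAI and Firecrawl API keys).
"""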
import asyncio  # Lets us run the async agent workflow without blocking execution
import streamlit as st  # Builds the web application UI
from typing import Dict, Any, List
from agents import Agent, Runner, trace
from agents import set_default_openai_key
from firecrawl import FirecrawlApp
from agents.tool import function_tool

# Set up the page configuration using Streamlit
st.set_page_config(
    page_title = "OpenAI based Deep Research Agent",
    page_icon = "πŸ“š",
    layout = "wide"
)

# Streamlit reruns this script on every interaction, so the API keys are kept
# in session_state; initialize them if they don't exist yet.
if "openai_api_key" not in st.session_state:
    st.session_state.openai_api_key = ""
if "firecrawl_api_key" not in st.session_state:
    st.session_state.firecrawl_api_key = ""

# Sidebar for API Key
with st.sidebar:
    st.title("API Configuration")
    openai_api_key = st.text_input(
        "OpenAI API Key",
        value = st.session_state.openai_api_key,
        type = "password"
    )

    firecrawl_api_key = st.text_input(
        "Firecrawl API Key",
        value = st.session_state.firecrawl_api_key,
        type = "password"
    )

    if openai_api_key:
        st.session_state.openai_api_key = openai_api_key
        # Register the key with the Agents SDK so the agents below can call the OpenAI API
        set_default_openai_key(openai_api_key)
    if firecrawl_api_key:
        st.session_state.firecrawl_api_key = firecrawl_api_key

# Main Application and Input Field
st.title("πŸ” OpenAI Deep Research Agent")
st.markdown("This OpenAI Agent from OpenAI Agent SDK performs deep research on any topic using Firecrawl")

# Take the research topic the user wants investigated
research_topic = st.text_input("Enter research topic:", placeholder = "e.g., Latest Developments in AI")

# The function_tool decorator registers this async function as a tool the agent
# can call; the tool's schema is derived from the signature and docstring.
@function_tool
async def deep_research(query: str, max_depth: int, time_limit: int, max_urls: int) -> Dict[str, Any]:
    """
    Perform comprehensive web research using Firecrawl's deep research endpoint.

    max_depth controls follow-up search depth, time_limit is the budget in seconds,
    and max_urls caps the number of sources analyzed.
    """
    try:
        # Initialize Firecrawl using the saved API key
        firecrawl_app = FirecrawlApp(api_key = st.session_state.firecrawl_api_key)

        # Callback for real-time progress updates in the Streamlit UI
        def on_activity(activity):
            st.write(f"[{activity['type']}] {activity['message']}")

        # Run the deep research using Firecrawl and forward the agent-supplied limits.
        # Note: the keyword names below assume a recent firecrawl-py SDK; adjust them
        # if your installed version expects a params dict instead.
        with st.spinner("Performing Deep Research..."):
            resp = firecrawl_app.deep_research(
                query = query,
                max_depth = max_depth,
                time_limit = time_limit,
                max_urls = max_urls,
                on_activity = on_activity
            )
            
        # Field names follow the shape of Firecrawl's deep-research response
        return {
            "success": True,
            "final_analysis": resp["data"]["finalAnalysis"],
            "sources_count": len(resp["data"]["sources"]),
            "sources": resp["data"]["sources"]
        }
    except Exception as e:
        st.error(f"Deep Research Error: {str(e)}")
        return {
            "error": str(e),
            "success": False
        }
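# The dict returned by the tool (success flag, final analysis, and sources) is what
# research_agent receives as the tool's output and turns into the initial report.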

# Define an agent for each stage of the pipeline
research_agent = Agent(
    name = "research_agent",
    instructions = """you are a research assistant that can perform deep web research on any topic.

    When given a research topic or question:
    1. Use the deep_research tool to gather comprehensive information
        - Always use these parameters
            * max_depth: 3 (for moderate depth)
            * time_limit: 180 (3 minutes)
            * max_urls: 10 (sufficient sources)
    2. The tool will search the web, analyze multiple sources, and provide a synthesis
    3. Review the research results and organize them into a well-structured report
    4. Include proper citations for all sources
    5. Highlight key findings and insights
    """,
    model="gpt-4o-mini",
    tools = [deep_research]
)

elaboration_agent = Agent(
    name = "elaboration_agent",
    instructions = """You are an expert content enhancer specializing in research elaboration.

    When given a research report:
    1. Analyze the structure and content of the report
    2. Enhance the report by:
        - Adding more detailed explanations of complex concepts
        - Including relevant examples, case studies, and real-world applications
        - Expanding on key points with additional context and nuance
        - Adding descriptions of visual elements (charts, diagrams, infographics)
        - Incorporating the latest trends and future predictions
        - Suggesting practical implications for different stakeholders
    3. Maintain academic rigor and factual accuracy
    4. Preserve the original structure while making it more comprehensive
    5. Ensure all additions are relevant and valuable to the topic.
    """,
    model="gpt-4.1-nano"
)
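# Note: elaboration_agent has no tools; it only rewrites and expands the text
# produced by research_agent.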

async def run_research_process(topic : str):
    """Run the complete research process"""
    # Step 1 - Initial research
    with st.spinner("Conducting initial research..."):
        research_results = await Runner.run(research_agent, topic)
        initial_report = research_results.final_output
    # Display initial report 
    with st.expander("View Initial Research Report"):
        st.markdown(initial_report)

        
    # Step 2 - Enhance the report
    with st.spinner("Enhancing the report with additional information..."):
        elaboration_input = f"""
        RESEARCH_TOPIC = {topic}

        INITIAL RESEARCH REPORT:
        {initial_report}

        Please enhance this research report with additional information,
        examples, case studies, and deeper insights while maintaining its
        academic rigor and factual accuracy.
        """

        elaboration_results = await Runner.run(elaboration_agent, elaboration_input)
        enhanced_report = elaboration_results.final_output
        
    return enhanced_report

# Main Research Process
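# Runner.run is a coroutine, so asyncio.run drives the whole pipeline from
# Streamlit's synchronous script context.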
if st.button("Start Research", disabled = not (openai_api_key and firecrawl_api_key and research_topic)):
    if not openai_api_key or not firecrawl_api_key:
        st.warning("Please enter both the API Key to start the research!!")
    elif not research_topic:
        st.warning("No topic provided! Please enter a research topic!!")
    else:
        try:
            # Create a placeholder for final report
            report_placeholder = st.empty()

            # Run the research process
            enhanced_report = asyncio.run(run_research_process(research_topic))

            # Display the results (st.empty() holds a single element, so render the
            # title and report together in one markdown call)
            report_placeholder.markdown(f"## {research_topic} Report\n\n{enhanced_report}")

            # Add download button
            st.download_button(
                "Download Report",
                enhanced_report, 
                file_name = f"{research_topic.replace(' ', '_')}_report.md",
                mime = "text/markdown"
            )
        except Exception as e:
            st.error(f"An error occured: {str(e)}")

# Footer
st.markdown("---------------")
st.markdown("Powered by OpenAI Agents SDK and Firecrawl")