DrishtiSharma committed
Commit 5f94e5a · verified · 1 Parent(s): 250b8a4

Create app.py

Files changed (1)
  1. app.py +140 -0
app.py ADDED
@@ -0,0 +1,140 @@
+ import streamlit as st
+
+ # Used for Streamlit deployment
+ from swarm import Swarm, Agent
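+ # Note (assumption): `swarm` here refers to OpenAI's experimental Swarm
+ # framework, typically installed with `pip install git+https://github.com/openai/swarm.git`.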
+
+ # For local deployment, use the following imports instead:
+ '''
+ from swarm.core import Swarm
+ from swarm.types import Agent
+ '''
+
+ from bs4 import BeautifulSoup
+ import requests
+ from dotenv import load_dotenv
+ import os
+
+ # Load environment variables from a .env file if available
+ load_dotenv()
+
+ # Prompt the user for an OpenAI API key and expose it as an environment variable
+ def set_openai_api_key():
+     api_key_input = st.text_input("Enter your OpenAI API Key", type="password")
+     if api_key_input:
+         os.environ['OPENAI_API_KEY'] = api_key_input
+         st.success("OpenAI API Key set successfully!")
+     else:
+         st.warning("Please enter your OpenAI API Key to continue.")
+
+
+ # Initialize the Swarm client
+ def initialize_swarm_client():
+     return Swarm()
+
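+ # Note: Swarm() with no arguments is expected to build a default OpenAI client,
+ # which reads OPENAI_API_KEY from the environment, so the key must be set first.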
+ # Define the scraping function
+ def scrape_website(url):
+     """Scrapes the text content of a website."""
+     try:
+         response = requests.get(url, timeout=10)  # timeout added so a slow site cannot hang the UI
+         response.raise_for_status()
+         soup = BeautifulSoup(response.text, 'html.parser')
+         return soup.get_text()  # Return the text content from the HTML
+     except requests.exceptions.RequestException as e:
+         return f"Error during scraping: {str(e)}"
+
+ # Scraper Agent
+ scraper_agent = Agent(
+     name="Scraper Agent",
+     instructions="You are an agent that scrapes content from websites.",
+     functions=[scrape_website]  # The agent can call the scrape_website function
+ )
+
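+ # Note: Swarm is expected to turn each entry in `functions` into a tool schema
+ # the model can call, built from the function's signature and docstring.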
+ # Define the analysis function
+ def analyze_content(content):
+     """Analyzes the scraped content for key points."""
+     summary = f"Summary of content: {content[:200]}..."  # A simple placeholder summarization
+     return summary
+
+ # Research Agent
+ research_agent = Agent(
+     name="Research Agent",
+     instructions="You are an agent that analyzes content and extracts key insights.",
+     functions=[analyze_content]  # The agent can call the analyze_content function
+ )
+
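+ # Note: per the Swarm docs, a function that declares a context_variables
+ # parameter receives the context_variables passed into client.run().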
+ # Define the writing function
+ def write_summary(context_variables):
+     """Writes a summary based on the analysis."""
+     analysis = context_variables.get('analysis', '')
+     summary = f"Here's a detailed report based on the research: {analysis}"
+     return summary
+
+ # Writer Agent
+ writer_agent = Agent(
+     name="Writer Agent",
+     instructions="You are an agent that writes summaries of research.",
+     functions=[write_summary]  # The agent can call the write_summary function
+ )
+
+ # Orchestrate the workflow
+ def orchestrate_workflow(client, url):
+     # Step 1: Scrape the website
+     scrape_result = client.run(
+         agent=scraper_agent,
+         messages=[{"role": "user", "content": f"Scrape the following website: {url}"}]
+     )
+     scraped_content = scrape_result.messages[-1]["content"]
+
+     # Check for any error during scraping
+     if "Error during scraping" in scraped_content:
+         return scraped_content
+
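+     # Caution (assumption): very long pages may overflow the model's context
+     # window once embedded in the prompt; truncating scraped_content here
+     # would be a reasonable safeguard.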
+     # Step 2: Analyze the scraped content
+     research_result = client.run(
+         agent=research_agent,
+         messages=[{"role": "user", "content": f"Analyze the following content: {scraped_content}"}]
+     )
+     analysis_summary = research_result.messages[-1]["content"]
+
+     # Step 3: Write the summary based on the analysis
+     writer_result = client.run(
+         agent=writer_agent,
+         messages=[{"role": "user", "content": f"Write a summary based on this analysis: {analysis_summary}"}],
+         context_variables={"analysis": analysis_summary}  # Pass the analysis to the writer agent
+     )
+
+     final_summary = writer_result.messages[-1]["content"]
+     return final_summary
+
+ # Streamlit App UI
+ st.title("🔍 OpenAI Swarm Web Scraping and Content Analysis with a Multi-Agent System")
+ st.caption("This app scrapes a website, analyzes the content, and generates a summary using a multi-agent system built on OpenAI's Swarm framework.")
+
+ # Input for OpenAI API Key
+ st.subheader("OpenAI API Key Setup")
+ set_openai_api_key()
+
+ # Initialize the Swarm client only after the API key is set
+ if 'OPENAI_API_KEY' in os.environ and os.environ['OPENAI_API_KEY']:
+     client = initialize_swarm_client()
+
+     # Input field for the website URL
+     url = st.text_input("Enter the URL of the website you want to scrape", placeholder="https://example.com")
+
+     # Run Workflow button
+     if st.button("Run Workflow"):
+         if url:
+             with st.spinner("Running the multi-agent workflow..."):
+                 final_report = orchestrate_workflow(client, url)
+             st.success("Workflow complete!")
+             st.write("### Final Report:")
+             st.write(final_report)
+         else:
+             st.error("Please enter a valid URL.")
+ else:
+     st.warning("Please set your OpenAI API Key to proceed.")
+
+ # Footer with credits
+ st.write("---")
+ st.write("**Acknowledgement:**")