Update app.py
app.py
CHANGED
@@ -3,10 +3,6 @@ from swarm import Swarm, Agent
 from bs4 import BeautifulSoup
 import requests
 import os
-from io import BytesIO
-from reportlab.lib.pagesizes import letter
-from reportlab.pdfgen import canvas
-import json
 
 # Function to fetch OpenAI API key
 def fetch_openai_api_key():
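Note that the dropped imports pair with the final hunk below: BytesIO, reportlab, and json existed only to support the generate_pdf() helper and the TXT/PDF/JSON download buttons this change removes. Judging by those imports, the removed block at old lines 38-57 (elided at the top of the next hunk, where the capture truncated it) was most likely that PDF-report helper.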
@@ -35,71 +31,88 @@ def scrape_website(url):
     except requests.exceptions.RequestException as e:
         return f"Error during scraping: {str(e)}"
 
-… (20 removed lines, truncated in this capture)
+# Scraper Agent
+scraper_agent = Agent(
+    name="Scraper Agent",
+    instructions="You are an agent that scrapes content from websites.",
+    functions=[scrape_website]
+)
+
+# Define the analysis function
+def analyze_content(content):
+    """Analyzes the scraped content for key points."""
+    summary = f"Summary of content: {content[:200]}..."  # A simple placeholder summarization
+    return summary
+
+# Research Agent
+research_agent = Agent(
+    name="Research Agent",
+    instructions="You are an agent that analyzes content and extracts key insights.",
+    functions=[analyze_content]
+)
+
+# Define the writing function
+def write_summary(context_variables):
+    """Writes a summary based on the analysis."""
+    analysis = context_variables.get('analysis', '')
+    summary = f"Here's a detailed report based on the research: {analysis}"
+    return summary
+
+# Writer Agent
+writer_agent = Agent(
+    name="Writer Agent",
+    instructions="You are an agent that writes summaries of research.",
+    functions=[write_summary]
+)
 
 # Orchestrate the workflow
 def orchestrate_workflow(client, url):
-    # …
+    # Step 1: Scrape the website
     scrape_result = client.run(
-        agent=Agent(
-            name="Scraper Agent",
-            instructions="Scrape content from websites.",
-            functions=[scrape_website]
-        ),
+        agent=scraper_agent,
         messages=[{"role": "user", "content": f"Scrape the following website: {url}"}]
     )
     scraped_content = scrape_result.messages[-1]["content"]
 
-    # …
+    # Check for any error during scraping
     if "Error during scraping" in scraped_content:
         return scraped_content
 
-    # …
-    …
-        agent=Agent(
-            name="Research Agent",
-            instructions="Analyze content and extract insights.",
-            functions=[lambda content: f"Summary: {content[:700]}..."]
-        ),
+    # Step 2: Analyze the scraped content
+    research_result = client.run(
+        agent=research_agent,
         messages=[{"role": "user", "content": f"Analyze the following content: {scraped_content}"}]
     )
-    analysis_summary = …
+    analysis_summary = research_result.messages[-1]["content"]
 
-    …
+    # Step 3: Write the summary based on the analysis
+    writer_result = client.run(
+        agent=writer_agent,
+        messages=[{"role": "user", "content": f"Write a summary based on this analysis: {analysis_summary}"}],
+        context_variables={"analysis": analysis_summary}
+    )
+
+    final_summary = writer_result.messages[-1]["content"]
+    return final_summary
 
 # Streamlit App UI
 st.markdown(
     """
     <style>
-    .title { text-align: center; font-size: …
-    .description { text-align: center; font-size: 1.…
-    .…
+    .title { text-align: center; font-size: 3rem; font-weight: bold; }
+    .description { text-align: center; font-size: 1.2rem; color: #555; }
+    .button-container { text-align: center; }
+    .ack { font-size: 0.9rem; color: #888; text-align: center; }
     </style>
     """,
     unsafe_allow_html=True,
 )
 
-st.markdown('<div class="title">🌐 Swarm-based Web Content Analyzer</div>', unsafe_allow_html=True)
-st.markdown('<div class="description">Effortlessly extract, analyze, and summarize web content.</div>', unsafe_allow_html=True)
+st.markdown('<div class="title">🌐 Swarm-based Multi-Agent Web Content Analyzer</div>', unsafe_allow_html=True)
+st.markdown('<div class="description">Effortlessly extract, analyze, and summarize web content using AI-powered multi-agent systems.</div>', unsafe_allow_html=True)
+
+st.write("")
+st.write("")
 
 fetch_openai_api_key()
 
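For reference, the new three-agent pipeline can be exercised outside Streamlit. A minimal sketch, assuming the definitions above are in scope and OPENAI_API_KEY is already set in the environment (the URL is a placeholder):

    from swarm import Swarm

    client = Swarm()  # wraps the default OpenAI client, which reads OPENAI_API_KEY
    report = orchestrate_workflow(client, "https://example.com")
    print(report)

Worth noting: write_summary declares a context_variables parameter, which is the Swarm convention for receiving the context_variables dict passed to client.run; that is how the analysis reaches the Writer Agent as shared state rather than only through the prompt text.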
@@ -111,39 +124,20 @@ if 'OPENAI_API_KEY' in os.environ and os.environ['OPENAI_API_KEY']:
     st.subheader("🌐 Enter the Website URL")
     url = st.text_input("Enter the URL of the website you want to scrape", placeholder="https://example.com")
 
+    # Run Workflow button
+    st.write("")
+    st.markdown(
+        '<div class="button-container"><button style="padding: 10px 20px; font-size: 16px; background-color: #4CAF50; color: white; border: none; border-radius: 5px; cursor: pointer;">🚀 Run Workflow</button></div>',
+        unsafe_allow_html=True,
+    )
+
     if st.button("Run Workflow"):
         if url:
             with st.spinner("Running the multi-agent workflow... This may take a moment."):
-                …
-                …
+                final_report = orchestrate_workflow(client, url)
             st.success("✅ Workflow complete!")
             st.write("### 📜 Final Report:")
-            st.write(…
-
-            # Download options
-            json_data = json.dumps({"summary": final_summary}, indent=4)
-            txt_data = final_summary
-            pdf_data = generate_pdf(final_summary)
-
-
-            st.download_button(
-                label="Download Report as TXT",
-                data=txt_data,
-                file_name="report.txt",
-                mime="text/plain"
-            )
-            st.download_button(
-                label="Download Report as PDF",
-                data=pdf_data,
-                file_name="report.pdf",
-                mime="application/pdf"
-            )
-            st.download_button(
-                label="Download Report as JSON",
-                data=json_data,
-                file_name="report.json",
-                mime="application/json"
-            )
+            st.write(final_report)
         else:
             st.error("❌ Please enter a valid URL.")
 else:
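One caveat on the UI change: the green "Run Workflow" button rendered via st.markdown is static HTML, so clicking it executes no Python; only the plain st.button("Run Workflow") beneath it actually triggers orchestrate_workflow. If a styled, functional button is the goal, a common (version-dependent) workaround is to restyle Streamlit's own button wrapper instead; a hypothetical sketch, meant to be dropped into app.py where streamlit is already imported as st:

    # Restyle the real st.button via its wrapper class instead of drawing a
    # second, non-functional HTML button. The div.stButton selector is an
    # informal Streamlit internal and may change between releases.
    st.markdown(
        """
        <style>
        div.stButton > button {
            background-color: #4CAF50; color: white;
            padding: 10px 20px; border: none; border-radius: 5px;
        }
        </style>
        """,
        unsafe_allow_html=True,
    )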