import streamlit as st
from swarm import Swarm, Agent
from bs4 import BeautifulSoup
import requests
from dotenv import load_dotenv
import os

# Load environment variables (including OPENAI_API_KEY, if present) from a local .env file.
load_dotenv()

def set_openai_api_key():
    """Reads an OpenAI API key from the UI and exports it to the environment."""
    api_key_input = st.text_input("Enter your OpenAI API Key", type="password")
    if api_key_input:
        # The OpenAI client used by Swarm picks the key up from the environment.
        os.environ['OPENAI_API_KEY'] = api_key_input
        st.success("OpenAI API Key set successfully!")
    else:
        st.warning("Please enter your OpenAI API Key to continue.")

def initialize_swarm_client():
    """Creates the Swarm client, which wraps the OpenAI API."""
    return Swarm()

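# NOTE (suggestion, not in the original): Streamlit reruns this script on every
# interaction, so the Swarm client is re-created each time. If that ever becomes
# costly, initialize_swarm_client could be wrapped with @st.cache_resource; it is
# left uncached here so a newly entered API key always takes effect.
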
def scrape_website(url):
    """Scrapes the visible text content of the website at `url`."""
    try:
        # A timeout keeps the app from hanging on unresponsive hosts.
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        return soup.get_text()
    except requests.exceptions.RequestException as e:
        return f"Error during scraping: {str(e)}"

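# Illustrative behavior: scrape_website("https://example.com") returns the page's
# visible text on success, or an "Error during scraping: ..." string on failure,
# which the workflow below checks for before continuing.
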
scraper_agent = Agent(
    name="Scraper Agent",
    instructions="You are an agent that scrapes content from websites.",
    functions=[scrape_website]
)

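# The agent does not call scrape_website directly: Swarm exposes it to the model
# as a tool, and the model decides to invoke it based on the user's message.
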
def analyze_content(content):
    """Analyzes the scraped content for key points (placeholder implementation)."""
    # A simple truncation stands in for real analysis here; the Research Agent's
    # own reasoning supplies the actual insights.
    summary = f"Summary of content: {content[:200]}..."
    return summary

research_agent = Agent(
    name="Research Agent",
    instructions="You are an agent that analyzes content and extracts key insights.",
    functions=[analyze_content]
)

def write_summary(context_variables):
    """Writes a summary based on the analysis stored in context_variables."""
    analysis = context_variables.get('analysis', '')
    summary = f"Here's a detailed report based on the research: {analysis}"
    return summary

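# NOTE: Swarm inspects a tool function's signature; because write_summary declares
# a `context_variables` parameter, Swarm injects the run's context_variables dict
# rather than asking the model to supply that argument.
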
writer_agent = Agent(
    name="Writer Agent",
    instructions="You are an agent that writes summaries of research.",
    functions=[write_summary]
)

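# Orchestration: the three agents run in sequence -- scrape, analyze, write --
# with each step's final message fed as input to the next.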
def orchestrate_workflow(client, url):
    # Step 1: ask the Scraper Agent to fetch the page text.
    scrape_result = client.run(
        agent=scraper_agent,
        messages=[{"role": "user", "content": f"Scrape the following website: {url}"}]
    )
    scraped_content = scrape_result.messages[-1]["content"]
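
    # NOTE (assumption, not in the original): a very large page can exceed the
    # model's context window. A simple guard is to truncate before analysis, e.g.:
    # scraped_content = scraped_content[:8000]  # rough character budget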

    # Surface scraping failures to the UI instead of analyzing an error string.
    if "Error during scraping" in scraped_content:
        return scraped_content

    # Step 2: have the Research Agent distill the scraped text.
    research_result = client.run(
        agent=research_agent,
        messages=[{"role": "user", "content": f"Analyze the following content: {scraped_content}"}]
    )
    analysis_summary = research_result.messages[-1]["content"]

    # Step 3: pass the analysis to the Writer Agent, both in the prompt and via
    # context_variables so the write_summary tool can read it.
    writer_result = client.run(
        agent=writer_agent,
        messages=[{"role": "user", "content": f"Write a summary based on this analysis: {analysis_summary}"}],
        context_variables={"analysis": analysis_summary}
    )
    final_summary = writer_result.messages[-1]["content"]
    return final_summary
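
# Each client.run() call above is an independent conversation: state flows between
# steps only through the messages and context_variables assembled in the workflow.
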
st.title("OpenAI Swarm Web Scraping and Content Analysis with a Multi-Agent System")
st.caption("This app scrapes a website, analyzes the content, and generates a summary using a multi-agent system built on OpenAI's Swarm framework.")

st.subheader("OpenAI API Key Setup")
set_openai_api_key()

# Only build the client and show the workflow controls once a key is available.
if os.environ.get('OPENAI_API_KEY'):
    client = initialize_swarm_client()

    url = st.text_input("Enter the URL of the website you want to scrape", placeholder="https://example.com")

    if st.button("Run Workflow"):
        if url:
            with st.spinner("Running the multi-agent workflow..."):
                final_report = orchestrate_workflow(client, url)
            st.success("Workflow complete!")
            st.write("### Final Report:")
            st.write(final_report)
        else:
            st.error("Please enter a valid URL.")
else:
    st.warning("Please set your OpenAI API Key to proceed.")

st.write("---")
st.write("**Acknowledgement:**")