|
import streamlit as st |
|
from swarm import Swarm, Agent |
|
from bs4 import BeautifulSoup |
|
import requests |
|
import os |
|
|
|
|
|
def fetch_openai_api_key():
    """Look up the OpenAI API key in Streamlit secrets and export it.

    On success the key is copied into the ``OPENAI_API_KEY`` environment
    variable (where the OpenAI/Swarm client expects it) and a success
    banner is shown. If the secret is missing or lookup fails, an error
    banner is displayed instead; nothing is raised to the caller.
    """
    try:
        key = st.secrets.get("OPENAI_API_KEY", "")
    except Exception as e:
        # Broad catch is deliberate: this is a top-level UI boundary and
        # any secrets-backend failure should surface as an error banner.
        st.error(f"Error retrieving OpenAI API Key: {str(e)}")
        return

    if not key:
        st.error("Could not retrieve the OpenAI API Key. Please check your Hugging Face secrets configuration.")
        return

    os.environ['OPENAI_API_KEY'] = key
    st.success("OpenAI API Key retrieved and set successfully!")
|
|
|
|
|
def initialize_swarm_client():
    """Create and return a fresh Swarm client instance.

    The client reads OPENAI_API_KEY from the environment, so
    fetch_openai_api_key() must have run successfully first.
    """
    client = Swarm()
    return client
|
|
|
|
|
def scrape_website(url):
    """Fetch a web page and return its visible text content.

    Parameters
    ----------
    url : str
        Fully-qualified URL of the page to scrape.

    Returns
    -------
    str
        The page's extracted text on success, or a string beginning with
        "Error during scraping:" on any request failure — callers (see
        the workflow orchestration) check for that prefix instead of
        catching exceptions.
    """
    try:
        # `requests.get` has no default timeout; without one an
        # unresponsive host would hang the Streamlit worker forever.
        response = requests.get(url, timeout=30)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        return soup.get_text()
    except requests.exceptions.RequestException as e:
        return f"Error during scraping: {str(e)}"
|
|
|
|
|
# Stage 1 agent: Swarm exposes scrape_website to the LLM as a callable
# tool so the model can fetch raw page text on request.
scraper_agent = Agent(
    name="Scraper Agent",
    instructions="You are an agent that scrapes content from websites.",
    functions=[scrape_website]
)
|
|
|
|
|
def analyze_content(content):
    """Produce a short summary string from scraped page text.

    Only the first 200 characters of *content* are kept; the result is
    the fixed label "Summary of content: " plus that snippet plus "...".
    """
    snippet = content[:200]
    return f"Summary of content: {snippet}..."
|
|
|
|
|
# Stage 2 agent: analyzes the scraped text; analyze_content is the tool
# made available to the model for producing a summary snippet.
research_agent = Agent(
    name="Research Agent",
    instructions="You are an agent that analyzes content and extracts key insights.",
    functions=[analyze_content]
)
|
|
|
|
|
def write_summary(context_variables):
    """Compose the final report text from shared workflow context.

    Reads the 'analysis' entry from *context_variables* (empty string
    when the key is absent) and embeds it in a fixed report template.
    """
    analysis = context_variables.get('analysis', '')
    return f"Here's a detailed report based on the research: {analysis}"
|
|
|
|
|
# Stage 3 agent: turns the analysis into the final report via the
# write_summary tool (which reads 'analysis' from context_variables).
writer_agent = Agent(
    name="Writer Agent",
    instructions="You are an agent that writes summaries of research.",
    functions=[write_summary]
)
|
|
|
|
|
def orchestrate_workflow(client, url):
    """Run the scrape -> analyze -> write pipeline and return the report.

    Each stage is a separate Swarm run against one of the module-level
    agents; the last message of each run feeds the next stage. The
    pipeline short-circuits and returns the scraper's error message if
    the scrape stage reports a request failure.
    """
    scrape_run = client.run(
        agent=scraper_agent,
        messages=[{"role": "user", "content": f"Scrape the following website: {url}"}]
    )
    page_text = scrape_run.messages[-1]["content"]

    # Bail out early if the scraper tool reported a request failure
    # (scrape_website returns this sentinel prefix instead of raising).
    if "Error during scraping" in page_text:
        return page_text

    analysis_run = client.run(
        agent=research_agent,
        messages=[{"role": "user", "content": f"Analyze the following content: {page_text}"}]
    )
    analysis_text = analysis_run.messages[-1]["content"]

    # The analysis is passed both in the prompt and via context_variables
    # so the writer agent's write_summary tool can read it directly.
    report_run = client.run(
        agent=writer_agent,
        messages=[{"role": "user", "content": f"Write a summary based on this analysis: {analysis_text}"}],
        context_variables={"analysis": analysis_text}
    )
    return report_run.messages[-1]["content"]
|
|
|
|
|
# --- Page header and introduction ---
st.title("π Swarm-based Multi-Agent Web Content Analyzer")
st.caption("""
**Effortlessly extract, analyze, and summarize information from any website!**
This app leverages a **multi-agent system** built on OpenAI's Swarm framework to:
- **Scrape content** from websites.
- **Analyze and extract key insights** from the scraped data.
- **Generate concise summaries** tailored to your needs.
Simply provide a URL, and let the agents do the rest!
""")

# --- API key setup: must run before the Swarm client is created below ---
st.subheader("π OpenAI API Key Setup")
fetch_openai_api_key()
|
|
|
|
|
# Gate the workflow UI on a usable API key: the Swarm client (and every
# agent run) requires OPENAI_API_KEY to be present in the environment.
if 'OPENAI_API_KEY' in os.environ and os.environ['OPENAI_API_KEY']:

    client = initialize_swarm_client()

    # URL input for the page to analyze.
    st.subheader("π Enter the Website URL")
    url = st.text_input("Enter the URL of the website you want to scrape", placeholder="https://example.com")

    if st.button("π Run Workflow"):
        if url:
            # The three-agent pipeline can take a while (network fetch +
            # multiple LLM calls), so show a spinner while it runs.
            with st.spinner("Running the multi-agent workflow... This may take a moment."):
                final_report = orchestrate_workflow(client, url)
            # NOTE(review): this literal was split across two source
            # lines (a syntax error, apparently mojibake around an emoji
            # prefix) — rejoined into a single-line string here.
            st.success("β Workflow complete!")
            st.write("### π Final Report:")
            st.write(final_report)
        else:
            st.error("β Please enter a valid URL.")
else:
    st.warning("β οΈ OpenAI API Key not set. Please ensure it's properly configured in Hugging Face secrets.")
|
|
|
|
|
# --- Footer / acknowledgement section ---
st.write("---")
st.markdown("""
### Acknowledgement:
""")
|
|