import streamlit as st
from swarm import Swarm, Agent
from bs4 import BeautifulSoup
import requests
import os


# Function to fetch OpenAI API key from Hugging Face secrets
def fetch_openai_api_key():
    try:
        # Fetch the OpenAI API key using Streamlit's secrets
        secret_key = st.secrets.get("OPENAI_API_KEY", "")

        if secret_key:
            os.environ['OPENAI_API_KEY'] = secret_key
            st.success("OpenAI API Key retrieved and set successfully!")
        else:
            st.error("Could not retrieve the OpenAI API Key. Please check your Hugging Face secrets configuration.")
    except Exception as e:
        st.error(f"Error retrieving OpenAI API Key: {str(e)}")


# Initialize the Swarm client
def initialize_swarm_client():
    return Swarm()


# Define the scraping function
def scrape_website(url):
    """Scrapes the content of the website."""
    try:
        response = requests.get(url, timeout=10)  # Time out rather than hang on unresponsive sites
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        return soup.get_text()  # Return the text content from the HTML
    except requests.exceptions.RequestException as e:
        return f"Error during scraping: {str(e)}"


# Scraper Agent
scraper_agent = Agent(
    name="Scraper Agent",
    instructions="You are an agent that scrapes content from websites.",
    functions=[scrape_website]
)


# Define the analysis function
def analyze_content(content):
    """Analyzes the scraped content for key points."""
    summary = f"Summary of content: {content[:200]}..."  # A simple placeholder summarization
    return summary


# Research Agent
research_agent = Agent(
    name="Research Agent",
    instructions="You are an agent that analyzes content and extracts key insights.",
    functions=[analyze_content]
)


# Define the writing function
def write_summary(context_variables):
    """Writes a summary based on the analysis."""
    analysis = context_variables.get('analysis', '')
    summary = f"Here's a detailed report based on the research: {analysis}"
    return summary


# Writer Agent
writer_agent = Agent(
    name="Writer Agent",
    instructions="You are an agent that writes summaries of research.",
    functions=[write_summary]
)


# Orchestrate the workflow
def orchestrate_workflow(client, url):
    # Step 1: Scrape the website
    scrape_result = client.run(
        agent=scraper_agent,
        messages=[{"role": "user", "content": f"Scrape the following website: {url}"}]
    )
    scraped_content = scrape_result.messages[-1]["content"]

    # Check for any error during scraping
    if "Error during scraping" in scraped_content:
        return scraped_content

    # Step 2: Analyze the scraped content
    research_result = client.run(
        agent=research_agent,
        messages=[{"role": "user", "content": f"Analyze the following content: {scraped_content}"}]
    )
    analysis_summary = research_result.messages[-1]["content"]

    # Step 3: Write the summary based on the analysis
    writer_result = client.run(
        agent=writer_agent,
        messages=[{"role": "user", "content": f"Write a summary based on this analysis: {analysis_summary}"}],
        context_variables={"analysis": analysis_summary}
    )

    final_summary = writer_result.messages[-1]["content"]
    return final_summary


# Streamlit App UI
st.title("🔎 Swarm-based Multi-Agent Web Content Analyzer")
st.caption("""
**Effortlessly extract, analyze, and summarize information from any website!**
This app leverages a **multi-agent system** built on OpenAI's Swarm framework to:
- **Scrape content** from websites.
- **Analyze and extract key insights** from the scraped data.
- **Generate concise summaries** tailored to your needs.

Simply provide a URL, and let the agents do the rest!
""")

# Fetch OpenAI API Key from Hugging Face secrets
st.subheader("🔑 OpenAI API Key Setup")
fetch_openai_api_key()

# Initialize Swarm client only after the API key is set
if 'OPENAI_API_KEY' in os.environ and os.environ['OPENAI_API_KEY']:
    # Initialize the Swarm client after the API key is set
    client = initialize_swarm_client()

    # Input field for the website URL
    st.subheader("🌍 Enter the Website URL")
    url = st.text_input("Enter the URL of the website you want to scrape", placeholder="https://example.com")

    # Run Workflow button
    if st.button("🚀 Run Workflow"):
        if url:
            with st.spinner("Running the multi-agent workflow... This may take a moment."):
                final_report = orchestrate_workflow(client, url)
                st.success("✅ Workflow complete!")
                st.write("### 📜 Final Report:")
                st.write(final_report)
        else:
            st.error("❌ Please enter a valid URL.")
else:
    st.warning("⚠️ OpenAI API Key not set. Please ensure it's properly configured in Hugging Face secrets.")

# Footer with credits
st.write("---")
st.markdown("""
### Acknowledgement:
""")