# image source: https://www.globesign.com/blog/a-beginners-guide-to-google-website-analyzer/
import os

import requests
import streamlit as st
from bs4 import BeautifulSoup
from swarm import Swarm, Agent


# Function to fetch OpenAI API key
def fetch_openai_api_key():
    """Fetch the OpenAI API key from Hugging Face secrets.

    Copies ``OPENAI_API_KEY`` from ``st.secrets`` into the process
    environment so the Swarm/OpenAI client can pick it up. Surfaces
    problems in the Streamlit UI (warning/error) instead of raising.
    """
    try:
        secret_key = st.secrets.get("OPENAI_API_KEY", "")
        if secret_key:
            os.environ['OPENAI_API_KEY'] = secret_key
        else:
            st.warning("⚠️ OpenAI API Key is missing! Please check your Hugging Face secrets configuration.")
    except Exception as e:
        st.error(f"Error retrieving OpenAI API Key: {str(e)}")


# Initialize the Swarm client
def initialize_swarm_client():
    """Create and return a Swarm client (expects OPENAI_API_KEY to be set)."""
    return Swarm()


# Define the scraping function
def scrape_website(url):
    """Scrapes the content of the website.

    Returns the visible text of the page on success, or a string
    beginning with "Error during scraping" on any request failure
    (callers detect errors by that prefix).
    """
    try:
        # Timeout added so a stalled or unreachable server cannot hang
        # the Streamlit worker indefinitely.
        response = requests.get(url, timeout=30)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        return soup.get_text()  # Return the text content from the HTML
    except requests.exceptions.RequestException as e:
        return f"Error during scraping: {str(e)}"


# Scraper Agent
scraper_agent = Agent(
    name="Scraper Agent",
    instructions="You are an agent that scrapes content from websites.",
    functions=[scrape_website]
)


# Define the analysis function
def analyze_content(content):
    """Analyzes the scraped content for key points.

    Truncates the content to its first 1000 characters and wraps it in
    a summary string.
    """
    summary = f"Summary of content: {content[:1000]}..."
    return summary


# Research Agent
research_agent = Agent(
    name="Research Agent",
    instructions="You are an agent that analyzes content and extracts key insights.",
    functions=[analyze_content]
)


# Define the writing function
def write_summary(context_variables):
    """Writes a summary based on the analysis.

    Reads the 'analysis' key from *context_variables* (empty string if
    absent) and formats it into a report string.
    """
    analysis = context_variables.get('analysis', '')
    summary = f"Here's a detailed report based on the research: {analysis}"
    return summary


# Writer Agent
writer_agent = Agent(
    name="Writer Agent",
    instructions="You are an agent that writes summaries of research.",
    functions=[write_summary]
)


# Orchestrate the workflow
def orchestrate_workflow(client, url):
    """Run the three-agent pipeline (scrape -> analyze -> write) for *url*.

    Returns the final written summary, or the scraper's error string if
    scraping failed (detected by the "Error during scraping" prefix).
    """
    # Step 1: Scrape the website
    scrape_result = client.run(
        agent=scraper_agent,
        messages=[{"role": "user", "content": f"Scrape the following website: {url}"}]
    )
    scraped_content = scrape_result.messages[-1]["content"]

    # Check for any error during scraping
    if "Error during scraping" in scraped_content:
        return scraped_content

    # Step 2: Analyze the scraped content
    research_result = client.run(
        agent=research_agent,
        messages=[{"role": "user", "content": f"Analyze the following content: {scraped_content}"}]
    )
    analysis_summary = research_result.messages[-1]["content"]

    # Step 3: Write the summary based on the analysis
    writer_result = client.run(
        agent=writer_agent,
        messages=[{"role": "user", "content": f"Write a summary based on this analysis: {analysis_summary}"}],
        context_variables={"analysis": analysis_summary}
    )
    final_summary = writer_result.messages[-1]["content"]

    return final_summary


# Streamlit App UI
# NOTE(review): the original CSS payload of this markdown block appears to
# have been stripped during extraction; only the empty literal survives.
st.markdown(
    """
    """,
    unsafe_allow_html=True,
)

# NOTE(review): the HTML wrapper tags around the title/subtitle appear to
# have been stripped during extraction; the surviving inner text is kept.
st.markdown('Swarm-based Web Content Analyzer 🧐', unsafe_allow_html=True)
st.markdown('Effortlessly extract, analyze, and summarize web content using multi-agents.', unsafe_allow_html=True)

fetch_openai_api_key()

# Initialize Swarm client only after API key is set
if 'OPENAI_API_KEY' in os.environ and os.environ['OPENAI_API_KEY']:
    client = initialize_swarm_client()

    # Input field for the website URL
    st.subheader("🌍 Enter the Website URL")
    url = st.text_input("Enter the URL of the website you want to scrape", placeholder="https://example.com")

    # Custom green button (original markup was commented out in the source
    # and its HTML payload appears stripped — left disabled here).
    # st.markdown('...', unsafe_allow_html=True)

    # Add JavaScript for button interaction
    # NOTE(review): the script payload of this block appears stripped by
    # extraction; only the empty literal survives.
    st.markdown(
        """
        """,
        unsafe_allow_html=True,
    )

    # Run the workflow logic
    if st.button("Run Workflow", key="run"):
        if url:
            with st.spinner("Running the multi-agent workflow... This may take a moment."):
                final_report = orchestrate_workflow(client, url)
            st.success("✅ Workflow complete!")
            st.write("### 📜 Final Report:")
            st.write(final_report)
        else:
            st.error("❌ Please enter a valid URL.")
else:
    st.sidebar.warning("⚠️ OpenAI API Key not set. Please check your Hugging Face secrets configuration.")

# Footer with credits
st.divider()
# NOTE(review): the footer's HTML wrapper (and likely a hyperlink on the
# author's name) appears stripped; surviving text is preserved.
st.markdown(
    """
    Acknowledgment: This app is based on Jad Tounsi El Azzoiani's work.
    """,
    unsafe_allow_html=True
)