# image source: https://www.globesign.com/blog/a-beginners-guide-to-google-website-analyzer/

import os

import requests
import streamlit as st
from bs4 import BeautifulSoup
from swarm import Swarm, Agent


# Function to fetch the OpenAI API key
def fetch_openai_api_key():
    """Fetch the OpenAI API key from Hugging Face secrets."""
    try:
        secret_key = st.secrets.get("OPENAI_API_KEY", "")
        if secret_key:
            # Expose the key via the environment so the Swarm/OpenAI client can pick it up
            os.environ['OPENAI_API_KEY'] = secret_key
        else:
            st.warning("⚠️ OpenAI API Key is missing! Please check your Hugging Face secrets configuration.")
    except Exception as e:
        st.error(f"Error retrieving OpenAI API Key: {str(e)}")


# Initialize the Swarm client
def initialize_swarm_client():
    return Swarm()


# Define the scraping function
def scrape_website(url):
    """Scrapes the text content of a website."""
    try:
        response = requests.get(url, timeout=10)  # timeout so an unresponsive site can't hang the app
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        return soup.get_text()  # Return only the text content from the HTML
    except requests.exceptions.RequestException as e:
        return f"Error during scraping: {str(e)}"


# Scraper Agent
scraper_agent = Agent(
    name="Scraper Agent",
    instructions="You are an agent that scrapes content from websites.",
    functions=[scrape_website],
)


# Define the analysis function
def analyze_content(content):
    """Analyzes the scraped content for key points."""
    # Keep only the first 1,000 characters so the downstream prompt stays compact
    summary = f"Summary of content: {content[:1000]}..."
    return summary


# Research Agent
research_agent = Agent(
    name="Research Agent",
    instructions="You are an agent that analyzes content and extracts key insights.",
    functions=[analyze_content],
)


# Define the writing function
def write_summary(context_variables):
    """Writes a summary based on the analysis."""
    analysis = context_variables.get('analysis', '')
    summary = f"Here's a detailed report based on the research: {analysis}"
    return summary


# Writer Agent
writer_agent = Agent(
    name="Writer Agent",
    instructions="You are an agent that writes summaries of research.",
    functions=[write_summary],
)


# Orchestrate the workflow: scrape -> analyze -> write
def orchestrate_workflow(client, url):
    # Step 1: Scrape the website
    scrape_result = client.run(
        agent=scraper_agent,
        messages=[{"role": "user", "content": f"Scrape the following website: {url}"}],
    )
    scraped_content = scrape_result.messages[-1]["content"]

    # Bail out early if scraping failed
    if "Error during scraping" in scraped_content:
        return scraped_content

    # Step 2: Analyze the scraped content
    research_result = client.run(
        agent=research_agent,
        messages=[{"role": "user", "content": f"Analyze the following content: {scraped_content}"}],
    )
    analysis_summary = research_result.messages[-1]["content"]

    # Step 3: Write the final summary based on the analysis
    writer_result = client.run(
        agent=writer_agent,
        messages=[{"role": "user", "content": f"Write a summary based on this analysis: {analysis_summary}"}],
        context_variables={"analysis": analysis_summary},
    )
    final_summary = writer_result.messages[-1]["content"]

    return final_summary


# Streamlit App UI
st.markdown(
    """
    """,
    unsafe_allow_html=True,
)

# 1. Add Acknowledgment as an Info Bar
# st.info(
#     "Acknowledgment: This app is based on [Jad Tounsi El Azzoiani's work](https://github.com/jadouse5/openai-swarm-webscraper).",
#     icon="ℹ️"
# )

# 2. Add the title at the top
st.markdown('