DrishtiSharma committed
Commit d268952 · verified
1 Parent(s): dcab038

Update app.py

Files changed (1)
  1. app.py +42 -36
app.py CHANGED
@@ -1,27 +1,23 @@
 import streamlit as st
-
-# Used for Streamlit deployment
 from swarm import Swarm, Agent
-
-
-
 from bs4 import BeautifulSoup
 import requests
-from dotenv import load_dotenv
+from huggingface_hub import HfApi
 import os
 
-# Load environment variables from .env file if available
-load_dotenv()
-
-# Function to set OpenAI API key dynamically in the session state
-def set_openai_api_key():
-    api_key_input = st.text_input("Enter your OpenAI API Key", type="password")
-    if api_key_input:
-        os.environ['OPENAI_API_KEY'] = api_key_input
-        st.success("OpenAI API Key set successfully!")
-    else:
-        st.warning("Please enter your OpenAI API Key to continue.")
-
+# Function to fetch OpenAI API key from Hugging Face secrets
+def fetch_openai_api_key():
+    try:
+        # Replace 'OPENAI_API_KEY' with the exact key name you use in Hugging Face secrets
+        api = HfApi()
+        secret_key = api.get_secret("OPENAI_API_KEY")
+        if secret_key:
+            os.environ['OPENAI_API_KEY'] = secret_key
+            st.success("OpenAI API Key retrieved and set successfully!")
+        else:
+            st.error("Could not retrieve the OpenAI API Key. Please check your Hugging Face secrets.")
+    except Exception as e:
+        st.error(f"Error retrieving OpenAI API Key: {str(e)}")
 
 # Initialize the Swarm client
 def initialize_swarm_client():
@@ -42,7 +38,7 @@ def scrape_website(url):
 scraper_agent = Agent(
     name="Scraper Agent",
     instructions="You are an agent that scrapes content from websites.",
-    functions=[scrape_website] # The agent can use the scrape_website function
+    functions=[scrape_website]
 )
 
 # Define the analysis function
@@ -55,7 +51,7 @@ def analyze_content(content):
 research_agent = Agent(
     name="Research Agent",
     instructions="You are an agent that analyzes content and extracts key insights.",
-    functions=[analyze_content] # The agent can use the analyze_content function
+    functions=[analyze_content]
 )
 
 # Define the writing function
@@ -69,7 +65,7 @@ def write_summary(context_variables):
 writer_agent = Agent(
     name="Writer Agent",
     instructions="You are an agent that writes summaries of research.",
-    functions=[write_summary] # The agent can use the write_summary function
+    functions=[write_summary]
 )
 
 # Orchestrate the workflow
@@ -96,41 +92,51 @@ def orchestrate_workflow(client, url):
     writer_result = client.run(
         agent=writer_agent,
         messages=[{"role": "user", "content": f"Write a summary based on this analysis: {analysis_summary}"}],
-        context_variables={"analysis": analysis_summary} # Pass the analysis to the writer agent
+        context_variables={"analysis": analysis_summary}
     )
 
     final_summary = writer_result.messages[-1]["content"]
     return final_summary
 
 # Streamlit App UI
-st.title("🔍 OpenAI SWARM Web Scraping and Content Analysis with Multi-Agent System")
-st.caption("This app scrapes a website, analyzes the content, and generates a summary using a multi-agent system built on OpenAI's Swarm framework.")
-
-# Input for OpenAI API Key
-st.subheader("OpenAI API Key Setup")
-set_openai_api_key()
+st.title("🌐 Swarm-based Multi-Agent Web Scrapper and Content Analyzer")
+st.caption("""
+**Effortlessly extract, analyze, and summarize information from any website!**
+This app leverages a **multi-agent system** built on OpenAI's Swarm framework to:
+- **Scrape content** from websites.
+- **Analyze and extract key insights** from the scraped data.
+- **Generate concise summaries** tailored to your needs.
+Simply provide a URL, and let the agents do the rest!
+""")
+
+# Fetch OpenAI API Key from Hugging Face secrets
+st.subheader("🔑 OpenAI API Key Setup")
+fetch_openai_api_key()
 
 # Initialize Swarm client only after API key is set
 if 'OPENAI_API_KEY' in os.environ and os.environ['OPENAI_API_KEY']:
-    # Initialize the Swarm client after API key is entered
+    # Initialize the Swarm client after API key is set
     client = initialize_swarm_client()
 
     # Input field for the website URL
+    st.subheader("🌍 Enter the Website URL")
     url = st.text_input("Enter the URL of the website you want to scrape", placeholder="https://example.com")
 
     # Run Workflow button
-    if st.button("Run Workflow"):
+    if st.button("🚀 Run Workflow"):
         if url:
-            with st.spinner("Running the multi-agent workflow..."):
+            with st.spinner("Running the multi-agent workflow... This may take a moment."):
                 final_report = orchestrate_workflow(client, url)
-            st.success("Workflow complete!")
-            st.write("### Final Report:")
+            st.success("✅ Workflow complete!")
+            st.write("### 📜 Final Report:")
             st.write(final_report)
         else:
-            st.error("Please enter a valid URL.")
+            st.error("❌ Please enter a valid URL.")
 else:
-    st.warning("Please set your OpenAI API Key to proceed.")
+    st.warning("⚠️ OpenAI API Key not set. Please ensure it's properly configured in Hugging Face secrets.")
 
 # Footer with credits
 st.write("---")
-st.write("**Acknowledgement:**")
+st.markdown("""
+### Acknowledgement:
+""")