Update app.py
Browse files
app.py
CHANGED
@@ -1,4 +1,5 @@
|
|
1 |
# image source: https://www.globesign.com/blog/a-beginners-guide-to-google-website-analyzer/
|
|
|
2 |
import streamlit as st
|
3 |
from swarm import Swarm, Agent
|
4 |
from bs4 import BeautifulSoup
|
@@ -100,35 +101,27 @@ def orchestrate_workflow(client, url):
|
|
100 |
st.markdown(
|
101 |
"""
|
102 |
<style>
|
103 |
-
.title { text-align: center; font-size: 2.
|
104 |
-
.description { text-align: center; font-size: 1.
|
105 |
-
.
|
106 |
-
.
|
107 |
-
padding: 10px 20px;
|
108 |
-
font-size: 16px;
|
109 |
-
background-color: #4CAF50;
|
110 |
-
color: white;
|
111 |
-
border: none;
|
112 |
-
border-radius: 5px;
|
113 |
-
cursor: pointer;
|
114 |
-
text-align: center;
|
115 |
-
display: inline-block;
|
116 |
-
}
|
117 |
-
.custom-button:hover {
|
118 |
-
background-color: #45a049;
|
119 |
-
}
|
120 |
-
.ack { font-size: 0.95rem; color: #888; text-align: center; }
|
121 |
</style>
|
122 |
""",
|
123 |
unsafe_allow_html=True,
|
124 |
)
|
125 |
|
|
|
|
|
|
|
|
|
126 |
st.markdown('<div class="title">Swarm-based Web Content Analyzer 🧠</div>', unsafe_allow_html=True)
|
127 |
st.markdown('<div class="description">Effortlessly extract, analyze, and summarize web content using multi-agents.</div>', unsafe_allow_html=True)
|
128 |
|
|
|
|
|
|
|
129 |
fetch_openai_api_key()
|
130 |
|
131 |
-
# Initialize Swarm client only after API key is set
|
132 |
if 'OPENAI_API_KEY' in os.environ and os.environ['OPENAI_API_KEY']:
|
133 |
client = initialize_swarm_client()
|
134 |
|
@@ -136,26 +129,11 @@ if 'OPENAI_API_KEY' in os.environ and os.environ['OPENAI_API_KEY']:
|
|
136 |
st.subheader("🔍 Enter the Website URL")
|
137 |
url = st.text_input("Enter the URL of the website you want to scrape", placeholder="https://example.com")
|
138 |
|
139 |
-
#
|
140 |
-
|
141 |
-
|
142 |
-
# Add JavaScript for button interaction
|
143 |
-
st.markdown(
|
144 |
-
"""
|
145 |
-
<script>
|
146 |
-
document.getElementById("run-workflow").onclick = function() {
|
147 |
-
var placeholder = document.querySelector("input[placeholder='https://example.com']");
|
148 |
-
if (placeholder) {
|
149 |
-
placeholder.focus();
|
150 |
-
}
|
151 |
-
};
|
152 |
-
</script>
|
153 |
-
""",
|
154 |
-
unsafe_allow_html=True,
|
155 |
-
)
|
156 |
|
157 |
# Run the workflow logic
|
158 |
-
if st.button("Run Workflow", key="run"):
|
159 |
if url:
|
160 |
with st.spinner("Running the multi-agent workflow... This may take a moment."):
|
161 |
final_report = orchestrate_workflow(client, url)
|
@@ -172,8 +150,11 @@ st.divider()
|
|
172 |
st.markdown(
|
173 |
"""
|
174 |
<div class="ack">
|
175 |
-
Acknowledgment: This app is based on <a href="https://github.com/jadouse5/openai-swarm-webscraper" target="_blank">Jad Tounsi El Azzoiani's work
|
176 |
</div>
|
177 |
""",
|
178 |
unsafe_allow_html=True
|
179 |
)
|
|
|
|
|
|
|
|
1 |
# image source: https://www.globesign.com/blog/a-beginners-guide-to-google-website-analyzer/
|
2 |
+
|
3 |
import streamlit as st
|
4 |
from swarm import Swarm, Agent
|
5 |
from bs4 import BeautifulSoup
|
|
|
101 |
st.markdown(
|
102 |
"""
|
103 |
<style>
|
104 |
+
.title { text-align: center; font-size: 2.8rem; font-weight: bold; margin-bottom: 20px; }
|
105 |
+
.description { text-align: center; font-size: 1.1rem; color: #555; margin-bottom: 30px; }
|
106 |
+
.section { margin-top: 30px; margin-bottom: 30px; }
|
107 |
+
.ack { font-size: 0.95rem; color: #888; text-align: center; margin-top: 50px; }
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
108 |
</style>
|
109 |
""",
|
110 |
unsafe_allow_html=True,
|
111 |
)
|
112 |
|
113 |
+
# Add cover image
|
114 |
+
st.image("./image-4.png", use_column_width=True)
|
115 |
+
|
116 |
+
# Title and description
|
117 |
st.markdown('<div class="title">Swarm-based Web Content Analyzer 🧠</div>', unsafe_allow_html=True)
|
118 |
st.markdown('<div class="description">Effortlessly extract, analyze, and summarize web content using multi-agents.</div>', unsafe_allow_html=True)
|
119 |
|
120 |
+
# Add some spacing
|
121 |
+
st.markdown('<div class="section"></div>', unsafe_allow_html=True)
|
122 |
+
|
123 |
fetch_openai_api_key()
|
124 |
|
|
|
125 |
if 'OPENAI_API_KEY' in os.environ and os.environ['OPENAI_API_KEY']:
|
126 |
client = initialize_swarm_client()
|
127 |
|
|
|
129 |
st.subheader("🔍 Enter the Website URL")
|
130 |
url = st.text_input("Enter the URL of the website you want to scrape", placeholder="https://example.com")
|
131 |
|
132 |
+
# Add some spacing
|
133 |
+
st.markdown('<div class="section"></div>', unsafe_allow_html=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
134 |
|
135 |
# Run the workflow logic
|
136 |
+
if st.button("🚀 Run Workflow", key="run"):
|
137 |
if url:
|
138 |
with st.spinner("Running the multi-agent workflow... This may take a moment."):
|
139 |
final_report = orchestrate_workflow(client, url)
|
|
|
150 |
st.markdown(
|
151 |
"""
|
152 |
<div class="ack">
|
153 |
+
Acknowledgment: This app is based on <a href="https://github.com/jadouse5/openai-swarm-webscraper" target="_blank">Jad Tounsi El Azzoiani's</a> work.
|
154 |
</div>
|
155 |
""",
|
156 |
unsafe_allow_html=True
|
157 |
)
|
158 |
+
|
159 |
+
|
160 |
+
|