Attempt 5 (ChatGPT revised)
app.py CHANGED
@@ -1,7 +1,12 @@
 import gradio as gr
 
+# Define the theme with custom colors and styles
 theme = gr.themes.Default(
-    primary_hue=gr.themes.Color(
+    primary_hue=gr.themes.Color(
+        c100="#ffedd5", c200="#fed7aa", c300="#ffe09e", c400="#c2814c",
+        c50="#fff8f0", c500="#f97316", c600="#ea580c", c700="#c2410c",
+        c800="#9a3412", c900="#7c2d12", c950="#611f00"
+    ),
     secondary_hue="red",
     neutral_hue="slate",
     font=[gr.themes.GoogleFont('jack armstrong'), 'ui-sans-serif', 'system-ui', 'sans-serif'],
@@ -64,25 +69,17 @@ theme = gr.themes.Default(
     button_cancel_text_color_dark='*button_secondary_text_color'
 )
 
-with gr.Blocks(theme=theme) as demo:
-    ...
-
 from sentence_transformers import SentenceTransformer, util
 import openai
 import os
 
-# potential color customization
-from transformers import pipeline
-
 os.environ["TOKENIZERS_PARALLELISM"] = "false"
 
-
-filename = "output_chess_details.txt"  # Path to the file storing chess-specific details
+filename = "output_chess_details.txt"
 retrieval_model_name = 'output/sentence-transformer-finetuned/'
 
 openai.api_key = os.environ["OPENAI_API_KEY"]
 
-# Attempt to load the necessary models and provide feedback on success or failure
 try:
     retrieval_model = SentenceTransformer(retrieval_model_name)
     print("Models loaded successfully.")
@@ -90,9 +87,6 @@ except Exception as e:
     print(f"Failed to load models: {e}")
 
 def load_and_preprocess_text(filename):
-    """
-    # Load and preprocess text from a file, removing empty lines and stripping whitespace.
-    """
     try:
         with open(filename, 'r', encoding='utf-8') as file:
             segments = [line.strip() for line in file if line.strip()]
@@ -105,37 +99,21 @@ def load_and_preprocess_text(filename):
 segments = load_and_preprocess_text(filename)
 
 def find_relevant_segment(user_query, segments):
-    """
-    # Find the most relevant text segment for a user's query using cosine similarity among sentence embeddings.
-    # This version finds the best match based on the content of the query.
-    """
     try:
-        # Lowercase the query for better matching
         lower_query = user_query.lower()
-
-        # Encode the query and the segments
         query_embedding = retrieval_model.encode(lower_query)
         segment_embeddings = retrieval_model.encode(segments)
-
-        # Compute cosine similarities between the query and the segments
         similarities = util.pytorch_cos_sim(query_embedding, segment_embeddings)[0]
-
-        # Find the index of the most similar segment
        best_idx = similarities.argmax()
-
-        # Return the most relevant segment
         return segments[best_idx]
     except Exception as e:
         print(f"Error in finding relevant segment: {e}")
         return ""
 
 def generate_response(user_query, relevant_segment):
-    """
-    # Generate a response emphasizing the bot's capability in providing information about St. Louis events.
-    """
     try:
         system_message = "You are a chatbot specialized in providing information on local events, pro-Palestine movements, and community outreach, pride movements/events and community resources."
-        user_message = f"Here's the information on St. Louis local events, outreach programs, community resources and local activism and movements
+        user_message = f"Here's the information on St. Louis local events, outreach programs, community resources and local activism and movements: {relevant_segment}"
         messages = [
             {"role": "system", "content": system_message},
             {"role": "user", "content": user_message}
@@ -155,9 +133,6 @@ def generate_response(user_query, relevant_segment):
         return f"Error in generating response: {e}"
 
 def query_model(question):
-    """
-    # Process a question, find relevant information, and generate a response.
-    """
     if question == "":
         return "Welcome to GloBot! Ask me anything about the St. Louis Community!"
     relevant_segment = find_relevant_segment(question, segments)
@@ -166,9 +141,7 @@ def query_model(question):
     response = generate_response(question, relevant_segment)
     return response
 
-# Define the welcome message and specific topics the chatbot can provide information about
 welcome_message = """
-
 ## Your AI-driven assistant for STL community outreach queries. Created by Honna, Davonne, and Maryam of the 2024 Kode With Klossy St.Louis Camp!
 """
 
@@ -179,32 +152,27 @@ topics = """
 - Social Justice Workshops
 - Cultural Festivals
 - Community Outreach Programs
--
+- Environmental Activism
 - Health & Wellness Events
 - How to Support Local Businesses
 """
 
-
-# Display function
-
 def display_image():
     return "Globot_Logo3.jpg"
 
 # Setup the Gradio Blocks interface with custom layout components
-with gr.Blocks(theme=
-    gr.Image(display_image(), width
-    gr.Markdown(welcome_message)
+with gr.Blocks(theme=theme) as demo:
+    gr.Image(display_image(), width=2000, height=600)
+    gr.Markdown(welcome_message)
     with gr.Row():
         with gr.Column():
-    gr.Markdown(topics)
+            gr.Markdown(topics)
     with gr.Row():
         with gr.Column():
            question = gr.Textbox(label="Your question", placeholder="What do you want to ask about?")
            answer = gr.Textbox(label="GloBot Response", placeholder="GloBot will respond here...", interactive=False, lines=10)
            submit_button = gr.Button("Submit")
            submit_button.click(fn=query_model, inputs=question, outputs=answer)
-
 
-
 # Launch the Gradio app to allow user interaction
 demo.launch(share=True)
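
Note on the elided middle of generate_response: the last-but-one hunk jumps from building the messages list straight to the error return, so the unchanged lines that actually call the OpenAI API are not shown in this diff. The sketch below is a hypothetical stand-in for those skipped lines (call_chat_api is an illustrative helper name, not something in the commit); it uses the legacy openai module that the file already configures via openai.api_key, and the model name, max_tokens, and temperature are assumptions rather than committed values.

import os
import openai

openai.api_key = os.environ["OPENAI_API_KEY"]

def call_chat_api(messages):
    """Hypothetical stand-in for the unchanged lines skipped by the hunk above.

    Only the legacy openai module and the messages structure are confirmed by the
    diff; the model name, max_tokens, and temperature below are assumptions.
    """
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",   # assumed model name
        messages=messages,       # the system + user messages built in generate_response
        max_tokens=150,          # assumed response length cap
        temperature=0.2,         # assumed sampling temperature
    )
    # Return the assistant's reply text from the first choice
    return response.choices[0].message["content"].strip()

In the committed file this logic presumably sits inline inside generate_response's try block, immediately before the except branch that returns the "Error in generating response" message.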