Update app.py
app.py CHANGED
@@ -24,7 +24,7 @@ class GeminiRAGSystem:
         self.chunks = []
         self.dataset_loaded = False
         self.loading_error = None
-        self.gemini_api_key = os.getenv("
+        self.gemini_api_key = os.getenv("GEMINI_API_KEY")
 
         # Initialize embedding model
         try:
@@ -43,11 +43,11 @@ class GeminiRAGSystem:
         """Load dataset in a background thread"""
         def load_task():
             try:
-                # Load dataset directly
+                # Load dataset directly
                 dataset = load_dataset(
                     DATASET_NAME,
                     split='train',
-
+                    download_mode="force_redownload"  # Fixes extraction error
                 )
 
                 # Process dataset
@@ -96,10 +96,10 @@ class GeminiRAGSystem:
         """Generate response with robust error handling"""
         if not self.dataset_loaded:
             if self.loading_error:
-                return f" Dataset loading failed: {self.loading_error}"
-            return " Dataset is still loading, please wait..."
+                return f"⚠️ Dataset loading failed: {self.loading_error}"
+            return "⚠️ Dataset is still loading, please wait..."
         if not self.gemini_api_key:
-            return " Please set your Gemini API key in environment variables"
+            return "🔑 Please set your Gemini API key in environment variables"
 
         context = self.get_relevant_context(query)
         if not context:
@@ -116,7 +116,7 @@ class GeminiRAGSystem:
             response = model.generate_content(prompt)
             return response.text
         except Exception as e:
-            return f" API Error: {str(e)}"
+            return f"⚠️ API Error: {str(e)}"
 
 # Initialize system
 try:
@@ -126,21 +126,35 @@ except Exception as e:
 
 # Create interface
 with gr.Blocks(title="UE Chatbot") as app:
-    gr.Markdown("# UE 24
+    gr.Markdown("# UE 24 Hour Service")
 
     with gr.Row():
-        chatbot = gr.Chatbot(height=500
+        chatbot = gr.Chatbot(height=500, label="Chat History",
+                             avatar_images=(None, (None, "https://huggingface.co/spaces/groq/Groq-LLM/resolve/main/groq_logo.png")),
+                             bubble_full_width=False)
 
     with gr.Row():
-        query = gr.Textbox(label="Your question",
+        query = gr.Textbox(label="Your question",
+                           placeholder="Ask your question...",
+                           scale=4)
         submit_btn = gr.Button("Submit", variant="primary", scale=1)
 
     with gr.Row():
         clear_btn = gr.Button("Clear Chat", variant="secondary")
 
     # Status indicator
-    status = gr.Textbox(label="System Status",
+    status = gr.Textbox(label="System Status",
+                        value="Initializing...",
+                        interactive=False)
+
+    # Update status periodically
+    def update_status():
+        if rag_system.loading_error:
+            return f"Error: {rag_system.loading_error}"
+        return "Ready" if rag_system.dataset_loaded else "Loading dataset..."
 
+    app.load(update_status, None, status, every=1)
+
     # Event handlers
     def respond(message, chat_history):
         try:
@@ -154,17 +168,9 @@ with gr.Blocks(title="UE Chatbot") as app:
     def clear_chat():
         return []
 
-    def get_status():
-        if rag_system.loading_error:
-            return f"Error: {rag_system.loading_error}"
-        return "Ready" if rag_system.dataset_loaded else "Loading dataset..."
-
    submit_btn.click(respond, [query, chatbot], [query, chatbot])
    query.submit(respond, [query, chatbot], [query, chatbot])
    clear_btn.click(clear_chat, outputs=chatbot)
-
-    # Periodically check status (hidden from user)
-    app.load(get_status, None, status, every=1)
 
 if __name__ == "__main__":
     app.launch(share=True)
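Note: the hunk at line 27 swaps the inlined lookup for an environment variable, and the branch at line 101 assumes that variable is present. A rough, self-contained sketch of how such a key is typically wired into the google-generativeai client; the model name and prompt below are placeholders, not part of this commit:

import os
import google.generativeai as genai

# Assumption: GEMINI_API_KEY is set as a Space secret / environment variable.
api_key = os.getenv("GEMINI_API_KEY")
if not api_key:
    raise RuntimeError("GEMINI_API_KEY is not set")

genai.configure(api_key=api_key)
model = genai.GenerativeModel("gemini-1.5-flash")  # placeholder model name
response = model.generate_content("Hello")  # response exposes the generated text via .text
print(response.text)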
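The status wiring added at lines 146-156 replaces the old hidden get_status checker with Gradio's polling hook. A minimal standalone sketch of the same pattern, assuming a Gradio version whose Blocks.load accepts every= (the queue call covers releases that require it for polling); the uptime status shown here is illustrative only:

import time
import gradio as gr

start = time.time()

def poll_status():
    # Illustrative status value; the Space instead reports dataset-loading state.
    return f"Running for {int(time.time() - start)} s"

with gr.Blocks() as demo:
    status = gr.Textbox(label="System Status", value="Initializing...", interactive=False)
    # Re-run poll_status every second and push the result into the textbox.
    demo.load(poll_status, None, status, every=1)

demo.queue()  # some Gradio versions require the queue for every= polling

if __name__ == "__main__":
    demo.launch()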