Update app.py
app.py CHANGED
@@ -171,18 +171,19 @@ Context:
                 model=model_name,
                 messages=messages,
                 max_tokens=500,
-                stream=True
+                # stream=True
             ):
-                if hasattr(chunk.choices[0].delta, 'content'):
-                    content = chunk.choices[0].delta.content
-                    if content is not None:
-                        response_text += content
-                        yield response_text
+                return stream.choices[0].message.content
+                # if hasattr(chunk.choices[0].delta, 'content'):
+                #     content = chunk.choices[0].delta.content
+                #     if content is not None:
+                #         response_text += content
+                #         yield response_text
 
         except Exception as e:
             error_msg = f"Error during query processing: {str(e)}"
             logger.error(error_msg)
-            yield error_msg
+            return error_msg
 
 
 
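In plain terms, this hunk drops stream=True from the chat-completion call: instead of iterating over delta chunks and yielding the accumulated text, the handler now makes one blocking request and returns choices[0].message.content. A minimal sketch of the two call styles, assuming an OpenAI-compatible chat-completions client; the client setup, the messages construction, and the function names below are illustrative assumptions, not code from app.py:

from openai import OpenAI

client = OpenAI()  # assumption: any OpenAI-compatible endpoint exposes the same interface

def answer_once(model_name: str, messages: list) -> str:
    # New behaviour in this commit: a single blocking request; the full
    # answer is read from the message object of the first choice.
    response = client.chat.completions.create(
        model=model_name,
        messages=messages,
        max_tokens=500,
    )
    return response.choices[0].message.content

def answer_streamed(model_name: str, messages: list):
    # Old behaviour (now commented out): iterate over delta chunks and yield
    # the accumulated text so the caller can render it incrementally.
    response_text = ""
    for chunk in client.chat.completions.create(
        model=model_name,
        messages=messages,
        max_tokens=500,
        stream=True,
    ):
        if hasattr(chunk.choices[0].delta, "content"):
            content = chunk.choices[0].delta.content
            if content is not None:
                response_text += content
                yield response_text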
@@ -205,10 +206,11 @@ def create_gradio_interface(rag_system: AdvancedRAGSystem):
 
     def query_streaming(question: str):
         try:
-            for response in rag_system.query(question):
-                yield response
+            return rag_system.query(question)
+            # for response in rag_system.query(question):
+            #     yield response
         except Exception as e:
-            yield f"Error: {str(e)}"
+            return f"Error: {str(e)}"
 
     def update_history(question: str):
         try:
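The hunk above makes the Gradio-side wrapper match that change. A handler written as a generator streams each yielded value into its output component, and Gradio only does that when the event runs through its queue; a plain function updates the output once when it returns. A minimal sketch of the two shapes, assuming rag_system is the AdvancedRAGSystem instance built earlier in app.py and that its query() now returns a complete string:

# Illustrative sketch; `rag_system` is assumed to be in scope, as in app.py.
def query_plain(question: str) -> str:
    # New shape: one return value, so the answer textbox updates once.
    try:
        return rag_system.query(question)
    except Exception as e:
        return f"Error: {str(e)}"

def query_generator(question: str):
    # Old shape: each yield pushes a partial answer to the output component.
    # This only streams when the event goes through Gradio's queue.
    try:
        for partial in rag_system.query(question):  # would need query() to yield chunks
            yield partial
    except Exception as e:
        yield f"Error: {str(e)}"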
@@ -274,9 +276,9 @@ def create_gradio_interface(rag_system: AdvancedRAGSystem):
         fn=query_streaming,
         inputs=[question_input],
         outputs=[answer_output],
-        api_name="stream_response",
-        queue=False,
-        show_progress=False
+        # api_name="stream_response",
+        # queue=False,
+        # show_progress=False
     ).then(
         fn=update_history,
         inputs=[question_input],
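The wiring itself keeps its two-step chain: the same trigger first runs query_streaming to fill the answer box, then update_history. The commented-out keywords are standard click() options in Gradio: api_name names the event in the Space's API, queue=False would bypass the queue (which a generator handler needs), and show_progress controls the loading indicator. A small self-contained sketch of the pattern; the button, the history component, and the stub handlers are assumptions, since the surrounding Blocks layout is not part of this diff:

import gradio as gr

# Stub handlers standing in for the real ones defined earlier in app.py.
def query_streaming(question: str) -> str:
    return f"Answer to: {question}"

def update_history(question: str) -> str:
    return f"Last question: {question}"

with gr.Blocks() as demo:
    question_input = gr.Textbox(label="Question")
    answer_output = gr.Textbox(label="Answer")
    history_output = gr.Textbox(label="History")  # assumption: not shown in this hunk
    ask_button = gr.Button("Ask")                 # assumption: the real trigger is not shown

    ask_button.click(
        fn=query_streaming,
        inputs=[question_input],
        outputs=[answer_output],
        # api_name="stream_response",  # would expose the event under this name in the API
        # queue=False,                 # would skip the queue; generator handlers require it
        # show_progress=False          # would hide the loading animation on the output
    ).then(
        fn=update_history,             # runs after the first handler finishes
        inputs=[question_input],
        outputs=[history_output],
    )

if __name__ == "__main__":
    demo.launch()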