Spaces:
Runtime error
Update app.py
app.py CHANGED
@@ -6,7 +6,7 @@ from langchain_core.output_parsers import StrOutputParser
 from langchain_openai import ChatOpenAI
 from langchain_community.graphs import Neo4jGraph
 from typing import List, Tuple
-from
+from pydantic import BaseModel, Field
 from langchain_core.messages import AIMessage, HumanMessage
 from langchain_core.runnables import (
     RunnableBranch,
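The removed line at old line 9 appears truncated in this view (a bare "from ..."), and the replacement pulls in BaseModel and Field from pydantic. The hunk does not show how they are used; a common pattern in LangChain + Neo4j graph apps is a pydantic schema handed to with_structured_output for entity extraction. A minimal sketch under that assumption (the Entities class, prompt text, and model name are illustrative, not from this commit):

from typing import List
from pydantic import BaseModel, Field
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

# Hypothetical schema; how BaseModel/Field are actually used is outside this hunk.
class Entities(BaseModel):
    """Entities to look up in the knowledge graph."""
    names: List[str] = Field(..., description="Person and organization names mentioned in the question")

llm = ChatOpenAI(temperature=0, model="gpt-4o-mini")  # model name is an assumption
prompt = ChatPromptTemplate.from_messages([
    ("system", "Extract all person and organization entities from the text."),
    ("human", "{question}"),
])
entity_chain = prompt | llm.with_structured_output(Entities)
# entity_chain.invoke({"question": "..."}) would return an Entities instance.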
@@ -203,20 +203,19 @@ def generate_audio_elevenlabs(text):
     return None


+# Define function to generate a streaming response
 def chat_with_bot(messages, user_message):
     # Add user message to the chat history
     messages.append((user_message, ""))
-
-    # Generate the response in a streaming manner
     response = get_response(user_message)

-    # Simulate streaming by
+    # Simulate streaming response by iterating over each character in the response
     for character in response:
         messages[-1] = (user_message, messages[-1][1] + character)
-        yield messages  #
-        time.sleep(0.05)  #
+        yield messages  # Stream each character
+        time.sleep(0.05)  # Adjust delay as needed for real-time effect

-    yield messages  #
+    yield messages  # Final yield to ensure full response is displayed


 # Create the Gradio Blocks interface
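chat_with_bot is a generator: because it yields, Gradio streams each intermediate value of messages to the bound output component, so the reply appears character by character even though get_response returns the complete string up front. A self-contained sketch of the same pattern, assuming a tuple-style gr.Chatbot and a stand-in get_response (the real one is the app's RAG chain, not shown here):

import time
import gradio as gr

def get_response(user_message):
    # Stand-in for the app's actual chain; returns a complete string.
    return f"Echo: {user_message}"

def chat_with_bot(messages, user_message):
    messages.append((user_message, ""))
    response = get_response(user_message)
    for character in response:
        messages[-1] = (user_message, messages[-1][1] + character)
        yield messages        # each yield re-renders the Chatbot with the partial reply
        time.sleep(0.05)      # pacing only; the full response already exists in memory
    yield messages            # final state after the loop completes

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    question_input = gr.Textbox(label="Question")
    question_input.submit(chat_with_bot, inputs=[chatbot, question_input], outputs=chatbot)

demo.launch()

The trailing yield is redundant with the last in-loop yield, but it guarantees the finished reply is rendered even if the loop body changes.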
@@ -250,9 +249,9 @@ with gr.Blocks() as demo:
     #clean_btn.click(fn=clear_fields, inputs=[], outputs=[question_input, response_output])

     # Define interactions
-    get_response_btn.click(fn=chat_with_bot, inputs=[chatbot, question_input], outputs=
-    generate_audio_btn.click(fn=generate_audio_elevenlabs, inputs=
-    clean_btn.click(fn=clear_fields, inputs=[], outputs=[chatbot, question_input,
+    get_response_btn.click(fn=chat_with_bot, inputs=[chatbot, question_input], outputs=chatbot)
+    generate_audio_btn.click(fn=generate_audio_elevenlabs, inputs=question_input, outputs=audio_output)
+    clean_btn.click(fn=clear_fields, inputs=[], outputs=[chatbot, question_input, audio_output])

     # Launch the Gradio interface
     demo.launch(show_error=True)
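Each outputs= list has to line up with what the handler returns or yields: chat_with_bot yields the updated history into chatbot, generate_audio_elevenlabs feeds audio_output, and clear_fields resets three components at once. Its body is outside this diff; a hypothetical version consistent with the wiring above would be:

def clear_fields():
    # One return value per wired output: chat history, question textbox, audio player.
    return [], "", None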