Update app.py
Browse files
app.py
CHANGED
@@ -72,19 +72,25 @@ runnable_sequence = RunnableSequence(prompt | llm)
|
|
72 |
|
73 |
# Initialize memory
|
74 |
|
75 |
-
# Define your get_text_response function
|
76 |
def get_text_response(user_message, history):
|
77 |
-
#
|
|
|
|
|
|
|
|
|
78 |
memory.save_context({"user_message": user_message}, None)
|
79 |
|
80 |
# Use the RunnableSequence to generate a response
|
81 |
response = runnable_sequence.run(user_message=user_message)
|
82 |
|
83 |
-
#
|
|
|
|
|
|
|
|
|
84 |
memory.save_context(None, {"chat_history": response})
|
85 |
|
86 |
return response
|
87 |
-
|
88 |
# Example usage with Gradio
|
89 |
theme = "default" # or your custom theme
|
90 |
|
|
|
72 |
|
73 |
# Initialize memory
|
74 |
|
|
|
def get_text_response(user_message, history):
    """Generate a chat response for a Gradio chat callback.

    Parameters
    ----------
    user_message : str
        The message typed by the user.
    history : list
        Gradio-supplied chat history (unused here; `memory` tracks the
        conversation context instead).

    Returns
    -------
    str
        The model's response text.

    Raises
    ------
    ValueError
        If the input or the model output is not a string.
    """
    # Ensure user_message is a string
    if not isinstance(user_message, str):
        raise ValueError("user_message must be a string")

    # Use the RunnableSequence to generate a response.
    # NOTE(fix): RunnableSequence exposes .invoke(), not .run(), and it
    # takes a single dict of prompt variables — the original
    # `runnable_sequence.run(user_message=...)` raises AttributeError.
    response = runnable_sequence.invoke({"user_message": user_message})

    # Ensure response is a string
    if not isinstance(response, str):
        raise ValueError("Response must be a string")

    # Save the exchange to memory.
    # NOTE(fix): save_context(inputs, outputs) requires BOTH dicts in one
    # call — the original passed None for one side in each of two separate
    # calls, which raises inside the memory implementation.
    memory.save_context(
        {"user_message": user_message},
        {"chat_history": response},
    )

    return response
|
|
|
94 |
# Example usage with Gradio
|
95 |
theme = "default" # or your custom theme
|
96 |
|