Update app.py
app.py
CHANGED
@@ -65,6 +65,9 @@ def question_selected(question):
 @spaces.GPU
 def respond(message, history, system_message, max_tokens, temperature, top_p):
     try:
+        # Initialize chat history if None
+        history = history or []
+
         model, tokenizer = initialize_model()
 
         # Get context from database
@@ -93,10 +96,14 @@ def respond(message, history, system_message, max_tokens, temperature, top_p):
             max_new_tokens=max_tokens
         )[0]['generated_text']
 
-
+        # Add the new exchange to history
+        history.append((message, output.strip()))
+
+        return history
 
     except Exception as e:
-
+        history.append((message, f"An error occurred: {str(e)}"))
+        return history
 
 # Create the Gradio interface
 with gr.Blocks(title="ROS2 Expert Assistant") as demo:
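With this change, respond() returns the accumulated chat history as a list of (message, reply) tuples, which a tuple-style gr.Chatbot can render directly. Below is a minimal sketch of how such a function could be wired into the Blocks interface; the respond() body here is a stand-in for the real model call, and every component name and slider range is an assumption, since the diff only shows the gr.Blocks(title=...) line.

import gradio as gr

def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Stand-in for the Space's respond(): the real one loads the model,
    # pulls context from the database, and generates a reply.
    history = history or []
    history.append((message, f"(echo) {message}"))
    return history

with gr.Blocks(title="ROS2 Expert Assistant") as demo:
    chatbot = gr.Chatbot()                                  # renders the (message, reply) tuples
    msg = gr.Textbox(label="Ask a ROS2 question")           # hypothetical component name
    system_message = gr.Textbox(value="You are a ROS2 expert.", label="System message")
    max_tokens = gr.Slider(1, 2048, value=512, step=1, label="Max new tokens")
    temperature = gr.Slider(0.1, 2.0, value=0.7, label="Temperature")
    top_p = gr.Slider(0.1, 1.0, value=0.95, label="Top-p")

    # Because respond() now returns the whole history, its output can drive the
    # Chatbot directly, and the Chatbot is passed back in as the history input.
    msg.submit(
        respond,
        inputs=[msg, chatbot, system_message, max_tokens, temperature, top_p],
        outputs=chatbot,
    )

if __name__ == "__main__":
    demo.launch()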