Update app.py
app.py
CHANGED
@@ -4,23 +4,55 @@ from gradio_client import Client
# Call the existing model
client = Client("Futuresony/Mr.Events")

-
-
-
-
-
-
-
-#
+# Function to interact with the hosted model
+def chat_with_model(user_input, chat_history):
+    """
+    Sends user input and chat history to the hosted model and returns the response
+    and updated history.
+    """
+    # The hosted Gradio app expects the chat history as a list of [user, assistant] pairs.
+    # Initial call will have chat_history as an empty list.
+    # Subsequent calls will have chat_history including previous turns.
+
+    print(f"Client sending query: {user_input}")
+    print(f"Client sending history: {chat_history}")
+
+    try:
+        # Call the hosted model's chat endpoint
+        # Pass user_input and chat_history as positional arguments
+        result = client.predict(
+            user_input,
+            chat_history,  # Pass the history from the client's Chatbot
+            api_name="/chat"
+        )
+        print(f"Client received raw result: {result}")
+
+        # The hosted app's `chat` function returns the final response string.
+        # We need to append the user input and the model's response to the history.
+        updated_history = chat_history + [[user_input, result]]
+        return "", updated_history  # Return empty string for textbox and updated history
+
+    except Exception as e:
+        print(f"Error during client prediction: {e}")
+        import traceback
+        print(traceback.format_exc())
+        # Append user input and an error message to history
+        error_message = f"An error occurred while communicating with the model: {e}"
+        updated_history = chat_history + [[user_input, error_message]]
+        return "", updated_history
+
+
+# Create the desktop-friendly interface
with gr.Blocks() as demo:
    gr.Markdown("## 💬 Test the ABSA Model Chat")
-
-
-
-        submit_btn = gr.Button("Send")
-    with gr.Column(scale=5):
-        output_text = gr.Textbox(label="Model Response", lines=6)
+    chatbot = gr.Chatbot(height=400)  # Chatbot to display conversation
+    msg = gr.Textbox(label="Type your message")  # Textbox for user input
+    clear = gr.ClearButton([msg, chatbot])  # Button to clear

-
+    # Link the input, button, and chatbot
+    # The fn will receive the textbox value and the chatbot history.
+    # It will return an empty string for the textbox and the updated history for the chatbot.
+    msg.submit(chat_with_model, [msg, chatbot], [msg, chatbot])

-
+# Launch the Gradio interface
+demo.launch(debug=True, show_error=True)  # Corrected launch call and added show_error
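As a quick sanity check, the same hosted endpoint can be exercised from a plain Python session before wiring it into the Blocks UI. The sketch below is illustrative only: it assumes, as the diff does, that the Space Futuresony/Mr.Events exposes a /chat API that takes the message and the [user, assistant] history as positional inputs and returns the response as a string.

# Illustrative sanity check (not part of app.py): call the hosted endpoint directly.
from gradio_client import Client

client = Client("Futuresony/Mr.Events")

history = []  # the hosted app expects previous turns as [user, assistant] pairs
reply = client.predict("Hello!", history, api_name="/chat")  # same call shape as chat_with_model
print(reply)  # response string returned by the hosted /chat endpoint

history.append(["Hello!", reply])  # carry the pair forward for a follow-up turn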