SalexAI committed on
Commit f0350d4 · verified
1 Parent(s): eb0ccd7

Update app.py

Files changed (1)
  1. app.py +15 -10
app.py CHANGED
@@ -79,7 +79,12 @@ def stream_response(message, history, character):
     try:
         response = requests.post(API_URL, headers=HEADERS, json=payload)
         response.raise_for_status()
-        content = response.json()["choices"][0]["message"]["content"]
+        data = response.json()
+        if "choices" not in data:
+            # Yield the full response data for debugging.
+            yield f"Error: API returned an unexpected response: {data}"
+            return
+        content = data["choices"][0]["message"]["content"]
 
         stream_response = ""
         # Simulate streaming by yielding token-by-token.
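The new branch guards against API payloads that lack a "choices" key instead of raising a KeyError. A minimal sketch of that behavior, where parse_reply is a made-up stand-in for the relevant slice of stream_response and both sample payloads are invented for illustration:

# Sketch only: parse_reply and the sample dicts are hypothetical, not app code.
def parse_reply(data):
    """Yield an error string when "choices" is missing, otherwise the content."""
    if "choices" not in data:
        # Surface the raw payload for debugging, as the commit does.
        yield f"Error: API returned an unexpected response: {data}"
        return
    yield data["choices"][0]["message"]["content"]

print(next(parse_reply({"error": "rate limited"})))
# -> Error: API returned an unexpected response: {'error': 'rate limited'}
print(next(parse_reply({"choices": [{"message": {"content": "Ahoy!"}}]})))
# -> Ahoy!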
@@ -93,21 +98,19 @@ def stream_response(message, history, character):
 def chat(user_message, history, character):
     """
     Appends the user message to the conversation history, then streams the assistant's reply.
-    The conversation history is a list of dictionaries with keys "role" and "content".
     """
     # Ensure history is a list.
     history = history or []
     history = history.copy()
-    # Append user message.
+    # Append the user's message.
     history.append({"role": "user", "content": user_message})
 
-    # Create a generator for the streaming response.
     full_response = ""
     for partial in stream_response(user_message, history, character):
         full_response = partial
         # Yield the conversation updated with the current assistant response.
         yield history + [{"role": "assistant", "content": full_response}]
-    # Once done, append the final assistant message.
+    # Append the final assistant message.
     history.append({"role": "assistant", "content": full_response})
     return history
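For context on what this generator hands back to the UI at each step: a runnable sketch of the yield contract, using a fake token streamer in place of the real stream_response (all names, tokens, and the character value below are illustrative only):

# Sketch only: fake_stream stands in for stream_response; tokens are made up.
def fake_stream(message, history, character):
    partial = ""
    for token in ("Ahoy", ",", " matey!"):
        partial += token
        yield partial

def chat(user_message, history, character):
    history = (history or []).copy()
    history.append({"role": "user", "content": user_message})
    full_response = ""
    for partial in fake_stream(user_message, history, character):
        full_response = partial
        # Each yield is the full message list, ending with the partial reply.
        yield history + [{"role": "assistant", "content": full_response}]
    history.append({"role": "assistant", "content": full_response})

for update in chat("Hello", [], "Pirate"):
    print(update[-1]["content"])
# -> Ahoy
# -> Ahoy,
# -> Ahoy, matey!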
 
@@ -148,10 +151,12 @@ with gr.Blocks(css=css) as demo:
     # State to hold conversation history.
     state = gr.State([])
 
-    # When user submits text (via button or Enter), update chat.
-    msg.submit(fn=lambda user_message, history, choice: chat(user_message, history, clean_choice(choice)),
-               inputs=[msg, state, model_dropdown],
-               outputs=[chatbot, state],
-               show_progress=True)
+    # When user submits text, update chat.
+    msg.submit(
+        fn=lambda user_message, history, choice: chat(user_message, history, clean_choice(choice)),
+        inputs=[msg, state, model_dropdown],
+        outputs=[chatbot, state],
+        show_progress=True
+    )
 
 demo.launch(share=True)
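And a hedged sketch of how the reformatted msg.submit wiring fits together end to end; the component definitions, the echo-style chat body, and the clean_choice stub below are assumptions for illustration, and only the submit call itself mirrors the diff:

# Sketch only: components and the echo reply are stand-ins for the real app.
import gradio as gr

def clean_choice(choice):
    # Hypothetical stand-in for the app's dropdown-cleaning helper.
    return choice.strip()

def chat(user_message, history, character):
    history = (history or []) + [{"role": "user", "content": user_message}]
    reply = f"({character}) you said: {user_message}"
    # Yield chatbot messages and updated state, matching the two outputs below.
    yield history + [{"role": "assistant", "content": reply}], history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages")
    msg = gr.Textbox()
    model_dropdown = gr.Dropdown(choices=["Pirate", "Wizard"], value="Pirate")
    state = gr.State([])

    msg.submit(
        fn=lambda user_message, history, choice: chat(user_message, history, clean_choice(choice)),
        inputs=[msg, state, model_dropdown],
        outputs=[chatbot, state],
        show_progress=True,
    )

demo.launch()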
 