vortex123 committed on
Commit 12fa79e · verified · 1 Parent(s): 97eaf87

Update app.py

Files changed (1)
  1. app.py +22 -12
app.py CHANGED
@@ -19,7 +19,7 @@ MODELS = {model_name: genai.GenerativeModel(model_name=model_name) for model_nam
 async def respond(message, history, selected_model):
     model = MODELS.get(selected_model)
     if not model:
-        yield {"role": "assistant", "content": "Error: Selected model not available."}, ""
+        yield {"role": "assistant", "content": "Error: Selected model not available."}
         return

     try:
@@ -28,9 +28,9 @@ async def respond(message, history, selected_model):
         full_response = ""
         for chunk in response_stream:
             full_response += (chunk.text or "")
-            yield {"role": "assistant", "content": full_response}, ""  # messages format
+            yield {"role": "assistant", "content": full_response}
     except Exception as e:
-        yield {"role": "assistant", "content": f"Error during API call: {e}"}, ""
+        yield {"role": "assistant", "content": f"Error during API call: {e}"}

 async def respond_thinking(message, history, selected_model):
     if "thinking" not in selected_model:
@@ -56,19 +56,16 @@ async def respond_thinking(message, history, selected_model):
             else:
                 model_response_text += (part.text or "")

-        yield {"role": "assistant", "content": model_response_text}, thinking_process_text
+        return {"role": "assistant", "content": model_response_text}, thinking_process_text
     except Exception as e:
-        yield {"role": "assistant", "content": f"Error during API call: {e}"}, f"Error during API call: {e}"
+        return {"role": "assistant", "content": f"Error during API call: {e}"}, f"Error during API call: {e}"

 async def process_message(message, history, model_name):
     if "thinking" in model_name:
-        generator = respond_thinking(message, history, model_name)
-        thinking_output_content = ""
-        async for response, thinking in generator:
-            yield response, thinking
-        # thinking_output_content could be processed further here if needed
+        response, thinking = await respond_thinking(message, history, model_name)
+        yield response, thinking
     else:
-        async for response, _ in respond(message, history, model_name):
+        async for response in respond(message, history, model_name):
             yield response, ""

 def clear_thinking():
@@ -98,14 +95,27 @@ with gr.Blocks() as demo:
     def change_chatbot(model_name):
         return gr.ChatInterface.update()  # No need to change the processing function here

+    async def update_thinking_output(history):
+        if history:
+            last_turn = history[-1]
+            if last_turn[0] is not None and last_turn[1] is not None:
+                return last_turn[1]
+        return ""
+
     model_selection.change(
         change_chatbot,
         inputs=[model_selection],
         outputs=[chatbot],
     )

+    chatbot.change(
+        update_thinking_output,
+        inputs=[chatbot],
+        outputs=[thinking_output]
+    )
+
+    clear_button.click(lambda: None, None, chatbot, queue=False)
     clear_button.click(clear_thinking, outputs=[thinking_output], queue=False)
-    clear_button.click(lambda: None, None, chatbot, queue=False)  # Clear chatbot history

 if __name__ == "__main__":
     demo.launch()
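
For reference, the chatbot.change wiring added in this commit follows the standard Gradio Blocks event pattern: whenever the Chatbot value changes, a callback reads the history and refreshes a side panel. Below is a minimal, self-contained sketch of that pattern; the echo and latest_reply handlers and the component names are illustrative stand-ins, not code from this Space, which streams Gemini responses instead.

import gradio as gr

# Illustrative sketch of the Chatbot.change -> Textbox pattern; not the Space's code.
def echo(message, history):
    # Dummy handler standing in for the Gemini streaming functions.
    history = (history or []) + [(message, f"echo: {message}")]
    return history, ""

def latest_reply(history):
    # Mirror the last assistant reply into the side panel whenever the chat updates.
    if history:
        return history[-1][1] or ""
    return ""

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    side_panel = gr.Textbox(label="Thinking", interactive=False)
    msg = gr.Textbox(label="Message")
    clear_button = gr.Button("Clear")

    msg.submit(echo, inputs=[msg, chatbot], outputs=[chatbot, msg])
    # Same event wiring the commit introduces: react to chatbot updates.
    chatbot.change(latest_reply, inputs=[chatbot], outputs=[side_panel])

    # Clearing mirrors the commit: reset the chat history and the side panel.
    clear_button.click(lambda: None, None, chatbot, queue=False)
    clear_button.click(lambda: "", None, side_panel, queue=False)

if __name__ == "__main__":
    demo.launch()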