vortex123 committed on
Commit d61ca9b · verified · 1 Parent(s): 2252124

Update app.py

Files changed (1)
  1. app.py +6 -18
app.py CHANGED
@@ -56,14 +56,14 @@ async def respond_thinking(message, history, selected_model):
             else:
                 model_response_text += (part.text or "")
 
-        yield {"role": "assistant", "content": model_response_text}, thinking_process_text
+        yield ({"role": "assistant", "content": model_response_text}, thinking_process_text)
     except Exception as e:
-        yield {"role": "assistant", "content": f"Error during API call: {e}"}, f"Error during API call: {e}"
+        yield ({"role": "assistant", "content": f"Error during API call: {e}"}, f"Error during API call: {e}")
 
 async def process_message(message, history, model_name):
     if "thinking" in model_name:
-        async for response, thinking in respond_thinking(message, history, model_name):
-            yield response, thinking
+        async for response_thinking in respond_thinking(message, history, model_name):
+            yield response_thinking
     else:
         async for response in respond(message, history, model_name):
             yield response, ""
@@ -86,7 +86,8 @@ with gr.Blocks() as demo:
         additional_inputs=[model_selection],
         title="Gemini Chat",
         description="Общайтесь с моделями Gemini от Google.",
-        type="messages"
+        type="messages",
+        outputs=[gr.Chatbot(), thinking_output]  # Явное указание выходов
     )
 
     with gr.Row():
@@ -95,25 +96,12 @@ with gr.Blocks() as demo:
     def change_chatbot(model_name):
         return gr.ChatInterface.update()  # No need to change the processing function here
 
-    async def update_thinking_output(history):
-        if history:
-            last_turn = history[-1]
-            if len(last_turn) > 1:
-                return last_turn[1]
-        return ""
-
     model_selection.change(
         change_chatbot,
         inputs=[model_selection],
         outputs=[chatbot],
     )
 
-    chatbot.on_change(
-        update_thinking_output,
-        inputs=[chatbot],
-        outputs=[thinking_output]
-    )
-
     clear_button.click(lambda: None, None, chatbot, queue=False)
     clear_button.click(clear_thinking, outputs=[thinking_output], queue=False)
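The reworked generators stream `(assistant_message, thinking_text)` pairs; in Python, `yield a, b` and `yield (a, b)` produce the same single tuple, so consumers can unpack either form. Below is a minimal, self-contained sketch of that pattern; the `fake_*` names and canned chunks are illustrative and not taken from app.py:

```python
import asyncio

# Stand-in for respond_thinking: streams (assistant_message, thinking) tuples.
async def fake_respond_thinking(message):
    thinking, text = "", ""
    for chunk in ["Considering the question.", "Composing the answer."]:
        thinking += chunk + " "
        text += chunk.lower() + " "
        # `yield a, b` and `yield (a, b)` both produce one 2-tuple
        yield {"role": "assistant", "content": text.strip()}, thinking.strip()

# Stand-in for the "thinking" branch of process_message: re-yields the tuples as-is.
async def fake_process_message(message):
    async for response_thinking in fake_respond_thinking(message):
        yield response_thinking

async def main():
    async for response, thinking in fake_process_message("hello"):
        print(response["content"], "| thinking:", thinking)

asyncio.run(main())
```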
 
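For context on the `type="messages"` argument added above: with that setting, Gradio's ChatInterface exchanges chat history as openai-style `{"role": ..., "content": ...}` dicts rather than `[user, bot]` pairs. A standalone sketch under that assumption, independent of the app's Gemini calls (the `echo` function is illustrative):

```python
import gradio as gr

def echo(message, history):
    # With type="messages", history arrives as a list of
    # {"role": "user"/"assistant", "content": ...} dicts.
    return f"You said: {message} (history so far: {len(history)} messages)"

demo = gr.ChatInterface(echo, type="messages", title="Gemini Chat")

if __name__ == "__main__":
    demo.launch()
```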