vortex123 committed on
Commit
2f33702
·
verified ·
1 Parent(s): ecd8dbf

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -32
app.py CHANGED
@@ -19,7 +19,7 @@ MODELS = {model_name: genai.GenerativeModel(model_name=model_name) for model_nam
19
  async def respond(message, history, selected_model):
20
  model = MODELS.get(selected_model)
21
  if not model:
22
- yield "Error: Selected model not available.", ""
23
  return
24
 
25
  try:
@@ -28,21 +28,21 @@ async def respond(message, history, selected_model):
28
  full_response = ""
29
  for chunk in response_stream:
30
  full_response += (chunk.text or "")
31
- yield full_response, "" # Пустая строка для thinking output
32
  except Exception as e:
33
- yield f"Error during API call: {e}", ""
34
 
35
  async def respond_thinking(message, history, selected_model):
36
  if "thinking" not in selected_model:
37
- yield "Thinking model не выбрана.", ""
38
  return
39
 
40
  model = MODELS.get(selected_model)
41
  if not model:
42
- yield "Error: Selected model not available.", ""
43
  return
44
 
45
- yield "", "Думаю..." # Сообщение о начале размышлений
46
 
47
  try:
48
  response = model.generate_content(message)
@@ -56,9 +56,9 @@ async def respond_thinking(message, history, selected_model):
56
  else:
57
  model_response_text += (part.text or "")
58
 
59
- yield model_response_text, thinking_process_text
60
  except Exception as e:
61
- yield f"Error during API call: {e}", f"Error during API call: {e}"
62
 
63
  def update_chatbot_function(model_name):
64
  if "thinking" in model_name:
@@ -66,6 +66,17 @@ def update_chatbot_function(model_name):
66
  else:
67
  return respond
68
 
 
 
 
 
 
 
 
 
 
 
 
69
  with gr.Blocks() as demo:
70
  gr.Markdown("# Gemini Chatbot с режимом размышления")
71
 
@@ -75,45 +86,35 @@ with gr.Blocks() as demo:
75
  )
76
 
77
  chatbot = gr.ChatInterface(
78
- respond, # Изначально используем асинхронную функцию respond
79
  additional_inputs=[model_selection],
80
  title="Gemini Chat",
81
  description="Общайтесь с моделями Gemini от Google.",
 
82
  )
83
 
84
  thinking_output = gr.Code(label="Процесс размышления (для моделей с размышлением)")
85
 
86
  def change_function(model_name):
87
- return update_chatbot_function(model_name)
 
 
 
 
 
 
 
88
 
89
  model_selection.change(
90
- change_function,
91
  inputs=[model_selection],
92
  outputs=[chatbot],
93
  )
94
 
95
- async def process_message(message, history, model_name):
96
- if "thinking" in model_name:
97
- generator = respond_thinking(message, history, model_name)
98
- response, thinking = await generator.__anext__() # Получаем первое значение (пустое сообщение и "Думаю...")
99
- yield response, thinking
100
- final_response, final_thinking = await generator.__anext__() # Получаем окончательный ответ и размышления
101
- yield final_response, final_thinking
102
- else:
103
- async for response, _ in respond(message, history, model_name):
104
- yield response, ""
105
 
106
- chatbot.on(
107
- process_message,
108
- inputs=[chatbot, chatbot.chat_memory, model_selection],
109
- outputs=[chatbot, thinking_output],
110
- )
111
-
112
- chatbot.change(
113
- lambda: "",
114
- inputs=[],
115
- outputs=[thinking_output]
116
- )
117
 
118
  if __name__ == "__main__":
119
  demo.launch()
 
19
  async def respond(message, history, selected_model):
20
  model = MODELS.get(selected_model)
21
  if not model:
22
+ yield {"role": "assistant", "content": "Error: Selected model not available."}, ""
23
  return
24
 
25
  try:
 
28
  full_response = ""
29
  for chunk in response_stream:
30
  full_response += (chunk.text or "")
31
+ yield {"role": "assistant", "content": full_response}, "" # Формат messages
32
  except Exception as e:
33
+ yield {"role": "assistant", "content": f"Error during API call: {e}"}, ""
34
 
35
  async def respond_thinking(message, history, selected_model):
36
  if "thinking" not in selected_model:
37
+ yield {"role": "assistant", "content": "Thinking model не выбрана."}, ""
38
  return
39
 
40
  model = MODELS.get(selected_model)
41
  if not model:
42
+ yield {"role": "assistant", "content": "Error: Selected model not available."}, ""
43
  return
44
 
45
+ yield {"role": "assistant", "content": "Думаю..."}, "" # Сообщение о начале размышлений
46
 
47
  try:
48
  response = model.generate_content(message)
 
56
  else:
57
  model_response_text += (part.text or "")
58
 
59
+ yield {"role": "assistant", "content": model_response_text}, thinking_process_text
60
  except Exception as e:
61
+ yield {"role": "assistant", "content": f"Error during API call: {e}"}, f"Error during API call: {e}"
62
 
63
  def update_chatbot_function(model_name):
64
  if "thinking" in model_name:
 
66
  else:
67
  return respond
68
 
69
async def process_message(message, history, model_name):
    """Dispatch a chat message to the handler matching the selected model.

    Yields (response, thinking) pairs: ``response`` is a messages-format dict
    for the chatbot, ``thinking`` is text for the thinking-output panel
    (always empty for non-thinking models).
    """
    if "thinking" in model_name:
        # Thinking models stream both the answer and the reasoning trace.
        async for response, thinking in respond_thinking(message, history, model_name):
            yield response, thinking
    else:
        # Plain models stream only the answer; the thinking panel stays empty.
        async for response, _ in respond(message, history, model_name):
            yield response, ""
79
+
80
  with gr.Blocks() as demo:
81
  gr.Markdown("# Gemini Chatbot с режимом размышления")
82
 
 
86
  )
87
 
88
# Chat UI wired to process_message, which routes each turn to the proper
# Gemini handler based on the selected model.
chatbot = gr.ChatInterface(
    process_message,
    additional_inputs=[model_selection],
    title="Gemini Chat",
    description="Общайтесь с моделями Gemini от Google.",
    type="messages",
)
95
 
96
  thinking_output = gr.Code(label="Процесс размышления (для моделей с размышлением)")
97
 
98
def change_function(model_name):
    """Return the message handler matching the selected model name."""
    return respond_thinking if "thinking" in model_name else respond
104
+
105
def change_chatbot(model_name):
    """Event handler for model switches.

    process_message already dispatches on model_name at call time, so the
    ChatInterface itself does not need to be reconfigured here.
    NOTE(review): gr.ChatInterface.update() is not a public Gradio API —
    return a generic no-op gr.update() instead. TODO confirm against the
    installed Gradio version.
    """
    return gr.update()

model_selection.change(
    change_chatbot,
    inputs=[model_selection],
    outputs=[chatbot],
)
113
 
114
def clear_thinking():
    """Reset the thinking-process panel to an empty string."""
    return ""
 
 
 
 
 
 
 
 
116
 
117
# Empty the thinking panel whenever the chat is cleared. Without a callback,
# the .clear() listener has no function to produce the empty value for the
# output component, so the panel would never actually be reset.
chatbot.clear(lambda: "", inputs=[], outputs=[thinking_output])
 
 
 
 
 
 
 
 
 
 
118
 
119
  if __name__ == "__main__":
120
  demo.launch()