vortex123 committed
Commit 0852e93 · verified · 1 Parent(s): fa9ff48

Update app.py

Files changed (1)
  1. app.py +4 -8
app.py CHANGED
@@ -82,24 +82,20 @@ def _history_to_genai(history, model_name):
  # 5. Generators for streaming regular models and "thinking" models
  ###############################################################################

- async def _respond_stream(model_name, user_message, history):
-     """
-     Streaming response for regular models:
-     - chunk by chunk (partial_text).
-     """
+ async def _respond_stream_enh(model_name, user_message, history):
      if model_name not in MODELS:
          yield "Error: model not found."
          return

      model = MODELS[model_name]
-     genai_history = _history_to_genai(history, model_name)
+     genai_history = _history_to_genai_enhanced(history, model_name)

      try:
          chat = model.start_chat(history=genai_history)
-         response = chat.send_message(user_message, stream=True)
+         stream = chat.send_message(user_message, stream=True)  # stream=True here
 
          partial_text = ""
-         async for chunk in response:
+         async for chunk in stream:
              partial_text += (chunk.text or "")
              yield partial_text
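For context, a minimal sketch of how a Gradio Space might consume this streaming generator. This wiring is an assumption, not part of the commit: gr.ChatInterface accepts an async generator function, so each partial_text yielded by _respond_stream_enh can be relayed straight to the chat widget; the model name below is purely illustrative.

import gradio as gr

async def chat_fn(message, history):
    # "gemini-pro" is a placeholder key; the real app presumably picks the
    # model name from its own MODELS dict / UI controls.
    async for partial in _respond_stream_enh("gemini-pro", message, history):
        yield partial  # Gradio re-renders the bot reply with each partial_text

demo = gr.ChatInterface(fn=chat_fn)
demo.launch()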