import gradio as gr
import google.generativeai as genai
import os
import asyncio  # For the async generator handlers used by Gradio

# Read the API key from the environment; never hard-code secrets
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY")
if not GEMINI_API_KEY:
    print("Error: GEMINI_API_KEY environment variable not set.")
    exit()

genai.configure(api_key=GEMINI_API_KEY)

AVAILABLE_MODELS = ["gemini-1.5-flash", "gemini-1.5-pro", "gemini-2.0-flash-thinking-exp"]

# Instantiate each model once at application startup
MODELS = {model_name: genai.GenerativeModel(model_name=model_name) for model_name in AVAILABLE_MODELS}
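
# Gradio's messages-format history uses {"role": "user" | "assistant", "content": ...},
# while google.generativeai's start_chat() expects {"role": "user" | "model", "parts": [...]}.
# A minimal conversion sketch (convert_history is our own helper name, not a library API):
def convert_history(history):
    converted = []
    for msg in history or []:
        role = "model" if msg.get("role") == "assistant" else "user"
        converted.append({"role": role, "parts": [msg.get("content", "")]})
    return converted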
async def respond(message, history, selected_model):
    model = MODELS.get(selected_model)
    if not model:
        yield {"role": "assistant", "content": "Error: Selected model not available."}, ""
        return
    try:
        chat = model.start_chat(history=convert_history(history))
        response_stream = chat.send_message(message, stream=True)
        full_response = ""
        for chunk in response_stream:
            full_response += chunk.text or ""
            yield {"role": "assistant", "content": full_response}, ""  # messages format
    except Exception as e:
        yield {"role": "assistant", "content": f"Error during API call: {e}"}, ""
async def respond_thinking(message, history, selected_model):
    if "thinking" not in selected_model:
        yield {"role": "assistant", "content": "Error: A thinking model is not selected."}, ""
        return
    model = MODELS.get(selected_model)
    if not model:
        yield {"role": "assistant", "content": "Error: Selected model not available."}, ""
        return
    yield {"role": "assistant", "content": "Thinking..."}, ""  # Interim status message
    try:
        response = model.generate_content(message)
        thinking_process_text = ""
        model_response_text = ""
        if response.candidates:
            for part in response.candidates[0].content.parts:
                if getattr(part, "thought", False):
                    # Parts flagged as thoughts go to the side panel
                    thinking_process_text += f"Model Thought:\n{part.text}\n\n"
                else:
                    model_response_text += part.text or ""
        yield {"role": "assistant", "content": model_response_text}, thinking_process_text
    except Exception as e:
        yield {"role": "assistant", "content": f"Error during API call: {e}"}, f"Error during API call: {e}"
async def process_message(message, history, model_name):
    # Route to the thinking-aware handler when a thinking model is selected
    if "thinking" in model_name:
        async for response, thinking in respond_thinking(message, history, model_name):
            yield response, thinking
    else:
        async for response, _ in respond(message, history, model_name):
            yield response, ""
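
# A quick standalone smoke test of the dispatcher (hypothetical usage; in the
# app below, gr.ChatInterface drives process_message itself):
#
#   async def _demo():
#       async for reply, thinking in process_message("Hello!", [], "gemini-1.5-flash"):
#           print(reply["content"])
#   asyncio.run(_demo())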
with gr.Blocks() as demo:
    gr.Markdown("# Gemini Chatbot with a thinking mode")
    with gr.Row():
        model_selection = gr.Dropdown(
            AVAILABLE_MODELS, value="gemini-1.5-flash", label="Select a Gemini model"
        )
    thinking_output = gr.Code(label="Thinking process (for thinking models)")
    chatbot = gr.ChatInterface(
        process_message,  # Message handler; yields (chat reply, thinking text) pairs
        additional_inputs=[model_selection],
        additional_outputs=[thinking_output],  # Needs a Gradio version where ChatInterface supports additional_outputs (5.x)
        title="Gemini Chat",
        description="Chat with Google's Gemini models.",
        type="messages",
    )
    # No model_selection.change handler is needed: process_message re-reads the
    # selected model on every call, so switching models takes effect immediately.
    def clear_thinking():
        return ""

    # Reset the thinking panel when the chat is cleared; ChatInterface exposes
    # its underlying Chatbot component as .chatbot
    chatbot.chatbot.clear(clear_thinking, outputs=[thinking_output])

if __name__ == "__main__":
    demo.launch()