Create app.py
app.py
ADDED
@@ -0,0 +1,96 @@
# gradio_gemini_chat.py
# pip install -U google-genai gradio

import os
import traceback
import gradio as gr
from google import genai
from google.genai import types

API_KEY = os.getenv("GOOGLE_API_KEY") or "AIzaSyBAgrcgtf30Sm_msEGKATQvXRBSq1yyaSM"
MODEL_NAME = "gemini-2.5-flash"
client = genai.Client(api_key=API_KEY)

def build_contents_from_history_messages(history_msgs, user_msg):
    """
    history_msgs: List[{"role": "user"/"assistant", "content": str}]
    Convert the history into google-genai contents and append this turn's user_msg.
    """
    contents = []
    for m in history_msgs:
        role = m.get("role")
        text = (m.get("content") or "").strip()
        if not text:
            continue
        if role == "assistant":
            contents.append({"role": "model", "parts": [{"text": text}]})
        else:  # treat any non-assistant role as the user
            contents.append({"role": "user", "parts": [{"text": text}]})
    if user_msg:
        contents.append({"role": "user", "parts": [{"text": user_msg}]})
    return contents

def chat_fn(user_msg, history_msgs, sys_prompt):
    user_msg = (user_msg or "").strip()
    if not user_msg:
        return history_msgs, ""

    # Commands that clear the conversation
    if user_msg.lower() in ("/reset", "/clear"):
        return [], ""

    try:
        contents = build_contents_from_history_messages(history_msgs, user_msg)

        # system_instruction is passed via GenerateContentConfig,
        # not as a direct keyword argument to generate_content
        config = None
        if sys_prompt and sys_prompt.strip():
            config = types.GenerateContentConfig(system_instruction=sys_prompt.strip())

        resp = client.models.generate_content(
            model=MODEL_NAME,
            contents=contents,
            config=config,
        )
        bot_text = (resp.text or "").strip()
    except Exception as e:
        bot_text = f"[Error]\n{e}\n\n{traceback.format_exc(limit=2)}"

    # With the messages format, append the user and assistant turns in order
    history_msgs = history_msgs + [
        {"role": "user", "content": user_msg},
        {"role": "assistant", "content": bot_text},
    ]
    return history_msgs, ""

with gr.Blocks(title="Gemini Chat (google-genai + Gradio)") as demo:
    gr.Markdown("## Gemini Chatbot (/reset clears the conversation)")

    sys_prompt = gr.Textbox(
        label="System Prompt (optional)",
        lines=2,
        placeholder="e.g. You are a gentle, steady teaching assistant; keep answers concise and well organized."
    )

    # Use the messages format to avoid the 'tuples' deprecation warning
    chatbot = gr.Chatbot(
        label="Conversation",
        height=500,
        type="messages",
    )

    user_in = gr.Textbox(label="Message", lines=2, placeholder="Type to chat (/reset to clear)")
    with gr.Row():
        send_btn = gr.Button("Send", variant="primary")
        clear_btn = gr.Button("Clear conversation")

    send_btn.click(chat_fn, [user_in, chatbot, sys_prompt], [chatbot, user_in])
    user_in.submit(chat_fn, [user_in, chatbot, sys_prompt], [chatbot, user_in])
    clear_btn.click(lambda: ([], ""), None, [chatbot, user_in])

if __name__ == "__main__":
    # Let Gradio pick an available port automatically (avoids "port 7860 in use" errors)
    # Don't force inbrowser in some notebook/cloud environments
    demo.queue().launch(
        server_port=None,  # 0 also works per the original note
        inbrowser=False,
        share=False  # set to True if you need a public link
    )
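
For a quick sanity check outside Gradio, the same request can be issued directly from a Python shell. This is a minimal sketch, not part of the Space itself, assuming GOOGLE_API_KEY is exported in the environment and the key has access to gemini-2.5-flash; the contents list mirrors the shape produced by build_contents_from_history_messages.

# sanity_check.py -- hypothetical standalone test, not included in the commit
import os
from google import genai
from google.genai import types

client = genai.Client(api_key=os.environ["GOOGLE_API_KEY"])

# Same contents shape as build_contents_from_history_messages:
# a list of {"role": "user"/"model", "parts": [{"text": ...}]} dicts.
contents = [
    {"role": "user", "parts": [{"text": "Hi, who are you?"}]},
    {"role": "model", "parts": [{"text": "I'm a helpful assistant."}]},
    {"role": "user", "parts": [{"text": "Summarize our chat in one sentence."}]},
]

resp = client.models.generate_content(
    model="gemini-2.5-flash",
    contents=contents,
    config=types.GenerateContentConfig(system_instruction="Answer concisely."),
)
print(resp.text)

Running `python app.py` then starts the Gradio UI locally; typing /reset (or /clear) in the message box clears the conversation without restarting the app.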