import os

import requests
import gradio as gr

HUGGINGFACE_API_TOKEN = os.getenv("HUGGINGFACE_API_TOKEN")
MODEL_NAME = "deepseek-ai/DeepSeek-Coder-V2-Instruct"  # ✅ free model, usable directly via the Inference API


def query_huggingface_api(prompt):
    """Send a prompt to the Hugging Face Inference API and return the generated text."""
    API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}"
    headers = {"Authorization": f"Bearer {HUGGINGFACE_API_TOKEN}"}
    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": 150,      # text-generation expects max_new_tokens, not max_length
            "temperature": 0.7,
            "return_full_text": False,  # don't echo the prompt back in the reply
        },
    }
    response = requests.post(API_URL, headers=headers, json=payload)
    if response.status_code == 200:
        return response.json()[0]["generated_text"]
    return f"API Error: {response.status_code} - {response.text}"


# ✅ Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 Free Hugging Face AI Chatbot")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Type your question...", lines=2)
    send = gr.Button("Send")
    clear = gr.Button("Clear")

    def chat(query, chat_history):
        """Query the model and append the (user, bot) pair to the chat history."""
        chat_history = chat_history or []  # the Chatbot value starts out empty
        response = query_huggingface_api(query)
        chat_history.append((query, response))
        return "", chat_history

    send.click(chat, [msg, chatbot], [msg, chatbot], queue=False)
    msg.submit(chat, [msg, chatbot], [msg, chatbot], queue=False)
    clear.click(lambda: None, None, chatbot, queue=False)

demo.launch()
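

# --- Optional: tolerating cold starts ---------------------------------------
# A minimal sketch of a more defensive request helper, kept separate from the
# app above and not wired into the UI. It assumes the Inference API's
# "wait_for_model" option and its habit of answering 503 while a model is
# still loading; the retry count, delay, and timeout values are arbitrary
# illustrations.
import time


def query_huggingface_api_with_retry(prompt, retries=3, delay=5.0):
    """Like query_huggingface_api, but waits out 'model is loading' responses."""
    API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}"
    headers = {"Authorization": f"Bearer {HUGGINGFACE_API_TOKEN}"}
    payload = {
        "inputs": prompt,
        "parameters": {"max_new_tokens": 150, "temperature": 0.7, "return_full_text": False},
        "options": {"wait_for_model": True},  # ask the API to block until the model is ready
    }
    for _ in range(retries):
        response = requests.post(API_URL, headers=headers, json=payload, timeout=120)
        if response.status_code == 200:
            return response.json()[0]["generated_text"]
        if response.status_code == 503:  # model still loading: back off and try again
            time.sleep(delay)
            continue
        break  # any other error: give up and report it
    return f"API Error: {response.status_code} - {response.text}"
# To use it, swap it in for query_huggingface_api inside chat() above.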