File size: 3,264 Bytes
a13c2bb c96734b 1ca78b8 c96734b 9918749 a13c2bb 1ca78b8 9918749 a13c2bb 1ca78b8 32ae536 9918749 a13c2bb 32ae536 9144903 32ae536 9918749 9144903 9918749 9144903 9918749 1ca78b8 cef7f39 25f51d0 9918749 25f51d0 cef7f39 25f51d0 9918749 32ae536 25f51d0 32ae536 a13c2bb 32ae536 9918749 32ae536 3e6631d 9918749 32ae536 82deaf2 9918749 c96734b 9918749 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 |
import os
import gradio as gr
import requests
import json
# OpenRouter API key, read from the environment so the secret is never
# hard-coded; falls back to an empty string when unset.
OPENROUTER_API_KEY = os.environ.get("OPENROUTER_API_KEY", "")
# (display name, OpenRouter model id) pairs offered in the UI radio.
# The ":free" suffix selects OpenRouter's free tier for each model.
MODELS = [
    ("Gemini Pro 2.0", "google/gemini-2.0-pro-exp-02-05:free"),
    ("Llama 3.2 Vision", "meta-llama/llama-3.2-11b-vision-instruct:free")
]
def ask_ai(message, chatbot, model_choice):
    """Send the conversation to OpenRouter and append the reply to history.

    Args:
        message: The user's new message text.
        chatbot: Chat history in Gradio "messages" format — a list of
            ``{"role": ..., "content": ...}`` dicts. (The Chatbot component
            is created with ``type="messages"``.)
        model_choice: Display name of the model selected in the UI.

    Returns:
        Tuple of (updated history, "") — the empty string clears the
        input textbox via the event's second output.
    """
    # Resolve the display name to an OpenRouter model id; fall back to the
    # first configured model if the choice is unrecognized.
    model_id = next(
        (mid for name, mid in MODELS if name == model_choice), MODELS[0][1]
    )

    # Build the API payload from the existing history plus the new message.
    # Fix: the Chatbot uses type="messages", so history entries are
    # role/content dicts — the previous tuple unpacking broke on them.
    messages = [{"role": m["role"], "content": m["content"]} for m in chatbot]
    messages.append({"role": "user", "content": message})

    try:
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {OPENROUTER_API_KEY}",
                "HTTP-Referer": "https://huggingface.co/spaces"
            },
            json={
                "model": model_id,
                "messages": messages,
                "temperature": 0.7,
                "max_tokens": 1000
            },
            timeout=60
        )
        if response.status_code == 200:
            result = response.json()
            # Defensive chained .get() so a malformed payload yields "".
            ai_reply = (
                result.get("choices", [{}])[0].get("message", {}).get("content", "")
            )
        else:
            ai_reply = f"Error: Status code {response.status_code}"
    except Exception as e:
        # Best-effort: surface network/parse failures in the chat
        # transcript instead of crashing the event handler.
        ai_reply = f"Error: {str(e)}"

    # Append both turns in messages format so the Chatbot renders them.
    chatbot = chatbot + [
        {"role": "user", "content": message},
        {"role": "assistant", "content": ai_reply},
    ]
    return chatbot, ""
def clear_chat():
    """Return a fresh, empty chat state.

    Produces an empty history for the Chatbot component and an empty
    string that blanks out the message textbox.
    """
    fresh_history = []
    blank_input = ""
    return fresh_history, blank_input
# Create minimal interface
with gr.Blocks() as demo:
    gr.Markdown("# Simple AI Chat")
    with gr.Row():
        with gr.Column():
            # Chat transcript; "messages" type renders role/content dicts.
            chatbot = gr.Chatbot(height=400, type="messages")
            with gr.Row():
                message = gr.Textbox(
                    placeholder="Type your message here...",
                    label="Message",
                    lines=3
                )
            with gr.Row():
                # Radio shows display names; ask_ai maps the selected name
                # back to an OpenRouter model id. Defaults to the first model.
                model_choice = gr.Radio(
                    [name for name, _ in MODELS],
                    value=MODELS[0][0],
                    label="Model"
                )
            with gr.Row():
                submit_btn = gr.Button("Send")
                clear_btn = gr.Button("Clear Chat")

    # Set up events: both the Send button and pressing Enter in the textbox
    # submit; the second output ("") clears the message box after sending.
    submit_btn.click(
        fn=ask_ai,
        inputs=[message, chatbot, model_choice],
        outputs=[chatbot, message]
    )
    message.submit(
        fn=ask_ai,
        inputs=[message, chatbot, model_choice],
        outputs=[chatbot, message]
    )
    clear_btn.click(
        fn=clear_chat,
        inputs=[],
        outputs=[chatbot, message]
    )
# Launch directly with Gradio's built-in server
if __name__ == "__main__":
    # Bind all interfaces on port 7860 (the conventional Hugging Face
    # Spaces port, matching the HTTP-Referer sent in ask_ai).
    demo.launch(server_name="0.0.0.0", server_port=7860)