import os

import gradio as gr
import requests

# OpenRouter API key (read from the environment)
OPENROUTER_API_KEY = os.environ.get("OPENROUTER_API_KEY", "")

# Basic model list: (display name, OpenRouter model ID)
MODELS = [
    ("Gemini Pro 2.0", "google/gemini-2.0-pro-exp-02-05:free"),
    ("Llama 3.2 Vision", "meta-llama/llama-3.2-11b-vision-instruct:free"),
]


def ask_ai(message, chatbot, model_choice):
    """Send the chat history plus the new message to OpenRouter and append the reply."""
    # Resolve the model ID from the selected display name (default to the first model)
    model_id = MODELS[0][1]
    for name, model_id_value in MODELS:
        if name == model_choice:
            model_id = model_id_value
            break

    # Build the API payload from the messages-format chat history
    messages = [{"role": msg["role"], "content": msg["content"]} for msg in chatbot]
    messages.append({"role": "user", "content": message})

    # Show the new user turn in the chat regardless of the outcome
    chatbot.append({"role": "user", "content": message})

    # Call the OpenRouter chat completions API
    try:
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {OPENROUTER_API_KEY}",
                "HTTP-Referer": "https://huggingface.co/spaces",
            },
            json={
                "model": model_id,
                "messages": messages,
                "temperature": 0.7,
                "max_tokens": 1000,
            },
            timeout=60,
        )

        if response.status_code == 200:
            result = response.json()
            ai_response = result.get("choices", [{}])[0].get("message", {}).get("content", "")
            chatbot.append({"role": "assistant", "content": ai_response})
        else:
            chatbot.append({"role": "assistant", "content": f"Error: Status code {response.status_code}"})
    except Exception as e:
        chatbot.append({"role": "assistant", "content": f"Error: {str(e)}"})

    return chatbot, ""


def clear_chat():
    return [], ""


# Create minimal interface
with gr.Blocks() as demo:
    gr.Markdown("# Simple AI Chat")

    with gr.Row():
        with gr.Column():
            chatbot = gr.Chatbot(height=400, type="messages")

            with gr.Row():
                message = gr.Textbox(
                    placeholder="Type your message here...",
                    label="Message",
                    lines=3,
                )

            with gr.Row():
                model_choice = gr.Radio(
                    [name for name, _ in MODELS],
                    value=MODELS[0][0],
                    label="Model",
                )

            with gr.Row():
                submit_btn = gr.Button("Send")
                clear_btn = gr.Button("Clear Chat")

    # Set up events
    submit_btn.click(
        fn=ask_ai,
        inputs=[message, chatbot, model_choice],
        outputs=[chatbot, message],
    )

    message.submit(
        fn=ask_ai,
        inputs=[message, chatbot, model_choice],
        outputs=[chatbot, message],
    )

    clear_btn.click(
        fn=clear_chat,
        inputs=[],
        outputs=[chatbot, message],
    )

# Launch directly with Gradio's built-in server
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)
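
# --- Usage note (a sketch; the file name and key value below are assumptions, not part of the original) ---
# A typical local run, assuming this file is saved as app.py:
#   export OPENROUTER_API_KEY=sk-or-...   # key format shown only for illustration
#   python app.py
# The Gradio UI is then served on port 7860, e.g. http://localhost:7860.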