import gradio as gr
import requests

# URL of the Ollama server exposed as a Gradio app on Hugging Face Spaces
OLLAMA_SERVER_URL = "https://broadfield-dev-ollama-server.hf.space/api/predict/"

def chat_with_ollama(prompt):
    # Send the prompt to the Ollama server's Gradio API endpoint
    response = requests.post(OLLAMA_SERVER_URL, json={"data": [prompt]})
    if response.status_code == 200:
        # Gradio wraps the function outputs in a "data" list
        return response.json()["data"][0]
    else:
        return "Error communicating with the Ollama server."

# Create a Gradio interface
iface = gr.Interface(
    fn=chat_with_ollama,
    inputs=gr.Textbox(label="Enter your prompt"),
    outputs=gr.Textbox(label="Response from Ollama"),
    title="Ollama Chatbot Client",
    description="A Gradio client to interact with the Ollama server.",
)

# Launch the Gradio interface
iface.launch()
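Before launching the UI, it can help to check that the server is reachable at all. The snippet below is a minimal sketch of that sanity check, run as a separate script: it assumes the Space exposes the standard Gradio /api/predict route used above and returns its result wrapped in a "data" list, and the prompt "Hello, Ollama!" is just a placeholder.

import requests

# Hit the assumed Gradio /api/predict endpoint directly and print the reply
resp = requests.post(
    "https://broadfield-dev-ollama-server.hf.space/api/predict/",
    json={"data": ["Hello, Ollama!"]},
    timeout=60,
)
resp.raise_for_status()
print(resp.json()["data"][0])

If this prints a sensible response, the Gradio client above should work against the same endpoint; if it fails, the problem is with the server or the URL rather than the client code.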