# LA_Llama_Chat / app.py
import gradio as gr
import requests
import json

def query_ollama(prompt):
    """Send a prompt to the local Ollama server and return the full generated text."""
    url = "http://localhost:11434/api/generate"
    payload = {
        "model": "hf.co/ibrahimBlyc/LA_Llama:latest",
        "prompt": prompt
    }
    headers = {"Content-Type": "application/json"}
    response = requests.post(url, json=payload, headers=headers, stream=True)

    if response.status_code == 200:
        full_response = ""
        # The generate endpoint streams one JSON object per line; accumulate the
        # "response" chunks until the server marks the generation as done.
        for line in response.iter_lines():
            if line:
                data = json.loads(line.decode("utf-8"))
                if "response" in data:
                    full_response += data["response"]
                if data.get("done", False):
                    break
        return full_response
    else:
        return f"HTTP error: {response.status_code}"

interface = gr.Interface(
    fn=query_ollama,
    inputs=gr.Textbox(label="Ask your question"),
    outputs=gr.Textbox(label="Answer"),
    title="Chat with Ollama",
    description="A simple interface for chatting with the model hosted on Ollama."
)

if __name__ == "__main__":
    interface.launch()
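
# When the app runs inside a container or a Hugging Face Space, the Gradio
# server generally needs to bind to all interfaces. A common variant of the
# launch call (shown as a hedged alternative, not the original configuration):
#
#     interface.launch(server_name="0.0.0.0", server_port=7860)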