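"""Minimal Gradio front-end for a model served by a local Ollama instance.

Assumes Ollama is listening on its default port (11434) and that the model
referenced below (hf.co/ibrahimBlyc/LA_Llama:latest) has already been pulled.
"""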
import gradio as gr
import requests
import json

def query_ollama(prompt):
    """Send a prompt to the local Ollama server and return the full generated text."""
    url = "http://localhost:11434/api/generate"
    payload = {
        "model": "hf.co/ibrahimBlyc/LA_Llama:latest",
        "prompt": prompt
    }
    headers = {"Content-Type": "application/json"}
    # The generate endpoint streams its output as one JSON object per line.
    response = requests.post(url, json=payload, headers=headers, stream=True)

    if response.status_code == 200:
        full_response = ""
        # Accumulate the streamed chunks until Ollama reports completion.
        for line in response.iter_lines():
            if line:
                data = json.loads(line.decode("utf-8"))
                if "response" in data:
                    full_response += data["response"]
                if data.get("done", False):
                    break
        return full_response
    else:
        return f"HTTP error: {response.status_code}"

interface = gr.Interface(
    fn=query_ollama,
    inputs=gr.Textbox(label="Ask your question"),
    outputs=gr.Textbox(label="Answer"),
    title="Chat with Ollama",
    description="A simple interface to chat with the model hosted on Ollama."
)

if __name__ == "__main__":
    interface.launch()
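
# Usage sketch (assumes Ollama is already running locally; the file name app.py
# is illustrative, not part of the original script):
#   ollama pull hf.co/ibrahimBlyc/LA_Llama:latest
#   python app.py   # Gradio serves the UI on http://127.0.0.1:7860 by default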