import os
import subprocess
from flask import Flask, render_template, Response, request, jsonify
from hello import hello

app = Flask(__name__)

@app.route("/")
def index():
    return render_template("index.html")

@app.route("/logs")
def logs():
    def stream_logs():
        process = subprocess.Popen(["docker-compose", "logs", "-f", "ollama"], stdout=subprocess.PIPE)
        def stream_logs():
            process = subprocess.Popen(["docker-compose", "logs", "-f", "ollama"], stdout=subprocess.PIPE)
            for line in iter(process.stdout.readline, b''):
                yield f"data: {line.decode('utf-8')}\n\n"
        return Response(stream_logs(), mimetype='text/event-stream')
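# The /logs endpoint emits Server-Sent Events; a quick way to watch the stream from a
# terminal (assuming the app is reachable on localhost:5000, as configured below) is:
#   curl -N http://localhost:5000/logs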

@app.route("/api/generate", methods=["POST"])
def generate():
    data = request.get_json()
    prompt = data.get("prompt")
    if not prompt:
        return jsonify({"error": "Missing prompt"}), 400
    # Forward the prompt to the Ollama container and return its output.
    try:
        result = subprocess.run(
            ["docker-compose", "exec", "ollama", "ollama-cli", "generate", prompt],
            capture_output=True,
            text=True,
            check=True
        )
        response = result.stdout.strip()
    except subprocess.CalledProcessError as e:
        # Report the failure to the caller instead of returning it as a normal response.
        return jsonify({"error": f"Ollama command failed: {e.stderr or e}"}), 500
    return jsonify({"response": response})

@app.route("/health")
def health():
    return jsonify({"status": "running"}), 200
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000, debug=True)