# ollama-server/app.py
import os
import subprocess

from flask import Flask, render_template, Response, request, jsonify
from hello import hello

app = Flask(__name__)


@app.route("/")
def index():
    return render_template("index.html")
@app.route("/logs")
def logs():
def stream_logs():
process = subprocess.Popen(["docker-compose", "logs", "-f", "ollama"], stdout=subprocess.PIPE)
def stream_logs():
process = subprocess.Popen(["docker-compose", "logs", "-f", "ollama"], stdout=subprocess.PIPE)
for line in iter(process.stdout.readline, b''):
yield f"data: {line.decode('utf-8')}\n\n"
return Response(stream_logs(), mimetype='text/event-stream')
@app.route("/api/generate", methods=["POST"])
def generate():
data = request.get_json()
prompt = data.get("prompt")
if not prompt:
return jsonify({"error": "Missing prompt"}), 400
# Implement logic to send prompt to Ollama and get response
try:
result = subprocess.run(
["docker-compose", "exec", "ollama", "ollama-cli", "generate", prompt],
capture_output=True,
text=True,
check=True
)
response = result.stdout.strip()
except subprocess.CalledProcessError as e:
response = f"Error: {e}"
return jsonify({"response": response})
@app.route("/health")
def health():
return jsonify({"status": "running"}), 200


if __name__ == "__main__":
    # debug=True is convenient for development; disable it in production.
    app.run(host="0.0.0.0", port=5000, debug=True)
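

# Example usage, assuming the server is running locally on port 5000:
#   curl http://localhost:5000/health
#   curl -N http://localhost:5000/logs
#   curl -X POST http://localhost:5000/api/generate \
#        -H "Content-Type: application/json" \
#        -d '{"prompt": "Why is the sky blue?"}'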