DeathDaDev committed on
Commit
eba0ee0
1 Parent(s): 2e6dee6

Add route to display Ollama logs in the web interface.

Files changed (2)
  1. Dockerfile +0 -1
  2. app.py +6 -1
Dockerfile CHANGED
@@ -20,7 +20,6 @@ WORKDIR $HOME/app
 # RUN curl -fsSL https://huggingface.co/gingdev/llama7b-ictu-v2/resolve/main/llama7b_q4_k_m.gguf?download=true -o llama.gguf

 # Start Ollama server
-RUN ollama serve

 # Expose port 11434 for Ollama and 5000 for the web server
 EXPOSE 11434 5000
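A `RUN` instruction executes while the image is being built, so `RUN ollama serve` would launch a long-running server at build time and stall the build; removing it means the server has to be started when the container actually runs. A minimal sketch of one way to do that from app.py, assuming a subprocess whose output is piped into logs.txt (the file the new /logs route reads); the subprocess call and the file name are illustrative assumptions, not part of this commit:

import subprocess

# Hypothetical sketch (not part of this commit): start the Ollama server at
# container runtime and capture its output in logs.txt so the new /logs
# route has something to display.
log_file = open("logs.txt", "a")
ollama_proc = subprocess.Popen(
    ["ollama", "serve"],
    stdout=log_file,
    stderr=subprocess.STDOUT,
)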
app.py CHANGED
@@ -17,7 +17,12 @@ app = Flask(__name__)

 @app.route("/")
 def index():
-    return render_template("index.html")
+    return render_template("index.html", logs_url="/logs")

 if __name__ == "__main__":
     app.run(host="0.0.0.0", port=5000, debug=True)
+@app.route("/logs")
+def logs():
+    with open("logs.txt", "r") as f:
+        logs = f.read()
+    return logs
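The new endpoint can be checked with Flask's built-in test client; a minimal sketch, assuming a logs.txt file exists next to app.py:

# Hypothetical check of the new /logs route using Flask's test client.
from app import app

with app.test_client() as client:
    resp = client.get("/logs")
    print(resp.status_code)             # 200 when logs.txt exists
    print(resp.get_data(as_text=True))  # raw contents of logs.txt

Note that because the /logs handler sits below the `if __name__ == "__main__"` block, it is only registered before the server starts when the module is imported (as above); when app.py is run directly, `app.run()` blocks first, so the handler would normally be placed above that guard.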