DeathDaDev committed on
Commit
64312af
1 Parent(s): d8da006

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +37 -21
Dockerfile CHANGED
@@ -1,28 +1,44 @@
1
- # Use the latest Ollama base image
2
- FROM ollama/ollama:latest
3
 
4
- # Update package manager and install curl
5
- RUN apt-get update && apt-get install curl -y
 
 
6
 
7
- # Create a non-root user 'user' with UID 1000
8
- RUN useradd -m -u 1000 user
9
 
10
- # Set environment variables
11
- USER user
12
- ENV HOME=/home/user \
13
- PATH=/home/user/.local/bin:$PATH \
14
- OLLAMA_HOST=0.0.0.0
15
 
16
- # Set the working directory
17
- WORKDIR $HOME/app
18
 
19
- # Download the model file llama.gguf (if needed)
20
- # RUN curl -fsSL https://huggingface.co/gingdev/llama7b-ictu-v2/resolve/main/llama7b_q4_k_m.gguf?download=true -o llama.gguf
21
 
22
- # Start Ollama server
23
- CMD ["serve"]
24
 
25
- # Expose port 11434 for Ollama and 5000 for the web server
26
- EXPOSE 11434 5000
27
- RUN apt-get update && apt-get install python3-pip -y
28
- RUN pip3 install flask
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# syntax=docker/dockerfile:1
# Base image: Ubuntu 22.04 LTS.
# NOTE: image repository names must be lowercase — "Ubuntu:22.04" is a build error.
FROM ubuntu:22.04

# Install curl (needed to fetch the Ollama installer).
# ca-certificates is listed explicitly because --no-install-recommends would
# otherwise skip it and break HTTPS downloads; apt lists are removed in the
# same layer so they never persist in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
      ca-certificates \
      curl \
    && rm -rf /var/lib/apt/lists/*

# Install Ollama via the official installer script.
# NOTE(review): `curl | sh` runs an unpinned, unverified script — consider
# pinning a release version and verifying a checksum before piping to sh.
RUN curl -fsSL https://ollama.com/install.sh | sh

# Expose the Ollama API port (documentation only; does not publish the port).
EXPOSE 11434

# Start the Ollama server.
# Exec (JSON-array) form so ollama runs as PID 1 and receives SIGTERM on stop.
# NOTE(review): container still runs as root — the installer creates an
# `ollama` system user; confirm whether `USER ollama` works for this setup.
CMD ["ollama", "serve"]
16
 
17
+ # # Use the latest Ollama base image
18
+ # from ollama/ollama:latest
19
 
20
+ # # Update package manager and install curl
21
+ # Run apt-get update && apt-get install curl -y
22
 
23
+ # # Create a non-root user 'user' with UID 1000
24
+ # Run useradd -m -u 1000 user
25
+
26
+ # # Set environment variables
27
+ # USER user
28
+ # env HOME=/home/user \
29
+ # PATH=/home/user/.local/bin:$PATH \
30
+ # OLLAMA_HOST=0.0.0.0
31
+
32
+ # # Set the working directory
33
+ # WORKDIR $HOME/app
34
+
35
+ # # Download the model file llama.gguf (if needed)
36
+ # # RUN curl -fsSL https://huggingface.co/gingdev/llama7b-ictu-v2/resolve/main/llama7b_q4_k_m.gguf?download=true -o llama.gguf
37
+
38
+ # # Start Ollama server
39
+ # cmd ["serve"]
40
+
41
+ # # Expose port 11434 for Ollama and 5000 for the web server
42
+ # EXPOSE 11434 5000
43
+ # Run apt-get update && apt-get install python3-pip -y
44
+ # Run pip3 install flask