Update docker
docker CHANGED
@@ -1,24 +1,37 @@
 # Use an official Python runtime as a parent image
 FROM python:3.12

+# Install system dependencies for Ollama
+RUN apt-get update && apt-get install -y \
+    curl \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Ollama
+RUN curl -fsSL https://ollama.com/install.sh | sh
+
+# Install Ollama model (requires separate RUN to prevent layer caching issues)
+RUN ollama pull llama3
+
 # Set the working directory in the container
-WORKDIR /
+WORKDIR /app

 # Copy requirements.txt into the container
-COPY requirements.txt
+COPY requirements.txt .

-# Install dependencies
-RUN pip install --no-cache-dir -r
+# Install Python dependencies
+RUN pip install --no-cache-dir -r requirements.txt

 # Copy the application code into the container
-COPY .
+COPY . .

-# Expose
-EXPOSE 8501
+# Expose ports for both Streamlit and Ollama
+EXPOSE 8501 11434

-# Set environment variables
+# Set environment variables
 ENV STREAMLIT_SERVER_PORT=8501 \
     STREAMLIT_SERVER_ADDRESS=0.0.0.0 \
-    PYTHONUNBUFFERED=1
-
-
+    PYTHONUNBUFFERED=1 \
+    OLLAMA_HOST=0.0.0.0:11434
+
+# Start both Ollama and Streamlit
+CMD ollama serve & streamlit run app.py
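With this change the image bundles Ollama, the pulled llama3 model, and the Streamlit app, serving them on ports 11434 and 8501. A minimal local build-and-run sketch follows; the image tag streamlit-ollama is only a placeholder, not anything defined by this Space:

    # Build the image from the Space's root directory
    docker build -t streamlit-ollama .

    # Run it, publishing the Streamlit UI (8501) and the Ollama API (11434)
    docker run -p 8501:8501 -p 11434:11434 streamlit-ollama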
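Note that the new CMD backgrounds ollama serve and launches Streamlit immediately, so the app's first model call can race Ollama's startup. A common alternative, sketched below and not part of this commit, is a small start script (the name start.sh is hypothetical) that waits for the Ollama API before handing the container over to Streamlit:

    #!/bin/sh
    # start.sh -- hypothetical helper, not included in this commit
    ollama serve &                              # start the Ollama API in the background
    until curl -sf http://localhost:11434 >/dev/null; do
        sleep 1                                 # wait until Ollama answers on its default port
    done
    exec streamlit run app.py                   # then run the Streamlit app as PID 1

The Dockerfile would then COPY this script into the image and replace the final CMD with the script.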