switch base image
Dockerfile CHANGED: +22 -21
@@ -1,34 +1,35 @@
+# Use Ubuntu 22.04 as a public base image to avoid GHCR permissions
+FROM ubuntu:22.04

+# Enable strict bash mode
SHELL ["/bin/bash", "-euxo", "pipefail", "-c"]

-RUN echo "### STEP 1: Installing APT packages" && \
+# 1. Install OS-level dependencies
+RUN echo "### STEP 1: Installing OS-level dependencies" && \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+        build-essential cmake libopenblas-dev python3 python3-pip python3-opencv && \
+    rm -rf /var/lib/apt/lists/*

-ENV CMAKE_ARGS="-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS"
-# 3. Set working directory and copy application code
-RUN echo "### STEP 3: Copying application code"
+# 2. Prepare the application directory
+RUN echo "### STEP 2: Setting working directory and copying source code"
WORKDIR /app
COPY requirements.txt ./
+COPY packages.txt ./
COPY app.py ./
+# COPY any other source files or folders needed by your app

+# 3. Install Python dependencies (excluding llama-cpp-python)
+RUN echo "### STEP 3: Installing Python dependencies" && \
+    pip3 install --upgrade pip && \
+    pip3 install --no-cache-dir -r requirements.txt

+# 4. Build and install llama-cpp-python from source with OpenBLAS
+RUN echo "### STEP 4: Building and installing llama-cpp-python with OpenBLAS" && \
+    export CMAKE_ARGS="-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS" && \
+    pip3 install --no-cache-dir --force-reinstall --no-binary llama-cpp-python llama-cpp-python

+# 5. Expose port and launch the application
+RUN echo "### STEP 5: Finalizing Docker image"
EXPOSE 7860
+CMD ["python3", "app.py"]
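The commit only touches the Dockerfile; the app.py that CMD ["python3", "app.py"] launches is not part of this diff. As a rough, non-authoritative sketch of what the EXPOSE 7860 and llama-cpp-python steps imply, an app.py along these lines would be compatible with this image. It assumes a Gradio front end and a local GGUF model file, neither of which is stated in the commit; MODEL_PATH and the generation settings are placeholders.

# app.py -- illustrative sketch only; the real app.py is not shown in this commit
import gradio as gr
from llama_cpp import Llama

MODEL_PATH = "model.gguf"  # hypothetical path; replace with the model the Space actually uses

# The OpenBLAS-enabled build from STEP 4 is picked up automatically when the
# library loads; nothing extra is needed at runtime.
llm = Llama(model_path=MODEL_PATH, n_ctx=2048)

def generate(prompt):
    # One-shot completion; llama-cpp-python returns an OpenAI-style dict.
    out = llm(prompt, max_tokens=256)
    return out["choices"][0]["text"]

demo = gr.Interface(fn=generate, inputs="text", outputs="text")

# Port 7860 matches the EXPOSE instruction; binding to 0.0.0.0 makes the
# server reachable from outside the container.
demo.launch(server_name="0.0.0.0", server_port=7860)

Before pushing, the image can be exercised locally with docker build -t my-space . followed by docker run -p 7860:7860 my-space, which serves the app on the same port the Space expects.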