Commit 7131f6c
Parent(s): 3e50743
Update Dockerfile

Dockerfile (+8, −4)
@@ -1,6 +1,7 @@
 FROM nvidia/cuda:12.2.0-devel-ubuntu20.04
 
 ENV DEBIAN_FRONTEND=noninteractive
+
 # Install dependencies
 RUN apt update && \
     apt install --no-install-recommends -y build-essential python3 python3-pip wget curl git cmake zlib1g-dev && \
@@ -9,8 +10,8 @@ RUN apt update && \
 WORKDIR /app
 
 # Download ggml and mmproj models from HuggingFace
-RUN wget https://huggingface.co/mys/ggml_llava-v1.5-13b/raw/main/ggml-model-q4_k.gguf
-    wget https://huggingface.co/mys/ggml_llava-v1.5-13b/raw/main/mmproj-model-f16.gguf
+RUN wget https://huggingface.co/mys/ggml_llava-v1.5-13b/raw/main/ggml-model-q4_k.gguf && \
+    wget https://huggingface.co/mys/ggml_llava-v1.5-13b/raw/main/mmproj-model-f16.gguf
 
 # Clone and build llava-server
 RUN git clone https://github.com/matthoffner/llava-cpp-server.git llava && \
@@ -20,7 +21,10 @@ RUN git clone https://github.com/matthoffner/llava-cpp-server.git llava && \
     make
 
 # Create a non-root user for security reasons
-RUN useradd -m -u 1000 user
+RUN useradd -m -u 1000 user && \
+    mkdir -p /home/user/app && \
+    cp /app/ggml-model-q4_k.gguf /home/user/app && \
+    cp /app/mmproj-model-f16.gguf /home/user/app
 
 USER user
 ENV HOME=/home/user
@@ -31,4 +35,4 @@ WORKDIR $HOME/app
 EXPOSE 8080
 
 # Start the llava-server with models
-CMD ["
+CMD ["../llava/bin/llava-server", "-m", "ggml-model-q4_k.gguf", "--mmproj", "mmproj-model-f16.gguf", "--host", "0.0.0.0", "--port", "8080"]
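
A minimal sketch of building and running the updated image locally; the image tag llava-cpp-server is illustrative, and the --gpus flag assumes the NVIDIA Container Toolkit is installed on the host:

    # Build the image from the directory containing this Dockerfile
    docker build -t llava-cpp-server .

    # Run with GPU access and publish the port declared by EXPOSE 8080
    docker run --rm --gpus all -p 8080:8080 llava-cpp-server

    # Basic connectivity check from the host; any HTTP response indicates the
    # server started (see the llava-cpp-server README for the actual API routes)
    curl -i http://localhost:8080/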