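# Dockerfile for a GPU-enabled llama.cpp inference image: downloads TheBloke's
# StableBeluga-13B GGML weights, builds llama-cpp-python with cuBLAS, and serves
# an ASGI app (main:app) with uvicorn on port 8000.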
FROM nvidia/cuda:12.0.0-cudnn8-devel-ubuntu22.04

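# Model selection: the weights file is pulled from the Hugging Face Hub at build time.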
ENV MODEL_NAME="StableBeluga-13B-GGML"
ENV DEFAULT_MODEL_FILE="stablebeluga-13b.ggmlv3.q4_K_M.bin"
ENV MODEL_USER="TheBloke"
ENV DEFAULT_MODEL_BRANCH="main"
ENV MODEL_URL="https://huggingface.co/${MODEL_USER}/${MODEL_NAME}/resolve/${DEFAULT_MODEL_BRANCH}/${DEFAULT_MODEL_FILE}"
ENV PATH="/usr/local/cuda/bin:$PATH"

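# System packages: compilers and the Python toolchain needed to build llama-cpp-python.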
RUN apt-get update && \
    apt-get install --no-install-recommends -y build-essential python3 python3-pip wget curl git && \
    apt-get clean && rm -rf /var/lib/apt/lists/*

WORKDIR /app

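# Install a standalone CMake 3.18 into /usr/local; wget is already available from the layer above.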
RUN wget -qO- "https://cmake.org/files/v3.18/cmake-3.18.0-Linux-x86_64.tar.gz" | tar --strip-components=1 -xz -C /usr/local

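# Build llama-cpp-python from source with cuBLAS enabled for GPU inference.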
RUN CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install git+https://github.com/abetlen/llama-cpp-python --no-cache-dir

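# Install the application's Python dependencies.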
COPY requirements.txt ./

RUN pip3 install --upgrade pip && \
    pip3 install -r requirements.txt

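# Download the model weights at build time; note this bakes a multi-gigabyte file into the image.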
RUN echo ${MODEL_URL} && \
    wget -O /app/${DEFAULT_MODEL_FILE} ${MODEL_URL}

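# Create an unprivileged user (uid 1000) and hand it the downloaded model file.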
RUN useradd -m -u 1000 user

RUN mkdir -p /home/user/app && \
    mv /app/${DEFAULT_MODEL_FILE} /home/user/app

RUN chown -R user:user /home/user/app

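# Run as the unprivileged user; MODEL_FILE points the application at the downloaded weights.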
USER user
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH \
    MODEL_NAME=${MODEL_NAME} \
    MODEL_FILE=/home/user/app/${DEFAULT_MODEL_FILE}

WORKDIR $HOME/app

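# Copy the application source (including main.py) into the user's app directory.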
COPY --chown=user . .

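# Debug aid: show the copied files in the build log.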
RUN ls -al

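# Serve the ASGI application (assumes an `app` object in main.py) on port 8000.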
EXPOSE 8000

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]