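# Two-stage image for serving a GGUF model with llama-cpp-python and an ASGI app
# (main.py) run by uvicorn, built on the NVIDIA CUDA devel/runtime base images.
# Example usage (the image name is illustrative):
#   docker build -t magicoder-server .
#   docker run -p 7860:7860 magicoder-server
# The base image versions below can be overridden at build time with --build-arg.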
ARG UBUNTU_VERSION=22.04
ARG CUDA_VERSION=11.7.1
ARG BASE_CUDA_DEV_CONTAINER=nvidia/cuda:${CUDA_VERSION}-devel-ubuntu${UBUNTU_VERSION}
ARG BASE_CUDA_RUN_CONTAINER=nvidia/cuda:${CUDA_VERSION}-runtime-ubuntu${UBUNTU_VERSION}

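# Build stage: CUDA devel image with compilers, the project sources, and CUDA build flags.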
FROM ${BASE_CUDA_DEV_CONTAINER} AS build

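# Compiler toolchain and git.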
RUN apt-get update && \
    apt-get install -y build-essential git

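# Python interpreter and pip.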
RUN apt-get update && \
    apt-get install -y python3 python3-pip

WORKDIR /app

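# Copy the project sources into the build stage.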
COPY . .

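# Create an unprivileged user and drop root privileges.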
RUN useradd -m -u 1000 user

USER user

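# llama.cpp CUDA build flags: target all supported GPU architectures and enable cuBLAS.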
ENV CUDA_DOCKER_ARCH=all \
    LLAMA_CUBLAS=1

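# Runtime stage: smaller CUDA runtime image that serves the model.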
FROM ${BASE_CUDA_RUN_CONTAINER} AS runtime

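# Unprivileged user plus the packages needed to build llama-cpp-python from source
# (compilers, ninja, pkg-config, OpenBLAS headers) and curl for downloading the model.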
RUN useradd -m -u 1000 user && \
    apt-get update && \
    apt-get install -y libopenblas-dev ninja-build build-essential pkg-config curl

USER user

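# Per-user pip installs land under $HOME/.local, so put its bin directory on PATH.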
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

WORKDIR $HOME/app

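# Python and pip in the runtime image (installed as root, then drop back to the app user).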
USER root
RUN apt-get update && \
    apt-get install -y python3 python3-pip

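# Install llama-cpp-python with its server extras (an OpenAI-compatible HTTP server);
# pip builds the llama.cpp backend from source, which is why compilers are installed above.
# With no CMAKE_ARGS set in this stage this is the default CPU build; llama-cpp-python reads
# CMAKE_ARGS at install time (e.g. CMAKE_ARGS="-DLLAMA_CUBLAS=on") to enable acceleration.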
USER user
RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
    pip install --verbose llama-cpp-python[server]

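# Download the GGUF model weights at build time so the image can serve without fetching at startup.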
RUN mkdir $HOME/model && \
    curl -L https://huggingface.co/matthoffner/Magicoder-S-DS-6.7B-GGUF/resolve/main/Magicoder-S-DS-6.7B_Q4_K_M.gguf -o $HOME/model/gguf-model.gguf

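# The ASGI app (main:app) that uvicorn serves below.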
COPY --chown=user ./main.py $HOME/app/

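# Bind address and port; 7860 is the port conventionally expected by Hugging Face Spaces.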
ENV HOST=0.0.0.0 \
    PORT=7860

EXPOSE ${PORT}

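# Sanity check: show the downloaded model in the build log.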
RUN ls -la $HOME/model

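# Serve the app with uvicorn on all interfaces at port 7860.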
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860", "--log-level", "debug"] |
|
|