FROM python:3.11-slim-bullseye
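
# Toolchain and libraries needed to compile the gpt4all backend and other native extensions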
RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends \
    python3 \
    python3-dev \
    python3-pip \
    ninja-build \
    build-essential \
    pkg-config \
    gnupg2 \
    git \
    liblzma-dev \
    wget \
    clang \
    g++
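
# Remove any apt-installed CMake and use a newer release from PyPI instead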
RUN apt-get remove -y cmake
RUN pip install --upgrade cmake

WORKDIR /code

RUN chmod 777 .

COPY ./requirements.txt /code/requirements.txt

RUN pip install --upgrade pip
RUN pip install --upgrade setuptools
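
# Build the gpt4all C++ backend from source and install its Python bindings in editable mode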
RUN cd /tmp && git clone --recurse-submodules https://github.com/nomic-ai/gpt4all && \
    cd gpt4all/gpt4all-backend/ && \
    mkdir build && cd build && \
    cmake .. && cmake --build . --parallel && \
    cd ../../gpt4all-bindings/python && pip3 install -e .
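
# Install llama-cpp-python, pulling prebuilt CPU wheels from the project's wheel index where available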
RUN pip install llama-cpp-python \
    --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu

RUN pip install --no-cache-dir -r /code/requirements.txt
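
# Run the application as an unprivileged user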
RUN useradd -m -u 1000 user

USER user
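
# Runtime environment: user-local bin on PATH plus application settings (host, port, origins, CUDA flag)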
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH \
    TF_ENABLE_ONEDNN_OPTS=0 \
    HOST=0.0.0.0 \
    PORT=7860 \
    ORIGINS=* \
    DLLMODEL_CUDA=OFF

WORKDIR $HOME/app

COPY --chown=user . $HOME/app

RUN chmod 777 .

EXPOSE 7860

CMD ["python", "-m", "main"]