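# Full (Debian-based) Python image; the build below relies on its preinstalled git, wget, and curl.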
FROM python:latest

# Stream Python output straight to the container logs (no buffering)
ENV PYTHONUNBUFFERED=1

EXPOSE 8080

WORKDIR /app

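# Install CMake 3.17, fetch the llama-cpp-python sources, and build the bindings with OpenBLAS enabled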
RUN wget -qO- "https://cmake.org/files/v3.17/cmake-3.17.0-Linux-x86_64.tar.gz" | tar --strip-components=1 -xz -C /usr/local
RUN git clone https://github.com/abetlen/llama-cpp-python
RUN CMAKE_ARGS="-DLLAMA_OPENBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python
RUN pip install uvicorn fastapi toml

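# Set up git-lfs and pull the WizardLM 7B GGML weights from Hugging Face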
RUN curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash
RUN apt-get install -y git-lfs && git lfs install
RUN git clone https://huggingface.co/TheBloke/wizardLM-7B-GGML

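# Copy the application code into /app and list the result for build-log debugging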
COPY . .
RUN ls -al

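# Serve the FastAPI app with uvicorn on port 8080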
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8080", "--workers", "2"]
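
# For reference, the CMD above expects a FastAPI application exported as
# `app` from main.py in /app. A minimal sketch of such a module, assuming
# a /generate endpoint and a q4_0 model file name (both are assumptions,
# not specified by this Dockerfile):
#
#   from fastapi import FastAPI
#   from pydantic import BaseModel
#   from llama_cpp import Llama
#
#   app = FastAPI()
#   # Model file name is assumed; point this at whichever quantized
#   # .bin file the cloned wizardLM-7B-GGML repo actually contains.
#   llm = Llama(model_path="wizardLM-7B-GGML/wizardLM-7B.ggmlv3.q4_0.bin")
#
#   class Prompt(BaseModel):
#       text: str
#       max_tokens: int = 128
#
#   @app.post("/generate")
#   def generate(prompt: Prompt):
#       # llama-cpp-python returns an OpenAI-style completion dict
#       result = llm(prompt.text, max_tokens=prompt.max_tokens)
#       return {"completion": result["choices"][0]["text"]}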