Spaces:
Running
Running
tes
Browse files- Dockerfile +38 -22
- entrypoint.sh +21 -0
Dockerfile
CHANGED
@@ -1,23 +1,37 @@
|
|
1 |
-
|
2 |
-
#
|
3 |
-
|
4 |
-
|
5 |
-
|
6 |
-
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
21 |
# Set the working directory
|
22 |
WORKDIR /app
|
23 |
# Copy requirements file
|
@@ -62,5 +76,7 @@ RUN chmod -R 777 translations
|
|
62 |
COPY init.sh /app/init.sh
|
63 |
RUN chmod +x /app/init.sh
|
64 |
|
65 |
-
#
|
66 |
-
|
|
|
|
|
|
# syntax=docker/dockerfile:1
FROM python:3.10-slim-buster

# Fail pipelines on the first failing command (needed for the curl|gpg
# and curl|sh pipes below; default /bin/sh -c would mask curl errors).
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Install curl and gnupg; skip recommended packages and drop the apt
# cache in the same layer so neither ends up in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        curl \
        gnupg \
    && rm -rf /var/lib/apt/lists/*

# Add NVIDIA package repositories
RUN curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg \
    && echo "deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://nvidia.github.io/libnvidia-container/stable/deb/ $(. /etc/os-release; echo $UBUNTU_CODENAME) main" > /etc/apt/sources.list.d/nvidia-container-toolkit.list

# Install NVIDIA container toolkit; "|| true" keeps the build alive when
# the package is unavailable for this base (Check for any updated
# methods or URLs for Ubuntu jammy).
RUN apt-get update && apt-get install -y --no-install-recommends nvidia-container-toolkit || true

# Install application. -fsSL makes curl fail loudly instead of piping an
# HTML error page into sh.
RUN curl -fsSL https://ollama.ai/install.sh | sh
# Below is to fix embedding bug as per
# RUN curl -fsSL https://ollama.com/install.sh | sed 's#https://ollama.com/download#https://github.com/jmorganca/ollama/releases/download/v0.1.29#' | sh

# Create the directory and give appropriate permissions.
# NOTE(review): 777 looks intentional — Spaces-style runtimes start the
# container under an arbitrary non-root UID; confirm before tightening.
RUN mkdir -p /.ollama && chmod 777 /.ollama

WORKDIR /.ollama

# Copy the entry point script
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

# Model(s) to pull at startup, comma-separated. Declaring the ARG makes
# the value settable at build time (--build-arg model=...); the ENV can
# still be overridden at run time (-e model=...). Previously ${model}
# referenced an undeclared variable and always expanded to "".
ARG model=""
ENV model=${model}

# Expose the server port (documentation only; does not publish the port)
EXPOSE 7860

# Set the working directory
WORKDIR /app
# Copy requirements file
|
|
COPY init.sh /app/init.sh
RUN chmod +x /app/init.sh

# Set the entry point script as the default command (exec form, so the
# script receives signals directly from `docker stop`).
# NOTE(review): entrypoint.sh never execs "$@", so these CMD arguments
# ("ollama", "serve") are currently ignored by the script — confirm
# whether the script should end with `exec "$@"` instead.
ENTRYPOINT ["/entrypoint.sh"]
CMD ["ollama", "serve"]
entrypoint.sh
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/bin/bash
# Entrypoint: start the Ollama server in the background, pull every
# configured model, then launch the Python application.
#
# Reads: $model — comma-separated list of Ollama model names (set via
# ENV in the Dockerfile).

# Abort on the first failure; previously a failed `ollama pull` was
# silently ignored and the app started without its model.
set -e

# Starting server
echo "Starting server"
ollama serve &
SERVER_PID=$!
# Give the server a moment to start accepting requests.
sleep 1

# Splitting the models by comma and pulling each
IFS=',' read -ra MODELS <<< "$model"
for m in "${MODELS[@]}"; do
    echo "Pulling $m"
    ollama pull "$m"
    sleep 5
done

# Launch the application once ALL models are available. This previously
# ran inside the loop, so the (blocking) app started after the first
# pull and any remaining models were never fetched.
python run.py

# Keep the script running to prevent the container from exiting while
# the background server is alive.
wait "$SERVER_PID"