Update whisper_pipeline_cu118/dockerfile
Production: change the CUDA base image to 12.0 based on the faster-whisper version; forward the port to 5678.
whisper_pipeline_cu118/dockerfile
CHANGED
@@ -1,5 +1,5 @@
 # Use nvidia/cuda as base image with Python
-FROM nvidia/cuda:
+FROM nvidia/cuda:12.0.0-cudnn8-runtime-ubuntu20.04

 # Use args
 ARG USE_CUDA
@@ -7,7 +7,7 @@ ARG USE_CUDA_VER

 ## Basis ##
 ENV ENV=prod \
-    PORT=
+    PORT=5678 \
     USE_CUDA_DOCKER=${USE_CUDA} \
     USE_CUDA_DOCKER_VER=${USE_CUDA_VER}

@@ -46,11 +46,11 @@ RUN pip install --no-cache-dir -r requirements.txt
 COPY . .

 # Expose the API port
-EXPOSE
+EXPOSE 5678

 # Set the environment variables
 ENV HOST="0.0.0.0"
-ENV PORT="
+ENV PORT="5678"

 # Set entrypoint to run the FastAPI server
 ENTRYPOINT ["bash", "start.sh"]
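A minimal build-and-run sketch for checking the new port mapping. The image tag whisper-pipeline-cu12 and the /docs check are illustrative assumptions, not part of this commit; the container serves whatever FastAPI app start.sh launches.

# Build the image from the updated dockerfile (hypothetical tag)
docker build -f whisper_pipeline_cu118/dockerfile -t whisper-pipeline-cu12 whisper_pipeline_cu118

# Run with GPU access and publish the API port set in the dockerfile
docker run --rm --gpus all -p 5678:5678 whisper-pipeline-cu12

# From the host, confirm the server answers on 5678 (path assumes FastAPI's default docs page is enabled)
curl http://localhost:5678/docs

Since EXPOSE and ENV PORT now agree on 5678, publishing 5678:5678 is sufficient; a different host port can still be chosen on the left side of -p.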