# First stage: build dependencies
FROM public.ecr.aws/docker/library/python:3.11.9-slim-bookworm AS builder
# Optional - install the AWS Lambda Web Adapter in case you want to run behind an AWS Lambda function URL
# COPY --from=public.ecr.aws/awsguru/aws-lambda-adapter:0.8.3 /lambda-adapter /opt/extensions/lambda-adapter
# Update apt
RUN apt-get update && rm -rf /var/lib/apt/lists/*
# Create directories for the model and for the staged package install
RUN mkdir -p /model /model/minilm /install
WORKDIR /src
COPY requirements_aws.txt .
RUN pip install torch==2.4.0+cpu --target=/install --index-url https://download.pytorch.org/whl/cpu \
&& pip install --no-cache-dir --target=/install sentence-transformers==3.0.1 --no-deps \
&& pip install --no-cache-dir --target=/install -r requirements_aws.txt \
&& pip install --no-cache-dir --target=/install gradio==4.41.0
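# Note: everything above is staged under /install via pip's --target flag so the runtime
# stage can copy the packages in a single layer; torch is pulled as a CPU-only wheel from
# the PyTorch index, and sentence-transformers is installed with --no-deps (presumably so
# pip does not drag in the default GPU build of torch as a dependency).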
# Add /install to the PYTHONPATH
ENV PYTHONPATH="/install:${PYTHONPATH}"
# Download the embedding model during the build: download_model.py fetches specific files with huggingface_hub into /model/minilm
COPY download_model.py /src/download_model.py
RUN python /src/download_model.py
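# A minimal sketch of what download_model.py could look like; the repo_id and file list
# below are illustrative assumptions, not taken from this Dockerfile:
#
#   from huggingface_hub import hf_hub_download
#   repo_id = "sentence-transformers/all-MiniLM-L6-v2"  # hypothetical embedding model
#   for f in ["config.json", "pytorch_model.bin", "tokenizer_config.json",
#             "vocab.txt", "special_tokens_map.json", "modules.json"]:
#       hf_hub_download(repo_id=repo_id, filename=f, local_dir="/model/minilm")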
# Stage 2: Final runtime image
FROM public.ecr.aws/docker/library/python:3.11.9-slim-bookworm
# Set up a new user named "user" with user ID 1000
RUN useradd -m -u 1000 user
# Copy installed packages from builder stage
COPY --from=builder /install /usr/local/lib/python3.11/site-packages/
# Change ownership of /home/user directory
RUN chown -R user:user /home/user
# Make output folder
RUN mkdir -p /home/user/app/output && mkdir -p /home/user/.cache/huggingface/hub && chown -R user:user /home/user
# Copy models from the builder stage
COPY --from=builder /model/minilm /home/user/app/model/minilm
# Switch to the "user" user
USER user
# Set home to the user's home directory
ENV HOME=/home/user \
PATH=/home/user/.local/bin:$PATH \
PYTHONPATH=$HOME/app \
PYTHONUNBUFFERED=1 \
PYTHONDONTWRITEBYTECODE=1 \
GRADIO_ALLOW_FLAGGING=never \
GRADIO_NUM_PORTS=1 \
GRADIO_SERVER_NAME=0.0.0.0 \
GRADIO_SERVER_PORT=7860 \
GRADIO_THEME=huggingface \
AWS_STS_REGIONAL_ENDPOINT=regional \
SYSTEM=spaces
# Set the working directory to the user's home directory
WORKDIR $HOME/app
# Copy the current directory contents into the container at $HOME/app setting the owner to the user
COPY --chown=user . $HOME/app
CMD ["python", "app.py"] |