llava-fastapi / Dockerfile
# Use the official Python image.
FROM python:3.12-slim
# Set the working directory in the container.
WORKDIR /llava-fastapi
# Copy the current directory contents into the container at /llava-fastapi.
COPY . /llava-fastapi
# Create the cache directory and set permissions.
RUN mkdir -p /workspace/cache && \
    chmod -R 777 /workspace/cache
# Set the environment variable for the transformers cache directory.
ENV TRANSFORMERS_CACHE=/workspace/cache
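# Newer transformers/huggingface_hub releases treat TRANSFORMERS_CACHE as deprecated and
# read HF_HOME instead; setting it to the same writable directory (an addition, assuming a
# recent transformers version) keeps downloads in one place either way.
ENV HF_HOME=/workspace/cache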
# Install any needed packages specified in requirements.txt.
RUN pip install --no-cache-dir -r requirements.txt
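# requirements.txt is not shown here; for a LLaVA + FastAPI service it would typically list
# at least fastapi, uvicorn, transformers, torch, and pillow (an assumption, not confirmed
# by this file).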
# Document that the app listens on port 8000 (publish it with -p at run time).
EXPOSE 8000
# Define environment variable for the host.
ENV HOST=0.0.0.0
# Run uvicorn server on container startup.
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
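# A minimal sketch of how this image might be built and run locally (the image name is an assumption):
#   docker build -t llava-fastapi .
#   docker run -p 8000:8000 llava-fastapi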