FROM python:3.9
# Create a user to run the app
RUN useradd -m -u 1000 user
USER user
ENV PATH="/home/user/.local/bin:$PATH"
# Set the working directory to /app
WORKDIR /app
# Copy the requirement.txt file
COPY --chown=user ./requirement.txt requirement.txt
# Copy the necessary libraries (if required)
COPY --chown=user ./lib/aicloudlibs-0.1.0-py3-none-any.whl /lib/
# You can add the rest of your .whl files as needed
# COPY --chown=user ./lib/better_profanity-2.0.0-py3-none-any.whl /lib/
# COPY --chown=user ./lib/privacy-1.0.9-py3-none-any.whl /lib/
# Install dependencies
RUN pip install --no-cache-dir --upgrade -r requirement.txt
# Copy the src folder into /app/src in the container
COPY --chown=user ./src /app/src
COPY --chown=user ./config /app/config
COPY --chown=user ./lib /app/lib
COPY --chown=user ./models /app/models
# Set PYTHONPATH to include /app/src so Python can find llm_explain
ENV PYTHONPATH="/app/src:$PYTHONPATH"
# Expose the port (default for Hugging Face is 7860)
EXPOSE 7860
# CMD to run the FastAPI app with Uvicorn
CMD ["uvicorn", "src.main:app", "--host", "0.0.0.0", "--port", "7860"]