# internvl2-llama3-demo / Dockerfile
# Uploaded to the Hugging Face Hub with huggingface_hub by mknolan
# (revision e87e4c3, verified)
# Pinned PyTorch runtime base (torch 2.0.1, CUDA 11.7, cuDNN 8) — reproducible builds
FROM pytorch/pytorch:2.0.1-cuda11.7-cudnn8-runtime

# Build-time only: silence apt prompts without leaking this into the runtime env
ARG DEBIAN_FRONTEND=noninteractive

# Runtime configuration:
#   PYTHONUNBUFFERED        - flush Python stdout/stderr immediately (live container logs)
#   HF_HOME                 - Hugging Face cache root inside the image
#   TRANSFORMERS_CACHE      - transformers model cache (deprecated in newer transformers
#                             releases in favor of HF_HOME — kept for compatibility)
#   MPLCONFIGDIR            - writable matplotlib config dir (avoids warnings as non-root)
#   PYTORCH_CUDA_ALLOC_CONF - cap CUDA allocator split size to reduce fragmentation OOMs
ENV PYTHONUNBUFFERED=1 \
    HF_HOME=/app/.cache/huggingface \
    TRANSFORMERS_CACHE=/app/.cache/huggingface/transformers \
    MPLCONFIGDIR=/tmp/matplotlib \
    PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:128
# Pre-create cache/config directories and open their permissions so the app can
# write to them regardless of which UID the container runs as (e.g. HF Spaces
# runs containers with an arbitrary user).
# NOTE(review): 777 is deliberately permissive for arbitrary-UID runtimes.
RUN mkdir -p /app/.cache/huggingface/transformers /tmp/matplotlib && \
    chmod -R 777 /app /tmp/matplotlib
# OS-level build/runtime dependencies. --no-install-recommends keeps the image
# small; removing the apt lists in the same layer avoids baking the cache in.
# Package list is sorted alphabetically for easier diffing.
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        build-essential \
        ca-certificates \
        curl \
        git \
        python3-dev \
        python3-pip \
        python3-setuptools \
    && rm -rf /var/lib/apt/lists/*
# Create a working directory; all subsequent relative paths resolve under /app
# (WORKDIR creates the directory automatically if it does not exist)
WORKDIR /app
# Generate the GPU-status entrypoint script.
# Written with POSIX `printf '%s\n'` instead of `echo '...\n...'`: whether echo
# interprets backslash escapes is shell-dependent (dash does, bash's builtin
# does not without -e), so the original could emit a one-line, broken script
# depending on the build shell. printf is portable and unambiguous.
# Single quotes keep ${CUDA_VISIBLE_DEVICES}/"$@" unexpanded at build time so
# they are evaluated when the container starts. The script ends with `exec "$@"`
# so the CMD becomes PID 1 and receives signals directly.
RUN printf '%s\n' \
      '#!/bin/bash' \
      'echo "Checking NVIDIA GPU status..."' \
      'if ! command -v nvidia-smi &> /dev/null; then' \
      '    echo "WARNING: nvidia-smi command not found. NVIDIA driver might not be installed."' \
      'else' \
      '    echo "NVIDIA driver found. Running nvidia-smi:"' \
      '    nvidia-smi' \
      'fi' \
      'echo "Environment variables for GPU:"' \
      'echo "CUDA_VISIBLE_DEVICES=${CUDA_VISIBLE_DEVICES}"' \
      'echo "NVIDIA_VISIBLE_DEVICES=${NVIDIA_VISIBLE_DEVICES}"' \
      'exec "$@"' \
      > /entrypoint.sh && \
    chmod +x /entrypoint.sh
# Copy only the dependency manifest first so the (slow) pip install layer stays
# cached until requirements change — app-code edits won't trigger a reinstall.
COPY requirements_internvl2_llama3.txt ./requirements.txt

# Upgrade pip, pin torch/torchvision to the versions matching the base image's
# CUDA 11.7 build, then install the remaining application dependencies.
# --no-cache-dir keeps the pip download cache out of the image layer.
RUN pip3 install --no-cache-dir --upgrade pip && \
    pip3 install --no-cache-dir torch==2.0.1 torchvision==0.15.2 && \
    pip3 install --no-cache-dir -r requirements.txt
# Application code is copied last so code-only edits don't invalidate the
# dependency layers above.
COPY app_internvl2_llama3.py ./app.py

# Re-assert the runtime cache directory and its permissions after the COPY
# (absolute paths — WORKDIR is /app, so this matches the earlier setup).
RUN mkdir -p /app/.cache/huggingface/transformers && \
    chmod -R 777 /app/.cache
# Make port 7860 available for the app (documentation only — it does not
# publish the port; map it with `-p` / the orchestrator at run time)
EXPOSE 7860
# Use our entrypoint script to check GPU status before starting the app;
# it ends with `exec "$@"` so the CMD below runs as PID 1 (exec form)
ENTRYPOINT ["/entrypoint.sh"]
# Start the application — default arguments to the entrypoint; can be
# overridden at `docker run` without replacing the entrypoint itself
CMD ["python3", "app.py"]