Updated Dockerfile for Hugging Face Spaces deployment
- Dockerfile +4 -4
- models/nllb.py +7 -5
Dockerfile
CHANGED
@@ -44,13 +44,13 @@ ENV PATH=${CUDA_HOME}/bin:${PATH}
 # Optionally set LD_LIBRARY_PATH for CUDA libraries
 ENV LD_LIBRARY_PATH=${CUDA_HOME}/lib64:${LD_LIBRARY_PATH}
 
-#
+# Create cache directory with appropriate permissions
+RUN mkdir -p /app/.cache/huggingface && chmod -R 777 /app/.cache/huggingface
+
+# Set environment variables for Hugging Face cache directory
 ENV HF_HOME=/app/.cache/huggingface
 ENV TRANSFORMERS_CACHE=/app/.cache/huggingface
 
-# Create cache directory and set permissions
-RUN mkdir -p /app/.cache/huggingface && chmod -R 777 /app/.cache/huggingface
-
 # Copy the setup script and requirements file into the container
 COPY setup.sh requirements.txt /app/
 
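As a sanity check outside this commit, a small runtime snippet like the one below can confirm that the cache directory and environment variables set up by the Dockerfile above are in place inside the running container. The path and variable names are taken from the diff; the check itself is only an illustrative sketch, not code shipped with the Space.

import os

# Path and variable name match the Dockerfile in this commit; this is a
# hypothetical verification step, not part of the committed change.
cache_dir = os.environ.get("HF_HOME", "/app/.cache/huggingface")
if not os.path.isdir(cache_dir):
    raise RuntimeError(f"Cache directory {cache_dir} is missing")
if not os.access(cache_dir, os.W_OK):
    raise RuntimeError(f"Cache directory {cache_dir} is not writable")
print(f"Hugging Face cache directory ready: {cache_dir}")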
models/nllb.py
CHANGED
@@ -20,12 +20,14 @@ def nllb():
     device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
     # Load the tokenizer and model
     # Set Hugging Face cache directory
-
-
+    # Ensure the cache directory exists and has the correct permissions
+    cache_dir = "/app/.cache/huggingface"
+    if not os.path.exists(cache_dir):
+        os.makedirs(cache_dir)
+    os.chmod(cache_dir, 0o777)
 
-
-    os.
-    os.chmod('/app/.cache/huggingface', 0o777)
+    os.environ['HF_HOME'] = cache_dir
+    os.environ['TRANSFORMERS_CACHE'] = cache_dir
 
     # Load models
     tokenizer = AutoTokenizer.from_pretrained("facebook/nllb-200-distilled-1.3B")
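Taken together, the nllb() change amounts to the pattern sketched below: create the cache directory, make it writable, point the Hugging Face environment variables at it, and only then load the model. The cache path, environment variables, and tokenizer call come from the diff; wrapping them in a standalone load_nllb() helper and loading AutoModelForSeq2SeqLM are assumptions added for illustration, since the diff only shows the tokenizer line.

import os
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

def load_nllb(cache_dir="/app/.cache/huggingface"):
    # load_nllb() is a hypothetical stand-in for the Space's nllb() function.
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    # Create the cache directory (world-writable, matching the Dockerfile step)
    # and point the Hugging Face libraries at it before anything is downloaded.
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    os.chmod(cache_dir, 0o777)
    os.environ["HF_HOME"] = cache_dir
    os.environ["TRANSFORMERS_CACHE"] = cache_dir

    # Tokenizer load as shown in the diff; the seq2seq model load is an assumed
    # continuation of the function.
    tokenizer = AutoTokenizer.from_pretrained("facebook/nllb-200-distilled-1.3B")
    model = AutoModelForSeq2SeqLM.from_pretrained("facebook/nllb-200-distilled-1.3B").to(device)
    return tokenizer, model, device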