Commit a1eb3aa, committed by GitHub Actions
Parent(s): f6e58dd
Sync API from main repo

Files changed:
- Dockerfile  +6 -0
- fast.py     +14 -5
Dockerfile CHANGED
@@ -1,6 +1,12 @@
 # Use a lightweight Python image
 FROM python:3.10-slim
 
+# Create a writable cache directory
+RUN mkdir -p /app/cache && chmod -R 777 /app/cache
+
+# Set environment variable for cache
+ENV CACHE_DIR=/app/cache
+
 # Set the working directory in the container
 WORKDIR /app
 
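The Dockerfile change pre-creates a world-writable /app/cache and exports CACHE_DIR, so the application can write its cache even when the container runs as a non-root user. A minimal sketch of how a process inside this image could sanity-check that wiring; only CACHE_DIR and /app/cache come from the Dockerfile above, the rest is illustrative:

# Minimal sketch: verify the cache directory set up by the Dockerfile.
import os

cache_dir = os.environ.get("CACHE_DIR", "/app/cache")
os.makedirs(cache_dir, exist_ok=True)  # no-op if the RUN step already created it

if not os.access(cache_dir, os.W_OK):
    raise PermissionError(f"{cache_dir} is not writable by this user")
print(f"Using cache directory: {cache_dir}")
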
fast.py CHANGED
@@ -6,6 +6,7 @@ from preproc import label_decoding
 import pandas as pd
 from io import StringIO
 from pathlib import Path
+import os
 
 # Get the absolute path to the package directory
 PACKAGE_ROOT = Path(__file__).parent.parent.parent
@@ -17,6 +18,14 @@ app = FastAPI(
     openapi_url="/openapi.json"
 )
 
+# Dynamically set the cache directory
+DEFAULT_CACHE_DIR = "./cache"  # Local directory for cache
+CACHE_DIR = os.getenv("CACHE_DIR", DEFAULT_CACHE_DIR)
+
+# Ensure the cache directory exists
+os.makedirs(CACHE_DIR, exist_ok=True)
+
+
 # Use absolute paths with Path objects
 model_cache = {}
 encoder_cache = {}
@@ -37,14 +46,14 @@ async def predict(model_name: str, filepath_csv: UploadFile = File(...)):
 
     # if model in model_path, load it, otherwise download it from HF
     if model_name not in model_cache:
-        print("model_name", model_name)
-        print("model_path", model_path)
+        # print("model_name", model_name)
+        # print("model_path", model_path)
         try:
             if not model_path.exists():
                 # Convert downloaded paths to Path objects
-                model_path = Path(hf_hub_download(repo_id=HF_REPO_ID, filename=f"{model_name}"))
-                encoder_path = Path(hf_hub_download(repo_id=HF_REPO_ID, filename=f"{encoder_name}"))
-                print("model_path", model_path)
+                model_path = Path(hf_hub_download(repo_id=HF_REPO_ID, filename=f"{model_name}", cache_dir=CACHE_DIR))
+                encoder_path = Path(hf_hub_download(repo_id=HF_REPO_ID, filename=f"{encoder_name}", cache_dir=CACHE_DIR))
+                # print("model_path", model_path)
             model_cache[model_name] = load_model_by_type(model_path)  # Ensure string path for loading
             encoder_cache[model_name] = encoder_path
         except Exception as e:
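The fast.py change mirrors the Dockerfile: CACHE_DIR is read from the environment (falling back to ./cache for local runs), created if missing, and passed to hf_hub_download so downloaded files land in the writable directory instead of the default ~/.cache/huggingface location. A minimal standalone sketch of that pattern; the repo id and filename below are placeholders, not values from this repository:

# Minimal sketch of the cache_dir pattern used above.
# HYPOTHETICAL repo_id/filename; only the cache_dir wiring mirrors fast.py.
import os
from pathlib import Path
from huggingface_hub import hf_hub_download

CACHE_DIR = os.getenv("CACHE_DIR", "./cache")  # set by the Dockerfile in the container, local fallback otherwise
os.makedirs(CACHE_DIR, exist_ok=True)

model_path = Path(
    hf_hub_download(
        repo_id="some-user/some-model-repo",  # placeholder repo id
        filename="model.joblib",              # placeholder filename
        cache_dir=CACHE_DIR,                  # cache under CACHE_DIR, not ~/.cache/huggingface
    )
)
print(model_path)

The debug print calls are commented out rather than deleted, keeping the diff small; the added cache_dir argument is the functional part of the change.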