# download_model.py
# Pre-downloads the flan-t5-base weights into a local cache so later runs
# can load the model without contacting the Hugging Face Hub.
from transformers import T5ForConditionalGeneration
import os

CACHE_DIR = "/app/src/citance_analysis/models_cache"

# Make sure the cache directory exists before downloading.
os.makedirs(CACHE_DIR, exist_ok=True)

model = T5ForConditionalGeneration.from_pretrained(
    "google/flan-t5-base",
    cache_dir=CACHE_DIR,
    torch_dtype="auto",   # use the dtype stored in the checkpoint
    device_map="auto",    # requires the accelerate package
)
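
Once the weights are cached, a companion script can load them without any network access. The sketch below is illustrative and not part of the original file; reusing CACHE_DIR and passing local_files_only=True are assumptions about how the cache is meant to be consumed.

# load_model.py (illustrative sketch, not in the original repo)
from transformers import T5ForConditionalGeneration

CACHE_DIR = "/app/src/citance_analysis/models_cache"

# local_files_only=True tells transformers to read the cached weights
# instead of contacting the Hugging Face Hub, as an offline container would need.
model = T5ForConditionalGeneration.from_pretrained(
    "google/flan-t5-base",
    cache_dir=CACHE_DIR,
    local_files_only=True,
)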