# PapaGEN / model.py
# (Hugging Face Hub page header: uploaded by MikeMpapa, "Upload 9 files",
#  commit 2a0ab09 verified, 1.03 kB — converted to a comment so the module parses.)
import torch
from typing import Tuple
from transformers import AutoTokenizer, AutoModelForCausalLM
# Module-level lazy cache for the model/tokenizer pair.
# Both start as None and are populated on the first call to
# get_model_and_tokenizer(); subsequent calls reuse the loaded objects.
tokenizer = None
model = None
def get_model_and_tokenizer() -> Tuple[AutoModelForCausalLM, AutoTokenizer]:
    """
    Return the cached (model, tokenizer) pair, loading both on first use.

    The pair is stored in module-level globals so that repeated calls are
    cheap: the Hub download and weight initialization happen at most once
    per process. The model is moved to GPU when CUDA is available,
    otherwise it stays on CPU.

    Returns:
        tuple: (model, tokenizer) — the causal-LM model (already moved to
        the selected device) and its matching tokenizer.
    """
    global model, tokenizer
    if model is None or tokenizer is None:
        # Pick the compute device once, before loading weights.
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        # `token=True` reads the locally saved Hugging Face auth token;
        # the original `use_auth_token=True` is deprecated in recent
        # transformers releases and slated for removal.
        tokenizer = AutoTokenizer.from_pretrained(
            "MikeMpapa/lmd_mmm_tokenizer_tutorial_artist",
            token=True,
        )
        model = AutoModelForCausalLM.from_pretrained(
            "MikeMpapa/lmd-4bars-2048-epochs7",
            token=True,
        )
        # Move the model's parameters to the chosen device.
        model = model.to(device)
    return model, tokenizer