# modules/module_languageModel.py
# --- Imports ---
from transformers import BertForMaskedLM, BertTokenizer


class LanguageModel:
    """Wraps a pretrained BERT masked language model and its tokenizer."""

    def __init__(self, model_name: str) -> None:
        # Download (or load from the local cache) the pretrained tokenizer and model.
        print("Downloading language model...")
        self.__tokenizer = BertTokenizer.from_pretrained(model_name)
        self.__model = BertForMaskedLM.from_pretrained(model_name, return_dict=True)
    def initTokenizer(self):
        # Getter: returns the loaded tokenizer.
        return self.__tokenizer

    def initModel(self):
        # Getter: returns the loaded masked language model.
        return self.__model
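

# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal example of how LanguageModel might be used for masked-token
# prediction. The model name "bert-base-uncased" and the example sentence
# are assumptions chosen for illustration, not requirements of this class.
if __name__ == "__main__":
    import torch

    lm = LanguageModel("bert-base-uncased")
    tokenizer = lm.initTokenizer()
    model = lm.initModel()

    # Predict the most likely token for the [MASK] position.
    inputs = tokenizer("The capital of France is [MASK].", return_tensors="pt")
    with torch.no_grad():
        logits = model(**inputs).logits
    mask_positions = (inputs["input_ids"] == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
    predicted_id = logits[0, mask_positions].argmax(dim=-1)
    print(tokenizer.decode(predicted_id))  # e.g. "paris"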