{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false, "__type": "AddedToken"}, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "models/roberta_french", "tokenizer_class": "RobertaTokenizer"}