{"errors": "replace", "bos_token": "", "eos_token": "", "sep_token": "", "cls_token": "", "unk_token": "", "pad_token": "", "mask_token": "", "add_prefix_space": false, "trim_offsets": true, "keep_accents": true, "max_len": 50, "special_tokens_map_file": "classcat/roberta-small-basque/special_tokens_map.json", "name_or_path": "classcat/roberta-small-basque", "tokenizer_class": "RobertaTokenizer"}