add tokenizer
tokenizer_config.json +1 -1
tokenizer_config.json
CHANGED
@@ -1 +1 @@
-{"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "keep_accents": true, "max_len": 32, "special_tokens_map_file": "classcat/roberta-greek/special_tokens_map.json", "name_or_path": "classcat/roberta-greek", "tokenizer_class": "RobertaTokenizer"}
+{"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "keep_accents": true, "max_len": 32, "special_tokens_map_file": "classcat/roberta-small-greek/special_tokens_map.json", "name_or_path": "classcat/roberta-small-greek", "tokenizer_class": "RobertaTokenizer"}
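For reference, a minimal sketch of loading the tokenizer described by the updated config, assuming the renamed repository classcat/roberta-small-greek is available on the Hugging Face Hub (the sample Greek sentence is only illustrative):

from transformers import RobertaTokenizer

# Load the tokenizer from the renamed repo; the config above supplies the
# special tokens (<s>, </s>, <pad>, <mask>, <unk>) and max_len of 32.
tokenizer = RobertaTokenizer.from_pretrained("classcat/roberta-small-greek")

# Encode a short Greek sentence, truncating to the configured max length.
encoded = tokenizer("Καλημέρα κόσμε", truncation=True, max_length=32)
print(encoded["input_ids"])
print(tokenizer.convert_ids_to_tokens(encoded["input_ids"]))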