Upload tokenizer
tokenizer_config.json  +2 -1
@@ -3,6 +3,7 @@
   "do_basic_tokenize": true,
   "do_lower_case": true,
   "mask_token": "[MASK]",
+  "model_max_length": 512,
   "name_or_path": "distilbert-base-uncased",
   "never_split": null,
   "pad_token": "[PAD]",
@@ -10,6 +11,6 @@
   "special_tokens_map_file": null,
   "strip_accents": null,
   "tokenize_chinese_chars": true,
-  "tokenizer_class": "
+  "tokenizer_class": "DistilBertTokenizer",
   "unk_token": "[UNK]"
 }
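For reference, a minimal sketch of how the two added keys take effect when the tokenizer is reloaded with transformers (the repo id below is a placeholder, not this repository's actual id):

from transformers import AutoTokenizer

# Placeholder repo id; substitute the repository this commit belongs to.
tokenizer = AutoTokenizer.from_pretrained("user/repo")

# "model_max_length": 512 caps the sequence length, so truncation now
# has a well-defined default limit instead of an effectively unbounded one.
encoded = tokenizer("a very long input " * 1000, truncation=True)
assert len(encoded["input_ids"]) <= 512

# "tokenizer_class": "DistilBertTokenizer" pins the class used on reload
# (AutoTokenizer resolves it to the fast variant by default).
print(type(tokenizer).__name__)    # e.g. DistilBertTokenizerFast
print(tokenizer.model_max_length)  # 512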