Upload tokenizer
- tokenizer.json (+6 -1)
- tokenizer_config.json (+0 -7)
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 128,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": {
     "strategy": "BatchLongest",
     "direction": "Right",
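The added "truncation" block is the shape the tokenizers library serializes when truncation is enabled on a fast tokenizer before saving. A minimal sketch of how a change like this could be produced (the file path is a placeholder, not taken from this commit):

# Sketch only: assumes the Hugging Face tokenizers library and a local
# tokenizer.json; paths are hypothetical.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")

# enable_truncation() is what emits a "truncation" object like the one
# added in this diff (direction "Right", max_length 128,
# strategy "LongestFirst", stride 0) on the next save.
tok.enable_truncation(
    max_length=128,
    stride=0,
    strategy="longest_first",
    direction="right",
)

tok.save("tokenizer.json")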
tokenizer_config.json CHANGED
@@ -5,19 +5,12 @@
   "do_lower_case": false,
   "full_tokenizer_file": null,
   "mask_token": "[MASK]",
-  "max_length": 512,
   "model_max_length": 1000000000000000019884624838656,
   "never_split": null,
-  "pad_to_multiple_of": null,
   "pad_token": "[PAD]",
-  "pad_token_type_id": 0,
-  "padding_side": "right",
   "sep_token": "[SEP]",
-  "stride": 0,
   "strip_accents": null,
   "tokenize_chinese_chars": false,
   "tokenizer_class": "ElectraTokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
 }
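The keys dropped here (max_length, stride, truncation_strategy, padding_side, pad_to_multiple_of, pad_token_type_id, truncation_side) correspond to arguments that transformers accepts per call rather than settings that must live in tokenizer_config.json, so removing them from the config does not lose information. A hedged sketch of supplying them at encode time (the repo id is a placeholder):

# Sketch only: assumes the transformers library; the model id stands in
# for wherever this tokenizer is uploaded.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-namespace/your-electra-model")

# The removed config keys map onto per-call arguments like these:
enc = tok(
    ["first example", "a somewhat longer second example"],
    truncation="longest_first",  # was "truncation_strategy"
    max_length=128,              # matches the new truncation block
    stride=0,                    # was "stride"
    padding="longest",           # matches the "BatchLongest" padding strategy
)
print(enc["input_ids"])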