Upload tokenizer.json
tokenizer.json  +1 -1
@@ -143,7 +143,7 @@
     "type": "WordPiece",
     "unk_token": "[UNK]",
     "continuing_subword_prefix": "##",
-    "max_input_chars_per_word":
+    "max_input_chars_per_word": 33000,
     "vocab": {
       "[PAD]": 0,
       "[UNK]": 1,