updated model_max_length from 1000000000000000019884624838656 to 32768
Browse files
The same bug is in the original tokenizer config by
@mistral-ai
.
- tokenizer_config.json +1 -1
tokenizer_config.json
CHANGED
@@ -9012,7 +9012,7 @@
|
|
9012 |
"eos_token": "</s>",
|
9013 |
"extra_special_tokens": {},
|
9014 |
"legacy": true,
|
9015 |
-
"model_max_length": 1000000000000000019884624838656,
|
9016 |
"pad_token": "<pad>",
|
9017 |
"padding_side": "left",
|
9018 |
"tokenizer_class": "LlamaTokenizer",
|
|
|
9012 |
"eos_token": "</s>",
|
9013 |
"extra_special_tokens": {},
|
9014 |
"legacy": true,
|
9015 |
+
"model_max_length": 32768,
|
9016 |
"pad_token": "<pad>",
|
9017 |
"padding_side": "left",
|
9018 |
"tokenizer_class": "LlamaTokenizer",
|