kubernetez committed on
Commit
ec08d62
1 Parent(s): a9381f1

Fix max_length parameter in tokenizer_config.json

Browse files

**What?**
I tried to use the tokenizer but there is no truncation happening.
I think the parameter is misnamed.
**How?**
Rename the parameter to `model_max_length`

Files changed (1) hide show
  1. tokenizer_config.json +1 -1
tokenizer_config.json CHANGED
@@ -1 +1 @@
1
- {"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "max_length": 160, "special_tokens_map_file": "/home/ec2-user/.cache/huggingface/transformers/6654a835c284613a15c3b583fce96f417606b95fab5ef47cc3da33de8ac237b6.0dc5b1041f62041ebbd23b1297f2f573769d5c97d8b7c28180ec86b8f6185aa8", "name_or_path": "cardiffnlp/twitter-xlm-roberta-base-sentiment", "sp_model_kwargs": {}, "tokenizer_class": "XLMRobertaTokenizer"}
 
1
+ {"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "model_max_length": 160, "special_tokens_map_file": "/home/ec2-user/.cache/huggingface/transformers/6654a835c284613a15c3b583fce96f417606b95fab5ef47cc3da33de8ac237b6.0dc5b1041f62041ebbd23b1297f2f573769d5c97d8b7c28180ec86b8f6185aa8", "name_or_path": "cardiffnlp/twitter-xlm-roberta-base-sentiment", "sp_model_kwargs": {}, "tokenizer_class": "XLMRobertaTokenizer"}