{
  "added_tokens_decoder": {},
  "auto_map": {
    "AutoTokenizer": [
      "tokenizer.AlphabetTokenizer",
      null
    ]
  },
  "clean_up_tokenization_spaces": true,
  "model_max_length": 2048,
  "pad_token": "[PAD]",
  "tokenizer_class": "AlphabetTokenizer"
}