{ "add_prefix_space": false, "additional_special_tokens": [ "[SYSTEM]", "[ASSISTANT]", "[USER]", "[END]" ], "bos_token": "<|endoftext|>", "clean_up_tokenization_spaces": true, "eos_token": "<|endoftext|>", "model_max_length": 1000, "pad_token": "[PAD]", "return_token_type_ids": false, "tokenizer_class": "GPT2Tokenizer", "unk_token": "<|endoftext|>", "vocab_size": 49152 }