gpt2-large-wechsel-ukrainian / tokenizer_config.json
initial commit (425f21f)
{"unk_token": "<|endoftext|>", "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "add_prefix_space": false, "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "gpt2_large_uk", "tokenizer_class": "GPT2Tokenizer"}