File size: 322 Bytes · commit b31cabe
{
  "add_bos_token": false,
  "add_eos_token": false,
  "model_max_length": 200000,
  "unk_token": "<unk>",
  "bos_token": "<|startoftext|>",
  "eos_token": "<|endoftext|>",
  "pad_token": "<unk>",
  "sp_model_kwargs": {},
  "clean_up_tokenization_spaces": false,
  "legacy": true,
  "tokenizer_class": "LlamaTokenizer"
}
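
For reference, a minimal sketch of loading a tokenizer from a checkpoint that ships this config, using Hugging Face transformers. The path below is a placeholder, not a real repo id, and the directory is assumed to also contain the sentencepiece tokenizer.model file that LlamaTokenizer requires.

from transformers import AutoTokenizer

# Placeholder path: substitute the actual model repo or local directory.
tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")

# Per this config, add_bos_token and add_eos_token are both false,
# so encoding does not prepend/append special tokens automatically.
ids = tokenizer("Hello, world!")["input_ids"]

print(tokenizer.bos_token)         # <|startoftext|>
print(tokenizer.eos_token)         # <|endoftext|>
print(tokenizer.model_max_length)  # 200000

Note that because the pad token is mapped to "<unk>" here, padded batches share the unknown-token id; callers that rely on a distinct pad id should check this before training.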