custom-reduced-vocab-addition-tokenizer / special_tokens_map.json
{
  "bos_token": {
    "content": "<|bos|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|end_answer|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|end_answer|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<|end_answer|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
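
This map is read automatically when the tokenizer is loaded, mapping the bos/eos/pad/unk roles to the token strings above (eos, pad, and unk all share "<|end_answer|>"). A minimal sketch of loading it with transformers follows; the repo id "Nbeau/custom-reduced-vocab-addition-tokenizer" is assumed from the page path and may differ.

from transformers import AutoTokenizer

# Repo id assumed from the file path shown above; adjust if the tokenizer lives elsewhere.
tokenizer = AutoTokenizer.from_pretrained("Nbeau/custom-reduced-vocab-addition-tokenizer")

# These attributes are populated from special_tokens_map.json.
print(tokenizer.bos_token)  # "<|bos|>"
print(tokenizer.eos_token)  # "<|end_answer|>"
print(tokenizer.pad_token)  # "<|end_answer|>" (same string as eos_token)
print(tokenizer.unk_token)  # "<|end_answer|>" (same string as eos_token)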