{
"additional_special_tokens": [
">>TITLE<<",
">>ABSTRACT<<",
">>INTRODUCTION<<",
">>SUMMARY<<",
">>COMMENT<<",
">>ANSWER<<",
">>QUESTION<<",
">>DOMAIN<<",
">>PREFIX<<",
">>SUFFIX<<",
">>MIDDLE<<"
],
"eos_token": "<|endoftext|>",
"pad_token": "[PAD]"
}