jukebox-1b-lyrics / tokenizer_config.json
{
  "max_n_lyric_tokens": 384,
  "n_genres": 1,
  "name_or_path": "ArthurZ/jukebox-1b-lyrics",
  "special_tokens_map_file": "/home/arthur_huggingface_co/.cache/huggingface/hub/models--ArthurZ--jukebox-1b-lyrics/snapshots/a0fe6d15e4a6de89f6447b1ae4e0ca5d16647b63/special_tokens_map.json",
  "tokenizer_class": "JukeboxTokenizer",
  "unk_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "version": [
    "v2",
    "v2",
    "v3"
  ]
}
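
For context, a minimal sketch of how this configuration is typically consumed, assuming the Hugging Face transformers library with Jukebox support is installed; the repo id is taken from "name_or_path" above, and the artist, genre, and lyric strings are placeholder examples, not values from this repo.

# Minimal usage sketch (assumes transformers with JukeboxTokenizer available);
# the artist/genre/lyrics strings below are illustrative placeholders.
from transformers import JukeboxTokenizer

# Downloads this tokenizer_config.json along with the tokenizer's vocabulary files.
tokenizer = JukeboxTokenizer.from_pretrained("ArthurZ/jukebox-1b-lyrics")

# JukeboxTokenizer encodes an (artist, genres, lyrics) triple; per this config it
# keeps at most 384 lyric tokens (max_n_lyric_tokens) and 1 genre (n_genres).
encoding = tokenizer("Alan Jackson", "Country Rock", "old town road")
print(encoding["input_ids"])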