chat-gpt-sentiment-analyzer/TBTokenizer/tokenizer_config.json
{
  "bos_token": "<s>",
  "cls_token": "<s>",
  "eos_token": "</s>",
  "mask_token": "<mask>",
  "model_max_length": 128,
  "normalization": true,
  "pad_token": "<pad>",
  "sep_token": "</s>",
  "special_tokens_map_file": null,
  "tokenizer_class": "BertweetTokenizer",
  "unk_token": "<unk>"
}
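A minimal sketch of how this config is consumed at load time, assuming the repo id "SubinKrishna/chat-gpt-sentiment-analyzer" and the "TBTokenizer" subfolder from the path above (both inferred, not confirmed by this file). `from_pretrained` reads `tokenizer_config.json`, instantiates the class named in `tokenizer_class` (here `BertweetTokenizer`), and applies the settings shown: `normalization: true` turns on BERTweet's tweet normalization, and `model_max_length: 128` caps sequence length.

```python
from transformers import AutoTokenizer

# from_pretrained reads tokenizer_config.json and builds the class named in
# "tokenizer_class" (BertweetTokenizer). Repo id and subfolder are assumptions.
tokenizer = AutoTokenizer.from_pretrained(
    "SubinKrishna/chat-gpt-sentiment-analyzer",  # assumed repo id
    subfolder="TBTokenizer",                     # assumed subfolder
)

# "normalization": true enables BERTweet's tweet normalization
# (e.g. user handles -> @USER, URLs -> HTTPURL).
enc = tokenizer(
    "Loving the new update! https://example.com @someone",  # example input
    truncation=True,       # truncate to "model_max_length": 128
    padding="max_length",  # pad with "<pad>" up to 128 tokens
    return_tensors="pt",
)
print(enc["input_ids"].shape)  # torch.Size([1, 128])
```

Note that `padding="max_length"` with no explicit `max_length` argument pads to the `model_max_length` of 128 declared in this config, so every encoded sequence comes out at a fixed 128 tokens.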