{
"module": "keras_hub.src.models.llama3.llama3_tokenizer",
"class_name": "Llama3Tokenizer",
"config": {
"name": "llama3_tokenizer",
"trainable": true,
"dtype": {
"module": "keras",
"class_name": "DTypePolicy",
"config": {
"name": "int32"
},
"registered_name": null
},
"config_file": "tokenizer.json",
"sequence_length": null,
"add_prefix_space": false,
"unsplittable_tokens": [
"<|begin_of_text|>",
"<|start_header_id|>",
"<|eot_id|>",
"<|end_header_id|>",
"<|end_of_text|>"
]
},
"registered_name": "keras_hub>Llama3Tokenizer"
}