dragon-llama-7b-v0 / config.json
{
"model_class": "llama2",
"model_size": "7b",
"architectures": [
"Llama2ForCausalLM"
],
"dim": 4096,
"n_layers": 32,
"n_heads": 32,
"n_kv_heads": null,
"vocab_size": 32000,
"multiple_of": 256,
"ffn_dim_multiplier": null,
"norm_eps": 1e-5,
"max_batch_size": 32,
"max_seq_len": 2048,
"bos_token_id":1,
"eos_token_id":2,
"pad_token_id":-1,
"torch_dtype": "float16",
"pretraining_base": "llama2-7b-2t-tokens",
"model_repo_folder_path": "llama2-7b-base"
}
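
A minimal sketch of consuming this config from Python, assuming the file sits at ./config.json next to the script. The LlamaConfig dataclass below is hypothetical (its field names simply mirror the JSON keys above); it is not llmware's or Hugging Face's loading API, just an illustration of parsing and sanity-checking the values.

import json
from dataclasses import dataclass
from typing import List, Optional

@dataclass
class LlamaConfig:
    # Hypothetical holder whose fields mirror the JSON keys one-to-one.
    model_class: str
    model_size: str
    architectures: List[str]
    dim: int
    n_layers: int
    n_heads: int
    n_kv_heads: Optional[int]        # null here => standard multi-head attention
    vocab_size: int
    multiple_of: int                 # FFN hidden size is rounded up to this multiple
    ffn_dim_multiplier: Optional[float]
    norm_eps: float
    max_batch_size: int
    max_seq_len: int
    bos_token_id: int
    eos_token_id: int
    pad_token_id: int
    torch_dtype: str
    pretraining_base: str
    model_repo_folder_path: str

with open("config.json") as f:
    cfg = LlamaConfig(**json.load(f))

# Per-head dimension follows from dim / n_heads: 4096 / 32 = 128.
assert cfg.dim % cfg.n_heads == 0
print(cfg.dim // cfg.n_heads)  # 128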