{
  "_name_or_path": "meta-llama/Llama-2-7b-hf",
  "architectures": [
    "SparseLlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "ugly_utils.SparseLlamaConfig",
    "AutoModelForCausalLM": "ugly_utils.SparseLlamaForCausalLM"
  },
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "max_position_embeddings": 4096,
  "model_type": "sparse_llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "thresholds": [
    0.01905716396868229,
    0.03510531038045883,
    0.049147434532642365,
    0.061183542013168335,
    0.07923770695924759,
    0.09327983111143112,
    0.10932797938585281,
    0.11735205352306366,
    0.11935807019472122,
    0.12337010353803635,
    0.1253761202096939,
    0.12738214433193207,
    0.12738214433193207,
    0.1313941776752472,
    0.13340020179748535,
    0.13941824436187744,
    0.14744232594966888,
    0.151454359292984,
    0.15546639263629913,
    0.15546639263629913,
    0.16349045932292938,
    0.16349045932292938,
    0.1675025075674057,
    0.1675025075674057,
    0.17151454091072083,
    0.1735205501317978,
    0.1775325983762741,
    0.18154463171958923,
    0.1835506409406662,
    0.18555666506290436,
    0.19157472252845764,
    0.19558675587177277
  ],
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.36.2",
  "us_sparse_regularization": false,
  "use_cache": false,
  "use_graceful_regularization": false,
  "use_relu": false,
  "use_sparse_model": true,
  "use_sparse_predictor": false,
  "use_sparse_regularization": false,
  "vocab_size": 32000
}
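
Because "model_type" is "sparse_llama" and "auto_map" points at custom classes in ugly_utils.py, a checkpoint carrying this config cannot be loaded by the built-in LlamaForCausalLM alone; transformers has to be allowed to import the custom code shipped alongside the weights. The sketch below shows one way to load and run such a checkpoint, assuming ugly_utils.py sits in the checkpoint directory; the path "path/to/sparse-llama" is a placeholder, not a real repo id.

# Minimal sketch: loading a checkpoint that ships this config.
# Assumption: the checkpoint directory also contains ugly_utils.py with
# SparseLlamaConfig / SparseLlamaForCausalLM, as named in "auto_map" above.
import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

checkpoint = "path/to/sparse-llama"  # hypothetical local path or hub repo id

# trust_remote_code=True lets transformers resolve the custom "sparse_llama"
# model_type through auto_map instead of failing on an unknown architecture.
config = AutoConfig.from_pretrained(checkpoint, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    checkpoint,
    config=config,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in the config
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained(checkpoint)

prompt = "The capital of France is"
inputs = tokenizer(prompt, return_tensors="pt")
# "use_cache" is false in the config; enabling the KV cache per call is
# still fine for plain generation.
outputs = model.generate(**inputs, max_new_tokens=16, use_cache=True)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

Note that the 32 entries in "thresholds" line up with "num_hidden_layers": 32, i.e. one activation-sparsity threshold per decoder layer; how they are applied is defined by the custom code in ugly_utils.py, not by stock transformers.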
|
|