{
  "_name_or_path": "/ceph/hdd/staff/charpent/.cache/modelsz0cug6rhqj86r_rt",
  "activation_fn_name": "swish",
  "architectures": [
    "OpenELMForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_openelm.OpenELMConfig",
    "AutoModelForCausalLM": "modeling_openelm.OpenELMForCausalLM"
  },
  "bos_token_id": 1,
  "eos_token_id": 2,
  "ffn_dim_divisor": 256,
  "ffn_multipliers": [
    0.5,
    0.63,
    0.76,
    0.89,
    1.02,
    1.15,
    1.28,
    1.41,
    1.54,
    1.67,
    1.8,
    1.93,
    2.06,
    2.19,
    2.31,
    2.44,
    2.57,
    2.7,
    2.83,
    2.96,
    3.09,
    3.22,
    3.35,
    3.48,
    3.61,
    3.74,
    3.87,
    4.0
  ],
  "ffn_with_glu": true,
  "head_dim": 64,
  "initializer_range": 0.02,
  "max_context_length": 2048,
  "model_dim": 2048,
  "model_type": "openelm",
  "normalization_layer_name": "rms_norm",
  "normalize_qk_projections": true,
  "num_gqa_groups": 4,
  "num_kv_heads": [
    4,
    4,
    4,
    5,
    5,
    5,
    5,
    5,
    5,
    5,
    6,
    6,
    6,
    6,
    6,
    6,
    6,
    6,
    7,
    7,
    7,
    7,
    7,
    7,
    8,
    8,
    8,
    8
  ],
  "num_query_heads": [
    16,
    16,
    16,
    20,
    20,
    20,
    20,
    20,
    20,
    20,
    24,
    24,
    24,
    24,
    24,
    24,
    24,
    24,
    28,
    28,
    28,
    28,
    28,
    28,
    32,
    32,
    32,
    32
  ],
  "num_transformer_layers": 28,
  "qkv_multipliers": [
    0.5,
    1.0
  ],
  "quantization_config": {
    "_load_in_4bit": true,
    "_load_in_8bit": false,
    "bnb_4bit_compute_dtype": "bfloat16",
    "bnb_4bit_quant_storage": "uint8",
    "bnb_4bit_quant_type": "fp4",
    "bnb_4bit_use_double_quant": false,
    "llm_int8_enable_fp32_cpu_offload": false,
    "llm_int8_has_fp16_weight": false,
    "llm_int8_skip_modules": [
      "lm_head"
    ],
    "llm_int8_threshold": 6.0,
    "load_in_4bit": true,
    "load_in_8bit": false,
    "quant_method": "bitsandbytes"
  },
  "rope_freq_constant": 10000,
  "rope_max_length": 4096,
  "share_input_output_layers": true,
  "torch_dtype": "float16",
  "transformers_version": "4.42.4",
  "use_cache": true,
  "vocab_size": 32000
}