Zamba2-2.7B-instruct / config.json
{
"_name_or_path": "Zyphra/Zamba2-2.7B-instruct",
"add_bias_linear": false,
"architectures": [
"Zamba2ForCausalLM"
],
"attention_dropout": 0.0,
"bos_token_id": 1,
"conv_dimension": 4,
"eos_token_id": 2,
"expansion_factor": 2,
"ffn_hidden_size": 10240,
"gated_linear_unit": true,
"hidden_size": 2560,
"initializer_range": 0.02,
"kv_channels": 80,
"layers_block_type": [
"m",
"m",
"m",
"m",
"m",
"m",
"g",
"m",
"m",
"m",
"m",
"m",
"g",
"m",
"m",
"m",
"m",
"m",
"g",
"m",
"m",
"m",
"m",
"m",
"g",
"m",
"m",
"m",
"m",
"m",
"g",
"m",
"m",
"m",
"m",
"m",
"g",
"m",
"m",
"m",
"m",
"m",
"g",
"m",
"m",
"m",
"m",
"g",
"m",
"m",
"m",
"g",
"m",
"m"
],
"lora_rank": 128,
"lora_rank_mamba": 128,
"mamba_headdim": 64,
"max_position_embeddings": 4096,
"model_type": "zamba2",
"num_attention_heads": 32,
"num_hidden_layers": 54,
"num_key_value_heads": 32,
"num_logits_to_keep": 1,
"num_mem_blocks": 2,
"num_query_groups": 32,
"pad_token_id": 0,
"rms_norm_eps": 1e-05,
"rope_theta": 10000,
"se_shared_attention_lora": false,
"sliding_window": null,
"state_size": 64,
"torch_dtype": "float32",
"transformers_version": "4.43.0.dev0",
"use_cache": false,
"use_mamba_kernels": true,
"use_mem_rope": false,
"use_shared_attention_lora": false,
"use_shared_block_lora": true,
"vocab_size": 32000
}
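The layers_block_type list above encodes Zamba2's hybrid layout: "m" entries are Mamba2 blocks, while "g" entries mark the positions where one of the num_mem_blocks shared attention blocks is applied. The short Python sketch below parses this file (assuming it has been downloaded locally as config.json) and checks the layer counts plus a few sizes that follow directly from the stored values; it does not depend on transformers.

import json

# Minimal sketch: parse this config.json and summarize the hybrid layer layout.
with open("config.json") as f:
    cfg = json.load(f)

layout = cfg["layers_block_type"]
assert len(layout) == cfg["num_hidden_layers"]                # 54 layers in total

print("mamba2 blocks        :", layout.count("m"))            # 45
print("shared-attn positions:", layout.count("g"))            # 9
print("shared attn blocks   :", cfg["num_mem_blocks"])        # 2, reused across the "g" positions

# Sizes derived from the stored values.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]   # 2560 // 32 = 80, matching kv_channels
ffn_mult = cfg["ffn_hidden_size"] // cfg["hidden_size"]       # 10240 // 2560 = 4 (gated_linear_unit is true)
print("head_dim:", head_dim, "ffn multiplier:", ffn_mult)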
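For completeness, a minimal inference sketch. It assumes a transformers build with Zamba2 support (such as Zyphra's transformers fork or a recent mainline release that includes the architecture), a CUDA GPU, and that the repository ships a chat template; the prompt is purely illustrative. Since use_mamba_kernels is true, the optimized path generally expects the mamba-ssm and causal-conv1d packages to be installed.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "Zyphra/Zamba2-2.7B-instruct"   # _name_or_path from the config above

tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(
    repo,
    torch_dtype=torch.bfloat16,        # weights are stored as float32; bf16 is a common inference choice
    device_map="cuda",
)

# Illustrative prompt only; assumes the repo provides a chat template.
messages = [{"role": "user", "content": "Briefly explain what a hybrid Mamba/attention model is."}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

output = model.generate(input_ids, max_new_tokens=128)
print(tokenizer.decode(output[0], skip_special_tokens=True))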