{
    "d_model": 2560,
    "d_intermediate": 0,
    "n_layer": 64,
    "vocab_size": 50277,
    "ssm_cfg": {
        "layer": "Mamba2"
    },
    "attn_layer_idx": [
        9,
        18,
        27,
        36,
        45,
        56
    ],
    "attn_cfg": {
        "causal": true,
        "d_conv": 4,
        "head_dim": 128,
        "num_heads": 30,
        "out_proj_bias": false,
        "qkv_proj_bias": false,
        "rotary_emb_dim": 64
    },
    "rms_norm": true,
    "residual_in_fp32": true,
    "fused_add_norm": true,
    "pad_vocab_size_multiple": 16,
    "tie_embeddings": true
}
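
A minimal sketch of consuming this config, assuming the mamba_ssm package from the state-spaces/mamba repository (its MambaConfig dataclass uses exactly these field names); the path "config.json" and the CUDA device are illustrative assumptions, not part of the file above:

```python
# Sketch: build the hybrid Mamba-2/attention LM described by this config.
# Assumes mamba_ssm is installed; "config.json" is a hypothetical local path.
import json

import torch
from mamba_ssm.models.config_mamba import MambaConfig
from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

with open("config.json") as f:
    cfg = MambaConfig(**json.load(f))

# 64 layers total: multi-head attention (30 heads x 128 head_dim, rotary
# embeddings of dim 64) at layers 9, 18, 27, 36, 45, 56, and Mamba-2 SSM
# blocks everywhere else.
model = MambaLMHeadModel(cfg, device="cuda", dtype=torch.bfloat16)

input_ids = torch.randint(0, cfg.vocab_size, (1, 16), device="cuda")
logits = model(input_ids).logits  # (batch, seqlen, padded vocab size)
```

In the reference implementation, vocab_size (50277) is padded up to the next multiple of pad_vocab_size_multiple, so the logits' vocabulary dimension comes out as 50288 rather than 50277.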