{
  "d_model": 2560,
  "d_intermediate": 6912,
  "n_layer": 32,
  "vocab_size": 50277,
  "ssm_cfg": {},
  "attn_layer_idx": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31],
  "attn_cfg": {
    "causal": true,
    "head_dim": 80,
    "num_heads": 32,
    "out_proj_bias": false,
    "qkv_proj_bias": false,
    "rotary_emb_dim": 40
  },
  "rms_norm": true,
  "residual_in_fp32": true,
  "fused_add_norm": true,
  "pad_vocab_size_multiple": 16,
  "tie_embeddings": true
}
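
The fields above match the `MambaConfig` dataclass from the state-spaces/mamba repository (`mamba_ssm`), so the following is a minimal loading sketch under that assumption; the file name `config.json` and the device/dtype choices are illustrative, not part of the original config. Note that `attn_layer_idx` covers all 32 layers, so every block uses the multi-head attention mixer (32 heads of dim 80 = d_model 2560) rather than an SSM.

```python
import json

import torch
from mamba_ssm.models.config_mamba import MambaConfig
from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

# Assumed file name for the config shown above.
with open("config.json") as f:
    cfg_dict = json.load(f)

# The keys map one-to-one onto MambaConfig fields.
config = MambaConfig(**cfg_dict)

# Build the LM; device/dtype here are just an example setup.
model = MambaLMHeadModel(config, device="cuda", dtype=torch.bfloat16)
print(sum(p.numel() for p in model.parameters()))
```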