{
  "d_model": 1536,
  "d_intermediate": 0,
  "n_layer": 48,
  "vocab_size": 50277,
  "ssm_cfg": {
    "layer": "Mamba2"
  },
  "attn_layer_idx": [],
  "attn_cfg": {},
  "rms_norm": true,
  "residual_in_fp32": false,
  "fused_add_norm": true,
  "pad_vocab_size_multiple": 16,
  "tie_embeddings": true
}
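
These field names match the `MambaConfig` dataclass in the `mamba_ssm` reference implementation (https://github.com/state-spaces/mamba). Below is a minimal sketch of instantiating a model from this config, assuming `mamba_ssm` and a CUDA-capable PyTorch are installed and the JSON above is saved as `config.json` (the path is an assumption for illustration). Note that the embedding size is padded up to a multiple of `pad_vocab_size_multiple` inside the model, so the logits dimension becomes 50288 rather than 50277.

```python
# Sketch: build a randomly initialized Mamba2 LM from the config above.
# Assumes the JSON is saved as "config.json" (illustrative path).
import json

import torch
from mamba_ssm.models.config_mamba import MambaConfig
from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

with open("config.json") as f:
    cfg = MambaConfig(**json.load(f))

# Instantiate the language model described by the config.
model = MambaLMHeadModel(cfg, device="cuda", dtype=torch.bfloat16)

# Run a dummy batch through it; the forward pass returns a namedtuple
# with a .logits field of shape (batch, seqlen, padded vocab size).
input_ids = torch.randint(0, cfg.vocab_size, (1, 128), device="cuda")
logits = model(input_ids).logits
print(logits.shape)  # torch.Size([1, 128, 50288]) after vocab padding
```

With `d_model=1536` and `n_layer=48`, this corresponds to a roughly 780M-parameter pure-Mamba2 stack: `attn_layer_idx` is empty, so no attention layers are interleaved, and `d_intermediate=0` means no MLP blocks between the SSM layers.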