{
  "vocab_size": 84992,
  "embd_pdrop": 0.0,
  "tie_word_embeddings": true,
  "position_embedding_type": "alibi",
  "n_positions": 2048,
  "rotary_dim": null,
  "alibi_scaling": null,
  "n_embd": 1088,
  "n_layer": 14,
  "resid_pdrop": 0.0,
  "layer_norm_epsilon": 1e-05,
  "n_head": 17,
  "scale_attn_weights": true,
  "attn_pdrop": 0.0,
  "scale_attn_by_inverse_layer_idx": false,
  "n_inner": 2912,
  "activation_function": "swiglu",
  "mup_embeddings_scale": 9.1705785388303,
  "mup_scale_qk_dot_by_d": true,
  "mup_output_alpha": 1.09518349815769,
  "mup_width_scale": 0.23529411764705882,
  "initializer_range": 0.02,
  "model_type": "btlm",
  "use_cache": true,
  "auto_map": {
    "AutoConfig": "cerebras/btlm-3b-8k-base--configuration_btlm.BTLMConfig",
    "AutoModel": "cerebras/btlm-3b-8k-base--modeling_btlm.BTLMModel",
    "AutoModelForCausalLM": "cerebras/btlm-3b-8k-base--modeling_btlm.BTLMLMHeadModel"
  }
}
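
A config like this is typically consumed through the transformers Auto classes. Below is a minimal sketch, assuming the JSON above is saved as `config.json` in a hypothetical local directory (`model_dir` is illustrative, not from the config). Because `model_type` is `btlm`, which is not a built-in transformers architecture, `trust_remote_code=True` is required so that the `auto_map` entries can pull the custom `BTLMConfig`/`BTLMLMHeadModel` classes from the `cerebras/btlm-3b-8k-base` repo at load time.

```python
from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical local directory containing the config.json shown above.
model_dir = "./my-btlm-model"

# trust_remote_code=True lets transformers fetch and execute the custom
# BTLM modeling code referenced by the auto_map entries.
config = AutoConfig.from_pretrained(model_dir, trust_remote_code=True)

# Instantiate a randomly initialized model from the config (a sketch;
# from_pretrained(model_dir, trust_remote_code=True) would instead load
# trained weights if they were present in the directory).
model = AutoModelForCausalLM.from_config(config, trust_remote_code=True)

# Sanity-check a few of the dimensions from the config:
# n_embd=1088 split across n_head=17 heads gives a head size of 64.
print(model.config.n_embd, model.config.n_layer, model.config.n_head)
```

Note that `tie_word_embeddings: true` means the input embedding matrix is shared with the output projection, and the `mup_*` fields (embedding scale, width scale, scaled QK dot product) indicate the model was parameterized with muP, so those values must be preserved verbatim for the checkpoint to behave correctly.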