Molmo-7B-D-0924-3bit / config.json
{
  "architectures": [
    "MolmoForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "config_molmo.MolmoConfig",
    "AutoModelForCausalLM": "modeling_molmo.MolmoForCausalLM"
  },
  "model_type": "molmo",
  "quantization": {
    "group_size": 64,
    "bits": 3
  },
  "text_config": {
    "model_type": "molmo",
    "max_position_embeddings": 4096,
    "d_model": 3584,
    "n_heads": 28,
    "n_kv_heads": 4,
    "n_layers": 28,
    "mlp_ratio": 4,
    "max_sequence_length": 1024,
    "act_output_multiplier": 0.5,
    "mlp_hidden_size": 37888,
    "vocab_size": 152064,
    "embedding_size": 152064,
    "additional_vocab_size": 128,
    "attention_dropout": 0.1,
    "residual_dropout": 0.1,
    "embedding_dropout": 0.1,
    "layer_norm_eps": 1e-05,
    "initializer_range": 0.02,
    "pad_token_id": -1,
    "rope": true,
    "rope_theta": 1000000.0,
    "weight_tying": false,
    "rope_full_precision": true,
    "rope_impl": "interleave"
  },
  "vision_config": {
    "model_type": "molmo",
    "num_channels": 3,
    "image_default_input_size": [
      336,
      336
    ],
    "image_patch_size": 14,
    "image_pos_patch_size": 14,
    "hidden_size": 18944,
    "image_emb_dim": 1024,
    "image_num_heads": 16,
    "image_num_key_value_heads": 16,
    "image_num_layers": 23,
    "image_head_dim": 64,
    "image_mlp_dim": 4096,
    "image_mlp_activations": "gelu",
    "image_dropout_rate": 0.0,
    "image_num_pos": 577,
    "image_norm_eps": 1e-05,
    "attention_dropout": 0.0,
    "residual_dropout": 0.0,
    "initializer_range": 0.02,
    "d_model": 3584,
    "image_pooling_h": 2,
    "image_pooling_w": 2,
    "vit_layers": [
      -2,
      -9
    ],
    "image_pooling_2d": "attention-meanq",
    "image_padding_embed": "pad_and_partial_pad",
    "intermediate_size": 588,
    "skip_vision_non_divisible": true
  },
  "vocab_size": 152064
}
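
For reference, a minimal sketch of fetching and inspecting this config programmatically with huggingface_hub and the standard json module. The repo id below is an assumption inferred from the folder name at the top of this page; the printed values come from the config above.

```python
import json
from huggingface_hub import hf_hub_download

# Repo id is assumed from the folder name "Molmo-7B-D-0924-3bit" (not confirmed by the file itself).
path = hf_hub_download("mlx-community/Molmo-7B-D-0924-3bit", "config.json")

with open(path) as f:
    cfg = json.load(f)

# Quantization settings for this 3-bit export.
print(cfg["quantization"])                  # {'group_size': 64, 'bits': 3}

# Text-tower dimensions.
print(cfg["text_config"]["d_model"])        # 3584
print(cfg["text_config"]["n_layers"])       # 28

# Vision-tower layers used for multi-scale features.
print(cfg["vision_config"]["vit_layers"])   # [-2, -9]
```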