{ "architectures": [ "HaploForConditionalGeneration" ], "attention_dropout": 0.0, "attention_rope": false, "base_config": { "bos_token_id": 151643, "eos_token_id": 151645, "hidden_size": 3584, "intermediate_size": 18944, "model_type": "haplo", "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "rope_theta": 1000000.0, "sliding_window": 131072 }, "bos_token_id": 151643, "default_image_size": 336, "eos_token_id": 151645, "hidden_act": "silu", "hidden_size": 3584, "image_aspect_ratio": "anyres_max_6", "image_grid_pinpoints": [ [ 336, 336 ], [ 336, 672 ], [ 336, 1008 ], [ 336, 1344 ], [ 336, 1680 ], [ 336, 2016 ], [ 672, 336 ], [ 672, 672 ], [ 672, 1008 ], [ 1008, 336 ], [ 1008, 672 ], [ 1344, 336 ], [ 1680, 336 ], [ 2016, 336 ] ], "image_newline": true, "image_token_index": 151646, "initializer_range": 0.02, "intermediate_size": 18944, "max_position_embeddings": 32768, "max_window_layers": 28, "model_type": "haplo", "norm_mode": "rmsnorm", "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "patch_size": 14, "post_config": { "model_type": "haplo" }, "pre_config": { "attention_rope": true, "hidden_act": "gelu", "hidden_size": 1024, "intermediate_size": 4096, "model_type": "haplo", "norm_mode": "layernorm", "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "rope_theta": 1000000.0 }, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 1000000.0, "sliding_window": 131072, "tie_word_embeddings": false, "torch_dtype": "float32", "transformers_version": "4.49.0", "use_cache": true, "use_sliding_window": false, "video_token_index": 151647, "vocab_size": 152064 }