{
"candidate_resolutions": [
[
384,
384
],
[
384,
768
],
[
768,
384
],
[
384,
1152
],
[
1152,
384
],
[
384,
1536
],
[
1536,
384
],
[
768,
768
],
[
384,
1920
],
[
1920,
384
],
[
384,
2304
],
[
2304,
384
],
[
768,
1152
],
[
1152,
768
],
[
384,
2688
],
[
2688,
384
],
[
384,
3072
],
[
3072,
384
],
[
768,
1536
],
[
1536,
768
],
[
384,
3456
],
[
3456,
384
],
[
1152,
1152
]
],
"global_view_pos": "head",
"language_config": {
"architectures": [
"DeepseekV2ForCausalLM"
],
"auto_map": {
"AutoConfig": "configuration_deepseek.DeepseekV2Config",
"AutoModel": "modeling_deepseek.DeepseekV2Model",
"AutoModelForCausalLM": "modeling_deepseek.DeepseekV2ForCausalLM"
},
"first_k_dense_replace": 1,
"hidden_size": 2048,
"intermediate_size": 10944,
"lm_head": true,
"max_position_embeddings": 4096,
"model_type": "deepseek_v2",
"moe_intermediate_size": 1408,
"n_group": 1,
"n_routed_experts": 64,
"n_shared_experts": 2,
"num_attention_heads": 16,
"num_experts_per_tok": 6,
"num_hidden_layers": 27,
"num_key_value_heads": 16,
"q_lora_rank": null,
"rm_head": false,
"topk_group": 1,
"topk_method": "greedy",
"torch_dtype": "bfloat16"
},
"model_type": "deepseek_vl_v2",
"projector_config": {
"model_type": "mlp_projector"
},
"quantization": {
"group_size": 64,
"bits": 8
},
"tile_tag": "2D",
"torch_dtype": "bfloat16",
"transformers_version": "4.38.2",
"vision_config": {
"layers": 27,
"mlp_ratio": 3.7362,
"model_name": "siglip_so400m_patch14_384",
"model_type": "vision",
"patch_size": 14,
"width": 1152,
"skip_vision_non_divisible": true
}
}
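
Every pair in `candidate_resolutions` is a multiple of the 384 px base size used by DeepSeek-VL2's dynamic tiling, which resizes a high-resolution input to one of these shapes, cuts it into 384x384 tiles, and prepends a global view (`"global_view_pos": "head"`). The sketch below is a minimal, hypothetical illustration of how such a list is commonly consumed: it loads this config with Python's standard `json` module and picks the candidate whose aspect ratio best matches an input image. The file path and the selection heuristic are assumptions for illustration, not the model's documented preprocessing rule.

```python
import json

# Load the config shown above (path is assumed; adjust to your local checkout).
with open("config.json") as f:
    cfg = json.load(f)

candidates = cfg["candidate_resolutions"]  # e.g. [[384, 384], [384, 768], ...]

def pick_resolution(img_w, img_h, candidates):
    """Pick the candidate (w, h) whose aspect ratio is closest to the image's.

    This greedy heuristic is an illustrative assumption, not necessarily the
    exact rule DeepSeek-VL2 uses. Ties are broken toward the smaller area to
    keep the tile count (and hence the vision token count) low.
    """
    img_ratio = img_w / img_h
    return min(
        candidates,
        key=lambda wh: (abs(wh[0] / wh[1] - img_ratio), wh[0] * wh[1]),
    )

w, h = pick_resolution(1024, 640, candidates)
print(f"resize to {w}x{h} -> {w // 384} x {h // 384} grid of 384x384 tiles")
```

For a 1024x640 input this picks 1152x768 (a 3 x 2 tile grid), the closest aspect-ratio match among the candidates above; any global view is handled separately according to `global_view_pos`.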