{
  "architectures": [
    "ColPaliForRetrieval"
  ],
  "embedding_dim": 128,
  "model_type": "colpali",
  "text_config": {
    "hidden_size": 16,
    "intermediate_size": 16,
    "model_type": "gemma",
    "num_attention_heads": 4,
    "num_hidden_layers": 2,
    "num_image_tokens": 256,
    "num_key_value_heads": 1,
    "vocab_size": 257216
  },
  "torch_dtype": "float32",
  "transformers_version": "4.49.0",
  "vlm_config": {
    "_attn_implementation_autoset": true,
    "_name_or_path": "hf-internal-testing/tiny-random-PaliGemmaForConditionalGeneration",
    "architectures": [
      "PaliGemmaForConditionalGeneration"
    ],
    "bos_token_id": 2,
    "eos_token_id": 1,
    "hidden_size": 16,
    "image_token_index": 257152,
    "model_type": "paligemma",
    "num_image_tokens": 256,
    "pad_token_id": 0,
    "projection_dim": 16,
    "text_config": {
      "hidden_size": 16,
      "intermediate_size": 16,
      "num_attention_heads": 4,
      "num_hidden_layers": 2,
      "num_image_tokens": 256,
      "num_key_value_heads": 1,
      "vocab_size": 257216
    },
    "torch_dtype": "float32",
    "vision_config": {
      "hidden_size": 16,
      "intermediate_size": 32,
      "num_attention_heads": 16,
      "num_hidden_layers": 2,
      "num_image_tokens": 256,
      "patch_size": 14,
      "projection_dim": 16,
      "projector_hidden_act": "gelu_fast",
      "vision_use_head": false
    }
  }
}
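A minimal sketch of how this config might be consumed, assuming the JSON above is saved locally as `config.json` (a hypothetical path) and a transformers release with ColPali support (the config records 4.49.0) is installed. It instantiates the tiny-random model with freshly initialized weights; no pretrained checkpoint is downloaded.

```python
# Minimal sketch: build a ColPali model from the tiny-random config above.
# Assumes transformers >= 4.46 (first release with ColPaliForRetrieval) and
# that the JSON is saved as "config.json" in the working directory.
from transformers import ColPaliConfig, ColPaliForRetrieval

# Parse the JSON into a ColPaliConfig; the nested "vlm_config" dict becomes
# a PaliGemmaConfig object for the vision-language backbone.
config = ColPaliConfig.from_json_file("config.json")

# Randomly initialized model (no weights loaded) -- sized for tests only:
# hidden_size=16, 2 layers, but the full 257216-entry vocabulary.
model = ColPaliForRetrieval(config)

# The retrieval head projects each token's hidden state down to the
# multi-vector embedding dimension used for late-interaction scoring.
print(config.embedding_dim)          # 128
print(config.vlm_config.model_type)  # "paligemma"
```

The tiny hidden sizes keep the model cheap enough for CI, while `vocab_size`, `image_token_index`, and `patch_size` match real PaliGemma checkpoints so tokenization and image patching behave as they would at full scale.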