{ "_name_or_path": "meta-llama/Meta-Llama-3-8B-Instruct", "architectures": [ "LlavaLlamaForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "conversation_template": "llava_llama_3", "eos_token_id": 128009, "freeze_mm_mlp_adapter": false, "hidden_act": "silu", "hidden_size": 4096, "image_aspect_ratio": "pad", "image_size": 384, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "max_seg_frames": 8, "mm_hidden_size": 1280, "mm_patch_merge_type": "spatial_unpad", "mm_projector_lr": 2e-05, "mm_projector_type": "mlp2x_gelu", "mm_use_im_patch_token": false, "mm_use_im_start_end": false, "mm_use_sf_vid_separator_token": false, "mm_vision_select_feature": "patch", "mm_vision_select_layer": -2, "mm_vision_tower": "nvidia/RADIO", "mm_vision_tower_lr": 2e-06, "model_type": "llava_llama", "num_attention_heads": 32, "num_frames": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "num_slow_frames": 8, "pad_token_id": 128256, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "seg_backbone": "facebook/sam2.1-hiera-tiny", "seg_head": null, "seg_image_size": 1024, "seg_num_queries": 1, "seg_pad_mode": "topleft", "tie_word_embeddings": false, "tokenizer_model_max_length": 6144, "tokenizer_padding_side": "right", "torch_dtype": "bfloat16", "transformers_version": "4.37.2", "tune_mm_mlp_adapter": false, "unfreeze_mm_vision_tower": true, "use_cache": true, "use_mm_proj": true, "use_text_prompt": true, "video_mode": true, "vocab_size": 128257 }