dinov2-large / config.json
{
  "architectures": [
    "Dinov2Model"
  ],
  "attention_probs_dropout_prob": 0.0,
  "drop_path_rate": 0.0,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 1024,
  "image_size": 518,
  "initializer_range": 0.02,
  "layer_norm_eps": 1e-06,
  "layerscale_value": 1.0,
  "mlp_ratio": 4,
  "model_type": "dinov2",
  "num_attention_heads": 16,
  "num_channels": 3,
  "num_hidden_layers": 24,
  "patch_size": 14,
  "qkv_bias": true,
  "torch_dtype": "float32",
  "transformers_version": "4.31.0.dev0",
  "use_swiglu_ffn": false
}
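
This config is consumed by the transformers library (version 4.31 or later, per transformers_version above). A minimal sketch of loading it, assuming the full Hub repo id is facebook/dinov2-large (the org prefix is not shown in this page's breadcrumb):

from transformers import Dinov2Config, Dinov2Model

# Fetch config.json from the Hub and parse it into a Dinov2Config.
config = Dinov2Config.from_pretrained("facebook/dinov2-large")

# Derived quantities from the values above:
# (image_size / patch_size)^2 = (518 / 14)^2 = 37^2 = 1369 patch tokens,
# plus one [CLS] token, gives a sequence length of 1370.
num_patches = (config.image_size // config.patch_size) ** 2
print(num_patches, config.hidden_size)  # 1369 1024

# Build a randomly initialized model from the config alone; use
# Dinov2Model.from_pretrained(...) instead to load the trained weights.
model = Dinov2Model(config)

Two fields tie the config to the right classes: "model_type": "dinov2" is the key the auto classes (AutoConfig, AutoModel) dispatch on, while "architectures" records the class the checkpoint was exported with (here, the bare Dinov2Model backbone without a task head).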