hiera-tiny-224-mae / config.json
{
  "architectures": [
    "HieraForPreTraining"
  ],
  "decoder_depth": 8,
  "decoder_hidden_size": 512,
  "decoder_num_heads": 16,
  "depths": [
    1,
    2,
    7,
    2
  ],
  "drop_path_rate": 0.0,
  "embed_dim": 96,
  "embed_dim_multiplier": 2.0,
  "hidden_act": "gelu",
  "hidden_size": 768,
  "image_size": [
    224,
    224
  ],
  "initializer_range": 0.02,
  "layer_norm_eps": 1e-06,
  "layer_norm_init": 1.0,
  "mask_ratio": 0.6,
  "masked_unit_attention": [
    true,
    true,
    false,
    false
  ],
  "masked_unit_size": [
    8,
    8
  ],
  "mlp_ratio": 4.0,
  "model_type": "hiera",
  "norm_pix_loss": true,
  "num_attention_heads": [
    1,
    2,
    4,
    8
  ],
  "num_channels": 3,
  "num_layers": 4,
  "num_query_pool": 2,
  "out_features": [
    "stage4"
  ],
  "out_indices": [
    4
  ],
  "patch_padding": [
    3,
    3
  ],
  "patch_size": [
    7,
    7
  ],
  "patch_stride": [
    4,
    4
  ],
  "query_stride": [
    2,
    2
  ],
  "stage_names": [
    "stem",
    "stage1",
    "stage2",
    "stage3",
    "stage4"
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.41.0.dev0",
  "use_separate_position_embedding": false
}
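
For context, here is a minimal sketch of loading this checkpoint with the transformers library and running one masked-autoencoder forward pass. The repo id "namangarg110/hiera-tiny-224-mae" is an assumption inferred from the page title, the COCO image URL is only a stand-in input, and this requires a transformers version that ships the Hiera model classes (the config above was written by 4.41.0.dev0).

import torch
import requests
from PIL import Image
from transformers import AutoImageProcessor, HieraForPreTraining

# Assumption: repo id taken from the page title above.
repo_id = "namangarg110/hiera-tiny-224-mae"

processor = AutoImageProcessor.from_pretrained(repo_id)
model = HieraForPreTraining.from_pretrained(repo_id)

# Stand-in input image; any RGB image works.
url = "http://images.cocodataset.org/val2017/000000039769.jpg"
image = Image.open(requests.get(url, stream=True).raw)

inputs = processor(images=image, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# Per the config: 60% of masked units (mask_ratio) of 8x8 tokens each
# (masked_unit_size) are hidden, and the reconstruction loss is computed
# on per-patch-normalized pixels (norm_pix_loss).
print(outputs.loss)

The config values match the Hiera-Tiny variant: depths [1, 2, 7, 2] starting from embed_dim 96, with the width doubling at each stage (embed_dim_multiplier 2.0) to reach hidden_size 768 at stage4.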