lombardata committed on
Commit 94cfef9
1 Parent(s): 0a904bf

Upload config.json

Files changed (1)
  1. config.json +106 -90
config.json CHANGED
@@ -1,91 +1,107 @@
 {
-  "_name_or_path": "facebook/dinov2-large",
-  "apply_layernorm": true,
-  "architectures": [
-    "Dinov2ForImageClassification"
-  ],
-  "attention_probs_dropout_prob": 0.0,
-  "drop_path_rate": 0.0,
-  "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.0,
-  "hidden_size": 1024,
-  "id2label": {
-    "0": "Acropore_branched",
-    "1": "Acropore_digitised",
-    "2": "Acropore_tabular",
-    "3": "Algae",
-    "4": "Dead_coral",
-    "5": "Fish",
-    "6": "Millepore",
-    "7": "No_acropore_encrusting",
-    "8": "No_acropore_massive",
-    "9": "No_acropore_sub_massive",
-    "10": "Rock",
-    "11": "Rubble",
-    "12": "Sand"
-  },
-  "image_size": 164,
-  "initializer_range": 0.02,
-  "label2id": {
-    "Acropore_branched": 0,
-    "Acropore_digitised": 1,
-    "Acropore_tabular": 2,
-    "Algae": 3,
-    "Dead_coral": 4,
-    "Fish": 5,
-    "Millepore": 6,
-    "No_acropore_encrusting": 7,
-    "No_acropore_massive": 8,
-    "No_acropore_sub_massive": 9,
-    "Rock": 10,
-    "Rubble": 11,
-    "Sand": 12
-  },
-  "layer_norm_eps": 1e-06,
-  "layerscale_value": 1.0,
-  "mlp_ratio": 4,
-  "model_type": "dinov2",
-  "num_attention_heads": 16,
-  "num_channels": 3,
-  "num_hidden_layers": 24,
-  "out_features": [
-    "stage24"
-  ],
-  "out_indices": [
-    24
-  ],
-  "patch_size": 14,
-  "problem_type": "multi_label_classification",
-  "qkv_bias": true,
-  "reshape_hidden_states": true,
-  "stage_names": [
-    "stem",
-    "stage1",
-    "stage2",
-    "stage3",
-    "stage4",
-    "stage5",
-    "stage6",
-    "stage7",
-    "stage8",
-    "stage9",
-    "stage10",
-    "stage11",
-    "stage12",
-    "stage13",
-    "stage14",
-    "stage15",
-    "stage16",
-    "stage17",
-    "stage18",
-    "stage19",
-    "stage20",
-    "stage21",
-    "stage22",
-    "stage23",
-    "stage24"
-  ],
-  "torch_dtype": "float32",
-  "transformers_version": "4.41.0",
-  "use_swiglu_ffn": false
-}
+  "_name_or_path": "facebook/dinov2-large",
+  "apply_layernorm": true,
+  "architectures": [
+    "Dinov2ForImageClassification"
+  ],
+  "attention_probs_dropout_prob": 0.0,
+  "drop_path_rate": 0.0,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.0,
+  "hidden_size": 1024,
+  "id2label": {
+    "0": "Acropore_branched",
+    "1": "Acropore_digitised",
+    "2": "Acropore_tabular",
+    "3": "Algae",
+    "4": "Dead_coral",
+    "5": "Fish",
+    "6": "Millepore",
+    "7": "No_acropore_encrusting",
+    "8": "No_acropore_massive",
+    "9": "No_acropore_sub_massive",
+    "10": "Rock",
+    "11": "Rubble",
+    "12": "Sand"
+  },
+  "image_size": 164,
+  "initializer_range": 0.02,
+  "label2id": {
+    "Acropore_branched": 0,
+    "Acropore_digitised": 1,
+    "Acropore_tabular": 2,
+    "Algae": 3,
+    "Dead_coral": 4,
+    "Fish": 5,
+    "Millepore": 6,
+    "No_acropore_encrusting": 7,
+    "No_acropore_massive": 8,
+    "No_acropore_sub_massive": 9,
+    "Rock": 10,
+    "Rubble": 11,
+    "Sand": 12
+  },
+  "layer_norm_eps": 1e-06,
+  "layerscale_value": 1.0,
+  "mlp_ratio": 4,
+  "model_type": "dinov2",
+  "num_attention_heads": 16,
+  "num_channels": 3,
+  "num_hidden_layers": 24,
+  "out_features": [
+    "stage24"
+  ],
+  "out_indices": [
+    24
+  ],
+  "patch_size": 14,
+  "problem_type": "multi_label_classification",
+  "qkv_bias": true,
+  "reshape_hidden_states": true,
+  "stage_names": [
+    "stem",
+    "stage1",
+    "stage2",
+    "stage3",
+    "stage4",
+    "stage5",
+    "stage6",
+    "stage7",
+    "stage8",
+    "stage9",
+    "stage10",
+    "stage11",
+    "stage12",
+    "stage13",
+    "stage14",
+    "stage15",
+    "stage16",
+    "stage17",
+    "stage18",
+    "stage19",
+    "stage20",
+    "stage21",
+    "stage22",
+    "stage23",
+    "stage24"
+  ],
+  "torch_dtype": "float32",
+  "transformers_version": "4.41.0",
+  "use_swiglu_ffn": false,
+  "initial_learning_rate": 0.001,
+  "train_batch_size": 64,
+  "eval_batch_size": 64,
+  "optimizer": {
+    "type": "Adam"
+  },
+  "lr_scheduler_type": {
+    "type": "ReduceLROnPlateau"
+  },
+  "patience_lr_scheduler": 5,
+  "factor_lr_scheduler": 0.1,
+  "weight_decay": 0.0001,
+  "early_stopping_patience": 10,
+  "freeze_encoder": true,
+  "data_augmentation": true,
+  "num_epochs": 150
+}
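
The keys added in this commit, from "initial_learning_rate" onward, record training hyperparameters rather than model architecture, so transformers simply carries them in the config when the checkpoint is loaded. The sketch below is an illustration of how those fields could be wired up with transformers and PyTorch, not this repository's actual training script; the local file name "config.json" and the surrounding setup are assumptions.

# Minimal sketch: map the training-related fields added in this commit onto
# standard PyTorch / transformers objects. Dataloaders and the training loop
# (which would use train_batch_size, num_epochs, early_stopping_patience and
# data_augmentation) are intentionally omitted.
import json

import torch
from transformers import Dinov2ForImageClassification

with open("config.json") as f:
    cfg = json.load(f)

# Rebuild the classifier head for the 13 coral labels in id2label/label2id.
model = Dinov2ForImageClassification.from_pretrained(
    cfg["_name_or_path"],
    num_labels=len(cfg["id2label"]),
    id2label=cfg["id2label"],
    label2id=cfg["label2id"],
    problem_type=cfg["problem_type"],  # multi-label -> BCEWithLogitsLoss in the model
)

# "freeze_encoder": true -- keep the DINOv2 backbone fixed, train only the head.
if cfg.get("freeze_encoder"):
    for param in model.dinov2.parameters():
        param.requires_grad = False

# "optimizer": {"type": "Adam"}, with the added learning rate and weight decay.
optimizer = torch.optim.Adam(
    (p for p in model.parameters() if p.requires_grad),
    lr=cfg["initial_learning_rate"],
    weight_decay=cfg["weight_decay"],
)

# "lr_scheduler_type": {"type": "ReduceLROnPlateau"}, driven by a validation metric.
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(
    optimizer,
    mode="min",
    factor=cfg["factor_lr_scheduler"],
    patience=cfg["patience_lr_scheduler"],
)

After each validation epoch, a loop built on this sketch would call scheduler.step(val_loss) and stop once the loss has not improved for early_stopping_patience epochs.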