Messis | W&B Run exp-11-more-channels-1024-0 (https://wandb.ai/crop-classification/messis/runs/qyxmbzeo)
Files changed:
- config.json (+14, -28)
- model.safetensors (+2, -2)
config.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "hparams": {
-    "accumulate_grad_batches":
+    "accumulate_grad_batches": 4,
     "backbone_weights_path": "./prithvi/models/Prithvi_100M.pt",
     "bands": [
       0,
@@ -10,50 +10,38 @@
       4,
       5
     ],
-    "batch_size":
-    "bottleneck_reduction_factor": 4,
-    "data_augmentation": {
-      "enabled": true,
-      "flip_prob": 0.5,
-      "jitter_std": 0.01
-    },
+    "batch_size": 4,
     "debug": false,
     "dropout_p": 0.1,
     "early_stopping_metric": "val_f1_tier3_majority",
     "early_stopping_mode": "max",
     "early_stopping_patience": 10,
-    "experiment_group": "
-    "experiment_name": "
-    "freeze_backbone":
+    "experiment_group": "exp-11-more-channels",
+    "experiment_name": "exp-11-more-channels-1024",
+    "freeze_backbone": true,
     "heads_spec": {
       "tier1": {
         "is_metrics_tier": true,
-        "kernel_size":
+        "kernel_size": 3,
         "loss_weight": 1,
-        "num_channels": 1024,
         "num_classes_to_predict": 6,
-        "num_convs": 1,
         "target_idx": 0,
         "type": "HierarchicalFCNHead"
       },
       "tier2": {
         "is_metrics_tier": true,
-        "kernel_size":
+        "kernel_size": 3,
         "loss_weight": 1,
-        "num_channels": 1024,
         "num_classes_to_predict": 17,
-        "num_convs": 1,
         "target_idx": 1,
         "type": "HierarchicalFCNHead"
       },
       "tier3": {
         "is_last_tier": true,
         "is_metrics_tier": true,
-        "kernel_size":
+        "kernel_size": 3,
         "loss_weight": 1,
-        "num_channels": 1024,
         "num_classes_to_predict": 49,
-        "num_convs": 1,
         "target_idx": 2,
         "type": "HierarchicalFCNHead"
       },
@@ -67,11 +55,10 @@
     }
   },
     "img_size": 224,
-    "
-    "lr": 0.0001,
+    "lr": 0.001,
     "max_epochs": 400,
-    "name": "
-    "num_frames":
+    "name": "1024",
+    "num_frames": 3,
     "optimizer": "Adam",
     "optimizer_momentum": null,
     "optimizer_weight_decay": null,
@@ -80,14 +67,13 @@
       5
     ],
     "train_folds": [
-      0,
       1,
       2,
-      3
+      3,
+      4
     ],
-    "use_bottleneck_neck": false,
     "val_folds": [
-
+      0
     ]
   }
 }
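For reference, a minimal sketch of reading this config.json and inspecting the hyperparameters touched by the commit. This is not the Messis training code itself; it only assumes the file sits in a Hugging Face model repo reachable via huggingface_hub, and the repo_id below is a placeholder rather than a value taken from this page.

# Sketch: load config.json and print the hparams changed in this commit.
import json

from huggingface_hub import hf_hub_download

config_path = hf_hub_download(repo_id="<user>/messis", filename="config.json")  # placeholder repo_id
with open(config_path) as f:
    hparams = json.load(f)["hparams"]

print(hparams["experiment_name"])                                   # "exp-11-more-channels-1024"
print(hparams["lr"], hparams["batch_size"], hparams["accumulate_grad_batches"])  # 0.001 4 4
print(hparams["heads_spec"]["tier3"]["num_classes_to_predict"])     # 49

The same dict also carries the heads_spec used to build the three HierarchicalFCNHead tiers (6, 17 and 49 classes) and the fold split (train_folds [1, 2, 3, 4], val_folds [0]).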
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:5beee0006ef65533c4dadd975b382b4bbc73a57bc9a24a74f43c9486482ba403
+size 848618908
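The two replaced lines are the git-lfs pointer fields for the new weights: the sha256 oid and the size in bytes. A small standard-library sketch for checking a locally downloaded model.safetensors against these pointer values (the local path is a placeholder):

# Sketch: verify a downloaded model.safetensors against the LFS pointer above.
import hashlib
import os

EXPECTED_SHA256 = "5beee0006ef65533c4dadd975b382b4bbc73a57bc9a24a74f43c9486482ba403"
EXPECTED_SIZE = 848618908  # bytes (~849 MB), from the pointer file

path = "model.safetensors"  # placeholder: local file after `git lfs pull` or a hub download
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
assert h.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")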