best_model.pt
- README.md +84 -0
- config.json +24 -0
- model.safetensors +3 -0
- training_args.bin +3 -0
README.md
ADDED
@@ -0,0 +1,84 @@
---
library_name: transformers
tags:
- generated_from_trainer
metrics:
- accuracy
- f1
model-index:
- name: windowz_test-020625
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# windowz_test-020625

This model is a fine-tuned version of [](https://huggingface.co/) on an unknown dataset.
It achieves the following results on the evaluation set:
- Model Preparation Time: 0.001
- Accuracy: 0.9521
- F1: 0.9483
- Iou: 0.9076
- Contour Dice: 0.9035
- Per Class Metrics: {0: {'f1': 0.97101, 'iou': 0.94365, 'accuracy': 0.95541, 'contour_dice': 0.97101}, 1: {'f1': 0.90428, 'iou': 0.82528, 'accuracy': 0.95685, 'contour_dice': 0.90428}, 2: {'f1': 0.27674, 'iou': 0.16059, 'accuracy': 0.99191, 'contour_dice': 0.27674}}
- Loss: 0.4842
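
The per-class dictionary above reports F1, IoU, pixel accuracy, and contour Dice for classes 0-2 (in this log the contour Dice value equals F1, i.e. plain Dice). Below is a hedged sketch of how such per-class scores can be recomputed from flat prediction and label maps; the helper name and the use of NumPy are assumptions, and plain Dice stands in for contour Dice.

```python
import numpy as np

def per_class_metrics(preds: np.ndarray, labels: np.ndarray, num_classes: int = 3) -> dict:
    """Per-class F1 (Dice), IoU, and pixel accuracy for integer class maps."""
    preds, labels = preds.ravel(), labels.ravel()
    scores = {}
    for c in range(num_classes):
        tp = int(np.sum((preds == c) & (labels == c)))
        fp = int(np.sum((preds == c) & (labels != c)))
        fn = int(np.sum((preds != c) & (labels == c)))
        tn = preds.size - tp - fp - fn
        scores[c] = {
            "f1": 2 * tp / (2 * tp + fp + fn) if (tp + fp + fn) else 0.0,  # Dice
            "iou": tp / (tp + fp + fn) if (tp + fp + fn) else 0.0,
            "accuracy": (tp + tn) / preds.size,
        }
    return scores
```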

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: cosine
- lr_scheduler_warmup_steps: 1000
- num_epochs: 1
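
These settings map closely onto `transformers.TrainingArguments`. A minimal sketch of an equivalent configuration is shown below; the `output_dir` and any logging or checkpointing options are assumptions, since they are not recorded in this card.

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="windowz_test-020625",  # assumed; not recorded in the card
    learning_rate=5e-5,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    seed=42,
    adam_beta1=0.9,                    # Adam betas/epsilon as listed above
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="cosine",
    warmup_steps=1000,
    num_train_epochs=1,
)
```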

### Training results

| Training Loss | Epoch | Step | Model Preparation Time | Iou | Contour Dice | Per Class Metrics | Validation Loss |
|:-------------:|:------:|:----:|:----------------------:|:------:|:------:|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|:---------------:|
| 1.3502 | 0.0501 | 257 | 0.001 | 0.5644 | 0.0674 | {0: {'f1': 0.85985, 'iou': 0.75416, 'accuracy': 0.75632, 'contour_dice': 0.85985}, 1: {'f1': 3e-05, 'iou': 2e-05, 'accuracy': 0.75751, 'contour_dice': 3e-05}, 2: {'f1': 0.03477, 'iou': 0.01769, 'accuracy': 0.98186, 'contour_dice': 0.03477}} | 1.0501 |
| 1.2779 | 0.1003 | 514 | 0.001 | 0.5614 | 0.0110 | {0: {'f1': 0.85654, 'iou': 0.74908, 'accuracy': 0.74943, 'contour_dice': 0.85654}, 1: {'f1': 0.0, 'iou': 0.0, 'accuracy': 0.7575, 'contour_dice': 0.0}, 2: {'f1': 0.18607, 'iou': 0.10258, 'accuracy': 0.99115, 'contour_dice': 0.18607}} | 0.9869 |
| 1.2058 | 0.1504 | 771 | 0.001 | 0.5601 | 0.0024 | {0: {'f1': 0.85601, 'iou': 0.74826, 'accuracy': 0.74834, 'contour_dice': 0.85601}, 1: {'f1': 0.00043, 'iou': 0.00021, 'accuracy': 0.75755, 'contour_dice': 0.00043}, 2: {'f1': 0.04908, 'iou': 0.02516, 'accuracy': 0.99076, 'contour_dice': 0.04908}} | 0.9181 |
| 1.1158 | 0.2005 | 1028 | 0.001 | 0.5610 | 0.0074 | {0: {'f1': 0.85634, 'iou': 0.74877, 'accuracy': 0.74901, 'contour_dice': 0.85634}, 1: {'f1': 0.00103, 'iou': 0.00052, 'accuracy': 0.75763, 'contour_dice': 0.00103}, 2: {'f1': 0.14378, 'iou': 0.07746, 'accuracy': 0.99122, 'contour_dice': 0.14378}} | 0.8743 |
| 1.0785 | 0.2507 | 1285 | 0.001 | 0.5614 | 0.0087 | {0: {'f1': 0.85643, 'iou': 0.74892, 'accuracy': 0.74919, 'contour_dice': 0.85643}, 1: {'f1': 0.00618, 'iou': 0.0031, 'accuracy': 0.75825, 'contour_dice': 0.00618}, 2: {'f1': 0.07155, 'iou': 0.0371, 'accuracy': 0.99093, 'contour_dice': 0.07155}} | 0.8316 |
| 1.0445 | 0.3008 | 1542 | 0.001 | 0.7385 | 0.6122 | {0: {'f1': 0.9132, 'iou': 0.84027, 'accuracy': 0.85815, 'contour_dice': 0.9132}, 1: {'f1': 0.619, 'iou': 0.44823, 'accuracy': 0.8647, 'contour_dice': 0.619}, 2: {'f1': 0.22291, 'iou': 0.12543, 'accuracy': 0.99173, 'contour_dice': 0.22291}} | 0.7703 |
| 1.0092 | 0.3510 | 1799 | 0.001 | 0.7510 | 0.6389 | {0: {'f1': 0.91749, 'iou': 0.84755, 'accuracy': 0.86567, 'contour_dice': 0.91749}, 1: {'f1': 0.64415, 'iou': 0.47509, 'accuracy': 0.87163, 'contour_dice': 0.64415}, 2: {'f1': 0.31734, 'iou': 0.18859, 'accuracy': 0.99231, 'contour_dice': 0.31734}} | 0.7823 |
| 0.9676 | 0.4011 | 2056 | 0.001 | 0.7371 | 0.6147 | {0: {'f1': 0.91344, 'iou': 0.84066, 'accuracy': 0.85863, 'contour_dice': 0.91344}, 1: {'f1': 0.60393, 'iou': 0.43259, 'accuracy': 0.86089, 'contour_dice': 0.60393}, 2: {'f1': 0.52376, 'iou': 0.3548, 'accuracy': 0.99253, 'contour_dice': 0.52376}} | 0.8004 |
| 0.9308 | 0.4512 | 2313 | 0.001 | 0.8564 | 0.8337 | {0: {'f1': 0.95377, 'iou': 0.91162, 'accuracy': 0.92764, 'contour_dice': 0.95377}, 1: {'f1': 0.83301, 'iou': 0.71381, 'accuracy': 0.9292, 'contour_dice': 0.83301}, 2: {'f1': 0.25373, 'iou': 0.1453, 'accuracy': 0.9918, 'contour_dice': 0.25373}} | 0.7535 |
| 0.9187 | 0.5014 | 2570 | 0.001 | 0.868 | 0.8442 | {0: {'f1': 0.95642, 'iou': 0.91649, 'accuracy': 0.93189, 'contour_dice': 0.95642}, 1: {'f1': 0.8514, 'iou': 0.74124, 'accuracy': 0.93689, 'contour_dice': 0.8514}, 2: {'f1': 0.43757, 'iou': 0.28006, 'accuracy': 0.99306, 'contour_dice': 0.43757}} | 0.7077 |
| 0.8916 | 0.5515 | 2827 | 0.001 | 0.8656 | 0.8447 | {0: {'f1': 0.95636, 'iou': 0.91636, 'accuracy': 0.93186, 'contour_dice': 0.95636}, 1: {'f1': 0.84316, 'iou': 0.72885, 'accuracy': 0.93331, 'contour_dice': 0.84316}, 2: {'f1': 0.5173, 'iou': 0.34889, 'accuracy': 0.9935, 'contour_dice': 0.5173}} | 0.6670 |
| 0.8723 | 0.6016 | 3084 | 0.001 | 0.9221 | 0.9210 | {0: {'f1': 0.97564, 'iou': 0.95244, 'accuracy': 0.96276, 'contour_dice': 0.97564}, 1: {'f1': 0.92036, 'iou': 0.85247, 'accuracy': 0.9635, 'contour_dice': 0.92036}, 2: {'f1': 0.46566, 'iou': 0.30349, 'accuracy': 0.99314, 'contour_dice': 0.46566}} | 0.6549 |
| 0.8761 | 0.6518 | 3341 | 0.001 | 0.7678 | 0.7651 | {0: {'f1': 0.90349, 'iou': 0.82397, 'accuracy': 0.86319, 'contour_dice': 0.90349}, 1: {'f1': 0.76222, 'iou': 0.6158, 'accuracy': 0.86427, 'contour_dice': 0.76222}, 2: {'f1': 0.35206, 'iou': 0.21364, 'accuracy': 0.99248, 'contour_dice': 0.35206}} | 0.7303 |
| 0.869 | 0.7019 | 3598 | 0.001 | 0.9259 | 0.9263 | {0: {'f1': 0.97721, 'iou': 0.95544, 'accuracy': 0.96519, 'contour_dice': 0.97721}, 1: {'f1': 0.92398, 'iou': 0.85871, 'accuracy': 0.96506, 'contour_dice': 0.92398}, 2: {'f1': 0.47308, 'iou': 0.30983, 'accuracy': 0.99326, 'contour_dice': 0.47308}} | 0.6818 |
| 0.8526 | 0.7520 | 3855 | 0.001 | 0.9455 | 0.9507 | {0: {'f1': 0.98427, 'iou': 0.96902, 'accuracy': 0.97615, 'contour_dice': 0.98427}, 1: {'f1': 0.94744, 'iou': 0.90013, 'accuracy': 0.97519, 'contour_dice': 0.94744}, 2: {'f1': 0.38871, 'iou': 0.24124, 'accuracy': 0.99266, 'contour_dice': 0.38871}} | 0.5164 |
| 0.8487 | 0.8022 | 4112 | 0.001 | 0.8958 | 0.8869 | {0: {'f1': 0.96679, 'iou': 0.93572, 'accuracy': 0.94866, 'contour_dice': 0.96679}, 1: {'f1': 0.88802, 'iou': 0.7986, 'accuracy': 0.95052, 'contour_dice': 0.88802}, 2: {'f1': 0.36537, 'iou': 0.22352, 'accuracy': 0.99239, 'contour_dice': 0.36537}} | 0.5553 |
| 0.8519 | 0.8523 | 4369 | 0.001 | 0.9236 | 0.9231 | {0: {'f1': 0.97639, 'iou': 0.95387, 'accuracy': 0.96388, 'contour_dice': 0.97639}, 1: {'f1': 0.92437, 'iou': 0.85937, 'accuracy': 0.9653, 'contour_dice': 0.92437}, 2: {'f1': 0.29738, 'iou': 0.17466, 'accuracy': 0.99214, 'contour_dice': 0.29738}} | 0.4860 |
| 0.8331 | 0.9025 | 4626 | 0.001 | 0.9076 | 0.9035 | {0: {'f1': 0.97101, 'iou': 0.94365, 'accuracy': 0.95541, 'contour_dice': 0.97101}, 1: {'f1': 0.90428, 'iou': 0.82528, 'accuracy': 0.95685, 'contour_dice': 0.90428}, 2: {'f1': 0.27674, 'iou': 0.16059, 'accuracy': 0.99191, 'contour_dice': 0.27674}} | 0.4842 |
| 0.8357 | 0.9526 | 4883 | 0.001 | 0.8881 | 0.8770 | {0: {'f1': 0.96409, 'iou': 0.93067, 'accuracy': 0.94441, 'contour_dice': 0.96409}, 1: {'f1': 0.87736, 'iou': 0.78151, 'accuracy': 0.94613, 'contour_dice': 0.87736}, 2: {'f1': 0.40324, 'iou': 0.25254, 'accuracy': 0.99246, 'contour_dice': 0.40324}} | 0.5096 |

### Framework versions

- Transformers 4.45.0
- Pytorch 2.5.1+cu124
- Datasets 2.21.0
- Tokenizers 0.20.3
config.json
ADDED
@@ -0,0 +1,24 @@
{
  "architectures": [
    "UNETForSegmentation"
  ],
  "dim": 224,
  "hidden_act": "gelu",
  "hidden_size": 256,
  "img_size": 128,
  "intermediate_size": 1024,
  "is_causal": false,
  "k": 2,
  "model_type": "Unet",
  "n_filts": 4,
  "num_attention_heads": 8,
  "num_channels": 3,
  "num_classes": 3,
  "num_hidden_layers": 6,
  "num_layers": 2,
  "patch_size": 16,
  "problem_type": "single_label_classification",
  "t": 2,
  "torch_dtype": "float32",
  "transformers_version": "4.45.0"
}
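
The config describes a compact, three-class U-Net-style segmentation model over 128×128, 3-channel inputs. `UNETForSegmentation` is a custom architecture, so `AutoModel` is not assumed to work without the accompanying modeling code; the sketch below only inspects the file locally (paths are assumptions).

```python
import json

# Load the config shipped with the checkpoint and read a few key fields.
with open("config.json") as f:
    cfg = json.load(f)

print(cfg["architectures"])                                      # ['UNETForSegmentation']
print(cfg["num_classes"], cfg["img_size"], cfg["num_channels"])  # 3, 128, 3
```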
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6b78ba10a5c1b9064a664658a29d3afa2c413804b393b6668f4fa2da25ef4de5
size 2188724
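
The entry above is a Git LFS pointer, not the weights themselves. A small sketch, assuming the ~2.2 MB blob has been downloaded locally, that verifies it against the pointer and loads the tensors with `safetensors`:

```python
import hashlib
import os

from safetensors.torch import load_file

PATH = "model.safetensors"            # assumed local path
EXPECTED_OID = "6b78ba10a5c1b9064a664658a29d3afa2c413804b393b6668f4fa2da25ef4de5"
EXPECTED_SIZE = 2188724               # bytes, from the LFS pointer above

assert os.path.getsize(PATH) == EXPECTED_SIZE, "size does not match the LFS pointer"
with open(PATH, "rb") as f:
    assert hashlib.sha256(f.read()).hexdigest() == EXPECTED_OID, "oid mismatch"

state_dict = load_file(PATH)          # plain dict of parameter name -> torch.Tensor
print(sum(t.numel() for t in state_dict.values()), "parameters")
```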
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e1ae6012ddf95afdd3d8358098f85b8b1832ad99a3f879410d3a6f1c08736340
size 5240
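
`training_args.bin` is the pickled `TrainingArguments` object the `Trainer` saves alongside the weights, so it can be loaded to confirm the hyperparameters in the README. A sketch, assuming the file is local and trusted (it is a pickle; with the PyTorch 2.5.1 build listed above, `weights_only=False` matches the default behaviour):

```python
import torch

# Not model weights: this file holds the serialized TrainingArguments.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.lr_scheduler_type, args.warmup_steps, args.num_train_epochs)
```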