qanastek committed on
Commit 9573c25 · 1 Parent(s): 22f85e0
Files changed (38)
  1. README.md +4 -4
  2. app.py +118 -0
  3. images/akiec.jpg +0 -0
  4. images/bcc.jpg +0 -0
  5. images/bkl.jpg +0 -0
  6. images/df.jpg +0 -0
  7. images/mel.jpg +0 -0
  8. images/nv.jpg +0 -0
  9. images/vasc.jpg +0 -0
  10. models/DeiT/config.json +39 -0
  11. models/DeiT/preprocessor_config.json +39 -0
  12. models/DeiT/pytorch_model.bin +3 -0
  13. models/DenseNet121/best_model.pth +3 -0
  14. models/DenseNet121/config.json +25 -0
  15. models/DenseNet121/logs/logs_2021-12-12-14-52-26.txt +1199 -0
  16. models/DenseNet121/logs/test_logs_acc_2021-12-12-14-52-26.txt +40 -0
  17. models/DenseNet121/logs/train_logs_acc_2021-12-12-14-52-26.txt +40 -0
  18. models/DenseNet121/logs/train_logs_loss_2021-12-12-14-52-26.txt +40 -0
  19. models/MobileNetV2/best_model.pth +3 -0
  20. models/MobileNetV2/config.json +25 -0
  21. models/MobileNetV2/logs/logs_2021-12-12-15-41-03.txt +984 -0
  22. models/MobileNetV2/logs/test_logs_acc_2021-12-12-15-41-03.txt +40 -0
  23. models/MobileNetV2/logs/train_logs_acc_2021-12-12-15-41-03.txt +40 -0
  24. models/MobileNetV2/logs/train_logs_loss_2021-12-12-15-41-03.txt +40 -0
  25. models/ShuffleNetV2/best_model.pth +3 -0
  26. models/ShuffleNetV2/config.json +25 -0
  27. models/ShuffleNetV2/logs/logs_2021-12-12-15-31-56.txt +945 -0
  28. models/ShuffleNetV2/logs/test_logs_acc_2021-12-12-15-31-56.txt +40 -0
  29. models/ShuffleNetV2/logs/train_logs_acc_2021-12-12-15-31-56.txt +40 -0
  30. models/ShuffleNetV2/logs/train_logs_loss_2021-12-12-15-31-56.txt +40 -0
  31. models/VGG16/best_model.pth +3 -0
  32. models/VGG16/config.json +25 -0
  33. models/VGG16/logs/logs_2021-12-12-15-09-07.txt +744 -0
  34. models/VGG16/logs/test_logs_acc_2021-12-12-15-09-07.txt +40 -0
  35. models/VGG16/logs/train_logs_acc_2021-12-12-15-09-07.txt +40 -0
  36. models/VGG16/logs/train_logs_loss_2021-12-12-15-09-07.txt +40 -0
  37. ressources/models.csv +6 -0
  38. ressources/thumbnail.png +0 -0
README.md CHANGED
@@ -1,11 +1,11 @@
  ---
  title: Skin Cancer
- emoji: 🏢
- colorFrom: pink
- colorTo: gray
+ emoji: ⚕️
+ colorFrom: red
+ colorTo: red
  sdk: gradio
  app_file: app.py
- pinned: false
+ pinned: true
  ---

  # Configuration
app.py ADDED
@@ -0,0 +1,118 @@
+ import gradio as gr
+
+ import numpy as np
+ from PIL import Image
+
+ from transformers import DeiTFeatureExtractor, DeiTForImageClassification
+ from hugsvision.inference.VisionClassifierInference import VisionClassifierInference
+ from hugsvision.inference.TorchVisionClassifierInference import TorchVisionClassifierInference
+
+ models_name = [
+     "VGG16",
+     "DeiT",
+     "DenseNet121",
+     "MobileNetV2",
+     "ShuffleNetV2",
+ ]
+
+ radio = gr.inputs.Radio(models_name, default="DenseNet121", type="value")
+
+ def predict_image(image, model_name):
+
+     image = Image.fromarray(np.uint8(image)).convert('RGB')
+
+     model_path = "./models/" + model_name
+
+     if model_name == "DeiT":
+
+         model = VisionClassifierInference(
+             feature_extractor = DeiTFeatureExtractor.from_pretrained(model_path),
+             model = DeiTForImageClassification.from_pretrained(model_path),
+         )
+
+     else:
+
+         model = TorchVisionClassifierInference(
+             model_path = model_path
+         )
+
+     pred = model.predict_image(img=image, return_str=False)
+
+     # Scores come back as percentages; rescale to [0, 1] for the Gradio Label output.
+     for key in pred.keys():
+         pred[key] = pred[key] / 100
+
+     return pred
+
+ id2label = ["akiec", "bcc", "bkl", "df", "mel", "nv", "vasc"]
+
+ samples = [["images/" + p + ".jpg"] for p in id2label]
+ print(samples)
+
+ image = gr.inputs.Image(shape=(224, 224), label="Upload Your Image Here")
+ label = gr.outputs.Label(num_top_classes=len(id2label))
+
+ interface = gr.Interface(
+     fn=predict_image,
+     inputs=[image, radio],
+     outputs=label,
+     capture_session=True,
+     allow_flagging=False,
+     thumbnail="ressources/thumbnail.png",
+     article="""
+     <html style="color: white;">
+     <style type="text/css">
+     .tg {border-collapse:collapse;border-spacing:0;}
+     .tg td{border-color:black;border-style:solid;border-width:1px;font-family:Arial, sans-serif;font-size:14px;
+       overflow:hidden;padding:10px 5px;word-break:normal;}
+     .tg th{border-color:black;border-style:solid;border-width:1px;font-family:Arial, sans-serif;font-size:14px;
+       font-weight:normal;overflow:hidden;padding:10px 5px;word-break:normal;}
+     .tg .tg-v0zy{background-color:#efefef;color:#000000;font-weight:bold;text-align:center;vertical-align:top}
+     .tg .tg-4jb6{background-color:#ffffff;color:#333333;text-align:center;vertical-align:top}
+     </style>
+     <table class="tg">
+       <thead>
+         <tr>
+           <th class="tg-v0zy">Model</th>
+           <th class="tg-v0zy">Accuracy</th>
+           <th class="tg-v0zy">Size</th>
+         </tr>
+       </thead>
+       <tbody>
+         <tr>
+           <td class="tg-4jb6">VGG16</td>
+           <td class="tg-4jb6">38.27%</td>
+           <td class="tg-4jb6">512.0 MB</td>
+         </tr>
+         <tr>
+           <td class="tg-4jb6">DeiT</td>
+           <td class="tg-4jb6">71.60%</td>
+           <td class="tg-4jb6">327.0 MB</td>
+         </tr>
+         <tr>
+           <td class="tg-4jb6">DenseNet121</td>
+           <td class="tg-4jb6">77.78%</td>
+           <td class="tg-4jb6">27.1 MB</td>
+         </tr>
+         <tr>
+           <td class="tg-4jb6">MobileNetV2</td>
+           <td class="tg-4jb6">75.31%</td>
+           <td class="tg-4jb6">8.77 MB</td>
+         </tr>
+         <tr>
+           <td class="tg-4jb6">ShuffleNetV2</td>
+           <td class="tg-4jb6">76.54%</td>
+           <td class="tg-4jb6">4.99 MB</td>
+         </tr>
+       </tbody>
+     </table>
+     </html>
+     """,
+     theme="darkhuggingface",
+     title="HAM10000: Training and using a TorchVision Image Classifier in 5 min to identify skin cancer",
+     description="A fast and easy tutorial to train a TorchVision Image Classifier that can help dermatologists identify Melanoma cases, using HugsVision and the HAM10000 dataset.",
+     allow_screenshot=True,
+     show_tips=False,
+     encrypt=False,
+     examples=samples,
+ )
+ interface.launch()
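For a quick sanity check outside the Gradio UI, the same HugsVision call that `predict_image` makes for the non-DeiT models can be run directly on one of the bundled sample images. This is a minimal sketch, not part of the commit, assuming it is executed from the Space root with `hugsvision` installed and the LFS checkpoints pulled:

```python
# Minimal sketch (not in app.py): run one TorchVision checkpoint directly,
# mirroring the calls predict_image() makes for the non-DeiT models.
from PIL import Image
from hugsvision.inference.TorchVisionClassifierInference import TorchVisionClassifierInference

model = TorchVisionClassifierInference(model_path="./models/DenseNet121")
img = Image.open("images/nv.jpg").convert("RGB")        # one of the shipped sample images
pred = model.predict_image(img=img, return_str=False)   # dict: label -> score (percent)
print(max(pred, key=pred.get), {k: v / 100 for k, v in pred.items()})
```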
images/akiec.jpg ADDED
images/bcc.jpg ADDED
images/bkl.jpg ADDED
images/df.jpg ADDED
images/mel.jpg ADDED
images/nv.jpg ADDED
images/vasc.jpg ADDED
models/DeiT/config.json ADDED
@@ -0,0 +1,39 @@
+ {
+   "_name_or_path": "facebook/deit-base-distilled-patch16-224",
+   "architectures": [
+     "DeiTForImageClassification"
+   ],
+   "attention_probs_dropout_prob": 0.0,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.0,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "akiec",
+     "1": "bcc",
+     "2": "bkl",
+     "3": "df",
+     "4": "mel",
+     "5": "nv",
+     "6": "vasc"
+   },
+   "image_size": 224,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "akiec": "0",
+     "bcc": "1",
+     "bkl": "2",
+     "df": "3",
+     "mel": "4",
+     "nv": "5",
+     "vasc": "6"
+   },
+   "layer_norm_eps": 1e-12,
+   "model_type": "deit",
+   "num_attention_heads": 12,
+   "num_channels": 3,
+   "num_hidden_layers": 12,
+   "patch_size": 16,
+   "torch_dtype": "float32",
+   "transformers_version": "4.10.0"
+ }
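The DeiT checkpoint is a standard `transformers` model directory, so it can be loaded with the same two calls `app.py` uses. A minimal sketch, assuming the Space root as working directory:

```python
# Sketch: load the DeiT checkpoint described by this config (same calls as app.py).
from transformers import DeiTFeatureExtractor, DeiTForImageClassification

model_path = "./models/DeiT"
feature_extractor = DeiTFeatureExtractor.from_pretrained(model_path)
model = DeiTForImageClassification.from_pretrained(model_path)
print(model.config.id2label)  # {0: 'akiec', 1: 'bcc', 2: 'bkl', 3: 'df', 4: 'mel', 5: 'nv', 6: 'vasc'}
```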
models/DeiT/preprocessor_config.json ADDED
@@ -0,0 +1,39 @@
+ {
+   "_name_or_path": "facebook/deit-base-distilled-patch16-224",
+   "architectures": [
+     "DeiTForImageClassification"
+   ],
+   "attention_probs_dropout_prob": 0.0,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.0,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "akiec",
+     "1": "bcc",
+     "2": "bkl",
+     "3": "df",
+     "4": "mel",
+     "5": "nv",
+     "6": "vasc"
+   },
+   "image_size": 224,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "akiec": "0",
+     "bcc": "1",
+     "bkl": "2",
+     "df": "3",
+     "mel": "4",
+     "nv": "5",
+     "vasc": "6"
+   },
+   "layer_norm_eps": 1e-12,
+   "model_type": "deit",
+   "num_attention_heads": 12,
+   "num_channels": 3,
+   "num_hidden_layers": 12,
+   "patch_size": 16,
+   "torch_dtype": "float32",
+   "transformers_version": "4.10.0"
+ }
models/DeiT/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8cf13380eaf41654e6eaa615ba865c88dfa02704edeab6b611b65fbbe4241485
+ size 343301999
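The weights themselves live in Git LFS; the committed file is only a pointer recording the SHA-256 object id and byte size above. A small sketch (assuming the real file has been fetched with `git lfs pull`) to check a local copy against the pointer:

```python
# Verify a pulled LFS file against the oid/size recorded in the pointer above.
import hashlib
import os

path = "models/DeiT/pytorch_model.bin"
expected_oid = "8cf13380eaf41654e6eaa615ba865c88dfa02704edeab6b611b65fbbe4241485"
expected_size = 343301999

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print(h.hexdigest() == expected_oid, os.path.getsize(path) == expected_size)
```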
models/DenseNet121/best_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cb3e6f2603beb6ada576cdacaac0bf0a0acb02f6830acc8518592cf9332f9c6e
+ size 28470227
models/DenseNet121/config.json ADDED
@@ -0,0 +1,25 @@
+ {
+   "num_classes": 7,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "akiec",
+     "1": "bcc",
+     "2": "bkl",
+     "3": "df",
+     "4": "mel",
+     "5": "nv",
+     "6": "vasc"
+   },
+   "label2id": {
+     "akiec": "0",
+     "bcc": "1",
+     "bkl": "2",
+     "df": "3",
+     "mel": "4",
+     "nv": "5",
+     "vasc": "6"
+   },
+   "architectures": [
+     "densenet121"
+   ]
+ }
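This file appears to be HugsVision's own metadata rather than a `transformers` config: it records the torchvision architecture name, the 1024-feature classifier width, and the 7 HAM10000 classes. A minimal sketch (an assumption, not code from the commit) of the network it describes:

```python
# Sketch of the torchvision model this config describes: DenseNet-121 whose
# 1024-feature classifier is replaced by a 7-way head matching id2label above.
import torch.nn as nn
from torchvision import models

net = models.densenet121()
net.classifier = nn.Linear(in_features=1024, out_features=7)
```

In the app itself the checkpoint is loaded through HugsVision's `TorchVisionClassifierInference` rather than by rebuilding the network by hand.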
models/DenseNet121/logs/logs_2021-12-12-14-52-26.txt ADDED
@@ -0,0 +1,1199 @@
1
+ ==================================================
2
+ Model architecture:
3
+ ==================================================
4
+ DenseNet(
5
+ (features): Sequential(
6
+ (conv0): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
7
+ (norm0): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
8
+ (relu0): ReLU(inplace=True)
9
+ (pool0): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
10
+ (denseblock1): _DenseBlock(
11
+ (denselayer1): _DenseLayer(
12
+ (norm1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
13
+ (relu1): ReLU(inplace=True)
14
+ (conv1): Conv2d(64, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
15
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
16
+ (relu2): ReLU(inplace=True)
17
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
18
+ )
19
+ (denselayer2): _DenseLayer(
20
+ (norm1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
21
+ (relu1): ReLU(inplace=True)
22
+ (conv1): Conv2d(96, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
23
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
24
+ (relu2): ReLU(inplace=True)
25
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
26
+ )
27
+ (denselayer3): _DenseLayer(
28
+ (norm1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
29
+ (relu1): ReLU(inplace=True)
30
+ (conv1): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
31
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
32
+ (relu2): ReLU(inplace=True)
33
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
34
+ )
35
+ (denselayer4): _DenseLayer(
36
+ (norm1): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
37
+ (relu1): ReLU(inplace=True)
38
+ (conv1): Conv2d(160, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
39
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
40
+ (relu2): ReLU(inplace=True)
41
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
42
+ )
43
+ (denselayer5): _DenseLayer(
44
+ (norm1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
45
+ (relu1): ReLU(inplace=True)
46
+ (conv1): Conv2d(192, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
47
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
48
+ (relu2): ReLU(inplace=True)
49
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
50
+ )
51
+ (denselayer6): _DenseLayer(
52
+ (norm1): BatchNorm2d(224, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
53
+ (relu1): ReLU(inplace=True)
54
+ (conv1): Conv2d(224, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
55
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
56
+ (relu2): ReLU(inplace=True)
57
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
58
+ )
59
+ )
60
+ (transition1): _Transition(
61
+ (norm): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
62
+ (relu): ReLU(inplace=True)
63
+ (conv): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
64
+ (pool): AvgPool2d(kernel_size=2, stride=2, padding=0)
65
+ )
66
+ (denseblock2): _DenseBlock(
67
+ (denselayer1): _DenseLayer(
68
+ (norm1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
69
+ (relu1): ReLU(inplace=True)
70
+ (conv1): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
71
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
72
+ (relu2): ReLU(inplace=True)
73
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
74
+ )
75
+ (denselayer2): _DenseLayer(
76
+ (norm1): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
77
+ (relu1): ReLU(inplace=True)
78
+ (conv1): Conv2d(160, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
79
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
80
+ (relu2): ReLU(inplace=True)
81
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
82
+ )
83
+ (denselayer3): _DenseLayer(
84
+ (norm1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
85
+ (relu1): ReLU(inplace=True)
86
+ (conv1): Conv2d(192, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
87
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
88
+ (relu2): ReLU(inplace=True)
89
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
90
+ )
91
+ (denselayer4): _DenseLayer(
92
+ (norm1): BatchNorm2d(224, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
93
+ (relu1): ReLU(inplace=True)
94
+ (conv1): Conv2d(224, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
95
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
96
+ (relu2): ReLU(inplace=True)
97
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
98
+ )
99
+ (denselayer5): _DenseLayer(
100
+ (norm1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
101
+ (relu1): ReLU(inplace=True)
102
+ (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
103
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
104
+ (relu2): ReLU(inplace=True)
105
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
106
+ )
107
+ (denselayer6): _DenseLayer(
108
+ (norm1): BatchNorm2d(288, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
109
+ (relu1): ReLU(inplace=True)
110
+ (conv1): Conv2d(288, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
111
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
112
+ (relu2): ReLU(inplace=True)
113
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
114
+ )
115
+ (denselayer7): _DenseLayer(
116
+ (norm1): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
117
+ (relu1): ReLU(inplace=True)
118
+ (conv1): Conv2d(320, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
119
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
120
+ (relu2): ReLU(inplace=True)
121
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
122
+ )
123
+ (denselayer8): _DenseLayer(
124
+ (norm1): BatchNorm2d(352, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
125
+ (relu1): ReLU(inplace=True)
126
+ (conv1): Conv2d(352, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
127
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
128
+ (relu2): ReLU(inplace=True)
129
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
130
+ )
131
+ (denselayer9): _DenseLayer(
132
+ (norm1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
133
+ (relu1): ReLU(inplace=True)
134
+ (conv1): Conv2d(384, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
135
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
136
+ (relu2): ReLU(inplace=True)
137
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
138
+ )
139
+ (denselayer10): _DenseLayer(
140
+ (norm1): BatchNorm2d(416, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
141
+ (relu1): ReLU(inplace=True)
142
+ (conv1): Conv2d(416, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
143
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
144
+ (relu2): ReLU(inplace=True)
145
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
146
+ )
147
+ (denselayer11): _DenseLayer(
148
+ (norm1): BatchNorm2d(448, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
149
+ (relu1): ReLU(inplace=True)
150
+ (conv1): Conv2d(448, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
151
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
152
+ (relu2): ReLU(inplace=True)
153
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
154
+ )
155
+ (denselayer12): _DenseLayer(
156
+ (norm1): BatchNorm2d(480, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
157
+ (relu1): ReLU(inplace=True)
158
+ (conv1): Conv2d(480, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
159
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
160
+ (relu2): ReLU(inplace=True)
161
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
162
+ )
163
+ )
164
+ (transition2): _Transition(
165
+ (norm): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
166
+ (relu): ReLU(inplace=True)
167
+ (conv): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
168
+ (pool): AvgPool2d(kernel_size=2, stride=2, padding=0)
169
+ )
170
+ (denseblock3): _DenseBlock(
171
+ (denselayer1): _DenseLayer(
172
+ (norm1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
173
+ (relu1): ReLU(inplace=True)
174
+ (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
175
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
176
+ (relu2): ReLU(inplace=True)
177
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
178
+ )
179
+ (denselayer2): _DenseLayer(
180
+ (norm1): BatchNorm2d(288, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
181
+ (relu1): ReLU(inplace=True)
182
+ (conv1): Conv2d(288, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
183
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
184
+ (relu2): ReLU(inplace=True)
185
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
186
+ )
187
+ (denselayer3): _DenseLayer(
188
+ (norm1): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
189
+ (relu1): ReLU(inplace=True)
190
+ (conv1): Conv2d(320, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
191
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
192
+ (relu2): ReLU(inplace=True)
193
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
194
+ )
195
+ (denselayer4): _DenseLayer(
196
+ (norm1): BatchNorm2d(352, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
197
+ (relu1): ReLU(inplace=True)
198
+ (conv1): Conv2d(352, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
199
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
200
+ (relu2): ReLU(inplace=True)
201
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
202
+ )
203
+ (denselayer5): _DenseLayer(
204
+ (norm1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
205
+ (relu1): ReLU(inplace=True)
206
+ (conv1): Conv2d(384, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
207
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
208
+ (relu2): ReLU(inplace=True)
209
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
210
+ )
211
+ (denselayer6): _DenseLayer(
212
+ (norm1): BatchNorm2d(416, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
213
+ (relu1): ReLU(inplace=True)
214
+ (conv1): Conv2d(416, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
215
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
216
+ (relu2): ReLU(inplace=True)
217
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
218
+ )
219
+ (denselayer7): _DenseLayer(
220
+ (norm1): BatchNorm2d(448, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
221
+ (relu1): ReLU(inplace=True)
222
+ (conv1): Conv2d(448, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
223
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
224
+ (relu2): ReLU(inplace=True)
225
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
226
+ )
227
+ (denselayer8): _DenseLayer(
228
+ (norm1): BatchNorm2d(480, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
229
+ (relu1): ReLU(inplace=True)
230
+ (conv1): Conv2d(480, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
231
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
232
+ (relu2): ReLU(inplace=True)
233
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
234
+ )
235
+ (denselayer9): _DenseLayer(
236
+ (norm1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
237
+ (relu1): ReLU(inplace=True)
238
+ (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
239
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
240
+ (relu2): ReLU(inplace=True)
241
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
242
+ )
243
+ (denselayer10): _DenseLayer(
244
+ (norm1): BatchNorm2d(544, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
245
+ (relu1): ReLU(inplace=True)
246
+ (conv1): Conv2d(544, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
247
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
248
+ (relu2): ReLU(inplace=True)
249
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
250
+ )
251
+ (denselayer11): _DenseLayer(
252
+ (norm1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
253
+ (relu1): ReLU(inplace=True)
254
+ (conv1): Conv2d(576, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
255
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
256
+ (relu2): ReLU(inplace=True)
257
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
258
+ )
259
+ (denselayer12): _DenseLayer(
260
+ (norm1): BatchNorm2d(608, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
261
+ (relu1): ReLU(inplace=True)
262
+ (conv1): Conv2d(608, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
263
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
264
+ (relu2): ReLU(inplace=True)
265
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
266
+ )
267
+ (denselayer13): _DenseLayer(
268
+ (norm1): BatchNorm2d(640, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
269
+ (relu1): ReLU(inplace=True)
270
+ (conv1): Conv2d(640, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
271
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
272
+ (relu2): ReLU(inplace=True)
273
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
274
+ )
275
+ (denselayer14): _DenseLayer(
276
+ (norm1): BatchNorm2d(672, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
277
+ (relu1): ReLU(inplace=True)
278
+ (conv1): Conv2d(672, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
279
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
280
+ (relu2): ReLU(inplace=True)
281
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
282
+ )
283
+ (denselayer15): _DenseLayer(
284
+ (norm1): BatchNorm2d(704, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
285
+ (relu1): ReLU(inplace=True)
286
+ (conv1): Conv2d(704, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
287
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
288
+ (relu2): ReLU(inplace=True)
289
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
290
+ )
291
+ (denselayer16): _DenseLayer(
292
+ (norm1): BatchNorm2d(736, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
293
+ (relu1): ReLU(inplace=True)
294
+ (conv1): Conv2d(736, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
295
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
296
+ (relu2): ReLU(inplace=True)
297
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
298
+ )
299
+ (denselayer17): _DenseLayer(
300
+ (norm1): BatchNorm2d(768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
301
+ (relu1): ReLU(inplace=True)
302
+ (conv1): Conv2d(768, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
303
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
304
+ (relu2): ReLU(inplace=True)
305
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
306
+ )
307
+ (denselayer18): _DenseLayer(
308
+ (norm1): BatchNorm2d(800, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
309
+ (relu1): ReLU(inplace=True)
310
+ (conv1): Conv2d(800, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
311
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
312
+ (relu2): ReLU(inplace=True)
313
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
314
+ )
315
+ (denselayer19): _DenseLayer(
316
+ (norm1): BatchNorm2d(832, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
317
+ (relu1): ReLU(inplace=True)
318
+ (conv1): Conv2d(832, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
319
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
320
+ (relu2): ReLU(inplace=True)
321
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
322
+ )
323
+ (denselayer20): _DenseLayer(
324
+ (norm1): BatchNorm2d(864, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
325
+ (relu1): ReLU(inplace=True)
326
+ (conv1): Conv2d(864, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
327
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
328
+ (relu2): ReLU(inplace=True)
329
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
330
+ )
331
+ (denselayer21): _DenseLayer(
332
+ (norm1): BatchNorm2d(896, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
333
+ (relu1): ReLU(inplace=True)
334
+ (conv1): Conv2d(896, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
335
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
336
+ (relu2): ReLU(inplace=True)
337
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
338
+ )
339
+ (denselayer22): _DenseLayer(
340
+ (norm1): BatchNorm2d(928, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
341
+ (relu1): ReLU(inplace=True)
342
+ (conv1): Conv2d(928, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
343
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
344
+ (relu2): ReLU(inplace=True)
345
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
346
+ )
347
+ (denselayer23): _DenseLayer(
348
+ (norm1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
349
+ (relu1): ReLU(inplace=True)
350
+ (conv1): Conv2d(960, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
351
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
352
+ (relu2): ReLU(inplace=True)
353
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
354
+ )
355
+ (denselayer24): _DenseLayer(
356
+ (norm1): BatchNorm2d(992, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
357
+ (relu1): ReLU(inplace=True)
358
+ (conv1): Conv2d(992, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
359
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
360
+ (relu2): ReLU(inplace=True)
361
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
362
+ )
363
+ )
364
+ (transition3): _Transition(
365
+ (norm): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
366
+ (relu): ReLU(inplace=True)
367
+ (conv): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
368
+ (pool): AvgPool2d(kernel_size=2, stride=2, padding=0)
369
+ )
370
+ (denseblock4): _DenseBlock(
371
+ (denselayer1): _DenseLayer(
372
+ (norm1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
373
+ (relu1): ReLU(inplace=True)
374
+ (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
375
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
376
+ (relu2): ReLU(inplace=True)
377
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
378
+ )
379
+ (denselayer2): _DenseLayer(
380
+ (norm1): BatchNorm2d(544, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
381
+ (relu1): ReLU(inplace=True)
382
+ (conv1): Conv2d(544, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
383
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
384
+ (relu2): ReLU(inplace=True)
385
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
386
+ )
387
+ (denselayer3): _DenseLayer(
388
+ (norm1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
389
+ (relu1): ReLU(inplace=True)
390
+ (conv1): Conv2d(576, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
391
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
392
+ (relu2): ReLU(inplace=True)
393
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
394
+ )
395
+ (denselayer4): _DenseLayer(
396
+ (norm1): BatchNorm2d(608, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
397
+ (relu1): ReLU(inplace=True)
398
+ (conv1): Conv2d(608, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
399
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
400
+ (relu2): ReLU(inplace=True)
401
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
402
+ )
403
+ (denselayer5): _DenseLayer(
404
+ (norm1): BatchNorm2d(640, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
405
+ (relu1): ReLU(inplace=True)
406
+ (conv1): Conv2d(640, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
407
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
408
+ (relu2): ReLU(inplace=True)
409
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
410
+ )
411
+ (denselayer6): _DenseLayer(
412
+ (norm1): BatchNorm2d(672, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
413
+ (relu1): ReLU(inplace=True)
414
+ (conv1): Conv2d(672, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
415
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
416
+ (relu2): ReLU(inplace=True)
417
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
418
+ )
419
+ (denselayer7): _DenseLayer(
420
+ (norm1): BatchNorm2d(704, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
421
+ (relu1): ReLU(inplace=True)
422
+ (conv1): Conv2d(704, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
423
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
424
+ (relu2): ReLU(inplace=True)
425
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
426
+ )
427
+ (denselayer8): _DenseLayer(
428
+ (norm1): BatchNorm2d(736, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
429
+ (relu1): ReLU(inplace=True)
430
+ (conv1): Conv2d(736, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
431
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
432
+ (relu2): ReLU(inplace=True)
433
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
434
+ )
435
+ (denselayer9): _DenseLayer(
436
+ (norm1): BatchNorm2d(768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
437
+ (relu1): ReLU(inplace=True)
438
+ (conv1): Conv2d(768, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
439
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
440
+ (relu2): ReLU(inplace=True)
441
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
442
+ )
443
+ (denselayer10): _DenseLayer(
444
+ (norm1): BatchNorm2d(800, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
445
+ (relu1): ReLU(inplace=True)
446
+ (conv1): Conv2d(800, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
447
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
448
+ (relu2): ReLU(inplace=True)
449
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
450
+ )
451
+ (denselayer11): _DenseLayer(
452
+ (norm1): BatchNorm2d(832, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
453
+ (relu1): ReLU(inplace=True)
454
+ (conv1): Conv2d(832, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
455
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
456
+ (relu2): ReLU(inplace=True)
457
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
458
+ )
459
+ (denselayer12): _DenseLayer(
460
+ (norm1): BatchNorm2d(864, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
461
+ (relu1): ReLU(inplace=True)
462
+ (conv1): Conv2d(864, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
463
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
464
+ (relu2): ReLU(inplace=True)
465
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
466
+ )
467
+ (denselayer13): _DenseLayer(
468
+ (norm1): BatchNorm2d(896, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
469
+ (relu1): ReLU(inplace=True)
470
+ (conv1): Conv2d(896, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
471
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
472
+ (relu2): ReLU(inplace=True)
473
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
474
+ )
475
+ (denselayer14): _DenseLayer(
476
+ (norm1): BatchNorm2d(928, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
477
+ (relu1): ReLU(inplace=True)
478
+ (conv1): Conv2d(928, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
479
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
480
+ (relu2): ReLU(inplace=True)
481
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
482
+ )
483
+ (denselayer15): _DenseLayer(
484
+ (norm1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
485
+ (relu1): ReLU(inplace=True)
486
+ (conv1): Conv2d(960, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
487
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
488
+ (relu2): ReLU(inplace=True)
489
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
490
+ )
491
+ (denselayer16): _DenseLayer(
492
+ (norm1): BatchNorm2d(992, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
493
+ (relu1): ReLU(inplace=True)
494
+ (conv1): Conv2d(992, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
495
+ (norm2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
496
+ (relu2): ReLU(inplace=True)
497
+ (conv2): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
498
+ )
499
+ )
500
+ (norm5): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
501
+ )
502
+ (classifier): Linear(in_features=1024, out_features=7, bias=True)
503
+ )
504
+ ==================================================
505
+
506
+ [Epoch 0], [Batch 0 / 40], [Loss 2.126059055328369]
507
+ precision recall f1-score support
508
+
509
+ akiec 0.0000 0.0000 0.0000 15
510
+ bcc 1.0000 0.1000 0.1818 10
511
+ bkl 0.0000 0.0000 0.0000 10
512
+ df 0.1765 0.2500 0.2069 12
513
+ mel 0.0000 0.0000 0.0000 9
514
+ nv 0.2708 0.8125 0.4062 16
515
+ vasc 0.4286 0.6667 0.5217 9
516
+
517
+ accuracy 0.2840 81
518
+ macro avg 0.2680 0.2613 0.1881 81
519
+ weighted avg 0.2507 0.2840 0.1913 81
520
+
521
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth
522
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 28.40%
523
+ [Epoch 1], [Batch 0 / 40], [Loss 0.7534104585647583]
524
+ precision recall f1-score support
525
+
526
+ akiec 0.7500 0.2000 0.3158 15
527
+ bcc 0.2143 0.6000 0.3158 10
528
+ bkl 0.1667 0.3000 0.2143 10
529
+ df 0.0000 0.0000 0.0000 12
530
+ mel 0.0000 0.0000 0.0000 9
531
+ nv 0.5238 0.6875 0.5946 16
532
+ vasc 0.7143 0.5556 0.6250 9
533
+
534
+ accuracy 0.3457 81
535
+ macro avg 0.3384 0.3347 0.2951 81
536
+ weighted avg 0.3688 0.3457 0.3108 81
537
+
538
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth
539
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 34.57%
540
+ [Epoch 2], [Batch 0 / 40], [Loss 0.654026448726654]
541
+ precision recall f1-score support
542
+
543
+ akiec 0.5417 0.8667 0.6667 15
544
+ bcc 0.8000 0.4000 0.5333 10
545
+ bkl 0.3333 0.2000 0.2500 10
546
+ df 0.7500 0.2500 0.3750 12
547
+ mel 1.0000 0.5556 0.7143 9
548
+ nv 0.7857 0.6875 0.7333 16
549
+ vasc 0.3913 1.0000 0.5625 9
550
+
551
+ accuracy 0.5802 81
552
+ macro avg 0.6574 0.5657 0.5479 81
553
+ weighted avg 0.6611 0.5802 0.5624 81
554
+
555
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth
556
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 58.02%
557
+ [Epoch 3], [Batch 0 / 40], [Loss 0.7007324695587158]
558
+ precision recall f1-score support
559
+
560
+ akiec 0.7778 0.4667 0.5833 15
561
+ bcc 0.5000 0.4000 0.4444 10
562
+ bkl 0.1667 0.2000 0.1818 10
563
+ df 1.0000 0.2500 0.4000 12
564
+ mel 0.5714 0.4444 0.5000 9
565
+ nv 0.6923 0.5625 0.6207 16
566
+ vasc 0.3103 1.0000 0.4737 9
567
+
568
+ accuracy 0.4691 81
569
+ macro avg 0.5741 0.4748 0.4577 81
570
+ weighted avg 0.6092 0.4691 0.4754 81
571
+
572
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
573
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 58.02%
574
+ [Epoch 4], [Batch 0 / 40], [Loss 0.32403188943862915]
575
+ precision recall f1-score support
576
+
577
+ akiec 0.5714 0.2667 0.3636 15
578
+ bcc 1.0000 0.4000 0.5714 10
579
+ bkl 0.2593 0.7000 0.3784 10
580
+ df 0.7500 0.2500 0.3750 12
581
+ mel 0.3529 0.6667 0.4615 9
582
+ nv 0.8333 0.6250 0.7143 16
583
+ vasc 0.8000 0.8889 0.8421 9
584
+
585
+ accuracy 0.5185 81
586
+ macro avg 0.6524 0.5425 0.5295 81
587
+ weighted avg 0.6651 0.5185 0.5261 81
588
+
589
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
590
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 58.02%
591
+ [Epoch 5], [Batch 0 / 40], [Loss 0.4335012137889862]
592
+ precision recall f1-score support
593
+
594
+ akiec 0.4444 0.5333 0.4848 15
595
+ bcc 0.1875 0.3000 0.2308 10
596
+ bkl 0.0000 0.0000 0.0000 10
597
+ df 1.0000 0.2500 0.4000 12
598
+ mel 0.6000 0.6667 0.6316 9
599
+ nv 0.6316 0.7500 0.6857 16
600
+ vasc 0.6923 1.0000 0.8182 9
601
+
602
+ accuracy 0.5062 81
603
+ macro avg 0.5080 0.5000 0.4644 81
604
+ weighted avg 0.5219 0.5062 0.4741 81
605
+
606
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
607
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 58.02%
608
+ [Epoch 6], [Batch 0 / 40], [Loss 0.15954938530921936]
609
+ precision recall f1-score support
610
+
611
+ akiec 0.3333 0.0667 0.1111 15
612
+ bcc 0.6667 0.6000 0.6316 10
613
+ bkl 0.3333 0.8000 0.4706 10
614
+ df 0.8333 0.4167 0.5556 12
615
+ mel 0.4167 0.5556 0.4762 9
616
+ nv 0.7059 0.7500 0.7273 16
617
+ vasc 0.9000 1.0000 0.9474 9
618
+
619
+ accuracy 0.5679 81
620
+ macro avg 0.5985 0.5984 0.5600 81
621
+ weighted avg 0.5944 0.5679 0.5408 81
622
+
623
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
624
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 58.02%
625
+ [Epoch 7], [Batch 0 / 40], [Loss 0.09287992864847183]
626
+ precision recall f1-score support
627
+
628
+ akiec 0.4815 0.8667 0.6190 15
629
+ bcc 1.0000 0.6000 0.7500 10
630
+ bkl 0.5000 0.1000 0.1667 10
631
+ df 0.7273 0.6667 0.6957 12
632
+ mel 0.7500 0.6667 0.7059 9
633
+ nv 0.7500 0.9375 0.8333 16
634
+ vasc 1.0000 0.7778 0.8750 9
635
+
636
+ accuracy 0.6914 81
637
+ macro avg 0.7441 0.6593 0.6637 81
638
+ weighted avg 0.7247 0.6914 0.6711 81
639
+
640
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth
641
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 69.14%
642
+ [Epoch 8], [Batch 0 / 40], [Loss 0.16124042868614197]
643
+ precision recall f1-score support
644
+
645
+ akiec 0.0000 0.0000 0.0000 15
646
+ bcc 0.5000 0.4000 0.4444 10
647
+ bkl 0.2727 0.3000 0.2857 10
648
+ df 0.5714 0.6667 0.6154 12
649
+ mel 0.2667 0.4444 0.3333 9
650
+ nv 0.6667 0.8750 0.7568 16
651
+ vasc 0.7500 1.0000 0.8571 9
652
+
653
+ accuracy 0.5185 81
654
+ macro avg 0.4325 0.5266 0.4704 81
655
+ weighted avg 0.4247 0.5185 0.4631 81
656
+
657
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
658
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 69.14%
659
+ [Epoch 9], [Batch 0 / 40], [Loss 0.11325355619192123]
660
+ precision recall f1-score support
661
+
662
+ akiec 0.7500 0.2000 0.3158 15
663
+ bcc 0.2941 0.5000 0.3704 10
664
+ bkl 0.1622 0.6000 0.2553 10
665
+ df 1.0000 0.1667 0.2857 12
666
+ mel 1.0000 0.2222 0.3636 9
667
+ nv 0.8333 0.6250 0.7143 16
668
+ vasc 0.8571 0.6667 0.7500 9
669
+
670
+ accuracy 0.4198 81
671
+ macro avg 0.6995 0.4258 0.4364 81
672
+ weighted avg 0.7143 0.4198 0.4429 81
673
+
674
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
675
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 69.14%
676
+ [Epoch 10], [Batch 0 / 40], [Loss 0.17134839296340942]
677
+ precision recall f1-score support
678
+
679
+ akiec 1.0000 0.1333 0.2353 15
680
+ bcc 0.4762 1.0000 0.6452 10
681
+ bkl 0.5714 0.4000 0.4706 10
682
+ df 0.7143 0.8333 0.7692 12
683
+ mel 0.6000 0.3333 0.4286 9
684
+ nv 0.6364 0.8750 0.7368 16
685
+ vasc 0.9000 1.0000 0.9474 9
686
+
687
+ accuracy 0.6420 81
688
+ macro avg 0.6998 0.6536 0.6047 81
689
+ weighted avg 0.7127 0.6420 0.5937 81
690
+
691
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
692
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 69.14%
693
+ [Epoch 11], [Batch 0 / 40], [Loss 0.058563705533742905]
694
+ precision recall f1-score support
695
+
696
+ akiec 0.4737 0.6000 0.5294 15
697
+ bcc 0.5000 0.6000 0.5455 10
698
+ bkl 0.2632 0.5000 0.3448 10
699
+ df 1.0000 0.5000 0.6667 12
700
+ mel 0.6667 0.2222 0.3333 9
701
+ nv 0.8333 0.6250 0.7143 16
702
+ vasc 0.9000 1.0000 0.9474 9
703
+
704
+ accuracy 0.5802 81
705
+ macro avg 0.6624 0.5782 0.5830 81
706
+ weighted avg 0.6688 0.5802 0.5901 81
707
+
708
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
709
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 69.14%
710
+ [Epoch 12], [Batch 0 / 40], [Loss 0.034065358340740204]
711
+ precision recall f1-score support
712
+
713
+ akiec 0.7000 0.4667 0.5600 15
714
+ bcc 0.4348 1.0000 0.6061 10
715
+ bkl 0.4286 0.6000 0.5000 10
716
+ df 0.8333 0.4167 0.5556 12
717
+ mel 0.8000 0.4444 0.5714 9
718
+ nv 0.8462 0.6875 0.7586 16
719
+ vasc 0.9000 1.0000 0.9474 9
720
+
721
+ accuracy 0.6420 81
722
+ macro avg 0.7061 0.6593 0.6427 81
723
+ weighted avg 0.7157 0.6420 0.6412 81
724
+
725
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
726
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 69.14%
727
+ [Epoch 13], [Batch 0 / 40], [Loss 0.04914743825793266]
728
+ precision recall f1-score support
729
+
730
+ akiec 0.6667 0.2667 0.3810 15
731
+ bcc 0.6667 0.6000 0.6316 10
732
+ bkl 0.3077 0.4000 0.3478 10
733
+ df 0.8889 0.6667 0.7619 12
734
+ mel 0.3000 0.6667 0.4138 9
735
+ nv 1.0000 0.8125 0.8966 16
736
+ vasc 0.8182 1.0000 0.9000 9
737
+
738
+ accuracy 0.6173 81
739
+ macro avg 0.6640 0.6304 0.6189 81
740
+ weighted avg 0.6972 0.6173 0.6274 81
741
+
742
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
743
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 69.14%
744
+ [Epoch 14], [Batch 0 / 40], [Loss 0.023914232850074768]
745
+ precision recall f1-score support
746
+
747
+ akiec 0.0000 0.0000 0.0000 15
748
+ bcc 0.3000 0.3000 0.3000 10
749
+ bkl 0.1795 0.7000 0.2857 10
750
+ df 0.0000 0.0000 0.0000 12
751
+ mel 0.5000 0.5556 0.5263 9
752
+ nv 0.8000 0.5000 0.6154 16
753
+ vasc 0.7500 1.0000 0.8571 9
754
+
755
+ accuracy 0.3951 81
756
+ macro avg 0.3614 0.4365 0.3692 81
757
+ weighted avg 0.3561 0.3951 0.3476 81
758
+
759
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
760
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 69.14%
761
+ [Epoch 15], [Batch 0 / 40], [Loss 0.03596542775630951]
762
+ precision recall f1-score support
763
+
764
+ akiec 0.5385 0.4667 0.5000 15
765
+ bcc 0.6667 0.8000 0.7273 10
766
+ bkl 0.7500 0.3000 0.4286 10
767
+ df 0.5000 0.7500 0.6000 12
768
+ mel 0.6000 0.3333 0.4286 9
769
+ nv 0.7059 0.7500 0.7273 16
770
+ vasc 0.7500 1.0000 0.8571 9
771
+
772
+ accuracy 0.6296 81
773
+ macro avg 0.6444 0.6286 0.6098 81
774
+ weighted avg 0.6381 0.6296 0.6107 81
775
+
776
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
777
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 69.14%
778
+ [Epoch 16], [Batch 0 / 40], [Loss 0.049507200717926025]
779
+ precision recall f1-score support
780
+
781
+ akiec 0.6667 0.5333 0.5926 15
782
+ bcc 0.5333 0.8000 0.6400 10
783
+ bkl 0.4545 0.5000 0.4762 10
784
+ df 0.6923 0.7500 0.7200 12
785
+ mel 0.6667 0.4444 0.5333 9
786
+ nv 0.9286 0.8125 0.8667 16
787
+ vasc 0.9000 1.0000 0.9474 9
788
+
789
+ accuracy 0.6914 81
790
+ macro avg 0.6917 0.6915 0.6823 81
791
+ weighted avg 0.7055 0.6914 0.6899 81
792
+
793
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
794
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 69.14%
795
+ [Epoch 17], [Batch 0 / 40], [Loss 0.009000462479889393]
796
+ precision recall f1-score support
797
+
798
+ akiec 0.7778 0.4667 0.5833 15
799
+ bcc 0.6364 0.7000 0.6667 10
800
+ bkl 0.4000 0.4000 0.4000 10
801
+ df 0.8000 0.6667 0.7273 12
802
+ mel 0.3000 0.6667 0.4138 9
803
+ nv 0.9167 0.6875 0.7857 16
804
+ vasc 0.8889 0.8889 0.8889 9
805
+
806
+ accuracy 0.6296 81
807
+ macro avg 0.6742 0.6395 0.6380 81
808
+ weighted avg 0.7037 0.6296 0.6474 81
809
+
810
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
811
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 69.14%
812
+ [Epoch 18], [Batch 0 / 40], [Loss 0.014917709864675999]
813
+ precision recall f1-score support
814
+
815
+ akiec 0.8889 0.5333 0.6667 15
816
+ bcc 0.6429 0.9000 0.7500 10
817
+ bkl 0.5455 0.6000 0.5714 10
818
+ df 0.9000 0.7500 0.8182 12
819
+ mel 0.5000 0.6667 0.5714 9
820
+ nv 0.8667 0.8125 0.8387 16
821
+ vasc 0.9000 1.0000 0.9474 9
822
+
823
+ accuracy 0.7407 81
824
+ macro avg 0.7491 0.7518 0.7377 81
825
+ weighted avg 0.7714 0.7407 0.7422 81
826
+
827
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth
828
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 74.07%
829
+ [Epoch 19], [Batch 0 / 40], [Loss 0.012562758289277554]
830
+ precision recall f1-score support
831
+
832
+ akiec 0.8462 0.7333 0.7857 15
833
+ bcc 0.6667 0.8000 0.7273 10
834
+ bkl 0.6000 0.6000 0.6000 10
835
+ df 0.8000 0.6667 0.7273 12
836
+ mel 0.3571 0.5556 0.4348 9
837
+ nv 0.9167 0.6875 0.7857 16
838
+ vasc 0.9000 1.0000 0.9474 9
839
+
840
+ accuracy 0.7160 81
841
+ macro avg 0.7267 0.7204 0.7154 81
842
+ weighted avg 0.7523 0.7160 0.7259 81
843
+
844
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
845
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 74.07%
846
+ [Epoch 20], [Batch 0 / 40], [Loss 0.014114274643361568]
847
+ precision recall f1-score support
848
+
849
+ akiec 0.7778 0.4667 0.5833 15
850
+ bcc 0.8571 0.6000 0.7059 10
851
+ bkl 0.5000 0.5000 0.5000 10
852
+ df 0.6429 0.7500 0.6923 12
853
+ mel 0.4286 0.6667 0.5217 9
854
+ nv 0.7647 0.8125 0.7879 16
855
+ vasc 0.9000 1.0000 0.9474 9
856
+
857
+ accuracy 0.6790 81
858
+ macro avg 0.6959 0.6851 0.6769 81
859
+ weighted avg 0.7055 0.6790 0.6783 81
860
+
861
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
862
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 74.07%
863
+ [Epoch 21], [Batch 0 / 40], [Loss 0.012415341101586819]
864
+ precision recall f1-score support
865
+
866
+ akiec 0.8889 0.5333 0.6667 15
867
+ bcc 0.8000 0.8000 0.8000 10
868
+ bkl 0.5000 0.5000 0.5000 10
869
+ df 0.8333 0.8333 0.8333 12
870
+ mel 0.3571 0.5556 0.4348 9
871
+ nv 0.8750 0.8750 0.8750 16
872
+ vasc 0.9000 1.0000 0.9474 9
873
+
874
+ accuracy 0.7284 81
875
+ macro avg 0.7363 0.7282 0.7225 81
876
+ weighted avg 0.7611 0.7284 0.7338 81
877
+
878
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
879
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 74.07%
880
+ [Epoch 22], [Batch 0 / 40], [Loss 0.005975313950330019]
881
+ precision recall f1-score support
882
+
883
+ akiec 0.9167 0.7333 0.8148 15
884
+ bcc 0.8000 0.8000 0.8000 10
885
+ bkl 0.5000 0.7000 0.5833 10
886
+ df 0.9167 0.9167 0.9167 12
887
+ mel 0.4545 0.5556 0.5000 9
888
+ nv 1.0000 0.7500 0.8571 16
889
+ vasc 0.9000 1.0000 0.9474 9
890
+
891
+ accuracy 0.7778 81
892
+ macro avg 0.7840 0.7794 0.7742 81
893
+ weighted avg 0.8141 0.7778 0.7876 81
894
+
895
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth
896
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
897
+ [Epoch 23], [Batch 0 / 40], [Loss 0.004292001016438007]
898
+ precision recall f1-score support
899
+
900
+ akiec 0.8182 0.6000 0.6923 15
901
+ bcc 0.7273 0.8000 0.7619 10
902
+ bkl 0.5714 0.8000 0.6667 10
903
+ df 0.7500 0.7500 0.7500 12
904
+ mel 0.6667 0.6667 0.6667 9
905
+ nv 0.8571 0.7500 0.8000 16
906
+ vasc 0.9000 1.0000 0.9474 9
907
+
908
+ accuracy 0.7531 81
909
+ macro avg 0.7558 0.7667 0.7550 81
910
+ weighted avg 0.7663 0.7531 0.7530 81
911
+
912
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
913
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
914
+ [Epoch 24], [Batch 0 / 40], [Loss 0.008238147012889385]
915
+ precision recall f1-score support
916
+
917
+ akiec 0.6667 0.5333 0.5926 15
918
+ bcc 0.5625 0.9000 0.6923 10
919
+ bkl 0.6667 0.6000 0.6316 10
920
+ df 0.8182 0.7500 0.7826 12
921
+ mel 0.8571 0.6667 0.7500 9
922
+ nv 0.8125 0.8125 0.8125 16
923
+ vasc 0.9000 1.0000 0.9474 9
924
+
925
+ accuracy 0.7407 81
926
+ macro avg 0.7548 0.7518 0.7441 81
927
+ weighted avg 0.7521 0.7407 0.7382 81
928
+
929
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
930
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
931
+ [Epoch 25], [Batch 0 / 40], [Loss 0.003627797355875373]
932
+ precision recall f1-score support
933
+
934
+ akiec 0.7333 0.7333 0.7333 15
935
+ bcc 0.6923 0.9000 0.7826 10
936
+ bkl 0.5556 0.5000 0.5263 10
937
+ df 0.7500 0.7500 0.7500 12
938
+ mel 0.6250 0.5556 0.5882 9
939
+ nv 0.8571 0.7500 0.8000 16
940
+ vasc 0.9000 1.0000 0.9474 9
941
+
942
+ accuracy 0.7407 81
943
+ macro avg 0.7305 0.7413 0.7326 81
944
+ weighted avg 0.7397 0.7407 0.7372 81
945
+
946
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
947
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
948
+ [Epoch 26], [Batch 0 / 40], [Loss 0.0014865432167425752]
949
+ precision recall f1-score support
950
+
951
+ akiec 0.6667 0.6667 0.6667 15
952
+ bcc 0.6429 0.9000 0.7500 10
953
+ bkl 0.5556 0.5000 0.5263 10
954
+ df 0.8000 0.6667 0.7273 12
955
+ mel 0.5000 0.5556 0.5263 9
956
+ nv 0.9231 0.7500 0.8276 16
957
+ vasc 0.9000 1.0000 0.9474 9
958
+
959
+ accuracy 0.7160 81
960
+ macro avg 0.7126 0.7198 0.7102 81
961
+ weighted avg 0.7278 0.7160 0.7160 81
962
+
963
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
964
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
965
+ [Epoch 27], [Batch 0 / 40], [Loss 0.004643445368856192]
966
+ precision recall f1-score support
967
+
968
+ akiec 0.7143 0.3333 0.4545 15
969
+ bcc 0.6923 0.9000 0.7826 10
970
+ bkl 0.6667 0.6000 0.6316 10
971
+ df 0.6667 0.8333 0.7407 12
972
+ mel 0.5833 0.7778 0.6667 9
973
+ nv 0.8667 0.8125 0.8387 16
974
+ vasc 0.9000 1.0000 0.9474 9
975
+
976
+ accuracy 0.7284 81
977
+ macro avg 0.7271 0.7510 0.7232 81
978
+ weighted avg 0.7348 0.7284 0.7135 81
979
+
980
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
981
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
982
+ [Epoch 28], [Batch 0 / 40], [Loss 0.002596722450107336]
983
+ precision recall f1-score support
984
+
985
+ akiec 0.7778 0.4667 0.5833 15
986
+ bcc 0.5333 0.8000 0.6400 10
987
+ bkl 0.5833 0.7000 0.6364 10
988
+ df 0.7273 0.6667 0.6957 12
989
+ mel 0.6250 0.5556 0.5882 9
990
+ nv 0.8750 0.8750 0.8750 16
991
+ vasc 0.9000 1.0000 0.9474 9
992
+
993
+ accuracy 0.7160 81
994
+ macro avg 0.7174 0.7234 0.7094 81
995
+ weighted avg 0.7319 0.7160 0.7121 81
996
+
997
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
998
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
999
+ [Epoch 29], [Batch 0 / 40], [Loss 0.007674324791878462]
1000
+ precision recall f1-score support
1001
+
1002
+ akiec 0.6250 0.3333 0.4348 15
1003
+ bcc 0.8000 0.8000 0.8000 10
1004
+ bkl 0.3333 0.5000 0.4000 10
1005
+ df 0.6154 0.6667 0.6400 12
1006
+ mel 0.4000 0.6667 0.5000 9
1007
+ nv 0.9091 0.6250 0.7407 16
1008
+ vasc 0.8889 0.8889 0.8889 9
1009
+
1010
+ accuracy 0.6173 81
1011
+ macro avg 0.6531 0.6401 0.6292 81
1012
+ weighted avg 0.6696 0.6173 0.6241 81
1013
+
1014
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
1015
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
1016
+ [Epoch 30], [Batch 0 / 40], [Loss 0.0040101222693920135]
1017
+ precision recall f1-score support
1018
+
1019
+ akiec 0.7143 0.3333 0.4545 15
1020
+ bcc 0.8000 0.8000 0.8000 10
1021
+ bkl 0.5385 0.7000 0.6087 10
1022
+ df 0.7692 0.8333 0.8000 12
1023
+ mel 0.3636 0.4444 0.4000 9
1024
+ nv 0.8750 0.8750 0.8750 16
1025
+ vasc 0.8182 1.0000 0.9000 9
1026
+
1027
+ accuracy 0.7037 81
1028
+ macro avg 0.6970 0.7123 0.6912 81
1029
+ weighted avg 0.7156 0.7037 0.6939 81
1030
+
1031
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
1032
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
1033
+ [Epoch 31], [Batch 0 / 40], [Loss 0.018934518098831177]
1034
+ precision recall f1-score support
1035
+
1036
+ akiec 0.6667 0.5333 0.5926 15
1037
+ bcc 0.5294 0.9000 0.6667 10
1038
+ bkl 0.5714 0.4000 0.4706 10
1039
+ df 0.8182 0.7500 0.7826 12
1040
+ mel 0.5000 0.5556 0.5263 9
1041
+ nv 0.8667 0.8125 0.8387 16
1042
+ vasc 1.0000 1.0000 1.0000 9
1043
+
1044
+ accuracy 0.7037 81
1045
+ macro avg 0.7075 0.7073 0.6968 81
1046
+ weighted avg 0.7184 0.7037 0.7013 81
1047
+
1048
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
1049
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
1050
+ [Epoch 32], [Batch 0 / 40], [Loss 0.009038643911480904]
1051
+ precision recall f1-score support
1052
+
1053
+ akiec 0.6429 0.6000 0.6207 15
1054
+ bcc 0.5714 0.8000 0.6667 10
1055
+ bkl 0.5000 0.4000 0.4444 10
1056
+ df 0.8889 0.6667 0.7619 12
1057
+ mel 0.3846 0.5556 0.4545 9
1058
+ nv 0.9286 0.8125 0.8667 16
1059
+ vasc 1.0000 1.0000 1.0000 9
1060
+
1061
+ accuracy 0.6914 81
1062
+ macro avg 0.7023 0.6907 0.6878 81
1063
+ weighted avg 0.7203 0.6914 0.6978 81
1064
+
1065
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
1066
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
1067
+ [Epoch 33], [Batch 0 / 40], [Loss 0.0041113984771072865]
1068
+ precision recall f1-score support
1069
+
1070
+ akiec 0.7273 0.5333 0.6154 15
1071
+ bcc 0.5333 0.8000 0.6400 10
1072
+ bkl 0.6000 0.6000 0.6000 10
1073
+ df 0.7273 0.6667 0.6957 12
1074
+ mel 0.4545 0.5556 0.5000 9
1075
+ nv 0.9286 0.8125 0.8667 16
1076
+ vasc 1.0000 1.0000 1.0000 9
1077
+
1078
+ accuracy 0.7037 81
1079
+ macro avg 0.7101 0.7097 0.7025 81
1080
+ weighted avg 0.7274 0.7037 0.7080 81
1081
+
1082
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
1083
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
1084
+ [Epoch 34], [Batch 0 / 40], [Loss 0.0029135795775800943]
1085
+ precision recall f1-score support
1086
+
1087
+ akiec 0.6429 0.6000 0.6207 15
1088
+ bcc 0.7273 0.8000 0.7619 10
1089
+ bkl 0.4000 0.4000 0.4000 10
1090
+ df 0.8889 0.6667 0.7619 12
1091
+ mel 0.4167 0.5556 0.4762 9
1092
+ nv 0.8125 0.8125 0.8125 16
1093
+ vasc 1.0000 1.0000 1.0000 9
1094
+
1095
+ accuracy 0.6914 81
1096
+ macro avg 0.6983 0.6907 0.6905 81
1097
+ weighted avg 0.7078 0.6914 0.6958 81
1098
+
1099
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
1100
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
1101
+ [Epoch 35], [Batch 0 / 40], [Loss 0.0028080192860215902]
1102
+ precision recall f1-score support
1103
+
1104
+ akiec 0.7273 0.5333 0.6154 15
1105
+ bcc 0.6154 0.8000 0.6957 10
1106
+ bkl 0.5000 0.6000 0.5455 10
1107
+ df 0.8000 0.6667 0.7273 12
1108
+ mel 0.5000 0.5556 0.5263 9
1109
+ nv 0.8000 0.7500 0.7742 16
1110
+ vasc 0.9000 1.0000 0.9474 9
1111
+
1112
+ accuracy 0.6914 81
1113
+ macro avg 0.6918 0.7008 0.6902 81
1114
+ weighted avg 0.7045 0.6914 0.6916 81
1115
+
1116
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
1117
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
1118
+ [Epoch 36], [Batch 0 / 40], [Loss 0.005959199741482735]
1119
+ precision recall f1-score support
1120
+
1121
+ akiec 0.7143 0.6667 0.6897 15
1122
+ bcc 0.7273 0.8000 0.7619 10
1123
+ bkl 0.4545 0.5000 0.4762 10
1124
+ df 0.8000 0.6667 0.7273 12
1125
+ mel 0.4167 0.5556 0.4762 9
1126
+ nv 0.8462 0.6875 0.7586 16
1127
+ vasc 0.9000 1.0000 0.9474 9
1128
+
1129
+ accuracy 0.6914 81
1130
+ macro avg 0.6941 0.6966 0.6910 81
1131
+ weighted avg 0.7101 0.6914 0.6963 81
1132
+
1133
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
1134
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
1135
+ [Epoch 37], [Batch 0 / 40], [Loss 0.0016287051839753985]
1136
+ precision recall f1-score support
1137
+
1138
+ akiec 0.7000 0.4667 0.5600 15
1139
+ bcc 0.6154 0.8000 0.6957 10
1140
+ bkl 0.5455 0.6000 0.5714 10
1141
+ df 0.8182 0.7500 0.7826 12
1142
+ mel 0.4545 0.5556 0.5000 9
1143
+ nv 0.8667 0.8125 0.8387 16
1144
+ vasc 0.9000 1.0000 0.9474 9
1145
+
1146
+ accuracy 0.7037 81
1147
+ macro avg 0.7000 0.7121 0.6994 81
1148
+ weighted avg 0.7159 0.7037 0.7026 81
1149
+
1150
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
1151
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
1152
+ [Epoch 38], [Batch 0 / 40], [Loss 0.0018719729268923402]
1153
+ precision recall f1-score support
1154
+
1155
+ akiec 0.6667 0.5333 0.5926 15
1156
+ bcc 0.6154 0.8000 0.6957 10
1157
+ bkl 0.4444 0.4000 0.4211 10
1158
+ df 0.7273 0.6667 0.6957 12
1159
+ mel 0.4167 0.5556 0.4762 9
1160
+ nv 0.9286 0.8125 0.8667 16
1161
+ vasc 0.9000 1.0000 0.9474 9
1162
+
1163
+ accuracy 0.6790 81
1164
+ macro avg 0.6713 0.6812 0.6707 81
1165
+ weighted avg 0.6918 0.6790 0.6800 81
1166
+
1167
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
1168
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
1169
+ [Epoch 39], [Batch 0 / 40], [Loss 0.0011942997807636857]
1170
+ precision recall f1-score support
1171
+
1172
+ akiec 0.7143 0.6667 0.6897 15
1173
+ bcc 0.6154 0.8000 0.6957 10
1174
+ bkl 0.5455 0.6000 0.5714 10
1175
+ df 0.8000 0.6667 0.7273 12
1176
+ mel 0.4545 0.5556 0.5000 9
1177
+ nv 0.9167 0.6875 0.7857 16
1178
+ vasc 0.9000 1.0000 0.9474 9
1179
+
1180
+ accuracy 0.7037 81
1181
+ macro avg 0.7066 0.7109 0.7024 81
1182
+ weighted avg 0.7257 0.7037 0.7079 81
1183
+
1184
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/last_model.pth
1185
+ [densenet121] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-14-52-26/best_model.pth - Accuracy 77.78%
1186
+ precision recall f1-score support
1187
+
1188
+ akiec 0.8000 0.5333 0.6400 15
1189
+ bcc 0.6154 0.8000 0.6957 10
1190
+ bkl 0.5000 0.7000 0.5833 10
1191
+ df 0.9167 0.9167 0.9167 12
1192
+ mel 0.4545 0.5556 0.5000 9
1193
+ nv 1.0000 0.6875 0.8148 16
1194
+ vasc 0.9000 1.0000 0.9474 9
1195
+
1196
+ accuracy 0.7284 81
1197
+ macro avg 0.7409 0.7419 0.7283 81
1198
+ weighted avg 0.7697 0.7284 0.7340 81
1199
+
models/DenseNet121/logs/test_logs_acc_2021-12-12-14-52-26.txt ADDED
@@ -0,0 +1,40 @@
+ 0,0.2839506172839506
+ 1,0.345679012345679
+ 2,0.5802469135802469
+ 3,0.4691358024691358
+ 4,0.5185185185185185
+ 5,0.5061728395061729
+ 6,0.5679012345679012
+ 7,0.691358024691358
+ 8,0.5185185185185185
+ 9,0.41975308641975306
+ 10,0.6419753086419753
+ 11,0.5802469135802469
+ 12,0.6419753086419753
+ 13,0.6172839506172839
+ 14,0.3950617283950617
+ 15,0.6296296296296297
+ 16,0.691358024691358
+ 17,0.6296296296296297
+ 18,0.7407407407407407
+ 19,0.7160493827160493
+ 20,0.6790123456790124
+ 21,0.7283950617283951
+ 22,0.7777777777777778
+ 23,0.7530864197530864
+ 24,0.7407407407407407
+ 25,0.7407407407407407
+ 26,0.7160493827160493
+ 27,0.7283950617283951
+ 28,0.7160493827160493
+ 29,0.6172839506172839
+ 30,0.7037037037037037
+ 31,0.7037037037037037
+ 32,0.691358024691358
+ 33,0.7037037037037037
+ 34,0.691358024691358
+ 35,0.691358024691358
+ 36,0.691358024691358
+ 37,0.7037037037037037
+ 38,0.6790123456790124
+ 39,0.7037037037037037
models/DenseNet121/logs/train_logs_acc_2021-12-12-14-52-26.txt ADDED
@@ -0,0 +1,40 @@
+ 0,0.4889502762430939
+ 1,0.7099447513812155
+ 2,0.7279005524861878
+ 3,0.787292817679558
+ 4,0.8411602209944752
+ 5,0.8825966850828729
+ 6,0.9406077348066298
+ 7,0.9475138121546961
+ 8,0.9378453038674033
+ 9,0.9461325966850829
+ 10,0.962707182320442
+ 11,0.9834254143646409
+ 12,0.9917127071823204
+ 13,0.9848066298342542
+ 14,0.9861878453038674
+ 15,0.9861878453038674
+ 16,0.9903314917127072
+ 17,0.994475138121547
+ 18,0.9917127071823204
+ 19,0.9917127071823204
+ 20,0.9917127071823204
+ 21,0.9972375690607734
+ 22,0.9972375690607734
+ 23,1.0
+ 24,1.0
+ 25,1.0
+ 26,0.9986187845303868
+ 27,0.9986187845303868
+ 28,0.994475138121547
+ 29,0.9903314917127072
+ 30,0.994475138121547
+ 31,1.0
+ 32,0.9986187845303868
+ 33,1.0
+ 34,1.0
+ 35,1.0
+ 36,1.0
+ 37,1.0
+ 38,1.0
+ 39,1.0
models/DenseNet121/logs/train_logs_loss_2021-12-12-14-52-26.txt ADDED
@@ -0,0 +1,40 @@
+ 0,1.3115723133087158
+ 1,0.8430644273757935
+ 2,0.7275338172912598
+ 3,0.5839248895645142
+ 4,0.43988701701164246
+ 5,0.3507845401763916
+ 6,0.2043382227420807
+ 7,0.15382269024848938
+ 8,0.19266119599342346
+ 9,0.15648579597473145
+ 10,0.12639513611793518
+ 11,0.08402916043996811
+ 12,0.04659491032361984
+ 13,0.06116586923599243
+ 14,0.07438090443611145
+ 15,0.05328061804175377
+ 16,0.039055921137332916
+ 17,0.034553162753582
+ 18,0.03516067564487457
+ 19,0.03217845782637596
+ 20,0.03015070967376232
+ 21,0.016602592542767525
+ 22,0.013648818247020245
+ 23,0.0056058987975120544
+ 24,0.005661854520440102
+ 25,0.007601853460073471
+ 26,0.008942866697907448
+ 27,0.01236275490373373
+ 28,0.019639354199171066
+ 29,0.03432326763868332
+ 30,0.022994978353381157
+ 31,0.012169293127954006
+ 32,0.008412164635956287
+ 33,0.003678540699183941
+ 34,0.004635999910533428
+ 35,0.004215737339109182
+ 36,0.002594105899333954
+ 37,0.0016405590577051044
+ 38,0.004391920752823353
+ 39,0.0025029259268194437
models/MobileNetV2/best_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b54066967da855a7e9487c661221a17689a9fddffa15982ef63ad10b56178428
+ size 9198861
models/MobileNetV2/config.json ADDED
@@ -0,0 +1,25 @@
+ {
+ "num_classes": 7,
+ "hidden_size": 1280,
+ "id2label": {
+ "0": "akiec",
+ "1": "bcc",
+ "2": "bkl",
+ "3": "df",
+ "4": "mel",
+ "5": "nv",
+ "6": "vasc"
+ },
+ "label2id": {
+ "akiec": "0",
+ "bcc": "1",
+ "bkl": "2",
+ "df": "3",
+ "mel": "4",
+ "nv": "5",
+ "vasc": "6"
+ },
+ "architectures": [
+ "mobilenet_v2"
+ ]
+ }
models/MobileNetV2/logs/logs_2021-12-12-15-41-03.txt ADDED
@@ -0,0 +1,984 @@
1
+ ==================================================
2
+ Model architecture:
3
+ ==================================================
4
+ MobileNetV2(
5
+ (features): Sequential(
6
+ (0): ConvBNActivation(
7
+ (0): Conv2d(3, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
8
+ (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
9
+ (2): ReLU6(inplace=True)
10
+ )
11
+ (1): InvertedResidual(
12
+ (conv): Sequential(
13
+ (0): ConvBNActivation(
14
+ (0): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=32, bias=False)
15
+ (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
16
+ (2): ReLU6(inplace=True)
17
+ )
18
+ (1): Conv2d(32, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)
19
+ (2): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
20
+ )
21
+ )
22
+ (2): InvertedResidual(
23
+ (conv): Sequential(
24
+ (0): ConvBNActivation(
25
+ (0): Conv2d(16, 96, kernel_size=(1, 1), stride=(1, 1), bias=False)
26
+ (1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
27
+ (2): ReLU6(inplace=True)
28
+ )
29
+ (1): ConvBNActivation(
30
+ (0): Conv2d(96, 96, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=96, bias=False)
31
+ (1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
32
+ (2): ReLU6(inplace=True)
33
+ )
34
+ (2): Conv2d(96, 24, kernel_size=(1, 1), stride=(1, 1), bias=False)
35
+ (3): BatchNorm2d(24, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
36
+ )
37
+ )
38
+ (3): InvertedResidual(
39
+ (conv): Sequential(
40
+ (0): ConvBNActivation(
41
+ (0): Conv2d(24, 144, kernel_size=(1, 1), stride=(1, 1), bias=False)
42
+ (1): BatchNorm2d(144, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
43
+ (2): ReLU6(inplace=True)
44
+ )
45
+ (1): ConvBNActivation(
46
+ (0): Conv2d(144, 144, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=144, bias=False)
47
+ (1): BatchNorm2d(144, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
48
+ (2): ReLU6(inplace=True)
49
+ )
50
+ (2): Conv2d(144, 24, kernel_size=(1, 1), stride=(1, 1), bias=False)
51
+ (3): BatchNorm2d(24, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
52
+ )
53
+ )
54
+ (4): InvertedResidual(
55
+ (conv): Sequential(
56
+ (0): ConvBNActivation(
57
+ (0): Conv2d(24, 144, kernel_size=(1, 1), stride=(1, 1), bias=False)
58
+ (1): BatchNorm2d(144, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
59
+ (2): ReLU6(inplace=True)
60
+ )
61
+ (1): ConvBNActivation(
62
+ (0): Conv2d(144, 144, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=144, bias=False)
63
+ (1): BatchNorm2d(144, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
64
+ (2): ReLU6(inplace=True)
65
+ )
66
+ (2): Conv2d(144, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)
67
+ (3): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
68
+ )
69
+ )
70
+ (5): InvertedResidual(
71
+ (conv): Sequential(
72
+ (0): ConvBNActivation(
73
+ (0): Conv2d(32, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)
74
+ (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
75
+ (2): ReLU6(inplace=True)
76
+ )
77
+ (1): ConvBNActivation(
78
+ (0): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=192, bias=False)
79
+ (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
80
+ (2): ReLU6(inplace=True)
81
+ )
82
+ (2): Conv2d(192, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)
83
+ (3): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
84
+ )
85
+ )
86
+ (6): InvertedResidual(
87
+ (conv): Sequential(
88
+ (0): ConvBNActivation(
89
+ (0): Conv2d(32, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)
90
+ (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
91
+ (2): ReLU6(inplace=True)
92
+ )
93
+ (1): ConvBNActivation(
94
+ (0): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=192, bias=False)
95
+ (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
96
+ (2): ReLU6(inplace=True)
97
+ )
98
+ (2): Conv2d(192, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)
99
+ (3): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
100
+ )
101
+ )
102
+ (7): InvertedResidual(
103
+ (conv): Sequential(
104
+ (0): ConvBNActivation(
105
+ (0): Conv2d(32, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)
106
+ (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
107
+ (2): ReLU6(inplace=True)
108
+ )
109
+ (1): ConvBNActivation(
110
+ (0): Conv2d(192, 192, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=192, bias=False)
111
+ (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
112
+ (2): ReLU6(inplace=True)
113
+ )
114
+ (2): Conv2d(192, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
115
+ (3): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
116
+ )
117
+ )
118
+ (8): InvertedResidual(
119
+ (conv): Sequential(
120
+ (0): ConvBNActivation(
121
+ (0): Conv2d(64, 384, kernel_size=(1, 1), stride=(1, 1), bias=False)
122
+ (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
123
+ (2): ReLU6(inplace=True)
124
+ )
125
+ (1): ConvBNActivation(
126
+ (0): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384, bias=False)
127
+ (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
128
+ (2): ReLU6(inplace=True)
129
+ )
130
+ (2): Conv2d(384, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
131
+ (3): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
132
+ )
133
+ )
134
+ (9): InvertedResidual(
135
+ (conv): Sequential(
136
+ (0): ConvBNActivation(
137
+ (0): Conv2d(64, 384, kernel_size=(1, 1), stride=(1, 1), bias=False)
138
+ (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
139
+ (2): ReLU6(inplace=True)
140
+ )
141
+ (1): ConvBNActivation(
142
+ (0): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384, bias=False)
143
+ (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
144
+ (2): ReLU6(inplace=True)
145
+ )
146
+ (2): Conv2d(384, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
147
+ (3): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
148
+ )
149
+ )
150
+ (10): InvertedResidual(
151
+ (conv): Sequential(
152
+ (0): ConvBNActivation(
153
+ (0): Conv2d(64, 384, kernel_size=(1, 1), stride=(1, 1), bias=False)
154
+ (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
155
+ (2): ReLU6(inplace=True)
156
+ )
157
+ (1): ConvBNActivation(
158
+ (0): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384, bias=False)
159
+ (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
160
+ (2): ReLU6(inplace=True)
161
+ )
162
+ (2): Conv2d(384, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
163
+ (3): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
164
+ )
165
+ )
166
+ (11): InvertedResidual(
167
+ (conv): Sequential(
168
+ (0): ConvBNActivation(
169
+ (0): Conv2d(64, 384, kernel_size=(1, 1), stride=(1, 1), bias=False)
170
+ (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
171
+ (2): ReLU6(inplace=True)
172
+ )
173
+ (1): ConvBNActivation(
174
+ (0): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384, bias=False)
175
+ (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
176
+ (2): ReLU6(inplace=True)
177
+ )
178
+ (2): Conv2d(384, 96, kernel_size=(1, 1), stride=(1, 1), bias=False)
179
+ (3): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
180
+ )
181
+ )
182
+ (12): InvertedResidual(
183
+ (conv): Sequential(
184
+ (0): ConvBNActivation(
185
+ (0): Conv2d(96, 576, kernel_size=(1, 1), stride=(1, 1), bias=False)
186
+ (1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
187
+ (2): ReLU6(inplace=True)
188
+ )
189
+ (1): ConvBNActivation(
190
+ (0): Conv2d(576, 576, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=576, bias=False)
191
+ (1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
192
+ (2): ReLU6(inplace=True)
193
+ )
194
+ (2): Conv2d(576, 96, kernel_size=(1, 1), stride=(1, 1), bias=False)
195
+ (3): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
196
+ )
197
+ )
198
+ (13): InvertedResidual(
199
+ (conv): Sequential(
200
+ (0): ConvBNActivation(
201
+ (0): Conv2d(96, 576, kernel_size=(1, 1), stride=(1, 1), bias=False)
202
+ (1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
203
+ (2): ReLU6(inplace=True)
204
+ )
205
+ (1): ConvBNActivation(
206
+ (0): Conv2d(576, 576, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=576, bias=False)
207
+ (1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
208
+ (2): ReLU6(inplace=True)
209
+ )
210
+ (2): Conv2d(576, 96, kernel_size=(1, 1), stride=(1, 1), bias=False)
211
+ (3): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
212
+ )
213
+ )
214
+ (14): InvertedResidual(
215
+ (conv): Sequential(
216
+ (0): ConvBNActivation(
217
+ (0): Conv2d(96, 576, kernel_size=(1, 1), stride=(1, 1), bias=False)
218
+ (1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
219
+ (2): ReLU6(inplace=True)
220
+ )
221
+ (1): ConvBNActivation(
222
+ (0): Conv2d(576, 576, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=576, bias=False)
223
+ (1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
224
+ (2): ReLU6(inplace=True)
225
+ )
226
+ (2): Conv2d(576, 160, kernel_size=(1, 1), stride=(1, 1), bias=False)
227
+ (3): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
228
+ )
229
+ )
230
+ (15): InvertedResidual(
231
+ (conv): Sequential(
232
+ (0): ConvBNActivation(
233
+ (0): Conv2d(160, 960, kernel_size=(1, 1), stride=(1, 1), bias=False)
234
+ (1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
235
+ (2): ReLU6(inplace=True)
236
+ )
237
+ (1): ConvBNActivation(
238
+ (0): Conv2d(960, 960, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=960, bias=False)
239
+ (1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
240
+ (2): ReLU6(inplace=True)
241
+ )
242
+ (2): Conv2d(960, 160, kernel_size=(1, 1), stride=(1, 1), bias=False)
243
+ (3): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
244
+ )
245
+ )
246
+ (16): InvertedResidual(
247
+ (conv): Sequential(
248
+ (0): ConvBNActivation(
249
+ (0): Conv2d(160, 960, kernel_size=(1, 1), stride=(1, 1), bias=False)
250
+ (1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
251
+ (2): ReLU6(inplace=True)
252
+ )
253
+ (1): ConvBNActivation(
254
+ (0): Conv2d(960, 960, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=960, bias=False)
255
+ (1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
256
+ (2): ReLU6(inplace=True)
257
+ )
258
+ (2): Conv2d(960, 160, kernel_size=(1, 1), stride=(1, 1), bias=False)
259
+ (3): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
260
+ )
261
+ )
262
+ (17): InvertedResidual(
263
+ (conv): Sequential(
264
+ (0): ConvBNActivation(
265
+ (0): Conv2d(160, 960, kernel_size=(1, 1), stride=(1, 1), bias=False)
266
+ (1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
267
+ (2): ReLU6(inplace=True)
268
+ )
269
+ (1): ConvBNActivation(
270
+ (0): Conv2d(960, 960, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=960, bias=False)
271
+ (1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
272
+ (2): ReLU6(inplace=True)
273
+ )
274
+ (2): Conv2d(960, 320, kernel_size=(1, 1), stride=(1, 1), bias=False)
275
+ (3): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
276
+ )
277
+ )
278
+ (18): ConvBNActivation(
279
+ (0): Conv2d(320, 1280, kernel_size=(1, 1), stride=(1, 1), bias=False)
280
+ (1): BatchNorm2d(1280, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
281
+ (2): ReLU6(inplace=True)
282
+ )
283
+ )
284
+ (classifier): Sequential(
285
+ (0): Dropout(p=0.2, inplace=False)
286
+ (1): Linear(in_features=1280, out_features=7, bias=True)
287
+ )
288
+ )
289
+ ==================================================
290
+
291
+ [Epoch 0], [Batch 0 / 40], [Loss 1.9437333345413208]
292
+ precision recall f1-score support
293
+
294
+ akiec 0.5000 0.1667 0.2500 6
295
+ bcc 1.0000 0.2500 0.4000 12
296
+ bkl 0.0000 0.0000 0.0000 13
297
+ df 0.2632 0.3571 0.3030 14
298
+ mel 0.0000 0.0000 0.0000 14
299
+ nv 0.3043 0.9333 0.4590 15
300
+ vasc 0.5455 0.8571 0.6667 7
301
+
302
+ accuracy 0.3580 81
303
+ macro avg 0.3733 0.3663 0.2970 81
304
+ weighted avg 0.3342 0.3580 0.2728 81
305
+
306
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth
307
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 35.80%
308
+ [Epoch 1], [Batch 0 / 40], [Loss 0.7436763644218445]
309
+ precision recall f1-score support
310
+
311
+ akiec 0.1250 0.1667 0.1429 6
312
+ bcc 0.6429 0.7500 0.6923 12
313
+ bkl 0.5714 0.3077 0.4000 13
314
+ df 0.5625 0.6429 0.6000 14
315
+ mel 0.0000 0.0000 0.0000 14
316
+ nv 0.6667 0.6667 0.6667 15
317
+ vasc 0.3333 1.0000 0.5000 7
318
+
319
+ accuracy 0.4938 81
320
+ macro avg 0.4145 0.5048 0.4288 81
321
+ weighted avg 0.4457 0.4938 0.4477 81
322
+
323
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth
324
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 49.38%
325
+ [Epoch 2], [Batch 0 / 40], [Loss 0.4846855700016022]
326
+ precision recall f1-score support
327
+
328
+ akiec 0.3750 0.5000 0.4286 6
329
+ bcc 0.8000 0.6667 0.7273 12
330
+ bkl 0.6667 0.4615 0.5455 13
331
+ df 0.5417 0.9286 0.6842 14
332
+ mel 1.0000 0.1429 0.2500 14
333
+ nv 0.6842 0.8667 0.7647 15
334
+ vasc 0.7778 1.0000 0.8750 7
335
+
336
+ accuracy 0.6420 81
337
+ macro avg 0.6922 0.6523 0.6107 81
338
+ weighted avg 0.7137 0.6420 0.6057 81
339
+
340
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth
341
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 64.20%
342
+ [Epoch 3], [Batch 0 / 40], [Loss 0.2661575376987457]
343
+ precision recall f1-score support
344
+
345
+ akiec 0.5000 0.8333 0.6250 6
346
+ bcc 0.8182 0.7500 0.7826 12
347
+ bkl 0.4667 0.5385 0.5000 13
348
+ df 0.7857 0.7857 0.7857 14
349
+ mel 0.6364 0.5000 0.5600 14
350
+ nv 0.7692 0.6667 0.7143 15
351
+ vasc 1.0000 1.0000 1.0000 7
352
+
353
+ accuracy 0.6914 81
354
+ macro avg 0.7109 0.7249 0.7097 81
355
+ weighted avg 0.7078 0.6914 0.6938 81
356
+
357
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth
358
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 69.14%
359
+ [Epoch 4], [Batch 0 / 40], [Loss 0.12722927331924438]
360
+ precision recall f1-score support
361
+
362
+ akiec 0.2353 0.6667 0.3478 6
363
+ bcc 0.8000 0.6667 0.7273 12
364
+ bkl 0.8000 0.3077 0.4444 13
365
+ df 0.8235 1.0000 0.9032 14
366
+ mel 0.5833 0.5000 0.5385 14
367
+ nv 0.7692 0.6667 0.7143 15
368
+ vasc 1.0000 1.0000 1.0000 7
369
+
370
+ accuracy 0.6667 81
371
+ macro avg 0.7159 0.6868 0.6679 81
372
+ weighted avg 0.7364 0.6667 0.6727 81
373
+
374
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
375
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 69.14%
376
+ [Epoch 5], [Batch 0 / 40], [Loss 0.26306378841400146]
377
+ precision recall f1-score support
378
+
379
+ akiec 0.3333 0.5000 0.4000 6
380
+ bcc 0.7692 0.8333 0.8000 12
381
+ bkl 0.4286 0.4615 0.4444 13
382
+ df 0.7059 0.8571 0.7742 14
383
+ mel 0.8333 0.3571 0.5000 14
384
+ nv 0.8000 0.8000 0.8000 15
385
+ vasc 1.0000 1.0000 1.0000 7
386
+
387
+ accuracy 0.6790 81
388
+ macro avg 0.6958 0.6870 0.6741 81
389
+ weighted avg 0.7080 0.6790 0.6743 81
390
+
391
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
392
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 69.14%
393
+ [Epoch 6], [Batch 0 / 40], [Loss 0.26731279492378235]
394
+ precision recall f1-score support
395
+
396
+ akiec 0.2857 0.6667 0.4000 6
397
+ bcc 0.6667 0.5000 0.5714 12
398
+ bkl 0.4615 0.4615 0.4615 13
399
+ df 0.9231 0.8571 0.8889 14
400
+ mel 0.3750 0.4286 0.4000 14
401
+ nv 0.8000 0.5333 0.6400 15
402
+ vasc 1.0000 0.8571 0.9231 7
403
+
404
+ accuracy 0.5926 81
405
+ macro avg 0.6446 0.6149 0.6121 81
406
+ weighted avg 0.6529 0.5926 0.6094 81
407
+
408
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
409
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 69.14%
410
+ [Epoch 7], [Batch 0 / 40], [Loss 0.13903522491455078]
411
+ precision recall f1-score support
412
+
413
+ akiec 0.5000 0.8333 0.6250 6
414
+ bcc 0.7778 0.5833 0.6667 12
415
+ bkl 0.5455 0.4615 0.5000 13
416
+ df 0.9231 0.8571 0.8889 14
417
+ mel 0.7273 0.5714 0.6400 14
418
+ nv 0.7000 0.9333 0.8000 15
419
+ vasc 1.0000 1.0000 1.0000 7
420
+
421
+ accuracy 0.7284 81
422
+ macro avg 0.7391 0.7486 0.7315 81
423
+ weighted avg 0.7411 0.7284 0.7241 81
424
+
425
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth
426
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 72.84%
427
+ [Epoch 8], [Batch 0 / 40], [Loss 0.03321058303117752]
428
+ precision recall f1-score support
429
+
430
+ akiec 0.3333 0.8333 0.4762 6
431
+ bcc 0.6667 0.6667 0.6667 12
432
+ bkl 0.4615 0.4615 0.4615 13
433
+ df 1.0000 0.5714 0.7273 14
434
+ mel 0.4286 0.4286 0.4286 14
435
+ nv 0.7500 0.6000 0.6667 15
436
+ vasc 1.0000 1.0000 1.0000 7
437
+
438
+ accuracy 0.6049 81
439
+ macro avg 0.6629 0.6516 0.6324 81
440
+ weighted avg 0.6698 0.6049 0.6178 81
441
+
442
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
443
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 72.84%
444
+ [Epoch 9], [Batch 0 / 40], [Loss 0.018830040469765663]
445
+ precision recall f1-score support
446
+
447
+ akiec 0.2500 0.5000 0.3333 6
448
+ bcc 0.8750 0.5833 0.7000 12
449
+ bkl 0.3333 0.3846 0.3571 13
450
+ df 0.7857 0.7857 0.7857 14
451
+ mel 0.5000 0.4286 0.4615 14
452
+ nv 0.8462 0.7333 0.7857 15
453
+ vasc 1.0000 1.0000 1.0000 7
454
+
455
+ accuracy 0.6173 81
456
+ macro avg 0.6557 0.6308 0.6319 81
457
+ weighted avg 0.6670 0.6173 0.6332 81
458
+
459
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
460
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 72.84%
461
+ [Epoch 10], [Batch 0 / 40], [Loss 0.04818242788314819]
462
+ precision recall f1-score support
463
+
464
+ akiec 0.4000 0.6667 0.5000 6
465
+ bcc 0.7500 0.7500 0.7500 12
466
+ bkl 1.0000 0.1538 0.2667 13
467
+ df 0.7647 0.9286 0.8387 14
468
+ mel 0.4286 0.6429 0.5143 14
469
+ nv 0.7500 0.6000 0.6667 15
470
+ vasc 1.0000 1.0000 1.0000 7
471
+
472
+ accuracy 0.6543 81
473
+ macro avg 0.7276 0.6774 0.6480 81
474
+ weighted avg 0.7328 0.6543 0.6347 81
475
+
476
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
477
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 72.84%
478
+ [Epoch 11], [Batch 0 / 40], [Loss 0.07165464758872986]
479
+ precision recall f1-score support
480
+
481
+ akiec 0.4444 0.6667 0.5333 6
482
+ bcc 0.7500 0.7500 0.7500 12
483
+ bkl 0.3750 0.4615 0.4138 13
484
+ df 0.9231 0.8571 0.8889 14
485
+ mel 0.5000 0.2857 0.3636 14
486
+ nv 0.8571 0.8000 0.8276 15
487
+ vasc 0.7778 1.0000 0.8750 7
488
+
489
+ accuracy 0.6667 81
490
+ macro avg 0.6611 0.6887 0.6646 81
491
+ weighted avg 0.6761 0.6667 0.6624 81
492
+
493
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
494
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 72.84%
495
+ [Epoch 12], [Batch 0 / 40], [Loss 0.03201916813850403]
496
+ precision recall f1-score support
497
+
498
+ akiec 0.3846 0.8333 0.5263 6
499
+ bcc 1.0000 0.5833 0.7368 12
500
+ bkl 0.5000 0.3846 0.4348 13
501
+ df 1.0000 0.5000 0.6667 14
502
+ mel 0.5000 0.6429 0.5625 14
503
+ nv 0.6667 0.8000 0.7273 15
504
+ vasc 0.8750 1.0000 0.9333 7
505
+
506
+ accuracy 0.6420 81
507
+ macro avg 0.7038 0.6777 0.6554 81
508
+ weighted avg 0.7152 0.6420 0.6457 81
509
+
510
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
511
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 72.84%
512
+ [Epoch 13], [Batch 0 / 40], [Loss 0.051025405526161194]
513
+ precision recall f1-score support
514
+
515
+ akiec 0.5000 0.6667 0.5714 6
516
+ bcc 0.7273 0.6667 0.6957 12
517
+ bkl 0.5714 0.6154 0.5926 13
518
+ df 0.9091 0.7143 0.8000 14
519
+ mel 0.5385 0.5000 0.5185 14
520
+ nv 0.7059 0.8000 0.7500 15
521
+ vasc 1.0000 1.0000 1.0000 7
522
+
523
+ accuracy 0.6914 81
524
+ macro avg 0.7074 0.7090 0.7040 81
525
+ weighted avg 0.7038 0.6914 0.6937 81
526
+
527
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
528
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 72.84%
529
+ [Epoch 14], [Batch 0 / 40], [Loss 0.020453469827771187]
530
+ precision recall f1-score support
531
+
532
+ akiec 0.7500 0.5000 0.6000 6
533
+ bcc 0.8000 0.6667 0.7273 12
534
+ bkl 0.6250 0.7692 0.6897 13
535
+ df 0.8462 0.7857 0.8148 14
536
+ mel 0.6429 0.6429 0.6429 14
537
+ nv 0.7647 0.8667 0.8125 15
538
+ vasc 1.0000 1.0000 1.0000 7
539
+
540
+ accuracy 0.7531 81
541
+ macro avg 0.7755 0.7473 0.7553 81
542
+ weighted avg 0.7598 0.7531 0.7517 81
543
+
544
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth
545
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
546
+ [Epoch 15], [Batch 0 / 40], [Loss 0.11804791539907455]
547
+ precision recall f1-score support
548
+
549
+ akiec 0.4444 0.6667 0.5333 6
550
+ bcc 0.8000 0.6667 0.7273 12
551
+ bkl 0.5000 0.3846 0.4348 13
552
+ df 0.8333 0.7143 0.7692 14
553
+ mel 0.4706 0.5714 0.5161 14
554
+ nv 0.7059 0.8000 0.7500 15
555
+ vasc 1.0000 0.8571 0.9231 7
556
+
557
+ accuracy 0.6543 81
558
+ macro avg 0.6792 0.6658 0.6648 81
559
+ weighted avg 0.6742 0.6543 0.6579 81
560
+
561
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
562
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
563
+ [Epoch 16], [Batch 0 / 40], [Loss 0.02361106500029564]
564
+ precision recall f1-score support
565
+
566
+ akiec 0.5000 0.5000 0.5000 6
567
+ bcc 0.8333 0.8333 0.8333 12
568
+ bkl 0.5455 0.4615 0.5000 13
569
+ df 0.8889 0.5714 0.6957 14
570
+ mel 0.5238 0.7857 0.6286 14
571
+ nv 0.6667 0.5333 0.5926 15
572
+ vasc 0.7000 1.0000 0.8235 7
573
+
574
+ accuracy 0.6543 81
575
+ macro avg 0.6655 0.6693 0.6534 81
576
+ weighted avg 0.6762 0.6543 0.6505 81
577
+
578
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
579
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
580
+ [Epoch 17], [Batch 0 / 40], [Loss 0.06163478642702103]
581
+ precision recall f1-score support
582
+
583
+ akiec 0.3636 0.6667 0.4706 6
584
+ bcc 0.6000 0.7500 0.6667 12
585
+ bkl 0.5714 0.6154 0.5926 13
586
+ df 0.9000 0.6429 0.7500 14
587
+ mel 0.6364 0.5000 0.5600 14
588
+ nv 0.7692 0.6667 0.7143 15
589
+ vasc 1.0000 1.0000 1.0000 7
590
+
591
+ accuracy 0.6667 81
592
+ macro avg 0.6915 0.6917 0.6792 81
593
+ weighted avg 0.7019 0.6667 0.6738 81
594
+
595
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
596
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
597
+ [Epoch 18], [Batch 0 / 40], [Loss 0.08096787333488464]
598
+ precision recall f1-score support
599
+
600
+ akiec 0.4167 0.8333 0.5556 6
601
+ bcc 0.6667 0.6667 0.6667 12
602
+ bkl 0.7500 0.6923 0.7200 13
603
+ df 1.0000 0.7143 0.8333 14
604
+ mel 0.4706 0.5714 0.5161 14
605
+ nv 0.8182 0.6000 0.6923 15
606
+ vasc 1.0000 1.0000 1.0000 7
607
+
608
+ accuracy 0.6914 81
609
+ macro avg 0.7317 0.7254 0.7120 81
610
+ weighted avg 0.7421 0.6914 0.7033 81
611
+
612
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
613
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
614
+ [Epoch 19], [Batch 0 / 40], [Loss 0.061570439487695694]
615
+ precision recall f1-score support
616
+
617
+ akiec 0.4545 0.8333 0.5882 6
618
+ bcc 0.7273 0.6667 0.6957 12
619
+ bkl 0.7273 0.6154 0.6667 13
620
+ df 0.9091 0.7143 0.8000 14
621
+ mel 0.5294 0.6429 0.5806 14
622
+ nv 0.7692 0.6667 0.7143 15
623
+ vasc 1.0000 1.0000 1.0000 7
624
+
625
+ accuracy 0.7037 81
626
+ macro avg 0.7310 0.7342 0.7208 81
627
+ weighted avg 0.7356 0.7037 0.7110 81
628
+
629
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
630
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
631
+ [Epoch 20], [Batch 0 / 40], [Loss 0.010024969466030598]
632
+ precision recall f1-score support
633
+
634
+ akiec 0.3846 0.8333 0.5263 6
635
+ bcc 0.7000 0.5833 0.6364 12
636
+ bkl 0.6000 0.6923 0.6429 13
637
+ df 0.9000 0.6429 0.7500 14
638
+ mel 0.7000 0.5000 0.5833 14
639
+ nv 0.7500 0.8000 0.7742 15
640
+ vasc 1.0000 1.0000 1.0000 7
641
+
642
+ accuracy 0.6914 81
643
+ macro avg 0.7192 0.7217 0.7019 81
644
+ weighted avg 0.7303 0.6914 0.6967 81
645
+
646
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
647
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
648
+ [Epoch 21], [Batch 0 / 40], [Loss 0.03258311748504639]
649
+ precision recall f1-score support
650
+
651
+ akiec 0.2500 0.6667 0.3636 6
652
+ bcc 0.7273 0.6667 0.6957 12
653
+ bkl 0.6364 0.5385 0.5833 13
654
+ df 0.8000 0.5714 0.6667 14
655
+ mel 0.7778 0.5000 0.6087 14
656
+ nv 0.7059 0.8000 0.7500 15
657
+ vasc 1.0000 1.0000 1.0000 7
658
+
659
+ accuracy 0.6543 81
660
+ macro avg 0.6996 0.6776 0.6669 81
661
+ weighted avg 0.7182 0.6543 0.6694 81
662
+
663
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
664
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
665
+ [Epoch 22], [Batch 0 / 40], [Loss 0.009060573764145374]
666
+ precision recall f1-score support
667
+
668
+ akiec 0.3846 0.8333 0.5263 6
669
+ bcc 0.6667 0.6667 0.6667 12
670
+ bkl 0.6667 0.6154 0.6400 13
671
+ df 0.9000 0.6429 0.7500 14
672
+ mel 0.7778 0.5000 0.6087 14
673
+ nv 0.7222 0.8667 0.7879 15
674
+ vasc 1.0000 1.0000 1.0000 7
675
+
676
+ accuracy 0.7037 81
677
+ macro avg 0.7311 0.7321 0.7114 81
678
+ weighted avg 0.7444 0.7037 0.7076 81
679
+
680
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
681
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
682
+ [Epoch 23], [Batch 0 / 40], [Loss 0.007481844164431095]
683
+ precision recall f1-score support
684
+
685
+ akiec 0.2857 0.6667 0.4000 6
686
+ bcc 0.7273 0.6667 0.6957 12
687
+ bkl 0.5455 0.4615 0.5000 13
688
+ df 0.8182 0.6429 0.7200 14
689
+ mel 0.7778 0.5000 0.6087 14
690
+ nv 0.7222 0.8667 0.7879 15
691
+ vasc 1.0000 1.0000 1.0000 7
692
+
693
+ accuracy 0.6667 81
694
+ macro avg 0.6967 0.6863 0.6732 81
695
+ weighted avg 0.7125 0.6667 0.6749 81
696
+
697
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
698
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
699
+ [Epoch 24], [Batch 0 / 40], [Loss 0.003717311192303896]
700
+ precision recall f1-score support
701
+
702
+ akiec 0.3333 0.6667 0.4444 6
703
+ bcc 0.6923 0.7500 0.7200 12
704
+ bkl 0.6364 0.5385 0.5833 13
705
+ df 0.9000 0.6429 0.7500 14
706
+ mel 0.5833 0.5000 0.5385 14
707
+ nv 0.6875 0.7333 0.7097 15
708
+ vasc 1.0000 1.0000 1.0000 7
709
+
710
+ accuracy 0.6667 81
711
+ macro avg 0.6904 0.6902 0.6780 81
712
+ weighted avg 0.6995 0.6667 0.6737 81
713
+
714
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
715
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
716
+ [Epoch 25], [Batch 0 / 40], [Loss 0.00521900225430727]
717
+ precision recall f1-score support
718
+
719
+ akiec 0.3077 0.6667 0.4211 6
720
+ bcc 0.7273 0.6667 0.6957 12
721
+ bkl 0.4444 0.3077 0.3636 13
722
+ df 0.8333 0.7143 0.7692 14
723
+ mel 0.5714 0.5714 0.5714 14
724
+ nv 0.7333 0.7333 0.7333 15
725
+ vasc 1.0000 1.0000 1.0000 7
726
+
727
+ accuracy 0.6420 81
728
+ macro avg 0.6596 0.6657 0.6506 81
729
+ weighted avg 0.6669 0.6420 0.6466 81
730
+
731
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
732
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
733
+ [Epoch 26], [Batch 0 / 40], [Loss 0.002388110850006342]
734
+ precision recall f1-score support
735
+
736
+ akiec 0.3333 0.6667 0.4444 6
737
+ bcc 0.7273 0.6667 0.6957 12
738
+ bkl 0.6667 0.6154 0.6400 13
739
+ df 0.8182 0.6429 0.7200 14
740
+ mel 0.5833 0.5000 0.5385 14
741
+ nv 0.6875 0.7333 0.7097 15
742
+ vasc 1.0000 1.0000 1.0000 7
743
+
744
+ accuracy 0.6667 81
745
+ macro avg 0.6880 0.6893 0.6783 81
746
+ weighted avg 0.6954 0.6667 0.6741 81
747
+
748
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
749
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
750
+ [Epoch 27], [Batch 0 / 40], [Loss 0.0014026282588019967]
751
+ precision recall f1-score support
752
+
753
+ akiec 0.3636 0.6667 0.4706 6
754
+ bcc 0.7273 0.6667 0.6957 12
755
+ bkl 0.6429 0.6923 0.6667 13
756
+ df 0.8333 0.7143 0.7692 14
757
+ mel 0.6364 0.5000 0.5600 14
758
+ nv 0.7333 0.7333 0.7333 15
759
+ vasc 1.0000 1.0000 1.0000 7
760
+
761
+ accuracy 0.6914 81
762
+ macro avg 0.7053 0.7105 0.6994 81
763
+ weighted avg 0.7141 0.6914 0.6969 81
764
+
765
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
766
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
767
+ [Epoch 28], [Batch 0 / 40], [Loss 0.004848898854106665]
768
+ precision recall f1-score support
769
+
770
+ akiec 0.3077 0.6667 0.4211 6
771
+ bcc 0.8000 0.6667 0.7273 12
772
+ bkl 0.6154 0.6154 0.6154 13
773
+ df 0.8333 0.7143 0.7692 14
774
+ mel 0.6364 0.5000 0.5600 14
775
+ nv 0.7333 0.7333 0.7333 15
776
+ vasc 1.0000 1.0000 1.0000 7
777
+
778
+ accuracy 0.6790 81
779
+ macro avg 0.7037 0.6995 0.6895 81
780
+ weighted avg 0.7163 0.6790 0.6897 81
781
+
782
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
783
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
784
+ [Epoch 29], [Batch 0 / 40], [Loss 0.014736742712557316]
785
+ precision recall f1-score support
786
+
787
+ akiec 0.3846 0.8333 0.5263 6
788
+ bcc 0.6667 0.6667 0.6667 12
789
+ bkl 0.5385 0.5385 0.5385 13
790
+ df 1.0000 0.6429 0.7826 14
791
+ mel 0.6667 0.5714 0.6154 14
792
+ nv 0.7500 0.8000 0.7742 15
793
+ vasc 1.0000 0.8571 0.9231 7
794
+
795
+ accuracy 0.6790 81
796
+ macro avg 0.7152 0.7014 0.6895 81
797
+ weighted avg 0.7270 0.6790 0.6889 81
798
+
799
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
800
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
801
+ [Epoch 30], [Batch 0 / 40], [Loss 0.0035465408582240343]
802
+ precision recall f1-score support
803
+
804
+ akiec 0.3846 0.8333 0.5263 6
805
+ bcc 0.8000 0.6667 0.7273 12
806
+ bkl 0.6364 0.5385 0.5833 13
807
+ df 1.0000 0.7143 0.8333 14
808
+ mel 0.6923 0.6429 0.6667 14
809
+ nv 0.7647 0.8667 0.8125 15
810
+ vasc 1.0000 1.0000 1.0000 7
811
+
812
+ accuracy 0.7284 81
813
+ macro avg 0.7540 0.7518 0.7356 81
814
+ weighted avg 0.7697 0.7284 0.7365 81
815
+
816
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
817
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
818
+ [Epoch 31], [Batch 0 / 40], [Loss 0.0014501283876597881]
819
+ precision recall f1-score support
820
+
821
+ akiec 0.3333 0.8333 0.4762 6
822
+ bcc 0.7273 0.6667 0.6957 12
823
+ bkl 0.7500 0.4615 0.5714 13
824
+ df 0.9000 0.6429 0.7500 14
825
+ mel 0.6429 0.6429 0.6429 14
826
+ nv 0.6875 0.7333 0.7097 15
827
+ vasc 1.0000 1.0000 1.0000 7
828
+
829
+ accuracy 0.6790 81
830
+ macro avg 0.7201 0.7115 0.6923 81
831
+ weighted avg 0.7332 0.6790 0.6886 81
832
+
833
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
834
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
835
+ [Epoch 32], [Batch 0 / 40], [Loss 0.0008513450156897306]
836
+ precision recall f1-score support
837
+
838
+ akiec 0.3125 0.8333 0.4545 6
839
+ bcc 0.7500 0.7500 0.7500 12
840
+ bkl 0.6667 0.4615 0.5455 13
841
+ df 1.0000 0.7143 0.8333 14
842
+ mel 0.5000 0.5000 0.5000 14
843
+ nv 0.6923 0.6000 0.6429 15
844
+ vasc 1.0000 1.0000 1.0000 7
845
+
846
+ accuracy 0.6543 81
847
+ macro avg 0.7031 0.6942 0.6752 81
848
+ weighted avg 0.7151 0.6543 0.6682 81
849
+
850
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
851
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
852
+ [Epoch 33], [Batch 0 / 40], [Loss 0.000744424294680357]
853
+ precision recall f1-score support
854
+
855
+ akiec 0.2941 0.8333 0.4348 6
856
+ bcc 0.7273 0.6667 0.6957 12
857
+ bkl 0.6250 0.3846 0.4762 13
858
+ df 0.9091 0.7143 0.8000 14
859
+ mel 0.5714 0.5714 0.5714 14
860
+ nv 0.6923 0.6000 0.6429 15
861
+ vasc 1.0000 1.0000 1.0000 7
862
+
863
+ accuracy 0.6420 81
864
+ macro avg 0.6885 0.6815 0.6601 81
865
+ weighted avg 0.7004 0.6420 0.6542 81
866
+
867
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
868
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
869
+ [Epoch 34], [Batch 0 / 40], [Loss 0.0017604961758479476]
870
+ precision recall f1-score support
871
+
872
+ akiec 0.3571 0.8333 0.5000 6
873
+ bcc 0.8000 0.6667 0.7273 12
874
+ bkl 0.6667 0.6154 0.6400 13
875
+ df 0.9091 0.7143 0.8000 14
876
+ mel 0.5333 0.5714 0.5517 14
877
+ nv 0.7500 0.6000 0.6667 15
878
+ vasc 1.0000 1.0000 1.0000 7
879
+
880
+ accuracy 0.6790 81
881
+ macro avg 0.7166 0.7144 0.6980 81
882
+ weighted avg 0.7266 0.6790 0.6910 81
883
+
884
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
885
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
886
+ [Epoch 35], [Batch 0 / 40], [Loss 0.0005075272056274116]
887
+ precision recall f1-score support
888
+
889
+ akiec 0.3125 0.8333 0.4545 6
890
+ bcc 0.8000 0.6667 0.7273 12
891
+ bkl 0.6154 0.6154 0.6154 13
892
+ df 1.0000 0.7143 0.8333 14
893
+ mel 0.5833 0.5000 0.5385 14
894
+ nv 0.7692 0.6667 0.7143 15
895
+ vasc 1.0000 1.0000 1.0000 7
896
+
897
+ accuracy 0.6790 81
898
+ macro avg 0.7258 0.7138 0.6976 81
899
+ weighted avg 0.7430 0.6790 0.6960 81
900
+
901
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
902
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
903
+ [Epoch 36], [Batch 0 / 40], [Loss 0.0009864452295005322]
904
+ precision recall f1-score support
905
+
906
+ akiec 0.3571 0.8333 0.5000 6
907
+ bcc 0.8000 0.6667 0.7273 12
908
+ bkl 0.6923 0.6923 0.6923 13
909
+ df 0.9167 0.7857 0.8462 14
910
+ mel 0.5000 0.5000 0.5000 14
911
+ nv 0.7273 0.5333 0.6154 15
912
+ vasc 1.0000 1.0000 1.0000 7
913
+
914
+ accuracy 0.6790 81
915
+ macro avg 0.7133 0.7159 0.6973 81
916
+ weighted avg 0.7220 0.6790 0.6889 81
917
+
918
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
919
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
920
+ [Epoch 37], [Batch 0 / 40], [Loss 0.006578400731086731]
921
+ precision recall f1-score support
922
+
923
+ akiec 0.2857 0.6667 0.4000 6
924
+ bcc 0.8000 0.6667 0.7273 12
925
+ bkl 0.6154 0.6154 0.6154 13
926
+ df 0.9000 0.6429 0.7500 14
927
+ mel 0.4667 0.5000 0.4828 14
928
+ nv 0.6667 0.5333 0.5926 15
929
+ vasc 1.0000 1.0000 1.0000 7
930
+
931
+ accuracy 0.6296 81
932
+ macro avg 0.6763 0.6607 0.6526 81
933
+ weighted avg 0.6845 0.6296 0.6454 81
934
+
935
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
936
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
937
+ [Epoch 38], [Batch 0 / 40], [Loss 0.001567276893183589]
938
+ precision recall f1-score support
939
+
940
+ akiec 0.3333 0.6667 0.4444 6
941
+ bcc 0.8000 0.6667 0.7273 12
942
+ bkl 0.6667 0.7692 0.7143 13
943
+ df 0.9091 0.7143 0.8000 14
944
+ mel 0.5833 0.5000 0.5385 14
945
+ nv 0.7857 0.7333 0.7586 15
946
+ vasc 1.0000 1.0000 1.0000 7
947
+
948
+ accuracy 0.7037 81
949
+ macro avg 0.7254 0.7215 0.7119 81
950
+ weighted avg 0.7401 0.7037 0.7135 81
951
+
952
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
953
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
954
+ [Epoch 39], [Batch 0 / 40], [Loss 0.0033837121445685625]
955
+ precision recall f1-score support
956
+
957
+ akiec 0.3636 0.6667 0.4706 6
958
+ bcc 0.8889 0.6667 0.7619 12
959
+ bkl 0.5625 0.6923 0.6207 13
960
+ df 0.9231 0.8571 0.8889 14
961
+ mel 0.6364 0.5000 0.5600 14
962
+ nv 0.7143 0.6667 0.6897 15
963
+ vasc 1.0000 1.0000 1.0000 7
964
+
965
+ accuracy 0.7037 81
966
+ macro avg 0.7270 0.7214 0.7131 81
967
+ weighted avg 0.7371 0.7037 0.7119 81
968
+
969
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/last_model.pth
970
+ [mobilenet_v2] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-41-03/best_model.pth - Accuracy 75.31%
971
+ precision recall f1-score support
972
+
973
+ akiec 0.6667 0.3333 0.4444 6
974
+ bcc 0.7500 0.7500 0.7500 12
975
+ bkl 0.6667 0.7692 0.7143 13
976
+ df 0.7143 0.7143 0.7143 14
977
+ mel 0.7273 0.5714 0.6400 14
978
+ nv 0.7895 1.0000 0.8824 15
979
+ vasc 1.0000 1.0000 1.0000 7
980
+
981
+ accuracy 0.7531 81
982
+ macro avg 0.7592 0.7340 0.7351 81
983
+ weighted avg 0.7493 0.7531 0.7426 81
984
+
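As a reading aid for the classification reports above: the "macro avg" row is the unweighted mean over the seven classes, while the "weighted avg" row weights each class by its support. A minimal sketch that reproduces the two precision averages from the final MobileNetV2 report printed just above; numpy is used here purely for the arithmetic and is not implied to be part of this repository's code.

```python
import numpy as np

# Per-class precision and support copied from the final MobileNetV2 report above
# (order: akiec, bcc, bkl, df, mel, nv, vasc).
precision = np.array([0.6667, 0.7500, 0.6667, 0.7143, 0.7273, 0.7895, 1.0000])
support   = np.array([6, 12, 13, 14, 14, 15, 7])

macro_avg    = precision.mean()                              # plain mean over classes
weighted_avg = (precision * support).sum() / support.sum()   # support-weighted mean

print(round(macro_avg, 4))     # 0.7592, matching the "macro avg" row
print(round(weighted_avg, 4))  # 0.7493, matching the "weighted avg" row
```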
models/MobileNetV2/logs/test_logs_acc_2021-12-12-15-41-03.txt ADDED
@@ -0,0 +1,40 @@
1
+ 0,0.35802469135802467
2
+ 1,0.49382716049382713
3
+ 2,0.6419753086419753
4
+ 3,0.691358024691358
5
+ 4,0.6666666666666666
6
+ 5,0.6790123456790124
7
+ 6,0.5925925925925926
8
+ 7,0.7283950617283951
9
+ 8,0.6049382716049383
10
+ 9,0.6172839506172839
11
+ 10,0.654320987654321
12
+ 11,0.6666666666666666
13
+ 12,0.6419753086419753
14
+ 13,0.691358024691358
15
+ 14,0.7530864197530864
16
+ 15,0.654320987654321
17
+ 16,0.654320987654321
18
+ 17,0.6666666666666666
19
+ 18,0.691358024691358
20
+ 19,0.7037037037037037
21
+ 20,0.691358024691358
22
+ 21,0.654320987654321
23
+ 22,0.7037037037037037
24
+ 23,0.6666666666666666
25
+ 24,0.6666666666666666
26
+ 25,0.6419753086419753
27
+ 26,0.6666666666666666
28
+ 27,0.691358024691358
29
+ 28,0.6790123456790124
30
+ 29,0.6790123456790124
31
+ 30,0.7283950617283951
32
+ 31,0.6790123456790124
33
+ 32,0.654320987654321
34
+ 33,0.6419753086419753
35
+ 34,0.6790123456790124
36
+ 35,0.6790123456790124
37
+ 36,0.6790123456790124
38
+ 37,0.6296296296296297
39
+ 38,0.7037037037037037
40
+ 39,0.7037037037037037
models/MobileNetV2/logs/train_logs_acc_2021-12-12-15-41-03.txt ADDED
@@ -0,0 +1,40 @@
1
+ 0,0.47928176795580113
2
+ 1,0.7071823204419889
3
+ 2,0.8397790055248618
4
+ 3,0.9019337016574586
5
+ 4,0.9240331491712708
6
+ 5,0.9046961325966851
7
+ 6,0.925414364640884
8
+ 7,0.9682320441988951
9
+ 8,0.9723756906077348
10
+ 9,0.9558011049723757
11
+ 10,0.9668508287292817
12
+ 11,0.9571823204419889
13
+ 12,0.9709944751381215
14
+ 13,0.9654696132596685
15
+ 14,0.9779005524861878
16
+ 15,0.9765193370165746
17
+ 16,0.9502762430939227
18
+ 17,0.9613259668508287
19
+ 18,0.9696132596685083
20
+ 19,0.9861878453038674
21
+ 20,0.9917127071823204
22
+ 21,0.988950276243094
23
+ 22,0.9958563535911602
24
+ 23,1.0
25
+ 24,1.0
26
+ 25,0.9986187845303868
27
+ 26,1.0
28
+ 27,1.0
29
+ 28,0.9986187845303868
30
+ 29,0.9972375690607734
31
+ 30,1.0
32
+ 31,0.9986187845303868
33
+ 32,0.9986187845303868
34
+ 33,1.0
35
+ 34,1.0
36
+ 35,1.0
37
+ 36,1.0
38
+ 37,0.9986187845303868
39
+ 38,1.0
40
+ 39,1.0
models/MobileNetV2/logs/train_logs_loss_2021-12-12-15-41-03.txt ADDED
@@ -0,0 +1,40 @@
1
+ 0,1.3283288478851318
2
+ 1,0.7845979928970337
3
+ 2,0.4683820903301239
4
+ 3,0.3241419792175293
5
+ 4,0.24197718501091003
6
+ 5,0.29220694303512573
7
+ 6,0.19436803460121155
8
+ 7,0.09668131172657013
9
+ 8,0.08244006335735321
10
+ 9,0.11392326653003693
11
+ 10,0.13123558461666107
12
+ 11,0.12716802954673767
13
+ 12,0.11016254127025604
14
+ 13,0.1108265221118927
15
+ 14,0.06764746457338333
16
+ 15,0.08966778963804245
17
+ 16,0.1984165906906128
18
+ 17,0.12155874818563461
19
+ 18,0.07818058878183365
20
+ 19,0.05533997341990471
21
+ 20,0.042471401393413544
22
+ 21,0.0365070179104805
23
+ 22,0.018686367198824883
24
+ 23,0.009167143143713474
25
+ 24,0.004488933831453323
26
+ 25,0.005246465560048819
27
+ 26,0.0034387907944619656
28
+ 27,0.0022085290402173996
29
+ 28,0.006266098469495773
30
+ 29,0.008525695651769638
31
+ 30,0.002915620803833008
32
+ 31,0.004936204757541418
33
+ 32,0.0033519347198307514
34
+ 33,0.0034256051294505596
35
+ 34,0.0018130912212654948
36
+ 35,0.0034908554516732693
37
+ 36,0.0012501556193456054
38
+ 37,0.004713095258921385
39
+ 38,0.0031446516513824463
40
+ 39,0.001865896163508296
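The three MobileNetV2 log files above are headerless "epoch,value" CSVs (held-out accuracy, training accuracy, and training loss per epoch). A small sketch of how they could be plotted; pandas and matplotlib are an illustrative choice, not implied dependencies of this Space.

```python
import pandas as pd
import matplotlib.pyplot as plt

# Headerless epoch,value CSVs written during MobileNetV2 training.
base = "models/MobileNetV2/logs"
stamp = "2021-12-12-15-41-03"

test_acc  = pd.read_csv(f"{base}/test_logs_acc_{stamp}.txt",  header=None, names=["epoch", "value"])
train_acc = pd.read_csv(f"{base}/train_logs_acc_{stamp}.txt", header=None, names=["epoch", "value"])

plt.plot(train_acc["epoch"], train_acc["value"], label="train accuracy")
plt.plot(test_acc["epoch"],  test_acc["value"],  label="test accuracy")
plt.xlabel("epoch")
plt.ylabel("accuracy")
plt.legend()
plt.show()  # train accuracy reaches ~1.0 while test accuracy plateaus around 0.65-0.75
```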
models/ShuffleNetV2/best_model.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c6e9fe950aecd14a5af927ebd6f367dce4a31e72028c1cdc4e3fc66989ba99fb
3
+ size 5237725
models/ShuffleNetV2/config.json ADDED
@@ -0,0 +1,25 @@
1
+ {
2
+ "num_classes": 7,
3
+ "hidden_size": 1024,
4
+ "id2label": {
5
+ "0": "akiec",
6
+ "1": "bcc",
7
+ "2": "bkl",
8
+ "3": "df",
9
+ "4": "mel",
10
+ "5": "nv",
11
+ "6": "vasc"
12
+ },
13
+ "label2id": {
14
+ "akiec": "0",
15
+ "bcc": "1",
16
+ "bkl": "2",
17
+ "df": "3",
18
+ "mel": "4",
19
+ "nv": "5",
20
+ "vasc": "6"
21
+ },
22
+ "architectures": [
23
+ "shufflenet_v2_x1_0"
24
+ ]
25
+ }
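This config.json pairs a torchvision architecture name with the HAM10000 label maps. How the repository itself consumes the file is not shown in this commit, so the following is only an illustrative sketch of rebuilding a 7-class ShuffleNetV2 head from it.

```python
import json
import torch.nn as nn
from torchvision import models

# Sketch only: how the config above could drive the classification head.
with open("models/ShuffleNetV2/config.json") as f:
    cfg = json.load(f)

model = models.shufflenet_v2_x1_0()
# hidden_size (1024) matches the in_features of the stock fc layer,
# so only the number of output classes changes.
model.fc = nn.Linear(cfg["hidden_size"], cfg["num_classes"])

# id2label maps an argmax index back to a HAM10000 class name.
print(cfg["id2label"]["6"])  # "vasc"
```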
models/ShuffleNetV2/logs/logs_2021-12-12-15-31-56.txt ADDED
@@ -0,0 +1,945 @@
1
+ ==================================================
2
+ Model architecture:
3
+ ==================================================
4
+ ShuffleNetV2(
5
+ (conv1): Sequential(
6
+ (0): Conv2d(3, 24, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
7
+ (1): BatchNorm2d(24, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
8
+ (2): ReLU(inplace=True)
9
+ )
10
+ (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
11
+ (stage2): Sequential(
12
+ (0): InvertedResidual(
13
+ (branch1): Sequential(
14
+ (0): Conv2d(24, 24, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=24, bias=False)
15
+ (1): BatchNorm2d(24, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
16
+ (2): Conv2d(24, 58, kernel_size=(1, 1), stride=(1, 1), bias=False)
17
+ (3): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
18
+ (4): ReLU(inplace=True)
19
+ )
20
+ (branch2): Sequential(
21
+ (0): Conv2d(24, 58, kernel_size=(1, 1), stride=(1, 1), bias=False)
22
+ (1): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
23
+ (2): ReLU(inplace=True)
24
+ (3): Conv2d(58, 58, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=58, bias=False)
25
+ (4): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
26
+ (5): Conv2d(58, 58, kernel_size=(1, 1), stride=(1, 1), bias=False)
27
+ (6): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
28
+ (7): ReLU(inplace=True)
29
+ )
30
+ )
31
+ (1): InvertedResidual(
32
+ (branch1): Sequential()
33
+ (branch2): Sequential(
34
+ (0): Conv2d(58, 58, kernel_size=(1, 1), stride=(1, 1), bias=False)
35
+ (1): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
36
+ (2): ReLU(inplace=True)
37
+ (3): Conv2d(58, 58, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=58, bias=False)
38
+ (4): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
39
+ (5): Conv2d(58, 58, kernel_size=(1, 1), stride=(1, 1), bias=False)
40
+ (6): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
41
+ (7): ReLU(inplace=True)
42
+ )
43
+ )
44
+ (2): InvertedResidual(
45
+ (branch1): Sequential()
46
+ (branch2): Sequential(
47
+ (0): Conv2d(58, 58, kernel_size=(1, 1), stride=(1, 1), bias=False)
48
+ (1): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
49
+ (2): ReLU(inplace=True)
50
+ (3): Conv2d(58, 58, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=58, bias=False)
51
+ (4): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
52
+ (5): Conv2d(58, 58, kernel_size=(1, 1), stride=(1, 1), bias=False)
53
+ (6): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
54
+ (7): ReLU(inplace=True)
55
+ )
56
+ )
57
+ (3): InvertedResidual(
58
+ (branch1): Sequential()
59
+ (branch2): Sequential(
60
+ (0): Conv2d(58, 58, kernel_size=(1, 1), stride=(1, 1), bias=False)
61
+ (1): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
62
+ (2): ReLU(inplace=True)
63
+ (3): Conv2d(58, 58, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=58, bias=False)
64
+ (4): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
65
+ (5): Conv2d(58, 58, kernel_size=(1, 1), stride=(1, 1), bias=False)
66
+ (6): BatchNorm2d(58, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
67
+ (7): ReLU(inplace=True)
68
+ )
69
+ )
70
+ )
71
+ (stage3): Sequential(
72
+ (0): InvertedResidual(
73
+ (branch1): Sequential(
74
+ (0): Conv2d(116, 116, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=116, bias=False)
75
+ (1): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
76
+ (2): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
77
+ (3): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
78
+ (4): ReLU(inplace=True)
79
+ )
80
+ (branch2): Sequential(
81
+ (0): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
82
+ (1): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
83
+ (2): ReLU(inplace=True)
84
+ (3): Conv2d(116, 116, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=116, bias=False)
85
+ (4): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
86
+ (5): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
87
+ (6): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
88
+ (7): ReLU(inplace=True)
89
+ )
90
+ )
91
+ (1): InvertedResidual(
92
+ (branch1): Sequential()
93
+ (branch2): Sequential(
94
+ (0): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
95
+ (1): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
96
+ (2): ReLU(inplace=True)
97
+ (3): Conv2d(116, 116, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=116, bias=False)
98
+ (4): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
99
+ (5): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
100
+ (6): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
101
+ (7): ReLU(inplace=True)
102
+ )
103
+ )
104
+ (2): InvertedResidual(
105
+ (branch1): Sequential()
106
+ (branch2): Sequential(
107
+ (0): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
108
+ (1): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
109
+ (2): ReLU(inplace=True)
110
+ (3): Conv2d(116, 116, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=116, bias=False)
111
+ (4): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
112
+ (5): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
113
+ (6): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
114
+ (7): ReLU(inplace=True)
115
+ )
116
+ )
117
+ (3): InvertedResidual(
118
+ (branch1): Sequential()
119
+ (branch2): Sequential(
120
+ (0): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
121
+ (1): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
122
+ (2): ReLU(inplace=True)
123
+ (3): Conv2d(116, 116, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=116, bias=False)
124
+ (4): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
125
+ (5): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
126
+ (6): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
127
+ (7): ReLU(inplace=True)
128
+ )
129
+ )
130
+ (4): InvertedResidual(
131
+ (branch1): Sequential()
132
+ (branch2): Sequential(
133
+ (0): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
134
+ (1): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
135
+ (2): ReLU(inplace=True)
136
+ (3): Conv2d(116, 116, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=116, bias=False)
137
+ (4): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
138
+ (5): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
139
+ (6): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
140
+ (7): ReLU(inplace=True)
141
+ )
142
+ )
143
+ (5): InvertedResidual(
144
+ (branch1): Sequential()
145
+ (branch2): Sequential(
146
+ (0): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
147
+ (1): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
148
+ (2): ReLU(inplace=True)
149
+ (3): Conv2d(116, 116, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=116, bias=False)
150
+ (4): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
151
+ (5): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
152
+ (6): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
153
+ (7): ReLU(inplace=True)
154
+ )
155
+ )
156
+ (6): InvertedResidual(
157
+ (branch1): Sequential()
158
+ (branch2): Sequential(
159
+ (0): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
160
+ (1): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
161
+ (2): ReLU(inplace=True)
162
+ (3): Conv2d(116, 116, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=116, bias=False)
163
+ (4): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
164
+ (5): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
165
+ (6): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
166
+ (7): ReLU(inplace=True)
167
+ )
168
+ )
169
+ (7): InvertedResidual(
170
+ (branch1): Sequential()
171
+ (branch2): Sequential(
172
+ (0): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
173
+ (1): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
174
+ (2): ReLU(inplace=True)
175
+ (3): Conv2d(116, 116, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=116, bias=False)
176
+ (4): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
177
+ (5): Conv2d(116, 116, kernel_size=(1, 1), stride=(1, 1), bias=False)
178
+ (6): BatchNorm2d(116, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
179
+ (7): ReLU(inplace=True)
180
+ )
181
+ )
182
+ )
183
+ (stage4): Sequential(
184
+ (0): InvertedResidual(
185
+ (branch1): Sequential(
186
+ (0): Conv2d(232, 232, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=232, bias=False)
187
+ (1): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
188
+ (2): Conv2d(232, 232, kernel_size=(1, 1), stride=(1, 1), bias=False)
189
+ (3): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
190
+ (4): ReLU(inplace=True)
191
+ )
192
+ (branch2): Sequential(
193
+ (0): Conv2d(232, 232, kernel_size=(1, 1), stride=(1, 1), bias=False)
194
+ (1): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
195
+ (2): ReLU(inplace=True)
196
+ (3): Conv2d(232, 232, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=232, bias=False)
197
+ (4): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
198
+ (5): Conv2d(232, 232, kernel_size=(1, 1), stride=(1, 1), bias=False)
199
+ (6): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
200
+ (7): ReLU(inplace=True)
201
+ )
202
+ )
203
+ (1): InvertedResidual(
204
+ (branch1): Sequential()
205
+ (branch2): Sequential(
206
+ (0): Conv2d(232, 232, kernel_size=(1, 1), stride=(1, 1), bias=False)
207
+ (1): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
208
+ (2): ReLU(inplace=True)
209
+ (3): Conv2d(232, 232, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=232, bias=False)
210
+ (4): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
211
+ (5): Conv2d(232, 232, kernel_size=(1, 1), stride=(1, 1), bias=False)
212
+ (6): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
213
+ (7): ReLU(inplace=True)
214
+ )
215
+ )
216
+ (2): InvertedResidual(
217
+ (branch1): Sequential()
218
+ (branch2): Sequential(
219
+ (0): Conv2d(232, 232, kernel_size=(1, 1), stride=(1, 1), bias=False)
220
+ (1): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
221
+ (2): ReLU(inplace=True)
222
+ (3): Conv2d(232, 232, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=232, bias=False)
223
+ (4): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
224
+ (5): Conv2d(232, 232, kernel_size=(1, 1), stride=(1, 1), bias=False)
225
+ (6): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
226
+ (7): ReLU(inplace=True)
227
+ )
228
+ )
229
+ (3): InvertedResidual(
230
+ (branch1): Sequential()
231
+ (branch2): Sequential(
232
+ (0): Conv2d(232, 232, kernel_size=(1, 1), stride=(1, 1), bias=False)
233
+ (1): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
234
+ (2): ReLU(inplace=True)
235
+ (3): Conv2d(232, 232, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=232, bias=False)
236
+ (4): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
237
+ (5): Conv2d(232, 232, kernel_size=(1, 1), stride=(1, 1), bias=False)
238
+ (6): BatchNorm2d(232, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
239
+ (7): ReLU(inplace=True)
240
+ )
241
+ )
242
+ )
243
+ (conv5): Sequential(
244
+ (0): Conv2d(464, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
245
+ (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
246
+ (2): ReLU(inplace=True)
247
+ )
248
+ (fc): Linear(in_features=1024, out_features=7, bias=True)
249
+ )
250
+ ==================================================
251
+
252
+ [Epoch 0], [Batch 0 / 40], [Loss 1.9472624063491821]
253
+ precision recall f1-score support
254
+
255
+ akiec 0.0000 0.0000 0.0000 15
256
+ bcc 1.0000 0.1429 0.2500 14
257
+ bkl 0.0000 0.0000 0.0000 11
258
+ df 0.1500 0.2727 0.1935 11
259
+ mel 0.0000 0.0000 0.0000 10
260
+ nv 0.1667 0.7778 0.2745 9
261
+ vasc 0.5294 0.8182 0.6429 11
262
+
263
+ accuracy 0.2593 81
264
+ macro avg 0.2637 0.2874 0.1944 81
265
+ weighted avg 0.2836 0.2593 0.1873 81
266
+
267
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth
268
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 25.93%
269
+ [Epoch 1], [Batch 0 / 40], [Loss 1.6928582191467285]
270
+ precision recall f1-score support
271
+
272
+ akiec 0.0000 0.0000 0.0000 15
273
+ bcc 0.4118 0.5000 0.4516 14
274
+ bkl 0.3077 0.3636 0.3333 11
275
+ df 0.2381 0.4545 0.3125 11
276
+ mel 0.0000 0.0000 0.0000 10
277
+ nv 0.3846 0.5556 0.4545 9
278
+ vasc 0.5294 0.8182 0.6429 11
279
+
280
+ accuracy 0.3704 81
281
+ macro avg 0.2674 0.3846 0.3135 81
282
+ weighted avg 0.2599 0.3704 0.3036 81
283
+
284
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth
285
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 37.04%
286
+ [Epoch 2], [Batch 0 / 40], [Loss 1.1930665969848633]
287
+ precision recall f1-score support
288
+
289
+ akiec 0.0000 0.0000 0.0000 15
290
+ bcc 0.7000 0.5000 0.5833 14
291
+ bkl 0.2800 0.6364 0.3889 11
292
+ df 0.4000 0.5455 0.4615 11
293
+ mel 0.0000 0.0000 0.0000 10
294
+ nv 0.3571 0.5556 0.4348 9
295
+ vasc 0.6667 0.9091 0.7692 11
296
+
297
+ accuracy 0.4321 81
298
+ macro avg 0.3434 0.4495 0.3768 81
299
+ weighted avg 0.3436 0.4321 0.3691 81
300
+
301
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth
302
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 43.21%
303
+ [Epoch 3], [Batch 0 / 40], [Loss 0.8380683064460754]
304
+ precision recall f1-score support
305
+
306
+ akiec 0.3333 0.0667 0.1111 15
307
+ bcc 0.7778 0.5000 0.6087 14
308
+ bkl 0.2857 0.7273 0.4103 11
309
+ df 0.5714 0.7273 0.6400 11
310
+ mel 0.7143 0.5000 0.5882 10
311
+ nv 0.7500 0.6667 0.7059 9
312
+ vasc 0.8333 0.9091 0.8696 11
313
+
314
+ accuracy 0.5556 81
315
+ macro avg 0.6094 0.5853 0.5620 81
316
+ weighted avg 0.5972 0.5556 0.5376 81
317
+
318
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth
319
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 55.56%
320
+ [Epoch 4], [Batch 0 / 40], [Loss 0.7404563426971436]
321
+ precision recall f1-score support
322
+
323
+ akiec 0.6000 0.6000 0.6000 15
324
+ bcc 0.3913 0.6429 0.4865 14
325
+ bkl 0.6000 0.5455 0.5714 11
326
+ df 0.8000 0.3636 0.5000 11
327
+ mel 0.6667 0.4000 0.5000 10
328
+ nv 0.6000 0.6667 0.6316 9
329
+ vasc 0.7500 0.8182 0.7826 11
330
+
331
+ accuracy 0.5802 81
332
+ macro avg 0.6297 0.5767 0.5817 81
333
+ weighted avg 0.6197 0.5802 0.5789 81
334
+
335
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth
336
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 58.02%
337
+ [Epoch 5], [Batch 0 / 40], [Loss 0.3303229510784149]
338
+ precision recall f1-score support
339
+
340
+ akiec 0.7273 0.5333 0.6154 15
341
+ bcc 0.5625 0.6429 0.6000 14
342
+ bkl 0.4118 0.6364 0.5000 11
343
+ df 0.7000 0.6364 0.6667 11
344
+ mel 0.6667 0.2000 0.3077 10
345
+ nv 0.5833 0.7778 0.6667 9
346
+ vasc 0.8333 0.9091 0.8696 11
347
+
348
+ accuracy 0.6173 81
349
+ macro avg 0.6407 0.6194 0.6037 81
350
+ weighted avg 0.6432 0.6173 0.6062 81
351
+
352
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth
353
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 61.73%
354
+ [Epoch 6], [Batch 0 / 40], [Loss 0.2551670968532562]
355
+ precision recall f1-score support
356
+
357
+ akiec 0.5882 0.6667 0.6250 15
358
+ bcc 0.7273 0.5714 0.6400 14
359
+ bkl 0.4444 0.3636 0.4000 11
360
+ df 0.6364 0.6364 0.6364 11
361
+ mel 0.6250 0.5000 0.5556 10
362
+ nv 0.6154 0.8889 0.7273 9
363
+ vasc 0.8333 0.9091 0.8696 11
364
+
365
+ accuracy 0.6420 81
366
+ macro avg 0.6386 0.6480 0.6363 81
367
+ weighted avg 0.6401 0.6420 0.6346 81
368
+
369
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth
370
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 64.20%
371
+ [Epoch 7], [Batch 0 / 40], [Loss 0.1678694188594818]
372
+ precision recall f1-score support
373
+
374
+ akiec 0.6923 0.6000 0.6429 15
375
+ bcc 0.7333 0.7857 0.7586 14
376
+ bkl 0.5000 0.4545 0.4762 11
377
+ df 0.7273 0.7273 0.7273 11
378
+ mel 0.7500 0.3000 0.4286 10
379
+ nv 0.5385 0.7778 0.6364 9
380
+ vasc 0.7333 1.0000 0.8462 11
381
+
382
+ accuracy 0.6667 81
383
+ macro avg 0.6678 0.6636 0.6451 81
384
+ weighted avg 0.6736 0.6667 0.6521 81
385
+
386
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth
387
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 66.67%
388
+ [Epoch 8], [Batch 0 / 40], [Loss 0.3083508014678955]
389
+ precision recall f1-score support
390
+
391
+ akiec 0.5789 0.7333 0.6471 15
392
+ bcc 1.0000 0.2857 0.4444 14
393
+ bkl 0.4706 0.7273 0.5714 11
394
+ df 0.6364 0.6364 0.6364 11
395
+ mel 0.5000 0.3000 0.3750 10
396
+ nv 0.5385 0.7778 0.6364 9
397
+ vasc 0.8182 0.8182 0.8182 11
398
+
399
+ accuracy 0.6049 81
400
+ macro avg 0.6489 0.6112 0.5898 81
401
+ weighted avg 0.6630 0.6049 0.5888 81
402
+
403
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
404
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 66.67%
405
+ [Epoch 9], [Batch 0 / 40], [Loss 0.11866040527820587]
406
+ precision recall f1-score support
407
+
408
+ akiec 0.5000 0.2000 0.2857 15
409
+ bcc 0.6875 0.7857 0.7333 14
410
+ bkl 0.3810 0.7273 0.5000 11
411
+ df 0.5833 0.6364 0.6087 11
412
+ mel 0.3750 0.3000 0.3333 10
413
+ nv 0.6667 0.6667 0.6667 9
414
+ vasc 0.8889 0.7273 0.8000 11
415
+
416
+ accuracy 0.5679 81
417
+ macro avg 0.5832 0.5776 0.5611 81
418
+ weighted avg 0.5835 0.5679 0.5541 81
419
+
420
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
421
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 66.67%
422
+ [Epoch 10], [Batch 0 / 40], [Loss 0.09714502841234207]
423
+ precision recall f1-score support
424
+
425
+ akiec 0.5000 0.5333 0.5161 15
426
+ bcc 0.6429 0.6429 0.6429 14
427
+ bkl 0.4118 0.6364 0.5000 11
428
+ df 0.7778 0.6364 0.7000 11
429
+ mel 0.4286 0.3000 0.3529 10
430
+ nv 0.7000 0.7778 0.7368 9
431
+ vasc 1.0000 0.7273 0.8421 11
432
+
433
+ accuracy 0.6049 81
434
+ macro avg 0.6373 0.6077 0.6130 81
435
+ weighted avg 0.6317 0.6049 0.6095 81
436
+
437
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
438
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 66.67%
439
+ [Epoch 11], [Batch 0 / 40], [Loss 0.11753970384597778]
440
+ precision recall f1-score support
441
+
442
+ akiec 0.6000 0.6000 0.6000 15
443
+ bcc 0.7500 0.4286 0.5455 14
444
+ bkl 0.4211 0.7273 0.5333 11
445
+ df 0.5833 0.6364 0.6087 11
446
+ mel 0.5000 0.2000 0.2857 10
447
+ nv 0.7000 0.7778 0.7368 9
448
+ vasc 0.7692 0.9091 0.8333 11
449
+
450
+ accuracy 0.6049 81
451
+ macro avg 0.6177 0.6113 0.5919 81
452
+ weighted avg 0.6211 0.6049 0.5908 81
453
+
454
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
455
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 66.67%
456
+ [Epoch 12], [Batch 0 / 40], [Loss 0.04757305607199669]
457
+ precision recall f1-score support
458
+
459
+ akiec 0.7143 0.3333 0.4545 15
460
+ bcc 0.4762 0.7143 0.5714 14
461
+ bkl 0.5833 0.6364 0.6087 11
462
+ df 0.6250 0.4545 0.5263 11
463
+ mel 0.4444 0.4000 0.4211 10
464
+ nv 0.5833 0.7778 0.6667 9
465
+ vasc 0.8333 0.9091 0.8696 11
466
+
467
+ accuracy 0.5926 81
468
+ macro avg 0.6086 0.6036 0.5883 81
469
+ weighted avg 0.6115 0.5926 0.5812 81
470
+
471
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
472
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 66.67%
473
+ [Epoch 13], [Batch 0 / 40], [Loss 0.05110578238964081]
474
+ precision recall f1-score support
475
+
476
+ akiec 0.6667 0.5333 0.5926 15
477
+ bcc 0.5000 0.8571 0.6316 14
478
+ bkl 0.6667 0.5455 0.6000 11
479
+ df 0.7778 0.6364 0.7000 11
480
+ mel 0.5000 0.5000 0.5000 10
481
+ nv 0.8333 0.5556 0.6667 9
482
+ vasc 0.9091 0.9091 0.9091 11
483
+
484
+ accuracy 0.6543 81
485
+ macro avg 0.6934 0.6481 0.6571 81
486
+ weighted avg 0.6838 0.6543 0.6547 81
487
+
488
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
489
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 66.67%
490
+ [Epoch 14], [Batch 0 / 40], [Loss 0.05120206996798515]
491
+ precision recall f1-score support
492
+
493
+ akiec 0.7500 0.6000 0.6667 15
494
+ bcc 0.7500 0.8571 0.8000 14
495
+ bkl 0.5000 0.6364 0.5600 11
496
+ df 0.8889 0.7273 0.8000 11
497
+ mel 0.6000 0.6000 0.6000 10
498
+ nv 0.6667 0.6667 0.6667 9
499
+ vasc 0.9091 0.9091 0.9091 11
500
+
501
+ accuracy 0.7160 81
502
+ macro avg 0.7235 0.7138 0.7146 81
503
+ weighted avg 0.7287 0.7160 0.7180 81
504
+
505
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth
506
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 71.60%
507
+ [Epoch 15], [Batch 0 / 40], [Loss 0.021679097786545753]
508
+ precision recall f1-score support
509
+
510
+ akiec 0.9091 0.6667 0.7692 15
511
+ bcc 0.7143 0.7143 0.7143 14
512
+ bkl 0.5333 0.7273 0.6154 11
513
+ df 0.7273 0.7273 0.7273 11
514
+ mel 0.5556 0.5000 0.5263 10
515
+ nv 0.5833 0.7778 0.6667 9
516
+ vasc 0.8889 0.7273 0.8000 11
517
+
518
+ accuracy 0.6914 81
519
+ macro avg 0.7017 0.6915 0.6885 81
520
+ weighted avg 0.7171 0.6914 0.6959 81
521
+
522
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
523
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 71.60%
524
+ [Epoch 16], [Batch 0 / 40], [Loss 0.01891976408660412]
525
+ precision recall f1-score support
526
+
527
+ akiec 0.5714 0.5333 0.5517 15
528
+ bcc 0.8750 0.5000 0.6364 14
529
+ bkl 0.5000 0.4545 0.4762 11
530
+ df 0.5556 0.9091 0.6897 11
531
+ mel 0.4545 0.5000 0.4762 10
532
+ nv 0.6250 0.5556 0.5882 9
533
+ vasc 0.8333 0.9091 0.8696 11
534
+
535
+ accuracy 0.6173 81
536
+ macro avg 0.6307 0.6231 0.6126 81
537
+ weighted avg 0.6391 0.6173 0.6127 81
538
+
539
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
540
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 71.60%
541
+ [Epoch 17], [Batch 0 / 40], [Loss 0.017482126131653786]
542
+ precision recall f1-score support
543
+
544
+ akiec 0.5789 0.7333 0.6471 15
545
+ bcc 1.0000 0.5714 0.7273 14
546
+ bkl 0.5714 0.7273 0.6400 11
547
+ df 0.6923 0.8182 0.7500 11
548
+ mel 0.7500 0.3000 0.4286 10
549
+ nv 0.7273 0.8889 0.8000 9
550
+ vasc 0.8333 0.9091 0.8696 11
551
+
552
+ accuracy 0.7037 81
553
+ macro avg 0.7362 0.7069 0.6946 81
554
+ weighted avg 0.7382 0.7037 0.6942 81
555
+
556
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
557
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 71.60%
558
+ [Epoch 18], [Batch 0 / 40], [Loss 0.036634355783462524]
559
+ precision recall f1-score support
560
+
561
+ akiec 0.8182 0.6000 0.6923 15
562
+ bcc 0.9091 0.7143 0.8000 14
563
+ bkl 0.5294 0.8182 0.6429 11
564
+ df 0.6667 0.9091 0.7692 11
565
+ mel 0.6667 0.4000 0.5000 10
566
+ nv 0.7500 0.6667 0.7059 9
567
+ vasc 0.8462 1.0000 0.9167 11
568
+
569
+ accuracy 0.7284 81
570
+ macro avg 0.7409 0.7297 0.7181 81
571
+ weighted avg 0.7516 0.7284 0.7229 81
572
+
573
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth
574
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
575
+ [Epoch 19], [Batch 0 / 40], [Loss 0.0149515550583601]
576
+ precision recall f1-score support
577
+
578
+ akiec 0.6111 0.7333 0.6667 15
579
+ bcc 0.7143 0.7143 0.7143 14
580
+ bkl 0.6250 0.4545 0.5263 11
581
+ df 0.6667 0.7273 0.6957 11
582
+ mel 0.6667 0.4000 0.5000 10
583
+ nv 0.6667 0.8889 0.7619 9
584
+ vasc 0.9091 0.9091 0.9091 11
585
+
586
+ accuracy 0.6914 81
587
+ macro avg 0.6942 0.6896 0.6820 81
588
+ weighted avg 0.6919 0.6914 0.6827 81
589
+
590
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
591
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
592
+ [Epoch 20], [Batch 0 / 40], [Loss 0.04696013033390045]
593
+ precision recall f1-score support
594
+
595
+ akiec 0.5652 0.8667 0.6842 15
596
+ bcc 0.8750 0.5000 0.6364 14
597
+ bkl 0.3750 0.5455 0.4444 11
598
+ df 1.0000 0.6364 0.7778 11
599
+ mel 0.6667 0.6000 0.6316 10
600
+ nv 0.7143 0.5556 0.6250 9
601
+ vasc 0.9091 0.9091 0.9091 11
602
+
603
+ accuracy 0.6667 81
604
+ macro avg 0.7293 0.6590 0.6726 81
605
+ weighted avg 0.7278 0.6667 0.6735 81
606
+
607
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
608
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
609
+ [Epoch 21], [Batch 0 / 40], [Loss 0.015502412803471088]
610
+ precision recall f1-score support
611
+
612
+ akiec 0.6111 0.7333 0.6667 15
613
+ bcc 0.8750 0.5000 0.6364 14
614
+ bkl 0.3571 0.4545 0.4000 11
615
+ df 1.0000 0.6364 0.7778 11
616
+ mel 0.5000 0.7000 0.5833 10
617
+ nv 0.7778 0.7778 0.7778 9
618
+ vasc 0.9091 0.9091 0.9091 11
619
+
620
+ accuracy 0.6667 81
621
+ macro avg 0.7186 0.6730 0.6787 81
622
+ weighted avg 0.7203 0.6667 0.6753 81
623
+
624
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
625
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
626
+ [Epoch 22], [Batch 0 / 40], [Loss 0.01586947590112686]
627
+ precision recall f1-score support
628
+
629
+ akiec 0.7059 0.8000 0.7500 15
630
+ bcc 0.7857 0.7857 0.7857 14
631
+ bkl 0.5556 0.4545 0.5000 11
632
+ df 0.8000 0.7273 0.7619 11
633
+ mel 0.7143 0.5000 0.5882 10
634
+ nv 0.5385 0.7778 0.6364 9
635
+ vasc 0.9091 0.9091 0.9091 11
636
+
637
+ accuracy 0.7160 81
638
+ macro avg 0.7156 0.7078 0.7045 81
639
+ weighted avg 0.7221 0.7160 0.7128 81
640
+
641
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
642
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
643
+ [Epoch 23], [Batch 0 / 40], [Loss 0.0149539140984416]
644
+ precision recall f1-score support
645
+
646
+ akiec 0.7500 0.8000 0.7742 15
647
+ bcc 0.7857 0.7857 0.7857 14
648
+ bkl 0.4545 0.4545 0.4545 11
649
+ df 0.8889 0.7273 0.8000 11
650
+ mel 0.6667 0.4000 0.5000 10
651
+ nv 0.6154 0.8889 0.7273 9
652
+ vasc 0.8333 0.9091 0.8696 11
653
+
654
+ accuracy 0.7160 81
655
+ macro avg 0.7135 0.7094 0.7016 81
656
+ weighted avg 0.7210 0.7160 0.7102 81
657
+
658
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
659
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
660
+ [Epoch 24], [Batch 0 / 40], [Loss 0.007495847996324301]
661
+ precision recall f1-score support
662
+
663
+ akiec 0.6667 0.6667 0.6667 15
664
+ bcc 0.6923 0.6429 0.6667 14
665
+ bkl 0.4615 0.5455 0.5000 11
666
+ df 0.8333 0.9091 0.8696 11
667
+ mel 0.4444 0.4000 0.4211 10
668
+ nv 0.8750 0.7778 0.8235 9
669
+ vasc 0.8182 0.8182 0.8182 11
670
+
671
+ accuracy 0.6790 81
672
+ macro avg 0.6845 0.6800 0.6808 81
673
+ weighted avg 0.6822 0.6790 0.6793 81
674
+
675
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
676
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
677
+ [Epoch 25], [Batch 0 / 40], [Loss 0.007480575703084469]
678
+ precision recall f1-score support
679
+
680
+ akiec 0.7692 0.6667 0.7143 15
681
+ bcc 0.8182 0.6429 0.7200 14
682
+ bkl 0.4706 0.7273 0.5714 11
683
+ df 0.7143 0.9091 0.8000 11
684
+ mel 0.6667 0.4000 0.5000 10
685
+ nv 0.7000 0.7778 0.7368 9
686
+ vasc 0.9000 0.8182 0.8571 11
687
+
688
+ accuracy 0.7037 81
689
+ macro avg 0.7199 0.7060 0.7000 81
690
+ weighted avg 0.7271 0.7037 0.7030 81
691
+
692
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
693
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
694
+ [Epoch 26], [Batch 0 / 40], [Loss 0.009873191826045513]
695
+ precision recall f1-score support
696
+
697
+ akiec 0.7143 0.6667 0.6897 15
698
+ bcc 0.6154 0.5714 0.5926 14
699
+ bkl 0.4667 0.6364 0.5385 11
700
+ df 0.8000 0.7273 0.7619 11
701
+ mel 0.6667 0.4000 0.5000 10
702
+ nv 0.5385 0.7778 0.6364 9
703
+ vasc 0.9000 0.8182 0.8571 11
704
+
705
+ accuracy 0.6543 81
706
+ macro avg 0.6716 0.6568 0.6537 81
707
+ weighted avg 0.6750 0.6543 0.6556 81
708
+
709
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
710
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
711
+ [Epoch 27], [Batch 0 / 40], [Loss 0.019343027845025063]
712
+ precision recall f1-score support
713
+
714
+ akiec 0.6111 0.7333 0.6667 15
715
+ bcc 0.6667 0.5714 0.6154 14
716
+ bkl 0.4545 0.4545 0.4545 11
717
+ df 0.7273 0.7273 0.7273 11
718
+ mel 0.4545 0.5000 0.4762 10
719
+ nv 0.7500 0.6667 0.7059 9
720
+ vasc 0.9000 0.8182 0.8571 11
721
+
722
+ accuracy 0.6420 81
723
+ macro avg 0.6520 0.6388 0.6433 81
724
+ weighted avg 0.6506 0.6420 0.6439 81
725
+
726
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
727
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
728
+ [Epoch 28], [Batch 0 / 40], [Loss 0.011302443221211433]
729
+ precision recall f1-score support
730
+
731
+ akiec 0.6000 0.6000 0.6000 15
732
+ bcc 0.6923 0.6429 0.6667 14
733
+ bkl 0.4615 0.5455 0.5000 11
734
+ df 0.7778 0.6364 0.7000 11
735
+ mel 0.5000 0.3000 0.3750 10
736
+ nv 0.5333 0.8889 0.6667 9
737
+ vasc 0.9000 0.8182 0.8571 11
738
+
739
+ accuracy 0.6296 81
740
+ macro avg 0.6379 0.6331 0.6236 81
741
+ weighted avg 0.6423 0.6296 0.6261 81
742
+
743
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
744
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
745
+ [Epoch 29], [Batch 0 / 40], [Loss 0.014127632603049278]
746
+ precision recall f1-score support
747
+
748
+ akiec 0.6875 0.7333 0.7097 15
749
+ bcc 0.6250 0.7143 0.6667 14
750
+ bkl 0.5000 0.4545 0.4762 11
751
+ df 0.8182 0.8182 0.8182 11
752
+ mel 0.5000 0.3000 0.3750 10
753
+ nv 0.5833 0.7778 0.6667 9
754
+ vasc 0.9000 0.8182 0.8571 11
755
+
756
+ accuracy 0.6667 81
757
+ macro avg 0.6591 0.6595 0.6528 81
758
+ weighted avg 0.6631 0.6667 0.6592 81
759
+
760
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
761
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
762
+ [Epoch 30], [Batch 0 / 40], [Loss 0.011675378307700157]
763
+ precision recall f1-score support
764
+
765
+ akiec 0.7143 0.6667 0.6897 15
766
+ bcc 0.7692 0.7143 0.7407 14
767
+ bkl 0.5455 0.5455 0.5455 11
768
+ df 0.6667 0.7273 0.6957 11
769
+ mel 0.7500 0.6000 0.6667 10
770
+ nv 0.5833 0.7778 0.6667 9
771
+ vasc 0.9091 0.9091 0.9091 11
772
+
773
+ accuracy 0.7037 81
774
+ macro avg 0.7054 0.7058 0.7020 81
775
+ weighted avg 0.7107 0.7037 0.7041 81
776
+
777
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
778
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
779
+ [Epoch 31], [Batch 0 / 40], [Loss 0.006650794763118029]
780
+ precision recall f1-score support
781
+
782
+ akiec 0.6923 0.6000 0.6429 15
783
+ bcc 0.7273 0.5714 0.6400 14
784
+ bkl 0.5000 0.6364 0.5600 11
785
+ df 0.6429 0.8182 0.7200 11
786
+ mel 1.0000 0.5000 0.6667 10
787
+ nv 0.6154 0.8889 0.7273 9
788
+ vasc 0.9091 0.9091 0.9091 11
789
+
790
+ accuracy 0.6914 81
791
+ macro avg 0.7267 0.7034 0.6951 81
792
+ weighted avg 0.7244 0.6914 0.6901 81
793
+
794
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
795
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
796
+ [Epoch 32], [Batch 0 / 40], [Loss 0.03025425598025322]
797
+ precision recall f1-score support
798
+
799
+ akiec 0.8182 0.6000 0.6923 15
800
+ bcc 0.8182 0.6429 0.7200 14
801
+ bkl 0.4286 0.5455 0.4800 11
802
+ df 0.5882 0.9091 0.7143 11
803
+ mel 1.0000 0.4000 0.5714 10
804
+ nv 0.6154 0.8889 0.7273 9
805
+ vasc 0.9091 0.9091 0.9091 11
806
+
807
+ accuracy 0.6914 81
808
+ macro avg 0.7397 0.6993 0.6878 81
809
+ weighted avg 0.7463 0.6914 0.6896 81
810
+
811
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
812
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
813
+ [Epoch 33], [Batch 0 / 40], [Loss 0.007708387915045023]
814
+ precision recall f1-score support
815
+
816
+ akiec 0.7692 0.6667 0.7143 15
817
+ bcc 0.6429 0.6429 0.6429 14
818
+ bkl 0.7500 0.5455 0.6316 11
819
+ df 0.5882 0.9091 0.7143 11
820
+ mel 0.6667 0.4000 0.5000 10
821
+ nv 0.6154 0.8889 0.7273 9
822
+ vasc 0.9000 0.8182 0.8571 11
823
+
824
+ accuracy 0.6914 81
825
+ macro avg 0.7046 0.6959 0.6839 81
826
+ weighted avg 0.7082 0.6914 0.6851 81
827
+
828
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
829
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
830
+ [Epoch 34], [Batch 0 / 40], [Loss 0.006280002649873495]
831
+ precision recall f1-score support
832
+
833
+ akiec 0.7857 0.7333 0.7586 15
834
+ bcc 0.7143 0.7143 0.7143 14
835
+ bkl 0.5455 0.5455 0.5455 11
836
+ df 0.7143 0.9091 0.8000 11
837
+ mel 0.5714 0.4000 0.4706 10
838
+ nv 0.6364 0.7778 0.7000 9
839
+ vasc 0.9000 0.8182 0.8571 11
840
+
841
+ accuracy 0.7037 81
842
+ macro avg 0.6954 0.6997 0.6923 81
843
+ weighted avg 0.7035 0.7037 0.6989 81
844
+
845
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
846
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
847
+ [Epoch 35], [Batch 0 / 40], [Loss 0.007977456785738468]
848
+ precision recall f1-score support
849
+
850
+ akiec 0.7143 0.6667 0.6897 15
851
+ bcc 0.6667 0.5714 0.6154 14
852
+ bkl 0.5455 0.5455 0.5455 11
853
+ df 0.5625 0.8182 0.6667 11
854
+ mel 0.6667 0.4000 0.5000 10
855
+ nv 0.6667 0.8889 0.7619 9
856
+ vasc 0.9000 0.8182 0.8571 11
857
+
858
+ accuracy 0.6667 81
859
+ macro avg 0.6746 0.6727 0.6623 81
860
+ weighted avg 0.6766 0.6667 0.6615 81
861
+
862
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
863
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
864
+ [Epoch 36], [Batch 0 / 40], [Loss 0.005339793860912323]
865
+ precision recall f1-score support
866
+
867
+ akiec 0.6111 0.7333 0.6667 15
868
+ bcc 0.7273 0.5714 0.6400 14
869
+ bkl 0.4545 0.4545 0.4545 11
870
+ df 0.6667 0.9091 0.7692 11
871
+ mel 0.7143 0.5000 0.5882 10
872
+ nv 0.7500 0.6667 0.7059 9
873
+ vasc 0.9091 0.9091 0.9091 11
874
+
875
+ accuracy 0.6790 81
876
+ macro avg 0.6904 0.6777 0.6762 81
877
+ weighted avg 0.6861 0.6790 0.6748 81
878
+
879
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
880
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 72.84%
881
+ [Epoch 37], [Batch 0 / 40], [Loss 0.006963111460208893]
882
+ precision recall f1-score support
883
+
884
+ akiec 0.7647 0.8667 0.8125 15
885
+ bcc 0.7500 0.6429 0.6923 14
886
+ bkl 0.6000 0.5455 0.5714 11
887
+ df 0.7333 1.0000 0.8462 11
888
+ mel 0.7500 0.6000 0.6667 10
889
+ nv 0.8750 0.7778 0.8235 9
890
+ vasc 0.9091 0.9091 0.9091 11
891
+
892
+ accuracy 0.7654 81
893
+ macro avg 0.7689 0.7631 0.7602 81
894
+ weighted avg 0.7656 0.7654 0.7599 81
895
+
896
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth
897
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 76.54%
898
+ [Epoch 38], [Batch 0 / 40], [Loss 0.003127896459773183]
899
+ precision recall f1-score support
900
+
901
+ akiec 0.7333 0.7333 0.7333 15
902
+ bcc 0.7692 0.7143 0.7407 14
903
+ bkl 0.5455 0.5455 0.5455 11
904
+ df 0.6667 0.9091 0.7692 11
905
+ mel 0.6250 0.5000 0.5556 10
906
+ nv 0.7500 0.6667 0.7059 9
907
+ vasc 0.9091 0.9091 0.9091 11
908
+
909
+ accuracy 0.7160 81
910
+ macro avg 0.7141 0.7111 0.7085 81
911
+ weighted avg 0.7173 0.7160 0.7128 81
912
+
913
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
914
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 76.54%
915
+ [Epoch 39], [Batch 0 / 40], [Loss 0.008776417002081871]
916
+ precision recall f1-score support
917
+
918
+ akiec 0.6667 0.6667 0.6667 15
919
+ bcc 0.6923 0.6429 0.6667 14
920
+ bkl 0.5000 0.5455 0.5217 11
921
+ df 0.6923 0.8182 0.7500 11
922
+ mel 0.5000 0.5000 0.5000 10
923
+ nv 0.8571 0.6667 0.7500 9
924
+ vasc 0.9091 0.9091 0.9091 11
925
+
926
+ accuracy 0.6790 81
927
+ macro avg 0.6882 0.6784 0.6806 81
928
+ weighted avg 0.6855 0.6790 0.6799 81
929
+
930
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/last_model.pth
931
+ [shufflenet_v2_x1_0] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-31-56/best_model.pth - Accuracy 76.54%
932
+ precision recall f1-score support
933
+
934
+ akiec 0.7143 0.6667 0.6897 15
935
+ bcc 0.6667 0.7143 0.6897 14
936
+ bkl 0.5455 0.5455 0.5455 11
937
+ df 0.6429 0.8182 0.7200 11
938
+ mel 0.5714 0.4000 0.4706 10
939
+ nv 0.6000 0.6667 0.6316 9
940
+ vasc 0.9000 0.8182 0.8571 11
941
+
942
+ accuracy 0.6667 81
943
+ macro avg 0.6630 0.6613 0.6577 81
944
+ weighted avg 0.6683 0.6667 0.6634 81
945
+
models/ShuffleNetV2/logs/test_logs_acc_2021-12-12-15-31-56.txt ADDED
@@ -0,0 +1,40 @@
1
+ 0,0.25925925925925924
2
+ 1,0.37037037037037035
3
+ 2,0.43209876543209874
4
+ 3,0.5555555555555556
5
+ 4,0.5802469135802469
6
+ 5,0.6172839506172839
7
+ 6,0.6419753086419753
8
+ 7,0.6666666666666666
9
+ 8,0.6049382716049383
10
+ 9,0.5679012345679012
11
+ 10,0.6049382716049383
12
+ 11,0.6049382716049383
13
+ 12,0.5925925925925926
14
+ 13,0.654320987654321
15
+ 14,0.7160493827160493
16
+ 15,0.691358024691358
17
+ 16,0.6172839506172839
18
+ 17,0.7037037037037037
19
+ 18,0.7283950617283951
20
+ 19,0.691358024691358
21
+ 20,0.6666666666666666
22
+ 21,0.6666666666666666
23
+ 22,0.7160493827160493
24
+ 23,0.7160493827160493
25
+ 24,0.6790123456790124
26
+ 25,0.7037037037037037
27
+ 26,0.654320987654321
28
+ 27,0.6419753086419753
29
+ 28,0.6296296296296297
30
+ 29,0.6666666666666666
31
+ 30,0.7037037037037037
32
+ 31,0.691358024691358
33
+ 32,0.691358024691358
34
+ 33,0.691358024691358
35
+ 34,0.7037037037037037
36
+ 35,0.6666666666666666
37
+ 36,0.6790123456790124
38
+ 37,0.7654320987654321
39
+ 38,0.7160493827160493
40
+ 39,0.6790123456790124
models/ShuffleNetV2/logs/train_logs_acc_2021-12-12-15-31-56.txt ADDED
@@ -0,0 +1,40 @@
1
+ 0,0.4350828729281768
2
+ 1,0.6919889502762431
3
+ 2,0.7265193370165746
4
+ 3,0.7983425414364641
5
+ 4,0.8466850828729282
6
+ 5,0.8964088397790055
7
+ 6,0.9350828729281768
8
+ 7,0.9461325966850829
9
+ 8,0.9488950276243094
10
+ 9,0.9751381215469613
11
+ 10,0.9599447513812155
12
+ 11,0.9779005524861878
13
+ 12,0.988950276243094
14
+ 13,0.9834254143646409
15
+ 14,0.9903314917127072
16
+ 15,0.9903314917127072
17
+ 16,0.9903314917127072
18
+ 17,0.9903314917127072
19
+ 18,0.9903314917127072
20
+ 19,0.9930939226519337
21
+ 20,0.9875690607734806
22
+ 21,0.994475138121547
23
+ 22,1.0
24
+ 23,1.0
25
+ 24,0.9986187845303868
26
+ 25,0.9930939226519337
27
+ 26,0.994475138121547
28
+ 27,0.9958563535911602
29
+ 28,0.9903314917127072
30
+ 29,0.9986187845303868
31
+ 30,1.0
32
+ 31,0.9958563535911602
33
+ 32,0.9958563535911602
34
+ 33,1.0
35
+ 34,0.9986187845303868
36
+ 35,1.0
37
+ 36,1.0
38
+ 37,0.9986187845303868
39
+ 38,1.0
40
+ 39,1.0
models/ShuffleNetV2/logs/train_logs_loss_2021-12-12-15-31-56.txt ADDED
@@ -0,0 +1,40 @@
1
+ 0,1.8709386587142944
2
+ 1,1.4948400259017944
3
+ 2,1.084162712097168
4
+ 3,0.7621438503265381
5
+ 4,0.5374770760536194
6
+ 5,0.36186251044273376
7
+ 6,0.2559089660644531
8
+ 7,0.24779772758483887
9
+ 8,0.20248866081237793
10
+ 9,0.13955894112586975
11
+ 10,0.15823791921138763
12
+ 11,0.10914398729801178
13
+ 12,0.08004051446914673
14
+ 13,0.061323583126068115
15
+ 14,0.08058642596006393
16
+ 15,0.04892304167151451
17
+ 16,0.050276126712560654
18
+ 17,0.04263928160071373
19
+ 18,0.05711854621767998
20
+ 19,0.04539179429411888
21
+ 20,0.05247002840042114
22
+ 21,0.03884650021791458
23
+ 22,0.021214909851551056
24
+ 23,0.01267595961689949
25
+ 24,0.027989603579044342
26
+ 25,0.04051186889410019
27
+ 26,0.05948528274893761
28
+ 27,0.026193585246801376
29
+ 28,0.038456693291664124
30
+ 29,0.01921292580664158
31
+ 30,0.018016455695033073
32
+ 31,0.02133302390575409
33
+ 32,0.026172570884227753
34
+ 33,0.013053493574261665
35
+ 34,0.009445875883102417
36
+ 35,0.009853013791143894
37
+ 36,0.009784508496522903
38
+ 37,0.011207174509763718
39
+ 38,0.006571437232196331
40
+ 39,0.010078544728457928
models/VGG16/best_model.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8e560d061ded8ceba5ebd92eb51e304bba0fdd703396b85b21709aa51febbf7f
3
+ size 537174661
models/VGG16/config.json ADDED
@@ -0,0 +1,25 @@
1
+ {
2
+ "num_classes": 7,
3
+ "hidden_size": 4096,
4
+ "id2label": {
5
+ "0": "akiec",
6
+ "1": "bcc",
7
+ "2": "bkl",
8
+ "3": "df",
9
+ "4": "mel",
10
+ "5": "nv",
11
+ "6": "vasc"
12
+ },
13
+ "label2id": {
14
+ "akiec": "0",
15
+ "bcc": "1",
16
+ "bkl": "2",
17
+ "df": "3",
18
+ "mel": "4",
19
+ "nv": "5",
20
+ "vasc": "6"
21
+ },
22
+ "architectures": [
23
+ "vgg16"
24
+ ]
25
+ }
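Same config layout as the other backbones, but hidden_size is 4096 here because VGG16 ends in a multi-layer classifier rather than a single fc layer. A one-line illustration of the corresponding head swap (assumed, not taken from the repository's code):

```python
import torch.nn as nn
from torchvision import models

# Illustrative only: the 7-class head replaces the last classifier layer,
# whose in_features (4096) is what "hidden_size" refers to for VGG16.
vgg = models.vgg16()
vgg.classifier[6] = nn.Linear(4096, 7)
```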
models/VGG16/logs/logs_2021-12-12-15-09-07.txt ADDED
@@ -0,0 +1,744 @@
1
+ ==================================================
2
+ Model architecture:
3
+ ==================================================
4
+ VGG(
5
+ (features): Sequential(
6
+ (0): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
7
+ (1): ReLU(inplace=True)
8
+ (2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
9
+ (3): ReLU(inplace=True)
10
+ (4): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
11
+ (5): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
12
+ (6): ReLU(inplace=True)
13
+ (7): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
14
+ (8): ReLU(inplace=True)
15
+ (9): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
16
+ (10): Conv2d(128, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
17
+ (11): ReLU(inplace=True)
18
+ (12): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
19
+ (13): ReLU(inplace=True)
20
+ (14): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
21
+ (15): ReLU(inplace=True)
22
+ (16): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
23
+ (17): Conv2d(256, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
24
+ (18): ReLU(inplace=True)
25
+ (19): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
26
+ (20): ReLU(inplace=True)
27
+ (21): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
28
+ (22): ReLU(inplace=True)
29
+ (23): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
30
+ (24): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
31
+ (25): ReLU(inplace=True)
32
+ (26): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
33
+ (27): ReLU(inplace=True)
34
+ (28): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
35
+ (29): ReLU(inplace=True)
36
+ (30): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
37
+ )
38
+ (avgpool): AdaptiveAvgPool2d(output_size=(7, 7))
39
+ (classifier): Sequential(
40
+ (0): Linear(in_features=25088, out_features=4096, bias=True)
41
+ (1): ReLU(inplace=True)
42
+ (2): Dropout(p=0.5, inplace=False)
43
+ (3): Linear(in_features=4096, out_features=4096, bias=True)
44
+ (4): ReLU(inplace=True)
45
+ (5): Dropout(p=0.5, inplace=False)
46
+ (6): Linear(in_features=4096, out_features=7, bias=True)
47
+ )
48
+ )
49
+ ==================================================
50
+
51
+ [Epoch 0], [Batch 0 / 40], [Loss 2.061110496520996]
52
+ precision recall f1-score support
53
+
54
+ akiec 0.0000 0.0000 0.0000 12
55
+ bcc 0.0000 0.0000 0.0000 9
56
+ bkl 0.0988 1.0000 0.1798 8
57
+ df 0.0000 0.0000 0.0000 17
58
+ mel 0.0000 0.0000 0.0000 13
59
+ nv 0.0000 0.0000 0.0000 9
60
+ vasc 0.0000 0.0000 0.0000 13
61
+
62
+ accuracy 0.0988 81
63
+ macro avg 0.0141 0.1429 0.0257 81
64
+ weighted avg 0.0098 0.0988 0.0178 81
65
+
66
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth
67
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 9.88%
68
+ [Epoch 1], [Batch 0 / 40], [Loss 1.964955449104309]
69
+ precision recall f1-score support
70
+
71
+ akiec 0.0000 0.0000 0.0000 12
72
+ bcc 0.0000 0.0000 0.0000 9
73
+ bkl 0.0000 0.0000 0.0000 8
74
+ df 0.0000 0.0000 0.0000 17
75
+ mel 0.0000 0.0000 0.0000 13
76
+ nv 0.0000 0.0000 0.0000 9
77
+ vasc 0.1467 0.8462 0.2500 13
78
+
79
+ accuracy 0.1358 81
80
+ macro avg 0.0210 0.1209 0.0357 81
81
+ weighted avg 0.0235 0.1358 0.0401 81
82
+
83
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth
84
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 13.58%
85
+ [Epoch 2], [Batch 0 / 40], [Loss 1.9537009000778198]
86
+ precision recall f1-score support
87
+
88
+ akiec 0.0000 0.0000 0.0000 12
89
+ bcc 0.0625 0.1111 0.0800 9
90
+ bkl 0.0000 0.0000 0.0000 8
91
+ df 0.0000 0.0000 0.0000 17
92
+ mel 0.1538 0.7692 0.2564 13
93
+ nv 0.0000 0.0000 0.0000 9
94
+ vasc 0.0000 0.0000 0.0000 13
95
+
96
+ accuracy 0.1358 81
97
+ macro avg 0.0309 0.1258 0.0481 81
98
+ weighted avg 0.0316 0.1358 0.0500 81
99
+
100
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
101
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 13.58%
102
+ [Epoch 3], [Batch 0 / 40], [Loss 1.9671849012374878]
103
+ precision recall f1-score support
104
+
105
+ akiec 0.1481 1.0000 0.2581 12
106
+ bcc 0.0000 0.0000 0.0000 9
107
+ bkl 0.0000 0.0000 0.0000 8
108
+ df 0.0000 0.0000 0.0000 17
109
+ mel 0.0000 0.0000 0.0000 13
110
+ nv 0.0000 0.0000 0.0000 9
111
+ vasc 0.0000 0.0000 0.0000 13
112
+
113
+ accuracy 0.1481 81
114
+ macro avg 0.0212 0.1429 0.0369 81
115
+ weighted avg 0.0219 0.1481 0.0382 81
116
+
117
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth
118
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 14.81%
119
+ [Epoch 4], [Batch 0 / 40], [Loss 1.9357523918151855]
120
+ precision recall f1-score support
121
+
122
+ akiec 0.0000 0.0000 0.0000 12
123
+ bcc 0.0000 0.0000 0.0000 9
124
+ bkl 0.0769 0.5000 0.1333 8
125
+ df 0.2414 0.4118 0.3043 17
126
+ mel 0.0000 0.0000 0.0000 13
127
+ nv 0.0000 0.0000 0.0000 9
128
+ vasc 0.0000 0.0000 0.0000 13
129
+
130
+ accuracy 0.1358 81
131
+ macro avg 0.0455 0.1303 0.0625 81
132
+ weighted avg 0.0583 0.1358 0.0770 81
133
+
134
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
135
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 14.81%
136
+ [Epoch 5], [Batch 0 / 40], [Loss 1.9422740936279297]
137
+ precision recall f1-score support
138
+
139
+ akiec 0.0000 0.0000 0.0000 12
140
+ bcc 0.1184 1.0000 0.2118 9
141
+ bkl 0.0000 0.0000 0.0000 8
142
+ df 0.0000 0.0000 0.0000 17
143
+ mel 0.0000 0.0000 0.0000 13
144
+ nv 0.0000 0.0000 0.0000 9
145
+ vasc 0.0000 0.0000 0.0000 13
146
+
147
+ accuracy 0.1111 81
148
+ macro avg 0.0169 0.1429 0.0303 81
149
+ weighted avg 0.0132 0.1111 0.0235 81
150
+
151
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
152
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 14.81%
153
+ [Epoch 6], [Batch 0 / 40], [Loss 2.1157145500183105]
154
+ precision recall f1-score support
155
+
156
+ akiec 0.0000 0.0000 0.0000 12
157
+ bcc 0.0000 0.0000 0.0000 9
158
+ bkl 0.0000 0.0000 0.0000 8
159
+ df 0.0000 0.0000 0.0000 17
160
+ mel 0.0000 0.0000 0.0000 13
161
+ nv 0.1176 0.8889 0.2078 9
162
+ vasc 0.0000 0.0000 0.0000 13
163
+
164
+ accuracy 0.0988 81
165
+ macro avg 0.0168 0.1270 0.0297 81
166
+ weighted avg 0.0131 0.0988 0.0231 81
167
+
168
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
169
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 14.81%
170
+ [Epoch 7], [Batch 0 / 40], [Loss 1.9586039781570435]
171
+ precision recall f1-score support
172
+
173
+ akiec 0.0000 0.0000 0.0000 12
174
+ bcc 0.1125 1.0000 0.2022 9
175
+ bkl 0.0000 0.0000 0.0000 8
176
+ df 0.0000 0.0000 0.0000 17
177
+ mel 0.0000 0.0000 0.0000 13
178
+ nv 0.0000 0.0000 0.0000 9
179
+ vasc 0.0000 0.0000 0.0000 13
180
+
181
+ accuracy 0.1111 81
182
+ macro avg 0.0161 0.1429 0.0289 81
183
+ weighted avg 0.0125 0.1111 0.0225 81
184
+
185
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
186
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 14.81%
187
+ [Epoch 8], [Batch 0 / 40], [Loss 1.9570530652999878]
188
+ precision recall f1-score support
189
+
190
+ akiec 0.0000 0.0000 0.0000 12
191
+ bcc 0.0000 0.0000 0.0000 9
192
+ bkl 0.0000 0.0000 0.0000 8
193
+ df 0.1429 0.1176 0.1290 17
194
+ mel 0.3333 0.1538 0.2105 13
195
+ nv 0.1250 0.7778 0.2154 9
196
+ vasc 0.0000 0.0000 0.0000 13
197
+
198
+ accuracy 0.1358 81
199
+ macro avg 0.0859 0.1499 0.0793 81
200
+ weighted avg 0.0974 0.1358 0.0848 81
201
+
202
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
203
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 14.81%
204
+ [Epoch 9], [Batch 0 / 40], [Loss 1.9447191953659058]
205
+ precision recall f1-score support
206
+
207
+ akiec 0.3333 0.0833 0.1333 12
208
+ bcc 0.0000 0.0000 0.0000 9
209
+ bkl 0.2000 0.2500 0.2222 8
210
+ df 0.7500 0.1765 0.2857 17
211
+ mel 0.2222 0.1538 0.1818 13
212
+ nv 0.1538 0.8889 0.2623 9
213
+ vasc 0.6667 0.1538 0.2500 13
214
+
215
+ accuracy 0.2222 81
216
+ macro avg 0.3323 0.2438 0.1908 81
217
+ weighted avg 0.3863 0.2222 0.2001 81
218
+
219
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth
220
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 22.22%
221
+ [Epoch 10], [Batch 0 / 40], [Loss 1.8850387334823608]
222
+ precision recall f1-score support
223
+
224
+ akiec 0.0000 0.0000 0.0000 12
225
+ bcc 0.0000 0.0000 0.0000 9
226
+ bkl 0.0000 0.0000 0.0000 8
227
+ df 0.0000 0.0000 0.0000 17
228
+ mel 0.0000 0.0000 0.0000 13
229
+ nv 0.0000 0.0000 0.0000 9
230
+ vasc 0.1818 0.9231 0.3038 13
231
+
232
+ accuracy 0.1481 81
233
+ macro avg 0.0260 0.1319 0.0434 81
234
+ weighted avg 0.0292 0.1481 0.0488 81
235
+
236
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
237
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 22.22%
238
+ [Epoch 11], [Batch 0 / 40], [Loss 1.9406187534332275]
239
+ precision recall f1-score support
240
+
241
+ akiec 0.1429 0.0833 0.1053 12
242
+ bcc 0.0000 0.0000 0.0000 9
243
+ bkl 0.0000 0.0000 0.0000 8
244
+ df 0.0000 0.0000 0.0000 17
245
+ mel 0.2105 0.3077 0.2500 13
246
+ nv 0.1282 0.5556 0.2083 9
247
+ vasc 0.3000 0.2308 0.2609 13
248
+
249
+ accuracy 0.1605 81
250
+ macro avg 0.1117 0.1682 0.1178 81
251
+ weighted avg 0.1173 0.1605 0.1207 81
252
+
253
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
254
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 22.22%
255
+ [Epoch 12], [Batch 0 / 40], [Loss 1.8705354928970337]
256
+ precision recall f1-score support
257
+
258
+ akiec 0.0000 0.0000 0.0000 12
259
+ bcc 0.0000 0.0000 0.0000 9
260
+ bkl 0.0000 0.0000 0.0000 8
261
+ df 0.0000 0.0000 0.0000 17
262
+ mel 0.0000 0.0000 0.0000 13
263
+ nv 0.1039 0.8889 0.1860 9
264
+ vasc 0.0000 0.0000 0.0000 13
265
+
266
+ accuracy 0.0988 81
267
+ macro avg 0.0148 0.1270 0.0266 81
268
+ weighted avg 0.0115 0.0988 0.0207 81
269
+
270
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
271
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 22.22%
272
+ [Epoch 13], [Batch 0 / 40], [Loss 1.9052350521087646]
273
+ precision recall f1-score support
274
+
275
+ akiec 0.0000 0.0000 0.0000 12
276
+ bcc 0.1212 0.8889 0.2133 9
277
+ bkl 0.0000 0.0000 0.0000 8
278
+ df 0.0000 0.0000 0.0000 17
279
+ mel 0.2667 0.3077 0.2857 13
280
+ nv 0.0000 0.0000 0.0000 9
281
+ vasc 0.0000 0.0000 0.0000 13
282
+
283
+ accuracy 0.1481 81
284
+ macro avg 0.0554 0.1709 0.0713 81
285
+ weighted avg 0.0563 0.1481 0.0696 81
286
+
287
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
288
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 22.22%
289
+ [Epoch 14], [Batch 0 / 40], [Loss 1.9385014772415161]
290
+ precision recall f1-score support
291
+
292
+ akiec 0.0000 0.0000 0.0000 12
293
+ bcc 0.0000 0.0000 0.0000 9
294
+ bkl 0.0000 0.0000 0.0000 8
295
+ df 1.0000 0.0588 0.1111 17
296
+ mel 0.1714 0.4615 0.2500 13
297
+ nv 0.0000 0.0000 0.0000 9
298
+ vasc 0.2286 0.6154 0.3333 13
299
+
300
+ accuracy 0.1852 81
301
+ macro avg 0.2000 0.1622 0.0992 81
302
+ weighted avg 0.2741 0.1852 0.1169 81
303
+
304
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
305
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 22.22%
306
+ [Epoch 15], [Batch 0 / 40], [Loss 1.876391887664795]
307
+ precision recall f1-score support
308
+
309
+ akiec 0.0000 0.0000 0.0000 12
310
+ bcc 0.0000 0.0000 0.0000 9
311
+ bkl 0.1014 0.8750 0.1818 8
312
+ df 0.0000 0.0000 0.0000 17
313
+ mel 0.0000 0.0000 0.0000 13
314
+ nv 0.6250 0.5556 0.5882 9
315
+ vasc 1.0000 0.0769 0.1429 13
316
+
317
+ accuracy 0.1605 81
318
+ macro avg 0.2466 0.2154 0.1304 81
319
+ weighted avg 0.2400 0.1605 0.1062 81
320
+
321
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
322
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 22.22%
323
+ [Epoch 16], [Batch 0 / 40], [Loss 1.8834539651870728]
324
+ precision recall f1-score support
325
+
326
+ akiec 0.0000 0.0000 0.0000 12
327
+ bcc 0.1562 0.5556 0.2439 9
328
+ bkl 0.0000 0.0000 0.0000 8
329
+ df 0.0000 0.0000 0.0000 17
330
+ mel 0.2308 0.4615 0.3077 13
331
+ nv 0.3125 0.5556 0.4000 9
332
+ vasc 0.5000 0.1538 0.2353 13
333
+
334
+ accuracy 0.2222 81
335
+ macro avg 0.1714 0.2466 0.1696 81
336
+ weighted avg 0.1694 0.2222 0.1587 81
337
+
338
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
339
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 22.22%
340
+ [Epoch 17], [Batch 0 / 40], [Loss 1.8836216926574707]
341
+ precision recall f1-score support
342
+
343
+ akiec 0.2500 0.0833 0.1250 12
344
+ bcc 0.0000 0.0000 0.0000 9
345
+ bkl 0.1042 0.6250 0.1786 8
346
+ df 0.2727 0.1765 0.2143 17
347
+ mel 0.5000 0.1538 0.2353 13
348
+ nv 0.5000 0.5556 0.5263 9
349
+ vasc 0.2500 0.0769 0.1176 13
350
+
351
+ accuracy 0.2099 81
352
+ macro avg 0.2681 0.2387 0.1996 81
353
+ weighted avg 0.2805 0.2099 0.1963 81
354
+
355
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
356
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 22.22%
357
+ [Epoch 18], [Batch 0 / 40], [Loss 1.7908109426498413]
358
+ precision recall f1-score support
359
+
360
+ akiec 0.0000 0.0000 0.0000 12
361
+ bcc 0.0968 0.3333 0.1500 9
362
+ bkl 0.0000 0.0000 0.0000 8
363
+ df 0.0000 0.0000 0.0000 17
364
+ mel 0.2000 0.0769 0.1111 13
365
+ nv 0.3571 0.5556 0.4348 9
366
+ vasc 0.1852 0.3846 0.2500 13
367
+
368
+ accuracy 0.1728 81
369
+ macro avg 0.1199 0.1929 0.1351 81
370
+ weighted avg 0.1123 0.1728 0.1229 81
371
+
372
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
373
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 22.22%
374
+ [Epoch 19], [Batch 0 / 40], [Loss 1.788246750831604]
375
+ precision recall f1-score support
376
+
377
+ akiec 0.0833 0.0833 0.0833 12
378
+ bcc 0.1395 0.6667 0.2308 9
379
+ bkl 0.0000 0.0000 0.0000 8
380
+ df 0.2857 0.1176 0.1667 17
381
+ mel 0.0000 0.0000 0.0000 13
382
+ nv 0.5556 0.5556 0.5556 9
383
+ vasc 0.3333 0.1538 0.2105 13
384
+
385
+ accuracy 0.1975 81
386
+ macro avg 0.1996 0.2253 0.1781 81
387
+ weighted avg 0.2030 0.1975 0.1685 81
388
+
389
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
390
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 22.22%
391
+ [Epoch 20], [Batch 0 / 40], [Loss 1.675390362739563]
392
+ precision recall f1-score support
393
+
394
+ akiec 0.5000 0.0833 0.1429 12
395
+ bcc 0.0000 0.0000 0.0000 9
396
+ bkl 0.1034 0.7500 0.1818 8
397
+ df 0.5000 0.0588 0.1053 17
398
+ mel 0.0000 0.0000 0.0000 13
399
+ nv 0.7143 0.5556 0.6250 9
400
+ vasc 1.0000 0.0769 0.1429 13
401
+
402
+ accuracy 0.1728 81
403
+ macro avg 0.4025 0.2178 0.1711 81
404
+ weighted avg 0.4291 0.1728 0.1536 81
405
+
406
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
407
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 22.22%
408
+ [Epoch 21], [Batch 0 / 40], [Loss 1.7336101531982422]
409
+ precision recall f1-score support
410
+
411
+ akiec 0.1463 0.5000 0.2264 12
412
+ bcc 0.0000 0.0000 0.0000 9
413
+ bkl 0.3333 0.1250 0.1818 8
414
+ df 0.4000 0.1176 0.1818 17
415
+ mel 0.1111 0.0769 0.0909 13
416
+ nv 0.7500 0.6667 0.7059 9
417
+ vasc 0.5385 0.5385 0.5385 13
418
+
419
+ accuracy 0.2840 81
420
+ macro avg 0.3256 0.2892 0.2750 81
421
+ weighted avg 0.3261 0.2840 0.2691 81
422
+
423
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth
424
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 28.40%
425
+ [Epoch 22], [Batch 0 / 40], [Loss 1.791454792022705]
426
+ precision recall f1-score support
427
+
428
+ akiec 0.0714 0.0833 0.0769 12
429
+ bcc 0.0000 0.0000 0.0000 9
430
+ bkl 0.1923 0.6250 0.2941 8
431
+ df 0.3750 0.1765 0.2400 17
432
+ mel 0.1667 0.1538 0.1600 13
433
+ nv 0.6000 0.6667 0.6316 9
434
+ vasc 0.4444 0.3077 0.3636 13
435
+
436
+ accuracy 0.2593 81
437
+ macro avg 0.2643 0.2876 0.2523 81
438
+ weighted avg 0.2730 0.2593 0.2450 81
439
+
440
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
441
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 28.40%
442
+ [Epoch 23], [Batch 0 / 40], [Loss 1.5891815423965454]
443
+ precision recall f1-score support
444
+
445
+ akiec 0.0000 0.0000 0.0000 12
446
+ bcc 0.1935 0.6667 0.3000 9
447
+ bkl 0.0769 0.1250 0.0952 8
448
+ df 0.2000 0.0588 0.0909 17
449
+ mel 0.1429 0.0769 0.1000 13
450
+ nv 0.6667 0.6667 0.6667 9
451
+ vasc 0.5714 0.3077 0.4000 13
452
+
453
+ accuracy 0.2346 81
454
+ macro avg 0.2645 0.2717 0.2361 81
455
+ weighted avg 0.2598 0.2346 0.2161 81
456
+
457
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
458
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 28.40%
459
+ [Epoch 24], [Batch 0 / 40], [Loss 1.7806099653244019]
460
+ precision recall f1-score support
461
+
462
+ akiec 0.2222 0.1667 0.1905 12
463
+ bcc 0.1724 0.5556 0.2632 9
464
+ bkl 0.0000 0.0000 0.0000 8
465
+ df 0.6000 0.1765 0.2727 17
466
+ mel 0.2308 0.2308 0.2308 13
467
+ nv 0.7500 0.6667 0.7059 9
468
+ vasc 0.4118 0.5385 0.4667 13
469
+
470
+ accuracy 0.3210 81
471
+ macro avg 0.3410 0.3335 0.3042 81
472
+ weighted avg 0.3645 0.3210 0.3051 81
473
+
474
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth
475
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 32.10%
476
+ [Epoch 25], [Batch 0 / 40], [Loss 1.550750494003296]
477
+ precision recall f1-score support
478
+
479
+ akiec 0.3846 0.4167 0.4000 12
480
+ bcc 0.3333 0.2222 0.2667 9
481
+ bkl 0.1538 0.5000 0.2353 8
482
+ df 0.3333 0.0588 0.1000 17
483
+ mel 0.2500 0.0769 0.1176 13
484
+ nv 0.5455 0.6667 0.6000 9
485
+ vasc 0.2222 0.3077 0.2581 13
486
+
487
+ accuracy 0.2840 81
488
+ macro avg 0.3175 0.3213 0.2825 81
489
+ weighted avg 0.3156 0.2840 0.2601 81
490
+
491
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
492
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 32.10%
493
+ [Epoch 26], [Batch 0 / 40], [Loss 1.6319453716278076]
494
+ precision recall f1-score support
495
+
496
+ akiec 0.2222 0.1667 0.1905 12
497
+ bcc 0.0000 0.0000 0.0000 9
498
+ bkl 0.1026 0.5000 0.1702 8
499
+ df 0.5000 0.1765 0.2609 17
500
+ mel 0.0000 0.0000 0.0000 13
501
+ nv 0.7143 0.5556 0.6250 9
502
+ vasc 0.5385 0.5385 0.5385 13
503
+
504
+ accuracy 0.2593 81
505
+ macro avg 0.2968 0.2767 0.2550 81
506
+ weighted avg 0.3138 0.2593 0.2556 81
507
+
508
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
509
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 32.10%
510
+ [Epoch 27], [Batch 0 / 40], [Loss 1.7373251914978027]
511
+ precision recall f1-score support
512
+
513
+ akiec 0.2143 0.2500 0.2308 12
514
+ bcc 0.1111 0.1111 0.1111 9
515
+ bkl 0.0909 0.2500 0.1333 8
516
+ df 0.3333 0.0588 0.1000 17
517
+ mel 0.0769 0.0769 0.0769 13
518
+ nv 0.6667 0.4444 0.5333 9
519
+ vasc 0.4286 0.4615 0.4444 13
520
+
521
+ accuracy 0.2222 81
522
+ macro avg 0.2745 0.2361 0.2328 81
523
+ weighted avg 0.2782 0.2222 0.2236 81
524
+
525
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
526
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 32.10%
527
+ [Epoch 28], [Batch 0 / 40], [Loss 1.520072102546692]
528
+ precision recall f1-score support
529
+
530
+ akiec 0.0769 0.0833 0.0800 12
531
+ bcc 0.0000 0.0000 0.0000 9
532
+ bkl 0.1429 0.6250 0.2326 8
533
+ df 0.5000 0.1765 0.2609 17
534
+ mel 0.1429 0.0769 0.1000 13
535
+ nv 0.7143 0.5556 0.6250 9
536
+ vasc 0.5556 0.3846 0.4545 13
537
+
538
+ accuracy 0.2469 81
539
+ macro avg 0.3046 0.2717 0.2504 81
540
+ weighted avg 0.3219 0.2469 0.2480 81
541
+
542
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
543
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 32.10%
544
+ [Epoch 29], [Batch 0 / 40], [Loss 1.6450930833816528]
545
+ precision recall f1-score support
546
+
547
+ akiec 0.3077 0.3333 0.3200 12
548
+ bcc 0.0000 0.0000 0.0000 9
549
+ bkl 0.0000 0.0000 0.0000 8
550
+ df 0.4118 0.4118 0.4118 17
551
+ mel 0.3846 0.3846 0.3846 13
552
+ nv 0.7500 0.6667 0.7059 9
553
+ vasc 0.4500 0.6923 0.5455 13
554
+
555
+ accuracy 0.3827 81
556
+ macro avg 0.3292 0.3555 0.3382 81
557
+ weighted avg 0.3493 0.3827 0.3615 81
558
+
559
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth
560
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 38.27%
561
+ [Epoch 30], [Batch 0 / 40], [Loss 1.5252735614776611]
562
+ precision recall f1-score support
563
+
564
+ akiec 0.2105 0.3333 0.2581 12
565
+ bcc 0.0000 0.0000 0.0000 9
566
+ bkl 0.2222 0.7500 0.3429 8
567
+ df 0.0000 0.0000 0.0000 17
568
+ mel 0.1000 0.0769 0.0870 13
569
+ nv 0.7500 0.6667 0.7059 9
570
+ vasc 0.6364 0.5385 0.5833 13
571
+
572
+ accuracy 0.2963 81
573
+ macro avg 0.2742 0.3379 0.2824 81
574
+ weighted avg 0.2547 0.2963 0.2581 81
575
+
576
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
577
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 38.27%
578
+ [Epoch 31], [Batch 0 / 40], [Loss 1.5033398866653442]
579
+ precision recall f1-score support
580
+
581
+ akiec 0.2500 0.3333 0.2857 12
582
+ bcc 0.1429 0.1111 0.1250 9
583
+ bkl 0.1579 0.3750 0.2222 8
584
+ df 0.4545 0.2941 0.3571 17
585
+ mel 0.3636 0.3077 0.3333 13
586
+ nv 0.6667 0.6667 0.6667 9
587
+ vasc 0.6250 0.3846 0.4762 13
588
+
589
+ accuracy 0.3457 81
590
+ macro avg 0.3801 0.3532 0.3523 81
591
+ weighted avg 0.3966 0.3457 0.3571 81
592
+
593
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
594
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 38.27%
595
+ [Epoch 32], [Batch 0 / 40], [Loss 1.4207830429077148]
596
+ precision recall f1-score support
597
+
598
+ akiec 0.1333 0.1667 0.1481 12
599
+ bcc 0.0000 0.0000 0.0000 9
600
+ bkl 0.0000 0.0000 0.0000 8
601
+ df 0.3750 0.1765 0.2400 17
602
+ mel 0.3333 0.3846 0.3571 13
603
+ nv 0.6250 0.5556 0.5882 9
604
+ vasc 0.5000 0.5385 0.5185 13
605
+
606
+ accuracy 0.2716 81
607
+ macro avg 0.2810 0.2603 0.2646 81
608
+ weighted avg 0.3016 0.2716 0.2782 81
609
+
610
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
611
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 38.27%
612
+ [Epoch 33], [Batch 0 / 40], [Loss 1.1720422506332397]
613
+ precision recall f1-score support
614
+
615
+ akiec 0.1875 0.2500 0.2143 12
616
+ bcc 0.0000 0.0000 0.0000 9
617
+ bkl 0.2105 0.5000 0.2963 8
618
+ df 0.5000 0.1176 0.1905 17
619
+ mel 0.3333 0.3077 0.3200 13
620
+ nv 0.7143 0.5556 0.6250 9
621
+ vasc 0.4444 0.6154 0.5161 13
622
+
623
+ accuracy 0.3210 81
624
+ macro avg 0.3414 0.3352 0.3089 81
625
+ weighted avg 0.3577 0.3210 0.3046 81
626
+
627
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
628
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 38.27%
629
+ [Epoch 34], [Batch 0 / 40], [Loss 1.3632439374923706]
630
+ precision recall f1-score support
631
+
632
+ akiec 0.0000 0.0000 0.0000 12
633
+ bcc 0.1667 0.2222 0.1905 9
634
+ bkl 0.1600 0.5000 0.2424 8
635
+ df 0.6667 0.2353 0.3478 17
636
+ mel 0.1250 0.0769 0.0952 13
637
+ nv 0.7143 0.5556 0.6250 9
638
+ vasc 0.5833 0.5385 0.5600 13
639
+
640
+ accuracy 0.2840 81
641
+ macro avg 0.3451 0.3041 0.2944 81
642
+ weighted avg 0.3673 0.2840 0.2927 81
643
+
644
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
645
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 38.27%
646
+ [Epoch 35], [Batch 0 / 40], [Loss 1.3819386959075928]
647
+ precision recall f1-score support
648
+
649
+ akiec 0.1667 0.1667 0.1667 12
650
+ bcc 0.1818 0.2222 0.2000 9
651
+ bkl 0.0526 0.1250 0.0741 8
652
+ df 0.5000 0.1765 0.2609 17
653
+ mel 0.2000 0.0769 0.1111 13
654
+ nv 0.3333 0.2222 0.2667 9
655
+ vasc 0.3182 0.5385 0.4000 13
656
+
657
+ accuracy 0.2222 81
658
+ macro avg 0.2504 0.2183 0.2113 81
659
+ weighted avg 0.2752 0.2222 0.2206 81
660
+
661
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
662
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 38.27%
663
+ [Epoch 36], [Batch 0 / 40], [Loss 1.6253221035003662]
664
+ precision recall f1-score support
665
+
666
+ akiec 0.0000 0.0000 0.0000 12
667
+ bcc 0.1176 0.2222 0.1538 9
668
+ bkl 0.0000 0.0000 0.0000 8
669
+ df 0.4444 0.2353 0.3077 17
670
+ mel 0.2500 0.2308 0.2400 13
671
+ nv 0.7500 0.6667 0.7059 9
672
+ vasc 0.4444 0.6154 0.5161 13
673
+
674
+ accuracy 0.2840 81
675
+ macro avg 0.2866 0.2815 0.2748 81
676
+ weighted avg 0.3011 0.2840 0.2815 81
677
+
678
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
679
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 38.27%
680
+ [Epoch 37], [Batch 0 / 40], [Loss 1.507123351097107]
681
+ precision recall f1-score support
682
+
683
+ akiec 0.1667 0.1667 0.1667 12
684
+ bcc 0.2222 0.2222 0.2222 9
685
+ bkl 0.1364 0.3750 0.2000 8
686
+ df 0.5556 0.2941 0.3846 17
687
+ mel 0.3333 0.0769 0.1250 13
688
+ nv 0.5556 0.5556 0.5556 9
689
+ vasc 0.4706 0.6154 0.5333 13
690
+
691
+ accuracy 0.3210 81
692
+ macro avg 0.3486 0.3294 0.3125 81
693
+ weighted avg 0.3702 0.3210 0.3172 81
694
+
695
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
696
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 38.27%
697
+ [Epoch 38], [Batch 0 / 40], [Loss 1.291639804840088]
698
+ precision recall f1-score support
699
+
700
+ akiec 0.3333 0.2500 0.2857 12
701
+ bcc 0.2000 0.3333 0.2500 9
702
+ bkl 0.1111 0.2500 0.1538 8
703
+ df 0.6000 0.1765 0.2727 17
704
+ mel 0.3333 0.2308 0.2727 13
705
+ nv 0.6000 0.6667 0.6316 9
706
+ vasc 0.4667 0.5385 0.5000 13
707
+
708
+ accuracy 0.3333 81
709
+ macro avg 0.3778 0.3494 0.3381 81
710
+ weighted avg 0.4036 0.3333 0.3367 81
711
+
712
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
713
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 38.27%
714
+ [Epoch 39], [Batch 0 / 40], [Loss 1.0484699010849]
715
+ precision recall f1-score support
716
+
717
+ akiec 0.2500 0.2500 0.2500 12
718
+ bcc 0.0000 0.0000 0.0000 9
719
+ bkl 0.0000 0.0000 0.0000 8
720
+ df 0.5714 0.2353 0.3333 17
721
+ mel 0.5000 0.2308 0.3158 13
722
+ nv 0.5000 0.4444 0.4706 9
723
+ vasc 0.4000 0.6154 0.4848 13
724
+
725
+ accuracy 0.2716 81
726
+ macro avg 0.3173 0.2537 0.2649 81
727
+ weighted avg 0.3570 0.2716 0.2878 81
728
+
729
+ Model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/last_model.pth
730
+ [vgg16] Best model saved at: ./OUT_TORCHVISION/HAM10000/2021-12-12-15-09-07/best_model.pth - Accuracy 38.27%
731
+ precision recall f1-score support
732
+
733
+ akiec 0.2500 0.2500 0.2500 12
734
+ bcc 0.0000 0.0000 0.0000 9
735
+ bkl 0.0000 0.0000 0.0000 8
736
+ df 0.3529 0.3529 0.3529 17
737
+ mel 0.2500 0.2308 0.2400 13
738
+ nv 0.7500 0.6667 0.7059 9
739
+ vasc 0.3500 0.5385 0.4242 13
740
+
741
+ accuracy 0.3086 81
742
+ macro avg 0.2790 0.2913 0.2819 81
743
+ weighted avg 0.2907 0.3086 0.2961 81
744
+
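Note on the log format above: the per-epoch tables follow scikit-learn's classification_report layout over the seven HAM10000 classes (akiec, bcc, bkl, df, mel, nv, vasc), and the architecture dump at the top is a stock torchvision VGG16 whose last classifier layer has been swapped for 7 outputs. What follows is only a minimal sketch of an evaluation step that would produce a report of this shape, not the script that actually generated these logs; the model, loader and device names are placeholders.

import torch
import torch.nn as nn
from torchvision import models
from sklearn.metrics import classification_report

CLASSES = ["akiec", "bcc", "bkl", "df", "mel", "nv", "vasc"]

# Assumed model construction: torchvision VGG16 with a 7-class head,
# matching the printed architecture (classifier[6]: Linear(4096, 7)).
model = models.vgg16(pretrained=True)
model.classifier[6] = nn.Linear(4096, len(CLASSES))

def evaluate(model, loader, device="cpu"):
    # Collect predictions and print a per-class report like the tables above.
    model.to(device)
    model.eval()
    y_true, y_pred = [], []
    with torch.no_grad():
        for images, labels in loader:
            logits = model(images.to(device))
            y_pred.extend(logits.argmax(dim=1).cpu().tolist())
            y_true.extend(labels.tolist())
    print(classification_report(
        y_true, y_pred,
        labels=list(range(len(CLASSES))), target_names=CLASSES,
        digits=4, zero_division=0))
    return sum(p == t for p, t in zip(y_pred, y_true)) / len(y_true)

The alternating best_model.pth / last_model.pth lines above suggest the current weights are written every epoch, under the "best" name only when validation accuracy improves.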
models/VGG16/logs/test_logs_acc_2021-12-12-15-09-07.txt ADDED
@@ -0,0 +1,40 @@
1
+ 0,0.09876543209876543
2
+ 1,0.13580246913580246
3
+ 2,0.13580246913580246
4
+ 3,0.14814814814814814
5
+ 4,0.13580246913580246
6
+ 5,0.1111111111111111
7
+ 6,0.09876543209876543
8
+ 7,0.1111111111111111
9
+ 8,0.13580246913580246
10
+ 9,0.2222222222222222
11
+ 10,0.14814814814814814
12
+ 11,0.16049382716049382
13
+ 12,0.09876543209876543
14
+ 13,0.14814814814814814
15
+ 14,0.18518518518518517
16
+ 15,0.16049382716049382
17
+ 16,0.2222222222222222
18
+ 17,0.20987654320987653
19
+ 18,0.1728395061728395
20
+ 19,0.19753086419753085
21
+ 20,0.1728395061728395
22
+ 21,0.2839506172839506
23
+ 22,0.25925925925925924
24
+ 23,0.2345679012345679
25
+ 24,0.32098765432098764
26
+ 25,0.2839506172839506
27
+ 26,0.25925925925925924
28
+ 27,0.2222222222222222
29
+ 28,0.24691358024691357
30
+ 29,0.38271604938271603
31
+ 30,0.2962962962962963
32
+ 31,0.345679012345679
33
+ 32,0.2716049382716049
34
+ 33,0.32098765432098764
35
+ 34,0.2839506172839506
36
+ 35,0.2222222222222222
37
+ 36,0.2839506172839506
38
+ 37,0.32098765432098764
39
+ 38,0.3333333333333333
40
+ 39,0.2716049382716049
models/VGG16/logs/train_logs_acc_2021-12-12-15-09-07.txt ADDED
@@ -0,0 +1,40 @@
1
+ 0,0.1574585635359116
2
+ 1,0.14502762430939226
3
+ 2,0.12845303867403315
4
+ 3,0.13397790055248618
5
+ 4,0.1505524861878453
6
+ 5,0.17403314917127072
7
+ 6,0.1574585635359116
8
+ 7,0.13674033149171272
9
+ 8,0.15883977900552487
10
+ 9,0.18922651933701656
11
+ 10,0.1643646408839779
12
+ 11,0.2085635359116022
13
+ 12,0.2223756906077348
14
+ 13,0.1298342541436464
15
+ 14,0.1671270718232044
16
+ 15,0.2085635359116022
17
+ 16,0.23066298342541436
18
+ 17,0.26795580110497236
19
+ 18,0.25828729281767954
20
+ 19,0.27900552486187846
21
+ 20,0.27486187845303867
22
+ 21,0.2900552486187845
23
+ 22,0.3328729281767956
24
+ 23,0.2969613259668508
25
+ 24,0.319060773480663
26
+ 25,0.3218232044198895
27
+ 26,0.3674033149171271
28
+ 27,0.36049723756906077
29
+ 28,0.36464088397790057
30
+ 29,0.3770718232044199
31
+ 30,0.36187845303867405
32
+ 31,0.3825966850828729
33
+ 32,0.393646408839779
34
+ 33,0.4102209944751381
35
+ 34,0.43232044198895025
36
+ 35,0.4447513812154696
37
+ 36,0.4488950276243094
38
+ 37,0.48342541436464087
39
+ 38,0.46685082872928174
40
+ 39,0.5138121546961326
models/VGG16/logs/train_logs_loss_2021-12-12-15-09-07.txt ADDED
@@ -0,0 +1,40 @@
1
+ 0,2.1033501625061035
2
+ 1,1.9651769399642944
3
+ 2,1.9515185356140137
4
+ 3,1.9505870342254639
5
+ 4,1.948213815689087
6
+ 5,1.9330511093139648
7
+ 6,1.9702658653259277
8
+ 7,1.9751051664352417
9
+ 8,1.9461535215377808
10
+ 9,1.9179728031158447
11
+ 10,1.9219279289245605
12
+ 11,1.8998744487762451
13
+ 12,1.8848860263824463
14
+ 13,1.9483599662780762
15
+ 14,1.9225177764892578
16
+ 15,1.8910245895385742
17
+ 16,1.8522323369979858
18
+ 17,1.793922781944275
19
+ 18,1.7878378629684448
20
+ 19,1.7371615171432495
21
+ 20,1.7406737804412842
22
+ 21,1.7121121883392334
23
+ 22,1.678593397140503
24
+ 23,1.677600622177124
25
+ 24,1.6530859470367432
26
+ 25,1.6229276657104492
27
+ 26,1.620635747909546
28
+ 27,1.6058361530303955
29
+ 28,1.5711954832077026
30
+ 29,1.5438716411590576
31
+ 30,1.5547322034835815
32
+ 31,1.547311544418335
33
+ 32,1.5458500385284424
34
+ 33,1.481902837753296
35
+ 34,1.443493366241455
36
+ 35,1.422087550163269
37
+ 36,1.4286044836044312
38
+ 37,1.3534986972808838
39
+ 38,1.3937019109725952
40
+ 39,1.3290692567825317
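The three files above (test_logs_acc, train_logs_acc, train_logs_loss) are plain epoch,value pairs: test accuracy peaks at 38.27% at epoch 29 while training accuracy keeps climbing to roughly 51%, consistent with the per-epoch reports in the main log. A purely illustrative way to plot them (file paths taken from this commit, everything else assumed):

import matplotlib.pyplot as plt

def read_log(path):
    # Each line is "epoch,value".
    with open(path) as f:
        pairs = [line.strip().split(",") for line in f if line.strip()]
    return [int(e) for e, _ in pairs], [float(v) for _, v in pairs]

base = "models/VGG16/logs"
epochs, test_acc = read_log(f"{base}/test_logs_acc_2021-12-12-15-09-07.txt")
_, train_acc = read_log(f"{base}/train_logs_acc_2021-12-12-15-09-07.txt")
_, train_loss = read_log(f"{base}/train_logs_loss_2021-12-12-15-09-07.txt")

fig, (ax_acc, ax_loss) = plt.subplots(1, 2, figsize=(10, 4))
ax_acc.plot(epochs, train_acc, label="train accuracy")
ax_acc.plot(epochs, test_acc, label="test accuracy")
ax_acc.set_xlabel("epoch")
ax_acc.legend()
ax_loss.plot(epochs, train_loss, label="train loss")
ax_loss.set_xlabel("epoch")
ax_loss.legend()
plt.tight_layout()
plt.show()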
ressources/models.csv ADDED
@@ -0,0 +1,6 @@
1
+ Model;Accuracy;Size
2
+ VGG16;38.27%;512.0 MB
3
+ DeiT;71.60%;327.0 MB
4
+ DenseNet121;77.78%;27.1 MB
5
+ MobileNetV2;75.31%;8.77 MB
6
+ ShuffleNetV2;76.54%;4.99 MB
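models.csv summarises each exported checkpoint: best test accuracy and on-disk size, separated by semicolons. DenseNet121 gives the highest accuracy (77.78%) at only 27.1 MB, while VGG16 is both the largest (512.0 MB) and the weakest (38.27%, matching the best epoch in the log above). A small, assumed sketch for reading it (the app itself may load it differently):

import pandas as pd

models = pd.read_csv("ressources/models.csv", sep=";")
models["Accuracy (%)"] = models["Accuracy"].str.rstrip("%").astype(float)
print(models.sort_values("Accuracy (%)", ascending=False).to_string(index=False))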
ressources/thumbnail.png ADDED