Started at: 12:34:41
norbert2, 0.001, 128
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
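The header records the run's hyperparameters ("norbert2", a learning rate of 0.001, and 128, presumably the batch size or maximum sequence length) followed by a dump of the pretrained BERT configuration. Below is a minimal sketch of how such a header could be produced with Hugging Face transformers; the checkpoint name "ltg/norbert2", the get_config_dict call, and the single-output regression head are assumptions, not details taken from the log.

# Hypothetical reconstruction of the run header; checkpoint name and calls are assumptions.
from datetime import datetime
from transformers import BertConfig, AutoTokenizer, AutoModelForSequenceClassification

MODEL_NAME = "ltg/norbert2"   # assumed Hugging Face checkpoint behind "norbert2"
LEARNING_RATE = 0.001         # second field of the hyperparameter line
MAX_LENGTH = 128              # third field (could also be the batch size)

print(f"Started at: {datetime.now():%H:%M:%S}")
print(f"norbert2, {LEARNING_RATE}, {MAX_LENGTH}")

# get_config_dict returns a (config_dict, unused_kwargs) tuple, which matches
# the "({...}, {})" dump recorded in the log.
print(BertConfig.get_config_dict(MODEL_NAME))

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
# Single-output head so the model can be fine-tuned as a regressor (the log tracks MAE).
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME, num_labels=1)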
Epoch: 0
Training loss: 0.2549108934402466 - MAE: 0.3869476704225354
Validation loss : 0.17967465169289532 - MAE: 0.32511726772455946
Epoch: 1
Training loss: 0.18461654603481292 - MAE: 0.32631207394936323
Validation loss : 0.16705343916135676 - MAE: 0.3136336903292028
Epoch: 2
Training loss: 0.1698657414317131 - MAE: 0.312740198772678
Validation loss : 0.16012747936389027 - MAE: 0.3062873321185852
Epoch: 3
Training loss: 0.16400991275906562 - MAE: 0.30695357768639636
Validation loss : 0.1566739748505985 - MAE: 0.3014212709482245
Epoch: 4
Training loss: 0.15812634140253068 - MAE: 0.30061261922994836
Validation loss : 0.15389862744247212 - MAE: 0.29856862971490655
Epoch: 5
Training loss: 0.15597046166658401 - MAE: 0.2974718498748344
Validation loss : 0.1514885302852182 - MAE: 0.2959877719065883
Epoch: 6
Training loss: 0.15466642245650292 - MAE: 0.2970412442646543
Validation loss : 0.14941750554477468 - MAE: 0.2942382126775654
Epoch: 7
Training loss: 0.15143263101577759 - MAE: 0.29342989017337273
Validation loss : 0.1488131617798525 - MAE: 0.29307345811839686
Epoch: 8
Training loss: 0.14901014417409897 - MAE: 0.2907700225602062
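Each epoch block above looks like the output of a standard train/evaluate loop that minimizes mean squared error and also reports mean absolute error on both splits. The following is a minimal sketch of such a loop, assuming PyTorch, a sequence-classification model with one regression output, and DataLoaders yielding input_ids, attention_mask, and float labels; every name below is illustrative rather than taken from the log.

# Hypothetical training loop that would emit the per-epoch lines above.
import torch

def run_epochs(model, train_loader, val_loader, epochs=30, lr=0.001, device="cuda"):
    model.to(device)
    optimizer = torch.optim.AdamW(model.parameters(), lr=lr)
    mse, mae = torch.nn.MSELoss(), torch.nn.L1Loss()
    for epoch in range(epochs):
        print(f"Epoch: {epoch}")
        # Training pass: optimize MSE, track MAE for reporting.
        model.train()
        tr_loss, tr_mae, n = 0.0, 0.0, 0
        for batch in train_loader:
            ids = batch["input_ids"].to(device)
            mask = batch["attention_mask"].to(device)
            labels = batch["labels"].float().to(device)
            preds = model(input_ids=ids, attention_mask=mask).logits.squeeze(-1)
            loss = mse(preds, labels)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            tr_loss += loss.item(); tr_mae += mae(preds, labels).item(); n += 1
        print(f"Training loss: {tr_loss / n} - MAE: {tr_mae / n}")
        # Validation pass: no gradient updates, same metrics.
        model.eval()
        va_loss, va_mae, m = 0.0, 0.0, 0
        with torch.no_grad():
            for batch in val_loader:
                ids = batch["input_ids"].to(device)
                mask = batch["attention_mask"].to(device)
                labels = batch["labels"].float().to(device)
                preds = model(input_ids=ids, attention_mask=mask).logits.squeeze(-1)
                va_loss += mse(preds, labels).item(); va_mae += mae(preds, labels).item(); m += 1
        print(f"Validation loss : {va_loss / m} - MAE: {va_mae / m}")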