Started at: 21:04:23
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
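The dump above is the Hugging Face model configuration the run started from: a BERT-base architecture (12 layers, hidden size 768, 12 attention heads) with a 119,547-token vocabulary, loaded from the local step4_8 checkpoint. Below is a minimal loading sketch under those assumptions; the single-output regression head (num_labels=1, problem_type="regression") is inferred from the MSE metrics logged per epoch, not stated anywhere in the log itself.

    # Minimal loading sketch (an assumption, not taken from the original script):
    # reconstructs the model from the config printed above via the standard
    # transformers API. problem_type="regression" with num_labels=1 makes
    # BertForSequenceClassification compute MSELoss, matching the MSE values
    # reported per epoch below.
    from transformers import BertConfig, BertForSequenceClassification

    ckpt = "/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/"

    config = BertConfig.from_pretrained(
        ckpt,
        num_labels=1,                 # single regression target (assumed)
        problem_type="regression",    # selects MSELoss inside the model
    )
    model = BertForSequenceClassification.from_pretrained(ckpt, config=config)

The vocab_size of 119547 matches bert-base-multilingual-cased, which suggests the tokenizer would come from that model or from files in the same checkpoint directory.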
Epoch: 0
Training loss: 0.21613560179338873 - MSE: 0.3443637452993947
Validation loss : 0.16539869446839606 - MSE: 0.3086538036769655
Epoch: 1
Training loss: 0.1837235710863928 - MSE: 0.32413622182287605
Validation loss : 0.16982820672648294 - MSE: 0.31674233427703646
Epoch: 2
Training loss: 0.18304820250249604 - MSE: 0.3239108068413553
Validation loss : 0.17052161650998252 - MSE: 0.31788417737032953
Epoch: 3
Training loss: 0.18448016108963097 - MSE: 0.3252394673594232
Validation loss : 0.17072700315288136 - MSE: 0.3182306206402635
Epoch: 4
Training loss: 0.184644706619596 - MSE: 0.32518134408631855
Validation loss : 0.17120066466076034 - MSE: 0.31901036988045756
Epoch: 5
Training loss: 0.18460615753259474 - MSE: 0.3252760797817471
Validation loss : 0.17181508860417774 - MSE: 0.3200009087117256
Epoch: 6
Training loss: 0.18562242764871098 - MSE: 0.3263451638426147
Validation loss : 0.1719757279115064 - MSE: 0.32025919894721094
Epoch: 7
Training loss: 0.1845672134924861 - MSE: 0.3252300495699698
Validation loss : 0.16974811958415167 - MSE: 0.3166154321565825
Epoch: 8
Training loss: 0.18489993262348822 - MSE: 0.3254686886041067
Validation loss : 0.1729555284338338 - MSE: 0.3217263126762451
Epoch: 9
Training loss: 0.18432768325782517 - MSE: 0.3248599418213628
Validation loss : 0.1708770450736795 - MSE: 0.31844797706663874
Epoch: 10
Training loss: 0.1825945836681764 - MSE: 0.3229734198364271
Validation loss : 0.17286965453198977 - MSE: 0.3215693459729664
Epoch: 11
Training loss: 0.18539906857372487 - MSE: 0.3260377252207724
Validation loss : 0.17201036236115863 - MSE: 0.32026005895708554
Epoch: 12
Training loss: 0.1842774318404568 - MSE: 0.32480808348019835
Validation loss : 0.17158785260149412 - MSE: 0.3195950089571332
Epoch: 13
Training loss: 0.1854605678360439 - MSE: 0.32605868234166485
Validation loss : 0.16932314804622106 - MSE: 0.31585823518523415
Epoch: 14
Training loss: 0.18442078412157817 - MSE: 0.3251032140988794
Validation loss : 0.17218789415700095 - MSE: 0.3203968357577521
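For convenience, a hypothetical parsing sketch that was not part of the original run: it scans the epoch blocks above (assuming the exact "Epoch:" / "Training loss:" / "Validation loss :" format, including the space before the second colon) and reports the epoch with the lowest validation loss, which in this log is epoch 0 at roughly 0.1654.

    # Hypothetical log-parsing sketch (not part of the original training script).
    # Assumes the log text is saved to a file; "training.log" is a placeholder name.
    import re

    def best_epoch(log_text: str):
        # Each match: (epoch, train_loss, train_mse, val_loss, val_mse)
        epochs = re.findall(
            r"Epoch: (\d+)\s*\n"
            r"Training loss: ([\d.]+) - MSE: ([\d.]+)\s*\n"
            r"Validation loss : ([\d.]+) - MSE: ([\d.]+)",
            log_text,
        )
        # Pick the block with the smallest validation loss.
        return min(epochs, key=lambda row: float(row[3]))

    with open("training.log") as f:
        epoch, tr_loss, tr_mse, val_loss, val_mse = best_epoch(f.read())
    print(f"Best epoch {epoch}: validation loss {val_loss}, MSE {val_mse}")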