Started at: 12:34:41
norbert2, 0.001, 128
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
Epoch: 0
Training loss: 0.2549108934402466 - MAE: 0.3869476704225354
Validation loss : 0.17967465169289532 - MAE: 0.32511726772455946
Epoch: 1
Training loss: 0.18461654603481292 - MAE: 0.32631207394936323
Validation loss : 0.16705343916135676 - MAE: 0.3136336903292028
Epoch: 2
Training loss: 0.1698657414317131 - MAE: 0.312740198772678
Validation loss : 0.16012747936389027 - MAE: 0.3062873321185852
Epoch: 3
Training loss: 0.16400991275906562 - MAE: 0.30695357768639636
Validation loss : 0.1566739748505985 - MAE: 0.3014212709482245
Epoch: 4
Training loss: 0.15812634140253068 - MAE: 0.30061261922994836
Validation loss : 0.15389862744247212 - MAE: 0.29856862971490655
Epoch: 5
Training loss: 0.15597046166658401 - MAE: 0.2974718498748344
Validation loss : 0.1514885302852182 - MAE: 0.2959877719065883
Epoch: 6
Training loss: 0.15466642245650292 - MAE: 0.2970412442646543
Validation loss : 0.14941750554477468 - MAE: 0.2942382126775654
Epoch: 7
Training loss: 0.15143263101577759 - MAE: 0.29342989017337273
Validation loss : 0.1488131617798525 - MAE: 0.29307345811839686
Epoch: 8
Training loss: 0.14901014417409897 - MAE: 0.2907700225602062
Validation loss : 0.14839802638572805 - MAE: 0.2904832100053706
Epoch: 9
Training loss: 0.1495199640095234 - MAE: 0.29170948130625873
Validation loss : 0.1499759489122559 - MAE: 0.2957880675924255
Epoch: 10
Training loss: 0.14821490436792373 - MAE: 0.2908893913009485
Validation loss : 0.14694484104128444 - MAE: 0.28966224887354336
Epoch: 11
Training loss: 0.1440611906349659 - MAE: 0.2881480384518122
Validation loss : 0.1460408726159264 - MAE: 0.28973676685378813
Epoch: 12
Training loss: 0.14535644724965097 - MAE: 0.286966718140979
Validation loss : 0.1502678460058044 - MAE: 0.290588582627604
Epoch: 13
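
For reference, a minimal sketch of the kind of fine-tuning loop that would emit log lines in this format. It assumes (the log itself does not confirm) that "norbert2, 0.001, 128" means the "ltg/norbert2" checkpoint, learning rate 0.001, and batch size 128, that the task is single-target regression trained with MSE loss, and that MAE is averaged over batches each epoch. Names such as RegressionHead, run_epoch, and the loader fields are hypothetical.

import torch
from torch import nn
from transformers import AutoConfig, AutoModel

MODEL_NAME = "ltg/norbert2"   # assumed checkpoint; the log only says "norbert2"
LR, EPOCHS = 0.001, 14        # batch size 128 is assumed to be set in the DataLoaders

class RegressionHead(nn.Module):
    """BERT encoder with a single-output linear regression head (assumed setup)."""
    def __init__(self, name: str):
        super().__init__()
        self.config = AutoConfig.from_pretrained(name)
        self.encoder = AutoModel.from_pretrained(name)
        self.head = nn.Linear(self.config.hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        out = self.encoder(input_ids=input_ids, attention_mask=attention_mask)
        # Pool with the [CLS] position and predict one scalar per example.
        return self.head(out.last_hidden_state[:, 0]).squeeze(-1)

def run_epoch(model, loader, optimizer=None, device="cuda"):
    """One pass over `loader`; returns (mean MSE loss, mean MAE) over batches."""
    training = optimizer is not None
    model.train(training)
    mse, mae = nn.MSELoss(), nn.L1Loss()
    total_loss = total_mae = n_batches = 0
    for batch in loader:  # batches assumed to hold tokenized inputs and a "label" field
        ids = batch["input_ids"].to(device)
        mask = batch["attention_mask"].to(device)
        target = batch["label"].float().to(device)
        with torch.set_grad_enabled(training):
            pred = model(ids, mask)
            loss = mse(pred, target)
        if training:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += mae(pred, target).item()
        n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

def train(model, train_loader, val_loader, device="cuda"):
    model.to(device)
    optimizer = torch.optim.AdamW(model.parameters(), lr=LR)
    for epoch in range(EPOCHS):
        tr_loss, tr_mae = run_epoch(model, train_loader, optimizer, device)
        va_loss, va_mae = run_epoch(model, val_loader, None, device)
        # Reproduces the log line format seen above.
        print(f"Epoch: {epoch}")
        print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
        print(f"Validation loss : {va_loss} - MAE: {va_mae}")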