Started at: 11:50:08
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})
Epoch: 0
Training loss: 0.19169998451283105 - MSE: 0.3224822171590252
Validation loss : 0.1545075431931764 - MSE: 0.30456596715475825
Epoch: 1
Training loss: 0.16974398517294934 - MSE: 0.3103326546437019
Validation loss : 0.15334105514921248 - MSE: 0.3032539253831601
Epoch: 2
Training loss: 0.16987535333947132 - MSE: 0.3104551468798216
Validation loss : 0.15374588128179312 - MSE: 0.30371004876315055
Epoch: 3
Training loss: 0.16981600977872546 - MSE: 0.31038815772414813
Validation loss : 0.15414767409674823 - MSE: 0.3041637827186605
Epoch: 4
Training loss: 0.16997354869779788 - MSE: 0.3106644945154869
Validation loss : 0.15224769292399287 - MSE: 0.30202376234910844
Epoch: 5
Training loss: 0.17002245697535967 - MSE: 0.3105952131827796
Validation loss : 0.15425994875840843 - MSE: 0.3042897454938611
Epoch: 6
Training loss: 0.1700476981307331 - MSE: 0.310650163816472
Validation loss : 0.15435221185907722 - MSE: 0.3043926291643402
Epoch: 7
Training loss: 0.16941748441834198 - MSE: 0.31001210870889107
Validation loss : 0.1539384766947478 - MSE: 0.30392832597817687
Epoch: 8
Training loss: 0.16997813641240722 - MSE: 0.3105949710586207
Validation loss : 0.15462648100219667 - MSE: 0.30469803253754435
Epoch: 9
Training loss: 0.17010739684889192 - MSE: 0.31050279334273895
Validation loss : 0.155730452388525 - MSE: 0.30595527761715857
Epoch: 10
Training loss: 0.17010818978673534 - MSE: 0.31067252789705496
Validation loss : 0.15445392183028162 - MSE: 0.3045059578503242
Epoch: 11
Training loss: 0.1698176296133744 - MSE: 0.3104176910766342
Validation loss : 0.1544747839216143 - MSE: 0.30452919958725033
Epoch: 12
Training loss: 0.16994338412033885 - MSE: 0.3105494737531266
Validation loss : 0.15396608458831906 - MSE: 0.30395947231545506
Epoch: 13
Training loss: 0.1698948621357742 - MSE: 0.3105059461998054
Validation loss : 0.1545115364715457 - MSE: 0.3045701227160862
Epoch: 14
Training loss: 0.1699589282666382 - MSE: 0.31056594253224645
Validation loss : 0.1545204329304397 - MSE: 0.304580016388627
Epoch: 15
Training loss: 0.16995860153907225 - MSE: 0.3105668987295108
Validation loss : 0.15452667814679444 - MSE: 0.3045869592820054
Epoch: 16
Training loss: 0.1698562132684808 - MSE: 0.31058698633003384
Validation loss : 0.15058960486203432 - MSE: 0.30260322473577617
Epoch: 17
Training loss: 0.16920853351291856 - MSE: 0.30992660221922314
Validation loss : 0.15110802161507308 - MSE: 0.303181744856829
Epoch: 18
Training loss: 0.16951832692874105 - MSE: 0.31024539454754047
Validation loss : 0.15118786157108843 - MSE: 0.3032720424012041
Epoch: 19
Training loss: 0.16952366397569055 - MSE: 0.31024719080762236
Validation loss : 0.15118418936617672 - MSE: 0.303267858201707
Epoch: 20
Training loss: 0.16951929793546075 - MSE: 0.3102413832574418
Validation loss : 0.1511813565157354 - MSE: 0.30326463116898594
Epoch: 21