Started at: 12:43:11 ({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '44815f7e109b53547cccdf3c6847f4c28b989816'}, {})
Epoch: 0
Training loss: 0.2008972004489991 - MSE: 0.33518312736635436
Validation loss : 0.1646703563630581 - MSE: 0.3085779357984263
Epoch: 1
Training loss: 0.1659574282256145 - MSE: 0.3064189085675543
Validation loss : 0.157762396974223 - MSE: 0.28898105177130284
Epoch: 2
Training loss: 0.16920435048856783 - MSE: 0.3099259860789372
Validation loss : 0.2597339808940887 - MSE: 0.41778062962694096
Epoch: 3
Training loss: 0.18403085151198997 - MSE: 0.3220045950051232
Validation loss : 0.2158472946711949 - MSE: 0.37326406195393896
Epoch: 4
Training loss: 0.18172540587996974 - MSE: 0.32080418783672165
Validation loss : 0.180368021769183 - MSE: 0.33231508495186324
Epoch: 5
Training loss: 0.17783939802241558 - MSE: 0.31700912658652863
Validation loss : 0.17726878098079135 - MSE: 0.33020463892025875
Epoch: 6
Training loss: 0.17681393569823609 - MSE: 0.3154852375991437
Validation loss : 0.17508771706904683 - MSE: 0.32495340255388455
Epoch: 7
Training loss: 0.18098733559684846 - MSE: 0.3195274022724451
Validation loss : 0.17311148579631533 - MSE: 0.3220135453306804
Epoch: 8
Training loss: 0.18061703660534423 - MSE: 0.3191405354137568
Validation loss : 0.17032980706010545 - MSE: 0.3176248594869061
Epoch: 9
Training loss: 0.18142556370172686 - MSE: 0.319926894033844
Validation loss : 0.1691651236798082 - MSE: 0.31562867300485126
Epoch: 10
Training loss: 0.18212312047631996 - MSE: 0.32070363034661087
Validation loss : 0.16798930146864482 - MSE: 0.31360456355614585
Epoch: 11
Training loss: 0.18196018481428183 - MSE: 0.3207149931643255
Validation loss : 0.16837686087403977 - MSE: 0.31434407412639953
Epoch: 12
Training loss: 0.1814197617248424 - MSE: 0.3202695122298144
Validation loss : 0.16799633439098086 - MSE: 0.3135975342526633
Epoch: 13
Training loss: 0.18225394370197093 - MSE: 0.3212486534658962
Validation loss : 0.16925174317189626 - MSE: 0.3159311910575655
Epoch: 14
Training loss: 0.18245843891958588 - MSE: 0.3215480212902345
Validation loss : 0.16824361362627574 - MSE: 0.31412619200834474
Epoch: 15
Training loss: 0.1803396447624975 - MSE: 0.3195030057718649
Validation loss : 0.1688083845589842 - MSE: 0.3150487916560711
Epoch: 16
Training loss: 0.17986665409311509 - MSE: 0.3195031756770499
Validation loss : 0.22056829929351807 - MSE: 0.37911737345857544
Epoch: 17
Training loss: 0.18849548813208794 - MSE: 0.3279959400740679
Validation loss : 0.1744179646883692 - MSE: 0.32440157115093565
Epoch: 18
Training loss: 0.18259398508997796 - MSE: 0.32226052051809423
Validation loss : 0.17137913001435143 - MSE: 0.32016465139874656
Epoch: 19
Training loss: 0.17060092156662524 - MSE: 0.3103650882393749
Validation loss : 0.16893972903490068 - MSE: 0.3152596680996274
Epoch: 20
Training loss: 0.18305443567269056 - MSE: 0.32248776308938315
Validation loss : 0.16867054679564067 - MSE: 0.3147993311036511
Epoch: 21
Training loss: 0.18154647418306868 - MSE: 0.3211529577614463
Validation loss : 0.16895278968981334 - MSE: 0.31527812329420285
Epoch: 22
Training loss: 0.18302884618344817 - MSE: 0.32261301473818527
Validation loss : 0.16908632452998842 - MSE: 0.31549884642507614
Epoch: 23
Training loss: 0.18315604886094344 - MSE: 0.3228350356322461
Validation loss : 0.16936154067516326 - MSE: 0.31595188302287297
Epoch: 24
Training loss: 0.18450002827979986 - MSE: 0.3239839581523096
Validation loss : 0.16373934490340097 - MSE: 0.305049752951267
Epoch: 25
Training loss: 0.18205712786287936 - MSE: 0.32188173152184024
Validation loss : 0.16982759663036892 - MSE: 0.31673403784266807
Epoch: 26
Training loss: 0.18361150451655528 - MSE: 0.32347288576566585
Validation loss : 0.17002783162253243 - MSE: 0.31706197823431076
Epoch: 27
Training loss: 0.1837405093665262 - MSE: 0.3236623950004521
Validation loss : 0.1702466935983726 - MSE: 0.3174226579672125
Epoch: 28
Training loss: 0.18387084331327272 - MSE: 0.32384873795983415
Validation loss : 0.1704471853162561 - MSE: 0.3177521555301999
Epoch: 29
Training loss: 0.18399080008413027 - MSE: 0.32403765795752404
Validation loss : 0.17062974699905942 - MSE: 0.3180521837053155
Epoch: 30
Training loss: 0.18410175344319019 - MSE: 0.32420901859116386
Validation loss : 0.1707934788295201 - MSE: 0.3183189065079205
Epoch: 31
Training loss: 0.18486136588656787 - MSE: 0.3247255681061213
Validation loss : 0.1643158195274217 - MSE: 0.3063969220184455
Epoch: 32
Training loss: 0.18248796441311976 - MSE: 0.32267628750574145
Validation loss : 0.17093155086040496 - MSE: 0.31855101221008225
Epoch: 33
Training loss: 0.18433468477818574 - MSE: 0.3245507782611819
Validation loss : 0.17107231404100146 - MSE: 0.31876078607414715
Epoch: 34
Training loss: 0.1844262485075923 - MSE: 0.3244452163176312
Validation loss : 0.16882688126393727 - MSE: 0.3150706890188823
Epoch: 35
Training loss: 0.18392424524119758 - MSE: 0.324205654223876
Validation loss : 0.17102763865675244 - MSE: 0.31873233262615813
Epoch: 36
Training loss: 0.18444539064053192 - MSE: 0.32476290058541424
Validation loss : 0.1710618957877159 - MSE: 0.3187860621688222
Epoch: 37
Training loss: 0.18447213949075023 - MSE: 0.3248193484842264
Validation loss : 0.17118607461452484 - MSE: 0.3189849752151141
Epoch: 38
Training loss: 0.1845299972275507 - MSE: 0.32490489942336864
Validation loss : 0.17124629765748978 - MSE: 0.31907812333166863
Epoch: 39
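
The per-epoch lines above follow a fixed pattern, so the best checkpoint can be read off programmatically. The Python sketch below is illustrative only and is not part of the training code that produced this log: it assumes the log is stored line by line as shown, and the file name "training.log" is a hypothetical placeholder.

import re

# Matches the epoch headers, e.g. "Epoch: 24"
EPOCH_RE = re.compile(r"Epoch:\s*(\d+)")
# Matches both "Training loss: L - MSE: M" and "Validation loss : L - MSE: M"
LOSS_RE = re.compile(r"(Training|Validation) loss\s*:\s*([0-9.]+)\s*-\s*MSE:\s*([0-9.]+)")

def parse_log(path):
    """Return a dict: epoch -> {'train': (loss, mse), 'val': (loss, mse)}."""
    epochs, current = {}, None
    with open(path) as fh:
        for line in fh:
            m = EPOCH_RE.search(line)
            if m:
                current = int(m.group(1))
                epochs[current] = {}
                continue
            m = LOSS_RE.search(line)
            if m and current is not None:
                key = "train" if m.group(1) == "Training" else "val"
                epochs[current][key] = (float(m.group(2)), float(m.group(3)))
    return epochs

if __name__ == "__main__":
    epochs = parse_log("training.log")  # hypothetical file name
    # Keep only epochs that logged a validation line (epoch 39 above has none)
    complete = {e: v for e, v in epochs.items() if "val" in v}
    best = min(complete, key=lambda e: complete[e]["val"][1])
    print(f"Best validation MSE: epoch {best} -> {complete[best]['val'][1]:.4f}")

Applied to the numbers above, this reports epoch 1 (validation MSE of roughly 0.289); none of the remaining epochs improve on it, the closest being epoch 24 at roughly 0.305.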