Started at: 15:06:15
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})
Epoch: 0
Training loss: 0.19173999391134502 - MSE: 0.3262300431900936
Validation loss : 0.1707202664443425 - MSE: 0.3181949529015193
Epoch: 1
Training loss: 0.1834320445517892 - MSE: 0.3230671429831198
Validation loss : 0.17099061012268066 - MSE: 0.31859832233250407
Epoch: 2
Training loss: 0.18393246207422423 - MSE: 0.3238126735351295
Validation loss : 0.17154425446476254 - MSE: 0.31947564650493276
Epoch: 3
Training loss: 0.18445654531705727 - MSE: 0.32449193418262723
Validation loss : 0.17155078649520875 - MSE: 0.31949343827852444
Epoch: 4
Training loss: 0.18468433119428967 - MSE: 0.3248966974226236
Validation loss : 0.17185703590512275 - MSE: 0.31997947147548467
Epoch: 5
Training loss: 0.18483373783167126 - MSE: 0.32512973743557905
Validation loss : 0.1718519987804549 - MSE: 0.3199738939708498
Epoch: 6
Training loss: 0.18488002355092936 - MSE: 0.32523622541441644
Validation loss : 0.17180264528308595 - MSE: 0.3198964095291948
Epoch: 7
Training loss: 0.18488125423493895 - MSE: 0.3252789020479855
Validation loss : 0.17174329182931355 - MSE: 0.3198061523443487
Epoch: 8
Training loss: 0.18489378878792512 - MSE: 0.3253228091660482
Validation loss : 0.17357715804662024 - MSE: 0.3226380277707774
Epoch: 9
Training loss: 0.18595946008719286 - MSE: 0.32635185643103526
Validation loss : 0.17145181340830667 - MSE: 0.31935092495654577
Epoch: 10
Training loss: 0.18484974442755134 - MSE: 0.3253414797140243
Validation loss : 0.171514799871615 - MSE: 0.31944891148013993
Epoch: 11
Training loss: 0.1848593037394644 - MSE: 0.3253549544164942
Validation loss : 0.17148066344005722 - MSE: 0.3193935607975748
Epoch: 12
Training loss: 0.18484833496577532 - MSE: 0.3253483776600709
Validation loss : 0.17145045633826936 - MSE: 0.3193448287850645
Epoch: 13
Training loss: 0.18483807320154985 - MSE: 0.32534201221261033
Validation loss : 0.1714242550943579 - MSE: 0.3193027320467601
Epoch: 14
Training loss: 0.184799948146621 - MSE: 0.32530459791156613
Validation loss : 0.17139992096594403 - MSE: 0.3192637658295488
Epoch: 15
Training loss: 0.18460696313566374 - MSE: 0.32509753975495237
Validation loss : 0.1713372460433415 - MSE: 0.3191811611360338
Epoch: 16
Training loss: 0.18478047471601985 - MSE: 0.32529885939908587
Validation loss : 0.17135789075068064 - MSE: 0.31920771904988215
Epoch: 17
Training loss: 0.18478687360738089 - MSE: 0.3253013729808102
Validation loss : 0.17135313632232801 - MSE: 0.3191962242941372
Epoch: 18
Training loss: 0.18478634794360227 - MSE: 0.3252988952896622
Validation loss : 0.17134608296411377 - MSE: 0.31918270404483856
Epoch: 19
Training loss: 0.18478439364907812 - MSE: 0.32529616494054064
Validation loss : 0.1713385373353958 - MSE: 0.3191693609064844
Epoch: 20
Training loss: 0.18478169060737185 - MSE: 0.3252932951001202
Validation loss : 0.17133102927889143 - MSE: 0.31915661807205264
Epoch: 21
Training loss: 0.18477885548061537 - MSE: 0.325290603982356
Validation loss : 0.17132410705089568 - MSE: 0.31914513406643114
Epoch: 22
Training loss: 0.18477606650405717 - MSE: 0.3252881301248911
Validation loss : 0.17131790316530637 - MSE: 0.3191349769027771
Epoch: 23
Training loss: 0.18475595528928979 - MSE: 0.3252490983746809
Validation loss : 0.17116585086498942 - MSE: 0.3188991654947001
Epoch: 24
Training loss: 0.18471557654223394 - MSE: 0.32523076226991837
Validation loss : 0.17129607019679888 - MSE: 0.31910639966871324
Epoch: 25
Training loss: 0.1847534055993395 - MSE: 0.32526731010380944
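For context, below is a minimal sketch of the kind of fine-tuning loop that could emit log lines in this format. Only the BERT configuration values and the printed-line layout are taken from the log above; the regression head (the config lists BertForMaskedLM as the base architecture, but the MSE metrics suggest a single-value regression objective), the optimizer, the learning rate, and the placeholder data are all assumptions, not the original training script.

# Sketch only: reproduces the log line format under assumed choices of head,
# optimizer, and data. Requires torch and transformers.
import torch
from torch.utils.data import DataLoader, TensorDataset
from transformers import BertConfig, BertForSequenceClassification

# Configuration values copied from the log header.
config = BertConfig(
    vocab_size=50104,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    hidden_act="gelu",
    hidden_dropout_prob=0.1,
    attention_probs_dropout_prob=0.1,
    max_position_embeddings=512,
    type_vocab_size=2,
    initializer_range=0.02,
    num_labels=1,  # assumed: single-value regression head
)
model = BertForSequenceClassification(config)

loss_fn = torch.nn.MSELoss()
optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)  # assumed hyperparameters

def fake_split(n):
    # Placeholder random data standing in for the real (undisclosed) dataset.
    ids = torch.randint(0, config.vocab_size, (n, 64))
    targets = torch.rand(n, 1)
    return DataLoader(TensorDataset(ids, targets), batch_size=8)

train_loader, val_loader = fake_split(64), fake_split(16)

def run_epoch(loader, train):
    model.train(train)
    total_loss, total_sq_err, n = 0.0, 0.0, 0
    for ids, y in loader:
        with torch.set_grad_enabled(train):
            preds = model(input_ids=ids).logits
            loss = loss_fn(preds, y)
        if train:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item() * ids.size(0)
        total_sq_err += ((preds.detach() - y) ** 2).sum().item()
        n += ids.size(0)
    # Note: here the reported MSE equals the mean loss; in the original run the
    # MSE was evidently computed on a different scale (e.g. de-normalized targets).
    return total_loss / n, total_sq_err / n

for epoch in range(26):
    print(f"Epoch: {epoch}")
    train_loss, train_mse = run_epoch(train_loader, train=True)
    print(f"Training loss: {train_loss} - MSE: {train_mse}")
    val_loss, val_mse = run_epoch(val_loader, train=False)
    print(f"Validation loss : {val_loss} - MSE: {val_mse}")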