Started at: 10:44:21
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})
Epoch: 0
Training loss: 0.19173999391134502 - MSE: 0.3262300431900936
Validation loss : 0.1707202664443425 - MSE: 0.3181949529015193
Epoch: 1
Training loss: 0.1834320445517892 - MSE: 0.3230671429831198
Validation loss : 0.17099061012268066 - MSE: 0.31859832233250407
Epoch: 2
Training loss: 0.18393246207422423 - MSE: 0.3238126735351295
Validation loss : 0.17154425446476254 - MSE: 0.31947564650493276
Epoch: 3
Training loss: 0.18445654531705727 - MSE: 0.32449193418262723
Validation loss : 0.17155078649520875 - MSE: 0.31949343827852444
Epoch: 4
Training loss: 0.18468433119428967 - MSE: 0.3248966974226236
Validation loss : 0.17185703590512275 - MSE: 0.31997947147548467
Epoch: 5
Training loss: 0.18483373783167126 - MSE: 0.32512973743557905
Validation loss : 0.1718519987804549 - MSE: 0.3199738939708498
Epoch: 6
Training loss: 0.18488002355092936 - MSE: 0.32523622541441644
Validation loss : 0.17180264528308595 - MSE: 0.3198964095291948
Epoch: 7
Training loss: 0.18488125423493895 - MSE: 0.3252789020479855
Validation loss : 0.17174329182931355 - MSE: 0.3198061523443487
Epoch: 8
Training loss: 0.18489378878792512 - MSE: 0.3253228091660482
Validation loss : 0.17357715804662024 - MSE: 0.3226380277707774
Epoch: 9
Training loss: 0.18595946008719286 - MSE: 0.32635185643103526
Validation loss : 0.17145181340830667 - MSE: 0.31935092495654577
Epoch: 10
Training loss: 0.18484974442755134 - MSE: 0.3253414797140243
Validation loss : 0.171514799871615 - MSE: 0.31944891148013993
Epoch: 11
Training loss: 0.1848593037394644 - MSE: 0.3253549544164942
Validation loss : 0.17148066344005722 - MSE: 0.3193935607975748
Epoch: 12
Training loss: 0.18484833496577532 - MSE: 0.3253483776600709
Validation loss : 0.17145045633826936 - MSE: 0.3193448287850645
Epoch: 13
Training loss: 0.18483807320154985 - MSE: 0.32534201221261033
Validation loss : 0.1714242550943579 - MSE: 0.3193027320467601
Epoch: 14
Training loss: 0.184799948146621 - MSE: 0.32530459791156613
Validation loss : 0.17139992096594403 - MSE: 0.3192637658295488
Epoch: 15
Training loss: 0.18460696313566374 - MSE: 0.32509753975495237
Validation loss : 0.1713372460433415 - MSE: 0.3191811611360338
Epoch: 16