Started at: 14:58:18
norbert, 0.001, 128
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '075d4e3705390691013e859faffc5696d071e33b'}, {})
Epoch: 0
Training loss: 0.34940282940864564 - MAE: 0.45046864085630683
Validation loss : 0.18274873582755818 - MAE: 0.3261804763692437
Epoch: 1
Training loss: 0.17366532385349273 - MAE: 0.3175050753369902
Validation loss : 0.17284800375209136 - MAE: 0.3152826512353921
Epoch: 2
Training loss: 0.16550228700041772 - MAE: 0.3102880187959522
Validation loss : 0.168134662158349 - MAE: 0.3101325107911304
Epoch: 3
Training loss: 0.1614217494428158 - MAE: 0.30638796285490894
Validation loss : 0.16555996852762558 - MAE: 0.3071440144715573
Epoch: 4
Training loss: 0.15815593004226686 - MAE: 0.3032428467393789
Validation loss : 0.16414058559081135 - MAE: 0.3062275889991122
Epoch: 5
Training loss: 0.15722389429807662 - MAE: 0.3026522091596125
Validation loss : 0.16364685226889217 - MAE: 0.30600367269172085
Epoch: 6
Training loss: 0.1544417916238308 - MAE: 0.29834141285366533
Validation loss : 0.16209946178338108 - MAE: 0.3044787129354469
Epoch: 7
Training loss: 0.15222523376345634 - MAE: 0.2959395709613162
Validation loss : 0.16139676991631002 - MAE: 0.30289165513286676
Epoch: 8
Training loss: 0.1495582267642021 - MAE: 0.29315735941725324
Validation loss : 0.15973140430801056 - MAE: 0.30022579676647987
Epoch: 9
Training loss: 0.15145889401435852 - MAE: 0.2952163697893332
Validation loss : 0.15879446618697224 - MAE: 0.2994991406614638
Epoch: 10
Training loss: 0.15025388434529305 - MAE: 0.2945344618888036
Validation loss : 0.15855726161423853 - MAE: 0.29908208933891134
Epoch: 11
Training loss: 0.14902475863695144 - MAE: 0.2924443122836678
Validation loss : 0.15826284534790935 - MAE: 0.2984638590546403
Epoch: 12
Training loss: 0.14780941233038902 - MAE: 0.2926466634183582
Validation loss : 0.15731259040972767 - MAE: 0.296935231079806
Epoch: 13
Training loss: 0.14797327995300294 - MAE: 0.2928364589090259
Validation loss : 0.15824999397291856 - MAE: 0.2989947235904544
Epoch: 14
Training loss: 0.14717394277453422 - MAE: 0.29110306221919585
Validation loss : 0.1570384524324361 - MAE: 0.2966094006540162
Epoch: 15
Training loss: 0.1468842999637127 - MAE: 0.2900015210052096
Validation loss : 0.15705139452920241 - MAE: 0.2971900124498248
Epoch: 16
Training loss: 0.1456107421219349 - MAE: 0.29039021972601275
Validation loss : 0.15700176358222961 - MAE: 0.2971215088780947
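
Note on the log format: the header records the model tag ("norbert") with what appear to be the learning rate (0.001) and batch size (128), followed by the pretrained BERT config and then per-epoch training/validation loss and MAE. Below is a minimal sketch of a fine-tuning loop that would emit lines in this format. The regression head on the [CLS] position, the MSE training loss, the AdamW optimizer, and the hub id "ltg/norbert" are assumptions; only the hyperparameters and the printed metric names come from the log itself.

import torch
from torch import nn
from transformers import AutoModel

class BertRegressor(nn.Module):
    # BERT encoder with a single-output regression head (assumed architecture).
    def __init__(self, model_name):
        super().__init__()
        self.encoder = AutoModel.from_pretrained(model_name)
        self.head = nn.Linear(self.encoder.config.hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        hidden = self.encoder(input_ids=input_ids,
                              attention_mask=attention_mask).last_hidden_state
        return self.head(hidden[:, 0]).squeeze(-1)  # predict from the [CLS] position

def run_epoch(model, loader, device, optimizer=None):
    # One pass over `loader`; trains when an optimizer is given, otherwise evaluates.
    training = optimizer is not None
    model.train(training)
    mse, mae = nn.MSELoss(), nn.L1Loss()
    loss_sum, mae_sum = 0.0, 0.0
    with torch.set_grad_enabled(training):
        for input_ids, attention_mask, targets in loader:
            preds = model(input_ids.to(device), attention_mask.to(device))
            targets = targets.to(device)
            loss = mse(preds, targets)
            if training:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            loss_sum += loss.item()
            mae_sum += mae(preds, targets).item()
    n = len(loader)
    return loss_sum / n, mae_sum / n

# Usage sketch: train_loader / val_loader are assumed to yield
# (input_ids, attention_mask, target) batches of size 128.
# device = "cuda" if torch.cuda.is_available() else "cpu"
# model = BertRegressor("ltg/norbert").to(device)   # hub id is an assumption
# optimizer = torch.optim.AdamW(model.parameters(), lr=0.001)
# for epoch in range(17):
#     tr_loss, tr_mae = run_epoch(model, train_loader, device, optimizer)
#     va_loss, va_mae = run_epoch(model, val_loader, device)
#     print(f"Epoch: {epoch}")
#     print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
#     print(f"Validation loss : {va_loss} - MAE: {va_mae}")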