Started at: 15:31:41

norbert, 0.001, 256

({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '075d4e3705390691013e859faffc5696d071e33b'}, {})

Epoch: 0
Training loss: 0.4997671264410019 - MAE: 0.5583690090433803
Validation loss : 0.2276972317033344 - MAE: 0.3624775892192396
Epoch: 1
Training loss: 0.20303732812404632 - MAE: 0.34587756490568033
Validation loss : 0.1835534324248632 - MAE: 0.3276716586715214
Epoch: 2
Training loss: 0.17609426498413086 - MAE: 0.31881399563457646
Validation loss : 0.17789346476395926 - MAE: 0.32275573904464216
Epoch: 3
Training loss: 0.1723429363965988 - MAE: 0.3149511468481285
Validation loss : 0.17444034251901838 - MAE: 0.3191513624394604
Epoch: 4
Training loss: 0.16697535365819932 - MAE: 0.30890338994193667
Validation loss : 0.17181600133577982 - MAE: 0.31612868256992294
Epoch: 5
Training loss: 0.1654062718153 - MAE: 0.30754644725112845
Validation loss : 0.170253316561381 - MAE: 0.3148427522457925
Epoch: 6
Training loss: 0.16408681154251098 - MAE: 0.30739368453663996
Validation loss : 0.16913726760281456 - MAE: 0.31410748070110894
Epoch: 7
Training loss: 0.1611470425128937 - MAE: 0.3051785699316093
Validation loss : 0.16728976534472573 - MAE: 0.3114586062997216
Epoch: 8
Training loss: 0.1585029438138008 - MAE: 0.3012857856624527
Validation loss : 0.16669333312246534 - MAE: 0.31113687044791705
Epoch: 9
Training loss: 0.1580871671438217 - MAE: 0.30056992095124985
Validation loss : 0.1657647126250797 - MAE: 0.30975514201722665
Epoch: 10
Training loss: 0.15657792925834657 - MAE: 0.2997063631806598
Validation loss : 0.16499681605233085 - MAE: 0.3090773428606195
Epoch: 11
Training loss: 0.15735965013504027 - MAE: 0.2994761878319979
Validation loss : 0.16467193100187513 - MAE: 0.30892803888519516
Epoch: 12
Training loss: 0.15662964165210724 - MAE: 0.2994051315038657
Validation loss : 0.16461661458015442 - MAE: 0.3090485561264928
Epoch: 13
Training loss: 0.15493253260850906 - MAE: 0.29702066425647616
Validation loss : 0.16322147183948094 - MAE: 0.30691353043753256
Epoch: 14
Training loss: 0.1559354120492935 - MAE: 0.2983542113707406
Validation loss : 0.16239123874240452 - MAE: 0.30587414892792375
Epoch: 15
Training loss: 0.15351686328649522 - MAE: 0.29552995258347897
Validation loss : 0.1617282662126753 - MAE: 0.3043897818179155
Epoch: 16
Training loss: 0.15441828429698945 - MAE: 0.2971652313654796
Validation loss : 0.16326840387450325 - MAE: 0.3071193081592528
Epoch: 17
Training loss: 0.1529671862721443 - MAE: 0.29453820854635787
Validation loss : 0.16275036997265285 - MAE: 0.30652749320835027
Epoch: 18
Training loss: 0.15280701637268065 - MAE: 0.2959003096594964
Validation loss : 0.160822202761968 - MAE: 0.30333586474665925
Epoch: 19
Training loss: 0.15270626097917556 - MAE: 0.2943813782626802
Validation loss : 0.16111763152811262 - MAE: 0.3041114172698317

Prediction MAE: 0.2913

Finished at: 15:31:41
Time taken: 1980 s.
0 days 0 hours 33 minutes 0 seconds
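For reference, below is a minimal, illustrative sketch of the kind of fine-tuning loop that emits output in this format. It assumes HuggingFace Transformers with a BERT encoder ("norbert", per the header line) feeding a single-output regression head, MSE loss, AdamW at learning rate 0.001, batch size 256, and per-epoch MAE; the log itself does not include the training script, so every name and implementation detail below is an assumption, not the original code.

# Sketch of a training loop matching the log format above.
# Assumptions (not confirmed by the log): HuggingFace Transformers, a BERT
# encoder with a linear regression head, MSE loss, AdamW, lr=0.001,
# batch_size=256, 20 epochs. All identifiers are illustrative.
import time
import torch
from torch import nn
from torch.utils.data import DataLoader
from transformers import AutoConfig, AutoModel

class BertRegressor(nn.Module):
    def __init__(self, model_name):
        super().__init__()
        self.encoder = AutoModel.from_pretrained(model_name)
        self.head = nn.Linear(self.encoder.config.hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        out = self.encoder(input_ids=input_ids, attention_mask=attention_mask)
        # Regress from the [CLS] token representation.
        return self.head(out.last_hidden_state[:, 0]).squeeze(-1)

def run_epoch(model, loader, loss_fn, optimizer=None, device="cpu"):
    """Return (mean loss, mean absolute error) over one pass of `loader`."""
    training = optimizer is not None
    model.train(training)
    total_loss, total_mae, n_batches = 0.0, 0.0, 0
    with torch.set_grad_enabled(training):
        for batch in loader:
            ids = batch["input_ids"].to(device)
            mask = batch["attention_mask"].to(device)
            target = batch["label"].float().to(device)
            pred = model(ids, mask)
            loss = loss_fn(pred, target)
            if training:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item()
            total_mae += (pred - target).abs().mean().item()
            n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

def train(train_set, val_set, model_name="norbert", lr=0.001,
          batch_size=256, epochs=20, device="cpu"):
    print(f"Started at: {time.strftime('%H:%M:%S')}")
    print(f"{model_name}, {lr}, {batch_size}")
    print(AutoConfig.from_pretrained(model_name).to_dict())
    model = BertRegressor(model_name).to(device)
    optimizer = torch.optim.AdamW(model.parameters(), lr=lr)
    loss_fn = nn.MSELoss()
    train_loader = DataLoader(train_set, batch_size=batch_size, shuffle=True)
    val_loader = DataLoader(val_set, batch_size=batch_size)
    for epoch in range(epochs):
        print(f"Epoch: {epoch}")
        tr_loss, tr_mae = run_epoch(model, train_loader, loss_fn, optimizer, device)
        print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
        va_loss, va_mae = run_epoch(model, val_loader, loss_fn, None, device)
        print(f"Validation loss : {va_loss} - MAE: {va_mae}")
    print(f"Finished at: {time.strftime('%H:%M:%S')}")
    return model

The final "Prediction MAE" line in the log would correspond to an extra evaluation pass over a held-out test set after training, which this sketch omits.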