Started at: 21:04:23 ({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
Epoch: 0
Training loss: 0.21613560179338873 - MSE: 0.3443637452993947
Validation loss : 0.16539869446839606 - MSE: 0.3086538036769655
Epoch: 1
Training loss: 0.1837235710863928 - MSE: 0.32413622182287605
Validation loss : 0.16982820672648294 - MSE: 0.31674233427703646
Epoch: 2
Training loss: 0.18304820250249604 - MSE: 0.3239108068413553
Validation loss : 0.17052161650998252 - MSE: 0.31788417737032953
Epoch: 3
Training loss: 0.18448016108963097 - MSE: 0.3252394673594232
Validation loss : 0.17072700315288136 - MSE: 0.3182306206402635
Epoch: 4
Training loss: 0.184644706619596 - MSE: 0.32518134408631855
Validation loss : 0.17120066466076034 - MSE: 0.31901036988045756
Epoch: 5
Training loss: 0.18460615753259474 - MSE: 0.3252760797817471
Validation loss : 0.17181508860417774 - MSE: 0.3200009087117256
Epoch: 6
Training loss: 0.18562242764871098 - MSE: 0.3263451638426147
Validation loss : 0.1719757279115064 - MSE: 0.32025919894721094
Epoch: 7
Training loss: 0.1845672134924861 - MSE: 0.3252300495699698
Validation loss : 0.16974811958415167 - MSE: 0.3166154321565825
Epoch: 8
Training loss: 0.18489993262348822 - MSE: 0.3254686886041067
Validation loss : 0.1729555284338338 - MSE: 0.3217263126762451
Epoch: 9
Training loss: 0.18432768325782517 - MSE: 0.3248599418213628
Validation loss : 0.1708770450736795 - MSE: 0.31844797706663874
Epoch: 10
Training loss: 0.1825945836681764 - MSE: 0.3229734198364271
Validation loss : 0.17286965453198977 - MSE: 0.3215693459729664
Epoch: 11
Training loss: 0.18539906857372487 - MSE: 0.3260377252207724
Validation loss : 0.17201036236115863 - MSE: 0.32026005895708554
Epoch: 12
Training loss: 0.1842774318404568 - MSE: 0.32480808348019835
Validation loss : 0.17158785260149412 - MSE: 0.3195950089571332
Epoch: 13
Training loss: 0.1854605678360439 - MSE: 0.32605868234166485
Validation loss : 0.16932314804622106 - MSE: 0.31585823518523415
Epoch: 14
Training loss: 0.18442078412157817 - MSE: 0.3251032140988794
Validation loss : 0.17218789415700095 - MSE: 0.3203968357577521
Epoch: 15
Training loss: 0.18477218655995953 - MSE: 0.3251257856019914
Validation loss : 0.1698176066790308 - MSE: 0.31671145460069444
Epoch: 16
Training loss: 0.18476421284733466 - MSE: 0.32554927459500077
Validation loss : 0.17247124761343002 - MSE: 0.32069564608225065
Epoch: 17
Training loss: 0.18528969708866286 - MSE: 0.32587523702603366
Validation loss : 0.1712306280221258 - MSE: 0.31879013103732307
Epoch: 18
Training loss: 0.18497512178513612 - MSE: 0.32565531341222104
Validation loss : 0.17178946360945702 - MSE: 0.3196054689336701
Epoch: 19
Training loss: 0.18551090916672958 - MSE: 0.3260867291475271
Validation loss : 0.16729344012481825 - MSE: 0.3128114424396439
Epoch: 20
Training loss: 0.1845464623596483 - MSE: 0.325132180970436
Validation loss : 0.16118713564106396 - MSE: 0.29868657096854545
Epoch: 21
Training loss: 0.18486736881212124 - MSE: 0.32487037529952106
Validation loss : 0.16049016671521324 - MSE: 0.29485191029795843
Epoch: 22
Training loss: 0.18485277428210362 - MSE: 0.32541124936960536
Validation loss : 0.16668628422277315 - MSE: 0.3111972908639083
Epoch: 23
Training loss: 0.18316040828245359 - MSE: 0.3236609574057915
Validation loss : 0.17183729984930585 - MSE: 0.31994768048669875
Epoch: 24
Training loss: 0.1849682867816351 - MSE: 0.3254867070790945
Validation loss : 0.17206446932894842 - MSE: 0.3203199030848087
Epoch: 25
Training loss: 0.1846233180281028 - MSE: 0.3248847895281911
Validation loss : 0.16824970575315612 - MSE: 0.31392427542014045
Epoch: 26
Training loss: 0.1850127536550309 - MSE: 0.3256664766776012
Validation loss : 0.16485340020486286 - MSE: 0.30710649498505516
Epoch: 27
Training loss: 0.18637964711750596 - MSE: 0.32707401380202467
Validation loss : 0.168049584542002 - MSE: 0.31401812814874575
Epoch: 28
Training loss: 0.18443961376414716 - MSE: 0.3256206673589944
Validation loss : 0.3068481558135578 - MSE: 0.43936994884861635
Epoch: 29
Training loss: 0.18115830479316342 - MSE: 0.3219161702367045
Validation loss : 0.16694899669715335 - MSE: 0.312683923836864
Epoch: 30
Training loss: 0.18507907698744708 - MSE: 0.3256511754441898
Validation loss : 0.16411111738000597 - MSE: 0.3074208113514552
Epoch: 31
Training loss: 0.18586217908604633 - MSE: 0.32637664962654067
Validation loss : 0.16320727840065957 - MSE: 0.3059868694166653
Epoch: 32
Training loss: 0.18312291186122062 - MSE: 0.32382993401794097
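
For context, below is a minimal sketch of the kind of PyTorch/transformers loop that could emit log lines in this format: a BERT encoder loaded from the checkpoint named in the config dump above, with a small single-output regression head, trained and evaluated once per epoch. Everything beyond the checkpoint path and the print format is an assumption reconstructed from the log alone; the head, loss function, optimizer, learning rate, batch sizes, and data loaders are placeholders, not details recovered from this run.

import torch
from torch import nn
from torch.utils.data import DataLoader, TensorDataset
from transformers import BertModel

# Path taken from the '_name_or_path' field in the config dump above.
CHECKPOINT = "/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/"

class BertRegressor(nn.Module):
    """BERT encoder with a single-output regression head (assumed architecture)."""
    def __init__(self, checkpoint):
        super().__init__()
        self.bert = BertModel.from_pretrained(checkpoint)
        self.head = nn.Linear(self.bert.config.hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        out = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        return self.head(out.pooler_output).squeeze(-1)

def run_epoch(model, loader, loss_fn, device, optimizer=None):
    """One pass over `loader`; trains if an optimizer is given, otherwise evaluates.
    Returns the sample-weighted mean loss and the mean squared error."""
    training = optimizer is not None
    model.train(training)
    total_loss = total_sq_err = n = 0.0
    with torch.set_grad_enabled(training):
        for input_ids, attention_mask, targets in loader:
            input_ids = input_ids.to(device)
            attention_mask = attention_mask.to(device)
            targets = targets.to(device)
            preds = model(input_ids, attention_mask)
            loss = loss_fn(preds, targets)
            if training:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item() * targets.size(0)
            total_sq_err += ((preds - targets) ** 2).sum().item()
            n += targets.size(0)
    return total_loss / n, total_sq_err / n

device = "cuda" if torch.cuda.is_available() else "cpu"
model = BertRegressor(CHECKPOINT).to(device)
optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)  # placeholder hyperparameters
loss_fn = nn.MSELoss()  # placeholder; the actual loss used in this run is not recoverable from the log

# Hypothetical data for illustration only: replace with real tokenized inputs and float targets.
# vocab_size 119547 comes from the config dump above.
ids = torch.randint(0, 119547, (64, 32))
mask = torch.ones_like(ids)
y = torch.rand(64)
train_loader = DataLoader(TensorDataset(ids, mask, y), batch_size=16)
val_loader = DataLoader(TensorDataset(ids, mask, y), batch_size=16)

for epoch in range(33):
    train_loss, train_mse = run_epoch(model, train_loader, loss_fn, device, optimizer)
    val_loss, val_mse = run_epoch(model, val_loader, loss_fn, device)
    print(f"Epoch: {epoch}")
    print(f"Training loss: {train_loss} - MSE: {train_mse}")
    print(f"Validation loss : {val_loss} - MSE: {val_mse}")

Note that with plain nn.MSELoss the two numbers printed per line would essentially coincide, whereas in the log above they differ; this suggests the original run used a different training objective or computed the reported MSE on a different target scale, which cannot be determined from the log itself.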