Started at: 15:12:31
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
Epoch: 0
Training loss: 0.20329919241860434 - MSE: 0.3402557295436599
Validation loss : 0.18580390038815411 - MSE: 0.3241432956664492
Epoch: 1
Training loss: 0.1732676090095854 - MSE: 0.31724595576337034
Validation loss : 0.16568760144891162 - MSE: 0.31341614153228575
Epoch: 2
Training loss: 0.16030060382663902 - MSE: 0.3035005222248579
Validation loss : 0.1516422192481431 - MSE: 0.2875130449669631
Epoch: 3
Training loss: 0.1554781354509937 - MSE: 0.2979727920176376
Validation loss : 0.14750216573928343 - MSE: 0.28489314670482657
Epoch: 4
Training loss: 0.15081479527094038 - MSE: 0.2927710830201153
Validation loss : 0.14724563451653178 - MSE: 0.2861008250329437
Epoch: 5
Training loss: 0.1466910548621628 - MSE: 0.28974043263491844
Validation loss : 0.14556188587889526 - MSE: 0.28255917721777324
Epoch: 6
Training loss: 0.1464147042048159 - MSE: 0.28871972411446317
Validation loss : 0.14734464714472945 - MSE: 0.2857798778567098
Epoch: 7
Training loss: 0.14595557395652467 - MSE: 0.28856819400962264
Validation loss : 0.1446235611131697 - MSE: 0.28117785966411335
Epoch: 8
Training loss: 0.1449509447601241 - MSE: 0.2885284100938226
Validation loss : 0.1455823565748605 - MSE: 0.2817915015842891
Epoch: 9
Training loss: 0.1447959460771931 - MSE: 0.2872239444923332
Validation loss : 0.14529388951081218 - MSE: 0.2819837225561805
Epoch: 10
Training loss: 0.14472211318724046 - MSE: 0.28719455320410664
Validation loss : 0.14937494666964718 - MSE: 0.2843606734781727
Epoch: 11
Training loss: 0.144088552139586 - MSE: 0.28760964775038966
Validation loss : 0.14968568538174484 - MSE: 0.2861486726980596
Epoch: 12
Training loss: 0.14606445747886213 - MSE: 0.28844804390686263
Validation loss : 0.1448706823090712 - MSE: 0.28046784730610597
Epoch: 13
Training loss: 0.14478144813612634 - MSE: 0.2881406565554036
Validation loss : 0.15223626566655707 - MSE: 0.28788476966778725
Epoch: 14
Training loss: 0.13903559124106682 - MSE: 0.28273852721941334
Validation loss : 0.14466467048182632 - MSE: 0.2818046804860061
Epoch: 15
Training loss: 0.14228089620846177 - MSE: 0.28594855431899785
Validation loss : 0.14179454258445537 - MSE: 0.279193046159515
Epoch: 16
Training loss: 0.13887919387236464 - MSE: 0.2823545795451341
Validation loss : 0.14775521138852293 - MSE: 0.28455426443221177
Epoch: 17
Training loss: 0.13864240927744637 - MSE: 0.28109458266700194
Validation loss : 0.14809496587876117 - MSE: 0.28279495799750376
Epoch: 18
Training loss: 0.1387332887365128 - MSE: 0.28150766164938196
Validation loss : 0.14517276073721322 - MSE: 0.28345849166153503
Epoch: 19
Training loss: 0.13895187230052683 - MSE: 0.283011639691845
Validation loss : 0.13953923958946357 - MSE: 0.2801639866984212
Epoch: 20
Training loss: 0.13918349640396646 - MSE: 0.2821651974106403
Validation loss : 0.13979203293495107 - MSE: 0.27861376313959874
Epoch: 21
Training loss: 0.14063993230613356 - MSE: 0.28420372002952893
Validation loss : 0.13928090024626616 - MSE: 0.27858824725509246
Epoch: 22
Training loss: 0.1396664222182356 - MSE: 0.2826851016305493
Validation loss : 0.14619293684760729 - MSE: 0.28350754677889844
Epoch: 23
Training loss: 0.1393597536734518 - MSE: 0.2831695874891608
Validation loss : 0.15760497906894394 - MSE: 0.2929710551416292
Epoch: 24
Training loss: 0.14330284036506857 - MSE: 0.2854830684384262
Validation loss : 0.13994943711793784 - MSE: 0.27753756844449295
Epoch: 25
Training loss: 0.13945241206264133 - MSE: 0.2826231307316076
Validation loss : 0.14450635089341438 - MSE: 0.2799117028245645
Epoch: 26
Training loss: 0.1400487285655767 - MSE: 0.2827613417904652
Validation loss : 0.14119875927766165 - MSE: 0.2802039488934804
Epoch: 27
Training loss: 0.13957865950344178 - MSE: 0.2835329892746324
Validation loss : 0.13819768674897426 - MSE: 0.2794946100211438
Epoch: 28
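
The log above records fine-tuning of a BERT checkpoint (the config matches a multilingual BERT, vocab_size 119547, loaded from the path in '_name_or_path') on a regression objective, printing a loss and an MSE metric for the training and validation splits each epoch. The original training script is not part of this log, so the sketch below is only a minimal reconstruction of a loop that would emit lines of this shape, assuming the checkpoint is loaded with Hugging Face transformers, a single-output linear head on the [CLS] embedding, MSELoss, and AdamW. The class BertRegressor, the helpers run_epoch and train, the data loaders, the learning rate, and the epoch count are all hypothetical, and the exact definition of the logged "MSE" column cannot be recovered from the log.

import torch
from torch import nn
from transformers import AutoModel

# Checkpoint path taken from the logged config; everything else below is assumed.
CHECKPOINT = "/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/"

class BertRegressor(nn.Module):
    # BERT encoder plus a single-output linear head on the [CLS] embedding (assumed architecture).
    def __init__(self, checkpoint):
        super().__init__()
        self.bert = AutoModel.from_pretrained(checkpoint)
        self.head = nn.Linear(self.bert.config.hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        out = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        return self.head(out.last_hidden_state[:, 0]).squeeze(-1)

def run_epoch(model, loader, criterion, device, optimizer=None):
    # One pass over `loader`; returns (mean batch loss, epoch-level MSE).
    # The metric behind the logged "MSE" column is unknown, so plain MSE
    # over all predictions of the epoch is computed here instead.
    training = optimizer is not None
    model.train(training)
    total_loss, preds, targets = 0.0, [], []
    with torch.set_grad_enabled(training):
        for input_ids, attention_mask, target in loader:
            input_ids = input_ids.to(device)
            attention_mask = attention_mask.to(device)
            target = target.float().to(device)
            pred = model(input_ids, attention_mask)
            loss = criterion(pred, target)
            if training:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item()
            preds.append(pred.detach().cpu())
            targets.append(target.cpu())
    preds, targets = torch.cat(preds), torch.cat(targets)
    mse = nn.functional.mse_loss(preds, targets).item()
    return total_loss / len(loader), mse

def train(model, train_loader, val_loader, epochs=29, lr=2e-5, device="cuda"):
    # train_loader / val_loader are assumed DataLoaders yielding
    # (input_ids, attention_mask, target) batches; lr and epochs are guesses
    # (the log shows at least epochs 0 through 28).
    model.to(device)
    criterion = nn.MSELoss()
    optimizer = torch.optim.AdamW(model.parameters(), lr=lr)
    for epoch in range(epochs):
        print(f"Epoch: {epoch}")
        tr_loss, tr_mse = run_epoch(model, train_loader, criterion, device, optimizer)
        print(f"Training loss: {tr_loss} - MSE: {tr_mse}")
        va_loss, va_mse = run_epoch(model, val_loader, criterion, device)
        print(f"Validation loss : {va_loss} - MSE: {va_mse}")

The print statements deliberately mirror the "Epoch: N" / "Training loss: ... - MSE: ..." / "Validation loss : ... - MSE: ..." layout of the log, so the sketch's output lines up with the records above.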