Started at: 08:53:38
nb-bert-base, 0.001, 320
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})

Epoch: 0
Training loss: 0.5232093125581742 - MAE: 0.577175796848303
Validation loss : 0.24680941019739425 - MAE: 0.3737264517741245
Epoch: 1
Training loss: 0.22182521671056749 - MAE: 0.3617469672238828
Validation loss : 0.18106927829129355 - MAE: 0.33329890031003023
Epoch: 2
Training loss: 0.17927802801132203 - MAE: 0.322836395902206
Validation loss : 0.17179396961416518 - MAE: 0.3232725764296223
Epoch: 3
Training loss: 0.16818829849362374 - MAE: 0.31218254200951356
Validation loss : 0.16150868471179688 - MAE: 0.30974082318107593
Epoch: 4
Training loss: 0.16338684782385826 - MAE: 0.307377671065012
Validation loss : 0.15682851203850337 - MAE: 0.3029868431548616
Epoch: 5
Training loss: 0.15928990244865418 - MAE: 0.30138638769739584
Validation loss : 0.15434197975056513 - MAE: 0.30013179874724943
Epoch: 6
Training loss: 0.15700093284249306 - MAE: 0.2987656782697292
Validation loss : 0.15234480159623282 - MAE: 0.2975052970352484
Epoch: 7
Training loss: 0.1577582947909832 - MAE: 0.2996505677171851
Validation loss : 0.15100991193737304 - MAE: 0.29573719062577897
Epoch: 8
Training loss: 0.15404724441468715 - MAE: 0.2956406495971371
Validation loss : 0.15031706541776657 - MAE: 0.2956823820252686
Epoch: 9
Training loss: 0.15285137481987476 - MAE: 0.29401895611067946
Validation loss : 0.1492577525121825 - MAE: 0.2942113519087664
Epoch: 10
Training loss: 0.15287049673497677 - MAE: 0.29381234500198294
Validation loss : 0.14956839701959065 - MAE: 0.2956814336113086
Epoch: 11
Training loss: 0.15209382846951486 - MAE: 0.2940015999660008
Validation loss : 0.1488482728600502 - MAE: 0.29477521124384637
Epoch: 12
Training loss: 0.14788862094283103 - MAE: 0.288884180333347
Validation loss : 0.14770041512591497 - MAE: 0.2931122009020718
Epoch: 13
Training loss: 0.1489444550126791 - MAE: 0.29098623448066246
Validation loss : 0.147152093904359 - MAE: 0.2927249842922376
Epoch: 14
Training loss: 0.14717476181685923 - MAE: 0.2890514935851093
Validation loss : 0.1464785635471344 - MAE: 0.29145838958996034
Epoch: 15
Training loss: 0.14695915952324867 - MAE: 0.2886756122039008
Validation loss : 0.14657618318285262 - MAE: 0.2922686882020837
Epoch: 16
Training loss: 0.14572934694588185 - MAE: 0.28684060923232196
Validation loss : 0.14594986821923936 - MAE: 0.29126959649846756
Epoch: 17
Training loss: 0.14512549489736556 - MAE: 0.28711717575079526
Validation loss : 0.14578114237104142 - MAE: 0.2911451673368109
Epoch: 18
Training loss: 0.14541900716722012 - MAE: 0.28713926205336104
Validation loss : 0.14526801769222533 - MAE: 0.29023733142555214
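
For reference, below is a minimal sketch (PyTorch + Hugging Face Transformers) of the kind of fine-tuning loop that would emit log lines in the format above. It assumes a single-output regression head trained with MSE loss and reported with MAE, AdamW at the 0.001 learning rate from the header, and hypothetical train_loader / val_loader objects; none of these choices are stated in the log itself, and the actual run loaded its config from the local checkpoint path shown above rather than the public nb-bert-base identifier used here.

# Minimal sketch; assumptions (regression head, MSE loss, placeholder DataLoaders)
# are not confirmed by the log.
import torch
from torch.nn.functional import mse_loss, l1_loss
from transformers import AutoModelForSequenceClassification

model = AutoModelForSequenceClassification.from_pretrained(
    "NbAiLab/nb-bert-base",            # the run itself used a local checkpoint path
    num_labels=1, problem_type="regression")
optimizer = torch.optim.AdamW(model.parameters(), lr=0.001)    # 0.001 from the log header

def run_epoch(loader, train=True):
    """Run one pass over `loader`; return mean MSE loss and mean MAE per batch."""
    model.train(train)
    total_loss, total_mae, batches = 0.0, 0.0, 0
    for batch in loader:               # `loader` is a hypothetical DataLoader of tokenized batches
        with torch.set_grad_enabled(train):
            preds = model(input_ids=batch["input_ids"],
                          attention_mask=batch["attention_mask"]).logits.squeeze(-1)
            loss = mse_loss(preds, batch["labels"])
        if train:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += l1_loss(preds.detach(), batch["labels"]).item()
        batches += 1
    return total_loss / batches, total_mae / batches

for epoch in range(19):                # epochs 0-18, as in the log
    print(f"Epoch: {epoch}")
    tr_loss, tr_mae = run_epoch(train_loader, train=True)
    print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
    va_loss, va_mae = run_epoch(val_loader, train=False)
    print(f"Validation loss : {va_loss} - MAE: {va_mae}")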