Started at: 14:14:54
nb-bert-base, 1e-06, 256 ({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
Epoch: 0
Training loss: 1.3610501217842101 - MAE: 1.0817184245036757
Validation loss : 0.6300775276290046 - MAE: 0.6925418514553945
Epoch: 1
Training loss: 0.32747340738773345 - MAE: 0.4483029439383294
Validation loss : 0.24206096761756474 - MAE: 0.3825273214027049
Epoch: 2
Training loss: 0.22492031693458558 - MAE: 0.3634789782721722
Validation loss : 0.21290512879689535 - MAE: 0.3531350766820909
Epoch: 3
Training loss: 0.21074927151203154 - MAE: 0.34935127198342575
Validation loss : 0.19930076433552635 - MAE: 0.3423320980410668
Epoch: 4
Training loss: 0.1998182564973831 - MAE: 0.33952265508216506
Validation loss : 0.19053682684898376 - MAE: 0.33432257900743867
Epoch: 5
Training loss: 0.18897418558597565 - MAE: 0.3308955648791844
Validation loss : 0.183888531393475 - MAE: 0.32777869810289756
Epoch: 6
Training loss: 0.18244400441646577 - MAE: 0.3239016318289663
Validation loss : 0.17849575810962254 - MAE: 0.32227053870844763
Epoch: 7
Training loss: 0.17936479389667512 - MAE: 0.32048774343640357
Validation loss : 0.174004680580563 - MAE: 0.3172998359634526
Epoch: 8
Training loss: 0.17690573513507843 - MAE: 0.3179874754998065
Validation loss : 0.17019198503759173 - MAE: 0.3130601250011837
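
For reference, a minimal sketch of how a run like the one logged above could be set up with the Hugging Face transformers and PyTorch APIs. Only the config values, the learning rate 1e-06, and the loss/MAE metric names come from the log; the regression head, batch layout, and train_step helper are assumptions, not the original training script, and the "256" in the log (batch size or max sequence length) is not used here.

import torch
from transformers import BertConfig, BertForSequenceClassification

# Architecture values copied from the logged config; num_labels/problem_type are
# assumptions (an MSE-style loss reported together with MAE suggests single-target regression).
config = BertConfig(
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    max_position_embeddings=512,
    type_vocab_size=2,
    vocab_size=119547,
    num_labels=1,
    problem_type="regression",
)
model = BertForSequenceClassification(config)  # the logged run started from pretrained nb-bert-base weights instead of random init

optimizer = torch.optim.AdamW(model.parameters(), lr=1e-06)  # learning rate from the log
mse = torch.nn.MSELoss()

def train_step(batch):
    # batch is assumed to provide input_ids, attention_mask and float labels
    logits = model(input_ids=batch["input_ids"],
                   attention_mask=batch["attention_mask"]).logits.squeeze(-1)
    loss = mse(logits, batch["labels"].float())
    mae = torch.mean(torch.abs(logits - batch["labels"].float()))  # the MAE reported per epoch
    loss.backward()
    optimizer.step()
    optimizer.zero_grad()
    return loss.item(), mae.item()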