Started at: 13:31:37
nb-bert-base, 1e-06, 256
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
Epoch: 0
Training loss: 1.092776976932179 - MAE: 0.8645425344538012
Validation loss : 0.4951000182252181 - MAE: 0.572274909994364
Epoch: 1
Training loss: 0.46670577309348366 - MAE: 0.5563784497415616
Validation loss : 0.4297456882501903 - MAE: 0.5356601972217859
Epoch: 2
Training loss: 0.41889828118410977 - MAE: 0.525045431562138
Validation loss : 0.3921402771221964 - MAE: 0.5076430388808509
Epoch: 3
Training loss: 0.3852084549990567 - MAE: 0.5005902598129118
Validation loss : 0.3600435053047381 - MAE: 0.48094595382325084
Epoch: 4
Training loss: 0.3557265005328438 - MAE: 0.47516046300821996
Validation loss : 0.3300904662985551 - MAE: 0.4543344315763446
Epoch: 5
Training loss: 0.32380504553968253 - MAE: 0.4505385307513744
Validation loss : 0.2983320763236598 - MAE: 0.42765013343518166
Epoch: 6
Training loss: 0.28809667690233753 - MAE: 0.42027885166017076
Validation loss : 0.26857209833044754 - MAE: 0.40546811950280826
Epoch: 7
Training loss: 0.25561105717312205 - MAE: 0.39615992725595955
Validation loss : 0.24126421699398443 - MAE: 0.38565770486318046
Epoch: 8
Training loss: 0.2314618928865953 - MAE: 0.3768201412305367
Validation loss : 0.21927646822050997 - MAE: 0.3694444803689183
Epoch: 9
Training loss: 0.21511897146701814 - MAE: 0.36569012865859263
Validation loss : 0.20238082659871956 - MAE: 0.3555664235928941
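
Note: the original training script is not part of this log. Below is a minimal sketch of a loop that would produce output in this format, assuming the header means nb-bert-base fine-tuned as a single-output regressor with learning rate 1e-06 and 256 as the maximum sequence length (it could also be the batch size). The checkpoint name "NbAiLab/nb-bert-base", the MSE objective, and all data placeholders are assumptions, not taken from the log.

import torch
from torch.utils.data import DataLoader, TensorDataset
from transformers import AutoTokenizer, AutoModelForSequenceClassification

MODEL_NAME = "NbAiLab/nb-bert-base"   # assumption: "nb-bert-base" refers to this checkpoint
LR, MAX_LEN, EPOCHS = 1e-06, 256, 10  # values taken from the log header and epoch count

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME, num_labels=1)  # regression head
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)
optimizer = torch.optim.AdamW(model.parameters(), lr=LR)
loss_fn = torch.nn.MSELoss()  # assumed objective; the log only shows loss and MAE values

def make_loader(texts, targets, batch_size=16, shuffle=False):
    """Tokenize a list of strings and pair them with float regression targets."""
    enc = tokenizer(texts, padding=True, truncation=True, max_length=MAX_LEN, return_tensors="pt")
    ds = TensorDataset(enc["input_ids"], enc["attention_mask"],
                       torch.tensor(targets, dtype=torch.float))
    return DataLoader(ds, batch_size=batch_size, shuffle=shuffle)

def run_epoch(loader, train):
    """One pass over the data; returns mean loss and mean absolute error per batch."""
    model.train(train)
    total_loss, total_mae, n_batches = 0.0, 0.0, 0
    for input_ids, attention_mask, targets in loader:
        input_ids, attention_mask, targets = (t.to(device) for t in (input_ids, attention_mask, targets))
        with torch.set_grad_enabled(train):
            preds = model(input_ids=input_ids, attention_mask=attention_mask).logits.squeeze(-1)
            loss = loss_fn(preds, targets)
        if train:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += (preds - targets).abs().mean().item()
        n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

# Placeholder data; the real texts and targets are not part of the log.
train_loader = make_loader(["example text"] * 32, [0.5] * 32, shuffle=True)
val_loader = make_loader(["example text"] * 8, [0.5] * 8)

for epoch in range(EPOCHS):
    train_loss, train_mae = run_epoch(train_loader, train=True)
    val_loss, val_mae = run_epoch(val_loader, train=False)
    print(f"Epoch: {epoch}")
    print(f"Training loss: {train_loss} - MAE: {train_mae}")
    print(f"Validation loss : {val_loss} - MAE: {val_mae}")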