Started at: 14:09:17
norbert2, 1e-06, 128
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
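
The run header "norbert2, 1e-06, 128" names the checkpoint and two hyperparameters; reading 1e-06 as the learning rate and 128 as the batch size is an assumption, since the log does not label them. A minimal sketch of such a setup with the HuggingFace transformers API follows; the exact hub identifier for norbert2 and the single-output regression head (num_labels=1) are also assumptions, as the dumped config above only describes the pretrained BertForMaskedLM checkpoint.

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Values taken from the run header "norbert2, 1e-06, 128"; treating 1e-06 as the
# learning rate and 128 as the batch size is an assumption, the log does not label them.
MODEL_NAME = "norbert2"   # local path or hub id; the exact identifier is not given in the log
LEARNING_RATE = 1e-06
BATCH_SIZE = 128

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
# num_labels=1 puts a single regression output on top of the encoder, consistent with the
# MAE metric reported per epoch (assumed; not confirmed by the log).
model = AutoModelForSequenceClassification.from_pretrained(
    MODEL_NAME, num_labels=1, problem_type="regression"
)
optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE)
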
Epoch: 0
Training loss: 0.27911949396133423 - MAE: 0.40659444881873014
Validation loss: 0.20867602176526012 - MAE: 0.3527981750883627
Epoch: 1
Training loss: 0.20067402869462966 - MAE: 0.3443738813489259
Validation loss: 0.18265952783472397 - MAE: 0.3274485677700584
Epoch: 2
Training loss: 0.1841411805152893 - MAE: 0.3259698872141148
Validation loss: 0.1689106410040575 - MAE: 0.31383750534412613
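
Each epoch above reports a loss/MAE pair for training and for validation. A minimal sketch of a loop that would produce such numbers, assuming an MSE training objective with MAE tracked as a separate metric and a model whose forward pass returns .logits (neither detail is confirmed by the log):

import torch
from torch.nn.functional import mse_loss, l1_loss

def run_epoch(model, loader, optimizer=None, device="cpu"):
    """One pass over a DataLoader; returns (mean loss, mean MAE) for the epoch."""
    training = optimizer is not None
    model.train(training)
    total_loss = total_mae = 0.0
    n_batches = 0
    for batch in loader:  # batch assumed to be a dict of tensors including 'labels'
        labels = batch.pop("labels").float().to(device)
        inputs = {k: v.to(device) for k, v in batch.items()}
        with torch.set_grad_enabled(training):
            preds = model(**inputs).logits.squeeze(-1)
            loss = mse_loss(preds, labels)          # assumed training objective
        if training:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += l1_loss(preds, labels).item()  # MAE reported alongside the loss
        n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

Called once per epoch with the training loader (passing the optimizer) and once with the validation loader (without it), this would yield the four numbers printed for each epoch above.
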