Started at: 11:02:52
norbert, 0.001, 320
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '075d4e3705390691013e859faffc5696d071e33b'}, {})
Epoch: 0
Training loss: 0.5664169013500213 - MAE: 0.6063041299397196
Validation loss : 0.20486256693090713 - MAE: 0.35509079580778596
Epoch: 1
Training loss: 0.20873659774661063 - MAE: 0.3458564422869644
Validation loss : 0.20633939334324428 - MAE: 0.3578822666276973
Epoch: 2
Training loss: 0.18121429160237312 - MAE: 0.3237138670284559
Validation loss : 0.1752317100763321 - MAE: 0.32327554463810326
Epoch: 3
Training loss: 0.17299151346087455 - MAE: 0.3164011842698295
Validation loss : 0.1709150948694774 - MAE: 0.3182794041691023
Epoch: 4
Training loss: 0.16864437833428383 - MAE: 0.3108945364785182
Validation loss : 0.16818763528551375 - MAE: 0.3148699793125687
Epoch: 5
Training loss: 0.16392253264784812 - MAE: 0.30660716291192486
Validation loss : 0.16617548678602492 - MAE: 0.3122630513424195
Epoch: 6
Training loss: 0.16156679168343543 - MAE: 0.30419893754124694
Validation loss : 0.16488771353449141 - MAE: 0.31062475002916534
Epoch: 7
Training loss: 0.16097598448395728 - MAE: 0.302553821661368
Validation loss : 0.1636990032025746 - MAE: 0.3090839130758772
Epoch: 8
Training loss: 0.1592773199081421 - MAE: 0.30169462074484116
Validation loss : 0.16271689108439855 - MAE: 0.30784226952028254
Epoch: 9
Training loss: 0.158780038356781 - MAE: 0.30127985970592736
Validation loss : 0.1619059762784413 - MAE: 0.3072363614681151
Epoch: 10
Training loss: 0.15823087468743324 - MAE: 0.30084050008885327
Validation loss : 0.16107881494930812 - MAE: 0.306125179925495
Epoch: 11
Training loss: 0.15756251513957978 - MAE: 0.3002241726323382
Validation loss : 0.16042622923851013 - MAE: 0.3055000833529515
Epoch: 12
Training loss: 0.15745348185300828 - MAE: 0.3004324406155775
Validation loss : 0.1599027557032449 - MAE: 0.3049718824291082
Epoch: 13
Training loss: 0.15419190675020217 - MAE: 0.297037976169182
Validation loss : 0.15917949697801045 - MAE: 0.3039148389833027
Epoch: 14
Training loss: 0.15393793657422067 - MAE: 0.29568194325243347
Validation loss : 0.15910856425762177 - MAE: 0.30454563376154037
Epoch: 15
Training loss: 0.15380960777401925 - MAE: 0.2957357144989537
Validation loss : 0.15946881685938155 - MAE: 0.30570500403389383
Epoch: 16
Training loss: 0.15151794180274009 - MAE: 0.29381794308083514
Validation loss : 0.1582362705043384 - MAE: 0.3037801051231165
Epoch: 17
Training loss: 0.15117101632058622 - MAE: 0.29237934832929063
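
Note: the log above is consistent with fine-tuning a BERT-style encoder (the config dump matches a NorBERT 'BertForMaskedLM' checkpoint) with a single regression output, reporting MSE loss and MAE per epoch. The sketch below is a hypothetical reconstruction of a loop that would emit lines in this format, not the original script: the meaning of the header fields (norbert = model checkpoint name, 0.001 = learning rate, 320 = a batch- or length-related hyperparameter) is assumed, and all function and class names are made up for illustration.

# Sketch only: a training loop that would print log lines in the format above.
# Assumptions (not confirmed by the log): "norbert" is a Hugging Face BERT
# checkpoint, 0.001 is the learning rate, and the task is single-output
# regression trained with MSE and reported with MAE.
import time
import torch
from transformers import AutoModel

class RegressionHead(torch.nn.Module):
    """BERT encoder with one linear output unit for regression (hypothetical)."""
    def __init__(self, model_name: str):
        super().__init__()
        self.encoder = AutoModel.from_pretrained(model_name)
        self.head = torch.nn.Linear(self.encoder.config.hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        hidden = self.encoder(input_ids=input_ids, attention_mask=attention_mask)
        pooled = hidden.last_hidden_state[:, 0, :]  # [CLS] token representation
        return self.head(pooled).squeeze(-1)

def run_epoch(model, loader, optimizer=None, device="cpu"):
    """One pass over a DataLoader; returns (mean MSE loss, mean MAE)."""
    training = optimizer is not None
    model.train(training)
    total_loss, total_mae, batches = 0.0, 0.0, 0
    for batch in loader:
        ids = batch["input_ids"].to(device)
        mask = batch["attention_mask"].to(device)
        targets = batch["labels"].float().to(device)
        with torch.set_grad_enabled(training):
            preds = model(ids, mask)
            loss = torch.nn.functional.mse_loss(preds, targets)
        if training:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += (preds - targets).abs().mean().item()
        batches += 1
    return total_loss / batches, total_mae / batches

def train(model_name, train_loader, val_loader, lr=0.001, epochs=20, device="cpu"):
    print("Started at:", time.strftime("%H:%M:%S"))
    model = RegressionHead(model_name).to(device)
    print((model.encoder.config.to_dict(), {}))  # config dump, as in the log header
    optimizer = torch.optim.AdamW(model.parameters(), lr=lr)
    for epoch in range(epochs):
        print(f"Epoch: {epoch}")
        tr_loss, tr_mae = run_epoch(model, train_loader, optimizer, device)
        print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
        va_loss, va_mae = run_epoch(model, val_loader, None, device)
        print(f"Validation loss : {va_loss} - MAE: {va_mae}")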