|
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9443985361822689,
"eval_steps": 500,
"global_step": 2000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"learning_rate": 4.000000000000001e-06,
"loss": 1.8444,
"step": 1
},
{
"epoch": 0.0,
"learning_rate": 8.000000000000001e-06,
"loss": 1.9183,
"step": 2
},
{
"epoch": 0.0,
"learning_rate": 1.2e-05,
"loss": 2.0161,
"step": 3
},
{
"epoch": 0.0,
"learning_rate": 1.6000000000000003e-05,
"loss": 2.0216,
"step": 4
},
{
"epoch": 0.0,
"learning_rate": 2e-05,
"loss": 1.9427,
"step": 5
},
{
"epoch": 0.0,
"learning_rate": 2.4e-05,
"loss": 1.9254,
"step": 6
},
{
"epoch": 0.0,
"learning_rate": 2.8000000000000003e-05,
"loss": 1.6781,
"step": 7
},
{
"epoch": 0.0,
"learning_rate": 3.2000000000000005e-05,
"loss": 1.703,
"step": 8
},
{
"epoch": 0.0,
"learning_rate": 3.6e-05,
"loss": 2.0178,
"step": 9
},
{
"epoch": 0.0,
"learning_rate": 4e-05,
"loss": 1.8788,
"step": 10
},
{
"epoch": 0.01,
"learning_rate": 4.4000000000000006e-05,
"loss": 2.0176,
"step": 11
},
{
"epoch": 0.01,
"learning_rate": 4.8e-05,
"loss": 1.659,
"step": 12
},
{
"epoch": 0.01,
"learning_rate": 5.2000000000000004e-05,
"loss": 1.4496,
"step": 13
},
{
"epoch": 0.01,
"learning_rate": 5.6000000000000006e-05,
"loss": 1.3411,
"step": 14
},
{
"epoch": 0.01,
"learning_rate": 6e-05,
"loss": 1.3288,
"step": 15
},
{
"epoch": 0.01,
"learning_rate": 6.400000000000001e-05,
"loss": 1.7064,
"step": 16
},
{
"epoch": 0.01,
"learning_rate": 6.800000000000001e-05,
"loss": 1.8597,
"step": 17
},
{
"epoch": 0.01,
"learning_rate": 7.2e-05,
"loss": 1.5418,
"step": 18
},
{
"epoch": 0.01,
"learning_rate": 7.6e-05,
"loss": 1.6277,
"step": 19
},
{
"epoch": 0.01,
"learning_rate": 8e-05,
"loss": 1.4583,
"step": 20
},
{
"epoch": 0.01,
"learning_rate": 8.4e-05,
"loss": 1.466,
"step": 21
},
{
"epoch": 0.01,
"learning_rate": 8.800000000000001e-05,
"loss": 1.7423,
"step": 22
},
{
"epoch": 0.01,
"learning_rate": 9.200000000000001e-05,
"loss": 1.5443,
"step": 23
},
{
"epoch": 0.01,
"learning_rate": 9.6e-05,
"loss": 1.751,
"step": 24
},
{
"epoch": 0.01,
"learning_rate": 0.0001,
"loss": 1.7061,
"step": 25
},
{
"epoch": 0.01,
"learning_rate": 0.00010400000000000001,
"loss": 1.5738,
"step": 26
},
{
"epoch": 0.01,
"learning_rate": 0.00010800000000000001,
"loss": 1.4467,
"step": 27
},
{
"epoch": 0.01,
"learning_rate": 0.00011200000000000001,
"loss": 1.4368,
"step": 28
},
{
"epoch": 0.01,
"learning_rate": 0.000116,
"loss": 1.3751,
"step": 29
},
{
"epoch": 0.01,
"learning_rate": 0.00012,
"loss": 1.2682,
"step": 30
},
{
"epoch": 0.01,
"learning_rate": 0.000124,
"loss": 1.685,
"step": 31
},
{
"epoch": 0.02,
"learning_rate": 0.00012800000000000002,
"loss": 1.3104,
"step": 32
},
{
"epoch": 0.02,
"learning_rate": 0.000132,
"loss": 1.21,
"step": 33
},
{
"epoch": 0.02,
"learning_rate": 0.00013600000000000003,
"loss": 1.7759,
"step": 34
},
{
"epoch": 0.02,
"learning_rate": 0.00014,
"loss": 1.4145,
"step": 35
},
{
"epoch": 0.02,
"learning_rate": 0.000144,
"loss": 1.5398,
"step": 36
},
{
"epoch": 0.02,
"learning_rate": 0.000148,
"loss": 1.6678,
"step": 37
},
{
"epoch": 0.02,
"learning_rate": 0.000152,
"loss": 1.5502,
"step": 38
},
{
"epoch": 0.02,
"learning_rate": 0.00015600000000000002,
"loss": 1.3757,
"step": 39
},
{
"epoch": 0.02,
"learning_rate": 0.00016,
"loss": 1.5165,
"step": 40
},
{
"epoch": 0.02,
"learning_rate": 0.000164,
"loss": 1.1687,
"step": 41
},
{
"epoch": 0.02,
"learning_rate": 0.000168,
"loss": 1.8161,
"step": 42
},
{
"epoch": 0.02,
"learning_rate": 0.000172,
"loss": 1.4016,
"step": 43
},
{
"epoch": 0.02,
"learning_rate": 0.00017600000000000002,
"loss": 1.4039,
"step": 44
},
{
"epoch": 0.02,
"learning_rate": 0.00018,
"loss": 1.4823,
"step": 45
},
{
"epoch": 0.02,
"learning_rate": 0.00018400000000000003,
"loss": 1.3028,
"step": 46
},
{
"epoch": 0.02,
"learning_rate": 0.000188,
"loss": 1.3179,
"step": 47
},
{
"epoch": 0.02,
"learning_rate": 0.000192,
"loss": 1.465,
"step": 48
},
{
"epoch": 0.02,
"learning_rate": 0.000196,
"loss": 1.3618,
"step": 49
},
{
"epoch": 0.02,
"learning_rate": 0.0002,
"loss": 1.4782,
"step": 50
},
{
"epoch": 0.02,
"learning_rate": 0.00020400000000000003,
"loss": 1.1542,
"step": 51
},
{
"epoch": 0.02,
"learning_rate": 0.00020800000000000001,
"loss": 1.3877,
"step": 52
},
{
"epoch": 0.03,
"learning_rate": 0.00021200000000000003,
"loss": 1.6806,
"step": 53
},
{
"epoch": 0.03,
"learning_rate": 0.00021600000000000002,
"loss": 1.4566,
"step": 54
},
{
"epoch": 0.03,
"learning_rate": 0.00022000000000000003,
"loss": 1.4927,
"step": 55
},
{
"epoch": 0.03,
"learning_rate": 0.00022400000000000002,
"loss": 1.7024,
"step": 56
},
{
"epoch": 0.03,
"learning_rate": 0.00022799999999999999,
"loss": 1.5621,
"step": 57
},
{
"epoch": 0.03,
"learning_rate": 0.000232,
"loss": 1.5714,
"step": 58
},
{
"epoch": 0.03,
"learning_rate": 0.000236,
"loss": 1.2905,
"step": 59
},
{
"epoch": 0.03,
"learning_rate": 0.00024,
"loss": 1.332,
"step": 60
},
{
"epoch": 0.03,
"learning_rate": 0.000244,
"loss": 1.5307,
"step": 61
},
{
"epoch": 0.03,
"learning_rate": 0.000248,
"loss": 1.4634,
"step": 62
},
{
"epoch": 0.03,
"learning_rate": 0.000252,
"loss": 1.3234,
"step": 63
},
{
"epoch": 0.03,
"learning_rate": 0.00025600000000000004,
"loss": 1.3457,
"step": 64
},
{
"epoch": 0.03,
"learning_rate": 0.00026000000000000003,
"loss": 1.6265,
"step": 65
},
{
"epoch": 0.03,
"learning_rate": 0.000264,
"loss": 1.4168,
"step": 66
},
{
"epoch": 0.03,
"learning_rate": 0.000268,
"loss": 1.6415,
"step": 67
},
{
"epoch": 0.03,
"learning_rate": 0.00027200000000000005,
"loss": 1.5993,
"step": 68
},
{
"epoch": 0.03,
"learning_rate": 0.000276,
"loss": 1.5743,
"step": 69
},
{
"epoch": 0.03,
"learning_rate": 0.00028,
"loss": 1.1849,
"step": 70
},
{
"epoch": 0.03,
"learning_rate": 0.000284,
"loss": 1.4831,
"step": 71
},
{
"epoch": 0.03,
"learning_rate": 0.000288,
"loss": 1.3906,
"step": 72
},
{
"epoch": 0.03,
"learning_rate": 0.000292,
"loss": 1.2823,
"step": 73
},
{
"epoch": 0.03,
"learning_rate": 0.000296,
"loss": 1.3114,
"step": 74
},
{
"epoch": 0.04,
"learning_rate": 0.00030000000000000003,
"loss": 1.8664,
"step": 75
},
{
"epoch": 0.04,
"learning_rate": 0.000304,
"loss": 1.3636,
"step": 76
},
{
"epoch": 0.04,
"learning_rate": 0.000308,
"loss": 1.5702,
"step": 77
},
{
"epoch": 0.04,
"learning_rate": 0.00031200000000000005,
"loss": 1.5398,
"step": 78
},
{
"epoch": 0.04,
"learning_rate": 0.00031600000000000004,
"loss": 1.3273,
"step": 79
},
{
"epoch": 0.04,
"learning_rate": 0.00032,
"loss": 1.6198,
"step": 80
},
{
"epoch": 0.04,
"learning_rate": 0.000324,
"loss": 1.3984,
"step": 81
},
{
"epoch": 0.04,
"learning_rate": 0.000328,
"loss": 1.4277,
"step": 82
},
{
"epoch": 0.04,
"learning_rate": 0.000332,
"loss": 1.3518,
"step": 83
},
{
"epoch": 0.04,
"learning_rate": 0.000336,
"loss": 1.6,
"step": 84
},
{
"epoch": 0.04,
"learning_rate": 0.00034,
"loss": 1.6434,
"step": 85
},
{
"epoch": 0.04,
"learning_rate": 0.000344,
"loss": 1.4813,
"step": 86
},
{
"epoch": 0.04,
"learning_rate": 0.000348,
"loss": 1.7128,
"step": 87
},
{
"epoch": 0.04,
"learning_rate": 0.00035200000000000005,
"loss": 1.1899,
"step": 88
},
{
"epoch": 0.04,
"learning_rate": 0.00035600000000000003,
"loss": 1.6948,
"step": 89
},
{
"epoch": 0.04,
"learning_rate": 0.00036,
"loss": 1.3608,
"step": 90
},
{
"epoch": 0.04,
"learning_rate": 0.000364,
"loss": 1.4245,
"step": 91
},
{
"epoch": 0.04,
"learning_rate": 0.00036800000000000005,
"loss": 1.4884,
"step": 92
},
{
"epoch": 0.04,
"learning_rate": 0.00037200000000000004,
"loss": 1.4071,
"step": 93
},
{
"epoch": 0.04,
"learning_rate": 0.000376,
"loss": 1.4908,
"step": 94
},
{
"epoch": 0.04,
"learning_rate": 0.00038,
"loss": 1.564,
"step": 95
},
{
"epoch": 0.05,
"learning_rate": 0.000384,
"loss": 1.3262,
"step": 96
},
{
"epoch": 0.05,
"learning_rate": 0.000388,
"loss": 1.4975,
"step": 97
},
{
"epoch": 0.05,
"learning_rate": 0.000392,
"loss": 1.1834,
"step": 98
},
{
"epoch": 0.05,
"learning_rate": 0.00039600000000000003,
"loss": 1.4296,
"step": 99
},
{
"epoch": 0.05,
"learning_rate": 0.0004,
"loss": 1.5633,
"step": 100
},
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00039999975740163973, |
|
"loss": 1.5139, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0003999990296071474, |
|
"loss": 1.3188, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0003999978166182887, |
|
"loss": 1.495, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0003999961184380062, |
|
"loss": 1.6301, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0003999939350704197, |
|
"loss": 1.5732, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0003999912665208261, |
|
"loss": 1.5114, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0003999881127956992, |
|
"loss": 1.3505, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0003999844739026899, |
|
"loss": 1.4632, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00039998034985062604, |
|
"loss": 1.5271, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0003999757406495126, |
|
"loss": 1.3881, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00039997064631053133, |
|
"loss": 1.2799, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0003999650668460411, |
|
"loss": 1.5678, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00039995900226957745, |
|
"loss": 1.1719, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0003999524525958531, |
|
"loss": 1.4742, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00039994541784075746, |
|
"loss": 1.3277, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00039993789802135655, |
|
"loss": 1.2949, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003999298931558936, |
|
"loss": 1.7286, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00039992140326378794, |
|
"loss": 1.7081, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003999124283656362, |
|
"loss": 1.4714, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003999029684832112, |
|
"loss": 1.3633, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003998930236394625, |
|
"loss": 1.3825, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003998825938585162, |
|
"loss": 1.5665, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00039987167916567454, |
|
"loss": 1.2232, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003998602795874167, |
|
"loss": 1.5348, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00039984839515139767, |
|
"loss": 1.4528, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00039983602588644884, |
|
"loss": 1.4307, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.000399823171822578, |
|
"loss": 1.246, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003998098329909689, |
|
"loss": 1.1974, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003997960094239812, |
|
"loss": 1.3772, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003997817011551508, |
|
"loss": 1.5958, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00039976690821918917, |
|
"loss": 1.2651, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00039975163065198376, |
|
"loss": 1.4289, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003997358684905979, |
|
"loss": 1.3508, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00039971962177326996, |
|
"loss": 1.3393, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003997028905394146, |
|
"loss": 1.547, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003996856748296212, |
|
"loss": 1.2613, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00039966797468565503, |
|
"loss": 1.6868, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00039964979015045626, |
|
"loss": 1.5064, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0003996311212681402, |
|
"loss": 1.2082, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0003996119680839973, |
|
"loss": 1.4344, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.000399592330644493, |
|
"loss": 1.4068, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0003995722089972672, |
|
"loss": 1.0907, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00039955160319113484, |
|
"loss": 1.4394, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00039953051327608515, |
|
"loss": 1.2697, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00039950893930328204, |
|
"loss": 1.6047, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00039948688132506355, |
|
"loss": 1.5132, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0003994643393949419, |
|
"loss": 1.6163, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0003994413135676036, |
|
"loss": 1.5018, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0003994178038989088, |
|
"loss": 1.2945, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00039939381044589166, |
|
"loss": 1.7071, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0003993693332667598, |
|
"loss": 1.5062, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00039934437242089457, |
|
"loss": 1.3906, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0003993189279688505, |
|
"loss": 1.2134, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0003992929999723554, |
|
"loss": 1.3035, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0003992665884943102, |
|
"loss": 1.6205, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00039923969359878877, |
|
"loss": 1.6062, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00039921231535103757, |
|
"loss": 1.4719, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00039918445381747586, |
|
"loss": 1.4253, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00039915610906569523, |
|
"loss": 1.4565, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0003991272811644596, |
|
"loss": 1.4769, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00039909797018370497, |
|
"loss": 1.32, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00039906817619453935, |
|
"loss": 1.3666, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0003990378992692424, |
|
"loss": 1.4335, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0003990071394812654, |
|
"loss": 1.3242, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00039897589690523127, |
|
"loss": 1.3204, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00039894417161693386, |
|
"loss": 1.3507, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00039891196369333813, |
|
"loss": 1.6852, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0003988792732125801, |
|
"loss": 1.5667, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0003988461002539664, |
|
"loss": 1.4404, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0003988124448979739, |
|
"loss": 1.4452, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00039877830722625, |
|
"loss": 1.6917, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0003987436873216122, |
|
"loss": 1.7008, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0003987085852680478, |
|
"loss": 1.5154, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00039867300115071374, |
|
"loss": 1.3697, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00039863693505593655, |
|
"loss": 1.3099, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00039860038707121204, |
|
"loss": 1.3247, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00039856335728520495, |
|
"loss": 1.4999, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0003985258457877489, |
|
"loss": 1.4268, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0003984878526698463, |
|
"loss": 1.7714, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0003984493780236677, |
|
"loss": 1.356, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00039841042194255205, |
|
"loss": 1.2773, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0003983709845210061, |
|
"loss": 1.0558, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0003983310658547044, |
|
"loss": 1.7372, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.000398290666040489, |
|
"loss": 1.5983, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0003982497851763692, |
|
"loss": 1.437, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0003982084233615213, |
|
"loss": 1.3883, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00039816658069628835, |
|
"loss": 1.5226, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00039812425728218, |
|
"loss": 1.4093, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00039808145322187214, |
|
"loss": 1.3972, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00039803816861920673, |
|
"loss": 1.1755, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00039799440357919153, |
|
"loss": 1.606, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0003979501582079997, |
|
"loss": 1.5801, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00039790543261296996, |
|
"loss": 1.4668, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0003978602269026057, |
|
"loss": 1.5086, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0003978145411865754, |
|
"loss": 1.3564, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00039776837557571176, |
|
"loss": 1.53, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0003977217301820117, |
|
"loss": 1.5908, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0003976746051186364, |
|
"loss": 1.1649, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0003976270004999103, |
|
"loss": 1.4091, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00039757891644132153, |
|
"loss": 1.5316, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0003975303530595212, |
|
"loss": 1.5858, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0003974813104723233, |
|
"loss": 1.6776, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00039743178879870426, |
|
"loss": 1.4326, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00039738178815880296, |
|
"loss": 1.2465, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00039733130867392004, |
|
"loss": 1.374, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00039728035046651797, |
|
"loss": 1.5635, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0003972289136602204, |
|
"loss": 1.3614, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0003971769983798124, |
|
"loss": 1.2695, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00039712460475123943, |
|
"loss": 1.4433, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00039707173290160766, |
|
"loss": 1.4809, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00039701838295918315, |
|
"loss": 1.3501, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0003969645550533922, |
|
"loss": 1.3736, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0003969102493148203, |
|
"loss": 1.1709, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00039685546587521237, |
|
"loss": 1.3236, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00039680020486747205, |
|
"loss": 1.2384, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00039674446642566175, |
|
"loss": 1.3349, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0003966882506850018, |
|
"loss": 1.3416, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0003966315577818709, |
|
"loss": 1.132, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.000396574387853805, |
|
"loss": 1.4271, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0003965167410394975, |
|
"loss": 1.5336, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00039645861747879843, |
|
"loss": 1.1742, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0003964000173127148, |
|
"loss": 1.2881, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003963409406834095, |
|
"loss": 1.3789, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00039628138773420147, |
|
"loss": 1.5167, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003962213586095653, |
|
"loss": 1.4953, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003961608534551307, |
|
"loss": 1.7612, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.000396099872417682, |
|
"loss": 1.2886, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003960384156451583, |
|
"loss": 1.3367, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003959764832866527, |
|
"loss": 1.6526, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003959140754924121, |
|
"loss": 1.3393, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003958511924138368, |
|
"loss": 1.6188, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003957878342034801, |
|
"loss": 1.163, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00039572400101504793, |
|
"loss": 0.9372, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00039565969300339867, |
|
"loss": 1.3556, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003955949103245424, |
|
"loss": 1.3058, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00039552965313564083, |
|
"loss": 1.3413, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.000395463921595007, |
|
"loss": 1.6436, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003953977158621043, |
|
"loss": 1.3746, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00039533103609754695, |
|
"loss": 1.4178, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003952638824630989, |
|
"loss": 1.5444, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00039519625512167373, |
|
"loss": 1.6739, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003951281542373343, |
|
"loss": 1.3911, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0003950595799752922, |
|
"loss": 1.2932, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00039499053250190754, |
|
"loss": 1.5476, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00039492101198468826, |
|
"loss": 1.5088, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0003948510185922901, |
|
"loss": 1.4024, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00039478055249451575, |
|
"loss": 1.4, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00039470961386231496, |
|
"loss": 1.2615, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0003946382028677836, |
|
"loss": 1.4472, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00039456631968416364, |
|
"loss": 1.4282, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00039449396448584237, |
|
"loss": 1.2253, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0003944211374483524, |
|
"loss": 1.6736, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00039434783874837097, |
|
"loss": 1.5025, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00039427406856371944, |
|
"loss": 1.5902, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0003941998270733631, |
|
"loss": 1.1451, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00039412511445741067, |
|
"loss": 1.5581, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0003940499308971136, |
|
"loss": 1.5489, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00039397427657486604, |
|
"loss": 1.5395, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0003938981516742041, |
|
"loss": 1.5695, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0003938215563798057, |
|
"loss": 1.4453, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0003937444908774895, |
|
"loss": 1.505, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0003936669553542153, |
|
"loss": 1.4602, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0003935889499980831, |
|
"loss": 1.4522, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00039351047499833235, |
|
"loss": 1.4439, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003934315305453423, |
|
"loss": 1.6039, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003933521168306309, |
|
"loss": 1.2475, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003932722340468545, |
|
"loss": 1.4151, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003931918823878074, |
|
"loss": 1.3802, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003931110620484215, |
|
"loss": 1.4119, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003930297732247654, |
|
"loss": 1.2728, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003929480161140446, |
|
"loss": 1.4114, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003928657909146006, |
|
"loss": 1.4245, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00039278309782591014, |
|
"loss": 1.5001, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003926999370485856, |
|
"loss": 1.2046, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00039261630878437343, |
|
"loss": 1.409, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00039253221323615444, |
|
"loss": 1.4574, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00039244765060794315, |
|
"loss": 1.5181, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003923626211048871, |
|
"loss": 1.2098, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00039227712493326644, |
|
"loss": 1.5624, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003921911623004935, |
|
"loss": 1.5203, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003921047334151121, |
|
"loss": 1.2721, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003920178384867975, |
|
"loss": 1.2674, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003919304777263553, |
|
"loss": 1.574, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003918426513457211, |
|
"loss": 1.5655, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0003917543595579605, |
|
"loss": 1.4375, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003916656025772678, |
|
"loss": 1.4467, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.000391576380618966, |
|
"loss": 1.1648, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003914866938995062, |
|
"loss": 1.4204, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00039139654263646676, |
|
"loss": 1.0229, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00039130592704855326, |
|
"loss": 1.3691, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00039121484735559755, |
|
"loss": 1.4937, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003911233037785576, |
|
"loss": 1.2192, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00039103129653951654, |
|
"loss": 1.1896, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003909388258616824, |
|
"loss": 1.1806, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00039084589196938753, |
|
"loss": 1.4472, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003907524950880881, |
|
"loss": 1.2835, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00039065863544436347, |
|
"loss": 1.6074, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003905643132659154, |
|
"loss": 1.6836, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003904695287815681, |
|
"loss": 1.1548, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003903742822212671, |
|
"loss": 1.3484, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.000390278573816079, |
|
"loss": 1.568, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003901824037981909, |
|
"loss": 1.4065, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003900857724009096, |
|
"loss": 1.4226, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003899886798586613, |
|
"loss": 1.4137, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00038989112640699095, |
|
"loss": 1.2969, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003897931122825616, |
|
"loss": 1.4033, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0003896946377231539, |
|
"loss": 1.5774, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00038959570296766554, |
|
"loss": 1.4064, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0003894963082561106, |
|
"loss": 1.6194, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.000389396453829619, |
|
"loss": 1.3165, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.000389296139930436, |
|
"loss": 1.5399, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0003891953668019214, |
|
"loss": 1.3126, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00038909413468854925, |
|
"loss": 1.4554, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0003889924438359069, |
|
"loss": 1.1586, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0003888902944906947, |
|
"loss": 1.0335, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0003887876869007254, |
|
"loss": 1.5808, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00038868462131492326, |
|
"loss": 1.3421, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00038858109798332364, |
|
"loss": 1.6144, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00038847711715707254, |
|
"loss": 1.5857, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00038837267908842563, |
|
"loss": 1.3044, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00038826778403074807, |
|
"loss": 1.3907, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00038816243223851354, |
|
"loss": 1.3099, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0003880566239673036, |
|
"loss": 1.4291, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0003879503594738076, |
|
"loss": 1.3069, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00038784363901582134, |
|
"loss": 1.3345, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00038773646285224694, |
|
"loss": 1.3937, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.000387628831243092, |
|
"loss": 1.2333, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00038752074444946895, |
|
"loss": 1.2367, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0003874122027335948, |
|
"loss": 1.4973, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00038730320635878973, |
|
"loss": 1.2619, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0003871937555894773, |
|
"loss": 1.3699, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0003870838506911832, |
|
"loss": 1.2068, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.000386973491930535, |
|
"loss": 1.4411, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0003868626795752612, |
|
"loss": 1.4592, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0003867514138941908, |
|
"loss": 1.4266, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00038663969515725244, |
|
"loss": 1.2687, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.000386527523635474, |
|
"loss": 1.5515, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0003864148996009817, |
|
"loss": 1.4121, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00038630182332699965, |
|
"loss": 1.4067, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00038618829508784905, |
|
"loss": 1.3161, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00038607431515894747, |
|
"loss": 1.3532, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00038595988381680843, |
|
"loss": 1.5935, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0003858450013390404, |
|
"loss": 1.5683, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0003857296680043465, |
|
"loss": 1.2895, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00038561388409252346, |
|
"loss": 1.4423, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00038549764988446113, |
|
"loss": 1.5754, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0003853809656621418, |
|
"loss": 1.4581, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00038526383170863946, |
|
"loss": 1.4425, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00038514624830811924, |
|
"loss": 1.0757, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0003850282157458365, |
|
"loss": 1.5043, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0003849097343081362, |
|
"loss": 1.4347, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00038479080428245247, |
|
"loss": 1.4784, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0003846714259573077, |
|
"loss": 1.2782, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00038455159962231156, |
|
"loss": 1.3142, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00038443132556816083, |
|
"loss": 1.4472, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0003843106040866384, |
|
"loss": 1.5361, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0003841894354706127, |
|
"loss": 1.345, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00038406782001403663, |
|
"loss": 1.4903, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00038394575801194744, |
|
"loss": 1.2769, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0003838232497604655, |
|
"loss": 1.3184, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0003837002955567938, |
|
"loss": 1.338, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00038357689569921716, |
|
"loss": 1.5696, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0003834530504871018, |
|
"loss": 1.0948, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.000383328760220894, |
|
"loss": 1.2939, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00038320402520211996, |
|
"loss": 1.5023, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0003830788457333848, |
|
"loss": 1.272, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0003829532221183719, |
|
"loss": 1.2972, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00038282715466184206, |
|
"loss": 1.3987, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0003827006436696328, |
|
"loss": 1.3171, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00038257368944865776, |
|
"loss": 1.2435, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00038244629230690583, |
|
"loss": 1.4025, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00038231845255344034, |
|
"loss": 1.378, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0003821901704983984, |
|
"loss": 1.5586, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00038206144645299025, |
|
"loss": 1.2798, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0003819322807294984, |
|
"loss": 1.4547, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00038180267364127654, |
|
"loss": 1.3677, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0003816726255027494, |
|
"loss": 1.4257, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00038154213662941185, |
|
"loss": 1.3593, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0003814112073378276, |
|
"loss": 1.3271, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0003812798379456289, |
|
"loss": 1.5296, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0003811480287715158, |
|
"loss": 1.5515, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0003810157801352553, |
|
"loss": 1.4145, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00038088309235768025, |
|
"loss": 1.2181, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0003807499657606891, |
|
"loss": 1.4692, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00038061640066724473, |
|
"loss": 1.3907, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0003804823974013741, |
|
"loss": 1.2358, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0003803479562881666, |
|
"loss": 1.5738, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00038021307765377443, |
|
"loss": 1.3619, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0003800777618254108, |
|
"loss": 1.4637, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0003799420091313498, |
|
"loss": 1.3023, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00037980581990092513, |
|
"loss": 1.3733, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0003796691944645296, |
|
"loss": 1.5788, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0003795321331536145, |
|
"loss": 1.4521, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0003793946363006881, |
|
"loss": 1.3638, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0003792567042393156, |
|
"loss": 1.3267, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0003791183373041179, |
|
"loss": 1.5984, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00037897953583077097, |
|
"loss": 1.678, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0003788403001560048, |
|
"loss": 1.284, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00037870063061760294, |
|
"loss": 1.3238, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0003785605275544014, |
|
"loss": 1.4447, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0003784199913062879, |
|
"loss": 1.4148, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.000378279022214201, |
|
"loss": 1.3677, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0003781376206201295, |
|
"loss": 1.3583, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00037799578686711135, |
|
"loss": 1.3407, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00037785352129923283, |
|
"loss": 1.4658, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00037771082426162793, |
|
"loss": 1.4504, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0003775676961004774, |
|
"loss": 1.2846, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00037742413716300763, |
|
"loss": 1.4006, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00037728014779749036, |
|
"loss": 1.3782, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00037713572835324145, |
|
"loss": 1.332, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00037699087918062, |
|
"loss": 1.4053, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0003768456006310279, |
|
"loss": 1.3307, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00037669989305690835, |
|
"loss": 1.5948, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00037655375681174566, |
|
"loss": 1.6806, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0003764071922500639, |
|
"loss": 1.3207, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0003762601997274263, |
|
"loss": 1.326, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0003761127796004343, |
|
"loss": 1.3651, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00037596493222672676, |
|
"loss": 1.2577, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00037581665796497895, |
|
"loss": 1.4533, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0003756679571749018, |
|
"loss": 1.2807, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00037551883021724096, |
|
"loss": 1.3671, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00037536927745377606, |
|
"loss": 1.4806, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0003752192992473196, |
|
"loss": 1.4572, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00037506889596171627, |
|
"loss": 1.4408, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00037491806796184196, |
|
"loss": 1.1899, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00037476681561360287, |
|
"loss": 1.2646, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0003746151392839349, |
|
"loss": 1.0926, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0003744630393408021, |
|
"loss": 1.4036, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00037431051615319656, |
|
"loss": 1.384, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.000374157570091137, |
|
"loss": 1.3298, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0003740042015256681, |
|
"loss": 1.4192, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0003738504108288595, |
|
"loss": 1.241, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00037369619837380487, |
|
"loss": 1.6417, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0003735415645346211, |
|
"loss": 1.0922, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00037338650968644736, |
|
"loss": 1.331, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00037323103420544416, |
|
"loss": 1.4702, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0003730751384687924, |
|
"loss": 1.1575, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0003729188228546927, |
|
"loss": 1.466, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00037276208774236416, |
|
"loss": 1.1979, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0003726049335120435, |
|
"loss": 1.2145, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0003724473605449844, |
|
"loss": 1.3891, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0003722893692234562, |
|
"loss": 1.1398, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0003721309599307434, |
|
"loss": 1.3276, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00037197213305114414, |
|
"loss": 1.3792, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00037181288896997, |
|
"loss": 1.3913, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0003716532280735445, |
|
"loss": 1.5605, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00037149315074920227, |
|
"loss": 1.3072, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0003713326573852883, |
|
"loss": 1.4979, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0003711717483711569, |
|
"loss": 1.5968, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0003710104240971707, |
|
"loss": 1.7091, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0003708486849546997, |
|
"loss": 1.4488, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00037068653133612045, |
|
"loss": 1.097, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0003705239636348149, |
|
"loss": 1.5878, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00037036098224516967, |
|
"loss": 1.5458, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00037019758756257494, |
|
"loss": 1.1706, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0003700337799834236, |
|
"loss": 1.5924, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00036986955990511, |
|
"loss": 1.15, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00036970492772602946, |
|
"loss": 1.485, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0003695398838455769, |
|
"loss": 1.3393, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0003693744286641462, |
|
"loss": 1.5324, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00036920856258312873, |
|
"loss": 1.4272, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.000369042286004913, |
|
"loss": 1.3321, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0003688755993328832, |
|
"loss": 1.423, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00036870850297141846, |
|
"loss": 1.6408, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0003685409973258919, |
|
"loss": 1.4626, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0003683730828026694, |
|
"loss": 1.5387, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0003682047598091089, |
|
"loss": 1.1369, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00036803602875355913, |
|
"loss": 1.553, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00036786689004535887, |
|
"loss": 1.6675, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00036769734409483596, |
|
"loss": 1.4323, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00036752739131330596, |
|
"loss": 1.4708, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00036735703211307166, |
|
"loss": 1.2672, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0003671862669074216, |
|
"loss": 1.6278, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0003670150961106294, |
|
"loss": 1.4408, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0003668435201379526, |
|
"loss": 1.2482, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00036667153940563176, |
|
"loss": 1.3008, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0003664991543308892, |
|
"loss": 1.2442, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0003663263653319283, |
|
"loss": 1.4983, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00036615317282793244, |
|
"loss": 1.4815, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0003659795772390637, |
|
"loss": 1.516, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00036580557898646215, |
|
"loss": 1.5424, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00036563117849224474, |
|
"loss": 1.3857, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00036545637617950416, |
|
"loss": 1.5148, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.000365281172472308, |
|
"loss": 1.1913, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00036510556779569757, |
|
"loss": 1.4367, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0003649295625756869, |
|
"loss": 1.334, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00036475315723926175, |
|
"loss": 1.4054, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0003645763522143787, |
|
"loss": 1.317, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00036439914792996364, |
|
"loss": 1.2957, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0003642215448159115, |
|
"loss": 1.2054, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0003640435433030843, |
|
"loss": 1.3806, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0003638651438233109, |
|
"loss": 1.3092, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0003636863468093855, |
|
"loss": 1.3545, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0003635071526950668, |
|
"loss": 1.3809, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0003633275619150767, |
|
"loss": 0.981, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0003631475749050994, |
|
"loss": 1.4125, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00036296719210178056, |
|
"loss": 1.3213, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0003627864139427259, |
|
"loss": 1.5648, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00036260524086650025, |
|
"loss": 1.274, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0003624236733126265, |
|
"loss": 1.3447, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00036224171172158457, |
|
"loss": 1.4144, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0003620593565348103, |
|
"loss": 1.4745, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00036187660819469433, |
|
"loss": 1.4349, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0003616934671445813, |
|
"loss": 1.3624, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00036150993382876826, |
|
"loss": 1.1218, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.000361326008692504, |
|
"loss": 1.1525, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00036114169218198793, |
|
"loss": 1.557, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0003609569847443689, |
|
"loss": 1.421, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0003607718868277441, |
|
"loss": 1.3613, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0003605863988811581, |
|
"loss": 1.5302, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00036040052135460156, |
|
"loss": 1.2848, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0003602142546990103, |
|
"loss": 1.4686, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00036002759936626415, |
|
"loss": 1.3161, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00035984055580918595, |
|
"loss": 1.307, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00035965312448154034, |
|
"loss": 1.4552, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00035946530583803247, |
|
"loss": 1.2265, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0003592771003343075, |
|
"loss": 1.4014, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0003590885084269488, |
|
"loss": 1.5022, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0003588995305734772, |
|
"loss": 1.0336, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035871016723234997, |
|
"loss": 1.1944, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035852041886295943, |
|
"loss": 1.5178, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035833028592563204, |
|
"loss": 1.4343, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0003581397688816271, |
|
"loss": 1.2634, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035794886819313584, |
|
"loss": 1.3973, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035775758432328035, |
|
"loss": 1.5337, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035756591773611194, |
|
"loss": 1.5783, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035737386889661077, |
|
"loss": 1.3674, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0003571814382706842, |
|
"loss": 1.2439, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0003569886263251657, |
|
"loss": 1.2478, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035679543352781383, |
|
"loss": 1.6748, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035660186034731125, |
|
"loss": 1.1766, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0003564079072532633, |
|
"loss": 1.4937, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035621357471619697, |
|
"loss": 1.5937, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035601886320755984, |
|
"loss": 1.3704, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035582377319971884, |
|
"loss": 1.1386, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0003556283051659591, |
|
"loss": 1.4755, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0003554324595804829, |
|
"loss": 1.5553, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0003552362369184084, |
|
"loss": 1.5264, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00035503963765576855, |
|
"loss": 1.3787, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0003548426622695099, |
|
"loss": 1.5708, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0003546453112374916, |
|
"loss": 1.5532, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00035444758503848405, |
|
"loss": 1.2355, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00035424948415216763, |
|
"loss": 1.2705, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00035405100905913184, |
|
"loss": 1.3754, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0003538521602408741, |
|
"loss": 1.4639, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0003536529381797984, |
|
"loss": 1.3271, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0003534533433592141, |
|
"loss": 1.4256, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00035325337626333504, |
|
"loss": 1.4383, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00035305303737727807, |
|
"loss": 1.1625, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.000352852327187062, |
|
"loss": 1.4411, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00035265124617960653, |
|
"loss": 1.2599, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0003524497948427309, |
|
"loss": 1.2186, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0003522479736651527, |
|
"loss": 1.5345, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0003520457831364868, |
|
"loss": 1.2944, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00035184322374724416, |
|
"loss": 1.3791, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0003516402959888304, |
|
"loss": 1.6277, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00035143700035354517, |
|
"loss": 1.4593, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00035123333733458014, |
|
"loss": 1.3568, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00035102930742601855, |
|
"loss": 1.2664, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0003508249111228336, |
|
"loss": 1.1526, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00035062014892088734, |
|
"loss": 1.568, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00035041502131692953, |
|
"loss": 1.3015, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0003502095288085964, |
|
"loss": 1.2436, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00035000367189440935, |
|
"loss": 1.246, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0003497974510737739, |
|
"loss": 1.2972, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00034959086684697847, |
|
"loss": 1.4416, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0003493839197151928, |
|
"loss": 1.4723, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00034917661018046755, |
|
"loss": 1.2364, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00034896893874573193, |
|
"loss": 0.9685, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0003487609059147936, |
|
"loss": 1.4989, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00034855251219233674, |
|
"loss": 1.331, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0003483437580839212, |
|
"loss": 1.282, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0003481346440959809, |
|
"loss": 1.4483, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00034792517073582306, |
|
"loss": 1.3028, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0003477153385116265, |
|
"loss": 1.2324, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00034750514793244083, |
|
"loss": 1.4206, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00034729459950818496, |
|
"loss": 1.5627, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00034708369374964593, |
|
"loss": 1.4441, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0003468724311684775, |
|
"loss": 1.2597, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00034666081227719943, |
|
"loss": 1.3732, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00034644883758919555, |
|
"loss": 1.4752, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00034623650761871305, |
|
"loss": 1.0906, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0003460238228808609, |
|
"loss": 1.3774, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0003458107838916088, |
|
"loss": 1.2825, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0003455973911677859, |
|
"loss": 1.442, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00034538364522707934, |
|
"loss": 1.3051, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00034516954658803334, |
|
"loss": 1.2828, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00034495509577004774, |
|
"loss": 1.112, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00034474029329337663, |
|
"loss": 1.707, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0003445251396791273, |
|
"loss": 1.403, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0003443096354492589, |
|
"loss": 1.2986, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00034409378112658113, |
|
"loss": 1.3418, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00034387757723475313, |
|
"loss": 1.2424, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0003436610242982819, |
|
"loss": 1.2661, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00034344412284252135, |
|
"loss": 1.2827, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00034322687339367083, |
|
"loss": 1.2011, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00034300927647877403, |
|
"loss": 1.2413, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00034279133262571734, |
|
"loss": 1.3173, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0003425730423632291, |
|
"loss": 1.2728, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0003423544062208779, |
|
"loss": 1.2539, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00034213542472907144, |
|
"loss": 1.2058, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0003419160984190552, |
|
"loss": 1.1629, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00034169642782291116, |
|
"loss": 1.1723, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003414764134735566, |
|
"loss": 1.3751, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003412560559047428, |
|
"loss": 1.3485, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003410353556510536, |
|
"loss": 1.4167, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003408143132479041, |
|
"loss": 1.4048, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003405929292315397, |
|
"loss": 1.5166, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00034037120413903425, |
|
"loss": 1.6244, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003401491385082892, |
|
"loss": 1.3791, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00033992673287803224, |
|
"loss": 1.086, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00033970398778781564, |
|
"loss": 1.5672, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00033948090377801545, |
|
"loss": 1.513, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003392574813898298, |
|
"loss": 1.516, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003390337211652777, |
|
"loss": 1.499, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003388096236471978, |
|
"loss": 1.5168, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.000338585189379247, |
|
"loss": 1.1873, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003383604189058992, |
|
"loss": 1.3861, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00033813531277244384, |
|
"loss": 1.5166, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003379098715249847, |
|
"loss": 1.0685, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003376840957104385, |
|
"loss": 1.2126, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003374579858765338, |
|
"loss": 1.201, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00033723154257180927, |
|
"loss": 1.2925, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0003370047663456127, |
|
"loss": 1.4116, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0003367776577480994, |
|
"loss": 1.2275, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00033655021733023114, |
|
"loss": 1.3411, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0003363224456437747, |
|
"loss": 1.4525, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00033609434324130045, |
|
"loss": 1.4977, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00033586591067618103, |
|
"loss": 1.2514, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0003356371485025901, |
|
"loss": 1.3123, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.000335408057275501, |
|
"loss": 1.3881, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0003351786375506852, |
|
"loss": 1.3676, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0003349488898847113, |
|
"loss": 1.3726, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00033471881483494324, |
|
"loss": 1.5587, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0003344884129595395, |
|
"loss": 1.5089, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00033425768481745103, |
|
"loss": 1.1553, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0003340266309684207, |
|
"loss": 1.196, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0003337952519729813, |
|
"loss": 1.437, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00033356354839245443, |
|
"loss": 1.5862, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0003333315207889492, |
|
"loss": 1.2645, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0003330991697253608, |
|
"loss": 1.493, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00033286649576536915, |
|
"loss": 1.2698, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0003326334994734373, |
|
"loss": 1.6227, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00033240018141481064, |
|
"loss": 1.2429, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00033216654215551477, |
|
"loss": 1.4009, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003319325822623548, |
|
"loss": 1.5027, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00033169830230291356, |
|
"loss": 1.4413, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00033146370284555047, |
|
"loss": 1.332, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003312287844593998, |
|
"loss": 1.2462, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003309935477143699, |
|
"loss": 1.3925, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00033075799318114116, |
|
"loss": 1.0344, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00033052212143116496, |
|
"loss": 1.3053, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00033028593303666235, |
|
"loss": 1.302, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003300494285706226, |
|
"loss": 1.4378, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003298126086068015, |
|
"loss": 1.281, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00032957547371972043, |
|
"loss": 1.1997, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003293380244846648, |
|
"loss": 1.2488, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003291002614776826, |
|
"loss": 1.616, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003288621852755829, |
|
"loss": 0.9443, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003286237964559346, |
|
"loss": 1.296, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003283850955970653, |
|
"loss": 1.3768, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003281460832780591, |
|
"loss": 1.5167, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00032790676007875627, |
|
"loss": 1.2445, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00032766712657975066, |
|
"loss": 0.9358, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003274271833623893, |
|
"loss": 1.0435, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00032718693100877065, |
|
"loss": 1.1012, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0003269463701017428, |
|
"loss": 1.3027, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00032670550122490267, |
|
"loss": 1.5241, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00032646432496259416, |
|
"loss": 1.3093, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.000326222841899907, |
|
"loss": 1.2715, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.000325981052622675, |
|
"loss": 1.3346, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00032573895771747505, |
|
"loss": 1.2683, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0003254965577716255, |
|
"loss": 1.579, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0003252538533731846, |
|
"loss": 1.2409, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00032501084511094914, |
|
"loss": 1.3204, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0003247675335744533, |
|
"loss": 1.444, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0003245239193539668, |
|
"loss": 1.0368, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00032428000304049383, |
|
"loss": 1.4488, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0003240357852257713, |
|
"loss": 1.2406, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0003237912665022676, |
|
"loss": 1.4373, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0003235464474631813, |
|
"loss": 1.0534, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0003233013287024391, |
|
"loss": 1.3751, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0003230559108146953, |
|
"loss": 1.2604, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0003228101943953297, |
|
"loss": 1.1124, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00032256418004044616, |
|
"loss": 1.1776, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0003223178683468716, |
|
"loss": 1.0755, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0003220712599121541, |
|
"loss": 1.2169, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00032182435533456154, |
|
"loss": 1.4314, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00032157715521308064, |
|
"loss": 1.2822, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00032132966014741457, |
|
"loss": 1.4197, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0003210818707379826, |
|
"loss": 1.3165, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00032083378758591757, |
|
"loss": 1.2202, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0003205854112930651, |
|
"loss": 1.3977, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0003203367424619822, |
|
"loss": 1.4865, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0003200877816959352, |
|
"loss": 1.2872, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0003198385295988989, |
|
"loss": 1.3833, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00031958898677555476, |
|
"loss": 1.2206, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0003193391538312897, |
|
"loss": 1.2306, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00031908903137219417, |
|
"loss": 1.4376, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00031883862000506134, |
|
"loss": 1.5088, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0003185879203373849, |
|
"loss": 1.4149, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0003183369329773583, |
|
"loss": 1.1201, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00031808565853387263, |
|
"loss": 1.2864, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0003178340976165157, |
|
"loss": 1.3081, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00031758225083557, |
|
"loss": 1.223, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00031733011880201185, |
|
"loss": 1.3362, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0003170777021275093, |
|
"loss": 1.3054, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0003168250014244212, |
|
"loss": 1.1346, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00031657201730579513, |
|
"loss": 1.4264, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0003163187503853666, |
|
"loss": 1.2921, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0003160652012775569, |
|
"loss": 1.3265, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.000315811370597472, |
|
"loss": 1.0747, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0003155572589609011, |
|
"loss": 1.3607, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00031530286698431474, |
|
"loss": 1.2866, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00031504819528486365, |
|
"loss": 1.4863, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00031479324448037725, |
|
"loss": 1.2086, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0003145380151893621, |
|
"loss": 1.4026, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00031428250803100017, |
|
"loss": 1.4166, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0003140267236251476, |
|
"loss": 1.6379, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0003137706625923333, |
|
"loss": 1.3528, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.000313514325553757, |
|
"loss": 1.4633, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0003132577131312883, |
|
"loss": 1.3926, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0003130008259474646, |
|
"loss": 1.2951, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00031274366462549, |
|
"loss": 1.3975, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0003124862297892337, |
|
"loss": 1.7294, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0003122285220632284, |
|
"loss": 1.4895, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0003119705420726688, |
|
"loss": 1.3108, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0003117122904434101, |
|
"loss": 1.2647, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00031145376780196654, |
|
"loss": 1.133, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00031119497477550986, |
|
"loss": 1.3407, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0003109359119918676, |
|
"loss": 1.3305, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0003106765800795219, |
|
"loss": 1.3938, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00031041697966760777, |
|
"loss": 1.5855, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00031015711138591143, |
|
"loss": 1.347, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0003098969758648691, |
|
"loss": 1.5524, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00030963657373556526, |
|
"loss": 1.1343, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0003093759056297314, |
|
"loss": 1.2149, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00030911497217974386, |
|
"loss": 1.2507, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00030885377401862306, |
|
"loss": 1.5341, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0003085923117800312, |
|
"loss": 1.3615, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00030833058609827175, |
|
"loss": 1.3651, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00030806859760828663, |
|
"loss": 1.2132, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00030780634694565573, |
|
"loss": 1.166, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0003075438347465949, |
|
"loss": 1.4359, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00030728106164795433, |
|
"loss": 1.5718, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00030701802828721734, |
|
"loss": 1.4244, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0003067547353024985, |
|
"loss": 1.208, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0003064911833325423, |
|
"loss": 1.2651, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0003062273730167215, |
|
"loss": 1.2595, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00030596330499503555, |
|
"loss": 1.6153, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00030569897990810916, |
|
"loss": 1.3482, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0003054343983971907, |
|
"loss": 1.1848, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0003051695611041505, |
|
"loss": 1.5675, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0003049044686714796, |
|
"loss": 1.2735, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0003046391217422878, |
|
"loss": 1.5186, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0003043735209603025, |
|
"loss": 1.3627, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00030410766696986667, |
|
"loss": 1.3874, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00030384156041593795, |
|
"loss": 1.3036, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0003035752019440863, |
|
"loss": 1.2132, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00030330859220049303, |
|
"loss": 1.299, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0003030417318319491, |
|
"loss": 1.2785, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00030277462148585324, |
|
"loss": 1.1849, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00030250726181021097, |
|
"loss": 1.3676, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00030223965345363234, |
|
"loss": 1.5585, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0003019717970653309, |
|
"loss": 1.5943, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00030170369329512173, |
|
"loss": 1.0816, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00030143534279342035, |
|
"loss": 1.5398, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0003011667462112406, |
|
"loss": 1.2893, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00030089790420019335, |
|
"loss": 1.3099, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0003006288174124849, |
|
"loss": 1.5297, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00030035948650091555, |
|
"loss": 1.2783, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0003000899121188775, |
|
"loss": 1.3238, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002998200949203538, |
|
"loss": 1.6145, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002995500355599167, |
|
"loss": 1.2022, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00029927973469272555, |
|
"loss": 1.2806, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.000299009192974526, |
|
"loss": 1.424, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00029873841106164766, |
|
"loss": 1.177, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002984673896110032, |
|
"loss": 1.3138, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00029819612928008607, |
|
"loss": 1.3856, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002979246307269695, |
|
"loss": 1.2792, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00029765289461030443, |
|
"loss": 1.2964, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002973809215893183, |
|
"loss": 1.1395, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002971087123238131, |
|
"loss": 1.3927, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00029683626747416416, |
|
"loss": 1.241, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002965635877013181, |
|
"loss": 1.5876, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00029629067366679173, |
|
"loss": 1.5482, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002960175260326699, |
|
"loss": 1.4041, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00029574414546160426, |
|
"loss": 1.2867, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002954705326168117, |
|
"loss": 1.1729, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002951966881620725, |
|
"loss": 1.4974, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002949226127617287, |
|
"loss": 1.4537, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00029464830708068277, |
|
"loss": 1.2313, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002943737717843959, |
|
"loss": 1.4326, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0002940990075388861, |
|
"loss": 1.249, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.000293824015010727, |
|
"loss": 1.3426, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002935487948670459, |
|
"loss": 1.35, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00029327334777552245, |
|
"loss": 1.1759, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002929976744043866, |
|
"loss": 1.181, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00029272177542241765, |
|
"loss": 1.1735, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00029244565149894183, |
|
"loss": 0.9487, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00029216930330383127, |
|
"loss": 1.4527, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002918927315075023, |
|
"loss": 1.4078, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00029161593678091337, |
|
"loss": 1.2437, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00029133891979556413, |
|
"loss": 1.2497, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002910616812234931, |
|
"loss": 1.1957, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00029078422173727664, |
|
"loss": 1.3722, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002905065420100267, |
|
"loss": 1.2857, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00029022864271539004, |
|
"loss": 1.099, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002899505245275456, |
|
"loss": 1.4149, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002896721881212036, |
|
"loss": 1.3387, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002893936341716035, |
|
"loss": 1.6126, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002891148633545128, |
|
"loss": 1.2203, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002888358763462248, |
|
"loss": 1.3316, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002885566738235574, |
|
"loss": 1.2368, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0002882772564638515, |
|
"loss": 1.2352, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00028799762494496873, |
|
"loss": 1.2296, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00028771777994529086, |
|
"loss": 1.3651, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00028743772214371715, |
|
"loss": 1.5197, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00028715745221966326, |
|
"loss": 1.5716, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00028687697085305933, |
|
"loss": 1.2013, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002865962787243487, |
|
"loss": 1.4137, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002863153765144858, |
|
"loss": 1.2877, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00028603426490493485, |
|
"loss": 1.4059, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00028575294457766794, |
|
"loss": 1.1746, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00028547141621516355, |
|
"loss": 1.673, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002851896805004049, |
|
"loss": 1.4219, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00028490773811687826, |
|
"loss": 1.4337, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002846255897485712, |
|
"loss": 1.1064, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.000284343236079971, |
|
"loss": 1.2186, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.000284060677796063, |
|
"loss": 1.2138, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002837779155823291, |
|
"loss": 1.156, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002834949501247457, |
|
"loss": 1.3292, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00028321178210978233, |
|
"loss": 1.472, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002829284122244, |
|
"loss": 1.4211, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002826448411560494, |
|
"loss": 1.0677, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002823610695926694, |
|
"loss": 1.5694, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002820770982226849, |
|
"loss": 1.1922, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00028179292773500606, |
|
"loss": 1.379, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00028150855881902564, |
|
"loss": 1.3802, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002812239921646181, |
|
"loss": 1.3687, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00028093922846213735, |
|
"loss": 1.3385, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00028065426840241545, |
|
"loss": 1.0829, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00028036911267676103, |
|
"loss": 1.3422, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00028008376197695694, |
|
"loss": 1.3711, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002797982169952596, |
|
"loss": 1.6293, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00027951247842439617, |
|
"loss": 1.2781, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002792265469575639, |
|
"loss": 1.4301, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00027894042328842773, |
|
"loss": 1.2093, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00027865410811111916, |
|
"loss": 1.5235, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.000278367602120234, |
|
"loss": 1.0747, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002780809060108311, |
|
"loss": 1.5449, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00027779402047843046, |
|
"loss": 1.5861, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002775069462190118, |
|
"loss": 1.4148, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002772196839290124, |
|
"loss": 1.1843, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0002769322343053261, |
|
"loss": 1.3968, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00027664459804530077, |
|
"loss": 1.517, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00027635677584673734, |
|
"loss": 1.1604, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002760687684078877, |
|
"loss": 1.0792, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002757805764274533, |
|
"loss": 1.1854, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00027549220060458303, |
|
"loss": 1.099, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00027520364163887193, |
|
"loss": 1.3248, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00027491490023035935, |
|
"loss": 1.3282, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00027462597707952717, |
|
"loss": 1.305, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002743368728872983, |
|
"loss": 1.4731, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002740475883550347, |
|
"loss": 1.1555, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00027375812418453584, |
|
"loss": 1.2658, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00027346848107803723, |
|
"loss": 1.1447, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002731786597382082, |
|
"loss": 1.3056, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002728886608681505, |
|
"loss": 1.2856, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002725984851713967, |
|
"loss": 1.4002, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002723081333519083, |
|
"loss": 1.0462, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002720176061140741, |
|
"loss": 1.3978, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002717269041627082, |
|
"loss": 1.3281, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.000271436028203049, |
|
"loss": 1.3246, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00027114497894075676, |
|
"loss": 1.0361, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00027085375708191207, |
|
"loss": 1.1194, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0002705623633330146, |
|
"loss": 1.2326, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00027027079840098066, |
|
"loss": 1.324, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002699790629931421, |
|
"loss": 1.2296, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00026968715781724404, |
|
"loss": 1.1764, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.000269395083581444, |
|
"loss": 1.2608, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002691028409943089, |
|
"loss": 1.3877, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00026881043076481463, |
|
"loss": 1.1516, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002685178536023437, |
|
"loss": 1.4622, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002682251102166833, |
|
"loss": 1.3839, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002679322013180244, |
|
"loss": 1.3128, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00026763912761695885, |
|
"loss": 1.2344, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00026734588982447885, |
|
"loss": 1.4078, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00026705248865197434, |
|
"loss": 1.0956, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002667589248112319, |
|
"loss": 1.5464, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002664651990144325, |
|
"loss": 1.227, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002661713119741502, |
|
"loss": 1.2573, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00026587726440334994, |
|
"loss": 1.3705, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00026558305701538645, |
|
"loss": 1.6353, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002652886905240021, |
|
"loss": 1.439, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00026499416564332507, |
|
"loss": 1.6142, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00026469948308786785, |
|
"loss": 1.4789, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00026440464357252555, |
|
"loss": 1.3731, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00026410964781257404, |
|
"loss": 1.2835, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002638144965236681, |
|
"loss": 1.4461, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00026351919042184006, |
|
"loss": 1.3314, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002632237302234975, |
|
"loss": 1.4887, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002629281166454221, |
|
"loss": 1.474, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002626323504047676, |
|
"loss": 1.6467, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.000262336432219058, |
|
"loss": 1.4272, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002620403628061859, |
|
"loss": 1.3052, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.000261744142884411, |
|
"loss": 1.1621, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002614477731723579, |
|
"loss": 1.289, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002611512543890146, |
|
"loss": 0.7957, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002608545872537309, |
|
"loss": 1.1423, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002605577724862163, |
|
"loss": 1.3153, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002602608108065387, |
|
"loss": 1.1333, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00025996370293512216, |
|
"loss": 1.1941, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00025966644959274555, |
|
"loss": 1.2343, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00025936905150054045, |
|
"loss": 1.3379, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00025907150937999005, |
|
"loss": 1.3146, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002587738239529264, |
|
"loss": 1.4962, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002584759959415297, |
|
"loss": 1.4088, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00025817802606832547, |
|
"loss": 1.2505, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0002578799150561841, |
|
"loss": 1.0342, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00025758166362831777, |
|
"loss": 1.2603, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002572832725082798, |
|
"loss": 1.2303, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002569847424199619, |
|
"loss": 1.351, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00025668607408759327, |
|
"loss": 1.1902, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00025638726823573843, |
|
"loss": 1.5928, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002560883255892954, |
|
"loss": 1.1507, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002557892468734942, |
|
"loss": 1.2932, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002554900328138949, |
|
"loss": 1.4027, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00025519068413638586, |
|
"loss": 1.2834, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.000254891201567182, |
|
"loss": 1.1498, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00025459158583282325, |
|
"loss": 1.2244, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002542918376601724, |
|
"loss": 1.3832, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00025399195777641364, |
|
"loss": 1.3634, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00025369194690905063, |
|
"loss": 1.243, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002533918057859048, |
|
"loss": 0.9249, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002530915351351136, |
|
"loss": 1.4711, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002527911356851287, |
|
"loss": 1.2048, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002524906081647143, |
|
"loss": 1.2095, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002521899533029451, |
|
"loss": 1.3093, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.000251889171829205, |
|
"loss": 1.6384, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0002515882644731849, |
|
"loss": 1.1837, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00025128723196488105, |
|
"loss": 1.2502, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00025098607503459336, |
|
"loss": 1.3219, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002506847944129238, |
|
"loss": 1.4386, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.000250383390830774, |
|
"loss": 1.2891, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002500818650193442, |
|
"loss": 1.1171, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.000249780217710131, |
|
"loss": 1.112, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00024947844963492594, |
|
"loss": 1.0929, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002491765615258134, |
|
"loss": 1.4064, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00024887455411516895, |
|
"loss": 1.2253, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002485724281356576, |
|
"loss": 1.1958, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00024827018432023196, |
|
"loss": 1.3716, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00024796782340213075, |
|
"loss": 1.1603, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002476653461148765, |
|
"loss": 1.2685, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002473627531922741, |
|
"loss": 1.4317, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00024706004536840907, |
|
"loss": 1.3646, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00024675722337764565, |
|
"loss": 1.2738, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.000246454287954625, |
|
"loss": 1.3745, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002461512398342635, |
|
"loss": 1.2735, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002458480797517509, |
|
"loss": 1.3064, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002455448084425486, |
|
"loss": 1.5011, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00024524142664238775, |
|
"loss": 1.3956, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002449379350872678, |
|
"loss": 0.9758, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00024463433451345413, |
|
"loss": 1.2169, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002443306256574768, |
|
"loss": 1.5301, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00024402680925612855, |
|
"loss": 1.2099, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002437228860464629, |
|
"loss": 1.4701, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00024341885676579272, |
|
"loss": 1.3601, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002431147221516879, |
|
"loss": 1.1317, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00024281048294197413, |
|
"loss": 1.1306, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00024250613987473067, |
|
"loss": 1.3336, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00024220169368828888, |
|
"loss": 1.2368, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0002418971451212302, |
|
"loss": 1.256, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00024159249491238446, |
|
"loss": 1.3063, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.000241287743800828, |
|
"loss": 1.2309, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00024098289252588208, |
|
"loss": 1.2604, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00024067794182711097, |
|
"loss": 1.4222, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00024037289244431995, |
|
"loss": 1.5747, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00024006774511755387, |
|
"loss": 1.3778, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00023976250058709505, |
|
"loss": 1.463, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00023945715959346192, |
|
"loss": 1.4231, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00023915172287740654, |
|
"loss": 1.3177, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00023884619117991345, |
|
"loss": 1.2004, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00023854056524219748, |
|
"loss": 1.1497, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002382348458057022, |
|
"loss": 1.4359, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023792903361209797, |
|
"loss": 1.169, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023762312940328012, |
|
"loss": 0.9957, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002373171339213672, |
|
"loss": 1.4934, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023701104790869933, |
|
"loss": 1.2416, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002367048721078361, |
|
"loss": 1.0249, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023639860726155497, |
|
"loss": 1.4124, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002360922541128495, |
|
"loss": 1.2547, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023578581340492728, |
|
"loss": 1.3123, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023547928588120863, |
|
"loss": 1.0849, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002351726722853241, |
|
"loss": 1.3818, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023486597336111337, |
|
"loss": 1.3689, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023455918985262298, |
|
"loss": 1.5367, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023425232250410467, |
|
"loss": 1.3149, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002339453720600136, |
|
"loss": 1.4009, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002336383392650065, |
|
"loss": 1.5073, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023333122486393992, |
|
"loss": 1.2735, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023302402960186837, |
|
"loss": 1.2892, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023271675422404249, |
|
"loss": 1.376, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0002324093994759074, |
|
"loss": 1.4852, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00023210196610310048, |
|
"loss": 1.4539, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00023179445485145027, |
|
"loss": 1.1888, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00023148686646697387, |
|
"loss": 1.109, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00023117920169587572, |
|
"loss": 1.4764, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00023087146128454556, |
|
"loss": 1.1469, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00023056364597955648, |
|
"loss": 1.1258, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00023025575652766341, |
|
"loss": 1.0952, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002299477936758011, |
|
"loss": 1.5308, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022963975817108238, |
|
"loss": 1.4326, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022933165076079642, |
|
"loss": 1.203, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022902347219240664, |
|
"loss": 1.1518, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022871522321354912, |
|
"loss": 1.365, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022840690457203106, |
|
"loss": 1.1922, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022809851701582825, |
|
"loss": 1.3392, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002277900612930839, |
|
"loss": 1.1993, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022748153815210654, |
|
"loss": 1.3798, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022717294834136818, |
|
"loss": 1.5262, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022686429260950278, |
|
"loss": 1.206, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022655557170530398, |
|
"loss": 1.1834, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022624678637772364, |
|
"loss": 1.2479, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022593793737586988, |
|
"loss": 1.0998, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022562902544900543, |
|
"loss": 1.1673, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00022532005134654547, |
|
"loss": 1.1091, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022501101581805615, |
|
"loss": 1.4556, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022470191961325244, |
|
"loss": 1.4475, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002243927634819969, |
|
"loss": 1.4791, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002240835481742971, |
|
"loss": 1.3507, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022377427444030432, |
|
"loss": 1.2467, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022346494303031157, |
|
"loss": 1.0791, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022315555469475177, |
|
"loss": 1.345, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.000222846110184196, |
|
"loss": 1.3105, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022253661024935158, |
|
"loss": 1.3515, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022222705564106017, |
|
"loss": 1.2521, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022191744711029618, |
|
"loss": 1.0644, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022160778540816499, |
|
"loss": 1.4489, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022129807128590067, |
|
"loss": 1.3382, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022098830549486463, |
|
"loss": 1.2667, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022067848878654352, |
|
"loss": 1.2325, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002203686219125478, |
|
"loss": 1.255, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00022005870562460925, |
|
"loss": 1.4091, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021974874067457983, |
|
"loss": 1.2677, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002194387278144293, |
|
"loss": 1.2642, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0002191286677962438, |
|
"loss": 1.3356, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00021881856137222395, |
|
"loss": 1.1519, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021850840929468272, |
|
"loss": 1.3606, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021819821231604413, |
|
"loss": 1.1905, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021788797118884072, |
|
"loss": 1.219, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021757768666571262, |
|
"loss": 1.3163, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002172673594994048, |
|
"loss": 1.2363, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.000216956990442766, |
|
"loss": 1.3136, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021664658024874646, |
|
"loss": 1.0558, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021633612967039622, |
|
"loss": 1.2747, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002160256394608632, |
|
"loss": 1.5154, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021571511037339164, |
|
"loss": 1.4203, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021540454316132003, |
|
"loss": 1.1171, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002150939385780793, |
|
"loss": 1.47, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002147832973771911, |
|
"loss": 1.5328, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021447262031226578, |
|
"loss": 1.2424, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021416190813700094, |
|
"loss": 1.4171, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021385116160517917, |
|
"loss": 1.1544, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021354038147066646, |
|
"loss": 1.2143, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021322956848741033, |
|
"loss": 1.3098, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.000212918723409438, |
|
"loss": 1.1003, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00021260784699085448, |
|
"loss": 1.434, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0002122969399858409, |
|
"loss": 1.1671, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002119860031486525, |
|
"loss": 1.2743, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00021167503723361705, |
|
"loss": 1.4703, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00021136404299513273, |
|
"loss": 1.2531, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00021105302118766642, |
|
"loss": 1.289, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002107419725657519, |
|
"loss": 1.406, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00021043089788398814, |
|
"loss": 1.2649, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002101197978970371, |
|
"loss": 1.5223, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020980867335962234, |
|
"loss": 1.3938, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020949752502652684, |
|
"loss": 1.0398, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020918635365259133, |
|
"loss": 1.5071, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002088751599927125, |
|
"loss": 1.333, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020856394480184105, |
|
"loss": 1.4037, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020825270883497994, |
|
"loss": 1.301, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002079414528471825, |
|
"loss": 1.092, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002076301775935507, |
|
"loss": 1.347, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020731888382923312, |
|
"loss": 1.2909, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020700757230942345, |
|
"loss": 1.3255, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020669624378935822, |
|
"loss": 1.0013, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020638489902431536, |
|
"loss": 1.1933, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020607353876961218, |
|
"loss": 1.5111, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00020576216378060349, |
|
"loss": 1.3786, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020545077481268003, |
|
"loss": 0.8628, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020513937262126616, |
|
"loss": 1.3142, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020482795796181863, |
|
"loss": 1.247, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020451653158982418, |
|
"loss": 1.3025, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020420509426079825, |
|
"loss": 1.3176, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020389364673028255, |
|
"loss": 1.2238, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020389364673028255, |
|
"loss": 1.3724, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020358218975384374, |
|
"loss": 1.2739, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002032707240870713, |
|
"loss": 1.3877, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020295925048557586, |
|
"loss": 1.2701, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002026477697049873, |
|
"loss": 1.3203, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020233628250095284, |
|
"loss": 1.314, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020202478962913536, |
|
"loss": 1.6357, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020171329184521137, |
|
"loss": 1.1124, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020140178990486952, |
|
"loss": 1.4138, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002010902845638083, |
|
"loss": 1.3425, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020077877657773466, |
|
"loss": 1.2216, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00020046726670236176, |
|
"loss": 1.519, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002001557556934075, |
|
"loss": 1.3377, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00019984424430659253, |
|
"loss": 1.6655, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0001995327332976383, |
|
"loss": 1.4196, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019922122342226538, |
|
"loss": 1.2147, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019890971543619167, |
|
"loss": 1.1667, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019859821009513055, |
|
"loss": 1.1459, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019828670815478868, |
|
"loss": 1.3117, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019797521037086472, |
|
"loss": 1.1508, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019766371749904723, |
|
"loss": 1.0721, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019735223029501272, |
|
"loss": 1.1657, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019704074951442418, |
|
"loss": 1.0856, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019672927591292871, |
|
"loss": 1.1536, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019641781024615633, |
|
"loss": 1.3234, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0001961063532697175, |
|
"loss": 1.446, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0001957949057392018, |
|
"loss": 1.359, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019548346841017578, |
|
"loss": 1.2901, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019517204203818147, |
|
"loss": 1.3227, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019486062737873389, |
|
"loss": 1.5337, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019454922518732004, |
|
"loss": 1.0848, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019423783621939653, |
|
"loss": 1.2892, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019392646123038792, |
|
"loss": 1.1257, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0001936151009756847, |
|
"loss": 1.3003, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.0001933037562106418, |
|
"loss": 1.1776, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019299242769057665, |
|
"loss": 1.1835, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019268111617076693, |
|
"loss": 1.5712, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019236982240644935, |
|
"loss": 0.9642, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0001920585471528175, |
|
"loss": 1.3297, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019174729116502016, |
|
"loss": 1.3761, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.000191436055198159, |
|
"loss": 1.4211, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019112484000728756, |
|
"loss": 1.3953, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0001908136463474087, |
|
"loss": 1.3674, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019050247497347323, |
|
"loss": 1.2079, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019019132664037768, |
|
"loss": 1.5356, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00018988020210296294, |
|
"loss": 0.9021, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00018956910211601185, |
|
"loss": 1.4613, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00018925802743424813, |
|
"loss": 1.3332, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00018894697881233362, |
|
"loss": 1.3994, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00018863595700486732, |
|
"loss": 1.0774, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00018832496276638302, |
|
"loss": 1.3694, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00018801399685134754, |
|
"loss": 1.4133, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00018770306001415917, |
|
"loss": 1.3533, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.0001873921530091456, |
|
"loss": 1.1007, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00018708127659056208, |
|
"loss": 1.1953, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00018677043151258971, |
|
"loss": 1.2118, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00018645961852933356, |
|
"loss": 1.4276, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00018614883839482082, |
|
"loss": 1.4432, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018583809186299913, |
|
"loss": 1.1725, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018552737968773427, |
|
"loss": 1.3901, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018521670262280896, |
|
"loss": 1.471, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018490606142192078, |
|
"loss": 1.7013, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018459545683868, |
|
"loss": 1.3411, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018428488962660838, |
|
"loss": 1.1998, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001839743605391368, |
|
"loss": 1.174, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018366387032960386, |
|
"loss": 1.2417, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018335341975125356, |
|
"loss": 1.1483, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018304300955723402, |
|
"loss": 1.2208, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018273264050059523, |
|
"loss": 1.4443, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001824223133342875, |
|
"loss": 1.1945, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018211202881115933, |
|
"loss": 1.2849, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018180178768395595, |
|
"loss": 1.0626, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001814915907053173, |
|
"loss": 1.1795, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018118143862777615, |
|
"loss": 1.1829, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018087133220375624, |
|
"loss": 1.1555, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018056127218557072, |
|
"loss": 1.3981, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00018025125932542027, |
|
"loss": 1.2743, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001799412943753908, |
|
"loss": 1.1822, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00017963137808745222, |
|
"loss": 1.1138, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017932151121345644, |
|
"loss": 1.1169, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017901169450513547, |
|
"loss": 1.2054, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017870192871409938, |
|
"loss": 1.1044, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017839221459183506, |
|
"loss": 1.4161, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017808255288970378, |
|
"loss": 1.322, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017777294435893993, |
|
"loss": 1.1476, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001774633897506485, |
|
"loss": 1.3483, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.000177153889815804, |
|
"loss": 1.4175, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001768444453052482, |
|
"loss": 1.277, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017653505696968848, |
|
"loss": 1.3087, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001762257255596957, |
|
"loss": 1.225, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017591645182570294, |
|
"loss": 1.2801, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001756072365180032, |
|
"loss": 1.601, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017529808038674758, |
|
"loss": 1.3707, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017498898418194392, |
|
"loss": 1.0633, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017467994865345455, |
|
"loss": 1.3597, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001743709745509946, |
|
"loss": 1.1676, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017406206262413014, |
|
"loss": 1.1692, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017375321362227638, |
|
"loss": 1.294, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00017344442829469604, |
|
"loss": 1.3103, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001731357073904973, |
|
"loss": 1.1713, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017282705165863187, |
|
"loss": 1.2887, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0001725184618478935, |
|
"loss": 1.2084, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0001722099387069162, |
|
"loss": 1.4899, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0001719014829841718, |
|
"loss": 1.3522, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017159309542796902, |
|
"loss": 1.2895, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017128477678645087, |
|
"loss": 1.1823, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017097652780759349, |
|
"loss": 1.4265, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017066834923920365, |
|
"loss": 1.148, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017036024182891766, |
|
"loss": 1.2976, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00017005220632419893, |
|
"loss": 1.0658, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00016974424347233669, |
|
"loss": 1.3412, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00016943635402044357, |
|
"loss": 1.2008, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00016912853871545446, |
|
"loss": 1.2512, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0001688207983041243, |
|
"loss": 1.4035, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0001685131335330262, |
|
"loss": 1.2508, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0001682055451485498, |
|
"loss": 1.2797, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0001678980338968995, |
|
"loss": 1.1797, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.0001675906005240927, |
|
"loss": 1.1574, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00016728324577595756, |
|
"loss": 1.3345, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00016697597039813168, |
|
"loss": 1.6107, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00016666877513606007, |
|
"loss": 1.3272, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016636166073499356, |
|
"loss": 1.2153, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016605462793998645, |
|
"loss": 1.1083, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0001657476774958954, |
|
"loss": 1.3484, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016544081014737704, |
|
"loss": 1.3568, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016513402663888668, |
|
"loss": 1.2882, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016482732771467594, |
|
"loss": 1.293, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016452071411879145, |
|
"loss": 1.4508, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0001642141865950727, |
|
"loss": 1.2862, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016390774588715057, |
|
"loss": 1.2247, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016360139273844504, |
|
"loss": 1.2461, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016329512789216394, |
|
"loss": 1.4069, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016298895209130077, |
|
"loss": 1.3323, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016268286607863284, |
|
"loss": 1.4585, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0001623768705967199, |
|
"loss": 1.3159, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016207096638790207, |
|
"loss": 1.1522, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016176515419429784, |
|
"loss": 1.3848, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0001614594347578026, |
|
"loss": 1.5621, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016115380882008662, |
|
"loss": 1.3241, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016084827712259348, |
|
"loss": 1.6407, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016054284040653815, |
|
"loss": 1.2676, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00016023749941290497, |
|
"loss": 1.3582, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015993225488244618, |
|
"loss": 1.3799, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015962710755568012, |
|
"loss": 1.4649, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015932205817288908, |
|
"loss": 1.0993, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015901710747411794, |
|
"loss": 1.4141, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015871225619917202, |
|
"loss": 1.4413, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015840750508761567, |
|
"loss": 1.1094, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015810285487876985, |
|
"loss": 0.9049, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015779830631171117, |
|
"loss": 1.2571, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015749386012526935, |
|
"loss": 1.59, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015718951705802597, |
|
"loss": 1.2153, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015688527784831215, |
|
"loss": 1.2648, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015658114323420733, |
|
"loss": 1.1044, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0001562771139535371, |
|
"loss": 1.3576, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015597319074387152, |
|
"loss": 1.2877, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015566937434252325, |
|
"loss": 1.3409, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0001553656654865459, |
|
"loss": 1.1914, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015506206491273228, |
|
"loss": 1.4526, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0001547585733576123, |
|
"loss": 1.3002, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015445519155745147, |
|
"loss": 1.0568, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015415192024824913, |
|
"loss": 1.2317, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015384876016573656, |
|
"loss": 1.2007, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00015354571204537502, |
|
"loss": 1.2982, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00015324277662235436, |
|
"loss": 1.3678, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00015293995463159092, |
|
"loss": 1.4411, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00015263724680772596, |
|
"loss": 1.3717, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00015233465388512355, |
|
"loss": 1.1964, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00015203217659786927, |
|
"loss": 1.2942, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.000151729815679768, |
|
"loss": 1.1961, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00015142757186434249, |
|
"loss": 1.5537, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00015112544588483112, |
|
"loss": 1.1772, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00015082343847418664, |
|
"loss": 1.4643, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0001505215503650741, |
|
"loss": 0.9425, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00015021978228986903, |
|
"loss": 1.2285, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00014991813498065585, |
|
"loss": 1.5057, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00014961660916922606, |
|
"loss": 1.1536, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00014931520558707627, |
|
"loss": 1.2474, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00014901392496540665, |
|
"loss": 1.145, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00014871276803511896, |
|
"loss": 1.1519, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00014841173552681514, |
|
"loss": 1.1699, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00014811082817079503, |
|
"loss": 1.2612, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00014781004669705495, |
|
"loss": 1.4188, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00014750939183528575, |
|
"loss": 1.4395, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0001472088643148713, |
|
"loss": 1.4652, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014690846486488645, |
|
"loss": 1.0477, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014660819421409528, |
|
"loss": 1.1328, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014630805309094938, |
|
"loss": 1.0984, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0001460080422235864, |
|
"loss": 1.2177, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014570816233982762, |
|
"loss": 1.4792, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0001454084141671768, |
|
"loss": 1.317, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.000145108798432818, |
|
"loss": 1.4423, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014480931586361426, |
|
"loss": 1.0848, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014450996718610515, |
|
"loss": 1.3578, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014421075312650577, |
|
"loss": 1.4258, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0001439116744107046, |
|
"loss": 1.2835, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014361273176426165, |
|
"loss": 1.1916, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014331392591240675, |
|
"loss": 1.3486, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0001430152575800381, |
|
"loss": 1.3556, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014271672749172027, |
|
"loss": 1.2307, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014241833637168222, |
|
"loss": 1.4525, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014212008494381593, |
|
"loss": 1.2207, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0001418219739316745, |
|
"loss": 1.3679, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014152400405847043, |
|
"loss": 0.8475, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014122617604707364, |
|
"loss": 1.2213, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00014092849062001, |
|
"loss": 1.1242, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0001406309484994595, |
|
"loss": 1.3462, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00014033355040725458, |
|
"loss": 1.4102, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00014003629706487792, |
|
"loss": 1.3565, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013973918919346135, |
|
"loss": 1.2443, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013944222751378368, |
|
"loss": 1.4912, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013914541274626915, |
|
"loss": 1.4366, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0001388487456109854, |
|
"loss": 1.2756, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013855222682764216, |
|
"loss": 1.2778, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013825585711558906, |
|
"loss": 1.4932, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0001379596371938141, |
|
"loss": 1.2772, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013766356778094205, |
|
"loss": 1.2692, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013736764959523242, |
|
"loss": 1.1895, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.0001370718833545779, |
|
"loss": 1.3382, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013677626977650256, |
|
"loss": 1.337, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013648080957816, |
|
"loss": 1.1632, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013618550347633187, |
|
"loss": 1.1321, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.000135890352187426, |
|
"loss": 1.1655, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013559535642747447, |
|
"loss": 1.3333, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013530051691213217, |
|
"loss": 1.3118, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013500583435667495, |
|
"loss": 1.4974, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00013471130947599792, |
|
"loss": 1.2056, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013441694298461357, |
|
"loss": 1.1723, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013412273559665008, |
|
"loss": 1.6037, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013382868802584992, |
|
"loss": 1.0672, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013353480098556752, |
|
"loss": 1.2062, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013324107518876814, |
|
"loss": 1.3481, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013294751134802565, |
|
"loss": 1.2264, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013265411017552125, |
|
"loss": 1.36, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013236087238304117, |
|
"loss": 1.0622, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013206779868197565, |
|
"loss": 1.3958, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013177488978331664, |
|
"loss": 1.2677, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013148214639765638, |
|
"loss": 1.283, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0001311895692351854, |
|
"loss": 1.5159, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013089715900569115, |
|
"loss": 1.3298, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013060491641855612, |
|
"loss": 0.898, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013031284218275596, |
|
"loss": 1.2589, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00013002093700685796, |
|
"loss": 1.077, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0001297292015990193, |
|
"loss": 1.2823, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00012943763666698542, |
|
"loss": 1.4288, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00012914624291808795, |
|
"loss": 1.2173, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.0001288550210592433, |
|
"loss": 0.9642, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00012856397179695098, |
|
"loss": 1.3113, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012827309583729183, |
|
"loss": 1.3067, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.000127982393885926, |
|
"loss": 1.3371, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012769186664809173, |
|
"loss": 1.5906, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012740151482860333, |
|
"loss": 1.3334, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012711133913184957, |
|
"loss": 1.2184, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012682134026179186, |
|
"loss": 1.0252, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0001265315189219628, |
|
"loss": 1.3463, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0001262418758154642, |
|
"loss": 1.4166, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012595241164496538, |
|
"loss": 1.7178, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012566312711270175, |
|
"loss": 1.3719, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012537402292047288, |
|
"loss": 1.2142, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012508509976964072, |
|
"loss": 1.1583, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012479635836112814, |
|
"loss": 1.2868, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012450779939541701, |
|
"loss": 1.1618, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0001242194235725467, |
|
"loss": 1.1537, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0001239312315921123, |
|
"loss": 1.4446, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0001236432241532627, |
|
"loss": 1.0411, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012335540195469925, |
|
"loss": 1.2367, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0001230677656946739, |
|
"loss": 1.4935, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0001227803160709876, |
|
"loss": 1.3403, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00012249305378098828, |
|
"loss": 1.22, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00012220597952156955, |
|
"loss": 1.4497, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00012191909398916901, |
|
"loss": 1.4069, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00012163239787976603, |
|
"loss": 1.2646, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00012134589188888088, |
|
"loss": 1.499, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00012105957671157222, |
|
"loss": 1.3049, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00012077345304243617, |
|
"loss": 1.5111, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00012048752157560387, |
|
"loss": 1.2329, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00012020178300474043, |
|
"loss": 1.3798, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011991623802304302, |
|
"loss": 1.2576, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011963088732323906, |
|
"loss": 1.1369, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011934573159758456, |
|
"loss": 1.3211, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011906077153786271, |
|
"loss": 1.4205, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.000118776007835382, |
|
"loss": 1.3222, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011849144118097442, |
|
"loss": 1.2918, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011820707226499399, |
|
"loss": 1.217, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011792290177731504, |
|
"loss": 1.5136, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011763893040733068, |
|
"loss": 1.3777, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011735515884395061, |
|
"loss": 1.3681, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011707158777559999, |
|
"loss": 0.8953, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011678821789021768, |
|
"loss": 1.2376, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011650504987525441, |
|
"loss": 0.9827, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00011622208441767093, |
|
"loss": 1.3995, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011593932220393702, |
|
"loss": 1.4319, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011565676392002909, |
|
"loss": 1.5205, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011537441025142888, |
|
"loss": 1.1702, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011509226188312178, |
|
"loss": 1.2451, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011481031949959508, |
|
"loss": 1.4108, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011452858378483652, |
|
"loss": 1.235, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001142470554223321, |
|
"loss": 1.3461, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011396573509506517, |
|
"loss": 1.1677, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011368462348551418, |
|
"loss": 1.2003, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001134037212756513, |
|
"loss": 1.3054, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011312302914694069, |
|
"loss": 1.7106, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011284254778033685, |
|
"loss": 1.1096, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011256227785628283, |
|
"loss": 1.4595, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001122822200547092, |
|
"loss": 0.9698, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011200237505503124, |
|
"loss": 1.2934, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011172274353614858, |
|
"loss": 1.1179, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001114433261764426, |
|
"loss": 1.5195, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011116412365377522, |
|
"loss": 1.2414, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011088513664548724, |
|
"loss": 1.1524, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001106063658283964, |
|
"loss": 1.2442, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00011032781187879648, |
|
"loss": 1.0068, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00011004947547245446, |
|
"loss": 1.2076, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010977135728460997, |
|
"loss": 1.0662, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010949345798997326, |
|
"loss": 1.3129, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010921577826272345, |
|
"loss": 1.1221, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010893831877650695, |
|
"loss": 1.2171, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010866108020443596, |
|
"loss": 1.4596, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001083840632190866, |
|
"loss": 1.3664, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010810726849249782, |
|
"loss": 1.0676, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001078306966961688, |
|
"loss": 1.1301, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010755434850105823, |
|
"loss": 1.2053, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001072782245775825, |
|
"loss": 1.066, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010700232559561341, |
|
"loss": 1.1748, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010672665222447764, |
|
"loss": 1.3408, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010645120513295412, |
|
"loss": 1.1208, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010617598498927304, |
|
"loss": 1.4331, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010590099246111394, |
|
"loss": 1.4032, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010562622821560406, |
|
"loss": 1.164, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0001053516929193172, |
|
"loss": 1.0663, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010507738723827141, |
|
"loss": 1.1152, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010480331183792756, |
|
"loss": 1.0993, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00010452946738318835, |
|
"loss": 1.1678, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0001042558545383958, |
|
"loss": 1.4099, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010398247396733021, |
|
"loss": 1.1813, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010370932633320837, |
|
"loss": 1.0752, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010343641229868188, |
|
"loss": 1.4635, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010316373252583593, |
|
"loss": 1.197, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0001028912876761869, |
|
"loss": 1.1872, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010261907841068174, |
|
"loss": 1.1227, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0001023471053896956, |
|
"loss": 1.2541, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010207536927303056, |
|
"loss": 1.0352, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010180387071991395, |
|
"loss": 1.1582, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010153261038899686, |
|
"loss": 1.1927, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010126158893835231, |
|
"loss": 1.1228, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010099080702547414, |
|
"loss": 1.2856, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010072026530727448, |
|
"loss": 1.4384, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010044996444008336, |
|
"loss": 1.1022, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00010017990507964619, |
|
"loss": 1.3684, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.991008788112253e-05, |
|
"loss": 1.18, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.96405134990845e-05, |
|
"loss": 1.4392, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.937118258751501e-05, |
|
"loss": 1.2864, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.910209579980671e-05, |
|
"loss": 1.5951, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.883325378875949e-05, |
|
"loss": 1.237, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.856465720657966e-05, |
|
"loss": 1.25, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.829630670487826e-05, |
|
"loss": 1.0986, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.802820293466917e-05, |
|
"loss": 1.3593, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.776034654636769e-05, |
|
"loss": 1.6713, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.749273818978908e-05, |
|
"loss": 1.1223, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.722537851414671e-05, |
|
"loss": 1.2637, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.6958268168051e-05, |
|
"loss": 1.3128, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.669140779950707e-05, |
|
"loss": 1.3434, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.642479805591378e-05, |
|
"loss": 1.2366, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.615843958406221e-05, |
|
"loss": 1.3496, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.589233303013335e-05, |
|
"loss": 1.1488, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.56264790396976e-05, |
|
"loss": 1.3925, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.536087825771222e-05, |
|
"loss": 1.4061, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.509553132852044e-05, |
|
"loss": 1.0716, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.483043889584955e-05, |
|
"loss": 1.3894, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.456560160280932e-05, |
|
"loss": 1.1413, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.430102009189086e-05, |
|
"loss": 1.1573, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.403669500496457e-05, |
|
"loss": 1.2565, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.377262698327855e-05, |
|
"loss": 1.3642, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.350881666745774e-05, |
|
"loss": 1.2612, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.324526469750154e-05, |
|
"loss": 1.1751, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.298197171278272e-05, |
|
"loss": 1.3304, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.271893835204573e-05, |
|
"loss": 1.3528, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.245616525340513e-05, |
|
"loss": 1.1737, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.219365305434436e-05, |
|
"loss": 1.5289, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.19314023917134e-05, |
|
"loss": 1.2544, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.166941390172831e-05, |
|
"loss": 1.4722, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.14076882199688e-05, |
|
"loss": 1.4618, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.114622598137706e-05, |
|
"loss": 1.2061, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.088502782025618e-05, |
|
"loss": 1.5329, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.062409437026866e-05, |
|
"loss": 1.1748, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.036342626443468e-05, |
|
"loss": 1.6027, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.010302413513102e-05, |
|
"loss": 1.1159, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.984288861408863e-05, |
|
"loss": 1.3043, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.95830203323923e-05, |
|
"loss": 1.1157, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.932341992047811e-05, |
|
"loss": 1.0479, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.906408800813242e-05, |
|
"loss": 1.1356, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.88050252244902e-05, |
|
"loss": 1.2109, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.854623219803341e-05, |
|
"loss": 1.2692, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.828770955658999e-05, |
|
"loss": 1.4603, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.80294579273313e-05, |
|
"loss": 1.3165, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.777147793677163e-05, |
|
"loss": 1.1158, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.751377021076634e-05, |
|
"loss": 1.3774, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.725633537451006e-05, |
|
"loss": 1.4457, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.69991740525355e-05, |
|
"loss": 1.2375, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.67422868687118e-05, |
|
"loss": 1.4202, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.648567444624296e-05, |
|
"loss": 1.3173, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.622933740766679e-05, |
|
"loss": 1.2608, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.597327637485246e-05, |
|
"loss": 1.4031, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.571749196899987e-05, |
|
"loss": 1.272, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.546198481063801e-05, |
|
"loss": 1.3425, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.520675551962275e-05, |
|
"loss": 1.1886, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.49518047151364e-05, |
|
"loss": 1.0558, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.469713301568533e-05, |
|
"loss": 1.328, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.444274103909897e-05, |
|
"loss": 1.1842, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.418862940252804e-05, |
|
"loss": 1.1098, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.39347987224431e-05, |
|
"loss": 1.2275, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.368124961463342e-05, |
|
"loss": 1.2985, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.342798269420495e-05, |
|
"loss": 1.5345, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.317499857557886e-05, |
|
"loss": 1.3177, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.292229787249073e-05, |
|
"loss": 1.1131, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.266988119798821e-05, |
|
"loss": 1.1952, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.241774916443003e-05, |
|
"loss": 1.5274, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.216590238348438e-05, |
|
"loss": 1.1378, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.191434146612733e-05, |
|
"loss": 1.2205, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.166306702264177e-05, |
|
"loss": 0.9346, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.141207966261509e-05, |
|
"loss": 1.1095, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.116137999493869e-05, |
|
"loss": 1.2342, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.091096862780582e-05, |
|
"loss": 1.2845, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.066084616871037e-05, |
|
"loss": 1.0934, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.041101322444526e-05, |
|
"loss": 1.1626, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.016147040110118e-05, |
|
"loss": 1.033, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.991221830406481e-05, |
|
"loss": 1.3303, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.966325753801792e-05, |
|
"loss": 1.0324, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.941458870693488e-05, |
|
"loss": 1.3455, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.916621241408249e-05, |
|
"loss": 1.2513, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.891812926201742e-05, |
|
"loss": 1.4107, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.867033985258541e-05, |
|
"loss": 1.3377, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.842284478691943e-05, |
|
"loss": 1.0802, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.817564466543843e-05, |
|
"loss": 1.2704, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.792874008784603e-05, |
|
"loss": 1.1902, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.76821316531285e-05, |
|
"loss": 1.0903, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.743581995955385e-05, |
|
"loss": 1.303, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.718980560467033e-05, |
|
"loss": 1.4708, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.694408918530469e-05, |
|
"loss": 1.3564, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.669867129756095e-05, |
|
"loss": 1.1663, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.645355253681882e-05, |
|
"loss": 1.1493, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.620873349773239e-05, |
|
"loss": 1.243, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.596421477422879e-05, |
|
"loss": 1.332, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.571999695950627e-05, |
|
"loss": 1.4258, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.54760806460332e-05, |
|
"loss": 1.2923, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.52324664255468e-05, |
|
"loss": 1.253, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.49891548890509e-05, |
|
"loss": 1.2166, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.474614662681547e-05, |
|
"loss": 1.3181, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.450344222837453e-05, |
|
"loss": 1.272, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.426104228252497e-05, |
|
"loss": 1.1434, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.401894737732506e-05, |
|
"loss": 1.1643, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.377715810009303e-05, |
|
"loss": 1.2468, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.353567503740582e-05, |
|
"loss": 1.2365, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.32944987750974e-05, |
|
"loss": 1.3432, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.305362989825719e-05, |
|
"loss": 1.4599, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.281306899122939e-05, |
|
"loss": 1.102, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.257281663761068e-05, |
|
"loss": 1.1645, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.233287342024941e-05, |
|
"loss": 1.1368, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.209323992124382e-05, |
|
"loss": 1.1735, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.185391672194082e-05, |
|
"loss": 1.1274, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.161490440293477e-05, |
|
"loss": 0.9405, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.137620354406535e-05, |
|
"loss": 0.9062, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.113781472441714e-05, |
|
"loss": 1.5528, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.089973852231744e-05, |
|
"loss": 1.3515, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.066197551533522e-05, |
|
"loss": 1.5267, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.042452628027963e-05, |
|
"loss": 1.195, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.018739139319861e-05, |
|
"loss": 1.2459, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.995057142937747e-05, |
|
"loss": 1.3211, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.971406696333771e-05, |
|
"loss": 1.3303, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.947787856883507e-05, |
|
"loss": 1.235, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.924200681885891e-05, |
|
"loss": 1.0975, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.900645228563014e-05, |
|
"loss": 1.0561, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.87712155406002e-05, |
|
"loss": 1.0033, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.85362971544496e-05, |
|
"loss": 1.0609, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.83016976970864e-05, |
|
"loss": 1.22, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.806741773764527e-05, |
|
"loss": 1.2394, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.783345784448532e-05, |
|
"loss": 1.1023, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.759981858518941e-05, |
|
"loss": 1.2294, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.736650052656271e-05, |
|
"loss": 1.0494, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.713350423463092e-05, |
|
"loss": 1.2432, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 6.690083027463923e-05, |
|
"loss": 1.0197, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.666847921105084e-05, |
|
"loss": 1.1944, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.643645160754559e-05, |
|
"loss": 0.9204, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.620474802701879e-05, |
|
"loss": 1.4313, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.597336903157938e-05, |
|
"loss": 1.2751, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.574231518254896e-05, |
|
"loss": 1.2733, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.551158704046061e-05, |
|
"loss": 1.3241, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.528118516505675e-05, |
|
"loss": 1.2363, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.505111011528872e-05, |
|
"loss": 1.5814, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.482136244931483e-05, |
|
"loss": 1.023, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.459194272449904e-05, |
|
"loss": 1.241, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.436285149740995e-05, |
|
"loss": 1.208, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.413408932381896e-05, |
|
"loss": 1.3445, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.390565675869955e-05, |
|
"loss": 1.2188, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.367755435622536e-05, |
|
"loss": 1.0361, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.344978266976886e-05, |
|
"loss": 1.2409, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.322234225190065e-05, |
|
"loss": 1.1411, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.299523365438737e-05, |
|
"loss": 1.1396, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.276845742819078e-05, |
|
"loss": 1.1535, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.254201412346626e-05, |
|
"loss": 1.3295, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.231590428956149e-05, |
|
"loss": 1.3898, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 6.209012847501541e-05, |
|
"loss": 1.2843, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.186468722755621e-05, |
|
"loss": 1.4253, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.163958109410084e-05, |
|
"loss": 1.1143, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.141481062075302e-05, |
|
"loss": 1.1324, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.119037635280225e-05, |
|
"loss": 1.176, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.0966278834722346e-05, |
|
"loss": 1.0477, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.0742518610170266e-05, |
|
"loss": 1.205, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.051909622198453e-05, |
|
"loss": 1.3148, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.0296012212184416e-05, |
|
"loss": 1.2212, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.0073267121967794e-05, |
|
"loss": 1.1882, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.985086149171082e-05, |
|
"loss": 1.2734, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.962879586096581e-05, |
|
"loss": 1.1069, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.940707076846035e-05, |
|
"loss": 1.1915, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.91856867520959e-05, |
|
"loss": 1.0885, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.89646443489464e-05, |
|
"loss": 1.2153, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.874394409525723e-05, |
|
"loss": 1.1803, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.852358652644343e-05, |
|
"loss": 1.3352, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.830357217708886e-05, |
|
"loss": 1.1369, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.8083901580944856e-05, |
|
"loss": 1.3195, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.786457527092859e-05, |
|
"loss": 1.2551, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.7645593779122084e-05, |
|
"loss": 1.3172, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.742695763677088e-05, |
|
"loss": 1.3458, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.720866737428261e-05, |
|
"loss": 1.1934, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.699072352122605e-05, |
|
"loss": 0.9409, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.677312660632921e-05, |
|
"loss": 1.162, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.6555877157478674e-05, |
|
"loss": 1.3535, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.633897570171813e-05, |
|
"loss": 1.2635, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.6122422765246905e-05, |
|
"loss": 1.5661, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.590621887341889e-05, |
|
"loss": 1.1086, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.569036455074117e-05, |
|
"loss": 1.0895, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.547486032087277e-05, |
|
"loss": 1.3535, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.525970670662344e-05, |
|
"loss": 1.2312, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.504490422995226e-05, |
|
"loss": 1.1607, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.483045341196662e-05, |
|
"loss": 1.2972, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.461635477292071e-05, |
|
"loss": 1.3073, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.440260883221413e-05, |
|
"loss": 1.3124, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.4189216108391205e-05, |
|
"loss": 1.2518, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.397617711913914e-05, |
|
"loss": 0.8451, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.3763492381286995e-05, |
|
"loss": 1.5449, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.355116241080449e-05, |
|
"loss": 1.1825, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.333918772280055e-05, |
|
"loss": 1.2807, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.312756883152252e-05, |
|
"loss": 1.4253, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.291630625035411e-05, |
|
"loss": 1.1767, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.270540049181505e-05, |
|
"loss": 1.1725, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.249485206755917e-05, |
|
"loss": 1.0447, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.228466148837352e-05, |
|
"loss": 0.9761, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.207482926417702e-05, |
|
"loss": 1.2413, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.186535590401915e-05, |
|
"loss": 1.1953, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.165624191607883e-05, |
|
"loss": 1.2767, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.1447487807663306e-05, |
|
"loss": 0.9696, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.123909408520644e-05, |
|
"loss": 1.145, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.103106125426811e-05, |
|
"loss": 1.4862, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.082338981953254e-05, |
|
"loss": 0.9743, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.0616080284807175e-05, |
|
"loss": 1.2537, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.0409133153021606e-05, |
|
"loss": 1.4724, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.0202548926226066e-05, |
|
"loss": 1.4174, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.99963281055907e-05, |
|
"loss": 1.5319, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.9790471191403674e-05, |
|
"loss": 1.5159, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.958497868307048e-05, |
|
"loss": 0.9962, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.937985107911267e-05, |
|
"loss": 1.3318, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.917508887716642e-05, |
|
"loss": 1.3187, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.8970692573981455e-05, |
|
"loss": 1.0479, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.876666266541987e-05, |
|
"loss": 1.1313, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.856299964645481e-05, |
|
"loss": 1.1031, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.835970401116958e-05, |
|
"loss": 0.955, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.815677625275592e-05, |
|
"loss": 1.2066, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.79542168635132e-05, |
|
"loss": 1.1384, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.775202633484732e-05, |
|
"loss": 1.3048, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.7550205157269134e-05, |
|
"loss": 1.223, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.73487538203935e-05, |
|
"loss": 1.2406, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.7147672812938034e-05, |
|
"loss": 1.0293, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.6946962622722e-05, |
|
"loss": 1.1029, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.674662373666503e-05, |
|
"loss": 1.3801, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.654665664078588e-05, |
|
"loss": 1.3824, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.634706182020161e-05, |
|
"loss": 1.3021, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.6147839759125936e-05, |
|
"loss": 1.2043, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.5948990940868175e-05, |
|
"loss": 1.1829, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.5750515847832434e-05, |
|
"loss": 1.1432, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.555241496151603e-05, |
|
"loss": 1.2856, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.535468876250841e-05, |
|
"loss": 1.1847, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.5157337730490135e-05, |
|
"loss": 0.8359, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.4960362344231466e-05, |
|
"loss": 1.1477, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.4763763081591694e-05, |
|
"loss": 1.1412, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.4567540419517115e-05, |
|
"loss": 1.3056, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.437169483404091e-05, |
|
"loss": 1.3224, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.41762268002812e-05, |
|
"loss": 1.2936, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.398113679244018e-05, |
|
"loss": 0.9675, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.378642528380306e-05, |
|
"loss": 1.3418, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.359209274673675e-05, |
|
"loss": 1.1064, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.339813965268873e-05, |
|
"loss": 1.1713, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.3204566472186204e-05, |
|
"loss": 1.2932, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.301137367483432e-05, |
|
"loss": 1.3301, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.2818561729315795e-05, |
|
"loss": 0.9956, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.26261311033892e-05, |
|
"loss": 1.5543, |
|
"step": 1691 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.243408226388808e-05, |
|
"loss": 1.2308, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.2242415676719736e-05, |
|
"loss": 1.2872, |
|
"step": 1693 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.205113180686415e-05, |
|
"loss": 1.3242, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.1860231118373004e-05, |
|
"loss": 1.2402, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.166971407436806e-05, |
|
"loss": 1.3378, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.147958113704058e-05, |
|
"loss": 1.103, |
|
"step": 1697 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.1289832767650015e-05, |
|
"loss": 1.1262, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.110046942652279e-05, |
|
"loss": 1.3142, |
|
"step": 1699 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.091149157305123e-05, |
|
"loss": 1.1188, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.072289966569252e-05, |
|
"loss": 1.1387, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.0534694161967465e-05, |
|
"loss": 1.2965, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.034687551845975e-05, |
|
"loss": 1.4459, |
|
"step": 1703 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.01594441908141e-05, |
|
"loss": 1.072, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.9972400633735865e-05, |
|
"loss": 0.9118, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.9785745300989753e-05, |
|
"loss": 1.1985, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.95994786453985e-05, |
|
"loss": 1.3719, |
|
"step": 1707 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.9413601118841935e-05, |
|
"loss": 1.3824, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.92281131722559e-05, |
|
"loss": 1.2938, |
|
"step": 1709 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.904301525563112e-05, |
|
"loss": 0.9944, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.885830781801212e-05, |
|
"loss": 1.3555, |
|
"step": 1711 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.8673991307496004e-05, |
|
"loss": 1.1441, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.849006617123179e-05, |
|
"loss": 1.3109, |
|
"step": 1713 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.830653285541877e-05, |
|
"loss": 1.0028, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.812339180530564e-05, |
|
"loss": 1.1562, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.794064346518973e-05, |
|
"loss": 1.0405, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.7758288278415455e-05, |
|
"loss": 1.2983, |
|
"step": 1717 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.7576326687373545e-05, |
|
"loss": 1.2136, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.739475913349981e-05, |
|
"loss": 1.2544, |
|
"step": 1719 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.72135860572741e-05, |
|
"loss": 1.1806, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.703280789821948e-05, |
|
"loss": 1.5005, |
|
"step": 1721 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.685242509490061e-05, |
|
"loss": 1.3666, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.6672438084923376e-05, |
|
"loss": 1.2283, |
|
"step": 1723 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.649284730493323e-05, |
|
"loss": 1.3949, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.631365319061448e-05, |
|
"loss": 1.0868, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6134856176689126e-05, |
|
"loss": 1.089, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.5956456696915765e-05, |
|
"loss": 1.2464, |
|
"step": 1727 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.577845518408856e-05, |
|
"loss": 0.941, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.5600852070036406e-05, |
|
"loss": 1.198, |
|
"step": 1729 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.542364778562137e-05, |
|
"loss": 1.1854, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.5246842760738265e-05, |
|
"loss": 1.2015, |
|
"step": 1731 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.5070437424313176e-05, |
|
"loss": 1.1797, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.489443220430251e-05, |
|
"loss": 1.3385, |
|
"step": 1733 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.4718827527692044e-05, |
|
"loss": 1.3971, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.454362382049583e-05, |
|
"loss": 1.0747, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.4368821507755336e-05, |
|
"loss": 1.0842, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.4194421013537914e-05, |
|
"loss": 1.1598, |
|
"step": 1737 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.4020422760936335e-05, |
|
"loss": 1.2907, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.38468271720676e-05, |
|
"loss": 1.3287, |
|
"step": 1739 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.367363466807172e-05, |
|
"loss": 1.0782, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.350084566911087e-05, |
|
"loss": 1.4371, |
|
"step": 1741 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.332846059436831e-05, |
|
"loss": 1.1154, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.3156479862047375e-05, |
|
"loss": 1.1617, |
|
"step": 1743 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.2984903889370634e-05, |
|
"loss": 1.1814, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.281373309257842e-05, |
|
"loss": 1.3911, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.264296788692833e-05, |
|
"loss": 1.2625, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.2472608686694015e-05, |
|
"loss": 1.1116, |
|
"step": 1747 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.230265590516408e-05, |
|
"loss": 1.3004, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.213310995464114e-05, |
|
"loss": 1.0188, |
|
"step": 1749 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.1963971246440924e-05, |
|
"loss": 1.3164, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.179524019089117e-05, |
|
"loss": 1.0501, |
|
"step": 1751 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.1626917197330616e-05, |
|
"loss": 1.2515, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.1459002674108085e-05, |
|
"loss": 1.2192, |
|
"step": 1753 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.1291497028581516e-05, |
|
"loss": 1.1618, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.1124400667116884e-05, |
|
"loss": 1.1596, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.095771399508705e-05, |
|
"loss": 1.2991, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.0791437416871316e-05, |
|
"loss": 1.1086, |
|
"step": 1757 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.0625571335853864e-05, |
|
"loss": 1.3552, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.0460116154423102e-05, |
|
"loss": 1.1716, |
|
"step": 1759 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.0295072273970592e-05, |
|
"loss": 1.1182, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.0130440094890033e-05, |
|
"loss": 1.2788, |
|
"step": 1761 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.996622001657652e-05, |
|
"loss": 1.5593, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.9802412437425077e-05, |
|
"loss": 1.216, |
|
"step": 1763 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.9639017754830377e-05, |
|
"loss": 1.5077, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.9476036365185166e-05, |
|
"loss": 1.362, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.931346866387963e-05, |
|
"loss": 1.3215, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.9151315045300353e-05, |
|
"loss": 1.2743, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.8989575902829334e-05, |
|
"loss": 1.3791, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.8828251628843083e-05, |
|
"loss": 0.9143, |
|
"step": 1769 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.8667342614711733e-05, |
|
"loss": 1.1611, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.850684925079774e-05, |
|
"loss": 1.0213, |
|
"step": 1771 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.834677192645552e-05, |
|
"loss": 1.2423, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.818711103003e-05, |
|
"loss": 1.2826, |
|
"step": 1773 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.8027866948855862e-05, |
|
"loss": 1.0059, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.786904006925668e-05, |
|
"loss": 1.2449, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.771063077654383e-05, |
|
"loss": 1.4577, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.7552639455015672e-05, |
|
"loss": 1.354, |
|
"step": 1777 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.7395066487956557e-05, |
|
"loss": 1.3385, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.7237912257635877e-05, |
|
"loss": 1.1573, |
|
"step": 1779 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.7081177145307314e-05, |
|
"loss": 1.2943, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.6924861531207613e-05, |
|
"loss": 1.3373, |
|
"step": 1781 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.67689657945559e-05, |
|
"loss": 1.4295, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.6613490313552693e-05, |
|
"loss": 1.5029, |
|
"step": 1783 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.6458435465378895e-05, |
|
"loss": 1.4016, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.6303801626195168e-05, |
|
"loss": 1.1433, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.614958917114052e-05, |
|
"loss": 1.0045, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.5995798474331868e-05, |
|
"loss": 1.2273, |
|
"step": 1787 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.5842429908862986e-05, |
|
"loss": 0.8235, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.5689483846803475e-05, |
|
"loss": 1.2355, |
|
"step": 1789 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.553696065919795e-05, |
|
"loss": 1.1482, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.5384860716065183e-05, |
|
"loss": 1.0946, |
|
"step": 1791 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.5233184386397146e-05, |
|
"loss": 1.2652, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.5081932038158094e-05, |
|
"loss": 0.9681, |
|
"step": 1793 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.4931104038283738e-05, |
|
"loss": 1.1351, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.4780700752680397e-05, |
|
"loss": 1.0974, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.4630722546223983e-05, |
|
"loss": 0.9365, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.448116978275905e-05, |
|
"loss": 1.2426, |
|
"step": 1797 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.4332042825098245e-05, |
|
"loss": 1.2849, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.4183342035021107e-05, |
|
"loss": 1.3706, |
|
"step": 1799 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.403506777327329e-05, |
|
"loss": 1.1806, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.3887220399565745e-05, |
|
"loss": 1.3546, |
|
"step": 1801 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.373980027257374e-05, |
|
"loss": 1.2827, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.3592807749936196e-05, |
|
"loss": 1.2513, |
|
"step": 1803 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.344624318825439e-05, |
|
"loss": 1.3511, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.330010694309168e-05, |
|
"loss": 1.1903, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.3154399368972167e-05, |
|
"loss": 1.1434, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.3009120819380026e-05, |
|
"loss": 1.0491, |
|
"step": 1807 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.286427164675862e-05, |
|
"loss": 1.1442, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.2719852202509695e-05, |
|
"loss": 1.2043, |
|
"step": 1809 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.25758628369924e-05, |
|
"loss": 1.289, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.2432303899522712e-05, |
|
"loss": 1.1428, |
|
"step": 1811 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.2289175738372083e-05, |
|
"loss": 0.963, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.2146478700767205e-05, |
|
"loss": 1.487, |
|
"step": 1813 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.2004213132888696e-05, |
|
"loss": 1.1215, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.1862379379870525e-05, |
|
"loss": 1.2184, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.1720977785799046e-05, |
|
"loss": 1.0798, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.1580008693712173e-05, |
|
"loss": 1.3635, |
|
"step": 1817 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.1439472445598653e-05, |
|
"loss": 1.087, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.1299369382397093e-05, |
|
"loss": 1.2251, |
|
"step": 1819 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.1159699843995217e-05, |
|
"loss": 1.1589, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.102046416922907e-05, |
|
"loss": 1.2955, |
|
"step": 1821 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.088166269588212e-05, |
|
"loss": 1.5373, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0743295760684435e-05, |
|
"loss": 1.3092, |
|
"step": 1823 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0605363699311943e-05, |
|
"loss": 1.17, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0467866846385507e-05, |
|
"loss": 1.191, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.03308055354704e-05, |
|
"loss": 1.0686, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0194180099074943e-05, |
|
"loss": 1.1097, |
|
"step": 1827 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0057990868650234e-05, |
|
"loss": 1.1788, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9922238174589202e-05, |
|
"loss": 1.2758, |
|
"step": 1829 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.978692234622559e-05, |
|
"loss": 1.5553, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9652043711833402e-05, |
|
"loss": 1.0434, |
|
"step": 1831 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.9517602598625983e-05, |
|
"loss": 1.3425, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.9383599332755266e-05, |
|
"loss": 1.4005, |
|
"step": 1833 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.9250034239310975e-05, |
|
"loss": 1.1019, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.911690764231979e-05, |
|
"loss": 1.5482, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8984219864744768e-05, |
|
"loss": 1.1322, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8851971228484233e-05, |
|
"loss": 1.3651, |
|
"step": 1837 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8720162054371126e-05, |
|
"loss": 1.5033, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.858879266217246e-05, |
|
"loss": 1.1742, |
|
"step": 1839 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.845786337058817e-05, |
|
"loss": 1.2404, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8327374497250572e-05, |
|
"loss": 1.2239, |
|
"step": 1841 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8197326358723533e-05, |
|
"loss": 1.4019, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.806771927050166e-05, |
|
"loss": 0.9853, |
|
"step": 1843 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7938553547009774e-05, |
|
"loss": 0.8759, |
|
"step": 1844 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7809829501601594e-05, |
|
"loss": 1.1722, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7681547446559722e-05, |
|
"loss": 1.3178, |
|
"step": 1846 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7553707693094213e-05, |
|
"loss": 1.494, |
|
"step": 1847 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7426310551342273e-05, |
|
"loss": 1.1439, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7299356330367256e-05, |
|
"loss": 1.1022, |
|
"step": 1849 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7172845338157995e-05, |
|
"loss": 1.3888, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7046777881628095e-05, |
|
"loss": 1.177, |
|
"step": 1851 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.69211542666152e-05, |
|
"loss": 1.0688, |
|
"step": 1852 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.679597479788002e-05, |
|
"loss": 1.4828, |
|
"step": 1853 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6671239779106008e-05, |
|
"loss": 1.4438, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6546949512898236e-05, |
|
"loss": 1.5251, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6423104300782822e-05, |
|
"loss": 1.1282, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6299704443206253e-05, |
|
"loss": 1.0136, |
|
"step": 1857 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6176750239534555e-05, |
|
"loss": 1.2888, |
|
"step": 1858 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6054241988052608e-05, |
|
"loss": 1.0351, |
|
"step": 1859 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.5932179985963412e-05, |
|
"loss": 1.3194, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.581056452938736e-05, |
|
"loss": 1.1395, |
|
"step": 1861 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.5689395913361603e-05, |
|
"loss": 1.1652, |
|
"step": 1862 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.5568674431839224e-05, |
|
"loss": 1.2574, |
|
"step": 1863 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.544840037768851e-05, |
|
"loss": 1.3462, |
|
"step": 1864 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.5328574042692368e-05, |
|
"loss": 1.5134, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.5209195717547486e-05, |
|
"loss": 1.0249, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.5090265691863848e-05, |
|
"loss": 1.2759, |
|
"step": 1867 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.497178425416359e-05, |
|
"loss": 1.3408, |
|
"step": 1868 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.485375169188079e-05, |
|
"loss": 1.2896, |
|
"step": 1869 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.4736168291360552e-05, |
|
"loss": 1.3258, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.4619034337858229e-05, |
|
"loss": 1.3167, |
|
"step": 1871 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.450235011553891e-05, |
|
"loss": 1.3734, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.438611590747656e-05, |
|
"loss": 1.1074, |
|
"step": 1873 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.4270331995653507e-05, |
|
"loss": 1.6006, |
|
"step": 1874 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.4154998660959596e-05, |
|
"loss": 1.131, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.4040116183191587e-05, |
|
"loss": 1.4408, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.3925684841052545e-05, |
|
"loss": 1.2286, |
|
"step": 1877 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.3811704912151024e-05, |
|
"loss": 1.1798, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.3698176673000351e-05, |
|
"loss": 1.0013, |
|
"step": 1879 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.3585100399018324e-05, |
|
"loss": 1.1529, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.3472476364526043e-05, |
|
"loss": 1.1891, |
|
"step": 1881 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.3360304842747595e-05, |
|
"loss": 1.3186, |
|
"step": 1882 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.3248586105809235e-05, |
|
"loss": 1.0345, |
|
"step": 1883 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.3137320424738764e-05, |
|
"loss": 1.1464, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.3026508069465016e-05, |
|
"loss": 0.9625, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.2916149308816793e-05, |
|
"loss": 1.3932, |
|
"step": 1886 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.2806244410522739e-05, |
|
"loss": 1.0426, |
|
"step": 1887 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.2696793641210303e-05, |
|
"loss": 1.2517, |
|
"step": 1888 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.2587797266405265e-05, |
|
"loss": 1.0455, |
|
"step": 1889 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.2479255550531044e-05, |
|
"loss": 1.3104, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.237116875690807e-05, |
|
"loss": 1.1541, |
|
"step": 1891 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.226353714775308e-05, |
|
"loss": 1.0212, |
|
"step": 1892 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.2156360984178672e-05, |
|
"loss": 1.1716, |
|
"step": 1893 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.2049640526192396e-05, |
|
"loss": 1.1511, |
|
"step": 1894 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1943376032696374e-05, |
|
"loss": 1.4378, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.18375677614865e-05, |
|
"loss": 1.1137, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1732215969251936e-05, |
|
"loss": 1.2647, |
|
"step": 1897 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1627320911574369e-05, |
|
"loss": 1.2192, |
|
"step": 1898 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1522882842927507e-05, |
|
"loss": 1.2002, |
|
"step": 1899 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1418902016676392e-05, |
|
"loss": 1.12, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1315378685076793e-05, |
|
"loss": 1.2486, |
|
"step": 1901 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1212313099274596e-05, |
|
"loss": 1.2119, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1109705509305256e-05, |
|
"loss": 1.2259, |
|
"step": 1903 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1007556164093125e-05, |
|
"loss": 1.1457, |
|
"step": 1904 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.0905865311450792e-05, |
|
"loss": 1.1518, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.0804633198078606e-05, |
|
"loss": 1.0608, |
|
"step": 1906 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.0703860069564009e-05, |
|
"loss": 1.1139, |
|
"step": 1907 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.0603546170381018e-05, |
|
"loss": 1.2249, |
|
"step": 1908 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.0503691743889455e-05, |
|
"loss": 1.4376, |
|
"step": 1909 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.0404297032334476e-05, |
|
"loss": 1.115, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.0305362276846131e-05, |
|
"loss": 1.1447, |
|
"step": 1911 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.020688771743843e-05, |
|
"loss": 1.3071, |
|
"step": 1912 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.0108873593009071e-05, |
|
"loss": 1.4344, |
|
"step": 1913 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.0011320141338721e-05, |
|
"loss": 1.3076, |
|
"step": 1914 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.914227599090442e-06, |
|
"loss": 1.2095, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.817596201809132e-06, |
|
"loss": 1.0488, |
|
"step": 1916 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.72142618392098e-06, |
|
"loss": 1.0581, |
|
"step": 1917 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.625717778732913e-06, |
|
"loss": 1.5264, |
|
"step": 1918 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.530471218431957e-06, |
|
"loss": 0.7503, |
|
"step": 1919 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.435686734084614e-06, |
|
"loss": 1.1666, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.341364555636568e-06, |
|
"loss": 1.1909, |
|
"step": 1921 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.247504911911865e-06, |
|
"loss": 1.2914, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.154108030612451e-06, |
|
"loss": 1.0331, |
|
"step": 1923 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.06117413831764e-06, |
|
"loss": 1.2396, |
|
"step": 1924 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.968703460483508e-06, |
|
"loss": 1.3323, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.87669622144245e-06, |
|
"loss": 1.4465, |
|
"step": 1926 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.78515264440245e-06, |
|
"loss": 1.2658, |
|
"step": 1927 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.694072951446775e-06, |
|
"loss": 1.1626, |
|
"step": 1928 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.603457363533274e-06, |
|
"loss": 1.1752, |
|
"step": 1929 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.513306100493834e-06, |
|
"loss": 1.1018, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.423619381033997e-06, |
|
"loss": 1.2596, |
|
"step": 1931 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.33439742273221e-06, |
|
"loss": 1.0342, |
|
"step": 1932 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.245640442039504e-06, |
|
"loss": 1.3798, |
|
"step": 1933 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.157348654278906e-06, |
|
"loss": 1.1456, |
|
"step": 1934 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.06952227364477e-06, |
|
"loss": 1.2221, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 7.982161513202502e-06, |
|
"loss": 1.2754, |
|
"step": 1936 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 7.895266584887884e-06, |
|
"loss": 0.9985, |
|
"step": 1937 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.808837699506556e-06, |
|
"loss": 1.544, |
|
"step": 1938 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.722875066733593e-06, |
|
"loss": 1.1587, |
|
"step": 1939 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.637378895112911e-06, |
|
"loss": 1.3164, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.552349392056867e-06, |
|
"loss": 1.1158, |
|
"step": 1941 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.467786763845586e-06, |
|
"loss": 0.9729, |
|
"step": 1942 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.383691215626632e-06, |
|
"loss": 1.238, |
|
"step": 1943 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.300062951414455e-06, |
|
"loss": 1.0044, |
|
"step": 1944 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.2169021740898346e-06, |
|
"loss": 1.0541, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.134209085399457e-06, |
|
"loss": 1.2909, |
|
"step": 1946 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.051983885955404e-06, |
|
"loss": 1.2217, |
|
"step": 1947 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.970226775234623e-06, |
|
"loss": 1.309, |
|
"step": 1948 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.888937951578589e-06, |
|
"loss": 1.2958, |
|
"step": 1949 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.808117612192599e-06, |
|
"loss": 0.9312, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.727765953145482e-06, |
|
"loss": 1.1902, |
|
"step": 1951 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.647883169369085e-06, |
|
"loss": 1.0265, |
|
"step": 1952 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.568469454657677e-06, |
|
"loss": 1.1769, |
|
"step": 1953 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.4895250016676625e-06, |
|
"loss": 1.3831, |
|
"step": 1954 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.411050001916996e-06, |
|
"loss": 1.3066, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.333044645784703e-06, |
|
"loss": 1.2549, |
|
"step": 1956 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.255509122510539e-06, |
|
"loss": 1.1581, |
|
"step": 1957 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.178443620194352e-06, |
|
"loss": 1.0347, |
|
"step": 1958 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.101848325795856e-06, |
|
"loss": 1.249, |
|
"step": 1959 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.025723425133989e-06, |
|
"loss": 1.1094, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.950069102886402e-06, |
|
"loss": 1.363, |
|
"step": 1961 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.8748855425893475e-06, |
|
"loss": 1.0292, |
|
"step": 1962 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.800172926636882e-06, |
|
"loss": 1.0867, |
|
"step": 1963 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.7259314362805735e-06, |
|
"loss": 1.2136, |
|
"step": 1964 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.652161251629085e-06, |
|
"loss": 1.3425, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.578862551647612e-06, |
|
"loss": 1.28, |
|
"step": 1966 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.50603551415767e-06, |
|
"loss": 1.2319, |
|
"step": 1967 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.4336803158364205e-06, |
|
"loss": 1.3405, |
|
"step": 1968 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.361797132216406e-06, |
|
"loss": 1.2899, |
|
"step": 1969 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.290386137685066e-06, |
|
"loss": 1.3527, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.219447505484243e-06, |
|
"loss": 1.4975, |
|
"step": 1971 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.148981407709963e-06, |
|
"loss": 1.2036, |
|
"step": 1972 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.078988015311769e-06, |
|
"loss": 1.0778, |
|
"step": 1973 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.009467498092502e-06, |
|
"loss": 1.5373, |
|
"step": 1974 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.9404200247078485e-06, |
|
"loss": 1.3212, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.8718457626657496e-06, |
|
"loss": 1.0493, |
|
"step": 1976 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.803744878326311e-06, |
|
"loss": 1.2744, |
|
"step": 1977 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.736117536901152e-06, |
|
"loss": 1.421, |
|
"step": 1978 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.66896390245306e-06, |
|
"loss": 0.9075, |
|
"step": 1979 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.602284137895696e-06, |
|
"loss": 1.3903, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.53607840499306e-06, |
|
"loss": 1.1848, |
|
"step": 1981 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.470346864359165e-06, |
|
"loss": 1.0173, |
|
"step": 1982 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.40508967545763e-06, |
|
"loss": 1.3709, |
|
"step": 1983 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.340306996601351e-06, |
|
"loss": 1.0662, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.2759989849520785e-06, |
|
"loss": 1.4046, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.212165796519929e-06, |
|
"loss": 1.1181, |
|
"step": 1986 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.148807586163228e-06, |
|
"loss": 1.2173, |
|
"step": 1987 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.085924507587913e-06, |
|
"loss": 1.4524, |
|
"step": 1988 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.0235167133473086e-06, |
|
"loss": 1.2748, |
|
"step": 1989 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.96158435484173e-06, |
|
"loss": 1.0592, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.900127582318036e-06, |
|
"loss": 1.3902, |
|
"step": 1991 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.839146544869321e-06, |
|
"loss": 1.1882, |
|
"step": 1992 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.7786413904346674e-06, |
|
"loss": 1.1128, |
|
"step": 1993 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.7186122657985045e-06, |
|
"loss": 1.2136, |
|
"step": 1994 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.659059316590541e-06, |
|
"loss": 1.0755, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.599982687285275e-06, |
|
"loss": 1.3191, |
|
"step": 1996 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.5413825212015973e-06, |
|
"loss": 0.9365, |
|
"step": 1997 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4832589605025666e-06, |
|
"loss": 1.3355, |
|
"step": 1998 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4256121461949676e-06, |
|
"loss": 1.4349, |
|
"step": 1999 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.3684422181290864e-06, |
|
"loss": 1.1091, |
|
"step": 2000 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 2117, |
|
"num_train_epochs": 1, |
|
"save_steps": 1000, |
|
"total_flos": 3.327487298555085e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|