{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 18.293569925912376,
  "global_step": 250000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.04, "learning_rate": 2.0000000000000003e-06, "loss": 8.4479, "step": 500 },
    { "epoch": 0.07, "learning_rate": 4.000000000000001e-06, "loss": 7.3835, "step": 1000 },
    { "epoch": 0.11, "learning_rate": 6e-06, "loss": 6.7134, "step": 1500 },
    { "epoch": 0.15, "learning_rate": 8.000000000000001e-06, "loss": 6.2752, "step": 2000 },
    { "epoch": 0.18, "learning_rate": 1e-05, "loss": 5.9464, "step": 2500 },
    { "epoch": 0.22, "learning_rate": 1.2e-05, "loss": 5.643, "step": 3000 },
    { "epoch": 0.26, "learning_rate": 1.4000000000000001e-05, "loss": 5.3728, "step": 3500 },
    { "epoch": 0.29, "learning_rate": 1.6000000000000003e-05, "loss": 5.1453, "step": 4000 },
    { "epoch": 0.33, "learning_rate": 1.8e-05, "loss": 4.9466, "step": 4500 },
    { "epoch": 0.37, "learning_rate": 2e-05, "loss": 4.7762, "step": 5000 },
    { "epoch": 0.4, "learning_rate": 2.2000000000000003e-05, "loss": 4.6241, "step": 5500 },
    { "epoch": 0.44, "learning_rate": 2.4e-05, "loss": 4.4917, "step": 6000 },
    { "epoch": 0.48, "learning_rate": 2.6000000000000002e-05, "loss": 4.3683, "step": 6500 },
    { "epoch": 0.51, "learning_rate": 2.8000000000000003e-05, "loss": 4.2491, "step": 7000 },
    { "epoch": 0.55, "learning_rate": 3e-05, "loss": 4.1498, "step": 7500 },
    { "epoch": 0.59, "learning_rate": 3.2000000000000005e-05, "loss": 4.0539, "step": 8000 },
    { "epoch": 0.62, "learning_rate": 3.4000000000000007e-05, "loss": 3.968, "step": 8500 },
    { "epoch": 0.66, "learning_rate": 3.6e-05, "loss": 3.8855, "step": 9000 },
    { "epoch": 0.7, "learning_rate": 3.8e-05, "loss": 3.818, "step": 9500 },
    { "epoch": 0.73, "learning_rate": 4e-05, "loss": 3.7477, "step": 10000 },
    { "epoch": 0.77, "learning_rate": 4.2e-05, "loss": 3.6863, "step": 10500 },
    { "epoch": 0.8, "learning_rate": 4.4000000000000006e-05, "loss": 3.628, "step": 11000 },
    { "epoch": 0.84, "learning_rate": 4.600000000000001e-05, "loss": 3.5747, "step": 11500 },
    { "epoch": 0.88, "learning_rate": 4.8e-05, "loss": 3.5235, "step": 12000 },
    { "epoch": 0.91, "learning_rate": 5e-05, "loss": 3.4808, "step": 12500 },
    { "epoch": 0.95, "learning_rate": 5.2000000000000004e-05, "loss": 3.4356, "step": 13000 },
    { "epoch": 0.99, "learning_rate": 5.4000000000000005e-05, "loss": 3.3986, "step": 13500 },
    { "epoch": 1.02, "learning_rate": 5.6000000000000006e-05, "loss": 3.364, "step": 14000 },
    { "epoch": 1.06, "learning_rate": 5.8e-05, "loss": 3.3247, "step": 14500 },
    { "epoch": 1.1, "learning_rate": 6e-05, "loss": 3.2949, "step": 15000 },
    { "epoch": 1.13, "learning_rate": 6.2e-05, "loss": 3.2666, "step": 15500 },
    { "epoch": 1.17, "learning_rate": 6.400000000000001e-05, "loss": 3.2398, "step": 16000 },
    { "epoch": 1.21, "learning_rate": 6.6e-05, "loss": 3.2131, "step": 16500 },
    { "epoch": 1.24, "learning_rate": 6.800000000000001e-05, "loss": 3.1887, "step": 17000 },
    { "epoch": 1.28, "learning_rate": 7e-05, "loss": 3.1665, "step": 17500 },
    { "epoch": 1.32, "learning_rate": 7.2e-05, "loss": 3.1422, "step": 18000 },
    { "epoch": 1.35, "learning_rate": 7.4e-05, "loss": 3.1238, "step": 18500 },
    { "epoch": 1.39, "learning_rate": 7.6e-05, "loss": 3.1004, "step": 19000 },
    { "epoch": 1.43, "learning_rate": 7.800000000000001e-05, "loss": 3.0856, "step": 19500 },
    { "epoch": 1.46, "learning_rate": 8e-05, "loss": 3.0682, "step": 20000 },
    { "epoch": 1.5, "learning_rate": 8.2e-05, "loss": 3.0488, "step": 20500 },
    { "epoch": 1.54, "learning_rate": 8.4e-05, "loss": 3.0314, "step": 21000 },
    { "epoch": 1.57, "learning_rate": 8.6e-05, "loss": 3.0153, "step": 21500 },
    { "epoch": 1.61, "learning_rate": 8.800000000000001e-05, "loss": 3.0039, "step": 22000 },
    { "epoch": 1.65, "learning_rate": 9e-05, "loss": 2.9861, "step": 22500 },
    { "epoch": 1.68, "learning_rate": 9.200000000000001e-05, "loss": 2.9759, "step": 23000 },
    { "epoch": 1.72, "learning_rate": 9.4e-05, "loss": 2.9631, "step": 23500 },
    { "epoch": 1.76, "learning_rate": 9.6e-05, "loss": 2.9523, "step": 24000 },
    { "epoch": 1.79, "learning_rate": 9.8e-05, "loss": 2.9401, "step": 24500 },
    { "epoch": 1.83, "learning_rate": 0.0001, "loss": 2.9267, "step": 25000 },
    { "epoch": 1.87, "learning_rate": 9.977777777777779e-05, "loss": 2.9162, "step": 25500 },
    { "epoch": 1.9, "learning_rate": 9.955555555555556e-05, "loss": 2.9032, "step": 26000 },
    { "epoch": 1.94, "learning_rate": 9.933333333333334e-05, "loss": 2.8907, "step": 26500 },
    { "epoch": 1.98, "learning_rate": 9.911111111111112e-05, "loss": 2.8838, "step": 27000 },
    { "epoch": 2.01, "learning_rate": 9.888888888888889e-05, "loss": 2.8742, "step": 27500 },
    { "epoch": 2.05, "learning_rate": 9.866666666666668e-05, "loss": 2.8601, "step": 28000 },
    { "epoch": 2.09, "learning_rate": 9.844444444444444e-05, "loss": 2.8534, "step": 28500 },
    { "epoch": 2.12, "learning_rate": 9.822222222222223e-05, "loss": 2.8455, "step": 29000 },
    { "epoch": 2.16, "learning_rate": 9.8e-05, "loss": 2.8346, "step": 29500 },
    { "epoch": 2.2, "learning_rate": 9.777777777777778e-05, "loss": 2.8289, "step": 30000 },
    { "epoch": 2.23, "learning_rate": 9.755555555555555e-05, "loss": 2.8203, "step": 30500 },
    { "epoch": 2.27, "learning_rate": 9.733333333333335e-05, "loss": 2.8138, "step": 31000 },
    { "epoch": 2.3, "learning_rate": 9.711111111111111e-05, "loss": 2.8068, "step": 31500 },
    { "epoch": 2.34, "learning_rate": 9.68888888888889e-05, "loss": 2.7989, "step": 32000 },
    { "epoch": 2.38, "learning_rate": 9.666666666666667e-05, "loss": 2.7923, "step": 32500 },
    { "epoch": 2.41, "learning_rate": 9.644444444444445e-05, "loss": 2.7847, "step": 33000 },
    { "epoch": 2.45, "learning_rate": 9.622222222222222e-05, "loss": 2.7818, "step": 33500 },
    { "epoch": 2.49, "learning_rate": 9.6e-05, "loss": 2.7735, "step": 34000 },
    { "epoch": 2.52, "learning_rate": 9.577777777777777e-05, "loss": 2.7691, "step": 34500 },
    { "epoch": 2.56, "learning_rate": 9.555555555555557e-05, "loss": 2.7585, "step": 35000 },
    { "epoch": 2.6, "learning_rate": 9.533333333333334e-05, "loss": 2.7609, "step": 35500 },
    { "epoch": 2.63, "learning_rate": 9.511111111111112e-05, "loss": 2.752, "step": 36000 },
    { "epoch": 2.67, "learning_rate": 9.488888888888889e-05, "loss": 2.7452, "step": 36500 },
    { "epoch": 2.71, "learning_rate": 9.466666666666667e-05, "loss": 2.741, "step": 37000 },
    { "epoch": 2.74, "learning_rate": 9.444444444444444e-05, "loss": 2.7384, "step": 37500 },
    { "epoch": 2.78, "learning_rate": 9.422222222222223e-05, "loss": 2.7332, "step": 38000 },
    { "epoch": 2.82, "learning_rate": 9.4e-05, "loss": 2.726, "step": 38500 },
    { "epoch": 2.85, "learning_rate": 9.377777777777779e-05, "loss": 2.7218, "step": 39000 },
    { "epoch": 2.89, "learning_rate": 9.355555555555556e-05, "loss": 2.7198, "step": 39500 },
    { "epoch": 2.93, "learning_rate": 9.333333333333334e-05, "loss": 2.7156, "step": 40000 },
    { "epoch": 2.96, "learning_rate": 9.311111111111111e-05, "loss": 2.71, "step": 40500 },
    { "epoch": 3.0, "learning_rate": 9.28888888888889e-05, "loss": 2.7099, "step": 41000 },
    { "epoch": 3.04, "learning_rate": 9.266666666666666e-05, "loss": 2.7006, "step": 41500 },
    { "epoch": 3.07, "learning_rate": 9.244444444444445e-05, "loss": 2.694, "step": 42000 },
    { "epoch": 3.11, "learning_rate": 9.222222222222223e-05, "loss": 2.6938, "step": 42500 },
    { "epoch": 3.15, "learning_rate": 9.200000000000001e-05, "loss": 2.6878, "step": 43000 },
    { "epoch": 3.18, "learning_rate": 9.177777777777778e-05, "loss": 2.6865, "step": 43500 },
    { "epoch": 3.22, "learning_rate": 9.155555555555557e-05, "loss": 2.6847, "step": 44000 },
    { "epoch": 3.26, "learning_rate": 9.133333333333334e-05, "loss": 2.6811, "step": 44500 },
    { "epoch": 3.29, "learning_rate": 9.111111111111112e-05, "loss": 2.6748, "step": 45000 },
    { "epoch": 3.33, "learning_rate": 9.088888888888889e-05, "loss": 2.6733, "step": 45500 },
    { "epoch": 3.37, "learning_rate": 9.066666666666667e-05, "loss": 2.6708, "step": 46000 },
    { "epoch": 3.4, "learning_rate": 9.044444444444445e-05, "loss": 2.6657, "step": 46500 },
    { "epoch": 3.44, "learning_rate": 9.022222222222224e-05, "loss": 2.6616, "step": 47000 },
    { "epoch": 3.48, "learning_rate": 9e-05, "loss": 2.6555, "step": 47500 },
    { "epoch": 3.51, "learning_rate": 8.977777777777779e-05, "loss": 2.6568, "step": 48000 },
    { "epoch": 3.55, "learning_rate": 8.955555555555556e-05, "loss": 2.654, "step": 48500 },
    { "epoch": 3.59, "learning_rate": 8.933333333333334e-05, "loss": 2.6504, "step": 49000 },
    { "epoch": 3.62, "learning_rate": 8.911111111111111e-05, "loss": 2.6498, "step": 49500 },
    { "epoch": 3.66, "learning_rate": 8.888888888888889e-05, "loss": 2.646, "step": 50000 },
    { "epoch": 3.7, "learning_rate": 8.866666666666668e-05, "loss": 2.6448, "step": 50500 },
    { "epoch": 3.73, "learning_rate": 8.844444444444445e-05, "loss": 2.6415, "step": 51000 },
    { "epoch": 3.77, "learning_rate": 8.822222222222223e-05, "loss": 2.6395, "step": 51500 },
    { "epoch": 3.81, "learning_rate": 8.800000000000001e-05, "loss": 2.6366, "step": 52000 },
    { "epoch": 3.84, "learning_rate": 8.777777777777778e-05, "loss": 2.6376, "step": 52500 },
    { "epoch": 3.88, "learning_rate": 8.755555555555556e-05, "loss": 2.6319, "step": 53000 },
    { "epoch": 3.91, "learning_rate": 8.733333333333333e-05, "loss": 2.6286, "step": 53500 },
    { "epoch": 3.95, "learning_rate": 8.711111111111112e-05, "loss": 2.6267, "step": 54000 },
    { "epoch": 3.99, "learning_rate": 8.68888888888889e-05, "loss": 2.6256, "step": 54500 },
    { "epoch": 4.02, "learning_rate": 8.666666666666667e-05, "loss": 2.6223, "step": 55000 },
    { "epoch": 4.06, "learning_rate": 8.644444444444445e-05, "loss": 2.6161, "step": 55500 },
    { "epoch": 4.1, "learning_rate": 8.622222222222222e-05, "loss": 2.6128, "step": 56000 },
    { "epoch": 4.13, "learning_rate": 8.6e-05, "loss": 2.6145, "step": 56500 },
    { "epoch": 4.17, "learning_rate": 8.577777777777777e-05, "loss": 2.6097, "step": 57000 },
    { "epoch": 4.21, "learning_rate": 8.555555555555556e-05, "loss": 2.6107, "step": 57500 },
    { "epoch": 4.24, "learning_rate": 8.533333333333334e-05, "loss": 2.6069, "step": 58000 },
    { "epoch": 4.28, "learning_rate": 8.511111111111112e-05, "loss": 2.605, "step": 58500 },
    { "epoch": 4.32, "learning_rate": 8.488888888888889e-05, "loss": 2.6049, "step": 59000 },
    { "epoch": 4.35, "learning_rate": 8.466666666666667e-05, "loss": 2.6003, "step": 59500 },
    { "epoch": 4.39, "learning_rate": 8.444444444444444e-05, "loss": 2.5996, "step": 60000 },
    { "epoch": 4.43, "learning_rate": 8.422222222222223e-05, "loss": 2.5991, "step": 60500 },
    { "epoch": 4.46, "learning_rate": 8.4e-05, "loss": 2.5926, "step": 61000 },
    { "epoch": 4.5, "learning_rate": 8.377777777777778e-05, "loss": 2.5933, "step": 61500 },
    { "epoch": 4.54, "learning_rate": 8.355555555555556e-05, "loss": 2.5903, "step": 62000 },
    { "epoch": 4.57, "learning_rate": 8.333333333333334e-05, "loss": 2.5906, "step": 62500 },
    { "epoch": 4.61, "learning_rate": 8.311111111111111e-05, "loss": 2.5894, "step": 63000 },
    { "epoch": 4.65, "learning_rate": 8.28888888888889e-05, "loss": 2.5848, "step": 63500 },
    { "epoch": 4.68, "learning_rate": 8.266666666666667e-05, "loss": 2.5861, "step": 64000 },
    { "epoch": 4.72, "learning_rate": 8.244444444444445e-05, "loss": 2.5817, "step": 64500 },
    { "epoch": 4.76, "learning_rate": 8.222222222222222e-05, "loss": 2.5794, "step": 65000 },
    { "epoch": 4.79, "learning_rate": 8.2e-05, "loss": 2.5794, "step": 65500 },
    { "epoch": 4.83, "learning_rate": 8.177777777777778e-05, "loss": 2.5779, "step": 66000 },
    { "epoch": 4.87, "learning_rate": 8.155555555555557e-05, "loss": 2.5759, "step": 66500 },
    { "epoch": 4.9, "learning_rate": 8.133333333333334e-05, "loss": 2.5747, "step": 67000 },
    { "epoch": 4.94, "learning_rate": 8.111111111111112e-05, "loss": 2.5728, "step": 67500 },
    { "epoch": 4.98, "learning_rate": 8.088888888888889e-05, "loss": 2.5727, "step": 68000 },
    { "epoch": 5.01, "learning_rate": 8.066666666666667e-05, "loss": 2.5702, "step": 68500 },
    { "epoch": 5.05, "learning_rate": 8.044444444444444e-05, "loss": 2.5676, "step": 69000 },
    { "epoch": 5.09, "learning_rate": 8.022222222222222e-05, "loss": 2.5671, "step": 69500 },
    { "epoch": 5.12, "learning_rate": 8e-05, "loss": 2.563, "step": 70000 },
    { "epoch": 5.16, "learning_rate": 7.977777777777779e-05, "loss": 2.5645, "step": 70500 },
    { "epoch": 5.2, "learning_rate": 7.955555555555556e-05, "loss": 2.5598, "step": 71000 },
    { "epoch": 5.23, "learning_rate": 7.933333333333334e-05, "loss": 2.5602, "step": 71500 },
    { "epoch": 5.27, "learning_rate": 7.911111111111111e-05, "loss": 2.559, "step": 72000 },
    { "epoch": 5.31, "learning_rate": 7.88888888888889e-05, "loss": 2.5575, "step": 72500 },
    { "epoch": 5.34, "learning_rate": 7.866666666666666e-05, "loss": 2.5551, "step": 73000 },
    { "epoch": 5.38, "learning_rate": 7.844444444444446e-05, "loss": 2.5545, "step": 73500 },
    { "epoch": 5.41, "learning_rate": 7.822222222222223e-05, "loss": 2.5522, "step": 74000 },
    { "epoch": 5.45, "learning_rate": 7.800000000000001e-05, "loss": 2.5522, "step": 74500 },
    { "epoch": 5.49, "learning_rate": 7.777777777777778e-05, "loss": 2.5505, "step": 75000 },
    { "epoch": 5.52, "learning_rate": 7.755555555555556e-05, "loss": 2.5484, "step": 75500 },
    { "epoch": 5.56, "learning_rate": 7.733333333333333e-05, "loss": 2.549, "step": 76000 },
    { "epoch": 5.6, "learning_rate": 7.711111111111112e-05, "loss": 2.5435, "step": 76500 },
    { "epoch": 5.63, "learning_rate": 7.688888888888889e-05, "loss": 2.5482, "step": 77000 },
    { "epoch": 5.67, "learning_rate": 7.666666666666667e-05, "loss": 2.5448, "step": 77500 },
    { "epoch": 5.71, "learning_rate": 7.644444444444445e-05, "loss": 2.5444, "step": 78000 },
    { "epoch": 5.74, "learning_rate": 7.622222222222223e-05, "loss": 2.5409, "step": 78500 },
    { "epoch": 5.78, "learning_rate": 7.6e-05, "loss": 2.54, "step": 79000 },
    { "epoch": 5.82, "learning_rate": 7.577777777777779e-05, "loss": 2.5387, "step": 79500 },
    { "epoch": 5.85, "learning_rate": 7.555555555555556e-05, "loss": 2.5386, "step": 80000 },
    { "epoch": 5.89, "learning_rate": 7.533333333333334e-05, "loss": 2.5383, "step": 80500 },
    { "epoch": 5.93, "learning_rate": 7.511111111111111e-05, "loss": 2.5369, "step": 81000 },
    { "epoch": 5.96, "learning_rate": 7.488888888888889e-05, "loss": 2.5345, "step": 81500 },
    { "epoch": 6.0, "learning_rate": 7.466666666666667e-05, "loss": 2.5339, "step": 82000 },
    { "epoch": 6.04, "learning_rate": 7.444444444444444e-05, "loss": 2.5291, "step": 82500 },
    { "epoch": 6.07, "learning_rate": 7.422222222222223e-05, "loss": 2.5299, "step": 83000 },
    { "epoch": 6.11, "learning_rate": 7.4e-05, "loss": 2.5299, "step": 83500 },
    { "epoch": 6.15, "learning_rate": 7.377777777777778e-05, "loss": 2.5257, "step": 84000 },
    { "epoch": 6.18, "learning_rate": 7.355555555555556e-05, "loss": 2.5279, "step": 84500 },
    { "epoch": 6.22, "learning_rate": 7.333333333333333e-05, "loss": 2.5258, "step": 85000 },
    { "epoch": 6.26, "learning_rate": 7.311111111111111e-05, "loss": 2.5221, "step": 85500 },
    { "epoch": 6.29, "learning_rate": 7.28888888888889e-05, "loss": 2.5254, "step": 86000 },
    { "epoch": 6.33, "learning_rate": 7.266666666666667e-05, "loss": 2.5215, "step": 86500 },
    { "epoch": 6.37, "learning_rate": 7.244444444444445e-05, "loss": 2.5213, "step": 87000 },
    { "epoch": 6.4, "learning_rate": 7.222222222222222e-05, "loss": 2.5234, "step": 87500 },
    { "epoch": 6.44, "learning_rate": 7.2e-05, "loss": 2.522, "step": 88000 },
    { "epoch": 6.48, "learning_rate": 7.177777777777777e-05, "loss": 2.5178, "step": 88500 },
    { "epoch": 6.51, "learning_rate": 7.155555555555555e-05, "loss": 2.5207, "step": 89000 },
    { "epoch": 6.55, "learning_rate": 7.133333333333334e-05, "loss": 2.5131, "step": 89500 },
    { "epoch": 6.59, "learning_rate": 7.111111111111112e-05, "loss": 2.5124, "step": 90000 },
    { "epoch": 6.62, "learning_rate": 7.088888888888889e-05, "loss": 2.5151, "step": 90500 },
    { "epoch": 6.66, "learning_rate": 7.066666666666667e-05, "loss": 2.5164, "step": 91000 },
    { "epoch": 6.7, "learning_rate": 7.044444444444444e-05, "loss": 2.5125, "step": 91500 },
    { "epoch": 6.73, "learning_rate": 7.022222222222222e-05, "loss": 2.5123, "step": 92000 },
    { "epoch": 6.77, "learning_rate": 7e-05, "loss": 2.5142, "step": 92500 },
    { "epoch": 6.81, "learning_rate": 6.977777777777779e-05, "loss": 2.51, "step": 93000 },
    { "epoch": 6.84, "learning_rate": 6.955555555555556e-05, "loss": 2.5102, "step": 93500 },
    { "epoch": 6.88, "learning_rate": 6.933333333333334e-05, "loss": 2.5092, "step": 94000 },
    { "epoch": 6.91, "learning_rate": 6.911111111111111e-05, "loss": 2.5068, "step": 94500 },
    { "epoch": 6.95, "learning_rate": 6.88888888888889e-05, "loss": 2.5085, "step": 95000 },
    { "epoch": 6.99, "learning_rate": 6.866666666666666e-05, "loss": 2.5045, "step": 95500 },
    { "epoch": 7.02, "learning_rate": 6.844444444444445e-05, "loss": 2.5029, "step": 96000 },
    { "epoch": 7.06, "learning_rate": 6.822222222222222e-05, "loss": 2.5023, "step": 96500 },
    { "epoch": 7.1, "learning_rate": 6.800000000000001e-05, "loss": 2.5015, "step": 97000 },
    { "epoch": 7.13, "learning_rate": 6.777777777777778e-05, "loss": 2.5016, "step": 97500 },
    { "epoch": 7.17, "learning_rate": 6.755555555555557e-05, "loss": 2.4991, "step": 98000 },
    { "epoch": 7.21, "learning_rate": 6.733333333333333e-05, "loss": 2.4965, "step": 98500 },
    { "epoch": 7.24, "learning_rate": 6.711111111111112e-05, "loss": 2.4995, "step": 99000 },
    { "epoch": 7.28, "learning_rate": 6.688888888888889e-05, "loss": 2.4981, "step": 99500 },
    { "epoch": 7.32, "learning_rate": 6.666666666666667e-05, "loss": 2.4986, "step": 100000 },
    { "epoch": 7.35, "learning_rate": 6.644444444444444e-05, "loss": 2.4964, "step": 100500 },
    { "epoch": 7.39, "learning_rate": 6.622222222222224e-05, "loss": 2.4955, "step": 101000 },
    { "epoch": 7.43, "learning_rate": 6.6e-05, "loss": 2.4908, "step": 101500 },
    { "epoch": 7.46, "learning_rate": 6.577777777777779e-05, "loss": 2.4937, "step": 102000 },
    { "epoch": 7.5, "learning_rate": 6.555555555555556e-05, "loss": 2.4944, "step": 102500 },
    { "epoch": 7.54, "learning_rate": 6.533333333333334e-05, "loss": 2.4918, "step": 103000 },
    { "epoch": 7.57, "learning_rate": 6.511111111111111e-05, "loss": 2.4925, "step": 103500 },
    { "epoch": 7.61, "learning_rate": 6.488888888888889e-05, "loss": 2.4915, "step": 104000 },
    { "epoch": 7.65, "learning_rate": 6.466666666666666e-05, "loss": 2.4896, "step": 104500 },
    { "epoch": 7.68, "learning_rate": 6.444444444444446e-05, "loss": 2.49, "step": 105000 },
    { "epoch": 7.72, "learning_rate": 6.422222222222223e-05, "loss": 2.489, "step": 105500 },
    { "epoch": 7.76, "learning_rate": 6.400000000000001e-05, "loss": 2.4854, "step": 106000 },
    { "epoch": 7.79, "learning_rate": 6.377777777777778e-05, "loss": 2.4873, "step": 106500 },
    { "epoch": 7.83, "learning_rate": 6.355555555555556e-05, "loss": 2.4864, "step": 107000 },
    { "epoch": 7.87, "learning_rate": 6.333333333333333e-05, "loss": 2.4879, "step": 107500 },
    { "epoch": 7.9, "learning_rate": 6.311111111111112e-05, "loss": 2.486, "step": 108000 },
    { "epoch": 7.94, "learning_rate": 6.28888888888889e-05, "loss": 2.4845, "step": 108500 },
    { "epoch": 7.98, "learning_rate": 6.266666666666667e-05, "loss": 2.484, "step": 109000 },
    { "epoch": 8.01, "learning_rate": 6.244444444444445e-05, "loss": 2.4848, "step": 109500 },
    { "epoch": 8.05, "learning_rate": 6.222222222222222e-05, "loss": 2.4794, "step": 110000 },
    { "epoch": 8.09, "learning_rate": 6.2e-05, "loss": 2.4809, "step": 110500 },
    { "epoch": 8.12, "learning_rate": 6.177777777777779e-05, "loss": 2.4799, "step": 111000 },
    { "epoch": 8.16, "learning_rate": 6.155555555555555e-05, "loss": 2.4771, "step": 111500 },
    { "epoch": 8.2, "learning_rate": 6.133333333333334e-05, "loss": 2.477, "step": 112000 },
    { "epoch": 8.23, "learning_rate": 6.111111111111112e-05, "loss": 2.4769, "step": 112500 },
    { "epoch": 8.27, "learning_rate": 6.08888888888889e-05, "loss": 2.4734, "step": 113000 },
    { "epoch": 8.31, "learning_rate": 6.066666666666667e-05, "loss": 2.4773, "step": 113500 },
    { "epoch": 8.34, "learning_rate": 6.044444444444445e-05, "loss": 2.4742, "step": 114000 },
    { "epoch": 8.38, "learning_rate": 6.0222222222222225e-05, "loss": 2.4734, "step": 114500 },
    { "epoch": 8.42, "learning_rate": 6e-05, "loss": 2.4734, "step": 115000 },
    { "epoch": 8.45, "learning_rate": 5.977777777777778e-05, "loss": 2.4724, "step": 115500 },
    { "epoch": 8.49, "learning_rate": 5.9555555555555554e-05, "loss": 2.4764, "step": 116000 },
    { "epoch": 8.52, "learning_rate": 5.9333333333333343e-05, "loss": 2.4712, "step": 116500 },
    { "epoch": 8.56, "learning_rate": 5.911111111111112e-05, "loss": 2.4706, "step": 117000 },
    { "epoch": 8.6, "learning_rate": 5.8888888888888896e-05, "loss": 2.4697, "step": 117500 },
    { "epoch": 8.63, "learning_rate": 5.866666666666667e-05, "loss": 2.473, "step": 118000 },
    { "epoch": 8.67, "learning_rate": 5.844444444444445e-05, "loss": 2.4684, "step": 118500 },
    { "epoch": 8.71, "learning_rate": 5.8222222222222224e-05, "loss": 2.47, "step": 119000 },
    { "epoch": 8.74, "learning_rate": 5.8e-05, "loss": 2.4672, "step": 119500 },
    { "epoch": 8.78, "learning_rate": 5.7777777777777776e-05, "loss": 2.4677, "step": 120000 },
    { "epoch": 8.82, "learning_rate": 5.755555555555556e-05, "loss": 2.4688, "step": 120500 },
    { "epoch": 8.85, "learning_rate": 5.7333333333333336e-05, "loss": 2.4667, "step": 121000 },
    { "epoch": 8.89, "learning_rate": 5.711111111111112e-05, "loss": 2.4659, "step": 121500 },
    { "epoch": 8.93, "learning_rate": 5.6888888888888895e-05, "loss": 2.4671, "step": 122000 },
    { "epoch": 8.96, "learning_rate": 5.666666666666667e-05, "loss": 2.4643, "step": 122500 },
    { "epoch": 9.0, "learning_rate": 5.644444444444445e-05, "loss": 2.4693, "step": 123000 },
    { "epoch": 9.04, "learning_rate": 5.622222222222222e-05, "loss": 2.4633, "step": 123500 },
    { "epoch": 9.07, "learning_rate": 5.6000000000000006e-05, "loss": 2.4625, "step": 124000 },
    { "epoch": 9.11, "learning_rate": 5.577777777777778e-05, "loss": 2.4604, "step": 124500 },
    { "epoch": 9.15, "learning_rate": 5.555555555555556e-05, "loss": 2.4615, "step": 125000 },
    { "epoch": 9.18, "learning_rate": 5.5333333333333334e-05, "loss": 2.4598, "step": 125500 },
    { "epoch": 9.22, "learning_rate": 5.511111111111111e-05, "loss": 2.4594, "step": 126000 },
    { "epoch": 9.26, "learning_rate": 5.488888888888889e-05, "loss": 2.4614, "step": 126500 },
    { "epoch": 9.29, "learning_rate": 5.466666666666666e-05, "loss": 2.4586, "step": 127000 },
    { "epoch": 9.33, "learning_rate": 5.4444444444444446e-05, "loss": 2.4582, "step": 127500 },
    { "epoch": 9.37, "learning_rate": 5.422222222222223e-05, "loss": 2.4575, "step": 128000 },
    { "epoch": 9.4, "learning_rate": 5.4000000000000005e-05, "loss": 2.4573, "step": 128500 },
    { "epoch": 9.44, "learning_rate": 5.377777777777778e-05, "loss": 2.4561, "step": 129000 },
    { "epoch": 9.48, "learning_rate": 5.355555555555556e-05, "loss": 2.4577, "step": 129500 },
    { "epoch": 9.51, "learning_rate": 5.333333333333333e-05, "loss": 2.4561, "step": 130000 },
    { "epoch": 9.55, "learning_rate": 5.311111111111111e-05, "loss": 2.4537, "step": 130500 },
    { "epoch": 9.59, "learning_rate": 5.2888888888888885e-05, "loss": 2.4535, "step": 131000 },
    { "epoch": 9.62, "learning_rate": 5.266666666666666e-05, "loss": 2.4538, "step": 131500 },
    { "epoch": 9.66, "learning_rate": 5.244444444444445e-05, "loss": 2.4522, "step": 132000 },
    { "epoch": 9.7, "learning_rate": 5.222222222222223e-05, "loss": 2.4503, "step": 132500 },
    { "epoch": 9.73, "learning_rate": 5.2000000000000004e-05, "loss": 2.4536, "step": 133000 },
    { "epoch": 9.77, "learning_rate": 5.177777777777778e-05, "loss": 2.4518, "step": 133500 },
    { "epoch": 9.81, "learning_rate": 5.1555555555555556e-05, "loss": 2.4516, "step": 134000 },
    { "epoch": 9.84, "learning_rate": 5.133333333333333e-05, "loss": 2.4519, "step": 134500 },
    { "epoch": 9.88, "learning_rate": 5.111111111111111e-05, "loss": 2.4496, "step": 135000 },
    { "epoch": 9.92, "learning_rate": 5.0888888888888884e-05, "loss": 2.4468, "step": 135500 },
    { "epoch": 9.95, "learning_rate": 5.0666666666666674e-05, "loss": 2.4493, "step": 136000 },
    { "epoch": 9.99, "learning_rate": 5.044444444444445e-05, "loss": 2.4487, "step": 136500 },
    { "epoch": 10.02, "learning_rate": 5.0222222222222226e-05, "loss": 2.4481, "step": 137000 },
    { "epoch": 10.06, "learning_rate": 5e-05, "loss": 2.448, "step": 137500 },
    { "epoch": 10.1, "learning_rate": 4.977777777777778e-05, "loss": 2.4451, "step": 138000 },
    { "epoch": 10.13, "learning_rate": 4.955555555555556e-05, "loss": 2.4455, "step": 138500 },
    { "epoch": 10.17, "learning_rate": 4.933333333333334e-05, "loss": 2.4434, "step": 139000 },
    { "epoch": 10.21, "learning_rate": 4.9111111111111114e-05, "loss": 2.4422, "step": 139500 },
    { "epoch": 10.24, "learning_rate": 4.888888888888889e-05, "loss": 2.4451, "step": 140000 },
    { "epoch": 10.28, "learning_rate": 4.866666666666667e-05, "loss": 2.4431, "step": 140500 },
    { "epoch": 10.32, "learning_rate": 4.844444444444445e-05, "loss": 2.4455, "step": 141000 },
    { "epoch": 10.35, "learning_rate": 4.8222222222222225e-05, "loss": 2.4408, "step": 141500 },
    { "epoch": 10.39, "learning_rate": 4.8e-05, "loss": 2.442, "step": 142000 },
    { "epoch": 10.43, "learning_rate": 4.7777777777777784e-05, "loss": 2.4447, "step": 142500 },
    { "epoch": 10.46, "learning_rate": 4.755555555555556e-05, "loss": 2.4418, "step": 143000 },
    { "epoch": 10.5, "learning_rate": 4.7333333333333336e-05, "loss": 2.4407, "step": 143500 },
    { "epoch": 10.54, "learning_rate": 4.711111111111111e-05, "loss": 2.4401, "step": 144000 },
    { "epoch": 10.57, "learning_rate": 4.6888888888888895e-05, "loss": 2.4397, "step": 144500 },
    { "epoch": 10.61, "learning_rate": 4.666666666666667e-05, "loss": 2.4409, "step": 145000 },
    { "epoch": 10.65, "learning_rate": 4.644444444444445e-05, "loss": 2.4409, "step": 145500 },
    { "epoch": 10.68, "learning_rate": 4.6222222222222224e-05, "loss": 2.4411, "step": 146000 },
    { "epoch": 10.72, "learning_rate": 4.600000000000001e-05, "loss": 2.4361, "step": 146500 },
    { "epoch": 10.76, "learning_rate": 4.577777777777778e-05, "loss": 2.4354, "step": 147000 },
    { "epoch": 10.79, "learning_rate": 4.555555555555556e-05, "loss": 2.4363, "step": 147500 },
    { "epoch": 10.83, "learning_rate": 4.5333333333333335e-05, "loss": 2.435, "step": 148000 },
    { "epoch": 10.87, "learning_rate": 4.511111111111112e-05, "loss": 2.4403, "step": 148500 },
    { "epoch": 10.9, "learning_rate": 4.4888888888888894e-05, "loss": 2.4357, "step": 149000 },
    { "epoch": 10.94, "learning_rate": 4.466666666666667e-05, "loss": 2.4357, "step": 149500 },
    { "epoch": 10.98, "learning_rate": 4.4444444444444447e-05, "loss": 2.4365, "step": 150000 },
    { "epoch": 11.01, "learning_rate": 4.422222222222222e-05, "loss": 2.4343, "step": 150500 },
    { "epoch": 11.05, "learning_rate": 4.4000000000000006e-05, "loss": 2.4338, "step": 151000 },
    { "epoch": 11.09, "learning_rate": 4.377777777777778e-05, "loss": 2.4312, "step": 151500 },
    { "epoch": 11.12, "learning_rate": 4.355555555555556e-05, "loss": 2.4311, "step": 152000 },
    { "epoch": 11.16, "learning_rate": 4.3333333333333334e-05, "loss": 2.4311, "step": 152500 },
    { "epoch": 11.2, "learning_rate": 4.311111111111111e-05, "loss": 2.4309, "step": 153000 },
    { "epoch": 11.23, "learning_rate": 4.2888888888888886e-05, "loss": 2.4309, "step": 153500 },
    { "epoch": 11.27, "learning_rate": 4.266666666666667e-05, "loss": 2.4308, "step": 154000 },
    { "epoch": 11.31, "learning_rate": 4.2444444444444445e-05, "loss": 2.433, "step": 154500 },
    { "epoch": 11.34, "learning_rate": 4.222222222222222e-05, "loss": 2.427, "step": 155000 },
    { "epoch": 11.38, "learning_rate": 4.2e-05, "loss": 2.4294, "step": 155500 },
    { "epoch": 11.42, "learning_rate": 4.177777777777778e-05, "loss": 2.4279, "step": 156000 },
    { "epoch": 11.45, "learning_rate": 4.155555555555556e-05, "loss": 2.4292, "step": 156500 },
    { "epoch": 11.49, "learning_rate": 4.133333333333333e-05, "loss": 2.4267, "step": 157000 },
    { "epoch": 11.52, "learning_rate": 4.111111111111111e-05, "loss": 2.4288, "step": 157500 },
    { "epoch": 11.56, "learning_rate": 4.088888888888889e-05, "loss": 2.4275, "step": 158000 },
    { "epoch": 11.6, "learning_rate": 4.066666666666667e-05, "loss": 2.4297, "step": 158500 },
    { "epoch": 11.63, "learning_rate": 4.0444444444444444e-05, "loss": 2.4271, "step": 159000 },
    { "epoch": 11.67, "learning_rate": 4.022222222222222e-05, "loss": 2.4256, "step": 159500 },
    { "epoch": 11.71, "learning_rate": 4e-05, "loss": 2.4283, "step": 160000 },
    { "epoch": 11.74, "learning_rate": 3.977777777777778e-05, "loss": 2.4277, "step": 160500 },
    { "epoch": 11.78, "learning_rate": 3.9555555555555556e-05, "loss": 2.4232, "step": 161000 },
    { "epoch": 11.82, "learning_rate": 3.933333333333333e-05, "loss": 2.4236, "step": 161500 },
    { "epoch": 11.85, "learning_rate": 3.9111111111111115e-05, "loss": 2.4236, "step": 162000 },
    { "epoch": 11.89, "learning_rate": 3.888888888888889e-05, "loss": 2.4226, "step": 162500 },
    { "epoch": 11.93, "learning_rate": 3.866666666666667e-05, "loss": 2.422, "step": 163000 },
    { "epoch": 11.96, "learning_rate": 3.844444444444444e-05, "loss": 2.4236, "step": 163500 },
    { "epoch": 12.0, "learning_rate": 3.8222222222222226e-05, "loss": 2.423, "step": 164000 },
    { "epoch": 12.04, "learning_rate": 3.8e-05, "loss": 2.4208, "step": 164500 },
    { "epoch": 12.07, "learning_rate": 3.777777777777778e-05, "loss": 2.4214, "step": 165000 },
    { "epoch": 12.11, "learning_rate": 3.7555555555555554e-05, "loss": 2.4203, "step": 165500 },
    { "epoch": 12.15, "learning_rate": 3.733333333333334e-05, "loss": 2.4207, "step": 166000 },
    { "epoch": 12.18, "learning_rate": 3.7111111111111113e-05, "loss": 2.4177, "step": 166500 },
    { "epoch": 12.22, "learning_rate": 3.688888888888889e-05, "loss": 2.4196, "step": 167000 },
    { "epoch": 12.26, "learning_rate": 3.6666666666666666e-05, "loss": 2.4174, "step": 167500 },
    { "epoch": 12.29, "learning_rate": 3.644444444444445e-05, "loss": 2.42, "step": 168000 },
    { "epoch": 12.33, "learning_rate": 3.6222222222222225e-05, "loss": 2.4194, "step": 168500 },
    { "epoch": 12.37, "learning_rate": 3.6e-05, "loss": 2.4155, "step": 169000 },
    { "epoch": 12.4, "learning_rate": 3.577777777777778e-05, "loss": 2.4149, "step": 169500 },
    { "epoch": 12.44, "learning_rate": 3.555555555555556e-05, "loss": 2.418, "step": 170000 },
    { "epoch": 12.48, "learning_rate": 3.5333333333333336e-05, "loss": 2.4199, "step": 170500 },
    { "epoch": 12.51, "learning_rate": 3.511111111111111e-05, "loss": 2.4162, "step": 171000 },
    { "epoch": 12.55, "learning_rate": 3.4888888888888895e-05, "loss": 2.4188, "step": 171500 },
    { "epoch": 12.59, "learning_rate": 3.466666666666667e-05, "loss": 2.4146, "step": 172000 },
    { "epoch": 12.62, "learning_rate": 3.444444444444445e-05, "loss": 2.4167, "step": 172500 },
    { "epoch": 12.66, "learning_rate": 3.4222222222222224e-05, "loss": 2.414, "step": 173000 },
    { "epoch": 12.7, "learning_rate": 3.4000000000000007e-05, "loss": 2.4156, "step": 173500 },
    { "epoch": 12.73, "learning_rate": 3.377777777777778e-05, "loss": 2.4155, "step": 174000 },
    { "epoch": 12.77, "learning_rate": 3.355555555555556e-05, "loss": 2.4139, "step": 174500 },
    { "epoch": 12.81, "learning_rate": 3.3333333333333335e-05, "loss": 2.4154, "step": 175000 },
    { "epoch": 12.84, "learning_rate": 3.311111111111112e-05, "loss": 2.415, "step": 175500 },
    { "epoch": 12.88, "learning_rate": 3.2888888888888894e-05, "loss": 2.4137, "step": 176000 },
    { "epoch": 12.92, "learning_rate": 3.266666666666667e-05, "loss": 2.4126, "step": 176500 },
    { "epoch": 12.95, "learning_rate": 3.2444444444444446e-05, "loss": 2.4139, "step": 177000 },
    { "epoch": 12.99, "learning_rate": 3.222222222222223e-05, "loss": 2.4129, "step": 177500 },
    { "epoch": 13.03, "learning_rate": 3.2000000000000005e-05, "loss": 2.4151, "step": 178000 },
    { "epoch": 13.06, "learning_rate": 3.177777777777778e-05, "loss": 2.4109, "step": 178500 },
    { "epoch": 13.1, "learning_rate": 3.155555555555556e-05, "loss": 2.4114, "step": 179000 },
    { "epoch": 13.13, "learning_rate": 3.1333333333333334e-05, "loss": 2.4106, "step": 179500 },
    { "epoch": 13.17, "learning_rate": 3.111111111111111e-05, "loss": 2.4085, "step": 180000 },
    { "epoch": 13.21, "learning_rate": 3.088888888888889e-05, "loss": 2.4076, "step": 180500 },
    { "epoch": 13.24, "learning_rate": 3.066666666666667e-05, "loss": 2.4094, "step": 181000 },
    { "epoch": 13.28, "learning_rate": 3.044444444444445e-05, "loss": 2.4089, "step": 181500 },
    { "epoch": 13.32, "learning_rate": 3.0222222222222225e-05, "loss": 2.4082, "step": 182000 },
    { "epoch": 13.35, "learning_rate": 3e-05, "loss": 2.4052, "step": 182500 },
    { "epoch": 13.39, "learning_rate": 2.9777777777777777e-05, "loss": 2.4079, "step": 183000 },
    { "epoch": 13.43, "learning_rate": 2.955555555555556e-05, "loss": 2.405, "step": 183500 },
    { "epoch": 13.46, "learning_rate": 2.9333333333333336e-05, "loss": 2.41, "step": 184000 },
    { "epoch": 13.5, "learning_rate": 2.9111111111111112e-05, "loss": 2.4085, "step": 184500 },
    { "epoch": 13.54, "learning_rate": 2.8888888888888888e-05, "loss": 2.4067, "step": 185000 },
    { "epoch": 13.57, "learning_rate": 2.8666666666666668e-05, "loss": 2.4065, "step": 185500 },
    { "epoch": 13.61, "learning_rate": 2.8444444444444447e-05, "loss": 2.4059, "step": 186000 },
    { "epoch": 13.65, "learning_rate": 2.8222222222222223e-05, "loss": 2.4049, "step": 186500 },
    { "epoch": 13.68, "learning_rate": 2.8000000000000003e-05, "loss": 2.4061, "step": 187000 },
    { "epoch": 13.72, "learning_rate": 2.777777777777778e-05, "loss": 2.4067, "step": 187500 },
    { "epoch": 13.76, "learning_rate": 2.7555555555555555e-05, "loss": 2.4047, "step": 188000 },
    { "epoch": 13.79, "learning_rate": 2.733333333333333e-05, "loss": 2.4066, "step": 188500 },
    { "epoch": 13.83, "learning_rate": 2.7111111111111114e-05, "loss": 2.4035, "step": 189000 },
    { "epoch": 13.87, "learning_rate": 2.688888888888889e-05, "loss": 2.4038, "step": 189500 },
    { "epoch": 13.9, "learning_rate": 2.6666666666666667e-05, "loss": 2.4012, "step": 190000 },
    { "epoch": 13.94, "learning_rate": 2.6444444444444443e-05, "loss": 2.4037, "step": 190500 },
    { "epoch": 13.98, "learning_rate": 2.6222222222222226e-05, "loss": 2.4028, "step": 191000 },
    { "epoch": 14.01, "learning_rate": 2.6000000000000002e-05, "loss": 2.4041, "step": 191500 },
    { "epoch": 14.05, "learning_rate": 2.5777777777777778e-05, "loss": 2.4024, "step": 192000 },
    { "epoch": 14.09, "learning_rate": 2.5555555555555554e-05, "loss": 2.3971, "step": 192500 },
    { "epoch": 14.12, "learning_rate": 2.5333333333333337e-05, "loss": 2.3994, "step": 193000 },
    { "epoch": 14.16, "learning_rate": 2.5111111111111113e-05, "loss": 2.4016, "step": 193500 },
    { "epoch": 14.2, "learning_rate": 2.488888888888889e-05, "loss": 2.3994, "step": 194000 },
    { "epoch": 14.23, "learning_rate": 2.466666666666667e-05, "loss": 2.4007, "step": 194500 },
    { "epoch": 14.27, "learning_rate": 2.4444444444444445e-05, "loss": 2.3988, "step": 195000 },
    { "epoch": 14.31, "learning_rate": 2.4222222222222224e-05, "loss": 2.4009, "step": 195500 },
    { "epoch": 14.34, "learning_rate": 2.4e-05, "loss": 2.3996, "step": 196000 },
    { "epoch": 14.38, "learning_rate": 2.377777777777778e-05, "loss": 2.3971, "step": 196500 },
    { "epoch": 14.42, "learning_rate": 2.3555555555555556e-05, "loss": 2.3975, "step": 197000 },
    { "epoch": 14.45, "learning_rate": 2.3333333333333336e-05, "loss": 2.3999, "step": 197500 },
    { "epoch": 14.49, "learning_rate": 2.3111111111111112e-05, "loss": 2.4001, "step": 198000 },
    { "epoch": 14.53, "learning_rate": 2.288888888888889e-05, "loss": 2.397, "step": 198500 },
    { "epoch": 14.56, "learning_rate": 2.2666666666666668e-05, "loss": 2.3955, "step": 199000 },
    { "epoch": 14.6, "learning_rate": 2.2444444444444447e-05, "loss": 2.4004, "step": 199500 },
    { "epoch": 14.63, "learning_rate": 2.2222222222222223e-05, "loss": 2.3956, "step": 200000 },
    { "epoch": 14.67, "learning_rate": 2.2000000000000003e-05, "loss": 2.3958, "step": 200500 },
    { "epoch": 14.71, "learning_rate": 2.177777777777778e-05, "loss": 2.3969, "step": 201000 },
    { "epoch": 14.74, "learning_rate": 2.1555555555555555e-05, "loss": 2.3975, "step": 201500 },
    { "epoch": 14.78, "learning_rate": 2.1333333333333335e-05, "loss": 2.3942, "step": 202000 },
    { "epoch": 14.82, "learning_rate": 2.111111111111111e-05, "loss": 2.3981, "step": 202500 },
    { "epoch": 14.85, "learning_rate": 2.088888888888889e-05, "loss": 2.3963, "step": 203000 },
    { "epoch": 14.89, "learning_rate": 2.0666666666666666e-05, "loss": 2.3942, "step": 203500 },
    { "epoch": 14.93, "learning_rate": 2.0444444444444446e-05, "loss": 2.3972, "step": 204000 },
    { "epoch": 14.96, "learning_rate": 2.0222222222222222e-05, "loss": 2.3932, "step": 204500 },
    { "epoch": 15.0, "learning_rate": 2e-05, "loss": 2.3976, "step": 205000 },
    { "epoch": 15.04, "learning_rate": 1.9777777777777778e-05, "loss": 2.3939, "step": 205500 },
    { "epoch": 15.07, "learning_rate": 1.9555555555555557e-05, "loss": 2.3921, "step": 206000 },
    { "epoch": 15.11, "learning_rate": 1.9333333333333333e-05, "loss": 2.3936, "step": 206500 },
    { "epoch": 15.15, "learning_rate": 1.9111111111111113e-05, "loss": 2.3935, "step": 207000 },
    { "epoch": 15.18, "learning_rate": 1.888888888888889e-05, "loss": 2.3946, "step": 207500 },
    { "epoch": 15.22, "learning_rate": 1.866666666666667e-05, "loss": 2.3919, "step": 208000 },
    { "epoch": 15.26, "learning_rate": 1.8444444444444445e-05, "loss": 2.3925, "step": 208500 },
    { "epoch": 15.29, "learning_rate": 1.8222222222222224e-05, "loss": 2.3918, "step": 209000 },
    { "epoch": 15.33, "learning_rate": 1.8e-05, "loss": 2.3935, "step": 209500 },
    { "epoch": 15.37, "learning_rate": 1.777777777777778e-05, "loss": 2.3916, "step": 210000 },
    { "epoch": 15.4, "learning_rate": 1.7555555555555556e-05, "loss": 2.3878, "step": 210500 },
    { "epoch": 15.44, "learning_rate": 1.7333333333333336e-05, "loss": 2.3892, "step": 211000 },
    { "epoch": 15.48, "learning_rate": 1.7111111111111112e-05, "loss": 2.3902, "step": 211500 },
    { "epoch": 15.51, "learning_rate": 1.688888888888889e-05, "loss": 2.389, "step": 212000 },
    { "epoch": 15.55, "learning_rate": 1.6666666666666667e-05, "loss": 2.39, "step": 212500 },
    { "epoch": 15.59, "learning_rate": 1.6444444444444447e-05, "loss": 2.3869, "step": 213000 },
    { "epoch": 15.62, "learning_rate": 1.6222222222222223e-05, "loss": 2.3899, "step": 213500 },
    { "epoch": 15.66, "learning_rate": 1.6000000000000003e-05, "loss": 2.3903, "step": 214000 },
    { "epoch": 15.7, "learning_rate": 1.577777777777778e-05, "loss": 2.3861, "step": 214500 },
    { "epoch": 15.73, "learning_rate": 1.5555555555555555e-05, "loss": 2.3861, "step": 215000 },
    { "epoch": 15.77, "learning_rate": 1.5333333333333334e-05, "loss": 2.3904, "step": 215500 },
    { "epoch": 15.81, "learning_rate": 1.5111111111111112e-05, "loss": 2.3883, "step": 216000 },
    { "epoch": 15.84, "learning_rate": 1.4888888888888888e-05, "loss": 2.3879, "step": 216500 },
    { "epoch": 15.88, "learning_rate": 1.4666666666666668e-05, "loss": 2.3867, "step": 217000 },
    { "epoch": 15.92, "learning_rate": 1.4444444444444444e-05, "loss": 2.3898, "step": 217500 },
    { "epoch": 15.95, "learning_rate": 1.4222222222222224e-05, "loss": 2.3882, "step": 218000 },
    { "epoch": 15.99, "learning_rate": 1.4000000000000001e-05, "loss": 2.385, "step": 218500 },
    { "epoch": 16.03, "learning_rate": 1.3777777777777778e-05, "loss": 2.3853, "step": 219000 },
    { "epoch": 16.06, "learning_rate": 1.3555555555555557e-05, "loss": 2.3855, "step": 219500 },
    { "epoch": 16.1, "learning_rate": 1.3333333333333333e-05, "loss": 2.3869, "step": 220000 },
    { "epoch": 16.13, "learning_rate": 1.3111111111111113e-05, "loss": 2.3859, "step": 220500 },
    { "epoch": 16.17, "learning_rate": 1.2888888888888889e-05, "loss": 2.3855, "step": 221000 },
    { "epoch": 16.21, "learning_rate": 1.2666666666666668e-05, "loss": 2.3858, "step": 221500 },
    { "epoch": 16.24, "learning_rate": 1.2444444444444445e-05, "loss": 2.3846, "step": 222000 },
    { "epoch": 16.28, "learning_rate": 1.2222222222222222e-05, "loss": 2.3842, "step": 222500 },
    { "epoch": 16.32, "learning_rate": 1.2e-05, "loss": 2.3814, "step": 223000 },
    { "epoch": 16.35, "learning_rate": 1.1777777777777778e-05, "loss": 2.3837, "step": 223500 },
    { "epoch": 16.39, "learning_rate": 1.1555555555555556e-05, "loss": 2.3841, "step": 224000 },
    { "epoch": 16.43, "learning_rate": 1.1333333333333334e-05, "loss": 2.3829, "step": 224500 },
    { "epoch": 16.46, "learning_rate": 1.1111111111111112e-05, "loss": 2.3849, "step": 225000 },
    { "epoch": 16.5, "learning_rate": 1.088888888888889e-05, "loss": 2.383, "step": 225500 },
    { "epoch": 16.54, "learning_rate": 1.0666666666666667e-05, "loss": 2.3795, "step": 226000 },
    { "epoch": 16.57, "learning_rate": 1.0444444444444445e-05, "loss": 2.3817, "step": 226500 },
    { "epoch": 16.61, "learning_rate": 1.0222222222222223e-05, "loss": 2.3826, "step": 227000 },
    { "epoch": 16.65, "learning_rate": 1e-05, "loss": 2.3825, "step": 227500 },
    { "epoch": 16.68, "learning_rate": 9.777777777777779e-06, "loss": 2.3789, "step": 228000 },
    { "epoch": 16.72, "learning_rate": 9.555555555555556e-06, "loss": 2.3865, "step": 228500 },
    { "epoch": 16.76, "learning_rate": 9.333333333333334e-06, "loss": 2.3825, "step": 229000 },
    { "epoch": 16.79, "learning_rate": 9.111111111111112e-06, "loss": 2.3808, "step": 229500 },
    { "epoch": 16.83, "learning_rate": 8.88888888888889e-06, "loss": 2.3807, "step": 230000 },
    { "epoch": 16.87, "learning_rate": 8.666666666666668e-06, "loss": 2.3813, "step": 230500 },
    { "epoch": 16.9, "learning_rate": 8.444444444444446e-06, "loss": 2.3817, "step": 231000 },
    { "epoch": 16.94, "learning_rate": 8.222222222222223e-06, "loss": 2.3824, "step": 231500 },
    { "epoch": 16.98, "learning_rate": 8.000000000000001e-06, "loss": 2.3809, "step": 232000 },
    { "epoch": 17.01, "learning_rate": 7.777777777777777e-06, "loss": 2.3798, "step": 232500 },
    { "epoch": 17.05, "learning_rate": 7.555555555555556e-06, "loss": 2.377, "step": 233000 },
    { "epoch": 17.09, "learning_rate": 7.333333333333334e-06, "loss": 2.3801, "step": 233500 },
    { "epoch": 17.12, "learning_rate": 7.111111111111112e-06, "loss": 2.3801, "step": 234000 },
    { "epoch": 17.16, "learning_rate": 6.888888888888889e-06, "loss": 2.3816, "step": 234500 },
    { "epoch": 17.2, "learning_rate": 6.666666666666667e-06, "loss": 2.3772, "step": 235000 },
    { "epoch": 17.23, "learning_rate": 6.4444444444444445e-06, "loss": 2.3818, "step": 235500 },
    { "epoch": 17.27, "learning_rate": 6.222222222222222e-06, "loss": 2.3786, "step": 236000 },
    { "epoch": 17.31, "learning_rate": 6e-06, "loss": 2.3777, "step": 236500 },
    { "epoch": 17.34, "learning_rate": 5.777777777777778e-06, "loss": 2.3774, "step": 237000 },
    { "epoch": 17.38, "learning_rate": 5.555555555555556e-06, "loss": 2.3787, "step": 237500 },
    { "epoch": 17.42, "learning_rate": 5.333333333333334e-06, "loss": 2.3772, "step": 238000 },
    { "epoch": 17.45, "learning_rate": 5.1111111111111115e-06, "loss": 2.3776, "step": 238500 },
    { "epoch": 17.49, "learning_rate": 4.888888888888889e-06, "loss": 2.3776, "step": 239000 },
    { "epoch": 17.53, "learning_rate": 4.666666666666667e-06, "loss": 2.3782, "step": 239500 },
    { "epoch": 17.56, "learning_rate": 4.444444444444445e-06, "loss": 2.3755, "step": 240000 },
    { "epoch": 17.6, "learning_rate": 4.222222222222223e-06, "loss": 2.3766, "step": 240500 },
    { "epoch": 17.63, "learning_rate": 4.000000000000001e-06, "loss": 2.3774, "step": 241000 },
    { "epoch": 17.67, "learning_rate": 3.777777777777778e-06, "loss": 2.3754, "step": 241500 },
    { "epoch": 17.71, "learning_rate": 3.555555555555556e-06, "loss": 2.3785, "step": 242000 },
    { "epoch": 17.74, "learning_rate": 3.3333333333333333e-06, "loss": 2.3746, "step": 242500 },
    { "epoch": 17.78, "learning_rate": 3.111111111111111e-06, "loss": 2.3788, "step": 243000 },
    { "epoch": 17.82, "learning_rate": 2.888888888888889e-06, "loss": 2.3751, "step": 243500 },
    { "epoch": 17.85, "learning_rate": 2.666666666666667e-06, "loss": 2.3747, "step": 244000 },
    { "epoch": 17.89, "learning_rate": 2.4444444444444447e-06, "loss": 2.3758, "step": 244500 },
    { "epoch": 17.93, "learning_rate": 2.2222222222222225e-06, "loss": 2.3738, "step": 245000 },
    { "epoch": 17.96, "learning_rate": 2.0000000000000003e-06, "loss": 2.3761, "step": 245500 },
    { "epoch": 18.0, "learning_rate": 1.777777777777778e-06, "loss": 2.3765, "step": 246000 },
    { "epoch": 18.04, "learning_rate": 1.5555555555555556e-06, "loss": 2.3742, "step": 246500 },
    { "epoch": 18.07, "learning_rate": 1.3333333333333334e-06, "loss": 2.3765, "step": 247000 },
    { "epoch": 18.11, "learning_rate": 1.1111111111111112e-06, "loss": 2.3735, "step": 247500 },
    { "epoch": 18.15, "learning_rate": 8.88888888888889e-07, "loss": 2.3758, "step": 248000 },
    { "epoch": 18.18, "learning_rate": 6.666666666666667e-07, "loss": 2.3729, "step": 248500 },
    { "epoch": 18.22, "learning_rate": 4.444444444444445e-07, "loss": 2.3758, "step": 249000 },
    { "epoch": 18.26, "learning_rate": 2.2222222222222224e-07, "loss": 2.3745, "step": 249500 },
    { "epoch": 18.29, "learning_rate": 0.0, "loss": 2.3726, "step": 250000 },
    { "epoch": 18.29, "step": 250000, "total_flos": 4.212323233189429e+18, "train_loss": 2.6346825883789062, "train_runtime": 359539.1522, "train_samples_per_second": 356.011, "train_steps_per_second": 0.695 }
  ],
  "max_steps": 250000,
  "num_train_epochs": 19,
  "total_flos": 4.212323233189429e+18,
  "trial_name": null,
  "trial_params": null
}