{ |
|
"best_metric": 0.6814723610877991, |
|
"best_model_checkpoint": "./jako_mbartLarge_100p_run1/checkpoint-87009", |
|
"epoch": 4.0, |
|
"eval_steps": 500, |
|
"global_step": 174018, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.97e-05, |
|
"loss": 1.9891, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.996196669018189e-05, |
|
"loss": 1.5354, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9923626660123305e-05, |
|
"loss": 1.3833, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.988528663006472e-05, |
|
"loss": 1.2912, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.984694660000614e-05, |
|
"loss": 1.2355, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.980860656994755e-05, |
|
"loss": 1.1927, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.977026653988897e-05, |
|
"loss": 1.166, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.9731926509830386e-05, |
|
"loss": 1.1238, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.9693586479771804e-05, |
|
"loss": 1.0972, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.9655246449713215e-05, |
|
"loss": 1.0964, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.961690641965463e-05, |
|
"loss": 1.0611, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.957871974971629e-05, |
|
"loss": 1.0487, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.9540379719657705e-05, |
|
"loss": 1.0166, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.9502039689599116e-05, |
|
"loss": 1.0162, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.946369965954053e-05, |
|
"loss": 0.9952, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.942535962948195e-05, |
|
"loss": 0.9853, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.9387096279483484e-05, |
|
"loss": 0.9835, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.9348832929485017e-05, |
|
"loss": 0.9634, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.9310492899426434e-05, |
|
"loss": 0.9532, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.927215286936785e-05, |
|
"loss": 0.955, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.923381283930927e-05, |
|
"loss": 0.9392, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.919547280925069e-05, |
|
"loss": 0.9403, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.91571327791921e-05, |
|
"loss": 0.9177, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.9118792749133516e-05, |
|
"loss": 0.919, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.908045271907493e-05, |
|
"loss": 0.9036, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.9042189369076466e-05, |
|
"loss": 0.9019, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.9003849339017884e-05, |
|
"loss": 0.8969, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.89655093089593e-05, |
|
"loss": 0.891, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.892716927890072e-05, |
|
"loss": 0.8904, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.888882924884213e-05, |
|
"loss": 0.8833, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.8850489218783554e-05, |
|
"loss": 0.8668, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.8812149188724965e-05, |
|
"loss": 0.8692, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.877380915866638e-05, |
|
"loss": 0.8617, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.8735469128607794e-05, |
|
"loss": 0.866, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.869720577860933e-05, |
|
"loss": 0.8592, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.8658865748550744e-05, |
|
"loss": 0.8586, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.862052571849217e-05, |
|
"loss": 0.84, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.8582262368493695e-05, |
|
"loss": 0.84, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.854392233843512e-05, |
|
"loss": 0.8397, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.850581234855688e-05, |
|
"loss": 0.8338, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.84674723184983e-05, |
|
"loss": 0.838, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.842913228843972e-05, |
|
"loss": 0.8391, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.8390792258381135e-05, |
|
"loss": 0.8276, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.8352452228322546e-05, |
|
"loss": 0.8198, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.8314112198263963e-05, |
|
"loss": 0.8127, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.827577216820538e-05, |
|
"loss": 0.8184, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.82374321381468e-05, |
|
"loss": 0.817, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.8199092108088216e-05, |
|
"loss": 0.8196, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.816075207802963e-05, |
|
"loss": 0.8024, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.812241204797105e-05, |
|
"loss": 0.8133, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.808407201791246e-05, |
|
"loss": 0.8056, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.804573198785388e-05, |
|
"loss": 0.7972, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.80073919577953e-05, |
|
"loss": 0.8026, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.7969051927736716e-05, |
|
"loss": 0.7983, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.793078857773824e-05, |
|
"loss": 0.795, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.7892448547679666e-05, |
|
"loss": 0.803, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.7854108517621084e-05, |
|
"loss": 0.7911, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.7815768487562494e-05, |
|
"loss": 0.7757, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.777742845750391e-05, |
|
"loss": 0.777, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.773908842744533e-05, |
|
"loss": 0.7848, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.770074839738675e-05, |
|
"loss": 0.7834, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.766240836732816e-05, |
|
"loss": 0.7753, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.7624068337269576e-05, |
|
"loss": 0.7715, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.7585728307211e-05, |
|
"loss": 0.7714, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.754738827715241e-05, |
|
"loss": 0.7736, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.750904824709383e-05, |
|
"loss": 0.7767, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.747070821703524e-05, |
|
"loss": 0.7661, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.7432368186976664e-05, |
|
"loss": 0.7565, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.739410483697819e-05, |
|
"loss": 0.7515, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.735584148697973e-05, |
|
"loss": 0.7558, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.731750145692114e-05, |
|
"loss": 0.7642, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.727916142686256e-05, |
|
"loss": 0.7626, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.7240821396803976e-05, |
|
"loss": 0.7627, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.7202481366745394e-05, |
|
"loss": 0.7495, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.716414133668681e-05, |
|
"loss": 0.7582, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.7125877986688344e-05, |
|
"loss": 0.7481, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.708753795662976e-05, |
|
"loss": 0.7557, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.704919792657117e-05, |
|
"loss": 0.7384, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.70108578965126e-05, |
|
"loss": 0.742, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.697251786645401e-05, |
|
"loss": 0.7388, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.6934177836395425e-05, |
|
"loss": 0.7503, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.689583780633684e-05, |
|
"loss": 0.745, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.685749777627826e-05, |
|
"loss": 0.7228, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.681915774621968e-05, |
|
"loss": 0.7333, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.678081771616109e-05, |
|
"loss": 0.7371, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.674247768610251e-05, |
|
"loss": 0.733, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6704137656043925e-05, |
|
"loss": 0.7241, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_bleu": 56.112, |
|
"eval_gen_len": 17.3165, |
|
"eval_loss": 0.7177675366401672, |
|
"eval_runtime": 7671.1849, |
|
"eval_samples_per_second": 11.342, |
|
"eval_steps_per_second": 1.418, |
|
"step": 43504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.666587430604546e-05, |
|
"loss": 0.7322, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.6627534275986875e-05, |
|
"loss": 0.7051, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.658919424592829e-05, |
|
"loss": 0.6935, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.6550854215869704e-05, |
|
"loss": 0.6831, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.651259086587124e-05, |
|
"loss": 0.6787, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.6474327515872776e-05, |
|
"loss": 0.6813, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.6436140845934424e-05, |
|
"loss": 0.6841, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.639780081587584e-05, |
|
"loss": 0.6697, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.635946078581726e-05, |
|
"loss": 0.6605, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.632112075575868e-05, |
|
"loss": 0.675, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.6282780725700094e-05, |
|
"loss": 0.6623, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.6244440695641505e-05, |
|
"loss": 0.6589, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.620610066558292e-05, |
|
"loss": 0.6476, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.616776063552434e-05, |
|
"loss": 0.6551, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.612942060546576e-05, |
|
"loss": 0.6502, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.6091080575407176e-05, |
|
"loss": 0.6478, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.605274054534859e-05, |
|
"loss": 0.6485, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.6014400515290004e-05, |
|
"loss": 0.6412, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.597606048523142e-05, |
|
"loss": 0.6376, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.593772045517284e-05, |
|
"loss": 0.6385, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.589945710517437e-05, |
|
"loss": 0.6366, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.586111707511579e-05, |
|
"loss": 0.6444, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.582277704505721e-05, |
|
"loss": 0.6311, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.578451369505874e-05, |
|
"loss": 0.6306, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.574617366500016e-05, |
|
"loss": 0.6199, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.570783363494157e-05, |
|
"loss": 0.6268, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 4.566949360488299e-05, |
|
"loss": 0.623, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.5631153574824404e-05, |
|
"loss": 0.6207, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.559281354476582e-05, |
|
"loss": 0.6204, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.555447351470723e-05, |
|
"loss": 0.6215, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.551613348464866e-05, |
|
"loss": 0.6114, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.5477793454590075e-05, |
|
"loss": 0.6134, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.543953010459161e-05, |
|
"loss": 0.6142, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.5401190074533025e-05, |
|
"loss": 0.6149, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.5362850044474436e-05, |
|
"loss": 0.6123, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.5324510014415854e-05, |
|
"loss": 0.6159, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.5286246664417387e-05, |
|
"loss": 0.6001, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.5247906634358804e-05, |
|
"loss": 0.6034, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.520956660430022e-05, |
|
"loss": 0.5999, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 4.517122657424164e-05, |
|
"loss": 0.6011, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.513288654418305e-05, |
|
"loss": 0.6051, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 4.509454651412447e-05, |
|
"loss": 0.6061, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 4.5056206484065886e-05, |
|
"loss": 0.598, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.50178664540073e-05, |
|
"loss": 0.5939, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.497952642394872e-05, |
|
"loss": 0.5905, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.494118639389013e-05, |
|
"loss": 0.5968, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.490284636383155e-05, |
|
"loss": 0.5943, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.486450633377297e-05, |
|
"loss": 0.6025, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.4826166303714385e-05, |
|
"loss": 0.5918, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.47878262736558e-05, |
|
"loss": 0.5983, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.474948624359721e-05, |
|
"loss": 0.5907, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.471114621353864e-05, |
|
"loss": 0.5852, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.4672882863540164e-05, |
|
"loss": 0.5886, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.463454283348159e-05, |
|
"loss": 0.5895, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.4596202803423e-05, |
|
"loss": 0.5887, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.455786277336442e-05, |
|
"loss": 0.5953, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.451952274330583e-05, |
|
"loss": 0.5888, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.448118271324725e-05, |
|
"loss": 0.571, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.444291936324878e-05, |
|
"loss": 0.576, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.44045793331902e-05, |
|
"loss": 0.5828, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.436631598319173e-05, |
|
"loss": 0.5875, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.432797595313315e-05, |
|
"loss": 0.5773, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.428963592307457e-05, |
|
"loss": 0.5741, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.425129589301598e-05, |
|
"loss": 0.5721, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.42129558629574e-05, |
|
"loss": 0.5797, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.417461583289882e-05, |
|
"loss": 0.5787, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.4136275802840234e-05, |
|
"loss": 0.5731, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.409801245284177e-05, |
|
"loss": 0.5642, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.4059672422783185e-05, |
|
"loss": 0.5653, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.4021332392724596e-05, |
|
"loss": 0.5663, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.398299236266601e-05, |
|
"loss": 0.5762, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 4.394465233260743e-05, |
|
"loss": 0.5742, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.390631230254885e-05, |
|
"loss": 0.5739, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.3867972272490266e-05, |
|
"loss": 0.5686, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.382963224243168e-05, |
|
"loss": 0.5757, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.3791292212373095e-05, |
|
"loss": 0.5654, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.375295218231451e-05, |
|
"loss": 0.5697, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 4.371461215225593e-05, |
|
"loss": 0.5582, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 4.367627212219735e-05, |
|
"loss": 0.5603, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.363793209213876e-05, |
|
"loss": 0.5564, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 4.359959206208018e-05, |
|
"loss": 0.5682, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.356132871208171e-05, |
|
"loss": 0.566, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 4.3522988682023133e-05, |
|
"loss": 0.5485, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 4.3484648651964544e-05, |
|
"loss": 0.5558, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 4.344630862190596e-05, |
|
"loss": 0.5592, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.340796859184737e-05, |
|
"loss": 0.5611, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.337001196208938e-05, |
|
"loss": 0.5523, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_bleu": 57.9768, |
|
"eval_gen_len": 17.2796, |
|
"eval_loss": 0.6814723610877991, |
|
"eval_runtime": 7592.1301, |
|
"eval_samples_per_second": 11.46, |
|
"eval_steps_per_second": 1.433, |
|
"step": 87009 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.33316719320308e-05, |
|
"loss": 0.5617, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.3293331901972215e-05, |
|
"loss": 0.5392, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.325499187191363e-05, |
|
"loss": 0.5313, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.321665184185504e-05, |
|
"loss": 0.5247, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.317831181179646e-05, |
|
"loss": 0.5183, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.313997178173788e-05, |
|
"loss": 0.5266, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.3101631751679296e-05, |
|
"loss": 0.5256, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.3063291721620714e-05, |
|
"loss": 0.5148, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.3024951691562125e-05, |
|
"loss": 0.5097, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.298661166150354e-05, |
|
"loss": 0.5192, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.294827163144496e-05, |
|
"loss": 0.5125, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.290993160138638e-05, |
|
"loss": 0.5049, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.2871591571327795e-05, |
|
"loss": 0.4974, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.2833251541269206e-05, |
|
"loss": 0.5044, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.279491151121063e-05, |
|
"loss": 0.5026, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.275657148115204e-05, |
|
"loss": 0.5028, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.271823145109346e-05, |
|
"loss": 0.4992, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 4.267989142103488e-05, |
|
"loss": 0.5, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 4.264162807103641e-05, |
|
"loss": 0.4942, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 4.260328804097782e-05, |
|
"loss": 0.4942, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 4.2564948010919245e-05, |
|
"loss": 0.4952, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 4.252660798086066e-05, |
|
"loss": 0.4977, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 4.2488344630862195e-05, |
|
"loss": 0.4894, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 4.245008128086372e-05, |
|
"loss": 0.4887, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 4.2411741250805146e-05, |
|
"loss": 0.4813, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 4.2373401220746563e-05, |
|
"loss": 0.4851, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 4.2335061190687974e-05, |
|
"loss": 0.4865, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 4.229672116062939e-05, |
|
"loss": 0.4823, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 4.225838113057081e-05, |
|
"loss": 0.4806, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 4.222011778057234e-05, |
|
"loss": 0.4813, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 4.218177775051376e-05, |
|
"loss": 0.4734, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 4.214343772045518e-05, |
|
"loss": 0.479, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 4.210509769039659e-05, |
|
"loss": 0.4778, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.2066757660338006e-05, |
|
"loss": 0.4797, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.2028417630279424e-05, |
|
"loss": 0.474, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.199007760022084e-05, |
|
"loss": 0.4811, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.1951814250222374e-05, |
|
"loss": 0.464, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 4.191347422016379e-05, |
|
"loss": 0.4699, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 4.187513419010521e-05, |
|
"loss": 0.4655, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 4.183679416004662e-05, |
|
"loss": 0.4694, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 4.179845412998804e-05, |
|
"loss": 0.4698, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 4.1760114099929456e-05, |
|
"loss": 0.4714, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 4.1721774069870873e-05, |
|
"loss": 0.4627, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 4.168343403981229e-05, |
|
"loss": 0.4628, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 4.16450940097537e-05, |
|
"loss": 0.4565, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 4.1606753979695126e-05, |
|
"loss": 0.466, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 4.156841394963654e-05, |
|
"loss": 0.4635, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 4.1530073919577955e-05, |
|
"loss": 0.4694, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 4.149181056957949e-05, |
|
"loss": 0.4638, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 4.1453470539520905e-05, |
|
"loss": 0.4626, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 4.1415130509462316e-05, |
|
"loss": 0.4632, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 4.137679047940374e-05, |
|
"loss": 0.455, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 4.133845044934516e-05, |
|
"loss": 0.4553, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 4.130018709934669e-05, |
|
"loss": 0.4583, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 4.126192374934822e-05, |
|
"loss": 0.4596, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 4.1223583719289635e-05, |
|
"loss": 0.4634, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 4.118524368923106e-05, |
|
"loss": 0.4595, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 4.1146980339232585e-05, |
|
"loss": 0.4441, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 4.110864030917401e-05, |
|
"loss": 0.4517, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 4.107030027911542e-05, |
|
"loss": 0.4531, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.103196024905684e-05, |
|
"loss": 0.4556, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.099362021899825e-05, |
|
"loss": 0.4481, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.095528018893967e-05, |
|
"loss": 0.4483, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.0916940158881084e-05, |
|
"loss": 0.4457, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.08786001288225e-05, |
|
"loss": 0.452, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.084026009876392e-05, |
|
"loss": 0.4511, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.080192006870534e-05, |
|
"loss": 0.4466, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.0763580038646755e-05, |
|
"loss": 0.4368, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.0725240008588166e-05, |
|
"loss": 0.443, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.0686976658589705e-05, |
|
"loss": 0.441, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.0648636628531116e-05, |
|
"loss": 0.4503, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 4.0610296598472534e-05, |
|
"loss": 0.4487, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.057195656841395e-05, |
|
"loss": 0.4482, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.053361653835537e-05, |
|
"loss": 0.4449, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.049527650829679e-05, |
|
"loss": 0.4514, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.04569364782382e-05, |
|
"loss": 0.4407, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 4.041867312823974e-05, |
|
"loss": 0.443, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 4.038033309818115e-05, |
|
"loss": 0.434, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.0341993068122566e-05, |
|
"loss": 0.4384, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.030365303806398e-05, |
|
"loss": 0.4334, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 4.0265389688065516e-05, |
|
"loss": 0.4419, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 4.0227049658006934e-05, |
|
"loss": 0.4421, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 4.018870962794835e-05, |
|
"loss": 0.4244, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.015036959788976e-05, |
|
"loss": 0.4345, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 4.01121062478913e-05, |
|
"loss": 0.4374, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 4.007376621783271e-05, |
|
"loss": 0.4368, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 4.003542618777413e-05, |
|
"loss": 0.4356, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_bleu": 58.5493, |
|
"eval_gen_len": 17.1569, |
|
"eval_loss": 0.7014300227165222, |
|
"eval_runtime": 7450.9373, |
|
"eval_samples_per_second": 11.677, |
|
"eval_steps_per_second": 1.46, |
|
"step": 130513 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 3.9997086157715555e-05, |
|
"loss": 0.4373, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 3.9958746127656966e-05, |
|
"loss": 0.42, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 3.992040609759838e-05, |
|
"loss": 0.4179, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 3.9882066067539794e-05, |
|
"loss": 0.406, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 3.984372603748122e-05, |
|
"loss": 0.4034, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 3.980538600742263e-05, |
|
"loss": 0.4103, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 3.976704597736405e-05, |
|
"loss": 0.4084, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 3.9728705947305465e-05, |
|
"loss": 0.4012, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 3.969036591724688e-05, |
|
"loss": 0.3966, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 3.96520258871883e-05, |
|
"loss": 0.4026, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 3.961368585712971e-05, |
|
"loss": 0.3983, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 3.957542250713125e-05, |
|
"loss": 0.3947, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 3.953708247707266e-05, |
|
"loss": 0.3894, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 3.94988191270742e-05, |
|
"loss": 0.3927, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 3.946047909701561e-05, |
|
"loss": 0.3919, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 3.942213906695703e-05, |
|
"loss": 0.3909, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 3.938379903689845e-05, |
|
"loss": 0.3914, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 3.9345459006839865e-05, |
|
"loss": 0.3876, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 3.930727233690151e-05, |
|
"loss": 0.3844, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 3.926893230684293e-05, |
|
"loss": 0.3871, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 3.923059227678435e-05, |
|
"loss": 0.3835, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 3.9192252246725766e-05, |
|
"loss": 0.3871, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 3.915391221666718e-05, |
|
"loss": 0.3803, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 3.9115572186608594e-05, |
|
"loss": 0.3776, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 3.907723215655001e-05, |
|
"loss": 0.3742, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 3.903889212649143e-05, |
|
"loss": 0.3778, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 3.900055209643285e-05, |
|
"loss": 0.3759, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 3.896221206637426e-05, |
|
"loss": 0.3756, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 3.89239487163758e-05, |
|
"loss": 0.3737, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 3.888560868631721e-05, |
|
"loss": 0.3749, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 3.884734533631875e-05, |
|
"loss": 0.3701, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 3.880900530626016e-05, |
|
"loss": 0.3736, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 3.8770665276201576e-05, |
|
"loss": 0.3706, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 3.8732325246143e-05, |
|
"loss": 0.3743, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.869398521608441e-05, |
|
"loss": 0.3665, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 3.865564518602583e-05, |
|
"loss": 0.3781, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 3.861738183602736e-05, |
|
"loss": 0.3591, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 3.857904180596878e-05, |
|
"loss": 0.365, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 3.854070177591019e-05, |
|
"loss": 0.3648, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 3.8502361745851615e-05, |
|
"loss": 0.3635, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 3.8464021715793026e-05, |
|
"loss": 0.3658, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 3.8425681685734444e-05, |
|
"loss": 0.3666, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 3.838734165567586e-05, |
|
"loss": 0.3579, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 3.834900162561728e-05, |
|
"loss": 0.3582, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 3.8310661595558697e-05, |
|
"loss": 0.3554, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 3.8272474925620344e-05, |
|
"loss": 0.3645, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 3.823421157562188e-05, |
|
"loss": 0.3605, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 3.8195871545563295e-05, |
|
"loss": 0.3657, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 3.8157531515504706e-05, |
|
"loss": 0.362, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 3.811919148544612e-05, |
|
"loss": 0.3585, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.808085145538755e-05, |
|
"loss": 0.3606, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 3.8042664785449196e-05, |
|
"loss": 0.3539, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.8004324755390606e-05, |
|
"loss": 0.3543, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 3.7966061405392146e-05, |
|
"loss": 0.3599, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.792772137533356e-05, |
|
"loss": 0.3613, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.7889381345274975e-05, |
|
"loss": 0.3616, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.78510413152164e-05, |
|
"loss": 0.357, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.781270128515781e-05, |
|
"loss": 0.3444, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.777436125509923e-05, |
|
"loss": 0.35, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.773602122504064e-05, |
|
"loss": 0.3515, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.769768119498206e-05, |
|
"loss": 0.3571, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.7659341164923474e-05, |
|
"loss": 0.3484, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.762100113486489e-05, |
|
"loss": 0.3467, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.758266110480631e-05, |
|
"loss": 0.3489, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.754432107474773e-05, |
|
"loss": 0.3505, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.7505981044689144e-05, |
|
"loss": 0.351, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.7467641014630555e-05, |
|
"loss": 0.3486, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 3.7429377664632095e-05, |
|
"loss": 0.3394, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.7391037634573506e-05, |
|
"loss": 0.3445, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 3.735269760451492e-05, |
|
"loss": 0.3417, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.731435757445634e-05, |
|
"loss": 0.3492, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 3.727601754439776e-05, |
|
"loss": 0.3519, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.7237677514339176e-05, |
|
"loss": 0.3484, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 3.719933748428059e-05, |
|
"loss": 0.3446, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 3.7160997454222005e-05, |
|
"loss": 0.3499, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 3.712265742416342e-05, |
|
"loss": 0.3465, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 3.708431739410484e-05, |
|
"loss": 0.3431, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 3.704597736404625e-05, |
|
"loss": 0.3379, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.700771401404779e-05, |
|
"loss": 0.3402, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 3.69693739839892e-05, |
|
"loss": 0.3379, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.6931187314050856e-05, |
|
"loss": 0.3468, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 3.6892847283992274e-05, |
|
"loss": 0.3433, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 3.685450725393369e-05, |
|
"loss": 0.3305, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 3.68161672238751e-05, |
|
"loss": 0.3373, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 3.677782719381652e-05, |
|
"loss": 0.3426, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 3.673948716375794e-05, |
|
"loss": 0.3387, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 3.6701147133699355e-05, |
|
"loss": 0.3407, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_bleu": 58.3826, |
|
"eval_gen_len": 17.1623, |
|
"eval_loss": 0.7513108253479004, |
|
"eval_runtime": 7458.9074, |
|
"eval_samples_per_second": 11.665, |
|
"eval_steps_per_second": 1.458, |
|
"step": 174018 |
|
} |
|
], |
|
"logging_steps": 500, |
|
"max_steps": 652560, |
|
"num_train_epochs": 15, |
|
"save_steps": 500, |
|
"total_flos": 6.033913407575425e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |