{
  "best_metric": 1.1023308038711548,
  "best_model_checkpoint": "./koen_mbartLarge_100p_run1/checkpoint-106209",
  "epoch": 4.999990584602058,
  "eval_steps": 500,
  "global_step": 265522,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.2450000000000001e-05,
      "loss": 1.6424,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.495e-05,
      "loss": 1.4947,
      "step": 1000
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.745e-05,
      "loss": 1.4612,
      "step": 1500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.995e-05,
      "loss": 1.4595,
      "step": 2000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.996872482883609e-05,
      "loss": 1.4661,
      "step": 2500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.99373238018526e-05,
      "loss": 1.4486,
      "step": 3000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9905859846959324e-05,
      "loss": 1.435,
      "step": 3500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.987439589206605e-05,
      "loss": 1.4067,
      "step": 4000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.984299486508256e-05,
      "loss": 1.3827,
      "step": 4500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9811530910189294e-05,
      "loss": 1.3895,
      "step": 5000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.978019281111559e-05,
      "loss": 1.3682,
      "step": 5500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.9748728856222316e-05,
      "loss": 1.3483,
      "step": 6000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.9717327829238824e-05,
      "loss": 1.366,
      "step": 6500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.968592680225534e-05,
      "loss": 1.3594,
      "step": 7000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.965446284736206e-05,
      "loss": 1.3401,
      "step": 7500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.9622998892468794e-05,
      "loss": 1.3115,
      "step": 8000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.959153493757552e-05,
      "loss": 1.3339,
      "step": 8500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.956007098268224e-05,
      "loss": 1.315,
      "step": 9000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.952860702778897e-05,
      "loss": 1.3058,
      "step": 9500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.949714307289569e-05,
      "loss": 1.3147,
      "step": 10000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.9465679118002415e-05,
      "loss": 1.3125,
      "step": 10500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.943427809101893e-05,
      "loss": 1.3018,
      "step": 11000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.940281413612566e-05,
      "loss": 1.2917,
      "step": 11500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.9371350181232385e-05,
      "loss": 1.2864,
      "step": 12000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.933988622633911e-05,
      "loss": 1.27,
      "step": 12500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.9308422271445834e-05,
      "loss": 1.2779,
      "step": 13000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.927695831655256e-05,
      "loss": 1.2759,
      "step": 13500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.924549436165928e-05,
      "loss": 1.2773,
      "step": 14000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.92140933346758e-05,
      "loss": 1.2802,
      "step": 14500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.918262937978252e-05,
      "loss": 1.2577,
      "step": 15000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.915116542488925e-05,
      "loss": 1.2584,
      "step": 15500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.9119701469995976e-05,
      "loss": 1.2463,
      "step": 16000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.90882375151027e-05,
      "loss": 1.2528,
      "step": 16500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.905677356020943e-05,
      "loss": 1.2435,
      "step": 17000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.902530960531615e-05,
      "loss": 1.2563,
      "step": 17500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.899384565042287e-05,
      "loss": 1.2356,
      "step": 18000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.8962381695529604e-05,
      "loss": 1.2404,
      "step": 18500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.893091774063633e-05,
      "loss": 1.2308,
      "step": 19000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.889945378574305e-05,
      "loss": 1.2296,
      "step": 19500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.886798983084978e-05,
      "loss": 1.2405,
      "step": 20000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.883652587595651e-05,
      "loss": 1.2224,
      "step": 20500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.880506192106323e-05,
      "loss": 1.2294,
      "step": 21000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.877359796616996e-05,
      "loss": 1.2249,
      "step": 21500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.874213401127669e-05,
      "loss": 1.226,
      "step": 22000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.871067005638341e-05,
      "loss": 1.2193,
      "step": 22500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.8679206101490135e-05,
      "loss": 1.2119,
      "step": 23000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.864774214659686e-05,
      "loss": 1.2213,
      "step": 23500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.8616278191703584e-05,
      "loss": 1.2301,
      "step": 24000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.858481423681031e-05,
      "loss": 1.2053,
      "step": 24500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.855335028191704e-05,
      "loss": 1.2098,
      "step": 25000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.852188632702376e-05,
      "loss": 1.2058,
      "step": 25500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.849054822795006e-05,
      "loss": 1.2194,
      "step": 26000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.8459147200966576e-05,
      "loss": 1.2015,
      "step": 26500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.84276832460733e-05,
      "loss": 1.2081,
      "step": 27000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.839621929118003e-05,
      "loss": 1.2106,
      "step": 27500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.8364755336286756e-05,
      "loss": 1.192,
      "step": 28000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.833329138139348e-05,
      "loss": 1.1804,
      "step": 28500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.830189035440999e-05,
      "loss": 1.2038,
      "step": 29000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.827042639951672e-05,
      "loss": 1.2103,
      "step": 29500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.823896244462344e-05,
      "loss": 1.1896,
      "step": 30000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.820749848973017e-05,
      "loss": 1.1941,
      "step": 30500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.817603453483689e-05,
      "loss": 1.1899,
      "step": 31000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.8144633507853406e-05,
      "loss": 1.1861,
      "step": 31500
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.811316955296013e-05,
      "loss": 1.178,
      "step": 32000
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.8081705598066855e-05,
      "loss": 1.1933,
      "step": 32500
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.8050241643173586e-05,
      "loss": 1.1895,
      "step": 33000
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.801877768828031e-05,
      "loss": 1.1817,
      "step": 33500
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.7987313733387034e-05,
      "loss": 1.1707,
      "step": 34000
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.795584977849376e-05,
      "loss": 1.1941,
      "step": 34500
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.792451167942006e-05,
      "loss": 1.1824,
      "step": 35000
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.789304772452679e-05,
      "loss": 1.1967,
      "step": 35500
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.786158376963351e-05,
      "loss": 1.1694,
      "step": 36000
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.7830119814740236e-05,
      "loss": 1.1656,
      "step": 36500
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.779865585984696e-05,
      "loss": 1.1828,
      "step": 37000
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.776719190495369e-05,
      "loss": 1.1821,
      "step": 37500
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.77357908779702e-05,
      "loss": 1.1563,
      "step": 38000
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.7704326923076924e-05,
      "loss": 1.1762,
      "step": 38500
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.7672862968183655e-05,
      "loss": 1.181,
      "step": 39000
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.764139901329038e-05,
      "loss": 1.1679,
      "step": 39500
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.76099350583971e-05,
      "loss": 1.1737,
      "step": 40000
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.757847110350383e-05,
      "loss": 1.1664,
      "step": 40500
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.754700714861056e-05,
      "loss": 1.1727,
      "step": 41000
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.751554319371728e-05,
      "loss": 1.168,
      "step": 41500
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.7484079238824e-05,
      "loss": 1.1799,
      "step": 42000
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.7452678211840515e-05,
      "loss": 1.1642,
      "step": 42500
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.7421214256947246e-05,
      "loss": 1.1621,
      "step": 43000
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.738975030205397e-05,
      "loss": 1.1587,
      "step": 43500
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.7358286347160694e-05,
      "loss": 1.1653,
      "step": 44000
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.7326822392267425e-05,
      "loss": 1.1598,
      "step": 44500
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.729535843737415e-05,
      "loss": 1.1482,
      "step": 45000
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.726389448248087e-05,
      "loss": 1.1543,
      "step": 45500
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.72324305275876e-05,
      "loss": 1.1549,
      "step": 46000
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.7201092428513896e-05,
      "loss": 1.1641,
      "step": 46500
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.7169754329440195e-05,
      "loss": 1.1493,
      "step": 47000
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.7138290374546926e-05,
      "loss": 1.1548,
      "step": 47500
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.710682641965365e-05,
      "loss": 1.1439,
      "step": 48000
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.7075362464760374e-05,
      "loss": 1.1516,
      "step": 48500
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.70438985098671e-05,
      "loss": 1.1414,
      "step": 49000
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.701243455497382e-05,
      "loss": 1.1479,
      "step": 49500
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.6980970600080547e-05,
      "loss": 1.1437,
      "step": 50000
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.694950664518728e-05,
      "loss": 1.1515,
      "step": 50500
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.6918042690294e-05,
      "loss": 1.1486,
      "step": 51000
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.6886578735400726e-05,
      "loss": 1.1471,
      "step": 51500
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.685511478050745e-05,
      "loss": 1.1309,
      "step": 52000
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.682365082561418e-05,
      "loss": 1.1525,
      "step": 52500
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.6792186870720905e-05,
      "loss": 1.1368,
      "step": 53000
    },
    {
      "epoch": 1.0,
      "eval_bleu": 34.5315,
      "eval_gen_len": 18.6993,
      "eval_loss": 1.1171826124191284,
      "eval_runtime": 9462.0924,
      "eval_samples_per_second": 11.225,
      "eval_steps_per_second": 1.403,
      "step": 53104
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.676072291582763e-05,
      "loss": 1.1271,
      "step": 53500
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.672925896093436e-05,
      "loss": 1.115,
      "step": 54000
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.6697795006041085e-05,
      "loss": 1.0878,
      "step": 54500
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.666639397905759e-05,
      "loss": 1.0617,
      "step": 55000
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.663493002416432e-05,
      "loss": 1.0543,
      "step": 55500
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.660346606927105e-05,
      "loss": 1.0667,
      "step": 56000
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.6572127970197346e-05,
      "loss": 1.0536,
      "step": 56500
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.654066401530407e-05,
      "loss": 1.0447,
      "step": 57000
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.6509200060410795e-05,
      "loss": 1.0365,
      "step": 57500
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.647773610551752e-05,
      "loss": 1.0424,
      "step": 58000
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.644627215062425e-05,
      "loss": 1.0404,
      "step": 58500
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.6414808195730974e-05,
      "loss": 1.0316,
      "step": 59000
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.63833442408377e-05,
      "loss": 1.0389,
      "step": 59500
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.635188028594443e-05,
      "loss": 1.0367,
      "step": 60000
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.632041633105115e-05,
      "loss": 1.0371,
      "step": 60500
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.628895237615787e-05,
      "loss": 1.0132,
      "step": 61000
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.62574884212646e-05,
      "loss": 1.0243,
      "step": 61500
    },
    {
      "epoch": 1.17,
      "learning_rate": 4.6226024466371326e-05,
      "loss": 1.0187,
      "step": 62000
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.619456051147805e-05,
      "loss": 1.0167,
      "step": 62500
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.616322241240435e-05,
      "loss": 1.0232,
      "step": 63000
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.613175845751107e-05,
      "loss": 1.0221,
      "step": 63500
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.6100294502617804e-05,
      "loss": 1.0225,
      "step": 64000
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.606883054772453e-05,
      "loss": 1.0181,
      "step": 64500
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.603736659283125e-05,
      "loss": 1.0052,
      "step": 65000
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.6005902637937984e-05,
      "loss": 0.9995,
      "step": 65500
    },
    {
      "epoch": 1.24,
      "learning_rate": 4.597443868304471e-05,
      "loss": 1.0068,
      "step": 66000
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.594297472815143e-05,
      "loss": 0.9989,
      "step": 66500
    },
    {
      "epoch": 1.26,
      "learning_rate": 4.591157370116794e-05,
      "loss": 1.0153,
      "step": 67000
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.588010974627467e-05,
      "loss": 1.0086,
      "step": 67500
    },
    {
      "epoch": 1.28,
      "learning_rate": 4.5848645791381395e-05,
      "loss": 0.9958,
      "step": 68000
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.581718183648812e-05,
      "loss": 0.9985,
      "step": 68500
    },
    {
      "epoch": 1.3,
      "learning_rate": 4.5785780809504634e-05,
      "loss": 0.9873,
      "step": 69000
    },
    {
      "epoch": 1.31,
      "learning_rate": 4.5754316854611365e-05,
      "loss": 1.0013,
      "step": 69500
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.572291582762787e-05,
      "loss": 0.9864,
      "step": 70000
    },
    {
      "epoch": 1.33,
      "learning_rate": 4.56914518727346e-05,
      "loss": 0.9913,
      "step": 70500
    },
    {
      "epoch": 1.34,
      "learning_rate": 4.565998791784132e-05,
      "loss": 0.9901,
      "step": 71000
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.562852396294805e-05,
      "loss": 0.9856,
      "step": 71500
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.5597060008054777e-05,
      "loss": 0.9852,
      "step": 72000
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.55655960531615e-05,
      "loss": 0.9748,
      "step": 72500
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.553419502617801e-05,
      "loss": 0.9891,
      "step": 73000
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.550273107128474e-05,
      "loss": 0.974,
      "step": 73500
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.5471267116391464e-05,
      "loss": 0.9812,
      "step": 74000
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.543980316149819e-05,
      "loss": 0.9843,
      "step": 74500
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.540833920660492e-05,
      "loss": 0.9803,
      "step": 75000
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.537693817962143e-05,
      "loss": 0.9734,
      "step": 75500
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.534547422472815e-05,
      "loss": 0.9715,
      "step": 76000
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.5314010269834876e-05,
      "loss": 0.9858,
      "step": 76500
    },
    {
      "epoch": 1.45,
      "learning_rate": 4.5282546314941607e-05,
      "loss": 0.9883,
      "step": 77000
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.525108236004833e-05,
      "loss": 0.9712,
      "step": 77500
    },
    {
      "epoch": 1.47,
      "learning_rate": 4.5219618405155055e-05,
      "loss": 0.9698,
      "step": 78000
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.5188154450261786e-05,
      "loss": 0.9721,
      "step": 78500
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.515669049536851e-05,
      "loss": 0.9805,
      "step": 79000
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.5125226540475234e-05,
      "loss": 0.9687,
      "step": 79500
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.509376258558196e-05,
      "loss": 0.9745,
      "step": 80000
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.506236155859847e-05,
      "loss": 0.9775,
      "step": 80500
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.50308976037052e-05,
      "loss": 0.9652,
      "step": 81000
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.499943364881192e-05,
      "loss": 0.9584,
      "step": 81500
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.4967969693918646e-05,
      "loss": 0.975,
      "step": 82000
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.493650573902538e-05,
      "loss": 0.9781,
      "step": 82500
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.49050417841321e-05,
      "loss": 0.961,
      "step": 83000
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.4873577829238825e-05,
      "loss": 0.973,
      "step": 83500
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.4842113874345556e-05,
      "loss": 0.9605,
      "step": 84000
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.481064991945228e-05,
      "loss": 0.9701,
      "step": 84500
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.477937474828836e-05,
      "loss": 0.954,
      "step": 85000
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.474791079339509e-05,
      "loss": 0.9631,
      "step": 85500
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.471644683850181e-05,
      "loss": 0.9634,
      "step": 86000
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.468498288360854e-05,
      "loss": 0.962,
      "step": 86500
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.4653518928715266e-05,
      "loss": 0.9482,
      "step": 87000
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.462205497382199e-05,
      "loss": 0.9686,
      "step": 87500
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.4590591018928715e-05,
      "loss": 0.963,
      "step": 88000
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.4559127064035446e-05,
      "loss": 0.9745,
      "step": 88500
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.452766310914217e-05,
      "loss": 0.949,
      "step": 89000
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.4496199154248894e-05,
      "loss": 0.9472,
      "step": 89500
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.4464735199355625e-05,
      "loss": 0.9633,
      "step": 90000
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.443327124446234e-05,
      "loss": 0.9612,
      "step": 90500
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.440187021747886e-05,
      "loss": 0.9532,
      "step": 91000
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.437040626258558e-05,
      "loss": 0.9539,
      "step": 91500
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.433894230769231e-05,
      "loss": 0.9627,
      "step": 92000
    },
    {
      "epoch": 1.74,
      "learning_rate": 4.430747835279904e-05,
      "loss": 0.9581,
      "step": 92500
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.4276077325815545e-05,
      "loss": 0.9596,
      "step": 93000
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.424461337092227e-05,
      "loss": 0.9486,
      "step": 93500
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.4213149416029e-05,
      "loss": 0.9567,
      "step": 94000
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.4181685461135724e-05,
      "loss": 0.9601,
      "step": 94500
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.415022150624245e-05,
      "loss": 0.9625,
      "step": 95000
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.411875755134918e-05,
      "loss": 0.9508,
      "step": 95500
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.40872935964559e-05,
      "loss": 0.9572,
      "step": 96000
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.405582964156263e-05,
      "loss": 0.9437,
      "step": 96500
    },
    {
      "epoch": 1.83,
      "learning_rate": 4.4024428614579136e-05,
      "loss": 0.9482,
      "step": 97000
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.3992964659685867e-05,
      "loss": 0.9553,
      "step": 97500
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.396150070479259e-05,
      "loss": 0.9474,
      "step": 98000
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.3930036749899315e-05,
      "loss": 0.9411,
      "step": 98500
    },
    {
      "epoch": 1.86,
      "learning_rate": 4.3898572795006046e-05,
      "loss": 0.9446,
      "step": 99000
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.386710884011277e-05,
      "loss": 0.957,
      "step": 99500
    },
    {
      "epoch": 1.88,
      "learning_rate": 4.383570781312928e-05,
      "loss": 0.9447,
      "step": 100000
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.380430678614579e-05,
      "loss": 0.9526,
      "step": 100500
    },
    {
      "epoch": 1.9,
      "learning_rate": 4.377284283125252e-05,
      "loss": 0.9381,
      "step": 101000
    },
    {
      "epoch": 1.91,
      "learning_rate": 4.374137887635925e-05,
      "loss": 0.9496,
      "step": 101500
    },
    {
      "epoch": 1.92,
      "learning_rate": 4.370991492146597e-05,
      "loss": 0.9386,
      "step": 102000
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.3678450966572696e-05,
      "loss": 0.9452,
      "step": 102500
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.364698701167943e-05,
      "loss": 0.939,
      "step": 103000
    },
    {
      "epoch": 1.95,
      "learning_rate": 4.361552305678615e-05,
      "loss": 0.9495,
      "step": 103500
    },
    {
      "epoch": 1.96,
      "learning_rate": 4.358412202980266e-05,
      "loss": 0.9402,
      "step": 104000
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.3552658074909384e-05,
      "loss": 0.9429,
      "step": 104500
    },
    {
      "epoch": 1.98,
      "learning_rate": 4.3521194120016115e-05,
      "loss": 0.9389,
      "step": 105000
    },
    {
      "epoch": 1.99,
      "learning_rate": 4.348973016512284e-05,
      "loss": 0.939,
      "step": 105500
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.345832913813935e-05,
      "loss": 0.9436,
      "step": 106000
    },
    {
      "epoch": 2.0,
      "eval_bleu": 35.4816,
      "eval_gen_len": 18.3763,
      "eval_loss": 1.1023308038711548,
      "eval_runtime": 9188.0575,
      "eval_samples_per_second": 11.559,
      "eval_steps_per_second": 1.445,
      "step": 106209
    },
    {
      "epoch": 2.01,
      "learning_rate": 4.342686518324607e-05,
      "loss": 0.9347,
      "step": 106500
    },
    {
      "epoch": 2.01,
      "learning_rate": 4.3395464156262586e-05,
      "loss": 0.9225,
      "step": 107000
    },
    {
      "epoch": 2.02,
      "learning_rate": 4.336400020136932e-05,
      "loss": 0.9049,
      "step": 107500
    },
    {
      "epoch": 2.03,
      "learning_rate": 4.333253624647604e-05,
      "loss": 0.883,
      "step": 108000
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.3301072291582765e-05,
      "loss": 0.8784,
      "step": 108500
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.3269608336689496e-05,
      "loss": 0.8855,
      "step": 109000
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.3238144381796214e-05,
      "loss": 0.879,
      "step": 109500
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.320668042690294e-05,
      "loss": 0.8775,
      "step": 110000
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.317521647200967e-05,
      "loss": 0.8697,
      "step": 110500
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.3143815445026184e-05,
      "loss": 0.8663,
      "step": 111000
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.311235149013291e-05,
      "loss": 0.8692,
      "step": 111500
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.308088753523963e-05,
      "loss": 0.8684,
      "step": 112000
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.304942358034636e-05,
      "loss": 0.8577,
      "step": 112500
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.301795962545308e-05,
      "loss": 0.8654,
      "step": 113000
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.2986495670559805e-05,
      "loss": 0.8681,
      "step": 113500
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.2955031715666536e-05,
      "loss": 0.8494,
      "step": 114000
    },
    {
      "epoch": 2.16,
      "learning_rate": 4.292356776077326e-05,
      "loss": 0.8618,
      "step": 114500
    },
    {
      "epoch": 2.17,
      "learning_rate": 4.2892166733789775e-05,
      "loss": 0.8461,
      "step": 115000
    },
    {
      "epoch": 2.17,
      "learning_rate": 4.28607027788965e-05,
      "loss": 0.8519,
      "step": 115500
    },
    {
      "epoch": 2.18,
      "learning_rate": 4.282923882400322e-05,
      "loss": 0.8515,
      "step": 116000
    },
    {
      "epoch": 2.19,
      "learning_rate": 4.2797774869109954e-05,
      "loss": 0.8537,
      "step": 116500
    },
    {
      "epoch": 2.2,
      "learning_rate": 4.276631091421667e-05,
      "loss": 0.8535,
      "step": 117000
    },
    {
      "epoch": 2.21,
      "learning_rate": 4.27348469593234e-05,
      "loss": 0.8549,
      "step": 117500
    },
    {
      "epoch": 2.22,
      "learning_rate": 4.2703383004430127e-05,
      "loss": 0.8397,
      "step": 118000
    },
    {
      "epoch": 2.23,
      "learning_rate": 4.267191904953685e-05,
      "loss": 0.8408,
      "step": 118500
    },
    {
      "epoch": 2.24,
      "learning_rate": 4.2640455094643575e-05,
      "loss": 0.8416,
      "step": 119000
    },
    {
      "epoch": 2.25,
      "learning_rate": 4.2608991139750306e-05,
      "loss": 0.8372,
      "step": 119500
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.257752718485703e-05,
      "loss": 0.8503,
      "step": 120000
    },
    {
      "epoch": 2.27,
      "learning_rate": 4.2546063229963754e-05,
      "loss": 0.8422,
      "step": 120500
    },
    {
      "epoch": 2.28,
      "learning_rate": 4.2514599275070485e-05,
      "loss": 0.8395,
      "step": 121000
    },
    {
      "epoch": 2.29,
      "learning_rate": 4.248313532017721e-05,
      "loss": 0.8366,
      "step": 121500
    },
    {
      "epoch": 2.3,
      "learning_rate": 4.2451671365283934e-05,
      "loss": 0.8234,
      "step": 122000
    },
    {
      "epoch": 2.31,
      "learning_rate": 4.242027033830044e-05,
      "loss": 0.8381,
      "step": 122500
    },
    {
      "epoch": 2.32,
      "learning_rate": 4.238899516713653e-05,
      "loss": 0.832,
      "step": 123000
    },
    {
      "epoch": 2.33,
      "learning_rate": 4.2357531212243255e-05,
      "loss": 0.8324,
      "step": 123500
    },
    {
      "epoch": 2.34,
      "learning_rate": 4.2326067257349986e-05,
      "loss": 0.8295,
      "step": 124000
    },
    {
      "epoch": 2.34,
      "learning_rate": 4.229460330245671e-05,
      "loss": 0.8266,
      "step": 124500
    },
    {
      "epoch": 2.35,
      "learning_rate": 4.2263139347563434e-05,
      "loss": 0.8217,
      "step": 125000
    },
    {
      "epoch": 2.36,
      "learning_rate": 4.223167539267016e-05,
      "loss": 0.8207,
      "step": 125500
    },
    {
      "epoch": 2.37,
      "learning_rate": 4.220021143777689e-05,
      "loss": 0.8248,
      "step": 126000
    },
    {
      "epoch": 2.38,
      "learning_rate": 4.216874748288361e-05,
      "loss": 0.8196,
      "step": 126500
    },
    {
      "epoch": 2.39,
      "learning_rate": 4.213728352799033e-05,
      "loss": 0.8279,
      "step": 127000
    },
    {
      "epoch": 2.4,
      "learning_rate": 4.210581957309706e-05,
      "loss": 0.8261,
      "step": 127500
    },
    {
      "epoch": 2.41,
      "learning_rate": 4.2074355618203786e-05,
      "loss": 0.8226,
      "step": 128000
    },
    {
      "epoch": 2.42,
      "learning_rate": 4.20429545912203e-05,
      "loss": 0.8195,
      "step": 128500
    },
    {
      "epoch": 2.43,
      "learning_rate": 4.2011490636327025e-05,
      "loss": 0.8164,
      "step": 129000
    },
    {
      "epoch": 2.44,
      "learning_rate": 4.1980026681433756e-05,
      "loss": 0.8229,
      "step": 129500
    },
    {
      "epoch": 2.45,
      "learning_rate": 4.1948562726540474e-05,
      "loss": 0.8272,
      "step": 130000
    },
    {
      "epoch": 2.46,
      "learning_rate": 4.19170987716472e-05,
      "loss": 0.8194,
      "step": 130500
    },
    {
      "epoch": 2.47,
      "learning_rate": 4.188563481675393e-05,
      "loss": 0.8158,
      "step": 131000
    },
    {
      "epoch": 2.48,
      "learning_rate": 4.1854233789770444e-05,
      "loss": 0.8146,
      "step": 131500
    },
    {
      "epoch": 2.49,
      "learning_rate": 4.182283276278695e-05,
      "loss": 0.8209,
      "step": 132000
    },
    {
      "epoch": 2.5,
      "learning_rate": 4.1791368807893676e-05,
      "loss": 0.8156,
      "step": 132500
    },
    {
      "epoch": 2.5,
      "learning_rate": 4.17599048530004e-05,
      "loss": 0.8204,
      "step": 133000
    },
    {
      "epoch": 2.51,
      "learning_rate": 4.172844089810713e-05,
      "loss": 0.8151,
      "step": 133500
    },
    {
      "epoch": 2.52,
      "learning_rate": 4.1696976943213855e-05,
      "loss": 0.8208,
      "step": 134000
    },
    {
      "epoch": 2.53,
      "learning_rate": 4.166551298832058e-05,
      "loss": 0.7978,
      "step": 134500
    },
    {
      "epoch": 2.54,
      "learning_rate": 4.163404903342731e-05,
      "loss": 0.8158,
      "step": 135000
    },
    {
      "epoch": 2.55,
      "learning_rate": 4.160264800644382e-05,
      "loss": 0.8234,
      "step": 135500
    },
    {
      "epoch": 2.56,
      "learning_rate": 4.157118405155054e-05,
      "loss": 0.8095,
      "step": 136000
    },
    {
      "epoch": 2.57,
      "learning_rate": 4.153972009665727e-05,
      "loss": 0.8179,
      "step": 136500
    },
    {
      "epoch": 2.58,
      "learning_rate": 4.1508256141764e-05,
      "loss": 0.8078,
      "step": 137000
    },
    {
      "epoch": 2.59,
      "learning_rate": 4.147679218687072e-05,
      "loss": 0.8161,
      "step": 137500
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.1445328231977446e-05,
      "loss": 0.8062,
      "step": 138000
    },
    {
      "epoch": 2.61,
      "learning_rate": 4.141386427708418e-05,
      "loss": 0.8095,
      "step": 138500
    },
    {
      "epoch": 2.62,
      "learning_rate": 4.13824003221909e-05,
      "loss": 0.8037,
      "step": 139000
    },
    {
      "epoch": 2.63,
      "learning_rate": 4.1350936367297626e-05,
      "loss": 0.8124,
      "step": 139500
    },
    {
      "epoch": 2.64,
      "learning_rate": 4.131947241240435e-05,
      "loss": 0.8033,
      "step": 140000
    },
    {
      "epoch": 2.65,
      "learning_rate": 4.128800845751108e-05,
      "loss": 0.807,
      "step": 140500
    },
    {
      "epoch": 2.66,
      "learning_rate": 4.1256544502617805e-05,
      "loss": 0.8164,
      "step": 141000
    },
    {
      "epoch": 2.66,
      "learning_rate": 4.122508054772453e-05,
      "loss": 0.8179,
      "step": 141500
    },
    {
      "epoch": 2.67,
      "learning_rate": 4.1193679520741044e-05,
      "loss": 0.7989,
      "step": 142000
    },
    {
      "epoch": 2.68,
      "learning_rate": 4.116221556584777e-05,
      "loss": 0.7976,
      "step": 142500
    },
    {
      "epoch": 2.69,
      "learning_rate": 4.113075161095449e-05,
      "loss": 0.8069,
      "step": 143000
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.1099287656061217e-05,
      "loss": 0.8064,
      "step": 143500
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.106782370116795e-05,
      "loss": 0.8018,
      "step": 144000
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.103635974627467e-05,
      "loss": 0.8032,
      "step": 144500
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.100495871929118e-05,
      "loss": 0.8078,
      "step": 145000
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.0973494764397904e-05,
      "loss": 0.8083,
      "step": 145500
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.0942030809504635e-05,
      "loss": 0.804,
      "step": 146000
    },
    {
      "epoch": 2.76,
      "learning_rate": 4.091056685461136e-05,
      "loss": 0.7964,
      "step": 146500
    },
    {
      "epoch": 2.77,
      "learning_rate": 4.087910289971808e-05,
      "loss": 0.8071,
      "step": 147000
    },
    {
      "epoch": 2.78,
      "learning_rate": 4.08477018727346e-05,
      "loss": 0.8087,
      "step": 147500
    },
    {
      "epoch": 2.79,
      "learning_rate": 4.081623791784132e-05,
      "loss": 0.8049,
      "step": 148000
    },
    {
      "epoch": 2.8,
      "learning_rate": 4.0784773962948046e-05,
      "loss": 0.8084,
      "step": 148500
    },
    {
      "epoch": 2.81,
      "learning_rate": 4.075337293596456e-05,
      "loss": 0.7979,
      "step": 149000
    },
    {
      "epoch": 2.82,
      "learning_rate": 4.0721908981071285e-05,
      "loss": 0.8,
      "step": 149500
    },
    {
      "epoch": 2.82,
      "learning_rate": 4.0690445026178016e-05,
      "loss": 0.8017,
      "step": 150000
    },
    {
      "epoch": 2.83,
      "learning_rate": 4.065898107128474e-05,
      "loss": 0.8058,
      "step": 150500
    },
    {
      "epoch": 2.84,
      "learning_rate": 4.062751711639146e-05,
      "loss": 0.8015,
      "step": 151000
    },
    {
      "epoch": 2.85,
      "learning_rate": 4.059605316149819e-05,
      "loss": 0.7903,
      "step": 151500
    },
    {
      "epoch": 2.86,
      "learning_rate": 4.056458920660491e-05,
      "loss": 0.7945,
      "step": 152000
    },
    {
      "epoch": 2.87,
      "learning_rate": 4.053312525171164e-05,
      "loss": 0.8067,
      "step": 152500
    },
    {
      "epoch": 2.88,
      "learning_rate": 4.050166129681837e-05,
      "loss": 0.7989,
      "step": 153000
    },
    {
      "epoch": 2.89,
      "learning_rate": 4.047019734192509e-05,
      "loss": 0.7995,
      "step": 153500
    },
    {
      "epoch": 2.9,
      "learning_rate": 4.043879631494161e-05,
      "loss": 0.7934,
      "step": 154000
    },
    {
      "epoch": 2.91,
      "learning_rate": 4.040733236004833e-05,
      "loss": 0.7991,
      "step": 154500
    },
    {
      "epoch": 2.92,
      "learning_rate": 4.0375868405155056e-05,
      "loss": 0.7939,
      "step": 155000
    },
    {
      "epoch": 2.93,
      "learning_rate": 4.034440445026178e-05,
      "loss": 0.796,
      "step": 155500
    },
    {
      "epoch": 2.94,
      "learning_rate": 4.0312940495368504e-05,
      "loss": 0.7902,
      "step": 156000
    },
    {
      "epoch": 2.95,
      "learning_rate": 4.0281476540475235e-05,
      "loss": 0.7966,
      "step": 156500
    },
    {
      "epoch": 2.96,
      "learning_rate": 4.025001258558196e-05,
      "loss": 0.7988,
      "step": 157000
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.0218548630688684e-05,
      "loss": 0.7923,
      "step": 157500
    },
    {
      "epoch": 2.98,
      "learning_rate": 4.0187084675795415e-05,
      "loss": 0.7897,
      "step": 158000
    },
    {
      "epoch": 2.98,
      "learning_rate": 4.015568364881192e-05,
      "loss": 0.7891,
      "step": 158500
    },
    {
      "epoch": 2.99,
      "learning_rate": 4.012421969391865e-05,
      "loss": 0.7948,
      "step": 159000
    },
    {
      "epoch": 3.0,
      "eval_bleu": 35.3376,
      "eval_gen_len": 18.2557,
      "eval_loss": 1.1393530368804932,
      "eval_runtime": 9122.6866,
      "eval_samples_per_second": 11.642,
      "eval_steps_per_second": 1.455,
      "step": 159313
    },
    {
      "epoch": 3.0,
      "learning_rate": 4.009275573902537e-05,
      "loss": 0.7887,
      "step": 159500
    },
    {
      "epoch": 3.01,
      "learning_rate": 4.00612917841321e-05,
      "loss": 0.7831,
      "step": 160000
    },
    {
      "epoch": 3.02,
      "learning_rate": 4.0029827829238826e-05,
      "loss": 0.7654,
      "step": 160500
    },
    {
      "epoch": 3.03,
      "learning_rate": 3.999836387434555e-05,
      "loss": 0.7478,
      "step": 161000
    },
    {
      "epoch": 3.04,
      "learning_rate": 3.9966899919452274e-05,
      "loss": 0.7417,
      "step": 161500
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.993549889246879e-05,
      "loss": 0.7477,
      "step": 162000
    },
    {
      "epoch": 3.06,
      "learning_rate": 3.9904034937575513e-05,
      "loss": 0.7466,
      "step": 162500
    },
    {
      "epoch": 3.07,
      "learning_rate": 3.987257098268224e-05,
      "loss": 0.7441,
      "step": 163000
    },
    {
      "epoch": 3.08,
      "learning_rate": 3.984116995569875e-05,
      "loss": 0.7286,
      "step": 163500
    },
    {
      "epoch": 3.09,
      "learning_rate": 3.980970600080548e-05,
      "loss": 0.732,
      "step": 164000
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.977824204591221e-05,
      "loss": 0.7357,
      "step": 164500
    },
    {
      "epoch": 3.11,
      "learning_rate": 3.974677809101893e-05,
      "loss": 0.7325,
      "step": 165000
    },
    {
      "epoch": 3.12,
      "learning_rate": 3.9715314136125656e-05,
      "loss": 0.7211,
      "step": 165500
    },
    {
      "epoch": 3.13,
      "learning_rate": 3.968385018123239e-05,
      "loss": 0.7336,
      "step": 166000
    },
    {
      "epoch": 3.14,
      "learning_rate": 3.9652386226339104e-05,
      "loss": 0.7367,
      "step": 166500
    },
    {
      "epoch": 3.14,
      "learning_rate": 3.962092227144583e-05,
      "loss": 0.7199,
      "step": 167000
    },
    {
      "epoch": 3.15,
      "learning_rate": 3.958945831655256e-05,
      "loss": 0.7225,
      "step": 167500
    },
    {
      "epoch": 3.16,
      "learning_rate": 3.9557994361659284e-05,
      "loss": 0.7187,
      "step": 168000
    },
    {
      "epoch": 3.17,
      "learning_rate": 3.952653040676601e-05,
      "loss": 0.7198,
      "step": 168500
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.949506645187274e-05,
      "loss": 0.7206,
      "step": 169000
    },
    {
      "epoch": 3.19,
      "learning_rate": 3.946360249697946e-05,
      "loss": 0.7184,
      "step": 169500
    },
    {
      "epoch": 3.2,
      "learning_rate": 3.943213854208619e-05,
      "loss": 0.7316,
      "step": 170000
    },
    {
      "epoch": 3.21,
      "learning_rate": 3.940067458719292e-05,
      "loss": 0.7146,
      "step": 170500
    },
    {
      "epoch": 3.22,
      "learning_rate": 3.936921063229964e-05,
      "loss": 0.7135,
      "step": 171000
    },
    {
      "epoch": 3.23,
      "learning_rate": 3.933780960531615e-05,
      "loss": 0.7176,
      "step": 171500
    },
    {
      "epoch": 3.24,
      "learning_rate": 3.9306345650422875e-05,
      "loss": 0.7082,
      "step": 172000
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.9274881695529606e-05,
      "loss": 0.7072,
      "step": 172500
    },
    {
      "epoch": 3.26,
      "learning_rate": 3.924341774063633e-05,
      "loss": 0.7195,
      "step": 173000
    },
    {
      "epoch": 3.27,
      "learning_rate": 3.9212016713652845e-05,
      "loss": 0.7051,
      "step": 173500
    },
    {
      "epoch": 3.28,
      "learning_rate": 3.918055275875956e-05,
      "loss": 0.7131,
      "step": 174000
    },
    {
      "epoch": 3.29,
      "learning_rate": 3.914921465968587e-05,
      "loss": 0.7033,
      "step": 174500
    },
    {
      "epoch": 3.3,
      "learning_rate": 3.911775070479259e-05,
      "loss": 0.7024,
      "step": 175000
    },
    {
      "epoch": 3.3,
      "learning_rate": 3.9086286749899316e-05,
      "loss": 0.6992,
      "step": 175500
    },
    {
      "epoch": 3.31,
      "learning_rate": 3.905488572291583e-05,
      "loss": 0.706,
      "step": 176000
    },
    {
      "epoch": 3.32,
      "learning_rate": 3.9023484695932345e-05,
      "loss": 0.7011,
      "step": 176500
    },
    {
      "epoch": 3.33,
      "learning_rate": 3.899202074103906e-05,
      "loss": 0.7064,
      "step": 177000
    },
    {
      "epoch": 3.34,
      "learning_rate": 3.8960556786145794e-05,
      "loss": 0.6983,
      "step": 177500
    },
    {
      "epoch": 3.35,
      "learning_rate": 3.892909283125252e-05,
      "loss": 0.6984,
      "step": 178000
    },
    {
      "epoch": 3.36,
      "learning_rate": 3.889762887635924e-05,
      "loss": 0.6938,
      "step": 178500
    },
    {
      "epoch": 3.37,
      "learning_rate": 3.8866164921465966e-05,
      "loss": 0.6876,
      "step": 179000
    },
    {
      "epoch": 3.38,
      "learning_rate": 3.88347009665727e-05,
      "loss": 0.6951,
      "step": 179500
    },
    {
      "epoch": 3.39,
      "learning_rate": 3.880323701167942e-05,
      "loss": 0.6957,
      "step": 180000
    },
    {
      "epoch": 3.4,
      "learning_rate": 3.8771773056786146e-05,
      "loss": 0.7024,
      "step": 180500
    },
    {
      "epoch": 3.41,
      "learning_rate": 3.874037202980266e-05,
      "loss": 0.6935,
      "step": 181000
    },
    {
      "epoch": 3.42,
      "learning_rate": 3.8708908074909385e-05,
      "loss": 0.6897,
      "step": 181500
    },
    {
      "epoch": 3.43,
      "learning_rate": 3.867744412001611e-05,
      "loss": 0.6943,
      "step": 182000
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.864598016512283e-05,
      "loss": 0.6927,
      "step": 182500
    },
    {
      "epoch": 3.45,
      "learning_rate": 3.8614516210229564e-05,
      "loss": 0.6984,
      "step": 183000
    },
    {
      "epoch": 3.46,
      "learning_rate": 3.858305225533629e-05,
      "loss": 0.6906,
      "step": 183500
    },
    {
      "epoch": 3.46,
      "learning_rate": 3.855158830044301e-05,
      "loss": 0.6879,
      "step": 184000
    },
    {
      "epoch": 3.47,
      "learning_rate": 3.8520124345549743e-05,
      "loss": 0.6865,
      "step": 184500
    },
    {
      "epoch": 3.48,
      "learning_rate": 3.848866039065647e-05,
      "loss": 0.6874,
      "step": 185000
    },
    {
      "epoch": 3.49,
      "learning_rate": 3.845719643576319e-05,
      "loss": 0.6959,
      "step": 185500
    },
    {
      "epoch": 3.5,
      "learning_rate": 3.8425732480869916e-05,
      "loss": 0.6896,
      "step": 186000
    },
    {
      "epoch": 3.51,
      "learning_rate": 3.839426852597665e-05,
      "loss": 0.686,
      "step": 186500
    },
    {
      "epoch": 3.52,
      "learning_rate": 3.8362867498993155e-05,
      "loss": 0.6966,
      "step": 187000
    },
    {
      "epoch": 3.53,
      "learning_rate": 3.833140354409988e-05,
      "loss": 0.6731,
      "step": 187500
    },
    {
      "epoch": 3.54,
      "learning_rate": 3.830006544502618e-05,
      "loss": 0.6867,
      "step": 188000
    },
    {
      "epoch": 3.55,
      "learning_rate": 3.82686014901329e-05,
      "loss": 0.6923,
      "step": 188500
    },
    {
      "epoch": 3.56,
      "learning_rate": 3.823713753523963e-05,
      "loss": 0.6873,
      "step": 189000
    },
    {
      "epoch": 3.57,
      "learning_rate": 3.820573650825615e-05,
      "loss": 0.6929,
      "step": 189500
    },
    {
      "epoch": 3.58,
      "learning_rate": 3.817427255336287e-05,
      "loss": 0.6836,
      "step": 190000
    },
    {
      "epoch": 3.59,
      "learning_rate": 3.814280859846959e-05,
      "loss": 0.6888,
      "step": 190500
    },
    {
      "epoch": 3.6,
      "learning_rate": 3.8111407571486104e-05,
      "loss": 0.6795,
      "step": 191000
    },
    {
      "epoch": 3.61,
      "learning_rate": 3.8079943616592835e-05,
      "loss": 0.6819,
      "step": 191500
    },
    {
      "epoch": 3.62,
      "learning_rate": 3.804847966169956e-05,
      "loss": 0.6812,
      "step": 192000
    },
    {
      "epoch": 3.62,
      "learning_rate": 3.8017015706806283e-05,
      "loss": 0.6904,
      "step": 192500
    },
    {
      "epoch": 3.63,
      "learning_rate": 3.7985551751913014e-05,
      "loss": 0.6777,
      "step": 193000
    },
    {
      "epoch": 3.64,
      "learning_rate": 3.795408779701974e-05,
      "loss": 0.684,
      "step": 193500
    },
    {
      "epoch": 3.65,
      "learning_rate": 3.7922623842126456e-05,
      "loss": 0.6839,
      "step": 194000
    },
    {
      "epoch": 3.66,
      "learning_rate": 3.789115988723319e-05,
      "loss": 0.6879,
      "step": 194500
    },
    {
      "epoch": 3.67,
      "learning_rate": 3.785969593233991e-05,
      "loss": 0.6815,
      "step": 195000
    },
    {
      "epoch": 3.68,
      "learning_rate": 3.7828231977446635e-05,
      "loss": 0.6752,
      "step": 195500
    },
    {
      "epoch": 3.69,
      "learning_rate": 3.7796768022553366e-05,
      "loss": 0.6782,
      "step": 196000
    },
    {
      "epoch": 3.7,
      "learning_rate": 3.776530406766009e-05,
      "loss": 0.685,
      "step": 196500
    },
    {
      "epoch": 3.71,
      "learning_rate": 3.7733840112766815e-05,
      "loss": 0.6776,
      "step": 197000
    },
    {
      "epoch": 3.72,
      "learning_rate": 3.770237615787354e-05,
      "loss": 0.6782,
      "step": 197500
    },
    {
      "epoch": 3.73,
      "learning_rate": 3.767091220298027e-05,
      "loss": 0.6817,
      "step": 198000
    },
    {
      "epoch": 3.74,
      "learning_rate": 3.7639448248086994e-05,
      "loss": 0.6821,
      "step": 198500
    },
    {
      "epoch": 3.75,
      "learning_rate": 3.760798429319372e-05,
      "loss": 0.6731,
      "step": 199000
    },
    {
      "epoch": 3.76,
      "learning_rate": 3.757658326621023e-05,
      "loss": 0.6776,
      "step": 199500
    },
    {
      "epoch": 3.77,
      "learning_rate": 3.754511931131696e-05,
      "loss": 0.6844,
      "step": 200000
    },
    {
      "epoch": 3.78,
      "learning_rate": 3.751365535642368e-05,
      "loss": 0.6862,
      "step": 200500
    },
    {
      "epoch": 3.78,
      "learning_rate": 3.7482254329440196e-05,
      "loss": 0.6742,
      "step": 201000
    },
    {
      "epoch": 3.79,
      "learning_rate": 3.7450853302456704e-05,
      "loss": 0.6831,
      "step": 201500
    },
    {
      "epoch": 3.8,
      "learning_rate": 3.7419389347563435e-05,
      "loss": 0.6744,
      "step": 202000
    },
    {
      "epoch": 3.81,
      "learning_rate": 3.738792539267016e-05,
      "loss": 0.6782,
      "step": 202500
    },
    {
      "epoch": 3.82,
      "learning_rate": 3.7356461437776884e-05,
      "loss": 0.6728,
      "step": 203000
    },
    {
      "epoch": 3.83,
      "learning_rate": 3.7324997482883615e-05,
      "loss": 0.6818,
      "step": 203500
    },
    {
      "epoch": 3.84,
      "learning_rate": 3.729353352799034e-05,
      "loss": 0.6771,
      "step": 204000
    },
    {
      "epoch": 3.85,
      "learning_rate": 3.726206957309706e-05,
      "loss": 0.6666,
      "step": 204500
    },
    {
      "epoch": 3.86,
      "learning_rate": 3.723060561820379e-05,
      "loss": 0.6707,
      "step": 205000
    },
    {
      "epoch": 3.87,
      "learning_rate": 3.719914166331051e-05,
      "loss": 0.6807,
      "step": 205500
    },
    {
      "epoch": 3.88,
      "learning_rate": 3.7167677708417236e-05,
      "loss": 0.6727,
      "step": 206000
    },
    {
      "epoch": 3.89,
      "learning_rate": 3.713627668143375e-05,
      "loss": 0.6778,
      "step": 206500
    },
    {
      "epoch": 3.9,
      "learning_rate": 3.7104812726540475e-05,
      "loss": 0.6691,
      "step": 207000
    },
    {
      "epoch": 3.91,
      "learning_rate": 3.7073348771647206e-05,
      "loss": 0.6744,
      "step": 207500
    },
    {
      "epoch": 3.92,
      "learning_rate": 3.704188481675393e-05,
      "loss": 0.6689,
      "step": 208000
    },
    {
      "epoch": 3.93,
      "learning_rate": 3.7010420861860654e-05,
      "loss": 0.673,
      "step": 208500
    },
    {
      "epoch": 3.94,
      "learning_rate": 3.6978956906967385e-05,
      "loss": 0.671,
      "step": 209000
    },
    {
      "epoch": 3.95,
      "learning_rate": 3.69474929520741e-05,
      "loss": 0.669,
      "step": 209500
    },
    {
      "epoch": 3.95,
      "learning_rate": 3.6916028997180827e-05,
      "loss": 0.6684,
      "step": 210000
    },
    {
      "epoch": 3.96,
      "learning_rate": 3.688456504228756e-05,
      "loss": 0.6725,
      "step": 210500
    },
    {
      "epoch": 3.97,
      "learning_rate": 3.685310108739428e-05,
      "loss": 0.6732,
      "step": 211000
    },
    {
      "epoch": 3.98,
      "learning_rate": 3.6821637132501006e-05,
      "loss": 0.6603,
      "step": 211500
    },
    {
      "epoch": 3.99,
      "learning_rate": 3.679017317760774e-05,
      "loss": 0.675,
      "step": 212000
    },
    {
      "epoch": 4.0,
      "eval_bleu": 35.1692,
      "eval_gen_len": 18.2835,
      "eval_loss": 1.199652910232544,
      "eval_runtime": 9165.5031,
      "eval_samples_per_second": 11.588,
      "eval_steps_per_second": 1.448,
      "step": 212418
    },
    {
      "epoch": 4.0,
      "learning_rate": 3.675870922271446e-05,
      "loss": 0.6715,
      "step": 212500
    },
    {
      "epoch": 4.01,
      "learning_rate": 3.6727245267821185e-05,
      "loss": 0.661,
      "step": 213000
    },
    {
      "epoch": 4.02,
      "learning_rate": 3.669578131292791e-05,
      "loss": 0.6471,
      "step": 213500
    },
    {
      "epoch": 4.03,
      "learning_rate": 3.6664380285944424e-05,
      "loss": 0.6365,
      "step": 214000
    },
    {
      "epoch": 4.04,
      "learning_rate": 3.663297925896094e-05,
      "loss": 0.6245,
      "step": 214500
    },
    {
      "epoch": 4.05,
      "learning_rate": 3.660157823197745e-05,
      "loss": 0.6307,
      "step": 215000
    },
    {
      "epoch": 4.06,
      "learning_rate": 3.657011427708417e-05,
      "loss": 0.6246,
      "step": 215500
    },
    {
      "epoch": 4.07,
      "learning_rate": 3.6538650322190895e-05,
      "loss": 0.6323,
      "step": 216000
    },
    {
      "epoch": 4.08,
      "learning_rate": 3.6507186367297626e-05,
      "loss": 0.6172,
      "step": 216500
    },
    {
      "epoch": 4.09,
      "learning_rate": 3.647572241240435e-05,
      "loss": 0.619,
      "step": 217000
    },
    {
      "epoch": 4.1,
      "learning_rate": 3.6444321385420865e-05,
      "loss": 0.6174,
      "step": 217500
    },
    {
      "epoch": 4.11,
      "learning_rate": 3.641285743052759e-05,
      "loss": 0.6194,
      "step": 218000
    },
    {
      "epoch": 4.11,
      "learning_rate": 3.638139347563432e-05,
      "loss": 0.6076,
      "step": 218500
    },
    {
      "epoch": 4.12,
      "learning_rate": 3.634992952074104e-05,
      "loss": 0.6165,
      "step": 219000
    },
    {
      "epoch": 4.13,
      "learning_rate": 3.631846556584776e-05,
      "loss": 0.6202,
      "step": 219500
    },
    {
      "epoch": 4.14,
      "learning_rate": 3.628700161095449e-05,
      "loss": 0.6139,
      "step": 220000
    },
    {
      "epoch": 4.15,
      "learning_rate": 3.625553765606122e-05,
      "loss": 0.6036,
      "step": 220500
    },
    {
      "epoch": 4.16,
      "learning_rate": 3.622407370116794e-05,
      "loss": 0.6133,
      "step": 221000
    },
    {
      "epoch": 4.17,
      "learning_rate": 3.619260974627467e-05,
      "loss": 0.6036,
      "step": 221500
    },
    {
      "epoch": 4.18,
      "learning_rate": 3.61611457913814e-05,
      "loss": 0.6056,
      "step": 222000
    },
    {
      "epoch": 4.19,
      "learning_rate": 3.612968183648812e-05,
      "loss": 0.6119,
      "step": 222500
    },
    {
      "epoch": 4.2,
      "learning_rate": 3.609828080950463e-05,
      "loss": 0.6152,
      "step": 223000
    },
    {
      "epoch": 4.21,
      "learning_rate": 3.606681685461136e-05,
      "loss": 0.6038,
      "step": 223500
    },
    {
      "epoch": 4.22,
      "learning_rate": 3.6035352899718084e-05,
      "loss": 0.6044,
      "step": 224000
    },
    {
      "epoch": 4.23,
      "learning_rate": 3.600388894482481e-05,
      "loss": 0.6032,
      "step": 224500
    },
    {
      "epoch": 4.24,
      "learning_rate": 3.597242498993153e-05,
      "loss": 0.5894,
      "step": 225000
    },
    {
      "epoch": 4.25,
      "learning_rate": 3.5940961035038264e-05,
      "loss": 0.6051,
      "step": 225500
    },
    {
      "epoch": 4.26,
      "learning_rate": 3.590956000805478e-05,
      "loss": 0.6029,
      "step": 226000
    },
    {
      "epoch": 4.27,
      "learning_rate": 3.5878096053161496e-05,
      "loss": 0.6032,
      "step": 226500
    },
    {
      "epoch": 4.27,
      "learning_rate": 3.584663209826823e-05,
      "loss": 0.601,
      "step": 227000
    },
    {
      "epoch": 4.28,
      "learning_rate": 3.581523107128474e-05,
      "loss": 0.599,
      "step": 227500
    },
    {
      "epoch": 4.29,
      "learning_rate": 3.578383004430125e-05,
      "loss": 0.5896,
      "step": 228000
    },
    {
      "epoch": 4.3,
      "learning_rate": 3.5752366089407974e-05,
      "loss": 0.5862,
      "step": 228500
    },
    {
      "epoch": 4.31,
      "learning_rate": 3.57209021345147e-05,
      "loss": 0.5981,
      "step": 229000
    },
    {
      "epoch": 4.32,
      "learning_rate": 3.568943817962143e-05,
      "loss": 0.5892,
      "step": 229500
    },
    {
      "epoch": 4.33,
      "learning_rate": 3.565797422472815e-05,
      "loss": 0.5933,
      "step": 230000
    },
    {
      "epoch": 4.34,
      "learning_rate": 3.562651026983488e-05,
      "loss": 0.585,
      "step": 230500
    },
    {
      "epoch": 4.35,
      "learning_rate": 3.55950463149416e-05,
      "loss": 0.5908,
      "step": 231000
    },
    {
      "epoch": 4.36,
      "learning_rate": 3.556358236004833e-05,
      "loss": 0.5863,
      "step": 231500
    },
    {
      "epoch": 4.37,
      "learning_rate": 3.553218133306484e-05,
      "loss": 0.5767,
      "step": 232000
    },
    {
      "epoch": 4.38,
      "learning_rate": 3.5500717378171565e-05,
      "loss": 0.5865,
      "step": 232500
    },
    {
      "epoch": 4.39,
      "learning_rate": 3.5469253423278296e-05,
      "loss": 0.5813,
      "step": 233000
    },
    {
      "epoch": 4.4,
      "learning_rate": 3.543778946838502e-05,
      "loss": 0.5907,
      "step": 233500
    },
    {
      "epoch": 4.41,
      "learning_rate": 3.5406325513491744e-05,
      "loss": 0.5852,
      "step": 234000
    },
    {
      "epoch": 4.42,
      "learning_rate": 3.537486155859847e-05,
      "loss": 0.5885,
      "step": 234500
    },
    {
      "epoch": 4.43,
      "learning_rate": 3.53433976037052e-05,
      "loss": 0.587,
      "step": 235000
    },
    {
      "epoch": 4.43,
      "learning_rate": 3.531193364881192e-05,
      "loss": 0.5842,
      "step": 235500
    },
    {
      "epoch": 4.44,
      "learning_rate": 3.528046969391865e-05,
      "loss": 0.582,
      "step": 236000
    },
    {
      "epoch": 4.45,
      "learning_rate": 3.524900573902538e-05,
      "loss": 0.584,
      "step": 236500
    },
    {
      "epoch": 4.46,
      "learning_rate": 3.52175417841321e-05,
      "loss": 0.5789,
      "step": 237000
    },
    {
      "epoch": 4.47,
      "learning_rate": 3.518607782923883e-05,
      "loss": 0.5773,
      "step": 237500
    },
    {
      "epoch": 4.48,
      "learning_rate": 3.5154676802255335e-05,
      "loss": 0.5815,
      "step": 238000
    },
    {
      "epoch": 4.49,
      "learning_rate": 3.5123212847362066e-05,
      "loss": 0.5903,
      "step": 238500
    },
    {
      "epoch": 4.5,
      "learning_rate": 3.509174889246879e-05,
      "loss": 0.5781,
      "step": 239000
    },
    {
      "epoch": 4.51,
      "learning_rate": 3.5060284937575514e-05,
      "loss": 0.5819,
      "step": 239500
    },
    {
      "epoch": 4.52,
      "learning_rate": 3.5028820982682245e-05,
      "loss": 0.5849,
      "step": 240000
    },
    {
      "epoch": 4.53,
      "learning_rate": 3.499735702778897e-05,
      "loss": 0.5748,
      "step": 240500
    },
    {
      "epoch": 4.54,
      "learning_rate": 3.496595600080548e-05,
      "loss": 0.5701,
      "step": 241000
    },
    {
      "epoch": 4.55,
      "learning_rate": 3.49344920459122e-05,
      "loss": 0.5803,
      "step": 241500
    },
    {
      "epoch": 4.56,
      "learning_rate": 3.490302809101893e-05,
      "loss": 0.5831,
      "step": 242000
    },
    {
      "epoch": 4.57,
      "learning_rate": 3.487156413612566e-05,
      "loss": 0.5821,
      "step": 242500
    },
    {
      "epoch": 4.58,
      "learning_rate": 3.484010018123238e-05,
      "loss": 0.5774,
      "step": 243000
    },
    {
      "epoch": 4.59,
      "learning_rate": 3.4808636226339105e-05,
      "loss": 0.5732,
      "step": 243500
    },
    {
      "epoch": 4.59,
      "learning_rate": 3.477723519935562e-05,
      "loss": 0.5751,
      "step": 244000
    },
    {
      "epoch": 4.6,
      "learning_rate": 3.4745771244462344e-05,
      "loss": 0.5757,
      "step": 244500
    },
    {
      "epoch": 4.61,
      "learning_rate": 3.471437021747886e-05,
      "loss": 0.5751,
      "step": 245000
    },
    {
      "epoch": 4.62,
      "learning_rate": 3.468290626258558e-05,
      "loss": 0.5764,
      "step": 245500
    },
    {
      "epoch": 4.63,
      "learning_rate": 3.4651442307692314e-05,
      "loss": 0.5809,
      "step": 246000
    },
    {
      "epoch": 4.64,
      "learning_rate": 3.461997835279904e-05,
      "loss": 0.5644,
      "step": 246500
    },
    {
      "epoch": 4.65,
      "learning_rate": 3.458851439790576e-05,
      "loss": 0.5839,
      "step": 247000
    },
    {
      "epoch": 4.66,
      "learning_rate": 3.455705044301249e-05,
      "loss": 0.575,
      "step": 247500
    },
    {
      "epoch": 4.67,
      "learning_rate": 3.4525649416029e-05,
      "loss": 0.5824,
      "step": 248000
    },
    {
      "epoch": 4.68,
      "learning_rate": 3.4494248389045516e-05,
      "loss": 0.5651,
      "step": 248500
    },
    {
      "epoch": 4.69,
      "learning_rate": 3.4462784434152234e-05,
      "loss": 0.5716,
      "step": 249000
    },
    {
      "epoch": 4.7,
      "learning_rate": 3.443132047925896e-05,
      "loss": 0.5771,
      "step": 249500
    },
    {
      "epoch": 4.71,
      "learning_rate": 3.439985652436569e-05,
      "loss": 0.5733,
      "step": 250000
    },
    {
      "epoch": 4.72,
      "learning_rate": 3.436839256947241e-05,
      "loss": 0.5602,
      "step": 250500
    },
    {
      "epoch": 4.73,
      "learning_rate": 3.433692861457914e-05,
      "loss": 0.5777,
      "step": 251000
    },
    {
      "epoch": 4.74,
      "learning_rate": 3.430546465968587e-05,
      "loss": 0.5759,
      "step": 251500
    },
    {
      "epoch": 4.75,
      "learning_rate": 3.427400070479259e-05,
      "loss": 0.565,
      "step": 252000
    },
    {
      "epoch": 4.75,
      "learning_rate": 3.424253674989932e-05,
      "loss": 0.5695,
      "step": 252500
    },
    {
      "epoch": 4.76,
      "learning_rate": 3.421107279500604e-05,
      "loss": 0.571,
      "step": 253000
    },
    {
      "epoch": 4.77,
      "learning_rate": 3.417960884011277e-05,
      "loss": 0.5763,
      "step": 253500
    },
    {
      "epoch": 4.78,
      "learning_rate": 3.4148144885219496e-05,
      "loss": 0.5682,
      "step": 254000
    },
    {
      "epoch": 4.79,
      "learning_rate": 3.4116743858236004e-05,
      "loss": 0.581,
      "step": 254500
    },
    {
      "epoch": 4.8,
      "learning_rate": 3.4085279903342735e-05,
      "loss": 0.5732,
      "step": 255000
    },
    {
      "epoch": 4.81,
      "learning_rate": 3.405381594844946e-05,
      "loss": 0.5692,
      "step": 255500
    },
    {
      "epoch": 4.82,
      "learning_rate": 3.4022351993556183e-05,
      "loss": 0.5647,
      "step": 256000
    },
    {
      "epoch": 4.83,
      "learning_rate": 3.399095096657269e-05,
      "loss": 0.5718,
      "step": 256500
    },
    {
      "epoch": 4.84,
      "learning_rate": 3.395948701167942e-05,
      "loss": 0.5627,
      "step": 257000
    },
    {
      "epoch": 4.85,
      "learning_rate": 3.3928023056786147e-05,
      "loss": 0.5656,
      "step": 257500
    },
    {
      "epoch": 4.86,
      "learning_rate": 3.389655910189287e-05,
      "loss": 0.5648,
      "step": 258000
    },
    {
      "epoch": 4.87,
      "learning_rate": 3.3865095146999595e-05,
      "loss": 0.5692,
      "step": 258500
    },
    {
      "epoch": 4.88,
      "learning_rate": 3.3833631192106326e-05,
      "loss": 0.5646,
      "step": 259000
    },
    {
      "epoch": 4.89,
      "learning_rate": 3.380216723721305e-05,
      "loss": 0.5698,
      "step": 259500
    },
    {
      "epoch": 4.9,
      "learning_rate": 3.3770703282319774e-05,
      "loss": 0.565,
      "step": 260000
    },
    {
      "epoch": 4.91,
      "learning_rate": 3.3739239327426505e-05,
      "loss": 0.564,
      "step": 260500
    },
    {
      "epoch": 4.91,
      "learning_rate": 3.370777537253323e-05,
      "loss": 0.5647,
      "step": 261000
    },
    {
      "epoch": 4.92,
      "learning_rate": 3.3676311417639954e-05,
      "loss": 0.5629,
      "step": 261500
    },
    {
      "epoch": 4.93,
      "learning_rate": 3.3644847462746685e-05,
      "loss": 0.5606,
      "step": 262000
    },
    {
      "epoch": 4.94,
      "learning_rate": 3.36133835078534e-05,
      "loss": 0.5653,
      "step": 262500
    },
    {
      "epoch": 4.95,
      "learning_rate": 3.358198248086992e-05,
      "loss": 0.5625,
      "step": 263000
    },
    {
      "epoch": 4.96,
      "learning_rate": 3.355051852597664e-05,
      "loss": 0.5653,
      "step": 263500
    },
    {
      "epoch": 4.97,
      "learning_rate": 3.351905457108337e-05,
      "loss": 0.5647,
      "step": 264000
    },
    {
      "epoch": 4.98,
      "learning_rate": 3.3487590616190096e-05,
      "loss": 0.5579,
      "step": 264500
    },
    {
      "epoch": 4.99,
      "learning_rate": 3.3456252517116395e-05,
      "loss": 0.5671,
      "step": 265000
    },
    {
      "epoch": 5.0,
      "learning_rate": 3.342478856222312e-05,
      "loss": 0.5667,
      "step": 265500
    },
    {
      "epoch": 5.0,
      "eval_bleu": 34.8464,
      "eval_gen_len": 18.1982,
      "eval_loss": 1.27733314037323,
      "eval_runtime": 9113.7815,
      "eval_samples_per_second": 11.654,
      "eval_steps_per_second": 1.457,
      "step": 265522
    },
    {
      "epoch": 5.0,
      "step": 265522,
      "total_flos": 9.206747421982327e+18,
      "train_loss": 0.865626546989459,
      "train_runtime": 252672.3096,
      "train_samples_per_second": 50.441,
      "train_steps_per_second": 3.153
    }
  ],
  "logging_steps": 500,
  "max_steps": 796560,
  "num_train_epochs": 15,
  "save_steps": 500,
  "total_flos": 9.206747421982327e+18,
  "trial_name": null,
  "trial_params": null
}