|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.999279869067103, |
|
"eval_steps": 500, |
|
"global_step": 2862, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 2471.449204346868, |
|
"learning_rate": 6.968641114982578e-08, |
|
"loss": 13.4628, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2303.520775864023, |
|
"learning_rate": 3.4843205574912896e-07, |
|
"loss": 13.3868, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 795.8114853183038, |
|
"learning_rate": 6.968641114982579e-07, |
|
"loss": 10.7388, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 338.86959104040244, |
|
"learning_rate": 1.045296167247387e-06, |
|
"loss": 7.0822, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 113.06322673541652, |
|
"learning_rate": 1.3937282229965158e-06, |
|
"loss": 4.9472, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 70.94256939141412, |
|
"learning_rate": 1.742160278745645e-06, |
|
"loss": 4.0669, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 44.591095903673946, |
|
"learning_rate": 2.090592334494774e-06, |
|
"loss": 3.5725, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 41.48184329039586, |
|
"learning_rate": 2.4390243902439027e-06, |
|
"loss": 3.4129, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 20.645377248374615, |
|
"learning_rate": 2.7874564459930316e-06, |
|
"loss": 3.2561, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 16.187359900133895, |
|
"learning_rate": 3.13588850174216e-06, |
|
"loss": 3.2526, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 14.846750396994791, |
|
"learning_rate": 3.48432055749129e-06, |
|
"loss": 3.0858, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 12.958327470811641, |
|
"learning_rate": 3.832752613240418e-06, |
|
"loss": 3.0012, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 13.693662433328397, |
|
"learning_rate": 4.181184668989548e-06, |
|
"loss": 3.0081, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 15.276810654725063, |
|
"learning_rate": 4.529616724738676e-06, |
|
"loss": 2.9222, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 17.33816894805981, |
|
"learning_rate": 4.8780487804878055e-06, |
|
"loss": 2.8507, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 24.024677004801813, |
|
"learning_rate": 5.226480836236935e-06, |
|
"loss": 2.6624, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 43.87667199746306, |
|
"learning_rate": 5.574912891986063e-06, |
|
"loss": 2.4261, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 32.4595660306916, |
|
"learning_rate": 5.923344947735193e-06, |
|
"loss": 1.9462, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 11.533913461478601, |
|
"learning_rate": 6.27177700348432e-06, |
|
"loss": 1.4633, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 13.087511184290118, |
|
"learning_rate": 6.62020905923345e-06, |
|
"loss": 1.419, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.6859154191042, |
|
"learning_rate": 6.96864111498258e-06, |
|
"loss": 1.3821, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.5736658937623895, |
|
"learning_rate": 7.317073170731707e-06, |
|
"loss": 1.3866, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.686530124019074, |
|
"learning_rate": 7.665505226480837e-06, |
|
"loss": 1.3449, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.897980447266944, |
|
"learning_rate": 8.013937282229966e-06, |
|
"loss": 1.337, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.953462587844507, |
|
"learning_rate": 8.362369337979095e-06, |
|
"loss": 1.3088, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.225894037236463, |
|
"learning_rate": 8.710801393728223e-06, |
|
"loss": 1.3235, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.213174089881014, |
|
"learning_rate": 9.059233449477352e-06, |
|
"loss": 1.2898, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 7.404200488742776, |
|
"learning_rate": 9.407665505226482e-06, |
|
"loss": 1.3163, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 7.863926965953739, |
|
"learning_rate": 9.756097560975611e-06, |
|
"loss": 1.3108, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 10.623766416300974, |
|
"learning_rate": 1.0104529616724739e-05, |
|
"loss": 1.2919, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 6.899127448786479, |
|
"learning_rate": 1.045296167247387e-05, |
|
"loss": 1.3011, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 8.118460870034067, |
|
"learning_rate": 1.0801393728222997e-05, |
|
"loss": 1.3009, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 10.061037174840157, |
|
"learning_rate": 1.1149825783972127e-05, |
|
"loss": 1.2535, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 7.229218874534891, |
|
"learning_rate": 1.1498257839721256e-05, |
|
"loss": 1.2726, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.146560920991141, |
|
"learning_rate": 1.1846689895470385e-05, |
|
"loss": 1.2501, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.670748071683111, |
|
"learning_rate": 1.2195121951219513e-05, |
|
"loss": 1.2674, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 15.218842690286543, |
|
"learning_rate": 1.254355400696864e-05, |
|
"loss": 1.2773, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 6.87949959525927, |
|
"learning_rate": 1.2891986062717772e-05, |
|
"loss": 1.2612, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 8.006148328482432, |
|
"learning_rate": 1.32404181184669e-05, |
|
"loss": 1.2534, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 11.03906380856099, |
|
"learning_rate": 1.3588850174216028e-05, |
|
"loss": 1.2584, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 7.0212312027786155, |
|
"learning_rate": 1.393728222996516e-05, |
|
"loss": 1.2561, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.271938677452554, |
|
"learning_rate": 1.4285714285714287e-05, |
|
"loss": 1.2451, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 9.566045941232186, |
|
"learning_rate": 1.4634146341463415e-05, |
|
"loss": 1.2518, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.547485843465541, |
|
"learning_rate": 1.4982578397212544e-05, |
|
"loss": 1.2231, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.243035322097222, |
|
"learning_rate": 1.5331010452961673e-05, |
|
"loss": 1.2226, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 7.012865594849951, |
|
"learning_rate": 1.5679442508710803e-05, |
|
"loss": 1.218, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 5.0670688280736575, |
|
"learning_rate": 1.6027874564459932e-05, |
|
"loss": 1.2104, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 7.491110510309622, |
|
"learning_rate": 1.637630662020906e-05, |
|
"loss": 1.2236, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.096740126564542, |
|
"learning_rate": 1.672473867595819e-05, |
|
"loss": 1.2158, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 5.741617269345664, |
|
"learning_rate": 1.7073170731707317e-05, |
|
"loss": 1.2148, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 6.321112302195666, |
|
"learning_rate": 1.7421602787456446e-05, |
|
"loss": 1.1892, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 9.15042804300436, |
|
"learning_rate": 1.7770034843205575e-05, |
|
"loss": 1.2117, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 7.782222009401943, |
|
"learning_rate": 1.8118466898954705e-05, |
|
"loss": 1.2179, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 8.461191557432844, |
|
"learning_rate": 1.8466898954703834e-05, |
|
"loss": 1.2094, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 6.9758450101867595, |
|
"learning_rate": 1.8815331010452963e-05, |
|
"loss": 1.2067, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 8.64992452269596, |
|
"learning_rate": 1.9163763066202093e-05, |
|
"loss": 1.1893, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 4.766916340575965, |
|
"learning_rate": 1.9512195121951222e-05, |
|
"loss": 1.1916, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 6.270777549952219, |
|
"learning_rate": 1.9860627177700348e-05, |
|
"loss": 1.2106, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 11.435846226073695, |
|
"learning_rate": 1.9999933018123898e-05, |
|
"loss": 1.1938, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 4.78322061212292, |
|
"learning_rate": 1.999952368768613e-05, |
|
"loss": 1.2163, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 6.992505092591417, |
|
"learning_rate": 1.9998742254177562e-05, |
|
"loss": 1.1969, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 8.278250286218993, |
|
"learning_rate": 1.9997588746676955e-05, |
|
"loss": 1.1826, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 6.336204671532799, |
|
"learning_rate": 1.9996063208108723e-05, |
|
"loss": 1.2184, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 10.749083054238973, |
|
"learning_rate": 1.999416569524133e-05, |
|
"loss": 1.1912, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 14.015738057799524, |
|
"learning_rate": 1.9991896278685176e-05, |
|
"loss": 1.1791, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 9.308298000431968, |
|
"learning_rate": 1.998925504288997e-05, |
|
"loss": 1.1939, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 7.721871843702987, |
|
"learning_rate": 1.9986242086141584e-05, |
|
"loss": 1.1893, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 4.53702038549258, |
|
"learning_rate": 1.9982857520558413e-05, |
|
"loss": 1.1797, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 5.653051827129472, |
|
"learning_rate": 1.9979101472087175e-05, |
|
"loss": 1.1844, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 6.049532103242861, |
|
"learning_rate": 1.997497408049824e-05, |
|
"loss": 1.1792, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 4.2894488866832745, |
|
"learning_rate": 1.9970475499380444e-05, |
|
"loss": 1.1952, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 4.519743139516047, |
|
"learning_rate": 1.9965605896135336e-05, |
|
"loss": 1.1797, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 7.210976890334568, |
|
"learning_rate": 1.996036545197098e-05, |
|
"loss": 1.1688, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 7.360490904602484, |
|
"learning_rate": 1.9954754361895204e-05, |
|
"loss": 1.1819, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 5.634605983683038, |
|
"learning_rate": 1.994877283470834e-05, |
|
"loss": 1.1816, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 5.486105569363334, |
|
"learning_rate": 1.994242109299545e-05, |
|
"loss": 1.1687, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 4.270847761690053, |
|
"learning_rate": 1.993569937311805e-05, |
|
"loss": 1.1737, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 8.968470894433617, |
|
"learning_rate": 1.992860792520532e-05, |
|
"loss": 1.1888, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 5.144451017370969, |
|
"learning_rate": 1.9921147013144782e-05, |
|
"loss": 1.1791, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 4.783724483229381, |
|
"learning_rate": 1.9913316914572483e-05, |
|
"loss": 1.1767, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 4.370508667715079, |
|
"learning_rate": 1.9905117920862684e-05, |
|
"loss": 1.1738, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 5.260608457808345, |
|
"learning_rate": 1.9896550337116984e-05, |
|
"loss": 1.1637, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 4.526089057363955, |
|
"learning_rate": 1.988761448215299e-05, |
|
"loss": 1.1766, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 6.281635259331809, |
|
"learning_rate": 1.9878310688492452e-05, |
|
"loss": 1.1832, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.840153816416295, |
|
"learning_rate": 1.986863930234888e-05, |
|
"loss": 1.1589, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.387375580106968, |
|
"learning_rate": 1.985860068361466e-05, |
|
"loss": 1.1896, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.853040251096713, |
|
"learning_rate": 1.9848195205847672e-05, |
|
"loss": 1.1673, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.0279054758074935, |
|
"learning_rate": 1.9837423256257388e-05, |
|
"loss": 1.1676, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.930988364748881, |
|
"learning_rate": 1.9826285235690447e-05, |
|
"loss": 1.1573, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 9.778530344012838, |
|
"learning_rate": 1.9814781558615755e-05, |
|
"loss": 1.1817, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 8.331751071585009, |
|
"learning_rate": 1.9802912653109063e-05, |
|
"loss": 1.1626, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.359713991197577, |
|
"learning_rate": 1.9790678960837028e-05, |
|
"loss": 1.1784, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 6.319279029204067, |
|
"learning_rate": 1.977808093704077e-05, |
|
"loss": 1.1444, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 4.261784679520685, |
|
"learning_rate": 1.9765119050518963e-05, |
|
"loss": 1.1696, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.562255366866596, |
|
"learning_rate": 1.9751793783610353e-05, |
|
"loss": 1.156, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 5.491529900434857, |
|
"learning_rate": 1.9738105632175837e-05, |
|
"loss": 1.1659, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 3.828705065643722, |
|
"learning_rate": 1.972405510557999e-05, |
|
"loss": 1.1671, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 14.578719340125375, |
|
"learning_rate": 1.970964272667213e-05, |
|
"loss": 1.1505, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 6.497412444201346, |
|
"learning_rate": 1.969486903176684e-05, |
|
"loss": 1.1556, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 5.871186880472482, |
|
"learning_rate": 1.967973457062404e-05, |
|
"loss": 1.2043, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 5.725341870182259, |
|
"learning_rate": 1.9664239906428494e-05, |
|
"loss": 1.1743, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 5.5398697537902475, |
|
"learning_rate": 1.9648385615768882e-05, |
|
"loss": 1.1707, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 4.705978298786861, |
|
"learning_rate": 1.9632172288616328e-05, |
|
"loss": 1.1609, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 5.676343389899393, |
|
"learning_rate": 1.961560052830245e-05, |
|
"loss": 1.1774, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 4.698576328458431, |
|
"learning_rate": 1.959867095149691e-05, |
|
"loss": 1.1734, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 6.467528351486372, |
|
"learning_rate": 1.9581384188184475e-05, |
|
"loss": 1.1326, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 5.02003522894939, |
|
"learning_rate": 1.9563740881641548e-05, |
|
"loss": 1.1657, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 3.757204477031705, |
|
"learning_rate": 1.9545741688412256e-05, |
|
"loss": 1.183, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 7.078604907089883, |
|
"learning_rate": 1.9527387278284008e-05, |
|
"loss": 1.1494, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 8.355671792914885, |
|
"learning_rate": 1.950867833426258e-05, |
|
"loss": 1.158, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 5.851949131034738, |
|
"learning_rate": 1.9489615552546685e-05, |
|
"loss": 1.1385, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 5.491553294763838, |
|
"learning_rate": 1.9470199642502062e-05, |
|
"loss": 1.1652, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 4.570870036544042, |
|
"learning_rate": 1.945043132663511e-05, |
|
"loss": 1.1492, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 4.548405600691577, |
|
"learning_rate": 1.9430311340565967e-05, |
|
"loss": 1.1621, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 9.868426917291162, |
|
"learning_rate": 1.9409840433001153e-05, |
|
"loss": 1.1725, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 4.815903432492333, |
|
"learning_rate": 1.9389019365705718e-05, |
|
"loss": 1.1543, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 7.652186431719107, |
|
"learning_rate": 1.936784891347486e-05, |
|
"loss": 1.1563, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 8.529575407917209, |
|
"learning_rate": 1.9346329864105144e-05, |
|
"loss": 1.1647, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 6.771510700028247, |
|
"learning_rate": 1.932446301836514e-05, |
|
"loss": 1.1555, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 6.650801396317897, |
|
"learning_rate": 1.9302249189965655e-05, |
|
"loss": 1.1735, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 10.460184343024883, |
|
"learning_rate": 1.9279689205529432e-05, |
|
"loss": 1.1445, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 4.003803419445767, |
|
"learning_rate": 1.925678390456041e-05, |
|
"loss": 1.1332, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 6.6806416615146205, |
|
"learning_rate": 1.9233534139412473e-05, |
|
"loss": 1.1461, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 5.1928131452577215, |
|
"learning_rate": 1.920994077525773e-05, |
|
"loss": 1.149, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 5.298840615644176, |
|
"learning_rate": 1.9186004690054316e-05, |
|
"loss": 1.1615, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 4.98808253588444, |
|
"learning_rate": 1.9161726774513748e-05, |
|
"loss": 1.1671, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 6.644075908662694, |
|
"learning_rate": 1.9137107932067746e-05, |
|
"loss": 1.1568, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 5.7930238178492885, |
|
"learning_rate": 1.9112149078834634e-05, |
|
"loss": 1.1734, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 3.3447018259433716, |
|
"learning_rate": 1.9086851143585242e-05, |
|
"loss": 1.1587, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 4.755387772291263, |
|
"learning_rate": 1.9061215067708355e-05, |
|
"loss": 1.1541, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 5.7446275644458575, |
|
"learning_rate": 1.9035241805175655e-05, |
|
"loss": 1.142, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 10.721898818093187, |
|
"learning_rate": 1.9008932322506264e-05, |
|
"loss": 1.1544, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 15.71994055997216, |
|
"learning_rate": 1.898228759873074e-05, |
|
"loss": 1.1496, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 4.068484218747397, |
|
"learning_rate": 1.8955308625354664e-05, |
|
"loss": 1.1581, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 6.725250072228692, |
|
"learning_rate": 1.8927996406321737e-05, |
|
"loss": 1.1398, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 7.898842956592155, |
|
"learning_rate": 1.8900351957976434e-05, |
|
"loss": 1.1612, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 3.884114377957352, |
|
"learning_rate": 1.887237630902615e-05, |
|
"loss": 1.1636, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 4.280730637085246, |
|
"learning_rate": 1.8844070500502972e-05, |
|
"loss": 1.1498, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 4.33028023683306, |
|
"learning_rate": 1.8815435585724898e-05, |
|
"loss": 1.1519, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 4.582128138117869, |
|
"learning_rate": 1.8786472630256647e-05, |
|
"loss": 1.144, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 4.759734278355914, |
|
"learning_rate": 1.8757182711870028e-05, |
|
"loss": 1.1346, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 6.403681049030226, |
|
"learning_rate": 1.8727566920503806e-05, |
|
"loss": 1.1486, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 6.028223703772312, |
|
"learning_rate": 1.8697626358223172e-05, |
|
"loss": 1.141, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 4.35419137899931, |
|
"learning_rate": 1.866736213917871e-05, |
|
"loss": 1.1487, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 5.273187140872165, |
|
"learning_rate": 1.8636775389564943e-05, |
|
"loss": 1.1181, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 5.5761944785890085, |
|
"learning_rate": 1.8605867247578434e-05, |
|
"loss": 1.1483, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 4.112593812646518, |
|
"learning_rate": 1.857463886337542e-05, |
|
"loss": 1.1436, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 5.053339080254335, |
|
"learning_rate": 1.8543091399029013e-05, |
|
"loss": 1.1352, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 4.437686003383088, |
|
"learning_rate": 1.8511226028485973e-05, |
|
"loss": 1.145, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 7.355407068586111, |
|
"learning_rate": 1.8479043937522996e-05, |
|
"loss": 1.1506, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 7.539050239211431, |
|
"learning_rate": 1.844654632370262e-05, |
|
"loss": 1.1269, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 4.919134005501024, |
|
"learning_rate": 1.8413734396328626e-05, |
|
"loss": 1.1502, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 6.0310614319068225, |
|
"learning_rate": 1.8380609376401072e-05, |
|
"loss": 1.1368, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 4.091506333199426, |
|
"learning_rate": 1.8347172496570825e-05, |
|
"loss": 1.1288, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 7.348329990826859, |
|
"learning_rate": 1.8313425001093724e-05, |
|
"loss": 1.1446, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 6.088051660220532, |
|
"learning_rate": 1.827936814578426e-05, |
|
"loss": 1.1351, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 3.3368256865961192, |
|
"learning_rate": 1.824500319796883e-05, |
|
"loss": 1.1532, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 7.0525333470416385, |
|
"learning_rate": 1.8210331436438607e-05, |
|
"loss": 1.1352, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 6.087701772978905, |
|
"learning_rate": 1.817535415140195e-05, |
|
"loss": 1.1354, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 6.108600123539144, |
|
"learning_rate": 1.8140072644436357e-05, |
|
"loss": 1.1267, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 5.522575604736813, |
|
"learning_rate": 1.8104488228440083e-05, |
|
"loss": 1.1275, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 6.43595422494042, |
|
"learning_rate": 1.8068602227583242e-05, |
|
"loss": 1.1356, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 4.4465035382768985, |
|
"learning_rate": 1.8032415977258552e-05, |
|
"loss": 1.1404, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 12.247642747169323, |
|
"learning_rate": 1.7995930824031632e-05, |
|
"loss": 1.1113, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 7.913656564818845, |
|
"learning_rate": 1.795914812559092e-05, |
|
"loss": 1.1408, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 5.626531877639897, |
|
"learning_rate": 1.7922069250697105e-05, |
|
"loss": 1.1544, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 8.206216865709429, |
|
"learning_rate": 1.7884695579132233e-05, |
|
"loss": 1.1472, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 5.8692952730105095, |
|
"learning_rate": 1.784702850164834e-05, |
|
"loss": 1.138, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 4.366292561467115, |
|
"learning_rate": 1.780906941991571e-05, |
|
"loss": 1.1453, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 7.164465219218278, |
|
"learning_rate": 1.7770819746470717e-05, |
|
"loss": 1.1168, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 10.694172349552868, |
|
"learning_rate": 1.773228090466324e-05, |
|
"loss": 1.1392, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 4.213371036213153, |
|
"learning_rate": 1.769345432860374e-05, |
|
"loss": 1.1485, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 5.2256809607219115, |
|
"learning_rate": 1.765434146310984e-05, |
|
"loss": 1.1756, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 5.34173822476632, |
|
"learning_rate": 1.7614943763652614e-05, |
|
"loss": 1.135, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 4.987888768830567, |
|
"learning_rate": 1.7575262696302378e-05, |
|
"loss": 1.1415, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 4.330467850327936, |
|
"learning_rate": 1.753529973767417e-05, |
|
"loss": 1.128, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 5.651872986630596, |
|
"learning_rate": 1.7495056374872785e-05, |
|
"loss": 1.1147, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 4.391155516454488, |
|
"learning_rate": 1.7454534105437438e-05, |
|
"loss": 1.1379, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 5.610076747772705, |
|
"learning_rate": 1.741373443728605e-05, |
|
"loss": 1.1339, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 9.891118182137568, |
|
"learning_rate": 1.737265888865911e-05, |
|
"loss": 1.1328, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 5.370239636724821, |
|
"learning_rate": 1.73313089880632e-05, |
|
"loss": 1.1231, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 5.802470269440633, |
|
"learning_rate": 1.7289686274214116e-05, |
|
"loss": 1.1343, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 8.54551812341639, |
|
"learning_rate": 1.7247792295979593e-05, |
|
"loss": 1.1324, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 4.536660411564322, |
|
"learning_rate": 1.720562861232168e-05, |
|
"loss": 1.1349, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 4.654292932381109, |
|
"learning_rate": 1.716319679223873e-05, |
|
"loss": 1.1341, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 4.582534517062879, |
|
"learning_rate": 1.7120498414707e-05, |
|
"loss": 1.1136, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 3.775209025573872, |
|
"learning_rate": 1.7077535068621916e-05, |
|
"loss": 1.1434, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 5.764546147359184, |
|
"learning_rate": 1.703430835273893e-05, |
|
"loss": 1.1356, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 4.490335321181705, |
|
"learning_rate": 1.6990819875614033e-05, |
|
"loss": 1.1499, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 4.24278986300239, |
|
"learning_rate": 1.6947071255543894e-05, |
|
"loss": 1.1116, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 4.488782807165637, |
|
"learning_rate": 1.6903064120505638e-05, |
|
"loss": 1.1236, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.143049955368042, |
|
"eval_runtime": 724.0646, |
|
"eval_samples_per_second": 18.674, |
|
"eval_steps_per_second": 1.168, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 4.941796653184966, |
|
"learning_rate": 1.6858800108096277e-05, |
|
"loss": 1.1457, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 4.1194303335536, |
|
"learning_rate": 1.681428086547176e-05, |
|
"loss": 1.0501, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 6.5338416842156954, |
|
"learning_rate": 1.676950804928569e-05, |
|
"loss": 1.0397, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 6.897548552305525, |
|
"learning_rate": 1.672448332562766e-05, |
|
"loss": 1.0465, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 8.80099366693014, |
|
"learning_rate": 1.667920836996127e-05, |
|
"loss": 1.0461, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 3.8823348573817817, |
|
"learning_rate": 1.663368486706177e-05, |
|
"loss": 1.0639, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 3.687412519530629, |
|
"learning_rate": 1.6587914510953366e-05, |
|
"loss": 1.0376, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 3.7446867348164345, |
|
"learning_rate": 1.65418990048462e-05, |
|
"loss": 1.0312, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 9.46358918189822, |
|
"learning_rate": 1.6495640061072933e-05, |
|
"loss": 1.0423, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 5.376874740624564, |
|
"learning_rate": 1.644913940102507e-05, |
|
"loss": 1.0352, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 4.384407797755611, |
|
"learning_rate": 1.640239875508887e-05, |
|
"loss": 1.0642, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 3.484409403055532, |
|
"learning_rate": 1.6355419862580963e-05, |
|
"loss": 1.0585, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 4.705490046789163, |
|
"learning_rate": 1.6308204471683638e-05, |
|
"loss": 1.0509, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 9.480140507427025, |
|
"learning_rate": 1.626075433937977e-05, |
|
"loss": 1.0561, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 6.320705229041983, |
|
"learning_rate": 1.6213071231387463e-05, |
|
"loss": 1.0702, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 7.100919907080032, |
|
"learning_rate": 1.616515692209432e-05, |
|
"loss": 1.0437, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 4.889147099799428, |
|
"learning_rate": 1.6117013194491434e-05, |
|
"loss": 1.0463, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 6.49671062593204, |
|
"learning_rate": 1.606864184010702e-05, |
|
"loss": 1.0368, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 5.063939594461035, |
|
"learning_rate": 1.6020044658939767e-05, |
|
"loss": 1.0512, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 4.993678539189535, |
|
"learning_rate": 1.5971223459391853e-05, |
|
"loss": 1.0466, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 4.152174161520123, |
|
"learning_rate": 1.5922180058201623e-05, |
|
"loss": 1.0458, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 7.684056984318905, |
|
"learning_rate": 1.587291628037604e-05, |
|
"loss": 1.0551, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 6.230157763416368, |
|
"learning_rate": 1.582343395912271e-05, |
|
"loss": 1.0652, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 5.4722165804668474, |
|
"learning_rate": 1.577373493578171e-05, |
|
"loss": 1.0457, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 6.358525310978081, |
|
"learning_rate": 1.5723821059757057e-05, |
|
"loss": 1.0543, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 6.707074018209985, |
|
"learning_rate": 1.5673694188447865e-05, |
|
"loss": 1.0682, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 7.550741262433244, |
|
"learning_rate": 1.5623356187179265e-05, |
|
"loss": 1.0692, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 8.550207165051509, |
|
"learning_rate": 1.557280892913296e-05, |
|
"loss": 1.0522, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 5.001190019178178, |
|
"learning_rate": 1.5522054295277534e-05, |
|
"loss": 1.0537, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 7.3825163490814685, |
|
"learning_rate": 1.5471094174298464e-05, |
|
"loss": 1.0569, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 7.9381841337091075, |
|
"learning_rate": 1.5419930462527823e-05, |
|
"loss": 1.0472, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 4.003408564539475, |
|
"learning_rate": 1.5368565063873723e-05, |
|
"loss": 1.0535, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 4.0895158912327485, |
|
"learning_rate": 1.5316999889749466e-05, |
|
"loss": 1.0488, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 7.080965750010963, |
|
"learning_rate": 1.5265236859002406e-05, |
|
"loss": 1.0569, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 5.4630569455522, |
|
"learning_rate": 1.521327789784257e-05, |
|
"loss": 1.0589, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 6.122651586758779, |
|
"learning_rate": 1.5161124939770946e-05, |
|
"loss": 1.0454, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 4.8133487691459935, |
|
"learning_rate": 1.5108779925507562e-05, |
|
"loss": 1.0404, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 4.661144053516939, |
|
"learning_rate": 1.5056244802919251e-05, |
|
"loss": 1.0462, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 3.7336133423001647, |
|
"learning_rate": 1.500352152694717e-05, |
|
"loss": 1.0615, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 3.361836287645644, |
|
"learning_rate": 1.4950612059534061e-05, |
|
"loss": 1.0607, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 4.3838181406680885, |
|
"learning_rate": 1.4897518369551236e-05, |
|
"loss": 1.0273, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 4.043376281997201, |
|
"learning_rate": 1.4844242432725307e-05, |
|
"loss": 1.055, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 6.853007735241924, |
|
"learning_rate": 1.4790786231564672e-05, |
|
"loss": 1.0437, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 10.206581999723962, |
|
"learning_rate": 1.473715175528574e-05, |
|
"loss": 1.0515, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 4.194823520949151, |
|
"learning_rate": 1.4683340999738908e-05, |
|
"loss": 1.0735, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 3.597455376740427, |
|
"learning_rate": 1.4629355967334297e-05, |
|
"loss": 1.0549, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 4.2815665946590045, |
|
"learning_rate": 1.457519866696722e-05, |
|
"loss": 1.0387, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 5.491828047496545, |
|
"learning_rate": 1.4520871113943447e-05, |
|
"loss": 1.0447, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 3.860062175272888, |
|
"learning_rate": 1.4466375329904208e-05, |
|
"loss": 1.0628, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 4.9122313434357645, |
|
"learning_rate": 1.4411713342750942e-05, |
|
"loss": 1.0532, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 4.602100756121231, |
|
"learning_rate": 1.4356887186569872e-05, |
|
"loss": 1.0586, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 5.781609984355192, |
|
"learning_rate": 1.4301898901556279e-05, |
|
"loss": 1.0656, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 11.013625149813482, |
|
"learning_rate": 1.4246750533938603e-05, |
|
"loss": 1.058, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 10.751593548173304, |
|
"learning_rate": 1.4191444135902277e-05, |
|
"loss": 1.0608, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 5.0372064458923065, |
|
"learning_rate": 1.4135981765513391e-05, |
|
"loss": 1.0568, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 3.8163104825972143, |
|
"learning_rate": 1.4080365486642081e-05, |
|
"loss": 1.0484, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 5.643133158456843, |
|
"learning_rate": 1.402459736888574e-05, |
|
"loss": 1.0678, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 4.275124752790015, |
|
"learning_rate": 1.3968679487492001e-05, |
|
"loss": 1.0418, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 5.994755943768821, |
|
"learning_rate": 1.3912613923281517e-05, |
|
"loss": 1.056, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 4.268786996934314, |
|
"learning_rate": 1.385640276257052e-05, |
|
"loss": 1.0478, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 5.899265844652387, |
|
"learning_rate": 1.3800048097093193e-05, |
|
"loss": 1.0494, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 3.4593443197285616, |
|
"learning_rate": 1.374355202392383e-05, |
|
"loss": 1.0431, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 3.941011327216414, |
|
"learning_rate": 1.3686916645398802e-05, |
|
"loss": 1.046, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 5.354438428493789, |
|
"learning_rate": 1.3630144069038319e-05, |
|
"loss": 1.0611, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 5.084893247749403, |
|
"learning_rate": 1.3573236407468002e-05, |
|
"loss": 1.0545, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 3.614566840626027, |
|
"learning_rate": 1.3516195778340287e-05, |
|
"loss": 1.0538, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 4.479375522226828, |
|
"learning_rate": 1.3459024304255597e-05, |
|
"loss": 1.0501, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 3.815147893554819, |
|
"learning_rate": 1.3401724112683376e-05, |
|
"loss": 1.063, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 3.6702128191200214, |
|
"learning_rate": 1.334429733588291e-05, |
|
"loss": 1.0661, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 3.7643762312101137, |
|
"learning_rate": 1.328674611082398e-05, |
|
"loss": 1.0484, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 4.595318704823077, |
|
"learning_rate": 1.322907257910736e-05, |
|
"loss": 1.0495, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 8.259414827392211, |
|
"learning_rate": 1.3171278886885092e-05, |
|
"loss": 1.0468, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 6.983021244861352, |
|
"learning_rate": 1.311336718478065e-05, |
|
"loss": 1.0428, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 3.4604154720237985, |
|
"learning_rate": 1.3055339627808898e-05, |
|
"loss": 1.0359, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 5.203283636031892, |
|
"learning_rate": 1.2997198375295905e-05, |
|
"loss": 1.0137, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 8.209746338467886, |
|
"learning_rate": 1.293894559079858e-05, |
|
"loss": 1.0354, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 7.476366930367554, |
|
"learning_rate": 1.288058344202417e-05, |
|
"loss": 1.0578, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 6.779053150845288, |
|
"learning_rate": 1.2822114100749606e-05, |
|
"loss": 1.0427, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 10.811521301551252, |
|
"learning_rate": 1.2763539742740656e-05, |
|
"loss": 1.0489, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 5.732863367928463, |
|
"learning_rate": 1.2704862547670999e-05, |
|
"loss": 1.0468, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 4.237973260691363, |
|
"learning_rate": 1.2646084699041077e-05, |
|
"loss": 1.0441, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 3.7534847845244, |
|
"learning_rate": 1.2587208384096874e-05, |
|
"loss": 1.0488, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 6.616430886312652, |
|
"learning_rate": 1.2528235793748497e-05, |
|
"loss": 1.041, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 4.127261189138407, |
|
"learning_rate": 1.246916912248868e-05, |
|
"loss": 1.0471, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 5.720987005615765, |
|
"learning_rate": 1.2410010568311081e-05, |
|
"loss": 1.0517, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 7.098163731134265, |
|
"learning_rate": 1.2350762332628527e-05, |
|
"loss": 1.0506, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 6.017149941075944, |
|
"learning_rate": 1.2291426620191083e-05, |
|
"loss": 1.0447, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 6.341663758405367, |
|
"learning_rate": 1.2232005639003993e-05, |
|
"loss": 1.0457, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 5.313794622995498, |
|
"learning_rate": 1.2172501600245533e-05, |
|
"loss": 1.041, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 3.785201599941644, |
|
"learning_rate": 1.211291671818473e-05, |
|
"loss": 1.0405, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 3.3172990990696203, |
|
"learning_rate": 1.2053253210098954e-05, |
|
"loss": 1.0703, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 3.669903999307619, |
|
"learning_rate": 1.1993513296191415e-05, |
|
"loss": 1.0437, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 5.210511363185043, |
|
"learning_rate": 1.1933699199508537e-05, |
|
"loss": 1.0443, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 5.919975963584953, |
|
"learning_rate": 1.187381314585725e-05, |
|
"loss": 1.0667, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 5.485156735679113, |
|
"learning_rate": 1.1813857363722137e-05, |
|
"loss": 1.0526, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 6.307740435903517, |
|
"learning_rate": 1.1753834084182534e-05, |
|
"loss": 1.041, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 5.332778130985299, |
|
"learning_rate": 1.16937455408295e-05, |
|
"loss": 1.0496, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 3.6888651640927375, |
|
"learning_rate": 1.163359396968268e-05, |
|
"loss": 1.0337, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 5.4302795097120935, |
|
"learning_rate": 1.1573381609107128e-05, |
|
"loss": 1.0447, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 5.85394635833932, |
|
"learning_rate": 1.1513110699729997e-05, |
|
"loss": 1.0258, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 3.1195505935498575, |
|
"learning_rate": 1.1452783484357158e-05, |
|
"loss": 1.0481, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 4.034500170336364, |
|
"learning_rate": 1.139240220788975e-05, |
|
"loss": 1.0608, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 7.4832540724180605, |
|
"learning_rate": 1.1331969117240632e-05, |
|
"loss": 1.0635, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 3.519135260439649, |
|
"learning_rate": 1.1271486461250782e-05, |
|
"loss": 1.0641, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 3.922816976589446, |
|
"learning_rate": 1.1210956490605604e-05, |
|
"loss": 1.0157, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 4.3640097073563435, |
|
"learning_rate": 1.1150381457751184e-05, |
|
"loss": 1.0512, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 3.9156261283505165, |
|
"learning_rate": 1.108976361681046e-05, |
|
"loss": 1.0488, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 5.977684574902863, |
|
"learning_rate": 1.1029105223499348e-05, |
|
"loss": 1.0487, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 3.9167890188933434, |
|
"learning_rate": 1.096840853504281e-05, |
|
"loss": 1.0394, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 5.301914360737118, |
|
"learning_rate": 1.0907675810090836e-05, |
|
"loss": 1.0615, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 5.716954954939279, |
|
"learning_rate": 1.0846909308634426e-05, |
|
"loss": 1.0461, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 5.2063099958507415, |
|
"learning_rate": 1.0786111291921462e-05, |
|
"loss": 1.047, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 3.263771342476951, |
|
"learning_rate": 1.0725284022372575e-05, |
|
"loss": 1.0419, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 3.617367865381047, |
|
"learning_rate": 1.0664429763496964e-05, |
|
"loss": 1.0354, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 2.958598063290221, |
|
"learning_rate": 1.0603550779808143e-05, |
|
"loss": 1.054, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 4.091615124301502, |
|
"learning_rate": 1.0542649336739704e-05, |
|
"loss": 1.0464, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 3.436114429440288, |
|
"learning_rate": 1.048172770056098e-05, |
|
"loss": 1.0364, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 4.63745780224924, |
|
"learning_rate": 1.0420788138292751e-05, |
|
"loss": 1.0583, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 3.912097658596716, |
|
"learning_rate": 1.035983291762285e-05, |
|
"loss": 1.0391, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 3.2877159810294425, |
|
"learning_rate": 1.0298864306821797e-05, |
|
"loss": 1.0371, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 4.443026716057982, |
|
"learning_rate": 1.023788457465839e-05, |
|
"loss": 1.0516, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 6.081470191377994, |
|
"learning_rate": 1.0176895990315267e-05, |
|
"loss": 1.0364, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 4.624505251031281, |
|
"learning_rate": 1.0115900823304486e-05, |
|
"loss": 1.039, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 5.401741569903503, |
|
"learning_rate": 1.005490134338305e-05, |
|
"loss": 1.0479, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 7.229322564190597, |
|
"learning_rate": 9.993899820468454e-06, |
|
"loss": 1.0342, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 3.5183032874516456, |
|
"learning_rate": 9.932898524554225e-06, |
|
"loss": 1.0333, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 3.347409923946262, |
|
"learning_rate": 9.871899725625438e-06, |
|
"loss": 1.0499, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 3.3445974969430003, |
|
"learning_rate": 9.810905693574248e-06, |
|
"loss": 1.0376, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 3.230036863005688, |
|
"learning_rate": 9.74991869811543e-06, |
|
"loss": 1.0592, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 7.050592639671458, |
|
"learning_rate": 9.68894100870191e-06, |
|
"loss": 1.0534, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 6.88446176609171, |
|
"learning_rate": 9.627974894440315e-06, |
|
"loss": 1.0317, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 3.694071592252691, |
|
"learning_rate": 9.567022624006538e-06, |
|
"loss": 1.0322, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 12.63358828271753, |
|
"learning_rate": 9.50608646556131e-06, |
|
"loss": 1.0454, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 4.271510444516047, |
|
"learning_rate": 9.445168686665814e-06, |
|
"loss": 1.0283, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 9.464916396538381, |
|
"learning_rate": 9.38427155419728e-06, |
|
"loss": 1.0499, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 9.021024724009262, |
|
"learning_rate": 9.323397334264646e-06, |
|
"loss": 1.0537, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 5.841040059493859, |
|
"learning_rate": 9.262548292124224e-06, |
|
"loss": 1.0393, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 4.052527005317544, |
|
"learning_rate": 9.201726692095405e-06, |
|
"loss": 1.052, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 3.831946167769705, |
|
"learning_rate": 9.140934797476418e-06, |
|
"loss": 1.0368, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 3.209417072312966, |
|
"learning_rate": 9.080174870460075e-06, |
|
"loss": 1.0381, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 4.143298850639078, |
|
"learning_rate": 9.01944917204961e-06, |
|
"loss": 1.0248, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 8.97118408266604, |
|
"learning_rate": 8.958759961974548e-06, |
|
"loss": 1.0421, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 5.766425122684631, |
|
"learning_rate": 8.898109498606595e-06, |
|
"loss": 1.0374, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 3.2635469534434036, |
|
"learning_rate": 8.837500038875624e-06, |
|
"loss": 1.0386, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 3.4459812418299927, |
|
"learning_rate": 8.776933838185669e-06, |
|
"loss": 1.0555, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 5.811174136486332, |
|
"learning_rate": 8.716413150331008e-06, |
|
"loss": 1.0367, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 6.7399418501577095, |
|
"learning_rate": 8.655940227412289e-06, |
|
"loss": 1.0364, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 5.6557887801081, |
|
"learning_rate": 8.595517319752728e-06, |
|
"loss": 1.0585, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 5.029214138284903, |
|
"learning_rate": 8.535146675814376e-06, |
|
"loss": 1.0405, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 9.11806174239988, |
|
"learning_rate": 8.474830542114435e-06, |
|
"loss": 1.0439, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 9.516048397883825, |
|
"learning_rate": 8.41457116314167e-06, |
|
"loss": 1.0253, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 3.2656951747794625, |
|
"learning_rate": 8.354370781272877e-06, |
|
"loss": 1.0305, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 4.715188227700953, |
|
"learning_rate": 8.294231636689465e-06, |
|
"loss": 1.0496, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 3.837977652358768, |
|
"learning_rate": 8.234155967294062e-06, |
|
"loss": 1.0229, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 3.317122905482544, |
|
"learning_rate": 8.174146008627252e-06, |
|
"loss": 1.0283, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 4.669056895185755, |
|
"learning_rate": 8.114203993784395e-06, |
|
"loss": 1.0319, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 3.503580609840657, |
|
"learning_rate": 8.05433215333251e-06, |
|
"loss": 1.0597, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 6.679578791116039, |
|
"learning_rate": 7.99453271522729e-06, |
|
"loss": 1.0551, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 4.03291121283584, |
|
"learning_rate": 7.934807904730182e-06, |
|
"loss": 1.053, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 3.974267046866222, |
|
"learning_rate": 7.875159944325582e-06, |
|
"loss": 1.0283, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 3.2899614326437336, |
|
"learning_rate": 7.81559105363814e-06, |
|
"loss": 1.027, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 3.9394766828598335, |
|
"learning_rate": 7.75610344935015e-06, |
|
"loss": 1.04, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 3.329983516697602, |
|
"learning_rate": 7.696699345119078e-06, |
|
"loss": 1.0332, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 3.4083535209619815, |
|
"learning_rate": 7.637380951495175e-06, |
|
"loss": 1.0364, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 3.7587112815380594, |
|
"learning_rate": 7.578150475839221e-06, |
|
"loss": 1.0132, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 3.289490770453015, |
|
"learning_rate": 7.519010122240389e-06, |
|
"loss": 1.028, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 3.6046998871593057, |
|
"learning_rate": 7.459962091434214e-06, |
|
"loss": 1.0332, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 3.760562710787192, |
|
"learning_rate": 7.401008580720725e-06, |
|
"loss": 1.0264, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 4.2409061116022615, |
|
"learning_rate": 7.342151783882647e-06, |
|
"loss": 1.0274, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 2.947078094390727, |
|
"learning_rate": 7.283393891103787e-06, |
|
"loss": 1.0289, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 3.1726628559293806, |
|
"learning_rate": 7.224737088887523e-06, |
|
"loss": 1.0572, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 4.844622217255587, |
|
"learning_rate": 7.166183559975442e-06, |
|
"loss": 1.0531, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 4.601632365741536, |
|
"learning_rate": 7.107735483266122e-06, |
|
"loss": 1.042, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 3.4033568597191692, |
|
"learning_rate": 7.049395033734045e-06, |
|
"loss": 1.041, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 2.976801132831753, |
|
"learning_rate": 6.991164382348657e-06, |
|
"loss": 1.038, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 4.683331624785341, |
|
"learning_rate": 6.933045695993583e-06, |
|
"loss": 1.033, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 4.4915348720542765, |
|
"learning_rate": 6.875041137386011e-06, |
|
"loss": 1.0425, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 4.328614660399705, |
|
"learning_rate": 6.8171528649961885e-06, |
|
"loss": 1.0666, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 3.3661426339564544, |
|
"learning_rate": 6.759383032967106e-06, |
|
"loss": 1.0301, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 3.4046579651395765, |
|
"learning_rate": 6.701733791034353e-06, |
|
"loss": 1.0329, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 2.988042842848146, |
|
"learning_rate": 6.644207284446099e-06, |
|
"loss": 1.0257, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 5.49307323091551, |
|
"learning_rate": 6.586805653883292e-06, |
|
"loss": 1.0353, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 6.2880172824529375, |
|
"learning_rate": 6.529531035379969e-06, |
|
"loss": 1.0389, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 3.232113645510535, |
|
"learning_rate": 6.472385560243788e-06, |
|
"loss": 1.0506, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 3.267245694929467, |
|
"learning_rate": 6.4153713549767184e-06, |
|
"loss": 1.0279, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 5.428430801052309, |
|
"learning_rate": 6.358490541195899e-06, |
|
"loss": 1.0314, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 3.4749729536536296, |
|
"learning_rate": 6.301745235554695e-06, |
|
"loss": 1.0263, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 4.098646525920454, |
|
"learning_rate": 6.245137549663938e-06, |
|
"loss": 1.0267, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 4.131318517628498, |
|
"learning_rate": 6.188669590013336e-06, |
|
"loss": 1.0272, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 3.8097622540114737, |
|
"learning_rate": 6.132343457893093e-06, |
|
"loss": 1.0375, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 4.4309158005784415, |
|
"learning_rate": 6.076161249315715e-06, |
|
"loss": 1.0327, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 1.1133288145065308, |
|
"eval_runtime": 703.0311, |
|
"eval_samples_per_second": 19.232, |
|
"eval_steps_per_second": 1.203, |
|
"step": 1909 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 4.249071668513281, |
|
"learning_rate": 6.020125054938024e-06, |
|
"loss": 0.9214, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 5.055780123934922, |
|
"learning_rate": 5.964236959983337e-06, |
|
"loss": 0.9212, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 4.694179141724942, |
|
"learning_rate": 5.9084990441638905e-06, |
|
"loss": 0.8955, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 4.351332216050138, |
|
"learning_rate": 5.852913381603439e-06, |
|
"loss": 0.891, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 3.8555391485550903, |
|
"learning_rate": 5.797482040760074e-06, |
|
"loss": 0.9005, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 4.7519558496900824, |
|
"learning_rate": 5.742207084349274e-06, |
|
"loss": 0.9181, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 3.2398635870297356, |
|
"learning_rate": 5.687090569267102e-06, |
|
"loss": 0.8976, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 3.5430978608418977, |
|
"learning_rate": 5.632134546513706e-06, |
|
"loss": 0.9085, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 4.792275588351722, |
|
"learning_rate": 5.577341061116971e-06, |
|
"loss": 0.9082, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 4.117197709498562, |
|
"learning_rate": 5.52271215205644e-06, |
|
"loss": 0.9053, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 3.4990145490808438, |
|
"learning_rate": 5.468249852187418e-06, |
|
"loss": 0.9152, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 3.6945138791895884, |
|
"learning_rate": 5.413956188165341e-06, |
|
"loss": 0.9102, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 6.156467013329359, |
|
"learning_rate": 5.359833180370353e-06, |
|
"loss": 0.9249, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 4.304905222663046, |
|
"learning_rate": 5.305882842832119e-06, |
|
"loss": 0.9047, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 3.2637208352434053, |
|
"learning_rate": 5.2521071831549e-06, |
|
"loss": 0.9038, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 4.176543547922682, |
|
"learning_rate": 5.1985082024428155e-06, |
|
"loss": 0.904, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 3.762182112865723, |
|
"learning_rate": 5.145087895225402e-06, |
|
"loss": 0.9015, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 3.8322456818240567, |
|
"learning_rate": 5.091848249383379e-06, |
|
"loss": 0.9078, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 5.374245125912279, |
|
"learning_rate": 5.038791246074677e-06, |
|
"loss": 0.9106, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 3.119845088904843, |
|
"learning_rate": 4.985918859660732e-06, |
|
"loss": 0.9011, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 3.8255109222627177, |
|
"learning_rate": 4.933233057632989e-06, |
|
"loss": 0.9134, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 4.761501271204771, |
|
"learning_rate": 4.880735800539703e-06, |
|
"loss": 0.9229, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 6.05508562925834, |
|
"learning_rate": 4.828429041912981e-06, |
|
"loss": 0.8867, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 4.796207233113008, |
|
"learning_rate": 4.77631472819608e-06, |
|
"loss": 0.9075, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 3.945779755295953, |
|
"learning_rate": 4.724394798670997e-06, |
|
"loss": 0.8936, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 3.600172989231373, |
|
"learning_rate": 4.672671185386273e-06, |
|
"loss": 0.8911, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 2.9307496777230195, |
|
"learning_rate": 4.621145813085117e-06, |
|
"loss": 0.914, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 3.3679700528956893, |
|
"learning_rate": 4.569820599133789e-06, |
|
"loss": 0.9087, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 3.1283185101672504, |
|
"learning_rate": 4.518697453450229e-06, |
|
"loss": 0.9038, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 3.0332815455798077, |
|
"learning_rate": 4.467778278432997e-06, |
|
"loss": 0.9029, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 2.8560224185508063, |
|
"learning_rate": 4.4170649688904896e-06, |
|
"loss": 0.8993, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 4.138505450801979, |
|
"learning_rate": 4.366559411970413e-06, |
|
"loss": 0.9113, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 4.147091451785984, |
|
"learning_rate": 4.316263487089567e-06, |
|
"loss": 0.9021, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 5.0252230798049835, |
|
"learning_rate": 4.2661790658639055e-06, |
|
"loss": 0.9073, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 3.7396995461923432, |
|
"learning_rate": 4.216308012038903e-06, |
|
"loss": 0.9054, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 3.101090318931807, |
|
"learning_rate": 4.166652181420177e-06, |
|
"loss": 0.9031, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 2.8090112096853748, |
|
"learning_rate": 4.117213421804445e-06, |
|
"loss": 0.9219, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 2.846914213136753, |
|
"learning_rate": 4.067993572910759e-06, |
|
"loss": 0.8945, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 3.7713155708026025, |
|
"learning_rate": 4.01899446631206e-06, |
|
"loss": 0.9058, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 4.426212271926591, |
|
"learning_rate": 3.9702179253669925e-06, |
|
"loss": 0.9201, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 3.34447893318213, |
|
"learning_rate": 3.921665765152079e-06, |
|
"loss": 0.9091, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 3.325021896505902, |
|
"learning_rate": 3.87333979239417e-06, |
|
"loss": 0.9102, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 3.9805172093526595, |
|
"learning_rate": 3.825241805403201e-06, |
|
"loss": 0.8911, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 3.051925720025111, |
|
"learning_rate": 3.777373594005298e-06, |
|
"loss": 0.9119, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 2.976277469795037, |
|
"learning_rate": 3.729736939476147e-06, |
|
"loss": 0.9046, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 3.565948980725144, |
|
"learning_rate": 3.6823336144747248e-06, |
|
"loss": 0.8884, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 3.036279022520351, |
|
"learning_rate": 3.6351653829773315e-06, |
|
"loss": 0.8966, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 2.933009956642285, |
|
"learning_rate": 3.5882340002119466e-06, |
|
"loss": 0.8934, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 2.8063620875824324, |
|
"learning_rate": 3.541541212592924e-06, |
|
"loss": 0.9062, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 2.998801490585607, |
|
"learning_rate": 3.495088757655989e-06, |
|
"loss": 0.8884, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 2.994625158883341, |
|
"learning_rate": 3.4488783639935875e-06, |
|
"loss": 0.9024, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 3.3040426646542542, |
|
"learning_rate": 3.402911751190565e-06, |
|
"loss": 0.9006, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 3.0317655434741693, |
|
"learning_rate": 3.3571906297601697e-06, |
|
"loss": 0.9018, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 3.876786428428406, |
|
"learning_rate": 3.3117167010804142e-06, |
|
"loss": 0.8914, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 3.028390145887319, |
|
"learning_rate": 3.2664916573307483e-06, |
|
"loss": 0.9047, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 2.920638658748327, |
|
"learning_rate": 3.2215171814290924e-06, |
|
"loss": 0.8753, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 3.49141679057549, |
|
"learning_rate": 3.176794946969227e-06, |
|
"loss": 0.9024, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 5.320476089586041, |
|
"learning_rate": 3.1323266181584967e-06, |
|
"loss": 0.8935, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 2.7341193692518626, |
|
"learning_rate": 3.088113849755885e-06, |
|
"loss": 0.9119, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 3.582698051661512, |
|
"learning_rate": 3.0441582870104537e-06, |
|
"loss": 0.9017, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 3.050447412352792, |
|
"learning_rate": 3.000461565600096e-06, |
|
"loss": 0.8868, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 2.9276450722864533, |
|
"learning_rate": 2.9570253115706802e-06, |
|
"loss": 0.902, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 2.982845965177329, |
|
"learning_rate": 2.9138511412755553e-06, |
|
"loss": 0.8979, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 2.9629675081832874, |
|
"learning_rate": 2.8709406613153757e-06, |
|
"loss": 0.9029, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 2.9123686366195662, |
|
"learning_rate": 2.8282954684783337e-06, |
|
"loss": 0.903, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 3.0747361852985824, |
|
"learning_rate": 2.785917149680738e-06, |
|
"loss": 0.8955, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 2.8450082504921563, |
|
"learning_rate": 2.7438072819079553e-06, |
|
"loss": 0.8982, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 2.7824509006388918, |
|
"learning_rate": 2.70196743215574e-06, |
|
"loss": 0.8972, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 3.441660140747248, |
|
"learning_rate": 2.660399157371907e-06, |
|
"loss": 0.9184, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 2.9535666189745946, |
|
"learning_rate": 2.619104004398403e-06, |
|
"loss": 0.9083, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 2.945617220277961, |
|
"learning_rate": 2.5780835099137446e-06, |
|
"loss": 0.8865, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 4.080867961632198, |
|
"learning_rate": 2.5373392003758333e-06, |
|
"loss": 0.8861, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 3.0916182699712635, |
|
"learning_rate": 2.4968725919651614e-06, |
|
"loss": 0.9059, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 4.036948544595655, |
|
"learning_rate": 2.4566851905283774e-06, |
|
"loss": 0.8916, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 2.8348729308888565, |
|
"learning_rate": 2.4167784915222592e-06, |
|
"loss": 0.8932, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 2.770880339052027, |
|
"learning_rate": 2.3771539799580645e-06, |
|
"loss": 0.895, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 3.02480618804911, |
|
"learning_rate": 2.337813130346267e-06, |
|
"loss": 0.9098, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 2.86365439452731, |
|
"learning_rate": 2.2987574066416963e-06, |
|
"loss": 0.8823, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 2.908651156230197, |
|
"learning_rate": 2.2599882621890467e-06, |
|
"loss": 0.8886, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 3.0260294560826257, |
|
"learning_rate": 2.2215071396688058e-06, |
|
"loss": 0.8981, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 3.0929536634109396, |
|
"learning_rate": 2.1833154710435657e-06, |
|
"loss": 0.8887, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 3.2307304311891607, |
|
"learning_rate": 2.1454146775047334e-06, |
|
"loss": 0.8998, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 3.079765379438669, |
|
"learning_rate": 2.1078061694196584e-06, |
|
"loss": 0.8978, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 3.25508832204567, |
|
"learning_rate": 2.070491346279131e-06, |
|
"loss": 0.8976, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 3.101228338352758, |
|
"learning_rate": 2.033471596645318e-06, |
|
"loss": 0.9015, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 3.113980613378208, |
|
"learning_rate": 1.9967482981000896e-06, |
|
"loss": 0.896, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 2.9253371876802254, |
|
"learning_rate": 1.9603228171937505e-06, |
|
"loss": 0.8978, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 2.9250415822713025, |
|
"learning_rate": 1.9241965093941906e-06, |
|
"loss": 0.8968, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 2.863484130835247, |
|
"learning_rate": 1.8883707190364552e-06, |
|
"loss": 0.8925, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 2.854305197567257, |
|
"learning_rate": 1.8528467792727023e-06, |
|
"loss": 0.8921, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 2.867179607158074, |
|
"learning_rate": 1.8176260120225985e-06, |
|
"loss": 0.8873, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 3.89726631997802, |
|
"learning_rate": 1.7827097279241446e-06, |
|
"loss": 0.8971, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 3.83726092947648, |
|
"learning_rate": 1.7480992262848773e-06, |
|
"loss": 0.8804, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 3.2491479295383745, |
|
"learning_rate": 1.713795795033537e-06, |
|
"loss": 0.883, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 2.8645940468544184, |
|
"learning_rate": 1.6798007106721349e-06, |
|
"loss": 0.8879, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 2.7405780600749976, |
|
"learning_rate": 1.6461152382284528e-06, |
|
"loss": 0.8809, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 2.6974824310455348, |
|
"learning_rate": 1.6127406312089755e-06, |
|
"loss": 0.9056, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 2.9741926536721057, |
|
"learning_rate": 1.5796781315522302e-06, |
|
"loss": 0.9076, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 2.979027483774441, |
|
"learning_rate": 1.546928969582584e-06, |
|
"loss": 0.9061, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 3.153120396277567, |
|
"learning_rate": 1.5144943639644582e-06, |
|
"loss": 0.905, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 3.0257370083553297, |
|
"learning_rate": 1.4823755216569747e-06, |
|
"loss": 0.8955, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 2.9747138105254716, |
|
"learning_rate": 1.4505736378690504e-06, |
|
"loss": 0.9, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 3.7675967084653506, |
|
"learning_rate": 1.4190898960149146e-06, |
|
"loss": 0.9031, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 3.150355208773245, |
|
"learning_rate": 1.3879254676700715e-06, |
|
"loss": 0.8998, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 2.7618129056621985, |
|
"learning_rate": 1.357081512527708e-06, |
|
"loss": 0.8839, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 2.609198769979442, |
|
"learning_rate": 1.3265591783555343e-06, |
|
"loss": 0.8949, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 2.841374706194725, |
|
"learning_rate": 1.296359600953081e-06, |
|
"loss": 0.8835, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 3.8456166110270145, |
|
"learning_rate": 1.2664839041094224e-06, |
|
"loss": 0.8923, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 2.8654585511756516, |
|
"learning_rate": 1.2369331995613664e-06, |
|
"loss": 0.8997, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 2.9668043967339264, |
|
"learning_rate": 1.20770858695208e-06, |
|
"loss": 0.9006, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 2.9613167192574794, |
|
"learning_rate": 1.1788111537901703e-06, |
|
"loss": 0.8983, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 3.0076851964125053, |
|
"learning_rate": 1.150241975409222e-06, |
|
"loss": 0.8932, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 2.8223484329204105, |
|
"learning_rate": 1.1220021149277739e-06, |
|
"loss": 0.8867, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 2.720658641040237, |
|
"learning_rate": 1.0940926232097549e-06, |
|
"loss": 0.9011, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 2.924799576083854, |
|
"learning_rate": 1.0665145388253973e-06, |
|
"loss": 0.8781, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 2.7946470004188595, |
|
"learning_rate": 1.0392688880125657e-06, |
|
"loss": 0.8926, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 2.828608253477476, |
|
"learning_rate": 1.012356684638589e-06, |
|
"loss": 0.8968, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 2.739359169273518, |
|
"learning_rate": 9.857789301625176e-07, |
|
"loss": 0.8875, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 2.6182155083649783, |
|
"learning_rate": 9.595366135978657e-07, |
|
"loss": 0.8944, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 2.6177059224194985, |
|
"learning_rate": 9.336307114758014e-07, |
|
"loss": 0.8887, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 2.576416797510853, |
|
"learning_rate": 9.080621878088203e-07, |
|
"loss": 0.8839, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 2.637953841080034, |
|
"learning_rate": 8.828319940548557e-07, |
|
"loss": 0.9078, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 2.6247174933134483, |
|
"learning_rate": 8.579410690818857e-07, |
|
"loss": 0.8886, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 2.949687521884093, |
|
"learning_rate": 8.333903391329878e-07, |
|
"loss": 0.9008, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 2.944423358262864, |
|
"learning_rate": 8.091807177918776e-07, |
|
"loss": 0.9047, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 2.9646522531704194, |
|
"learning_rate": 7.853131059489139e-07, |
|
"loss": 0.8844, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 2.966790373812422, |
|
"learning_rate": 7.617883917675639e-07, |
|
"loss": 0.9064, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 2.697916099546098, |
|
"learning_rate": 7.38607450651364e-07, |
|
"loss": 0.9079, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 2.6436566184382637, |
|
"learning_rate": 7.15771145211337e-07, |
|
"loss": 0.9001, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 2.8299408422439014, |
|
"learning_rate": 6.932803252338971e-07, |
|
"loss": 0.8909, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 2.583637700212308, |
|
"learning_rate": 6.711358276492296e-07, |
|
"loss": 0.8961, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 2.636621286289657, |
|
"learning_rate": 6.493384765001376e-07, |
|
"loss": 0.8953, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 3.039071095421338, |
|
"learning_rate": 6.278890829113859e-07, |
|
"loss": 0.8805, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 2.59410799453376, |
|
"learning_rate": 6.067884450595151e-07, |
|
"loss": 0.8834, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 2.7640084564071565, |
|
"learning_rate": 5.86037348143137e-07, |
|
"loss": 0.9015, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 2.858078806597894, |
|
"learning_rate": 5.656365643537242e-07, |
|
"loss": 0.8876, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 2.6926086058996224, |
|
"learning_rate": 5.455868528468633e-07, |
|
"loss": 0.8955, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 2.538884415285918, |
|
"learning_rate": 5.258889597140159e-07, |
|
"loss": 0.888, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 2.580184176939726, |
|
"learning_rate": 5.065436179547434e-07, |
|
"loss": 0.9144, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 2.7245446056714417, |
|
"learning_rate": 4.875515474494474e-07, |
|
"loss": 0.892, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 2.6948585208867857, |
|
"learning_rate": 4.6891345493256355e-07, |
|
"loss": 0.9, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 2.9496107697627907, |
|
"learning_rate": 4.506300339662717e-07, |
|
"loss": 0.8891, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 2.829095677015343, |
|
"learning_rate": 4.327019649146891e-07, |
|
"loss": 0.8819, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 2.642449203785096, |
|
"learning_rate": 4.1512991491854835e-07, |
|
"loss": 0.8922, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 2.9150945067532565, |
|
"learning_rate": 3.979145378703675e-07, |
|
"loss": 0.8804, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 2.842424361230579, |
|
"learning_rate": 3.8105647439013016e-07, |
|
"loss": 0.8809, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 3.088557066884229, |
|
"learning_rate": 3.64556351801435e-07, |
|
"loss": 0.8928, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 2.7854861115512244, |
|
"learning_rate": 3.484147841081542e-07, |
|
"loss": 0.897, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 2.8139786676438754, |
|
"learning_rate": 3.3263237197158957e-07, |
|
"loss": 0.9091, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 2.826501403811379, |
|
"learning_rate": 3.1720970268811823e-07, |
|
"loss": 0.8762, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 2.710567942688563, |
|
"learning_rate": 3.0214735016733444e-07, |
|
"loss": 0.8934, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 2.5830469185455627, |
|
"learning_rate": 2.8744587491069897e-07, |
|
"loss": 0.907, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 2.882704553478676, |
|
"learning_rate": 2.7310582399067807e-07, |
|
"loss": 0.8756, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 2.6120870241075815, |
|
"learning_rate": 2.5912773103038635e-07, |
|
"loss": 0.8977, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 2.6242506670948957, |
|
"learning_rate": 2.455121161837337e-07, |
|
"loss": 0.9115, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 2.6270537313733433, |
|
"learning_rate": 2.3225948611605985e-07, |
|
"loss": 0.8866, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 2.597009319998751, |
|
"learning_rate": 2.193703339852904e-07, |
|
"loss": 0.8962, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 2.948869160302508, |
|
"learning_rate": 2.068451394235793e-07, |
|
"loss": 0.9157, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 2.7548638761240105, |
|
"learning_rate": 1.9468436851946104e-07, |
|
"loss": 0.8924, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 3.676159010882983, |
|
"learning_rate": 1.828884738005121e-07, |
|
"loss": 0.9018, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 5.196994066852168, |
|
"learning_rate": 1.714578942165057e-07, |
|
"loss": 0.8885, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 2.725846054059595, |
|
"learning_rate": 1.603930551230759e-07, |
|
"loss": 0.8931, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 2.590954963860932, |
|
"learning_rate": 1.496943682658958e-07, |
|
"loss": 0.8787, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 2.9931138194819686, |
|
"learning_rate": 1.3936223176535202e-07, |
|
"loss": 0.8893, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 2.7502498043446253, |
|
"learning_rate": 1.293970301017311e-07, |
|
"loss": 0.8977, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 2.6085358527855, |
|
"learning_rate": 1.197991341009086e-07, |
|
"loss": 0.8848, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 2.663850569887782, |
|
"learning_rate": 1.105689009205535e-07, |
|
"loss": 0.8984, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 2.654541400158695, |
|
"learning_rate": 1.0170667403683665e-07, |
|
"loss": 0.8986, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 2.6843751332397963, |
|
"learning_rate": 9.321278323165206e-08, |
|
"loss": 0.8812, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 2.647204212028708, |
|
"learning_rate": 8.508754458033896e-08, |
|
"loss": 0.8994, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 2.5713318791915, |
|
"learning_rate": 7.733126043992233e-08, |
|
"loss": 0.8873, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 2.8099466501274653, |
|
"learning_rate": 6.994421943786744e-08, |
|
"loss": 0.8865, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 2.606410824683423, |
|
"learning_rate": 6.292669646132953e-08, |
|
"loss": 0.9034, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 2.777447906202334, |
|
"learning_rate": 5.627895264693206e-08, |
|
"loss": 0.8938, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 2.60668480116358, |
|
"learning_rate": 5.000123537104884e-08, |
|
"loss": 0.8937, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 2.7053738556066027, |
|
"learning_rate": 4.409377824059147e-08, |
|
"loss": 0.8864, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 2.5879320235988477, |
|
"learning_rate": 3.8556801084326244e-08, |
|
"loss": 0.8861, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 2.7280775051210173, |
|
"learning_rate": 3.339050994468629e-08, |
|
"loss": 0.883, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 2.8137552020580197, |
|
"learning_rate": 2.8595097070108768e-08, |
|
"loss": 0.8934, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 2.5795053811327766, |
|
"learning_rate": 2.417074090788063e-08, |
|
"loss": 0.905, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 2.5479818159053753, |
|
"learning_rate": 2.0117606097492803e-08, |
|
"loss": 0.8974, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 2.8848145786085286, |
|
"learning_rate": 1.6435843464522873e-08, |
|
"loss": 0.8818, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 2.5452248772424206, |
|
"learning_rate": 1.312559001501179e-08, |
|
"loss": 0.8917, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 2.5430757426637087, |
|
"learning_rate": 1.018696893037685e-08, |
|
"loss": 0.8943, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 2.611596061651081, |
|
"learning_rate": 7.620089562817568e-09, |
|
"loss": 0.882, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 2.639539038040382, |
|
"learning_rate": 5.425047431254493e-09, |
|
"loss": 0.8962, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 2.763303802948848, |
|
"learning_rate": 3.6019242177698368e-09, |
|
"loss": 0.88, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 2.80021735611162, |
|
"learning_rate": 2.1507877645687846e-09, |
|
"loss": 0.9019, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 2.988440927578717, |
|
"learning_rate": 1.0716920714570755e-09, |
|
"loss": 0.8902, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 2.922057759677876, |
|
"learning_rate": 3.646772938292742e-10, |
|
"loss": 0.8754, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 2.498818874159702, |
|
"learning_rate": 2.976974117552267e-11, |
|
"loss": 0.8854, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 1.1229028701782227, |
|
"eval_runtime": 702.092, |
|
"eval_samples_per_second": 19.258, |
|
"eval_steps_per_second": 1.205, |
|
"step": 2862 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 2862, |
|
"total_flos": 788305947918336.0, |
|
"train_loss": 0.29867510463206776, |
|
"train_runtime": 26871.7902, |
|
"train_samples_per_second": 13.643, |
|
"train_steps_per_second": 0.107 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2862, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"total_flos": 788305947918336.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |