{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9990375360923965,
  "eval_steps": 500,
  "global_step": 519,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0019249278152069298,
      "grad_norm": 166.78744506835938,
      "learning_rate": 5.769230769230769e-06,
      "loss": 32.2268,
      "step": 1
    },
    {
      "epoch": 0.009624639076034648,
      "grad_norm": 132.89913940429688,
      "learning_rate": 2.8846153846153845e-05,
      "loss": 29.726,
      "step": 5
    },
    {
      "epoch": 0.019249278152069296,
      "grad_norm": 64.4798812866211,
      "learning_rate": 5.769230769230769e-05,
      "loss": 25.1178,
      "step": 10
    },
    {
      "epoch": 0.028873917228103944,
      "grad_norm": 12.633763313293457,
      "learning_rate": 8.653846153846152e-05,
      "loss": 19.6514,
      "step": 15
    },
    {
      "epoch": 0.03849855630413859,
      "grad_norm": 9.107071876525879,
      "learning_rate": 0.00011538461538461538,
      "loss": 17.2219,
      "step": 20
    },
    {
      "epoch": 0.04812319538017324,
      "grad_norm": 4.696881294250488,
      "learning_rate": 0.00014423076923076922,
      "loss": 15.4404,
      "step": 25
    },
    {
      "epoch": 0.05774783445620789,
      "grad_norm": 3.118898868560791,
      "learning_rate": 0.00017307692307692304,
      "loss": 14.695,
      "step": 30
    },
    {
      "epoch": 0.06737247353224254,
      "grad_norm": 4.403642654418945,
      "learning_rate": 0.00020192307692307691,
      "loss": 13.7845,
      "step": 35
    },
    {
      "epoch": 0.07699711260827719,
      "grad_norm": 7.105109214782715,
      "learning_rate": 0.00023076923076923076,
      "loss": 12.8307,
      "step": 40
    },
    {
      "epoch": 0.08662175168431184,
      "grad_norm": 16.107545852661133,
      "learning_rate": 0.0002596153846153846,
      "loss": 10.7864,
      "step": 45
    },
    {
      "epoch": 0.09624639076034648,
      "grad_norm": 24.628265380859375,
      "learning_rate": 0.00028846153846153843,
      "loss": 7.478,
      "step": 50
    },
    {
      "epoch": 0.10587102983638114,
      "grad_norm": 8.615938186645508,
      "learning_rate": 0.00029996945395293625,
      "loss": 3.5198,
      "step": 55
    },
    {
      "epoch": 0.11549566891241578,
      "grad_norm": 5.187551021575928,
      "learning_rate": 0.0002997828287165724,
      "loss": 2.5417,
      "step": 60
    },
    {
      "epoch": 0.12512030798845045,
      "grad_norm": 2.919438362121582,
      "learning_rate": 0.00029942675913693153,
      "loss": 2.0943,
      "step": 65
    },
    {
      "epoch": 0.1347449470644851,
      "grad_norm": 1.5105671882629395,
      "learning_rate": 0.0002989016480237121,
      "loss": 1.8019,
      "step": 70
    },
    {
      "epoch": 0.14436958614051973,
      "grad_norm": 0.6488280892372131,
      "learning_rate": 0.0002982080894176895,
      "loss": 1.6616,
      "step": 75
    },
    {
      "epoch": 0.15399422521655437,
      "grad_norm": 1.0447803735733032,
      "learning_rate": 0.0002973468679186978,
      "loss": 1.5605,
      "step": 80
    },
    {
      "epoch": 0.16361886429258904,
      "grad_norm": 0.5504654049873352,
      "learning_rate": 0.0002963189577980372,
      "loss": 1.5028,
      "step": 85
    },
    {
      "epoch": 0.17324350336862368,
      "grad_norm": 0.7152052521705627,
      "learning_rate": 0.0002951255218963139,
      "loss": 1.4363,
      "step": 90
    },
    {
      "epoch": 0.18286814244465832,
      "grad_norm": 1.1773169040679932,
      "learning_rate": 0.0002937679103079571,
      "loss": 1.4284,
      "step": 95
    },
    {
      "epoch": 0.19249278152069296,
      "grad_norm": 0.908633828163147,
      "learning_rate": 0.00029224765885390143,
      "loss": 1.3772,
      "step": 100
    },
    {
      "epoch": 0.20211742059672763,
      "grad_norm": 0.9546915292739868,
      "learning_rate": 0.0002905664873441643,
      "loss": 1.3789,
      "step": 105
    },
    {
      "epoch": 0.21174205967276227,
      "grad_norm": 0.6474579572677612,
      "learning_rate": 0.00028872629763228145,
      "loss": 1.3446,
      "step": 110
    },
    {
      "epoch": 0.22136669874879691,
      "grad_norm": 0.6460303068161011,
      "learning_rate": 0.0002867291714638035,
      "loss": 1.3102,
      "step": 115
    },
    {
      "epoch": 0.23099133782483156,
      "grad_norm": 0.8272395730018616,
      "learning_rate": 0.0002845773681212862,
      "loss": 1.3315,
      "step": 120
    },
    {
      "epoch": 0.24061597690086622,
      "grad_norm": 1.1858478784561157,
      "learning_rate": 0.00028227332186843884,
      "loss": 1.3232,
      "step": 125
    },
    {
      "epoch": 0.2502406159769009,
      "grad_norm": 1.020283579826355,
      "learning_rate": 0.0002798196391963229,
      "loss": 1.298,
      "step": 130
    },
    {
      "epoch": 0.2598652550529355,
      "grad_norm": 1.3490053415298462,
      "learning_rate": 0.0002772190958747147,
      "loss": 1.3026,
      "step": 135
    },
    {
      "epoch": 0.2694898941289702,
      "grad_norm": 1.1797901391983032,
      "learning_rate": 0.00027447463381196973,
      "loss": 1.2889,
      "step": 140
    },
    {
      "epoch": 0.2791145332050048,
      "grad_norm": 1.916670799255371,
      "learning_rate": 0.0002715893577269389,
      "loss": 1.2773,
      "step": 145
    },
    {
      "epoch": 0.28873917228103946,
      "grad_norm": 0.5742417573928833,
      "learning_rate": 0.0002685665316367035,
      "loss": 1.2937,
      "step": 150
    },
    {
      "epoch": 0.2983638113570741,
      "grad_norm": 0.6991013884544373,
      "learning_rate": 0.0002654095751641007,
      "loss": 1.2667,
      "step": 155
    },
    {
      "epoch": 0.30798845043310874,
      "grad_norm": 1.23212468624115,
      "learning_rate": 0.00026212205966921786,
      "loss": 1.2571,
      "step": 160
    },
    {
      "epoch": 0.3176130895091434,
      "grad_norm": 0.8666950464248657,
      "learning_rate": 0.0002587077042092314,
      "loss": 1.2649,
      "step": 165
    },
    {
      "epoch": 0.3272377285851781,
      "grad_norm": 0.945165753364563,
      "learning_rate": 0.00025517037133116085,
      "loss": 1.2262,
      "step": 170
    },
    {
      "epoch": 0.3368623676612127,
      "grad_norm": 0.9452968835830688,
      "learning_rate": 0.0002515140627022976,
      "loss": 1.2332,
      "step": 175
    },
    {
      "epoch": 0.34648700673724736,
      "grad_norm": 0.7168753743171692,
      "learning_rate": 0.00024774291458325127,
      "loss": 1.2303,
      "step": 180
    },
    {
      "epoch": 0.35611164581328203,
      "grad_norm": 1.55524742603302,
      "learning_rate": 0.00024386119314873578,
      "loss": 1.2258,
      "step": 185
    },
    {
      "epoch": 0.36573628488931664,
      "grad_norm": 0.549445629119873,
      "learning_rate": 0.00023987328966138704,
      "loss": 1.2098,
      "step": 190
    },
    {
      "epoch": 0.3753609239653513,
      "grad_norm": 0.6080393195152283,
      "learning_rate": 0.00023578371550407354,
      "loss": 1.2385,
      "step": 195
    },
    {
      "epoch": 0.3849855630413859,
      "grad_norm": 2.411741256713867,
      "learning_rate": 0.0002315970970763186,
      "loss": 1.2144,
      "step": 200
    },
    {
      "epoch": 0.3946102021174206,
      "grad_norm": 1.1420116424560547,
      "learning_rate": 0.00022731817056060802,
      "loss": 1.1943,
      "step": 205
    },
    {
      "epoch": 0.40423484119345526,
      "grad_norm": 1.1481698751449585,
      "learning_rate": 0.00022295177656450404,
      "loss": 1.2004,
      "step": 210
    },
    {
      "epoch": 0.4138594802694899,
      "grad_norm": 3.898259401321411,
      "learning_rate": 0.00021850285464462677,
      "loss": 1.2195,
      "step": 215
    },
    {
      "epoch": 0.42348411934552455,
      "grad_norm": 0.6957814693450928,
      "learning_rate": 0.0002139764377186976,
      "loss": 1.2159,
      "step": 220
    },
    {
      "epoch": 0.4331087584215592,
      "grad_norm": 0.9523270726203918,
      "learning_rate": 0.00020937764637196638,
      "loss": 1.186,
      "step": 225
    },
    {
      "epoch": 0.44273339749759383,
      "grad_norm": 1.081147313117981,
      "learning_rate": 0.00020471168306446336,
      "loss": 1.2156,
      "step": 230
    },
    {
      "epoch": 0.4523580365736285,
      "grad_norm": 2.2060320377349854,
      "learning_rate": 0.0001999838262456287,
      "loss": 1.1998,
      "step": 235
    },
    {
      "epoch": 0.4619826756496631,
      "grad_norm": 1.1269886493682861,
      "learning_rate": 0.0001951994243829781,
      "loss": 1.2017,
      "step": 240
    },
    {
      "epoch": 0.4716073147256978,
      "grad_norm": 1.608312964439392,
      "learning_rate": 0.00019036388991155846,
      "loss": 1.1873,
      "step": 245
    },
    {
      "epoch": 0.48123195380173245,
      "grad_norm": 1.0753167867660522,
      "learning_rate": 0.0001854826931110403,
      "loss": 1.1984,
      "step": 250
    },
    {
      "epoch": 0.49085659287776706,
      "grad_norm": 1.3986575603485107,
      "learning_rate": 0.0001805613559173714,
      "loss": 1.181,
      "step": 255
    },
    {
      "epoch": 0.5004812319538018,
      "grad_norm": 0.47483909130096436,
      "learning_rate": 0.0001756054456759944,
      "loss": 1.173,
      "step": 260
    },
    {
      "epoch": 0.5101058710298364,
      "grad_norm": 0.8258511424064636,
      "learning_rate": 0.00017062056884369325,
      "loss": 1.1736,
      "step": 265
    },
    {
      "epoch": 0.519730510105871,
      "grad_norm": 0.7987772822380066,
      "learning_rate": 0.0001656123646461951,
      "loss": 1.1616,
      "step": 270
    },
    {
      "epoch": 0.5293551491819056,
      "grad_norm": 0.7245559692382812,
      "learning_rate": 0.00016058649869870098,
      "loss": 1.1846,
      "step": 275
    },
    {
      "epoch": 0.5389797882579404,
      "grad_norm": 1.228034496307373,
      "learning_rate": 0.00015554865659656367,
      "loss": 1.165,
      "step": 280
    },
    {
      "epoch": 0.548604427333975,
      "grad_norm": 0.8788666725158691,
      "learning_rate": 0.00015050453748336224,
      "loss": 1.1707,
      "step": 285
    },
    {
      "epoch": 0.5582290664100096,
      "grad_norm": 0.7828085422515869,
      "learning_rate": 0.00014545984760365,
      "loss": 1.1609,
      "step": 290
    },
    {
      "epoch": 0.5678537054860443,
      "grad_norm": 0.7824317216873169,
      "learning_rate": 0.00014042029384766938,
      "loss": 1.1656,
      "step": 295
    },
    {
      "epoch": 0.5774783445620789,
      "grad_norm": 0.9950030446052551,
      "learning_rate": 0.00013539157729533678,
      "loss": 1.1626,
      "step": 300
    },
    {
      "epoch": 0.5871029836381135,
      "grad_norm": 1.1628440618515015,
      "learning_rate": 0.00013037938676679957,
      "loss": 1.1633,
      "step": 305
    },
    {
      "epoch": 0.5967276227141483,
      "grad_norm": 0.7875819802284241,
      "learning_rate": 0.00012538939238686286,
      "loss": 1.1721,
      "step": 310
    },
    {
      "epoch": 0.6063522617901829,
      "grad_norm": 1.3848762512207031,
      "learning_rate": 0.0001204272391705654,
      "loss": 1.1667,
      "step": 315
    },
    {
      "epoch": 0.6159769008662175,
      "grad_norm": 0.8932669162750244,
      "learning_rate": 0.00011549854063716169,
      "loss": 1.1761,
      "step": 320
    },
    {
      "epoch": 0.6256015399422522,
      "grad_norm": 0.7711289525032043,
      "learning_rate": 0.00011060887245973355,
      "loss": 1.1694,
      "step": 325
    },
    {
      "epoch": 0.6352261790182868,
      "grad_norm": 2.6940572261810303,
      "learning_rate": 0.00010576376615761647,
      "loss": 1.1535,
      "step": 330
    },
    {
      "epoch": 0.6448508180943214,
      "grad_norm": 0.9238442778587341,
      "learning_rate": 0.00010096870283877523,
      "loss": 1.1737,
      "step": 335
    },
    {
      "epoch": 0.6544754571703562,
      "grad_norm": 1.135506510734558,
      "learning_rate": 9.62291069992085e-05,
      "loss": 1.1513,
      "step": 340
    },
    {
      "epoch": 0.6641000962463908,
      "grad_norm": 0.5180495381355286,
      "learning_rate": 9.155034038639637e-05,
      "loss": 1.1538,
      "step": 345
    },
    {
      "epoch": 0.6737247353224254,
      "grad_norm": 0.5448501706123352,
      "learning_rate": 8.693769593373337e-05,
      "loss": 1.1342,
      "step": 350
    },
    {
      "epoch": 0.6833493743984601,
      "grad_norm": 0.47162362933158875,
      "learning_rate": 8.239639177280888e-05,
      "loss": 1.1429,
      "step": 355
    },
    {
      "epoch": 0.6929740134744947,
      "grad_norm": 0.599886417388916,
      "learning_rate": 7.793156533030761e-05,
      "loss": 1.1604,
      "step": 360
    },
    {
      "epoch": 0.7025986525505293,
      "grad_norm": 0.5403456687927246,
      "learning_rate": 7.354826751620954e-05,
      "loss": 1.1378,
      "step": 365
    },
    {
      "epoch": 0.7122232916265641,
      "grad_norm": 0.8314358592033386,
      "learning_rate": 6.925145700986301e-05,
      "loss": 1.1433,
      "step": 370
    },
    {
      "epoch": 0.7218479307025987,
      "grad_norm": 1.6931346654891968,
      "learning_rate": 6.504599465039542e-05,
      "loss": 1.1502,
      "step": 375
    },
    {
      "epoch": 0.7314725697786333,
      "grad_norm": 0.534213125705719,
      "learning_rate": 6.093663793780725e-05,
      "loss": 1.1438,
      "step": 380
    },
    {
      "epoch": 0.7410972088546679,
      "grad_norm": 0.9398422837257385,
      "learning_rate": 5.692803565096988e-05,
      "loss": 1.1542,
      "step": 385
    },
    {
      "epoch": 0.7507218479307026,
      "grad_norm": 1.0625075101852417,
      "learning_rate": 5.302472258861687e-05,
      "loss": 1.1346,
      "step": 390
    },
    {
      "epoch": 0.7603464870067372,
      "grad_norm": 0.48856019973754883,
      "learning_rate": 4.923111443927615e-05,
      "loss": 1.1385,
      "step": 395
    },
    {
      "epoch": 0.7699711260827719,
      "grad_norm": 0.4903761148452759,
      "learning_rate": 4.5551502785948405e-05,
      "loss": 1.1556,
      "step": 400
    },
    {
      "epoch": 0.7795957651588066,
      "grad_norm": 0.6831708550453186,
      "learning_rate": 4.199005025118158e-05,
      "loss": 1.1529,
      "step": 405
    },
    {
      "epoch": 0.7892204042348412,
      "grad_norm": 0.4595310688018799,
      "learning_rate": 3.855078578803424e-05,
      "loss": 1.1267,
      "step": 410
    },
    {
      "epoch": 0.7988450433108758,
      "grad_norm": 0.652646005153656,
      "learning_rate": 3.5237600122254437e-05,
      "loss": 1.1412,
      "step": 415
    },
    {
      "epoch": 0.8084696823869105,
      "grad_norm": 0.6624627113342285,
      "learning_rate": 3.2054241350831046e-05,
      "loss": 1.1323,
      "step": 420
    },
    {
      "epoch": 0.8180943214629451,
      "grad_norm": 0.4390796720981598,
      "learning_rate": 2.9004310701895837e-05,
      "loss": 1.13,
      "step": 425
    },
    {
      "epoch": 0.8277189605389798,
      "grad_norm": 0.9988071918487549,
      "learning_rate": 2.6091258460773862e-05,
      "loss": 1.1343,
      "step": 430
    },
    {
      "epoch": 0.8373435996150145,
      "grad_norm": 0.481611430644989,
      "learning_rate": 2.3318380066789787e-05,
      "loss": 1.1217,
      "step": 435
    },
    {
      "epoch": 0.8469682386910491,
      "grad_norm": 0.49289077520370483,
      "learning_rate": 2.0688812385247176e-05,
      "loss": 1.1299,
      "step": 440
    },
    {
      "epoch": 0.8565928777670837,
      "grad_norm": 0.5225491523742676,
      "learning_rate": 1.8205530158796505e-05,
      "loss": 1.119,
      "step": 445
    },
    {
      "epoch": 0.8662175168431184,
      "grad_norm": 0.5030360817909241,
      "learning_rate": 1.587134264220778e-05,
      "loss": 1.1305,
      "step": 450
    },
    {
      "epoch": 0.875842155919153,
      "grad_norm": 0.4824385643005371,
      "learning_rate": 1.3688890424353726e-05,
      "loss": 1.1192,
      "step": 455
    },
    {
      "epoch": 0.8854667949951877,
      "grad_norm": 0.5466002821922302,
      "learning_rate": 1.1660642440999196e-05,
      "loss": 1.1396,
      "step": 460
    },
    {
      "epoch": 0.8950914340712224,
      "grad_norm": 0.5023711323738098,
      "learning_rate": 9.788893181776297e-06,
      "loss": 1.1369,
      "step": 465
    },
    {
      "epoch": 0.904716073147257,
      "grad_norm": 0.4702525734901428,
      "learning_rate": 8.07576009450408e-06,
      "loss": 1.1296,
      "step": 470
    },
    {
      "epoch": 0.9143407122232916,
      "grad_norm": 0.47570234537124634,
      "learning_rate": 6.5231811897903714e-06,
      "loss": 1.1365,
      "step": 475
    },
    {
      "epoch": 0.9239653512993262,
      "grad_norm": 0.4453733265399933,
      "learning_rate": 5.13291284862452e-06,
      "loss": 1.1219,
      "step": 480
    },
    {
      "epoch": 0.933589990375361,
      "grad_norm": 0.4282439053058624,
      "learning_rate": 3.906527835442064e-06,
      "loss": 1.1193,
      "step": 485
    },
    {
      "epoch": 0.9432146294513956,
      "grad_norm": 0.4609365165233612,
      "learning_rate": 2.8454135189082684e-06,
      "loss": 1.145,
      "step": 490
    },
    {
      "epoch": 0.9528392685274302,
      "grad_norm": 0.46295586228370667,
      "learning_rate": 1.950770302434157e-06,
      "loss": 1.1418,
      "step": 495
    },
    {
      "epoch": 0.9624639076034649,
      "grad_norm": 0.4474254250526428,
      "learning_rate": 1.223610266200009e-06,
      "loss": 1.1285,
      "step": 500
    },
    {
      "epoch": 0.9720885466794995,
      "grad_norm": 0.5148175358772278,
      "learning_rate": 6.647560222224957e-07,
      "loss": 1.1177,
      "step": 505
    },
    {
      "epoch": 0.9817131857555341,
      "grad_norm": 0.4380834400653839,
      "learning_rate": 2.748397837611105e-07,
      "loss": 1.14,
      "step": 510
    },
    {
      "epoch": 0.9913378248315688,
      "grad_norm": 0.5519540309906006,
      "learning_rate": 5.430265011625579e-08,
      "loss": 1.1207,
      "step": 515
    },
    {
      "epoch": 0.9990375360923965,
      "eval_loss": 2.2841131687164307,
      "eval_runtime": 1.3512,
      "eval_samples_per_second": 4.441,
      "eval_steps_per_second": 0.74,
      "step": 519
    },
    {
      "epoch": 0.9990375360923965,
      "step": 519,
      "total_flos": 7.912619964723364e+17,
      "train_loss": 2.7495121248425316,
      "train_runtime": 6850.0708,
      "train_samples_per_second": 2.426,
      "train_steps_per_second": 0.076
    }
  ],
  "logging_steps": 5,
  "max_steps": 519,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.912619964723364e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}