{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.10001577535888942,
  "eval_steps": 20000,
  "global_step": 3170,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0003155071777882947,
      "grad_norm": 1.1314178705215454,
      "learning_rate": 3.1545741324921135e-06,
      "loss": 1.3533,
      "step": 10
    },
    {
      "epoch": 0.0006310143555765894,
      "grad_norm": 0.8602690696716309,
      "learning_rate": 6.309148264984227e-06,
      "loss": 1.3656,
      "step": 20
    },
    {
      "epoch": 0.000946521533364884,
      "grad_norm": 0.46021902561187744,
      "learning_rate": 9.46372239747634e-06,
      "loss": 1.3685,
      "step": 30
    },
    {
      "epoch": 0.0012620287111531788,
      "grad_norm": 0.6980212926864624,
      "learning_rate": 1.2618296529968454e-05,
      "loss": 1.2711,
      "step": 40
    },
    {
      "epoch": 0.0015775358889414735,
      "grad_norm": 0.674638569355011,
      "learning_rate": 1.577287066246057e-05,
      "loss": 1.1823,
      "step": 50
    },
    {
      "epoch": 0.001893043066729768,
      "grad_norm": 0.9752547740936279,
      "learning_rate": 1.892744479495268e-05,
      "loss": 1.3238,
      "step": 60
    },
    {
      "epoch": 0.0022085502445180626,
      "grad_norm": 0.3866540789604187,
      "learning_rate": 2.2082018927444796e-05,
      "loss": 1.2858,
      "step": 70
    },
    {
      "epoch": 0.0025240574223063575,
      "grad_norm": 0.7574986815452576,
      "learning_rate": 2.5236593059936908e-05,
      "loss": 1.2527,
      "step": 80
    },
    {
      "epoch": 0.002839564600094652,
      "grad_norm": 0.380484402179718,
      "learning_rate": 2.8391167192429026e-05,
      "loss": 1.2617,
      "step": 90
    },
    {
      "epoch": 0.003155071777882947,
      "grad_norm": 0.4931378960609436,
      "learning_rate": 3.154574132492114e-05,
      "loss": 1.145,
      "step": 100
    },
    {
      "epoch": 0.0034705789556712416,
      "grad_norm": 0.6048789620399475,
      "learning_rate": 3.470031545741325e-05,
      "loss": 1.1987,
      "step": 110
    },
    {
      "epoch": 0.003786086133459536,
      "grad_norm": 0.29029208421707153,
      "learning_rate": 3.785488958990536e-05,
      "loss": 1.2743,
      "step": 120
    },
    {
      "epoch": 0.004101593311247831,
      "grad_norm": 0.40250363945961,
      "learning_rate": 4.1009463722397477e-05,
      "loss": 1.1172,
      "step": 130
    },
    {
      "epoch": 0.004417100489036125,
      "grad_norm": 0.4228983223438263,
      "learning_rate": 4.416403785488959e-05,
      "loss": 1.2585,
      "step": 140
    },
    {
      "epoch": 0.00473260766682442,
      "grad_norm": 0.7012104392051697,
      "learning_rate": 4.731861198738171e-05,
      "loss": 1.2598,
      "step": 150
    },
    {
      "epoch": 0.005048114844612715,
      "grad_norm": 0.4556664526462555,
      "learning_rate": 5.0473186119873815e-05,
      "loss": 1.2826,
      "step": 160
    },
    {
      "epoch": 0.00536362202240101,
      "grad_norm": 0.4234769642353058,
      "learning_rate": 5.362776025236593e-05,
      "loss": 1.2544,
      "step": 170
    },
    {
      "epoch": 0.005679129200189304,
      "grad_norm": 0.3880709409713745,
      "learning_rate": 5.678233438485805e-05,
      "loss": 1.1758,
      "step": 180
    },
    {
      "epoch": 0.005994636377977599,
      "grad_norm": 0.5538777709007263,
      "learning_rate": 5.993690851735017e-05,
      "loss": 1.3851,
      "step": 190
    },
    {
      "epoch": 0.006310143555765894,
      "grad_norm": 0.3604615032672882,
      "learning_rate": 6.309148264984228e-05,
      "loss": 1.0848,
      "step": 200
    },
    {
      "epoch": 0.006625650733554188,
      "grad_norm": 0.49042531847953796,
      "learning_rate": 6.624605678233438e-05,
      "loss": 1.3279,
      "step": 210
    },
    {
      "epoch": 0.006941157911342483,
      "grad_norm": 0.4594549238681793,
      "learning_rate": 6.94006309148265e-05,
      "loss": 1.0995,
      "step": 220
    },
    {
      "epoch": 0.007256665089130778,
      "grad_norm": 0.5176606178283691,
      "learning_rate": 7.255520504731861e-05,
      "loss": 1.1639,
      "step": 230
    },
    {
      "epoch": 0.007572172266919072,
      "grad_norm": 0.499743789434433,
      "learning_rate": 7.570977917981072e-05,
      "loss": 1.255,
      "step": 240
    },
    {
      "epoch": 0.007887679444707366,
      "grad_norm": 0.5964751243591309,
      "learning_rate": 7.886435331230284e-05,
      "loss": 1.1427,
      "step": 250
    },
    {
      "epoch": 0.008203186622495662,
      "grad_norm": 0.4749509394168854,
      "learning_rate": 8.201892744479495e-05,
      "loss": 1.3347,
      "step": 260
    },
    {
      "epoch": 0.008518693800283956,
      "grad_norm": 0.4441193640232086,
      "learning_rate": 8.517350157728708e-05,
      "loss": 1.1372,
      "step": 270
    },
    {
      "epoch": 0.00883420097807225,
      "grad_norm": 0.576790452003479,
      "learning_rate": 8.832807570977918e-05,
      "loss": 1.2528,
      "step": 280
    },
    {
      "epoch": 0.009149708155860546,
      "grad_norm": 0.4120502471923828,
      "learning_rate": 9.148264984227129e-05,
      "loss": 1.2139,
      "step": 290
    },
    {
      "epoch": 0.00946521533364884,
      "grad_norm": 0.3923511803150177,
      "learning_rate": 9.463722397476341e-05,
      "loss": 1.3012,
      "step": 300
    },
    {
      "epoch": 0.009780722511437134,
      "grad_norm": 0.5546993613243103,
      "learning_rate": 9.779179810725552e-05,
      "loss": 1.1328,
      "step": 310
    },
    {
      "epoch": 0.01009622968922543,
      "grad_norm": 0.4276692271232605,
      "learning_rate": 9.999972717866783e-05,
      "loss": 1.1742,
      "step": 320
    },
    {
      "epoch": 0.010411736867013724,
      "grad_norm": 0.5325655937194824,
      "learning_rate": 9.99948771044751e-05,
      "loss": 1.1614,
      "step": 330
    },
    {
      "epoch": 0.01072724404480202,
      "grad_norm": 0.43508264422416687,
      "learning_rate": 9.998396501092419e-05,
      "loss": 1.1259,
      "step": 340
    },
    {
      "epoch": 0.011042751222590314,
      "grad_norm": 0.5584490299224854,
      "learning_rate": 9.996699222113709e-05,
      "loss": 1.1327,
      "step": 350
    },
    {
      "epoch": 0.011358258400378608,
      "grad_norm": 0.8362613916397095,
      "learning_rate": 9.994396079311228e-05,
      "loss": 1.2165,
      "step": 360
    },
    {
      "epoch": 0.011673765578166904,
      "grad_norm": 0.5259298086166382,
      "learning_rate": 9.991487351947513e-05,
      "loss": 1.1041,
      "step": 370
    },
    {
      "epoch": 0.011989272755955198,
      "grad_norm": 0.3852245807647705,
      "learning_rate": 9.987973392713932e-05,
      "loss": 1.3298,
      "step": 380
    },
    {
      "epoch": 0.012304779933743492,
      "grad_norm": 0.4914740025997162,
      "learning_rate": 9.983854627687918e-05,
      "loss": 1.236,
      "step": 390
    },
    {
      "epoch": 0.012620287111531788,
      "grad_norm": 0.49665552377700806,
      "learning_rate": 9.979131556281304e-05,
      "loss": 1.1544,
      "step": 400
    },
    {
      "epoch": 0.012935794289320082,
      "grad_norm": 0.4329008162021637,
      "learning_rate": 9.973804751179779e-05,
      "loss": 1.2401,
      "step": 410
    },
    {
      "epoch": 0.013251301467108376,
      "grad_norm": 0.30973491072654724,
      "learning_rate": 9.967874858273423e-05,
      "loss": 1.2296,
      "step": 420
    },
    {
      "epoch": 0.013566808644896672,
      "grad_norm": 0.38413724303245544,
      "learning_rate": 9.961342596578422e-05,
      "loss": 1.0889,
      "step": 430
    },
    {
      "epoch": 0.013882315822684966,
      "grad_norm": 0.38513725996017456,
      "learning_rate": 9.954208758149867e-05,
      "loss": 1.2241,
      "step": 440
    },
    {
      "epoch": 0.01419782300047326,
      "grad_norm": 0.4003647565841675,
      "learning_rate": 9.946474207985713e-05,
      "loss": 1.2967,
      "step": 450
    },
    {
      "epoch": 0.014513330178261556,
      "grad_norm": 0.7440866827964783,
      "learning_rate": 9.938139883921906e-05,
      "loss": 1.2981,
      "step": 460
    },
    {
      "epoch": 0.01482883735604985,
      "grad_norm": 0.5401014685630798,
      "learning_rate": 9.929206796518662e-05,
      "loss": 1.2427,
      "step": 470
    },
    {
      "epoch": 0.015144344533838144,
      "grad_norm": 0.4463556408882141,
      "learning_rate": 9.919676028937936e-05,
      "loss": 1.1139,
      "step": 480
    },
    {
      "epoch": 0.01545985171162644,
      "grad_norm": 0.4856787621974945,
      "learning_rate": 9.909548736812085e-05,
      "loss": 1.2958,
      "step": 490
    },
    {
      "epoch": 0.015775358889414733,
      "grad_norm": 0.49902960658073425,
      "learning_rate": 9.898826148103737e-05,
      "loss": 1.1714,
      "step": 500
    },
    {
      "epoch": 0.01609086606720303,
      "grad_norm": 0.3542010486125946,
      "learning_rate": 9.887509562956911e-05,
      "loss": 1.1652,
      "step": 510
    },
    {
      "epoch": 0.016406373244991324,
      "grad_norm": 0.7918481826782227,
      "learning_rate": 9.875600353539363e-05,
      "loss": 1.0727,
      "step": 520
    },
    {
      "epoch": 0.01672188042277962,
      "grad_norm": 0.6717935800552368,
      "learning_rate": 9.863099963876201e-05,
      "loss": 1.2942,
      "step": 530
    },
    {
      "epoch": 0.017037387600567912,
      "grad_norm": 0.307864785194397,
      "learning_rate": 9.850009909674807e-05,
      "loss": 1.157,
      "step": 540
    },
    {
      "epoch": 0.017352894778356207,
      "grad_norm": 0.6204475164413452,
      "learning_rate": 9.83633177814104e-05,
      "loss": 1.2506,
      "step": 550
    },
    {
      "epoch": 0.0176684019561445,
      "grad_norm": 0.44475409388542175,
      "learning_rate": 9.822067227786793e-05,
      "loss": 1.1519,
      "step": 560
    },
    {
      "epoch": 0.017983909133932798,
      "grad_norm": 0.5524726510047913,
      "learning_rate": 9.807217988228884e-05,
      "loss": 1.0709,
      "step": 570
    },
    {
      "epoch": 0.018299416311721092,
      "grad_norm": 0.44522351026535034,
      "learning_rate": 9.791785859979344e-05,
      "loss": 1.1671,
      "step": 580
    },
    {
      "epoch": 0.018614923489509386,
      "grad_norm": 0.591107964515686,
      "learning_rate": 9.775772714227096e-05,
      "loss": 1.1114,
      "step": 590
    },
    {
      "epoch": 0.01893043066729768,
      "grad_norm": 0.5873256325721741,
      "learning_rate": 9.759180492611065e-05,
      "loss": 1.2766,
      "step": 600
    },
    {
      "epoch": 0.019245937845085975,
      "grad_norm": 0.559363842010498,
      "learning_rate": 9.742011206984756e-05,
      "loss": 1.1934,
      "step": 610
    },
    {
      "epoch": 0.01956144502287427,
      "grad_norm": 0.6068762540817261,
      "learning_rate": 9.724266939172302e-05,
      "loss": 1.0602,
      "step": 620
    },
    {
      "epoch": 0.019876952200662566,
      "grad_norm": 0.5704780220985413,
      "learning_rate": 9.705949840716047e-05,
      "loss": 1.3037,
      "step": 630
    },
    {
      "epoch": 0.02019245937845086,
      "grad_norm": 0.4779965579509735,
      "learning_rate": 9.687062132615658e-05,
      "loss": 1.1023,
      "step": 640
    },
    {
      "epoch": 0.020507966556239154,
      "grad_norm": 0.46647870540618896,
      "learning_rate": 9.667606105058828e-05,
      "loss": 1.1474,
      "step": 650
    },
    {
      "epoch": 0.02082347373402745,
      "grad_norm": 0.4536419212818146,
      "learning_rate": 9.647584117143576e-05,
      "loss": 1.2257,
      "step": 660
    },
    {
      "epoch": 0.021138980911815743,
      "grad_norm": 0.4657559096813202,
      "learning_rate": 9.626998596592209e-05,
      "loss": 1.0884,
      "step": 670
    },
    {
      "epoch": 0.02145448808960404,
      "grad_norm": 0.489685595035553,
      "learning_rate": 9.605852039456947e-05,
      "loss": 1.1375,
      "step": 680
    },
    {
      "epoch": 0.021769995267392334,
      "grad_norm": 0.9330468773841858,
      "learning_rate": 9.584147009817277e-05,
      "loss": 1.1861,
      "step": 690
    },
    {
      "epoch": 0.02208550244518063,
      "grad_norm": 0.610003650188446,
      "learning_rate": 9.561886139469045e-05,
      "loss": 1.2251,
      "step": 700
    },
    {
      "epoch": 0.022401009622968922,
      "grad_norm": 0.687454104423523,
      "learning_rate": 9.539072127605342e-05,
      "loss": 1.2597,
      "step": 710
    },
    {
      "epoch": 0.022716516800757217,
      "grad_norm": 0.39726585149765015,
      "learning_rate": 9.515707740489224e-05,
      "loss": 1.2133,
      "step": 720
    },
    {
      "epoch": 0.02303202397854551,
      "grad_norm": 0.4841748774051666,
      "learning_rate": 9.491795811118296e-05,
      "loss": 1.175,
      "step": 730
    },
    {
      "epoch": 0.023347531156333808,
      "grad_norm": 0.6921291947364807,
      "learning_rate": 9.467339238881199e-05,
      "loss": 1.0637,
      "step": 740
    },
    {
      "epoch": 0.023663038334122102,
      "grad_norm": 0.45699384808540344,
      "learning_rate": 9.442340989206047e-05,
      "loss": 1.0974,
      "step": 750
    },
    {
      "epoch": 0.023978545511910396,
      "grad_norm": 0.5954831838607788,
      "learning_rate": 9.416804093200874e-05,
      "loss": 1.2166,
      "step": 760
    },
    {
      "epoch": 0.02429405268969869,
      "grad_norm": 0.6534472703933716,
      "learning_rate": 9.390731647286089e-05,
      "loss": 1.25,
      "step": 770
    },
    {
      "epoch": 0.024609559867486985,
      "grad_norm": 0.4319377839565277,
      "learning_rate": 9.364126812819038e-05,
      "loss": 1.0963,
      "step": 780
    },
    {
      "epoch": 0.02492506704527528,
      "grad_norm": 0.3469117283821106,
      "learning_rate": 9.336992815710668e-05,
      "loss": 1.1501,
      "step": 790
    },
    {
      "epoch": 0.025240574223063576,
      "grad_norm": 0.5852107405662537,
      "learning_rate": 9.309332946034392e-05,
      "loss": 1.1365,
      "step": 800
    },
    {
      "epoch": 0.02555608140085187,
      "grad_norm": 0.364041268825531,
      "learning_rate": 9.281150557627143e-05,
      "loss": 1.1154,
      "step": 810
    },
    {
      "epoch": 0.025871588578640164,
      "grad_norm": 0.3952679932117462,
      "learning_rate": 9.252449067682721e-05,
      "loss": 1.1984,
      "step": 820
    },
    {
      "epoch": 0.02618709575642846,
      "grad_norm": 0.3946315050125122,
      "learning_rate": 9.22323195633745e-05,
      "loss": 1.1346,
      "step": 830
    },
    {
      "epoch": 0.026502602934216753,
      "grad_norm": 0.38489851355552673,
      "learning_rate": 9.19350276624819e-05,
      "loss": 1.1597,
      "step": 840
    },
    {
      "epoch": 0.026818110112005047,
      "grad_norm": 0.37699270248413086,
      "learning_rate": 9.163265102162794e-05,
      "loss": 1.1,
      "step": 850
    },
    {
      "epoch": 0.027133617289793344,
      "grad_norm": 0.6026850342750549,
      "learning_rate": 9.132522630483017e-05,
      "loss": 1.0557,
      "step": 860
    },
    {
      "epoch": 0.02744912446758164,
      "grad_norm": 0.7021874785423279,
      "learning_rate": 9.101279078819949e-05,
      "loss": 1.146,
      "step": 870
    },
    {
      "epoch": 0.027764631645369933,
      "grad_norm": 0.4263465404510498,
      "learning_rate": 9.069538235542037e-05,
      "loss": 1.1513,
      "step": 880
    },
    {
      "epoch": 0.028080138823158227,
      "grad_norm": 0.41765096783638,
      "learning_rate": 9.037303949315736e-05,
      "loss": 1.1684,
      "step": 890
    },
    {
      "epoch": 0.02839564600094652,
      "grad_norm": 0.44986680150032043,
      "learning_rate": 9.004580128638846e-05,
      "loss": 1.1054,
      "step": 900
    },
    {
      "epoch": 0.028711153178734815,
      "grad_norm": 0.39543870091438293,
      "learning_rate": 8.971370741366592e-05,
      "loss": 1.2186,
      "step": 910
    },
    {
      "epoch": 0.029026660356523112,
      "grad_norm": 0.37330368161201477,
      "learning_rate": 8.937679814230517e-05,
      "loss": 1.2054,
      "step": 920
    },
    {
      "epoch": 0.029342167534311406,
      "grad_norm": 0.8773049712181091,
      "learning_rate": 8.903511432350221e-05,
      "loss": 1.1217,
      "step": 930
    },
    {
      "epoch": 0.0296576747120997,
      "grad_norm": 1.5796482563018799,
      "learning_rate": 8.868869738738038e-05,
      "loss": 1.0987,
      "step": 940
    },
    {
      "epoch": 0.029973181889887995,
      "grad_norm": 0.36434420943260193,
      "learning_rate": 8.833758933796678e-05,
      "loss": 1.1457,
      "step": 950
    },
    {
      "epoch": 0.03028868906767629,
      "grad_norm": 0.5418674945831299,
      "learning_rate": 8.798183274809917e-05,
      "loss": 1.2404,
      "step": 960
    },
    {
      "epoch": 0.030604196245464583,
      "grad_norm": 0.4581955075263977,
      "learning_rate": 8.762147075426392e-05,
      "loss": 1.0919,
      "step": 970
    },
    {
      "epoch": 0.03091970342325288,
      "grad_norm": 0.5733634829521179,
      "learning_rate": 8.725654705136558e-05,
      "loss": 1.1757,
      "step": 980
    },
    {
      "epoch": 0.031235210601041175,
      "grad_norm": 0.4178631603717804,
      "learning_rate": 8.688710588742872e-05,
      "loss": 1.25,
      "step": 990
    },
    {
      "epoch": 0.031550717778829465,
      "grad_norm": 0.4076519012451172,
      "learning_rate": 8.651319205823278e-05,
      "loss": 1.1356,
      "step": 1000
    },
    {
      "epoch": 0.03186622495661776,
      "grad_norm": 0.7019752264022827,
      "learning_rate": 8.613485090188042e-05,
      "loss": 1.2086,
      "step": 1010
    },
    {
      "epoch": 0.03218173213440606,
      "grad_norm": 0.3069887161254883,
      "learning_rate": 8.575212829330019e-05,
      "loss": 1.1486,
      "step": 1020
    },
    {
      "epoch": 0.03249723931219435,
      "grad_norm": 0.5908852815628052,
      "learning_rate": 8.536507063868397e-05,
      "loss": 1.1095,
      "step": 1030
    },
    {
      "epoch": 0.03281274648998265,
      "grad_norm": 0.6867959499359131,
      "learning_rate": 8.497372486986024e-05,
      "loss": 1.1468,
      "step": 1040
    },
    {
      "epoch": 0.03312825366777094,
      "grad_norm": 0.5001075863838196,
      "learning_rate": 8.45781384386033e-05,
      "loss": 1.0803,
      "step": 1050
    },
    {
      "epoch": 0.03344376084555924,
      "grad_norm": 0.46148380637168884,
      "learning_rate": 8.417835931087975e-05,
      "loss": 1.1178,
      "step": 1060
    },
    {
      "epoch": 0.033759268023347534,
      "grad_norm": 0.5462920665740967,
      "learning_rate": 8.377443596103239e-05,
      "loss": 1.2744,
      "step": 1070
    },
    {
      "epoch": 0.034074775201135825,
      "grad_norm": 0.4107511043548584,
      "learning_rate": 8.336641736590258e-05,
      "loss": 1.0937,
      "step": 1080
    },
    {
      "epoch": 0.03439028237892412,
      "grad_norm": 0.3727000057697296,
      "learning_rate": 8.295435299889173e-05,
      "loss": 1.2042,
      "step": 1090
    },
    {
      "epoch": 0.03470578955671241,
      "grad_norm": 0.48867249488830566,
      "learning_rate": 8.253829282396245e-05,
      "loss": 1.161,
      "step": 1100
    },
    {
      "epoch": 0.03502129673450071,
      "grad_norm": 0.5484758019447327,
      "learning_rate": 8.211828728958027e-05,
      "loss": 1.1609,
      "step": 1110
    },
    {
      "epoch": 0.035336803912289,
      "grad_norm": 0.6911703944206238,
      "learning_rate": 8.16943873225966e-05,
      "loss": 1.0674,
      "step": 1120
    },
    {
      "epoch": 0.0356523110900773,
      "grad_norm": 0.563105583190918,
      "learning_rate": 8.126664432207385e-05,
      "loss": 1.2002,
      "step": 1130
    },
    {
      "epoch": 0.035967818267865596,
      "grad_norm": 0.6931987404823303,
      "learning_rate": 8.083511015305299e-05,
      "loss": 1.1209,
      "step": 1140
    },
    {
      "epoch": 0.03628332544565389,
      "grad_norm": 0.7918395400047302,
      "learning_rate": 8.039983714026478e-05,
      "loss": 1.1997,
      "step": 1150
    },
    {
      "epoch": 0.036598832623442185,
      "grad_norm": 0.4152407944202423,
      "learning_rate": 7.99608780617854e-05,
      "loss": 1.0481,
      "step": 1160
    },
    {
      "epoch": 0.036914339801230475,
      "grad_norm": 0.3731307089328766,
      "learning_rate": 7.951828614263679e-05,
      "loss": 1.1326,
      "step": 1170
    },
    {
      "epoch": 0.03722984697901877,
      "grad_norm": 0.5490486025810242,
      "learning_rate": 7.907211504833301e-05,
      "loss": 1.2251,
      "step": 1180
    },
    {
      "epoch": 0.03754535415680707,
      "grad_norm": 0.699494481086731,
      "learning_rate": 7.862241887837322e-05,
      "loss": 1.1822,
      "step": 1190
    },
    {
      "epoch": 0.03786086133459536,
      "grad_norm": 0.48040771484375,
      "learning_rate": 7.816925215968189e-05,
      "loss": 1.084,
      "step": 1200
    },
    {
      "epoch": 0.03817636851238366,
      "grad_norm": 0.5420158505439758,
      "learning_rate": 7.771266983999725e-05,
      "loss": 1.2134,
      "step": 1210
    },
    {
      "epoch": 0.03849187569017195,
      "grad_norm": 0.6118718385696411,
      "learning_rate": 7.72527272812088e-05,
      "loss": 1.184,
      "step": 1220
    },
    {
      "epoch": 0.03880738286796025,
      "grad_norm": 0.5210500955581665,
      "learning_rate": 7.678948025264443e-05,
      "loss": 1.1628,
      "step": 1230
    },
    {
      "epoch": 0.03912289004574854,
      "grad_norm": 0.4329388439655304,
      "learning_rate": 7.632298492430831e-05,
      "loss": 1.0391,
      "step": 1240
    },
    {
      "epoch": 0.039438397223536835,
      "grad_norm": 0.5016342997550964,
      "learning_rate": 7.585329786007006e-05,
      "loss": 1.2102,
      "step": 1250
    },
    {
      "epoch": 0.03975390440132513,
      "grad_norm": 0.4969483017921448,
      "learning_rate": 7.538047601080629e-05,
      "loss": 1.0538,
      "step": 1260
    },
    {
      "epoch": 0.04006941157911342,
      "grad_norm": 0.6484401226043701,
      "learning_rate": 7.490457670749503e-05,
      "loss": 1.1832,
      "step": 1270
    },
    {
      "epoch": 0.04038491875690172,
      "grad_norm": 0.6064450740814209,
      "learning_rate": 7.442565765426436e-05,
      "loss": 1.1387,
      "step": 1280
    },
    {
      "epoch": 0.04070042593469001,
      "grad_norm": 0.4169582724571228,
      "learning_rate": 7.394377692139543e-05,
      "loss": 1.058,
      "step": 1290
    },
    {
      "epoch": 0.04101593311247831,
      "grad_norm": 0.609321117401123,
      "learning_rate": 7.345899293828144e-05,
      "loss": 1.2175,
      "step": 1300
    },
    {
      "epoch": 0.041331440290266606,
      "grad_norm": 0.4401857554912567,
      "learning_rate": 7.297136448634282e-05,
      "loss": 1.1076,
      "step": 1310
    },
    {
      "epoch": 0.0416469474680549,
      "grad_norm": 0.6151861548423767,
      "learning_rate": 7.248095069189981e-05,
      "loss": 1.1631,
      "step": 1320
    },
    {
      "epoch": 0.041962454645843195,
      "grad_norm": 0.6283815503120422,
      "learning_rate": 7.19878110190033e-05,
      "loss": 1.1071,
      "step": 1330
    },
    {
      "epoch": 0.042277961823631485,
      "grad_norm": 0.38593342900276184,
      "learning_rate": 7.149200526222459e-05,
      "loss": 1.1593,
      "step": 1340
    },
    {
      "epoch": 0.04259346900141978,
      "grad_norm": 0.3702864348888397,
      "learning_rate": 7.099359353940509e-05,
      "loss": 1.1549,
      "step": 1350
    },
    {
      "epoch": 0.04290897617920808,
      "grad_norm": 0.7573534250259399,
      "learning_rate": 7.049263628436702e-05,
      "loss": 1.139,
      "step": 1360
    },
    {
      "epoch": 0.04322448335699637,
      "grad_norm": 0.4059164524078369,
      "learning_rate": 6.998919423958547e-05,
      "loss": 1.2294,
      "step": 1370
    },
    {
      "epoch": 0.04353999053478467,
      "grad_norm": 0.5392842888832092,
      "learning_rate": 6.948332844882326e-05,
      "loss": 1.1949,
      "step": 1380
    },
    {
      "epoch": 0.04385549771257296,
      "grad_norm": 0.40931907296180725,
      "learning_rate": 6.897510024972925e-05,
      "loss": 1.1734,
      "step": 1390
    },
    {
      "epoch": 0.04417100489036126,
      "grad_norm": 0.4209199845790863,
      "learning_rate": 6.846457126640096e-05,
      "loss": 1.0708,
      "step": 1400
    },
    {
      "epoch": 0.04448651206814955,
      "grad_norm": 0.600908637046814,
      "learning_rate": 6.795180340191243e-05,
      "loss": 1.0657,
      "step": 1410
    },
    {
      "epoch": 0.044802019245937845,
      "grad_norm": 0.31521522998809814,
      "learning_rate": 6.743685883080835e-05,
      "loss": 1.0177,
      "step": 1420
    },
    {
      "epoch": 0.04511752642372614,
      "grad_norm": 0.4605425000190735,
      "learning_rate": 6.691979999156521e-05,
      "loss": 1.1436,
      "step": 1430
    },
    {
      "epoch": 0.04543303360151443,
      "grad_norm": 0.43967700004577637,
      "learning_rate": 6.640068957902043e-05,
      "loss": 1.2336,
      "step": 1440
    },
    {
      "epoch": 0.04574854077930273,
      "grad_norm": 0.4047873616218567,
      "learning_rate": 6.587959053677051e-05,
      "loss": 1.2193,
      "step": 1450
    },
    {
      "epoch": 0.04606404795709102,
      "grad_norm": 0.6791156530380249,
      "learning_rate": 6.535656604953884e-05,
      "loss": 1.1857,
      "step": 1460
    },
    {
      "epoch": 0.04637955513487932,
      "grad_norm": 0.43025296926498413,
      "learning_rate": 6.483167953551442e-05,
      "loss": 1.2128,
      "step": 1470
    },
    {
      "epoch": 0.046695062312667616,
      "grad_norm": 0.9802103638648987,
      "learning_rate": 6.430499463866231e-05,
      "loss": 1.1581,
      "step": 1480
    },
    {
      "epoch": 0.04701056949045591,
      "grad_norm": 0.5605840086936951,
      "learning_rate": 6.377657522100644e-05,
      "loss": 1.1955,
      "step": 1490
    },
    {
      "epoch": 0.047326076668244205,
      "grad_norm": 0.7271468043327332,
      "learning_rate": 6.324648535488631e-05,
      "loss": 1.1685,
      "step": 1500
    },
    {
      "epoch": 0.047641583846032495,
      "grad_norm": 0.46086645126342773,
      "learning_rate": 6.271478931518802e-05,
      "loss": 1.1762,
      "step": 1510
    },
    {
      "epoch": 0.04795709102382079,
      "grad_norm": 0.41231945157051086,
      "learning_rate": 6.21815515715507e-05,
      "loss": 1.1528,
      "step": 1520
    },
    {
      "epoch": 0.048272598201609083,
      "grad_norm": 0.4973437190055847,
      "learning_rate": 6.164683678054938e-05,
      "loss": 1.141,
      "step": 1530
    },
    {
      "epoch": 0.04858810537939738,
      "grad_norm": 0.5368614792823792,
      "learning_rate": 6.11107097778553e-05,
      "loss": 1.112,
      "step": 1540
    },
    {
      "epoch": 0.04890361255718568,
      "grad_norm": 0.4287970960140228,
      "learning_rate": 6.057323557037431e-05,
      "loss": 1.1357,
      "step": 1550
    },
    {
      "epoch": 0.04921911973497397,
      "grad_norm": 0.8291701078414917,
      "learning_rate": 6.00344793283646e-05,
      "loss": 1.0729,
      "step": 1560
    },
    {
      "epoch": 0.04953462691276227,
      "grad_norm": 0.4257969856262207,
      "learning_rate": 5.9494506377534695e-05,
      "loss": 1.1132,
      "step": 1570
    },
    {
      "epoch": 0.04985013409055056,
      "grad_norm": 0.46945375204086304,
      "learning_rate": 5.895338219112246e-05,
      "loss": 1.155,
      "step": 1580
    },
    {
      "epoch": 0.050165641268338855,
      "grad_norm": 0.450214684009552,
      "learning_rate": 5.841117238195631e-05,
      "loss": 1.2993,
      "step": 1590
    },
    {
      "epoch": 0.05048114844612715,
      "grad_norm": 0.41964858770370483,
      "learning_rate": 5.786794269449947e-05,
      "loss": 1.1301,
      "step": 1600
    },
    {
      "epoch": 0.05079665562391544,
      "grad_norm": 0.4688185453414917,
      "learning_rate": 5.732375899687827e-05,
      "loss": 1.1262,
      "step": 1610
    },
    {
      "epoch": 0.05111216280170374,
      "grad_norm": 0.40953758358955383,
      "learning_rate": 5.677868727289548e-05,
      "loss": 1.1443,
      "step": 1620
    },
    {
      "epoch": 0.05142766997949203,
      "grad_norm": 0.7492470741271973,
      "learning_rate": 5.623279361402952e-05,
      "loss": 1.0674,
      "step": 1630
    },
    {
      "epoch": 0.05174317715728033,
      "grad_norm": 0.5046231746673584,
      "learning_rate": 5.5686144211420775e-05,
      "loss": 1.1484,
      "step": 1640
    },
    {
      "epoch": 0.05205868433506862,
      "grad_norm": 0.5525899529457092,
      "learning_rate": 5.513880534784562e-05,
      "loss": 1.1305,
      "step": 1650
    },
    {
      "epoch": 0.05237419151285692,
      "grad_norm": 0.44825589656829834,
      "learning_rate": 5.459084338967958e-05,
      "loss": 1.174,
      "step": 1660
    },
    {
      "epoch": 0.052689698690645215,
      "grad_norm": 0.8712518215179443,
      "learning_rate": 5.40423247788501e-05,
      "loss": 1.0862,
      "step": 1670
    },
    {
      "epoch": 0.053005205868433505,
      "grad_norm": 0.46037644147872925,
      "learning_rate": 5.349331602478032e-05,
      "loss": 1.2612,
      "step": 1680
    },
    {
      "epoch": 0.0533207130462218,
      "grad_norm": 0.3993786871433258,
      "learning_rate": 5.294388369632466e-05,
      "loss": 1.0599,
      "step": 1690
    },
    {
      "epoch": 0.053636220224010094,
      "grad_norm": 0.4509952664375305,
      "learning_rate": 5.2394094413697136e-05,
      "loss": 1.1323,
      "step": 1700
    },
    {
      "epoch": 0.05395172740179839,
      "grad_norm": 0.3986489474773407,
      "learning_rate": 5.1844014840393393e-05,
      "loss": 1.144,
      "step": 1710
    },
    {
      "epoch": 0.05426723457958669,
      "grad_norm": 0.48152726888656616,
      "learning_rate": 5.1293711675107705e-05,
      "loss": 1.1085,
      "step": 1720
    },
    {
      "epoch": 0.05458274175737498,
      "grad_norm": 0.4110780358314514,
      "learning_rate": 5.074325164364548e-05,
      "loss": 1.1274,
      "step": 1730
    },
    {
      "epoch": 0.05489824893516328,
      "grad_norm": 0.6647006273269653,
      "learning_rate": 5.019270149083256e-05,
      "loss": 1.1762,
      "step": 1740
    },
    {
      "epoch": 0.05521375611295157,
      "grad_norm": 0.4578000605106354,
      "learning_rate": 4.9642127972422295e-05,
      "loss": 1.1406,
      "step": 1750
    },
    {
      "epoch": 0.055529263290739865,
      "grad_norm": 0.6286119222640991,
      "learning_rate": 4.9091597847001175e-05,
      "loss": 0.958,
      "step": 1760
    },
    {
      "epoch": 0.055844770468528156,
      "grad_norm": 0.4872972369194031,
      "learning_rate": 4.854117786789413e-05,
      "loss": 1.1913,
      "step": 1770
    },
    {
      "epoch": 0.05616027764631645,
      "grad_norm": 0.4607793390750885,
      "learning_rate": 4.799093477507058e-05,
      "loss": 1.2621,
      "step": 1780
    },
    {
      "epoch": 0.05647578482410475,
      "grad_norm": 0.5443147420883179,
      "learning_rate": 4.7440935287051996e-05,
      "loss": 1.1725,
      "step": 1790
    },
    {
      "epoch": 0.05679129200189304,
      "grad_norm": 0.7406758069992065,
      "learning_rate": 4.689124609282202e-05,
      "loss": 1.1468,
      "step": 1800
    },
    {
      "epoch": 0.05710679917968134,
      "grad_norm": 0.6841813921928406,
      "learning_rate": 4.634193384374038e-05,
      "loss": 1.1332,
      "step": 1810
    },
    {
      "epoch": 0.05742230635746963,
      "grad_norm": 0.41650551557540894,
      "learning_rate": 4.579306514546107e-05,
      "loss": 1.1002,
      "step": 1820
    },
    {
      "epoch": 0.05773781353525793,
      "grad_norm": 0.5348483324050903,
      "learning_rate": 4.524470654985637e-05,
      "loss": 1.1698,
      "step": 1830
    },
    {
      "epoch": 0.058053320713046225,
      "grad_norm": 0.4644758403301239,
      "learning_rate": 4.4696924546947105e-05,
      "loss": 1.1359,
      "step": 1840
    },
    {
      "epoch": 0.058368827890834515,
      "grad_norm": 0.49113455414772034,
      "learning_rate": 4.414978555684069e-05,
      "loss": 1.1629,
      "step": 1850
    },
    {
      "epoch": 0.05868433506862281,
      "grad_norm": 0.5527147650718689,
      "learning_rate": 4.360335592167737e-05,
      "loss": 1.3023,
      "step": 1860
    },
    {
      "epoch": 0.058999842246411104,
      "grad_norm": 0.6101669073104858,
      "learning_rate": 4.305770189758613e-05,
      "loss": 1.0331,
      "step": 1870
    },
    {
      "epoch": 0.0593153494241994,
      "grad_norm": 0.8309857845306396,
      "learning_rate": 4.251288964665098e-05,
      "loss": 1.24,
      "step": 1880
    },
    {
      "epoch": 0.05963085660198769,
      "grad_norm": 0.5306587815284729,
      "learning_rate": 4.196898522888848e-05,
      "loss": 1.1996,
      "step": 1890
    },
    {
      "epoch": 0.05994636377977599,
      "grad_norm": 0.5127861499786377,
      "learning_rate": 4.142605459423795e-05,
      "loss": 1.1979,
      "step": 1900
    },
    {
      "epoch": 0.06026187095756429,
      "grad_norm": 0.4082748591899872,
      "learning_rate": 4.088416357456471e-05,
      "loss": 1.1738,
      "step": 1910
    },
    {
      "epoch": 0.06057737813535258,
      "grad_norm": 0.41917574405670166,
      "learning_rate": 4.0343377875677854e-05,
      "loss": 1.2224,
      "step": 1920
    },
    {
      "epoch": 0.060892885313140875,
      "grad_norm": 0.4594613015651703,
      "learning_rate": 3.9803763069363256e-05,
      "loss": 1.2116,
      "step": 1930
    },
    {
      "epoch": 0.061208392490929166,
      "grad_norm": 0.5822821259498596,
      "learning_rate": 3.926538458543275e-05,
      "loss": 1.0644,
      "step": 1940
    },
    {
      "epoch": 0.06152389966871746,
      "grad_norm": 0.5038664937019348,
      "learning_rate": 3.8728307703790615e-05,
      "loss": 1.0946,
      "step": 1950
    },
    {
      "epoch": 0.06183940684650576,
      "grad_norm": 0.4776921272277832,
      "learning_rate": 3.819259754651819e-05,
      "loss": 1.0555,
      "step": 1960
    },
    {
      "epoch": 0.06215491402429405,
      "grad_norm": 0.37600579857826233,
      "learning_rate": 3.765831906997765e-05,
      "loss": 1.1422,
      "step": 1970
    },
    {
      "epoch": 0.06247042120208235,
      "grad_norm": 0.4422357380390167,
      "learning_rate": 3.7125537056935823e-05,
      "loss": 1.1688,
      "step": 1980
    },
    {
      "epoch": 0.06278592837987064,
      "grad_norm": 0.4450276792049408,
      "learning_rate": 3.659431610870918e-05,
      "loss": 1.2122,
      "step": 1990
    },
    {
      "epoch": 0.06310143555765893,
      "grad_norm": 0.48132121562957764,
      "learning_rate": 3.606472063733067e-05,
      "loss": 1.1017,
      "step": 2000
    },
    {
      "epoch": 0.06341694273544723,
      "grad_norm": 0.5494374632835388,
      "learning_rate": 3.553681485773962e-05,
      "loss": 1.1585,
      "step": 2010
    },
    {
      "epoch": 0.06373244991323553,
      "grad_norm": 0.7255235314369202,
      "learning_rate": 3.5010662779995476e-05,
      "loss": 1.1971,
      "step": 2020
    },
    {
      "epoch": 0.06404795709102382,
      "grad_norm": 0.44452548027038574,
      "learning_rate": 3.4486328201516374e-05,
      "loss": 1.1138,
      "step": 2030
    },
    {
      "epoch": 0.06436346426881212,
      "grad_norm": 0.45132672786712646,
      "learning_rate": 3.396387469934362e-05,
      "loss": 1.1491,
      "step": 2040
    },
    {
      "epoch": 0.06467897144660041,
      "grad_norm": 0.5831072330474854,
      "learning_rate": 3.3443365622432664e-05,
      "loss": 1.2272,
      "step": 2050
    },
    {
      "epoch": 0.0649944786243887,
      "grad_norm": 0.45740213990211487,
      "learning_rate": 3.2924864083971975e-05,
      "loss": 1.0365,
      "step": 2060
    },
    {
      "epoch": 0.065309985802177,
      "grad_norm": 0.4249660074710846,
      "learning_rate": 3.240843295373031e-05,
      "loss": 1.0816,
      "step": 2070
    },
    {
      "epoch": 0.0656254929799653,
      "grad_norm": 0.43736574053764343,
      "learning_rate": 3.1894134850433625e-05,
      "loss": 1.0117,
      "step": 2080
    },
    {
      "epoch": 0.06594100015775359,
      "grad_norm": 0.41917362809181213,
      "learning_rate": 3.13820321341724e-05,
      "loss": 1.0575,
      "step": 2090
    },
    {
      "epoch": 0.06625650733554188,
      "grad_norm": 0.6465585231781006,
      "learning_rate": 3.0872186898840193e-05,
      "loss": 1.0389,
      "step": 2100
    },
    {
      "epoch": 0.06657201451333018,
      "grad_norm": 0.5854601860046387,
      "learning_rate": 3.036466096460472e-05,
      "loss": 1.0875,
      "step": 2110
    },
    {
      "epoch": 0.06688752169111847,
      "grad_norm": 0.7354947924613953,
      "learning_rate": 2.9859515870411902e-05,
      "loss": 1.1608,
      "step": 2120
    },
    {
      "epoch": 0.06720302886890676,
      "grad_norm": 0.520214855670929,
      "learning_rate": 2.9356812866524076e-05,
      "loss": 1.1178,
      "step": 2130
    },
    {
      "epoch": 0.06751853604669507,
      "grad_norm": 0.5355998277664185,
      "learning_rate": 2.88566129070933e-05,
      "loss": 1.1249,
      "step": 2140
    },
    {
      "epoch": 0.06783404322448336,
      "grad_norm": 0.49371784925460815,
      "learning_rate": 2.8358976642770486e-05,
      "loss": 1.1029,
      "step": 2150
    },
    {
      "epoch": 0.06814955040227165,
      "grad_norm": 0.40560880303382874,
      "learning_rate": 2.7863964413351253e-05,
      "loss": 1.1094,
      "step": 2160
    },
    {
      "epoch": 0.06846505758005994,
      "grad_norm": 0.4687036871910095,
      "learning_rate": 2.737163624045962e-05,
      "loss": 1.0718,
      "step": 2170
    },
    {
      "epoch": 0.06878056475784824,
      "grad_norm": 0.4389897584915161,
      "learning_rate": 2.688205182027026e-05,
      "loss": 1.0652,
      "step": 2180
    },
    {
      "epoch": 0.06909607193563654,
      "grad_norm": 0.46042123436927795,
      "learning_rate": 2.6395270516270077e-05,
      "loss": 1.0748,
      "step": 2190
    },
    {
      "epoch": 0.06941157911342483,
      "grad_norm": 0.5048350691795349,
      "learning_rate": 2.591135135206026e-05,
      "loss": 1.1513,
      "step": 2200
    },
    {
      "epoch": 0.06972708629121313,
      "grad_norm": 0.6022383570671082,
      "learning_rate": 2.543035300419951e-05,
      "loss": 1.0872,
      "step": 2210
    },
    {
      "epoch": 0.07004259346900142,
      "grad_norm": 0.9275935292243958,
      "learning_rate": 2.4952333795089338e-05,
      "loss": 1.2373,
      "step": 2220
    },
    {
      "epoch": 0.07035810064678971,
      "grad_norm": 0.4814078211784363,
      "learning_rate": 2.4477351685902293e-05,
      "loss": 1.2757,
      "step": 2230
    },
    {
      "epoch": 0.070673607824578,
      "grad_norm": 0.48762601613998413,
      "learning_rate": 2.4005464269554077e-05,
      "loss": 1.0695,
      "step": 2240
    },
    {
      "epoch": 0.07098911500236631,
      "grad_norm": 0.7237532138824463,
      "learning_rate": 2.3536728763720133e-05,
      "loss": 1.1084,
      "step": 2250
    },
    {
      "epoch": 0.0713046221801546,
      "grad_norm": 0.35012200474739075,
      "learning_rate": 2.3071202003897902e-05,
      "loss": 1.086,
      "step": 2260
    },
    {
      "epoch": 0.07162012935794289,
      "grad_norm": 0.5497077107429504,
      "learning_rate": 2.260894043651537e-05,
      "loss": 1.0629,
      "step": 2270
    },
    {
      "epoch": 0.07193563653573119,
      "grad_norm": 0.4760759770870209,
      "learning_rate": 2.2150000112086755e-05,
      "loss": 1.0076,
      "step": 2280
    },
    {
      "epoch": 0.07225114371351948,
      "grad_norm": 0.49443313479423523,
      "learning_rate": 2.1694436678416215e-05,
      "loss": 1.0282,
      "step": 2290
    },
    {
      "epoch": 0.07256665089130777,
      "grad_norm": 0.508001446723938,
      "learning_rate": 2.1242305373850468e-05,
      "loss": 1.211,
      "step": 2300
    },
    {
      "epoch": 0.07288215806909608,
      "grad_norm": 0.6970950961112976,
      "learning_rate": 2.079366102058088e-05,
      "loss": 1.2444,
      "step": 2310
    },
    {
      "epoch": 0.07319766524688437,
      "grad_norm": 0.8585352897644043,
      "learning_rate": 2.0348558017996162e-05,
      "loss": 1.0957,
      "step": 2320
    },
    {
      "epoch": 0.07351317242467266,
      "grad_norm": 0.417327880859375,
      "learning_rate": 1.9907050336086368e-05,
      "loss": 1.1682,
      "step": 2330
    },
    {
      "epoch": 0.07382867960246095,
      "grad_norm": 1.1333324909210205,
      "learning_rate": 1.946919150889876e-05,
      "loss": 1.08,
      "step": 2340
    },
    {
      "epoch": 0.07414418678024925,
      "grad_norm": 0.5783224105834961,
      "learning_rate": 1.9035034628046727e-05,
      "loss": 1.0406,
      "step": 2350
    },
    {
      "epoch": 0.07445969395803755,
      "grad_norm": 0.505717933177948,
      "learning_rate": 1.860463233627225e-05,
      "loss": 1.2299,
      "step": 2360
    },
    {
      "epoch": 0.07477520113582584,
      "grad_norm": 0.7536906599998474,
      "learning_rate": 1.817803682106285e-05,
      "loss": 1.1869,
      "step": 2370
    },
    {
      "epoch": 0.07509070831361414,
      "grad_norm": 0.6558195948600769,
      "learning_rate": 1.7755299808323657e-05,
      "loss": 1.2183,
      "step": 2380
    },
    {
      "epoch": 0.07540621549140243,
      "grad_norm": 0.48988932371139526,
      "learning_rate": 1.7336472556105593e-05,
      "loss": 1.0805,
      "step": 2390
    },
    {
      "epoch": 0.07572172266919072,
      "grad_norm": 0.46199411153793335,
      "learning_rate": 1.6921605848390075e-05,
      "loss": 1.0771,
      "step": 2400
    },
    {
      "epoch": 0.07603722984697901,
      "grad_norm": 0.5149940848350525,
      "learning_rate": 1.651074998893139e-05,
      "loss": 1.1492,
      "step": 2410
    },
    {
      "epoch": 0.07635273702476732,
      "grad_norm": 0.38356372714042664,
      "learning_rate": 1.6103954795157188e-05,
      "loss": 1.1467,
      "step": 2420
    },
    {
      "epoch": 0.07666824420255561,
      "grad_norm": 0.4538305997848511,
      "learning_rate": 1.570126959212801e-05,
      "loss": 0.9464,
      "step": 2430
    },
    {
      "epoch": 0.0769837513803439,
      "grad_norm": 0.5672346949577332,
      "learning_rate": 1.530274320655644e-05,
      "loss": 1.1144,
      "step": 2440
    },
    {
      "epoch": 0.0772992585581322,
      "grad_norm": 0.6441534161567688,
      "learning_rate": 1.4908423960886809e-05,
      "loss": 1.2172,
      "step": 2450
    },
    {
      "epoch": 0.0776147657359205,
      "grad_norm": 0.4857594072818756,
      "learning_rate": 1.451835966743586e-05,
      "loss": 1.1294,
      "step": 2460
    },
    {
      "epoch": 0.07793027291370878,
      "grad_norm": 0.5527583360671997,
      "learning_rate": 1.4132597622595423e-05,
      "loss": 1.0536,
      "step": 2470
    },
    {
      "epoch": 0.07824578009149707,
      "grad_norm": 0.8115904927253723,
      "learning_rate": 1.375118460109766e-05,
      "loss": 1.1606,
      "step": 2480
    },
    {
      "epoch": 0.07856128726928538,
      "grad_norm": 0.6380918622016907,
      "learning_rate": 1.3374166850343328e-05,
      "loss": 1.1524,
      "step": 2490
    },
    {
      "epoch": 0.07887679444707367,
      "grad_norm": 0.44737401604652405,
      "learning_rate": 1.3001590084794307e-05,
      "loss": 1.1865,
      "step": 2500
    },
    {
      "epoch": 0.07919230162486196,
      "grad_norm": 0.44319379329681396,
      "learning_rate": 1.2633499480430566e-05,
      "loss": 1.2385,
      "step": 2510
    },
    {
      "epoch": 0.07950780880265026,
      "grad_norm": 0.48288267850875854,
      "learning_rate": 1.226993966927238e-05,
      "loss": 1.0418,
      "step": 2520
    },
    {
      "epoch": 0.07982331598043856,
      "grad_norm": 0.4171326160430908,
      "learning_rate": 1.191095473396861e-05,
      "loss": 1.1814,
      "step": 2530
    },
    {
      "epoch": 0.08013882315822685,
      "grad_norm": 0.459581196308136,
      "learning_rate": 1.1556588202451612e-05,
      "loss": 1.0307,
      "step": 2540
    },
    {
      "epoch": 0.08045433033601515,
      "grad_norm": 0.5369367599487305,
      "learning_rate": 1.120688304265925e-05,
      "loss": 1.1631,
      "step": 2550
    },
    {
      "epoch": 0.08076983751380344,
      "grad_norm": 0.6328393220901489,
      "learning_rate": 1.0861881657324985e-05,
      "loss": 1.1548,
      "step": 2560
    },
    {
      "epoch": 0.08108534469159173,
      "grad_norm": 0.46232977509498596,
      "learning_rate": 1.0521625878836416e-05,
      "loss": 1.1935,
      "step": 2570
    },
    {
      "epoch": 0.08140085186938002,
      "grad_norm": 0.46396756172180176,
      "learning_rate": 1.0186156964162957e-05,
      "loss": 1.108,
      "step": 2580
    },
    {
      "epoch": 0.08171635904716833,
      "grad_norm": 0.4770895838737488,
      "learning_rate": 9.855515589853326e-06,
      "loss": 1.1103,
      "step": 2590
    },
    {
      "epoch": 0.08203186622495662,
      "grad_norm": 0.5198113322257996,
      "learning_rate": 9.52974184710344e-06,
      "loss": 1.1943,
      "step": 2600
    },
    {
      "epoch": 0.08234737340274491,
      "grad_norm": 0.45006701350212097,
      "learning_rate": 9.208875236895182e-06,
      "loss": 1.1236,
      "step": 2610
    },
    {
      "epoch": 0.08266288058053321,
      "grad_norm": 0.7262599468231201,
      "learning_rate": 8.892954665206804e-06,
      "loss": 1.2231,
      "step": 2620
    },
    {
      "epoch": 0.0829783877583215,
      "grad_norm": 0.5783856511116028,
      "learning_rate": 8.582018438295553e-06,
      "loss": 1.0805,
      "step": 2630
    },
    {
      "epoch": 0.0832938949361098,
      "grad_norm": 0.5482893586158752,
      "learning_rate": 8.276104258052786e-06,
      "loss": 1.1554,
      "step": 2640
    },
    {
      "epoch": 0.08360940211389808,
      "grad_norm": 0.4669418931007385,
      "learning_rate": 7.975249217432612e-06,
      "loss": 1.0977,
      "step": 2650
    },
    {
      "epoch": 0.08392490929168639,
      "grad_norm": 0.6652015447616577,
      "learning_rate": 7.67948979595428e-06,
      "loss": 1.0844,
      "step": 2660
    },
    {
      "epoch": 0.08424041646947468,
      "grad_norm": 0.5294978022575378,
      "learning_rate": 7.388861855278861e-06,
      "loss": 1.095,
      "step": 2670
    },
    {
      "epoch": 0.08455592364726297,
      "grad_norm": 0.45833665132522583,
      "learning_rate": 7.103400634860946e-06,
      "loss": 1.1586,
      "step": 2680
    },
    {
      "epoch": 0.08487143082505128,
      "grad_norm": 0.5748533606529236,
      "learning_rate": 6.823140747675816e-06,
      "loss": 1.3103,
      "step": 2690
    },
    {
      "epoch": 0.08518693800283957,
      "grad_norm": 0.546541690826416,
      "learning_rate": 6.548116176022456e-06,
      "loss": 1.1564,
      "step": 2700
    },
    {
      "epoch": 0.08550244518062786,
      "grad_norm": 0.7005789279937744,
      "learning_rate": 6.2783602674030935e-06,
      "loss": 1.1113,
      "step": 2710
    },
    {
      "epoch": 0.08581795235841616,
      "grad_norm": 0.5347318649291992,
      "learning_rate": 6.013905730479824e-06,
      "loss": 1.2208,
      "step": 2720
    },
    {
      "epoch": 0.08613345953620445,
      "grad_norm": 0.47552213072776794,
      "learning_rate": 5.754784631108484e-06,
      "loss": 1.1326,
      "step": 2730
    },
    {
      "epoch": 0.08644896671399274,
      "grad_norm": 0.47632184624671936,
      "learning_rate": 5.5010283884506135e-06,
      "loss": 1.0831,
      "step": 2740
    },
    {
      "epoch": 0.08676447389178103,
      "grad_norm": 0.5365753173828125,
      "learning_rate": 5.252667771163827e-06,
      "loss": 1.2979,
      "step": 2750
    },
    {
      "epoch": 0.08707998106956934,
      "grad_norm": 0.4148613512516022,
      "learning_rate": 5.009732893670971e-06,
      "loss": 1.0888,
      "step": 2760
    },
    {
      "epoch": 0.08739548824735763,
      "grad_norm": 0.44592395424842834,
      "learning_rate": 4.7722532125087095e-06,
      "loss": 1.0999,
      "step": 2770
    },
    {
      "epoch": 0.08771099542514592,
      "grad_norm": 0.7864736914634705,
      "learning_rate": 4.540257522755809e-06,
      "loss": 1.0966,
      "step": 2780
    },
    {
      "epoch": 0.08802650260293422,
      "grad_norm": 0.6768621206283569,
      "learning_rate": 4.313773954541672e-06,
      "loss": 1.1897,
      "step": 2790
    },
    {
      "epoch": 0.08834200978072251,
      "grad_norm": 0.5477901101112366,
      "learning_rate": 4.092829969635464e-06,
      "loss": 1.1737,
      "step": 2800
    },
    {
      "epoch": 0.0886575169585108,
      "grad_norm": 0.5500572323799133,
      "learning_rate": 3.877452358116324e-06,
      "loss": 1.1163,
      "step": 2810
    },
    {
      "epoch": 0.0889730241362991,
      "grad_norm": 0.7095085978507996,
      "learning_rate": 3.6676672351249495e-06,
      "loss": 1.088,
      "step": 2820
    },
    {
      "epoch": 0.0892885313140874,
      "grad_norm": 0.6136842966079712,
      "learning_rate": 3.463500037697104e-06,
      "loss": 1.1441,
      "step": 2830
    },
    {
      "epoch": 0.08960403849187569,
      "grad_norm": 0.48445186018943787,
      "learning_rate": 3.2649755216792867e-06,
      "loss": 1.1387,
      "step": 2840
    },
    {
      "epoch": 0.08991954566966398,
      "grad_norm": 0.5624507665634155,
      "learning_rate": 3.0721177587270224e-06,
      "loss": 1.0582,
      "step": 2850
    },
    {
      "epoch": 0.09023505284745229,
      "grad_norm": 0.533638060092926,
      "learning_rate": 2.884950133386055e-06,
      "loss": 1.151,
      "step": 2860
    },
    {
      "epoch": 0.09055056002524058,
      "grad_norm": 0.5064953565597534,
      "learning_rate": 2.7034953402570174e-06,
      "loss": 1.0153,
      "step": 2870
    },
    {
      "epoch": 0.09086606720302887,
      "grad_norm": 0.4341917932033539,
      "learning_rate": 2.5277753812435467e-06,
      "loss": 1.094,
      "step": 2880
    },
    {
      "epoch": 0.09118157438081716,
      "grad_norm": 0.3772113025188446,
      "learning_rate": 2.35781156288451e-06,
      "loss": 1.0971,
      "step": 2890
    },
    {
      "epoch": 0.09149708155860546,
      "grad_norm": 0.43354088068008423,
      "learning_rate": 2.1936244937705906e-06,
      "loss": 1.1798,
      "step": 2900
    },
    {
      "epoch": 0.09181258873639375,
      "grad_norm": 0.3896254599094391,
      "learning_rate": 2.0352340820453453e-06,
      "loss": 1.0737,
      "step": 2910
    },
    {
      "epoch": 0.09212809591418204,
      "grad_norm": 0.36763468384742737,
      "learning_rate": 1.8826595329913489e-06,
      "loss": 1.0351,
      "step": 2920
    },
    {
      "epoch": 0.09244360309197035,
      "grad_norm": 0.5166642069816589,
      "learning_rate": 1.7359193467014812e-06,
      "loss": 1.1765,
      "step": 2930
    },
    {
      "epoch": 0.09275911026975864,
      "grad_norm": 0.5475133061408997,
      "learning_rate": 1.5950313158357432e-06,
      "loss": 1.127,
      "step": 2940
    },
    {
      "epoch": 0.09307461744754693,
      "grad_norm": 0.46491655707359314,
      "learning_rate": 1.460012523463833e-06,
      "loss": 1.0714,
      "step": 2950
    },
    {
      "epoch": 0.09339012462533523,
      "grad_norm": 0.42786383628845215,
      "learning_rate": 1.3308793409937959e-06,
      "loss": 1.1625,
      "step": 2960
    },
    {
      "epoch": 0.09370563180312352,
      "grad_norm": 0.5214555263519287,
      "learning_rate": 1.2076474261869363e-06,
      "loss": 1.2349,
      "step": 2970
    },
    {
      "epoch": 0.09402113898091181,
      "grad_norm": 0.6581803560256958,
      "learning_rate": 1.0903317212592579e-06,
      "loss": 1.0605,
      "step": 2980
    },
    {
      "epoch": 0.0943366461587001,
      "grad_norm": 0.3951626121997833,
      "learning_rate": 9.78946451069701e-07,
      "loss": 1.2178,
      "step": 2990
    },
    {
      "epoch": 0.09465215333648841,
      "grad_norm": 0.651982307434082,
      "learning_rate": 8.735051213953182e-07,
      "loss": 1.2502,
      "step": 3000
    },
    {
      "epoch": 0.0949676605142767,
      "grad_norm": 0.5281954407691956,
      "learning_rate": 7.740205172936665e-07,
      "loss": 1.107,
      "step": 3010
    },
    {
      "epoch": 0.09528316769206499,
      "grad_norm": 0.5843939185142517,
      "learning_rate": 6.805047015526089e-07,
      "loss": 1.1559,
      "step": 3020
    },
    {
      "epoch": 0.0955986748698533,
      "grad_norm": 0.6147716641426086,
      "learning_rate": 5.929690132276123e-07,
      "loss": 1.1583,
      "step": 3030
    },
    {
      "epoch": 0.09591418204764159,
      "grad_norm": 0.6916910409927368,
      "learning_rate": 5.114240662669023e-07,
      "loss": 1.2058,
      "step": 3040
    },
    {
      "epoch": 0.09622968922542988,
      "grad_norm": 0.4254242479801178,
      "learning_rate": 4.3587974822449364e-07,
      "loss": 1.0994,
      "step": 3050
    },
    {
      "epoch": 0.09654519640321817,
      "grad_norm": 0.6165900230407715,
      "learning_rate": 3.663452190612482e-07,
      "loss": 1.0987,
      "step": 3060
    },
    {
      "epoch": 0.09686070358100647,
      "grad_norm": 0.7261757850646973,
      "learning_rate": 3.028289100342585e-07,
      "loss": 1.1478,
      "step": 3070
    },
    {
      "epoch": 0.09717621075879476,
      "grad_norm": 0.5742084980010986,
      "learning_rate": 2.4533852267450974e-07,
      "loss": 1.1684,
      "step": 3080
    },
    {
      "epoch": 0.09749171793658305,
      "grad_norm": 0.6034279465675354,
      "learning_rate": 1.938810278530323e-07,
      "loss": 1.1457,
      "step": 3090
    },
    {
      "epoch": 0.09780722511437136,
      "grad_norm": 0.5837968587875366,
      "learning_rate": 1.4846266493570017e-07,
      "loss": 1.1902,
      "step": 3100
    },
    {
      "epoch": 0.09812273229215965,
      "grad_norm": 0.5142175555229187,
      "learning_rate": 1.0908894102666378e-07,
      "loss": 1.1009,
      "step": 3110
    },
    {
      "epoch": 0.09843823946994794,
      "grad_norm": 0.4157280921936035,
      "learning_rate": 7.57646303006121e-08,
      "loss": 1.1106,
      "step": 3120
    },
    {
      "epoch": 0.09875374664773623,
      "grad_norm": 0.41369205713272095,
      "learning_rate": 4.8493773423863295e-08,
      "loss": 1.1904,
      "step": 3130
    },
    {
      "epoch": 0.09906925382552453,
      "grad_norm": 0.6153455972671509,
      "learning_rate": 2.727967706447343e-08,
      "loss": 1.114,
      "step": 3140
    },
    {
      "epoch": 0.09938476100331282,
      "grad_norm": 0.4912586808204651,
      "learning_rate": 1.2124913491240453e-08,
      "loss": 1.0558,
      "step": 3150
    },
    {
      "epoch": 0.09970026818110111,
      "grad_norm": 0.5294234752655029,
      "learning_rate": 3.0313202618370562e-09,
      "loss": 1.1303,
      "step": 3160
    },
    {
      "epoch": 0.10001577535888942,
      "grad_norm": 0.5471630096435547,
      "learning_rate": 0.0,
      "loss": 1.1321,
      "step": 3170
    },
    {
      "epoch": 0.10001577535888942,
      "step": 3170,
      "total_flos": 7.093787845892506e+17,
      "train_loss": 1.1559843716185176,
      "train_runtime": 9587.5735,
      "train_samples_per_second": 2.645,
      "train_steps_per_second": 0.331
    }
  ],
  "logging_steps": 10,
  "max_steps": 3170,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.093787845892506e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}