|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.5861574323112784, |
|
"eval_steps": 2000, |
|
"global_step": 10000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.9492120146751404, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 0.9769, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.344916582107544, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.0718, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.6680417060852051, |
|
"learning_rate": 3e-06, |
|
"loss": 0.8901, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.8369797468185425, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.9398, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.5738756656646729, |
|
"learning_rate": 5e-06, |
|
"loss": 0.9152, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.028475284576416, |
|
"learning_rate": 6e-06, |
|
"loss": 0.849, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.2893372774124146, |
|
"learning_rate": 7e-06, |
|
"loss": 0.7312, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.8779547810554504, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.6707, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.8972748517990112, |
|
"learning_rate": 9e-06, |
|
"loss": 0.6413, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.505399227142334, |
|
"learning_rate": 1e-05, |
|
"loss": 0.6129, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.6442121267318726, |
|
"learning_rate": 9.989898989898991e-06, |
|
"loss": 0.5579, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.623290479183197, |
|
"learning_rate": 9.97979797979798e-06, |
|
"loss": 0.6022, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.7216657996177673, |
|
"learning_rate": 9.96969696969697e-06, |
|
"loss": 0.5481, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.7031328678131104, |
|
"learning_rate": 9.95959595959596e-06, |
|
"loss": 0.5594, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.7411965727806091, |
|
"learning_rate": 9.94949494949495e-06, |
|
"loss": 0.6241, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.8735277652740479, |
|
"learning_rate": 9.939393939393939e-06, |
|
"loss": 0.498, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.1062073707580566, |
|
"learning_rate": 9.92929292929293e-06, |
|
"loss": 0.6463, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.7930333614349365, |
|
"learning_rate": 9.91919191919192e-06, |
|
"loss": 0.6231, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.8774802684783936, |
|
"learning_rate": 9.90909090909091e-06, |
|
"loss": 0.5895, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.929542064666748, |
|
"learning_rate": 9.8989898989899e-06, |
|
"loss": 0.632, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.6024735569953918, |
|
"learning_rate": 9.88888888888889e-06, |
|
"loss": 0.5682, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.5887441635131836, |
|
"learning_rate": 9.87878787878788e-06, |
|
"loss": 0.5192, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.7813496589660645, |
|
"learning_rate": 9.86868686868687e-06, |
|
"loss": 0.5841, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.5949407815933228, |
|
"learning_rate": 9.85858585858586e-06, |
|
"loss": 0.5263, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.8114556670188904, |
|
"learning_rate": 9.84848484848485e-06, |
|
"loss": 0.5513, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.7692855596542358, |
|
"learning_rate": 9.838383838383839e-06, |
|
"loss": 0.5217, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.8319826126098633, |
|
"learning_rate": 9.828282828282829e-06, |
|
"loss": 0.4386, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.6725042462348938, |
|
"learning_rate": 9.81818181818182e-06, |
|
"loss": 0.4919, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.779315173625946, |
|
"learning_rate": 9.80808080808081e-06, |
|
"loss": 0.5281, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.8005223274230957, |
|
"learning_rate": 9.797979797979798e-06, |
|
"loss": 0.5206, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.6395801901817322, |
|
"learning_rate": 9.787878787878788e-06, |
|
"loss": 0.474, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.1016992330551147, |
|
"learning_rate": 9.777777777777779e-06, |
|
"loss": 0.5403, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.395189881324768, |
|
"learning_rate": 9.767676767676767e-06, |
|
"loss": 0.5181, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.801499605178833, |
|
"learning_rate": 9.757575757575758e-06, |
|
"loss": 0.5296, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.7606889605522156, |
|
"learning_rate": 9.747474747474748e-06, |
|
"loss": 0.5406, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.580170750617981, |
|
"learning_rate": 9.737373737373738e-06, |
|
"loss": 0.541, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.6730871796607971, |
|
"learning_rate": 9.727272727272728e-06, |
|
"loss": 0.5665, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.240430474281311, |
|
"learning_rate": 9.717171717171719e-06, |
|
"loss": 0.5148, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.9439683556556702, |
|
"learning_rate": 9.707070707070709e-06, |
|
"loss": 0.513, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.6674547791481018, |
|
"learning_rate": 9.696969696969698e-06, |
|
"loss": 0.5202, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.8516045808792114, |
|
"learning_rate": 9.686868686868688e-06, |
|
"loss": 0.5162, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.8700432181358337, |
|
"learning_rate": 9.676767676767678e-06, |
|
"loss": 0.5392, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.5687388777732849, |
|
"learning_rate": 9.666666666666667e-06, |
|
"loss": 0.5106, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.2382631301879883, |
|
"learning_rate": 9.656565656565657e-06, |
|
"loss": 0.5074, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.8921974301338196, |
|
"learning_rate": 9.646464646464647e-06, |
|
"loss": 0.561, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.3508048057556152, |
|
"learning_rate": 9.636363636363638e-06, |
|
"loss": 0.5484, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.1822205781936646, |
|
"learning_rate": 9.626262626262626e-06, |
|
"loss": 0.5719, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.6617271304130554, |
|
"learning_rate": 9.616161616161616e-06, |
|
"loss": 0.535, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.6614571809768677, |
|
"learning_rate": 9.606060606060607e-06, |
|
"loss": 0.4549, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.8642953038215637, |
|
"learning_rate": 9.595959595959597e-06, |
|
"loss": 0.4789, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.614743173122406, |
|
"learning_rate": 9.585858585858586e-06, |
|
"loss": 0.4854, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.970829427242279, |
|
"learning_rate": 9.575757575757576e-06, |
|
"loss": 0.5196, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.7311980128288269, |
|
"learning_rate": 9.565656565656566e-06, |
|
"loss": 0.5106, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.765849769115448, |
|
"learning_rate": 9.555555555555556e-06, |
|
"loss": 0.5782, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.0889312028884888, |
|
"learning_rate": 9.545454545454547e-06, |
|
"loss": 0.5824, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.7402384877204895, |
|
"learning_rate": 9.535353535353537e-06, |
|
"loss": 0.5005, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.707028329372406, |
|
"learning_rate": 9.525252525252526e-06, |
|
"loss": 0.5233, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.8338315486907959, |
|
"learning_rate": 9.515151515151516e-06, |
|
"loss": 0.4694, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.7450662851333618, |
|
"learning_rate": 9.505050505050506e-06, |
|
"loss": 0.4762, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.7595840692520142, |
|
"learning_rate": 9.494949494949497e-06, |
|
"loss": 0.5018, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.5880123376846313, |
|
"learning_rate": 9.484848484848485e-06, |
|
"loss": 0.5228, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.9635146260261536, |
|
"learning_rate": 9.474747474747475e-06, |
|
"loss": 0.4987, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1274373531341553, |
|
"learning_rate": 9.464646464646466e-06, |
|
"loss": 0.5085, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1324831247329712, |
|
"learning_rate": 9.454545454545456e-06, |
|
"loss": 0.504, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.5410157442092896, |
|
"learning_rate": 9.444444444444445e-06, |
|
"loss": 0.4619, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.7583281993865967, |
|
"learning_rate": 9.434343434343435e-06, |
|
"loss": 0.5162, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.6546668410301208, |
|
"learning_rate": 9.424242424242425e-06, |
|
"loss": 0.4969, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.6070376634597778, |
|
"learning_rate": 9.414141414141414e-06, |
|
"loss": 0.4805, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.0108693838119507, |
|
"learning_rate": 9.404040404040404e-06, |
|
"loss": 0.4808, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.8799183368682861, |
|
"learning_rate": 9.393939393939396e-06, |
|
"loss": 0.5082, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.3432070016860962, |
|
"learning_rate": 9.383838383838385e-06, |
|
"loss": 0.4353, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.6518195271492004, |
|
"learning_rate": 9.373737373737375e-06, |
|
"loss": 0.4933, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.6736329793930054, |
|
"learning_rate": 9.363636363636365e-06, |
|
"loss": 0.5342, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.776785135269165, |
|
"learning_rate": 9.353535353535354e-06, |
|
"loss": 0.5162, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.9443957805633545, |
|
"learning_rate": 9.343434343434344e-06, |
|
"loss": 0.4486, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.8882728815078735, |
|
"learning_rate": 9.333333333333334e-06, |
|
"loss": 0.4774, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.819239616394043, |
|
"learning_rate": 9.323232323232325e-06, |
|
"loss": 0.5, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.8829065561294556, |
|
"learning_rate": 9.313131313131313e-06, |
|
"loss": 0.4655, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.1993345022201538, |
|
"learning_rate": 9.303030303030303e-06, |
|
"loss": 0.5392, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.681409478187561, |
|
"learning_rate": 9.292929292929294e-06, |
|
"loss": 0.5076, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.075088381767273, |
|
"learning_rate": 9.282828282828284e-06, |
|
"loss": 0.4953, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.7040195465087891, |
|
"learning_rate": 9.272727272727273e-06, |
|
"loss": 0.5596, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.1210192441940308, |
|
"learning_rate": 9.262626262626263e-06, |
|
"loss": 0.5299, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.583011269569397, |
|
"learning_rate": 9.252525252525253e-06, |
|
"loss": 0.497, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.6583887338638306, |
|
"learning_rate": 9.242424242424244e-06, |
|
"loss": 0.5556, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.8040810227394104, |
|
"learning_rate": 9.232323232323232e-06, |
|
"loss": 0.5257, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.9269919991493225, |
|
"learning_rate": 9.222222222222224e-06, |
|
"loss": 0.4421, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.9947918653488159, |
|
"learning_rate": 9.212121212121213e-06, |
|
"loss": 0.5297, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.6900811791419983, |
|
"learning_rate": 9.202020202020203e-06, |
|
"loss": 0.4833, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.8033557534217834, |
|
"learning_rate": 9.191919191919193e-06, |
|
"loss": 0.4894, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.635124683380127, |
|
"learning_rate": 9.181818181818184e-06, |
|
"loss": 0.4554, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.7293840646743774, |
|
"learning_rate": 9.171717171717172e-06, |
|
"loss": 0.4693, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.7628031373023987, |
|
"learning_rate": 9.161616161616162e-06, |
|
"loss": 0.5181, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.0783181190490723, |
|
"learning_rate": 9.151515151515153e-06, |
|
"loss": 0.4632, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.5340379476547241, |
|
"learning_rate": 9.141414141414143e-06, |
|
"loss": 0.4664, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.9029551148414612, |
|
"learning_rate": 9.131313131313132e-06, |
|
"loss": 0.5333, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.7257616519927979, |
|
"learning_rate": 9.121212121212122e-06, |
|
"loss": 0.5168, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.761325478553772, |
|
"learning_rate": 9.111111111111112e-06, |
|
"loss": 0.5606, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.8582245707511902, |
|
"learning_rate": 9.1010101010101e-06, |
|
"loss": 0.4332, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.8598415851593018, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 0.5884, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.8292351365089417, |
|
"learning_rate": 9.080808080808081e-06, |
|
"loss": 0.4848, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.0559266805648804, |
|
"learning_rate": 9.070707070707072e-06, |
|
"loss": 0.4588, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.6693033576011658, |
|
"learning_rate": 9.06060606060606e-06, |
|
"loss": 0.5333, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.8114706873893738, |
|
"learning_rate": 9.050505050505052e-06, |
|
"loss": 0.5166, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.8659316301345825, |
|
"learning_rate": 9.040404040404042e-06, |
|
"loss": 0.4504, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.9083582758903503, |
|
"learning_rate": 9.030303030303031e-06, |
|
"loss": 0.5611, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.6691566109657288, |
|
"learning_rate": 9.020202020202021e-06, |
|
"loss": 0.5192, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.5889317989349365, |
|
"learning_rate": 9.010101010101012e-06, |
|
"loss": 0.4515, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.9215373992919922, |
|
"learning_rate": 9e-06, |
|
"loss": 0.4776, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.7439729571342468, |
|
"learning_rate": 8.98989898989899e-06, |
|
"loss": 0.4656, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.1780657768249512, |
|
"learning_rate": 8.97979797979798e-06, |
|
"loss": 0.4933, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9686077833175659, |
|
"learning_rate": 8.969696969696971e-06, |
|
"loss": 0.5167, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.829994261264801, |
|
"learning_rate": 8.95959595959596e-06, |
|
"loss": 0.491, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.6313827633857727, |
|
"learning_rate": 8.94949494949495e-06, |
|
"loss": 0.4864, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.596537709236145, |
|
"learning_rate": 8.93939393939394e-06, |
|
"loss": 0.4807, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.8714896440505981, |
|
"learning_rate": 8.92929292929293e-06, |
|
"loss": 0.512, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.9466399550437927, |
|
"learning_rate": 8.919191919191919e-06, |
|
"loss": 0.4883, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.7337993383407593, |
|
"learning_rate": 8.90909090909091e-06, |
|
"loss": 0.4757, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.7684504985809326, |
|
"learning_rate": 8.8989898989899e-06, |
|
"loss": 0.5224, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.6455299854278564, |
|
"learning_rate": 8.888888888888888e-06, |
|
"loss": 0.5346, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.7279661893844604, |
|
"learning_rate": 8.87878787878788e-06, |
|
"loss": 0.4845, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.7996425032615662, |
|
"learning_rate": 8.86868686868687e-06, |
|
"loss": 0.4812, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.7299336791038513, |
|
"learning_rate": 8.85858585858586e-06, |
|
"loss": 0.4985, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.7462379932403564, |
|
"learning_rate": 8.84848484848485e-06, |
|
"loss": 0.5742, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.7165307998657227, |
|
"learning_rate": 8.83838383838384e-06, |
|
"loss": 0.4627, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.7239411473274231, |
|
"learning_rate": 8.82828282828283e-06, |
|
"loss": 0.5469, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5761345028877258, |
|
"learning_rate": 8.818181818181819e-06, |
|
"loss": 0.4391, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.8207817077636719, |
|
"learning_rate": 8.808080808080809e-06, |
|
"loss": 0.5477, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9331930875778198, |
|
"learning_rate": 8.7979797979798e-06, |
|
"loss": 0.4975, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0401968955993652, |
|
"learning_rate": 8.787878787878788e-06, |
|
"loss": 0.5149, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.7848596572875977, |
|
"learning_rate": 8.777777777777778e-06, |
|
"loss": 0.5448, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.5979989767074585, |
|
"learning_rate": 8.767676767676768e-06, |
|
"loss": 0.4861, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.7734145522117615, |
|
"learning_rate": 8.757575757575759e-06, |
|
"loss": 0.5955, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.9506089687347412, |
|
"learning_rate": 8.747474747474747e-06, |
|
"loss": 0.5212, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.7118907570838928, |
|
"learning_rate": 8.737373737373738e-06, |
|
"loss": 0.474, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0045005083084106, |
|
"learning_rate": 8.727272727272728e-06, |
|
"loss": 0.509, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.1283302307128906, |
|
"learning_rate": 8.717171717171718e-06, |
|
"loss": 0.5096, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.3884085416793823, |
|
"learning_rate": 8.707070707070707e-06, |
|
"loss": 0.5008, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.7452566027641296, |
|
"learning_rate": 8.696969696969699e-06, |
|
"loss": 0.4792, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.3434414863586426, |
|
"learning_rate": 8.686868686868687e-06, |
|
"loss": 0.5385, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9600369930267334, |
|
"learning_rate": 8.676767676767678e-06, |
|
"loss": 0.4743, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.6895599961280823, |
|
"learning_rate": 8.666666666666668e-06, |
|
"loss": 0.5639, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9460670948028564, |
|
"learning_rate": 8.656565656565658e-06, |
|
"loss": 0.5957, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.6181186437606812, |
|
"learning_rate": 8.646464646464647e-06, |
|
"loss": 0.5032, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.6992838382720947, |
|
"learning_rate": 8.636363636363637e-06, |
|
"loss": 0.5371, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.6007195711135864, |
|
"learning_rate": 8.626262626262627e-06, |
|
"loss": 0.4446, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8171485662460327, |
|
"learning_rate": 8.616161616161618e-06, |
|
"loss": 0.4835, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9369902610778809, |
|
"learning_rate": 8.606060606060606e-06, |
|
"loss": 0.5437, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.1894652843475342, |
|
"learning_rate": 8.595959595959596e-06, |
|
"loss": 0.5151, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.694837212562561, |
|
"learning_rate": 8.585858585858587e-06, |
|
"loss": 0.488, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8813522458076477, |
|
"learning_rate": 8.575757575757575e-06, |
|
"loss": 0.5129, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.0121350288391113, |
|
"learning_rate": 8.565656565656566e-06, |
|
"loss": 0.5165, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.7935439944267273, |
|
"learning_rate": 8.555555555555556e-06, |
|
"loss": 0.4736, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.7670463919639587, |
|
"learning_rate": 8.545454545454546e-06, |
|
"loss": 0.5214, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.212927222251892, |
|
"learning_rate": 8.535353535353535e-06, |
|
"loss": 0.5125, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.7966919541358948, |
|
"learning_rate": 8.525252525252527e-06, |
|
"loss": 0.4823, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.0880494117736816, |
|
"learning_rate": 8.515151515151517e-06, |
|
"loss": 0.478, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0308737754821777, |
|
"learning_rate": 8.505050505050506e-06, |
|
"loss": 0.5368, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.7291275262832642, |
|
"learning_rate": 8.494949494949496e-06, |
|
"loss": 0.4838, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.6764214038848877, |
|
"learning_rate": 8.484848484848486e-06, |
|
"loss": 0.4882, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.051628828048706, |
|
"learning_rate": 8.474747474747475e-06, |
|
"loss": 0.4564, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8614614605903625, |
|
"learning_rate": 8.464646464646465e-06, |
|
"loss": 0.5632, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.1045228242874146, |
|
"learning_rate": 8.454545454545455e-06, |
|
"loss": 0.4535, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8160364031791687, |
|
"learning_rate": 8.444444444444446e-06, |
|
"loss": 0.4964, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.7776429653167725, |
|
"learning_rate": 8.434343434343434e-06, |
|
"loss": 0.4554, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7589672207832336, |
|
"learning_rate": 8.424242424242425e-06, |
|
"loss": 0.4458, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.825233519077301, |
|
"learning_rate": 8.414141414141415e-06, |
|
"loss": 0.4814, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8226912617683411, |
|
"learning_rate": 8.404040404040405e-06, |
|
"loss": 0.4892, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9273412823677063, |
|
"learning_rate": 8.393939393939394e-06, |
|
"loss": 0.5143, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9043828248977661, |
|
"learning_rate": 8.383838383838384e-06, |
|
"loss": 0.4598, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 2.2596805095672607, |
|
"learning_rate": 8.373737373737374e-06, |
|
"loss": 0.5415, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0041202306747437, |
|
"learning_rate": 8.363636363636365e-06, |
|
"loss": 0.5899, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9188370704650879, |
|
"learning_rate": 8.353535353535355e-06, |
|
"loss": 0.5119, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.7778961062431335, |
|
"learning_rate": 8.343434343434345e-06, |
|
"loss": 0.5237, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.7438649535179138, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 0.4999, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.5649489760398865, |
|
"learning_rate": 8.323232323232324e-06, |
|
"loss": 0.552, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.5625451803207397, |
|
"learning_rate": 8.313131313131314e-06, |
|
"loss": 0.4549, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.3711755275726318, |
|
"learning_rate": 8.303030303030305e-06, |
|
"loss": 0.445, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.4339165687561035, |
|
"learning_rate": 8.292929292929293e-06, |
|
"loss": 0.4975, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8113200068473816, |
|
"learning_rate": 8.282828282828283e-06, |
|
"loss": 0.5288, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.1567124128341675, |
|
"learning_rate": 8.272727272727274e-06, |
|
"loss": 0.4669, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.7966761589050293, |
|
"learning_rate": 8.262626262626264e-06, |
|
"loss": 0.4856, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.0181186199188232, |
|
"learning_rate": 8.252525252525253e-06, |
|
"loss": 0.4576, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.611566960811615, |
|
"learning_rate": 8.242424242424243e-06, |
|
"loss": 0.496, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.6482832431793213, |
|
"learning_rate": 8.232323232323233e-06, |
|
"loss": 0.4601, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.7550622820854187, |
|
"learning_rate": 8.222222222222222e-06, |
|
"loss": 0.5036, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.7835694551467896, |
|
"learning_rate": 8.212121212121212e-06, |
|
"loss": 0.5617, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.7926068305969238, |
|
"learning_rate": 8.202020202020202e-06, |
|
"loss": 0.4327, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.786851167678833, |
|
"learning_rate": 8.191919191919193e-06, |
|
"loss": 0.4654, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.9023171663284302, |
|
"learning_rate": 8.181818181818183e-06, |
|
"loss": 0.5426, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.0345401763916016, |
|
"learning_rate": 8.171717171717173e-06, |
|
"loss": 0.52, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.945004940032959, |
|
"learning_rate": 8.161616161616162e-06, |
|
"loss": 0.5512, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.658362090587616, |
|
"learning_rate": 8.151515151515152e-06, |
|
"loss": 0.513, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.6390058398246765, |
|
"learning_rate": 8.141414141414142e-06, |
|
"loss": 0.5652, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.7705880403518677, |
|
"learning_rate": 8.131313131313133e-06, |
|
"loss": 0.4929, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.5400047302246094, |
|
"learning_rate": 8.121212121212121e-06, |
|
"loss": 0.4932, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.9128320217132568, |
|
"learning_rate": 8.111111111111112e-06, |
|
"loss": 0.5085, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.0019017457962036, |
|
"learning_rate": 8.101010101010102e-06, |
|
"loss": 0.4552, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.818148136138916, |
|
"learning_rate": 8.090909090909092e-06, |
|
"loss": 0.5517, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8848174810409546, |
|
"learning_rate": 8.08080808080808e-06, |
|
"loss": 0.5094, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"eval_loss": 0.6485620141029358, |
|
"eval_runtime": 340.5095, |
|
"eval_samples_per_second": 2.937, |
|
"eval_steps_per_second": 2.937, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9389908313751221, |
|
"learning_rate": 8.070707070707071e-06, |
|
"loss": 0.5228, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.901942789554596, |
|
"learning_rate": 8.060606060606061e-06, |
|
"loss": 0.4322, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.6482081413269043, |
|
"learning_rate": 8.050505050505052e-06, |
|
"loss": 0.4123, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.188251256942749, |
|
"learning_rate": 8.04040404040404e-06, |
|
"loss": 0.3863, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.6995498538017273, |
|
"learning_rate": 8.03030303030303e-06, |
|
"loss": 0.4834, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.7236158847808838, |
|
"learning_rate": 8.02020202020202e-06, |
|
"loss": 0.5279, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.6699327826499939, |
|
"learning_rate": 8.010101010101011e-06, |
|
"loss": 0.5294, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.7172369956970215, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.4882, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.7741474509239197, |
|
"learning_rate": 7.989898989898992e-06, |
|
"loss": 0.4928, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.7598722577095032, |
|
"learning_rate": 7.97979797979798e-06, |
|
"loss": 0.477, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.7849257588386536, |
|
"learning_rate": 7.96969696969697e-06, |
|
"loss": 0.497, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.7920640707015991, |
|
"learning_rate": 7.95959595959596e-06, |
|
"loss": 0.4974, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.7092992067337036, |
|
"learning_rate": 7.949494949494951e-06, |
|
"loss": 0.5234, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8268713355064392, |
|
"learning_rate": 7.93939393939394e-06, |
|
"loss": 0.3966, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.1046198606491089, |
|
"learning_rate": 7.92929292929293e-06, |
|
"loss": 0.452, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8234682083129883, |
|
"learning_rate": 7.91919191919192e-06, |
|
"loss": 0.5364, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8524590730667114, |
|
"learning_rate": 7.909090909090909e-06, |
|
"loss": 0.3922, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.964320182800293, |
|
"learning_rate": 7.898989898989899e-06, |
|
"loss": 0.4991, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.2167562246322632, |
|
"learning_rate": 7.88888888888889e-06, |
|
"loss": 0.4237, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.893385648727417, |
|
"learning_rate": 7.87878787878788e-06, |
|
"loss": 0.503, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.4781544208526611, |
|
"learning_rate": 7.868686868686868e-06, |
|
"loss": 0.4084, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.0282255411148071, |
|
"learning_rate": 7.858585858585859e-06, |
|
"loss": 0.5531, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5956897139549255, |
|
"learning_rate": 7.848484848484849e-06, |
|
"loss": 0.421, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.674526572227478, |
|
"learning_rate": 7.838383838383839e-06, |
|
"loss": 0.5221, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.7602831125259399, |
|
"learning_rate": 7.82828282828283e-06, |
|
"loss": 0.4826, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.637804388999939, |
|
"learning_rate": 7.81818181818182e-06, |
|
"loss": 0.4965, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.0468482971191406, |
|
"learning_rate": 7.808080808080808e-06, |
|
"loss": 0.4352, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8232371807098389, |
|
"learning_rate": 7.797979797979799e-06, |
|
"loss": 0.4314, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.2416061162948608, |
|
"learning_rate": 7.787878787878789e-06, |
|
"loss": 0.4559, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.7591462135314941, |
|
"learning_rate": 7.77777777777778e-06, |
|
"loss": 0.5443, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.7769295573234558, |
|
"learning_rate": 7.767676767676768e-06, |
|
"loss": 0.4915, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.7007648944854736, |
|
"learning_rate": 7.757575757575758e-06, |
|
"loss": 0.4102, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.8512477874755859, |
|
"learning_rate": 7.747474747474748e-06, |
|
"loss": 0.488, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 1.0806859731674194, |
|
"learning_rate": 7.737373737373739e-06, |
|
"loss": 0.4482, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.8377964496612549, |
|
"learning_rate": 7.727272727272727e-06, |
|
"loss": 0.4714, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.9962327480316162, |
|
"learning_rate": 7.717171717171717e-06, |
|
"loss": 0.4354, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.7444832921028137, |
|
"learning_rate": 7.707070707070708e-06, |
|
"loss": 0.5651, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.7137871384620667, |
|
"learning_rate": 7.696969696969696e-06, |
|
"loss": 0.4773, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.8521376252174377, |
|
"learning_rate": 7.686868686868687e-06, |
|
"loss": 0.4961, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.9442594647407532, |
|
"learning_rate": 7.676767676767677e-06, |
|
"loss": 0.4627, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 1.169360876083374, |
|
"learning_rate": 7.666666666666667e-06, |
|
"loss": 0.4601, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.8181536197662354, |
|
"learning_rate": 7.656565656565658e-06, |
|
"loss": 0.4635, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.9113516211509705, |
|
"learning_rate": 7.646464646464648e-06, |
|
"loss": 0.4219, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.26584792137146, |
|
"learning_rate": 7.636363636363638e-06, |
|
"loss": 0.5165, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.6445989608764648, |
|
"learning_rate": 7.6262626262626275e-06, |
|
"loss": 0.4755, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.7551100850105286, |
|
"learning_rate": 7.616161616161617e-06, |
|
"loss": 0.4976, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.0578736066818237, |
|
"learning_rate": 7.606060606060606e-06, |
|
"loss": 0.524, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.0422261953353882, |
|
"learning_rate": 7.595959595959597e-06, |
|
"loss": 0.5136, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.0524308681488037, |
|
"learning_rate": 7.585858585858586e-06, |
|
"loss": 0.5058, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.8334051966667175, |
|
"learning_rate": 7.5757575757575764e-06, |
|
"loss": 0.4487, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.9613906145095825, |
|
"learning_rate": 7.565656565656566e-06, |
|
"loss": 0.5055, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.9347293972969055, |
|
"learning_rate": 7.555555555555556e-06, |
|
"loss": 0.4672, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.0498868227005005, |
|
"learning_rate": 7.545454545454546e-06, |
|
"loss": 0.4336, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.0005099773406982, |
|
"learning_rate": 7.535353535353536e-06, |
|
"loss": 0.523, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.027875542640686, |
|
"learning_rate": 7.525252525252525e-06, |
|
"loss": 0.5088, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.7960171103477478, |
|
"learning_rate": 7.515151515151516e-06, |
|
"loss": 0.4636, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 1.1945937871932983, |
|
"learning_rate": 7.505050505050505e-06, |
|
"loss": 0.4736, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.9849948287010193, |
|
"learning_rate": 7.494949494949496e-06, |
|
"loss": 0.4766, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.7381343841552734, |
|
"learning_rate": 7.484848484848486e-06, |
|
"loss": 0.4427, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.964435875415802, |
|
"learning_rate": 7.474747474747476e-06, |
|
"loss": 0.5179, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.0832308530807495, |
|
"learning_rate": 7.464646464646465e-06, |
|
"loss": 0.5179, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.8275842070579529, |
|
"learning_rate": 7.454545454545456e-06, |
|
"loss": 0.3957, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.7618038654327393, |
|
"learning_rate": 7.444444444444445e-06, |
|
"loss": 0.4626, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.9667460918426514, |
|
"learning_rate": 7.434343434343435e-06, |
|
"loss": 0.4338, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.8243513107299805, |
|
"learning_rate": 7.424242424242425e-06, |
|
"loss": 0.496, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.8097424507141113, |
|
"learning_rate": 7.414141414141415e-06, |
|
"loss": 0.4911, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.219351053237915, |
|
"learning_rate": 7.4040404040404045e-06, |
|
"loss": 0.5163, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.7272489666938782, |
|
"learning_rate": 7.393939393939395e-06, |
|
"loss": 0.4783, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.9799202084541321, |
|
"learning_rate": 7.383838383838384e-06, |
|
"loss": 0.4638, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 1.1537474393844604, |
|
"learning_rate": 7.373737373737374e-06, |
|
"loss": 0.4184, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.750483512878418, |
|
"learning_rate": 7.363636363636364e-06, |
|
"loss": 0.4544, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.915256679058075, |
|
"learning_rate": 7.353535353535353e-06, |
|
"loss": 0.4579, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.0220776796340942, |
|
"learning_rate": 7.343434343434344e-06, |
|
"loss": 0.4568, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.0540516376495361, |
|
"learning_rate": 7.333333333333333e-06, |
|
"loss": 0.5228, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 2.014601230621338, |
|
"learning_rate": 7.323232323232324e-06, |
|
"loss": 0.4482, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.9005866646766663, |
|
"learning_rate": 7.3131313131313146e-06, |
|
"loss": 0.4868, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 1.0538028478622437, |
|
"learning_rate": 7.303030303030304e-06, |
|
"loss": 0.4884, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.9115027785301208, |
|
"learning_rate": 7.2929292929292934e-06, |
|
"loss": 0.4421, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.5715361833572388, |
|
"learning_rate": 7.282828282828284e-06, |
|
"loss": 0.4442, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.8492701649665833, |
|
"learning_rate": 7.272727272727273e-06, |
|
"loss": 0.4828, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.7459146976470947, |
|
"learning_rate": 7.2626262626262635e-06, |
|
"loss": 0.4649, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.6268482804298401, |
|
"learning_rate": 7.252525252525253e-06, |
|
"loss": 0.4521, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.9125592112541199, |
|
"learning_rate": 7.242424242424243e-06, |
|
"loss": 0.57, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.8162791132926941, |
|
"learning_rate": 7.232323232323233e-06, |
|
"loss": 0.4746, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.9345506429672241, |
|
"learning_rate": 7.222222222222223e-06, |
|
"loss": 0.4777, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.6382574439048767, |
|
"learning_rate": 7.212121212121212e-06, |
|
"loss": 0.4832, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.9409840703010559, |
|
"learning_rate": 7.202020202020203e-06, |
|
"loss": 0.4659, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 1.0053200721740723, |
|
"learning_rate": 7.191919191919192e-06, |
|
"loss": 0.4041, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.8394672870635986, |
|
"learning_rate": 7.181818181818182e-06, |
|
"loss": 0.4477, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.8685116171836853, |
|
"learning_rate": 7.171717171717172e-06, |
|
"loss": 0.4155, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.8150361776351929, |
|
"learning_rate": 7.161616161616162e-06, |
|
"loss": 0.3901, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.7799510955810547, |
|
"learning_rate": 7.151515151515152e-06, |
|
"loss": 0.4273, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 1.1197009086608887, |
|
"learning_rate": 7.141414141414143e-06, |
|
"loss": 0.447, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.8753387331962585, |
|
"learning_rate": 7.131313131313132e-06, |
|
"loss": 0.3847, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.9633245468139648, |
|
"learning_rate": 7.121212121212122e-06, |
|
"loss": 0.4985, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 1.0391559600830078, |
|
"learning_rate": 7.111111111111112e-06, |
|
"loss": 0.4476, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.9432191252708435, |
|
"learning_rate": 7.101010101010102e-06, |
|
"loss": 0.4669, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.078410267829895, |
|
"learning_rate": 7.0909090909090916e-06, |
|
"loss": 0.401, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.9405683875083923, |
|
"learning_rate": 7.080808080808082e-06, |
|
"loss": 0.4744, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 1.4189893007278442, |
|
"learning_rate": 7.070707070707071e-06, |
|
"loss": 0.4334, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.9169256091117859, |
|
"learning_rate": 7.060606060606061e-06, |
|
"loss": 0.4234, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.8117393851280212, |
|
"learning_rate": 7.050505050505051e-06, |
|
"loss": 0.4676, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.967890739440918, |
|
"learning_rate": 7.0404040404040404e-06, |
|
"loss": 0.4497, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.0548313856124878, |
|
"learning_rate": 7.030303030303031e-06, |
|
"loss": 0.4429, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.071303367614746, |
|
"learning_rate": 7.02020202020202e-06, |
|
"loss": 0.4251, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 1.1772490739822388, |
|
"learning_rate": 7.0101010101010105e-06, |
|
"loss": 0.4897, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 1.3903322219848633, |
|
"learning_rate": 7e-06, |
|
"loss": 0.4953, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.8463280200958252, |
|
"learning_rate": 6.98989898989899e-06, |
|
"loss": 0.3923, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.146888256072998, |
|
"learning_rate": 6.979797979797981e-06, |
|
"loss": 0.4723, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.0877312421798706, |
|
"learning_rate": 6.969696969696971e-06, |
|
"loss": 0.4822, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.7444977164268494, |
|
"learning_rate": 6.95959595959596e-06, |
|
"loss": 0.4591, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.7259607911109924, |
|
"learning_rate": 6.9494949494949505e-06, |
|
"loss": 0.4564, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.9535048604011536, |
|
"learning_rate": 6.93939393939394e-06, |
|
"loss": 0.4402, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.9428088068962097, |
|
"learning_rate": 6.92929292929293e-06, |
|
"loss": 0.4039, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.9858347177505493, |
|
"learning_rate": 6.91919191919192e-06, |
|
"loss": 0.4334, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 1.0837122201919556, |
|
"learning_rate": 6.90909090909091e-06, |
|
"loss": 0.3957, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.0605705976486206, |
|
"learning_rate": 6.898989898989899e-06, |
|
"loss": 0.4195, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.0856525897979736, |
|
"learning_rate": 6.88888888888889e-06, |
|
"loss": 0.4286, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.02509605884552, |
|
"learning_rate": 6.878787878787879e-06, |
|
"loss": 0.4413, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.8847616314888, |
|
"learning_rate": 6.868686868686869e-06, |
|
"loss": 0.4117, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 1.1464060544967651, |
|
"learning_rate": 6.858585858585859e-06, |
|
"loss": 0.44, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.9589990973472595, |
|
"learning_rate": 6.848484848484849e-06, |
|
"loss": 0.4896, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.080541968345642, |
|
"learning_rate": 6.8383838383838386e-06, |
|
"loss": 0.4203, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.3150702714920044, |
|
"learning_rate": 6.828282828282828e-06, |
|
"loss": 0.4213, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.0854105949401855, |
|
"learning_rate": 6.818181818181818e-06, |
|
"loss": 0.4538, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.7211188673973083, |
|
"learning_rate": 6.808080808080809e-06, |
|
"loss": 0.4332, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.0323004722595215, |
|
"learning_rate": 6.797979797979799e-06, |
|
"loss": 0.465, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.9176574349403381, |
|
"learning_rate": 6.787878787878789e-06, |
|
"loss": 0.4898, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 1.2033518552780151, |
|
"learning_rate": 6.777777777777779e-06, |
|
"loss": 0.4652, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.6069207191467285, |
|
"learning_rate": 6.767676767676769e-06, |
|
"loss": 0.4663, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.9627435803413391, |
|
"learning_rate": 6.757575757575758e-06, |
|
"loss": 0.4992, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.7333098649978638, |
|
"learning_rate": 6.747474747474749e-06, |
|
"loss": 0.4876, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.6735036969184875, |
|
"learning_rate": 6.737373737373738e-06, |
|
"loss": 0.4797, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.8416649699211121, |
|
"learning_rate": 6.7272727272727275e-06, |
|
"loss": 0.3889, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.227149486541748, |
|
"learning_rate": 6.717171717171718e-06, |
|
"loss": 0.4677, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.5126155614852905, |
|
"learning_rate": 6.707070707070707e-06, |
|
"loss": 0.4444, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.1622529029846191, |
|
"learning_rate": 6.6969696969696975e-06, |
|
"loss": 0.4372, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.975615918636322, |
|
"learning_rate": 6.686868686868687e-06, |
|
"loss": 0.4037, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.8992089033126831, |
|
"learning_rate": 6.676767676767677e-06, |
|
"loss": 0.3785, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 1.4439970254898071, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.5169, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.9601142406463623, |
|
"learning_rate": 6.656565656565657e-06, |
|
"loss": 0.4396, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 1.0457966327667236, |
|
"learning_rate": 6.646464646464646e-06, |
|
"loss": 0.4065, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 1.0014325380325317, |
|
"learning_rate": 6.6363636363636375e-06, |
|
"loss": 0.4616, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 1.3321492671966553, |
|
"learning_rate": 6.626262626262627e-06, |
|
"loss": 0.4164, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.9500308036804199, |
|
"learning_rate": 6.616161616161617e-06, |
|
"loss": 0.4156, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.1907135248184204, |
|
"learning_rate": 6.606060606060607e-06, |
|
"loss": 0.4353, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.7826789617538452, |
|
"learning_rate": 6.595959595959597e-06, |
|
"loss": 0.4764, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 1.0160069465637207, |
|
"learning_rate": 6.585858585858586e-06, |
|
"loss": 0.4594, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 1.0795533657073975, |
|
"learning_rate": 6.575757575757577e-06, |
|
"loss": 0.4557, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.2484716176986694, |
|
"learning_rate": 6.565656565656566e-06, |
|
"loss": 0.5057, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.6144826412200928, |
|
"learning_rate": 6.555555555555556e-06, |
|
"loss": 0.4554, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.3575657606124878, |
|
"learning_rate": 6.545454545454546e-06, |
|
"loss": 0.3373, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.9021044373512268, |
|
"learning_rate": 6.535353535353536e-06, |
|
"loss": 0.4765, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 1.0576077699661255, |
|
"learning_rate": 6.525252525252526e-06, |
|
"loss": 0.4344, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 1.0687928199768066, |
|
"learning_rate": 6.515151515151516e-06, |
|
"loss": 0.5022, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.9124734401702881, |
|
"learning_rate": 6.505050505050505e-06, |
|
"loss": 0.4133, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.8715507984161377, |
|
"learning_rate": 6.494949494949495e-06, |
|
"loss": 0.454, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 1.0889540910720825, |
|
"learning_rate": 6.484848484848485e-06, |
|
"loss": 0.443, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.9323639273643494, |
|
"learning_rate": 6.4747474747474745e-06, |
|
"loss": 0.4983, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.6946113109588623, |
|
"learning_rate": 6.464646464646466e-06, |
|
"loss": 0.4364, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 1.1879271268844604, |
|
"learning_rate": 6.454545454545456e-06, |
|
"loss": 0.4816, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 1.337172031402588, |
|
"learning_rate": 6.444444444444445e-06, |
|
"loss": 0.4963, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.9384005069732666, |
|
"learning_rate": 6.434343434343436e-06, |
|
"loss": 0.5185, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.2666473388671875, |
|
"learning_rate": 6.424242424242425e-06, |
|
"loss": 0.389, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.037668228149414, |
|
"learning_rate": 6.4141414141414145e-06, |
|
"loss": 0.4529, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.4652140140533447, |
|
"learning_rate": 6.404040404040405e-06, |
|
"loss": 0.3915, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.1823006868362427, |
|
"learning_rate": 6.393939393939394e-06, |
|
"loss": 0.5123, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.0706424713134766, |
|
"learning_rate": 6.3838383838383845e-06, |
|
"loss": 0.4989, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.9993262887001038, |
|
"learning_rate": 6.373737373737374e-06, |
|
"loss": 0.4268, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.9947906732559204, |
|
"learning_rate": 6.363636363636364e-06, |
|
"loss": 0.4246, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.8563311696052551, |
|
"learning_rate": 6.353535353535354e-06, |
|
"loss": 0.4439, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 1.2519429922103882, |
|
"learning_rate": 6.343434343434344e-06, |
|
"loss": 0.3973, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 1.2925148010253906, |
|
"learning_rate": 6.333333333333333e-06, |
|
"loss": 0.4179, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 1.1731756925582886, |
|
"learning_rate": 6.323232323232324e-06, |
|
"loss": 0.4748, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 1.030536413192749, |
|
"learning_rate": 6.313131313131313e-06, |
|
"loss": 0.4131, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 1.1279093027114868, |
|
"learning_rate": 6.303030303030303e-06, |
|
"loss": 0.4143, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 1.1602801084518433, |
|
"learning_rate": 6.292929292929294e-06, |
|
"loss": 0.4718, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 1.5314010381698608, |
|
"learning_rate": 6.282828282828284e-06, |
|
"loss": 0.5463, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 1.6124476194381714, |
|
"learning_rate": 6.2727272727272734e-06, |
|
"loss": 0.4857, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 1.185355305671692, |
|
"learning_rate": 6.262626262626264e-06, |
|
"loss": 0.4167, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.1900933980941772, |
|
"learning_rate": 6.252525252525253e-06, |
|
"loss": 0.4658, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.2930880784988403, |
|
"learning_rate": 6.2424242424242434e-06, |
|
"loss": 0.4912, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.1819838285446167, |
|
"learning_rate": 6.232323232323233e-06, |
|
"loss": 0.4129, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 1.315292477607727, |
|
"learning_rate": 6.222222222222223e-06, |
|
"loss": 0.477, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 1.007524013519287, |
|
"learning_rate": 6.212121212121213e-06, |
|
"loss": 0.414, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 1.1948471069335938, |
|
"learning_rate": 6.202020202020203e-06, |
|
"loss": 0.4576, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.3339303731918335, |
|
"learning_rate": 6.191919191919192e-06, |
|
"loss": 0.5547, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.2568715810775757, |
|
"learning_rate": 6.181818181818182e-06, |
|
"loss": 0.3941, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.2084232568740845, |
|
"learning_rate": 6.171717171717172e-06, |
|
"loss": 0.462, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.0328818559646606, |
|
"learning_rate": 6.1616161616161615e-06, |
|
"loss": 0.4484, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.9673748016357422, |
|
"learning_rate": 6.151515151515152e-06, |
|
"loss": 0.4543, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.4105424880981445, |
|
"learning_rate": 6.141414141414141e-06, |
|
"loss": 0.4964, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.7082942128181458, |
|
"learning_rate": 6.1313131313131315e-06, |
|
"loss": 0.5297, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.2604327201843262, |
|
"learning_rate": 6.121212121212121e-06, |
|
"loss": 0.4207, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.999332070350647, |
|
"learning_rate": 6.111111111111112e-06, |
|
"loss": 0.4791, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 1.139615535736084, |
|
"learning_rate": 6.1010101010101015e-06, |
|
"loss": 0.4281, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 1.1164727210998535, |
|
"learning_rate": 6.090909090909092e-06, |
|
"loss": 0.3807, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 1.0829392671585083, |
|
"learning_rate": 6.080808080808081e-06, |
|
"loss": 0.4996, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.8551573753356934, |
|
"learning_rate": 6.0707070707070715e-06, |
|
"loss": 0.4759, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 1.224134087562561, |
|
"learning_rate": 6.060606060606061e-06, |
|
"loss": 0.4244, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"eval_loss": 0.5992664694786072, |
|
"eval_runtime": 334.8169, |
|
"eval_samples_per_second": 2.987, |
|
"eval_steps_per_second": 2.987, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.1042165756225586, |
|
"learning_rate": 6.050505050505051e-06, |
|
"loss": 0.4815, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.087483525276184, |
|
"learning_rate": 6.040404040404041e-06, |
|
"loss": 0.4918, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.0040034055709839, |
|
"learning_rate": 6.030303030303031e-06, |
|
"loss": 0.4666, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.2772231101989746, |
|
"learning_rate": 6.0202020202020204e-06, |
|
"loss": 0.4847, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.217619776725769, |
|
"learning_rate": 6.010101010101011e-06, |
|
"loss": 0.4138, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.109666347503662, |
|
"learning_rate": 6e-06, |
|
"loss": 0.4799, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.2436999082565308, |
|
"learning_rate": 5.9898989898989904e-06, |
|
"loss": 0.4218, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.252548098564148, |
|
"learning_rate": 5.97979797979798e-06, |
|
"loss": 0.4585, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.2544574737548828, |
|
"learning_rate": 5.96969696969697e-06, |
|
"loss": 0.4633, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.144337773323059, |
|
"learning_rate": 5.95959595959596e-06, |
|
"loss": 0.4408, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.067790150642395, |
|
"learning_rate": 5.949494949494949e-06, |
|
"loss": 0.4349, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.8417388200759888, |
|
"learning_rate": 5.93939393939394e-06, |
|
"loss": 0.4859, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.5285634994506836, |
|
"learning_rate": 5.9292929292929305e-06, |
|
"loss": 0.4556, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.1098241806030273, |
|
"learning_rate": 5.91919191919192e-06, |
|
"loss": 0.5067, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.8364139795303345, |
|
"learning_rate": 5.90909090909091e-06, |
|
"loss": 0.4781, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.8289257287979126, |
|
"learning_rate": 5.8989898989899e-06, |
|
"loss": 0.3966, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.9829197525978088, |
|
"learning_rate": 5.88888888888889e-06, |
|
"loss": 0.4382, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.7760851383209229, |
|
"learning_rate": 5.878787878787879e-06, |
|
"loss": 0.3977, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 1.0614337921142578, |
|
"learning_rate": 5.868686868686869e-06, |
|
"loss": 0.4382, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 1.098379135131836, |
|
"learning_rate": 5.858585858585859e-06, |
|
"loss": 0.3688, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 1.2058717012405396, |
|
"learning_rate": 5.8484848484848485e-06, |
|
"loss": 0.4048, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.9828083515167236, |
|
"learning_rate": 5.838383838383839e-06, |
|
"loss": 0.4997, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.327776551246643, |
|
"learning_rate": 5.828282828282828e-06, |
|
"loss": 0.4621, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.2171368598937988, |
|
"learning_rate": 5.8181818181818185e-06, |
|
"loss": 0.4177, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.4939155578613281, |
|
"learning_rate": 5.808080808080808e-06, |
|
"loss": 0.4821, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.523369550704956, |
|
"learning_rate": 5.797979797979798e-06, |
|
"loss": 0.4116, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.3256771564483643, |
|
"learning_rate": 5.787878787878788e-06, |
|
"loss": 0.4948, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.5732948780059814, |
|
"learning_rate": 5.777777777777778e-06, |
|
"loss": 0.5275, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 1.1372538805007935, |
|
"learning_rate": 5.767676767676768e-06, |
|
"loss": 0.463, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.9834716320037842, |
|
"learning_rate": 5.7575757575757586e-06, |
|
"loss": 0.4383, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.443036437034607, |
|
"learning_rate": 5.747474747474748e-06, |
|
"loss": 0.4781, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.2556253671646118, |
|
"learning_rate": 5.737373737373738e-06, |
|
"loss": 0.4159, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.272565245628357, |
|
"learning_rate": 5.727272727272728e-06, |
|
"loss": 0.433, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.078231930732727, |
|
"learning_rate": 5.717171717171718e-06, |
|
"loss": 0.4644, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.1723682880401611, |
|
"learning_rate": 5.7070707070707075e-06, |
|
"loss": 0.4555, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.2048708200454712, |
|
"learning_rate": 5.696969696969698e-06, |
|
"loss": 0.4618, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.8913971781730652, |
|
"learning_rate": 5.686868686868687e-06, |
|
"loss": 0.4015, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.1153560876846313, |
|
"learning_rate": 5.6767676767676775e-06, |
|
"loss": 0.5233, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.055448293685913, |
|
"learning_rate": 5.666666666666667e-06, |
|
"loss": 0.398, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 1.2389256954193115, |
|
"learning_rate": 5.656565656565657e-06, |
|
"loss": 0.453, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.9692911505699158, |
|
"learning_rate": 5.646464646464647e-06, |
|
"loss": 0.3773, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.9325522780418396, |
|
"learning_rate": 5.636363636363636e-06, |
|
"loss": 0.4471, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 1.0931851863861084, |
|
"learning_rate": 5.626262626262626e-06, |
|
"loss": 0.452, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.8405607342720032, |
|
"learning_rate": 5.616161616161616e-06, |
|
"loss": 0.4743, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.1500225067138672, |
|
"learning_rate": 5.606060606060606e-06, |
|
"loss": 0.4135, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.0774083137512207, |
|
"learning_rate": 5.595959595959597e-06, |
|
"loss": 0.3775, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.1228448152542114, |
|
"learning_rate": 5.585858585858587e-06, |
|
"loss": 0.5094, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.7777533531188965, |
|
"learning_rate": 5.575757575757577e-06, |
|
"loss": 0.4158, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.1433957815170288, |
|
"learning_rate": 5.565656565656566e-06, |
|
"loss": 0.4466, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.3501386642456055, |
|
"learning_rate": 5.555555555555557e-06, |
|
"loss": 0.5255, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 1.4286872148513794, |
|
"learning_rate": 5.545454545454546e-06, |
|
"loss": 0.5021, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.9643106460571289, |
|
"learning_rate": 5.5353535353535355e-06, |
|
"loss": 0.4004, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.8871945142745972, |
|
"learning_rate": 5.525252525252526e-06, |
|
"loss": 0.3657, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.7113863229751587, |
|
"learning_rate": 5.515151515151515e-06, |
|
"loss": 0.374, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.3388639688491821, |
|
"learning_rate": 5.5050505050505056e-06, |
|
"loss": 0.4632, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.1603946685791016, |
|
"learning_rate": 5.494949494949495e-06, |
|
"loss": 0.4569, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.250316858291626, |
|
"learning_rate": 5.484848484848485e-06, |
|
"loss": 0.3804, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.1444791555404663, |
|
"learning_rate": 5.474747474747475e-06, |
|
"loss": 0.4521, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.804335355758667, |
|
"learning_rate": 5.464646464646465e-06, |
|
"loss": 0.3925, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.2074679136276245, |
|
"learning_rate": 5.4545454545454545e-06, |
|
"loss": 0.3929, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.3282480239868164, |
|
"learning_rate": 5.444444444444445e-06, |
|
"loss": 0.4577, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 1.075721263885498, |
|
"learning_rate": 5.434343434343434e-06, |
|
"loss": 0.4422, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 1.4774171113967896, |
|
"learning_rate": 5.424242424242425e-06, |
|
"loss": 0.4462, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.7713523507118225, |
|
"learning_rate": 5.414141414141415e-06, |
|
"loss": 0.4863, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.9707368016242981, |
|
"learning_rate": 5.404040404040405e-06, |
|
"loss": 0.4625, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 1.6415660381317139, |
|
"learning_rate": 5.3939393939393945e-06, |
|
"loss": 0.5686, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.9006029367446899, |
|
"learning_rate": 5.383838383838385e-06, |
|
"loss": 0.4107, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 1.580544114112854, |
|
"learning_rate": 5.373737373737374e-06, |
|
"loss": 0.4864, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 2.001404285430908, |
|
"learning_rate": 5.3636363636363645e-06, |
|
"loss": 0.4311, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.8837793469429016, |
|
"learning_rate": 5.353535353535354e-06, |
|
"loss": 0.5013, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 1.3012306690216064, |
|
"learning_rate": 5.343434343434344e-06, |
|
"loss": 0.4551, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.9690237641334534, |
|
"learning_rate": 5.333333333333334e-06, |
|
"loss": 0.4975, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 1.2548136711120605, |
|
"learning_rate": 5.323232323232324e-06, |
|
"loss": 0.3947, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 1.3401527404785156, |
|
"learning_rate": 5.313131313131313e-06, |
|
"loss": 0.4532, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 1.481223225593567, |
|
"learning_rate": 5.303030303030303e-06, |
|
"loss": 0.4406, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 1.0860910415649414, |
|
"learning_rate": 5.292929292929293e-06, |
|
"loss": 0.4261, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 1.8275758028030396, |
|
"learning_rate": 5.2828282828282825e-06, |
|
"loss": 0.4282, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.8796372413635254, |
|
"learning_rate": 5.272727272727273e-06, |
|
"loss": 0.4372, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.7672245502471924, |
|
"learning_rate": 5.262626262626262e-06, |
|
"loss": 0.4625, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.187982439994812, |
|
"learning_rate": 5.252525252525253e-06, |
|
"loss": 0.4431, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.0611240863800049, |
|
"learning_rate": 5.242424242424244e-06, |
|
"loss": 0.4097, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.259895920753479, |
|
"learning_rate": 5.232323232323233e-06, |
|
"loss": 0.42, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.0518171787261963, |
|
"learning_rate": 5.2222222222222226e-06, |
|
"loss": 0.4103, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.1404093503952026, |
|
"learning_rate": 5.212121212121213e-06, |
|
"loss": 0.4167, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.3545587062835693, |
|
"learning_rate": 5.202020202020202e-06, |
|
"loss": 0.4231, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.3752548694610596, |
|
"learning_rate": 5.191919191919193e-06, |
|
"loss": 0.4385, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 1.0795814990997314, |
|
"learning_rate": 5.181818181818182e-06, |
|
"loss": 0.5562, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 1.176770806312561, |
|
"learning_rate": 5.171717171717172e-06, |
|
"loss": 0.3841, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 1.0362882614135742, |
|
"learning_rate": 5.161616161616162e-06, |
|
"loss": 0.412, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.854006826877594, |
|
"learning_rate": 5.151515151515152e-06, |
|
"loss": 0.4833, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.257077693939209, |
|
"learning_rate": 5.1414141414141415e-06, |
|
"loss": 0.4173, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.0905293226242065, |
|
"learning_rate": 5.131313131313132e-06, |
|
"loss": 0.4145, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 1.1781564950942993, |
|
"learning_rate": 5.121212121212121e-06, |
|
"loss": 0.4619, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.746283233165741, |
|
"learning_rate": 5.1111111111111115e-06, |
|
"loss": 0.4203, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 1.3257133960723877, |
|
"learning_rate": 5.101010101010101e-06, |
|
"loss": 0.442, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.8805459141731262, |
|
"learning_rate": 5.090909090909091e-06, |
|
"loss": 0.4587, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 1.0892632007598877, |
|
"learning_rate": 5.0808080808080815e-06, |
|
"loss": 0.4005, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 1.2787294387817383, |
|
"learning_rate": 5.070707070707072e-06, |
|
"loss": 0.4589, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.8316894173622131, |
|
"learning_rate": 5.060606060606061e-06, |
|
"loss": 0.4888, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 1.487030029296875, |
|
"learning_rate": 5.0505050505050515e-06, |
|
"loss": 0.3977, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.9546489119529724, |
|
"learning_rate": 5.040404040404041e-06, |
|
"loss": 0.5509, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 1.015142560005188, |
|
"learning_rate": 5.030303030303031e-06, |
|
"loss": 0.3927, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 1.2369190454483032, |
|
"learning_rate": 5.020202020202021e-06, |
|
"loss": 0.4477, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.1981439590454102, |
|
"learning_rate": 5.010101010101011e-06, |
|
"loss": 0.4566, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.507062554359436, |
|
"learning_rate": 5e-06, |
|
"loss": 0.3958, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.2192580699920654, |
|
"learning_rate": 4.98989898989899e-06, |
|
"loss": 0.4423, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.2157553434371948, |
|
"learning_rate": 4.97979797979798e-06, |
|
"loss": 0.4268, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.0647870302200317, |
|
"learning_rate": 4.9696969696969696e-06, |
|
"loss": 0.4907, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 1.071527123451233, |
|
"learning_rate": 4.95959595959596e-06, |
|
"loss": 0.4471, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 1.6718330383300781, |
|
"learning_rate": 4.94949494949495e-06, |
|
"loss": 0.4899, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 1.4382431507110596, |
|
"learning_rate": 4.93939393939394e-06, |
|
"loss": 0.4316, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.5574830770492554, |
|
"learning_rate": 4.92929292929293e-06, |
|
"loss": 0.4293, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.417203664779663, |
|
"learning_rate": 4.919191919191919e-06, |
|
"loss": 0.3967, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.9984129667282104, |
|
"learning_rate": 4.90909090909091e-06, |
|
"loss": 0.4927, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 1.204757809638977, |
|
"learning_rate": 4.898989898989899e-06, |
|
"loss": 0.5291, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.959578812122345, |
|
"learning_rate": 4.888888888888889e-06, |
|
"loss": 0.4275, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 1.5679115056991577, |
|
"learning_rate": 4.878787878787879e-06, |
|
"loss": 0.4559, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.9421458840370178, |
|
"learning_rate": 4.868686868686869e-06, |
|
"loss": 0.4496, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 1.3694195747375488, |
|
"learning_rate": 4.858585858585859e-06, |
|
"loss": 0.4168, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.9521112442016602, |
|
"learning_rate": 4.848484848484849e-06, |
|
"loss": 0.469, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 1.3057090044021606, |
|
"learning_rate": 4.838383838383839e-06, |
|
"loss": 0.5104, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 1.2285308837890625, |
|
"learning_rate": 4.8282828282828285e-06, |
|
"loss": 0.499, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.2023276090621948, |
|
"learning_rate": 4.818181818181819e-06, |
|
"loss": 0.4013, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.4066063165664673, |
|
"learning_rate": 4.808080808080808e-06, |
|
"loss": 0.4593, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.085758090019226, |
|
"learning_rate": 4.7979797979797985e-06, |
|
"loss": 0.3793, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.192099928855896, |
|
"learning_rate": 4.787878787878788e-06, |
|
"loss": 0.4223, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.4430789947509766, |
|
"learning_rate": 4.777777777777778e-06, |
|
"loss": 0.407, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.2750104665756226, |
|
"learning_rate": 4.7676767676767685e-06, |
|
"loss": 0.435, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.9866845607757568, |
|
"learning_rate": 4.757575757575758e-06, |
|
"loss": 0.4411, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 1.2444196939468384, |
|
"learning_rate": 4.747474747474748e-06, |
|
"loss": 0.4571, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 1.0778636932373047, |
|
"learning_rate": 4.737373737373738e-06, |
|
"loss": 0.3838, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 1.4677258729934692, |
|
"learning_rate": 4.727272727272728e-06, |
|
"loss": 0.4906, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 1.2698739767074585, |
|
"learning_rate": 4.717171717171717e-06, |
|
"loss": 0.4368, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 1.499267578125, |
|
"learning_rate": 4.707070707070707e-06, |
|
"loss": 0.4075, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 1.106027364730835, |
|
"learning_rate": 4.696969696969698e-06, |
|
"loss": 0.482, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.8394500017166138, |
|
"learning_rate": 4.6868686868686874e-06, |
|
"loss": 0.4472, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.4100090265274048, |
|
"learning_rate": 4.676767676767677e-06, |
|
"loss": 0.4038, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.072623610496521, |
|
"learning_rate": 4.666666666666667e-06, |
|
"loss": 0.4491, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.120101809501648, |
|
"learning_rate": 4.656565656565657e-06, |
|
"loss": 0.4316, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 1.4536925554275513, |
|
"learning_rate": 4.646464646464647e-06, |
|
"loss": 0.425, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 1.1416407823562622, |
|
"learning_rate": 4.636363636363636e-06, |
|
"loss": 0.4577, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.91941237449646, |
|
"learning_rate": 4.626262626262627e-06, |
|
"loss": 0.4237, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.3551466464996338, |
|
"learning_rate": 4.616161616161616e-06, |
|
"loss": 0.4854, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.127760648727417, |
|
"learning_rate": 4.606060606060606e-06, |
|
"loss": 0.4674, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.0127209424972534, |
|
"learning_rate": 4.595959595959597e-06, |
|
"loss": 0.347, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 1.1834722757339478, |
|
"learning_rate": 4.585858585858586e-06, |
|
"loss": 0.4954, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 1.0718438625335693, |
|
"learning_rate": 4.575757575757576e-06, |
|
"loss": 0.4073, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 1.146881341934204, |
|
"learning_rate": 4.565656565656566e-06, |
|
"loss": 0.4052, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 1.426000714302063, |
|
"learning_rate": 4.555555555555556e-06, |
|
"loss": 0.4552, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 1.4062806367874146, |
|
"learning_rate": 4.5454545454545455e-06, |
|
"loss": 0.3671, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.276281714439392, |
|
"learning_rate": 4.535353535353536e-06, |
|
"loss": 0.3792, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.0640497207641602, |
|
"learning_rate": 4.525252525252526e-06, |
|
"loss": 0.5246, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.0342713594436646, |
|
"learning_rate": 4.5151515151515155e-06, |
|
"loss": 0.5189, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 2.512465000152588, |
|
"learning_rate": 4.505050505050506e-06, |
|
"loss": 0.4245, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.3084385395050049, |
|
"learning_rate": 4.494949494949495e-06, |
|
"loss": 0.462, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.2058395147323608, |
|
"learning_rate": 4.4848484848484855e-06, |
|
"loss": 0.466, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.2008490562438965, |
|
"learning_rate": 4.474747474747475e-06, |
|
"loss": 0.4181, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.923298716545105, |
|
"learning_rate": 4.464646464646465e-06, |
|
"loss": 0.408, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.1701871156692505, |
|
"learning_rate": 4.454545454545455e-06, |
|
"loss": 0.4696, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 1.1120387315750122, |
|
"learning_rate": 4.444444444444444e-06, |
|
"loss": 0.432, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 1.445493221282959, |
|
"learning_rate": 4.434343434343435e-06, |
|
"loss": 0.4397, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.978252649307251, |
|
"learning_rate": 4.424242424242425e-06, |
|
"loss": 0.4448, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 1.1885161399841309, |
|
"learning_rate": 4.414141414141415e-06, |
|
"loss": 0.3415, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 1.4562844038009644, |
|
"learning_rate": 4.4040404040404044e-06, |
|
"loss": 0.4312, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 1.5203760862350464, |
|
"learning_rate": 4.393939393939394e-06, |
|
"loss": 0.4395, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 1.4762277603149414, |
|
"learning_rate": 4.383838383838384e-06, |
|
"loss": 0.3605, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 2.0301284790039062, |
|
"learning_rate": 4.373737373737374e-06, |
|
"loss": 0.3548, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 1.2977855205535889, |
|
"learning_rate": 4.363636363636364e-06, |
|
"loss": 0.4309, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 1.4677530527114868, |
|
"learning_rate": 4.353535353535353e-06, |
|
"loss": 0.4431, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 1.1909211874008179, |
|
"learning_rate": 4.343434343434344e-06, |
|
"loss": 0.3571, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 1.2588609457015991, |
|
"learning_rate": 4.333333333333334e-06, |
|
"loss": 0.4226, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 1.4346741437911987, |
|
"learning_rate": 4.323232323232323e-06, |
|
"loss": 0.3029, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 1.5790741443634033, |
|
"learning_rate": 4.313131313131314e-06, |
|
"loss": 0.4236, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.9452656507492065, |
|
"learning_rate": 4.303030303030303e-06, |
|
"loss": 0.3965, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 1.3782027959823608, |
|
"learning_rate": 4.292929292929293e-06, |
|
"loss": 0.3925, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 1.4818460941314697, |
|
"learning_rate": 4.282828282828283e-06, |
|
"loss": 0.4207, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 1.8508193492889404, |
|
"learning_rate": 4.272727272727273e-06, |
|
"loss": 0.3512, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.9073811173439026, |
|
"learning_rate": 4.262626262626263e-06, |
|
"loss": 0.3946, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 1.285936951637268, |
|
"learning_rate": 4.252525252525253e-06, |
|
"loss": 0.4387, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 1.3050516843795776, |
|
"learning_rate": 4.242424242424243e-06, |
|
"loss": 0.5095, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 1.601285696029663, |
|
"learning_rate": 4.2323232323232325e-06, |
|
"loss": 0.3595, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 1.4098488092422485, |
|
"learning_rate": 4.222222222222223e-06, |
|
"loss": 0.3726, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 1.3759149312973022, |
|
"learning_rate": 4.212121212121212e-06, |
|
"loss": 0.3576, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 1.1540992259979248, |
|
"learning_rate": 4.2020202020202026e-06, |
|
"loss": 0.4564, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 1.2275508642196655, |
|
"learning_rate": 4.191919191919192e-06, |
|
"loss": 0.4272, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 2.126412868499756, |
|
"learning_rate": 4.181818181818182e-06, |
|
"loss": 0.403, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 1.4939548969268799, |
|
"learning_rate": 4.1717171717171726e-06, |
|
"loss": 0.4235, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 1.2192233800888062, |
|
"learning_rate": 4.161616161616162e-06, |
|
"loss": 0.4342, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 1.251043438911438, |
|
"learning_rate": 4.151515151515152e-06, |
|
"loss": 0.3835, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 1.4634102582931519, |
|
"learning_rate": 4.141414141414142e-06, |
|
"loss": 0.424, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 1.5133440494537354, |
|
"learning_rate": 4.131313131313132e-06, |
|
"loss": 0.4223, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.8985018730163574, |
|
"learning_rate": 4.1212121212121215e-06, |
|
"loss": 0.5013, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 1.4917223453521729, |
|
"learning_rate": 4.111111111111111e-06, |
|
"loss": 0.4636, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 1.6001038551330566, |
|
"learning_rate": 4.101010101010101e-06, |
|
"loss": 0.3814, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 1.5513330698013306, |
|
"learning_rate": 4.0909090909090915e-06, |
|
"loss": 0.4455, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 1.8656607866287231, |
|
"learning_rate": 4.080808080808081e-06, |
|
"loss": 0.4245, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 1.4433702230453491, |
|
"learning_rate": 4.070707070707071e-06, |
|
"loss": 0.4544, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 1.2587636709213257, |
|
"learning_rate": 4.060606060606061e-06, |
|
"loss": 0.4406, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 1.6164227724075317, |
|
"learning_rate": 4.050505050505051e-06, |
|
"loss": 0.3538, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 1.4971836805343628, |
|
"learning_rate": 4.04040404040404e-06, |
|
"loss": 0.4548, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"eval_loss": 0.6277722716331482, |
|
"eval_runtime": 334.0785, |
|
"eval_samples_per_second": 2.993, |
|
"eval_steps_per_second": 2.993, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 1.5129014253616333, |
|
"learning_rate": 4.030303030303031e-06, |
|
"loss": 0.4389, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 1.4870412349700928, |
|
"learning_rate": 4.02020202020202e-06, |
|
"loss": 0.3601, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 1.3821163177490234, |
|
"learning_rate": 4.01010101010101e-06, |
|
"loss": 0.3857, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 1.152169942855835, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.3656, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 1.6697760820388794, |
|
"learning_rate": 3.98989898989899e-06, |
|
"loss": 0.389, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 1.7103252410888672, |
|
"learning_rate": 3.97979797979798e-06, |
|
"loss": 0.3876, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 1.7297860383987427, |
|
"learning_rate": 3.96969696969697e-06, |
|
"loss": 0.4479, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 1.400660753250122, |
|
"learning_rate": 3.95959595959596e-06, |
|
"loss": 0.3743, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 1.217431664466858, |
|
"learning_rate": 3.9494949494949496e-06, |
|
"loss": 0.4568, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 1.5356212854385376, |
|
"learning_rate": 3.93939393939394e-06, |
|
"loss": 0.4324, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 1.227851390838623, |
|
"learning_rate": 3.929292929292929e-06, |
|
"loss": 0.3789, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 1.499538779258728, |
|
"learning_rate": 3.9191919191919196e-06, |
|
"loss": 0.4569, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 1.25396728515625, |
|
"learning_rate": 3.90909090909091e-06, |
|
"loss": 0.3727, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 1.3319553136825562, |
|
"learning_rate": 3.898989898989899e-06, |
|
"loss": 0.4564, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 2.0902342796325684, |
|
"learning_rate": 3.88888888888889e-06, |
|
"loss": 0.4177, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 1.4777964353561401, |
|
"learning_rate": 3.878787878787879e-06, |
|
"loss": 0.431, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 1.1954903602600098, |
|
"learning_rate": 3.868686868686869e-06, |
|
"loss": 0.411, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 1.538866400718689, |
|
"learning_rate": 3.858585858585859e-06, |
|
"loss": 0.3584, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 1.0525565147399902, |
|
"learning_rate": 3.848484848484848e-06, |
|
"loss": 0.3909, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 1.3541011810302734, |
|
"learning_rate": 3.8383838383838385e-06, |
|
"loss": 0.358, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 1.3592562675476074, |
|
"learning_rate": 3.828282828282829e-06, |
|
"loss": 0.3781, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 1.8746424913406372, |
|
"learning_rate": 3.818181818181819e-06, |
|
"loss": 0.5072, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 1.6377208232879639, |
|
"learning_rate": 3.8080808080808085e-06, |
|
"loss": 0.5005, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 1.8624753952026367, |
|
"learning_rate": 3.7979797979797984e-06, |
|
"loss": 0.3831, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 1.2943841218948364, |
|
"learning_rate": 3.7878787878787882e-06, |
|
"loss": 0.4214, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 1.8572790622711182, |
|
"learning_rate": 3.777777777777778e-06, |
|
"loss": 0.3869, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 1.3550498485565186, |
|
"learning_rate": 3.767676767676768e-06, |
|
"loss": 0.4073, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 1.6371854543685913, |
|
"learning_rate": 3.757575757575758e-06, |
|
"loss": 0.4122, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 1.6021220684051514, |
|
"learning_rate": 3.747474747474748e-06, |
|
"loss": 0.4068, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 1.9373170137405396, |
|
"learning_rate": 3.737373737373738e-06, |
|
"loss": 0.4103, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 1.2740237712860107, |
|
"learning_rate": 3.727272727272728e-06, |
|
"loss": 0.4246, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 1.4819579124450684, |
|
"learning_rate": 3.7171717171717177e-06, |
|
"loss": 0.4298, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 1.417238712310791, |
|
"learning_rate": 3.7070707070707075e-06, |
|
"loss": 0.3961, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 1.6524369716644287, |
|
"learning_rate": 3.6969696969696974e-06, |
|
"loss": 0.3793, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.2900643348693848, |
|
"learning_rate": 3.686868686868687e-06, |
|
"loss": 0.3608, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.5816043615341187, |
|
"learning_rate": 3.6767676767676767e-06, |
|
"loss": 0.3703, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.6086316108703613, |
|
"learning_rate": 3.6666666666666666e-06, |
|
"loss": 0.3597, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 1.3897535800933838, |
|
"learning_rate": 3.6565656565656573e-06, |
|
"loss": 0.4305, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 2.2776060104370117, |
|
"learning_rate": 3.6464646464646467e-06, |
|
"loss": 0.3946, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 1.1567403078079224, |
|
"learning_rate": 3.6363636363636366e-06, |
|
"loss": 0.3988, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 1.0619862079620361, |
|
"learning_rate": 3.6262626262626264e-06, |
|
"loss": 0.3561, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 2.857037305831909, |
|
"learning_rate": 3.6161616161616163e-06, |
|
"loss": 0.4394, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 1.4043927192687988, |
|
"learning_rate": 3.606060606060606e-06, |
|
"loss": 0.3804, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 1.1000697612762451, |
|
"learning_rate": 3.595959595959596e-06, |
|
"loss": 0.3943, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 1.880038857460022, |
|
"learning_rate": 3.585858585858586e-06, |
|
"loss": 0.425, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 1.408372163772583, |
|
"learning_rate": 3.575757575757576e-06, |
|
"loss": 0.5019, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 1.4338643550872803, |
|
"learning_rate": 3.565656565656566e-06, |
|
"loss": 0.4515, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 2.0675547122955322, |
|
"learning_rate": 3.555555555555556e-06, |
|
"loss": 0.4414, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 1.9026405811309814, |
|
"learning_rate": 3.5454545454545458e-06, |
|
"loss": 0.4789, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 1.708087682723999, |
|
"learning_rate": 3.5353535353535356e-06, |
|
"loss": 0.3745, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 1.4911117553710938, |
|
"learning_rate": 3.5252525252525255e-06, |
|
"loss": 0.4387, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 2.0985512733459473, |
|
"learning_rate": 3.5151515151515154e-06, |
|
"loss": 0.4337, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 1.6386370658874512, |
|
"learning_rate": 3.5050505050505052e-06, |
|
"loss": 0.3987, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 1.5496248006820679, |
|
"learning_rate": 3.494949494949495e-06, |
|
"loss": 0.4323, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 1.8171871900558472, |
|
"learning_rate": 3.4848484848484854e-06, |
|
"loss": 0.4434, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 1.2652944326400757, |
|
"learning_rate": 3.4747474747474752e-06, |
|
"loss": 0.3703, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 1.0056226253509521, |
|
"learning_rate": 3.464646464646465e-06, |
|
"loss": 0.3524, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 1.5017718076705933, |
|
"learning_rate": 3.454545454545455e-06, |
|
"loss": 0.3776, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 1.4463821649551392, |
|
"learning_rate": 3.444444444444445e-06, |
|
"loss": 0.4926, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 1.4280391931533813, |
|
"learning_rate": 3.4343434343434347e-06, |
|
"loss": 0.4056, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 1.3990509510040283, |
|
"learning_rate": 3.4242424242424246e-06, |
|
"loss": 0.3657, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 2.033510446548462, |
|
"learning_rate": 3.414141414141414e-06, |
|
"loss": 0.3977, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 1.3510665893554688, |
|
"learning_rate": 3.4040404040404047e-06, |
|
"loss": 0.4227, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 1.2315497398376465, |
|
"learning_rate": 3.3939393939393946e-06, |
|
"loss": 0.395, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 1.7533644437789917, |
|
"learning_rate": 3.3838383838383844e-06, |
|
"loss": 0.4927, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 1.7972692251205444, |
|
"learning_rate": 3.3737373737373743e-06, |
|
"loss": 0.3826, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 1.2529045343399048, |
|
"learning_rate": 3.3636363636363637e-06, |
|
"loss": 0.3399, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 1.9954489469528198, |
|
"learning_rate": 3.3535353535353536e-06, |
|
"loss": 0.3751, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 1.2907588481903076, |
|
"learning_rate": 3.3434343434343435e-06, |
|
"loss": 0.3789, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 1.9876916408538818, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 0.41, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 2.368973970413208, |
|
"learning_rate": 3.323232323232323e-06, |
|
"loss": 0.4146, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 1.4498718976974487, |
|
"learning_rate": 3.3131313131313135e-06, |
|
"loss": 0.4255, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 1.1825134754180908, |
|
"learning_rate": 3.3030303030303033e-06, |
|
"loss": 0.3758, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 1.837396502494812, |
|
"learning_rate": 3.292929292929293e-06, |
|
"loss": 0.4129, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 1.2423911094665527, |
|
"learning_rate": 3.282828282828283e-06, |
|
"loss": 0.4081, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 2.317641258239746, |
|
"learning_rate": 3.272727272727273e-06, |
|
"loss": 0.4525, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 1.5409480333328247, |
|
"learning_rate": 3.262626262626263e-06, |
|
"loss": 0.3919, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 1.5040165185928345, |
|
"learning_rate": 3.2525252525252527e-06, |
|
"loss": 0.3644, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 1.073905110359192, |
|
"learning_rate": 3.2424242424242425e-06, |
|
"loss": 0.4178, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 1.8809980154037476, |
|
"learning_rate": 3.232323232323233e-06, |
|
"loss": 0.4324, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 1.6550686359405518, |
|
"learning_rate": 3.2222222222222227e-06, |
|
"loss": 0.41, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 1.2928190231323242, |
|
"learning_rate": 3.2121212121212125e-06, |
|
"loss": 0.4178, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 2.924306869506836, |
|
"learning_rate": 3.2020202020202024e-06, |
|
"loss": 0.4388, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 1.3699476718902588, |
|
"learning_rate": 3.1919191919191923e-06, |
|
"loss": 0.3703, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 1.1735562086105347, |
|
"learning_rate": 3.181818181818182e-06, |
|
"loss": 0.4546, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 2.5108041763305664, |
|
"learning_rate": 3.171717171717172e-06, |
|
"loss": 0.4331, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 1.8626171350479126, |
|
"learning_rate": 3.161616161616162e-06, |
|
"loss": 0.4169, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 1.476062297821045, |
|
"learning_rate": 3.1515151515151517e-06, |
|
"loss": 0.4041, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 1.1613414287567139, |
|
"learning_rate": 3.141414141414142e-06, |
|
"loss": 0.3558, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 1.7106380462646484, |
|
"learning_rate": 3.131313131313132e-06, |
|
"loss": 0.4175, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 1.3592215776443481, |
|
"learning_rate": 3.1212121212121217e-06, |
|
"loss": 0.3388, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 1.7043094635009766, |
|
"learning_rate": 3.1111111111111116e-06, |
|
"loss": 0.4189, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 1.5782170295715332, |
|
"learning_rate": 3.1010101010101014e-06, |
|
"loss": 0.434, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 1.4425175189971924, |
|
"learning_rate": 3.090909090909091e-06, |
|
"loss": 0.4013, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 1.5864651203155518, |
|
"learning_rate": 3.0808080808080807e-06, |
|
"loss": 0.4702, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 1.678449034690857, |
|
"learning_rate": 3.0707070707070706e-06, |
|
"loss": 0.4722, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 1.8242285251617432, |
|
"learning_rate": 3.0606060606060605e-06, |
|
"loss": 0.4264, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 2.037673234939575, |
|
"learning_rate": 3.0505050505050508e-06, |
|
"loss": 0.4268, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 1.530069351196289, |
|
"learning_rate": 3.0404040404040406e-06, |
|
"loss": 0.4355, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 2.4778358936309814, |
|
"learning_rate": 3.0303030303030305e-06, |
|
"loss": 0.4214, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 2.0160224437713623, |
|
"learning_rate": 3.0202020202020203e-06, |
|
"loss": 0.3948, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 1.18585205078125, |
|
"learning_rate": 3.0101010101010102e-06, |
|
"loss": 0.4199, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 1.516297698020935, |
|
"learning_rate": 3e-06, |
|
"loss": 0.4519, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 1.7270208597183228, |
|
"learning_rate": 2.98989898989899e-06, |
|
"loss": 0.4673, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 1.7389216423034668, |
|
"learning_rate": 2.97979797979798e-06, |
|
"loss": 0.4164, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 1.3944525718688965, |
|
"learning_rate": 2.96969696969697e-06, |
|
"loss": 0.3907, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 1.219854712486267, |
|
"learning_rate": 2.95959595959596e-06, |
|
"loss": 0.3551, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 2.0792431831359863, |
|
"learning_rate": 2.94949494949495e-06, |
|
"loss": 0.4021, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 1.2674493789672852, |
|
"learning_rate": 2.9393939393939397e-06, |
|
"loss": 0.346, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 1.7033464908599854, |
|
"learning_rate": 2.9292929292929295e-06, |
|
"loss": 0.422, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 3.0683019161224365, |
|
"learning_rate": 2.9191919191919194e-06, |
|
"loss": 0.3551, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 1.7624000310897827, |
|
"learning_rate": 2.9090909090909093e-06, |
|
"loss": 0.4915, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 1.8647630214691162, |
|
"learning_rate": 2.898989898989899e-06, |
|
"loss": 0.3403, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 1.8980121612548828, |
|
"learning_rate": 2.888888888888889e-06, |
|
"loss": 0.4502, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 1.0902882814407349, |
|
"learning_rate": 2.8787878787878793e-06, |
|
"loss": 0.3302, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 1.4546654224395752, |
|
"learning_rate": 2.868686868686869e-06, |
|
"loss": 0.3765, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 1.2355202436447144, |
|
"learning_rate": 2.858585858585859e-06, |
|
"loss": 0.3574, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 1.8936539888381958, |
|
"learning_rate": 2.848484848484849e-06, |
|
"loss": 0.4014, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 1.3609634637832642, |
|
"learning_rate": 2.8383838383838387e-06, |
|
"loss": 0.4422, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 1.4429062604904175, |
|
"learning_rate": 2.8282828282828286e-06, |
|
"loss": 0.3675, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 1.6361467838287354, |
|
"learning_rate": 2.818181818181818e-06, |
|
"loss": 0.3673, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 1.495545744895935, |
|
"learning_rate": 2.808080808080808e-06, |
|
"loss": 0.3771, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 1.4525377750396729, |
|
"learning_rate": 2.7979797979797986e-06, |
|
"loss": 0.4315, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 1.5371636152267456, |
|
"learning_rate": 2.7878787878787885e-06, |
|
"loss": 0.3852, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 1.2793852090835571, |
|
"learning_rate": 2.7777777777777783e-06, |
|
"loss": 0.4267, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 1.440574288368225, |
|
"learning_rate": 2.7676767676767678e-06, |
|
"loss": 0.4398, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 1.8655972480773926, |
|
"learning_rate": 2.7575757575757576e-06, |
|
"loss": 0.3594, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 1.0927013158798218, |
|
"learning_rate": 2.7474747474747475e-06, |
|
"loss": 0.3555, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 2.1925394535064697, |
|
"learning_rate": 2.7373737373737374e-06, |
|
"loss": 0.3849, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 2.2034037113189697, |
|
"learning_rate": 2.7272727272727272e-06, |
|
"loss": 0.4148, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 1.269071102142334, |
|
"learning_rate": 2.717171717171717e-06, |
|
"loss": 0.3587, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 1.5352184772491455, |
|
"learning_rate": 2.7070707070707074e-06, |
|
"loss": 0.3811, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 2.0000691413879395, |
|
"learning_rate": 2.6969696969696972e-06, |
|
"loss": 0.4263, |
|
"step": 7330 |
|
},
    { "epoch": 2.63, "grad_norm": 1.831752896308899, "learning_rate": 2.686868686868687e-06, "loss": 0.3788, "step": 7340 },
    { "epoch": 2.64, "grad_norm": 1.6060395240783691, "learning_rate": 2.676767676767677e-06, "loss": 0.4195, "step": 7350 },
    { "epoch": 2.64, "grad_norm": 1.7236618995666504, "learning_rate": 2.666666666666667e-06, "loss": 0.3733, "step": 7360 },
    { "epoch": 2.64, "grad_norm": 1.2738254070281982, "learning_rate": 2.6565656565656567e-06, "loss": 0.3988, "step": 7370 },
    { "epoch": 2.65, "grad_norm": 1.5250334739685059, "learning_rate": 2.6464646464646466e-06, "loss": 0.4551, "step": 7380 },
    { "epoch": 2.65, "grad_norm": 2.97196364402771, "learning_rate": 2.6363636363636364e-06, "loss": 0.4457, "step": 7390 },
    { "epoch": 2.65, "grad_norm": 1.76936674118042, "learning_rate": 2.6262626262626267e-06, "loss": 0.364, "step": 7400 },
    { "epoch": 2.66, "grad_norm": 1.852996587753296, "learning_rate": 2.6161616161616166e-06, "loss": 0.3847, "step": 7410 },
    { "epoch": 2.66, "grad_norm": 1.663035273551941, "learning_rate": 2.6060606060606064e-06, "loss": 0.3657, "step": 7420 },
    { "epoch": 2.66, "grad_norm": 1.9585185050964355, "learning_rate": 2.5959595959595963e-06, "loss": 0.4104, "step": 7430 },
    { "epoch": 2.67, "grad_norm": 1.87462317943573, "learning_rate": 2.585858585858586e-06, "loss": 0.3836, "step": 7440 },
    { "epoch": 2.67, "grad_norm": 1.6113030910491943, "learning_rate": 2.575757575757576e-06, "loss": 0.508, "step": 7450 },
    { "epoch": 2.68, "grad_norm": 1.8767685890197754, "learning_rate": 2.565656565656566e-06, "loss": 0.4736, "step": 7460 },
    { "epoch": 2.68, "grad_norm": 1.8729183673858643, "learning_rate": 2.5555555555555557e-06, "loss": 0.4945, "step": 7470 },
    { "epoch": 2.68, "grad_norm": 1.2904634475708008, "learning_rate": 2.5454545454545456e-06, "loss": 0.3822, "step": 7480 },
    { "epoch": 2.69, "grad_norm": 1.2536317110061646, "learning_rate": 2.535353535353536e-06, "loss": 0.4009, "step": 7490 },
    { "epoch": 2.69, "grad_norm": 1.9122370481491089, "learning_rate": 2.5252525252525258e-06, "loss": 0.48, "step": 7500 },
    { "epoch": 2.69, "grad_norm": 1.3834714889526367, "learning_rate": 2.5151515151515156e-06, "loss": 0.448, "step": 7510 },
    { "epoch": 2.7, "grad_norm": 1.2706272602081299, "learning_rate": 2.5050505050505055e-06, "loss": 0.3774, "step": 7520 },
    { "epoch": 2.7, "grad_norm": 1.407159686088562, "learning_rate": 2.494949494949495e-06, "loss": 0.4177, "step": 7530 },
    { "epoch": 2.7, "grad_norm": 1.6914201974868774, "learning_rate": 2.4848484848484848e-06, "loss": 0.4274, "step": 7540 },
    { "epoch": 2.71, "grad_norm": 1.6943836212158203, "learning_rate": 2.474747474747475e-06, "loss": 0.3993, "step": 7550 },
    { "epoch": 2.71, "grad_norm": 2.1789302825927734, "learning_rate": 2.464646464646465e-06, "loss": 0.4064, "step": 7560 },
    { "epoch": 2.71, "grad_norm": 1.6597346067428589, "learning_rate": 2.454545454545455e-06, "loss": 0.3905, "step": 7570 },
    { "epoch": 2.72, "grad_norm": 1.2498160600662231, "learning_rate": 2.4444444444444447e-06, "loss": 0.4245, "step": 7580 },
    { "epoch": 2.72, "grad_norm": 2.0250113010406494, "learning_rate": 2.4343434343434345e-06, "loss": 0.3716, "step": 7590 },
    { "epoch": 2.73, "grad_norm": 1.5236455202102661, "learning_rate": 2.4242424242424244e-06, "loss": 0.4595, "step": 7600 },
    { "epoch": 2.73, "grad_norm": 1.778412103652954, "learning_rate": 2.4141414141414143e-06, "loss": 0.385, "step": 7610 },
    { "epoch": 2.73, "grad_norm": 1.3730769157409668, "learning_rate": 2.404040404040404e-06, "loss": 0.3449, "step": 7620 },
    { "epoch": 2.74, "grad_norm": 1.288809061050415, "learning_rate": 2.393939393939394e-06, "loss": 0.4099, "step": 7630 },
    { "epoch": 2.74, "grad_norm": 1.6752142906188965, "learning_rate": 2.3838383838383843e-06, "loss": 0.4574, "step": 7640 },
    { "epoch": 2.74, "grad_norm": 1.304540753364563, "learning_rate": 2.373737373737374e-06, "loss": 0.4122, "step": 7650 },
    { "epoch": 2.75, "grad_norm": 1.5244289636611938, "learning_rate": 2.363636363636364e-06, "loss": 0.4547, "step": 7660 },
    { "epoch": 2.75, "grad_norm": 1.4657320976257324, "learning_rate": 2.3535353535353534e-06, "loss": 0.3845, "step": 7670 },
    { "epoch": 2.75, "grad_norm": 1.4184165000915527, "learning_rate": 2.3434343434343437e-06, "loss": 0.3815, "step": 7680 },
    { "epoch": 2.76, "grad_norm": 1.4459203481674194, "learning_rate": 2.3333333333333336e-06, "loss": 0.3597, "step": 7690 },
    { "epoch": 2.76, "grad_norm": 1.182944893836975, "learning_rate": 2.3232323232323234e-06, "loss": 0.3873, "step": 7700 },
    { "epoch": 2.76, "grad_norm": 1.7956231832504272, "learning_rate": 2.3131313131313133e-06, "loss": 0.4001, "step": 7710 },
    { "epoch": 2.77, "grad_norm": 1.7619251012802124, "learning_rate": 2.303030303030303e-06, "loss": 0.4178, "step": 7720 },
    { "epoch": 2.77, "grad_norm": 1.4194896221160889, "learning_rate": 2.292929292929293e-06, "loss": 0.3489, "step": 7730 },
    { "epoch": 2.78, "grad_norm": 1.4569259881973267, "learning_rate": 2.282828282828283e-06, "loss": 0.4014, "step": 7740 },
    { "epoch": 2.78, "grad_norm": 1.6338679790496826, "learning_rate": 2.2727272727272728e-06, "loss": 0.3425, "step": 7750 },
    { "epoch": 2.78, "grad_norm": 1.6008449792861938, "learning_rate": 2.262626262626263e-06, "loss": 0.4129, "step": 7760 },
    { "epoch": 2.79, "grad_norm": 2.567551851272583, "learning_rate": 2.252525252525253e-06, "loss": 0.4304, "step": 7770 },
    { "epoch": 2.79, "grad_norm": 1.5594419240951538, "learning_rate": 2.2424242424242428e-06, "loss": 0.4809, "step": 7780 },
    { "epoch": 2.79, "grad_norm": 2.100327253341675, "learning_rate": 2.2323232323232326e-06, "loss": 0.3663, "step": 7790 },
    { "epoch": 2.8, "grad_norm": 1.9268852472305298, "learning_rate": 2.222222222222222e-06, "loss": 0.485, "step": 7800 },
    { "epoch": 2.8, "grad_norm": 1.378103494644165, "learning_rate": 2.2121212121212124e-06, "loss": 0.3487, "step": 7810 },
    { "epoch": 2.8, "grad_norm": 1.3824481964111328, "learning_rate": 2.2020202020202022e-06, "loss": 0.4487, "step": 7820 },
    { "epoch": 2.81, "grad_norm": 1.4353455305099487, "learning_rate": 2.191919191919192e-06, "loss": 0.3761, "step": 7830 },
    { "epoch": 2.81, "grad_norm": 1.4255388975143433, "learning_rate": 2.181818181818182e-06, "loss": 0.4219, "step": 7840 },
    { "epoch": 2.82, "grad_norm": 1.2923437356948853, "learning_rate": 2.171717171717172e-06, "loss": 0.4413, "step": 7850 },
    { "epoch": 2.82, "grad_norm": 1.5469424724578857, "learning_rate": 2.1616161616161617e-06, "loss": 0.3907, "step": 7860 },
    { "epoch": 2.82, "grad_norm": 1.5933393239974976, "learning_rate": 2.1515151515151515e-06, "loss": 0.3958, "step": 7870 },
    { "epoch": 2.83, "grad_norm": 1.3640625476837158, "learning_rate": 2.1414141414141414e-06, "loss": 0.3836, "step": 7880 },
    { "epoch": 2.83, "grad_norm": 1.3010022640228271, "learning_rate": 2.1313131313131317e-06, "loss": 0.4202, "step": 7890 },
    { "epoch": 2.83, "grad_norm": 2.231398344039917, "learning_rate": 2.1212121212121216e-06, "loss": 0.4527, "step": 7900 },
    { "epoch": 2.84, "grad_norm": 1.9403585195541382, "learning_rate": 2.1111111111111114e-06, "loss": 0.3446, "step": 7910 },
    { "epoch": 2.84, "grad_norm": 3.341773748397827, "learning_rate": 2.1010101010101013e-06, "loss": 0.3637, "step": 7920 },
    { "epoch": 2.84, "grad_norm": 1.5845195055007935, "learning_rate": 2.090909090909091e-06, "loss": 0.379, "step": 7930 },
    { "epoch": 2.85, "grad_norm": 1.9772624969482422, "learning_rate": 2.080808080808081e-06, "loss": 0.3954, "step": 7940 },
    { "epoch": 2.85, "grad_norm": 1.5883660316467285, "learning_rate": 2.070707070707071e-06, "loss": 0.4589, "step": 7950 },
    { "epoch": 2.85, "grad_norm": 1.7022806406021118, "learning_rate": 2.0606060606060607e-06, "loss": 0.3767, "step": 7960 },
    { "epoch": 2.86, "grad_norm": 1.246584177017212, "learning_rate": 2.0505050505050506e-06, "loss": 0.4161, "step": 7970 },
    { "epoch": 2.86, "grad_norm": 2.11494517326355, "learning_rate": 2.0404040404040405e-06, "loss": 0.3635, "step": 7980 },
    { "epoch": 2.87, "grad_norm": 2.0264945030212402, "learning_rate": 2.0303030303030303e-06, "loss": 0.3698, "step": 7990 },
    { "epoch": 2.87, "grad_norm": 1.9246190786361694, "learning_rate": 2.02020202020202e-06, "loss": 0.3972, "step": 8000 },
    { "epoch": 2.87, "eval_loss": 0.6264104843139648, "eval_runtime": 335.1608, "eval_samples_per_second": 2.984, "eval_steps_per_second": 2.984, "step": 8000 },
    { "epoch": 2.87, "grad_norm": 1.8549860715866089, "learning_rate": 2.01010101010101e-06, "loss": 0.4053, "step": 8010 },
    { "epoch": 2.88, "grad_norm": 1.6804758310317993, "learning_rate": 2.0000000000000003e-06, "loss": 0.3761, "step": 8020 },
    { "epoch": 2.88, "grad_norm": 2.452481508255005, "learning_rate": 1.98989898989899e-06, "loss": 0.4573, "step": 8030 },
    { "epoch": 2.88, "grad_norm": 1.8172208070755005, "learning_rate": 1.97979797979798e-06, "loss": 0.4571, "step": 8040 },
    { "epoch": 2.89, "grad_norm": 1.3481515645980835, "learning_rate": 1.96969696969697e-06, "loss": 0.4052, "step": 8050 },
    { "epoch": 2.89, "grad_norm": 1.9910420179367065, "learning_rate": 1.9595959595959598e-06, "loss": 0.3759, "step": 8060 },
    { "epoch": 2.89, "grad_norm": 1.9770097732543945, "learning_rate": 1.9494949494949496e-06, "loss": 0.4141, "step": 8070 },
    { "epoch": 2.9, "grad_norm": 1.4468657970428467, "learning_rate": 1.9393939393939395e-06, "loss": 0.4264, "step": 8080 },
    { "epoch": 2.9, "grad_norm": 1.1269161701202393, "learning_rate": 1.9292929292929294e-06, "loss": 0.3926, "step": 8090 },
    { "epoch": 2.9, "grad_norm": 1.2029356956481934, "learning_rate": 1.9191919191919192e-06, "loss": 0.3155, "step": 8100 },
    { "epoch": 2.91, "grad_norm": 1.3906350135803223, "learning_rate": 1.9090909090909095e-06, "loss": 0.3354, "step": 8110 },
    { "epoch": 2.91, "grad_norm": 1.9998856782913208, "learning_rate": 1.8989898989898992e-06, "loss": 0.3758, "step": 8120 },
    { "epoch": 2.92, "grad_norm": 2.2910966873168945, "learning_rate": 1.888888888888889e-06, "loss": 0.3651, "step": 8130 },
    { "epoch": 2.92, "grad_norm": 1.4889897108078003, "learning_rate": 1.878787878787879e-06, "loss": 0.3672, "step": 8140 },
    { "epoch": 2.92, "grad_norm": 1.2390172481536865, "learning_rate": 1.868686868686869e-06, "loss": 0.4516, "step": 8150 },
    { "epoch": 2.93, "grad_norm": 1.5891129970550537, "learning_rate": 1.8585858585858588e-06, "loss": 0.3899, "step": 8160 },
    { "epoch": 2.93, "grad_norm": 2.1633973121643066, "learning_rate": 1.8484848484848487e-06, "loss": 0.4178, "step": 8170 },
    { "epoch": 2.93, "grad_norm": 1.6631094217300415, "learning_rate": 1.8383838383838384e-06, "loss": 0.368, "step": 8180 },
    { "epoch": 2.94, "grad_norm": 1.9333834648132324, "learning_rate": 1.8282828282828286e-06, "loss": 0.4958, "step": 8190 },
    { "epoch": 2.94, "grad_norm": 1.834139347076416, "learning_rate": 1.8181818181818183e-06, "loss": 0.3822, "step": 8200 },
    { "epoch": 2.94, "grad_norm": 1.6175683736801147, "learning_rate": 1.8080808080808082e-06, "loss": 0.4646, "step": 8210 },
    { "epoch": 2.95, "grad_norm": 2.004728078842163, "learning_rate": 1.797979797979798e-06, "loss": 0.4034, "step": 8220 },
    { "epoch": 2.95, "grad_norm": 1.8528070449829102, "learning_rate": 1.787878787878788e-06, "loss": 0.3967, "step": 8230 },
    { "epoch": 2.95, "grad_norm": 1.8886363506317139, "learning_rate": 1.777777777777778e-06, "loss": 0.4329, "step": 8240 },
    { "epoch": 2.96, "grad_norm": 1.5515146255493164, "learning_rate": 1.7676767676767678e-06, "loss": 0.4003, "step": 8250 },
    { "epoch": 2.96, "grad_norm": 1.4062939882278442, "learning_rate": 1.7575757575757577e-06, "loss": 0.4563, "step": 8260 },
    { "epoch": 2.97, "grad_norm": 1.3575536012649536, "learning_rate": 1.7474747474747475e-06, "loss": 0.4096, "step": 8270 },
    { "epoch": 2.97, "grad_norm": 1.683371663093567, "learning_rate": 1.7373737373737376e-06, "loss": 0.4453, "step": 8280 },
    { "epoch": 2.97, "grad_norm": 1.2981189489364624, "learning_rate": 1.7272727272727275e-06, "loss": 0.4343, "step": 8290 },
    { "epoch": 2.98, "grad_norm": 1.9953092336654663, "learning_rate": 1.7171717171717173e-06, "loss": 0.4054, "step": 8300 },
    { "epoch": 2.98, "grad_norm": 1.868001103401184, "learning_rate": 1.707070707070707e-06, "loss": 0.3699, "step": 8310 },
    { "epoch": 2.98, "grad_norm": 1.6378178596496582, "learning_rate": 1.6969696969696973e-06, "loss": 0.4349, "step": 8320 },
    { "epoch": 2.99, "grad_norm": 1.5435936450958252, "learning_rate": 1.6868686868686871e-06, "loss": 0.4299, "step": 8330 },
    { "epoch": 2.99, "grad_norm": 1.603030800819397, "learning_rate": 1.6767676767676768e-06, "loss": 0.3993, "step": 8340 },
    { "epoch": 2.99, "grad_norm": 1.4103410243988037, "learning_rate": 1.6666666666666667e-06, "loss": 0.4429, "step": 8350 },
    { "epoch": 3.0, "grad_norm": 2.4723174571990967, "learning_rate": 1.6565656565656567e-06, "loss": 0.4893, "step": 8360 },
    { "epoch": 3.0, "grad_norm": 1.4646846055984497, "learning_rate": 1.6464646464646466e-06, "loss": 0.4203, "step": 8370 },
    { "epoch": 3.01, "grad_norm": 2.3898401260375977, "learning_rate": 1.6363636363636365e-06, "loss": 0.4673, "step": 8380 },
    { "epoch": 3.01, "grad_norm": 1.8736991882324219, "learning_rate": 1.6262626262626263e-06, "loss": 0.3362, "step": 8390 },
    { "epoch": 3.01, "grad_norm": 2.236264705657959, "learning_rate": 1.6161616161616164e-06, "loss": 0.3925, "step": 8400 },
    { "epoch": 3.02, "grad_norm": 1.7149522304534912, "learning_rate": 1.6060606060606063e-06, "loss": 0.3835, "step": 8410 },
    { "epoch": 3.02, "grad_norm": 2.272951602935791, "learning_rate": 1.5959595959595961e-06, "loss": 0.3595, "step": 8420 },
    { "epoch": 3.02, "grad_norm": 1.6639195680618286, "learning_rate": 1.585858585858586e-06, "loss": 0.3229, "step": 8430 },
    { "epoch": 3.03, "grad_norm": 1.678045630455017, "learning_rate": 1.5757575757575759e-06, "loss": 0.3458, "step": 8440 },
    { "epoch": 3.03, "grad_norm": 2.393176555633545, "learning_rate": 1.565656565656566e-06, "loss": 0.3679, "step": 8450 },
    { "epoch": 3.03, "grad_norm": 1.6466505527496338, "learning_rate": 1.5555555555555558e-06, "loss": 0.3861, "step": 8460 },
    { "epoch": 3.04, "grad_norm": 1.6506351232528687, "learning_rate": 1.5454545454545454e-06, "loss": 0.3166, "step": 8470 },
    { "epoch": 3.04, "grad_norm": 1.601408839225769, "learning_rate": 1.5353535353535353e-06, "loss": 0.3851, "step": 8480 },
    { "epoch": 3.04, "grad_norm": 1.6463664770126343, "learning_rate": 1.5252525252525254e-06, "loss": 0.3991, "step": 8490 },
    { "epoch": 3.05, "grad_norm": 1.8262842893600464, "learning_rate": 1.5151515151515152e-06, "loss": 0.3317, "step": 8500 },
    { "epoch": 3.05, "grad_norm": 1.3132091760635376, "learning_rate": 1.5050505050505051e-06, "loss": 0.4197, "step": 8510 },
    { "epoch": 3.06, "grad_norm": 1.895925521850586, "learning_rate": 1.494949494949495e-06, "loss": 0.44, "step": 8520 },
    { "epoch": 3.06, "grad_norm": 1.3110499382019043, "learning_rate": 1.484848484848485e-06, "loss": 0.3834, "step": 8530 },
    { "epoch": 3.06, "grad_norm": 1.5614486932754517, "learning_rate": 1.474747474747475e-06, "loss": 0.4561, "step": 8540 },
    { "epoch": 3.07, "grad_norm": 1.6834906339645386, "learning_rate": 1.4646464646464648e-06, "loss": 0.402, "step": 8550 },
    { "epoch": 3.07, "grad_norm": 2.3522043228149414, "learning_rate": 1.4545454545454546e-06, "loss": 0.3555, "step": 8560 },
    { "epoch": 3.07, "grad_norm": 1.7741584777832031, "learning_rate": 1.4444444444444445e-06, "loss": 0.4577, "step": 8570 },
    { "epoch": 3.08, "grad_norm": 1.9629969596862793, "learning_rate": 1.4343434343434346e-06, "loss": 0.3733, "step": 8580 },
    { "epoch": 3.08, "grad_norm": 1.4532711505889893, "learning_rate": 1.4242424242424244e-06, "loss": 0.3482, "step": 8590 },
    { "epoch": 3.08, "grad_norm": 1.9763760566711426, "learning_rate": 1.4141414141414143e-06, "loss": 0.3519, "step": 8600 },
    { "epoch": 3.09, "grad_norm": 1.5623893737792969, "learning_rate": 1.404040404040404e-06, "loss": 0.3324, "step": 8610 },
    { "epoch": 3.09, "grad_norm": 1.898377776145935, "learning_rate": 1.3939393939393942e-06, "loss": 0.3657, "step": 8620 },
    { "epoch": 3.09, "grad_norm": 1.3679566383361816, "learning_rate": 1.3838383838383839e-06, "loss": 0.3967, "step": 8630 },
    { "epoch": 3.1, "grad_norm": 1.603039026260376, "learning_rate": 1.3737373737373738e-06, "loss": 0.4179, "step": 8640 },
    { "epoch": 3.1, "grad_norm": 1.5435354709625244, "learning_rate": 1.3636363636363636e-06, "loss": 0.4348, "step": 8650 },
    { "epoch": 3.11, "grad_norm": 2.4842565059661865, "learning_rate": 1.3535353535353537e-06, "loss": 0.3157, "step": 8660 },
    { "epoch": 3.11, "grad_norm": 2.318984031677246, "learning_rate": 1.3434343434343436e-06, "loss": 0.3616, "step": 8670 },
    { "epoch": 3.11, "grad_norm": 2.5582473278045654, "learning_rate": 1.3333333333333334e-06, "loss": 0.387, "step": 8680 },
    { "epoch": 3.12, "grad_norm": 1.9661940336227417, "learning_rate": 1.3232323232323233e-06, "loss": 0.3568, "step": 8690 },
    { "epoch": 3.12, "grad_norm": 1.3907606601715088, "learning_rate": 1.3131313131313134e-06, "loss": 0.4107, "step": 8700 },
    { "epoch": 3.12, "grad_norm": 1.5419985055923462, "learning_rate": 1.3030303030303032e-06, "loss": 0.3466, "step": 8710 },
    { "epoch": 3.13, "grad_norm": 2.0082972049713135, "learning_rate": 1.292929292929293e-06, "loss": 0.4568, "step": 8720 },
    { "epoch": 3.13, "grad_norm": 1.0879377126693726, "learning_rate": 1.282828282828283e-06, "loss": 0.4042, "step": 8730 },
    { "epoch": 3.13, "grad_norm": 1.4870342016220093, "learning_rate": 1.2727272727272728e-06, "loss": 0.4116, "step": 8740 },
    { "epoch": 3.14, "grad_norm": 1.3549796342849731, "learning_rate": 1.2626262626262629e-06, "loss": 0.3937, "step": 8750 },
    { "epoch": 3.14, "grad_norm": 1.4277349710464478, "learning_rate": 1.2525252525252527e-06, "loss": 0.3823, "step": 8760 },
    { "epoch": 3.15, "grad_norm": 1.7359371185302734, "learning_rate": 1.2424242424242424e-06, "loss": 0.3203, "step": 8770 },
    { "epoch": 3.15, "grad_norm": 2.7469658851623535, "learning_rate": 1.2323232323232325e-06, "loss": 0.4171, "step": 8780 },
    { "epoch": 3.15, "grad_norm": 1.2497518062591553, "learning_rate": 1.2222222222222223e-06, "loss": 0.4159, "step": 8790 },
    { "epoch": 3.16, "grad_norm": 1.4180413484573364, "learning_rate": 1.2121212121212122e-06, "loss": 0.363, "step": 8800 },
    { "epoch": 3.16, "grad_norm": 1.3709354400634766, "learning_rate": 1.202020202020202e-06, "loss": 0.3005, "step": 8810 },
    { "epoch": 3.16, "grad_norm": 1.4277822971343994, "learning_rate": 1.1919191919191921e-06, "loss": 0.3716, "step": 8820 },
    { "epoch": 3.17, "grad_norm": 1.8180736303329468, "learning_rate": 1.181818181818182e-06, "loss": 0.3736, "step": 8830 },
    { "epoch": 3.17, "grad_norm": 2.0248830318450928, "learning_rate": 1.1717171717171719e-06, "loss": 0.3758, "step": 8840 },
    { "epoch": 3.17, "grad_norm": 2.3396706581115723, "learning_rate": 1.1616161616161617e-06, "loss": 0.3683, "step": 8850 },
    { "epoch": 3.18, "grad_norm": 2.6537208557128906, "learning_rate": 1.1515151515151516e-06, "loss": 0.4474, "step": 8860 },
    { "epoch": 3.18, "grad_norm": 2.07914662361145, "learning_rate": 1.1414141414141414e-06, "loss": 0.4113, "step": 8870 },
    { "epoch": 3.18, "grad_norm": 1.5532647371292114, "learning_rate": 1.1313131313131315e-06, "loss": 0.3953, "step": 8880 },
    { "epoch": 3.19, "grad_norm": 2.3757212162017822, "learning_rate": 1.1212121212121214e-06, "loss": 0.3571, "step": 8890 },
    { "epoch": 3.19, "grad_norm": 1.8071672916412354, "learning_rate": 1.111111111111111e-06, "loss": 0.4147, "step": 8900 },
    { "epoch": 3.2, "grad_norm": 1.7433156967163086, "learning_rate": 1.1010101010101011e-06, "loss": 0.3714, "step": 8910 },
    { "epoch": 3.2, "grad_norm": 2.17182993888855, "learning_rate": 1.090909090909091e-06, "loss": 0.3693, "step": 8920 },
    { "epoch": 3.2, "grad_norm": 1.5401519536972046, "learning_rate": 1.0808080808080808e-06, "loss": 0.4193, "step": 8930 },
    { "epoch": 3.21, "grad_norm": 1.375801682472229, "learning_rate": 1.0707070707070707e-06, "loss": 0.3854, "step": 8940 },
    { "epoch": 3.21, "grad_norm": 1.6598793268203735, "learning_rate": 1.0606060606060608e-06, "loss": 0.3471, "step": 8950 },
    { "epoch": 3.21, "grad_norm": 1.7847528457641602, "learning_rate": 1.0505050505050506e-06, "loss": 0.3995, "step": 8960 },
    { "epoch": 3.22, "grad_norm": 2.7083232402801514, "learning_rate": 1.0404040404040405e-06, "loss": 0.3826, "step": 8970 },
    { "epoch": 3.22, "grad_norm": 1.7476744651794434, "learning_rate": 1.0303030303030304e-06, "loss": 0.3486, "step": 8980 },
    { "epoch": 3.22, "grad_norm": 1.5604398250579834, "learning_rate": 1.0202020202020202e-06, "loss": 0.3717, "step": 8990 },
    { "epoch": 3.23, "grad_norm": 1.8920303583145142, "learning_rate": 1.01010101010101e-06, "loss": 0.3302, "step": 9000 },
    { "epoch": 3.23, "grad_norm": 2.215111017227173, "learning_rate": 1.0000000000000002e-06, "loss": 0.38, "step": 9010 },
    { "epoch": 3.23, "grad_norm": 1.8379169702529907, "learning_rate": 9.8989898989899e-07, "loss": 0.3557, "step": 9020 },
    { "epoch": 3.24, "grad_norm": 1.6511839628219604, "learning_rate": 9.797979797979799e-07, "loss": 0.332, "step": 9030 },
    { "epoch": 3.24, "grad_norm": 2.9243552684783936, "learning_rate": 9.696969696969698e-07, "loss": 0.3942, "step": 9040 },
    { "epoch": 3.25, "grad_norm": 1.9199024438858032, "learning_rate": 9.595959595959596e-07, "loss": 0.4057, "step": 9050 },
    { "epoch": 3.25, "grad_norm": 3.3495843410491943, "learning_rate": 9.494949494949496e-07, "loss": 0.3524, "step": 9060 },
    { "epoch": 3.25, "grad_norm": 2.3352134227752686, "learning_rate": 9.393939393939395e-07, "loss": 0.3617, "step": 9070 },
    { "epoch": 3.26, "grad_norm": 1.6895115375518799, "learning_rate": 9.292929292929294e-07, "loss": 0.3091, "step": 9080 },
    { "epoch": 3.26, "grad_norm": 2.0261287689208984, "learning_rate": 9.191919191919192e-07, "loss": 0.3869, "step": 9090 },
    { "epoch": 3.26, "grad_norm": 2.361117124557495, "learning_rate": 9.090909090909091e-07, "loss": 0.3772, "step": 9100 },
    { "epoch": 3.27, "grad_norm": 2.926316022872925, "learning_rate": 8.98989898989899e-07, "loss": 0.3755, "step": 9110 },
    { "epoch": 3.27, "grad_norm": 1.0803959369659424, "learning_rate": 8.88888888888889e-07, "loss": 0.4371, "step": 9120 },
    { "epoch": 3.27, "grad_norm": 3.058452606201172, "learning_rate": 8.787878787878788e-07, "loss": 0.3267, "step": 9130 },
    { "epoch": 3.28, "grad_norm": 1.4999336004257202, "learning_rate": 8.686868686868688e-07, "loss": 0.4196, "step": 9140 },
    { "epoch": 3.28, "grad_norm": 2.2035491466522217, "learning_rate": 8.585858585858587e-07, "loss": 0.3322, "step": 9150 },
    { "epoch": 3.28, "grad_norm": 1.7281761169433594, "learning_rate": 8.484848484848486e-07, "loss": 0.3657, "step": 9160 },
    { "epoch": 3.29, "grad_norm": 2.016667366027832, "learning_rate": 8.383838383838384e-07, "loss": 0.4026, "step": 9170 },
    { "epoch": 3.29, "grad_norm": 1.896865963935852, "learning_rate": 8.282828282828284e-07, "loss": 0.3644, "step": 9180 },
    { "epoch": 3.3, "grad_norm": 2.0801470279693604, "learning_rate": 8.181818181818182e-07, "loss": 0.3979, "step": 9190 },
    { "epoch": 3.3, "grad_norm": 1.481464147567749, "learning_rate": 8.080808080808082e-07, "loss": 0.3229, "step": 9200 },
    { "epoch": 3.3, "grad_norm": 2.261199951171875, "learning_rate": 7.979797979797981e-07, "loss": 0.4248, "step": 9210 },
    { "epoch": 3.31, "grad_norm": 1.8218512535095215, "learning_rate": 7.878787878787879e-07, "loss": 0.3618, "step": 9220 },
    { "epoch": 3.31, "grad_norm": 1.943556308746338, "learning_rate": 7.777777777777779e-07, "loss": 0.436, "step": 9230 },
    { "epoch": 3.31, "grad_norm": 1.8238531351089478, "learning_rate": 7.676767676767677e-07, "loss": 0.3386, "step": 9240 },
    { "epoch": 3.32, "grad_norm": 2.0833752155303955, "learning_rate": 7.575757575757576e-07, "loss": 0.3952, "step": 9250 },
    { "epoch": 3.32, "grad_norm": 2.1366446018218994, "learning_rate": 7.474747474747475e-07, "loss": 0.3986, "step": 9260 },
    { "epoch": 3.32, "grad_norm": 2.684802770614624, "learning_rate": 7.373737373737375e-07, "loss": 0.4074, "step": 9270 },
    { "epoch": 3.33, "grad_norm": 1.9561058282852173, "learning_rate": 7.272727272727273e-07, "loss": 0.3966, "step": 9280 },
    { "epoch": 3.33, "grad_norm": 1.5268564224243164, "learning_rate": 7.171717171717173e-07, "loss": 0.3095, "step": 9290 },
    { "epoch": 3.34, "grad_norm": 2.303208827972412, "learning_rate": 7.070707070707071e-07, "loss": 0.3863, "step": 9300 },
    { "epoch": 3.34, "grad_norm": 2.7735016345977783, "learning_rate": 6.969696969696971e-07, "loss": 0.3552, "step": 9310 },
    { "epoch": 3.34, "grad_norm": 1.4639602899551392, "learning_rate": 6.868686868686869e-07, "loss": 0.3841, "step": 9320 },
    { "epoch": 3.35, "grad_norm": 1.7208130359649658, "learning_rate": 6.767676767676768e-07, "loss": 0.3682, "step": 9330 },
    { "epoch": 3.35, "grad_norm": 1.7992736101150513, "learning_rate": 6.666666666666667e-07, "loss": 0.3792, "step": 9340 },
    { "epoch": 3.35, "grad_norm": 1.563179850578308, "learning_rate": 6.565656565656567e-07, "loss": 0.4083, "step": 9350 },
    { "epoch": 3.36, "grad_norm": 2.7487456798553467, "learning_rate": 6.464646464646465e-07, "loss": 0.453, "step": 9360 },
    { "epoch": 3.36, "grad_norm": 2.036696195602417, "learning_rate": 6.363636363636364e-07, "loss": 0.3631, "step": 9370 },
    { "epoch": 3.36, "grad_norm": 1.628597617149353, "learning_rate": 6.262626262626264e-07, "loss": 0.4038, "step": 9380 },
    { "epoch": 3.37, "grad_norm": 1.8104825019836426, "learning_rate": 6.161616161616162e-07, "loss": 0.4188, "step": 9390 },
    { "epoch": 3.37, "grad_norm": 1.982826828956604, "learning_rate": 6.060606060606061e-07, "loss": 0.4019, "step": 9400 },
    { "epoch": 3.37, "grad_norm": 2.4340901374816895, "learning_rate": 5.959595959595961e-07, "loss": 0.4311, "step": 9410 },
    { "epoch": 3.38, "grad_norm": 2.0379843711853027, "learning_rate": 5.858585858585859e-07, "loss": 0.3615, "step": 9420 },
    { "epoch": 3.38, "grad_norm": 1.7289820909500122, "learning_rate": 5.757575757575758e-07, "loss": 0.3851, "step": 9430 },
    { "epoch": 3.39, "grad_norm": 2.0815517902374268, "learning_rate": 5.656565656565658e-07, "loss": 0.3125, "step": 9440 },
    { "epoch": 3.39, "grad_norm": 2.199430465698242, "learning_rate": 5.555555555555555e-07, "loss": 0.4031, "step": 9450 },
    { "epoch": 3.39, "grad_norm": 2.192812442779541, "learning_rate": 5.454545454545455e-07, "loss": 0.3844, "step": 9460 },
    { "epoch": 3.4, "grad_norm": 2.0369627475738525, "learning_rate": 5.353535353535354e-07, "loss": 0.3461, "step": 9470 },
    { "epoch": 3.4, "grad_norm": 3.0597352981567383, "learning_rate": 5.252525252525253e-07, "loss": 0.3532, "step": 9480 },
    { "epoch": 3.4, "grad_norm": 2.017716407775879, "learning_rate": 5.151515151515152e-07, "loss": 0.4273, "step": 9490 },
    { "epoch": 3.41, "grad_norm": 2.3162925243377686, "learning_rate": 5.05050505050505e-07, "loss": 0.3905, "step": 9500 },
    { "epoch": 3.41, "grad_norm": 1.9750950336456299, "learning_rate": 4.94949494949495e-07, "loss": 0.3807, "step": 9510 },
    { "epoch": 3.41, "grad_norm": 1.793368935585022, "learning_rate": 4.848484848484849e-07, "loss": 0.3258, "step": 9520 },
    { "epoch": 3.42, "grad_norm": 1.605327844619751, "learning_rate": 4.747474747474748e-07, "loss": 0.4191, "step": 9530 },
    { "epoch": 3.42, "grad_norm": 2.2928264141082764, "learning_rate": 4.646464646464647e-07, "loss": 0.4188, "step": 9540 },
    { "epoch": 3.42, "grad_norm": 2.223863363265991, "learning_rate": 4.5454545454545457e-07, "loss": 0.4292, "step": 9550 },
    { "epoch": 3.43, "grad_norm": 1.770444631576538, "learning_rate": 4.444444444444445e-07, "loss": 0.3991, "step": 9560 },
    { "epoch": 3.43, "grad_norm": 1.7093805074691772, "learning_rate": 4.343434343434344e-07, "loss": 0.3806, "step": 9570 },
    { "epoch": 3.44, "grad_norm": 2.489004135131836, "learning_rate": 4.242424242424243e-07, "loss": 0.3923, "step": 9580 },
    { "epoch": 3.44, "grad_norm": 2.8307559490203857, "learning_rate": 4.141414141414142e-07, "loss": 0.4213, "step": 9590 },
    { "epoch": 3.44, "grad_norm": 1.902875542640686, "learning_rate": 4.040404040404041e-07, "loss": 0.3651, "step": 9600 },
    { "epoch": 3.45, "grad_norm": 2.018641948699951, "learning_rate": 3.9393939393939396e-07, "loss": 0.3456, "step": 9610 },
    { "epoch": 3.45, "grad_norm": 2.076756000518799, "learning_rate": 3.838383838383838e-07, "loss": 0.3826, "step": 9620 },
    { "epoch": 3.45, "grad_norm": 2.1234145164489746, "learning_rate": 3.7373737373737374e-07, "loss": 0.4183, "step": 9630 },
    { "epoch": 3.46, "grad_norm": 1.7573988437652588, "learning_rate": 3.6363636363636366e-07, "loss": 0.3506, "step": 9640 },
    { "epoch": 3.46, "grad_norm": 1.9573911428451538, "learning_rate": 3.535353535353536e-07, "loss": 0.4331, "step": 9650 },
    { "epoch": 3.46, "grad_norm": 1.8499737977981567, "learning_rate": 3.4343434343434344e-07, "loss": 0.3849, "step": 9660 },
    { "epoch": 3.47, "grad_norm": 2.653956413269043, "learning_rate": 3.3333333333333335e-07, "loss": 0.4345, "step": 9670 },
    { "epoch": 3.47, "grad_norm": 2.0091283321380615, "learning_rate": 3.2323232323232327e-07, "loss": 0.3769, "step": 9680 },
    { "epoch": 3.47, "grad_norm": 2.343541383743286, "learning_rate": 3.131313131313132e-07, "loss": 0.3538, "step": 9690 },
    { "epoch": 3.48, "grad_norm": 1.603804588317871, "learning_rate": 3.0303030303030305e-07, "loss": 0.3768, "step": 9700 },
    { "epoch": 3.48, "grad_norm": 2.1877195835113525, "learning_rate": 2.9292929292929296e-07, "loss": 0.3868, "step": 9710 },
    { "epoch": 3.49, "grad_norm": 1.3070385456085205, "learning_rate": 2.828282828282829e-07, "loss": 0.4385, "step": 9720 },
    { "epoch": 3.49, "grad_norm": 2.4190921783447266, "learning_rate": 2.7272727272727274e-07, "loss": 0.3697, "step": 9730 },
    { "epoch": 3.49, "grad_norm": 1.8476630449295044, "learning_rate": 2.6262626262626266e-07, "loss": 0.3389, "step": 9740 },
    { "epoch": 3.5, "grad_norm": 1.9247606992721558, "learning_rate": 2.525252525252525e-07, "loss": 0.3559, "step": 9750 },
    { "epoch": 3.5, "grad_norm": 1.3740005493164062, "learning_rate": 2.4242424242424244e-07, "loss": 0.4177, "step": 9760 },
    { "epoch": 3.5, "grad_norm": 1.7385820150375366, "learning_rate": 2.3232323232323235e-07, "loss": 0.3621, "step": 9770 },
    { "epoch": 3.51, "grad_norm": 2.197925567626953, "learning_rate": 2.2222222222222224e-07, "loss": 0.341, "step": 9780 },
    { "epoch": 3.51, "grad_norm": 2.315577983856201, "learning_rate": 2.1212121212121216e-07, "loss": 0.3684, "step": 9790 },
    { "epoch": 3.51, "grad_norm": 2.049004077911377, "learning_rate": 2.0202020202020205e-07, "loss": 0.3577, "step": 9800 },
    { "epoch": 3.52, "grad_norm": 1.669946551322937, "learning_rate": 1.919191919191919e-07, "loss": 0.3952, "step": 9810 },
    { "epoch": 3.52, "grad_norm": 2.759409189224243, "learning_rate": 1.8181818181818183e-07, "loss": 0.3598, "step": 9820 },
    { "epoch": 3.53, "grad_norm": 1.5693753957748413, "learning_rate": 1.7171717171717172e-07, "loss": 0.3845, "step": 9830 },
    { "epoch": 3.53, "grad_norm": 2.0964393615722656, "learning_rate": 1.6161616161616163e-07, "loss": 0.3791, "step": 9840 },
    { "epoch": 3.53, "grad_norm": 2.3079283237457275, "learning_rate": 1.5151515151515152e-07, "loss": 0.3396, "step": 9850 },
    { "epoch": 3.54, "grad_norm": 1.7049084901809692, "learning_rate": 1.4141414141414144e-07, "loss": 0.4195, "step": 9860 },
    { "epoch": 3.54, "grad_norm": 2.8570940494537354, "learning_rate": 1.3131313131313133e-07, "loss": 0.36, "step": 9870 },
    { "epoch": 3.54, "grad_norm": 2.7875728607177734, "learning_rate": 1.2121212121212122e-07, "loss": 0.3816, "step": 9880 },
    { "epoch": 3.55, "grad_norm": 2.0262162685394287, "learning_rate": 1.1111111111111112e-07, "loss": 0.4193, "step": 9890 },
    { "epoch": 3.55, "grad_norm": 1.8195960521697998, "learning_rate": 1.0101010101010103e-07, "loss": 0.373, "step": 9900 },
    { "epoch": 3.55, "grad_norm": 1.600714921951294, "learning_rate": 9.090909090909091e-08, "loss": 0.4019, "step": 9910 },
    { "epoch": 3.56, "grad_norm": 1.8309797048568726, "learning_rate": 8.080808080808082e-08, "loss": 0.3346, "step": 9920 },
    { "epoch": 3.56, "grad_norm": 1.5934892892837524, "learning_rate": 7.070707070707072e-08, "loss": 0.3395, "step": 9930 },
    { "epoch": 3.56, "grad_norm": 1.8032306432724, "learning_rate": 6.060606060606061e-08, "loss": 0.4162, "step": 9940 },
    { "epoch": 3.57, "grad_norm": 1.7956101894378662, "learning_rate": 5.050505050505051e-08, "loss": 0.5069, "step": 9950 },
    { "epoch": 3.57, "grad_norm": 1.9345638751983643, "learning_rate": 4.040404040404041e-08, "loss": 0.3925, "step": 9960 },
    { "epoch": 3.58, "grad_norm": 1.4399749040603638, "learning_rate": 3.0303030303030305e-08, "loss": 0.3952, "step": 9970 },
    { "epoch": 3.58, "grad_norm": 2.1770801544189453, "learning_rate": 2.0202020202020204e-08, "loss": 0.3711, "step": 9980 },
    { "epoch": 3.58, "grad_norm": 3.2925233840942383, "learning_rate": 1.0101010101010102e-08, "loss": 0.3993, "step": 9990 },
    { "epoch": 3.59, "grad_norm": 1.633353352546692, "learning_rate": 0.0, "loss": 0.4093, "step": 10000 },
    { "epoch": 3.59, "eval_loss": 0.6603949069976807, "eval_runtime": 335.7393, "eval_samples_per_second": 2.979, "eval_steps_per_second": 2.979, "step": 10000 }
  ],
  "logging_steps": 10,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 2000,
  "total_flos": 8.1602751234048e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}