|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.9975757575757576, |
|
"eval_steps": 123, |
|
"global_step": 1236, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0016161616161616162, |
|
"grad_norm": 280.0, |
|
"learning_rate": 8.064516129032259e-08, |
|
"loss": 4.116, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0032323232323232323, |
|
"grad_norm": 268.0, |
|
"learning_rate": 1.6129032258064518e-07, |
|
"loss": 4.16, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0048484848484848485, |
|
"grad_norm": 316.0, |
|
"learning_rate": 2.4193548387096775e-07, |
|
"loss": 4.0967, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.006464646464646465, |
|
"grad_norm": 280.0, |
|
"learning_rate": 3.2258064516129035e-07, |
|
"loss": 4.1925, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.00808080808080808, |
|
"grad_norm": 300.0, |
|
"learning_rate": 4.032258064516129e-07, |
|
"loss": 4.0364, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.009696969696969697, |
|
"grad_norm": 290.0, |
|
"learning_rate": 4.838709677419355e-07, |
|
"loss": 3.9787, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.011313131313131313, |
|
"grad_norm": 272.0, |
|
"learning_rate": 5.645161290322581e-07, |
|
"loss": 3.9483, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01292929292929293, |
|
"grad_norm": 212.0, |
|
"learning_rate": 6.451612903225807e-07, |
|
"loss": 3.8711, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.014545454545454545, |
|
"grad_norm": 195.0, |
|
"learning_rate": 7.258064516129033e-07, |
|
"loss": 3.6116, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01616161616161616, |
|
"grad_norm": 183.0, |
|
"learning_rate": 8.064516129032258e-07, |
|
"loss": 3.6654, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.017777777777777778, |
|
"grad_norm": 151.0, |
|
"learning_rate": 8.870967741935485e-07, |
|
"loss": 3.3067, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.019393939393939394, |
|
"grad_norm": 130.0, |
|
"learning_rate": 9.67741935483871e-07, |
|
"loss": 3.2813, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02101010101010101, |
|
"grad_norm": 126.0, |
|
"learning_rate": 1.0483870967741936e-06, |
|
"loss": 3.1032, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.022626262626262626, |
|
"grad_norm": 94.5, |
|
"learning_rate": 1.1290322580645162e-06, |
|
"loss": 3.2868, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.024242424242424242, |
|
"grad_norm": 98.5, |
|
"learning_rate": 1.2096774193548388e-06, |
|
"loss": 2.9182, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02585858585858586, |
|
"grad_norm": 82.0, |
|
"learning_rate": 1.2903225806451614e-06, |
|
"loss": 2.9145, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.027474747474747475, |
|
"grad_norm": 98.5, |
|
"learning_rate": 1.3709677419354838e-06, |
|
"loss": 2.7479, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02909090909090909, |
|
"grad_norm": 94.5, |
|
"learning_rate": 1.4516129032258066e-06, |
|
"loss": 2.5529, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.030707070707070707, |
|
"grad_norm": 71.0, |
|
"learning_rate": 1.5322580645161292e-06, |
|
"loss": 2.6872, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03232323232323232, |
|
"grad_norm": 63.25, |
|
"learning_rate": 1.6129032258064516e-06, |
|
"loss": 2.3578, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03393939393939394, |
|
"grad_norm": 76.0, |
|
"learning_rate": 1.6935483870967742e-06, |
|
"loss": 2.2456, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.035555555555555556, |
|
"grad_norm": 111.5, |
|
"learning_rate": 1.774193548387097e-06, |
|
"loss": 2.0884, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.037171717171717175, |
|
"grad_norm": 49.5, |
|
"learning_rate": 1.8548387096774196e-06, |
|
"loss": 2.1233, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.03878787878787879, |
|
"grad_norm": 47.75, |
|
"learning_rate": 1.935483870967742e-06, |
|
"loss": 1.8103, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04040404040404041, |
|
"grad_norm": 49.5, |
|
"learning_rate": 2.0161290322580646e-06, |
|
"loss": 1.6413, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.04202020202020202, |
|
"grad_norm": 45.75, |
|
"learning_rate": 2.096774193548387e-06, |
|
"loss": 2.0003, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.04363636363636364, |
|
"grad_norm": 43.5, |
|
"learning_rate": 2.17741935483871e-06, |
|
"loss": 1.7371, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.04525252525252525, |
|
"grad_norm": 40.0, |
|
"learning_rate": 2.2580645161290324e-06, |
|
"loss": 1.6431, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.04686868686868687, |
|
"grad_norm": 33.75, |
|
"learning_rate": 2.338709677419355e-06, |
|
"loss": 1.7026, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.048484848484848485, |
|
"grad_norm": 38.5, |
|
"learning_rate": 2.4193548387096776e-06, |
|
"loss": 1.6614, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.050101010101010104, |
|
"grad_norm": 33.25, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.6833, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.05171717171717172, |
|
"grad_norm": 34.75, |
|
"learning_rate": 2.580645161290323e-06, |
|
"loss": 1.3788, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.05333333333333334, |
|
"grad_norm": 30.5, |
|
"learning_rate": 2.6612903225806454e-06, |
|
"loss": 1.4505, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.05494949494949495, |
|
"grad_norm": 34.5, |
|
"learning_rate": 2.7419354838709676e-06, |
|
"loss": 1.3686, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.05656565656565657, |
|
"grad_norm": 29.125, |
|
"learning_rate": 2.822580645161291e-06, |
|
"loss": 1.5094, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.05818181818181818, |
|
"grad_norm": 27.625, |
|
"learning_rate": 2.903225806451613e-06, |
|
"loss": 1.1449, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.0597979797979798, |
|
"grad_norm": 26.0, |
|
"learning_rate": 2.983870967741936e-06, |
|
"loss": 1.2051, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.061414141414141414, |
|
"grad_norm": 30.5, |
|
"learning_rate": 3.0645161290322584e-06, |
|
"loss": 1.4705, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.06303030303030303, |
|
"grad_norm": 27.125, |
|
"learning_rate": 3.145161290322581e-06, |
|
"loss": 1.4405, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.06464646464646465, |
|
"grad_norm": 26.5, |
|
"learning_rate": 3.225806451612903e-06, |
|
"loss": 1.2536, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.06626262626262626, |
|
"grad_norm": 28.375, |
|
"learning_rate": 3.306451612903226e-06, |
|
"loss": 1.3682, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.06787878787878789, |
|
"grad_norm": 28.375, |
|
"learning_rate": 3.3870967741935484e-06, |
|
"loss": 1.2269, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.0694949494949495, |
|
"grad_norm": 25.75, |
|
"learning_rate": 3.4677419354838714e-06, |
|
"loss": 1.155, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.07111111111111111, |
|
"grad_norm": 23.75, |
|
"learning_rate": 3.548387096774194e-06, |
|
"loss": 1.3323, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.07272727272727272, |
|
"grad_norm": 27.125, |
|
"learning_rate": 3.6290322580645166e-06, |
|
"loss": 1.4642, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.07434343434343435, |
|
"grad_norm": 25.875, |
|
"learning_rate": 3.7096774193548392e-06, |
|
"loss": 1.1611, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.07595959595959596, |
|
"grad_norm": 25.375, |
|
"learning_rate": 3.7903225806451614e-06, |
|
"loss": 1.36, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.07757575757575758, |
|
"grad_norm": 25.5, |
|
"learning_rate": 3.870967741935484e-06, |
|
"loss": 1.463, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.07919191919191919, |
|
"grad_norm": 28.25, |
|
"learning_rate": 3.951612903225807e-06, |
|
"loss": 1.3702, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.08080808080808081, |
|
"grad_norm": 25.625, |
|
"learning_rate": 4.032258064516129e-06, |
|
"loss": 1.3247, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.08242424242424243, |
|
"grad_norm": 19.625, |
|
"learning_rate": 4.112903225806452e-06, |
|
"loss": 1.1308, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.08404040404040404, |
|
"grad_norm": 22.0, |
|
"learning_rate": 4.193548387096774e-06, |
|
"loss": 1.1602, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.08565656565656565, |
|
"grad_norm": 22.0, |
|
"learning_rate": 4.274193548387097e-06, |
|
"loss": 1.2107, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.08727272727272728, |
|
"grad_norm": 22.0, |
|
"learning_rate": 4.35483870967742e-06, |
|
"loss": 1.1575, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.08888888888888889, |
|
"grad_norm": 23.5, |
|
"learning_rate": 4.435483870967742e-06, |
|
"loss": 1.3044, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.0905050505050505, |
|
"grad_norm": 20.625, |
|
"learning_rate": 4.516129032258065e-06, |
|
"loss": 0.9626, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.09212121212121212, |
|
"grad_norm": 23.0, |
|
"learning_rate": 4.596774193548387e-06, |
|
"loss": 1.5338, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.09373737373737374, |
|
"grad_norm": 21.5, |
|
"learning_rate": 4.67741935483871e-06, |
|
"loss": 1.3822, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.09535353535353536, |
|
"grad_norm": 18.0, |
|
"learning_rate": 4.758064516129033e-06, |
|
"loss": 0.8616, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.09696969696969697, |
|
"grad_norm": 20.75, |
|
"learning_rate": 4.838709677419355e-06, |
|
"loss": 1.133, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.09858585858585858, |
|
"grad_norm": 21.125, |
|
"learning_rate": 4.919354838709678e-06, |
|
"loss": 1.2883, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.10020202020202021, |
|
"grad_norm": 16.875, |
|
"learning_rate": 5e-06, |
|
"loss": 1.0195, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.10181818181818182, |
|
"grad_norm": 16.25, |
|
"learning_rate": 5.080645161290323e-06, |
|
"loss": 1.1219, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.10343434343434343, |
|
"grad_norm": 16.75, |
|
"learning_rate": 5.161290322580646e-06, |
|
"loss": 1.183, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.10505050505050505, |
|
"grad_norm": 20.375, |
|
"learning_rate": 5.241935483870968e-06, |
|
"loss": 1.1628, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.10666666666666667, |
|
"grad_norm": 18.75, |
|
"learning_rate": 5.322580645161291e-06, |
|
"loss": 0.9526, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.10828282828282829, |
|
"grad_norm": 18.25, |
|
"learning_rate": 5.4032258064516126e-06, |
|
"loss": 0.9687, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.1098989898989899, |
|
"grad_norm": 17.625, |
|
"learning_rate": 5.483870967741935e-06, |
|
"loss": 0.9895, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.11151515151515151, |
|
"grad_norm": 18.75, |
|
"learning_rate": 5.564516129032258e-06, |
|
"loss": 0.9116, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.11313131313131314, |
|
"grad_norm": 19.625, |
|
"learning_rate": 5.645161290322582e-06, |
|
"loss": 1.3456, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.11474747474747475, |
|
"grad_norm": 19.375, |
|
"learning_rate": 5.725806451612904e-06, |
|
"loss": 0.9474, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.11636363636363636, |
|
"grad_norm": 15.75, |
|
"learning_rate": 5.806451612903226e-06, |
|
"loss": 1.0595, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.11797979797979798, |
|
"grad_norm": 18.0, |
|
"learning_rate": 5.887096774193549e-06, |
|
"loss": 1.0755, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.1195959595959596, |
|
"grad_norm": 17.5, |
|
"learning_rate": 5.967741935483872e-06, |
|
"loss": 0.9928, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.12121212121212122, |
|
"grad_norm": 25.125, |
|
"learning_rate": 6.048387096774194e-06, |
|
"loss": 0.8585, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.12282828282828283, |
|
"grad_norm": 17.0, |
|
"learning_rate": 6.129032258064517e-06, |
|
"loss": 1.0376, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.12444444444444444, |
|
"grad_norm": 18.875, |
|
"learning_rate": 6.209677419354839e-06, |
|
"loss": 0.8824, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.12606060606060607, |
|
"grad_norm": 19.875, |
|
"learning_rate": 6.290322580645162e-06, |
|
"loss": 1.0713, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.12767676767676767, |
|
"grad_norm": 16.875, |
|
"learning_rate": 6.370967741935485e-06, |
|
"loss": 1.1012, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.1292929292929293, |
|
"grad_norm": 20.5, |
|
"learning_rate": 6.451612903225806e-06, |
|
"loss": 1.2101, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.13090909090909092, |
|
"grad_norm": 16.375, |
|
"learning_rate": 6.532258064516129e-06, |
|
"loss": 1.4869, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.13252525252525252, |
|
"grad_norm": 18.375, |
|
"learning_rate": 6.612903225806452e-06, |
|
"loss": 1.1957, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.13414141414141414, |
|
"grad_norm": 17.0, |
|
"learning_rate": 6.693548387096774e-06, |
|
"loss": 0.9294, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.13575757575757577, |
|
"grad_norm": 14.8125, |
|
"learning_rate": 6.774193548387097e-06, |
|
"loss": 0.7969, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.13737373737373737, |
|
"grad_norm": 16.625, |
|
"learning_rate": 6.854838709677419e-06, |
|
"loss": 0.9239, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.138989898989899, |
|
"grad_norm": 13.625, |
|
"learning_rate": 6.935483870967743e-06, |
|
"loss": 0.9914, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.1406060606060606, |
|
"grad_norm": 13.6875, |
|
"learning_rate": 7.0161290322580654e-06, |
|
"loss": 0.643, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.14222222222222222, |
|
"grad_norm": 15.0625, |
|
"learning_rate": 7.096774193548388e-06, |
|
"loss": 1.0152, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.14383838383838385, |
|
"grad_norm": 14.3125, |
|
"learning_rate": 7.177419354838711e-06, |
|
"loss": 0.7406, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.14545454545454545, |
|
"grad_norm": 16.875, |
|
"learning_rate": 7.258064516129033e-06, |
|
"loss": 1.0667, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.14707070707070707, |
|
"grad_norm": 14.8125, |
|
"learning_rate": 7.338709677419356e-06, |
|
"loss": 0.958, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.1486868686868687, |
|
"grad_norm": 15.4375, |
|
"learning_rate": 7.4193548387096784e-06, |
|
"loss": 0.9992, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.1503030303030303, |
|
"grad_norm": 16.25, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 0.7043, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.15191919191919193, |
|
"grad_norm": 16.875, |
|
"learning_rate": 7.580645161290323e-06, |
|
"loss": 1.2198, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.15353535353535352, |
|
"grad_norm": 19.5, |
|
"learning_rate": 7.661290322580646e-06, |
|
"loss": 1.2321, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.15515151515151515, |
|
"grad_norm": 15.4375, |
|
"learning_rate": 7.741935483870968e-06, |
|
"loss": 0.9691, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.15676767676767678, |
|
"grad_norm": 15.625, |
|
"learning_rate": 7.822580645161291e-06, |
|
"loss": 0.7549, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.15838383838383838, |
|
"grad_norm": 14.6875, |
|
"learning_rate": 7.903225806451613e-06, |
|
"loss": 0.8696, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 14.0, |
|
"learning_rate": 7.983870967741935e-06, |
|
"loss": 0.9764, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.16161616161616163, |
|
"grad_norm": 14.5, |
|
"learning_rate": 8.064516129032258e-06, |
|
"loss": 0.8763, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.16323232323232323, |
|
"grad_norm": 13.5625, |
|
"learning_rate": 8.145161290322582e-06, |
|
"loss": 0.9208, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.16484848484848486, |
|
"grad_norm": 16.0, |
|
"learning_rate": 8.225806451612904e-06, |
|
"loss": 1.1447, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.16646464646464645, |
|
"grad_norm": 16.25, |
|
"learning_rate": 8.306451612903227e-06, |
|
"loss": 0.9742, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.16808080808080808, |
|
"grad_norm": 17.125, |
|
"learning_rate": 8.387096774193549e-06, |
|
"loss": 0.9766, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.1696969696969697, |
|
"grad_norm": 15.4375, |
|
"learning_rate": 8.467741935483872e-06, |
|
"loss": 1.0782, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.1713131313131313, |
|
"grad_norm": 13.75, |
|
"learning_rate": 8.548387096774194e-06, |
|
"loss": 0.9839, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.17292929292929293, |
|
"grad_norm": 13.75, |
|
"learning_rate": 8.629032258064517e-06, |
|
"loss": 0.8056, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.17454545454545456, |
|
"grad_norm": 13.875, |
|
"learning_rate": 8.70967741935484e-06, |
|
"loss": 0.8916, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.17616161616161616, |
|
"grad_norm": 14.6875, |
|
"learning_rate": 8.790322580645163e-06, |
|
"loss": 0.8902, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.17777777777777778, |
|
"grad_norm": 18.125, |
|
"learning_rate": 8.870967741935484e-06, |
|
"loss": 1.027, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.17939393939393938, |
|
"grad_norm": 13.3125, |
|
"learning_rate": 8.951612903225806e-06, |
|
"loss": 0.9191, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.181010101010101, |
|
"grad_norm": 14.0625, |
|
"learning_rate": 9.03225806451613e-06, |
|
"loss": 0.9115, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.18262626262626264, |
|
"grad_norm": 14.8125, |
|
"learning_rate": 9.112903225806451e-06, |
|
"loss": 1.4586, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.18424242424242424, |
|
"grad_norm": 14.3125, |
|
"learning_rate": 9.193548387096775e-06, |
|
"loss": 0.6925, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.18585858585858586, |
|
"grad_norm": 13.875, |
|
"learning_rate": 9.274193548387097e-06, |
|
"loss": 1.0962, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.1874747474747475, |
|
"grad_norm": 14.25, |
|
"learning_rate": 9.35483870967742e-06, |
|
"loss": 0.8336, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.1890909090909091, |
|
"grad_norm": 24.375, |
|
"learning_rate": 9.435483870967743e-06, |
|
"loss": 1.1903, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.1907070707070707, |
|
"grad_norm": 11.9375, |
|
"learning_rate": 9.516129032258065e-06, |
|
"loss": 0.6861, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.1923232323232323, |
|
"grad_norm": 19.25, |
|
"learning_rate": 9.596774193548389e-06, |
|
"loss": 1.1889, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.19393939393939394, |
|
"grad_norm": 14.4375, |
|
"learning_rate": 9.67741935483871e-06, |
|
"loss": 1.0123, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.19555555555555557, |
|
"grad_norm": 15.625, |
|
"learning_rate": 9.758064516129034e-06, |
|
"loss": 0.9163, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.19717171717171716, |
|
"grad_norm": 14.25, |
|
"learning_rate": 9.838709677419356e-06, |
|
"loss": 0.9699, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.1987878787878788, |
|
"grad_norm": 13.25, |
|
"learning_rate": 9.919354838709679e-06, |
|
"loss": 0.9863, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.1987878787878788, |
|
"eval_loss": 0.8790417313575745, |
|
"eval_runtime": 35.1712, |
|
"eval_samples_per_second": 31.276, |
|
"eval_steps_per_second": 3.924, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.20040404040404042, |
|
"grad_norm": 14.1875, |
|
"learning_rate": 1e-05, |
|
"loss": 1.1048, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.20202020202020202, |
|
"grad_norm": 13.375, |
|
"learning_rate": 9.999980046003546e-06, |
|
"loss": 0.7441, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.20363636363636364, |
|
"grad_norm": 12.9375, |
|
"learning_rate": 9.999920184173449e-06, |
|
"loss": 0.9276, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.20525252525252524, |
|
"grad_norm": 13.125, |
|
"learning_rate": 9.999820414987502e-06, |
|
"loss": 0.9395, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.20686868686868687, |
|
"grad_norm": 14.625, |
|
"learning_rate": 9.999680739242022e-06, |
|
"loss": 1.0927, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.2084848484848485, |
|
"grad_norm": 13.875, |
|
"learning_rate": 9.999501158051846e-06, |
|
"loss": 0.8983, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.2101010101010101, |
|
"grad_norm": 12.9375, |
|
"learning_rate": 9.999281672850317e-06, |
|
"loss": 0.8245, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.21171717171717172, |
|
"grad_norm": 15.5625, |
|
"learning_rate": 9.99902228538928e-06, |
|
"loss": 0.8986, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.21333333333333335, |
|
"grad_norm": 15.625, |
|
"learning_rate": 9.99872299773906e-06, |
|
"loss": 1.0636, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.21494949494949495, |
|
"grad_norm": 13.125, |
|
"learning_rate": 9.998383812288451e-06, |
|
"loss": 0.8439, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.21656565656565657, |
|
"grad_norm": 12.25, |
|
"learning_rate": 9.998004731744696e-06, |
|
"loss": 0.8364, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.21818181818181817, |
|
"grad_norm": 12.0, |
|
"learning_rate": 9.997585759133463e-06, |
|
"loss": 0.8303, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.2197979797979798, |
|
"grad_norm": 11.5625, |
|
"learning_rate": 9.997126897798826e-06, |
|
"loss": 0.8335, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.22141414141414142, |
|
"grad_norm": 13.625, |
|
"learning_rate": 9.996628151403226e-06, |
|
"loss": 0.9116, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.22303030303030302, |
|
"grad_norm": 10.8125, |
|
"learning_rate": 9.996089523927461e-06, |
|
"loss": 0.7399, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.22464646464646465, |
|
"grad_norm": 9.9375, |
|
"learning_rate": 9.995511019670639e-06, |
|
"loss": 1.1413, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.22626262626262628, |
|
"grad_norm": 11.625, |
|
"learning_rate": 9.994892643250147e-06, |
|
"loss": 1.0564, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.22787878787878788, |
|
"grad_norm": 11.6875, |
|
"learning_rate": 9.99423439960162e-06, |
|
"loss": 1.013, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.2294949494949495, |
|
"grad_norm": 11.5, |
|
"learning_rate": 9.993536293978892e-06, |
|
"loss": 1.154, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.2311111111111111, |
|
"grad_norm": 11.4375, |
|
"learning_rate": 9.992798331953962e-06, |
|
"loss": 0.7173, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.23272727272727273, |
|
"grad_norm": 9.9375, |
|
"learning_rate": 9.99202051941695e-06, |
|
"loss": 0.6389, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.23434343434343435, |
|
"grad_norm": 11.5, |
|
"learning_rate": 9.991202862576036e-06, |
|
"loss": 1.1159, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.23595959595959595, |
|
"grad_norm": 12.625, |
|
"learning_rate": 9.99034536795744e-06, |
|
"loss": 0.9197, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.23757575757575758, |
|
"grad_norm": 13.0, |
|
"learning_rate": 9.989448042405328e-06, |
|
"loss": 0.9929, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.2391919191919192, |
|
"grad_norm": 11.875, |
|
"learning_rate": 9.9885108930818e-06, |
|
"loss": 0.7332, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.2408080808080808, |
|
"grad_norm": 11.9375, |
|
"learning_rate": 9.987533927466804e-06, |
|
"loss": 0.8305, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.24242424242424243, |
|
"grad_norm": 12.375, |
|
"learning_rate": 9.986517153358086e-06, |
|
"loss": 0.7982, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.24404040404040403, |
|
"grad_norm": 10.5625, |
|
"learning_rate": 9.98546057887113e-06, |
|
"loss": 1.0044, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.24565656565656566, |
|
"grad_norm": 12.75, |
|
"learning_rate": 9.984364212439089e-06, |
|
"loss": 0.9286, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.24727272727272728, |
|
"grad_norm": 11.5, |
|
"learning_rate": 9.98322806281272e-06, |
|
"loss": 1.0573, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.24888888888888888, |
|
"grad_norm": 11.0, |
|
"learning_rate": 9.982052139060312e-06, |
|
"loss": 0.8163, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.2505050505050505, |
|
"grad_norm": 10.0625, |
|
"learning_rate": 9.980836450567619e-06, |
|
"loss": 0.8431, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.25212121212121213, |
|
"grad_norm": 12.5, |
|
"learning_rate": 9.979581007037776e-06, |
|
"loss": 0.6704, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.25373737373737376, |
|
"grad_norm": 10.875, |
|
"learning_rate": 9.978285818491232e-06, |
|
"loss": 0.6866, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.25535353535353533, |
|
"grad_norm": 14.375, |
|
"learning_rate": 9.976950895265657e-06, |
|
"loss": 1.0205, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.25696969696969696, |
|
"grad_norm": 9.75, |
|
"learning_rate": 9.975576248015878e-06, |
|
"loss": 0.9211, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.2585858585858586, |
|
"grad_norm": 11.9375, |
|
"learning_rate": 9.974161887713775e-06, |
|
"loss": 1.1265, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2602020202020202, |
|
"grad_norm": 9.0, |
|
"learning_rate": 9.972707825648204e-06, |
|
"loss": 0.8746, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.26181818181818184, |
|
"grad_norm": 12.8125, |
|
"learning_rate": 9.971214073424906e-06, |
|
"loss": 0.846, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.2634343434343434, |
|
"grad_norm": 10.1875, |
|
"learning_rate": 9.969680642966409e-06, |
|
"loss": 0.6882, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.26505050505050504, |
|
"grad_norm": 13.5625, |
|
"learning_rate": 9.968107546511942e-06, |
|
"loss": 0.9636, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.26666666666666666, |
|
"grad_norm": 11.1875, |
|
"learning_rate": 9.966494796617328e-06, |
|
"loss": 0.6918, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.2682828282828283, |
|
"grad_norm": 9.375, |
|
"learning_rate": 9.964842406154892e-06, |
|
"loss": 1.0313, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.2698989898989899, |
|
"grad_norm": 10.625, |
|
"learning_rate": 9.963150388313347e-06, |
|
"loss": 0.9354, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.27151515151515154, |
|
"grad_norm": 12.3125, |
|
"learning_rate": 9.961418756597703e-06, |
|
"loss": 0.9895, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.2731313131313131, |
|
"grad_norm": 12.3125, |
|
"learning_rate": 9.959647524829148e-06, |
|
"loss": 0.9479, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.27474747474747474, |
|
"grad_norm": 11.1875, |
|
"learning_rate": 9.957836707144943e-06, |
|
"loss": 1.0128, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.27636363636363637, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 9.95598631799831e-06, |
|
"loss": 0.7783, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.277979797979798, |
|
"grad_norm": 11.8125, |
|
"learning_rate": 9.95409637215831e-06, |
|
"loss": 0.9492, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.2795959595959596, |
|
"grad_norm": 8.8125, |
|
"learning_rate": 9.952166884709735e-06, |
|
"loss": 0.8773, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.2812121212121212, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 9.950197871052974e-06, |
|
"loss": 1.0219, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.2828282828282828, |
|
"grad_norm": 10.25, |
|
"learning_rate": 9.94818934690391e-06, |
|
"loss": 0.6188, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.28444444444444444, |
|
"grad_norm": 12.1875, |
|
"learning_rate": 9.94614132829377e-06, |
|
"loss": 0.7879, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.28606060606060607, |
|
"grad_norm": 10.5625, |
|
"learning_rate": 9.94405383156902e-06, |
|
"loss": 0.7926, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.2876767676767677, |
|
"grad_norm": 10.1875, |
|
"learning_rate": 9.941926873391223e-06, |
|
"loss": 0.7642, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.28929292929292927, |
|
"grad_norm": 11.9375, |
|
"learning_rate": 9.939760470736902e-06, |
|
"loss": 0.8427, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.2909090909090909, |
|
"grad_norm": 12.5, |
|
"learning_rate": 9.937554640897414e-06, |
|
"loss": 0.7917, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.2925252525252525, |
|
"grad_norm": 11.875, |
|
"learning_rate": 9.935309401478808e-06, |
|
"loss": 0.8325, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.29414141414141415, |
|
"grad_norm": 10.0, |
|
"learning_rate": 9.933024770401682e-06, |
|
"loss": 0.7708, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.2957575757575758, |
|
"grad_norm": 9.5625, |
|
"learning_rate": 9.930700765901046e-06, |
|
"loss": 1.1573, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.2973737373737374, |
|
"grad_norm": 10.4375, |
|
"learning_rate": 9.928337406526172e-06, |
|
"loss": 1.0751, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.298989898989899, |
|
"grad_norm": 8.25, |
|
"learning_rate": 9.925934711140444e-06, |
|
"loss": 0.6648, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.3006060606060606, |
|
"grad_norm": 9.625, |
|
"learning_rate": 9.923492698921214e-06, |
|
"loss": 1.0982, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.3022222222222222, |
|
"grad_norm": 9.5, |
|
"learning_rate": 9.921011389359638e-06, |
|
"loss": 0.9648, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.30383838383838385, |
|
"grad_norm": 13.4375, |
|
"learning_rate": 9.918490802260538e-06, |
|
"loss": 0.7589, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.3054545454545455, |
|
"grad_norm": 10.4375, |
|
"learning_rate": 9.915930957742228e-06, |
|
"loss": 0.6699, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.30707070707070705, |
|
"grad_norm": 10.9375, |
|
"learning_rate": 9.913331876236358e-06, |
|
"loss": 0.9228, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.3086868686868687, |
|
"grad_norm": 9.75, |
|
"learning_rate": 9.910693578487755e-06, |
|
"loss": 0.6985, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.3103030303030303, |
|
"grad_norm": 9.0625, |
|
"learning_rate": 9.90801608555425e-06, |
|
"loss": 0.8004, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.31191919191919193, |
|
"grad_norm": 9.25, |
|
"learning_rate": 9.905299418806517e-06, |
|
"loss": 0.8232, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.31353535353535356, |
|
"grad_norm": 10.625, |
|
"learning_rate": 9.902543599927903e-06, |
|
"loss": 1.3909, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.3151515151515151, |
|
"grad_norm": 9.25, |
|
"learning_rate": 9.899748650914245e-06, |
|
"loss": 0.9673, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.31676767676767675, |
|
"grad_norm": 12.0, |
|
"learning_rate": 9.896914594073703e-06, |
|
"loss": 0.9208, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.3183838383838384, |
|
"grad_norm": 9.4375, |
|
"learning_rate": 9.894041452026584e-06, |
|
"loss": 0.8361, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 9.891129247705153e-06, |
|
"loss": 1.0054, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.32161616161616163, |
|
"grad_norm": 10.0625, |
|
"learning_rate": 9.888178004353456e-06, |
|
"loss": 0.8643, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.32323232323232326, |
|
"grad_norm": 8.75, |
|
"learning_rate": 9.885187745527132e-06, |
|
"loss": 0.738, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.32484848484848483, |
|
"grad_norm": 10.0, |
|
"learning_rate": 9.88215849509323e-06, |
|
"loss": 0.9793, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.32646464646464646, |
|
"grad_norm": 10.4375, |
|
"learning_rate": 9.879090277230005e-06, |
|
"loss": 0.7254, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.3280808080808081, |
|
"grad_norm": 11.5, |
|
"learning_rate": 9.875983116426745e-06, |
|
"loss": 0.8806, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.3296969696969697, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 9.87283703748356e-06, |
|
"loss": 0.9895, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.33131313131313134, |
|
"grad_norm": 8.4375, |
|
"learning_rate": 9.869652065511188e-06, |
|
"loss": 0.7317, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.3329292929292929, |
|
"grad_norm": 9.5, |
|
"learning_rate": 9.866428225930798e-06, |
|
"loss": 0.6969, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.33454545454545453, |
|
"grad_norm": 10.1875, |
|
"learning_rate": 9.863165544473781e-06, |
|
"loss": 0.8589, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.33616161616161616, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 9.859864047181551e-06, |
|
"loss": 0.6515, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.3377777777777778, |
|
"grad_norm": 7.84375, |
|
"learning_rate": 9.856523760405339e-06, |
|
"loss": 0.6014, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.3393939393939394, |
|
"grad_norm": 8.8125, |
|
"learning_rate": 9.853144710805966e-06, |
|
"loss": 0.7315, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.341010101010101, |
|
"grad_norm": 11.25, |
|
"learning_rate": 9.849726925353655e-06, |
|
"loss": 0.721, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.3426262626262626, |
|
"grad_norm": 9.8125, |
|
"learning_rate": 9.846270431327793e-06, |
|
"loss": 0.8893, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.34424242424242424, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 9.842775256316732e-06, |
|
"loss": 0.9498, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.34585858585858587, |
|
"grad_norm": 11.125, |
|
"learning_rate": 9.839241428217553e-06, |
|
"loss": 0.8841, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.3474747474747475, |
|
"grad_norm": 10.125, |
|
"learning_rate": 9.835668975235857e-06, |
|
"loss": 0.9036, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.3490909090909091, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 9.832057925885526e-06, |
|
"loss": 0.8493, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.3507070707070707, |
|
"grad_norm": 9.375, |
|
"learning_rate": 9.828408308988506e-06, |
|
"loss": 0.8034, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.3523232323232323, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 9.824720153674578e-06, |
|
"loss": 0.7762, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.35393939393939394, |
|
"grad_norm": 11.6875, |
|
"learning_rate": 9.820993489381114e-06, |
|
"loss": 0.986, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.35555555555555557, |
|
"grad_norm": 9.8125, |
|
"learning_rate": 9.817228345852853e-06, |
|
"loss": 0.9599, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3571717171717172, |
|
"grad_norm": 9.1875, |
|
"learning_rate": 9.813424753141658e-06, |
|
"loss": 0.7152, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.35878787878787877, |
|
"grad_norm": 7.75, |
|
"learning_rate": 9.809582741606283e-06, |
|
"loss": 0.828, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.3604040404040404, |
|
"grad_norm": 9.3125, |
|
"learning_rate": 9.805702341912117e-06, |
|
"loss": 0.9487, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.362020202020202, |
|
"grad_norm": 9.125, |
|
"learning_rate": 9.801783585030959e-06, |
|
"loss": 0.8378, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 10.5625, |
|
"learning_rate": 9.797826502240746e-06, |
|
"loss": 1.0327, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.3652525252525253, |
|
"grad_norm": 38.75, |
|
"learning_rate": 9.79383112512533e-06, |
|
"loss": 0.877, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.36686868686868684, |
|
"grad_norm": 8.6875, |
|
"learning_rate": 9.789797485574204e-06, |
|
"loss": 1.0547, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.36848484848484847, |
|
"grad_norm": 9.1875, |
|
"learning_rate": 9.785725615782262e-06, |
|
"loss": 0.6674, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.3701010101010101, |
|
"grad_norm": 14.75, |
|
"learning_rate": 9.781615548249533e-06, |
|
"loss": 0.8929, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.3717171717171717, |
|
"grad_norm": 8.5, |
|
"learning_rate": 9.777467315780926e-06, |
|
"loss": 0.6194, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.37333333333333335, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 9.773280951485967e-06, |
|
"loss": 0.7332, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.374949494949495, |
|
"grad_norm": 7.5625, |
|
"learning_rate": 9.769056488778538e-06, |
|
"loss": 0.6245, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.37656565656565655, |
|
"grad_norm": 8.25, |
|
"learning_rate": 9.764793961376602e-06, |
|
"loss": 0.8382, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.3781818181818182, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 9.760493403301941e-06, |
|
"loss": 0.6372, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.3797979797979798, |
|
"grad_norm": 11.875, |
|
"learning_rate": 9.756154848879885e-06, |
|
"loss": 0.9138, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.3814141414141414, |
|
"grad_norm": 8.25, |
|
"learning_rate": 9.751778332739033e-06, |
|
"loss": 0.9099, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.38303030303030305, |
|
"grad_norm": 9.1875, |
|
"learning_rate": 9.74736388981098e-06, |
|
"loss": 0.864, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.3846464646464646, |
|
"grad_norm": 8.4375, |
|
"learning_rate": 9.74291155533004e-06, |
|
"loss": 0.6037, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.38626262626262625, |
|
"grad_norm": 8.875, |
|
"learning_rate": 9.738421364832956e-06, |
|
"loss": 0.6612, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.3878787878787879, |
|
"grad_norm": 8.25, |
|
"learning_rate": 9.733893354158628e-06, |
|
"loss": 0.772, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.3894949494949495, |
|
"grad_norm": 8.6875, |
|
"learning_rate": 9.729327559447817e-06, |
|
"loss": 0.7761, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.39111111111111113, |
|
"grad_norm": 9.4375, |
|
"learning_rate": 9.724724017142869e-06, |
|
"loss": 0.8807, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.3927272727272727, |
|
"grad_norm": 8.5, |
|
"learning_rate": 9.720082763987404e-06, |
|
"loss": 0.8429, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.39434343434343433, |
|
"grad_norm": 7.5625, |
|
"learning_rate": 9.715403837026046e-06, |
|
"loss": 0.4624, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.39595959595959596, |
|
"grad_norm": 10.4375, |
|
"learning_rate": 9.710687273604111e-06, |
|
"loss": 0.865, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.3975757575757576, |
|
"grad_norm": 6.875, |
|
"learning_rate": 9.705933111367314e-06, |
|
"loss": 0.7918, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.3975757575757576, |
|
"eval_loss": 0.8323823809623718, |
|
"eval_runtime": 35.1362, |
|
"eval_samples_per_second": 31.307, |
|
"eval_steps_per_second": 3.928, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.3991919191919192, |
|
"grad_norm": 8.375, |
|
"learning_rate": 9.701141388261471e-06, |
|
"loss": 0.7728, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.40080808080808084, |
|
"grad_norm": 9.5, |
|
"learning_rate": 9.69631214253219e-06, |
|
"loss": 0.8228, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.4024242424242424, |
|
"grad_norm": 10.8125, |
|
"learning_rate": 9.691445412724576e-06, |
|
"loss": 0.8661, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.40404040404040403, |
|
"grad_norm": 10.6875, |
|
"learning_rate": 9.68654123768291e-06, |
|
"loss": 0.8111, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.40565656565656566, |
|
"grad_norm": 8.5, |
|
"learning_rate": 9.681599656550346e-06, |
|
"loss": 0.7355, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.4072727272727273, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 9.676620708768608e-06, |
|
"loss": 0.8194, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.4088888888888889, |
|
"grad_norm": 10.25, |
|
"learning_rate": 9.67160443407765e-06, |
|
"loss": 0.8502, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.4105050505050505, |
|
"grad_norm": 16.625, |
|
"learning_rate": 9.666550872515367e-06, |
|
"loss": 0.841, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.4121212121212121, |
|
"grad_norm": 9.3125, |
|
"learning_rate": 9.661460064417263e-06, |
|
"loss": 1.1208, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.41373737373737374, |
|
"grad_norm": 10.0625, |
|
"learning_rate": 9.656332050416118e-06, |
|
"loss": 0.9173, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.41535353535353536, |
|
"grad_norm": 9.5625, |
|
"learning_rate": 9.651166871441684e-06, |
|
"loss": 1.1702, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.416969696969697, |
|
"grad_norm": 9.875, |
|
"learning_rate": 9.645964568720345e-06, |
|
"loss": 0.8802, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.41858585858585856, |
|
"grad_norm": 8.3125, |
|
"learning_rate": 9.640725183774797e-06, |
|
"loss": 0.9126, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.4202020202020202, |
|
"grad_norm": 8.25, |
|
"learning_rate": 9.635448758423703e-06, |
|
"loss": 0.6081, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.4218181818181818, |
|
"grad_norm": 7.6875, |
|
"learning_rate": 9.630135334781373e-06, |
|
"loss": 0.7091, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.42343434343434344, |
|
"grad_norm": 9.8125, |
|
"learning_rate": 9.624784955257423e-06, |
|
"loss": 0.7537, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.42505050505050507, |
|
"grad_norm": 8.375, |
|
"learning_rate": 9.619397662556434e-06, |
|
"loss": 0.7439, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.4266666666666667, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 9.613973499677613e-06, |
|
"loss": 0.9897, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.42828282828282827, |
|
"grad_norm": 9.9375, |
|
"learning_rate": 9.608512509914453e-06, |
|
"loss": 0.7775, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.4298989898989899, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 9.60301473685438e-06, |
|
"loss": 0.8957, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.4315151515151515, |
|
"grad_norm": 11.0, |
|
"learning_rate": 9.597480224378412e-06, |
|
"loss": 1.0668, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.43313131313131314, |
|
"grad_norm": 10.6875, |
|
"learning_rate": 9.591909016660806e-06, |
|
"loss": 0.8081, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.43474747474747477, |
|
"grad_norm": 9.25, |
|
"learning_rate": 9.586301158168706e-06, |
|
"loss": 0.8119, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.43636363636363634, |
|
"grad_norm": 11.4375, |
|
"learning_rate": 9.580656693661787e-06, |
|
"loss": 0.8829, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.43797979797979797, |
|
"grad_norm": 9.8125, |
|
"learning_rate": 9.574975668191899e-06, |
|
"loss": 0.7646, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.4395959595959596, |
|
"grad_norm": 10.25, |
|
"learning_rate": 9.569258127102708e-06, |
|
"loss": 0.8685, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.4412121212121212, |
|
"grad_norm": 10.0, |
|
"learning_rate": 9.56350411602933e-06, |
|
"loss": 1.093, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.44282828282828285, |
|
"grad_norm": 51.5, |
|
"learning_rate": 9.55771368089797e-06, |
|
"loss": 0.8206, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 9.551886867925565e-06, |
|
"loss": 1.0301, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.44606060606060605, |
|
"grad_norm": 10.125, |
|
"learning_rate": 9.546023723619387e-06, |
|
"loss": 1.0908, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.4476767676767677, |
|
"grad_norm": 9.3125, |
|
"learning_rate": 9.540124294776704e-06, |
|
"loss": 0.7014, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.4492929292929293, |
|
"grad_norm": 7.84375, |
|
"learning_rate": 9.534188628484391e-06, |
|
"loss": 0.682, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.4509090909090909, |
|
"grad_norm": 7.59375, |
|
"learning_rate": 9.528216772118553e-06, |
|
"loss": 0.7439, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.45252525252525255, |
|
"grad_norm": 8.125, |
|
"learning_rate": 9.522208773344147e-06, |
|
"loss": 0.7983, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.4541414141414141, |
|
"grad_norm": 6.40625, |
|
"learning_rate": 9.516164680114612e-06, |
|
"loss": 0.4831, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.45575757575757575, |
|
"grad_norm": 10.375, |
|
"learning_rate": 9.510084540671471e-06, |
|
"loss": 0.7558, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.4573737373737374, |
|
"grad_norm": 9.8125, |
|
"learning_rate": 9.50396840354396e-06, |
|
"loss": 0.5978, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.458989898989899, |
|
"grad_norm": 8.0625, |
|
"learning_rate": 9.497816317548625e-06, |
|
"loss": 0.9693, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.46060606060606063, |
|
"grad_norm": 7.65625, |
|
"learning_rate": 9.491628331788949e-06, |
|
"loss": 0.6546, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.4622222222222222, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 9.485404495654953e-06, |
|
"loss": 0.9247, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.4638383838383838, |
|
"grad_norm": 8.0, |
|
"learning_rate": 9.479144858822795e-06, |
|
"loss": 0.6692, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.46545454545454545, |
|
"grad_norm": 10.125, |
|
"learning_rate": 9.472849471254386e-06, |
|
"loss": 1.1385, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.4670707070707071, |
|
"grad_norm": 8.125, |
|
"learning_rate": 9.466518383196981e-06, |
|
"loss": 0.6915, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.4686868686868687, |
|
"grad_norm": 6.625, |
|
"learning_rate": 9.460151645182784e-06, |
|
"loss": 0.6693, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.4703030303030303, |
|
"grad_norm": 8.6875, |
|
"learning_rate": 9.453749308028542e-06, |
|
"loss": 0.641, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.4719191919191919, |
|
"grad_norm": 8.75, |
|
"learning_rate": 9.447311422835141e-06, |
|
"loss": 0.8019, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.47353535353535353, |
|
"grad_norm": 9.0, |
|
"learning_rate": 9.440838040987194e-06, |
|
"loss": 0.8253, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.47515151515151516, |
|
"grad_norm": 9.4375, |
|
"learning_rate": 9.43432921415264e-06, |
|
"loss": 1.1923, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.4767676767676768, |
|
"grad_norm": 7.90625, |
|
"learning_rate": 9.427784994282318e-06, |
|
"loss": 0.7972, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.4783838383838384, |
|
"grad_norm": 8.0, |
|
"learning_rate": 9.421205433609568e-06, |
|
"loss": 0.7207, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 9.414590584649798e-06, |
|
"loss": 0.861, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.4816161616161616, |
|
"grad_norm": 8.8125, |
|
"learning_rate": 9.407940500200082e-06, |
|
"loss": 0.8226, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.48323232323232324, |
|
"grad_norm": 7.75, |
|
"learning_rate": 9.40125523333872e-06, |
|
"loss": 0.9907, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.48484848484848486, |
|
"grad_norm": 8.3125, |
|
"learning_rate": 9.39453483742483e-06, |
|
"loss": 0.7025, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.4864646464646465, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 9.387779366097915e-06, |
|
"loss": 0.5553, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.48808080808080806, |
|
"grad_norm": 6.40625, |
|
"learning_rate": 9.380988873277436e-06, |
|
"loss": 0.6758, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.4896969696969697, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 9.37416341316238e-06, |
|
"loss": 0.5991, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.4913131313131313, |
|
"grad_norm": 8.8125, |
|
"learning_rate": 9.367303040230828e-06, |
|
"loss": 0.9227, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.49292929292929294, |
|
"grad_norm": 23.75, |
|
"learning_rate": 9.360407809239527e-06, |
|
"loss": 0.6559, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.49454545454545457, |
|
"grad_norm": 9.625, |
|
"learning_rate": 9.35347777522344e-06, |
|
"loss": 0.6758, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.49616161616161614, |
|
"grad_norm": 8.875, |
|
"learning_rate": 9.346512993495315e-06, |
|
"loss": 0.8509, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.49777777777777776, |
|
"grad_norm": 8.6875, |
|
"learning_rate": 9.33951351964525e-06, |
|
"loss": 0.6474, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.4993939393939394, |
|
"grad_norm": 8.0625, |
|
"learning_rate": 9.33247940954023e-06, |
|
"loss": 0.7196, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.501010101010101, |
|
"grad_norm": 8.125, |
|
"learning_rate": 9.3254107193237e-06, |
|
"loss": 0.7781, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.5026262626262626, |
|
"grad_norm": 11.1875, |
|
"learning_rate": 9.318307505415108e-06, |
|
"loss": 1.1145, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.5042424242424243, |
|
"grad_norm": 8.6875, |
|
"learning_rate": 9.311169824509454e-06, |
|
"loss": 0.979, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.5058585858585859, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 9.303997733576846e-06, |
|
"loss": 1.0086, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.5074747474747475, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 9.296791289862031e-06, |
|
"loss": 0.5292, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.509090909090909, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 9.289550550883952e-06, |
|
"loss": 0.7044, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.5107070707070707, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 9.28227557443528e-06, |
|
"loss": 0.9252, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.5123232323232323, |
|
"grad_norm": 7.125, |
|
"learning_rate": 9.274966418581958e-06, |
|
"loss": 0.5309, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.5139393939393939, |
|
"grad_norm": 8.0, |
|
"learning_rate": 9.267623141662734e-06, |
|
"loss": 0.6308, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.5155555555555555, |
|
"grad_norm": 8.875, |
|
"learning_rate": 9.260245802288694e-06, |
|
"loss": 0.7667, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.5171717171717172, |
|
"grad_norm": 12.9375, |
|
"learning_rate": 9.252834459342801e-06, |
|
"loss": 0.8895, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.5187878787878788, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 9.24538917197942e-06, |
|
"loss": 0.3923, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.5204040404040404, |
|
"grad_norm": 11.125, |
|
"learning_rate": 9.237909999623847e-06, |
|
"loss": 0.9289, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.522020202020202, |
|
"grad_norm": 10.0625, |
|
"learning_rate": 9.230397001971829e-06, |
|
"loss": 0.8841, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.5236363636363637, |
|
"grad_norm": 10.0, |
|
"learning_rate": 9.222850238989104e-06, |
|
"loss": 1.0293, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.5252525252525253, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 9.215269770910897e-06, |
|
"loss": 0.9055, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.5268686868686868, |
|
"grad_norm": 10.0625, |
|
"learning_rate": 9.207655658241469e-06, |
|
"loss": 0.4496, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.5284848484848484, |
|
"grad_norm": 9.25, |
|
"learning_rate": 9.200007961753605e-06, |
|
"loss": 1.0731, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.5301010101010101, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 9.192326742488153e-06, |
|
"loss": 0.672, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.5317171717171717, |
|
"grad_norm": 10.9375, |
|
"learning_rate": 9.184612061753518e-06, |
|
"loss": 1.0366, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.5333333333333333, |
|
"grad_norm": 8.625, |
|
"learning_rate": 9.176863981125185e-06, |
|
"loss": 0.9249, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.534949494949495, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 9.169082562445228e-06, |
|
"loss": 0.6611, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.5365656565656566, |
|
"grad_norm": 7.625, |
|
"learning_rate": 9.161267867821802e-06, |
|
"loss": 0.7343, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.5381818181818182, |
|
"grad_norm": 8.75, |
|
"learning_rate": 9.153419959628666e-06, |
|
"loss": 0.9306, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.5397979797979798, |
|
"grad_norm": 6.75, |
|
"learning_rate": 9.14553890050467e-06, |
|
"loss": 0.7274, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.5414141414141415, |
|
"grad_norm": 9.1875, |
|
"learning_rate": 9.137624753353268e-06, |
|
"loss": 0.5954, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.5430303030303031, |
|
"grad_norm": 7.125, |
|
"learning_rate": 9.129677581342e-06, |
|
"loss": 1.0952, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.5446464646464646, |
|
"grad_norm": 8.375, |
|
"learning_rate": 9.121697447902006e-06, |
|
"loss": 0.6458, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.5462626262626262, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 9.113684416727511e-06, |
|
"loss": 0.6578, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.5478787878787879, |
|
"grad_norm": 8.3125, |
|
"learning_rate": 9.10563855177531e-06, |
|
"loss": 0.6899, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.5494949494949495, |
|
"grad_norm": 10.375, |
|
"learning_rate": 9.097559917264268e-06, |
|
"loss": 0.7607, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.5511111111111111, |
|
"grad_norm": 9.875, |
|
"learning_rate": 9.089448577674799e-06, |
|
"loss": 0.6255, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.5527272727272727, |
|
"grad_norm": 8.6875, |
|
"learning_rate": 9.081304597748366e-06, |
|
"loss": 0.9227, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.5543434343434344, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 9.073128042486945e-06, |
|
"loss": 0.6744, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.555959595959596, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 9.064918977152517e-06, |
|
"loss": 0.4926, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.5575757575757576, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 9.056677467266543e-06, |
|
"loss": 0.5658, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.5591919191919192, |
|
"grad_norm": 8.3125, |
|
"learning_rate": 9.048403578609454e-06, |
|
"loss": 0.9654, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.5608080808080808, |
|
"grad_norm": 9.4375, |
|
"learning_rate": 9.040097377220103e-06, |
|
"loss": 0.8399, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.5624242424242424, |
|
"grad_norm": 8.875, |
|
"learning_rate": 9.031758929395259e-06, |
|
"loss": 0.7202, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.564040404040404, |
|
"grad_norm": 9.3125, |
|
"learning_rate": 9.02338830168906e-06, |
|
"loss": 0.5778, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.5656565656565656, |
|
"grad_norm": 6.375, |
|
"learning_rate": 9.014985560912499e-06, |
|
"loss": 0.4703, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.5672727272727273, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 9.00655077413288e-06, |
|
"loss": 0.8051, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.5688888888888889, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 8.998084008673284e-06, |
|
"loss": 0.5963, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.5705050505050505, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 8.989585332112039e-06, |
|
"loss": 0.9701, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.5721212121212121, |
|
"grad_norm": 9.9375, |
|
"learning_rate": 8.981054812282162e-06, |
|
"loss": 1.0809, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.5737373737373738, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 8.972492517270845e-06, |
|
"loss": 0.8909, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.5753535353535354, |
|
"grad_norm": 7.71875, |
|
"learning_rate": 8.963898515418885e-06, |
|
"loss": 0.5967, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.576969696969697, |
|
"grad_norm": 7.34375, |
|
"learning_rate": 8.955272875320159e-06, |
|
"loss": 0.74, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.5785858585858585, |
|
"grad_norm": 7.71875, |
|
"learning_rate": 8.946615665821059e-06, |
|
"loss": 0.6306, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.5802020202020202, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 8.937926956019957e-06, |
|
"loss": 0.5952, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.5818181818181818, |
|
"grad_norm": 9.0, |
|
"learning_rate": 8.929206815266653e-06, |
|
"loss": 0.5917, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.5834343434343434, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 8.920455313161801e-06, |
|
"loss": 0.6248, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.585050505050505, |
|
"grad_norm": 8.375, |
|
"learning_rate": 8.911672519556386e-06, |
|
"loss": 0.7684, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.5866666666666667, |
|
"grad_norm": 7.125, |
|
"learning_rate": 8.902858504551136e-06, |
|
"loss": 0.6592, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.5882828282828283, |
|
"grad_norm": 8.6875, |
|
"learning_rate": 8.89401333849598e-06, |
|
"loss": 0.6805, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.5898989898989899, |
|
"grad_norm": 8.6875, |
|
"learning_rate": 8.885137091989488e-06, |
|
"loss": 0.8623, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.5915151515151515, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 8.87622983587829e-06, |
|
"loss": 1.0672, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.5931313131313132, |
|
"grad_norm": 7.8125, |
|
"learning_rate": 8.867291641256534e-06, |
|
"loss": 1.091, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.5947474747474748, |
|
"grad_norm": 12.1875, |
|
"learning_rate": 8.8583225794653e-06, |
|
"loss": 0.7798, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.5963636363636363, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 8.849322722092037e-06, |
|
"loss": 0.5133, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.5963636363636363, |
|
"eval_loss": 0.7914893627166748, |
|
"eval_runtime": 35.1895, |
|
"eval_samples_per_second": 31.259, |
|
"eval_steps_per_second": 3.922, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.597979797979798, |
|
"grad_norm": 7.65625, |
|
"learning_rate": 8.840292140969995e-06, |
|
"loss": 0.9505, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.5995959595959596, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 8.83123090817765e-06, |
|
"loss": 0.6006, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6012121212121212, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 8.82213909603812e-06, |
|
"loss": 0.7318, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6028282828282828, |
|
"grad_norm": 8.125, |
|
"learning_rate": 8.813016777118604e-06, |
|
"loss": 0.725, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6044444444444445, |
|
"grad_norm": 8.0, |
|
"learning_rate": 8.803864024229786e-06, |
|
"loss": 0.8758, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6060606060606061, |
|
"grad_norm": 7.6875, |
|
"learning_rate": 8.79468091042527e-06, |
|
"loss": 0.8636, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6076767676767677, |
|
"grad_norm": 9.75, |
|
"learning_rate": 8.78546750900098e-06, |
|
"loss": 0.7711, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6092929292929293, |
|
"grad_norm": 7.21875, |
|
"learning_rate": 8.77622389349459e-06, |
|
"loss": 0.5907, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.610909090909091, |
|
"grad_norm": 6.15625, |
|
"learning_rate": 8.766950137684929e-06, |
|
"loss": 0.6627, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6125252525252525, |
|
"grad_norm": 8.125, |
|
"learning_rate": 8.757646315591391e-06, |
|
"loss": 0.9963, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6141414141414141, |
|
"grad_norm": 6.25, |
|
"learning_rate": 8.748312501473351e-06, |
|
"loss": 0.8233, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6157575757575757, |
|
"grad_norm": 7.5, |
|
"learning_rate": 8.738948769829565e-06, |
|
"loss": 0.5933, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6173737373737374, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 8.72955519539758e-06, |
|
"loss": 0.4457, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.618989898989899, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 8.72013185315314e-06, |
|
"loss": 0.6291, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6206060606060606, |
|
"grad_norm": 4.6875, |
|
"learning_rate": 8.710678818309576e-06, |
|
"loss": 0.5969, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6222222222222222, |
|
"grad_norm": 6.1875, |
|
"learning_rate": 8.701196166317221e-06, |
|
"loss": 0.4308, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6238383838383839, |
|
"grad_norm": 9.5, |
|
"learning_rate": 8.691683972862792e-06, |
|
"loss": 0.8222, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6254545454545455, |
|
"grad_norm": 8.125, |
|
"learning_rate": 8.682142313868802e-06, |
|
"loss": 0.8346, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.6270707070707071, |
|
"grad_norm": 7.40625, |
|
"learning_rate": 8.672571265492944e-06, |
|
"loss": 0.7311, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.6286868686868687, |
|
"grad_norm": 8.0625, |
|
"learning_rate": 8.66297090412748e-06, |
|
"loss": 0.7654, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.6303030303030303, |
|
"grad_norm": 9.25, |
|
"learning_rate": 8.653341306398645e-06, |
|
"loss": 0.8764, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.6319191919191919, |
|
"grad_norm": 7.75, |
|
"learning_rate": 8.643682549166021e-06, |
|
"loss": 0.8139, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.6335353535353535, |
|
"grad_norm": 7.4375, |
|
"learning_rate": 8.63399470952193e-06, |
|
"loss": 0.8704, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.6351515151515151, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 8.62427786479082e-06, |
|
"loss": 0.5697, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.6367676767676768, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 8.614532092528645e-06, |
|
"loss": 0.6327, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.6383838383838384, |
|
"grad_norm": 7.75, |
|
"learning_rate": 8.604757470522247e-06, |
|
"loss": 0.7221, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 8.594954076788736e-06, |
|
"loss": 0.7422, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.6416161616161616, |
|
"grad_norm": 14.1875, |
|
"learning_rate": 8.585121989574865e-06, |
|
"loss": 0.6289, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.6432323232323233, |
|
"grad_norm": 7.65625, |
|
"learning_rate": 8.575261287356407e-06, |
|
"loss": 0.8722, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.6448484848484849, |
|
"grad_norm": 7.125, |
|
"learning_rate": 8.565372048837529e-06, |
|
"loss": 0.7178, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.6464646464646465, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 8.555454352950161e-06, |
|
"loss": 0.6246, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.648080808080808, |
|
"grad_norm": 8.4375, |
|
"learning_rate": 8.545508278853373e-06, |
|
"loss": 0.8206, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.6496969696969697, |
|
"grad_norm": 10.125, |
|
"learning_rate": 8.535533905932739e-06, |
|
"loss": 0.9123, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.6513131313131313, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 8.525531313799693e-06, |
|
"loss": 0.9784, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.6529292929292929, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 8.515500582290914e-06, |
|
"loss": 0.9171, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.6545454545454545, |
|
"grad_norm": 5.84375, |
|
"learning_rate": 8.505441791467674e-06, |
|
"loss": 0.5419, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.6561616161616162, |
|
"grad_norm": 6.625, |
|
"learning_rate": 8.495355021615204e-06, |
|
"loss": 0.6661, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.6577777777777778, |
|
"grad_norm": 7.625, |
|
"learning_rate": 8.48524035324205e-06, |
|
"loss": 0.8925, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.6593939393939394, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 8.475097867079437e-06, |
|
"loss": 0.5441, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.661010101010101, |
|
"grad_norm": 6.21875, |
|
"learning_rate": 8.464927644080617e-06, |
|
"loss": 0.5148, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.6626262626262627, |
|
"grad_norm": 8.125, |
|
"learning_rate": 8.454729765420228e-06, |
|
"loss": 0.7649, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.6642424242424242, |
|
"grad_norm": 7.65625, |
|
"learning_rate": 8.444504312493642e-06, |
|
"loss": 0.6308, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.6658585858585858, |
|
"grad_norm": 9.9375, |
|
"learning_rate": 8.434251366916323e-06, |
|
"loss": 0.6534, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.6674747474747474, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 8.423971010523165e-06, |
|
"loss": 0.8665, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.6690909090909091, |
|
"grad_norm": 6.78125, |
|
"learning_rate": 8.413663325367845e-06, |
|
"loss": 0.5408, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.6707070707070707, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 8.403328393722169e-06, |
|
"loss": 0.6455, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.6723232323232323, |
|
"grad_norm": 6.5625, |
|
"learning_rate": 8.392966298075413e-06, |
|
"loss": 0.6351, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.673939393939394, |
|
"grad_norm": 9.5625, |
|
"learning_rate": 8.382577121133664e-06, |
|
"loss": 0.6295, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.6755555555555556, |
|
"grad_norm": 7.625, |
|
"learning_rate": 8.372160945819164e-06, |
|
"loss": 1.2937, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.6771717171717172, |
|
"grad_norm": 9.25, |
|
"learning_rate": 8.361717855269643e-06, |
|
"loss": 0.731, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.6787878787878788, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 8.351247932837655e-06, |
|
"loss": 0.5192, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.6804040404040405, |
|
"grad_norm": 8.125, |
|
"learning_rate": 8.34075126208992e-06, |
|
"loss": 0.8637, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.682020202020202, |
|
"grad_norm": 7.78125, |
|
"learning_rate": 8.330227926806652e-06, |
|
"loss": 0.6211, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.6836363636363636, |
|
"grad_norm": 7.71875, |
|
"learning_rate": 8.319678010980883e-06, |
|
"loss": 0.7818, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.6852525252525252, |
|
"grad_norm": 5.5625, |
|
"learning_rate": 8.309101598817812e-06, |
|
"loss": 0.4556, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.6868686868686869, |
|
"grad_norm": 5.875, |
|
"learning_rate": 8.298498774734114e-06, |
|
"loss": 0.6321, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.6884848484848485, |
|
"grad_norm": 7.375, |
|
"learning_rate": 8.287869623357275e-06, |
|
"loss": 0.8331, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.6901010101010101, |
|
"grad_norm": 8.625, |
|
"learning_rate": 8.277214229524913e-06, |
|
"loss": 0.7505, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.6917171717171717, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 8.266532678284103e-06, |
|
"loss": 0.6113, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.6933333333333334, |
|
"grad_norm": 8.0, |
|
"learning_rate": 8.255825054890705e-06, |
|
"loss": 0.5578, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.694949494949495, |
|
"grad_norm": 9.0, |
|
"learning_rate": 8.245091444808663e-06, |
|
"loss": 1.0137, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.6965656565656566, |
|
"grad_norm": 7.34375, |
|
"learning_rate": 8.234331933709353e-06, |
|
"loss": 0.5748, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.6981818181818182, |
|
"grad_norm": 7.15625, |
|
"learning_rate": 8.223546607470863e-06, |
|
"loss": 0.6973, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.6997979797979798, |
|
"grad_norm": 8.375, |
|
"learning_rate": 8.212735552177347e-06, |
|
"loss": 0.6268, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7014141414141414, |
|
"grad_norm": 6.875, |
|
"learning_rate": 8.201898854118301e-06, |
|
"loss": 0.6871, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.703030303030303, |
|
"grad_norm": 9.5, |
|
"learning_rate": 8.191036599787908e-06, |
|
"loss": 0.8489, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7046464646464646, |
|
"grad_norm": 6.875, |
|
"learning_rate": 8.18014887588431e-06, |
|
"loss": 0.6502, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7062626262626263, |
|
"grad_norm": 6.21875, |
|
"learning_rate": 8.169235769308959e-06, |
|
"loss": 0.5236, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7078787878787879, |
|
"grad_norm": 10.4375, |
|
"learning_rate": 8.158297367165885e-06, |
|
"loss": 0.8233, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7094949494949495, |
|
"grad_norm": 9.3125, |
|
"learning_rate": 8.147333756761027e-06, |
|
"loss": 0.7471, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7111111111111111, |
|
"grad_norm": 9.1875, |
|
"learning_rate": 8.13634502560152e-06, |
|
"loss": 0.627, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7127272727272728, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 8.125331261395004e-06, |
|
"loss": 0.6162, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.7143434343434344, |
|
"grad_norm": 9.25, |
|
"learning_rate": 8.114292552048925e-06, |
|
"loss": 0.7379, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.7159595959595959, |
|
"grad_norm": 6.125, |
|
"learning_rate": 8.10322898566983e-06, |
|
"loss": 0.6932, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.7175757575757575, |
|
"grad_norm": 6.0, |
|
"learning_rate": 8.092140650562665e-06, |
|
"loss": 0.5414, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.7191919191919192, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 8.08102763523007e-06, |
|
"loss": 0.6522, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.7208080808080808, |
|
"grad_norm": 6.15625, |
|
"learning_rate": 8.069890028371672e-06, |
|
"loss": 0.5511, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.7224242424242424, |
|
"grad_norm": 8.5, |
|
"learning_rate": 8.058727918883376e-06, |
|
"loss": 0.9194, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.724040404040404, |
|
"grad_norm": 6.15625, |
|
"learning_rate": 8.047541395856661e-06, |
|
"loss": 0.5101, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.7256565656565657, |
|
"grad_norm": 6.78125, |
|
"learning_rate": 8.036330548577866e-06, |
|
"loss": 0.664, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 7.21875, |
|
"learning_rate": 8.025095466527468e-06, |
|
"loss": 0.6514, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.7288888888888889, |
|
"grad_norm": 8.25, |
|
"learning_rate": 8.013836239379388e-06, |
|
"loss": 0.8009, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.7305050505050505, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 8.002552957000254e-06, |
|
"loss": 0.5222, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.7321212121212122, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 7.991245709448697e-06, |
|
"loss": 0.6088, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.7337373737373737, |
|
"grad_norm": 7.65625, |
|
"learning_rate": 7.979914586974628e-06, |
|
"loss": 0.8188, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.7353535353535353, |
|
"grad_norm": 6.96875, |
|
"learning_rate": 7.968559680018518e-06, |
|
"loss": 0.696, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.7369696969696969, |
|
"grad_norm": 7.90625, |
|
"learning_rate": 7.957181079210676e-06, |
|
"loss": 0.7024, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.7385858585858586, |
|
"grad_norm": 9.25, |
|
"learning_rate": 7.945778875370527e-06, |
|
"loss": 0.9176, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.7402020202020202, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 7.934353159505885e-06, |
|
"loss": 0.9633, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.7418181818181818, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 7.922904022812228e-06, |
|
"loss": 0.6307, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.7434343434343434, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 7.911431556671967e-06, |
|
"loss": 0.6905, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.7450505050505051, |
|
"grad_norm": 6.875, |
|
"learning_rate": 7.899935852653723e-06, |
|
"loss": 0.4554, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.7466666666666667, |
|
"grad_norm": 6.21875, |
|
"learning_rate": 7.888417002511592e-06, |
|
"loss": 0.5635, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.7482828282828283, |
|
"grad_norm": 8.875, |
|
"learning_rate": 7.876875098184411e-06, |
|
"loss": 1.0392, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.74989898989899, |
|
"grad_norm": 5.875, |
|
"learning_rate": 7.865310231795026e-06, |
|
"loss": 0.5778, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.7515151515151515, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 7.853722495649558e-06, |
|
"loss": 0.6214, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.7531313131313131, |
|
"grad_norm": 7.1875, |
|
"learning_rate": 7.842111982236669e-06, |
|
"loss": 0.8281, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.7547474747474747, |
|
"grad_norm": 7.9375, |
|
"learning_rate": 7.83047878422681e-06, |
|
"loss": 0.7688, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.7563636363636363, |
|
"grad_norm": 9.0625, |
|
"learning_rate": 7.818822994471504e-06, |
|
"loss": 0.6517, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.757979797979798, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 7.807144706002582e-06, |
|
"loss": 0.7734, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.7595959595959596, |
|
"grad_norm": 7.4375, |
|
"learning_rate": 7.79544401203146e-06, |
|
"loss": 0.7312, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.7612121212121212, |
|
"grad_norm": 6.5625, |
|
"learning_rate": 7.783721005948374e-06, |
|
"loss": 0.8063, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.7628282828282829, |
|
"grad_norm": 7.96875, |
|
"learning_rate": 7.771975781321655e-06, |
|
"loss": 0.6811, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.7644444444444445, |
|
"grad_norm": 6.4375, |
|
"learning_rate": 7.760208431896971e-06, |
|
"loss": 0.866, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.7660606060606061, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 7.748419051596586e-06, |
|
"loss": 0.8808, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.7676767676767676, |
|
"grad_norm": 7.9375, |
|
"learning_rate": 7.736607734518593e-06, |
|
"loss": 0.9093, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.7692929292929293, |
|
"grad_norm": 8.3125, |
|
"learning_rate": 7.72477457493619e-06, |
|
"loss": 0.7615, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.7709090909090909, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 7.712919667296902e-06, |
|
"loss": 0.5422, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.7725252525252525, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 7.701043106221847e-06, |
|
"loss": 0.5013, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.7741414141414141, |
|
"grad_norm": 9.25, |
|
"learning_rate": 7.689144986504966e-06, |
|
"loss": 0.8879, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.7757575757575758, |
|
"grad_norm": 7.40625, |
|
"learning_rate": 7.677225403112277e-06, |
|
"loss": 0.5958, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.7773737373737374, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 7.665284451181106e-06, |
|
"loss": 0.5568, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.778989898989899, |
|
"grad_norm": 8.25, |
|
"learning_rate": 7.653322226019341e-06, |
|
"loss": 0.8891, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.7806060606060606, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 7.64133882310466e-06, |
|
"loss": 0.6576, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.7822222222222223, |
|
"grad_norm": 7.5, |
|
"learning_rate": 7.629334338083774e-06, |
|
"loss": 0.7541, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.7838383838383839, |
|
"grad_norm": 8.3125, |
|
"learning_rate": 7.617308866771667e-06, |
|
"loss": 0.9125, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.7854545454545454, |
|
"grad_norm": 7.25, |
|
"learning_rate": 7.605262505150819e-06, |
|
"loss": 0.7351, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.787070707070707, |
|
"grad_norm": 6.625, |
|
"learning_rate": 7.5931953493704545e-06, |
|
"loss": 0.5468, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.7886868686868687, |
|
"grad_norm": 7.21875, |
|
"learning_rate": 7.58110749574577e-06, |
|
"loss": 0.5995, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.7903030303030303, |
|
"grad_norm": 7.78125, |
|
"learning_rate": 7.568999040757157e-06, |
|
"loss": 0.6156, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.7919191919191919, |
|
"grad_norm": 6.46875, |
|
"learning_rate": 7.556870081049444e-06, |
|
"loss": 0.7599, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.7935353535353535, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 7.5447207134311195e-06, |
|
"loss": 0.7025, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.7951515151515152, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 7.532551034873558e-06, |
|
"loss": 0.5702, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.7951515151515152, |
|
"eval_loss": 0.7591387629508972, |
|
"eval_runtime": 40.0722, |
|
"eval_samples_per_second": 27.45, |
|
"eval_steps_per_second": 3.444, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.7967676767676768, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 7.520361142510248e-06, |
|
"loss": 0.6548, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.7983838383838384, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 7.50815113363602e-06, |
|
"loss": 0.8229, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 7.59375, |
|
"learning_rate": 7.49592110570626e-06, |
|
"loss": 1.0282, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.8016161616161617, |
|
"grad_norm": 9.375, |
|
"learning_rate": 7.483671156336142e-06, |
|
"loss": 0.9156, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.8032323232323232, |
|
"grad_norm": 7.25, |
|
"learning_rate": 7.471401383299847e-06, |
|
"loss": 0.6175, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.8048484848484848, |
|
"grad_norm": 7.78125, |
|
"learning_rate": 7.459111884529775e-06, |
|
"loss": 0.7255, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.8064646464646464, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 7.446802758115775e-06, |
|
"loss": 0.8077, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.8080808080808081, |
|
"grad_norm": 5.34375, |
|
"learning_rate": 7.43447410230435e-06, |
|
"loss": 0.7292, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.8096969696969697, |
|
"grad_norm": 7.71875, |
|
"learning_rate": 7.422126015497884e-06, |
|
"loss": 0.7707, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.8113131313131313, |
|
"grad_norm": 5.53125, |
|
"learning_rate": 7.409758596253849e-06, |
|
"loss": 0.5303, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.812929292929293, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 7.397371943284017e-06, |
|
"loss": 0.4893, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.8145454545454546, |
|
"grad_norm": 7.625, |
|
"learning_rate": 7.384966155453686e-06, |
|
"loss": 0.6416, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.8161616161616162, |
|
"grad_norm": 7.8125, |
|
"learning_rate": 7.372541331780871e-06, |
|
"loss": 0.8562, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.8177777777777778, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 7.360097571435527e-06, |
|
"loss": 0.6703, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.8193939393939393, |
|
"grad_norm": 7.6875, |
|
"learning_rate": 7.347634973738753e-06, |
|
"loss": 1.2725, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.821010101010101, |
|
"grad_norm": 6.125, |
|
"learning_rate": 7.335153638162005e-06, |
|
"loss": 0.6887, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.8226262626262626, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 7.322653664326289e-06, |
|
"loss": 0.7664, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.8242424242424242, |
|
"grad_norm": 8.25, |
|
"learning_rate": 7.310135152001381e-06, |
|
"loss": 0.9012, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.8258585858585858, |
|
"grad_norm": 6.96875, |
|
"learning_rate": 7.29759820110502e-06, |
|
"loss": 0.6616, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.8274747474747475, |
|
"grad_norm": 7.5625, |
|
"learning_rate": 7.285042911702116e-06, |
|
"loss": 0.6404, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.8290909090909091, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 7.27246938400395e-06, |
|
"loss": 0.437, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.8307070707070707, |
|
"grad_norm": 8.75, |
|
"learning_rate": 7.259877718367372e-06, |
|
"loss": 0.7375, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.8323232323232324, |
|
"grad_norm": 9.0, |
|
"learning_rate": 7.2472680152940015e-06, |
|
"loss": 0.5881, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.833939393939394, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 7.234640375429427e-06, |
|
"loss": 0.628, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.8355555555555556, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 7.2219948995624035e-06, |
|
"loss": 0.7878, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.8371717171717171, |
|
"grad_norm": 7.40625, |
|
"learning_rate": 7.209331688624039e-06, |
|
"loss": 0.5814, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.8387878787878787, |
|
"grad_norm": 7.71875, |
|
"learning_rate": 7.1966508436870044e-06, |
|
"loss": 1.1205, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.8404040404040404, |
|
"grad_norm": 7.34375, |
|
"learning_rate": 7.183952465964711e-06, |
|
"loss": 0.8673, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.842020202020202, |
|
"grad_norm": 6.375, |
|
"learning_rate": 7.171236656810513e-06, |
|
"loss": 0.4973, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.8436363636363636, |
|
"grad_norm": 7.59375, |
|
"learning_rate": 7.158503517716894e-06, |
|
"loss": 0.6818, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.8452525252525253, |
|
"grad_norm": 8.375, |
|
"learning_rate": 7.145753150314661e-06, |
|
"loss": 0.763, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.8468686868686869, |
|
"grad_norm": 8.4375, |
|
"learning_rate": 7.132985656372126e-06, |
|
"loss": 0.7879, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.8484848484848485, |
|
"grad_norm": 6.25, |
|
"learning_rate": 7.120201137794301e-06, |
|
"loss": 0.3823, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.8501010101010101, |
|
"grad_norm": 5.84375, |
|
"learning_rate": 7.1073996966220835e-06, |
|
"loss": 0.4849, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.8517171717171718, |
|
"grad_norm": 6.625, |
|
"learning_rate": 7.0945814350314346e-06, |
|
"loss": 0.5307, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.8533333333333334, |
|
"grad_norm": 10.1875, |
|
"learning_rate": 7.0817464553325764e-06, |
|
"loss": 0.8415, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.8549494949494949, |
|
"grad_norm": 7.5, |
|
"learning_rate": 7.068894859969162e-06, |
|
"loss": 0.9093, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.8565656565656565, |
|
"grad_norm": 8.0625, |
|
"learning_rate": 7.0560267515174685e-06, |
|
"loss": 0.6398, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.8581818181818182, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 7.043142232685572e-06, |
|
"loss": 0.7314, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.8597979797979798, |
|
"grad_norm": 7.0, |
|
"learning_rate": 7.030241406312528e-06, |
|
"loss": 0.6262, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.8614141414141414, |
|
"grad_norm": 6.3125, |
|
"learning_rate": 7.0173243753675544e-06, |
|
"loss": 0.7267, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.863030303030303, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 7.004391242949209e-06, |
|
"loss": 0.5482, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.8646464646464647, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 6.991442112284561e-06, |
|
"loss": 0.8512, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.8662626262626263, |
|
"grad_norm": 6.3125, |
|
"learning_rate": 6.978477086728375e-06, |
|
"loss": 0.4953, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.8678787878787879, |
|
"grad_norm": 7.71875, |
|
"learning_rate": 6.96549626976228e-06, |
|
"loss": 0.4961, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.8694949494949495, |
|
"grad_norm": 8.6875, |
|
"learning_rate": 6.952499764993945e-06, |
|
"loss": 0.9078, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.8711111111111111, |
|
"grad_norm": 7.59375, |
|
"learning_rate": 6.9394876761562555e-06, |
|
"loss": 0.631, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.8727272727272727, |
|
"grad_norm": 7.5625, |
|
"learning_rate": 6.926460107106483e-06, |
|
"loss": 0.6733, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.8743434343434343, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 6.913417161825449e-06, |
|
"loss": 0.8478, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.8759595959595959, |
|
"grad_norm": 5.71875, |
|
"learning_rate": 6.900358944416712e-06, |
|
"loss": 0.503, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.8775757575757576, |
|
"grad_norm": 5.84375, |
|
"learning_rate": 6.887285559105721e-06, |
|
"loss": 0.8665, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.8791919191919192, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 6.874197110238986e-06, |
|
"loss": 0.5614, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.8808080808080808, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 6.861093702283254e-06, |
|
"loss": 0.7636, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.8824242424242424, |
|
"grad_norm": 11.6875, |
|
"learning_rate": 6.847975439824669e-06, |
|
"loss": 0.74, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.8840404040404041, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 6.834842427567934e-06, |
|
"loss": 0.7544, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.8856565656565657, |
|
"grad_norm": 6.4375, |
|
"learning_rate": 6.8216947703354815e-06, |
|
"loss": 0.8037, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.8872727272727273, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 6.808532573066635e-06, |
|
"loss": 0.7922, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 6.78125, |
|
"learning_rate": 6.795355940816768e-06, |
|
"loss": 0.5742, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.8905050505050505, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 6.7821649787564706e-06, |
|
"loss": 0.6695, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.8921212121212121, |
|
"grad_norm": 10.6875, |
|
"learning_rate": 6.7689597921707065e-06, |
|
"loss": 0.8931, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.8937373737373737, |
|
"grad_norm": 7.6875, |
|
"learning_rate": 6.755740486457973e-06, |
|
"loss": 0.8255, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.8953535353535353, |
|
"grad_norm": 7.65625, |
|
"learning_rate": 6.742507167129465e-06, |
|
"loss": 0.5539, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.896969696969697, |
|
"grad_norm": 7.9375, |
|
"learning_rate": 6.729259939808223e-06, |
|
"loss": 0.6744, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.8985858585858586, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 6.715998910228296e-06, |
|
"loss": 0.5843, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.9002020202020202, |
|
"grad_norm": 6.4375, |
|
"learning_rate": 6.702724184233904e-06, |
|
"loss": 0.6715, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.9018181818181819, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 6.689435867778578e-06, |
|
"loss": 0.5671, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.9034343434343435, |
|
"grad_norm": 11.875, |
|
"learning_rate": 6.676134066924325e-06, |
|
"loss": 0.8063, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.9050505050505051, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 6.6628188878407806e-06, |
|
"loss": 0.4545, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.9066666666666666, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 6.64949043680436e-06, |
|
"loss": 0.7163, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.9082828282828282, |
|
"grad_norm": 8.4375, |
|
"learning_rate": 6.636148820197409e-06, |
|
"loss": 0.8048, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.9098989898989899, |
|
"grad_norm": 7.65625, |
|
"learning_rate": 6.622794144507356e-06, |
|
"loss": 0.7068, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.9115151515151515, |
|
"grad_norm": 8.75, |
|
"learning_rate": 6.609426516325859e-06, |
|
"loss": 0.5749, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.9131313131313131, |
|
"grad_norm": 7.375, |
|
"learning_rate": 6.596046042347964e-06, |
|
"loss": 0.5476, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.9147474747474748, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 6.58265282937124e-06, |
|
"loss": 0.9369, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.9163636363636364, |
|
"grad_norm": 9.75, |
|
"learning_rate": 6.569246984294938e-06, |
|
"loss": 0.9137, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.917979797979798, |
|
"grad_norm": 7.75, |
|
"learning_rate": 6.555828614119132e-06, |
|
"loss": 0.6025, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.9195959595959596, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 6.542397825943867e-06, |
|
"loss": 0.6785, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.9212121212121213, |
|
"grad_norm": 8.5, |
|
"learning_rate": 6.528954726968302e-06, |
|
"loss": 0.7209, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.9228282828282828, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 6.515499424489857e-06, |
|
"loss": 0.5772, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.9244444444444444, |
|
"grad_norm": 7.9375, |
|
"learning_rate": 6.502032025903356e-06, |
|
"loss": 0.7312, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.926060606060606, |
|
"grad_norm": 7.75, |
|
"learning_rate": 6.4885526387001654e-06, |
|
"loss": 0.8718, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.9276767676767677, |
|
"grad_norm": 6.25, |
|
"learning_rate": 6.475061370467346e-06, |
|
"loss": 0.7196, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.9292929292929293, |
|
"grad_norm": 7.84375, |
|
"learning_rate": 6.461558328886786e-06, |
|
"loss": 0.6602, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.9309090909090909, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 6.4480436217343366e-06, |
|
"loss": 0.6234, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.9325252525252525, |
|
"grad_norm": 7.25, |
|
"learning_rate": 6.434517356878974e-06, |
|
"loss": 0.5093, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.9341414141414142, |
|
"grad_norm": 8.875, |
|
"learning_rate": 6.420979642281909e-06, |
|
"loss": 0.7573, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.9357575757575758, |
|
"grad_norm": 7.625, |
|
"learning_rate": 6.4074305859957475e-06, |
|
"loss": 1.0258, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.9373737373737374, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 6.393870296163616e-06, |
|
"loss": 0.7432, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.938989898989899, |
|
"grad_norm": 8.8125, |
|
"learning_rate": 6.380298881018307e-06, |
|
"loss": 1.0014, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.9406060606060606, |
|
"grad_norm": 7.4375, |
|
"learning_rate": 6.366716448881407e-06, |
|
"loss": 0.7915, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.9422222222222222, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 6.353123108162435e-06, |
|
"loss": 0.5849, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.9438383838383838, |
|
"grad_norm": 5.65625, |
|
"learning_rate": 6.339518967357985e-06, |
|
"loss": 0.785, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.9454545454545454, |
|
"grad_norm": 7.125, |
|
"learning_rate": 6.325904135050844e-06, |
|
"loss": 0.5567, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.9470707070707071, |
|
"grad_norm": 7.125, |
|
"learning_rate": 6.312278719909138e-06, |
|
"loss": 0.8426, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.9486868686868687, |
|
"grad_norm": 6.625, |
|
"learning_rate": 6.298642830685464e-06, |
|
"loss": 0.6369, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.9503030303030303, |
|
"grad_norm": 7.34375, |
|
"learning_rate": 6.284996576216014e-06, |
|
"loss": 0.6441, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.9519191919191919, |
|
"grad_norm": 7.0, |
|
"learning_rate": 6.271340065419715e-06, |
|
"loss": 0.7032, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.9535353535353536, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 6.257673407297352e-06, |
|
"loss": 0.5248, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.9551515151515152, |
|
"grad_norm": 7.40625, |
|
"learning_rate": 6.243996710930705e-06, |
|
"loss": 0.5664, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.9567676767676768, |
|
"grad_norm": 7.3125, |
|
"learning_rate": 6.230310085481677e-06, |
|
"loss": 0.4842, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.9583838383838383, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 6.216613640191414e-06, |
|
"loss": 0.5453, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 8.0, |
|
"learning_rate": 6.2029074843794445e-06, |
|
"loss": 0.6635, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.9616161616161616, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 6.189191727442807e-06, |
|
"loss": 0.6053, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.9632323232323232, |
|
"grad_norm": 6.15625, |
|
"learning_rate": 6.175466478855161e-06, |
|
"loss": 0.5201, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.9648484848484848, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 6.1617318481659364e-06, |
|
"loss": 0.6634, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.9664646464646465, |
|
"grad_norm": 7.90625, |
|
"learning_rate": 6.147987944999441e-06, |
|
"loss": 0.7641, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.9680808080808081, |
|
"grad_norm": 7.3125, |
|
"learning_rate": 6.13423487905399e-06, |
|
"loss": 0.7179, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.9696969696969697, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 6.1204727601010396e-06, |
|
"loss": 0.7291, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.9713131313131314, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 6.106701697984296e-06, |
|
"loss": 0.7674, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.972929292929293, |
|
"grad_norm": 7.1875, |
|
"learning_rate": 6.092921802618849e-06, |
|
"loss": 0.7901, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.9745454545454545, |
|
"grad_norm": 5.75, |
|
"learning_rate": 6.079133183990292e-06, |
|
"loss": 0.6376, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.9761616161616161, |
|
"grad_norm": 7.125, |
|
"learning_rate": 6.065335952153846e-06, |
|
"loss": 0.7688, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.9777777777777777, |
|
"grad_norm": 8.8125, |
|
"learning_rate": 6.051530217233473e-06, |
|
"loss": 0.7441, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.9793939393939394, |
|
"grad_norm": 7.21875, |
|
"learning_rate": 6.037716089421011e-06, |
|
"loss": 0.5193, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.981010101010101, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 6.023893678975281e-06, |
|
"loss": 0.6732, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.9826262626262626, |
|
"grad_norm": 6.625, |
|
"learning_rate": 6.010063096221215e-06, |
|
"loss": 1.2178, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.9842424242424243, |
|
"grad_norm": 6.0, |
|
"learning_rate": 5.996224451548974e-06, |
|
"loss": 0.432, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.9858585858585859, |
|
"grad_norm": 6.875, |
|
"learning_rate": 5.982377855413063e-06, |
|
"loss": 0.6985, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.9874747474747475, |
|
"grad_norm": 6.15625, |
|
"learning_rate": 5.968523418331453e-06, |
|
"loss": 0.4962, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.9890909090909091, |
|
"grad_norm": 6.0, |
|
"learning_rate": 5.954661250884704e-06, |
|
"loss": 0.5834, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.9907070707070708, |
|
"grad_norm": 8.3125, |
|
"learning_rate": 5.940791463715068e-06, |
|
"loss": 0.7957, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.9923232323232323, |
|
"grad_norm": 8.25, |
|
"learning_rate": 5.926914167525618e-06, |
|
"loss": 0.6498, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.9939393939393939, |
|
"grad_norm": 6.96875, |
|
"learning_rate": 5.913029473079365e-06, |
|
"loss": 0.7897, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.9939393939393939, |
|
"eval_loss": 0.6975554823875427, |
|
"eval_runtime": 35.4933, |
|
"eval_samples_per_second": 30.992, |
|
"eval_steps_per_second": 3.888, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.9955555555555555, |
|
"grad_norm": 6.5, |
|
"learning_rate": 5.899137491198364e-06, |
|
"loss": 0.9353, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.9971717171717172, |
|
"grad_norm": 5.40625, |
|
"learning_rate": 5.885238332762837e-06, |
|
"loss": 0.6782, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.9987878787878788, |
|
"grad_norm": 6.75, |
|
"learning_rate": 5.871332108710292e-06, |
|
"loss": 0.6207, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.0004040404040404, |
|
"grad_norm": 5.21875, |
|
"learning_rate": 5.8574189300346215e-06, |
|
"loss": 0.4077, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.002020202020202, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 5.843498907785236e-06, |
|
"loss": 0.8129, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.0036363636363637, |
|
"grad_norm": 5.21875, |
|
"learning_rate": 5.829572153066166e-06, |
|
"loss": 0.3507, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.0052525252525253, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 5.815638777035175e-06, |
|
"loss": 0.4918, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.006868686868687, |
|
"grad_norm": 7.375, |
|
"learning_rate": 5.801698890902877e-06, |
|
"loss": 0.451, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.0084848484848485, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 5.78775260593185e-06, |
|
"loss": 0.5601, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.0101010101010102, |
|
"grad_norm": 12.6875, |
|
"learning_rate": 5.77380003343574e-06, |
|
"loss": 0.8753, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.0117171717171718, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 5.759841284778379e-06, |
|
"loss": 0.5141, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.0133333333333334, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 5.7458764713729e-06, |
|
"loss": 0.5174, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.014949494949495, |
|
"grad_norm": 8.875, |
|
"learning_rate": 5.731905704680834e-06, |
|
"loss": 0.3319, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.0165656565656567, |
|
"grad_norm": 9.875, |
|
"learning_rate": 5.717929096211232e-06, |
|
"loss": 0.4503, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.018181818181818, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 5.703946757519777e-06, |
|
"loss": 0.3714, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.0197979797979797, |
|
"grad_norm": 9.9375, |
|
"learning_rate": 5.689958800207879e-06, |
|
"loss": 0.42, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.0214141414141413, |
|
"grad_norm": 10.625, |
|
"learning_rate": 5.6759653359218e-06, |
|
"loss": 0.6338, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.023030303030303, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 5.661966476351756e-06, |
|
"loss": 0.4457, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.0246464646464646, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 5.647962333231021e-06, |
|
"loss": 0.4263, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.0262626262626262, |
|
"grad_norm": 8.5, |
|
"learning_rate": 5.633953018335047e-06, |
|
"loss": 0.6676, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.0278787878787878, |
|
"grad_norm": 8.3125, |
|
"learning_rate": 5.6199386434805615e-06, |
|
"loss": 0.4648, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.0294949494949495, |
|
"grad_norm": 7.1875, |
|
"learning_rate": 5.605919320524677e-06, |
|
"loss": 0.4956, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.031111111111111, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 5.591895161364006e-06, |
|
"loss": 0.4562, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.0327272727272727, |
|
"grad_norm": 7.0, |
|
"learning_rate": 5.577866277933752e-06, |
|
"loss": 0.3596, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.0343434343434343, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 5.563832782206835e-06, |
|
"loss": 0.5325, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.035959595959596, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 5.549794786192984e-06, |
|
"loss": 0.3813, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.0375757575757576, |
|
"grad_norm": 7.15625, |
|
"learning_rate": 5.535752401937846e-06, |
|
"loss": 0.5279, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.0391919191919192, |
|
"grad_norm": 6.625, |
|
"learning_rate": 5.521705741522096e-06, |
|
"loss": 0.4679, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.0408080808080808, |
|
"grad_norm": 6.5, |
|
"learning_rate": 5.507654917060541e-06, |
|
"loss": 0.396, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.0424242424242425, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 5.49360004070122e-06, |
|
"loss": 0.5639, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.044040404040404, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 5.4795412246245126e-06, |
|
"loss": 0.5076, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.0456565656565657, |
|
"grad_norm": 6.15625, |
|
"learning_rate": 5.465478581042248e-06, |
|
"loss": 0.4221, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.0472727272727274, |
|
"grad_norm": 6.0625, |
|
"learning_rate": 5.451412222196801e-06, |
|
"loss": 0.5823, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.048888888888889, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 5.4373422603602025e-06, |
|
"loss": 0.5174, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.0505050505050506, |
|
"grad_norm": 7.3125, |
|
"learning_rate": 5.4232688078332384e-06, |
|
"loss": 0.4169, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.0521212121212122, |
|
"grad_norm": 7.625, |
|
"learning_rate": 5.409191976944559e-06, |
|
"loss": 0.8318, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.0537373737373736, |
|
"grad_norm": 5.28125, |
|
"learning_rate": 5.395111880049775e-06, |
|
"loss": 0.2674, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.0553535353535353, |
|
"grad_norm": 6.5625, |
|
"learning_rate": 5.3810286295305715e-06, |
|
"loss": 0.5487, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.056969696969697, |
|
"grad_norm": 7.125, |
|
"learning_rate": 5.366942337793798e-06, |
|
"loss": 0.4007, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.0585858585858585, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 5.352853117270581e-06, |
|
"loss": 0.7315, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.0602020202020201, |
|
"grad_norm": 6.5, |
|
"learning_rate": 5.338761080415425e-06, |
|
"loss": 0.502, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.0618181818181818, |
|
"grad_norm": 7.3125, |
|
"learning_rate": 5.324666339705307e-06, |
|
"loss": 0.4223, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.0634343434343434, |
|
"grad_norm": 7.25, |
|
"learning_rate": 5.310569007638795e-06, |
|
"loss": 0.5229, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.065050505050505, |
|
"grad_norm": 7.15625, |
|
"learning_rate": 5.296469196735134e-06, |
|
"loss": 0.7768, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.0666666666666667, |
|
"grad_norm": 5.6875, |
|
"learning_rate": 5.28236701953335e-06, |
|
"loss": 0.4436, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.0682828282828283, |
|
"grad_norm": 6.0, |
|
"learning_rate": 5.268262588591364e-06, |
|
"loss": 0.5257, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.06989898989899, |
|
"grad_norm": 7.96875, |
|
"learning_rate": 5.254156016485082e-06, |
|
"loss": 0.571, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.0715151515151515, |
|
"grad_norm": 6.09375, |
|
"learning_rate": 5.240047415807499e-06, |
|
"loss": 0.5387, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.0731313131313132, |
|
"grad_norm": 7.1875, |
|
"learning_rate": 5.225936899167803e-06, |
|
"loss": 0.8149, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.0747474747474748, |
|
"grad_norm": 5.6875, |
|
"learning_rate": 5.211824579190473e-06, |
|
"loss": 0.5172, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.0763636363636364, |
|
"grad_norm": 5.03125, |
|
"learning_rate": 5.197710568514381e-06, |
|
"loss": 0.2866, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.077979797979798, |
|
"grad_norm": 6.5625, |
|
"learning_rate": 5.183594979791896e-06, |
|
"loss": 0.3989, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.0795959595959597, |
|
"grad_norm": 5.09375, |
|
"learning_rate": 5.169477925687981e-06, |
|
"loss": 0.4623, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.0812121212121213, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 5.155359518879295e-06, |
|
"loss": 0.535, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.082828282828283, |
|
"grad_norm": 6.375, |
|
"learning_rate": 5.141239872053294e-06, |
|
"loss": 0.4243, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.0844444444444445, |
|
"grad_norm": 5.96875, |
|
"learning_rate": 5.1271190979073296e-06, |
|
"loss": 0.4805, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.086060606060606, |
|
"grad_norm": 5.375, |
|
"learning_rate": 5.112997309147753e-06, |
|
"loss": 0.3017, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.0876767676767676, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 5.098874618489017e-06, |
|
"loss": 0.3953, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.0892929292929292, |
|
"grad_norm": 6.40625, |
|
"learning_rate": 5.084751138652764e-06, |
|
"loss": 0.4452, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.0909090909090908, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 5.070626982366943e-06, |
|
"loss": 0.6184, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.0925252525252525, |
|
"grad_norm": 7.3125, |
|
"learning_rate": 5.0565022623649e-06, |
|
"loss": 0.5576, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.094141414141414, |
|
"grad_norm": 6.75, |
|
"learning_rate": 5.0423770913844795e-06, |
|
"loss": 0.4086, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.0957575757575757, |
|
"grad_norm": 9.625, |
|
"learning_rate": 5.028251582167126e-06, |
|
"loss": 0.6037, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.0973737373737373, |
|
"grad_norm": 7.84375, |
|
"learning_rate": 5.0141258474569855e-06, |
|
"loss": 0.6863, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.098989898989899, |
|
"grad_norm": 7.4375, |
|
"learning_rate": 5e-06, |
|
"loss": 0.4384, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.1006060606060606, |
|
"grad_norm": 5.96875, |
|
"learning_rate": 4.985874152543015e-06, |
|
"loss": 0.5086, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.1022222222222222, |
|
"grad_norm": 7.75, |
|
"learning_rate": 4.971748417832876e-06, |
|
"loss": 0.4695, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.1038383838383838, |
|
"grad_norm": 7.78125, |
|
"learning_rate": 4.957622908615522e-06, |
|
"loss": 0.466, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.1054545454545455, |
|
"grad_norm": 5.78125, |
|
"learning_rate": 4.943497737635103e-06, |
|
"loss": 0.5558, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.107070707070707, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 4.92937301763306e-06, |
|
"loss": 0.3521, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.1086868686868687, |
|
"grad_norm": 7.21875, |
|
"learning_rate": 4.915248861347239e-06, |
|
"loss": 0.5391, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.1103030303030303, |
|
"grad_norm": 7.40625, |
|
"learning_rate": 4.901125381510986e-06, |
|
"loss": 0.4364, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.111919191919192, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 4.887002690852249e-06, |
|
"loss": 0.5012, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.1135353535353536, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 4.872880902092673e-06, |
|
"loss": 0.4982, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.1151515151515152, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 4.858760127946707e-06, |
|
"loss": 0.4903, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.1167676767676769, |
|
"grad_norm": 8.0625, |
|
"learning_rate": 4.8446404811207055e-06, |
|
"loss": 0.6549, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.1183838383838385, |
|
"grad_norm": 7.0, |
|
"learning_rate": 4.830522074312019e-06, |
|
"loss": 0.6238, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 6.125, |
|
"learning_rate": 4.816405020208105e-06, |
|
"loss": 0.2768, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.1216161616161617, |
|
"grad_norm": 5.875, |
|
"learning_rate": 4.80228943148562e-06, |
|
"loss": 0.3653, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.1232323232323231, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 4.788175420809528e-06, |
|
"loss": 0.4203, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.1248484848484848, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 4.774063100832199e-06, |
|
"loss": 0.3433, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.1264646464646464, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 4.759952584192502e-06, |
|
"loss": 0.3582, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.128080808080808, |
|
"grad_norm": 5.84375, |
|
"learning_rate": 4.745843983514919e-06, |
|
"loss": 0.3239, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.1296969696969696, |
|
"grad_norm": 7.78125, |
|
"learning_rate": 4.731737411408637e-06, |
|
"loss": 0.6284, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.1313131313131313, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 4.717632980466652e-06, |
|
"loss": 0.4641, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.132929292929293, |
|
"grad_norm": 7.96875, |
|
"learning_rate": 4.703530803264868e-06, |
|
"loss": 0.4764, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.1345454545454545, |
|
"grad_norm": 6.375, |
|
"learning_rate": 4.6894309923612055e-06, |
|
"loss": 0.3617, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.1361616161616162, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 4.675333660294693e-06, |
|
"loss": 0.3734, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.1377777777777778, |
|
"grad_norm": 7.5, |
|
"learning_rate": 4.661238919584578e-06, |
|
"loss": 0.789, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.1393939393939394, |
|
"grad_norm": 9.4375, |
|
"learning_rate": 4.647146882729422e-06, |
|
"loss": 0.8066, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.141010101010101, |
|
"grad_norm": 7.78125, |
|
"learning_rate": 4.633057662206205e-06, |
|
"loss": 0.5443, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.1426262626262627, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 4.618971370469431e-06, |
|
"loss": 0.5255, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.1442424242424243, |
|
"grad_norm": 7.65625, |
|
"learning_rate": 4.6048881199502265e-06, |
|
"loss": 0.4648, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.145858585858586, |
|
"grad_norm": 7.21875, |
|
"learning_rate": 4.590808023055444e-06, |
|
"loss": 0.4431, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.1474747474747475, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 4.576731192166762e-06, |
|
"loss": 0.4793, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.1490909090909092, |
|
"grad_norm": 6.46875, |
|
"learning_rate": 4.562657739639798e-06, |
|
"loss": 0.467, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.1507070707070708, |
|
"grad_norm": 5.96875, |
|
"learning_rate": 4.548587777803198e-06, |
|
"loss": 0.6655, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.1523232323232324, |
|
"grad_norm": 7.125, |
|
"learning_rate": 4.534521418957753e-06, |
|
"loss": 0.4943, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.1539393939393938, |
|
"grad_norm": 8.8125, |
|
"learning_rate": 4.520458775375488e-06, |
|
"loss": 0.4532, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.1555555555555554, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 4.506399959298781e-06, |
|
"loss": 0.7173, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.157171717171717, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 4.49234508293946e-06, |
|
"loss": 0.4683, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.1587878787878787, |
|
"grad_norm": 5.84375, |
|
"learning_rate": 4.4782942584779046e-06, |
|
"loss": 0.3148, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.1604040404040403, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 4.464247598062156e-06, |
|
"loss": 0.4546, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.162020202020202, |
|
"grad_norm": 7.0, |
|
"learning_rate": 4.450205213807018e-06, |
|
"loss": 0.4984, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.1636363636363636, |
|
"grad_norm": 5.46875, |
|
"learning_rate": 4.436167217793167e-06, |
|
"loss": 0.445, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.1652525252525252, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 4.4221337220662495e-06, |
|
"loss": 0.5306, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.1668686868686868, |
|
"grad_norm": 8.6875, |
|
"learning_rate": 4.408104838635996e-06, |
|
"loss": 0.974, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.1684848484848485, |
|
"grad_norm": 5.5625, |
|
"learning_rate": 4.394080679475324e-06, |
|
"loss": 0.3139, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.17010101010101, |
|
"grad_norm": 6.0625, |
|
"learning_rate": 4.38006135651944e-06, |
|
"loss": 0.5718, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.1717171717171717, |
|
"grad_norm": 6.40625, |
|
"learning_rate": 4.366046981664954e-06, |
|
"loss": 0.6332, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.1733333333333333, |
|
"grad_norm": 7.15625, |
|
"learning_rate": 4.3520376667689815e-06, |
|
"loss": 0.7909, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.174949494949495, |
|
"grad_norm": 6.40625, |
|
"learning_rate": 4.338033523648247e-06, |
|
"loss": 0.4901, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.1765656565656566, |
|
"grad_norm": 6.375, |
|
"learning_rate": 4.3240346640782014e-06, |
|
"loss": 0.4123, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.1781818181818182, |
|
"grad_norm": 7.625, |
|
"learning_rate": 4.3100411997921235e-06, |
|
"loss": 0.6332, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.1797979797979798, |
|
"grad_norm": 5.71875, |
|
"learning_rate": 4.2960532424802235e-06, |
|
"loss": 0.4957, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.1814141414141415, |
|
"grad_norm": 7.59375, |
|
"learning_rate": 4.282070903788767e-06, |
|
"loss": 0.5822, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.183030303030303, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 4.268094295319167e-06, |
|
"loss": 0.549, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.1846464646464647, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 4.2541235286271e-06, |
|
"loss": 0.5147, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.1862626262626264, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 4.240158715221621e-06, |
|
"loss": 0.6269, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.187878787878788, |
|
"grad_norm": 6.09375, |
|
"learning_rate": 4.2261999665642615e-06, |
|
"loss": 0.3747, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.1894949494949496, |
|
"grad_norm": 6.25, |
|
"learning_rate": 4.212247394068151e-06, |
|
"loss": 0.5491, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.1911111111111112, |
|
"grad_norm": 5.3125, |
|
"learning_rate": 4.198301109097124e-06, |
|
"loss": 0.4715, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.1927272727272726, |
|
"grad_norm": 14.9375, |
|
"learning_rate": 4.184361222964827e-06, |
|
"loss": 0.5872, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.1927272727272726, |
|
"eval_loss": 0.676762580871582, |
|
"eval_runtime": 35.4724, |
|
"eval_samples_per_second": 31.01, |
|
"eval_steps_per_second": 3.89, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.1943434343434343, |
|
"grad_norm": 6.03125, |
|
"learning_rate": 4.170427846933835e-06, |
|
"loss": 0.4474, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.195959595959596, |
|
"grad_norm": 5.78125, |
|
"learning_rate": 4.1565010922147644e-06, |
|
"loss": 0.2681, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.1975757575757575, |
|
"grad_norm": 7.5, |
|
"learning_rate": 4.142581069965379e-06, |
|
"loss": 0.4707, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.1991919191919191, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 4.1286678912897095e-06, |
|
"loss": 0.5952, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.2008080808080808, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 4.114761667237164e-06, |
|
"loss": 0.4401, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.2024242424242424, |
|
"grad_norm": 6.875, |
|
"learning_rate": 4.100862508801639e-06, |
|
"loss": 0.4933, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.204040404040404, |
|
"grad_norm": 6.09375, |
|
"learning_rate": 4.086970526920638e-06, |
|
"loss": 0.4491, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.2056565656565656, |
|
"grad_norm": 6.1875, |
|
"learning_rate": 4.0730858324743845e-06, |
|
"loss": 0.2304, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.2072727272727273, |
|
"grad_norm": 6.96875, |
|
"learning_rate": 4.059208536284936e-06, |
|
"loss": 0.492, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.208888888888889, |
|
"grad_norm": 7.125, |
|
"learning_rate": 4.045338749115299e-06, |
|
"loss": 0.4473, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.2105050505050505, |
|
"grad_norm": 7.25, |
|
"learning_rate": 4.031476581668549e-06, |
|
"loss": 0.5541, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.2121212121212122, |
|
"grad_norm": 7.3125, |
|
"learning_rate": 4.017622144586938e-06, |
|
"loss": 0.5086, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.2137373737373738, |
|
"grad_norm": 10.4375, |
|
"learning_rate": 4.003775548451028e-06, |
|
"loss": 0.4265, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.2153535353535354, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 3.989936903778785e-06, |
|
"loss": 0.5193, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.216969696969697, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 3.97610632102472e-06, |
|
"loss": 0.4394, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.2185858585858587, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 3.962283910578991e-06, |
|
"loss": 0.7359, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.2202020202020203, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 3.948469782766528e-06, |
|
"loss": 0.4886, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.221818181818182, |
|
"grad_norm": 7.1875, |
|
"learning_rate": 3.934664047846157e-06, |
|
"loss": 0.4353, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.2234343434343433, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 3.9208668160097095e-06, |
|
"loss": 0.5272, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.225050505050505, |
|
"grad_norm": 6.03125, |
|
"learning_rate": 3.907078197381153e-06, |
|
"loss": 0.4523, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.2266666666666666, |
|
"grad_norm": 10.625, |
|
"learning_rate": 3.893298302015705e-06, |
|
"loss": 0.6854, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.2282828282828282, |
|
"grad_norm": 6.75, |
|
"learning_rate": 3.879527239898962e-06, |
|
"loss": 0.3579, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.2298989898989898, |
|
"grad_norm": 5.3125, |
|
"learning_rate": 3.8657651209460104e-06, |
|
"loss": 0.2616, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.2315151515151515, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 3.85201205500056e-06, |
|
"loss": 0.5633, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.233131313131313, |
|
"grad_norm": 6.03125, |
|
"learning_rate": 3.838268151834065e-06, |
|
"loss": 0.3226, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.2347474747474747, |
|
"grad_norm": 13.3125, |
|
"learning_rate": 3.8245335211448404e-06, |
|
"loss": 0.4658, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.2363636363636363, |
|
"grad_norm": 6.125, |
|
"learning_rate": 3.810808272557196e-06, |
|
"loss": 0.493, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.237979797979798, |
|
"grad_norm": 6.46875, |
|
"learning_rate": 3.797092515620557e-06, |
|
"loss": 0.4253, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.2395959595959596, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 3.7833863598085897e-06, |
|
"loss": 0.4679, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.2412121212121212, |
|
"grad_norm": 8.5, |
|
"learning_rate": 3.769689914518326e-06, |
|
"loss": 0.7057, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.2428282828282828, |
|
"grad_norm": 8.25, |
|
"learning_rate": 3.7560032890692967e-06, |
|
"loss": 0.4716, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.2444444444444445, |
|
"grad_norm": 7.9375, |
|
"learning_rate": 3.7423265927026473e-06, |
|
"loss": 0.9959, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.246060606060606, |
|
"grad_norm": 5.65625, |
|
"learning_rate": 3.728659934580286e-06, |
|
"loss": 0.3721, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.2476767676767677, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 3.715003423783986e-06, |
|
"loss": 0.454, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.2492929292929293, |
|
"grad_norm": 11.25, |
|
"learning_rate": 3.701357169314536e-06, |
|
"loss": 0.7889, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.250909090909091, |
|
"grad_norm": 6.5, |
|
"learning_rate": 3.6877212800908625e-06, |
|
"loss": 0.8363, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.2525252525252526, |
|
"grad_norm": 7.59375, |
|
"learning_rate": 3.6740958649491575e-06, |
|
"loss": 0.8208, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.2541414141414142, |
|
"grad_norm": 6.4375, |
|
"learning_rate": 3.660481032642016e-06, |
|
"loss": 0.5369, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.2557575757575759, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 3.646876891837566e-06, |
|
"loss": 0.3812, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.2573737373737375, |
|
"grad_norm": 5.96875, |
|
"learning_rate": 3.633283551118595e-06, |
|
"loss": 0.401, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.258989898989899, |
|
"grad_norm": 6.3125, |
|
"learning_rate": 3.6197011189816945e-06, |
|
"loss": 0.5256, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.2606060606060607, |
|
"grad_norm": 5.5, |
|
"learning_rate": 3.6061297038363853e-06, |
|
"loss": 0.3921, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.2622222222222224, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 3.5925694140042546e-06, |
|
"loss": 0.5519, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.2638383838383838, |
|
"grad_norm": 6.03125, |
|
"learning_rate": 3.579020357718092e-06, |
|
"loss": 0.6655, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.2654545454545454, |
|
"grad_norm": 6.0625, |
|
"learning_rate": 3.5654826431210273e-06, |
|
"loss": 0.7744, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.267070707070707, |
|
"grad_norm": 5.5625, |
|
"learning_rate": 3.5519563782656642e-06, |
|
"loss": 0.4487, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.2686868686868686, |
|
"grad_norm": 5.625, |
|
"learning_rate": 3.5384416711132173e-06, |
|
"loss": 0.449, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.2703030303030303, |
|
"grad_norm": 7.5, |
|
"learning_rate": 3.524938629532656e-06, |
|
"loss": 0.5011, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.271919191919192, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 3.5114473612998367e-06, |
|
"loss": 0.5925, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.2735353535353535, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 3.497967974096647e-06, |
|
"loss": 0.4433, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.2751515151515151, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 3.4845005755101446e-06, |
|
"loss": 0.4608, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.2767676767676768, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 3.4710452730316978e-06, |
|
"loss": 0.5457, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.2783838383838384, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 3.457602174056134e-06, |
|
"loss": 0.5484, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 3.4441713858808684e-06, |
|
"loss": 0.4372, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.2816161616161617, |
|
"grad_norm": 5.21875, |
|
"learning_rate": 3.4307530157050624e-06, |
|
"loss": 0.4388, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.2832323232323233, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 3.4173471706287607e-06, |
|
"loss": 0.7221, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.284848484848485, |
|
"grad_norm": 6.0625, |
|
"learning_rate": 3.4039539576520376e-06, |
|
"loss": 0.4252, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.2864646464646465, |
|
"grad_norm": 6.125, |
|
"learning_rate": 3.3905734836741415e-06, |
|
"loss": 0.5235, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.2880808080808082, |
|
"grad_norm": 7.0, |
|
"learning_rate": 3.3772058554926456e-06, |
|
"loss": 0.5727, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.2896969696969696, |
|
"grad_norm": 5.0, |
|
"learning_rate": 3.3638511798025927e-06, |
|
"loss": 0.4155, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.2913131313131312, |
|
"grad_norm": 7.8125, |
|
"learning_rate": 3.350509563195641e-06, |
|
"loss": 0.4508, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.2929292929292928, |
|
"grad_norm": 7.15625, |
|
"learning_rate": 3.3371811121592203e-06, |
|
"loss": 0.5727, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.2945454545454544, |
|
"grad_norm": 5.75, |
|
"learning_rate": 3.3238659330756774e-06, |
|
"loss": 0.4035, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.296161616161616, |
|
"grad_norm": 8.0, |
|
"learning_rate": 3.3105641322214238e-06, |
|
"loss": 0.7284, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.2977777777777777, |
|
"grad_norm": 5.75, |
|
"learning_rate": 3.2972758157660966e-06, |
|
"loss": 0.3584, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.2993939393939393, |
|
"grad_norm": 5.125, |
|
"learning_rate": 3.2840010897717045e-06, |
|
"loss": 0.4011, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.301010101010101, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 3.270740060191779e-06, |
|
"loss": 0.4707, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.3026262626262626, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 3.257492832870537e-06, |
|
"loss": 0.5322, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.3042424242424242, |
|
"grad_norm": 7.375, |
|
"learning_rate": 3.2442595135420287e-06, |
|
"loss": 0.5998, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.3058585858585858, |
|
"grad_norm": 6.03125, |
|
"learning_rate": 3.2310402078292956e-06, |
|
"loss": 0.4243, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.3074747474747475, |
|
"grad_norm": 6.09375, |
|
"learning_rate": 3.2178350212435315e-06, |
|
"loss": 0.3165, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.309090909090909, |
|
"grad_norm": 5.71875, |
|
"learning_rate": 3.2046440591832323e-06, |
|
"loss": 0.5172, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.3107070707070707, |
|
"grad_norm": 6.40625, |
|
"learning_rate": 3.191467426933365e-06, |
|
"loss": 0.5222, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.3123232323232323, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 3.178305229664519e-06, |
|
"loss": 0.4126, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.313939393939394, |
|
"grad_norm": 5.0625, |
|
"learning_rate": 3.1651575724320665e-06, |
|
"loss": 0.2886, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.3155555555555556, |
|
"grad_norm": 6.3125, |
|
"learning_rate": 3.152024560175332e-06, |
|
"loss": 0.3833, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.3171717171717172, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 3.1389062977167474e-06, |
|
"loss": 0.3823, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.3187878787878788, |
|
"grad_norm": 7.5, |
|
"learning_rate": 3.125802889761016e-06, |
|
"loss": 0.5198, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.3204040404040405, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 3.112714440894281e-06, |
|
"loss": 0.3382, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.322020202020202, |
|
"grad_norm": 8.125, |
|
"learning_rate": 3.099641055583289e-06, |
|
"loss": 0.6866, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.3236363636363637, |
|
"grad_norm": 6.5, |
|
"learning_rate": 3.0865828381745515e-06, |
|
"loss": 0.3631, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.3252525252525253, |
|
"grad_norm": 9.375, |
|
"learning_rate": 3.073539892893519e-06, |
|
"loss": 0.5295, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.326868686868687, |
|
"grad_norm": 7.34375, |
|
"learning_rate": 3.0605123238437457e-06, |
|
"loss": 0.4532, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.3284848484848486, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 3.047500235006056e-06, |
|
"loss": 0.4369, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.3301010101010102, |
|
"grad_norm": 6.625, |
|
"learning_rate": 3.0345037302377216e-06, |
|
"loss": 0.3557, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.3317171717171719, |
|
"grad_norm": 7.84375, |
|
"learning_rate": 3.021522913271627e-06, |
|
"loss": 0.7774, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.3333333333333333, |
|
"grad_norm": 6.375, |
|
"learning_rate": 3.008557887715441e-06, |
|
"loss": 0.5469, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.3349494949494949, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 2.995608757050793e-06, |
|
"loss": 0.5189, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.3365656565656565, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 2.9826756246324477e-06, |
|
"loss": 0.4486, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.3381818181818181, |
|
"grad_norm": 7.1875, |
|
"learning_rate": 2.969758593687475e-06, |
|
"loss": 0.9107, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.3397979797979798, |
|
"grad_norm": 11.375, |
|
"learning_rate": 2.95685776731443e-06, |
|
"loss": 0.9758, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.3414141414141414, |
|
"grad_norm": 8.625, |
|
"learning_rate": 2.9439732484825323e-06, |
|
"loss": 0.3861, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.343030303030303, |
|
"grad_norm": 7.4375, |
|
"learning_rate": 2.931105140030838e-06, |
|
"loss": 0.3662, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.3446464646464646, |
|
"grad_norm": 6.75, |
|
"learning_rate": 2.9182535446674244e-06, |
|
"loss": 0.4409, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.3462626262626263, |
|
"grad_norm": 9.4375, |
|
"learning_rate": 2.905418564968565e-06, |
|
"loss": 0.4852, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.347878787878788, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 2.8926003033779194e-06, |
|
"loss": 0.6186, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.3494949494949495, |
|
"grad_norm": 13.4375, |
|
"learning_rate": 2.8797988622057005e-06, |
|
"loss": 0.3766, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.3511111111111112, |
|
"grad_norm": 11.1875, |
|
"learning_rate": 2.8670143436278757e-06, |
|
"loss": 0.5832, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.3527272727272728, |
|
"grad_norm": 13.625, |
|
"learning_rate": 2.8542468496853404e-06, |
|
"loss": 0.3591, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.3543434343434344, |
|
"grad_norm": 8.625, |
|
"learning_rate": 2.8414964822831063e-06, |
|
"loss": 0.7896, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.355959595959596, |
|
"grad_norm": 9.4375, |
|
"learning_rate": 2.8287633431894877e-06, |
|
"loss": 0.8662, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.3575757575757577, |
|
"grad_norm": 8.75, |
|
"learning_rate": 2.8160475340352913e-06, |
|
"loss": 0.4789, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.359191919191919, |
|
"grad_norm": 6.78125, |
|
"learning_rate": 2.8033491563129977e-06, |
|
"loss": 0.5332, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.3608080808080807, |
|
"grad_norm": 6.78125, |
|
"learning_rate": 2.790668311375962e-06, |
|
"loss": 0.5736, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.3624242424242423, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 2.7780051004375986e-06, |
|
"loss": 0.3348, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.364040404040404, |
|
"grad_norm": 9.3125, |
|
"learning_rate": 2.765359624570574e-06, |
|
"loss": 0.5082, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.3656565656565656, |
|
"grad_norm": 11.125, |
|
"learning_rate": 2.752731984706e-06, |
|
"loss": 0.4075, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.3672727272727272, |
|
"grad_norm": 8.5, |
|
"learning_rate": 2.7401222816326316e-06, |
|
"loss": 0.5683, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.3688888888888888, |
|
"grad_norm": 5.34375, |
|
"learning_rate": 2.7275306159960522e-06, |
|
"loss": 0.2826, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.3705050505050504, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 2.714957088297886e-06, |
|
"loss": 0.3661, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.372121212121212, |
|
"grad_norm": 6.40625, |
|
"learning_rate": 2.7024017988949802e-06, |
|
"loss": 0.9216, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.3737373737373737, |
|
"grad_norm": 7.5, |
|
"learning_rate": 2.6898648479986187e-06, |
|
"loss": 0.5641, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.3753535353535353, |
|
"grad_norm": 7.15625, |
|
"learning_rate": 2.677346335673713e-06, |
|
"loss": 0.4403, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.376969696969697, |
|
"grad_norm": 5.59375, |
|
"learning_rate": 2.664846361837997e-06, |
|
"loss": 0.3652, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.3785858585858586, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 2.6523650262612476e-06, |
|
"loss": 0.4473, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.3802020202020202, |
|
"grad_norm": 8.25, |
|
"learning_rate": 2.639902428564475e-06, |
|
"loss": 0.6482, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.3818181818181818, |
|
"grad_norm": 6.5625, |
|
"learning_rate": 2.6274586682191306e-06, |
|
"loss": 0.3526, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.3834343434343435, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 2.6150338445463146e-06, |
|
"loss": 0.4567, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.385050505050505, |
|
"grad_norm": 5.5625, |
|
"learning_rate": 2.6026280567159847e-06, |
|
"loss": 0.4546, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.3866666666666667, |
|
"grad_norm": 6.375, |
|
"learning_rate": 2.5902414037461544e-06, |
|
"loss": 0.355, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.3882828282828283, |
|
"grad_norm": 9.0, |
|
"learning_rate": 2.5778739845021177e-06, |
|
"loss": 0.6155, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.38989898989899, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 2.565525897695651e-06, |
|
"loss": 0.5631, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.3915151515151516, |
|
"grad_norm": 7.0, |
|
"learning_rate": 2.5531972418842266e-06, |
|
"loss": 0.4242, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.3915151515151516, |
|
"eval_loss": 0.6649448275566101, |
|
"eval_runtime": 35.1088, |
|
"eval_samples_per_second": 31.331, |
|
"eval_steps_per_second": 3.931, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.3931313131313132, |
|
"grad_norm": 7.125, |
|
"learning_rate": 2.540888115470226e-06, |
|
"loss": 0.6219, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.3947474747474748, |
|
"grad_norm": 9.5, |
|
"learning_rate": 2.5285986167001565e-06, |
|
"loss": 0.3452, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.3963636363636365, |
|
"grad_norm": 7.78125, |
|
"learning_rate": 2.51632884366386e-06, |
|
"loss": 0.5206, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.397979797979798, |
|
"grad_norm": 5.40625, |
|
"learning_rate": 2.5040788942937423e-06, |
|
"loss": 0.3927, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.3995959595959597, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 2.4918488663639824e-06, |
|
"loss": 0.3102, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.4012121212121211, |
|
"grad_norm": 7.21875, |
|
"learning_rate": 2.479638857489753e-06, |
|
"loss": 0.4863, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.4028282828282828, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 2.4674489651264433e-06, |
|
"loss": 0.415, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.4044444444444444, |
|
"grad_norm": 7.5, |
|
"learning_rate": 2.4552792865688814e-06, |
|
"loss": 0.4191, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.406060606060606, |
|
"grad_norm": 6.21875, |
|
"learning_rate": 2.4431299189505563e-06, |
|
"loss": 0.5285, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.4076767676767676, |
|
"grad_norm": 6.875, |
|
"learning_rate": 2.4310009592428442e-06, |
|
"loss": 0.569, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.4092929292929293, |
|
"grad_norm": 6.4375, |
|
"learning_rate": 2.418892504254231e-06, |
|
"loss": 0.5699, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.410909090909091, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 2.4068046506295455e-06, |
|
"loss": 0.5794, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.4125252525252525, |
|
"grad_norm": 7.65625, |
|
"learning_rate": 2.394737494849184e-06, |
|
"loss": 0.4674, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.4141414141414141, |
|
"grad_norm": 9.375, |
|
"learning_rate": 2.3826911332283364e-06, |
|
"loss": 0.619, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.4157575757575758, |
|
"grad_norm": 10.25, |
|
"learning_rate": 2.3706656619162278e-06, |
|
"loss": 0.7549, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.4173737373737374, |
|
"grad_norm": 5.5625, |
|
"learning_rate": 2.358661176895342e-06, |
|
"loss": 0.3443, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.418989898989899, |
|
"grad_norm": 6.96875, |
|
"learning_rate": 2.34667777398066e-06, |
|
"loss": 0.6654, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.4206060606060606, |
|
"grad_norm": 6.875, |
|
"learning_rate": 2.3347155488188944e-06, |
|
"loss": 0.6335, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.4222222222222223, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 2.322774596887726e-06, |
|
"loss": 0.4496, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.423838383838384, |
|
"grad_norm": 5.3125, |
|
"learning_rate": 2.3108550134950352e-06, |
|
"loss": 0.3563, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.4254545454545455, |
|
"grad_norm": 5.59375, |
|
"learning_rate": 2.298956893778154e-06, |
|
"loss": 0.4681, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.427070707070707, |
|
"grad_norm": 8.0, |
|
"learning_rate": 2.2870803327030995e-06, |
|
"loss": 0.6678, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.4286868686868686, |
|
"grad_norm": 5.34375, |
|
"learning_rate": 2.275225425063813e-06, |
|
"loss": 0.3222, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.4303030303030302, |
|
"grad_norm": 7.90625, |
|
"learning_rate": 2.2633922654814082e-06, |
|
"loss": 0.5111, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.4319191919191918, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 2.251580948403416e-06, |
|
"loss": 0.2793, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.4335353535353534, |
|
"grad_norm": 6.5, |
|
"learning_rate": 2.23979156810303e-06, |
|
"loss": 0.3486, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.435151515151515, |
|
"grad_norm": 5.6875, |
|
"learning_rate": 2.2280242186783473e-06, |
|
"loss": 0.4422, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.4367676767676767, |
|
"grad_norm": 7.0, |
|
"learning_rate": 2.2162789940516268e-06, |
|
"loss": 0.4664, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.4383838383838383, |
|
"grad_norm": 7.8125, |
|
"learning_rate": 2.204555987968541e-06, |
|
"loss": 0.408, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 6.1875, |
|
"learning_rate": 2.192855293997418e-06, |
|
"loss": 0.3908, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.4416161616161616, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 2.1811770055284968e-06, |
|
"loss": 0.6109, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.4432323232323232, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 2.1695212157731904e-06, |
|
"loss": 0.3194, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.4448484848484848, |
|
"grad_norm": 7.3125, |
|
"learning_rate": 2.157888017763333e-06, |
|
"loss": 0.5721, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.4464646464646465, |
|
"grad_norm": 6.4375, |
|
"learning_rate": 2.1462775043504423e-06, |
|
"loss": 0.4683, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.448080808080808, |
|
"grad_norm": 7.90625, |
|
"learning_rate": 2.134689768204975e-06, |
|
"loss": 0.3507, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.4496969696969697, |
|
"grad_norm": 6.1875, |
|
"learning_rate": 2.12312490181559e-06, |
|
"loss": 0.5323, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.4513131313131313, |
|
"grad_norm": 6.15625, |
|
"learning_rate": 2.1115829974884097e-06, |
|
"loss": 0.4394, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.452929292929293, |
|
"grad_norm": 6.0, |
|
"learning_rate": 2.1000641473462773e-06, |
|
"loss": 0.4603, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.4545454545454546, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 2.0885684433280336e-06, |
|
"loss": 0.4744, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.4561616161616162, |
|
"grad_norm": 7.375, |
|
"learning_rate": 2.077095977187773e-06, |
|
"loss": 0.4406, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.4577777777777778, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 2.065646840494115e-06, |
|
"loss": 0.3625, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.4593939393939395, |
|
"grad_norm": 5.46875, |
|
"learning_rate": 2.054221124629473e-06, |
|
"loss": 0.3098, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.461010101010101, |
|
"grad_norm": 5.4375, |
|
"learning_rate": 2.042818920789326e-06, |
|
"loss": 0.3083, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.4626262626262627, |
|
"grad_norm": 6.4375, |
|
"learning_rate": 2.0314403199814846e-06, |
|
"loss": 0.614, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.4642424242424243, |
|
"grad_norm": 5.65625, |
|
"learning_rate": 2.020085413025375e-06, |
|
"loss": 0.3322, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.465858585858586, |
|
"grad_norm": 6.21875, |
|
"learning_rate": 2.008754290551306e-06, |
|
"loss": 0.4278, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.4674747474747476, |
|
"grad_norm": 7.25, |
|
"learning_rate": 1.9974470429997482e-06, |
|
"loss": 0.6041, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.4690909090909092, |
|
"grad_norm": 6.375, |
|
"learning_rate": 1.9861637606206134e-06, |
|
"loss": 0.593, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.4707070707070706, |
|
"grad_norm": 8.25, |
|
"learning_rate": 1.974904533472532e-06, |
|
"loss": 0.4545, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.4723232323232323, |
|
"grad_norm": 11.125, |
|
"learning_rate": 1.9636694514221356e-06, |
|
"loss": 0.6286, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.4739393939393939, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 1.9524586041433393e-06, |
|
"loss": 0.3233, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.4755555555555555, |
|
"grad_norm": 5.1875, |
|
"learning_rate": 1.9412720811166248e-06, |
|
"loss": 0.263, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.4771717171717171, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 1.9301099716283293e-06, |
|
"loss": 0.504, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.4787878787878788, |
|
"grad_norm": 6.46875, |
|
"learning_rate": 1.918972364769932e-06, |
|
"loss": 0.4073, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.4804040404040404, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 1.907859349437336e-06, |
|
"loss": 0.3346, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.482020202020202, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 1.896771014330171e-06, |
|
"loss": 0.4134, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.4836363636363636, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 1.8857074479510761e-06, |
|
"loss": 0.3783, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.4852525252525253, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 1.8746687386049972e-06, |
|
"loss": 0.3182, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.486868686868687, |
|
"grad_norm": 7.0, |
|
"learning_rate": 1.8636549743984815e-06, |
|
"loss": 0.431, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.4884848484848485, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 1.8526662432389752e-06, |
|
"loss": 0.3474, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.4901010101010101, |
|
"grad_norm": 7.875, |
|
"learning_rate": 1.8417026328341158e-06, |
|
"loss": 0.511, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.4917171717171718, |
|
"grad_norm": 7.84375, |
|
"learning_rate": 1.8307642306910423e-06, |
|
"loss": 0.3839, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.4933333333333334, |
|
"grad_norm": 7.25, |
|
"learning_rate": 1.8198511241156902e-06, |
|
"loss": 0.3581, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.494949494949495, |
|
"grad_norm": 8.375, |
|
"learning_rate": 1.8089634002120953e-06, |
|
"loss": 0.4388, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.4965656565656564, |
|
"grad_norm": 7.375, |
|
"learning_rate": 1.7981011458816988e-06, |
|
"loss": 0.6741, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.498181818181818, |
|
"grad_norm": 8.3125, |
|
"learning_rate": 1.7872644478226563e-06, |
|
"loss": 0.3082, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.4997979797979797, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 1.776453392529139e-06, |
|
"loss": 0.6083, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.5014141414141413, |
|
"grad_norm": 8.25, |
|
"learning_rate": 1.7656680662906495e-06, |
|
"loss": 0.5088, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.503030303030303, |
|
"grad_norm": 7.65625, |
|
"learning_rate": 1.7549085551913358e-06, |
|
"loss": 0.5867, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.5046464646464646, |
|
"grad_norm": 7.15625, |
|
"learning_rate": 1.744174945109296e-06, |
|
"loss": 0.4078, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.5062626262626262, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 1.7334673217158976e-06, |
|
"loss": 0.7761, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.5078787878787878, |
|
"grad_norm": 10.8125, |
|
"learning_rate": 1.7227857704750888e-06, |
|
"loss": 0.3331, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.5094949494949494, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 1.7121303766427266e-06, |
|
"loss": 0.3509, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.511111111111111, |
|
"grad_norm": 5.25, |
|
"learning_rate": 1.7015012252658864e-06, |
|
"loss": 0.2569, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.5127272727272727, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 1.6908984011821883e-06, |
|
"loss": 0.3199, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.5143434343434343, |
|
"grad_norm": 28.5, |
|
"learning_rate": 1.6803219890191174e-06, |
|
"loss": 0.8267, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.515959595959596, |
|
"grad_norm": 9.25, |
|
"learning_rate": 1.669772073193352e-06, |
|
"loss": 0.5876, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.5175757575757576, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 1.6592487379100819e-06, |
|
"loss": 0.4238, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.5191919191919192, |
|
"grad_norm": 8.125, |
|
"learning_rate": 1.6487520671623469e-06, |
|
"loss": 0.349, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.5208080808080808, |
|
"grad_norm": 7.75, |
|
"learning_rate": 1.6382821447303593e-06, |
|
"loss": 0.4342, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.5224242424242425, |
|
"grad_norm": 6.625, |
|
"learning_rate": 1.6278390541808364e-06, |
|
"loss": 0.4918, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.524040404040404, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 1.6174228788663366e-06, |
|
"loss": 0.4434, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.5256565656565657, |
|
"grad_norm": 8.25, |
|
"learning_rate": 1.6070337019245896e-06, |
|
"loss": 0.381, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.5272727272727273, |
|
"grad_norm": 7.125, |
|
"learning_rate": 1.5966716062778336e-06, |
|
"loss": 0.6952, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.528888888888889, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 1.5863366746321578e-06, |
|
"loss": 0.5478, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.5305050505050506, |
|
"grad_norm": 6.125, |
|
"learning_rate": 1.5760289894768377e-06, |
|
"loss": 0.329, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.5321212121212122, |
|
"grad_norm": 8.0, |
|
"learning_rate": 1.5657486330836786e-06, |
|
"loss": 0.5797, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.5337373737373738, |
|
"grad_norm": 8.75, |
|
"learning_rate": 1.5554956875063587e-06, |
|
"loss": 0.5311, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.5353535353535355, |
|
"grad_norm": 7.25, |
|
"learning_rate": 1.5452702345797738e-06, |
|
"loss": 0.5798, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.536969696969697, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 1.5350723559193837e-06, |
|
"loss": 0.428, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.5385858585858587, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 1.5249021329205638e-06, |
|
"loss": 0.4676, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.5402020202020203, |
|
"grad_norm": 6.875, |
|
"learning_rate": 1.51475964675795e-06, |
|
"loss": 0.5698, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.541818181818182, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 1.5046449783847965e-06, |
|
"loss": 0.5788, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.5434343434343434, |
|
"grad_norm": 6.875, |
|
"learning_rate": 1.4945582085323275e-06, |
|
"loss": 0.5295, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.545050505050505, |
|
"grad_norm": 5.96875, |
|
"learning_rate": 1.4844994177090871e-06, |
|
"loss": 0.5326, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.5466666666666666, |
|
"grad_norm": 6.0, |
|
"learning_rate": 1.4744686862003082e-06, |
|
"loss": 0.4787, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.5482828282828283, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 1.4644660940672628e-06, |
|
"loss": 0.561, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.5498989898989899, |
|
"grad_norm": 9.0, |
|
"learning_rate": 1.4544917211466264e-06, |
|
"loss": 0.6131, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.5515151515151515, |
|
"grad_norm": 6.5, |
|
"learning_rate": 1.4445456470498392e-06, |
|
"loss": 0.4817, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.5531313131313131, |
|
"grad_norm": 5.1875, |
|
"learning_rate": 1.4346279511624728e-06, |
|
"loss": 0.4069, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.5547474747474748, |
|
"grad_norm": 6.21875, |
|
"learning_rate": 1.4247387126435957e-06, |
|
"loss": 0.5329, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.5563636363636364, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 1.414878010425137e-06, |
|
"loss": 0.4998, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.557979797979798, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 1.4050459232112652e-06, |
|
"loss": 0.4898, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.5595959595959596, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 1.3952425294777533e-06, |
|
"loss": 0.5784, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.561212121212121, |
|
"grad_norm": 7.375, |
|
"learning_rate": 1.3854679074713557e-06, |
|
"loss": 0.546, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.5628282828282827, |
|
"grad_norm": 7.5, |
|
"learning_rate": 1.3757221352091805e-06, |
|
"loss": 0.5018, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.5644444444444443, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 1.3660052904780707e-06, |
|
"loss": 0.3646, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.566060606060606, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 1.3563174508339793e-06, |
|
"loss": 0.6095, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.5676767676767676, |
|
"grad_norm": 6.09375, |
|
"learning_rate": 1.3466586936013548e-06, |
|
"loss": 0.4514, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.5692929292929292, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 1.337029095872519e-06, |
|
"loss": 0.6468, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.5709090909090908, |
|
"grad_norm": 5.625, |
|
"learning_rate": 1.3274287345070564e-06, |
|
"loss": 0.4297, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.5725252525252524, |
|
"grad_norm": 9.5, |
|
"learning_rate": 1.3178576861311988e-06, |
|
"loss": 0.3784, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.574141414141414, |
|
"grad_norm": 9.875, |
|
"learning_rate": 1.3083160271372092e-06, |
|
"loss": 0.5492, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.5757575757575757, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 1.2988038336827814e-06, |
|
"loss": 0.4873, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.5773737373737373, |
|
"grad_norm": 6.09375, |
|
"learning_rate": 1.2893211816904243e-06, |
|
"loss": 0.3761, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.578989898989899, |
|
"grad_norm": 7.4375, |
|
"learning_rate": 1.2798681468468605e-06, |
|
"loss": 0.5108, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.5806060606060606, |
|
"grad_norm": 5.75, |
|
"learning_rate": 1.2704448046024192e-06, |
|
"loss": 0.2867, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.5822222222222222, |
|
"grad_norm": 6.09375, |
|
"learning_rate": 1.261051230170437e-06, |
|
"loss": 0.3189, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.5838383838383838, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 1.2516874985266508e-06, |
|
"loss": 0.3282, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.5854545454545454, |
|
"grad_norm": 6.15625, |
|
"learning_rate": 1.2423536844086105e-06, |
|
"loss": 0.6539, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.587070707070707, |
|
"grad_norm": 6.1875, |
|
"learning_rate": 1.2330498623150722e-06, |
|
"loss": 0.3337, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.5886868686868687, |
|
"grad_norm": 7.25, |
|
"learning_rate": 1.2237761065054106e-06, |
|
"loss": 0.3437, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.5903030303030303, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 1.2145324909990202e-06, |
|
"loss": 0.5222, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.5903030303030303, |
|
"eval_loss": 0.6608893275260925, |
|
"eval_runtime": 35.0868, |
|
"eval_samples_per_second": 31.351, |
|
"eval_steps_per_second": 3.933, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.591919191919192, |
|
"grad_norm": 5.25, |
|
"learning_rate": 1.2053190895747324e-06, |
|
"loss": 0.3712, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.5935353535353536, |
|
"grad_norm": 6.78125, |
|
"learning_rate": 1.1961359757702151e-06, |
|
"loss": 0.54, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.5951515151515152, |
|
"grad_norm": 8.375, |
|
"learning_rate": 1.1869832228813987e-06, |
|
"loss": 0.5581, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.5967676767676768, |
|
"grad_norm": 5.78125, |
|
"learning_rate": 1.1778609039618804e-06, |
|
"loss": 0.4538, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.5983838383838385, |
|
"grad_norm": 6.0, |
|
"learning_rate": 1.168769091822351e-06, |
|
"loss": 0.4087, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 1.1597078590300054e-06, |
|
"loss": 0.3286, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.6016161616161617, |
|
"grad_norm": 7.75, |
|
"learning_rate": 1.150677277907964e-06, |
|
"loss": 0.3279, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.6032323232323233, |
|
"grad_norm": 5.625, |
|
"learning_rate": 1.1416774205347015e-06, |
|
"loss": 0.6002, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.604848484848485, |
|
"grad_norm": 6.1875, |
|
"learning_rate": 1.1327083587434662e-06, |
|
"loss": 0.3384, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.6064646464646466, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 1.1237701641217097e-06, |
|
"loss": 0.3252, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.6080808080808082, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 1.1148629080105128e-06, |
|
"loss": 0.7288, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.6096969696969698, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 1.1059866615040205e-06, |
|
"loss": 0.731, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.6113131313131315, |
|
"grad_norm": 6.46875, |
|
"learning_rate": 1.0971414954488657e-06, |
|
"loss": 0.4641, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.6129292929292929, |
|
"grad_norm": 6.3125, |
|
"learning_rate": 1.0883274804436155e-06, |
|
"loss": 0.5988, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.6145454545454545, |
|
"grad_norm": 6.125, |
|
"learning_rate": 1.0795446868381987e-06, |
|
"loss": 0.3712, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.6161616161616161, |
|
"grad_norm": 7.40625, |
|
"learning_rate": 1.0707931847333487e-06, |
|
"loss": 0.495, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.6177777777777778, |
|
"grad_norm": 6.375, |
|
"learning_rate": 1.0620730439800424e-06, |
|
"loss": 0.5169, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.6193939393939394, |
|
"grad_norm": 6.0, |
|
"learning_rate": 1.053384334178944e-06, |
|
"loss": 0.6019, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.621010101010101, |
|
"grad_norm": 6.5, |
|
"learning_rate": 1.0447271246798446e-06, |
|
"loss": 0.5812, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.6226262626262626, |
|
"grad_norm": 7.34375, |
|
"learning_rate": 1.036101484581117e-06, |
|
"loss": 0.4106, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.6242424242424243, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 1.027507482729157e-06, |
|
"loss": 0.8248, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.625858585858586, |
|
"grad_norm": 7.21875, |
|
"learning_rate": 1.0189451877178386e-06, |
|
"loss": 0.4905, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.6274747474747475, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 1.0104146678879633e-06, |
|
"loss": 0.4978, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.6290909090909091, |
|
"grad_norm": 9.25, |
|
"learning_rate": 1.0019159913267156e-06, |
|
"loss": 0.5907, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.6307070707070705, |
|
"grad_norm": 6.09375, |
|
"learning_rate": 9.934492258671214e-07, |
|
"loss": 0.384, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.6323232323232322, |
|
"grad_norm": 5.625, |
|
"learning_rate": 9.850144390875022e-07, |
|
"loss": 0.2712, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.6339393939393938, |
|
"grad_norm": 6.59375, |
|
"learning_rate": 9.76611698310942e-07, |
|
"loss": 0.5483, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.6355555555555554, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 9.68241070604743e-07, |
|
"loss": 0.5543, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.637171717171717, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 9.59902622779898e-07, |
|
"loss": 0.6036, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.6387878787878787, |
|
"grad_norm": 7.125, |
|
"learning_rate": 9.51596421390547e-07, |
|
"loss": 0.4629, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.6404040404040403, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 9.433225327334567e-07, |
|
"loss": 0.4255, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.642020202020202, |
|
"grad_norm": 6.78125, |
|
"learning_rate": 9.350810228474855e-07, |
|
"loss": 0.6725, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.6436363636363636, |
|
"grad_norm": 8.875, |
|
"learning_rate": 9.268719575130563e-07, |
|
"loss": 0.4409, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.6452525252525252, |
|
"grad_norm": 5.5, |
|
"learning_rate": 9.186954022516343e-07, |
|
"loss": 0.4973, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.6468686868686868, |
|
"grad_norm": 7.5, |
|
"learning_rate": 9.105514223252016e-07, |
|
"loss": 0.4426, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.6484848484848484, |
|
"grad_norm": 9.8125, |
|
"learning_rate": 9.024400827357344e-07, |
|
"loss": 0.5614, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.65010101010101, |
|
"grad_norm": 8.875, |
|
"learning_rate": 8.943614482246915e-07, |
|
"loss": 0.6925, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.6517171717171717, |
|
"grad_norm": 7.65625, |
|
"learning_rate": 8.863155832724895e-07, |
|
"loss": 0.5336, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.6533333333333333, |
|
"grad_norm": 7.3125, |
|
"learning_rate": 8.783025520979932e-07, |
|
"loss": 0.383, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.654949494949495, |
|
"grad_norm": 7.59375, |
|
"learning_rate": 8.703224186580012e-07, |
|
"loss": 0.6345, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.6565656565656566, |
|
"grad_norm": 5.75, |
|
"learning_rate": 8.623752466467344e-07, |
|
"loss": 0.4726, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.6581818181818182, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 8.544610994953317e-07, |
|
"loss": 0.4826, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.6597979797979798, |
|
"grad_norm": 11.875, |
|
"learning_rate": 8.465800403713365e-07, |
|
"loss": 0.4592, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.6614141414141415, |
|
"grad_norm": 5.78125, |
|
"learning_rate": 8.387321321781977e-07, |
|
"loss": 0.4643, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.663030303030303, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 8.309174375547724e-07, |
|
"loss": 0.5006, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.6646464646464647, |
|
"grad_norm": 10.25, |
|
"learning_rate": 8.23136018874815e-07, |
|
"loss": 0.5035, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.6662626262626263, |
|
"grad_norm": 6.40625, |
|
"learning_rate": 8.153879382464836e-07, |
|
"loss": 0.3378, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.667878787878788, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 8.07673257511849e-07, |
|
"loss": 0.5713, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.6694949494949496, |
|
"grad_norm": 7.3125, |
|
"learning_rate": 7.999920382463955e-07, |
|
"loss": 0.3865, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.6711111111111112, |
|
"grad_norm": 8.3125, |
|
"learning_rate": 7.923443417585324e-07, |
|
"loss": 0.3851, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.6727272727272728, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 7.847302290891029e-07, |
|
"loss": 0.5374, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.6743434343434345, |
|
"grad_norm": 7.84375, |
|
"learning_rate": 7.771497610108981e-07, |
|
"loss": 0.8689, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.675959595959596, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 7.696029980281722e-07, |
|
"loss": 0.557, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.6775757575757577, |
|
"grad_norm": 7.8125, |
|
"learning_rate": 7.620900003761561e-07, |
|
"loss": 0.4862, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.6791919191919193, |
|
"grad_norm": 6.5625, |
|
"learning_rate": 7.546108280205816e-07, |
|
"loss": 0.658, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.680808080808081, |
|
"grad_norm": 9.0625, |
|
"learning_rate": 7.471655406572003e-07, |
|
"loss": 0.6738, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.6824242424242424, |
|
"grad_norm": 5.875, |
|
"learning_rate": 7.397541977113076e-07, |
|
"loss": 0.4773, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.684040404040404, |
|
"grad_norm": 7.625, |
|
"learning_rate": 7.323768583372675e-07, |
|
"loss": 0.4712, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.6856565656565656, |
|
"grad_norm": 7.21875, |
|
"learning_rate": 7.250335814180432e-07, |
|
"loss": 0.7264, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.6872727272727273, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 7.177244255647209e-07, |
|
"loss": 0.6175, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.6888888888888889, |
|
"grad_norm": 6.21875, |
|
"learning_rate": 7.10449449116049e-07, |
|
"loss": 0.3118, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.6905050505050505, |
|
"grad_norm": 9.3125, |
|
"learning_rate": 7.032087101379703e-07, |
|
"loss": 0.6699, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.6921212121212121, |
|
"grad_norm": 6.875, |
|
"learning_rate": 6.960022664231558e-07, |
|
"loss": 0.3863, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.6937373737373738, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 6.888301754905469e-07, |
|
"loss": 0.51, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.6953535353535354, |
|
"grad_norm": 8.0, |
|
"learning_rate": 6.816924945848941e-07, |
|
"loss": 0.5381, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.696969696969697, |
|
"grad_norm": 8.5, |
|
"learning_rate": 6.745892806763015e-07, |
|
"loss": 0.6998, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.6985858585858584, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 6.675205904597715e-07, |
|
"loss": 0.4438, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 1.70020202020202, |
|
"grad_norm": 6.96875, |
|
"learning_rate": 6.604864803547511e-07, |
|
"loss": 0.615, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.7018181818181817, |
|
"grad_norm": 5.75, |
|
"learning_rate": 6.534870065046839e-07, |
|
"loss": 0.2991, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 1.7034343434343433, |
|
"grad_norm": 7.21875, |
|
"learning_rate": 6.465222247765618e-07, |
|
"loss": 0.4271, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.705050505050505, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 6.395921907604747e-07, |
|
"loss": 0.4636, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.7066666666666666, |
|
"grad_norm": 7.125, |
|
"learning_rate": 6.326969597691724e-07, |
|
"loss": 0.4994, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.7082828282828282, |
|
"grad_norm": 10.1875, |
|
"learning_rate": 6.258365868376215e-07, |
|
"loss": 0.5785, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 1.7098989898989898, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 6.190111267225651e-07, |
|
"loss": 0.8024, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.7115151515151514, |
|
"grad_norm": 7.75, |
|
"learning_rate": 6.122206339020854e-07, |
|
"loss": 0.3282, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 1.713131313131313, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 6.054651625751717e-07, |
|
"loss": 0.4669, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.7147474747474747, |
|
"grad_norm": 7.25, |
|
"learning_rate": 5.987447666612823e-07, |
|
"loss": 0.584, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 1.7163636363636363, |
|
"grad_norm": 6.875, |
|
"learning_rate": 5.920594997999202e-07, |
|
"loss": 0.5449, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 1.717979797979798, |
|
"grad_norm": 10.125, |
|
"learning_rate": 5.854094153502022e-07, |
|
"loss": 0.4517, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 1.7195959595959596, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 5.787945663904332e-07, |
|
"loss": 0.6394, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 1.7212121212121212, |
|
"grad_norm": 5.34375, |
|
"learning_rate": 5.722150057176823e-07, |
|
"loss": 0.395, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.7228282828282828, |
|
"grad_norm": 8.375, |
|
"learning_rate": 5.65670785847362e-07, |
|
"loss": 0.4517, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 1.7244444444444444, |
|
"grad_norm": 9.375, |
|
"learning_rate": 5.591619590128067e-07, |
|
"loss": 0.4588, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 1.726060606060606, |
|
"grad_norm": 6.96875, |
|
"learning_rate": 5.526885771648599e-07, |
|
"loss": 0.2908, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 1.7276767676767677, |
|
"grad_norm": 6.875, |
|
"learning_rate": 5.462506919714583e-07, |
|
"loss": 0.3348, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 1.7292929292929293, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 5.398483548172162e-07, |
|
"loss": 0.3347, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.730909090909091, |
|
"grad_norm": 8.3125, |
|
"learning_rate": 5.334816168030204e-07, |
|
"loss": 0.6141, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 1.7325252525252526, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 5.271505287456153e-07, |
|
"loss": 0.437, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.7341414141414142, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 5.208551411772056e-07, |
|
"loss": 0.4954, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 1.7357575757575758, |
|
"grad_norm": 7.40625, |
|
"learning_rate": 5.145955043450484e-07, |
|
"loss": 0.4811, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 1.7373737373737375, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 5.083716682110512e-07, |
|
"loss": 0.6364, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.738989898989899, |
|
"grad_norm": 5.46875, |
|
"learning_rate": 5.021836824513759e-07, |
|
"loss": 0.2266, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 1.7406060606060607, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 4.960315964560425e-07, |
|
"loss": 0.3873, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 1.7422222222222223, |
|
"grad_norm": 5.375, |
|
"learning_rate": 4.899154593285294e-07, |
|
"loss": 0.3454, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 1.743838383838384, |
|
"grad_norm": 5.21875, |
|
"learning_rate": 4.838353198853884e-07, |
|
"loss": 0.2861, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 1.7454545454545456, |
|
"grad_norm": 5.75, |
|
"learning_rate": 4.777912266558532e-07, |
|
"loss": 0.4245, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.7470707070707072, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 4.717832278814488e-07, |
|
"loss": 0.735, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 1.7486868686868688, |
|
"grad_norm": 7.21875, |
|
"learning_rate": 4.658113715156098e-07, |
|
"loss": 0.7658, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 1.7503030303030302, |
|
"grad_norm": 6.78125, |
|
"learning_rate": 4.5987570522329773e-07, |
|
"loss": 0.5362, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 1.7519191919191919, |
|
"grad_norm": 5.96875, |
|
"learning_rate": 4.5397627638061604e-07, |
|
"loss": 0.3993, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 1.7535353535353535, |
|
"grad_norm": 4.96875, |
|
"learning_rate": 4.481131320744386e-07, |
|
"loss": 0.4245, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.7551515151515151, |
|
"grad_norm": 10.4375, |
|
"learning_rate": 4.422863191020299e-07, |
|
"loss": 0.4338, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 1.7567676767676768, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 4.3649588397067177e-07, |
|
"loss": 0.4729, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 1.7583838383838384, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 4.307418728972934e-07, |
|
"loss": 0.7243, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 4.2502433180810153e-07, |
|
"loss": 0.3767, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 1.7616161616161616, |
|
"grad_norm": 7.5, |
|
"learning_rate": 4.193433063382135e-07, |
|
"loss": 0.3824, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.7632323232323233, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 4.1369884183129485e-07, |
|
"loss": 0.7881, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 1.7648484848484849, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 4.080909833391944e-07, |
|
"loss": 0.8348, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 1.7664646464646465, |
|
"grad_norm": 6.5, |
|
"learning_rate": 4.0251977562158873e-07, |
|
"loss": 0.4176, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 1.768080808080808, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 3.9698526314562114e-07, |
|
"loss": 1.021, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 1.7696969696969695, |
|
"grad_norm": 7.9375, |
|
"learning_rate": 3.9148749008554807e-07, |
|
"loss": 0.6396, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.7713131313131312, |
|
"grad_norm": 7.6875, |
|
"learning_rate": 3.8602650032238675e-07, |
|
"loss": 0.5306, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 1.7729292929292928, |
|
"grad_norm": 7.8125, |
|
"learning_rate": 3.8060233744356634e-07, |
|
"loss": 0.5574, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 1.7745454545454544, |
|
"grad_norm": 7.4375, |
|
"learning_rate": 3.752150447425773e-07, |
|
"loss": 0.6494, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 1.776161616161616, |
|
"grad_norm": 6.125, |
|
"learning_rate": 3.698646652186272e-07, |
|
"loss": 0.5226, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 1.7777777777777777, |
|
"grad_norm": 8.625, |
|
"learning_rate": 3.6455124157629805e-07, |
|
"loss": 0.7729, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.7793939393939393, |
|
"grad_norm": 6.96875, |
|
"learning_rate": 3.5927481622520463e-07, |
|
"loss": 0.5724, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 1.781010101010101, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 3.5403543127965514e-07, |
|
"loss": 0.5896, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 1.7826262626262626, |
|
"grad_norm": 7.625, |
|
"learning_rate": 3.488331285583174e-07, |
|
"loss": 0.5196, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 1.7842424242424242, |
|
"grad_norm": 5.65625, |
|
"learning_rate": 3.436679495838835e-07, |
|
"loss": 0.4324, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 1.7858585858585858, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 3.38539935582739e-07, |
|
"loss": 0.2973, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.7874747474747474, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 3.334491274846324e-07, |
|
"loss": 0.3711, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 1.789090909090909, |
|
"grad_norm": 4.71875, |
|
"learning_rate": 3.283955659223509e-07, |
|
"loss": 0.2609, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 1.789090909090909, |
|
"eval_loss": 0.6599124670028687, |
|
"eval_runtime": 35.4231, |
|
"eval_samples_per_second": 31.053, |
|
"eval_steps_per_second": 3.896, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 1.7907070707070707, |
|
"grad_norm": 5.65625, |
|
"learning_rate": 3.233792912313943e-07, |
|
"loss": 0.4951, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 1.7923232323232323, |
|
"grad_norm": 7.90625, |
|
"learning_rate": 3.1840034344965355e-07, |
|
"loss": 0.5709, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 1.793939393939394, |
|
"grad_norm": 7.25, |
|
"learning_rate": 3.134587623170909e-07, |
|
"loss": 0.4571, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.7955555555555556, |
|
"grad_norm": 5.65625, |
|
"learning_rate": 3.085545872754242e-07, |
|
"loss": 0.5489, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 1.7971717171717172, |
|
"grad_norm": 6.875, |
|
"learning_rate": 3.0368785746780925e-07, |
|
"loss": 0.3417, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 1.7987878787878788, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 2.9885861173852946e-07, |
|
"loss": 0.5436, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 1.8004040404040405, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 2.940668886326864e-07, |
|
"loss": 0.407, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.802020202020202, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 2.8931272639589005e-07, |
|
"loss": 0.5136, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.8036363636363637, |
|
"grad_norm": 6.21875, |
|
"learning_rate": 2.8459616297395464e-07, |
|
"loss": 0.3449, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 1.8052525252525253, |
|
"grad_norm": 7.0, |
|
"learning_rate": 2.7991723601259677e-07, |
|
"loss": 0.3335, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 1.806868686868687, |
|
"grad_norm": 6.5, |
|
"learning_rate": 2.7527598285713387e-07, |
|
"loss": 0.7282, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 1.8084848484848486, |
|
"grad_norm": 6.15625, |
|
"learning_rate": 2.706724405521838e-07, |
|
"loss": 0.3397, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 1.8101010101010102, |
|
"grad_norm": 5.78125, |
|
"learning_rate": 2.6610664584137413e-07, |
|
"loss": 0.3164, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.8117171717171718, |
|
"grad_norm": 7.5625, |
|
"learning_rate": 2.615786351670452e-07, |
|
"loss": 0.548, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 1.8133333333333335, |
|
"grad_norm": 12.9375, |
|
"learning_rate": 2.570884446699612e-07, |
|
"loss": 0.5915, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 1.814949494949495, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 2.5263611018901957e-07, |
|
"loss": 0.371, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 1.8165656565656567, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 2.482216672609677e-07, |
|
"loss": 0.4942, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 1.8181818181818183, |
|
"grad_norm": 7.375, |
|
"learning_rate": 2.438451511201162e-07, |
|
"loss": 0.4319, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.8197979797979797, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 2.3950659669806033e-07, |
|
"loss": 0.5259, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 1.8214141414141414, |
|
"grad_norm": 7.8125, |
|
"learning_rate": 2.3520603862339952e-07, |
|
"loss": 0.4634, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 1.823030303030303, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 2.3094351122146307e-07, |
|
"loss": 0.446, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 1.8246464646464646, |
|
"grad_norm": 5.96875, |
|
"learning_rate": 2.2671904851403303e-07, |
|
"loss": 0.6715, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 1.8262626262626263, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 2.2253268421907503e-07, |
|
"loss": 0.4167, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.8278787878787879, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 2.1838445175046796e-07, |
|
"loss": 0.6699, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 1.8294949494949495, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 2.142743842177386e-07, |
|
"loss": 0.4689, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 1.8311111111111111, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 2.1020251442579632e-07, |
|
"loss": 0.6284, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 1.8327272727272728, |
|
"grad_norm": 6.15625, |
|
"learning_rate": 2.0616887487467096e-07, |
|
"loss": 0.4624, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 1.8343434343434344, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 2.0217349775925487e-07, |
|
"loss": 0.4118, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.835959595959596, |
|
"grad_norm": 11.125, |
|
"learning_rate": 1.98216414969043e-07, |
|
"loss": 0.478, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 1.8375757575757574, |
|
"grad_norm": 5.34375, |
|
"learning_rate": 1.942976580878825e-07, |
|
"loss": 0.334, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 1.839191919191919, |
|
"grad_norm": 7.5625, |
|
"learning_rate": 1.9041725839371805e-07, |
|
"loss": 0.4451, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 1.8408080808080807, |
|
"grad_norm": 7.71875, |
|
"learning_rate": 1.8657524685834206e-07, |
|
"loss": 0.3302, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 1.8424242424242423, |
|
"grad_norm": 9.5, |
|
"learning_rate": 1.8277165414714858e-07, |
|
"loss": 0.5024, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.844040404040404, |
|
"grad_norm": 5.125, |
|
"learning_rate": 1.7900651061888762e-07, |
|
"loss": 0.4093, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 1.8456565656565656, |
|
"grad_norm": 7.78125, |
|
"learning_rate": 1.752798463254235e-07, |
|
"loss": 0.9122, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 1.8472727272727272, |
|
"grad_norm": 5.625, |
|
"learning_rate": 1.7159169101149398e-07, |
|
"loss": 0.493, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 1.8488888888888888, |
|
"grad_norm": 6.21875, |
|
"learning_rate": 1.6794207411447548e-07, |
|
"loss": 0.6134, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 1.8505050505050504, |
|
"grad_norm": 7.4375, |
|
"learning_rate": 1.6433102476414386e-07, |
|
"loss": 0.612, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.852121212121212, |
|
"grad_norm": 7.6875, |
|
"learning_rate": 1.6075857178244613e-07, |
|
"loss": 0.5233, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 1.8537373737373737, |
|
"grad_norm": 6.40625, |
|
"learning_rate": 1.5722474368326802e-07, |
|
"loss": 0.7897, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 1.8553535353535353, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 1.5372956867220678e-07, |
|
"loss": 0.3268, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 1.856969696969697, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 1.5027307464634656e-07, |
|
"loss": 0.8276, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 1.8585858585858586, |
|
"grad_norm": 6.1875, |
|
"learning_rate": 1.4685528919403446e-07, |
|
"loss": 0.5105, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.8602020202020202, |
|
"grad_norm": 6.46875, |
|
"learning_rate": 1.434762395946626e-07, |
|
"loss": 0.5541, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 1.8618181818181818, |
|
"grad_norm": 5.96875, |
|
"learning_rate": 1.4013595281844872e-07, |
|
"loss": 0.3773, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 1.8634343434343434, |
|
"grad_norm": 7.8125, |
|
"learning_rate": 1.368344555262202e-07, |
|
"loss": 0.3672, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 1.865050505050505, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 1.335717740692033e-07, |
|
"loss": 0.47, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 1.8666666666666667, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 1.30347934488812e-07, |
|
"loss": 0.458, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.8682828282828283, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 1.2716296251644e-07, |
|
"loss": 0.3902, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 1.86989898989899, |
|
"grad_norm": 7.84375, |
|
"learning_rate": 1.2401688357325415e-07, |
|
"loss": 0.2978, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 1.8715151515151516, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 1.2090972276999513e-07, |
|
"loss": 0.4911, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 1.8731313131313132, |
|
"grad_norm": 7.90625, |
|
"learning_rate": 1.1784150490677215e-07, |
|
"loss": 0.501, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 1.8747474747474748, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 1.1481225447286803e-07, |
|
"loss": 0.3906, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.8763636363636365, |
|
"grad_norm": 6.125, |
|
"learning_rate": 1.1182199564654495e-07, |
|
"loss": 0.5567, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 1.877979797979798, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 1.0887075229484789e-07, |
|
"loss": 0.4789, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 1.8795959595959597, |
|
"grad_norm": 7.1875, |
|
"learning_rate": 1.0595854797341709e-07, |
|
"loss": 0.3514, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 1.8812121212121213, |
|
"grad_norm": 7.125, |
|
"learning_rate": 1.0308540592629756e-07, |
|
"loss": 0.5602, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 1.882828282828283, |
|
"grad_norm": 6.03125, |
|
"learning_rate": 1.0025134908575707e-07, |
|
"loss": 0.2898, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.8844444444444446, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 9.745640007209844e-08, |
|
"loss": 0.2949, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 1.8860606060606062, |
|
"grad_norm": 6.625, |
|
"learning_rate": 9.470058119348257e-08, |
|
"loss": 0.5841, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 1.8876767676767678, |
|
"grad_norm": 8.25, |
|
"learning_rate": 9.198391444575072e-08, |
|
"loss": 0.4759, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 1.8892929292929292, |
|
"grad_norm": 8.25, |
|
"learning_rate": 8.930642151224634e-08, |
|
"loss": 0.6854, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 1.8909090909090909, |
|
"grad_norm": 6.71875, |
|
"learning_rate": 8.666812376364187e-08, |
|
"loss": 0.3352, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.8925252525252525, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 8.406904225777223e-08, |
|
"loss": 0.3283, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 1.8941414141414141, |
|
"grad_norm": 6.5625, |
|
"learning_rate": 8.150919773946165e-08, |
|
"loss": 0.4876, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 1.8957575757575758, |
|
"grad_norm": 5.6875, |
|
"learning_rate": 7.898861064036257e-08, |
|
"loss": 0.6124, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 1.8973737373737374, |
|
"grad_norm": 7.6875, |
|
"learning_rate": 7.650730107878812e-08, |
|
"loss": 0.5362, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 1.898989898989899, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 7.406528885955665e-08, |
|
"loss": 0.4961, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.9006060606060606, |
|
"grad_norm": 6.5625, |
|
"learning_rate": 7.166259347382854e-08, |
|
"loss": 0.3595, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 1.9022222222222223, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 6.929923409895345e-08, |
|
"loss": 0.3119, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 1.9038383838383839, |
|
"grad_norm": 6.0625, |
|
"learning_rate": 6.697522959831837e-08, |
|
"loss": 0.3637, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 1.9054545454545453, |
|
"grad_norm": 8.375, |
|
"learning_rate": 6.46905985211932e-08, |
|
"loss": 0.3966, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 1.907070707070707, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 6.244535910258697e-08, |
|
"loss": 0.6424, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.9086868686868685, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 6.023952926309917e-08, |
|
"loss": 0.5532, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 1.9103030303030302, |
|
"grad_norm": 6.1875, |
|
"learning_rate": 5.8073126608778064e-08, |
|
"loss": 0.5825, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 1.9119191919191918, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 5.594616843097978e-08, |
|
"loss": 0.3147, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 1.9135353535353534, |
|
"grad_norm": 8.125, |
|
"learning_rate": 5.3858671706230605e-08, |
|
"loss": 0.309, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 1.915151515151515, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 5.181065309609212e-08, |
|
"loss": 0.3509, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.9167676767676767, |
|
"grad_norm": 6.09375, |
|
"learning_rate": 4.98021289470263e-08, |
|
"loss": 0.3494, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 1.9183838383838383, |
|
"grad_norm": 5.375, |
|
"learning_rate": 4.7833115290266686e-08, |
|
"loss": 0.2529, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 4.590362784169022e-08, |
|
"loss": 0.6008, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 1.9216161616161616, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 4.401368200169009e-08, |
|
"loss": 0.5284, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 1.9232323232323232, |
|
"grad_norm": 6.375, |
|
"learning_rate": 4.2163292855056936e-08, |
|
"loss": 0.5237, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.9248484848484848, |
|
"grad_norm": 7.34375, |
|
"learning_rate": 4.035247517085283e-08, |
|
"loss": 0.5049, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 1.9264646464646464, |
|
"grad_norm": 8.0625, |
|
"learning_rate": 3.858124340229863e-08, |
|
"loss": 0.5204, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 1.928080808080808, |
|
"grad_norm": 8.0625, |
|
"learning_rate": 3.684961168665457e-08, |
|
"loss": 0.4971, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 1.9296969696969697, |
|
"grad_norm": 11.0, |
|
"learning_rate": 3.515759384510986e-08, |
|
"loss": 0.4429, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 1.9313131313131313, |
|
"grad_norm": 6.40625, |
|
"learning_rate": 3.350520338267216e-08, |
|
"loss": 0.5476, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.932929292929293, |
|
"grad_norm": 7.1875, |
|
"learning_rate": 3.1892453488058803e-08, |
|
"loss": 0.454, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 1.9345454545454546, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 3.0319357033591904e-08, |
|
"loss": 0.5787, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 1.9361616161616162, |
|
"grad_norm": 8.125, |
|
"learning_rate": 2.87859265750956e-08, |
|
"loss": 0.6485, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 1.9377777777777778, |
|
"grad_norm": 7.0, |
|
"learning_rate": 2.7292174351797317e-08, |
|
"loss": 0.3967, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 1.9393939393939394, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 2.5838112286226123e-08, |
|
"loss": 0.7807, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.941010101010101, |
|
"grad_norm": 7.625, |
|
"learning_rate": 2.4423751984122835e-08, |
|
"loss": 0.834, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 1.9426262626262627, |
|
"grad_norm": 8.375, |
|
"learning_rate": 2.304910473434341e-08, |
|
"loss": 0.4306, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 1.9442424242424243, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 2.1714181508770138e-08, |
|
"loss": 0.4254, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 1.945858585858586, |
|
"grad_norm": 6.125, |
|
"learning_rate": 2.0418992962224495e-08, |
|
"loss": 0.5607, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 1.9474747474747476, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 1.916354943238219e-08, |
|
"loss": 0.6082, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.9490909090909092, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 1.7947860939688255e-08, |
|
"loss": 0.5441, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 1.9507070707070708, |
|
"grad_norm": 5.6875, |
|
"learning_rate": 1.6771937187280986e-08, |
|
"loss": 0.4209, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 1.9523232323232325, |
|
"grad_norm": 9.0625, |
|
"learning_rate": 1.563578756091144e-08, |
|
"loss": 0.5322, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 1.953939393939394, |
|
"grad_norm": 8.625, |
|
"learning_rate": 1.4539421128870745e-08, |
|
"loss": 0.455, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 1.9555555555555557, |
|
"grad_norm": 7.90625, |
|
"learning_rate": 1.3482846641914572e-08, |
|
"loss": 0.6719, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.9571717171717171, |
|
"grad_norm": 6.09375, |
|
"learning_rate": 1.2466072533197093e-08, |
|
"loss": 0.4821, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 1.9587878787878787, |
|
"grad_norm": 6.65625, |
|
"learning_rate": 1.1489106918200487e-08, |
|
"loss": 0.5824, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 1.9604040404040404, |
|
"grad_norm": 6.53125, |
|
"learning_rate": 1.0551957594672201e-08, |
|
"loss": 0.5726, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 1.962020202020202, |
|
"grad_norm": 6.5625, |
|
"learning_rate": 9.654632042562229e-09, |
|
"loss": 0.3726, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 1.9636363636363636, |
|
"grad_norm": 5.65625, |
|
"learning_rate": 8.797137423963153e-09, |
|
"loss": 0.382, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.9652525252525253, |
|
"grad_norm": 6.5, |
|
"learning_rate": 7.979480583052423e-09, |
|
"loss": 0.5233, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 1.9668686868686869, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 7.201668046038501e-09, |
|
"loss": 0.501, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 1.9684848484848485, |
|
"grad_norm": 7.75, |
|
"learning_rate": 6.4637060211092395e-09, |
|
"loss": 0.5653, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 1.9701010101010101, |
|
"grad_norm": 5.40625, |
|
"learning_rate": 5.765600398381366e-09, |
|
"loss": 0.3169, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 1.9717171717171718, |
|
"grad_norm": 7.125, |
|
"learning_rate": 5.107356749853298e-09, |
|
"loss": 0.4751, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.9733333333333334, |
|
"grad_norm": 7.1875, |
|
"learning_rate": 4.488980329361847e-09, |
|
"loss": 0.3681, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 1.9749494949494948, |
|
"grad_norm": 7.9375, |
|
"learning_rate": 3.910476072539471e-09, |
|
"loss": 0.3618, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 1.9765656565656564, |
|
"grad_norm": 6.375, |
|
"learning_rate": 3.3718485967748626e-09, |
|
"loss": 0.3531, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 1.978181818181818, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 2.8731022011757593e-09, |
|
"loss": 0.5474, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 1.9797979797979797, |
|
"grad_norm": 7.1875, |
|
"learning_rate": 2.4142408665367433e-09, |
|
"loss": 0.4938, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.9814141414141413, |
|
"grad_norm": 5.375, |
|
"learning_rate": 1.9952682553042722e-09, |
|
"loss": 0.3083, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 1.983030303030303, |
|
"grad_norm": 7.3125, |
|
"learning_rate": 1.6161877115494773e-09, |
|
"loss": 0.3612, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 1.9846464646464645, |
|
"grad_norm": 8.0, |
|
"learning_rate": 1.2770022609409628e-09, |
|
"loss": 0.393, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 1.9862626262626262, |
|
"grad_norm": 6.3125, |
|
"learning_rate": 9.777146107209367e-10, |
|
"loss": 0.5605, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 1.9878787878787878, |
|
"grad_norm": 7.5, |
|
"learning_rate": 7.18327149683562e-10, |
|
"loss": 0.4834, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.9878787878787878, |
|
"eval_loss": 0.6601172089576721, |
|
"eval_runtime": 35.1117, |
|
"eval_samples_per_second": 31.329, |
|
"eval_steps_per_second": 3.93, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.9894949494949494, |
|
"grad_norm": 6.875, |
|
"learning_rate": 4.988419481549711e-10, |
|
"loss": 0.5459, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 1.991111111111111, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 3.1926075797827914e-10, |
|
"loss": 0.4832, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 1.9927272727272727, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 1.7958501249859538e-10, |
|
"loss": 0.4882, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 1.9943434343434343, |
|
"grad_norm": 8.375, |
|
"learning_rate": 7.9815826551366e-11, |
|
"loss": 0.6767, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 1.995959595959596, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 1.9953996454047386e-11, |
|
"loss": 0.4317, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.9975757575757576, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 0.0, |
|
"loss": 0.5997, |
|
"step": 1236 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1236, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.926082991007826e+17, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|