|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9994472084024323, |
|
"eval_steps": 70, |
|
"global_step": 1130, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0008844665561083472, |
|
"grad_norm": 51.0, |
|
"learning_rate": 5.000000000000001e-07, |
|
"loss": 2.9884, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0017689331122166944, |
|
"grad_norm": 45.75, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 2.9128, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0026533996683250414, |
|
"grad_norm": 50.5, |
|
"learning_rate": 1.5e-06, |
|
"loss": 2.8706, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0035378662244333887, |
|
"grad_norm": 51.75, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 2.9548, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.004422332780541736, |
|
"grad_norm": 49.75, |
|
"learning_rate": 2.5e-06, |
|
"loss": 2.9129, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.005306799336650083, |
|
"grad_norm": 40.5, |
|
"learning_rate": 3e-06, |
|
"loss": 2.7836, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.00619126589275843, |
|
"grad_norm": 35.5, |
|
"learning_rate": 3.5e-06, |
|
"loss": 2.7584, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0070757324488667775, |
|
"grad_norm": 31.375, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 2.7227, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.007960199004975124, |
|
"grad_norm": 30.625, |
|
"learning_rate": 4.5e-06, |
|
"loss": 2.6632, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.008844665561083471, |
|
"grad_norm": 25.375, |
|
"learning_rate": 5e-06, |
|
"loss": 2.595, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.009729132117191818, |
|
"grad_norm": 22.75, |
|
"learning_rate": 5.500000000000001e-06, |
|
"loss": 2.5175, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.010613598673300166, |
|
"grad_norm": 19.875, |
|
"learning_rate": 6e-06, |
|
"loss": 2.3391, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.011498065229408513, |
|
"grad_norm": 17.375, |
|
"learning_rate": 6.5000000000000004e-06, |
|
"loss": 2.1773, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.01238253178551686, |
|
"grad_norm": 14.875, |
|
"learning_rate": 7e-06, |
|
"loss": 2.2454, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.013266998341625208, |
|
"grad_norm": 16.0, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 2.1009, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.014151464897733555, |
|
"grad_norm": 13.75, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 2.0104, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.015035931453841902, |
|
"grad_norm": 11.75, |
|
"learning_rate": 8.5e-06, |
|
"loss": 2.0199, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.015920398009950248, |
|
"grad_norm": 11.4375, |
|
"learning_rate": 9e-06, |
|
"loss": 1.8737, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.016804864566058595, |
|
"grad_norm": 6.78125, |
|
"learning_rate": 9.5e-06, |
|
"loss": 1.9285, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.017689331122166942, |
|
"grad_norm": 11.4375, |
|
"learning_rate": 1e-05, |
|
"loss": 1.6982, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01857379767827529, |
|
"grad_norm": 15.25, |
|
"learning_rate": 9.99997997403252e-06, |
|
"loss": 1.6093, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.019458264234383637, |
|
"grad_norm": 9.75, |
|
"learning_rate": 9.999919896290497e-06, |
|
"loss": 1.7027, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.020342730790491984, |
|
"grad_norm": 4.71875, |
|
"learning_rate": 9.999819767255175e-06, |
|
"loss": 1.6295, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.02122719734660033, |
|
"grad_norm": 8.75, |
|
"learning_rate": 9.999679587728626e-06, |
|
"loss": 1.6263, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.02211166390270868, |
|
"grad_norm": 9.0625, |
|
"learning_rate": 9.999499358833745e-06, |
|
"loss": 1.6724, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.022996130458817026, |
|
"grad_norm": 4.21875, |
|
"learning_rate": 9.999279082014233e-06, |
|
"loss": 1.8481, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.023880597014925373, |
|
"grad_norm": 8.25, |
|
"learning_rate": 9.999018759034594e-06, |
|
"loss": 1.6871, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.02476506357103372, |
|
"grad_norm": 5.5625, |
|
"learning_rate": 9.998718391980113e-06, |
|
"loss": 1.676, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.025649530127142068, |
|
"grad_norm": 6.90625, |
|
"learning_rate": 9.998377983256851e-06, |
|
"loss": 1.664, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.026533996683250415, |
|
"grad_norm": 5.53125, |
|
"learning_rate": 9.99799753559161e-06, |
|
"loss": 1.7022, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.027418463239358763, |
|
"grad_norm": 3.28125, |
|
"learning_rate": 9.997577052031922e-06, |
|
"loss": 1.6794, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.02830292979546711, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 9.997116535946028e-06, |
|
"loss": 1.5217, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.029187396351575457, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 9.996615991022835e-06, |
|
"loss": 1.6774, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.030071862907683804, |
|
"grad_norm": 4.5, |
|
"learning_rate": 9.996075421271905e-06, |
|
"loss": 1.5377, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.03095632946379215, |
|
"grad_norm": 3.78125, |
|
"learning_rate": 9.99549483102341e-06, |
|
"loss": 1.6521, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.031840796019900496, |
|
"grad_norm": 3.375, |
|
"learning_rate": 9.9948742249281e-06, |
|
"loss": 1.5706, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03272526257600884, |
|
"grad_norm": 3.8125, |
|
"learning_rate": 9.994213607957273e-06, |
|
"loss": 1.6699, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.03360972913211719, |
|
"grad_norm": 3.375, |
|
"learning_rate": 9.993512985402724e-06, |
|
"loss": 1.5173, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.03449419568822554, |
|
"grad_norm": 4.34375, |
|
"learning_rate": 9.992772362876714e-06, |
|
"loss": 1.7547, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.035378662244333885, |
|
"grad_norm": 3.40625, |
|
"learning_rate": 9.991991746311916e-06, |
|
"loss": 1.6122, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03626312880044223, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 9.99117114196137e-06, |
|
"loss": 1.6103, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.03714759535655058, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 9.99031055639843e-06, |
|
"loss": 1.6719, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03803206191265893, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 9.989409996516726e-06, |
|
"loss": 1.6497, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.038916528468767274, |
|
"grad_norm": 3.515625, |
|
"learning_rate": 9.988469469530086e-06, |
|
"loss": 1.4652, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.03980099502487562, |
|
"grad_norm": 3.78125, |
|
"learning_rate": 9.9874889829725e-06, |
|
"loss": 1.6086, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04068546158098397, |
|
"grad_norm": 3.265625, |
|
"learning_rate": 9.986468544698041e-06, |
|
"loss": 1.4361, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.041569928137092316, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 9.985408162880813e-06, |
|
"loss": 1.5678, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.04245439469320066, |
|
"grad_norm": 3.265625, |
|
"learning_rate": 9.984307846014891e-06, |
|
"loss": 1.6907, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.04333886124930901, |
|
"grad_norm": 3.859375, |
|
"learning_rate": 9.983167602914236e-06, |
|
"loss": 1.6741, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.04422332780541736, |
|
"grad_norm": 3.25, |
|
"learning_rate": 9.981987442712634e-06, |
|
"loss": 1.5514, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.045107794361525705, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 9.980767374863627e-06, |
|
"loss": 1.5447, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.04599226091763405, |
|
"grad_norm": 3.265625, |
|
"learning_rate": 9.97950740914043e-06, |
|
"loss": 1.5559, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.0468767274737424, |
|
"grad_norm": 3.625, |
|
"learning_rate": 9.978207555635856e-06, |
|
"loss": 1.5351, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.04776119402985075, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 9.976867824762236e-06, |
|
"loss": 1.5018, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.048645660585959094, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 9.97548822725133e-06, |
|
"loss": 1.6645, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.04953012714206744, |
|
"grad_norm": 3.96875, |
|
"learning_rate": 9.974068774154252e-06, |
|
"loss": 1.6275, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.05041459369817579, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 9.972609476841368e-06, |
|
"loss": 1.5563, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.051299060254284136, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 9.971110347002213e-06, |
|
"loss": 1.4901, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.05218352681039248, |
|
"grad_norm": 3.296875, |
|
"learning_rate": 9.9695713966454e-06, |
|
"loss": 1.5567, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.05306799336650083, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 9.967992638098517e-06, |
|
"loss": 1.5663, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05395245992260918, |
|
"grad_norm": 3.609375, |
|
"learning_rate": 9.966374084008025e-06, |
|
"loss": 1.5848, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.054836926478717525, |
|
"grad_norm": 3.453125, |
|
"learning_rate": 9.964715747339178e-06, |
|
"loss": 1.5302, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.05572139303482587, |
|
"grad_norm": 3.609375, |
|
"learning_rate": 9.963017641375888e-06, |
|
"loss": 1.5876, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.05660585959093422, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 9.961279779720641e-06, |
|
"loss": 1.5807, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.05749032614704257, |
|
"grad_norm": 4.03125, |
|
"learning_rate": 9.959502176294384e-06, |
|
"loss": 1.6079, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.058374792703150914, |
|
"grad_norm": 3.9375, |
|
"learning_rate": 9.957684845336409e-06, |
|
"loss": 1.6045, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.05925925925925926, |
|
"grad_norm": 4.25, |
|
"learning_rate": 9.955827801404237e-06, |
|
"loss": 1.3866, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.06014372581536761, |
|
"grad_norm": 4.34375, |
|
"learning_rate": 9.95393105937351e-06, |
|
"loss": 1.5315, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.061028192371475956, |
|
"grad_norm": 4.25, |
|
"learning_rate": 9.951994634437866e-06, |
|
"loss": 1.5193, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.0619126589275843, |
|
"grad_norm": 4.0, |
|
"learning_rate": 9.950018542108818e-06, |
|
"loss": 1.5626, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0619126589275843, |
|
"eval_loss": 1.5007312297821045, |
|
"eval_runtime": 233.3408, |
|
"eval_samples_per_second": 68.912, |
|
"eval_steps_per_second": 8.614, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06279712548369265, |
|
"grad_norm": 3.828125, |
|
"learning_rate": 9.948002798215632e-06, |
|
"loss": 1.5859, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.06368159203980099, |
|
"grad_norm": 4.0625, |
|
"learning_rate": 9.945947418905193e-06, |
|
"loss": 1.6998, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.06456605859590935, |
|
"grad_norm": 4.59375, |
|
"learning_rate": 9.943852420641888e-06, |
|
"loss": 1.6379, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.06545052515201769, |
|
"grad_norm": 4.625, |
|
"learning_rate": 9.941717820207461e-06, |
|
"loss": 1.6244, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.06633499170812604, |
|
"grad_norm": 4.90625, |
|
"learning_rate": 9.939543634700891e-06, |
|
"loss": 1.5815, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06721945826423438, |
|
"grad_norm": 4.625, |
|
"learning_rate": 9.937329881538242e-06, |
|
"loss": 1.6174, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.06810392482034273, |
|
"grad_norm": 5.46875, |
|
"learning_rate": 9.935076578452535e-06, |
|
"loss": 1.461, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.06898839137645107, |
|
"grad_norm": 5.59375, |
|
"learning_rate": 9.932783743493599e-06, |
|
"loss": 1.6119, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.06987285793255943, |
|
"grad_norm": 5.65625, |
|
"learning_rate": 9.930451395027932e-06, |
|
"loss": 1.592, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.07075732448866777, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 9.928079551738542e-06, |
|
"loss": 1.551, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07164179104477612, |
|
"grad_norm": 6.96875, |
|
"learning_rate": 9.925668232624819e-06, |
|
"loss": 1.455, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.07252625760088446, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 9.923217457002356e-06, |
|
"loss": 1.6723, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.07341072415699282, |
|
"grad_norm": 7.1875, |
|
"learning_rate": 9.92072724450282e-06, |
|
"loss": 1.4825, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.07429519071310116, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 9.91819761507377e-06, |
|
"loss": 1.5309, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.07517965726920951, |
|
"grad_norm": 8.5, |
|
"learning_rate": 9.915628588978522e-06, |
|
"loss": 1.4844, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.07606412382531785, |
|
"grad_norm": 9.125, |
|
"learning_rate": 9.913020186795967e-06, |
|
"loss": 1.3599, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.07694859038142621, |
|
"grad_norm": 8.5, |
|
"learning_rate": 9.910372429420416e-06, |
|
"loss": 1.4616, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.07783305693753455, |
|
"grad_norm": 8.8125, |
|
"learning_rate": 9.907685338061433e-06, |
|
"loss": 1.579, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.0787175234936429, |
|
"grad_norm": 9.375, |
|
"learning_rate": 9.904958934243655e-06, |
|
"loss": 1.4275, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.07960199004975124, |
|
"grad_norm": 7.9375, |
|
"learning_rate": 9.902193239806634e-06, |
|
"loss": 1.593, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.0804864566058596, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 9.899388276904653e-06, |
|
"loss": 1.4569, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.08137092316196794, |
|
"grad_norm": 8.5, |
|
"learning_rate": 9.89654406800655e-06, |
|
"loss": 1.4831, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.08225538971807629, |
|
"grad_norm": 7.78125, |
|
"learning_rate": 9.893660635895542e-06, |
|
"loss": 1.4014, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.08313985627418463, |
|
"grad_norm": 6.6875, |
|
"learning_rate": 9.890738003669029e-06, |
|
"loss": 1.5062, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.08402432283029299, |
|
"grad_norm": 5.5, |
|
"learning_rate": 9.887776194738433e-06, |
|
"loss": 1.6041, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.08490878938640133, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 9.884775232828985e-06, |
|
"loss": 1.3051, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.08579325594250968, |
|
"grad_norm": 5.3125, |
|
"learning_rate": 9.881735141979554e-06, |
|
"loss": 1.5133, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.08667772249861802, |
|
"grad_norm": 4.875, |
|
"learning_rate": 9.878655946542443e-06, |
|
"loss": 1.6156, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.08756218905472637, |
|
"grad_norm": 4.90625, |
|
"learning_rate": 9.8755376711832e-06, |
|
"loss": 1.4838, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.08844665561083472, |
|
"grad_norm": 5.21875, |
|
"learning_rate": 9.872380340880416e-06, |
|
"loss": 1.4328, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08933112216694307, |
|
"grad_norm": 4.9375, |
|
"learning_rate": 9.869183980925531e-06, |
|
"loss": 1.4843, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.09021558872305141, |
|
"grad_norm": 4.8125, |
|
"learning_rate": 9.865948616922623e-06, |
|
"loss": 1.263, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.09110005527915975, |
|
"grad_norm": 4.34375, |
|
"learning_rate": 9.862674274788212e-06, |
|
"loss": 1.3736, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.0919845218352681, |
|
"grad_norm": 3.984375, |
|
"learning_rate": 9.85936098075104e-06, |
|
"loss": 1.3568, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.09286898839137644, |
|
"grad_norm": 3.6875, |
|
"learning_rate": 9.856008761351882e-06, |
|
"loss": 1.3707, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.0937534549474848, |
|
"grad_norm": 3.28125, |
|
"learning_rate": 9.85261764344331e-06, |
|
"loss": 1.3825, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.09463792150359314, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 9.849187654189486e-06, |
|
"loss": 1.4497, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.0955223880597015, |
|
"grad_norm": 2.875, |
|
"learning_rate": 9.845718821065957e-06, |
|
"loss": 1.5509, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.09640685461580983, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 9.842211171859418e-06, |
|
"loss": 1.3991, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.09729132117191819, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 9.838664734667496e-06, |
|
"loss": 1.4616, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.09817578772802653, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 9.835079537898523e-06, |
|
"loss": 1.3058, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.09906025428413488, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 9.831455610271315e-06, |
|
"loss": 1.3997, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.09994472084024322, |
|
"grad_norm": 2.25, |
|
"learning_rate": 9.827792980814934e-06, |
|
"loss": 1.4942, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.10082918739635158, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 9.82409167886846e-06, |
|
"loss": 1.4756, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.10171365395245992, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 9.820351734080754e-06, |
|
"loss": 1.4339, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.10259812050856827, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 9.81657317641022e-06, |
|
"loss": 1.4626, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.10348258706467661, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 9.81275603612457e-06, |
|
"loss": 1.3905, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.10436705362078497, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 9.808900343800568e-06, |
|
"loss": 1.3814, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.1052515201768933, |
|
"grad_norm": 2.640625, |
|
"learning_rate": 9.80500613032381e-06, |
|
"loss": 1.5248, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.10613598673300166, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 9.801073426888447e-06, |
|
"loss": 1.4267, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10702045328911, |
|
"grad_norm": 3.328125, |
|
"learning_rate": 9.79710226499696e-06, |
|
"loss": 1.297, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.10790491984521836, |
|
"grad_norm": 2.71875, |
|
"learning_rate": 9.79309267645989e-06, |
|
"loss": 1.4822, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.1087893864013267, |
|
"grad_norm": 3.375, |
|
"learning_rate": 9.789044693395593e-06, |
|
"loss": 1.5098, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.10967385295743505, |
|
"grad_norm": 2.375, |
|
"learning_rate": 9.784958348229978e-06, |
|
"loss": 1.5161, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.11055831951354339, |
|
"grad_norm": 2.625, |
|
"learning_rate": 9.780833673696255e-06, |
|
"loss": 1.636, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.11144278606965174, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 9.776670702834662e-06, |
|
"loss": 1.4871, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.11232725262576009, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 9.772469468992206e-06, |
|
"loss": 1.5421, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.11321171918186844, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 9.768230005822394e-06, |
|
"loss": 1.4452, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.11409618573797678, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 9.76395234728497e-06, |
|
"loss": 1.3874, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.11498065229408513, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 9.759636527645633e-06, |
|
"loss": 1.4366, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.11586511885019347, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 9.755282581475769e-06, |
|
"loss": 1.4365, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.11674958540630183, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 9.750890543652169e-06, |
|
"loss": 1.4513, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.11763405196241017, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 9.74646044935676e-06, |
|
"loss": 1.4241, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.11851851851851852, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 9.741992334076309e-06, |
|
"loss": 1.5274, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.11940298507462686, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 9.737486233602149e-06, |
|
"loss": 1.3942, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.12028745163073522, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 9.73294218402989e-06, |
|
"loss": 1.4371, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.12117191818684356, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 9.728360221759125e-06, |
|
"loss": 1.4276, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.12205638474295191, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 9.723740383493146e-06, |
|
"loss": 1.4581, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.12294085129906025, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 9.719082706238645e-06, |
|
"loss": 1.4475, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.1238253178551686, |
|
"grad_norm": 2.25, |
|
"learning_rate": 9.714387227305422e-06, |
|
"loss": 1.4863, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.1238253178551686, |
|
"eval_loss": 1.3686763048171997, |
|
"eval_runtime": 232.4295, |
|
"eval_samples_per_second": 69.182, |
|
"eval_steps_per_second": 8.648, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.12470978441127695, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 9.709653984306077e-06, |
|
"loss": 1.3077, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.1255942509673853, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 9.70488301515572e-06, |
|
"loss": 1.2957, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.12647871752349366, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 9.700074358071658e-06, |
|
"loss": 1.2871, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.12736318407960198, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 9.695228051573098e-06, |
|
"loss": 1.2346, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.12824765063571034, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 9.69034413448083e-06, |
|
"loss": 1.5424, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.1291321171918187, |
|
"grad_norm": 3.53125, |
|
"learning_rate": 9.68542264591692e-06, |
|
"loss": 1.4396, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.13001658374792704, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 9.680463625304393e-06, |
|
"loss": 1.3637, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.13090105030403537, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 9.675467112366925e-06, |
|
"loss": 1.5764, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.13178551686014373, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 9.670433147128522e-06, |
|
"loss": 1.3826, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.13266998341625208, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 9.665361769913187e-06, |
|
"loss": 1.4979, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.13355444997236043, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 9.660253021344618e-06, |
|
"loss": 1.3917, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.13443891652846876, |
|
"grad_norm": 2.625, |
|
"learning_rate": 9.65510694234587e-06, |
|
"loss": 1.5089, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.13532338308457711, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 9.649923574139023e-06, |
|
"loss": 1.3935, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.13620784964068547, |
|
"grad_norm": 2.78125, |
|
"learning_rate": 9.644702958244865e-06, |
|
"loss": 1.3694, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.1370923161967938, |
|
"grad_norm": 2.5, |
|
"learning_rate": 9.639445136482549e-06, |
|
"loss": 1.3446, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.13797678275290215, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 9.634150150969259e-06, |
|
"loss": 1.4295, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.1388612493090105, |
|
"grad_norm": 2.25, |
|
"learning_rate": 9.628818044119884e-06, |
|
"loss": 1.3697, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.13974571586511886, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 9.623448858646658e-06, |
|
"loss": 1.392, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.14063018242122718, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 9.618042637558838e-06, |
|
"loss": 1.4678, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.14151464897733554, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 9.612599424162344e-06, |
|
"loss": 1.3916, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1423991155334439, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 9.607119262059426e-06, |
|
"loss": 1.4652, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.14328358208955225, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 9.601602195148299e-06, |
|
"loss": 1.4551, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.14416804864566057, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 9.596048267622806e-06, |
|
"loss": 1.4511, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.14505251520176893, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 9.590457523972055e-06, |
|
"loss": 1.4071, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.14593698175787728, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 9.584830008980068e-06, |
|
"loss": 1.4284, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.14682144831398564, |
|
"grad_norm": 2.375, |
|
"learning_rate": 9.579165767725416e-06, |
|
"loss": 1.4594, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.14770591487009396, |
|
"grad_norm": 2.375, |
|
"learning_rate": 9.573464845580864e-06, |
|
"loss": 1.4633, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.14859038142620232, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 9.567727288213005e-06, |
|
"loss": 1.56, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.14947484798231067, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 9.561953141581895e-06, |
|
"loss": 1.3288, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.15035931453841903, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 9.55614245194068e-06, |
|
"loss": 1.3863, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.15124378109452735, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 9.550295265835235e-06, |
|
"loss": 1.3928, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.1521282476506357, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 9.544411630103782e-06, |
|
"loss": 1.4527, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.15301271420674406, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 9.538491591876522e-06, |
|
"loss": 1.3693, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.15389718076285241, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 9.53253519857525e-06, |
|
"loss": 1.2812, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.15478164731896074, |
|
"grad_norm": 2.125, |
|
"learning_rate": 9.526542497912984e-06, |
|
"loss": 1.4142, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.1556661138750691, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 9.520513537893574e-06, |
|
"loss": 1.4069, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.15655058043117745, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 9.514448366811322e-06, |
|
"loss": 1.3569, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.1574350469872858, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 9.508347033250597e-06, |
|
"loss": 1.4351, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.15831951354339413, |
|
"grad_norm": 2.375, |
|
"learning_rate": 9.502209586085444e-06, |
|
"loss": 1.4548, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.15920398009950248, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 9.496036074479184e-06, |
|
"loss": 1.5012, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.16008844665561084, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 9.48982654788404e-06, |
|
"loss": 1.4076, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.1609729132117192, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 9.48358105604072e-06, |
|
"loss": 1.3964, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.16185737976782752, |
|
"grad_norm": 2.5, |
|
"learning_rate": 9.47729964897803e-06, |
|
"loss": 1.4238, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.16274184632393587, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 9.470982377012475e-06, |
|
"loss": 1.3678, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.16362631288004423, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 9.464629290747844e-06, |
|
"loss": 1.4618, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.16451077943615258, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 9.458240441074818e-06, |
|
"loss": 1.3658, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.1653952459922609, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 9.451815879170556e-06, |
|
"loss": 1.4175, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.16627971254836926, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 9.445355656498284e-06, |
|
"loss": 1.4043, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.16716417910447762, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 9.438859824806889e-06, |
|
"loss": 1.3923, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.16804864566058597, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 9.432328436130493e-06, |
|
"loss": 1.3122, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.1689331122166943, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 9.425761542788049e-06, |
|
"loss": 1.4381, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.16981757877280265, |
|
"grad_norm": 2.375, |
|
"learning_rate": 9.419159197382913e-06, |
|
"loss": 1.3126, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.170702045328911, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 9.412521452802425e-06, |
|
"loss": 1.4128, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.17158651188501936, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 9.40584836221749e-06, |
|
"loss": 1.2311, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.1724709784411277, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 9.399139979082148e-06, |
|
"loss": 1.402, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.17335544499723604, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 9.39239635713314e-06, |
|
"loss": 1.3014, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.1742399115533444, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 9.38561755038949e-06, |
|
"loss": 1.3299, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.17512437810945275, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 9.37880361315206e-06, |
|
"loss": 1.4074, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.17600884466556108, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 9.37195460000313e-06, |
|
"loss": 1.306, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.17689331122166943, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 9.365070565805941e-06, |
|
"loss": 1.445, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17777777777777778, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 9.358151565704275e-06, |
|
"loss": 1.3732, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.17866224433388614, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 9.351197655121995e-06, |
|
"loss": 1.3544, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.17954671088999447, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 9.34420888976262e-06, |
|
"loss": 1.367, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.18043117744610282, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 9.337185325608864e-06, |
|
"loss": 1.4574, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.18131564400221117, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 9.330127018922195e-06, |
|
"loss": 1.4689, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.1822001105583195, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 9.323034026242378e-06, |
|
"loss": 1.2427, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.18308457711442785, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 9.315906404387032e-06, |
|
"loss": 1.3404, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.1839690436705362, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 9.308744210451167e-06, |
|
"loss": 1.4636, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.18485351022664456, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 9.301547501806725e-06, |
|
"loss": 1.3681, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.1857379767827529, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 9.294316336102132e-06, |
|
"loss": 1.3969, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.1857379767827529, |
|
"eval_loss": 1.3509321212768555, |
|
"eval_runtime": 232.201, |
|
"eval_samples_per_second": 69.25, |
|
"eval_steps_per_second": 8.656, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.18662244333886124, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 9.287050771261818e-06, |
|
"loss": 1.3528, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.1875069098949696, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 9.279750865485772e-06, |
|
"loss": 1.3776, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.18839137645107795, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 9.272416677249067e-06, |
|
"loss": 1.3311, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.18927584300718628, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 9.265048265301385e-06, |
|
"loss": 1.4167, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.19016030956329463, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 9.257645688666557e-06, |
|
"loss": 1.2651, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.191044776119403, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 9.25020900664209e-06, |
|
"loss": 1.4641, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.19192924267551134, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 9.242738278798682e-06, |
|
"loss": 1.3616, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.19281370923161967, |
|
"grad_norm": 2.25, |
|
"learning_rate": 9.235233564979756e-06, |
|
"loss": 1.4412, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.19369817578772802, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 9.227694925300972e-06, |
|
"loss": 1.372, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.19458264234383638, |
|
"grad_norm": 2.375, |
|
"learning_rate": 9.220122420149753e-06, |
|
"loss": 1.4577, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.19546710889994473, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 9.212516110184794e-06, |
|
"loss": 1.2607, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.19635157545605306, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 9.204876056335584e-06, |
|
"loss": 1.394, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.1972360420121614, |
|
"grad_norm": 2.875, |
|
"learning_rate": 9.197202319801908e-06, |
|
"loss": 1.4744, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.19812050856826977, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 9.18949496205337e-06, |
|
"loss": 1.3798, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.19900497512437812, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 9.181754044828882e-06, |
|
"loss": 1.5303, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.19988944168048645, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 9.17397963013619e-06, |
|
"loss": 1.5118, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.2007739082365948, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 9.166171780251365e-06, |
|
"loss": 1.4777, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.20165837479270315, |
|
"grad_norm": 2.875, |
|
"learning_rate": 9.158330557718304e-06, |
|
"loss": 1.3949, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.2025428413488115, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 9.150456025348236e-06, |
|
"loss": 1.6279, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.20342730790491984, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 9.142548246219212e-06, |
|
"loss": 1.3632, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.2043117744610282, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 9.134607283675601e-06, |
|
"loss": 1.3441, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.20519624101713654, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 9.12663320132759e-06, |
|
"loss": 1.402, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.2060807075732449, |
|
"grad_norm": 3.375, |
|
"learning_rate": 9.118626063050661e-06, |
|
"loss": 1.5275, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.20696517412935322, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 9.110585932985095e-06, |
|
"loss": 1.38, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.20784964068546158, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 9.102512875535439e-06, |
|
"loss": 1.3739, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.20873410724156993, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 9.09440695537001e-06, |
|
"loss": 1.495, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.2096185737976783, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 9.086268237420368e-06, |
|
"loss": 1.3662, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.2105030403537866, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 9.07809678688079e-06, |
|
"loss": 1.4109, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.21138750690989497, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 9.069892669207757e-06, |
|
"loss": 1.3512, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.21227197346600332, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 9.06165595011943e-06, |
|
"loss": 1.4568, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21315644002211168, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 9.053386695595114e-06, |
|
"loss": 1.4943, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.21404090657822, |
|
"grad_norm": 2.671875, |
|
"learning_rate": 9.045084971874738e-06, |
|
"loss": 1.503, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.21492537313432836, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 9.036750845458323e-06, |
|
"loss": 1.4121, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.2158098396904367, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 9.028384383105444e-06, |
|
"loss": 1.3212, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.21669430624654507, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 9.019985651834703e-06, |
|
"loss": 1.3527, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.2175787728026534, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 9.01155471892319e-06, |
|
"loss": 1.246, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.21846323935876175, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 9.00309165190594e-06, |
|
"loss": 1.4277, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.2193477059148701, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 8.994596518575393e-06, |
|
"loss": 1.4682, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.22023217247097845, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 8.986069386980855e-06, |
|
"loss": 1.3714, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.22111663902708678, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 8.97751032542795e-06, |
|
"loss": 1.3973, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.22200110558319514, |
|
"grad_norm": 2.5, |
|
"learning_rate": 8.968919402478076e-06, |
|
"loss": 1.4555, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.2228855721393035, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 8.960296686947846e-06, |
|
"loss": 1.22, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.22377003869541182, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 8.95164224790855e-06, |
|
"loss": 1.3659, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.22465450525152017, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 8.942956154685596e-06, |
|
"loss": 1.3025, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.22553897180762852, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 8.93423847685795e-06, |
|
"loss": 1.3139, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.22642343836373688, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 8.925489284257587e-06, |
|
"loss": 1.4009, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.2273079049198452, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 8.916708646968924e-06, |
|
"loss": 1.4532, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.22819237147595356, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 8.907896635328262e-06, |
|
"loss": 1.387, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.2290768380320619, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 8.899053319923228e-06, |
|
"loss": 1.3644, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.22996130458817027, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 8.890178771592198e-06, |
|
"loss": 1.4128, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.2308457711442786, |
|
"grad_norm": 2.25, |
|
"learning_rate": 8.881273061423741e-06, |
|
"loss": 1.5197, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.23173023770038695, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 8.87233626075604e-06, |
|
"loss": 1.3812, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.2326147042564953, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 8.863368441176326e-06, |
|
"loss": 1.2539, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.23349917081260366, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 8.854369674520306e-06, |
|
"loss": 1.4247, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.23438363736871198, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 8.845340032871584e-06, |
|
"loss": 1.3402, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.23526810392482034, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 8.836279588561084e-06, |
|
"loss": 1.3674, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.2361525704809287, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 8.827188414166468e-06, |
|
"loss": 1.4622, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.23703703703703705, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 8.818066582511564e-06, |
|
"loss": 1.323, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.23792150359314537, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 8.808914166665773e-06, |
|
"loss": 1.4084, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.23880597014925373, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 8.799731239943488e-06, |
|
"loss": 1.3691, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.23969043670536208, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 8.790517875903506e-06, |
|
"loss": 1.2452, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.24057490326147044, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 8.781274148348438e-06, |
|
"loss": 1.3614, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.24145936981757876, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 8.772000131324117e-06, |
|
"loss": 1.4875, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.24234383637368712, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 8.762695899119012e-06, |
|
"loss": 1.2157, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.24322830292979547, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 8.753361526263622e-06, |
|
"loss": 1.6414, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.24411276948590382, |
|
"grad_norm": 2.375, |
|
"learning_rate": 8.743997087529886e-06, |
|
"loss": 1.2842, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.24499723604201215, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 8.734602657930582e-06, |
|
"loss": 1.4622, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.2458817025981205, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 8.725178312718727e-06, |
|
"loss": 1.4923, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.24676616915422886, |
|
"grad_norm": 2.25, |
|
"learning_rate": 8.715724127386971e-06, |
|
"loss": 1.4421, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.2476506357103372, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 8.706240177667003e-06, |
|
"loss": 1.3777, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.2476506357103372, |
|
"eval_loss": 1.340869665145874, |
|
"eval_runtime": 232.0202, |
|
"eval_samples_per_second": 69.304, |
|
"eval_steps_per_second": 8.663, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.24853510226644554, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 8.696726539528924e-06, |
|
"loss": 1.2984, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.2494195688225539, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 8.687183289180659e-06, |
|
"loss": 1.4034, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.25030403537866225, |
|
"grad_norm": 2.125, |
|
"learning_rate": 8.67761050306734e-06, |
|
"loss": 1.4313, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.2511885019347706, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 8.668008257870684e-06, |
|
"loss": 1.3227, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.25207296849087896, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 8.658376630508391e-06, |
|
"loss": 1.3931, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.2529574350469873, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 8.648715698133529e-06, |
|
"loss": 1.4474, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.2538419016030956, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 8.639025538133899e-06, |
|
"loss": 1.204, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.25472636815920396, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 8.629306228131435e-06, |
|
"loss": 1.1931, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.2556108347153123, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 8.619557845981571e-06, |
|
"loss": 1.4676, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.2564953012714207, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 8.609780469772623e-06, |
|
"loss": 1.3624, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.257379767827529, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 8.599974177825155e-06, |
|
"loss": 1.3875, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.2582642343836374, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 8.590139048691361e-06, |
|
"loss": 1.2976, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.25914870093974574, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 8.580275161154432e-06, |
|
"loss": 1.4271, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.2600331674958541, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 8.570382594227923e-06, |
|
"loss": 1.377, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.2609176340519624, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 8.56046142715513e-06, |
|
"loss": 1.3283, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.26180210060807074, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 8.550511739408428e-06, |
|
"loss": 1.3989, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.2626865671641791, |
|
"grad_norm": 2.125, |
|
"learning_rate": 8.540533610688679e-06, |
|
"loss": 1.3885, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.26357103372028745, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 8.53052712092455e-06, |
|
"loss": 1.4217, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.2644555002763958, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 8.520492350271895e-06, |
|
"loss": 1.2601, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.26533996683250416, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 8.510429379113114e-06, |
|
"loss": 1.4437, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.2662244333886125, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 8.500338288056496e-06, |
|
"loss": 1.3637, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.26710889994472087, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 8.490219157935589e-06, |
|
"loss": 1.3865, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.26799336650082917, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 8.480072069808541e-06, |
|
"loss": 1.4416, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.2688778330569375, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 8.469897104957454e-06, |
|
"loss": 1.3866, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.2697622996130459, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 8.459694344887732e-06, |
|
"loss": 1.4329, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.27064676616915423, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 8.449463871327434e-06, |
|
"loss": 1.4178, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.2715312327252626, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 8.439205766226613e-06, |
|
"loss": 1.3813, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.27241569928137094, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 8.428920111756658e-06, |
|
"loss": 1.5067, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.2733001658374793, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 8.418606990309645e-06, |
|
"loss": 1.4275, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.2741846323935876, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 8.408266484497664e-06, |
|
"loss": 1.3738, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.27506909894969594, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 8.397898677152173e-06, |
|
"loss": 1.2003, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.2759535655058043, |
|
"grad_norm": 2.25, |
|
"learning_rate": 8.387503651323317e-06, |
|
"loss": 1.3476, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.27683803206191265, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 8.377081490279278e-06, |
|
"loss": 1.4104, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.277722498618021, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 8.366632277505598e-06, |
|
"loss": 1.3786, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.27860696517412936, |
|
"grad_norm": 2.125, |
|
"learning_rate": 8.356156096704516e-06, |
|
"loss": 1.4073, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.2794914317302377, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 8.345653031794292e-06, |
|
"loss": 1.5986, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.28037589828634607, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 8.335123166908544e-06, |
|
"loss": 1.4255, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.28126036484245437, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 8.324566586395563e-06, |
|
"loss": 1.4524, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.2821448313985627, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 8.313983374817643e-06, |
|
"loss": 1.4044, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.2830292979546711, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 8.303373616950408e-06, |
|
"loss": 1.3609, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.28391376451077943, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 8.29273739778212e-06, |
|
"loss": 1.5053, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.2847982310668878, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 8.282074802513016e-06, |
|
"loss": 1.3513, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.28568269762299614, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 8.271385916554605e-06, |
|
"loss": 1.4534, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.2865671641791045, |
|
"grad_norm": 2.375, |
|
"learning_rate": 8.260670825529002e-06, |
|
"loss": 1.2827, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.28745163073521285, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 8.249929615268234e-06, |
|
"loss": 1.3483, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.28833609729132115, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 8.239162371813552e-06, |
|
"loss": 1.3943, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.2892205638474295, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 8.22836918141474e-06, |
|
"loss": 1.3578, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.29010503040353786, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 8.217550130529433e-06, |
|
"loss": 1.3517, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.2909894969596462, |
|
"grad_norm": 2.375, |
|
"learning_rate": 8.206705305822414e-06, |
|
"loss": 1.422, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.29187396351575456, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 8.195834794164925e-06, |
|
"loss": 1.3983, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.2927584300718629, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 8.184938682633973e-06, |
|
"loss": 1.3837, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.2936428966279713, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 8.17401705851163e-06, |
|
"loss": 1.4879, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.2945273631840796, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 8.163070009284326e-06, |
|
"loss": 1.3282, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.2954118297401879, |
|
"grad_norm": 2.375, |
|
"learning_rate": 8.152097622642167e-06, |
|
"loss": 1.3357, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.2962962962962963, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 8.141099986478212e-06, |
|
"loss": 1.2913, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.29718076285240463, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 8.130077188887786e-06, |
|
"loss": 1.4665, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.298065229408513, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 8.119029318167763e-06, |
|
"loss": 1.2694, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.29894969596462134, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 8.107956462815862e-06, |
|
"loss": 1.3418, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.2998341625207297, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 8.096858711529938e-06, |
|
"loss": 1.4219, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.30071862907683805, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 8.085736153207277e-06, |
|
"loss": 1.4296, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.3016030956329464, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 8.074588876943872e-06, |
|
"loss": 1.3679, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.3024875621890547, |
|
"grad_norm": 2.71875, |
|
"learning_rate": 8.063416972033725e-06, |
|
"loss": 1.2633, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.30337202874516306, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 8.05222052796811e-06, |
|
"loss": 1.4455, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.3042564953012714, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 8.040999634434883e-06, |
|
"loss": 1.35, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.30514096185737977, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 8.029754381317741e-06, |
|
"loss": 1.3095, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.3060254284134881, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 8.018484858695514e-06, |
|
"loss": 1.4285, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.3069098949695965, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 8.00719115684144e-06, |
|
"loss": 1.3694, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.30779436152570483, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 7.995873366222442e-06, |
|
"loss": 1.5045, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.3086788280818132, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 7.9845315774984e-06, |
|
"loss": 1.4376, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.3095632946379215, |
|
"grad_norm": 2.125, |
|
"learning_rate": 7.973165881521435e-06, |
|
"loss": 1.3382, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.3095632946379215, |
|
"eval_loss": 1.3338196277618408, |
|
"eval_runtime": 231.9317, |
|
"eval_samples_per_second": 69.331, |
|
"eval_steps_per_second": 8.666, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.31044776119402984, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 7.961776369335164e-06, |
|
"loss": 1.4081, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.3113322277501382, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 7.950363132173992e-06, |
|
"loss": 1.4814, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.31221669430624654, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 7.938926261462366e-06, |
|
"loss": 1.4584, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.3131011608623549, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 7.927465848814045e-06, |
|
"loss": 1.5012, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.31398562741846325, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 7.915981986031367e-06, |
|
"loss": 1.4007, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.3148700939745716, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 7.90447476510452e-06, |
|
"loss": 1.2796, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.3157545605306799, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 7.892944278210796e-06, |
|
"loss": 1.2907, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.31663902708678826, |
|
"grad_norm": 2.640625, |
|
"learning_rate": 7.88139061771386e-06, |
|
"loss": 1.4433, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.3175234936428966, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 7.869813876162997e-06, |
|
"loss": 1.2836, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.31840796019900497, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 7.858214146292394e-06, |
|
"loss": 1.3365, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.3192924267551133, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7.846591521020373e-06, |
|
"loss": 1.437, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.3201768933112217, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7.834946093448658e-06, |
|
"loss": 1.2714, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.32106135986733003, |
|
"grad_norm": 2.125, |
|
"learning_rate": 7.823277956861635e-06, |
|
"loss": 1.4181, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.3219458264234384, |
|
"grad_norm": 2.25, |
|
"learning_rate": 7.811587204725593e-06, |
|
"loss": 1.4788, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.3228302929795467, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 7.799873930687979e-06, |
|
"loss": 1.3304, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.32371475953565504, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 7.78813822857665e-06, |
|
"loss": 1.3729, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.3245992260917634, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 7.776380192399122e-06, |
|
"loss": 1.3222, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.32548369264787175, |
|
"grad_norm": 2.125, |
|
"learning_rate": 7.764599916341817e-06, |
|
"loss": 1.3088, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.3263681592039801, |
|
"grad_norm": 2.5, |
|
"learning_rate": 7.752797494769307e-06, |
|
"loss": 1.3756, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.32725262576008846, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 7.74097302222355e-06, |
|
"loss": 1.3111, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.3281370923161968, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7.729126593423151e-06, |
|
"loss": 1.386, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.32902155887230516, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7.71725830326259e-06, |
|
"loss": 1.3747, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.32990602542841346, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 7.70536824681146e-06, |
|
"loss": 1.3323, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.3307904919845218, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7.69345651931372e-06, |
|
"loss": 1.4747, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.33167495854063017, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 7.681523216186912e-06, |
|
"loss": 1.3596, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.3325594250967385, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 7.669568433021418e-06, |
|
"loss": 1.2835, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.3334438916528469, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7.65759226557967e-06, |
|
"loss": 1.3523, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.33432835820895523, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 7.64559480979541e-06, |
|
"loss": 1.327, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.3352128247650636, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 7.633576161772898e-06, |
|
"loss": 1.2579, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.33609729132117194, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 7.621536417786159e-06, |
|
"loss": 1.362, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.33698175787728024, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 7.6094756742782e-06, |
|
"loss": 1.3091, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.3378662244333886, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 7.597394027860243e-06, |
|
"loss": 1.4317, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.33875069098949695, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 7.585291575310952e-06, |
|
"loss": 1.3915, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.3396351575456053, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 7.5731684135756566e-06, |
|
"loss": 1.3549, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.34051962410171366, |
|
"grad_norm": 2.25, |
|
"learning_rate": 7.5610246397655715e-06, |
|
"loss": 1.3647, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.341404090657822, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 7.548860351157028e-06, |
|
"loss": 1.3884, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.34228855721393037, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 7.536675645190682e-06, |
|
"loss": 1.3217, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.3431730237700387, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 7.524470619470745e-06, |
|
"loss": 1.2801, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.344057490326147, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 7.512245371764197e-06, |
|
"loss": 1.4198, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.3449419568822554, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 1.547, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.34582642343836373, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 7.487734602268326e-06, |
|
"loss": 1.3439, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.3467108899944721, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 7.475449276819753e-06, |
|
"loss": 1.4559, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.34759535655058044, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 7.463144122064494e-06, |
|
"loss": 1.3188, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.3484798231066888, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 7.4508192365716005e-06, |
|
"loss": 1.3856, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.34936428966279715, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 7.438474719068174e-06, |
|
"loss": 1.3753, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.3502487562189055, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 7.426110668438578e-06, |
|
"loss": 1.3117, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.3511332227750138, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 7.413727183723642e-06, |
|
"loss": 1.3912, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.35201768933112215, |
|
"grad_norm": 2.125, |
|
"learning_rate": 7.401324364119872e-06, |
|
"loss": 1.3095, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.3529021558872305, |
|
"grad_norm": 2.125, |
|
"learning_rate": 7.388902308978652e-06, |
|
"loss": 1.3545, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.35378662244333886, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7.37646111780545e-06, |
|
"loss": 1.4058, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.3546710889994472, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 7.364000890259024e-06, |
|
"loss": 1.3587, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.35555555555555557, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 7.351521726150619e-06, |
|
"loss": 1.348, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.3564400221116639, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 7.339023725443166e-06, |
|
"loss": 1.322, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.3573244886677723, |
|
"grad_norm": 2.25, |
|
"learning_rate": 7.326506988250488e-06, |
|
"loss": 1.3455, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.3582089552238806, |
|
"grad_norm": 2.25, |
|
"learning_rate": 7.313971614836496e-06, |
|
"loss": 1.3546, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.35909342177998893, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 7.3014177056143795e-06, |
|
"loss": 1.4034, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.3599778883360973, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 7.288845361145812e-06, |
|
"loss": 1.2737, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.36086235489220564, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7.276254682140137e-06, |
|
"loss": 1.3589, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.361746821448314, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 7.2636457694535655e-06, |
|
"loss": 1.4803, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.36263128800442235, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 7.251018724088367e-06, |
|
"loss": 1.3554, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.3635157545605307, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 7.238373647192063e-06, |
|
"loss": 1.338, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.364400221116639, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 7.225710640056614e-06, |
|
"loss": 1.3909, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.36528468767274735, |
|
"grad_norm": 2.25, |
|
"learning_rate": 7.213029804117604e-06, |
|
"loss": 1.33, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.3661691542288557, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 7.200331240953439e-06, |
|
"loss": 1.4574, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.36705362078496406, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 7.187615052284522e-06, |
|
"loss": 1.4585, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.3679380873410724, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 7.174881339972448e-06, |
|
"loss": 1.4293, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.36882255389718077, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 7.16213020601918e-06, |
|
"loss": 1.3263, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.3697070204532891, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 7.149361752566233e-06, |
|
"loss": 1.3842, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.3705914870093975, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 7.136576081893863e-06, |
|
"loss": 1.3817, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.3714759535655058, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 7.12377329642024e-06, |
|
"loss": 1.4406, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.3714759535655058, |
|
"eval_loss": 1.3291162252426147, |
|
"eval_runtime": 231.9227, |
|
"eval_samples_per_second": 69.333, |
|
"eval_steps_per_second": 8.667, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.37236042012161413, |
|
"grad_norm": 2.125, |
|
"learning_rate": 7.110953498700629e-06, |
|
"loss": 1.4744, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.3732448866777225, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 7.09811679142657e-06, |
|
"loss": 1.39, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.37412935323383084, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 7.085263277425058e-06, |
|
"loss": 1.5196, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.3750138197899392, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 7.072393059657716e-06, |
|
"loss": 1.4646, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.37589828634604755, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 7.059506241219964e-06, |
|
"loss": 1.3383, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.3767827529021559, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7.046602925340209e-06, |
|
"loss": 1.4669, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.37766721945826426, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 7.033683215379002e-06, |
|
"loss": 1.3031, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.37855168601437256, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 7.020747214828221e-06, |
|
"loss": 1.4132, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.3794361525704809, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 7.007795027310236e-06, |
|
"loss": 1.3288, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.38032061912658927, |
|
"grad_norm": 2.125, |
|
"learning_rate": 6.994826756577082e-06, |
|
"loss": 1.3066, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.3812050856826976, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 6.981842506509626e-06, |
|
"loss": 1.4464, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.382089552238806, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 6.968842381116736e-06, |
|
"loss": 1.3772, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.38297401879491433, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 6.955826484534447e-06, |
|
"loss": 1.3471, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.3838584853510227, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 6.942794921025127e-06, |
|
"loss": 1.3815, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.38474295190713104, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 6.9297477949766445e-06, |
|
"loss": 1.3692, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.38562741846323934, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 6.916685210901527e-06, |
|
"loss": 1.4499, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.3865118850193477, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 6.903607273436128e-06, |
|
"loss": 1.4358, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.38739635157545604, |
|
"grad_norm": 2.125, |
|
"learning_rate": 6.890514087339789e-06, |
|
"loss": 1.4993, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.3882808181315644, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 6.8774057574939956e-06, |
|
"loss": 1.3827, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.38916528468767275, |
|
"grad_norm": 2.125, |
|
"learning_rate": 6.864282388901544e-06, |
|
"loss": 1.3918, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.3900497512437811, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 6.851144086685695e-06, |
|
"loss": 1.3348, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.39093421779988946, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 6.837990956089334e-06, |
|
"loss": 1.4058, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.3918186843559978, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 6.824823102474127e-06, |
|
"loss": 1.2347, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.3927031509121061, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 6.811640631319677e-06, |
|
"loss": 1.3793, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.39358761746821447, |
|
"grad_norm": 2.375, |
|
"learning_rate": 6.79844364822268e-06, |
|
"loss": 1.406, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.3944720840243228, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 6.785232258896078e-06, |
|
"loss": 1.3245, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.3953565505804312, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 6.77200656916821e-06, |
|
"loss": 1.4432, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.39624101713653953, |
|
"grad_norm": 2.125, |
|
"learning_rate": 6.758766684981972e-06, |
|
"loss": 1.2411, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.3971254836926479, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 6.745512712393958e-06, |
|
"loss": 1.3898, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.39800995024875624, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 6.732244757573619e-06, |
|
"loss": 1.3282, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.3988944168048646, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 6.718962926802407e-06, |
|
"loss": 1.4219, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.3997788833609729, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 6.705667326472926e-06, |
|
"loss": 1.4502, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.40066334991708125, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 6.69235806308808e-06, |
|
"loss": 1.4133, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.4015478164731896, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 6.67903524326022e-06, |
|
"loss": 1.359, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.40243228302929795, |
|
"grad_norm": 2.125, |
|
"learning_rate": 6.665698973710289e-06, |
|
"loss": 1.4398, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.4033167495854063, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 6.652349361266967e-06, |
|
"loss": 1.4904, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.40420121614151466, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 6.638986512865814e-06, |
|
"loss": 1.4179, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.405085682697623, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 6.625610535548418e-06, |
|
"loss": 1.346, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.4059701492537313, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 6.612221536461534e-06, |
|
"loss": 1.2897, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.40685461580983967, |
|
"grad_norm": 2.25, |
|
"learning_rate": 6.598819622856227e-06, |
|
"loss": 1.3319, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.407739082365948, |
|
"grad_norm": 2.125, |
|
"learning_rate": 6.585404902087011e-06, |
|
"loss": 1.3922, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.4086235489220564, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 6.571977481610989e-06, |
|
"loss": 1.3128, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.40950801547816473, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 6.558537468986998e-06, |
|
"loss": 1.3726, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.4103924820342731, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 6.545084971874738e-06, |
|
"loss": 1.3356, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.41127694859038144, |
|
"grad_norm": 2.25, |
|
"learning_rate": 6.531620098033919e-06, |
|
"loss": 1.413, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.4121614151464898, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 6.518142955323389e-06, |
|
"loss": 1.4754, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.4130458817025981, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 6.504653651700278e-06, |
|
"loss": 1.3714, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.41393034825870645, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 6.4911522952191275e-06, |
|
"loss": 1.513, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.4148148148148148, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 6.477638994031028e-06, |
|
"loss": 1.3935, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.41569928137092316, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 6.464113856382752e-06, |
|
"loss": 1.3362, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.4165837479270315, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 6.450576990615888e-06, |
|
"loss": 1.4558, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.41746821448313987, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 6.437028505165965e-06, |
|
"loss": 1.3723, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.4183526810392482, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 6.423468508561599e-06, |
|
"loss": 1.3712, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.4192371475953566, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 6.409897109423607e-06, |
|
"loss": 1.3105, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.4201216141514649, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 6.396314416464151e-06, |
|
"loss": 1.5579, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.4210060807075732, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 6.382720538485856e-06, |
|
"loss": 1.3702, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.4218905472636816, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 6.3691155843809475e-06, |
|
"loss": 1.467, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.42277501381978994, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 6.355499663130369e-06, |
|
"loss": 1.243, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.4236594803758983, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 6.341872883802923e-06, |
|
"loss": 1.4165, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.42454394693200664, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 6.328235355554382e-06, |
|
"loss": 1.3261, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.425428413488115, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 6.314587187626628e-06, |
|
"loss": 1.4347, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.42631288004422335, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 6.3009284893467655e-06, |
|
"loss": 1.4363, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.42719734660033165, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 6.287259370126255e-06, |
|
"loss": 1.3918, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.42808181315644, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 6.273579939460032e-06, |
|
"loss": 1.2938, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.42896627971254836, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 6.259890306925627e-06, |
|
"loss": 1.3493, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.4298507462686567, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 6.246190582182296e-06, |
|
"loss": 1.3912, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.43073521282476507, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 6.232480874970137e-06, |
|
"loss": 1.3116, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.4316196793808734, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 6.218761295109209e-06, |
|
"loss": 1.3492, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.4325041459369818, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 6.205031952498656e-06, |
|
"loss": 1.2913, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.43338861249309013, |
|
"grad_norm": 2.125, |
|
"learning_rate": 6.191292957115825e-06, |
|
"loss": 1.4216, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.43338861249309013, |
|
"eval_loss": 1.3258094787597656, |
|
"eval_runtime": 232.1391, |
|
"eval_samples_per_second": 69.269, |
|
"eval_steps_per_second": 8.659, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.43427307904919843, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 6.177544419015388e-06, |
|
"loss": 1.2843, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.4351575456053068, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 6.163786448328455e-06, |
|
"loss": 1.2391, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.43604201216141514, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 6.150019155261694e-06, |
|
"loss": 1.4237, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.4369264787175235, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 6.136242650096451e-06, |
|
"loss": 1.4529, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.43781094527363185, |
|
"grad_norm": 2.375, |
|
"learning_rate": 6.122457043187863e-06, |
|
"loss": 1.3786, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.4386954118297402, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 6.108662444963979e-06, |
|
"loss": 1.2776, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.43957987838584855, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 6.094858965924866e-06, |
|
"loss": 1.3655, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.4404643449419569, |
|
"grad_norm": 2.125, |
|
"learning_rate": 6.081046716641735e-06, |
|
"loss": 1.398, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.4413488114980652, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 6.0672258077560475e-06, |
|
"loss": 1.3377, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.44223327805417356, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 6.053396349978632e-06, |
|
"loss": 1.4135, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.4431177446102819, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 6.039558454088796e-06, |
|
"loss": 1.3852, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.44400221116639027, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 6.025712230933443e-06, |
|
"loss": 1.3643, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.4448866777224986, |
|
"grad_norm": 2.375, |
|
"learning_rate": 6.0118577914261786e-06, |
|
"loss": 1.1846, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.445771144278607, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 5.997995246546424e-06, |
|
"loss": 1.1764, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.44665561083471533, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 5.984124707338528e-06, |
|
"loss": 1.3886, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.44754007739082363, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 5.970246284910877e-06, |
|
"loss": 1.4707, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.448424543946932, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 5.9563600904350074e-06, |
|
"loss": 1.2391, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.44930901050304034, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 5.942466235144708e-06, |
|
"loss": 1.4436, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.4501934770591487, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 5.9285648303351404e-06, |
|
"loss": 1.2727, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.45107794361525705, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 5.914655987361934e-06, |
|
"loss": 1.377, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.4519624101713654, |
|
"grad_norm": 2.125, |
|
"learning_rate": 5.900739817640304e-06, |
|
"loss": 1.39, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.45284687672747376, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 5.886816432644155e-06, |
|
"loss": 1.4615, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.4537313432835821, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 5.872885943905191e-06, |
|
"loss": 1.4085, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.4546158098396904, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 5.858948463012015e-06, |
|
"loss": 1.3797, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.45550027639579876, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 5.8450041016092465e-06, |
|
"loss": 1.315, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.4563847429519071, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 5.831052971396613e-06, |
|
"loss": 1.423, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.4572692095080155, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 5.817095184128067e-06, |
|
"loss": 1.4338, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.4581536760641238, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 5.803130851610887e-06, |
|
"loss": 1.397, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.4590381426202322, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 5.78916008570478e-06, |
|
"loss": 1.4068, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.45992260917634054, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 5.77518299832099e-06, |
|
"loss": 1.363, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.4608070757324489, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 5.761199701421392e-06, |
|
"loss": 1.4428, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.4616915422885572, |
|
"grad_norm": 2.25, |
|
"learning_rate": 5.747210307017609e-06, |
|
"loss": 1.3775, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.46257600884466554, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 5.733214927170101e-06, |
|
"loss": 1.3901, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.4634604754007739, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 5.719213673987277e-06, |
|
"loss": 1.3603, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.46434494195688225, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 5.705206659624597e-06, |
|
"loss": 1.477, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.4652294085129906, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 5.6911939962836625e-06, |
|
"loss": 1.3506, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.46611387506909896, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 5.6771757962113325e-06, |
|
"loss": 1.157, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.4669983416252073, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 5.663152171698813e-06, |
|
"loss": 1.4238, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.46788280818131567, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 5.649123235080762e-06, |
|
"loss": 1.2986, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.46876727473742397, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 5.635089098734394e-06, |
|
"loss": 1.4711, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.4696517412935323, |
|
"grad_norm": 2.125, |
|
"learning_rate": 5.621049875078571e-06, |
|
"loss": 1.3763, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.4705362078496407, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 5.6070056765729065e-06, |
|
"loss": 1.1883, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.47142067440574903, |
|
"grad_norm": 2.375, |
|
"learning_rate": 5.592956615716867e-06, |
|
"loss": 1.4552, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.4723051409618574, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 5.578902805048866e-06, |
|
"loss": 1.2559, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.47318960751796574, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 5.564844357145365e-06, |
|
"loss": 1.3076, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.4740740740740741, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 5.550781384619974e-06, |
|
"loss": 1.3541, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.47495854063018245, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 5.536714000122543e-06, |
|
"loss": 1.2893, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.47584300718629075, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 5.522642316338268e-06, |
|
"loss": 1.4, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.4767274737423991, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 5.50856644598678e-06, |
|
"loss": 1.3689, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.47761194029850745, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 5.49448650182125e-06, |
|
"loss": 1.3987, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.4784964068546158, |
|
"grad_norm": 2.25, |
|
"learning_rate": 5.480402596627477e-06, |
|
"loss": 1.3482, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.47938087341072416, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 5.466314843222993e-06, |
|
"loss": 1.3633, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.4802653399668325, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 5.452223354456154e-06, |
|
"loss": 1.3443, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.48114980652294087, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 5.438128243205241e-06, |
|
"loss": 1.2519, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.4820342730790492, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 5.4240296223775465e-06, |
|
"loss": 1.2557, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.4829187396351575, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 5.409927604908481e-06, |
|
"loss": 1.4529, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.4838032061912659, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 5.395822303760662e-06, |
|
"loss": 1.3618, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.48468767274737423, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 5.381713831923008e-06, |
|
"loss": 1.4106, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.4855721393034826, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 5.367602302409841e-06, |
|
"loss": 1.2277, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.48645660585959094, |
|
"grad_norm": 2.125, |
|
"learning_rate": 5.353487828259973e-06, |
|
"loss": 1.3436, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.4873410724156993, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 5.339370522535805e-06, |
|
"loss": 1.4189, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.48822553897180765, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 5.325250498322417e-06, |
|
"loss": 1.5008, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.489110005527916, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 5.311127868726667e-06, |
|
"loss": 1.4573, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.4899944720840243, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 5.297002746876284e-06, |
|
"loss": 1.525, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.49087893864013266, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 5.282875245918963e-06, |
|
"loss": 1.308, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.491763405196241, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 5.26874547902145e-06, |
|
"loss": 1.3713, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.49264787175234936, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 5.2546135593686484e-06, |
|
"loss": 1.4282, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.4935323383084577, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 5.240479600162704e-06, |
|
"loss": 1.5133, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.4944168048645661, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 5.226343714622096e-06, |
|
"loss": 1.3118, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.4953012714206744, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 5.212206015980742e-06, |
|
"loss": 1.4079, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.4953012714206744, |
|
"eval_loss": 1.3238087892532349, |
|
"eval_runtime": 232.2455, |
|
"eval_samples_per_second": 69.237, |
|
"eval_steps_per_second": 8.655, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.4961857379767827, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 5.198066617487078e-06, |
|
"loss": 1.4577, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.4970702045328911, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 5.1839256324031575e-06, |
|
"loss": 1.3982, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.49795467108899943, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 5.169783174003744e-06, |
|
"loss": 1.3064, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.4988391376451078, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 5.155639355575402e-06, |
|
"loss": 1.4527, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.49972360420121614, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 5.141494290415592e-06, |
|
"loss": 1.3521, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.5006080707573245, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 5.127348091831755e-06, |
|
"loss": 1.4069, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.5014925373134328, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 5.1132008731404225e-06, |
|
"loss": 1.3273, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.5023770038695412, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 5.099052747666289e-06, |
|
"loss": 1.3473, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.5032614704256495, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 5.084903828741312e-06, |
|
"loss": 1.4105, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.5041459369817579, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 5.070754229703811e-06, |
|
"loss": 1.3755, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.5050304035378662, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 5.056604063897549e-06, |
|
"loss": 1.4316, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.5059148700939746, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 5.042453444670829e-06, |
|
"loss": 1.3914, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.5067993366500829, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 5.028302485375588e-06, |
|
"loss": 1.4373, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.5076838032061912, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 5.0141512993664864e-06, |
|
"loss": 1.3995, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.5085682697622996, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 5e-06, |
|
"loss": 1.2879, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.5094527363184079, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 4.985848700633515e-06, |
|
"loss": 1.346, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.5103372028745163, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 4.971697514624414e-06, |
|
"loss": 1.3491, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.5112216694306246, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 4.957546555329174e-06, |
|
"loss": 1.3267, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.512106135986733, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 4.943395936102453e-06, |
|
"loss": 1.4237, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.5129906025428413, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.929245770296191e-06, |
|
"loss": 1.3134, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.5138750690989496, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 4.91509617125869e-06, |
|
"loss": 1.3902, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.514759535655058, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.900947252333712e-06, |
|
"loss": 1.3079, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.5156440022111664, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.8867991268595775e-06, |
|
"loss": 1.3071, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.5165284687672748, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 4.872651908168244e-06, |
|
"loss": 1.2942, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.5174129353233831, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.85850570958441e-06, |
|
"loss": 1.4196, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.5182974018794915, |
|
"grad_norm": 2.25, |
|
"learning_rate": 4.844360644424599e-06, |
|
"loss": 1.3455, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.5191818684355998, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.830216825996257e-06, |
|
"loss": 1.504, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.5200663349917082, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 4.816074367596843e-06, |
|
"loss": 1.3046, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.5209508015478165, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.801933382512923e-06, |
|
"loss": 1.3623, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.5218352681039248, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 4.78779398401926e-06, |
|
"loss": 1.3359, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.5227197346600332, |
|
"grad_norm": 2.125, |
|
"learning_rate": 4.7736562853779066e-06, |
|
"loss": 1.5075, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.5236042012161415, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 4.7595203998373e-06, |
|
"loss": 1.4021, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.5244886677722499, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 4.745386440631354e-06, |
|
"loss": 1.2704, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.5253731343283582, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 4.73125452097855e-06, |
|
"loss": 1.3103, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.5262576008844666, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 4.717124754081038e-06, |
|
"loss": 1.3654, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.5271420674405749, |
|
"grad_norm": 2.125, |
|
"learning_rate": 4.702997253123716e-06, |
|
"loss": 1.322, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.5280265339966832, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 4.688872131273334e-06, |
|
"loss": 1.2978, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.5289110005527916, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 4.674749501677585e-06, |
|
"loss": 1.4011, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.5297954671088999, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 4.660629477464197e-06, |
|
"loss": 1.3308, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.5306799336650083, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 4.646512171740028e-06, |
|
"loss": 1.3854, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.5315644002211166, |
|
"grad_norm": 2.25, |
|
"learning_rate": 4.6323976975901595e-06, |
|
"loss": 1.2851, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.532448866777225, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 4.618286168076993e-06, |
|
"loss": 1.4358, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.5333333333333333, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.6041776962393405e-06, |
|
"loss": 1.34, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.5342177998894417, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 4.590072395091521e-06, |
|
"loss": 1.3445, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.53510226644555, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 4.575970377622456e-06, |
|
"loss": 1.3103, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.5359867330016583, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.56187175679476e-06, |
|
"loss": 1.2384, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.5368711995577667, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 4.547776645543846e-06, |
|
"loss": 1.3134, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.537755666113875, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 4.533685156777008e-06, |
|
"loss": 1.3305, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.5386401326699835, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 4.519597403372524e-06, |
|
"loss": 1.4062, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.5395245992260918, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 4.505513498178752e-06, |
|
"loss": 1.3568, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.5404090657822002, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 4.491433554013221e-06, |
|
"loss": 1.2757, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.5412935323383085, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.477357683661734e-06, |
|
"loss": 1.2589, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.5421779988944168, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 4.463285999877458e-06, |
|
"loss": 1.4028, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.5430624654505252, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.449218615380029e-06, |
|
"loss": 1.3952, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.5439469320066335, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 4.435155642854637e-06, |
|
"loss": 1.4054, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.5448313985627419, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 4.421097194951137e-06, |
|
"loss": 1.3026, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.5457158651188502, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 4.407043384283136e-06, |
|
"loss": 1.4435, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.5466003316749586, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 4.3929943234270935e-06, |
|
"loss": 1.3515, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.5474847982310669, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 4.37895012492143e-06, |
|
"loss": 1.3434, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.5483692647871752, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 4.364910901265607e-06, |
|
"loss": 1.479, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.5492537313432836, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 4.350876764919238e-06, |
|
"loss": 1.2995, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.5501381978993919, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 4.336847828301189e-06, |
|
"loss": 1.4547, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.5510226644555003, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 4.322824203788669e-06, |
|
"loss": 1.3666, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.5519071310116086, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 4.308806003716338e-06, |
|
"loss": 1.3513, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.552791597567717, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 4.294793340375405e-06, |
|
"loss": 1.3136, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.5536760641238253, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 4.280786326012724e-06, |
|
"loss": 1.3245, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.5545605306799337, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.266785072829901e-06, |
|
"loss": 1.3171, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.555444997236042, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.252789692982394e-06, |
|
"loss": 1.2222, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.5563294637921503, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 4.23880029857861e-06, |
|
"loss": 1.547, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.5572139303482587, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.224817001679011e-06, |
|
"loss": 1.2893, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5572139303482587, |
|
"eval_loss": 1.3222172260284424, |
|
"eval_runtime": 232.1074, |
|
"eval_samples_per_second": 69.278, |
|
"eval_steps_per_second": 8.66, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.558098396904367, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 4.210839914295219e-06, |
|
"loss": 1.3732, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.5589828634604754, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 4.196869148389114e-06, |
|
"loss": 1.4756, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.5598673300165837, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 4.1829048158719344e-06, |
|
"loss": 1.3808, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.5607517965726921, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 4.168947028603389e-06, |
|
"loss": 1.2927, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.5616362631288004, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 4.154995898390756e-06, |
|
"loss": 1.3053, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.5625207296849087, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 4.141051536987986e-06, |
|
"loss": 1.4368, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.5634051962410171, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 4.127114056094811e-06, |
|
"loss": 1.3628, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.5642896627971254, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 4.113183567355846e-06, |
|
"loss": 1.2668, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.5651741293532339, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 4.099260182359698e-06, |
|
"loss": 1.3524, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.5660585959093422, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 4.085344012638067e-06, |
|
"loss": 1.4395, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.5669430624654506, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 4.071435169664861e-06, |
|
"loss": 1.4211, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.5678275290215589, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.057533764855291e-06, |
|
"loss": 1.4624, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.5687119955776673, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 4.043639909564993e-06, |
|
"loss": 1.3607, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.5695964621337756, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.029753715089124e-06, |
|
"loss": 1.4639, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.5704809286898839, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 4.015875292661474e-06, |
|
"loss": 1.2738, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.5713653952459923, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.002004753453577e-06, |
|
"loss": 1.266, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.5722498618021006, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 3.988142208573822e-06, |
|
"loss": 1.3372, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.573134328358209, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 3.9742877690665575e-06, |
|
"loss": 1.4597, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.5740187949143173, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 3.960441545911205e-06, |
|
"loss": 1.3481, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.5749032614704257, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 3.94660365002137e-06, |
|
"loss": 1.454, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.575787728026534, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 3.932774192243953e-06, |
|
"loss": 1.3835, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.5766721945826423, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 3.918953283358266e-06, |
|
"loss": 1.4907, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.5775566611387507, |
|
"grad_norm": 2.125, |
|
"learning_rate": 3.905141034075135e-06, |
|
"loss": 1.3345, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.578441127694859, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 3.891337555036022e-06, |
|
"loss": 1.5018, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.5793255942509674, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 3.877542956812137e-06, |
|
"loss": 1.3936, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.5802100608070757, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 3.863757349903551e-06, |
|
"loss": 1.3161, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.5810945273631841, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.849980844738308e-06, |
|
"loss": 1.4803, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.5819789939192924, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 3.836213551671547e-06, |
|
"loss": 1.3776, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.5828634604754007, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 3.822455580984613e-06, |
|
"loss": 1.3356, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.5837479270315091, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 3.808707042884176e-06, |
|
"loss": 1.4152, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.5846323935876174, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 3.794968047501345e-06, |
|
"loss": 1.549, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.5855168601437258, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 3.781238704890793e-06, |
|
"loss": 1.4049, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.5864013266998341, |
|
"grad_norm": 2.25, |
|
"learning_rate": 3.767519125029865e-06, |
|
"loss": 1.4221, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.5872857932559425, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 3.7538094178177054e-06, |
|
"loss": 1.2047, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.5881702598120508, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 3.7401096930743753e-06, |
|
"loss": 1.3547, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.5890547263681593, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 3.72642006053997e-06, |
|
"loss": 1.4558, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.5899391929242676, |
|
"grad_norm": 2.125, |
|
"learning_rate": 3.7127406298737458e-06, |
|
"loss": 1.4111, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.5908236594803759, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 3.6990715106532353e-06, |
|
"loss": 1.3221, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.5917081260364843, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 3.6854128123733736e-06, |
|
"loss": 1.2724, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.5925925925925926, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 3.6717646444456196e-06, |
|
"loss": 1.3462, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.593477059148701, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 3.658127116197079e-06, |
|
"loss": 1.3287, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.5943615257048093, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.6445003368696317e-06, |
|
"loss": 1.2705, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.5952459922609177, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 3.6308844156190546e-06, |
|
"loss": 1.4935, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.596130458817026, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 3.6172794615141448e-06, |
|
"loss": 1.4558, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.5970149253731343, |
|
"grad_norm": 2.25, |
|
"learning_rate": 3.60368558353585e-06, |
|
"loss": 1.4027, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.5978993919292427, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 3.590102890576394e-06, |
|
"loss": 1.3063, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.598783858485351, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 3.576531491438403e-06, |
|
"loss": 1.3965, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.5996683250414594, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 3.5629714948340354e-06, |
|
"loss": 1.2793, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.6005527915975677, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 3.5494230093841144e-06, |
|
"loss": 1.5579, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.6014372581536761, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 3.5358861436172487e-06, |
|
"loss": 1.2379, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.6023217247097844, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 3.5223610059689733e-06, |
|
"loss": 1.2644, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.6032061912658928, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 3.5088477047808738e-06, |
|
"loss": 1.3687, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.6040906578220011, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 3.495346348299724e-06, |
|
"loss": 1.3623, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.6049751243781094, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 3.481857044676613e-06, |
|
"loss": 1.4687, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.6058595909342178, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.4683799019660834e-06, |
|
"loss": 1.4703, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.6067440574903261, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 3.4549150281252635e-06, |
|
"loss": 1.3077, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.6076285240464345, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 3.4414625310130038e-06, |
|
"loss": 1.3626, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.6085129906025428, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 3.4280225183890124e-06, |
|
"loss": 1.4234, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.6093974571586512, |
|
"grad_norm": 2.125, |
|
"learning_rate": 3.4145950979129916e-06, |
|
"loss": 1.3617, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.6102819237147595, |
|
"grad_norm": 2.25, |
|
"learning_rate": 3.401180377143774e-06, |
|
"loss": 1.3452, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.6111663902708678, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 3.3877784635384668e-06, |
|
"loss": 1.3217, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.6120508568269762, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 3.374389464451583e-06, |
|
"loss": 1.4221, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.6129353233830845, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.361013487134188e-06, |
|
"loss": 1.3176, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.613819789939193, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 3.3476506387330355e-06, |
|
"loss": 1.2636, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.6147042564953012, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.3343010262897125e-06, |
|
"loss": 1.199, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.6155887230514097, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 3.320964756739781e-06, |
|
"loss": 1.3831, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.616473189607518, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 3.3076419369119216e-06, |
|
"loss": 1.3128, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.6173576561636264, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 3.2943326735270766e-06, |
|
"loss": 1.3597, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.6182421227197347, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 3.2810370731975956e-06, |
|
"loss": 1.4076, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.619126589275843, |
|
"grad_norm": 2.125, |
|
"learning_rate": 3.2677552424263836e-06, |
|
"loss": 1.376, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.619126589275843, |
|
"eval_loss": 1.3213250637054443, |
|
"eval_runtime": 232.1662, |
|
"eval_samples_per_second": 69.261, |
|
"eval_steps_per_second": 8.658, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.6200110558319514, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 3.254487287606044e-06, |
|
"loss": 1.3793, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.6208955223880597, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 3.2412333150180298e-06, |
|
"loss": 1.4251, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.6217799889441681, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.2279934308317905e-06, |
|
"loss": 1.4261, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.6226644555002764, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 3.2147677411039236e-06, |
|
"loss": 1.4114, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.6235489220563848, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 3.2015563517773214e-06, |
|
"loss": 1.2844, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.6244333886124931, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 3.1883593686803244e-06, |
|
"loss": 1.4281, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.6253178551686014, |
|
"grad_norm": 2.25, |
|
"learning_rate": 3.1751768975258745e-06, |
|
"loss": 1.3457, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.6262023217247098, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 3.162009043910668e-06, |
|
"loss": 1.3013, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.6270867882808181, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 3.1488559133143065e-06, |
|
"loss": 1.347, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.6279712548369265, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 3.1357176110984578e-06, |
|
"loss": 1.2946, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.6288557213930348, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.122594242506006e-06, |
|
"loss": 1.3807, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.6297401879491432, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 3.1094859126602132e-06, |
|
"loss": 1.3381, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.6306246545052515, |
|
"grad_norm": 2.25, |
|
"learning_rate": 3.096392726563874e-06, |
|
"loss": 1.3269, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.6315091210613598, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.0833147890984735e-06, |
|
"loss": 1.2699, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.6323935876174682, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 3.070252205023356e-06, |
|
"loss": 1.3409, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.6332780541735765, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 3.0572050789748732e-06, |
|
"loss": 1.3592, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.6341625207296849, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 3.044173515465554e-06, |
|
"loss": 1.4928, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.6350469872857932, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 3.031157618883265e-06, |
|
"loss": 1.3309, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.6359314538419016, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 3.0181574934903745e-06, |
|
"loss": 1.4954, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.6368159203980099, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.0051732434229185e-06, |
|
"loss": 1.2679, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.6377003869541183, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 2.992204972689765e-06, |
|
"loss": 1.3471, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.6385848535102266, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 2.9792527851717805e-06, |
|
"loss": 1.4871, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.639469320066335, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 2.966316784621e-06, |
|
"loss": 1.3921, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.6403537866224434, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 2.9533970746597935e-06, |
|
"loss": 1.4691, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.6412382531785517, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 2.9404937587800374e-06, |
|
"loss": 1.4068, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.6421227197346601, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 2.9276069403422846e-06, |
|
"loss": 1.437, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.6430071862907684, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 2.9147367225749425e-06, |
|
"loss": 1.4186, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.6438916528468768, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 2.90188320857343e-06, |
|
"loss": 1.3671, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.6447761194029851, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.889046501299373e-06, |
|
"loss": 1.2498, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.6456605859590934, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 2.8762267035797607e-06, |
|
"loss": 1.4189, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.6465450525152018, |
|
"grad_norm": 2.25, |
|
"learning_rate": 2.8634239181061384e-06, |
|
"loss": 1.3515, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.6474295190713101, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 2.850638247433768e-06, |
|
"loss": 1.4201, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.6483139856274185, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 2.837869793980823e-06, |
|
"loss": 1.345, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.6491984521835268, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 2.8251186600275533e-06, |
|
"loss": 1.2402, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.6500829187396352, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 2.8123849477154808e-06, |
|
"loss": 1.3014, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.6509673852957435, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 2.799668759046564e-06, |
|
"loss": 1.3631, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.6518518518518519, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.786970195882398e-06, |
|
"loss": 1.4583, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.6527363184079602, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 2.7742893599433875e-06, |
|
"loss": 1.4511, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.6536207849640685, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 2.761626352807938e-06, |
|
"loss": 1.4266, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.6545052515201769, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 2.748981275911633e-06, |
|
"loss": 1.3963, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.6553897180762852, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 2.736354230546437e-06, |
|
"loss": 1.5029, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.6562741846323936, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 2.7237453178598645e-06, |
|
"loss": 1.5213, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.6571586511885019, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 2.71115463885419e-06, |
|
"loss": 1.2602, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.6580431177446103, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 2.698582294385621e-06, |
|
"loss": 1.4041, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.6589275843007186, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 2.6860283851635067e-06, |
|
"loss": 1.2874, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.6598120508568269, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.6734930117495133e-06, |
|
"loss": 1.4202, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.6606965174129353, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.6609762745568358e-06, |
|
"loss": 1.3251, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.6615809839690436, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 2.6484782738493843e-06, |
|
"loss": 1.4304, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.662465450525152, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 2.6359991097409767e-06, |
|
"loss": 1.3746, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.6633499170812603, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 2.6235388821945497e-06, |
|
"loss": 1.3906, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.6642343836373688, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.61109769102135e-06, |
|
"loss": 1.3656, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.665118850193477, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 2.598675635880129e-06, |
|
"loss": 1.3807, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.6660033167495855, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 2.5862728162763595e-06, |
|
"loss": 1.3882, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.6668877833056938, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 2.573889331561423e-06, |
|
"loss": 1.4212, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.6677722498618021, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 2.5615252809318287e-06, |
|
"loss": 1.3437, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.6686567164179105, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 2.5491807634284016e-06, |
|
"loss": 1.4545, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.6695411829740188, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 2.536855877935507e-06, |
|
"loss": 1.3538, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.6704256495301272, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 2.524550723180249e-06, |
|
"loss": 1.3707, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.6713101160862355, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 2.5122653977316758e-06, |
|
"loss": 1.4478, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.6721945826423439, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 2.5000000000000015e-06, |
|
"loss": 1.3143, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.6730790491984522, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 2.487754628235805e-06, |
|
"loss": 1.3425, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.6739635157545605, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.4755293805292572e-06, |
|
"loss": 1.4169, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.6748479823106689, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 2.4633243548093195e-06, |
|
"loss": 1.3797, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.6757324488667772, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 2.4511396488429727e-06, |
|
"loss": 1.4054, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.6766169154228856, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 2.4389753602344298e-06, |
|
"loss": 1.3736, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.6775013819789939, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 2.4268315864243447e-06, |
|
"loss": 1.3543, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.6783858485351023, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.4147084246890478e-06, |
|
"loss": 1.3911, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.6792703150912106, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 2.402605972139759e-06, |
|
"loss": 1.3499, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.6801547816473189, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 2.3905243257218007e-06, |
|
"loss": 1.4691, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.6810392482034273, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 2.3784635822138424e-06, |
|
"loss": 1.2953, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.6810392482034273, |
|
"eval_loss": 1.3208374977111816, |
|
"eval_runtime": 232.2504, |
|
"eval_samples_per_second": 69.236, |
|
"eval_steps_per_second": 8.654, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.6819237147595356, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.366423838227102e-06, |
|
"loss": 1.3865, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.682808181315644, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.354405190204592e-06, |
|
"loss": 1.2972, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.6836926478717523, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 2.342407734420331e-06, |
|
"loss": 1.2642, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.6845771144278607, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.3304315669785853e-06, |
|
"loss": 1.3523, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.685461580983969, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 2.318476783813088e-06, |
|
"loss": 1.3066, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.6863460475400774, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.306543480686281e-06, |
|
"loss": 1.4334, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.6872305140961857, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 2.294631753188541e-06, |
|
"loss": 1.3997, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.688114980652294, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 2.2827416967374115e-06, |
|
"loss": 1.4262, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.6889994472084024, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 2.270873406576849e-06, |
|
"loss": 1.4025, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.6898839137645107, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.2590269777764516e-06, |
|
"loss": 1.4295, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.6907683803206192, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 2.247202505230695e-06, |
|
"loss": 1.3638, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.6916528468767275, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 2.2354000836581834e-06, |
|
"loss": 1.3582, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.6925373134328359, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.223619807600878e-06, |
|
"loss": 1.3499, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.6934217799889442, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 2.2118617714233524e-06, |
|
"loss": 1.3486, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.6943062465450525, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 2.2001260693120236e-06, |
|
"loss": 1.3466, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.6951907131011609, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 2.1884127952744095e-06, |
|
"loss": 1.45, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.6960751796572692, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 2.1767220431383652e-06, |
|
"loss": 1.4239, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.6969596462133776, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.1650539065513415e-06, |
|
"loss": 1.3557, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.6978441127694859, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 2.153408478979628e-06, |
|
"loss": 1.4248, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.6987285793255943, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 2.141785853707607e-06, |
|
"loss": 1.3066, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.6996130458817026, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 2.130186123837002e-06, |
|
"loss": 1.3607, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.700497512437811, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 2.118609382286142e-06, |
|
"loss": 1.3326, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.7013819789939193, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 2.107055721789203e-06, |
|
"loss": 1.3718, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.7022664455500276, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.0955252348954806e-06, |
|
"loss": 1.3615, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.703150912106136, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 2.0840180139686333e-06, |
|
"loss": 1.4377, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.7040353786622443, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 2.0725341511859576e-06, |
|
"loss": 1.4253, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.7049198452183527, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 2.061073738537635e-06, |
|
"loss": 1.3843, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.705804311774461, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 2.0496368678260094e-06, |
|
"loss": 1.4443, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.7066887783305694, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 2.038223630664835e-06, |
|
"loss": 1.3969, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.7075732448866777, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.0268341184785674e-06, |
|
"loss": 1.3988, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.708457711442786, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.015468422501599e-06, |
|
"loss": 1.389, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.7093421779988944, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 2.004126633777559e-06, |
|
"loss": 1.3717, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.7102266445550027, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 1.992808843158559e-06, |
|
"loss": 1.5092, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.7111111111111111, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.9815151413044865e-06, |
|
"loss": 1.377, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.7119955776672194, |
|
"grad_norm": 2.25, |
|
"learning_rate": 1.9702456186822595e-06, |
|
"loss": 1.2182, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.7128800442233278, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 1.959000365565119e-06, |
|
"loss": 1.4019, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.7137645107794361, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.947779472031891e-06, |
|
"loss": 1.4615, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.7146489773355446, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 1.936583027966279e-06, |
|
"loss": 1.4442, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.7155334438916529, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.9254111230561282e-06, |
|
"loss": 1.3233, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.7164179104477612, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 1.9142638467927254e-06, |
|
"loss": 1.3184, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.7173023770038696, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 1.9031412884700612e-06, |
|
"loss": 1.4767, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.7181868435599779, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 1.8920435371841394e-06, |
|
"loss": 1.3654, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.7190713101160863, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 1.8809706818322375e-06, |
|
"loss": 1.3787, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.7199557766721946, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.8699228111122146e-06, |
|
"loss": 1.4234, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.720840243228303, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 1.8589000135217882e-06, |
|
"loss": 1.4363, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.7217247097844113, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 1.8479023773578354e-06, |
|
"loss": 1.3332, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.7226091763405196, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 1.8369299907156745e-06, |
|
"loss": 1.5322, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.723493642896628, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.8259829414883728e-06, |
|
"loss": 1.4411, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.7243781094527363, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 1.815061317366027e-06, |
|
"loss": 1.3387, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.7252625760088447, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 1.8041652058350768e-06, |
|
"loss": 1.5099, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.726147042564953, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 1.7932946941775882e-06, |
|
"loss": 1.3271, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.7270315091210614, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 1.78244986947057e-06, |
|
"loss": 1.3248, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.7279159756771697, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.7716308185852605e-06, |
|
"loss": 1.4185, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.728800442233278, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 1.7608376281864502e-06, |
|
"loss": 1.3921, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.7296849087893864, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 1.7500703847317663e-06, |
|
"loss": 1.3979, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.7305693753454947, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 1.739329174470999e-06, |
|
"loss": 1.3455, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.7314538419016031, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.7286140834453958e-06, |
|
"loss": 1.5016, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.7323383084577114, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.7179251974869858e-06, |
|
"loss": 1.4437, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.7332227750138198, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.7072626022178796e-06, |
|
"loss": 1.4511, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.7341072415699281, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.6966263830495939e-06, |
|
"loss": 1.4144, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.7349917081260365, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 1.6860166251823574e-06, |
|
"loss": 1.4895, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.7358761746821448, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.6754334136044398e-06, |
|
"loss": 1.344, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.7367606412382531, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.6648768330914578e-06, |
|
"loss": 1.3382, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.7376451077943615, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.6543469682057105e-06, |
|
"loss": 1.3857, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.7385295743504698, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.6438439032954857e-06, |
|
"loss": 1.4671, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.7394140409065783, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 1.633367722494404e-06, |
|
"loss": 1.4239, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.7402985074626866, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 1.6229185097207228e-06, |
|
"loss": 1.3192, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.741182974018795, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.6124963486766844e-06, |
|
"loss": 1.3301, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.7420674405749033, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 1.6021013228478277e-06, |
|
"loss": 1.2805, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.7429519071310116, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 1.5917335155023368e-06, |
|
"loss": 1.2969, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.7429519071310116, |
|
"eval_loss": 1.3206154108047485, |
|
"eval_runtime": 232.2366, |
|
"eval_samples_per_second": 69.24, |
|
"eval_steps_per_second": 8.655, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.74383637368712, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.5813930096903562e-06, |
|
"loss": 1.2511, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.7447208402432283, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 1.5710798882433432e-06, |
|
"loss": 1.3885, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.7456053067993367, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 1.5607942337733884e-06, |
|
"loss": 1.3645, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.746489773355445, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 1.5505361286725679e-06, |
|
"loss": 1.3793, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.7473742399115534, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 1.5403056551122697e-06, |
|
"loss": 1.3091, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.7482587064676617, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 1.5301028950425494e-06, |
|
"loss": 1.4169, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.7491431730237701, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.5199279301914592e-06, |
|
"loss": 1.2407, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.7500276395798784, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 1.5097808420644117e-06, |
|
"loss": 1.5407, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.7509121061359867, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.4996617119435036e-06, |
|
"loss": 1.4126, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.7517965726920951, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.4895706208868876e-06, |
|
"loss": 1.3619, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.7526810392482034, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.4795076497281052e-06, |
|
"loss": 1.3949, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.7535655058043118, |
|
"grad_norm": 2.25, |
|
"learning_rate": 1.4694728790754514e-06, |
|
"loss": 1.2663, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.7544499723604201, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 1.459466389311321e-06, |
|
"loss": 1.2417, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.7553344389165285, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 1.4494882605915717e-06, |
|
"loss": 1.257, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.7562189054726368, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.439538572844873e-06, |
|
"loss": 1.3101, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.7571033720287451, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 1.4296174057720775e-06, |
|
"loss": 1.3139, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.7579878385848535, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.4197248388455693e-06, |
|
"loss": 1.3913, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.7588723051409618, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.4098609513086414e-06, |
|
"loss": 1.3652, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.7597567716970702, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 1.4000258221748452e-06, |
|
"loss": 1.399, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.7606412382531785, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 1.390219530227378e-06, |
|
"loss": 1.4919, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.7615257048092869, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 1.3804421540184282e-06, |
|
"loss": 1.3179, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.7624101713653952, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.3706937718685654e-06, |
|
"loss": 1.4212, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.7632946379215035, |
|
"grad_norm": 2.25, |
|
"learning_rate": 1.3609744618661013e-06, |
|
"loss": 1.4791, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.764179104477612, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 1.3512843018664728e-06, |
|
"loss": 1.3187, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.7650635710337202, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.3416233694916086e-06, |
|
"loss": 1.2747, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.7659480375898287, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.3319917421293182e-06, |
|
"loss": 1.366, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.766832504145937, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.3223894969326623e-06, |
|
"loss": 1.4011, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.7677169707020454, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 1.312816710819343e-06, |
|
"loss": 1.4062, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.7686014372581537, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 1.3032734604710784e-06, |
|
"loss": 1.4579, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.7694859038142621, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 1.2937598223330006e-06, |
|
"loss": 1.2437, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.7703703703703704, |
|
"grad_norm": 2.25, |
|
"learning_rate": 1.2842758726130283e-06, |
|
"loss": 1.3704, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.7712548369264787, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.2748216872812747e-06, |
|
"loss": 1.349, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.7721393034825871, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 1.2653973420694182e-06, |
|
"loss": 1.4582, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.7730237700386954, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 1.256002912470115e-06, |
|
"loss": 1.3579, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.7739082365948038, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.246638473736378e-06, |
|
"loss": 1.2888, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.7747927031509121, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.2373041008809888e-06, |
|
"loss": 1.2819, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.7756771697070205, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 1.2279998686758833e-06, |
|
"loss": 1.469, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.7765616362631288, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 1.2187258516515644e-06, |
|
"loss": 1.4948, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.7774461028192371, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.2094821240964955e-06, |
|
"loss": 1.3585, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.7783305693753455, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.2002687600565138e-06, |
|
"loss": 1.3644, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.7792150359314538, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 1.1910858333342279e-06, |
|
"loss": 1.4203, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.7800995024875622, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.1819334174884378e-06, |
|
"loss": 1.3598, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.7809839690436705, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.172811585833532e-06, |
|
"loss": 1.2861, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.7818684355997789, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.1637204114389179e-06, |
|
"loss": 1.4354, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.7827529021558872, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 1.1546599671284158e-06, |
|
"loss": 1.3396, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.7836373687119956, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 1.1456303254796941e-06, |
|
"loss": 1.3573, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.7845218352681039, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 1.1366315588236743e-06, |
|
"loss": 1.4086, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.7854063018242122, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.127663739243962e-06, |
|
"loss": 1.3828, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.7862907683803206, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.1187269385762605e-06, |
|
"loss": 1.4209, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.7871752349364289, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.1098212284078037e-06, |
|
"loss": 1.3129, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.7880597014925373, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.1009466800767738e-06, |
|
"loss": 1.4199, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.7889441680486456, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.09210336467174e-06, |
|
"loss": 1.4955, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.789828634604754, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.0832913530310784e-06, |
|
"loss": 1.33, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.7907131011608624, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 1.0745107157424155e-06, |
|
"loss": 1.4374, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.7915975677169707, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 1.0657615231420492e-06, |
|
"loss": 1.3894, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.7924820342730791, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 1.0570438453144044e-06, |
|
"loss": 1.4137, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.7933665008291874, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 1.0483577520914495e-06, |
|
"loss": 1.3512, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.7942509673852958, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.0397033130521554e-06, |
|
"loss": 1.3495, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.7951354339414041, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.0310805975219256e-06, |
|
"loss": 1.2432, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.7960199004975125, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 1.0224896745720513e-06, |
|
"loss": 1.4603, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.7969043670536208, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 1.0139306130191463e-06, |
|
"loss": 1.3466, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.7977888336097292, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.0054034814246093e-06, |
|
"loss": 1.5046, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.7986733001658375, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 9.969083480940617e-07, |
|
"loss": 1.5009, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.7995577667219458, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 9.884452810768115e-07, |
|
"loss": 1.3271, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.8004422332780542, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 9.80014348165298e-07, |
|
"loss": 1.4613, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.8013266998341625, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 9.71615616894559e-07, |
|
"loss": 1.4525, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.8022111663902709, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 9.632491545416788e-07, |
|
"loss": 1.325, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.8030956329463792, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 9.549150281252633e-07, |
|
"loss": 1.4775, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.8039800995024876, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 9.466133044048865e-07, |
|
"loss": 1.475, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.8048645660585959, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 9.383440498805712e-07, |
|
"loss": 1.4142, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.8048645660585959, |
|
"eval_loss": 1.320538878440857, |
|
"eval_runtime": 232.0847, |
|
"eval_samples_per_second": 69.285, |
|
"eval_steps_per_second": 8.661, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.8057490326147042, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 9.301073307922432e-07, |
|
"loss": 1.3282, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.8066334991708126, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 9.219032131192124e-07, |
|
"loss": 1.4358, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.8075179657269209, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 9.137317625796338e-07, |
|
"loss": 1.2671, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.8084024322830293, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 9.055930446299916e-07, |
|
"loss": 1.4075, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.8092868988391376, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 8.974871244645628e-07, |
|
"loss": 1.3825, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.810171365395246, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 8.894140670149082e-07, |
|
"loss": 1.3359, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.8110558319513543, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 8.813739369493395e-07, |
|
"loss": 1.4309, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.8119402985074626, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 8.733667986724109e-07, |
|
"loss": 1.4381, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.812824765063571, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 8.653927163243986e-07, |
|
"loss": 1.2608, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.8137092316196793, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 8.574517537807897e-07, |
|
"loss": 1.4192, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.8145936981757878, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 8.495439746517648e-07, |
|
"loss": 1.2921, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.815478164731896, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 8.416694422816974e-07, |
|
"loss": 1.4909, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.8163626312880045, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 8.338282197486364e-07, |
|
"loss": 1.3452, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.8172470978441128, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 8.260203698638114e-07, |
|
"loss": 1.3678, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.8181315644002212, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 8.182459551711197e-07, |
|
"loss": 1.2546, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.8190160309563295, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 8.105050379466334e-07, |
|
"loss": 1.4046, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.8199004975124378, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 8.027976801980924e-07, |
|
"loss": 1.405, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.8207849640685462, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7.95123943664417e-07, |
|
"loss": 1.3807, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.8216694306246545, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 7.87483889815207e-07, |
|
"loss": 1.4121, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.8225538971807629, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 7.798775798502484e-07, |
|
"loss": 1.3807, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.8234383637368712, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 7.723050746990291e-07, |
|
"loss": 1.3344, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.8243228302929796, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 7.647664350202461e-07, |
|
"loss": 1.4637, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.8252072968490879, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 7.572617212013189e-07, |
|
"loss": 1.3765, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.8260917634051962, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 7.497909933579117e-07, |
|
"loss": 1.4303, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.8269762299613046, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7.423543113334436e-07, |
|
"loss": 1.3633, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.8278606965174129, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 7.349517346986173e-07, |
|
"loss": 1.3611, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.8287451630735213, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7.275833227509344e-07, |
|
"loss": 1.3908, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.8296296296296296, |
|
"grad_norm": 2.25, |
|
"learning_rate": 7.202491345142288e-07, |
|
"loss": 1.2941, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.830514096185738, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 7.129492287381828e-07, |
|
"loss": 1.3899, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.8313985627418463, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 7.056836638978698e-07, |
|
"loss": 1.3957, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.8322830292979547, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 6.984524981932756e-07, |
|
"loss": 1.4181, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.833167495854063, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 6.912557895488342e-07, |
|
"loss": 1.3393, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.8340519624101713, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 6.84093595612968e-07, |
|
"loss": 1.3599, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.8349364289662797, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 6.769659737576229e-07, |
|
"loss": 1.333, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.835820895522388, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 6.698729810778065e-07, |
|
"loss": 1.3796, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.8367053620784964, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 6.628146743911374e-07, |
|
"loss": 1.2729, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.8375898286346047, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 6.55791110237381e-07, |
|
"loss": 1.449, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.8384742951907131, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 6.488023448780068e-07, |
|
"loss": 1.3217, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.8393587617468214, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 6.418484342957277e-07, |
|
"loss": 1.3816, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.8402432283029297, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 6.349294341940593e-07, |
|
"loss": 1.3077, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.8411276948590382, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 6.280453999968711e-07, |
|
"loss": 1.3948, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.8420121614151465, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 6.211963868479393e-07, |
|
"loss": 1.4487, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.8428966279712549, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 6.143824496105122e-07, |
|
"loss": 1.3836, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.8437810945273632, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 6.076036428668613e-07, |
|
"loss": 1.4319, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.8446655610834716, |
|
"grad_norm": 2.125, |
|
"learning_rate": 6.008600209178539e-07, |
|
"loss": 1.35, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.8455500276395799, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 5.941516377825102e-07, |
|
"loss": 1.2769, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.8464344941956883, |
|
"grad_norm": 2.125, |
|
"learning_rate": 5.874785471975753e-07, |
|
"loss": 1.3282, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.8473189607517966, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 5.808408026170892e-07, |
|
"loss": 1.3316, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.8482034273079049, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 5.742384572119519e-07, |
|
"loss": 1.3037, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.8490878938640133, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 5.676715638695063e-07, |
|
"loss": 1.3868, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.8499723604201216, |
|
"grad_norm": 2.125, |
|
"learning_rate": 5.611401751931112e-07, |
|
"loss": 1.3841, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.85085682697623, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 5.546443435017146e-07, |
|
"loss": 1.43, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.8517412935323383, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 5.481841208294447e-07, |
|
"loss": 1.2815, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.8526257600884467, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 5.417595589251828e-07, |
|
"loss": 1.2582, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.853510226644555, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 5.353707092521581e-07, |
|
"loss": 1.3579, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.8543946932006633, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 5.290176229875271e-07, |
|
"loss": 1.2397, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.8552791597567717, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 5.227003510219714e-07, |
|
"loss": 1.5733, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.85616362631288, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 5.16418943959282e-07, |
|
"loss": 1.4332, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.8570480928689884, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 5.101734521159613e-07, |
|
"loss": 1.505, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.8579325594250967, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 5.039639255208156e-07, |
|
"loss": 1.3825, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.8588170259812051, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 4.977904139145579e-07, |
|
"loss": 1.4532, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.8597014925373134, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 4.916529667494024e-07, |
|
"loss": 1.4113, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.8605859590934217, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 4.855516331886783e-07, |
|
"loss": 1.3548, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.8614704256495301, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 4.794864621064266e-07, |
|
"loss": 1.4388, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.8623548922056384, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 4.734575020870169e-07, |
|
"loss": 1.4733, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.8632393587617468, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 4.6746480142475015e-07, |
|
"loss": 1.4202, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.8641238253178551, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 4.6150840812348e-07, |
|
"loss": 1.4845, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.8650082918739636, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 4.5558836989621933e-07, |
|
"loss": 1.5008, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.8658927584300719, |
|
"grad_norm": 1.9921875, |
|
"learning_rate": 4.497047341647676e-07, |
|
"loss": 1.3423, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.8667772249861803, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 4.43857548059321e-07, |
|
"loss": 1.3769, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.8667772249861803, |
|
"eval_loss": 1.320468544960022, |
|
"eval_runtime": 232.1389, |
|
"eval_samples_per_second": 69.269, |
|
"eval_steps_per_second": 8.659, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.8676616915422886, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 4.380468584181069e-07, |
|
"loss": 1.5007, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.8685461580983969, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 4.322727117869951e-07, |
|
"loss": 1.3305, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.8694306246545053, |
|
"grad_norm": 2.125, |
|
"learning_rate": 4.265351544191365e-07, |
|
"loss": 1.321, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.8703150912106136, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 4.208342322745845e-07, |
|
"loss": 1.2659, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.871199557766722, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 4.151699910199336e-07, |
|
"loss": 1.2902, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.8720840243228303, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 4.095424760279454e-07, |
|
"loss": 1.377, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.8729684908789387, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 4.039517323771958e-07, |
|
"loss": 1.3572, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.873852957435047, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 3.983978048517029e-07, |
|
"loss": 1.3266, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.8747374239911553, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 3.9288073794057634e-07, |
|
"loss": 1.3651, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.8756218905472637, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.87400575837657e-07, |
|
"loss": 1.3594, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.876506357103372, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 3.819573624411643e-07, |
|
"loss": 1.3874, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.8773908236594804, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.765511413533429e-07, |
|
"loss": 1.3306, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.8782752902155887, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 3.711819558801183e-07, |
|
"loss": 1.5526, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.8791597567716971, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 3.6584984903074104e-07, |
|
"loss": 1.3536, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.8800442233278054, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 3.6055486351745327e-07, |
|
"loss": 1.4116, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.8809286898839138, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 3.5529704175513604e-07, |
|
"loss": 1.368, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.8818131564400221, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 3.500764258609779e-07, |
|
"loss": 1.4066, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.8826976229961304, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.4489305765413097e-07, |
|
"loss": 1.4416, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.8835820895522388, |
|
"grad_norm": 2.125, |
|
"learning_rate": 3.3974697865538195e-07, |
|
"loss": 1.397, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.8844665561083471, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 3.346382300868134e-07, |
|
"loss": 1.4165, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.8853510226644555, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 3.2956685287148014e-07, |
|
"loss": 1.3116, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.8862354892205638, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.24532887633075e-07, |
|
"loss": 1.3223, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.8871199557766722, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 3.195363746956087e-07, |
|
"loss": 1.4175, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.8880044223327805, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 3.1457735408308153e-07, |
|
"loss": 1.34, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 3.096558655191706e-07, |
|
"loss": 1.3023, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.8897733554449972, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 3.047719484269018e-07, |
|
"loss": 1.5113, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.8906578220011055, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 2.9992564192834253e-07, |
|
"loss": 1.3554, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.891542288557214, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 2.9511698484428144e-07, |
|
"loss": 1.2778, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.8924267551133223, |
|
"grad_norm": 1.9765625, |
|
"learning_rate": 2.9034601569392417e-07, |
|
"loss": 1.441, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.8933112216694307, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.85612772694579e-07, |
|
"loss": 1.3236, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.894195688225539, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 2.809172937613552e-07, |
|
"loss": 1.3639, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.8950801547816473, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 2.762596165068548e-07, |
|
"loss": 1.4892, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.8959646213377557, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.7163977824087696e-07, |
|
"loss": 1.428, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.896849087893864, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.6705781597011147e-07, |
|
"loss": 1.4441, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.8977335544499724, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.6251376639785163e-07, |
|
"loss": 1.3683, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.8986180210060807, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 2.5800766592369077e-07, |
|
"loss": 1.385, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.8995024875621891, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 2.535395506432403e-07, |
|
"loss": 1.4734, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.9003869541182974, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 2.4910945634783024e-07, |
|
"loss": 1.4088, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.9012714206744058, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 2.447174185242324e-07, |
|
"loss": 1.3204, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.9021558872305141, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 2.403634723543674e-07, |
|
"loss": 1.2867, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.9030403537866224, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.3604765271503073e-07, |
|
"loss": 1.5298, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.9039248203427308, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 2.3176999417760637e-07, |
|
"loss": 1.4862, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.9048092868988391, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 2.2753053100779632e-07, |
|
"loss": 1.4299, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.9056937534549475, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 2.2332929716533947e-07, |
|
"loss": 1.1436, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.9065782200110558, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 2.1916632630374579e-07, |
|
"loss": 1.4004, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.9074626865671642, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 2.1504165177002212e-07, |
|
"loss": 1.3453, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.9083471531232725, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 2.1095530660440954e-07, |
|
"loss": 1.3255, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.9092316196793808, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 2.069073235401109e-07, |
|
"loss": 1.3531, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.9101160862354892, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 2.028977350030409e-07, |
|
"loss": 1.2367, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.9110005527915975, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.989265731115525e-07, |
|
"loss": 1.2896, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.9118850193477059, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.9499386967619104e-07, |
|
"loss": 1.4309, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.9127694859038142, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 1.910996561994316e-07, |
|
"loss": 1.4226, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.9136539524599226, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.872439638754331e-07, |
|
"loss": 1.3422, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.914538419016031, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 1.834268235897807e-07, |
|
"loss": 1.4291, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.9154228855721394, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.7964826591924722e-07, |
|
"loss": 1.3052, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.9163073521282477, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.7590832113154045e-07, |
|
"loss": 1.4362, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.917191818684356, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.7220701918506666e-07, |
|
"loss": 1.3082, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.9180762852404644, |
|
"grad_norm": 2.25, |
|
"learning_rate": 1.685443897286859e-07, |
|
"loss": 1.444, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.9189607517965727, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 1.6492046210147884e-07, |
|
"loss": 1.3503, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.9198452183526811, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 1.6133526533250566e-07, |
|
"loss": 1.4377, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.9207296849087894, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 1.577888281405826e-07, |
|
"loss": 1.3313, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.9216141514648978, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.5428117893404315e-07, |
|
"loss": 1.3644, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.9224986180210061, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 1.5081234581051484e-07, |
|
"loss": 1.3837, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.9233830845771144, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 1.473823565566923e-07, |
|
"loss": 1.4241, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.9242675511332228, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 1.4399123864811904e-07, |
|
"loss": 1.3139, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.9251520176893311, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.4063901924895985e-07, |
|
"loss": 1.5017, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.9260364842454395, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.3732572521179043e-07, |
|
"loss": 1.3503, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.9269209508015478, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.3405138307737765e-07, |
|
"loss": 1.3755, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.9278054173576562, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 1.3081601907447007e-07, |
|
"loss": 1.4103, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.9286898839137645, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.2761965911958385e-07, |
|
"loss": 1.3431, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.9286898839137645, |
|
"eval_loss": 1.320536732673645, |
|
"eval_runtime": 232.1531, |
|
"eval_samples_per_second": 69.265, |
|
"eval_steps_per_second": 8.658, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.9295743504698729, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 1.244623288168012e-07, |
|
"loss": 1.3322, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.9304588170259812, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.2134405345755773e-07, |
|
"loss": 1.265, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.9313432835820895, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.1826485802044707e-07, |
|
"loss": 1.2818, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.9322277501381979, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.1522476717101605e-07, |
|
"loss": 1.3637, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.9331122166943062, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 1.1222380526156929e-07, |
|
"loss": 1.3624, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.9339966832504146, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.0926199633097156e-07, |
|
"loss": 1.4057, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.9348811498065229, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.0633936410446077e-07, |
|
"loss": 1.3419, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.9357656163626313, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.0345593199344972e-07, |
|
"loss": 1.352, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.9366500829187396, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.0061172309534739e-07, |
|
"loss": 1.3915, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.9375345494748479, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 9.780676019336632e-08, |
|
"loss": 1.2872, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.9384190160309563, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 9.504106575634664e-08, |
|
"loss": 1.3331, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.9393034825870646, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 9.231466193856897e-08, |
|
"loss": 1.4363, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.940187949143173, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 8.96275705795846e-08, |
|
"loss": 1.4318, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.9410724156992814, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 8.697981320403337e-08, |
|
"loss": 1.492, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.9419568822553898, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 8.437141102147883e-08, |
|
"loss": 1.5143, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.9428413488114981, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 8.18023849262306e-08, |
|
"loss": 1.4165, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.9437258153676064, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 7.927275549718228e-08, |
|
"loss": 1.3239, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.9446102819237148, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 7.678254299764431e-08, |
|
"loss": 1.3245, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.9454947484798231, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 7.433176737518255e-08, |
|
"loss": 1.4069, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.9463792150359315, |
|
"grad_norm": 2.25, |
|
"learning_rate": 7.192044826145772e-08, |
|
"loss": 1.2734, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.9472636815920398, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 6.954860497207006e-08, |
|
"loss": 1.36, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.9481481481481482, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 6.721625650640163e-08, |
|
"loss": 1.4026, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.9490326147042565, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 6.49234215474659e-08, |
|
"loss": 1.2729, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.9499170812603649, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 6.267011846175896e-08, |
|
"loss": 1.3159, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.9508015478164732, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 6.045636529911025e-08, |
|
"loss": 1.2304, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.9516860143725815, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 5.82821797925387e-08, |
|
"loss": 1.4583, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.9525704809286899, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 5.6147579358112324e-08, |
|
"loss": 1.4346, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.9534549474847982, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 5.4052581094806713e-08, |
|
"loss": 1.359, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.9543394140409066, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 5.19972017843684e-08, |
|
"loss": 1.2627, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.9552238805970149, |
|
"grad_norm": 2.125, |
|
"learning_rate": 4.998145789118114e-08, |
|
"loss": 1.3761, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.9561083471531233, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 4.800536556213375e-08, |
|
"loss": 1.4208, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.9569928137092316, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 4.60689406264897e-08, |
|
"loss": 1.3827, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.9578772802653399, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 4.417219859576383e-08, |
|
"loss": 1.3627, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.9587617468214483, |
|
"grad_norm": 2.25, |
|
"learning_rate": 4.231515466359193e-08, |
|
"loss": 1.3031, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.9596462133775566, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 4.0497823705615836e-08, |
|
"loss": 1.4236, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.960530679933665, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 3.8720220279359065e-08, |
|
"loss": 1.4736, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.9614151464897733, |
|
"grad_norm": 2.125, |
|
"learning_rate": 3.698235862411359e-08, |
|
"loss": 1.3541, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.9622996130458817, |
|
"grad_norm": 2.125, |
|
"learning_rate": 3.528425266082325e-08, |
|
"loss": 1.4398, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.96318407960199, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 3.36259159919744e-08, |
|
"loss": 1.3503, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.9640685461580984, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 3.2007361901485455e-08, |
|
"loss": 1.3672, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.9649530127142067, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 3.042860335460085e-08, |
|
"loss": 1.3574, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.965837479270315, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 2.8889652997787233e-08, |
|
"loss": 1.3291, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.9667219458264235, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 2.7390523158633552e-08, |
|
"loss": 1.3926, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.9676064123825318, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 2.593122584574892e-08, |
|
"loss": 1.2835, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.9684908789386402, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.4511772748669894e-08, |
|
"loss": 1.2501, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.9693753454947485, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 2.3132175237765564e-08, |
|
"loss": 1.2829, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.9702598120508569, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 2.179244436414485e-08, |
|
"loss": 1.3862, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.9711442786069652, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 2.0492590859571558e-08, |
|
"loss": 1.3531, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.9720287451630735, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.9232625136374472e-08, |
|
"loss": 1.3427, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.9729132117191819, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.8012557287367394e-08, |
|
"loss": 1.4296, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.9737976782752902, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.6832397085765893e-08, |
|
"loss": 1.3767, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.9746821448313986, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.5692153985109037e-08, |
|
"loss": 1.2763, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.9755666113875069, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 1.4591837119186104e-08, |
|
"loss": 1.459, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.9764510779436153, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.3531455301960539e-08, |
|
"loss": 1.3972, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.9773355444997236, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.2511017027501682e-08, |
|
"loss": 1.303, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.978220011055832, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 1.1530530469914258e-08, |
|
"loss": 1.4383, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.9791044776119403, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.0590003483275657e-08, |
|
"loss": 1.3901, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.9799889441680486, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 9.689443601570425e-09, |
|
"loss": 1.385, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.980873410724157, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 8.828858038632538e-09, |
|
"loss": 1.3062, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.9817578772802653, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 8.008253688084888e-09, |
|
"loss": 1.4997, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.9826423438363737, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 7.227637123285447e-09, |
|
"loss": 1.3024, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.983526810392482, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 6.487014597275631e-09, |
|
"loss": 1.4312, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.9844112769485904, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 5.7863920427286834e-09, |
|
"loss": 1.5325, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.9852957435046987, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 5.125775071901373e-09, |
|
"loss": 1.3651, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.986180210060807, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 4.505168976592922e-09, |
|
"loss": 1.3628, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.9870646766169154, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 3.924578728096707e-09, |
|
"loss": 1.2619, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.9879491431730237, |
|
"grad_norm": 2.125, |
|
"learning_rate": 3.3840089771658424e-09, |
|
"loss": 1.4065, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.9888336097291321, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 2.8834640539737723e-09, |
|
"loss": 1.3817, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.9897180762852404, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.422947968078182e-09, |
|
"loss": 1.2757, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.9906025428413489, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.002464408392135e-09, |
|
"loss": 1.4751, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.9906025428413489, |
|
"eval_loss": 1.3204842805862427, |
|
"eval_runtime": 232.1096, |
|
"eval_samples_per_second": 69.278, |
|
"eval_steps_per_second": 8.66, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.9914870093974572, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.622016743150212e-09, |
|
"loss": 1.3439, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.9923714759535655, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 1.2816080198868596e-09, |
|
"loss": 1.422, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.9932559425096739, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 9.812409654075261e-10, |
|
"loss": 1.5471, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.9941404090657822, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 7.209179857675664e-10, |
|
"loss": 1.3187, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.9950248756218906, |
|
"grad_norm": 2.125, |
|
"learning_rate": 5.006411662555888e-10, |
|
"loss": 1.3619, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.9959093421779989, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 3.204122713740265e-10, |
|
"loss": 1.4264, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.9967938087341073, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 1.8023274482636965e-10, |
|
"loss": 1.2781, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.9976782752902156, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 8.01037095038426e-11, |
|
"loss": 1.5524, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.998562741846324, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 2.0025967479853082e-11, |
|
"loss": 1.522, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.9994472084024323, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 0.0, |
|
"loss": 1.279, |
|
"step": 1130 |
|
} |
|
  ],
  "logging_steps": 1,
  "max_steps": 1130,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 565,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.3240198238109696e+17,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}