{
"best_metric": 0.22850316762924194,
"best_model_checkpoint": "miner_id_24_1/checkpoint-1000",
"epoch": 0.5092297899427116,
"eval_steps": 200,
"global_step": 1000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0005092297899427117,
"grad_norm": 7.810187816619873,
"learning_rate": 1e-05,
"loss": 1.0307,
"step": 1
},
{
"epoch": 0.0010184595798854233,
"grad_norm": 11.901244163513184,
"learning_rate": 2e-05,
"loss": 1.2115,
"step": 2
},
{
"epoch": 0.001527689369828135,
"grad_norm": 26.66057777404785,
"learning_rate": 3e-05,
"loss": 1.3876,
"step": 3
},
{
"epoch": 0.0020369191597708466,
"grad_norm": 36.39434051513672,
"learning_rate": 4e-05,
"loss": 2.3747,
"step": 4
},
{
"epoch": 0.002546148949713558,
"grad_norm": 38.85515594482422,
"learning_rate": 5e-05,
"loss": 2.2585,
"step": 5
},
{
"epoch": 0.00305537873965627,
"grad_norm": 31.20570945739746,
"learning_rate": 6e-05,
"loss": 1.8399,
"step": 6
},
{
"epoch": 0.0035646085295989813,
"grad_norm": 53.85519027709961,
"learning_rate": 7e-05,
"loss": 1.7199,
"step": 7
},
{
"epoch": 0.004073838319541693,
"grad_norm": 39.146419525146484,
"learning_rate": 8e-05,
"loss": 2.0047,
"step": 8
},
{
"epoch": 0.004583068109484405,
"grad_norm": 67.70555877685547,
"learning_rate": 9e-05,
"loss": 1.7328,
"step": 9
},
{
"epoch": 0.005092297899427116,
"grad_norm": 43.93235778808594,
"learning_rate": 0.0001,
"loss": 1.2339,
"step": 10
},
{
"epoch": 0.005601527689369828,
"grad_norm": 28.690454483032227,
"learning_rate": 9.999974825027756e-05,
"loss": 0.9872,
"step": 11
},
{
"epoch": 0.00611075747931254,
"grad_norm": 56.98477554321289,
"learning_rate": 9.999899300364532e-05,
"loss": 1.0972,
"step": 12
},
{
"epoch": 0.006619987269255252,
"grad_norm": 18.829212188720703,
"learning_rate": 9.999773426770865e-05,
"loss": 0.4863,
"step": 13
},
{
"epoch": 0.007129217059197963,
"grad_norm": 5.442621231079102,
"learning_rate": 9.999597205514297e-05,
"loss": 0.124,
"step": 14
},
{
"epoch": 0.0076384468491406746,
"grad_norm": 19.687393188476562,
"learning_rate": 9.999370638369377e-05,
"loss": 0.2149,
"step": 15
},
{
"epoch": 0.008147676639083386,
"grad_norm": 6.360881805419922,
"learning_rate": 9.99909372761763e-05,
"loss": 0.1119,
"step": 16
},
{
"epoch": 0.008656906429026098,
"grad_norm": 4.7020978927612305,
"learning_rate": 9.998766476047547e-05,
"loss": 0.1823,
"step": 17
},
{
"epoch": 0.00916613621896881,
"grad_norm": 2.705449342727661,
"learning_rate": 9.998388886954547e-05,
"loss": 0.1085,
"step": 18
},
{
"epoch": 0.009675366008911522,
"grad_norm": 2.5857841968536377,
"learning_rate": 9.997960964140947e-05,
"loss": 0.0976,
"step": 19
},
{
"epoch": 0.010184595798854232,
"grad_norm": 16.0050106048584,
"learning_rate": 9.997482711915927e-05,
"loss": 0.1568,
"step": 20
},
{
"epoch": 0.010693825588796944,
"grad_norm": 8.77568244934082,
"learning_rate": 9.99695413509548e-05,
"loss": 0.1151,
"step": 21
},
{
"epoch": 0.011203055378739656,
"grad_norm": 1.004607915878296,
"learning_rate": 9.996375239002369e-05,
"loss": 0.0802,
"step": 22
},
{
"epoch": 0.011712285168682368,
"grad_norm": 2.0350327491760254,
"learning_rate": 9.995746029466071e-05,
"loss": 0.0452,
"step": 23
},
{
"epoch": 0.01222151495862508,
"grad_norm": 301.12799072265625,
"learning_rate": 9.99506651282272e-05,
"loss": 7.738,
"step": 24
},
{
"epoch": 0.012730744748567792,
"grad_norm": 391.00762939453125,
"learning_rate": 9.99433669591504e-05,
"loss": 6.659,
"step": 25
},
{
"epoch": 0.013239974538510503,
"grad_norm": 192.62831115722656,
"learning_rate": 9.993556586092281e-05,
"loss": 3.7739,
"step": 26
},
{
"epoch": 0.013749204328453215,
"grad_norm": 79.25320434570312,
"learning_rate": 9.992726191210138e-05,
"loss": 1.652,
"step": 27
},
{
"epoch": 0.014258434118395925,
"grad_norm": 54.88349914550781,
"learning_rate": 9.991845519630678e-05,
"loss": 0.9881,
"step": 28
},
{
"epoch": 0.014767663908338637,
"grad_norm": 44.994449615478516,
"learning_rate": 9.990914580222257e-05,
"loss": 0.6137,
"step": 29
},
{
"epoch": 0.015276893698281349,
"grad_norm": 26.1174259185791,
"learning_rate": 9.989933382359422e-05,
"loss": 0.5558,
"step": 30
},
{
"epoch": 0.01578612348822406,
"grad_norm": 15.71074390411377,
"learning_rate": 9.988901935922826e-05,
"loss": 0.5383,
"step": 31
},
{
"epoch": 0.016295353278166773,
"grad_norm": 15.574135780334473,
"learning_rate": 9.987820251299122e-05,
"loss": 0.3819,
"step": 32
},
{
"epoch": 0.016804583068109485,
"grad_norm": 10.029973030090332,
"learning_rate": 9.986688339380862e-05,
"loss": 0.2771,
"step": 33
},
{
"epoch": 0.017313812858052197,
"grad_norm": 10.430713653564453,
"learning_rate": 9.985506211566388e-05,
"loss": 0.2751,
"step": 34
},
{
"epoch": 0.01782304264799491,
"grad_norm": 11.25537109375,
"learning_rate": 9.984273879759713e-05,
"loss": 0.1899,
"step": 35
},
{
"epoch": 0.01833227243793762,
"grad_norm": 9.658145904541016,
"learning_rate": 9.982991356370404e-05,
"loss": 0.3,
"step": 36
},
{
"epoch": 0.018841502227880332,
"grad_norm": 19.340084075927734,
"learning_rate": 9.981658654313457e-05,
"loss": 0.275,
"step": 37
},
{
"epoch": 0.019350732017823044,
"grad_norm": 16.586389541625977,
"learning_rate": 9.98027578700917e-05,
"loss": 0.2442,
"step": 38
},
{
"epoch": 0.019859961807765756,
"grad_norm": 7.003499984741211,
"learning_rate": 9.978842768382998e-05,
"loss": 0.11,
"step": 39
},
{
"epoch": 0.020369191597708464,
"grad_norm": 21.9989013671875,
"learning_rate": 9.977359612865423e-05,
"loss": 0.1143,
"step": 40
},
{
"epoch": 0.020878421387651176,
"grad_norm": 11.020797729492188,
"learning_rate": 9.975826335391808e-05,
"loss": 0.0857,
"step": 41
},
{
"epoch": 0.021387651177593888,
"grad_norm": 10.189589500427246,
"learning_rate": 9.974242951402235e-05,
"loss": 0.0433,
"step": 42
},
{
"epoch": 0.0218968809675366,
"grad_norm": 9.400593757629395,
"learning_rate": 9.972609476841367e-05,
"loss": 0.1319,
"step": 43
},
{
"epoch": 0.022406110757479312,
"grad_norm": 38.592594146728516,
"learning_rate": 9.970925928158274e-05,
"loss": 0.1928,
"step": 44
},
{
"epoch": 0.022915340547422024,
"grad_norm": 35.85441589355469,
"learning_rate": 9.969192322306271e-05,
"loss": 0.0925,
"step": 45
},
{
"epoch": 0.023424570337364736,
"grad_norm": 42.03386688232422,
"learning_rate": 9.967408676742751e-05,
"loss": 0.0906,
"step": 46
},
{
"epoch": 0.023933800127307447,
"grad_norm": 11.04820728302002,
"learning_rate": 9.965575009429006e-05,
"loss": 0.0713,
"step": 47
},
{
"epoch": 0.02444302991725016,
"grad_norm": 14.304110527038574,
"learning_rate": 9.963691338830044e-05,
"loss": 0.1799,
"step": 48
},
{
"epoch": 0.02495225970719287,
"grad_norm": 12.146672248840332,
"learning_rate": 9.961757683914406e-05,
"loss": 0.0691,
"step": 49
},
{
"epoch": 0.025461489497135583,
"grad_norm": 20.32611846923828,
"learning_rate": 9.959774064153977e-05,
"loss": 0.2816,
"step": 50
},
{
"epoch": 0.025970719287078295,
"grad_norm": 7.227572441101074,
"learning_rate": 9.957740499523787e-05,
"loss": 1.0021,
"step": 51
},
{
"epoch": 0.026479949077021007,
"grad_norm": 10.347118377685547,
"learning_rate": 9.955657010501806e-05,
"loss": 1.0069,
"step": 52
},
{
"epoch": 0.02698917886696372,
"grad_norm": 15.916715621948242,
"learning_rate": 9.953523618068749e-05,
"loss": 0.9734,
"step": 53
},
{
"epoch": 0.02749840865690643,
"grad_norm": 17.439035415649414,
"learning_rate": 9.951340343707852e-05,
"loss": 1.2134,
"step": 54
},
{
"epoch": 0.028007638446849142,
"grad_norm": 28.1020450592041,
"learning_rate": 9.949107209404665e-05,
"loss": 1.4139,
"step": 55
},
{
"epoch": 0.02851686823679185,
"grad_norm": 12.133536338806152,
"learning_rate": 9.946824237646824e-05,
"loss": 0.5215,
"step": 56
},
{
"epoch": 0.029026098026734563,
"grad_norm": 20.322139739990234,
"learning_rate": 9.944491451423828e-05,
"loss": 0.8477,
"step": 57
},
{
"epoch": 0.029535327816677275,
"grad_norm": 18.471527099609375,
"learning_rate": 9.942108874226811e-05,
"loss": 0.9458,
"step": 58
},
{
"epoch": 0.030044557606619986,
"grad_norm": 18.556156158447266,
"learning_rate": 9.939676530048301e-05,
"loss": 0.9743,
"step": 59
},
{
"epoch": 0.030553787396562698,
"grad_norm": 14.058014869689941,
"learning_rate": 9.937194443381972e-05,
"loss": 0.6557,
"step": 60
},
{
"epoch": 0.03106301718650541,
"grad_norm": 25.136165618896484,
"learning_rate": 9.934662639222412e-05,
"loss": 0.8277,
"step": 61
},
{
"epoch": 0.03157224697644812,
"grad_norm": 13.859735488891602,
"learning_rate": 9.93208114306486e-05,
"loss": 0.3034,
"step": 62
},
{
"epoch": 0.03208147676639083,
"grad_norm": 3.4193358421325684,
"learning_rate": 9.929449980904952e-05,
"loss": 0.0745,
"step": 63
},
{
"epoch": 0.032590706556333546,
"grad_norm": 6.933294773101807,
"learning_rate": 9.926769179238466e-05,
"loss": 0.1584,
"step": 64
},
{
"epoch": 0.033099936346276254,
"grad_norm": 8.660609245300293,
"learning_rate": 9.924038765061042e-05,
"loss": 0.2456,
"step": 65
},
{
"epoch": 0.03360916613621897,
"grad_norm": 1.4555833339691162,
"learning_rate": 9.921258765867919e-05,
"loss": 0.0626,
"step": 66
},
{
"epoch": 0.03411839592616168,
"grad_norm": 2.2231998443603516,
"learning_rate": 9.918429209653662e-05,
"loss": 0.1195,
"step": 67
},
{
"epoch": 0.03462762571610439,
"grad_norm": 5.060727596282959,
"learning_rate": 9.915550124911866e-05,
"loss": 0.1108,
"step": 68
},
{
"epoch": 0.0351368555060471,
"grad_norm": 6.343770503997803,
"learning_rate": 9.912621540634887e-05,
"loss": 0.1127,
"step": 69
},
{
"epoch": 0.03564608529598982,
"grad_norm": 4.716639518737793,
"learning_rate": 9.909643486313533e-05,
"loss": 0.0833,
"step": 70
},
{
"epoch": 0.036155315085932525,
"grad_norm": 7.187185764312744,
"learning_rate": 9.90661599193678e-05,
"loss": 0.1607,
"step": 71
},
{
"epoch": 0.03666454487587524,
"grad_norm": 2.778947353363037,
"learning_rate": 9.903539087991462e-05,
"loss": 0.1233,
"step": 72
},
{
"epoch": 0.03717377466581795,
"grad_norm": 1.5811668634414673,
"learning_rate": 9.900412805461967e-05,
"loss": 0.1392,
"step": 73
},
{
"epoch": 0.037683004455760664,
"grad_norm": 136.75079345703125,
"learning_rate": 9.897237175829926e-05,
"loss": 4.0886,
"step": 74
},
{
"epoch": 0.03819223424570337,
"grad_norm": 242.6491241455078,
"learning_rate": 9.894012231073894e-05,
"loss": 6.7782,
"step": 75
},
{
"epoch": 0.03870146403564609,
"grad_norm": 100.27400207519531,
"learning_rate": 9.890738003669029e-05,
"loss": 1.7429,
"step": 76
},
{
"epoch": 0.0392106938255888,
"grad_norm": 49.41951370239258,
"learning_rate": 9.887414526586763e-05,
"loss": 0.6478,
"step": 77
},
{
"epoch": 0.03971992361553151,
"grad_norm": 15.152222633361816,
"learning_rate": 9.884041833294476e-05,
"loss": 0.3272,
"step": 78
},
{
"epoch": 0.04022915340547422,
"grad_norm": 11.572675704956055,
"learning_rate": 9.880619957755151e-05,
"loss": 0.2417,
"step": 79
},
{
"epoch": 0.04073838319541693,
"grad_norm": 6.667335033416748,
"learning_rate": 9.877148934427037e-05,
"loss": 0.2272,
"step": 80
},
{
"epoch": 0.041247612985359644,
"grad_norm": 11.849577903747559,
"learning_rate": 9.873628798263296e-05,
"loss": 0.2427,
"step": 81
},
{
"epoch": 0.04175684277530235,
"grad_norm": 9.412842750549316,
"learning_rate": 9.870059584711668e-05,
"loss": 0.2196,
"step": 82
},
{
"epoch": 0.04226607256524507,
"grad_norm": 9.481209754943848,
"learning_rate": 9.866441329714088e-05,
"loss": 0.1711,
"step": 83
},
{
"epoch": 0.042775302355187776,
"grad_norm": 8.62416934967041,
"learning_rate": 9.862774069706346e-05,
"loss": 0.2399,
"step": 84
},
{
"epoch": 0.04328453214513049,
"grad_norm": 13.821141242980957,
"learning_rate": 9.859057841617709e-05,
"loss": 0.1519,
"step": 85
},
{
"epoch": 0.0437937619350732,
"grad_norm": 9.316679000854492,
"learning_rate": 9.855292682870551e-05,
"loss": 0.2179,
"step": 86
},
{
"epoch": 0.044302991725015915,
"grad_norm": 20.34062385559082,
"learning_rate": 9.851478631379982e-05,
"loss": 0.1978,
"step": 87
},
{
"epoch": 0.044812221514958624,
"grad_norm": 14.848787307739258,
"learning_rate": 9.847615725553456e-05,
"loss": 0.203,
"step": 88
},
{
"epoch": 0.04532145130490134,
"grad_norm": 9.669304847717285,
"learning_rate": 9.843704004290392e-05,
"loss": 0.1324,
"step": 89
},
{
"epoch": 0.04583068109484405,
"grad_norm": 5.988533020019531,
"learning_rate": 9.839743506981782e-05,
"loss": 0.0655,
"step": 90
},
{
"epoch": 0.04633991088478676,
"grad_norm": 7.444858074188232,
"learning_rate": 9.835734273509786e-05,
"loss": 0.0616,
"step": 91
},
{
"epoch": 0.04684914067472947,
"grad_norm": 19.514366149902344,
"learning_rate": 9.831676344247342e-05,
"loss": 0.0411,
"step": 92
},
{
"epoch": 0.047358370464672186,
"grad_norm": 6.495416164398193,
"learning_rate": 9.827569760057755e-05,
"loss": 0.0808,
"step": 93
},
{
"epoch": 0.047867600254614895,
"grad_norm": 9.067540168762207,
"learning_rate": 9.82341456229428e-05,
"loss": 0.1453,
"step": 94
},
{
"epoch": 0.0483768300445576,
"grad_norm": 4.768366813659668,
"learning_rate": 9.819210792799712e-05,
"loss": 0.0876,
"step": 95
},
{
"epoch": 0.04888605983450032,
"grad_norm": 10.522812843322754,
"learning_rate": 9.814958493905963e-05,
"loss": 0.0634,
"step": 96
},
{
"epoch": 0.04939528962444303,
"grad_norm": 35.72486877441406,
"learning_rate": 9.810657708433637e-05,
"loss": 0.1568,
"step": 97
},
{
"epoch": 0.04990451941438574,
"grad_norm": 58.850852966308594,
"learning_rate": 9.806308479691595e-05,
"loss": 0.0519,
"step": 98
},
{
"epoch": 0.05041374920432845,
"grad_norm": 10.575145721435547,
"learning_rate": 9.801910851476523e-05,
"loss": 0.1669,
"step": 99
},
{
"epoch": 0.050922978994271166,
"grad_norm": 13.279051780700684,
"learning_rate": 9.797464868072488e-05,
"loss": 0.1777,
"step": 100
},
{
"epoch": 0.051432208784213874,
"grad_norm": 9.112642288208008,
"learning_rate": 9.792970574250493e-05,
"loss": 0.7836,
"step": 101
},
{
"epoch": 0.05194143857415659,
"grad_norm": 10.229832649230957,
"learning_rate": 9.788428015268027e-05,
"loss": 1.2149,
"step": 102
},
{
"epoch": 0.0524506683640993,
"grad_norm": 12.976715087890625,
"learning_rate": 9.783837236868609e-05,
"loss": 1.1371,
"step": 103
},
{
"epoch": 0.052959898154042014,
"grad_norm": 6.877295970916748,
"learning_rate": 9.779198285281325e-05,
"loss": 0.2919,
"step": 104
},
{
"epoch": 0.05346912794398472,
"grad_norm": 12.871697425842285,
"learning_rate": 9.77451120722037e-05,
"loss": 1.0255,
"step": 105
},
{
"epoch": 0.05397835773392744,
"grad_norm": 19.823925018310547,
"learning_rate": 9.769776049884563e-05,
"loss": 1.4255,
"step": 106
},
{
"epoch": 0.054487587523870146,
"grad_norm": 14.80654525756836,
"learning_rate": 9.764992860956889e-05,
"loss": 0.8401,
"step": 107
},
{
"epoch": 0.05499681731381286,
"grad_norm": 24.64175033569336,
"learning_rate": 9.760161688604008e-05,
"loss": 1.0325,
"step": 108
},
{
"epoch": 0.05550604710375557,
"grad_norm": 26.200424194335938,
"learning_rate": 9.755282581475769e-05,
"loss": 1.4366,
"step": 109
},
{
"epoch": 0.056015276893698285,
"grad_norm": 17.368080139160156,
"learning_rate": 9.750355588704727e-05,
"loss": 1.0868,
"step": 110
},
{
"epoch": 0.05652450668364099,
"grad_norm": 11.78357219696045,
"learning_rate": 9.745380759905647e-05,
"loss": 0.4192,
"step": 111
},
{
"epoch": 0.0570337364735837,
"grad_norm": 13.371063232421875,
"learning_rate": 9.740358145174998e-05,
"loss": 0.9413,
"step": 112
},
{
"epoch": 0.05754296626352642,
"grad_norm": 15.481206893920898,
"learning_rate": 9.735287795090455e-05,
"loss": 1.2106,
"step": 113
},
{
"epoch": 0.058052196053469125,
"grad_norm": 7.285238265991211,
"learning_rate": 9.730169760710386e-05,
"loss": 0.1473,
"step": 114
},
{
"epoch": 0.05856142584341184,
"grad_norm": 13.799015045166016,
"learning_rate": 9.725004093573342e-05,
"loss": 0.1718,
"step": 115
},
{
"epoch": 0.05907065563335455,
"grad_norm": 9.683958053588867,
"learning_rate": 9.719790845697533e-05,
"loss": 0.1202,
"step": 116
},
{
"epoch": 0.059579885423297264,
"grad_norm": 2.17852520942688,
"learning_rate": 9.714530069580309e-05,
"loss": 0.0936,
"step": 117
},
{
"epoch": 0.06008911521323997,
"grad_norm": 3.735697031021118,
"learning_rate": 9.709221818197624e-05,
"loss": 0.1458,
"step": 118
},
{
"epoch": 0.06059834500318269,
"grad_norm": 6.55530309677124,
"learning_rate": 9.703866145003511e-05,
"loss": 0.189,
"step": 119
},
{
"epoch": 0.061107574793125397,
"grad_norm": 5.885622978210449,
"learning_rate": 9.698463103929542e-05,
"loss": 0.1048,
"step": 120
},
{
"epoch": 0.06161680458306811,
"grad_norm": 7.249486446380615,
"learning_rate": 9.693012749384279e-05,
"loss": 0.112,
"step": 121
},
{
"epoch": 0.06212603437301082,
"grad_norm": 7.16094970703125,
"learning_rate": 9.687515136252731e-05,
"loss": 0.1314,
"step": 122
},
{
"epoch": 0.06263526416295354,
"grad_norm": 13.14775276184082,
"learning_rate": 9.681970319895803e-05,
"loss": 0.15,
"step": 123
},
{
"epoch": 0.06314449395289624,
"grad_norm": 1.7535552978515625,
"learning_rate": 9.676378356149734e-05,
"loss": 0.1078,
"step": 124
},
{
"epoch": 0.06365372374283895,
"grad_norm": 157.29830932617188,
"learning_rate": 9.670739301325534e-05,
"loss": 1.5483,
"step": 125
},
{
"epoch": 0.06416295353278166,
"grad_norm": 396.1761474609375,
"learning_rate": 9.665053212208426e-05,
"loss": 3.7025,
"step": 126
},
{
"epoch": 0.06467218332272438,
"grad_norm": 119.90931701660156,
"learning_rate": 9.659320146057262e-05,
"loss": 0.9201,
"step": 127
},
{
"epoch": 0.06518141311266709,
"grad_norm": 37.91402053833008,
"learning_rate": 9.653540160603956e-05,
"loss": 0.5631,
"step": 128
},
{
"epoch": 0.0656906429026098,
"grad_norm": 12.050881385803223,
"learning_rate": 9.647713314052896e-05,
"loss": 0.3231,
"step": 129
},
{
"epoch": 0.06619987269255251,
"grad_norm": 8.638886451721191,
"learning_rate": 9.641839665080363e-05,
"loss": 0.1641,
"step": 130
},
{
"epoch": 0.06670910248249523,
"grad_norm": 6.96414041519165,
"learning_rate": 9.635919272833938e-05,
"loss": 0.1458,
"step": 131
},
{
"epoch": 0.06721833227243794,
"grad_norm": 9.606210708618164,
"learning_rate": 9.629952196931901e-05,
"loss": 0.1746,
"step": 132
},
{
"epoch": 0.06772756206238065,
"grad_norm": 10.889528274536133,
"learning_rate": 9.623938497462646e-05,
"loss": 0.1276,
"step": 133
},
{
"epoch": 0.06823679185232336,
"grad_norm": 10.934017181396484,
"learning_rate": 9.617878234984055e-05,
"loss": 0.2305,
"step": 134
},
{
"epoch": 0.06874602164226608,
"grad_norm": 10.173789978027344,
"learning_rate": 9.611771470522908e-05,
"loss": 0.2105,
"step": 135
},
{
"epoch": 0.06925525143220879,
"grad_norm": 8.497604370117188,
"learning_rate": 9.60561826557425e-05,
"loss": 0.1653,
"step": 136
},
{
"epoch": 0.0697644812221515,
"grad_norm": 11.240355491638184,
"learning_rate": 9.599418682100793e-05,
"loss": 0.1518,
"step": 137
},
{
"epoch": 0.0702737110120942,
"grad_norm": 23.68250274658203,
"learning_rate": 9.593172782532268e-05,
"loss": 0.1257,
"step": 138
},
{
"epoch": 0.07078294080203693,
"grad_norm": 12.878501892089844,
"learning_rate": 9.586880629764817e-05,
"loss": 0.0837,
"step": 139
},
{
"epoch": 0.07129217059197963,
"grad_norm": 7.515162467956543,
"learning_rate": 9.580542287160348e-05,
"loss": 0.0495,
"step": 140
},
{
"epoch": 0.07180140038192234,
"grad_norm": 8.767376899719238,
"learning_rate": 9.574157818545901e-05,
"loss": 0.0835,
"step": 141
},
{
"epoch": 0.07231063017186505,
"grad_norm": 13.098119735717773,
"learning_rate": 9.567727288213005e-05,
"loss": 0.0426,
"step": 142
},
{
"epoch": 0.07281985996180776,
"grad_norm": 5.534914970397949,
"learning_rate": 9.561250760917027e-05,
"loss": 0.0358,
"step": 143
},
{
"epoch": 0.07332908975175048,
"grad_norm": 6.893864154815674,
"learning_rate": 9.554728301876526e-05,
"loss": 0.0407,
"step": 144
},
{
"epoch": 0.07383831954169319,
"grad_norm": 9.333673477172852,
"learning_rate": 9.548159976772592e-05,
"loss": 0.0433,
"step": 145
},
{
"epoch": 0.0743475493316359,
"grad_norm": 10.31505012512207,
"learning_rate": 9.541545851748186e-05,
"loss": 0.0814,
"step": 146
},
{
"epoch": 0.0748567791215786,
"grad_norm": 4.145845413208008,
"learning_rate": 9.534885993407474e-05,
"loss": 0.0354,
"step": 147
},
{
"epoch": 0.07536600891152133,
"grad_norm": 7.609471321105957,
"learning_rate": 9.528180468815155e-05,
"loss": 0.0806,
"step": 148
},
{
"epoch": 0.07587523870146404,
"grad_norm": 6.148724555969238,
"learning_rate": 9.521429345495787e-05,
"loss": 0.0316,
"step": 149
},
{
"epoch": 0.07638446849140675,
"grad_norm": 14.903536796569824,
"learning_rate": 9.514632691433107e-05,
"loss": 0.1945,
"step": 150
},
{
"epoch": 0.07689369828134945,
"grad_norm": 8.966485977172852,
"learning_rate": 9.507790575069347e-05,
"loss": 0.9285,
"step": 151
},
{
"epoch": 0.07740292807129218,
"grad_norm": 10.010855674743652,
"learning_rate": 9.50090306530454e-05,
"loss": 0.9562,
"step": 152
},
{
"epoch": 0.07791215786123488,
"grad_norm": 15.471893310546875,
"learning_rate": 9.493970231495835e-05,
"loss": 0.5357,
"step": 153
},
{
"epoch": 0.0784213876511776,
"grad_norm": 9.118985176086426,
"learning_rate": 9.486992143456792e-05,
"loss": 0.5833,
"step": 154
},
{
"epoch": 0.0789306174411203,
"grad_norm": 25.249879837036133,
"learning_rate": 9.479968871456679e-05,
"loss": 1.2881,
"step": 155
},
{
"epoch": 0.07943984723106302,
"grad_norm": 29.803438186645508,
"learning_rate": 9.472900486219769e-05,
"loss": 1.3695,
"step": 156
},
{
"epoch": 0.07994907702100573,
"grad_norm": 23.109800338745117,
"learning_rate": 9.46578705892462e-05,
"loss": 0.8352,
"step": 157
},
{
"epoch": 0.08045830681094844,
"grad_norm": 19.88869285583496,
"learning_rate": 9.458628661203367e-05,
"loss": 1.0865,
"step": 158
},
{
"epoch": 0.08096753660089115,
"grad_norm": 27.150493621826172,
"learning_rate": 9.451425365140996e-05,
"loss": 0.9212,
"step": 159
},
{
"epoch": 0.08147676639083386,
"grad_norm": 21.8201847076416,
"learning_rate": 9.444177243274618e-05,
"loss": 1.2252,
"step": 160
},
{
"epoch": 0.08198599618077658,
"grad_norm": 14.624476432800293,
"learning_rate": 9.43688436859274e-05,
"loss": 0.8962,
"step": 161
},
{
"epoch": 0.08249522597071929,
"grad_norm": 21.079187393188477,
"learning_rate": 9.429546814534529e-05,
"loss": 0.8082,
"step": 162
},
{
"epoch": 0.083004455760662,
"grad_norm": 25.841928482055664,
"learning_rate": 9.422164654989072e-05,
"loss": 0.8788,
"step": 163
},
{
"epoch": 0.0835136855506047,
"grad_norm": 6.6645941734313965,
"learning_rate": 9.414737964294636e-05,
"loss": 0.2285,
"step": 164
},
{
"epoch": 0.08402291534054743,
"grad_norm": 9.64619255065918,
"learning_rate": 9.407266817237911e-05,
"loss": 0.1814,
"step": 165
},
{
"epoch": 0.08453214513049014,
"grad_norm": 2.2856459617614746,
"learning_rate": 9.399751289053267e-05,
"loss": 0.0883,
"step": 166
},
{
"epoch": 0.08504137492043284,
"grad_norm": 5.339344024658203,
"learning_rate": 9.392191455421988e-05,
"loss": 0.1506,
"step": 167
},
{
"epoch": 0.08555060471037555,
"grad_norm": 3.965207099914551,
"learning_rate": 9.384587392471515e-05,
"loss": 0.1156,
"step": 168
},
{
"epoch": 0.08605983450031827,
"grad_norm": 2.786485195159912,
"learning_rate": 9.376939176774679e-05,
"loss": 0.095,
"step": 169
},
{
"epoch": 0.08656906429026098,
"grad_norm": 2.5898613929748535,
"learning_rate": 9.369246885348926e-05,
"loss": 0.1513,
"step": 170
},
{
"epoch": 0.08707829408020369,
"grad_norm": 3.1944878101348877,
"learning_rate": 9.361510595655545e-05,
"loss": 0.1361,
"step": 171
},
{
"epoch": 0.0875875238701464,
"grad_norm": 2.0986759662628174,
"learning_rate": 9.353730385598887e-05,
"loss": 0.112,
"step": 172
},
{
"epoch": 0.08809675366008912,
"grad_norm": 3.246211528778076,
"learning_rate": 9.345906333525581e-05,
"loss": 0.0693,
"step": 173
},
{
"epoch": 0.08860598345003183,
"grad_norm": 2.6745364665985107,
"learning_rate": 9.338038518223747e-05,
"loss": 0.1327,
"step": 174
},
{
"epoch": 0.08911521323997454,
"grad_norm": 97.89273071289062,
"learning_rate": 9.330127018922194e-05,
"loss": 0.9497,
"step": 175
},
{
"epoch": 0.08962444302991725,
"grad_norm": 20.47549819946289,
"learning_rate": 9.322171915289635e-05,
"loss": 0.3825,
"step": 176
},
{
"epoch": 0.09013367281985996,
"grad_norm": 10.546618461608887,
"learning_rate": 9.314173287433873e-05,
"loss": 0.3422,
"step": 177
},
{
"epoch": 0.09064290260980268,
"grad_norm": 8.488680839538574,
"learning_rate": 9.306131215901003e-05,
"loss": 0.4135,
"step": 178
},
{
"epoch": 0.09115213239974539,
"grad_norm": 9.975773811340332,
"learning_rate": 9.298045781674596e-05,
"loss": 0.2914,
"step": 179
},
{
"epoch": 0.0916613621896881,
"grad_norm": 9.072954177856445,
"learning_rate": 9.289917066174886e-05,
"loss": 0.2371,
"step": 180
},
{
"epoch": 0.0921705919796308,
"grad_norm": 7.301230430603027,
"learning_rate": 9.281745151257946e-05,
"loss": 0.1286,
"step": 181
},
{
"epoch": 0.09267982176957353,
"grad_norm": 5.187671184539795,
"learning_rate": 9.273530119214868e-05,
"loss": 0.1105,
"step": 182
},
{
"epoch": 0.09318905155951623,
"grad_norm": 10.456426620483398,
"learning_rate": 9.265272052770936e-05,
"loss": 0.1123,
"step": 183
},
{
"epoch": 0.09369828134945894,
"grad_norm": 7.074233531951904,
"learning_rate": 9.256971035084785e-05,
"loss": 0.174,
"step": 184
},
{
"epoch": 0.09420751113940165,
"grad_norm": 7.056065082550049,
"learning_rate": 9.248627149747573e-05,
"loss": 0.209,
"step": 185
},
{
"epoch": 0.09471674092934437,
"grad_norm": 7.530999183654785,
"learning_rate": 9.24024048078213e-05,
"loss": 0.1251,
"step": 186
},
{
"epoch": 0.09522597071928708,
"grad_norm": 11.38913631439209,
"learning_rate": 9.231811112642121e-05,
"loss": 0.2015,
"step": 187
},
{
"epoch": 0.09573520050922979,
"grad_norm": 6.872330188751221,
"learning_rate": 9.223339130211192e-05,
"loss": 0.1438,
"step": 188
},
{
"epoch": 0.0962444302991725,
"grad_norm": 10.519857406616211,
"learning_rate": 9.214824618802109e-05,
"loss": 0.1032,
"step": 189
},
{
"epoch": 0.0967536600891152,
"grad_norm": 8.731643676757812,
"learning_rate": 9.206267664155907e-05,
"loss": 0.0608,
"step": 190
},
{
"epoch": 0.09726288987905793,
"grad_norm": 4.590799808502197,
"learning_rate": 9.197668352441025e-05,
"loss": 0.0585,
"step": 191
},
{
"epoch": 0.09777211966900064,
"grad_norm": 4.542694568634033,
"learning_rate": 9.189026770252436e-05,
"loss": 0.038,
"step": 192
},
{
"epoch": 0.09828134945894335,
"grad_norm": 22.831832885742188,
"learning_rate": 9.18034300461078e-05,
"loss": 0.0438,
"step": 193
},
{
"epoch": 0.09879057924888605,
"grad_norm": 21.84329605102539,
"learning_rate": 9.171617142961477e-05,
"loss": 0.1501,
"step": 194
},
{
"epoch": 0.09929980903882878,
"grad_norm": 8.787395477294922,
"learning_rate": 9.162849273173857e-05,
"loss": 0.0465,
"step": 195
},
{
"epoch": 0.09980903882877148,
"grad_norm": 3.925507068634033,
"learning_rate": 9.154039483540273e-05,
"loss": 0.0239,
"step": 196
},
{
"epoch": 0.1003182686187142,
"grad_norm": 4.768054008483887,
"learning_rate": 9.145187862775209e-05,
"loss": 0.0463,
"step": 197
},
{
"epoch": 0.1008274984086569,
"grad_norm": 10.094438552856445,
"learning_rate": 9.136294500014386e-05,
"loss": 0.024,
"step": 198
},
{
"epoch": 0.10133672819859962,
"grad_norm": 5.6756463050842285,
"learning_rate": 9.12735948481387e-05,
"loss": 0.0279,
"step": 199
},
{
"epoch": 0.10184595798854233,
"grad_norm": 18.045185089111328,
"learning_rate": 9.118382907149165e-05,
"loss": 0.1826,
"step": 200
},
{
"epoch": 0.10184595798854233,
"eval_loss": 0.6390340328216553,
"eval_runtime": 378.2914,
"eval_samples_per_second": 8.745,
"eval_steps_per_second": 2.186,
"step": 200
},
{
"epoch": 0.10235518777848504,
"grad_norm": 5.750209808349609,
"learning_rate": 9.109364857414306e-05,
"loss": 1.0072,
"step": 201
},
{
"epoch": 0.10286441756842775,
"grad_norm": 6.447965145111084,
"learning_rate": 9.100305426420956e-05,
"loss": 0.903,
"step": 202
},
{
"epoch": 0.10337364735837047,
"grad_norm": 9.898489952087402,
"learning_rate": 9.091204705397484e-05,
"loss": 1.0206,
"step": 203
},
{
"epoch": 0.10388287714831318,
"grad_norm": 6.285497188568115,
"learning_rate": 9.082062785988049e-05,
"loss": 0.3841,
"step": 204
},
{
"epoch": 0.10439210693825589,
"grad_norm": 17.02065658569336,
"learning_rate": 9.072879760251679e-05,
"loss": 1.5343,
"step": 205
},
{
"epoch": 0.1049013367281986,
"grad_norm": 17.704208374023438,
"learning_rate": 9.06365572066134e-05,
"loss": 0.9827,
"step": 206
},
{
"epoch": 0.1054105665181413,
"grad_norm": 10.860851287841797,
"learning_rate": 9.05439076010301e-05,
"loss": 0.3846,
"step": 207
},
{
"epoch": 0.10591979630808403,
"grad_norm": 16.583065032958984,
"learning_rate": 9.045084971874738e-05,
"loss": 0.6791,
"step": 208
},
{
"epoch": 0.10642902609802674,
"grad_norm": 15.18968677520752,
"learning_rate": 9.035738449685707e-05,
"loss": 1.09,
"step": 209
},
{
"epoch": 0.10693825588796944,
"grad_norm": 16.335735321044922,
"learning_rate": 9.026351287655294e-05,
"loss": 1.1956,
"step": 210
},
{
"epoch": 0.10744748567791215,
"grad_norm": 17.723297119140625,
"learning_rate": 9.016923580312113e-05,
"loss": 0.4732,
"step": 211
},
{
"epoch": 0.10795671546785487,
"grad_norm": 20.851490020751953,
"learning_rate": 9.007455422593077e-05,
"loss": 0.821,
"step": 212
},
{
"epoch": 0.10846594525779758,
"grad_norm": 15.048613548278809,
"learning_rate": 8.997946909842425e-05,
"loss": 0.9223,
"step": 213
},
{
"epoch": 0.10897517504774029,
"grad_norm": 2.7917280197143555,
"learning_rate": 8.988398137810777e-05,
"loss": 0.1104,
"step": 214
},
{
"epoch": 0.109484404837683,
"grad_norm": 3.5661802291870117,
"learning_rate": 8.978809202654162e-05,
"loss": 0.1082,
"step": 215
},
{
"epoch": 0.10999363462762572,
"grad_norm": 4.870175838470459,
"learning_rate": 8.969180200933047e-05,
"loss": 0.1652,
"step": 216
},
{
"epoch": 0.11050286441756843,
"grad_norm": 2.00311541557312,
"learning_rate": 8.959511229611376e-05,
"loss": 0.1195,
"step": 217
},
{
"epoch": 0.11101209420751114,
"grad_norm": 3.949178695678711,
"learning_rate": 8.949802386055581e-05,
"loss": 0.1085,
"step": 218
},
{
"epoch": 0.11152132399745385,
"grad_norm": 1.9956891536712646,
"learning_rate": 8.940053768033609e-05,
"loss": 0.0933,
"step": 219
},
{
"epoch": 0.11203055378739657,
"grad_norm": 2.3544695377349854,
"learning_rate": 8.930265473713938e-05,
"loss": 0.121,
"step": 220
},
{
"epoch": 0.11253978357733928,
"grad_norm": 1.6948124170303345,
"learning_rate": 8.92043760166458e-05,
"loss": 0.0807,
"step": 221
},
{
"epoch": 0.11304901336728199,
"grad_norm": 3.766237735748291,
"learning_rate": 8.910570250852097e-05,
"loss": 0.2488,
"step": 222
},
{
"epoch": 0.1135582431572247,
"grad_norm": 1.9978468418121338,
"learning_rate": 8.900663520640604e-05,
"loss": 0.0865,
"step": 223
},
{
"epoch": 0.1140674729471674,
"grad_norm": 3.3670530319213867,
"learning_rate": 8.890717510790763e-05,
"loss": 0.1258,
"step": 224
},
{
"epoch": 0.11457670273711013,
"grad_norm": 135.1156463623047,
"learning_rate": 8.880732321458784e-05,
"loss": 1.1012,
"step": 225
},
{
"epoch": 0.11508593252705283,
"grad_norm": 78.30040740966797,
"learning_rate": 8.870708053195413e-05,
"loss": 0.6246,
"step": 226
},
{
"epoch": 0.11559516231699554,
"grad_norm": 9.838759422302246,
"learning_rate": 8.860644806944918e-05,
"loss": 0.1683,
"step": 227
},
{
"epoch": 0.11610439210693825,
"grad_norm": 11.655884742736816,
"learning_rate": 8.850542684044078e-05,
"loss": 0.191,
"step": 228
},
{
"epoch": 0.11661362189688097,
"grad_norm": 5.8414130210876465,
"learning_rate": 8.840401786221159e-05,
"loss": 0.2464,
"step": 229
},
{
"epoch": 0.11712285168682368,
"grad_norm": 7.483433723449707,
"learning_rate": 8.83022221559489e-05,
"loss": 0.2158,
"step": 230
},
{
"epoch": 0.11763208147676639,
"grad_norm": 6.090114116668701,
"learning_rate": 8.820004074673433e-05,
"loss": 0.199,
"step": 231
},
{
"epoch": 0.1181413112667091,
"grad_norm": 7.228814125061035,
"learning_rate": 8.809747466353356e-05,
"loss": 0.1263,
"step": 232
},
{
"epoch": 0.11865054105665182,
"grad_norm": 7.769136428833008,
"learning_rate": 8.799452493918585e-05,
"loss": 0.1385,
"step": 233
},
{
"epoch": 0.11915977084659453,
"grad_norm": 7.88167667388916,
"learning_rate": 8.789119261039385e-05,
"loss": 0.1602,
"step": 234
},
{
"epoch": 0.11966900063653724,
"grad_norm": 5.976058483123779,
"learning_rate": 8.778747871771292e-05,
"loss": 0.1044,
"step": 235
},
{
"epoch": 0.12017823042647995,
"grad_norm": 7.186121463775635,
"learning_rate": 8.768338430554082e-05,
"loss": 0.1026,
"step": 236
},
{
"epoch": 0.12068746021642265,
"grad_norm": 7.408992767333984,
"learning_rate": 8.757891042210714e-05,
"loss": 0.1399,
"step": 237
},
{
"epoch": 0.12119669000636538,
"grad_norm": 9.789539337158203,
"learning_rate": 8.74740581194627e-05,
"loss": 0.082,
"step": 238
},
{
"epoch": 0.12170591979630808,
"grad_norm": 7.328338146209717,
"learning_rate": 8.736882845346906e-05,
"loss": 0.0742,
"step": 239
},
{
"epoch": 0.12221514958625079,
"grad_norm": 12.503471374511719,
"learning_rate": 8.726322248378775e-05,
"loss": 0.0757,
"step": 240
},
{
"epoch": 0.1227243793761935,
"grad_norm": 5.140643119812012,
"learning_rate": 8.715724127386972e-05,
"loss": 0.0174,
"step": 241
},
{
"epoch": 0.12323360916613622,
"grad_norm": 4.965789318084717,
"learning_rate": 8.705088589094459e-05,
"loss": 0.0311,
"step": 242
},
{
"epoch": 0.12374283895607893,
"grad_norm": 22.65897560119629,
"learning_rate": 8.694415740600988e-05,
"loss": 0.0395,
"step": 243
},
{
"epoch": 0.12425206874602164,
"grad_norm": 5.265443325042725,
"learning_rate": 8.683705689382024e-05,
"loss": 0.0543,
"step": 244
},
{
"epoch": 0.12476129853596435,
"grad_norm": 6.697726249694824,
"learning_rate": 8.672958543287666e-05,
"loss": 0.0549,
"step": 245
},
{
"epoch": 0.12527052832590707,
"grad_norm": 9.752626419067383,
"learning_rate": 8.662174410541555e-05,
"loss": 0.0395,
"step": 246
},
{
"epoch": 0.12577975811584977,
"grad_norm": 7.68189001083374,
"learning_rate": 8.651353399739787e-05,
"loss": 0.0678,
"step": 247
},
{
"epoch": 0.1262889879057925,
"grad_norm": 11.428817749023438,
"learning_rate": 8.640495619849821e-05,
"loss": 0.1091,
"step": 248
},
{
"epoch": 0.1267982176957352,
"grad_norm": 18.397584915161133,
"learning_rate": 8.629601180209381e-05,
"loss": 0.128,
"step": 249
},
{
"epoch": 0.1273074474856779,
"grad_norm": 7.687522888183594,
"learning_rate": 8.618670190525352e-05,
"loss": 0.0705,
"step": 250
},
{
"epoch": 0.12781667727562063,
"grad_norm": 6.8483805656433105,
"learning_rate": 8.607702760872678e-05,
"loss": 0.8196,
"step": 251
},
{
"epoch": 0.12832590706556332,
"grad_norm": 8.953585624694824,
"learning_rate": 8.596699001693255e-05,
"loss": 0.9635,
"step": 252
},
{
"epoch": 0.12883513685550604,
"grad_norm": 7.499317646026611,
"learning_rate": 8.585659023794818e-05,
"loss": 0.5593,
"step": 253
},
{
"epoch": 0.12934436664544877,
"grad_norm": 10.518817901611328,
"learning_rate": 8.574582938349817e-05,
"loss": 0.5622,
"step": 254
},
{
"epoch": 0.12985359643539146,
"grad_norm": 18.279752731323242,
"learning_rate": 8.563470856894316e-05,
"loss": 1.0561,
"step": 255
},
{
"epoch": 0.13036282622533418,
"grad_norm": 13.749988555908203,
"learning_rate": 8.552322891326846e-05,
"loss": 0.6541,
"step": 256
},
{
"epoch": 0.1308720560152769,
"grad_norm": 16.35479164123535,
"learning_rate": 8.541139153907296e-05,
"loss": 0.8946,
"step": 257
},
{
"epoch": 0.1313812858052196,
"grad_norm": 15.60219955444336,
"learning_rate": 8.529919757255783e-05,
"loss": 1.2002,
"step": 258
},
{
"epoch": 0.13189051559516232,
"grad_norm": 17.93536949157715,
"learning_rate": 8.518664814351502e-05,
"loss": 0.7515,
"step": 259
},
{
"epoch": 0.13239974538510502,
"grad_norm": 10.655067443847656,
"learning_rate": 8.507374438531607e-05,
"loss": 0.7691,
"step": 260
},
{
"epoch": 0.13290897517504774,
"grad_norm": 17.118453979492188,
"learning_rate": 8.496048743490053e-05,
"loss": 0.8141,
"step": 261
},
{
"epoch": 0.13341820496499046,
"grad_norm": 6.289425373077393,
"learning_rate": 8.484687843276469e-05,
"loss": 0.334,
"step": 262
},
{
"epoch": 0.13392743475493316,
"grad_norm": 7.217257976531982,
"learning_rate": 8.473291852294987e-05,
"loss": 0.2386,
"step": 263
},
{
"epoch": 0.13443666454487588,
"grad_norm": 3.586156129837036,
"learning_rate": 8.461860885303114e-05,
"loss": 0.1299,
"step": 264
},
{
"epoch": 0.1349458943348186,
"grad_norm": 3.3640799522399902,
"learning_rate": 8.450395057410561e-05,
"loss": 0.0798,
"step": 265
},
{
"epoch": 0.1354551241247613,
"grad_norm": 4.526169300079346,
"learning_rate": 8.438894484078086e-05,
"loss": 0.1487,
"step": 266
},
{
"epoch": 0.13596435391470402,
"grad_norm": 2.676795244216919,
"learning_rate": 8.427359281116334e-05,
"loss": 0.109,
"step": 267
},
{
"epoch": 0.1364735837046467,
"grad_norm": 0.9461959004402161,
"learning_rate": 8.415789564684673e-05,
"loss": 0.1007,
"step": 268
},
{
"epoch": 0.13698281349458943,
"grad_norm": 2.397101879119873,
"learning_rate": 8.404185451290018e-05,
"loss": 0.1238,
"step": 269
},
{
"epoch": 0.13749204328453216,
"grad_norm": 6.66584587097168,
"learning_rate": 8.392547057785661e-05,
"loss": 0.1607,
"step": 270
},
{
"epoch": 0.13800127307447485,
"grad_norm": 1.6675441265106201,
"learning_rate": 8.380874501370097e-05,
"loss": 0.0728,
"step": 271
},
{
"epoch": 0.13851050286441757,
"grad_norm": 1.8109138011932373,
"learning_rate": 8.369167899585841e-05,
"loss": 0.0844,
"step": 272
},
{
"epoch": 0.13901973265436027,
"grad_norm": 9.230074882507324,
"learning_rate": 8.357427370318239e-05,
"loss": 0.2044,
"step": 273
},
{
"epoch": 0.139528962444303,
"grad_norm": 10.465958595275879,
"learning_rate": 8.345653031794292e-05,
"loss": 0.2347,
"step": 274
},
{
"epoch": 0.1400381922342457,
"grad_norm": 14.745471000671387,
"learning_rate": 8.333845002581458e-05,
"loss": 0.254,
"step": 275
},
{
"epoch": 0.1405474220241884,
"grad_norm": 6.833587646484375,
"learning_rate": 8.322003401586462e-05,
"loss": 0.1399,
"step": 276
},
{
"epoch": 0.14105665181413113,
"grad_norm": 7.580754280090332,
"learning_rate": 8.310128348054094e-05,
"loss": 0.164,
"step": 277
},
{
"epoch": 0.14156588160407385,
"grad_norm": 10.550285339355469,
"learning_rate": 8.298219961566009e-05,
"loss": 0.132,
"step": 278
},
{
"epoch": 0.14207511139401655,
"grad_norm": 4.947827339172363,
"learning_rate": 8.286278362039528e-05,
"loss": 0.0824,
"step": 279
},
{
"epoch": 0.14258434118395927,
"grad_norm": 9.395261764526367,
"learning_rate": 8.274303669726426e-05,
"loss": 0.1049,
"step": 280
},
{
"epoch": 0.14309357097390196,
"grad_norm": 6.372900009155273,
"learning_rate": 8.262296005211721e-05,
"loss": 0.119,
"step": 281
},
{
"epoch": 0.14360280076384468,
"grad_norm": 10.334807395935059,
"learning_rate": 8.250255489412463e-05,
"loss": 0.1494,
"step": 282
},
{
"epoch": 0.1441120305537874,
"grad_norm": 7.954561710357666,
"learning_rate": 8.238182243576512e-05,
"loss": 0.1484,
"step": 283
},
{
"epoch": 0.1446212603437301,
"grad_norm": 6.839869022369385,
"learning_rate": 8.226076389281316e-05,
"loss": 0.1011,
"step": 284
},
{
"epoch": 0.14513049013367282,
"grad_norm": 8.359034538269043,
"learning_rate": 8.213938048432697e-05,
"loss": 0.1297,
"step": 285
},
{
"epoch": 0.14563971992361552,
"grad_norm": 5.433843612670898,
"learning_rate": 8.201767343263612e-05,
"loss": 0.1206,
"step": 286
},
{
"epoch": 0.14614894971355824,
"grad_norm": 8.870092391967773,
"learning_rate": 8.189564396332928e-05,
"loss": 0.0581,
"step": 287
},
{
"epoch": 0.14665817950350096,
"grad_norm": 10.579423904418945,
"learning_rate": 8.177329330524182e-05,
"loss": 0.0415,
"step": 288
},
{
"epoch": 0.14716740929344366,
"grad_norm": 4.38845157623291,
"learning_rate": 8.165062269044353e-05,
"loss": 0.0424,
"step": 289
},
{
"epoch": 0.14767663908338638,
"grad_norm": 3.134714126586914,
"learning_rate": 8.152763335422613e-05,
"loss": 0.0138,
"step": 290
},
{
"epoch": 0.1481858688733291,
"grad_norm": 7.17431116104126,
"learning_rate": 8.140432653509089e-05,
"loss": 0.1052,
"step": 291
},
{
"epoch": 0.1486950986632718,
"grad_norm": 9.987316131591797,
"learning_rate": 8.128070347473609e-05,
"loss": 0.1305,
"step": 292
},
{
"epoch": 0.14920432845321452,
"grad_norm": 14.93751335144043,
"learning_rate": 8.115676541804456e-05,
"loss": 0.0562,
"step": 293
},
{
"epoch": 0.1497135582431572,
"grad_norm": 11.657471656799316,
"learning_rate": 8.103251361307119e-05,
"loss": 0.1214,
"step": 294
},
{
"epoch": 0.15022278803309994,
"grad_norm": 0.8434633612632751,
"learning_rate": 8.090794931103026e-05,
"loss": 0.0029,
"step": 295
},
{
"epoch": 0.15073201782304266,
"grad_norm": 3.910121440887451,
"learning_rate": 8.07830737662829e-05,
"loss": 0.0248,
"step": 296
},
{
"epoch": 0.15124124761298535,
"grad_norm": 4.701965808868408,
"learning_rate": 8.065788823632451e-05,
"loss": 0.0083,
"step": 297
},
{
"epoch": 0.15175047740292807,
"grad_norm": 5.969517707824707,
"learning_rate": 8.053239398177191e-05,
"loss": 0.0785,
"step": 298
},
{
"epoch": 0.1522597071928708,
"grad_norm": 9.37517261505127,
"learning_rate": 8.04065922663509e-05,
"loss": 0.0725,
"step": 299
},
{
"epoch": 0.1527689369828135,
"grad_norm": 22.21939468383789,
"learning_rate": 8.028048435688333e-05,
"loss": 0.0937,
"step": 300
},
{
"epoch": 0.1532781667727562,
"grad_norm": 15.828902244567871,
"learning_rate": 8.015407152327448e-05,
"loss": 0.6293,
"step": 301
},
{
"epoch": 0.1537873965626989,
"grad_norm": 7.590906620025635,
"learning_rate": 8.002735503850016e-05,
"loss": 0.6786,
"step": 302
},
{
"epoch": 0.15429662635264163,
"grad_norm": 8.043692588806152,
"learning_rate": 7.990033617859396e-05,
"loss": 0.731,
"step": 303
},
{
"epoch": 0.15480585614258435,
"grad_norm": 9.872035026550293,
"learning_rate": 7.97730162226344e-05,
"loss": 0.2062,
"step": 304
},
{
"epoch": 0.15531508593252705,
"grad_norm": 18.133535385131836,
"learning_rate": 7.964539645273204e-05,
"loss": 1.1256,
"step": 305
},
{
"epoch": 0.15582431572246977,
"grad_norm": 17.49220848083496,
"learning_rate": 7.95174781540165e-05,
"loss": 1.0813,
"step": 306
},
{
"epoch": 0.15633354551241246,
"grad_norm": 19.740583419799805,
"learning_rate": 7.938926261462366e-05,
"loss": 0.5487,
"step": 307
},
{
"epoch": 0.1568427753023552,
"grad_norm": 19.193031311035156,
"learning_rate": 7.926075112568259e-05,
"loss": 1.0428,
"step": 308
},
{
"epoch": 0.1573520050922979,
"grad_norm": 14.3748197555542,
"learning_rate": 7.913194498130252e-05,
"loss": 0.901,
"step": 309
},
{
"epoch": 0.1578612348822406,
"grad_norm": 18.86087989807129,
"learning_rate": 7.900284547855991e-05,
"loss": 0.7603,
"step": 310
},
{
"epoch": 0.15837046467218333,
"grad_norm": 15.366621971130371,
"learning_rate": 7.887345391748533e-05,
"loss": 0.2624,
"step": 311
},
{
"epoch": 0.15887969446212605,
"grad_norm": 19.380908966064453,
"learning_rate": 7.874377160105036e-05,
"loss": 1.0304,
"step": 312
},
{
"epoch": 0.15938892425206874,
"grad_norm": 13.959190368652344,
"learning_rate": 7.861379983515449e-05,
"loss": 0.7812,
"step": 313
},
{
"epoch": 0.15989815404201146,
"grad_norm": 3.1605825424194336,
"learning_rate": 7.848353992861195e-05,
"loss": 0.0522,
"step": 314
},
{
"epoch": 0.16040738383195416,
"grad_norm": 3.092629909515381,
"learning_rate": 7.835299319313853e-05,
"loss": 0.1617,
"step": 315
},
{
"epoch": 0.16091661362189688,
"grad_norm": 3.086357355117798,
"learning_rate": 7.822216094333847e-05,
"loss": 0.2058,
"step": 316
},
{
"epoch": 0.1614258434118396,
"grad_norm": 3.3937745094299316,
"learning_rate": 7.809104449669101e-05,
"loss": 0.1046,
"step": 317
},
{
"epoch": 0.1619350732017823,
"grad_norm": 1.3770623207092285,
"learning_rate": 7.795964517353735e-05,
"loss": 0.1156,
"step": 318
},
{
"epoch": 0.16244430299172502,
"grad_norm": 1.293850302696228,
"learning_rate": 7.78279642970672e-05,
"loss": 0.0919,
"step": 319
},
{
"epoch": 0.16295353278166771,
"grad_norm": 4.897433757781982,
"learning_rate": 7.769600319330552e-05,
"loss": 0.1203,
"step": 320
},
{
"epoch": 0.16346276257161044,
"grad_norm": 0.744999885559082,
"learning_rate": 7.756376319109917e-05,
"loss": 0.0906,
"step": 321
},
{
"epoch": 0.16397199236155316,
"grad_norm": 2.635500192642212,
"learning_rate": 7.74312456221035e-05,
"loss": 0.1324,
"step": 322
},
{
"epoch": 0.16448122215149585,
"grad_norm": 3.7716286182403564,
"learning_rate": 7.729845182076895e-05,
"loss": 0.09,
"step": 323
},
{
"epoch": 0.16499045194143858,
"grad_norm": 1.3198482990264893,
"learning_rate": 7.716538312432766e-05,
"loss": 0.0999,
"step": 324
},
{
"epoch": 0.1654996817313813,
"grad_norm": 6.107431888580322,
"learning_rate": 7.703204087277988e-05,
"loss": 0.0661,
"step": 325
},
{
"epoch": 0.166008911521324,
"grad_norm": 15.861464500427246,
"learning_rate": 7.689842640888063e-05,
"loss": 0.1399,
"step": 326
},
{
"epoch": 0.16651814131126672,
"grad_norm": 8.088520050048828,
"learning_rate": 7.676454107812607e-05,
"loss": 0.2258,
"step": 327
},
{
"epoch": 0.1670273711012094,
"grad_norm": 10.10229206085205,
"learning_rate": 7.663038622873999e-05,
"loss": 0.206,
"step": 328
},
{
"epoch": 0.16753660089115213,
"grad_norm": 9.973199844360352,
"learning_rate": 7.649596321166024e-05,
"loss": 0.1449,
"step": 329
},
{
"epoch": 0.16804583068109485,
"grad_norm": 5.558497905731201,
"learning_rate": 7.636127338052512e-05,
"loss": 0.084,
"step": 330
},
{
"epoch": 0.16855506047103755,
"grad_norm": 6.0195722579956055,
"learning_rate": 7.622631809165973e-05,
"loss": 0.2007,
"step": 331
},
{
"epoch": 0.16906429026098027,
"grad_norm": 9.740483283996582,
"learning_rate": 7.60910987040623e-05,
"loss": 0.1116,
"step": 332
},
{
"epoch": 0.16957352005092297,
"grad_norm": 4.960144519805908,
"learning_rate": 7.595561657939061e-05,
"loss": 0.0648,
"step": 333
},
{
"epoch": 0.1700827498408657,
"grad_norm": 8.019902229309082,
"learning_rate": 7.58198730819481e-05,
"loss": 0.1621,
"step": 334
},
{
"epoch": 0.1705919796308084,
"grad_norm": 5.274026870727539,
"learning_rate": 7.568386957867033e-05,
"loss": 0.1088,
"step": 335
},
{
"epoch": 0.1711012094207511,
"grad_norm": 8.776835441589355,
"learning_rate": 7.554760743911103e-05,
"loss": 0.071,
"step": 336
},
{
"epoch": 0.17161043921069383,
"grad_norm": 4.958322525024414,
"learning_rate": 7.541108803542846e-05,
"loss": 0.1097,
"step": 337
},
{
"epoch": 0.17211966900063655,
"grad_norm": 8.615314483642578,
"learning_rate": 7.52743127423715e-05,
"loss": 0.114,
"step": 338
},
{
"epoch": 0.17262889879057924,
"grad_norm": 8.077454566955566,
"learning_rate": 7.51372829372658e-05,
"loss": 0.0687,
"step": 339
},
{
"epoch": 0.17313812858052197,
"grad_norm": 9.958930969238281,
"learning_rate": 7.500000000000001e-05,
"loss": 0.0614,
"step": 340
},
{
"epoch": 0.17364735837046466,
"grad_norm": 7.942490100860596,
"learning_rate": 7.486246531301177e-05,
"loss": 0.0948,
"step": 341
},
{
"epoch": 0.17415658816040738,
"grad_norm": 3.127333164215088,
"learning_rate": 7.472468026127385e-05,
"loss": 0.0392,
"step": 342
},
{
"epoch": 0.1746658179503501,
"grad_norm": 8.586334228515625,
"learning_rate": 7.45866462322802e-05,
"loss": 0.1213,
"step": 343
},
{
"epoch": 0.1751750477402928,
"grad_norm": 13.56935977935791,
"learning_rate": 7.444836461603195e-05,
"loss": 0.2354,
"step": 344
},
{
"epoch": 0.17568427753023552,
"grad_norm": 4.473952770233154,
"learning_rate": 7.430983680502344e-05,
"loss": 0.0905,
"step": 345
},
{
"epoch": 0.17619350732017824,
"grad_norm": 4.765993118286133,
"learning_rate": 7.417106419422819e-05,
"loss": 0.0345,
"step": 346
},
{
"epoch": 0.17670273711012094,
"grad_norm": 17.153976440429688,
"learning_rate": 7.403204818108487e-05,
"loss": 0.1301,
"step": 347
},
{
"epoch": 0.17721196690006366,
"grad_norm": 6.228205680847168,
"learning_rate": 7.389279016548316e-05,
"loss": 0.0662,
"step": 348
},
{
"epoch": 0.17772119669000636,
"grad_norm": 6.544225692749023,
"learning_rate": 7.375329154974975e-05,
"loss": 0.0388,
"step": 349
},
{
"epoch": 0.17823042647994908,
"grad_norm": 9.20956039428711,
"learning_rate": 7.361355373863414e-05,
"loss": 0.0441,
"step": 350
},
{
"epoch": 0.1787396562698918,
"grad_norm": 7.032777786254883,
"learning_rate": 7.347357813929454e-05,
"loss": 0.889,
"step": 351
},
{
"epoch": 0.1792488860598345,
"grad_norm": 6.679413795471191,
"learning_rate": 7.333336616128369e-05,
"loss": 0.8573,
"step": 352
},
{
"epoch": 0.17975811584977722,
"grad_norm": 30.212705612182617,
"learning_rate": 7.319291921653464e-05,
"loss": 0.5632,
"step": 353
},
{
"epoch": 0.1802673456397199,
"grad_norm": 11.796157836914062,
"learning_rate": 7.305223871934657e-05,
"loss": 0.9938,
"step": 354
},
{
"epoch": 0.18077657542966263,
"grad_norm": 10.97513484954834,
"learning_rate": 7.291132608637052e-05,
"loss": 0.6269,
"step": 355
},
{
"epoch": 0.18128580521960536,
"grad_norm": 31.140769958496094,
"learning_rate": 7.277018273659517e-05,
"loss": 0.7136,
"step": 356
},
{
"epoch": 0.18179503500954805,
"grad_norm": 12.688136100769043,
"learning_rate": 7.262881009133242e-05,
"loss": 0.9942,
"step": 357
},
{
"epoch": 0.18230426479949077,
"grad_norm": 14.422432899475098,
"learning_rate": 7.24872095742033e-05,
"loss": 1.2091,
"step": 358
},
{
"epoch": 0.1828134945894335,
"grad_norm": 14.67697525024414,
"learning_rate": 7.23453826111234e-05,
"loss": 0.8391,
"step": 359
},
{
"epoch": 0.1833227243793762,
"grad_norm": 19.619991302490234,
"learning_rate": 7.220333063028872e-05,
"loss": 0.714,
"step": 360
},
{
"epoch": 0.1838319541693189,
"grad_norm": 23.350360870361328,
"learning_rate": 7.206105506216106e-05,
"loss": 1.0458,
"step": 361
},
{
"epoch": 0.1843411839592616,
"grad_norm": 14.576053619384766,
"learning_rate": 7.191855733945387e-05,
"loss": 0.7657,
"step": 362
},
{
"epoch": 0.18485041374920433,
"grad_norm": 5.723056793212891,
"learning_rate": 7.177583889711762e-05,
"loss": 0.1924,
"step": 363
},
{
"epoch": 0.18535964353914705,
"grad_norm": 4.669350624084473,
"learning_rate": 7.163290117232542e-05,
"loss": 0.1258,
"step": 364
},
{
"epoch": 0.18586887332908975,
"grad_norm": 3.4824283123016357,
"learning_rate": 7.148974560445859e-05,
"loss": 0.1544,
"step": 365
},
{
"epoch": 0.18637810311903247,
"grad_norm": 2.7098116874694824,
"learning_rate": 7.13463736350921e-05,
"loss": 0.1359,
"step": 366
},
{
"epoch": 0.18688733290897516,
"grad_norm": 9.567126274108887,
"learning_rate": 7.120278670798009e-05,
"loss": 0.1792,
"step": 367
},
{
"epoch": 0.18739656269891788,
"grad_norm": 15.874075889587402,
"learning_rate": 7.105898626904134e-05,
"loss": 0.2289,
"step": 368
},
{
"epoch": 0.1879057924888606,
"grad_norm": 15.76208782196045,
"learning_rate": 7.091497376634464e-05,
"loss": 0.2294,
"step": 369
},
{
"epoch": 0.1884150222788033,
"grad_norm": 10.5492525100708,
"learning_rate": 7.077075065009433e-05,
"loss": 0.1747,
"step": 370
},
{
"epoch": 0.18892425206874602,
"grad_norm": 4.195391654968262,
"learning_rate": 7.062631837261557e-05,
"loss": 0.1075,
"step": 371
},
{
"epoch": 0.18943348185868875,
"grad_norm": 2.2104382514953613,
"learning_rate": 7.048167838833977e-05,
"loss": 0.1676,
"step": 372
},
{
"epoch": 0.18994271164863144,
"grad_norm": 2.322936534881592,
"learning_rate": 7.033683215379002e-05,
"loss": 0.1456,
"step": 373
},
{
"epoch": 0.19045194143857416,
"grad_norm": 9.757192611694336,
"learning_rate": 7.019178112756624e-05,
"loss": 0.1133,
"step": 374
},
{
"epoch": 0.19096117122851686,
"grad_norm": 8.49422550201416,
"learning_rate": 7.004652677033068e-05,
"loss": 0.1131,
"step": 375
},
{
"epoch": 0.19147040101845958,
"grad_norm": 8.021090507507324,
"learning_rate": 6.990107054479312e-05,
"loss": 0.184,
"step": 376
},
{
"epoch": 0.1919796308084023,
"grad_norm": 10.471813201904297,
"learning_rate": 6.97554139156961e-05,
"loss": 0.1842,
"step": 377
},
{
"epoch": 0.192488860598345,
"grad_norm": 7.779923915863037,
"learning_rate": 6.960955834980028e-05,
"loss": 0.1842,
"step": 378
},
{
"epoch": 0.19299809038828772,
"grad_norm": 5.776662826538086,
"learning_rate": 6.946350531586959e-05,
"loss": 0.0991,
"step": 379
},
{
"epoch": 0.1935073201782304,
"grad_norm": 6.379248142242432,
"learning_rate": 6.931725628465643e-05,
"loss": 0.1489,
"step": 380
},
{
"epoch": 0.19401654996817314,
"grad_norm": 7.362051010131836,
"learning_rate": 6.917081272888697e-05,
"loss": 0.2167,
"step": 381
},
{
"epoch": 0.19452577975811586,
"grad_norm": 4.724095821380615,
"learning_rate": 6.902417612324615e-05,
"loss": 0.1161,
"step": 382
},
{
"epoch": 0.19503500954805855,
"grad_norm": 5.9344563484191895,
"learning_rate": 6.8877347944363e-05,
"loss": 0.1312,
"step": 383
},
{
"epoch": 0.19554423933800127,
"grad_norm": 7.308444976806641,
"learning_rate": 6.873032967079561e-05,
"loss": 0.0874,
"step": 384
},
{
"epoch": 0.196053469127944,
"grad_norm": 4.611720561981201,
"learning_rate": 6.858312278301637e-05,
"loss": 0.1284,
"step": 385
},
{
"epoch": 0.1965626989178867,
"grad_norm": 4.12031364440918,
"learning_rate": 6.843572876339705e-05,
"loss": 0.0646,
"step": 386
},
{
"epoch": 0.1970719287078294,
"grad_norm": 9.122203826904297,
"learning_rate": 6.828814909619373e-05,
"loss": 0.1022,
"step": 387
},
{
"epoch": 0.1975811584977721,
"grad_norm": 11.14821720123291,
"learning_rate": 6.814038526753205e-05,
"loss": 0.1363,
"step": 388
},
{
"epoch": 0.19809038828771483,
"grad_norm": 9.928780555725098,
"learning_rate": 6.799243876539212e-05,
"loss": 0.0705,
"step": 389
},
{
"epoch": 0.19859961807765755,
"grad_norm": 3.8702805042266846,
"learning_rate": 6.784431107959359e-05,
"loss": 0.06,
"step": 390
},
{
"epoch": 0.19910884786760025,
"grad_norm": 8.202292442321777,
"learning_rate": 6.769600370178059e-05,
"loss": 0.1628,
"step": 391
},
{
"epoch": 0.19961807765754297,
"grad_norm": 7.946099758148193,
"learning_rate": 6.754751812540679e-05,
"loss": 0.0789,
"step": 392
},
{
"epoch": 0.2001273074474857,
"grad_norm": 7.631869316101074,
"learning_rate": 6.739885584572026e-05,
"loss": 0.0329,
"step": 393
},
{
"epoch": 0.2006365372374284,
"grad_norm": 6.246660232543945,
"learning_rate": 6.725001835974853e-05,
"loss": 0.1132,
"step": 394
},
{
"epoch": 0.2011457670273711,
"grad_norm": 4.672296524047852,
"learning_rate": 6.710100716628344e-05,
"loss": 0.0243,
"step": 395
},
{
"epoch": 0.2016549968173138,
"grad_norm": 3.7364211082458496,
"learning_rate": 6.695182376586603e-05,
"loss": 0.0231,
"step": 396
},
{
"epoch": 0.20216422660725653,
"grad_norm": 14.668068885803223,
"learning_rate": 6.680246966077151e-05,
"loss": 0.0503,
"step": 397
},
{
"epoch": 0.20267345639719925,
"grad_norm": 10.774435043334961,
"learning_rate": 6.665294635499404e-05,
"loss": 0.0425,
"step": 398
},
{
"epoch": 0.20318268618714194,
"grad_norm": 5.576310634613037,
"learning_rate": 6.650325535423167e-05,
"loss": 0.0264,
"step": 399
},
{
"epoch": 0.20369191597708466,
"grad_norm": 6.781991958618164,
"learning_rate": 6.635339816587109e-05,
"loss": 0.0692,
"step": 400
},
{
"epoch": 0.20369191597708466,
"eval_loss": 0.2951599359512329,
"eval_runtime": 378.4652,
"eval_samples_per_second": 8.741,
"eval_steps_per_second": 2.185,
"step": 400
},
{
"epoch": 0.20420114576702736,
"grad_norm": 5.35024881362915,
"learning_rate": 6.620337629897254e-05,
"loss": 0.8258,
"step": 401
},
{
"epoch": 0.20471037555697008,
"grad_norm": 6.47793436050415,
"learning_rate": 6.605319126425454e-05,
"loss": 0.722,
"step": 402
},
{
"epoch": 0.2052196053469128,
"grad_norm": 4.193264961242676,
"learning_rate": 6.590284457407876e-05,
"loss": 0.2432,
"step": 403
},
{
"epoch": 0.2057288351368555,
"grad_norm": 8.675887107849121,
"learning_rate": 6.575233774243465e-05,
"loss": 0.7769,
"step": 404
},
{
"epoch": 0.20623806492679822,
"grad_norm": 11.540266036987305,
"learning_rate": 6.560167228492436e-05,
"loss": 0.7733,
"step": 405
},
{
"epoch": 0.20674729471674094,
"grad_norm": 12.353882789611816,
"learning_rate": 6.545084971874738e-05,
"loss": 0.6889,
"step": 406
},
{
"epoch": 0.20725652450668364,
"grad_norm": 7.550869941711426,
"learning_rate": 6.529987156268526e-05,
"loss": 0.5014,
"step": 407
},
{
"epoch": 0.20776575429662636,
"grad_norm": 38.89470672607422,
"learning_rate": 6.514873933708638e-05,
"loss": 1.478,
"step": 408
},
{
"epoch": 0.20827498408656905,
"grad_norm": 12.111885070800781,
"learning_rate": 6.499745456385054e-05,
"loss": 1.0569,
"step": 409
},
{
"epoch": 0.20878421387651178,
"grad_norm": 16.79547691345215,
"learning_rate": 6.484601876641375e-05,
"loss": 0.9007,
"step": 410
},
{
"epoch": 0.2092934436664545,
"grad_norm": 7.7194108963012695,
"learning_rate": 6.46944334697328e-05,
"loss": 0.5792,
"step": 411
},
{
"epoch": 0.2098026734563972,
"grad_norm": 10.509634017944336,
"learning_rate": 6.454270020026995e-05,
"loss": 1.0819,
"step": 412
},
{
"epoch": 0.21031190324633992,
"grad_norm": 8.156822204589844,
"learning_rate": 6.439082048597755e-05,
"loss": 0.5238,
"step": 413
},
{
"epoch": 0.2108211330362826,
"grad_norm": 6.081987380981445,
"learning_rate": 6.423879585628261e-05,
"loss": 0.0934,
"step": 414
},
{
"epoch": 0.21133036282622533,
"grad_norm": 3.127657413482666,
"learning_rate": 6.408662784207149e-05,
"loss": 0.1243,
"step": 415
},
{
"epoch": 0.21183959261616805,
"grad_norm": 2.4891467094421387,
"learning_rate": 6.39343179756744e-05,
"loss": 0.0989,
"step": 416
},
{
"epoch": 0.21234882240611075,
"grad_norm": 3.107302665710449,
"learning_rate": 6.378186779084995e-05,
"loss": 0.1264,
"step": 417
},
{
"epoch": 0.21285805219605347,
"grad_norm": 4.113406181335449,
"learning_rate": 6.36292788227699e-05,
"loss": 0.1366,
"step": 418
},
{
"epoch": 0.2133672819859962,
"grad_norm": 3.0692574977874756,
"learning_rate": 6.34765526080034e-05,
"loss": 0.1656,
"step": 419
},
{
"epoch": 0.2138765117759389,
"grad_norm": 2.2917490005493164,
"learning_rate": 6.332369068450174e-05,
"loss": 0.0729,
"step": 420
},
{
"epoch": 0.2143857415658816,
"grad_norm": 3.4825942516326904,
"learning_rate": 6.317069459158284e-05,
"loss": 0.1348,
"step": 421
},
{
"epoch": 0.2148949713558243,
"grad_norm": 7.805583477020264,
"learning_rate": 6.30175658699156e-05,
"loss": 0.061,
"step": 422
},
{
"epoch": 0.21540420114576703,
"grad_norm": 4.299949645996094,
"learning_rate": 6.286430606150459e-05,
"loss": 0.1709,
"step": 423
},
{
"epoch": 0.21591343093570975,
"grad_norm": 5.390031814575195,
"learning_rate": 6.271091670967436e-05,
"loss": 0.1293,
"step": 424
},
{
"epoch": 0.21642266072565244,
"grad_norm": 10.182729721069336,
"learning_rate": 6.255739935905396e-05,
"loss": 0.2065,
"step": 425
},
{
"epoch": 0.21693189051559517,
"grad_norm": 8.289228439331055,
"learning_rate": 6.240375555556145e-05,
"loss": 0.17,
"step": 426
},
{
"epoch": 0.21744112030553786,
"grad_norm": 9.04494571685791,
"learning_rate": 6.22499868463882e-05,
"loss": 0.2247,
"step": 427
},
{
"epoch": 0.21795035009548058,
"grad_norm": 7.830061435699463,
"learning_rate": 6.209609477998338e-05,
"loss": 0.1934,
"step": 428
},
{
"epoch": 0.2184595798854233,
"grad_norm": 11.166521072387695,
"learning_rate": 6.194208090603844e-05,
"loss": 0.1904,
"step": 429
},
{
"epoch": 0.218968809675366,
"grad_norm": 5.363670825958252,
"learning_rate": 6.178794677547137e-05,
"loss": 0.1516,
"step": 430
},
{
"epoch": 0.21947803946530872,
"grad_norm": 4.773486614227295,
"learning_rate": 6.163369394041111e-05,
"loss": 0.1323,
"step": 431
},
{
"epoch": 0.21998726925525144,
"grad_norm": 4.628374099731445,
"learning_rate": 6.147932395418205e-05,
"loss": 0.1193,
"step": 432
},
{
"epoch": 0.22049649904519414,
"grad_norm": 4.363916397094727,
"learning_rate": 6.132483837128823e-05,
"loss": 0.134,
"step": 433
},
{
"epoch": 0.22100572883513686,
"grad_norm": 5.665075302124023,
"learning_rate": 6.117023874739772e-05,
"loss": 0.16,
"step": 434
},
{
"epoch": 0.22151495862507956,
"grad_norm": 6.854954242706299,
"learning_rate": 6.1015526639327035e-05,
"loss": 0.169,
"step": 435
},
{
"epoch": 0.22202418841502228,
"grad_norm": 7.282088756561279,
"learning_rate": 6.0860703605025395e-05,
"loss": 0.139,
"step": 436
},
{
"epoch": 0.222533418204965,
"grad_norm": 9.19260025024414,
"learning_rate": 6.0705771203559024e-05,
"loss": 0.1304,
"step": 437
},
{
"epoch": 0.2230426479949077,
"grad_norm": 6.518429279327393,
"learning_rate": 6.05507309950955e-05,
"loss": 0.1446,
"step": 438
},
{
"epoch": 0.22355187778485042,
"grad_norm": 4.916598320007324,
"learning_rate": 6.0395584540887963e-05,
"loss": 0.025,
"step": 439
},
{
"epoch": 0.22406110757479314,
"grad_norm": 4.131253719329834,
"learning_rate": 6.024033340325954e-05,
"loss": 0.0445,
"step": 440
},
{
"epoch": 0.22457033736473583,
"grad_norm": 5.542179107666016,
"learning_rate": 6.008497914558744e-05,
"loss": 0.0124,
"step": 441
},
{
"epoch": 0.22507956715467856,
"grad_norm": 9.51887035369873,
"learning_rate": 5.992952333228728e-05,
"loss": 0.1175,
"step": 442
},
{
"epoch": 0.22558879694462125,
"grad_norm": 1.3054120540618896,
"learning_rate": 5.9773967528797414e-05,
"loss": 0.0157,
"step": 443
},
{
"epoch": 0.22609802673456397,
"grad_norm": 3.81061053276062,
"learning_rate": 5.9618313301563055e-05,
"loss": 0.0323,
"step": 444
},
{
"epoch": 0.2266072565245067,
"grad_norm": 6.797232151031494,
"learning_rate": 5.946256221802051e-05,
"loss": 0.0509,
"step": 445
},
{
"epoch": 0.2271164863144494,
"grad_norm": 12.037452697753906,
"learning_rate": 5.9306715846581506e-05,
"loss": 0.0986,
"step": 446
},
{
"epoch": 0.2276257161043921,
"grad_norm": 8.045632362365723,
"learning_rate": 5.915077575661723e-05,
"loss": 0.1348,
"step": 447
},
{
"epoch": 0.2281349458943348,
"grad_norm": 6.889299392700195,
"learning_rate": 5.8994743518442694e-05,
"loss": 0.0482,
"step": 448
},
{
"epoch": 0.22864417568427753,
"grad_norm": 7.197052955627441,
"learning_rate": 5.8838620703300784e-05,
"loss": 0.098,
"step": 449
},
{
"epoch": 0.22915340547422025,
"grad_norm": 4.620372295379639,
"learning_rate": 5.868240888334653e-05,
"loss": 0.0217,
"step": 450
},
{
"epoch": 0.22966263526416295,
"grad_norm": 8.069796562194824,
"learning_rate": 5.85261096316312e-05,
"loss": 0.7489,
"step": 451
},
{
"epoch": 0.23017186505410567,
"grad_norm": 6.844252109527588,
"learning_rate": 5.836972452208654e-05,
"loss": 0.591,
"step": 452
},
{
"epoch": 0.2306810948440484,
"grad_norm": 4.255478858947754,
"learning_rate": 5.821325512950886e-05,
"loss": 0.201,
"step": 453
},
{
"epoch": 0.23119032463399108,
"grad_norm": 13.691847801208496,
"learning_rate": 5.805670302954321e-05,
"loss": 0.9461,
"step": 454
},
{
"epoch": 0.2316995544239338,
"grad_norm": 13.480101585388184,
"learning_rate": 5.79000697986675e-05,
"loss": 0.8203,
"step": 455
},
{
"epoch": 0.2322087842138765,
"grad_norm": 10.721153259277344,
"learning_rate": 5.7743357014176624e-05,
"loss": 0.6616,
"step": 456
},
{
"epoch": 0.23271801400381922,
"grad_norm": 13.593427658081055,
"learning_rate": 5.7586566254166583e-05,
"loss": 1.0115,
"step": 457
},
{
"epoch": 0.23322724379376195,
"grad_norm": 16.89806365966797,
"learning_rate": 5.7429699097518585e-05,
"loss": 1.2747,
"step": 458
},
{
"epoch": 0.23373647358370464,
"grad_norm": 9.398344993591309,
"learning_rate": 5.7272757123883184e-05,
"loss": 0.8052,
"step": 459
},
{
"epoch": 0.23424570337364736,
"grad_norm": 9.867512702941895,
"learning_rate": 5.7115741913664264e-05,
"loss": 0.5866,
"step": 460
},
{
"epoch": 0.23475493316359006,
"grad_norm": 11.531373023986816,
"learning_rate": 5.695865504800327e-05,
"loss": 0.7859,
"step": 461
},
{
"epoch": 0.23526416295353278,
"grad_norm": 14.971497535705566,
"learning_rate": 5.680149810876322e-05,
"loss": 0.8722,
"step": 462
},
{
"epoch": 0.2357733927434755,
"grad_norm": 6.004801273345947,
"learning_rate": 5.664427267851271e-05,
"loss": 0.3771,
"step": 463
},
{
"epoch": 0.2362826225334182,
"grad_norm": 1.8728851079940796,
"learning_rate": 5.6486980340510086e-05,
"loss": 0.1001,
"step": 464
},
{
"epoch": 0.23679185232336092,
"grad_norm": 5.338820934295654,
"learning_rate": 5.6329622678687463e-05,
"loss": 0.1665,
"step": 465
},
{
"epoch": 0.23730108211330364,
"grad_norm": 1.342680811882019,
"learning_rate": 5.617220127763474e-05,
"loss": 0.1127,
"step": 466
},
{
"epoch": 0.23781031190324634,
"grad_norm": 0.8968675136566162,
"learning_rate": 5.601471772258368e-05,
"loss": 0.1042,
"step": 467
},
{
"epoch": 0.23831954169318906,
"grad_norm": 1.3605763912200928,
"learning_rate": 5.585717359939192e-05,
"loss": 0.0833,
"step": 468
},
{
"epoch": 0.23882877148313175,
"grad_norm": 1.4521266222000122,
"learning_rate": 5.569957049452703e-05,
"loss": 0.0471,
"step": 469
},
{
"epoch": 0.23933800127307447,
"grad_norm": 0.7509562969207764,
"learning_rate": 5.5541909995050554e-05,
"loss": 0.0411,
"step": 470
},
{
"epoch": 0.2398472310630172,
"grad_norm": 4.2879743576049805,
"learning_rate": 5.538419368860196e-05,
"loss": 0.1356,
"step": 471
},
{
"epoch": 0.2403564608529599,
"grad_norm": 2.3868203163146973,
"learning_rate": 5.522642316338268e-05,
"loss": 0.0857,
"step": 472
},
{
"epoch": 0.2408656906429026,
"grad_norm": 5.234532833099365,
"learning_rate": 5.506860000814017e-05,
"loss": 0.2399,
"step": 473
},
{
"epoch": 0.2413749204328453,
"grad_norm": 5.502321243286133,
"learning_rate": 5.4910725812151864e-05,
"loss": 0.1112,
"step": 474
},
{
"epoch": 0.24188415022278803,
"grad_norm": 6.480447292327881,
"learning_rate": 5.475280216520913e-05,
"loss": 0.1651,
"step": 475
},
{
"epoch": 0.24239338001273075,
"grad_norm": 7.716391086578369,
"learning_rate": 5.4594830657601384e-05,
"loss": 0.18,
"step": 476
},
{
"epoch": 0.24290260980267345,
"grad_norm": 6.902336120605469,
"learning_rate": 5.443681288009991e-05,
"loss": 0.184,
"step": 477
},
{
"epoch": 0.24341183959261617,
"grad_norm": 9.32155704498291,
"learning_rate": 5.427875042394199e-05,
"loss": 0.238,
"step": 478
},
{
"epoch": 0.2439210693825589,
"grad_norm": 7.200125217437744,
"learning_rate": 5.412064488081482e-05,
"loss": 0.1355,
"step": 479
},
{
"epoch": 0.24443029917250159,
"grad_norm": 7.890231132507324,
"learning_rate": 5.396249784283942e-05,
"loss": 0.2136,
"step": 480
},
{
"epoch": 0.2449395289624443,
"grad_norm": 3.1145589351654053,
"learning_rate": 5.3804310902554754e-05,
"loss": 0.1003,
"step": 481
},
{
"epoch": 0.245448758752387,
"grad_norm": 7.380165100097656,
"learning_rate": 5.364608565290155e-05,
"loss": 0.0856,
"step": 482
},
{
"epoch": 0.24595798854232973,
"grad_norm": 7.165464878082275,
"learning_rate": 5.348782368720626e-05,
"loss": 0.1174,
"step": 483
},
{
"epoch": 0.24646721833227245,
"grad_norm": 6.7587175369262695,
"learning_rate": 5.3329526599165204e-05,
"loss": 0.1159,
"step": 484
},
{
"epoch": 0.24697644812221514,
"grad_norm": 6.113307952880859,
"learning_rate": 5.317119598282823e-05,
"loss": 0.1381,
"step": 485
},
{
"epoch": 0.24748567791215786,
"grad_norm": 4.088643550872803,
"learning_rate": 5.301283343258293e-05,
"loss": 0.0852,
"step": 486
},
{
"epoch": 0.2479949077021006,
"grad_norm": 6.351160526275635,
"learning_rate": 5.2854440543138406e-05,
"loss": 0.1145,
"step": 487
},
{
"epoch": 0.24850413749204328,
"grad_norm": 6.1809539794921875,
"learning_rate": 5.2696018909509306e-05,
"loss": 0.1721,
"step": 488
},
{
"epoch": 0.249013367281986,
"grad_norm": 6.563344478607178,
"learning_rate": 5.253757012699972e-05,
"loss": 0.19,
"step": 489
},
{
"epoch": 0.2495225970719287,
"grad_norm": 9.084705352783203,
"learning_rate": 5.2379095791187124e-05,
"loss": 0.0925,
"step": 490
},
{
"epoch": 0.2500318268618714,
"grad_norm": 2.910612106323242,
"learning_rate": 5.2220597497906307e-05,
"loss": 0.0231,
"step": 491
},
{
"epoch": 0.25054105665181414,
"grad_norm": 7.0364556312561035,
"learning_rate": 5.2062076843233366e-05,
"loss": 0.0679,
"step": 492
},
{
"epoch": 0.25105028644175686,
"grad_norm": 2.0886571407318115,
"learning_rate": 5.1903535423469505e-05,
"loss": 0.0088,
"step": 493
},
{
"epoch": 0.25155951623169953,
"grad_norm": 6.553752422332764,
"learning_rate": 5.174497483512506e-05,
"loss": 0.0633,
"step": 494
},
{
"epoch": 0.25206874602164225,
"grad_norm": 6.098299503326416,
"learning_rate": 5.158639667490339e-05,
"loss": 0.0672,
"step": 495
},
{
"epoch": 0.252577975811585,
"grad_norm": 2.6952426433563232,
"learning_rate": 5.142780253968481e-05,
"loss": 0.0067,
"step": 496
},
{
"epoch": 0.2530872056015277,
"grad_norm": 4.000742435455322,
"learning_rate": 5.126919402651052e-05,
"loss": 0.0326,
"step": 497
},
{
"epoch": 0.2535964353914704,
"grad_norm": 3.690657615661621,
"learning_rate": 5.1110572732566475e-05,
"loss": 0.0185,
"step": 498
},
{
"epoch": 0.2541056651814131,
"grad_norm": 3.398660898208618,
"learning_rate": 5.095194025516733e-05,
"loss": 0.0101,
"step": 499
},
{
"epoch": 0.2546148949713558,
"grad_norm": 4.581557273864746,
"learning_rate": 5.0793298191740404e-05,
"loss": 0.0981,
"step": 500
},
{
"epoch": 0.25512412476129853,
"grad_norm": 7.636013984680176,
"learning_rate": 5.063464813980948e-05,
"loss": 0.9549,
"step": 501
},
{
"epoch": 0.25563335455124125,
"grad_norm": 7.066486358642578,
"learning_rate": 5.047599169697884e-05,
"loss": 0.728,
"step": 502
},
{
"epoch": 0.256142584341184,
"grad_norm": 7.615071773529053,
"learning_rate": 5.03173304609171e-05,
"loss": 0.2771,
"step": 503
},
{
"epoch": 0.25665181413112664,
"grad_norm": 12.108363151550293,
"learning_rate": 5.015866602934112e-05,
"loss": 0.8318,
"step": 504
},
{
"epoch": 0.25716104392106937,
"grad_norm": 11.92260456085205,
"learning_rate": 5e-05,
"loss": 0.6557,
"step": 505
},
{
"epoch": 0.2576702737110121,
"grad_norm": 10.278399467468262,
"learning_rate": 4.984133397065889e-05,
"loss": 0.4904,
"step": 506
},
{
"epoch": 0.2581795035009548,
"grad_norm": 26.604215621948242,
"learning_rate": 4.968266953908292e-05,
"loss": 1.0672,
"step": 507
},
{
"epoch": 0.25868873329089753,
"grad_norm": 16.138303756713867,
"learning_rate": 4.952400830302117e-05,
"loss": 1.4055,
"step": 508
},
{
"epoch": 0.25919796308084025,
"grad_norm": 18.837907791137695,
"learning_rate": 4.9365351860190526e-05,
"loss": 0.9641,
"step": 509
},
{
"epoch": 0.2597071928707829,
"grad_norm": 27.194965362548828,
"learning_rate": 4.92067018082596e-05,
"loss": 0.7279,
"step": 510
},
{
"epoch": 0.26021642266072564,
"grad_norm": 12.584843635559082,
"learning_rate": 4.9048059744832666e-05,
"loss": 0.8671,
"step": 511
},
{
"epoch": 0.26072565245066837,
"grad_norm": 11.650589942932129,
"learning_rate": 4.888942726743353e-05,
"loss": 1.0271,
"step": 512
},
{
"epoch": 0.2612348822406111,
"grad_norm": 1.3385618925094604,
"learning_rate": 4.8730805973489476e-05,
"loss": 0.0686,
"step": 513
},
{
"epoch": 0.2617441120305538,
"grad_norm": 2.49957013130188,
"learning_rate": 4.85721974603152e-05,
"loss": 0.1366,
"step": 514
},
{
"epoch": 0.2622533418204965,
"grad_norm": 3.0434892177581787,
"learning_rate": 4.841360332509663e-05,
"loss": 0.1553,
"step": 515
},
{
"epoch": 0.2627625716104392,
"grad_norm": 3.903759717941284,
"learning_rate": 4.825502516487497e-05,
"loss": 0.1314,
"step": 516
},
{
"epoch": 0.2632718014003819,
"grad_norm": 1.2716883420944214,
"learning_rate": 4.8096464576530507e-05,
"loss": 0.0685,
"step": 517
},
{
"epoch": 0.26378103119032464,
"grad_norm": 1.2184836864471436,
"learning_rate": 4.7937923156766646e-05,
"loss": 0.1099,
"step": 518
},
{
"epoch": 0.26429026098026737,
"grad_norm": 5.961424350738525,
"learning_rate": 4.77794025020937e-05,
"loss": 0.1555,
"step": 519
},
{
"epoch": 0.26479949077021003,
"grad_norm": 4.472005844116211,
"learning_rate": 4.762090420881289e-05,
"loss": 0.1798,
"step": 520
},
{
"epoch": 0.26530872056015276,
"grad_norm": 1.6707897186279297,
"learning_rate": 4.7462429873000295e-05,
"loss": 0.0802,
"step": 521
},
{
"epoch": 0.2658179503500955,
"grad_norm": 1.6405971050262451,
"learning_rate": 4.730398109049071e-05,
"loss": 0.1076,
"step": 522
},
{
"epoch": 0.2663271801400382,
"grad_norm": 5.0963053703308105,
"learning_rate": 4.71455594568616e-05,
"loss": 0.137,
"step": 523
},
{
"epoch": 0.2668364099299809,
"grad_norm": 7.195677280426025,
"learning_rate": 4.698716656741708e-05,
"loss": 0.0832,
"step": 524
},
{
"epoch": 0.2673456397199236,
"grad_norm": 6.091291427612305,
"learning_rate": 4.6828804017171776e-05,
"loss": 0.0695,
"step": 525
},
{
"epoch": 0.2678548695098663,
"grad_norm": 5.164731025695801,
"learning_rate": 4.667047340083481e-05,
"loss": 0.1888,
"step": 526
},
{
"epoch": 0.26836409929980903,
"grad_norm": 6.838079929351807,
"learning_rate": 4.6512176312793736e-05,
"loss": 0.1742,
"step": 527
},
{
"epoch": 0.26887332908975176,
"grad_norm": 7.102747440338135,
"learning_rate": 4.635391434709847e-05,
"loss": 0.1379,
"step": 528
},
{
"epoch": 0.2693825588796945,
"grad_norm": 4.239371299743652,
"learning_rate": 4.619568909744524e-05,
"loss": 0.0998,
"step": 529
},
{
"epoch": 0.2698917886696372,
"grad_norm": 7.476315498352051,
"learning_rate": 4.603750215716057e-05,
"loss": 0.1463,
"step": 530
},
{
"epoch": 0.27040101845957987,
"grad_norm": 8.441755294799805,
"learning_rate": 4.587935511918521e-05,
"loss": 0.1281,
"step": 531
},
{
"epoch": 0.2709102482495226,
"grad_norm": 5.9278564453125,
"learning_rate": 4.5721249576058027e-05,
"loss": 0.0733,
"step": 532
},
{
"epoch": 0.2714194780394653,
"grad_norm": 3.918498992919922,
"learning_rate": 4.5563187119900104e-05,
"loss": 0.0616,
"step": 533
},
{
"epoch": 0.27192870782940803,
"grad_norm": 9.709593772888184,
"learning_rate": 4.5405169342398634e-05,
"loss": 0.1179,
"step": 534
},
{
"epoch": 0.27243793761935076,
"grad_norm": 5.810922145843506,
"learning_rate": 4.5247197834790876e-05,
"loss": 0.1212,
"step": 535
},
{
"epoch": 0.2729471674092934,
"grad_norm": 4.741395950317383,
"learning_rate": 4.508927418784815e-05,
"loss": 0.0867,
"step": 536
},
{
"epoch": 0.27345639719923615,
"grad_norm": 3.2584261894226074,
"learning_rate": 4.493139999185983e-05,
"loss": 0.0676,
"step": 537
},
{
"epoch": 0.27396562698917887,
"grad_norm": 3.621077060699463,
"learning_rate": 4.477357683661734e-05,
"loss": 0.0495,
"step": 538
},
{
"epoch": 0.2744748567791216,
"grad_norm": 3.1728439331054688,
"learning_rate": 4.461580631139805e-05,
"loss": 0.0396,
"step": 539
},
{
"epoch": 0.2749840865690643,
"grad_norm": 3.342538833618164,
"learning_rate": 4.445809000494946e-05,
"loss": 0.0535,
"step": 540
},
{
"epoch": 0.275493316359007,
"grad_norm": 9.337960243225098,
"learning_rate": 4.4300429505472976e-05,
"loss": 0.0665,
"step": 541
},
{
"epoch": 0.2760025461489497,
"grad_norm": 6.381386756896973,
"learning_rate": 4.4142826400608086e-05,
"loss": 0.0668,
"step": 542
},
{
"epoch": 0.2765117759388924,
"grad_norm": 2.0168566703796387,
"learning_rate": 4.398528227741633e-05,
"loss": 0.0073,
"step": 543
},
{
"epoch": 0.27702100572883515,
"grad_norm": 1.1732912063598633,
"learning_rate": 4.3827798722365264e-05,
"loss": 0.0031,
"step": 544
},
{
"epoch": 0.27753023551877787,
"grad_norm": 2.5314676761627197,
"learning_rate": 4.3670377321312535e-05,
"loss": 0.0079,
"step": 545
},
{
"epoch": 0.27803946530872053,
"grad_norm": 9.042901039123535,
"learning_rate": 4.351301965948991e-05,
"loss": 0.1532,
"step": 546
},
{
"epoch": 0.27854869509866326,
"grad_norm": 11.261495590209961,
"learning_rate": 4.33557273214873e-05,
"loss": 0.1224,
"step": 547
},
{
"epoch": 0.279057924888606,
"grad_norm": 2.12158465385437,
"learning_rate": 4.3198501891236804e-05,
"loss": 0.0167,
"step": 548
},
{
"epoch": 0.2795671546785487,
"grad_norm": 2.104827404022217,
"learning_rate": 4.3041344951996746e-05,
"loss": 0.0119,
"step": 549
},
{
"epoch": 0.2800763844684914,
"grad_norm": 7.128782749176025,
"learning_rate": 4.288425808633575e-05,
"loss": 0.1068,
"step": 550
},
{
"epoch": 0.2805856142584341,
"grad_norm": 4.171801567077637,
"learning_rate": 4.272724287611684e-05,
"loss": 0.7447,
"step": 551
},
{
"epoch": 0.2810948440483768,
"grad_norm": 5.680448532104492,
"learning_rate": 4.2570300902481426e-05,
"loss": 0.7541,
"step": 552
},
{
"epoch": 0.28160407383831954,
"grad_norm": 3.7267653942108154,
"learning_rate": 4.241343374583343e-05,
"loss": 0.2126,
"step": 553
},
{
"epoch": 0.28211330362826226,
"grad_norm": 12.336726188659668,
"learning_rate": 4.2256642985823395e-05,
"loss": 0.7713,
"step": 554
},
{
"epoch": 0.282622533418205,
"grad_norm": 14.465222358703613,
"learning_rate": 4.20999302013325e-05,
"loss": 0.8353,
"step": 555
},
{
"epoch": 0.2831317632081477,
"grad_norm": 15.680033683776855,
"learning_rate": 4.19432969704568e-05,
"loss": 0.7337,
"step": 556
},
{
"epoch": 0.28364099299809037,
"grad_norm": 19.08047866821289,
"learning_rate": 4.178674487049116e-05,
"loss": 0.6195,
"step": 557
},
{
"epoch": 0.2841502227880331,
"grad_norm": 15.031834602355957,
"learning_rate": 4.163027547791347e-05,
"loss": 1.3184,
"step": 558
},
{
"epoch": 0.2846594525779758,
"grad_norm": 15.743155479431152,
"learning_rate": 4.147389036836881e-05,
"loss": 1.1384,
"step": 559
},
{
"epoch": 0.28516868236791854,
"grad_norm": 8.661066055297852,
"learning_rate": 4.131759111665349e-05,
"loss": 0.6242,
"step": 560
},
{
"epoch": 0.28567791215786126,
"grad_norm": 13.872480392456055,
"learning_rate": 4.116137929669921e-05,
"loss": 0.8754,
"step": 561
},
{
"epoch": 0.2861871419478039,
"grad_norm": 10.3219575881958,
"learning_rate": 4.100525648155731e-05,
"loss": 0.6358,
"step": 562
},
{
"epoch": 0.28669637173774665,
"grad_norm": 11.022652626037598,
"learning_rate": 4.084922424338277e-05,
"loss": 0.2152,
"step": 563
},
{
"epoch": 0.28720560152768937,
"grad_norm": 9.58282470703125,
"learning_rate": 4.06932841534185e-05,
"loss": 0.1844,
"step": 564
},
{
"epoch": 0.2877148313176321,
"grad_norm": 6.9711503982543945,
"learning_rate": 4.0537437781979506e-05,
"loss": 0.1819,
"step": 565
},
{
"epoch": 0.2882240611075748,
"grad_norm": 2.8225958347320557,
"learning_rate": 4.038168669843697e-05,
"loss": 0.12,
"step": 566
},
{
"epoch": 0.2887332908975175,
"grad_norm": 3.3902416229248047,
"learning_rate": 4.0226032471202604e-05,
"loss": 0.1144,
"step": 567
},
{
"epoch": 0.2892425206874602,
"grad_norm": 2.9288244247436523,
"learning_rate": 4.007047666771274e-05,
"loss": 0.1365,
"step": 568
},
{
"epoch": 0.2897517504774029,
"grad_norm": 1.9439268112182617,
"learning_rate": 3.991502085441259e-05,
"loss": 0.1248,
"step": 569
},
{
"epoch": 0.29026098026734565,
"grad_norm": 3.1058437824249268,
"learning_rate": 3.9759666596740476e-05,
"loss": 0.1281,
"step": 570
},
{
"epoch": 0.29077021005728837,
"grad_norm": 1.4958165884017944,
"learning_rate": 3.960441545911204e-05,
"loss": 0.0681,
"step": 571
},
{
"epoch": 0.29127943984723104,
"grad_norm": 7.544384479522705,
"learning_rate": 3.944926900490452e-05,
"loss": 0.0738,
"step": 572
},
{
"epoch": 0.29178866963717376,
"grad_norm": 10.52266788482666,
"learning_rate": 3.929422879644099e-05,
"loss": 0.1693,
"step": 573
},
{
"epoch": 0.2922978994271165,
"grad_norm": 18.5694637298584,
"learning_rate": 3.913929639497462e-05,
"loss": 0.2253,
"step": 574
},
{
"epoch": 0.2928071292170592,
"grad_norm": 4.89534330368042,
"learning_rate": 3.898447336067297e-05,
"loss": 0.0848,
"step": 575
},
{
"epoch": 0.2933163590070019,
"grad_norm": 3.438270092010498,
"learning_rate": 3.882976125260229e-05,
"loss": 0.0828,
"step": 576
},
{
"epoch": 0.29382558879694465,
"grad_norm": 3.7927188873291016,
"learning_rate": 3.8675161628711776e-05,
"loss": 0.0915,
"step": 577
},
{
"epoch": 0.2943348185868873,
"grad_norm": 10.36831283569336,
"learning_rate": 3.852067604581794e-05,
"loss": 0.1148,
"step": 578
},
{
"epoch": 0.29484404837683004,
"grad_norm": 7.768423557281494,
"learning_rate": 3.836630605958888e-05,
"loss": 0.1411,
"step": 579
},
{
"epoch": 0.29535327816677276,
"grad_norm": 4.291931629180908,
"learning_rate": 3.821205322452863e-05,
"loss": 0.0666,
"step": 580
},
{
"epoch": 0.2958625079567155,
"grad_norm": 5.690462589263916,
"learning_rate": 3.8057919093961553e-05,
"loss": 0.1047,
"step": 581
},
{
"epoch": 0.2963717377466582,
"grad_norm": 4.0527238845825195,
"learning_rate": 3.790390522001662e-05,
"loss": 0.1114,
"step": 582
},
{
"epoch": 0.29688096753660087,
"grad_norm": 5.845585346221924,
"learning_rate": 3.775001315361183e-05,
"loss": 0.1266,
"step": 583
},
{
"epoch": 0.2973901973265436,
"grad_norm": 3.8803865909576416,
"learning_rate": 3.759624444443858e-05,
"loss": 0.0934,
"step": 584
},
{
"epoch": 0.2978994271164863,
"grad_norm": 4.592535495758057,
"learning_rate": 3.744260064094604e-05,
"loss": 0.0689,
"step": 585
},
{
"epoch": 0.29840865690642904,
"grad_norm": 4.82613468170166,
"learning_rate": 3.728908329032567e-05,
"loss": 0.1099,
"step": 586
},
{
"epoch": 0.29891788669637176,
"grad_norm": 7.046941757202148,
"learning_rate": 3.713569393849543e-05,
"loss": 0.1384,
"step": 587
},
{
"epoch": 0.2994271164863144,
"grad_norm": 8.008650779724121,
"learning_rate": 3.69824341300844e-05,
"loss": 0.0777,
"step": 588
},
{
"epoch": 0.29993634627625715,
"grad_norm": 3.9791100025177,
"learning_rate": 3.6829305408417166e-05,
"loss": 0.0693,
"step": 589
},
{
"epoch": 0.30044557606619987,
"grad_norm": 6.2310590744018555,
"learning_rate": 3.6676309315498256e-05,
"loss": 0.0861,
"step": 590
},
{
"epoch": 0.3009548058561426,
"grad_norm": 8.531147956848145,
"learning_rate": 3.6523447391996614e-05,
"loss": 0.0882,
"step": 591
},
{
"epoch": 0.3014640356460853,
"grad_norm": 3.792165994644165,
"learning_rate": 3.6370721177230116e-05,
"loss": 0.0685,
"step": 592
},
{
"epoch": 0.301973265436028,
"grad_norm": 4.004833221435547,
"learning_rate": 3.6218132209150045e-05,
"loss": 0.0936,
"step": 593
},
{
"epoch": 0.3024824952259707,
"grad_norm": 15.85920238494873,
"learning_rate": 3.606568202432562e-05,
"loss": 0.2744,
"step": 594
},
{
"epoch": 0.3029917250159134,
"grad_norm": 0.3684404492378235,
"learning_rate": 3.591337215792852e-05,
"loss": 0.0018,
"step": 595
},
{
"epoch": 0.30350095480585615,
"grad_norm": 9.898280143737793,
"learning_rate": 3.5761204143717385e-05,
"loss": 0.0506,
"step": 596
},
{
"epoch": 0.30401018459579887,
"grad_norm": 1.1307116746902466,
"learning_rate": 3.560917951402245e-05,
"loss": 0.0033,
"step": 597
},
{
"epoch": 0.3045194143857416,
"grad_norm": 4.563685417175293,
"learning_rate": 3.545729979973005e-05,
"loss": 0.0299,
"step": 598
},
{
"epoch": 0.30502864417568426,
"grad_norm": 4.1302080154418945,
"learning_rate": 3.530556653026721e-05,
"loss": 0.0331,
"step": 599
},
{
"epoch": 0.305537873965627,
"grad_norm": 6.200318813323975,
"learning_rate": 3.515398123358627e-05,
"loss": 0.0764,
"step": 600
},
{
"epoch": 0.305537873965627,
"eval_loss": 0.2474220246076584,
"eval_runtime": 377.4722,
"eval_samples_per_second": 8.764,
"eval_steps_per_second": 2.191,
"step": 600
},
{
"epoch": 0.3060471037555697,
"grad_norm": 5.079921722412109,
"learning_rate": 3.5002545436149474e-05,
"loss": 0.8877,
"step": 601
},
{
"epoch": 0.3065563335455124,
"grad_norm": 5.308548927307129,
"learning_rate": 3.485126066291364e-05,
"loss": 0.8411,
"step": 602
},
{
"epoch": 0.30706556333545515,
"grad_norm": 7.06653356552124,
"learning_rate": 3.470012843731476e-05,
"loss": 0.8479,
"step": 603
},
{
"epoch": 0.3075747931253978,
"grad_norm": 2.4465208053588867,
"learning_rate": 3.4549150281252636e-05,
"loss": 0.0402,
"step": 604
},
{
"epoch": 0.30808402291534054,
"grad_norm": 7.19598913192749,
"learning_rate": 3.439832771507565e-05,
"loss": 0.5183,
"step": 605
},
{
"epoch": 0.30859325270528326,
"grad_norm": 10.159460067749023,
"learning_rate": 3.424766225756537e-05,
"loss": 0.6912,
"step": 606
},
{
"epoch": 0.309102482495226,
"grad_norm": 6.36456298828125,
"learning_rate": 3.4097155425921254e-05,
"loss": 0.4891,
"step": 607
},
{
"epoch": 0.3096117122851687,
"grad_norm": 28.50994873046875,
"learning_rate": 3.394680873574546e-05,
"loss": 1.2461,
"step": 608
},
{
"epoch": 0.31012094207511137,
"grad_norm": 11.220614433288574,
"learning_rate": 3.3796623701027476e-05,
"loss": 0.8775,
"step": 609
},
{
"epoch": 0.3106301718650541,
"grad_norm": 9.175284385681152,
"learning_rate": 3.364660183412892e-05,
"loss": 0.8295,
"step": 610
},
{
"epoch": 0.3111394016549968,
"grad_norm": 13.259340286254883,
"learning_rate": 3.349674464576834e-05,
"loss": 0.8886,
"step": 611
},
{
"epoch": 0.31164863144493954,
"grad_norm": 10.061495780944824,
"learning_rate": 3.334705364500596e-05,
"loss": 0.7599,
"step": 612
},
{
"epoch": 0.31215786123488226,
"grad_norm": 4.028329849243164,
"learning_rate": 3.3197530339228487e-05,
"loss": 0.1656,
"step": 613
},
{
"epoch": 0.31266709102482493,
"grad_norm": 2.669538736343384,
"learning_rate": 3.304817623413397e-05,
"loss": 0.124,
"step": 614
},
{
"epoch": 0.31317632081476765,
"grad_norm": 3.3735191822052,
"learning_rate": 3.289899283371657e-05,
"loss": 0.1331,
"step": 615
},
{
"epoch": 0.3136855506047104,
"grad_norm": 1.7686545848846436,
"learning_rate": 3.274998164025148e-05,
"loss": 0.1367,
"step": 616
},
{
"epoch": 0.3141947803946531,
"grad_norm": 2.966285467147827,
"learning_rate": 3.260114415427975e-05,
"loss": 0.1389,
"step": 617
},
{
"epoch": 0.3147040101845958,
"grad_norm": 6.92048454284668,
"learning_rate": 3.2452481874593234e-05,
"loss": 0.1157,
"step": 618
},
{
"epoch": 0.3152132399745385,
"grad_norm": 8.68355655670166,
"learning_rate": 3.230399629821942e-05,
"loss": 0.1763,
"step": 619
},
{
"epoch": 0.3157224697644812,
"grad_norm": 3.1240475177764893,
"learning_rate": 3.215568892040641e-05,
"loss": 0.1004,
"step": 620
},
{
"epoch": 0.31623169955442393,
"grad_norm": 1.7762776613235474,
"learning_rate": 3.200756123460788e-05,
"loss": 0.0993,
"step": 621
},
{
"epoch": 0.31674092934436665,
"grad_norm": 2.550874710083008,
"learning_rate": 3.1859614732467954e-05,
"loss": 0.0514,
"step": 622
},
{
"epoch": 0.3172501591343094,
"grad_norm": 3.7702434062957764,
"learning_rate": 3.171185090380628e-05,
"loss": 0.0691,
"step": 623
},
{
"epoch": 0.3177593889242521,
"grad_norm": 4.985774517059326,
"learning_rate": 3.156427123660297e-05,
"loss": 0.0982,
"step": 624
},
{
"epoch": 0.31826861871419476,
"grad_norm": 9.22007942199707,
"learning_rate": 3.141687721698363e-05,
"loss": 0.123,
"step": 625
},
{
"epoch": 0.3187778485041375,
"grad_norm": 3.031754970550537,
"learning_rate": 3.12696703292044e-05,
"loss": 0.0507,
"step": 626
},
{
"epoch": 0.3192870782940802,
"grad_norm": 6.516983509063721,
"learning_rate": 3.1122652055637015e-05,
"loss": 0.0944,
"step": 627
},
{
"epoch": 0.31979630808402293,
"grad_norm": 4.626400470733643,
"learning_rate": 3.097582387675385e-05,
"loss": 0.1333,
"step": 628
},
{
"epoch": 0.32030553787396565,
"grad_norm": 5.953018665313721,
"learning_rate": 3.082918727111304e-05,
"loss": 0.1317,
"step": 629
},
{
"epoch": 0.3208147676639083,
"grad_norm": 4.307436466217041,
"learning_rate": 3.0682743715343564e-05,
"loss": 0.0859,
"step": 630
},
{
"epoch": 0.32132399745385104,
"grad_norm": 5.189202308654785,
"learning_rate": 3.053649468413043e-05,
"loss": 0.1205,
"step": 631
},
{
"epoch": 0.32183322724379376,
"grad_norm": 7.541786193847656,
"learning_rate": 3.0390441650199724e-05,
"loss": 0.1567,
"step": 632
},
{
"epoch": 0.3223424570337365,
"grad_norm": 5.107945442199707,
"learning_rate": 3.0244586084303905e-05,
"loss": 0.0836,
"step": 633
},
{
"epoch": 0.3228516868236792,
"grad_norm": 5.2220563888549805,
"learning_rate": 3.0098929455206904e-05,
"loss": 0.0487,
"step": 634
},
{
"epoch": 0.3233609166136219,
"grad_norm": 3.8281970024108887,
"learning_rate": 2.9953473229669328e-05,
"loss": 0.0783,
"step": 635
},
{
"epoch": 0.3238701464035646,
"grad_norm": 4.100976943969727,
"learning_rate": 2.9808218872433767e-05,
"loss": 0.1336,
"step": 636
},
{
"epoch": 0.3243793761935073,
"grad_norm": 4.79497766494751,
"learning_rate": 2.9663167846209998e-05,
"loss": 0.0657,
"step": 637
},
{
"epoch": 0.32488860598345004,
"grad_norm": 5.689431190490723,
"learning_rate": 2.9518321611660237e-05,
"loss": 0.033,
"step": 638
},
{
"epoch": 0.32539783577339276,
"grad_norm": 7.910229206085205,
"learning_rate": 2.9373681627384447e-05,
"loss": 0.0893,
"step": 639
},
{
"epoch": 0.32590706556333543,
"grad_norm": 3.019529104232788,
"learning_rate": 2.9229249349905684e-05,
"loss": 0.0205,
"step": 640
},
{
"epoch": 0.32641629535327815,
"grad_norm": 2.8111181259155273,
"learning_rate": 2.9085026233655365e-05,
"loss": 0.0133,
"step": 641
},
{
"epoch": 0.3269255251432209,
"grad_norm": 4.1778974533081055,
"learning_rate": 2.894101373095867e-05,
"loss": 0.0162,
"step": 642
},
{
"epoch": 0.3274347549331636,
"grad_norm": 40.197940826416016,
"learning_rate": 2.8797213292019926e-05,
"loss": 0.0561,
"step": 643
},
{
"epoch": 0.3279439847231063,
"grad_norm": 5.798105239868164,
"learning_rate": 2.8653626364907917e-05,
"loss": 0.0468,
"step": 644
},
{
"epoch": 0.32845321451304904,
"grad_norm": 0.09037820249795914,
"learning_rate": 2.851025439554142e-05,
"loss": 0.0004,
"step": 645
},
{
"epoch": 0.3289624443029917,
"grad_norm": 6.750631809234619,
"learning_rate": 2.8367098827674578e-05,
"loss": 0.12,
"step": 646
},
{
"epoch": 0.32947167409293443,
"grad_norm": 8.365553855895996,
"learning_rate": 2.8224161102882397e-05,
"loss": 0.0613,
"step": 647
},
{
"epoch": 0.32998090388287715,
"grad_norm": 8.15538501739502,
"learning_rate": 2.8081442660546125e-05,
"loss": 0.1204,
"step": 648
},
{
"epoch": 0.3304901336728199,
"grad_norm": 8.706940650939941,
"learning_rate": 2.7938944937838923e-05,
"loss": 0.0566,
"step": 649
},
{
"epoch": 0.3309993634627626,
"grad_norm": 4.927478790283203,
"learning_rate": 2.7796669369711294e-05,
"loss": 0.1079,
"step": 650
},
{
"epoch": 0.33150859325270526,
"grad_norm": 4.741428852081299,
"learning_rate": 2.7654617388876615e-05,
"loss": 0.7197,
"step": 651
},
{
"epoch": 0.332017823042648,
"grad_norm": 4.9175944328308105,
"learning_rate": 2.7512790425796718e-05,
"loss": 0.5017,
"step": 652
},
{
"epoch": 0.3325270528325907,
"grad_norm": 3.583587884902954,
"learning_rate": 2.7371189908667604e-05,
"loss": 0.2062,
"step": 653
},
{
"epoch": 0.33303628262253343,
"grad_norm": 9.022587776184082,
"learning_rate": 2.7229817263404866e-05,
"loss": 0.7837,
"step": 654
},
{
"epoch": 0.33354551241247615,
"grad_norm": 9.906291007995605,
"learning_rate": 2.708867391362948e-05,
"loss": 0.6524,
"step": 655
},
{
"epoch": 0.3340547422024188,
"grad_norm": 5.258704662322998,
"learning_rate": 2.694776128065345e-05,
"loss": 0.3687,
"step": 656
},
{
"epoch": 0.33456397199236154,
"grad_norm": 7.302427768707275,
"learning_rate": 2.6807080783465376e-05,
"loss": 0.4876,
"step": 657
},
{
"epoch": 0.33507320178230426,
"grad_norm": 13.841546058654785,
"learning_rate": 2.6666633838716314e-05,
"loss": 1.4033,
"step": 658
},
{
"epoch": 0.335582431572247,
"grad_norm": 11.859368324279785,
"learning_rate": 2.6526421860705473e-05,
"loss": 0.9077,
"step": 659
},
{
"epoch": 0.3360916613621897,
"grad_norm": 13.534501075744629,
"learning_rate": 2.638644626136587e-05,
"loss": 0.7607,
"step": 660
},
{
"epoch": 0.3366008911521324,
"grad_norm": 7.9164958000183105,
"learning_rate": 2.6246708450250256e-05,
"loss": 0.599,
"step": 661
},
{
"epoch": 0.3371101209420751,
"grad_norm": 8.299764633178711,
"learning_rate": 2.6107209834516854e-05,
"loss": 1.0367,
"step": 662
},
{
"epoch": 0.3376193507320178,
"grad_norm": 4.36458683013916,
"learning_rate": 2.596795181891514e-05,
"loss": 0.4012,
"step": 663
},
{
"epoch": 0.33812858052196054,
"grad_norm": 2.5791258811950684,
"learning_rate": 2.5828935805771802e-05,
"loss": 0.1412,
"step": 664
},
{
"epoch": 0.33863781031190326,
"grad_norm": 4.219427585601807,
"learning_rate": 2.5690163194976575e-05,
"loss": 0.1824,
"step": 665
},
{
"epoch": 0.33914704010184593,
"grad_norm": 2.9232358932495117,
"learning_rate": 2.5551635383968065e-05,
"loss": 0.17,
"step": 666
},
{
"epoch": 0.33965626989178865,
"grad_norm": 2.1133458614349365,
"learning_rate": 2.5413353767719805e-05,
"loss": 0.1109,
"step": 667
},
{
"epoch": 0.3401654996817314,
"grad_norm": 0.8237698078155518,
"learning_rate": 2.5275319738726165e-05,
"loss": 0.0692,
"step": 668
},
{
"epoch": 0.3406747294716741,
"grad_norm": 2.2106404304504395,
"learning_rate": 2.513753468698826e-05,
"loss": 0.1241,
"step": 669
},
{
"epoch": 0.3411839592616168,
"grad_norm": 2.5900228023529053,
"learning_rate": 2.500000000000001e-05,
"loss": 0.1617,
"step": 670
},
{
"epoch": 0.34169318905155954,
"grad_norm": 0.9092320799827576,
"learning_rate": 2.486271706273421e-05,
"loss": 0.1002,
"step": 671
},
{
"epoch": 0.3422024188415022,
"grad_norm": 3.5675482749938965,
"learning_rate": 2.4725687257628534e-05,
"loss": 0.0889,
"step": 672
},
{
"epoch": 0.34271164863144493,
"grad_norm": 4.1621479988098145,
"learning_rate": 2.4588911964571553e-05,
"loss": 0.119,
"step": 673
},
{
"epoch": 0.34322087842138765,
"grad_norm": 1.428825855255127,
"learning_rate": 2.4452392560888976e-05,
"loss": 0.0871,
"step": 674
},
{
"epoch": 0.3437301082113304,
"grad_norm": 4.961071968078613,
"learning_rate": 2.4316130421329697e-05,
"loss": 0.0718,
"step": 675
},
{
"epoch": 0.3442393380012731,
"grad_norm": 5.432969570159912,
"learning_rate": 2.418012691805191e-05,
"loss": 0.0782,
"step": 676
},
{
"epoch": 0.34474856779121577,
"grad_norm": 4.57743501663208,
"learning_rate": 2.4044383420609406e-05,
"loss": 0.1596,
"step": 677
},
{
"epoch": 0.3452577975811585,
"grad_norm": 7.462673187255859,
"learning_rate": 2.3908901295937713e-05,
"loss": 0.1173,
"step": 678
},
{
"epoch": 0.3457670273711012,
"grad_norm": 4.732002258300781,
"learning_rate": 2.3773681908340284e-05,
"loss": 0.111,
"step": 679
},
{
"epoch": 0.34627625716104393,
"grad_norm": 5.835433483123779,
"learning_rate": 2.363872661947488e-05,
"loss": 0.1395,
"step": 680
},
{
"epoch": 0.34678548695098665,
"grad_norm": 4.240106105804443,
"learning_rate": 2.350403678833976e-05,
"loss": 0.0979,
"step": 681
},
{
"epoch": 0.3472947167409293,
"grad_norm": 6.862554550170898,
"learning_rate": 2.336961377126001e-05,
"loss": 0.1356,
"step": 682
},
{
"epoch": 0.34780394653087204,
"grad_norm": 5.05124568939209,
"learning_rate": 2.3235458921873925e-05,
"loss": 0.1112,
"step": 683
},
{
"epoch": 0.34831317632081477,
"grad_norm": 5.341157913208008,
"learning_rate": 2.310157359111938e-05,
"loss": 0.0908,
"step": 684
},
{
"epoch": 0.3488224061107575,
"grad_norm": 3.359665870666504,
"learning_rate": 2.296795912722014e-05,
"loss": 0.081,
"step": 685
},
{
"epoch": 0.3493316359007002,
"grad_norm": 4.865165710449219,
"learning_rate": 2.283461687567236e-05,
"loss": 0.1172,
"step": 686
},
{
"epoch": 0.3498408656906429,
"grad_norm": 8.436277389526367,
"learning_rate": 2.2701548179231048e-05,
"loss": 0.1962,
"step": 687
},
{
"epoch": 0.3503500954805856,
"grad_norm": 6.217014312744141,
"learning_rate": 2.2568754377896516e-05,
"loss": 0.0362,
"step": 688
},
{
"epoch": 0.3508593252705283,
"grad_norm": 19.41240119934082,
"learning_rate": 2.2436236808900844e-05,
"loss": 0.275,
"step": 689
},
{
"epoch": 0.35136855506047104,
"grad_norm": 6.9711456298828125,
"learning_rate": 2.2303996806694488e-05,
"loss": 0.0658,
"step": 690
},
{
"epoch": 0.35187778485041377,
"grad_norm": 6.803055763244629,
"learning_rate": 2.2172035702932825e-05,
"loss": 0.0384,
"step": 691
},
{
"epoch": 0.3523870146403565,
"grad_norm": 3.9196035861968994,
"learning_rate": 2.2040354826462668e-05,
"loss": 0.0957,
"step": 692
},
{
"epoch": 0.35289624443029916,
"grad_norm": 3.145606756210327,
"learning_rate": 2.1908955503308993e-05,
"loss": 0.0623,
"step": 693
},
{
"epoch": 0.3534054742202419,
"grad_norm": 16.734477996826172,
"learning_rate": 2.1777839056661554e-05,
"loss": 0.0769,
"step": 694
},
{
"epoch": 0.3539147040101846,
"grad_norm": 9.658271789550781,
"learning_rate": 2.164700680686147e-05,
"loss": 0.0265,
"step": 695
},
{
"epoch": 0.3544239338001273,
"grad_norm": 1.6071585416793823,
"learning_rate": 2.1516460071388062e-05,
"loss": 0.0115,
"step": 696
},
{
"epoch": 0.35493316359007004,
"grad_norm": 4.15781307220459,
"learning_rate": 2.1386200164845526e-05,
"loss": 0.0719,
"step": 697
},
{
"epoch": 0.3554423933800127,
"grad_norm": 1.0766063928604126,
"learning_rate": 2.125622839894964e-05,
"loss": 0.0062,
"step": 698
},
{
"epoch": 0.35595162316995543,
"grad_norm": 3.8494861125946045,
"learning_rate": 2.1126546082514664e-05,
"loss": 0.0104,
"step": 699
},
{
"epoch": 0.35646085295989816,
"grad_norm": 4.333930015563965,
"learning_rate": 2.09971545214401e-05,
"loss": 0.069,
"step": 700
},
{
"epoch": 0.3569700827498409,
"grad_norm": 5.104018211364746,
"learning_rate": 2.086805501869749e-05,
"loss": 0.735,
"step": 701
},
{
"epoch": 0.3574793125397836,
"grad_norm": 4.622631072998047,
"learning_rate": 2.073924887431744e-05,
"loss": 0.6267,
"step": 702
},
{
"epoch": 0.35798854232972627,
"grad_norm": 2.708951234817505,
"learning_rate": 2.061073738537635e-05,
"loss": 0.1031,
"step": 703
},
{
"epoch": 0.358497772119669,
"grad_norm": 6.911785125732422,
"learning_rate": 2.048252184598352e-05,
"loss": 0.5889,
"step": 704
},
{
"epoch": 0.3590070019096117,
"grad_norm": 8.187259674072266,
"learning_rate": 2.0354603547267985e-05,
"loss": 0.5466,
"step": 705
},
{
"epoch": 0.35951623169955443,
"grad_norm": 6.681227207183838,
"learning_rate": 2.0226983777365604e-05,
"loss": 0.5335,
"step": 706
},
{
"epoch": 0.36002546148949716,
"grad_norm": 13.172379493713379,
"learning_rate": 2.0099663821406056e-05,
"loss": 0.6291,
"step": 707
},
{
"epoch": 0.3605346912794398,
"grad_norm": 10.422917366027832,
"learning_rate": 1.9972644961499854e-05,
"loss": 0.8471,
"step": 708
},
{
"epoch": 0.36104392106938255,
"grad_norm": 13.669676780700684,
"learning_rate": 1.9845928476725524e-05,
"loss": 1.0402,
"step": 709
},
{
"epoch": 0.36155315085932527,
"grad_norm": 6.9141716957092285,
"learning_rate": 1.9719515643116674e-05,
"loss": 0.6035,
"step": 710
},
{
"epoch": 0.362062380649268,
"grad_norm": 8.188703536987305,
"learning_rate": 1.959340773364911e-05,
"loss": 0.6699,
"step": 711
},
{
"epoch": 0.3625716104392107,
"grad_norm": 7.684673309326172,
"learning_rate": 1.946760601822809e-05,
"loss": 0.7657,
"step": 712
},
{
"epoch": 0.3630808402291534,
"grad_norm": 2.5300745964050293,
"learning_rate": 1.9342111763675512e-05,
"loss": 0.1569,
"step": 713
},
{
"epoch": 0.3635900700190961,
"grad_norm": 1.3801597356796265,
"learning_rate": 1.9216926233717085e-05,
"loss": 0.081,
"step": 714
},
{
"epoch": 0.3640992998090388,
"grad_norm": 1.2587306499481201,
"learning_rate": 1.9092050688969738e-05,
"loss": 0.1421,
"step": 715
},
{
"epoch": 0.36460852959898155,
"grad_norm": 1.1372148990631104,
"learning_rate": 1.8967486386928817e-05,
"loss": 0.0961,
"step": 716
},
{
"epoch": 0.36511775938892427,
"grad_norm": 0.7794922590255737,
"learning_rate": 1.8843234581955442e-05,
"loss": 0.1008,
"step": 717
},
{
"epoch": 0.365626989178867,
"grad_norm": 1.1420189142227173,
"learning_rate": 1.8719296525263922e-05,
"loss": 0.1071,
"step": 718
},
{
"epoch": 0.36613621896880966,
"grad_norm": 1.6180529594421387,
"learning_rate": 1.859567346490913e-05,
"loss": 0.0875,
"step": 719
},
{
"epoch": 0.3666454487587524,
"grad_norm": 1.0748051404953003,
"learning_rate": 1.847236664577389e-05,
"loss": 0.1288,
"step": 720
},
{
"epoch": 0.3671546785486951,
"grad_norm": 1.0483026504516602,
"learning_rate": 1.8349377309556486e-05,
"loss": 0.0774,
"step": 721
},
{
"epoch": 0.3676639083386378,
"grad_norm": 1.506415843963623,
"learning_rate": 1.8226706694758195e-05,
"loss": 0.1362,
"step": 722
},
{
"epoch": 0.36817313812858055,
"grad_norm": 1.0291399955749512,
"learning_rate": 1.810435603667075e-05,
"loss": 0.1071,
"step": 723
},
{
"epoch": 0.3686823679185232,
"grad_norm": 5.942626953125,
"learning_rate": 1.7982326567363888e-05,
"loss": 0.1614,
"step": 724
},
{
"epoch": 0.36919159770846594,
"grad_norm": 12.163902282714844,
"learning_rate": 1.7860619515673033e-05,
"loss": 0.1458,
"step": 725
},
{
"epoch": 0.36970082749840866,
"grad_norm": 3.687087059020996,
"learning_rate": 1.773923610718686e-05,
"loss": 0.0919,
"step": 726
},
{
"epoch": 0.3702100572883514,
"grad_norm": 4.903958320617676,
"learning_rate": 1.7618177564234905e-05,
"loss": 0.1254,
"step": 727
},
{
"epoch": 0.3707192870782941,
"grad_norm": 4.808300495147705,
"learning_rate": 1.7497445105875377e-05,
"loss": 0.1473,
"step": 728
},
{
"epoch": 0.37122851686823677,
"grad_norm": 5.668433666229248,
"learning_rate": 1.73770399478828e-05,
"loss": 0.1524,
"step": 729
},
{
"epoch": 0.3717377466581795,
"grad_norm": 8.04811954498291,
"learning_rate": 1.725696330273575e-05,
"loss": 0.0919,
"step": 730
},
{
"epoch": 0.3722469764481222,
"grad_norm": 4.406056880950928,
"learning_rate": 1.7137216379604727e-05,
"loss": 0.1046,
"step": 731
},
{
"epoch": 0.37275620623806494,
"grad_norm": 7.855915546417236,
"learning_rate": 1.7017800384339928e-05,
"loss": 0.0709,
"step": 732
},
{
"epoch": 0.37326543602800766,
"grad_norm": 3.947338342666626,
"learning_rate": 1.6898716519459074e-05,
"loss": 0.072,
"step": 733
},
{
"epoch": 0.3737746658179503,
"grad_norm": 7.404916763305664,
"learning_rate": 1.6779965984135377e-05,
"loss": 0.1313,
"step": 734
},
{
"epoch": 0.37428389560789305,
"grad_norm": 4.375075817108154,
"learning_rate": 1.6661549974185424e-05,
"loss": 0.0829,
"step": 735
},
{
"epoch": 0.37479312539783577,
"grad_norm": 4.642003536224365,
"learning_rate": 1.6543469682057106e-05,
"loss": 0.0602,
"step": 736
},
{
"epoch": 0.3753023551877785,
"grad_norm": 5.987782001495361,
"learning_rate": 1.6425726296817633e-05,
"loss": 0.0868,
"step": 737
},
{
"epoch": 0.3758115849777212,
"grad_norm": 10.39226245880127,
"learning_rate": 1.6308321004141607e-05,
"loss": 0.1273,
"step": 738
},
{
"epoch": 0.37632081476766394,
"grad_norm": 6.080224514007568,
"learning_rate": 1.619125498629904e-05,
"loss": 0.0465,
"step": 739
},
{
"epoch": 0.3768300445576066,
"grad_norm": 4.397584915161133,
"learning_rate": 1.60745294221434e-05,
"loss": 0.0625,
"step": 740
},
{
"epoch": 0.3773392743475493,
"grad_norm": 8.622246742248535,
"learning_rate": 1.595814548709983e-05,
"loss": 0.0664,
"step": 741
},
{
"epoch": 0.37784850413749205,
"grad_norm": 8.313824653625488,
"learning_rate": 1.5842104353153287e-05,
"loss": 0.0271,
"step": 742
},
{
"epoch": 0.37835773392743477,
"grad_norm": 2.949397325515747,
"learning_rate": 1.5726407188836673e-05,
"loss": 0.0345,
"step": 743
},
{
"epoch": 0.3788669637173775,
"grad_norm": 3.27532696723938,
"learning_rate": 1.5611055159219152e-05,
"loss": 0.0561,
"step": 744
},
{
"epoch": 0.37937619350732016,
"grad_norm": 0.19333244860172272,
"learning_rate": 1.549604942589441e-05,
"loss": 0.0013,
"step": 745
},
{
"epoch": 0.3798854232972629,
"grad_norm": 4.911413192749023,
"learning_rate": 1.5381391146968866e-05,
"loss": 0.0187,
"step": 746
},
{
"epoch": 0.3803946530872056,
"grad_norm": 6.5160627365112305,
"learning_rate": 1.526708147705013e-05,
"loss": 0.0928,
"step": 747
},
{
"epoch": 0.3809038828771483,
"grad_norm": 5.048818111419678,
"learning_rate": 1.5153121567235335e-05,
"loss": 0.013,
"step": 748
},
{
"epoch": 0.38141311266709105,
"grad_norm": 4.852001667022705,
"learning_rate": 1.5039512565099467e-05,
"loss": 0.079,
"step": 749
},
{
"epoch": 0.3819223424570337,
"grad_norm": 3.08077335357666,
"learning_rate": 1.4926255614683932e-05,
"loss": 0.0403,
"step": 750
},
{
"epoch": 0.38243157224697644,
"grad_norm": 4.824317932128906,
"learning_rate": 1.481335185648498e-05,
"loss": 0.7268,
"step": 751
},
{
"epoch": 0.38294080203691916,
"grad_norm": 4.761933326721191,
"learning_rate": 1.4700802427442179e-05,
"loss": 0.5991,
"step": 752
},
{
"epoch": 0.3834500318268619,
"grad_norm": 4.185091495513916,
"learning_rate": 1.458860846092705e-05,
"loss": 0.2459,
"step": 753
},
{
"epoch": 0.3839592616168046,
"grad_norm": 10.620789527893066,
"learning_rate": 1.4476771086731567e-05,
"loss": 0.79,
"step": 754
},
{
"epoch": 0.38446849140674727,
"grad_norm": 11.214977264404297,
"learning_rate": 1.4365291431056871e-05,
"loss": 0.7926,
"step": 755
},
{
"epoch": 0.38497772119669,
"grad_norm": 5.3124895095825195,
"learning_rate": 1.4254170616501827e-05,
"loss": 0.4243,
"step": 756
},
{
"epoch": 0.3854869509866327,
"grad_norm": 11.870231628417969,
"learning_rate": 1.414340976205183e-05,
"loss": 0.6992,
"step": 757
},
{
"epoch": 0.38599618077657544,
"grad_norm": 9.170869827270508,
"learning_rate": 1.4033009983067452e-05,
"loss": 0.8608,
"step": 758
},
{
"epoch": 0.38650541056651816,
"grad_norm": 10.806239128112793,
"learning_rate": 1.3922972391273226e-05,
"loss": 0.8629,
"step": 759
},
{
"epoch": 0.3870146403564608,
"grad_norm": 9.26331901550293,
"learning_rate": 1.3813298094746491e-05,
"loss": 0.5738,
"step": 760
},
{
"epoch": 0.38752387014640355,
"grad_norm": 10.236473083496094,
"learning_rate": 1.3703988197906209e-05,
"loss": 0.6671,
"step": 761
},
{
"epoch": 0.38803309993634627,
"grad_norm": 6.966268539428711,
"learning_rate": 1.3595043801501794e-05,
"loss": 0.7432,
"step": 762
},
{
"epoch": 0.388542329726289,
"grad_norm": 5.45960807800293,
"learning_rate": 1.3486466002602133e-05,
"loss": 0.5434,
"step": 763
},
{
"epoch": 0.3890515595162317,
"grad_norm": 1.5403192043304443,
"learning_rate": 1.3378255894584463e-05,
"loss": 0.1034,
"step": 764
},
{
"epoch": 0.38956078930617444,
"grad_norm": 1.8921170234680176,
"learning_rate": 1.327041456712334e-05,
"loss": 0.084,
"step": 765
},
{
"epoch": 0.3900700190961171,
"grad_norm": 4.200987815856934,
"learning_rate": 1.3162943106179749e-05,
"loss": 0.1189,
"step": 766
},
{
"epoch": 0.3905792488860598,
"grad_norm": 2.609253406524658,
"learning_rate": 1.3055842593990131e-05,
"loss": 0.2064,
"step": 767
},
{
"epoch": 0.39108847867600255,
"grad_norm": 2.868013858795166,
"learning_rate": 1.2949114109055415e-05,
"loss": 0.1238,
"step": 768
},
{
"epoch": 0.39159770846594527,
"grad_norm": 2.5417683124542236,
"learning_rate": 1.2842758726130283e-05,
"loss": 0.0929,
"step": 769
},
{
"epoch": 0.392106938255888,
"grad_norm": 1.0600427389144897,
"learning_rate": 1.2736777516212266e-05,
"loss": 0.1323,
"step": 770
},
{
"epoch": 0.39261616804583066,
"grad_norm": 1.9886938333511353,
"learning_rate": 1.2631171546530968e-05,
"loss": 0.0904,
"step": 771
},
{
"epoch": 0.3931253978357734,
"grad_norm": 0.7915336489677429,
"learning_rate": 1.2525941880537307e-05,
"loss": 0.0964,
"step": 772
},
{
"epoch": 0.3936346276257161,
"grad_norm": 2.3213069438934326,
"learning_rate": 1.2421089577892869e-05,
"loss": 0.1248,
"step": 773
},
{
"epoch": 0.3941438574156588,
"grad_norm": 3.6811985969543457,
"learning_rate": 1.2316615694459189e-05,
"loss": 0.0861,
"step": 774
},
{
"epoch": 0.39465308720560155,
"grad_norm": 4.516244411468506,
"learning_rate": 1.2212521282287092e-05,
"loss": 0.1168,
"step": 775
},
{
"epoch": 0.3951623169955442,
"grad_norm": 4.109873294830322,
"learning_rate": 1.2108807389606158e-05,
"loss": 0.1002,
"step": 776
},
{
"epoch": 0.39567154678548694,
"grad_norm": 4.258279323577881,
"learning_rate": 1.2005475060814159e-05,
"loss": 0.1388,
"step": 777
},
{
"epoch": 0.39618077657542966,
"grad_norm": 3.9544007778167725,
"learning_rate": 1.1902525336466464e-05,
"loss": 0.1318,
"step": 778
},
{
"epoch": 0.3966900063653724,
"grad_norm": 5.926074981689453,
"learning_rate": 1.1799959253265668e-05,
"loss": 0.1125,
"step": 779
},
{
"epoch": 0.3971992361553151,
"grad_norm": 8.683218955993652,
"learning_rate": 1.1697777844051105e-05,
"loss": 0.0995,
"step": 780
},
{
"epoch": 0.39770846594525777,
"grad_norm": 3.9073028564453125,
"learning_rate": 1.1595982137788403e-05,
"loss": 0.0903,
"step": 781
},
{
"epoch": 0.3982176957352005,
"grad_norm": 5.088838577270508,
"learning_rate": 1.1494573159559213e-05,
"loss": 0.1193,
"step": 782
},
{
"epoch": 0.3987269255251432,
"grad_norm": 5.782503128051758,
"learning_rate": 1.1393551930550828e-05,
"loss": 0.0823,
"step": 783
},
{
"epoch": 0.39923615531508594,
"grad_norm": 5.548112869262695,
"learning_rate": 1.1292919468045877e-05,
"loss": 0.0797,
"step": 784
},
{
"epoch": 0.39974538510502866,
"grad_norm": 6.514894962310791,
"learning_rate": 1.1192676785412154e-05,
"loss": 0.1448,
"step": 785
},
{
"epoch": 0.4002546148949714,
"grad_norm": 4.215356826782227,
"learning_rate": 1.1092824892092373e-05,
"loss": 0.0659,
"step": 786
},
{
"epoch": 0.40076384468491405,
"grad_norm": 7.318501949310303,
"learning_rate": 1.099336479359398e-05,
"loss": 0.1557,
"step": 787
},
{
"epoch": 0.4012730744748568,
"grad_norm": 4.508605003356934,
"learning_rate": 1.0894297491479045e-05,
"loss": 0.0383,
"step": 788
},
{
"epoch": 0.4017823042647995,
"grad_norm": 2.6559200286865234,
"learning_rate": 1.0795623983354215e-05,
"loss": 0.0212,
"step": 789
},
{
"epoch": 0.4022915340547422,
"grad_norm": 1.4912053346633911,
"learning_rate": 1.0697345262860636e-05,
"loss": 0.0104,
"step": 790
},
{
"epoch": 0.40280076384468494,
"grad_norm": 7.236735820770264,
"learning_rate": 1.0599462319663905e-05,
"loss": 0.0706,
"step": 791
},
{
"epoch": 0.4033099936346276,
"grad_norm": 4.715954303741455,
"learning_rate": 1.0501976139444191e-05,
"loss": 0.048,
"step": 792
},
{
"epoch": 0.40381922342457033,
"grad_norm": 2.1985514163970947,
"learning_rate": 1.0404887703886251e-05,
"loss": 0.0125,
"step": 793
},
{
"epoch": 0.40432845321451305,
"grad_norm": 7.244698524475098,
"learning_rate": 1.0308197990669538e-05,
"loss": 0.0468,
"step": 794
},
{
"epoch": 0.4048376830044558,
"grad_norm": 1.7899662256240845,
"learning_rate": 1.021190797345839e-05,
"loss": 0.0033,
"step": 795
},
{
"epoch": 0.4053469127943985,
"grad_norm": 7.417436122894287,
"learning_rate": 1.0116018621892237e-05,
"loss": 0.1219,
"step": 796
},
{
"epoch": 0.40585614258434116,
"grad_norm": 2.332493782043457,
"learning_rate": 1.0020530901575754e-05,
"loss": 0.0177,
"step": 797
},
{
"epoch": 0.4063653723742839,
"grad_norm": 6.933743000030518,
"learning_rate": 9.92544577406923e-06,
"loss": 0.0993,
"step": 798
},
{
"epoch": 0.4068746021642266,
"grad_norm": 6.604279518127441,
"learning_rate": 9.830764196878872e-06,
"loss": 0.0523,
"step": 799
},
{
"epoch": 0.40738383195416933,
"grad_norm": 1.6020522117614746,
"learning_rate": 9.73648712344707e-06,
"loss": 0.0222,
"step": 800
},
{
"epoch": 0.40738383195416933,
"eval_loss": 0.23512445390224457,
"eval_runtime": 376.2064,
"eval_samples_per_second": 8.793,
"eval_steps_per_second": 2.198,
"step": 800
},
{
"epoch": 0.40789306174411205,
"grad_norm": 4.510726451873779,
"learning_rate": 9.642615503142926e-06,
"loss": 0.7091,
"step": 801
},
{
"epoch": 0.4084022915340547,
"grad_norm": 4.7646942138671875,
"learning_rate": 9.549150281252633e-06,
"loss": 0.4744,
"step": 802
},
{
"epoch": 0.40891152132399744,
"grad_norm": 6.916759014129639,
"learning_rate": 9.456092398969902e-06,
"loss": 0.6527,
"step": 803
},
{
"epoch": 0.40942075111394016,
"grad_norm": 4.222986221313477,
"learning_rate": 9.363442793386606e-06,
"loss": 0.3524,
"step": 804
},
{
"epoch": 0.4099299809038829,
"grad_norm": 8.524075508117676,
"learning_rate": 9.271202397483215e-06,
"loss": 0.9119,
"step": 805
},
{
"epoch": 0.4104392106938256,
"grad_norm": 12.714594841003418,
"learning_rate": 9.179372140119525e-06,
"loss": 0.4737,
"step": 806
},
{
"epoch": 0.4109484404837683,
"grad_norm": 8.219633102416992,
"learning_rate": 9.087952946025175e-06,
"loss": 0.4039,
"step": 807
},
{
"epoch": 0.411457670273711,
"grad_norm": 11.758675575256348,
"learning_rate": 8.996945735790447e-06,
"loss": 0.8365,
"step": 808
},
{
"epoch": 0.4119669000636537,
"grad_norm": 10.59483528137207,
"learning_rate": 8.906351425856952e-06,
"loss": 1.0241,
"step": 809
},
{
"epoch": 0.41247612985359644,
"grad_norm": 5.28472375869751,
"learning_rate": 8.816170928508365e-06,
"loss": 0.5595,
"step": 810
},
{
"epoch": 0.41298535964353916,
"grad_norm": 8.57630729675293,
"learning_rate": 8.7264051518613e-06,
"loss": 0.5879,
"step": 811
},
{
"epoch": 0.4134945894334819,
"grad_norm": 8.637681007385254,
"learning_rate": 8.637054999856148e-06,
"loss": 0.8988,
"step": 812
},
{
"epoch": 0.41400381922342455,
"grad_norm": 1.3487462997436523,
"learning_rate": 8.548121372247918e-06,
"loss": 0.1034,
"step": 813
},
{
"epoch": 0.4145130490133673,
"grad_norm": 0.9683374762535095,
"learning_rate": 8.459605164597267e-06,
"loss": 0.0647,
"step": 814
},
{
"epoch": 0.41502227880331,
"grad_norm": 0.556820273399353,
"learning_rate": 8.371507268261437e-06,
"loss": 0.0749,
"step": 815
},
{
"epoch": 0.4155315085932527,
"grad_norm": 2.4960896968841553,
"learning_rate": 8.283828570385238e-06,
"loss": 0.1691,
"step": 816
},
{
"epoch": 0.41604073838319544,
"grad_norm": 1.6859462261199951,
"learning_rate": 8.196569953892202e-06,
"loss": 0.1085,
"step": 817
},
{
"epoch": 0.4165499681731381,
"grad_norm": 3.0034708976745605,
"learning_rate": 8.109732297475635e-06,
"loss": 0.0917,
"step": 818
},
{
"epoch": 0.41705919796308083,
"grad_norm": 1.4022327661514282,
"learning_rate": 8.023316475589754e-06,
"loss": 0.1208,
"step": 819
},
{
"epoch": 0.41756842775302355,
"grad_norm": 0.8047292232513428,
"learning_rate": 7.937323358440935e-06,
"loss": 0.0582,
"step": 820
},
{
"epoch": 0.4180776575429663,
"grad_norm": 1.763879656791687,
"learning_rate": 7.851753811978924e-06,
"loss": 0.1686,
"step": 821
},
{
"epoch": 0.418586887332909,
"grad_norm": 1.504451036453247,
"learning_rate": 7.766608697888095e-06,
"loss": 0.0718,
"step": 822
},
{
"epoch": 0.41909611712285166,
"grad_norm": 1.7512840032577515,
"learning_rate": 7.681888873578786e-06,
"loss": 0.1577,
"step": 823
},
{
"epoch": 0.4196053469127944,
"grad_norm": 10.731456756591797,
"learning_rate": 7.597595192178702e-06,
"loss": 0.1255,
"step": 824
},
{
"epoch": 0.4201145767027371,
"grad_norm": 4.191404819488525,
"learning_rate": 7.513728502524286e-06,
"loss": 0.1061,
"step": 825
},
{
"epoch": 0.42062380649267983,
"grad_norm": 6.975319862365723,
"learning_rate": 7.430289649152156e-06,
"loss": 0.1258,
"step": 826
},
{
"epoch": 0.42113303628262255,
"grad_norm": 5.663224697113037,
"learning_rate": 7.347279472290647e-06,
"loss": 0.1082,
"step": 827
},
{
"epoch": 0.4216422660725652,
"grad_norm": 6.514274597167969,
"learning_rate": 7.264698807851328e-06,
"loss": 0.1209,
"step": 828
},
{
"epoch": 0.42215149586250794,
"grad_norm": 5.257199764251709,
"learning_rate": 7.182548487420554e-06,
"loss": 0.0995,
"step": 829
},
{
"epoch": 0.42266072565245066,
"grad_norm": 3.9015791416168213,
"learning_rate": 7.100829338251147e-06,
"loss": 0.1605,
"step": 830
},
{
"epoch": 0.4231699554423934,
"grad_norm": 5.840038776397705,
"learning_rate": 7.019542183254046e-06,
"loss": 0.1126,
"step": 831
},
{
"epoch": 0.4236791852323361,
"grad_norm": 11.773760795593262,
"learning_rate": 6.9386878409899715e-06,
"loss": 0.1449,
"step": 832
},
{
"epoch": 0.42418841502227883,
"grad_norm": 6.619400978088379,
"learning_rate": 6.858267125661272e-06,
"loss": 0.1207,
"step": 833
},
{
"epoch": 0.4246976448122215,
"grad_norm": 5.580018520355225,
"learning_rate": 6.778280847103669e-06,
"loss": 0.0902,
"step": 834
},
{
"epoch": 0.4252068746021642,
"grad_norm": 5.762646675109863,
"learning_rate": 6.698729810778065e-06,
"loss": 0.0773,
"step": 835
},
{
"epoch": 0.42571610439210694,
"grad_norm": 5.908243179321289,
"learning_rate": 6.619614817762537e-06,
"loss": 0.0891,
"step": 836
},
{
"epoch": 0.42622533418204966,
"grad_norm": 4.759620666503906,
"learning_rate": 6.540936664744196e-06,
"loss": 0.092,
"step": 837
},
{
"epoch": 0.4267345639719924,
"grad_norm": 3.2303318977355957,
"learning_rate": 6.462696144011149e-06,
"loss": 0.0409,
"step": 838
},
{
"epoch": 0.42724379376193505,
"grad_norm": 4.1941986083984375,
"learning_rate": 6.384894043444567e-06,
"loss": 0.1015,
"step": 839
},
{
"epoch": 0.4277530235518778,
"grad_norm": 2.3727965354919434,
"learning_rate": 6.3075311465107535e-06,
"loss": 0.0168,
"step": 840
},
{
"epoch": 0.4282622533418205,
"grad_norm": 6.094393730163574,
"learning_rate": 6.230608232253227e-06,
"loss": 0.098,
"step": 841
},
{
"epoch": 0.4287714831317632,
"grad_norm": 6.916983127593994,
"learning_rate": 6.154126075284855e-06,
"loss": 0.0278,
"step": 842
},
{
"epoch": 0.42928071292170594,
"grad_norm": 1.7201404571533203,
"learning_rate": 6.078085445780129e-06,
"loss": 0.0032,
"step": 843
},
{
"epoch": 0.4297899427116486,
"grad_norm": 9.973702430725098,
"learning_rate": 6.002487109467347e-06,
"loss": 0.0374,
"step": 844
},
{
"epoch": 0.43029917250159133,
"grad_norm": 1.6437783241271973,
"learning_rate": 5.927331827620903e-06,
"loss": 0.0081,
"step": 845
},
{
"epoch": 0.43080840229153405,
"grad_norm": 4.856413841247559,
"learning_rate": 5.852620357053651e-06,
"loss": 0.0392,
"step": 846
},
{
"epoch": 0.4313176320814768,
"grad_norm": 5.264596939086914,
"learning_rate": 5.778353450109286e-06,
"loss": 0.0289,
"step": 847
},
{
"epoch": 0.4318268618714195,
"grad_norm": 4.01646089553833,
"learning_rate": 5.704531854654721e-06,
"loss": 0.0063,
"step": 848
},
{
"epoch": 0.43233609166136217,
"grad_norm": 5.703643798828125,
"learning_rate": 5.631156314072605e-06,
"loss": 0.0557,
"step": 849
},
{
"epoch": 0.4328453214513049,
"grad_norm": 9.367152214050293,
"learning_rate": 5.558227567253832e-06,
"loss": 0.1552,
"step": 850
},
{
"epoch": 0.4333545512412476,
"grad_norm": 4.262320518493652,
"learning_rate": 5.485746348590048e-06,
"loss": 0.6515,
"step": 851
},
{
"epoch": 0.43386378103119033,
"grad_norm": 5.231907367706299,
"learning_rate": 5.413713387966329e-06,
"loss": 0.5992,
"step": 852
},
{
"epoch": 0.43437301082113305,
"grad_norm": 4.903708457946777,
"learning_rate": 5.34212941075381e-06,
"loss": 0.3601,
"step": 853
},
{
"epoch": 0.4348822406110757,
"grad_norm": 6.984781742095947,
"learning_rate": 5.270995137802315e-06,
"loss": 0.5169,
"step": 854
},
{
"epoch": 0.43539147040101844,
"grad_norm": 7.836650371551514,
"learning_rate": 5.200311285433213e-06,
"loss": 0.6677,
"step": 855
},
{
"epoch": 0.43590070019096117,
"grad_norm": 5.570544719696045,
"learning_rate": 5.13007856543209e-06,
"loss": 0.3495,
"step": 856
},
{
"epoch": 0.4364099299809039,
"grad_norm": 21.38435935974121,
"learning_rate": 5.060297685041659e-06,
"loss": 1.0934,
"step": 857
},
{
"epoch": 0.4369191597708466,
"grad_norm": 11.60948371887207,
"learning_rate": 4.99096934695461e-06,
"loss": 0.8771,
"step": 858
},
{
"epoch": 0.43742838956078933,
"grad_norm": 11.168967247009277,
"learning_rate": 4.922094249306558e-06,
"loss": 0.822,
"step": 859
},
{
"epoch": 0.437937619350732,
"grad_norm": 9.118983268737793,
"learning_rate": 4.853673085668947e-06,
"loss": 0.7252,
"step": 860
},
{
"epoch": 0.4384468491406747,
"grad_norm": 8.641736030578613,
"learning_rate": 4.78570654504214e-06,
"loss": 0.8113,
"step": 861
},
{
"epoch": 0.43895607893061744,
"grad_norm": 7.092012882232666,
"learning_rate": 4.7181953118484556e-06,
"loss": 0.6584,
"step": 862
},
{
"epoch": 0.43946530872056017,
"grad_norm": 1.0570333003997803,
"learning_rate": 4.651140065925269e-06,
"loss": 0.1191,
"step": 863
},
{
"epoch": 0.4399745385105029,
"grad_norm": 2.1120429039001465,
"learning_rate": 4.58454148251814e-06,
"loss": 0.1825,
"step": 864
},
{
"epoch": 0.44048376830044556,
"grad_norm": 0.5101509094238281,
"learning_rate": 4.5184002322740785e-06,
"loss": 0.0776,
"step": 865
},
{
"epoch": 0.4409929980903883,
"grad_norm": 1.4004946947097778,
"learning_rate": 4.452716981234744e-06,
"loss": 0.1078,
"step": 866
},
{
"epoch": 0.441502227880331,
"grad_norm": 1.1376584768295288,
"learning_rate": 4.387492390829734e-06,
"loss": 0.0968,
"step": 867
},
{
"epoch": 0.4420114576702737,
"grad_norm": 1.968505620956421,
"learning_rate": 4.322727117869951e-06,
"loss": 0.1204,
"step": 868
},
{
"epoch": 0.44252068746021644,
"grad_norm": 0.5514800548553467,
"learning_rate": 4.258421814540992e-06,
"loss": 0.0586,
"step": 869
},
{
"epoch": 0.4430299172501591,
"grad_norm": 2.0910208225250244,
"learning_rate": 4.19457712839652e-06,
"loss": 0.1634,
"step": 870
},
{
"epoch": 0.44353914704010183,
"grad_norm": 3.2385759353637695,
"learning_rate": 4.131193702351827e-06,
"loss": 0.1482,
"step": 871
},
{
"epoch": 0.44404837683004456,
"grad_norm": 1.8257766962051392,
"learning_rate": 4.068272174677335e-06,
"loss": 0.1311,
"step": 872
},
{
"epoch": 0.4445576066199873,
"grad_norm": 1.4228204488754272,
"learning_rate": 4.005813178992091e-06,
"loss": 0.0839,
"step": 873
},
{
"epoch": 0.44506683640993,
"grad_norm": 14.156405448913574,
"learning_rate": 3.9438173442575e-06,
"loss": 0.1061,
"step": 874
},
{
"epoch": 0.44557606619987267,
"grad_norm": 7.2103047370910645,
"learning_rate": 3.8822852947709375e-06,
"loss": 0.1517,
"step": 875
},
{
"epoch": 0.4460852959898154,
"grad_norm": 4.403718948364258,
"learning_rate": 3.821217650159453e-06,
"loss": 0.1108,
"step": 876
},
{
"epoch": 0.4465945257797581,
"grad_norm": 4.354316711425781,
"learning_rate": 3.760615025373543e-06,
"loss": 0.1314,
"step": 877
},
{
"epoch": 0.44710375556970083,
"grad_norm": 3.967412233352661,
"learning_rate": 3.700478030680987e-06,
"loss": 0.1001,
"step": 878
},
{
"epoch": 0.44761298535964356,
"grad_norm": 4.008363246917725,
"learning_rate": 3.6408072716606346e-06,
"loss": 0.0968,
"step": 879
},
{
"epoch": 0.4481222151495863,
"grad_norm": 6.099944114685059,
"learning_rate": 3.581603349196372e-06,
"loss": 0.1582,
"step": 880
},
{
"epoch": 0.44863144493952894,
"grad_norm": 5.96859073638916,
"learning_rate": 3.522866859471047e-06,
"loss": 0.1445,
"step": 881
},
{
"epoch": 0.44914067472947167,
"grad_norm": 3.3854691982269287,
"learning_rate": 3.4645983939604496e-06,
"loss": 0.0567,
"step": 882
},
{
"epoch": 0.4496499045194144,
"grad_norm": 3.6964199542999268,
"learning_rate": 3.406798539427386e-06,
"loss": 0.1331,
"step": 883
},
{
"epoch": 0.4501591343093571,
"grad_norm": 3.9392178058624268,
"learning_rate": 3.349467877915746e-06,
"loss": 0.1401,
"step": 884
},
{
"epoch": 0.45066836409929983,
"grad_norm": 8.413680076599121,
"learning_rate": 3.2926069867446675e-06,
"loss": 0.0784,
"step": 885
},
{
"epoch": 0.4511775938892425,
"grad_norm": 3.3428969383239746,
"learning_rate": 3.2362164385026706e-06,
"loss": 0.1658,
"step": 886
},
{
"epoch": 0.4516868236791852,
"grad_norm": 4.6566386222839355,
"learning_rate": 3.180296801041971e-06,
"loss": 0.1105,
"step": 887
},
{
"epoch": 0.45219605346912795,
"grad_norm": 4.742708683013916,
"learning_rate": 3.1248486374726883e-06,
"loss": 0.0988,
"step": 888
},
{
"epoch": 0.45270528325907067,
"grad_norm": 5.364091873168945,
"learning_rate": 3.069872506157212e-06,
"loss": 0.0267,
"step": 889
},
{
"epoch": 0.4532145130490134,
"grad_norm": 7.658147811889648,
"learning_rate": 3.0153689607045845e-06,
"loss": 0.1993,
"step": 890
},
{
"epoch": 0.45372374283895606,
"grad_norm": 5.430888652801514,
"learning_rate": 2.961338549964893e-06,
"loss": 0.0843,
"step": 891
},
{
"epoch": 0.4542329726288988,
"grad_norm": 4.619664669036865,
"learning_rate": 2.9077818180237693e-06,
"loss": 0.0777,
"step": 892
},
{
"epoch": 0.4547422024188415,
"grad_norm": 0.4149484932422638,
"learning_rate": 2.8546993041969173e-06,
"loss": 0.0021,
"step": 893
},
{
"epoch": 0.4552514322087842,
"grad_norm": 3.0641720294952393,
"learning_rate": 2.802091543024671e-06,
"loss": 0.0088,
"step": 894
},
{
"epoch": 0.45576066199872695,
"grad_norm": 3.299743175506592,
"learning_rate": 2.7499590642665774e-06,
"loss": 0.0406,
"step": 895
},
{
"epoch": 0.4562698917886696,
"grad_norm": 5.375297546386719,
"learning_rate": 2.6983023928961404e-06,
"loss": 0.0383,
"step": 896
},
{
"epoch": 0.45677912157861233,
"grad_norm": 1.769721508026123,
"learning_rate": 2.647122049095463e-06,
"loss": 0.0077,
"step": 897
},
{
"epoch": 0.45728835136855506,
"grad_norm": 7.771859645843506,
"learning_rate": 2.596418548250029e-06,
"loss": 0.0876,
"step": 898
},
{
"epoch": 0.4577975811584978,
"grad_norm": 5.935695171356201,
"learning_rate": 2.546192400943537e-06,
"loss": 0.0387,
"step": 899
},
{
"epoch": 0.4583068109484405,
"grad_norm": 5.693915367126465,
"learning_rate": 2.496444112952734e-06,
"loss": 0.1489,
"step": 900
},
{
"epoch": 0.45881604073838317,
"grad_norm": 3.7777862548828125,
"learning_rate": 2.4471741852423237e-06,
"loss": 0.6961,
"step": 901
},
{
"epoch": 0.4593252705283259,
"grad_norm": 5.661333084106445,
"learning_rate": 2.3983831139599287e-06,
"loss": 0.6676,
"step": 902
},
{
"epoch": 0.4598345003182686,
"grad_norm": 3.3415160179138184,
"learning_rate": 2.3500713904311024e-06,
"loss": 0.1848,
"step": 903
},
{
"epoch": 0.46034373010821134,
"grad_norm": 9.008618354797363,
"learning_rate": 2.3022395011543686e-06,
"loss": 0.8188,
"step": 904
},
{
"epoch": 0.46085295989815406,
"grad_norm": 6.555294036865234,
"learning_rate": 2.2548879277963064e-06,
"loss": 0.5172,
"step": 905
},
{
"epoch": 0.4613621896880968,
"grad_norm": 15.021227836608887,
"learning_rate": 2.208017147186736e-06,
"loss": 0.6723,
"step": 906
},
{
"epoch": 0.46187141947803945,
"grad_norm": 9.838711738586426,
"learning_rate": 2.161627631313923e-06,
"loss": 0.7112,
"step": 907
},
{
"epoch": 0.46238064926798217,
"grad_norm": 12.633716583251953,
"learning_rate": 2.1157198473197414e-06,
"loss": 0.9927,
"step": 908
},
{
"epoch": 0.4628898790579249,
"grad_norm": 7.977221488952637,
"learning_rate": 2.070294257495081e-06,
"loss": 0.558,
"step": 909
},
{
"epoch": 0.4633991088478676,
"grad_norm": 9.081966400146484,
"learning_rate": 2.0253513192751373e-06,
"loss": 0.5821,
"step": 910
},
{
"epoch": 0.46390833863781034,
"grad_norm": 8.361538887023926,
"learning_rate": 1.9808914852347813e-06,
"loss": 0.9417,
"step": 911
},
{
"epoch": 0.464417568427753,
"grad_norm": 2.8661348819732666,
"learning_rate": 1.9369152030840556e-06,
"loss": 0.3311,
"step": 912
},
{
"epoch": 0.4649267982176957,
"grad_norm": 1.429822564125061,
"learning_rate": 1.8934229156636452e-06,
"loss": 0.1388,
"step": 913
},
{
"epoch": 0.46543602800763845,
"grad_norm": 1.2748867273330688,
"learning_rate": 1.8504150609403858e-06,
"loss": 0.1259,
"step": 914
},
{
"epoch": 0.46594525779758117,
"grad_norm": 0.8820096850395203,
"learning_rate": 1.807892072002898e-06,
"loss": 0.0854,
"step": 915
},
{
"epoch": 0.4664544875875239,
"grad_norm": 2.467775344848633,
"learning_rate": 1.7658543770572189e-06,
"loss": 0.0945,
"step": 916
},
{
"epoch": 0.46696371737746656,
"grad_norm": 4.9203877449035645,
"learning_rate": 1.724302399422456e-06,
"loss": 0.1121,
"step": 917
},
{
"epoch": 0.4674729471674093,
"grad_norm": 0.4738912284374237,
"learning_rate": 1.6832365575265741e-06,
"loss": 0.0745,
"step": 918
},
{
"epoch": 0.467982176957352,
"grad_norm": 1.508836269378662,
"learning_rate": 1.6426572649021476e-06,
"loss": 0.1326,
"step": 919
},
{
"epoch": 0.4684914067472947,
"grad_norm": 1.2223467826843262,
"learning_rate": 1.6025649301821876e-06,
"loss": 0.1413,
"step": 920
},
{
"epoch": 0.46900063653723745,
"grad_norm": 0.6743190288543701,
"learning_rate": 1.5629599570960718e-06,
"loss": 0.0968,
"step": 921
},
{
"epoch": 0.4695098663271801,
"grad_norm": 1.4873098134994507,
"learning_rate": 1.523842744465437e-06,
"loss": 0.102,
"step": 922
},
{
"epoch": 0.47001909611712284,
"grad_norm": 1.1552408933639526,
"learning_rate": 1.4852136862001764e-06,
"loss": 0.0608,
"step": 923
},
{
"epoch": 0.47052832590706556,
"grad_norm": 5.109307289123535,
"learning_rate": 1.4470731712944884e-06,
"loss": 0.0851,
"step": 924
},
{
"epoch": 0.4710375556970083,
"grad_norm": 4.766353130340576,
"learning_rate": 1.4094215838229176e-06,
"loss": 0.1149,
"step": 925
},
{
"epoch": 0.471546785486951,
"grad_norm": 14.194299697875977,
"learning_rate": 1.372259302936546e-06,
"loss": 0.2078,
"step": 926
},
{
"epoch": 0.4720560152768937,
"grad_norm": 4.660562038421631,
"learning_rate": 1.3355867028591208e-06,
"loss": 0.0858,
"step": 927
},
{
"epoch": 0.4725652450668364,
"grad_norm": 10.358325004577637,
"learning_rate": 1.2994041528833266e-06,
"loss": 0.0983,
"step": 928
},
{
"epoch": 0.4730744748567791,
"grad_norm": 6.56409215927124,
"learning_rate": 1.2637120173670358e-06,
"loss": 0.0809,
"step": 929
},
{
"epoch": 0.47358370464672184,
"grad_norm": 4.377633094787598,
"learning_rate": 1.2285106557296477e-06,
"loss": 0.094,
"step": 930
},
{
"epoch": 0.47409293443666456,
"grad_norm": 5.809483051300049,
"learning_rate": 1.1938004224484988e-06,
"loss": 0.1196,
"step": 931
},
{
"epoch": 0.4746021642266073,
"grad_norm": 3.2537131309509277,
"learning_rate": 1.1595816670552428e-06,
"loss": 0.1031,
"step": 932
},
{
"epoch": 0.47511139401654995,
"grad_norm": 6.928438186645508,
"learning_rate": 1.1258547341323699e-06,
"loss": 0.068,
"step": 933
},
{
"epoch": 0.47562062380649267,
"grad_norm": 3.240041971206665,
"learning_rate": 1.0926199633097157e-06,
"loss": 0.0739,
"step": 934
},
{
"epoch": 0.4761298535964354,
"grad_norm": 6.833902835845947,
"learning_rate": 1.0598776892610685e-06,
"loss": 0.0857,
"step": 935
},
{
"epoch": 0.4766390833863781,
"grad_norm": 5.259301662445068,
"learning_rate": 1.02762824170074e-06,
"loss": 0.1534,
"step": 936
},
{
"epoch": 0.47714831317632084,
"grad_norm": 11.154175758361816,
"learning_rate": 9.958719453803278e-07,
"loss": 0.1273,
"step": 937
},
{
"epoch": 0.4776575429662635,
"grad_norm": 2.975895404815674,
"learning_rate": 9.646091200853802e-07,
"loss": 0.015,
"step": 938
},
{
"epoch": 0.4781667727562062,
"grad_norm": 3.766637086868286,
"learning_rate": 9.338400806321978e-07,
"loss": 0.0202,
"step": 939
},
{
"epoch": 0.47867600254614895,
"grad_norm": 2.1836998462677,
"learning_rate": 9.035651368646648e-07,
"loss": 0.0426,
"step": 940
},
{
"epoch": 0.47918523233609167,
"grad_norm": 3.746504306793213,
"learning_rate": 8.737845936511335e-07,
"loss": 0.0458,
"step": 941
},
{
"epoch": 0.4796944621260344,
"grad_norm": 4.881604194641113,
"learning_rate": 8.444987508813451e-07,
"loss": 0.0482,
"step": 942
},
{
"epoch": 0.48020369191597706,
"grad_norm": 15.250972747802734,
"learning_rate": 8.157079034633974e-07,
"loss": 0.0486,
"step": 943
},
{
"epoch": 0.4807129217059198,
"grad_norm": 4.671970844268799,
"learning_rate": 7.874123413208145e-07,
"loss": 0.0408,
"step": 944
},
{
"epoch": 0.4812221514958625,
"grad_norm": 3.882068157196045,
"learning_rate": 7.596123493895991e-07,
"loss": 0.0702,
"step": 945
},
{
"epoch": 0.4817313812858052,
"grad_norm": 3.0033390522003174,
"learning_rate": 7.323082076153509e-07,
"loss": 0.0722,
"step": 946
},
{
"epoch": 0.48224061107574795,
"grad_norm": 16.160083770751953,
"learning_rate": 7.055001909504755e-07,
"loss": 0.0567,
"step": 947
},
{
"epoch": 0.4827498408656906,
"grad_norm": 5.195967674255371,
"learning_rate": 6.791885693514133e-07,
"loss": 0.0777,
"step": 948
},
{
"epoch": 0.48325907065563334,
"grad_norm": 15.034565925598145,
"learning_rate": 6.533736077758868e-07,
"loss": 0.073,
"step": 949
},
{
"epoch": 0.48376830044557606,
"grad_norm": 3.8854053020477295,
"learning_rate": 6.280555661802856e-07,
"loss": 0.0394,
"step": 950
},
{
"epoch": 0.4842775302355188,
"grad_norm": 3.7877042293548584,
"learning_rate": 6.032346995169968e-07,
"loss": 0.7431,
"step": 951
},
{
"epoch": 0.4847867600254615,
"grad_norm": 11.732136726379395,
"learning_rate": 5.78911257731879e-07,
"loss": 0.6649,
"step": 952
},
{
"epoch": 0.4852959898154042,
"grad_norm": 5.163148403167725,
"learning_rate": 5.550854857617193e-07,
"loss": 0.4255,
"step": 953
},
{
"epoch": 0.4858052196053469,
"grad_norm": 7.868194580078125,
"learning_rate": 5.317576235317756e-07,
"loss": 0.7605,
"step": 954
},
{
"epoch": 0.4863144493952896,
"grad_norm": 7.837756633758545,
"learning_rate": 5.089279059533658e-07,
"loss": 0.7603,
"step": 955
},
{
"epoch": 0.48682367918523234,
"grad_norm": 6.926958084106445,
"learning_rate": 4.865965629214819e-07,
"loss": 0.3583,
"step": 956
},
{
"epoch": 0.48733290897517506,
"grad_norm": 6.891408920288086,
"learning_rate": 4.647638193125137e-07,
"loss": 0.5063,
"step": 957
},
{
"epoch": 0.4878421387651178,
"grad_norm": 17.593307495117188,
"learning_rate": 4.434298949819449e-07,
"loss": 1.0916,
"step": 958
},
{
"epoch": 0.48835136855506045,
"grad_norm": 13.12249755859375,
"learning_rate": 4.2259500476214407e-07,
"loss": 1.178,
"step": 959
},
{
"epoch": 0.48886059834500317,
"grad_norm": 8.691963195800781,
"learning_rate": 4.02259358460233e-07,
"loss": 0.6026,
"step": 960
},
{
"epoch": 0.4893698281349459,
"grad_norm": 5.080837726593018,
"learning_rate": 3.824231608559492e-07,
"loss": 0.379,
"step": 961
},
{
"epoch": 0.4898790579248886,
"grad_norm": 10.101880073547363,
"learning_rate": 3.630866116995757e-07,
"loss": 0.8812,
"step": 962
},
{
"epoch": 0.49038828771483134,
"grad_norm": 2.2711408138275146,
"learning_rate": 3.4424990570994797e-07,
"loss": 0.1018,
"step": 963
},
{
"epoch": 0.490897517504774,
"grad_norm": 0.9632775187492371,
"learning_rate": 3.2591323257248893e-07,
"loss": 0.1066,
"step": 964
},
{
"epoch": 0.49140674729471673,
"grad_norm": 2.8695173263549805,
"learning_rate": 3.080767769372939e-07,
"loss": 0.1316,
"step": 965
},
{
"epoch": 0.49191597708465945,
"grad_norm": 1.3276772499084473,
"learning_rate": 2.907407184172706e-07,
"loss": 0.0816,
"step": 966
},
{
"epoch": 0.4924252068746022,
"grad_norm": 4.322078227996826,
"learning_rate": 2.7390523158633554e-07,
"loss": 0.1199,
"step": 967
},
{
"epoch": 0.4929344366645449,
"grad_norm": 0.6163370013237,
"learning_rate": 2.5757048597765396e-07,
"loss": 0.0881,
"step": 968
},
{
"epoch": 0.49344366645448756,
"grad_norm": 0.8970826864242554,
"learning_rate": 2.4173664608193593e-07,
"loss": 0.095,
"step": 969
},
{
"epoch": 0.4939528962444303,
"grad_norm": 1.772135615348816,
"learning_rate": 2.2640387134577058e-07,
"loss": 0.0945,
"step": 970
},
{
"epoch": 0.494462126034373,
"grad_norm": 1.8468323945999146,
"learning_rate": 2.1157231617002783e-07,
"loss": 0.136,
"step": 971
},
{
"epoch": 0.49497135582431573,
"grad_norm": 2.6759517192840576,
"learning_rate": 1.9724212990830938e-07,
"loss": 0.138,
"step": 972
},
{
"epoch": 0.49548058561425845,
"grad_norm": 0.8388791680335999,
"learning_rate": 1.8341345686543332e-07,
"loss": 0.1131,
"step": 973
},
{
"epoch": 0.4959898154042012,
"grad_norm": 5.01349401473999,
"learning_rate": 1.7008643629596866e-07,
"loss": 0.104,
"step": 974
},
{
"epoch": 0.49649904519414384,
"grad_norm": 8.658512115478516,
"learning_rate": 1.5726120240288634e-07,
"loss": 0.088,
"step": 975
},
{
"epoch": 0.49700827498408656,
"grad_norm": 10.031734466552734,
"learning_rate": 1.449378843361271e-07,
"loss": 0.1412,
"step": 976
},
{
"epoch": 0.4975175047740293,
"grad_norm": 5.323798656463623,
"learning_rate": 1.3311660619138578e-07,
"loss": 0.0799,
"step": 977
},
{
"epoch": 0.498026734563972,
"grad_norm": 14.896018981933594,
"learning_rate": 1.2179748700879012e-07,
"loss": 0.0547,
"step": 978
},
{
"epoch": 0.49853596435391473,
"grad_norm": 2.8598153591156006,
"learning_rate": 1.109806407717462e-07,
"loss": 0.057,
"step": 979
},
{
"epoch": 0.4990451941438574,
"grad_norm": 4.470668792724609,
"learning_rate": 1.0066617640578368e-07,
"loss": 0.1221,
"step": 980
},
{
"epoch": 0.4995544239338001,
"grad_norm": 6.321918964385986,
"learning_rate": 9.085419777743465e-08,
"loss": 0.1274,
"step": 981
},
{
"epoch": 0.5000636537237428,
"grad_norm": 4.125798225402832,
"learning_rate": 8.15448036932176e-08,
"loss": 0.0822,
"step": 982
},
{
"epoch": 0.5005728835136856,
"grad_norm": 5.414987564086914,
"learning_rate": 7.273808789862724e-08,
"loss": 0.0796,
"step": 983
},
{
"epoch": 0.5010821133036283,
"grad_norm": 5.301061153411865,
"learning_rate": 6.443413907720186e-08,
"loss": 0.1011,
"step": 984
},
{
"epoch": 0.501591343093571,
"grad_norm": 6.373870849609375,
"learning_rate": 5.663304084960186e-08,
"loss": 0.1335,
"step": 985
},
{
"epoch": 0.5021005728835137,
"grad_norm": 3.59035062789917,
"learning_rate": 4.933487177280482e-08,
"loss": 0.06,
"step": 986
},
{
"epoch": 0.5026098026734565,
"grad_norm": 4.328834533691406,
"learning_rate": 4.253970533929508e-08,
"loss": 0.0565,
"step": 987
},
{
"epoch": 0.5031190324633991,
"grad_norm": 2.652255058288574,
"learning_rate": 3.624760997631982e-08,
"loss": 0.0428,
"step": 988
},
{
"epoch": 0.5036282622533418,
"grad_norm": 4.667551040649414,
"learning_rate": 3.04586490452119e-08,
"loss": 0.0206,
"step": 989
},
{
"epoch": 0.5041374920432845,
"grad_norm": 7.6786112785339355,
"learning_rate": 2.5172880840745873e-08,
"loss": 0.1095,
"step": 990
},
{
"epoch": 0.5046467218332272,
"grad_norm": 3.08980131149292,
"learning_rate": 2.0390358590538504e-08,
"loss": 0.0337,
"step": 991
},
{
"epoch": 0.50515595162317,
"grad_norm": 2.030238389968872,
"learning_rate": 1.6111130454543598e-08,
"loss": 0.0064,
"step": 992
},
{
"epoch": 0.5056651814131127,
"grad_norm": 0.7045100331306458,
"learning_rate": 1.2335239524541299e-08,
"loss": 0.0105,
"step": 993
},
{
"epoch": 0.5061744112030554,
"grad_norm": 8.8118896484375,
"learning_rate": 9.06272382371065e-09,
"loss": 0.0789,
"step": 994
},
{
"epoch": 0.5066836409929981,
"grad_norm": 8.802488327026367,
"learning_rate": 6.293616306246586e-09,
"loss": 0.0811,
"step": 995
},
{
"epoch": 0.5071928707829408,
"grad_norm": 4.467785358428955,
"learning_rate": 4.0279448570323954e-09,
"loss": 0.0437,
"step": 996
},
{
"epoch": 0.5077021005728836,
"grad_norm": 8.837767601013184,
"learning_rate": 2.265732291356626e-09,
"loss": 0.0425,
"step": 997
},
{
"epoch": 0.5082113303628262,
"grad_norm": 7.292797565460205,
"learning_rate": 1.0069963546743832e-09,
"loss": 0.0808,
"step": 998
},
{
"epoch": 0.5087205601527689,
"grad_norm": 3.702165365219116,
"learning_rate": 2.5174972244634833e-10,
"loss": 0.0469,
"step": 999
},
{
"epoch": 0.5092297899427116,
"grad_norm": 8.40251636505127,
"learning_rate": 0.0,
"loss": 0.0725,
"step": 1000
},
{
"epoch": 0.5092297899427116,
"eval_loss": 0.22850316762924194,
"eval_runtime": 378.274,
"eval_samples_per_second": 8.745,
"eval_steps_per_second": 2.186,
"step": 1000
}
],
"logging_steps": 1,
"max_steps": 1000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 200,
"stateful_callbacks": {
"EarlyStoppingCallback": {
"args": {
"early_stopping_patience": 5,
"early_stopping_threshold": 0.0
},
"attributes": {
"early_stopping_patience_counter": 0
}
},
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.0800300944866673e+18,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}