{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 8.883248730964468,
"eval_steps": 500,
"global_step": 3500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.025380710659898477,
"grad_norm": 1.7737329006195068,
"learning_rate": 4.9873096446700515e-05,
"loss": 0.4425,
"step": 10
},
{
"epoch": 0.050761421319796954,
"grad_norm": 1.6950019598007202,
"learning_rate": 4.9746192893401014e-05,
"loss": 0.5594,
"step": 20
},
{
"epoch": 0.07614213197969544,
"grad_norm": 3.7862815856933594,
"learning_rate": 4.961928934010153e-05,
"loss": 0.5435,
"step": 30
},
{
"epoch": 0.10152284263959391,
"grad_norm": 2.450183868408203,
"learning_rate": 4.949238578680203e-05,
"loss": 0.5429,
"step": 40
},
{
"epoch": 0.12690355329949238,
"grad_norm": 1.6361172199249268,
"learning_rate": 4.936548223350254e-05,
"loss": 0.6185,
"step": 50
},
{
"epoch": 0.15228426395939088,
"grad_norm": 1.3755431175231934,
"learning_rate": 4.9238578680203045e-05,
"loss": 0.5633,
"step": 60
},
{
"epoch": 0.17766497461928935,
"grad_norm": 2.1979947090148926,
"learning_rate": 4.911167512690356e-05,
"loss": 0.5031,
"step": 70
},
{
"epoch": 0.20304568527918782,
"grad_norm": 1.493660569190979,
"learning_rate": 4.8984771573604064e-05,
"loss": 0.5942,
"step": 80
},
{
"epoch": 0.22842639593908629,
"grad_norm": 1.642001748085022,
"learning_rate": 4.885786802030457e-05,
"loss": 0.5513,
"step": 90
},
{
"epoch": 0.25380710659898476,
"grad_norm": 4.11527681350708,
"learning_rate": 4.873096446700508e-05,
"loss": 0.5544,
"step": 100
},
{
"epoch": 0.27918781725888325,
"grad_norm": 1.966141700744629,
"learning_rate": 4.860406091370558e-05,
"loss": 0.5311,
"step": 110
},
{
"epoch": 0.30456852791878175,
"grad_norm": 1.490822196006775,
"learning_rate": 4.8477157360406095e-05,
"loss": 0.4546,
"step": 120
},
{
"epoch": 0.3299492385786802,
"grad_norm": 2.575047254562378,
"learning_rate": 4.83502538071066e-05,
"loss": 0.4532,
"step": 130
},
{
"epoch": 0.3553299492385787,
"grad_norm": 1.9474763870239258,
"learning_rate": 4.822335025380711e-05,
"loss": 0.4499,
"step": 140
},
{
"epoch": 0.38071065989847713,
"grad_norm": 2.9648075103759766,
"learning_rate": 4.809644670050762e-05,
"loss": 0.5425,
"step": 150
},
{
"epoch": 0.40609137055837563,
"grad_norm": 1.9473344087600708,
"learning_rate": 4.7969543147208126e-05,
"loss": 0.529,
"step": 160
},
{
"epoch": 0.43147208121827413,
"grad_norm": 2.3414478302001953,
"learning_rate": 4.784263959390863e-05,
"loss": 0.5281,
"step": 170
},
{
"epoch": 0.45685279187817257,
"grad_norm": 1.2157188653945923,
"learning_rate": 4.771573604060914e-05,
"loss": 0.4963,
"step": 180
},
{
"epoch": 0.48223350253807107,
"grad_norm": 3.3916754722595215,
"learning_rate": 4.758883248730965e-05,
"loss": 0.4245,
"step": 190
},
{
"epoch": 0.5076142131979695,
"grad_norm": 3.030148983001709,
"learning_rate": 4.746192893401015e-05,
"loss": 0.3352,
"step": 200
},
{
"epoch": 0.5329949238578681,
"grad_norm": 2.1008453369140625,
"learning_rate": 4.733502538071066e-05,
"loss": 0.6109,
"step": 210
},
{
"epoch": 0.5583756345177665,
"grad_norm": 1.6099292039871216,
"learning_rate": 4.7208121827411175e-05,
"loss": 0.5327,
"step": 220
},
{
"epoch": 0.583756345177665,
"grad_norm": 1.7559535503387451,
"learning_rate": 4.7081218274111674e-05,
"loss": 0.4475,
"step": 230
},
{
"epoch": 0.6091370558375635,
"grad_norm": 1.5413782596588135,
"learning_rate": 4.695431472081219e-05,
"loss": 0.534,
"step": 240
},
{
"epoch": 0.6345177664974619,
"grad_norm": 1.810226321220398,
"learning_rate": 4.682741116751269e-05,
"loss": 0.5739,
"step": 250
},
{
"epoch": 0.6598984771573604,
"grad_norm": 3.676072120666504,
"learning_rate": 4.67005076142132e-05,
"loss": 0.4702,
"step": 260
},
{
"epoch": 0.6852791878172588,
"grad_norm": 2.691065788269043,
"learning_rate": 4.6573604060913705e-05,
"loss": 0.5858,
"step": 270
},
{
"epoch": 0.7106598984771574,
"grad_norm": 1.8071280717849731,
"learning_rate": 4.644670050761422e-05,
"loss": 0.5294,
"step": 280
},
{
"epoch": 0.7360406091370558,
"grad_norm": 4.747298240661621,
"learning_rate": 4.631979695431472e-05,
"loss": 0.4718,
"step": 290
},
{
"epoch": 0.7614213197969543,
"grad_norm": 2.4223101139068604,
"learning_rate": 4.619289340101523e-05,
"loss": 0.4712,
"step": 300
},
{
"epoch": 0.7868020304568528,
"grad_norm": 2.2267863750457764,
"learning_rate": 4.606598984771574e-05,
"loss": 0.5829,
"step": 310
},
{
"epoch": 0.8121827411167513,
"grad_norm": 2.0561490058898926,
"learning_rate": 4.593908629441624e-05,
"loss": 0.4033,
"step": 320
},
{
"epoch": 0.8375634517766497,
"grad_norm": 3.0562074184417725,
"learning_rate": 4.5812182741116755e-05,
"loss": 0.6535,
"step": 330
},
{
"epoch": 0.8629441624365483,
"grad_norm": 3.355173110961914,
"learning_rate": 4.568527918781726e-05,
"loss": 0.4635,
"step": 340
},
{
"epoch": 0.8883248730964467,
"grad_norm": 4.357395172119141,
"learning_rate": 4.555837563451777e-05,
"loss": 0.4995,
"step": 350
},
{
"epoch": 0.9137055837563451,
"grad_norm": 1.0308293104171753,
"learning_rate": 4.543147208121827e-05,
"loss": 0.5207,
"step": 360
},
{
"epoch": 0.9390862944162437,
"grad_norm": 1.9433645009994507,
"learning_rate": 4.5304568527918786e-05,
"loss": 0.2991,
"step": 370
},
{
"epoch": 0.9644670050761421,
"grad_norm": 3.8325693607330322,
"learning_rate": 4.517766497461929e-05,
"loss": 0.6014,
"step": 380
},
{
"epoch": 0.9898477157360406,
"grad_norm": 1.0364559888839722,
"learning_rate": 4.50507614213198e-05,
"loss": 0.3693,
"step": 390
},
{
"epoch": 1.0,
"eval_loss": 0.49558719992637634,
"eval_runtime": 8.8144,
"eval_samples_per_second": 89.4,
"eval_steps_per_second": 11.232,
"step": 394
},
{
"epoch": 1.015228426395939,
"grad_norm": 2.267185926437378,
"learning_rate": 4.492385786802031e-05,
"loss": 0.5173,
"step": 400
},
{
"epoch": 1.0406091370558375,
"grad_norm": 1.7508792877197266,
"learning_rate": 4.479695431472081e-05,
"loss": 0.4722,
"step": 410
},
{
"epoch": 1.0659898477157361,
"grad_norm": 1.0362215042114258,
"learning_rate": 4.467005076142132e-05,
"loss": 0.4053,
"step": 420
},
{
"epoch": 1.0913705583756346,
"grad_norm": 1.5984660387039185,
"learning_rate": 4.454314720812183e-05,
"loss": 0.6716,
"step": 430
},
{
"epoch": 1.116751269035533,
"grad_norm": 2.6108462810516357,
"learning_rate": 4.4416243654822335e-05,
"loss": 0.4829,
"step": 440
},
{
"epoch": 1.1421319796954315,
"grad_norm": 6.843586444854736,
"learning_rate": 4.428934010152285e-05,
"loss": 0.4716,
"step": 450
},
{
"epoch": 1.16751269035533,
"grad_norm": 5.383293151855469,
"learning_rate": 4.416243654822335e-05,
"loss": 0.6196,
"step": 460
},
{
"epoch": 1.1928934010152283,
"grad_norm": 1.4819399118423462,
"learning_rate": 4.403553299492386e-05,
"loss": 0.4594,
"step": 470
},
{
"epoch": 1.218274111675127,
"grad_norm": 3.468229055404663,
"learning_rate": 4.3908629441624365e-05,
"loss": 0.4838,
"step": 480
},
{
"epoch": 1.2436548223350254,
"grad_norm": 3.6312994956970215,
"learning_rate": 4.378172588832488e-05,
"loss": 0.4686,
"step": 490
},
{
"epoch": 1.2690355329949239,
"grad_norm": 1.4617408514022827,
"learning_rate": 4.365482233502538e-05,
"loss": 0.4094,
"step": 500
},
{
"epoch": 1.2944162436548223,
"grad_norm": 0.9887295365333557,
"learning_rate": 4.352791878172589e-05,
"loss": 0.5219,
"step": 510
},
{
"epoch": 1.3197969543147208,
"grad_norm": 1.6651699542999268,
"learning_rate": 4.34010152284264e-05,
"loss": 0.6159,
"step": 520
},
{
"epoch": 1.3451776649746192,
"grad_norm": 2.0405914783477783,
"learning_rate": 4.32741116751269e-05,
"loss": 0.41,
"step": 530
},
{
"epoch": 1.3705583756345177,
"grad_norm": 4.893404483795166,
"learning_rate": 4.3147208121827415e-05,
"loss": 0.5679,
"step": 540
},
{
"epoch": 1.3959390862944163,
"grad_norm": 2.044116735458374,
"learning_rate": 4.302030456852792e-05,
"loss": 0.4693,
"step": 550
},
{
"epoch": 1.4213197969543148,
"grad_norm": 2.040766477584839,
"learning_rate": 4.289340101522843e-05,
"loss": 0.4736,
"step": 560
},
{
"epoch": 1.4467005076142132,
"grad_norm": 1.883665919303894,
"learning_rate": 4.276649746192893e-05,
"loss": 0.6475,
"step": 570
},
{
"epoch": 1.4720812182741116,
"grad_norm": 1.6452559232711792,
"learning_rate": 4.2639593908629446e-05,
"loss": 0.5092,
"step": 580
},
{
"epoch": 1.49746192893401,
"grad_norm": 2.059413194656372,
"learning_rate": 4.251269035532995e-05,
"loss": 0.5469,
"step": 590
},
{
"epoch": 1.5228426395939088,
"grad_norm": 5.789554595947266,
"learning_rate": 4.238578680203046e-05,
"loss": 0.4548,
"step": 600
},
{
"epoch": 1.548223350253807,
"grad_norm": 1.7683823108673096,
"learning_rate": 4.225888324873097e-05,
"loss": 0.3807,
"step": 610
},
{
"epoch": 1.5736040609137056,
"grad_norm": 1.6267879009246826,
"learning_rate": 4.213197969543147e-05,
"loss": 0.3814,
"step": 620
},
{
"epoch": 1.598984771573604,
"grad_norm": 2.024305582046509,
"learning_rate": 4.200507614213198e-05,
"loss": 0.5454,
"step": 630
},
{
"epoch": 1.6243654822335025,
"grad_norm": 2.527076482772827,
"learning_rate": 4.187817258883249e-05,
"loss": 0.5996,
"step": 640
},
{
"epoch": 1.649746192893401,
"grad_norm": 2.8863742351531982,
"learning_rate": 4.1751269035532995e-05,
"loss": 0.4548,
"step": 650
},
{
"epoch": 1.6751269035532994,
"grad_norm": 1.8288835287094116,
"learning_rate": 4.162436548223351e-05,
"loss": 0.3666,
"step": 660
},
{
"epoch": 1.700507614213198,
"grad_norm": 2.004929304122925,
"learning_rate": 4.1497461928934013e-05,
"loss": 0.4643,
"step": 670
},
{
"epoch": 1.7258883248730963,
"grad_norm": 2.2006640434265137,
"learning_rate": 4.137055837563452e-05,
"loss": 0.5934,
"step": 680
},
{
"epoch": 1.751269035532995,
"grad_norm": 1.2337054014205933,
"learning_rate": 4.1243654822335025e-05,
"loss": 0.5315,
"step": 690
},
{
"epoch": 1.7766497461928934,
"grad_norm": 1.163268804550171,
"learning_rate": 4.111675126903554e-05,
"loss": 0.4484,
"step": 700
},
{
"epoch": 1.8020304568527918,
"grad_norm": 1.2907366752624512,
"learning_rate": 4.098984771573604e-05,
"loss": 0.4027,
"step": 710
},
{
"epoch": 1.8274111675126905,
"grad_norm": 1.8477003574371338,
"learning_rate": 4.086294416243655e-05,
"loss": 0.503,
"step": 720
},
{
"epoch": 1.8527918781725887,
"grad_norm": 2.8408010005950928,
"learning_rate": 4.073604060913706e-05,
"loss": 0.5602,
"step": 730
},
{
"epoch": 1.8781725888324874,
"grad_norm": 3.5004968643188477,
"learning_rate": 4.060913705583756e-05,
"loss": 0.5681,
"step": 740
},
{
"epoch": 1.9035532994923858,
"grad_norm": 1.7411813735961914,
"learning_rate": 4.0482233502538075e-05,
"loss": 0.4501,
"step": 750
},
{
"epoch": 1.9289340101522843,
"grad_norm": 4.301488876342773,
"learning_rate": 4.035532994923858e-05,
"loss": 0.5262,
"step": 760
},
{
"epoch": 1.9543147208121827,
"grad_norm": 0.8759877681732178,
"learning_rate": 4.022842639593909e-05,
"loss": 0.4527,
"step": 770
},
{
"epoch": 1.9796954314720812,
"grad_norm": 2.361971855163574,
"learning_rate": 4.010152284263959e-05,
"loss": 0.487,
"step": 780
},
{
"epoch": 2.0,
"eval_loss": 0.4712839722633362,
"eval_runtime": 8.7867,
"eval_samples_per_second": 89.681,
"eval_steps_per_second": 11.267,
"step": 788
},
{
"epoch": 2.00507614213198,
"grad_norm": 2.517071485519409,
"learning_rate": 3.9974619289340106e-05,
"loss": 0.4159,
"step": 790
},
{
"epoch": 2.030456852791878,
"grad_norm": 1.2710137367248535,
"learning_rate": 3.9847715736040605e-05,
"loss": 0.4777,
"step": 800
},
{
"epoch": 2.0558375634517767,
"grad_norm": 3.463624954223633,
"learning_rate": 3.972081218274112e-05,
"loss": 0.4792,
"step": 810
},
{
"epoch": 2.081218274111675,
"grad_norm": 1.5746160745620728,
"learning_rate": 3.959390862944163e-05,
"loss": 0.4457,
"step": 820
},
{
"epoch": 2.1065989847715736,
"grad_norm": 1.3191742897033691,
"learning_rate": 3.946700507614213e-05,
"loss": 0.4615,
"step": 830
},
{
"epoch": 2.1319796954314723,
"grad_norm": 4.860599040985107,
"learning_rate": 3.934010152284264e-05,
"loss": 0.4459,
"step": 840
},
{
"epoch": 2.1573604060913705,
"grad_norm": 2.636868715286255,
"learning_rate": 3.921319796954315e-05,
"loss": 0.537,
"step": 850
},
{
"epoch": 2.182741116751269,
"grad_norm": 1.672642469406128,
"learning_rate": 3.9086294416243655e-05,
"loss": 0.4848,
"step": 860
},
{
"epoch": 2.2081218274111674,
"grad_norm": 8.49184799194336,
"learning_rate": 3.895939086294416e-05,
"loss": 0.4826,
"step": 870
},
{
"epoch": 2.233502538071066,
"grad_norm": 1.6219210624694824,
"learning_rate": 3.8832487309644673e-05,
"loss": 0.3403,
"step": 880
},
{
"epoch": 2.2588832487309647,
"grad_norm": 1.356152892112732,
"learning_rate": 3.870558375634518e-05,
"loss": 0.5415,
"step": 890
},
{
"epoch": 2.284263959390863,
"grad_norm": 3.455899238586426,
"learning_rate": 3.8578680203045685e-05,
"loss": 0.4958,
"step": 900
},
{
"epoch": 2.3096446700507616,
"grad_norm": 3.994067430496216,
"learning_rate": 3.84517766497462e-05,
"loss": 0.4324,
"step": 910
},
{
"epoch": 2.33502538071066,
"grad_norm": 5.774569034576416,
"learning_rate": 3.83248730964467e-05,
"loss": 0.3871,
"step": 920
},
{
"epoch": 2.3604060913705585,
"grad_norm": 4.524348258972168,
"learning_rate": 3.819796954314721e-05,
"loss": 0.5642,
"step": 930
},
{
"epoch": 2.3857868020304567,
"grad_norm": 1.5101829767227173,
"learning_rate": 3.8071065989847716e-05,
"loss": 0.5396,
"step": 940
},
{
"epoch": 2.4111675126903553,
"grad_norm": 1.4824628829956055,
"learning_rate": 3.794416243654822e-05,
"loss": 0.5178,
"step": 950
},
{
"epoch": 2.436548223350254,
"grad_norm": 1.3847397565841675,
"learning_rate": 3.7817258883248735e-05,
"loss": 0.3747,
"step": 960
},
{
"epoch": 2.4619289340101522,
"grad_norm": 2.138828992843628,
"learning_rate": 3.769035532994924e-05,
"loss": 0.5756,
"step": 970
},
{
"epoch": 2.487309644670051,
"grad_norm": 2.0886220932006836,
"learning_rate": 3.756345177664975e-05,
"loss": 0.5496,
"step": 980
},
{
"epoch": 2.512690355329949,
"grad_norm": 2.8054535388946533,
"learning_rate": 3.743654822335025e-05,
"loss": 0.5984,
"step": 990
},
{
"epoch": 2.5380710659898478,
"grad_norm": 3.6035268306732178,
"learning_rate": 3.7309644670050766e-05,
"loss": 0.4475,
"step": 1000
},
{
"epoch": 2.563451776649746,
"grad_norm": 2.931821823120117,
"learning_rate": 3.7182741116751265e-05,
"loss": 0.5179,
"step": 1010
},
{
"epoch": 2.5888324873096447,
"grad_norm": 1.8696964979171753,
"learning_rate": 3.705583756345178e-05,
"loss": 0.6541,
"step": 1020
},
{
"epoch": 2.6142131979695433,
"grad_norm": 1.36885666847229,
"learning_rate": 3.692893401015229e-05,
"loss": 0.3136,
"step": 1030
},
{
"epoch": 2.6395939086294415,
"grad_norm": 3.520601511001587,
"learning_rate": 3.680203045685279e-05,
"loss": 0.4434,
"step": 1040
},
{
"epoch": 2.66497461928934,
"grad_norm": 3.95267915725708,
"learning_rate": 3.66751269035533e-05,
"loss": 0.5012,
"step": 1050
},
{
"epoch": 2.6903553299492384,
"grad_norm": 2.064523935317993,
"learning_rate": 3.654822335025381e-05,
"loss": 0.3251,
"step": 1060
},
{
"epoch": 2.715736040609137,
"grad_norm": 3.3240392208099365,
"learning_rate": 3.6421319796954315e-05,
"loss": 0.7704,
"step": 1070
},
{
"epoch": 2.7411167512690353,
"grad_norm": 3.2816076278686523,
"learning_rate": 3.629441624365482e-05,
"loss": 0.4753,
"step": 1080
},
{
"epoch": 2.766497461928934,
"grad_norm": 2.7639636993408203,
"learning_rate": 3.6167512690355334e-05,
"loss": 0.4443,
"step": 1090
},
{
"epoch": 2.7918781725888326,
"grad_norm": 2.5042450428009033,
"learning_rate": 3.604060913705584e-05,
"loss": 0.4979,
"step": 1100
},
{
"epoch": 2.817258883248731,
"grad_norm": 2.695377826690674,
"learning_rate": 3.5913705583756346e-05,
"loss": 0.4835,
"step": 1110
},
{
"epoch": 2.8426395939086295,
"grad_norm": 2.2236552238464355,
"learning_rate": 3.578680203045686e-05,
"loss": 0.5162,
"step": 1120
},
{
"epoch": 2.868020304568528,
"grad_norm": 5.009507179260254,
"learning_rate": 3.565989847715736e-05,
"loss": 0.4046,
"step": 1130
},
{
"epoch": 2.8934010152284264,
"grad_norm": 2.037398338317871,
"learning_rate": 3.553299492385787e-05,
"loss": 0.4502,
"step": 1140
},
{
"epoch": 2.9187817258883246,
"grad_norm": 5.31472635269165,
"learning_rate": 3.5406091370558376e-05,
"loss": 0.6371,
"step": 1150
},
{
"epoch": 2.9441624365482233,
"grad_norm": 1.666244387626648,
"learning_rate": 3.527918781725888e-05,
"loss": 0.4088,
"step": 1160
},
{
"epoch": 2.969543147208122,
"grad_norm": 4.006988525390625,
"learning_rate": 3.5152284263959395e-05,
"loss": 0.4483,
"step": 1170
},
{
"epoch": 2.99492385786802,
"grad_norm": 2.429619312286377,
"learning_rate": 3.50253807106599e-05,
"loss": 0.4617,
"step": 1180
},
{
"epoch": 3.0,
"eval_loss": 0.5104432106018066,
"eval_runtime": 8.8793,
"eval_samples_per_second": 88.746,
"eval_steps_per_second": 11.15,
"step": 1182
},
{
"epoch": 3.020304568527919,
"grad_norm": 1.9103385210037231,
"learning_rate": 3.489847715736041e-05,
"loss": 0.461,
"step": 1190
},
{
"epoch": 3.045685279187817,
"grad_norm": 2.838453769683838,
"learning_rate": 3.477157360406091e-05,
"loss": 0.4661,
"step": 1200
},
{
"epoch": 3.0710659898477157,
"grad_norm": 2.706493616104126,
"learning_rate": 3.4644670050761426e-05,
"loss": 0.4335,
"step": 1210
},
{
"epoch": 3.0964467005076144,
"grad_norm": 4.179074287414551,
"learning_rate": 3.451776649746193e-05,
"loss": 0.3517,
"step": 1220
},
{
"epoch": 3.1218274111675126,
"grad_norm": 1.1146340370178223,
"learning_rate": 3.439086294416244e-05,
"loss": 0.2261,
"step": 1230
},
{
"epoch": 3.1472081218274113,
"grad_norm": 2.5486109256744385,
"learning_rate": 3.4263959390862944e-05,
"loss": 0.3664,
"step": 1240
},
{
"epoch": 3.1725888324873095,
"grad_norm": 2.886986017227173,
"learning_rate": 3.413705583756345e-05,
"loss": 0.3861,
"step": 1250
},
{
"epoch": 3.197969543147208,
"grad_norm": 3.5457489490509033,
"learning_rate": 3.401015228426396e-05,
"loss": 0.5043,
"step": 1260
},
{
"epoch": 3.223350253807107,
"grad_norm": 4.039999485015869,
"learning_rate": 3.388324873096447e-05,
"loss": 0.3362,
"step": 1270
},
{
"epoch": 3.248730964467005,
"grad_norm": 4.554147720336914,
"learning_rate": 3.3756345177664975e-05,
"loss": 0.4709,
"step": 1280
},
{
"epoch": 3.2741116751269037,
"grad_norm": 2.4214282035827637,
"learning_rate": 3.362944162436548e-05,
"loss": 0.3041,
"step": 1290
},
{
"epoch": 3.299492385786802,
"grad_norm": 4.587721824645996,
"learning_rate": 3.3502538071065994e-05,
"loss": 0.6426,
"step": 1300
},
{
"epoch": 3.3248730964467006,
"grad_norm": 2.0584123134613037,
"learning_rate": 3.33756345177665e-05,
"loss": 0.3861,
"step": 1310
},
{
"epoch": 3.350253807106599,
"grad_norm": 2.152904510498047,
"learning_rate": 3.3248730964467006e-05,
"loss": 0.434,
"step": 1320
},
{
"epoch": 3.3756345177664975,
"grad_norm": 3.3086836338043213,
"learning_rate": 3.312182741116752e-05,
"loss": 0.406,
"step": 1330
},
{
"epoch": 3.401015228426396,
"grad_norm": 2.4695167541503906,
"learning_rate": 3.299492385786802e-05,
"loss": 0.4226,
"step": 1340
},
{
"epoch": 3.4263959390862944,
"grad_norm": 2.482093334197998,
"learning_rate": 3.286802030456853e-05,
"loss": 0.335,
"step": 1350
},
{
"epoch": 3.451776649746193,
"grad_norm": 6.9000959396362305,
"learning_rate": 3.2741116751269036e-05,
"loss": 0.3506,
"step": 1360
},
{
"epoch": 3.4771573604060912,
"grad_norm": 2.6196579933166504,
"learning_rate": 3.261421319796954e-05,
"loss": 0.3377,
"step": 1370
},
{
"epoch": 3.50253807106599,
"grad_norm": 1.8537606000900269,
"learning_rate": 3.248730964467005e-05,
"loss": 0.399,
"step": 1380
},
{
"epoch": 3.527918781725888,
"grad_norm": 7.022123336791992,
"learning_rate": 3.236040609137056e-05,
"loss": 0.4255,
"step": 1390
},
{
"epoch": 3.553299492385787,
"grad_norm": 3.112264394760132,
"learning_rate": 3.223350253807107e-05,
"loss": 0.399,
"step": 1400
},
{
"epoch": 3.5786802030456855,
"grad_norm": 4.884408473968506,
"learning_rate": 3.210659898477157e-05,
"loss": 0.3533,
"step": 1410
},
{
"epoch": 3.6040609137055837,
"grad_norm": 3.7339515686035156,
"learning_rate": 3.1979695431472086e-05,
"loss": 0.4329,
"step": 1420
},
{
"epoch": 3.6294416243654823,
"grad_norm": 11.797500610351562,
"learning_rate": 3.185279187817259e-05,
"loss": 0.4496,
"step": 1430
},
{
"epoch": 3.6548223350253806,
"grad_norm": 2.509971857070923,
"learning_rate": 3.17258883248731e-05,
"loss": 0.3758,
"step": 1440
},
{
"epoch": 3.6802030456852792,
"grad_norm": 2.554452419281006,
"learning_rate": 3.1598984771573604e-05,
"loss": 0.2789,
"step": 1450
},
{
"epoch": 3.7055837563451774,
"grad_norm": 13.046540260314941,
"learning_rate": 3.147208121827411e-05,
"loss": 0.52,
"step": 1460
},
{
"epoch": 3.730964467005076,
"grad_norm": 6.821984767913818,
"learning_rate": 3.134517766497462e-05,
"loss": 0.4285,
"step": 1470
},
{
"epoch": 3.7563451776649748,
"grad_norm": 2.692779064178467,
"learning_rate": 3.121827411167513e-05,
"loss": 0.2977,
"step": 1480
},
{
"epoch": 3.781725888324873,
"grad_norm": 3.510523796081543,
"learning_rate": 3.1091370558375635e-05,
"loss": 0.4887,
"step": 1490
},
{
"epoch": 3.8071065989847717,
"grad_norm": 4.374626159667969,
"learning_rate": 3.096446700507614e-05,
"loss": 0.2093,
"step": 1500
},
{
"epoch": 3.8324873096446703,
"grad_norm": 0.6560042500495911,
"learning_rate": 3.0837563451776654e-05,
"loss": 0.613,
"step": 1510
},
{
"epoch": 3.8578680203045685,
"grad_norm": 4.9201483726501465,
"learning_rate": 3.071065989847716e-05,
"loss": 0.4483,
"step": 1520
},
{
"epoch": 3.8832487309644668,
"grad_norm": 2.497208833694458,
"learning_rate": 3.0583756345177666e-05,
"loss": 0.4434,
"step": 1530
},
{
"epoch": 3.9086294416243654,
"grad_norm": 5.912675380706787,
"learning_rate": 3.0456852791878175e-05,
"loss": 0.4259,
"step": 1540
},
{
"epoch": 3.934010152284264,
"grad_norm": 1.8950650691986084,
"learning_rate": 3.032994923857868e-05,
"loss": 0.4649,
"step": 1550
},
{
"epoch": 3.9593908629441623,
"grad_norm": 15.265602111816406,
"learning_rate": 3.020304568527919e-05,
"loss": 0.4176,
"step": 1560
},
{
"epoch": 3.984771573604061,
"grad_norm": 6.062721252441406,
"learning_rate": 3.0076142131979696e-05,
"loss": 0.3854,
"step": 1570
},
{
"epoch": 4.0,
"eval_loss": 0.5223032832145691,
"eval_runtime": 8.7536,
"eval_samples_per_second": 90.02,
"eval_steps_per_second": 11.31,
"step": 1576
},
{
"epoch": 4.01015228426396,
"grad_norm": 4.975412368774414,
"learning_rate": 2.9949238578680206e-05,
"loss": 0.4798,
"step": 1580
},
{
"epoch": 4.035532994923858,
"grad_norm": 5.022281646728516,
"learning_rate": 2.982233502538071e-05,
"loss": 0.3322,
"step": 1590
},
{
"epoch": 4.060913705583756,
"grad_norm": 5.229846000671387,
"learning_rate": 2.969543147208122e-05,
"loss": 0.2691,
"step": 1600
},
{
"epoch": 4.086294416243655,
"grad_norm": 21.07980728149414,
"learning_rate": 2.956852791878173e-05,
"loss": 0.285,
"step": 1610
},
{
"epoch": 4.111675126903553,
"grad_norm": 28.23366355895996,
"learning_rate": 2.9441624365482233e-05,
"loss": 0.1946,
"step": 1620
},
{
"epoch": 4.137055837563452,
"grad_norm": 0.49203088879585266,
"learning_rate": 2.9314720812182743e-05,
"loss": 0.2176,
"step": 1630
},
{
"epoch": 4.16243654822335,
"grad_norm": 6.196091175079346,
"learning_rate": 2.918781725888325e-05,
"loss": 0.1592,
"step": 1640
},
{
"epoch": 4.187817258883249,
"grad_norm": 0.43765848875045776,
"learning_rate": 2.9060913705583758e-05,
"loss": 0.3764,
"step": 1650
},
{
"epoch": 4.213197969543147,
"grad_norm": 4.543421745300293,
"learning_rate": 2.8934010152284264e-05,
"loss": 0.3371,
"step": 1660
},
{
"epoch": 4.238578680203045,
"grad_norm": 2.611579418182373,
"learning_rate": 2.8807106598984774e-05,
"loss": 0.3052,
"step": 1670
},
{
"epoch": 4.2639593908629445,
"grad_norm": 3.3673095703125,
"learning_rate": 2.8680203045685283e-05,
"loss": 0.1776,
"step": 1680
},
{
"epoch": 4.289340101522843,
"grad_norm": 0.7387822270393372,
"learning_rate": 2.855329949238579e-05,
"loss": 0.3827,
"step": 1690
},
{
"epoch": 4.314720812182741,
"grad_norm": 16.637300491333008,
"learning_rate": 2.84263959390863e-05,
"loss": 0.3993,
"step": 1700
},
{
"epoch": 4.340101522842639,
"grad_norm": 6.28633975982666,
"learning_rate": 2.82994923857868e-05,
"loss": 0.3733,
"step": 1710
},
{
"epoch": 4.365482233502538,
"grad_norm": 3.3295745849609375,
"learning_rate": 2.8172588832487314e-05,
"loss": 0.1166,
"step": 1720
},
{
"epoch": 4.3908629441624365,
"grad_norm": 14.652837753295898,
"learning_rate": 2.8045685279187816e-05,
"loss": 0.3326,
"step": 1730
},
{
"epoch": 4.416243654822335,
"grad_norm": 31.232574462890625,
"learning_rate": 2.7918781725888326e-05,
"loss": 0.2127,
"step": 1740
},
{
"epoch": 4.441624365482234,
"grad_norm": 12.528191566467285,
"learning_rate": 2.7791878172588832e-05,
"loss": 0.4061,
"step": 1750
},
{
"epoch": 4.467005076142132,
"grad_norm": 1.7689141035079956,
"learning_rate": 2.766497461928934e-05,
"loss": 0.3366,
"step": 1760
},
{
"epoch": 4.49238578680203,
"grad_norm": 4.724018096923828,
"learning_rate": 2.753807106598985e-05,
"loss": 0.2359,
"step": 1770
},
{
"epoch": 4.517766497461929,
"grad_norm": 1.561919093132019,
"learning_rate": 2.7411167512690357e-05,
"loss": 0.3106,
"step": 1780
},
{
"epoch": 4.543147208121828,
"grad_norm": 5.754579067230225,
"learning_rate": 2.7284263959390866e-05,
"loss": 0.2982,
"step": 1790
},
{
"epoch": 4.568527918781726,
"grad_norm": 1.181515097618103,
"learning_rate": 2.715736040609137e-05,
"loss": 0.2384,
"step": 1800
},
{
"epoch": 4.593908629441624,
"grad_norm": 7.012800693511963,
"learning_rate": 2.703045685279188e-05,
"loss": 0.394,
"step": 1810
},
{
"epoch": 4.619289340101523,
"grad_norm": 0.49557140469551086,
"learning_rate": 2.6903553299492384e-05,
"loss": 0.2869,
"step": 1820
},
{
"epoch": 4.644670050761421,
"grad_norm": 3.6110830307006836,
"learning_rate": 2.6776649746192893e-05,
"loss": 0.3724,
"step": 1830
},
{
"epoch": 4.67005076142132,
"grad_norm": 3.3351943492889404,
"learning_rate": 2.6649746192893406e-05,
"loss": 0.2966,
"step": 1840
},
{
"epoch": 4.695431472081218,
"grad_norm": 1.342678427696228,
"learning_rate": 2.652284263959391e-05,
"loss": 0.3826,
"step": 1850
},
{
"epoch": 4.720812182741117,
"grad_norm": 0.4344848096370697,
"learning_rate": 2.6395939086294418e-05,
"loss": 0.1962,
"step": 1860
},
{
"epoch": 4.746192893401015,
"grad_norm": 56.233985900878906,
"learning_rate": 2.6269035532994924e-05,
"loss": 0.3758,
"step": 1870
},
{
"epoch": 4.771573604060913,
"grad_norm": 10.647058486938477,
"learning_rate": 2.6142131979695434e-05,
"loss": 0.356,
"step": 1880
},
{
"epoch": 4.7969543147208125,
"grad_norm": 0.9422564506530762,
"learning_rate": 2.6015228426395936e-05,
"loss": 0.2271,
"step": 1890
},
{
"epoch": 4.822335025380711,
"grad_norm": 0.22354674339294434,
"learning_rate": 2.588832487309645e-05,
"loss": 0.1374,
"step": 1900
},
{
"epoch": 4.847715736040609,
"grad_norm": 8.779372215270996,
"learning_rate": 2.576142131979696e-05,
"loss": 0.329,
"step": 1910
},
{
"epoch": 4.873096446700508,
"grad_norm": 41.76162338256836,
"learning_rate": 2.563451776649746e-05,
"loss": 0.5914,
"step": 1920
},
{
"epoch": 4.898477157360406,
"grad_norm": 26.878578186035156,
"learning_rate": 2.5507614213197974e-05,
"loss": 0.5029,
"step": 1930
},
{
"epoch": 4.9238578680203045,
"grad_norm": 0.41534122824668884,
"learning_rate": 2.5380710659898476e-05,
"loss": 0.1997,
"step": 1940
},
{
"epoch": 4.949238578680203,
"grad_norm": 4.708491325378418,
"learning_rate": 2.5253807106598986e-05,
"loss": 0.3183,
"step": 1950
},
{
"epoch": 4.974619289340102,
"grad_norm": 0.31694722175598145,
"learning_rate": 2.5126903553299492e-05,
"loss": 0.3627,
"step": 1960
},
{
"epoch": 5.0,
"grad_norm": 2.7903828620910645,
"learning_rate": 2.5e-05,
"loss": 0.454,
"step": 1970
},
{
"epoch": 5.0,
"eval_loss": 0.6968420743942261,
"eval_runtime": 8.7659,
"eval_samples_per_second": 89.894,
"eval_steps_per_second": 11.294,
"step": 1970
},
{
"epoch": 5.025380710659898,
"grad_norm": 0.33573099970817566,
"learning_rate": 2.4873096446700507e-05,
"loss": 0.0922,
"step": 1980
},
{
"epoch": 5.050761421319797,
"grad_norm": 0.38098305463790894,
"learning_rate": 2.4746192893401017e-05,
"loss": 0.1296,
"step": 1990
},
{
"epoch": 5.0761421319796955,
"grad_norm": 6.538486480712891,
"learning_rate": 2.4619289340101523e-05,
"loss": 0.2388,
"step": 2000
},
{
"epoch": 5.101522842639594,
"grad_norm": 0.3389257788658142,
"learning_rate": 2.4492385786802032e-05,
"loss": 0.1525,
"step": 2010
},
{
"epoch": 5.126903553299492,
"grad_norm": 29.134950637817383,
"learning_rate": 2.436548223350254e-05,
"loss": 0.2023,
"step": 2020
},
{
"epoch": 5.152284263959391,
"grad_norm": 2.5168628692626953,
"learning_rate": 2.4238578680203047e-05,
"loss": 0.1126,
"step": 2030
},
{
"epoch": 5.177664974619289,
"grad_norm": 0.4162174463272095,
"learning_rate": 2.4111675126903553e-05,
"loss": 0.1466,
"step": 2040
},
{
"epoch": 5.2030456852791875,
"grad_norm": 0.08261796087026596,
"learning_rate": 2.3984771573604063e-05,
"loss": 0.1032,
"step": 2050
},
{
"epoch": 5.228426395939087,
"grad_norm": 0.13249030709266663,
"learning_rate": 2.385786802030457e-05,
"loss": 0.0117,
"step": 2060
},
{
"epoch": 5.253807106598985,
"grad_norm": 54.72965621948242,
"learning_rate": 2.3730964467005075e-05,
"loss": 0.1383,
"step": 2070
},
{
"epoch": 5.279187817258883,
"grad_norm": 10.03510856628418,
"learning_rate": 2.3604060913705588e-05,
"loss": 0.3034,
"step": 2080
},
{
"epoch": 5.304568527918782,
"grad_norm": 3.790813446044922,
"learning_rate": 2.3477157360406094e-05,
"loss": 0.0487,
"step": 2090
},
{
"epoch": 5.32994923857868,
"grad_norm": 10.79232120513916,
"learning_rate": 2.33502538071066e-05,
"loss": 0.1897,
"step": 2100
},
{
"epoch": 5.355329949238579,
"grad_norm": 3.8837833404541016,
"learning_rate": 2.322335025380711e-05,
"loss": 0.5641,
"step": 2110
},
{
"epoch": 5.380710659898477,
"grad_norm": 0.5432278513908386,
"learning_rate": 2.3096446700507615e-05,
"loss": 0.3232,
"step": 2120
},
{
"epoch": 5.406091370558376,
"grad_norm": 2.8115179538726807,
"learning_rate": 2.296954314720812e-05,
"loss": 0.2467,
"step": 2130
},
{
"epoch": 5.431472081218274,
"grad_norm": 0.23173533380031586,
"learning_rate": 2.284263959390863e-05,
"loss": 0.0366,
"step": 2140
},
{
"epoch": 5.456852791878172,
"grad_norm": 0.22747744619846344,
"learning_rate": 2.2715736040609136e-05,
"loss": 0.2833,
"step": 2150
},
{
"epoch": 5.482233502538071,
"grad_norm": 0.18748463690280914,
"learning_rate": 2.2588832487309646e-05,
"loss": 0.2338,
"step": 2160
},
{
"epoch": 5.50761421319797,
"grad_norm": 0.2156940996646881,
"learning_rate": 2.2461928934010155e-05,
"loss": 0.0311,
"step": 2170
},
{
"epoch": 5.532994923857868,
"grad_norm": 26.930912017822266,
"learning_rate": 2.233502538071066e-05,
"loss": 0.2466,
"step": 2180
},
{
"epoch": 5.558375634517766,
"grad_norm": 23.53707504272461,
"learning_rate": 2.2208121827411167e-05,
"loss": 0.2277,
"step": 2190
},
{
"epoch": 5.583756345177665,
"grad_norm": 6.032989025115967,
"learning_rate": 2.2081218274111677e-05,
"loss": 0.1784,
"step": 2200
},
{
"epoch": 5.6091370558375635,
"grad_norm": 15.71194076538086,
"learning_rate": 2.1954314720812183e-05,
"loss": 0.2382,
"step": 2210
},
{
"epoch": 5.634517766497462,
"grad_norm": 6.757187366485596,
"learning_rate": 2.182741116751269e-05,
"loss": 0.2233,
"step": 2220
},
{
"epoch": 5.659898477157361,
"grad_norm": 6.697112560272217,
"learning_rate": 2.17005076142132e-05,
"loss": 0.2094,
"step": 2230
},
{
"epoch": 5.685279187817259,
"grad_norm": 12.990015983581543,
"learning_rate": 2.1573604060913707e-05,
"loss": 0.167,
"step": 2240
},
{
"epoch": 5.710659898477157,
"grad_norm": 0.234835684299469,
"learning_rate": 2.1446700507614213e-05,
"loss": 0.1446,
"step": 2250
},
{
"epoch": 5.7360406091370555,
"grad_norm": 6.382123947143555,
"learning_rate": 2.1319796954314723e-05,
"loss": 0.2736,
"step": 2260
},
{
"epoch": 5.761421319796955,
"grad_norm": 0.26427528262138367,
"learning_rate": 2.119289340101523e-05,
"loss": 0.1443,
"step": 2270
},
{
"epoch": 5.786802030456853,
"grad_norm": 0.2004345953464508,
"learning_rate": 2.1065989847715735e-05,
"loss": 0.1685,
"step": 2280
},
{
"epoch": 5.812182741116751,
"grad_norm": 0.21756359934806824,
"learning_rate": 2.0939086294416244e-05,
"loss": 0.385,
"step": 2290
},
{
"epoch": 5.837563451776649,
"grad_norm": 5.085987567901611,
"learning_rate": 2.0812182741116754e-05,
"loss": 0.233,
"step": 2300
},
{
"epoch": 5.862944162436548,
"grad_norm": 0.2375006377696991,
"learning_rate": 2.068527918781726e-05,
"loss": 0.2106,
"step": 2310
},
{
"epoch": 5.888324873096447,
"grad_norm": 11.780427932739258,
"learning_rate": 2.055837563451777e-05,
"loss": 0.2142,
"step": 2320
},
{
"epoch": 5.913705583756345,
"grad_norm": 1.2173970937728882,
"learning_rate": 2.0431472081218275e-05,
"loss": 0.0754,
"step": 2330
},
{
"epoch": 5.939086294416244,
"grad_norm": 6.028761386871338,
"learning_rate": 2.030456852791878e-05,
"loss": 0.4157,
"step": 2340
},
{
"epoch": 5.964467005076142,
"grad_norm": 19.581605911254883,
"learning_rate": 2.017766497461929e-05,
"loss": 0.1866,
"step": 2350
},
{
"epoch": 5.98984771573604,
"grad_norm": 0.22685207426548004,
"learning_rate": 2.0050761421319797e-05,
"loss": 0.2823,
"step": 2360
},
{
"epoch": 6.0,
"eval_loss": 0.9028552770614624,
"eval_runtime": 8.7725,
"eval_samples_per_second": 89.826,
"eval_steps_per_second": 11.285,
"step": 2364
},
{
"epoch": 6.0152284263959395,
"grad_norm": 0.280078649520874,
"learning_rate": 1.9923857868020303e-05,
"loss": 0.1616,
"step": 2370
},
{
"epoch": 6.040609137055838,
"grad_norm": 0.17733518779277802,
"learning_rate": 1.9796954314720815e-05,
"loss": 0.0129,
"step": 2380
},
{
"epoch": 6.065989847715736,
"grad_norm": 4.652949333190918,
"learning_rate": 1.967005076142132e-05,
"loss": 0.2607,
"step": 2390
},
{
"epoch": 6.091370558375634,
"grad_norm": 17.740243911743164,
"learning_rate": 1.9543147208121827e-05,
"loss": 0.2297,
"step": 2400
},
{
"epoch": 6.116751269035533,
"grad_norm": 59.82321548461914,
"learning_rate": 1.9416243654822337e-05,
"loss": 0.2063,
"step": 2410
},
{
"epoch": 6.1421319796954315,
"grad_norm": 0.1367008090019226,
"learning_rate": 1.9289340101522843e-05,
"loss": 0.1211,
"step": 2420
},
{
"epoch": 6.16751269035533,
"grad_norm": 0.14475467801094055,
"learning_rate": 1.916243654822335e-05,
"loss": 0.0078,
"step": 2430
},
{
"epoch": 6.192893401015229,
"grad_norm": 14.927183151245117,
"learning_rate": 1.9035532994923858e-05,
"loss": 0.1697,
"step": 2440
},
{
"epoch": 6.218274111675127,
"grad_norm": 0.11621695011854172,
"learning_rate": 1.8908629441624368e-05,
"loss": 0.0255,
"step": 2450
},
{
"epoch": 6.243654822335025,
"grad_norm": 0.12786021828651428,
"learning_rate": 1.8781725888324874e-05,
"loss": 0.008,
"step": 2460
},
{
"epoch": 6.269035532994923,
"grad_norm": 0.07750383019447327,
"learning_rate": 1.8654822335025383e-05,
"loss": 0.21,
"step": 2470
},
{
"epoch": 6.2944162436548226,
"grad_norm": 0.11915043741464615,
"learning_rate": 1.852791878172589e-05,
"loss": 0.0661,
"step": 2480
},
{
"epoch": 6.319796954314721,
"grad_norm": 0.09122921526432037,
"learning_rate": 1.8401015228426395e-05,
"loss": 0.0079,
"step": 2490
},
{
"epoch": 6.345177664974619,
"grad_norm": 0.08648554980754852,
"learning_rate": 1.8274111675126904e-05,
"loss": 0.121,
"step": 2500
},
{
"epoch": 6.370558375634518,
"grad_norm": 0.13753150403499603,
"learning_rate": 1.814720812182741e-05,
"loss": 0.0717,
"step": 2510
},
{
"epoch": 6.395939086294416,
"grad_norm": 20.555654525756836,
"learning_rate": 1.802030456852792e-05,
"loss": 0.0759,
"step": 2520
},
{
"epoch": 6.4213197969543145,
"grad_norm": 0.07365628331899643,
"learning_rate": 1.789340101522843e-05,
"loss": 0.0801,
"step": 2530
},
{
"epoch": 6.446700507614214,
"grad_norm": 0.05513671413064003,
"learning_rate": 1.7766497461928935e-05,
"loss": 0.0839,
"step": 2540
},
{
"epoch": 6.472081218274112,
"grad_norm": 1.4962226152420044,
"learning_rate": 1.763959390862944e-05,
"loss": 0.1288,
"step": 2550
},
{
"epoch": 6.49746192893401,
"grad_norm": 10.59736442565918,
"learning_rate": 1.751269035532995e-05,
"loss": 0.1318,
"step": 2560
},
{
"epoch": 6.522842639593908,
"grad_norm": 0.06739044189453125,
"learning_rate": 1.7385786802030457e-05,
"loss": 0.1288,
"step": 2570
},
{
"epoch": 6.548223350253807,
"grad_norm": 3.2351067066192627,
"learning_rate": 1.7258883248730966e-05,
"loss": 0.0801,
"step": 2580
},
{
"epoch": 6.573604060913706,
"grad_norm": 0.10322804003953934,
"learning_rate": 1.7131979695431472e-05,
"loss": 0.0687,
"step": 2590
},
{
"epoch": 6.598984771573604,
"grad_norm": 1.834286093711853,
"learning_rate": 1.700507614213198e-05,
"loss": 0.1842,
"step": 2600
},
{
"epoch": 6.624365482233502,
"grad_norm": 0.10404256731271744,
"learning_rate": 1.6878172588832487e-05,
"loss": 0.2148,
"step": 2610
},
{
"epoch": 6.649746192893401,
"grad_norm": 0.11388051509857178,
"learning_rate": 1.6751269035532997e-05,
"loss": 0.2577,
"step": 2620
},
{
"epoch": 6.675126903553299,
"grad_norm": 0.1220388114452362,
"learning_rate": 1.6624365482233503e-05,
"loss": 0.0425,
"step": 2630
},
{
"epoch": 6.700507614213198,
"grad_norm": 18.549720764160156,
"learning_rate": 1.649746192893401e-05,
"loss": 0.2702,
"step": 2640
},
{
"epoch": 6.725888324873097,
"grad_norm": 0.12684045732021332,
"learning_rate": 1.6370558375634518e-05,
"loss": 0.334,
"step": 2650
},
{
"epoch": 6.751269035532995,
"grad_norm": 0.27108675241470337,
"learning_rate": 1.6243654822335024e-05,
"loss": 0.1592,
"step": 2660
},
{
"epoch": 6.776649746192893,
"grad_norm": 0.17042884230613708,
"learning_rate": 1.6116751269035534e-05,
"loss": 0.0726,
"step": 2670
},
{
"epoch": 6.802030456852792,
"grad_norm": 0.11051168292760849,
"learning_rate": 1.5989847715736043e-05,
"loss": 0.0702,
"step": 2680
},
{
"epoch": 6.8274111675126905,
"grad_norm": 0.11623559892177582,
"learning_rate": 1.586294416243655e-05,
"loss": 0.1979,
"step": 2690
},
{
"epoch": 6.852791878172589,
"grad_norm": 0.16744251549243927,
"learning_rate": 1.5736040609137055e-05,
"loss": 0.1624,
"step": 2700
},
{
"epoch": 6.878172588832487,
"grad_norm": 62.56240463256836,
"learning_rate": 1.5609137055837564e-05,
"loss": 0.1029,
"step": 2710
},
{
"epoch": 6.903553299492386,
"grad_norm": 0.11622487008571625,
"learning_rate": 1.548223350253807e-05,
"loss": 0.0093,
"step": 2720
},
{
"epoch": 6.928934010152284,
"grad_norm": 0.09292230755090714,
"learning_rate": 1.535532994923858e-05,
"loss": 0.1308,
"step": 2730
},
{
"epoch": 6.9543147208121825,
"grad_norm": 0.20956498384475708,
"learning_rate": 1.5228426395939088e-05,
"loss": 0.0052,
"step": 2740
},
{
"epoch": 6.979695431472082,
"grad_norm": 44.860416412353516,
"learning_rate": 1.5101522842639595e-05,
"loss": 0.2451,
"step": 2750
},
{
"epoch": 7.0,
"eval_loss": 1.0761854648590088,
"eval_runtime": 8.8065,
"eval_samples_per_second": 89.479,
"eval_steps_per_second": 11.242,
"step": 2758
},
{
"epoch": 7.00507614213198,
"grad_norm": 0.09350736439228058,
"learning_rate": 1.4974619289340103e-05,
"loss": 0.0638,
"step": 2760
},
{
"epoch": 7.030456852791878,
"grad_norm": 3.844224214553833,
"learning_rate": 1.484771573604061e-05,
"loss": 0.0735,
"step": 2770
},
{
"epoch": 7.055837563451776,
"grad_norm": 0.09096001088619232,
"learning_rate": 1.4720812182741117e-05,
"loss": 0.0736,
"step": 2780
},
{
"epoch": 7.081218274111675,
"grad_norm": 0.06552541255950928,
"learning_rate": 1.4593908629441624e-05,
"loss": 0.0525,
"step": 2790
},
{
"epoch": 7.106598984771574,
"grad_norm": 0.10838861763477325,
"learning_rate": 1.4467005076142132e-05,
"loss": 0.4002,
"step": 2800
},
{
"epoch": 7.131979695431472,
"grad_norm": 0.12170158326625824,
"learning_rate": 1.4340101522842641e-05,
"loss": 0.1041,
"step": 2810
},
{
"epoch": 7.157360406091371,
"grad_norm": 0.15080027282238007,
"learning_rate": 1.421319796954315e-05,
"loss": 0.085,
"step": 2820
},
{
"epoch": 7.182741116751269,
"grad_norm": 0.13942450284957886,
"learning_rate": 1.4086294416243657e-05,
"loss": 0.007,
"step": 2830
},
{
"epoch": 7.208121827411167,
"grad_norm": 0.09515988826751709,
"learning_rate": 1.3959390862944163e-05,
"loss": 0.1357,
"step": 2840
},
{
"epoch": 7.233502538071066,
"grad_norm": 0.09349621832370758,
"learning_rate": 1.383248730964467e-05,
"loss": 0.0644,
"step": 2850
},
{
"epoch": 7.258883248730965,
"grad_norm": 0.25502246618270874,
"learning_rate": 1.3705583756345178e-05,
"loss": 0.0059,
"step": 2860
},
{
"epoch": 7.284263959390863,
"grad_norm": 5.170173168182373,
"learning_rate": 1.3578680203045684e-05,
"loss": 0.072,
"step": 2870
},
{
"epoch": 7.309644670050761,
"grad_norm": 0.09840302914381027,
"learning_rate": 1.3451776649746192e-05,
"loss": 0.1308,
"step": 2880
},
{
"epoch": 7.33502538071066,
"grad_norm": 0.07067205011844635,
"learning_rate": 1.3324873096446703e-05,
"loss": 0.1064,
"step": 2890
},
{
"epoch": 7.3604060913705585,
"grad_norm": 2.9675955772399902,
"learning_rate": 1.3197969543147209e-05,
"loss": 0.1342,
"step": 2900
},
{
"epoch": 7.385786802030457,
"grad_norm": 0.07539036124944687,
"learning_rate": 1.3071065989847717e-05,
"loss": 0.0642,
"step": 2910
},
{
"epoch": 7.411167512690355,
"grad_norm": 0.19838696718215942,
"learning_rate": 1.2944162436548224e-05,
"loss": 0.2019,
"step": 2920
},
{
"epoch": 7.436548223350254,
"grad_norm": 0.08878432959318161,
"learning_rate": 1.281725888324873e-05,
"loss": 0.104,
"step": 2930
},
{
"epoch": 7.461928934010152,
"grad_norm": 0.09144201874732971,
"learning_rate": 1.2690355329949238e-05,
"loss": 0.0046,
"step": 2940
},
{
"epoch": 7.4873096446700504,
"grad_norm": 0.0494062639772892,
"learning_rate": 1.2563451776649746e-05,
"loss": 0.0705,
"step": 2950
},
{
"epoch": 7.5126903553299496,
"grad_norm": 0.1255367249250412,
"learning_rate": 1.2436548223350254e-05,
"loss": 0.0382,
"step": 2960
},
{
"epoch": 7.538071065989848,
"grad_norm": 0.06985324621200562,
"learning_rate": 1.2309644670050761e-05,
"loss": 0.0863,
"step": 2970
},
{
"epoch": 7.563451776649746,
"grad_norm": 0.08622120320796967,
"learning_rate": 1.218274111675127e-05,
"loss": 0.0729,
"step": 2980
},
{
"epoch": 7.588832487309645,
"grad_norm": 0.07268553227186203,
"learning_rate": 1.2055837563451777e-05,
"loss": 0.1981,
"step": 2990
},
{
"epoch": 7.614213197969543,
"grad_norm": 5.232954025268555,
"learning_rate": 1.1928934010152284e-05,
"loss": 0.0885,
"step": 3000
},
{
"epoch": 7.6395939086294415,
"grad_norm": 0.07025603950023651,
"learning_rate": 1.1802030456852794e-05,
"loss": 0.1437,
"step": 3010
},
{
"epoch": 7.66497461928934,
"grad_norm": 3.0778110027313232,
"learning_rate": 1.16751269035533e-05,
"loss": 0.2441,
"step": 3020
},
{
"epoch": 7.690355329949239,
"grad_norm": 0.1191609650850296,
"learning_rate": 1.1548223350253808e-05,
"loss": 0.0717,
"step": 3030
},
{
"epoch": 7.715736040609137,
"grad_norm": 0.07436536252498627,
"learning_rate": 1.1421319796954315e-05,
"loss": 0.0636,
"step": 3040
},
{
"epoch": 7.741116751269035,
"grad_norm": 0.08062940835952759,
"learning_rate": 1.1294416243654823e-05,
"loss": 0.0918,
"step": 3050
},
{
"epoch": 7.7664974619289335,
"grad_norm": 0.06816007196903229,
"learning_rate": 1.116751269035533e-05,
"loss": 0.1376,
"step": 3060
},
{
"epoch": 7.791878172588833,
"grad_norm": 2.068460464477539,
"learning_rate": 1.1040609137055838e-05,
"loss": 0.0642,
"step": 3070
},
{
"epoch": 7.817258883248731,
"grad_norm": 0.1358051747083664,
"learning_rate": 1.0913705583756344e-05,
"loss": 0.1455,
"step": 3080
},
{
"epoch": 7.842639593908629,
"grad_norm": 0.07467884570360184,
"learning_rate": 1.0786802030456854e-05,
"loss": 0.0041,
"step": 3090
},
{
"epoch": 7.868020304568528,
"grad_norm": 0.08706346899271011,
"learning_rate": 1.0659898477157361e-05,
"loss": 0.0838,
"step": 3100
},
{
"epoch": 7.893401015228426,
"grad_norm": 0.07726430892944336,
"learning_rate": 1.0532994923857867e-05,
"loss": 0.0689,
"step": 3110
},
{
"epoch": 7.918781725888325,
"grad_norm": 0.09187603741884232,
"learning_rate": 1.0406091370558377e-05,
"loss": 0.0785,
"step": 3120
},
{
"epoch": 7.944162436548224,
"grad_norm": 0.1465052366256714,
"learning_rate": 1.0279187817258885e-05,
"loss": 0.0778,
"step": 3130
},
{
"epoch": 7.969543147208122,
"grad_norm": 0.06538953632116318,
"learning_rate": 1.015228426395939e-05,
"loss": 0.0577,
"step": 3140
},
{
"epoch": 7.99492385786802,
"grad_norm": 0.17358700931072235,
"learning_rate": 1.0025380710659898e-05,
"loss": 0.0041,
"step": 3150
},
{
"epoch": 8.0,
"eval_loss": 1.1998591423034668,
"eval_runtime": 8.6883,
"eval_samples_per_second": 90.697,
"eval_steps_per_second": 11.395,
"step": 3152
},
{
"epoch": 8.02030456852792,
"grad_norm": 0.05773229897022247,
"learning_rate": 9.898477157360408e-06,
"loss": 0.0856,
"step": 3160
},
{
"epoch": 8.045685279187817,
"grad_norm": 0.07206518203020096,
"learning_rate": 9.771573604060914e-06,
"loss": 0.1201,
"step": 3170
},
{
"epoch": 8.071065989847716,
"grad_norm": 0.07798884809017181,
"learning_rate": 9.644670050761421e-06,
"loss": 0.069,
"step": 3180
},
{
"epoch": 8.096446700507615,
"grad_norm": 0.07664274424314499,
"learning_rate": 9.517766497461929e-06,
"loss": 0.0036,
"step": 3190
},
{
"epoch": 8.121827411167512,
"grad_norm": 0.050856612622737885,
"learning_rate": 9.390862944162437e-06,
"loss": 0.003,
"step": 3200
},
{
"epoch": 8.147208121827411,
"grad_norm": 0.0529194138944149,
"learning_rate": 9.263959390862944e-06,
"loss": 0.1412,
"step": 3210
},
{
"epoch": 8.17258883248731,
"grad_norm": 13.00708293914795,
"learning_rate": 9.137055837563452e-06,
"loss": 0.0513,
"step": 3220
},
{
"epoch": 8.197969543147208,
"grad_norm": 0.06696105003356934,
"learning_rate": 9.01015228426396e-06,
"loss": 0.0704,
"step": 3230
},
{
"epoch": 8.223350253807107,
"grad_norm": 0.06790643185377121,
"learning_rate": 8.883248730964468e-06,
"loss": 0.1119,
"step": 3240
},
{
"epoch": 8.248730964467006,
"grad_norm": 0.06100468337535858,
"learning_rate": 8.756345177664975e-06,
"loss": 0.0032,
"step": 3250
},
{
"epoch": 8.274111675126903,
"grad_norm": 8.70279312133789,
"learning_rate": 8.629441624365483e-06,
"loss": 0.0811,
"step": 3260
},
{
"epoch": 8.299492385786802,
"grad_norm": 0.09535681456327438,
"learning_rate": 8.50253807106599e-06,
"loss": 0.0795,
"step": 3270
},
{
"epoch": 8.3248730964467,
"grad_norm": 0.07176022976636887,
"learning_rate": 8.375634517766498e-06,
"loss": 0.0893,
"step": 3280
},
{
"epoch": 8.350253807106599,
"grad_norm": 0.051213160157203674,
"learning_rate": 8.248730964467004e-06,
"loss": 0.0701,
"step": 3290
},
{
"epoch": 8.375634517766498,
"grad_norm": 0.09963097423315048,
"learning_rate": 8.121827411167512e-06,
"loss": 0.0745,
"step": 3300
},
{
"epoch": 8.401015228426395,
"grad_norm": 0.0691935271024704,
"learning_rate": 7.994923857868022e-06,
"loss": 0.0884,
"step": 3310
},
{
"epoch": 8.426395939086294,
"grad_norm": 0.16761644184589386,
"learning_rate": 7.868020304568528e-06,
"loss": 0.0043,
"step": 3320
},
{
"epoch": 8.451776649746193,
"grad_norm": 0.04886673018336296,
"learning_rate": 7.741116751269035e-06,
"loss": 0.0037,
"step": 3330
},
{
"epoch": 8.47715736040609,
"grad_norm": 0.06593126058578491,
"learning_rate": 7.614213197969544e-06,
"loss": 0.0029,
"step": 3340
},
{
"epoch": 8.50253807106599,
"grad_norm": 8.315119743347168,
"learning_rate": 7.4873096446700515e-06,
"loss": 0.1359,
"step": 3350
},
{
"epoch": 8.527918781725889,
"grad_norm": 0.19459432363510132,
"learning_rate": 7.360406091370558e-06,
"loss": 0.0771,
"step": 3360
},
{
"epoch": 8.553299492385786,
"grad_norm": 0.09421133995056152,
"learning_rate": 7.233502538071066e-06,
"loss": 0.003,
"step": 3370
},
{
"epoch": 8.578680203045685,
"grad_norm": 0.04559624567627907,
"learning_rate": 7.106598984771575e-06,
"loss": 0.0647,
"step": 3380
},
{
"epoch": 8.604060913705585,
"grad_norm": 3.285724639892578,
"learning_rate": 6.9796954314720814e-06,
"loss": 0.0744,
"step": 3390
},
{
"epoch": 8.629441624365482,
"grad_norm": 0.0715055987238884,
"learning_rate": 6.852791878172589e-06,
"loss": 0.0696,
"step": 3400
},
{
"epoch": 8.654822335025381,
"grad_norm": 0.05040270462632179,
"learning_rate": 6.725888324873096e-06,
"loss": 0.0028,
"step": 3410
},
{
"epoch": 8.680203045685278,
"grad_norm": 0.05061913654208183,
"learning_rate": 6.5989847715736045e-06,
"loss": 0.1397,
"step": 3420
},
{
"epoch": 8.705583756345177,
"grad_norm": 0.06146248057484627,
"learning_rate": 6.472081218274112e-06,
"loss": 0.0757,
"step": 3430
},
{
"epoch": 8.730964467005077,
"grad_norm": 0.0398724228143692,
"learning_rate": 6.345177664974619e-06,
"loss": 0.0888,
"step": 3440
},
{
"epoch": 8.756345177664974,
"grad_norm": 0.058130159974098206,
"learning_rate": 6.218274111675127e-06,
"loss": 0.0714,
"step": 3450
},
{
"epoch": 8.781725888324873,
"grad_norm": 0.06121835485100746,
"learning_rate": 6.091370558375635e-06,
"loss": 0.0329,
"step": 3460
},
{
"epoch": 8.807106598984772,
"grad_norm": 0.048920340836048126,
"learning_rate": 5.964467005076142e-06,
"loss": 0.0032,
"step": 3470
},
{
"epoch": 8.83248730964467,
"grad_norm": 0.05234299972653389,
"learning_rate": 5.83756345177665e-06,
"loss": 0.07,
"step": 3480
},
{
"epoch": 8.857868020304569,
"grad_norm": 0.04120004549622536,
"learning_rate": 5.710659898477158e-06,
"loss": 0.152,
"step": 3490
},
{
"epoch": 8.883248730964468,
"grad_norm": 0.0480622835457325,
"learning_rate": 5.583756345177665e-06,
"loss": 0.162,
"step": 3500
}
],
"logging_steps": 10,
"max_steps": 3940,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 3709087162368000.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}