|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.997953488372093, |
|
"eval_steps": 500, |
|
"global_step": 2013, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.014883720930232559, |
|
"grad_norm": 12.09994395919115, |
|
"learning_rate": 1.655629139072848e-07, |
|
"loss": 0.8894, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.029767441860465118, |
|
"grad_norm": 4.954666557139731, |
|
"learning_rate": 3.311258278145696e-07, |
|
"loss": 0.8403, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.044651162790697675, |
|
"grad_norm": 2.6897749196373066, |
|
"learning_rate": 4.966887417218544e-07, |
|
"loss": 0.7599, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.059534883720930236, |
|
"grad_norm": 1.6864897198632844, |
|
"learning_rate": 6.622516556291392e-07, |
|
"loss": 0.7065, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07441860465116279, |
|
"grad_norm": 1.4408752436652603, |
|
"learning_rate": 8.27814569536424e-07, |
|
"loss": 0.6683, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.08930232558139535, |
|
"grad_norm": 1.531285186081538, |
|
"learning_rate": 9.933774834437087e-07, |
|
"loss": 0.6414, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.10418604651162791, |
|
"grad_norm": 1.3543454625606715, |
|
"learning_rate": 1.1589403973509934e-06, |
|
"loss": 0.6245, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.11906976744186047, |
|
"grad_norm": 1.6805043782242386, |
|
"learning_rate": 1.3245033112582784e-06, |
|
"loss": 0.6067, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.13395348837209303, |
|
"grad_norm": 1.622726415982876, |
|
"learning_rate": 1.490066225165563e-06, |
|
"loss": 0.6026, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.14883720930232558, |
|
"grad_norm": 1.5904822357469823, |
|
"learning_rate": 1.655629139072848e-06, |
|
"loss": 0.5879, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.16372093023255813, |
|
"grad_norm": 1.7535371189936502, |
|
"learning_rate": 1.8211920529801325e-06, |
|
"loss": 0.5868, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.1786046511627907, |
|
"grad_norm": 1.5177280883443078, |
|
"learning_rate": 1.9867549668874175e-06, |
|
"loss": 0.5737, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.19348837209302325, |
|
"grad_norm": 1.768921545248635, |
|
"learning_rate": 2.152317880794702e-06, |
|
"loss": 0.567, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.20837209302325582, |
|
"grad_norm": 2.312126734150201, |
|
"learning_rate": 2.317880794701987e-06, |
|
"loss": 0.5634, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.22325581395348837, |
|
"grad_norm": 2.28355122015822, |
|
"learning_rate": 2.4834437086092716e-06, |
|
"loss": 0.5624, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.23813953488372094, |
|
"grad_norm": 1.5684581849256145, |
|
"learning_rate": 2.6490066225165567e-06, |
|
"loss": 0.5577, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.25302325581395346, |
|
"grad_norm": 2.3117262356536763, |
|
"learning_rate": 2.814569536423841e-06, |
|
"loss": 0.5464, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.26790697674418606, |
|
"grad_norm": 1.5302515797600287, |
|
"learning_rate": 2.980132450331126e-06, |
|
"loss": 0.5475, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.2827906976744186, |
|
"grad_norm": 1.9345332085767333, |
|
"learning_rate": 3.145695364238411e-06, |
|
"loss": 0.5441, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.29767441860465116, |
|
"grad_norm": 2.9098962218672098, |
|
"learning_rate": 3.311258278145696e-06, |
|
"loss": 0.544, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.3125581395348837, |
|
"grad_norm": 2.6881740405161407, |
|
"learning_rate": 3.4768211920529803e-06, |
|
"loss": 0.5329, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.32744186046511625, |
|
"grad_norm": 2.3359559961757843, |
|
"learning_rate": 3.642384105960265e-06, |
|
"loss": 0.5314, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.34232558139534885, |
|
"grad_norm": 2.4955049636721216, |
|
"learning_rate": 3.80794701986755e-06, |
|
"loss": 0.529, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.3572093023255814, |
|
"grad_norm": 2.1711263753421632, |
|
"learning_rate": 3.973509933774835e-06, |
|
"loss": 0.5225, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.37209302325581395, |
|
"grad_norm": 2.694920018594133, |
|
"learning_rate": 4.139072847682119e-06, |
|
"loss": 0.5227, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.3869767441860465, |
|
"grad_norm": 3.0680068154611373, |
|
"learning_rate": 4.304635761589404e-06, |
|
"loss": 0.5238, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.4018604651162791, |
|
"grad_norm": 2.02557092483253, |
|
"learning_rate": 4.4701986754966895e-06, |
|
"loss": 0.517, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.41674418604651164, |
|
"grad_norm": 2.7310686859093916, |
|
"learning_rate": 4.635761589403974e-06, |
|
"loss": 0.5176, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.4316279069767442, |
|
"grad_norm": 2.0826169110471997, |
|
"learning_rate": 4.801324503311259e-06, |
|
"loss": 0.5184, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.44651162790697674, |
|
"grad_norm": 2.972052559490264, |
|
"learning_rate": 4.966887417218543e-06, |
|
"loss": 0.515, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.4613953488372093, |
|
"grad_norm": 2.612798976679404, |
|
"learning_rate": 4.999757269464535e-06, |
|
"loss": 0.5141, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.4762790697674419, |
|
"grad_norm": 2.5363200990684116, |
|
"learning_rate": 4.998771266421582e-06, |
|
"loss": 0.5087, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.49116279069767443, |
|
"grad_norm": 1.9683808505033924, |
|
"learning_rate": 4.997027152360594e-06, |
|
"loss": 0.5114, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.5060465116279069, |
|
"grad_norm": 2.154598782155924, |
|
"learning_rate": 4.994525515261374e-06, |
|
"loss": 0.509, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.5209302325581395, |
|
"grad_norm": 1.9161226824804498, |
|
"learning_rate": 4.991267198481659e-06, |
|
"loss": 0.5058, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.5358139534883721, |
|
"grad_norm": 2.2519599437510753, |
|
"learning_rate": 4.987253300472809e-06, |
|
"loss": 0.5038, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.5506976744186046, |
|
"grad_norm": 1.719862276545399, |
|
"learning_rate": 4.982485174409488e-06, |
|
"loss": 0.4994, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.5655813953488372, |
|
"grad_norm": 2.177940295574328, |
|
"learning_rate": 4.976964427733482e-06, |
|
"loss": 0.5016, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.5804651162790697, |
|
"grad_norm": 1.6346006458180546, |
|
"learning_rate": 4.970692921611799e-06, |
|
"loss": 0.4941, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.5953488372093023, |
|
"grad_norm": 1.7603844394703574, |
|
"learning_rate": 4.963672770309218e-06, |
|
"loss": 0.4994, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.6102325581395349, |
|
"grad_norm": 2.278283898845916, |
|
"learning_rate": 4.955906340475533e-06, |
|
"loss": 0.501, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.6251162790697674, |
|
"grad_norm": 1.5611721745461695, |
|
"learning_rate": 4.947396250347695e-06, |
|
"loss": 0.4963, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.9733224203680875, |
|
"learning_rate": 4.938145368867153e-06, |
|
"loss": 0.4939, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.6548837209302325, |
|
"grad_norm": 1.8779984579315334, |
|
"learning_rate": 4.928156814712661e-06, |
|
"loss": 0.4958, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.6697674418604651, |
|
"grad_norm": 1.9838008724267557, |
|
"learning_rate": 4.917433955248912e-06, |
|
"loss": 0.4882, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.6846511627906977, |
|
"grad_norm": 2.149542021168797, |
|
"learning_rate": 4.905980405391312e-06, |
|
"loss": 0.4934, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.6995348837209302, |
|
"grad_norm": 1.4353286450009755, |
|
"learning_rate": 4.8938000263873265e-06, |
|
"loss": 0.4872, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.7144186046511628, |
|
"grad_norm": 2.312227707270547, |
|
"learning_rate": 4.880896924514751e-06, |
|
"loss": 0.4846, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.7293023255813953, |
|
"grad_norm": 2.5246529283224337, |
|
"learning_rate": 4.8672754496974064e-06, |
|
"loss": 0.4847, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.7441860465116279, |
|
"grad_norm": 1.8274651189787874, |
|
"learning_rate": 4.852940194038676e-06, |
|
"loss": 0.4838, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.7590697674418605, |
|
"grad_norm": 2.8994680346266315, |
|
"learning_rate": 4.837895990273408e-06, |
|
"loss": 0.4875, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.773953488372093, |
|
"grad_norm": 1.8666092756187436, |
|
"learning_rate": 4.822147910138687e-06, |
|
"loss": 0.4873, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.7888372093023256, |
|
"grad_norm": 1.9992173556155106, |
|
"learning_rate": 4.8057012626640435e-06, |
|
"loss": 0.4741, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.8037209302325582, |
|
"grad_norm": 2.057376829586277, |
|
"learning_rate": 4.788561592381664e-06, |
|
"loss": 0.4814, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.8186046511627907, |
|
"grad_norm": 2.6795388861334515, |
|
"learning_rate": 4.770734677457199e-06, |
|
"loss": 0.4775, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.8334883720930233, |
|
"grad_norm": 2.7439384401468776, |
|
"learning_rate": 4.752226527741813e-06, |
|
"loss": 0.4789, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.8483720930232558, |
|
"grad_norm": 1.8800574954738543, |
|
"learning_rate": 4.733043382746139e-06, |
|
"loss": 0.4764, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.8632558139534884, |
|
"grad_norm": 1.9323414743910012, |
|
"learning_rate": 4.713191709536792e-06, |
|
"loss": 0.4712, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.878139534883721, |
|
"grad_norm": 2.3312096463084, |
|
"learning_rate": 4.692678200556178e-06, |
|
"loss": 0.4735, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.8930232558139535, |
|
"grad_norm": 2.159350507916537, |
|
"learning_rate": 4.671509771366324e-06, |
|
"loss": 0.4746, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.9079069767441861, |
|
"grad_norm": 1.7675503743690197, |
|
"learning_rate": 4.649693558317485e-06, |
|
"loss": 0.4698, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.9227906976744186, |
|
"grad_norm": 2.2459263091128685, |
|
"learning_rate": 4.627236916142326e-06, |
|
"loss": 0.4678, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.9376744186046512, |
|
"grad_norm": 2.005145073353852, |
|
"learning_rate": 4.604147415476471e-06, |
|
"loss": 0.4716, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.9525581395348838, |
|
"grad_norm": 2.5633576833945257, |
|
"learning_rate": 4.5804328403062735e-06, |
|
"loss": 0.4712, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.9674418604651163, |
|
"grad_norm": 2.1030324239259937, |
|
"learning_rate": 4.5561011853446686e-06, |
|
"loss": 0.469, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.9823255813953489, |
|
"grad_norm": 2.5648420297297814, |
|
"learning_rate": 4.5311606533359665e-06, |
|
"loss": 0.4661, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.9972093023255814, |
|
"grad_norm": 1.6917829294317552, |
|
"learning_rate": 4.50561965229053e-06, |
|
"loss": 0.4722, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.9986976744186047, |
|
"eval_loss": 0.058389123529195786, |
|
"eval_runtime": 468.1425, |
|
"eval_samples_per_second": 38.67, |
|
"eval_steps_per_second": 0.605, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.0130232558139536, |
|
"grad_norm": 2.2119223606388143, |
|
"learning_rate": 4.479486792650245e-06, |
|
"loss": 0.4052, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.027906976744186, |
|
"grad_norm": 1.7284245139555596, |
|
"learning_rate": 4.452770884385747e-06, |
|
"loss": 0.3906, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.0427906976744186, |
|
"grad_norm": 1.9008131985343542, |
|
"learning_rate": 4.4254809340263875e-06, |
|
"loss": 0.393, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.0576744186046512, |
|
"grad_norm": 1.687438841502772, |
|
"learning_rate": 4.397626141623928e-06, |
|
"loss": 0.3946, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.0725581395348838, |
|
"grad_norm": 1.531354838860777, |
|
"learning_rate": 4.369215897650995e-06, |
|
"loss": 0.3944, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.0874418604651164, |
|
"grad_norm": 1.5159046604752562, |
|
"learning_rate": 4.340259779835348e-06, |
|
"loss": 0.3921, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.1023255813953488, |
|
"grad_norm": 2.2360180914056, |
|
"learning_rate": 4.310767549931009e-06, |
|
"loss": 0.3977, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.1172093023255814, |
|
"grad_norm": 2.0669891250338037, |
|
"learning_rate": 4.28074915042736e-06, |
|
"loss": 0.394, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.132093023255814, |
|
"grad_norm": 2.072678701441777, |
|
"learning_rate": 4.250214701197305e-06, |
|
"loss": 0.3972, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.1469767441860466, |
|
"grad_norm": 1.637900992094672, |
|
"learning_rate": 4.219174496085636e-06, |
|
"loss": 0.3983, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.1618604651162792, |
|
"grad_norm": 1.4911303829134943, |
|
"learning_rate": 4.18763899943875e-06, |
|
"loss": 0.3976, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.1767441860465115, |
|
"grad_norm": 1.4717948476323155, |
|
"learning_rate": 4.155618842576882e-06, |
|
"loss": 0.3946, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.1916279069767441, |
|
"grad_norm": 2.1096638906121017, |
|
"learning_rate": 4.1231248202100474e-06, |
|
"loss": 0.395, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.2065116279069767, |
|
"grad_norm": 1.7307582159948425, |
|
"learning_rate": 4.09016788679891e-06, |
|
"loss": 0.3991, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.2213953488372093, |
|
"grad_norm": 1.8188324807887766, |
|
"learning_rate": 4.056759152861782e-06, |
|
"loss": 0.4013, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.236279069767442, |
|
"grad_norm": 1.6311500740393643, |
|
"learning_rate": 4.022909881229028e-06, |
|
"loss": 0.3951, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.2511627906976743, |
|
"grad_norm": 1.532434286126909, |
|
"learning_rate": 3.988631483246104e-06, |
|
"loss": 0.3981, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.266046511627907, |
|
"grad_norm": 1.5195258602788826, |
|
"learning_rate": 3.953935514926546e-06, |
|
"loss": 0.3962, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.2809302325581395, |
|
"grad_norm": 1.5512026772569776, |
|
"learning_rate": 3.9188336730561745e-06, |
|
"loss": 0.3951, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.2958139534883721, |
|
"grad_norm": 1.5805766911542627, |
|
"learning_rate": 3.8833377912498494e-06, |
|
"loss": 0.3934, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.3106976744186047, |
|
"grad_norm": 1.4470120259789245, |
|
"learning_rate": 3.847459835962095e-06, |
|
"loss": 0.3959, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.3255813953488373, |
|
"grad_norm": 1.4153491164463297, |
|
"learning_rate": 3.8112119024529367e-06, |
|
"loss": 0.3926, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.3404651162790697, |
|
"grad_norm": 1.5623580611007561, |
|
"learning_rate": 3.774606210710323e-06, |
|
"loss": 0.3966, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.3553488372093023, |
|
"grad_norm": 2.0061828482545208, |
|
"learning_rate": 3.737655101330493e-06, |
|
"loss": 0.3963, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.370232558139535, |
|
"grad_norm": 1.6752845300217272, |
|
"learning_rate": 3.700371031357687e-06, |
|
"loss": 0.3982, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.3851162790697673, |
|
"grad_norm": 1.453648031726924, |
|
"learning_rate": 3.6627665700845944e-06, |
|
"loss": 0.3933, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.604604958585821, |
|
"learning_rate": 3.624854394814972e-06, |
|
"loss": 0.3983, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.4148837209302325, |
|
"grad_norm": 1.5686410800038224, |
|
"learning_rate": 3.5866472865898326e-06, |
|
"loss": 0.3947, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.4297674418604651, |
|
"grad_norm": 1.4681023810304712, |
|
"learning_rate": 3.5481581258786783e-06, |
|
"loss": 0.3923, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.4446511627906977, |
|
"grad_norm": 1.5058117091538117, |
|
"learning_rate": 3.5093998882372038e-06, |
|
"loss": 0.3888, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.4595348837209303, |
|
"grad_norm": 1.5292352650200591, |
|
"learning_rate": 3.470385639932944e-06, |
|
"loss": 0.3949, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.474418604651163, |
|
"grad_norm": 1.5178272188443236, |
|
"learning_rate": 3.4311285335403495e-06, |
|
"loss": 0.3928, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.4893023255813953, |
|
"grad_norm": 1.5454271963199493, |
|
"learning_rate": 3.3916418035067527e-06, |
|
"loss": 0.3912, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.504186046511628, |
|
"grad_norm": 1.5155446625582503, |
|
"learning_rate": 3.3519387616907477e-06, |
|
"loss": 0.3938, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.5190697674418605, |
|
"grad_norm": 1.4857502104949298, |
|
"learning_rate": 3.312032792874458e-06, |
|
"loss": 0.3911, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.5339534883720929, |
|
"grad_norm": 1.5313728464277128, |
|
"learning_rate": 3.2719373502512326e-06, |
|
"loss": 0.3913, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.5488372093023255, |
|
"grad_norm": 1.4680834760988197, |
|
"learning_rate": 3.231665950890274e-06, |
|
"loss": 0.3884, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.563720930232558, |
|
"grad_norm": 1.4590605145952864, |
|
"learning_rate": 3.1912321711797285e-06, |
|
"loss": 0.3897, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.5786046511627907, |
|
"grad_norm": 1.6407130925832443, |
|
"learning_rate": 3.1506496422497856e-06, |
|
"loss": 0.3926, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.5934883720930233, |
|
"grad_norm": 1.600536994156853, |
|
"learning_rate": 3.1099320453773195e-06, |
|
"loss": 0.3918, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.608372093023256, |
|
"grad_norm": 1.5393558781139518, |
|
"learning_rate": 3.0690931073736233e-06, |
|
"loss": 0.3849, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.6232558139534885, |
|
"grad_norm": 1.477302583613782, |
|
"learning_rate": 3.0281465959567974e-06, |
|
"loss": 0.3926, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.6381395348837209, |
|
"grad_norm": 1.6623553818348256, |
|
"learning_rate": 2.9871063151103395e-06, |
|
"loss": 0.3912, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.6530232558139535, |
|
"grad_norm": 1.9690646932475977, |
|
"learning_rate": 2.945986100429519e-06, |
|
"loss": 0.3883, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.667906976744186, |
|
"grad_norm": 1.8207999668811277, |
|
"learning_rate": 2.9047998144570836e-06, |
|
"loss": 0.3852, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.6827906976744185, |
|
"grad_norm": 1.882433398444346, |
|
"learning_rate": 2.8635613420098922e-06, |
|
"loss": 0.389, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.697674418604651, |
|
"grad_norm": 1.7892281887915626, |
|
"learning_rate": 2.8222845854980257e-06, |
|
"loss": 0.387, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.7125581395348837, |
|
"grad_norm": 1.5794335002166047, |
|
"learning_rate": 2.7809834602379822e-06, |
|
"loss": 0.3883, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.7274418604651163, |
|
"grad_norm": 1.5231365105574948, |
|
"learning_rate": 2.739671889761507e-06, |
|
"loss": 0.3889, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.7423255813953489, |
|
"grad_norm": 1.5759952031593878, |
|
"learning_rate": 2.698363801121661e-06, |
|
"loss": 0.3824, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.7572093023255815, |
|
"grad_norm": 1.6498265119319904, |
|
"learning_rate": 2.657073120197702e-06, |
|
"loss": 0.3874, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.772093023255814, |
|
"grad_norm": 1.5972767247924684, |
|
"learning_rate": 2.6158137670003563e-06, |
|
"loss": 0.383, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.7869767441860465, |
|
"grad_norm": 1.3189887482016291, |
|
"learning_rate": 2.574599650979073e-06, |
|
"loss": 0.382, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.801860465116279, |
|
"grad_norm": 1.403465850397339, |
|
"learning_rate": 2.5334446663328414e-06, |
|
"loss": 0.3847, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.8167441860465117, |
|
"grad_norm": 1.338762290831888, |
|
"learning_rate": 2.492362687326143e-06, |
|
"loss": 0.3836, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.831627906976744, |
|
"grad_norm": 1.3565518289445442, |
|
"learning_rate": 2.4513675636116257e-06, |
|
"loss": 0.3848, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.8465116279069766, |
|
"grad_norm": 1.4242607844677293, |
|
"learning_rate": 2.4104731155610806e-06, |
|
"loss": 0.3796, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.8613953488372093, |
|
"grad_norm": 1.2675438338602558, |
|
"learning_rate": 2.369693129606284e-06, |
|
"loss": 0.3824, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.8762790697674419, |
|
"grad_norm": 1.4458262899421106, |
|
"learning_rate": 2.329041353591282e-06, |
|
"loss": 0.3871, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.8911627906976745, |
|
"grad_norm": 1.331432154246179, |
|
"learning_rate": 2.288531492137687e-06, |
|
"loss": 0.3805, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.906046511627907, |
|
"grad_norm": 1.51201037304163, |
|
"learning_rate": 2.248177202024544e-06, |
|
"loss": 0.382, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.9209302325581397, |
|
"grad_norm": 1.32189658216536, |
|
"learning_rate": 2.207992087584323e-06, |
|
"loss": 0.3807, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.935813953488372, |
|
"grad_norm": 1.3385233652650261, |
|
"learning_rate": 2.167989696116599e-06, |
|
"loss": 0.3849, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.9506976744186046, |
|
"grad_norm": 1.4812331676972927, |
|
"learning_rate": 2.1281835133209493e-06, |
|
"loss": 0.3823, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.9655813953488372, |
|
"grad_norm": 1.2877441615477965, |
|
"learning_rate": 2.0885869587506267e-06, |
|
"loss": 0.3811, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.9804651162790696, |
|
"grad_norm": 1.3833520048613848, |
|
"learning_rate": 2.0492133812885277e-06, |
|
"loss": 0.3784, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.9953488372093022, |
|
"grad_norm": 1.4660482978156375, |
|
"learning_rate": 2.0100760546469864e-06, |
|
"loss": 0.3816, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.9983255813953489, |
|
"eval_loss": 0.05558985471725464, |
|
"eval_runtime": 459.9835, |
|
"eval_samples_per_second": 39.356, |
|
"eval_steps_per_second": 0.615, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 2.0111627906976746, |
|
"grad_norm": 1.8852621531035139, |
|
"learning_rate": 1.9711881728929063e-06, |
|
"loss": 0.3147, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.026046511627907, |
|
"grad_norm": 1.5742414705077779, |
|
"learning_rate": 1.93256284599975e-06, |
|
"loss": 0.2856, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.0409302325581393, |
|
"grad_norm": 1.5835034555715797, |
|
"learning_rate": 1.8942130954278742e-06, |
|
"loss": 0.287, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.055813953488372, |
|
"grad_norm": 1.4838228071676645, |
|
"learning_rate": 1.8561518497346986e-06, |
|
"loss": 0.2865, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.0706976744186045, |
|
"grad_norm": 1.498870672430201, |
|
"learning_rate": 1.8183919402162088e-06, |
|
"loss": 0.2832, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.085581395348837, |
|
"grad_norm": 1.541904110678706, |
|
"learning_rate": 1.7809460965812341e-06, |
|
"loss": 0.2869, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.1004651162790697, |
|
"grad_norm": 1.4561848736193037, |
|
"learning_rate": 1.7438269426599806e-06, |
|
"loss": 0.2835, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.1153488372093023, |
|
"grad_norm": 1.4220192506403004, |
|
"learning_rate": 1.7070469921482557e-06, |
|
"loss": 0.2814, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.130232558139535, |
|
"grad_norm": 1.4385076328435982, |
|
"learning_rate": 1.6706186443888218e-06, |
|
"loss": 0.2853, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.1451162790697675, |
|
"grad_norm": 1.6761700740327472, |
|
"learning_rate": 1.6345541801912975e-06, |
|
"loss": 0.2824, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 1.4615800646871493, |
|
"learning_rate": 1.598865757692033e-06, |
|
"loss": 0.2838, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.1748837209302327, |
|
"grad_norm": 1.5221607059340931, |
|
"learning_rate": 1.5635654082553159e-06, |
|
"loss": 0.2861, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.1897674418604653, |
|
"grad_norm": 1.4695991331643223, |
|
"learning_rate": 1.5286650324173402e-06, |
|
"loss": 0.2815, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.2046511627906975, |
|
"grad_norm": 1.5130331041622915, |
|
"learning_rate": 1.49417639587427e-06, |
|
"loss": 0.285, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 2.21953488372093, |
|
"grad_norm": 1.4147707905859286, |
|
"learning_rate": 1.4601111255157516e-06, |
|
"loss": 0.2861, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 2.2344186046511627, |
|
"grad_norm": 1.515339784306767, |
|
"learning_rate": 1.426480705505238e-06, |
|
"loss": 0.2864, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.2493023255813953, |
|
"grad_norm": 1.343709147071654, |
|
"learning_rate": 1.3932964734084178e-06, |
|
"loss": 0.2859, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 2.264186046511628, |
|
"grad_norm": 1.5685605135233283, |
|
"learning_rate": 1.3605696163710685e-06, |
|
"loss": 0.2851, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 2.2790697674418605, |
|
"grad_norm": 1.3902994790791074, |
|
"learning_rate": 1.3283111673476198e-06, |
|
"loss": 0.2819, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 2.293953488372093, |
|
"grad_norm": 1.4043809415014048, |
|
"learning_rate": 1.2965320013816992e-06, |
|
"loss": 0.284, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 2.3088372093023257, |
|
"grad_norm": 1.3928586506764522, |
|
"learning_rate": 1.2652428319399101e-06, |
|
"loss": 0.2834, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 2.3237209302325583, |
|
"grad_norm": 1.4659553086533261, |
|
"learning_rate": 1.2344542073000847e-06, |
|
"loss": 0.2847, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 2.3386046511627905, |
|
"grad_norm": 1.422957011958867, |
|
"learning_rate": 1.2041765069952232e-06, |
|
"loss": 0.2819, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 2.353488372093023, |
|
"grad_norm": 1.465284377414126, |
|
"learning_rate": 1.1744199383143215e-06, |
|
"loss": 0.2809, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 2.3683720930232557, |
|
"grad_norm": 1.4400767691991236, |
|
"learning_rate": 1.1451945328612658e-06, |
|
"loss": 0.2834, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 2.3832558139534883, |
|
"grad_norm": 1.3521728187942337, |
|
"learning_rate": 1.1165101431729562e-06, |
|
"loss": 0.282, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 2.398139534883721, |
|
"grad_norm": 1.3847828855675668, |
|
"learning_rate": 1.0883764393977927e-06, |
|
"loss": 0.2825, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 2.4130232558139535, |
|
"grad_norm": 1.3575203139903693, |
|
"learning_rate": 1.0608029060356588e-06, |
|
"loss": 0.2839, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 2.427906976744186, |
|
"grad_norm": 1.4058947311018655, |
|
"learning_rate": 1.0337988387404791e-06, |
|
"loss": 0.2806, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 2.4427906976744187, |
|
"grad_norm": 1.4641846648455186, |
|
"learning_rate": 1.0073733411864462e-06, |
|
"loss": 0.2829, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 2.4576744186046513, |
|
"grad_norm": 1.3649324864016108, |
|
"learning_rate": 9.815353219989754e-07, |
|
"loss": 0.2827, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 2.472558139534884, |
|
"grad_norm": 1.350449271286142, |
|
"learning_rate": 9.562934917514021e-07, |
|
"loss": 0.282, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 2.4874418604651165, |
|
"grad_norm": 1.3972117947740512, |
|
"learning_rate": 9.316563600284535e-07, |
|
"loss": 0.2822, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 2.5023255813953487, |
|
"grad_norm": 1.4565576079970999, |
|
"learning_rate": 9.076322325574793e-07, |
|
"loss": 0.2832, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 2.5172093023255813, |
|
"grad_norm": 1.4043152891357864, |
|
"learning_rate": 8.842292084084076e-07, |
|
"loss": 0.2833, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 2.532093023255814, |
|
"grad_norm": 1.3896394291435263, |
|
"learning_rate": 8.614551772633615e-07, |
|
"loss": 0.2832, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 2.5469767441860465, |
|
"grad_norm": 1.4258374826387117, |
|
"learning_rate": 8.393178167568755e-07, |
|
"loss": 0.2827, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 2.561860465116279, |
|
"grad_norm": 1.407749674891561, |
|
"learning_rate": 8.178245898875936e-07, |
|
"loss": 0.2825, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 2.5767441860465117, |
|
"grad_norm": 1.413125902188421, |
|
"learning_rate": 7.969827425023237e-07, |
|
"loss": 0.2872, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 2.5916279069767443, |
|
"grad_norm": 1.3473962411110556, |
|
"learning_rate": 7.767993008533013e-07, |
|
"loss": 0.2844, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 2.606511627906977, |
|
"grad_norm": 1.404106793744438, |
|
"learning_rate": 7.572810692294846e-07, |
|
"loss": 0.2825, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 2.6213953488372095, |
|
"grad_norm": 1.3805216307523198, |
|
"learning_rate": 7.384346276626742e-07, |
|
"loss": 0.2816, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 2.6362790697674416, |
|
"grad_norm": 1.3718811044132653, |
|
"learning_rate": 7.202663297092399e-07, |
|
"loss": 0.2815, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 2.6511627906976747, |
|
"grad_norm": 1.4225261849028261, |
|
"learning_rate": 7.027823003081922e-07, |
|
"loss": 0.2842, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 2.666046511627907, |
|
"grad_norm": 1.4054086415352616, |
|
"learning_rate": 6.859884337163317e-07, |
|
"loss": 0.2825, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 2.6809302325581394, |
|
"grad_norm": 1.4199694290168978, |
|
"learning_rate": 6.698903915211616e-07, |
|
"loss": 0.2813, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 2.695813953488372, |
|
"grad_norm": 1.4944137400292614, |
|
"learning_rate": 6.544936007322386e-07, |
|
"loss": 0.2815, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 2.7106976744186047, |
|
"grad_norm": 1.3783156351943808, |
|
"learning_rate": 6.398032519516111e-07, |
|
"loss": 0.2822, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 2.7255813953488373, |
|
"grad_norm": 1.433571561201421, |
|
"learning_rate": 6.258242976239481e-07, |
|
"loss": 0.2838, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 2.74046511627907, |
|
"grad_norm": 1.3514962485654185, |
|
"learning_rate": 6.125614503669588e-07, |
|
"loss": 0.2849, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 2.7553488372093025, |
|
"grad_norm": 1.4597760031013953, |
|
"learning_rate": 6.000191813826666e-07, |
|
"loss": 0.2826, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 2.7702325581395346, |
|
"grad_norm": 1.3371170140281539, |
|
"learning_rate": 5.882017189500664e-07, |
|
"loss": 0.282, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 2.7851162790697677, |
|
"grad_norm": 1.3460302132963022, |
|
"learning_rate": 5.771130469996746e-07, |
|
"loss": 0.2817, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 1.397811165218838, |
|
"learning_rate": 5.667569037704612e-07, |
|
"loss": 0.2806, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 2.8148837209302324, |
|
"grad_norm": 1.3434330531321284, |
|
"learning_rate": 5.571367805496029e-07, |
|
"loss": 0.2828, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 2.829767441860465, |
|
"grad_norm": 1.3531163030358948, |
|
"learning_rate": 5.482559204954935e-07, |
|
"loss": 0.2828, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 2.8446511627906976, |
|
"grad_norm": 1.3428261741086946, |
|
"learning_rate": 5.401173175444026e-07, |
|
"loss": 0.281, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 2.8595348837209302, |
|
"grad_norm": 1.3439493884557654, |
|
"learning_rate": 5.327237154011533e-07, |
|
"loss": 0.2811, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 2.874418604651163, |
|
"grad_norm": 1.3575567573206913, |
|
"learning_rate": 5.260776066141565e-07, |
|
"loss": 0.2797, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 2.8893023255813954, |
|
"grad_norm": 1.4000714352577568, |
|
"learning_rate": 5.201812317351187e-07, |
|
"loss": 0.2803, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 2.904186046511628, |
|
"grad_norm": 1.3711991273663426, |
|
"learning_rate": 5.150365785637017e-07, |
|
"loss": 0.2798, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 2.9190697674418606, |
|
"grad_norm": 1.373654423872898, |
|
"learning_rate": 5.106453814773902e-07, |
|
"loss": 0.2776, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 2.933953488372093, |
|
"grad_norm": 1.3534625494022425, |
|
"learning_rate": 5.070091208467953e-07, |
|
"loss": 0.2835, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 2.948837209302326, |
|
"grad_norm": 1.3254134088824696, |
|
"learning_rate": 5.041290225365879e-07, |
|
"loss": 0.2809, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 2.963720930232558, |
|
"grad_norm": 1.306976854112462, |
|
"learning_rate": 5.020060574922324e-07, |
|
"loss": 0.279, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 2.9786046511627906, |
|
"grad_norm": 1.3107886288482267, |
|
"learning_rate": 5.006409414126598e-07, |
|
"loss": 0.2807, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 2.993488372093023, |
|
"grad_norm": 1.3832142175918538, |
|
"learning_rate": 5.000341345089891e-07, |
|
"loss": 0.2796, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 2.997953488372093, |
|
"eval_loss": 0.05797627940773964, |
|
"eval_runtime": 449.2792, |
|
"eval_samples_per_second": 40.293, |
|
"eval_steps_per_second": 0.63, |
|
"step": 2013 |
|
}, |
|
{ |
|
"epoch": 2.997953488372093, |
|
"step": 2013, |
|
"total_flos": 3371640595415040.0, |
|
"train_loss": 0.40245939461792873, |
|
"train_runtime": 65376.8125, |
|
"train_samples_per_second": 15.783, |
|
"train_steps_per_second": 0.031 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 2013, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3371640595415040.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |