|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8313847752663029,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 1229.812744140625,
      "learning_rate": 5e-06,
      "loss": 30.4769,
      "step": 5
    },
    {
      "epoch": 0.0,
      "grad_norm": 309.7672119140625,
      "learning_rate": 1e-05,
      "loss": 27.4271,
      "step": 10
    },
    {
      "epoch": 0.0,
      "grad_norm": 192.40821838378906,
      "learning_rate": 1.5e-05,
      "loss": 23.191,
      "step": 15
    },
    {
      "epoch": 0.0,
      "grad_norm": 101.36874389648438,
      "learning_rate": 2e-05,
      "loss": 18.2568,
      "step": 20
    },
    {
      "epoch": 0.01,
      "grad_norm": 56.78367614746094,
      "learning_rate": 2.5e-05,
      "loss": 13.6882,
      "step": 25
    },
    {
      "epoch": 0.01,
      "grad_norm": 22.516868591308594,
      "learning_rate": 3e-05,
      "loss": 10.1169,
      "step": 30
    },
    {
      "epoch": 0.01,
      "grad_norm": 13.551531791687012,
      "learning_rate": 3.5e-05,
      "loss": 8.5886,
      "step": 35
    },
    {
      "epoch": 0.01,
      "grad_norm": 12.628911018371582,
      "learning_rate": 4e-05,
      "loss": 8.2086,
      "step": 40
    },
    {
      "epoch": 0.01,
      "grad_norm": 13.996498107910156,
      "learning_rate": 4.5e-05,
      "loss": 8.1833,
      "step": 45
    },
    {
      "epoch": 0.01,
      "grad_norm": 11.634190559387207,
      "learning_rate": 5e-05,
      "loss": 8.0995,
      "step": 50
    },
    {
      "epoch": 0.01,
      "grad_norm": 8.44438362121582,
      "learning_rate": 5.500000000000001e-05,
      "loss": 8.0283,
      "step": 55
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.912370204925537,
      "learning_rate": 6e-05,
      "loss": 7.9718,
      "step": 60
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.106713771820068,
      "learning_rate": 6.500000000000001e-05,
      "loss": 7.9734,
      "step": 65
    },
    {
      "epoch": 0.01,
      "grad_norm": 9.923928260803223,
      "learning_rate": 7e-05,
      "loss": 7.8768,
      "step": 70
    },
    {
      "epoch": 0.02,
      "grad_norm": 17.63799285888672,
      "learning_rate": 7.500000000000001e-05,
      "loss": 7.7631,
      "step": 75
    },
    {
      "epoch": 0.02,
      "grad_norm": 14.726093292236328,
      "learning_rate": 8e-05,
      "loss": 7.557,
      "step": 80
    },
    {
      "epoch": 0.02,
      "grad_norm": 12.786348342895508,
      "learning_rate": 8.5e-05,
      "loss": 7.4308,
      "step": 85
    },
    {
      "epoch": 0.02,
      "grad_norm": 6.956538677215576,
      "learning_rate": 9e-05,
      "loss": 7.2131,
      "step": 90
    },
    {
      "epoch": 0.02,
      "grad_norm": 7.928043365478516,
      "learning_rate": 9.5e-05,
      "loss": 7.0953,
      "step": 95
    },
    {
      "epoch": 0.02,
      "grad_norm": 9.65689468383789,
      "learning_rate": 0.0001,
      "loss": 6.9548,
      "step": 100
    },
    {
      "epoch": 0.02,
      "grad_norm": 6.416757583618164,
      "learning_rate": 9.999972205865686e-05,
      "loss": 6.7371,
      "step": 105
    },
    {
      "epoch": 0.02,
      "grad_norm": 8.33735179901123,
      "learning_rate": 9.999888823771751e-05,
      "loss": 6.6175,
      "step": 110
    },
    {
      "epoch": 0.02,
      "grad_norm": 7.704575061798096,
      "learning_rate": 9.999749854645204e-05,
      "loss": 6.432,
      "step": 115
    },
    {
      "epoch": 0.02,
      "grad_norm": 10.699024200439453,
      "learning_rate": 9.99955530003106e-05,
      "loss": 6.2739,
      "step": 120
    },
    {
      "epoch": 0.03,
      "grad_norm": 6.885202407836914,
      "learning_rate": 9.99930516209231e-05,
      "loss": 6.1744,
      "step": 125
    },
    {
      "epoch": 0.03,
      "grad_norm": 9.077549934387207,
      "learning_rate": 9.998999443609897e-05,
      "loss": 5.9334,
      "step": 130
    },
    {
      "epoch": 0.03,
      "grad_norm": 8.783599853515625,
      "learning_rate": 9.998638147982696e-05,
      "loss": 5.9392,
      "step": 135
    },
    {
      "epoch": 0.03,
      "grad_norm": 7.453628063201904,
      "learning_rate": 9.998221279227467e-05,
      "loss": 5.8259,
      "step": 140
    },
    {
      "epoch": 0.03,
      "grad_norm": 4.611512660980225,
      "learning_rate": 9.997748841978812e-05,
      "loss": 5.7614,
      "step": 145
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.822922706604004,
      "learning_rate": 9.997220841489122e-05,
      "loss": 5.6118,
      "step": 150
    },
    {
      "epoch": 0.03,
      "grad_norm": 5.933773994445801,
      "learning_rate": 9.996637283628528e-05,
      "loss": 5.522,
      "step": 155
    },
    {
      "epoch": 0.03,
      "grad_norm": 6.299020767211914,
      "learning_rate": 9.995998174884821e-05,
      "loss": 5.4244,
      "step": 160
    },
    {
      "epoch": 0.03,
      "grad_norm": 6.691274166107178,
      "learning_rate": 9.995303522363394e-05,
      "loss": 5.3919,
      "step": 165
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.277099132537842,
      "learning_rate": 9.99455333378715e-05,
      "loss": 5.3489,
      "step": 170
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.7971572875976562,
      "learning_rate": 9.993747617496428e-05,
      "loss": 5.2339,
      "step": 175
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.9199624061584473,
      "learning_rate": 9.9928863824489e-05,
      "loss": 5.1713,
      "step": 180
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.2884421348571777,
      "learning_rate": 9.99196963821948e-05,
      "loss": 5.13,
      "step": 185
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.122908592224121,
      "learning_rate": 9.990997395000217e-05,
      "loss": 5.0172,
      "step": 190
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.893023729324341,
      "learning_rate": 9.989969663600169e-05,
      "loss": 5.0076,
      "step": 195
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.7886602878570557,
      "learning_rate": 9.9888864554453e-05,
      "loss": 5.0649,
      "step": 200
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.7208797931671143,
      "learning_rate": 9.987747782578342e-05,
      "loss": 4.8919,
      "step": 205
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.6963469982147217,
      "learning_rate": 9.986553657658668e-05,
      "loss": 4.7996,
      "step": 210
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.400980472564697,
      "learning_rate": 9.985304093962145e-05,
      "loss": 4.9208,
      "step": 215
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.301896095275879,
      "learning_rate": 9.983999105380988e-05,
      "loss": 4.7683,
      "step": 220
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.1751794815063477,
      "learning_rate": 9.982638706423608e-05,
      "loss": 4.8343,
      "step": 225
    },
    {
      "epoch": 0.05,
      "grad_norm": 5.351551055908203,
      "learning_rate": 9.98122291221445e-05,
      "loss": 4.6515,
      "step": 230
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.5814805030822754,
      "learning_rate": 9.979751738493826e-05,
      "loss": 4.671,
      "step": 235
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.3766465187072754,
      "learning_rate": 9.978225201617732e-05,
      "loss": 4.641,
      "step": 240
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.2348690032958984,
      "learning_rate": 9.976643318557678e-05,
      "loss": 4.6256,
      "step": 245
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.7532126903533936,
      "learning_rate": 9.975006106900495e-05,
      "loss": 4.4895,
      "step": 250
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.8409101963043213,
      "learning_rate": 9.973313584848132e-05,
      "loss": 4.6573,
      "step": 255
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.0017435550689697,
      "learning_rate": 9.971565771217464e-05,
      "loss": 4.5779,
      "step": 260
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.3714191913604736,
      "learning_rate": 9.969762685440076e-05,
      "loss": 4.5046,
      "step": 265
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.982652187347412,
      "learning_rate": 9.967904347562054e-05,
      "loss": 4.5502,
      "step": 270
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.0139358043670654,
      "learning_rate": 9.965990778243755e-05,
      "loss": 4.4332,
      "step": 275
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.529115915298462,
      "learning_rate": 9.964021998759577e-05,
      "loss": 4.3909,
      "step": 280
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.1116204261779785,
      "learning_rate": 9.961998030997733e-05,
      "loss": 4.3632,
      "step": 285
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.561253309249878,
      "learning_rate": 9.95991889745999e-05,
      "loss": 4.4698,
      "step": 290
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.0149197578430176,
      "learning_rate": 9.957784621261441e-05,
      "loss": 4.4313,
      "step": 295
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.329812049865723,
      "learning_rate": 9.955595226130226e-05,
      "loss": 4.344,
      "step": 300
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.1647002696990967,
      "learning_rate": 9.953350736407282e-05,
      "loss": 4.2796,
      "step": 305
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.213648796081543,
      "learning_rate": 9.951051177046069e-05,
      "loss": 4.2897,
      "step": 310
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.8537462949752808,
      "learning_rate": 9.948696573612292e-05,
      "loss": 4.2881,
      "step": 315
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.3809573650360107,
      "learning_rate": 9.946286952283618e-05,
      "loss": 4.3036,
      "step": 320
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.212860107421875,
      "learning_rate": 9.943822339849381e-05,
      "loss": 4.2527,
      "step": 325
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.1665050983428955,
      "learning_rate": 9.941302763710288e-05,
      "loss": 4.2345,
      "step": 330
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.2548954486846924,
      "learning_rate": 9.938728251878116e-05,
      "loss": 4.2502,
      "step": 335
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.7102184295654297,
      "learning_rate": 9.936098832975393e-05,
      "loss": 4.3006,
      "step": 340
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.172853469848633,
      "learning_rate": 9.933414536235091e-05,
      "loss": 4.246,
      "step": 345
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.476499557495117,
      "learning_rate": 9.93067539150029e-05,
      "loss": 4.1389,
      "step": 350
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.0237905979156494,
      "learning_rate": 9.927881429223853e-05,
      "loss": 4.2699,
      "step": 355
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.0171585083007812,
      "learning_rate": 9.925032680468085e-05,
      "loss": 4.1228,
      "step": 360
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.2988505363464355,
      "learning_rate": 9.922129176904388e-05,
      "loss": 4.1321,
      "step": 365
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.95521879196167,
      "learning_rate": 9.919170950812911e-05,
      "loss": 4.049,
      "step": 370
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.220893144607544,
      "learning_rate": 9.916158035082184e-05,
      "loss": 4.1948,
      "step": 375
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.9344232082366943,
      "learning_rate": 9.913090463208763e-05,
      "loss": 4.0705,
      "step": 380
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.3103578090667725,
      "learning_rate": 9.90996826929685e-05,
      "loss": 4.1431,
      "step": 385
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.692507743835449,
      "learning_rate": 9.906791488057916e-05,
      "loss": 4.016,
      "step": 390
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.183342218399048,
      "learning_rate": 9.903560154810313e-05,
      "loss": 4.1322,
      "step": 395
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.961439609527588,
      "learning_rate": 9.900274305478887e-05,
      "loss": 3.9821,
      "step": 400
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.1895840167999268,
      "learning_rate": 9.896933976594572e-05,
      "loss": 4.126,
      "step": 405
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.0895981788635254,
      "learning_rate": 9.893539205293989e-05,
      "loss": 4.0772,
      "step": 410
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.85659122467041,
      "learning_rate": 9.890090029319028e-05,
      "loss": 3.9736,
      "step": 415
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.9629476070404053,
      "learning_rate": 9.886586487016433e-05,
      "loss": 4.0457,
      "step": 420
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.3512821197509766,
      "learning_rate": 9.883028617337378e-05,
      "loss": 4.0104,
      "step": 425
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.0070366859436035,
      "learning_rate": 9.879416459837022e-05,
      "loss": 4.0205,
      "step": 430
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.785737156867981,
      "learning_rate": 9.875750054674082e-05,
      "loss": 4.0264,
      "step": 435
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.6705329418182373,
      "learning_rate": 9.872029442610382e-05,
      "loss": 3.9757,
      "step": 440
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.837396502494812,
      "learning_rate": 9.8682546650104e-05,
      "loss": 4.0169,
      "step": 445
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.0840909481048584,
      "learning_rate": 9.864425763840802e-05,
      "loss": 3.98,
      "step": 450
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.6531455516815186,
      "learning_rate": 9.860542781669988e-05,
      "loss": 4.0411,
      "step": 455
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.8012709617614746,
      "learning_rate": 9.85660576166761e-05,
      "loss": 3.9331,
      "step": 460
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.9777172803878784,
      "learning_rate": 9.852614747604093e-05,
      "loss": 3.8908,
      "step": 465
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.182304859161377,
      "learning_rate": 9.848569783850145e-05,
      "loss": 3.9088,
      "step": 470
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.920596122741699,
      "learning_rate": 9.844470915376278e-05,
      "loss": 3.9597,
      "step": 475
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.971817135810852,
      "learning_rate": 9.840318187752292e-05,
      "loss": 3.8223,
      "step": 480
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.046733856201172,
      "learning_rate": 9.836111647146771e-05,
      "loss": 3.9495,
      "step": 485
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.4442347288131714,
      "learning_rate": 9.831851340326577e-05,
      "loss": 3.9561,
      "step": 490
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.5948429107666016,
      "learning_rate": 9.82753731465633e-05,
      "loss": 3.9302,
      "step": 495
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.8231884241104126,
      "learning_rate": 9.823169618097871e-05,
      "loss": 3.9291,
      "step": 500
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.004462480545044,
      "learning_rate": 9.81874829920974e-05,
      "loss": 3.8685,
      "step": 505
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.7805441617965698,
      "learning_rate": 9.814273407146623e-05,
      "loss": 3.8173,
      "step": 510
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.4531677961349487,
      "learning_rate": 9.809744991658829e-05,
      "loss": 3.8398,
      "step": 515
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.6244157552719116,
      "learning_rate": 9.805163103091708e-05,
      "loss": 3.8935,
      "step": 520
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.486452341079712,
      "learning_rate": 9.800527792385112e-05,
      "loss": 3.8778,
      "step": 525
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.142691135406494,
      "learning_rate": 9.79583911107282e-05,
      "loss": 3.919,
      "step": 530
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.5212677717208862,
      "learning_rate": 9.791097111281968e-05,
      "loss": 3.8523,
      "step": 535
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.7378300428390503,
      "learning_rate": 9.786301845732467e-05,
      "loss": 3.835,
      "step": 540
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.5763616561889648,
      "learning_rate": 9.781453367736418e-05,
      "loss": 3.8253,
      "step": 545
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.663087248802185,
      "learning_rate": 9.776551731197524e-05,
      "loss": 3.7616,
      "step": 550
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.545979619026184,
      "learning_rate": 9.771596990610478e-05,
      "loss": 3.8516,
      "step": 555
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.53286874294281,
      "learning_rate": 9.766589201060372e-05,
      "loss": 3.77,
      "step": 560
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.624846935272217,
      "learning_rate": 9.761528418222077e-05,
      "loss": 3.7981,
      "step": 565
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.213869571685791,
      "learning_rate": 9.756414698359624e-05,
      "loss": 3.7741,
      "step": 570
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.046447515487671,
      "learning_rate": 9.75124809832558e-05,
      "loss": 3.8351,
      "step": 575
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.3796635866165161,
      "learning_rate": 9.746028675560413e-05,
      "loss": 3.7778,
      "step": 580
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.6972594261169434,
      "learning_rate": 9.740756488091861e-05,
      "loss": 3.7298,
      "step": 585
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.337314486503601,
      "learning_rate": 9.735431594534277e-05,
      "loss": 3.7911,
      "step": 590
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.4657835960388184,
      "learning_rate": 9.730054054087983e-05,
      "loss": 3.8175,
      "step": 595
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.644771933555603,
      "learning_rate": 9.724623926538612e-05,
      "loss": 3.7631,
      "step": 600
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.5241328477859497,
      "learning_rate": 9.719141272256443e-05,
      "loss": 3.7058,
      "step": 605
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.7510253190994263,
      "learning_rate": 9.713606152195726e-05,
      "loss": 3.8659,
      "step": 610
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.608384370803833,
      "learning_rate": 9.708018627894011e-05,
      "loss": 3.7078,
      "step": 615
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.44278085231781,
      "learning_rate": 9.702378761471456e-05,
      "loss": 3.7982,
      "step": 620
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.9439903497695923,
      "learning_rate": 9.696686615630146e-05,
      "loss": 3.7216,
      "step": 625
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.7295832633972168,
      "learning_rate": 9.690942253653385e-05,
      "loss": 3.7839,
      "step": 630
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.4564378261566162,
      "learning_rate": 9.685145739405002e-05,
      "loss": 3.7383,
      "step": 635
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.3756966590881348,
      "learning_rate": 9.679297137328634e-05,
      "loss": 3.6504,
      "step": 640
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.4621644020080566,
      "learning_rate": 9.673396512447013e-05,
      "loss": 3.7754,
      "step": 645
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.62283194065094,
      "learning_rate": 9.667443930361247e-05,
      "loss": 3.6665,
      "step": 650
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.5261329412460327,
      "learning_rate": 9.661439457250076e-05,
      "loss": 3.7083,
      "step": 655
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.560340166091919,
      "learning_rate": 9.655383159869158e-05,
      "loss": 3.744,
      "step": 660
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.6525319814682007,
      "learning_rate": 9.649275105550309e-05,
      "loss": 3.7419,
      "step": 665
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.4129501581192017,
      "learning_rate": 9.643115362200762e-05,
      "loss": 3.7203,
      "step": 670
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.337715983390808,
      "learning_rate": 9.636903998302409e-05,
      "loss": 3.6888,
      "step": 675
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.7548418045043945,
      "learning_rate": 9.630641082911045e-05,
      "loss": 3.6591,
      "step": 680
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.4376401901245117,
      "learning_rate": 9.624326685655593e-05,
      "loss": 3.5796,
      "step": 685
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.5718088150024414,
      "learning_rate": 9.617960876737337e-05,
      "loss": 3.6026,
      "step": 690
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.4157310724258423,
      "learning_rate": 9.611543726929134e-05,
      "loss": 3.7093,
      "step": 695
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.5425848960876465,
      "learning_rate": 9.605075307574635e-05,
      "loss": 3.7303,
      "step": 700
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.287452220916748,
      "learning_rate": 9.598555690587487e-05,
      "loss": 3.6574,
      "step": 705
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.592053771018982,
      "learning_rate": 9.591984948450532e-05,
      "loss": 3.6116,
      "step": 710
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.5864181518554688,
      "learning_rate": 9.585363154215008e-05,
      "loss": 3.6425,
      "step": 715
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.4077820777893066,
      "learning_rate": 9.578690381499728e-05,
      "loss": 3.686,
      "step": 720
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.3487296104431152,
      "learning_rate": 9.571966704490271e-05,
      "loss": 3.6217,
      "step": 725
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.9488983154296875,
      "learning_rate": 9.565192197938148e-05,
      "loss": 3.5956,
      "step": 730
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.4107372760772705,
      "learning_rate": 9.558366937159977e-05,
      "loss": 3.737,
      "step": 735
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.4383083581924438,
      "learning_rate": 9.551490998036646e-05,
      "loss": 3.648,
      "step": 740
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.3736857175827026,
      "learning_rate": 9.544564457012463e-05,
      "loss": 3.6411,
      "step": 745
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.3991824388504028,
      "learning_rate": 9.537587391094314e-05,
      "loss": 3.6154,
      "step": 750
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.4682576656341553,
      "learning_rate": 9.5305598778508e-05,
      "loss": 3.6828,
      "step": 755
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.429841160774231,
      "learning_rate": 9.52348199541138e-05,
      "loss": 3.5335,
      "step": 760
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.5785666704177856,
      "learning_rate": 9.516353822465504e-05,
      "loss": 3.6533,
      "step": 765
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.5011215209960938,
      "learning_rate": 9.509175438261726e-05,
      "loss": 3.659,
      "step": 770
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.679041862487793,
      "learning_rate": 9.501946922606838e-05,
      "loss": 3.579,
      "step": 775
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.58845853805542,
      "learning_rate": 9.494668355864973e-05,
      "loss": 3.5899,
      "step": 780
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.2959877252578735,
      "learning_rate": 9.487339818956716e-05,
      "loss": 3.5654,
      "step": 785
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.5493981838226318,
      "learning_rate": 9.479961393358203e-05,
      "loss": 3.6012,
      "step": 790
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.5047404766082764,
      "learning_rate": 9.472533161100215e-05,
      "loss": 3.6367,
      "step": 795
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.3275983333587646,
      "learning_rate": 9.465055204767265e-05,
      "loss": 3.6274,
      "step": 800
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.3751220703125,
      "learning_rate": 9.457527607496685e-05,
      "loss": 3.5199,
      "step": 805
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.3353458642959595,
      "learning_rate": 9.44995045297769e-05,
      "loss": 3.5321,
      "step": 810
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.2492051124572754,
      "learning_rate": 9.442323825450464e-05,
      "loss": 3.6332,
      "step": 815
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.0786397457122803,
      "learning_rate": 9.43464780970521e-05,
      "loss": 3.6151,
      "step": 820
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.4719089269638062,
      "learning_rate": 9.426922491081212e-05,
      "loss": 3.4847,
      "step": 825
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.3098955154418945,
      "learning_rate": 9.419147955465888e-05,
      "loss": 3.5445,
      "step": 830
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.4527875185012817,
      "learning_rate": 9.411324289293832e-05,
      "loss": 3.5209,
      "step": 835
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.2426012754440308,
      "learning_rate": 9.403451579545859e-05,
      "loss": 3.5557,
      "step": 840
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.3350285291671753,
      "learning_rate": 9.395529913748025e-05,
      "loss": 3.5265,
      "step": 845
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.466402530670166,
      "learning_rate": 9.387559379970672e-05,
      "loss": 3.5044,
      "step": 850
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.9047514200210571,
      "learning_rate": 9.379540066827431e-05,
      "loss": 3.4028,
      "step": 855
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.1885713338851929,
      "learning_rate": 9.371472063474248e-05,
      "loss": 3.5112,
      "step": 860
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.418020486831665,
      "learning_rate": 9.363355459608394e-05,
      "loss": 3.5393,
      "step": 865
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.5337977409362793,
      "learning_rate": 9.355190345467457e-05,
      "loss": 3.498,
      "step": 870
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.5761481523513794,
      "learning_rate": 9.346976811828352e-05,
      "loss": 3.4533,
      "step": 875
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.394687533378601,
      "learning_rate": 9.338714950006297e-05,
      "loss": 3.6147,
      "step": 880
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.4166648387908936,
      "learning_rate": 9.330404851853817e-05,
      "loss": 3.5151,
      "step": 885
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.5528873205184937,
      "learning_rate": 9.3220466097597e-05,
      "loss": 3.5674,
      "step": 890
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.3615038394927979,
      "learning_rate": 9.313640316647991e-05,
      "loss": 3.4514,
      "step": 895
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.3137192726135254,
      "learning_rate": 9.305186065976945e-05,
      "loss": 3.5113,
      "step": 900
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.2795889377593994,
      "learning_rate": 9.296683951737993e-05,
      "loss": 3.5497,
      "step": 905
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.1420422792434692,
      "learning_rate": 9.288134068454697e-05,
      "loss": 3.5683,
      "step": 910
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.9743645191192627,
      "learning_rate": 9.2795365111817e-05,
      "loss": 3.5278,
      "step": 915
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.2144616842269897,
      "learning_rate": 9.270891375503665e-05,
      "loss": 3.5173,
      "step": 920
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.5434770584106445,
      "learning_rate": 9.262198757534218e-05,
      "loss": 3.5055,
      "step": 925
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.326240062713623,
      "learning_rate": 9.253458753914874e-05,
      "loss": 3.5729,
      "step": 930
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.3340810537338257,
      "learning_rate": 9.244671461813969e-05,
      "loss": 3.5003,
      "step": 935
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.2375303506851196,
      "learning_rate": 9.235836978925572e-05,
      "loss": 3.5078,
      "step": 940
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.30227530002594,
      "learning_rate": 9.226955403468406e-05,
      "loss": 3.5024,
      "step": 945
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.679872751235962,
      "learning_rate": 9.21802683418475e-05,
      "loss": 3.5012,
      "step": 950
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.2751423120498657,
      "learning_rate": 9.209051370339347e-05,
      "loss": 3.504,
      "step": 955
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.7539143562316895,
      "learning_rate": 9.200029111718295e-05,
      "loss": 3.5367,
      "step": 960
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.2914196252822876,
      "learning_rate": 9.190960158627941e-05,
      "loss": 3.4866,
      "step": 965
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.5011461973190308,
      "learning_rate": 9.181844611893766e-05,
      "loss": 3.5024,
      "step": 970
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.258388638496399,
      "learning_rate": 9.172682572859261e-05,
      "loss": 3.3675,
      "step": 975
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.1322250366210938,
      "learning_rate": 9.163474143384806e-05,
      "loss": 3.4236,
      "step": 980
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.1992788314819336,
      "learning_rate": 9.154219425846528e-05,
      "loss": 3.4403,
      "step": 985
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.2604423761367798,
      "learning_rate": 9.144918523135175e-05,
      "loss": 3.4361,
      "step": 990
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.2565964460372925,
      "learning_rate": 9.13557153865496e-05,
      "loss": 3.513,
      "step": 995
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.2667522430419922,
      "learning_rate": 9.12617857632242e-05,
      "loss": 3.3979,
      "step": 1000
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.21877920627594,
      "learning_rate": 9.116739740565259e-05,
      "loss": 3.5425,
      "step": 1005
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.243408441543579,
      "learning_rate": 9.107255136321184e-05,
      "loss": 3.4138,
      "step": 1010
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.416135549545288,
      "learning_rate": 9.09772486903674e-05,
      "loss": 3.4064,
      "step": 1015
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.505253791809082,
      "learning_rate": 9.08814904466614e-05,
      "loss": 3.4608,
      "step": 1020
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.468630313873291,
      "learning_rate": 9.078527769670085e-05,
      "loss": 3.4985,
      "step": 1025
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.4280701875686646,
      "learning_rate": 9.068861151014575e-05,
      "loss": 3.4031,
      "step": 1030
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.1800689697265625,
      "learning_rate": 9.05914929616973e-05,
      "loss": 3.3947,
      "step": 1035
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.3605331182479858,
      "learning_rate": 9.04939231310859e-05,
      "loss": 3.4994,
      "step": 1040
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.2450255155563354,
      "learning_rate": 9.039590310305914e-05,
      "loss": 3.4788,
      "step": 1045
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.280288815498352,
      "learning_rate": 9.029743396736974e-05,
      "loss": 3.3975,
      "step": 1050
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.3635969161987305,
      "learning_rate": 9.019851681876348e-05,
      "loss": 3.4155,
      "step": 1055
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.2809624671936035,
      "learning_rate": 9.009915275696693e-05,
      "loss": 3.4478,
      "step": 1060
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.5251933336257935,
      "learning_rate": 8.999934288667534e-05,
      "loss": 3.6159,
      "step": 1065
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.2694870233535767,
      "learning_rate": 8.989908831754028e-05,
      "loss": 3.4153,
      "step": 1070
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.2396315336227417,
      "learning_rate": 8.979839016415735e-05,
      "loss": 3.4796,
      "step": 1075
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.390388011932373,
      "learning_rate": 8.969724954605373e-05,
      "loss": 3.4476,
      "step": 1080
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.3664170503616333,
      "learning_rate": 8.959566758767581e-05,
      "loss": 3.4889,
      "step": 1085
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.537876844406128,
      "learning_rate": 8.949364541837661e-05,
      "loss": 3.4718,
      "step": 1090
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.397546648979187,
      "learning_rate": 8.939118417240329e-05,
      "loss": 3.5307,
      "step": 1095
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.2351093292236328,
      "learning_rate": 8.92882849888845e-05,
      "loss": 3.3648,
      "step": 1100
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.4052352905273438,
      "learning_rate": 8.918494901181773e-05,
      "loss": 3.436,
      "step": 1105
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.319915771484375,
      "learning_rate": 8.908117739005659e-05,
      "loss": 3.4592,
      "step": 1110
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.1869890689849854,
      "learning_rate": 8.897697127729805e-05,
      "loss": 3.4234,
      "step": 1115
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.2945029735565186,
      "learning_rate": 8.887233183206957e-05,
      "loss": 3.3872,
      "step": 1120
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.2278895378112793,
      "learning_rate": 8.876726021771627e-05,
      "loss": 3.4017,
      "step": 1125
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.1132441759109497,
      "learning_rate": 8.866175760238798e-05,
      "loss": 3.4569,
      "step": 1130
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.2101727724075317,
      "learning_rate": 8.855582515902625e-05,
      "loss": 3.46,
      "step": 1135
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.4088022708892822,
      "learning_rate": 8.844946406535131e-05,
      "loss": 3.329,
      "step": 1140
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.5124653577804565,
      "learning_rate": 8.834267550384893e-05,
      "loss": 3.407,
      "step": 1145
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.2741353511810303,
      "learning_rate": 8.823546066175741e-05,
      "loss": 3.4585,
      "step": 1150
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.128075361251831,
      "learning_rate": 8.81278207310542e-05,
      "loss": 3.3034,
      "step": 1155
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.306127667427063,
      "learning_rate": 8.801975690844278e-05,
      "loss": 3.4291,
      "step": 1160
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.1444599628448486,
      "learning_rate": 8.791127039533934e-05,
      "loss": 3.3713,
      "step": 1165
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.2846177816390991,
      "learning_rate": 8.780236239785935e-05,
      "loss": 3.3423,
      "step": 1170
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.2519681453704834,
      "learning_rate": 8.76930341268042e-05,
      "loss": 3.3863,
      "step": 1175
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.1739153861999512,
      "learning_rate": 8.758328679764776e-05,
      "loss": 3.3694,
      "step": 1180
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.2181482315063477,
      "learning_rate": 8.747312163052284e-05,
      "loss": 3.3413,
      "step": 1185
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.2021340131759644,
      "learning_rate": 8.736253985020761e-05,
      "loss": 3.382,
      "step": 1190
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.2046457529067993,
      "learning_rate": 8.725154268611203e-05,
      "loss": 3.3032,
      "step": 1195
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.1830154657363892,
      "learning_rate": 8.714013137226411e-05,
      "loss": 3.4315,
      "step": 1200
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.2441033124923706,
      "learning_rate": 8.702830714729628e-05,
      "loss": 3.4461,
      "step": 1205
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.1201214790344238,
      "learning_rate": 8.691607125443153e-05,
      "loss": 3.3202,
      "step": 1210
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.1856852769851685,
      "learning_rate": 8.680342494146967e-05,
      "loss": 3.3683,
      "step": 1215
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.256367564201355,
      "learning_rate": 8.66903694607734e-05,
      "loss": 3.4501,
      "step": 1220
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.2275352478027344,
      "learning_rate": 8.65769060692544e-05,
      "loss": 3.3701,
      "step": 1225
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.179276704788208,
      "learning_rate": 8.646303602835936e-05,
      "loss": 3.4582,
      "step": 1230
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.1398378610610962,
      "learning_rate": 8.634876060405597e-05,
      "loss": 3.3763,
      "step": 1235
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.2729588747024536,
      "learning_rate": 8.623408106681884e-05,
      "loss": 3.4178,
      "step": 1240
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.4245160818099976,
      "learning_rate": 8.611899869161535e-05,
      "loss": 3.4572,
      "step": 1245
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.11946439743042,
      "learning_rate": 8.600351475789147e-05,
      "loss": 3.3231,
      "step": 1250
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.1230659484863281,
      "learning_rate": 8.588763054955764e-05,
      "loss": 3.3122,
      "step": 1255
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.2719365358352661,
      "learning_rate": 8.57713473549743e-05,
      "loss": 3.3193,
      "step": 1260
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.1483792066574097,
      "learning_rate": 8.565466646693778e-05,
      "loss": 3.2684,
      "step": 1265
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.1922017335891724,
      "learning_rate": 8.553758918266578e-05,
      "loss": 3.4181,
      "step": 1270
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.228605031967163,
      "learning_rate": 8.5420116803783e-05,
      "loss": 3.3389,
      "step": 1275
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.23223876953125,
      "learning_rate": 8.530225063630668e-05,
      "loss": 3.4186,
      "step": 1280
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.138245701789856,
      "learning_rate": 8.518399199063205e-05,
      "loss": 3.3469,
      "step": 1285
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.2012712955474854,
      "learning_rate": 8.50653421815178e-05,
      "loss": 3.3803,
      "step": 1290
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.1761367321014404,
      "learning_rate": 8.494630252807138e-05,
      "loss": 3.3373,
      "step": 1295
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.1363654136657715,
      "learning_rate": 8.482687435373449e-05,
      "loss": 3.4441,
      "step": 1300
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.247525930404663,
      "learning_rate": 8.470705898626817e-05,
      "loss": 3.348,
      "step": 1305
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.1885273456573486,
      "learning_rate": 8.458685775773822e-05,
      "loss": 3.3302,
      "step": 1310
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.1339763402938843,
      "learning_rate": 8.446627200450025e-05,
      "loss": 3.3123,
      "step": 1315
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.0527379512786865,
      "learning_rate": 8.434530306718493e-05,
      "loss": 3.3242,
      "step": 1320
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.3958696126937866,
      "learning_rate": 8.4223952290683e-05,
      "loss": 3.3615,
      "step": 1325
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.123361587524414,
      "learning_rate": 8.41022210241304e-05,
      "loss": 3.3276,
      "step": 1330
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.1145395040512085,
      "learning_rate": 8.398011062089316e-05,
      "loss": 3.2965,
      "step": 1335
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.1637969017028809,
      "learning_rate": 8.385762243855249e-05,
      "loss": 3.3045,
      "step": 1340
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.1752508878707886,
      "learning_rate": 8.373475783888958e-05,
      "loss": 3.2793,
      "step": 1345
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.1751797199249268,
      "learning_rate": 8.36115181878705e-05,
      "loss": 3.375,
      "step": 1350
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.2501639127731323,
      "learning_rate": 8.348790485563101e-05,
      "loss": 3.3518,
      "step": 1355
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.4987150430679321,
      "learning_rate": 8.336391921646134e-05,
      "loss": 3.2781,
      "step": 1360
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.1865673065185547,
      "learning_rate": 8.323956264879089e-05,
      "loss": 3.2958,
      "step": 1365
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.1272923946380615,
      "learning_rate": 8.311483653517294e-05,
      "loss": 3.2893,
      "step": 1370
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.13765287399292,
      "learning_rate": 8.298974226226919e-05,
      "loss": 3.2754,
      "step": 1375
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.2192575931549072,
      "learning_rate": 8.28642812208345e-05,
      "loss": 3.3247,
      "step": 1380
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.0930324792861938,
      "learning_rate": 8.273845480570123e-05,
      "loss": 3.3496,
      "step": 1385
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.430282711982727,
      "learning_rate": 8.26122644157639e-05,
      "loss": 3.343,
      "step": 1390
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.1841108798980713,
      "learning_rate": 8.248571145396362e-05,
      "loss": 3.3798,
      "step": 1395
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.1953662633895874,
      "learning_rate": 8.235879732727236e-05,
      "loss": 3.3721,
      "step": 1400
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.1492798328399658,
      "learning_rate": 8.223152344667745e-05,
      "loss": 3.3421,
      "step": 1405
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.1533442735671997,
      "learning_rate": 8.21038912271658e-05,
      "loss": 3.236,
      "step": 1410
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.3999956846237183,
      "learning_rate": 8.197590208770824e-05,
      "loss": 3.2426,
      "step": 1415
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.1835312843322754,
      "learning_rate": 8.184755745124371e-05,
      "loss": 3.3129,
      "step": 1420
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.1276029348373413,
      "learning_rate": 8.171885874466342e-05,
      "loss": 3.2657,
      "step": 1425
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.065759539604187,
      "learning_rate": 8.158980739879507e-05,
      "loss": 3.335,
      "step": 1430
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.103005290031433,
      "learning_rate": 8.146040484838677e-05,
      "loss": 3.2582,
      "step": 1435
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.1258820295333862,
      "learning_rate": 8.133065253209132e-05,
      "loss": 3.3361,
      "step": 1440
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.1314821243286133,
      "learning_rate": 8.120055189245e-05,
      "loss": 3.3443,
      "step": 1445
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.3966840505599976,
      "learning_rate": 8.10701043758767e-05,
      "loss": 3.2131,
      "step": 1450
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.3545833826065063,
      "learning_rate": 8.093931143264174e-05,
      "loss": 3.279,
      "step": 1455
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.2523497343063354,
      "learning_rate": 8.080817451685576e-05,
      "loss": 3.3573,
      "step": 1460
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.1441946029663086,
      "learning_rate": 8.067669508645356e-05,
      "loss": 3.2457,
      "step": 1465
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.1906113624572754,
      "learning_rate": 8.054487460317797e-05,
      "loss": 3.3027,
      "step": 1470
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.1724116802215576,
      "learning_rate": 8.041271453256345e-05,
      "loss": 3.3604,
      "step": 1475
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.189816951751709,
      "learning_rate": 8.02802163439199e-05,
      "loss": 3.2289,
      "step": 1480
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.4529297351837158,
      "learning_rate": 8.01473815103163e-05,
      "loss": 3.2517,
      "step": 1485
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.2230924367904663,
      "learning_rate": 8.001421150856434e-05,
      "loss": 3.2214,
      "step": 1490
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.1627131700515747,
      "learning_rate": 7.988070781920197e-05,
      "loss": 3.2423,
      "step": 1495
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.193337321281433,
      "learning_rate": 7.9746871926477e-05,
      "loss": 3.2245,
      "step": 1500
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.08041250705719,
      "learning_rate": 7.961270531833052e-05,
      "loss": 3.1718,
      "step": 1505
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.207674264907837,
      "learning_rate": 7.947820948638045e-05,
      "loss": 3.3168,
      "step": 1510
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.1085306406021118,
      "learning_rate": 7.934338592590486e-05,
      "loss": 3.3181,
      "step": 1515
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.2514351606369019,
      "learning_rate": 7.92082361358254e-05,
      "loss": 3.2197,
      "step": 1520
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.1564998626708984,
      "learning_rate": 7.907276161869065e-05,
      "loss": 3.1698,
      "step": 1525
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.152014136314392,
      "learning_rate": 7.893696388065936e-05,
      "loss": 3.3014,
      "step": 1530
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.1435388326644897,
      "learning_rate": 7.88008444314838e-05,
      "loss": 3.2903,
      "step": 1535
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.0654579401016235,
      "learning_rate": 7.866440478449283e-05,
      "loss": 3.248,
      "step": 1540
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.0817036628723145,
      "learning_rate": 7.852764645657522e-05,
      "loss": 3.2552,
      "step": 1545
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.1185789108276367,
      "learning_rate": 7.839057096816271e-05,
      "loss": 3.3086,
      "step": 1550
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.160009503364563,
      "learning_rate": 7.82531798432131e-05,
      "loss": 3.3689,
      "step": 1555
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.2025865316390991,
      "learning_rate": 7.811547460919333e-05,
      "loss": 3.2443,
      "step": 1560
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.1262131929397583,
      "learning_rate": 7.797745679706254e-05,
      "loss": 3.2118,
      "step": 1565
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.1751744747161865,
      "learning_rate": 7.783912794125496e-05,
      "loss": 3.319,
      "step": 1570
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.2392690181732178,
      "learning_rate": 7.770048957966291e-05,
      "loss": 3.2533,
      "step": 1575
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.1627576351165771,
      "learning_rate": 7.756154325361967e-05,
      "loss": 3.2821,
      "step": 1580
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.2147516012191772,
      "learning_rate": 7.74222905078824e-05,
      "loss": 3.3135,
      "step": 1585
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.495335340499878,
      "learning_rate": 7.728273289061489e-05,
      "loss": 3.2368,
      "step": 1590
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.2422459125518799,
      "learning_rate": 7.714287195337044e-05,
      "loss": 3.1872,
      "step": 1595
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.0939897298812866,
      "learning_rate": 7.700270925107448e-05,
      "loss": 3.2568,
      "step": 1600
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.2069770097732544,
      "learning_rate": 7.686224634200742e-05,
      "loss": 3.2425,
      "step": 1605
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.1149344444274902,
      "learning_rate": 7.672148478778722e-05,
      "loss": 3.2489,
      "step": 1610
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.1385297775268555,
      "learning_rate": 7.658042615335212e-05,
      "loss": 3.2202,
      "step": 1615
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.2272182703018188,
      "learning_rate": 7.643907200694318e-05,
      "loss": 3.1268,
      "step": 1620
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.0669175386428833,
      "learning_rate": 7.629742392008684e-05,
      "loss": 3.201,
      "step": 1625
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.39219069480896,
      "learning_rate": 7.615548346757749e-05,
      "loss": 3.2481,
      "step": 1630
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.3066028356552124,
      "learning_rate": 7.60132522274599e-05,
      "loss": 3.2454,
      "step": 1635
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.1821011304855347,
      "learning_rate": 7.587073178101178e-05,
      "loss": 3.2107,
      "step": 1640
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.1814302206039429,
      "learning_rate": 7.572792371272609e-05,
      "loss": 3.2949,
      "step": 1645
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.2134374380111694,
      "learning_rate": 7.55848296102935e-05,
      "loss": 3.2682,
      "step": 1650
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.2237021923065186,
      "learning_rate": 7.544145106458465e-05,
      "loss": 3.2396,
      "step": 1655
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.1517393589019775,
      "learning_rate": 7.529778966963259e-05,
      "loss": 3.1926,
      "step": 1660
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.0898007154464722,
      "learning_rate": 7.515384702261496e-05,
      "loss": 3.1994,
      "step": 1665
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.1451698541641235,
      "learning_rate": 7.500962472383627e-05,
      "loss": 3.1632,
      "step": 1670
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.080106258392334,
      "learning_rate": 7.486512437671011e-05,
      "loss": 3.1445,
      "step": 1675
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.1294046640396118,
      "learning_rate": 7.472034758774128e-05,
      "loss": 3.1905,
      "step": 1680
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.0651402473449707,
      "learning_rate": 7.457529596650797e-05,
      "loss": 3.2374,
      "step": 1685
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.3194466829299927,
      "learning_rate": 7.442997112564392e-05,
      "loss": 3.2435,
      "step": 1690
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.1329237222671509,
      "learning_rate": 7.428437468082037e-05,
      "loss": 3.3095,
      "step": 1695
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.1752973794937134,
      "learning_rate": 7.413850825072817e-05,
      "loss": 3.1358,
      "step": 1700
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.0169298648834229,
      "learning_rate": 7.39923734570598e-05,
      "loss": 3.2223,
      "step": 1705
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.0975102186203003,
      "learning_rate": 7.384597192449126e-05,
      "loss": 3.2105,
      "step": 1710
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.2045339345932007,
      "learning_rate": 7.369930528066412e-05,
      "loss": 3.2965,
      "step": 1715
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.0657718181610107,
      "learning_rate": 7.355237515616732e-05,
      "loss": 3.1994,
      "step": 1720
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.1542158126831055,
      "learning_rate": 7.340518318451914e-05,
      "loss": 3.2854,
      "step": 1725
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.2519826889038086,
      "learning_rate": 7.325773100214893e-05,
      "loss": 3.2101,
      "step": 1730
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.1538292169570923,
      "learning_rate": 7.311002024837899e-05,
      "loss": 3.1743,
      "step": 1735
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.3539190292358398,
      "learning_rate": 7.296205256540633e-05,
      "loss": 3.3418,
      "step": 1740
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.2613801956176758,
      "learning_rate": 7.281382959828443e-05,
      "loss": 3.2483,
      "step": 1745
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.0935710668563843,
      "learning_rate": 7.26653529949049e-05,
      "loss": 3.2259,
      "step": 1750
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.0635665655136108,
      "learning_rate": 7.25166244059792e-05,
      "loss": 3.1394,
      "step": 1755
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.1002237796783447,
      "learning_rate": 7.236764548502029e-05,
      "loss": 3.2551,
      "step": 1760
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.0740032196044922,
      "learning_rate": 7.221841788832421e-05,
      "loss": 3.2319,
      "step": 1765
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.1868687868118286,
      "learning_rate": 7.206894327495173e-05,
      "loss": 3.1983,
      "step": 1770
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.1227601766586304,
      "learning_rate": 7.191922330670982e-05,
      "loss": 3.2175,
      "step": 1775
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.1705244779586792,
      "learning_rate": 7.176925964813326e-05,
      "loss": 3.1498,
      "step": 1780
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.4268603324890137,
      "learning_rate": 7.161905396646607e-05,
      "loss": 3.2766,
      "step": 1785
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.1663274765014648,
      "learning_rate": 7.146860793164299e-05,
      "loss": 3.2363,
      "step": 1790
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.143417477607727,
      "learning_rate": 7.131792321627098e-05,
      "loss": 3.2707,
      "step": 1795
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.0310893058776855,
      "learning_rate": 7.116700149561048e-05,
      "loss": 3.1236,
      "step": 1800
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.0529377460479736,
      "learning_rate": 7.101584444755696e-05,
      "loss": 3.219,
      "step": 1805
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.1156840324401855,
      "learning_rate": 7.086445375262212e-05,
      "loss": 3.1371,
      "step": 1810
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.1329755783081055,
      "learning_rate": 7.071283109391528e-05,
      "loss": 3.2273,
      "step": 1815
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.060715913772583,
      "learning_rate": 7.056097815712466e-05,
      "loss": 3.1384,
      "step": 1820
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.9990625977516174,
      "learning_rate": 7.040889663049862e-05,
      "loss": 3.1813,
      "step": 1825
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.1197738647460938,
      "learning_rate": 7.025658820482693e-05,
      "loss": 3.1422,
      "step": 1830
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.136229157447815,
      "learning_rate": 7.010405457342192e-05,
      "loss": 3.2805,
      "step": 1835
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.1726741790771484,
      "learning_rate": 6.995129743209967e-05,
      "loss": 3.1917,
      "step": 1840
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.1462650299072266,
      "learning_rate": 6.97983184791612e-05,
      "loss": 3.2554,
      "step": 1845
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.136553406715393,
      "learning_rate": 6.964511941537355e-05,
      "loss": 3.2403,
      "step": 1850
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.0682226419448853,
      "learning_rate": 6.949170194395083e-05,
      "loss": 3.1274,
      "step": 1855
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.0860931873321533,
      "learning_rate": 6.933806777053536e-05,
      "loss": 3.2707,
      "step": 1860
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.0465028285980225,
      "learning_rate": 6.918421860317872e-05,
      "loss": 3.1675,
      "step": 1865
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.100434422492981,
      "learning_rate": 6.903015615232263e-05,
      "loss": 3.2351,
      "step": 1870
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.0993447303771973,
      "learning_rate": 6.887588213078012e-05,
      "loss": 3.2404,
      "step": 1875
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.0658077001571655,
      "learning_rate": 6.87213982537163e-05,
      "loss": 3.2169,
      "step": 1880
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.0856738090515137,
      "learning_rate": 6.856670623862943e-05,
      "loss": 3.1248,
      "step": 1885
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.1014505624771118,
      "learning_rate": 6.841180780533179e-05,
      "loss": 3.1161,
      "step": 1890
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.0805455446243286,
      "learning_rate": 6.82567046759305e-05,
      "loss": 3.1812,
      "step": 1895
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.082909345626831,
      "learning_rate": 6.810139857480844e-05,
      "loss": 3.1427,
      "step": 1900
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.1378077268600464,
      "learning_rate": 6.794589122860509e-05,
      "loss": 3.0901,
      "step": 1905
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.0580015182495117,
      "learning_rate": 6.779018436619725e-05,
      "loss": 3.2417,
      "step": 1910
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.1437370777130127,
      "learning_rate": 6.763427971867992e-05,
      "loss": 3.1671,
      "step": 1915
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.0612616539001465,
      "learning_rate": 6.747817901934699e-05,
      "loss": 3.1191,
      "step": 1920
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.3342070579528809,
      "learning_rate": 6.732188400367197e-05,
      "loss": 3.2476,
      "step": 1925
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.2217915058135986,
      "learning_rate": 6.716539640928871e-05,
      "loss": 3.2108,
      "step": 1930
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.1861838102340698,
      "learning_rate": 6.70087179759721e-05,
      "loss": 3.1584,
      "step": 1935
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.0744543075561523,
      "learning_rate": 6.685185044561874e-05,
      "loss": 3.2015,
      "step": 1940
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.2175796031951904,
      "learning_rate": 6.669479556222747e-05,
      "loss": 3.1226,
      "step": 1945
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.1044460535049438,
      "learning_rate": 6.653755507188013e-05,
      "loss": 3.1128,
      "step": 1950
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.1110048294067383,
      "learning_rate": 6.638013072272205e-05,
      "loss": 3.1404,
      "step": 1955
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.1088255643844604,
      "learning_rate": 6.622252426494259e-05,
      "loss": 3.1565,
      "step": 1960
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.0379773378372192,
      "learning_rate": 6.606473745075581e-05,
      "loss": 3.1129,
      "step": 1965
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.0543949604034424,
      "learning_rate": 6.590677203438084e-05,
      "loss": 3.2115,
      "step": 1970
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.0617347955703735,
      "learning_rate": 6.574862977202252e-05,
      "loss": 3.1027,
      "step": 1975
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.2704519033432007,
      "learning_rate": 6.559031242185174e-05,
      "loss": 3.1921,
      "step": 1980
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.120644450187683,
      "learning_rate": 6.543182174398597e-05,
      "loss": 3.212,
      "step": 1985
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.186946988105774,
      "learning_rate": 6.52731595004697e-05,
      "loss": 3.164,
      "step": 1990
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.1211094856262207,
      "learning_rate": 6.51143274552548e-05,
      "loss": 3.1306,
      "step": 1995
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.0689617395401,
      "learning_rate": 6.495532737418098e-05,
      "loss": 3.1182,
      "step": 2000
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.0167192220687866,
      "learning_rate": 6.479616102495605e-05,
      "loss": 3.2118,
      "step": 2005
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.0616776943206787,
      "learning_rate": 6.463683017713638e-05,
      "loss": 3.1227,
      "step": 2010
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.060515284538269,
      "learning_rate": 6.447733660210715e-05,
      "loss": 3.1194,
      "step": 2015
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.0889480113983154,
      "learning_rate": 6.431768207306272e-05,
      "loss": 3.0782,
      "step": 2020
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.0803637504577637,
      "learning_rate": 6.415786836498684e-05,
      "loss": 3.1184,
      "step": 2025
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.064009428024292,
      "learning_rate": 6.399789725463298e-05,
      "loss": 3.1652,
      "step": 2030
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.0829176902770996,
      "learning_rate": 6.383777052050458e-05,
|
"loss": 3.1046, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0532939434051514, |
|
"learning_rate": 6.367748994283518e-05, |
|
"loss": 3.0792, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.196413516998291, |
|
"learning_rate": 6.351705730356877e-05, |
|
"loss": 3.1204, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.1028838157653809, |
|
"learning_rate": 6.335647438633987e-05, |
|
"loss": 3.1243, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0548689365386963, |
|
"learning_rate": 6.319574297645374e-05, |
|
"loss": 3.1802, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.1255388259887695, |
|
"learning_rate": 6.303486486086654e-05, |
|
"loss": 3.1596, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.1859532594680786, |
|
"learning_rate": 6.287384182816546e-05, |
|
"loss": 3.1474, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.064068078994751, |
|
"learning_rate": 6.271267566854883e-05, |
|
"loss": 3.1942, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0523204803466797, |
|
"learning_rate": 6.255136817380618e-05, |
|
"loss": 3.1562, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.1364482641220093, |
|
"learning_rate": 6.23899211372984e-05, |
|
"loss": 3.0299, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.1925618648529053, |
|
"learning_rate": 6.222833635393772e-05, |
|
"loss": 3.0583, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0866798162460327, |
|
"learning_rate": 6.206661562016782e-05, |
|
"loss": 3.1356, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0916551351547241, |
|
"learning_rate": 6.190476073394382e-05, |
|
"loss": 3.045, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0586055517196655, |
|
"learning_rate": 6.17427734947123e-05, |
|
"loss": 3.1927, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0540417432785034, |
|
"learning_rate": 6.158065570339127e-05, |
|
"loss": 3.1094, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0485178232192993, |
|
"learning_rate": 6.141840916235021e-05, |
|
"loss": 3.1503, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.1303540468215942, |
|
"learning_rate": 6.125603567539001e-05, |
|
"loss": 3.1794, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0090653896331787, |
|
"learning_rate": 6.109353704772284e-05, |
|
"loss": 3.1685, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0770632028579712, |
|
"learning_rate": 6.0930915085952164e-05, |
|
"loss": 3.0851, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0763593912124634, |
|
"learning_rate": 6.076817159805267e-05, |
|
"loss": 3.0592, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.077744722366333, |
|
"learning_rate": 6.06053083933501e-05, |
|
"loss": 3.0575, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0479717254638672, |
|
"learning_rate": 6.044232728250116e-05, |
|
"loss": 3.2182, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.1060945987701416, |
|
"learning_rate": 6.027923007747339e-05, |
|
"loss": 3.1065, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.0798442363739014, |
|
"learning_rate": 6.011601859152506e-05, |
|
"loss": 2.9653, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.166008472442627, |
|
"learning_rate": 5.995269463918495e-05, |
|
"loss": 3.0984, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.1726555824279785, |
|
"learning_rate": 5.97892600362322e-05, |
|
"loss": 3.086, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.0481421947479248, |
|
"learning_rate": 5.962571659967614e-05, |
|
"loss": 3.1516, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9961311221122742, |
|
"learning_rate": 5.946206614773606e-05, |
|
"loss": 3.1116, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.0177178382873535, |
|
"learning_rate": 5.929831049982103e-05, |
|
"loss": 3.1132, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.084097981452942, |
|
"learning_rate": 5.9134451476509633e-05, |
|
"loss": 3.1533, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.1300958395004272, |
|
"learning_rate": 5.897049089952974e-05, |
|
"loss": 3.069, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.1346057653427124, |
|
"learning_rate": 5.880643059173826e-05, |
|
"loss": 3.1014, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0111093521118164, |
|
"learning_rate": 5.864227237710093e-05, |
|
"loss": 3.1847, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.1515612602233887, |
|
"learning_rate": 5.847801808067189e-05, |
|
"loss": 3.1194, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0243968963623047, |
|
"learning_rate": 5.831366952857357e-05, |
|
"loss": 3.0739, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0244972705841064, |
|
"learning_rate": 5.814922854797622e-05, |
|
"loss": 3.0589, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.1009114980697632, |
|
"learning_rate": 5.798469696707775e-05, |
|
"loss": 3.0715, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0368943214416504, |
|
"learning_rate": 5.782007661508331e-05, |
|
"loss": 3.0485, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.1286373138427734, |
|
"learning_rate": 5.765536932218495e-05, |
|
"loss": 3.1076, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0644299983978271, |
|
"learning_rate": 5.7490576919541315e-05, |
|
"loss": 3.0415, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.1599712371826172, |
|
"learning_rate": 5.732570123925729e-05, |
|
"loss": 3.0608, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.2678030729293823, |
|
"learning_rate": 5.7160744114363593e-05, |
|
"loss": 3.1866, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0837265253067017, |
|
"learning_rate": 5.699570737879641e-05, |
|
"loss": 3.0818, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.1460355520248413, |
|
"learning_rate": 5.683059286737702e-05, |
|
"loss": 3.0558, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.1030579805374146, |
|
"learning_rate": 5.666540241579139e-05, |
|
"loss": 3.15, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.2594730854034424, |
|
"learning_rate": 5.6500137860569766e-05, |
|
"loss": 3.1769, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.2388299703598022, |
|
"learning_rate": 5.633480103906624e-05, |
|
"loss": 3.0187, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.030984878540039, |
|
"learning_rate": 5.616939378943834e-05, |
|
"loss": 3.0997, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0577046871185303, |
|
"learning_rate": 5.6003917950626595e-05, |
|
"loss": 3.1403, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.1050175428390503, |
|
"learning_rate": 5.583837536233407e-05, |
|
"loss": 3.0793, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.063862681388855, |
|
"learning_rate": 5.567276786500596e-05, |
|
"loss": 3.0294, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0375818014144897, |
|
"learning_rate": 5.5507097299809054e-05, |
|
"loss": 3.1218, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.9979878664016724, |
|
"learning_rate": 5.534136550861133e-05, |
|
"loss": 3.1139, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0724773406982422, |
|
"learning_rate": 5.5175574333961465e-05, |
|
"loss": 3.0563, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.01706063747406, |
|
"learning_rate": 5.500972561906832e-05, |
|
"loss": 3.1836, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0690789222717285, |
|
"learning_rate": 5.484382120778048e-05, |
|
"loss": 3.1078, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.1447081565856934, |
|
"learning_rate": 5.467786294456575e-05, |
|
"loss": 3.0429, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0642063617706299, |
|
"learning_rate": 5.451185267449061e-05, |
|
"loss": 3.0135, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0696765184402466, |
|
"learning_rate": 5.43457922431998e-05, |
|
"loss": 3.1465, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0368304252624512, |
|
"learning_rate": 5.417968349689566e-05, |
|
"loss": 2.9582, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.098090410232544, |
|
"learning_rate": 5.401352828231772e-05, |
|
"loss": 3.0758, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0280003547668457, |
|
"learning_rate": 5.384732844672211e-05, |
|
"loss": 3.0935, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0697113275527954, |
|
"learning_rate": 5.368108583786107e-05, |
|
"loss": 3.0966, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.1267234086990356, |
|
"learning_rate": 5.3514802303962344e-05, |
|
"loss": 3.0999, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.1041789054870605, |
|
"learning_rate": 5.334847969370868e-05, |
|
"loss": 3.0606, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.1038250923156738, |
|
"learning_rate": 5.3182119856217284e-05, |
|
"loss": 3.1977, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0840222835540771, |
|
"learning_rate": 5.3015724641019214e-05, |
|
"loss": 3.0736, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0754737854003906, |
|
"learning_rate": 5.284929589803884e-05, |
|
"loss": 3.115, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0331034660339355, |
|
"learning_rate": 5.2682835477573336e-05, |
|
"loss": 3.0537, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9996258616447449, |
|
"learning_rate": 5.2516345230271965e-05, |
|
"loss": 3.0924, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.054716944694519, |
|
"learning_rate": 5.234982700711569e-05, |
|
"loss": 3.0757, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.0223090648651123, |
|
"learning_rate": 5.218328265939643e-05, |
|
"loss": 3.066, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.2190570831298828, |
|
"learning_rate": 5.201671403869657e-05, |
|
"loss": 3.0556, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.1981064081192017, |
|
"learning_rate": 5.1850122996868366e-05, |
|
"loss": 3.087, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.1506836414337158, |
|
"learning_rate": 5.168351138601334e-05, |
|
"loss": 3.0792, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.1408671140670776, |
|
"learning_rate": 5.1516881058461675e-05, |
|
"loss": 3.151, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.0261212587356567, |
|
"learning_rate": 5.135023386675166e-05, |
|
"loss": 3.0216, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.0941778421401978, |
|
"learning_rate": 5.118357166360906e-05, |
|
"loss": 3.0108, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.1151127815246582, |
|
"learning_rate": 5.101689630192655e-05, |
|
"loss": 3.13, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.0719926357269287, |
|
"learning_rate": 5.085020963474307e-05, |
|
"loss": 2.9846, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.1672226190567017, |
|
"learning_rate": 5.068351351522329e-05, |
|
"loss": 3.1311, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.039842128753662, |
|
"learning_rate": 5.0516809796636935e-05, |
|
"loss": 3.1064, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.050147533416748, |
|
"learning_rate": 5.035010033233821e-05, |
|
"loss": 3.0082, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9957916736602783, |
|
"learning_rate": 5.018338697574523e-05, |
|
"loss": 3.0923, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.055060625076294, |
|
"learning_rate": 5.0016671580319354e-05, |
|
"loss": 2.9972, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.0098164081573486, |
|
"learning_rate": 4.984995599954461e-05, |
|
"loss": 3.1395, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.0814324617385864, |
|
"learning_rate": 4.968324208690712e-05, |
|
"loss": 3.0554, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.0801881551742554, |
|
"learning_rate": 4.951653169587441e-05, |
|
"loss": 3.0265, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.0839911699295044, |
|
"learning_rate": 4.93498266798749e-05, |
|
"loss": 3.1256, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0653108358383179, |
|
"learning_rate": 4.918312889227722e-05, |
|
"loss": 3.1095, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0367735624313354, |
|
"learning_rate": 4.901644018636966e-05, |
|
"loss": 3.0546, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0559877157211304, |
|
"learning_rate": 4.8849762415339526e-05, |
|
"loss": 2.9952, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0303627252578735, |
|
"learning_rate": 4.868309743225256e-05, |
|
"loss": 3.0636, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.1090822219848633, |
|
"learning_rate": 4.851644709003233e-05, |
|
"loss": 3.0876, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0675830841064453, |
|
"learning_rate": 4.834981324143964e-05, |
|
"loss": 3.0573, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0036590099334717, |
|
"learning_rate": 4.818319773905191e-05, |
|
"loss": 3.0297, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.041754126548767, |
|
"learning_rate": 4.801660243524261e-05, |
|
"loss": 2.9507, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0789331197738647, |
|
"learning_rate": 4.7850029182160626e-05, |
|
"loss": 3.0633, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.015762209892273, |
|
"learning_rate": 4.768347983170973e-05, |
|
"loss": 3.0388, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.0645495653152466, |
|
"learning_rate": 4.7516956235527884e-05, |
|
"loss": 3.1631, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.0723949670791626, |
|
"learning_rate": 4.735046024496682e-05, |
|
"loss": 2.9642, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.036794900894165, |
|
"learning_rate": 4.7183993711071286e-05, |
|
"loss": 3.0467, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.1749889850616455, |
|
"learning_rate": 4.7017558484558554e-05, |
|
"loss": 3.0597, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.0650322437286377, |
|
"learning_rate": 4.6851156415797844e-05, |
|
"loss": 3.0598, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.0720289945602417, |
|
"learning_rate": 4.6684789354789746e-05, |
|
"loss": 2.9977, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.0925712585449219, |
|
"learning_rate": 4.651845915114563e-05, |
|
"loss": 3.1103, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.063539981842041, |
|
"learning_rate": 4.6352167654067095e-05, |
|
"loss": 3.0225, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.0431323051452637, |
|
"learning_rate": 4.618591671232544e-05, |
|
"loss": 3.0463, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0726792812347412, |
|
"learning_rate": 4.601970817424106e-05, |
|
"loss": 3.0796, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.1471610069274902, |
|
"learning_rate": 4.585354388766292e-05, |
|
"loss": 3.0079, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.1070219278335571, |
|
"learning_rate": 4.568742569994802e-05, |
|
"loss": 3.1182, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.158220648765564, |
|
"learning_rate": 4.552135545794086e-05, |
|
"loss": 3.0372, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0492193698883057, |
|
"learning_rate": 4.535533500795288e-05, |
|
"loss": 3.0207, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.2903916835784912, |
|
"learning_rate": 4.5189366195741953e-05, |
|
"loss": 3.0064, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0783137083053589, |
|
"learning_rate": 4.502345086649186e-05, |
|
"loss": 3.0026, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0791529417037964, |
|
"learning_rate": 4.485759086479179e-05, |
|
"loss": 3.033, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0538674592971802, |
|
"learning_rate": 4.469178803461579e-05, |
|
"loss": 3.0676, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.1873081922531128, |
|
"learning_rate": 4.4526044219302326e-05, |
|
"loss": 2.9884, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.1096831560134888, |
|
"learning_rate": 4.4360361261533745e-05, |
|
"loss": 3.0778, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.0487239360809326, |
|
"learning_rate": 4.419474100331579e-05, |
|
"loss": 3.0849, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.0631368160247803, |
|
"learning_rate": 4.402918528595715e-05, |
|
"loss": 3.0974, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.0593897104263306, |
|
"learning_rate": 4.386369595004896e-05, |
|
"loss": 3.0487, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9948830604553223, |
|
"learning_rate": 4.3698274835444354e-05, |
|
"loss": 3.0793, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.033443570137024, |
|
"learning_rate": 4.3532923781238e-05, |
|
"loss": 3.101, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.073686957359314, |
|
"learning_rate": 4.336764462574566e-05, |
|
"loss": 3.082, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.032832145690918, |
|
"learning_rate": 4.320243920648376e-05, |
|
"loss": 3.006, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.1529659032821655, |
|
"learning_rate": 4.303730936014894e-05, |
|
"loss": 2.9774, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.0000460147857666, |
|
"learning_rate": 4.287225692259765e-05, |
|
"loss": 2.9237, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9973639249801636, |
|
"learning_rate": 4.270728372882575e-05, |
|
"loss": 3.0115, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.05452299118042, |
|
"learning_rate": 4.254239161294804e-05, |
|
"loss": 3.0474, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.0591851472854614, |
|
"learning_rate": 4.237758240817802e-05, |
|
"loss": 3.0432, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.0358829498291016, |
|
"learning_rate": 4.2212857946807336e-05, |
|
"loss": 3.0401, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.175520658493042, |
|
"learning_rate": 4.2048220060185516e-05, |
|
"loss": 3.0245, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.0333977937698364, |
|
"learning_rate": 4.188367057869957e-05, |
|
"loss": 2.8782, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.07869553565979, |
|
"learning_rate": 4.171921133175365e-05, |
|
"loss": 3.0724, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.0302010774612427, |
|
"learning_rate": 4.155484414774872e-05, |
|
"loss": 2.9962, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.0263460874557495, |
|
"learning_rate": 4.139057085406221e-05, |
|
"loss": 3.0747, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.1280380487442017, |
|
"learning_rate": 4.1226393277027726e-05, |
|
"loss": 3.0475, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0348632335662842, |
|
"learning_rate": 4.106231324191471e-05, |
|
"loss": 3.0591, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0165739059448242, |
|
"learning_rate": 4.089833257290817e-05, |
|
"loss": 3.1233, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0293173789978027, |
|
"learning_rate": 4.073445309308842e-05, |
|
"loss": 3.0282, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0683913230895996, |
|
"learning_rate": 4.0570676624410756e-05, |
|
"loss": 3.0318, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0587563514709473, |
|
"learning_rate": 4.040700498768525e-05, |
|
"loss": 3.0647, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0239886045455933, |
|
"learning_rate": 4.024344000255648e-05, |
|
"loss": 2.9992, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0538274049758911, |
|
"learning_rate": 4.0079983487483313e-05, |
|
"loss": 3.0528, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0498000383377075, |
|
"learning_rate": 3.9916637259718683e-05, |
|
"loss": 3.105, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.211574912071228, |
|
"learning_rate": 3.9753403135289396e-05, |
|
"loss": 2.9811, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.0358808040618896, |
|
"learning_rate": 3.9590282928975914e-05, |
|
"loss": 2.9964, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.0570118427276611, |
|
"learning_rate": 3.942727845429221e-05, |
|
"loss": 2.906, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.2332627773284912, |
|
"learning_rate": 3.926439152346558e-05, |
|
"loss": 3.022, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.1530132293701172, |
|
"learning_rate": 3.910162394741653e-05, |
|
"loss": 2.9988, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.0132982730865479, |
|
"learning_rate": 3.893897753573861e-05, |
|
"loss": 2.9654, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.0742069482803345, |
|
"learning_rate": 3.877645409667829e-05, |
|
"loss": 3.0135, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.0182029008865356, |
|
"learning_rate": 3.861405543711491e-05, |
|
"loss": 2.9888, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.0430015325546265, |
|
"learning_rate": 3.8451783362540507e-05, |
|
"loss": 3.0477, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.1208815574645996, |
|
"learning_rate": 3.828963967703983e-05, |
|
"loss": 3.0401, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.1161303520202637, |
|
"learning_rate": 3.8127626183270223e-05, |
|
"loss": 2.9986, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.0346485376358032, |
|
"learning_rate": 3.796574468244161e-05, |
|
"loss": 3.0071, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.0584827661514282, |
|
"learning_rate": 3.7803996974296444e-05, |
|
"loss": 3.0357, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.0474509000778198, |
|
"learning_rate": 3.7642384857089776e-05, |
|
"loss": 2.9715, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.0421720743179321, |
|
"learning_rate": 3.748091012756915e-05, |
|
"loss": 2.9982, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.0171830654144287, |
|
"learning_rate": 3.731957458095467e-05, |
|
"loss": 3.0238, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.1705058813095093, |
|
"learning_rate": 3.71583800109191e-05, |
|
"loss": 2.9714, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.02668297290802, |
|
"learning_rate": 3.699732820956784e-05, |
|
"loss": 3.0152, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.1950023174285889, |
|
"learning_rate": 3.6836420967419057e-05, |
|
"loss": 2.958, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.0784318447113037, |
|
"learning_rate": 3.6675660073383745e-05, |
|
"loss": 2.9889, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.0552057027816772, |
|
"learning_rate": 3.6515047314745856e-05, |
|
"loss": 2.9629, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.1507591009140015, |
|
"learning_rate": 3.6354584477142437e-05, |
|
"loss": 3.0797, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.0158036947250366, |
|
"learning_rate": 3.6194273344543736e-05, |
|
"loss": 3.0202, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9868055582046509, |
|
"learning_rate": 3.6034115699233425e-05, |
|
"loss": 2.9834, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.0302212238311768, |
|
"learning_rate": 3.5874113321788736e-05, |
|
"loss": 3.047, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.0958445072174072, |
|
"learning_rate": 3.571426799106071e-05, |
|
"loss": 2.9674, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9823729395866394, |
|
"learning_rate": 3.555458148415437e-05, |
|
"loss": 3.0503, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.0195577144622803, |
|
"learning_rate": 3.539505557640901e-05, |
|
"loss": 2.9481, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.0259385108947754, |
|
"learning_rate": 3.523569204137843e-05, |
|
"loss": 2.9699, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.059617519378662, |
|
"learning_rate": 3.5076492650811246e-05, |
|
"loss": 2.9348, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0996873378753662, |
|
"learning_rate": 3.491745917463113e-05, |
|
"loss": 3.0041, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0191603899002075, |
|
"learning_rate": 3.475859338091721e-05, |
|
"loss": 2.9241, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.084431529045105, |
|
"learning_rate": 3.4599897035884374e-05, |
|
"loss": 3.1061, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0117292404174805, |
|
"learning_rate": 3.444137190386363e-05, |
|
"loss": 3.0229, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.034151315689087, |
|
"learning_rate": 3.4283019747282514e-05, |
|
"loss": 2.9843, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.1034634113311768, |
|
"learning_rate": 3.412484232664545e-05, |
|
"loss": 3.0178, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.256437063217163, |
|
"learning_rate": 3.396684140051424e-05, |
|
"loss": 3.0308, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.1831390857696533, |
|
"learning_rate": 3.3809018725488466e-05, |
|
"loss": 3.0132, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0726600885391235, |
|
"learning_rate": 3.365137605618598e-05, |
|
"loss": 2.9744, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0197064876556396, |
|
"learning_rate": 3.3493915145223395e-05, |
|
"loss": 2.9329, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0443058013916016, |
|
"learning_rate": 3.3336637743196584e-05, |
|
"loss": 3.0242, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0014925003051758, |
|
"learning_rate": 3.317954559866126e-05, |
|
"loss": 2.9349, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0623462200164795, |
|
"learning_rate": 3.302264045811344e-05, |
|
"loss": 2.9755, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0433576107025146, |
|
"learning_rate": 3.286592406597021e-05, |
|
"loss": 2.9424, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0413775444030762, |
|
"learning_rate": 3.270939816455012e-05, |
|
"loss": 2.988, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0432535409927368, |
|
"learning_rate": 3.255306449405395e-05, |
|
"loss": 2.9819, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.1115838289260864, |
|
"learning_rate": 3.2396924792545304e-05, |
|
"loss": 2.9621, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0652596950531006, |
|
"learning_rate": 3.224098079593132e-05, |
|
"loss": 2.9722, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0429493188858032, |
|
"learning_rate": 3.2085234237943354e-05, |
|
"loss": 2.9245, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.1402490139007568, |
|
"learning_rate": 3.19296868501177e-05, |
|
"loss": 2.9569, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.0204291343688965, |
|
"learning_rate": 3.177434036177636e-05, |
|
"loss": 3.0853, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.1746922731399536, |
|
"learning_rate": 3.1619196500007804e-05, |
|
"loss": 2.9717, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.0246679782867432, |
|
"learning_rate": 3.146425698964776e-05, |
|
"loss": 2.994, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.0718449354171753, |
|
"learning_rate": 3.1309523553260046e-05, |
|
"loss": 3.082, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.0518536567687988, |
|
"learning_rate": 3.115499791111743e-05, |
|
"loss": 3.0246, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.0678455829620361, |
|
"learning_rate": 3.10006817811825e-05, |
|
"loss": 2.8946, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.118604302406311, |
|
"learning_rate": 3.084657687908855e-05, |
|
"loss": 3.0081, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.168013334274292, |
|
"learning_rate": 3.069268491812052e-05, |
|
"loss": 2.9994, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.0308539867401123, |
|
"learning_rate": 3.0539007609195934e-05, |
|
"loss": 3.0219, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.0488004684448242, |
|
"learning_rate": 3.0385546660845908e-05, |
|
"loss": 2.9881, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.0688416957855225, |
|
"learning_rate": 3.0232303779196132e-05, |
|
"loss": 3.0163, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.0394408702850342, |
|
"learning_rate": 3.0079280667947885e-05, |
|
"loss": 2.986, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.0241698026657104, |
|
"learning_rate": 2.9926479028359132e-05, |
|
"loss": 2.906, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.0117335319519043, |
|
"learning_rate": 2.97739005592256e-05, |
|
"loss": 3.0282, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.05054771900177, |
|
"learning_rate": 2.962154695686187e-05, |
|
"loss": 2.9591, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.0562255382537842, |
|
"learning_rate": 2.9469419915082536e-05, |
|
"loss": 3.0694, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.039699673652649, |
|
"learning_rate": 2.9317521125183368e-05, |
|
"loss": 2.9505, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.013299822807312, |
|
"learning_rate": 2.9165852275922524e-05, |
|
"loss": 2.946, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.0763413906097412, |
|
"learning_rate": 2.901441505350174e-05, |
|
"loss": 2.9782, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.0640761852264404, |
|
"learning_rate": 2.886321114154762e-05, |
|
"loss": 2.9114, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.0359145402908325, |
|
"learning_rate": 2.87122422210929e-05, |
|
"loss": 2.9279, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.0430670976638794, |
|
"learning_rate": 2.8561509970557736e-05, |
|
"loss": 2.9701, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.0799660682678223, |
|
"learning_rate": 2.8411016065731146e-05, |
|
"loss": 2.989, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.0684157609939575, |
|
"learning_rate": 2.826076217975222e-05, |
|
"loss": 2.9469, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.1316529512405396, |
|
"learning_rate": 2.8110749983091632e-05, |
|
"loss": 2.9968, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9836276173591614, |
|
"learning_rate": 2.7960981143533053e-05, |
|
"loss": 3.0318, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.0165281295776367, |
|
"learning_rate": 2.781145732615457e-05, |
|
"loss": 2.9819, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.0563925504684448, |
|
"learning_rate": 2.7662180193310218e-05, |
|
"loss": 2.9495, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9946759343147278, |
|
"learning_rate": 2.751315140461145e-05, |
|
"loss": 2.9372, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.0857210159301758, |
|
"learning_rate": 2.7364372616908744e-05, |
|
"loss": 2.9728, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.0404404401779175, |
|
"learning_rate": 2.7215845484273152e-05, |
|
"loss": 2.9315, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.0203680992126465, |
|
"learning_rate": 2.7067571657977893e-05, |
|
"loss": 2.9986, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.0924718379974365, |
|
"learning_rate": 2.691955278648003e-05, |
|
"loss": 2.9762, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.0304733514785767, |
|
"learning_rate": 2.6771790515402112e-05, |
|
"loss": 3.059, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.2347314357757568, |
|
"learning_rate": 2.6624286487513916e-05, |
|
"loss": 2.9933, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.1086734533309937, |
|
"learning_rate": 2.6477042342714137e-05, |
|
"loss": 3.002, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.0901292562484741, |
|
"learning_rate": 2.633005971801219e-05, |
|
"loss": 3.0097, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.1064410209655762, |
|
"learning_rate": 2.6183340247510013e-05, |
|
"loss": 2.9945, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.0292625427246094, |
|
"learning_rate": 2.6036885562383856e-05, |
|
"loss": 3.0133, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.1139415502548218, |
|
"learning_rate": 2.5890697290866206e-05, |
|
"loss": 2.9208, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.0623078346252441, |
|
"learning_rate": 2.5744777058227642e-05, |
|
"loss": 2.9424, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.0108777284622192, |
|
"learning_rate": 2.5599126486758777e-05, |
|
"loss": 2.9031, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.0001078844070435, |
|
"learning_rate": 2.5453747195752243e-05, |
|
"loss": 2.8608, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.1540250778198242, |
|
"learning_rate": 2.530864080148464e-05, |
|
"loss": 2.9748, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.124299168586731, |
|
"learning_rate": 2.5163808917198615e-05, |
|
"loss": 2.9228, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.0335966348648071, |
|
"learning_rate": 2.501925315308492e-05, |
|
"loss": 2.9867, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.0575364828109741, |
|
"learning_rate": 2.4874975116264477e-05, |
|
"loss": 2.972, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.044665813446045, |
|
"learning_rate": 2.4730976410770534e-05, |
|
"loss": 2.979, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.0527253150939941, |
|
"learning_rate": 2.458725863753084e-05, |
|
"loss": 2.9601, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.0238512754440308, |
|
"learning_rate": 2.4443823394349834e-05, |
|
"loss": 2.8508, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.101250410079956, |
|
"learning_rate": 2.430067227589088e-05, |
|
"loss": 2.9144, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.9972196817398071, |
|
"learning_rate": 2.4157806873658517e-05, |
|
"loss": 2.9406, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.162981629371643, |
|
"learning_rate": 2.401522877598087e-05, |
|
"loss": 2.9642, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.107654094696045, |
|
"learning_rate": 2.3872939567991827e-05, |
|
"loss": 2.9408, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.0248068571090698, |
|
"learning_rate": 2.373094083161353e-05, |
|
"loss": 2.8816, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.029221534729004, |
|
"learning_rate": 2.358923414553877e-05, |
|
"loss": 2.9401, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.0091698169708252, |
|
"learning_rate": 2.3447821085213405e-05, |
|
"loss": 2.8775, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.0124790668487549, |
|
"learning_rate": 2.3306703222818878e-05, |
|
"loss": 2.9208, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.0469365119934082, |
|
"learning_rate": 2.3165882127254705e-05, |
|
"loss": 2.9814, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.2536821365356445, |
|
"learning_rate": 2.302535936412108e-05, |
|
"loss": 2.9304, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.0279680490493774, |
|
"learning_rate": 2.2885136495701415e-05, |
|
"loss": 2.8435, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.0763942003250122, |
|
"learning_rate": 2.274521508094501e-05, |
|
"loss": 2.9261, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.030404806137085, |
|
"learning_rate": 2.2605596675449698e-05, |
|
"loss": 3.0172, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.0439190864562988, |
|
"learning_rate": 2.246628283144457e-05, |
|
"loss": 2.8992, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.0290809869766235, |
|
"learning_rate": 2.232727509777269e-05, |
|
"loss": 3.0806, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.058119773864746, |
|
"learning_rate": 2.2188575019873932e-05, |
|
"loss": 2.9167, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0898820161819458, |
|
"learning_rate": 2.2050184139767704e-05, |
|
"loss": 2.8946, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0208537578582764, |
|
"learning_rate": 2.191210399603591e-05, |
|
"loss": 2.931, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.017960786819458, |
|
"learning_rate": 2.1774336123805772e-05, |
|
"loss": 2.9429, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.1384835243225098, |
|
"learning_rate": 2.1636882054732776e-05, |
|
"loss": 2.9701, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.1678301095962524, |
|
"learning_rate": 2.1499743316983684e-05, |
|
"loss": 2.9414, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.090770959854126, |
|
"learning_rate": 2.1362921435219473e-05, |
|
"loss": 2.9129, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0095657110214233, |
|
"learning_rate": 2.1226417930578464e-05, |
|
"loss": 2.8908, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0220203399658203, |
|
"learning_rate": 2.109023432065935e-05, |
|
"loss": 2.9649, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0544750690460205, |
|
"learning_rate": 2.095437211950434e-05, |
|
"loss": 2.9183, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0294827222824097, |
|
"learning_rate": 2.0818832837582352e-05, |
|
"loss": 2.9431, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.0410583019256592, |
|
"learning_rate": 2.068361798177218e-05, |
|
"loss": 2.9157, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.1043765544891357, |
|
"learning_rate": 2.0548729055345778e-05, |
|
"loss": 2.9759, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.0321478843688965, |
|
"learning_rate": 2.0414167557951514e-05, |
|
"loss": 2.8631, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.0459082126617432, |
|
"learning_rate": 2.0279934985597527e-05, |
|
"loss": 3.0045, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.0183168649673462, |
|
"learning_rate": 2.0146032830635054e-05, |
|
"loss": 2.9016, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.006795883178711, |
|
"learning_rate": 2.001246258174192e-05, |
|
"loss": 2.9133, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.0646164417266846, |
|
"learning_rate": 1.9879225723905886e-05, |
|
"loss": 2.9729, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.0548053979873657, |
|
"learning_rate": 1.9746323738408203e-05, |
|
"loss": 2.9754, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.073244571685791, |
|
"learning_rate": 1.9613758102807117e-05, |
|
"loss": 2.9281, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.0305603742599487, |
|
"learning_rate": 1.9481530290921474e-05, |
|
"loss": 3.0633, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.0235049724578857, |
|
"learning_rate": 1.934964177281428e-05, |
|
"loss": 2.9378, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.0278953313827515, |
|
"learning_rate": 1.9218094014776434e-05, |
|
"loss": 2.9178, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.0318273305892944, |
|
"learning_rate": 1.9086888479310333e-05, |
|
"loss": 3.0008, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.027692198753357, |
|
"learning_rate": 1.895602662511371e-05, |
|
"loss": 2.9887, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.0255205631256104, |
|
"learning_rate": 1.8825509907063327e-05, |
|
"loss": 2.9067, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.032726526260376, |
|
"learning_rate": 1.8695339776198872e-05, |
|
"loss": 2.8492, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.0367424488067627, |
|
"learning_rate": 1.8565517679706783e-05, |
|
"loss": 2.9587, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.0441900491714478, |
|
"learning_rate": 1.8436045060904174e-05, |
|
"loss": 2.9499, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.090521216392517, |
|
"learning_rate": 1.830692335922279e-05, |
|
"loss": 2.9482, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.057732105255127, |
|
"learning_rate": 1.8178154010192994e-05, |
|
"loss": 2.9731, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.021804928779602, |
|
"learning_rate": 1.8049738445427822e-05, |
|
"loss": 2.978, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.0467053651809692, |
|
"learning_rate": 1.7921678092607052e-05, |
|
"loss": 2.9755, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.0179669857025146, |
|
"learning_rate": 1.7793974375461352e-05, |
|
"loss": 2.938, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.0066503286361694, |
|
"learning_rate": 1.7666628713756417e-05, |
|
"loss": 2.8577, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.0826014280319214, |
|
"learning_rate": 1.7539642523277228e-05, |
|
"loss": 2.9725, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.091303825378418, |
|
"learning_rate": 1.7413017215812273e-05, |
|
"loss": 2.9723, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.063717246055603, |
|
"learning_rate": 1.728675419913788e-05, |
|
"loss": 2.9279, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.069888949394226, |
|
"learning_rate": 1.716085487700253e-05, |
|
"loss": 2.9628, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.1069707870483398, |
|
"learning_rate": 1.703532064911131e-05, |
|
"loss": 2.9247, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.064904808998108, |
|
"learning_rate": 1.6910152911110283e-05, |
|
"loss": 2.9383, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.0969486236572266, |
|
"learning_rate": 1.6785353054571024e-05, |
|
"loss": 2.9503, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.0619415044784546, |
|
"learning_rate": 1.666092246697512e-05, |
|
"loss": 2.8936, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.1957061290740967, |
|
"learning_rate": 1.6536862531698766e-05, |
|
"loss": 2.9335, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.1700977087020874, |
|
"learning_rate": 1.6413174627997328e-05, |
|
"loss": 3.0092, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.0224504470825195, |
|
"learning_rate": 1.6289860130990147e-05, |
|
"loss": 2.9349, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.0252118110656738, |
|
"learning_rate": 1.6166920411645064e-05, |
|
"loss": 2.9296, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.0665481090545654, |
|
"learning_rate": 1.6044356836763315e-05, |
|
"loss": 2.9078, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.0700303316116333, |
|
"learning_rate": 1.5922170768964285e-05, |
|
"loss": 2.9363, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.104561448097229, |
|
"learning_rate": 1.5800363566670362e-05, |
|
"loss": 3.0238, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.0166821479797363, |
|
"learning_rate": 1.5678936584091852e-05, |
|
"loss": 2.8869, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.0269566774368286, |
|
"learning_rate": 1.5557891171211892e-05, |
|
"loss": 3.0122, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.0524886846542358, |
|
"learning_rate": 1.5437228673771465e-05, |
|
"loss": 2.9004, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.018042802810669, |
|
"learning_rate": 1.5316950433254445e-05, |
|
"loss": 2.9853, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.0478148460388184, |
|
"learning_rate": 1.5197057786872649e-05, |
|
"loss": 2.8939, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.0328203439712524, |
|
"learning_rate": 1.5077552067551015e-05, |
|
"loss": 2.9921, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.0347979068756104, |
|
"learning_rate": 1.4958434603912747e-05, |
|
"loss": 2.9324, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.0036380290985107, |
|
"learning_rate": 1.4839706720264546e-05, |
|
"loss": 2.9383, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.990047037601471, |
|
"learning_rate": 1.4721369736581924e-05, |
|
"loss": 2.9131, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.0028934478759766, |
|
"learning_rate": 1.4603424968494484e-05, |
|
"loss": 2.9047, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.0312615633010864, |
|
"learning_rate": 1.448587372727132e-05, |
|
"loss": 2.8849, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.0701656341552734, |
|
"learning_rate": 1.4368717319806419e-05, |
|
"loss": 2.9952, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.0108087062835693, |
|
"learning_rate": 1.4251957048604152e-05, |
|
"loss": 2.8067, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.0179967880249023, |
|
"learning_rate": 1.413559421176479e-05, |
|
"loss": 2.9415, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.0271110534667969, |
|
"learning_rate": 1.4019630102970056e-05, |
|
"loss": 2.9605, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.0323963165283203, |
|
"learning_rate": 1.3904066011468753e-05, |
|
"loss": 2.9647, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.0108588933944702, |
|
"learning_rate": 1.3788903222062433e-05, |
|
"loss": 2.8892, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.0895428657531738, |
|
"learning_rate": 1.3674143015091118e-05, |
|
"loss": 2.9541, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.055143117904663, |
|
"learning_rate": 1.355978666641905e-05, |
|
"loss": 2.967, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.0495216846466064, |
|
"learning_rate": 1.3445835447420507e-05, |
|
"loss": 2.9434, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.1741347312927246, |
|
"learning_rate": 1.3332290624965688e-05, |
|
"loss": 2.8592, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.0490764379501343, |
|
"learning_rate": 1.3219153461406609e-05, |
|
"loss": 2.8898, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.0156313180923462, |
|
"learning_rate": 1.3106425214563078e-05, |
|
"loss": 2.8871, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.0351041555404663, |
|
"learning_rate": 1.2994107137708716e-05, |
|
"loss": 2.9649, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.9821464419364929, |
|
"learning_rate": 1.2882200479556988e-05, |
|
"loss": 2.7567, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.0293828248977661, |
|
"learning_rate": 1.2770706484247397e-05, |
|
"loss": 3.044, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.0418314933776855, |
|
"learning_rate": 1.2659626391331564e-05, |
|
"loss": 2.9845, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.1662086248397827, |
|
"learning_rate": 1.2548961435759493e-05, |
|
"loss": 2.9251, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.004215955734253, |
|
"learning_rate": 1.2438712847865846e-05, |
|
"loss": 2.7973, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.051112174987793, |
|
"learning_rate": 1.2328881853356244e-05, |
|
"loss": 2.879, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.0411102771759033, |
|
"learning_rate": 1.221946967329365e-05, |
|
"loss": 2.9298, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.110374927520752, |
|
"learning_rate": 1.2110477524084796e-05, |
|
"loss": 2.8414, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.0748777389526367, |
|
"learning_rate": 1.2001906617466657e-05, |
|
"loss": 2.9641, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.0381336212158203, |
|
"learning_rate": 1.1893758160492978e-05, |
|
"loss": 2.9166, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.0083729028701782, |
|
"learning_rate": 1.1786033355520859e-05, |
|
"loss": 2.7783, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.0652658939361572, |
|
"learning_rate": 1.1678733400197373e-05, |
|
"loss": 2.9004, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.041797161102295, |
|
"learning_rate": 1.1571859487446263e-05, |
|
"loss": 2.955, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.0103260278701782, |
|
"learning_rate": 1.1465412805454695e-05, |
|
"loss": 2.9241, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.046828031539917, |
|
"learning_rate": 1.1359394537660011e-05, |
|
"loss": 2.924, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.0475406646728516, |
|
"learning_rate": 1.125380586273661e-05, |
|
"loss": 2.9699, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.046965479850769, |
|
"learning_rate": 1.1148647954582808e-05, |
|
"loss": 2.8634, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.0719529390335083, |
|
"learning_rate": 1.1043921982307819e-05, |
|
"loss": 2.9003, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.020414113998413, |
|
"learning_rate": 1.0939629110218735e-05, |
|
"loss": 2.8621, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.0194061994552612, |
|
"learning_rate": 1.0835770497807596e-05, |
|
"loss": 2.9386, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.0563113689422607, |
|
"learning_rate": 1.0732347299738493e-05, |
|
"loss": 2.9796, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.0426850318908691, |
|
"learning_rate": 1.0629360665834732e-05, |
|
"loss": 2.9268, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.037277340888977, |
|
"learning_rate": 1.052681174106604e-05, |
|
"loss": 2.9021, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.0325355529785156, |
|
"learning_rate": 1.0424701665535852e-05, |
|
"loss": 2.9348, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.0245922803878784, |
|
"learning_rate": 1.0323031574468638e-05, |
|
"loss": 2.8854, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.025760293006897, |
|
"learning_rate": 1.0221802598197261e-05, |
|
"loss": 2.894, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.1027809381484985, |
|
"learning_rate": 1.0121015862150423e-05, |
|
"loss": 2.8603, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.08948814868927, |
|
"learning_rate": 1.0020672486840154e-05, |
|
"loss": 2.966, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.0438921451568604, |
|
"learning_rate": 9.920773587849364e-06, |
|
"loss": 2.885, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.0353776216506958, |
|
"learning_rate": 9.821320275819401e-06, |
|
"loss": 2.8779, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.0220935344696045, |
|
"learning_rate": 9.72231365643777e-06, |
|
"loss": 2.864, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.0436768531799316, |
|
"learning_rate": 9.623754830425779e-06, |
|
"loss": 2.991, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.072954773902893, |
|
"learning_rate": 9.52564489352632e-06, |
|
"loss": 2.9132, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.0170408487319946, |
|
"learning_rate": 9.427984936491702e-06, |
|
"loss": 2.9115, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.992523729801178, |
|
"learning_rate": 9.330776045071509e-06, |
|
"loss": 2.9234, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.0199633836746216, |
|
"learning_rate": 9.23401930000054e-06, |
|
"loss": 2.966, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.0794175863265991, |
|
"learning_rate": 9.137715776986772e-06, |
|
"loss": 2.9374, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.0142548084259033, |
|
"learning_rate": 9.041866546699434e-06, |
|
"loss": 2.9269, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.0138798952102661, |
|
"learning_rate": 8.946472674757078e-06, |
|
"loss": 2.847, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.9981939196586609, |
|
"learning_rate": 8.851535221715735e-06, |
|
"loss": 2.9043, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.0097284317016602, |
|
"learning_rate": 8.757055243057132e-06, |
|
"loss": 2.9587, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.0881823301315308, |
|
"learning_rate": 8.663033789176967e-06, |
|
"loss": 2.8831, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.1520594358444214, |
|
"learning_rate": 8.5694719053732e-06, |
|
"loss": 2.8992, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.141189694404602, |
|
"learning_rate": 8.476370631834458e-06, |
|
"loss": 2.8394, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.0872468948364258, |
|
"learning_rate": 8.383731003628452e-06, |
|
"loss": 2.8789, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.0820999145507812, |
|
"learning_rate": 8.291554050690508e-06, |
|
"loss": 2.8336, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.0161839723587036, |
|
"learning_rate": 8.199840797812058e-06, |
|
"loss": 2.8955, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.0762035846710205, |
|
"learning_rate": 8.108592264629295e-06, |
|
"loss": 2.9941, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.0508668422698975, |
|
"learning_rate": 8.017809465611803e-06, |
|
"loss": 2.797, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.0316884517669678, |
|
"learning_rate": 7.927493410051324e-06, |
|
"loss": 2.8648, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.9886409044265747, |
|
"learning_rate": 7.837645102050473e-06, |
|
"loss": 2.8788, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.0440611839294434, |
|
"learning_rate": 7.748265540511635e-06, |
|
"loss": 2.9606, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.057845950126648, |
|
"learning_rate": 7.65935571912582e-06, |
|
"loss": 2.8397, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.1614924669265747, |
|
"learning_rate": 7.5709166263616405e-06, |
|
"loss": 2.9892, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.0376337766647339, |
|
"learning_rate": 7.482949245454302e-06, |
|
"loss": 2.8739, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.079857587814331, |
|
"learning_rate": 7.3954545543946876e-06, |
|
"loss": 2.8968, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.051576018333435, |
|
"learning_rate": 7.308433525918468e-06, |
|
"loss": 2.9745, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.0478307008743286, |
|
"learning_rate": 7.221887127495313e-06, |
|
"loss": 2.9211, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.063901424407959, |
|
"learning_rate": 7.1358163213181114e-06, |
|
"loss": 2.911, |
|
"step": 4000 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 4811, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 1000, |
|
"total_flos": 1.1658574908358656e+19, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|