{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.3004115226337449,
  "eval_steps": 110,
  "global_step": 219,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0013717421124828531,
      "grad_norm": 13.75906753540039,
      "learning_rate": 1.8281535648994516e-06,
      "loss": 1.2468,
      "step": 1
    },
    {
      "epoch": 0.0027434842249657062,
      "grad_norm": 14.05058765411377,
      "learning_rate": 3.6563071297989032e-06,
      "loss": 1.4692,
      "step": 2
    },
    {
      "epoch": 0.00411522633744856,
      "grad_norm": 13.991771697998047,
      "learning_rate": 5.484460694698355e-06,
      "loss": 1.2457,
      "step": 3
    },
    {
      "epoch": 0.0054869684499314125,
      "grad_norm": 13.429465293884277,
      "learning_rate": 7.3126142595978065e-06,
      "loss": 1.1859,
      "step": 4
    },
    {
      "epoch": 0.006858710562414266,
      "grad_norm": 12.403002738952637,
      "learning_rate": 9.140767824497258e-06,
      "loss": 1.2404,
      "step": 5
    },
    {
      "epoch": 0.00823045267489712,
      "grad_norm": 1.5524662733078003,
      "learning_rate": 1.096892138939671e-05,
      "loss": 0.042,
      "step": 6
    },
    {
      "epoch": 0.009602194787379973,
      "grad_norm": 10.1494140625,
      "learning_rate": 1.2797074954296162e-05,
      "loss": 0.8856,
      "step": 7
    },
    {
      "epoch": 0.010973936899862825,
      "grad_norm": Infinity,
      "learning_rate": 1.2797074954296162e-05,
      "loss": 1.5417,
      "step": 8
    },
    {
      "epoch": 0.012345679012345678,
      "grad_norm": 0.4553964138031006,
      "learning_rate": 1.4625228519195613e-05,
      "loss": 0.023,
      "step": 9
    },
    {
      "epoch": 0.013717421124828532,
      "grad_norm": 10.332369804382324,
      "learning_rate": 1.6453382084095062e-05,
      "loss": 0.8655,
      "step": 10
    },
    {
      "epoch": 0.015089163237311385,
      "grad_norm": 7.544310092926025,
      "learning_rate": 1.8281535648994517e-05,
      "loss": 0.5894,
      "step": 11
    },
    {
      "epoch": 0.01646090534979424,
      "grad_norm": 8.16427230834961,
      "learning_rate": 2.0109689213893968e-05,
      "loss": 0.7053,
      "step": 12
    },
    {
      "epoch": 0.01783264746227709,
      "grad_norm": 7.403252601623535,
      "learning_rate": 2.193784277879342e-05,
      "loss": 0.5857,
      "step": 13
    },
    {
      "epoch": 0.019204389574759947,
      "grad_norm": 8.974674224853516,
      "learning_rate": 2.376599634369287e-05,
      "loss": 0.8375,
      "step": 14
    },
    {
      "epoch": 0.0205761316872428,
      "grad_norm": 13.417745590209961,
      "learning_rate": 2.5594149908592324e-05,
      "loss": 0.9043,
      "step": 15
    },
    {
      "epoch": 0.02194787379972565,
      "grad_norm": 12.881294250488281,
      "learning_rate": 2.742230347349177e-05,
      "loss": 0.8756,
      "step": 16
    },
    {
      "epoch": 0.023319615912208505,
      "grad_norm": 7.439205169677734,
      "learning_rate": 2.9250457038391226e-05,
      "loss": 0.5076,
      "step": 17
    },
    {
      "epoch": 0.024691358024691357,
      "grad_norm": 8.46964168548584,
      "learning_rate": 3.107861060329068e-05,
      "loss": 0.4757,
      "step": 18
    },
    {
      "epoch": 0.02606310013717421,
      "grad_norm": 17.02773666381836,
      "learning_rate": 3.2906764168190124e-05,
      "loss": 0.9993,
      "step": 19
    },
    {
      "epoch": 0.027434842249657063,
      "grad_norm": 6.2668776512146,
      "learning_rate": 3.473491773308958e-05,
      "loss": 0.2622,
      "step": 20
    },
    {
      "epoch": 0.02880658436213992,
      "grad_norm": 8.273824691772461,
      "learning_rate": 3.656307129798903e-05,
      "loss": 0.3497,
      "step": 21
    },
    {
      "epoch": 0.03017832647462277,
      "grad_norm": 5.5460944175720215,
      "learning_rate": 3.839122486288849e-05,
      "loss": 0.2514,
      "step": 22
    },
    {
      "epoch": 0.03155006858710562,
      "grad_norm": 4.283128261566162,
      "learning_rate": 4.0219378427787935e-05,
      "loss": 0.1673,
      "step": 23
    },
    {
      "epoch": 0.03292181069958848,
      "grad_norm": 4.708792209625244,
      "learning_rate": 4.204753199268738e-05,
      "loss": 0.203,
      "step": 24
    },
    {
      "epoch": 0.03429355281207133,
      "grad_norm": 14.491021156311035,
      "learning_rate": 4.387568555758684e-05,
      "loss": 0.698,
      "step": 25
    },
    {
      "epoch": 0.03566529492455418,
      "grad_norm": 7.903520584106445,
      "learning_rate": 4.570383912248629e-05,
      "loss": 0.3401,
      "step": 26
    },
    {
      "epoch": 0.037037037037037035,
      "grad_norm": 7.333080291748047,
      "learning_rate": 4.753199268738574e-05,
      "loss": 0.2185,
      "step": 27
    },
    {
      "epoch": 0.038408779149519894,
      "grad_norm": 8.625358581542969,
      "learning_rate": 4.936014625228519e-05,
      "loss": 0.4424,
      "step": 28
    },
    {
      "epoch": 0.039780521262002745,
      "grad_norm": 1.5588488578796387,
      "learning_rate": 5.118829981718465e-05,
      "loss": 0.0381,
      "step": 29
    },
    {
      "epoch": 0.0411522633744856,
      "grad_norm": 12.401138305664062,
      "learning_rate": 5.3016453382084095e-05,
      "loss": 0.8215,
      "step": 30
    },
    {
      "epoch": 0.04252400548696845,
      "grad_norm": 5.405845642089844,
      "learning_rate": 5.484460694698354e-05,
      "loss": 0.1542,
      "step": 31
    },
    {
      "epoch": 0.0438957475994513,
      "grad_norm": 8.558808326721191,
      "learning_rate": 5.6672760511883e-05,
      "loss": 0.6893,
      "step": 32
    },
    {
      "epoch": 0.04526748971193416,
      "grad_norm": 7.206741809844971,
      "learning_rate": 5.850091407678245e-05,
      "loss": 0.3773,
      "step": 33
    },
    {
      "epoch": 0.04663923182441701,
      "grad_norm": 8.300729751586914,
      "learning_rate": 6.0329067641681906e-05,
      "loss": 0.538,
      "step": 34
    },
    {
      "epoch": 0.04801097393689986,
      "grad_norm": 0.2500181496143341,
      "learning_rate": 6.215722120658135e-05,
      "loss": 0.0073,
      "step": 35
    },
    {
      "epoch": 0.04938271604938271,
      "grad_norm": 17.457223892211914,
      "learning_rate": 6.398537477148081e-05,
      "loss": 2.378,
      "step": 36
    },
    {
      "epoch": 0.05075445816186557,
      "grad_norm": 10.884990692138672,
      "learning_rate": 6.581352833638025e-05,
      "loss": 0.5949,
      "step": 37
    },
    {
      "epoch": 0.05212620027434842,
      "grad_norm": 10.013723373413086,
      "learning_rate": 6.764168190127972e-05,
      "loss": 0.7071,
      "step": 38
    },
    {
      "epoch": 0.053497942386831275,
      "grad_norm": 4.653324604034424,
      "learning_rate": 6.946983546617916e-05,
      "loss": 0.1607,
      "step": 39
    },
    {
      "epoch": 0.05486968449931413,
      "grad_norm": 9.527400970458984,
      "learning_rate": 7.129798903107861e-05,
      "loss": 0.7735,
      "step": 40
    },
    {
      "epoch": 0.056241426611796985,
      "grad_norm": 12.477531433105469,
      "learning_rate": 7.312614259597807e-05,
      "loss": 0.7594,
      "step": 41
    },
    {
      "epoch": 0.05761316872427984,
      "grad_norm": 5.369799613952637,
      "learning_rate": 7.495429616087751e-05,
      "loss": 0.3569,
      "step": 42
    },
    {
      "epoch": 0.05898491083676269,
      "grad_norm": 5.1385908126831055,
      "learning_rate": 7.678244972577697e-05,
      "loss": 0.2454,
      "step": 43
    },
    {
      "epoch": 0.06035665294924554,
      "grad_norm": 6.1807708740234375,
      "learning_rate": 7.861060329067642e-05,
      "loss": 0.2723,
      "step": 44
    },
    {
      "epoch": 0.06172839506172839,
      "grad_norm": 7.941879749298096,
      "learning_rate": 8.043875685557587e-05,
      "loss": 0.5338,
      "step": 45
    },
    {
      "epoch": 0.06310013717421124,
      "grad_norm": 5.015410423278809,
      "learning_rate": 8.226691042047532e-05,
      "loss": 0.1891,
      "step": 46
    },
    {
      "epoch": 0.0644718792866941,
      "grad_norm": 7.299699306488037,
      "learning_rate": 8.409506398537477e-05,
      "loss": 0.3647,
      "step": 47
    },
    {
      "epoch": 0.06584362139917696,
      "grad_norm": 8.421393394470215,
      "learning_rate": 8.592321755027423e-05,
      "loss": 0.383,
      "step": 48
    },
    {
      "epoch": 0.06721536351165981,
      "grad_norm": 5.5915937423706055,
      "learning_rate": 8.775137111517367e-05,
      "loss": 0.2353,
      "step": 49
    },
    {
      "epoch": 0.06858710562414266,
      "grad_norm": 8.187829971313477,
      "learning_rate": 8.957952468007313e-05,
      "loss": 0.5541,
      "step": 50
    },
    {
      "epoch": 0.06995884773662552,
      "grad_norm": 6.386786460876465,
      "learning_rate": 9.140767824497258e-05,
      "loss": 0.4908,
      "step": 51
    },
    {
      "epoch": 0.07133058984910837,
      "grad_norm": 8.64050006866455,
      "learning_rate": 9.323583180987204e-05,
      "loss": 0.586,
      "step": 52
    },
    {
      "epoch": 0.07270233196159122,
      "grad_norm": 5.879551410675049,
      "learning_rate": 9.506398537477148e-05,
      "loss": 0.2241,
      "step": 53
    },
    {
      "epoch": 0.07407407407407407,
      "grad_norm": 7.824138164520264,
      "learning_rate": 9.689213893967093e-05,
      "loss": 0.6046,
      "step": 54
    },
    {
      "epoch": 0.07544581618655692,
      "grad_norm": 6.351109504699707,
      "learning_rate": 9.872029250457039e-05,
      "loss": 0.231,
      "step": 55
    },
    {
      "epoch": 0.07681755829903979,
      "grad_norm": 9.437410354614258,
      "learning_rate": 0.00010054844606946984,
      "loss": 0.7105,
      "step": 56
    },
    {
      "epoch": 0.07818930041152264,
      "grad_norm": 8.40911865234375,
      "learning_rate": 0.0001023765996343693,
      "loss": 0.5591,
      "step": 57
    },
    {
      "epoch": 0.07956104252400549,
      "grad_norm": 7.631382942199707,
      "learning_rate": 0.00010420475319926874,
      "loss": 0.5194,
      "step": 58
    },
    {
      "epoch": 0.08093278463648834,
      "grad_norm": 5.773220062255859,
      "learning_rate": 0.00010603290676416819,
      "loss": 0.3297,
      "step": 59
    },
    {
      "epoch": 0.0823045267489712,
      "grad_norm": 1.3606321811676025,
      "learning_rate": 0.00010786106032906765,
      "loss": 0.0299,
      "step": 60
    },
    {
      "epoch": 0.08367626886145405,
      "grad_norm": 7.216275215148926,
      "learning_rate": 0.00010968921389396709,
      "loss": 0.3514,
      "step": 61
    },
    {
      "epoch": 0.0850480109739369,
      "grad_norm": 4.70477294921875,
      "learning_rate": 0.00011151736745886655,
      "loss": 0.1932,
      "step": 62
    },
    {
      "epoch": 0.08641975308641975,
      "grad_norm": 6.754104137420654,
      "learning_rate": 0.000113345521023766,
      "loss": 0.4035,
      "step": 63
    },
    {
      "epoch": 0.0877914951989026,
      "grad_norm": 0.19067375361919403,
      "learning_rate": 0.00011517367458866546,
      "loss": 0.0094,
      "step": 64
    },
    {
      "epoch": 0.08916323731138547,
      "grad_norm": 1.1715893745422363,
      "learning_rate": 0.0001170018281535649,
      "loss": 0.0148,
      "step": 65
    },
    {
      "epoch": 0.09053497942386832,
      "grad_norm": 1.6287739276885986,
      "learning_rate": 0.00011882998171846434,
      "loss": 0.0231,
      "step": 66
    },
    {
      "epoch": 0.09190672153635117,
      "grad_norm": 7.027708053588867,
      "learning_rate": 0.00012065813528336381,
      "loss": 0.3204,
      "step": 67
    },
    {
      "epoch": 0.09327846364883402,
      "grad_norm": 7.248253345489502,
      "learning_rate": 0.00012248628884826325,
      "loss": 0.3011,
      "step": 68
    },
    {
      "epoch": 0.09465020576131687,
      "grad_norm": 9.592718124389648,
      "learning_rate": 0.0001243144424131627,
      "loss": 0.3871,
      "step": 69
    },
    {
      "epoch": 0.09602194787379972,
      "grad_norm": 5.128874778747559,
      "learning_rate": 0.00012614259597806216,
      "loss": 0.1823,
      "step": 70
    },
    {
      "epoch": 0.09739368998628258,
      "grad_norm": 6.496853351593018,
      "learning_rate": 0.00012797074954296162,
      "loss": 0.3572,
      "step": 71
    },
    {
      "epoch": 0.09876543209876543,
      "grad_norm": 6.564659118652344,
      "learning_rate": 0.00012979890310786104,
      "loss": 0.5289,
      "step": 72
    },
    {
      "epoch": 0.10013717421124829,
      "grad_norm": 6.480371952056885,
      "learning_rate": 0.0001316270566727605,
      "loss": 0.3223,
      "step": 73
    },
    {
      "epoch": 0.10150891632373114,
      "grad_norm": 7.222306728363037,
      "learning_rate": 0.00013345521023765998,
      "loss": 0.3247,
      "step": 74
    },
    {
      "epoch": 0.102880658436214,
      "grad_norm": 5.406076431274414,
      "learning_rate": 0.00013528336380255943,
      "loss": 0.2133,
      "step": 75
    },
    {
      "epoch": 0.10425240054869685,
      "grad_norm": 11.029163360595703,
      "learning_rate": 0.00013711151736745886,
      "loss": 0.8249,
      "step": 76
    },
    {
      "epoch": 0.1056241426611797,
      "grad_norm": 7.284115314483643,
      "learning_rate": 0.00013893967093235832,
      "loss": 0.4341,
      "step": 77
    },
    {
      "epoch": 0.10699588477366255,
      "grad_norm": 6.240738868713379,
      "learning_rate": 0.00014076782449725777,
      "loss": 0.2932,
      "step": 78
    },
    {
      "epoch": 0.1083676268861454,
      "grad_norm": 0.1745665967464447,
      "learning_rate": 0.00014259597806215722,
      "loss": 0.0099,
      "step": 79
    },
    {
      "epoch": 0.10973936899862825,
      "grad_norm": 5.460353851318359,
      "learning_rate": 0.00014442413162705668,
      "loss": 0.3348,
      "step": 80
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 8.869246482849121,
      "learning_rate": 0.00014625228519195613,
      "loss": 0.6405,
      "step": 81
    },
    {
      "epoch": 0.11248285322359397,
      "grad_norm": 4.475996971130371,
      "learning_rate": 0.0001480804387568556,
      "loss": 0.1536,
      "step": 82
    },
    {
      "epoch": 0.11385459533607682,
      "grad_norm": 2.700299024581909,
      "learning_rate": 0.00014990859232175501,
      "loss": 0.1299,
      "step": 83
    },
    {
      "epoch": 0.11522633744855967,
      "grad_norm": 7.5515618324279785,
      "learning_rate": 0.00015173674588665447,
      "loss": 0.5863,
      "step": 84
    },
    {
      "epoch": 0.11659807956104253,
      "grad_norm": 9.869407653808594,
      "learning_rate": 0.00015356489945155395,
      "loss": 0.7205,
      "step": 85
    },
    {
      "epoch": 0.11796982167352538,
      "grad_norm": 8.208423614501953,
      "learning_rate": 0.00015539305301645338,
      "loss": 0.4052,
      "step": 86
    },
    {
      "epoch": 0.11934156378600823,
      "grad_norm": 6.408420562744141,
      "learning_rate": 0.00015722120658135283,
      "loss": 0.3953,
      "step": 87
    },
    {
      "epoch": 0.12071330589849108,
      "grad_norm": 7.050099849700928,
      "learning_rate": 0.00015904936014625229,
      "loss": 0.5598,
      "step": 88
    },
    {
      "epoch": 0.12208504801097393,
      "grad_norm": 5.326991558074951,
      "learning_rate": 0.00016087751371115174,
      "loss": 0.2856,
      "step": 89
    },
    {
      "epoch": 0.12345679012345678,
      "grad_norm": 4.510193347930908,
      "learning_rate": 0.0001627056672760512,
      "loss": 0.2277,
      "step": 90
    },
    {
      "epoch": 0.12482853223593965,
      "grad_norm": 5.769596576690674,
      "learning_rate": 0.00016453382084095065,
      "loss": 0.3296,
      "step": 91
    },
    {
      "epoch": 0.1262002743484225,
      "grad_norm": 6.066390037536621,
      "learning_rate": 0.0001663619744058501,
      "loss": 0.3079,
      "step": 92
    },
    {
      "epoch": 0.12757201646090535,
      "grad_norm": 6.80173921585083,
      "learning_rate": 0.00016819012797074953,
      "loss": 0.4867,
      "step": 93
    },
    {
      "epoch": 0.1289437585733882,
      "grad_norm": 6.219693183898926,
      "learning_rate": 0.00017001828153564899,
      "loss": 0.4319,
      "step": 94
    },
    {
      "epoch": 0.13031550068587106,
      "grad_norm": 5.316290855407715,
      "learning_rate": 0.00017184643510054847,
      "loss": 0.2952,
      "step": 95
    },
    {
      "epoch": 0.13168724279835392,
      "grad_norm": 6.86447811126709,
      "learning_rate": 0.00017367458866544792,
      "loss": 0.5531,
      "step": 96
    },
    {
      "epoch": 0.13305898491083676,
      "grad_norm": 1.2648167610168457,
      "learning_rate": 0.00017550274223034735,
      "loss": 0.0296,
      "step": 97
    },
    {
      "epoch": 0.13443072702331962,
      "grad_norm": 8.14661979675293,
      "learning_rate": 0.0001773308957952468,
      "loss": 0.8536,
      "step": 98
    },
    {
      "epoch": 0.13580246913580246,
      "grad_norm": 8.927884101867676,
      "learning_rate": 0.00017915904936014626,
      "loss": 0.4879,
      "step": 99
    },
    {
      "epoch": 0.13717421124828533,
      "grad_norm": 9.555243492126465,
      "learning_rate": 0.00018098720292504568,
      "loss": 0.67,
      "step": 100
    },
    {
      "epoch": 0.13854595336076816,
      "grad_norm": 7.783656120300293,
      "learning_rate": 0.00018281535648994517,
      "loss": 0.4813,
      "step": 101
    },
    {
      "epoch": 0.13991769547325103,
      "grad_norm": 0.5169872641563416,
      "learning_rate": 0.00018464351005484462,
      "loss": 0.0488,
      "step": 102
    },
    {
      "epoch": 0.1412894375857339,
      "grad_norm": 6.967692852020264,
      "learning_rate": 0.00018647166361974407,
      "loss": 0.5388,
      "step": 103
    },
    {
      "epoch": 0.14266117969821673,
      "grad_norm": 6.324373245239258,
      "learning_rate": 0.0001882998171846435,
      "loss": 0.376,
      "step": 104
    },
    {
      "epoch": 0.1440329218106996,
      "grad_norm": 0.7642683982849121,
      "learning_rate": 0.00019012797074954296,
      "loss": 0.017,
      "step": 105
    },
    {
      "epoch": 0.14540466392318244,
      "grad_norm": 8.600672721862793,
      "learning_rate": 0.00019195612431444244,
      "loss": 0.7542,
      "step": 106
    },
    {
      "epoch": 0.1467764060356653,
      "grad_norm": 7.111880302429199,
      "learning_rate": 0.00019378427787934186,
      "loss": 0.4063,
      "step": 107
    },
    {
      "epoch": 0.14814814814814814,
      "grad_norm": 6.075577735900879,
      "learning_rate": 0.00019561243144424132,
      "loss": 0.3658,
      "step": 108
    },
    {
      "epoch": 0.149519890260631,
      "grad_norm": 6.12313175201416,
      "learning_rate": 0.00019744058500914077,
      "loss": 0.4389,
      "step": 109
    },
    {
      "epoch": 0.15089163237311384,
      "grad_norm": 5.813235759735107,
      "learning_rate": 0.00019926873857404023,
      "loss": 0.3803,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_Qnli-dev_cosine_accuracy": 0.705078125,
      "eval_Qnli-dev_cosine_accuracy_threshold": 0.6866907477378845,
      "eval_Qnli-dev_cosine_ap": 0.7567018413685389,
      "eval_Qnli-dev_cosine_f1": 0.6931818181818182,
      "eval_Qnli-dev_cosine_f1_threshold": 0.6343963146209717,
      "eval_Qnli-dev_cosine_precision": 0.6267123287671232,
      "eval_Qnli-dev_cosine_recall": 0.7754237288135594,
      "eval_allNLI-dev_cosine_accuracy": 0.76953125,
      "eval_allNLI-dev_cosine_accuracy_threshold": 0.7752166986465454,
      "eval_allNLI-dev_cosine_ap": 0.6627175481841632,
      "eval_allNLI-dev_cosine_f1": 0.6624737945492662,
      "eval_allNLI-dev_cosine_f1_threshold": 0.6564935445785522,
      "eval_allNLI-dev_cosine_precision": 0.5197368421052632,
      "eval_allNLI-dev_cosine_recall": 0.9132947976878613,
      "eval_sequential_score": 0.7567018413685389,
      "eval_sts-test_pearson_cosine": 0.9026620207137961,
      "eval_sts-test_spearman_cosine": 0.913678627606199,
      "eval_vitaminc-pairs_loss": 2.009296178817749,
      "eval_vitaminc-pairs_runtime": 14.3224,
      "eval_vitaminc-pairs_samples_per_second": 8.937,
      "eval_vitaminc-pairs_steps_per_second": 0.07,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_negation-triplets_loss": 1.59572434425354,
      "eval_negation-triplets_runtime": 1.1528,
      "eval_negation-triplets_samples_per_second": 111.029,
      "eval_negation-triplets_steps_per_second": 0.867,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_scitail-pairs-pos_loss": 0.061776161193847656,
      "eval_scitail-pairs-pos_runtime": 1.5728,
      "eval_scitail-pairs-pos_samples_per_second": 81.383,
      "eval_scitail-pairs-pos_steps_per_second": 0.636,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_scitail-pairs-qa_loss": 0.009187542833387852,
      "eval_scitail-pairs-qa_runtime": 1.2102,
      "eval_scitail-pairs-qa_samples_per_second": 105.771,
      "eval_scitail-pairs-qa_steps_per_second": 0.826,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_xsum-pairs_loss": 0.37210211157798767,
      "eval_xsum-pairs_runtime": 6.2854,
      "eval_xsum-pairs_samples_per_second": 20.365,
      "eval_xsum-pairs_steps_per_second": 0.159,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_sciq_pairs_loss": 0.04122849553823471,
      "eval_sciq_pairs_runtime": 8.8116,
      "eval_sciq_pairs_samples_per_second": 14.526,
      "eval_sciq_pairs_steps_per_second": 0.113,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_qasc_pairs_loss": 0.4748501479625702,
      "eval_qasc_pairs_runtime": 1.3827,
      "eval_qasc_pairs_samples_per_second": 92.573,
      "eval_qasc_pairs_steps_per_second": 0.723,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_openbookqa_pairs_loss": 1.1540580987930298,
      "eval_openbookqa_pairs_runtime": 1.1788,
      "eval_openbookqa_pairs_samples_per_second": 108.581,
      "eval_openbookqa_pairs_steps_per_second": 0.848,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_nq_pairs_loss": 0.2363465428352356,
      "eval_nq_pairs_runtime": 7.8515,
      "eval_nq_pairs_samples_per_second": 16.303,
      "eval_nq_pairs_steps_per_second": 0.127,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_trivia_pairs_loss": 0.6520176529884338,
      "eval_trivia_pairs_runtime": 8.9067,
      "eval_trivia_pairs_samples_per_second": 14.371,
      "eval_trivia_pairs_steps_per_second": 0.112,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_gooaq_pairs_loss": 0.22620199620723724,
      "eval_gooaq_pairs_runtime": 2.067,
      "eval_gooaq_pairs_samples_per_second": 61.924,
      "eval_gooaq_pairs_steps_per_second": 0.484,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_paws-pos_loss": 0.02822125516831875,
      "eval_paws-pos_runtime": 1.5117,
      "eval_paws-pos_samples_per_second": 84.672,
      "eval_paws-pos_steps_per_second": 0.662,
      "step": 110
    },
    {
      "epoch": 0.15089163237311384,
      "eval_global_dataset_loss": 0.30668479204177856,
      "eval_global_dataset_runtime": 33.2591,
      "eval_global_dataset_samples_per_second": 11.546,
      "eval_global_dataset_steps_per_second": 0.06,
      "step": 110
    },
    {
      "epoch": 0.1522633744855967,
      "grad_norm": 4.30504846572876,
      "learning_rate": 0.00020109689213893968,
      "loss": 0.2478,
      "step": 111
    },
    {
      "epoch": 0.15363511659807957,
      "grad_norm": 6.559568881988525,
      "learning_rate": 0.00020292504570383914,
      "loss": 0.8402,
      "step": 112
    },
    {
      "epoch": 0.1550068587105624,
      "grad_norm": 5.812280654907227,
      "learning_rate": 0.0002047531992687386,
      "loss": 0.6608,
      "step": 113
    },
    {
      "epoch": 0.15637860082304528,
      "grad_norm": 2.0805885791778564,
      "learning_rate": 0.00020658135283363802,
      "loss": 0.0934,
      "step": 114
    },
    {
      "epoch": 0.15775034293552812,
      "grad_norm": 5.199294090270996,
      "learning_rate": 0.00020840950639853747,
      "loss": 0.3907,
      "step": 115
    },
    {
      "epoch": 0.15912208504801098,
      "grad_norm": 6.3685078620910645,
      "learning_rate": 0.00021023765996343693,
      "loss": 0.449,
      "step": 116
    },
    {
      "epoch": 0.16049382716049382,
      "grad_norm": 6.4199652671813965,
      "learning_rate": 0.00021206581352833638,
      "loss": 0.4041,
      "step": 117
    },
    {
      "epoch": 0.16186556927297668,
      "grad_norm": 6.015898704528809,
      "learning_rate": 0.00021389396709323584,
      "loss": 0.6749,
      "step": 118
    },
    {
      "epoch": 0.16323731138545952,
      "grad_norm": 7.721911430358887,
      "learning_rate": 0.0002157221206581353,
      "loss": 0.4847,
      "step": 119
    },
    {
      "epoch": 0.1646090534979424,
      "grad_norm": 1.8774610757827759,
      "learning_rate": 0.00021755027422303474,
      "loss": 0.0526,
      "step": 120
    },
    {
      "epoch": 0.16598079561042525,
      "grad_norm": 8.094359397888184,
      "learning_rate": 0.00021937842778793417,
      "loss": 0.6795,
      "step": 121
    },
    {
      "epoch": 0.1673525377229081,
      "grad_norm": 0.33090323209762573,
      "learning_rate": 0.00022120658135283365,
      "loss": 0.0064,
      "step": 122
    },
    {
      "epoch": 0.16872427983539096,
      "grad_norm": 7.3609418869018555,
      "learning_rate": 0.0002230347349177331,
      "loss": 0.5918,
      "step": 123
    },
    {
      "epoch": 0.1700960219478738,
      "grad_norm": 6.189216613769531,
      "learning_rate": 0.00022486288848263253,
      "loss": 0.3544,
      "step": 124
    },
    {
      "epoch": 0.17146776406035666,
      "grad_norm": 5.588890075683594,
      "learning_rate": 0.000226691042047532,
      "loss": 0.3849,
      "step": 125
    },
    {
      "epoch": 0.1728395061728395,
      "grad_norm": 3.4582345485687256,
      "learning_rate": 0.00022851919561243144,
      "loss": 0.2051,
      "step": 126
    },
    {
      "epoch": 0.17421124828532236,
      "grad_norm": 4.075862407684326,
      "learning_rate": 0.00023034734917733092,
      "loss": 0.2129,
      "step": 127
    },
    {
      "epoch": 0.1755829903978052,
      "grad_norm": 15.110091209411621,
      "learning_rate": 0.00023217550274223035,
      "loss": 2.7937,
      "step": 128
    },
    {
      "epoch": 0.17695473251028807,
      "grad_norm": 0.35791516304016113,
      "learning_rate": 0.0002340036563071298,
      "loss": 0.0166,
      "step": 129
    },
    {
      "epoch": 0.17832647462277093,
      "grad_norm": 7.5200090408325195,
      "learning_rate": 0.00023583180987202926,
      "loss": 0.7856,
      "step": 130
    },
    {
      "epoch": 0.17969821673525377,
      "grad_norm": 6.566864490509033,
      "learning_rate": 0.0002376599634369287,
      "loss": 0.8368,
      "step": 131
    },
    {
      "epoch": 0.18106995884773663,
      "grad_norm": 4.958701133728027,
      "learning_rate": 0.00023948811700182814,
      "loss": 0.3813,
      "step": 132
    },
    {
      "epoch": 0.18244170096021947,
      "grad_norm": 5.745133876800537,
      "learning_rate": 0.00024131627056672762,
      "loss": 0.5695,
      "step": 133
    },
    {
      "epoch": 0.18381344307270234,
      "grad_norm": 4.952736854553223,
      "learning_rate": 0.00024314442413162708,
      "loss": 0.351,
      "step": 134
    },
    {
      "epoch": 0.18518518518518517,
      "grad_norm": 5.733601093292236,
      "learning_rate": 0.0002449725776965265,
      "loss": 0.3821,
      "step": 135
    },
    {
      "epoch": 0.18655692729766804,
      "grad_norm": 5.019097328186035,
      "learning_rate": 0.00024680073126142596,
      "loss": 0.3249,
      "step": 136
    },
    {
      "epoch": 0.18792866941015088,
      "grad_norm": 5.300777912139893,
      "learning_rate": 0.0002486288848263254,
      "loss": 0.3404,
      "step": 137
    },
    {
      "epoch": 0.18930041152263374,
      "grad_norm": 4.518141269683838,
      "learning_rate": 0.00025045703839122487,
      "loss": 0.4535,
      "step": 138
    },
    {
      "epoch": 0.1906721536351166,
      "grad_norm": 1.0158088207244873,
      "learning_rate": 0.0002522851919561243,
      "loss": 0.0577,
      "step": 139
    },
    {
      "epoch": 0.19204389574759945,
      "grad_norm": 5.966796398162842,
      "learning_rate": 0.0002541133455210238,
      "loss": 0.7431,
      "step": 140
    },
    {
      "epoch": 0.1934156378600823,
      "grad_norm": 6.123642921447754,
      "learning_rate": 0.00025594149908592323,
      "loss": 0.6778,
      "step": 141
    },
    {
      "epoch": 0.19478737997256515,
      "grad_norm": 5.842874050140381,
      "learning_rate": 0.0002577696526508227,
      "loss": 0.5436,
      "step": 142
    },
    {
      "epoch": 0.19615912208504802,
      "grad_norm": 4.759068012237549,
      "learning_rate": 0.0002595978062157221,
      "loss": 0.3582,
      "step": 143
    },
    {
      "epoch": 0.19753086419753085,
      "grad_norm": 4.080338478088379,
      "learning_rate": 0.00026142595978062154,
      "loss": 0.316,
      "step": 144
    },
    {
      "epoch": 0.19890260631001372,
      "grad_norm": 4.1391448974609375,
      "learning_rate": 0.000263254113345521,
      "loss": 0.4446,
      "step": 145
    },
    {
      "epoch": 0.20027434842249658,
      "grad_norm": 5.856256008148193,
      "learning_rate": 0.0002650822669104205,
      "loss": 0.7792,
      "step": 146
    },
    {
      "epoch": 0.20164609053497942,
      "grad_norm": 7.747331142425537,
      "learning_rate": 0.00026691042047531996,
      "loss": 1.1147,
      "step": 147
    },
    {
      "epoch": 0.2030178326474623,
      "grad_norm": 6.825289249420166,
      "learning_rate": 0.0002687385740402194,
      "loss": 0.8267,
      "step": 148
    },
    {
      "epoch": 0.20438957475994513,
      "grad_norm": 7.336719512939453,
      "learning_rate": 0.00027056672760511887,
      "loss": 0.8149,
      "step": 149
    },
    {
      "epoch": 0.205761316872428,
      "grad_norm": 6.731626510620117,
      "learning_rate": 0.00027239488117001827,
      "loss": 0.942,
      "step": 150
    },
    {
      "epoch": 0.20713305898491083,
      "grad_norm": 10.727692604064941,
      "learning_rate": 0.0002742230347349177,
      "loss": 2.4865,
      "step": 151
    },
    {
      "epoch": 0.2085048010973937,
      "grad_norm": 8.583380699157715,
      "learning_rate": 0.0002760511882998172,
      "loss": 1.0715,
      "step": 152
    },
    {
      "epoch": 0.20987654320987653,
      "grad_norm": 6.236877918243408,
      "learning_rate": 0.00027787934186471663,
      "loss": 0.6219,
      "step": 153
    },
    {
      "epoch": 0.2112482853223594,
      "grad_norm": 6.254538536071777,
      "learning_rate": 0.0002797074954296161,
      "loss": 0.8705,
      "step": 154
    },
    {
      "epoch": 0.21262002743484226,
      "grad_norm": 3.0917959213256836,
      "learning_rate": 0.00028153564899451554,
      "loss": 0.2407,
      "step": 155
    },
    {
      "epoch": 0.2139917695473251,
      "grad_norm": 4.438024997711182,
      "learning_rate": 0.000283363802559415,
      "loss": 0.4925,
      "step": 156
    },
    {
      "epoch": 0.21536351165980797,
      "grad_norm": 0.43344631791114807,
      "learning_rate": 0.00028519195612431445,
      "loss": 0.0316,
      "step": 157
    },
    {
      "epoch": 0.2167352537722908,
      "grad_norm": 5.73934268951416,
      "learning_rate": 0.0002870201096892139,
      "loss": 0.3935,
      "step": 158
    },
    {
      "epoch": 0.21810699588477367,
      "grad_norm": 4.532804012298584,
      "learning_rate": 0.00028884826325411336,
      "loss": 0.2083,
      "step": 159
    },
    {
      "epoch": 0.2194787379972565,
      "grad_norm": 4.846848487854004,
      "learning_rate": 0.0002906764168190128,
      "loss": 0.2798,
      "step": 160
    },
    {
      "epoch": 0.22085048010973937,
      "grad_norm": 7.060863018035889,
      "learning_rate": 0.00029250457038391227,
      "loss": 0.8777,
      "step": 161
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 0.012754157185554504,
      "learning_rate": 0.0002943327239488117,
      "loss": 0.0002,
      "step": 162
    },
    {
      "epoch": 0.22359396433470508,
      "grad_norm": 4.094379901885986,
      "learning_rate": 0.0002961608775137112,
      "loss": 0.2736,
      "step": 163
    },
    {
      "epoch": 0.22496570644718794,
      "grad_norm": 10.741785049438477,
      "learning_rate": 0.0002979890310786106,
      "loss": 2.4185,
      "step": 164
    },
    {
      "epoch": 0.22633744855967078,
      "grad_norm": 4.820891380310059,
      "learning_rate": 0.00029981718464351003,
      "loss": 0.7767,
      "step": 165
    },
    {
      "epoch": 0.22770919067215364,
      "grad_norm": 6.423076152801514,
      "learning_rate": 0.0003016453382084095,
      "loss": 0.7971,
      "step": 166
    },
    {
      "epoch": 0.22908093278463648,
      "grad_norm": 4.492727756500244,
      "learning_rate": 0.00030347349177330894,
      "loss": 0.4535,
      "step": 167
    },
    {
      "epoch": 0.23045267489711935,
      "grad_norm": 5.301379680633545,
      "learning_rate": 0.00030530164533820845,
      "loss": 0.6654,
      "step": 168
    },
    {
      "epoch": 0.23182441700960219,
      "grad_norm": 5.155853748321533,
      "learning_rate": 0.0003071297989031079,
      "loss": 0.3985,
      "step": 169
    },
    {
      "epoch": 0.23319615912208505,
      "grad_norm": 0.4378865361213684,
      "learning_rate": 0.00030895795246800735,
      "loss": 0.0338,
      "step": 170
    },
    {
      "epoch": 0.2345679012345679,
      "grad_norm": 4.022473335266113,
      "learning_rate": 0.00031078610603290675,
      "loss": 0.1834,
      "step": 171
    },
    {
      "epoch": 0.23593964334705075,
      "grad_norm": 7.863429069519043,
      "learning_rate": 0.0003126142595978062,
      "loss": 0.603,
      "step": 172
    },
    {
      "epoch": 0.23731138545953362,
      "grad_norm": 8.951998710632324,
      "learning_rate": 0.00031444241316270566,
      "loss": 0.7871,
      "step": 173
    },
    {
      "epoch": 0.23868312757201646,
      "grad_norm": 6.265102386474609,
      "learning_rate": 0.0003162705667276051,
      "loss": 0.4304,
      "step": 174
    },
    {
      "epoch": 0.24005486968449932,
      "grad_norm": 6.6486005783081055,
      "learning_rate": 0.00031809872029250457,
      "loss": 0.649,
      "step": 175
    },
    {
      "epoch": 0.24142661179698216,
      "grad_norm": 0.47100114822387695,
      "learning_rate": 0.000319926873857404,
      "loss": 0.048,
      "step": 176
    },
    {
      "epoch": 0.24279835390946503,
      "grad_norm": 4.884115695953369,
      "learning_rate": 0.0003217550274223035,
      "loss": 0.4079,
      "step": 177
    },
    {
      "epoch": 0.24417009602194786,
      "grad_norm": 4.508667469024658,
      "learning_rate": 0.0003235831809872029,
      "loss": 0.4627,
      "step": 178
    },
    {
      "epoch": 0.24554183813443073,
      "grad_norm": 3.22367262840271,
      "learning_rate": 0.0003254113345521024,
      "loss": 0.3703,
      "step": 179
    },
    {
      "epoch": 0.24691358024691357,
      "grad_norm": 7.695303916931152,
      "learning_rate": 0.00032723948811700184,
      "loss": 0.8343,
      "step": 180
    },
    {
      "epoch": 0.24828532235939643,
      "grad_norm": 7.249318599700928,
      "learning_rate": 0.0003290676416819013,
      "loss": 0.692,
      "step": 181
    },
    {
      "epoch": 0.2496570644718793,
      "grad_norm": 11.686202049255371,
      "learning_rate": 0.00033089579524680075,
      "loss": 2.7071,
      "step": 182
    },
    {
      "epoch": 0.25102880658436216,
      "grad_norm": 6.061092376708984,
      "learning_rate": 0.0003327239488117002,
      "loss": 0.8451,
      "step": 183
    },
    {
      "epoch": 0.252400548696845,
      "grad_norm": 5.932607650756836,
      "learning_rate": 0.00033455210237659966,
      "loss": 0.635,
      "step": 184
    },
    {
      "epoch": 0.25377229080932784,
      "grad_norm": 3.491114616394043,
      "learning_rate": 0.00033638025594149906,
      "loss": 0.312,
      "step": 185
    },
    {
      "epoch": 0.2551440329218107,
      "grad_norm": 6.4914164543151855,
      "learning_rate": 0.0003382084095063985,
      "loss": 0.6996,
      "step": 186
    },
    {
      "epoch": 0.25651577503429357,
      "grad_norm": 6.15857458114624,
      "learning_rate": 0.00034003656307129797,
      "loss": 0.4432,
      "step": 187
    },
    {
      "epoch": 0.2578875171467764,
      "grad_norm": 4.767185211181641,
      "learning_rate": 0.0003418647166361974,
      "loss": 0.375,
      "step": 188
    },
    {
      "epoch": 0.25925925925925924,
      "grad_norm": 7.944342613220215,
      "learning_rate": 0.00034369287020109693,
      "loss": 0.9366,
      "step": 189
    },
    {
      "epoch": 0.2606310013717421,
      "grad_norm": 6.573953628540039,
      "learning_rate": 0.0003455210237659964,
      "loss": 0.755,
      "step": 190
    },
    {
      "epoch": 0.262002743484225,
      "grad_norm": 4.173367023468018,
      "learning_rate": 0.00034734917733089584,
      "loss": 0.6068,
      "step": 191
    },
    {
      "epoch": 0.26337448559670784,
      "grad_norm": 5.26171875,
      "learning_rate": 0.00034917733089579524,
      "loss": 0.5336,
      "step": 192
    },
    {
      "epoch": 0.26474622770919065,
      "grad_norm": 6.669304370880127,
      "learning_rate": 0.0003510054844606947,
      "loss": 0.8783,
      "step": 193
    },
    {
      "epoch": 0.2661179698216735,
      "grad_norm": 4.4192938804626465,
      "learning_rate": 0.00035283363802559415,
      "loss": 0.3576,
      "step": 194
    },
    {
      "epoch": 0.2674897119341564,
      "grad_norm": 10.117819786071777,
      "learning_rate": 0.0003546617915904936,
      "loss": 2.1854,
      "step": 195
    },
    {
      "epoch": 0.26886145404663925,
      "grad_norm": 5.256247520446777,
      "learning_rate": 0.00035648994515539306,
      "loss": 0.7835,
      "step": 196
    },
    {
      "epoch": 0.27023319615912206,
      "grad_norm": 5.784887313842773,
      "learning_rate": 0.0003583180987202925,
      "loss": 0.5668,
      "step": 197
    },
    {
      "epoch": 0.2716049382716049,
      "grad_norm": 4.977567672729492,
      "learning_rate": 0.00036014625228519197,
      "loss": 0.7033,
      "step": 198
    },
    {
      "epoch": 0.2729766803840878,
      "grad_norm": 0.011424711905419827,
      "learning_rate": 0.00036197440585009137,
      "loss": 0.0002,
      "step": 199
    },
    {
      "epoch": 0.27434842249657065,
      "grad_norm": 5.805008411407471,
      "learning_rate": 0.0003638025594149909,
      "loss": 0.5791,
      "step": 200
    },
    {
      "epoch": 0.2757201646090535,
      "grad_norm": 3.8826043605804443,
      "learning_rate": 0.00036563071297989033,
      "loss": 0.2697,
      "step": 201
    },
    {
      "epoch": 0.27709190672153633,
      "grad_norm": 6.563521385192871,
      "learning_rate": 0.0003674588665447898,
      "loss": 0.6261,
      "step": 202
    },
    {
      "epoch": 0.2784636488340192,
      "grad_norm": 4.584529399871826,
      "learning_rate": 0.00036928702010968924,
      "loss": 0.3253,
      "step": 203
    },
    {
      "epoch": 0.27983539094650206,
      "grad_norm": 6.636009216308594,
      "learning_rate": 0.0003711151736745887,
      "loss": 0.8323,
      "step": 204
    },
    {
      "epoch": 0.2812071330589849,
      "grad_norm": 5.0911359786987305,
      "learning_rate": 0.00037294332723948815,
      "loss": 0.4472,
      "step": 205
    },
    {
      "epoch": 0.2825788751714678,
      "grad_norm": 3.9219255447387695,
      "learning_rate": 0.00037477148080438755,
      "loss": 0.3342,
      "step": 206
    },
    {
      "epoch": 0.2839506172839506,
      "grad_norm": 5.114777565002441,
      "learning_rate": 0.000376599634369287,
      "loss": 0.6313,
      "step": 207
    },
    {
      "epoch": 0.28532235939643347,
      "grad_norm": 0.3298715353012085,
      "learning_rate": 0.00037842778793418646,
      "loss": 0.059,
      "step": 208
    },
    {
      "epoch": 0.28669410150891633,
      "grad_norm": 1.5965046882629395,
      "learning_rate": 0.0003802559414990859,
      "loss": 0.1195,
      "step": 209
    },
    {
      "epoch": 0.2880658436213992,
      "grad_norm": 0.39121323823928833,
      "learning_rate": 0.00038208409506398537,
      "loss": 0.0296,
      "step": 210
    },
    {
      "epoch": 0.289437585733882,
      "grad_norm": 4.317224025726318,
      "learning_rate": 0.0003839122486288849,
      "loss": 0.5316,
      "step": 211
    },
    {
      "epoch": 0.2908093278463649,
      "grad_norm": 4.000308036804199,
      "learning_rate": 0.00038574040219378433,
      "loss": 0.5201,
      "step": 212
    },
    {
      "epoch": 0.29218106995884774,
      "grad_norm": 6.2192301750183105,
      "learning_rate": 0.00038756855575868373,
      "loss": 0.6602,
      "step": 213
    },
    {
      "epoch": 0.2935528120713306,
      "grad_norm": 6.702320098876953,
      "learning_rate": 0.0003893967093235832,
      "loss": 0.9578,
      "step": 214
    },
    {
      "epoch": 0.29492455418381347,
      "grad_norm": 3.9136242866516113,
      "learning_rate": 0.00039122486288848264,
      "loss": 0.2089,
      "step": 215
    },
    {
      "epoch": 0.2962962962962963,
      "grad_norm": 6.901303768157959,
      "learning_rate": 0.0003930530164533821,
      "loss": 1.2112,
      "step": 216
    },
    {
      "epoch": 0.29766803840877915,
      "grad_norm": 4.04884672164917,
      "learning_rate": 0.00039488117001828155,
      "loss": 0.3294,
      "step": 217
    },
    {
      "epoch": 0.299039780521262,
      "grad_norm": 5.46201753616333,
      "learning_rate": 0.000396709323583181,
      "loss": 0.867,
      "step": 218
    },
    {
      "epoch": 0.3004115226337449,
      "grad_norm": 5.559458255767822,
      "learning_rate": 0.00039853747714808046,
      "loss": 1.1745,
      "step": 219
    }
  ],
  "logging_steps": 1,
  "max_steps": 2187,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 219,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 128,
  "trial_name": null,
  "trial_params": null
}