|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.6008230452674898, |
|
"eval_steps": 110, |
|
"global_step": 438, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0013717421124828531, |
|
"grad_norm": 13.75906753540039, |
|
"learning_rate": 1.8281535648994516e-06, |
|
"loss": 1.2468, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0027434842249657062, |
|
"grad_norm": 14.05058765411377, |
|
"learning_rate": 3.6563071297989032e-06, |
|
"loss": 1.4692, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.00411522633744856, |
|
"grad_norm": 13.991771697998047, |
|
"learning_rate": 5.484460694698355e-06, |
|
"loss": 1.2457, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0054869684499314125, |
|
"grad_norm": 13.429465293884277, |
|
"learning_rate": 7.3126142595978065e-06, |
|
"loss": 1.1859, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.006858710562414266, |
|
"grad_norm": 12.403002738952637, |
|
"learning_rate": 9.140767824497258e-06, |
|
"loss": 1.2404, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.00823045267489712, |
|
"grad_norm": 1.5524662733078003, |
|
"learning_rate": 1.096892138939671e-05, |
|
"loss": 0.042, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.009602194787379973, |
|
"grad_norm": 10.1494140625, |
|
"learning_rate": 1.2797074954296162e-05, |
|
"loss": 0.8856, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.010973936899862825, |
|
"grad_norm": Infinity, |
|
"learning_rate": 1.2797074954296162e-05, |
|
"loss": 1.5417, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.012345679012345678, |
|
"grad_norm": 0.4553964138031006, |
|
"learning_rate": 1.4625228519195613e-05, |
|
"loss": 0.023, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.013717421124828532, |
|
"grad_norm": 10.332369804382324, |
|
"learning_rate": 1.6453382084095062e-05, |
|
"loss": 0.8655, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.015089163237311385, |
|
"grad_norm": 7.544310092926025, |
|
"learning_rate": 1.8281535648994517e-05, |
|
"loss": 0.5894, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01646090534979424, |
|
"grad_norm": 8.16427230834961, |
|
"learning_rate": 2.0109689213893968e-05, |
|
"loss": 0.7053, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01783264746227709, |
|
"grad_norm": 7.403252601623535, |
|
"learning_rate": 2.193784277879342e-05, |
|
"loss": 0.5857, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.019204389574759947, |
|
"grad_norm": 8.974674224853516, |
|
"learning_rate": 2.376599634369287e-05, |
|
"loss": 0.8375, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0205761316872428, |
|
"grad_norm": 13.417745590209961, |
|
"learning_rate": 2.5594149908592324e-05, |
|
"loss": 0.9043, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02194787379972565, |
|
"grad_norm": 12.881294250488281, |
|
"learning_rate": 2.742230347349177e-05, |
|
"loss": 0.8756, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.023319615912208505, |
|
"grad_norm": 7.439205169677734, |
|
"learning_rate": 2.9250457038391226e-05, |
|
"loss": 0.5076, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.024691358024691357, |
|
"grad_norm": 8.46964168548584, |
|
"learning_rate": 3.107861060329068e-05, |
|
"loss": 0.4757, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02606310013717421, |
|
"grad_norm": 17.02773666381836, |
|
"learning_rate": 3.2906764168190124e-05, |
|
"loss": 0.9993, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.027434842249657063, |
|
"grad_norm": 6.2668776512146, |
|
"learning_rate": 3.473491773308958e-05, |
|
"loss": 0.2622, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02880658436213992, |
|
"grad_norm": 8.273824691772461, |
|
"learning_rate": 3.656307129798903e-05, |
|
"loss": 0.3497, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03017832647462277, |
|
"grad_norm": 5.5460944175720215, |
|
"learning_rate": 3.839122486288849e-05, |
|
"loss": 0.2514, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03155006858710562, |
|
"grad_norm": 4.283128261566162, |
|
"learning_rate": 4.0219378427787935e-05, |
|
"loss": 0.1673, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.03292181069958848, |
|
"grad_norm": 4.708792209625244, |
|
"learning_rate": 4.204753199268738e-05, |
|
"loss": 0.203, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.03429355281207133, |
|
"grad_norm": 14.491021156311035, |
|
"learning_rate": 4.387568555758684e-05, |
|
"loss": 0.698, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03566529492455418, |
|
"grad_norm": 7.903520584106445, |
|
"learning_rate": 4.570383912248629e-05, |
|
"loss": 0.3401, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.037037037037037035, |
|
"grad_norm": 7.333080291748047, |
|
"learning_rate": 4.753199268738574e-05, |
|
"loss": 0.2185, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.038408779149519894, |
|
"grad_norm": 8.625358581542969, |
|
"learning_rate": 4.936014625228519e-05, |
|
"loss": 0.4424, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.039780521262002745, |
|
"grad_norm": 1.5588488578796387, |
|
"learning_rate": 5.118829981718465e-05, |
|
"loss": 0.0381, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.0411522633744856, |
|
"grad_norm": 12.401138305664062, |
|
"learning_rate": 5.3016453382084095e-05, |
|
"loss": 0.8215, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.04252400548696845, |
|
"grad_norm": 5.405845642089844, |
|
"learning_rate": 5.484460694698354e-05, |
|
"loss": 0.1542, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.0438957475994513, |
|
"grad_norm": 8.558808326721191, |
|
"learning_rate": 5.6672760511883e-05, |
|
"loss": 0.6893, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.04526748971193416, |
|
"grad_norm": 7.206741809844971, |
|
"learning_rate": 5.850091407678245e-05, |
|
"loss": 0.3773, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.04663923182441701, |
|
"grad_norm": 8.300729751586914, |
|
"learning_rate": 6.0329067641681906e-05, |
|
"loss": 0.538, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.04801097393689986, |
|
"grad_norm": 0.2500181496143341, |
|
"learning_rate": 6.215722120658135e-05, |
|
"loss": 0.0073, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.04938271604938271, |
|
"grad_norm": 17.457223892211914, |
|
"learning_rate": 6.398537477148081e-05, |
|
"loss": 2.378, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.05075445816186557, |
|
"grad_norm": 10.884990692138672, |
|
"learning_rate": 6.581352833638025e-05, |
|
"loss": 0.5949, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.05212620027434842, |
|
"grad_norm": 10.013723373413086, |
|
"learning_rate": 6.764168190127972e-05, |
|
"loss": 0.7071, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.053497942386831275, |
|
"grad_norm": 4.653324604034424, |
|
"learning_rate": 6.946983546617916e-05, |
|
"loss": 0.1607, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.05486968449931413, |
|
"grad_norm": 9.527400970458984, |
|
"learning_rate": 7.129798903107861e-05, |
|
"loss": 0.7735, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.056241426611796985, |
|
"grad_norm": 12.477531433105469, |
|
"learning_rate": 7.312614259597807e-05, |
|
"loss": 0.7594, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.05761316872427984, |
|
"grad_norm": 5.369799613952637, |
|
"learning_rate": 7.495429616087751e-05, |
|
"loss": 0.3569, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.05898491083676269, |
|
"grad_norm": 5.1385908126831055, |
|
"learning_rate": 7.678244972577697e-05, |
|
"loss": 0.2454, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.06035665294924554, |
|
"grad_norm": 6.1807708740234375, |
|
"learning_rate": 7.861060329067642e-05, |
|
"loss": 0.2723, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.06172839506172839, |
|
"grad_norm": 7.941879749298096, |
|
"learning_rate": 8.043875685557587e-05, |
|
"loss": 0.5338, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.06310013717421124, |
|
"grad_norm": 5.015410423278809, |
|
"learning_rate": 8.226691042047532e-05, |
|
"loss": 0.1891, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.0644718792866941, |
|
"grad_norm": 7.299699306488037, |
|
"learning_rate": 8.409506398537477e-05, |
|
"loss": 0.3647, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.06584362139917696, |
|
"grad_norm": 8.421393394470215, |
|
"learning_rate": 8.592321755027423e-05, |
|
"loss": 0.383, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.06721536351165981, |
|
"grad_norm": 5.5915937423706055, |
|
"learning_rate": 8.775137111517367e-05, |
|
"loss": 0.2353, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.06858710562414266, |
|
"grad_norm": 8.187829971313477, |
|
"learning_rate": 8.957952468007313e-05, |
|
"loss": 0.5541, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06995884773662552, |
|
"grad_norm": 6.386786460876465, |
|
"learning_rate": 9.140767824497258e-05, |
|
"loss": 0.4908, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.07133058984910837, |
|
"grad_norm": 8.64050006866455, |
|
"learning_rate": 9.323583180987204e-05, |
|
"loss": 0.586, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.07270233196159122, |
|
"grad_norm": 5.879551410675049, |
|
"learning_rate": 9.506398537477148e-05, |
|
"loss": 0.2241, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.07407407407407407, |
|
"grad_norm": 7.824138164520264, |
|
"learning_rate": 9.689213893967093e-05, |
|
"loss": 0.6046, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.07544581618655692, |
|
"grad_norm": 6.351109504699707, |
|
"learning_rate": 9.872029250457039e-05, |
|
"loss": 0.231, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.07681755829903979, |
|
"grad_norm": 9.437410354614258, |
|
"learning_rate": 0.00010054844606946984, |
|
"loss": 0.7105, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.07818930041152264, |
|
"grad_norm": 8.40911865234375, |
|
"learning_rate": 0.0001023765996343693, |
|
"loss": 0.5591, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.07956104252400549, |
|
"grad_norm": 7.631382942199707, |
|
"learning_rate": 0.00010420475319926874, |
|
"loss": 0.5194, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.08093278463648834, |
|
"grad_norm": 5.773220062255859, |
|
"learning_rate": 0.00010603290676416819, |
|
"loss": 0.3297, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.0823045267489712, |
|
"grad_norm": 1.3606321811676025, |
|
"learning_rate": 0.00010786106032906765, |
|
"loss": 0.0299, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.08367626886145405, |
|
"grad_norm": 7.216275215148926, |
|
"learning_rate": 0.00010968921389396709, |
|
"loss": 0.3514, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.0850480109739369, |
|
"grad_norm": 4.70477294921875, |
|
"learning_rate": 0.00011151736745886655, |
|
"loss": 0.1932, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.08641975308641975, |
|
"grad_norm": 6.754104137420654, |
|
"learning_rate": 0.000113345521023766, |
|
"loss": 0.4035, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.0877914951989026, |
|
"grad_norm": 0.19067375361919403, |
|
"learning_rate": 0.00011517367458866546, |
|
"loss": 0.0094, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.08916323731138547, |
|
"grad_norm": 1.1715893745422363, |
|
"learning_rate": 0.0001170018281535649, |
|
"loss": 0.0148, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.09053497942386832, |
|
"grad_norm": 1.6287739276885986, |
|
"learning_rate": 0.00011882998171846434, |
|
"loss": 0.0231, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.09190672153635117, |
|
"grad_norm": 7.027708053588867, |
|
"learning_rate": 0.00012065813528336381, |
|
"loss": 0.3204, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.09327846364883402, |
|
"grad_norm": 7.248253345489502, |
|
"learning_rate": 0.00012248628884826325, |
|
"loss": 0.3011, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.09465020576131687, |
|
"grad_norm": 9.592718124389648, |
|
"learning_rate": 0.0001243144424131627, |
|
"loss": 0.3871, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.09602194787379972, |
|
"grad_norm": 5.128874778747559, |
|
"learning_rate": 0.00012614259597806216, |
|
"loss": 0.1823, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.09739368998628258, |
|
"grad_norm": 6.496853351593018, |
|
"learning_rate": 0.00012797074954296162, |
|
"loss": 0.3572, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.09876543209876543, |
|
"grad_norm": 6.564659118652344, |
|
"learning_rate": 0.00012979890310786104, |
|
"loss": 0.5289, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.10013717421124829, |
|
"grad_norm": 6.480371952056885, |
|
"learning_rate": 0.0001316270566727605, |
|
"loss": 0.3223, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.10150891632373114, |
|
"grad_norm": 7.222306728363037, |
|
"learning_rate": 0.00013345521023765998, |
|
"loss": 0.3247, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.102880658436214, |
|
"grad_norm": 5.406076431274414, |
|
"learning_rate": 0.00013528336380255943, |
|
"loss": 0.2133, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.10425240054869685, |
|
"grad_norm": 11.029163360595703, |
|
"learning_rate": 0.00013711151736745886, |
|
"loss": 0.8249, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.1056241426611797, |
|
"grad_norm": 7.284115314483643, |
|
"learning_rate": 0.00013893967093235832, |
|
"loss": 0.4341, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.10699588477366255, |
|
"grad_norm": 6.240738868713379, |
|
"learning_rate": 0.00014076782449725777, |
|
"loss": 0.2932, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.1083676268861454, |
|
"grad_norm": 0.1745665967464447, |
|
"learning_rate": 0.00014259597806215722, |
|
"loss": 0.0099, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.10973936899862825, |
|
"grad_norm": 5.460353851318359, |
|
"learning_rate": 0.00014442413162705668, |
|
"loss": 0.3348, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.1111111111111111, |
|
"grad_norm": 8.869246482849121, |
|
"learning_rate": 0.00014625228519195613, |
|
"loss": 0.6405, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.11248285322359397, |
|
"grad_norm": 4.475996971130371, |
|
"learning_rate": 0.0001480804387568556, |
|
"loss": 0.1536, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.11385459533607682, |
|
"grad_norm": 2.700299024581909, |
|
"learning_rate": 0.00014990859232175501, |
|
"loss": 0.1299, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.11522633744855967, |
|
"grad_norm": 7.5515618324279785, |
|
"learning_rate": 0.00015173674588665447, |
|
"loss": 0.5863, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.11659807956104253, |
|
"grad_norm": 9.869407653808594, |
|
"learning_rate": 0.00015356489945155395, |
|
"loss": 0.7205, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.11796982167352538, |
|
"grad_norm": 8.208423614501953, |
|
"learning_rate": 0.00015539305301645338, |
|
"loss": 0.4052, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.11934156378600823, |
|
"grad_norm": 6.408420562744141, |
|
"learning_rate": 0.00015722120658135283, |
|
"loss": 0.3953, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.12071330589849108, |
|
"grad_norm": 7.050099849700928, |
|
"learning_rate": 0.00015904936014625229, |
|
"loss": 0.5598, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.12208504801097393, |
|
"grad_norm": 5.326991558074951, |
|
"learning_rate": 0.00016087751371115174, |
|
"loss": 0.2856, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.12345679012345678, |
|
"grad_norm": 4.510193347930908, |
|
"learning_rate": 0.0001627056672760512, |
|
"loss": 0.2277, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.12482853223593965, |
|
"grad_norm": 5.769596576690674, |
|
"learning_rate": 0.00016453382084095065, |
|
"loss": 0.3296, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.1262002743484225, |
|
"grad_norm": 6.066390037536621, |
|
"learning_rate": 0.0001663619744058501, |
|
"loss": 0.3079, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.12757201646090535, |
|
"grad_norm": 6.80173921585083, |
|
"learning_rate": 0.00016819012797074953, |
|
"loss": 0.4867, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.1289437585733882, |
|
"grad_norm": 6.219693183898926, |
|
"learning_rate": 0.00017001828153564899, |
|
"loss": 0.4319, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.13031550068587106, |
|
"grad_norm": 5.316290855407715, |
|
"learning_rate": 0.00017184643510054847, |
|
"loss": 0.2952, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.13168724279835392, |
|
"grad_norm": 6.86447811126709, |
|
"learning_rate": 0.00017367458866544792, |
|
"loss": 0.5531, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.13305898491083676, |
|
"grad_norm": 1.2648167610168457, |
|
"learning_rate": 0.00017550274223034735, |
|
"loss": 0.0296, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.13443072702331962, |
|
"grad_norm": 8.14661979675293, |
|
"learning_rate": 0.0001773308957952468, |
|
"loss": 0.8536, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.13580246913580246, |
|
"grad_norm": 8.927884101867676, |
|
"learning_rate": 0.00017915904936014626, |
|
"loss": 0.4879, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.13717421124828533, |
|
"grad_norm": 9.555243492126465, |
|
"learning_rate": 0.00018098720292504568, |
|
"loss": 0.67, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.13854595336076816, |
|
"grad_norm": 7.783656120300293, |
|
"learning_rate": 0.00018281535648994517, |
|
"loss": 0.4813, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.13991769547325103, |
|
"grad_norm": 0.5169872641563416, |
|
"learning_rate": 0.00018464351005484462, |
|
"loss": 0.0488, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.1412894375857339, |
|
"grad_norm": 6.967692852020264, |
|
"learning_rate": 0.00018647166361974407, |
|
"loss": 0.5388, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.14266117969821673, |
|
"grad_norm": 6.324373245239258, |
|
"learning_rate": 0.0001882998171846435, |
|
"loss": 0.376, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.1440329218106996, |
|
"grad_norm": 0.7642683982849121, |
|
"learning_rate": 0.00019012797074954296, |
|
"loss": 0.017, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.14540466392318244, |
|
"grad_norm": 8.600672721862793, |
|
"learning_rate": 0.00019195612431444244, |
|
"loss": 0.7542, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.1467764060356653, |
|
"grad_norm": 7.111880302429199, |
|
"learning_rate": 0.00019378427787934186, |
|
"loss": 0.4063, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.14814814814814814, |
|
"grad_norm": 6.075577735900879, |
|
"learning_rate": 0.00019561243144424132, |
|
"loss": 0.3658, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.149519890260631, |
|
"grad_norm": 6.12313175201416, |
|
"learning_rate": 0.00019744058500914077, |
|
"loss": 0.4389, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"grad_norm": 5.813235759735107, |
|
"learning_rate": 0.00019926873857404023, |
|
"loss": 0.3803, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_Qnli-dev_cosine_accuracy": 0.705078125, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.6866907477378845, |
|
"eval_Qnli-dev_cosine_ap": 0.7567018413685389, |
|
"eval_Qnli-dev_cosine_f1": 0.6931818181818182, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.6343963146209717, |
|
"eval_Qnli-dev_cosine_precision": 0.6267123287671232, |
|
"eval_Qnli-dev_cosine_recall": 0.7754237288135594, |
|
"eval_allNLI-dev_cosine_accuracy": 0.76953125, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.7752166986465454, |
|
"eval_allNLI-dev_cosine_ap": 0.6627175481841632, |
|
"eval_allNLI-dev_cosine_f1": 0.6624737945492662, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.6564935445785522, |
|
"eval_allNLI-dev_cosine_precision": 0.5197368421052632, |
|
"eval_allNLI-dev_cosine_recall": 0.9132947976878613, |
|
"eval_sequential_score": 0.7567018413685389, |
|
"eval_sts-test_pearson_cosine": 0.9026620207137961, |
|
"eval_sts-test_spearman_cosine": 0.913678627606199, |
|
"eval_vitaminc-pairs_loss": 2.009296178817749, |
|
"eval_vitaminc-pairs_runtime": 14.3224, |
|
"eval_vitaminc-pairs_samples_per_second": 8.937, |
|
"eval_vitaminc-pairs_steps_per_second": 0.07, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_negation-triplets_loss": 1.59572434425354, |
|
"eval_negation-triplets_runtime": 1.1528, |
|
"eval_negation-triplets_samples_per_second": 111.029, |
|
"eval_negation-triplets_steps_per_second": 0.867, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_scitail-pairs-pos_loss": 0.061776161193847656, |
|
"eval_scitail-pairs-pos_runtime": 1.5728, |
|
"eval_scitail-pairs-pos_samples_per_second": 81.383, |
|
"eval_scitail-pairs-pos_steps_per_second": 0.636, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_scitail-pairs-qa_loss": 0.009187542833387852, |
|
"eval_scitail-pairs-qa_runtime": 1.2102, |
|
"eval_scitail-pairs-qa_samples_per_second": 105.771, |
|
"eval_scitail-pairs-qa_steps_per_second": 0.826, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_xsum-pairs_loss": 0.37210211157798767, |
|
"eval_xsum-pairs_runtime": 6.2854, |
|
"eval_xsum-pairs_samples_per_second": 20.365, |
|
"eval_xsum-pairs_steps_per_second": 0.159, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_sciq_pairs_loss": 0.04122849553823471, |
|
"eval_sciq_pairs_runtime": 8.8116, |
|
"eval_sciq_pairs_samples_per_second": 14.526, |
|
"eval_sciq_pairs_steps_per_second": 0.113, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_qasc_pairs_loss": 0.4748501479625702, |
|
"eval_qasc_pairs_runtime": 1.3827, |
|
"eval_qasc_pairs_samples_per_second": 92.573, |
|
"eval_qasc_pairs_steps_per_second": 0.723, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_openbookqa_pairs_loss": 1.1540580987930298, |
|
"eval_openbookqa_pairs_runtime": 1.1788, |
|
"eval_openbookqa_pairs_samples_per_second": 108.581, |
|
"eval_openbookqa_pairs_steps_per_second": 0.848, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_nq_pairs_loss": 0.2363465428352356, |
|
"eval_nq_pairs_runtime": 7.8515, |
|
"eval_nq_pairs_samples_per_second": 16.303, |
|
"eval_nq_pairs_steps_per_second": 0.127, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_trivia_pairs_loss": 0.6520176529884338, |
|
"eval_trivia_pairs_runtime": 8.9067, |
|
"eval_trivia_pairs_samples_per_second": 14.371, |
|
"eval_trivia_pairs_steps_per_second": 0.112, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_gooaq_pairs_loss": 0.22620199620723724, |
|
"eval_gooaq_pairs_runtime": 2.067, |
|
"eval_gooaq_pairs_samples_per_second": 61.924, |
|
"eval_gooaq_pairs_steps_per_second": 0.484, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_paws-pos_loss": 0.02822125516831875, |
|
"eval_paws-pos_runtime": 1.5117, |
|
"eval_paws-pos_samples_per_second": 84.672, |
|
"eval_paws-pos_steps_per_second": 0.662, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15089163237311384, |
|
"eval_global_dataset_loss": 0.30668479204177856, |
|
"eval_global_dataset_runtime": 33.2591, |
|
"eval_global_dataset_samples_per_second": 11.546, |
|
"eval_global_dataset_steps_per_second": 0.06, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.1522633744855967, |
|
"grad_norm": 4.30504846572876, |
|
"learning_rate": 0.00020109689213893968, |
|
"loss": 0.2478, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.15363511659807957, |
|
"grad_norm": 6.559568881988525, |
|
"learning_rate": 0.00020292504570383914, |
|
"loss": 0.8402, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.1550068587105624, |
|
"grad_norm": 5.812280654907227, |
|
"learning_rate": 0.0002047531992687386, |
|
"loss": 0.6608, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.15637860082304528, |
|
"grad_norm": 2.0805885791778564, |
|
"learning_rate": 0.00020658135283363802, |
|
"loss": 0.0934, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.15775034293552812, |
|
"grad_norm": 5.199294090270996, |
|
"learning_rate": 0.00020840950639853747, |
|
"loss": 0.3907, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.15912208504801098, |
|
"grad_norm": 6.3685078620910645, |
|
"learning_rate": 0.00021023765996343693, |
|
"loss": 0.449, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.16049382716049382, |
|
"grad_norm": 6.4199652671813965, |
|
"learning_rate": 0.00021206581352833638, |
|
"loss": 0.4041, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.16186556927297668, |
|
"grad_norm": 6.015898704528809, |
|
"learning_rate": 0.00021389396709323584, |
|
"loss": 0.6749, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.16323731138545952, |
|
"grad_norm": 7.721911430358887, |
|
"learning_rate": 0.0002157221206581353, |
|
"loss": 0.4847, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.1646090534979424, |
|
"grad_norm": 1.8774610757827759, |
|
"learning_rate": 0.00021755027422303474, |
|
"loss": 0.0526, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.16598079561042525, |
|
"grad_norm": 8.094359397888184, |
|
"learning_rate": 0.00021937842778793417, |
|
"loss": 0.6795, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.1673525377229081, |
|
"grad_norm": 0.33090323209762573, |
|
"learning_rate": 0.00022120658135283365, |
|
"loss": 0.0064, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.16872427983539096, |
|
"grad_norm": 7.3609418869018555, |
|
"learning_rate": 0.0002230347349177331, |
|
"loss": 0.5918, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.1700960219478738, |
|
"grad_norm": 6.189216613769531, |
|
"learning_rate": 0.00022486288848263253, |
|
"loss": 0.3544, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.17146776406035666, |
|
"grad_norm": 5.588890075683594, |
|
"learning_rate": 0.000226691042047532, |
|
"loss": 0.3849, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.1728395061728395, |
|
"grad_norm": 3.4582345485687256, |
|
"learning_rate": 0.00022851919561243144, |
|
"loss": 0.2051, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.17421124828532236, |
|
"grad_norm": 4.075862407684326, |
|
"learning_rate": 0.00023034734917733092, |
|
"loss": 0.2129, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.1755829903978052, |
|
"grad_norm": 15.110091209411621, |
|
"learning_rate": 0.00023217550274223035, |
|
"loss": 2.7937, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.17695473251028807, |
|
"grad_norm": 0.35791516304016113, |
|
"learning_rate": 0.0002340036563071298, |
|
"loss": 0.0166, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.17832647462277093, |
|
"grad_norm": 7.5200090408325195, |
|
"learning_rate": 0.00023583180987202926, |
|
"loss": 0.7856, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.17969821673525377, |
|
"grad_norm": 6.566864490509033, |
|
"learning_rate": 0.0002376599634369287, |
|
"loss": 0.8368, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.18106995884773663, |
|
"grad_norm": 4.958701133728027, |
|
"learning_rate": 0.00023948811700182814, |
|
"loss": 0.3813, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.18244170096021947, |
|
"grad_norm": 5.745133876800537, |
|
"learning_rate": 0.00024131627056672762, |
|
"loss": 0.5695, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.18381344307270234, |
|
"grad_norm": 4.952736854553223, |
|
"learning_rate": 0.00024314442413162708, |
|
"loss": 0.351, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.18518518518518517, |
|
"grad_norm": 5.733601093292236, |
|
"learning_rate": 0.0002449725776965265, |
|
"loss": 0.3821, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.18655692729766804, |
|
"grad_norm": 5.019097328186035, |
|
"learning_rate": 0.00024680073126142596, |
|
"loss": 0.3249, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.18792866941015088, |
|
"grad_norm": 5.300777912139893, |
|
"learning_rate": 0.0002486288848263254, |
|
"loss": 0.3404, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.18930041152263374, |
|
"grad_norm": 4.518141269683838, |
|
"learning_rate": 0.00025045703839122487, |
|
"loss": 0.4535, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.1906721536351166, |
|
"grad_norm": 1.0158088207244873, |
|
"learning_rate": 0.0002522851919561243, |
|
"loss": 0.0577, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.19204389574759945, |
|
"grad_norm": 5.966796398162842, |
|
"learning_rate": 0.0002541133455210238, |
|
"loss": 0.7431, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.1934156378600823, |
|
"grad_norm": 6.123642921447754, |
|
"learning_rate": 0.00025594149908592323, |
|
"loss": 0.6778, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.19478737997256515, |
|
"grad_norm": 5.842874050140381, |
|
"learning_rate": 0.0002577696526508227, |
|
"loss": 0.5436, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.19615912208504802, |
|
"grad_norm": 4.759068012237549, |
|
"learning_rate": 0.0002595978062157221, |
|
"loss": 0.3582, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.19753086419753085, |
|
"grad_norm": 4.080338478088379, |
|
"learning_rate": 0.00026142595978062154, |
|
"loss": 0.316, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.19890260631001372, |
|
"grad_norm": 4.1391448974609375, |
|
"learning_rate": 0.000263254113345521, |
|
"loss": 0.4446, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.20027434842249658, |
|
"grad_norm": 5.856256008148193, |
|
"learning_rate": 0.0002650822669104205, |
|
"loss": 0.7792, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.20164609053497942, |
|
"grad_norm": 7.747331142425537, |
|
"learning_rate": 0.00026691042047531996, |
|
"loss": 1.1147, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.2030178326474623, |
|
"grad_norm": 6.825289249420166, |
|
"learning_rate": 0.0002687385740402194, |
|
"loss": 0.8267, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.20438957475994513, |
|
"grad_norm": 7.336719512939453, |
|
"learning_rate": 0.00027056672760511887, |
|
"loss": 0.8149, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.205761316872428, |
|
"grad_norm": 6.731626510620117, |
|
"learning_rate": 0.00027239488117001827, |
|
"loss": 0.942, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.20713305898491083, |
|
"grad_norm": 10.727692604064941, |
|
"learning_rate": 0.0002742230347349177, |
|
"loss": 2.4865, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.2085048010973937, |
|
"grad_norm": 8.583380699157715, |
|
"learning_rate": 0.0002760511882998172, |
|
"loss": 1.0715, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.20987654320987653, |
|
"grad_norm": 6.236877918243408, |
|
"learning_rate": 0.00027787934186471663, |
|
"loss": 0.6219, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.2112482853223594, |
|
"grad_norm": 6.254538536071777, |
|
"learning_rate": 0.0002797074954296161, |
|
"loss": 0.8705, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.21262002743484226, |
|
"grad_norm": 3.0917959213256836, |
|
"learning_rate": 0.00028153564899451554, |
|
"loss": 0.2407, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.2139917695473251, |
|
"grad_norm": 4.438024997711182, |
|
"learning_rate": 0.000283363802559415, |
|
"loss": 0.4925, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.21536351165980797, |
|
"grad_norm": 0.43344631791114807, |
|
"learning_rate": 0.00028519195612431445, |
|
"loss": 0.0316, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.2167352537722908, |
|
"grad_norm": 5.73934268951416, |
|
"learning_rate": 0.0002870201096892139, |
|
"loss": 0.3935, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.21810699588477367, |
|
"grad_norm": 4.532804012298584, |
|
"learning_rate": 0.00028884826325411336, |
|
"loss": 0.2083, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.2194787379972565, |
|
"grad_norm": 4.846848487854004, |
|
"learning_rate": 0.0002906764168190128, |
|
"loss": 0.2798, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.22085048010973937, |
|
"grad_norm": 7.060863018035889, |
|
"learning_rate": 0.00029250457038391227, |
|
"loss": 0.8777, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.2222222222222222, |
|
"grad_norm": 0.012754157185554504, |
|
"learning_rate": 0.0002943327239488117, |
|
"loss": 0.0002, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.22359396433470508, |
|
"grad_norm": 4.094379901885986, |
|
"learning_rate": 0.0002961608775137112, |
|
"loss": 0.2736, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.22496570644718794, |
|
"grad_norm": 10.741785049438477, |
|
"learning_rate": 0.0002979890310786106, |
|
"loss": 2.4185, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.22633744855967078, |
|
"grad_norm": 4.820891380310059, |
|
"learning_rate": 0.00029981718464351003, |
|
"loss": 0.7767, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.22770919067215364, |
|
"grad_norm": 6.423076152801514, |
|
"learning_rate": 0.0003016453382084095, |
|
"loss": 0.7971, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.22908093278463648, |
|
"grad_norm": 4.492727756500244, |
|
"learning_rate": 0.00030347349177330894, |
|
"loss": 0.4535, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.23045267489711935, |
|
"grad_norm": 5.301379680633545, |
|
"learning_rate": 0.00030530164533820845, |
|
"loss": 0.6654, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.23182441700960219, |
|
"grad_norm": 5.155853748321533, |
|
"learning_rate": 0.0003071297989031079, |
|
"loss": 0.3985, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.23319615912208505, |
|
"grad_norm": 0.4378865361213684, |
|
"learning_rate": 0.00030895795246800735, |
|
"loss": 0.0338, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.2345679012345679, |
|
"grad_norm": 4.022473335266113, |
|
"learning_rate": 0.00031078610603290675, |
|
"loss": 0.1834, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.23593964334705075, |
|
"grad_norm": 7.863429069519043, |
|
"learning_rate": 0.0003126142595978062, |
|
"loss": 0.603, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.23731138545953362, |
|
"grad_norm": 8.951998710632324, |
|
"learning_rate": 0.00031444241316270566, |
|
"loss": 0.7871, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.23868312757201646, |
|
"grad_norm": 6.265102386474609, |
|
"learning_rate": 0.0003162705667276051, |
|
"loss": 0.4304, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.24005486968449932, |
|
"grad_norm": 6.6486005783081055, |
|
"learning_rate": 0.00031809872029250457, |
|
"loss": 0.649, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.24142661179698216, |
|
"grad_norm": 0.47100114822387695, |
|
"learning_rate": 0.000319926873857404, |
|
"loss": 0.048, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.24279835390946503, |
|
"grad_norm": 4.884115695953369, |
|
"learning_rate": 0.0003217550274223035, |
|
"loss": 0.4079, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.24417009602194786, |
|
"grad_norm": 4.508667469024658, |
|
"learning_rate": 0.0003235831809872029, |
|
"loss": 0.4627, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.24554183813443073, |
|
"grad_norm": 3.22367262840271, |
|
"learning_rate": 0.0003254113345521024, |
|
"loss": 0.3703, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.24691358024691357, |
|
"grad_norm": 7.695303916931152, |
|
"learning_rate": 0.00032723948811700184, |
|
"loss": 0.8343, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.24828532235939643, |
|
"grad_norm": 7.249318599700928, |
|
"learning_rate": 0.0003290676416819013, |
|
"loss": 0.692, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.2496570644718793, |
|
"grad_norm": 11.686202049255371, |
|
"learning_rate": 0.00033089579524680075, |
|
"loss": 2.7071, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.25102880658436216, |
|
"grad_norm": 6.061092376708984, |
|
"learning_rate": 0.0003327239488117002, |
|
"loss": 0.8451, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.252400548696845, |
|
"grad_norm": 5.932607650756836, |
|
"learning_rate": 0.00033455210237659966, |
|
"loss": 0.635, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.25377229080932784, |
|
"grad_norm": 3.491114616394043, |
|
"learning_rate": 0.00033638025594149906, |
|
"loss": 0.312, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.2551440329218107, |
|
"grad_norm": 6.4914164543151855, |
|
"learning_rate": 0.0003382084095063985, |
|
"loss": 0.6996, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.25651577503429357, |
|
"grad_norm": 6.15857458114624, |
|
"learning_rate": 0.00034003656307129797, |
|
"loss": 0.4432, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.2578875171467764, |
|
"grad_norm": 4.767185211181641, |
|
"learning_rate": 0.0003418647166361974, |
|
"loss": 0.375, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.25925925925925924, |
|
"grad_norm": 7.944342613220215, |
|
"learning_rate": 0.00034369287020109693, |
|
"loss": 0.9366, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.2606310013717421, |
|
"grad_norm": 6.573953628540039, |
|
"learning_rate": 0.0003455210237659964, |
|
"loss": 0.755, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.262002743484225, |
|
"grad_norm": 4.173367023468018, |
|
"learning_rate": 0.00034734917733089584, |
|
"loss": 0.6068, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.26337448559670784, |
|
"grad_norm": 5.26171875, |
|
"learning_rate": 0.00034917733089579524, |
|
"loss": 0.5336, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.26474622770919065, |
|
"grad_norm": 6.669304370880127, |
|
"learning_rate": 0.0003510054844606947, |
|
"loss": 0.8783, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.2661179698216735, |
|
"grad_norm": 4.4192938804626465, |
|
"learning_rate": 0.00035283363802559415, |
|
"loss": 0.3576, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.2674897119341564, |
|
"grad_norm": 10.117819786071777, |
|
"learning_rate": 0.0003546617915904936, |
|
"loss": 2.1854, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.26886145404663925, |
|
"grad_norm": 5.256247520446777, |
|
"learning_rate": 0.00035648994515539306, |
|
"loss": 0.7835, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.27023319615912206, |
|
"grad_norm": 5.784887313842773, |
|
"learning_rate": 0.0003583180987202925, |
|
"loss": 0.5668, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.2716049382716049, |
|
"grad_norm": 4.977567672729492, |
|
"learning_rate": 0.00036014625228519197, |
|
"loss": 0.7033, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.2729766803840878, |
|
"grad_norm": 0.011424711905419827, |
|
"learning_rate": 0.00036197440585009137, |
|
"loss": 0.0002, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.27434842249657065, |
|
"grad_norm": 5.805008411407471, |
|
"learning_rate": 0.0003638025594149909, |
|
"loss": 0.5791, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.2757201646090535, |
|
"grad_norm": 3.8826043605804443, |
|
"learning_rate": 0.00036563071297989033, |
|
"loss": 0.2697, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.27709190672153633, |
|
"grad_norm": 6.563521385192871, |
|
"learning_rate": 0.0003674588665447898, |
|
"loss": 0.6261, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.2784636488340192, |
|
"grad_norm": 4.584529399871826, |
|
"learning_rate": 0.00036928702010968924, |
|
"loss": 0.3253, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.27983539094650206, |
|
"grad_norm": 6.636009216308594, |
|
"learning_rate": 0.0003711151736745887, |
|
"loss": 0.8323, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.2812071330589849, |
|
"grad_norm": 5.0911359786987305, |
|
"learning_rate": 0.00037294332723948815, |
|
"loss": 0.4472, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.2825788751714678, |
|
"grad_norm": 3.9219255447387695, |
|
"learning_rate": 0.00037477148080438755, |
|
"loss": 0.3342, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.2839506172839506, |
|
"grad_norm": 5.114777565002441, |
|
"learning_rate": 0.000376599634369287, |
|
"loss": 0.6313, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.28532235939643347, |
|
"grad_norm": 0.3298715353012085, |
|
"learning_rate": 0.00037842778793418646, |
|
"loss": 0.059, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.28669410150891633, |
|
"grad_norm": 1.5965046882629395, |
|
"learning_rate": 0.0003802559414990859, |
|
"loss": 0.1195, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.2880658436213992, |
|
"grad_norm": 0.39121323823928833, |
|
"learning_rate": 0.00038208409506398537, |
|
"loss": 0.0296, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.289437585733882, |
|
"grad_norm": 4.317224025726318, |
|
"learning_rate": 0.0003839122486288849, |
|
"loss": 0.5316, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.2908093278463649, |
|
"grad_norm": 4.000308036804199, |
|
"learning_rate": 0.00038574040219378433, |
|
"loss": 0.5201, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.29218106995884774, |
|
"grad_norm": 6.2192301750183105, |
|
"learning_rate": 0.00038756855575868373, |
|
"loss": 0.6602, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.2935528120713306, |
|
"grad_norm": 6.702320098876953, |
|
"learning_rate": 0.0003893967093235832, |
|
"loss": 0.9578, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.29492455418381347, |
|
"grad_norm": 3.9136242866516113, |
|
"learning_rate": 0.00039122486288848264, |
|
"loss": 0.2089, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.2962962962962963, |
|
"grad_norm": 6.901303768157959, |
|
"learning_rate": 0.0003930530164533821, |
|
"loss": 1.2112, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.29766803840877915, |
|
"grad_norm": 4.04884672164917, |
|
"learning_rate": 0.00039488117001828155, |
|
"loss": 0.3294, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.299039780521262, |
|
"grad_norm": 5.46201753616333, |
|
"learning_rate": 0.000396709323583181, |
|
"loss": 0.867, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.3004115226337449, |
|
"grad_norm": 5.559458255767822, |
|
"learning_rate": 0.00039853747714808046, |
|
"loss": 1.1745, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"grad_norm": 4.930731296539307, |
|
"learning_rate": 0.00040036563071297986, |
|
"loss": 0.7287, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_Qnli-dev_cosine_accuracy": 0.736328125, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.8059661388397217, |
|
"eval_Qnli-dev_cosine_ap": 0.7693397822540732, |
|
"eval_Qnli-dev_cosine_f1": 0.720136518771331, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.7346209287643433, |
|
"eval_Qnli-dev_cosine_precision": 0.6028571428571429, |
|
"eval_Qnli-dev_cosine_recall": 0.8940677966101694, |
|
"eval_allNLI-dev_cosine_accuracy": 0.751953125, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.8584632873535156, |
|
"eval_allNLI-dev_cosine_ap": 0.6542482370347211, |
|
"eval_allNLI-dev_cosine_f1": 0.6681127982646421, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.7551147937774658, |
|
"eval_allNLI-dev_cosine_precision": 0.5347222222222222, |
|
"eval_allNLI-dev_cosine_recall": 0.8901734104046243, |
|
"eval_sequential_score": 0.7693397822540732, |
|
"eval_sts-test_pearson_cosine": 0.8635423204589071, |
|
"eval_sts-test_spearman_cosine": 0.8907274956890058, |
|
"eval_vitaminc-pairs_loss": 2.5131773948669434, |
|
"eval_vitaminc-pairs_runtime": 14.3452, |
|
"eval_vitaminc-pairs_samples_per_second": 8.923, |
|
"eval_vitaminc-pairs_steps_per_second": 0.07, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_negation-triplets_loss": 1.5992437601089478, |
|
"eval_negation-triplets_runtime": 1.1695, |
|
"eval_negation-triplets_samples_per_second": 109.451, |
|
"eval_negation-triplets_steps_per_second": 0.855, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_scitail-pairs-pos_loss": 0.12100159376859665, |
|
"eval_scitail-pairs-pos_runtime": 1.5843, |
|
"eval_scitail-pairs-pos_samples_per_second": 80.791, |
|
"eval_scitail-pairs-pos_steps_per_second": 0.631, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_scitail-pairs-qa_loss": 0.029047677293419838, |
|
"eval_scitail-pairs-qa_runtime": 1.2089, |
|
"eval_scitail-pairs-qa_samples_per_second": 105.881, |
|
"eval_scitail-pairs-qa_steps_per_second": 0.827, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_xsum-pairs_loss": 0.6063941717147827, |
|
"eval_xsum-pairs_runtime": 6.3291, |
|
"eval_xsum-pairs_samples_per_second": 20.224, |
|
"eval_xsum-pairs_steps_per_second": 0.158, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_sciq_pairs_loss": 0.05205194652080536, |
|
"eval_sciq_pairs_runtime": 8.8533, |
|
"eval_sciq_pairs_samples_per_second": 14.458, |
|
"eval_sciq_pairs_steps_per_second": 0.113, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_qasc_pairs_loss": 0.7244825959205627, |
|
"eval_qasc_pairs_runtime": 1.3845, |
|
"eval_qasc_pairs_samples_per_second": 92.449, |
|
"eval_qasc_pairs_steps_per_second": 0.722, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_openbookqa_pairs_loss": 1.4260488748550415, |
|
"eval_openbookqa_pairs_runtime": 1.1784, |
|
"eval_openbookqa_pairs_samples_per_second": 108.623, |
|
"eval_openbookqa_pairs_steps_per_second": 0.849, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_nq_pairs_loss": 0.7104523777961731, |
|
"eval_nq_pairs_runtime": 7.8911, |
|
"eval_nq_pairs_samples_per_second": 16.221, |
|
"eval_nq_pairs_steps_per_second": 0.127, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_trivia_pairs_loss": 0.8537120223045349, |
|
"eval_trivia_pairs_runtime": 8.9305, |
|
"eval_trivia_pairs_samples_per_second": 14.333, |
|
"eval_trivia_pairs_steps_per_second": 0.112, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_gooaq_pairs_loss": 0.5029886960983276, |
|
"eval_gooaq_pairs_runtime": 2.0675, |
|
"eval_gooaq_pairs_samples_per_second": 61.91, |
|
"eval_gooaq_pairs_steps_per_second": 0.484, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_paws-pos_loss": 0.03150199353694916, |
|
"eval_paws-pos_runtime": 1.5078, |
|
"eval_paws-pos_samples_per_second": 84.892, |
|
"eval_paws-pos_steps_per_second": 0.663, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3017832647462277, |
|
"eval_global_dataset_loss": 0.18923546373844147, |
|
"eval_global_dataset_runtime": 33.3421, |
|
"eval_global_dataset_samples_per_second": 11.517, |
|
"eval_global_dataset_steps_per_second": 0.06, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.30315500685871055, |
|
"grad_norm": 3.582907199859619, |
|
"learning_rate": 0.00040219378427787936, |
|
"loss": 0.5484, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.3045267489711934, |
|
"grad_norm": 4.960206031799316, |
|
"learning_rate": 0.0004040219378427788, |
|
"loss": 0.9396, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.3058984910836763, |
|
"grad_norm": 4.219746112823486, |
|
"learning_rate": 0.0004058500914076783, |
|
"loss": 0.4335, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.30727023319615915, |
|
"grad_norm": 6.449894428253174, |
|
"learning_rate": 0.00040767824497257773, |
|
"loss": 0.9026, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.30864197530864196, |
|
"grad_norm": 6.239223003387451, |
|
"learning_rate": 0.0004095063985374772, |
|
"loss": 0.7214, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.3100137174211248, |
|
"grad_norm": 4.418921947479248, |
|
"learning_rate": 0.00041133455210237664, |
|
"loss": 0.4794, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.3113854595336077, |
|
"grad_norm": 0.012527555227279663, |
|
"learning_rate": 0.00041316270566727604, |
|
"loss": 0.0003, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.31275720164609055, |
|
"grad_norm": 2.672603130340576, |
|
"learning_rate": 0.0004149908592321755, |
|
"loss": 0.3003, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.31412894375857336, |
|
"grad_norm": 4.433743476867676, |
|
"learning_rate": 0.00041681901279707495, |
|
"loss": 0.4667, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.31550068587105623, |
|
"grad_norm": 4.458980083465576, |
|
"learning_rate": 0.0004186471663619744, |
|
"loss": 0.5006, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.3168724279835391, |
|
"grad_norm": 5.0898237228393555, |
|
"learning_rate": 0.00042047531992687385, |
|
"loss": 0.5555, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.31824417009602196, |
|
"grad_norm": 4.338139533996582, |
|
"learning_rate": 0.00042230347349177336, |
|
"loss": 0.4437, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.3196159122085048, |
|
"grad_norm": 5.023694038391113, |
|
"learning_rate": 0.00042413162705667276, |
|
"loss": 0.8813, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.32098765432098764, |
|
"grad_norm": 6.410233497619629, |
|
"learning_rate": 0.0004259597806215722, |
|
"loss": 1.1836, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.3223593964334705, |
|
"grad_norm": 3.8459813594818115, |
|
"learning_rate": 0.00042778793418647167, |
|
"loss": 0.3176, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.32373113854595337, |
|
"grad_norm": 5.539570331573486, |
|
"learning_rate": 0.0004296160877513711, |
|
"loss": 0.6248, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.32510288065843623, |
|
"grad_norm": 3.5191774368286133, |
|
"learning_rate": 0.0004314442413162706, |
|
"loss": 0.3623, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.32647462277091904, |
|
"grad_norm": 3.2997043132781982, |
|
"learning_rate": 0.00043327239488117003, |
|
"loss": 0.3205, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.3278463648834019, |
|
"grad_norm": 3.9236536026000977, |
|
"learning_rate": 0.0004351005484460695, |
|
"loss": 0.439, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.3292181069958848, |
|
"grad_norm": 3.9750499725341797, |
|
"learning_rate": 0.0004369287020109689, |
|
"loss": 0.653, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.33058984910836764, |
|
"grad_norm": 4.344120502471924, |
|
"learning_rate": 0.00043875685557586834, |
|
"loss": 0.5743, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.3319615912208505, |
|
"grad_norm": 3.2905893325805664, |
|
"learning_rate": 0.0004405850091407678, |
|
"loss": 0.5844, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.3333333333333333, |
|
"grad_norm": 5.996461391448975, |
|
"learning_rate": 0.0004424131627056673, |
|
"loss": 0.9451, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.3347050754458162, |
|
"grad_norm": 4.085718631744385, |
|
"learning_rate": 0.00044424131627056676, |
|
"loss": 0.7071, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.33607681755829905, |
|
"grad_norm": 0.26297450065612793, |
|
"learning_rate": 0.0004460694698354662, |
|
"loss": 0.0226, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.3374485596707819, |
|
"grad_norm": 5.191401481628418, |
|
"learning_rate": 0.00044789762340036567, |
|
"loss": 1.0585, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.3388203017832647, |
|
"grad_norm": 5.426116466522217, |
|
"learning_rate": 0.00044972577696526507, |
|
"loss": 1.0764, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.3401920438957476, |
|
"grad_norm": 0.17406082153320312, |
|
"learning_rate": 0.0004515539305301645, |
|
"loss": 0.0289, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.34156378600823045, |
|
"grad_norm": 4.900349140167236, |
|
"learning_rate": 0.000453382084095064, |
|
"loss": 0.5588, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.3429355281207133, |
|
"grad_norm": 5.373581409454346, |
|
"learning_rate": 0.00045521023765996343, |
|
"loss": 0.7509, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.3443072702331962, |
|
"grad_norm": 5.4629106521606445, |
|
"learning_rate": 0.0004570383912248629, |
|
"loss": 0.8388, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.345679012345679, |
|
"grad_norm": 4.140360355377197, |
|
"learning_rate": 0.00045886654478976234, |
|
"loss": 0.5444, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.34705075445816186, |
|
"grad_norm": 5.176646709442139, |
|
"learning_rate": 0.00046069469835466185, |
|
"loss": 1.2432, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.3484224965706447, |
|
"grad_norm": 5.172772407531738, |
|
"learning_rate": 0.00046252285191956125, |
|
"loss": 0.8329, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.3497942386831276, |
|
"grad_norm": 4.968120574951172, |
|
"learning_rate": 0.0004643510054844607, |
|
"loss": 0.7158, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.3511659807956104, |
|
"grad_norm": 5.91867208480835, |
|
"learning_rate": 0.00046617915904936016, |
|
"loss": 0.935, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.35253772290809327, |
|
"grad_norm": 6.223313808441162, |
|
"learning_rate": 0.0004680073126142596, |
|
"loss": 0.8777, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.35390946502057613, |
|
"grad_norm": 11.474114418029785, |
|
"learning_rate": 0.00046983546617915907, |
|
"loss": 3.1178, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.355281207133059, |
|
"grad_norm": 6.462806701660156, |
|
"learning_rate": 0.0004716636197440585, |
|
"loss": 0.9418, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.35665294924554186, |
|
"grad_norm": 5.5286173820495605, |
|
"learning_rate": 0.000473491773308958, |
|
"loss": 0.6252, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.35802469135802467, |
|
"grad_norm": 8.872392654418945, |
|
"learning_rate": 0.0004753199268738574, |
|
"loss": 2.9074, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.35939643347050754, |
|
"grad_norm": 2.4954137802124023, |
|
"learning_rate": 0.00047714808043875683, |
|
"loss": 0.303, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.3607681755829904, |
|
"grad_norm": 3.22896671295166, |
|
"learning_rate": 0.0004789762340036563, |
|
"loss": 0.7146, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.36213991769547327, |
|
"grad_norm": 4.280979156494141, |
|
"learning_rate": 0.0004808043875685558, |
|
"loss": 0.6848, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.3635116598079561, |
|
"grad_norm": 3.9235236644744873, |
|
"learning_rate": 0.00048263254113345525, |
|
"loss": 0.47, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.36488340192043894, |
|
"grad_norm": 3.460500717163086, |
|
"learning_rate": 0.0004844606946983547, |
|
"loss": 0.5762, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.3662551440329218, |
|
"grad_norm": 11.026795387268066, |
|
"learning_rate": 0.00048628884826325416, |
|
"loss": 3.3872, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.3676268861454047, |
|
"grad_norm": 3.2708449363708496, |
|
"learning_rate": 0.00048811700182815356, |
|
"loss": 0.4195, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.36899862825788754, |
|
"grad_norm": 5.177340030670166, |
|
"learning_rate": 0.000489945155393053, |
|
"loss": 1.2292, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.37037037037037035, |
|
"grad_norm": 5.413723945617676, |
|
"learning_rate": 0.0004917733089579525, |
|
"loss": 1.1249, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.3717421124828532, |
|
"grad_norm": 4.922053337097168, |
|
"learning_rate": 0.0004936014625228519, |
|
"loss": 1.0863, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.3731138545953361, |
|
"grad_norm": 4.433996677398682, |
|
"learning_rate": 0.0004954296160877514, |
|
"loss": 0.9361, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.37448559670781895, |
|
"grad_norm": 5.205246448516846, |
|
"learning_rate": 0.0004972577696526508, |
|
"loss": 0.7965, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.37585733882030176, |
|
"grad_norm": 4.139344215393066, |
|
"learning_rate": 0.0004990859232175503, |
|
"loss": 0.7914, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.3772290809327846, |
|
"grad_norm": 0.1397838592529297, |
|
"learning_rate": 0.0005009140767824497, |
|
"loss": 0.0027, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.3786008230452675, |
|
"grad_norm": 3.3880808353424072, |
|
"learning_rate": 0.0005027422303473492, |
|
"loss": 0.6585, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.37997256515775035, |
|
"grad_norm": 4.524999141693115, |
|
"learning_rate": 0.0005045703839122486, |
|
"loss": 0.5388, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.3813443072702332, |
|
"grad_norm": 6.445588111877441, |
|
"learning_rate": 0.000506398537477148, |
|
"loss": 1.238, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.38271604938271603, |
|
"grad_norm": 2.8341169357299805, |
|
"learning_rate": 0.0005082266910420476, |
|
"loss": 0.3782, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.3840877914951989, |
|
"grad_norm": 5.0054240226745605, |
|
"learning_rate": 0.0005100548446069469, |
|
"loss": 1.1769, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.38545953360768176, |
|
"grad_norm": 3.6371870040893555, |
|
"learning_rate": 0.0005118829981718465, |
|
"loss": 0.4808, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.3868312757201646, |
|
"grad_norm": 8.928922653198242, |
|
"learning_rate": 0.0005137111517367459, |
|
"loss": 3.2104, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.38820301783264743, |
|
"grad_norm": 2.7686235904693604, |
|
"learning_rate": 0.0005155393053016454, |
|
"loss": 0.3027, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.3895747599451303, |
|
"grad_norm": 1.498235821723938, |
|
"learning_rate": 0.0005173674588665448, |
|
"loss": 0.1422, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.39094650205761317, |
|
"grad_norm": 3.578543186187744, |
|
"learning_rate": 0.0005191956124314442, |
|
"loss": 0.6059, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.39231824417009603, |
|
"grad_norm": 2.900531053543091, |
|
"learning_rate": 0.0005210237659963437, |
|
"loss": 0.3491, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.3936899862825789, |
|
"grad_norm": 5.693866729736328, |
|
"learning_rate": 0.0005228519195612431, |
|
"loss": 1.1603, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.3950617283950617, |
|
"grad_norm": 3.7944750785827637, |
|
"learning_rate": 0.0005246800731261426, |
|
"loss": 0.5784, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.39643347050754457, |
|
"grad_norm": 4.433256149291992, |
|
"learning_rate": 0.000526508226691042, |
|
"loss": 0.6532, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.39780521262002744, |
|
"grad_norm": 3.986520290374756, |
|
"learning_rate": 0.0005283363802559416, |
|
"loss": 0.5613, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.3991769547325103, |
|
"grad_norm": 4.399818420410156, |
|
"learning_rate": 0.000530164533820841, |
|
"loss": 0.8469, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.40054869684499317, |
|
"grad_norm": 3.0586366653442383, |
|
"learning_rate": 0.0005319926873857404, |
|
"loss": 0.4484, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.401920438957476, |
|
"grad_norm": 0.10376634448766708, |
|
"learning_rate": 0.0005338208409506399, |
|
"loss": 0.0034, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.40329218106995884, |
|
"grad_norm": 8.303990364074707, |
|
"learning_rate": 0.0005356489945155393, |
|
"loss": 3.0798, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.4046639231824417, |
|
"grad_norm": 4.507086277008057, |
|
"learning_rate": 0.0005374771480804388, |
|
"loss": 0.8632, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.4060356652949246, |
|
"grad_norm": 5.233419895172119, |
|
"learning_rate": 0.0005393053016453382, |
|
"loss": 1.2788, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.4074074074074074, |
|
"grad_norm": 4.281100273132324, |
|
"learning_rate": 0.0005411334552102377, |
|
"loss": 1.2439, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.40877914951989025, |
|
"grad_norm": 1.219199299812317, |
|
"learning_rate": 0.0005429616087751371, |
|
"loss": 0.1067, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.4101508916323731, |
|
"grad_norm": 3.0503711700439453, |
|
"learning_rate": 0.0005447897623400365, |
|
"loss": 0.4197, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.411522633744856, |
|
"grad_norm": 3.5557351112365723, |
|
"learning_rate": 0.000546617915904936, |
|
"loss": 0.56, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.41289437585733885, |
|
"grad_norm": 4.1112470626831055, |
|
"learning_rate": 0.0005484460694698354, |
|
"loss": 0.7235, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.41426611796982166, |
|
"grad_norm": 2.915947675704956, |
|
"learning_rate": 0.000550274223034735, |
|
"loss": 0.3506, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.4156378600823045, |
|
"grad_norm": 4.125770568847656, |
|
"learning_rate": 0.0005521023765996344, |
|
"loss": 0.6808, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.4170096021947874, |
|
"grad_norm": 5.084654808044434, |
|
"learning_rate": 0.0005539305301645339, |
|
"loss": 0.9406, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.41838134430727025, |
|
"grad_norm": 4.542891025543213, |
|
"learning_rate": 0.0005557586837294333, |
|
"loss": 0.6707, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.41975308641975306, |
|
"grad_norm": 4.285159587860107, |
|
"learning_rate": 0.0005575868372943327, |
|
"loss": 1.0718, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.42112482853223593, |
|
"grad_norm": 5.053350925445557, |
|
"learning_rate": 0.0005594149908592322, |
|
"loss": 0.9847, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.4224965706447188, |
|
"grad_norm": 1.4923471212387085, |
|
"learning_rate": 0.0005612431444241316, |
|
"loss": 0.122, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.42386831275720166, |
|
"grad_norm": 4.332481384277344, |
|
"learning_rate": 0.0005630712979890311, |
|
"loss": 0.8221, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.4252400548696845, |
|
"grad_norm": 4.23899507522583, |
|
"learning_rate": 0.0005648994515539305, |
|
"loss": 0.9891, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.42661179698216734, |
|
"grad_norm": 4.370994567871094, |
|
"learning_rate": 0.00056672760511883, |
|
"loss": 0.926, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.4279835390946502, |
|
"grad_norm": 0.23886460065841675, |
|
"learning_rate": 0.0005685557586837294, |
|
"loss": 0.0151, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.42935528120713307, |
|
"grad_norm": 7.701839447021484, |
|
"learning_rate": 0.0005703839122486289, |
|
"loss": 2.8429, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.43072702331961593, |
|
"grad_norm": 4.738073825836182, |
|
"learning_rate": 0.0005722120658135283, |
|
"loss": 0.9917, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.43209876543209874, |
|
"grad_norm": 3.7907347679138184, |
|
"learning_rate": 0.0005740402193784278, |
|
"loss": 0.9199, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.4334705075445816, |
|
"grad_norm": 2.7892837524414062, |
|
"learning_rate": 0.0005758683729433273, |
|
"loss": 0.3931, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.4348422496570645, |
|
"grad_norm": 3.414641857147217, |
|
"learning_rate": 0.0005776965265082267, |
|
"loss": 0.7119, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.43621399176954734, |
|
"grad_norm": 0.023860761895775795, |
|
"learning_rate": 0.0005795246800731262, |
|
"loss": 0.0008, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.4375857338820302, |
|
"grad_norm": 3.0215470790863037, |
|
"learning_rate": 0.0005813528336380256, |
|
"loss": 0.4985, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.438957475994513, |
|
"grad_norm": 3.1884336471557617, |
|
"learning_rate": 0.000583180987202925, |
|
"loss": 0.7732, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.4403292181069959, |
|
"grad_norm": 3.4848649501800537, |
|
"learning_rate": 0.0005850091407678245, |
|
"loss": 0.7515, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.44170096021947874, |
|
"grad_norm": 0.9857578873634338, |
|
"learning_rate": 0.0005868372943327239, |
|
"loss": 0.0885, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.4430727023319616, |
|
"grad_norm": 4.590999126434326, |
|
"learning_rate": 0.0005886654478976234, |
|
"loss": 1.1677, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 4.161310195922852, |
|
"learning_rate": 0.0005904936014625229, |
|
"loss": 0.7439, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.4458161865569273, |
|
"grad_norm": 4.842323303222656, |
|
"learning_rate": 0.0005923217550274223, |
|
"loss": 1.3473, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.44718792866941015, |
|
"grad_norm": 3.332562208175659, |
|
"learning_rate": 0.0005941499085923218, |
|
"loss": 0.7273, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.448559670781893, |
|
"grad_norm": 4.295160293579102, |
|
"learning_rate": 0.0005959780621572211, |
|
"loss": 0.843, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.4499314128943759, |
|
"grad_norm": 3.641636848449707, |
|
"learning_rate": 0.0005978062157221207, |
|
"loss": 0.7881, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.4513031550068587, |
|
"grad_norm": 2.767233371734619, |
|
"learning_rate": 0.0005996343692870201, |
|
"loss": 0.5319, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"grad_norm": 2.925886869430542, |
|
"learning_rate": 0.0006014625228519196, |
|
"loss": 0.4826, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_Qnli-dev_cosine_accuracy": 0.708984375, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.8169091939926147, |
|
"eval_Qnli-dev_cosine_ap": 0.7472481805376167, |
|
"eval_Qnli-dev_cosine_f1": 0.7189781021897811, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.7571755051612854, |
|
"eval_Qnli-dev_cosine_precision": 0.6314102564102564, |
|
"eval_Qnli-dev_cosine_recall": 0.8347457627118644, |
|
"eval_allNLI-dev_cosine_accuracy": 0.748046875, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.8621972799301147, |
|
"eval_allNLI-dev_cosine_ap": 0.6527104471447597, |
|
"eval_allNLI-dev_cosine_f1": 0.6606334841628959, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.7864561080932617, |
|
"eval_allNLI-dev_cosine_precision": 0.5427509293680297, |
|
"eval_allNLI-dev_cosine_recall": 0.8439306358381503, |
|
"eval_sequential_score": 0.7472481805376167, |
|
"eval_sts-test_pearson_cosine": 0.8465015878560311, |
|
"eval_sts-test_spearman_cosine": 0.8833058569973334, |
|
"eval_vitaminc-pairs_loss": 2.5387091636657715, |
|
"eval_vitaminc-pairs_runtime": 14.3065, |
|
"eval_vitaminc-pairs_samples_per_second": 8.947, |
|
"eval_vitaminc-pairs_steps_per_second": 0.07, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_negation-triplets_loss": 1.8608626127243042, |
|
"eval_negation-triplets_runtime": 1.1519, |
|
"eval_negation-triplets_samples_per_second": 111.119, |
|
"eval_negation-triplets_steps_per_second": 0.868, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_scitail-pairs-pos_loss": 0.07322188466787338, |
|
"eval_scitail-pairs-pos_runtime": 1.5539, |
|
"eval_scitail-pairs-pos_samples_per_second": 82.372, |
|
"eval_scitail-pairs-pos_steps_per_second": 0.644, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_scitail-pairs-qa_loss": 0.026681702584028244, |
|
"eval_scitail-pairs-qa_runtime": 1.2098, |
|
"eval_scitail-pairs-qa_samples_per_second": 105.799, |
|
"eval_scitail-pairs-qa_steps_per_second": 0.827, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_xsum-pairs_loss": 0.6542444825172424, |
|
"eval_xsum-pairs_runtime": 6.2906, |
|
"eval_xsum-pairs_samples_per_second": 20.348, |
|
"eval_xsum-pairs_steps_per_second": 0.159, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_sciq_pairs_loss": 0.06421030312776566, |
|
"eval_sciq_pairs_runtime": 8.8514, |
|
"eval_sciq_pairs_samples_per_second": 14.461, |
|
"eval_sciq_pairs_steps_per_second": 0.113, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_qasc_pairs_loss": 0.8813464641571045, |
|
"eval_qasc_pairs_runtime": 1.3875, |
|
"eval_qasc_pairs_samples_per_second": 92.249, |
|
"eval_qasc_pairs_steps_per_second": 0.721, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_openbookqa_pairs_loss": 1.4074363708496094, |
|
"eval_openbookqa_pairs_runtime": 1.1826, |
|
"eval_openbookqa_pairs_samples_per_second": 108.237, |
|
"eval_openbookqa_pairs_steps_per_second": 0.846, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_nq_pairs_loss": 0.62897789478302, |
|
"eval_nq_pairs_runtime": 7.8684, |
|
"eval_nq_pairs_samples_per_second": 16.268, |
|
"eval_nq_pairs_steps_per_second": 0.127, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_trivia_pairs_loss": 1.084182620048523, |
|
"eval_trivia_pairs_runtime": 8.9262, |
|
"eval_trivia_pairs_samples_per_second": 14.34, |
|
"eval_trivia_pairs_steps_per_second": 0.112, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_gooaq_pairs_loss": 0.6594768762588501, |
|
"eval_gooaq_pairs_runtime": 2.0651, |
|
"eval_gooaq_pairs_samples_per_second": 61.982, |
|
"eval_gooaq_pairs_steps_per_second": 0.484, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_paws-pos_loss": 0.03268549218773842, |
|
"eval_paws-pos_runtime": 1.5103, |
|
"eval_paws-pos_samples_per_second": 84.754, |
|
"eval_paws-pos_steps_per_second": 0.662, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45267489711934156, |
|
"eval_global_dataset_loss": 0.21171291172504425, |
|
"eval_global_dataset_runtime": 33.2988, |
|
"eval_global_dataset_samples_per_second": 11.532, |
|
"eval_global_dataset_steps_per_second": 0.06, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.4540466392318244, |
|
"grad_norm": 2.6243412494659424, |
|
"learning_rate": 0.000603290676416819, |
|
"loss": 0.6096, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.4554183813443073, |
|
"grad_norm": 2.776013135910034, |
|
"learning_rate": 0.0006051188299817185, |
|
"loss": 0.3687, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.4567901234567901, |
|
"grad_norm": 4.418542385101318, |
|
"learning_rate": 0.0006069469835466179, |
|
"loss": 0.9713, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.45816186556927296, |
|
"grad_norm": 5.112300872802734, |
|
"learning_rate": 0.0006087751371115173, |
|
"loss": 1.3203, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.45953360768175583, |
|
"grad_norm": 2.1659023761749268, |
|
"learning_rate": 0.0006106032906764169, |
|
"loss": 0.3443, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.4609053497942387, |
|
"grad_norm": 4.783431529998779, |
|
"learning_rate": 0.0006124314442413162, |
|
"loss": 1.4592, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.46227709190672156, |
|
"grad_norm": 3.507357597351074, |
|
"learning_rate": 0.0006142595978062158, |
|
"loss": 0.8, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.46364883401920437, |
|
"grad_norm": 3.118370771408081, |
|
"learning_rate": 0.0006160877513711151, |
|
"loss": 0.5481, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.46502057613168724, |
|
"grad_norm": 4.21981143951416, |
|
"learning_rate": 0.0006179159049360147, |
|
"loss": 1.4286, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.4663923182441701, |
|
"grad_norm": 2.675670862197876, |
|
"learning_rate": 0.0006197440585009141, |
|
"loss": 0.4012, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.46776406035665297, |
|
"grad_norm": 1.6964771747589111, |
|
"learning_rate": 0.0006215722120658135, |
|
"loss": 0.1474, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.4691358024691358, |
|
"grad_norm": 4.83234167098999, |
|
"learning_rate": 0.000623400365630713, |
|
"loss": 1.2755, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.47050754458161864, |
|
"grad_norm": 2.7494046688079834, |
|
"learning_rate": 0.0006252285191956124, |
|
"loss": 0.4935, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.4718792866941015, |
|
"grad_norm": 4.708520412445068, |
|
"learning_rate": 0.0006270566727605119, |
|
"loss": 1.0101, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.4732510288065844, |
|
"grad_norm": 2.6878857612609863, |
|
"learning_rate": 0.0006288848263254113, |
|
"loss": 0.4529, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.47462277091906724, |
|
"grad_norm": 2.571988105773926, |
|
"learning_rate": 0.0006307129798903109, |
|
"loss": 0.3516, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.47599451303155005, |
|
"grad_norm": 3.210439443588257, |
|
"learning_rate": 0.0006325411334552102, |
|
"loss": 0.4045, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.4773662551440329, |
|
"grad_norm": 4.047224521636963, |
|
"learning_rate": 0.0006343692870201097, |
|
"loss": 1.2326, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.4787379972565158, |
|
"grad_norm": 4.121635437011719, |
|
"learning_rate": 0.0006361974405850091, |
|
"loss": 0.8951, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.48010973936899864, |
|
"grad_norm": 3.480602741241455, |
|
"learning_rate": 0.0006380255941499086, |
|
"loss": 0.6783, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.48148148148148145, |
|
"grad_norm": 4.286612033843994, |
|
"learning_rate": 0.000639853747714808, |
|
"loss": 1.1821, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.4828532235939643, |
|
"grad_norm": 3.077362537384033, |
|
"learning_rate": 0.0006416819012797075, |
|
"loss": 0.5101, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.4842249657064472, |
|
"grad_norm": 3.127272844314575, |
|
"learning_rate": 0.000643510054844607, |
|
"loss": 0.844, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.48559670781893005, |
|
"grad_norm": 2.6955454349517822, |
|
"learning_rate": 0.0006453382084095064, |
|
"loss": 0.5413, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.4869684499314129, |
|
"grad_norm": 3.285903215408325, |
|
"learning_rate": 0.0006471663619744058, |
|
"loss": 0.9982, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.4883401920438957, |
|
"grad_norm": 3.6762568950653076, |
|
"learning_rate": 0.0006489945155393053, |
|
"loss": 0.937, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.4897119341563786, |
|
"grad_norm": 3.365633964538574, |
|
"learning_rate": 0.0006508226691042048, |
|
"loss": 0.7977, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.49108367626886146, |
|
"grad_norm": 3.7572262287139893, |
|
"learning_rate": 0.0006526508226691042, |
|
"loss": 0.8697, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.4924554183813443, |
|
"grad_norm": 3.309539794921875, |
|
"learning_rate": 0.0006544789762340037, |
|
"loss": 1.1136, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.49382716049382713, |
|
"grad_norm": 4.250339508056641, |
|
"learning_rate": 0.0006563071297989031, |
|
"loss": 1.3018, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.49519890260631, |
|
"grad_norm": 0.07554444670677185, |
|
"learning_rate": 0.0006581352833638026, |
|
"loss": 0.0023, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.49657064471879286, |
|
"grad_norm": 3.6443750858306885, |
|
"learning_rate": 0.0006599634369287019, |
|
"loss": 0.7019, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.49794238683127573, |
|
"grad_norm": 0.19442614912986755, |
|
"learning_rate": 0.0006617915904936015, |
|
"loss": 0.0481, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.4993141289437586, |
|
"grad_norm": 3.7148313522338867, |
|
"learning_rate": 0.0006636197440585009, |
|
"loss": 0.891, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.5006858710562414, |
|
"grad_norm": 2.7239511013031006, |
|
"learning_rate": 0.0006654478976234004, |
|
"loss": 0.6353, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.5020576131687243, |
|
"grad_norm": 2.5572762489318848, |
|
"learning_rate": 0.0006672760511882999, |
|
"loss": 0.5181, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.5034293552812071, |
|
"grad_norm": 3.162834405899048, |
|
"learning_rate": 0.0006691042047531993, |
|
"loss": 0.8311, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.50480109739369, |
|
"grad_norm": 0.06661587208509445, |
|
"learning_rate": 0.0006709323583180988, |
|
"loss": 0.0026, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.5061728395061729, |
|
"grad_norm": 2.4806196689605713, |
|
"learning_rate": 0.0006727605118829981, |
|
"loss": 0.5004, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.5075445816186557, |
|
"grad_norm": 2.5774953365325928, |
|
"learning_rate": 0.0006745886654478977, |
|
"loss": 0.4511, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.5089163237311386, |
|
"grad_norm": 2.2276206016540527, |
|
"learning_rate": 0.000676416819012797, |
|
"loss": 0.4558, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.5102880658436214, |
|
"grad_norm": 2.652674674987793, |
|
"learning_rate": 0.0006782449725776966, |
|
"loss": 0.5073, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.5116598079561042, |
|
"grad_norm": 2.2147669792175293, |
|
"learning_rate": 0.0006800731261425959, |
|
"loss": 0.3979, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.5130315500685871, |
|
"grad_norm": 1.5278068780899048, |
|
"learning_rate": 0.0006819012797074955, |
|
"loss": 0.1665, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.51440329218107, |
|
"grad_norm": 2.9153432846069336, |
|
"learning_rate": 0.0006837294332723948, |
|
"loss": 0.8231, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.5157750342935528, |
|
"grad_norm": 4.252976894378662, |
|
"learning_rate": 0.0006855575868372943, |
|
"loss": 1.0406, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.5171467764060357, |
|
"grad_norm": 3.768838405609131, |
|
"learning_rate": 0.0006873857404021939, |
|
"loss": 0.725, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.5185185185185185, |
|
"grad_norm": 1.8634569644927979, |
|
"learning_rate": 0.0006892138939670932, |
|
"loss": 0.2603, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.5198902606310014, |
|
"grad_norm": 5.095807075500488, |
|
"learning_rate": 0.0006910420475319928, |
|
"loss": 1.7357, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.5212620027434842, |
|
"grad_norm": 2.8866710662841797, |
|
"learning_rate": 0.0006928702010968921, |
|
"loss": 0.9147, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.522633744855967, |
|
"grad_norm": 2.369819402694702, |
|
"learning_rate": 0.0006946983546617917, |
|
"loss": 0.4277, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.52400548696845, |
|
"grad_norm": 1.7865092754364014, |
|
"learning_rate": 0.000696526508226691, |
|
"loss": 0.4788, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.5253772290809328, |
|
"grad_norm": 3.0729360580444336, |
|
"learning_rate": 0.0006983546617915905, |
|
"loss": 0.7666, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.5267489711934157, |
|
"grad_norm": 3.023810386657715, |
|
"learning_rate": 0.0007001828153564899, |
|
"loss": 0.7728, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.5281207133058985, |
|
"grad_norm": 2.5811986923217773, |
|
"learning_rate": 0.0007020109689213894, |
|
"loss": 0.4744, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.5294924554183813, |
|
"grad_norm": 2.748720407485962, |
|
"learning_rate": 0.0007038391224862888, |
|
"loss": 1.1014, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.5308641975308642, |
|
"grad_norm": 3.950869560241699, |
|
"learning_rate": 0.0007056672760511883, |
|
"loss": 1.5588, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.532235939643347, |
|
"grad_norm": 1.595503330230713, |
|
"learning_rate": 0.0007074954296160879, |
|
"loss": 0.3185, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.53360768175583, |
|
"grad_norm": 3.505636692047119, |
|
"learning_rate": 0.0007093235831809872, |
|
"loss": 1.3348, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.5349794238683128, |
|
"grad_norm": 1.0456945896148682, |
|
"learning_rate": 0.0007111517367458867, |
|
"loss": 0.1656, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.5363511659807956, |
|
"grad_norm": 2.747938871383667, |
|
"learning_rate": 0.0007129798903107861, |
|
"loss": 0.9375, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.5377229080932785, |
|
"grad_norm": 4.741049289703369, |
|
"learning_rate": 0.0007148080438756856, |
|
"loss": 1.4665, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.5390946502057613, |
|
"grad_norm": 4.830301284790039, |
|
"learning_rate": 0.000716636197440585, |
|
"loss": 1.4635, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.5404663923182441, |
|
"grad_norm": 2.938199758529663, |
|
"learning_rate": 0.0007184643510054845, |
|
"loss": 0.8677, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.541838134430727, |
|
"grad_norm": 6.291453838348389, |
|
"learning_rate": 0.0007202925045703839, |
|
"loss": 3.033, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.5432098765432098, |
|
"grad_norm": 3.0533947944641113, |
|
"learning_rate": 0.0007221206581352834, |
|
"loss": 1.4375, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.5445816186556928, |
|
"grad_norm": 3.13800048828125, |
|
"learning_rate": 0.0007239488117001827, |
|
"loss": 0.9762, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.5459533607681756, |
|
"grad_norm": 1.1184616088867188, |
|
"learning_rate": 0.0007257769652650823, |
|
"loss": 0.1333, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.5473251028806584, |
|
"grad_norm": 3.610217571258545, |
|
"learning_rate": 0.0007276051188299818, |
|
"loss": 1.1823, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.5486968449931413, |
|
"grad_norm": 3.6696395874023438, |
|
"learning_rate": 0.0007294332723948812, |
|
"loss": 1.2443, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.5500685871056241, |
|
"grad_norm": 2.967648506164551, |
|
"learning_rate": 0.0007312614259597807, |
|
"loss": 0.662, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.551440329218107, |
|
"grad_norm": 0.8750410079956055, |
|
"learning_rate": 0.0007330895795246801, |
|
"loss": 0.0709, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.5528120713305898, |
|
"grad_norm": 3.0801315307617188, |
|
"learning_rate": 0.0007349177330895796, |
|
"loss": 0.5822, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.5541838134430727, |
|
"grad_norm": 3.701993227005005, |
|
"learning_rate": 0.0007367458866544789, |
|
"loss": 1.0826, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.5555555555555556, |
|
"grad_norm": 3.439502716064453, |
|
"learning_rate": 0.0007385740402193785, |
|
"loss": 0.7953, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.5569272976680384, |
|
"grad_norm": 4.8917341232299805, |
|
"learning_rate": 0.0007404021937842778, |
|
"loss": 1.6109, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.5582990397805213, |
|
"grad_norm": 3.793834924697876, |
|
"learning_rate": 0.0007422303473491774, |
|
"loss": 1.2505, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.5596707818930041, |
|
"grad_norm": 3.0604796409606934, |
|
"learning_rate": 0.0007440585009140767, |
|
"loss": 1.0019, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.5610425240054869, |
|
"grad_norm": 2.872400999069214, |
|
"learning_rate": 0.0007458866544789763, |
|
"loss": 0.8224, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.5624142661179699, |
|
"grad_norm": 2.631157398223877, |
|
"learning_rate": 0.0007477148080438758, |
|
"loss": 0.6592, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.5637860082304527, |
|
"grad_norm": 2.843379020690918, |
|
"learning_rate": 0.0007495429616087751, |
|
"loss": 0.7099, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.5651577503429356, |
|
"grad_norm": 2.3366591930389404, |
|
"learning_rate": 0.0007513711151736747, |
|
"loss": 0.5484, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.5665294924554184, |
|
"grad_norm": 3.4202780723571777, |
|
"learning_rate": 0.000753199268738574, |
|
"loss": 1.3019, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.5679012345679012, |
|
"grad_norm": 0.9125491976737976, |
|
"learning_rate": 0.0007550274223034736, |
|
"loss": 0.1266, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.5692729766803841, |
|
"grad_norm": 2.8945682048797607, |
|
"learning_rate": 0.0007568555758683729, |
|
"loss": 0.8932, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.5706447187928669, |
|
"grad_norm": 2.898399591445923, |
|
"learning_rate": 0.0007586837294332725, |
|
"loss": 0.7547, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.5720164609053497, |
|
"grad_norm": 2.9406332969665527, |
|
"learning_rate": 0.0007605118829981718, |
|
"loss": 0.7614, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.5733882030178327, |
|
"grad_norm": 2.1753182411193848, |
|
"learning_rate": 0.0007623400365630713, |
|
"loss": 0.7364, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.5747599451303155, |
|
"grad_norm": 2.5140652656555176, |
|
"learning_rate": 0.0007641681901279707, |
|
"loss": 0.6539, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.5761316872427984, |
|
"grad_norm": 2.0174620151519775, |
|
"learning_rate": 0.0007659963436928702, |
|
"loss": 0.3848, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.5775034293552812, |
|
"grad_norm": 0.11105114966630936, |
|
"learning_rate": 0.0007678244972577697, |
|
"loss": 0.0394, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.578875171467764, |
|
"grad_norm": 1.8194284439086914, |
|
"learning_rate": 0.0007696526508226691, |
|
"loss": 0.4623, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.5802469135802469, |
|
"grad_norm": 0.7781994342803955, |
|
"learning_rate": 0.0007714808043875687, |
|
"loss": 0.0783, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.5816186556927297, |
|
"grad_norm": 3.7422642707824707, |
|
"learning_rate": 0.000773308957952468, |
|
"loss": 1.4366, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.5829903978052127, |
|
"grad_norm": 3.9761717319488525, |
|
"learning_rate": 0.0007751371115173675, |
|
"loss": 1.3568, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.5843621399176955, |
|
"grad_norm": 2.948404550552368, |
|
"learning_rate": 0.0007769652650822669, |
|
"loss": 0.9065, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.5857338820301783, |
|
"grad_norm": 2.7700140476226807, |
|
"learning_rate": 0.0007787934186471664, |
|
"loss": 0.6723, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.5871056241426612, |
|
"grad_norm": 2.8618569374084473, |
|
"learning_rate": 0.0007806215722120658, |
|
"loss": 0.7596, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.588477366255144, |
|
"grad_norm": 0.956656813621521, |
|
"learning_rate": 0.0007824497257769653, |
|
"loss": 0.1426, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.5898491083676269, |
|
"grad_norm": 2.8430604934692383, |
|
"learning_rate": 0.0007842778793418648, |
|
"loss": 1.3264, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.5912208504801097, |
|
"grad_norm": 0.11261007934808731, |
|
"learning_rate": 0.0007861060329067642, |
|
"loss": 0.0442, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.5925925925925926, |
|
"grad_norm": 2.7201974391937256, |
|
"learning_rate": 0.0007879341864716636, |
|
"loss": 0.6046, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.5939643347050755, |
|
"grad_norm": 2.5923287868499756, |
|
"learning_rate": 0.0007897623400365631, |
|
"loss": 0.6173, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.5953360768175583, |
|
"grad_norm": 4.77182674407959, |
|
"learning_rate": 0.0007915904936014625, |
|
"loss": 2.8892, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.5967078189300411, |
|
"grad_norm": 3.1731245517730713, |
|
"learning_rate": 0.000793418647166362, |
|
"loss": 1.3149, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.598079561042524, |
|
"grad_norm": 2.849473237991333, |
|
"learning_rate": 0.0007952468007312615, |
|
"loss": 1.1886, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.5994513031550068, |
|
"grad_norm": 3.5986573696136475, |
|
"learning_rate": 0.0007970749542961609, |
|
"loss": 1.2799, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.6008230452674898, |
|
"grad_norm": 2.668875217437744, |
|
"learning_rate": 0.0007989031078610604, |
|
"loss": 0.7527, |
|
"step": 438 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 2187, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 219, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 0.0, |
|
"train_batch_size": 128, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
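A minimal sketch of how a state file like the one above might be inspected, using only Python's standard json module. The filename trainer_state.json is an assumption (the log itself does not name the file); the keys referenced (log_history, step, loss, learning_rate, max_steps, save_steps, train_batch_size) are exactly those that appear in the state above.

import json

# Assumption: the JSON state shown above has been saved verbatim to this path.
STATE_PATH = "trainer_state.json"

with open(STATE_PATH, "r", encoding="utf-8") as f:
    state = json.load(f)

history = state["log_history"]

# Training entries carry a "loss" key; periodic evaluation entries carry "eval_*" keys.
train_logs = [e for e in history if "loss" in e]
eval_logs = [e for e in history if any(k.startswith("eval_") for k in e)]

last = train_logs[-1]
print(f"logged train steps: {len(train_logs)} of max_steps={state['max_steps']}")
print(f"last step {last['step']}: loss={last['loss']}, lr={last['learning_rate']}")
print(f"checkpoint every {state['save_steps']} steps, train batch size {state['train_batch_size']}")

# Rough smoothing: mean loss over the most recent 50 logged training steps.
recent = train_logs[-50:]
print("mean loss over last", len(recent), "steps:", sum(e["loss"] for e in recent) / len(recent))

The eval_* blocks (such as the step-330 metrics above) land in eval_logs and can be filtered the same way by key prefix, e.g. keeping only entries that contain "eval_sts-test_spearman_cosine".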