{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1863,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0005367686527106817,
      "grad_norm": 24.532250817459463,
      "learning_rate": 5.3475935828877005e-08,
      "loss": 1.3138,
      "step": 1
    },
    {
      "epoch": 0.0026838432635534087,
      "grad_norm": 22.479300081612294,
      "learning_rate": 2.6737967914438503e-07,
      "loss": 1.3002,
      "step": 5
    },
    {
      "epoch": 0.005367686527106817,
      "grad_norm": 15.54111760887706,
      "learning_rate": 5.347593582887701e-07,
      "loss": 1.2704,
      "step": 10
    },
    {
      "epoch": 0.008051529790660225,
      "grad_norm": 12.534742860225304,
      "learning_rate": 8.021390374331551e-07,
      "loss": 1.1283,
      "step": 15
    },
    {
      "epoch": 0.010735373054213635,
      "grad_norm": 9.147671503937955,
      "learning_rate": 1.0695187165775401e-06,
      "loss": 1.0341,
      "step": 20
    },
    {
      "epoch": 0.013419216317767043,
      "grad_norm": 3.433701365796985,
      "learning_rate": 1.3368983957219254e-06,
      "loss": 0.9283,
      "step": 25
    },
    {
      "epoch": 0.01610305958132045,
      "grad_norm": 3.4271738875345377,
      "learning_rate": 1.6042780748663103e-06,
      "loss": 0.8924,
      "step": 30
    },
    {
      "epoch": 0.01878690284487386,
      "grad_norm": 2.8975789735077973,
      "learning_rate": 1.8716577540106954e-06,
      "loss": 0.8584,
      "step": 35
    },
    {
      "epoch": 0.02147074610842727,
      "grad_norm": 2.8729414451167226,
      "learning_rate": 2.1390374331550802e-06,
      "loss": 0.8417,
      "step": 40
    },
    {
      "epoch": 0.024154589371980676,
      "grad_norm": 2.827429910087497,
      "learning_rate": 2.4064171122994653e-06,
      "loss": 0.8229,
      "step": 45
    },
    {
      "epoch": 0.026838432635534086,
      "grad_norm": 2.8959001486827574,
      "learning_rate": 2.673796791443851e-06,
      "loss": 0.8203,
      "step": 50
    },
    {
      "epoch": 0.029522275899087493,
      "grad_norm": 3.0595595781040337,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 0.8131,
      "step": 55
    },
    {
      "epoch": 0.0322061191626409,
      "grad_norm": 3.0267540777499375,
      "learning_rate": 3.2085561497326205e-06,
      "loss": 0.8006,
      "step": 60
    },
    {
      "epoch": 0.03488996242619431,
      "grad_norm": 3.0813472677719207,
      "learning_rate": 3.4759358288770056e-06,
      "loss": 0.7948,
      "step": 65
    },
    {
      "epoch": 0.03757380568974772,
      "grad_norm": 3.0313107960276615,
      "learning_rate": 3.7433155080213907e-06,
      "loss": 0.7748,
      "step": 70
    },
    {
      "epoch": 0.040257648953301126,
      "grad_norm": 3.0434680433124246,
      "learning_rate": 4.010695187165775e-06,
      "loss": 0.7711,
      "step": 75
    },
    {
      "epoch": 0.04294149221685454,
      "grad_norm": 2.9437229789589527,
      "learning_rate": 4.2780748663101604e-06,
      "loss": 0.7757,
      "step": 80
    },
    {
      "epoch": 0.045625335480407946,
      "grad_norm": 3.186097917871563,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.7595,
      "step": 85
    },
    {
      "epoch": 0.04830917874396135,
      "grad_norm": 3.001408491927411,
      "learning_rate": 4.812834224598931e-06,
      "loss": 0.7398,
      "step": 90
    },
    {
      "epoch": 0.05099302200751476,
      "grad_norm": 3.255982858352429,
      "learning_rate": 5.0802139037433165e-06,
      "loss": 0.7472,
      "step": 95
    },
    {
      "epoch": 0.05367686527106817,
      "grad_norm": 3.02587969572454,
      "learning_rate": 5.347593582887702e-06,
      "loss": 0.7477,
      "step": 100
    },
    {
      "epoch": 0.05636070853462158,
      "grad_norm": 2.9034466316514718,
      "learning_rate": 5.614973262032086e-06,
      "loss": 0.7328,
      "step": 105
    },
    {
      "epoch": 0.059044551798174985,
      "grad_norm": 2.963969783004761,
      "learning_rate": 5.882352941176471e-06,
      "loss": 0.7306,
      "step": 110
    },
    {
      "epoch": 0.06172839506172839,
      "grad_norm": 3.011826081077009,
      "learning_rate": 6.149732620320856e-06,
      "loss": 0.7386,
      "step": 115
    },
    {
      "epoch": 0.0644122383252818,
      "grad_norm": 3.0450295583090754,
      "learning_rate": 6.417112299465241e-06,
      "loss": 0.7278,
      "step": 120
    },
    {
      "epoch": 0.06709608158883522,
      "grad_norm": 2.8442223467397247,
      "learning_rate": 6.684491978609626e-06,
      "loss": 0.7188,
      "step": 125
    },
    {
      "epoch": 0.06977992485238863,
      "grad_norm": 2.896755919332201,
      "learning_rate": 6.951871657754011e-06,
      "loss": 0.7112,
      "step": 130
    },
    {
      "epoch": 0.07246376811594203,
      "grad_norm": 2.9785923822216636,
      "learning_rate": 7.219251336898396e-06,
      "loss": 0.7006,
      "step": 135
    },
    {
      "epoch": 0.07514761137949544,
      "grad_norm": 2.8639433452496337,
      "learning_rate": 7.486631016042781e-06,
      "loss": 0.7012,
      "step": 140
    },
    {
      "epoch": 0.07783145464304884,
      "grad_norm": 2.9748426164928032,
      "learning_rate": 7.754010695187166e-06,
      "loss": 0.6996,
      "step": 145
    },
    {
      "epoch": 0.08051529790660225,
      "grad_norm": 2.8259720368976606,
      "learning_rate": 8.02139037433155e-06,
      "loss": 0.7063,
      "step": 150
    },
    {
      "epoch": 0.08319914117015566,
      "grad_norm": 2.988719545239136,
      "learning_rate": 8.288770053475937e-06,
      "loss": 0.7006,
      "step": 155
    },
    {
      "epoch": 0.08588298443370908,
      "grad_norm": 2.920246855352462,
      "learning_rate": 8.556149732620321e-06,
      "loss": 0.7103,
      "step": 160
    },
    {
      "epoch": 0.08856682769726248,
      "grad_norm": 3.0463191656329185,
      "learning_rate": 8.823529411764707e-06,
      "loss": 0.6985,
      "step": 165
    },
    {
      "epoch": 0.09125067096081589,
      "grad_norm": 2.6865694036401706,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.6975,
      "step": 170
    },
    {
      "epoch": 0.0939345142243693,
      "grad_norm": 2.626512525739788,
      "learning_rate": 9.358288770053477e-06,
      "loss": 0.7027,
      "step": 175
    },
    {
      "epoch": 0.0966183574879227,
      "grad_norm": 2.8908193689229704,
      "learning_rate": 9.625668449197861e-06,
      "loss": 0.7071,
      "step": 180
    },
    {
      "epoch": 0.09930220075147611,
      "grad_norm": 3.005600658965135,
      "learning_rate": 9.893048128342247e-06,
      "loss": 0.6974,
      "step": 185
    },
    {
      "epoch": 0.10198604401502952,
      "grad_norm": 2.6820135855233427,
      "learning_rate": 9.999920944317846e-06,
      "loss": 0.7078,
      "step": 190
    },
    {
      "epoch": 0.10466988727858294,
      "grad_norm": 2.8217337120554595,
      "learning_rate": 9.99943783531341e-06,
      "loss": 0.6827,
      "step": 195
    },
    {
      "epoch": 0.10735373054213634,
      "grad_norm": 2.6986357719933087,
      "learning_rate": 9.998515579512446e-06,
      "loss": 0.7012,
      "step": 200
    },
    {
      "epoch": 0.11003757380568975,
      "grad_norm": 2.5840789947652096,
      "learning_rate": 9.997154257925199e-06,
      "loss": 0.6933,
      "step": 205
    },
    {
      "epoch": 0.11272141706924316,
      "grad_norm": 2.6923419523116348,
      "learning_rate": 9.995353990129115e-06,
      "loss": 0.6877,
      "step": 210
    },
    {
      "epoch": 0.11540526033279656,
      "grad_norm": 2.6327095351046244,
      "learning_rate": 9.99311493425834e-06,
      "loss": 0.6913,
      "step": 215
    },
    {
      "epoch": 0.11808910359634997,
      "grad_norm": 2.7716420958204337,
      "learning_rate": 9.99043728698983e-06,
      "loss": 0.6777,
      "step": 220
    },
    {
      "epoch": 0.12077294685990338,
      "grad_norm": 2.5671683281410216,
      "learning_rate": 9.987321283526072e-06,
      "loss": 0.678,
      "step": 225
    },
    {
      "epoch": 0.12345679012345678,
      "grad_norm": 2.6849722010316626,
      "learning_rate": 9.983767197574432e-06,
      "loss": 0.6908,
      "step": 230
    },
    {
      "epoch": 0.1261406333870102,
      "grad_norm": 2.5611382815074015,
      "learning_rate": 9.979775341323097e-06,
      "loss": 0.6792,
      "step": 235
    },
    {
      "epoch": 0.1288244766505636,
      "grad_norm": 2.5649832974368474,
      "learning_rate": 9.975346065413673e-06,
      "loss": 0.6856,
      "step": 240
    },
    {
      "epoch": 0.13150831991411702,
      "grad_norm": 2.408926177053808,
      "learning_rate": 9.970479758910365e-06,
      "loss": 0.6904,
      "step": 245
    },
    {
      "epoch": 0.13419216317767044,
      "grad_norm": 2.6082645630171726,
      "learning_rate": 9.965176849265814e-06,
      "loss": 0.6873,
      "step": 250
    },
    {
      "epoch": 0.13687600644122383,
      "grad_norm": 2.5439914668381114,
      "learning_rate": 9.959437802283552e-06,
      "loss": 0.6773,
      "step": 255
    },
    {
      "epoch": 0.13955984970477725,
      "grad_norm": 2.490950435247341,
      "learning_rate": 9.953263122077077e-06,
      "loss": 0.6755,
      "step": 260
    },
    {
      "epoch": 0.14224369296833064,
      "grad_norm": 2.5615157225803595,
      "learning_rate": 9.946653351025575e-06,
      "loss": 0.6675,
      "step": 265
    },
    {
      "epoch": 0.14492753623188406,
      "grad_norm": 2.5970332229833426,
      "learning_rate": 9.939609069726279e-06,
      "loss": 0.6727,
      "step": 270
    },
    {
      "epoch": 0.14761137949543746,
      "grad_norm": 2.698178435782262,
      "learning_rate": 9.932130896943477e-06,
      "loss": 0.6651,
      "step": 275
    },
    {
      "epoch": 0.15029522275899088,
      "grad_norm": 2.6376811569878114,
      "learning_rate": 9.924219489554145e-06,
      "loss": 0.6627,
      "step": 280
    },
    {
      "epoch": 0.1529790660225443,
      "grad_norm": 2.597622100810795,
      "learning_rate": 9.915875542490257e-06,
      "loss": 0.6853,
      "step": 285
    },
    {
      "epoch": 0.1556629092860977,
      "grad_norm": 2.6511726087211174,
      "learning_rate": 9.907099788677745e-06,
      "loss": 0.6701,
      "step": 290
    },
    {
      "epoch": 0.1583467525496511,
      "grad_norm": 2.6406315274257337,
      "learning_rate": 9.897892998972113e-06,
      "loss": 0.6693,
      "step": 295
    },
    {
      "epoch": 0.1610305958132045,
      "grad_norm": 2.4213035415558966,
      "learning_rate": 9.888255982090728e-06,
      "loss": 0.6602,
      "step": 300
    },
    {
      "epoch": 0.16371443907675792,
      "grad_norm": 3.4846721277150046,
      "learning_rate": 9.878189584541783e-06,
      "loss": 0.6608,
      "step": 305
    },
    {
      "epoch": 0.16639828234031132,
      "grad_norm": 7.014634191079299,
      "learning_rate": 9.867694690549943e-06,
      "loss": 0.6653,
      "step": 310
    },
    {
      "epoch": 0.16908212560386474,
      "grad_norm": 2.932139360044067,
      "learning_rate": 9.85677222197867e-06,
      "loss": 0.6657,
      "step": 315
    },
    {
      "epoch": 0.17176596886741816,
      "grad_norm": 2.3975958892517566,
      "learning_rate": 9.845423138249254e-06,
      "loss": 0.6529,
      "step": 320
    },
    {
      "epoch": 0.17444981213097155,
      "grad_norm": 2.5923171187304477,
      "learning_rate": 9.833648436256525e-06,
      "loss": 0.6618,
      "step": 325
    },
    {
      "epoch": 0.17713365539452497,
      "grad_norm": 2.4438855676344433,
      "learning_rate": 9.821449150281308e-06,
      "loss": 0.6483,
      "step": 330
    },
    {
      "epoch": 0.17981749865807836,
      "grad_norm": 2.519109888602999,
      "learning_rate": 9.808826351899551e-06,
      "loss": 0.6515,
      "step": 335
    },
    {
      "epoch": 0.18250134192163178,
      "grad_norm": 2.540504649667965,
      "learning_rate": 9.795781149888216e-06,
      "loss": 0.6629,
      "step": 340
    },
    {
      "epoch": 0.18518518518518517,
      "grad_norm": 2.4014123234116,
      "learning_rate": 9.782314690127867e-06,
      "loss": 0.6631,
      "step": 345
    },
    {
      "epoch": 0.1878690284487386,
      "grad_norm": 2.4262806054618484,
      "learning_rate": 9.768428155502038e-06,
      "loss": 0.6583,
      "step": 350
    },
    {
      "epoch": 0.19055287171229202,
      "grad_norm": 2.401370354238805,
      "learning_rate": 9.754122765793306e-06,
      "loss": 0.6443,
      "step": 355
    },
    {
      "epoch": 0.1932367149758454,
      "grad_norm": 2.569449085939293,
      "learning_rate": 9.739399777576169e-06,
      "loss": 0.6375,
      "step": 360
    },
    {
      "epoch": 0.19592055823939883,
      "grad_norm": 2.631241782716603,
      "learning_rate": 9.72426048410665e-06,
      "loss": 0.6233,
      "step": 365
    },
    {
      "epoch": 0.19860440150295222,
      "grad_norm": 2.426731921614948,
      "learning_rate": 9.70870621520871e-06,
      "loss": 0.622,
      "step": 370
    },
    {
      "epoch": 0.20128824476650564,
      "grad_norm": 2.672676505801346,
      "learning_rate": 9.692738337157441e-06,
      "loss": 0.6304,
      "step": 375
    },
    {
      "epoch": 0.20397208803005903,
      "grad_norm": 2.3546077000692054,
      "learning_rate": 9.676358252559034e-06,
      "loss": 0.6471,
      "step": 380
    },
    {
      "epoch": 0.20665593129361245,
      "grad_norm": 2.603636786602239,
      "learning_rate": 9.6595674002276e-06,
      "loss": 0.6518,
      "step": 385
    },
    {
      "epoch": 0.20933977455716588,
      "grad_norm": 2.6654147172003766,
      "learning_rate": 9.642367255058767e-06,
      "loss": 0.6432,
      "step": 390
    },
    {
      "epoch": 0.21202361782071927,
      "grad_norm": 2.7927745223130342,
      "learning_rate": 9.624759327900131e-06,
      "loss": 0.6221,
      "step": 395
    },
    {
      "epoch": 0.2147074610842727,
      "grad_norm": 2.322677945570656,
      "learning_rate": 9.606745165418554e-06,
      "loss": 0.6236,
      "step": 400
    },
    {
      "epoch": 0.21739130434782608,
      "grad_norm": 2.606115769256056,
      "learning_rate": 9.58832634996429e-06,
      "loss": 0.6288,
      "step": 405
    },
    {
      "epoch": 0.2200751476113795,
      "grad_norm": 2.4723541437661414,
      "learning_rate": 9.569504499432005e-06,
      "loss": 0.608,
      "step": 410
    },
    {
      "epoch": 0.2227589908749329,
      "grad_norm": 2.290568268708195,
      "learning_rate": 9.550281267118659e-06,
      "loss": 0.6087,
      "step": 415
    },
    {
      "epoch": 0.22544283413848631,
      "grad_norm": 2.351420152069522,
      "learning_rate": 9.530658341578276e-06,
      "loss": 0.6138,
      "step": 420
    },
    {
      "epoch": 0.2281266774020397,
      "grad_norm": 2.4088354796246016,
      "learning_rate": 9.510637446473633e-06,
      "loss": 0.6163,
      "step": 425
    },
    {
      "epoch": 0.23081052066559313,
      "grad_norm": 2.439124497002319,
      "learning_rate": 9.490220340424844e-06,
      "loss": 0.6149,
      "step": 430
    },
    {
      "epoch": 0.23349436392914655,
      "grad_norm": 2.3651788352560428,
      "learning_rate": 9.469408816854898e-06,
      "loss": 0.6185,
      "step": 435
    },
    {
      "epoch": 0.23617820719269994,
      "grad_norm": 2.618918214991491,
      "learning_rate": 9.448204703832102e-06,
      "loss": 0.611,
      "step": 440
    },
    {
      "epoch": 0.23886205045625336,
      "grad_norm": 2.48742969484966,
      "learning_rate": 9.426609863909537e-06,
      "loss": 0.6219,
      "step": 445
    },
    {
      "epoch": 0.24154589371980675,
      "grad_norm": 2.5469891537442613,
      "learning_rate": 9.404626193961427e-06,
      "loss": 0.6179,
      "step": 450
    },
    {
      "epoch": 0.24422973698336017,
      "grad_norm": 2.3741171814621853,
      "learning_rate": 9.382255625016527e-06,
      "loss": 0.6106,
      "step": 455
    },
    {
      "epoch": 0.24691358024691357,
      "grad_norm": 2.3804430378530115,
      "learning_rate": 9.359500122088511e-06,
      "loss": 0.6153,
      "step": 460
    },
    {
      "epoch": 0.249597423510467,
      "grad_norm": 2.3284131550985396,
      "learning_rate": 9.336361684003353e-06,
      "loss": 0.6093,
      "step": 465
    },
    {
      "epoch": 0.2522812667740204,
      "grad_norm": 2.456024811325016,
      "learning_rate": 9.312842343223764e-06,
      "loss": 0.6169,
      "step": 470
    },
    {
      "epoch": 0.2549651100375738,
      "grad_norm": 2.4687514469686693,
      "learning_rate": 9.288944165670651e-06,
      "loss": 0.6143,
      "step": 475
    },
    {
      "epoch": 0.2576489533011272,
      "grad_norm": 2.390708189479994,
      "learning_rate": 9.264669250541658e-06,
      "loss": 0.5868,
      "step": 480
    },
    {
      "epoch": 0.2603327965646806,
      "grad_norm": 2.3542679707474217,
      "learning_rate": 9.240019730126764e-06,
      "loss": 0.6135,
      "step": 485
    },
    {
      "epoch": 0.26301663982823403,
      "grad_norm": 2.3093208133811634,
      "learning_rate": 9.214997769620998e-06,
      "loss": 0.5951,
      "step": 490
    },
    {
      "epoch": 0.26570048309178745,
      "grad_norm": 2.3453177239592233,
      "learning_rate": 9.189605566934235e-06,
      "loss": 0.6055,
      "step": 495
    },
    {
      "epoch": 0.2683843263553409,
      "grad_norm": 2.68049017575752,
      "learning_rate": 9.163845352498141e-06,
      "loss": 0.6012,
      "step": 500
    },
    {
      "epoch": 0.27106816961889424,
      "grad_norm": 2.410046138698886,
      "learning_rate": 9.137719389070259e-06,
      "loss": 0.6054,
      "step": 505
    },
    {
      "epoch": 0.27375201288244766,
      "grad_norm": 2.681286439839222,
      "learning_rate": 9.111229971535231e-06,
      "loss": 0.603,
      "step": 510
    },
    {
      "epoch": 0.2764358561460011,
      "grad_norm": 2.558308980840885,
      "learning_rate": 9.084379426703245e-06,
      "loss": 0.602,
      "step": 515
    },
    {
      "epoch": 0.2791196994095545,
      "grad_norm": 2.4105884527460573,
      "learning_rate": 9.057170113105622e-06,
      "loss": 0.5946,
      "step": 520
    },
    {
      "epoch": 0.28180354267310787,
      "grad_norm": 2.2742333866866855,
      "learning_rate": 9.029604420787666e-06,
      "loss": 0.5811,
      "step": 525
    },
    {
      "epoch": 0.2844873859366613,
      "grad_norm": 2.519471188975317,
      "learning_rate": 9.001684771098709e-06,
      "loss": 0.5868,
      "step": 530
    },
    {
      "epoch": 0.2871712292002147,
      "grad_norm": 2.6490393180845015,
      "learning_rate": 8.973413616479429e-06,
      "loss": 0.5917,
      "step": 535
    },
    {
      "epoch": 0.2898550724637681,
      "grad_norm": 2.383951019415346,
      "learning_rate": 8.944793440246435e-06,
      "loss": 0.5888,
      "step": 540
    },
    {
      "epoch": 0.29253891572732155,
      "grad_norm": 2.4461042507556803,
      "learning_rate": 8.915826756374118e-06,
      "loss": 0.5782,
      "step": 545
    },
    {
      "epoch": 0.2952227589908749,
      "grad_norm": 2.4291911892877622,
      "learning_rate": 8.88651610927384e-06,
      "loss": 0.5678,
      "step": 550
    },
    {
      "epoch": 0.29790660225442833,
      "grad_norm": 2.4530217266247663,
      "learning_rate": 8.856864073570429e-06,
      "loss": 0.5659,
      "step": 555
    },
    {
      "epoch": 0.30059044551798175,
      "grad_norm": 2.4328222140868827,
      "learning_rate": 8.82687325387603e-06,
      "loss": 0.578,
      "step": 560
    },
    {
      "epoch": 0.3032742887815352,
      "grad_norm": 2.4319552814059437,
      "learning_rate": 8.796546284561307e-06,
      "loss": 0.5804,
      "step": 565
    },
    {
      "epoch": 0.3059581320450886,
      "grad_norm": 2.657187133317878,
      "learning_rate": 8.765885829524059e-06,
      "loss": 0.5789,
      "step": 570
    },
    {
      "epoch": 0.30864197530864196,
      "grad_norm": 2.2697836625371663,
      "learning_rate": 8.734894581955208e-06,
      "loss": 0.573,
      "step": 575
    },
    {
      "epoch": 0.3113258185721954,
      "grad_norm": 2.401887770020238,
      "learning_rate": 8.703575264102245e-06,
      "loss": 0.5776,
      "step": 580
    },
    {
      "epoch": 0.3140096618357488,
      "grad_norm": 2.2792894044568586,
      "learning_rate": 8.6719306270301e-06,
      "loss": 0.5589,
      "step": 585
    },
    {
      "epoch": 0.3166935050993022,
      "grad_norm": 2.5255443105698134,
      "learning_rate": 8.639963450379494e-06,
      "loss": 0.566,
      "step": 590
    },
    {
      "epoch": 0.3193773483628556,
      "grad_norm": 2.8143947788616295,
      "learning_rate": 8.607676542122782e-06,
      "loss": 0.5662,
      "step": 595
    },
    {
      "epoch": 0.322061191626409,
      "grad_norm": 2.3227435317108434,
      "learning_rate": 8.575072738317297e-06,
      "loss": 0.5813,
      "step": 600
    },
    {
      "epoch": 0.3247450348899624,
      "grad_norm": 2.5213044273313896,
      "learning_rate": 8.542154902856232e-06,
      "loss": 0.586,
      "step": 605
    },
    {
      "epoch": 0.32742887815351585,
      "grad_norm": 2.3633935742818912,
      "learning_rate": 8.508925927217083e-06,
      "loss": 0.5557,
      "step": 610
    },
    {
      "epoch": 0.33011272141706927,
      "grad_norm": 2.336769550705686,
      "learning_rate": 8.475388730207662e-06,
      "loss": 0.5596,
      "step": 615
    },
    {
      "epoch": 0.33279656468062263,
      "grad_norm": 2.4227646867382235,
      "learning_rate": 8.441546257709708e-06,
      "loss": 0.5709,
      "step": 620
    },
    {
      "epoch": 0.33548040794417605,
      "grad_norm": 2.53539797666282,
      "learning_rate": 8.407401482420129e-06,
      "loss": 0.5546,
      "step": 625
    },
    {
      "epoch": 0.33816425120772947,
      "grad_norm": 2.6247283116786355,
      "learning_rate": 8.372957403589873e-06,
      "loss": 0.5689,
      "step": 630
    },
    {
      "epoch": 0.3408480944712829,
      "grad_norm": 2.5918886646394323,
      "learning_rate": 8.33821704676049e-06,
      "loss": 0.555,
      "step": 635
    },
    {
      "epoch": 0.3435319377348363,
      "grad_norm": 2.341174391808383,
      "learning_rate": 8.303183463498357e-06,
      "loss": 0.5622,
      "step": 640
    },
    {
      "epoch": 0.3462157809983897,
      "grad_norm": 2.3492859021408936,
      "learning_rate": 8.26785973112664e-06,
      "loss": 0.5422,
      "step": 645
    },
    {
      "epoch": 0.3488996242619431,
      "grad_norm": 2.4364746430045767,
      "learning_rate": 8.232248952454978e-06,
      "loss": 0.5444,
      "step": 650
    },
    {
      "epoch": 0.3515834675254965,
      "grad_norm": 2.3799694997859584,
      "learning_rate": 8.196354255506937e-06,
      "loss": 0.559,
      "step": 655
    },
    {
      "epoch": 0.35426731078904994,
      "grad_norm": 2.3548609807079317,
      "learning_rate": 8.160178793245254e-06,
      "loss": 0.5636,
      "step": 660
    },
    {
      "epoch": 0.3569511540526033,
      "grad_norm": 2.351750584868109,
      "learning_rate": 8.12372574329487e-06,
      "loss": 0.5429,
      "step": 665
    },
    {
      "epoch": 0.3596349973161567,
      "grad_norm": 2.3597264710105854,
      "learning_rate": 8.086998307663815e-06,
      "loss": 0.5379,
      "step": 670
    },
    {
      "epoch": 0.36231884057971014,
      "grad_norm": 2.3278172683561005,
      "learning_rate": 8.049999712461956e-06,
      "loss": 0.5458,
      "step": 675
    },
    {
      "epoch": 0.36500268384326356,
      "grad_norm": 2.4331488317138863,
      "learning_rate": 8.012733207617602e-06,
      "loss": 0.5513,
      "step": 680
    },
    {
      "epoch": 0.367686527106817,
      "grad_norm": 2.399924321390654,
      "learning_rate": 7.975202066592038e-06,
      "loss": 0.5236,
      "step": 685
    },
    {
      "epoch": 0.37037037037037035,
      "grad_norm": 2.492472394720929,
      "learning_rate": 7.937409586091996e-06,
      "loss": 0.5315,
      "step": 690
    },
    {
      "epoch": 0.37305421363392377,
      "grad_norm": 2.335660439190083,
      "learning_rate": 7.899359085780062e-06,
      "loss": 0.5419,
      "step": 695
    },
    {
      "epoch": 0.3757380568974772,
      "grad_norm": 2.365824065630226,
      "learning_rate": 7.86105390798309e-06,
      "loss": 0.5326,
      "step": 700
    },
    {
      "epoch": 0.3784219001610306,
      "grad_norm": 2.5349179902891463,
      "learning_rate": 7.822497417398607e-06,
      "loss": 0.5299,
      "step": 705
    },
    {
      "epoch": 0.38110574342458403,
      "grad_norm": 2.2732071597123866,
      "learning_rate": 7.783693000799263e-06,
      "loss": 0.5395,
      "step": 710
    },
    {
      "epoch": 0.3837895866881374,
      "grad_norm": 2.5477511751272677,
      "learning_rate": 7.744644066735335e-06,
      "loss": 0.5264,
      "step": 715
    },
    {
      "epoch": 0.3864734299516908,
      "grad_norm": 2.3110188045053803,
      "learning_rate": 7.705354045235335e-06,
      "loss": 0.515,
      "step": 720
    },
    {
      "epoch": 0.38915727321524424,
      "grad_norm": 2.357694411169259,
      "learning_rate": 7.665826387504708e-06,
      "loss": 0.5228,
      "step": 725
    },
    {
      "epoch": 0.39184111647879766,
      "grad_norm": 2.333837584901147,
      "learning_rate": 7.626064565622684e-06,
      "loss": 0.5354,
      "step": 730
    },
    {
      "epoch": 0.394524959742351,
      "grad_norm": 2.3812550691442205,
      "learning_rate": 7.586072072237291e-06,
      "loss": 0.5262,
      "step": 735
    },
    {
      "epoch": 0.39720880300590444,
      "grad_norm": 2.414868601462948,
      "learning_rate": 7.545852420258566e-06,
      "loss": 0.5267,
      "step": 740
    },
    {
      "epoch": 0.39989264626945786,
      "grad_norm": 2.9293269013478493,
      "learning_rate": 7.505409142549987e-06,
      "loss": 0.5141,
      "step": 745
    },
    {
      "epoch": 0.4025764895330113,
      "grad_norm": 2.330251706558549,
      "learning_rate": 7.464745791618138e-06,
      "loss": 0.5071,
      "step": 750
    },
    {
      "epoch": 0.4052603327965647,
      "grad_norm": 2.3362656238807937,
      "learning_rate": 7.423865939300674e-06,
      "loss": 0.5017,
      "step": 755
    },
    {
      "epoch": 0.40794417606011807,
      "grad_norm": 2.4001763619738523,
      "learning_rate": 7.382773176452561e-06,
      "loss": 0.5349,
      "step": 760
    },
    {
      "epoch": 0.4106280193236715,
      "grad_norm": 2.3709879728169185,
      "learning_rate": 7.341471112630667e-06,
      "loss": 0.5326,
      "step": 765
    },
    {
      "epoch": 0.4133118625872249,
      "grad_norm": 5.503655512197041,
      "learning_rate": 7.2999633757766956e-06,
      "loss": 0.5163,
      "step": 770
    },
    {
      "epoch": 0.41599570585077833,
      "grad_norm": 2.3922360788223846,
      "learning_rate": 7.258253611898509e-06,
      "loss": 0.5186,
      "step": 775
    },
    {
      "epoch": 0.41867954911433175,
      "grad_norm": 2.3251447839049275,
      "learning_rate": 7.216345484749876e-06,
      "loss": 0.5079,
      "step": 780
    },
    {
      "epoch": 0.4213633923778851,
      "grad_norm": 2.2951407025338013,
      "learning_rate": 7.174242675508636e-06,
      "loss": 0.5068,
      "step": 785
    },
    {
      "epoch": 0.42404723564143854,
      "grad_norm": 2.6480277827202694,
      "learning_rate": 7.131948882453361e-06,
      "loss": 0.4885,
      "step": 790
    },
    {
      "epoch": 0.42673107890499196,
      "grad_norm": 2.2951285614754737,
      "learning_rate": 7.089467820638491e-06,
      "loss": 0.4987,
      "step": 795
    },
    {
      "epoch": 0.4294149221685454,
      "grad_norm": 2.3181424069661545,
      "learning_rate": 7.0468032215680116e-06,
      "loss": 0.5221,
      "step": 800
    },
    {
      "epoch": 0.43209876543209874,
      "grad_norm": 2.2684038771454045,
      "learning_rate": 7.003958832867681e-06,
      "loss": 0.5242,
      "step": 805
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 2.676404966090793,
      "learning_rate": 6.960938417955841e-06,
      "loss": 0.5069,
      "step": 810
    },
    {
      "epoch": 0.4374664519592056,
      "grad_norm": 2.349894917490143,
      "learning_rate": 6.917745755712839e-06,
      "loss": 0.5004,
      "step": 815
    },
    {
      "epoch": 0.440150295222759,
      "grad_norm": 2.3871699426635598,
      "learning_rate": 6.874384640149101e-06,
      "loss": 0.5077,
      "step": 820
    },
    {
      "epoch": 0.4428341384863124,
      "grad_norm": 2.3369644314777593,
      "learning_rate": 6.830858880071859e-06,
      "loss": 0.4891,
      "step": 825
    },
    {
      "epoch": 0.4455179817498658,
      "grad_norm": 2.336086582871217,
      "learning_rate": 6.7871722987505975e-06,
      "loss": 0.5015,
      "step": 830
    },
    {
      "epoch": 0.4482018250134192,
      "grad_norm": 2.4367965667465836,
      "learning_rate": 6.743328733581211e-06,
      "loss": 0.4997,
      "step": 835
    },
    {
      "epoch": 0.45088566827697263,
      "grad_norm": 2.531976248724946,
      "learning_rate": 6.6993320357489396e-06,
      "loss": 0.4955,
      "step": 840
    },
    {
      "epoch": 0.45356951154052605,
      "grad_norm": 2.2364342225876688,
      "learning_rate": 6.6551860698900804e-06,
      "loss": 0.487,
      "step": 845
    },
    {
      "epoch": 0.4562533548040794,
      "grad_norm": 2.3568280656289313,
      "learning_rate": 6.610894713752513e-06,
      "loss": 0.4979,
      "step": 850
    },
    {
      "epoch": 0.45893719806763283,
      "grad_norm": 2.3652469723056293,
      "learning_rate": 6.566461857855096e-06,
      "loss": 0.4814,
      "step": 855
    },
    {
      "epoch": 0.46162104133118625,
      "grad_norm": 2.472649298194179,
      "learning_rate": 6.52189140514591e-06,
      "loss": 0.4911,
      "step": 860
    },
    {
      "epoch": 0.4643048845947397,
      "grad_norm": 2.29208612124955,
      "learning_rate": 6.477187270659444e-06,
      "loss": 0.4857,
      "step": 865
    },
    {
      "epoch": 0.4669887278582931,
      "grad_norm": 2.3253472470844354,
      "learning_rate": 6.432353381172686e-06,
      "loss": 0.4846,
      "step": 870
    },
    {
      "epoch": 0.46967257112184646,
      "grad_norm": 2.5172836620382135,
      "learning_rate": 6.387393674860205e-06,
      "loss": 0.4818,
      "step": 875
    },
    {
      "epoch": 0.4723564143853999,
      "grad_norm": 2.351389915294756,
      "learning_rate": 6.3423121009482226e-06,
      "loss": 0.5004,
      "step": 880
    },
    {
      "epoch": 0.4750402576489533,
      "grad_norm": 2.563901057893881,
      "learning_rate": 6.297112619367721e-06,
      "loss": 0.4893,
      "step": 885
    },
    {
      "epoch": 0.4777241009125067,
      "grad_norm": 2.1864865097415604,
      "learning_rate": 6.2517992004066e-06,
      "loss": 0.4927,
      "step": 890
    },
    {
      "epoch": 0.48040794417606014,
      "grad_norm": 2.280701532767888,
      "learning_rate": 6.2063758243609275e-06,
      "loss": 0.4662,
      "step": 895
    },
    {
      "epoch": 0.4830917874396135,
      "grad_norm": 2.921678160976539,
      "learning_rate": 6.160846481185326e-06,
      "loss": 0.4706,
      "step": 900
    },
    {
      "epoch": 0.4857756307031669,
      "grad_norm": 2.283768315182298,
      "learning_rate": 6.115215170142481e-06,
      "loss": 0.4788,
      "step": 905
    },
    {
      "epoch": 0.48845947396672035,
      "grad_norm": 2.333889889432955,
      "learning_rate": 6.069485899451863e-06,
      "loss": 0.4664,
      "step": 910
    },
    {
      "epoch": 0.49114331723027377,
      "grad_norm": 2.599686462975885,
      "learning_rate": 6.023662685937643e-06,
      "loss": 0.4497,
      "step": 915
    },
    {
      "epoch": 0.49382716049382713,
      "grad_norm": 2.5492827283831216,
      "learning_rate": 5.9777495546758556e-06,
      "loss": 0.4857,
      "step": 920
    },
    {
      "epoch": 0.49651100375738055,
      "grad_norm": 2.4175643213741553,
      "learning_rate": 5.9317505386408436e-06,
      "loss": 0.4608,
      "step": 925
    },
    {
      "epoch": 0.499194847020934,
      "grad_norm": 2.377055030538647,
      "learning_rate": 5.885669678350997e-06,
      "loss": 0.4803,
      "step": 930
    },
    {
      "epoch": 0.5018786902844874,
      "grad_norm": 2.2334506971052965,
      "learning_rate": 5.839511021513853e-06,
      "loss": 0.4493,
      "step": 935
    },
    {
      "epoch": 0.5045625335480408,
      "grad_norm": 2.32425118765916,
      "learning_rate": 5.793278622670527e-06,
      "loss": 0.4589,
      "step": 940
    },
    {
      "epoch": 0.5072463768115942,
      "grad_norm": 2.410888326463012,
      "learning_rate": 5.746976542839583e-06,
      "loss": 0.4831,
      "step": 945
    },
    {
      "epoch": 0.5099302200751477,
      "grad_norm": 2.2260467185492057,
      "learning_rate": 5.700608849160307e-06,
      "loss": 0.4613,
      "step": 950
    },
    {
      "epoch": 0.512614063338701,
      "grad_norm": 2.345626470642079,
      "learning_rate": 5.654179614535457e-06,
      "loss": 0.4611,
      "step": 955
    },
    {
      "epoch": 0.5152979066022544,
      "grad_norm": 2.4115998693660496,
      "learning_rate": 5.607692917273499e-06,
      "loss": 0.4816,
      "step": 960
    },
    {
      "epoch": 0.5179817498658078,
      "grad_norm": 2.2111047514137927,
      "learning_rate": 5.561152840730371e-06,
      "loss": 0.465,
      "step": 965
    },
    {
      "epoch": 0.5206655931293612,
      "grad_norm": 2.5262140714719212,
      "learning_rate": 5.514563472950812e-06,
      "loss": 0.4622,
      "step": 970
    },
    {
      "epoch": 0.5233494363929146,
      "grad_norm": 2.2195436050046817,
      "learning_rate": 5.46792890630926e-06,
      "loss": 0.4558,
      "step": 975
    },
    {
      "epoch": 0.5260332796564681,
      "grad_norm": 2.2799485984178944,
      "learning_rate": 5.421253237150389e-06,
      "loss": 0.4648,
      "step": 980
    },
    {
      "epoch": 0.5287171229200215,
      "grad_norm": 2.6301434385590885,
      "learning_rate": 5.374540565429288e-06,
      "loss": 0.4631,
      "step": 985
    },
    {
      "epoch": 0.5314009661835749,
      "grad_norm": 2.471540341708861,
      "learning_rate": 5.327794994351321e-06,
      "loss": 0.4524,
      "step": 990
    },
    {
      "epoch": 0.5340848094471283,
      "grad_norm": 2.337507326917371,
      "learning_rate": 5.281020630011703e-06,
      "loss": 0.4684,
      "step": 995
    },
    {
      "epoch": 0.5367686527106817,
      "grad_norm": 2.3216726905360288,
      "learning_rate": 5.234221581034834e-06,
      "loss": 0.445,
      "step": 1000
    },
    {
      "epoch": 0.5394524959742351,
      "grad_norm": 2.644572388685312,
      "learning_rate": 5.187401958213391e-06,
      "loss": 0.4493,
      "step": 1005
    },
    {
      "epoch": 0.5421363392377885,
      "grad_norm": 2.560926397352056,
      "learning_rate": 5.140565874147236e-06,
      "loss": 0.4279,
      "step": 1010
    },
    {
      "epoch": 0.5448201825013419,
      "grad_norm": 2.2873708099653247,
      "learning_rate": 5.093717442882185e-06,
      "loss": 0.4512,
      "step": 1015
    },
    {
      "epoch": 0.5475040257648953,
      "grad_norm": 2.260442565307831,
      "learning_rate": 5.046860779548613e-06,
      "loss": 0.4401,
      "step": 1020
    },
    {
      "epoch": 0.5501878690284487,
      "grad_norm": 2.3869038782477543,
      "learning_rate": 5e-06,
      "loss": 0.4382,
      "step": 1025
    },
    {
      "epoch": 0.5528717122920022,
      "grad_norm": 2.468747863225457,
      "learning_rate": 4.953139220451386e-06,
      "loss": 0.4722,
      "step": 1030
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 2.2473611442378507,
      "learning_rate": 4.906282557117817e-06,
      "loss": 0.4495,
      "step": 1035
    },
    {
      "epoch": 0.558239398819109,
      "grad_norm": 2.3833544443902532,
      "learning_rate": 4.859434125852764e-06,
      "loss": 0.4525,
      "step": 1040
    },
    {
      "epoch": 0.5609232420826624,
      "grad_norm": 2.2632676724347367,
      "learning_rate": 4.812598041786611e-06,
      "loss": 0.4425,
      "step": 1045
    },
    {
      "epoch": 0.5636070853462157,
      "grad_norm": 2.3341675236603447,
      "learning_rate": 4.765778418965168e-06,
      "loss": 0.4496,
      "step": 1050
    },
    {
      "epoch": 0.5662909286097692,
      "grad_norm": 2.3837166818982816,
      "learning_rate": 4.718979369988299e-06,
      "loss": 0.4421,
      "step": 1055
    },
    {
      "epoch": 0.5689747718733226,
      "grad_norm": 2.2164362902062744,
      "learning_rate": 4.672205005648681e-06,
      "loss": 0.4311,
      "step": 1060
    },
    {
      "epoch": 0.571658615136876,
      "grad_norm": 2.1717995325593806,
      "learning_rate": 4.625459434570712e-06,
      "loss": 0.4231,
      "step": 1065
    },
    {
      "epoch": 0.5743424584004294,
      "grad_norm": 2.365900130956694,
      "learning_rate": 4.578746762849612e-06,
      "loss": 0.4188,
      "step": 1070
    },
    {
      "epoch": 0.5770263016639828,
      "grad_norm": 2.270809807847061,
      "learning_rate": 4.532071093690741e-06,
      "loss": 0.432,
      "step": 1075
    },
    {
      "epoch": 0.5797101449275363,
      "grad_norm": 2.3336831813622014,
      "learning_rate": 4.485436527049189e-06,
      "loss": 0.4364,
      "step": 1080
    },
    {
      "epoch": 0.5823939881910897,
      "grad_norm": 2.2310929205351657,
      "learning_rate": 4.438847159269631e-06,
      "loss": 0.4237,
      "step": 1085
    },
    {
      "epoch": 0.5850778314546431,
      "grad_norm": 2.2709185614877057,
      "learning_rate": 4.392307082726503e-06,
      "loss": 0.4117,
      "step": 1090
    },
    {
      "epoch": 0.5877616747181964,
      "grad_norm": 2.376948990363339,
      "learning_rate": 4.345820385464543e-06,
      "loss": 0.4184,
      "step": 1095
    },
    {
      "epoch": 0.5904455179817498,
      "grad_norm": 2.4527120854156954,
      "learning_rate": 4.299391150839694e-06,
      "loss": 0.4124,
      "step": 1100
    },
    {
      "epoch": 0.5931293612453032,
      "grad_norm": 2.405380755864835,
      "learning_rate": 4.253023457160418e-06,
      "loss": 0.4235,
      "step": 1105
    },
    {
      "epoch": 0.5958132045088567,
      "grad_norm": 2.2780181294314965,
      "learning_rate": 4.2067213773294745e-06,
      "loss": 0.4311,
      "step": 1110
    },
    {
      "epoch": 0.5984970477724101,
      "grad_norm": 2.352910332550843,
      "learning_rate": 4.16048897848615e-06,
      "loss": 0.4258,
      "step": 1115
    },
    {
      "epoch": 0.6011808910359635,
      "grad_norm": 2.305493324748258,
      "learning_rate": 4.114330321649004e-06,
      "loss": 0.4293,
      "step": 1120
    },
    {
      "epoch": 0.6038647342995169,
      "grad_norm": 2.426031927463211,
      "learning_rate": 4.068249461359158e-06,
      "loss": 0.4164,
      "step": 1125
    },
    {
      "epoch": 0.6065485775630703,
      "grad_norm": 2.1758999179831053,
      "learning_rate": 4.022250445324145e-06,
      "loss": 0.4275,
      "step": 1130
    },
    {
      "epoch": 0.6092324208266238,
      "grad_norm": 2.2904273454936783,
      "learning_rate": 3.976337314062358e-06,
      "loss": 0.4086,
      "step": 1135
    },
    {
      "epoch": 0.6119162640901772,
      "grad_norm": 2.201522523908664,
      "learning_rate": 3.9305141005481385e-06,
      "loss": 0.4138,
      "step": 1140
    },
    {
      "epoch": 0.6146001073537305,
      "grad_norm": 2.218773679624799,
      "learning_rate": 3.8847848298575205e-06,
      "loss": 0.4208,
      "step": 1145
    },
    {
      "epoch": 0.6172839506172839,
      "grad_norm": 2.338212794329488,
      "learning_rate": 3.839153518814677e-06,
      "loss": 0.4072,
      "step": 1150
    },
    {
      "epoch": 0.6199677938808373,
      "grad_norm": 2.3310971417294275,
      "learning_rate": 3.7936241756390746e-06,
      "loss": 0.4214,
      "step": 1155
    },
    {
      "epoch": 0.6226516371443908,
      "grad_norm": 2.268125759049343,
      "learning_rate": 3.748200799593402e-06,
      "loss": 0.4037,
      "step": 1160
    },
    {
      "epoch": 0.6253354804079442,
      "grad_norm": 2.3815930670980903,
      "learning_rate": 3.7028873806322797e-06,
      "loss": 0.4053,
      "step": 1165
    },
    {
      "epoch": 0.6280193236714976,
      "grad_norm": 2.4002503785531535,
      "learning_rate": 3.6576878990517783e-06,
      "loss": 0.427,
      "step": 1170
    },
    {
      "epoch": 0.630703166935051,
      "grad_norm": 2.2287683393400157,
      "learning_rate": 3.6126063251397968e-06,
      "loss": 0.4047,
      "step": 1175
    },
    {
      "epoch": 0.6333870101986044,
      "grad_norm": 2.24030612251451,
      "learning_rate": 3.567646618827315e-06,
      "loss": 0.4072,
      "step": 1180
    },
    {
      "epoch": 0.6360708534621579,
      "grad_norm": 2.2476861153746173,
      "learning_rate": 3.522812729340557e-06,
      "loss": 0.4057,
      "step": 1185
    },
    {
      "epoch": 0.6387546967257112,
      "grad_norm": 2.250620066869653,
      "learning_rate": 3.4781085948540907e-06,
      "loss": 0.4135,
      "step": 1190
    },
    {
      "epoch": 0.6414385399892646,
      "grad_norm": 2.426377994243079,
      "learning_rate": 3.4335381421449056e-06,
      "loss": 0.3927,
      "step": 1195
    },
    {
      "epoch": 0.644122383252818,
      "grad_norm": 2.232109850238908,
      "learning_rate": 3.3891052862474884e-06,
      "loss": 0.399,
      "step": 1200
    },
    {
      "epoch": 0.6468062265163714,
      "grad_norm": 2.241392662695428,
      "learning_rate": 3.3448139301099212e-06,
      "loss": 0.3984,
      "step": 1205
    },
    {
      "epoch": 0.6494900697799249,
      "grad_norm": 2.1992057878349844,
      "learning_rate": 3.3006679642510596e-06,
      "loss": 0.399,
      "step": 1210
    },
    {
      "epoch": 0.6521739130434783,
      "grad_norm": 2.3288135117678,
      "learning_rate": 3.2566712664187907e-06,
      "loss": 0.4058,
      "step": 1215
    },
    {
      "epoch": 0.6548577563070317,
      "grad_norm": 2.1656603730720914,
      "learning_rate": 3.2128277012494046e-06,
      "loss": 0.4043,
      "step": 1220
    },
    {
      "epoch": 0.6575415995705851,
      "grad_norm": 2.2233875096004385,
      "learning_rate": 3.169141119928142e-06,
      "loss": 0.3953,
      "step": 1225
    },
    {
      "epoch": 0.6602254428341385,
      "grad_norm": 2.1164011887056136,
      "learning_rate": 3.125615359850902e-06,
      "loss": 0.3927,
      "step": 1230
    },
    {
      "epoch": 0.6629092860976918,
      "grad_norm": 2.40060936517856,
      "learning_rate": 3.082254244287163e-06,
      "loss": 0.3999,
      "step": 1235
    },
    {
      "epoch": 0.6655931293612453,
      "grad_norm": 2.1792932052704166,
      "learning_rate": 3.0390615820441605e-06,
      "loss": 0.3869,
      "step": 1240
    },
    {
      "epoch": 0.6682769726247987,
      "grad_norm": 2.282109833051735,
      "learning_rate": 2.9960411671323215e-06,
      "loss": 0.3901,
      "step": 1245
    },
    {
      "epoch": 0.6709608158883521,
      "grad_norm": 2.2959829437776276,
      "learning_rate": 2.9531967784319897e-06,
      "loss": 0.3847,
      "step": 1250
    },
    {
      "epoch": 0.6736446591519055,
      "grad_norm": 2.1816584624767494,
      "learning_rate": 2.9105321793615106e-06,
      "loss": 0.3889,
      "step": 1255
    },
    {
      "epoch": 0.6763285024154589,
      "grad_norm": 2.1709836152453676,
      "learning_rate": 2.8680511175466384e-06,
      "loss": 0.3921,
      "step": 1260
    },
    {
      "epoch": 0.6790123456790124,
      "grad_norm": 2.0728452674033644,
      "learning_rate": 2.8257573244913637e-06,
      "loss": 0.393,
      "step": 1265
    },
    {
      "epoch": 0.6816961889425658,
      "grad_norm": 2.292875337921466,
      "learning_rate": 2.783654515250126e-06,
      "loss": 0.3903,
      "step": 1270
    },
    {
      "epoch": 0.6843800322061192,
      "grad_norm": 2.2405815280316843,
      "learning_rate": 2.741746388101493e-06,
      "loss": 0.3837,
      "step": 1275
    },
    {
      "epoch": 0.6870638754696726,
      "grad_norm": 2.084014176066845,
      "learning_rate": 2.700036624223308e-06,
      "loss": 0.383,
      "step": 1280
    },
    {
      "epoch": 0.6897477187332259,
      "grad_norm": 2.4092559373163254,
      "learning_rate": 2.6585288873693355e-06,
      "loss": 0.3798,
      "step": 1285
    },
    {
      "epoch": 0.6924315619967794,
      "grad_norm": 2.0250661432577393,
      "learning_rate": 2.6172268235474396e-06,
      "loss": 0.3841,
      "step": 1290
    },
    {
      "epoch": 0.6951154052603328,
      "grad_norm": 2.1763028564231295,
      "learning_rate": 2.576134060699328e-06,
      "loss": 0.3831,
      "step": 1295
    },
    {
      "epoch": 0.6977992485238862,
      "grad_norm": 2.373398864029605,
      "learning_rate": 2.5352542083818636e-06,
      "loss": 0.3823,
      "step": 1300
    },
    {
      "epoch": 0.7004830917874396,
      "grad_norm": 2.1727773171979643,
      "learning_rate": 2.494590857450016e-06,
      "loss": 0.3768,
      "step": 1305
    },
    {
      "epoch": 0.703166935050993,
      "grad_norm": 2.149371676612498,
      "learning_rate": 2.454147579741436e-06,
      "loss": 0.3822,
      "step": 1310
    },
    {
      "epoch": 0.7058507783145465,
      "grad_norm": 2.2872835046116777,
      "learning_rate": 2.4139279277627113e-06,
      "loss": 0.3871,
      "step": 1315
    },
    {
      "epoch": 0.7085346215780999,
      "grad_norm": 2.153287257767556,
      "learning_rate": 2.373935434377316e-06,
      "loss": 0.3755,
      "step": 1320
    },
    {
      "epoch": 0.7112184648416533,
      "grad_norm": 2.081735295703771,
      "learning_rate": 2.3341736124952924e-06,
      "loss": 0.3774,
      "step": 1325
    },
    {
      "epoch": 0.7139023081052066,
      "grad_norm": 2.3934001061899544,
      "learning_rate": 2.294645954764666e-06,
      "loss": 0.3819,
      "step": 1330
    },
    {
      "epoch": 0.71658615136876,
      "grad_norm": 2.189084634687172,
      "learning_rate": 2.2553559332646675e-06,
      "loss": 0.3833,
      "step": 1335
    },
    {
      "epoch": 0.7192699946323134,
      "grad_norm": 2.2004454536339018,
      "learning_rate": 2.216306999200739e-06,
      "loss": 0.3662,
      "step": 1340
    },
    {
      "epoch": 0.7219538378958669,
      "grad_norm": 2.164856903929889,
      "learning_rate": 2.1775025826013953e-06,
      "loss": 0.3717,
      "step": 1345
    },
    {
      "epoch": 0.7246376811594203,
      "grad_norm": 2.2223965015758167,
      "learning_rate": 2.1389460920169105e-06,
      "loss": 0.3677,
      "step": 1350
    },
    {
      "epoch": 0.7273215244229737,
      "grad_norm": 2.1657596827057977,
      "learning_rate": 2.100640914219939e-06,
      "loss": 0.3694,
      "step": 1355
    },
    {
      "epoch": 0.7300053676865271,
      "grad_norm": 2.2173155958372806,
      "learning_rate": 2.062590413908007e-06,
      "loss": 0.37,
      "step": 1360
    },
    {
      "epoch": 0.7326892109500805,
      "grad_norm": 2.0909707865425027,
      "learning_rate": 2.0247979334079646e-06,
      "loss": 0.3632,
      "step": 1365
    },
    {
      "epoch": 0.735373054213634,
      "grad_norm": 2.2709941625135555,
      "learning_rate": 1.9872667923823996e-06,
      "loss": 0.3767,
      "step": 1370
    },
    {
      "epoch": 0.7380568974771873,
      "grad_norm": 2.025865786970852,
      "learning_rate": 1.9500002875380458e-06,
      "loss": 0.3799,
      "step": 1375
    },
    {
      "epoch": 0.7407407407407407,
      "grad_norm": 2.1733581353054454,
      "learning_rate": 1.9130016923361843e-06,
      "loss": 0.3774,
      "step": 1380
    },
    {
      "epoch": 0.7434245840042941,
      "grad_norm": 2.25182494556845,
      "learning_rate": 1.8762742567051318e-06,
      "loss": 0.3733,
      "step": 1385
    },
    {
      "epoch": 0.7461084272678475,
      "grad_norm": 2.124622472715877,
      "learning_rate": 1.8398212067547455e-06,
      "loss": 0.38,
      "step": 1390
    },
    {
      "epoch": 0.748792270531401,
      "grad_norm": 2.1174183808257716,
      "learning_rate": 1.8036457444930643e-06,
      "loss": 0.364,
      "step": 1395
    },
    {
      "epoch": 0.7514761137949544,
      "grad_norm": 2.095221806332882,
      "learning_rate": 1.7677510475450244e-06,
      "loss": 0.3644,
      "step": 1400
    },
    {
      "epoch": 0.7541599570585078,
      "grad_norm": 2.191592696456213,
      "learning_rate": 1.7321402688733618e-06,
      "loss": 0.3655,
      "step": 1405
    },
    {
      "epoch": 0.7568438003220612,
      "grad_norm": 2.1010897455276734,
      "learning_rate": 1.6968165365016443e-06,
      "loss": 0.356,
      "step": 1410
    },
    {
      "epoch": 0.7595276435856146,
      "grad_norm": 2.2759244034217883,
      "learning_rate": 1.661782953239512e-06,
      "loss": 0.354,
      "step": 1415
    },
    {
      "epoch": 0.7622114868491681,
      "grad_norm": 2.250779850948082,
      "learning_rate": 1.6270425964101272e-06,
      "loss": 0.3723,
      "step": 1420
    },
    {
      "epoch": 0.7648953301127214,
      "grad_norm": 2.205374517042519,
      "learning_rate": 1.5925985175798741e-06,
      "loss": 0.3603,
      "step": 1425
    },
    {
      "epoch": 0.7675791733762748,
      "grad_norm": 2.3498037872131685,
      "learning_rate": 1.5584537422902923e-06,
      "loss": 0.3688,
      "step": 1430
    },
    {
      "epoch": 0.7702630166398282,
      "grad_norm": 2.198726674592499,
      "learning_rate": 1.5246112697923389e-06,
      "loss": 0.3677,
      "step": 1435
    },
    {
      "epoch": 0.7729468599033816,
      "grad_norm": 2.1347598985352705,
      "learning_rate": 1.4910740727829176e-06,
      "loss": 0.3577,
      "step": 1440
    },
    {
      "epoch": 0.775630703166935,
      "grad_norm": 1.9265646159076812,
      "learning_rate": 1.457845097143769e-06,
      "loss": 0.3629,
      "step": 1445
    },
    {
      "epoch": 0.7783145464304885,
      "grad_norm": 2.104097817328808,
      "learning_rate": 1.424927261682703e-06,
      "loss": 0.3742,
      "step": 1450
    },
    {
      "epoch": 0.7809983896940419,
      "grad_norm": 2.2536366979029703,
      "learning_rate": 1.3923234578772177e-06,
      "loss": 0.356,
      "step": 1455
    },
    {
      "epoch": 0.7836822329575953,
      "grad_norm": 2.028767198712377,
      "learning_rate": 1.3600365496205059e-06,
      "loss": 0.3656,
      "step": 1460
    },
    {
      "epoch": 0.7863660762211487,
      "grad_norm": 2.1494026106033894,
      "learning_rate": 1.3280693729699018e-06,
      "loss": 0.3633,
      "step": 1465
    },
    {
      "epoch": 0.789049919484702,
      "grad_norm": 2.144837474228769,
      "learning_rate": 1.2964247358977567e-06,
      "loss": 0.3687,
      "step": 1470
    },
    {
      "epoch": 0.7917337627482555,
      "grad_norm": 1.9179097398769962,
      "learning_rate": 1.265105418044793e-06,
      "loss": 0.3604,
      "step": 1475
    },
    {
      "epoch": 0.7944176060118089,
      "grad_norm": 2.205613654909181,
      "learning_rate": 1.2341141704759418e-06,
      "loss": 0.3618,
      "step": 1480
    },
    {
      "epoch": 0.7971014492753623,
      "grad_norm": 2.1038507710445473,
      "learning_rate": 1.2034537154386933e-06,
      "loss": 0.3662,
      "step": 1485
    },
    {
      "epoch": 0.7997852925389157,
      "grad_norm": 2.1118262886014256,
      "learning_rate": 1.173126746123972e-06,
      "loss": 0.3425,
      "step": 1490
    },
    {
      "epoch": 0.8024691358024691,
      "grad_norm": 2.069286005090326,
      "learning_rate": 1.1431359264295717e-06,
      "loss": 0.3635,
      "step": 1495
    },
    {
      "epoch": 0.8051529790660226,
      "grad_norm": 2.0457106069970177,
      "learning_rate": 1.113483890726162e-06,
      "loss": 0.3515,
      "step": 1500
    },
    {
      "epoch": 0.807836822329576,
      "grad_norm": 2.2183430868311804,
      "learning_rate": 1.084173243625884e-06,
      "loss": 0.3569,
      "step": 1505
    },
    {
      "epoch": 0.8105206655931294,
      "grad_norm": 2.0344709131176484,
      "learning_rate": 1.0552065597535671e-06,
      "loss": 0.3657,
      "step": 1510
    },
    {
      "epoch": 0.8132045088566827,
      "grad_norm": 1.9907375662103048,
      "learning_rate": 1.0265863835205709e-06,
      "loss": 0.354,
      "step": 1515
    },
    {
      "epoch": 0.8158883521202361,
      "grad_norm": 2.1485376873924507,
      "learning_rate": 9.983152289012926e-07,
      "loss": 0.349,
      "step": 1520
    },
    {
      "epoch": 0.8185721953837896,
      "grad_norm": 2.237249281661755,
      "learning_rate": 9.70395579212336e-07,
      "loss": 0.3597,
      "step": 1525
    },
    {
      "epoch": 0.821256038647343,
      "grad_norm": 2.147026323371532,
      "learning_rate": 9.428298868943791e-07,
      "loss": 0.3543,
      "step": 1530
    },
    {
      "epoch": 0.8239398819108964,
      "grad_norm": 2.0799388572163533,
      "learning_rate": 9.15620573296756e-07,
      "loss": 0.3467,
      "step": 1535
    },
    {
      "epoch": 0.8266237251744498,
      "grad_norm": 2.2341120733387494,
      "learning_rate": 8.887700284647699e-07,
      "loss": 0.3523,
      "step": 1540
    },
    {
      "epoch": 0.8293075684380032,
      "grad_norm": 2.160195897291193,
      "learning_rate": 8.622806109297432e-07,
      "loss": 0.3521,
      "step": 1545
    },
    {
      "epoch": 0.8319914117015567,
      "grad_norm": 2.1704772185740664,
      "learning_rate": 8.361546475018589e-07,
      "loss": 0.3478,
      "step": 1550
    },
    {
      "epoch": 0.8346752549651101,
      "grad_norm": 2.1013253656723494,
      "learning_rate": 8.103944330657665e-07,
      "loss": 0.3484,
      "step": 1555
    },
    {
      "epoch": 0.8373590982286635,
      "grad_norm": 1.9819529400537306,
      "learning_rate": 7.850022303790033e-07,
      "loss": 0.3487,
      "step": 1560
    },
    {
      "epoch": 0.8400429414922168,
      "grad_norm": 2.13766447402272,
      "learning_rate": 7.599802698732356e-07,
      "loss": 0.3565,
      "step": 1565
    },
    {
      "epoch": 0.8427267847557702,
      "grad_norm": 2.038419435075784,
      "learning_rate": 7.353307494583445e-07,
      "loss": 0.3423,
      "step": 1570
    },
    {
      "epoch": 0.8454106280193237,
      "grad_norm": 2.1486967728137554,
      "learning_rate": 7.1105583432935e-07,
      "loss": 0.3459,
      "step": 1575
    },
    {
      "epoch": 0.8480944712828771,
      "grad_norm": 2.009446726024661,
      "learning_rate": 6.871576567762373e-07,
      "loss": 0.3576,
      "step": 1580
    },
    {
      "epoch": 0.8507783145464305,
      "grad_norm": 2.1352354185011286,
      "learning_rate": 6.63638315996647e-07,
      "loss": 0.3495,
      "step": 1585
    },
    {
      "epoch": 0.8534621578099839,
      "grad_norm": 2.1148156049768305,
      "learning_rate": 6.404998779114912e-07,
      "loss": 0.3479,
      "step": 1590
    },
    {
      "epoch": 0.8561460010735373,
      "grad_norm": 2.1166468876773794,
      "learning_rate": 6.177443749834743e-07,
      "loss": 0.3417,
      "step": 1595
    },
    {
      "epoch": 0.8588298443370908,
      "grad_norm": 2.0787291132598122,
      "learning_rate": 5.953738060385761e-07,
      "loss": 0.346,
      "step": 1600
    },
    {
      "epoch": 0.8615136876006442,
      "grad_norm": 2.217292596926989,
      "learning_rate": 5.733901360904648e-07,
      "loss": 0.3411,
      "step": 1605
    },
    {
      "epoch": 0.8641975308641975,
      "grad_norm": 2.1642916240096355,
      "learning_rate": 5.517952961678997e-07,
      "loss": 0.3349,
      "step": 1610
    },
    {
      "epoch": 0.8668813741277509,
      "grad_norm": 2.0909486003584887,
      "learning_rate": 5.305911831451044e-07,
      "loss": 0.3491,
      "step": 1615
    },
    {
      "epoch": 0.8695652173913043,
      "grad_norm": 2.049728625753877,
      "learning_rate": 5.097796595751564e-07,
      "loss": 0.332,
      "step": 1620
    },
    {
      "epoch": 0.8722490606548577,
      "grad_norm": 2.0464556041654443,
      "learning_rate": 4.89362553526368e-07,
      "loss": 0.3509,
      "step": 1625
    },
    {
      "epoch": 0.8749329039184112,
      "grad_norm": 2.008080188893636,
      "learning_rate": 4.6934165842172476e-07,
      "loss": 0.3327,
      "step": 1630
    },
    {
      "epoch": 0.8776167471819646,
      "grad_norm": 2.0461563237864206,
      "learning_rate": 4.4971873288134237e-07,
      "loss": 0.3477,
      "step": 1635
    },
    {
      "epoch": 0.880300590445518,
      "grad_norm": 1.9259918611757012,
      "learning_rate": 4.304955005679962e-07,
      "loss": 0.3501,
      "step": 1640
    },
    {
      "epoch": 0.8829844337090714,
      "grad_norm": 2.1512221676085135,
      "learning_rate": 4.1167365003571047e-07,
      "loss": 0.3474,
      "step": 1645
    },
    {
      "epoch": 0.8856682769726248,
      "grad_norm": 2.068297381192029,
      "learning_rate": 3.9325483458144684e-07,
      "loss": 0.3416,
      "step": 1650
    },
    {
      "epoch": 0.8883521202361782,
      "grad_norm": 2.0719476007696644,
      "learning_rate": 3.752406720998691e-07,
      "loss": 0.3494,
      "step": 1655
    },
    {
      "epoch": 0.8910359634997316,
      "grad_norm": 2.187345102618568,
      "learning_rate": 3.5763274494123424e-07,
      "loss": 0.3435,
      "step": 1660
    },
    {
      "epoch": 0.893719806763285,
      "grad_norm": 2.0083898672947695,
      "learning_rate": 3.404325997723995e-07,
      "loss": 0.3359,
      "step": 1665
    },
    {
      "epoch": 0.8964036500268384,
      "grad_norm": 1.9668453339791825,
      "learning_rate": 3.2364174744096587e-07,
      "loss": 0.3433,
      "step": 1670
    },
    {
      "epoch": 0.8990874932903918,
      "grad_norm": 1.958103712575425,
      "learning_rate": 3.072616628425601e-07,
      "loss": 0.3416,
      "step": 1675
    },
    {
      "epoch": 0.9017713365539453,
      "grad_norm": 2.031949910949981,
      "learning_rate": 2.9129378479129e-07,
      "loss": 0.3565,
      "step": 1680
    },
    {
      "epoch": 0.9044551798174987,
      "grad_norm": 1.9869345393432467,
      "learning_rate": 2.7573951589335177e-07,
      "loss": 0.3405,
      "step": 1685
    },
    {
      "epoch": 0.9071390230810521,
      "grad_norm": 2.0792432646936994,
      "learning_rate": 2.60600222423833e-07,
      "loss": 0.3495,
      "step": 1690
    },
    {
      "epoch": 0.9098228663446055,
      "grad_norm": 2.0883501214443916,
      "learning_rate": 2.45877234206694e-07,
      "loss": 0.3329,
      "step": 1695
    },
    {
      "epoch": 0.9125067096081588,
      "grad_norm": 2.1613119200340742,
      "learning_rate": 2.3157184449796365e-07,
      "loss": 0.3395,
      "step": 1700
    },
    {
      "epoch": 0.9151905528717122,
      "grad_norm": 1.9941904808234305,
      "learning_rate": 2.1768530987213267e-07,
      "loss": 0.3443,
      "step": 1705
    },
    {
      "epoch": 0.9178743961352657,
      "grad_norm": 2.0396795637412417,
      "learning_rate": 2.0421885011178532e-07,
      "loss": 0.349,
      "step": 1710
    },
    {
      "epoch": 0.9205582393988191,
      "grad_norm": 2.0580171530995743,
      "learning_rate": 1.911736481004489e-07,
      "loss": 0.3501,
      "step": 1715
    },
    {
      "epoch": 0.9232420826623725,
      "grad_norm": 2.0365505244300177,
      "learning_rate": 1.7855084971869385e-07,
      "loss": 0.3389,
      "step": 1720
    },
    {
      "epoch": 0.9259259259259259,
      "grad_norm": 1.9808336724602207,
      "learning_rate": 1.663515637434765e-07,
      "loss": 0.3273,
      "step": 1725
    },
    {
      "epoch": 0.9286097691894794,
      "grad_norm": 2.071334998406864,
      "learning_rate": 1.5457686175074816e-07,
      "loss": 0.3378,
      "step": 1730
    },
    {
      "epoch": 0.9312936124530328,
      "grad_norm": 2.032001004284079,
      "learning_rate": 1.432277780213298e-07,
      "loss": 0.3387,
      "step": 1735
    },
    {
      "epoch": 0.9339774557165862,
      "grad_norm": 2.3423646041282193,
      "learning_rate": 1.3230530945005704e-07,
      "loss": 0.3493,
      "step": 1740
    },
    {
      "epoch": 0.9366612989801396,
      "grad_norm": 2.237272627315429,
      "learning_rate": 1.2181041545821727e-07,
      "loss": 0.3399,
      "step": 1745
    },
    {
      "epoch": 0.9393451422436929,
      "grad_norm": 1.910922922082721,
      "learning_rate": 1.1174401790927325e-07,
      "loss": 0.3324,
      "step": 1750
    },
    {
      "epoch": 0.9420289855072463,
      "grad_norm": 2.0289396869646543,
      "learning_rate": 1.0210700102788796e-07,
      "loss": 0.3353,
      "step": 1755
    },
    {
      "epoch": 0.9447128287707998,
      "grad_norm": 1.9930688135886,
      "learning_rate": 9.290021132225546e-08,
      "loss": 0.343,
      "step": 1760
    },
    {
      "epoch": 0.9473966720343532,
      "grad_norm": 1.9869439455930584,
      "learning_rate": 8.412445750974385e-08,
      "loss": 0.3433,
      "step": 1765
    },
    {
      "epoch": 0.9500805152979066,
      "grad_norm": 1.9706742656413212,
      "learning_rate": 7.578051044585644e-08,
      "loss": 0.3276,
      "step": 1770
    },
    {
      "epoch": 0.95276435856146,
      "grad_norm": 2.0487015305286196,
      "learning_rate": 6.786910305652373e-08,
      "loss": 0.3424,
      "step": 1775
    },
    {
      "epoch": 0.9554482018250134,
      "grad_norm": 1.8807707801511768,
      "learning_rate": 6.0390930273721e-08,
      "loss": 0.3253,
      "step": 1780
    },
    {
      "epoch": 0.9581320450885669,
      "grad_norm": 2.1651075487867972,
      "learning_rate": 5.334664897442721e-08,
      "loss": 0.3427,
      "step": 1785
    },
    {
      "epoch": 0.9608158883521203,
      "grad_norm": 1.9918327197264334,
      "learning_rate": 4.673687792292436e-08,
      "loss": 0.3468,
      "step": 1790
    },
    {
      "epoch": 0.9634997316156736,
      "grad_norm": 2.0832421947647908,
      "learning_rate": 4.0562197716448316e-08,
      "loss": 0.3413,
      "step": 1795
    },
    {
      "epoch": 0.966183574879227,
      "grad_norm": 1.8518143782906564,
      "learning_rate": 3.4823150734186184e-08,
      "loss": 0.3314,
      "step": 1800
    },
    {
      "epoch": 0.9688674181427804,
      "grad_norm": 1.8591662070449098,
      "learning_rate": 2.9520241089636692e-08,
      "loss": 0.3402,
      "step": 1805
    },
    {
      "epoch": 0.9715512614063339,
      "grad_norm": 2.004188829558215,
      "learning_rate": 2.4653934586328365e-08,
      "loss": 0.3382,
      "step": 1810
    },
    {
      "epoch": 0.9742351046698873,
      "grad_norm": 1.8626147867674074,
      "learning_rate": 2.022465867690282e-08,
      "loss": 0.3398,
      "step": 1815
    },
    {
      "epoch": 0.9769189479334407,
      "grad_norm": 1.9428514502995295,
      "learning_rate": 1.6232802425568683e-08,
      "loss": 0.3438,
      "step": 1820
    },
    {
      "epoch": 0.9796027911969941,
      "grad_norm": 2.023262028137228,
      "learning_rate": 1.2678716473927821e-08,
      "loss": 0.3261,
      "step": 1825
    },
    {
      "epoch": 0.9822866344605475,
      "grad_norm": 1.9375157251991955,
      "learning_rate": 9.562713010171088e-09,
      "loss": 0.3411,
      "step": 1830
    },
    {
      "epoch": 0.984970477724101,
      "grad_norm": 1.9840670565628429,
      "learning_rate": 6.885065741661367e-09,
      "loss": 0.3448,
      "step": 1835
    },
    {
      "epoch": 0.9876543209876543,
      "grad_norm": 2.1326177570275067,
      "learning_rate": 4.646009870885593e-09,
      "loss": 0.3509,
      "step": 1840
    },
    {
      "epoch": 0.9903381642512077,
      "grad_norm": 2.2161553703672867,
      "learning_rate": 2.845742074801261e-09,
      "loss": 0.336,
      "step": 1845
    },
    {
      "epoch": 0.9930220075147611,
      "grad_norm": 1.962610075480555,
      "learning_rate": 1.484420487553595e-09,
      "loss": 0.348,
      "step": 1850
    },
    {
      "epoch": 0.9957058507783145,
      "grad_norm": 1.9169206999486972,
      "learning_rate": 5.621646865899832e-10,
      "loss": 0.3394,
      "step": 1855
    },
    {
      "epoch": 0.998389694041868,
      "grad_norm": 1.9220724874106698,
      "learning_rate": 7.905568215504921e-11,
      "loss": 0.347,
      "step": 1860
    },
    {
      "epoch": 1.0,
      "eval_runtime": 3.3936,
      "eval_samples_per_second": 2.947,
      "eval_steps_per_second": 0.884,
      "step": 1863
    },
    {
      "epoch": 1.0,
      "step": 1863,
      "total_flos": 195037149265920.0,
      "train_loss": 0.5024847409063555,
      "train_runtime": 16621.9401,
      "train_samples_per_second": 1.793,
      "train_steps_per_second": 0.112
    }
  ],
  "logging_steps": 5,
  "max_steps": 1863,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 195037149265920.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}