|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 2283, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0004380201489268506, |
|
"grad_norm": 244.0565657784503, |
|
"learning_rate": 8.733624454148472e-08, |
|
"loss": 8.7812, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002190100744634253, |
|
"grad_norm": 242.08745729404998, |
|
"learning_rate": 4.366812227074236e-07, |
|
"loss": 8.8047, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.004380201489268506, |
|
"grad_norm": 215.31824760158125, |
|
"learning_rate": 8.733624454148472e-07, |
|
"loss": 8.6969, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.006570302233902759, |
|
"grad_norm": 124.79925946857198, |
|
"learning_rate": 1.3100436681222709e-06, |
|
"loss": 8.3187, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.008760402978537012, |
|
"grad_norm": 95.62100949931548, |
|
"learning_rate": 1.7467248908296944e-06, |
|
"loss": 7.6672, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.010950503723171266, |
|
"grad_norm": 56.19522998010826, |
|
"learning_rate": 2.183406113537118e-06, |
|
"loss": 6.7922, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.013140604467805518, |
|
"grad_norm": 34.7212601740852, |
|
"learning_rate": 2.6200873362445417e-06, |
|
"loss": 5.9031, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.015330705212439772, |
|
"grad_norm": 30.849397382141074, |
|
"learning_rate": 3.0567685589519653e-06, |
|
"loss": 5.1156, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.017520805957074025, |
|
"grad_norm": 16.184875412033424, |
|
"learning_rate": 3.493449781659389e-06, |
|
"loss": 3.9531, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01971090670170828, |
|
"grad_norm": 13.212043262995175, |
|
"learning_rate": 3.930131004366812e-06, |
|
"loss": 3.1414, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.021901007446342532, |
|
"grad_norm": 6.575354808182119, |
|
"learning_rate": 4.366812227074236e-06, |
|
"loss": 2.5, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.024091108190976786, |
|
"grad_norm": 2.5861843997481873, |
|
"learning_rate": 4.80349344978166e-06, |
|
"loss": 1.9785, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.026281208935611037, |
|
"grad_norm": 1.566359722353574, |
|
"learning_rate": 5.2401746724890834e-06, |
|
"loss": 1.7488, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.02847130968024529, |
|
"grad_norm": 0.9077919251278966, |
|
"learning_rate": 5.676855895196507e-06, |
|
"loss": 1.6184, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.030661410424879545, |
|
"grad_norm": 0.6573333175178419, |
|
"learning_rate": 6.1135371179039305e-06, |
|
"loss": 1.5391, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0328515111695138, |
|
"grad_norm": 0.5238887296216804, |
|
"learning_rate": 6.550218340611354e-06, |
|
"loss": 1.4633, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.03504161191414805, |
|
"grad_norm": 0.4821823564791243, |
|
"learning_rate": 6.986899563318778e-06, |
|
"loss": 1.4199, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.03723171265878231, |
|
"grad_norm": 0.4129490577402294, |
|
"learning_rate": 7.423580786026201e-06, |
|
"loss": 1.4109, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.03942181340341656, |
|
"grad_norm": 0.40472709691340175, |
|
"learning_rate": 7.860262008733624e-06, |
|
"loss": 1.4168, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.04161191414805081, |
|
"grad_norm": 0.3808604144712055, |
|
"learning_rate": 8.296943231441049e-06, |
|
"loss": 1.3668, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.043802014892685065, |
|
"grad_norm": 0.367625493325619, |
|
"learning_rate": 8.733624454148473e-06, |
|
"loss": 1.3066, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.045992115637319315, |
|
"grad_norm": 0.359816415568165, |
|
"learning_rate": 9.170305676855896e-06, |
|
"loss": 1.3449, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.04818221638195357, |
|
"grad_norm": 0.341837652413913, |
|
"learning_rate": 9.60698689956332e-06, |
|
"loss": 1.3273, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.05037231712658782, |
|
"grad_norm": 0.34301552589118167, |
|
"learning_rate": 1.0043668122270742e-05, |
|
"loss": 1.2949, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.052562417871222074, |
|
"grad_norm": 0.3628435999185446, |
|
"learning_rate": 1.0480349344978167e-05, |
|
"loss": 1.2871, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.05475251861585633, |
|
"grad_norm": 0.3679166427745723, |
|
"learning_rate": 1.0917030567685592e-05, |
|
"loss": 1.3207, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.05694261936049058, |
|
"grad_norm": 0.34368277634691846, |
|
"learning_rate": 1.1353711790393014e-05, |
|
"loss": 1.2645, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.05913272010512484, |
|
"grad_norm": 0.3551051401296533, |
|
"learning_rate": 1.179039301310044e-05, |
|
"loss": 1.2707, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.06132282084975909, |
|
"grad_norm": 0.334086491935665, |
|
"learning_rate": 1.2227074235807861e-05, |
|
"loss": 1.2742, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.06351292159439334, |
|
"grad_norm": 0.3344453192406718, |
|
"learning_rate": 1.2663755458515286e-05, |
|
"loss": 1.2441, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.0657030223390276, |
|
"grad_norm": 0.3486113854746094, |
|
"learning_rate": 1.3100436681222708e-05, |
|
"loss": 1.2934, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.06789312308366185, |
|
"grad_norm": 0.3450373622328735, |
|
"learning_rate": 1.3537117903930132e-05, |
|
"loss": 1.2473, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.0700832238282961, |
|
"grad_norm": 0.3254863200637601, |
|
"learning_rate": 1.3973799126637555e-05, |
|
"loss": 1.2613, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.07227332457293036, |
|
"grad_norm": 0.3206618735664889, |
|
"learning_rate": 1.4410480349344979e-05, |
|
"loss": 1.275, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.07446342531756461, |
|
"grad_norm": 0.36155580225009337, |
|
"learning_rate": 1.4847161572052402e-05, |
|
"loss": 1.2379, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.07665352606219886, |
|
"grad_norm": 0.331500878464258, |
|
"learning_rate": 1.5283842794759826e-05, |
|
"loss": 1.234, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.07884362680683311, |
|
"grad_norm": 0.35558769100827337, |
|
"learning_rate": 1.5720524017467248e-05, |
|
"loss": 1.2641, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.08103372755146737, |
|
"grad_norm": 0.3514292771304392, |
|
"learning_rate": 1.6157205240174673e-05, |
|
"loss": 1.2258, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.08322382829610162, |
|
"grad_norm": 0.3158735252448049, |
|
"learning_rate": 1.6593886462882098e-05, |
|
"loss": 1.2414, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.08541392904073587, |
|
"grad_norm": 0.3332987501140752, |
|
"learning_rate": 1.703056768558952e-05, |
|
"loss": 1.2238, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.08760402978537013, |
|
"grad_norm": 0.3306118702879735, |
|
"learning_rate": 1.7467248908296945e-05, |
|
"loss": 1.2316, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.08979413053000437, |
|
"grad_norm": 0.3189870404362557, |
|
"learning_rate": 1.7903930131004367e-05, |
|
"loss": 1.2355, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.09198423127463863, |
|
"grad_norm": 0.3015408510885377, |
|
"learning_rate": 1.8340611353711792e-05, |
|
"loss": 1.2188, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.09417433201927289, |
|
"grad_norm": 0.3227399112930752, |
|
"learning_rate": 1.8777292576419214e-05, |
|
"loss": 1.2164, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.09636443276390715, |
|
"grad_norm": 0.3316376300779914, |
|
"learning_rate": 1.921397379912664e-05, |
|
"loss": 1.2457, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.09855453350854139, |
|
"grad_norm": 0.31785017269226773, |
|
"learning_rate": 1.965065502183406e-05, |
|
"loss": 1.1949, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.10074463425317565, |
|
"grad_norm": 0.3305460477093586, |
|
"learning_rate": 1.9999988303153612e-05, |
|
"loss": 1.2184, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.1029347349978099, |
|
"grad_norm": 0.32274702495374813, |
|
"learning_rate": 1.9999578916403086e-05, |
|
"loss": 1.2242, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.10512483574244415, |
|
"grad_norm": 0.3309888084620984, |
|
"learning_rate": 1.999858471469601e-05, |
|
"loss": 1.1996, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.1073149364870784, |
|
"grad_norm": 0.31589160373633207, |
|
"learning_rate": 1.9997005756177228e-05, |
|
"loss": 1.1754, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.10950503723171266, |
|
"grad_norm": 0.3245071336888696, |
|
"learning_rate": 1.9994842133190485e-05, |
|
"loss": 1.2059, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.1116951379763469, |
|
"grad_norm": 0.34753890904189133, |
|
"learning_rate": 1.999209397227302e-05, |
|
"loss": 1.2012, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.11388523872098116, |
|
"grad_norm": 0.30057443535443235, |
|
"learning_rate": 1.9988761434148164e-05, |
|
"loss": 1.2281, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.11607533946561542, |
|
"grad_norm": 0.30036747933816504, |
|
"learning_rate": 1.998484471371593e-05, |
|
"loss": 1.1785, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.11826544021024968, |
|
"grad_norm": 0.32539575218814926, |
|
"learning_rate": 1.9980344040041636e-05, |
|
"loss": 1.1742, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.12045554095488392, |
|
"grad_norm": 0.31075948876414167, |
|
"learning_rate": 1.99752596763425e-05, |
|
"loss": 1.209, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.12264564169951818, |
|
"grad_norm": 0.2948383024933128, |
|
"learning_rate": 1.9969591919972226e-05, |
|
"loss": 1.2059, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.12483574244415244, |
|
"grad_norm": 0.30809860203285494, |
|
"learning_rate": 1.9963341102403652e-05, |
|
"loss": 1.1996, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.12702584318878668, |
|
"grad_norm": 0.3063101893237704, |
|
"learning_rate": 1.995650758920932e-05, |
|
"loss": 1.2, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.12921594393342092, |
|
"grad_norm": 0.29266235209581604, |
|
"learning_rate": 1.9949091780040143e-05, |
|
"loss": 1.173, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.1314060446780552, |
|
"grad_norm": 0.3236695292638355, |
|
"learning_rate": 1.9941094108601985e-05, |
|
"loss": 1.1832, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.13359614542268944, |
|
"grad_norm": 0.29386460049329405, |
|
"learning_rate": 1.9932515042630335e-05, |
|
"loss": 1.1746, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.1357862461673237, |
|
"grad_norm": 0.3304096067847152, |
|
"learning_rate": 1.9923355083862933e-05, |
|
"loss": 1.2004, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.13797634691195795, |
|
"grad_norm": 0.297618875850005, |
|
"learning_rate": 1.9913614768010418e-05, |
|
"loss": 1.1656, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.1401664476565922, |
|
"grad_norm": 0.3453392768634244, |
|
"learning_rate": 1.9903294664725023e-05, |
|
"loss": 1.1473, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.14235654840122647, |
|
"grad_norm": 0.2952934857480258, |
|
"learning_rate": 1.989239537756723e-05, |
|
"loss": 1.1898, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.1445466491458607, |
|
"grad_norm": 0.3096490330878614, |
|
"learning_rate": 1.9880917543970503e-05, |
|
"loss": 1.1727, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.14673674989049496, |
|
"grad_norm": 0.305522689793203, |
|
"learning_rate": 1.986886183520398e-05, |
|
"loss": 1.166, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.14892685063512923, |
|
"grad_norm": 0.2977399891862773, |
|
"learning_rate": 1.985622895633323e-05, |
|
"loss": 1.1605, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.15111695137976347, |
|
"grad_norm": 0.3080937705338333, |
|
"learning_rate": 1.9843019646179014e-05, |
|
"loss": 1.1785, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.1533070521243977, |
|
"grad_norm": 0.28864764946025717, |
|
"learning_rate": 1.982923467727408e-05, |
|
"loss": 1.1797, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.15549715286903198, |
|
"grad_norm": 0.29002073593801136, |
|
"learning_rate": 1.981487485581797e-05, |
|
"loss": 1.1676, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.15768725361366623, |
|
"grad_norm": 0.3018457130707611, |
|
"learning_rate": 1.9799941021629886e-05, |
|
"loss": 1.1738, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.15987735435830047, |
|
"grad_norm": 0.30507162972432783, |
|
"learning_rate": 1.9784434048099565e-05, |
|
"loss": 1.1668, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.16206745510293474, |
|
"grad_norm": 0.3049773910908397, |
|
"learning_rate": 1.97683548421362e-05, |
|
"loss": 1.1781, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.164257555847569, |
|
"grad_norm": 0.3024971125857499, |
|
"learning_rate": 1.9751704344115402e-05, |
|
"loss": 1.1625, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.16644765659220323, |
|
"grad_norm": 0.28148210956054226, |
|
"learning_rate": 1.97344835278242e-05, |
|
"loss": 1.1687, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.1686377573368375, |
|
"grad_norm": 0.2996144816775727, |
|
"learning_rate": 1.97166934004041e-05, |
|
"loss": 1.207, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.17082785808147175, |
|
"grad_norm": 0.29532940992965984, |
|
"learning_rate": 1.9698335002292176e-05, |
|
"loss": 1.1848, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.173017958826106, |
|
"grad_norm": 0.3001673093160351, |
|
"learning_rate": 1.967940940716021e-05, |
|
"loss": 1.1828, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.17520805957074026, |
|
"grad_norm": 0.30610198280704054, |
|
"learning_rate": 1.9659917721851924e-05, |
|
"loss": 1.1867, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.1773981603153745, |
|
"grad_norm": 0.2925881036239608, |
|
"learning_rate": 1.963986108631823e-05, |
|
"loss": 1.1418, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.17958826106000875, |
|
"grad_norm": 0.29339092688050755, |
|
"learning_rate": 1.961924067355056e-05, |
|
"loss": 1.1637, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.18177836180464302, |
|
"grad_norm": 0.30407476923887133, |
|
"learning_rate": 1.9598057689512277e-05, |
|
"loss": 1.1508, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.18396846254927726, |
|
"grad_norm": 0.29998056902468595, |
|
"learning_rate": 1.957631337306814e-05, |
|
"loss": 1.1578, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.18615856329391153, |
|
"grad_norm": 0.3025021503047764, |
|
"learning_rate": 1.9554008995911837e-05, |
|
"loss": 1.1812, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.18834866403854578, |
|
"grad_norm": 0.2960918602914393, |
|
"learning_rate": 1.953114586249164e-05, |
|
"loss": 1.1793, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.19053876478318002, |
|
"grad_norm": 0.3196259778914729, |
|
"learning_rate": 1.950772530993409e-05, |
|
"loss": 1.1738, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.1927288655278143, |
|
"grad_norm": 0.2993685894942767, |
|
"learning_rate": 1.9483748707965803e-05, |
|
"loss": 1.1512, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.19491896627244854, |
|
"grad_norm": 0.3002942784736228, |
|
"learning_rate": 1.945921745883337e-05, |
|
"loss": 1.1625, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.19710906701708278, |
|
"grad_norm": 0.30887780802399506, |
|
"learning_rate": 1.9434132997221347e-05, |
|
"loss": 1.1719, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.19929916776171705, |
|
"grad_norm": 0.29738335551869177, |
|
"learning_rate": 1.9408496790168337e-05, |
|
"loss": 1.1641, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.2014892685063513, |
|
"grad_norm": 0.30233929664046716, |
|
"learning_rate": 1.9382310336981206e-05, |
|
"loss": 1.1512, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.20367936925098554, |
|
"grad_norm": 0.29527032498064754, |
|
"learning_rate": 1.935557516914739e-05, |
|
"loss": 1.148, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.2058694699956198, |
|
"grad_norm": 0.29787746244856167, |
|
"learning_rate": 1.932829285024533e-05, |
|
"loss": 1.1516, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.20805957074025405, |
|
"grad_norm": 0.2913683096092213, |
|
"learning_rate": 1.9300464975853032e-05, |
|
"loss": 1.1344, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.2102496714848883, |
|
"grad_norm": 0.27500340802613793, |
|
"learning_rate": 1.9272093173454735e-05, |
|
"loss": 1.1574, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.21243977222952257, |
|
"grad_norm": 0.2812766184977934, |
|
"learning_rate": 1.9243179102345753e-05, |
|
"loss": 1.148, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.2146298729741568, |
|
"grad_norm": 0.2865109750359857, |
|
"learning_rate": 1.9213724453535417e-05, |
|
"loss": 1.1578, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.21681997371879105, |
|
"grad_norm": 0.29189523569878567, |
|
"learning_rate": 1.9183730949648173e-05, |
|
"loss": 1.1602, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.21901007446342532, |
|
"grad_norm": 0.29979676151962115, |
|
"learning_rate": 1.9153200344822855e-05, |
|
"loss": 1.1355, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.22120017520805957, |
|
"grad_norm": 0.300831896633153, |
|
"learning_rate": 1.912213442461009e-05, |
|
"loss": 1.1391, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.2233902759526938, |
|
"grad_norm": 0.29049722290498076, |
|
"learning_rate": 1.9090535005867853e-05, |
|
"loss": 1.1727, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.22558037669732808, |
|
"grad_norm": 0.3104171548245787, |
|
"learning_rate": 1.9058403936655235e-05, |
|
"loss": 1.1551, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.22777047744196233, |
|
"grad_norm": 0.2944155570969255, |
|
"learning_rate": 1.9025743096124346e-05, |
|
"loss": 1.1527, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.22996057818659657, |
|
"grad_norm": 0.3169970998158994, |
|
"learning_rate": 1.899255439441043e-05, |
|
"loss": 1.158, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.23215067893123084, |
|
"grad_norm": 0.2842476129923102, |
|
"learning_rate": 1.8958839772520125e-05, |
|
"loss": 1.1758, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.23434077967586509, |
|
"grad_norm": 0.2855513642633395, |
|
"learning_rate": 1.8924601202217977e-05, |
|
"loss": 1.1357, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.23653088042049936, |
|
"grad_norm": 0.2848004802653552, |
|
"learning_rate": 1.888984068591111e-05, |
|
"loss": 1.1391, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.2387209811651336, |
|
"grad_norm": 0.28099993296693176, |
|
"learning_rate": 1.8854560256532098e-05, |
|
"loss": 1.1355, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.24091108190976784, |
|
"grad_norm": 0.3070333803041777, |
|
"learning_rate": 1.8818761977420114e-05, |
|
"loss": 1.143, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.24310118265440211, |
|
"grad_norm": 0.28305196192266907, |
|
"learning_rate": 1.878244794220022e-05, |
|
"loss": 1.1625, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.24529128339903636, |
|
"grad_norm": 0.29533684816936556, |
|
"learning_rate": 1.874562027466093e-05, |
|
"loss": 1.1695, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.2474813841436706, |
|
"grad_norm": 0.29819831907768873, |
|
"learning_rate": 1.8708281128630023e-05, |
|
"loss": 1.1469, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.24967148488830487, |
|
"grad_norm": 0.28827778580342983, |
|
"learning_rate": 1.8670432687848562e-05, |
|
"loss": 1.149, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.2518615856329391, |
|
"grad_norm": 0.2865593620968661, |
|
"learning_rate": 1.8632077165843174e-05, |
|
"loss": 1.1246, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.25405168637757336, |
|
"grad_norm": 0.2873333172793282, |
|
"learning_rate": 1.8593216805796612e-05, |
|
"loss": 1.1402, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.25624178712220763, |
|
"grad_norm": 0.28696704010138063, |
|
"learning_rate": 1.8553853880416555e-05, |
|
"loss": 1.1293, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.25843188786684185, |
|
"grad_norm": 0.27933859864997074, |
|
"learning_rate": 1.8513990691802686e-05, |
|
"loss": 1.1691, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.2606219886114761, |
|
"grad_norm": 0.2870969603657859, |
|
"learning_rate": 1.8473629571312073e-05, |
|
"loss": 1.1617, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.2628120893561104, |
|
"grad_norm": 0.2843186093708674, |
|
"learning_rate": 1.8432772879422797e-05, |
|
"loss": 1.1355, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.26500219010074466, |
|
"grad_norm": 0.31588736128898504, |
|
"learning_rate": 1.8391423005595928e-05, |
|
"loss": 1.1359, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.2671922908453789, |
|
"grad_norm": 0.29315028625446876, |
|
"learning_rate": 1.8349582368135764e-05, |
|
"loss": 1.1551, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.26938239159001315, |
|
"grad_norm": 0.2882074502213313, |
|
"learning_rate": 1.8307253414048395e-05, |
|
"loss": 1.1363, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.2715724923346474, |
|
"grad_norm": 0.2932578089430528, |
|
"learning_rate": 1.8264438618898617e-05, |
|
"loss": 1.1455, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.27376259307928164, |
|
"grad_norm": 0.27801162707284016, |
|
"learning_rate": 1.8221140486665125e-05, |
|
"loss": 1.1621, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.2759526938239159, |
|
"grad_norm": 0.29375240288760845, |
|
"learning_rate": 1.8177361549594068e-05, |
|
"loss": 1.1492, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.2781427945685502, |
|
"grad_norm": 0.276903179408844, |
|
"learning_rate": 1.813310436805099e-05, |
|
"loss": 1.1445, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.2803328953131844, |
|
"grad_norm": 0.27955633362670035, |
|
"learning_rate": 1.8088371530371044e-05, |
|
"loss": 1.1531, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.28252299605781866, |
|
"grad_norm": 0.2799230772005429, |
|
"learning_rate": 1.804316565270765e-05, |
|
"loss": 1.1512, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.28471309680245294, |
|
"grad_norm": 0.27521037714056357, |
|
"learning_rate": 1.7997489378879465e-05, |
|
"loss": 1.1438, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.28690319754708715, |
|
"grad_norm": 0.29389670124687384, |
|
"learning_rate": 1.7951345380215795e-05, |
|
"loss": 1.152, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.2890932982917214, |
|
"grad_norm": 0.28364632855748595, |
|
"learning_rate": 1.7904736355400324e-05, |
|
"loss": 1.1576, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.2912833990363557, |
|
"grad_norm": 0.2767305609560192, |
|
"learning_rate": 1.785766503031332e-05, |
|
"loss": 1.1414, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.2934734997809899, |
|
"grad_norm": 0.28006968803895216, |
|
"learning_rate": 1.78101341578722e-05, |
|
"loss": 1.1367, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.2956636005256242, |
|
"grad_norm": 0.2938177814391214, |
|
"learning_rate": 1.7762146517870526e-05, |
|
"loss": 1.132, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.29785370127025845, |
|
"grad_norm": 0.2763427209869464, |
|
"learning_rate": 1.7713704916815432e-05, |
|
"loss": 1.1461, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.30004380201489267, |
|
"grad_norm": 0.3006381369806697, |
|
"learning_rate": 1.76648121877635e-05, |
|
"loss": 1.134, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.30223390275952694, |
|
"grad_norm": 0.2910687567455647, |
|
"learning_rate": 1.7615471190155056e-05, |
|
"loss": 1.1176, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.3044240035041612, |
|
"grad_norm": 0.2678904446910071, |
|
"learning_rate": 1.7565684809646946e-05, |
|
"loss": 1.1223, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.3066141042487954, |
|
"grad_norm": 0.2753813506939354, |
|
"learning_rate": 1.7515455957943772e-05, |
|
"loss": 1.1574, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.3088042049934297, |
|
"grad_norm": 0.28669942485252364, |
|
"learning_rate": 1.746478757262761e-05, |
|
"loss": 1.1496, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.31099430573806397, |
|
"grad_norm": 0.2843980836766018, |
|
"learning_rate": 1.7413682616986185e-05, |
|
"loss": 1.1363, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.3131844064826982, |
|
"grad_norm": 0.2808223481563682, |
|
"learning_rate": 1.73621440798396e-05, |
|
"loss": 1.1305, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.31537450722733246, |
|
"grad_norm": 0.2922348240666733, |
|
"learning_rate": 1.7310174975365508e-05, |
|
"loss": 1.1252, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.31756460797196673, |
|
"grad_norm": 0.28067686922560126, |
|
"learning_rate": 1.7257778342922853e-05, |
|
"loss": 1.1348, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.31975470871660094, |
|
"grad_norm": 0.2945526878670907, |
|
"learning_rate": 1.7204957246874103e-05, |
|
"loss": 1.1527, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.3219448094612352, |
|
"grad_norm": 0.276941149759669, |
|
"learning_rate": 1.7151714776406034e-05, |
|
"loss": 1.1102, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.3241349102058695, |
|
"grad_norm": 0.289439650523264, |
|
"learning_rate": 1.7098054045349076e-05, |
|
"loss": 1.125, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.3263250109505037, |
|
"grad_norm": 0.2801285692740154, |
|
"learning_rate": 1.7043978191995177e-05, |
|
"loss": 1.127, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.328515111695138, |
|
"grad_norm": 0.3350495049060471, |
|
"learning_rate": 1.6989490378914304e-05, |
|
"loss": 1.1227, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.33070521243977224, |
|
"grad_norm": 0.2808592002786063, |
|
"learning_rate": 1.6934593792769435e-05, |
|
"loss": 1.152, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.33289531318440646, |
|
"grad_norm": 0.27808585554127363, |
|
"learning_rate": 1.6879291644130235e-05, |
|
"loss": 1.1309, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.33508541392904073, |
|
"grad_norm": 0.27069745729859374, |
|
"learning_rate": 1.682358716728525e-05, |
|
"loss": 1.1416, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.337275514673675, |
|
"grad_norm": 0.29843211507422185, |
|
"learning_rate": 1.676748362005279e-05, |
|
"loss": 1.1449, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.3394656154183092, |
|
"grad_norm": 0.28556892181225596, |
|
"learning_rate": 1.671098428359037e-05, |
|
"loss": 1.143, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.3416557161629435, |
|
"grad_norm": 0.30065398619371225, |
|
"learning_rate": 1.6654092462202828e-05, |
|
"loss": 1.1227, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.34384581690757776, |
|
"grad_norm": 0.2681674526100603, |
|
"learning_rate": 1.6596811483149077e-05, |
|
"loss": 1.1273, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.346035917652212, |
|
"grad_norm": 0.29230660631987243, |
|
"learning_rate": 1.6539144696447504e-05, |
|
"loss": 1.1277, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.34822601839684625, |
|
"grad_norm": 0.27613144345526425, |
|
"learning_rate": 1.6481095474680062e-05, |
|
"loss": 1.1285, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.3504161191414805, |
|
"grad_norm": 0.28721223205254864, |
|
"learning_rate": 1.642266721279502e-05, |
|
"loss": 1.1215, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.35260621988611474, |
|
"grad_norm": 0.2839922006300779, |
|
"learning_rate": 1.6363863327908405e-05, |
|
"loss": 1.1219, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.354796320630749, |
|
"grad_norm": 0.2721319526014653, |
|
"learning_rate": 1.630468725910417e-05, |
|
"loss": 1.1297, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.3569864213753833, |
|
"grad_norm": 0.2666725910793476, |
|
"learning_rate": 1.6245142467233067e-05, |
|
"loss": 1.1525, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.3591765221200175, |
|
"grad_norm": 0.2749768948580958, |
|
"learning_rate": 1.618523243471021e-05, |
|
"loss": 1.1252, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.36136662286465177, |
|
"grad_norm": 0.27001888357231557, |
|
"learning_rate": 1.6124960665311447e-05, |
|
"loss": 1.1227, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.36355672360928604, |
|
"grad_norm": 0.27097536893189883, |
|
"learning_rate": 1.606433068396843e-05, |
|
"loss": 1.1273, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.3657468243539203, |
|
"grad_norm": 0.28488649062206617, |
|
"learning_rate": 1.6003346036562457e-05, |
|
"loss": 1.1186, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.3679369250985545, |
|
"grad_norm": 0.27688500142196154, |
|
"learning_rate": 1.5942010289717108e-05, |
|
"loss": 1.1402, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.3701270258431888, |
|
"grad_norm": 0.2674674925612205, |
|
"learning_rate": 1.588032703058964e-05, |
|
"loss": 1.0869, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.37231712658782307, |
|
"grad_norm": 0.2856317902597983, |
|
"learning_rate": 1.5818299866661216e-05, |
|
"loss": 1.1398, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.3745072273324573, |
|
"grad_norm": 0.27673240509708347, |
|
"learning_rate": 1.5755932425525907e-05, |
|
"loss": 1.1215, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.37669732807709155, |
|
"grad_norm": 0.2736444678975509, |
|
"learning_rate": 1.569322835467853e-05, |
|
"loss": 1.1176, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.3788874288217258, |
|
"grad_norm": 0.2768113382591734, |
|
"learning_rate": 1.563019132130136e-05, |
|
"loss": 1.1383, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.38107752956636004, |
|
"grad_norm": 0.28162020446683744, |
|
"learning_rate": 1.5566825012049623e-05, |
|
"loss": 1.1363, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.3832676303109943, |
|
"grad_norm": 0.27091660063236284, |
|
"learning_rate": 1.5503133132835916e-05, |
|
"loss": 1.1215, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.3854577310556286, |
|
"grad_norm": 0.2666248763577975, |
|
"learning_rate": 1.5439119408613442e-05, |
|
"loss": 1.1254, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.3876478318002628, |
|
"grad_norm": 0.2703004250271796, |
|
"learning_rate": 1.5374787583158188e-05, |
|
"loss": 1.1582, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.38983793254489707, |
|
"grad_norm": 0.28795897475828547, |
|
"learning_rate": 1.531014141884994e-05, |
|
"loss": 1.1301, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.39202803328953134, |
|
"grad_norm": 0.26937126154119884, |
|
"learning_rate": 1.5245184696452286e-05, |
|
"loss": 1.0959, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.39421813403416556, |
|
"grad_norm": 0.2708023087895912, |
|
"learning_rate": 1.5179921214891469e-05, |
|
"loss": 1.1313, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.39640823477879983, |
|
"grad_norm": 0.2851242116128433, |
|
"learning_rate": 1.5114354791034225e-05, |
|
"loss": 1.1316, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.3985983355234341, |
|
"grad_norm": 0.270999449215019, |
|
"learning_rate": 1.5048489259464552e-05, |
|
"loss": 1.1297, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.4007884362680683, |
|
"grad_norm": 0.2815986051423743, |
|
"learning_rate": 1.4982328472259453e-05, |
|
"loss": 1.1414, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.4029785370127026, |
|
"grad_norm": 0.25822461525801005, |
|
"learning_rate": 1.4915876298763654e-05, |
|
"loss": 1.1287, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.40516863775733686, |
|
"grad_norm": 0.2747332123131356, |
|
"learning_rate": 1.4849136625363297e-05, |
|
"loss": 1.1141, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.4073587385019711, |
|
"grad_norm": 0.2712357004324246, |
|
"learning_rate": 1.4782113355258656e-05, |
|
"loss": 1.1074, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.40954883924660535, |
|
"grad_norm": 0.27085881871204187, |
|
"learning_rate": 1.471481040823587e-05, |
|
"loss": 1.1105, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.4117389399912396, |
|
"grad_norm": 0.27599882086276023, |
|
"learning_rate": 1.4647231720437687e-05, |
|
"loss": 1.123, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.41392904073587383, |
|
"grad_norm": 0.27922116769860744, |
|
"learning_rate": 1.4579381244133265e-05, |
|
"loss": 1.1469, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.4161191414805081, |
|
"grad_norm": 0.2735506939973002, |
|
"learning_rate": 1.4511262947487037e-05, |
|
"loss": 1.134, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.4183092422251424, |
|
"grad_norm": 0.26886344993062933, |
|
"learning_rate": 1.444288081432662e-05, |
|
"loss": 1.1246, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.4204993429697766, |
|
"grad_norm": 0.26964142533259994, |
|
"learning_rate": 1.4374238843909845e-05, |
|
"loss": 1.1275, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.42268944371441086, |
|
"grad_norm": 0.2745245268675833, |
|
"learning_rate": 1.4305341050690845e-05, |
|
"loss": 1.1457, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.42487954445904513, |
|
"grad_norm": 0.27343512900772715, |
|
"learning_rate": 1.4236191464085286e-05, |
|
"loss": 1.1109, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.42706964520367935, |
|
"grad_norm": 0.2743571452940947, |
|
"learning_rate": 1.4166794128234705e-05, |
|
"loss": 1.1187, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.4292597459483136, |
|
"grad_norm": 0.26544274644395, |
|
"learning_rate": 1.4097153101770008e-05, |
|
"loss": 1.1469, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.4314498466929479, |
|
"grad_norm": 0.5158719980721467, |
|
"learning_rate": 1.4027272457574082e-05, |
|
"loss": 1.125, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.4336399474375821, |
|
"grad_norm": 0.27681551273825566, |
|
"learning_rate": 1.3957156282543605e-05, |
|
"loss": 1.1363, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.4358300481822164, |
|
"grad_norm": 0.27949169274878094, |
|
"learning_rate": 1.388680867735004e-05, |
|
"loss": 1.1254, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.43802014892685065, |
|
"grad_norm": 0.2693597774135224, |
|
"learning_rate": 1.381623375619979e-05, |
|
"loss": 1.1104, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.44021024967148487, |
|
"grad_norm": 0.27336241345507317, |
|
"learning_rate": 1.3745435646593613e-05, |
|
"loss": 1.1346, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.44240035041611914, |
|
"grad_norm": 0.2697587637867333, |
|
"learning_rate": 1.3674418489085192e-05, |
|
"loss": 1.1313, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.4445904511607534, |
|
"grad_norm": 0.2637933472764974, |
|
"learning_rate": 1.360318643703901e-05, |
|
"loss": 1.1174, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.4467805519053876, |
|
"grad_norm": 0.2625274872058839, |
|
"learning_rate": 1.3531743656387426e-05, |
|
"loss": 1.1314, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.4489706526500219, |
|
"grad_norm": 0.2612762476528872, |
|
"learning_rate": 1.346009432538705e-05, |
|
"loss": 1.1205, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.45116075339465617, |
|
"grad_norm": 0.2804760409295694, |
|
"learning_rate": 1.3388242634374367e-05, |
|
"loss": 1.1547, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.4533508541392904, |
|
"grad_norm": 0.2730844934530705, |
|
"learning_rate": 1.331619278552068e-05, |
|
"loss": 1.1301, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.45554095488392465, |
|
"grad_norm": 0.2761820674316818, |
|
"learning_rate": 1.3243948992586347e-05, |
|
"loss": 1.1383, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.4577310556285589, |
|
"grad_norm": 0.2687484002119245, |
|
"learning_rate": 1.3171515480674342e-05, |
|
"loss": 1.1248, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.45992115637319314, |
|
"grad_norm": 0.27893259508437857, |
|
"learning_rate": 1.309889648598316e-05, |
|
"loss": 1.1535, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.4621112571178274, |
|
"grad_norm": 0.2741964520386731, |
|
"learning_rate": 1.3026096255559055e-05, |
|
"loss": 1.1066, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.4643013578624617, |
|
"grad_norm": 0.2766533566738994, |
|
"learning_rate": 1.295311904704767e-05, |
|
"loss": 1.1641, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.4664914586070959, |
|
"grad_norm": 0.2715162801769272, |
|
"learning_rate": 1.2879969128445025e-05, |
|
"loss": 1.125, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.46868155935173017, |
|
"grad_norm": 0.26726252131908934, |
|
"learning_rate": 1.2806650777847913e-05, |
|
"loss": 1.116, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.47087166009636444, |
|
"grad_norm": 0.2840924656915755, |
|
"learning_rate": 1.2733168283203692e-05, |
|
"loss": 1.1318, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.4730617608409987, |
|
"grad_norm": 0.26702580899914813, |
|
"learning_rate": 1.265952594205952e-05, |
|
"loss": 1.1156, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.47525186158563293, |
|
"grad_norm": 0.2773818319183054, |
|
"learning_rate": 1.2585728061311003e-05, |
|
"loss": 1.1184, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.4774419623302672, |
|
"grad_norm": 0.27175516307591485, |
|
"learning_rate": 1.2511778956950325e-05, |
|
"loss": 1.1418, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.47963206307490147, |
|
"grad_norm": 0.2689155932559786, |
|
"learning_rate": 1.243768295381382e-05, |
|
"loss": 1.1006, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.4818221638195357, |
|
"grad_norm": 0.26134780287986276, |
|
"learning_rate": 1.2363444385329052e-05, |
|
"loss": 1.108, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.48401226456416996, |
|
"grad_norm": 0.2742997540168624, |
|
"learning_rate": 1.2289067593261358e-05, |
|
"loss": 1.1141, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.48620236530880423, |
|
"grad_norm": 0.29141512596729113, |
|
"learning_rate": 1.2214556927459955e-05, |
|
"loss": 1.1176, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.48839246605343845, |
|
"grad_norm": 0.27353849576189837, |
|
"learning_rate": 1.2139916745603509e-05, |
|
"loss": 1.1258, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.4905825667980727, |
|
"grad_norm": 0.27107693520298354, |
|
"learning_rate": 1.2065151412945308e-05, |
|
"loss": 1.1098, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.492772667542707, |
|
"grad_norm": 0.2819238585953994, |
|
"learning_rate": 1.1990265302057948e-05, |
|
"loss": 1.1225, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.4949627682873412, |
|
"grad_norm": 0.266552343582286, |
|
"learning_rate": 1.191526279257762e-05, |
|
"loss": 1.1016, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.4971528690319755, |
|
"grad_norm": 0.278057928098842, |
|
"learning_rate": 1.1840148270947962e-05, |
|
"loss": 1.1252, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.49934296977660975, |
|
"grad_norm": 0.2936883558777508, |
|
"learning_rate": 1.1764926130163525e-05, |
|
"loss": 1.1359, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.501533070521244, |
|
"grad_norm": 0.2834365935036193, |
|
"learning_rate": 1.1689600769512855e-05, |
|
"loss": 1.0994, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.5037231712658782, |
|
"grad_norm": 0.274648980748947, |
|
"learning_rate": 1.1614176594321202e-05, |
|
"loss": 1.092, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.5059132720105125, |
|
"grad_norm": 0.28409682473235875, |
|
"learning_rate": 1.1538658015692892e-05, |
|
"loss": 1.1346, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.5081033727551467, |
|
"grad_norm": 0.2850776782098923, |
|
"learning_rate": 1.146304945025332e-05, |
|
"loss": 1.1227, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.5102934734997809, |
|
"grad_norm": 0.2610428092893358, |
|
"learning_rate": 1.1387355319890685e-05, |
|
"loss": 1.0969, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.5124835742444153, |
|
"grad_norm": 0.2740958199191816, |
|
"learning_rate": 1.1311580051497344e-05, |
|
"loss": 1.1246, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.5146736749890495, |
|
"grad_norm": 0.2848288147278702, |
|
"learning_rate": 1.123572807671094e-05, |
|
"loss": 1.1109, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.5168637757336837, |
|
"grad_norm": 0.26817858926421695, |
|
"learning_rate": 1.1159803831655206e-05, |
|
"loss": 1.1273, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.519053876478318, |
|
"grad_norm": 0.2697805969038893, |
|
"learning_rate": 1.1083811756680523e-05, |
|
"loss": 1.1289, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.5212439772229522, |
|
"grad_norm": 0.2705343306376679, |
|
"learning_rate": 1.1007756296104237e-05, |
|
"loss": 1.1469, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.5234340779675866, |
|
"grad_norm": 0.27282726875245455, |
|
"learning_rate": 1.0931641897950733e-05, |
|
"loss": 1.1332, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.5256241787122208, |
|
"grad_norm": 0.28059459041359747, |
|
"learning_rate": 1.08554730136913e-05, |
|
"loss": 1.0916, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.527814279456855, |
|
"grad_norm": 0.2638181805357147, |
|
"learning_rate": 1.0779254097983788e-05, |
|
"loss": 1.1057, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.5300043802014893, |
|
"grad_norm": 0.26522994514380227, |
|
"learning_rate": 1.0702989608412096e-05, |
|
"loss": 1.1027, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.5321944809461235, |
|
"grad_norm": 0.25855768118273403, |
|
"learning_rate": 1.0626684005225443e-05, |
|
"loss": 1.1332, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.5343845816907578, |
|
"grad_norm": 0.27413179764811996, |
|
"learning_rate": 1.0550341751077553e-05, |
|
"loss": 1.1176, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.5365746824353921, |
|
"grad_norm": 0.2642903693875254, |
|
"learning_rate": 1.0473967310765629e-05, |
|
"loss": 1.0857, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.5387647831800263, |
|
"grad_norm": 0.2628203067700955, |
|
"learning_rate": 1.039756515096926e-05, |
|
"loss": 1.1195, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.5409548839246605, |
|
"grad_norm": 0.2783963215365502, |
|
"learning_rate": 1.0321139739989167e-05, |
|
"loss": 1.1215, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.5431449846692948, |
|
"grad_norm": 0.2772449600445937, |
|
"learning_rate": 1.0244695547485912e-05, |
|
"loss": 1.1037, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.545335085413929, |
|
"grad_norm": 0.2660438451116181, |
|
"learning_rate": 1.0168237044218452e-05, |
|
"loss": 1.1262, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.5475251861585633, |
|
"grad_norm": 0.2598928720700204, |
|
"learning_rate": 1.0091768701782714e-05, |
|
"loss": 1.1246, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.5497152869031976, |
|
"grad_norm": 0.2777074874692869, |
|
"learning_rate": 1.0015294992350044e-05, |
|
"loss": 1.1055, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.5519053876478318, |
|
"grad_norm": 0.28449089911369874, |
|
"learning_rate": 9.93882038840568e-06, |
|
"loss": 1.1223, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.554095488392466, |
|
"grad_norm": 0.2768874782485583, |
|
"learning_rate": 9.862349362487172e-06, |
|
"loss": 1.0961, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.5562855891371004, |
|
"grad_norm": 0.26460753865275577, |
|
"learning_rate": 9.785886386922806e-06, |
|
"loss": 1.1076, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.5584756898817346, |
|
"grad_norm": 0.28161562628451337, |
|
"learning_rate": 9.709435933570068e-06, |
|
"loss": 1.1029, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.5606657906263688, |
|
"grad_norm": 0.2563062588428807, |
|
"learning_rate": 9.633002473554077e-06, |
|
"loss": 1.1096, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.5628558913710031, |
|
"grad_norm": 0.26175322953452584, |
|
"learning_rate": 9.556590477006123e-06, |
|
"loss": 1.1229, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.5650459921156373, |
|
"grad_norm": 0.26468057620238034, |
|
"learning_rate": 9.480204412802236e-06, |
|
"loss": 1.0994, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.5672360928602715, |
|
"grad_norm": 0.25809069797603174, |
|
"learning_rate": 9.403848748301802e-06, |
|
"loss": 1.1059, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.5694261936049059, |
|
"grad_norm": 0.27252102620530055, |
|
"learning_rate": 9.327527949086327e-06, |
|
"loss": 1.1092, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.5716162943495401, |
|
"grad_norm": 0.2715402906362093, |
|
"learning_rate": 9.251246478698242e-06, |
|
"loss": 1.1367, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.5738063950941743, |
|
"grad_norm": 0.26828019230229255, |
|
"learning_rate": 9.175008798379893e-06, |
|
"loss": 1.1043, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.5759964958388086, |
|
"grad_norm": 0.2770878196295294, |
|
"learning_rate": 9.098819366812594e-06, |
|
"loss": 1.1213, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.5781865965834428, |
|
"grad_norm": 0.281651051703252, |
|
"learning_rate": 9.022682639855883e-06, |
|
"loss": 1.1367, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.5803766973280771, |
|
"grad_norm": 0.27314753294139044, |
|
"learning_rate": 8.946603070286926e-06, |
|
"loss": 1.107, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.5825667980727114, |
|
"grad_norm": 0.26226614181355684, |
|
"learning_rate": 8.870585107540101e-06, |
|
"loss": 1.1281, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.5847568988173456, |
|
"grad_norm": 0.27293558156801573, |
|
"learning_rate": 8.79463319744677e-06, |
|
"loss": 1.0809, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.5869469995619798, |
|
"grad_norm": 0.26602352956316966, |
|
"learning_rate": 8.718751781975284e-06, |
|
"loss": 1.1385, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.5891371003066141, |
|
"grad_norm": 0.2746959499141311, |
|
"learning_rate": 8.642945298971168e-06, |
|
"loss": 1.1172, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.5913272010512484, |
|
"grad_norm": 0.2677554902377095, |
|
"learning_rate": 8.56721818189763e-06, |
|
"loss": 1.1133, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.5935173017958826, |
|
"grad_norm": 0.2662798900188826, |
|
"learning_rate": 8.491574859576222e-06, |
|
"loss": 1.1104, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.5957074025405169, |
|
"grad_norm": 0.26469719917634077, |
|
"learning_rate": 8.416019755927851e-06, |
|
"loss": 1.1086, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.5978975032851511, |
|
"grad_norm": 0.26386029178727227, |
|
"learning_rate": 8.340557289714055e-06, |
|
"loss": 1.1168, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.6000876040297853, |
|
"grad_norm": 0.253515604893805, |
|
"learning_rate": 8.26519187427856e-06, |
|
"loss": 1.1187, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.6022777047744197, |
|
"grad_norm": 0.277605351754751, |
|
"learning_rate": 8.189927917289182e-06, |
|
"loss": 1.1059, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.6044678055190539, |
|
"grad_norm": 0.26442216190859963, |
|
"learning_rate": 8.114769820480039e-06, |
|
"loss": 1.1176, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.6066579062636881, |
|
"grad_norm": 0.2734826868231617, |
|
"learning_rate": 8.03972197939414e-06, |
|
"loss": 1.1215, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.6088480070083224, |
|
"grad_norm": 0.2663060487289671, |
|
"learning_rate": 7.964788783126294e-06, |
|
"loss": 1.1037, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.6110381077529566, |
|
"grad_norm": 0.26118107033447696, |
|
"learning_rate": 7.889974614066425e-06, |
|
"loss": 1.0967, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.6132282084975909, |
|
"grad_norm": 0.27155741223634616, |
|
"learning_rate": 7.815283847643277e-06, |
|
"loss": 1.1203, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.6154183092422252, |
|
"grad_norm": 0.27035981345201915, |
|
"learning_rate": 7.740720852068524e-06, |
|
"loss": 1.1428, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.6176084099868594, |
|
"grad_norm": 0.2683573304766547, |
|
"learning_rate": 7.66628998808129e-06, |
|
"loss": 1.1301, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.6197985107314936, |
|
"grad_norm": 0.26324766469257666, |
|
"learning_rate": 7.591995608693118e-06, |
|
"loss": 1.1115, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.6219886114761279, |
|
"grad_norm": 0.26863159797075176, |
|
"learning_rate": 7.5178420589334025e-06, |
|
"loss": 1.1055, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.6241787122207622, |
|
"grad_norm": 0.25941294580030216, |
|
"learning_rate": 7.443833675595254e-06, |
|
"loss": 1.1293, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.6263688129653964, |
|
"grad_norm": 0.2623305114567609, |
|
"learning_rate": 7.369974786981879e-06, |
|
"loss": 1.0961, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.6285589137100307, |
|
"grad_norm": 0.2857793789171064, |
|
"learning_rate": 7.296269712653436e-06, |
|
"loss": 1.1051, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.6307490144546649, |
|
"grad_norm": 0.2738731727686292, |
|
"learning_rate": 7.222722763174428e-06, |
|
"loss": 1.0965, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.6329391151992991, |
|
"grad_norm": 0.2780751677609743, |
|
"learning_rate": 7.149338239861579e-06, |
|
"loss": 1.1125, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.6351292159439335, |
|
"grad_norm": 0.271978624523703, |
|
"learning_rate": 7.076120434532292e-06, |
|
"loss": 1.0795, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.6373193166885677, |
|
"grad_norm": 0.2889878149869347, |
|
"learning_rate": 7.003073629253638e-06, |
|
"loss": 1.1094, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.6395094174332019, |
|
"grad_norm": 0.26177418700227856, |
|
"learning_rate": 6.9302020960919405e-06, |
|
"loss": 1.132, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.6416995181778362, |
|
"grad_norm": 0.27487300616889226, |
|
"learning_rate": 6.857510096862901e-06, |
|
"loss": 1.1131, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.6438896189224704, |
|
"grad_norm": 0.2673004044565668, |
|
"learning_rate": 6.785001882882376e-06, |
|
"loss": 1.1047, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.6460797196671046, |
|
"grad_norm": 0.2637312152468036, |
|
"learning_rate": 6.712681694717723e-06, |
|
"loss": 1.1096, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.648269820411739, |
|
"grad_norm": 0.2630882968696908, |
|
"learning_rate": 6.640553761939822e-06, |
|
"loss": 1.0973, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.6504599211563732, |
|
"grad_norm": 0.2638392419100043, |
|
"learning_rate": 6.568622302875682e-06, |
|
"loss": 1.0982, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.6526500219010074, |
|
"grad_norm": 0.2626309470206596, |
|
"learning_rate": 6.496891524361757e-06, |
|
"loss": 1.1172, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.6548401226456417, |
|
"grad_norm": 0.265160449047166, |
|
"learning_rate": 6.42536562149791e-06, |
|
"loss": 1.1125, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.657030223390276, |
|
"grad_norm": 0.26619884121137627, |
|
"learning_rate": 6.354048777402061e-06, |
|
"loss": 1.1328, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.6592203241349102, |
|
"grad_norm": 0.2600814085765291, |
|
"learning_rate": 6.282945162965548e-06, |
|
"loss": 1.117, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.6614104248795445, |
|
"grad_norm": 0.272185670744957, |
|
"learning_rate": 6.212058936609189e-06, |
|
"loss": 1.1209, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.6636005256241787, |
|
"grad_norm": 0.2641333188928149, |
|
"learning_rate": 6.1413942440400994e-06, |
|
"loss": 1.1168, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.6657906263688129, |
|
"grad_norm": 0.26009676604807275, |
|
"learning_rate": 6.070955218009213e-06, |
|
"loss": 1.0936, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.6679807271134472, |
|
"grad_norm": 0.2615413153711293, |
|
"learning_rate": 6.0007459780695885e-06, |
|
"loss": 1.1061, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.6701708278580815, |
|
"grad_norm": 0.2892089912864181, |
|
"learning_rate": 5.93077063033549e-06, |
|
"loss": 1.1055, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.6723609286027157, |
|
"grad_norm": 0.2629578247792853, |
|
"learning_rate": 5.861033267242238e-06, |
|
"loss": 1.1146, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.67455102934735, |
|
"grad_norm": 0.2653857163412152, |
|
"learning_rate": 5.791537967306869e-06, |
|
"loss": 1.1107, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.6767411300919842, |
|
"grad_norm": 0.2641301458887951, |
|
"learning_rate": 5.722288794889603e-06, |
|
"loss": 1.0953, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.6789312308366184, |
|
"grad_norm": 0.2627115214160706, |
|
"learning_rate": 5.653289799956161e-06, |
|
"loss": 1.0957, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.6811213315812528, |
|
"grad_norm": 0.2744248525372843, |
|
"learning_rate": 5.584545017840886e-06, |
|
"loss": 1.0973, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.683311432325887, |
|
"grad_norm": 0.26271843375673565, |
|
"learning_rate": 5.516058469010741e-06, |
|
"loss": 1.1066, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.6855015330705212, |
|
"grad_norm": 0.28239608054669124, |
|
"learning_rate": 5.447834158830202e-06, |
|
"loss": 1.1457, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.6876916338151555, |
|
"grad_norm": 0.2682726187574874, |
|
"learning_rate": 5.379876077326979e-06, |
|
"loss": 1.0771, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.6898817345597897, |
|
"grad_norm": 0.2693944920009028, |
|
"learning_rate": 5.312188198958681e-06, |
|
"loss": 1.1191, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.692071835304424, |
|
"grad_norm": 0.27066235636281594, |
|
"learning_rate": 5.244774482380361e-06, |
|
"loss": 1.1191, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.6942619360490583, |
|
"grad_norm": 0.2607089341186109, |
|
"learning_rate": 5.177638870213008e-06, |
|
"loss": 1.0949, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.6964520367936925, |
|
"grad_norm": 0.26730049681200124, |
|
"learning_rate": 5.1107852888129715e-06, |
|
"loss": 1.159, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.6986421375383267, |
|
"grad_norm": 0.2561679895991625, |
|
"learning_rate": 5.04421764804232e-06, |
|
"loss": 1.0938, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.700832238282961, |
|
"grad_norm": 0.25827915348141745, |
|
"learning_rate": 4.977939841040175e-06, |
|
"loss": 1.1178, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.7030223390275953, |
|
"grad_norm": 0.2672625917790982, |
|
"learning_rate": 4.911955743995042e-06, |
|
"loss": 1.1195, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.7052124397722295, |
|
"grad_norm": 0.2712982534329863, |
|
"learning_rate": 4.846269215918106e-06, |
|
"loss": 1.1215, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.7074025405168638, |
|
"grad_norm": 0.2637027906298979, |
|
"learning_rate": 4.780884098417531e-06, |
|
"loss": 1.098, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.709592641261498, |
|
"grad_norm": 0.26810920883601624, |
|
"learning_rate": 4.7158042154738094e-06, |
|
"loss": 1.1176, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.7117827420061322, |
|
"grad_norm": 0.26084193501541225, |
|
"learning_rate": 4.6510333732160915e-06, |
|
"loss": 1.1262, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.7139728427507666, |
|
"grad_norm": 0.2700256165770299, |
|
"learning_rate": 4.5865753596996335e-06, |
|
"loss": 1.1148, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.7161629434954008, |
|
"grad_norm": 0.2639599226097337, |
|
"learning_rate": 4.522433944684197e-06, |
|
"loss": 1.1156, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.718353044240035, |
|
"grad_norm": 0.2597365152157971, |
|
"learning_rate": 4.45861287941363e-06, |
|
"loss": 1.0854, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.7205431449846693, |
|
"grad_norm": 0.26268213694795733, |
|
"learning_rate": 4.395115896396457e-06, |
|
"loss": 1.1258, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.7227332457293035, |
|
"grad_norm": 0.26221093408579443, |
|
"learning_rate": 4.331946709187578e-06, |
|
"loss": 1.1109, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.7249233464739377, |
|
"grad_norm": 0.2607246299946745, |
|
"learning_rate": 4.269109012171112e-06, |
|
"loss": 1.1104, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.7271134472185721, |
|
"grad_norm": 0.2574716134373636, |
|
"learning_rate": 4.206606480344303e-06, |
|
"loss": 1.0932, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.7293035479632063, |
|
"grad_norm": 0.265233772269521, |
|
"learning_rate": 4.14444276910263e-06, |
|
"loss": 1.1332, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.7314936487078406, |
|
"grad_norm": 0.25655122243148587, |
|
"learning_rate": 4.08262151402598e-06, |
|
"loss": 1.0891, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.7336837494524748, |
|
"grad_norm": 0.25501243107566296, |
|
"learning_rate": 4.021146330666065e-06, |
|
"loss": 1.0902, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.735873850197109, |
|
"grad_norm": 0.2612800818169169, |
|
"learning_rate": 3.960020814334936e-06, |
|
"loss": 1.1195, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.7380639509417434, |
|
"grad_norm": 0.2643874720892855, |
|
"learning_rate": 3.899248539894756e-06, |
|
"loss": 1.1137, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.7402540516863776, |
|
"grad_norm": 0.26838926718062733, |
|
"learning_rate": 3.838833061548682e-06, |
|
"loss": 1.1094, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.7424441524310118, |
|
"grad_norm": 0.26970931976433926, |
|
"learning_rate": 3.7787779126330314e-06, |
|
"loss": 1.1297, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.7446342531756461, |
|
"grad_norm": 0.26137631703110864, |
|
"learning_rate": 3.7190866054106358e-06, |
|
"loss": 1.1254, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.7468243539202803, |
|
"grad_norm": 0.2614031117426139, |
|
"learning_rate": 3.659762630865411e-06, |
|
"loss": 1.1041, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.7490144546649146, |
|
"grad_norm": 0.2580239989121174, |
|
"learning_rate": 3.600809458498219e-06, |
|
"loss": 1.1076, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.7512045554095489, |
|
"grad_norm": 0.2672365887137131, |
|
"learning_rate": 3.542230536123925e-06, |
|
"loss": 1.0988, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.7533946561541831, |
|
"grad_norm": 0.26577844660787864, |
|
"learning_rate": 3.484029289669797e-06, |
|
"loss": 1.098, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.7555847568988173, |
|
"grad_norm": 0.2563307410321518, |
|
"learning_rate": 3.4262091229750973e-06, |
|
"loss": 1.1051, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.7577748576434516, |
|
"grad_norm": 0.25998557620881213, |
|
"learning_rate": 3.3687734175920505e-06, |
|
"loss": 1.1121, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.7599649583880859, |
|
"grad_norm": 0.2641859803660572, |
|
"learning_rate": 3.311725532588049e-06, |
|
"loss": 1.1223, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.7621550591327201, |
|
"grad_norm": 0.26214013514430917, |
|
"learning_rate": 3.2550688043492273e-06, |
|
"loss": 1.11, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.7643451598773544, |
|
"grad_norm": 0.2613558739863409, |
|
"learning_rate": 3.1988065463853204e-06, |
|
"loss": 1.0902, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.7665352606219886, |
|
"grad_norm": 0.26742133303345345, |
|
"learning_rate": 3.1429420491358696e-06, |
|
"loss": 1.1262, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.7687253613666228, |
|
"grad_norm": 0.2599204528063472, |
|
"learning_rate": 3.0874785797778096e-06, |
|
"loss": 1.148, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.7709154621112572, |
|
"grad_norm": 0.2566302858053561, |
|
"learning_rate": 3.0324193820343685e-06, |
|
"loss": 1.1109, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.7731055628558914, |
|
"grad_norm": 0.260139567695262, |
|
"learning_rate": 2.977767675985377e-06, |
|
"loss": 1.1344, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.7752956636005256, |
|
"grad_norm": 0.2683465626714785, |
|
"learning_rate": 2.9235266578789268e-06, |
|
"loss": 1.1246, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.7774857643451599, |
|
"grad_norm": 0.2594022825090287, |
|
"learning_rate": 2.8696994999444614e-06, |
|
"loss": 1.1074, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.7796758650897941, |
|
"grad_norm": 0.26202834015198223, |
|
"learning_rate": 2.816289350207249e-06, |
|
"loss": 1.0943, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.7818659658344284, |
|
"grad_norm": 0.2581965218546036, |
|
"learning_rate": 2.76329933230425e-06, |
|
"loss": 1.1088, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.7840560665790627, |
|
"grad_norm": 0.262190008267224, |
|
"learning_rate": 2.7107325453014687e-06, |
|
"loss": 1.1098, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.7862461673236969, |
|
"grad_norm": 0.2534248970210751, |
|
"learning_rate": 2.658592063512684e-06, |
|
"loss": 1.0893, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.7884362680683311, |
|
"grad_norm": 0.2633626852571572, |
|
"learning_rate": 2.6068809363196633e-06, |
|
"loss": 1.091, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.7906263688129654, |
|
"grad_norm": 0.25921581241790814, |
|
"learning_rate": 2.5556021879938074e-06, |
|
"loss": 1.093, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.7928164695575997, |
|
"grad_norm": 0.2621990361536461, |
|
"learning_rate": 2.5047588175192995e-06, |
|
"loss": 1.1254, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.7950065703022339, |
|
"grad_norm": 0.2535710412898093, |
|
"learning_rate": 2.454353798417698e-06, |
|
"loss": 1.1072, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.7971966710468682, |
|
"grad_norm": 0.25964995240654326, |
|
"learning_rate": 2.4043900785740436e-06, |
|
"loss": 1.1023, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.7993867717915024, |
|
"grad_norm": 0.2539381357636022, |
|
"learning_rate": 2.354870580064439e-06, |
|
"loss": 1.1133, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.8015768725361366, |
|
"grad_norm": 0.2575538966366128, |
|
"learning_rate": 2.305798198985173e-06, |
|
"loss": 1.0879, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.803766973280771, |
|
"grad_norm": 0.25244614295211687, |
|
"learning_rate": 2.257175805283338e-06, |
|
"loss": 1.1137, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.8059570740254052, |
|
"grad_norm": 0.2668071511501433, |
|
"learning_rate": 2.209006242588977e-06, |
|
"loss": 1.1187, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.8081471747700394, |
|
"grad_norm": 0.26833063688258807, |
|
"learning_rate": 2.1612923280487883e-06, |
|
"loss": 1.1049, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.8103372755146737, |
|
"grad_norm": 0.25994289950129074, |
|
"learning_rate": 2.114036852161361e-06, |
|
"loss": 1.0926, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.8125273762593079, |
|
"grad_norm": 0.26330093138785376, |
|
"learning_rate": 2.0672425786139794e-06, |
|
"loss": 1.0885, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.8147174770039421, |
|
"grad_norm": 0.2627529440350032, |
|
"learning_rate": 2.020912244120983e-06, |
|
"loss": 1.0902, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.8169075777485765, |
|
"grad_norm": 0.25955955375887674, |
|
"learning_rate": 1.9750485582637245e-06, |
|
"loss": 1.127, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.8190976784932107, |
|
"grad_norm": 0.2615144960394142, |
|
"learning_rate": 1.9296542033321008e-06, |
|
"loss": 1.1074, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.8212877792378449, |
|
"grad_norm": 0.2650186451464187, |
|
"learning_rate": 1.8847318341676657e-06, |
|
"loss": 1.1234, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.8234778799824792, |
|
"grad_norm": 0.2641320073264704, |
|
"learning_rate": 1.840284078008393e-06, |
|
"loss": 1.0957, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.8256679807271134, |
|
"grad_norm": 0.2581562950846561, |
|
"learning_rate": 1.7963135343349914e-06, |
|
"loss": 1.1094, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.8278580814717477, |
|
"grad_norm": 0.25731784586468565, |
|
"learning_rate": 1.7528227747189152e-06, |
|
"loss": 1.1086, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.830048182216382, |
|
"grad_norm": 0.2618515550927689, |
|
"learning_rate": 1.7098143426719293e-06, |
|
"loss": 1.0984, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.8322382829610162, |
|
"grad_norm": 0.25314788080692846, |
|
"learning_rate": 1.667290753497386e-06, |
|
"loss": 1.0965, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.8344283837056504, |
|
"grad_norm": 0.25504674250436493, |
|
"learning_rate": 1.6252544941430982e-06, |
|
"loss": 1.0957, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.8366184844502847, |
|
"grad_norm": 0.2559793997504819, |
|
"learning_rate": 1.5837080230559077e-06, |
|
"loss": 1.0947, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.838808585194919, |
|
"grad_norm": 0.26055333090211424, |
|
"learning_rate": 1.5426537700378985e-06, |
|
"loss": 1.0961, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.8409986859395532, |
|
"grad_norm": 0.2576679394012241, |
|
"learning_rate": 1.5020941361042885e-06, |
|
"loss": 1.0934, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.8431887866841875, |
|
"grad_norm": 0.25828962040009074, |
|
"learning_rate": 1.4620314933430269e-06, |
|
"loss": 1.1121, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.8453788874288217, |
|
"grad_norm": 0.25513013439947374, |
|
"learning_rate": 1.4224681847760346e-06, |
|
"loss": 1.1145, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.8475689881734559, |
|
"grad_norm": 0.26643194015943783, |
|
"learning_rate": 1.383406524222206e-06, |
|
"loss": 1.1002, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.8497590889180903, |
|
"grad_norm": 0.25674411294345, |
|
"learning_rate": 1.3448487961620637e-06, |
|
"loss": 1.1355, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.8519491896627245, |
|
"grad_norm": 0.2605245623523021, |
|
"learning_rate": 1.3067972556041753e-06, |
|
"loss": 1.1207, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.8541392904073587, |
|
"grad_norm": 0.2616395396378427, |
|
"learning_rate": 1.2692541279532466e-06, |
|
"loss": 1.1129, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.856329391151993, |
|
"grad_norm": 0.2558570340754774, |
|
"learning_rate": 1.2322216088799955e-06, |
|
"loss": 1.1156, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.8585194918966272, |
|
"grad_norm": 0.25398464154559003, |
|
"learning_rate": 1.1957018641927187e-06, |
|
"loss": 1.1137, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.8607095926412615, |
|
"grad_norm": 0.2614386394260854, |
|
"learning_rate": 1.1596970297106458e-06, |
|
"loss": 1.1254, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.8628996933858958, |
|
"grad_norm": 0.2620955323370706, |
|
"learning_rate": 1.1242092111390157e-06, |
|
"loss": 1.085, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.86508979413053, |
|
"grad_norm": 0.2640807465997075, |
|
"learning_rate": 1.0892404839459269e-06, |
|
"loss": 1.1074, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.8672798948751642, |
|
"grad_norm": 0.2580694157592811, |
|
"learning_rate": 1.054792893240969e-06, |
|
"loss": 1.1098, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.8694699956197985, |
|
"grad_norm": 0.2651420557425447, |
|
"learning_rate": 1.0208684536555968e-06, |
|
"loss": 1.1275, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.8716600963644328, |
|
"grad_norm": 0.262002558002591, |
|
"learning_rate": 9.874691492253242e-07, |
|
"loss": 1.1039, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.873850197109067, |
|
"grad_norm": 0.25919863963901085, |
|
"learning_rate": 9.545969332736748e-07, |
|
"loss": 1.1195, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.8760402978537013, |
|
"grad_norm": 0.2668241542423085, |
|
"learning_rate": 9.222537282979593e-07, |
|
"loss": 1.1004, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.8782303985983355, |
|
"grad_norm": 0.2601150473608183, |
|
"learning_rate": 8.904414258568306e-07, |
|
"loss": 1.1027, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.8804204993429697, |
|
"grad_norm": 0.2548383260510458, |
|
"learning_rate": 8.591618864596541e-07, |
|
"loss": 1.1051, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.8826106000876041, |
|
"grad_norm": 0.2600128178676384, |
|
"learning_rate": 8.284169394577124e-07, |
|
"loss": 1.1236, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.8848007008322383, |
|
"grad_norm": 0.26066874164079284, |
|
"learning_rate": 7.982083829372023e-07, |
|
"loss": 1.1242, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.8869908015768725, |
|
"grad_norm": 0.25692211657826886, |
|
"learning_rate": 7.685379836140872e-07, |
|
"loss": 1.1406, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.8891809023215068, |
|
"grad_norm": 0.26481424368233436, |
|
"learning_rate": 7.394074767307591e-07, |
|
"loss": 1.1313, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.891371003066141, |
|
"grad_norm": 0.2618210555897095, |
|
"learning_rate": 7.10818565954573e-07, |
|
"loss": 1.1193, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.8935611038107752, |
|
"grad_norm": 0.26537735037912497, |
|
"learning_rate": 6.827729232781943e-07, |
|
"loss": 1.1273, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.8957512045554096, |
|
"grad_norm": 0.2565351465887861, |
|
"learning_rate": 6.552721889218194e-07, |
|
"loss": 1.1074, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.8979413053000438, |
|
"grad_norm": 0.2641157557567522, |
|
"learning_rate": 6.283179712372489e-07, |
|
"loss": 1.1102, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.900131406044678, |
|
"grad_norm": 0.258380894950019, |
|
"learning_rate": 6.019118466138285e-07, |
|
"loss": 1.0936, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.9023215067893123, |
|
"grad_norm": 0.2621614154084412, |
|
"learning_rate": 5.760553593862506e-07, |
|
"loss": 1.0959, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.9045116075339465, |
|
"grad_norm": 0.2560826189907596, |
|
"learning_rate": 5.507500217442341e-07, |
|
"loss": 1.1016, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.9067017082785808, |
|
"grad_norm": 0.2590724071414488, |
|
"learning_rate": 5.259973136440921e-07, |
|
"loss": 1.1082, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.9088918090232151, |
|
"grad_norm": 0.2573319556492272, |
|
"learning_rate": 5.017986827221733e-07, |
|
"loss": 1.1082, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.9110819097678493, |
|
"grad_norm": 0.34516100541646555, |
|
"learning_rate": 4.781555442102015e-07, |
|
"loss": 1.1355, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.9132720105124835, |
|
"grad_norm": 0.26180704480049366, |
|
"learning_rate": 4.5506928085250033e-07, |
|
"loss": 1.0871, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.9154621112571178, |
|
"grad_norm": 0.25833323890857174, |
|
"learning_rate": 4.3254124282513634e-07, |
|
"loss": 1.1062, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.9176522120017521, |
|
"grad_norm": 0.26187121720127504, |
|
"learning_rate": 4.1057274765694765e-07, |
|
"loss": 1.0939, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.9198423127463863, |
|
"grad_norm": 0.2573692262543959, |
|
"learning_rate": 3.891650801524871e-07, |
|
"loss": 1.1074, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.9220324134910206, |
|
"grad_norm": 0.2649955238574841, |
|
"learning_rate": 3.6831949231689203e-07, |
|
"loss": 1.1168, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.9242225142356548, |
|
"grad_norm": 0.2553634372156294, |
|
"learning_rate": 3.4803720328264733e-07, |
|
"loss": 1.1168, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.926412614980289, |
|
"grad_norm": 0.26728677917909055, |
|
"learning_rate": 3.283193992383049e-07, |
|
"loss": 1.1273, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.9286027157249234, |
|
"grad_norm": 0.2607892654338355, |
|
"learning_rate": 3.091672333590923e-07, |
|
"loss": 1.0867, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.9307928164695576, |
|
"grad_norm": 0.2527633048942072, |
|
"learning_rate": 2.905818257394799e-07, |
|
"loss": 1.1133, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.9329829172141918, |
|
"grad_norm": 0.25568945394844267, |
|
"learning_rate": 2.725642633276737e-07, |
|
"loss": 1.1098, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.9351730179588261, |
|
"grad_norm": 0.2597970875903515, |
|
"learning_rate": 2.5511559986204247e-07, |
|
"loss": 1.1184, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.9373631187034603, |
|
"grad_norm": 0.26327747871941953, |
|
"learning_rate": 2.3823685580949273e-07, |
|
"loss": 1.0965, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.9395532194480947, |
|
"grad_norm": 0.2607010813003693, |
|
"learning_rate": 2.219290183057865e-07, |
|
"loss": 1.1352, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.9417433201927289, |
|
"grad_norm": 0.2530017456095944, |
|
"learning_rate": 2.0619304109781636e-07, |
|
"loss": 1.1012, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.9439334209373631, |
|
"grad_norm": 0.2519870547260623, |
|
"learning_rate": 1.9102984448781337e-07, |
|
"loss": 1.0752, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.9461235216819974, |
|
"grad_norm": 0.2530254165097107, |
|
"learning_rate": 1.764403152795402e-07, |
|
"loss": 1.0934, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.9483136224266316, |
|
"grad_norm": 0.25397273342292553, |
|
"learning_rate": 1.6242530672641143e-07, |
|
"loss": 1.1219, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.9505037231712659, |
|
"grad_norm": 0.26174974744061347, |
|
"learning_rate": 1.4898563848160464e-07, |
|
"loss": 1.1473, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.9526938239159002, |
|
"grad_norm": 0.24955513279870836, |
|
"learning_rate": 1.3612209655011866e-07, |
|
"loss": 1.1195, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.9548839246605344, |
|
"grad_norm": 0.2583751624724583, |
|
"learning_rate": 1.2383543324280044e-07, |
|
"loss": 1.1189, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.9570740254051686, |
|
"grad_norm": 0.2588289009510903, |
|
"learning_rate": 1.1212636713235581e-07, |
|
"loss": 1.1416, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.9592641261498029, |
|
"grad_norm": 0.2583550968959351, |
|
"learning_rate": 1.0099558301132072e-07, |
|
"loss": 1.1254, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.9614542268944372, |
|
"grad_norm": 0.2527293991159154, |
|
"learning_rate": 9.044373185200906e-08, |
|
"loss": 1.1031, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.9636443276390714, |
|
"grad_norm": 0.2521840148118415, |
|
"learning_rate": 8.047143076844399e-08, |
|
"loss": 1.0783, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.9658344283837057, |
|
"grad_norm": 0.2606898444853039, |
|
"learning_rate": 7.10792629802659e-08, |
|
"loss": 1.1113, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.9680245291283399, |
|
"grad_norm": 0.25532227510506406, |
|
"learning_rate": 6.226777777862513e-08, |
|
"loss": 1.1293, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.9702146298729741, |
|
"grad_norm": 0.2565349165845633, |
|
"learning_rate": 5.403749049405438e-08, |
|
"loss": 1.1025, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.9724047306176085, |
|
"grad_norm": 0.25458880932233896, |
|
"learning_rate": 4.638888246633166e-08, |
|
"loss": 1.1121, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.9745948313622427, |
|
"grad_norm": 0.25398250551108936, |
|
"learning_rate": 3.932240101633178e-08, |
|
"loss": 1.1023, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.9767849321068769, |
|
"grad_norm": 0.25422167947403135, |
|
"learning_rate": 3.283845941985941e-08, |
|
"loss": 1.0758, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.9789750328515112, |
|
"grad_norm": 0.2569295535003476, |
|
"learning_rate": 2.693743688348627e-08, |
|
"loss": 1.1168, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.9811651335961454, |
|
"grad_norm": 0.2611140820134084, |
|
"learning_rate": 2.1619678522366573e-08, |
|
"loss": 1.098, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.9833552343407796, |
|
"grad_norm": 0.2585042183570841, |
|
"learning_rate": 1.68854953400599e-08, |
|
"loss": 1.1182, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.985545335085414, |
|
"grad_norm": 0.2699764722438751, |
|
"learning_rate": 1.2735164210337935e-08, |
|
"loss": 1.1629, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.9877354358300482, |
|
"grad_norm": 0.2604976252616973, |
|
"learning_rate": 9.168927860994104e-09, |
|
"loss": 1.1049, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.9899255365746824, |
|
"grad_norm": 0.2570709645581014, |
|
"learning_rate": 6.186994859646023e-09, |
|
"loss": 1.0938, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.9921156373193167, |
|
"grad_norm": 0.2534472927063691, |
|
"learning_rate": 3.7895396015374955e-09, |
|
"loss": 1.1094, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.994305738063951, |
|
"grad_norm": 0.2532795250404029, |
|
"learning_rate": 1.9767022993444353e-09, |
|
"loss": 1.1031, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.9964958388085852, |
|
"grad_norm": 0.25604334514879273, |
|
"learning_rate": 7.485889749658803e-10, |
|
"loss": 1.1129, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.9986859395532195, |
|
"grad_norm": 0.26226473069552947, |
|
"learning_rate": 1.0527145333227851e-10, |
|
"loss": 1.1066, |
|
"step": 2280 |
|
}, |
|
    {
      "epoch": 1.0,
      "step": 2283,
      "total_flos": 216025446481920.0,
      "train_loss": 1.2513469975087603,
      "train_runtime": 7749.9665,
      "train_samples_per_second": 18.846,
      "train_steps_per_second": 0.295
    }
  ],
"logging_steps": 5, |
|
"max_steps": 2283, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": false, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 216025446481920.0, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|