|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.0, |
|
"eval_steps": 200, |
|
"global_step": 1124, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0017793594306049821, |
|
"grad_norm": 3.658687401390472, |
|
"learning_rate": 9.99998046979289e-06, |
|
"loss": 0.1395, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0035587188612099642, |
|
"grad_norm": 3.177567124468238, |
|
"learning_rate": 9.999921879324127e-06, |
|
"loss": 0.1278, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.005338078291814947, |
|
"grad_norm": 3.9123704947904914, |
|
"learning_rate": 9.999824229051425e-06, |
|
"loss": 0.1474, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0071174377224199285, |
|
"grad_norm": 4.8383344066076, |
|
"learning_rate": 9.999687519737639e-06, |
|
"loss": 0.1845, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.008896797153024912, |
|
"grad_norm": 4.013123227527269, |
|
"learning_rate": 9.99951175245075e-06, |
|
"loss": 0.1284, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.010676156583629894, |
|
"grad_norm": 4.92246803301761, |
|
"learning_rate": 9.999296928563868e-06, |
|
"loss": 0.1838, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.012455516014234875, |
|
"grad_norm": 5.062643945350466, |
|
"learning_rate": 9.999043049755216e-06, |
|
"loss": 0.1765, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.014234875444839857, |
|
"grad_norm": 3.9598048167483846, |
|
"learning_rate": 9.998750118008117e-06, |
|
"loss": 0.1127, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01601423487544484, |
|
"grad_norm": 4.290361284828448, |
|
"learning_rate": 9.998418135610974e-06, |
|
"loss": 0.128, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.017793594306049824, |
|
"grad_norm": 5.260530447777749, |
|
"learning_rate": 9.998047105157265e-06, |
|
"loss": 0.2255, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.019572953736654804, |
|
"grad_norm": 3.8995273437231703, |
|
"learning_rate": 9.997637029545509e-06, |
|
"loss": 0.1171, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.021352313167259787, |
|
"grad_norm": 5.1833195169731905, |
|
"learning_rate": 9.997187911979252e-06, |
|
"loss": 0.1613, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.023131672597864767, |
|
"grad_norm": 4.575154555783548, |
|
"learning_rate": 9.996699755967035e-06, |
|
"loss": 0.1786, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02491103202846975, |
|
"grad_norm": 4.209412887523948, |
|
"learning_rate": 9.996172565322375e-06, |
|
"loss": 0.1541, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.026690391459074734, |
|
"grad_norm": 3.991248024332673, |
|
"learning_rate": 9.995606344163728e-06, |
|
"loss": 0.145, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.028469750889679714, |
|
"grad_norm": 3.8820572892766982, |
|
"learning_rate": 9.995001096914462e-06, |
|
"loss": 0.1515, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.030249110320284697, |
|
"grad_norm": 3.934506810264301, |
|
"learning_rate": 9.994356828302818e-06, |
|
"loss": 0.1523, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.03202846975088968, |
|
"grad_norm": 3.3170692318193775, |
|
"learning_rate": 9.993673543361874e-06, |
|
"loss": 0.1304, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.033807829181494664, |
|
"grad_norm": 5.483749242245127, |
|
"learning_rate": 9.992951247429512e-06, |
|
"loss": 0.2155, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03558718861209965, |
|
"grad_norm": 3.85867995115402, |
|
"learning_rate": 9.992189946148366e-06, |
|
"loss": 0.1376, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.037366548042704624, |
|
"grad_norm": 3.5848444788736695, |
|
"learning_rate": 9.991389645465786e-06, |
|
"loss": 0.1146, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03914590747330961, |
|
"grad_norm": 3.9386291550575208, |
|
"learning_rate": 9.990550351633784e-06, |
|
"loss": 0.1691, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04092526690391459, |
|
"grad_norm": 3.7136305499678226, |
|
"learning_rate": 9.989672071208993e-06, |
|
"loss": 0.1811, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.042704626334519574, |
|
"grad_norm": 3.9602707145327884, |
|
"learning_rate": 9.988754811052616e-06, |
|
"loss": 0.2093, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04448398576512456, |
|
"grad_norm": 4.543444885088083, |
|
"learning_rate": 9.987798578330365e-06, |
|
"loss": 0.2065, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.046263345195729534, |
|
"grad_norm": 4.0453317974487195, |
|
"learning_rate": 9.986803380512406e-06, |
|
"loss": 0.1712, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.04804270462633452, |
|
"grad_norm": 3.5813417906658147, |
|
"learning_rate": 9.98576922537331e-06, |
|
"loss": 0.185, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.0498220640569395, |
|
"grad_norm": 3.9693374278328024, |
|
"learning_rate": 9.984696120991979e-06, |
|
"loss": 0.1874, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.051601423487544484, |
|
"grad_norm": 3.6136137633033556, |
|
"learning_rate": 9.983584075751598e-06, |
|
"loss": 0.1274, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.05338078291814947, |
|
"grad_norm": 3.442065181867227, |
|
"learning_rate": 9.982433098339553e-06, |
|
"loss": 0.1599, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05516014234875445, |
|
"grad_norm": 4.579598587032412, |
|
"learning_rate": 9.981243197747375e-06, |
|
"loss": 0.2051, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.05693950177935943, |
|
"grad_norm": 5.377241384417466, |
|
"learning_rate": 9.980014383270668e-06, |
|
"loss": 0.2268, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.05871886120996441, |
|
"grad_norm": 4.127201126088361, |
|
"learning_rate": 9.978746664509032e-06, |
|
"loss": 0.1683, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.060498220640569395, |
|
"grad_norm": 3.54002782236816, |
|
"learning_rate": 9.97744005136599e-06, |
|
"loss": 0.1393, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06227758007117438, |
|
"grad_norm": 3.9994835236941086, |
|
"learning_rate": 9.976094554048912e-06, |
|
"loss": 0.1765, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06405693950177936, |
|
"grad_norm": 4.4118780771112345, |
|
"learning_rate": 9.974710183068935e-06, |
|
"loss": 0.1497, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.06583629893238434, |
|
"grad_norm": 3.5814134592923725, |
|
"learning_rate": 9.97328694924088e-06, |
|
"loss": 0.1313, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.06761565836298933, |
|
"grad_norm": 3.546933346677075, |
|
"learning_rate": 9.971824863683168e-06, |
|
"loss": 0.179, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.0693950177935943, |
|
"grad_norm": 4.409508801427335, |
|
"learning_rate": 9.970323937817732e-06, |
|
"loss": 0.1532, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.0711743772241993, |
|
"grad_norm": 4.064823711697666, |
|
"learning_rate": 9.968784183369929e-06, |
|
"loss": 0.1333, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07295373665480427, |
|
"grad_norm": 4.13603007432746, |
|
"learning_rate": 9.96720561236845e-06, |
|
"loss": 0.1685, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07473309608540925, |
|
"grad_norm": 4.888012261539274, |
|
"learning_rate": 9.965588237145219e-06, |
|
"loss": 0.1915, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.07651245551601424, |
|
"grad_norm": 3.9626430689687036, |
|
"learning_rate": 9.963932070335307e-06, |
|
"loss": 0.1496, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.07829181494661921, |
|
"grad_norm": 4.242117551125977, |
|
"learning_rate": 9.962237124876828e-06, |
|
"loss": 0.1711, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.0800711743772242, |
|
"grad_norm": 3.709769981412935, |
|
"learning_rate": 9.960503414010833e-06, |
|
"loss": 0.1619, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08185053380782918, |
|
"grad_norm": 3.9413968816462632, |
|
"learning_rate": 9.958730951281218e-06, |
|
"loss": 0.1538, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.08362989323843416, |
|
"grad_norm": 4.3108872937569975, |
|
"learning_rate": 9.956919750534607e-06, |
|
"loss": 0.1752, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.08540925266903915, |
|
"grad_norm": 4.120381736597941, |
|
"learning_rate": 9.955069825920249e-06, |
|
"loss": 0.1993, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.08718861209964412, |
|
"grad_norm": 3.7008662255838223, |
|
"learning_rate": 9.953181191889913e-06, |
|
"loss": 0.177, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.08896797153024912, |
|
"grad_norm": 3.7327017984860205, |
|
"learning_rate": 9.95125386319776e-06, |
|
"loss": 0.1331, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.09074733096085409, |
|
"grad_norm": 4.898664864393144, |
|
"learning_rate": 9.949287854900243e-06, |
|
"loss": 0.193, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.09252669039145907, |
|
"grad_norm": 4.41333486774631, |
|
"learning_rate": 9.947283182355982e-06, |
|
"loss": 0.1702, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.09430604982206406, |
|
"grad_norm": 4.5123884113207655, |
|
"learning_rate": 9.945239861225644e-06, |
|
"loss": 0.1612, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.09608540925266904, |
|
"grad_norm": 3.649771626940322, |
|
"learning_rate": 9.943157907471825e-06, |
|
"loss": 0.1597, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.09786476868327403, |
|
"grad_norm": 3.307023602450769, |
|
"learning_rate": 9.941037337358918e-06, |
|
"loss": 0.123, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.099644128113879, |
|
"grad_norm": 4.536200193662386, |
|
"learning_rate": 9.938878167452991e-06, |
|
"loss": 0.188, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.10142348754448399, |
|
"grad_norm": 4.760547475949593, |
|
"learning_rate": 9.936680414621663e-06, |
|
"loss": 0.185, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.10320284697508897, |
|
"grad_norm": 3.501317999588697, |
|
"learning_rate": 9.934444096033958e-06, |
|
"loss": 0.1382, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.10498220640569395, |
|
"grad_norm": 3.857460526437692, |
|
"learning_rate": 9.932169229160183e-06, |
|
"loss": 0.1842, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.10676156583629894, |
|
"grad_norm": 3.963762867573309, |
|
"learning_rate": 9.929855831771787e-06, |
|
"loss": 0.1466, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.10854092526690391, |
|
"grad_norm": 4.011692639832354, |
|
"learning_rate": 9.927503921941218e-06, |
|
"loss": 0.1714, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.1103202846975089, |
|
"grad_norm": 4.053527018107621, |
|
"learning_rate": 9.925113518041796e-06, |
|
"loss": 0.1931, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.11209964412811388, |
|
"grad_norm": 4.717516848519711, |
|
"learning_rate": 9.922684638747551e-06, |
|
"loss": 0.2132, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.11387900355871886, |
|
"grad_norm": 3.5257909656988105, |
|
"learning_rate": 9.920217303033091e-06, |
|
"loss": 0.1424, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.11565836298932385, |
|
"grad_norm": 4.354106189848372, |
|
"learning_rate": 9.917711530173444e-06, |
|
"loss": 0.1758, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.11743772241992882, |
|
"grad_norm": 5.075294159002145, |
|
"learning_rate": 9.91516733974392e-06, |
|
"loss": 0.1927, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.11921708185053381, |
|
"grad_norm": 3.5397032677916367, |
|
"learning_rate": 9.912584751619943e-06, |
|
"loss": 0.1667, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.12099644128113879, |
|
"grad_norm": 3.2533949953809786, |
|
"learning_rate": 9.909963785976902e-06, |
|
"loss": 0.151, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.12277580071174377, |
|
"grad_norm": 3.7444304749697235, |
|
"learning_rate": 9.907304463290004e-06, |
|
"loss": 0.1716, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.12455516014234876, |
|
"grad_norm": 3.367073596605293, |
|
"learning_rate": 9.904606804334094e-06, |
|
"loss": 0.1331, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.12633451957295375, |
|
"grad_norm": 3.6658304597748628, |
|
"learning_rate": 9.901870830183506e-06, |
|
"loss": 0.1485, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.12811387900355872, |
|
"grad_norm": 3.6467717156047574, |
|
"learning_rate": 9.899096562211902e-06, |
|
"loss": 0.1568, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.1298932384341637, |
|
"grad_norm": 3.9321033097713793, |
|
"learning_rate": 9.896284022092088e-06, |
|
"loss": 0.1946, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.13167259786476868, |
|
"grad_norm": 4.539491715011178, |
|
"learning_rate": 9.893433231795864e-06, |
|
"loss": 0.2308, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.13345195729537365, |
|
"grad_norm": 4.679094878109431, |
|
"learning_rate": 9.890544213593838e-06, |
|
"loss": 0.2087, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.13523131672597866, |
|
"grad_norm": 3.6526708728475215, |
|
"learning_rate": 9.887616990055262e-06, |
|
"loss": 0.1778, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.13701067615658363, |
|
"grad_norm": 4.169163043025102, |
|
"learning_rate": 9.884651584047845e-06, |
|
"loss": 0.1767, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.1387900355871886, |
|
"grad_norm": 3.9250479835163414, |
|
"learning_rate": 9.881648018737587e-06, |
|
"loss": 0.1892, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.14056939501779359, |
|
"grad_norm": 3.8081750219404236, |
|
"learning_rate": 9.878606317588588e-06, |
|
"loss": 0.1375, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.1423487544483986, |
|
"grad_norm": 4.800604016795646, |
|
"learning_rate": 9.875526504362868e-06, |
|
"loss": 0.2149, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14412811387900357, |
|
"grad_norm": 4.261060979335161, |
|
"learning_rate": 9.872408603120187e-06, |
|
"loss": 0.2105, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.14590747330960854, |
|
"grad_norm": 4.065020569329937, |
|
"learning_rate": 9.869252638217846e-06, |
|
"loss": 0.1518, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.14768683274021352, |
|
"grad_norm": 3.878513262413614, |
|
"learning_rate": 9.866058634310503e-06, |
|
"loss": 0.1835, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.1494661921708185, |
|
"grad_norm": 3.845921176335027, |
|
"learning_rate": 9.862826616349981e-06, |
|
"loss": 0.1747, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.1512455516014235, |
|
"grad_norm": 4.43554395509351, |
|
"learning_rate": 9.859556609585075e-06, |
|
"loss": 0.1802, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.15302491103202848, |
|
"grad_norm": 3.7815632135200574, |
|
"learning_rate": 9.856248639561346e-06, |
|
"loss": 0.1488, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.15480427046263345, |
|
"grad_norm": 4.282652569919178, |
|
"learning_rate": 9.85290273212093e-06, |
|
"loss": 0.1515, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.15658362989323843, |
|
"grad_norm": 3.9128576117413405, |
|
"learning_rate": 9.849518913402334e-06, |
|
"loss": 0.1703, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.1583629893238434, |
|
"grad_norm": 3.9703665836867437, |
|
"learning_rate": 9.84609720984023e-06, |
|
"loss": 0.1691, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.1601423487544484, |
|
"grad_norm": 4.185704105043625, |
|
"learning_rate": 9.84263764816525e-06, |
|
"loss": 0.1716, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.1619217081850534, |
|
"grad_norm": 3.5891599904229676, |
|
"learning_rate": 9.839140255403776e-06, |
|
"loss": 0.1415, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.16370106761565836, |
|
"grad_norm": 3.7928658187309914, |
|
"learning_rate": 9.83560505887773e-06, |
|
"loss": 0.175, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.16548042704626334, |
|
"grad_norm": 3.3704849779278994, |
|
"learning_rate": 9.83203208620436e-06, |
|
"loss": 0.1619, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.16725978647686832, |
|
"grad_norm": 3.796828606138377, |
|
"learning_rate": 9.828421365296023e-06, |
|
"loss": 0.1482, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.16903914590747332, |
|
"grad_norm": 4.615498572850572, |
|
"learning_rate": 9.824772924359974e-06, |
|
"loss": 0.1908, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.1708185053380783, |
|
"grad_norm": 3.728839698612651, |
|
"learning_rate": 9.821086791898133e-06, |
|
"loss": 0.16, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.17259786476868327, |
|
"grad_norm": 3.2049577534094094, |
|
"learning_rate": 9.817362996706872e-06, |
|
"loss": 0.1689, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.17437722419928825, |
|
"grad_norm": 3.2279347901357522, |
|
"learning_rate": 9.81360156787679e-06, |
|
"loss": 0.1351, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.17615658362989323, |
|
"grad_norm": 3.516415169951149, |
|
"learning_rate": 9.809802534792477e-06, |
|
"loss": 0.1603, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.17793594306049823, |
|
"grad_norm": 3.977286669031502, |
|
"learning_rate": 9.805965927132294e-06, |
|
"loss": 0.1636, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.1797153024911032, |
|
"grad_norm": 3.5005080399753075, |
|
"learning_rate": 9.802091774868143e-06, |
|
"loss": 0.1578, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.18149466192170818, |
|
"grad_norm": 3.365373107736434, |
|
"learning_rate": 9.798180108265218e-06, |
|
"loss": 0.1725, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.18327402135231316, |
|
"grad_norm": 3.3131535411815864, |
|
"learning_rate": 9.794230957881785e-06, |
|
"loss": 0.125, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.18505338078291814, |
|
"grad_norm": 4.101195342978997, |
|
"learning_rate": 9.79024435456893e-06, |
|
"loss": 0.1572, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.18683274021352314, |
|
"grad_norm": 4.49012750776341, |
|
"learning_rate": 9.786220329470334e-06, |
|
"loss": 0.1652, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.18861209964412812, |
|
"grad_norm": 3.635601663345282, |
|
"learning_rate": 9.782158914022011e-06, |
|
"loss": 0.1519, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.1903914590747331, |
|
"grad_norm": 3.9534531450238797, |
|
"learning_rate": 9.778060139952075e-06, |
|
"loss": 0.2045, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.19217081850533807, |
|
"grad_norm": 4.047231999594814, |
|
"learning_rate": 9.773924039280488e-06, |
|
"loss": 0.1595, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.19395017793594305, |
|
"grad_norm": 2.9855379335963064, |
|
"learning_rate": 9.769750644318814e-06, |
|
"loss": 0.1149, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.19572953736654805, |
|
"grad_norm": 4.28186876116843, |
|
"learning_rate": 9.765539987669956e-06, |
|
"loss": 0.1606, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.19750889679715303, |
|
"grad_norm": 4.1718882566652615, |
|
"learning_rate": 9.761292102227917e-06, |
|
"loss": 0.2121, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.199288256227758, |
|
"grad_norm": 4.932623073689126, |
|
"learning_rate": 9.757007021177529e-06, |
|
"loss": 0.212, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.20106761565836298, |
|
"grad_norm": 3.077988855299502, |
|
"learning_rate": 9.752684777994197e-06, |
|
"loss": 0.1373, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.20284697508896798, |
|
"grad_norm": 4.069040890530539, |
|
"learning_rate": 9.748325406443647e-06, |
|
"loss": 0.1713, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.20462633451957296, |
|
"grad_norm": 4.181982760746228, |
|
"learning_rate": 9.743928940581646e-06, |
|
"loss": 0.1898, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.20640569395017794, |
|
"grad_norm": 5.382830001513348, |
|
"learning_rate": 9.739495414753754e-06, |
|
"loss": 0.2149, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.20818505338078291, |
|
"grad_norm": 3.7660433037491337, |
|
"learning_rate": 9.73502486359504e-06, |
|
"loss": 0.1608, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.2099644128113879, |
|
"grad_norm": 4.455567562101738, |
|
"learning_rate": 9.73051732202982e-06, |
|
"loss": 0.167, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.2117437722419929, |
|
"grad_norm": 3.675888355578825, |
|
"learning_rate": 9.725972825271381e-06, |
|
"loss": 0.1507, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.21352313167259787, |
|
"grad_norm": 3.674319404299456, |
|
"learning_rate": 9.721391408821713e-06, |
|
"loss": 0.1627, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.21530249110320285, |
|
"grad_norm": 3.735245273603654, |
|
"learning_rate": 9.716773108471213e-06, |
|
"loss": 0.1924, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.21708185053380782, |
|
"grad_norm": 3.43553656025277, |
|
"learning_rate": 9.712117960298433e-06, |
|
"loss": 0.1671, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.2188612099644128, |
|
"grad_norm": 4.085997096715217, |
|
"learning_rate": 9.707426000669773e-06, |
|
"loss": 0.1621, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.2206405693950178, |
|
"grad_norm": 3.9167489978102314, |
|
"learning_rate": 9.702697266239211e-06, |
|
"loss": 0.1678, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.22241992882562278, |
|
"grad_norm": 4.300111392829452, |
|
"learning_rate": 9.697931793948012e-06, |
|
"loss": 0.2106, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.22419928825622776, |
|
"grad_norm": 3.338273690007913, |
|
"learning_rate": 9.693129621024441e-06, |
|
"loss": 0.1598, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.22597864768683273, |
|
"grad_norm": 3.3687998869024742, |
|
"learning_rate": 9.68829078498347e-06, |
|
"loss": 0.155, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.2277580071174377, |
|
"grad_norm": 4.272220794014751, |
|
"learning_rate": 9.683415323626487e-06, |
|
"loss": 0.1641, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.22953736654804271, |
|
"grad_norm": 5.017070021853791, |
|
"learning_rate": 9.678503275040997e-06, |
|
"loss": 0.2004, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.2313167259786477, |
|
"grad_norm": 3.693542755498243, |
|
"learning_rate": 9.673554677600336e-06, |
|
"loss": 0.1648, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.23309608540925267, |
|
"grad_norm": 3.49184342751485, |
|
"learning_rate": 9.668569569963355e-06, |
|
"loss": 0.1774, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.23487544483985764, |
|
"grad_norm": 2.900568687575795, |
|
"learning_rate": 9.663547991074129e-06, |
|
"loss": 0.1218, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.23665480427046262, |
|
"grad_norm": 4.210955217942697, |
|
"learning_rate": 9.658489980161643e-06, |
|
"loss": 0.1682, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.23843416370106763, |
|
"grad_norm": 3.7999187035913553, |
|
"learning_rate": 9.653395576739504e-06, |
|
"loss": 0.1803, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.2402135231316726, |
|
"grad_norm": 4.422544497338594, |
|
"learning_rate": 9.648264820605611e-06, |
|
"loss": 0.1883, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.24199288256227758, |
|
"grad_norm": 3.6262352329292717, |
|
"learning_rate": 9.643097751841854e-06, |
|
"loss": 0.2352, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.24377224199288255, |
|
"grad_norm": 3.486618136560569, |
|
"learning_rate": 9.637894410813803e-06, |
|
"loss": 0.1842, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.24555160142348753, |
|
"grad_norm": 4.3313313283633175, |
|
"learning_rate": 9.632654838170393e-06, |
|
"loss": 0.1979, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.24733096085409254, |
|
"grad_norm": 4.0405506110889355, |
|
"learning_rate": 9.627379074843595e-06, |
|
"loss": 0.1879, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.2491103202846975, |
|
"grad_norm": 4.016808131044334, |
|
"learning_rate": 9.622067162048111e-06, |
|
"loss": 0.201, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.2508896797153025, |
|
"grad_norm": 3.144449226879444, |
|
"learning_rate": 9.616719141281044e-06, |
|
"loss": 0.179, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.2526690391459075, |
|
"grad_norm": 4.289229369875811, |
|
"learning_rate": 9.611335054321576e-06, |
|
"loss": 0.2304, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.25444839857651247, |
|
"grad_norm": 4.048273673922859, |
|
"learning_rate": 9.605914943230637e-06, |
|
"loss": 0.1746, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.25622775800711745, |
|
"grad_norm": 3.3223941299195405, |
|
"learning_rate": 9.600458850350588e-06, |
|
"loss": 0.1593, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.2580071174377224, |
|
"grad_norm": 3.931590848582908, |
|
"learning_rate": 9.594966818304875e-06, |
|
"loss": 0.1822, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.2597864768683274, |
|
"grad_norm": 3.650707503835131, |
|
"learning_rate": 9.589438889997712e-06, |
|
"loss": 0.1414, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.2615658362989324, |
|
"grad_norm": 3.3504965744204567, |
|
"learning_rate": 9.583875108613727e-06, |
|
"loss": 0.1404, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.26334519572953735, |
|
"grad_norm": 3.6250206048365725, |
|
"learning_rate": 9.578275517617646e-06, |
|
"loss": 0.1705, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.26512455516014233, |
|
"grad_norm": 3.7064286442811527, |
|
"learning_rate": 9.572640160753936e-06, |
|
"loss": 0.1626, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.2669039145907473, |
|
"grad_norm": 3.7554520763936505, |
|
"learning_rate": 9.566969082046471e-06, |
|
"loss": 0.1797, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.26868327402135234, |
|
"grad_norm": 2.620983961386856, |
|
"learning_rate": 9.561262325798188e-06, |
|
"loss": 0.1322, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.2704626334519573, |
|
"grad_norm": 3.253040038592763, |
|
"learning_rate": 9.555519936590739e-06, |
|
"loss": 0.143, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.2722419928825623, |
|
"grad_norm": 3.200438760991816, |
|
"learning_rate": 9.549741959284147e-06, |
|
"loss": 0.1414, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.27402135231316727, |
|
"grad_norm": 4.121675654726017, |
|
"learning_rate": 9.543928439016445e-06, |
|
"loss": 0.1693, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.27580071174377224, |
|
"grad_norm": 3.637403107060314, |
|
"learning_rate": 9.538079421203339e-06, |
|
"loss": 0.166, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.2775800711743772, |
|
"grad_norm": 3.3745826364096088, |
|
"learning_rate": 9.532194951537838e-06, |
|
"loss": 0.1451, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.2793594306049822, |
|
"grad_norm": 4.007802969807231, |
|
"learning_rate": 9.52627507598991e-06, |
|
"loss": 0.1621, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.28113879003558717, |
|
"grad_norm": 3.260888897838836, |
|
"learning_rate": 9.52031984080611e-06, |
|
"loss": 0.1417, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.28291814946619215, |
|
"grad_norm": 3.5747680573202905, |
|
"learning_rate": 9.514329292509227e-06, |
|
"loss": 0.1421, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.2846975088967972, |
|
"grad_norm": 3.5710681038888685, |
|
"learning_rate": 9.508303477897925e-06, |
|
"loss": 0.153, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.28647686832740216, |
|
"grad_norm": 4.079458376261874, |
|
"learning_rate": 9.502242444046365e-06, |
|
"loss": 0.1664, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.28825622775800713, |
|
"grad_norm": 4.261739377476509, |
|
"learning_rate": 9.496146238303846e-06, |
|
"loss": 0.2003, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.2900355871886121, |
|
"grad_norm": 4.313297530665723, |
|
"learning_rate": 9.49001490829443e-06, |
|
"loss": 0.1719, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.2918149466192171, |
|
"grad_norm": 4.42329592595159, |
|
"learning_rate": 9.483848501916578e-06, |
|
"loss": 0.246, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.29359430604982206, |
|
"grad_norm": 3.6922030093348526, |
|
"learning_rate": 9.477647067342766e-06, |
|
"loss": 0.1834, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.29537366548042704, |
|
"grad_norm": 3.956065402253437, |
|
"learning_rate": 9.471410653019115e-06, |
|
"loss": 0.1816, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.297153024911032, |
|
"grad_norm": 3.97582600631354, |
|
"learning_rate": 9.46513930766501e-06, |
|
"loss": 0.1962, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.298932384341637, |
|
"grad_norm": 3.970012630181967, |
|
"learning_rate": 9.458833080272723e-06, |
|
"loss": 0.1668, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.30071174377224197, |
|
"grad_norm": 3.8867282046798666, |
|
"learning_rate": 9.45249202010702e-06, |
|
"loss": 0.1616, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.302491103202847, |
|
"grad_norm": 3.665436286005375, |
|
"learning_rate": 9.446116176704791e-06, |
|
"loss": 0.1615, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.304270462633452, |
|
"grad_norm": 3.944144399022084, |
|
"learning_rate": 9.439705599874653e-06, |
|
"loss": 0.1771, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.30604982206405695, |
|
"grad_norm": 4.065737950759166, |
|
"learning_rate": 9.433260339696564e-06, |
|
"loss": 0.2155, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.30782918149466193, |
|
"grad_norm": 3.837428478645305, |
|
"learning_rate": 9.426780446521429e-06, |
|
"loss": 0.2008, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.3096085409252669, |
|
"grad_norm": 3.400327492598346, |
|
"learning_rate": 9.42026597097071e-06, |
|
"loss": 0.1839, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.3113879003558719, |
|
"grad_norm": 3.157842804621906, |
|
"learning_rate": 9.413716963936033e-06, |
|
"loss": 0.1463, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.31316725978647686, |
|
"grad_norm": 3.3185190999798597, |
|
"learning_rate": 9.407133476578778e-06, |
|
"loss": 0.1642, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.31494661921708184, |
|
"grad_norm": 4.277407750283493, |
|
"learning_rate": 9.400515560329698e-06, |
|
"loss": 0.2155, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.3167259786476868, |
|
"grad_norm": 3.5544836483286146, |
|
"learning_rate": 9.393863266888501e-06, |
|
"loss": 0.1579, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.3185053380782918, |
|
"grad_norm": 3.937335731935511, |
|
"learning_rate": 9.387176648223457e-06, |
|
"loss": 0.1774, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.3202846975088968, |
|
"grad_norm": 4.1010156907842505, |
|
"learning_rate": 9.38045575657098e-06, |
|
"loss": 0.1853, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.3220640569395018, |
|
"grad_norm": 3.844259477065041, |
|
"learning_rate": 9.37370064443524e-06, |
|
"loss": 0.1752, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.3238434163701068, |
|
"grad_norm": 3.1323058321872725, |
|
"learning_rate": 9.366911364587726e-06, |
|
"loss": 0.1478, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.32562277580071175, |
|
"grad_norm": 3.3437894177981935, |
|
"learning_rate": 9.360087970066854e-06, |
|
"loss": 0.1489, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.3274021352313167, |
|
"grad_norm": 3.6570334163919633, |
|
"learning_rate": 9.353230514177553e-06, |
|
"loss": 0.1411, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.3291814946619217, |
|
"grad_norm": 3.707931419025354, |
|
"learning_rate": 9.346339050490832e-06, |
|
"loss": 0.1449, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.3309608540925267, |
|
"grad_norm": 4.492080127257436, |
|
"learning_rate": 9.33941363284338e-06, |
|
"loss": 0.1631, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.33274021352313166, |
|
"grad_norm": 3.670011544756579, |
|
"learning_rate": 9.332454315337129e-06, |
|
"loss": 0.1533, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.33451957295373663, |
|
"grad_norm": 3.9277175197173153, |
|
"learning_rate": 9.325461152338846e-06, |
|
"loss": 0.1734, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.33629893238434166, |
|
"grad_norm": 3.901788915486977, |
|
"learning_rate": 9.3184341984797e-06, |
|
"loss": 0.1671, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.33807829181494664, |
|
"grad_norm": 4.4192828586036885, |
|
"learning_rate": 9.311373508654838e-06, |
|
"loss": 0.1712, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.3398576512455516, |
|
"grad_norm": 4.878336443820035, |
|
"learning_rate": 9.30427913802295e-06, |
|
"loss": 0.2377, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.3416370106761566, |
|
"grad_norm": 4.237860817362849, |
|
"learning_rate": 9.297151142005852e-06, |
|
"loss": 0.1663, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.34341637010676157, |
|
"grad_norm": 3.6542720930687214, |
|
"learning_rate": 9.289989576288035e-06, |
|
"loss": 0.1588, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.34519572953736655, |
|
"grad_norm": 4.001300196034673, |
|
"learning_rate": 9.282794496816244e-06, |
|
"loss": 0.1891, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.3469750889679715, |
|
"grad_norm": 4.249988665919447, |
|
"learning_rate": 9.27556595979904e-06, |
|
"loss": 0.1508, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.3487544483985765, |
|
"grad_norm": 3.7503601244959, |
|
"learning_rate": 9.26830402170635e-06, |
|
"loss": 0.1619, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.3505338078291815, |
|
"grad_norm": 3.7039079743781387, |
|
"learning_rate": 9.261008739269035e-06, |
|
"loss": 0.1476, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.35231316725978645, |
|
"grad_norm": 4.121473524631294, |
|
"learning_rate": 9.253680169478448e-06, |
|
"loss": 0.1715, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.3540925266903915, |
|
"grad_norm": 4.034157007855028, |
|
"learning_rate": 9.246318369585983e-06, |
|
"loss": 0.1571, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.35587188612099646, |
|
"grad_norm": 4.676542798507013, |
|
"learning_rate": 9.238923397102629e-06, |
|
"loss": 0.1965, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.35587188612099646, |
|
"eval_loss": 0.18451206386089325, |
|
"eval_runtime": 1.5775, |
|
"eval_samples_per_second": 29.16, |
|
"eval_steps_per_second": 7.607, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.35765124555160144, |
|
"grad_norm": 3.4097163747347676, |
|
"learning_rate": 9.231495309798525e-06, |
|
"loss": 0.1393, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.3594306049822064, |
|
"grad_norm": 3.9329016959881447, |
|
"learning_rate": 9.224034165702506e-06, |
|
"loss": 0.1708, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.3612099644128114, |
|
"grad_norm": 2.8305856424021156, |
|
"learning_rate": 9.216540023101646e-06, |
|
"loss": 0.1413, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.36298932384341637, |
|
"grad_norm": 3.2383633293202867, |
|
"learning_rate": 9.209012940540806e-06, |
|
"loss": 0.1483, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.36476868327402134, |
|
"grad_norm": 3.2002923030721697, |
|
"learning_rate": 9.20145297682218e-06, |
|
"loss": 0.1666, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.3665480427046263, |
|
"grad_norm": 4.449892888247499, |
|
"learning_rate": 9.193860191004833e-06, |
|
"loss": 0.2239, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.3683274021352313, |
|
"grad_norm": 3.7527361998083912, |
|
"learning_rate": 9.186234642404234e-06, |
|
"loss": 0.1996, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.3701067615658363, |
|
"grad_norm": 4.411510792955676, |
|
"learning_rate": 9.178576390591803e-06, |
|
"loss": 0.1817, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.3718861209964413, |
|
"grad_norm": 3.16099503657051, |
|
"learning_rate": 9.170885495394435e-06, |
|
"loss": 0.139, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.3736654804270463, |
|
"grad_norm": 4.37338786791743, |
|
"learning_rate": 9.16316201689404e-06, |
|
"loss": 0.1761, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.37544483985765126, |
|
"grad_norm": 5.506684706071413, |
|
"learning_rate": 9.155406015427076e-06, |
|
"loss": 0.2021, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.37722419928825623, |
|
"grad_norm": 4.21426992586056, |
|
"learning_rate": 9.147617551584066e-06, |
|
"loss": 0.1748, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.3790035587188612, |
|
"grad_norm": 3.513633260293238, |
|
"learning_rate": 9.139796686209135e-06, |
|
"loss": 0.2019, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.3807829181494662, |
|
"grad_norm": 3.5635035977397296, |
|
"learning_rate": 9.131943480399531e-06, |
|
"loss": 0.1527, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.38256227758007116, |
|
"grad_norm": 3.6626851396287936, |
|
"learning_rate": 9.124057995505148e-06, |
|
"loss": 0.1576, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.38434163701067614, |
|
"grad_norm": 4.9491055736331395, |
|
"learning_rate": 9.11614029312805e-06, |
|
"loss": 0.2569, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.3861209964412811, |
|
"grad_norm": 4.603733048145616, |
|
"learning_rate": 9.108190435121982e-06, |
|
"loss": 0.184, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.3879003558718861, |
|
"grad_norm": 4.060162486617455, |
|
"learning_rate": 9.100208483591892e-06, |
|
"loss": 0.1608, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.3896797153024911, |
|
"grad_norm": 4.714936643257856, |
|
"learning_rate": 9.092194500893448e-06, |
|
"loss": 0.2355, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.3914590747330961, |
|
"grad_norm": 3.6711856252423134, |
|
"learning_rate": 9.084148549632547e-06, |
|
"loss": 0.1606, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3932384341637011, |
|
"grad_norm": 3.1357804102568374, |
|
"learning_rate": 9.076070692664827e-06, |
|
"loss": 0.1657, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.39501779359430605, |
|
"grad_norm": 3.8131745243251216, |
|
"learning_rate": 9.067960993095176e-06, |
|
"loss": 0.178, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.39679715302491103, |
|
"grad_norm": 3.875646848369458, |
|
"learning_rate": 9.059819514277238e-06, |
|
"loss": 0.1652, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.398576512455516, |
|
"grad_norm": 3.5577062331635165, |
|
"learning_rate": 9.05164631981292e-06, |
|
"loss": 0.1615, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.400355871886121, |
|
"grad_norm": 3.638018887689576, |
|
"learning_rate": 9.043441473551893e-06, |
|
"loss": 0.1844, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.40213523131672596, |
|
"grad_norm": 3.917290921571143, |
|
"learning_rate": 9.035205039591099e-06, |
|
"loss": 0.1838, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.40391459074733094, |
|
"grad_norm": 4.172845825613448, |
|
"learning_rate": 9.02693708227424e-06, |
|
"loss": 0.1766, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.40569395017793597, |
|
"grad_norm": 3.2608622242786747, |
|
"learning_rate": 9.018637666191284e-06, |
|
"loss": 0.2001, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.40747330960854095, |
|
"grad_norm": 3.78274789728316, |
|
"learning_rate": 9.010306856177958e-06, |
|
"loss": 0.2146, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.4092526690391459, |
|
"grad_norm": 4.19440659193774, |
|
"learning_rate": 9.001944717315236e-06, |
|
"loss": 0.2047, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.4110320284697509, |
|
"grad_norm": 4.2348911081955185, |
|
"learning_rate": 8.993551314928846e-06, |
|
"loss": 0.1891, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.4128113879003559, |
|
"grad_norm": 3.406012066135414, |
|
"learning_rate": 8.985126714588739e-06, |
|
"loss": 0.1224, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.41459074733096085, |
|
"grad_norm": 3.8032239303686395, |
|
"learning_rate": 8.976670982108591e-06, |
|
"loss": 0.1757, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.41637010676156583, |
|
"grad_norm": 4.449186848287025, |
|
"learning_rate": 8.968184183545285e-06, |
|
"loss": 0.2354, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.4181494661921708, |
|
"grad_norm": 3.9428055131473054, |
|
"learning_rate": 8.959666385198396e-06, |
|
"loss": 0.1822, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.4199288256227758, |
|
"grad_norm": 3.5021195235576807, |
|
"learning_rate": 8.951117653609666e-06, |
|
"loss": 0.1515, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.42170818505338076, |
|
"grad_norm": 4.363850681942495, |
|
"learning_rate": 8.9425380555625e-06, |
|
"loss": 0.1707, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.4234875444839858, |
|
"grad_norm": 2.7681000905192885, |
|
"learning_rate": 8.933927658081423e-06, |
|
"loss": 0.1195, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.42526690391459077, |
|
"grad_norm": 3.665504005637689, |
|
"learning_rate": 8.925286528431578e-06, |
|
"loss": 0.1778, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.42704626334519574, |
|
"grad_norm": 3.9068027826688483, |
|
"learning_rate": 8.916614734118184e-06, |
|
"loss": 0.1369, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.4288256227758007, |
|
"grad_norm": 3.1765871273784034, |
|
"learning_rate": 8.907912342886016e-06, |
|
"loss": 0.1599, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.4306049822064057, |
|
"grad_norm": 3.677170065236907, |
|
"learning_rate": 8.899179422718877e-06, |
|
"loss": 0.1673, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.43238434163701067, |
|
"grad_norm": 3.967643437549682, |
|
"learning_rate": 8.890416041839061e-06, |
|
"loss": 0.1598, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.43416370106761565, |
|
"grad_norm": 3.1947781069967474, |
|
"learning_rate": 8.881622268706825e-06, |
|
"loss": 0.1653, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.4359430604982206, |
|
"grad_norm": 3.4465942708932835, |
|
"learning_rate": 8.872798172019856e-06, |
|
"loss": 0.1547, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.4377224199288256, |
|
"grad_norm": 3.7845204350928614, |
|
"learning_rate": 8.863943820712726e-06, |
|
"loss": 0.1827, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.4395017793594306, |
|
"grad_norm": 3.664090881288538, |
|
"learning_rate": 8.855059283956363e-06, |
|
"loss": 0.212, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.4412811387900356, |
|
"grad_norm": 3.747224833589761, |
|
"learning_rate": 8.8461446311575e-06, |
|
"loss": 0.1556, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.4430604982206406, |
|
"grad_norm": 3.429324406977528, |
|
"learning_rate": 8.837199931958147e-06, |
|
"loss": 0.1386, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.44483985765124556, |
|
"grad_norm": 3.675462836915605, |
|
"learning_rate": 8.828225256235035e-06, |
|
"loss": 0.1854, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.44661921708185054, |
|
"grad_norm": 3.845500608190304, |
|
"learning_rate": 8.819220674099074e-06, |
|
"loss": 0.1743, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.4483985765124555, |
|
"grad_norm": 3.3695317869341235, |
|
"learning_rate": 8.810186255894804e-06, |
|
"loss": 0.1589, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.4501779359430605, |
|
"grad_norm": 3.8876246254916693, |
|
"learning_rate": 8.801122072199848e-06, |
|
"loss": 0.1507, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.45195729537366547, |
|
"grad_norm": 3.2330372651062733, |
|
"learning_rate": 8.792028193824364e-06, |
|
"loss": 0.127, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.45373665480427045, |
|
"grad_norm": 3.072865956218073, |
|
"learning_rate": 8.782904691810478e-06, |
|
"loss": 0.1264, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.4555160142348754, |
|
"grad_norm": 3.3769442654375936, |
|
"learning_rate": 8.77375163743175e-06, |
|
"loss": 0.1543, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.45729537366548045, |
|
"grad_norm": 3.3779642533742855, |
|
"learning_rate": 8.764569102192593e-06, |
|
"loss": 0.1732, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.45907473309608543, |
|
"grad_norm": 3.2938481702142397, |
|
"learning_rate": 8.755357157827735e-06, |
|
"loss": 0.1174, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.4608540925266904, |
|
"grad_norm": 3.9777451032938176, |
|
"learning_rate": 8.746115876301651e-06, |
|
"loss": 0.1665, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.4626334519572954, |
|
"grad_norm": 3.271747144540461, |
|
"learning_rate": 8.736845329807994e-06, |
|
"loss": 0.1537, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.46441281138790036, |
|
"grad_norm": 4.208040959150267, |
|
"learning_rate": 8.727545590769044e-06, |
|
"loss": 0.1651, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.46619217081850534, |
|
"grad_norm": 3.7511948867059792, |
|
"learning_rate": 8.718216731835131e-06, |
|
"loss": 0.1696, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.4679715302491103, |
|
"grad_norm": 3.78015843461773, |
|
"learning_rate": 8.708858825884075e-06, |
|
"loss": 0.1907, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.4697508896797153, |
|
"grad_norm": 2.700058009020738, |
|
"learning_rate": 8.699471946020612e-06, |
|
"loss": 0.138, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.47153024911032027, |
|
"grad_norm": 4.0009204414525374, |
|
"learning_rate": 8.690056165575825e-06, |
|
"loss": 0.1667, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.47330960854092524, |
|
"grad_norm": 3.597205393854175, |
|
"learning_rate": 8.680611558106571e-06, |
|
"loss": 0.1658, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.4750889679715303, |
|
"grad_norm": 3.1799592549985403, |
|
"learning_rate": 8.671138197394907e-06, |
|
"loss": 0.1269, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.47686832740213525, |
|
"grad_norm": 2.863770024333171, |
|
"learning_rate": 8.661636157447511e-06, |
|
"loss": 0.1506, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.4786476868327402, |
|
"grad_norm": 4.867852181166165, |
|
"learning_rate": 8.652105512495106e-06, |
|
"loss": 0.1957, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.4804270462633452, |
|
"grad_norm": 3.839701813057967, |
|
"learning_rate": 8.64254633699188e-06, |
|
"loss": 0.1496, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.4822064056939502, |
|
"grad_norm": 3.7667678043954522, |
|
"learning_rate": 8.632958705614905e-06, |
|
"loss": 0.1676, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.48398576512455516, |
|
"grad_norm": 3.7854123314743324, |
|
"learning_rate": 8.623342693263549e-06, |
|
"loss": 0.1424, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.48576512455516013, |
|
"grad_norm": 3.9574217655350368, |
|
"learning_rate": 8.6136983750589e-06, |
|
"loss": 0.1646, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.4875444839857651, |
|
"grad_norm": 3.7936751710423082, |
|
"learning_rate": 8.604025826343167e-06, |
|
"loss": 0.1549, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.4893238434163701, |
|
"grad_norm": 4.634260095825603, |
|
"learning_rate": 8.594325122679107e-06, |
|
"loss": 0.2097, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.49110320284697506, |
|
"grad_norm": 3.588270161463648, |
|
"learning_rate": 8.584596339849419e-06, |
|
"loss": 0.1898, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.4928825622775801, |
|
"grad_norm": 3.300053767053535, |
|
"learning_rate": 8.574839553856157e-06, |
|
"loss": 0.1396, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.49466192170818507, |
|
"grad_norm": 4.292729138449231, |
|
"learning_rate": 8.565054840920145e-06, |
|
"loss": 0.2114, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.49644128113879005, |
|
"grad_norm": 3.2640474694173665, |
|
"learning_rate": 8.55524227748037e-06, |
|
"loss": 0.1365, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.498220640569395, |
|
"grad_norm": 3.4655719776103306, |
|
"learning_rate": 8.545401940193392e-06, |
|
"loss": 0.1293, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 3.47607368124659, |
|
"learning_rate": 8.535533905932739e-06, |
|
"loss": 0.1524, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.501779359430605, |
|
"grad_norm": 4.584806676200785, |
|
"learning_rate": 8.525638251788312e-06, |
|
"loss": 0.2252, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.50355871886121, |
|
"grad_norm": 3.69359179585815, |
|
"learning_rate": 8.515715055065783e-06, |
|
"loss": 0.1814, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.505338078291815, |
|
"grad_norm": 4.1590929428606795, |
|
"learning_rate": 8.505764393285985e-06, |
|
"loss": 0.2093, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5071174377224199, |
|
"grad_norm": 3.740781966600415, |
|
"learning_rate": 8.495786344184314e-06, |
|
"loss": 0.1556, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.5088967971530249, |
|
"grad_norm": 4.338895922270033, |
|
"learning_rate": 8.485780985710113e-06, |
|
"loss": 0.1807, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.5106761565836299, |
|
"grad_norm": 3.4129983654862235, |
|
"learning_rate": 8.475748396026074e-06, |
|
"loss": 0.1579, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.5124555160142349, |
|
"grad_norm": 3.7055869120602254, |
|
"learning_rate": 8.46568865350762e-06, |
|
"loss": 0.1612, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.5142348754448398, |
|
"grad_norm": 4.534140198068331, |
|
"learning_rate": 8.45560183674229e-06, |
|
"loss": 0.2042, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.5160142348754448, |
|
"grad_norm": 4.215053397204151, |
|
"learning_rate": 8.445488024529133e-06, |
|
"loss": 0.1917, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.5177935943060499, |
|
"grad_norm": 3.24503986298264, |
|
"learning_rate": 8.435347295878087e-06, |
|
"loss": 0.1256, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.5195729537366548, |
|
"grad_norm": 3.256746697571176, |
|
"learning_rate": 8.425179730009368e-06, |
|
"loss": 0.1313, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.5213523131672598, |
|
"grad_norm": 3.8932880192335846, |
|
"learning_rate": 8.41498540635284e-06, |
|
"loss": 0.1647, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.5231316725978647, |
|
"grad_norm": 4.463938077439203, |
|
"learning_rate": 8.404764404547404e-06, |
|
"loss": 0.1725, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.5249110320284698, |
|
"grad_norm": 4.477873913705349, |
|
"learning_rate": 8.394516804440374e-06, |
|
"loss": 0.1828, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.5266903914590747, |
|
"grad_norm": 4.7241714190531034, |
|
"learning_rate": 8.384242686086848e-06, |
|
"loss": 0.1786, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.5284697508896797, |
|
"grad_norm": 3.808236569757432, |
|
"learning_rate": 8.373942129749094e-06, |
|
"loss": 0.2075, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.5302491103202847, |
|
"grad_norm": 3.0009638281219932, |
|
"learning_rate": 8.363615215895908e-06, |
|
"loss": 0.1523, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.5320284697508897, |
|
"grad_norm": 4.53746797007053, |
|
"learning_rate": 8.353262025202e-06, |
|
"loss": 0.1898, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.5338078291814946, |
|
"grad_norm": 3.2735266806906913, |
|
"learning_rate": 8.342882638547351e-06, |
|
"loss": 0.1456, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.5355871886120996, |
|
"grad_norm": 3.4230553563785526, |
|
"learning_rate": 8.332477137016587e-06, |
|
"loss": 0.1494, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.5373665480427047, |
|
"grad_norm": 3.5284197122116105, |
|
"learning_rate": 8.322045601898354e-06, |
|
"loss": 0.1565, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.5391459074733096, |
|
"grad_norm": 3.300484607058103, |
|
"learning_rate": 8.311588114684665e-06, |
|
"loss": 0.1437, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.5409252669039146, |
|
"grad_norm": 3.94778730172822, |
|
"learning_rate": 8.301104757070276e-06, |
|
"loss": 0.2502, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.5427046263345195, |
|
"grad_norm": 4.6084953265327995, |
|
"learning_rate": 8.290595610952045e-06, |
|
"loss": 0.1632, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.5444839857651246, |
|
"grad_norm": 4.454744729508573, |
|
"learning_rate": 8.280060758428294e-06, |
|
"loss": 0.2277, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.5462633451957295, |
|
"grad_norm": 2.8458153265109485, |
|
"learning_rate": 8.269500281798164e-06, |
|
"loss": 0.1227, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.5480427046263345, |
|
"grad_norm": 3.75678653494641, |
|
"learning_rate": 8.258914263560971e-06, |
|
"loss": 0.1668, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.5498220640569395, |
|
"grad_norm": 3.3665597443559987, |
|
"learning_rate": 8.248302786415567e-06, |
|
"loss": 0.1563, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.5516014234875445, |
|
"grad_norm": 3.9390010543126044, |
|
"learning_rate": 8.237665933259693e-06, |
|
"loss": 0.1736, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.5533807829181495, |
|
"grad_norm": 3.7636423808878, |
|
"learning_rate": 8.227003787189323e-06, |
|
"loss": 0.1529, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.5551601423487544, |
|
"grad_norm": 3.0748259587336157, |
|
"learning_rate": 8.216316431498028e-06, |
|
"loss": 0.1492, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.5569395017793595, |
|
"grad_norm": 3.4135404058745267, |
|
"learning_rate": 8.205603949676317e-06, |
|
"loss": 0.1897, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.5587188612099644, |
|
"grad_norm": 3.421210826827016, |
|
"learning_rate": 8.194866425410984e-06, |
|
"loss": 0.1695, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.5604982206405694, |
|
"grad_norm": 3.885473392572468, |
|
"learning_rate": 8.184103942584456e-06, |
|
"loss": 0.1704, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.5622775800711743, |
|
"grad_norm": 3.5257652785127758, |
|
"learning_rate": 8.173316585274144e-06, |
|
"loss": 0.155, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.5640569395017794, |
|
"grad_norm": 3.2228543714919446, |
|
"learning_rate": 8.162504437751775e-06, |
|
"loss": 0.1506, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.5658362989323843, |
|
"grad_norm": 4.072286278058262, |
|
"learning_rate": 8.151667584482742e-06, |
|
"loss": 0.1534, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.5676156583629893, |
|
"grad_norm": 4.40041473053257, |
|
"learning_rate": 8.140806110125442e-06, |
|
"loss": 0.1868, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.5693950177935944, |
|
"grad_norm": 3.6767500875472003, |
|
"learning_rate": 8.129920099530608e-06, |
|
"loss": 0.1861, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.5711743772241993, |
|
"grad_norm": 3.1644812596110192, |
|
"learning_rate": 8.119009637740663e-06, |
|
"loss": 0.102, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.5729537366548043, |
|
"grad_norm": 3.2379145590309313, |
|
"learning_rate": 8.108074809989032e-06, |
|
"loss": 0.1588, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.5747330960854092, |
|
"grad_norm": 3.1041680718352285, |
|
"learning_rate": 8.097115701699498e-06, |
|
"loss": 0.1132, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.5765124555160143, |
|
"grad_norm": 3.7279835302749733, |
|
"learning_rate": 8.086132398485525e-06, |
|
"loss": 0.1825, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.5782918149466192, |
|
"grad_norm": 3.144713504184157, |
|
"learning_rate": 8.075124986149583e-06, |
|
"loss": 0.1384, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.5800711743772242, |
|
"grad_norm": 3.616700783824194, |
|
"learning_rate": 8.064093550682494e-06, |
|
"loss": 0.1488, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.5818505338078291, |
|
"grad_norm": 3.631832890357557, |
|
"learning_rate": 8.053038178262742e-06, |
|
"loss": 0.1309, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.5836298932384342, |
|
"grad_norm": 3.730263831756241, |
|
"learning_rate": 8.041958955255815e-06, |
|
"loss": 0.174, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.5854092526690391, |
|
"grad_norm": 3.4344973244051236, |
|
"learning_rate": 8.030855968213518e-06, |
|
"loss": 0.1504, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.5871886120996441, |
|
"grad_norm": 4.170311185424267, |
|
"learning_rate": 8.019729303873307e-06, |
|
"loss": 0.176, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.5889679715302492, |
|
"grad_norm": 3.420722091406306, |
|
"learning_rate": 8.008579049157607e-06, |
|
"loss": 0.1322, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.5907473309608541, |
|
"grad_norm": 2.9042643546270575, |
|
"learning_rate": 7.99740529117313e-06, |
|
"loss": 0.1405, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.5925266903914591, |
|
"grad_norm": 4.031120434870058, |
|
"learning_rate": 7.986208117210198e-06, |
|
"loss": 0.1577, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.594306049822064, |
|
"grad_norm": 4.249558841527255, |
|
"learning_rate": 7.974987614742066e-06, |
|
"loss": 0.1856, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.5960854092526691, |
|
"grad_norm": 4.514361281033718, |
|
"learning_rate": 7.963743871424224e-06, |
|
"loss": 0.1928, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.597864768683274, |
|
"grad_norm": 3.621708991132865, |
|
"learning_rate": 7.952476975093729e-06, |
|
"loss": 0.1468, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.599644128113879, |
|
"grad_norm": 4.036739039424598, |
|
"learning_rate": 7.941187013768508e-06, |
|
"loss": 0.1977, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.6014234875444839, |
|
"grad_norm": 3.7924829505404096, |
|
"learning_rate": 7.929874075646673e-06, |
|
"loss": 0.1636, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.603202846975089, |
|
"grad_norm": 4.268373138658924, |
|
"learning_rate": 7.918538249105835e-06, |
|
"loss": 0.1548, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.604982206405694, |
|
"grad_norm": 3.981345474705445, |
|
"learning_rate": 7.907179622702409e-06, |
|
"loss": 0.1693, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.6067615658362989, |
|
"grad_norm": 3.4855628136526406, |
|
"learning_rate": 7.895798285170927e-06, |
|
"loss": 0.1524, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.608540925266904, |
|
"grad_norm": 3.3128273260227927, |
|
"learning_rate": 7.88439432542334e-06, |
|
"loss": 0.1335, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.6103202846975089, |
|
"grad_norm": 3.612128081425007, |
|
"learning_rate": 7.872967832548327e-06, |
|
"loss": 0.1848, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.6120996441281139, |
|
"grad_norm": 3.4822967327810392, |
|
"learning_rate": 7.861518895810597e-06, |
|
"loss": 0.1969, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.6138790035587188, |
|
"grad_norm": 3.4848266793961225, |
|
"learning_rate": 7.850047604650188e-06, |
|
"loss": 0.1808, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.6156583629893239, |
|
"grad_norm": 3.9698434405868332, |
|
"learning_rate": 7.838554048681783e-06, |
|
"loss": 0.1907, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.6174377224199288, |
|
"grad_norm": 3.5754915463124424, |
|
"learning_rate": 7.827038317693988e-06, |
|
"loss": 0.2427, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.6192170818505338, |
|
"grad_norm": 3.44704896427669, |
|
"learning_rate": 7.815500501648654e-06, |
|
"loss": 0.1475, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.6209964412811388, |
|
"grad_norm": 3.7787759138719026, |
|
"learning_rate": 7.80394069068015e-06, |
|
"loss": 0.1901, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.6227758007117438, |
|
"grad_norm": 3.6440759891971077, |
|
"learning_rate": 7.79235897509468e-06, |
|
"loss": 0.1521, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.6245551601423488, |
|
"grad_norm": 4.074086962945949, |
|
"learning_rate": 7.780755445369563e-06, |
|
"loss": 0.1742, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.6263345195729537, |
|
"grad_norm": 4.258816833648301, |
|
"learning_rate": 7.769130192152538e-06, |
|
"loss": 0.2055, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.6281138790035588, |
|
"grad_norm": 3.6421413226990444, |
|
"learning_rate": 7.757483306261042e-06, |
|
"loss": 0.1915, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6298932384341637, |
|
"grad_norm": 4.1813304853750495, |
|
"learning_rate": 7.745814878681516e-06, |
|
"loss": 0.1854, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6316725978647687, |
|
"grad_norm": 3.4312681127185427, |
|
"learning_rate": 7.734125000568684e-06, |
|
"loss": 0.1589, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6334519572953736, |
|
"grad_norm": 3.691932859374742, |
|
"learning_rate": 7.722413763244837e-06, |
|
"loss": 0.1517, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6352313167259787, |
|
"grad_norm": 2.8857099038516503, |
|
"learning_rate": 7.710681258199136e-06, |
|
"loss": 0.1277, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6370106761565836, |
|
"grad_norm": 3.8603707347524483, |
|
"learning_rate": 7.69892757708688e-06, |
|
"loss": 0.1565, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6387900355871886, |
|
"grad_norm": 3.6189683514325797, |
|
"learning_rate": 7.687152811728799e-06, |
|
"loss": 0.1625, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.6405693950177936, |
|
"grad_norm": 3.863952692860074, |
|
"learning_rate": 7.675357054110337e-06, |
|
"loss": 0.1789, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.6423487544483986, |
|
"grad_norm": 3.5006757396109425, |
|
"learning_rate": 7.663540396380931e-06, |
|
"loss": 0.1458, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.6441281138790036, |
|
"grad_norm": 3.4090764533417697, |
|
"learning_rate": 7.651702930853287e-06, |
|
"loss": 0.1499, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.6459074733096085, |
|
"grad_norm": 3.986819969002751, |
|
"learning_rate": 7.639844750002668e-06, |
|
"loss": 0.1764, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.6476868327402135, |
|
"grad_norm": 3.214419199148135, |
|
"learning_rate": 7.627965946466167e-06, |
|
"loss": 0.1784, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.6494661921708185, |
|
"grad_norm": 3.1907742100716106, |
|
"learning_rate": 7.616066613041977e-06, |
|
"loss": 0.151, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6512455516014235, |
|
"grad_norm": 3.3738199012382597, |
|
"learning_rate": 7.6041468426886785e-06, |
|
"loss": 0.1558, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.6530249110320284, |
|
"grad_norm": 2.4921649322218036, |
|
"learning_rate": 7.592206728524507e-06, |
|
"loss": 0.1037, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.6548042704626335, |
|
"grad_norm": 3.427513472473539, |
|
"learning_rate": 7.580246363826621e-06, |
|
"loss": 0.148, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.6565836298932385, |
|
"grad_norm": 3.9425809473143363, |
|
"learning_rate": 7.568265842030381e-06, |
|
"loss": 0.1847, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6583629893238434, |
|
"grad_norm": 3.770494923466618, |
|
"learning_rate": 7.556265256728618e-06, |
|
"loss": 0.1871, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6601423487544484, |
|
"grad_norm": 3.1566766715340653, |
|
"learning_rate": 7.544244701670894e-06, |
|
"loss": 0.1629, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6619217081850534, |
|
"grad_norm": 3.5267428251192263, |
|
"learning_rate": 7.532204270762786e-06, |
|
"loss": 0.1578, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6637010676156584, |
|
"grad_norm": 3.373930350585529, |
|
"learning_rate": 7.520144058065133e-06, |
|
"loss": 0.1434, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6654804270462633, |
|
"grad_norm": 3.667667080996467, |
|
"learning_rate": 7.50806415779332e-06, |
|
"loss": 0.1508, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6672597864768683, |
|
"grad_norm": 4.082830509418172, |
|
"learning_rate": 7.495964664316525e-06, |
|
"loss": 0.1699, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6690391459074733, |
|
"grad_norm": 3.6753453024599962, |
|
"learning_rate": 7.4838456721569975e-06, |
|
"loss": 0.1625, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6708185053380783, |
|
"grad_norm": 3.3042063467441816, |
|
"learning_rate": 7.471707275989304e-06, |
|
"loss": 0.1516, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.6725978647686833, |
|
"grad_norm": 3.285653246996516, |
|
"learning_rate": 7.459549570639602e-06, |
|
"loss": 0.1455, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6743772241992882, |
|
"grad_norm": 3.912604620042447, |
|
"learning_rate": 7.447372651084896e-06, |
|
"loss": 0.1598, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6761565836298933, |
|
"grad_norm": 3.388018122432041, |
|
"learning_rate": 7.435176612452286e-06, |
|
"loss": 0.1349, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6779359430604982, |
|
"grad_norm": 3.6298575726401117, |
|
"learning_rate": 7.4229615500182396e-06, |
|
"loss": 0.1649, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6797153024911032, |
|
"grad_norm": 2.9662290299329466, |
|
"learning_rate": 7.4107275592078345e-06, |
|
"loss": 0.1293, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.6814946619217082, |
|
"grad_norm": 3.0330498561142614, |
|
"learning_rate": 7.398474735594022e-06, |
|
"loss": 0.13, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6832740213523132, |
|
"grad_norm": 3.7750838819297496, |
|
"learning_rate": 7.386203174896872e-06, |
|
"loss": 0.155, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6850533807829181, |
|
"grad_norm": 3.5511889931195335, |
|
"learning_rate": 7.373912972982838e-06, |
|
"loss": 0.1486, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6868327402135231, |
|
"grad_norm": 3.3129862262476117, |
|
"learning_rate": 7.361604225863992e-06, |
|
"loss": 0.1523, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6886120996441281, |
|
"grad_norm": 4.017853985222379, |
|
"learning_rate": 7.349277029697287e-06, |
|
"loss": 0.1575, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.6903914590747331, |
|
"grad_norm": 3.3738841651189477, |
|
"learning_rate": 7.336931480783801e-06, |
|
"loss": 0.1394, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.6921708185053381, |
|
"grad_norm": 4.435369411733798, |
|
"learning_rate": 7.3245676755679854e-06, |
|
"loss": 0.1796, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.693950177935943, |
|
"grad_norm": 4.027352873273559, |
|
"learning_rate": 7.312185710636911e-06, |
|
"loss": 0.1691, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.6957295373665481, |
|
"grad_norm": 3.53995593495739, |
|
"learning_rate": 7.299785682719512e-06, |
|
"loss": 0.1165, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.697508896797153, |
|
"grad_norm": 3.5358915025442452, |
|
"learning_rate": 7.287367688685835e-06, |
|
"loss": 0.1571, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.699288256227758, |
|
"grad_norm": 5.0260498083426395, |
|
"learning_rate": 7.274931825546279e-06, |
|
"loss": 0.1824, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.701067615658363, |
|
"grad_norm": 3.5490613399244637, |
|
"learning_rate": 7.262478190450834e-06, |
|
"loss": 0.1347, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.702846975088968, |
|
"grad_norm": 4.093087510411513, |
|
"learning_rate": 7.250006880688332e-06, |
|
"loss": 0.1893, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7046263345195729, |
|
"grad_norm": 3.778983455472157, |
|
"learning_rate": 7.2375179936856775e-06, |
|
"loss": 0.1534, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7064056939501779, |
|
"grad_norm": 3.2182769496465284, |
|
"learning_rate": 7.22501162700709e-06, |
|
"loss": 0.156, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.708185053380783, |
|
"grad_norm": 3.482963891501086, |
|
"learning_rate": 7.21248787835334e-06, |
|
"loss": 0.1301, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7099644128113879, |
|
"grad_norm": 3.762530990350532, |
|
"learning_rate": 7.199946845560994e-06, |
|
"loss": 0.1509, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7117437722419929, |
|
"grad_norm": 4.743844763635748, |
|
"learning_rate": 7.1873886266016365e-06, |
|
"loss": 0.2348, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7117437722419929, |
|
"eval_loss": 0.17470039427280426, |
|
"eval_runtime": 1.5671, |
|
"eval_samples_per_second": 29.354, |
|
"eval_steps_per_second": 7.658, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7135231316725978, |
|
"grad_norm": 4.413499424601276, |
|
"learning_rate": 7.174813319581115e-06, |
|
"loss": 0.1762, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7153024911032029, |
|
"grad_norm": 5.148559143301299, |
|
"learning_rate": 7.162221022738768e-06, |
|
"loss": 0.1722, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7170818505338078, |
|
"grad_norm": 3.7957288243424516, |
|
"learning_rate": 7.149611834446664e-06, |
|
"loss": 0.1992, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7188612099644128, |
|
"grad_norm": 3.954367205127323, |
|
"learning_rate": 7.136985853208824e-06, |
|
"loss": 0.1831, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7206405693950177, |
|
"grad_norm": 3.4025416987018215, |
|
"learning_rate": 7.124343177660462e-06, |
|
"loss": 0.1567, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7224199288256228, |
|
"grad_norm": 4.156971489847549, |
|
"learning_rate": 7.111683906567206e-06, |
|
"loss": 0.1812, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7241992882562278, |
|
"grad_norm": 3.7037366272517516, |
|
"learning_rate": 7.099008138824329e-06, |
|
"loss": 0.1414, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7259786476868327, |
|
"grad_norm": 3.1944512609207396, |
|
"learning_rate": 7.086315973455982e-06, |
|
"loss": 0.1599, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7277580071174378, |
|
"grad_norm": 3.722798683278507, |
|
"learning_rate": 7.0736075096144084e-06, |
|
"loss": 0.1721, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.7295373665480427, |
|
"grad_norm": 3.4101797005561068, |
|
"learning_rate": 7.060882846579182e-06, |
|
"loss": 0.1448, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7313167259786477, |
|
"grad_norm": 3.7449036458098437, |
|
"learning_rate": 7.048142083756427e-06, |
|
"loss": 0.1653, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7330960854092526, |
|
"grad_norm": 3.946939388039515, |
|
"learning_rate": 7.035385320678035e-06, |
|
"loss": 0.1815, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7348754448398577, |
|
"grad_norm": 3.569676243453198, |
|
"learning_rate": 7.022612657000898e-06, |
|
"loss": 0.1674, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7366548042704626, |
|
"grad_norm": 3.334380824260123, |
|
"learning_rate": 7.0098241925061215e-06, |
|
"loss": 0.1758, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7384341637010676, |
|
"grad_norm": 4.693501472138011, |
|
"learning_rate": 6.997020027098249e-06, |
|
"loss": 0.1585, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7402135231316725, |
|
"grad_norm": 3.554655958427637, |
|
"learning_rate": 6.9842002608044844e-06, |
|
"loss": 0.1702, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7419928825622776, |
|
"grad_norm": 3.631049807525455, |
|
"learning_rate": 6.971364993773901e-06, |
|
"loss": 0.1652, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7437722419928826, |
|
"grad_norm": 3.4734464720937868, |
|
"learning_rate": 6.958514326276669e-06, |
|
"loss": 0.1527, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7455516014234875, |
|
"grad_norm": 3.818578973067952, |
|
"learning_rate": 6.945648358703269e-06, |
|
"loss": 0.1562, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7473309608540926, |
|
"grad_norm": 3.1783559990305608, |
|
"learning_rate": 6.932767191563703e-06, |
|
"loss": 0.1773, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7491103202846975, |
|
"grad_norm": 3.454645838804028, |
|
"learning_rate": 6.919870925486718e-06, |
|
"loss": 0.1264, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7508896797153025, |
|
"grad_norm": 3.2080196915916876, |
|
"learning_rate": 6.906959661219011e-06, |
|
"loss": 0.1414, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.7526690391459074, |
|
"grad_norm": 3.2517726698710576, |
|
"learning_rate": 6.8940334996244505e-06, |
|
"loss": 0.1484, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7544483985765125, |
|
"grad_norm": 3.5728178334134433, |
|
"learning_rate": 6.881092541683279e-06, |
|
"loss": 0.1391, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7562277580071174, |
|
"grad_norm": 3.3398872172132164, |
|
"learning_rate": 6.8681368884913345e-06, |
|
"loss": 0.1259, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7580071174377224, |
|
"grad_norm": 2.9253409698804074, |
|
"learning_rate": 6.855166641259252e-06, |
|
"loss": 0.1458, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.7597864768683275, |
|
"grad_norm": 3.3129399963010417, |
|
"learning_rate": 6.8421819013116766e-06, |
|
"loss": 0.1549, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7615658362989324, |
|
"grad_norm": 3.4552859362664448, |
|
"learning_rate": 6.829182770086474e-06, |
|
"loss": 0.1713, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.7633451957295374, |
|
"grad_norm": 3.6262646618931016, |
|
"learning_rate": 6.816169349133934e-06, |
|
"loss": 0.1638, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.7651245551601423, |
|
"grad_norm": 3.2767226141366117, |
|
"learning_rate": 6.803141740115979e-06, |
|
"loss": 0.1113, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7669039145907474, |
|
"grad_norm": 5.220582543643068, |
|
"learning_rate": 6.7901000448053676e-06, |
|
"loss": 0.2222, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.7686832740213523, |
|
"grad_norm": 3.330739235101099, |
|
"learning_rate": 6.777044365084907e-06, |
|
"loss": 0.135, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.7704626334519573, |
|
"grad_norm": 3.578688941563653, |
|
"learning_rate": 6.763974802946649e-06, |
|
"loss": 0.1435, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7722419928825622, |
|
"grad_norm": 3.7516696333915696, |
|
"learning_rate": 6.750891460491093e-06, |
|
"loss": 0.2088, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.7740213523131673, |
|
"grad_norm": 4.079680829304015, |
|
"learning_rate": 6.737794439926395e-06, |
|
"loss": 0.2049, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7758007117437722, |
|
"grad_norm": 3.56670111757419, |
|
"learning_rate": 6.724683843567567e-06, |
|
"loss": 0.1652, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7775800711743772, |
|
"grad_norm": 3.757001830650091, |
|
"learning_rate": 6.711559773835672e-06, |
|
"loss": 0.1604, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7793594306049823, |
|
"grad_norm": 3.6983262718145213, |
|
"learning_rate": 6.69842233325703e-06, |
|
"loss": 0.1723, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7811387900355872, |
|
"grad_norm": 4.268395621125127, |
|
"learning_rate": 6.685271624462416e-06, |
|
"loss": 0.2074, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7829181494661922, |
|
"grad_norm": 3.1267941483363204, |
|
"learning_rate": 6.672107750186255e-06, |
|
"loss": 0.1458, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7846975088967971, |
|
"grad_norm": 2.559700416652466, |
|
"learning_rate": 6.658930813265825e-06, |
|
"loss": 0.1236, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.7864768683274022, |
|
"grad_norm": 3.2948385402474973, |
|
"learning_rate": 6.645740916640449e-06, |
|
"loss": 0.1563, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.7882562277580071, |
|
"grad_norm": 2.6420581640130067, |
|
"learning_rate": 6.63253816335069e-06, |
|
"loss": 0.1187, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.7900355871886121, |
|
"grad_norm": 3.1516105206441303, |
|
"learning_rate": 6.619322656537552e-06, |
|
"loss": 0.1603, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.791814946619217, |
|
"grad_norm": 3.078043180573147, |
|
"learning_rate": 6.606094499441671e-06, |
|
"loss": 0.1479, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.7935943060498221, |
|
"grad_norm": 3.441892484325609, |
|
"learning_rate": 6.592853795402502e-06, |
|
"loss": 0.1506, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.7953736654804271, |
|
"grad_norm": 3.7162717026320093, |
|
"learning_rate": 6.579600647857525e-06, |
|
"loss": 0.1519, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.797153024911032, |
|
"grad_norm": 4.017335160782036, |
|
"learning_rate": 6.566335160341425e-06, |
|
"loss": 0.1877, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.798932384341637, |
|
"grad_norm": 2.8992859926305665, |
|
"learning_rate": 6.553057436485289e-06, |
|
"loss": 0.1495, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.800711743772242, |
|
"grad_norm": 3.055050506776708, |
|
"learning_rate": 6.539767580015799e-06, |
|
"loss": 0.1846, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.802491103202847, |
|
"grad_norm": 4.018876573355116, |
|
"learning_rate": 6.52646569475441e-06, |
|
"loss": 0.1464, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8042704626334519, |
|
"grad_norm": 4.157425073434661, |
|
"learning_rate": 6.513151884616556e-06, |
|
"loss": 0.2121, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.806049822064057, |
|
"grad_norm": 3.8462535975275136, |
|
"learning_rate": 6.499826253610823e-06, |
|
"loss": 0.1557, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8078291814946619, |
|
"grad_norm": 4.119598939105835, |
|
"learning_rate": 6.486488905838143e-06, |
|
"loss": 0.1318, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8096085409252669, |
|
"grad_norm": 3.2012231259832156, |
|
"learning_rate": 6.473139945490984e-06, |
|
"loss": 0.1193, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8113879003558719, |
|
"grad_norm": 3.7065510758705162, |
|
"learning_rate": 6.459779476852528e-06, |
|
"loss": 0.1569, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8131672597864769, |
|
"grad_norm": 3.650129217555027, |
|
"learning_rate": 6.446407604295863e-06, |
|
"loss": 0.1768, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.8149466192170819, |
|
"grad_norm": 3.821786422393586, |
|
"learning_rate": 6.433024432283169e-06, |
|
"loss": 0.1334, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8167259786476868, |
|
"grad_norm": 3.6206058850570075, |
|
"learning_rate": 6.41963006536489e-06, |
|
"loss": 0.1524, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8185053380782918, |
|
"grad_norm": 3.5176284090450576, |
|
"learning_rate": 6.4062246081789316e-06, |
|
"loss": 0.1524, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.8202846975088968, |
|
"grad_norm": 3.001959964092433, |
|
"learning_rate": 6.392808165449836e-06, |
|
"loss": 0.1087, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8220640569395018, |
|
"grad_norm": 3.12814019508968, |
|
"learning_rate": 6.379380841987965e-06, |
|
"loss": 0.1357, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8238434163701067, |
|
"grad_norm": 3.5044532514196414, |
|
"learning_rate": 6.365942742688684e-06, |
|
"loss": 0.1246, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8256227758007118, |
|
"grad_norm": 4.61118686008092, |
|
"learning_rate": 6.352493972531535e-06, |
|
"loss": 0.2079, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8274021352313167, |
|
"grad_norm": 4.142773881196344, |
|
"learning_rate": 6.339034636579425e-06, |
|
"loss": 0.1684, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8291814946619217, |
|
"grad_norm": 3.350640393131762, |
|
"learning_rate": 6.325564839977802e-06, |
|
"loss": 0.1258, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8309608540925267, |
|
"grad_norm": 2.917654176587402, |
|
"learning_rate": 6.312084687953835e-06, |
|
"loss": 0.1147, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8327402135231317, |
|
"grad_norm": 4.152881838659127, |
|
"learning_rate": 6.298594285815585e-06, |
|
"loss": 0.1827, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.8345195729537367, |
|
"grad_norm": 4.238662241797531, |
|
"learning_rate": 6.2850937389511936e-06, |
|
"loss": 0.1982, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8362989323843416, |
|
"grad_norm": 3.3674754877986444, |
|
"learning_rate": 6.271583152828049e-06, |
|
"loss": 0.1186, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8380782918149466, |
|
"grad_norm": 3.4069711502508087, |
|
"learning_rate": 6.258062632991972e-06, |
|
"loss": 0.1168, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8398576512455516, |
|
"grad_norm": 3.581823908366754, |
|
"learning_rate": 6.244532285066382e-06, |
|
"loss": 0.1499, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8416370106761566, |
|
"grad_norm": 3.2886298817884603, |
|
"learning_rate": 6.2309922147514775e-06, |
|
"loss": 0.1482, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8434163701067615, |
|
"grad_norm": 3.2778236414764934, |
|
"learning_rate": 6.2174425278234115e-06, |
|
"loss": 0.1836, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8451957295373665, |
|
"grad_norm": 3.8889151695336657, |
|
"learning_rate": 6.20388333013346e-06, |
|
"loss": 0.13, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8469750889679716, |
|
"grad_norm": 3.4566716008330274, |
|
"learning_rate": 6.190314727607196e-06, |
|
"loss": 0.1785, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8487544483985765, |
|
"grad_norm": 3.7315510173570794, |
|
"learning_rate": 6.176736826243671e-06, |
|
"loss": 0.1684, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8505338078291815, |
|
"grad_norm": 3.5813877735346757, |
|
"learning_rate": 6.163149732114571e-06, |
|
"loss": 0.1558, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.8523131672597865, |
|
"grad_norm": 4.005118968280642, |
|
"learning_rate": 6.149553551363404e-06, |
|
"loss": 0.1553, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.8540925266903915, |
|
"grad_norm": 3.5909697498087594, |
|
"learning_rate": 6.1359483902046605e-06, |
|
"loss": 0.1712, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.8558718861209964, |
|
"grad_norm": 3.464696089600724, |
|
"learning_rate": 6.122334354922984e-06, |
|
"loss": 0.1518, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.8576512455516014, |
|
"grad_norm": 3.0547554012515, |
|
"learning_rate": 6.108711551872347e-06, |
|
"loss": 0.1504, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.8594306049822064, |
|
"grad_norm": 3.8520942919450856, |
|
"learning_rate": 6.095080087475218e-06, |
|
"loss": 0.1515, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.8612099644128114, |
|
"grad_norm": 3.3208436089642883, |
|
"learning_rate": 6.0814400682217236e-06, |
|
"loss": 0.1699, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.8629893238434164, |
|
"grad_norm": 3.393845767945605, |
|
"learning_rate": 6.067791600668823e-06, |
|
"loss": 0.1172, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.8647686832740213, |
|
"grad_norm": 2.9759418943725375, |
|
"learning_rate": 6.054134791439479e-06, |
|
"loss": 0.1088, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.8665480427046264, |
|
"grad_norm": 3.5227735163218905, |
|
"learning_rate": 6.040469747221815e-06, |
|
"loss": 0.1299, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.8683274021352313, |
|
"grad_norm": 3.8557853832447333, |
|
"learning_rate": 6.026796574768288e-06, |
|
"loss": 0.146, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.8701067615658363, |
|
"grad_norm": 3.664493633389319, |
|
"learning_rate": 6.013115380894854e-06, |
|
"loss": 0.1449, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.8718861209964412, |
|
"grad_norm": 2.9569541290013293, |
|
"learning_rate": 5.999426272480133e-06, |
|
"loss": 0.1083, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.8736654804270463, |
|
"grad_norm": 3.8525110837158993, |
|
"learning_rate": 5.985729356464575e-06, |
|
"loss": 0.1614, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.8754448398576512, |
|
"grad_norm": 3.6917851367927907, |
|
"learning_rate": 5.972024739849622e-06, |
|
"loss": 0.1487, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.8772241992882562, |
|
"grad_norm": 2.4847847450957117, |
|
"learning_rate": 5.958312529696874e-06, |
|
"loss": 0.1036, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.8790035587188612, |
|
"grad_norm": 3.8543766719358996, |
|
"learning_rate": 5.944592833127253e-06, |
|
"loss": 0.1981, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8807829181494662, |
|
"grad_norm": 3.412156004706168, |
|
"learning_rate": 5.9308657573201645e-06, |
|
"loss": 0.1776, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.8825622775800712, |
|
"grad_norm": 3.56981516764016, |
|
"learning_rate": 5.917131409512663e-06, |
|
"loss": 0.1416, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.8843416370106761, |
|
"grad_norm": 3.2852863927468348, |
|
"learning_rate": 5.903389896998611e-06, |
|
"loss": 0.1463, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.8861209964412812, |
|
"grad_norm": 3.56187088945062, |
|
"learning_rate": 5.889641327127843e-06, |
|
"loss": 0.1552, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.8879003558718861, |
|
"grad_norm": 3.571053086215999, |
|
"learning_rate": 5.875885807305326e-06, |
|
"loss": 0.1668, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.8896797153024911, |
|
"grad_norm": 3.0023092315531277, |
|
"learning_rate": 5.862123444990319e-06, |
|
"loss": 0.1339, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.891459074733096, |
|
"grad_norm": 4.115990208148188, |
|
"learning_rate": 5.848354347695537e-06, |
|
"loss": 0.2309, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.8932384341637011, |
|
"grad_norm": 3.4286397870973397, |
|
"learning_rate": 5.83457862298631e-06, |
|
"loss": 0.1722, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.895017793594306, |
|
"grad_norm": 3.9424956175085395, |
|
"learning_rate": 5.8207963784797396e-06, |
|
"loss": 0.1657, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.896797153024911, |
|
"grad_norm": 3.496118042191163, |
|
"learning_rate": 5.807007721843862e-06, |
|
"loss": 0.1691, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.8985765124555161, |
|
"grad_norm": 4.386329827251197, |
|
"learning_rate": 5.793212760796804e-06, |
|
"loss": 0.2153, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.900355871886121, |
|
"grad_norm": 3.452678485505408, |
|
"learning_rate": 5.779411603105947e-06, |
|
"loss": 0.1809, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.902135231316726, |
|
"grad_norm": 3.268102687515428, |
|
"learning_rate": 5.765604356587076e-06, |
|
"loss": 0.1487, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9039145907473309, |
|
"grad_norm": 3.5804298490864523, |
|
"learning_rate": 5.751791129103545e-06, |
|
"loss": 0.1553, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.905693950177936, |
|
"grad_norm": 3.6716938720566166, |
|
"learning_rate": 5.737972028565431e-06, |
|
"loss": 0.1782, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9074733096085409, |
|
"grad_norm": 3.704013391896724, |
|
"learning_rate": 5.7241471629286934e-06, |
|
"loss": 0.1587, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9092526690391459, |
|
"grad_norm": 3.860479895487176, |
|
"learning_rate": 5.7103166401943276e-06, |
|
"loss": 0.1731, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.9110320284697508, |
|
"grad_norm": 3.7352710946545695, |
|
"learning_rate": 5.696480568407523e-06, |
|
"loss": 0.1441, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.9128113879003559, |
|
"grad_norm": 3.9343306973648633, |
|
"learning_rate": 5.682639055656817e-06, |
|
"loss": 0.1825, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.9145907473309609, |
|
"grad_norm": 4.183432823639452, |
|
"learning_rate": 5.668792210073255e-06, |
|
"loss": 0.1987, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.9163701067615658, |
|
"grad_norm": 3.819373517829799, |
|
"learning_rate": 5.654940139829544e-06, |
|
"loss": 0.1577, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9181494661921709, |
|
"grad_norm": 3.4678858664233605, |
|
"learning_rate": 5.641082953139201e-06, |
|
"loss": 0.1375, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.9199288256227758, |
|
"grad_norm": 3.7854134891077993, |
|
"learning_rate": 5.6272207582557195e-06, |
|
"loss": 0.1186, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.9217081850533808, |
|
"grad_norm": 3.224470484632322, |
|
"learning_rate": 5.61335366347171e-06, |
|
"loss": 0.12, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9234875444839857, |
|
"grad_norm": 3.572159866348334, |
|
"learning_rate": 5.599481777118071e-06, |
|
"loss": 0.168, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9252669039145908, |
|
"grad_norm": 3.677005672206943, |
|
"learning_rate": 5.585605207563124e-06, |
|
"loss": 0.1271, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.9270462633451957, |
|
"grad_norm": 3.276226113295591, |
|
"learning_rate": 5.571724063211782e-06, |
|
"loss": 0.1381, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.9288256227758007, |
|
"grad_norm": 3.3501072564165475, |
|
"learning_rate": 5.557838452504692e-06, |
|
"loss": 0.1094, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.9306049822064056, |
|
"grad_norm": 4.128041439228439, |
|
"learning_rate": 5.5439484839173996e-06, |
|
"loss": 0.1703, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9323843416370107, |
|
"grad_norm": 3.3948206724745233, |
|
"learning_rate": 5.530054265959486e-06, |
|
"loss": 0.1444, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.9341637010676157, |
|
"grad_norm": 3.60777887447379, |
|
"learning_rate": 5.516155907173735e-06, |
|
"loss": 0.1745, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.9359430604982206, |
|
"grad_norm": 2.9930062844193897, |
|
"learning_rate": 5.5022535161352764e-06, |
|
"loss": 0.1348, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.9377224199288257, |
|
"grad_norm": 4.125114417320741, |
|
"learning_rate": 5.488347201450741e-06, |
|
"loss": 0.1487, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.9395017793594306, |
|
"grad_norm": 3.2112862564154554, |
|
"learning_rate": 5.47443707175741e-06, |
|
"loss": 0.1511, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.9412811387900356, |
|
"grad_norm": 3.4006851479550044, |
|
"learning_rate": 5.46052323572237e-06, |
|
"loss": 0.1376, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.9430604982206405, |
|
"grad_norm": 2.9202443660944475, |
|
"learning_rate": 5.446605802041662e-06, |
|
"loss": 0.1161, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.9448398576512456, |
|
"grad_norm": 2.807167924855131, |
|
"learning_rate": 5.432684879439428e-06, |
|
"loss": 0.1225, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.9466192170818505, |
|
"grad_norm": 3.0447693407425938, |
|
"learning_rate": 5.418760576667071e-06, |
|
"loss": 0.103, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.9483985765124555, |
|
"grad_norm": 4.606186286395574, |
|
"learning_rate": 5.404833002502398e-06, |
|
"loss": 0.1992, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.9501779359430605, |
|
"grad_norm": 4.233369716179881, |
|
"learning_rate": 5.39090226574877e-06, |
|
"loss": 0.1656, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.9519572953736655, |
|
"grad_norm": 4.350748628823142, |
|
"learning_rate": 5.376968475234258e-06, |
|
"loss": 0.1993, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.9537366548042705, |
|
"grad_norm": 2.8977228562757786, |
|
"learning_rate": 5.363031739810787e-06, |
|
"loss": 0.116, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.9555160142348754, |
|
"grad_norm": 5.063328608519483, |
|
"learning_rate": 5.349092168353291e-06, |
|
"loss": 0.1687, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.9572953736654805, |
|
"grad_norm": 3.124045245649726, |
|
"learning_rate": 5.335149869758855e-06, |
|
"loss": 0.1258, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.9590747330960854, |
|
"grad_norm": 3.898844668783439, |
|
"learning_rate": 5.32120495294587e-06, |
|
"loss": 0.1531, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.9608540925266904, |
|
"grad_norm": 3.138008081648577, |
|
"learning_rate": 5.3072575268531835e-06, |
|
"loss": 0.1587, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.9626334519572953, |
|
"grad_norm": 3.7512857304520644, |
|
"learning_rate": 5.293307700439242e-06, |
|
"loss": 0.1681, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.9644128113879004, |
|
"grad_norm": 4.700357522176544, |
|
"learning_rate": 5.2793555826812456e-06, |
|
"loss": 0.1808, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.9661921708185054, |
|
"grad_norm": 3.3589282311904114, |
|
"learning_rate": 5.265401282574294e-06, |
|
"loss": 0.1195, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.9679715302491103, |
|
"grad_norm": 3.7977336354106352, |
|
"learning_rate": 5.2514449091305375e-06, |
|
"loss": 0.149, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.9697508896797153, |
|
"grad_norm": 3.996669567303368, |
|
"learning_rate": 5.237486571378317e-06, |
|
"loss": 0.1585, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.9715302491103203, |
|
"grad_norm": 3.4967021955368875, |
|
"learning_rate": 5.22352637836133e-06, |
|
"loss": 0.1936, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.9733096085409253, |
|
"grad_norm": 3.5071052835136682, |
|
"learning_rate": 5.209564439137755e-06, |
|
"loss": 0.1411, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.9750889679715302, |
|
"grad_norm": 3.650287907900508, |
|
"learning_rate": 5.195600862779421e-06, |
|
"loss": 0.1984, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.9768683274021353, |
|
"grad_norm": 2.692992945251571, |
|
"learning_rate": 5.181635758370942e-06, |
|
"loss": 0.1011, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.9786476868327402, |
|
"grad_norm": 3.8684013891691715, |
|
"learning_rate": 5.167669235008871e-06, |
|
"loss": 0.1834, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.9804270462633452, |
|
"grad_norm": 4.375771826172036, |
|
"learning_rate": 5.153701401800845e-06, |
|
"loss": 0.2019, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.9822064056939501, |
|
"grad_norm": 3.6733708886716725, |
|
"learning_rate": 5.139732367864736e-06, |
|
"loss": 0.1626, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.9839857651245552, |
|
"grad_norm": 3.552693676135797, |
|
"learning_rate": 5.1257622423277934e-06, |
|
"loss": 0.1437, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.9857651245551602, |
|
"grad_norm": 3.133536922635619, |
|
"learning_rate": 5.111791134325793e-06, |
|
"loss": 0.1438, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.9875444839857651, |
|
"grad_norm": 2.853933989930919, |
|
"learning_rate": 5.097819153002192e-06, |
|
"loss": 0.1317, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.9893238434163701, |
|
"grad_norm": 3.0766184099788645, |
|
"learning_rate": 5.083846407507263e-06, |
|
"loss": 0.1244, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.9911032028469751, |
|
"grad_norm": 3.9830375074676616, |
|
"learning_rate": 5.0698730069972535e-06, |
|
"loss": 0.2011, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.9928825622775801, |
|
"grad_norm": 2.954078898504635, |
|
"learning_rate": 5.055899060633524e-06, |
|
"loss": 0.1169, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.994661921708185, |
|
"grad_norm": 3.164268362537539, |
|
"learning_rate": 5.041924677581702e-06, |
|
"loss": 0.1604, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.99644128113879, |
|
"grad_norm": 3.4677986840901314, |
|
"learning_rate": 5.0279499670108245e-06, |
|
"loss": 0.1572, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.998220640569395, |
|
"grad_norm": 3.087777487097337, |
|
"learning_rate": 5.013975038092491e-06, |
|
"loss": 0.1382, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 3.096258409969609, |
|
"learning_rate": 5e-06, |
|
"loss": 0.1124, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.001779359430605, |
|
"grad_norm": 2.49258938134702, |
|
"learning_rate": 4.98602496190751e-06, |
|
"loss": 0.0812, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.00355871886121, |
|
"grad_norm": 2.1317294024612186, |
|
"learning_rate": 4.9720500329891755e-06, |
|
"loss": 0.0678, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.0053380782918149, |
|
"grad_norm": 3.2101438157515814, |
|
"learning_rate": 4.9580753224183005e-06, |
|
"loss": 0.103, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.00711743772242, |
|
"grad_norm": 2.5730805864012063, |
|
"learning_rate": 4.944100939366478e-06, |
|
"loss": 0.0766, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.008896797153025, |
|
"grad_norm": 2.02538773346026, |
|
"learning_rate": 4.930126993002748e-06, |
|
"loss": 0.0661, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.01067615658363, |
|
"grad_norm": 2.4237925079463465, |
|
"learning_rate": 4.9161535924927375e-06, |
|
"loss": 0.0777, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.0124555160142348, |
|
"grad_norm": 2.4201173189692238, |
|
"learning_rate": 4.90218084699781e-06, |
|
"loss": 0.0649, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.0142348754448398, |
|
"grad_norm": 2.8976565480214846, |
|
"learning_rate": 4.888208865674208e-06, |
|
"loss": 0.0944, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.0160142348754448, |
|
"grad_norm": 2.519162701735869, |
|
"learning_rate": 4.874237757672209e-06, |
|
"loss": 0.0806, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.0177935943060499, |
|
"grad_norm": 3.5573499198132965, |
|
"learning_rate": 4.8602676321352646e-06, |
|
"loss": 0.1123, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.019572953736655, |
|
"grad_norm": 3.014022999200103, |
|
"learning_rate": 4.846298598199155e-06, |
|
"loss": 0.0753, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.0213523131672597, |
|
"grad_norm": 3.000238294657395, |
|
"learning_rate": 4.832330764991131e-06, |
|
"loss": 0.072, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.0231316725978647, |
|
"grad_norm": 3.497142084523458, |
|
"learning_rate": 4.81836424162906e-06, |
|
"loss": 0.0978, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.0249110320284698, |
|
"grad_norm": 3.5356524833252583, |
|
"learning_rate": 4.80439913722058e-06, |
|
"loss": 0.0613, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.0266903914590748, |
|
"grad_norm": 3.201767069725305, |
|
"learning_rate": 4.790435560862247e-06, |
|
"loss": 0.0863, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.0284697508896796, |
|
"grad_norm": 3.7687461139582537, |
|
"learning_rate": 4.776473621638673e-06, |
|
"loss": 0.0698, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.0302491103202847, |
|
"grad_norm": 3.6727514395767935, |
|
"learning_rate": 4.762513428621684e-06, |
|
"loss": 0.0858, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.0320284697508897, |
|
"grad_norm": 3.926789188446102, |
|
"learning_rate": 4.748555090869464e-06, |
|
"loss": 0.0662, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.0338078291814947, |
|
"grad_norm": 3.5378643160884917, |
|
"learning_rate": 4.734598717425706e-06, |
|
"loss": 0.0882, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.0355871886120998, |
|
"grad_norm": 3.900256498827343, |
|
"learning_rate": 4.720644417318755e-06, |
|
"loss": 0.0983, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.0373665480427046, |
|
"grad_norm": 5.361662284420851, |
|
"learning_rate": 4.70669229956076e-06, |
|
"loss": 0.0939, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.0391459074733096, |
|
"grad_norm": 3.760483798744191, |
|
"learning_rate": 4.692742473146818e-06, |
|
"loss": 0.0795, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.0409252669039146, |
|
"grad_norm": 3.051777090685822, |
|
"learning_rate": 4.678795047054131e-06, |
|
"loss": 0.0848, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.0427046263345197, |
|
"grad_norm": 3.3498107213541513, |
|
"learning_rate": 4.664850130241146e-06, |
|
"loss": 0.0711, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.0444839857651245, |
|
"grad_norm": 3.0154701479306816, |
|
"learning_rate": 4.650907831646711e-06, |
|
"loss": 0.064, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.0462633451957295, |
|
"grad_norm": 2.907194365738717, |
|
"learning_rate": 4.636968260189214e-06, |
|
"loss": 0.1008, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.0480427046263345, |
|
"grad_norm": 2.733420730155859, |
|
"learning_rate": 4.623031524765744e-06, |
|
"loss": 0.0627, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.0498220640569396, |
|
"grad_norm": 3.205837671620435, |
|
"learning_rate": 4.609097734251231e-06, |
|
"loss": 0.0764, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.0516014234875444, |
|
"grad_norm": 3.4622560360621675, |
|
"learning_rate": 4.595166997497605e-06, |
|
"loss": 0.0629, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.0533807829181494, |
|
"grad_norm": 2.8187627517398792, |
|
"learning_rate": 4.58123942333293e-06, |
|
"loss": 0.073, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.0551601423487544, |
|
"grad_norm": 3.1602244408151403, |
|
"learning_rate": 4.567315120560573e-06, |
|
"loss": 0.0683, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.0569395017793595, |
|
"grad_norm": 3.07152764454429, |
|
"learning_rate": 4.553394197958339e-06, |
|
"loss": 0.0746, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.0587188612099645, |
|
"grad_norm": 2.7667129106549644, |
|
"learning_rate": 4.539476764277631e-06, |
|
"loss": 0.059, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.0604982206405693, |
|
"grad_norm": 2.5591833891693234, |
|
"learning_rate": 4.525562928242592e-06, |
|
"loss": 0.054, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.0622775800711743, |
|
"grad_norm": 3.767591354050939, |
|
"learning_rate": 4.511652798549261e-06, |
|
"loss": 0.0829, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.0640569395017794, |
|
"grad_norm": 3.236688188679765, |
|
"learning_rate": 4.497746483864725e-06, |
|
"loss": 0.0638, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.0658362989323844, |
|
"grad_norm": 2.410742088045987, |
|
"learning_rate": 4.483844092826267e-06, |
|
"loss": 0.0479, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.0676156583629894, |
|
"grad_norm": 2.3953534514595716, |
|
"learning_rate": 4.469945734040516e-06, |
|
"loss": 0.0645, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.0676156583629894, |
|
"eval_loss": 0.18102993071079254, |
|
"eval_runtime": 1.5734, |
|
"eval_samples_per_second": 29.237, |
|
"eval_steps_per_second": 7.627, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.0693950177935942, |
|
"grad_norm": 2.963352148979262, |
|
"learning_rate": 4.456051516082603e-06, |
|
"loss": 0.0657, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.0711743772241993, |
|
"grad_norm": 4.234445672360233, |
|
"learning_rate": 4.442161547495309e-06, |
|
"loss": 0.0903, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.0729537366548043, |
|
"grad_norm": 3.9938479967371863, |
|
"learning_rate": 4.42827593678822e-06, |
|
"loss": 0.0828, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.0747330960854093, |
|
"grad_norm": 3.6603963572722416, |
|
"learning_rate": 4.414394792436877e-06, |
|
"loss": 0.0688, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.0765124555160142, |
|
"grad_norm": 3.3203476742650335, |
|
"learning_rate": 4.400518222881931e-06, |
|
"loss": 0.087, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.0782918149466192, |
|
"grad_norm": 3.204993197825429, |
|
"learning_rate": 4.386646336528291e-06, |
|
"loss": 0.0664, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.0800711743772242, |
|
"grad_norm": 2.87689071504344, |
|
"learning_rate": 4.372779241744282e-06, |
|
"loss": 0.0606, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.0818505338078293, |
|
"grad_norm": 3.012877453762652, |
|
"learning_rate": 4.358917046860799e-06, |
|
"loss": 0.0725, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.083629893238434, |
|
"grad_norm": 3.082431549143466, |
|
"learning_rate": 4.345059860170458e-06, |
|
"loss": 0.079, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.085409252669039, |
|
"grad_norm": 3.5896517604957885, |
|
"learning_rate": 4.331207789926746e-06, |
|
"loss": 0.068, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.0871886120996441, |
|
"grad_norm": 2.6892899111940194, |
|
"learning_rate": 4.317360944343184e-06, |
|
"loss": 0.065, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.0889679715302492, |
|
"grad_norm": 2.78546076531323, |
|
"learning_rate": 4.303519431592479e-06, |
|
"loss": 0.0594, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.0907473309608542, |
|
"grad_norm": 4.034390494410621, |
|
"learning_rate": 4.289683359805673e-06, |
|
"loss": 0.0913, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.092526690391459, |
|
"grad_norm": 3.585238132373164, |
|
"learning_rate": 4.275852837071309e-06, |
|
"loss": 0.077, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.094306049822064, |
|
"grad_norm": 2.664050838231545, |
|
"learning_rate": 4.26202797143457e-06, |
|
"loss": 0.0645, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.096085409252669, |
|
"grad_norm": 3.0955550535898775, |
|
"learning_rate": 4.248208870896456e-06, |
|
"loss": 0.071, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.097864768683274, |
|
"grad_norm": 3.2411922872194983, |
|
"learning_rate": 4.234395643412925e-06, |
|
"loss": 0.0696, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.099644128113879, |
|
"grad_norm": 3.3462144103342992, |
|
"learning_rate": 4.220588396894055e-06, |
|
"loss": 0.066, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.101423487544484, |
|
"grad_norm": 3.2005723505659653, |
|
"learning_rate": 4.2067872392031965e-06, |
|
"loss": 0.0717, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.103202846975089, |
|
"grad_norm": 3.365080939241089, |
|
"learning_rate": 4.192992278156141e-06, |
|
"loss": 0.0706, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.104982206405694, |
|
"grad_norm": 3.5334324156691026, |
|
"learning_rate": 4.179203621520262e-06, |
|
"loss": 0.0768, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.106761565836299, |
|
"grad_norm": 3.963095560652706, |
|
"learning_rate": 4.165421377013691e-06, |
|
"loss": 0.0643, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.1085409252669038, |
|
"grad_norm": 3.65738311965089, |
|
"learning_rate": 4.151645652304465e-06, |
|
"loss": 0.0712, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.1103202846975089, |
|
"grad_norm": 3.5190717729232674, |
|
"learning_rate": 4.137876555009684e-06, |
|
"loss": 0.0903, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.112099644128114, |
|
"grad_norm": 3.2941462803496147, |
|
"learning_rate": 4.124114192694676e-06, |
|
"loss": 0.0639, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.113879003558719, |
|
"grad_norm": 3.4373286405888064, |
|
"learning_rate": 4.110358672872158e-06, |
|
"loss": 0.0765, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.1156583629893237, |
|
"grad_norm": 3.4242164931041748, |
|
"learning_rate": 4.0966101030013915e-06, |
|
"loss": 0.091, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.1174377224199288, |
|
"grad_norm": 2.7396939546404595, |
|
"learning_rate": 4.082868590487339e-06, |
|
"loss": 0.067, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.1192170818505338, |
|
"grad_norm": 2.5085548746997706, |
|
"learning_rate": 4.069134242679837e-06, |
|
"loss": 0.0565, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.1209964412811388, |
|
"grad_norm": 3.61653759638108, |
|
"learning_rate": 4.055407166872748e-06, |
|
"loss": 0.0694, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.1227758007117439, |
|
"grad_norm": 3.924086165616401, |
|
"learning_rate": 4.041687470303127e-06, |
|
"loss": 0.093, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.1245551601423487, |
|
"grad_norm": 3.078397331666336, |
|
"learning_rate": 4.02797526015038e-06, |
|
"loss": 0.0938, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.1263345195729537, |
|
"grad_norm": 2.897156135692726, |
|
"learning_rate": 4.014270643535427e-06, |
|
"loss": 0.0612, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.1281138790035588, |
|
"grad_norm": 3.536487947974576, |
|
"learning_rate": 4.000573727519868e-06, |
|
"loss": 0.0806, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.1298932384341638, |
|
"grad_norm": 3.350311762810452, |
|
"learning_rate": 3.9868846191051465e-06, |
|
"loss": 0.0867, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.1316725978647686, |
|
"grad_norm": 3.1568541455796644, |
|
"learning_rate": 3.973203425231715e-06, |
|
"loss": 0.0808, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.1334519572953736, |
|
"grad_norm": 2.8416600385535755, |
|
"learning_rate": 3.959530252778187e-06, |
|
"loss": 0.0959, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.1352313167259787, |
|
"grad_norm": 3.4359867815385434, |
|
"learning_rate": 3.945865208560522e-06, |
|
"loss": 0.0914, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.1370106761565837, |
|
"grad_norm": 3.026250038952647, |
|
"learning_rate": 3.932208399331177e-06, |
|
"loss": 0.0917, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.1387900355871885, |
|
"grad_norm": 3.9844112920637484, |
|
"learning_rate": 3.918559931778277e-06, |
|
"loss": 0.0771, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.1405693950177935, |
|
"grad_norm": 2.8506946851936723, |
|
"learning_rate": 3.904919912524784e-06, |
|
"loss": 0.0542, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.1423487544483986, |
|
"grad_norm": 2.8888466376546766, |
|
"learning_rate": 3.891288448127654e-06, |
|
"loss": 0.0676, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.1441281138790036, |
|
"grad_norm": 3.5147869038777055, |
|
"learning_rate": 3.877665645077017e-06, |
|
"loss": 0.0799, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.1459074733096086, |
|
"grad_norm": 3.7001804909560128, |
|
"learning_rate": 3.86405160979534e-06, |
|
"loss": 0.0922, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.1476868327402134, |
|
"grad_norm": 2.954393191383862, |
|
"learning_rate": 3.850446448636597e-06, |
|
"loss": 0.0598, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.1494661921708185, |
|
"grad_norm": 2.570616333369147, |
|
"learning_rate": 3.8368502678854296e-06, |
|
"loss": 0.0521, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.1512455516014235, |
|
"grad_norm": 2.7447183888299382, |
|
"learning_rate": 3.8232631737563306e-06, |
|
"loss": 0.0577, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.1530249110320285, |
|
"grad_norm": 3.417849309898849, |
|
"learning_rate": 3.809685272392804e-06, |
|
"loss": 0.0699, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.1548042704626336, |
|
"grad_norm": 3.7199338550990126, |
|
"learning_rate": 3.796116669866543e-06, |
|
"loss": 0.0927, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.1565836298932384, |
|
"grad_norm": 2.912376410556558, |
|
"learning_rate": 3.78255747217659e-06, |
|
"loss": 0.0599, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.1583629893238434, |
|
"grad_norm": 3.047981747921209, |
|
"learning_rate": 3.769007785248523e-06, |
|
"loss": 0.06, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.1601423487544484, |
|
"grad_norm": 2.980030192357363, |
|
"learning_rate": 3.7554677149336186e-06, |
|
"loss": 0.0659, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.1619217081850535, |
|
"grad_norm": 3.2281302889688783, |
|
"learning_rate": 3.7419373670080284e-06, |
|
"loss": 0.0829, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.1637010676156583, |
|
"grad_norm": 3.6641816775768676, |
|
"learning_rate": 3.7284168471719527e-06, |
|
"loss": 0.0886, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.1654804270462633, |
|
"grad_norm": 3.172426621239396, |
|
"learning_rate": 3.7149062610488085e-06, |
|
"loss": 0.0776, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.1672597864768683, |
|
"grad_norm": 3.127127728071547, |
|
"learning_rate": 3.701405714184416e-06, |
|
"loss": 0.074, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.1690391459074734, |
|
"grad_norm": 2.6252185816273617, |
|
"learning_rate": 3.687915312046166e-06, |
|
"loss": 0.0784, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.1708185053380782, |
|
"grad_norm": 2.8704550081759264, |
|
"learning_rate": 3.6744351600221994e-06, |
|
"loss": 0.053, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.1725978647686832, |
|
"grad_norm": 3.154603448946708, |
|
"learning_rate": 3.6609653634205773e-06, |
|
"loss": 0.1122, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.1743772241992882, |
|
"grad_norm": 4.310991196581793, |
|
"learning_rate": 3.647506027468467e-06, |
|
"loss": 0.0752, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.1761565836298933, |
|
"grad_norm": 3.213012083336547, |
|
"learning_rate": 3.6340572573113176e-06, |
|
"loss": 0.0763, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.1779359430604983, |
|
"grad_norm": 2.6694949288475156, |
|
"learning_rate": 3.6206191580120346e-06, |
|
"loss": 0.0671, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.1797153024911031, |
|
"grad_norm": 3.2420859377445583, |
|
"learning_rate": 3.6071918345501655e-06, |
|
"loss": 0.0652, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.1814946619217082, |
|
"grad_norm": 2.8097788194753948, |
|
"learning_rate": 3.5937753918210705e-06, |
|
"loss": 0.068, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.1832740213523132, |
|
"grad_norm": 2.990966033932749, |
|
"learning_rate": 3.5803699346351117e-06, |
|
"loss": 0.067, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.1850533807829182, |
|
"grad_norm": 2.449202882258069, |
|
"learning_rate": 3.566975567716833e-06, |
|
"loss": 0.0663, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.1868327402135233, |
|
"grad_norm": 3.2756393225167573, |
|
"learning_rate": 3.5535923957041374e-06, |
|
"loss": 0.0664, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.188612099644128, |
|
"grad_norm": 2.74679443326539, |
|
"learning_rate": 3.540220523147474e-06, |
|
"loss": 0.0651, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.190391459074733, |
|
"grad_norm": 3.1843811063038263, |
|
"learning_rate": 3.5268600545090183e-06, |
|
"loss": 0.0623, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.1921708185053381, |
|
"grad_norm": 2.7931635940934965, |
|
"learning_rate": 3.513511094161858e-06, |
|
"loss": 0.0662, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.193950177935943, |
|
"grad_norm": 3.0142765438390193, |
|
"learning_rate": 3.5001737463891793e-06, |
|
"loss": 0.0568, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.195729537366548, |
|
"grad_norm": 2.8198984591232783, |
|
"learning_rate": 3.4868481153834454e-06, |
|
"loss": 0.0885, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.197508896797153, |
|
"grad_norm": 3.0661582784824932, |
|
"learning_rate": 3.4735343052455905e-06, |
|
"loss": 0.0647, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.199288256227758, |
|
"grad_norm": 3.4892039472840697, |
|
"learning_rate": 3.4602324199842026e-06, |
|
"loss": 0.0661, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.201067615658363, |
|
"grad_norm": 2.9004773148647645, |
|
"learning_rate": 3.446942563514711e-06, |
|
"loss": 0.071, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.2028469750889679, |
|
"grad_norm": 3.051970418841176, |
|
"learning_rate": 3.4336648396585777e-06, |
|
"loss": 0.0512, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.204626334519573, |
|
"grad_norm": 3.7548707498901166, |
|
"learning_rate": 3.4203993521424774e-06, |
|
"loss": 0.0809, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.206405693950178, |
|
"grad_norm": 3.4175034488988163, |
|
"learning_rate": 3.407146204597499e-06, |
|
"loss": 0.0782, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.208185053380783, |
|
"grad_norm": 2.859790257172025, |
|
"learning_rate": 3.3939055005583305e-06, |
|
"loss": 0.0582, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.209964412811388, |
|
"grad_norm": 3.24538011764899, |
|
"learning_rate": 3.3806773434624475e-06, |
|
"loss": 0.0746, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.2117437722419928, |
|
"grad_norm": 2.9939543898698773, |
|
"learning_rate": 3.3674618366493117e-06, |
|
"loss": 0.0906, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.2135231316725978, |
|
"grad_norm": 2.7938676971610605, |
|
"learning_rate": 3.3542590833595533e-06, |
|
"loss": 0.0712, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.2153024911032029, |
|
"grad_norm": 3.071730588870675, |
|
"learning_rate": 3.341069186734176e-06, |
|
"loss": 0.0718, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.217081850533808, |
|
"grad_norm": 2.625381236880264, |
|
"learning_rate": 3.3278922498137455e-06, |
|
"loss": 0.0544, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.2188612099644127, |
|
"grad_norm": 3.2605083471187757, |
|
"learning_rate": 3.314728375537587e-06, |
|
"loss": 0.0692, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.2206405693950177, |
|
"grad_norm": 2.7182883755604137, |
|
"learning_rate": 3.3015776667429724e-06, |
|
"loss": 0.0903, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.2224199288256228, |
|
"grad_norm": 3.8869347400190404, |
|
"learning_rate": 3.2884402261643296e-06, |
|
"loss": 0.0744, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.2241992882562278, |
|
"grad_norm": 3.5642687425161927, |
|
"learning_rate": 3.2753161564324344e-06, |
|
"loss": 0.0705, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.2259786476868326, |
|
"grad_norm": 4.551802813888371, |
|
"learning_rate": 3.262205560073605e-06, |
|
"loss": 0.0731, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.2277580071174377, |
|
"grad_norm": 2.801031384157556, |
|
"learning_rate": 3.249108539508909e-06, |
|
"loss": 0.0464, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.2295373665480427, |
|
"grad_norm": 4.296767514427448, |
|
"learning_rate": 3.2360251970533527e-06, |
|
"loss": 0.105, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.2313167259786477, |
|
"grad_norm": 3.093306148831403, |
|
"learning_rate": 3.2229556349150947e-06, |
|
"loss": 0.0754, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.2330960854092528, |
|
"grad_norm": 2.8741056095748916, |
|
"learning_rate": 3.2098999551946337e-06, |
|
"loss": 0.0523, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.2348754448398576, |
|
"grad_norm": 3.6914252769122187, |
|
"learning_rate": 3.1968582598840234e-06, |
|
"loss": 0.0856, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.2366548042704626, |
|
"grad_norm": 3.986941165815034, |
|
"learning_rate": 3.183830650866068e-06, |
|
"loss": 0.0703, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.2384341637010676, |
|
"grad_norm": 2.9881342615167314, |
|
"learning_rate": 3.1708172299135266e-06, |
|
"loss": 0.0649, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.2402135231316727, |
|
"grad_norm": 4.224849065529721, |
|
"learning_rate": 3.1578180986883234e-06, |
|
"loss": 0.1014, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.2419928825622777, |
|
"grad_norm": 3.0878355928401935, |
|
"learning_rate": 3.1448333587407486e-06, |
|
"loss": 0.0668, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.2437722419928825, |
|
"grad_norm": 3.642392217149066, |
|
"learning_rate": 3.131863111508667e-06, |
|
"loss": 0.0723, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.2455516014234875, |
|
"grad_norm": 3.5892764803813844, |
|
"learning_rate": 3.118907458316722e-06, |
|
"loss": 0.063, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.2473309608540926, |
|
"grad_norm": 2.7489055419661836, |
|
"learning_rate": 3.105966500375551e-06, |
|
"loss": 0.0608, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.2491103202846976, |
|
"grad_norm": 3.9273400950091037, |
|
"learning_rate": 3.0930403387809892e-06, |
|
"loss": 0.0997, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.2508896797153026, |
|
"grad_norm": 3.4263471141564343, |
|
"learning_rate": 3.080129074513285e-06, |
|
"loss": 0.0813, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.2526690391459074, |
|
"grad_norm": 3.228327262583916, |
|
"learning_rate": 3.067232808436299e-06, |
|
"loss": 0.0714, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.2544483985765125, |
|
"grad_norm": 3.1289294476833622, |
|
"learning_rate": 3.0543516412967327e-06, |
|
"loss": 0.0647, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.2562277580071175, |
|
"grad_norm": 2.3924912452814096, |
|
"learning_rate": 3.041485673723331e-06, |
|
"loss": 0.0577, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.2580071174377223, |
|
"grad_norm": 3.5953961760286646, |
|
"learning_rate": 3.0286350062261017e-06, |
|
"loss": 0.075, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.2597864768683273, |
|
"grad_norm": 2.4566234271071035, |
|
"learning_rate": 3.0157997391955172e-06, |
|
"loss": 0.0626, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.2615658362989324, |
|
"grad_norm": 3.0998152352975183, |
|
"learning_rate": 3.0029799729017518e-06, |
|
"loss": 0.0719, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.2633451957295374, |
|
"grad_norm": 2.6707015928601994, |
|
"learning_rate": 2.9901758074938797e-06, |
|
"loss": 0.0684, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.2651245551601424, |
|
"grad_norm": 3.2551980371582223, |
|
"learning_rate": 2.977387342999103e-06, |
|
"loss": 0.0709, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.2669039145907472, |
|
"grad_norm": 3.331426964847741, |
|
"learning_rate": 2.964614679321966e-06, |
|
"loss": 0.0641, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.2686832740213523, |
|
"grad_norm": 2.617892133658879, |
|
"learning_rate": 2.951857916243574e-06, |
|
"loss": 0.0744, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.2704626334519573, |
|
"grad_norm": 4.33045271443242, |
|
"learning_rate": 2.9391171534208185e-06, |
|
"loss": 0.1079, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.2722419928825623, |
|
"grad_norm": 3.9900783721238056, |
|
"learning_rate": 2.9263924903855932e-06, |
|
"loss": 0.0603, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.2740213523131674, |
|
"grad_norm": 3.253076908566338, |
|
"learning_rate": 2.9136840265440213e-06, |
|
"loss": 0.0726, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.2758007117437722, |
|
"grad_norm": 3.3586729543507747, |
|
"learning_rate": 2.9009918611756732e-06, |
|
"loss": 0.0807, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.2775800711743772, |
|
"grad_norm": 2.8785457789183337, |
|
"learning_rate": 2.8883160934327968e-06, |
|
"loss": 0.058, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.2793594306049823, |
|
"grad_norm": 3.814698509146615, |
|
"learning_rate": 2.8756568223395396e-06, |
|
"loss": 0.0899, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.281138790035587, |
|
"grad_norm": 3.217912950416559, |
|
"learning_rate": 2.8630141467911777e-06, |
|
"loss": 0.0689, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.282918149466192, |
|
"grad_norm": 2.794431882149881, |
|
"learning_rate": 2.8503881655533395e-06, |
|
"loss": 0.0578, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.2846975088967971, |
|
"grad_norm": 3.267508789650145, |
|
"learning_rate": 2.837778977261235e-06, |
|
"loss": 0.0703, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.2864768683274022, |
|
"grad_norm": 3.015897822666296, |
|
"learning_rate": 2.8251866804188875e-06, |
|
"loss": 0.0685, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.2882562277580072, |
|
"grad_norm": 3.2500889484622437, |
|
"learning_rate": 2.812611373398365e-06, |
|
"loss": 0.0822, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.290035587188612, |
|
"grad_norm": 3.305543207264949, |
|
"learning_rate": 2.8000531544390064e-06, |
|
"loss": 0.0723, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.291814946619217, |
|
"grad_norm": 3.585896880008094, |
|
"learning_rate": 2.7875121216466595e-06, |
|
"loss": 0.0715, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.293594306049822, |
|
"grad_norm": 3.4564796921981635, |
|
"learning_rate": 2.7749883729929105e-06, |
|
"loss": 0.0818, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.295373665480427, |
|
"grad_norm": 2.934981328904215, |
|
"learning_rate": 2.762482006314324e-06, |
|
"loss": 0.0556, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.2971530249110321, |
|
"grad_norm": 3.6439708468052103, |
|
"learning_rate": 2.7499931193116692e-06, |
|
"loss": 0.0747, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.298932384341637, |
|
"grad_norm": 3.283208822985146, |
|
"learning_rate": 2.737521809549167e-06, |
|
"loss": 0.0687, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.300711743772242, |
|
"grad_norm": 3.246771545315838, |
|
"learning_rate": 2.725068174453722e-06, |
|
"loss": 0.0829, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.302491103202847, |
|
"grad_norm": 3.5752520691893603, |
|
"learning_rate": 2.712632311314165e-06, |
|
"loss": 0.0785, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.304270462633452, |
|
"grad_norm": 4.301560306076933, |
|
"learning_rate": 2.7002143172804875e-06, |
|
"loss": 0.0735, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.306049822064057, |
|
"grad_norm": 3.779500126598476, |
|
"learning_rate": 2.6878142893630904e-06, |
|
"loss": 0.0755, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.3078291814946619, |
|
"grad_norm": 3.238612148358189, |
|
"learning_rate": 2.6754323244320154e-06, |
|
"loss": 0.0909, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.309608540925267, |
|
"grad_norm": 2.986567722106932, |
|
"learning_rate": 2.6630685192161995e-06, |
|
"loss": 0.0772, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.311387900355872, |
|
"grad_norm": 3.81496582353021, |
|
"learning_rate": 2.650722970302714e-06, |
|
"loss": 0.0734, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.3131672597864767, |
|
"grad_norm": 3.704962611911283, |
|
"learning_rate": 2.638395774136009e-06, |
|
"loss": 0.0767, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.3149466192170818, |
|
"grad_norm": 3.3846267085890185, |
|
"learning_rate": 2.6260870270171645e-06, |
|
"loss": 0.0762, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.3167259786476868, |
|
"grad_norm": 3.7792034793787046, |
|
"learning_rate": 2.613796825103129e-06, |
|
"loss": 0.0875, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.3185053380782918, |
|
"grad_norm": 2.6143815944303745, |
|
"learning_rate": 2.60152526440598e-06, |
|
"loss": 0.0511, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.3202846975088969, |
|
"grad_norm": 2.7980790544929057, |
|
"learning_rate": 2.5892724407921667e-06, |
|
"loss": 0.0512, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.3220640569395017, |
|
"grad_norm": 4.173933699096102, |
|
"learning_rate": 2.577038449981763e-06, |
|
"loss": 0.081, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.3238434163701067, |
|
"grad_norm": 3.587109391859856, |
|
"learning_rate": 2.564823387547716e-06, |
|
"loss": 0.0739, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.3256227758007118, |
|
"grad_norm": 4.04022667247858, |
|
"learning_rate": 2.552627348915106e-06, |
|
"loss": 0.0864, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.3274021352313168, |
|
"grad_norm": 3.553306710810093, |
|
"learning_rate": 2.5404504293603983e-06, |
|
"loss": 0.0955, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.3291814946619218, |
|
"grad_norm": 4.104208885576115, |
|
"learning_rate": 2.528292724010697e-06, |
|
"loss": 0.1102, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.3309608540925266, |
|
"grad_norm": 3.460254104546819, |
|
"learning_rate": 2.5161543278430055e-06, |
|
"loss": 0.0847, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.3327402135231317, |
|
"grad_norm": 3.6720639020071544, |
|
"learning_rate": 2.5040353356834756e-06, |
|
"loss": 0.0822, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.3345195729537367, |
|
"grad_norm": 3.1228693322160996, |
|
"learning_rate": 2.4919358422066816e-06, |
|
"loss": 0.0604, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.3362989323843417, |
|
"grad_norm": 2.748437263254216, |
|
"learning_rate": 2.4798559419348672e-06, |
|
"loss": 0.0529, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.3380782918149468, |
|
"grad_norm": 2.4902051135841305, |
|
"learning_rate": 2.4677957292372166e-06, |
|
"loss": 0.0567, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.3398576512455516, |
|
"grad_norm": 2.7547359737249804, |
|
"learning_rate": 2.455755298329107e-06, |
|
"loss": 0.0611, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.3416370106761566, |
|
"grad_norm": 2.6553315163667524, |
|
"learning_rate": 2.4437347432713838e-06, |
|
"loss": 0.0672, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.3434163701067616, |
|
"grad_norm": 3.4268109770090978, |
|
"learning_rate": 2.431734157969619e-06, |
|
"loss": 0.0764, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.3451957295373664, |
|
"grad_norm": 3.8755232327997446, |
|
"learning_rate": 2.4197536361733792e-06, |
|
"loss": 0.102, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.3469750889679715, |
|
"grad_norm": 3.1700636602818086, |
|
"learning_rate": 2.407793271475495e-06, |
|
"loss": 0.0771, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.3487544483985765, |
|
"grad_norm": 2.999142375716433, |
|
"learning_rate": 2.3958531573113223e-06, |
|
"loss": 0.0933, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.3505338078291815, |
|
"grad_norm": 3.4849421887454053, |
|
"learning_rate": 2.3839333869580243e-06, |
|
"loss": 0.0844, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.3523131672597866, |
|
"grad_norm": 3.0941674889334236, |
|
"learning_rate": 2.372034053533835e-06, |
|
"loss": 0.0726, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.3540925266903914, |
|
"grad_norm": 2.5686110082520766, |
|
"learning_rate": 2.360155249997334e-06, |
|
"loss": 0.0703, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.3558718861209964, |
|
"grad_norm": 2.7865477520135085, |
|
"learning_rate": 2.348297069146715e-06, |
|
"loss": 0.0559, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.3576512455516014, |
|
"grad_norm": 2.3898890828425436, |
|
"learning_rate": 2.3364596036190706e-06, |
|
"loss": 0.0551, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.3594306049822065, |
|
"grad_norm": 2.9829958705456567, |
|
"learning_rate": 2.3246429458896637e-06, |
|
"loss": 0.0709, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.3612099644128115, |
|
"grad_norm": 2.8738290345973256, |
|
"learning_rate": 2.312847188271203e-06, |
|
"loss": 0.0776, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.3629893238434163, |
|
"grad_norm": 3.0905050558853544, |
|
"learning_rate": 2.301072422913123e-06, |
|
"loss": 0.0512, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.3647686832740213, |
|
"grad_norm": 3.210741333942984, |
|
"learning_rate": 2.2893187418008666e-06, |
|
"loss": 0.0668, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.3665480427046264, |
|
"grad_norm": 2.8607685857184193, |
|
"learning_rate": 2.2775862367551642e-06, |
|
"loss": 0.0643, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.3683274021352312, |
|
"grad_norm": 3.2028957338955495, |
|
"learning_rate": 2.265874999431318e-06, |
|
"loss": 0.0528, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.3701067615658362, |
|
"grad_norm": 3.112635297968323, |
|
"learning_rate": 2.254185121318484e-06, |
|
"loss": 0.0605, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.3718861209964412, |
|
"grad_norm": 3.5361657911464284, |
|
"learning_rate": 2.2425166937389596e-06, |
|
"loss": 0.0764, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.3736654804270463, |
|
"grad_norm": 2.924020870744169, |
|
"learning_rate": 2.2308698078474645e-06, |
|
"loss": 0.0739, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.3754448398576513, |
|
"grad_norm": 2.858131338368024, |
|
"learning_rate": 2.219244554630438e-06, |
|
"loss": 0.0729, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.3772241992882561, |
|
"grad_norm": 2.9867703349275936, |
|
"learning_rate": 2.207641024905322e-06, |
|
"loss": 0.0599, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.3790035587188612, |
|
"grad_norm": 3.1991912006832233, |
|
"learning_rate": 2.1960593093198508e-06, |
|
"loss": 0.0578, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.3807829181494662, |
|
"grad_norm": 2.9472495866764907, |
|
"learning_rate": 2.184499498351347e-06, |
|
"loss": 0.0581, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.3825622775800712, |
|
"grad_norm": 3.001946156898124, |
|
"learning_rate": 2.172961682306011e-06, |
|
"loss": 0.0802, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.3843416370106763, |
|
"grad_norm": 3.0705608886958657, |
|
"learning_rate": 2.1614459513182173e-06, |
|
"loss": 0.0658, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.386120996441281, |
|
"grad_norm": 2.9980196235719894, |
|
"learning_rate": 2.149952395349813e-06, |
|
"loss": 0.0594, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.387900355871886, |
|
"grad_norm": 3.2159439277713284, |
|
"learning_rate": 2.1384811041894055e-06, |
|
"loss": 0.0661, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.3896797153024911, |
|
"grad_norm": 3.7635522624947297, |
|
"learning_rate": 2.1270321674516736e-06, |
|
"loss": 0.0776, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.3914590747330962, |
|
"grad_norm": 2.899961542916672, |
|
"learning_rate": 2.1156056745766593e-06, |
|
"loss": 0.0625, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.3932384341637012, |
|
"grad_norm": 3.0845629233320406, |
|
"learning_rate": 2.104201714829074e-06, |
|
"loss": 0.0738, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.395017793594306, |
|
"grad_norm": 3.266645933274165, |
|
"learning_rate": 2.0928203772975917e-06, |
|
"loss": 0.0841, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.396797153024911, |
|
"grad_norm": 2.8224066501885448, |
|
"learning_rate": 2.081461750894166e-06, |
|
"loss": 0.0673, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.398576512455516, |
|
"grad_norm": 3.3583282797297414, |
|
"learning_rate": 2.070125924353328e-06, |
|
"loss": 0.0636, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.4003558718861209, |
|
"grad_norm": 2.9637227839589912, |
|
"learning_rate": 2.058812986231493e-06, |
|
"loss": 0.0745, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.402135231316726, |
|
"grad_norm": 3.4520060465487714, |
|
"learning_rate": 2.0475230249062727e-06, |
|
"loss": 0.1137, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.403914590747331, |
|
"grad_norm": 3.184485044782164, |
|
"learning_rate": 2.0362561285757766e-06, |
|
"loss": 0.0769, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.405693950177936, |
|
"grad_norm": 2.806816671243507, |
|
"learning_rate": 2.0250123852579347e-06, |
|
"loss": 0.0526, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.407473309608541, |
|
"grad_norm": 2.2733451232658823, |
|
"learning_rate": 2.013791882789801e-06, |
|
"loss": 0.0442, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.4092526690391458, |
|
"grad_norm": 3.016940726087215, |
|
"learning_rate": 2.0025947088268714e-06, |
|
"loss": 0.043, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.4110320284697508, |
|
"grad_norm": 2.6283536444577122, |
|
"learning_rate": 1.9914209508423943e-06, |
|
"loss": 0.0577, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.4128113879003559, |
|
"grad_norm": 2.833376322385772, |
|
"learning_rate": 1.9802706961266936e-06, |
|
"loss": 0.0633, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.414590747330961, |
|
"grad_norm": 3.3938382135417897, |
|
"learning_rate": 1.969144031786483e-06, |
|
"loss": 0.0755, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.416370106761566, |
|
"grad_norm": 2.850962132252063, |
|
"learning_rate": 1.958041044744186e-06, |
|
"loss": 0.0814, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.4181494661921707, |
|
"grad_norm": 2.7126804335468853, |
|
"learning_rate": 1.94696182173726e-06, |
|
"loss": 0.0476, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.4199288256227758, |
|
"grad_norm": 3.4241227243705037, |
|
"learning_rate": 1.9359064493175077e-06, |
|
"loss": 0.0743, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.4217081850533808, |
|
"grad_norm": 3.284047754770386, |
|
"learning_rate": 1.9248750138504176e-06, |
|
"loss": 0.0621, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.4234875444839858, |
|
"grad_norm": 3.4040460221463054, |
|
"learning_rate": 1.9138676015144765e-06, |
|
"loss": 0.0643, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.4234875444839858, |
|
"eval_loss": 0.17664127051830292, |
|
"eval_runtime": 1.5707, |
|
"eval_samples_per_second": 29.287, |
|
"eval_steps_per_second": 7.64, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.4252669039145909, |
|
"grad_norm": 3.54366133876467, |
|
"learning_rate": 1.9028842983005036e-06, |
|
"loss": 0.0877, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.4270462633451957, |
|
"grad_norm": 3.596911776355096, |
|
"learning_rate": 1.8919251900109697e-06, |
|
"loss": 0.0828, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.4288256227758007, |
|
"grad_norm": 2.9941838955437943, |
|
"learning_rate": 1.8809903622593395e-06, |
|
"loss": 0.0658, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.4306049822064058, |
|
"grad_norm": 2.670399009000733, |
|
"learning_rate": 1.870079900469392e-06, |
|
"loss": 0.045, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.4323843416370106, |
|
"grad_norm": 3.2580080405130034, |
|
"learning_rate": 1.8591938898745593e-06, |
|
"loss": 0.0738, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.4341637010676156, |
|
"grad_norm": 2.9992642037495085, |
|
"learning_rate": 1.8483324155172594e-06, |
|
"loss": 0.0655, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.4359430604982206, |
|
"grad_norm": 2.6770120825283805, |
|
"learning_rate": 1.837495562248226e-06, |
|
"loss": 0.077, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.4377224199288257, |
|
"grad_norm": 3.241780530487706, |
|
"learning_rate": 1.8266834147258577e-06, |
|
"loss": 0.0672, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.4395017793594307, |
|
"grad_norm": 2.923628402358049, |
|
"learning_rate": 1.8158960574155455e-06, |
|
"loss": 0.0599, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.4412811387900355, |
|
"grad_norm": 3.659524059891711, |
|
"learning_rate": 1.8051335745890196e-06, |
|
"loss": 0.0746, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.4430604982206405, |
|
"grad_norm": 3.2754546185257953, |
|
"learning_rate": 1.7943960503236856e-06, |
|
"loss": 0.0885, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.4448398576512456, |
|
"grad_norm": 2.781760975359573, |
|
"learning_rate": 1.7836835685019732e-06, |
|
"loss": 0.0677, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.4466192170818506, |
|
"grad_norm": 3.7140123337014073, |
|
"learning_rate": 1.7729962128106787e-06, |
|
"loss": 0.0669, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.4483985765124556, |
|
"grad_norm": 3.040884681071234, |
|
"learning_rate": 1.7623340667403089e-06, |
|
"loss": 0.0554, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.4501779359430604, |
|
"grad_norm": 3.957500165209126, |
|
"learning_rate": 1.7516972135844352e-06, |
|
"loss": 0.0942, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.4519572953736655, |
|
"grad_norm": 3.564822690436673, |
|
"learning_rate": 1.741085736439031e-06, |
|
"loss": 0.064, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.4537366548042705, |
|
"grad_norm": 3.3368236166422713, |
|
"learning_rate": 1.730499718201838e-06, |
|
"loss": 0.0731, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.4555160142348753, |
|
"grad_norm": 3.402069196876779, |
|
"learning_rate": 1.7199392415717064e-06, |
|
"loss": 0.0668, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.4572953736654806, |
|
"grad_norm": 3.5169339926849648, |
|
"learning_rate": 1.7094043890479557e-06, |
|
"loss": 0.082, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.4590747330960854, |
|
"grad_norm": 2.728683117712834, |
|
"learning_rate": 1.698895242929725e-06, |
|
"loss": 0.0747, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.4608540925266904, |
|
"grad_norm": 2.979382764924642, |
|
"learning_rate": 1.6884118853153358e-06, |
|
"loss": 0.0521, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.4626334519572954, |
|
"grad_norm": 3.830402921694487, |
|
"learning_rate": 1.6779543981016478e-06, |
|
"loss": 0.078, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.4644128113879002, |
|
"grad_norm": 2.995879443045243, |
|
"learning_rate": 1.6675228629834133e-06, |
|
"loss": 0.0784, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.4661921708185053, |
|
"grad_norm": 2.6844591781386895, |
|
"learning_rate": 1.657117361452651e-06, |
|
"loss": 0.0582, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.4679715302491103, |
|
"grad_norm": 2.794007202949437, |
|
"learning_rate": 1.6467379747980011e-06, |
|
"loss": 0.0687, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.4697508896797153, |
|
"grad_norm": 2.359851790494547, |
|
"learning_rate": 1.6363847841040914e-06, |
|
"loss": 0.0535, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.4715302491103204, |
|
"grad_norm": 2.701485556896073, |
|
"learning_rate": 1.626057870250906e-06, |
|
"loss": 0.0496, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.4733096085409252, |
|
"grad_norm": 2.8050621800595787, |
|
"learning_rate": 1.6157573139131527e-06, |
|
"loss": 0.0472, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.4750889679715302, |
|
"grad_norm": 2.77158076207917, |
|
"learning_rate": 1.605483195559628e-06, |
|
"loss": 0.0739, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.4768683274021353, |
|
"grad_norm": 3.000243961105045, |
|
"learning_rate": 1.5952355954525966e-06, |
|
"loss": 0.0493, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.4786476868327403, |
|
"grad_norm": 2.8080374843042297, |
|
"learning_rate": 1.5850145936471607e-06, |
|
"loss": 0.0644, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.4804270462633453, |
|
"grad_norm": 3.714658848333057, |
|
"learning_rate": 1.5748202699906335e-06, |
|
"loss": 0.0633, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.4822064056939501, |
|
"grad_norm": 2.1039017509865645, |
|
"learning_rate": 1.5646527041219128e-06, |
|
"loss": 0.0439, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.4839857651245552, |
|
"grad_norm": 2.8879281914141273, |
|
"learning_rate": 1.5545119754708682e-06, |
|
"loss": 0.0629, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.4857651245551602, |
|
"grad_norm": 3.8618859847604234, |
|
"learning_rate": 1.544398163257711e-06, |
|
"loss": 0.0874, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.487544483985765, |
|
"grad_norm": 2.8548552056006584, |
|
"learning_rate": 1.5343113464923808e-06, |
|
"loss": 0.0657, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.48932384341637, |
|
"grad_norm": 2.379942567164302, |
|
"learning_rate": 1.524251603973927e-06, |
|
"loss": 0.0511, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.491103202846975, |
|
"grad_norm": 3.0290227868988984, |
|
"learning_rate": 1.5142190142898883e-06, |
|
"loss": 0.0682, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.49288256227758, |
|
"grad_norm": 3.3182701987270047, |
|
"learning_rate": 1.5042136558156883e-06, |
|
"loss": 0.0752, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.4946619217081851, |
|
"grad_norm": 3.8713186357288563, |
|
"learning_rate": 1.4942356067140162e-06, |
|
"loss": 0.0868, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.49644128113879, |
|
"grad_norm": 2.383250781729, |
|
"learning_rate": 1.4842849449342195e-06, |
|
"loss": 0.0562, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.498220640569395, |
|
"grad_norm": 2.7697598804185897, |
|
"learning_rate": 1.4743617482116896e-06, |
|
"loss": 0.0741, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 3.3622962393079203, |
|
"learning_rate": 1.4644660940672628e-06, |
|
"loss": 0.0557, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.501779359430605, |
|
"grad_norm": 2.9524335095426277, |
|
"learning_rate": 1.454598059806609e-06, |
|
"loss": 0.0744, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.50355871886121, |
|
"grad_norm": 3.329296631219426, |
|
"learning_rate": 1.4447577225196296e-06, |
|
"loss": 0.0729, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.5053380782918149, |
|
"grad_norm": 3.2874123728981663, |
|
"learning_rate": 1.4349451590798564e-06, |
|
"loss": 0.0654, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.50711743772242, |
|
"grad_norm": 3.0171573501353692, |
|
"learning_rate": 1.4251604461438444e-06, |
|
"loss": 0.0753, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.508896797153025, |
|
"grad_norm": 2.816941373653401, |
|
"learning_rate": 1.4154036601505834e-06, |
|
"loss": 0.0592, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.5106761565836297, |
|
"grad_norm": 3.270492677398863, |
|
"learning_rate": 1.4056748773208933e-06, |
|
"loss": 0.0772, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.512455516014235, |
|
"grad_norm": 3.176430624364355, |
|
"learning_rate": 1.3959741736568339e-06, |
|
"loss": 0.0717, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.5142348754448398, |
|
"grad_norm": 2.7985112977822078, |
|
"learning_rate": 1.3863016249411027e-06, |
|
"loss": 0.0552, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.5160142348754448, |
|
"grad_norm": 2.678537911931075, |
|
"learning_rate": 1.376657306736453e-06, |
|
"loss": 0.0695, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.5177935943060499, |
|
"grad_norm": 3.1870532768946402, |
|
"learning_rate": 1.3670412943850975e-06, |
|
"loss": 0.0744, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.5195729537366547, |
|
"grad_norm": 2.6451250583581865, |
|
"learning_rate": 1.3574536630081208e-06, |
|
"loss": 0.0419, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.52135231316726, |
|
"grad_norm": 3.0346471389967395, |
|
"learning_rate": 1.347894487504896e-06, |
|
"loss": 0.0528, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.5231316725978647, |
|
"grad_norm": 2.867261806273139, |
|
"learning_rate": 1.3383638425524909e-06, |
|
"loss": 0.0522, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.5249110320284698, |
|
"grad_norm": 3.073279075438378, |
|
"learning_rate": 1.3288618026050943e-06, |
|
"loss": 0.0663, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.5266903914590748, |
|
"grad_norm": 2.98478324679089, |
|
"learning_rate": 1.31938844189343e-06, |
|
"loss": 0.0606, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.5284697508896796, |
|
"grad_norm": 3.458063646223215, |
|
"learning_rate": 1.3099438344241777e-06, |
|
"loss": 0.0744, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.5302491103202847, |
|
"grad_norm": 2.6537507297516663, |
|
"learning_rate": 1.3005280539793908e-06, |
|
"loss": 0.0591, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.5320284697508897, |
|
"grad_norm": 3.0172970889584008, |
|
"learning_rate": 1.2911411741159273e-06, |
|
"loss": 0.0592, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.5338078291814945, |
|
"grad_norm": 2.8573849129073383, |
|
"learning_rate": 1.2817832681648712e-06, |
|
"loss": 0.0977, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.5355871886120998, |
|
"grad_norm": 3.207670857262839, |
|
"learning_rate": 1.2724544092309581e-06, |
|
"loss": 0.0665, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.5373665480427046, |
|
"grad_norm": 2.9162391812899964, |
|
"learning_rate": 1.2631546701920073e-06, |
|
"loss": 0.0664, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.5391459074733096, |
|
"grad_norm": 2.964075689308196, |
|
"learning_rate": 1.2538841236983519e-06, |
|
"loss": 0.0698, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.5409252669039146, |
|
"grad_norm": 3.115274358451816, |
|
"learning_rate": 1.244642842172266e-06, |
|
"loss": 0.0521, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.5427046263345194, |
|
"grad_norm": 3.1908026377084564, |
|
"learning_rate": 1.2354308978074088e-06, |
|
"loss": 0.0679, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.5444839857651247, |
|
"grad_norm": 2.906449192056007, |
|
"learning_rate": 1.2262483625682514e-06, |
|
"loss": 0.0716, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.5462633451957295, |
|
"grad_norm": 3.3907375914049975, |
|
"learning_rate": 1.2170953081895214e-06, |
|
"loss": 0.0707, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.5480427046263345, |
|
"grad_norm": 2.862801487537389, |
|
"learning_rate": 1.2079718061756369e-06, |
|
"loss": 0.0503, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.5498220640569396, |
|
"grad_norm": 3.005258117645404, |
|
"learning_rate": 1.1988779278001517e-06, |
|
"loss": 0.0616, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.5516014234875444, |
|
"grad_norm": 3.2748273340440175, |
|
"learning_rate": 1.1898137441051982e-06, |
|
"loss": 0.0726, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.5533807829181496, |
|
"grad_norm": 3.0665788643293994, |
|
"learning_rate": 1.1807793259009282e-06, |
|
"loss": 0.0751, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.5551601423487544, |
|
"grad_norm": 2.89600450661726, |
|
"learning_rate": 1.1717747437649657e-06, |
|
"loss": 0.0665, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.5569395017793595, |
|
"grad_norm": 2.7714732535423745, |
|
"learning_rate": 1.1628000680418533e-06, |
|
"loss": 0.0534, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.5587188612099645, |
|
"grad_norm": 3.146076978189336, |
|
"learning_rate": 1.1538553688425002e-06, |
|
"loss": 0.0585, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.5604982206405693, |
|
"grad_norm": 3.1356617184222673, |
|
"learning_rate": 1.14494071604364e-06, |
|
"loss": 0.0601, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.5622775800711743, |
|
"grad_norm": 2.9553513322876697, |
|
"learning_rate": 1.1360561792872754e-06, |
|
"loss": 0.0566, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.5640569395017794, |
|
"grad_norm": 3.6154420214573717, |
|
"learning_rate": 1.127201827980145e-06, |
|
"loss": 0.0885, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.5658362989323842, |
|
"grad_norm": 2.591646694765396, |
|
"learning_rate": 1.1183777312931748e-06, |
|
"loss": 0.0492, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.5676156583629894, |
|
"grad_norm": 2.7239297488972958, |
|
"learning_rate": 1.1095839581609407e-06, |
|
"loss": 0.071, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.5693950177935942, |
|
"grad_norm": 3.3103837963364353, |
|
"learning_rate": 1.1008205772811248e-06, |
|
"loss": 0.0764, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.5711743772241993, |
|
"grad_norm": 3.23797425599256, |
|
"learning_rate": 1.0920876571139843e-06, |
|
"loss": 0.0765, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.5729537366548043, |
|
"grad_norm": 3.349955914380848, |
|
"learning_rate": 1.0833852658818167e-06, |
|
"loss": 0.0743, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.5747330960854091, |
|
"grad_norm": 3.243899237607166, |
|
"learning_rate": 1.0747134715684221e-06, |
|
"loss": 0.0604, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.5765124555160144, |
|
"grad_norm": 2.75286603570387, |
|
"learning_rate": 1.0660723419185776e-06, |
|
"loss": 0.0587, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.5782918149466192, |
|
"grad_norm": 3.3722758470769767, |
|
"learning_rate": 1.0574619444375017e-06, |
|
"loss": 0.0657, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.5800711743772242, |
|
"grad_norm": 2.6144967753285666, |
|
"learning_rate": 1.0488823463903341e-06, |
|
"loss": 0.0622, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.5818505338078293, |
|
"grad_norm": 2.6553737864262725, |
|
"learning_rate": 1.0403336148016053e-06, |
|
"loss": 0.0749, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.583629893238434, |
|
"grad_norm": 2.840242667179585, |
|
"learning_rate": 1.0318158164547159e-06, |
|
"loss": 0.0763, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.585409252669039, |
|
"grad_norm": 2.6346911849026995, |
|
"learning_rate": 1.0233290178914096e-06, |
|
"loss": 0.0568, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.5871886120996441, |
|
"grad_norm": 2.5289671282891297, |
|
"learning_rate": 1.014873285411262e-06, |
|
"loss": 0.0581, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.5889679715302492, |
|
"grad_norm": 3.6077551323777755, |
|
"learning_rate": 1.006448685071154e-06, |
|
"loss": 0.0816, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.5907473309608542, |
|
"grad_norm": 3.454280721313296, |
|
"learning_rate": 9.980552826847635e-07, |
|
"loss": 0.0773, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.592526690391459, |
|
"grad_norm": 3.621861789141318, |
|
"learning_rate": 9.896931438220453e-07, |
|
"loss": 0.0867, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.594306049822064, |
|
"grad_norm": 3.392929747141218, |
|
"learning_rate": 9.813623338087181e-07, |
|
"loss": 0.0688, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.596085409252669, |
|
"grad_norm": 4.143709852564452, |
|
"learning_rate": 9.730629177257623e-07, |
|
"loss": 0.0956, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.5978647686832739, |
|
"grad_norm": 3.1225648923232696, |
|
"learning_rate": 9.64794960408903e-07, |
|
"loss": 0.0602, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.5996441281138791, |
|
"grad_norm": 3.1140613435407865, |
|
"learning_rate": 9.565585264481092e-07, |
|
"loss": 0.059, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.601423487544484, |
|
"grad_norm": 2.8693085711707287, |
|
"learning_rate": 9.483536801870835e-07, |
|
"loss": 0.0621, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.603202846975089, |
|
"grad_norm": 2.609196728377582, |
|
"learning_rate": 9.401804857227648e-07, |
|
"loss": 0.0473, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.604982206405694, |
|
"grad_norm": 2.761792454523691, |
|
"learning_rate": 9.320390069048258e-07, |
|
"loss": 0.07, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.6067615658362988, |
|
"grad_norm": 3.1197438157066224, |
|
"learning_rate": 9.239293073351735e-07, |
|
"loss": 0.0619, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.608540925266904, |
|
"grad_norm": 3.53571644515477, |
|
"learning_rate": 9.158514503674543e-07, |
|
"loss": 0.0729, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.6103202846975089, |
|
"grad_norm": 2.8741692148804314, |
|
"learning_rate": 9.078054991065532e-07, |
|
"loss": 0.0601, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.612099644128114, |
|
"grad_norm": 2.9963928684389884, |
|
"learning_rate": 8.997915164081095e-07, |
|
"loss": 0.0707, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.613879003558719, |
|
"grad_norm": 3.426537322048409, |
|
"learning_rate": 8.918095648780195e-07, |
|
"loss": 0.0671, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.6156583629893237, |
|
"grad_norm": 3.0643250011205874, |
|
"learning_rate": 8.838597068719518e-07, |
|
"loss": 0.0664, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.6174377224199288, |
|
"grad_norm": 3.6357729268118617, |
|
"learning_rate": 8.75942004494853e-07, |
|
"loss": 0.0684, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.6192170818505338, |
|
"grad_norm": 3.6271758966609395, |
|
"learning_rate": 8.680565196004704e-07, |
|
"loss": 0.0776, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.6209964412811388, |
|
"grad_norm": 3.5174621010003655, |
|
"learning_rate": 8.602033137908666e-07, |
|
"loss": 0.0592, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.6227758007117439, |
|
"grad_norm": 3.36541911897023, |
|
"learning_rate": 8.523824484159348e-07, |
|
"loss": 0.0652, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.6245551601423487, |
|
"grad_norm": 3.69057377098791, |
|
"learning_rate": 8.445939845729245e-07, |
|
"loss": 0.0568, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.6263345195729537, |
|
"grad_norm": 2.515366469108778, |
|
"learning_rate": 8.368379831059592e-07, |
|
"loss": 0.0606, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.6281138790035588, |
|
"grad_norm": 2.603799736320011, |
|
"learning_rate": 8.29114504605566e-07, |
|
"loss": 0.0532, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.6298932384341636, |
|
"grad_norm": 2.695178266745452, |
|
"learning_rate": 8.21423609408199e-07, |
|
"loss": 0.0705, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.6316725978647688, |
|
"grad_norm": 3.4511870182157196, |
|
"learning_rate": 8.137653575957666e-07, |
|
"loss": 0.0611, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.6334519572953736, |
|
"grad_norm": 3.9227732289409385, |
|
"learning_rate": 8.061398089951678e-07, |
|
"loss": 0.0796, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.6352313167259787, |
|
"grad_norm": 2.8555652532664024, |
|
"learning_rate": 7.985470231778203e-07, |
|
"loss": 0.0591, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.6370106761565837, |
|
"grad_norm": 3.380297686151528, |
|
"learning_rate": 7.909870594591951e-07, |
|
"loss": 0.0673, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.6387900355871885, |
|
"grad_norm": 3.211907914634212, |
|
"learning_rate": 7.834599768983553e-07, |
|
"loss": 0.0612, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.6405693950177938, |
|
"grad_norm": 2.7405031414285856, |
|
"learning_rate": 7.759658342974951e-07, |
|
"loss": 0.0581, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.6423487544483986, |
|
"grad_norm": 3.3248336732300574, |
|
"learning_rate": 7.685046902014747e-07, |
|
"loss": 0.0763, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.6441281138790036, |
|
"grad_norm": 3.4759266741212484, |
|
"learning_rate": 7.61076602897371e-07, |
|
"loss": 0.0682, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.6459074733096086, |
|
"grad_norm": 2.3917598916047282, |
|
"learning_rate": 7.536816304140177e-07, |
|
"loss": 0.0462, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.6476868327402134, |
|
"grad_norm": 2.826543682762982, |
|
"learning_rate": 7.46319830521553e-07, |
|
"loss": 0.0383, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.6494661921708185, |
|
"grad_norm": 2.6827281387145634, |
|
"learning_rate": 7.389912607309662e-07, |
|
"loss": 0.0719, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.6512455516014235, |
|
"grad_norm": 3.8119150686022465, |
|
"learning_rate": 7.316959782936516e-07, |
|
"loss": 0.0997, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.6530249110320283, |
|
"grad_norm": 2.4845047390085626, |
|
"learning_rate": 7.244340402009608e-07, |
|
"loss": 0.0533, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.6548042704626336, |
|
"grad_norm": 3.6828165702189004, |
|
"learning_rate": 7.172055031837572e-07, |
|
"loss": 0.0834, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.6565836298932384, |
|
"grad_norm": 2.695514001349758, |
|
"learning_rate": 7.100104237119676e-07, |
|
"loss": 0.0629, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.6583629893238434, |
|
"grad_norm": 2.642177017296404, |
|
"learning_rate": 7.028488579941506e-07, |
|
"loss": 0.0784, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.6601423487544484, |
|
"grad_norm": 3.5485918832907255, |
|
"learning_rate": 6.957208619770505e-07, |
|
"loss": 0.0776, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.6619217081850532, |
|
"grad_norm": 3.352597332407956, |
|
"learning_rate": 6.886264913451635e-07, |
|
"loss": 0.0712, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.6637010676156585, |
|
"grad_norm": 3.1835587617367844, |
|
"learning_rate": 6.815658015203014e-07, |
|
"loss": 0.07, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.6654804270462633, |
|
"grad_norm": 3.4061169153729334, |
|
"learning_rate": 6.745388476611553e-07, |
|
"loss": 0.0717, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.6672597864768683, |
|
"grad_norm": 3.0158946637862547, |
|
"learning_rate": 6.67545684662873e-07, |
|
"loss": 0.0687, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.6690391459074734, |
|
"grad_norm": 2.270718794234393, |
|
"learning_rate": 6.605863671566221e-07, |
|
"loss": 0.0439, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.6708185053380782, |
|
"grad_norm": 2.8170583032975864, |
|
"learning_rate": 6.536609495091695e-07, |
|
"loss": 0.0522, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.6725978647686834, |
|
"grad_norm": 3.5578347301706943, |
|
"learning_rate": 6.467694858224488e-07, |
|
"loss": 0.0748, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.6743772241992882, |
|
"grad_norm": 2.813101820845763, |
|
"learning_rate": 6.399120299331468e-07, |
|
"loss": 0.0431, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.6761565836298933, |
|
"grad_norm": 3.3166338115470992, |
|
"learning_rate": 6.330886354122768e-07, |
|
"loss": 0.0657, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.6779359430604983, |
|
"grad_norm": 3.2692729127609303, |
|
"learning_rate": 6.262993555647617e-07, |
|
"loss": 0.0787, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.6797153024911031, |
|
"grad_norm": 2.568984705616588, |
|
"learning_rate": 6.1954424342902e-07, |
|
"loss": 0.0609, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.6814946619217082, |
|
"grad_norm": 4.000912924414758, |
|
"learning_rate": 6.128233517765448e-07, |
|
"loss": 0.0839, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.6832740213523132, |
|
"grad_norm": 3.219229031964599, |
|
"learning_rate": 6.061367331114992e-07, |
|
"loss": 0.0598, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.685053380782918, |
|
"grad_norm": 3.2456969164301324, |
|
"learning_rate": 5.994844396703025e-07, |
|
"loss": 0.0973, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.6868327402135233, |
|
"grad_norm": 3.0325525674699088, |
|
"learning_rate": 5.928665234212233e-07, |
|
"loss": 0.0589, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.688612099644128, |
|
"grad_norm": 3.0207910699199982, |
|
"learning_rate": 5.862830360639698e-07, |
|
"loss": 0.0678, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.690391459074733, |
|
"grad_norm": 3.3631042489332867, |
|
"learning_rate": 5.797340290292907e-07, |
|
"loss": 0.0655, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.6921708185053381, |
|
"grad_norm": 3.1706047313639267, |
|
"learning_rate": 5.732195534785723e-07, |
|
"loss": 0.0693, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.693950177935943, |
|
"grad_norm": 2.6408438799075067, |
|
"learning_rate": 5.667396603034369e-07, |
|
"loss": 0.0452, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.6957295373665482, |
|
"grad_norm": 3.7625890481468978, |
|
"learning_rate": 5.602944001253486e-07, |
|
"loss": 0.0789, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.697508896797153, |
|
"grad_norm": 3.4069534747816927, |
|
"learning_rate": 5.538838232952104e-07, |
|
"loss": 0.0748, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.699288256227758, |
|
"grad_norm": 3.603254667107962, |
|
"learning_rate": 5.475079798929816e-07, |
|
"loss": 0.0977, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.701067615658363, |
|
"grad_norm": 2.631883970223696, |
|
"learning_rate": 5.411669197272795e-07, |
|
"loss": 0.0502, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.7028469750889679, |
|
"grad_norm": 2.8057993643577914, |
|
"learning_rate": 5.348606923349903e-07, |
|
"loss": 0.0583, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.704626334519573, |
|
"grad_norm": 3.3025993883725504, |
|
"learning_rate": 5.285893469808855e-07, |
|
"loss": 0.0509, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.706405693950178, |
|
"grad_norm": 3.360289988672313, |
|
"learning_rate": 5.223529326572352e-07, |
|
"loss": 0.0637, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.708185053380783, |
|
"grad_norm": 3.2322215031716834, |
|
"learning_rate": 5.161514980834232e-07, |
|
"loss": 0.0727, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.709964412811388, |
|
"grad_norm": 3.4733492396816117, |
|
"learning_rate": 5.099850917055709e-07, |
|
"loss": 0.06, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.7117437722419928, |
|
"grad_norm": 3.1182770439070118, |
|
"learning_rate": 5.038537616961559e-07, |
|
"loss": 0.0586, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.7135231316725978, |
|
"grad_norm": 3.1897745805113815, |
|
"learning_rate": 4.977575559536358e-07, |
|
"loss": 0.0546, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.7153024911032029, |
|
"grad_norm": 3.0642521218888, |
|
"learning_rate": 4.916965221020753e-07, |
|
"loss": 0.0565, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.7170818505338077, |
|
"grad_norm": 2.5861062459312896, |
|
"learning_rate": 4.856707074907729e-07, |
|
"loss": 0.0513, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.718861209964413, |
|
"grad_norm": 3.140989728814077, |
|
"learning_rate": 4.796801591938922e-07, |
|
"loss": 0.0698, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.7206405693950177, |
|
"grad_norm": 3.763917631516923, |
|
"learning_rate": 4.737249240100911e-07, |
|
"loss": 0.0825, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.7224199288256228, |
|
"grad_norm": 2.8045289345515014, |
|
"learning_rate": 4.6780504846216155e-07, |
|
"loss": 0.0459, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.7241992882562278, |
|
"grad_norm": 2.7841786012454994, |
|
"learning_rate": 4.619205787966613e-07, |
|
"loss": 0.0476, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.7259786476868326, |
|
"grad_norm": 2.614880840574876, |
|
"learning_rate": 4.560715609835548e-07, |
|
"loss": 0.0459, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.7277580071174379, |
|
"grad_norm": 2.5690504891697152, |
|
"learning_rate": 4.5025804071585464e-07, |
|
"loss": 0.0492, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.7295373665480427, |
|
"grad_norm": 3.125920038383312, |
|
"learning_rate": 4.4448006340926163e-07, |
|
"loss": 0.0705, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.7313167259786477, |
|
"grad_norm": 3.0682648555725076, |
|
"learning_rate": 4.3873767420181344e-07, |
|
"loss": 0.0635, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.7330960854092528, |
|
"grad_norm": 3.2436006071493253, |
|
"learning_rate": 4.3303091795353024e-07, |
|
"loss": 0.0679, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.7348754448398576, |
|
"grad_norm": 2.810064745752848, |
|
"learning_rate": 4.2735983924606596e-07, |
|
"loss": 0.0472, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.7366548042704626, |
|
"grad_norm": 3.3177063977923975, |
|
"learning_rate": 4.2172448238235464e-07, |
|
"loss": 0.0489, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.7384341637010676, |
|
"grad_norm": 2.7498511633925555, |
|
"learning_rate": 4.161248913862731e-07, |
|
"loss": 0.0474, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.7402135231316724, |
|
"grad_norm": 3.0480340494466986, |
|
"learning_rate": 4.1056111000228937e-07, |
|
"loss": 0.0586, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.7419928825622777, |
|
"grad_norm": 3.7858395003178504, |
|
"learning_rate": 4.0503318169512417e-07, |
|
"loss": 0.0719, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.7437722419928825, |
|
"grad_norm": 2.6331632035919412, |
|
"learning_rate": 3.9954114964941336e-07, |
|
"loss": 0.0534, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.7455516014234875, |
|
"grad_norm": 3.04707570727385, |
|
"learning_rate": 3.9408505676936327e-07, |
|
"loss": 0.0611, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.7473309608540926, |
|
"grad_norm": 3.2244955830980384, |
|
"learning_rate": 3.886649456784253e-07, |
|
"loss": 0.0698, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.7491103202846974, |
|
"grad_norm": 2.8908662320588623, |
|
"learning_rate": 3.8328085871895624e-07, |
|
"loss": 0.0801, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.7508896797153026, |
|
"grad_norm": 2.8376806377365535, |
|
"learning_rate": 3.779328379518898e-07, |
|
"loss": 0.0566, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.7526690391459074, |
|
"grad_norm": 2.9348029648419343, |
|
"learning_rate": 3.7262092515640556e-07, |
|
"loss": 0.0587, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.7544483985765125, |
|
"grad_norm": 2.4484408651790748, |
|
"learning_rate": 3.673451618296081e-07, |
|
"loss": 0.0453, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.7562277580071175, |
|
"grad_norm": 3.0419363978406087, |
|
"learning_rate": 3.621055891861963e-07, |
|
"loss": 0.0653, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.7580071174377223, |
|
"grad_norm": 3.17293244422561, |
|
"learning_rate": 3.56902248158148e-07, |
|
"loss": 0.0629, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.7597864768683276, |
|
"grad_norm": 3.3896784088726473, |
|
"learning_rate": 3.517351793943913e-07, |
|
"loss": 0.0647, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.7615658362989324, |
|
"grad_norm": 3.709313467739001, |
|
"learning_rate": 3.4660442326049704e-07, |
|
"loss": 0.0584, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.7633451957295374, |
|
"grad_norm": 2.7466821091247877, |
|
"learning_rate": 3.4151001983835696e-07, |
|
"loss": 0.0659, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.7651245551601424, |
|
"grad_norm": 4.238166656136811, |
|
"learning_rate": 3.364520089258727e-07, |
|
"loss": 0.0747, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.7669039145907472, |
|
"grad_norm": 2.9489633244470617, |
|
"learning_rate": 3.314304300366461e-07, |
|
"loss": 0.0637, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.7686832740213523, |
|
"grad_norm": 3.303112152853039, |
|
"learning_rate": 3.2644532239966444e-07, |
|
"loss": 0.0714, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.7704626334519573, |
|
"grad_norm": 2.3286227096107233, |
|
"learning_rate": 3.2149672495900286e-07, |
|
"loss": 0.0553, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.7722419928825621, |
|
"grad_norm": 3.002420523623007, |
|
"learning_rate": 3.165846763735153e-07, |
|
"loss": 0.0691, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.7740213523131674, |
|
"grad_norm": 2.507962828234642, |
|
"learning_rate": 3.117092150165324e-07, |
|
"loss": 0.0547, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.7758007117437722, |
|
"grad_norm": 3.063988340174308, |
|
"learning_rate": 3.068703789755606e-07, |
|
"loss": 0.0587, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.7775800711743772, |
|
"grad_norm": 3.245348666184576, |
|
"learning_rate": 3.020682060519886e-07, |
|
"loss": 0.066, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.7793594306049823, |
|
"grad_norm": 3.33708314978665, |
|
"learning_rate": 2.9730273376078923e-07, |
|
"loss": 0.0536, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.7793594306049823, |
|
"eval_loss": 0.17078149318695068, |
|
"eval_runtime": 1.575, |
|
"eval_samples_per_second": 29.207, |
|
"eval_steps_per_second": 7.619, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.781138790035587, |
|
"grad_norm": 3.08491965275037, |
|
"learning_rate": 2.9257399933022737e-07, |
|
"loss": 0.0595, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.7829181494661923, |
|
"grad_norm": 2.8293925074613666, |
|
"learning_rate": 2.8788203970156805e-07, |
|
"loss": 0.0554, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.7846975088967971, |
|
"grad_norm": 2.914056829587285, |
|
"learning_rate": 2.832268915287878e-07, |
|
"loss": 0.0698, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.7864768683274022, |
|
"grad_norm": 3.475103321357514, |
|
"learning_rate": 2.7860859117828985e-07, |
|
"loss": 0.0732, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.7882562277580072, |
|
"grad_norm": 4.197781991039924, |
|
"learning_rate": 2.740271747286194e-07, |
|
"loss": 0.1216, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.790035587188612, |
|
"grad_norm": 3.115238863104138, |
|
"learning_rate": 2.6948267797018145e-07, |
|
"loss": 0.0654, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.791814946619217, |
|
"grad_norm": 3.0104259057828924, |
|
"learning_rate": 2.649751364049613e-07, |
|
"loss": 0.0469, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.793594306049822, |
|
"grad_norm": 3.031989090330956, |
|
"learning_rate": 2.6050458524624735e-07, |
|
"loss": 0.0594, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.795373665480427, |
|
"grad_norm": 2.9034377290065296, |
|
"learning_rate": 2.560710594183552e-07, |
|
"loss": 0.0537, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.7971530249110321, |
|
"grad_norm": 2.8338368574878787, |
|
"learning_rate": 2.5167459355635524e-07, |
|
"loss": 0.0664, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.798932384341637, |
|
"grad_norm": 3.2013351867422064, |
|
"learning_rate": 2.473152220058039e-07, |
|
"loss": 0.0574, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.800711743772242, |
|
"grad_norm": 3.713380705266405, |
|
"learning_rate": 2.429929788224722e-07, |
|
"loss": 0.0752, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.802491103202847, |
|
"grad_norm": 3.019698601220017, |
|
"learning_rate": 2.38707897772083e-07, |
|
"loss": 0.0789, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.8042704626334518, |
|
"grad_norm": 3.168339547670028, |
|
"learning_rate": 2.3446001233004333e-07, |
|
"loss": 0.0856, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.806049822064057, |
|
"grad_norm": 3.091122366073355, |
|
"learning_rate": 2.3024935568118745e-07, |
|
"loss": 0.0599, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.8078291814946619, |
|
"grad_norm": 2.6670570026088476, |
|
"learning_rate": 2.2607596071951288e-07, |
|
"loss": 0.0455, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.809608540925267, |
|
"grad_norm": 2.5794471673496857, |
|
"learning_rate": 2.2193986004792667e-07, |
|
"loss": 0.0532, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.811387900355872, |
|
"grad_norm": 2.3729839359330867, |
|
"learning_rate": 2.1784108597799058e-07, |
|
"loss": 0.0377, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.8131672597864767, |
|
"grad_norm": 2.5847335483196874, |
|
"learning_rate": 2.1377967052966685e-07, |
|
"loss": 0.0521, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.814946619217082, |
|
"grad_norm": 2.5537359564210163, |
|
"learning_rate": 2.0975564543107007e-07, |
|
"loss": 0.0602, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.8167259786476868, |
|
"grad_norm": 2.9478129624731713, |
|
"learning_rate": 2.057690421182168e-07, |
|
"loss": 0.0551, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.8185053380782918, |
|
"grad_norm": 2.6428907737400253, |
|
"learning_rate": 2.01819891734783e-07, |
|
"loss": 0.0533, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.8202846975088969, |
|
"grad_norm": 2.884618502948543, |
|
"learning_rate": 1.979082251318576e-07, |
|
"loss": 0.0583, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.8220640569395017, |
|
"grad_norm": 2.931286121728397, |
|
"learning_rate": 1.9403407286770592e-07, |
|
"loss": 0.0547, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.8238434163701067, |
|
"grad_norm": 2.4902702635410745, |
|
"learning_rate": 1.9019746520752502e-07, |
|
"loss": 0.0539, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.8256227758007118, |
|
"grad_norm": 2.459533241577976, |
|
"learning_rate": 1.8639843212321206e-07, |
|
"loss": 0.0494, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.8274021352313166, |
|
"grad_norm": 2.716735792277242, |
|
"learning_rate": 1.826370032931285e-07, |
|
"loss": 0.0562, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.8291814946619218, |
|
"grad_norm": 2.9672051343039105, |
|
"learning_rate": 1.789132081018674e-07, |
|
"loss": 0.0508, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.8309608540925266, |
|
"grad_norm": 2.6209610020534826, |
|
"learning_rate": 1.7522707564002706e-07, |
|
"loss": 0.0522, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.8327402135231317, |
|
"grad_norm": 2.910649246396408, |
|
"learning_rate": 1.7157863470397718e-07, |
|
"loss": 0.047, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.8345195729537367, |
|
"grad_norm": 2.926573555897664, |
|
"learning_rate": 1.6796791379564138e-07, |
|
"loss": 0.0573, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.8362989323843415, |
|
"grad_norm": 3.118525781423007, |
|
"learning_rate": 1.6439494112227173e-07, |
|
"loss": 0.0532, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.8380782918149468, |
|
"grad_norm": 3.0359877864990468, |
|
"learning_rate": 1.6085974459622567e-07, |
|
"loss": 0.0558, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.8398576512455516, |
|
"grad_norm": 2.6027489910436468, |
|
"learning_rate": 1.573623518347517e-07, |
|
"loss": 0.0454, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.8416370106761566, |
|
"grad_norm": 2.61107359714197, |
|
"learning_rate": 1.5390279015977117e-07, |
|
"loss": 0.0636, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.8434163701067616, |
|
"grad_norm": 2.607141570260676, |
|
"learning_rate": 1.5048108659766693e-07, |
|
"loss": 0.0532, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.8451957295373664, |
|
"grad_norm": 2.5208981660946774, |
|
"learning_rate": 1.470972678790711e-07, |
|
"loss": 0.0663, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.8469750889679717, |
|
"grad_norm": 3.1379745491159245, |
|
"learning_rate": 1.437513604386559e-07, |
|
"loss": 0.0862, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.8487544483985765, |
|
"grad_norm": 3.281694142818919, |
|
"learning_rate": 1.404433904149266e-07, |
|
"loss": 0.0642, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.8505338078291815, |
|
"grad_norm": 3.5754480507674016, |
|
"learning_rate": 1.3717338365001943e-07, |
|
"loss": 0.0602, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.8523131672597866, |
|
"grad_norm": 2.9569173122667287, |
|
"learning_rate": 1.3394136568949834e-07, |
|
"loss": 0.077, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.8540925266903914, |
|
"grad_norm": 4.392490713187842, |
|
"learning_rate": 1.307473617821553e-07, |
|
"loss": 0.0759, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.8558718861209964, |
|
"grad_norm": 3.515965124350243, |
|
"learning_rate": 1.275913968798137e-07, |
|
"loss": 0.062, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.8576512455516014, |
|
"grad_norm": 2.8201211139689413, |
|
"learning_rate": 1.2447349563713186e-07, |
|
"loss": 0.0703, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.8594306049822062, |
|
"grad_norm": 3.177131414977041, |
|
"learning_rate": 1.213936824114137e-07, |
|
"loss": 0.0629, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.8612099644128115, |
|
"grad_norm": 3.6558088304914236, |
|
"learning_rate": 1.1835198126241509e-07, |
|
"loss": 0.068, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.8629893238434163, |
|
"grad_norm": 3.0831640004488254, |
|
"learning_rate": 1.1534841595215617e-07, |
|
"loss": 0.0632, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.8647686832740213, |
|
"grad_norm": 2.6480339178367993, |
|
"learning_rate": 1.1238300994473983e-07, |
|
"loss": 0.0444, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.8665480427046264, |
|
"grad_norm": 3.1774625025731478, |
|
"learning_rate": 1.0945578640616183e-07, |
|
"loss": 0.0923, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.8683274021352312, |
|
"grad_norm": 2.623374248899996, |
|
"learning_rate": 1.0656676820413603e-07, |
|
"loss": 0.0401, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.8701067615658364, |
|
"grad_norm": 2.7492547369658333, |
|
"learning_rate": 1.0371597790791166e-07, |
|
"loss": 0.0595, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 1.8718861209964412, |
|
"grad_norm": 3.0660405308361005, |
|
"learning_rate": 1.0090343778809908e-07, |
|
"loss": 0.0605, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.8736654804270463, |
|
"grad_norm": 3.6668355268679447, |
|
"learning_rate": 9.812916981649433e-08, |
|
"loss": 0.0685, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 1.8754448398576513, |
|
"grad_norm": 2.813183370597854, |
|
"learning_rate": 9.539319566590766e-08, |
|
"loss": 0.0675, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.8772241992882561, |
|
"grad_norm": 2.934170632029326, |
|
"learning_rate": 9.269553670999743e-08, |
|
"loss": 0.0728, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.8790035587188612, |
|
"grad_norm": 4.102875550503117, |
|
"learning_rate": 9.003621402309815e-08, |
|
"loss": 0.0594, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.8807829181494662, |
|
"grad_norm": 2.9963430646447757, |
|
"learning_rate": 8.741524838005888e-08, |
|
"loss": 0.0554, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 1.8825622775800712, |
|
"grad_norm": 3.100994506467712, |
|
"learning_rate": 8.483266025608061e-08, |
|
"loss": 0.0469, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.8843416370106763, |
|
"grad_norm": 2.5844271969213013, |
|
"learning_rate": 8.228846982655525e-08, |
|
"loss": 0.0528, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 1.886120996441281, |
|
"grad_norm": 2.6997991607840475, |
|
"learning_rate": 7.978269696691021e-08, |
|
"loss": 0.0648, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.887900355871886, |
|
"grad_norm": 3.1486305021602843, |
|
"learning_rate": 7.731536125244965e-08, |
|
"loss": 0.065, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 1.8896797153024911, |
|
"grad_norm": 3.526302108616637, |
|
"learning_rate": 7.488648195820513e-08, |
|
"loss": 0.0959, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 1.891459074733096, |
|
"grad_norm": 4.627372104411266, |
|
"learning_rate": 7.249607805878245e-08, |
|
"loss": 0.0558, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 1.8932384341637012, |
|
"grad_norm": 3.127927481867301, |
|
"learning_rate": 7.014416822821557e-08, |
|
"loss": 0.0689, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 1.895017793594306, |
|
"grad_norm": 2.5912338910789106, |
|
"learning_rate": 6.783077083981793e-08, |
|
"loss": 0.0428, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.896797153024911, |
|
"grad_norm": 2.9826326256803735, |
|
"learning_rate": 6.55559039660425e-08, |
|
"loss": 0.0635, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 1.898576512455516, |
|
"grad_norm": 2.968945184646842, |
|
"learning_rate": 6.331958537833693e-08, |
|
"loss": 0.0605, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 1.9003558718861209, |
|
"grad_norm": 3.009385936924448, |
|
"learning_rate": 6.112183254700866e-08, |
|
"loss": 0.0615, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 1.9021352313167261, |
|
"grad_norm": 2.8973922915372037, |
|
"learning_rate": 5.8962662641083856e-08, |
|
"loss": 0.0602, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 1.903914590747331, |
|
"grad_norm": 3.1414984443721954, |
|
"learning_rate": 5.6842092528176516e-08, |
|
"loss": 0.0588, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.905693950177936, |
|
"grad_norm": 2.8465228703779735, |
|
"learning_rate": 5.476013877435626e-08, |
|
"loss": 0.0606, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 1.907473309608541, |
|
"grad_norm": 3.0771605024773256, |
|
"learning_rate": 5.271681764401848e-08, |
|
"loss": 0.0683, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.9092526690391458, |
|
"grad_norm": 2.535541833859978, |
|
"learning_rate": 5.071214509975775e-08, |
|
"loss": 0.0497, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 1.9110320284697508, |
|
"grad_norm": 3.33771958996778, |
|
"learning_rate": 4.8746136802240716e-08, |
|
"loss": 0.0602, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 1.9128113879003559, |
|
"grad_norm": 3.2837947579931583, |
|
"learning_rate": 4.6818808110087875e-08, |
|
"loss": 0.0676, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.914590747330961, |
|
"grad_norm": 3.370063371385513, |
|
"learning_rate": 4.493017407975087e-08, |
|
"loss": 0.0813, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 1.916370106761566, |
|
"grad_norm": 3.2507656960575173, |
|
"learning_rate": 4.308024946539424e-08, |
|
"loss": 0.0514, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 1.9181494661921707, |
|
"grad_norm": 2.75421176555794, |
|
"learning_rate": 4.1269048718783344e-08, |
|
"loss": 0.0477, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 1.9199288256227758, |
|
"grad_norm": 2.6901482212836116, |
|
"learning_rate": 3.9496585989167726e-08, |
|
"loss": 0.0562, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 1.9217081850533808, |
|
"grad_norm": 2.6249483224428323, |
|
"learning_rate": 3.776287512317345e-08, |
|
"loss": 0.0507, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.9234875444839856, |
|
"grad_norm": 3.097923721612598, |
|
"learning_rate": 3.606792966469375e-08, |
|
"loss": 0.0611, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 1.9252669039145909, |
|
"grad_norm": 2.5868886402489615, |
|
"learning_rate": 3.4411762854782426e-08, |
|
"loss": 0.0587, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 1.9270462633451957, |
|
"grad_norm": 3.027367813541252, |
|
"learning_rate": 3.279438763155174e-08, |
|
"loss": 0.0474, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 1.9288256227758007, |
|
"grad_norm": 2.8919179873639536, |
|
"learning_rate": 3.121581663007134e-08, |
|
"loss": 0.0621, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 1.9306049822064058, |
|
"grad_norm": 3.298336791782147, |
|
"learning_rate": 2.967606218226837e-08, |
|
"loss": 0.0689, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.9323843416370106, |
|
"grad_norm": 2.5172472671681865, |
|
"learning_rate": 2.8175136316832e-08, |
|
"loss": 0.0488, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 1.9341637010676158, |
|
"grad_norm": 3.6133839854516507, |
|
"learning_rate": 2.6713050759120117e-08, |
|
"loss": 0.0952, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 1.9359430604982206, |
|
"grad_norm": 3.0499271112442905, |
|
"learning_rate": 2.528981693106558e-08, |
|
"loss": 0.061, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 1.9377224199288257, |
|
"grad_norm": 2.827408924803449, |
|
"learning_rate": 2.3905445951089013e-08, |
|
"loss": 0.0658, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 1.9395017793594307, |
|
"grad_norm": 3.438694667315307, |
|
"learning_rate": 2.2559948634011673e-08, |
|
"loss": 0.0595, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.9412811387900355, |
|
"grad_norm": 3.2506032355889, |
|
"learning_rate": 2.125333549096942e-08, |
|
"loss": 0.0704, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 1.9430604982206405, |
|
"grad_norm": 2.7455373308517053, |
|
"learning_rate": 1.9985616729332747e-08, |
|
"loss": 0.0618, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 1.9448398576512456, |
|
"grad_norm": 3.253799894556552, |
|
"learning_rate": 1.8756802252625773e-08, |
|
"loss": 0.0734, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 1.9466192170818504, |
|
"grad_norm": 2.883224562268727, |
|
"learning_rate": 1.75669016604485e-08, |
|
"loss": 0.0488, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 1.9483985765124556, |
|
"grad_norm": 2.7938585816623416, |
|
"learning_rate": 1.6415924248403547e-08, |
|
"loss": 0.0399, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.9501779359430604, |
|
"grad_norm": 3.4030463680948015, |
|
"learning_rate": 1.5303879008021773e-08, |
|
"loss": 0.0765, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 1.9519572953736655, |
|
"grad_norm": 2.6990758136062856, |
|
"learning_rate": 1.4230774626691756e-08, |
|
"loss": 0.0581, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 1.9537366548042705, |
|
"grad_norm": 3.389386239077509, |
|
"learning_rate": 1.3196619487594875e-08, |
|
"loss": 0.0748, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 1.9555160142348753, |
|
"grad_norm": 3.3220492475171115, |
|
"learning_rate": 1.2201421669636448e-08, |
|
"loss": 0.0734, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 1.9572953736654806, |
|
"grad_norm": 2.8561578597097523, |
|
"learning_rate": 1.1245188947384133e-08, |
|
"loss": 0.06, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.9590747330960854, |
|
"grad_norm": 2.9553076142678485, |
|
"learning_rate": 1.0327928791006858e-08, |
|
"loss": 0.0612, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 1.9608540925266904, |
|
"grad_norm": 2.8681407086226463, |
|
"learning_rate": 9.449648366217645e-09, |
|
"loss": 0.0608, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 1.9626334519572954, |
|
"grad_norm": 2.9847265874024136, |
|
"learning_rate": 8.61035453421588e-09, |
|
"loss": 0.061, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 1.9644128113879002, |
|
"grad_norm": 2.5635406052862724, |
|
"learning_rate": 7.81005385163458e-09, |
|
"loss": 0.0591, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 1.9661921708185055, |
|
"grad_norm": 2.652799834223619, |
|
"learning_rate": 7.048752570488205e-09, |
|
"loss": 0.0666, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.9679715302491103, |
|
"grad_norm": 3.3553681386458605, |
|
"learning_rate": 6.326456638125478e-09, |
|
"loss": 0.0862, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 1.9697508896797153, |
|
"grad_norm": 2.7939588520463916, |
|
"learning_rate": 5.643171697183314e-09, |
|
"loss": 0.0465, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 1.9715302491103204, |
|
"grad_norm": 3.0234200478731386, |
|
"learning_rate": 4.998903085539075e-09, |
|
"loss": 0.0599, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 1.9733096085409252, |
|
"grad_norm": 2.6114006674030006, |
|
"learning_rate": 4.393655836272825e-09, |
|
"loss": 0.0436, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 1.9750889679715302, |
|
"grad_norm": 3.2642238481474455, |
|
"learning_rate": 3.8274346776262514e-09, |
|
"loss": 0.0542, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.9768683274021353, |
|
"grad_norm": 3.1306419328777384, |
|
"learning_rate": 3.300244032966582e-09, |
|
"loss": 0.0644, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 1.97864768683274, |
|
"grad_norm": 3.4183555022287178, |
|
"learning_rate": 2.8120880207493928e-09, |
|
"loss": 0.0567, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 1.9804270462633453, |
|
"grad_norm": 2.9331625983616183, |
|
"learning_rate": 2.362970454491409e-09, |
|
"loss": 0.0576, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 1.9822064056939501, |
|
"grad_norm": 3.78563772760766, |
|
"learning_rate": 1.952894842735531e-09, |
|
"loss": 0.0681, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.9839857651245552, |
|
"grad_norm": 1.8332504794322084, |
|
"learning_rate": 1.5818643890258555e-09, |
|
"loss": 0.0373, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.9857651245551602, |
|
"grad_norm": 4.090337165282337, |
|
"learning_rate": 1.2498819918843609e-09, |
|
"loss": 0.0611, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 1.987544483985765, |
|
"grad_norm": 3.239959934017561, |
|
"learning_rate": 9.569502447837053e-10, |
|
"loss": 0.0558, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 1.9893238434163703, |
|
"grad_norm": 3.3440707032174175, |
|
"learning_rate": 7.03071436131686e-10, |
|
"loss": 0.0579, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 1.991103202846975, |
|
"grad_norm": 2.6836670773918088, |
|
"learning_rate": 4.882475492506977e-10, |
|
"loss": 0.062, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 1.99288256227758, |
|
"grad_norm": 2.9975646097604582, |
|
"learning_rate": 3.124802623627465e-10, |
|
"loss": 0.07, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.9946619217081851, |
|
"grad_norm": 3.2646641933007694, |
|
"learning_rate": 1.7577094857557097e-10, |
|
"loss": 0.0851, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 1.99644128113879, |
|
"grad_norm": 3.0011383034230166, |
|
"learning_rate": 7.812067587487093e-11, |
|
"loss": 0.0738, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 1.998220640569395, |
|
"grad_norm": 2.5112479996607475, |
|
"learning_rate": 1.9530207111539967e-11, |
|
"loss": 0.0514, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 2.2623503612720475, |
|
"learning_rate": 0.0, |
|
"loss": 0.0533, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 1124, |
|
"total_flos": 2468145598464.0, |
|
"train_loss": 0.11588864623373407, |
|
"train_runtime": 685.3878, |
|
"train_samples_per_second": 13.111, |
|
"train_steps_per_second": 1.64 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1124, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 2000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2468145598464.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|