|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5781153767193594,
  "eval_steps": 88,
  "global_step": 704,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
    {
      "epoch": 0.000821186614658181,
      "grad_norm": 0.480191707611084,
      "learning_rate": 2e-05,
      "loss": 1.332,
      "step": 1
    },
    {
      "epoch": 0.001642373229316362,
      "grad_norm": 0.43048733472824097,
      "learning_rate": 4e-05,
      "loss": 1.2784,
      "step": 2
    },
    {
      "epoch": 0.002463559843974543,
      "grad_norm": 0.33739173412323,
      "learning_rate": 6e-05,
      "loss": 1.3286,
      "step": 3
    },
    {
      "epoch": 0.003284746458632724,
      "grad_norm": 0.432579904794693,
      "learning_rate": 8e-05,
      "loss": 1.3079,
      "step": 4
    },
    {
      "epoch": 0.0041059330732909054,
      "grad_norm": 0.3490436375141144,
      "learning_rate": 0.0001,
      "loss": 1.2182,
      "step": 5
    },
    {
      "epoch": 0.004927119687949086,
      "grad_norm": 0.20206260681152344,
      "learning_rate": 9.997257268239166e-05,
      "loss": 1.2828,
      "step": 6
    },
    {
      "epoch": 0.005748306302607267,
      "grad_norm": 0.15355628728866577,
      "learning_rate": 9.994514536478333e-05,
      "loss": 1.1616,
      "step": 7
    },
    {
      "epoch": 0.006569492917265448,
      "grad_norm": 0.16756780445575714,
      "learning_rate": 9.9917718047175e-05,
      "loss": 1.0935,
      "step": 8
    },
    {
      "epoch": 0.00739067953192363,
      "grad_norm": 0.17429664731025696,
      "learning_rate": 9.989029072956665e-05,
      "loss": 0.9694,
      "step": 9
    },
    {
      "epoch": 0.008211866146581811,
      "grad_norm": 0.22355175018310547,
      "learning_rate": 9.986286341195832e-05,
      "loss": 0.9407,
      "step": 10
    },
    {
      "epoch": 0.009033052761239991,
      "grad_norm": 0.33407703042030334,
      "learning_rate": 9.983543609434997e-05,
      "loss": 0.7717,
      "step": 11
    },
    {
      "epoch": 0.009854239375898173,
      "grad_norm": 0.47473278641700745,
      "learning_rate": 9.980800877674164e-05,
      "loss": 0.7763,
      "step": 12
    },
    {
      "epoch": 0.010675425990556354,
      "grad_norm": 0.2812059819698334,
      "learning_rate": 9.978058145913331e-05,
      "loss": 0.6258,
      "step": 13
    },
    {
      "epoch": 0.011496612605214535,
      "grad_norm": 0.23547925055027008,
      "learning_rate": 9.975315414152496e-05,
      "loss": 0.5968,
      "step": 14
    },
    {
      "epoch": 0.012317799219872716,
      "grad_norm": 0.18453630805015564,
      "learning_rate": 9.972572682391662e-05,
      "loss": 0.5368,
      "step": 15
    },
    {
      "epoch": 0.013138985834530896,
      "grad_norm": 0.16103577613830566,
      "learning_rate": 9.969829950630828e-05,
      "loss": 0.4909,
      "step": 16
    },
    {
      "epoch": 0.013960172449189078,
      "grad_norm": 0.18651455640792847,
      "learning_rate": 9.967087218869995e-05,
      "loss": 0.5135,
      "step": 17
    },
    {
      "epoch": 0.01478135906384726,
      "grad_norm": 0.11300642043352127,
      "learning_rate": 9.96434448710916e-05,
      "loss": 0.5069,
      "step": 18
    },
    {
      "epoch": 0.01560254567850544,
      "grad_norm": 0.10415703803300858,
      "learning_rate": 9.961601755348327e-05,
      "loss": 0.4851,
      "step": 19
    },
    {
      "epoch": 0.016423732293163622,
      "grad_norm": 0.11693017929792404,
      "learning_rate": 9.958859023587493e-05,
      "loss": 0.4625,
      "step": 20
    },
    {
      "epoch": 0.017244918907821802,
      "grad_norm": 0.10035043954849243,
      "learning_rate": 9.95611629182666e-05,
      "loss": 0.4822,
      "step": 21
    },
    {
      "epoch": 0.018066105522479982,
      "grad_norm": 0.10483390837907791,
      "learning_rate": 9.953373560065826e-05,
      "loss": 0.4342,
      "step": 22
    },
    {
      "epoch": 0.018887292137138165,
      "grad_norm": 2.8405802249908447,
      "learning_rate": 9.950630828304992e-05,
      "loss": 0.4664,
      "step": 23
    },
    {
      "epoch": 0.019708478751796345,
      "grad_norm": 0.13821998238563538,
      "learning_rate": 9.947888096544159e-05,
      "loss": 0.4468,
      "step": 24
    },
    {
      "epoch": 0.020529665366454525,
      "grad_norm": 0.1991378366947174,
      "learning_rate": 9.945145364783325e-05,
      "loss": 0.4605,
      "step": 25
    },
    {
      "epoch": 0.02135085198111271,
      "grad_norm": 0.07619134336709976,
      "learning_rate": 9.942402633022491e-05,
      "loss": 0.4597,
      "step": 26
    },
    {
      "epoch": 0.02217203859577089,
      "grad_norm": 0.13373583555221558,
      "learning_rate": 9.939659901261658e-05,
      "loss": 0.4626,
      "step": 27
    },
    {
      "epoch": 0.02299322521042907,
      "grad_norm": 0.09962721168994904,
      "learning_rate": 9.936917169500823e-05,
      "loss": 0.4638,
      "step": 28
    },
    {
      "epoch": 0.023814411825087253,
      "grad_norm": 0.09395964443683624,
      "learning_rate": 9.93417443773999e-05,
      "loss": 0.4569,
      "step": 29
    },
    {
      "epoch": 0.024635598439745433,
      "grad_norm": 0.09109952300786972,
      "learning_rate": 9.931431705979157e-05,
      "loss": 0.4439,
      "step": 30
    },
    {
      "epoch": 0.025456785054403613,
      "grad_norm": 0.10370515286922455,
      "learning_rate": 9.928688974218322e-05,
      "loss": 0.4425,
      "step": 31
    },
    {
      "epoch": 0.026277971669061793,
      "grad_norm": 0.2153477966785431,
      "learning_rate": 9.925946242457488e-05,
      "loss": 0.4503,
      "step": 32
    },
    {
      "epoch": 0.027099158283719976,
      "grad_norm": 0.08772841095924377,
      "learning_rate": 9.923203510696654e-05,
      "loss": 0.419,
      "step": 33
    },
    {
      "epoch": 0.027920344898378156,
      "grad_norm": 0.10951374471187592,
      "learning_rate": 9.920460778935821e-05,
      "loss": 0.4353,
      "step": 34
    },
    {
      "epoch": 0.028741531513036336,
      "grad_norm": 0.09190870076417923,
      "learning_rate": 9.917718047174987e-05,
      "loss": 0.5196,
      "step": 35
    },
    {
      "epoch": 0.02956271812769452,
      "grad_norm": 0.07667124271392822,
      "learning_rate": 9.914975315414153e-05,
      "loss": 0.4358,
      "step": 36
    },
    {
      "epoch": 0.0303839047423527,
      "grad_norm": 0.1514267474412918,
      "learning_rate": 9.912232583653319e-05,
      "loss": 0.411,
      "step": 37
    },
    {
      "epoch": 0.03120509135701088,
      "grad_norm": 0.09086549282073975,
      "learning_rate": 9.909489851892486e-05,
      "loss": 0.4003,
      "step": 38
    },
    {
      "epoch": 0.032026277971669063,
      "grad_norm": 0.2616782486438751,
      "learning_rate": 9.906747120131652e-05,
      "loss": 0.4842,
      "step": 39
    },
    {
      "epoch": 0.032847464586327244,
      "grad_norm": 0.11908283084630966,
      "learning_rate": 9.904004388370818e-05,
      "loss": 0.4143,
      "step": 40
    },
    {
      "epoch": 0.033668651200985424,
      "grad_norm": 0.07770542800426483,
      "learning_rate": 9.901261656609983e-05,
      "loss": 0.3873,
      "step": 41
    },
    {
      "epoch": 0.034489837815643604,
      "grad_norm": 0.08934606611728668,
      "learning_rate": 9.89851892484915e-05,
      "loss": 0.4235,
      "step": 42
    },
    {
      "epoch": 0.035311024430301784,
      "grad_norm": 0.09303563088178635,
      "learning_rate": 9.895776193088317e-05,
      "loss": 0.4103,
      "step": 43
    },
    {
      "epoch": 0.036132211044959964,
      "grad_norm": 0.08622181415557861,
      "learning_rate": 9.893033461327482e-05,
      "loss": 0.448,
      "step": 44
    },
    {
      "epoch": 0.03695339765961815,
      "grad_norm": 0.08822862058877945,
      "learning_rate": 9.890290729566649e-05,
      "loss": 0.3855,
      "step": 45
    },
    {
      "epoch": 0.03777458427427633,
      "grad_norm": 0.08557698875665665,
      "learning_rate": 9.887547997805814e-05,
      "loss": 0.3945,
      "step": 46
    },
    {
      "epoch": 0.03859577088893451,
      "grad_norm": 0.07540106773376465,
      "learning_rate": 9.884805266044981e-05,
      "loss": 0.4192,
      "step": 47
    },
    {
      "epoch": 0.03941695750359269,
      "grad_norm": 0.1023702397942543,
      "learning_rate": 9.882062534284148e-05,
      "loss": 0.4126,
      "step": 48
    },
    {
      "epoch": 0.04023814411825087,
      "grad_norm": 0.07779772579669952,
      "learning_rate": 9.879319802523313e-05,
      "loss": 0.4244,
      "step": 49
    },
    {
      "epoch": 0.04105933073290905,
      "grad_norm": 0.08826564252376556,
      "learning_rate": 9.876577070762479e-05,
      "loss": 0.415,
      "step": 50
    },
    {
      "epoch": 0.04188051734756724,
      "grad_norm": 0.08254576474428177,
      "learning_rate": 9.873834339001646e-05,
      "loss": 0.4346,
      "step": 51
    },
    {
      "epoch": 0.04270170396222542,
      "grad_norm": 0.08287151902914047,
      "learning_rate": 9.871091607240812e-05,
      "loss": 0.4142,
      "step": 52
    },
    {
      "epoch": 0.0435228905768836,
      "grad_norm": 0.08196476101875305,
      "learning_rate": 9.868348875479978e-05,
      "loss": 0.3822,
      "step": 53
    },
    {
      "epoch": 0.04434407719154178,
      "grad_norm": 0.08654092252254486,
      "learning_rate": 9.865606143719145e-05,
      "loss": 0.3937,
      "step": 54
    },
    {
      "epoch": 0.04516526380619996,
      "grad_norm": 0.1102684736251831,
      "learning_rate": 9.86286341195831e-05,
      "loss": 0.4508,
      "step": 55
    },
    {
      "epoch": 0.04598645042085814,
      "grad_norm": 0.08240954577922821,
      "learning_rate": 9.860120680197478e-05,
      "loss": 0.4069,
      "step": 56
    },
    {
      "epoch": 0.04680763703551632,
      "grad_norm": 0.08542217314243317,
      "learning_rate": 9.857377948436644e-05,
      "loss": 0.5002,
      "step": 57
    },
    {
      "epoch": 0.047628823650174505,
      "grad_norm": 0.08390172570943832,
      "learning_rate": 9.854635216675809e-05,
      "loss": 0.3851,
      "step": 58
    },
    {
      "epoch": 0.048450010264832685,
      "grad_norm": 0.10694168508052826,
      "learning_rate": 9.851892484914976e-05,
      "loss": 0.4026,
      "step": 59
    },
    {
      "epoch": 0.049271196879490865,
      "grad_norm": 0.0852806493639946,
      "learning_rate": 9.849149753154143e-05,
      "loss": 0.424,
      "step": 60
    },
    {
      "epoch": 0.050092383494149045,
      "grad_norm": 0.15425831079483032,
      "learning_rate": 9.846407021393308e-05,
      "loss": 0.4307,
      "step": 61
    },
    {
      "epoch": 0.050913570108807225,
      "grad_norm": 0.08546218276023865,
      "learning_rate": 9.843664289632475e-05,
      "loss": 0.4386,
      "step": 62
    },
    {
      "epoch": 0.051734756723465405,
      "grad_norm": 0.08588795363903046,
      "learning_rate": 9.84092155787164e-05,
      "loss": 0.4433,
      "step": 63
    },
    {
      "epoch": 0.052555943338123585,
      "grad_norm": 0.08901514858007431,
      "learning_rate": 9.838178826110807e-05,
      "loss": 0.4263,
      "step": 64
    },
    {
      "epoch": 0.05337712995278177,
      "grad_norm": 0.17668095231056213,
      "learning_rate": 9.835436094349974e-05,
      "loss": 0.407,
      "step": 65
    },
    {
      "epoch": 0.05419831656743995,
      "grad_norm": 0.081763856112957,
      "learning_rate": 9.83269336258914e-05,
      "loss": 0.374,
      "step": 66
    },
    {
      "epoch": 0.05501950318209813,
      "grad_norm": 0.09063572436571121,
      "learning_rate": 9.829950630828305e-05,
      "loss": 0.3839,
      "step": 67
    },
    {
      "epoch": 0.05584068979675631,
      "grad_norm": 0.08264392614364624,
      "learning_rate": 9.827207899067472e-05,
      "loss": 0.4676,
      "step": 68
    },
    {
      "epoch": 0.05666187641141449,
      "grad_norm": 0.0827123150229454,
      "learning_rate": 9.824465167306638e-05,
      "loss": 0.3801,
      "step": 69
    },
    {
      "epoch": 0.05748306302607267,
      "grad_norm": 0.07972189038991928,
      "learning_rate": 9.821722435545804e-05,
      "loss": 0.3758,
      "step": 70
    },
    {
      "epoch": 0.05830424964073085,
      "grad_norm": 0.08738942444324493,
      "learning_rate": 9.81897970378497e-05,
      "loss": 0.3634,
      "step": 71
    },
    {
      "epoch": 0.05912543625538904,
      "grad_norm": 0.08442792296409607,
      "learning_rate": 9.816236972024136e-05,
      "loss": 0.3803,
      "step": 72
    },
    {
      "epoch": 0.05994662287004722,
      "grad_norm": 0.2751137316226959,
      "learning_rate": 9.813494240263303e-05,
      "loss": 0.3869,
      "step": 73
    },
    {
      "epoch": 0.0607678094847054,
      "grad_norm": 0.19429296255111694,
      "learning_rate": 9.81075150850247e-05,
      "loss": 0.3992,
      "step": 74
    },
    {
      "epoch": 0.06158899609936358,
      "grad_norm": 0.08392605930566788,
      "learning_rate": 9.808008776741635e-05,
      "loss": 0.361,
      "step": 75
    },
    {
      "epoch": 0.06241018271402176,
      "grad_norm": 2.01043963432312,
      "learning_rate": 9.8052660449808e-05,
      "loss": 0.3787,
      "step": 76
    },
    {
      "epoch": 0.06323136932867994,
      "grad_norm": 0.0808538943529129,
      "learning_rate": 9.802523313219967e-05,
      "loss": 0.4013,
      "step": 77
    },
    {
      "epoch": 0.06405255594333813,
      "grad_norm": 0.07846518605947495,
      "learning_rate": 9.799780581459134e-05,
      "loss": 0.3808,
      "step": 78
    },
    {
      "epoch": 0.0648737425579963,
      "grad_norm": 0.08152970671653748,
      "learning_rate": 9.7970378496983e-05,
      "loss": 0.4076,
      "step": 79
    },
    {
      "epoch": 0.06569492917265449,
      "grad_norm": 0.07745791226625443,
      "learning_rate": 9.794295117937466e-05,
      "loss": 0.3981,
      "step": 80
    },
    {
      "epoch": 0.06651611578731266,
      "grad_norm": 0.0842173770070076,
      "learning_rate": 9.791552386176632e-05,
      "loss": 0.4469,
      "step": 81
    },
    {
      "epoch": 0.06733730240197085,
      "grad_norm": 0.07920663058757782,
      "learning_rate": 9.788809654415799e-05,
      "loss": 0.3988,
      "step": 82
    },
    {
      "epoch": 0.06815848901662903,
      "grad_norm": 0.07899456471204758,
      "learning_rate": 9.786066922654965e-05,
      "loss": 0.4418,
      "step": 83
    },
    {
      "epoch": 0.06897967563128721,
      "grad_norm": 0.07229727506637573,
      "learning_rate": 9.783324190894131e-05,
      "loss": 0.4016,
      "step": 84
    },
    {
      "epoch": 0.0698008622459454,
      "grad_norm": 0.08062436431646347,
      "learning_rate": 9.780581459133296e-05,
      "loss": 0.3762,
      "step": 85
    },
    {
      "epoch": 0.07062204886060357,
      "grad_norm": 0.08440960198640823,
      "learning_rate": 9.777838727372464e-05,
      "loss": 0.4362,
      "step": 86
    },
    {
      "epoch": 0.07144323547526175,
      "grad_norm": 0.07544733583927155,
      "learning_rate": 9.77509599561163e-05,
      "loss": 0.4135,
      "step": 87
    },
    {
      "epoch": 0.07226442208991993,
      "grad_norm": 0.1760656237602234,
      "learning_rate": 9.772353263850797e-05,
      "loss": 0.3887,
      "step": 88
    },
    {
      "epoch": 0.07226442208991993,
      "eval_runtime": 494.3052,
      "eval_samples_per_second": 0.399,
      "eval_steps_per_second": 0.2,
      "step": 88
    },
|
    {
      "epoch": 0.07308560870457811,
      "grad_norm": 0.08049104362726212,
      "learning_rate": 9.769610532089962e-05,
      "loss": 0.4272,
      "step": 89
    },
    {
      "epoch": 0.0739067953192363,
      "grad_norm": 0.07617965340614319,
      "learning_rate": 9.766867800329129e-05,
      "loss": 0.3873,
      "step": 90
    },
    {
      "epoch": 0.07472798193389447,
      "grad_norm": 0.07975462824106216,
      "learning_rate": 9.764125068568296e-05,
      "loss": 0.3903,
      "step": 91
    },
    {
      "epoch": 0.07554916854855266,
      "grad_norm": 0.08299189805984497,
      "learning_rate": 9.761382336807461e-05,
      "loss": 0.404,
      "step": 92
    },
    {
      "epoch": 0.07637035516321083,
      "grad_norm": 0.08578819036483765,
      "learning_rate": 9.758639605046626e-05,
      "loss": 0.3983,
      "step": 93
    },
    {
      "epoch": 0.07719154177786902,
      "grad_norm": 0.10872071981430054,
      "learning_rate": 9.755896873285793e-05,
      "loss": 0.3553,
      "step": 94
    },
    {
      "epoch": 0.07801272839252721,
      "grad_norm": 0.08121436834335327,
      "learning_rate": 9.75315414152496e-05,
      "loss": 0.408,
      "step": 95
    },
    {
      "epoch": 0.07883391500718538,
      "grad_norm": 0.09175996482372284,
      "learning_rate": 9.750411409764125e-05,
      "loss": 0.4008,
      "step": 96
    },
    {
      "epoch": 0.07965510162184357,
      "grad_norm": 0.08122789114713669,
      "learning_rate": 9.747668678003292e-05,
      "loss": 0.4045,
      "step": 97
    },
    {
      "epoch": 0.08047628823650174,
      "grad_norm": 0.08402436226606369,
      "learning_rate": 9.744925946242458e-05,
      "loss": 0.3814,
      "step": 98
    },
    {
      "epoch": 0.08129747485115993,
      "grad_norm": 0.08454861491918564,
      "learning_rate": 9.742183214481624e-05,
      "loss": 0.3904,
      "step": 99
    },
    {
      "epoch": 0.0821186614658181,
      "grad_norm": 0.08125888556241989,
      "learning_rate": 9.739440482720791e-05,
      "loss": 0.3681,
      "step": 100
    },
    {
      "epoch": 0.08293984808047629,
      "grad_norm": 0.08544078469276428,
      "learning_rate": 9.736697750959957e-05,
      "loss": 0.367,
      "step": 101
    },
    {
      "epoch": 0.08376103469513448,
      "grad_norm": 0.07539089769124985,
      "learning_rate": 9.733955019199122e-05,
      "loss": 0.3821,
      "step": 102
    },
    {
      "epoch": 0.08458222130979265,
      "grad_norm": 0.07529085874557495,
      "learning_rate": 9.731212287438289e-05,
      "loss": 0.4169,
      "step": 103
    },
    {
      "epoch": 0.08540340792445084,
      "grad_norm": 0.07588034868240356,
      "learning_rate": 9.728469555677456e-05,
      "loss": 0.4211,
      "step": 104
    },
    {
      "epoch": 0.08622459453910901,
      "grad_norm": 0.08019097149372101,
      "learning_rate": 9.725726823916621e-05,
      "loss": 0.4033,
      "step": 105
    },
    {
      "epoch": 0.0870457811537672,
      "grad_norm": 0.07878712564706802,
      "learning_rate": 9.722984092155788e-05,
      "loss": 0.3842,
      "step": 106
    },
    {
      "epoch": 0.08786696776842537,
      "grad_norm": 0.08083963394165039,
      "learning_rate": 9.720241360394953e-05,
      "loss": 0.3558,
      "step": 107
    },
    {
      "epoch": 0.08868815438308356,
      "grad_norm": 0.08340411633253098,
      "learning_rate": 9.71749862863412e-05,
      "loss": 0.4037,
      "step": 108
    },
    {
      "epoch": 0.08950934099774174,
      "grad_norm": 0.09317754209041595,
      "learning_rate": 9.714755896873287e-05,
      "loss": 0.3874,
      "step": 109
    },
    {
      "epoch": 0.09033052761239992,
      "grad_norm": 0.07143125683069229,
      "learning_rate": 9.712013165112452e-05,
      "loss": 0.3441,
      "step": 110
    },
    {
      "epoch": 0.0911517142270581,
      "grad_norm": 0.0909111350774765,
      "learning_rate": 9.709270433351618e-05,
      "loss": 0.3552,
      "step": 111
    },
    {
      "epoch": 0.09197290084171628,
      "grad_norm": 0.08041603863239288,
      "learning_rate": 9.706527701590785e-05,
      "loss": 0.394,
      "step": 112
    },
    {
      "epoch": 0.09279408745637446,
      "grad_norm": 0.08147992193698883,
      "learning_rate": 9.703784969829951e-05,
      "loss": 0.3916,
      "step": 113
    },
    {
      "epoch": 0.09361527407103264,
      "grad_norm": 0.08365318924188614,
      "learning_rate": 9.701042238069117e-05,
      "loss": 0.4075,
      "step": 114
    },
    {
      "epoch": 0.09443646068569082,
      "grad_norm": 0.083246149122715,
      "learning_rate": 9.698299506308284e-05,
      "loss": 0.3566,
      "step": 115
    },
    {
      "epoch": 0.09525764730034901,
      "grad_norm": 0.0942649245262146,
      "learning_rate": 9.695556774547449e-05,
      "loss": 0.3516,
      "step": 116
    },
    {
      "epoch": 0.09607883391500718,
      "grad_norm": 0.08110091835260391,
      "learning_rate": 9.692814042786616e-05,
      "loss": 0.3902,
      "step": 117
    },
    {
      "epoch": 0.09690002052966537,
      "grad_norm": 0.43124014139175415,
      "learning_rate": 9.690071311025783e-05,
      "loss": 0.3956,
      "step": 118
    },
    {
      "epoch": 0.09772120714432354,
      "grad_norm": 0.08204706013202667,
      "learning_rate": 9.687328579264948e-05,
      "loss": 0.3865,
      "step": 119
    },
    {
      "epoch": 0.09854239375898173,
      "grad_norm": 0.08262762427330017,
      "learning_rate": 9.684585847504113e-05,
      "loss": 0.3777,
      "step": 120
    },
    {
      "epoch": 0.0993635803736399,
      "grad_norm": 0.08233962953090668,
      "learning_rate": 9.681843115743282e-05,
      "loss": 0.3976,
      "step": 121
    },
    {
      "epoch": 0.10018476698829809,
      "grad_norm": 0.7428120970726013,
      "learning_rate": 9.679100383982447e-05,
      "loss": 0.377,
      "step": 122
    },
    {
      "epoch": 0.10100595360295628,
      "grad_norm": 0.07909400761127472,
      "learning_rate": 9.676357652221614e-05,
      "loss": 0.3654,
      "step": 123
    },
    {
      "epoch": 0.10182714021761445,
      "grad_norm": 0.08714035898447037,
      "learning_rate": 9.673614920460779e-05,
      "loss": 0.4181,
      "step": 124
    },
    {
      "epoch": 0.10264832683227264,
      "grad_norm": 0.08017311245203018,
      "learning_rate": 9.670872188699946e-05,
      "loss": 0.4052,
      "step": 125
    },
    {
      "epoch": 0.10346951344693081,
      "grad_norm": 0.13821078836917877,
      "learning_rate": 9.668129456939113e-05,
      "loss": 0.3733,
      "step": 126
    },
    {
      "epoch": 0.104290700061589,
      "grad_norm": 0.1609969437122345,
      "learning_rate": 9.665386725178278e-05,
      "loss": 0.3686,
      "step": 127
    },
    {
      "epoch": 0.10511188667624717,
      "grad_norm": 0.1280309557914734,
      "learning_rate": 9.662643993417444e-05,
      "loss": 0.4036,
      "step": 128
    },
    {
      "epoch": 0.10593307329090536,
      "grad_norm": 0.08747898787260056,
      "learning_rate": 9.65990126165661e-05,
      "loss": 0.3587,
      "step": 129
    },
    {
      "epoch": 0.10675425990556354,
      "grad_norm": 0.1252209097146988,
      "learning_rate": 9.657158529895777e-05,
      "loss": 0.3626,
      "step": 130
    },
    {
      "epoch": 0.10757544652022172,
      "grad_norm": 0.09374388307332993,
      "learning_rate": 9.654415798134943e-05,
      "loss": 0.3824,
      "step": 131
    },
    {
      "epoch": 0.1083966331348799,
      "grad_norm": 0.08730709552764893,
      "learning_rate": 9.65167306637411e-05,
      "loss": 0.3827,
      "step": 132
    },
    {
      "epoch": 0.10921781974953808,
      "grad_norm": 0.07719024270772934,
      "learning_rate": 9.648930334613275e-05,
      "loss": 0.3762,
      "step": 133
    },
    {
      "epoch": 0.11003900636419627,
      "grad_norm": 0.08843278139829636,
      "learning_rate": 9.646187602852442e-05,
      "loss": 0.3843,
      "step": 134
    },
    {
      "epoch": 0.11086019297885444,
      "grad_norm": 0.08568207919597626,
      "learning_rate": 9.643444871091608e-05,
      "loss": 0.3594,
      "step": 135
    },
    {
      "epoch": 0.11168137959351263,
      "grad_norm": 0.08556952327489853,
      "learning_rate": 9.640702139330774e-05,
      "loss": 0.3886,
      "step": 136
    },
    {
      "epoch": 0.11250256620817081,
      "grad_norm": 0.09016801416873932,
      "learning_rate": 9.63795940756994e-05,
      "loss": 0.4023,
      "step": 137
    },
    {
      "epoch": 0.11332375282282899,
      "grad_norm": 0.08133590966463089,
      "learning_rate": 9.635216675809106e-05,
      "loss": 0.3634,
      "step": 138
    },
    {
      "epoch": 0.11414493943748717,
      "grad_norm": 0.2211730182170868,
      "learning_rate": 9.632473944048273e-05,
      "loss": 0.3518,
      "step": 139
    },
    {
      "epoch": 0.11496612605214535,
      "grad_norm": 0.08816584199666977,
      "learning_rate": 9.629731212287438e-05,
      "loss": 0.3727,
      "step": 140
    },
    {
      "epoch": 0.11578731266680353,
      "grad_norm": 0.11618969589471817,
      "learning_rate": 9.626988480526605e-05,
      "loss": 0.4247,
      "step": 141
    },
    {
      "epoch": 0.1166084992814617,
      "grad_norm": 0.09243030101060867,
      "learning_rate": 9.62424574876577e-05,
      "loss": 0.396,
      "step": 142
    },
    {
      "epoch": 0.11742968589611989,
      "grad_norm": 0.08566376566886902,
      "learning_rate": 9.621503017004937e-05,
      "loss": 0.3852,
      "step": 143
    },
    {
      "epoch": 0.11825087251077808,
      "grad_norm": 0.08220973610877991,
      "learning_rate": 9.618760285244104e-05,
      "loss": 0.3961,
      "step": 144
    },
    {
      "epoch": 0.11907205912543625,
      "grad_norm": 0.08240345865488052,
      "learning_rate": 9.61601755348327e-05,
      "loss": 0.3518,
      "step": 145
    },
    {
      "epoch": 0.11989324574009444,
      "grad_norm": 0.08472532778978348,
      "learning_rate": 9.613274821722435e-05,
      "loss": 0.3586,
      "step": 146
    },
    {
      "epoch": 0.12071443235475261,
      "grad_norm": 0.08407485485076904,
      "learning_rate": 9.610532089961602e-05,
      "loss": 0.3797,
      "step": 147
    },
    {
      "epoch": 0.1215356189694108,
      "grad_norm": 0.09284385293722153,
      "learning_rate": 9.607789358200769e-05,
      "loss": 0.3499,
      "step": 148
    },
    {
      "epoch": 0.12235680558406897,
      "grad_norm": 0.08499818295240402,
      "learning_rate": 9.605046626439934e-05,
      "loss": 0.3722,
      "step": 149
    },
    {
      "epoch": 0.12317799219872716,
      "grad_norm": 0.080271415412426,
      "learning_rate": 9.602303894679101e-05,
      "loss": 0.3871,
      "step": 150
    },
    {
      "epoch": 0.12399917881338535,
      "grad_norm": 0.07850060611963272,
      "learning_rate": 9.599561162918266e-05,
      "loss": 0.3679,
      "step": 151
    },
    {
      "epoch": 0.12482036542804352,
      "grad_norm": 0.07685016840696335,
      "learning_rate": 9.596818431157433e-05,
      "loss": 0.4078,
      "step": 152
    },
    {
      "epoch": 0.1256415520427017,
      "grad_norm": 0.09402357786893845,
      "learning_rate": 9.5940756993966e-05,
      "loss": 0.3996,
      "step": 153
    },
    {
      "epoch": 0.12646273865735988,
      "grad_norm": 0.08445476740598679,
      "learning_rate": 9.591332967635765e-05,
      "loss": 0.3841,
      "step": 154
    },
    {
      "epoch": 0.12728392527201807,
      "grad_norm": 0.08233911544084549,
      "learning_rate": 9.588590235874932e-05,
      "loss": 0.3529,
      "step": 155
    },
    {
      "epoch": 0.12810511188667625,
      "grad_norm": 0.07896068692207336,
      "learning_rate": 9.585847504114099e-05,
      "loss": 0.3585,
      "step": 156
    },
    {
      "epoch": 0.12892629850133444,
      "grad_norm": 0.0822276696562767,
      "learning_rate": 9.583104772353264e-05,
      "loss": 0.3962,
      "step": 157
    },
    {
      "epoch": 0.1297474851159926,
      "grad_norm": 0.07977598905563354,
      "learning_rate": 9.580362040592431e-05,
      "loss": 0.454,
      "step": 158
    },
    {
      "epoch": 0.1305686717306508,
      "grad_norm": 0.0857616737484932,
      "learning_rate": 9.577619308831597e-05,
      "loss": 0.3954,
      "step": 159
    },
    {
      "epoch": 0.13138985834530897,
      "grad_norm": 0.0874355211853981,
      "learning_rate": 9.574876577070763e-05,
      "loss": 0.3591,
      "step": 160
    },
    {
      "epoch": 0.13221104495996716,
      "grad_norm": 0.07877468317747116,
      "learning_rate": 9.57213384530993e-05,
      "loss": 0.348,
      "step": 161
    },
    {
      "epoch": 0.13303223157462532,
      "grad_norm": 0.08618593961000443,
      "learning_rate": 9.569391113549096e-05,
      "loss": 0.3723,
      "step": 162
    },
    {
      "epoch": 0.1338534181892835,
      "grad_norm": 0.08144336938858032,
      "learning_rate": 9.566648381788261e-05,
      "loss": 0.4146,
      "step": 163
    },
    {
      "epoch": 0.1346746048039417,
      "grad_norm": 0.07322760671377182,
      "learning_rate": 9.563905650027428e-05,
      "loss": 0.3107,
      "step": 164
    },
    {
      "epoch": 0.13549579141859988,
      "grad_norm": 0.08007095754146576,
      "learning_rate": 9.561162918266595e-05,
      "loss": 0.4128,
      "step": 165
    },
    {
      "epoch": 0.13631697803325807,
      "grad_norm": 0.09636646509170532,
      "learning_rate": 9.55842018650576e-05,
      "loss": 0.4089,
      "step": 166
    },
    {
      "epoch": 0.13713816464791623,
      "grad_norm": 0.08381053060293198,
      "learning_rate": 9.555677454744927e-05,
      "loss": 0.3624,
      "step": 167
    },
    {
      "epoch": 0.13795935126257441,
      "grad_norm": 0.07476504147052765,
      "learning_rate": 9.552934722984092e-05,
      "loss": 0.3906,
      "step": 168
    },
    {
      "epoch": 0.1387805378772326,
      "grad_norm": 0.0901239663362503,
      "learning_rate": 9.550191991223259e-05,
      "loss": 0.3378,
      "step": 169
    },
    {
      "epoch": 0.1396017244918908,
      "grad_norm": 0.0813356265425682,
      "learning_rate": 9.547449259462426e-05,
      "loss": 0.3627,
      "step": 170
    },
    {
      "epoch": 0.14042291110654898,
      "grad_norm": 0.14319093525409698,
      "learning_rate": 9.544706527701591e-05,
      "loss": 0.3512,
      "step": 171
    },
    {
      "epoch": 0.14124409772120713,
      "grad_norm": 0.13329866528511047,
      "learning_rate": 9.541963795940757e-05,
      "loss": 0.3809,
      "step": 172
    },
    {
      "epoch": 0.14206528433586532,
      "grad_norm": 0.0815596953034401,
      "learning_rate": 9.539221064179923e-05,
      "loss": 0.3444,
      "step": 173
    },
    {
      "epoch": 0.1428864709505235,
      "grad_norm": 0.08646956831216812,
      "learning_rate": 9.53647833241909e-05,
      "loss": 0.3699,
      "step": 174
    },
    {
      "epoch": 0.1437076575651817,
      "grad_norm": 0.09374339133501053,
      "learning_rate": 9.533735600658256e-05,
      "loss": 0.3569,
      "step": 175
    },
    {
      "epoch": 0.14452884417983985,
      "grad_norm": 0.0834718644618988,
      "learning_rate": 9.530992868897422e-05,
      "loss": 0.3708,
      "step": 176
    },
    {
      "epoch": 0.14452884417983985,
      "eval_runtime": 493.9539,
      "eval_samples_per_second": 0.399,
      "eval_steps_per_second": 0.2,
      "step": 176
    },
|
    {
      "epoch": 0.14535003079449804,
      "grad_norm": 0.08648290485143661,
      "learning_rate": 9.528250137136588e-05,
      "loss": 0.379,
      "step": 177
    },
    {
      "epoch": 0.14617121740915623,
      "grad_norm": 0.08577203750610352,
      "learning_rate": 9.525507405375755e-05,
      "loss": 0.4368,
      "step": 178
    },
    {
      "epoch": 0.14699240402381442,
      "grad_norm": 0.1023576483130455,
      "learning_rate": 9.522764673614921e-05,
      "loss": 0.3553,
      "step": 179
    },
    {
      "epoch": 0.1478135906384726,
      "grad_norm": 0.08062634617090225,
      "learning_rate": 9.520021941854087e-05,
      "loss": 0.3616,
      "step": 180
    },
    {
      "epoch": 0.14863477725313076,
      "grad_norm": 0.07487751543521881,
      "learning_rate": 9.517279210093252e-05,
      "loss": 0.4413,
      "step": 181
    },
    {
      "epoch": 0.14945596386778895,
      "grad_norm": 0.07332492619752884,
      "learning_rate": 9.514536478332419e-05,
      "loss": 0.3166,
      "step": 182
    },
    {
      "epoch": 0.15027715048244714,
      "grad_norm": 0.08658608049154282,
      "learning_rate": 9.511793746571586e-05,
      "loss": 0.332,
      "step": 183
    },
    {
      "epoch": 0.15109833709710532,
      "grad_norm": 0.10461894422769547,
      "learning_rate": 9.509051014810751e-05,
      "loss": 0.3653,
      "step": 184
    },
    {
      "epoch": 0.1519195237117635,
      "grad_norm": 0.19619494676589966,
      "learning_rate": 9.506308283049918e-05,
      "loss": 0.4224,
      "step": 185
    },
    {
      "epoch": 0.15274071032642167,
      "grad_norm": 0.08313202857971191,
      "learning_rate": 9.503565551289084e-05,
      "loss": 0.3512,
      "step": 186
    },
    {
      "epoch": 0.15356189694107986,
      "grad_norm": 0.08686342090368271,
      "learning_rate": 9.50082281952825e-05,
      "loss": 0.3352,
      "step": 187
    },
    {
      "epoch": 0.15438308355573804,
      "grad_norm": 0.09020522236824036,
      "learning_rate": 9.498080087767417e-05,
      "loss": 0.4138,
      "step": 188
    },
    {
      "epoch": 0.15520427017039623,
      "grad_norm": 0.0799839124083519,
      "learning_rate": 9.495337356006583e-05,
      "loss": 0.3436,
      "step": 189
    },
    {
      "epoch": 0.15602545678505442,
      "grad_norm": 0.08851379156112671,
      "learning_rate": 9.49259462424575e-05,
      "loss": 0.3933,
      "step": 190
    },
    {
      "epoch": 0.15684664339971258,
      "grad_norm": 0.08521082252264023,
      "learning_rate": 9.489851892484916e-05,
      "loss": 0.3667,
      "step": 191
    },
    {
      "epoch": 0.15766783001437076,
      "grad_norm": 0.09019312262535095,
      "learning_rate": 9.487109160724082e-05,
      "loss": 0.3556,
      "step": 192
    },
    {
      "epoch": 0.15848901662902895,
      "grad_norm": 0.08162654936313629,
      "learning_rate": 9.484366428963248e-05,
      "loss": 0.3864,
      "step": 193
    },
    {
      "epoch": 0.15931020324368714,
      "grad_norm": 0.08963490277528763,
      "learning_rate": 9.481623697202414e-05,
      "loss": 0.391,
      "step": 194
    },
    {
      "epoch": 0.1601313898583453,
      "grad_norm": 0.10057719051837921,
      "learning_rate": 9.47888096544158e-05,
      "loss": 0.3599,
      "step": 195
    },
    {
      "epoch": 0.16095257647300348,
      "grad_norm": 0.1694149523973465,
      "learning_rate": 9.476138233680747e-05,
      "loss": 0.3612,
      "step": 196
    },
    {
      "epoch": 0.16177376308766167,
      "grad_norm": 0.08255323767662048,
      "learning_rate": 9.473395501919913e-05,
      "loss": 0.3441,
      "step": 197
    },
    {
      "epoch": 0.16259494970231986,
      "grad_norm": 0.09228333085775375,
      "learning_rate": 9.470652770159078e-05,
      "loss": 0.3432,
      "step": 198
    },
    {
      "epoch": 0.16341613631697804,
      "grad_norm": 0.11736617982387543,
      "learning_rate": 9.467910038398245e-05,
      "loss": 0.3408,
      "step": 199
    },
    {
      "epoch": 0.1642373229316362,
      "grad_norm": 0.16636626422405243,
      "learning_rate": 9.465167306637412e-05,
      "loss": 0.3639,
      "step": 200
    },
    {
      "epoch": 0.1650585095462944,
      "grad_norm": 0.0912085771560669,
      "learning_rate": 9.462424574876577e-05,
      "loss": 0.4055,
      "step": 201
    },
    {
      "epoch": 0.16587969616095258,
      "grad_norm": 0.08452475070953369,
      "learning_rate": 9.459681843115744e-05,
      "loss": 0.3497,
      "step": 202
    },
    {
      "epoch": 0.16670088277561076,
      "grad_norm": 0.09225429594516754,
      "learning_rate": 9.45693911135491e-05,
      "loss": 0.3765,
      "step": 203
    },
    {
      "epoch": 0.16752206939026895,
      "grad_norm": 0.08500406891107559,
      "learning_rate": 9.454196379594076e-05,
      "loss": 0.3622,
      "step": 204
    },
    {
      "epoch": 0.1683432560049271,
      "grad_norm": 0.08277002722024918,
      "learning_rate": 9.451453647833243e-05,
      "loss": 0.3596,
      "step": 205
    },
    {
      "epoch": 0.1691644426195853,
      "grad_norm": 0.0855122059583664,
      "learning_rate": 9.448710916072408e-05,
      "loss": 0.3495,
      "step": 206
    },
    {
      "epoch": 0.16998562923424348,
      "grad_norm": 0.11409081518650055,
      "learning_rate": 9.445968184311574e-05,
      "loss": 0.3123,
      "step": 207
    },
    {
      "epoch": 0.17080681584890167,
      "grad_norm": 0.08898866921663284,
      "learning_rate": 9.443225452550741e-05,
      "loss": 0.3383,
      "step": 208
    },
    {
      "epoch": 0.17162800246355983,
      "grad_norm": 0.07967101782560349,
      "learning_rate": 9.440482720789908e-05,
      "loss": 0.3927,
      "step": 209
    },
    {
      "epoch": 0.17244918907821802,
      "grad_norm": 0.08267655968666077,
      "learning_rate": 9.437739989029073e-05,
      "loss": 0.3503,
      "step": 210
    },
    {
      "epoch": 0.1732703756928762,
      "grad_norm": 0.1024966835975647,
      "learning_rate": 9.43499725726824e-05,
      "loss": 0.343,
      "step": 211
    },
    {
      "epoch": 0.1740915623075344,
      "grad_norm": 0.11825034767389297,
      "learning_rate": 9.432254525507405e-05,
      "loss": 0.3694,
      "step": 212
    },
    {
      "epoch": 0.17491274892219258,
      "grad_norm": 0.08225111663341522,
      "learning_rate": 9.429511793746572e-05,
      "loss": 0.3625,
      "step": 213
    },
    {
      "epoch": 0.17573393553685074,
      "grad_norm": 0.082975834608078,
      "learning_rate": 9.426769061985739e-05,
      "loss": 0.3692,
      "step": 214
    },
    {
      "epoch": 0.17655512215150893,
      "grad_norm": 0.09659875184297562,
      "learning_rate": 9.424026330224904e-05,
      "loss": 0.3857,
      "step": 215
    },
    {
      "epoch": 0.1773763087661671,
      "grad_norm": 0.08381886035203934,
      "learning_rate": 9.42128359846407e-05,
      "loss": 0.3658,
      "step": 216
    },
    {
      "epoch": 0.1781974953808253,
      "grad_norm": 0.08097488433122635,
      "learning_rate": 9.418540866703238e-05,
      "loss": 0.3556,
      "step": 217
    },
    {
      "epoch": 0.1790186819954835,
      "grad_norm": 0.08661879599094391,
      "learning_rate": 9.415798134942403e-05,
      "loss": 0.3495,
      "step": 218
    },
    {
      "epoch": 0.17983986861014165,
      "grad_norm": 0.09621778875589371,
      "learning_rate": 9.413055403181569e-05,
      "loss": 0.3488,
      "step": 219
    },
    {
      "epoch": 0.18066105522479983,
      "grad_norm": 0.0867924615740776,
      "learning_rate": 9.410312671420735e-05,
      "loss": 0.3496,
      "step": 220
    },
    {
      "epoch": 0.18148224183945802,
      "grad_norm": 0.09928230196237564,
      "learning_rate": 9.407569939659901e-05,
      "loss": 0.411,
      "step": 221
    },
    {
      "epoch": 0.1823034284541162,
      "grad_norm": 0.08545473217964172,
      "learning_rate": 9.404827207899069e-05,
      "loss": 0.3707,
      "step": 222
    },
    {
      "epoch": 0.18312461506877437,
      "grad_norm": 0.1317296177148819,
      "learning_rate": 9.402084476138234e-05,
      "loss": 0.4021,
      "step": 223
    },
    {
      "epoch": 0.18394580168343255,
      "grad_norm": 0.09009065479040146,
      "learning_rate": 9.3993417443774e-05,
      "loss": 0.3451,
      "step": 224
    },
    {
      "epoch": 0.18476698829809074,
      "grad_norm": 0.08332253247499466,
      "learning_rate": 9.396599012616567e-05,
      "loss": 0.3457,
      "step": 225
    },
    {
      "epoch": 0.18558817491274893,
      "grad_norm": 0.08737312257289886,
      "learning_rate": 9.393856280855733e-05,
      "loss": 0.3721,
      "step": 226
    },
    {
      "epoch": 0.1864093615274071,
      "grad_norm": 0.07472239434719086,
      "learning_rate": 9.391113549094899e-05,
      "loss": 0.3542,
      "step": 227
    },
    {
      "epoch": 0.18723054814206527,
      "grad_norm": 0.08319877088069916,
      "learning_rate": 9.388370817334066e-05,
      "loss": 0.361,
      "step": 228
    },
    {
      "epoch": 0.18805173475672346,
      "grad_norm": 0.08192326873540878,
      "learning_rate": 9.385628085573231e-05,
      "loss": 0.344,
      "step": 229
    },
    {
      "epoch": 0.18887292137138165,
      "grad_norm": 0.11502642929553986,
      "learning_rate": 9.382885353812398e-05,
      "loss": 0.363,
      "step": 230
    },
    {
      "epoch": 0.18969410798603983,
      "grad_norm": 0.08567750453948975,
      "learning_rate": 9.380142622051565e-05,
      "loss": 0.3565,
      "step": 231
    },
    {
      "epoch": 0.19051529460069802,
      "grad_norm": 0.08059141039848328,
      "learning_rate": 9.37739989029073e-05,
      "loss": 0.3722,
      "step": 232
    },
    {
      "epoch": 0.19133648121535618,
      "grad_norm": 0.07670270651578903,
      "learning_rate": 9.374657158529896e-05,
      "loss": 0.3493,
      "step": 233
    },
    {
      "epoch": 0.19215766783001437,
      "grad_norm": 0.08022642135620117,
      "learning_rate": 9.371914426769062e-05,
      "loss": 0.4142,
      "step": 234
    },
    {
      "epoch": 0.19297885444467255,
      "grad_norm": 0.08015397936105728,
      "learning_rate": 9.369171695008229e-05,
      "loss": 0.3442,
      "step": 235
    },
    {
      "epoch": 0.19380004105933074,
      "grad_norm": 0.08329442143440247,
      "learning_rate": 9.366428963247395e-05,
      "loss": 0.3587,
      "step": 236
    },
    {
      "epoch": 0.1946212276739889,
      "grad_norm": 0.08311276882886887,
      "learning_rate": 9.363686231486561e-05,
      "loss": 0.3792,
      "step": 237
    },
    {
      "epoch": 0.1954424142886471,
      "grad_norm": 0.1332862675189972,
      "learning_rate": 9.360943499725727e-05,
      "loss": 0.3451,
      "step": 238
    },
    {
      "epoch": 0.19626360090330527,
      "grad_norm": 0.081804558634758,
      "learning_rate": 9.358200767964894e-05,
      "loss": 0.4212,
      "step": 239
    },
    {
      "epoch": 0.19708478751796346,
      "grad_norm": 0.08397019654512405,
      "learning_rate": 9.35545803620406e-05,
      "loss": 0.3482,
      "step": 240
    },
    {
      "epoch": 0.19790597413262165,
      "grad_norm": 0.09495637565851212,
      "learning_rate": 9.352715304443226e-05,
      "loss": 0.3432,
      "step": 241
    },
    {
      "epoch": 0.1987271607472798,
      "grad_norm": 0.09187504649162292,
      "learning_rate": 9.349972572682391e-05,
      "loss": 0.3938,
      "step": 242
    },
    {
      "epoch": 0.199548347361938,
      "grad_norm": 0.100834921002388,
      "learning_rate": 9.347229840921558e-05,
      "loss": 0.3567,
      "step": 243
    },
    {
      "epoch": 0.20036953397659618,
      "grad_norm": 0.09292273223400116,
      "learning_rate": 9.344487109160725e-05,
      "loss": 0.3802,
      "step": 244
    },
    {
      "epoch": 0.20119072059125437,
      "grad_norm": 0.07785986363887787,
      "learning_rate": 9.34174437739989e-05,
      "loss": 0.3832,
      "step": 245
    },
    {
      "epoch": 0.20201190720591256,
      "grad_norm": 0.081189826130867,
      "learning_rate": 9.339001645639057e-05,
      "loss": 0.3448,
      "step": 246
    },
    {
      "epoch": 0.20283309382057071,
      "grad_norm": 0.08209879696369171,
      "learning_rate": 9.336258913878222e-05,
      "loss": 0.3602,
      "step": 247
    },
    {
      "epoch": 0.2036542804352289,
      "grad_norm": 0.0813421905040741,
      "learning_rate": 9.333516182117389e-05,
      "loss": 0.3477,
      "step": 248
    },
    {
      "epoch": 0.2044754670498871,
      "grad_norm": 0.08188773691654205,
      "learning_rate": 9.330773450356556e-05,
      "loss": 0.3466,
      "step": 249
    },
    {
      "epoch": 0.20529665366454528,
      "grad_norm": 0.09966633468866348,
      "learning_rate": 9.328030718595721e-05,
      "loss": 0.3776,
      "step": 250
    },
    {
      "epoch": 0.20611784027920346,
      "grad_norm": 0.08154954016208649,
      "learning_rate": 9.325287986834887e-05,
      "loss": 0.3727,
      "step": 251
    },
    {
      "epoch": 0.20693902689386162,
      "grad_norm": 0.07942931354045868,
      "learning_rate": 9.322545255074055e-05,
      "loss": 0.3468,
      "step": 252
    },
    {
      "epoch": 0.2077602135085198,
      "grad_norm": 0.07944433391094208,
      "learning_rate": 9.31980252331322e-05,
      "loss": 0.3512,
      "step": 253
    },
    {
      "epoch": 0.208581400123178,
      "grad_norm": 0.08637971431016922,
      "learning_rate": 9.317059791552387e-05,
      "loss": 0.3397,
      "step": 254
    },
    {
      "epoch": 0.20940258673783618,
      "grad_norm": 0.08189195394515991,
      "learning_rate": 9.314317059791553e-05,
      "loss": 0.4105,
      "step": 255
    },
    {
      "epoch": 0.21022377335249434,
      "grad_norm": 0.08190836012363434,
      "learning_rate": 9.31157432803072e-05,
      "loss": 0.346,
      "step": 256
    },
    {
      "epoch": 0.21104495996715253,
      "grad_norm": 0.08331865072250366,
      "learning_rate": 9.308831596269886e-05,
      "loss": 0.3655,
      "step": 257
    },
    {
      "epoch": 0.21186614658181072,
      "grad_norm": 0.0822620540857315,
      "learning_rate": 9.306088864509052e-05,
      "loss": 0.372,
      "step": 258
    },
    {
      "epoch": 0.2126873331964689,
      "grad_norm": 0.08011777698993683,
      "learning_rate": 9.303346132748217e-05,
      "loss": 0.3304,
      "step": 259
    },
    {
      "epoch": 0.2135085198111271,
      "grad_norm": 0.08395062386989594,
      "learning_rate": 9.300603400987384e-05,
      "loss": 0.3903,
      "step": 260
    },
    {
      "epoch": 0.21432970642578525,
      "grad_norm": 0.08101452887058258,
      "learning_rate": 9.297860669226551e-05,
      "loss": 0.3579,
      "step": 261
    },
    {
      "epoch": 0.21515089304044344,
      "grad_norm": 0.11504925042390823,
      "learning_rate": 9.295117937465716e-05,
      "loss": 0.3723,
      "step": 262
    },
    {
      "epoch": 0.21597207965510162,
      "grad_norm": 0.0704338401556015,
      "learning_rate": 9.292375205704883e-05,
      "loss": 0.3341,
      "step": 263
    },
    {
      "epoch": 0.2167932662697598,
      "grad_norm": 0.07944470643997192,
      "learning_rate": 9.289632473944048e-05,
      "loss": 0.3273,
      "step": 264
    },
    {
      "epoch": 0.2167932662697598,
      "eval_runtime": 507.3366,
      "eval_samples_per_second": 0.388,
      "eval_steps_per_second": 0.195,
      "step": 264
    },
|
    {
      "epoch": 0.217614452884418,
      "grad_norm": 0.0778859481215477,
      "learning_rate": 9.286889742183215e-05,
      "loss": 0.3707,
      "step": 265
    },
    {
      "epoch": 0.21843563949907616,
      "grad_norm": 0.07564377039670944,
      "learning_rate": 9.284147010422382e-05,
      "loss": 0.334,
      "step": 266
    },
    {
      "epoch": 0.21925682611373434,
      "grad_norm": 0.07744503021240234,
      "learning_rate": 9.281404278661547e-05,
      "loss": 0.3521,
      "step": 267
    },
    {
      "epoch": 0.22007801272839253,
      "grad_norm": 0.08139525353908539,
      "learning_rate": 9.278661546900713e-05,
      "loss": 0.4069,
      "step": 268
    },
    {
      "epoch": 0.22089919934305072,
      "grad_norm": 0.08162204176187515,
      "learning_rate": 9.27591881513988e-05,
      "loss": 0.3292,
      "step": 269
    },
    {
      "epoch": 0.22172038595770888,
      "grad_norm": 0.07798879593610764,
      "learning_rate": 9.273176083379046e-05,
      "loss": 0.3441,
      "step": 270
    },
    {
      "epoch": 0.22254157257236706,
      "grad_norm": 0.07629597932100296,
      "learning_rate": 9.270433351618212e-05,
      "loss": 0.3989,
      "step": 271
    },
    {
      "epoch": 0.22336275918702525,
      "grad_norm": 0.07695835083723068,
      "learning_rate": 9.267690619857379e-05,
      "loss": 0.3506,
      "step": 272
    },
    {
      "epoch": 0.22418394580168344,
      "grad_norm": 0.07938527315855026,
      "learning_rate": 9.264947888096544e-05,
      "loss": 0.327,
      "step": 273
    },
    {
      "epoch": 0.22500513241634162,
      "grad_norm": 0.08125253766775131,
      "learning_rate": 9.262205156335711e-05,
      "loss": 0.3749,
      "step": 274
    },
    {
      "epoch": 0.22582631903099978,
      "grad_norm": 0.07831385731697083,
      "learning_rate": 9.259462424574878e-05,
      "loss": 0.3477,
      "step": 275
    },
    {
      "epoch": 0.22664750564565797,
      "grad_norm": 0.08408527821302414,
      "learning_rate": 9.256719692814043e-05,
      "loss": 0.3488,
      "step": 276
    },
    {
      "epoch": 0.22746869226031616,
      "grad_norm": 0.0800737589597702,
      "learning_rate": 9.253976961053208e-05,
      "loss": 0.3607,
      "step": 277
    },
    {
      "epoch": 0.22828987887497434,
      "grad_norm": 0.07969113439321518,
      "learning_rate": 9.251234229292375e-05,
      "loss": 0.38,
      "step": 278
    },
    {
      "epoch": 0.22911106548963253,
      "grad_norm": 0.08066916465759277,
      "learning_rate": 9.248491497531542e-05,
      "loss": 0.3469,
      "step": 279
    },
    {
      "epoch": 0.2299322521042907,
      "grad_norm": 0.07686860859394073,
      "learning_rate": 9.245748765770708e-05,
      "loss": 0.3311,
      "step": 280
    },
    {
      "epoch": 0.23075343871894888,
      "grad_norm": 0.13063722848892212,
      "learning_rate": 9.243006034009874e-05,
      "loss": 0.3091,
      "step": 281
    },
    {
      "epoch": 0.23157462533360706,
      "grad_norm": 0.08465570211410522,
      "learning_rate": 9.24026330224904e-05,
      "loss": 0.3548,
      "step": 282
    },
    {
      "epoch": 0.23239581194826525,
      "grad_norm": 0.07824988663196564,
      "learning_rate": 9.237520570488207e-05,
      "loss": 0.3485,
      "step": 283
    },
    {
      "epoch": 0.2332169985629234,
      "grad_norm": 0.087236687541008,
      "learning_rate": 9.234777838727373e-05,
      "loss": 0.3564,
      "step": 284
    },
    {
      "epoch": 0.2340381851775816,
      "grad_norm": 0.09531310200691223,
      "learning_rate": 9.232035106966539e-05,
      "loss": 0.3763,
      "step": 285
    },
    {
      "epoch": 0.23485937179223978,
      "grad_norm": 0.0783536285161972,
      "learning_rate": 9.229292375205704e-05,
      "loss": 0.3479,
      "step": 286
    },
    {
      "epoch": 0.23568055840689797,
      "grad_norm": 0.08209957927465439,
      "learning_rate": 9.226549643444872e-05,
      "loss": 0.3563,
      "step": 287
    },
    {
      "epoch": 0.23650174502155616,
      "grad_norm": 0.08195500820875168,
      "learning_rate": 9.223806911684038e-05,
      "loss": 0.3879,
      "step": 288
    },
    {
      "epoch": 0.23732293163621432,
      "grad_norm": 0.0796385332942009,
      "learning_rate": 9.221064179923205e-05,
      "loss": 0.3787,
      "step": 289
    },
    {
      "epoch": 0.2381441182508725,
      "grad_norm": 0.08523210138082504,
      "learning_rate": 9.21832144816237e-05,
      "loss": 0.3608,
      "step": 290
    },
    {
      "epoch": 0.2389653048655307,
      "grad_norm": 0.08573302626609802,
      "learning_rate": 9.215578716401537e-05,
      "loss": 0.3366,
      "step": 291
    },
    {
      "epoch": 0.23978649148018888,
      "grad_norm": 0.08111650496721268,
      "learning_rate": 9.212835984640704e-05,
      "loss": 0.3331,
      "step": 292
    },
    {
      "epoch": 0.24060767809484707,
      "grad_norm": 0.07877853512763977,
      "learning_rate": 9.210093252879869e-05,
      "loss": 0.3346,
      "step": 293
    },
    {
      "epoch": 0.24142886470950523,
      "grad_norm": 0.0814940333366394,
      "learning_rate": 9.207350521119034e-05,
      "loss": 0.3701,
      "step": 294
    },
    {
      "epoch": 0.2422500513241634,
      "grad_norm": 0.0843539908528328,
      "learning_rate": 9.204607789358201e-05,
      "loss": 0.3339,
      "step": 295
    },
    {
      "epoch": 0.2430712379388216,
      "grad_norm": 0.0875789001584053,
      "learning_rate": 9.201865057597368e-05,
      "loss": 0.362,
      "step": 296
    },
    {
      "epoch": 0.2438924245534798,
      "grad_norm": 0.08261071145534515,
      "learning_rate": 9.199122325836533e-05,
      "loss": 0.3578,
      "step": 297
    },
    {
      "epoch": 0.24471361116813795,
      "grad_norm": 0.09160041064023972,
      "learning_rate": 9.1963795940757e-05,
      "loss": 0.387,
      "step": 298
    },
    {
      "epoch": 0.24553479778279613,
      "grad_norm": 0.08152728527784348,
      "learning_rate": 9.193636862314866e-05,
      "loss": 0.3282,
      "step": 299
    },
    {
      "epoch": 0.24635598439745432,
      "grad_norm": 0.079656682908535,
      "learning_rate": 9.190894130554032e-05,
      "loss": 0.3238,
      "step": 300
    },
    {
      "epoch": 0.2471771710121125,
      "grad_norm": 0.0797642394900322,
      "learning_rate": 9.188151398793199e-05,
      "loss": 0.3506,
      "step": 301
    },
    {
      "epoch": 0.2479983576267707,
      "grad_norm": 0.08688945323228836,
      "learning_rate": 9.185408667032365e-05,
      "loss": 0.4177,
      "step": 302
    },
    {
      "epoch": 0.24881954424142885,
      "grad_norm": 0.106851726770401,
      "learning_rate": 9.18266593527153e-05,
      "loss": 0.3527,
      "step": 303
    },
    {
      "epoch": 0.24964073085608704,
      "grad_norm": 0.08586329221725464,
      "learning_rate": 9.179923203510697e-05,
      "loss": 0.3763,
      "step": 304
    },
    {
      "epoch": 0.25046191747074525,
      "grad_norm": 0.1141560897231102,
      "learning_rate": 9.177180471749864e-05,
      "loss": 0.3451,
      "step": 305
    },
    {
      "epoch": 0.2512831040854034,
      "grad_norm": 0.08728976547718048,
      "learning_rate": 9.174437739989029e-05,
      "loss": 0.3444,
      "step": 306
    },
    {
      "epoch": 0.2521042907000616,
      "grad_norm": 0.07848083972930908,
      "learning_rate": 9.171695008228196e-05,
      "loss": 0.344,
      "step": 307
    },
    {
      "epoch": 0.25292547731471976,
      "grad_norm": 0.08140011131763458,
      "learning_rate": 9.168952276467361e-05,
      "loss": 0.3338,
      "step": 308
    },
    {
      "epoch": 0.25374666392937795,
      "grad_norm": 0.07934946566820145,
      "learning_rate": 9.166209544706528e-05,
      "loss": 0.3869,
      "step": 309
    },
    {
      "epoch": 0.25456785054403613,
      "grad_norm": 0.08028525859117508,
      "learning_rate": 9.163466812945695e-05,
      "loss": 0.3141,
      "step": 310
    },
    {
      "epoch": 0.2553890371586943,
      "grad_norm": 0.08121496438980103,
      "learning_rate": 9.16072408118486e-05,
      "loss": 0.4098,
      "step": 311
    },
    {
      "epoch": 0.2562102237733525,
      "grad_norm": 0.09907019138336182,
      "learning_rate": 9.157981349424026e-05,
      "loss": 0.3594,
      "step": 312
    },
    {
      "epoch": 0.2570314103880107,
      "grad_norm": 0.10270131379365921,
      "learning_rate": 9.155238617663193e-05,
      "loss": 0.3713,
      "step": 313
    },
    {
      "epoch": 0.2578525970026689,
      "grad_norm": 0.08354981243610382,
      "learning_rate": 9.15249588590236e-05,
      "loss": 0.3943,
      "step": 314
    },
    {
      "epoch": 0.258673783617327,
      "grad_norm": 0.09153895825147629,
      "learning_rate": 9.149753154141525e-05,
      "loss": 0.3499,
      "step": 315
    },
    {
      "epoch": 0.2594949702319852,
      "grad_norm": 0.07772937417030334,
      "learning_rate": 9.147010422380692e-05,
      "loss": 0.3323,
      "step": 316
    },
    {
      "epoch": 0.2603161568466434,
      "grad_norm": 0.08953258395195007,
      "learning_rate": 9.144267690619857e-05,
      "loss": 0.3684,
      "step": 317
    },
    {
      "epoch": 0.2611373434613016,
      "grad_norm": 0.08204226195812225,
      "learning_rate": 9.141524958859024e-05,
      "loss": 0.3387,
      "step": 318
    },
    {
      "epoch": 0.26195853007595976,
      "grad_norm": 0.07604125887155533,
      "learning_rate": 9.13878222709819e-05,
      "loss": 0.3104,
      "step": 319
    },
    {
      "epoch": 0.26277971669061795,
      "grad_norm": 0.07614021003246307,
      "learning_rate": 9.136039495337356e-05,
      "loss": 0.329,
      "step": 320
    },
    {
      "epoch": 0.26360090330527614,
      "grad_norm": 0.19391578435897827,
      "learning_rate": 9.133296763576523e-05,
      "loss": 0.349,
      "step": 321
    },
    {
      "epoch": 0.2644220899199343,
      "grad_norm": 0.07927168160676956,
      "learning_rate": 9.13055403181569e-05,
      "loss": 0.3495,
      "step": 322
    },
    {
      "epoch": 0.2652432765345925,
      "grad_norm": 0.08091975003480911,
      "learning_rate": 9.127811300054855e-05,
      "loss": 0.3563,
      "step": 323
    },
    {
      "epoch": 0.26606446314925064,
      "grad_norm": 0.07865800708532333,
      "learning_rate": 9.125068568294022e-05,
      "loss": 0.36,
      "step": 324
    },
    {
      "epoch": 0.2668856497639088,
      "grad_norm": 0.08946891874074936,
      "learning_rate": 9.122325836533187e-05,
      "loss": 0.3434,
      "step": 325
    },
    {
      "epoch": 0.267706836378567,
      "grad_norm": 0.07987753301858902,
      "learning_rate": 9.119583104772354e-05,
      "loss": 0.3593,
      "step": 326
    },
    {
      "epoch": 0.2685280229932252,
      "grad_norm": 0.07894265651702881,
      "learning_rate": 9.116840373011521e-05,
      "loss": 0.3746,
      "step": 327
    },
    {
      "epoch": 0.2693492096078834,
      "grad_norm": 0.08121046423912048,
      "learning_rate": 9.114097641250686e-05,
      "loss": 0.357,
      "step": 328
    },
    {
      "epoch": 0.2701703962225416,
      "grad_norm": 0.08320184051990509,
      "learning_rate": 9.111354909489852e-05,
      "loss": 0.3783,
      "step": 329
    },
    {
      "epoch": 0.27099158283719976,
      "grad_norm": 0.10911049693822861,
      "learning_rate": 9.108612177729018e-05,
      "loss": 0.3699,
      "step": 330
    },
    {
      "epoch": 0.27181276945185795,
      "grad_norm": 0.09739838540554047,
      "learning_rate": 9.105869445968185e-05,
      "loss": 0.3524,
      "step": 331
    },
    {
      "epoch": 0.27263395606651614,
      "grad_norm": 0.0874953344464302,
      "learning_rate": 9.103126714207351e-05,
      "loss": 0.3807,
      "step": 332
    },
    {
      "epoch": 0.2734551426811743,
      "grad_norm": 0.09758396446704865,
      "learning_rate": 9.100383982446518e-05,
      "loss": 0.3876,
      "step": 333
    },
    {
      "epoch": 0.27427632929583246,
      "grad_norm": 0.08628170937299728,
      "learning_rate": 9.097641250685683e-05,
      "loss": 0.3632,
      "step": 334
    },
    {
      "epoch": 0.27509751591049064,
      "grad_norm": 0.08430638164281845,
      "learning_rate": 9.09489851892485e-05,
      "loss": 0.3448,
      "step": 335
    },
    {
      "epoch": 0.27591870252514883,
      "grad_norm": 0.0725574865937233,
      "learning_rate": 9.092155787164017e-05,
      "loss": 0.2914,
      "step": 336
    },
    {
      "epoch": 0.276739889139807,
      "grad_norm": 0.08421092480421066,
      "learning_rate": 9.089413055403182e-05,
      "loss": 0.3676,
      "step": 337
    },
    {
      "epoch": 0.2775610757544652,
      "grad_norm": 0.08702068030834198,
      "learning_rate": 9.086670323642347e-05,
      "loss": 0.345,
      "step": 338
    },
    {
      "epoch": 0.2783822623691234,
      "grad_norm": 0.08053667843341827,
      "learning_rate": 9.083927591881514e-05,
      "loss": 0.324,
      "step": 339
    },
    {
      "epoch": 0.2792034489837816,
      "grad_norm": 0.08294267952442169,
      "learning_rate": 9.081184860120681e-05,
      "loss": 0.348,
      "step": 340
    },
    {
      "epoch": 0.28002463559843976,
      "grad_norm": 0.18557026982307434,
      "learning_rate": 9.078442128359846e-05,
      "loss": 0.3483,
      "step": 341
    },
    {
      "epoch": 0.28084582221309795,
      "grad_norm": 0.08436847478151321,
      "learning_rate": 9.075699396599013e-05,
      "loss": 0.3641,
      "step": 342
    },
    {
      "epoch": 0.2816670088277561,
      "grad_norm": 0.08285505324602127,
      "learning_rate": 9.072956664838179e-05,
      "loss": 0.3641,
      "step": 343
    },
    {
      "epoch": 0.28248819544241427,
      "grad_norm": 0.08681569993495941,
      "learning_rate": 9.070213933077345e-05,
      "loss": 0.342,
      "step": 344
    },
    {
      "epoch": 0.28330938205707246,
      "grad_norm": 0.08668892085552216,
      "learning_rate": 9.067471201316512e-05,
      "loss": 0.3514,
      "step": 345
    },
    {
      "epoch": 0.28413056867173064,
      "grad_norm": 0.08393285423517227,
      "learning_rate": 9.064728469555678e-05,
      "loss": 0.3531,
      "step": 346
    },
    {
      "epoch": 0.28495175528638883,
      "grad_norm": 0.07783684879541397,
      "learning_rate": 9.061985737794843e-05,
      "loss": 0.3792,
      "step": 347
    },
    {
      "epoch": 0.285772941901047,
      "grad_norm": 0.08259117603302002,
      "learning_rate": 9.059243006034011e-05,
      "loss": 0.375,
      "step": 348
    },
    {
      "epoch": 0.2865941285157052,
      "grad_norm": 0.0969453975558281,
      "learning_rate": 9.056500274273177e-05,
      "loss": 0.3776,
      "step": 349
    },
    {
      "epoch": 0.2874153151303634,
      "grad_norm": 0.08154502511024475,
      "learning_rate": 9.053757542512342e-05,
      "loss": 0.3577,
      "step": 350
    },
    {
      "epoch": 0.2882365017450216,
      "grad_norm": 0.09322196245193481,
      "learning_rate": 9.051014810751509e-05,
      "loss": 0.3774,
      "step": 351
    },
    {
      "epoch": 0.2890576883596797,
      "grad_norm": 0.0804852545261383,
      "learning_rate": 9.048272078990674e-05,
      "loss": 0.333,
      "step": 352
    },
    {
      "epoch": 0.2890576883596797,
      "eval_runtime": 493.668,
      "eval_samples_per_second": 0.399,
      "eval_steps_per_second": 0.201,
      "step": 352
    },
|
{ |
|
"epoch": 0.2898788749743379, |
|
"grad_norm": 0.08082845062017441, |
|
"learning_rate": 9.045529347229841e-05, |
|
"loss": 0.3111, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.2907000615889961, |
|
"grad_norm": 0.08396881073713303, |
|
"learning_rate": 9.042786615469008e-05, |
|
"loss": 0.3429, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.29152124820365427, |
|
"grad_norm": 0.08037351071834564, |
|
"learning_rate": 9.040043883708173e-05, |
|
"loss": 0.349, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.29234243481831246, |
|
"grad_norm": 0.10421521961688995, |
|
"learning_rate": 9.03730115194734e-05, |
|
"loss": 0.3228, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.29316362143297064, |
|
"grad_norm": 0.08268436044454575, |
|
"learning_rate": 9.034558420186507e-05, |
|
"loss": 0.3414, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.29398480804762883, |
|
"grad_norm": 0.08203061670064926, |
|
"learning_rate": 9.031815688425672e-05, |
|
"loss": 0.3596, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.294805994662287, |
|
"grad_norm": 0.07778481394052505, |
|
"learning_rate": 9.029072956664839e-05, |
|
"loss": 0.3337, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.2956271812769452, |
|
"grad_norm": 0.14194349944591522, |
|
"learning_rate": 9.026330224904005e-05, |
|
"loss": 0.3613, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.2964483678916034, |
|
"grad_norm": 0.09854190051555634, |
|
"learning_rate": 9.023587493143171e-05, |
|
"loss": 0.3862, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.2972695545062615, |
|
"grad_norm": 0.07878783345222473, |
|
"learning_rate": 9.020844761382338e-05, |
|
"loss": 0.3294, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.2980907411209197, |
|
"grad_norm": 0.07692375034093857, |
|
"learning_rate": 9.018102029621504e-05, |
|
"loss": 0.3083, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.2989119277355779, |
|
"grad_norm": 0.0772586315870285, |
|
"learning_rate": 9.015359297860669e-05, |
|
"loss": 0.3086, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.2997331143502361, |
|
"grad_norm": 0.0739215686917305, |
|
"learning_rate": 9.012616566099836e-05, |
|
"loss": 0.3103, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.30055430096489427, |
|
"grad_norm": 0.08434160053730011, |
|
"learning_rate": 9.009873834339003e-05, |
|
"loss": 0.3485, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.30137548757955246, |
|
"grad_norm": 0.08038529008626938, |
|
"learning_rate": 9.007131102578168e-05, |
|
"loss": 0.3421, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.30219667419421065, |
|
"grad_norm": 0.08303964138031006, |
|
"learning_rate": 9.004388370817335e-05, |
|
"loss": 0.3349, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.30301786080886883, |
|
"grad_norm": 0.08672379702329636, |
|
"learning_rate": 9.0016456390565e-05, |
|
"loss": 0.3228, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.303839047423527, |
|
"grad_norm": 0.08417553454637527, |
|
"learning_rate": 8.998902907295667e-05, |
|
"loss": 0.3312, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.30466023403818515, |
|
"grad_norm": 0.0842244029045105, |
|
"learning_rate": 8.996160175534834e-05, |
|
"loss": 0.3545, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.30548142065284334, |
|
"grad_norm": 0.08479683846235275, |
|
"learning_rate": 8.993417443773999e-05, |
|
"loss": 0.3446, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.3063026072675015, |
|
"grad_norm": 0.07726123929023743, |
|
"learning_rate": 8.990674712013165e-05, |
|
"loss": 0.3116, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.3071237938821597, |
|
"grad_norm": 0.08586291968822479, |
|
"learning_rate": 8.987931980252331e-05, |
|
"loss": 0.3617, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.3079449804968179, |
|
"grad_norm": 0.07475403696298599, |
|
"learning_rate": 8.985189248491498e-05, |
|
"loss": 0.3481, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.3087661671114761, |
|
"grad_norm": 0.08379320055246353, |
|
"learning_rate": 8.982446516730664e-05, |
|
"loss": 0.3434, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.3095873537261343, |
|
"grad_norm": 0.07624776661396027, |
|
"learning_rate": 8.97970378496983e-05, |
|
"loss": 0.3262, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.31040854034079246, |
|
"grad_norm": 0.07878948748111725, |
|
"learning_rate": 8.976961053208996e-05, |
|
"loss": 0.3366, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.31122972695545065, |
|
"grad_norm": 0.08924887329339981, |
|
"learning_rate": 8.974218321448163e-05, |
|
"loss": 0.3175, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.31205091357010883, |
|
"grad_norm": 0.08927389234304428, |
|
"learning_rate": 8.97147558968733e-05, |
|
"loss": 0.3338, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.31287210018476697, |
|
"grad_norm": 0.08250463008880615, |
|
"learning_rate": 8.968732857926495e-05, |
|
"loss": 0.3314, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.31369328679942515, |
|
"grad_norm": 0.0977792888879776, |
|
"learning_rate": 8.96599012616566e-05, |
|
"loss": 0.3222, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.31451447341408334, |
|
"grad_norm": 0.08197218924760818, |
|
"learning_rate": 8.963247394404828e-05, |
|
"loss": 0.3288, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.3153356600287415, |
|
"grad_norm": 0.12502075731754303, |
|
"learning_rate": 8.960504662643994e-05, |
|
"loss": 0.3053, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.3161568466433997, |
|
"grad_norm": 0.08058564364910126, |
|
"learning_rate": 8.95776193088316e-05, |
|
"loss": 0.3292, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.3169780332580579, |
|
"grad_norm": 0.08365239202976227, |
|
"learning_rate": 8.955019199122326e-05, |
|
"loss": 0.3747, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.3177992198727161, |
|
"grad_norm": 0.07539816945791245, |
|
"learning_rate": 8.952276467361493e-05, |
|
"loss": 0.3179, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.3186204064873743, |
|
"grad_norm": 0.08136752992868423, |
|
"learning_rate": 8.94953373560066e-05, |
|
"loss": 0.3647, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.31944159310203246, |
|
"grad_norm": 0.09546865522861481, |
|
"learning_rate": 8.946791003839825e-05, |
|
"loss": 0.4056, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.3202627797166906, |
|
"grad_norm": 0.08654627203941345, |
|
"learning_rate": 8.94404827207899e-05, |
|
"loss": 0.3853, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.3210839663313488, |
|
"grad_norm": 0.0822821855545044, |
|
"learning_rate": 8.941305540318157e-05, |
|
"loss": 0.327, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.32190515294600697, |
|
"grad_norm": 0.08066459000110626, |
|
"learning_rate": 8.938562808557324e-05, |
|
"loss": 0.3585, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.32272633956066515, |
|
"grad_norm": 0.08845409005880356, |
|
"learning_rate": 8.93582007679649e-05, |
|
"loss": 0.3419, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.32354752617532334, |
|
"grad_norm": 0.08482253551483154, |
|
"learning_rate": 8.933077345035656e-05, |
|
"loss": 0.3263, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.32436871278998153, |
|
"grad_norm": 0.0801006481051445, |
|
"learning_rate": 8.930334613274822e-05, |
|
"loss": 0.3713, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.3251898994046397, |
|
"grad_norm": 0.08470006287097931, |
|
"learning_rate": 8.927591881513989e-05, |
|
"loss": 0.3631, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.3260110860192979, |
|
"grad_norm": 0.08457473665475845, |
|
"learning_rate": 8.924849149753155e-05, |
|
"loss": 0.339, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.3268322726339561, |
|
"grad_norm": 0.14836351573467255, |
|
"learning_rate": 8.922106417992321e-05, |
|
"loss": 0.3039, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.3276534592486142, |
|
"grad_norm": 0.08391403406858444, |
|
"learning_rate": 8.919363686231486e-05, |
|
"loss": 0.3415, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.3284746458632724, |
|
"grad_norm": 0.07998157292604446, |
|
"learning_rate": 8.916620954470653e-05, |
|
"loss": 0.2943, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.3292958324779306, |
|
"grad_norm": 0.07682473957538605, |
|
"learning_rate": 8.91387822270982e-05, |
|
"loss": 0.3413, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.3301170190925888, |
|
"grad_norm": 0.09960923343896866, |
|
"learning_rate": 8.911135490948985e-05, |
|
"loss": 0.356, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.33093820570724697, |
|
"grad_norm": 0.08010439574718475, |
|
"learning_rate": 8.908392759188152e-05, |
|
"loss": 0.3408, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.33175939232190516, |
|
"grad_norm": 0.0811014398932457, |
|
"learning_rate": 8.905650027427318e-05, |
|
"loss": 0.3159, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.33258057893656334, |
|
"grad_norm": 0.08086768537759781, |
|
"learning_rate": 8.902907295666484e-05, |
|
"loss": 0.3145, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.33340176555122153, |
|
"grad_norm": 0.0778844803571701, |
|
"learning_rate": 8.900164563905651e-05, |
|
"loss": 0.3271, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.3342229521658797, |
|
"grad_norm": 0.08636100590229034, |
|
"learning_rate": 8.897421832144817e-05, |
|
"loss": 0.395, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.3350441387805379, |
|
"grad_norm": 0.08080980181694031, |
|
"learning_rate": 8.894679100383982e-05, |
|
"loss": 0.3819, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.33586532539519603, |
|
"grad_norm": 0.08312460780143738, |
|
"learning_rate": 8.891936368623149e-05, |
|
"loss": 0.3413, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.3366865120098542, |
|
"grad_norm": 0.07980356365442276, |
|
"learning_rate": 8.889193636862316e-05, |
|
"loss": 0.3546, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.3375076986245124, |
|
"grad_norm": 0.0868874043226242, |
|
"learning_rate": 8.886450905101481e-05, |
|
"loss": 0.3354, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.3383288852391706, |
|
"grad_norm": 0.08026042580604553, |
|
"learning_rate": 8.883708173340648e-05, |
|
"loss": 0.355, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.3391500718538288, |
|
"grad_norm": 0.0777151882648468, |
|
"learning_rate": 8.880965441579813e-05, |
|
"loss": 0.2835, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.33997125846848697, |
|
"grad_norm": 0.08007453382015228, |
|
"learning_rate": 8.87822270981898e-05, |
|
"loss": 0.3042, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.34079244508314516, |
|
"grad_norm": 0.08239760994911194, |
|
"learning_rate": 8.875479978058147e-05, |
|
"loss": 0.4139, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.34161363169780334, |
|
"grad_norm": 0.08022072911262512, |
|
"learning_rate": 8.872737246297312e-05, |
|
"loss": 0.319, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.34243481831246153, |
|
"grad_norm": 0.09205158054828644, |
|
"learning_rate": 8.869994514536478e-05, |
|
"loss": 0.3283, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.34325600492711966, |
|
"grad_norm": 0.07974108308553696, |
|
"learning_rate": 8.867251782775646e-05, |
|
"loss": 0.3331, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.34407719154177785, |
|
"grad_norm": 0.0810483992099762, |
|
"learning_rate": 8.864509051014811e-05, |
|
"loss": 0.4005, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.34489837815643604, |
|
"grad_norm": 0.08448805660009384, |
|
"learning_rate": 8.861766319253978e-05, |
|
"loss": 0.3327, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.3457195647710942, |
|
"grad_norm": 0.0865749716758728, |
|
"learning_rate": 8.859023587493143e-05, |
|
"loss": 0.4459, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.3465407513857524, |
|
"grad_norm": 0.08855796605348587, |
|
"learning_rate": 8.85628085573231e-05, |
|
"loss": 0.3566, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.3473619380004106, |
|
"grad_norm": 0.08421412855386734, |
|
"learning_rate": 8.853538123971477e-05, |
|
"loss": 0.3248, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.3481831246150688, |
|
"grad_norm": 0.07903390377759933, |
|
"learning_rate": 8.850795392210642e-05, |
|
"loss": 0.3608, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.34900431122972697, |
|
"grad_norm": 0.09979040920734406, |
|
"learning_rate": 8.848052660449808e-05, |
|
"loss": 0.3079, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.34982549784438516, |
|
"grad_norm": 0.08713112026453018, |
|
"learning_rate": 8.845309928688975e-05, |
|
"loss": 0.3462, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.35064668445904335, |
|
"grad_norm": 0.07836464792490005, |
|
"learning_rate": 8.842567196928141e-05, |
|
"loss": 0.3467, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.3514678710737015, |
|
"grad_norm": 0.07553427666425705, |
|
"learning_rate": 8.839824465167307e-05, |
|
"loss": 0.3325, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.35228905768835966, |
|
"grad_norm": 0.08221621066331863, |
|
"learning_rate": 8.837081733406474e-05, |
|
"loss": 0.3366, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.35311024430301785, |
|
"grad_norm": 0.09283772855997086, |
|
"learning_rate": 8.834339001645639e-05, |
|
"loss": 0.3518, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.35393143091767604, |
|
"grad_norm": 0.08067970722913742, |
|
"learning_rate": 8.831596269884806e-05, |
|
"loss": 0.3521, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.3547526175323342, |
|
"grad_norm": 0.08192427456378937, |
|
"learning_rate": 8.828853538123973e-05, |
|
"loss": 0.3794, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.3555738041469924, |
|
"grad_norm": 0.08332204073667526, |
|
"learning_rate": 8.826110806363138e-05, |
|
"loss": 0.3243, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.3563949907616506, |
|
"grad_norm": 0.08099368214607239, |
|
"learning_rate": 8.823368074602304e-05, |
|
"loss": 0.3409, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.3572161773763088, |
|
"grad_norm": 0.0801381841301918, |
|
"learning_rate": 8.82062534284147e-05, |
|
"loss": 0.3489, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.358037363990967, |
|
"grad_norm": 0.0828125849366188, |
|
"learning_rate": 8.817882611080637e-05, |
|
"loss": 0.3332, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.3588585506056251, |
|
"grad_norm": 0.08463995903730392, |
|
"learning_rate": 8.815139879319803e-05, |
|
"loss": 0.3532, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.3596797372202833, |
|
"grad_norm": 0.08150382339954376, |
|
"learning_rate": 8.81239714755897e-05, |
|
"loss": 0.3271, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.3605009238349415, |
|
"grad_norm": 0.08224772661924362, |
|
"learning_rate": 8.809654415798135e-05, |
|
"loss": 0.3847, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.36132211044959966, |
|
"grad_norm": 0.07509076595306396, |
|
"learning_rate": 8.806911684037302e-05, |
|
"loss": 0.346, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.36132211044959966, |
|
"eval_runtime": 493.5837, |
|
"eval_samples_per_second": 0.399, |
|
"eval_steps_per_second": 0.201, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.36214329706425785, |
|
"grad_norm": 0.07801686972379684, |
|
"learning_rate": 8.804168952276468e-05, |
|
"loss": 0.3352, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.36296448367891604, |
|
"grad_norm": 0.08195506781339645, |
|
"learning_rate": 8.801426220515634e-05, |
|
"loss": 0.3076, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.3637856702935742, |
|
"grad_norm": 0.08178213983774185, |
|
"learning_rate": 8.798683488754799e-05, |
|
"loss": 0.3565, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.3646068569082324, |
|
"grad_norm": 0.07905025035142899, |
|
"learning_rate": 8.795940756993966e-05, |
|
"loss": 0.3513, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.3654280435228906, |
|
"grad_norm": 0.1251908391714096, |
|
"learning_rate": 8.793198025233133e-05, |
|
"loss": 0.3656, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.36624923013754873, |
|
"grad_norm": 0.08813164383172989, |
|
"learning_rate": 8.790455293472298e-05, |
|
"loss": 0.3352, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.3670704167522069, |
|
"grad_norm": 0.0773930549621582, |
|
"learning_rate": 8.787712561711465e-05, |
|
"loss": 0.3241, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.3678916033668651, |
|
"grad_norm": 0.08977091312408447, |
|
"learning_rate": 8.78496982995063e-05, |
|
"loss": 0.3308, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.3687127899815233, |
|
"grad_norm": 0.0813109427690506, |
|
"learning_rate": 8.782227098189797e-05, |
|
"loss": 0.344, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.3695339765961815, |
|
"grad_norm": 0.08452528715133667, |
|
"learning_rate": 8.779484366428964e-05, |
|
"loss": 0.2985, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.37035516321083967, |
|
"grad_norm": 0.08880957961082458, |
|
"learning_rate": 8.77674163466813e-05, |
|
"loss": 0.3674, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.37117634982549785, |
|
"grad_norm": 0.08020122349262238, |
|
"learning_rate": 8.773998902907295e-05, |
|
"loss": 0.3487, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.37199753644015604, |
|
"grad_norm": 0.09180288016796112, |
|
"learning_rate": 8.771256171146463e-05, |
|
"loss": 0.364, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.3728187230548142, |
|
"grad_norm": 0.08132430911064148, |
|
"learning_rate": 8.768513439385628e-05, |
|
"loss": 0.3428, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.3736399096694724, |
|
"grad_norm": 0.08801679313182831, |
|
"learning_rate": 8.765770707624795e-05, |
|
"loss": 0.3287, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.37446109628413055, |
|
"grad_norm": 0.088813915848732, |
|
"learning_rate": 8.763027975863961e-05, |
|
"loss": 0.3508, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.37528228289878873, |
|
"grad_norm": 0.07454930245876312, |
|
"learning_rate": 8.760285244103128e-05, |
|
"loss": 0.3267, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.3761034695134469, |
|
"grad_norm": 0.0816444680094719, |
|
"learning_rate": 8.757542512342294e-05, |
|
"loss": 0.3237, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.3769246561281051, |
|
"grad_norm": 0.08916571736335754, |
|
"learning_rate": 8.75479978058146e-05, |
|
"loss": 0.3599, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.3777458427427633, |
|
"grad_norm": 0.08580256998538971, |
|
"learning_rate": 8.752057048820625e-05, |
|
"loss": 0.351, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.3785670293574215, |
|
"grad_norm": 0.0849752202630043, |
|
"learning_rate": 8.749314317059792e-05, |
|
"loss": 0.3616, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.37938821597207967, |
|
"grad_norm": 0.11230983585119247, |
|
"learning_rate": 8.746571585298959e-05, |
|
"loss": 0.3401, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.38020940258673785, |
|
"grad_norm": 0.08614380657672882, |
|
"learning_rate": 8.743828853538124e-05, |
|
"loss": 0.3603, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.38103058920139604, |
|
"grad_norm": 0.08322657644748688, |
|
"learning_rate": 8.741086121777291e-05, |
|
"loss": 0.3087, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.3818517758160542, |
|
"grad_norm": 0.08578125387430191, |
|
"learning_rate": 8.738343390016456e-05, |
|
"loss": 0.3483, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.38267296243071236, |
|
"grad_norm": 0.08472792059183121, |
|
"learning_rate": 8.735600658255623e-05, |
|
"loss": 0.3445, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.38349414904537055, |
|
"grad_norm": 0.08300035446882248, |
|
"learning_rate": 8.73285792649479e-05, |
|
"loss": 0.3384, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.38431533566002873, |
|
"grad_norm": 0.08582165092229843, |
|
"learning_rate": 8.730115194733955e-05, |
|
"loss": 0.3497, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.3851365222746869, |
|
"grad_norm": 0.28932541608810425, |
|
"learning_rate": 8.727372462973121e-05, |
|
"loss": 0.3091, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.3859577088893451, |
|
"grad_norm": 0.08396289497613907, |
|
"learning_rate": 8.724629731212288e-05, |
|
"loss": 0.3472, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.3867788955040033, |
|
"grad_norm": 0.08243526518344879, |
|
"learning_rate": 8.721886999451454e-05, |
|
"loss": 0.3641, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.3876000821186615, |
|
"grad_norm": 0.08014799654483795, |
|
"learning_rate": 8.71914426769062e-05, |
|
"loss": 0.3389, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.38842126873331967, |
|
"grad_norm": 0.08125128597021103, |
|
"learning_rate": 8.716401535929787e-05, |
|
"loss": 0.3293, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.3892424553479778, |
|
"grad_norm": 0.08730563521385193, |
|
"learning_rate": 8.713658804168952e-05, |
|
"loss": 0.3102, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.390063641962636, |
|
"grad_norm": 0.08945506811141968, |
|
"learning_rate": 8.710916072408119e-05, |
|
"loss": 0.3496, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.3908848285772942, |
|
"grad_norm": 0.09083441644906998, |
|
"learning_rate": 8.708173340647286e-05, |
|
"loss": 0.3411, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.39170601519195236, |
|
"grad_norm": 0.08667677640914917, |
|
"learning_rate": 8.705430608886451e-05, |
|
"loss": 0.3289, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.39252720180661055, |
|
"grad_norm": 0.08793242275714874, |
|
"learning_rate": 8.702687877125617e-05, |
|
"loss": 0.3542, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.39334838842126874, |
|
"grad_norm": 0.08769497275352478, |
|
"learning_rate": 8.699945145364785e-05, |
|
"loss": 0.3207, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.3941695750359269, |
|
"grad_norm": 0.08303678780794144, |
|
"learning_rate": 8.69720241360395e-05, |
|
"loss": 0.343, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.3949907616505851, |
|
"grad_norm": 0.07943850755691528, |
|
"learning_rate": 8.694459681843116e-05, |
|
"loss": 0.3273, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.3958119482652433, |
|
"grad_norm": 0.08450903743505478, |
|
"learning_rate": 8.691716950082282e-05, |
|
"loss": 0.3895, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.3966331348799015, |
|
"grad_norm": 0.07759539037942886, |
|
"learning_rate": 8.688974218321448e-05, |
|
"loss": 0.3426, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.3974543214945596, |
|
"grad_norm": 0.09013447165489197, |
|
"learning_rate": 8.686231486560615e-05, |
|
"loss": 0.336, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.3982755081092178, |
|
"grad_norm": 0.08396545797586441, |
|
"learning_rate": 8.683488754799781e-05, |
|
"loss": 0.3647, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.399096694723876, |
|
"grad_norm": 0.08143208920955658, |
|
"learning_rate": 8.680746023038947e-05, |
|
"loss": 0.3239, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.3999178813385342, |
|
"grad_norm": 0.07628796994686127, |
|
"learning_rate": 8.678003291278114e-05, |
|
"loss": 0.3389, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.40073906795319236, |
|
"grad_norm": 0.08144954591989517, |
|
"learning_rate": 8.67526055951728e-05, |
|
"loss": 0.3258, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.40156025456785055, |
|
"grad_norm": 0.08240176737308502, |
|
"learning_rate": 8.672517827756446e-05, |
|
"loss": 0.3316, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.40238144118250874, |
|
"grad_norm": 0.09237710386514664, |
|
"learning_rate": 8.669775095995613e-05, |
|
"loss": 0.3435, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.4032026277971669, |
|
"grad_norm": 0.07773042470216751, |
|
"learning_rate": 8.667032364234778e-05, |
|
"loss": 0.3479, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.4040238144118251, |
|
"grad_norm": 0.08410782366991043, |
|
"learning_rate": 8.664289632473945e-05, |
|
"loss": 0.3792, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.40484500102648324, |
|
"grad_norm": 0.08251044154167175, |
|
"learning_rate": 8.661546900713112e-05, |
|
"loss": 0.3388, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.40566618764114143, |
|
"grad_norm": 0.08804575353860855, |
|
"learning_rate": 8.658804168952277e-05, |
|
"loss": 0.3388, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.4064873742557996, |
|
"grad_norm": 0.10321494936943054, |
|
"learning_rate": 8.656061437191442e-05, |
|
"loss": 0.3348, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.4073085608704578, |
|
"grad_norm": 0.07642900943756104, |
|
"learning_rate": 8.653318705430609e-05, |
|
"loss": 0.3155, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.408129747485116, |
|
"grad_norm": 0.08629249781370163, |
|
"learning_rate": 8.650575973669776e-05, |
|
"loss": 0.3303, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.4089509340997742, |
|
"grad_norm": 0.07933122664690018, |
|
"learning_rate": 8.647833241908941e-05, |
|
"loss": 0.3399, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.40977212071443236, |
|
"grad_norm": 0.0802338495850563, |
|
"learning_rate": 8.645090510148108e-05, |
|
"loss": 0.3623, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.41059330732909055, |
|
"grad_norm": 0.07566798478364944, |
|
"learning_rate": 8.642347778387274e-05, |
|
"loss": 0.3195, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.41141449394374874, |
|
"grad_norm": 0.0790770947933197, |
|
"learning_rate": 8.63960504662644e-05, |
|
"loss": 0.3478, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.4122356805584069, |
|
"grad_norm": 0.08219192922115326, |
|
"learning_rate": 8.636862314865607e-05, |
|
"loss": 0.4037, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.41305686717306506, |
|
"grad_norm": 0.09459681063890457, |
|
"learning_rate": 8.634119583104773e-05, |
|
"loss": 0.3604, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.41387805378772324, |
|
"grad_norm": 0.08201157301664352, |
|
"learning_rate": 8.631376851343938e-05, |
|
"loss": 0.3561, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.41469924040238143, |
|
"grad_norm": 0.08181356638669968, |
|
"learning_rate": 8.628634119583105e-05, |
|
"loss": 0.3359, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.4155204270170396, |
|
"grad_norm": 0.08242693543434143, |
|
"learning_rate": 8.625891387822272e-05, |
|
"loss": 0.3501, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.4163416136316978, |
|
"grad_norm": 0.08045560866594315, |
|
"learning_rate": 8.623148656061437e-05, |
|
"loss": 0.3188, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.417162800246356, |
|
"grad_norm": 0.08392325043678284, |
|
"learning_rate": 8.620405924300604e-05, |
|
"loss": 0.3607, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.4179839868610142, |
|
"grad_norm": 0.0920289009809494, |
|
"learning_rate": 8.61766319253977e-05, |
|
"loss": 0.3425, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.41880517347567237, |
|
"grad_norm": 0.08921059966087341, |
|
"learning_rate": 8.614920460778936e-05, |
|
"loss": 0.3386, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.41962636009033055, |
|
"grad_norm": 0.08129517734050751, |
|
"learning_rate": 8.612177729018103e-05, |
|
"loss": 0.3631, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.4204475467049887, |
|
"grad_norm": 0.19061192870140076, |
|
"learning_rate": 8.609434997257268e-05, |
|
"loss": 0.3285, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.42126873331964687, |
|
"grad_norm": 0.08446541428565979, |
|
"learning_rate": 8.606692265496434e-05, |
|
"loss": 0.4133, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.42208991993430506, |
|
"grad_norm": 0.07793641835451126, |
|
"learning_rate": 8.603949533735602e-05, |
|
"loss": 0.3166, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.42291110654896324, |
|
"grad_norm": 0.07862336188554764, |
|
"learning_rate": 8.601206801974767e-05, |
|
"loss": 0.3448, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.42373229316362143, |
|
"grad_norm": 0.07776626199483871, |
|
"learning_rate": 8.598464070213933e-05, |
|
"loss": 0.3717, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.4245534797782796, |
|
"grad_norm": 0.08171868324279785, |
|
"learning_rate": 8.5957213384531e-05, |
|
"loss": 0.3384, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.4253746663929378, |
|
"grad_norm": 0.08469153940677643, |
|
"learning_rate": 8.592978606692266e-05, |
|
"loss": 0.3238, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.426195853007596, |
|
"grad_norm": 0.0739326924085617, |
|
"learning_rate": 8.590235874931433e-05, |
|
"loss": 0.3033, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.4270170396222542, |
|
"grad_norm": 0.08084508031606674, |
|
"learning_rate": 8.587493143170599e-05, |
|
"loss": 0.3557, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.4278382262369123, |
|
"grad_norm": 0.079572893679142, |
|
"learning_rate": 8.584750411409764e-05, |
|
"loss": 0.3369, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.4286594128515705, |
|
"grad_norm": 0.09056146442890167, |
|
"learning_rate": 8.582007679648931e-05, |
|
"loss": 0.3362, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.4294805994662287, |
|
"grad_norm": 0.09055227041244507, |
|
"learning_rate": 8.579264947888098e-05, |
|
"loss": 0.3457, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.43030178608088687, |
|
"grad_norm": 0.11645710468292236, |
|
"learning_rate": 8.576522216127263e-05, |
|
"loss": 0.3378, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.43112297269554506, |
|
"grad_norm": 0.08169250190258026, |
|
"learning_rate": 8.57377948436643e-05, |
|
"loss": 0.3406, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.43194415931020325, |
|
"grad_norm": 0.08108235895633698, |
|
"learning_rate": 8.571036752605595e-05, |
|
"loss": 0.3253, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.43276534592486143, |
|
"grad_norm": 0.07971398532390594, |
|
"learning_rate": 8.568294020844762e-05, |
|
"loss": 0.3389, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.4335865325395196, |
|
"grad_norm": 0.08281555771827698, |
|
"learning_rate": 8.565551289083929e-05, |
|
"loss": 0.3289, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.4335865325395196, |
|
"eval_runtime": 493.1791, |
|
"eval_samples_per_second": 0.399, |
|
"eval_steps_per_second": 0.201, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.4344077191541778, |
|
"grad_norm": 0.08380738645792007, |
|
"learning_rate": 8.562808557323094e-05, |
|
"loss": 0.3445, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.435228905768836, |
|
"grad_norm": 0.07987751066684723, |
|
"learning_rate": 8.56006582556226e-05, |
|
"loss": 0.3149, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.4360500923834941, |
|
"grad_norm": 0.08102700114250183, |
|
"learning_rate": 8.557323093801427e-05, |
|
"loss": 0.3309, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.4368712789981523, |
|
"grad_norm": 0.08056002855300903, |
|
"learning_rate": 8.554580362040593e-05, |
|
"loss": 0.3579, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.4376924656128105, |
|
"grad_norm": 0.08129940181970596, |
|
"learning_rate": 8.551837630279759e-05, |
|
"loss": 0.3414, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.4385136522274687, |
|
"grad_norm": 0.07720111310482025, |
|
"learning_rate": 8.549094898518926e-05, |
|
"loss": 0.3066, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.4393348388421269, |
|
"grad_norm": 0.08578766137361526, |
|
"learning_rate": 8.546352166758091e-05, |
|
"loss": 0.4056, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.44015602545678506, |
|
"grad_norm": 0.14550408720970154, |
|
"learning_rate": 8.543609434997258e-05, |
|
"loss": 0.353, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.44097721207144325, |
|
"grad_norm": 0.07624532282352448, |
|
"learning_rate": 8.540866703236425e-05, |
|
"loss": 0.323, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.44179839868610143, |
|
"grad_norm": 0.07774993777275085, |
|
"learning_rate": 8.53812397147559e-05, |
|
"loss": 0.3244, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.4426195853007596, |
|
"grad_norm": 0.08061113953590393, |
|
"learning_rate": 8.535381239714755e-05, |
|
"loss": 0.3209, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.44344077191541775, |
|
"grad_norm": 0.07771355658769608, |
|
"learning_rate": 8.532638507953922e-05, |
|
"loss": 0.3604, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.44426195853007594, |
|
"grad_norm": 0.08160841464996338, |
|
"learning_rate": 8.529895776193089e-05, |
|
"loss": 0.3373, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.4450831451447341, |
|
"grad_norm": 0.0819459930062294, |
|
"learning_rate": 8.527153044432254e-05, |
|
"loss": 0.3097, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.4459043317593923, |
|
"grad_norm": 0.08429809659719467, |
|
"learning_rate": 8.524410312671421e-05, |
|
"loss": 0.3538, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.4467255183740505, |
|
"grad_norm": 0.08519409596920013, |
|
"learning_rate": 8.521667580910587e-05, |
|
"loss": 0.3486, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.4475467049887087, |
|
"grad_norm": 0.08900930732488632, |
|
"learning_rate": 8.518924849149753e-05, |
|
"loss": 0.3339, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.4483678916033669, |
|
"grad_norm": 0.08914072066545486, |
|
"learning_rate": 8.51618211738892e-05, |
|
"loss": 0.2933, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.44918907821802506, |
|
"grad_norm": 0.08086919039487839, |
|
"learning_rate": 8.513439385628086e-05, |
|
"loss": 0.3367, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.45001026483268325, |
|
"grad_norm": 0.08358173817396164, |
|
"learning_rate": 8.510696653867251e-05, |
|
"loss": 0.3443, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.45083145144734144, |
|
"grad_norm": 0.0807274803519249, |
|
"learning_rate": 8.507953922106419e-05, |
|
"loss": 0.3332, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.45165263806199957, |
|
"grad_norm": 0.08126116544008255, |
|
"learning_rate": 8.505211190345585e-05, |
|
"loss": 0.3485, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.45247382467665775, |
|
"grad_norm": 0.08835196495056152, |
|
"learning_rate": 8.50246845858475e-05, |
|
"loss": 0.3739, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.45329501129131594, |
|
"grad_norm": 0.07614720612764359, |
|
"learning_rate": 8.499725726823917e-05, |
|
"loss": 0.3464, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.45411619790597413, |
|
"grad_norm": 0.07444674521684647, |
|
"learning_rate": 8.496982995063084e-05, |
|
"loss": 0.3553, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.4549373845206323, |
|
"grad_norm": 0.0741548240184784, |
|
"learning_rate": 8.49424026330225e-05, |
|
"loss": 0.341, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.4557585711352905, |
|
"grad_norm": 0.0820796862244606, |
|
"learning_rate": 8.491497531541416e-05, |
|
"loss": 0.3359, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.4565797577499487, |
|
"grad_norm": 0.08371058106422424, |
|
"learning_rate": 8.488754799780581e-05, |
|
"loss": 0.3339, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.4574009443646069, |
|
"grad_norm": 0.08354955166578293, |
|
"learning_rate": 8.486012068019748e-05, |
|
"loss": 0.3619, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.45822213097926506, |
|
"grad_norm": 0.08976832777261734, |
|
"learning_rate": 8.483269336258915e-05, |
|
"loss": 0.353, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.4590433175939232, |
|
"grad_norm": 0.07933076471090317, |
|
"learning_rate": 8.48052660449808e-05, |
|
"loss": 0.3764, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.4598645042085814, |
|
"grad_norm": 0.08624580502510071, |
|
"learning_rate": 8.477783872737247e-05, |
|
"loss": 0.3372, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.46068569082323957, |
|
"grad_norm": 0.08301227539777756, |
|
"learning_rate": 8.475041140976413e-05, |
|
"loss": 0.3748, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.46150687743789776, |
|
"grad_norm": 0.07408706098794937, |
|
"learning_rate": 8.47229840921558e-05, |
|
"loss": 0.2622, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.46232806405255594, |
|
"grad_norm": 0.07799118012189865, |
|
"learning_rate": 8.469555677454746e-05, |
|
"loss": 0.3614, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.46314925066721413, |
|
"grad_norm": 0.08185340464115143, |
|
"learning_rate": 8.466812945693912e-05, |
|
"loss": 0.3423, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.4639704372818723, |
|
"grad_norm": 0.0817733034491539, |
|
"learning_rate": 8.464070213933077e-05, |
|
"loss": 0.3519, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.4647916238965305, |
|
"grad_norm": 0.08026120066642761, |
|
"learning_rate": 8.461327482172244e-05, |
|
"loss": 0.3372, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.4656128105111887, |
|
"grad_norm": 0.07413862645626068, |
|
"learning_rate": 8.45858475041141e-05, |
|
"loss": 0.3017, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.4664339971258468, |
|
"grad_norm": 0.09256873279809952, |
|
"learning_rate": 8.455842018650576e-05, |
|
"loss": 0.3495, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.467255183740505, |
|
"grad_norm": 0.08319979161024094, |
|
"learning_rate": 8.453099286889743e-05, |
|
"loss": 0.3339, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.4680763703551632, |
|
"grad_norm": 0.08824891597032547, |
|
"learning_rate": 8.450356555128908e-05, |
|
"loss": 0.3272, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.4688975569698214, |
|
"grad_norm": 0.0790686085820198, |
|
"learning_rate": 8.447613823368075e-05, |
|
"loss": 0.3446, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.46971874358447957, |
|
"grad_norm": 0.08730736374855042, |
|
"learning_rate": 8.444871091607242e-05, |
|
"loss": 0.3731, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.47053993019913776, |
|
"grad_norm": 0.08023872971534729, |
|
"learning_rate": 8.442128359846407e-05, |
|
"loss": 0.3226, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.47136111681379594, |
|
"grad_norm": 0.09242340922355652, |
|
"learning_rate": 8.439385628085573e-05, |
|
"loss": 0.3229, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.47218230342845413, |
|
"grad_norm": 0.07871225476264954, |
|
"learning_rate": 8.43664289632474e-05, |
|
"loss": 0.3112, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.4730034900431123, |
|
"grad_norm": 0.08368773013353348, |
|
"learning_rate": 8.433900164563906e-05, |
|
"loss": 0.3357, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.4738246766577705, |
|
"grad_norm": 0.07561603933572769, |
|
"learning_rate": 8.431157432803072e-05, |
|
"loss": 0.3125, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.47464586327242864, |
|
"grad_norm": 0.08441564440727234, |
|
"learning_rate": 8.428414701042238e-05, |
|
"loss": 0.3211, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.4754670498870868, |
|
"grad_norm": 0.09772761911153793, |
|
"learning_rate": 8.425671969281404e-05, |
|
"loss": 0.3285, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.476288236501745, |
|
"grad_norm": 0.07900305837392807, |
|
"learning_rate": 8.422929237520571e-05, |
|
"loss": 0.344, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.4771094231164032, |
|
"grad_norm": 0.13551943004131317, |
|
"learning_rate": 8.420186505759737e-05, |
|
"loss": 0.3515, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.4779306097310614, |
|
"grad_norm": 0.07592803239822388, |
|
"learning_rate": 8.417443773998903e-05, |
|
"loss": 0.3377, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.47875179634571957, |
|
"grad_norm": 0.08301271498203278, |
|
"learning_rate": 8.414701042238068e-05, |
|
"loss": 0.365, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.47957298296037776, |
|
"grad_norm": 0.11750177294015884, |
|
"learning_rate": 8.411958310477237e-05, |
|
"loss": 0.3159, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.48039416957503595, |
|
"grad_norm": 0.08506017178297043, |
|
"learning_rate": 8.409215578716402e-05, |
|
"loss": 0.3381, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.48121535618969413, |
|
"grad_norm": 0.0827595517039299, |
|
"learning_rate": 8.406472846955569e-05, |
|
"loss": 0.3497, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.48203654280435226, |
|
"grad_norm": 0.07703883200883865, |
|
"learning_rate": 8.403730115194734e-05, |
|
"loss": 0.2989, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.48285772941901045, |
|
"grad_norm": 0.07976412773132324, |
|
"learning_rate": 8.400987383433901e-05, |
|
"loss": 0.3617, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.48367891603366864, |
|
"grad_norm": 0.09424680471420288, |
|
"learning_rate": 8.398244651673068e-05, |
|
"loss": 0.3623, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.4845001026483268, |
|
"grad_norm": 0.08831481635570526, |
|
"learning_rate": 8.395501919912233e-05, |
|
"loss": 0.3526, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.485321289262985, |
|
"grad_norm": 0.07811284810304642, |
|
"learning_rate": 8.392759188151399e-05, |
|
"loss": 0.3721, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.4861424758776432, |
|
"grad_norm": 0.08139461278915405, |
|
"learning_rate": 8.390016456390565e-05, |
|
"loss": 0.33, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.4869636624923014, |
|
"grad_norm": 0.0807470753788948, |
|
"learning_rate": 8.387273724629732e-05, |
|
"loss": 0.372, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.4877848491069596, |
|
"grad_norm": 0.08856850117444992, |
|
"learning_rate": 8.384530992868898e-05, |
|
"loss": 0.3288, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.48860603572161776, |
|
"grad_norm": 0.08670219779014587, |
|
"learning_rate": 8.381788261108064e-05, |
|
"loss": 0.4113, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.4894272223362759, |
|
"grad_norm": 0.0814259722828865, |
|
"learning_rate": 8.37904552934723e-05, |
|
"loss": 0.3735, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.4902484089509341, |
|
"grad_norm": 0.08457124978303909, |
|
"learning_rate": 8.376302797586397e-05, |
|
"loss": 0.3516, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.49106959556559227, |
|
"grad_norm": 0.0774846151471138, |
|
"learning_rate": 8.373560065825563e-05, |
|
"loss": 0.3604, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.49189078218025045, |
|
"grad_norm": 0.07951226830482483, |
|
"learning_rate": 8.370817334064729e-05, |
|
"loss": 0.3016, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.49271196879490864, |
|
"grad_norm": 0.08235177397727966, |
|
"learning_rate": 8.368074602303894e-05, |
|
"loss": 0.3502, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.4935331554095668, |
|
"grad_norm": 0.08112053573131561, |
|
"learning_rate": 8.365331870543061e-05, |
|
"loss": 0.3122, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.494354342024225, |
|
"grad_norm": 0.07517421990633011, |
|
"learning_rate": 8.362589138782228e-05, |
|
"loss": 0.3482, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.4951755286388832, |
|
"grad_norm": 0.08243374526500702, |
|
"learning_rate": 8.359846407021393e-05, |
|
"loss": 0.3455, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.4959967152535414, |
|
"grad_norm": 0.08028637617826462, |
|
"learning_rate": 8.35710367526056e-05, |
|
"loss": 0.3454, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.4968179018681996, |
|
"grad_norm": 0.08006370067596436, |
|
"learning_rate": 8.354360943499726e-05, |
|
"loss": 0.3216, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.4976390884828577, |
|
"grad_norm": 0.1940547525882721, |
|
"learning_rate": 8.351618211738892e-05, |
|
"loss": 0.3017, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.4984602750975159, |
|
"grad_norm": 0.07875222712755203, |
|
"learning_rate": 8.348875479978059e-05, |
|
"loss": 0.3295, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.4992814617121741, |
|
"grad_norm": 0.07511032372713089, |
|
"learning_rate": 8.346132748217225e-05, |
|
"loss": 0.328, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.5001026483268323, |
|
"grad_norm": 0.08197803795337677, |
|
"learning_rate": 8.34339001645639e-05, |
|
"loss": 0.3232, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.5009238349414905, |
|
"grad_norm": 0.08152435719966888, |
|
"learning_rate": 8.340647284695558e-05, |
|
"loss": 0.3546, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.5017450215561486, |
|
"grad_norm": 0.0796707347035408, |
|
"learning_rate": 8.337904552934724e-05, |
|
"loss": 0.3518, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.5025662081708068, |
|
"grad_norm": 0.07969588786363602, |
|
"learning_rate": 8.335161821173889e-05, |
|
"loss": 0.3195, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.503387394785465, |
|
"grad_norm": 0.08270443230867386, |
|
"learning_rate": 8.332419089413056e-05, |
|
"loss": 0.3244, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.5042085814001231, |
|
"grad_norm": 0.08303502202033997, |
|
"learning_rate": 8.329676357652221e-05, |
|
"loss": 0.3386, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.5050297680147814, |
|
"grad_norm": 0.07783270627260208, |
|
"learning_rate": 8.326933625891388e-05, |
|
"loss": 0.3046, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.5058509546294395, |
|
"grad_norm": 0.08112157136201859, |
|
"learning_rate": 8.324190894130555e-05, |
|
"loss": 0.3244, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.5058509546294395, |
|
"eval_runtime": 494.4481, |
|
"eval_samples_per_second": 0.398, |
|
"eval_steps_per_second": 0.2, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.5066721412440978, |
|
"grad_norm": 0.08969878405332565, |
|
"learning_rate": 8.32144816236972e-05, |
|
"loss": 0.3365, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.5074933278587559, |
|
"grad_norm": 0.08142217993736267, |
|
"learning_rate": 8.318705430608886e-05, |
|
"loss": 0.3413, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.5083145144734141, |
|
"grad_norm": 0.08319269865751266, |
|
"learning_rate": 8.315962698848054e-05, |
|
"loss": 0.3458, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.5091357010880723, |
|
"grad_norm": 0.08104688674211502, |
|
"learning_rate": 8.313219967087219e-05, |
|
"loss": 0.3286, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.5099568877027304, |
|
"grad_norm": 0.08447278290987015, |
|
"learning_rate": 8.310477235326386e-05, |
|
"loss": 0.3485, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.5107780743173886, |
|
"grad_norm": 0.08879225701093674, |
|
"learning_rate": 8.307734503565551e-05, |
|
"loss": 0.2986, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.5115992609320468, |
|
"grad_norm": 0.07878706604242325, |
|
"learning_rate": 8.304991771804718e-05, |
|
"loss": 0.3301, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.512420447546705, |
|
"grad_norm": 0.08089771121740341, |
|
"learning_rate": 8.302249040043885e-05, |
|
"loss": 0.3183, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.5132416341613631, |
|
"grad_norm": 0.08130493015050888, |
|
"learning_rate": 8.29950630828305e-05, |
|
"loss": 0.3559, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.5140628207760214, |
|
"grad_norm": 0.0880448967218399, |
|
"learning_rate": 8.296763576522216e-05, |
|
"loss": 0.3567, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.5148840073906795, |
|
"grad_norm": 0.087735615670681, |
|
"learning_rate": 8.294020844761383e-05, |
|
"loss": 0.3123, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.5157051940053378, |
|
"grad_norm": 0.08659686893224716, |
|
"learning_rate": 8.29127811300055e-05, |
|
"loss": 0.3212, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.5165263806199959, |
|
"grad_norm": 0.0853281244635582, |
|
"learning_rate": 8.288535381239715e-05, |
|
"loss": 0.3337, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.517347567234654, |
|
"grad_norm": 0.07861575484275818, |
|
"learning_rate": 8.285792649478882e-05, |
|
"loss": 0.3666, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5181687538493123, |
|
"grad_norm": 0.08911813795566559, |
|
"learning_rate": 8.283049917718047e-05, |
|
"loss": 0.349, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.5189899404639704, |
|
"grad_norm": 0.08234114944934845, |
|
"learning_rate": 8.280307185957214e-05, |
|
"loss": 0.3354, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.5198111270786286, |
|
"grad_norm": 0.08694402128458023, |
|
"learning_rate": 8.277564454196381e-05, |
|
"loss": 0.3321, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.5206323136932868, |
|
"grad_norm": 0.08186987787485123, |
|
"learning_rate": 8.274821722435546e-05, |
|
"loss": 0.3595, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.521453500307945, |
|
"grad_norm": 0.08425264060497284, |
|
"learning_rate": 8.272078990674712e-05, |
|
"loss": 0.3641, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.5222746869226031, |
|
"grad_norm": 0.1728411465883255, |
|
"learning_rate": 8.269336258913878e-05, |
|
"loss": 0.3646, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.5230958735372614, |
|
"grad_norm": 0.09049440920352936, |
|
"learning_rate": 8.266593527153045e-05, |
|
"loss": 0.3368, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.5239170601519195, |
|
"grad_norm": 0.081216000020504, |
|
"learning_rate": 8.26385079539221e-05, |
|
"loss": 0.3814, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.5247382467665777, |
|
"grad_norm": 0.08497685194015503, |
|
"learning_rate": 8.261108063631377e-05, |
|
"loss": 0.3315, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.5255594333812359, |
|
"grad_norm": 0.08426640927791595, |
|
"learning_rate": 8.258365331870543e-05, |
|
"loss": 0.3366, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.526380619995894, |
|
"grad_norm": 0.08354829251766205, |
|
"learning_rate": 8.25562260010971e-05, |
|
"loss": 0.3439, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.5272018066105523, |
|
"grad_norm": 0.08015681803226471, |
|
"learning_rate": 8.252879868348876e-05, |
|
"loss": 0.3401, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.5280229932252104, |
|
"grad_norm": 0.08374017477035522, |
|
"learning_rate": 8.250137136588042e-05, |
|
"loss": 0.3533, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.5288441798398686, |
|
"grad_norm": 0.09182158857584, |
|
"learning_rate": 8.247394404827207e-05, |
|
"loss": 0.2983, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.5296653664545268, |
|
"grad_norm": 0.08268212527036667, |
|
"learning_rate": 8.244651673066375e-05, |
|
"loss": 0.331, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.530486553069185, |
|
"grad_norm": 0.08059857040643692, |
|
"learning_rate": 8.241908941305541e-05, |
|
"loss": 0.3448, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.5313077396838431, |
|
"grad_norm": 0.08811867982149124, |
|
"learning_rate": 8.239166209544706e-05, |
|
"loss": 0.3678, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.5321289262985013, |
|
"grad_norm": 0.07894317060709, |
|
"learning_rate": 8.236423477783873e-05, |
|
"loss": 0.3456, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.5329501129131595, |
|
"grad_norm": 0.07806131988763809, |
|
"learning_rate": 8.23368074602304e-05, |
|
"loss": 0.3286, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.5337712995278177, |
|
"grad_norm": 0.08501176536083221, |
|
"learning_rate": 8.230938014262205e-05, |
|
"loss": 0.3383, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.5345924861424759, |
|
"grad_norm": 0.12988948822021484, |
|
"learning_rate": 8.228195282501372e-05, |
|
"loss": 0.3384, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.535413672757134, |
|
"grad_norm": 0.08005869388580322, |
|
"learning_rate": 8.225452550740537e-05, |
|
"loss": 0.323, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.5362348593717923, |
|
"grad_norm": 0.07902330160140991, |
|
"learning_rate": 8.222709818979704e-05, |
|
"loss": 0.3248, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.5370560459864504, |
|
"grad_norm": 0.08319025486707687, |
|
"learning_rate": 8.219967087218871e-05, |
|
"loss": 0.3145, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.5378772326011086, |
|
"grad_norm": 0.08141138404607773, |
|
"learning_rate": 8.217224355458037e-05, |
|
"loss": 0.3105, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.5386984192157668, |
|
"grad_norm": 0.07592695206403732, |
|
"learning_rate": 8.214481623697203e-05, |
|
"loss": 0.3184, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.5395196058304249, |
|
"grad_norm": 0.16522720456123352, |
|
"learning_rate": 8.211738891936369e-05, |
|
"loss": 0.3492, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.5403407924450832, |
|
"grad_norm": 0.0965307429432869, |
|
"learning_rate": 8.208996160175536e-05, |
|
"loss": 0.3012, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.5411619790597413, |
|
"grad_norm": 0.08132067322731018, |
|
"learning_rate": 8.206253428414702e-05, |
|
"loss": 0.3256, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.5419831656743995, |
|
"grad_norm": 0.08183111250400543, |
|
"learning_rate": 8.203510696653868e-05, |
|
"loss": 0.3591, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.5428043522890577, |
|
"grad_norm": 0.07674401253461838, |
|
"learning_rate": 8.200767964893033e-05, |
|
"loss": 0.3273, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.5436255389037159, |
|
"grad_norm": 0.07937440276145935, |
|
"learning_rate": 8.1980252331322e-05, |
|
"loss": 0.3028, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.544446725518374, |
|
"grad_norm": 0.07886291295289993, |
|
"learning_rate": 8.195282501371367e-05, |
|
"loss": 0.3452, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.5452679121330323, |
|
"grad_norm": 0.078894704580307, |
|
"learning_rate": 8.192539769610532e-05, |
|
"loss": 0.3447, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.5460890987476904, |
|
"grad_norm": 0.0840119943022728, |
|
"learning_rate": 8.189797037849699e-05, |
|
"loss": 0.3348, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.5469102853623486, |
|
"grad_norm": 0.08318603038787842, |
|
"learning_rate": 8.187054306088864e-05, |
|
"loss": 0.3523, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.5477314719770068, |
|
"grad_norm": 0.08060989528894424, |
|
"learning_rate": 8.184311574328031e-05, |
|
"loss": 0.3211, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.5485526585916649, |
|
"grad_norm": 0.07878737151622772, |
|
"learning_rate": 8.181568842567198e-05, |
|
"loss": 0.3865, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.5493738452063232, |
|
"grad_norm": 0.08277317881584167, |
|
"learning_rate": 8.178826110806363e-05, |
|
"loss": 0.3507, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.5501950318209813, |
|
"grad_norm": 0.15883035957813263, |
|
"learning_rate": 8.176083379045529e-05, |
|
"loss": 0.3428, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.5510162184356395, |
|
"grad_norm": 0.08117318898439407, |
|
"learning_rate": 8.173340647284696e-05, |
|
"loss": 0.3297, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.5518374050502977, |
|
"grad_norm": 0.07886844873428345, |
|
"learning_rate": 8.170597915523862e-05, |
|
"loss": 0.3489, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.5526585916649559, |
|
"grad_norm": 0.0774691253900528, |
|
"learning_rate": 8.167855183763028e-05, |
|
"loss": 0.3438, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.553479778279614, |
|
"grad_norm": 0.07944227755069733, |
|
"learning_rate": 8.165112452002195e-05, |
|
"loss": 0.3183, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.5543009648942723, |
|
"grad_norm": 0.08308850973844528, |
|
"learning_rate": 8.16236972024136e-05, |
|
"loss": 0.3268, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.5551221515089304, |
|
"grad_norm": 0.07913679629564285, |
|
"learning_rate": 8.159626988480527e-05, |
|
"loss": 0.3358, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.5559433381235885, |
|
"grad_norm": 0.08472824096679688, |
|
"learning_rate": 8.156884256719694e-05, |
|
"loss": 0.3619, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.5567645247382468, |
|
"grad_norm": 0.07870602607727051, |
|
"learning_rate": 8.154141524958859e-05, |
|
"loss": 0.3372, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.5575857113529049, |
|
"grad_norm": 0.07889708876609802, |
|
"learning_rate": 8.151398793198025e-05, |
|
"loss": 0.34, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.5584068979675632, |
|
"grad_norm": 0.0856415405869484, |
|
"learning_rate": 8.148656061437193e-05, |
|
"loss": 0.3132, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.5592280845822213, |
|
"grad_norm": 0.0840810164809227, |
|
"learning_rate": 8.145913329676358e-05, |
|
"loss": 0.3451, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.5600492711968795, |
|
"grad_norm": 0.07759146392345428, |
|
"learning_rate": 8.143170597915524e-05, |
|
"loss": 0.3452, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.5608704578115377, |
|
"grad_norm": 0.08038268238306046, |
|
"learning_rate": 8.14042786615469e-05, |
|
"loss": 0.3103, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.5616916444261959, |
|
"grad_norm": 0.07695576548576355, |
|
"learning_rate": 8.137685134393857e-05, |
|
"loss": 0.342, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.562512831040854, |
|
"grad_norm": 0.08458397537469864, |
|
"learning_rate": 8.134942402633024e-05, |
|
"loss": 0.344, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.5633340176555122, |
|
"grad_norm": 0.08124341070652008, |
|
"learning_rate": 8.13219967087219e-05, |
|
"loss": 0.3235, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.5641552042701704, |
|
"grad_norm": 0.09097409248352051, |
|
"learning_rate": 8.129456939111355e-05, |
|
"loss": 0.3142, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.5649763908848285, |
|
"grad_norm": 0.07753387093544006, |
|
"learning_rate": 8.126714207350522e-05, |
|
"loss": 0.3295, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.5657975774994868, |
|
"grad_norm": 0.07960804551839828, |
|
"learning_rate": 8.123971475589688e-05, |
|
"loss": 0.3156, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.5666187641141449, |
|
"grad_norm": 0.0779690146446228, |
|
"learning_rate": 8.121228743828854e-05, |
|
"loss": 0.3224, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.5674399507288032, |
|
"grad_norm": 0.07964863628149033, |
|
"learning_rate": 8.11848601206802e-05, |
|
"loss": 0.3846, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.5682611373434613, |
|
"grad_norm": 0.08143079280853271, |
|
"learning_rate": 8.115743280307186e-05, |
|
"loss": 0.3268, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.5690823239581195, |
|
"grad_norm": 0.08909327536821365, |
|
"learning_rate": 8.113000548546353e-05, |
|
"loss": 0.3354, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.5699035105727777, |
|
"grad_norm": 0.07217192649841309, |
|
"learning_rate": 8.11025781678552e-05, |
|
"loss": 0.3077, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.5707246971874358, |
|
"grad_norm": 0.07756021618843079, |
|
"learning_rate": 8.107515085024685e-05, |
|
"loss": 0.3244, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.571545883802094, |
|
"grad_norm": 0.08227197080850601, |
|
"learning_rate": 8.10477235326385e-05, |
|
"loss": 0.34, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.5723670704167522, |
|
"grad_norm": 0.07765581458806992, |
|
"learning_rate": 8.102029621503017e-05, |
|
"loss": 0.2898, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.5731882570314104, |
|
"grad_norm": 0.07710105925798416, |
|
"learning_rate": 8.099286889742184e-05, |
|
"loss": 0.3056, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.5740094436460685, |
|
"grad_norm": 0.07832709699869156, |
|
"learning_rate": 8.09654415798135e-05, |
|
"loss": 0.3338, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.5748306302607268, |
|
"grad_norm": 0.08406300097703934, |
|
"learning_rate": 8.093801426220516e-05, |
|
"loss": 0.3483, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.5756518168753849, |
|
"grad_norm": 0.07969654351472855, |
|
"learning_rate": 8.091058694459682e-05, |
|
"loss": 0.3645, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.5764730034900432, |
|
"grad_norm": 0.07744769752025604, |
|
"learning_rate": 8.088315962698848e-05, |
|
"loss": 0.3324, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.5772941901047013, |
|
"grad_norm": 0.07306936383247375, |
|
"learning_rate": 8.085573230938015e-05, |
|
"loss": 0.3011, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.5781153767193594, |
|
"grad_norm": 0.09406828135251999, |
|
"learning_rate": 8.082830499177181e-05, |
|
"loss": 0.3518, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.5781153767193594, |
|
"eval_runtime": 494.5472, |
|
"eval_samples_per_second": 0.398, |
|
"eval_steps_per_second": 0.2, |
|
"step": 704 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 3651, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 88, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.1068114755114435e+19, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|