|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.2349409384585264, |
|
"eval_steps": 500, |
|
"global_step": 3600, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0006526137179403511, |
|
"grad_norm": 17.690582114691438, |
|
"learning_rate": 1.948051948051948e-06, |
|
"loss": 1.3559, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0013052274358807021, |
|
"grad_norm": 7.768088366444893, |
|
"learning_rate": 3.896103896103896e-06, |
|
"loss": 1.2706, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.001957841153821053, |
|
"grad_norm": 7.705313536090087, |
|
"learning_rate": 5.844155844155845e-06, |
|
"loss": 1.3781, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0026104548717614043, |
|
"grad_norm": 34.39078827766783, |
|
"learning_rate": 7.792207792207792e-06, |
|
"loss": 1.2749, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0032630685897017554, |
|
"grad_norm": 68.28824334896528, |
|
"learning_rate": 9.74025974025974e-06, |
|
"loss": 1.2955, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.003915682307642106, |
|
"grad_norm": 14.220322607917241, |
|
"learning_rate": 1.168831168831169e-05, |
|
"loss": 1.2315, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.0045682960255824575, |
|
"grad_norm": 12.611848231734811, |
|
"learning_rate": 1.3636363636363637e-05, |
|
"loss": 1.0953, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0052209097435228086, |
|
"grad_norm": 6.055664298727015, |
|
"learning_rate": 1.5584415584415583e-05, |
|
"loss": 1.105, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.00587352346146316, |
|
"grad_norm": 3.52269227801977, |
|
"learning_rate": 1.753246753246753e-05, |
|
"loss": 0.9563, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.006526137179403511, |
|
"grad_norm": 10.771884023354394, |
|
"learning_rate": 1.948051948051948e-05, |
|
"loss": 0.9523, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.007178750897343862, |
|
"grad_norm": 33.41476483216757, |
|
"learning_rate": 2.1428571428571428e-05, |
|
"loss": 0.832, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.007831364615284213, |
|
"grad_norm": 31.120240364617406, |
|
"learning_rate": 2.337662337662338e-05, |
|
"loss": 0.8376, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.008483978333224564, |
|
"grad_norm": 5.517231564060886, |
|
"learning_rate": 2.5324675324675325e-05, |
|
"loss": 0.8293, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.009136592051164915, |
|
"grad_norm": 4.311605388342058, |
|
"learning_rate": 2.7272727272727273e-05, |
|
"loss": 0.8295, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.009789205769105266, |
|
"grad_norm": 6.997724163121519, |
|
"learning_rate": 2.922077922077922e-05, |
|
"loss": 0.7662, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.010441819487045617, |
|
"grad_norm": 6.517836234400708, |
|
"learning_rate": 2.999998841890695e-05, |
|
"loss": 0.8158, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.011094433204985968, |
|
"grad_norm": 4.186989141019666, |
|
"learning_rate": 2.99999176456253e-05, |
|
"loss": 0.8037, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.01174704692292632, |
|
"grad_norm": 5.181546943355458, |
|
"learning_rate": 2.9999782533305785e-05, |
|
"loss": 0.7274, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.01239966064086667, |
|
"grad_norm": 3.767076521211455, |
|
"learning_rate": 2.9999583082527935e-05, |
|
"loss": 0.7474, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.013052274358807021, |
|
"grad_norm": 18.84416377940188, |
|
"learning_rate": 2.999931929414726e-05, |
|
"loss": 0.7708, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.013704888076747372, |
|
"grad_norm": 3.169160630444992, |
|
"learning_rate": 2.999899116929522e-05, |
|
"loss": 0.8279, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.014357501794687724, |
|
"grad_norm": 1.912782077307437, |
|
"learning_rate": 2.999859870937924e-05, |
|
"loss": 0.7407, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.015010115512628075, |
|
"grad_norm": 3.3906505952914974, |
|
"learning_rate": 2.9998141916082696e-05, |
|
"loss": 0.7732, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.015662729230568426, |
|
"grad_norm": 2.7144492322383584, |
|
"learning_rate": 2.999762079136491e-05, |
|
"loss": 0.7272, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.01631534294850878, |
|
"grad_norm": 7.109330196029837, |
|
"learning_rate": 2.9997035337461135e-05, |
|
"loss": 0.7748, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.016967956666449128, |
|
"grad_norm": 1.6054280593801813, |
|
"learning_rate": 2.9996385556882555e-05, |
|
"loss": 0.7676, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.01762057038438948, |
|
"grad_norm": 10.883212441614672, |
|
"learning_rate": 2.9995671452416274e-05, |
|
"loss": 0.735, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.01827318410232983, |
|
"grad_norm": 3.511064886507805, |
|
"learning_rate": 2.999489302712529e-05, |
|
"loss": 0.7741, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.018925797820270183, |
|
"grad_norm": 3.618603818375307, |
|
"learning_rate": 2.9994050284348497e-05, |
|
"loss": 0.749, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.019578411538210532, |
|
"grad_norm": 6.012944880342178, |
|
"learning_rate": 2.9993143227700668e-05, |
|
"loss": 0.7411, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.020231025256150885, |
|
"grad_norm": 2.348670372295822, |
|
"learning_rate": 2.9992171861072428e-05, |
|
"loss": 0.7394, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.020883638974091234, |
|
"grad_norm": 4.728309497649916, |
|
"learning_rate": 2.9991136188630263e-05, |
|
"loss": 0.8077, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.021536252692031587, |
|
"grad_norm": 15.611917863290122, |
|
"learning_rate": 2.9990036214816467e-05, |
|
"loss": 0.7209, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.022188866409971936, |
|
"grad_norm": 3.7315277354070817, |
|
"learning_rate": 2.998887194434916e-05, |
|
"loss": 0.7101, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.02284148012791229, |
|
"grad_norm": 6.618759094750745, |
|
"learning_rate": 2.998764338222222e-05, |
|
"loss": 0.7759, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.02349409384585264, |
|
"grad_norm": 6.770044306239603, |
|
"learning_rate": 2.998635053370533e-05, |
|
"loss": 0.7398, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.02414670756379299, |
|
"grad_norm": 12.471224202357552, |
|
"learning_rate": 2.998499340434389e-05, |
|
"loss": 0.7046, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.02479932128173334, |
|
"grad_norm": 4.147359416986547, |
|
"learning_rate": 2.9983571999959013e-05, |
|
"loss": 0.761, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.025451934999673693, |
|
"grad_norm": 34.84722866603778, |
|
"learning_rate": 2.9982086326647533e-05, |
|
"loss": 0.757, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.026104548717614043, |
|
"grad_norm": 5.245498180313093, |
|
"learning_rate": 2.998053639078193e-05, |
|
"loss": 0.7536, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.026757162435554396, |
|
"grad_norm": 36.55990241841121, |
|
"learning_rate": 2.997892219901034e-05, |
|
"loss": 0.7395, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.027409776153494745, |
|
"grad_norm": 5.03198653806696, |
|
"learning_rate": 2.9977243758256494e-05, |
|
"loss": 0.7208, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.028062389871435098, |
|
"grad_norm": 11.376914733036081, |
|
"learning_rate": 2.997550107571972e-05, |
|
"loss": 0.719, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.028715003589375447, |
|
"grad_norm": 2.958119684662306, |
|
"learning_rate": 2.9973694158874898e-05, |
|
"loss": 0.7271, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.0293676173073158, |
|
"grad_norm": 6.037096737490817, |
|
"learning_rate": 2.9971823015472418e-05, |
|
"loss": 0.7356, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.03002023102525615, |
|
"grad_norm": 5.3042973640363575, |
|
"learning_rate": 2.9969887653538164e-05, |
|
"loss": 0.7207, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.030672844743196502, |
|
"grad_norm": 2.4985603001745624, |
|
"learning_rate": 2.996788808137347e-05, |
|
"loss": 0.7769, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.03132545846113685, |
|
"grad_norm": 7.607065841315647, |
|
"learning_rate": 2.9965824307555084e-05, |
|
"loss": 0.7091, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.03197807217907721, |
|
"grad_norm": 4.322533035107957, |
|
"learning_rate": 2.9963696340935144e-05, |
|
"loss": 0.7114, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.03263068589701756, |
|
"grad_norm": 5.878565903250334, |
|
"learning_rate": 2.9961504190641108e-05, |
|
"loss": 0.7284, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.033283299614957906, |
|
"grad_norm": 5.0026507027119855, |
|
"learning_rate": 2.9959247866075764e-05, |
|
"loss": 0.6992, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.033935913332898256, |
|
"grad_norm": 7.12632150273901, |
|
"learning_rate": 2.9956927376917137e-05, |
|
"loss": 0.7285, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.03458852705083861, |
|
"grad_norm": 5.211123255860348, |
|
"learning_rate": 2.9954542733118496e-05, |
|
"loss": 0.7511, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.03524114076877896, |
|
"grad_norm": 9.925273547498618, |
|
"learning_rate": 2.995209394490827e-05, |
|
"loss": 0.7699, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.03589375448671931, |
|
"grad_norm": 7.418381681996765, |
|
"learning_rate": 2.9949581022790025e-05, |
|
"loss": 0.759, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.03654636820465966, |
|
"grad_norm": 4.352380973507467, |
|
"learning_rate": 2.9947003977542423e-05, |
|
"loss": 0.7537, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.037198981922600016, |
|
"grad_norm": 9.712842120769198, |
|
"learning_rate": 2.9944362820219167e-05, |
|
"loss": 0.7063, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.037851595640540366, |
|
"grad_norm": 5.757600819230482, |
|
"learning_rate": 2.994165756214895e-05, |
|
"loss": 0.7893, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.038504209358480715, |
|
"grad_norm": 5.529209601152462, |
|
"learning_rate": 2.9938888214935426e-05, |
|
"loss": 0.6771, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.039156823076421064, |
|
"grad_norm": 10.550479346499758, |
|
"learning_rate": 2.9936054790457127e-05, |
|
"loss": 0.737, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.03980943679436142, |
|
"grad_norm": 8.284279553451016, |
|
"learning_rate": 2.9933157300867437e-05, |
|
"loss": 0.7182, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.04046205051230177, |
|
"grad_norm": 8.18511648646326, |
|
"learning_rate": 2.9930195758594542e-05, |
|
"loss": 0.6901, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.04111466423024212, |
|
"grad_norm": 14.569754827631956, |
|
"learning_rate": 2.9927170176341365e-05, |
|
"loss": 0.7008, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.04176727794818247, |
|
"grad_norm": 4.214581273685441, |
|
"learning_rate": 2.992408056708551e-05, |
|
"loss": 0.7489, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.042419891666122825, |
|
"grad_norm": 10.038596627079452, |
|
"learning_rate": 2.9920926944079224e-05, |
|
"loss": 0.7649, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.043072505384063174, |
|
"grad_norm": 2.386544029221306, |
|
"learning_rate": 2.9917709320849305e-05, |
|
"loss": 0.7223, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.043725119102003523, |
|
"grad_norm": 8.286359254511249, |
|
"learning_rate": 2.9914427711197096e-05, |
|
"loss": 0.7089, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.04437773281994387, |
|
"grad_norm": 4.235819327444911, |
|
"learning_rate": 2.9911082129198372e-05, |
|
"loss": 0.7138, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.04503034653788423, |
|
"grad_norm": 5.187338033698449, |
|
"learning_rate": 2.9907672589203316e-05, |
|
"loss": 0.7192, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.04568296025582458, |
|
"grad_norm": 6.360475337181379, |
|
"learning_rate": 2.9904199105836443e-05, |
|
"loss": 0.7094, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.04633557397376493, |
|
"grad_norm": 4.906400836156689, |
|
"learning_rate": 2.990066169399654e-05, |
|
"loss": 0.654, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.04698818769170528, |
|
"grad_norm": 17.600495314130633, |
|
"learning_rate": 2.9897060368856603e-05, |
|
"loss": 0.7299, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.04764080140964563, |
|
"grad_norm": 7.765935941492389, |
|
"learning_rate": 2.989339514586377e-05, |
|
"loss": 0.7486, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.04829341512758598, |
|
"grad_norm": 7.30026395137639, |
|
"learning_rate": 2.9889666040739252e-05, |
|
"loss": 0.6941, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.04894602884552633, |
|
"grad_norm": 4.676985481218465, |
|
"learning_rate": 2.9885873069478275e-05, |
|
"loss": 0.7701, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.04959864256346668, |
|
"grad_norm": 42.50656974727186, |
|
"learning_rate": 2.9882016248350006e-05, |
|
"loss": 0.7428, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.05025125628140704, |
|
"grad_norm": 3.9893667031114766, |
|
"learning_rate": 2.9878095593897474e-05, |
|
"loss": 0.7204, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.05090386999934739, |
|
"grad_norm": 8.909028486553332, |
|
"learning_rate": 2.9874111122937518e-05, |
|
"loss": 0.7336, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.051556483717287736, |
|
"grad_norm": 5.256925284136456, |
|
"learning_rate": 2.9870062852560698e-05, |
|
"loss": 0.7674, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.052209097435228086, |
|
"grad_norm": 5.835535487534073, |
|
"learning_rate": 2.986595080013123e-05, |
|
"loss": 0.7547, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.05286171115316844, |
|
"grad_norm": 4.7337998648314565, |
|
"learning_rate": 2.9861774983286913e-05, |
|
"loss": 0.7412, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.05351432487110879, |
|
"grad_norm": 4.020304406250962, |
|
"learning_rate": 2.9857535419939053e-05, |
|
"loss": 0.7351, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.05416693858904914, |
|
"grad_norm": 7.005748568175158, |
|
"learning_rate": 2.9853232128272367e-05, |
|
"loss": 0.7146, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.05481955230698949, |
|
"grad_norm": 12.598315147497464, |
|
"learning_rate": 2.984886512674494e-05, |
|
"loss": 0.7066, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.055472166024929846, |
|
"grad_norm": 5.636755294839953, |
|
"learning_rate": 2.9844434434088114e-05, |
|
"loss": 0.8033, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.056124779742870196, |
|
"grad_norm": 2.5964949457129305, |
|
"learning_rate": 2.9839940069306436e-05, |
|
"loss": 0.718, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.056777393460810545, |
|
"grad_norm": 5.496060434333994, |
|
"learning_rate": 2.9835382051677548e-05, |
|
"loss": 0.7382, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.057430007178750894, |
|
"grad_norm": 3.367511777906771, |
|
"learning_rate": 2.9830760400752117e-05, |
|
"loss": 0.7049, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.05808262089669125, |
|
"grad_norm": 12.228282751386294, |
|
"learning_rate": 2.9826075136353762e-05, |
|
"loss": 0.7135, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.0587352346146316, |
|
"grad_norm": 7.426066867205744, |
|
"learning_rate": 2.9821326278578955e-05, |
|
"loss": 0.6966, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.05938784833257195, |
|
"grad_norm": 5.720080945169142, |
|
"learning_rate": 2.981651384779693e-05, |
|
"loss": 0.7325, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.0600404620505123, |
|
"grad_norm": 3.3362738196336275, |
|
"learning_rate": 2.9811637864649622e-05, |
|
"loss": 0.7013, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.060693075768452655, |
|
"grad_norm": 5.5481143050516675, |
|
"learning_rate": 2.980669835005154e-05, |
|
"loss": 0.7107, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.061345689486393004, |
|
"grad_norm": 2.7247889305754533, |
|
"learning_rate": 2.980169532518971e-05, |
|
"loss": 0.6839, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.06199830320433335, |
|
"grad_norm": 12.705144630158374, |
|
"learning_rate": 2.9796628811523576e-05, |
|
"loss": 0.7061, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.0626509169222737, |
|
"grad_norm": 3.1174966376805777, |
|
"learning_rate": 2.9791498830784896e-05, |
|
"loss": 0.706, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.06330353064021406, |
|
"grad_norm": 6.454819870022971, |
|
"learning_rate": 2.9786305404977657e-05, |
|
"loss": 0.6901, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.06395614435815442, |
|
"grad_norm": 8.62099817289566, |
|
"learning_rate": 2.9781048556377982e-05, |
|
"loss": 0.6737, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.06460875807609476, |
|
"grad_norm": 12.649532843245389, |
|
"learning_rate": 2.977572830753404e-05, |
|
"loss": 0.6777, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.06526137179403511, |
|
"grad_norm": 5.019508830810828, |
|
"learning_rate": 2.9770344681265925e-05, |
|
"loss": 0.7125, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.06591398551197546, |
|
"grad_norm": 5.417114630539967, |
|
"learning_rate": 2.9764897700665595e-05, |
|
"loss": 0.7558, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.06656659922991581, |
|
"grad_norm": 13.487574757960102, |
|
"learning_rate": 2.975938738909674e-05, |
|
"loss": 0.7305, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.06721921294785617, |
|
"grad_norm": 4.115297871929447, |
|
"learning_rate": 2.97538137701947e-05, |
|
"loss": 0.7382, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.06787182666579651, |
|
"grad_norm": 4.218133725965425, |
|
"learning_rate": 2.974817686786636e-05, |
|
"loss": 0.7131, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.06852444038373687, |
|
"grad_norm": 23.754945260227526, |
|
"learning_rate": 2.9742476706290044e-05, |
|
"loss": 0.6854, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.06917705410167722, |
|
"grad_norm": 9.992382581534882, |
|
"learning_rate": 2.973671330991541e-05, |
|
"loss": 0.7224, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.06982966781961757, |
|
"grad_norm": 9.022842665053004, |
|
"learning_rate": 2.973088670346336e-05, |
|
"loss": 0.69, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.07048228153755792, |
|
"grad_norm": 7.180693480173149, |
|
"learning_rate": 2.97249969119259e-05, |
|
"loss": 0.6752, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.07113489525549826, |
|
"grad_norm": 4.631581340679664, |
|
"learning_rate": 2.9719043960566088e-05, |
|
"loss": 0.7078, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.07178750897343862, |
|
"grad_norm": 3.8365551360021497, |
|
"learning_rate": 2.9713027874917867e-05, |
|
"loss": 0.7455, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.07244012269137898, |
|
"grad_norm": 20.612721990589407, |
|
"learning_rate": 2.9706948680785984e-05, |
|
"loss": 0.7123, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.07309273640931932, |
|
"grad_norm": 8.515913036269723, |
|
"learning_rate": 2.9700806404245893e-05, |
|
"loss": 0.6755, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.07374535012725968, |
|
"grad_norm": 8.702591994450561, |
|
"learning_rate": 2.9694601071643607e-05, |
|
"loss": 0.743, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.07439796384520003, |
|
"grad_norm": 20.204623397644042, |
|
"learning_rate": 2.968833270959562e-05, |
|
"loss": 0.6995, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.07505057756314037, |
|
"grad_norm": 3.4150625200259563, |
|
"learning_rate": 2.9682001344988768e-05, |
|
"loss": 0.7245, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.07570319128108073, |
|
"grad_norm": 4.827412673105033, |
|
"learning_rate": 2.967560700498013e-05, |
|
"loss": 0.6764, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.07635580499902107, |
|
"grad_norm": 5.9778449783108965, |
|
"learning_rate": 2.9669149716996897e-05, |
|
"loss": 0.7094, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.07700841871696143, |
|
"grad_norm": 4.626419468156439, |
|
"learning_rate": 2.9662629508736278e-05, |
|
"loss": 0.7139, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.07766103243490179, |
|
"grad_norm": 8.23953369228554, |
|
"learning_rate": 2.9656046408165344e-05, |
|
"loss": 0.7132, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.07831364615284213, |
|
"grad_norm": 5.755275462407804, |
|
"learning_rate": 2.964940044352095e-05, |
|
"loss": 0.6923, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.07896625987078248, |
|
"grad_norm": 3.8396649246253816, |
|
"learning_rate": 2.9642691643309572e-05, |
|
"loss": 0.7082, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.07961887358872284, |
|
"grad_norm": 5.7429454484886415, |
|
"learning_rate": 2.963592003630723e-05, |
|
"loss": 0.7095, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.08027148730666318, |
|
"grad_norm": 17.628494673763004, |
|
"learning_rate": 2.962908565155932e-05, |
|
"loss": 0.7309, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.08092410102460354, |
|
"grad_norm": 4.83400055237192, |
|
"learning_rate": 2.9622188518380528e-05, |
|
"loss": 0.6925, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.08157671474254388, |
|
"grad_norm": 3.1535973307593905, |
|
"learning_rate": 2.9615228666354667e-05, |
|
"loss": 0.7441, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.08222932846048424, |
|
"grad_norm": 4.085385929026401, |
|
"learning_rate": 2.9608206125334586e-05, |
|
"loss": 0.7137, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.0828819421784246, |
|
"grad_norm": 4.299591870123697, |
|
"learning_rate": 2.9601120925442016e-05, |
|
"loss": 0.7515, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.08353455589636494, |
|
"grad_norm": 12.873434323415678, |
|
"learning_rate": 2.959397309706746e-05, |
|
"loss": 0.6852, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.0841871696143053, |
|
"grad_norm": 6.427088345402557, |
|
"learning_rate": 2.958676267087004e-05, |
|
"loss": 0.6499, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.08483978333224565, |
|
"grad_norm": 4.70723263638176, |
|
"learning_rate": 2.9579489677777387e-05, |
|
"loss": 0.6803, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.08549239705018599, |
|
"grad_norm": 4.819218491318424, |
|
"learning_rate": 2.9572154148985495e-05, |
|
"loss": 0.6798, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.08614501076812635, |
|
"grad_norm": 3.0652661968089827, |
|
"learning_rate": 2.9564756115958592e-05, |
|
"loss": 0.6935, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.08679762448606669, |
|
"grad_norm": 5.997224165634556, |
|
"learning_rate": 2.9557295610429017e-05, |
|
"loss": 0.7133, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.08745023820400705, |
|
"grad_norm": 3.3593003375605717, |
|
"learning_rate": 2.954977266439706e-05, |
|
"loss": 0.7335, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.0881028519219474, |
|
"grad_norm": 4.161242018302672, |
|
"learning_rate": 2.954218731013083e-05, |
|
"loss": 0.7054, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.08875546563988775, |
|
"grad_norm": 5.827431481546491, |
|
"learning_rate": 2.953453958016614e-05, |
|
"loss": 0.6321, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.0894080793578281, |
|
"grad_norm": 7.1039105888444904, |
|
"learning_rate": 2.952682950730634e-05, |
|
"loss": 0.6941, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.09006069307576846, |
|
"grad_norm": 2.7616336275225892, |
|
"learning_rate": 2.951905712462219e-05, |
|
"loss": 0.6928, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.0907133067937088, |
|
"grad_norm": 4.261061690296871, |
|
"learning_rate": 2.9511222465451716e-05, |
|
"loss": 0.7176, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.09136592051164916, |
|
"grad_norm": 5.4134818862551395, |
|
"learning_rate": 2.950332556340006e-05, |
|
"loss": 0.7048, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.0920185342295895, |
|
"grad_norm": 6.3477656240577085, |
|
"learning_rate": 2.949536645233935e-05, |
|
"loss": 0.6842, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.09267114794752986, |
|
"grad_norm": 63.477804314776044, |
|
"learning_rate": 2.9487345166408545e-05, |
|
"loss": 0.6876, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.09332376166547021, |
|
"grad_norm": 4.368664541213622, |
|
"learning_rate": 2.9479261740013286e-05, |
|
"loss": 0.6913, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.09397637538341055, |
|
"grad_norm": 9.476938465079238, |
|
"learning_rate": 2.9471116207825754e-05, |
|
"loss": 0.6891, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.09462898910135091, |
|
"grad_norm": 8.434794578560851, |
|
"learning_rate": 2.9462908604784523e-05, |
|
"loss": 0.6585, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.09528160281929127, |
|
"grad_norm": 4.798759761163433, |
|
"learning_rate": 2.945463896609441e-05, |
|
"loss": 0.6736, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.09593421653723161, |
|
"grad_norm": 9.782724872581115, |
|
"learning_rate": 2.9446307327226306e-05, |
|
"loss": 0.6659, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.09658683025517197, |
|
"grad_norm": 3.997516099278308, |
|
"learning_rate": 2.9437913723917058e-05, |
|
"loss": 0.6527, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.09723944397311232, |
|
"grad_norm": 4.623015725563099, |
|
"learning_rate": 2.942945819216928e-05, |
|
"loss": 0.7274, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.09789205769105266, |
|
"grad_norm": 3.2197835799755055, |
|
"learning_rate": 2.942094076825123e-05, |
|
"loss": 0.6966, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.09854467140899302, |
|
"grad_norm": 3.5107988249516984, |
|
"learning_rate": 2.9412361488696628e-05, |
|
"loss": 0.7235, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.09919728512693336, |
|
"grad_norm": 18.7865650951996, |
|
"learning_rate": 2.9403720390304518e-05, |
|
"loss": 0.7382, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.09984989884487372, |
|
"grad_norm": 3.85598692653545, |
|
"learning_rate": 2.93950175101391e-05, |
|
"loss": 0.7475, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.10050251256281408, |
|
"grad_norm": 20.459657003411998, |
|
"learning_rate": 2.938625288552957e-05, |
|
"loss": 0.6558, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.10115512628075442, |
|
"grad_norm": 6.416583997846208, |
|
"learning_rate": 2.9377426554069976e-05, |
|
"loss": 0.7205, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.10180773999869477, |
|
"grad_norm": 5.532087704430113, |
|
"learning_rate": 2.936853855361904e-05, |
|
"loss": 0.7189, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.10246035371663513, |
|
"grad_norm": 4.756518458886862, |
|
"learning_rate": 2.9359588922299986e-05, |
|
"loss": 0.7088, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.10311296743457547, |
|
"grad_norm": 5.775658785412931, |
|
"learning_rate": 2.9350577698500408e-05, |
|
"loss": 0.682, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.10376558115251583, |
|
"grad_norm": 7.714313915746094, |
|
"learning_rate": 2.9341504920872087e-05, |
|
"loss": 0.7393, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.10441819487045617, |
|
"grad_norm": 11.153510433173501, |
|
"learning_rate": 2.933237062833082e-05, |
|
"loss": 0.6616, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.10507080858839653, |
|
"grad_norm": 4.575896778703132, |
|
"learning_rate": 2.9323174860056258e-05, |
|
"loss": 0.7168, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.10572342230633688, |
|
"grad_norm": 46.2282201673799, |
|
"learning_rate": 2.9313917655491744e-05, |
|
"loss": 0.7016, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.10637603602427723, |
|
"grad_norm": 51.80540559466864, |
|
"learning_rate": 2.9304599054344148e-05, |
|
"loss": 0.6709, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.10702864974221758, |
|
"grad_norm": 4.607057564284905, |
|
"learning_rate": 2.9295219096583663e-05, |
|
"loss": 0.6894, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.10768126346015794, |
|
"grad_norm": 4.158724628963882, |
|
"learning_rate": 2.9285777822443686e-05, |
|
"loss": 0.6847, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.10833387717809828, |
|
"grad_norm": 6.634813921115065, |
|
"learning_rate": 2.92762752724206e-05, |
|
"loss": 0.7094, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.10898649089603864, |
|
"grad_norm": 2.437577662086608, |
|
"learning_rate": 2.926671148727362e-05, |
|
"loss": 0.69, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.10963910461397898, |
|
"grad_norm": 22.722071681603026, |
|
"learning_rate": 2.925708650802463e-05, |
|
"loss": 0.704, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.11029171833191934, |
|
"grad_norm": 2.913996227830109, |
|
"learning_rate": 2.9247400375957976e-05, |
|
"loss": 0.7211, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.11094433204985969, |
|
"grad_norm": 5.279852146043678, |
|
"learning_rate": 2.923765313262032e-05, |
|
"loss": 0.6693, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.11159694576780003, |
|
"grad_norm": 4.47116557104752, |
|
"learning_rate": 2.9227844819820434e-05, |
|
"loss": 0.6958, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.11224955948574039, |
|
"grad_norm": 6.9451125709413, |
|
"learning_rate": 2.9217975479629047e-05, |
|
"loss": 0.6549, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.11290217320368075, |
|
"grad_norm": 8.218016152925602, |
|
"learning_rate": 2.920804515437865e-05, |
|
"loss": 0.7034, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.11355478692162109, |
|
"grad_norm": 4.725968454405007, |
|
"learning_rate": 2.9198053886663314e-05, |
|
"loss": 0.7292, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.11420740063956145, |
|
"grad_norm": 8.078717621365517, |
|
"learning_rate": 2.9188001719338514e-05, |
|
"loss": 0.6556, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.11486001435750179, |
|
"grad_norm": 9.045448414751244, |
|
"learning_rate": 2.9177888695520933e-05, |
|
"loss": 0.7223, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.11551262807544214, |
|
"grad_norm": 4.318313439658446, |
|
"learning_rate": 2.916771485858829e-05, |
|
"loss": 0.6762, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.1161652417933825, |
|
"grad_norm": 13.078874026489483, |
|
"learning_rate": 2.9157480252179156e-05, |
|
"loss": 0.7189, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.11681785551132284, |
|
"grad_norm": 8.585825980992096, |
|
"learning_rate": 2.9147184920192745e-05, |
|
"loss": 0.7142, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.1174704692292632, |
|
"grad_norm": 138.05135197182966, |
|
"learning_rate": 2.9136828906788765e-05, |
|
"loss": 0.6514, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.11812308294720356, |
|
"grad_norm": 2.9764498791953167, |
|
"learning_rate": 2.9126412256387172e-05, |
|
"loss": 0.6835, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.1187756966651439, |
|
"grad_norm": 10.553495101154766, |
|
"learning_rate": 2.9115935013668038e-05, |
|
"loss": 0.7049, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.11942831038308425, |
|
"grad_norm": 3.8520760029457755, |
|
"learning_rate": 2.910539722357132e-05, |
|
"loss": 0.6805, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.1200809241010246, |
|
"grad_norm": 6.002281391018973, |
|
"learning_rate": 2.9094798931296692e-05, |
|
"loss": 0.7044, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.12073353781896495, |
|
"grad_norm": 9.04002888227592, |
|
"learning_rate": 2.9084140182303328e-05, |
|
"loss": 0.7221, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.12138615153690531, |
|
"grad_norm": 2.324509546376411, |
|
"learning_rate": 2.907342102230972e-05, |
|
"loss": 0.7, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.12203876525484565, |
|
"grad_norm": 4.136482440945801, |
|
"learning_rate": 2.9062641497293485e-05, |
|
"loss": 0.7213, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.12269137897278601, |
|
"grad_norm": 2.9193932096141673, |
|
"learning_rate": 2.9051801653491158e-05, |
|
"loss": 0.6656, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.12334399269072636, |
|
"grad_norm": 6.319528726800553, |
|
"learning_rate": 2.9040901537398008e-05, |
|
"loss": 0.6701, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.1239966064086667, |
|
"grad_norm": 3.5042010027049533, |
|
"learning_rate": 2.9029941195767824e-05, |
|
"loss": 0.6454, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.12464922012660706, |
|
"grad_norm": 4.3096531856338895, |
|
"learning_rate": 2.9018920675612712e-05, |
|
"loss": 0.6818, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.1253018338445474, |
|
"grad_norm": 6.849873350879978, |
|
"learning_rate": 2.900784002420292e-05, |
|
"loss": 0.7674, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.12595444756248778, |
|
"grad_norm": 7.645127543651015, |
|
"learning_rate": 2.89966992890666e-05, |
|
"loss": 0.7, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.12660706128042812, |
|
"grad_norm": 3.334578297503325, |
|
"learning_rate": 2.8985498517989623e-05, |
|
"loss": 0.6783, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.12725967499836846, |
|
"grad_norm": 7.91381208921764, |
|
"learning_rate": 2.897423775901538e-05, |
|
"loss": 0.7327, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.12791228871630883, |
|
"grad_norm": 7.74516810255062, |
|
"learning_rate": 2.8962917060444562e-05, |
|
"loss": 0.7145, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.12856490243424917, |
|
"grad_norm": 5.468499401345368, |
|
"learning_rate": 2.8951536470834957e-05, |
|
"loss": 0.6807, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.12921751615218952, |
|
"grad_norm": 3.7005534196349963, |
|
"learning_rate": 2.894009603900125e-05, |
|
"loss": 0.7004, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.12987012987012986, |
|
"grad_norm": 3.6355286647446716, |
|
"learning_rate": 2.89285958140148e-05, |
|
"loss": 0.6804, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.13052274358807023, |
|
"grad_norm": 4.199898882057381, |
|
"learning_rate": 2.891703584520343e-05, |
|
"loss": 0.7268, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.13117535730601057, |
|
"grad_norm": 4.9058604480600385, |
|
"learning_rate": 2.8905416182151244e-05, |
|
"loss": 0.7168, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.1318279710239509, |
|
"grad_norm": 4.2197732320304535, |
|
"learning_rate": 2.8893736874698367e-05, |
|
"loss": 0.701, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.13248058474189128, |
|
"grad_norm": 4.66981547707892, |
|
"learning_rate": 2.888199797294078e-05, |
|
"loss": 0.6624, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.13313319845983163, |
|
"grad_norm": 2.1811717459296274, |
|
"learning_rate": 2.887019952723006e-05, |
|
"loss": 0.7181, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.13378581217777197, |
|
"grad_norm": 5.9404267175163135, |
|
"learning_rate": 2.8858341588173194e-05, |
|
"loss": 0.7231, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.13443842589571234, |
|
"grad_norm": 3.279458419701383, |
|
"learning_rate": 2.884642420663236e-05, |
|
"loss": 0.6863, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.13509103961365268, |
|
"grad_norm": 3.441960856231174, |
|
"learning_rate": 2.8834447433724693e-05, |
|
"loss": 0.6985, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.13574365333159302, |
|
"grad_norm": 6.296333990212765, |
|
"learning_rate": 2.8822411320822074e-05, |
|
"loss": 0.7166, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.1363962670495334, |
|
"grad_norm": 4.087401381303271, |
|
"learning_rate": 2.881031591955092e-05, |
|
"loss": 0.6765, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.13704888076747374, |
|
"grad_norm": 6.6119114402937305, |
|
"learning_rate": 2.879816128179195e-05, |
|
"loss": 0.7228, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.13770149448541408, |
|
"grad_norm": 4.897020539614396, |
|
"learning_rate": 2.8785947459679953e-05, |
|
"loss": 0.7173, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.13835410820335445, |
|
"grad_norm": 3.482291928967508, |
|
"learning_rate": 2.87736745056036e-05, |
|
"loss": 0.685, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.1390067219212948, |
|
"grad_norm": 12.111542144788995, |
|
"learning_rate": 2.876134247220517e-05, |
|
"loss": 0.7198, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.13965933563923513, |
|
"grad_norm": 7.1442170238273786, |
|
"learning_rate": 2.8748951412380384e-05, |
|
"loss": 0.7549, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.14031194935717548, |
|
"grad_norm": 4.841978475189581, |
|
"learning_rate": 2.873650137927811e-05, |
|
"loss": 0.6388, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.14096456307511585, |
|
"grad_norm": 4.053407088369962, |
|
"learning_rate": 2.872399242630018e-05, |
|
"loss": 0.6837, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.1416171767930562, |
|
"grad_norm": 2.537769446137546, |
|
"learning_rate": 2.871142460710117e-05, |
|
"loss": 0.678, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.14226979051099653, |
|
"grad_norm": 6.452520531522298, |
|
"learning_rate": 2.8698797975588133e-05, |
|
"loss": 0.669, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.1429224042289369, |
|
"grad_norm": 6.840893292905677, |
|
"learning_rate": 2.868611258592038e-05, |
|
"loss": 0.7022, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.14357501794687724, |
|
"grad_norm": 4.634307774411798, |
|
"learning_rate": 2.867336849250926e-05, |
|
"loss": 0.6873, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.14422763166481758, |
|
"grad_norm": 3.74851807876996, |
|
"learning_rate": 2.866056575001793e-05, |
|
"loss": 0.6946, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.14488024538275796, |
|
"grad_norm": 2.0416537952666123, |
|
"learning_rate": 2.8647704413361095e-05, |
|
"loss": 0.6637, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.1455328591006983, |
|
"grad_norm": 3.555607066840982, |
|
"learning_rate": 2.863478453770479e-05, |
|
"loss": 0.71, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.14618547281863864, |
|
"grad_norm": 5.481578146356155, |
|
"learning_rate": 2.862180617846615e-05, |
|
"loss": 0.7335, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.146838086536579, |
|
"grad_norm": 4.089959406196587, |
|
"learning_rate": 2.8608769391313153e-05, |
|
"loss": 0.6951, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.14749070025451935, |
|
"grad_norm": 4.054901293812647, |
|
"learning_rate": 2.8595674232164403e-05, |
|
"loss": 0.6933, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.1481433139724597, |
|
"grad_norm": 11.977393154495003, |
|
"learning_rate": 2.8582520757188858e-05, |
|
"loss": 0.6793, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.14879592769040006, |
|
"grad_norm": 9.862463610911956, |
|
"learning_rate": 2.856930902280563e-05, |
|
"loss": 0.693, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.1494485414083404, |
|
"grad_norm": 6.6933607998065305, |
|
"learning_rate": 2.8556039085683717e-05, |
|
"loss": 0.6512, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.15010115512628075, |
|
"grad_norm": 13.33031435802581, |
|
"learning_rate": 2.8542711002741765e-05, |
|
"loss": 0.6818, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.1507537688442211, |
|
"grad_norm": 3.55607266109624, |
|
"learning_rate": 2.8529324831147817e-05, |
|
"loss": 0.6969, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.15140638256216146, |
|
"grad_norm": 2.76158562626684, |
|
"learning_rate": 2.8515880628319084e-05, |
|
"loss": 0.6469, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.1520589962801018, |
|
"grad_norm": 14.835614032644079, |
|
"learning_rate": 2.8502378451921686e-05, |
|
"loss": 0.7095, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.15271160999804215, |
|
"grad_norm": 3.837370004844092, |
|
"learning_rate": 2.8488818359870418e-05, |
|
"loss": 0.6549, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.15336422371598252, |
|
"grad_norm": 8.077714625547088, |
|
"learning_rate": 2.8475200410328477e-05, |
|
"loss": 0.7036, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.15401683743392286, |
|
"grad_norm": 4.208380267696714, |
|
"learning_rate": 2.846152466170724e-05, |
|
"loss": 0.7083, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.1546694511518632, |
|
"grad_norm": 5.882366675192563, |
|
"learning_rate": 2.8447791172665994e-05, |
|
"loss": 0.6774, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.15532206486980357, |
|
"grad_norm": 10.332970110344341, |
|
"learning_rate": 2.8434000002111686e-05, |
|
"loss": 0.6871, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.15597467858774391, |
|
"grad_norm": 5.233682432902447, |
|
"learning_rate": 2.84201512091987e-05, |
|
"loss": 0.6685, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.15662729230568426, |
|
"grad_norm": 4.051922217192083, |
|
"learning_rate": 2.840624485332855e-05, |
|
"loss": 0.6893, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.15727990602362463, |
|
"grad_norm": 8.208037033800602, |
|
"learning_rate": 2.8392280994149673e-05, |
|
"loss": 0.7163, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.15793251974156497, |
|
"grad_norm": 6.38338502443, |
|
"learning_rate": 2.8378259691557143e-05, |
|
"loss": 0.6738, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.1585851334595053, |
|
"grad_norm": 5.91196516331796, |
|
"learning_rate": 2.8364181005692437e-05, |
|
"loss": 0.6777, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.15923774717744568, |
|
"grad_norm": 4.255829123415314, |
|
"learning_rate": 2.835004499694316e-05, |
|
"loss": 0.7101, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.15989036089538602, |
|
"grad_norm": 6.462145986397376, |
|
"learning_rate": 2.833585172594279e-05, |
|
"loss": 0.7031, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.16054297461332637, |
|
"grad_norm": 4.701946326296281, |
|
"learning_rate": 2.8321601253570425e-05, |
|
"loss": 0.7195, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.16119558833126674, |
|
"grad_norm": 2.7081882046671337, |
|
"learning_rate": 2.830729364095051e-05, |
|
"loss": 0.697, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.16184820204920708, |
|
"grad_norm": 12.027371026479301, |
|
"learning_rate": 2.8292928949452578e-05, |
|
"loss": 0.6701, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.16250081576714742, |
|
"grad_norm": 4.375553205119334, |
|
"learning_rate": 2.8278507240691e-05, |
|
"loss": 0.6698, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.16315342948508776, |
|
"grad_norm": 2.3145460407904968, |
|
"learning_rate": 2.8264028576524714e-05, |
|
"loss": 0.6619, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.16380604320302813, |
|
"grad_norm": 9.62186450170952, |
|
"learning_rate": 2.824949301905694e-05, |
|
"loss": 0.7049, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.16445865692096848, |
|
"grad_norm": 3.242262212053335, |
|
"learning_rate": 2.8234900630634945e-05, |
|
"loss": 0.6684, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.16511127063890882, |
|
"grad_norm": 5.874441891397036, |
|
"learning_rate": 2.8220251473849747e-05, |
|
"loss": 0.6841, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.1657638843568492, |
|
"grad_norm": 5.5132935867506685, |
|
"learning_rate": 2.8205545611535876e-05, |
|
"loss": 0.7077, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.16641649807478953, |
|
"grad_norm": 4.178673623014923, |
|
"learning_rate": 2.8190783106771076e-05, |
|
"loss": 0.6779, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.16706911179272987, |
|
"grad_norm": 3.8308465742128974, |
|
"learning_rate": 2.8175964022876057e-05, |
|
"loss": 0.6638, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.16772172551067024, |
|
"grad_norm": 3.122478077844367, |
|
"learning_rate": 2.8161088423414197e-05, |
|
"loss": 0.7063, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.1683743392286106, |
|
"grad_norm": 4.633331835812846, |
|
"learning_rate": 2.8146156372191306e-05, |
|
"loss": 0.6555, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.16902695294655093, |
|
"grad_norm": 17.541201672163616, |
|
"learning_rate": 2.8131167933255323e-05, |
|
"loss": 0.7001, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.1696795666644913, |
|
"grad_norm": 6.134101489242109, |
|
"learning_rate": 2.8116123170896046e-05, |
|
"loss": 0.7173, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.17033218038243164, |
|
"grad_norm": 8.073444833192916, |
|
"learning_rate": 2.8101022149644868e-05, |
|
"loss": 0.6771, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.17098479410037198, |
|
"grad_norm": 6.780231733384814, |
|
"learning_rate": 2.8085864934274488e-05, |
|
"loss": 0.7313, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.17163740781831235, |
|
"grad_norm": 3.857057315644847, |
|
"learning_rate": 2.8070651589798638e-05, |
|
"loss": 0.6844, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.1722900215362527, |
|
"grad_norm": 2.6968793212573106, |
|
"learning_rate": 2.805538218147181e-05, |
|
"loss": 0.6806, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.17294263525419304, |
|
"grad_norm": 3.347039146921558, |
|
"learning_rate": 2.8040056774788968e-05, |
|
"loss": 0.6993, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.17359524897213338, |
|
"grad_norm": 3.3015836383740456, |
|
"learning_rate": 2.8024675435485257e-05, |
|
"loss": 0.7268, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.17424786269007375, |
|
"grad_norm": 3.317639807807373, |
|
"learning_rate": 2.8009238229535758e-05, |
|
"loss": 0.6674, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.1749004764080141, |
|
"grad_norm": 5.874714621247871, |
|
"learning_rate": 2.7993745223155156e-05, |
|
"loss": 0.6571, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.17555309012595444, |
|
"grad_norm": 3.435629734690448, |
|
"learning_rate": 2.7978196482797496e-05, |
|
"loss": 0.6998, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.1762057038438948, |
|
"grad_norm": 30.168294226928044, |
|
"learning_rate": 2.7962592075155875e-05, |
|
"loss": 0.6531, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.17685831756183515, |
|
"grad_norm": 3.6088167890096865, |
|
"learning_rate": 2.794693206716217e-05, |
|
"loss": 0.644, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.1775109312797755, |
|
"grad_norm": 5.471338847334227, |
|
"learning_rate": 2.7931216525986733e-05, |
|
"loss": 0.6864, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.17816354499771586, |
|
"grad_norm": 3.5451837616331763, |
|
"learning_rate": 2.7915445519038124e-05, |
|
"loss": 0.6794, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.1788161587156562, |
|
"grad_norm": 4.9951779955670705, |
|
"learning_rate": 2.7899619113962806e-05, |
|
"loss": 0.6702, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.17946877243359655, |
|
"grad_norm": 6.849902588089016, |
|
"learning_rate": 2.7883737378644866e-05, |
|
"loss": 0.679, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.18012138615153692, |
|
"grad_norm": 11.250488948793029, |
|
"learning_rate": 2.786780038120572e-05, |
|
"loss": 0.7153, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.18077399986947726, |
|
"grad_norm": 13.184900398210909, |
|
"learning_rate": 2.7851808190003803e-05, |
|
"loss": 0.6734, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.1814266135874176, |
|
"grad_norm": 7.234749638062084, |
|
"learning_rate": 2.7835760873634318e-05, |
|
"loss": 0.6677, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.18207922730535797, |
|
"grad_norm": 13.085129406019094, |
|
"learning_rate": 2.7819658500928897e-05, |
|
"loss": 0.691, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.1827318410232983, |
|
"grad_norm": 3.5248377823535, |
|
"learning_rate": 2.780350114095533e-05, |
|
"loss": 0.6397, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.18338445474123866, |
|
"grad_norm": 26.796889805025064, |
|
"learning_rate": 2.7787288863017263e-05, |
|
"loss": 0.7222, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.184037068459179, |
|
"grad_norm": 3.125330335323775, |
|
"learning_rate": 2.77710217366539e-05, |
|
"loss": 0.7082, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.18468968217711937, |
|
"grad_norm": 5.07678317608623, |
|
"learning_rate": 2.775469983163972e-05, |
|
"loss": 0.642, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.1853422958950597, |
|
"grad_norm": 3.8465548570361605, |
|
"learning_rate": 2.773832321798414e-05, |
|
"loss": 0.735, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.18599490961300005, |
|
"grad_norm": 2.596039278811819, |
|
"learning_rate": 2.7721891965931252e-05, |
|
"loss": 0.6885, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.18664752333094042, |
|
"grad_norm": 14.003078667143276, |
|
"learning_rate": 2.7705406145959505e-05, |
|
"loss": 0.7036, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.18730013704888077, |
|
"grad_norm": 4.5877627845774365, |
|
"learning_rate": 2.7688865828781413e-05, |
|
"loss": 0.6995, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.1879527507668211, |
|
"grad_norm": 7.044776485625493, |
|
"learning_rate": 2.767227108534323e-05, |
|
"loss": 0.7161, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.18860536448476148, |
|
"grad_norm": 3.072683258675077, |
|
"learning_rate": 2.765562198682468e-05, |
|
"loss": 0.7406, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.18925797820270182, |
|
"grad_norm": 8.84720999495458, |
|
"learning_rate": 2.763891860463861e-05, |
|
"loss": 0.6727, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.18991059192064216, |
|
"grad_norm": 9.494450604703584, |
|
"learning_rate": 2.7622161010430717e-05, |
|
"loss": 0.6859, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.19056320563858253, |
|
"grad_norm": 2.803990269441359, |
|
"learning_rate": 2.7605349276079238e-05, |
|
"loss": 0.7, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.19121581935652288, |
|
"grad_norm": 6.501435928194476, |
|
"learning_rate": 2.7588483473694613e-05, |
|
"loss": 0.66, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.19186843307446322, |
|
"grad_norm": 3.9434365221991667, |
|
"learning_rate": 2.7571563675619202e-05, |
|
"loss": 0.6575, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.1925210467924036, |
|
"grad_norm": 6.473206095572867, |
|
"learning_rate": 2.7554589954426986e-05, |
|
"loss": 0.6509, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.19317366051034393, |
|
"grad_norm": 6.359600241231479, |
|
"learning_rate": 2.7537562382923217e-05, |
|
"loss": 0.6554, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.19382627422828427, |
|
"grad_norm": 9.209939347687689, |
|
"learning_rate": 2.752048103414413e-05, |
|
"loss": 0.723, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.19447888794622464, |
|
"grad_norm": 23.703947692957573, |
|
"learning_rate": 2.7503345981356633e-05, |
|
"loss": 0.6927, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.19513150166416499, |
|
"grad_norm": 8.212074266427711, |
|
"learning_rate": 2.7486157298057986e-05, |
|
"loss": 0.6999, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.19578411538210533, |
|
"grad_norm": 5.189620722030213, |
|
"learning_rate": 2.7468915057975487e-05, |
|
"loss": 0.6722, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.19643672910004567, |
|
"grad_norm": 4.021048004797352, |
|
"learning_rate": 2.745161933506614e-05, |
|
"loss": 0.66, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.19708934281798604, |
|
"grad_norm": 4.156307822377388, |
|
"learning_rate": 2.7434270203516373e-05, |
|
"loss": 0.6987, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.19774195653592638, |
|
"grad_norm": 4.75441596904026, |
|
"learning_rate": 2.7416867737741683e-05, |
|
"loss": 0.6963, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.19839457025386673, |
|
"grad_norm": 5.880889862093012, |
|
"learning_rate": 2.739941201238635e-05, |
|
"loss": 0.6445, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.1990471839718071, |
|
"grad_norm": 4.686264689115436, |
|
"learning_rate": 2.738190310232308e-05, |
|
"loss": 0.751, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.19969979768974744, |
|
"grad_norm": 5.015165789251315, |
|
"learning_rate": 2.7364341082652716e-05, |
|
"loss": 0.6676, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.20035241140768778, |
|
"grad_norm": 4.629542414090964, |
|
"learning_rate": 2.734672602870391e-05, |
|
"loss": 0.6625, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.20100502512562815, |
|
"grad_norm": 3.582640218606122, |
|
"learning_rate": 2.7329058016032773e-05, |
|
"loss": 0.7243, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.2016576388435685, |
|
"grad_norm": 35.16576433756719, |
|
"learning_rate": 2.7311337120422588e-05, |
|
"loss": 0.6734, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.20231025256150884, |
|
"grad_norm": 8.429048501615004, |
|
"learning_rate": 2.729356341788347e-05, |
|
"loss": 0.6761, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.2029628662794492, |
|
"grad_norm": 10.361847043143795, |
|
"learning_rate": 2.727573698465202e-05, |
|
"loss": 0.691, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.20361547999738955, |
|
"grad_norm": 3.125915545815778, |
|
"learning_rate": 2.7257857897191044e-05, |
|
"loss": 0.6439, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.2042680937153299, |
|
"grad_norm": 3.7497179561089937, |
|
"learning_rate": 2.7239926232189167e-05, |
|
"loss": 0.6707, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.20492070743327026, |
|
"grad_norm": 3.56873592233637, |
|
"learning_rate": 2.722194206656056e-05, |
|
"loss": 0.6673, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.2055733211512106, |
|
"grad_norm": 10.54660445254849, |
|
"learning_rate": 2.7203905477444574e-05, |
|
"loss": 0.6829, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.20622593486915095, |
|
"grad_norm": 3.6820544103925608, |
|
"learning_rate": 2.7185816542205407e-05, |
|
"loss": 0.6568, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.2068785485870913, |
|
"grad_norm": 8.593912019069307, |
|
"learning_rate": 2.7167675338431813e-05, |
|
"loss": 0.6375, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.20753116230503166, |
|
"grad_norm": 4.561901523582492, |
|
"learning_rate": 2.7149481943936718e-05, |
|
"loss": 0.6929, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.208183776022972, |
|
"grad_norm": 3.3662024382508022, |
|
"learning_rate": 2.7131236436756917e-05, |
|
"loss": 0.7155, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.20883638974091234, |
|
"grad_norm": 3.4110738212405542, |
|
"learning_rate": 2.7112938895152733e-05, |
|
"loss": 0.7195, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.2094890034588527, |
|
"grad_norm": 3.6699604643477413, |
|
"learning_rate": 2.709458939760768e-05, |
|
"loss": 0.6266, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.21014161717679306, |
|
"grad_norm": 1.7543595371849152, |
|
"learning_rate": 2.7076188022828125e-05, |
|
"loss": 0.6956, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.2107942308947334, |
|
"grad_norm": 3.9842297097437647, |
|
"learning_rate": 2.7057734849742944e-05, |
|
"loss": 0.6616, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.21144684461267377, |
|
"grad_norm": 5.223697167736633, |
|
"learning_rate": 2.7039229957503207e-05, |
|
"loss": 0.6951, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.2120994583306141, |
|
"grad_norm": 4.189342719626237, |
|
"learning_rate": 2.7020673425481807e-05, |
|
"loss": 0.6937, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.21275207204855445, |
|
"grad_norm": 5.746674612678528, |
|
"learning_rate": 2.700206533327315e-05, |
|
"loss": 0.6897, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.21340468576649482, |
|
"grad_norm": 7.965064539148002, |
|
"learning_rate": 2.6983405760692782e-05, |
|
"loss": 0.6999, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.21405729948443517, |
|
"grad_norm": 3.6218679643616953, |
|
"learning_rate": 2.696469478777708e-05, |
|
"loss": 0.6702, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.2147099132023755, |
|
"grad_norm": 4.021322876113058, |
|
"learning_rate": 2.6945932494782878e-05, |
|
"loss": 0.736, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.21536252692031588, |
|
"grad_norm": 5.6555927379479245, |
|
"learning_rate": 2.692711896218715e-05, |
|
"loss": 0.6576, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.21601514063825622, |
|
"grad_norm": 5.56743771282241, |
|
"learning_rate": 2.6908254270686633e-05, |
|
"loss": 0.6824, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.21666775435619656, |
|
"grad_norm": 51.07644011063222, |
|
"learning_rate": 2.688933850119753e-05, |
|
"loss": 0.6517, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.2173203680741369, |
|
"grad_norm": 6.112257177604341, |
|
"learning_rate": 2.6870371734855092e-05, |
|
"loss": 0.6619, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.21797298179207727, |
|
"grad_norm": 2.531145729460449, |
|
"learning_rate": 2.685135405301335e-05, |
|
"loss": 0.7057, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.21862559551001762, |
|
"grad_norm": 1.995398951056393, |
|
"learning_rate": 2.6832285537244697e-05, |
|
"loss": 0.687, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.21927820922795796, |
|
"grad_norm": 2.3570011145840364, |
|
"learning_rate": 2.6813166269339587e-05, |
|
"loss": 0.6698, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.21993082294589833, |
|
"grad_norm": 3.6679571799521327, |
|
"learning_rate": 2.6793996331306157e-05, |
|
"loss": 0.6715, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.22058343666383867, |
|
"grad_norm": 28.476293508674676, |
|
"learning_rate": 2.6774775805369875e-05, |
|
"loss": 0.66, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.22123605038177901, |
|
"grad_norm": 3.2467702810162637, |
|
"learning_rate": 2.675550477397321e-05, |
|
"loss": 0.6721, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.22188866409971938, |
|
"grad_norm": 1.6459151219515613, |
|
"learning_rate": 2.6736183319775253e-05, |
|
"loss": 0.7111, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.22254127781765973, |
|
"grad_norm": 3.964489549873003, |
|
"learning_rate": 2.6716811525651386e-05, |
|
"loss": 0.7202, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.22319389153560007, |
|
"grad_norm": 2.1839922437285586, |
|
"learning_rate": 2.6697389474692896e-05, |
|
"loss": 0.6455, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.22384650525354044, |
|
"grad_norm": 6.5190517278902, |
|
"learning_rate": 2.6677917250206642e-05, |
|
"loss": 0.6543, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.22449911897148078, |
|
"grad_norm": 3.0950755352607677, |
|
"learning_rate": 2.6658394935714707e-05, |
|
"loss": 0.6567, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.22515173268942112, |
|
"grad_norm": 22.06688372148941, |
|
"learning_rate": 2.6638822614954007e-05, |
|
"loss": 0.673, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.2258043464073615, |
|
"grad_norm": 4.066518820487963, |
|
"learning_rate": 2.6619200371875952e-05, |
|
"loss": 0.6879, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.22645696012530184, |
|
"grad_norm": 3.121077860957678, |
|
"learning_rate": 2.659952829064609e-05, |
|
"loss": 0.6874, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.22710957384324218, |
|
"grad_norm": 2.6914813691439923, |
|
"learning_rate": 2.6579806455643734e-05, |
|
"loss": 0.6774, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.22776218756118255, |
|
"grad_norm": 4.968423508767704, |
|
"learning_rate": 2.656003495146162e-05, |
|
"loss": 0.6941, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.2284148012791229, |
|
"grad_norm": 3.7025765443419836, |
|
"learning_rate": 2.6540213862905497e-05, |
|
"loss": 0.697, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.22906741499706323, |
|
"grad_norm": 3.5133821241789924, |
|
"learning_rate": 2.652034327499383e-05, |
|
"loss": 0.6825, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.22972002871500358, |
|
"grad_norm": 3.204727293127362, |
|
"learning_rate": 2.6500423272957385e-05, |
|
"loss": 0.6552, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.23037264243294395, |
|
"grad_norm": 10.477867050932252, |
|
"learning_rate": 2.6480453942238878e-05, |
|
"loss": 0.7001, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.2310252561508843, |
|
"grad_norm": 4.290081854975228, |
|
"learning_rate": 2.6460435368492618e-05, |
|
"loss": 0.7023, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.23167786986882463, |
|
"grad_norm": 2.1041454905838695, |
|
"learning_rate": 2.6440367637584127e-05, |
|
"loss": 0.6998, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.232330483586765, |
|
"grad_norm": 10.10346106723381, |
|
"learning_rate": 2.642025083558978e-05, |
|
"loss": 0.7233, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.23298309730470534, |
|
"grad_norm": 2.801890474132496, |
|
"learning_rate": 2.6400085048796427e-05, |
|
"loss": 0.6821, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.2336357110226457, |
|
"grad_norm": 3.935419895594201, |
|
"learning_rate": 2.6379870363701032e-05, |
|
"loss": 0.7208, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.23428832474058606, |
|
"grad_norm": 8.554570222512323, |
|
"learning_rate": 2.6359606867010294e-05, |
|
"loss": 0.6633, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.2349409384585264, |
|
"grad_norm": 6.675371241157707, |
|
"learning_rate": 2.6339294645640287e-05, |
|
"loss": 0.6769, |
|
"step": 3600 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 15323, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 400, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 9.84085412488151e+18, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |