{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.052209097435228086,
  "eval_steps": 500,
  "global_step": 800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006526137179403511,
      "grad_norm": 17.690582114691438,
      "learning_rate": 1.948051948051948e-06,
      "loss": 1.3559,
      "step": 10
    },
    {
      "epoch": 0.0013052274358807021,
      "grad_norm": 7.768088366444893,
      "learning_rate": 3.896103896103896e-06,
      "loss": 1.2706,
      "step": 20
    },
    {
      "epoch": 0.001957841153821053,
      "grad_norm": 7.705313536090087,
      "learning_rate": 5.844155844155845e-06,
      "loss": 1.3781,
      "step": 30
    },
    {
      "epoch": 0.0026104548717614043,
      "grad_norm": 34.39078827766783,
      "learning_rate": 7.792207792207792e-06,
      "loss": 1.2749,
      "step": 40
    },
    {
      "epoch": 0.0032630685897017554,
      "grad_norm": 68.28824334896528,
      "learning_rate": 9.74025974025974e-06,
      "loss": 1.2955,
      "step": 50
    },
    {
      "epoch": 0.003915682307642106,
      "grad_norm": 14.220322607917241,
      "learning_rate": 1.168831168831169e-05,
      "loss": 1.2315,
      "step": 60
    },
    {
      "epoch": 0.0045682960255824575,
      "grad_norm": 12.611848231734811,
      "learning_rate": 1.3636363636363637e-05,
      "loss": 1.0953,
      "step": 70
    },
    {
      "epoch": 0.0052209097435228086,
      "grad_norm": 6.055664298727015,
      "learning_rate": 1.5584415584415583e-05,
      "loss": 1.105,
      "step": 80
    },
    {
      "epoch": 0.00587352346146316,
      "grad_norm": 3.52269227801977,
      "learning_rate": 1.753246753246753e-05,
      "loss": 0.9563,
      "step": 90
    },
    {
      "epoch": 0.006526137179403511,
      "grad_norm": 10.771884023354394,
      "learning_rate": 1.948051948051948e-05,
      "loss": 0.9523,
      "step": 100
    },
    {
      "epoch": 0.007178750897343862,
      "grad_norm": 33.41476483216757,
      "learning_rate": 2.1428571428571428e-05,
      "loss": 0.832,
      "step": 110
    },
    {
      "epoch": 0.007831364615284213,
      "grad_norm": 31.120240364617406,
      "learning_rate": 2.337662337662338e-05,
      "loss": 0.8376,
      "step": 120
    },
    {
      "epoch": 0.008483978333224564,
      "grad_norm": 5.517231564060886,
      "learning_rate": 2.5324675324675325e-05,
      "loss": 0.8293,
      "step": 130
    },
    {
      "epoch": 0.009136592051164915,
      "grad_norm": 4.311605388342058,
      "learning_rate": 2.7272727272727273e-05,
      "loss": 0.8295,
      "step": 140
    },
    {
      "epoch": 0.009789205769105266,
      "grad_norm": 6.997724163121519,
      "learning_rate": 2.922077922077922e-05,
      "loss": 0.7662,
      "step": 150
    },
    {
      "epoch": 0.010441819487045617,
      "grad_norm": 6.517836234400708,
      "learning_rate": 2.999998841890695e-05,
      "loss": 0.8158,
      "step": 160
    },
    {
      "epoch": 0.011094433204985968,
      "grad_norm": 4.186989141019666,
      "learning_rate": 2.99999176456253e-05,
      "loss": 0.8037,
      "step": 170
    },
    {
      "epoch": 0.01174704692292632,
      "grad_norm": 5.181546943355458,
      "learning_rate": 2.9999782533305785e-05,
      "loss": 0.7274,
      "step": 180
    },
    {
      "epoch": 0.01239966064086667,
      "grad_norm": 3.767076521211455,
      "learning_rate": 2.9999583082527935e-05,
      "loss": 0.7474,
      "step": 190
    },
    {
      "epoch": 0.013052274358807021,
      "grad_norm": 18.84416377940188,
      "learning_rate": 2.999931929414726e-05,
      "loss": 0.7708,
      "step": 200
    },
    {
      "epoch": 0.013704888076747372,
      "grad_norm": 3.169160630444992,
      "learning_rate": 2.999899116929522e-05,
      "loss": 0.8279,
      "step": 210
    },
    {
      "epoch": 0.014357501794687724,
      "grad_norm": 1.912782077307437,
      "learning_rate": 2.999859870937924e-05,
      "loss": 0.7407,
      "step": 220
    },
    {
      "epoch": 0.015010115512628075,
      "grad_norm": 3.3906505952914974,
      "learning_rate": 2.9998141916082696e-05,
      "loss": 0.7732,
      "step": 230
    },
    {
      "epoch": 0.015662729230568426,
      "grad_norm": 2.7144492322383584,
      "learning_rate": 2.999762079136491e-05,
      "loss": 0.7272,
      "step": 240
    },
    {
      "epoch": 0.01631534294850878,
      "grad_norm": 7.109330196029837,
      "learning_rate": 2.9997035337461135e-05,
      "loss": 0.7748,
      "step": 250
    },
    {
      "epoch": 0.016967956666449128,
      "grad_norm": 1.6054280593801813,
      "learning_rate": 2.9996385556882555e-05,
      "loss": 0.7676,
      "step": 260
    },
    {
      "epoch": 0.01762057038438948,
      "grad_norm": 10.883212441614672,
      "learning_rate": 2.9995671452416274e-05,
      "loss": 0.735,
      "step": 270
    },
    {
      "epoch": 0.01827318410232983,
      "grad_norm": 3.511064886507805,
      "learning_rate": 2.999489302712529e-05,
      "loss": 0.7741,
      "step": 280
    },
    {
      "epoch": 0.018925797820270183,
      "grad_norm": 3.618603818375307,
      "learning_rate": 2.9994050284348497e-05,
      "loss": 0.749,
      "step": 290
    },
    {
      "epoch": 0.019578411538210532,
      "grad_norm": 6.012944880342178,
      "learning_rate": 2.9993143227700668e-05,
      "loss": 0.7411,
      "step": 300
    },
    {
      "epoch": 0.020231025256150885,
      "grad_norm": 2.348670372295822,
      "learning_rate": 2.9992171861072428e-05,
      "loss": 0.7394,
      "step": 310
    },
    {
      "epoch": 0.020883638974091234,
      "grad_norm": 4.728309497649916,
      "learning_rate": 2.9991136188630263e-05,
      "loss": 0.8077,
      "step": 320
    },
    {
      "epoch": 0.021536252692031587,
      "grad_norm": 15.611917863290122,
      "learning_rate": 2.9990036214816467e-05,
      "loss": 0.7209,
      "step": 330
    },
    {
      "epoch": 0.022188866409971936,
      "grad_norm": 3.7315277354070817,
      "learning_rate": 2.998887194434916e-05,
      "loss": 0.7101,
      "step": 340
    },
    {
      "epoch": 0.02284148012791229,
      "grad_norm": 6.618759094750745,
      "learning_rate": 2.998764338222222e-05,
      "loss": 0.7759,
      "step": 350
    },
    {
      "epoch": 0.02349409384585264,
      "grad_norm": 6.770044306239603,
      "learning_rate": 2.998635053370533e-05,
      "loss": 0.7398,
      "step": 360
    },
    {
      "epoch": 0.02414670756379299,
      "grad_norm": 12.471224202357552,
      "learning_rate": 2.998499340434389e-05,
      "loss": 0.7046,
      "step": 370
    },
    {
      "epoch": 0.02479932128173334,
      "grad_norm": 4.147359416986547,
      "learning_rate": 2.9983571999959013e-05,
      "loss": 0.761,
      "step": 380
    },
    {
      "epoch": 0.025451934999673693,
      "grad_norm": 34.84722866603778,
      "learning_rate": 2.9982086326647533e-05,
      "loss": 0.757,
      "step": 390
    },
    {
      "epoch": 0.026104548717614043,
      "grad_norm": 5.245498180313093,
      "learning_rate": 2.998053639078193e-05,
      "loss": 0.7536,
      "step": 400
    },
    {
      "epoch": 0.026757162435554396,
      "grad_norm": 36.55990241841121,
      "learning_rate": 2.997892219901034e-05,
      "loss": 0.7395,
      "step": 410
    },
    {
      "epoch": 0.027409776153494745,
      "grad_norm": 5.03198653806696,
      "learning_rate": 2.9977243758256494e-05,
      "loss": 0.7208,
      "step": 420
    },
    {
      "epoch": 0.028062389871435098,
      "grad_norm": 11.376914733036081,
      "learning_rate": 2.997550107571972e-05,
      "loss": 0.719,
      "step": 430
    },
    {
      "epoch": 0.028715003589375447,
      "grad_norm": 2.958119684662306,
      "learning_rate": 2.9973694158874898e-05,
      "loss": 0.7271,
      "step": 440
    },
    {
      "epoch": 0.0293676173073158,
      "grad_norm": 6.037096737490817,
      "learning_rate": 2.9971823015472418e-05,
      "loss": 0.7356,
      "step": 450
    },
    {
      "epoch": 0.03002023102525615,
      "grad_norm": 5.3042973640363575,
      "learning_rate": 2.9969887653538164e-05,
      "loss": 0.7207,
      "step": 460
    },
    {
      "epoch": 0.030672844743196502,
      "grad_norm": 2.4985603001745624,
      "learning_rate": 2.996788808137347e-05,
      "loss": 0.7769,
      "step": 470
    },
    {
      "epoch": 0.03132545846113685,
      "grad_norm": 7.607065841315647,
      "learning_rate": 2.9965824307555084e-05,
      "loss": 0.7091,
      "step": 480
    },
    {
      "epoch": 0.03197807217907721,
      "grad_norm": 4.322533035107957,
      "learning_rate": 2.9963696340935144e-05,
      "loss": 0.7114,
      "step": 490
    },
    {
      "epoch": 0.03263068589701756,
      "grad_norm": 5.878565903250334,
      "learning_rate": 2.9961504190641108e-05,
      "loss": 0.7284,
      "step": 500
    },
    {
      "epoch": 0.033283299614957906,
      "grad_norm": 5.0026507027119855,
      "learning_rate": 2.9959247866075764e-05,
      "loss": 0.6992,
      "step": 510
    },
    {
      "epoch": 0.033935913332898256,
      "grad_norm": 7.12632150273901,
      "learning_rate": 2.9956927376917137e-05,
      "loss": 0.7285,
      "step": 520
    },
    {
      "epoch": 0.03458852705083861,
      "grad_norm": 5.211123255860348,
      "learning_rate": 2.9954542733118496e-05,
      "loss": 0.7511,
      "step": 530
    },
    {
      "epoch": 0.03524114076877896,
      "grad_norm": 9.925273547498618,
      "learning_rate": 2.995209394490827e-05,
      "loss": 0.7699,
      "step": 540
    },
    {
      "epoch": 0.03589375448671931,
      "grad_norm": 7.418381681996765,
      "learning_rate": 2.9949581022790025e-05,
      "loss": 0.759,
      "step": 550
    },
    {
      "epoch": 0.03654636820465966,
      "grad_norm": 4.352380973507467,
      "learning_rate": 2.9947003977542423e-05,
      "loss": 0.7537,
      "step": 560
    },
    {
      "epoch": 0.037198981922600016,
      "grad_norm": 9.712842120769198,
      "learning_rate": 2.9944362820219167e-05,
      "loss": 0.7063,
      "step": 570
    },
    {
      "epoch": 0.037851595640540366,
      "grad_norm": 5.757600819230482,
      "learning_rate": 2.994165756214895e-05,
      "loss": 0.7893,
      "step": 580
    },
    {
      "epoch": 0.038504209358480715,
      "grad_norm": 5.529209601152462,
      "learning_rate": 2.9938888214935426e-05,
      "loss": 0.6771,
      "step": 590
    },
    {
      "epoch": 0.039156823076421064,
      "grad_norm": 10.550479346499758,
      "learning_rate": 2.9936054790457127e-05,
      "loss": 0.737,
      "step": 600
    },
    {
      "epoch": 0.03980943679436142,
      "grad_norm": 8.284279553451016,
      "learning_rate": 2.9933157300867437e-05,
      "loss": 0.7182,
      "step": 610
    },
    {
      "epoch": 0.04046205051230177,
      "grad_norm": 8.18511648646326,
      "learning_rate": 2.9930195758594542e-05,
      "loss": 0.6901,
      "step": 620
    },
    {
      "epoch": 0.04111466423024212,
      "grad_norm": 14.569754827631956,
      "learning_rate": 2.9927170176341365e-05,
      "loss": 0.7008,
      "step": 630
    },
    {
      "epoch": 0.04176727794818247,
      "grad_norm": 4.214581273685441,
      "learning_rate": 2.992408056708551e-05,
      "loss": 0.7489,
      "step": 640
    },
    {
      "epoch": 0.042419891666122825,
      "grad_norm": 10.038596627079452,
      "learning_rate": 2.9920926944079224e-05,
      "loss": 0.7649,
      "step": 650
    },
    {
      "epoch": 0.043072505384063174,
      "grad_norm": 2.386544029221306,
      "learning_rate": 2.9917709320849305e-05,
      "loss": 0.7223,
      "step": 660
    },
    {
      "epoch": 0.043725119102003523,
      "grad_norm": 8.286359254511249,
      "learning_rate": 2.9914427711197096e-05,
      "loss": 0.7089,
      "step": 670
    },
    {
      "epoch": 0.04437773281994387,
      "grad_norm": 4.235819327444911,
      "learning_rate": 2.9911082129198372e-05,
      "loss": 0.7138,
      "step": 680
    },
    {
      "epoch": 0.04503034653788423,
      "grad_norm": 5.187338033698449,
      "learning_rate": 2.9907672589203316e-05,
      "loss": 0.7192,
      "step": 690
    },
    {
      "epoch": 0.04568296025582458,
      "grad_norm": 6.360475337181379,
      "learning_rate": 2.9904199105836443e-05,
      "loss": 0.7094,
      "step": 700
    },
    {
      "epoch": 0.04633557397376493,
      "grad_norm": 4.906400836156689,
      "learning_rate": 2.990066169399654e-05,
      "loss": 0.654,
      "step": 710
    },
    {
      "epoch": 0.04698818769170528,
      "grad_norm": 17.600495314130633,
      "learning_rate": 2.9897060368856603e-05,
      "loss": 0.7299,
      "step": 720
    },
    {
      "epoch": 0.04764080140964563,
      "grad_norm": 7.765935941492389,
      "learning_rate": 2.989339514586377e-05,
      "loss": 0.7486,
      "step": 730
    },
    {
      "epoch": 0.04829341512758598,
      "grad_norm": 7.30026395137639,
      "learning_rate": 2.9889666040739252e-05,
      "loss": 0.6941,
      "step": 740
    },
    {
      "epoch": 0.04894602884552633,
      "grad_norm": 4.676985481218465,
      "learning_rate": 2.9885873069478275e-05,
      "loss": 0.7701,
      "step": 750
    },
    {
      "epoch": 0.04959864256346668,
      "grad_norm": 42.50656974727186,
      "learning_rate": 2.9882016248350006e-05,
      "loss": 0.7428,
      "step": 760
    },
    {
      "epoch": 0.05025125628140704,
      "grad_norm": 3.9893667031114766,
      "learning_rate": 2.9878095593897474e-05,
      "loss": 0.7204,
      "step": 770
    },
    {
      "epoch": 0.05090386999934739,
      "grad_norm": 8.909028486553332,
      "learning_rate": 2.9874111122937518e-05,
      "loss": 0.7336,
      "step": 780
    },
    {
      "epoch": 0.051556483717287736,
      "grad_norm": 5.256925284136456,
      "learning_rate": 2.9870062852560698e-05,
      "loss": 0.7674,
      "step": 790
    },
    {
      "epoch": 0.052209097435228086,
      "grad_norm": 5.835535487534073,
      "learning_rate": 2.986595080013123e-05,
      "loss": 0.7547,
      "step": 800
    }
  ],
  "logging_steps": 10,
  "max_steps": 15323,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 400,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.1868564721958912e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}