{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9850746268656714,
  "eval_steps": 500,
  "global_step": 525,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05685856432125089,
      "grad_norm": 5.9803857599927595,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 3.0422,
      "step": 10
    },
    {
      "epoch": 0.11371712864250177,
      "grad_norm": 2.217843129478048,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 2.4412,
      "step": 20
    },
    {
      "epoch": 0.17057569296375266,
      "grad_norm": 1.8496604433568333,
      "learning_rate": 9.99910461334869e-06,
      "loss": 2.1975,
      "step": 30
    },
    {
      "epoch": 0.22743425728500355,
      "grad_norm": 2.211490671202065,
      "learning_rate": 9.983195548029173e-06,
      "loss": 2.0619,
      "step": 40
    },
    {
      "epoch": 0.28429282160625446,
      "grad_norm": 1.4387489754724267,
      "learning_rate": 9.947461858764978e-06,
      "loss": 1.9967,
      "step": 50
    },
    {
      "epoch": 0.3411513859275053,
      "grad_norm": 1.8279329868726204,
      "learning_rate": 9.892045704727864e-06,
      "loss": 1.9387,
      "step": 60
    },
    {
      "epoch": 0.39800995024875624,
      "grad_norm": 1.6474902291327627,
      "learning_rate": 9.817167547748729e-06,
      "loss": 1.9172,
      "step": 70
    },
    {
      "epoch": 0.4548685145700071,
      "grad_norm": 1.657541170060607,
      "learning_rate": 9.723125275255325e-06,
      "loss": 1.8705,
      "step": 80
    },
    {
      "epoch": 0.511727078891258,
      "grad_norm": 1.696974285018719,
      "learning_rate": 9.610293015188067e-06,
      "loss": 1.8323,
      "step": 90
    },
    {
      "epoch": 0.5685856432125089,
      "grad_norm": 1.5303782561090193,
      "learning_rate": 9.47911964760858e-06,
      "loss": 1.8171,
      "step": 100
    },
    {
      "epoch": 0.6254442075337597,
      "grad_norm": 1.5651172480757691,
      "learning_rate": 9.330127018922195e-06,
      "loss": 1.8148,
      "step": 110
    },
    {
      "epoch": 0.6823027718550106,
      "grad_norm": 1.5409210855141433,
      "learning_rate": 9.163907865818806e-06,
      "loss": 1.783,
      "step": 120
    },
    {
      "epoch": 0.7391613361762616,
      "grad_norm": 1.5363163924078007,
      "learning_rate": 8.98112345719122e-06,
      "loss": 1.7854,
      "step": 130
    },
    {
      "epoch": 0.7960199004975125,
      "grad_norm": 1.5576565905261832,
      "learning_rate": 8.782500963412156e-06,
      "loss": 1.7504,
      "step": 140
    },
    {
      "epoch": 0.8528784648187633,
      "grad_norm": 1.521719538010432,
      "learning_rate": 8.568830563435695e-06,
      "loss": 1.7504,
      "step": 150
    },
    {
      "epoch": 0.9097370291400142,
      "grad_norm": 1.5810495823530497,
      "learning_rate": 8.34096230123198e-06,
      "loss": 1.7645,
      "step": 160
    },
    {
      "epoch": 0.9665955934612651,
      "grad_norm": 1.41921208948242,
      "learning_rate": 8.099802704061194e-06,
      "loss": 1.7222,
      "step": 170
    },
    {
      "epoch": 1.023454157782516,
      "grad_norm": 2.303489781396609,
      "learning_rate": 7.846311176040331e-06,
      "loss": 1.6839,
      "step": 180
    },
    {
      "epoch": 1.080312722103767,
      "grad_norm": 1.7804848302086473,
      "learning_rate": 7.581496181350203e-06,
      "loss": 1.5172,
      "step": 190
    },
    {
      "epoch": 1.1371712864250179,
      "grad_norm": 1.5922077446873317,
      "learning_rate": 7.30641123226703e-06,
      "loss": 1.5129,
      "step": 200
    },
    {
      "epoch": 1.1940298507462686,
      "grad_norm": 1.600712736103309,
      "learning_rate": 7.022150697979385e-06,
      "loss": 1.5027,
      "step": 210
    },
    {
      "epoch": 1.2508884150675195,
      "grad_norm": 1.5405911558473169,
      "learning_rate": 6.7298454508642945e-06,
      "loss": 1.4733,
      "step": 220
    },
    {
      "epoch": 1.3077469793887704,
      "grad_norm": 1.6005186629850159,
      "learning_rate": 6.4306583675428435e-06,
      "loss": 1.5009,
      "step": 230
    },
    {
      "epoch": 1.3646055437100213,
      "grad_norm": 1.6179249599606138,
      "learning_rate": 6.125779702613471e-06,
      "loss": 1.4981,
      "step": 240
    },
    {
      "epoch": 1.4214641080312722,
      "grad_norm": 1.5595473213575002,
      "learning_rate": 5.816422353467562e-06,
      "loss": 1.4929,
      "step": 250
    },
    {
      "epoch": 1.4783226723525231,
      "grad_norm": 1.610680010605817,
      "learning_rate": 5.503817035025341e-06,
      "loss": 1.4962,
      "step": 260
    },
    {
      "epoch": 1.535181236673774,
      "grad_norm": 1.5401109765819234,
      "learning_rate": 5.189207383588353e-06,
      "loss": 1.4828,
      "step": 270
    },
    {
      "epoch": 1.5920398009950247,
      "grad_norm": 1.5437992683845965,
      "learning_rate": 4.873845009286879e-06,
      "loss": 1.4943,
      "step": 280
    },
    {
      "epoch": 1.6488983653162759,
      "grad_norm": 1.569798351736018,
      "learning_rate": 4.5589845168051176e-06,
      "loss": 1.47,
      "step": 290
    },
    {
      "epoch": 1.7057569296375266,
      "grad_norm": 1.5635972598860413,
      "learning_rate": 4.245878514193131e-06,
      "loss": 1.4717,
      "step": 300
    },
    {
      "epoch": 1.7626154939587777,
      "grad_norm": 1.583256520765993,
      "learning_rate": 3.935772629621996e-06,
      "loss": 1.4759,
      "step": 310
    },
    {
      "epoch": 1.8194740582800284,
      "grad_norm": 1.5703071827809085,
      "learning_rate": 3.62990055590697e-06,
      "loss": 1.4731,
      "step": 320
    },
    {
      "epoch": 1.8763326226012793,
      "grad_norm": 1.5673344510472709,
      "learning_rate": 3.3294791425130512e-06,
      "loss": 1.4609,
      "step": 330
    },
    {
      "epoch": 1.9331911869225302,
      "grad_norm": 1.5649245970464676,
      "learning_rate": 3.035703554568331e-06,
      "loss": 1.4557,
      "step": 340
    },
    {
      "epoch": 1.9900497512437811,
      "grad_norm": 1.5535579680886489,
      "learning_rate": 2.749742518144061e-06,
      "loss": 1.4604,
      "step": 350
    },
    {
      "epoch": 2.046908315565032,
      "grad_norm": 2.6898081511100944,
      "learning_rate": 2.4727336707170973e-06,
      "loss": 1.2976,
      "step": 360
    },
    {
      "epoch": 2.1037668798862827,
      "grad_norm": 2.294462090007101,
      "learning_rate": 2.2057790353119533e-06,
      "loss": 1.2109,
      "step": 370
    },
    {
      "epoch": 2.160625444207534,
      "grad_norm": 2.0734336172332375,
      "learning_rate": 1.949940636327671e-06,
      "loss": 1.2101,
      "step": 380
    },
    {
      "epoch": 2.2174840085287846,
      "grad_norm": 1.9313522409801105,
      "learning_rate": 1.7062362744910321e-06,
      "loss": 1.2124,
      "step": 390
    },
    {
      "epoch": 2.2743425728500357,
      "grad_norm": 1.9741447028673955,
      "learning_rate": 1.4756354777446004e-06,
      "loss": 1.2204,
      "step": 400
    },
    {
      "epoch": 2.3312011371712864,
      "grad_norm": 1.9826092702551152,
      "learning_rate": 1.2590556441781725e-06,
      "loss": 1.2031,
      "step": 410
    },
    {
      "epoch": 2.388059701492537,
      "grad_norm": 1.9337513040949874,
      "learning_rate": 1.057358392348171e-06,
      "loss": 1.1978,
      "step": 420
    },
    {
      "epoch": 2.4449182658137882,
      "grad_norm": 1.9619207022209448,
      "learning_rate": 8.713461335044981e-07,
      "loss": 1.1929,
      "step": 430
    },
    {
      "epoch": 2.501776830135039,
      "grad_norm": 1.9462643434600588,
      "learning_rate": 7.017588793615499e-07,
      "loss": 1.2075,
      "step": 440
    },
    {
      "epoch": 2.55863539445629,
      "grad_norm": 1.9622874707007205,
      "learning_rate": 5.492712981130171e-07,
      "loss": 1.2068,
      "step": 450
    },
    {
      "epoch": 2.6154939587775408,
      "grad_norm": 1.9185457493104283,
      "learning_rate": 4.144900304025101e-07,
      "loss": 1.1924,
      "step": 460
    },
    {
      "epoch": 2.672352523098792,
      "grad_norm": 1.9287168348109207,
      "learning_rate": 2.979512759278719e-07,
      "loss": 1.1847,
      "step": 470
    },
    {
      "epoch": 2.7292110874200426,
      "grad_norm": 1.898626201871725,
      "learning_rate": 2.0011866028038617e-07,
      "loss": 1.192,
      "step": 480
    },
    {
      "epoch": 2.7860696517412933,
      "grad_norm": 1.9641608263451322,
      "learning_rate": 1.2138139050522024e-07,
      "loss": 1.204,
      "step": 490
    },
    {
      "epoch": 2.8429282160625444,
      "grad_norm": 1.9152149742726723,
      "learning_rate": 6.20527067208232e-08,
      "loss": 1.2093,
      "step": 500
    },
    {
      "epoch": 2.8429282160625444,
      "eval_loss": 1.7461528778076172,
      "eval_runtime": 307.7688,
      "eval_samples_per_second": 32.492,
      "eval_steps_per_second": 0.51,
      "step": 500
    },
    {
      "epoch": 2.8997867803837956,
      "grad_norm": 1.9127782775890796,
      "learning_rate": 2.236863595720562e-08,
      "loss": 1.1844,
      "step": 510
    },
    {
      "epoch": 2.9566453447050463,
      "grad_norm": 1.8938021124768716,
      "learning_rate": 2.4870531706872037e-09,
      "loss": 1.1984,
      "step": 520
    },
    {
      "epoch": 2.9850746268656714,
      "step": 525,
      "total_flos": 165772559646720.0,
      "train_loss": 1.5532971572875978,
      "train_runtime": 24046.0291,
      "train_samples_per_second": 11.228,
      "train_steps_per_second": 0.022
    }
  ],
  "logging_steps": 10,
  "max_steps": 525,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 165772559646720.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}