{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.29315196998123827,
  "eval_steps": 500,
  "global_step": 7500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003908692933083177,
      "grad_norm": 0.6370952129364014,
      "learning_rate": 4.98240400406663e-05,
      "loss": 1.4791,
      "step": 100
    },
    {
      "epoch": 0.007817385866166354,
      "grad_norm": 0.27393946051597595,
      "learning_rate": 4.9628528974739973e-05,
      "loss": 1.3743,
      "step": 200
    },
    {
      "epoch": 0.01172607879924953,
      "grad_norm": 0.364713191986084,
      "learning_rate": 4.943301790881364e-05,
      "loss": 1.3525,
      "step": 300
    },
    {
      "epoch": 0.015634771732332707,
      "grad_norm": 0.33364734053611755,
      "learning_rate": 4.923750684288731e-05,
      "loss": 1.3457,
      "step": 400
    },
    {
      "epoch": 0.019543464665415886,
      "grad_norm": 0.3672705292701721,
      "learning_rate": 4.904199577696098e-05,
      "loss": 1.3375,
      "step": 500
    },
    {
      "epoch": 0.02345215759849906,
      "grad_norm": 0.16828787326812744,
      "learning_rate": 4.884648471103465e-05,
      "loss": 1.3348,
      "step": 600
    },
    {
      "epoch": 0.02736085053158224,
      "grad_norm": 0.24634529650211334,
      "learning_rate": 4.8650973645108314e-05,
      "loss": 1.3343,
      "step": 700
    },
    {
      "epoch": 0.031269543464665414,
      "grad_norm": 0.5399248003959656,
      "learning_rate": 4.845546257918198e-05,
      "loss": 1.3287,
      "step": 800
    },
    {
      "epoch": 0.03517823639774859,
      "grad_norm": 0.5724157691001892,
      "learning_rate": 4.825995151325565e-05,
      "loss": 1.3236,
      "step": 900
    },
    {
      "epoch": 0.03908692933083177,
      "grad_norm": 0.5199714303016663,
      "learning_rate": 4.8064440447329324e-05,
      "loss": 1.3189,
      "step": 1000
    },
    {
      "epoch": 0.04299562226391495,
      "grad_norm": 0.30912989377975464,
      "learning_rate": 4.786892938140299e-05,
      "loss": 1.315,
      "step": 1100
    },
    {
      "epoch": 0.04690431519699812,
      "grad_norm": 0.37046098709106445,
      "learning_rate": 4.767341831547666e-05,
      "loss": 1.3151,
      "step": 1200
    },
    {
      "epoch": 0.0508130081300813,
      "grad_norm": 0.26445892453193665,
      "learning_rate": 4.747790724955033e-05,
      "loss": 1.3118,
      "step": 1300
    },
    {
      "epoch": 0.05472170106316448,
      "grad_norm": 0.3207770586013794,
      "learning_rate": 4.728239618362399e-05,
      "loss": 1.3087,
      "step": 1400
    },
    {
      "epoch": 0.05863039399624766,
      "grad_norm": 0.4711158573627472,
      "learning_rate": 4.7086885117697664e-05,
      "loss": 1.31,
      "step": 1500
    },
    {
      "epoch": 0.06253908692933083,
      "grad_norm": 0.524340808391571,
      "learning_rate": 4.689137405177133e-05,
      "loss": 1.3062,
      "step": 1600
    },
    {
      "epoch": 0.06644777986241401,
      "grad_norm": 0.3912469744682312,
      "learning_rate": 4.6695862985845e-05,
      "loss": 1.3078,
      "step": 1700
    },
    {
      "epoch": 0.07035647279549719,
      "grad_norm": 0.3625667691230774,
      "learning_rate": 4.6500351919918674e-05,
      "loss": 1.3039,
      "step": 1800
    },
    {
      "epoch": 0.07426516572858036,
      "grad_norm": 0.3897973299026489,
      "learning_rate": 4.630484085399234e-05,
      "loss": 1.3027,
      "step": 1900
    },
    {
      "epoch": 0.07817385866166354,
      "grad_norm": 0.30837342143058777,
      "learning_rate": 4.6109329788066005e-05,
      "loss": 1.3055,
      "step": 2000
    },
    {
      "epoch": 0.08208255159474671,
      "grad_norm": 0.3017924129962921,
      "learning_rate": 4.591381872213967e-05,
      "loss": 1.3021,
      "step": 2100
    },
    {
      "epoch": 0.0859912445278299,
      "grad_norm": 0.5234686136245728,
      "learning_rate": 4.571830765621334e-05,
      "loss": 1.3054,
      "step": 2200
    },
    {
      "epoch": 0.08989993746091307,
      "grad_norm": 0.3180347681045532,
      "learning_rate": 4.5522796590287015e-05,
      "loss": 1.3016,
      "step": 2300
    },
    {
      "epoch": 0.09380863039399624,
      "grad_norm": 0.22097885608673096,
      "learning_rate": 4.532728552436068e-05,
      "loss": 1.3014,
      "step": 2400
    },
    {
      "epoch": 0.09771732332707943,
      "grad_norm": 0.3503361642360687,
      "learning_rate": 4.513177445843435e-05,
      "loss": 1.3004,
      "step": 2500
    },
    {
      "epoch": 0.1016260162601626,
      "grad_norm": 0.3880802392959595,
      "learning_rate": 4.493626339250802e-05,
      "loss": 1.2992,
      "step": 2600
    },
    {
      "epoch": 0.10553470919324578,
      "grad_norm": 0.3632693886756897,
      "learning_rate": 4.474075232658168e-05,
      "loss": 1.3009,
      "step": 2700
    },
    {
      "epoch": 0.10944340212632896,
      "grad_norm": 0.4512663781642914,
      "learning_rate": 4.4545241260655355e-05,
      "loss": 1.2999,
      "step": 2800
    },
    {
      "epoch": 0.11335209505941213,
      "grad_norm": 0.3041514456272125,
      "learning_rate": 4.434973019472902e-05,
      "loss": 1.3007,
      "step": 2900
    },
    {
      "epoch": 0.11726078799249531,
      "grad_norm": 0.34578946232795715,
      "learning_rate": 4.415421912880269e-05,
      "loss": 1.2985,
      "step": 3000
    },
    {
      "epoch": 0.12116948092557848,
      "grad_norm": 0.2616026997566223,
      "learning_rate": 4.3958708062876365e-05,
      "loss": 1.2984,
      "step": 3100
    },
    {
      "epoch": 0.12507817385866166,
      "grad_norm": 0.253749817609787,
      "learning_rate": 4.376319699695003e-05,
      "loss": 1.2963,
      "step": 3200
    },
    {
      "epoch": 0.12898686679174484,
      "grad_norm": 0.24605727195739746,
      "learning_rate": 4.35676859310237e-05,
      "loss": 1.3005,
      "step": 3300
    },
    {
      "epoch": 0.13289555972482803,
      "grad_norm": 0.31458768248558044,
      "learning_rate": 4.337217486509736e-05,
      "loss": 1.3008,
      "step": 3400
    },
    {
      "epoch": 0.13680425265791119,
      "grad_norm": 0.23786158859729767,
      "learning_rate": 4.3176663799171034e-05,
      "loss": 1.2975,
      "step": 3500
    },
    {
      "epoch": 0.14071294559099437,
      "grad_norm": 0.3280108869075775,
      "learning_rate": 4.2981152733244706e-05,
      "loss": 1.2987,
      "step": 3600
    },
    {
      "epoch": 0.14462163852407756,
      "grad_norm": 0.25433337688446045,
      "learning_rate": 4.278564166731837e-05,
      "loss": 1.2972,
      "step": 3700
    },
    {
      "epoch": 0.1485303314571607,
      "grad_norm": 0.2792419493198395,
      "learning_rate": 4.2590130601392044e-05,
      "loss": 1.2944,
      "step": 3800
    },
    {
      "epoch": 0.1524390243902439,
      "grad_norm": 0.34164726734161377,
      "learning_rate": 4.239461953546571e-05,
      "loss": 1.298,
      "step": 3900
    },
    {
      "epoch": 0.15634771732332708,
      "grad_norm": 0.2886170744895935,
      "learning_rate": 4.219910846953938e-05,
      "loss": 1.2948,
      "step": 4000
    },
    {
      "epoch": 0.16025641025641027,
      "grad_norm": 0.3007793426513672,
      "learning_rate": 4.2003597403613047e-05,
      "loss": 1.2954,
      "step": 4100
    },
    {
      "epoch": 0.16416510318949343,
      "grad_norm": 0.32882916927337646,
      "learning_rate": 4.180808633768671e-05,
      "loss": 1.2942,
      "step": 4200
    },
    {
      "epoch": 0.1680737961225766,
      "grad_norm": 0.2950049936771393,
      "learning_rate": 4.1612575271760384e-05,
      "loss": 1.2938,
      "step": 4300
    },
    {
      "epoch": 0.1719824890556598,
      "grad_norm": 0.7425180673599243,
      "learning_rate": 4.141706420583405e-05,
      "loss": 1.2926,
      "step": 4400
    },
    {
      "epoch": 0.17589118198874296,
      "grad_norm": 0.26747408509254456,
      "learning_rate": 4.122155313990772e-05,
      "loss": 1.2933,
      "step": 4500
    },
    {
      "epoch": 0.17979987492182614,
      "grad_norm": 0.3664441704750061,
      "learning_rate": 4.1026042073981394e-05,
      "loss": 1.2916,
      "step": 4600
    },
    {
      "epoch": 0.18370856785490933,
      "grad_norm": 0.32603704929351807,
      "learning_rate": 4.083053100805506e-05,
      "loss": 1.2934,
      "step": 4700
    },
    {
      "epoch": 0.18761726078799248,
      "grad_norm": 0.4145171046257019,
      "learning_rate": 4.0635019942128725e-05,
      "loss": 1.2922,
      "step": 4800
    },
    {
      "epoch": 0.19152595372107567,
      "grad_norm": 0.42517679929733276,
      "learning_rate": 4.04395088762024e-05,
      "loss": 1.293,
      "step": 4900
    },
    {
      "epoch": 0.19543464665415886,
      "grad_norm": 0.48030540347099304,
      "learning_rate": 4.024399781027606e-05,
      "loss": 1.2906,
      "step": 5000
    },
    {
      "epoch": 0.199343339587242,
      "grad_norm": 0.4900824725627899,
      "learning_rate": 4.0048486744349735e-05,
      "loss": 1.293,
      "step": 5100
    },
    {
      "epoch": 0.2032520325203252,
      "grad_norm": 0.400856614112854,
      "learning_rate": 3.98529756784234e-05,
      "loss": 1.2903,
      "step": 5200
    },
    {
      "epoch": 0.20716072545340838,
      "grad_norm": 0.3311282694339752,
      "learning_rate": 3.965746461249707e-05,
      "loss": 1.2917,
      "step": 5300
    },
    {
      "epoch": 0.21106941838649157,
      "grad_norm": 0.5248217582702637,
      "learning_rate": 3.946195354657074e-05,
      "loss": 1.2913,
      "step": 5400
    },
    {
      "epoch": 0.21497811131957473,
      "grad_norm": 0.6463488936424255,
      "learning_rate": 3.92664424806444e-05,
      "loss": 1.2899,
      "step": 5500
    },
    {
      "epoch": 0.2188868042526579,
      "grad_norm": 0.3202870786190033,
      "learning_rate": 3.9070931414718075e-05,
      "loss": 1.2866,
      "step": 5600
    },
    {
      "epoch": 0.2227954971857411,
      "grad_norm": 0.5758823752403259,
      "learning_rate": 3.887542034879174e-05,
      "loss": 1.2907,
      "step": 5700
    },
    {
      "epoch": 0.22670419011882426,
      "grad_norm": 0.4410454034805298,
      "learning_rate": 3.867990928286541e-05,
      "loss": 1.2885,
      "step": 5800
    },
    {
      "epoch": 0.23061288305190744,
      "grad_norm": 0.3980095088481903,
      "learning_rate": 3.8484398216939085e-05,
      "loss": 1.2895,
      "step": 5900
    },
    {
      "epoch": 0.23452157598499063,
      "grad_norm": 0.3670996427536011,
      "learning_rate": 3.828888715101275e-05,
      "loss": 1.2888,
      "step": 6000
    },
    {
      "epoch": 0.23843026891807378,
      "grad_norm": 0.302168071269989,
      "learning_rate": 3.8093376085086416e-05,
      "loss": 1.2874,
      "step": 6100
    },
    {
      "epoch": 0.24233896185115697,
      "grad_norm": 0.4420079290866852,
      "learning_rate": 3.789786501916008e-05,
      "loss": 1.2849,
      "step": 6200
    },
    {
      "epoch": 0.24624765478424016,
      "grad_norm": 0.4248519539833069,
      "learning_rate": 3.770235395323375e-05,
      "loss": 1.2853,
      "step": 6300
    },
    {
      "epoch": 0.2501563477173233,
      "grad_norm": 0.5483678579330444,
      "learning_rate": 3.7506842887307426e-05,
      "loss": 1.2883,
      "step": 6400
    },
    {
      "epoch": 0.2540650406504065,
      "grad_norm": 0.7262341380119324,
      "learning_rate": 3.731133182138109e-05,
      "loss": 1.2893,
      "step": 6500
    },
    {
      "epoch": 0.2579737335834897,
      "grad_norm": 0.47499603033065796,
      "learning_rate": 3.711582075545476e-05,
      "loss": 1.2861,
      "step": 6600
    },
    {
      "epoch": 0.26188242651657284,
      "grad_norm": 0.4093151092529297,
      "learning_rate": 3.692030968952843e-05,
      "loss": 1.288,
      "step": 6700
    },
    {
      "epoch": 0.26579111944965605,
      "grad_norm": 0.3050064146518707,
      "learning_rate": 3.6724798623602094e-05,
      "loss": 1.2891,
      "step": 6800
    },
    {
      "epoch": 0.2696998123827392,
      "grad_norm": 0.28250014781951904,
      "learning_rate": 3.6529287557675766e-05,
      "loss": 1.288,
      "step": 6900
    },
    {
      "epoch": 0.27360850531582237,
      "grad_norm": 0.37311726808547974,
      "learning_rate": 3.633377649174943e-05,
      "loss": 1.2864,
      "step": 7000
    },
    {
      "epoch": 0.2775171982489056,
      "grad_norm": 0.3157326877117157,
      "learning_rate": 3.6138265425823104e-05,
      "loss": 1.2862,
      "step": 7100
    },
    {
      "epoch": 0.28142589118198874,
      "grad_norm": 0.3368279039859772,
      "learning_rate": 3.5942754359896776e-05,
      "loss": 1.2884,
      "step": 7200
    },
    {
      "epoch": 0.2853345841150719,
      "grad_norm": 0.4087933301925659,
      "learning_rate": 3.574724329397044e-05,
      "loss": 1.2848,
      "step": 7300
    },
    {
      "epoch": 0.2892432770481551,
      "grad_norm": 0.9965713620185852,
      "learning_rate": 3.555173222804411e-05,
      "loss": 1.2832,
      "step": 7400
    },
    {
      "epoch": 0.29315196998123827,
      "grad_norm": 0.4364921748638153,
      "learning_rate": 3.535622116211777e-05,
      "loss": 1.2869,
      "step": 7500
    }
  ],
  "logging_steps": 100,
  "max_steps": 25584,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5268213596160000.0,
  "train_batch_size": 48,
  "trial_name": null,
  "trial_params": null
}