|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 4731,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03170577045022194,
      "grad_norm": 347.7311096191406,
      "learning_rate": 4.94715704924963e-05,
      "loss": 8.5747,
      "step": 50
    },
    {
      "epoch": 0.06341154090044387,
      "grad_norm": 216.6991424560547,
      "learning_rate": 4.8943140984992603e-05,
      "loss": 3.9346,
      "step": 100
    },
    {
      "epoch": 0.09511731135066583,
      "grad_norm": 1.8576736450195312,
      "learning_rate": 4.8414711477488904e-05,
      "loss": 0.7161,
      "step": 150
    },
    {
      "epoch": 0.12682308180088775,
      "grad_norm": 1.3204137086868286,
      "learning_rate": 4.788628196998521e-05,
      "loss": 0.6464,
      "step": 200
    },
    {
      "epoch": 0.15852885225110971,
      "grad_norm": 1.0963375568389893,
      "learning_rate": 4.7357852462481505e-05,
      "loss": 0.5442,
      "step": 250
    },
    {
      "epoch": 0.19023462270133165,
      "grad_norm": 1.6606075763702393,
      "learning_rate": 4.6829422954977806e-05,
      "loss": 0.6393,
      "step": 300
    },
    {
      "epoch": 0.2219403931515536,
      "grad_norm": 0.8779278993606567,
      "learning_rate": 4.630099344747411e-05,
      "loss": 0.689,
      "step": 350
    },
    {
      "epoch": 0.2536461636017755,
      "grad_norm": 0.6462757587432861,
      "learning_rate": 4.577256393997041e-05,
      "loss": 0.5693,
      "step": 400
    },
    {
      "epoch": 0.2853519340519975,
      "grad_norm": 0.670758843421936,
      "learning_rate": 4.5244134432466714e-05,
      "loss": 0.4993,
      "step": 450
    },
    {
      "epoch": 0.31705770450221943,
      "grad_norm": 0.7589445114135742,
      "learning_rate": 4.4715704924963015e-05,
      "loss": 0.6332,
      "step": 500
    },
    {
      "epoch": 0.34876347495244137,
      "grad_norm": 0.7136730551719666,
      "learning_rate": 4.4187275417459315e-05,
      "loss": 0.5269,
      "step": 550
    },
    {
      "epoch": 0.3804692454026633,
      "grad_norm": 0.9714166522026062,
      "learning_rate": 4.3658845909955616e-05,
      "loss": 0.5706,
      "step": 600
    },
    {
      "epoch": 0.41217501585288524,
      "grad_norm": 0.7672790884971619,
      "learning_rate": 4.3130416402451916e-05,
      "loss": 0.5689,
      "step": 650
    },
    {
      "epoch": 0.4438807863031072,
      "grad_norm": 1.4292829036712646,
      "learning_rate": 4.260198689494822e-05,
      "loss": 0.6206,
      "step": 700
    },
    {
      "epoch": 0.4755865567533291,
      "grad_norm": 0.8618677258491516,
      "learning_rate": 4.207355738744452e-05,
      "loss": 0.5413,
      "step": 750
    },
    {
      "epoch": 0.507292327203551,
      "grad_norm": 0.9496092796325684,
      "learning_rate": 4.154512787994082e-05,
      "loss": 0.5418,
      "step": 800
    },
    {
      "epoch": 0.5389980976537729,
      "grad_norm": 0.8666266202926636,
      "learning_rate": 4.101669837243712e-05,
      "loss": 0.597,
      "step": 850
    },
    {
      "epoch": 0.570703868103995,
      "grad_norm": 1.0330841541290283,
      "learning_rate": 4.048826886493342e-05,
      "loss": 0.5309,
      "step": 900
    },
    {
      "epoch": 0.6024096385542169,
      "grad_norm": 0.6087735295295715,
      "learning_rate": 3.995983935742972e-05,
      "loss": 0.5225,
      "step": 950
    },
    {
      "epoch": 0.6341154090044389,
      "grad_norm": 0.9573830962181091,
      "learning_rate": 3.943140984992602e-05,
      "loss": 0.5227,
      "step": 1000
    },
    {
      "epoch": 0.6658211794546608,
      "grad_norm": 0.6024531126022339,
      "learning_rate": 3.890298034242232e-05,
      "loss": 0.4838,
      "step": 1050
    },
    {
      "epoch": 0.6975269499048827,
      "grad_norm": 0.5939881801605225,
      "learning_rate": 3.837455083491862e-05,
      "loss": 0.541,
      "step": 1100
    },
    {
      "epoch": 0.7292327203551047,
      "grad_norm": 0.6684996485710144,
      "learning_rate": 3.784612132741492e-05,
      "loss": 0.5198,
      "step": 1150
    },
    {
      "epoch": 0.7609384908053266,
      "grad_norm": 1.1875889301300049,
      "learning_rate": 3.731769181991123e-05,
      "loss": 0.6105,
      "step": 1200
    },
    {
      "epoch": 0.7926442612555485,
      "grad_norm": 0.570648729801178,
      "learning_rate": 3.678926231240752e-05,
      "loss": 0.5735,
      "step": 1250
    },
    {
      "epoch": 0.8243500317057705,
      "grad_norm": 0.6439939141273499,
      "learning_rate": 3.626083280490383e-05,
      "loss": 0.5283,
      "step": 1300
    },
    {
      "epoch": 0.8560558021559924,
      "grad_norm": 0.6673012375831604,
      "learning_rate": 3.573240329740013e-05,
      "loss": 0.6057,
      "step": 1350
    },
    {
      "epoch": 0.8877615726062144,
      "grad_norm": 0.4656541645526886,
      "learning_rate": 3.5203973789896424e-05,
      "loss": 0.4664,
      "step": 1400
    },
    {
      "epoch": 0.9194673430564363,
      "grad_norm": 1.075217366218567,
      "learning_rate": 3.467554428239273e-05,
      "loss": 0.5336,
      "step": 1450
    },
    {
      "epoch": 0.9511731135066582,
      "grad_norm": 1.2251460552215576,
      "learning_rate": 3.414711477488903e-05,
      "loss": 0.5354,
      "step": 1500
    },
    {
      "epoch": 0.9828788839568802,
      "grad_norm": 0.822003185749054,
      "learning_rate": 3.361868526738533e-05,
      "loss": 0.6133,
      "step": 1550
    },
    {
      "epoch": 1.014584654407102,
      "grad_norm": 0.5614562630653381,
      "learning_rate": 3.309025575988163e-05,
      "loss": 0.5059,
      "step": 1600
    },
    {
      "epoch": 1.046290424857324,
      "grad_norm": 0.5171016454696655,
      "learning_rate": 3.2561826252377934e-05,
      "loss": 0.5285,
      "step": 1650
    },
    {
      "epoch": 1.0779961953075459,
      "grad_norm": 0.9495669007301331,
      "learning_rate": 3.2033396744874234e-05,
      "loss": 0.4955,
      "step": 1700
    },
    {
      "epoch": 1.1097019657577678,
      "grad_norm": 0.9576842784881592,
      "learning_rate": 3.1504967237370535e-05,
      "loss": 0.5107,
      "step": 1750
    },
    {
      "epoch": 1.1414077362079897,
      "grad_norm": 0.6107915639877319,
      "learning_rate": 3.097653772986684e-05,
      "loss": 0.519,
      "step": 1800
    },
    {
      "epoch": 1.1731135066582117,
      "grad_norm": 0.5931509137153625,
      "learning_rate": 3.0448108222363136e-05,
      "loss": 0.5699,
      "step": 1850
    },
    {
      "epoch": 1.2048192771084336,
      "grad_norm": 0.6647824048995972,
      "learning_rate": 2.991967871485944e-05,
      "loss": 0.5022,
      "step": 1900
    },
    {
      "epoch": 1.2365250475586556,
      "grad_norm": 0.5938781499862671,
      "learning_rate": 2.939124920735574e-05,
      "loss": 0.4507,
      "step": 1950
    },
    {
      "epoch": 1.2682308180088775,
      "grad_norm": 1.1466686725616455,
      "learning_rate": 2.886281969985204e-05,
      "loss": 0.6235,
      "step": 2000
    },
    {
      "epoch": 1.2999365884590994,
      "grad_norm": 0.4420027434825897,
      "learning_rate": 2.833439019234834e-05,
      "loss": 0.6154,
      "step": 2050
    },
    {
      "epoch": 1.3316423589093214,
      "grad_norm": 0.746532142162323,
      "learning_rate": 2.7805960684844646e-05,
      "loss": 0.4898,
      "step": 2100
    },
    {
      "epoch": 1.3633481293595433,
      "grad_norm": 1.0959316492080688,
      "learning_rate": 2.7277531177340943e-05,
      "loss": 0.4544,
      "step": 2150
    },
    {
      "epoch": 1.3950538998097652,
      "grad_norm": 0.8176191449165344,
      "learning_rate": 2.6749101669837247e-05,
      "loss": 0.481,
      "step": 2200
    },
    {
      "epoch": 1.4267596702599874,
      "grad_norm": 0.8100579380989075,
      "learning_rate": 2.6220672162333547e-05,
      "loss": 0.4333,
      "step": 2250
    },
    {
      "epoch": 1.4584654407102093,
      "grad_norm": 0.8650724291801453,
      "learning_rate": 2.5692242654829844e-05,
      "loss": 0.4629,
      "step": 2300
    },
    {
      "epoch": 1.4901712111604313,
      "grad_norm": 1.6807165145874023,
      "learning_rate": 2.5163813147326148e-05,
      "loss": 0.4126,
      "step": 2350
    },
    {
      "epoch": 1.521876981610653,
      "grad_norm": 0.6398534774780273,
      "learning_rate": 2.463538363982245e-05,
      "loss": 0.4776,
      "step": 2400
    },
    {
      "epoch": 1.553582752060875,
      "grad_norm": 0.7640373110771179,
      "learning_rate": 2.410695413231875e-05,
      "loss": 0.4901,
      "step": 2450
    },
    {
      "epoch": 1.5852885225110969,
      "grad_norm": 0.644173800945282,
      "learning_rate": 2.357852462481505e-05,
      "loss": 0.5133,
      "step": 2500
    },
    {
      "epoch": 1.6169942929613188,
      "grad_norm": 0.577743649482727,
      "learning_rate": 2.305009511731135e-05,
      "loss": 0.4221,
      "step": 2550
    },
    {
      "epoch": 1.6487000634115407,
      "grad_norm": 0.6014630198478699,
      "learning_rate": 2.252166560980765e-05,
      "loss": 0.4766,
      "step": 2600
    },
    {
      "epoch": 1.6804058338617627,
      "grad_norm": 0.6592232584953308,
      "learning_rate": 2.1993236102303955e-05,
      "loss": 0.5726,
      "step": 2650
    },
    {
      "epoch": 1.7121116043119848,
      "grad_norm": 0.7719835042953491,
      "learning_rate": 2.1464806594800255e-05,
      "loss": 0.4941,
      "step": 2700
    },
    {
      "epoch": 1.7438173747622068,
      "grad_norm": 0.9424024224281311,
      "learning_rate": 2.0936377087296556e-05,
      "loss": 0.5236,
      "step": 2750
    },
    {
      "epoch": 1.7755231452124287,
      "grad_norm": 1.0417604446411133,
      "learning_rate": 2.0407947579792857e-05,
      "loss": 0.5269,
      "step": 2800
    },
    {
      "epoch": 1.8072289156626506,
      "grad_norm": 0.42674678564071655,
      "learning_rate": 1.9879518072289157e-05,
      "loss": 0.5067,
      "step": 2850
    },
    {
      "epoch": 1.8389346861128726,
      "grad_norm": 0.6428393721580505,
      "learning_rate": 1.9351088564785458e-05,
      "loss": 0.4381,
      "step": 2900
    },
    {
      "epoch": 1.8706404565630945,
      "grad_norm": 0.936777651309967,
      "learning_rate": 1.882265905728176e-05,
      "loss": 0.6561,
      "step": 2950
    },
    {
      "epoch": 1.9023462270133165,
      "grad_norm": 1.3489336967468262,
      "learning_rate": 1.8294229549778062e-05,
      "loss": 0.4983,
      "step": 3000
    },
    {
      "epoch": 1.9340519974635384,
      "grad_norm": 0.6496655344963074,
      "learning_rate": 1.776580004227436e-05,
      "loss": 0.4759,
      "step": 3050
    },
    {
      "epoch": 1.9657577679137603,
      "grad_norm": 0.712348997592926,
      "learning_rate": 1.7237370534770663e-05,
      "loss": 0.4621,
      "step": 3100
    },
    {
      "epoch": 1.9974635383639823,
      "grad_norm": 0.5334410667419434,
      "learning_rate": 1.6708941027266964e-05,
      "loss": 0.5246,
      "step": 3150
    },
    {
      "epoch": 2.029169308814204,
      "grad_norm": 0.9799864888191223,
      "learning_rate": 1.6180511519763264e-05,
      "loss": 0.388,
      "step": 3200
    },
    {
      "epoch": 2.060875079264426,
      "grad_norm": 0.6191227436065674,
      "learning_rate": 1.5652082012259565e-05,
      "loss": 0.422,
      "step": 3250
    },
    {
      "epoch": 2.092580849714648,
      "grad_norm": 0.6415786743164062,
      "learning_rate": 1.5123652504755867e-05,
      "loss": 0.4213,
      "step": 3300
    },
    {
      "epoch": 2.12428662016487,
      "grad_norm": 1.024706244468689,
      "learning_rate": 1.4595222997252168e-05,
      "loss": 0.4378,
      "step": 3350
    },
    {
      "epoch": 2.1559923906150917,
      "grad_norm": 1.0147749185562134,
      "learning_rate": 1.4066793489748466e-05,
      "loss": 0.5441,
      "step": 3400
    },
    {
      "epoch": 2.187698161065314,
      "grad_norm": 1.1020487546920776,
      "learning_rate": 1.353836398224477e-05,
      "loss": 0.554,
      "step": 3450
    },
    {
      "epoch": 2.2194039315155356,
      "grad_norm": 0.7216790318489075,
      "learning_rate": 1.3009934474741071e-05,
      "loss": 0.4983,
      "step": 3500
    },
    {
      "epoch": 2.251109701965758,
      "grad_norm": 1.2225050926208496,
      "learning_rate": 1.2481504967237371e-05,
      "loss": 0.4586,
      "step": 3550
    },
    {
      "epoch": 2.2828154724159795,
      "grad_norm": 0.7050449252128601,
      "learning_rate": 1.1953075459733672e-05,
      "loss": 0.4235,
      "step": 3600
    },
    {
      "epoch": 2.314521242866202,
      "grad_norm": 0.6529106497764587,
      "learning_rate": 1.1424645952229973e-05,
      "loss": 0.5061,
      "step": 3650
    },
    {
      "epoch": 2.3462270133164234,
      "grad_norm": 0.5396713018417358,
      "learning_rate": 1.0896216444726275e-05,
      "loss": 0.4983,
      "step": 3700
    },
    {
      "epoch": 2.3779327837666457,
      "grad_norm": 0.6897116899490356,
      "learning_rate": 1.0367786937222575e-05,
      "loss": 0.4658,
      "step": 3750
    },
    {
      "epoch": 2.4096385542168672,
      "grad_norm": 1.0474814176559448,
      "learning_rate": 9.839357429718876e-06,
      "loss": 0.4578,
      "step": 3800
    },
    {
      "epoch": 2.4413443246670896,
      "grad_norm": 0.5905832648277283,
      "learning_rate": 9.310927922215176e-06,
      "loss": 0.4349,
      "step": 3850
    },
    {
      "epoch": 2.473050095117311,
      "grad_norm": 0.6709778904914856,
      "learning_rate": 8.782498414711479e-06,
      "loss": 0.4351,
      "step": 3900
    },
    {
      "epoch": 2.5047558655675335,
      "grad_norm": 0.5383151173591614,
      "learning_rate": 8.254068907207779e-06,
      "loss": 0.52,
      "step": 3950
    },
    {
      "epoch": 2.536461636017755,
      "grad_norm": 0.9183222055435181,
      "learning_rate": 7.72563939970408e-06,
      "loss": 0.5099,
      "step": 4000
    },
    {
      "epoch": 2.5681674064679774,
      "grad_norm": 1.0442452430725098,
      "learning_rate": 7.197209892200381e-06,
      "loss": 0.561,
      "step": 4050
    },
    {
      "epoch": 2.599873176918199,
      "grad_norm": 0.4895930886268616,
      "learning_rate": 6.6687803846966825e-06,
      "loss": 0.4435,
      "step": 4100
    },
    {
      "epoch": 2.6315789473684212,
      "grad_norm": 0.6229114532470703,
      "learning_rate": 6.140350877192982e-06,
      "loss": 0.5262,
      "step": 4150
    },
    {
      "epoch": 2.6632847178186427,
      "grad_norm": 0.647319495677948,
      "learning_rate": 5.6119213696892836e-06,
      "loss": 0.5116,
      "step": 4200
    },
    {
      "epoch": 2.694990488268865,
      "grad_norm": 0.6666443347930908,
      "learning_rate": 5.083491862185585e-06,
      "loss": 0.5408,
      "step": 4250
    },
    {
      "epoch": 2.7266962587190866,
      "grad_norm": 0.6282554268836975,
      "learning_rate": 4.5550623546818855e-06,
      "loss": 0.4801,
      "step": 4300
    },
    {
      "epoch": 2.758402029169309,
      "grad_norm": 1.0603163242340088,
      "learning_rate": 4.026632847178187e-06,
      "loss": 0.4662,
      "step": 4350
    },
    {
      "epoch": 2.7901077996195305,
      "grad_norm": 0.7217985391616821,
      "learning_rate": 3.498203339674488e-06,
      "loss": 0.3947,
      "step": 4400
    },
    {
      "epoch": 2.821813570069753,
      "grad_norm": 0.49021148681640625,
      "learning_rate": 2.9697738321707884e-06,
      "loss": 0.4047,
      "step": 4450
    },
    {
      "epoch": 2.853519340519975,
      "grad_norm": 0.9363583922386169,
      "learning_rate": 2.4413443246670898e-06,
      "loss": 0.4912,
      "step": 4500
    },
    {
      "epoch": 2.8852251109701967,
      "grad_norm": 0.9978638291358948,
      "learning_rate": 1.9129148171633903e-06,
      "loss": 0.579,
      "step": 4550
    },
    {
      "epoch": 2.9169308814204187,
      "grad_norm": 0.6237664222717285,
      "learning_rate": 1.3844853096596915e-06,
      "loss": 0.4324,
      "step": 4600
    },
    {
      "epoch": 2.9486366518706406,
      "grad_norm": 0.6290932893753052,
      "learning_rate": 8.560558021559924e-07,
      "loss": 0.4495,
      "step": 4650
    },
    {
      "epoch": 2.9803424223208625,
      "grad_norm": 0.8747038245201111,
      "learning_rate": 3.276262946522934e-07,
      "loss": 0.44,
      "step": 4700
    }
  ],
  "logging_steps": 50,
  "max_steps": 4731,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4943906537472000.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
|
|