{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.853519340519975,
  "eval_steps": 500,
  "global_step": 4500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03170577045022194,
      "grad_norm": 306.2328796386719,
      "learning_rate": 4.94715704924963e-05,
      "loss": 5.2387,
      "step": 50
    },
    {
      "epoch": 0.06341154090044387,
      "grad_norm": 2.433814525604248,
      "learning_rate": 4.8943140984992603e-05,
      "loss": 1.131,
      "step": 100
    },
    {
      "epoch": 0.09511731135066583,
      "grad_norm": 1.5357435941696167,
      "learning_rate": 4.8414711477488904e-05,
      "loss": 0.5924,
      "step": 150
    },
    {
      "epoch": 0.12682308180088775,
      "grad_norm": 1.2382903099060059,
      "learning_rate": 4.788628196998521e-05,
      "loss": 0.6336,
      "step": 200
    },
    {
      "epoch": 0.15852885225110971,
      "grad_norm": 1.0461214780807495,
      "learning_rate": 4.7357852462481505e-05,
      "loss": 0.5381,
      "step": 250
    },
    {
      "epoch": 0.19023462270133165,
      "grad_norm": 1.6466267108917236,
      "learning_rate": 4.6829422954977806e-05,
      "loss": 0.6359,
      "step": 300
    },
    {
      "epoch": 0.2219403931515536,
      "grad_norm": 0.8996474146842957,
      "learning_rate": 4.630099344747411e-05,
      "loss": 0.6849,
      "step": 350
    },
    {
      "epoch": 0.2536461636017755,
      "grad_norm": 0.6775561571121216,
      "learning_rate": 4.577256393997041e-05,
      "loss": 0.5673,
      "step": 400
    },
    {
      "epoch": 0.2853519340519975,
      "grad_norm": 0.6857176423072815,
      "learning_rate": 4.5244134432466714e-05,
      "loss": 0.4975,
      "step": 450
    },
    {
      "epoch": 0.31705770450221943,
      "grad_norm": 0.7963148951530457,
      "learning_rate": 4.4715704924963015e-05,
      "loss": 0.6324,
      "step": 500
    },
    {
      "epoch": 0.34876347495244137,
      "grad_norm": 0.7400671243667603,
      "learning_rate": 4.4187275417459315e-05,
      "loss": 0.5257,
      "step": 550
    },
    {
      "epoch": 0.3804692454026633,
      "grad_norm": 0.8451323509216309,
      "learning_rate": 4.3658845909955616e-05,
      "loss": 0.5694,
      "step": 600
    },
    {
      "epoch": 0.41217501585288524,
      "grad_norm": 0.8110406398773193,
      "learning_rate": 4.3130416402451916e-05,
      "loss": 0.5675,
      "step": 650
    },
    {
      "epoch": 0.4438807863031072,
      "grad_norm": 1.4713784456253052,
      "learning_rate": 4.260198689494822e-05,
      "loss": 0.6193,
      "step": 700
    },
    {
      "epoch": 0.4755865567533291,
      "grad_norm": 0.8999515175819397,
      "learning_rate": 4.207355738744452e-05,
      "loss": 0.5402,
      "step": 750
    },
    {
      "epoch": 0.507292327203551,
      "grad_norm": 1.0108189582824707,
      "learning_rate": 4.154512787994082e-05,
      "loss": 0.5411,
      "step": 800
    },
    {
      "epoch": 0.5389980976537729,
      "grad_norm": 0.8886571526527405,
      "learning_rate": 4.101669837243712e-05,
      "loss": 0.5962,
      "step": 850
    },
    {
      "epoch": 0.570703868103995,
      "grad_norm": 1.0816487073898315,
      "learning_rate": 4.048826886493342e-05,
      "loss": 0.5302,
      "step": 900
    },
    {
      "epoch": 0.6024096385542169,
      "grad_norm": 0.6238551735877991,
      "learning_rate": 3.995983935742972e-05,
      "loss": 0.5216,
      "step": 950
    },
    {
      "epoch": 0.6341154090044389,
      "grad_norm": 1.0108839273452759,
      "learning_rate": 3.943140984992602e-05,
      "loss": 0.522,
      "step": 1000
    },
    {
      "epoch": 0.6658211794546608,
      "grad_norm": 0.6171120405197144,
      "learning_rate": 3.890298034242232e-05,
      "loss": 0.4833,
      "step": 1050
    },
    {
      "epoch": 0.6975269499048827,
      "grad_norm": 0.634709894657135,
      "learning_rate": 3.837455083491862e-05,
      "loss": 0.5404,
      "step": 1100
    },
    {
      "epoch": 0.7292327203551047,
      "grad_norm": 0.6804570555686951,
      "learning_rate": 3.784612132741492e-05,
      "loss": 0.5194,
      "step": 1150
    },
    {
      "epoch": 0.7609384908053266,
      "grad_norm": 1.224907636642456,
      "learning_rate": 3.731769181991123e-05,
      "loss": 0.6098,
      "step": 1200
    },
    {
      "epoch": 0.7926442612555485,
      "grad_norm": 0.6107101440429688,
      "learning_rate": 3.678926231240752e-05,
      "loss": 0.5721,
      "step": 1250
    },
    {
      "epoch": 0.8243500317057705,
      "grad_norm": 0.672882080078125,
      "learning_rate": 3.626083280490383e-05,
      "loss": 0.5276,
      "step": 1300
    },
    {
      "epoch": 0.8560558021559924,
      "grad_norm": 0.7011030316352844,
      "learning_rate": 3.573240329740013e-05,
      "loss": 0.6049,
      "step": 1350
    },
    {
      "epoch": 0.8877615726062144,
      "grad_norm": 0.4889984726905823,
      "learning_rate": 3.5203973789896424e-05,
      "loss": 0.4657,
      "step": 1400
    },
    {
      "epoch": 0.9194673430564363,
      "grad_norm": 1.1118626594543457,
      "learning_rate": 3.467554428239273e-05,
      "loss": 0.5328,
      "step": 1450
    },
    {
      "epoch": 0.9511731135066582,
      "grad_norm": 1.2527000904083252,
      "learning_rate": 3.414711477488903e-05,
      "loss": 0.5355,
      "step": 1500
    },
    {
      "epoch": 0.9828788839568802,
      "grad_norm": 0.8558881878852844,
      "learning_rate": 3.361868526738533e-05,
      "loss": 0.6129,
      "step": 1550
    },
    {
      "epoch": 1.014584654407102,
      "grad_norm": 0.5950063467025757,
      "learning_rate": 3.309025575988163e-05,
      "loss": 0.5058,
      "step": 1600
    },
    {
      "epoch": 1.046290424857324,
      "grad_norm": 0.5575762987136841,
      "learning_rate": 3.2561826252377934e-05,
      "loss": 0.5286,
      "step": 1650
    },
    {
      "epoch": 1.0779961953075459,
      "grad_norm": 0.9821068644523621,
      "learning_rate": 3.2033396744874234e-05,
      "loss": 0.4943,
      "step": 1700
    },
    {
      "epoch": 1.1097019657577678,
      "grad_norm": 0.9642919301986694,
      "learning_rate": 3.1504967237370535e-05,
      "loss": 0.5102,
      "step": 1750
    },
    {
      "epoch": 1.1414077362079897,
      "grad_norm": 0.6422669887542725,
      "learning_rate": 3.097653772986684e-05,
      "loss": 0.5178,
      "step": 1800
    },
    {
      "epoch": 1.1731135066582117,
      "grad_norm": 0.6244462728500366,
      "learning_rate": 3.0448108222363136e-05,
      "loss": 0.5699,
      "step": 1850
    },
    {
      "epoch": 1.2048192771084336,
      "grad_norm": 0.6992059946060181,
      "learning_rate": 2.991967871485944e-05,
      "loss": 0.5031,
      "step": 1900
    },
    {
      "epoch": 1.2365250475586556,
      "grad_norm": 0.609976589679718,
      "learning_rate": 2.939124920735574e-05,
      "loss": 0.4497,
      "step": 1950
    },
    {
      "epoch": 1.2682308180088775,
      "grad_norm": 1.18268883228302,
      "learning_rate": 2.886281969985204e-05,
      "loss": 0.6237,
      "step": 2000
    },
    {
      "epoch": 1.2999365884590994,
      "grad_norm": 0.467398077249527,
      "learning_rate": 2.833439019234834e-05,
      "loss": 0.6139,
      "step": 2050
    },
    {
      "epoch": 1.3316423589093214,
      "grad_norm": 0.7419503927230835,
      "learning_rate": 2.7805960684844646e-05,
      "loss": 0.4893,
      "step": 2100
    },
    {
      "epoch": 1.3633481293595433,
      "grad_norm": 1.152620792388916,
      "learning_rate": 2.7277531177340943e-05,
      "loss": 0.4538,
      "step": 2150
    },
    {
      "epoch": 1.3950538998097652,
      "grad_norm": 0.8632370829582214,
      "learning_rate": 2.6749101669837247e-05,
      "loss": 0.4809,
      "step": 2200
    },
    {
      "epoch": 1.4267596702599874,
      "grad_norm": 0.8322456479072571,
      "learning_rate": 2.6220672162333547e-05,
      "loss": 0.4329,
      "step": 2250
    },
    {
      "epoch": 1.4584654407102093,
      "grad_norm": 0.8780176639556885,
      "learning_rate": 2.5692242654829844e-05,
      "loss": 0.4623,
      "step": 2300
    },
    {
      "epoch": 1.4901712111604313,
      "grad_norm": 1.8087239265441895,
      "learning_rate": 2.5163813147326148e-05,
      "loss": 0.4098,
      "step": 2350
    },
    {
      "epoch": 1.521876981610653,
      "grad_norm": 0.6666421890258789,
      "learning_rate": 2.463538363982245e-05,
      "loss": 0.4768,
      "step": 2400
    },
    {
      "epoch": 1.553582752060875,
      "grad_norm": 0.7871991991996765,
      "learning_rate": 2.410695413231875e-05,
      "loss": 0.4899,
      "step": 2450
    },
    {
      "epoch": 1.5852885225110969,
      "grad_norm": 0.6651085615158081,
      "learning_rate": 2.357852462481505e-05,
      "loss": 0.5141,
      "step": 2500
    },
    {
      "epoch": 1.6169942929613188,
      "grad_norm": 0.5895043611526489,
      "learning_rate": 2.305009511731135e-05,
      "loss": 0.4217,
      "step": 2550
    },
    {
      "epoch": 1.6487000634115407,
      "grad_norm": 0.6137962341308594,
      "learning_rate": 2.252166560980765e-05,
      "loss": 0.4758,
      "step": 2600
    },
    {
      "epoch": 1.6804058338617627,
      "grad_norm": 0.686687707901001,
      "learning_rate": 2.1993236102303955e-05,
      "loss": 0.5715,
      "step": 2650
    },
    {
      "epoch": 1.7121116043119848,
      "grad_norm": 0.7576509118080139,
      "learning_rate": 2.1464806594800255e-05,
      "loss": 0.494,
      "step": 2700
    },
    {
      "epoch": 1.7438173747622068,
      "grad_norm": 0.9644522666931152,
      "learning_rate": 2.0936377087296556e-05,
      "loss": 0.5243,
      "step": 2750
    },
    {
      "epoch": 1.7755231452124287,
      "grad_norm": 1.0673723220825195,
      "learning_rate": 2.0407947579792857e-05,
      "loss": 0.5254,
      "step": 2800
    },
    {
      "epoch": 1.8072289156626506,
      "grad_norm": 0.4408644735813141,
      "learning_rate": 1.9879518072289157e-05,
      "loss": 0.5063,
      "step": 2850
    },
    {
      "epoch": 1.8389346861128726,
      "grad_norm": 0.6671141386032104,
      "learning_rate": 1.9351088564785458e-05,
      "loss": 0.4384,
      "step": 2900
    },
    {
      "epoch": 1.8706404565630945,
      "grad_norm": 0.9607345461845398,
      "learning_rate": 1.882265905728176e-05,
      "loss": 0.6554,
      "step": 2950
    },
    {
      "epoch": 1.9023462270133165,
      "grad_norm": 1.385923981666565,
      "learning_rate": 1.8294229549778062e-05,
      "loss": 0.499,
      "step": 3000
    },
    {
      "epoch": 1.9340519974635384,
      "grad_norm": 0.6698076725006104,
      "learning_rate": 1.776580004227436e-05,
      "loss": 0.4753,
      "step": 3050
    },
    {
      "epoch": 1.9657577679137603,
      "grad_norm": 0.7723073363304138,
      "learning_rate": 1.7237370534770663e-05,
      "loss": 0.462,
      "step": 3100
    },
    {
      "epoch": 1.9974635383639823,
      "grad_norm": 0.5628081560134888,
      "learning_rate": 1.6708941027266964e-05,
      "loss": 0.5225,
      "step": 3150
    },
    {
      "epoch": 2.029169308814204,
      "grad_norm": 1.0150758028030396,
      "learning_rate": 1.6180511519763264e-05,
      "loss": 0.3888,
      "step": 3200
    },
    {
      "epoch": 2.060875079264426,
      "grad_norm": 0.6272999048233032,
      "learning_rate": 1.5652082012259565e-05,
      "loss": 0.4211,
      "step": 3250
    },
    {
      "epoch": 2.092580849714648,
      "grad_norm": 0.672383725643158,
      "learning_rate": 1.5123652504755867e-05,
      "loss": 0.4215,
      "step": 3300
    },
    {
      "epoch": 2.12428662016487,
      "grad_norm": 1.0365363359451294,
      "learning_rate": 1.4595222997252168e-05,
      "loss": 0.4371,
      "step": 3350
    },
    {
      "epoch": 2.1559923906150917,
      "grad_norm": 1.0515477657318115,
      "learning_rate": 1.4066793489748466e-05,
      "loss": 0.544,
      "step": 3400
    },
    {
      "epoch": 2.187698161065314,
      "grad_norm": 1.1331499814987183,
      "learning_rate": 1.353836398224477e-05,
      "loss": 0.5532,
      "step": 3450
    },
    {
      "epoch": 2.2194039315155356,
      "grad_norm": 0.7524999976158142,
      "learning_rate": 1.3009934474741071e-05,
      "loss": 0.4989,
      "step": 3500
    },
    {
      "epoch": 2.251109701965758,
      "grad_norm": 1.2541077136993408,
      "learning_rate": 1.2481504967237371e-05,
      "loss": 0.4576,
      "step": 3550
    },
    {
      "epoch": 2.2828154724159795,
      "grad_norm": 0.7316697835922241,
      "learning_rate": 1.1953075459733672e-05,
      "loss": 0.4228,
      "step": 3600
    },
    {
      "epoch": 2.314521242866202,
      "grad_norm": 0.680396318435669,
      "learning_rate": 1.1424645952229973e-05,
      "loss": 0.5042,
      "step": 3650
    },
    {
      "epoch": 2.3462270133164234,
      "grad_norm": 0.5581865310668945,
      "learning_rate": 1.0896216444726275e-05,
      "loss": 0.4988,
      "step": 3700
    },
    {
      "epoch": 2.3779327837666457,
      "grad_norm": 0.7080293893814087,
      "learning_rate": 1.0367786937222575e-05,
      "loss": 0.4647,
      "step": 3750
    },
    {
      "epoch": 2.4096385542168672,
      "grad_norm": 1.065440058708191,
      "learning_rate": 9.839357429718876e-06,
      "loss": 0.4572,
      "step": 3800
    },
    {
      "epoch": 2.4413443246670896,
      "grad_norm": 0.6128831505775452,
      "learning_rate": 9.310927922215176e-06,
      "loss": 0.4352,
      "step": 3850
    },
    {
      "epoch": 2.473050095117311,
      "grad_norm": 0.6767387390136719,
      "learning_rate": 8.782498414711479e-06,
      "loss": 0.4352,
      "step": 3900
    },
    {
      "epoch": 2.5047558655675335,
      "grad_norm": 0.56428062915802,
      "learning_rate": 8.254068907207779e-06,
      "loss": 0.5202,
      "step": 3950
    },
    {
      "epoch": 2.536461636017755,
      "grad_norm": 0.9430966973304749,
      "learning_rate": 7.72563939970408e-06,
      "loss": 0.5099,
      "step": 4000
    },
    {
      "epoch": 2.5681674064679774,
      "grad_norm": 1.0679271221160889,
      "learning_rate": 7.197209892200381e-06,
      "loss": 0.5605,
      "step": 4050
    },
    {
      "epoch": 2.599873176918199,
      "grad_norm": 0.5181096792221069,
      "learning_rate": 6.6687803846966825e-06,
      "loss": 0.4438,
      "step": 4100
    },
    {
      "epoch": 2.6315789473684212,
      "grad_norm": 0.6481731534004211,
      "learning_rate": 6.140350877192982e-06,
      "loss": 0.5255,
      "step": 4150
    },
    {
      "epoch": 2.6632847178186427,
      "grad_norm": 0.6905925869941711,
      "learning_rate": 5.6119213696892836e-06,
      "loss": 0.5113,
      "step": 4200
    },
    {
      "epoch": 2.694990488268865,
      "grad_norm": 0.7039913535118103,
      "learning_rate": 5.083491862185585e-06,
      "loss": 0.5378,
      "step": 4250
    },
    {
      "epoch": 2.7266962587190866,
      "grad_norm": 0.6583430171012878,
      "learning_rate": 4.5550623546818855e-06,
      "loss": 0.4796,
      "step": 4300
    },
    {
      "epoch": 2.758402029169309,
      "grad_norm": 1.0837993621826172,
      "learning_rate": 4.026632847178187e-06,
      "loss": 0.4655,
      "step": 4350
    },
    {
      "epoch": 2.7901077996195305,
      "grad_norm": 0.7365415692329407,
      "learning_rate": 3.498203339674488e-06,
      "loss": 0.3938,
      "step": 4400
    },
    {
      "epoch": 2.821813570069753,
      "grad_norm": 0.5105493664741516,
      "learning_rate": 2.9697738321707884e-06,
      "loss": 0.4043,
      "step": 4450
    },
    {
      "epoch": 2.853519340519975,
      "grad_norm": 0.9684469103813171,
      "learning_rate": 2.4413443246670898e-06,
      "loss": 0.4907,
      "step": 4500
    }
  ],
  "logging_steps": 50,
  "max_steps": 4731,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4702733991936000.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}