{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.996178343949045,
  "eval_steps": 500,
  "global_step": 1470,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02038216560509554,
      "grad_norm": 7.3640559222825805,
      "learning_rate": 6.756756756756758e-07,
      "loss": 0.5218,
      "step": 10
    },
    {
      "epoch": 0.04076433121019108,
      "grad_norm": 1.9597929093946138,
      "learning_rate": 1.3513513513513515e-06,
      "loss": 0.4442,
      "step": 20
    },
    {
      "epoch": 0.061146496815286625,
      "grad_norm": 1.366556978476813,
      "learning_rate": 2.0270270270270273e-06,
      "loss": 0.3968,
      "step": 30
    },
    {
      "epoch": 0.08152866242038216,
      "grad_norm": 1.5343177845993703,
      "learning_rate": 2.702702702702703e-06,
      "loss": 0.3717,
      "step": 40
    },
    {
      "epoch": 0.10191082802547771,
      "grad_norm": 1.5235479557468576,
      "learning_rate": 3.3783783783783788e-06,
      "loss": 0.3573,
      "step": 50
    },
    {
      "epoch": 0.12229299363057325,
      "grad_norm": 1.5210742465140967,
      "learning_rate": 4.0540540540540545e-06,
      "loss": 0.3447,
      "step": 60
    },
    {
      "epoch": 0.14267515923566879,
      "grad_norm": 1.4560884271057324,
      "learning_rate": 4.72972972972973e-06,
      "loss": 0.336,
      "step": 70
    },
    {
      "epoch": 0.16305732484076432,
      "grad_norm": 1.3653484275359835,
      "learning_rate": 4.999794894487084e-06,
      "loss": 0.3285,
      "step": 80
    },
    {
      "epoch": 0.18343949044585986,
      "grad_norm": 1.5231339353031599,
      "learning_rate": 4.998541607324267e-06,
      "loss": 0.3226,
      "step": 90
    },
    {
      "epoch": 0.20382165605095542,
      "grad_norm": 1.34927340034923,
      "learning_rate": 4.996149614425655e-06,
      "loss": 0.3234,
      "step": 100
    },
    {
      "epoch": 0.22420382165605096,
      "grad_norm": 1.3383365926789343,
      "learning_rate": 4.992620127143546e-06,
      "loss": 0.3159,
      "step": 110
    },
    {
      "epoch": 0.2445859872611465,
      "grad_norm": 1.279507514764783,
      "learning_rate": 4.987954932879781e-06,
      "loss": 0.3155,
      "step": 120
    },
    {
      "epoch": 0.26496815286624203,
      "grad_norm": 1.2523662594214922,
      "learning_rate": 4.982156394180578e-06,
      "loss": 0.3089,
      "step": 130
    },
    {
      "epoch": 0.28535031847133757,
      "grad_norm": 1.5508610615211977,
      "learning_rate": 4.975227447540084e-06,
      "loss": 0.3081,
      "step": 140
    },
    {
      "epoch": 0.3057324840764331,
      "grad_norm": 1.5577467324922831,
      "learning_rate": 4.967171601913286e-06,
      "loss": 0.2999,
      "step": 150
    },
    {
      "epoch": 0.32611464968152865,
      "grad_norm": 1.4024580607747494,
      "learning_rate": 4.957992936939001e-06,
      "loss": 0.3014,
      "step": 160
    },
    {
      "epoch": 0.3464968152866242,
      "grad_norm": 1.397084384608207,
      "learning_rate": 4.947696100873868e-06,
      "loss": 0.3003,
      "step": 170
    },
    {
      "epoch": 0.3668789808917197,
      "grad_norm": 1.2608713430895078,
      "learning_rate": 4.936286308238376e-06,
      "loss": 0.2962,
      "step": 180
    },
    {
      "epoch": 0.3872611464968153,
      "grad_norm": 1.4602055977294606,
      "learning_rate": 4.923769337176137e-06,
      "loss": 0.2949,
      "step": 190
    },
    {
      "epoch": 0.40764331210191085,
      "grad_norm": 1.3574725912947458,
      "learning_rate": 4.910151526527711e-06,
      "loss": 0.2913,
      "step": 200
    },
    {
      "epoch": 0.4280254777070064,
      "grad_norm": 1.3255996037154008,
      "learning_rate": 4.895439772620496e-06,
      "loss": 0.2909,
      "step": 210
    },
    {
      "epoch": 0.4484076433121019,
      "grad_norm": 1.209522300925548,
      "learning_rate": 4.879641525776294e-06,
      "loss": 0.2874,
      "step": 220
    },
    {
      "epoch": 0.46878980891719746,
      "grad_norm": 1.3036670455198982,
      "learning_rate": 4.8627647865383185e-06,
      "loss": 0.2887,
      "step": 230
    },
    {
      "epoch": 0.489171974522293,
      "grad_norm": 1.4420527770171871,
      "learning_rate": 4.844818101619563e-06,
      "loss": 0.2893,
      "step": 240
    },
    {
      "epoch": 0.5095541401273885,
      "grad_norm": 1.5008539741569011,
      "learning_rate": 4.825810559574577e-06,
      "loss": 0.2831,
      "step": 250
    },
    {
      "epoch": 0.5299363057324841,
      "grad_norm": 1.1363685690254681,
      "learning_rate": 4.805751786196844e-06,
      "loss": 0.2781,
      "step": 260
    },
    {
      "epoch": 0.5503184713375796,
      "grad_norm": 1.3886569509043332,
      "learning_rate": 4.784651939644088e-06,
      "loss": 0.2812,
      "step": 270
    },
    {
      "epoch": 0.5707006369426751,
      "grad_norm": 1.208611174951388,
      "learning_rate": 4.762521705293985e-06,
      "loss": 0.2839,
      "step": 280
    },
    {
      "epoch": 0.5910828025477707,
      "grad_norm": 1.163584249987649,
      "learning_rate": 4.739372290332876e-06,
      "loss": 0.2772,
      "step": 290
    },
    {
      "epoch": 0.6114649681528662,
      "grad_norm": 1.2694569325884293,
      "learning_rate": 4.715215418080228e-06,
      "loss": 0.2774,
      "step": 300
    },
    {
      "epoch": 0.6318471337579618,
      "grad_norm": 1.1438695314175629,
      "learning_rate": 4.690063322051714e-06,
      "loss": 0.279,
      "step": 310
    },
    {
      "epoch": 0.6522292993630573,
      "grad_norm": 1.1455759975487239,
      "learning_rate": 4.6639287397639175e-06,
      "loss": 0.2762,
      "step": 320
    },
    {
      "epoch": 0.6726114649681528,
      "grad_norm": 1.44840804645582,
      "learning_rate": 4.636824906283807e-06,
      "loss": 0.2752,
      "step": 330
    },
    {
      "epoch": 0.6929936305732484,
      "grad_norm": 1.1318324006100882,
      "learning_rate": 4.608765547526235e-06,
      "loss": 0.2696,
      "step": 340
    },
    {
      "epoch": 0.7133757961783439,
      "grad_norm": 1.4105199522926484,
      "learning_rate": 4.5797648733028665e-06,
      "loss": 0.2714,
      "step": 350
    },
    {
      "epoch": 0.7337579617834394,
      "grad_norm": 1.1828019600051816,
      "learning_rate": 4.54983757012605e-06,
      "loss": 0.2675,
      "step": 360
    },
    {
      "epoch": 0.7541401273885351,
      "grad_norm": 1.1209568717673557,
      "learning_rate": 4.518998793771276e-06,
      "loss": 0.2698,
      "step": 370
    },
    {
      "epoch": 0.7745222929936306,
      "grad_norm": 1.3068840388262846,
      "learning_rate": 4.487264161601997e-06,
      "loss": 0.2666,
      "step": 380
    },
    {
      "epoch": 0.7949044585987262,
      "grad_norm": 1.40475775183392,
      "learning_rate": 4.454649744660687e-06,
      "loss": 0.2687,
      "step": 390
    },
    {
      "epoch": 0.8152866242038217,
      "grad_norm": 1.064147060517822,
      "learning_rate": 4.4211720595301474e-06,
      "loss": 0.2668,
      "step": 400
    },
    {
      "epoch": 0.8356687898089172,
      "grad_norm": 1.273665530555761,
      "learning_rate": 4.386848059969186e-06,
      "loss": 0.2655,
      "step": 410
    },
    {
      "epoch": 0.8560509554140128,
      "grad_norm": 1.2644936064921932,
      "learning_rate": 4.351695128326899e-06,
      "loss": 0.267,
      "step": 420
    },
    {
      "epoch": 0.8764331210191083,
      "grad_norm": 1.1524074004874778,
      "learning_rate": 4.315731066739907e-06,
      "loss": 0.2621,
      "step": 430
    },
    {
      "epoch": 0.8968152866242038,
      "grad_norm": 1.2734388359258864,
      "learning_rate": 4.278974088117002e-06,
      "loss": 0.2646,
      "step": 440
    },
    {
      "epoch": 0.9171974522292994,
      "grad_norm": 1.1535683141057886,
      "learning_rate": 4.241442806915776e-06,
      "loss": 0.2622,
      "step": 450
    },
    {
      "epoch": 0.9375796178343949,
      "grad_norm": 1.1270411566076795,
      "learning_rate": 4.203156229715885e-06,
      "loss": 0.2647,
      "step": 460
    },
    {
      "epoch": 0.9579617834394905,
      "grad_norm": 1.1492915594780593,
      "learning_rate": 4.164133745593752e-06,
      "loss": 0.2604,
      "step": 470
    },
    {
      "epoch": 0.978343949044586,
      "grad_norm": 1.1077787065388747,
      "learning_rate": 4.1243951163035515e-06,
      "loss": 0.2589,
      "step": 480
    },
    {
      "epoch": 0.9987261146496815,
      "grad_norm": 1.04425073531816,
      "learning_rate": 4.083960466269468e-06,
      "loss": 0.2567,
      "step": 490
    },
    {
      "epoch": 0.9987261146496815,
      "eval_loss": 0.032234665006399155,
      "eval_runtime": 329.9438,
      "eval_samples_per_second": 40.07,
      "eval_steps_per_second": 0.627,
      "step": 490
    },
    {
      "epoch": 1.019108280254777,
      "grad_norm": 1.4452926570009665,
      "learning_rate": 4.042850272394296e-06,
      "loss": 0.1917,
      "step": 500
    },
    {
      "epoch": 1.0394904458598726,
      "grad_norm": 1.1692376024393614,
      "learning_rate": 4.001085353689527e-06,
      "loss": 0.1828,
      "step": 510
    },
    {
      "epoch": 1.0598726114649681,
      "grad_norm": 1.2077457145616015,
      "learning_rate": 3.958686860732198e-06,
      "loss": 0.1823,
      "step": 520
    },
    {
      "epoch": 1.0802547770700637,
      "grad_norm": 1.1829681497737912,
      "learning_rate": 3.915676264953819e-06,
      "loss": 0.1824,
      "step": 530
    },
    {
      "epoch": 1.1006369426751592,
      "grad_norm": 1.212198809480288,
      "learning_rate": 3.8720753477668174e-06,
      "loss": 0.1835,
      "step": 540
    },
    {
      "epoch": 1.1210191082802548,
      "grad_norm": 1.4514789562338672,
      "learning_rate": 3.8279061895340065e-06,
      "loss": 0.1838,
      "step": 550
    },
    {
      "epoch": 1.1414012738853503,
      "grad_norm": 1.2292597073935565,
      "learning_rate": 3.783191158386649e-06,
      "loss": 0.1799,
      "step": 560
    },
    {
      "epoch": 1.1617834394904458,
      "grad_norm": 1.091816482106913,
      "learning_rate": 3.7379528988968004e-06,
      "loss": 0.1797,
      "step": 570
    },
    {
      "epoch": 1.1821656050955414,
      "grad_norm": 1.1348703066662176,
      "learning_rate": 3.6922143206096463e-06,
      "loss": 0.1814,
      "step": 580
    },
    {
      "epoch": 1.202547770700637,
      "grad_norm": 1.2696934307053864,
      "learning_rate": 3.645998586441664e-06,
      "loss": 0.1814,
      "step": 590
    },
    {
      "epoch": 1.2229299363057324,
      "grad_norm": 1.1409724326559059,
      "learning_rate": 3.5993291009504583e-06,
      "loss": 0.1828,
      "step": 600
    },
    {
      "epoch": 1.243312101910828,
      "grad_norm": 1.2402396526237325,
      "learning_rate": 3.5522294984822414e-06,
      "loss": 0.1842,
      "step": 610
    },
    {
      "epoch": 1.2636942675159235,
      "grad_norm": 1.289864855038617,
      "learning_rate": 3.504723631202927e-06,
      "loss": 0.1824,
      "step": 620
    },
    {
      "epoch": 1.284076433121019,
      "grad_norm": 1.1569779602515864,
      "learning_rate": 3.4568355570189293e-06,
      "loss": 0.1833,
      "step": 630
    },
    {
      "epoch": 1.3044585987261146,
      "grad_norm": 1.1664095681219364,
      "learning_rate": 3.4085895273937616e-06,
      "loss": 0.1822,
      "step": 640
    },
    {
      "epoch": 1.3248407643312101,
      "grad_norm": 1.1062062982676109,
      "learning_rate": 3.3600099750666186e-06,
      "loss": 0.1801,
      "step": 650
    },
    {
      "epoch": 1.3452229299363057,
      "grad_norm": 1.0870703803021455,
      "learning_rate": 3.3111215016791615e-06,
      "loss": 0.1811,
      "step": 660
    },
    {
      "epoch": 1.3656050955414012,
      "grad_norm": 1.070032061400707,
      "learning_rate": 3.2619488653167524e-06,
      "loss": 0.1816,
      "step": 670
    },
    {
      "epoch": 1.385987261146497,
      "grad_norm": 1.3367455282881715,
      "learning_rate": 3.2125169679704816e-06,
      "loss": 0.1813,
      "step": 680
    },
    {
      "epoch": 1.4063694267515925,
      "grad_norm": 1.187247916290702,
      "learning_rate": 3.1628508429263055e-06,
      "loss": 0.1779,
      "step": 690
    },
    {
      "epoch": 1.426751592356688,
      "grad_norm": 1.1091917590396907,
      "learning_rate": 3.112975642087696e-06,
      "loss": 0.1802,
      "step": 700
    },
    {
      "epoch": 1.4471337579617836,
      "grad_norm": 1.123698745710789,
      "learning_rate": 3.0629166232382244e-06,
      "loss": 0.1814,
      "step": 710
    },
    {
      "epoch": 1.467515923566879,
      "grad_norm": 1.1042223426325448,
      "learning_rate": 3.012699137250523e-06,
      "loss": 0.1824,
      "step": 720
    },
    {
      "epoch": 1.4878980891719746,
      "grad_norm": 1.050554441234522,
      "learning_rate": 2.962348615248099e-06,
      "loss": 0.1796,
      "step": 730
    },
    {
      "epoch": 1.5082802547770702,
      "grad_norm": 1.2199041359267095,
      "learning_rate": 2.9118905557265194e-06,
      "loss": 0.181,
      "step": 740
    },
    {
      "epoch": 1.5286624203821657,
      "grad_norm": 1.1808278399972332,
      "learning_rate": 2.861350511640466e-06,
      "loss": 0.1803,
      "step": 750
    },
    {
      "epoch": 1.5490445859872612,
      "grad_norm": 1.0852792148234631,
      "learning_rate": 2.81075407746322e-06,
      "loss": 0.1798,
      "step": 760
    },
    {
      "epoch": 1.5694267515923568,
      "grad_norm": 1.1397500179113436,
      "learning_rate": 2.760126876225113e-06,
      "loss": 0.1794,
      "step": 770
    },
    {
      "epoch": 1.5898089171974523,
      "grad_norm": 1.0868979078693752,
      "learning_rate": 2.7094945465375237e-06,
      "loss": 0.1784,
      "step": 780
    },
    {
      "epoch": 1.6101910828025479,
      "grad_norm": 1.185135837873844,
      "learning_rate": 2.6588827296089765e-06,
      "loss": 0.1796,
      "step": 790
    },
    {
      "epoch": 1.6305732484076434,
      "grad_norm": 1.0667742842947785,
      "learning_rate": 2.608317056259933e-06,
      "loss": 0.1778,
      "step": 800
    },
    {
      "epoch": 1.650955414012739,
      "grad_norm": 1.1263879048027836,
      "learning_rate": 2.557823133942836e-06,
      "loss": 0.1818,
      "step": 810
    },
    {
      "epoch": 1.6713375796178345,
      "grad_norm": 1.164600608354229,
      "learning_rate": 2.507426533773994e-06,
      "loss": 0.1818,
      "step": 820
    },
    {
      "epoch": 1.69171974522293,
      "grad_norm": 1.1434673428111148,
      "learning_rate": 2.4571527775838637e-06,
      "loss": 0.1785,
      "step": 830
    },
    {
      "epoch": 1.7121019108280255,
      "grad_norm": 1.1080982356818843,
      "learning_rate": 2.407027324992293e-06,
      "loss": 0.1767,
      "step": 840
    },
    {
      "epoch": 1.732484076433121,
      "grad_norm": 1.1236879328083964,
      "learning_rate": 2.357075560515267e-06,
      "loss": 0.1764,
      "step": 850
    },
    {
      "epoch": 1.7528662420382166,
      "grad_norm": 1.07775485257901,
      "learning_rate": 2.3073227807096903e-06,
      "loss": 0.1766,
      "step": 860
    },
    {
      "epoch": 1.7732484076433122,
      "grad_norm": 1.0978325229339359,
      "learning_rate": 2.2577941813627137e-06,
      "loss": 0.1776,
      "step": 870
    },
    {
      "epoch": 1.7936305732484077,
      "grad_norm": 1.0266269472342355,
      "learning_rate": 2.2085148447320886e-06,
      "loss": 0.1763,
      "step": 880
    },
    {
      "epoch": 1.8140127388535032,
      "grad_norm": 1.075781931859834,
      "learning_rate": 2.1595097268440215e-06,
      "loss": 0.1761,
      "step": 890
    },
    {
      "epoch": 1.8343949044585988,
      "grad_norm": 1.1486032295285624,
      "learning_rate": 2.110803644854949e-06,
      "loss": 0.1756,
      "step": 900
    },
    {
      "epoch": 1.8547770700636943,
      "grad_norm": 1.0561746481203738,
      "learning_rate": 2.062421264483641e-06,
      "loss": 0.1754,
      "step": 910
    },
    {
      "epoch": 1.8751592356687898,
      "grad_norm": 1.1673017813466813,
      "learning_rate": 2.0143870875199952e-06,
      "loss": 0.1764,
      "step": 920
    },
    {
      "epoch": 1.8955414012738854,
      "grad_norm": 1.1439217602579965,
      "learning_rate": 1.9667254394168497e-06,
      "loss": 0.1742,
      "step": 930
    },
    {
      "epoch": 1.915923566878981,
      "grad_norm": 1.1584206154770065,
      "learning_rate": 1.9194604569710933e-06,
      "loss": 0.1795,
      "step": 940
    },
    {
      "epoch": 1.9363057324840764,
      "grad_norm": 1.0480077245390713,
      "learning_rate": 1.8726160761003172e-06,
      "loss": 0.1748,
      "step": 950
    },
    {
      "epoch": 1.956687898089172,
      "grad_norm": 1.1162273428280736,
      "learning_rate": 1.8262160197211954e-06,
      "loss": 0.1761,
      "step": 960
    },
    {
      "epoch": 1.9770700636942675,
      "grad_norm": 1.1160462930422237,
      "learning_rate": 1.780283785735729e-06,
      "loss": 0.174,
      "step": 970
    },
    {
      "epoch": 1.997452229299363,
      "grad_norm": 1.062423344058906,
      "learning_rate": 1.734842635131455e-06,
      "loss": 0.1749,
      "step": 980
    },
    {
      "epoch": 1.9994904458598726,
      "eval_loss": 0.030678020790219307,
      "eval_runtime": 338.9213,
      "eval_samples_per_second": 39.009,
      "eval_steps_per_second": 0.611,
      "step": 981
    },
    {
      "epoch": 2.0178343949044586,
      "grad_norm": 1.4150638555592978,
      "learning_rate": 1.689915580201614e-06,
      "loss": 0.1179,
      "step": 990
    },
    {
      "epoch": 2.038216560509554,
      "grad_norm": 1.230931823658668,
      "learning_rate": 1.6455253728912765e-06,
      "loss": 0.106,
      "step": 1000
    },
    {
      "epoch": 2.0585987261146497,
      "grad_norm": 1.078514779623916,
      "learning_rate": 1.6016944932753123e-06,
      "loss": 0.1055,
      "step": 1010
    },
    {
      "epoch": 2.078980891719745,
      "grad_norm": 1.1594743845163793,
      "learning_rate": 1.558445138174033e-06,
      "loss": 0.1068,
      "step": 1020
    },
    {
      "epoch": 2.0993630573248407,
      "grad_norm": 1.1071569450433687,
      "learning_rate": 1.5157992099122896e-06,
      "loss": 0.1061,
      "step": 1030
    },
    {
      "epoch": 2.1197452229299363,
      "grad_norm": 1.1427254217014393,
      "learning_rate": 1.4737783052277017e-06,
      "loss": 0.1054,
      "step": 1040
    },
    {
      "epoch": 2.140127388535032,
      "grad_norm": 1.1862335656291514,
      "learning_rate": 1.432403704333643e-06,
      "loss": 0.1056,
      "step": 1050
    },
    {
      "epoch": 2.1605095541401274,
      "grad_norm": 1.1251247878141808,
      "learning_rate": 1.3916963601425293e-06,
      "loss": 0.1058,
      "step": 1060
    },
    {
      "epoch": 2.180891719745223,
      "grad_norm": 1.1084298272819255,
      "learning_rate": 1.3516768876548404e-06,
      "loss": 0.1068,
      "step": 1070
    },
    {
      "epoch": 2.2012738853503184,
      "grad_norm": 1.1823405832071576,
      "learning_rate": 1.3123655535192873e-06,
      "loss": 0.1059,
      "step": 1080
    },
    {
      "epoch": 2.221656050955414,
      "grad_norm": 1.1309238431972,
      "learning_rate": 1.2737822657693763e-06,
      "loss": 0.106,
      "step": 1090
    },
    {
      "epoch": 2.2420382165605095,
      "grad_norm": 1.1830449008249062,
      "learning_rate": 1.235946563741594e-06,
      "loss": 0.106,
      "step": 1100
    },
    {
      "epoch": 2.262420382165605,
      "grad_norm": 1.1465025990749025,
      "learning_rate": 1.1988776081802975e-06,
      "loss": 0.1049,
      "step": 1110
    },
    {
      "epoch": 2.2828025477707006,
      "grad_norm": 1.183957689341336,
      "learning_rate": 1.1625941715343404e-06,
      "loss": 0.1054,
      "step": 1120
    },
    {
      "epoch": 2.303184713375796,
      "grad_norm": 1.1333744410647408,
      "learning_rate": 1.1271146284503326e-06,
      "loss": 0.1055,
      "step": 1130
    },
    {
      "epoch": 2.3235668789808916,
      "grad_norm": 1.1879433920175209,
      "learning_rate": 1.0924569464673593e-06,
      "loss": 0.1055,
      "step": 1140
    },
    {
      "epoch": 2.343949044585987,
      "grad_norm": 1.146900632019417,
      "learning_rate": 1.0586386769178644e-06,
      "loss": 0.106,
      "step": 1150
    },
    {
      "epoch": 2.3643312101910827,
      "grad_norm": 1.132036422681508,
      "learning_rate": 1.0256769460393087e-06,
      "loss": 0.1056,
      "step": 1160
    },
    {
      "epoch": 2.3847133757961783,
      "grad_norm": 1.0853627031360338,
      "learning_rate": 9.935884463011108e-07,
      "loss": 0.1045,
      "step": 1170
    },
    {
      "epoch": 2.405095541401274,
      "grad_norm": 1.1322288498135122,
      "learning_rate": 9.623894279512455e-07,
      "loss": 0.1054,
      "step": 1180
    },
    {
      "epoch": 2.4254777070063693,
      "grad_norm": 1.0894698960326725,
      "learning_rate": 9.320956907868051e-07,
      "loss": 0.1056,
      "step": 1190
    },
    {
      "epoch": 2.445859872611465,
      "grad_norm": 1.1474536034439595,
      "learning_rate": 9.027225761526653e-07,
      "loss": 0.1043,
      "step": 1200
    },
    {
      "epoch": 2.4662420382165604,
      "grad_norm": 1.1143668115183167,
      "learning_rate": 8.742849591723315e-07,
      "loss": 0.1065,
      "step": 1210
    },
    {
      "epoch": 2.486624203821656,
      "grad_norm": 1.1060933833835738,
      "learning_rate": 8.467972412148767e-07,
      "loss": 0.1049,
      "step": 1220
    },
    {
      "epoch": 2.5070063694267515,
      "grad_norm": 1.1258549382443122,
      "learning_rate": 8.202733426018117e-07,
      "loss": 0.1039,
      "step": 1230
    },
    {
      "epoch": 2.527388535031847,
      "grad_norm": 1.1747748534183018,
      "learning_rate": 7.947266955575562e-07,
      "loss": 0.1061,
      "step": 1240
    },
    {
      "epoch": 2.5477707006369426,
      "grad_norm": 1.1215150862371803,
      "learning_rate": 7.701702374070962e-07,
      "loss": 0.1054,
      "step": 1250
    },
    {
      "epoch": 2.568152866242038,
      "grad_norm": 1.1567778163976754,
      "learning_rate": 7.466164040242694e-07,
      "loss": 0.104,
      "step": 1260
    },
    {
      "epoch": 2.5885350318471336,
      "grad_norm": 1.1338966482025068,
      "learning_rate": 7.24077123533992e-07,
      "loss": 0.1048,
      "step": 1270
    },
    {
      "epoch": 2.608917197452229,
      "grad_norm": 1.0699364001483016,
      "learning_rate": 7.025638102716238e-07,
      "loss": 0.1046,
      "step": 1280
    },
    {
      "epoch": 2.6292993630573247,
      "grad_norm": 1.085055290267827,
      "learning_rate": 6.820873590025216e-07,
      "loss": 0.1042,
      "step": 1290
    },
    {
      "epoch": 2.6496815286624202,
      "grad_norm": 1.085099277831117,
      "learning_rate": 6.626581394047174e-07,
      "loss": 0.1043,
      "step": 1300
    },
    {
      "epoch": 2.6700636942675158,
      "grad_norm": 1.1741130632881172,
      "learning_rate": 6.442859908175084e-07,
      "loss": 0.1053,
      "step": 1310
    },
    {
      "epoch": 2.6904458598726113,
      "grad_norm": 1.086080275618794,
      "learning_rate": 6.269802172586215e-07,
      "loss": 0.1055,
      "step": 1320
    },
    {
      "epoch": 2.710828025477707,
      "grad_norm": 1.1054751783303671,
      "learning_rate": 6.107495827124764e-07,
      "loss": 0.1041,
      "step": 1330
    },
    {
      "epoch": 2.7312101910828024,
      "grad_norm": 1.0983120768210253,
      "learning_rate": 5.956023066919313e-07,
      "loss": 0.1026,
      "step": 1340
    },
    {
      "epoch": 2.7515923566878984,
      "grad_norm": 1.109557569000835,
      "learning_rate": 5.815460600757599e-07,
      "loss": 0.1037,
      "step": 1350
    },
    {
      "epoch": 2.771974522292994,
      "grad_norm": 1.1282912667207265,
      "learning_rate": 5.685879612239649e-07,
      "loss": 0.1057,
      "step": 1360
    },
    {
      "epoch": 2.7923566878980894,
      "grad_norm": 1.0863425429571905,
      "learning_rate": 5.567345723729062e-07,
      "loss": 0.1047,
      "step": 1370
    },
    {
      "epoch": 2.812738853503185,
      "grad_norm": 1.0546515835835897,
      "learning_rate": 5.459918963120477e-07,
      "loss": 0.1042,
      "step": 1380
    },
    {
      "epoch": 2.8331210191082805,
      "grad_norm": 1.0748377352085035,
      "learning_rate": 5.363653733440328e-07,
      "loss": 0.1057,
      "step": 1390
    },
    {
      "epoch": 2.853503184713376,
      "grad_norm": 1.0869969236098675,
      "learning_rate": 5.278598785296044e-07,
      "loss": 0.105,
      "step": 1400
    },
    {
      "epoch": 2.8738853503184716,
      "grad_norm": 1.0525214029938958,
      "learning_rate": 5.2047971921878e-07,
      "loss": 0.1039,
      "step": 1410
    },
    {
      "epoch": 2.894267515923567,
      "grad_norm": 1.0788702761102988,
      "learning_rate": 5.142286328695235e-07,
      "loss": 0.105,
      "step": 1420
    },
    {
      "epoch": 2.9146496815286627,
      "grad_norm": 1.0984819174990816,
      "learning_rate": 5.091097851550239e-07,
      "loss": 0.1032,
      "step": 1430
    },
    {
      "epoch": 2.935031847133758,
      "grad_norm": 1.072624641241873,
      "learning_rate": 5.051257683605363e-07,
      "loss": 0.1036,
      "step": 1440
    },
    {
      "epoch": 2.9554140127388537,
      "grad_norm": 1.116104824847335,
      "learning_rate": 5.022786000705963e-07,
      "loss": 0.1027,
      "step": 1450
    },
    {
      "epoch": 2.9757961783439493,
      "grad_norm": 1.0814921699434623,
      "learning_rate": 5.005697221472769e-07,
      "loss": 0.1029,
      "step": 1460
    },
    {
      "epoch": 2.996178343949045,
      "grad_norm": 1.0668548799222997,
      "learning_rate": 5e-07,
      "loss": 0.1027,
      "step": 1470
    },
    {
      "epoch": 2.996178343949045,
      "eval_loss": 0.03400026634335518,
      "eval_runtime": 335.9006,
      "eval_samples_per_second": 39.36,
      "eval_steps_per_second": 0.616,
      "step": 1470
    },
    {
      "epoch": 2.996178343949045,
      "step": 1470,
      "total_flos": 2462095371141120.0,
      "train_loss": 0.19492420712295844,
      "train_runtime": 47387.4935,
      "train_samples_per_second": 15.902,
      "train_steps_per_second": 0.031
    }
  ],
  "logging_steps": 10,
  "max_steps": 1470,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2462095371141120.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}