|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.1212068666551067, |
|
"eval_steps": 1000, |
|
"global_step": 9000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0034680076296167853, |
|
"grad_norm": 0.7528897523880005, |
|
"learning_rate": 0.00019982, |
|
"loss": 4.9086, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.006936015259233571, |
|
"grad_norm": 0.17007838189601898, |
|
"learning_rate": 0.00019962000000000002, |
|
"loss": 0.1029, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.010404022888850355, |
|
"grad_norm": 0.12162350863218307, |
|
"learning_rate": 0.00019942, |
|
"loss": 0.0687, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.013872030518467141, |
|
"grad_norm": 0.08915918320417404, |
|
"learning_rate": 0.00019922, |
|
"loss": 0.0636, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.017340038148083926, |
|
"grad_norm": 0.11356709897518158, |
|
"learning_rate": 0.00019902, |
|
"loss": 0.0623, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02080804577770071, |
|
"grad_norm": 0.09400122612714767, |
|
"learning_rate": 0.00019882, |
|
"loss": 0.0561, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.024276053407317495, |
|
"grad_norm": 0.1102517619729042, |
|
"learning_rate": 0.00019862000000000002, |
|
"loss": 0.0603, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.027744061036934282, |
|
"grad_norm": 0.09301582723855972, |
|
"learning_rate": 0.00019842000000000001, |
|
"loss": 0.0559, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.031212068666551067, |
|
"grad_norm": 0.08400452882051468, |
|
"learning_rate": 0.00019822, |
|
"loss": 0.0645, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.03468007629616785, |
|
"grad_norm": 0.10444998741149902, |
|
"learning_rate": 0.00019802, |
|
"loss": 0.0644, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.038148083925784636, |
|
"grad_norm": 0.06524047255516052, |
|
"learning_rate": 0.00019782, |
|
"loss": 0.0599, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.04161609155540142, |
|
"grad_norm": 0.0640910267829895, |
|
"learning_rate": 0.00019762, |
|
"loss": 0.0542, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.045084099185018205, |
|
"grad_norm": 0.07332012802362442, |
|
"learning_rate": 0.00019742000000000002, |
|
"loss": 0.0567, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.04855210681463499, |
|
"grad_norm": 4.02905797958374, |
|
"learning_rate": 0.00019722, |
|
"loss": 0.0592, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.05202011444425178, |
|
"grad_norm": 0.09805350750684738, |
|
"learning_rate": 0.00019702, |
|
"loss": 0.0657, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.055488122073868565, |
|
"grad_norm": 0.056836508214473724, |
|
"learning_rate": 0.00019682, |
|
"loss": 0.0523, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.05895612970348535, |
|
"grad_norm": 0.08670804649591446, |
|
"learning_rate": 0.00019662, |
|
"loss": 0.0585, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.062424137333102134, |
|
"grad_norm": 0.08405158668756485, |
|
"learning_rate": 0.00019642, |
|
"loss": 0.0537, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.06589214496271892, |
|
"grad_norm": 0.0825357437133789, |
|
"learning_rate": 0.00019622000000000002, |
|
"loss": 0.055, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.0693601525923357, |
|
"grad_norm": 0.06040720269083977, |
|
"learning_rate": 0.00019602, |
|
"loss": 0.0554, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.07282816022195249, |
|
"grad_norm": 0.08198798447847366, |
|
"learning_rate": 0.00019582, |
|
"loss": 0.0599, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.07629616785156927, |
|
"grad_norm": 0.05527138710021973, |
|
"learning_rate": 0.00019562, |
|
"loss": 0.0533, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.07976417548118606, |
|
"grad_norm": 0.07315631955862045, |
|
"learning_rate": 0.00019542, |
|
"loss": 0.0528, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.08323218311080284, |
|
"grad_norm": 0.08420062810182571, |
|
"learning_rate": 0.00019522, |
|
"loss": 0.0518, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.08670019074041962, |
|
"grad_norm": 0.0529639795422554, |
|
"learning_rate": 0.00019502, |
|
"loss": 0.0568, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.09016819837003641, |
|
"grad_norm": 0.09339221566915512, |
|
"learning_rate": 0.00019482, |
|
"loss": 0.0557, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.0936362059996532, |
|
"grad_norm": 0.06453025341033936, |
|
"learning_rate": 0.00019462, |
|
"loss": 0.062, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.09710421362926998, |
|
"grad_norm": 0.07090363651514053, |
|
"learning_rate": 0.00019442, |
|
"loss": 0.0609, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.10057222125888678, |
|
"grad_norm": 0.1122497022151947, |
|
"learning_rate": 0.00019422, |
|
"loss": 0.0541, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.10404022888850356, |
|
"grad_norm": 0.09782398492097855, |
|
"learning_rate": 0.00019402, |
|
"loss": 0.0553, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.10750823651812035, |
|
"grad_norm": 0.06216060370206833, |
|
"learning_rate": 0.00019382, |
|
"loss": 0.0483, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.11097624414773713, |
|
"grad_norm": 0.08817891031503677, |
|
"learning_rate": 0.00019362, |
|
"loss": 0.058, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.11444425177735391, |
|
"grad_norm": 0.07251620292663574, |
|
"learning_rate": 0.00019342, |
|
"loss": 0.0477, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.1179122594069707, |
|
"grad_norm": 0.059537626802921295, |
|
"learning_rate": 0.00019322, |
|
"loss": 0.0559, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.12138026703658748, |
|
"grad_norm": 0.06329932063817978, |
|
"learning_rate": 0.00019302, |
|
"loss": 0.0557, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.12484827466620427, |
|
"grad_norm": 0.06601905822753906, |
|
"learning_rate": 0.00019282000000000001, |
|
"loss": 0.0575, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.12831628229582104, |
|
"grad_norm": 0.0639985054731369, |
|
"learning_rate": 0.00019262, |
|
"loss": 0.0519, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.13178428992543784, |
|
"grad_norm": 0.08187698572874069, |
|
"learning_rate": 0.00019242, |
|
"loss": 0.0545, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.13525229755505463, |
|
"grad_norm": 0.06790990382432938, |
|
"learning_rate": 0.00019222, |
|
"loss": 0.0474, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.1387203051846714, |
|
"grad_norm": 0.04906002804636955, |
|
"learning_rate": 0.00019202, |
|
"loss": 0.0579, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.1421883128142882, |
|
"grad_norm": 0.06125594303011894, |
|
"learning_rate": 0.00019182, |
|
"loss": 0.0421, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.14565632044390497, |
|
"grad_norm": 0.06252908706665039, |
|
"learning_rate": 0.00019162, |
|
"loss": 0.0577, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.14912432807352177, |
|
"grad_norm": 0.09376012533903122, |
|
"learning_rate": 0.00019142, |
|
"loss": 0.0546, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.15259233570313854, |
|
"grad_norm": 0.05025137588381767, |
|
"learning_rate": 0.00019122, |
|
"loss": 0.0505, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.15606034333275534, |
|
"grad_norm": 0.07034559547901154, |
|
"learning_rate": 0.00019102, |
|
"loss": 0.0495, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.1595283509623721, |
|
"grad_norm": 0.07475865632295609, |
|
"learning_rate": 0.00019082, |
|
"loss": 0.0533, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.1629963585919889, |
|
"grad_norm": 0.07459491491317749, |
|
"learning_rate": 0.00019062, |
|
"loss": 0.0551, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.16646436622160568, |
|
"grad_norm": 0.052578963339328766, |
|
"learning_rate": 0.00019042, |
|
"loss": 0.0564, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.16993237385122248, |
|
"grad_norm": 0.05785336345434189, |
|
"learning_rate": 0.00019022, |
|
"loss": 0.0549, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.17340038148083925, |
|
"grad_norm": 0.06621215492486954, |
|
"learning_rate": 0.00019002, |
|
"loss": 0.0528, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.17686838911045605, |
|
"grad_norm": 0.07456778734922409, |
|
"learning_rate": 0.00018982000000000002, |
|
"loss": 0.0496, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.18033639674007282, |
|
"grad_norm": 0.06371001899242401, |
|
"learning_rate": 0.00018962000000000002, |
|
"loss": 0.0509, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.18380440436968962, |
|
"grad_norm": 0.052791863679885864, |
|
"learning_rate": 0.00018942, |
|
"loss": 0.0504, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.1872724119993064, |
|
"grad_norm": 0.06491260975599289, |
|
"learning_rate": 0.00018922, |
|
"loss": 0.0527, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.19074041962892319, |
|
"grad_norm": 0.07835149765014648, |
|
"learning_rate": 0.00018902000000000003, |
|
"loss": 0.0564, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.19420842725853996, |
|
"grad_norm": 0.05977100506424904, |
|
"learning_rate": 0.00018882000000000003, |
|
"loss": 0.0477, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.19767643488815675, |
|
"grad_norm": 0.07109620422124863, |
|
"learning_rate": 0.00018862000000000002, |
|
"loss": 0.0537, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.20114444251777355, |
|
"grad_norm": 0.10347943007946014, |
|
"learning_rate": 0.00018842000000000002, |
|
"loss": 0.0542, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.20461245014739032, |
|
"grad_norm": 0.06503281742334366, |
|
"learning_rate": 0.00018822, |
|
"loss": 0.0484, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.20808045777700712, |
|
"grad_norm": 0.07903438806533813, |
|
"learning_rate": 0.00018802, |
|
"loss": 0.0597, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.2115484654066239, |
|
"grad_norm": 0.0713895708322525, |
|
"learning_rate": 0.00018782000000000003, |
|
"loss": 0.0498, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.2150164730362407, |
|
"grad_norm": 0.061313629150390625, |
|
"learning_rate": 0.00018762000000000002, |
|
"loss": 0.053, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.21848448066585746, |
|
"grad_norm": 0.07045572996139526, |
|
"learning_rate": 0.00018742000000000002, |
|
"loss": 0.0541, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.22195248829547426, |
|
"grad_norm": 0.07118247449398041, |
|
"learning_rate": 0.00018722, |
|
"loss": 0.0586, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.22542049592509103, |
|
"grad_norm": 0.05364071577787399, |
|
"learning_rate": 0.00018702, |
|
"loss": 0.0497, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.22888850355470783, |
|
"grad_norm": 0.07208040356636047, |
|
"learning_rate": 0.00018682000000000003, |
|
"loss": 0.0559, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.2323565111843246, |
|
"grad_norm": 0.07200731337070465, |
|
"learning_rate": 0.00018662000000000003, |
|
"loss": 0.048, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.2358245188139414, |
|
"grad_norm": 0.05730220302939415, |
|
"learning_rate": 0.00018642000000000002, |
|
"loss": 0.0495, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.23929252644355817, |
|
"grad_norm": 0.06378819793462753, |
|
"learning_rate": 0.00018622000000000002, |
|
"loss": 0.0552, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.24276053407317497, |
|
"grad_norm": 0.05866115912795067, |
|
"learning_rate": 0.00018602, |
|
"loss": 0.0541, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.24622854170279174, |
|
"grad_norm": 0.07053161412477493, |
|
"learning_rate": 0.00018582, |
|
"loss": 0.0533, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.24969654933240854, |
|
"grad_norm": 0.07450433820486069, |
|
"learning_rate": 0.00018562000000000003, |
|
"loss": 0.0527, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.25316455696202533, |
|
"grad_norm": 0.07172481715679169, |
|
"learning_rate": 0.00018542000000000002, |
|
"loss": 0.0583, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.2566325645916421, |
|
"grad_norm": 0.0536239892244339, |
|
"learning_rate": 0.00018522000000000002, |
|
"loss": 0.0486, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.2601005722212589, |
|
"grad_norm": 0.06321065127849579, |
|
"learning_rate": 0.00018502000000000001, |
|
"loss": 0.0412, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.2635685798508757, |
|
"grad_norm": 0.056946441531181335, |
|
"learning_rate": 0.00018482, |
|
"loss": 0.051, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.26703658748049247, |
|
"grad_norm": 0.05819573253393173, |
|
"learning_rate": 0.00018462, |
|
"loss": 0.0517, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.27050459511010927, |
|
"grad_norm": 0.05857665091753006, |
|
"learning_rate": 0.00018442000000000003, |
|
"loss": 0.0529, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.273972602739726, |
|
"grad_norm": 0.06014329940080643, |
|
"learning_rate": 0.00018422000000000002, |
|
"loss": 0.0544, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.2774406103693428, |
|
"grad_norm": 0.09187959134578705, |
|
"learning_rate": 0.00018402000000000002, |
|
"loss": 0.0439, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.2809086179989596, |
|
"grad_norm": 0.056131429970264435, |
|
"learning_rate": 0.00018382, |
|
"loss": 0.0389, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.2843766256285764, |
|
"grad_norm": 0.04884343966841698, |
|
"learning_rate": 0.00018362, |
|
"loss": 0.0475, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.28784463325819315, |
|
"grad_norm": 0.09032488614320755, |
|
"learning_rate": 0.00018342, |
|
"loss": 0.056, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.29131264088780995, |
|
"grad_norm": 0.09926522523164749, |
|
"learning_rate": 0.00018322000000000002, |
|
"loss": 0.0515, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.29478064851742675, |
|
"grad_norm": 0.04553750529885292, |
|
"learning_rate": 0.00018302000000000002, |
|
"loss": 0.0505, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.29824865614704354, |
|
"grad_norm": 0.058485984802246094, |
|
"learning_rate": 0.00018282000000000001, |
|
"loss": 0.0453, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.3017166637766603, |
|
"grad_norm": 0.052825070917606354, |
|
"learning_rate": 0.00018262, |
|
"loss": 0.053, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.3051846714062771, |
|
"grad_norm": 0.07626510411500931, |
|
"learning_rate": 0.00018242, |
|
"loss": 0.0474, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.3086526790358939, |
|
"grad_norm": 0.07244163751602173, |
|
"learning_rate": 0.00018222, |
|
"loss": 0.0456, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.3121206866655107, |
|
"grad_norm": 0.09651289880275726, |
|
"learning_rate": 0.00018202000000000002, |
|
"loss": 0.0499, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.3155886942951274, |
|
"grad_norm": 0.0737752839922905, |
|
"learning_rate": 0.00018182000000000002, |
|
"loss": 0.0517, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.3190567019247442, |
|
"grad_norm": 0.06631263345479965, |
|
"learning_rate": 0.00018162, |
|
"loss": 0.0499, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.322524709554361, |
|
"grad_norm": 0.10696552693843842, |
|
"learning_rate": 0.00018142, |
|
"loss": 0.0524, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.3259927171839778, |
|
"grad_norm": 0.06753025203943253, |
|
"learning_rate": 0.00018122, |
|
"loss": 0.0458, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.32946072481359456, |
|
"grad_norm": 0.05970798432826996, |
|
"learning_rate": 0.00018102000000000003, |
|
"loss": 0.0471, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.33292873244321136, |
|
"grad_norm": 0.05324438214302063, |
|
"learning_rate": 0.00018082000000000002, |
|
"loss": 0.0468, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.33639674007282816, |
|
"grad_norm": 0.0749637559056282, |
|
"learning_rate": 0.00018062000000000002, |
|
"loss": 0.0507, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.33986474770244496, |
|
"grad_norm": 0.07317759841680527, |
|
"learning_rate": 0.00018042, |
|
"loss": 0.0482, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.34333275533206176, |
|
"grad_norm": 0.11602938175201416, |
|
"learning_rate": 0.00018022, |
|
"loss": 0.0558, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.3468007629616785, |
|
"grad_norm": 0.07047244161367416, |
|
"learning_rate": 0.00018002, |
|
"loss": 0.0547, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.3468007629616785, |
|
"eval_loss": 0.062305010855197906, |
|
"eval_runtime": 714.4647, |
|
"eval_samples_per_second": 12.852, |
|
"eval_steps_per_second": 1.607, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.3502687705912953, |
|
"grad_norm": 0.07715447247028351, |
|
"learning_rate": 0.00017982000000000002, |
|
"loss": 0.0445, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.3537367782209121, |
|
"grad_norm": 0.08189492672681808, |
|
"learning_rate": 0.00017962000000000002, |
|
"loss": 0.0529, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.3572047858505289, |
|
"grad_norm": 0.06011577695608139, |
|
"learning_rate": 0.00017942, |
|
"loss": 0.0469, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.36067279348014564, |
|
"grad_norm": 0.06397314369678497, |
|
"learning_rate": 0.00017922, |
|
"loss": 0.0542, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.36414080110976244, |
|
"grad_norm": 0.06121763586997986, |
|
"learning_rate": 0.00017902, |
|
"loss": 0.0474, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.36760880873937923, |
|
"grad_norm": 0.0765228345990181, |
|
"learning_rate": 0.00017882, |
|
"loss": 0.0476, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.37107681636899603, |
|
"grad_norm": 0.0813635066151619, |
|
"learning_rate": 0.00017862000000000002, |
|
"loss": 0.0488, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.3745448239986128, |
|
"grad_norm": 0.06827688962221146, |
|
"learning_rate": 0.00017842000000000002, |
|
"loss": 0.0431, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.3780128316282296, |
|
"grad_norm": 0.06176091730594635, |
|
"learning_rate": 0.00017822, |
|
"loss": 0.0513, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.38148083925784637, |
|
"grad_norm": 0.07062922418117523, |
|
"learning_rate": 0.00017802, |
|
"loss": 0.0499, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.38494884688746317, |
|
"grad_norm": 0.059431031346321106, |
|
"learning_rate": 0.00017782, |
|
"loss": 0.0556, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.3884168545170799, |
|
"grad_norm": 0.06391894072294235, |
|
"learning_rate": 0.00017762, |
|
"loss": 0.0535, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.3918848621466967, |
|
"grad_norm": 0.08487355709075928, |
|
"learning_rate": 0.00017742000000000002, |
|
"loss": 0.0509, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.3953528697763135, |
|
"grad_norm": 0.06291911005973816, |
|
"learning_rate": 0.00017722000000000001, |
|
"loss": 0.0462, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.3988208774059303, |
|
"grad_norm": 0.06936580687761307, |
|
"learning_rate": 0.00017702, |
|
"loss": 0.0465, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.4022888850355471, |
|
"grad_norm": 0.06751543283462524, |
|
"learning_rate": 0.00017682, |
|
"loss": 0.0553, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.40575689266516385, |
|
"grad_norm": 0.08026771247386932, |
|
"learning_rate": 0.00017662, |
|
"loss": 0.0503, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.40922490029478065, |
|
"grad_norm": 0.05316636711359024, |
|
"learning_rate": 0.00017642, |
|
"loss": 0.0426, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.41269290792439745, |
|
"grad_norm": 0.15491995215415955, |
|
"learning_rate": 0.00017622000000000002, |
|
"loss": 0.0481, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.41616091555401424, |
|
"grad_norm": 0.059617578983306885, |
|
"learning_rate": 0.00017602, |
|
"loss": 0.0443, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.419628923183631, |
|
"grad_norm": 0.08931437879800797, |
|
"learning_rate": 0.00017582, |
|
"loss": 0.0462, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.4230969308132478, |
|
"grad_norm": 1.4069609642028809, |
|
"learning_rate": 0.00017562, |
|
"loss": 0.0577, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.4265649384428646, |
|
"grad_norm": 7.056313514709473, |
|
"learning_rate": 0.00017542, |
|
"loss": 0.3501, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.4300329460724814, |
|
"grad_norm": 0.8197808265686035, |
|
"learning_rate": 0.00017522000000000002, |
|
"loss": 0.1137, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.4335009537020981, |
|
"grad_norm": 0.4104197025299072, |
|
"learning_rate": 0.00017502000000000001, |
|
"loss": 0.1134, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.4369689613317149, |
|
"grad_norm": 1.3801881074905396, |
|
"learning_rate": 0.00017482, |
|
"loss": 0.0869, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.4404369689613317, |
|
"grad_norm": 3.6437034606933594, |
|
"learning_rate": 0.00017462, |
|
"loss": 0.0544, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.4439049765909485, |
|
"grad_norm": 1.1371792554855347, |
|
"learning_rate": 0.00017442, |
|
"loss": 0.0929, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.44737298422056526, |
|
"grad_norm": 2.9385204315185547, |
|
"learning_rate": 0.00017422, |
|
"loss": 0.1166, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.45084099185018206, |
|
"grad_norm": 0.8243001699447632, |
|
"learning_rate": 0.00017402000000000002, |
|
"loss": 0.0907, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.45430899947979886, |
|
"grad_norm": 1.3375756740570068, |
|
"learning_rate": 0.00017382, |
|
"loss": 0.0641, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.45777700710941566, |
|
"grad_norm": 0.7172356843948364, |
|
"learning_rate": 0.00017362, |
|
"loss": 0.0575, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.4612450147390324, |
|
"grad_norm": 0.18255668878555298, |
|
"learning_rate": 0.00017342, |
|
"loss": 0.0628, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.4647130223686492, |
|
"grad_norm": 0.11856569349765778, |
|
"learning_rate": 0.00017322, |
|
"loss": 0.1111, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.468181029998266, |
|
"grad_norm": 0.16621063649654388, |
|
"learning_rate": 0.00017302, |
|
"loss": 0.0446, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.4716490376278828, |
|
"grad_norm": 0.10158076882362366, |
|
"learning_rate": 0.00017282000000000002, |
|
"loss": 0.0532, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.4751170452574996, |
|
"grad_norm": 0.08783379942178726, |
|
"learning_rate": 0.00017262, |
|
"loss": 0.0491, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.47858505288711634, |
|
"grad_norm": 0.08198387920856476, |
|
"learning_rate": 0.00017242, |
|
"loss": 0.0485, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.48205306051673313, |
|
"grad_norm": 0.09546195715665817, |
|
"learning_rate": 0.00017222, |
|
"loss": 0.0486, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.48552106814634993, |
|
"grad_norm": 0.13258413970470428, |
|
"learning_rate": 0.00017202, |
|
"loss": 0.0544, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.48898907577596673, |
|
"grad_norm": 0.09508573263883591, |
|
"learning_rate": 0.00017182, |
|
"loss": 0.0486, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.4924570834055835, |
|
"grad_norm": 0.07258226722478867, |
|
"learning_rate": 0.00017162000000000001, |
|
"loss": 0.0448, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.49592509103520027, |
|
"grad_norm": 0.05399150773882866, |
|
"learning_rate": 0.00017142, |
|
"loss": 0.0436, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.49939309866481707, |
|
"grad_norm": 0.06922592222690582, |
|
"learning_rate": 0.00017122, |
|
"loss": 0.0451, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.5028611062944338, |
|
"grad_norm": 0.05059856176376343, |
|
"learning_rate": 0.00017102, |
|
"loss": 0.0413, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.5063291139240507, |
|
"grad_norm": 0.06339547783136368, |
|
"learning_rate": 0.00017082, |
|
"loss": 0.0495, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.5097971215536674, |
|
"grad_norm": 0.0521874763071537, |
|
"learning_rate": 0.00017062, |
|
"loss": 0.0489, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.5132651291832842, |
|
"grad_norm": 0.15334908664226532, |
|
"learning_rate": 0.00017042, |
|
"loss": 0.04, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.516733136812901, |
|
"grad_norm": 0.07494404166936874, |
|
"learning_rate": 0.00017022, |
|
"loss": 0.0537, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.5202011444425177, |
|
"grad_norm": 0.052238237112760544, |
|
"learning_rate": 0.00017002, |
|
"loss": 0.0492, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.5236691520721346, |
|
"grad_norm": 0.06761351227760315, |
|
"learning_rate": 0.00016982, |
|
"loss": 0.051, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.5271371597017513, |
|
"grad_norm": 0.07580805569887161, |
|
"learning_rate": 0.00016962, |
|
"loss": 0.0498, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.5306051673313681, |
|
"grad_norm": 0.07397795468568802, |
|
"learning_rate": 0.00016942000000000001, |
|
"loss": 0.0471, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.5340731749609849, |
|
"grad_norm": 0.04779529199004173, |
|
"learning_rate": 0.00016922, |
|
"loss": 0.0465, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.5375411825906017, |
|
"grad_norm": 0.08508727699518204, |
|
"learning_rate": 0.00016902, |
|
"loss": 0.046, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.5410091902202185, |
|
"grad_norm": 0.06656762957572937, |
|
"learning_rate": 0.00016882, |
|
"loss": 0.0433, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.5444771978498353, |
|
"grad_norm": 0.05622195079922676, |
|
"learning_rate": 0.00016862, |
|
"loss": 0.0455, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.547945205479452, |
|
"grad_norm": 0.06422954052686691, |
|
"learning_rate": 0.00016842, |
|
"loss": 0.0428, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.5514132131090689, |
|
"grad_norm": 0.08891351521015167, |
|
"learning_rate": 0.00016822, |
|
"loss": 0.0495, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.5548812207386856, |
|
"grad_norm": 0.08472294360399246, |
|
"learning_rate": 0.00016802, |
|
"loss": 0.0525, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.5583492283683024, |
|
"grad_norm": 0.08518495410680771, |
|
"learning_rate": 0.00016782, |
|
"loss": 0.0518, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.5618172359979192, |
|
"grad_norm": 0.1859533041715622, |
|
"learning_rate": 0.00016762, |
|
"loss": 0.0478, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.565285243627536, |
|
"grad_norm": 0.05245356634259224, |
|
"learning_rate": 0.00016742, |
|
"loss": 0.0418, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.5687532512571528, |
|
"grad_norm": 0.05982668697834015, |
|
"learning_rate": 0.00016722, |
|
"loss": 0.0458, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.5722212588867696, |
|
"grad_norm": 0.0750059187412262, |
|
"learning_rate": 0.00016702, |
|
"loss": 0.0489, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.5756892665163863, |
|
"grad_norm": 0.061683133244514465, |
|
"learning_rate": 0.00016682, |
|
"loss": 0.0461, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.5791572741460032, |
|
"grad_norm": 0.06833604723215103, |
|
"learning_rate": 0.00016662, |
|
"loss": 0.0466, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.5826252817756199, |
|
"grad_norm": 0.05507722124457359, |
|
"learning_rate": 0.00016642, |
|
"loss": 0.0416, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.5860932894052366, |
|
"grad_norm": 0.09453442692756653, |
|
"learning_rate": 0.00016622, |
|
"loss": 0.0418, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.5895612970348535, |
|
"grad_norm": 0.0574457123875618, |
|
"learning_rate": 0.00016601999999999999, |
|
"loss": 0.0495, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.5930293046644702, |
|
"grad_norm": 0.0829281136393547, |
|
"learning_rate": 0.00016582, |
|
"loss": 0.0435, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.5964973122940871, |
|
"grad_norm": 0.0569952018558979, |
|
"learning_rate": 0.00016562, |
|
"loss": 0.0476, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.5999653199237038, |
|
"grad_norm": 0.09291055798530579, |
|
"learning_rate": 0.00016542, |
|
"loss": 0.0446, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.6034333275533206, |
|
"grad_norm": 0.03767919912934303, |
|
"learning_rate": 0.00016522, |
|
"loss": 0.0426, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.6069013351829374, |
|
"grad_norm": 0.07064680010080338, |
|
"learning_rate": 0.00016502, |
|
"loss": 0.0491, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.6103693428125542, |
|
"grad_norm": 0.05687623471021652, |
|
"learning_rate": 0.00016482, |
|
"loss": 0.0454, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.613837350442171, |
|
"grad_norm": 0.04933289438486099, |
|
"learning_rate": 0.00016462, |
|
"loss": 0.0385, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.6173053580717878, |
|
"grad_norm": 0.0763295516371727, |
|
"learning_rate": 0.00016442000000000003, |
|
"loss": 0.0448, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.6207733657014045, |
|
"grad_norm": 0.04926356300711632, |
|
"learning_rate": 0.00016422000000000002, |
|
"loss": 0.0484, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.6242413733310214, |
|
"grad_norm": 0.07130167633295059, |
|
"learning_rate": 0.00016402000000000002, |
|
"loss": 0.0403, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.6277093809606381, |
|
"grad_norm": 0.06025327742099762, |
|
"learning_rate": 0.00016382000000000001, |
|
"loss": 0.0483, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.6311773885902549, |
|
"grad_norm": 0.06522911787033081, |
|
"learning_rate": 0.00016362, |
|
"loss": 0.0433, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.6346453962198717, |
|
"grad_norm": 0.09565310180187225, |
|
"learning_rate": 0.00016342, |
|
"loss": 0.0539, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.6381134038494884, |
|
"grad_norm": 0.08908990025520325, |
|
"learning_rate": 0.00016322000000000003, |
|
"loss": 0.0479, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.6415814114791053, |
|
"grad_norm": 0.05405285581946373, |
|
"learning_rate": 0.00016302000000000002, |
|
"loss": 0.0416, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.645049419108722, |
|
"grad_norm": 0.0656275674700737, |
|
"learning_rate": 0.00016282000000000002, |
|
"loss": 0.0455, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.6485174267383388, |
|
"grad_norm": 0.30592814087867737, |
|
"learning_rate": 0.00016262, |
|
"loss": 0.0471, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.6519854343679556, |
|
"grad_norm": 0.06433047354221344, |
|
"learning_rate": 0.00016242, |
|
"loss": 0.045, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.6554534419975724, |
|
"grad_norm": 0.05807631090283394, |
|
"learning_rate": 0.00016222000000000003, |
|
"loss": 0.0502, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.6589214496271891, |
|
"grad_norm": 0.09822454303503036, |
|
"learning_rate": 0.00016202000000000002, |
|
"loss": 0.0465, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.662389457256806, |
|
"grad_norm": 0.06036192551255226, |
|
"learning_rate": 0.00016182000000000002, |
|
"loss": 0.0455, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.6658574648864227, |
|
"grad_norm": 0.05637621134519577, |
|
"learning_rate": 0.00016162000000000001, |
|
"loss": 0.0459, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.6693254725160396, |
|
"grad_norm": 0.062302861362695694, |
|
"learning_rate": 0.00016142, |
|
"loss": 0.0461, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.6727934801456563, |
|
"grad_norm": 0.05843142420053482, |
|
"learning_rate": 0.00016122, |
|
"loss": 0.0455, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.6762614877752731, |
|
"grad_norm": 0.052565112709999084, |
|
"learning_rate": 0.00016102000000000003, |
|
"loss": 0.0432, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.6797294954048899, |
|
"grad_norm": 0.059093691408634186, |
|
"learning_rate": 0.00016082000000000002, |
|
"loss": 0.0433, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.6831975030345067, |
|
"grad_norm": 0.07197156548500061, |
|
"learning_rate": 0.00016062000000000002, |
|
"loss": 0.0465, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.6866655106641235, |
|
"grad_norm": 0.039968665689229965, |
|
"learning_rate": 0.00016042, |
|
"loss": 0.0381, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.6901335182937403, |
|
"grad_norm": 0.05672089383006096, |
|
"learning_rate": 0.00016022, |
|
"loss": 0.0422, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.693601525923357, |
|
"grad_norm": 0.10138531029224396, |
|
"learning_rate": 0.00016002, |
|
"loss": 0.0464, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.693601525923357, |
|
"eval_loss": 0.06039171665906906, |
|
"eval_runtime": 710.8114, |
|
"eval_samples_per_second": 12.918, |
|
"eval_steps_per_second": 1.615, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.6970695335529739, |
|
"grad_norm": 0.0607437863945961, |
|
"learning_rate": 0.00015982000000000002, |
|
"loss": 0.0481, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.7005375411825906, |
|
"grad_norm": 0.06681676208972931, |
|
"learning_rate": 0.00015962000000000002, |
|
"loss": 0.049, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.7040055488122073, |
|
"grad_norm": 0.07225602120161057, |
|
"learning_rate": 0.00015942000000000002, |
|
"loss": 0.0465, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.7074735564418242, |
|
"grad_norm": 0.056885506957769394, |
|
"learning_rate": 0.00015922, |
|
"loss": 0.0452, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.7109415640714409, |
|
"grad_norm": 0.05572199076414108, |
|
"learning_rate": 0.00015902, |
|
"loss": 0.0423, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.7144095717010578, |
|
"grad_norm": 0.0516788586974144, |
|
"learning_rate": 0.00015882, |
|
"loss": 0.0404, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.7178775793306745, |
|
"grad_norm": 0.05214313417673111, |
|
"learning_rate": 0.00015862000000000002, |
|
"loss": 0.0428, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.7213455869602913, |
|
"grad_norm": 0.06317329406738281, |
|
"learning_rate": 0.00015842000000000002, |
|
"loss": 0.0447, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.7248135945899081, |
|
"grad_norm": 0.07574247568845749, |
|
"learning_rate": 0.00015822, |
|
"loss": 0.0395, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.7282816022195249, |
|
"grad_norm": 0.06367363035678864, |
|
"learning_rate": 0.00015802, |
|
"loss": 0.0439, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.7317496098491416, |
|
"grad_norm": 0.06190785393118858, |
|
"learning_rate": 0.00015782, |
|
"loss": 0.0404, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.7352176174787585, |
|
"grad_norm": 0.07103675603866577, |
|
"learning_rate": 0.00015762, |
|
"loss": 0.0468, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.7386856251083752, |
|
"grad_norm": 0.04957522451877594, |
|
"learning_rate": 0.00015742000000000002, |
|
"loss": 0.0453, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.7421536327379921, |
|
"grad_norm": 0.07643826305866241, |
|
"learning_rate": 0.00015722000000000002, |
|
"loss": 0.0404, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.7456216403676088, |
|
"grad_norm": 0.04660920053720474, |
|
"learning_rate": 0.00015702, |
|
"loss": 0.0373, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.7490896479972255, |
|
"grad_norm": 0.06206243112683296, |
|
"learning_rate": 0.00015682, |
|
"loss": 0.0406, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.7525576556268424, |
|
"grad_norm": 0.07452013343572617, |
|
"learning_rate": 0.00015662, |
|
"loss": 0.043, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.7560256632564591, |
|
"grad_norm": 0.04379798844456673, |
|
"learning_rate": 0.00015642000000000002, |
|
"loss": 0.0494, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.759493670886076, |
|
"grad_norm": 0.05709415674209595, |
|
"learning_rate": 0.00015622000000000002, |
|
"loss": 0.041, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.7629616785156927, |
|
"grad_norm": 0.07468123733997345, |
|
"learning_rate": 0.00015602000000000001, |
|
"loss": 0.0403, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.7664296861453095, |
|
"grad_norm": 0.08100239932537079, |
|
"learning_rate": 0.00015582, |
|
"loss": 0.0481, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.7698976937749263, |
|
"grad_norm": 0.07269110530614853, |
|
"learning_rate": 0.00015562, |
|
"loss": 0.0394, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.7733657014045431, |
|
"grad_norm": 0.060352034866809845, |
|
"learning_rate": 0.00015542, |
|
"loss": 0.04, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.7768337090341598, |
|
"grad_norm": 0.05698138475418091, |
|
"learning_rate": 0.00015522000000000002, |
|
"loss": 0.0383, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.7803017166637767, |
|
"grad_norm": 0.06030441075563431, |
|
"learning_rate": 0.00015502000000000002, |
|
"loss": 0.0456, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.7837697242933934, |
|
"grad_norm": 0.08272086083889008, |
|
"learning_rate": 0.00015482, |
|
"loss": 0.0443, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.7872377319230103, |
|
"grad_norm": 0.06746231019496918, |
|
"learning_rate": 0.00015462, |
|
"loss": 0.04, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.790705739552627, |
|
"grad_norm": 0.08844996243715286, |
|
"learning_rate": 0.00015442, |
|
"loss": 0.0418, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.7941737471822438, |
|
"grad_norm": 0.04906987398862839, |
|
"learning_rate": 0.00015422, |
|
"loss": 0.0452, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.7976417548118606, |
|
"grad_norm": 0.053966376930475235, |
|
"learning_rate": 0.00015402000000000002, |
|
"loss": 0.0404, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.8011097624414774, |
|
"grad_norm": 0.07681586593389511, |
|
"learning_rate": 0.00015382000000000001, |
|
"loss": 0.05, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.8045777700710942, |
|
"grad_norm": 0.059431836009025574, |
|
"learning_rate": 0.00015362, |
|
"loss": 0.0496, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.808045777700711, |
|
"grad_norm": 0.058449339121580124, |
|
"learning_rate": 0.00015342, |
|
"loss": 0.0419, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.8115137853303277, |
|
"grad_norm": 0.05404651537537575, |
|
"learning_rate": 0.00015322, |
|
"loss": 0.0382, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.8149817929599446, |
|
"grad_norm": 0.06095472350716591, |
|
"learning_rate": 0.00015302, |
|
"loss": 0.0487, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.8184498005895613, |
|
"grad_norm": 0.07456117123365402, |
|
"learning_rate": 0.00015282000000000002, |
|
"loss": 0.0386, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.821917808219178, |
|
"grad_norm": 0.06098613142967224, |
|
"learning_rate": 0.00015262, |
|
"loss": 0.0425, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.8253858158487949, |
|
"grad_norm": 0.07008852809667587, |
|
"learning_rate": 0.00015242, |
|
"loss": 0.0457, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.8288538234784116, |
|
"grad_norm": 0.06413611769676208, |
|
"learning_rate": 0.00015222, |
|
"loss": 0.0433, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.8323218311080285, |
|
"grad_norm": 0.05168429762125015, |
|
"learning_rate": 0.00015202, |
|
"loss": 0.0441, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.8357898387376452, |
|
"grad_norm": 0.0708162784576416, |
|
"learning_rate": 0.00015182, |
|
"loss": 0.0393, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.839257846367262, |
|
"grad_norm": 0.05856487527489662, |
|
"learning_rate": 0.00015162000000000002, |
|
"loss": 0.0437, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.8427258539968788, |
|
"grad_norm": 0.06069020926952362, |
|
"learning_rate": 0.00015142, |
|
"loss": 0.0484, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.8461938616264956, |
|
"grad_norm": 0.0641227513551712, |
|
"learning_rate": 0.00015122, |
|
"loss": 0.046, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.8496618692561123, |
|
"grad_norm": 0.06422239542007446, |
|
"learning_rate": 0.00015102, |
|
"loss": 0.043, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.8531298768857292, |
|
"grad_norm": 0.07488572597503662, |
|
"learning_rate": 0.00015082, |
|
"loss": 0.0517, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.8565978845153459, |
|
"grad_norm": 0.06784242391586304, |
|
"learning_rate": 0.00015062000000000002, |
|
"loss": 0.0437, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.8600658921449628, |
|
"grad_norm": 0.07483550906181335, |
|
"learning_rate": 0.00015042, |
|
"loss": 0.0491, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.8635338997745795, |
|
"grad_norm": 0.06362838298082352, |
|
"learning_rate": 0.00015022, |
|
"loss": 0.043, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.8670019074041962, |
|
"grad_norm": 0.06175532937049866, |
|
"learning_rate": 0.00015002, |
|
"loss": 0.0425, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.8704699150338131, |
|
"grad_norm": 0.08987358957529068, |
|
"learning_rate": 0.00014982, |
|
"loss": 0.0479, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.8739379226634298, |
|
"grad_norm": 0.05209062620997429, |
|
"learning_rate": 0.00014962, |
|
"loss": 0.045, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.8774059302930467, |
|
"grad_norm": 0.055044736713171005, |
|
"learning_rate": 0.00014942000000000002, |
|
"loss": 0.0469, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.8808739379226634, |
|
"grad_norm": 0.0646812692284584, |
|
"learning_rate": 0.00014922, |
|
"loss": 0.0426, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.8843419455522802, |
|
"grad_norm": 0.048389263451099396, |
|
"learning_rate": 0.00014902, |
|
"loss": 0.0417, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.887809953181897, |
|
"grad_norm": 0.07316736876964569, |
|
"learning_rate": 0.00014882, |
|
"loss": 0.0412, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.8912779608115138, |
|
"grad_norm": 0.06801818311214447, |
|
"learning_rate": 0.00014862, |
|
"loss": 0.0446, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.8947459684411305, |
|
"grad_norm": 0.08190831542015076, |
|
"learning_rate": 0.00014842, |
|
"loss": 0.0423, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.8982139760707474, |
|
"grad_norm": 0.05850045010447502, |
|
"learning_rate": 0.00014822000000000001, |
|
"loss": 0.0418, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.9016819837003641, |
|
"grad_norm": 0.07633431255817413, |
|
"learning_rate": 0.00014802, |
|
"loss": 0.0439, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.905149991329981, |
|
"grad_norm": 0.07250861078500748, |
|
"learning_rate": 0.00014782, |
|
"loss": 0.0456, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.9086179989595977, |
|
"grad_norm": 0.05381698161363602, |
|
"learning_rate": 0.00014762, |
|
"loss": 0.0398, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.9120860065892145, |
|
"grad_norm": 0.05847073718905449, |
|
"learning_rate": 0.00014742, |
|
"loss": 0.0426, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.9155540142188313, |
|
"grad_norm": 0.04121188446879387, |
|
"learning_rate": 0.00014722, |
|
"loss": 0.0379, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.9190220218484481, |
|
"grad_norm": 0.05500589683651924, |
|
"learning_rate": 0.00014702, |
|
"loss": 0.0429, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.9224900294780648, |
|
"grad_norm": 0.07371719181537628, |
|
"learning_rate": 0.00014682, |
|
"loss": 0.0404, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.9259580371076817, |
|
"grad_norm": 0.07182417809963226, |
|
"learning_rate": 0.00014662, |
|
"loss": 0.0425, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.9294260447372984, |
|
"grad_norm": 0.07057616114616394, |
|
"learning_rate": 0.00014642, |
|
"loss": 0.0447, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.9328940523669152, |
|
"grad_norm": 0.06059495732188225, |
|
"learning_rate": 0.00014622, |
|
"loss": 0.0467, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.936362059996532, |
|
"grad_norm": 0.05471622198820114, |
|
"learning_rate": 0.00014602, |
|
"loss": 0.0429, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.9398300676261487, |
|
"grad_norm": 0.2795173227787018, |
|
"learning_rate": 0.00014582, |
|
"loss": 0.0441, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.9432980752557656, |
|
"grad_norm": 0.05505786091089249, |
|
"learning_rate": 0.00014562, |
|
"loss": 0.0481, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.9467660828853823, |
|
"grad_norm": 0.06604549288749695, |
|
"learning_rate": 0.00014542, |
|
"loss": 0.0436, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.9502340905149992, |
|
"grad_norm": 0.04876833036541939, |
|
"learning_rate": 0.00014522, |
|
"loss": 0.0427, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.9537020981446159, |
|
"grad_norm": 0.06586755067110062, |
|
"learning_rate": 0.00014502, |
|
"loss": 0.0376, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.9571701057742327, |
|
"grad_norm": 0.05991548299789429, |
|
"learning_rate": 0.00014482, |
|
"loss": 0.0425, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.9606381134038495, |
|
"grad_norm": 0.05464167147874832, |
|
"learning_rate": 0.00014462, |
|
"loss": 0.0475, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.9641061210334663, |
|
"grad_norm": 0.08428128808736801, |
|
"learning_rate": 0.00014442, |
|
"loss": 0.0407, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.967574128663083, |
|
"grad_norm": 0.05811979994177818, |
|
"learning_rate": 0.00014422, |
|
"loss": 0.0417, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.9710421362926999, |
|
"grad_norm": 0.06777170300483704, |
|
"learning_rate": 0.00014402, |
|
"loss": 0.0394, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.9745101439223166, |
|
"grad_norm": 0.07404989749193192, |
|
"learning_rate": 0.00014382, |
|
"loss": 0.0451, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.9779781515519335, |
|
"grad_norm": 0.08595024049282074, |
|
"learning_rate": 0.00014362, |
|
"loss": 0.0518, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.9814461591815502, |
|
"grad_norm": 0.07936517149209976, |
|
"learning_rate": 0.00014342, |
|
"loss": 0.0368, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.984914166811167, |
|
"grad_norm": 0.08811389654874802, |
|
"learning_rate": 0.00014322, |
|
"loss": 0.0444, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.9883821744407838, |
|
"grad_norm": 0.06864507496356964, |
|
"learning_rate": 0.00014302, |
|
"loss": 0.0445, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.9918501820704005, |
|
"grad_norm": 0.05267275124788284, |
|
"learning_rate": 0.00014282, |
|
"loss": 0.0485, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.9953181897000173, |
|
"grad_norm": 0.051028452813625336, |
|
"learning_rate": 0.00014261999999999999, |
|
"loss": 0.0416, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.9987861973296341, |
|
"grad_norm": 0.047300126403570175, |
|
"learning_rate": 0.00014242, |
|
"loss": 0.0443, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 1.0022542049592509, |
|
"grad_norm": 0.06848949193954468, |
|
"learning_rate": 0.00014222, |
|
"loss": 0.0422, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 1.0057222125888676, |
|
"grad_norm": 0.05861698463559151, |
|
"learning_rate": 0.00014202, |
|
"loss": 0.0298, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.0091902202184846, |
|
"grad_norm": 0.0671234130859375, |
|
"learning_rate": 0.00014182, |
|
"loss": 0.0271, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 1.0126582278481013, |
|
"grad_norm": 0.07007008045911789, |
|
"learning_rate": 0.00014162, |
|
"loss": 0.0279, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 1.016126235477718, |
|
"grad_norm": 0.07409070432186127, |
|
"learning_rate": 0.00014141999999999998, |
|
"loss": 0.0332, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 1.0195942431073348, |
|
"grad_norm": 0.0820993110537529, |
|
"learning_rate": 0.00014122, |
|
"loss": 0.032, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 1.0230622507369516, |
|
"grad_norm": 0.0683741420507431, |
|
"learning_rate": 0.00014102, |
|
"loss": 0.0285, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.0265302583665683, |
|
"grad_norm": 0.09559917449951172, |
|
"learning_rate": 0.00014082, |
|
"loss": 0.0328, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 1.0299982659961853, |
|
"grad_norm": 0.07927672564983368, |
|
"learning_rate": 0.00014062, |
|
"loss": 0.0317, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 1.033466273625802, |
|
"grad_norm": 0.06615123897790909, |
|
"learning_rate": 0.00014042, |
|
"loss": 0.0289, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 1.0369342812554188, |
|
"grad_norm": 0.051615212112665176, |
|
"learning_rate": 0.00014022, |
|
"loss": 0.0274, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 1.0404022888850355, |
|
"grad_norm": 0.060853827744722366, |
|
"learning_rate": 0.00014002, |
|
"loss": 0.033, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.0404022888850355, |
|
"eval_loss": 0.060293715447187424, |
|
"eval_runtime": 713.1843, |
|
"eval_samples_per_second": 12.875, |
|
"eval_steps_per_second": 1.61, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.0438702965146522, |
|
"grad_norm": 0.05449477955698967, |
|
"learning_rate": 0.00013982000000000003, |
|
"loss": 0.0308, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 1.0473383041442692, |
|
"grad_norm": 0.06479578465223312, |
|
"learning_rate": 0.00013962000000000002, |
|
"loss": 0.0343, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 1.050806311773886, |
|
"grad_norm": 0.060166362673044205, |
|
"learning_rate": 0.00013942000000000002, |
|
"loss": 0.0366, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 1.0542743194035027, |
|
"grad_norm": 0.07008329033851624, |
|
"learning_rate": 0.00013922, |
|
"loss": 0.0327, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 1.0577423270331194, |
|
"grad_norm": 0.07188612222671509, |
|
"learning_rate": 0.00013902, |
|
"loss": 0.0317, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.0612103346627362, |
|
"grad_norm": 0.06554035097360611, |
|
"learning_rate": 0.00013882000000000003, |
|
"loss": 0.031, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 1.0646783422923531, |
|
"grad_norm": 0.0675990879535675, |
|
"learning_rate": 0.00013862000000000002, |
|
"loss": 0.0341, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 1.0681463499219699, |
|
"grad_norm": 0.061157677322626114, |
|
"learning_rate": 0.00013842000000000002, |
|
"loss": 0.0301, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 1.0716143575515866, |
|
"grad_norm": 0.07951588183641434, |
|
"learning_rate": 0.00013822000000000001, |
|
"loss": 0.0289, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 1.0750823651812034, |
|
"grad_norm": 0.1063622459769249, |
|
"learning_rate": 0.00013802, |
|
"loss": 0.0325, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.0785503728108201, |
|
"grad_norm": 0.07916730642318726, |
|
"learning_rate": 0.00013782, |
|
"loss": 0.0338, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 1.082018380440437, |
|
"grad_norm": 0.0792151466012001, |
|
"learning_rate": 0.00013762000000000003, |
|
"loss": 0.0287, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 1.0854863880700538, |
|
"grad_norm": 0.0631512925028801, |
|
"learning_rate": 0.00013742000000000002, |
|
"loss": 0.031, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 1.0889543956996706, |
|
"grad_norm": 0.07032682001590729, |
|
"learning_rate": 0.00013722000000000002, |
|
"loss": 0.0284, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 1.0924224033292873, |
|
"grad_norm": 0.07017088681459427, |
|
"learning_rate": 0.00013702, |
|
"loss": 0.0292, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 1.095890410958904, |
|
"grad_norm": 0.06100435182452202, |
|
"learning_rate": 0.00013682, |
|
"loss": 0.0359, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 1.0993584185885208, |
|
"grad_norm": 0.05606581270694733, |
|
"learning_rate": 0.00013662, |
|
"loss": 0.032, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 1.1028264262181378, |
|
"grad_norm": 0.07687368988990784, |
|
"learning_rate": 0.00013642000000000003, |
|
"loss": 0.0353, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 1.1062944338477545, |
|
"grad_norm": 0.05796977877616882, |
|
"learning_rate": 0.00013622000000000002, |
|
"loss": 0.0286, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 1.1097624414773712, |
|
"grad_norm": 0.08586996048688889, |
|
"learning_rate": 0.00013602000000000002, |
|
"loss": 0.0285, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.113230449106988, |
|
"grad_norm": 0.08199802041053772, |
|
"learning_rate": 0.00013582, |
|
"loss": 0.0346, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 1.1166984567366047, |
|
"grad_norm": 0.06581319123506546, |
|
"learning_rate": 0.00013562, |
|
"loss": 0.029, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 1.1201664643662217, |
|
"grad_norm": 0.06755177676677704, |
|
"learning_rate": 0.00013542, |
|
"loss": 0.0323, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 1.1236344719958384, |
|
"grad_norm": 0.07359416782855988, |
|
"learning_rate": 0.00013522000000000002, |
|
"loss": 0.031, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 1.1271024796254552, |
|
"grad_norm": 0.07140175998210907, |
|
"learning_rate": 0.00013502000000000002, |
|
"loss": 0.0341, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.130570487255072, |
|
"grad_norm": 0.07570434361696243, |
|
"learning_rate": 0.00013482000000000001, |
|
"loss": 0.0307, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 1.1340384948846887, |
|
"grad_norm": 0.0646577849984169, |
|
"learning_rate": 0.00013462, |
|
"loss": 0.0353, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 1.1375065025143056, |
|
"grad_norm": 0.07016121596097946, |
|
"learning_rate": 0.00013442, |
|
"loss": 0.0367, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 1.1409745101439224, |
|
"grad_norm": 0.06299825757741928, |
|
"learning_rate": 0.00013422, |
|
"loss": 0.0355, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 1.1444425177735391, |
|
"grad_norm": 0.07232199609279633, |
|
"learning_rate": 0.00013402000000000002, |
|
"loss": 0.0302, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.1479105254031559, |
|
"grad_norm": 0.06672387570142746, |
|
"learning_rate": 0.00013382000000000002, |
|
"loss": 0.0314, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 1.1513785330327726, |
|
"grad_norm": 0.06597165018320084, |
|
"learning_rate": 0.00013362, |
|
"loss": 0.0316, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 1.1548465406623896, |
|
"grad_norm": 0.07945774495601654, |
|
"learning_rate": 0.00013342, |
|
"loss": 0.0304, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 1.1583145482920063, |
|
"grad_norm": 0.0883309543132782, |
|
"learning_rate": 0.00013322, |
|
"loss": 0.0313, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 1.161782555921623, |
|
"grad_norm": 0.1610005646944046, |
|
"learning_rate": 0.00013302000000000002, |
|
"loss": 0.0379, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.1652505635512398, |
|
"grad_norm": 0.08350630104541779, |
|
"learning_rate": 0.00013282000000000002, |
|
"loss": 0.0283, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 1.1687185711808565, |
|
"grad_norm": 0.08908521384000778, |
|
"learning_rate": 0.00013262000000000001, |
|
"loss": 0.0271, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 1.1721865788104733, |
|
"grad_norm": 0.06909502297639847, |
|
"learning_rate": 0.00013242, |
|
"loss": 0.0312, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 1.1756545864400902, |
|
"grad_norm": 0.0837428942322731, |
|
"learning_rate": 0.00013222, |
|
"loss": 0.0351, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 1.179122594069707, |
|
"grad_norm": 0.06636606156826019, |
|
"learning_rate": 0.00013202, |
|
"loss": 0.0284, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.1825906016993237, |
|
"grad_norm": 0.0990837961435318, |
|
"learning_rate": 0.00013182000000000002, |
|
"loss": 0.0301, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 1.1860586093289405, |
|
"grad_norm": 0.08313869684934616, |
|
"learning_rate": 0.00013162000000000002, |
|
"loss": 0.0335, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 1.1895266169585572, |
|
"grad_norm": 0.07330479472875595, |
|
"learning_rate": 0.00013142, |
|
"loss": 0.0328, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 1.1929946245881742, |
|
"grad_norm": 0.05378459393978119, |
|
"learning_rate": 0.00013122, |
|
"loss": 0.0306, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 1.196462632217791, |
|
"grad_norm": 0.06030990183353424, |
|
"learning_rate": 0.00013102, |
|
"loss": 0.0303, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.1999306398474077, |
|
"grad_norm": 0.07298003882169724, |
|
"learning_rate": 0.00013082, |
|
"loss": 0.0312, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 1.2033986474770244, |
|
"grad_norm": 0.06307482719421387, |
|
"learning_rate": 0.00013062000000000002, |
|
"loss": 0.0324, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 1.2068666551066412, |
|
"grad_norm": 0.06007950380444527, |
|
"learning_rate": 0.00013042000000000002, |
|
"loss": 0.0339, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 1.2103346627362581, |
|
"grad_norm": 0.07138363271951675, |
|
"learning_rate": 0.00013022, |
|
"loss": 0.0371, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 1.2138026703658749, |
|
"grad_norm": 0.06266158819198608, |
|
"learning_rate": 0.00013002, |
|
"loss": 0.0329, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.2172706779954916, |
|
"grad_norm": 0.06397438049316406, |
|
"learning_rate": 0.00012982, |
|
"loss": 0.0296, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 1.2207386856251083, |
|
"grad_norm": 0.061814188957214355, |
|
"learning_rate": 0.00012962, |
|
"loss": 0.0306, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 1.224206693254725, |
|
"grad_norm": 0.060092389583587646, |
|
"learning_rate": 0.00012942000000000002, |
|
"loss": 0.0323, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 1.227674700884342, |
|
"grad_norm": 0.10667088627815247, |
|
"learning_rate": 0.00012922, |
|
"loss": 0.0298, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 1.2311427085139588, |
|
"grad_norm": 0.09048482030630112, |
|
"learning_rate": 0.00012902, |
|
"loss": 0.0319, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 1.2346107161435755, |
|
"grad_norm": 0.09124518185853958, |
|
"learning_rate": 0.00012882, |
|
"loss": 0.0361, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 1.2380787237731923, |
|
"grad_norm": 0.05594000220298767, |
|
"learning_rate": 0.00012862, |
|
"loss": 0.0341, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 1.241546731402809, |
|
"grad_norm": 0.06354895979166031, |
|
"learning_rate": 0.00012842, |
|
"loss": 0.0281, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 1.2450147390324258, |
|
"grad_norm": 0.059312548488378525, |
|
"learning_rate": 0.00012822000000000002, |
|
"loss": 0.0276, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 1.2484827466620427, |
|
"grad_norm": 0.06291409581899643, |
|
"learning_rate": 0.00012802, |
|
"loss": 0.0315, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.2519507542916595, |
|
"grad_norm": 0.05183565244078636, |
|
"learning_rate": 0.00012782, |
|
"loss": 0.0294, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 1.2554187619212762, |
|
"grad_norm": 0.06372030079364777, |
|
"learning_rate": 0.00012762, |
|
"loss": 0.03, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 1.258886769550893, |
|
"grad_norm": 0.1327325701713562, |
|
"learning_rate": 0.00012742, |
|
"loss": 0.0366, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 1.26235477718051, |
|
"grad_norm": 0.07337796688079834, |
|
"learning_rate": 0.00012722000000000002, |
|
"loss": 0.0324, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 1.2658227848101267, |
|
"grad_norm": 0.06626396626234055, |
|
"learning_rate": 0.00012702000000000001, |
|
"loss": 0.0322, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 1.2692907924397434, |
|
"grad_norm": 0.07255198061466217, |
|
"learning_rate": 0.00012682, |
|
"loss": 0.036, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 1.2727588000693602, |
|
"grad_norm": 0.0766686201095581, |
|
"learning_rate": 0.00012662, |
|
"loss": 0.0315, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 1.276226807698977, |
|
"grad_norm": 0.06377042084932327, |
|
"learning_rate": 0.00012642, |
|
"loss": 0.0322, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 1.2796948153285936, |
|
"grad_norm": 0.0723329707980156, |
|
"learning_rate": 0.00012622, |
|
"loss": 0.0356, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 1.2831628229582104, |
|
"grad_norm": 0.0876326933503151, |
|
"learning_rate": 0.00012602000000000002, |
|
"loss": 0.0346, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.2866308305878273, |
|
"grad_norm": 0.07476814091205597, |
|
"learning_rate": 0.00012582, |
|
"loss": 0.0316, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 1.290098838217444, |
|
"grad_norm": 0.07503268122673035, |
|
"learning_rate": 0.00012562, |
|
"loss": 0.0318, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 1.2935668458470608, |
|
"grad_norm": 0.10708837956190109, |
|
"learning_rate": 0.00012542, |
|
"loss": 0.0349, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 1.2970348534766776, |
|
"grad_norm": 0.08280046284198761, |
|
"learning_rate": 0.00012522, |
|
"loss": 0.0324, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 1.3005028611062945, |
|
"grad_norm": 0.06630868464708328, |
|
"learning_rate": 0.00012502, |
|
"loss": 0.0305, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 1.3039708687359113, |
|
"grad_norm": 0.09755595773458481, |
|
"learning_rate": 0.00012482000000000001, |
|
"loss": 0.0338, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 1.307438876365528, |
|
"grad_norm": 0.08033673465251923, |
|
"learning_rate": 0.00012462, |
|
"loss": 0.0312, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 1.3109068839951448, |
|
"grad_norm": 0.0811261385679245, |
|
"learning_rate": 0.00012442, |
|
"loss": 0.0322, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 1.3143748916247615, |
|
"grad_norm": 0.06264316290616989, |
|
"learning_rate": 0.00012422, |
|
"loss": 0.0292, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 1.3178428992543783, |
|
"grad_norm": 0.07748369127511978, |
|
"learning_rate": 0.00012402, |
|
"loss": 0.0302, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.3213109068839952, |
|
"grad_norm": 0.0690523013472557, |
|
"learning_rate": 0.00012382, |
|
"loss": 0.0336, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 1.324778914513612, |
|
"grad_norm": 0.09423090517520905, |
|
"learning_rate": 0.00012362, |
|
"loss": 0.0295, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 1.3282469221432287, |
|
"grad_norm": 0.08562049269676208, |
|
"learning_rate": 0.00012342, |
|
"loss": 0.0292, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 1.3317149297728454, |
|
"grad_norm": 0.05059509724378586, |
|
"learning_rate": 0.00012322, |
|
"loss": 0.0266, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 1.3351829374024624, |
|
"grad_norm": 0.06358881294727325, |
|
"learning_rate": 0.00012302, |
|
"loss": 0.0309, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 1.3386509450320792, |
|
"grad_norm": 0.1298878937959671, |
|
"learning_rate": 0.00012282, |
|
"loss": 0.0356, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 1.342118952661696, |
|
"grad_norm": 0.06405311822891235, |
|
"learning_rate": 0.00012262, |
|
"loss": 0.0317, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 1.3455869602913126, |
|
"grad_norm": 0.07615106552839279, |
|
"learning_rate": 0.00012242, |
|
"loss": 0.0285, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 1.3490549679209294, |
|
"grad_norm": 0.08331302553415298, |
|
"learning_rate": 0.00012222, |
|
"loss": 0.0359, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 1.3525229755505461, |
|
"grad_norm": 0.06869524717330933, |
|
"learning_rate": 0.00012202, |
|
"loss": 0.0319, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.3559909831801629, |
|
"grad_norm": 0.08540484309196472, |
|
"learning_rate": 0.00012182, |
|
"loss": 0.0328, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 1.3594589908097798, |
|
"grad_norm": 0.07091011852025986, |
|
"learning_rate": 0.00012162, |
|
"loss": 0.0368, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 1.3629269984393966, |
|
"grad_norm": 0.075434111058712, |
|
"learning_rate": 0.00012142, |
|
"loss": 0.0336, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 1.3663950060690133, |
|
"grad_norm": 0.06716951727867126, |
|
"learning_rate": 0.00012122, |
|
"loss": 0.0335, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 1.36986301369863, |
|
"grad_norm": 0.09082087874412537, |
|
"learning_rate": 0.00012102, |
|
"loss": 0.0345, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 1.373331021328247, |
|
"grad_norm": 0.0903453379869461, |
|
"learning_rate": 0.00012082, |
|
"loss": 0.0415, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 1.3767990289578638, |
|
"grad_norm": 0.052235305309295654, |
|
"learning_rate": 0.00012062, |
|
"loss": 0.0282, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 1.3802670365874805, |
|
"grad_norm": 0.07253699749708176, |
|
"learning_rate": 0.00012042, |
|
"loss": 0.0316, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 1.3837350442170973, |
|
"grad_norm": 0.0548410601913929, |
|
"learning_rate": 0.00012022, |
|
"loss": 0.0303, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 1.387203051846714, |
|
"grad_norm": 0.08785740286111832, |
|
"learning_rate": 0.00012001999999999999, |
|
"loss": 0.0337, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.387203051846714, |
|
"eval_loss": 0.060144323855638504, |
|
"eval_runtime": 708.9302, |
|
"eval_samples_per_second": 12.952, |
|
"eval_steps_per_second": 1.619, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.3906710594763307, |
|
"grad_norm": 0.0650157481431961, |
|
"learning_rate": 0.00011982, |
|
"loss": 0.0327, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 1.3941390671059477, |
|
"grad_norm": 0.06679214537143707, |
|
"learning_rate": 0.00011962, |
|
"loss": 0.0352, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 1.3976070747355644, |
|
"grad_norm": 0.08368890732526779, |
|
"learning_rate": 0.00011942, |
|
"loss": 0.0346, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 1.4010750823651812, |
|
"grad_norm": 0.09027834981679916, |
|
"learning_rate": 0.00011922, |
|
"loss": 0.0304, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 1.404543089994798, |
|
"grad_norm": 0.0602988600730896, |
|
"learning_rate": 0.00011902, |
|
"loss": 0.0278, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 1.408011097624415, |
|
"grad_norm": 0.08348573744297028, |
|
"learning_rate": 0.00011882, |
|
"loss": 0.0316, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 1.4114791052540316, |
|
"grad_norm": 0.065241239964962, |
|
"learning_rate": 0.00011862, |
|
"loss": 0.0308, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 1.4149471128836484, |
|
"grad_norm": 0.058722469955682755, |
|
"learning_rate": 0.00011842, |
|
"loss": 0.0312, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 1.4184151205132651, |
|
"grad_norm": 0.06701633334159851, |
|
"learning_rate": 0.00011822, |
|
"loss": 0.0303, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 1.4218831281428819, |
|
"grad_norm": 0.06035483255982399, |
|
"learning_rate": 0.00011802, |
|
"loss": 0.0263, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.4253511357724986, |
|
"grad_norm": 0.07209423929452896, |
|
"learning_rate": 0.00011782, |
|
"loss": 0.0299, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 1.4288191434021154, |
|
"grad_norm": 0.08608460426330566, |
|
"learning_rate": 0.00011762, |
|
"loss": 0.0299, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 1.4322871510317323, |
|
"grad_norm": 0.06970153748989105, |
|
"learning_rate": 0.00011742, |
|
"loss": 0.0345, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 1.435755158661349, |
|
"grad_norm": 0.05995609238743782, |
|
"learning_rate": 0.00011721999999999999, |
|
"loss": 0.0284, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 1.4392231662909658, |
|
"grad_norm": 0.03222940117120743, |
|
"learning_rate": 0.00011702, |
|
"loss": 0.0293, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 1.4426911739205825, |
|
"grad_norm": 0.07076498866081238, |
|
"learning_rate": 0.00011682, |
|
"loss": 0.0362, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 1.4461591815501995, |
|
"grad_norm": 0.07425186783075333, |
|
"learning_rate": 0.00011661999999999999, |
|
"loss": 0.0281, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 1.4496271891798163, |
|
"grad_norm": 0.04875819757580757, |
|
"learning_rate": 0.00011642, |
|
"loss": 0.0361, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 1.453095196809433, |
|
"grad_norm": 0.06577154994010925, |
|
"learning_rate": 0.00011622, |
|
"loss": 0.0291, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 1.4565632044390497, |
|
"grad_norm": 0.08174604177474976, |
|
"learning_rate": 0.00011601999999999999, |
|
"loss": 0.0284, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.4600312120686665, |
|
"grad_norm": 0.08212857693433762, |
|
"learning_rate": 0.00011582, |
|
"loss": 0.03, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 1.4634992196982832, |
|
"grad_norm": 0.06090838089585304, |
|
"learning_rate": 0.00011562, |
|
"loss": 0.028, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 1.4669672273279002, |
|
"grad_norm": 0.07029874622821808, |
|
"learning_rate": 0.00011541999999999999, |
|
"loss": 0.033, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 1.470435234957517, |
|
"grad_norm": 0.10370688140392303, |
|
"learning_rate": 0.00011522, |
|
"loss": 0.0341, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 1.4739032425871337, |
|
"grad_norm": 0.06497831642627716, |
|
"learning_rate": 0.00011501999999999999, |
|
"loss": 0.0355, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 1.4773712502167504, |
|
"grad_norm": 0.047859255224466324, |
|
"learning_rate": 0.00011482000000000002, |
|
"loss": 0.0312, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 1.4808392578463674, |
|
"grad_norm": 0.044814929366111755, |
|
"learning_rate": 0.00011462000000000001, |
|
"loss": 0.0325, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 1.4843072654759841, |
|
"grad_norm": 0.08687663078308105, |
|
"learning_rate": 0.00011442000000000002, |
|
"loss": 0.0346, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 1.4877752731056009, |
|
"grad_norm": 0.06687606126070023, |
|
"learning_rate": 0.00011422000000000001, |
|
"loss": 0.0318, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 1.4912432807352176, |
|
"grad_norm": 0.0769667997956276, |
|
"learning_rate": 0.00011402000000000001, |
|
"loss": 0.0314, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.4947112883648344, |
|
"grad_norm": 0.07912110537290573, |
|
"learning_rate": 0.00011382000000000002, |
|
"loss": 0.0269, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 1.498179295994451, |
|
"grad_norm": 0.06801219284534454, |
|
"learning_rate": 0.00011362000000000001, |
|
"loss": 0.032, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 1.5016473036240678, |
|
"grad_norm": 0.07353610545396805, |
|
"learning_rate": 0.00011342000000000001, |
|
"loss": 0.0328, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 1.5051153112536848, |
|
"grad_norm": 0.05926644429564476, |
|
"learning_rate": 0.00011322000000000002, |
|
"loss": 0.0268, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 1.5085833188833015, |
|
"grad_norm": 0.07942460477352142, |
|
"learning_rate": 0.00011302000000000001, |
|
"loss": 0.0299, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 1.5120513265129183, |
|
"grad_norm": 0.09032566100358963, |
|
"learning_rate": 0.00011282000000000002, |
|
"loss": 0.0344, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 1.5155193341425353, |
|
"grad_norm": 0.08992986381053925, |
|
"learning_rate": 0.00011262000000000002, |
|
"loss": 0.0282, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 1.518987341772152, |
|
"grad_norm": 0.10055962204933167, |
|
"learning_rate": 0.00011242000000000001, |
|
"loss": 0.0368, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 1.5224553494017687, |
|
"grad_norm": 0.06706701964139938, |
|
"learning_rate": 0.00011222000000000002, |
|
"loss": 0.0348, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 1.5259233570313855, |
|
"grad_norm": 0.07412678748369217, |
|
"learning_rate": 0.00011202000000000002, |
|
"loss": 0.0322, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.5293913646610022, |
|
"grad_norm": 0.0761900544166565, |
|
"learning_rate": 0.00011182000000000001, |
|
"loss": 0.0334, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 1.532859372290619, |
|
"grad_norm": 0.06172578036785126, |
|
"learning_rate": 0.00011162000000000002, |
|
"loss": 0.0326, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 1.5363273799202357, |
|
"grad_norm": 0.06953331083059311, |
|
"learning_rate": 0.00011142000000000001, |
|
"loss": 0.0298, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 1.5397953875498525, |
|
"grad_norm": 0.07618329674005508, |
|
"learning_rate": 0.00011122000000000001, |
|
"loss": 0.0283, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 1.5432633951794694, |
|
"grad_norm": 0.07265307009220123, |
|
"learning_rate": 0.00011102000000000002, |
|
"loss": 0.032, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 1.5467314028090862, |
|
"grad_norm": 0.07409724593162537, |
|
"learning_rate": 0.00011082000000000001, |
|
"loss": 0.0303, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 1.5501994104387031, |
|
"grad_norm": 0.05352557823061943, |
|
"learning_rate": 0.00011062000000000001, |
|
"loss": 0.0304, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 1.5536674180683199, |
|
"grad_norm": 0.10606401413679123, |
|
"learning_rate": 0.00011042000000000002, |
|
"loss": 0.0338, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 1.5571354256979366, |
|
"grad_norm": 0.07364092022180557, |
|
"learning_rate": 0.00011022000000000001, |
|
"loss": 0.0308, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 1.5606034333275534, |
|
"grad_norm": 0.08737417310476303, |
|
"learning_rate": 0.00011002000000000001, |
|
"loss": 0.0299, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.56407144095717, |
|
"grad_norm": 0.07062090188264847, |
|
"learning_rate": 0.00010982000000000002, |
|
"loss": 0.0314, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 1.5675394485867868, |
|
"grad_norm": 0.0711718276143074, |
|
"learning_rate": 0.00010962000000000001, |
|
"loss": 0.0322, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 1.5710074562164036, |
|
"grad_norm": 0.08718711882829666, |
|
"learning_rate": 0.00010942, |
|
"loss": 0.0297, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 1.5744754638460203, |
|
"grad_norm": 0.06502439081668854, |
|
"learning_rate": 0.00010922000000000001, |
|
"loss": 0.0385, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 1.5779434714756373, |
|
"grad_norm": 0.07162761688232422, |
|
"learning_rate": 0.00010902000000000001, |
|
"loss": 0.0301, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 1.581411479105254, |
|
"grad_norm": 0.07721313089132309, |
|
"learning_rate": 0.00010882, |
|
"loss": 0.0284, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 1.5848794867348708, |
|
"grad_norm": 0.07071566581726074, |
|
"learning_rate": 0.00010862000000000001, |
|
"loss": 0.0352, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 1.5883474943644877, |
|
"grad_norm": 0.1029210165143013, |
|
"learning_rate": 0.00010842000000000001, |
|
"loss": 0.0319, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 1.5918155019941045, |
|
"grad_norm": 0.05683687701821327, |
|
"learning_rate": 0.00010822, |
|
"loss": 0.0343, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 1.5952835096237212, |
|
"grad_norm": 0.05821290984749794, |
|
"learning_rate": 0.00010802000000000001, |
|
"loss": 0.039, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 1.598751517253338, |
|
"grad_norm": 0.07931312173604965, |
|
"learning_rate": 0.00010782000000000001, |
|
"loss": 0.0294, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 1.6022195248829547, |
|
"grad_norm": 0.06197603419423103, |
|
"learning_rate": 0.00010762, |
|
"loss": 0.0322, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 1.6056875325125715, |
|
"grad_norm": 0.06312838196754456, |
|
"learning_rate": 0.00010742000000000001, |
|
"loss": 0.0298, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 1.6091555401421882, |
|
"grad_norm": 0.07855828106403351, |
|
"learning_rate": 0.00010722000000000001, |
|
"loss": 0.0303, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 1.612623547771805, |
|
"grad_norm": 0.055718790739774704, |
|
"learning_rate": 0.00010702000000000002, |
|
"loss": 0.0311, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 1.616091555401422, |
|
"grad_norm": 0.07305306941270828, |
|
"learning_rate": 0.00010682000000000001, |
|
"loss": 0.0303, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 1.6195595630310387, |
|
"grad_norm": 0.07300154864788055, |
|
"learning_rate": 0.00010662, |
|
"loss": 0.0301, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 1.6230275706606556, |
|
"grad_norm": 0.06121309846639633, |
|
"learning_rate": 0.00010642000000000001, |
|
"loss": 0.029, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 1.6264955782902724, |
|
"grad_norm": 0.060993146151304245, |
|
"learning_rate": 0.00010622000000000001, |
|
"loss": 0.0297, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 1.629963585919889, |
|
"grad_norm": 0.07691816985607147, |
|
"learning_rate": 0.00010602, |
|
"loss": 0.0339, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 1.6334315935495058, |
|
"grad_norm": 0.07278670370578766, |
|
"learning_rate": 0.00010582000000000001, |
|
"loss": 0.0262, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 1.6368996011791226, |
|
"grad_norm": 0.05704551190137863, |
|
"learning_rate": 0.00010562000000000001, |
|
"loss": 0.028, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 1.6403676088087393, |
|
"grad_norm": 0.07973553240299225, |
|
"learning_rate": 0.00010542, |
|
"loss": 0.0328, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 1.643835616438356, |
|
"grad_norm": 0.05720138177275658, |
|
"learning_rate": 0.00010522000000000001, |
|
"loss": 0.0309, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 1.6473036240679728, |
|
"grad_norm": 0.07787197083234787, |
|
"learning_rate": 0.00010502000000000001, |
|
"loss": 0.0294, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 1.6507716316975898, |
|
"grad_norm": 0.08834118396043777, |
|
"learning_rate": 0.00010482, |
|
"loss": 0.0301, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 1.6542396393272065, |
|
"grad_norm": 0.08383214473724365, |
|
"learning_rate": 0.00010462000000000001, |
|
"loss": 0.0322, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 1.6577076469568233, |
|
"grad_norm": 0.08266714960336685, |
|
"learning_rate": 0.00010442, |
|
"loss": 0.0323, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 1.6611756545864402, |
|
"grad_norm": 0.06535809487104416, |
|
"learning_rate": 0.00010422, |
|
"loss": 0.0334, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 1.664643662216057, |
|
"grad_norm": 0.07224865257740021, |
|
"learning_rate": 0.00010402000000000001, |
|
"loss": 0.0312, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 1.6681116698456737, |
|
"grad_norm": 0.07816470414400101, |
|
"learning_rate": 0.00010382, |
|
"loss": 0.0342, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 1.6715796774752905, |
|
"grad_norm": 0.48284128308296204, |
|
"learning_rate": 0.00010362, |
|
"loss": 0.0387, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 1.6750476851049072, |
|
"grad_norm": 0.15331751108169556, |
|
"learning_rate": 0.00010342000000000001, |
|
"loss": 0.0289, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 1.678515692734524, |
|
"grad_norm": 0.08506326377391815, |
|
"learning_rate": 0.00010322, |
|
"loss": 0.0299, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 1.6819837003641407, |
|
"grad_norm": 0.06808125227689743, |
|
"learning_rate": 0.00010302, |
|
"loss": 0.0266, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 1.6854517079937574, |
|
"grad_norm": 0.07349207252264023, |
|
"learning_rate": 0.00010282000000000001, |
|
"loss": 0.0297, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 1.6889197156233744, |
|
"grad_norm": 0.3372306525707245, |
|
"learning_rate": 0.00010262, |
|
"loss": 0.0342, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 1.6923877232529911, |
|
"grad_norm": 0.10550106316804886, |
|
"learning_rate": 0.00010242, |
|
"loss": 0.0338, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 1.695855730882608, |
|
"grad_norm": 0.0708487331867218, |
|
"learning_rate": 0.00010222000000000001, |
|
"loss": 0.0294, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 1.6993237385122248, |
|
"grad_norm": 0.5124090313911438, |
|
"learning_rate": 0.00010202, |
|
"loss": 0.0321, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 1.7027917461418416, |
|
"grad_norm": 0.08598774671554565, |
|
"learning_rate": 0.00010182, |
|
"loss": 0.0326, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 1.7062597537714583, |
|
"grad_norm": 0.06315886229276657, |
|
"learning_rate": 0.00010162, |
|
"loss": 0.0358, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 1.709727761401075, |
|
"grad_norm": 0.2513497769832611, |
|
"learning_rate": 0.00010142, |
|
"loss": 0.0352, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 1.7131957690306918, |
|
"grad_norm": 0.06378067284822464, |
|
"learning_rate": 0.00010122000000000001, |
|
"loss": 0.0277, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 1.7166637766603086, |
|
"grad_norm": 0.08322855085134506, |
|
"learning_rate": 0.00010102, |
|
"loss": 0.0303, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 1.7201317842899253, |
|
"grad_norm": 0.1097235381603241, |
|
"learning_rate": 0.00010082, |
|
"loss": 0.0328, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 1.7235997919195423, |
|
"grad_norm": 0.06668414920568466, |
|
"learning_rate": 0.00010062000000000001, |
|
"loss": 0.034, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 1.727067799549159, |
|
"grad_norm": 0.08046268671751022, |
|
"learning_rate": 0.00010042, |
|
"loss": 0.0315, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 1.7305358071787758, |
|
"grad_norm": 0.06834772229194641, |
|
"learning_rate": 0.00010022, |
|
"loss": 0.0304, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 1.7340038148083927, |
|
"grad_norm": 0.06567910313606262, |
|
"learning_rate": 0.00010002000000000001, |
|
"loss": 0.0338, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 1.7340038148083927, |
|
"eval_loss": 0.059129249304533005, |
|
"eval_runtime": 713.2468, |
|
"eval_samples_per_second": 12.874, |
|
"eval_steps_per_second": 1.61, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 1.7374718224380095, |
|
"grad_norm": 0.0854811891913414, |
|
"learning_rate": 9.982e-05, |
|
"loss": 0.0308, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 1.7409398300676262, |
|
"grad_norm": 0.07272527366876602, |
|
"learning_rate": 9.962e-05, |
|
"loss": 0.0333, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 1.744407837697243, |
|
"grad_norm": 0.0846826583147049, |
|
"learning_rate": 9.942000000000001e-05, |
|
"loss": 0.0342, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 1.7478758453268597, |
|
"grad_norm": 0.06776320934295654, |
|
"learning_rate": 9.922e-05, |
|
"loss": 0.0333, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 1.7513438529564764, |
|
"grad_norm": 0.07815729081630707, |
|
"learning_rate": 9.902e-05, |
|
"loss": 0.0355, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 1.7548118605860932, |
|
"grad_norm": 0.13363681733608246, |
|
"learning_rate": 9.882e-05, |
|
"loss": 0.032, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 1.75827986821571, |
|
"grad_norm": 0.05876624956727028, |
|
"learning_rate": 9.862e-05, |
|
"loss": 0.033, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 1.7617478758453269, |
|
"grad_norm": 0.10773160308599472, |
|
"learning_rate": 9.842e-05, |
|
"loss": 0.0309, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 1.7652158834749436, |
|
"grad_norm": 0.05928561091423035, |
|
"learning_rate": 9.822e-05, |
|
"loss": 0.0288, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 1.7686838911045606, |
|
"grad_norm": 0.058999065309762955, |
|
"learning_rate": 9.802e-05, |
|
"loss": 0.0251, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 1.7721518987341773, |
|
"grad_norm": 0.0768052414059639, |
|
"learning_rate": 9.782e-05, |
|
"loss": 0.0292, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 1.775619906363794, |
|
"grad_norm": 0.1691245585680008, |
|
"learning_rate": 9.762e-05, |
|
"loss": 0.0327, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 1.7790879139934108, |
|
"grad_norm": 0.08563978224992752, |
|
"learning_rate": 9.742e-05, |
|
"loss": 0.0321, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 1.7825559216230276, |
|
"grad_norm": 0.07125357538461685, |
|
"learning_rate": 9.722e-05, |
|
"loss": 0.0281, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 1.7860239292526443, |
|
"grad_norm": 0.10519967973232269, |
|
"learning_rate": 9.702e-05, |
|
"loss": 0.03, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 1.789491936882261, |
|
"grad_norm": 0.08440076559782028, |
|
"learning_rate": 9.682e-05, |
|
"loss": 0.0309, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 1.7929599445118778, |
|
"grad_norm": 0.09944937378168106, |
|
"learning_rate": 9.661999999999999e-05, |
|
"loss": 0.0323, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 1.7964279521414948, |
|
"grad_norm": 0.07523104548454285, |
|
"learning_rate": 9.642e-05, |
|
"loss": 0.026, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 1.7998959597711115, |
|
"grad_norm": 0.09478747099637985, |
|
"learning_rate": 9.622000000000001e-05, |
|
"loss": 0.0304, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 1.8033639674007282, |
|
"grad_norm": 0.0627417042851448, |
|
"learning_rate": 9.602e-05, |
|
"loss": 0.0353, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 1.8068319750303452, |
|
"grad_norm": 0.06294772773981094, |
|
"learning_rate": 9.582000000000001e-05, |
|
"loss": 0.0301, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 1.810299982659962, |
|
"grad_norm": 0.06453125923871994, |
|
"learning_rate": 9.562000000000001e-05, |
|
"loss": 0.0372, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 1.8137679902895787, |
|
"grad_norm": 0.08603645861148834, |
|
"learning_rate": 9.542e-05, |
|
"loss": 0.0313, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 1.8172359979191954, |
|
"grad_norm": 0.0609930120408535, |
|
"learning_rate": 9.522000000000001e-05, |
|
"loss": 0.0337, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 1.8207040055488122, |
|
"grad_norm": 0.06789145618677139, |
|
"learning_rate": 9.502000000000001e-05, |
|
"loss": 0.0291, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 1.824172013178429, |
|
"grad_norm": 0.062086399644613266, |
|
"learning_rate": 9.482e-05, |
|
"loss": 0.0284, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 1.8276400208080457, |
|
"grad_norm": 0.0755184143781662, |
|
"learning_rate": 9.462000000000001e-05, |
|
"loss": 0.0311, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 1.8311080284376624, |
|
"grad_norm": 0.06724268943071365, |
|
"learning_rate": 9.442000000000001e-05, |
|
"loss": 0.0288, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 1.8345760360672794, |
|
"grad_norm": 0.08856779336929321, |
|
"learning_rate": 9.422e-05, |
|
"loss": 0.0313, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 1.8380440436968961, |
|
"grad_norm": 0.08593250811100006, |
|
"learning_rate": 9.402000000000001e-05, |
|
"loss": 0.0299, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 1.841512051326513, |
|
"grad_norm": 0.07944291085004807, |
|
"learning_rate": 9.382e-05, |
|
"loss": 0.0309, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 1.8449800589561298, |
|
"grad_norm": 0.06297358870506287, |
|
"learning_rate": 9.362e-05, |
|
"loss": 0.0322, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 1.8484480665857466, |
|
"grad_norm": 0.0893145203590393, |
|
"learning_rate": 9.342000000000001e-05, |
|
"loss": 0.0297, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 1.8519160742153633, |
|
"grad_norm": 0.06626788526773453, |
|
"learning_rate": 9.322e-05, |
|
"loss": 0.0359, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 1.85538408184498, |
|
"grad_norm": 0.10941380262374878, |
|
"learning_rate": 9.302e-05, |
|
"loss": 0.0322, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 1.8588520894745968, |
|
"grad_norm": 0.08435889333486557, |
|
"learning_rate": 9.282000000000001e-05, |
|
"loss": 0.0361, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 1.8623200971042135, |
|
"grad_norm": 0.08986232429742813, |
|
"learning_rate": 9.262e-05, |
|
"loss": 0.0288, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 1.8657881047338303, |
|
"grad_norm": 0.0800371989607811, |
|
"learning_rate": 9.242000000000001e-05, |
|
"loss": 0.0339, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 1.8692561123634472, |
|
"grad_norm": 0.08191009610891342, |
|
"learning_rate": 9.222000000000001e-05, |
|
"loss": 0.0284, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 1.872724119993064, |
|
"grad_norm": 0.10277281701564789, |
|
"learning_rate": 9.202e-05, |
|
"loss": 0.0307, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 1.8761921276226807, |
|
"grad_norm": 0.08141244202852249, |
|
"learning_rate": 9.182000000000001e-05, |
|
"loss": 0.0327, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 1.8796601352522977, |
|
"grad_norm": 0.07554444670677185, |
|
"learning_rate": 9.162000000000001e-05, |
|
"loss": 0.031, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 1.8831281428819144, |
|
"grad_norm": 0.0898871198296547, |
|
"learning_rate": 9.142e-05, |
|
"loss": 0.0306, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 1.8865961505115312, |
|
"grad_norm": 0.06314833462238312, |
|
"learning_rate": 9.122000000000001e-05, |
|
"loss": 0.0348, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 1.890064158141148, |
|
"grad_norm": 0.06320305913686752, |
|
"learning_rate": 9.102e-05, |
|
"loss": 0.0277, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 1.8935321657707647, |
|
"grad_norm": 0.09586924314498901, |
|
"learning_rate": 9.082e-05, |
|
"loss": 0.0361, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 1.8970001734003814, |
|
"grad_norm": 0.0577247217297554, |
|
"learning_rate": 9.062000000000001e-05, |
|
"loss": 0.0295, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 1.9004681810299981, |
|
"grad_norm": 0.06483156979084015, |
|
"learning_rate": 9.042e-05, |
|
"loss": 0.0311, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 1.903936188659615, |
|
"grad_norm": 0.0748353898525238, |
|
"learning_rate": 9.022e-05, |
|
"loss": 0.0324, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 1.9074041962892319, |
|
"grad_norm": 0.09499184042215347, |
|
"learning_rate": 9.002000000000001e-05, |
|
"loss": 0.0307, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 1.9108722039188486, |
|
"grad_norm": 0.07709678262472153, |
|
"learning_rate": 8.982e-05, |
|
"loss": 0.0337, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 1.9143402115484656, |
|
"grad_norm": 0.06375749409198761, |
|
"learning_rate": 8.962e-05, |
|
"loss": 0.0231, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 1.9178082191780823, |
|
"grad_norm": 0.04287609085440636, |
|
"learning_rate": 8.942000000000001e-05, |
|
"loss": 0.0243, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 1.921276226807699, |
|
"grad_norm": 0.07707373052835464, |
|
"learning_rate": 8.922e-05, |
|
"loss": 0.0304, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 1.9247442344373158, |
|
"grad_norm": 0.06724567711353302, |
|
"learning_rate": 8.902e-05, |
|
"loss": 0.0323, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 1.9282122420669325, |
|
"grad_norm": 0.06895706802606583, |
|
"learning_rate": 8.882000000000001e-05, |
|
"loss": 0.0346, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 1.9316802496965493, |
|
"grad_norm": 0.0870724767446518, |
|
"learning_rate": 8.862e-05, |
|
"loss": 0.0326, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 1.935148257326166, |
|
"grad_norm": 0.061669524759054184, |
|
"learning_rate": 8.842e-05, |
|
"loss": 0.0287, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 1.9386162649557828, |
|
"grad_norm": 0.06450454145669937, |
|
"learning_rate": 8.822e-05, |
|
"loss": 0.0295, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 1.9420842725853997, |
|
"grad_norm": 0.07439760863780975, |
|
"learning_rate": 8.802e-05, |
|
"loss": 0.0325, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 1.9455522802150165, |
|
"grad_norm": 0.08620608597993851, |
|
"learning_rate": 8.782e-05, |
|
"loss": 0.0306, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 1.9490202878446332, |
|
"grad_norm": 0.08417027443647385, |
|
"learning_rate": 8.762e-05, |
|
"loss": 0.0339, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 1.9524882954742502, |
|
"grad_norm": 0.0670836940407753, |
|
"learning_rate": 8.742e-05, |
|
"loss": 0.0318, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 1.955956303103867, |
|
"grad_norm": 0.08096006512641907, |
|
"learning_rate": 8.722e-05, |
|
"loss": 0.0269, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 1.9594243107334837, |
|
"grad_norm": 0.04559866338968277, |
|
"learning_rate": 8.702e-05, |
|
"loss": 0.0271, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 1.9628923183631004, |
|
"grad_norm": 0.08112025260925293, |
|
"learning_rate": 8.682e-05, |
|
"loss": 0.036, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 1.9663603259927172, |
|
"grad_norm": 0.07936326414346695, |
|
"learning_rate": 8.662000000000001e-05, |
|
"loss": 0.0323, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 1.969828333622334, |
|
"grad_norm": 0.09269768744707108, |
|
"learning_rate": 8.642e-05, |
|
"loss": 0.0318, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 1.9732963412519506, |
|
"grad_norm": 0.06654026359319687, |
|
"learning_rate": 8.622e-05, |
|
"loss": 0.0291, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 1.9767643488815674, |
|
"grad_norm": 0.0971643254160881, |
|
"learning_rate": 8.602e-05, |
|
"loss": 0.031, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 1.9802323565111843, |
|
"grad_norm": 0.06067187711596489, |
|
"learning_rate": 8.582e-05, |
|
"loss": 0.0306, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 1.983700364140801, |
|
"grad_norm": 0.0945192202925682, |
|
"learning_rate": 8.562e-05, |
|
"loss": 0.0328, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 1.987168371770418, |
|
"grad_norm": 0.07627417147159576, |
|
"learning_rate": 8.542e-05, |
|
"loss": 0.0352, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 1.9906363794000348, |
|
"grad_norm": 0.08669853955507278, |
|
"learning_rate": 8.522e-05, |
|
"loss": 0.0337, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 1.9941043870296515, |
|
"grad_norm": 0.06610149890184402, |
|
"learning_rate": 8.502e-05, |
|
"loss": 0.0288, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 1.9975723946592683, |
|
"grad_norm": 0.06989070028066635, |
|
"learning_rate": 8.482e-05, |
|
"loss": 0.0306, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 2.001040402288885, |
|
"grad_norm": 0.04675092548131943, |
|
"learning_rate": 8.462e-05, |
|
"loss": 0.0245, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 2.0045084099185018, |
|
"grad_norm": 0.053521353751420975, |
|
"learning_rate": 8.442e-05, |
|
"loss": 0.0158, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 2.0079764175481185, |
|
"grad_norm": 0.06453324854373932, |
|
"learning_rate": 8.422e-05, |
|
"loss": 0.0131, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 2.0114444251777353, |
|
"grad_norm": 0.0763096958398819, |
|
"learning_rate": 8.402e-05, |
|
"loss": 0.0112, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 2.014912432807352, |
|
"grad_norm": 0.0837683454155922, |
|
"learning_rate": 8.382e-05, |
|
"loss": 0.0135, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 2.018380440436969, |
|
"grad_norm": 0.0868675634264946, |
|
"learning_rate": 8.362000000000002e-05, |
|
"loss": 0.0116, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 2.021848448066586, |
|
"grad_norm": 0.06839966773986816, |
|
"learning_rate": 8.342000000000001e-05, |
|
"loss": 0.011, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 2.0253164556962027, |
|
"grad_norm": 0.10859765112400055, |
|
"learning_rate": 8.322e-05, |
|
"loss": 0.0132, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 2.0287844633258194, |
|
"grad_norm": 0.08824854344129562, |
|
"learning_rate": 8.302000000000001e-05, |
|
"loss": 0.0129, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 2.032252470955436, |
|
"grad_norm": 0.0745101124048233, |
|
"learning_rate": 8.282000000000001e-05, |
|
"loss": 0.013, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 2.035720478585053, |
|
"grad_norm": 0.06870684772729874, |
|
"learning_rate": 8.262e-05, |
|
"loss": 0.0126, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 2.0391884862146696, |
|
"grad_norm": 0.08353777974843979, |
|
"learning_rate": 8.242000000000001e-05, |
|
"loss": 0.0127, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 2.0426564938442864, |
|
"grad_norm": 0.07282493263483047, |
|
"learning_rate": 8.222000000000001e-05, |
|
"loss": 0.014, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 2.046124501473903, |
|
"grad_norm": 0.07777410745620728, |
|
"learning_rate": 8.202e-05, |
|
"loss": 0.0108, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 2.04959250910352, |
|
"grad_norm": 0.06361842900514603, |
|
"learning_rate": 8.182000000000001e-05, |
|
"loss": 0.0132, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 2.0530605167331366, |
|
"grad_norm": 0.09724973887205124, |
|
"learning_rate": 8.162000000000001e-05, |
|
"loss": 0.0139, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 2.056528524362754, |
|
"grad_norm": 0.09359490126371384, |
|
"learning_rate": 8.142e-05, |
|
"loss": 0.013, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 2.0599965319923705, |
|
"grad_norm": 0.06116607412695885, |
|
"learning_rate": 8.122000000000001e-05, |
|
"loss": 0.0143, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 2.0634645396219873, |
|
"grad_norm": 0.06787212938070297, |
|
"learning_rate": 8.102000000000001e-05, |
|
"loss": 0.0137, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 2.066932547251604, |
|
"grad_norm": 0.06523068249225616, |
|
"learning_rate": 8.082e-05, |
|
"loss": 0.0116, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 2.0704005548812208, |
|
"grad_norm": 0.09076279401779175, |
|
"learning_rate": 8.062000000000001e-05, |
|
"loss": 0.01, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 2.0738685625108375, |
|
"grad_norm": 0.07997199892997742, |
|
"learning_rate": 8.042e-05, |
|
"loss": 0.0132, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 2.0773365701404543, |
|
"grad_norm": 0.0561593659222126, |
|
"learning_rate": 8.022e-05, |
|
"loss": 0.0117, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 2.080804577770071, |
|
"grad_norm": 0.08588268607854843, |
|
"learning_rate": 8.002000000000001e-05, |
|
"loss": 0.0125, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.080804577770071, |
|
"eval_loss": 0.06557230651378632, |
|
"eval_runtime": 707.733, |
|
"eval_samples_per_second": 12.974, |
|
"eval_steps_per_second": 1.622, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.0842725853996877, |
|
"grad_norm": 0.09114642441272736, |
|
"learning_rate": 7.982e-05, |
|
"loss": 0.0107, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 2.0877405930293045, |
|
"grad_norm": 0.06732100248336792, |
|
"learning_rate": 7.962e-05, |
|
"loss": 0.012, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 2.0912086006589217, |
|
"grad_norm": 0.08021605014801025, |
|
"learning_rate": 7.942000000000001e-05, |
|
"loss": 0.0123, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 2.0946766082885384, |
|
"grad_norm": 0.06761088222265244, |
|
"learning_rate": 7.922e-05, |
|
"loss": 0.0117, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 2.098144615918155, |
|
"grad_norm": 0.11337202787399292, |
|
"learning_rate": 7.902e-05, |
|
"loss": 0.0131, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 2.101612623547772, |
|
"grad_norm": 0.09859013557434082, |
|
"learning_rate": 7.882000000000001e-05, |
|
"loss": 0.0139, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 2.1050806311773886, |
|
"grad_norm": 0.08745191991329193, |
|
"learning_rate": 7.862e-05, |
|
"loss": 0.0139, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 2.1085486388070054, |
|
"grad_norm": 0.04903840273618698, |
|
"learning_rate": 7.842e-05, |
|
"loss": 0.0142, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 2.112016646436622, |
|
"grad_norm": 0.07992135733366013, |
|
"learning_rate": 7.822e-05, |
|
"loss": 0.0123, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 2.115484654066239, |
|
"grad_norm": 0.08518462628126144, |
|
"learning_rate": 7.802e-05, |
|
"loss": 0.0138, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 2.1189526616958556, |
|
"grad_norm": 0.08434431999921799, |
|
"learning_rate": 7.782000000000001e-05, |
|
"loss": 0.0155, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 2.1224206693254724, |
|
"grad_norm": 0.08734823763370514, |
|
"learning_rate": 7.762e-05, |
|
"loss": 0.0125, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 2.125888676955089, |
|
"grad_norm": 0.08129347115755081, |
|
"learning_rate": 7.742e-05, |
|
"loss": 0.0118, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 2.1293566845847063, |
|
"grad_norm": 0.09434698522090912, |
|
"learning_rate": 7.722000000000001e-05, |
|
"loss": 0.0128, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 2.132824692214323, |
|
"grad_norm": 0.08226180821657181, |
|
"learning_rate": 7.702e-05, |
|
"loss": 0.0159, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 2.1362926998439398, |
|
"grad_norm": 0.06351976841688156, |
|
"learning_rate": 7.682e-05, |
|
"loss": 0.0135, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 2.1397607074735565, |
|
"grad_norm": 0.07568191736936569, |
|
"learning_rate": 7.662000000000001e-05, |
|
"loss": 0.0122, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 2.1432287151031733, |
|
"grad_norm": 0.08052569627761841, |
|
"learning_rate": 7.642e-05, |
|
"loss": 0.0123, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 2.14669672273279, |
|
"grad_norm": 0.08767958730459213, |
|
"learning_rate": 7.622e-05, |
|
"loss": 0.0145, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 2.1501647303624067, |
|
"grad_norm": 0.09768091887235641, |
|
"learning_rate": 7.602000000000001e-05, |
|
"loss": 0.0113, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 2.1536327379920235, |
|
"grad_norm": 0.09293138980865479, |
|
"learning_rate": 7.582e-05, |
|
"loss": 0.0111, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 2.1571007456216402, |
|
"grad_norm": 0.07943341881036758, |
|
"learning_rate": 7.562e-05, |
|
"loss": 0.0133, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 2.160568753251257, |
|
"grad_norm": 0.08316273242235184, |
|
"learning_rate": 7.542e-05, |
|
"loss": 0.0142, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 2.164036760880874, |
|
"grad_norm": 0.05390129238367081, |
|
"learning_rate": 7.522e-05, |
|
"loss": 0.0128, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 2.167504768510491, |
|
"grad_norm": 0.05505843088030815, |
|
"learning_rate": 7.502e-05, |
|
"loss": 0.0124, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 2.1709727761401076, |
|
"grad_norm": 0.08427543193101883, |
|
"learning_rate": 7.482e-05, |
|
"loss": 0.0136, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 2.1744407837697244, |
|
"grad_norm": 0.06911098957061768, |
|
"learning_rate": 7.462e-05, |
|
"loss": 0.0117, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 2.177908791399341, |
|
"grad_norm": 0.08595360815525055, |
|
"learning_rate": 7.442e-05, |
|
"loss": 0.0155, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 2.181376799028958, |
|
"grad_norm": 0.0827205702662468, |
|
"learning_rate": 7.422e-05, |
|
"loss": 0.0127, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 2.1848448066585746, |
|
"grad_norm": 0.12057662755250931, |
|
"learning_rate": 7.402e-05, |
|
"loss": 0.0113, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 2.1883128142881914, |
|
"grad_norm": 0.10641255229711533, |
|
"learning_rate": 7.382e-05, |
|
"loss": 0.0123, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 2.191780821917808, |
|
"grad_norm": 0.11188361793756485, |
|
"learning_rate": 7.362e-05, |
|
"loss": 0.013, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 2.195248829547425, |
|
"grad_norm": 0.06386271864175797, |
|
"learning_rate": 7.342e-05, |
|
"loss": 0.0114, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 2.1987168371770416, |
|
"grad_norm": 0.09285665303468704, |
|
"learning_rate": 7.322e-05, |
|
"loss": 0.0156, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 2.2021848448066588, |
|
"grad_norm": 0.0885651484131813, |
|
"learning_rate": 7.302e-05, |
|
"loss": 0.013, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 2.2056528524362755, |
|
"grad_norm": 0.09753404557704926, |
|
"learning_rate": 7.282e-05, |
|
"loss": 0.0135, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 2.2091208600658923, |
|
"grad_norm": 0.06870284676551819, |
|
"learning_rate": 7.261999999999999e-05, |
|
"loss": 0.0163, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 2.212588867695509, |
|
"grad_norm": 0.07846927642822266, |
|
"learning_rate": 7.242e-05, |
|
"loss": 0.0147, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 2.2160568753251257, |
|
"grad_norm": 0.06510089337825775, |
|
"learning_rate": 7.222e-05, |
|
"loss": 0.0129, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 2.2195248829547425, |
|
"grad_norm": 0.13771073520183563, |
|
"learning_rate": 7.202e-05, |
|
"loss": 0.0151, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 2.2229928905843592, |
|
"grad_norm": 0.08157498389482498, |
|
"learning_rate": 7.182e-05, |
|
"loss": 0.0136, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 2.226460898213976, |
|
"grad_norm": 0.09444098174571991, |
|
"learning_rate": 7.162e-05, |
|
"loss": 0.016, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 2.2299289058435927, |
|
"grad_norm": 0.0773581713438034, |
|
"learning_rate": 7.142e-05, |
|
"loss": 0.0132, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 2.2333969134732095, |
|
"grad_norm": 0.10038639605045319, |
|
"learning_rate": 7.122000000000001e-05, |
|
"loss": 0.0142, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 2.2368649211028266, |
|
"grad_norm": 0.10728047043085098, |
|
"learning_rate": 7.102000000000001e-05, |
|
"loss": 0.0152, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 2.2403329287324434, |
|
"grad_norm": 0.10695026069879532, |
|
"learning_rate": 7.082e-05, |
|
"loss": 0.0163, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 2.24380093636206, |
|
"grad_norm": 0.06996133178472519, |
|
"learning_rate": 7.062000000000001e-05, |
|
"loss": 0.0149, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 2.247268943991677, |
|
"grad_norm": 0.04395058751106262, |
|
"learning_rate": 7.042000000000001e-05, |
|
"loss": 0.0127, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 2.2507369516212936, |
|
"grad_norm": 0.06262753158807755, |
|
"learning_rate": 7.022e-05, |
|
"loss": 0.014, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 2.2542049592509104, |
|
"grad_norm": 0.073272705078125, |
|
"learning_rate": 7.002000000000001e-05, |
|
"loss": 0.0121, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 2.257672966880527, |
|
"grad_norm": 0.11415940523147583, |
|
"learning_rate": 6.982e-05, |
|
"loss": 0.0136, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 2.261140974510144, |
|
"grad_norm": 0.09325289726257324, |
|
"learning_rate": 6.962e-05, |
|
"loss": 0.0121, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 2.2646089821397606, |
|
"grad_norm": 0.07223498821258545, |
|
"learning_rate": 6.942000000000001e-05, |
|
"loss": 0.0143, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 2.2680769897693773, |
|
"grad_norm": 0.08595094084739685, |
|
"learning_rate": 6.922e-05, |
|
"loss": 0.0137, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 2.271544997398994, |
|
"grad_norm": 0.08120746910572052, |
|
"learning_rate": 6.902000000000001e-05, |
|
"loss": 0.0101, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 2.2750130050286113, |
|
"grad_norm": 0.1085987389087677, |
|
"learning_rate": 6.882000000000001e-05, |
|
"loss": 0.0149, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 2.278481012658228, |
|
"grad_norm": 0.07946083694696426, |
|
"learning_rate": 6.862e-05, |
|
"loss": 0.0133, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 2.2819490202878447, |
|
"grad_norm": 0.05504854768514633, |
|
"learning_rate": 6.842000000000001e-05, |
|
"loss": 0.0139, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 2.2854170279174615, |
|
"grad_norm": 0.07158561050891876, |
|
"learning_rate": 6.822000000000001e-05, |
|
"loss": 0.0135, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 2.2888850355470782, |
|
"grad_norm": 0.06974880397319794, |
|
"learning_rate": 6.802e-05, |
|
"loss": 0.0123, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 2.292353043176695, |
|
"grad_norm": 0.08541780710220337, |
|
"learning_rate": 6.782000000000001e-05, |
|
"loss": 0.0144, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 2.2958210508063117, |
|
"grad_norm": 0.10203000158071518, |
|
"learning_rate": 6.762e-05, |
|
"loss": 0.0138, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 2.2992890584359285, |
|
"grad_norm": 0.09905651211738586, |
|
"learning_rate": 6.742e-05, |
|
"loss": 0.0145, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 2.302757066065545, |
|
"grad_norm": 0.04088059067726135, |
|
"learning_rate": 6.722000000000001e-05, |
|
"loss": 0.011, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 2.306225073695162, |
|
"grad_norm": 0.10128574818372726, |
|
"learning_rate": 6.702e-05, |
|
"loss": 0.0159, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 2.309693081324779, |
|
"grad_norm": 0.08455543220043182, |
|
"learning_rate": 6.682e-05, |
|
"loss": 0.0131, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 2.313161088954396, |
|
"grad_norm": 0.1268063485622406, |
|
"learning_rate": 6.662000000000001e-05, |
|
"loss": 0.0129, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 2.3166290965840126, |
|
"grad_norm": 0.11783897876739502, |
|
"learning_rate": 6.642e-05, |
|
"loss": 0.0106, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 2.3200971042136294, |
|
"grad_norm": 0.09608971327543259, |
|
"learning_rate": 6.622e-05, |
|
"loss": 0.0137, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 2.323565111843246, |
|
"grad_norm": 0.07495573163032532, |
|
"learning_rate": 6.602000000000001e-05, |
|
"loss": 0.0131, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 2.327033119472863, |
|
"grad_norm": 0.07772886753082275, |
|
"learning_rate": 6.582e-05, |
|
"loss": 0.0124, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 2.3305011271024796, |
|
"grad_norm": 0.07365009188652039, |
|
"learning_rate": 6.562e-05, |
|
"loss": 0.0112, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 2.3339691347320963, |
|
"grad_norm": 0.0697893276810646, |
|
"learning_rate": 6.542000000000001e-05, |
|
"loss": 0.0105, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 2.337437142361713, |
|
"grad_norm": 0.09057148545980453, |
|
"learning_rate": 6.522e-05, |
|
"loss": 0.0134, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 2.34090514999133, |
|
"grad_norm": 0.09601489454507828, |
|
"learning_rate": 6.502e-05, |
|
"loss": 0.014, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 2.3443731576209466, |
|
"grad_norm": 0.11969607323408127, |
|
"learning_rate": 6.482e-05, |
|
"loss": 0.0116, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 2.3478411652505637, |
|
"grad_norm": 0.103757843375206, |
|
"learning_rate": 6.462e-05, |
|
"loss": 0.0088, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 2.3513091728801805, |
|
"grad_norm": 0.09077152609825134, |
|
"learning_rate": 6.442e-05, |
|
"loss": 0.0111, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 2.3547771805097972, |
|
"grad_norm": 0.06362780928611755, |
|
"learning_rate": 6.422e-05, |
|
"loss": 0.01, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 2.358245188139414, |
|
"grad_norm": 0.10334885120391846, |
|
"learning_rate": 6.402e-05, |
|
"loss": 0.0144, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 2.3617131957690307, |
|
"grad_norm": 0.0676029622554779, |
|
"learning_rate": 6.382e-05, |
|
"loss": 0.0131, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 2.3651812033986475, |
|
"grad_norm": 0.06794017553329468, |
|
"learning_rate": 6.362e-05, |
|
"loss": 0.0136, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 2.368649211028264, |
|
"grad_norm": 0.11005677282810211, |
|
"learning_rate": 6.342e-05, |
|
"loss": 0.0141, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 2.372117218657881, |
|
"grad_norm": 0.07998326420783997, |
|
"learning_rate": 6.322000000000001e-05, |
|
"loss": 0.0119, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 2.3755852262874977, |
|
"grad_norm": 0.07724535465240479, |
|
"learning_rate": 6.302e-05, |
|
"loss": 0.0149, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 2.3790532339171144, |
|
"grad_norm": 0.07563537359237671, |
|
"learning_rate": 6.282e-05, |
|
"loss": 0.0145, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 2.3825212415467316, |
|
"grad_norm": 0.048786722123622894, |
|
"learning_rate": 6.262000000000001e-05, |
|
"loss": 0.0122, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 2.3859892491763484, |
|
"grad_norm": 0.09792380034923553, |
|
"learning_rate": 6.242e-05, |
|
"loss": 0.0133, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 2.389457256805965, |
|
"grad_norm": 0.06414589285850525, |
|
"learning_rate": 6.222e-05, |
|
"loss": 0.0119, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 2.392925264435582, |
|
"grad_norm": 0.0844031348824501, |
|
"learning_rate": 6.202e-05, |
|
"loss": 0.0139, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 2.3963932720651986, |
|
"grad_norm": 0.0652434229850769, |
|
"learning_rate": 6.182e-05, |
|
"loss": 0.01, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 2.3998612796948153, |
|
"grad_norm": 0.07981958985328674, |
|
"learning_rate": 6.162e-05, |
|
"loss": 0.0105, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 2.403329287324432, |
|
"grad_norm": 0.064891017973423, |
|
"learning_rate": 6.142e-05, |
|
"loss": 0.0138, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 2.406797294954049, |
|
"grad_norm": 0.07090698927640915, |
|
"learning_rate": 6.122e-05, |
|
"loss": 0.0103, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 2.4102653025836656, |
|
"grad_norm": 0.09227363765239716, |
|
"learning_rate": 6.102e-05, |
|
"loss": 0.0146, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 2.4137333102132823, |
|
"grad_norm": 0.056207556277513504, |
|
"learning_rate": 6.082e-05, |
|
"loss": 0.0143, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 2.417201317842899, |
|
"grad_norm": 0.08221688121557236, |
|
"learning_rate": 6.062e-05, |
|
"loss": 0.0158, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 2.4206693254725162, |
|
"grad_norm": 0.08280789107084274, |
|
"learning_rate": 6.042e-05, |
|
"loss": 0.0139, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 2.424137333102133, |
|
"grad_norm": 0.06660090386867523, |
|
"learning_rate": 6.0219999999999996e-05, |
|
"loss": 0.013, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 2.4276053407317497, |
|
"grad_norm": 0.10185576230287552, |
|
"learning_rate": 6.002e-05, |
|
"loss": 0.0153, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 2.4276053407317497, |
|
"eval_loss": 0.0646032989025116, |
|
"eval_runtime": 713.661, |
|
"eval_samples_per_second": 12.866, |
|
"eval_steps_per_second": 1.609, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 2.4310733483613665, |
|
"grad_norm": 0.11863771080970764, |
|
"learning_rate": 5.982e-05, |
|
"loss": 0.0149, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 2.434541355990983, |
|
"grad_norm": 0.08282611519098282, |
|
"learning_rate": 5.9619999999999995e-05, |
|
"loss": 0.0114, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 2.4380093636206, |
|
"grad_norm": 0.10327205806970596, |
|
"learning_rate": 5.942e-05, |
|
"loss": 0.0113, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 2.4414773712502167, |
|
"grad_norm": 0.07424825429916382, |
|
"learning_rate": 5.922e-05, |
|
"loss": 0.0149, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 2.4449453788798334, |
|
"grad_norm": 0.07107970863580704, |
|
"learning_rate": 5.902e-05, |
|
"loss": 0.0133, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 2.44841338650945, |
|
"grad_norm": 0.08729968965053558, |
|
"learning_rate": 5.8819999999999996e-05, |
|
"loss": 0.0136, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 2.451881394139067, |
|
"grad_norm": 0.06300070136785507, |
|
"learning_rate": 5.862000000000001e-05, |
|
"loss": 0.01, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 2.455349401768684, |
|
"grad_norm": 0.107483871281147, |
|
"learning_rate": 5.8420000000000006e-05, |
|
"loss": 0.0124, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 2.458817409398301, |
|
"grad_norm": 0.06871318072080612, |
|
"learning_rate": 5.822000000000001e-05, |
|
"loss": 0.013, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 2.4622854170279176, |
|
"grad_norm": 0.10566007345914841, |
|
"learning_rate": 5.802000000000001e-05, |
|
"loss": 0.0133, |
|
"step": 7100 |
|
}, |
|
    {
      "epoch": 2.4657534246575343,
      "grad_norm": 0.04511050507426262,
      "learning_rate": 5.7820000000000005e-05,
      "loss": 0.012,
      "step": 7110
    },
    {
      "epoch": 2.469221432287151,
      "grad_norm": 0.09614109992980957,
      "learning_rate": 5.762000000000001e-05,
      "loss": 0.0115,
      "step": 7120
    },
    {
      "epoch": 2.472689439916768,
      "grad_norm": 0.07863055169582367,
      "learning_rate": 5.742000000000001e-05,
      "loss": 0.0141,
      "step": 7130
    },
    {
      "epoch": 2.4761574475463846,
      "grad_norm": 0.09857816249132156,
      "learning_rate": 5.7220000000000004e-05,
      "loss": 0.0131,
      "step": 7140
    },
    {
      "epoch": 2.4796254551760013,
      "grad_norm": 0.11649773269891739,
      "learning_rate": 5.7020000000000006e-05,
      "loss": 0.014,
      "step": 7150
    },
    {
      "epoch": 2.483093462805618,
      "grad_norm": 0.09917131811380386,
      "learning_rate": 5.682000000000001e-05,
      "loss": 0.0119,
      "step": 7160
    },
    {
      "epoch": 2.486561470435235,
      "grad_norm": 0.090948186814785,
      "learning_rate": 5.6620000000000003e-05,
      "loss": 0.0131,
      "step": 7170
    },
    {
      "epoch": 2.4900294780648515,
      "grad_norm": 0.10430017858743668,
      "learning_rate": 5.6420000000000005e-05,
      "loss": 0.0122,
      "step": 7180
    },
    {
      "epoch": 2.4934974856944687,
      "grad_norm": 0.08023589104413986,
      "learning_rate": 5.622000000000001e-05,
      "loss": 0.0146,
      "step": 7190
    },
    {
      "epoch": 2.4969654933240855,
      "grad_norm": 0.058440957218408585,
      "learning_rate": 5.602000000000001e-05,
      "loss": 0.0125,
      "step": 7200
    },
    {
      "epoch": 2.500433500953702,
      "grad_norm": 0.07056768983602524,
      "learning_rate": 5.5820000000000004e-05,
      "loss": 0.0122,
      "step": 7210
    },
    {
      "epoch": 2.503901508583319,
      "grad_norm": 0.09822002053260803,
      "learning_rate": 5.5620000000000006e-05,
      "loss": 0.0111,
      "step": 7220
    },
    {
      "epoch": 2.5073695162129357,
      "grad_norm": 0.04473882168531418,
      "learning_rate": 5.542000000000001e-05,
      "loss": 0.0126,
      "step": 7230
    },
    {
      "epoch": 2.5108375238425524,
      "grad_norm": 0.07200278341770172,
      "learning_rate": 5.522e-05,
      "loss": 0.0123,
      "step": 7240
    },
    {
      "epoch": 2.514305531472169,
      "grad_norm": 0.054528553038835526,
      "learning_rate": 5.5020000000000005e-05,
      "loss": 0.0139,
      "step": 7250
    },
    {
      "epoch": 2.517773539101786,
      "grad_norm": 0.06125443056225777,
      "learning_rate": 5.482000000000001e-05,
      "loss": 0.0136,
      "step": 7260
    },
    {
      "epoch": 2.5212415467314027,
      "grad_norm": 0.0841405987739563,
      "learning_rate": 5.462e-05,
      "loss": 0.012,
      "step": 7270
    },
    {
      "epoch": 2.52470955436102,
      "grad_norm": 0.05345413088798523,
      "learning_rate": 5.4420000000000004e-05,
      "loss": 0.0124,
      "step": 7280
    },
    {
      "epoch": 2.5281775619906366,
      "grad_norm": 0.1266576498746872,
      "learning_rate": 5.4220000000000006e-05,
      "loss": 0.0115,
      "step": 7290
    },
    {
      "epoch": 2.5316455696202533,
      "grad_norm": 0.06762862950563431,
      "learning_rate": 5.402e-05,
      "loss": 0.0144,
      "step": 7300
    },
    {
      "epoch": 2.53511357724987,
      "grad_norm": 0.06965386867523193,
      "learning_rate": 5.382e-05,
      "loss": 0.0107,
      "step": 7310
    },
    {
      "epoch": 2.538581584879487,
      "grad_norm": 0.13501428067684174,
      "learning_rate": 5.3620000000000005e-05,
      "loss": 0.0122,
      "step": 7320
    },
    {
      "epoch": 2.5420495925091036,
      "grad_norm": 0.0995524674654007,
      "learning_rate": 5.342e-05,
      "loss": 0.0129,
      "step": 7330
    },
    {
      "epoch": 2.5455176001387203,
      "grad_norm": 0.10781200975179672,
      "learning_rate": 5.322e-05,
      "loss": 0.0123,
      "step": 7340
    },
    {
      "epoch": 2.548985607768337,
      "grad_norm": 0.08666220307350159,
      "learning_rate": 5.3020000000000004e-05,
      "loss": 0.0128,
      "step": 7350
    },
    {
      "epoch": 2.552453615397954,
      "grad_norm": 0.0817803218960762,
      "learning_rate": 5.2820000000000006e-05,
      "loss": 0.0133,
      "step": 7360
    },
    {
      "epoch": 2.5559216230275705,
      "grad_norm": 0.08346541970968246,
      "learning_rate": 5.262e-05,
      "loss": 0.0142,
      "step": 7370
    },
    {
      "epoch": 2.5593896306571873,
      "grad_norm": 0.08062835782766342,
      "learning_rate": 5.242e-05,
      "loss": 0.0108,
      "step": 7380
    },
    {
      "epoch": 2.562857638286804,
      "grad_norm": 0.07476343959569931,
      "learning_rate": 5.2220000000000005e-05,
      "loss": 0.0129,
      "step": 7390
    },
    {
      "epoch": 2.5663256459164208,
      "grad_norm": 0.0925203412771225,
      "learning_rate": 5.202e-05,
      "loss": 0.0137,
      "step": 7400
    },
    {
      "epoch": 2.569793653546038,
      "grad_norm": 0.06427571177482605,
      "learning_rate": 5.182e-05,
      "loss": 0.012,
      "step": 7410
    },
    {
      "epoch": 2.5732616611756547,
      "grad_norm": 0.08569345623254776,
      "learning_rate": 5.1620000000000004e-05,
      "loss": 0.0103,
      "step": 7420
    },
    {
      "epoch": 2.5767296688052714,
      "grad_norm": 0.09565524011850357,
      "learning_rate": 5.142e-05,
      "loss": 0.0099,
      "step": 7430
    },
    {
      "epoch": 2.580197676434888,
      "grad_norm": 0.09814833849668503,
      "learning_rate": 5.122e-05,
      "loss": 0.0125,
      "step": 7440
    },
    {
      "epoch": 2.583665684064505,
      "grad_norm": 0.09652476012706757,
      "learning_rate": 5.102e-05,
      "loss": 0.0139,
      "step": 7450
    },
    {
      "epoch": 2.5871336916941217,
      "grad_norm": 0.09088584780693054,
      "learning_rate": 5.082e-05,
      "loss": 0.0129,
      "step": 7460
    },
    {
      "epoch": 2.5906016993237384,
      "grad_norm": 0.0785548985004425,
      "learning_rate": 5.062e-05,
      "loss": 0.0119,
      "step": 7470
    },
    {
      "epoch": 2.594069706953355,
      "grad_norm": 0.0708330050110817,
      "learning_rate": 5.042e-05,
      "loss": 0.0121,
      "step": 7480
    },
    {
      "epoch": 2.5975377145829723,
      "grad_norm": 0.09456871449947357,
      "learning_rate": 5.0220000000000004e-05,
      "loss": 0.0127,
      "step": 7490
    },
    {
      "epoch": 2.601005722212589,
      "grad_norm": 0.05804060399532318,
      "learning_rate": 5.002e-05,
      "loss": 0.012,
      "step": 7500
    },
    {
      "epoch": 2.604473729842206,
      "grad_norm": 0.08347133547067642,
      "learning_rate": 4.982e-05,
      "loss": 0.0121,
      "step": 7510
    },
    {
      "epoch": 2.6079417374718226,
      "grad_norm": 0.10097778588533401,
      "learning_rate": 4.962e-05,
      "loss": 0.0116,
      "step": 7520
    },
    {
      "epoch": 2.6114097451014393,
      "grad_norm": 0.07667971402406693,
      "learning_rate": 4.942e-05,
      "loss": 0.0144,
      "step": 7530
    },
    {
      "epoch": 2.614877752731056,
      "grad_norm": 0.09747060388326645,
      "learning_rate": 4.9220000000000006e-05,
      "loss": 0.0115,
      "step": 7540
    },
    {
      "epoch": 2.618345760360673,
      "grad_norm": 0.07479839771986008,
      "learning_rate": 4.902e-05,
      "loss": 0.0127,
      "step": 7550
    },
    {
      "epoch": 2.6218137679902895,
      "grad_norm": 0.11268935352563858,
      "learning_rate": 4.8820000000000004e-05,
      "loss": 0.0143,
      "step": 7560
    },
    {
      "epoch": 2.6252817756199063,
      "grad_norm": 0.0846814289689064,
      "learning_rate": 4.8620000000000005e-05,
      "loss": 0.0133,
      "step": 7570
    },
    {
      "epoch": 2.628749783249523,
      "grad_norm": 0.10621094703674316,
      "learning_rate": 4.842000000000001e-05,
      "loss": 0.013,
      "step": 7580
    },
    {
      "epoch": 2.6322177908791398,
      "grad_norm": 0.07603222131729126,
      "learning_rate": 4.822e-05,
      "loss": 0.0124,
      "step": 7590
    },
    {
      "epoch": 2.6356857985087565,
      "grad_norm": 0.08150995522737503,
      "learning_rate": 4.8020000000000004e-05,
      "loss": 0.013,
      "step": 7600
    },
    {
      "epoch": 2.6391538061383732,
      "grad_norm": 0.08962032943964005,
      "learning_rate": 4.7820000000000006e-05,
      "loss": 0.0119,
      "step": 7610
    },
    {
      "epoch": 2.6426218137679904,
      "grad_norm": 0.07465488463640213,
      "learning_rate": 4.762e-05,
      "loss": 0.0116,
      "step": 7620
    },
    {
      "epoch": 2.646089821397607,
      "grad_norm": 0.1040426716208458,
      "learning_rate": 4.742e-05,
      "loss": 0.0115,
      "step": 7630
    },
    {
      "epoch": 2.649557829027224,
      "grad_norm": 0.14476630091667175,
      "learning_rate": 4.7220000000000005e-05,
      "loss": 0.0114,
      "step": 7640
    },
    {
      "epoch": 2.6530258366568407,
      "grad_norm": 0.07727625221014023,
      "learning_rate": 4.702e-05,
      "loss": 0.0122,
      "step": 7650
    },
    {
      "epoch": 2.6564938442864574,
      "grad_norm": 0.0987255796790123,
      "learning_rate": 4.682e-05,
      "loss": 0.0132,
      "step": 7660
    },
    {
      "epoch": 2.659961851916074,
      "grad_norm": 0.07335236668586731,
      "learning_rate": 4.6620000000000004e-05,
      "loss": 0.0126,
      "step": 7670
    },
    {
      "epoch": 2.663429859545691,
      "grad_norm": 0.09524571150541306,
      "learning_rate": 4.642e-05,
      "loss": 0.0114,
      "step": 7680
    },
    {
      "epoch": 2.6668978671753076,
      "grad_norm": 0.11405625939369202,
      "learning_rate": 4.622e-05,
      "loss": 0.0144,
      "step": 7690
    },
    {
      "epoch": 2.670365874804925,
      "grad_norm": 0.09738898277282715,
      "learning_rate": 4.602e-05,
      "loss": 0.0139,
      "step": 7700
    },
    {
      "epoch": 2.6738338824345416,
      "grad_norm": 0.08633222430944443,
      "learning_rate": 4.5820000000000005e-05,
      "loss": 0.0136,
      "step": 7710
    },
    {
      "epoch": 2.6773018900641583,
      "grad_norm": 0.09597263485193253,
      "learning_rate": 4.562e-05,
      "loss": 0.015,
      "step": 7720
    },
    {
      "epoch": 2.680769897693775,
      "grad_norm": 0.0945490151643753,
      "learning_rate": 4.542e-05,
      "loss": 0.012,
      "step": 7730
    },
    {
      "epoch": 2.684237905323392,
      "grad_norm": 0.0953177884221077,
      "learning_rate": 4.5220000000000004e-05,
      "loss": 0.0121,
      "step": 7740
    },
    {
      "epoch": 2.6877059129530085,
      "grad_norm": 0.09284798800945282,
      "learning_rate": 4.502e-05,
      "loss": 0.0128,
      "step": 7750
    },
    {
      "epoch": 2.6911739205826253,
      "grad_norm": 0.10929513722658157,
      "learning_rate": 4.482e-05,
      "loss": 0.0122,
      "step": 7760
    },
    {
      "epoch": 2.694641928212242,
      "grad_norm": 0.10034655779600143,
      "learning_rate": 4.462e-05,
      "loss": 0.0137,
      "step": 7770
    },
    {
      "epoch": 2.6981099358418588,
      "grad_norm": 0.06497249752283096,
      "learning_rate": 4.442e-05,
      "loss": 0.0116,
      "step": 7780
    },
    {
      "epoch": 2.7015779434714755,
      "grad_norm": 0.061711184680461884,
      "learning_rate": 4.422e-05,
      "loss": 0.0118,
      "step": 7790
    },
    {
      "epoch": 2.7050459511010922,
      "grad_norm": 0.08474565297365189,
      "learning_rate": 4.402e-05,
      "loss": 0.0113,
      "step": 7800
    },
    {
      "epoch": 2.708513958730709,
      "grad_norm": 0.06698320806026459,
      "learning_rate": 4.382e-05,
      "loss": 0.0116,
      "step": 7810
    },
    {
      "epoch": 2.7119819663603257,
      "grad_norm": 0.11040494590997696,
      "learning_rate": 4.362e-05,
      "loss": 0.014,
      "step": 7820
    },
    {
      "epoch": 2.715449973989943,
      "grad_norm": 0.077610544860363,
      "learning_rate": 4.342e-05,
      "loss": 0.0148,
      "step": 7830
    },
    {
      "epoch": 2.7189179816195597,
      "grad_norm": 0.07504422217607498,
      "learning_rate": 4.3219999999999996e-05,
      "loss": 0.0109,
      "step": 7840
    },
    {
      "epoch": 2.7223859892491764,
      "grad_norm": 0.083879753947258,
      "learning_rate": 4.3020000000000005e-05,
      "loss": 0.0136,
      "step": 7850
    },
    {
      "epoch": 2.725853996878793,
      "grad_norm": 0.07689789682626724,
      "learning_rate": 4.282000000000001e-05,
      "loss": 0.013,
      "step": 7860
    },
    {
      "epoch": 2.72932200450841,
      "grad_norm": 0.07872592657804489,
      "learning_rate": 4.262e-05,
      "loss": 0.0123,
      "step": 7870
    },
    {
      "epoch": 2.7327900121380266,
      "grad_norm": 0.10761665552854538,
      "learning_rate": 4.2420000000000004e-05,
      "loss": 0.0141,
      "step": 7880
    },
    {
      "epoch": 2.7362580197676434,
      "grad_norm": 0.07873303443193436,
      "learning_rate": 4.2220000000000006e-05,
      "loss": 0.0114,
      "step": 7890
    },
    {
      "epoch": 2.73972602739726,
      "grad_norm": 0.05685030296444893,
      "learning_rate": 4.202e-05,
      "loss": 0.0102,
      "step": 7900
    },
    {
      "epoch": 2.7431940350268773,
      "grad_norm": 0.10359474271535873,
      "learning_rate": 4.182e-05,
      "loss": 0.0131,
      "step": 7910
    },
    {
      "epoch": 2.746662042656494,
      "grad_norm": 0.07630922645330429,
      "learning_rate": 4.1620000000000005e-05,
      "loss": 0.0115,
      "step": 7920
    },
    {
      "epoch": 2.750130050286111,
      "grad_norm": 0.09103643894195557,
      "learning_rate": 4.142000000000001e-05,
      "loss": 0.012,
      "step": 7930
    },
    {
      "epoch": 2.7535980579157275,
      "grad_norm": 0.08436847478151321,
      "learning_rate": 4.122e-05,
      "loss": 0.0115,
      "step": 7940
    },
    {
      "epoch": 2.7570660655453443,
      "grad_norm": 0.09147916734218597,
      "learning_rate": 4.1020000000000004e-05,
      "loss": 0.0164,
      "step": 7950
    },
    {
      "epoch": 2.760534073174961,
      "grad_norm": 0.1505836397409439,
      "learning_rate": 4.0820000000000006e-05,
      "loss": 0.0136,
      "step": 7960
    },
    {
      "epoch": 2.7640020808045778,
      "grad_norm": 0.06953336298465729,
      "learning_rate": 4.062e-05,
      "loss": 0.0114,
      "step": 7970
    },
    {
      "epoch": 2.7674700884341945,
      "grad_norm": 0.08808320015668869,
      "learning_rate": 4.042e-05,
      "loss": 0.0095,
      "step": 7980
    },
    {
      "epoch": 2.7709380960638113,
      "grad_norm": 0.10280350595712662,
      "learning_rate": 4.0220000000000005e-05,
      "loss": 0.0095,
      "step": 7990
    },
    {
      "epoch": 2.774406103693428,
      "grad_norm": 0.11173941940069199,
      "learning_rate": 4.002e-05,
      "loss": 0.0128,
      "step": 8000
    },
    {
      "epoch": 2.774406103693428,
      "eval_loss": 0.06569719314575195,
      "eval_runtime": 714.8383,
      "eval_samples_per_second": 12.845,
      "eval_steps_per_second": 1.606,
      "step": 8000
    },
    {
      "epoch": 2.7778741113230447,
      "grad_norm": 0.0838409885764122,
      "learning_rate": 3.982e-05,
      "loss": 0.0132,
      "step": 8010
    },
    {
      "epoch": 2.7813421189526615,
      "grad_norm": 0.04326736554503441,
      "learning_rate": 3.9620000000000004e-05,
      "loss": 0.011,
      "step": 8020
    },
    {
      "epoch": 2.7848101265822782,
      "grad_norm": 0.054840609431266785,
      "learning_rate": 3.942e-05,
      "loss": 0.0104,
      "step": 8030
    },
    {
      "epoch": 2.7882781342118954,
      "grad_norm": 0.055376045405864716,
      "learning_rate": 3.922e-05,
      "loss": 0.012,
      "step": 8040
    },
    {
      "epoch": 2.791746141841512,
      "grad_norm": 0.10542810708284378,
      "learning_rate": 3.902e-05,
      "loss": 0.0143,
      "step": 8050
    },
    {
      "epoch": 2.795214149471129,
      "grad_norm": 0.09201773256063461,
      "learning_rate": 3.882e-05,
      "loss": 0.0131,
      "step": 8060
    },
    {
      "epoch": 2.7986821571007456,
      "grad_norm": 0.10081171244382858,
      "learning_rate": 3.862e-05,
      "loss": 0.0132,
      "step": 8070
    },
    {
      "epoch": 2.8021501647303624,
      "grad_norm": 0.13332189619541168,
      "learning_rate": 3.842e-05,
      "loss": 0.0119,
      "step": 8080
    },
    {
      "epoch": 2.805618172359979,
      "grad_norm": 0.060715481638908386,
      "learning_rate": 3.822e-05,
      "loss": 0.0132,
      "step": 8090
    },
    {
      "epoch": 2.809086179989596,
      "grad_norm": 0.08016696572303772,
      "learning_rate": 3.802e-05,
      "loss": 0.0118,
      "step": 8100
    },
    {
      "epoch": 2.8125541876192126,
      "grad_norm": 0.07697981595993042,
      "learning_rate": 3.782e-05,
      "loss": 0.0125,
      "step": 8110
    },
    {
      "epoch": 2.81602219524883,
      "grad_norm": 0.10400475561618805,
      "learning_rate": 3.762e-05,
      "loss": 0.0096,
      "step": 8120
    },
    {
      "epoch": 2.8194902028784465,
      "grad_norm": 0.09359320998191833,
      "learning_rate": 3.742e-05,
      "loss": 0.0124,
      "step": 8130
    },
    {
      "epoch": 2.8229582105080633,
      "grad_norm": 0.08394026756286621,
      "learning_rate": 3.722e-05,
      "loss": 0.0101,
      "step": 8140
    },
    {
      "epoch": 2.82642621813768,
      "grad_norm": 0.08613195270299911,
      "learning_rate": 3.702e-05,
      "loss": 0.0138,
      "step": 8150
    },
    {
      "epoch": 2.8298942257672968,
      "grad_norm": 0.1419047713279724,
      "learning_rate": 3.682e-05,
      "loss": 0.0109,
      "step": 8160
    },
    {
      "epoch": 2.8333622333969135,
      "grad_norm": 0.08159425854682922,
      "learning_rate": 3.6620000000000005e-05,
      "loss": 0.0117,
      "step": 8170
    },
    {
      "epoch": 2.8368302410265303,
      "grad_norm": 0.07656218111515045,
      "learning_rate": 3.642000000000001e-05,
      "loss": 0.014,
      "step": 8180
    },
    {
      "epoch": 2.840298248656147,
      "grad_norm": 0.07194588333368301,
      "learning_rate": 3.622e-05,
      "loss": 0.0097,
      "step": 8190
    },
    {
      "epoch": 2.8437662562857637,
      "grad_norm": 0.09303127229213715,
      "learning_rate": 3.6020000000000004e-05,
      "loss": 0.0098,
      "step": 8200
    },
    {
      "epoch": 2.8472342639153805,
      "grad_norm": 0.07557094097137451,
      "learning_rate": 3.5820000000000006e-05,
      "loss": 0.0105,
      "step": 8210
    },
    {
      "epoch": 2.8507022715449972,
      "grad_norm": 0.07813210785388947,
      "learning_rate": 3.562e-05,
      "loss": 0.0108,
      "step": 8220
    },
    {
      "epoch": 2.854170279174614,
      "grad_norm": 0.1083005964756012,
      "learning_rate": 3.542e-05,
      "loss": 0.0109,
      "step": 8230
    },
    {
      "epoch": 2.8576382868042307,
      "grad_norm": 0.11126165091991425,
      "learning_rate": 3.5220000000000005e-05,
      "loss": 0.0117,
      "step": 8240
    },
    {
      "epoch": 2.861106294433848,
      "grad_norm": 0.07165870070457458,
      "learning_rate": 3.502e-05,
      "loss": 0.0101,
      "step": 8250
    },
    {
      "epoch": 2.8645743020634646,
      "grad_norm": 0.11130272597074509,
      "learning_rate": 3.482e-05,
      "loss": 0.0132,
      "step": 8260
    },
    {
      "epoch": 2.8680423096930814,
      "grad_norm": 0.1115003302693367,
      "learning_rate": 3.4620000000000004e-05,
      "loss": 0.0157,
      "step": 8270
    },
    {
      "epoch": 2.871510317322698,
      "grad_norm": 0.11023005098104477,
      "learning_rate": 3.442e-05,
      "loss": 0.0117,
      "step": 8280
    },
    {
      "epoch": 2.874978324952315,
      "grad_norm": 0.11513439565896988,
      "learning_rate": 3.422e-05,
      "loss": 0.0132,
      "step": 8290
    },
    {
      "epoch": 2.8784463325819316,
      "grad_norm": 0.07337312400341034,
      "learning_rate": 3.402e-05,
      "loss": 0.011,
      "step": 8300
    },
    {
      "epoch": 2.8819143402115484,
      "grad_norm": 0.08071292191743851,
      "learning_rate": 3.3820000000000005e-05,
      "loss": 0.012,
      "step": 8310
    },
    {
      "epoch": 2.885382347841165,
      "grad_norm": 0.09460859000682831,
      "learning_rate": 3.362e-05,
      "loss": 0.0118,
      "step": 8320
    },
    {
      "epoch": 2.8888503554707823,
      "grad_norm": 0.08895740658044815,
      "learning_rate": 3.342e-05,
      "loss": 0.011,
      "step": 8330
    },
    {
      "epoch": 2.892318363100399,
      "grad_norm": 0.08386581391096115,
      "learning_rate": 3.3220000000000004e-05,
      "loss": 0.0108,
      "step": 8340
    },
    {
      "epoch": 2.8957863707300158,
      "grad_norm": 0.09314845502376556,
      "learning_rate": 3.302e-05,
      "loss": 0.0131,
      "step": 8350
    },
    {
      "epoch": 2.8992543783596325,
      "grad_norm": 0.06327735632658005,
      "learning_rate": 3.282e-05,
      "loss": 0.0101,
      "step": 8360
    },
    {
      "epoch": 2.9027223859892493,
      "grad_norm": 0.07077737152576447,
      "learning_rate": 3.262e-05,
      "loss": 0.0109,
      "step": 8370
    },
    {
      "epoch": 2.906190393618866,
      "grad_norm": 0.07617966085672379,
      "learning_rate": 3.242e-05,
      "loss": 0.0104,
      "step": 8380
    },
    {
      "epoch": 2.9096584012484827,
      "grad_norm": 0.10365297645330429,
      "learning_rate": 3.222e-05,
      "loss": 0.0114,
      "step": 8390
    },
    {
      "epoch": 2.9131264088780995,
      "grad_norm": 0.07602348923683167,
      "learning_rate": 3.202e-05,
      "loss": 0.011,
      "step": 8400
    },
    {
      "epoch": 2.9165944165077162,
      "grad_norm": 0.10498479008674622,
      "learning_rate": 3.182e-05,
      "loss": 0.0118,
      "step": 8410
    },
    {
      "epoch": 2.920062424137333,
      "grad_norm": 0.08163636922836304,
      "learning_rate": 3.162e-05,
      "loss": 0.0121,
      "step": 8420
    },
    {
      "epoch": 2.9235304317669497,
      "grad_norm": 0.09529927372932434,
      "learning_rate": 3.142e-05,
      "loss": 0.0128,
      "step": 8430
    },
    {
      "epoch": 2.9269984393965665,
      "grad_norm": 0.10269023478031158,
      "learning_rate": 3.122e-05,
      "loss": 0.0102,
      "step": 8440
    },
    {
      "epoch": 2.930466447026183,
      "grad_norm": 0.1097848117351532,
      "learning_rate": 3.102e-05,
      "loss": 0.0128,
      "step": 8450
    },
    {
      "epoch": 2.9339344546558004,
      "grad_norm": 0.06423291563987732,
      "learning_rate": 3.082e-05,
      "loss": 0.0086,
      "step": 8460
    },
    {
      "epoch": 2.937402462285417,
      "grad_norm": 0.10673278570175171,
      "learning_rate": 3.062e-05,
      "loss": 0.0128,
      "step": 8470
    },
    {
      "epoch": 2.940870469915034,
      "grad_norm": 0.08257333934307098,
      "learning_rate": 3.0420000000000004e-05,
      "loss": 0.0122,
      "step": 8480
    },
    {
      "epoch": 2.9443384775446506,
      "grad_norm": 0.08055904507637024,
      "learning_rate": 3.0220000000000005e-05,
      "loss": 0.0115,
      "step": 8490
    },
    {
      "epoch": 2.9478064851742674,
      "grad_norm": 0.09221120178699493,
      "learning_rate": 3.0020000000000004e-05,
      "loss": 0.0117,
      "step": 8500
    },
    {
      "epoch": 2.951274492803884,
      "grad_norm": 0.0831000804901123,
      "learning_rate": 2.9820000000000002e-05,
      "loss": 0.0108,
      "step": 8510
    },
    {
      "epoch": 2.954742500433501,
      "grad_norm": 0.06930562108755112,
      "learning_rate": 2.9620000000000004e-05,
      "loss": 0.0117,
      "step": 8520
    },
    {
      "epoch": 2.9582105080631176,
      "grad_norm": 0.07918361574411392,
      "learning_rate": 2.9420000000000003e-05,
      "loss": 0.0113,
      "step": 8530
    },
    {
      "epoch": 2.9616785156927348,
      "grad_norm": 0.06908438354730606,
      "learning_rate": 2.922e-05,
      "loss": 0.0101,
      "step": 8540
    },
    {
      "epoch": 2.9651465233223515,
      "grad_norm": 0.08593347668647766,
      "learning_rate": 2.9020000000000003e-05,
      "loss": 0.0127,
      "step": 8550
    },
    {
      "epoch": 2.9686145309519683,
      "grad_norm": 0.09223376214504242,
      "learning_rate": 2.8820000000000002e-05,
      "loss": 0.0137,
      "step": 8560
    },
    {
      "epoch": 2.972082538581585,
      "grad_norm": 0.09096557646989822,
      "learning_rate": 2.8620000000000004e-05,
      "loss": 0.0104,
      "step": 8570
    },
    {
      "epoch": 2.9755505462112017,
      "grad_norm": 0.0837118849158287,
      "learning_rate": 2.8420000000000002e-05,
      "loss": 0.0113,
      "step": 8580
    },
    {
      "epoch": 2.9790185538408185,
      "grad_norm": 0.060600802302360535,
      "learning_rate": 2.822e-05,
      "loss": 0.0119,
      "step": 8590
    },
    {
      "epoch": 2.9824865614704352,
      "grad_norm": 0.09768325090408325,
      "learning_rate": 2.8020000000000003e-05,
      "loss": 0.0094,
      "step": 8600
    },
    {
      "epoch": 2.985954569100052,
      "grad_norm": 0.10265090316534042,
      "learning_rate": 2.782e-05,
      "loss": 0.0117,
      "step": 8610
    },
    {
      "epoch": 2.9894225767296687,
      "grad_norm": 0.11693336814641953,
      "learning_rate": 2.762e-05,
      "loss": 0.0126,
      "step": 8620
    },
    {
      "epoch": 2.9928905843592855,
      "grad_norm": 0.06177123263478279,
      "learning_rate": 2.7420000000000002e-05,
      "loss": 0.0112,
      "step": 8630
    },
    {
      "epoch": 2.996358591988902,
      "grad_norm": 0.07003209739923477,
      "learning_rate": 2.722e-05,
      "loss": 0.0111,
      "step": 8640
    },
    {
      "epoch": 2.999826599618519,
      "grad_norm": 0.09619972854852676,
      "learning_rate": 2.7020000000000002e-05,
      "loss": 0.0113,
      "step": 8650
    },
    {
      "epoch": 3.003294607248136,
      "grad_norm": 0.04991637170314789,
      "learning_rate": 2.682e-05,
      "loss": 0.0045,
      "step": 8660
    },
    {
      "epoch": 3.006762614877753,
      "grad_norm": 0.054620299488306046,
      "learning_rate": 2.662e-05,
      "loss": 0.0029,
      "step": 8670
    },
    {
      "epoch": 3.0102306225073696,
      "grad_norm": 0.031793851405382156,
      "learning_rate": 2.642e-05,
      "loss": 0.003,
      "step": 8680
    },
    {
      "epoch": 3.0136986301369864,
      "grad_norm": 0.03903364762663841,
      "learning_rate": 2.622e-05,
      "loss": 0.0024,
      "step": 8690
    },
    {
      "epoch": 3.017166637766603,
      "grad_norm": 0.05518382415175438,
      "learning_rate": 2.602e-05,
      "loss": 0.0028,
      "step": 8700
    },
    {
      "epoch": 3.02063464539622,
      "grad_norm": 0.02921813167631626,
      "learning_rate": 2.582e-05,
      "loss": 0.0018,
      "step": 8710
    },
    {
      "epoch": 3.0241026530258366,
      "grad_norm": 0.08628415316343307,
      "learning_rate": 2.562e-05,
      "loss": 0.0027,
      "step": 8720
    },
    {
      "epoch": 3.0275706606554533,
      "grad_norm": 0.034173641353845596,
      "learning_rate": 2.542e-05,
      "loss": 0.002,
      "step": 8730
    },
    {
      "epoch": 3.03103866828507,
      "grad_norm": 0.03343451768159866,
      "learning_rate": 2.522e-05,
      "loss": 0.0028,
      "step": 8740
    },
    {
      "epoch": 3.034506675914687,
      "grad_norm": 0.0506439134478569,
      "learning_rate": 2.5019999999999998e-05,
      "loss": 0.002,
      "step": 8750
    },
    {
      "epoch": 3.037974683544304,
      "grad_norm": 0.011204255744814873,
      "learning_rate": 2.4820000000000003e-05,
      "loss": 0.0026,
      "step": 8760
    },
    {
      "epoch": 3.0414426911739207,
      "grad_norm": 0.05876510962843895,
      "learning_rate": 2.462e-05,
      "loss": 0.0023,
      "step": 8770
    },
    {
      "epoch": 3.0449106988035375,
      "grad_norm": 0.017792142927646637,
      "learning_rate": 2.442e-05,
      "loss": 0.0014,
      "step": 8780
    },
    {
      "epoch": 3.0483787064331542,
      "grad_norm": 0.0634002834558487,
      "learning_rate": 2.4220000000000002e-05,
      "loss": 0.002,
      "step": 8790
    },
    {
      "epoch": 3.051846714062771,
      "grad_norm": 0.10669626295566559,
      "learning_rate": 2.402e-05,
      "loss": 0.002,
      "step": 8800
    },
    {
      "epoch": 3.0553147216923877,
      "grad_norm": 0.02126486971974373,
      "learning_rate": 2.3820000000000002e-05,
      "loss": 0.002,
      "step": 8810
    },
    {
      "epoch": 3.0587827293220045,
      "grad_norm": 0.13548636436462402,
      "learning_rate": 2.362e-05,
      "loss": 0.0034,
      "step": 8820
    },
    {
      "epoch": 3.062250736951621,
      "grad_norm": 0.04129517078399658,
      "learning_rate": 2.342e-05,
      "loss": 0.0032,
      "step": 8830
    },
    {
      "epoch": 3.065718744581238,
      "grad_norm": 0.0712270438671112,
      "learning_rate": 2.322e-05,
      "loss": 0.0025,
      "step": 8840
    },
    {
      "epoch": 3.0691867522108547,
      "grad_norm": 0.03855830430984497,
      "learning_rate": 2.302e-05,
      "loss": 0.0022,
      "step": 8850
    },
    {
      "epoch": 3.0726547598404714,
      "grad_norm": 0.06443187594413757,
      "learning_rate": 2.282e-05,
      "loss": 0.0028,
      "step": 8860
    },
    {
      "epoch": 3.0761227674700886,
      "grad_norm": 0.09318159520626068,
      "learning_rate": 2.2620000000000004e-05,
      "loss": 0.0021,
      "step": 8870
    },
    {
      "epoch": 3.0795907750997054,
      "grad_norm": 0.06871949136257172,
      "learning_rate": 2.2420000000000002e-05,
      "loss": 0.0033,
      "step": 8880
    },
    {
      "epoch": 3.083058782729322,
      "grad_norm": 0.031738586723804474,
      "learning_rate": 2.222e-05,
      "loss": 0.0016,
      "step": 8890
    },
    {
      "epoch": 3.086526790358939,
      "grad_norm": 0.028125595301389694,
      "learning_rate": 2.2020000000000003e-05,
      "loss": 0.0026,
      "step": 8900
    },
    {
      "epoch": 3.0899947979885556,
      "grad_norm": 0.046880729496479034,
      "learning_rate": 2.182e-05,
      "loss": 0.0021,
      "step": 8910
    },
    {
      "epoch": 3.0934628056181723,
      "grad_norm": 0.06030425429344177,
      "learning_rate": 2.162e-05,
      "loss": 0.0029,
      "step": 8920
    },
    {
      "epoch": 3.096930813247789,
      "grad_norm": 0.028561096638441086,
      "learning_rate": 2.142e-05,
      "loss": 0.0023,
      "step": 8930
    },
    {
      "epoch": 3.100398820877406,
      "grad_norm": 0.04128929600119591,
      "learning_rate": 2.122e-05,
      "loss": 0.0022,
      "step": 8940
    },
    {
      "epoch": 3.1038668285070226,
      "grad_norm": 0.04722205549478531,
      "learning_rate": 2.1020000000000002e-05,
      "loss": 0.0027,
      "step": 8950
    },
    {
      "epoch": 3.1073348361366393,
      "grad_norm": 0.057234570384025574,
      "learning_rate": 2.082e-05,
      "loss": 0.0016,
      "step": 8960
    },
    {
      "epoch": 3.1108028437662565,
      "grad_norm": 0.010785787366330624,
      "learning_rate": 2.062e-05,
      "loss": 0.0016,
      "step": 8970
    },
    {
      "epoch": 3.1142708513958732,
      "grad_norm": 0.08110074698925018,
      "learning_rate": 2.042e-05,
      "loss": 0.0022,
      "step": 8980
    },
    {
      "epoch": 3.11773885902549,
      "grad_norm": 0.04953372851014137,
      "learning_rate": 2.022e-05,
      "loss": 0.0022,
      "step": 8990
    },
    {
      "epoch": 3.1212068666551067,
      "grad_norm": 0.022535376250743866,
      "learning_rate": 2.002e-05,
      "loss": 0.0015,
      "step": 9000
    },
    {
      "epoch": 3.1212068666551067,
      "eval_loss": 0.0743083506822586,
      "eval_runtime": 714.2967,
      "eval_samples_per_second": 12.855,
      "eval_steps_per_second": 1.607,
      "step": 9000
    }
  ],
  "logging_steps": 10,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.85989473816475e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}