{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 5000,
  "global_step": 2014,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009930486593843098,
      "grad_norm": Infinity,
      "learning_rate": 0.0,
      "loss": 14.711,
      "step": 10
    },
    {
      "epoch": 0.019860973187686197,
      "grad_norm": 4983.64990234375,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 14.5984,
      "step": 20
    },
    {
      "epoch": 0.029791459781529295,
      "grad_norm": 19263.828125,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 14.0403,
      "step": 30
    },
    {
      "epoch": 0.039721946375372394,
      "grad_norm": 33094.21484375,
      "learning_rate": 6.000000000000001e-07,
      "loss": 12.7662,
      "step": 40
    },
    {
      "epoch": 0.04965243296921549,
      "grad_norm": 92821.109375,
      "learning_rate": 8.000000000000001e-07,
      "loss": 11.4273,
      "step": 50
    },
    {
      "epoch": 0.05958291956305859,
      "grad_norm": 8011.658203125,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 10.0555,
      "step": 60
    },
    {
      "epoch": 0.06951340615690169,
      "grad_norm": 6365.61962890625,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 9.0921,
      "step": 70
    },
    {
      "epoch": 0.07944389275074479,
      "grad_norm": 398.2843933105469,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 7.661,
      "step": 80
    },
    {
      "epoch": 0.08937437934458789,
      "grad_norm": 158.12139892578125,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 6.3558,
      "step": 90
    },
    {
      "epoch": 0.09930486593843098,
      "grad_norm": 103.36075592041016,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 5.9047,
      "step": 100
    },
    {
      "epoch": 0.10923535253227408,
      "grad_norm": 565.8269653320312,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 5.6256,
      "step": 110
    },
    {
      "epoch": 0.11916583912611718,
      "grad_norm": 414.0867919921875,
      "learning_rate": 2.2e-06,
      "loss": 5.3668,
      "step": 120
    },
    {
      "epoch": 0.12909632571996027,
      "grad_norm": 153.89100646972656,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 5.0483,
      "step": 130
    },
    {
      "epoch": 0.13902681231380337,
      "grad_norm": 149.46444702148438,
      "learning_rate": 2.6e-06,
      "loss": 4.8008,
      "step": 140
    },
    {
      "epoch": 0.14895729890764647,
      "grad_norm": 259.7363586425781,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 4.6652,
      "step": 150
    },
    {
      "epoch": 0.15888778550148958,
      "grad_norm": 575.7271728515625,
      "learning_rate": 3e-06,
      "loss": 4.4912,
      "step": 160
    },
    {
      "epoch": 0.16881827209533268,
      "grad_norm": 316.88525390625,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 4.2479,
      "step": 170
    },
    {
      "epoch": 0.17874875868917578,
      "grad_norm": 310.1224670410156,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 4.0592,
      "step": 180
    },
    {
      "epoch": 0.18867924528301888,
      "grad_norm": 366.346435546875,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 3.8585,
      "step": 190
    },
    {
      "epoch": 0.19860973187686196,
      "grad_norm": 242.16476440429688,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 3.6879,
      "step": 200
    },
    {
      "epoch": 0.20854021847070506,
      "grad_norm": 445.31646728515625,
      "learning_rate": 4.000000000000001e-06,
      "loss": 3.5381,
      "step": 210
    },
    {
      "epoch": 0.21847070506454816,
      "grad_norm": 345.4029235839844,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 3.272,
      "step": 220
    },
    {
      "epoch": 0.22840119165839126,
      "grad_norm": 243.06161499023438,
      "learning_rate": 4.4e-06,
      "loss": 3.0486,
      "step": 230
    },
    {
      "epoch": 0.23833167825223436,
      "grad_norm": 237.27239990234375,
      "learning_rate": 4.600000000000001e-06,
      "loss": 2.7942,
      "step": 240
    },
    {
      "epoch": 0.24826216484607747,
      "grad_norm": 209.28985595703125,
      "learning_rate": 4.800000000000001e-06,
      "loss": 2.5742,
      "step": 250
    },
    {
      "epoch": 0.25819265143992054,
      "grad_norm": 458.2777099609375,
      "learning_rate": 5e-06,
      "loss": 2.5509,
      "step": 260
    },
    {
      "epoch": 0.26812313803376364,
      "grad_norm": 431.66522216796875,
      "learning_rate": 5.2e-06,
      "loss": 2.4819,
      "step": 270
    },
    {
      "epoch": 0.27805362462760674,
      "grad_norm": 314.18792724609375,
      "learning_rate": 5.400000000000001e-06,
      "loss": 2.2044,
      "step": 280
    },
    {
      "epoch": 0.28798411122144985,
      "grad_norm": 366.9264831542969,
      "learning_rate": 5.600000000000001e-06,
      "loss": 1.9759,
      "step": 290
    },
    {
      "epoch": 0.29791459781529295,
      "grad_norm": 148.99159240722656,
      "learning_rate": 5.8e-06,
      "loss": 1.7,
      "step": 300
    },
    {
      "epoch": 0.30784508440913605,
      "grad_norm": 324.4754638671875,
      "learning_rate": 6e-06,
      "loss": 1.5349,
      "step": 310
    },
    {
      "epoch": 0.31777557100297915,
      "grad_norm": 205.2728729248047,
      "learning_rate": 6.200000000000001e-06,
      "loss": 1.4228,
      "step": 320
    },
    {
      "epoch": 0.32770605759682225,
      "grad_norm": 312.1114807128906,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 1.3114,
      "step": 330
    },
    {
      "epoch": 0.33763654419066536,
      "grad_norm": 274.58221435546875,
      "learning_rate": 6.600000000000001e-06,
      "loss": 1.2638,
      "step": 340
    },
    {
      "epoch": 0.34756703078450846,
      "grad_norm": 239.2190399169922,
      "learning_rate": 6.800000000000001e-06,
      "loss": 1.031,
      "step": 350
    },
    {
      "epoch": 0.35749751737835156,
      "grad_norm": 208.7018585205078,
      "learning_rate": 7e-06,
      "loss": 0.92,
      "step": 360
    },
    {
      "epoch": 0.36742800397219466,
      "grad_norm": 237.0693817138672,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 1.665,
      "step": 370
    },
    {
      "epoch": 0.37735849056603776,
      "grad_norm": 208.263671875,
      "learning_rate": 7.4e-06,
      "loss": 0.7744,
      "step": 380
    },
    {
      "epoch": 0.3872889771598808,
      "grad_norm": 184.25924682617188,
      "learning_rate": 7.600000000000001e-06,
      "loss": 0.809,
      "step": 390
    },
    {
      "epoch": 0.3972194637537239,
      "grad_norm": 146.69178771972656,
      "learning_rate": 7.800000000000002e-06,
      "loss": 0.7246,
      "step": 400
    },
    {
      "epoch": 0.407149950347567,
      "grad_norm": 207.02853393554688,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.7616,
      "step": 410
    },
    {
      "epoch": 0.4170804369414101,
      "grad_norm": 146.81492614746094,
      "learning_rate": 8.2e-06,
      "loss": 0.5971,
      "step": 420
    },
    {
      "epoch": 0.4270109235352532,
      "grad_norm": 135.36524963378906,
      "learning_rate": 8.400000000000001e-06,
      "loss": 1.2755,
      "step": 430
    },
    {
      "epoch": 0.4369414101290963,
      "grad_norm": 298.3485107421875,
      "learning_rate": 8.6e-06,
      "loss": 1.6886,
      "step": 440
    },
    {
      "epoch": 0.4468718967229394,
      "grad_norm": 240.9180908203125,
      "learning_rate": 8.8e-06,
      "loss": 0.8591,
      "step": 450
    },
    {
      "epoch": 0.4568023833167825,
      "grad_norm": 152.96194458007812,
      "learning_rate": 9e-06,
      "loss": 0.7211,
      "step": 460
    },
    {
      "epoch": 0.4667328699106256,
      "grad_norm": 5467.83447265625,
      "learning_rate": 9.180000000000002e-06,
      "loss": 1.4453,
      "step": 470
    },
    {
      "epoch": 0.4766633565044687,
      "grad_norm": 249.68312072753906,
      "learning_rate": 9.38e-06,
      "loss": 0.718,
      "step": 480
    },
    {
      "epoch": 0.48659384309831183,
      "grad_norm": 75.28675842285156,
      "learning_rate": 9.58e-06,
      "loss": 0.5454,
      "step": 490
    },
    {
      "epoch": 0.49652432969215493,
      "grad_norm": 98.99024963378906,
      "learning_rate": 9.780000000000001e-06,
      "loss": 0.5422,
      "step": 500
    },
    {
      "epoch": 0.506454816285998,
      "grad_norm": 97.877197265625,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.5424,
      "step": 510
    },
    {
      "epoch": 0.5163853028798411,
      "grad_norm": 58.41091537475586,
      "learning_rate": 9.940554821664465e-06,
      "loss": 0.4753,
      "step": 520
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 50.91957092285156,
      "learning_rate": 9.87450462351387e-06,
      "loss": 0.4657,
      "step": 530
    },
    {
      "epoch": 0.5362462760675273,
      "grad_norm": 66.574462890625,
      "learning_rate": 9.808454425363277e-06,
      "loss": 0.378,
      "step": 540
    },
    {
      "epoch": 0.5461767626613704,
      "grad_norm": 108.19845581054688,
      "learning_rate": 9.742404227212683e-06,
      "loss": 0.4171,
      "step": 550
    },
    {
      "epoch": 0.5561072492552135,
      "grad_norm": 44.33118438720703,
      "learning_rate": 9.676354029062089e-06,
      "loss": 0.4289,
      "step": 560
    },
    {
      "epoch": 0.5660377358490566,
      "grad_norm": 61.063758850097656,
      "learning_rate": 9.610303830911495e-06,
      "loss": 0.3511,
      "step": 570
    },
    {
      "epoch": 0.5759682224428997,
      "grad_norm": 91.71956634521484,
      "learning_rate": 9.544253632760899e-06,
      "loss": 0.4377,
      "step": 580
    },
    {
      "epoch": 0.5858987090367428,
      "grad_norm": 46.6739501953125,
      "learning_rate": 9.478203434610305e-06,
      "loss": 0.351,
      "step": 590
    },
    {
      "epoch": 0.5958291956305859,
      "grad_norm": 38.2111701965332,
      "learning_rate": 9.41215323645971e-06,
      "loss": 0.3556,
      "step": 600
    },
    {
      "epoch": 0.605759682224429,
      "grad_norm": 61.8499755859375,
      "learning_rate": 9.346103038309115e-06,
      "loss": 0.4027,
      "step": 610
    },
    {
      "epoch": 0.6156901688182721,
      "grad_norm": 219.7929229736328,
      "learning_rate": 9.280052840158521e-06,
      "loss": 0.4932,
      "step": 620
    },
    {
      "epoch": 0.6256206554121152,
      "grad_norm": 52.08991241455078,
      "learning_rate": 9.214002642007927e-06,
      "loss": 0.4588,
      "step": 630
    },
    {
      "epoch": 0.6355511420059583,
      "grad_norm": 28.5477294921875,
      "learning_rate": 9.147952443857333e-06,
      "loss": 0.3302,
      "step": 640
    },
    {
      "epoch": 0.6454816285998014,
      "grad_norm": 24.50048065185547,
      "learning_rate": 9.081902245706737e-06,
      "loss": 0.3144,
      "step": 650
    },
    {
      "epoch": 0.6554121151936445,
      "grad_norm": 59.86844253540039,
      "learning_rate": 9.015852047556145e-06,
      "loss": 0.3919,
      "step": 660
    },
    {
      "epoch": 0.6653426017874876,
      "grad_norm": 22.47603988647461,
      "learning_rate": 8.949801849405549e-06,
      "loss": 0.37,
      "step": 670
    },
    {
      "epoch": 0.6752730883813307,
      "grad_norm": 21.400466918945312,
      "learning_rate": 8.883751651254955e-06,
      "loss": 0.3293,
      "step": 680
    },
    {
      "epoch": 0.6852035749751738,
      "grad_norm": 16.669824600219727,
      "learning_rate": 8.81770145310436e-06,
      "loss": 0.2647,
      "step": 690
    },
    {
      "epoch": 0.6951340615690169,
      "grad_norm": 23.852384567260742,
      "learning_rate": 8.751651254953765e-06,
      "loss": 0.3246,
      "step": 700
    },
    {
      "epoch": 0.70506454816286,
      "grad_norm": 38.244258880615234,
      "learning_rate": 8.685601056803171e-06,
      "loss": 0.385,
      "step": 710
    },
    {
      "epoch": 0.7149950347567031,
      "grad_norm": 29.78006362915039,
      "learning_rate": 8.619550858652577e-06,
      "loss": 0.3071,
      "step": 720
    },
    {
      "epoch": 0.7249255213505462,
      "grad_norm": 24.519458770751953,
      "learning_rate": 8.553500660501983e-06,
      "loss": 0.3429,
      "step": 730
    },
    {
      "epoch": 0.7348560079443893,
      "grad_norm": 25.153671264648438,
      "learning_rate": 8.487450462351387e-06,
      "loss": 0.3843,
      "step": 740
    },
    {
      "epoch": 0.7447864945382324,
      "grad_norm": 27.24303436279297,
      "learning_rate": 8.421400264200793e-06,
      "loss": 0.369,
      "step": 750
    },
    {
      "epoch": 0.7547169811320755,
      "grad_norm": 14.806120872497559,
      "learning_rate": 8.355350066050199e-06,
      "loss": 0.301,
      "step": 760
    },
    {
      "epoch": 0.7646474677259185,
      "grad_norm": 22.724933624267578,
      "learning_rate": 8.289299867899603e-06,
      "loss": 0.3101,
      "step": 770
    },
    {
      "epoch": 0.7745779543197616,
      "grad_norm": 15.89284610748291,
      "learning_rate": 8.22324966974901e-06,
      "loss": 0.3665,
      "step": 780
    },
    {
      "epoch": 0.7845084409136047,
      "grad_norm": 87.12956237792969,
      "learning_rate": 8.157199471598415e-06,
      "loss": 0.4025,
      "step": 790
    },
    {
      "epoch": 0.7944389275074478,
      "grad_norm": 27.03380012512207,
      "learning_rate": 8.091149273447821e-06,
      "loss": 0.2919,
      "step": 800
    },
    {
      "epoch": 0.8043694141012909,
      "grad_norm": 27.622365951538086,
      "learning_rate": 8.025099075297227e-06,
      "loss": 0.3441,
      "step": 810
    },
    {
      "epoch": 0.814299900695134,
      "grad_norm": 32.42015838623047,
      "learning_rate": 7.959048877146633e-06,
      "loss": 0.3307,
      "step": 820
    },
    {
      "epoch": 0.8242303872889771,
      "grad_norm": 16.96957015991211,
      "learning_rate": 7.892998678996037e-06,
      "loss": 0.3224,
      "step": 830
    },
    {
      "epoch": 0.8341608738828202,
      "grad_norm": 12.882108688354492,
      "learning_rate": 7.826948480845443e-06,
      "loss": 0.2992,
      "step": 840
    },
    {
      "epoch": 0.8440913604766633,
      "grad_norm": 17.12813949584961,
      "learning_rate": 7.760898282694849e-06,
      "loss": 0.2783,
      "step": 850
    },
    {
      "epoch": 0.8540218470705064,
      "grad_norm": 57.04343795776367,
      "learning_rate": 7.694848084544253e-06,
      "loss": 0.4445,
      "step": 860
    },
    {
      "epoch": 0.8639523336643495,
      "grad_norm": 17.658185958862305,
      "learning_rate": 7.628797886393659e-06,
      "loss": 0.322,
      "step": 870
    },
    {
      "epoch": 0.8738828202581926,
      "grad_norm": 37.49673080444336,
      "learning_rate": 7.562747688243065e-06,
      "loss": 0.5781,
      "step": 880
    },
    {
      "epoch": 0.8838133068520357,
      "grad_norm": 22.256288528442383,
      "learning_rate": 7.49669749009247e-06,
      "loss": 0.3146,
      "step": 890
    },
    {
      "epoch": 0.8937437934458788,
      "grad_norm": 28.0767765045166,
      "learning_rate": 7.430647291941876e-06,
      "loss": 0.2795,
      "step": 900
    },
    {
      "epoch": 0.9036742800397219,
      "grad_norm": 26.734256744384766,
      "learning_rate": 7.364597093791282e-06,
      "loss": 0.286,
      "step": 910
    },
    {
      "epoch": 0.913604766633565,
      "grad_norm": 16.944440841674805,
      "learning_rate": 7.298546895640688e-06,
      "loss": 0.2396,
      "step": 920
    },
    {
      "epoch": 0.9235352532274081,
      "grad_norm": 11.377096176147461,
      "learning_rate": 7.232496697490093e-06,
      "loss": 0.3234,
      "step": 930
    },
    {
      "epoch": 0.9334657398212513,
      "grad_norm": 21.92275047302246,
      "learning_rate": 7.166446499339499e-06,
      "loss": 0.3054,
      "step": 940
    },
    {
      "epoch": 0.9433962264150944,
      "grad_norm": 11.469223976135254,
      "learning_rate": 7.100396301188904e-06,
      "loss": 0.2805,
      "step": 950
    },
    {
      "epoch": 0.9533267130089375,
      "grad_norm": 21.188453674316406,
      "learning_rate": 7.034346103038309e-06,
      "loss": 0.3169,
      "step": 960
    },
    {
      "epoch": 0.9632571996027806,
      "grad_norm": 14.333311080932617,
      "learning_rate": 6.968295904887715e-06,
      "loss": 0.2381,
      "step": 970
    },
    {
      "epoch": 0.9731876861966237,
      "grad_norm": 10.782018661499023,
      "learning_rate": 6.90224570673712e-06,
      "loss": 0.2914,
      "step": 980
    },
    {
      "epoch": 0.9831181727904668,
      "grad_norm": 12.117927551269531,
      "learning_rate": 6.836195508586526e-06,
      "loss": 0.3186,
      "step": 990
    },
    {
      "epoch": 0.9930486593843099,
      "grad_norm": 15.109565734863281,
      "learning_rate": 6.7701453104359315e-06,
      "loss": 0.2323,
      "step": 1000
    },
    {
      "epoch": 1.002979145978153,
      "grad_norm": 27.17137336730957,
      "learning_rate": 6.704095112285337e-06,
      "loss": 0.2664,
      "step": 1010
    },
    {
      "epoch": 1.012909632571996,
      "grad_norm": 17.559146881103516,
      "learning_rate": 6.6380449141347425e-06,
      "loss": 0.3117,
      "step": 1020
    },
    {
      "epoch": 1.0228401191658392,
      "grad_norm": 18.243568420410156,
      "learning_rate": 6.571994715984148e-06,
      "loss": 0.2746,
      "step": 1030
    },
    {
      "epoch": 1.0327706057596822,
      "grad_norm": 19.41541862487793,
      "learning_rate": 6.505944517833554e-06,
      "loss": 0.2757,
      "step": 1040
    },
    {
      "epoch": 1.0427010923535254,
      "grad_norm": 23.584136962890625,
      "learning_rate": 6.43989431968296e-06,
      "loss": 0.3311,
      "step": 1050
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 8.158661842346191,
      "learning_rate": 6.3738441215323654e-06,
      "loss": 0.2664,
      "step": 1060
    },
    {
      "epoch": 1.0625620655412116,
      "grad_norm": 14.265728950500488,
      "learning_rate": 6.3077939233817705e-06,
      "loss": 0.3737,
      "step": 1070
    },
    {
      "epoch": 1.0724925521350546,
      "grad_norm": 45.05762481689453,
      "learning_rate": 6.2417437252311765e-06,
      "loss": 0.2805,
      "step": 1080
    },
    {
      "epoch": 1.0824230387288978,
      "grad_norm": 26.889284133911133,
      "learning_rate": 6.1756935270805816e-06,
      "loss": 0.3286,
      "step": 1090
    },
    {
      "epoch": 1.0923535253227408,
      "grad_norm": 14.202479362487793,
      "learning_rate": 6.1096433289299875e-06,
      "loss": 0.2859,
      "step": 1100
    },
    {
      "epoch": 1.102284011916584,
      "grad_norm": 14.509482383728027,
      "learning_rate": 6.043593130779393e-06,
      "loss": 0.2255,
      "step": 1110
    },
    {
      "epoch": 1.112214498510427,
      "grad_norm": 17.728469848632812,
      "learning_rate": 5.9775429326287985e-06,
      "loss": 0.2592,
      "step": 1120
    },
    {
      "epoch": 1.1221449851042702,
      "grad_norm": 15.352997779846191,
      "learning_rate": 5.911492734478204e-06,
      "loss": 0.2538,
      "step": 1130
    },
    {
      "epoch": 1.1320754716981132,
      "grad_norm": 12.579967498779297,
      "learning_rate": 5.845442536327609e-06,
      "loss": 0.2803,
      "step": 1140
    },
    {
      "epoch": 1.1420059582919564,
      "grad_norm": 9.809431076049805,
      "learning_rate": 5.779392338177015e-06,
      "loss": 0.2874,
      "step": 1150
    },
    {
      "epoch": 1.1519364448857994,
      "grad_norm": 18.45332145690918,
      "learning_rate": 5.7133421400264215e-06,
      "loss": 0.3388,
      "step": 1160
    },
    {
      "epoch": 1.1618669314796426,
      "grad_norm": 15.697205543518066,
      "learning_rate": 5.6472919418758266e-06,
      "loss": 0.6377,
      "step": 1170
    },
    {
      "epoch": 1.1717974180734856,
      "grad_norm": 245.89466857910156,
      "learning_rate": 5.581241743725232e-06,
      "loss": 0.4898,
      "step": 1180
    },
    {
      "epoch": 1.1817279046673286,
      "grad_norm": 13.679891586303711,
      "learning_rate": 5.515191545574638e-06,
      "loss": 0.3288,
      "step": 1190
    },
    {
      "epoch": 1.1916583912611718,
      "grad_norm": 16.556507110595703,
      "learning_rate": 5.449141347424043e-06,
      "loss": 0.3122,
      "step": 1200
    },
    {
      "epoch": 1.201588877855015,
      "grad_norm": 14.205398559570312,
      "learning_rate": 5.383091149273449e-06,
      "loss": 0.2981,
      "step": 1210
    },
    {
      "epoch": 1.211519364448858,
      "grad_norm": 12.833841323852539,
      "learning_rate": 5.317040951122854e-06,
      "loss": 0.3071,
      "step": 1220
    },
    {
      "epoch": 1.221449851042701,
      "grad_norm": 23.980009078979492,
      "learning_rate": 5.250990752972259e-06,
      "loss": 0.2632,
      "step": 1230
    },
    {
      "epoch": 1.2313803376365442,
      "grad_norm": 17.329069137573242,
      "learning_rate": 5.184940554821665e-06,
      "loss": 0.3039,
      "step": 1240
    },
    {
      "epoch": 1.2413108242303874,
      "grad_norm": 24.93738555908203,
      "learning_rate": 5.11889035667107e-06,
      "loss": 0.3467,
      "step": 1250
    },
    {
      "epoch": 1.2512413108242304,
      "grad_norm": 14.432474136352539,
      "learning_rate": 5.052840158520476e-06,
      "loss": 0.2253,
      "step": 1260
    },
    {
      "epoch": 1.2611717974180734,
      "grad_norm": 21.262495040893555,
      "learning_rate": 4.986789960369882e-06,
      "loss": 0.3464,
      "step": 1270
    },
    {
      "epoch": 1.2711022840119166,
      "grad_norm": 16.39492416381836,
      "learning_rate": 4.920739762219287e-06,
      "loss": 0.2812,
      "step": 1280
    },
    {
      "epoch": 1.2810327706057598,
      "grad_norm": 9.893121719360352,
      "learning_rate": 4.854689564068693e-06,
      "loss": 0.302,
      "step": 1290
    },
    {
      "epoch": 1.2909632571996028,
      "grad_norm": 7.951263427734375,
      "learning_rate": 4.788639365918098e-06,
      "loss": 0.4157,
      "step": 1300
    },
    {
      "epoch": 1.3008937437934458,
      "grad_norm": 8.177956581115723,
      "learning_rate": 4.722589167767504e-06,
      "loss": 0.206,
      "step": 1310
    },
    {
      "epoch": 1.310824230387289,
      "grad_norm": 6.12276029586792,
      "learning_rate": 4.65653896961691e-06,
      "loss": 0.2143,
      "step": 1320
    },
    {
      "epoch": 1.320754716981132,
      "grad_norm": 16.6575984954834,
      "learning_rate": 4.590488771466315e-06,
      "loss": 0.2677,
      "step": 1330
    },
    {
      "epoch": 1.3306852035749752,
      "grad_norm": 8.128823280334473,
      "learning_rate": 4.52443857331572e-06,
      "loss": 0.2279,
      "step": 1340
    },
    {
      "epoch": 1.3406156901688182,
      "grad_norm": 21.915355682373047,
      "learning_rate": 4.458388375165126e-06,
      "loss": 0.2724,
      "step": 1350
    },
    {
      "epoch": 1.3505461767626614,
      "grad_norm": 18.645946502685547,
      "learning_rate": 4.392338177014531e-06,
      "loss": 0.2307,
      "step": 1360
    },
    {
      "epoch": 1.3604766633565044,
      "grad_norm": 12.229573249816895,
      "learning_rate": 4.326287978863937e-06,
      "loss": 0.3043,
      "step": 1370
    },
    {
      "epoch": 1.3704071499503476,
      "grad_norm": 21.465803146362305,
      "learning_rate": 4.260237780713343e-06,
      "loss": 0.3025,
      "step": 1380
    },
    {
      "epoch": 1.3803376365441906,
      "grad_norm": 12.59046459197998,
      "learning_rate": 4.194187582562748e-06,
      "loss": 0.3685,
      "step": 1390
    },
    {
      "epoch": 1.3902681231380338,
      "grad_norm": 20.229293823242188,
      "learning_rate": 4.128137384412154e-06,
      "loss": 0.2627,
      "step": 1400
    },
    {
      "epoch": 1.4001986097318768,
      "grad_norm": 5.609831809997559,
      "learning_rate": 4.062087186261559e-06,
      "loss": 0.3011,
      "step": 1410
    },
    {
      "epoch": 1.41012909632572,
      "grad_norm": 13.317000389099121,
      "learning_rate": 3.996036988110964e-06,
      "loss": 0.226,
      "step": 1420
    },
    {
      "epoch": 1.420059582919563,
      "grad_norm": 10.011391639709473,
      "learning_rate": 3.92998678996037e-06,
      "loss": 0.2295,
      "step": 1430
    },
    {
      "epoch": 1.4299900695134062,
      "grad_norm": 14.472879409790039,
      "learning_rate": 3.863936591809776e-06,
      "loss": 0.2577,
      "step": 1440
    },
    {
      "epoch": 1.4399205561072492,
      "grad_norm": 7.8662614822387695,
      "learning_rate": 3.7978863936591815e-06,
      "loss": 0.3256,
      "step": 1450
    },
    {
      "epoch": 1.4498510427010922,
      "grad_norm": 23.64836883544922,
      "learning_rate": 3.731836195508587e-06,
      "loss": 0.336,
      "step": 1460
    },
    {
      "epoch": 1.4597815292949354,
      "grad_norm": 4.431447982788086,
      "learning_rate": 3.665785997357992e-06,
      "loss": 0.3114,
      "step": 1470
    },
    {
      "epoch": 1.4697120158887786,
      "grad_norm": 37.42151641845703,
      "learning_rate": 3.5997357992073977e-06,
      "loss": 0.358,
      "step": 1480
    },
    {
      "epoch": 1.4796425024826216,
      "grad_norm": 9.819945335388184,
      "learning_rate": 3.533685601056803e-06,
      "loss": 0.2852,
      "step": 1490
    },
    {
      "epoch": 1.4895729890764646,
      "grad_norm": 7.768363952636719,
      "learning_rate": 3.467635402906209e-06,
      "loss": 0.2703,
      "step": 1500
    },
    {
      "epoch": 1.4995034756703078,
      "grad_norm": 22.80790901184082,
      "learning_rate": 3.4015852047556147e-06,
      "loss": 0.2419,
      "step": 1510
    },
    {
      "epoch": 1.509433962264151,
      "grad_norm": 14.61506175994873,
      "learning_rate": 3.33553500660502e-06,
      "loss": 0.2263,
      "step": 1520
    },
    {
      "epoch": 1.519364448857994,
      "grad_norm": 17.9895076751709,
      "learning_rate": 3.2694848084544257e-06,
      "loss": 0.3096,
      "step": 1530
    },
    {
      "epoch": 1.529294935451837,
      "grad_norm": 17.43570899963379,
      "learning_rate": 3.2034346103038312e-06,
      "loss": 0.2388,
      "step": 1540
    },
    {
      "epoch": 1.5392254220456802,
      "grad_norm": 24.771135330200195,
      "learning_rate": 3.1373844121532363e-06,
      "loss": 0.2859,
      "step": 1550
    },
    {
      "epoch": 1.5491559086395235,
      "grad_norm": 14.774213790893555,
      "learning_rate": 3.0713342140026427e-06,
      "loss": 0.2478,
      "step": 1560
    },
    {
      "epoch": 1.5590863952333665,
      "grad_norm": 9.036184310913086,
      "learning_rate": 3.0052840158520478e-06,
      "loss": 0.2538,
      "step": 1570
    },
    {
      "epoch": 1.5690168818272094,
      "grad_norm": 15.238768577575684,
      "learning_rate": 2.9392338177014533e-06,
      "loss": 0.1995,
      "step": 1580
    },
    {
      "epoch": 1.5789473684210527,
      "grad_norm": 7.666260242462158,
      "learning_rate": 2.873183619550859e-06,
      "loss": 0.2648,
      "step": 1590
    },
    {
      "epoch": 1.5888778550148959,
      "grad_norm": 14.299159049987793,
      "learning_rate": 2.8071334214002643e-06,
      "loss": 0.2817,
      "step": 1600
    },
    {
      "epoch": 1.5988083416087389,
      "grad_norm": 8.902689933776855,
      "learning_rate": 2.74108322324967e-06,
      "loss": 0.6261,
      "step": 1610
    },
    {
      "epoch": 1.6087388282025818,
      "grad_norm": 11.452303886413574,
      "learning_rate": 2.6750330250990754e-06,
      "loss": 0.2743,
      "step": 1620
    },
    {
      "epoch": 1.618669314796425,
      "grad_norm": 8.431117057800293,
      "learning_rate": 2.6089828269484813e-06,
      "loss": 0.2472,
      "step": 1630
    },
    {
      "epoch": 1.6285998013902683,
      "grad_norm": 13.621753692626953,
      "learning_rate": 2.542932628797887e-06,
      "loss": 0.3552,
      "step": 1640
    },
    {
      "epoch": 1.6385302879841113,
      "grad_norm": 28.211891174316406,
      "learning_rate": 2.4768824306472924e-06,
      "loss": 0.2211,
      "step": 1650
    },
    {
      "epoch": 1.6484607745779543,
      "grad_norm": 9.634784698486328,
      "learning_rate": 2.4108322324966975e-06,
      "loss": 0.7658,
      "step": 1660
    },
    {
      "epoch": 1.6583912611717975,
      "grad_norm": 10.460068702697754,
      "learning_rate": 2.3447820343461034e-06,
      "loss": 0.2257,
      "step": 1670
    },
    {
      "epoch": 1.6683217477656405,
      "grad_norm": 8.11988639831543,
      "learning_rate": 2.278731836195509e-06,
      "loss": 0.2626,
      "step": 1680
    },
    {
      "epoch": 1.6782522343594835,
      "grad_norm": 12.30396842956543,
      "learning_rate": 2.2126816380449144e-06,
      "loss": 0.2174,
      "step": 1690
    },
    {
      "epoch": 1.6881827209533267,
      "grad_norm": 45.93663787841797,
      "learning_rate": 2.14663143989432e-06,
      "loss": 0.3282,
      "step": 1700
    },
    {
      "epoch": 1.6981132075471699,
      "grad_norm": 15.4673490524292,
      "learning_rate": 2.0805812417437255e-06,
      "loss": 0.2126,
      "step": 1710
    },
    {
      "epoch": 1.7080436941410129,
      "grad_norm": 13.173709869384766,
      "learning_rate": 2.014531043593131e-06,
      "loss": 0.2311,
      "step": 1720
    },
    {
      "epoch": 1.7179741807348559,
      "grad_norm": 17.14373779296875,
      "learning_rate": 1.9484808454425365e-06,
      "loss": 0.2538,
      "step": 1730
    },
    {
      "epoch": 1.727904667328699,
      "grad_norm": 13.05432415008545,
      "learning_rate": 1.882430647291942e-06,
      "loss": 0.3432,
      "step": 1740
    },
    {
      "epoch": 1.7378351539225423,
      "grad_norm": 6.768345832824707,
      "learning_rate": 1.8163804491413476e-06,
      "loss": 0.2487,
      "step": 1750
    },
    {
      "epoch": 1.7477656405163853,
      "grad_norm": 7.953887939453125,
      "learning_rate": 1.750330250990753e-06,
      "loss": 0.2478,
      "step": 1760
    },
    {
      "epoch": 1.7576961271102283,
      "grad_norm": 10.380583763122559,
      "learning_rate": 1.6842800528401588e-06,
      "loss": 0.2797,
      "step": 1770
    },
    {
      "epoch": 1.7676266137040715,
      "grad_norm": 18.359220504760742,
      "learning_rate": 1.6182298546895641e-06,
      "loss": 0.3262,
      "step": 1780
    },
    {
      "epoch": 1.7775571002979147,
      "grad_norm": 39.89724349975586,
      "learning_rate": 1.5521796565389696e-06,
      "loss": 0.2176,
      "step": 1790
    },
    {
      "epoch": 1.7874875868917577,
      "grad_norm": 9.103048324584961,
      "learning_rate": 1.4861294583883754e-06,
      "loss": 0.2202,
      "step": 1800
    },
    {
      "epoch": 1.7974180734856007,
      "grad_norm": 22.96454429626465,
      "learning_rate": 1.420079260237781e-06,
      "loss": 0.2874,
      "step": 1810
    },
    {
      "epoch": 1.8073485600794439,
      "grad_norm": 16.194610595703125,
      "learning_rate": 1.3540290620871862e-06,
      "loss": 0.168,
      "step": 1820
    },
    {
      "epoch": 1.817279046673287,
      "grad_norm": 7.695756435394287,
      "learning_rate": 1.287978863936592e-06,
      "loss": 0.2298,
      "step": 1830
    },
    {
      "epoch": 1.82720953326713,
      "grad_norm": 13.192455291748047,
      "learning_rate": 1.2219286657859975e-06,
      "loss": 0.2309,
      "step": 1840
    },
    {
      "epoch": 1.837140019860973,
      "grad_norm": 16.100967407226562,
      "learning_rate": 1.155878467635403e-06,
      "loss": 0.2933,
      "step": 1850
    },
    {
      "epoch": 1.8470705064548163,
      "grad_norm": 4.693732261657715,
      "learning_rate": 1.0898282694848085e-06,
      "loss": 0.3162,
      "step": 1860
    },
    {
      "epoch": 1.8570009930486595,
      "grad_norm": 15.807625770568848,
      "learning_rate": 1.023778071334214e-06,
      "loss": 0.2593,
      "step": 1870
    },
    {
      "epoch": 1.8669314796425025,
      "grad_norm": 8.402117729187012,
      "learning_rate": 9.577278731836198e-07,
      "loss": 0.2331,
      "step": 1880
    },
    {
      "epoch": 1.8768619662363455,
      "grad_norm": 8.832892417907715,
      "learning_rate": 8.916776750330252e-07,
      "loss": 0.2713,
      "step": 1890
    },
    {
      "epoch": 1.8867924528301887,
      "grad_norm": 20.126842498779297,
      "learning_rate": 8.256274768824307e-07,
      "loss": 0.3002,
      "step": 1900
    },
    {
      "epoch": 1.896722939424032,
      "grad_norm": 7.1585845947265625,
      "learning_rate": 7.595772787318363e-07,
      "loss": 0.2557,
      "step": 1910
    },
    {
      "epoch": 1.906653426017875,
      "grad_norm": 9.247796058654785,
      "learning_rate": 6.935270805812417e-07,
      "loss": 0.2147,
      "step": 1920
    },
    {
      "epoch": 1.916583912611718,
      "grad_norm": 19.770483016967773,
      "learning_rate": 6.274768824306473e-07,
      "loss": 0.3213,
      "step": 1930
    },
    {
      "epoch": 1.9265143992055611,
      "grad_norm": 18.85097885131836,
      "learning_rate": 5.614266842800529e-07,
      "loss": 0.3408,
      "step": 1940
    },
    {
      "epoch": 1.9364448857994043,
      "grad_norm": 24.601226806640625,
      "learning_rate": 4.953764861294584e-07,
      "loss": 0.2798,
      "step": 1950
    },
    {
      "epoch": 1.9463753723932473,
      "grad_norm": 13.348860740661621,
      "learning_rate": 4.29326287978864e-07,
      "loss": 0.2631,
      "step": 1960
    },
    {
      "epoch": 1.9563058589870903,
      "grad_norm": 9.122193336486816,
      "learning_rate": 3.6327608982826953e-07,
      "loss": 0.2529,
      "step": 1970
    },
    {
      "epoch": 1.9662363455809335,
      "grad_norm": 20.501909255981445,
      "learning_rate": 2.9722589167767505e-07,
      "loss": 0.2518,
      "step": 1980
    },
    {
      "epoch": 1.9761668321747765,
      "grad_norm": 9.407881736755371,
      "learning_rate": 2.311756935270806e-07,
      "loss": 0.2078,
      "step": 1990
    },
    {
      "epoch": 1.9860973187686195,
      "grad_norm": 22.076322555541992,
      "learning_rate": 1.6512549537648615e-07,
      "loss": 0.2104,
      "step": 2000
    },
    {
      "epoch": 1.9960278053624627,
      "grad_norm": 18.52741241455078,
      "learning_rate": 9.907529722589168e-08,
      "loss": 0.2356,
      "step": 2010
    }
  ],
  "logging_steps": 10,
  "max_steps": 2014,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 5000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.4898226126848e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}