|
{ |
|
"best_metric": 0.019110437482595444, |
|
"best_model_checkpoint": "runs/deepseek_lora_20240422-151242/checkpoint-2500", |
|
"epoch": 1.5665387326701654, |
|
"eval_steps": 500, |
|
"global_step": 5000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 1.4764970541000366, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 1.5176, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2.0680618286132812, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 1.5115, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2.892202138900757, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.4821, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.8446696996688843, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 1.4669, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.351251244544983, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.5485, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.4435632228851318, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.3646, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.500685691833496, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 1.5509, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.027316093444824, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 1.4173, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.5012431144714355, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 1.4232, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.025063991546631, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.3952, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.2457600831985474, |
|
"learning_rate": 4.4e-06, |
|
"loss": 1.1888, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.2635124921798706, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.2259, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.6183345317840576, |
|
"learning_rate": 5.2e-06, |
|
"loss": 1.0052, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.38128924369812, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 0.9928, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.641148328781128, |
|
"learning_rate": 6e-06, |
|
"loss": 0.9287, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.0428839921951294, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 0.7218, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.0025560855865479, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 0.6115, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.962586522102356, |
|
"learning_rate": 7.2000000000000005e-06, |
|
"loss": 0.5337, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.46322694420814514, |
|
"learning_rate": 7.600000000000001e-06, |
|
"loss": 0.3379, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.5135998725891113, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.3204, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.4679415225982666, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 0.2534, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.6532612442970276, |
|
"learning_rate": 8.8e-06, |
|
"loss": 0.1868, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.841340184211731, |
|
"learning_rate": 9.200000000000002e-06, |
|
"loss": 0.2609, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.7561472058296204, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 0.3831, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.38061589002609253, |
|
"learning_rate": 1e-05, |
|
"loss": 0.3377, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.24261775612831116, |
|
"learning_rate": 1.04e-05, |
|
"loss": 0.1452, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.345815896987915, |
|
"learning_rate": 1.0800000000000002e-05, |
|
"loss": 0.0759, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.30060628056526184, |
|
"learning_rate": 1.1200000000000001e-05, |
|
"loss": 0.233, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.45316824316978455, |
|
"learning_rate": 1.16e-05, |
|
"loss": 0.1512, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.31342798471450806, |
|
"learning_rate": 1.2e-05, |
|
"loss": 0.1612, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.0591862201690674, |
|
"learning_rate": 1.2400000000000002e-05, |
|
"loss": 0.1128, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.04360930994153023, |
|
"learning_rate": 1.2800000000000001e-05, |
|
"loss": 0.1209, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.2543979585170746, |
|
"learning_rate": 1.3200000000000002e-05, |
|
"loss": 0.139, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.55256462097168, |
|
"learning_rate": 1.3600000000000002e-05, |
|
"loss": 0.1386, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.032540880143642426, |
|
"learning_rate": 1.4e-05, |
|
"loss": 0.0659, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.020825587213039398, |
|
"learning_rate": 1.4400000000000001e-05, |
|
"loss": 0.0491, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.304826736450195, |
|
"learning_rate": 1.48e-05, |
|
"loss": 0.1335, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.03812132403254509, |
|
"learning_rate": 1.5200000000000002e-05, |
|
"loss": 0.0341, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.0986132025718689, |
|
"learning_rate": 1.5600000000000003e-05, |
|
"loss": 0.1627, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.4882945716381073, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 0.0757, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.015025519765913486, |
|
"learning_rate": 1.64e-05, |
|
"loss": 0.0658, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.16647090017795563, |
|
"learning_rate": 1.6800000000000002e-05, |
|
"loss": 0.0375, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.016350600868463516, |
|
"learning_rate": 1.72e-05, |
|
"loss": 0.1766, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.011344443075358868, |
|
"learning_rate": 1.76e-05, |
|
"loss": 0.0802, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.05872802808880806, |
|
"learning_rate": 1.8e-05, |
|
"loss": 0.0866, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.6746442317962646, |
|
"learning_rate": 1.8400000000000003e-05, |
|
"loss": 0.0869, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.01068535540252924, |
|
"learning_rate": 1.88e-05, |
|
"loss": 0.2154, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.020563703030347824, |
|
"learning_rate": 1.9200000000000003e-05, |
|
"loss": 0.101, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.17447501420974731, |
|
"learning_rate": 1.9600000000000002e-05, |
|
"loss": 0.1116, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.7621819972991943, |
|
"learning_rate": 2e-05, |
|
"loss": 0.1465, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"eval_loss": 0.05017045885324478, |
|
"eval_runtime": 61.9228, |
|
"eval_samples_per_second": 16.149, |
|
"eval_steps_per_second": 16.149, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.015244358219206333, |
|
"learning_rate": 1.9955555555555557e-05, |
|
"loss": 0.0218, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.20013077557086945, |
|
"learning_rate": 1.9911111111111112e-05, |
|
"loss": 0.0845, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.007578707300126553, |
|
"learning_rate": 1.9866666666666667e-05, |
|
"loss": 0.1364, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.6152952909469604, |
|
"learning_rate": 1.9822222222222226e-05, |
|
"loss": 0.1135, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.0188419818878174, |
|
"learning_rate": 1.977777777777778e-05, |
|
"loss": 0.1452, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.007765072397887707, |
|
"learning_rate": 1.9733333333333336e-05, |
|
"loss": 0.1082, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.11964543163776398, |
|
"learning_rate": 1.968888888888889e-05, |
|
"loss": 0.0912, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.242813587188721, |
|
"learning_rate": 1.9644444444444447e-05, |
|
"loss": 0.1243, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.00892120786011219, |
|
"learning_rate": 1.9600000000000002e-05, |
|
"loss": 0.1229, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.008041107095777988, |
|
"learning_rate": 1.9555555555555557e-05, |
|
"loss": 0.1782, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.013454565778374672, |
|
"learning_rate": 1.9511111111111113e-05, |
|
"loss": 0.0647, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.15643712878227234, |
|
"learning_rate": 1.9466666666666668e-05, |
|
"loss": 0.0449, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.01665414497256279, |
|
"learning_rate": 1.9422222222222223e-05, |
|
"loss": 0.0636, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.01023570355027914, |
|
"learning_rate": 1.9377777777777778e-05, |
|
"loss": 0.0566, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.00858321599662304, |
|
"learning_rate": 1.9333333333333333e-05, |
|
"loss": 0.2039, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.273760795593262, |
|
"learning_rate": 1.928888888888889e-05, |
|
"loss": 0.0662, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.022378291934728622, |
|
"learning_rate": 1.9244444444444444e-05, |
|
"loss": 0.1163, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.08007720112800598, |
|
"learning_rate": 1.9200000000000003e-05, |
|
"loss": 0.0781, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.018208255991339684, |
|
"learning_rate": 1.9155555555555558e-05, |
|
"loss": 0.099, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.07240291684865952, |
|
"learning_rate": 1.9111111111111113e-05, |
|
"loss": 0.0522, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.5902518033981323, |
|
"learning_rate": 1.9066666666666668e-05, |
|
"loss": 0.0421, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.01304863765835762, |
|
"learning_rate": 1.9022222222222223e-05, |
|
"loss": 0.0968, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.013874703086912632, |
|
"learning_rate": 1.897777777777778e-05, |
|
"loss": 0.016, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.01731191948056221, |
|
"learning_rate": 1.8933333333333334e-05, |
|
"loss": 0.0936, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.513253688812256, |
|
"learning_rate": 1.888888888888889e-05, |
|
"loss": 0.1283, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.9819907546043396, |
|
"learning_rate": 1.8844444444444444e-05, |
|
"loss": 0.1182, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 4.303761005401611, |
|
"learning_rate": 1.88e-05, |
|
"loss": 0.0491, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 5.017575740814209, |
|
"learning_rate": 1.8755555555555558e-05, |
|
"loss": 0.115, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.07593460381031036, |
|
"learning_rate": 1.8711111111111113e-05, |
|
"loss": 0.0237, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 3.1718034744262695, |
|
"learning_rate": 1.866666666666667e-05, |
|
"loss": 0.1243, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.009034764021635056, |
|
"learning_rate": 1.8622222222222224e-05, |
|
"loss": 0.0435, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.008039540611207485, |
|
"learning_rate": 1.857777777777778e-05, |
|
"loss": 0.0797, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.030577246099710464, |
|
"learning_rate": 1.8533333333333334e-05, |
|
"loss": 0.0902, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.01614721678197384, |
|
"learning_rate": 1.848888888888889e-05, |
|
"loss": 0.0443, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.00745397200807929, |
|
"learning_rate": 1.8444444444444448e-05, |
|
"loss": 0.013, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.0033397756051272154, |
|
"learning_rate": 1.8400000000000003e-05, |
|
"loss": 0.0177, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.005004946142435074, |
|
"learning_rate": 1.835555555555556e-05, |
|
"loss": 0.0414, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.005703146569430828, |
|
"learning_rate": 1.8311111111111114e-05, |
|
"loss": 0.1875, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.011084661819040775, |
|
"learning_rate": 1.826666666666667e-05, |
|
"loss": 0.0093, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 2.5405566692352295, |
|
"learning_rate": 1.8222222222222224e-05, |
|
"loss": 0.074, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.003973611164838076, |
|
"learning_rate": 1.817777777777778e-05, |
|
"loss": 0.0506, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.04966599866747856, |
|
"learning_rate": 1.8133333333333335e-05, |
|
"loss": 0.0544, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.01420037355273962, |
|
"learning_rate": 1.808888888888889e-05, |
|
"loss": 0.0153, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 3.305298328399658, |
|
"learning_rate": 1.8044444444444445e-05, |
|
"loss": 0.0359, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 2.4599621295928955, |
|
"learning_rate": 1.8e-05, |
|
"loss": 0.0808, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 2.250840187072754, |
|
"learning_rate": 1.7955555555555556e-05, |
|
"loss": 0.1756, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 2.541337251663208, |
|
"learning_rate": 1.791111111111111e-05, |
|
"loss": 0.0827, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.04627770185470581, |
|
"learning_rate": 1.7866666666666666e-05, |
|
"loss": 0.0777, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.06008163094520569, |
|
"learning_rate": 1.782222222222222e-05, |
|
"loss": 0.0921, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.03849005699157715, |
|
"learning_rate": 1.7777777777777777e-05, |
|
"loss": 0.0555, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"eval_loss": 0.028874782845377922, |
|
"eval_runtime": 61.928, |
|
"eval_samples_per_second": 16.148, |
|
"eval_steps_per_second": 16.148, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.006071585696190596, |
|
"learning_rate": 1.7733333333333335e-05, |
|
"loss": 0.0399, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.012464660219848156, |
|
"learning_rate": 1.768888888888889e-05, |
|
"loss": 0.075, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.0500749833881855, |
|
"learning_rate": 1.7644444444444446e-05, |
|
"loss": 0.1375, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.1470282077789307, |
|
"learning_rate": 1.76e-05, |
|
"loss": 0.0725, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 4.040950775146484, |
|
"learning_rate": 1.7555555555555556e-05, |
|
"loss": 0.0757, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.010639629326760769, |
|
"learning_rate": 1.751111111111111e-05, |
|
"loss": 0.0495, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.009533255361020565, |
|
"learning_rate": 1.7466666666666667e-05, |
|
"loss": 0.0125, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 2.4316189289093018, |
|
"learning_rate": 1.7422222222222222e-05, |
|
"loss": 0.089, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.18850259482860565, |
|
"learning_rate": 1.737777777777778e-05, |
|
"loss": 0.0359, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.012972812168300152, |
|
"learning_rate": 1.7333333333333336e-05, |
|
"loss": 0.0618, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.8784104585647583, |
|
"learning_rate": 1.728888888888889e-05, |
|
"loss": 0.106, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.02918626181781292, |
|
"learning_rate": 1.7244444444444446e-05, |
|
"loss": 0.0367, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 5.1463236808776855, |
|
"learning_rate": 1.72e-05, |
|
"loss": 0.0784, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 2.754035234451294, |
|
"learning_rate": 1.7155555555555557e-05, |
|
"loss": 0.08, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.014391366392374039, |
|
"learning_rate": 1.7111111111111112e-05, |
|
"loss": 0.1047, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.016152633354067802, |
|
"learning_rate": 1.706666666666667e-05, |
|
"loss": 0.0453, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.020005827769637108, |
|
"learning_rate": 1.7022222222222226e-05, |
|
"loss": 0.0388, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.004559040535241365, |
|
"learning_rate": 1.697777777777778e-05, |
|
"loss": 0.0306, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.5455616116523743, |
|
"learning_rate": 1.6933333333333336e-05, |
|
"loss": 0.0595, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.00697706313803792, |
|
"learning_rate": 1.688888888888889e-05, |
|
"loss": 0.1236, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.926461935043335, |
|
"learning_rate": 1.6844444444444447e-05, |
|
"loss": 0.0155, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 2.4559099674224854, |
|
"learning_rate": 1.6800000000000002e-05, |
|
"loss": 0.0083, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.033357810229063034, |
|
"learning_rate": 1.6755555555555557e-05, |
|
"loss": 0.0337, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.008271641097962856, |
|
"learning_rate": 1.6711111111111112e-05, |
|
"loss": 0.0618, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.003830987960100174, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 0.0097, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.021310480311512947, |
|
"learning_rate": 1.6622222222222223e-05, |
|
"loss": 0.0717, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 2.166944980621338, |
|
"learning_rate": 1.6577777777777778e-05, |
|
"loss": 0.0135, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.0050102900713682175, |
|
"learning_rate": 1.6533333333333333e-05, |
|
"loss": 0.02, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.044907063245773315, |
|
"learning_rate": 1.648888888888889e-05, |
|
"loss": 0.0688, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.06957779079675674, |
|
"learning_rate": 1.6444444444444444e-05, |
|
"loss": 0.0887, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.025342583656311, |
|
"learning_rate": 1.64e-05, |
|
"loss": 0.0536, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.011287701316177845, |
|
"learning_rate": 1.6355555555555557e-05, |
|
"loss": 0.0663, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 3.220019578933716, |
|
"learning_rate": 1.6311111111111113e-05, |
|
"loss": 0.0495, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.6111379861831665, |
|
"learning_rate": 1.6266666666666668e-05, |
|
"loss": 0.0658, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.004258489701896906, |
|
"learning_rate": 1.6222222222222223e-05, |
|
"loss": 0.0022, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0197640657424927, |
|
"learning_rate": 1.617777777777778e-05, |
|
"loss": 0.0969, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.003954906482249498, |
|
"learning_rate": 1.6133333333333334e-05, |
|
"loss": 0.0474, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.326837539672852, |
|
"learning_rate": 1.608888888888889e-05, |
|
"loss": 0.0683, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.008893176913261414, |
|
"learning_rate": 1.6044444444444444e-05, |
|
"loss": 0.0577, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.007870793342590332, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 0.0658, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.003645359305664897, |
|
"learning_rate": 1.5955555555555558e-05, |
|
"loss": 0.004, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.6187639236450195, |
|
"learning_rate": 1.5911111111111113e-05, |
|
"loss": 0.0814, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.7969435453414917, |
|
"learning_rate": 1.586666666666667e-05, |
|
"loss": 0.0154, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 2.8159172534942627, |
|
"learning_rate": 1.5822222222222224e-05, |
|
"loss": 0.1169, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.7929564714431763, |
|
"learning_rate": 1.577777777777778e-05, |
|
"loss": 0.1261, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2537187337875366, |
|
"learning_rate": 1.5733333333333334e-05, |
|
"loss": 0.0193, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.004017004277557135, |
|
"learning_rate": 1.5688888888888893e-05, |
|
"loss": 0.0246, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 3.6317222118377686, |
|
"learning_rate": 1.5644444444444448e-05, |
|
"loss": 0.0987, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.007867836393415928, |
|
"learning_rate": 1.5600000000000003e-05, |
|
"loss": 0.0535, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.21237313747406, |
|
"learning_rate": 1.555555555555556e-05, |
|
"loss": 0.0514, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"eval_loss": 0.021845491603016853, |
|
"eval_runtime": 61.8947, |
|
"eval_samples_per_second": 16.156, |
|
"eval_steps_per_second": 16.156, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.022142503410577774, |
|
"learning_rate": 1.5511111111111114e-05, |
|
"loss": 0.0561, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.5647516250610352, |
|
"learning_rate": 1.546666666666667e-05, |
|
"loss": 0.0338, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.00992211140692234, |
|
"learning_rate": 1.5422222222222224e-05, |
|
"loss": 0.0595, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.0023656291887164116, |
|
"learning_rate": 1.537777777777778e-05, |
|
"loss": 0.0406, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.043642736971378326, |
|
"learning_rate": 1.5333333333333334e-05, |
|
"loss": 0.1006, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.14472796022891998, |
|
"learning_rate": 1.528888888888889e-05, |
|
"loss": 0.0087, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.002529141725972295, |
|
"learning_rate": 1.5244444444444447e-05, |
|
"loss": 0.0283, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.0024450107011944056, |
|
"learning_rate": 1.5200000000000002e-05, |
|
"loss": 0.0695, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.016819190233945847, |
|
"learning_rate": 1.5155555555555557e-05, |
|
"loss": 0.0606, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.9701637029647827, |
|
"learning_rate": 1.5111111111111112e-05, |
|
"loss": 0.0807, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.029984062537550926, |
|
"learning_rate": 1.5066666666666668e-05, |
|
"loss": 0.0693, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.3928338587284088, |
|
"learning_rate": 1.5022222222222223e-05, |
|
"loss": 0.0741, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.005559510085731745, |
|
"learning_rate": 1.497777777777778e-05, |
|
"loss": 0.0725, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.0025930607225745916, |
|
"learning_rate": 1.4933333333333335e-05, |
|
"loss": 0.1192, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 2.3535702228546143, |
|
"learning_rate": 1.488888888888889e-05, |
|
"loss": 0.0606, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 3.1475963592529297, |
|
"learning_rate": 1.4844444444444445e-05, |
|
"loss": 0.0956, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.003930664621293545, |
|
"learning_rate": 1.48e-05, |
|
"loss": 0.0246, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 3.315476655960083, |
|
"learning_rate": 1.4755555555555556e-05, |
|
"loss": 0.0953, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8841826319694519, |
|
"learning_rate": 1.4711111111111111e-05, |
|
"loss": 0.035, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.01530434563755989, |
|
"learning_rate": 1.4666666666666666e-05, |
|
"loss": 0.0383, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 7.836932182312012, |
|
"learning_rate": 1.4622222222222225e-05, |
|
"loss": 0.0981, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.0168469101190567, |
|
"learning_rate": 1.457777777777778e-05, |
|
"loss": 0.1095, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.15857402980327606, |
|
"learning_rate": 1.4533333333333335e-05, |
|
"loss": 0.0574, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.5915563106536865, |
|
"learning_rate": 1.448888888888889e-05, |
|
"loss": 0.0305, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.039337050169706345, |
|
"learning_rate": 1.4444444444444446e-05, |
|
"loss": 0.0177, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.0033416992519050837, |
|
"learning_rate": 1.4400000000000001e-05, |
|
"loss": 0.0009, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.00678109098225832, |
|
"learning_rate": 1.4355555555555556e-05, |
|
"loss": 0.07, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 2.4513912200927734, |
|
"learning_rate": 1.4311111111111111e-05, |
|
"loss": 0.0865, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.966366708278656, |
|
"learning_rate": 1.4266666666666668e-05, |
|
"loss": 0.0032, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 2.9548590183258057, |
|
"learning_rate": 1.4222222222222224e-05, |
|
"loss": 0.0721, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 3.815798044204712, |
|
"learning_rate": 1.4177777777777779e-05, |
|
"loss": 0.032, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 2.7791800498962402, |
|
"learning_rate": 1.4133333333333334e-05, |
|
"loss": 0.0763, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.004075185861438513, |
|
"learning_rate": 1.408888888888889e-05, |
|
"loss": 0.0085, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.0038460579235106707, |
|
"learning_rate": 1.4044444444444445e-05, |
|
"loss": 0.0696, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.005857239011675119, |
|
"learning_rate": 1.4e-05, |
|
"loss": 0.0559, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.1006237044930458, |
|
"learning_rate": 1.3955555555555558e-05, |
|
"loss": 0.059, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.004246099852025509, |
|
"learning_rate": 1.3911111111111114e-05, |
|
"loss": 0.0866, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.00666034035384655, |
|
"learning_rate": 1.3866666666666669e-05, |
|
"loss": 0.0452, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.0027115230914205313, |
|
"learning_rate": 1.3822222222222224e-05, |
|
"loss": 0.033, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 3.016477108001709, |
|
"learning_rate": 1.377777777777778e-05, |
|
"loss": 0.0437, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.4079822301864624, |
|
"learning_rate": 1.3733333333333335e-05, |
|
"loss": 0.0474, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.0025102542713284492, |
|
"learning_rate": 1.368888888888889e-05, |
|
"loss": 0.0631, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.394373893737793, |
|
"learning_rate": 1.3644444444444445e-05, |
|
"loss": 0.0446, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0866854190826416, |
|
"learning_rate": 1.3600000000000002e-05, |
|
"loss": 0.0073, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.007402149029076099, |
|
"learning_rate": 1.3555555555555557e-05, |
|
"loss": 0.0311, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.004811545833945274, |
|
"learning_rate": 1.3511111111111112e-05, |
|
"loss": 0.0861, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.004000263754278421, |
|
"learning_rate": 1.3466666666666668e-05, |
|
"loss": 0.0342, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.003166991053149104, |
|
"learning_rate": 1.3422222222222223e-05, |
|
"loss": 0.0897, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.006424722261726856, |
|
"learning_rate": 1.3377777777777778e-05, |
|
"loss": 0.0161, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.0020944385323673487, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.0358, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"eval_loss": 0.017703723162412643, |
|
"eval_runtime": 61.8922, |
|
"eval_samples_per_second": 16.157, |
|
"eval_steps_per_second": 16.157, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.003021718468517065, |
|
"learning_rate": 1.3288888888888889e-05, |
|
"loss": 0.0445, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.010362805798649788, |
|
"learning_rate": 1.3244444444444447e-05, |
|
"loss": 0.0308, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.32867729663848877, |
|
"learning_rate": 1.3200000000000002e-05, |
|
"loss": 0.0514, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.0034784479066729546, |
|
"learning_rate": 1.3155555555555558e-05, |
|
"loss": 0.0002, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.08931613713502884, |
|
"learning_rate": 1.3111111111111113e-05, |
|
"loss": 0.0351, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.0024316832423210144, |
|
"learning_rate": 1.3066666666666668e-05, |
|
"loss": 0.0822, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.6956457495689392, |
|
"learning_rate": 1.3022222222222223e-05, |
|
"loss": 0.0656, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.006866452284157276, |
|
"learning_rate": 1.2977777777777779e-05, |
|
"loss": 0.0709, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.6907499432563782, |
|
"learning_rate": 1.2933333333333334e-05, |
|
"loss": 0.0474, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.3049083948135376, |
|
"learning_rate": 1.288888888888889e-05, |
|
"loss": 0.096, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.015370524488389492, |
|
"learning_rate": 1.2844444444444446e-05, |
|
"loss": 0.0433, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.02007671445608139, |
|
"learning_rate": 1.2800000000000001e-05, |
|
"loss": 0.0422, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.00284277624450624, |
|
"learning_rate": 1.2755555555555556e-05, |
|
"loss": 0.0432, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.0024309258442372084, |
|
"learning_rate": 1.2711111111111112e-05, |
|
"loss": 0.0462, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.016368340700864792, |
|
"learning_rate": 1.2666666666666667e-05, |
|
"loss": 0.0804, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.005066912155598402, |
|
"learning_rate": 1.2622222222222222e-05, |
|
"loss": 0.0807, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.002282659988850355, |
|
"learning_rate": 1.257777777777778e-05, |
|
"loss": 0.0395, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8781790137290955, |
|
"learning_rate": 1.2533333333333336e-05, |
|
"loss": 0.0168, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.0021755376365035772, |
|
"learning_rate": 1.2488888888888891e-05, |
|
"loss": 0.1348, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.0018356326036155224, |
|
"learning_rate": 1.2444444444444446e-05, |
|
"loss": 0.0419, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.0016609402373433113, |
|
"learning_rate": 1.2400000000000002e-05, |
|
"loss": 0.0738, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0487405061721802, |
|
"learning_rate": 1.2355555555555557e-05, |
|
"loss": 0.0209, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.0034472872503101826, |
|
"learning_rate": 1.2311111111111112e-05, |
|
"loss": 0.0322, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 2.6054458618164062, |
|
"learning_rate": 1.2266666666666667e-05, |
|
"loss": 0.1416, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 2.022294044494629, |
|
"learning_rate": 1.2222222222222224e-05, |
|
"loss": 0.0846, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.010752925649285316, |
|
"learning_rate": 1.217777777777778e-05, |
|
"loss": 0.0093, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.9160808324813843, |
|
"learning_rate": 1.2133333333333335e-05, |
|
"loss": 0.0751, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.002021084539592266, |
|
"learning_rate": 1.208888888888889e-05, |
|
"loss": 0.0271, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.0034389556385576725, |
|
"learning_rate": 1.2044444444444445e-05, |
|
"loss": 0.0233, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.0031041186302900314, |
|
"learning_rate": 1.2e-05, |
|
"loss": 0.068, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.0017185291508212686, |
|
"learning_rate": 1.1955555555555556e-05, |
|
"loss": 0.0369, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.008447349071502686, |
|
"learning_rate": 1.191111111111111e-05, |
|
"loss": 0.0549, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.0062310476787388325, |
|
"learning_rate": 1.186666666666667e-05, |
|
"loss": 0.0575, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.7583394646644592, |
|
"learning_rate": 1.1822222222222225e-05, |
|
"loss": 0.0256, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.4174383878707886, |
|
"learning_rate": 1.177777777777778e-05, |
|
"loss": 0.0805, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.003498498583212495, |
|
"learning_rate": 1.1733333333333335e-05, |
|
"loss": 0.0472, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 4.035282135009766, |
|
"learning_rate": 1.168888888888889e-05, |
|
"loss": 0.0662, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.0031760677229613066, |
|
"learning_rate": 1.1644444444444446e-05, |
|
"loss": 0.0367, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.006041550077497959, |
|
"learning_rate": 1.16e-05, |
|
"loss": 0.0408, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.004563288297504187, |
|
"learning_rate": 1.1555555555555556e-05, |
|
"loss": 0.0133, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 2.7603859901428223, |
|
"learning_rate": 1.1511111111111113e-05, |
|
"loss": 0.0268, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.0016988619463518262, |
|
"learning_rate": 1.1466666666666668e-05, |
|
"loss": 0.0542, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.005680690053850412, |
|
"learning_rate": 1.1422222222222223e-05, |
|
"loss": 0.0311, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 3.9060475826263428, |
|
"learning_rate": 1.1377777777777779e-05, |
|
"loss": 0.1159, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 2.005553722381592, |
|
"learning_rate": 1.1333333333333334e-05, |
|
"loss": 0.0648, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 4.020055294036865, |
|
"learning_rate": 1.1288888888888889e-05, |
|
"loss": 0.0312, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.021578742191195488, |
|
"learning_rate": 1.1244444444444444e-05, |
|
"loss": 0.0285, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.00151452433783561, |
|
"learning_rate": 1.1200000000000001e-05, |
|
"loss": 0.0666, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.127124309539795, |
|
"learning_rate": 1.1155555555555556e-05, |
|
"loss": 0.0362, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.0015013846568763256, |
|
"learning_rate": 1.1111111111111113e-05, |
|
"loss": 0.0575, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"eval_loss": 0.019110437482595444, |
|
"eval_runtime": 61.8844, |
|
"eval_samples_per_second": 16.159, |
|
"eval_steps_per_second": 16.159, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.0033025951124727726, |
|
"learning_rate": 1.1066666666666669e-05, |
|
"loss": 0.081, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.0032579610124230385, |
|
"learning_rate": 1.1022222222222224e-05, |
|
"loss": 0.0207, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.838714838027954, |
|
"learning_rate": 1.0977777777777779e-05, |
|
"loss": 0.1599, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.0018555274000391364, |
|
"learning_rate": 1.0933333333333334e-05, |
|
"loss": 0.002, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.0020195182878524065, |
|
"learning_rate": 1.088888888888889e-05, |
|
"loss": 0.008, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.0014295022701844573, |
|
"learning_rate": 1.0844444444444446e-05, |
|
"loss": 0.0276, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 2.145263433456421, |
|
"learning_rate": 1.0800000000000002e-05, |
|
"loss": 0.0248, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 2.27980375289917, |
|
"learning_rate": 1.0755555555555557e-05, |
|
"loss": 0.0358, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 2.667983293533325, |
|
"learning_rate": 1.0711111111111112e-05, |
|
"loss": 0.0651, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.0018497890559956431, |
|
"learning_rate": 1.0666666666666667e-05, |
|
"loss": 0.0387, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.0013170776655897498, |
|
"learning_rate": 1.0622222222222223e-05, |
|
"loss": 0.0173, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.004894789308309555, |
|
"learning_rate": 1.0577777777777778e-05, |
|
"loss": 0.043, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.04001269116997719, |
|
"learning_rate": 1.0533333333333333e-05, |
|
"loss": 0.0926, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.0019531013676896691, |
|
"learning_rate": 1.048888888888889e-05, |
|
"loss": 0.0511, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 2.29565167427063, |
|
"learning_rate": 1.0444444444444445e-05, |
|
"loss": 0.0803, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 2.7071011066436768, |
|
"learning_rate": 1.04e-05, |
|
"loss": 0.0604, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.0376460887491703, |
|
"learning_rate": 1.0355555555555557e-05, |
|
"loss": 0.0155, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.001858079107478261, |
|
"learning_rate": 1.0311111111111113e-05, |
|
"loss": 0.0665, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.0015587485395371914, |
|
"learning_rate": 1.0266666666666668e-05, |
|
"loss": 0.0613, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.047985997051000595, |
|
"learning_rate": 1.0222222222222223e-05, |
|
"loss": 0.0032, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 2.2135114669799805, |
|
"learning_rate": 1.0177777777777778e-05, |
|
"loss": 0.0508, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.1044480800628662, |
|
"learning_rate": 1.0133333333333335e-05, |
|
"loss": 0.0333, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.047782231122255325, |
|
"learning_rate": 1.008888888888889e-05, |
|
"loss": 0.0256, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.0029533940833061934, |
|
"learning_rate": 1.0044444444444446e-05, |
|
"loss": 0.0107, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.003864617319777608, |
|
"learning_rate": 1e-05, |
|
"loss": 0.0508, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.006308398675173521, |
|
"learning_rate": 9.955555555555556e-06, |
|
"loss": 0.0599, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.0014458250952884555, |
|
"learning_rate": 9.911111111111113e-06, |
|
"loss": 0.0457, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.0018096743151545525, |
|
"learning_rate": 9.866666666666668e-06, |
|
"loss": 0.0773, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.002803443931043148, |
|
"learning_rate": 9.822222222222223e-06, |
|
"loss": 0.0229, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.0039856089279055595, |
|
"learning_rate": 9.777777777777779e-06, |
|
"loss": 0.0738, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.0016119845677167177, |
|
"learning_rate": 9.733333333333334e-06, |
|
"loss": 0.1069, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.044685885310173035, |
|
"learning_rate": 9.688888888888889e-06, |
|
"loss": 0.0718, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.5644834041595459, |
|
"learning_rate": 9.644444444444444e-06, |
|
"loss": 0.0539, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.6698065400123596, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 0.0671, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.009338573552668095, |
|
"learning_rate": 9.555555555555556e-06, |
|
"loss": 0.0238, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.002303204732015729, |
|
"learning_rate": 9.511111111111112e-06, |
|
"loss": 0.0531, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.004544748924672604, |
|
"learning_rate": 9.466666666666667e-06, |
|
"loss": 0.133, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.004034551791846752, |
|
"learning_rate": 9.422222222222222e-06, |
|
"loss": 0.0174, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.0032248455099761486, |
|
"learning_rate": 9.377777777777779e-06, |
|
"loss": 0.0571, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.2451166957616806, |
|
"learning_rate": 9.333333333333334e-06, |
|
"loss": 0.0304, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.001918753026984632, |
|
"learning_rate": 9.28888888888889e-06, |
|
"loss": 0.0826, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.0014626473421230912, |
|
"learning_rate": 9.244444444444445e-06, |
|
"loss": 0.0288, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.003409222001209855, |
|
"learning_rate": 9.200000000000002e-06, |
|
"loss": 0.041, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.002315065125003457, |
|
"learning_rate": 9.155555555555557e-06, |
|
"loss": 0.0322, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.002485796343535185, |
|
"learning_rate": 9.111111111111112e-06, |
|
"loss": 0.0132, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.0027836193330585957, |
|
"learning_rate": 9.066666666666667e-06, |
|
"loss": 0.0424, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.2203580141067505, |
|
"learning_rate": 9.022222222222223e-06, |
|
"loss": 0.0607, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.0014286149526014924, |
|
"learning_rate": 8.977777777777778e-06, |
|
"loss": 0.0082, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.0011963992146775126, |
|
"learning_rate": 8.933333333333333e-06, |
|
"loss": 0.0447, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.0012861526338383555, |
|
"learning_rate": 8.888888888888888e-06, |
|
"loss": 0.0394, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"eval_loss": 0.021464653313159943, |
|
"eval_runtime": 62.0041, |
|
"eval_samples_per_second": 16.128, |
|
"eval_steps_per_second": 16.128, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 2.6348252296447754, |
|
"learning_rate": 8.844444444444445e-06, |
|
"loss": 0.1294, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.0012073700781911612, |
|
"learning_rate": 8.8e-06, |
|
"loss": 0.0304, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.003195586148649454, |
|
"learning_rate": 8.755555555555556e-06, |
|
"loss": 0.0161, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.9521057605743408, |
|
"learning_rate": 8.711111111111111e-06, |
|
"loss": 0.1185, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.035520199686288834, |
|
"learning_rate": 8.666666666666668e-06, |
|
"loss": 0.0357, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.0031184712424874306, |
|
"learning_rate": 8.622222222222223e-06, |
|
"loss": 0.0505, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.8742607831954956, |
|
"learning_rate": 8.577777777777778e-06, |
|
"loss": 0.0607, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.001605386845767498, |
|
"learning_rate": 8.533333333333335e-06, |
|
"loss": 0.048, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 2.8198726177215576, |
|
"learning_rate": 8.48888888888889e-06, |
|
"loss": 0.029, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.0011747152311727405, |
|
"learning_rate": 8.444444444444446e-06, |
|
"loss": 0.0745, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.0037345418240875006, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 0.1246, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.03968101739883423, |
|
"learning_rate": 8.355555555555556e-06, |
|
"loss": 0.028, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.0017666455823928118, |
|
"learning_rate": 8.311111111111111e-06, |
|
"loss": 0.0398, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.0016725193709135056, |
|
"learning_rate": 8.266666666666667e-06, |
|
"loss": 0.0874, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 3.0152335166931152, |
|
"learning_rate": 8.222222222222222e-06, |
|
"loss": 0.0233, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.003967667929828167, |
|
"learning_rate": 8.177777777777779e-06, |
|
"loss": 0.0322, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.0012324820272624493, |
|
"learning_rate": 8.133333333333334e-06, |
|
"loss": 0.01, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.001890141749754548, |
|
"learning_rate": 8.08888888888889e-06, |
|
"loss": 0.0617, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.0017083596903830767, |
|
"learning_rate": 8.044444444444444e-06, |
|
"loss": 0.031, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.1974055767059326, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.0136, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.8210293054580688, |
|
"learning_rate": 7.955555555555557e-06, |
|
"loss": 0.0174, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 3.1489944458007812, |
|
"learning_rate": 7.911111111111112e-06, |
|
"loss": 0.0776, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.0014464000705629587, |
|
"learning_rate": 7.866666666666667e-06, |
|
"loss": 0.0164, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.5747243762016296, |
|
"learning_rate": 7.822222222222224e-06, |
|
"loss": 0.0093, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.009251836687326431, |
|
"learning_rate": 7.77777777777778e-06, |
|
"loss": 0.0171, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.2069794237613678, |
|
"learning_rate": 7.733333333333334e-06, |
|
"loss": 0.0301, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 2.251051664352417, |
|
"learning_rate": 7.68888888888889e-06, |
|
"loss": 0.0707, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.0010599648812785745, |
|
"learning_rate": 7.644444444444445e-06, |
|
"loss": 0.0053, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.0016899543115869164, |
|
"learning_rate": 7.600000000000001e-06, |
|
"loss": 0.0006, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.003940447699278593, |
|
"learning_rate": 7.555555555555556e-06, |
|
"loss": 0.014, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.7399486899375916, |
|
"learning_rate": 7.511111111111111e-06, |
|
"loss": 0.0309, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.0006520531023852527, |
|
"learning_rate": 7.4666666666666675e-06, |
|
"loss": 0.0218, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 2.1491100788116455, |
|
"learning_rate": 7.422222222222223e-06, |
|
"loss": 0.1215, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 3.6713123321533203, |
|
"learning_rate": 7.377777777777778e-06, |
|
"loss": 0.0429, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.03133914992213249, |
|
"learning_rate": 7.333333333333333e-06, |
|
"loss": 0.027, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.002692360896617174, |
|
"learning_rate": 7.28888888888889e-06, |
|
"loss": 0.0216, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.0034821536391973495, |
|
"learning_rate": 7.244444444444445e-06, |
|
"loss": 0.043, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.00455117505043745, |
|
"learning_rate": 7.2000000000000005e-06, |
|
"loss": 0.0688, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 3.5100715160369873, |
|
"learning_rate": 7.155555555555556e-06, |
|
"loss": 0.0136, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.0031513776630163193, |
|
"learning_rate": 7.111111111111112e-06, |
|
"loss": 0.0155, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.3787556886672974, |
|
"learning_rate": 7.066666666666667e-06, |
|
"loss": 0.0254, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.000773483538068831, |
|
"learning_rate": 7.022222222222222e-06, |
|
"loss": 0.0031, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.0014327614335343242, |
|
"learning_rate": 6.977777777777779e-06, |
|
"loss": 0.0376, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.0031394639518111944, |
|
"learning_rate": 6.9333333333333344e-06, |
|
"loss": 0.0596, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.0024705962277948856, |
|
"learning_rate": 6.88888888888889e-06, |
|
"loss": 0.0095, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.0005248151137493551, |
|
"learning_rate": 6.844444444444445e-06, |
|
"loss": 0.0199, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.49637269973754883, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 0.0154, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.3430148363113403, |
|
"learning_rate": 6.755555555555556e-06, |
|
"loss": 0.0395, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.885108232498169, |
|
"learning_rate": 6.711111111111111e-06, |
|
"loss": 0.0472, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.0007279856945388019, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.0369, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"eval_loss": 0.022176509723067284, |
|
"eval_runtime": 61.8847, |
|
"eval_samples_per_second": 16.159, |
|
"eval_steps_per_second": 16.159, |
|
"step": 3500 |
|
}, |
|
{
"epoch": 1.1,
"grad_norm": 0.003986152354627848,
"learning_rate": 6.6222222222222236e-06,
"loss": 0.0583,
"step": 3510
},
{
"epoch": 1.1,
"grad_norm": 0.0014766375534236431,
"learning_rate": 6.577777777777779e-06,
"loss": 0.0153,
"step": 3520
},
{
"epoch": 1.11,
"grad_norm": 0.791298508644104,
"learning_rate": 6.533333333333334e-06,
"loss": 0.0508,
"step": 3530
},
{
"epoch": 1.11,
"grad_norm": 0.02302105911076069,
"learning_rate": 6.488888888888889e-06,
"loss": 0.0275,
"step": 3540
},
{
"epoch": 1.11,
"grad_norm": 0.00240346509963274,
"learning_rate": 6.444444444444445e-06,
"loss": 0.0062,
"step": 3550
},
{
"epoch": 1.12,
"grad_norm": 0.0011688686208799481,
"learning_rate": 6.4000000000000006e-06,
"loss": 0.0458,
"step": 3560
},
{
"epoch": 1.12,
"grad_norm": 0.0007077022455632687,
"learning_rate": 6.355555555555556e-06,
"loss": 0.0274,
"step": 3570
},
{
"epoch": 1.12,
"grad_norm": 0.0014391880249604583,
"learning_rate": 6.311111111111111e-06,
"loss": 0.0178,
"step": 3580
},
{
"epoch": 1.12,
"grad_norm": 0.002971325535327196,
"learning_rate": 6.266666666666668e-06,
"loss": 0.0099,
"step": 3590
},
{
"epoch": 1.13,
"grad_norm": 1.5885258913040161,
"learning_rate": 6.222222222222223e-06,
"loss": 0.0277,
"step": 3600
},
{
"epoch": 1.13,
"grad_norm": 0.0023686839267611504,
"learning_rate": 6.177777777777778e-06,
"loss": 0.0001,
"step": 3610
},
{
"epoch": 1.13,
"grad_norm": 0.0018330584280192852,
"learning_rate": 6.133333333333334e-06,
"loss": 0.034,
"step": 3620
},
{
"epoch": 1.14,
"grad_norm": 0.5174291133880615,
"learning_rate": 6.08888888888889e-06,
"loss": 0.0095,
"step": 3630
},
{
"epoch": 1.14,
"grad_norm": 0.6314389705657959,
"learning_rate": 6.044444444444445e-06,
"loss": 0.035,
"step": 3640
},
{
"epoch": 1.14,
"grad_norm": 4.273723125457764,
"learning_rate": 6e-06,
"loss": 0.0332,
"step": 3650
},
{
"epoch": 1.15,
"grad_norm": 1.590868592262268,
"learning_rate": 5.955555555555555e-06,
"loss": 0.0398,
"step": 3660
},
{
"epoch": 1.15,
"grad_norm": 1.3291077613830566,
"learning_rate": 5.911111111111112e-06,
"loss": 0.0306,
"step": 3670
},
{
"epoch": 1.15,
"grad_norm": 0.0011222765315324068,
"learning_rate": 5.8666666666666675e-06,
"loss": 0.0194,
"step": 3680
},
{
"epoch": 1.16,
"grad_norm": 1.7731515169143677,
"learning_rate": 5.822222222222223e-06,
"loss": 0.0747,
"step": 3690
},
{
"epoch": 1.16,
"grad_norm": 3.05898380279541,
"learning_rate": 5.777777777777778e-06,
"loss": 0.0239,
"step": 3700
},
{
"epoch": 1.16,
"grad_norm": 0.0017392480513080955,
"learning_rate": 5.733333333333334e-06,
"loss": 0.0415,
"step": 3710
},
{
"epoch": 1.17,
"grad_norm": 0.03126866742968559,
"learning_rate": 5.688888888888889e-06,
"loss": 0.0814,
"step": 3720
},
{
"epoch": 1.17,
"grad_norm": 0.0008881581015884876,
"learning_rate": 5.6444444444444445e-06,
"loss": 0.0211,
"step": 3730
},
{
"epoch": 1.17,
"grad_norm": 0.0007252222276292741,
"learning_rate": 5.600000000000001e-06,
"loss": 0.0441,
"step": 3740
},
{
"epoch": 1.17,
"grad_norm": 0.002276132581755519,
"learning_rate": 5.555555555555557e-06,
"loss": 0.0433,
"step": 3750
},
{
"epoch": 1.18,
"grad_norm": 0.0014605377800762653,
"learning_rate": 5.511111111111112e-06,
"loss": 0.0204,
"step": 3760
},
{
"epoch": 1.18,
"grad_norm": 0.0017992451321333647,
"learning_rate": 5.466666666666667e-06,
"loss": 0.0992,
"step": 3770
},
{
"epoch": 1.18,
"grad_norm": 0.0018204891821369529,
"learning_rate": 5.422222222222223e-06,
"loss": 0.0132,
"step": 3780
},
{
"epoch": 1.19,
"grad_norm": 0.16287018358707428,
"learning_rate": 5.3777777777777784e-06,
"loss": 0.0291,
"step": 3790
},
{
"epoch": 1.19,
"grad_norm": 0.21604575216770172,
"learning_rate": 5.333333333333334e-06,
"loss": 0.0158,
"step": 3800
},
{
"epoch": 1.19,
"grad_norm": 0.0013267812319099903,
"learning_rate": 5.288888888888889e-06,
"loss": 0.0053,
"step": 3810
},
{
"epoch": 1.2,
"grad_norm": 0.0006841329741291702,
"learning_rate": 5.244444444444445e-06,
"loss": 0.0212,
"step": 3820
},
{
"epoch": 1.2,
"grad_norm": 0.19196736812591553,
"learning_rate": 5.2e-06,
"loss": 0.0206,
"step": 3830
},
{
"epoch": 1.2,
"grad_norm": 0.001533058239147067,
"learning_rate": 5.155555555555556e-06,
"loss": 0.0338,
"step": 3840
},
{
"epoch": 1.21,
"grad_norm": 0.001141805900260806,
"learning_rate": 5.1111111111111115e-06,
"loss": 0.0006,
"step": 3850
},
{
"epoch": 1.21,
"grad_norm": 1.453786849975586,
"learning_rate": 5.0666666666666676e-06,
"loss": 0.0449,
"step": 3860
},
{
"epoch": 1.21,
"grad_norm": 0.0016863587079569697,
"learning_rate": 5.022222222222223e-06,
"loss": 0.0379,
"step": 3870
},
{
"epoch": 1.22,
"grad_norm": 0.0005151178920641541,
"learning_rate": 4.977777777777778e-06,
"loss": 0.0318,
"step": 3880
},
{
"epoch": 1.22,
"grad_norm": 2.0469911098480225,
"learning_rate": 4.933333333333334e-06,
"loss": 0.0185,
"step": 3890
},
{
"epoch": 1.22,
"grad_norm": 0.7001366019248962,
"learning_rate": 4.888888888888889e-06,
"loss": 0.0566,
"step": 3900
},
{
"epoch": 1.23,
"grad_norm": 0.0007412733975797892,
"learning_rate": 4.8444444444444446e-06,
"loss": 0.0003,
"step": 3910
},
{
"epoch": 1.23,
"grad_norm": 0.0008465189021080732,
"learning_rate": 4.800000000000001e-06,
"loss": 0.0493,
"step": 3920
},
{
"epoch": 1.23,
"grad_norm": 2.3275091648101807,
"learning_rate": 4.755555555555556e-06,
"loss": 0.0109,
"step": 3930
},
{
"epoch": 1.23,
"grad_norm": 0.0014587095938622952,
"learning_rate": 4.711111111111111e-06,
"loss": 0.0,
"step": 3940
},
{
"epoch": 1.24,
"grad_norm": 0.0013576536439359188,
"learning_rate": 4.666666666666667e-06,
"loss": 0.0255,
"step": 3950
},
{
"epoch": 1.24,
"grad_norm": 0.008543136529624462,
"learning_rate": 4.622222222222222e-06,
"loss": 0.0352,
"step": 3960
},
{
"epoch": 1.24,
"grad_norm": 0.0009798618266358972,
"learning_rate": 4.5777777777777785e-06,
"loss": 0.0556,
"step": 3970
},
{
"epoch": 1.25,
"grad_norm": 0.0005635471316054463,
"learning_rate": 4.533333333333334e-06,
"loss": 0.0325,
"step": 3980
},
{
"epoch": 1.25,
"grad_norm": 0.8276817202568054,
"learning_rate": 4.488888888888889e-06,
"loss": 0.0129,
"step": 3990
},
{
"epoch": 1.25,
"grad_norm": 0.013259605504572392,
"learning_rate": 4.444444444444444e-06,
"loss": 0.0193,
"step": 4000
},
{
"epoch": 1.25,
"eval_loss": 0.022383807227015495,
"eval_runtime": 61.879,
"eval_samples_per_second": 16.161,
"eval_steps_per_second": 16.161,
"step": 4000
},
{
"epoch": 1.26,
"grad_norm": 0.0017350054113194346,
"learning_rate": 4.4e-06,
"loss": 0.0464,
"step": 4010
},
{
"epoch": 1.26,
"grad_norm": 0.8484432697296143,
"learning_rate": 4.3555555555555555e-06,
"loss": 0.0215,
"step": 4020
},
{
"epoch": 1.26,
"grad_norm": 8.376138687133789,
"learning_rate": 4.3111111111111115e-06,
"loss": 0.0781,
"step": 4030
},
{
"epoch": 1.27,
"grad_norm": 0.0006440070574171841,
"learning_rate": 4.266666666666668e-06,
"loss": 0.01,
"step": 4040
},
{
"epoch": 1.27,
"grad_norm": 0.001261880504898727,
"learning_rate": 4.222222222222223e-06,
"loss": 0.1061,
"step": 4050
},
{
"epoch": 1.27,
"grad_norm": 0.9878569841384888,
"learning_rate": 4.177777777777778e-06,
"loss": 0.0254,
"step": 4060
},
{
"epoch": 1.28,
"grad_norm": 0.05665569379925728,
"learning_rate": 4.133333333333333e-06,
"loss": 0.0044,
"step": 4070
},
{
"epoch": 1.28,
"grad_norm": 0.6482369303703308,
"learning_rate": 4.088888888888889e-06,
"loss": 0.0259,
"step": 4080
},
{
"epoch": 1.28,
"grad_norm": 0.0012170624686405063,
"learning_rate": 4.044444444444445e-06,
"loss": 0.0438,
"step": 4090
},
{
"epoch": 1.28,
"grad_norm": 0.062455616891384125,
"learning_rate": 4.000000000000001e-06,
"loss": 0.0284,
"step": 4100
},
{
"epoch": 1.29,
"grad_norm": 0.0014791043940931559,
"learning_rate": 3.955555555555556e-06,
"loss": 0.0212,
"step": 4110
},
{
"epoch": 1.29,
"grad_norm": 3.445824384689331,
"learning_rate": 3.911111111111112e-06,
"loss": 0.0511,
"step": 4120
},
{
"epoch": 1.29,
"grad_norm": 3.981981039047241,
"learning_rate": 3.866666666666667e-06,
"loss": 0.0482,
"step": 4130
},
{
"epoch": 1.3,
"grad_norm": 0.04528600350022316,
"learning_rate": 3.8222222222222224e-06,
"loss": 0.0224,
"step": 4140
},
{
"epoch": 1.3,
"grad_norm": 0.0005888556479476392,
"learning_rate": 3.777777777777778e-06,
"loss": 0.0517,
"step": 4150
},
{
"epoch": 1.3,
"grad_norm": 2.5039355754852295,
"learning_rate": 3.7333333333333337e-06,
"loss": 0.037,
"step": 4160
},
{
"epoch": 1.31,
"grad_norm": 2.8256263732910156,
"learning_rate": 3.688888888888889e-06,
"loss": 0.0261,
"step": 4170
},
{
"epoch": 1.31,
"grad_norm": 0.0010332756210118532,
"learning_rate": 3.644444444444445e-06,
"loss": 0.0269,
"step": 4180
},
{
"epoch": 1.31,
"grad_norm": 0.0005628996295854449,
"learning_rate": 3.6000000000000003e-06,
"loss": 0.0053,
"step": 4190
},
{
"epoch": 1.32,
"grad_norm": 0.0006191189750097692,
"learning_rate": 3.555555555555556e-06,
"loss": 0.0056,
"step": 4200
},
{
"epoch": 1.32,
"grad_norm": 2.178837537765503,
"learning_rate": 3.511111111111111e-06,
"loss": 0.0136,
"step": 4210
},
{
"epoch": 1.32,
"grad_norm": 0.00813736766576767,
"learning_rate": 3.4666666666666672e-06,
"loss": 0.0249,
"step": 4220
},
{
"epoch": 1.33,
"grad_norm": 0.0015987426741048694,
"learning_rate": 3.4222222222222224e-06,
"loss": 0.0424,
"step": 4230
},
{
"epoch": 1.33,
"grad_norm": 0.0005763465305790305,
"learning_rate": 3.377777777777778e-06,
"loss": 0.0149,
"step": 4240
},
{
"epoch": 1.33,
"grad_norm": 0.0005146890762262046,
"learning_rate": 3.3333333333333333e-06,
"loss": 0.0207,
"step": 4250
},
{
"epoch": 1.33,
"grad_norm": 0.0012806435115635395,
"learning_rate": 3.2888888888888894e-06,
"loss": 0.0157,
"step": 4260
},
{
"epoch": 1.34,
"grad_norm": 0.0015921818558126688,
"learning_rate": 3.2444444444444446e-06,
"loss": 0.029,
"step": 4270
},
{
"epoch": 1.34,
"grad_norm": 3.0711112022399902,
"learning_rate": 3.2000000000000003e-06,
"loss": 0.1302,
"step": 4280
},
{
"epoch": 1.34,
"grad_norm": 0.0007020393386483192,
"learning_rate": 3.1555555555555555e-06,
"loss": 0.0076,
"step": 4290
},
{
"epoch": 1.35,
"grad_norm": 0.0005048629245720804,
"learning_rate": 3.1111111111111116e-06,
"loss": 0.0086,
"step": 4300
},
{
"epoch": 1.35,
"grad_norm": 0.0023996131494641304,
"learning_rate": 3.066666666666667e-06,
"loss": 0.0196,
"step": 4310
},
{
"epoch": 1.35,
"grad_norm": 0.0025332309305667877,
"learning_rate": 3.0222222222222225e-06,
"loss": 0.0461,
"step": 4320
},
{
"epoch": 1.36,
"grad_norm": 0.0007468872354365885,
"learning_rate": 2.9777777777777777e-06,
"loss": 0.0282,
"step": 4330
},
{
"epoch": 1.36,
"grad_norm": 0.0005446248687803745,
"learning_rate": 2.9333333333333338e-06,
"loss": 0.0469,
"step": 4340
},
{
"epoch": 1.36,
"grad_norm": 0.0011565398890525103,
"learning_rate": 2.888888888888889e-06,
"loss": 0.0335,
"step": 4350
},
{
"epoch": 1.37,
"grad_norm": 1.8544560670852661,
"learning_rate": 2.8444444444444446e-06,
"loss": 0.0703,
"step": 4360
},
{
"epoch": 1.37,
"grad_norm": 3.215923547744751,
"learning_rate": 2.8000000000000003e-06,
"loss": 0.0396,
"step": 4370
},
{
"epoch": 1.37,
"grad_norm": 1.535111427307129,
"learning_rate": 2.755555555555556e-06,
"loss": 0.0719,
"step": 4380
},
{
"epoch": 1.38,
"grad_norm": 0.01463981345295906,
"learning_rate": 2.7111111111111116e-06,
"loss": 0.0246,
"step": 4390
},
{
"epoch": 1.38,
"grad_norm": 4.059357643127441,
"learning_rate": 2.666666666666667e-06,
"loss": 0.0367,
"step": 4400
},
{
"epoch": 1.38,
"grad_norm": 0.0006797238602302969,
"learning_rate": 2.6222222222222225e-06,
"loss": 0.0489,
"step": 4410
},
{
"epoch": 1.38,
"grad_norm": 1.5824676752090454,
"learning_rate": 2.577777777777778e-06,
"loss": 0.0285,
"step": 4420
},
{
"epoch": 1.39,
"grad_norm": 0.00045088992919772863,
"learning_rate": 2.5333333333333338e-06,
"loss": 0.0328,
"step": 4430
},
{
"epoch": 1.39,
"grad_norm": 1.129044771194458,
"learning_rate": 2.488888888888889e-06,
"loss": 0.058,
"step": 4440
},
{
"epoch": 1.39,
"grad_norm": 0.00038907071575522423,
"learning_rate": 2.4444444444444447e-06,
"loss": 0.0504,
"step": 4450
},
{
"epoch": 1.4,
"grad_norm": 0.0011422157986089587,
"learning_rate": 2.4000000000000003e-06,
"loss": 0.022,
"step": 4460
},
{
"epoch": 1.4,
"grad_norm": 1.4815700054168701,
"learning_rate": 2.3555555555555555e-06,
"loss": 0.0416,
"step": 4470
},
{
"epoch": 1.4,
"grad_norm": 2.4049506187438965,
"learning_rate": 2.311111111111111e-06,
"loss": 0.0286,
"step": 4480
},
{
"epoch": 1.41,
"grad_norm": 0.32649314403533936,
"learning_rate": 2.266666666666667e-06,
"loss": 0.0191,
"step": 4490
},
{
"epoch": 1.41,
"grad_norm": 0.001270271954126656,
"learning_rate": 2.222222222222222e-06,
"loss": 0.0208,
"step": 4500
},
{
"epoch": 1.41,
"eval_loss": 0.018216300755739212,
"eval_runtime": 61.8518,
"eval_samples_per_second": 16.168,
"eval_steps_per_second": 16.168,
"step": 4500
},
{
"epoch": 1.41,
"grad_norm": 0.0010293573141098022,
"learning_rate": 2.1777777777777777e-06,
"loss": 0.0186,
"step": 4510
},
{
"epoch": 1.42,
"grad_norm": 0.47490406036376953,
"learning_rate": 2.133333333333334e-06,
"loss": 0.0181,
"step": 4520
},
{
"epoch": 1.42,
"grad_norm": 0.000406662467867136,
"learning_rate": 2.088888888888889e-06,
"loss": 0.0383,
"step": 4530
},
{
"epoch": 1.42,
"grad_norm": 0.0009539751335978508,
"learning_rate": 2.0444444444444447e-06,
"loss": 0.0139,
"step": 4540
},
{
"epoch": 1.43,
"grad_norm": 0.0005080964183434844,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.0084,
"step": 4550
},
{
"epoch": 1.43,
"grad_norm": 0.0007780414307489991,
"learning_rate": 1.955555555555556e-06,
"loss": 0.0133,
"step": 4560
},
{
"epoch": 1.43,
"grad_norm": 0.0003762560954783112,
"learning_rate": 1.9111111111111112e-06,
"loss": 0.0149,
"step": 4570
},
{
"epoch": 1.43,
"grad_norm": 0.0010716378455981612,
"learning_rate": 1.8666666666666669e-06,
"loss": 0.0096,
"step": 4580
},
{
"epoch": 1.44,
"grad_norm": 0.0012602531351149082,
"learning_rate": 1.8222222222222225e-06,
"loss": 0.0501,
"step": 4590
},
{
"epoch": 1.44,
"grad_norm": 0.0011576686520129442,
"learning_rate": 1.777777777777778e-06,
"loss": 0.0367,
"step": 4600
},
{
"epoch": 1.44,
"grad_norm": 0.00044630825868807733,
"learning_rate": 1.7333333333333336e-06,
"loss": 0.0335,
"step": 4610
},
{
"epoch": 1.45,
"grad_norm": 0.0005398432840593159,
"learning_rate": 1.688888888888889e-06,
"loss": 0.0359,
"step": 4620
},
{
"epoch": 1.45,
"grad_norm": 0.7338486909866333,
"learning_rate": 1.6444444444444447e-06,
"loss": 0.0557,
"step": 4630
},
{
"epoch": 1.45,
"grad_norm": 0.0004819166788365692,
"learning_rate": 1.6000000000000001e-06,
"loss": 0.0,
"step": 4640
},
{
"epoch": 1.46,
"grad_norm": 1.6127287149429321,
"learning_rate": 1.5555555555555558e-06,
"loss": 0.0276,
"step": 4650
},
{
"epoch": 1.46,
"grad_norm": 0.07772258669137955,
"learning_rate": 1.5111111111111112e-06,
"loss": 0.088,
"step": 4660
},
{
"epoch": 1.46,
"grad_norm": 0.0005863177357241511,
"learning_rate": 1.4666666666666669e-06,
"loss": 0.0132,
"step": 4670
},
{
"epoch": 1.47,
"grad_norm": 0.0006563079077750444,
"learning_rate": 1.4222222222222223e-06,
"loss": 0.0028,
"step": 4680
},
{
"epoch": 1.47,
"grad_norm": 2.803096294403076,
"learning_rate": 1.377777777777778e-06,
"loss": 0.0694,
"step": 4690
},
{
"epoch": 1.47,
"grad_norm": 0.7182486653327942,
"learning_rate": 1.3333333333333334e-06,
"loss": 0.0065,
"step": 4700
},
{
"epoch": 1.48,
"grad_norm": 0.0009489316144026816,
"learning_rate": 1.288888888888889e-06,
"loss": 0.0136,
"step": 4710
},
{
"epoch": 1.48,
"grad_norm": 0.001954195322468877,
"learning_rate": 1.2444444444444445e-06,
"loss": 0.0142,
"step": 4720
},
{
"epoch": 1.48,
"grad_norm": 0.5916552543640137,
"learning_rate": 1.2000000000000002e-06,
"loss": 0.0469,
"step": 4730
},
{
"epoch": 1.49,
"grad_norm": 2.119339942932129,
"learning_rate": 1.1555555555555556e-06,
"loss": 0.0551,
"step": 4740
},
{
"epoch": 1.49,
"grad_norm": 0.0009017220581881702,
"learning_rate": 1.111111111111111e-06,
"loss": 0.0486,
"step": 4750
},
{
"epoch": 1.49,
"grad_norm": 0.0012501273304224014,
"learning_rate": 1.066666666666667e-06,
"loss": 0.0048,
"step": 4760
},
{
"epoch": 1.49,
"grad_norm": 0.8751838803291321,
"learning_rate": 1.0222222222222223e-06,
"loss": 0.0409,
"step": 4770
},
{
"epoch": 1.5,
"grad_norm": 0.0013982664095237851,
"learning_rate": 9.77777777777778e-07,
"loss": 0.0577,
"step": 4780
},
{
"epoch": 1.5,
"grad_norm": 0.0014040243113413453,
"learning_rate": 9.333333333333334e-07,
"loss": 0.031,
"step": 4790
},
{
"epoch": 1.5,
"grad_norm": 3.977519989013672,
"learning_rate": 8.88888888888889e-07,
"loss": 0.1057,
"step": 4800
},
{
"epoch": 1.51,
"grad_norm": 0.0011434502666816115,
"learning_rate": 8.444444444444445e-07,
"loss": 0.0066,
"step": 4810
},
{
"epoch": 1.51,
"grad_norm": 0.0013947113184258342,
"learning_rate": 8.000000000000001e-07,
"loss": 0.0337,
"step": 4820
},
{
"epoch": 1.51,
"grad_norm": 0.3012702167034149,
"learning_rate": 7.555555555555556e-07,
"loss": 0.0554,
"step": 4830
},
{
"epoch": 1.52,
"grad_norm": 0.1272149384021759,
"learning_rate": 7.111111111111112e-07,
"loss": 0.0208,
"step": 4840
},
{
"epoch": 1.52,
"grad_norm": 0.004117014352232218,
"learning_rate": 6.666666666666667e-07,
"loss": 0.0357,
"step": 4850
},
{
"epoch": 1.52,
"grad_norm": 0.0005114726372994483,
"learning_rate": 6.222222222222223e-07,
"loss": 0.0189,
"step": 4860
},
{
"epoch": 1.53,
"grad_norm": 0.000449322717031464,
"learning_rate": 5.777777777777778e-07,
"loss": 0.079,
"step": 4870
},
{
"epoch": 1.53,
"grad_norm": 1.3229221105575562,
"learning_rate": 5.333333333333335e-07,
"loss": 0.0197,
"step": 4880
},
{
"epoch": 1.53,
"grad_norm": 4.149044990539551,
"learning_rate": 4.88888888888889e-07,
"loss": 0.0383,
"step": 4890
},
{
"epoch": 1.54,
"grad_norm": 0.00048808971769176424,
"learning_rate": 4.444444444444445e-07,
"loss": 0.0473,
"step": 4900
},
{
"epoch": 1.54,
"grad_norm": 0.0010669564362615347,
"learning_rate": 4.0000000000000003e-07,
"loss": 0.0838,
"step": 4910
},
{
"epoch": 1.54,
"grad_norm": 0.030236389487981796,
"learning_rate": 3.555555555555556e-07,
"loss": 0.0108,
"step": 4920
},
{
"epoch": 1.54,
"grad_norm": 0.0004108142456971109,
"learning_rate": 3.111111111111111e-07,
"loss": 0.0301,
"step": 4930
},
{
"epoch": 1.55,
"grad_norm": 0.0008436466450802982,
"learning_rate": 2.666666666666667e-07,
"loss": 0.0101,
"step": 4940
},
{
"epoch": 1.55,
"grad_norm": 0.0007567039574496448,
"learning_rate": 2.2222222222222224e-07,
"loss": 0.0237,
"step": 4950
},
{
"epoch": 1.55,
"grad_norm": 0.001360149122774601,
"learning_rate": 1.777777777777778e-07,
"loss": 0.0352,
"step": 4960
},
{
"epoch": 1.56,
"grad_norm": 0.0012200737837702036,
"learning_rate": 1.3333333333333336e-07,
"loss": 0.0482,
"step": 4970
},
{
"epoch": 1.56,
"grad_norm": 0.0006987242959439754,
"learning_rate": 8.88888888888889e-08,
"loss": 0.0164,
"step": 4980
},
{
"epoch": 1.56,
"grad_norm": 0.005137372761964798,
"learning_rate": 4.444444444444445e-08,
"loss": 0.0424,
"step": 4990
},
{
"epoch": 1.57,
"grad_norm": 0.0009189796401187778,
"learning_rate": 0.0,
"loss": 0.0353,
"step": 5000
},
{
"epoch": 1.57,
"eval_loss": 0.021896515041589737,
"eval_runtime": 61.7867,
"eval_samples_per_second": 16.185,
"eval_steps_per_second": 16.185,
"step": 5000
}
],
"logging_steps": 10,
"max_steps": 5000,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 2500,
"total_flos": 8.051062996992e+16,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}