{
  "best_metric": 0.4066200256347656,
  "best_model_checkpoint": "mikhail_panzo/fil_b32_le4_s8000/checkpoint-5500",
  "epoch": 175.82417582417582,
  "eval_steps": 500,
  "global_step": 8000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.098901098901099,
      "grad_norm": 1.5917891263961792,
      "learning_rate": 2.5e-06,
      "loss": 0.7987,
      "step": 50
    },
    {
      "epoch": 2.197802197802198,
      "grad_norm": 2.48260235786438,
      "learning_rate": 4.950000000000001e-06,
      "loss": 0.7263,
      "step": 100
    },
    {
      "epoch": 3.2967032967032965,
      "grad_norm": 4.328954219818115,
      "learning_rate": 7.45e-06,
      "loss": 0.6646,
      "step": 150
    },
    {
      "epoch": 4.395604395604396,
      "grad_norm": 2.710536241531372,
      "learning_rate": 9.950000000000001e-06,
      "loss": 0.6154,
      "step": 200
    },
    {
      "epoch": 5.4945054945054945,
      "grad_norm": 2.7451584339141846,
      "learning_rate": 1.2450000000000001e-05,
      "loss": 0.5546,
      "step": 250
    },
    {
      "epoch": 6.593406593406593,
      "grad_norm": 2.8580620288848877,
      "learning_rate": 1.4950000000000001e-05,
      "loss": 0.529,
      "step": 300
    },
    {
      "epoch": 7.6923076923076925,
      "grad_norm": 2.8751707077026367,
      "learning_rate": 1.745e-05,
      "loss": 0.5265,
      "step": 350
    },
    {
      "epoch": 8.791208791208792,
      "grad_norm": 1.5821752548217773,
      "learning_rate": 1.995e-05,
      "loss": 0.4996,
      "step": 400
    },
    {
      "epoch": 9.89010989010989,
      "grad_norm": 1.9110968112945557,
      "learning_rate": 2.245e-05,
      "loss": 0.503,
      "step": 450
    },
    {
      "epoch": 10.989010989010989,
      "grad_norm": 3.7658803462982178,
      "learning_rate": 2.495e-05,
      "loss": 0.498,
      "step": 500
    },
    {
      "epoch": 10.989010989010989,
      "eval_loss": 0.4393291473388672,
      "eval_runtime": 12.9638,
      "eval_samples_per_second": 12.419,
      "eval_steps_per_second": 1.62,
      "step": 500
    },
    {
      "epoch": 12.087912087912088,
      "grad_norm": 2.30264949798584,
      "learning_rate": 2.7450000000000003e-05,
      "loss": 0.4936,
      "step": 550
    },
    {
      "epoch": 13.186813186813186,
      "grad_norm": 3.3187615871429443,
      "learning_rate": 2.995e-05,
      "loss": 0.4753,
      "step": 600
    },
    {
      "epoch": 14.285714285714286,
      "grad_norm": 3.1503515243530273,
      "learning_rate": 3.245e-05,
      "loss": 0.4722,
      "step": 650
    },
    {
      "epoch": 15.384615384615385,
      "grad_norm": 2.1364340782165527,
      "learning_rate": 3.495e-05,
      "loss": 0.4666,
      "step": 700
    },
    {
      "epoch": 16.483516483516482,
      "grad_norm": 1.972012996673584,
      "learning_rate": 3.745e-05,
      "loss": 0.462,
      "step": 750
    },
    {
      "epoch": 17.582417582417584,
      "grad_norm": 1.9032899141311646,
      "learning_rate": 3.995e-05,
      "loss": 0.4681,
      "step": 800
    },
    {
      "epoch": 18.681318681318682,
      "grad_norm": 1.9877583980560303,
      "learning_rate": 4.245e-05,
      "loss": 0.4647,
      "step": 850
    },
    {
      "epoch": 19.78021978021978,
      "grad_norm": 1.8145549297332764,
      "learning_rate": 4.495e-05,
      "loss": 0.4613,
      "step": 900
    },
    {
      "epoch": 20.87912087912088,
      "grad_norm": 2.619379997253418,
      "learning_rate": 4.745e-05,
      "loss": 0.4557,
      "step": 950
    },
    {
      "epoch": 21.978021978021978,
      "grad_norm": 4.430926322937012,
      "learning_rate": 4.995e-05,
      "loss": 0.448,
      "step": 1000
    },
    {
      "epoch": 21.978021978021978,
      "eval_loss": 0.4195362627506256,
      "eval_runtime": 12.2513,
      "eval_samples_per_second": 13.141,
      "eval_steps_per_second": 1.714,
      "step": 1000
    },
    {
      "epoch": 23.076923076923077,
      "grad_norm": 4.793135166168213,
      "learning_rate": 5.245e-05,
      "loss": 0.4498,
      "step": 1050
    },
    {
      "epoch": 24.175824175824175,
      "grad_norm": 2.612823009490967,
      "learning_rate": 5.495e-05,
      "loss": 0.443,
      "step": 1100
    },
    {
      "epoch": 25.274725274725274,
      "grad_norm": 3.3249428272247314,
      "learning_rate": 5.745e-05,
      "loss": 0.4456,
      "step": 1150
    },
    {
      "epoch": 26.373626373626372,
      "grad_norm": 3.8821775913238525,
      "learning_rate": 5.995000000000001e-05,
      "loss": 0.4586,
      "step": 1200
    },
    {
      "epoch": 27.47252747252747,
      "grad_norm": 6.9926252365112305,
      "learning_rate": 6.245000000000001e-05,
      "loss": 0.4479,
      "step": 1250
    },
    {
      "epoch": 28.571428571428573,
      "grad_norm": 7.615652084350586,
      "learning_rate": 6.494999999999999e-05,
      "loss": 0.4446,
      "step": 1300
    },
    {
      "epoch": 29.67032967032967,
      "grad_norm": 1.9948512315750122,
      "learning_rate": 6.745e-05,
      "loss": 0.4547,
      "step": 1350
    },
    {
      "epoch": 30.76923076923077,
      "grad_norm": 1.4590964317321777,
      "learning_rate": 6.995e-05,
      "loss": 0.4391,
      "step": 1400
    },
    {
      "epoch": 31.86813186813187,
      "grad_norm": 2.5002665519714355,
      "learning_rate": 7.245000000000001e-05,
      "loss": 0.4418,
      "step": 1450
    },
    {
      "epoch": 32.967032967032964,
      "grad_norm": 3.9836597442626953,
      "learning_rate": 7.495e-05,
      "loss": 0.4411,
      "step": 1500
    },
    {
      "epoch": 32.967032967032964,
      "eval_loss": 0.4205290973186493,
      "eval_runtime": 12.102,
      "eval_samples_per_second": 13.304,
      "eval_steps_per_second": 1.735,
      "step": 1500
    },
    {
      "epoch": 34.065934065934066,
      "grad_norm": 4.376543045043945,
      "learning_rate": 7.745e-05,
      "loss": 0.44,
      "step": 1550
    },
    {
      "epoch": 35.16483516483517,
      "grad_norm": 5.079801559448242,
      "learning_rate": 7.995e-05,
      "loss": 0.438,
      "step": 1600
    },
    {
      "epoch": 36.26373626373626,
      "grad_norm": 1.6529037952423096,
      "learning_rate": 8.245e-05,
      "loss": 0.4308,
      "step": 1650
    },
    {
      "epoch": 37.362637362637365,
      "grad_norm": 3.664045572280884,
      "learning_rate": 8.495e-05,
      "loss": 0.4397,
      "step": 1700
    },
    {
      "epoch": 38.46153846153846,
      "grad_norm": 2.1333911418914795,
      "learning_rate": 8.745000000000001e-05,
      "loss": 0.4441,
      "step": 1750
    },
    {
      "epoch": 39.56043956043956,
      "grad_norm": 3.5315911769866943,
      "learning_rate": 8.995e-05,
      "loss": 0.4325,
      "step": 1800
    },
    {
      "epoch": 40.65934065934066,
      "grad_norm": 7.4896674156188965,
      "learning_rate": 9.245e-05,
      "loss": 0.4299,
      "step": 1850
    },
    {
      "epoch": 41.75824175824176,
      "grad_norm": 3.11539363861084,
      "learning_rate": 9.495e-05,
      "loss": 0.4384,
      "step": 1900
    },
    {
      "epoch": 42.857142857142854,
      "grad_norm": 1.5338855981826782,
      "learning_rate": 9.745000000000001e-05,
      "loss": 0.4259,
      "step": 1950
    },
    {
      "epoch": 43.956043956043956,
      "grad_norm": 4.518911361694336,
      "learning_rate": 9.995e-05,
      "loss": 0.4347,
      "step": 2000
    },
    {
      "epoch": 43.956043956043956,
      "eval_loss": 0.4253252148628235,
      "eval_runtime": 11.7613,
      "eval_samples_per_second": 13.689,
      "eval_steps_per_second": 1.786,
      "step": 2000
    },
    {
      "epoch": 45.05494505494506,
      "grad_norm": 3.7814793586730957,
      "learning_rate": 9.918333333333334e-05,
      "loss": 0.4287,
      "step": 2050
    },
    {
      "epoch": 46.15384615384615,
      "grad_norm": 6.209993362426758,
      "learning_rate": 9.835e-05,
      "loss": 0.4285,
      "step": 2100
    },
    {
      "epoch": 47.252747252747255,
      "grad_norm": 3.44171142578125,
      "learning_rate": 9.751666666666666e-05,
      "loss": 0.4301,
      "step": 2150
    },
    {
      "epoch": 48.35164835164835,
      "grad_norm": 3.956023931503296,
      "learning_rate": 9.668333333333334e-05,
      "loss": 0.4257,
      "step": 2200
    },
    {
      "epoch": 49.45054945054945,
      "grad_norm": 2.9259955883026123,
      "learning_rate": 9.586666666666667e-05,
      "loss": 0.441,
      "step": 2250
    },
    {
      "epoch": 50.54945054945055,
      "grad_norm": 3.86029314994812,
      "learning_rate": 9.503333333333334e-05,
      "loss": 0.4308,
      "step": 2300
    },
    {
      "epoch": 51.64835164835165,
      "grad_norm": 3.556795358657837,
      "learning_rate": 9.42e-05,
      "loss": 0.4165,
      "step": 2350
    },
    {
      "epoch": 52.747252747252745,
      "grad_norm": 3.4808130264282227,
      "learning_rate": 9.336666666666667e-05,
      "loss": 0.4153,
      "step": 2400
    },
    {
      "epoch": 53.84615384615385,
      "grad_norm": 2.37046217918396,
      "learning_rate": 9.253333333333334e-05,
      "loss": 0.416,
      "step": 2450
    },
    {
      "epoch": 54.94505494505494,
      "grad_norm": 1.724293828010559,
      "learning_rate": 9.17e-05,
      "loss": 0.4173,
      "step": 2500
    },
    {
      "epoch": 54.94505494505494,
      "eval_loss": 0.41511598229408264,
      "eval_runtime": 12.1765,
      "eval_samples_per_second": 13.222,
      "eval_steps_per_second": 1.725,
      "step": 2500
    },
    {
      "epoch": 56.043956043956044,
      "grad_norm": 1.3938038349151611,
      "learning_rate": 9.086666666666666e-05,
      "loss": 0.4175,
      "step": 2550
    },
    {
      "epoch": 57.142857142857146,
      "grad_norm": 1.3193321228027344,
      "learning_rate": 9.003333333333333e-05,
      "loss": 0.4169,
      "step": 2600
    },
    {
      "epoch": 58.24175824175824,
      "grad_norm": 4.696841716766357,
      "learning_rate": 8.92e-05,
      "loss": 0.4091,
      "step": 2650
    },
    {
      "epoch": 59.34065934065934,
      "grad_norm": 2.6104297637939453,
      "learning_rate": 8.836666666666667e-05,
      "loss": 0.4115,
      "step": 2700
    },
    {
      "epoch": 60.43956043956044,
      "grad_norm": 2.989949941635132,
      "learning_rate": 8.753333333333334e-05,
      "loss": 0.4266,
      "step": 2750
    },
    {
      "epoch": 61.53846153846154,
      "grad_norm": 8.316046714782715,
      "learning_rate": 8.67e-05,
      "loss": 0.4087,
      "step": 2800
    },
    {
      "epoch": 62.637362637362635,
      "grad_norm": 4.265425205230713,
      "learning_rate": 8.586666666666668e-05,
      "loss": 0.4063,
      "step": 2850
    },
    {
      "epoch": 63.73626373626374,
      "grad_norm": 3.9043214321136475,
      "learning_rate": 8.503333333333334e-05,
      "loss": 0.4018,
      "step": 2900
    },
    {
      "epoch": 64.83516483516483,
      "grad_norm": 2.512340545654297,
      "learning_rate": 8.42e-05,
      "loss": 0.4059,
      "step": 2950
    },
    {
      "epoch": 65.93406593406593,
      "grad_norm": 3.410762071609497,
      "learning_rate": 8.336666666666667e-05,
      "loss": 0.4012,
      "step": 3000
    },
    {
      "epoch": 65.93406593406593,
      "eval_loss": 0.411817729473114,
      "eval_runtime": 12.4908,
      "eval_samples_per_second": 12.889,
      "eval_steps_per_second": 1.681,
      "step": 3000
    },
    {
      "epoch": 67.03296703296704,
      "grad_norm": 1.7443562746047974,
      "learning_rate": 8.253333333333334e-05,
      "loss": 0.3996,
      "step": 3050
    },
    {
      "epoch": 68.13186813186813,
      "grad_norm": 7.796085357666016,
      "learning_rate": 8.17e-05,
      "loss": 0.4001,
      "step": 3100
    },
    {
      "epoch": 69.23076923076923,
      "grad_norm": 4.286561965942383,
      "learning_rate": 8.086666666666666e-05,
      "loss": 0.4028,
      "step": 3150
    },
    {
      "epoch": 70.32967032967034,
      "grad_norm": 1.7267292737960815,
      "learning_rate": 8.003333333333333e-05,
      "loss": 0.3981,
      "step": 3200
    },
    {
      "epoch": 71.42857142857143,
      "grad_norm": 1.720483422279358,
      "learning_rate": 7.920000000000001e-05,
      "loss": 0.4044,
      "step": 3250
    },
    {
      "epoch": 72.52747252747253,
      "grad_norm": 2.6275534629821777,
      "learning_rate": 7.836666666666667e-05,
      "loss": 0.3915,
      "step": 3300
    },
    {
      "epoch": 73.62637362637362,
      "grad_norm": 1.1752153635025024,
      "learning_rate": 7.753333333333334e-05,
      "loss": 0.4001,
      "step": 3350
    },
    {
      "epoch": 74.72527472527473,
      "grad_norm": 1.8559554815292358,
      "learning_rate": 7.670000000000001e-05,
      "loss": 0.3975,
      "step": 3400
    },
    {
      "epoch": 75.82417582417582,
      "grad_norm": 2.82486629486084,
      "learning_rate": 7.586666666666668e-05,
      "loss": 0.3986,
      "step": 3450
    },
    {
      "epoch": 76.92307692307692,
      "grad_norm": 2.659985065460205,
      "learning_rate": 7.503333333333333e-05,
      "loss": 0.4023,
      "step": 3500
    },
    {
      "epoch": 76.92307692307692,
      "eval_loss": 0.40923717617988586,
      "eval_runtime": 12.2621,
      "eval_samples_per_second": 13.13,
      "eval_steps_per_second": 1.713,
      "step": 3500
    },
    {
      "epoch": 78.02197802197803,
      "grad_norm": 2.169241428375244,
      "learning_rate": 7.42e-05,
      "loss": 0.3924,
      "step": 3550
    },
    {
      "epoch": 79.12087912087912,
      "grad_norm": 1.7540392875671387,
      "learning_rate": 7.336666666666667e-05,
      "loss": 0.3892,
      "step": 3600
    },
    {
      "epoch": 80.21978021978022,
      "grad_norm": 1.3049631118774414,
      "learning_rate": 7.253333333333334e-05,
      "loss": 0.3969,
      "step": 3650
    },
    {
      "epoch": 81.31868131868131,
      "grad_norm": 1.0093408823013306,
      "learning_rate": 7.17e-05,
      "loss": 0.395,
      "step": 3700
    },
    {
      "epoch": 82.41758241758242,
      "grad_norm": 1.3580188751220703,
      "learning_rate": 7.086666666666666e-05,
      "loss": 0.3913,
      "step": 3750
    },
    {
      "epoch": 83.51648351648352,
      "grad_norm": 1.6554079055786133,
      "learning_rate": 7.003333333333335e-05,
      "loss": 0.3935,
      "step": 3800
    },
    {
      "epoch": 84.61538461538461,
      "grad_norm": 2.003830909729004,
      "learning_rate": 6.92e-05,
      "loss": 0.3981,
      "step": 3850
    },
    {
      "epoch": 85.71428571428571,
      "grad_norm": 3.338681697845459,
      "learning_rate": 6.836666666666667e-05,
      "loss": 0.3873,
      "step": 3900
    },
    {
      "epoch": 86.81318681318682,
      "grad_norm": 2.0752291679382324,
      "learning_rate": 6.753333333333334e-05,
      "loss": 0.388,
      "step": 3950
    },
    {
      "epoch": 87.91208791208791,
      "grad_norm": 1.809194564819336,
      "learning_rate": 6.670000000000001e-05,
      "loss": 0.3873,
      "step": 4000
    },
    {
      "epoch": 87.91208791208791,
      "eval_loss": 0.4116482436656952,
      "eval_runtime": 13.309,
      "eval_samples_per_second": 12.097,
      "eval_steps_per_second": 1.578,
      "step": 4000
    },
    {
      "epoch": 89.01098901098901,
      "grad_norm": 1.8667621612548828,
      "learning_rate": 6.586666666666666e-05,
      "loss": 0.3867,
      "step": 4050
    },
    {
      "epoch": 90.10989010989012,
      "grad_norm": 1.7244277000427246,
      "learning_rate": 6.503333333333333e-05,
      "loss": 0.3802,
      "step": 4100
    },
    {
      "epoch": 91.20879120879121,
      "grad_norm": 1.6144648790359497,
      "learning_rate": 6.42e-05,
      "loss": 0.3853,
      "step": 4150
    },
    {
      "epoch": 92.3076923076923,
      "grad_norm": 1.299013614654541,
      "learning_rate": 6.336666666666667e-05,
      "loss": 0.3837,
      "step": 4200
    },
    {
      "epoch": 93.4065934065934,
      "grad_norm": 2.6695783138275146,
      "learning_rate": 6.253333333333333e-05,
      "loss": 0.3832,
      "step": 4250
    },
    {
      "epoch": 94.50549450549451,
      "grad_norm": 1.719193696975708,
      "learning_rate": 6.170000000000001e-05,
      "loss": 0.3832,
      "step": 4300
    },
    {
      "epoch": 95.6043956043956,
      "grad_norm": 2.300633668899536,
      "learning_rate": 6.086666666666667e-05,
      "loss": 0.381,
      "step": 4350
    },
    {
      "epoch": 96.7032967032967,
      "grad_norm": 1.0958266258239746,
      "learning_rate": 6.003333333333334e-05,
      "loss": 0.3834,
      "step": 4400
    },
    {
      "epoch": 97.8021978021978,
      "grad_norm": 1.2469346523284912,
      "learning_rate": 5.92e-05,
      "loss": 0.3746,
      "step": 4450
    },
    {
      "epoch": 98.9010989010989,
      "grad_norm": 2.032564878463745,
      "learning_rate": 5.836666666666667e-05,
      "loss": 0.381,
      "step": 4500
    },
    {
      "epoch": 98.9010989010989,
      "eval_loss": 0.4088541567325592,
      "eval_runtime": 12.5723,
      "eval_samples_per_second": 12.806,
      "eval_steps_per_second": 1.67,
      "step": 4500
    },
    {
      "epoch": 100.0,
      "grad_norm": 2.846341371536255,
      "learning_rate": 5.753333333333334e-05,
      "loss": 0.3763,
      "step": 4550
    },
    {
      "epoch": 101.0989010989011,
      "grad_norm": 1.3910062313079834,
      "learning_rate": 5.6699999999999996e-05,
      "loss": 0.3725,
      "step": 4600
    },
    {
      "epoch": 102.1978021978022,
      "grad_norm": 1.37867271900177,
      "learning_rate": 5.5866666666666665e-05,
      "loss": 0.3802,
      "step": 4650
    },
    {
      "epoch": 103.2967032967033,
      "grad_norm": 1.5553981065750122,
      "learning_rate": 5.5033333333333334e-05,
      "loss": 0.3762,
      "step": 4700
    },
    {
      "epoch": 104.3956043956044,
      "grad_norm": 1.2945665121078491,
      "learning_rate": 5.420000000000001e-05,
      "loss": 0.372,
      "step": 4750
    },
    {
      "epoch": 105.49450549450549,
      "grad_norm": 1.3214540481567383,
      "learning_rate": 5.3366666666666665e-05,
      "loss": 0.3738,
      "step": 4800
    },
    {
      "epoch": 106.5934065934066,
      "grad_norm": 1.4942032098770142,
      "learning_rate": 5.2533333333333334e-05,
      "loss": 0.3833,
      "step": 4850
    },
    {
      "epoch": 107.6923076923077,
      "grad_norm": 1.166876196861267,
      "learning_rate": 5.17e-05,
      "loss": 0.3703,
      "step": 4900
    },
    {
      "epoch": 108.79120879120879,
      "grad_norm": 1.1540136337280273,
      "learning_rate": 5.086666666666667e-05,
      "loss": 0.3757,
      "step": 4950
    },
    {
      "epoch": 109.89010989010988,
      "grad_norm": 2.1926376819610596,
      "learning_rate": 5.005e-05,
      "loss": 0.3804,
      "step": 5000
    },
    {
      "epoch": 109.89010989010988,
      "eval_loss": 0.4093373715877533,
      "eval_runtime": 12.5943,
      "eval_samples_per_second": 12.784,
      "eval_steps_per_second": 1.667,
      "step": 5000
    },
    {
      "epoch": 110.98901098901099,
      "grad_norm": 4.544153690338135,
      "learning_rate": 4.9216666666666666e-05,
      "loss": 0.3751,
      "step": 5050
    },
    {
      "epoch": 112.08791208791209,
      "grad_norm": 2.686535596847534,
      "learning_rate": 4.8383333333333335e-05,
      "loss": 0.3727,
      "step": 5100
    },
    {
      "epoch": 113.18681318681318,
      "grad_norm": 1.5733741521835327,
      "learning_rate": 4.755e-05,
      "loss": 0.3747,
      "step": 5150
    },
    {
      "epoch": 114.28571428571429,
      "grad_norm": 0.9618715643882751,
      "learning_rate": 4.671666666666667e-05,
      "loss": 0.3652,
      "step": 5200
    },
    {
      "epoch": 115.38461538461539,
      "grad_norm": 1.1063612699508667,
      "learning_rate": 4.5883333333333335e-05,
      "loss": 0.3731,
      "step": 5250
    },
    {
      "epoch": 116.48351648351648,
      "grad_norm": 2.2875099182128906,
      "learning_rate": 4.5050000000000004e-05,
      "loss": 0.3749,
      "step": 5300
    },
    {
      "epoch": 117.58241758241758,
      "grad_norm": 2.0566062927246094,
      "learning_rate": 4.4216666666666666e-05,
      "loss": 0.3648,
      "step": 5350
    },
    {
      "epoch": 118.68131868131869,
      "grad_norm": 1.7075659036636353,
      "learning_rate": 4.3383333333333335e-05,
      "loss": 0.3643,
      "step": 5400
    },
    {
      "epoch": 119.78021978021978,
      "grad_norm": 1.8719024658203125,
      "learning_rate": 4.2550000000000004e-05,
      "loss": 0.3664,
      "step": 5450
    },
    {
      "epoch": 120.87912087912088,
      "grad_norm": 1.7855234146118164,
      "learning_rate": 4.171666666666667e-05,
      "loss": 0.3724,
      "step": 5500
    },
    {
      "epoch": 120.87912087912088,
      "eval_loss": 0.4066200256347656,
      "eval_runtime": 12.1606,
      "eval_samples_per_second": 13.239,
      "eval_steps_per_second": 1.727,
      "step": 5500
    },
    {
      "epoch": 121.97802197802197,
      "grad_norm": 1.4458872079849243,
      "learning_rate": 4.0883333333333335e-05,
      "loss": 0.3685,
      "step": 5550
    },
    {
      "epoch": 123.07692307692308,
      "grad_norm": 1.9380217790603638,
      "learning_rate": 4.0050000000000004e-05,
      "loss": 0.3689,
      "step": 5600
    },
    {
      "epoch": 124.17582417582418,
      "grad_norm": 1.1235325336456299,
      "learning_rate": 3.921666666666667e-05,
      "loss": 0.3735,
      "step": 5650
    },
    {
      "epoch": 125.27472527472527,
      "grad_norm": 1.0528467893600464,
      "learning_rate": 3.8383333333333336e-05,
      "loss": 0.3645,
      "step": 5700
    },
    {
      "epoch": 126.37362637362638,
      "grad_norm": 0.8706468939781189,
      "learning_rate": 3.7550000000000005e-05,
      "loss": 0.3651,
      "step": 5750
    },
    {
      "epoch": 127.47252747252747,
      "grad_norm": 2.1038143634796143,
      "learning_rate": 3.671666666666667e-05,
      "loss": 0.3655,
      "step": 5800
    },
    {
      "epoch": 128.57142857142858,
      "grad_norm": 1.101017951965332,
      "learning_rate": 3.5883333333333336e-05,
      "loss": 0.3652,
      "step": 5850
    },
    {
      "epoch": 129.67032967032966,
      "grad_norm": 2.1845877170562744,
      "learning_rate": 3.505e-05,
      "loss": 0.3637,
      "step": 5900
    },
    {
      "epoch": 130.76923076923077,
      "grad_norm": 1.1006217002868652,
      "learning_rate": 3.421666666666667e-05,
      "loss": 0.362,
      "step": 5950
    },
    {
      "epoch": 131.86813186813185,
      "grad_norm": 1.1332905292510986,
      "learning_rate": 3.3383333333333336e-05,
      "loss": 0.3665,
      "step": 6000
    },
    {
      "epoch": 131.86813186813185,
      "eval_loss": 0.40923577547073364,
      "eval_runtime": 11.9318,
      "eval_samples_per_second": 13.493,
      "eval_steps_per_second": 1.76,
      "step": 6000
    },
    {
      "epoch": 132.96703296703296,
      "grad_norm": 1.4925904273986816,
      "learning_rate": 3.2550000000000005e-05,
      "loss": 0.3604,
      "step": 6050
    },
    {
      "epoch": 134.06593406593407,
      "grad_norm": 1.3827078342437744,
      "learning_rate": 3.171666666666667e-05,
      "loss": 0.3639,
      "step": 6100
    },
    {
      "epoch": 135.16483516483515,
      "grad_norm": 1.6526421308517456,
      "learning_rate": 3.0883333333333336e-05,
      "loss": 0.3606,
      "step": 6150
    },
    {
      "epoch": 136.26373626373626,
      "grad_norm": 1.5088169574737549,
      "learning_rate": 3.0050000000000002e-05,
      "loss": 0.3645,
      "step": 6200
    },
    {
      "epoch": 137.36263736263737,
      "grad_norm": 1.899960994720459,
      "learning_rate": 2.921666666666667e-05,
      "loss": 0.3585,
      "step": 6250
    },
    {
      "epoch": 138.46153846153845,
      "grad_norm": 0.7441226243972778,
      "learning_rate": 2.8383333333333333e-05,
      "loss": 0.361,
      "step": 6300
    },
    {
      "epoch": 139.56043956043956,
      "grad_norm": 1.273861289024353,
      "learning_rate": 2.7550000000000002e-05,
      "loss": 0.3635,
      "step": 6350
    },
    {
      "epoch": 140.65934065934067,
      "grad_norm": 1.0249220132827759,
      "learning_rate": 2.6716666666666668e-05,
      "loss": 0.3637,
      "step": 6400
    },
    {
      "epoch": 141.75824175824175,
      "grad_norm": 1.155131220817566,
      "learning_rate": 2.5883333333333337e-05,
      "loss": 0.361,
      "step": 6450
    },
    {
      "epoch": 142.85714285714286,
      "grad_norm": 1.1497211456298828,
      "learning_rate": 2.5050000000000002e-05,
      "loss": 0.3635,
      "step": 6500
    },
    {
      "epoch": 142.85714285714286,
      "eval_loss": 0.4099365472793579,
      "eval_runtime": 12.3576,
      "eval_samples_per_second": 13.028,
      "eval_steps_per_second": 1.699,
      "step": 6500
    },
    {
      "epoch": 143.95604395604394,
      "grad_norm": 1.2265280485153198,
      "learning_rate": 2.4216666666666668e-05,
      "loss": 0.3604,
      "step": 6550
    },
    {
      "epoch": 145.05494505494505,
      "grad_norm": 1.600876808166504,
      "learning_rate": 2.3383333333333334e-05,
      "loss": 0.3592,
      "step": 6600
    },
    {
      "epoch": 146.15384615384616,
      "grad_norm": 1.137926697731018,
      "learning_rate": 2.2550000000000003e-05,
      "loss": 0.3608,
      "step": 6650
    },
    {
      "epoch": 147.25274725274724,
      "grad_norm": 1.297443151473999,
      "learning_rate": 2.1716666666666668e-05,
      "loss": 0.3576,
      "step": 6700
    },
    {
      "epoch": 148.35164835164835,
      "grad_norm": 0.8834235668182373,
      "learning_rate": 2.0883333333333334e-05,
      "loss": 0.3581,
      "step": 6750
    },
    {
      "epoch": 149.45054945054946,
      "grad_norm": 1.0530905723571777,
      "learning_rate": 2.0050000000000003e-05,
      "loss": 0.3569,
      "step": 6800
    },
    {
      "epoch": 150.54945054945054,
      "grad_norm": 1.1900426149368286,
      "learning_rate": 1.921666666666667e-05,
      "loss": 0.3642,
      "step": 6850
    },
    {
      "epoch": 151.64835164835165,
      "grad_norm": 0.733370304107666,
      "learning_rate": 1.8383333333333334e-05,
      "loss": 0.3556,
      "step": 6900
    },
    {
      "epoch": 152.74725274725276,
      "grad_norm": 1.85074782371521,
      "learning_rate": 1.755e-05,
      "loss": 0.3533,
      "step": 6950
    },
    {
      "epoch": 153.84615384615384,
      "grad_norm": 1.4953721761703491,
      "learning_rate": 1.6716666666666665e-05,
      "loss": 0.3562,
      "step": 7000
    },
    {
      "epoch": 153.84615384615384,
      "eval_loss": 0.4075365662574768,
      "eval_runtime": 13.1335,
      "eval_samples_per_second": 12.259,
      "eval_steps_per_second": 1.599,
      "step": 7000
    },
    {
      "epoch": 154.94505494505495,
      "grad_norm": 0.9426752328872681,
      "learning_rate": 1.5883333333333334e-05,
      "loss": 0.3567,
      "step": 7050
    },
    {
      "epoch": 156.04395604395606,
      "grad_norm": 1.0722122192382812,
      "learning_rate": 1.505e-05,
      "loss": 0.3559,
      "step": 7100
    },
    {
      "epoch": 157.14285714285714,
      "grad_norm": 1.1290276050567627,
      "learning_rate": 1.4216666666666667e-05,
      "loss": 0.3654,
      "step": 7150
    },
    {
      "epoch": 158.24175824175825,
      "grad_norm": 1.0128904581069946,
      "learning_rate": 1.3383333333333335e-05,
      "loss": 0.357,
      "step": 7200
    },
    {
      "epoch": 159.34065934065933,
      "grad_norm": 1.2913401126861572,
      "learning_rate": 1.255e-05,
      "loss": 0.3556,
      "step": 7250
    },
    {
      "epoch": 160.43956043956044,
      "grad_norm": 1.0247690677642822,
      "learning_rate": 1.1716666666666667e-05,
      "loss": 0.3512,
      "step": 7300
    },
    {
      "epoch": 161.53846153846155,
      "grad_norm": 1.1389926671981812,
      "learning_rate": 1.0883333333333335e-05,
      "loss": 0.3553,
      "step": 7350
    },
    {
      "epoch": 162.63736263736263,
      "grad_norm": 0.853523850440979,
      "learning_rate": 1.005e-05,
      "loss": 0.357,
      "step": 7400
    },
    {
      "epoch": 163.73626373626374,
      "grad_norm": 0.8105210065841675,
      "learning_rate": 9.216666666666666e-06,
      "loss": 0.3512,
      "step": 7450
    },
    {
      "epoch": 164.83516483516485,
      "grad_norm": 0.891890287399292,
      "learning_rate": 8.383333333333333e-06,
      "loss": 0.3581,
      "step": 7500
    },
    {
      "epoch": 164.83516483516485,
      "eval_loss": 0.4096794128417969,
      "eval_runtime": 12.3085,
      "eval_samples_per_second": 13.08,
      "eval_steps_per_second": 1.706,
      "step": 7500
    },
    {
      "epoch": 165.93406593406593,
      "grad_norm": 1.1622841358184814,
      "learning_rate": 7.55e-06,
      "loss": 0.3542,
      "step": 7550
    },
    {
      "epoch": 167.03296703296704,
      "grad_norm": 0.767804741859436,
      "learning_rate": 6.716666666666667e-06,
      "loss": 0.3516,
      "step": 7600
    },
    {
      "epoch": 168.13186813186815,
      "grad_norm": 0.7939496636390686,
      "learning_rate": 5.8833333333333335e-06,
      "loss": 0.3516,
      "step": 7650
    },
    {
      "epoch": 169.23076923076923,
      "grad_norm": 0.826722264289856,
      "learning_rate": 5.050000000000001e-06,
      "loss": 0.3536,
      "step": 7700
    },
    {
      "epoch": 170.32967032967034,
      "grad_norm": 1.0055506229400635,
      "learning_rate": 4.216666666666666e-06,
      "loss": 0.3515,
      "step": 7750
    },
    {
      "epoch": 171.42857142857142,
      "grad_norm": 0.9588921070098877,
      "learning_rate": 3.3833333333333337e-06,
      "loss": 0.3501,
      "step": 7800
    },
    {
      "epoch": 172.52747252747253,
      "grad_norm": 0.7313802242279053,
      "learning_rate": 2.55e-06,
      "loss": 0.353,
      "step": 7850
    },
    {
      "epoch": 173.62637362637363,
      "grad_norm": 0.7923821210861206,
      "learning_rate": 1.7166666666666668e-06,
      "loss": 0.3526,
      "step": 7900
    },
    {
      "epoch": 174.72527472527472,
      "grad_norm": 0.7615464329719543,
      "learning_rate": 8.833333333333334e-07,
      "loss": 0.3473,
      "step": 7950
    },
    {
      "epoch": 175.82417582417582,
      "grad_norm": 0.6758411526679993,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 0.3461,
      "step": 8000
    },
    {
      "epoch": 175.82417582417582,
      "eval_loss": 0.40872836112976074,
      "eval_runtime": 13.3243,
      "eval_samples_per_second": 12.083,
      "eval_steps_per_second": 1.576,
      "step": 8000
    }
  ],
  "logging_steps": 50,
  "max_steps": 8000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 178,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.743072546477725e+16,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}