{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.963768115942029,
  "eval_steps": 35,
  "global_step": 207,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 43.0,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 0.7175,
      "step": 1
    },
    {
      "epoch": 0.01,
      "eval_loss": 0.7699161767959595,
      "eval_runtime": 19.565,
      "eval_samples_per_second": 200.818,
      "eval_steps_per_second": 200.818,
      "step": 1
    },
    {
      "epoch": 0.03,
      "grad_norm": 42.5,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.7271,
      "step": 2
    },
    {
      "epoch": 0.04,
      "grad_norm": 42.5,
      "learning_rate": 6.000000000000001e-07,
      "loss": 0.7161,
      "step": 3
    },
    {
      "epoch": 0.06,
      "grad_norm": 41.25,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.7088,
      "step": 4
    },
    {
      "epoch": 0.07,
      "grad_norm": 42.5,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.7022,
      "step": 5
    },
    {
      "epoch": 0.09,
      "grad_norm": 41.0,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.6846,
      "step": 6
    },
    {
      "epoch": 0.1,
      "grad_norm": 38.25,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.629,
      "step": 7
    },
    {
      "epoch": 0.12,
      "grad_norm": 32.5,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.5753,
      "step": 8
    },
    {
      "epoch": 0.13,
      "grad_norm": 28.125,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.5368,
      "step": 9
    },
    {
      "epoch": 0.14,
      "grad_norm": 22.125,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.4946,
      "step": 10
    },
    {
      "epoch": 0.16,
      "grad_norm": 19.125,
      "learning_rate": 2.2e-06,
      "loss": 0.4351,
      "step": 11
    },
    {
      "epoch": 0.17,
      "grad_norm": 15.3125,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.3873,
      "step": 12
    },
    {
      "epoch": 0.19,
      "grad_norm": 12.25,
      "learning_rate": 2.6e-06,
      "loss": 0.3382,
      "step": 13
    },
    {
      "epoch": 0.2,
      "grad_norm": 10.3125,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.2777,
      "step": 14
    },
    {
      "epoch": 0.22,
      "grad_norm": 8.4375,
      "learning_rate": 3e-06,
      "loss": 0.241,
      "step": 15
    },
    {
      "epoch": 0.23,
      "grad_norm": 7.46875,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.2278,
      "step": 16
    },
    {
      "epoch": 0.25,
      "grad_norm": 5.65625,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.1924,
      "step": 17
    },
    {
      "epoch": 0.26,
      "grad_norm": 4.65625,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.1635,
      "step": 18
    },
    {
      "epoch": 0.28,
      "grad_norm": 3.546875,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.1549,
      "step": 19
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.5,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.1374,
      "step": 20
    },
    {
      "epoch": 0.3,
      "grad_norm": 2.875,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.12,
      "step": 21
    },
    {
      "epoch": 0.32,
      "grad_norm": 2.59375,
      "learning_rate": 4.4e-06,
      "loss": 0.118,
      "step": 22
    },
    {
      "epoch": 0.33,
      "grad_norm": 2.453125,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.1095,
      "step": 23
    },
    {
      "epoch": 0.35,
      "grad_norm": 2.171875,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.0964,
      "step": 24
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.7578125,
      "learning_rate": 5e-06,
      "loss": 0.0887,
      "step": 25
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.703125,
      "learning_rate": 5.2e-06,
      "loss": 0.077,
      "step": 26
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.625,
      "learning_rate": 5.400000000000001e-06,
      "loss": 0.074,
      "step": 27
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.4609375,
      "learning_rate": 5.600000000000001e-06,
      "loss": 0.0659,
      "step": 28
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.140625,
      "learning_rate": 5.8e-06,
      "loss": 0.0586,
      "step": 29
    },
    {
      "epoch": 0.43,
      "grad_norm": 2.265625,
      "learning_rate": 6e-06,
      "loss": 0.0518,
      "step": 30
    },
    {
      "epoch": 0.45,
      "grad_norm": 1.609375,
      "learning_rate": 6.200000000000001e-06,
      "loss": 0.0566,
      "step": 31
    },
    {
      "epoch": 0.46,
      "grad_norm": 1.453125,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.0475,
      "step": 32
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.296875,
      "learning_rate": 6.600000000000001e-06,
      "loss": 0.0495,
      "step": 33
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.234375,
      "learning_rate": 6.800000000000001e-06,
      "loss": 0.0493,
      "step": 34
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.03125,
      "learning_rate": 7e-06,
      "loss": 0.055,
      "step": 35
    },
    {
      "epoch": 0.51,
      "eval_loss": 0.03942573070526123,
      "eval_runtime": 19.6691,
      "eval_samples_per_second": 199.755,
      "eval_steps_per_second": 199.755,
      "step": 35
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.94921875,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 0.0457,
      "step": 36
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.09375,
      "learning_rate": 7.4e-06,
      "loss": 0.0452,
      "step": 37
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.83203125,
      "learning_rate": 7.600000000000001e-06,
      "loss": 0.0373,
      "step": 38
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.9296875,
      "learning_rate": 7.800000000000002e-06,
      "loss": 0.0379,
      "step": 39
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.0625,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.0403,
      "step": 40
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.91015625,
      "learning_rate": 8.2e-06,
      "loss": 0.0445,
      "step": 41
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.96484375,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.0377,
      "step": 42
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.73046875,
      "learning_rate": 8.6e-06,
      "loss": 0.0367,
      "step": 43
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.88671875,
      "learning_rate": 8.8e-06,
      "loss": 0.035,
      "step": 44
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.94921875,
      "learning_rate": 9e-06,
      "loss": 0.0417,
      "step": 45
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.81640625,
      "learning_rate": 9.200000000000002e-06,
      "loss": 0.0279,
      "step": 46
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.91796875,
      "learning_rate": 9.4e-06,
      "loss": 0.0292,
      "step": 47
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.5625,
      "learning_rate": 9.600000000000001e-06,
      "loss": 0.0345,
      "step": 48
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.8515625,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.0348,
      "step": 49
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.98046875,
      "learning_rate": 1e-05,
      "loss": 0.0337,
      "step": 50
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.75,
      "learning_rate": 1.02e-05,
      "loss": 0.0361,
      "step": 51
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.71875,
      "learning_rate": 1.04e-05,
      "loss": 0.0363,
      "step": 52
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.7265625,
      "learning_rate": 1.0600000000000002e-05,
      "loss": 0.0329,
      "step": 53
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.68359375,
      "learning_rate": 1.0800000000000002e-05,
      "loss": 0.032,
      "step": 54
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.46875,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 0.0275,
      "step": 55
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.62890625,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 0.0304,
      "step": 56
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.7421875,
      "learning_rate": 1.14e-05,
      "loss": 0.0325,
      "step": 57
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.51953125,
      "learning_rate": 1.16e-05,
      "loss": 0.027,
      "step": 58
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.015625,
      "learning_rate": 1.18e-05,
      "loss": 0.0283,
      "step": 59
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.62890625,
      "learning_rate": 1.2e-05,
      "loss": 0.0301,
      "step": 60
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.734375,
      "learning_rate": 1.22e-05,
      "loss": 0.0334,
      "step": 61
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.6875,
      "learning_rate": 1.2400000000000002e-05,
      "loss": 0.0312,
      "step": 62
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.58203125,
      "learning_rate": 1.2600000000000001e-05,
      "loss": 0.0282,
      "step": 63
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.65625,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 0.0315,
      "step": 64
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.66015625,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 0.0316,
      "step": 65
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.55078125,
      "learning_rate": 1.3200000000000002e-05,
      "loss": 0.0292,
      "step": 66
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.84375,
      "learning_rate": 1.3400000000000002e-05,
      "loss": 0.0304,
      "step": 67
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.498046875,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 0.0235,
      "step": 68
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.6875,
      "learning_rate": 1.38e-05,
      "loss": 0.0262,
      "step": 69
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.8046875,
      "learning_rate": 1.4e-05,
      "loss": 0.03,
      "step": 70
    },
    {
      "epoch": 1.01,
      "eval_loss": 0.023079946637153625,
      "eval_runtime": 19.5963,
      "eval_samples_per_second": 200.497,
      "eval_steps_per_second": 200.497,
      "step": 70
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.77734375,
      "learning_rate": 1.4200000000000001e-05,
      "loss": 0.0226,
      "step": 71
    },
    {
      "epoch": 1.03,
      "grad_norm": 0.671875,
      "learning_rate": 1.4400000000000001e-05,
      "loss": 0.0274,
      "step": 72
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.5390625,
      "learning_rate": 1.46e-05,
      "loss": 0.0241,
      "step": 73
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.6953125,
      "learning_rate": 1.48e-05,
      "loss": 0.0239,
      "step": 74
    },
    {
      "epoch": 1.07,
      "grad_norm": 0.5546875,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.0237,
      "step": 75
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.55859375,
      "learning_rate": 1.5200000000000002e-05,
      "loss": 0.0231,
      "step": 76
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.55078125,
      "learning_rate": 1.54e-05,
      "loss": 0.0234,
      "step": 77
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.5546875,
      "learning_rate": 1.5600000000000003e-05,
      "loss": 0.0209,
      "step": 78
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.67578125,
      "learning_rate": 1.58e-05,
      "loss": 0.0191,
      "step": 79
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.578125,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.0232,
      "step": 80
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.640625,
      "learning_rate": 1.62e-05,
      "loss": 0.0263,
      "step": 81
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.68359375,
      "learning_rate": 1.64e-05,
      "loss": 0.0221,
      "step": 82
    },
    {
      "epoch": 1.18,
      "grad_norm": 0.60546875,
      "learning_rate": 1.66e-05,
      "loss": 0.0207,
      "step": 83
    },
    {
      "epoch": 1.2,
      "grad_norm": 1.1328125,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 0.0224,
      "step": 84
    },
    {
      "epoch": 1.21,
      "grad_norm": 0.6328125,
      "learning_rate": 1.7e-05,
      "loss": 0.0206,
      "step": 85
    },
    {
      "epoch": 1.23,
      "grad_norm": 0.8046875,
      "learning_rate": 1.72e-05,
      "loss": 0.0244,
      "step": 86
    },
    {
      "epoch": 1.24,
      "grad_norm": 0.7265625,
      "learning_rate": 1.7400000000000003e-05,
      "loss": 0.0223,
      "step": 87
    },
    {
      "epoch": 1.26,
      "grad_norm": 0.466796875,
      "learning_rate": 1.76e-05,
      "loss": 0.02,
      "step": 88
    },
    {
      "epoch": 1.27,
      "grad_norm": 0.67578125,
      "learning_rate": 1.7800000000000002e-05,
      "loss": 0.0237,
      "step": 89
    },
    {
      "epoch": 1.29,
      "grad_norm": 0.69921875,
      "learning_rate": 1.8e-05,
      "loss": 0.0222,
      "step": 90
    },
    {
      "epoch": 1.3,
      "grad_norm": 0.46484375,
      "learning_rate": 1.8200000000000002e-05,
      "loss": 0.0225,
      "step": 91
    },
    {
      "epoch": 1.32,
      "grad_norm": 0.5546875,
      "learning_rate": 1.8400000000000003e-05,
      "loss": 0.0204,
      "step": 92
    },
    {
      "epoch": 1.33,
      "grad_norm": 0.51171875,
      "learning_rate": 1.86e-05,
      "loss": 0.0199,
      "step": 93
    },
    {
      "epoch": 1.34,
      "grad_norm": 0.41796875,
      "learning_rate": 1.88e-05,
      "loss": 0.0202,
      "step": 94
    },
    {
      "epoch": 1.36,
      "grad_norm": 0.443359375,
      "learning_rate": 1.9e-05,
      "loss": 0.021,
      "step": 95
    },
    {
      "epoch": 1.37,
      "grad_norm": 0.7421875,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 0.0219,
      "step": 96
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.60546875,
      "learning_rate": 1.94e-05,
      "loss": 0.021,
      "step": 97
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.55078125,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 0.0207,
      "step": 98
    },
    {
      "epoch": 1.42,
      "grad_norm": 0.62890625,
      "learning_rate": 1.98e-05,
      "loss": 0.0181,
      "step": 99
    },
    {
      "epoch": 1.43,
      "grad_norm": 0.486328125,
      "learning_rate": 2e-05,
      "loss": 0.0203,
      "step": 100
    },
    {
      "epoch": 1.45,
      "grad_norm": 0.75390625,
      "learning_rate": 1.9995690062269985e-05,
      "loss": 0.0194,
      "step": 101
    },
    {
      "epoch": 1.46,
      "grad_norm": 0.56640625,
      "learning_rate": 1.9982763964192586e-05,
      "loss": 0.0207,
      "step": 102
    },
    {
      "epoch": 1.47,
      "grad_norm": 0.482421875,
      "learning_rate": 1.996123284790336e-05,
      "loss": 0.0204,
      "step": 103
    },
    {
      "epoch": 1.49,
      "grad_norm": 0.70703125,
      "learning_rate": 1.9931115272956405e-05,
      "loss": 0.0225,
      "step": 104
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.53515625,
      "learning_rate": 1.989243720032624e-05,
      "loss": 0.0215,
      "step": 105
    },
    {
      "epoch": 1.5,
      "eval_loss": 0.020286040380597115,
      "eval_runtime": 19.6725,
      "eval_samples_per_second": 199.72,
      "eval_steps_per_second": 199.72,
      "step": 105
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.48828125,
      "learning_rate": 1.9845231970029774e-05,
      "loss": 0.0199,
      "step": 106
    },
    {
      "epoch": 1.53,
      "grad_norm": 0.5703125,
      "learning_rate": 1.978954027238763e-05,
      "loss": 0.021,
      "step": 107
    },
    {
      "epoch": 1.55,
      "grad_norm": 0.431640625,
      "learning_rate": 1.972541011294959e-05,
      "loss": 0.0196,
      "step": 108
    },
    {
      "epoch": 1.56,
      "grad_norm": 0.388671875,
      "learning_rate": 1.9652896771114416e-05,
      "loss": 0.0193,
      "step": 109
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.423828125,
      "learning_rate": 1.9572062752479684e-05,
      "loss": 0.02,
      "step": 110
    },
    {
      "epoch": 1.59,
      "grad_norm": 0.53125,
      "learning_rate": 1.9482977734962753e-05,
      "loss": 0.0219,
      "step": 111
    },
    {
      "epoch": 1.61,
      "grad_norm": 0.41015625,
      "learning_rate": 1.9385718508739263e-05,
      "loss": 0.0184,
      "step": 112
    },
    {
      "epoch": 1.62,
      "grad_norm": 0.50390625,
      "learning_rate": 1.9280368910050943e-05,
      "loss": 0.019,
      "step": 113
    },
    {
      "epoch": 1.63,
      "grad_norm": 0.470703125,
      "learning_rate": 1.9167019748939847e-05,
      "loss": 0.019,
      "step": 114
    },
    {
      "epoch": 1.65,
      "grad_norm": 0.515625,
      "learning_rate": 1.9045768730971198e-05,
      "loss": 0.0237,
      "step": 115
    },
    {
      "epoch": 1.66,
      "grad_norm": 0.4296875,
      "learning_rate": 1.8916720373012425e-05,
      "loss": 0.0222,
      "step": 116
    },
    {
      "epoch": 1.68,
      "grad_norm": 0.42578125,
      "learning_rate": 1.8779985913140927e-05,
      "loss": 0.0215,
      "step": 117
    },
    {
      "epoch": 1.69,
      "grad_norm": 0.4375,
      "learning_rate": 1.8635683214758213e-05,
      "loss": 0.0207,
      "step": 118
    },
    {
      "epoch": 1.71,
      "grad_norm": 0.431640625,
      "learning_rate": 1.8483936664993152e-05,
      "loss": 0.0188,
      "step": 119
    },
    {
      "epoch": 1.72,
      "grad_norm": 0.4921875,
      "learning_rate": 1.8324877067481782e-05,
      "loss": 0.0199,
      "step": 120
    },
    {
      "epoch": 1.74,
      "grad_norm": 0.5234375,
      "learning_rate": 1.815864152961624e-05,
      "loss": 0.0208,
      "step": 121
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.486328125,
      "learning_rate": 1.798537334435986e-05,
      "loss": 0.0182,
      "step": 122
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.55859375,
      "learning_rate": 1.780522186673046e-05,
      "loss": 0.0199,
      "step": 123
    },
    {
      "epoch": 1.78,
      "grad_norm": 0.35546875,
      "learning_rate": 1.7618342385058147e-05,
      "loss": 0.0158,
      "step": 124
    },
    {
      "epoch": 1.79,
      "grad_norm": 0.51171875,
      "learning_rate": 1.7424895987128723e-05,
      "loss": 0.0206,
      "step": 125
    },
    {
      "epoch": 1.81,
      "grad_norm": 0.51953125,
      "learning_rate": 1.7225049421328024e-05,
      "loss": 0.0192,
      "step": 126
    },
    {
      "epoch": 1.82,
      "grad_norm": 0.380859375,
      "learning_rate": 1.7018974952906885e-05,
      "loss": 0.0164,
      "step": 127
    },
    {
      "epoch": 1.84,
      "grad_norm": 0.41015625,
      "learning_rate": 1.680685021549063e-05,
      "loss": 0.0186,
      "step": 128
    },
    {
      "epoch": 1.85,
      "grad_norm": 0.4921875,
      "learning_rate": 1.658885805796111e-05,
      "loss": 0.0178,
      "step": 129
    },
    {
      "epoch": 1.87,
      "grad_norm": 0.42578125,
      "learning_rate": 1.636518638684325e-05,
      "loss": 0.0179,
      "step": 130
    },
    {
      "epoch": 1.88,
      "grad_norm": 0.40234375,
      "learning_rate": 1.613602800433194e-05,
      "loss": 0.0191,
      "step": 131
    },
    {
      "epoch": 1.89,
      "grad_norm": 0.41015625,
      "learning_rate": 1.590158044209897e-05,
      "loss": 0.0201,
      "step": 132
    },
    {
      "epoch": 1.91,
      "grad_norm": 0.455078125,
      "learning_rate": 1.566204579102317e-05,
      "loss": 0.0196,
      "step": 133
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.44921875,
      "learning_rate": 1.5417630526990613e-05,
      "loss": 0.0197,
      "step": 134
    },
    {
      "epoch": 1.94,
      "grad_norm": 0.37890625,
      "learning_rate": 1.5168545332914942e-05,
      "loss": 0.0185,
      "step": 135
    },
    {
      "epoch": 1.95,
      "grad_norm": 0.330078125,
      "learning_rate": 1.4915004917131345e-05,
      "loss": 0.0162,
      "step": 136
    },
    {
      "epoch": 1.97,
      "grad_norm": 0.38671875,
      "learning_rate": 1.4657227828320637e-05,
      "loss": 0.0208,
      "step": 137
    },
    {
      "epoch": 1.98,
      "grad_norm": 0.392578125,
      "learning_rate": 1.4395436267123017e-05,
      "loss": 0.0184,
      "step": 138
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.388671875,
      "learning_rate": 1.4129855894603885e-05,
      "loss": 0.0181,
      "step": 139
    },
    {
      "epoch": 2.01,
      "grad_norm": 0.5078125,
      "learning_rate": 1.3860715637736817e-05,
      "loss": 0.0185,
      "step": 140
    },
    {
      "epoch": 2.01,
      "eval_loss": 0.019323738291859627,
      "eval_runtime": 19.6403,
      "eval_samples_per_second": 200.047,
      "eval_steps_per_second": 200.047,
      "step": 140
    },
    {
      "epoch": 2.01,
      "grad_norm": 0.3203125,
      "learning_rate": 1.358824749207136e-05,
      "loss": 0.0139,
      "step": 141
    },
    {
      "epoch": 2.02,
      "grad_norm": 0.361328125,
      "learning_rate": 1.331268632175576e-05,
      "loss": 0.0129,
      "step": 142
    },
    {
      "epoch": 2.04,
      "grad_norm": 0.26953125,
      "learning_rate": 1.3034269657086993e-05,
      "loss": 0.0128,
      "step": 143
    },
    {
      "epoch": 2.05,
      "grad_norm": 0.40234375,
      "learning_rate": 1.27532374897626e-05,
      "loss": 0.0116,
      "step": 144
    },
    {
      "epoch": 2.07,
      "grad_norm": 0.357421875,
      "learning_rate": 1.2469832066010843e-05,
      "loss": 0.011,
      "step": 145
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.3203125,
      "learning_rate": 1.2184297677777463e-05,
      "loss": 0.0113,
      "step": 146
    },
    {
      "epoch": 2.09,
      "grad_norm": 0.37890625,
      "learning_rate": 1.1896880452149077e-05,
      "loss": 0.0113,
      "step": 147
    },
    {
      "epoch": 2.11,
      "grad_norm": 0.349609375,
      "learning_rate": 1.1607828139194683e-05,
      "loss": 0.0135,
      "step": 148
    },
    {
      "epoch": 2.12,
      "grad_norm": 0.298828125,
      "learning_rate": 1.1317389898408188e-05,
      "loss": 0.0116,
      "step": 149
    },
    {
      "epoch": 2.14,
      "grad_norm": 0.27734375,
      "learning_rate": 1.1025816083936036e-05,
      "loss": 0.0119,
      "step": 150
    },
    {
      "epoch": 2.15,
      "grad_norm": 0.400390625,
      "learning_rate": 1.073335802877504e-05,
      "loss": 0.0112,
      "step": 151
    },
    {
      "epoch": 2.17,
      "grad_norm": 0.44140625,
      "learning_rate": 1.0440267828126478e-05,
      "loss": 0.0127,
      "step": 152
    },
    {
      "epoch": 2.18,
      "grad_norm": 0.271484375,
      "learning_rate": 1.0146798122093167e-05,
      "loss": 0.011,
      "step": 153
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.30859375,
      "learning_rate": 9.853201877906836e-06,
      "loss": 0.0111,
      "step": 154
    },
    {
      "epoch": 2.21,
      "grad_norm": 0.373046875,
      "learning_rate": 9.559732171873524e-06,
      "loss": 0.0111,
      "step": 155
    },
    {
      "epoch": 2.22,
      "grad_norm": 0.35546875,
      "learning_rate": 9.266641971224963e-06,
      "loss": 0.0129,
      "step": 156
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.3203125,
      "learning_rate": 8.974183916063967e-06,
      "loss": 0.0114,
      "step": 157
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.41015625,
      "learning_rate": 8.682610101591813e-06,
      "loss": 0.0122,
      "step": 158
    },
    {
      "epoch": 2.27,
      "grad_norm": 0.400390625,
      "learning_rate": 8.39217186080532e-06,
      "loss": 0.0108,
      "step": 159
    },
    {
      "epoch": 2.28,
      "grad_norm": 0.2451171875,
      "learning_rate": 8.103119547850924e-06,
      "loss": 0.0099,
      "step": 160
    },
    {
      "epoch": 2.3,
      "grad_norm": 0.265625,
      "learning_rate": 7.815702322222539e-06,
      "loss": 0.01,
      "step": 161
    },
    {
      "epoch": 2.31,
      "grad_norm": 0.38671875,
      "learning_rate": 7.530167933989161e-06,
      "loss": 0.0122,
      "step": 162
    },
    {
      "epoch": 2.33,
      "grad_norm": 0.38671875,
      "learning_rate": 7.246762510237404e-06,
      "loss": 0.0123,
      "step": 163
    },
    {
      "epoch": 2.34,
      "grad_norm": 0.41015625,
      "learning_rate": 6.965730342913011e-06,
      "loss": 0.0126,
      "step": 164
    },
    {
      "epoch": 2.36,
      "grad_norm": 0.30078125,
      "learning_rate": 6.687313678244243e-06,
      "loss": 0.0117,
      "step": 165
    },
    {
      "epoch": 2.37,
      "grad_norm": 0.41796875,
      "learning_rate": 6.411752507928643e-06,
      "loss": 0.0106,
      "step": 166
    },
    {
      "epoch": 2.38,
      "grad_norm": 0.31640625,
      "learning_rate": 6.139284362263185e-06,
      "loss": 0.0105,
      "step": 167
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.326171875,
      "learning_rate": 5.8701441053961185e-06,
      "loss": 0.01,
      "step": 168
    },
    {
      "epoch": 2.41,
      "grad_norm": 0.314453125,
      "learning_rate": 5.604563732876989e-06,
      "loss": 0.0113,
      "step": 169
    },
    {
      "epoch": 2.43,
      "grad_norm": 0.361328125,
      "learning_rate": 5.342772171679364e-06,
      "loss": 0.0111,
      "step": 170
    },
    {
      "epoch": 2.44,
      "grad_norm": 0.271484375,
      "learning_rate": 5.084995082868658e-06,
      "loss": 0.0108,
      "step": 171
    },
    {
      "epoch": 2.46,
      "grad_norm": 0.41015625,
      "learning_rate": 4.831454667085059e-06,
      "loss": 0.0114,
      "step": 172
    },
    {
      "epoch": 2.47,
      "grad_norm": 0.376953125,
      "learning_rate": 4.58236947300939e-06,
      "loss": 0.012,
      "step": 173
    },
    {
      "epoch": 2.49,
      "grad_norm": 0.306640625,
      "learning_rate": 4.33795420897683e-06,
      "loss": 0.0102,
      "step": 174
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.349609375,
      "learning_rate": 4.098419557901036e-06,
      "loss": 0.0106,
      "step": 175
    },
    {
      "epoch": 2.5,
      "eval_loss": 0.02010701596736908,
      "eval_runtime": 19.6652,
      "eval_samples_per_second": 199.795,
      "eval_steps_per_second": 199.795,
      "step": 175
    },
    {
      "epoch": 2.51,
      "grad_norm": 0.353515625,
      "learning_rate": 3.8639719956680624e-06,
      "loss": 0.0119,
      "step": 176
    },
    {
      "epoch": 2.53,
      "grad_norm": 0.400390625,
      "learning_rate": 3.6348136131567537e-06,
      "loss": 0.0112,
      "step": 177
    },
    {
      "epoch": 2.54,
      "grad_norm": 0.3203125,
      "learning_rate": 3.4111419420388904e-06,
      "loss": 0.0114,
      "step": 178
    },
    {
      "epoch": 2.56,
      "grad_norm": 0.33203125,
      "learning_rate": 3.1931497845093753e-06,
      "loss": 0.0109,
      "step": 179
    },
    {
      "epoch": 2.57,
      "grad_norm": 0.291015625,
      "learning_rate": 2.981025047093118e-06,
      "loss": 0.0105,
      "step": 180
    },
    {
      "epoch": 2.59,
      "grad_norm": 0.296875,
      "learning_rate": 2.77495057867198e-06,
      "loss": 0.0109,
      "step": 181
    },
    {
      "epoch": 2.6,
      "grad_norm": 0.302734375,
      "learning_rate": 2.57510401287128e-06,
      "loss": 0.0112,
      "step": 182
    },
    {
      "epoch": 2.62,
      "grad_norm": 0.29296875,
      "learning_rate": 2.381657614941858e-06,
      "loss": 0.0101,
      "step": 183
    },
    {
      "epoch": 2.63,
      "grad_norm": 0.359375,
      "learning_rate": 2.1947781332695406e-06,
      "loss": 0.0109,
      "step": 184
    },
    {
      "epoch": 2.64,
      "grad_norm": 0.296875,
      "learning_rate": 2.0146266556401405e-06,
      "loss": 0.0103,
      "step": 185
    },
    {
      "epoch": 2.66,
      "grad_norm": 0.27734375,
      "learning_rate": 1.8413584703837618e-06,
      "loss": 0.0102,
      "step": 186
    },
    {
      "epoch": 2.67,
      "grad_norm": 0.333984375,
      "learning_rate": 1.6751229325182194e-06,
      "loss": 0.0101,
      "step": 187
    },
    {
      "epoch": 2.69,
      "grad_norm": 0.296875,
      "learning_rate": 1.516063335006851e-06,
      "loss": 0.0107,
      "step": 188
    },
    {
      "epoch": 2.7,
      "grad_norm": 0.328125,
      "learning_rate": 1.3643167852417894e-06,
      "loss": 0.0101,
      "step": 189
    },
    {
      "epoch": 2.72,
      "grad_norm": 0.3046875,
      "learning_rate": 1.2200140868590759e-06,
      "loss": 0.0111,
      "step": 190
    },
    {
      "epoch": 2.73,
      "grad_norm": 0.2470703125,
      "learning_rate": 1.0832796269875757e-06,
      "loss": 0.0098,
      "step": 191
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.26953125,
      "learning_rate": 9.542312690288035e-07,
      "loss": 0.0113,
      "step": 192
    },
    {
      "epoch": 2.76,
      "grad_norm": 0.3671875,
      "learning_rate": 8.329802510601559e-07,
      "loss": 0.0128,
      "step": 193
    },
    {
      "epoch": 2.78,
      "grad_norm": 0.421875,
      "learning_rate": 7.196310899490577e-07,
      "loss": 0.0114,
      "step": 194
    },
    {
      "epoch": 2.79,
      "grad_norm": 0.296875,
      "learning_rate": 6.142814912607409e-07,
      "loss": 0.0111,
      "step": 195
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.2490234375,
      "learning_rate": 5.17022265037247e-07,
      "loss": 0.0102,
      "step": 196
    },
    {
      "epoch": 2.82,
      "grad_norm": 0.28125,
      "learning_rate": 4.2793724752031807e-07,
      "loss": 0.0089,
      "step": 197
    },
    {
      "epoch": 2.83,
      "grad_norm": 0.28515625,
      "learning_rate": 3.471032288855869e-07,
      "loss": 0.0108,
      "step": 198
    },
    {
      "epoch": 2.85,
      "grad_norm": 0.29296875,
      "learning_rate": 2.745898870504116e-07,
      "loss": 0.0092,
      "step": 199
    },
    {
      "epoch": 2.86,
      "grad_norm": 0.3203125,
      "learning_rate": 2.104597276123721e-07,
      "loss": 0.0096,
      "step": 200
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.267578125,
      "learning_rate": 1.5476802997022812e-07,
      "loss": 0.0112,
      "step": 201
    },
    {
      "epoch": 2.89,
      "grad_norm": 0.29296875,
      "learning_rate": 1.075627996737627e-07,
      "loss": 0.0107,
      "step": 202
    },
    {
      "epoch": 2.91,
      "grad_norm": 0.30859375,
      "learning_rate": 6.888472704359661e-08,
      "loss": 0.011,
      "step": 203
    },
    {
      "epoch": 2.92,
      "grad_norm": 0.283203125,
      "learning_rate": 3.8767152096641504e-08,
      "loss": 0.0099,
      "step": 204
    },
    {
      "epoch": 2.93,
      "grad_norm": 0.251953125,
      "learning_rate": 1.7236035807416397e-08,
      "loss": 0.0096,
      "step": 205
    },
    {
      "epoch": 2.95,
      "grad_norm": 0.291015625,
      "learning_rate": 4.309937730015978e-09,
      "loss": 0.0106,
      "step": 206
    },
    {
      "epoch": 2.96,
      "grad_norm": 0.259765625,
      "learning_rate": 0.0,
      "loss": 0.0082,
      "step": 207
    }
  ],
  "logging_steps": 1,
  "max_steps": 207,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 69,
  "total_flos": 6.108686871838065e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}