{
|
"best_metric": 0.955227792263031, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-150", |
|
"epoch": 0.9615384615384616, |
|
"eval_steps": 50, |
|
"global_step": 150, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.00641025641025641, |
|
"grad_norm": 9.957436561584473, |
|
"learning_rate": 1e-05, |
|
"loss": 7.4946, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00641025641025641, |
|
"eval_loss": 2.116502523422241, |
|
"eval_runtime": 27.7986, |
|
"eval_samples_per_second": 9.461, |
|
"eval_steps_per_second": 2.374, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01282051282051282, |
|
"grad_norm": 11.247956275939941, |
|
"learning_rate": 2e-05, |
|
"loss": 8.1836, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.019230769230769232, |
|
"grad_norm": 11.235127449035645, |
|
"learning_rate": 3e-05, |
|
"loss": 8.2321, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.02564102564102564, |
|
"grad_norm": 9.880584716796875, |
|
"learning_rate": 4e-05, |
|
"loss": 7.7768, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.03205128205128205, |
|
"grad_norm": 6.761786460876465, |
|
"learning_rate": 5e-05, |
|
"loss": 7.4105, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.038461538461538464, |
|
"grad_norm": 5.910149574279785, |
|
"learning_rate": 6e-05, |
|
"loss": 6.8102, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.04487179487179487, |
|
"grad_norm": 7.675437927246094, |
|
"learning_rate": 7e-05, |
|
"loss": 6.6599, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.05128205128205128, |
|
"grad_norm": 6.464653491973877, |
|
"learning_rate": 8e-05, |
|
"loss": 6.0097, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.057692307692307696, |
|
"grad_norm": 6.064931392669678, |
|
"learning_rate": 9e-05, |
|
"loss": 5.4483, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.0641025641025641, |
|
"grad_norm": 4.900819301605225, |
|
"learning_rate": 0.0001, |
|
"loss": 4.9754, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.07051282051282051, |
|
"grad_norm": 4.617133140563965, |
|
"learning_rate": 9.999316524962345e-05, |
|
"loss": 4.7351, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.07692307692307693, |
|
"grad_norm": 4.90165901184082, |
|
"learning_rate": 9.997266286704631e-05, |
|
"loss": 4.7322, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.08333333333333333, |
|
"grad_norm": 4.873775482177734, |
|
"learning_rate": 9.993849845741524e-05, |
|
"loss": 4.3286, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.08974358974358974, |
|
"grad_norm": 4.3032026290893555, |
|
"learning_rate": 9.989068136093873e-05, |
|
"loss": 4.4172, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.09615384615384616, |
|
"grad_norm": 3.951387643814087, |
|
"learning_rate": 9.98292246503335e-05, |
|
"loss": 4.3254, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.10256410256410256, |
|
"grad_norm": 3.966798782348633, |
|
"learning_rate": 9.975414512725057e-05, |
|
"loss": 4.4062, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.10897435897435898, |
|
"grad_norm": 3.9771132469177246, |
|
"learning_rate": 9.966546331768191e-05, |
|
"loss": 4.2944, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.11538461538461539, |
|
"grad_norm": 4.00201416015625, |
|
"learning_rate": 9.956320346634876e-05, |
|
"loss": 4.1275, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.12179487179487179, |
|
"grad_norm": 4.415872573852539, |
|
"learning_rate": 9.944739353007344e-05, |
|
"loss": 4.3262, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.1282051282051282, |
|
"grad_norm": 4.9452080726623535, |
|
"learning_rate": 9.931806517013612e-05, |
|
"loss": 4.0444, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.1346153846153846, |
|
"grad_norm": 5.381906032562256, |
|
"learning_rate": 9.917525374361912e-05, |
|
"loss": 4.0786, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.14102564102564102, |
|
"grad_norm": 5.061701774597168, |
|
"learning_rate": 9.901899829374047e-05, |
|
"loss": 3.9276, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.14743589743589744, |
|
"grad_norm": 5.450504302978516, |
|
"learning_rate": 9.884934153917997e-05, |
|
"loss": 4.2544, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.15384615384615385, |
|
"grad_norm": 4.766714096069336, |
|
"learning_rate": 9.86663298624003e-05, |
|
"loss": 4.3889, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.16025641025641027, |
|
"grad_norm": 8.331812858581543, |
|
"learning_rate": 9.847001329696653e-05, |
|
"loss": 4.6597, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.16666666666666666, |
|
"grad_norm": 4.654125690460205, |
|
"learning_rate": 9.826044551386744e-05, |
|
"loss": 4.2013, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.17307692307692307, |
|
"grad_norm": 4.443905830383301, |
|
"learning_rate": 9.803768380684242e-05, |
|
"loss": 4.014, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.1794871794871795, |
|
"grad_norm": 4.30183744430542, |
|
"learning_rate": 9.780178907671789e-05, |
|
"loss": 3.9669, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.1858974358974359, |
|
"grad_norm": 4.961958408355713, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 3.9399, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.19230769230769232, |
|
"grad_norm": 5.458182334899902, |
|
"learning_rate": 9.729086208503174e-05, |
|
"loss": 4.163, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.1987179487179487, |
|
"grad_norm": 5.726374626159668, |
|
"learning_rate": 9.701596950580806e-05, |
|
"loss": 3.965, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.20512820512820512, |
|
"grad_norm": 4.620736122131348, |
|
"learning_rate": 9.672822322997305e-05, |
|
"loss": 3.9121, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.21153846153846154, |
|
"grad_norm": 5.1553215980529785, |
|
"learning_rate": 9.642770192448536e-05, |
|
"loss": 3.8999, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.21794871794871795, |
|
"grad_norm": 5.532503128051758, |
|
"learning_rate": 9.611448774886924e-05, |
|
"loss": 4.5878, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.22435897435897437, |
|
"grad_norm": 4.936411380767822, |
|
"learning_rate": 9.578866633275288e-05, |
|
"loss": 3.991, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.23076923076923078, |
|
"grad_norm": 5.254283428192139, |
|
"learning_rate": 9.545032675245813e-05, |
|
"loss": 3.7305, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.23717948717948717, |
|
"grad_norm": 5.494096279144287, |
|
"learning_rate": 9.509956150664796e-05, |
|
"loss": 3.866, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.24358974358974358, |
|
"grad_norm": 8.530524253845215, |
|
"learning_rate": 9.473646649103818e-05, |
|
"loss": 4.9389, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 8.089641571044922, |
|
"learning_rate": 9.43611409721806e-05, |
|
"loss": 6.0769, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.2564102564102564, |
|
"grad_norm": 7.604012489318848, |
|
"learning_rate": 9.397368756032445e-05, |
|
"loss": 6.0996, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.26282051282051283, |
|
"grad_norm": 5.347238540649414, |
|
"learning_rate": 9.357421218136386e-05, |
|
"loss": 5.706, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.2692307692307692, |
|
"grad_norm": 3.8665168285369873, |
|
"learning_rate": 9.316282404787871e-05, |
|
"loss": 5.249, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.27564102564102566, |
|
"grad_norm": 3.3026998043060303, |
|
"learning_rate": 9.273963562927695e-05, |
|
"loss": 5.2123, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.28205128205128205, |
|
"grad_norm": 3.3289437294006348, |
|
"learning_rate": 9.230476262104677e-05, |
|
"loss": 5.036, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.28846153846153844, |
|
"grad_norm": 3.3549132347106934, |
|
"learning_rate": 9.185832391312644e-05, |
|
"loss": 5.0075, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.2948717948717949, |
|
"grad_norm": 3.145491600036621, |
|
"learning_rate": 9.140044155740101e-05, |
|
"loss": 4.5827, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.30128205128205127, |
|
"grad_norm": 3.2935290336608887, |
|
"learning_rate": 9.093124073433463e-05, |
|
"loss": 4.7193, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.3076923076923077, |
|
"grad_norm": 3.1303982734680176, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 4.3667, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.3141025641025641, |
|
"grad_norm": 2.9355759620666504, |
|
"learning_rate": 8.995939984474624e-05, |
|
"loss": 4.0385, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.32051282051282054, |
|
"grad_norm": 3.22135591506958, |
|
"learning_rate": 8.945702546981969e-05, |
|
"loss": 4.1746, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.32051282051282054, |
|
"eval_loss": 1.0742619037628174, |
|
"eval_runtime": 28.2421, |
|
"eval_samples_per_second": 9.312, |
|
"eval_steps_per_second": 2.337, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.3269230769230769, |
|
"grad_norm": 3.3093056678771973, |
|
"learning_rate": 8.894386393810563e-05, |
|
"loss": 3.9733, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.3333333333333333, |
|
"grad_norm": 3.6032257080078125, |
|
"learning_rate": 8.842005554284296e-05, |
|
"loss": 4.1736, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.33974358974358976, |
|
"grad_norm": 3.5169215202331543, |
|
"learning_rate": 8.788574348801675e-05, |
|
"loss": 4.7065, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.34615384615384615, |
|
"grad_norm": 3.892195463180542, |
|
"learning_rate": 8.73410738492077e-05, |
|
"loss": 3.6555, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.3525641025641026, |
|
"grad_norm": 3.566694736480713, |
|
"learning_rate": 8.678619553365659e-05, |
|
"loss": 3.4694, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.358974358974359, |
|
"grad_norm": 3.5258185863494873, |
|
"learning_rate": 8.622126023955446e-05, |
|
"loss": 4.0258, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.36538461538461536, |
|
"grad_norm": 3.711472988128662, |
|
"learning_rate": 8.564642241456986e-05, |
|
"loss": 3.9599, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.3717948717948718, |
|
"grad_norm": 3.8064444065093994, |
|
"learning_rate": 8.506183921362443e-05, |
|
"loss": 4.0288, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.3782051282051282, |
|
"grad_norm": 4.256228446960449, |
|
"learning_rate": 8.44676704559283e-05, |
|
"loss": 4.2489, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.38461538461538464, |
|
"grad_norm": 3.41007924079895, |
|
"learning_rate": 8.386407858128706e-05, |
|
"loss": 3.6778, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.391025641025641, |
|
"grad_norm": 4.161782264709473, |
|
"learning_rate": 8.32512286056924e-05, |
|
"loss": 3.8408, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.3974358974358974, |
|
"grad_norm": 4.120133876800537, |
|
"learning_rate": 8.262928807620843e-05, |
|
"loss": 3.8373, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.40384615384615385, |
|
"grad_norm": 3.7649734020233154, |
|
"learning_rate": 8.199842702516583e-05, |
|
"loss": 3.8083, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.41025641025641024, |
|
"grad_norm": 3.989408016204834, |
|
"learning_rate": 8.135881792367686e-05, |
|
"loss": 3.7964, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.4166666666666667, |
|
"grad_norm": 4.099987030029297, |
|
"learning_rate": 8.07106356344834e-05, |
|
"loss": 3.8704, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.4230769230769231, |
|
"grad_norm": 4.085524082183838, |
|
"learning_rate": 8.005405736415126e-05, |
|
"loss": 3.7143, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.42948717948717946, |
|
"grad_norm": 5.65880823135376, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 3.625, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.4358974358974359, |
|
"grad_norm": 4.0476393699646, |
|
"learning_rate": 7.871643313414718e-05, |
|
"loss": 3.6684, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.4423076923076923, |
|
"grad_norm": 4.744653224945068, |
|
"learning_rate": 7.803575286758364e-05, |
|
"loss": 3.4566, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.44871794871794873, |
|
"grad_norm": 4.958125114440918, |
|
"learning_rate": 7.734740790612136e-05, |
|
"loss": 4.0852, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.4551282051282051, |
|
"grad_norm": 6.67598819732666, |
|
"learning_rate": 7.66515864363997e-05, |
|
"loss": 3.5015, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.46153846153846156, |
|
"grad_norm": 5.2367706298828125, |
|
"learning_rate": 7.594847868906076e-05, |
|
"loss": 3.952, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.46794871794871795, |
|
"grad_norm": 4.691414833068848, |
|
"learning_rate": 7.52382768867422e-05, |
|
"loss": 3.2776, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.47435897435897434, |
|
"grad_norm": 5.058370113372803, |
|
"learning_rate": 7.452117519152542e-05, |
|
"loss": 3.6019, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.4807692307692308, |
|
"grad_norm": 5.959715843200684, |
|
"learning_rate": 7.379736965185368e-05, |
|
"loss": 3.5346, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.48717948717948717, |
|
"grad_norm": 10.346742630004883, |
|
"learning_rate": 7.30670581489344e-05, |
|
"loss": 4.6809, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.4935897435897436, |
|
"grad_norm": 11.073515892028809, |
|
"learning_rate": 7.233044034264034e-05, |
|
"loss": 6.4677, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 9.660663604736328, |
|
"learning_rate": 7.158771761692464e-05, |
|
"loss": 5.9138, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.5064102564102564, |
|
"grad_norm": 7.799278259277344, |
|
"learning_rate": 7.083909302476453e-05, |
|
"loss": 5.3653, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.5128205128205128, |
|
"grad_norm": 7.223135471343994, |
|
"learning_rate": 7.008477123264848e-05, |
|
"loss": 5.1873, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.5192307692307693, |
|
"grad_norm": 5.180141925811768, |
|
"learning_rate": 6.932495846462261e-05, |
|
"loss": 4.6571, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.5256410256410257, |
|
"grad_norm": 3.5540072917938232, |
|
"learning_rate": 6.855986244591104e-05, |
|
"loss": 4.1324, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.532051282051282, |
|
"grad_norm": 2.854039430618286, |
|
"learning_rate": 6.778969234612584e-05, |
|
"loss": 4.1329, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.5384615384615384, |
|
"grad_norm": 3.3957605361938477, |
|
"learning_rate": 6.701465872208216e-05, |
|
"loss": 4.7686, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.5448717948717948, |
|
"grad_norm": 3.0800118446350098, |
|
"learning_rate": 6.623497346023418e-05, |
|
"loss": 4.3467, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.5512820512820513, |
|
"grad_norm": 3.383251428604126, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 4.4548, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.5576923076923077, |
|
"grad_norm": 3.3952181339263916, |
|
"learning_rate": 6.466250186922325e-05, |
|
"loss": 4.0999, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.5641025641025641, |
|
"grad_norm": 3.5181736946105957, |
|
"learning_rate": 6.387014543809223e-05, |
|
"loss": 4.7229, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.5705128205128205, |
|
"grad_norm": 3.4152626991271973, |
|
"learning_rate": 6.307399704769099e-05, |
|
"loss": 3.8945, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.5769230769230769, |
|
"grad_norm": 3.2614777088165283, |
|
"learning_rate": 6.227427435703997e-05, |
|
"loss": 3.7917, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.5833333333333334, |
|
"grad_norm": 3.2620959281921387, |
|
"learning_rate": 6.147119600233758e-05, |
|
"loss": 3.7493, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.5897435897435898, |
|
"grad_norm": 3.280299186706543, |
|
"learning_rate": 6.066498153718735e-05, |
|
"loss": 3.8057, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.5961538461538461, |
|
"grad_norm": 3.3490095138549805, |
|
"learning_rate": 5.985585137257401e-05, |
|
"loss": 4.0438, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.6025641025641025, |
|
"grad_norm": 3.555368423461914, |
|
"learning_rate": 5.90440267166055e-05, |
|
"loss": 3.5393, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.6089743589743589, |
|
"grad_norm": 3.605402946472168, |
|
"learning_rate": 5.8229729514036705e-05, |
|
"loss": 3.8683, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.6153846153846154, |
|
"grad_norm": 3.5116748809814453, |
|
"learning_rate": 5.74131823855921e-05, |
|
"loss": 3.9831, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.6217948717948718, |
|
"grad_norm": 3.322603702545166, |
|
"learning_rate": 5.6594608567103456e-05, |
|
"loss": 3.9772, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.6282051282051282, |
|
"grad_norm": 3.720085620880127, |
|
"learning_rate": 5.577423184847932e-05, |
|
"loss": 3.6853, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.6346153846153846, |
|
"grad_norm": 3.5957906246185303, |
|
"learning_rate": 5.495227651252315e-05, |
|
"loss": 3.8104, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.6410256410256411, |
|
"grad_norm": 3.5254197120666504, |
|
"learning_rate": 5.4128967273616625e-05, |
|
"loss": 3.6103, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.6410256410256411, |
|
"eval_loss": 0.9980463981628418, |
|
"eval_runtime": 28.245, |
|
"eval_samples_per_second": 9.311, |
|
"eval_steps_per_second": 2.337, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.6474358974358975, |
|
"grad_norm": 3.6587297916412354, |
|
"learning_rate": 5.330452921628497e-05, |
|
"loss": 3.918, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.6538461538461539, |
|
"grad_norm": 3.831242084503174, |
|
"learning_rate": 5.247918773366112e-05, |
|
"loss": 3.7586, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.6602564102564102, |
|
"grad_norm": 3.78513503074646, |
|
"learning_rate": 5.165316846586541e-05, |
|
"loss": 3.5943, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 3.6787242889404297, |
|
"learning_rate": 5.0826697238317935e-05, |
|
"loss": 3.4123, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.6730769230769231, |
|
"grad_norm": 3.7676947116851807, |
|
"learning_rate": 5e-05, |
|
"loss": 3.1584, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.6794871794871795, |
|
"grad_norm": 4.073101043701172, |
|
"learning_rate": 4.917330276168208e-05, |
|
"loss": 3.45, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.6858974358974359, |
|
"grad_norm": 4.140744209289551, |
|
"learning_rate": 4.834683153413459e-05, |
|
"loss": 3.5452, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.6923076923076923, |
|
"grad_norm": 4.776129722595215, |
|
"learning_rate": 4.7520812266338885e-05, |
|
"loss": 3.7265, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.6987179487179487, |
|
"grad_norm": 4.0117506980896, |
|
"learning_rate": 4.669547078371504e-05, |
|
"loss": 3.2593, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.7051282051282052, |
|
"grad_norm": 4.677291393280029, |
|
"learning_rate": 4.5871032726383386e-05, |
|
"loss": 3.1475, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.7115384615384616, |
|
"grad_norm": 5.10119104385376, |
|
"learning_rate": 4.504772348747687e-05, |
|
"loss": 3.3635, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.717948717948718, |
|
"grad_norm": 5.42010498046875, |
|
"learning_rate": 4.4225768151520694e-05, |
|
"loss": 4.0145, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.7243589743589743, |
|
"grad_norm": 5.240736484527588, |
|
"learning_rate": 4.3405391432896555e-05, |
|
"loss": 4.2514, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.7307692307692307, |
|
"grad_norm": 7.772806167602539, |
|
"learning_rate": 4.2586817614407895e-05, |
|
"loss": 4.5696, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.7371794871794872, |
|
"grad_norm": 8.802128791809082, |
|
"learning_rate": 4.17702704859633e-05, |
|
"loss": 5.8478, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.7435897435897436, |
|
"grad_norm": 7.847908020019531, |
|
"learning_rate": 4.095597328339452e-05, |
|
"loss": 5.0477, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 7.966437816619873, |
|
"learning_rate": 4.0144148627425993e-05, |
|
"loss": 5.0813, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.7564102564102564, |
|
"grad_norm": 8.309389114379883, |
|
"learning_rate": 3.933501846281267e-05, |
|
"loss": 5.3415, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.7628205128205128, |
|
"grad_norm": 6.793262958526611, |
|
"learning_rate": 3.852880399766243e-05, |
|
"loss": 4.9559, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.7692307692307693, |
|
"grad_norm": 6.368189334869385, |
|
"learning_rate": 3.772572564296005e-05, |
|
"loss": 4.708, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.7756410256410257, |
|
"grad_norm": 4.444794654846191, |
|
"learning_rate": 3.6926002952309016e-05, |
|
"loss": 4.6569, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.782051282051282, |
|
"grad_norm": 5.020304203033447, |
|
"learning_rate": 3.612985456190778e-05, |
|
"loss": 4.4671, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.7884615384615384, |
|
"grad_norm": 4.116661071777344, |
|
"learning_rate": 3.533749813077677e-05, |
|
"loss": 4.1168, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.7948717948717948, |
|
"grad_norm": 3.835765838623047, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 4.2167, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.8012820512820513, |
|
"grad_norm": 3.623770236968994, |
|
"learning_rate": 3.3765026539765834e-05, |
|
"loss": 4.1236, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.8076923076923077, |
|
"grad_norm": 3.4973554611206055, |
|
"learning_rate": 3.298534127791785e-05, |
|
"loss": 4.2815, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.8141025641025641, |
|
"grad_norm": 3.046509027481079, |
|
"learning_rate": 3.221030765387417e-05, |
|
"loss": 4.2196, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.8205128205128205, |
|
"grad_norm": 2.9827661514282227, |
|
"learning_rate": 3.144013755408895e-05, |
|
"loss": 4.0768, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.8269230769230769, |
|
"grad_norm": 2.907933235168457, |
|
"learning_rate": 3.0675041535377405e-05, |
|
"loss": 3.9771, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.8333333333333334, |
|
"grad_norm": 3.1948816776275635, |
|
"learning_rate": 2.991522876735154e-05, |
|
"loss": 4.0728, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.8397435897435898, |
|
"grad_norm": 3.368992805480957, |
|
"learning_rate": 2.916090697523549e-05, |
|
"loss": 4.0119, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.8461538461538461, |
|
"grad_norm": 3.3085951805114746, |
|
"learning_rate": 2.8412282383075363e-05, |
|
"loss": 3.6765, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.8525641025641025, |
|
"grad_norm": 3.207343816757202, |
|
"learning_rate": 2.766955965735968e-05, |
|
"loss": 3.6675, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.8589743589743589, |
|
"grad_norm": 3.4521420001983643, |
|
"learning_rate": 2.693294185106562e-05, |
|
"loss": 3.7139, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.8653846153846154, |
|
"grad_norm": 3.430363655090332, |
|
"learning_rate": 2.6202630348146324e-05, |
|
"loss": 3.9139, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.8717948717948718, |
|
"grad_norm": 3.638237237930298, |
|
"learning_rate": 2.547882480847461e-05, |
|
"loss": 3.9763, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.8782051282051282, |
|
"grad_norm": 3.6488420963287354, |
|
"learning_rate": 2.476172311325783e-05, |
|
"loss": 3.6282, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.8846153846153846, |
|
"grad_norm": 3.8091797828674316, |
|
"learning_rate": 2.405152131093926e-05, |
|
"loss": 3.75, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.8910256410256411, |
|
"grad_norm": 4.3403706550598145, |
|
"learning_rate": 2.3348413563600325e-05, |
|
"loss": 3.7878, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.8974358974358975, |
|
"grad_norm": 3.413701295852661, |
|
"learning_rate": 2.2652592093878666e-05, |
|
"loss": 3.7514, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.9038461538461539, |
|
"grad_norm": 3.869084596633911, |
|
"learning_rate": 2.196424713241637e-05, |
|
"loss": 3.8538, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.9102564102564102, |
|
"grad_norm": 3.5061874389648438, |
|
"learning_rate": 2.128356686585282e-05, |
|
"loss": 3.4699, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.9166666666666666, |
|
"grad_norm": 3.533756971359253, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 3.6468, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.9230769230769231, |
|
"grad_norm": 3.7761576175689697, |
|
"learning_rate": 1.9945942635848748e-05, |
|
"loss": 3.6921, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.9294871794871795, |
|
"grad_norm": 3.948042154312134, |
|
"learning_rate": 1.928936436551661e-05, |
|
"loss": 3.6487, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.9358974358974359, |
|
"grad_norm": 3.8867499828338623, |
|
"learning_rate": 1.8641182076323148e-05, |
|
"loss": 3.178, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.9423076923076923, |
|
"grad_norm": 4.0144782066345215, |
|
"learning_rate": 1.800157297483417e-05, |
|
"loss": 3.2615, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.9487179487179487, |
|
"grad_norm": 3.983966112136841, |
|
"learning_rate": 1.7370711923791567e-05, |
|
"loss": 3.024, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.9551282051282052, |
|
"grad_norm": 4.11751651763916, |
|
"learning_rate": 1.6748771394307585e-05, |
|
"loss": 3.3496, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.9615384615384616, |
|
"grad_norm": 4.289830207824707, |
|
"learning_rate": 1.6135921418712956e-05, |
|
"loss": 3.9751, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.9615384615384616, |
|
"eval_loss": 0.955227792263031, |
|
"eval_runtime": 28.2085, |
|
"eval_samples_per_second": 9.323, |
|
"eval_steps_per_second": 2.34, |
|
"step": 150 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.48136509800448e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
}