{
  "best_metric": 18.07297866495742,
  "best_model_checkpoint": "./checkpoint-3000",
  "epoch": 29.339853300733495,
  "eval_steps": 1000,
  "global_step": 3000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.24,
      "grad_norm": 39.86528778076172,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.0555,
      "step": 25
    },
    {
      "epoch": 0.49,
      "grad_norm": Infinity,
      "learning_rate": 9.800000000000001e-07,
      "loss": 1.5219,
      "step": 50
    },
    {
      "epoch": 0.73,
      "grad_norm": 6.200109958648682,
      "learning_rate": 1.48e-06,
      "loss": 1.0167,
      "step": 75
    },
    {
      "epoch": 0.98,
      "grad_norm": 5.486103057861328,
      "learning_rate": 1.98e-06,
      "loss": 0.7299,
      "step": 100
    },
    {
      "epoch": 1.22,
      "grad_norm": 5.532894134521484,
      "learning_rate": 2.4800000000000004e-06,
      "loss": 0.6318,
      "step": 125
    },
    {
      "epoch": 1.47,
      "grad_norm": 4.895535945892334,
      "learning_rate": 2.9800000000000003e-06,
      "loss": 0.5503,
      "step": 150
    },
    {
      "epoch": 1.71,
      "grad_norm": 5.065937519073486,
      "learning_rate": 3.48e-06,
      "loss": 0.4999,
      "step": 175
    },
    {
      "epoch": 1.96,
      "grad_norm": 4.807600498199463,
      "learning_rate": 3.980000000000001e-06,
      "loss": 0.4457,
      "step": 200
    },
    {
      "epoch": 2.2,
      "grad_norm": 4.568106174468994,
      "learning_rate": 4.48e-06,
      "loss": 0.3779,
      "step": 225
    },
    {
      "epoch": 2.44,
      "grad_norm": 4.705833911895752,
      "learning_rate": 4.980000000000001e-06,
      "loss": 0.3332,
      "step": 250
    },
    {
      "epoch": 2.69,
      "grad_norm": 4.315070629119873,
      "learning_rate": 5.480000000000001e-06,
      "loss": 0.2858,
      "step": 275
    },
    {
      "epoch": 2.93,
      "grad_norm": 2.660728693008423,
      "learning_rate": 5.98e-06,
      "loss": 0.2228,
      "step": 300
    },
    {
      "epoch": 3.18,
      "grad_norm": 2.5569632053375244,
      "learning_rate": 6.480000000000001e-06,
      "loss": 0.1766,
      "step": 325
    },
    {
      "epoch": 3.42,
      "grad_norm": 2.2663660049438477,
      "learning_rate": 6.98e-06,
      "loss": 0.1475,
      "step": 350
    },
    {
      "epoch": 3.67,
      "grad_norm": 2.5257091522216797,
      "learning_rate": 7.48e-06,
      "loss": 0.1492,
      "step": 375
    },
    {
      "epoch": 3.91,
      "grad_norm": 2.3954405784606934,
      "learning_rate": 7.980000000000002e-06,
      "loss": 0.142,
      "step": 400
    },
    {
      "epoch": 4.16,
      "grad_norm": 2.181328296661377,
      "learning_rate": 8.48e-06,
      "loss": 0.1125,
      "step": 425
    },
    {
      "epoch": 4.4,
      "grad_norm": 1.9877594709396362,
      "learning_rate": 8.98e-06,
      "loss": 0.0908,
      "step": 450
    },
    {
      "epoch": 4.65,
      "grad_norm": 1.8853321075439453,
      "learning_rate": 9.48e-06,
      "loss": 0.0892,
      "step": 475
    },
    {
      "epoch": 4.89,
      "grad_norm": 2.3802549839019775,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.091,
      "step": 500
    },
    {
      "epoch": 5.13,
      "grad_norm": 1.3576879501342773,
      "learning_rate": 9.946666666666667e-06,
      "loss": 0.0713,
      "step": 525
    },
    {
      "epoch": 5.38,
      "grad_norm": 2.4532103538513184,
      "learning_rate": 9.891111111111113e-06,
      "loss": 0.0534,
      "step": 550
    },
    {
      "epoch": 5.62,
      "grad_norm": 1.3736106157302856,
      "learning_rate": 9.835555555555556e-06,
      "loss": 0.0512,
      "step": 575
    },
    {
      "epoch": 5.87,
      "grad_norm": 2.0095458030700684,
      "learning_rate": 9.780000000000001e-06,
      "loss": 0.0571,
      "step": 600
    },
    {
      "epoch": 6.11,
      "grad_norm": 1.2924531698226929,
      "learning_rate": 9.724444444444445e-06,
      "loss": 0.0453,
      "step": 625
    },
    {
      "epoch": 6.36,
      "grad_norm": 1.0012321472167969,
      "learning_rate": 9.66888888888889e-06,
      "loss": 0.0292,
      "step": 650
    },
    {
      "epoch": 6.6,
      "grad_norm": 1.4161434173583984,
      "learning_rate": 9.613333333333335e-06,
      "loss": 0.0325,
      "step": 675
    },
    {
      "epoch": 6.85,
      "grad_norm": 5.784367084503174,
      "learning_rate": 9.557777777777777e-06,
      "loss": 0.031,
      "step": 700
    },
    {
      "epoch": 7.09,
      "grad_norm": 0.8382102251052856,
      "learning_rate": 9.502222222222223e-06,
      "loss": 0.0247,
      "step": 725
    },
    {
      "epoch": 7.33,
      "grad_norm": 1.2963491678237915,
      "learning_rate": 9.446666666666667e-06,
      "loss": 0.0162,
      "step": 750
    },
    {
      "epoch": 7.58,
      "grad_norm": 1.7834402322769165,
      "learning_rate": 9.391111111111111e-06,
      "loss": 0.0175,
      "step": 775
    },
    {
      "epoch": 7.82,
      "grad_norm": 0.9083292484283447,
      "learning_rate": 9.335555555555557e-06,
      "loss": 0.0193,
      "step": 800
    },
    {
      "epoch": 8.07,
      "grad_norm": 0.5552634596824646,
      "learning_rate": 9.280000000000001e-06,
      "loss": 0.0157,
      "step": 825
    },
    {
      "epoch": 8.31,
      "grad_norm": 1.1231069564819336,
      "learning_rate": 9.224444444444445e-06,
      "loss": 0.0105,
      "step": 850
    },
    {
      "epoch": 8.56,
      "grad_norm": 1.206103801727295,
      "learning_rate": 9.168888888888889e-06,
      "loss": 0.0109,
      "step": 875
    },
    {
      "epoch": 8.8,
      "grad_norm": 0.8872191309928894,
      "learning_rate": 9.113333333333335e-06,
      "loss": 0.0126,
      "step": 900
    },
    {
      "epoch": 9.05,
      "grad_norm": 0.7421383261680603,
      "learning_rate": 9.057777777777779e-06,
      "loss": 0.0107,
      "step": 925
    },
    {
      "epoch": 9.29,
      "grad_norm": 0.7581607103347778,
      "learning_rate": 9.002222222222223e-06,
      "loss": 0.006,
      "step": 950
    },
    {
      "epoch": 9.54,
      "grad_norm": 0.6848894953727722,
      "learning_rate": 8.946666666666669e-06,
      "loss": 0.006,
      "step": 975
    },
    {
      "epoch": 9.78,
      "grad_norm": 1.044122576713562,
      "learning_rate": 8.891111111111111e-06,
      "loss": 0.0067,
      "step": 1000
    },
    {
      "epoch": 9.78,
      "eval_loss": 0.41375118494033813,
      "eval_runtime": 1461.809,
      "eval_samples_per_second": 1.98,
      "eval_steps_per_second": 0.495,
      "eval_wer": 18.938284039923083,
      "step": 1000
    },
    {
      "epoch": 10.02,
      "grad_norm": 0.6757261753082275,
      "learning_rate": 8.835555555555557e-06,
      "loss": 0.0058,
      "step": 1025
    },
    {
      "epoch": 10.27,
      "grad_norm": 1.085519552230835,
      "learning_rate": 8.78e-06,
      "loss": 0.0037,
      "step": 1050
    },
    {
      "epoch": 10.51,
      "grad_norm": 0.8559943437576294,
      "learning_rate": 8.724444444444445e-06,
      "loss": 0.0044,
      "step": 1075
    },
    {
      "epoch": 10.76,
      "grad_norm": 1.7756787538528442,
      "learning_rate": 8.66888888888889e-06,
      "loss": 0.0056,
      "step": 1100
    },
    {
      "epoch": 11.0,
      "grad_norm": 0.5664415955543518,
      "learning_rate": 8.613333333333333e-06,
      "loss": 0.0048,
      "step": 1125
    },
    {
      "epoch": 11.25,
      "grad_norm": 0.621498703956604,
      "learning_rate": 8.557777777777778e-06,
      "loss": 0.0038,
      "step": 1150
    },
    {
      "epoch": 11.49,
      "grad_norm": 0.9859088659286499,
      "learning_rate": 8.502222222222223e-06,
      "loss": 0.0035,
      "step": 1175
    },
    {
      "epoch": 11.74,
      "grad_norm": 1.2961162328720093,
      "learning_rate": 8.446666666666668e-06,
      "loss": 0.0041,
      "step": 1200
    },
    {
      "epoch": 11.98,
      "grad_norm": 0.5769420862197876,
      "learning_rate": 8.391111111111112e-06,
      "loss": 0.0035,
      "step": 1225
    },
    {
      "epoch": 12.22,
      "grad_norm": 0.5504060387611389,
      "learning_rate": 8.335555555555556e-06,
      "loss": 0.0022,
      "step": 1250
    },
    {
      "epoch": 12.47,
      "grad_norm": 0.7063620090484619,
      "learning_rate": 8.28e-06,
      "loss": 0.0027,
      "step": 1275
    },
    {
      "epoch": 12.71,
      "grad_norm": 0.6650658845901489,
      "learning_rate": 8.224444444444444e-06,
      "loss": 0.0029,
      "step": 1300
    },
    {
      "epoch": 12.96,
      "grad_norm": 0.6803381443023682,
      "learning_rate": 8.16888888888889e-06,
      "loss": 0.0023,
      "step": 1325
    },
    {
      "epoch": 13.2,
      "grad_norm": 0.19391104578971863,
      "learning_rate": 8.113333333333334e-06,
      "loss": 0.0013,
      "step": 1350
    },
    {
      "epoch": 13.45,
      "grad_norm": 0.43767812848091125,
      "learning_rate": 8.057777777777778e-06,
      "loss": 0.002,
      "step": 1375
    },
    {
      "epoch": 13.69,
      "grad_norm": 0.6082565188407898,
      "learning_rate": 8.002222222222222e-06,
      "loss": 0.0022,
      "step": 1400
    },
    {
      "epoch": 13.94,
      "grad_norm": 0.30705004930496216,
      "learning_rate": 7.946666666666666e-06,
      "loss": 0.002,
      "step": 1425
    },
    {
      "epoch": 14.18,
      "grad_norm": 0.18880507349967957,
      "learning_rate": 7.891111111111112e-06,
      "loss": 0.0015,
      "step": 1450
    },
    {
      "epoch": 14.43,
      "grad_norm": 0.32524725794792175,
      "learning_rate": 7.835555555555556e-06,
      "loss": 0.0015,
      "step": 1475
    },
    {
      "epoch": 14.67,
      "grad_norm": 2.48786997795105,
      "learning_rate": 7.78e-06,
      "loss": 0.0015,
      "step": 1500
    },
    {
      "epoch": 14.91,
      "grad_norm": 0.3373986482620239,
      "learning_rate": 7.724444444444446e-06,
      "loss": 0.0013,
      "step": 1525
    },
    {
      "epoch": 15.16,
      "grad_norm": 0.29098883271217346,
      "learning_rate": 7.66888888888889e-06,
      "loss": 0.0011,
      "step": 1550
    },
    {
      "epoch": 15.4,
      "grad_norm": 0.12477891892194748,
      "learning_rate": 7.613333333333334e-06,
      "loss": 0.0008,
      "step": 1575
    },
    {
      "epoch": 15.65,
      "grad_norm": 0.06489470601081848,
      "learning_rate": 7.557777777777779e-06,
      "loss": 0.0011,
      "step": 1600
    },
    {
      "epoch": 15.89,
      "grad_norm": 0.061178650707006454,
      "learning_rate": 7.502222222222223e-06,
      "loss": 0.001,
      "step": 1625
    },
    {
      "epoch": 16.14,
      "grad_norm": 0.038977060467004776,
      "learning_rate": 7.446666666666668e-06,
      "loss": 0.0007,
      "step": 1650
    },
    {
      "epoch": 16.38,
      "grad_norm": 0.22110821306705475,
      "learning_rate": 7.3911111111111125e-06,
      "loss": 0.0007,
      "step": 1675
    },
    {
      "epoch": 16.63,
      "grad_norm": 0.5320185422897339,
      "learning_rate": 7.335555555555556e-06,
      "loss": 0.0007,
      "step": 1700
    },
    {
      "epoch": 16.87,
      "grad_norm": 0.7823454737663269,
      "learning_rate": 7.280000000000001e-06,
      "loss": 0.0008,
      "step": 1725
    },
    {
      "epoch": 17.11,
      "grad_norm": 0.043301377445459366,
      "learning_rate": 7.224444444444445e-06,
      "loss": 0.001,
      "step": 1750
    },
    {
      "epoch": 17.36,
      "grad_norm": 0.06231601908802986,
      "learning_rate": 7.1688888888888895e-06,
      "loss": 0.0005,
      "step": 1775
    },
    {
      "epoch": 17.6,
      "grad_norm": 0.05838339775800705,
      "learning_rate": 7.113333333333334e-06,
      "loss": 0.0005,
      "step": 1800
    },
    {
      "epoch": 17.85,
      "grad_norm": 0.05545497685670853,
      "learning_rate": 7.057777777777778e-06,
      "loss": 0.0012,
      "step": 1825
    },
    {
      "epoch": 18.09,
      "grad_norm": 0.4030478894710541,
      "learning_rate": 7.0022222222222225e-06,
      "loss": 0.0008,
      "step": 1850
    },
    {
      "epoch": 18.34,
      "grad_norm": 0.27439093589782715,
      "learning_rate": 6.946666666666667e-06,
      "loss": 0.0007,
      "step": 1875
    },
    {
      "epoch": 18.58,
      "grad_norm": 0.25452977418899536,
      "learning_rate": 6.891111111111111e-06,
      "loss": 0.0007,
      "step": 1900
    },
    {
      "epoch": 18.83,
      "grad_norm": 0.06759922206401825,
      "learning_rate": 6.835555555555556e-06,
      "loss": 0.0007,
      "step": 1925
    },
    {
      "epoch": 19.07,
      "grad_norm": 0.25859466195106506,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.0006,
      "step": 1950
    },
    {
      "epoch": 19.32,
      "grad_norm": 0.7427995800971985,
      "learning_rate": 6.724444444444444e-06,
      "loss": 0.0005,
      "step": 1975
    },
    {
      "epoch": 19.56,
      "grad_norm": 0.0788324698805809,
      "learning_rate": 6.668888888888889e-06,
      "loss": 0.0008,
      "step": 2000
    },
    {
      "epoch": 19.56,
      "eval_loss": 0.49481505155563354,
      "eval_runtime": 1457.5675,
      "eval_samples_per_second": 1.985,
      "eval_steps_per_second": 0.497,
      "eval_wer": 18.4735830052193,
      "step": 2000
    },
    {
      "epoch": 19.8,
      "grad_norm": 0.04227956011891365,
      "learning_rate": 6.613333333333334e-06,
      "loss": 0.0008,
      "step": 2025
    },
    {
      "epoch": 20.05,
      "grad_norm": 0.5580443739891052,
      "learning_rate": 6.557777777777778e-06,
      "loss": 0.001,
      "step": 2050
    },
    {
      "epoch": 20.29,
      "grad_norm": 0.7394335865974426,
      "learning_rate": 6.502222222222223e-06,
      "loss": 0.0014,
      "step": 2075
    },
    {
      "epoch": 20.54,
      "grad_norm": 0.8055688142776489,
      "learning_rate": 6.446666666666668e-06,
      "loss": 0.0011,
      "step": 2100
    },
    {
      "epoch": 20.78,
      "grad_norm": 0.13119255006313324,
      "learning_rate": 6.391111111111111e-06,
      "loss": 0.0016,
      "step": 2125
    },
    {
      "epoch": 21.03,
      "grad_norm": 0.21813702583312988,
      "learning_rate": 6.335555555555556e-06,
      "loss": 0.0014,
      "step": 2150
    },
    {
      "epoch": 21.27,
      "grad_norm": 0.1066213995218277,
      "learning_rate": 6.280000000000001e-06,
      "loss": 0.0009,
      "step": 2175
    },
    {
      "epoch": 21.52,
      "grad_norm": 0.8583650588989258,
      "learning_rate": 6.224444444444445e-06,
      "loss": 0.0012,
      "step": 2200
    },
    {
      "epoch": 21.76,
      "grad_norm": 1.2513171434402466,
      "learning_rate": 6.16888888888889e-06,
      "loss": 0.0021,
      "step": 2225
    },
    {
      "epoch": 22.0,
      "grad_norm": 0.8390223979949951,
      "learning_rate": 6.113333333333333e-06,
      "loss": 0.0018,
      "step": 2250
    },
    {
      "epoch": 22.25,
      "grad_norm": 0.8746078610420227,
      "learning_rate": 6.057777777777778e-06,
      "loss": 0.0015,
      "step": 2275
    },
    {
      "epoch": 22.49,
      "grad_norm": 0.13358770310878754,
      "learning_rate": 6.002222222222223e-06,
      "loss": 0.0016,
      "step": 2300
    },
    {
      "epoch": 22.74,
      "grad_norm": 0.07681471109390259,
      "learning_rate": 5.946666666666668e-06,
      "loss": 0.0011,
      "step": 2325
    },
    {
      "epoch": 22.98,
      "grad_norm": 0.5511406660079956,
      "learning_rate": 5.891111111111112e-06,
      "loss": 0.0013,
      "step": 2350
    },
    {
      "epoch": 23.23,
      "grad_norm": 0.23318354785442352,
      "learning_rate": 5.8355555555555565e-06,
      "loss": 0.0011,
      "step": 2375
    },
    {
      "epoch": 23.47,
      "grad_norm": 0.12396834790706635,
      "learning_rate": 5.78e-06,
      "loss": 0.0009,
      "step": 2400
    },
    {
      "epoch": 23.72,
      "grad_norm": 0.0644838809967041,
      "learning_rate": 5.724444444444445e-06,
      "loss": 0.0006,
      "step": 2425
    },
    {
      "epoch": 23.96,
      "grad_norm": 0.47172439098358154,
      "learning_rate": 5.6688888888888895e-06,
      "loss": 0.0007,
      "step": 2450
    },
    {
      "epoch": 24.21,
      "grad_norm": 0.030231019482016563,
      "learning_rate": 5.613333333333334e-06,
      "loss": 0.0005,
      "step": 2475
    },
    {
      "epoch": 24.45,
      "grad_norm": 0.01894545555114746,
      "learning_rate": 5.557777777777778e-06,
      "loss": 0.0003,
      "step": 2500
    },
    {
      "epoch": 24.69,
      "grad_norm": 0.18070322275161743,
      "learning_rate": 5.5022222222222224e-06,
      "loss": 0.0009,
      "step": 2525
    },
    {
      "epoch": 24.94,
      "grad_norm": 0.0435708686709404,
      "learning_rate": 5.4466666666666665e-06,
      "loss": 0.0009,
      "step": 2550
    },
    {
      "epoch": 25.18,
      "grad_norm": 0.07220447063446045,
      "learning_rate": 5.391111111111111e-06,
      "loss": 0.0005,
      "step": 2575
    },
    {
      "epoch": 25.43,
      "grad_norm": 0.01733986660838127,
      "learning_rate": 5.335555555555556e-06,
      "loss": 0.0004,
      "step": 2600
    },
    {
      "epoch": 25.67,
      "grad_norm": 0.03520004078745842,
      "learning_rate": 5.28e-06,
      "loss": 0.0007,
      "step": 2625
    },
    {
      "epoch": 25.92,
      "grad_norm": 0.03853292763233185,
      "learning_rate": 5.224444444444445e-06,
      "loss": 0.0005,
      "step": 2650
    },
    {
      "epoch": 26.16,
      "grad_norm": 0.13450591266155243,
      "learning_rate": 5.168888888888889e-06,
      "loss": 0.0005,
      "step": 2675
    },
    {
      "epoch": 26.41,
      "grad_norm": 0.029255390167236328,
      "learning_rate": 5.113333333333333e-06,
      "loss": 0.0004,
      "step": 2700
    },
    {
      "epoch": 26.65,
      "grad_norm": 0.025706447660923004,
      "learning_rate": 5.057777777777778e-06,
      "loss": 0.0003,
      "step": 2725
    },
    {
      "epoch": 26.89,
      "grad_norm": 0.902415931224823,
      "learning_rate": 5.002222222222223e-06,
      "loss": 0.0002,
      "step": 2750
    },
    {
      "epoch": 27.14,
      "grad_norm": 0.013656423427164555,
      "learning_rate": 4.946666666666667e-06,
      "loss": 0.0002,
      "step": 2775
    },
    {
      "epoch": 27.38,
      "grad_norm": 0.018052740022540092,
      "learning_rate": 4.891111111111111e-06,
      "loss": 0.0003,
      "step": 2800
    },
    {
      "epoch": 27.63,
      "grad_norm": 0.07001502811908722,
      "learning_rate": 4.835555555555556e-06,
      "loss": 0.0002,
      "step": 2825
    },
    {
      "epoch": 27.87,
      "grad_norm": 0.01241993810981512,
      "learning_rate": 4.78e-06,
      "loss": 0.0003,
      "step": 2850
    },
    {
      "epoch": 28.12,
      "grad_norm": 0.013495378196239471,
      "learning_rate": 4.724444444444445e-06,
      "loss": 0.0001,
      "step": 2875
    },
    {
      "epoch": 28.36,
      "grad_norm": 0.011048069223761559,
      "learning_rate": 4.66888888888889e-06,
      "loss": 0.0001,
      "step": 2900
    },
    {
      "epoch": 28.61,
      "grad_norm": 0.015970442444086075,
      "learning_rate": 4.613333333333334e-06,
      "loss": 0.0002,
      "step": 2925
    },
    {
      "epoch": 28.85,
      "grad_norm": 0.009559527039527893,
      "learning_rate": 4.557777777777778e-06,
      "loss": 0.0002,
      "step": 2950
    },
    {
      "epoch": 29.1,
      "grad_norm": 0.010293275117874146,
      "learning_rate": 4.502222222222223e-06,
      "loss": 0.0001,
      "step": 2975
    },
    {
      "epoch": 29.34,
      "grad_norm": 0.01116804126650095,
      "learning_rate": 4.446666666666667e-06,
      "loss": 0.0001,
      "step": 3000
    },
    {
      "epoch": 29.34,
      "eval_loss": 0.5352661609649658,
      "eval_runtime": 1458.2257,
      "eval_samples_per_second": 1.985,
      "eval_steps_per_second": 0.496,
      "eval_wer": 18.07297866495742,
      "step": 3000
    }
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 50,
  "save_steps": 1000,
  "total_flos": 5.537492095500288e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}