|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 24.12123763418228, |
|
"eval_steps": 100, |
|
"global_step": 57300, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.6430506706237793, |
|
"learning_rate": 9.5e-06, |
|
"loss": 5.6141, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"eval_cer": 0.9770320871299519, |
|
"eval_loss": 2.355087995529175, |
|
"eval_runtime": 329.8418, |
|
"eval_samples_per_second": 28.735, |
|
"eval_steps_per_second": 3.593, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.838773012161255, |
|
"learning_rate": 1.9500000000000003e-05, |
|
"loss": 2.1498, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"eval_cer": 0.6385162879824802, |
|
"eval_loss": 1.961960792541504, |
|
"eval_runtime": 334.0577, |
|
"eval_samples_per_second": 28.372, |
|
"eval_steps_per_second": 3.547, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.848980665206909, |
|
"learning_rate": 2.95e-05, |
|
"loss": 2.6461, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"eval_cer": 0.5852479371162645, |
|
"eval_loss": 2.6564223766326904, |
|
"eval_runtime": 350.7705, |
|
"eval_samples_per_second": 27.021, |
|
"eval_steps_per_second": 3.378, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.9299519062042236, |
|
"learning_rate": 3.9500000000000005e-05, |
|
"loss": 2.359, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"eval_cer": 0.5907913261116108, |
|
"eval_loss": 2.6195201873779297, |
|
"eval_runtime": 341.3159, |
|
"eval_samples_per_second": 27.769, |
|
"eval_steps_per_second": 3.472, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.236645698547363, |
|
"learning_rate": 4.9500000000000004e-05, |
|
"loss": 4.2363, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"eval_cer": 0.624139650385202, |
|
"eval_loss": 2.457566261291504, |
|
"eval_runtime": 356.3471, |
|
"eval_samples_per_second": 26.598, |
|
"eval_steps_per_second": 3.325, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 13.769584655761719, |
|
"learning_rate": 4.9932862190812725e-05, |
|
"loss": 2.6933, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"eval_cer": 0.6483369833013961, |
|
"eval_loss": 3.4720070362091064, |
|
"eval_runtime": 345.5879, |
|
"eval_samples_per_second": 27.426, |
|
"eval_steps_per_second": 3.429, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 4.292990207672119, |
|
"learning_rate": 4.986289752650177e-05, |
|
"loss": 4.1075, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"eval_cer": 0.6181098705564898, |
|
"eval_loss": 3.3849761486053467, |
|
"eval_runtime": 353.3198, |
|
"eval_samples_per_second": 26.826, |
|
"eval_steps_per_second": 3.354, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 4.32611083984375, |
|
"learning_rate": 4.97922261484099e-05, |
|
"loss": 2.6907, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"eval_cer": 0.6191510891243988, |
|
"eval_loss": 2.5412750244140625, |
|
"eval_runtime": 343.4967, |
|
"eval_samples_per_second": 27.593, |
|
"eval_steps_per_second": 3.45, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 3.173403263092041, |
|
"learning_rate": 4.972155477031802e-05, |
|
"loss": 4.2294, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"eval_cer": 0.5840918423213797, |
|
"eval_loss": 2.6817102432250977, |
|
"eval_runtime": 354.5341, |
|
"eval_samples_per_second": 26.734, |
|
"eval_steps_per_second": 3.342, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 4.8158440589904785, |
|
"learning_rate": 4.965088339222615e-05, |
|
"loss": 2.2949, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"eval_cer": 0.5736649915920379, |
|
"eval_loss": 1.9513429403305054, |
|
"eval_runtime": 345.1224, |
|
"eval_samples_per_second": 27.463, |
|
"eval_steps_per_second": 3.434, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 3.0650887489318848, |
|
"learning_rate": 4.958021201413428e-05, |
|
"loss": 2.6115, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"eval_cer": 0.5645799929607759, |
|
"eval_loss": 2.8409903049468994, |
|
"eval_runtime": 352.0824, |
|
"eval_samples_per_second": 26.92, |
|
"eval_steps_per_second": 3.366, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 3.0540664196014404, |
|
"learning_rate": 4.950954063604241e-05, |
|
"loss": 2.3453, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"eval_cer": 0.598292499315631, |
|
"eval_loss": 1.8713030815124512, |
|
"eval_runtime": 344.1368, |
|
"eval_samples_per_second": 27.541, |
|
"eval_steps_per_second": 3.443, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 4.366485118865967, |
|
"learning_rate": 4.9438869257950535e-05, |
|
"loss": 3.2578, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"eval_cer": 0.5734816784638849, |
|
"eval_loss": 2.000699758529663, |
|
"eval_runtime": 350.6902, |
|
"eval_samples_per_second": 27.027, |
|
"eval_steps_per_second": 3.379, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 81.67564392089844, |
|
"learning_rate": 4.936819787985866e-05, |
|
"loss": 1.9657, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"eval_cer": 0.6325427241797349, |
|
"eval_loss": 3.403754472732544, |
|
"eval_runtime": 340.0489, |
|
"eval_samples_per_second": 27.872, |
|
"eval_steps_per_second": 3.485, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 3.837653875350952, |
|
"learning_rate": 4.929752650176679e-05, |
|
"loss": 2.6865, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"eval_cer": 0.5574918853388604, |
|
"eval_loss": 3.5201330184936523, |
|
"eval_runtime": 351.432, |
|
"eval_samples_per_second": 26.97, |
|
"eval_steps_per_second": 3.372, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 2.9047458171844482, |
|
"learning_rate": 4.922685512367491e-05, |
|
"loss": 2.4105, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"eval_cer": 0.5886624496499941, |
|
"eval_loss": 3.5269789695739746, |
|
"eval_runtime": 347.023, |
|
"eval_samples_per_second": 27.312, |
|
"eval_steps_per_second": 3.415, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 2.9270007610321045, |
|
"learning_rate": 4.915618374558304e-05, |
|
"loss": 2.418, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"eval_cer": 0.624877791247898, |
|
"eval_loss": 2.498126983642578, |
|
"eval_runtime": 355.9419, |
|
"eval_samples_per_second": 26.628, |
|
"eval_steps_per_second": 3.329, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 3.458078622817993, |
|
"learning_rate": 4.908551236749117e-05, |
|
"loss": 2.6118, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"eval_cer": 0.5718074185600877, |
|
"eval_loss": 3.407224655151367, |
|
"eval_runtime": 343.8365, |
|
"eval_samples_per_second": 27.565, |
|
"eval_steps_per_second": 3.446, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.9933278560638428, |
|
"learning_rate": 4.90148409893993e-05, |
|
"loss": 3.2046, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"eval_cer": 0.5539844941535333, |
|
"eval_loss": 1.928993821144104, |
|
"eval_runtime": 357.2509, |
|
"eval_samples_per_second": 26.53, |
|
"eval_steps_per_second": 3.317, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 4.917328357696533, |
|
"learning_rate": 4.8944169611307425e-05, |
|
"loss": 3.1904, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"eval_cer": 0.5568001838019632, |
|
"eval_loss": 2.729193925857544, |
|
"eval_runtime": 341.3463, |
|
"eval_samples_per_second": 27.767, |
|
"eval_steps_per_second": 3.472, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 2.842353343963623, |
|
"learning_rate": 4.8873498233215547e-05, |
|
"loss": 2.1649, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"eval_cer": 0.544757733369833, |
|
"eval_loss": 2.421653985977173, |
|
"eval_runtime": 358.7261, |
|
"eval_samples_per_second": 26.421, |
|
"eval_steps_per_second": 3.303, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 4.818225383758545, |
|
"learning_rate": 4.880282685512368e-05, |
|
"loss": 2.2775, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"eval_cer": 0.5533832270931915, |
|
"eval_loss": 2.188101291656494, |
|
"eval_runtime": 354.4215, |
|
"eval_samples_per_second": 26.742, |
|
"eval_steps_per_second": 3.343, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 2.9436004161834717, |
|
"learning_rate": 4.87321554770318e-05, |
|
"loss": 1.6308, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"eval_cer": 0.5162586523796489, |
|
"eval_loss": 3.3528623580932617, |
|
"eval_runtime": 360.9925, |
|
"eval_samples_per_second": 26.255, |
|
"eval_steps_per_second": 3.283, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 7.935622692108154, |
|
"learning_rate": 4.866148409893993e-05, |
|
"loss": 2.4462, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"eval_cer": 0.575390579171718, |
|
"eval_loss": 2.0690765380859375, |
|
"eval_runtime": 343.7001, |
|
"eval_samples_per_second": 27.576, |
|
"eval_steps_per_second": 3.448, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 2.40964937210083, |
|
"learning_rate": 4.859081272084806e-05, |
|
"loss": 2.3368, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"eval_cer": 0.5851746118650033, |
|
"eval_loss": 1.9030027389526367, |
|
"eval_runtime": 352.7743, |
|
"eval_samples_per_second": 26.867, |
|
"eval_steps_per_second": 3.359, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 3.7618064880371094, |
|
"learning_rate": 4.852084805653711e-05, |
|
"loss": 2.4157, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"eval_cer": 0.5566339798991045, |
|
"eval_loss": 3.213930368423462, |
|
"eval_runtime": 341.3273, |
|
"eval_samples_per_second": 27.768, |
|
"eval_steps_per_second": 3.472, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 9.61824893951416, |
|
"learning_rate": 4.845017667844523e-05, |
|
"loss": 2.7299, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"eval_cer": 0.5819702983848891, |
|
"eval_loss": 1.7893775701522827, |
|
"eval_runtime": 353.5497, |
|
"eval_samples_per_second": 26.808, |
|
"eval_steps_per_second": 3.352, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 2.6824772357940674, |
|
"learning_rate": 4.8379505300353364e-05, |
|
"loss": 2.934, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"eval_cer": 0.5119813460560791, |
|
"eval_loss": 2.804206609725952, |
|
"eval_runtime": 340.4666, |
|
"eval_samples_per_second": 27.838, |
|
"eval_steps_per_second": 3.481, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 4.691165924072266, |
|
"learning_rate": 4.8308833922261485e-05, |
|
"loss": 2.0012, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"eval_cer": 0.5138047006374409, |
|
"eval_loss": 3.412604331970215, |
|
"eval_runtime": 349.7896, |
|
"eval_samples_per_second": 27.096, |
|
"eval_steps_per_second": 3.388, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 2.8883514404296875, |
|
"learning_rate": 4.823816254416961e-05, |
|
"loss": 3.5496, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"eval_cer": 0.5417074029173673, |
|
"eval_loss": 1.7423540353775024, |
|
"eval_runtime": 339.3199, |
|
"eval_samples_per_second": 27.932, |
|
"eval_steps_per_second": 3.492, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 3.9499669075012207, |
|
"learning_rate": 4.816749116607774e-05, |
|
"loss": 2.4353, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"eval_cer": 0.5479229400492746, |
|
"eval_loss": 3.4527170658111572, |
|
"eval_runtime": 354.1425, |
|
"eval_samples_per_second": 26.763, |
|
"eval_steps_per_second": 3.346, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 2.9536569118499756, |
|
"learning_rate": 4.809681978798587e-05, |
|
"loss": 2.9787, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"eval_cer": 0.5677500879903015, |
|
"eval_loss": 2.6674227714538574, |
|
"eval_runtime": 344.0522, |
|
"eval_samples_per_second": 27.548, |
|
"eval_steps_per_second": 3.444, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 3.094930648803711, |
|
"learning_rate": 4.8026148409894e-05, |
|
"loss": 2.2166, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"eval_cer": 0.5857514371749247, |
|
"eval_loss": 3.2282423973083496, |
|
"eval_runtime": 358.5567, |
|
"eval_samples_per_second": 26.434, |
|
"eval_steps_per_second": 3.305, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 5.626856803894043, |
|
"learning_rate": 4.795547703180212e-05, |
|
"loss": 2.6222, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"eval_cer": 0.5495043213014743, |
|
"eval_loss": 1.7686785459518433, |
|
"eval_runtime": 342.362, |
|
"eval_samples_per_second": 27.684, |
|
"eval_steps_per_second": 3.461, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 5.24758768081665, |
|
"learning_rate": 4.7884805653710253e-05, |
|
"loss": 2.078, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"eval_cer": 0.5179402448085723, |
|
"eval_loss": 2.244272470474243, |
|
"eval_runtime": 354.1685, |
|
"eval_samples_per_second": 26.761, |
|
"eval_steps_per_second": 3.346, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 6.822329044342041, |
|
"learning_rate": 4.7814134275618375e-05, |
|
"loss": 2.7694, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"eval_cer": 0.5457598451370693, |
|
"eval_loss": 2.137202501296997, |
|
"eval_runtime": 345.4818, |
|
"eval_samples_per_second": 27.434, |
|
"eval_steps_per_second": 3.43, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 2.648332357406616, |
|
"learning_rate": 4.77434628975265e-05, |
|
"loss": 1.74, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"eval_cer": 0.5568441789527199, |
|
"eval_loss": 3.1145806312561035, |
|
"eval_runtime": 362.0538, |
|
"eval_samples_per_second": 26.178, |
|
"eval_steps_per_second": 3.273, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 17.834945678710938, |
|
"learning_rate": 4.767279151943463e-05, |
|
"loss": 1.7586, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"eval_cer": 0.5344653122678034, |
|
"eval_loss": 1.8897360563278198, |
|
"eval_runtime": 345.2891, |
|
"eval_samples_per_second": 27.449, |
|
"eval_steps_per_second": 3.432, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 7.307125568389893, |
|
"learning_rate": 4.760212014134276e-05, |
|
"loss": 1.6227, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"eval_cer": 0.551083258378632, |
|
"eval_loss": 1.9913257360458374, |
|
"eval_runtime": 354.7374, |
|
"eval_samples_per_second": 26.718, |
|
"eval_steps_per_second": 3.34, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 4.344304084777832, |
|
"learning_rate": 4.753144876325089e-05, |
|
"loss": 3.1757, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"eval_cer": 0.5003299636306754, |
|
"eval_loss": 1.8998807668685913, |
|
"eval_runtime": 348.4123, |
|
"eval_samples_per_second": 27.203, |
|
"eval_steps_per_second": 3.401, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 2.8654837608337402, |
|
"learning_rate": 4.746077738515901e-05, |
|
"loss": 2.226, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"eval_cer": 0.5324904188338352, |
|
"eval_loss": 3.042088031768799, |
|
"eval_runtime": 354.1261, |
|
"eval_samples_per_second": 26.764, |
|
"eval_steps_per_second": 3.346, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 7.595367908477783, |
|
"learning_rate": 4.7390106007067143e-05, |
|
"loss": 2.7925, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"eval_cer": 0.5470479253842243, |
|
"eval_loss": 3.0831844806671143, |
|
"eval_runtime": 344.2619, |
|
"eval_samples_per_second": 27.531, |
|
"eval_steps_per_second": 3.442, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 7.425869941711426, |
|
"learning_rate": 4.7319434628975265e-05, |
|
"loss": 3.2591, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"eval_cer": 0.5522955691994837, |
|
"eval_loss": 1.7015308141708374, |
|
"eval_runtime": 353.5748, |
|
"eval_samples_per_second": 26.806, |
|
"eval_steps_per_second": 3.351, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 2.405174493789673, |
|
"learning_rate": 4.724876325088339e-05, |
|
"loss": 2.373, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"eval_cer": 0.5220977865550819, |
|
"eval_loss": 2.095752477645874, |
|
"eval_runtime": 344.3236, |
|
"eval_samples_per_second": 27.526, |
|
"eval_steps_per_second": 3.442, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 3.823211908340454, |
|
"learning_rate": 4.717809187279153e-05, |
|
"loss": 2.3919, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"eval_cer": 0.5331259043447656, |
|
"eval_loss": 2.41994571685791, |
|
"eval_runtime": 353.9416, |
|
"eval_samples_per_second": 26.778, |
|
"eval_steps_per_second": 3.348, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 2.0702412128448486, |
|
"learning_rate": 4.710742049469965e-05, |
|
"loss": 2.4842, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"eval_cer": 0.5329523679167807, |
|
"eval_loss": 3.652872085571289, |
|
"eval_runtime": 339.8921, |
|
"eval_samples_per_second": 27.885, |
|
"eval_steps_per_second": 3.486, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 3.5580544471740723, |
|
"learning_rate": 4.703745583038869e-05, |
|
"loss": 3.577, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"eval_cer": 0.5378284971256502, |
|
"eval_loss": 2.777487277984619, |
|
"eval_runtime": 356.1415, |
|
"eval_samples_per_second": 26.613, |
|
"eval_steps_per_second": 3.327, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 2.3930351734161377, |
|
"learning_rate": 4.6966784452296826e-05, |
|
"loss": 2.733, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"eval_cer": 0.5305424113253294, |
|
"eval_loss": 3.2367630004882812, |
|
"eval_runtime": 346.0446, |
|
"eval_samples_per_second": 27.39, |
|
"eval_steps_per_second": 3.424, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 4.198298931121826, |
|
"learning_rate": 4.6896113074204954e-05, |
|
"loss": 1.616, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"eval_cer": 0.523026573071057, |
|
"eval_loss": 2.150926351547241, |
|
"eval_runtime": 371.9418, |
|
"eval_samples_per_second": 25.482, |
|
"eval_steps_per_second": 3.186, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 3.6696269512176514, |
|
"learning_rate": 4.6825441696113075e-05, |
|
"loss": 1.8252, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"eval_cer": 0.5257933792186461, |
|
"eval_loss": 3.1695752143859863, |
|
"eval_runtime": 351.2533, |
|
"eval_samples_per_second": 26.983, |
|
"eval_steps_per_second": 3.374, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 5.002595901489258, |
|
"learning_rate": 4.67547703180212e-05, |
|
"loss": 2.0594, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"eval_cer": 0.5161682179030933, |
|
"eval_loss": 2.4667067527770996, |
|
"eval_runtime": 377.3711, |
|
"eval_samples_per_second": 25.116, |
|
"eval_steps_per_second": 3.14, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 3.2300162315368652, |
|
"learning_rate": 4.668409893992933e-05, |
|
"loss": 1.8512, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"eval_cer": 0.5493185639982793, |
|
"eval_loss": 1.743632435798645, |
|
"eval_runtime": 361.1881, |
|
"eval_samples_per_second": 26.241, |
|
"eval_steps_per_second": 3.281, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 2.109961986541748, |
|
"learning_rate": 4.661342756183746e-05, |
|
"loss": 2.4585, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"eval_cer": 0.5222273278323101, |
|
"eval_loss": 1.6703613996505737, |
|
"eval_runtime": 374.7053, |
|
"eval_samples_per_second": 25.295, |
|
"eval_steps_per_second": 3.162, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 3.7831201553344727, |
|
"learning_rate": 4.654275618374558e-05, |
|
"loss": 1.6107, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"eval_cer": 0.5470601462594346, |
|
"eval_loss": 2.0870959758758545, |
|
"eval_runtime": 367.5169, |
|
"eval_samples_per_second": 25.789, |
|
"eval_steps_per_second": 3.224, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 12.008744239807129, |
|
"learning_rate": 4.6472084805653715e-05, |
|
"loss": 2.3883, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"eval_cer": 0.5516771929138478, |
|
"eval_loss": 3.582747459411621, |
|
"eval_runtime": 376.938, |
|
"eval_samples_per_second": 25.145, |
|
"eval_steps_per_second": 3.144, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 2.4807026386260986, |
|
"learning_rate": 4.6401413427561844e-05, |
|
"loss": 2.1991, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"eval_cer": 0.5373152203668218, |
|
"eval_loss": 3.5865066051483154, |
|
"eval_runtime": 360.1614, |
|
"eval_samples_per_second": 26.316, |
|
"eval_steps_per_second": 3.29, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 6.418155670166016, |
|
"learning_rate": 4.6330742049469965e-05, |
|
"loss": 2.3905, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"eval_cer": 0.527748719252278, |
|
"eval_loss": 2.6406588554382324, |
|
"eval_runtime": 379.816, |
|
"eval_samples_per_second": 24.954, |
|
"eval_steps_per_second": 3.12, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 2.4806301593780518, |
|
"learning_rate": 4.626007067137809e-05, |
|
"loss": 3.3218, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"eval_cer": 0.5217727112744907, |
|
"eval_loss": 1.8109639883041382, |
|
"eval_runtime": 351.8213, |
|
"eval_samples_per_second": 26.94, |
|
"eval_steps_per_second": 3.368, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 2.4776511192321777, |
|
"learning_rate": 4.618939929328622e-05, |
|
"loss": 2.3876, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"eval_cer": 0.5089310156036134, |
|
"eval_loss": 3.134875774383545, |
|
"eval_runtime": 372.2436, |
|
"eval_samples_per_second": 25.462, |
|
"eval_steps_per_second": 3.183, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 9.97956657409668, |
|
"learning_rate": 4.611872791519435e-05, |
|
"loss": 4.1527, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"eval_cer": 0.4981815337687224, |
|
"eval_loss": 1.7425427436828613, |
|
"eval_runtime": 363.658, |
|
"eval_samples_per_second": 26.063, |
|
"eval_steps_per_second": 3.259, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 3.353976249694824, |
|
"learning_rate": 4.604805653710248e-05, |
|
"loss": 1.7398, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"eval_cer": 0.5255465175394001, |
|
"eval_loss": 2.6437551975250244, |
|
"eval_runtime": 380.4559, |
|
"eval_samples_per_second": 24.912, |
|
"eval_steps_per_second": 3.115, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 2.671917676925659, |
|
"learning_rate": 4.5978091872791526e-05, |
|
"loss": 2.2359, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"eval_cer": 0.537452094169176, |
|
"eval_loss": 3.4509007930755615, |
|
"eval_runtime": 364.3511, |
|
"eval_samples_per_second": 26.013, |
|
"eval_steps_per_second": 3.252, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 10.607972145080566, |
|
"learning_rate": 4.590742049469965e-05, |
|
"loss": 3.6419, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"eval_cer": 0.5374105431934614, |
|
"eval_loss": 2.2622523307800293, |
|
"eval_runtime": 370.8325, |
|
"eval_samples_per_second": 25.559, |
|
"eval_steps_per_second": 3.196, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 4.678300857543945, |
|
"learning_rate": 4.5836749116607775e-05, |
|
"loss": 1.7309, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"eval_cer": 0.5418393883696375, |
|
"eval_loss": 1.8007577657699585, |
|
"eval_runtime": 357.8741, |
|
"eval_samples_per_second": 26.484, |
|
"eval_steps_per_second": 3.311, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 3.6259689331054688, |
|
"learning_rate": 4.57660777385159e-05, |
|
"loss": 2.784, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"eval_cer": 0.5228799225685347, |
|
"eval_loss": 2.9312586784362793, |
|
"eval_runtime": 371.6433, |
|
"eval_samples_per_second": 25.503, |
|
"eval_steps_per_second": 3.189, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 1.8296568393707275, |
|
"learning_rate": 4.569540636042403e-05, |
|
"loss": 2.4269, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"eval_cer": 0.5211445582886864, |
|
"eval_loss": 3.522822618484497, |
|
"eval_runtime": 359.0335, |
|
"eval_samples_per_second": 26.399, |
|
"eval_steps_per_second": 3.301, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 23.777395248413086, |
|
"learning_rate": 4.562473498233216e-05, |
|
"loss": 1.5294, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"eval_cer": 0.5097864768683275, |
|
"eval_loss": 2.758403778076172, |
|
"eval_runtime": 375.8984, |
|
"eval_samples_per_second": 25.214, |
|
"eval_steps_per_second": 3.152, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 5.520616054534912, |
|
"learning_rate": 4.555406360424028e-05, |
|
"loss": 1.6177, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"eval_cer": 0.5417905048687967, |
|
"eval_loss": 2.8678946495056152, |
|
"eval_runtime": 360.269, |
|
"eval_samples_per_second": 26.308, |
|
"eval_steps_per_second": 3.289, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 12.704540252685547, |
|
"learning_rate": 4.5483392226148416e-05, |
|
"loss": 2.6892, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"eval_cer": 0.5306426225020531, |
|
"eval_loss": 2.7854502201080322, |
|
"eval_runtime": 372.5558, |
|
"eval_samples_per_second": 25.44, |
|
"eval_steps_per_second": 3.181, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 3.1302738189697266, |
|
"learning_rate": 4.541272084805654e-05, |
|
"loss": 3.1467, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"eval_cer": 0.5148948027061906, |
|
"eval_loss": 2.9890220165252686, |
|
"eval_runtime": 359.2287, |
|
"eval_samples_per_second": 26.384, |
|
"eval_steps_per_second": 3.299, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 1.750848412513733, |
|
"learning_rate": 4.5342049469964665e-05, |
|
"loss": 2.1825, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"eval_cer": 0.5565606546478432, |
|
"eval_loss": 3.195390224456787, |
|
"eval_runtime": 366.6828, |
|
"eval_samples_per_second": 25.848, |
|
"eval_steps_per_second": 3.232, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"grad_norm": 237.01815795898438, |
|
"learning_rate": 4.527137809187279e-05, |
|
"loss": 3.0545, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"eval_cer": 0.5361493488717688, |
|
"eval_loss": 2.276334524154663, |
|
"eval_runtime": 373.4974, |
|
"eval_samples_per_second": 25.376, |
|
"eval_steps_per_second": 3.173, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"grad_norm": 2.9628891944885254, |
|
"learning_rate": 4.520070671378092e-05, |
|
"loss": 1.9898, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"eval_cer": 0.5218631457510461, |
|
"eval_loss": 1.94766104221344, |
|
"eval_runtime": 364.5953, |
|
"eval_samples_per_second": 25.996, |
|
"eval_steps_per_second": 3.25, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"grad_norm": 19.71111488342285, |
|
"learning_rate": 4.513003533568905e-05, |
|
"loss": 1.7744, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"eval_cer": 0.5048052481326503, |
|
"eval_loss": 2.699909210205078, |
|
"eval_runtime": 377.4252, |
|
"eval_samples_per_second": 25.112, |
|
"eval_steps_per_second": 3.14, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"grad_norm": 3.2357943058013916, |
|
"learning_rate": 4.505936395759717e-05, |
|
"loss": 1.8872, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"eval_cer": 0.5123968558132259, |
|
"eval_loss": 2.0029726028442383, |
|
"eval_runtime": 372.4224, |
|
"eval_samples_per_second": 25.45, |
|
"eval_steps_per_second": 3.182, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 2.642812967300415, |
|
"learning_rate": 4.4988692579505305e-05, |
|
"loss": 3.1774, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"eval_cer": 0.5154300770403973, |
|
"eval_loss": 2.36077618598938, |
|
"eval_runtime": 375.4381, |
|
"eval_samples_per_second": 25.245, |
|
"eval_steps_per_second": 3.156, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"grad_norm": 4.245635509490967, |
|
"learning_rate": 4.491802120141343e-05, |
|
"loss": 1.7844, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"eval_cer": 0.5169381330413358, |
|
"eval_loss": 2.6845099925994873, |
|
"eval_runtime": 355.3776, |
|
"eval_samples_per_second": 26.67, |
|
"eval_steps_per_second": 3.334, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"grad_norm": 2.799055576324463, |
|
"learning_rate": 4.4847349823321555e-05, |
|
"loss": 3.4812, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"eval_cer": 0.49670036369324627, |
|
"eval_loss": 3.4256324768066406, |
|
"eval_runtime": 374.5672, |
|
"eval_samples_per_second": 25.304, |
|
"eval_steps_per_second": 3.164, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"grad_norm": 2.1845884323120117, |
|
"learning_rate": 4.477667844522968e-05, |
|
"loss": 3.0965, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"eval_cer": 0.5227894880919792, |
|
"eval_loss": 1.759318232536316, |
|
"eval_runtime": 366.7135, |
|
"eval_samples_per_second": 25.846, |
|
"eval_steps_per_second": 3.231, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"grad_norm": 4.01229190826416, |
|
"learning_rate": 4.470600706713781e-05, |
|
"loss": 2.008, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"eval_cer": 0.5001319854522701, |
|
"eval_loss": 1.97179114818573, |
|
"eval_runtime": 377.7336, |
|
"eval_samples_per_second": 25.092, |
|
"eval_steps_per_second": 3.137, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"grad_norm": 1.3265995979309082, |
|
"learning_rate": 4.463533568904594e-05, |
|
"loss": 1.7854, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"eval_cer": 0.48717785772945915, |
|
"eval_loss": 1.6064122915267944, |
|
"eval_runtime": 358.3216, |
|
"eval_samples_per_second": 26.451, |
|
"eval_steps_per_second": 3.307, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"grad_norm": 3.6079511642456055, |
|
"learning_rate": 4.456466431095407e-05, |
|
"loss": 1.7713, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"eval_cer": 0.495703140276094, |
|
"eval_loss": 2.7158584594726562, |
|
"eval_runtime": 373.6599, |
|
"eval_samples_per_second": 25.365, |
|
"eval_steps_per_second": 3.171, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"grad_norm": 3.969160318374634, |
|
"learning_rate": 4.4493992932862195e-05, |
|
"loss": 3.3072, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"eval_cer": 0.49535606742012434, |
|
"eval_loss": 1.9461767673492432, |
|
"eval_runtime": 357.0354, |
|
"eval_samples_per_second": 26.546, |
|
"eval_steps_per_second": 3.319, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"grad_norm": 1.3938627243041992, |
|
"learning_rate": 4.442332155477032e-05, |
|
"loss": 2.1757, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"eval_cer": 0.5164395213327597, |
|
"eval_loss": 1.6015843152999878, |
|
"eval_runtime": 375.0822, |
|
"eval_samples_per_second": 25.269, |
|
"eval_steps_per_second": 3.159, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"grad_norm": 7.331507682800293, |
|
"learning_rate": 4.4352650176678445e-05, |
|
"loss": 2.3463, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"eval_cer": 0.4924132806695084, |
|
"eval_loss": 3.185905933380127, |
|
"eval_runtime": 357.5938, |
|
"eval_samples_per_second": 26.505, |
|
"eval_steps_per_second": 3.314, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"grad_norm": 2.3522162437438965, |
|
"learning_rate": 4.428197879858658e-05, |
|
"loss": 1.9984, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"eval_cer": 0.5185977278948809, |
|
"eval_loss": 2.115999221801758, |
|
"eval_runtime": 377.0919, |
|
"eval_samples_per_second": 25.134, |
|
"eval_steps_per_second": 3.142, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"grad_norm": 5.214267730712891, |
|
"learning_rate": 4.42113074204947e-05, |
|
"loss": 1.5957, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"eval_cer": 0.5082393140667162, |
|
"eval_loss": 1.6540412902832031, |
|
"eval_runtime": 357.3421, |
|
"eval_samples_per_second": 26.524, |
|
"eval_steps_per_second": 3.316, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"grad_norm": 14.154474258422852, |
|
"learning_rate": 4.414063604240283e-05, |
|
"loss": 1.6646, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"eval_cer": 0.49412175902389427, |
|
"eval_loss": 2.7898120880126953, |
|
"eval_runtime": 372.8281, |
|
"eval_samples_per_second": 25.422, |
|
"eval_steps_per_second": 3.178, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"grad_norm": 6.580379486083984, |
|
"learning_rate": 4.406996466431096e-05, |
|
"loss": 1.9858, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"eval_cer": 0.5368801572093387, |
|
"eval_loss": 3.0827128887176514, |
|
"eval_runtime": 359.3692, |
|
"eval_samples_per_second": 26.374, |
|
"eval_steps_per_second": 3.297, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"grad_norm": 4.16035270690918, |
|
"learning_rate": 4.3999293286219085e-05, |
|
"loss": 2.312, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"eval_cer": 0.503619823237261, |
|
"eval_loss": 1.6672168970108032, |
|
"eval_runtime": 375.3507, |
|
"eval_samples_per_second": 25.251, |
|
"eval_steps_per_second": 3.157, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"grad_norm": 1.9299957752227783, |
|
"learning_rate": 4.392862190812721e-05, |
|
"loss": 2.0043, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"eval_cer": 0.4974531696061945, |
|
"eval_loss": 3.044623613357544, |
|
"eval_runtime": 365.1986, |
|
"eval_samples_per_second": 25.953, |
|
"eval_steps_per_second": 3.245, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"grad_norm": 1.68573796749115, |
|
"learning_rate": 4.3857950530035335e-05, |
|
"loss": 2.5474, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"eval_cer": 0.5048394665832389, |
|
"eval_loss": 2.7017831802368164, |
|
"eval_runtime": 374.6095, |
|
"eval_samples_per_second": 25.301, |
|
"eval_steps_per_second": 3.163, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"grad_norm": 4.48902702331543, |
|
"learning_rate": 4.378727915194347e-05, |
|
"loss": 2.2683, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"eval_cer": 0.49138917132689375, |
|
"eval_loss": 1.5836848020553589, |
|
"eval_runtime": 360.7206, |
|
"eval_samples_per_second": 26.275, |
|
"eval_steps_per_second": 3.285, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"grad_norm": 2.788163661956787, |
|
"learning_rate": 4.371731448763251e-05, |
|
"loss": 2.4278, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"eval_cer": 0.5205579562785968, |
|
"eval_loss": 2.4102938175201416, |
|
"eval_runtime": 376.1786, |
|
"eval_samples_per_second": 25.195, |
|
"eval_steps_per_second": 3.15, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 2.6063647270202637, |
|
"learning_rate": 4.364664310954063e-05, |
|
"loss": 1.5116, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_cer": 0.4881310859958547, |
|
"eval_loss": 1.728480339050293, |
|
"eval_runtime": 352.8942, |
|
"eval_samples_per_second": 26.858, |
|
"eval_steps_per_second": 3.358, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"grad_norm": 5.240590572357178, |
|
"learning_rate": 4.357597173144877e-05, |
|
"loss": 2.2426, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"eval_cer": 0.49515320089163506, |
|
"eval_loss": 2.496321201324463, |
|
"eval_runtime": 373.8495, |
|
"eval_samples_per_second": 25.352, |
|
"eval_steps_per_second": 3.17, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"grad_norm": 2.557533025741577, |
|
"learning_rate": 4.3505300353356896e-05, |
|
"loss": 1.8649, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"eval_cer": 0.5054896171444214, |
|
"eval_loss": 3.00071120262146, |
|
"eval_runtime": 362.1744, |
|
"eval_samples_per_second": 26.17, |
|
"eval_steps_per_second": 3.272, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"grad_norm": 27.368144989013672, |
|
"learning_rate": 4.343462897526502e-05, |
|
"loss": 1.6976, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"eval_cer": 0.4800359782566188, |
|
"eval_loss": 2.5932295322418213, |
|
"eval_runtime": 371.4743, |
|
"eval_samples_per_second": 25.515, |
|
"eval_steps_per_second": 3.19, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"grad_norm": 4.671775817871094, |
|
"learning_rate": 4.336395759717315e-05, |
|
"loss": 1.7712, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"eval_cer": 0.4871509718039967, |
|
"eval_loss": 2.37919282913208, |
|
"eval_runtime": 357.2849, |
|
"eval_samples_per_second": 26.528, |
|
"eval_steps_per_second": 3.317, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"grad_norm": 2.77481746673584, |
|
"learning_rate": 4.329328621908127e-05, |
|
"loss": 1.45, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"eval_cer": 0.49421952602557584, |
|
"eval_loss": 1.6775341033935547, |
|
"eval_runtime": 376.5454, |
|
"eval_samples_per_second": 25.171, |
|
"eval_steps_per_second": 3.147, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"grad_norm": 7.3151140213012695, |
|
"learning_rate": 4.32226148409894e-05, |
|
"loss": 1.5163, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"eval_cer": 0.49458370810683977, |
|
"eval_loss": 2.463066577911377, |
|
"eval_runtime": 349.9183, |
|
"eval_samples_per_second": 27.086, |
|
"eval_steps_per_second": 3.387, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"grad_norm": 3.947378635406494, |
|
"learning_rate": 4.315194346289753e-05, |
|
"loss": 1.4527, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"eval_cer": 0.49508965234054203, |
|
"eval_loss": 3.2406046390533447, |
|
"eval_runtime": 377.1597, |
|
"eval_samples_per_second": 25.13, |
|
"eval_steps_per_second": 3.142, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"grad_norm": 7.427024841308594, |
|
"learning_rate": 4.308127208480566e-05, |
|
"loss": 1.4446, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"eval_cer": 0.48428639865472606, |
|
"eval_loss": 2.7247695922851562, |
|
"eval_runtime": 356.4628, |
|
"eval_samples_per_second": 26.589, |
|
"eval_steps_per_second": 3.324, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"grad_norm": 3.1798312664031982, |
|
"learning_rate": 4.3010600706713785e-05, |
|
"loss": 2.4877, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"eval_cer": 0.48304475773336986, |
|
"eval_loss": 2.2158772945404053, |
|
"eval_runtime": 376.4543, |
|
"eval_samples_per_second": 25.177, |
|
"eval_steps_per_second": 3.148, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"grad_norm": 2.0509414672851562, |
|
"learning_rate": 4.293992932862191e-05, |
|
"loss": 1.9214, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"eval_cer": 0.4835262602166517, |
|
"eval_loss": 1.906830906867981, |
|
"eval_runtime": 357.384, |
|
"eval_samples_per_second": 26.52, |
|
"eval_steps_per_second": 3.316, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"grad_norm": 3.71893048286438, |
|
"learning_rate": 4.286925795053004e-05, |
|
"loss": 1.4426, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"eval_cer": 0.48283211450471236, |
|
"eval_loss": 2.5203747749328613, |
|
"eval_runtime": 378.5225, |
|
"eval_samples_per_second": 25.039, |
|
"eval_steps_per_second": 3.131, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"grad_norm": 3.5400538444519043, |
|
"learning_rate": 4.279858657243816e-05, |
|
"loss": 2.5204, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"eval_cer": 0.496866567596105, |
|
"eval_loss": 3.1510612964630127, |
|
"eval_runtime": 360.9029, |
|
"eval_samples_per_second": 26.262, |
|
"eval_steps_per_second": 3.283, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"grad_norm": 3.434525966644287, |
|
"learning_rate": 4.272791519434629e-05, |
|
"loss": 1.605, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"eval_cer": 0.4771103007312972, |
|
"eval_loss": 3.0746238231658936, |
|
"eval_runtime": 370.845, |
|
"eval_samples_per_second": 25.558, |
|
"eval_steps_per_second": 3.195, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"grad_norm": 6.7493815422058105, |
|
"learning_rate": 4.265724381625442e-05, |
|
"loss": 2.153, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"eval_cer": 0.4859728794337335, |
|
"eval_loss": 2.0545401573181152, |
|
"eval_runtime": 361.9049, |
|
"eval_samples_per_second": 26.189, |
|
"eval_steps_per_second": 3.274, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"grad_norm": 1.7070709466934204, |
|
"learning_rate": 4.258657243816255e-05, |
|
"loss": 1.8732, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"eval_cer": 0.4698926518321536, |
|
"eval_loss": 2.4386019706726074, |
|
"eval_runtime": 373.6335, |
|
"eval_samples_per_second": 25.367, |
|
"eval_steps_per_second": 3.172, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"grad_norm": 11.575754165649414, |
|
"learning_rate": 4.2515901060070675e-05, |
|
"loss": 1.9211, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"eval_cer": 0.5030527746275077, |
|
"eval_loss": 2.1613056659698486, |
|
"eval_runtime": 353.1639, |
|
"eval_samples_per_second": 26.837, |
|
"eval_steps_per_second": 3.355, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"grad_norm": 5.623499393463135, |
|
"learning_rate": 4.24452296819788e-05, |
|
"loss": 2.6473, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"eval_cer": 0.5010998787689179, |
|
"eval_loss": 2.048527240753174, |
|
"eval_runtime": 372.3781, |
|
"eval_samples_per_second": 25.453, |
|
"eval_steps_per_second": 3.182, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"grad_norm": 4.054663181304932, |
|
"learning_rate": 4.237455830388693e-05, |
|
"loss": 1.4888, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"eval_cer": 0.48732939658206564, |
|
"eval_loss": 2.4912757873535156, |
|
"eval_runtime": 353.9893, |
|
"eval_samples_per_second": 26.775, |
|
"eval_steps_per_second": 3.348, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 1.9226583242416382, |
|
"learning_rate": 4.230388692579505e-05, |
|
"loss": 1.4556, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"eval_cer": 0.4807105705682218, |
|
"eval_loss": 1.4841840267181396, |
|
"eval_runtime": 376.2491, |
|
"eval_samples_per_second": 25.191, |
|
"eval_steps_per_second": 3.15, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"grad_norm": 6.85697603225708, |
|
"learning_rate": 4.223321554770318e-05, |
|
"loss": 1.4391, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"eval_cer": 0.48008730593250165, |
|
"eval_loss": 3.111499547958374, |
|
"eval_runtime": 358.9952, |
|
"eval_samples_per_second": 26.401, |
|
"eval_steps_per_second": 3.301, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"grad_norm": 12.522397994995117, |
|
"learning_rate": 4.216254416961131e-05, |
|
"loss": 1.4244, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"eval_cer": 0.48068612881780143, |
|
"eval_loss": 2.598745822906494, |
|
"eval_runtime": 385.9814, |
|
"eval_samples_per_second": 24.556, |
|
"eval_steps_per_second": 3.07, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"grad_norm": 3.1015026569366455, |
|
"learning_rate": 4.209187279151944e-05, |
|
"loss": 3.7378, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"eval_cer": 0.47458791208791207, |
|
"eval_loss": 2.3908824920654297, |
|
"eval_runtime": 373.2148, |
|
"eval_samples_per_second": 25.396, |
|
"eval_steps_per_second": 3.175, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"grad_norm": 86.87032318115234, |
|
"learning_rate": 4.2021201413427565e-05, |
|
"loss": 2.8329, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"eval_cer": 0.4754898126784248, |
|
"eval_loss": 2.441450357437134, |
|
"eval_runtime": 446.0173, |
|
"eval_samples_per_second": 21.25, |
|
"eval_steps_per_second": 2.657, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"grad_norm": 2.7503468990325928, |
|
"learning_rate": 4.195053003533569e-05, |
|
"loss": 2.4912, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"eval_cer": 0.488764127331743, |
|
"eval_loss": 1.6247801780700684, |
|
"eval_runtime": 361.3079, |
|
"eval_samples_per_second": 26.232, |
|
"eval_steps_per_second": 3.28, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"grad_norm": 2.511701822280884, |
|
"learning_rate": 4.187985865724382e-05, |
|
"loss": 2.009, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"eval_cer": 0.46521938915177347, |
|
"eval_loss": 1.8090691566467285, |
|
"eval_runtime": 401.8599, |
|
"eval_samples_per_second": 23.585, |
|
"eval_steps_per_second": 2.949, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"grad_norm": 4.231322765350342, |
|
"learning_rate": 4.180918727915194e-05, |
|
"loss": 1.6484, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"eval_cer": 0.483240291736733, |
|
"eval_loss": 1.89494788646698, |
|
"eval_runtime": 367.7673, |
|
"eval_samples_per_second": 25.772, |
|
"eval_steps_per_second": 3.222, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"grad_norm": 3.436452865600586, |
|
"learning_rate": 4.173851590106007e-05, |
|
"loss": 1.4205, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"eval_cer": 0.48146582065621213, |
|
"eval_loss": 2.3205745220184326, |
|
"eval_runtime": 384.4394, |
|
"eval_samples_per_second": 24.654, |
|
"eval_steps_per_second": 3.082, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"grad_norm": 1.96918523311615, |
|
"learning_rate": 4.16678445229682e-05, |
|
"loss": 1.3964, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"eval_cer": 0.4692033944702984, |
|
"eval_loss": 2.9126245975494385, |
|
"eval_runtime": 358.8304, |
|
"eval_samples_per_second": 26.414, |
|
"eval_steps_per_second": 3.302, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"grad_norm": 6.869575500488281, |
|
"learning_rate": 4.159717314487633e-05, |
|
"loss": 2.0721, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"eval_cer": 0.4858066755308748, |
|
"eval_loss": 3.2426090240478516, |
|
"eval_runtime": 377.7703, |
|
"eval_samples_per_second": 25.089, |
|
"eval_steps_per_second": 3.137, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"grad_norm": 2.511401414871216, |
|
"learning_rate": 4.1526501766784455e-05, |
|
"loss": 1.747, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"eval_cer": 0.5091607680575652, |
|
"eval_loss": 3.057870864868164, |
|
"eval_runtime": 381.9144, |
|
"eval_samples_per_second": 24.817, |
|
"eval_steps_per_second": 3.103, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"grad_norm": 9.076600074768066, |
|
"learning_rate": 4.1455830388692577e-05, |
|
"loss": 2.0271, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"eval_cer": 0.48266591060185365, |
|
"eval_loss": 2.858898639678955, |
|
"eval_runtime": 405.4609, |
|
"eval_samples_per_second": 23.376, |
|
"eval_steps_per_second": 2.923, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"grad_norm": 1.6710706949234009, |
|
"learning_rate": 4.138515901060071e-05, |
|
"loss": 1.7331, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"eval_cer": 0.501048551093035, |
|
"eval_loss": 2.9208521842956543, |
|
"eval_runtime": 361.6906, |
|
"eval_samples_per_second": 26.205, |
|
"eval_steps_per_second": 3.276, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"grad_norm": 1.902030348777771, |
|
"learning_rate": 4.131448763250883e-05, |
|
"loss": 2.1405, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"eval_cer": 0.4769636502287748, |
|
"eval_loss": 2.750805139541626, |
|
"eval_runtime": 383.778, |
|
"eval_samples_per_second": 24.697, |
|
"eval_steps_per_second": 3.088, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"grad_norm": 5.2548041343688965, |
|
"learning_rate": 4.124381625441696e-05, |
|
"loss": 1.967, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"eval_cer": 0.4813216143287318, |
|
"eval_loss": 2.39349627494812, |
|
"eval_runtime": 366.8876, |
|
"eval_samples_per_second": 25.834, |
|
"eval_steps_per_second": 3.23, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"grad_norm": 3.4912497997283936, |
|
"learning_rate": 4.1173144876325096e-05, |
|
"loss": 2.5075, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"eval_cer": 0.48205731101638577, |
|
"eval_loss": 2.9853949546813965, |
|
"eval_runtime": 382.1207, |
|
"eval_samples_per_second": 24.804, |
|
"eval_steps_per_second": 3.101, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"grad_norm": 3.671482801437378, |
|
"learning_rate": 4.110247349823322e-05, |
|
"loss": 2.3712, |
|
"step": 13100 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"eval_cer": 0.47388154550076256, |
|
"eval_loss": 2.6945626735687256, |
|
"eval_runtime": 374.115, |
|
"eval_samples_per_second": 25.334, |
|
"eval_steps_per_second": 3.167, |
|
"step": 13100 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"grad_norm": 3.400202512741089, |
|
"learning_rate": 4.1031802120141345e-05, |
|
"loss": 2.035, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"eval_cer": 0.4721559579210825, |
|
"eval_loss": 2.723193645477295, |
|
"eval_runtime": 442.6463, |
|
"eval_samples_per_second": 21.412, |
|
"eval_steps_per_second": 2.677, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"grad_norm": 28.22166633605957, |
|
"learning_rate": 4.0961130742049467e-05, |
|
"loss": 2.583, |
|
"step": 13300 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"eval_cer": 0.4679446443236479, |
|
"eval_loss": 2.848484516143799, |
|
"eval_runtime": 385.7533, |
|
"eval_samples_per_second": 24.57, |
|
"eval_steps_per_second": 3.072, |
|
"step": 13300 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"grad_norm": 2.6984710693359375, |
|
"learning_rate": 4.08904593639576e-05, |
|
"loss": 1.4041, |
|
"step": 13400 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"eval_cer": 0.46995620038324665, |
|
"eval_loss": 2.8926751613616943, |
|
"eval_runtime": 379.5665, |
|
"eval_samples_per_second": 24.971, |
|
"eval_steps_per_second": 3.122, |
|
"step": 13400 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"grad_norm": 28.124893188476562, |
|
"learning_rate": 4.081978798586573e-05, |
|
"loss": 1.3813, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"eval_cer": 0.453462907199562, |
|
"eval_loss": 3.064070701599121, |
|
"eval_runtime": 364.1898, |
|
"eval_samples_per_second": 26.025, |
|
"eval_steps_per_second": 3.254, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"grad_norm": 4.778401851654053, |
|
"learning_rate": 4.074911660777385e-05, |
|
"loss": 1.9508, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"eval_cer": 0.4695724649016464, |
|
"eval_loss": 1.914655327796936, |
|
"eval_runtime": 378.9026, |
|
"eval_samples_per_second": 25.014, |
|
"eval_steps_per_second": 3.127, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"grad_norm": 4.1898345947265625, |
|
"learning_rate": 4.0678445229681986e-05, |
|
"loss": 2.1823, |
|
"step": 13700 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"eval_cer": 0.47729850220953424, |
|
"eval_loss": 1.71451997756958, |
|
"eval_runtime": 365.8718, |
|
"eval_samples_per_second": 25.905, |
|
"eval_steps_per_second": 3.239, |
|
"step": 13700 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"grad_norm": 1.8314547538757324, |
|
"learning_rate": 4.060777385159011e-05, |
|
"loss": 2.3976, |
|
"step": 13800 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"eval_cer": 0.4735149192444566, |
|
"eval_loss": 2.7786667346954346, |
|
"eval_runtime": 377.8869, |
|
"eval_samples_per_second": 25.082, |
|
"eval_steps_per_second": 3.136, |
|
"step": 13800 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"grad_norm": 3.4417295455932617, |
|
"learning_rate": 4.0537102473498235e-05, |
|
"loss": 2.0057, |
|
"step": 13900 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"eval_cer": 0.47049147471745334, |
|
"eval_loss": 3.252101182937622, |
|
"eval_runtime": 365.7598, |
|
"eval_samples_per_second": 25.913, |
|
"eval_steps_per_second": 3.24, |
|
"step": 13900 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"grad_norm": 4.99106502532959, |
|
"learning_rate": 4.0466431095406356e-05, |
|
"loss": 1.7663, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"eval_cer": 0.4712345039302335, |
|
"eval_loss": 2.6297221183776855, |
|
"eval_runtime": 401.5004, |
|
"eval_samples_per_second": 23.606, |
|
"eval_steps_per_second": 2.951, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"grad_norm": 4.448285102844238, |
|
"learning_rate": 4.039575971731449e-05, |
|
"loss": 1.842, |
|
"step": 14100 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"eval_cer": 0.46505562942395684, |
|
"eval_loss": 2.5416791439056396, |
|
"eval_runtime": 383.7378, |
|
"eval_samples_per_second": 24.699, |
|
"eval_steps_per_second": 3.088, |
|
"step": 14100 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"grad_norm": 2.196765184402466, |
|
"learning_rate": 4.032579505300353e-05, |
|
"loss": 2.0249, |
|
"step": 14200 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"eval_cer": 0.4723050525986469, |
|
"eval_loss": 1.9896740913391113, |
|
"eval_runtime": 382.0154, |
|
"eval_samples_per_second": 24.811, |
|
"eval_steps_per_second": 3.102, |
|
"step": 14200 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"grad_norm": 2.4070873260498047, |
|
"learning_rate": 4.025512367491166e-05, |
|
"loss": 2.6354, |
|
"step": 14300 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"eval_cer": 0.46424660748504165, |
|
"eval_loss": 1.524404525756836, |
|
"eval_runtime": 361.2522, |
|
"eval_samples_per_second": 26.237, |
|
"eval_steps_per_second": 3.28, |
|
"step": 14300 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"grad_norm": 2.6403534412384033, |
|
"learning_rate": 4.018445229681979e-05, |
|
"loss": 1.4007, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"eval_cer": 0.4687561104376051, |
|
"eval_loss": 2.6799304485321045, |
|
"eval_runtime": 377.8259, |
|
"eval_samples_per_second": 25.086, |
|
"eval_steps_per_second": 3.136, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"grad_norm": 16.431684494018555, |
|
"learning_rate": 4.011378091872792e-05, |
|
"loss": 1.3673, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"eval_cer": 0.4627141097336827, |
|
"eval_loss": 2.739257335662842, |
|
"eval_runtime": 365.917, |
|
"eval_samples_per_second": 25.902, |
|
"eval_steps_per_second": 3.238, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"grad_norm": 1.2545260190963745, |
|
"learning_rate": 4.0043109540636045e-05, |
|
"loss": 1.398, |
|
"step": 14600 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"eval_cer": 0.46276054905948144, |
|
"eval_loss": 2.623035430908203, |
|
"eval_runtime": 380.5058, |
|
"eval_samples_per_second": 24.909, |
|
"eval_steps_per_second": 3.114, |
|
"step": 14600 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"grad_norm": 4.757970333099365, |
|
"learning_rate": 3.9972438162544173e-05, |
|
"loss": 1.7177, |
|
"step": 14700 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"eval_cer": 0.4671600641351531, |
|
"eval_loss": 3.1201841831207275, |
|
"eval_runtime": 364.6269, |
|
"eval_samples_per_second": 25.994, |
|
"eval_steps_per_second": 3.25, |
|
"step": 14700 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"grad_norm": 2.604834794998169, |
|
"learning_rate": 3.99017667844523e-05, |
|
"loss": 1.4781, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"eval_cer": 0.4741137421297564, |
|
"eval_loss": 2.4959936141967773, |
|
"eval_runtime": 380.3259, |
|
"eval_samples_per_second": 24.921, |
|
"eval_steps_per_second": 3.116, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"grad_norm": 5.067722797393799, |
|
"learning_rate": 3.983109540636042e-05, |
|
"loss": 1.6941, |
|
"step": 14900 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"eval_cer": 0.4551322787532752, |
|
"eval_loss": 2.24949049949646, |
|
"eval_runtime": 359.186, |
|
"eval_samples_per_second": 26.387, |
|
"eval_steps_per_second": 3.299, |
|
"step": 14900 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"grad_norm": 4.323084354400635, |
|
"learning_rate": 3.976042402826856e-05, |
|
"loss": 1.4552, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"eval_cer": 0.459111395721716, |
|
"eval_loss": 2.0859220027923584, |
|
"eval_runtime": 380.4099, |
|
"eval_samples_per_second": 24.915, |
|
"eval_steps_per_second": 3.115, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"grad_norm": 3.9104325771331787, |
|
"learning_rate": 3.968975265017668e-05, |
|
"loss": 2.836, |
|
"step": 15100 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"eval_cer": 0.47354180516991906, |
|
"eval_loss": 1.5567090511322021, |
|
"eval_runtime": 368.3958, |
|
"eval_samples_per_second": 25.728, |
|
"eval_steps_per_second": 3.217, |
|
"step": 15100 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"grad_norm": 22.448034286499023, |
|
"learning_rate": 3.961908127208481e-05, |
|
"loss": 1.3858, |
|
"step": 15200 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"eval_cer": 0.45766688827187046, |
|
"eval_loss": 1.999880313873291, |
|
"eval_runtime": 380.7492, |
|
"eval_samples_per_second": 24.893, |
|
"eval_steps_per_second": 3.112, |
|
"step": 15200 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"grad_norm": 10.558205604553223, |
|
"learning_rate": 3.9548409893992935e-05, |
|
"loss": 1.3921, |
|
"step": 15300 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"eval_cer": 0.47037904266551955, |
|
"eval_loss": 2.6411163806915283, |
|
"eval_runtime": 363.6122, |
|
"eval_samples_per_second": 26.066, |
|
"eval_steps_per_second": 3.259, |
|
"step": 15300 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"grad_norm": 32.08698654174805, |
|
"learning_rate": 3.947773851590106e-05, |
|
"loss": 2.0638, |
|
"step": 15400 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"eval_cer": 0.4723588244495718, |
|
"eval_loss": 2.9272561073303223, |
|
"eval_runtime": 386.8412, |
|
"eval_samples_per_second": 24.501, |
|
"eval_steps_per_second": 3.063, |
|
"step": 15400 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"grad_norm": 1.8787578344345093, |
|
"learning_rate": 3.940706713780919e-05, |
|
"loss": 1.3608, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"eval_cer": 0.4626187869070431, |
|
"eval_loss": 3.0016987323760986, |
|
"eval_runtime": 353.2265, |
|
"eval_samples_per_second": 26.833, |
|
"eval_steps_per_second": 3.355, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"grad_norm": 5.287090301513672, |
|
"learning_rate": 3.933639575971731e-05, |
|
"loss": 1.6601, |
|
"step": 15600 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"eval_cer": 0.4719262054671307, |
|
"eval_loss": 2.0440595149993896, |
|
"eval_runtime": 377.8824, |
|
"eval_samples_per_second": 25.082, |
|
"eval_steps_per_second": 3.136, |
|
"step": 15600 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"grad_norm": 2.0426106452941895, |
|
"learning_rate": 3.926572438162545e-05, |
|
"loss": 1.9773, |
|
"step": 15700 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"eval_cer": 0.4639239763794924, |
|
"eval_loss": 2.3498713970184326, |
|
"eval_runtime": 365.2548, |
|
"eval_samples_per_second": 25.949, |
|
"eval_steps_per_second": 3.244, |
|
"step": 15700 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"grad_norm": 2.1095080375671387, |
|
"learning_rate": 3.919505300353357e-05, |
|
"loss": 2.2505, |
|
"step": 15800 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"eval_cer": 0.4647232216182394, |
|
"eval_loss": 1.8471035957336426, |
|
"eval_runtime": 388.913, |
|
"eval_samples_per_second": 24.37, |
|
"eval_steps_per_second": 3.047, |
|
"step": 15800 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"grad_norm": 3.6614439487457275, |
|
"learning_rate": 3.91243816254417e-05, |
|
"loss": 1.9341, |
|
"step": 15900 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"eval_cer": 0.4613698134605608, |
|
"eval_loss": 2.8519198894500732, |
|
"eval_runtime": 363.022, |
|
"eval_samples_per_second": 26.109, |
|
"eval_steps_per_second": 3.264, |
|
"step": 15900 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"grad_norm": 8.302891731262207, |
|
"learning_rate": 3.9053710247349825e-05, |
|
"loss": 1.4603, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"eval_cer": 0.47225128074772205, |
|
"eval_loss": 1.5901261568069458, |
|
"eval_runtime": 376.5314, |
|
"eval_samples_per_second": 25.172, |
|
"eval_steps_per_second": 3.147, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"grad_norm": 3.2967019081115723, |
|
"learning_rate": 3.898303886925795e-05, |
|
"loss": 1.5493, |
|
"step": 16100 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"eval_cer": 0.47416751398068124, |
|
"eval_loss": 1.6550699472427368, |
|
"eval_runtime": 363.1067, |
|
"eval_samples_per_second": 26.103, |
|
"eval_steps_per_second": 3.264, |
|
"step": 16100 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"grad_norm": 2.848189115524292, |
|
"learning_rate": 3.891236749116608e-05, |
|
"loss": 1.5642, |
|
"step": 16200 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"eval_cer": 0.4807154589183059, |
|
"eval_loss": 2.1456046104431152, |
|
"eval_runtime": 376.8904, |
|
"eval_samples_per_second": 25.148, |
|
"eval_steps_per_second": 3.144, |
|
"step": 16200 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"grad_norm": 4.148609638214111, |
|
"learning_rate": 3.88416961130742e-05, |
|
"loss": 1.379, |
|
"step": 16300 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"eval_cer": 0.4555746744358844, |
|
"eval_loss": 1.7261931896209717, |
|
"eval_runtime": 360.5421, |
|
"eval_samples_per_second": 26.288, |
|
"eval_steps_per_second": 3.287, |
|
"step": 16300 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"grad_norm": 8.180316925048828, |
|
"learning_rate": 3.877102473498234e-05, |
|
"loss": 1.6474, |
|
"step": 16400 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"eval_cer": 0.4800946384576278, |
|
"eval_loss": 2.309068202972412, |
|
"eval_runtime": 377.3872, |
|
"eval_samples_per_second": 25.115, |
|
"eval_steps_per_second": 3.14, |
|
"step": 16400 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"grad_norm": 1.7500585317611694, |
|
"learning_rate": 3.870035335689046e-05, |
|
"loss": 1.3755, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"eval_cer": 0.4641219545578976, |
|
"eval_loss": 1.5506832599639893, |
|
"eval_runtime": 359.5594, |
|
"eval_samples_per_second": 26.36, |
|
"eval_steps_per_second": 3.296, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"grad_norm": 1.8204373121261597, |
|
"learning_rate": 3.862968197879859e-05, |
|
"loss": 1.3562, |
|
"step": 16600 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"eval_cer": 0.46081498572601776, |
|
"eval_loss": 2.0778937339782715, |
|
"eval_runtime": 381.9051, |
|
"eval_samples_per_second": 24.818, |
|
"eval_steps_per_second": 3.103, |
|
"step": 16600 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"grad_norm": 2.9875051975250244, |
|
"learning_rate": 3.8559010600706715e-05, |
|
"loss": 1.5916, |
|
"step": 16700 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"eval_cer": 0.4623352626021665, |
|
"eval_loss": 1.5354750156402588, |
|
"eval_runtime": 364.3123, |
|
"eval_samples_per_second": 26.016, |
|
"eval_steps_per_second": 3.253, |
|
"step": 16700 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"grad_norm": 4.643332004547119, |
|
"learning_rate": 3.848833922261484e-05, |
|
"loss": 1.3365, |
|
"step": 16800 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"eval_cer": 0.46146024793711626, |
|
"eval_loss": 1.8595181703567505, |
|
"eval_runtime": 374.6805, |
|
"eval_samples_per_second": 25.296, |
|
"eval_steps_per_second": 3.163, |
|
"step": 16800 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"grad_norm": 2.433307409286499, |
|
"learning_rate": 3.841766784452297e-05, |
|
"loss": 1.6175, |
|
"step": 16900 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"eval_cer": 0.468870986664581, |
|
"eval_loss": 1.7079046964645386, |
|
"eval_runtime": 364.7345, |
|
"eval_samples_per_second": 25.986, |
|
"eval_steps_per_second": 3.249, |
|
"step": 16900 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"grad_norm": 4.391873359680176, |
|
"learning_rate": 3.834699646643109e-05, |
|
"loss": 1.3334, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"eval_cer": 0.4652682726526143, |
|
"eval_loss": 2.7629072666168213, |
|
"eval_runtime": 380.8752, |
|
"eval_samples_per_second": 24.885, |
|
"eval_steps_per_second": 3.111, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"grad_norm": 7.738865375518799, |
|
"learning_rate": 3.827632508833923e-05, |
|
"loss": 1.4858, |
|
"step": 17100 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"eval_cer": 0.4701737319619882, |
|
"eval_loss": 2.189877986907959, |
|
"eval_runtime": 363.6078, |
|
"eval_samples_per_second": 26.067, |
|
"eval_steps_per_second": 3.259, |
|
"step": 17100 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"grad_norm": null, |
|
"learning_rate": 3.820636042402827e-05, |
|
"loss": 1.5317, |
|
"step": 17200 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"eval_cer": 0.47163290446208594, |
|
"eval_loss": 1.9506334066390991, |
|
"eval_runtime": 380.9688, |
|
"eval_samples_per_second": 24.879, |
|
"eval_steps_per_second": 3.11, |
|
"step": 17200 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"grad_norm": 19.556819915771484, |
|
"learning_rate": 3.81356890459364e-05, |
|
"loss": 1.3515, |
|
"step": 17300 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"eval_cer": 0.45843191506002895, |
|
"eval_loss": 2.2836623191833496, |
|
"eval_runtime": 361.9084, |
|
"eval_samples_per_second": 26.189, |
|
"eval_steps_per_second": 3.274, |
|
"step": 17300 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"grad_norm": 3.9697859287261963, |
|
"learning_rate": 3.8065017667844525e-05, |
|
"loss": 1.3348, |
|
"step": 17400 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"eval_cer": 0.460401920143913, |
|
"eval_loss": 2.626167058944702, |
|
"eval_runtime": 377.1422, |
|
"eval_samples_per_second": 25.131, |
|
"eval_steps_per_second": 3.142, |
|
"step": 17400 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"grad_norm": 2.342355251312256, |
|
"learning_rate": 3.7994346289752653e-05, |
|
"loss": 2.0519, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"eval_cer": 0.4643712604121857, |
|
"eval_loss": 1.923600196838379, |
|
"eval_runtime": 360.7805, |
|
"eval_samples_per_second": 26.271, |
|
"eval_steps_per_second": 3.285, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"grad_norm": 1.692319393157959, |
|
"learning_rate": 3.7923674911660775e-05, |
|
"loss": 1.4731, |
|
"step": 17600 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"eval_cer": 0.4658597630127879, |
|
"eval_loss": 1.808614730834961, |
|
"eval_runtime": 377.9004, |
|
"eval_samples_per_second": 25.081, |
|
"eval_steps_per_second": 3.136, |
|
"step": 17600 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"grad_norm": 2.243326187133789, |
|
"learning_rate": 3.78530035335689e-05, |
|
"loss": 1.3005, |
|
"step": 17700 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"eval_cer": 0.45969066520667945, |
|
"eval_loss": 1.8014905452728271, |
|
"eval_runtime": 369.7047, |
|
"eval_samples_per_second": 25.637, |
|
"eval_steps_per_second": 3.205, |
|
"step": 17700 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"grad_norm": 3.0026488304138184, |
|
"learning_rate": 3.778233215547704e-05, |
|
"loss": 1.3286, |
|
"step": 17800 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"eval_cer": 0.44898517852254505, |
|
"eval_loss": 1.3857167959213257, |
|
"eval_runtime": 387.1419, |
|
"eval_samples_per_second": 24.482, |
|
"eval_steps_per_second": 3.061, |
|
"step": 17800 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"grad_norm": 4.501519203186035, |
|
"learning_rate": 3.771166077738516e-05, |
|
"loss": 1.3409, |
|
"step": 17900 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"eval_cer": 0.4575813421453991, |
|
"eval_loss": 2.416992664337158, |
|
"eval_runtime": 359.7708, |
|
"eval_samples_per_second": 26.345, |
|
"eval_steps_per_second": 3.294, |
|
"step": 17900 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"grad_norm": 10.005617141723633, |
|
"learning_rate": 3.764098939929329e-05, |
|
"loss": 2.1197, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"eval_cer": 0.4567307692307692, |
|
"eval_loss": 1.900498628616333, |
|
"eval_runtime": 386.3412, |
|
"eval_samples_per_second": 24.533, |
|
"eval_steps_per_second": 3.067, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"grad_norm": 3.4333086013793945, |
|
"learning_rate": 3.7570318021201415e-05, |
|
"loss": 1.3268, |
|
"step": 18100 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"eval_cer": 0.4478413046028704, |
|
"eval_loss": 1.8988970518112183, |
|
"eval_runtime": 368.4314, |
|
"eval_samples_per_second": 25.725, |
|
"eval_steps_per_second": 3.216, |
|
"step": 18100 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"grad_norm": 4.548975944519043, |
|
"learning_rate": 3.749964664310954e-05, |
|
"loss": 1.3146, |
|
"step": 18200 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"eval_cer": 0.4571047280122013, |
|
"eval_loss": 2.1630055904388428, |
|
"eval_runtime": 388.4273, |
|
"eval_samples_per_second": 24.401, |
|
"eval_steps_per_second": 3.051, |
|
"step": 18200 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"grad_norm": 1.7991045713424683, |
|
"learning_rate": 3.742897526501767e-05, |
|
"loss": 2.0035, |
|
"step": 18300 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"eval_cer": 0.45147579289038364, |
|
"eval_loss": 2.4393255710601807, |
|
"eval_runtime": 378.3611, |
|
"eval_samples_per_second": 25.05, |
|
"eval_steps_per_second": 3.132, |
|
"step": 18300 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"grad_norm": 2.6425940990448, |
|
"learning_rate": 3.73583038869258e-05, |
|
"loss": 1.3294, |
|
"step": 18400 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"eval_cer": 0.4522603730788784, |
|
"eval_loss": 1.833477258682251, |
|
"eval_runtime": 381.8605, |
|
"eval_samples_per_second": 24.821, |
|
"eval_steps_per_second": 3.103, |
|
"step": 18400 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"grad_norm": 4.755228042602539, |
|
"learning_rate": 3.728763250883393e-05, |
|
"loss": 1.417, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"eval_cer": 0.47154491416057254, |
|
"eval_loss": 1.8104172945022583, |
|
"eval_runtime": 382.6075, |
|
"eval_samples_per_second": 24.772, |
|
"eval_steps_per_second": 3.097, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"grad_norm": 2.874833822250366, |
|
"learning_rate": 3.721696113074205e-05, |
|
"loss": 1.3215, |
|
"step": 18600 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"eval_cer": 0.45071076610222516, |
|
"eval_loss": 3.382107973098755, |
|
"eval_runtime": 367.7162, |
|
"eval_samples_per_second": 25.775, |
|
"eval_steps_per_second": 3.223, |
|
"step": 18600 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"grad_norm": 2.091411590576172, |
|
"learning_rate": 3.714628975265018e-05, |
|
"loss": 1.7498, |
|
"step": 18700 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"eval_cer": 0.44587129951898635, |
|
"eval_loss": 2.4873616695404053, |
|
"eval_runtime": 381.5796, |
|
"eval_samples_per_second": 24.839, |
|
"eval_steps_per_second": 3.106, |
|
"step": 18700 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"grad_norm": 2.1210122108459473, |
|
"learning_rate": 3.7075618374558305e-05, |
|
"loss": 1.4463, |
|
"step": 18800 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"eval_cer": 0.4508696374799578, |
|
"eval_loss": 2.1447994709014893, |
|
"eval_runtime": 360.7307, |
|
"eval_samples_per_second": 26.274, |
|
"eval_steps_per_second": 3.285, |
|
"step": 18800 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"grad_norm": 9.81248950958252, |
|
"learning_rate": 3.700494699646643e-05, |
|
"loss": 1.327, |
|
"step": 18900 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"eval_cer": 0.45252678815846076, |
|
"eval_loss": 2.999907970428467, |
|
"eval_runtime": 382.5678, |
|
"eval_samples_per_second": 24.775, |
|
"eval_steps_per_second": 3.097, |
|
"step": 18900 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"grad_norm": 2.9528186321258545, |
|
"learning_rate": 3.693427561837456e-05, |
|
"loss": 1.3228, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_cer": 0.4471960423917719, |
|
"eval_loss": 1.6979167461395264, |
|
"eval_runtime": 363.6541, |
|
"eval_samples_per_second": 26.063, |
|
"eval_steps_per_second": 3.259, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"grad_norm": 3.4124321937561035, |
|
"learning_rate": 3.686360424028269e-05, |
|
"loss": 1.2656, |
|
"step": 19100 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"eval_cer": 0.4476115521489187, |
|
"eval_loss": 2.0440304279327393, |
|
"eval_runtime": 381.9207, |
|
"eval_samples_per_second": 24.817, |
|
"eval_steps_per_second": 3.103, |
|
"step": 19100 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"grad_norm": 4.451693058013916, |
|
"learning_rate": 3.679293286219082e-05, |
|
"loss": 1.2961, |
|
"step": 19200 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"eval_cer": 0.44471520472410153, |
|
"eval_loss": 1.5803910493850708, |
|
"eval_runtime": 361.691, |
|
"eval_samples_per_second": 26.205, |
|
"eval_steps_per_second": 3.276, |
|
"step": 19200 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"grad_norm": 2.6323907375335693, |
|
"learning_rate": 3.672226148409894e-05, |
|
"loss": 1.2874, |
|
"step": 19300 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"eval_cer": 0.4615849008642603, |
|
"eval_loss": 1.754168152809143, |
|
"eval_runtime": 385.1298, |
|
"eval_samples_per_second": 24.61, |
|
"eval_steps_per_second": 3.077, |
|
"step": 19300 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"grad_norm": 2.5475683212280273, |
|
"learning_rate": 3.665159010600707e-05, |
|
"loss": 1.5227, |
|
"step": 19400 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"eval_cer": 0.45406906260998786, |
|
"eval_loss": 1.954167127609253, |
|
"eval_runtime": 364.6983, |
|
"eval_samples_per_second": 25.989, |
|
"eval_steps_per_second": 3.249, |
|
"step": 19400 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"grad_norm": 1.623938798904419, |
|
"learning_rate": 3.6580918727915195e-05, |
|
"loss": 1.2988, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"eval_cer": 0.4476164404990028, |
|
"eval_loss": 2.2084550857543945, |
|
"eval_runtime": 384.5947, |
|
"eval_samples_per_second": 24.644, |
|
"eval_steps_per_second": 3.081, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"grad_norm": 3.2966043949127197, |
|
"learning_rate": 3.6510954063604243e-05, |
|
"loss": 1.5739, |
|
"step": 19600 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"eval_cer": 0.451500234640804, |
|
"eval_loss": 1.4389057159423828, |
|
"eval_runtime": 366.8675, |
|
"eval_samples_per_second": 25.835, |
|
"eval_steps_per_second": 3.23, |
|
"step": 19600 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"grad_norm": 2.2112064361572266, |
|
"learning_rate": 3.6440282685512365e-05, |
|
"loss": 1.4271, |
|
"step": 19700 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"eval_cer": 0.4538075358804896, |
|
"eval_loss": 2.505011796951294, |
|
"eval_runtime": 384.1903, |
|
"eval_samples_per_second": 24.67, |
|
"eval_steps_per_second": 3.084, |
|
"step": 19700 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"grad_norm": 3.7650928497314453, |
|
"learning_rate": 3.63696113074205e-05, |
|
"loss": 1.665, |
|
"step": 19800 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"eval_cer": 0.4431655977474483, |
|
"eval_loss": 2.0151588916778564, |
|
"eval_runtime": 364.0023, |
|
"eval_samples_per_second": 26.038, |
|
"eval_steps_per_second": 3.255, |
|
"step": 19800 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"grad_norm": 1.848604679107666, |
|
"learning_rate": 3.629893992932862e-05, |
|
"loss": 2.2543, |
|
"step": 19900 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"eval_cer": 0.4581728325055727, |
|
"eval_loss": 1.8045251369476318, |
|
"eval_runtime": 381.1374, |
|
"eval_samples_per_second": 24.868, |
|
"eval_steps_per_second": 3.109, |
|
"step": 19900 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"grad_norm": 34.23902893066406, |
|
"learning_rate": 3.622826855123675e-05, |
|
"loss": 1.313, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"eval_cer": 0.4448887411520864, |
|
"eval_loss": 2.140085458755493, |
|
"eval_runtime": 364.9023, |
|
"eval_samples_per_second": 25.974, |
|
"eval_steps_per_second": 3.247, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"grad_norm": 1.8063780069351196, |
|
"learning_rate": 3.615759717314488e-05, |
|
"loss": 2.0001, |
|
"step": 20100 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"eval_cer": 0.44904628289859605, |
|
"eval_loss": 3.0254170894622803, |
|
"eval_runtime": 380.5352, |
|
"eval_samples_per_second": 24.907, |
|
"eval_steps_per_second": 3.114, |
|
"step": 20100 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"grad_norm": 4.191483974456787, |
|
"learning_rate": 3.6086925795053005e-05, |
|
"loss": 1.2751, |
|
"step": 20200 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"eval_cer": 0.4562517109225294, |
|
"eval_loss": 4.0329742431640625, |
|
"eval_runtime": 362.926, |
|
"eval_samples_per_second": 26.116, |
|
"eval_steps_per_second": 3.265, |
|
"step": 20200 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"grad_norm": 6.447911262512207, |
|
"learning_rate": 3.6016254416961133e-05, |
|
"loss": 2.11, |
|
"step": 20300 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"eval_cer": 0.43633657267998904, |
|
"eval_loss": 3.3446898460388184, |
|
"eval_runtime": 385.9035, |
|
"eval_samples_per_second": 24.561, |
|
"eval_steps_per_second": 3.071, |
|
"step": 20300 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"grad_norm": 2.172891139984131, |
|
"learning_rate": 3.5945583038869255e-05, |
|
"loss": 1.4999, |
|
"step": 20400 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"eval_cer": 0.4393355754565719, |
|
"eval_loss": 3.0930521488189697, |
|
"eval_runtime": 363.4753, |
|
"eval_samples_per_second": 26.076, |
|
"eval_steps_per_second": 3.26, |
|
"step": 20400 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"grad_norm": 3.379791498184204, |
|
"learning_rate": 3.587491166077739e-05, |
|
"loss": 1.4381, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"eval_cer": 0.4450402800046928, |
|
"eval_loss": 2.9096951484680176, |
|
"eval_runtime": 384.973, |
|
"eval_samples_per_second": 24.62, |
|
"eval_steps_per_second": 3.078, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"grad_norm": 2.5090067386627197, |
|
"learning_rate": 3.580424028268551e-05, |
|
"loss": 1.6658, |
|
"step": 20600 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"eval_cer": 0.4514855695905518, |
|
"eval_loss": 2.3211405277252197, |
|
"eval_runtime": 367.2057, |
|
"eval_samples_per_second": 25.811, |
|
"eval_steps_per_second": 3.227, |
|
"step": 20600 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"grad_norm": 2.2183985710144043, |
|
"learning_rate": 3.573356890459364e-05, |
|
"loss": 2.1266, |
|
"step": 20700 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"eval_cer": 0.45651323765202767, |
|
"eval_loss": 1.70187246799469, |
|
"eval_runtime": 382.2617, |
|
"eval_samples_per_second": 24.795, |
|
"eval_steps_per_second": 3.1, |
|
"step": 20700 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"grad_norm": 1.713640570640564, |
|
"learning_rate": 3.5662897526501774e-05, |
|
"loss": 1.3038, |
|
"step": 20800 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"eval_cer": 0.44218548355559034, |
|
"eval_loss": 1.8959708213806152, |
|
"eval_runtime": 362.6249, |
|
"eval_samples_per_second": 26.137, |
|
"eval_steps_per_second": 3.268, |
|
"step": 20800 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"grad_norm": 10.084565162658691, |
|
"learning_rate": 3.5592226148409895e-05, |
|
"loss": 1.3031, |
|
"step": 20900 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"eval_cer": 0.44013237652027687, |
|
"eval_loss": 1.9289778470993042, |
|
"eval_runtime": 383.5806, |
|
"eval_samples_per_second": 24.709, |
|
"eval_steps_per_second": 3.089, |
|
"step": 20900 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"grad_norm": 2.5064008235931396, |
|
"learning_rate": 3.552155477031802e-05, |
|
"loss": 2.0089, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"eval_cer": 0.4491318290250675, |
|
"eval_loss": 2.749382734298706, |
|
"eval_runtime": 372.5311, |
|
"eval_samples_per_second": 25.442, |
|
"eval_steps_per_second": 3.181, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"grad_norm": 1.8727614879608154, |
|
"learning_rate": 3.5450883392226145e-05, |
|
"loss": 2.5929, |
|
"step": 21100 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"eval_cer": 0.4467780884595831, |
|
"eval_loss": 2.1603004932403564, |
|
"eval_runtime": 382.1598, |
|
"eval_samples_per_second": 24.801, |
|
"eval_steps_per_second": 3.101, |
|
"step": 21100 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"grad_norm": 2.88053560256958, |
|
"learning_rate": 3.538021201413428e-05, |
|
"loss": 1.2652, |
|
"step": 21200 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"eval_cer": 0.44302872394509407, |
|
"eval_loss": 2.7272582054138184, |
|
"eval_runtime": 364.629, |
|
"eval_samples_per_second": 25.994, |
|
"eval_steps_per_second": 3.25, |
|
"step": 21200 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"grad_norm": 3.520254135131836, |
|
"learning_rate": 3.53095406360424e-05, |
|
"loss": 1.2819, |
|
"step": 21300 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"eval_cer": 0.44700784091353485, |
|
"eval_loss": 1.840844988822937, |
|
"eval_runtime": 393.1165, |
|
"eval_samples_per_second": 24.11, |
|
"eval_steps_per_second": 3.014, |
|
"step": 21300 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"grad_norm": 1.5368082523345947, |
|
"learning_rate": 3.523886925795053e-05, |
|
"loss": 1.2587, |
|
"step": 21400 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"eval_cer": 0.43732890774705724, |
|
"eval_loss": 2.2236878871917725, |
|
"eval_runtime": 363.5106, |
|
"eval_samples_per_second": 26.074, |
|
"eval_steps_per_second": 3.26, |
|
"step": 21400 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"grad_norm": 2.522552251815796, |
|
"learning_rate": 3.5168197879858664e-05, |
|
"loss": 1.2512, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"eval_cer": 0.44793418325446793, |
|
"eval_loss": 2.5103232860565186, |
|
"eval_runtime": 384.7009, |
|
"eval_samples_per_second": 24.637, |
|
"eval_steps_per_second": 3.08, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"grad_norm": 17.708566665649414, |
|
"learning_rate": 3.5097526501766785e-05, |
|
"loss": 1.2248, |
|
"step": 21600 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"eval_cer": 0.4432878064995503, |
|
"eval_loss": 2.5621256828308105, |
|
"eval_runtime": 370.664, |
|
"eval_samples_per_second": 25.57, |
|
"eval_steps_per_second": 3.197, |
|
"step": 21600 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"grad_norm": 2.740123987197876, |
|
"learning_rate": 3.502685512367491e-05, |
|
"loss": 1.3298, |
|
"step": 21700 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"eval_cer": 0.441982617027101, |
|
"eval_loss": 2.49664568901062, |
|
"eval_runtime": 372.6231, |
|
"eval_samples_per_second": 25.436, |
|
"eval_steps_per_second": 3.18, |
|
"step": 21700 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"grad_norm": 7.561341762542725, |
|
"learning_rate": 3.495689045936396e-05, |
|
"loss": 1.2998, |
|
"step": 21800 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"eval_cer": 0.43526113566149155, |
|
"eval_loss": 3.151437282562256, |
|
"eval_runtime": 381.1035, |
|
"eval_samples_per_second": 24.87, |
|
"eval_steps_per_second": 3.109, |
|
"step": 21800 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"grad_norm": 2.588192939758301, |
|
"learning_rate": 3.488621908127209e-05, |
|
"loss": 1.2493, |
|
"step": 21900 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"eval_cer": 0.46399241328066954, |
|
"eval_loss": 1.8955408334732056, |
|
"eval_runtime": 369.5234, |
|
"eval_samples_per_second": 25.649, |
|
"eval_steps_per_second": 3.207, |
|
"step": 21900 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"grad_norm": 2.3700144290924072, |
|
"learning_rate": 3.481554770318021e-05, |
|
"loss": 1.3379, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"eval_cer": 0.4379032888819366, |
|
"eval_loss": 1.7394232749938965, |
|
"eval_runtime": 383.407, |
|
"eval_samples_per_second": 24.72, |
|
"eval_steps_per_second": 3.091, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"grad_norm": 2.381225824356079, |
|
"learning_rate": 3.4744876325088346e-05, |
|
"loss": 2.2422, |
|
"step": 22100 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"eval_cer": 0.43050965937976615, |
|
"eval_loss": 1.691564917564392, |
|
"eval_runtime": 362.7906, |
|
"eval_samples_per_second": 26.125, |
|
"eval_steps_per_second": 3.266, |
|
"step": 22100 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"grad_norm": 4.20334005355835, |
|
"learning_rate": 3.467420494699647e-05, |
|
"loss": 1.239, |
|
"step": 22200 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"eval_cer": 0.44948623440616325, |
|
"eval_loss": 3.0320708751678467, |
|
"eval_runtime": 386.6159, |
|
"eval_samples_per_second": 24.515, |
|
"eval_steps_per_second": 3.065, |
|
"step": 22200 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"grad_norm": 12.748970985412598, |
|
"learning_rate": 3.4603533568904595e-05, |
|
"loss": 1.622, |
|
"step": 22300 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"eval_cer": 0.4492564819522115, |
|
"eval_loss": 2.2297956943511963, |
|
"eval_runtime": 368.8399, |
|
"eval_samples_per_second": 25.697, |
|
"eval_steps_per_second": 3.213, |
|
"step": 22300 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"grad_norm": 10.591804504394531, |
|
"learning_rate": 3.4532862190812723e-05, |
|
"loss": 1.4811, |
|
"step": 22400 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"eval_cer": 0.44453922412107466, |
|
"eval_loss": 2.3979556560516357, |
|
"eval_runtime": 383.7637, |
|
"eval_samples_per_second": 24.697, |
|
"eval_steps_per_second": 3.088, |
|
"step": 22400 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"grad_norm": 7.108492374420166, |
|
"learning_rate": 3.446219081272085e-05, |
|
"loss": 1.5169, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"eval_cer": 0.4425838840874428, |
|
"eval_loss": 1.763051152229309, |
|
"eval_runtime": 371.2126, |
|
"eval_samples_per_second": 25.533, |
|
"eval_steps_per_second": 3.192, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"grad_norm": 2.171220302581787, |
|
"learning_rate": 3.439151943462898e-05, |
|
"loss": 1.2667, |
|
"step": 22600 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"eval_cer": 0.44492784795275897, |
|
"eval_loss": 1.6504524946212769, |
|
"eval_runtime": 384.6648, |
|
"eval_samples_per_second": 24.64, |
|
"eval_steps_per_second": 3.081, |
|
"step": 22600 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"grad_norm": 4.868325233459473, |
|
"learning_rate": 3.43208480565371e-05, |
|
"loss": 1.2425, |
|
"step": 22700 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"eval_cer": 0.4337261937350905, |
|
"eval_loss": 1.6726810932159424, |
|
"eval_runtime": 360.9955, |
|
"eval_samples_per_second": 26.255, |
|
"eval_steps_per_second": 3.283, |
|
"step": 22700 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"grad_norm": 4.784325122833252, |
|
"learning_rate": 3.4250176678445236e-05, |
|
"loss": 1.2519, |
|
"step": 22800 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"eval_cer": 0.4394284541081694, |
|
"eval_loss": 2.026627540588379, |
|
"eval_runtime": 383.883, |
|
"eval_samples_per_second": 24.69, |
|
"eval_steps_per_second": 3.087, |
|
"step": 22800 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"grad_norm": 34.55485534667969, |
|
"learning_rate": 3.417950530035336e-05, |
|
"loss": 1.2678, |
|
"step": 22900 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"eval_cer": 0.45279075906300104, |
|
"eval_loss": 1.6792824268341064, |
|
"eval_runtime": 362.9993, |
|
"eval_samples_per_second": 26.11, |
|
"eval_steps_per_second": 3.264, |
|
"step": 22900 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"grad_norm": 8.905746459960938, |
|
"learning_rate": 3.4108833922261485e-05, |
|
"loss": 1.2559, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"eval_cer": 0.452297035704509, |
|
"eval_loss": 1.6737189292907715, |
|
"eval_runtime": 383.7761, |
|
"eval_samples_per_second": 24.697, |
|
"eval_steps_per_second": 3.088, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"grad_norm": 3.8121345043182373, |
|
"learning_rate": 3.4038162544169613e-05, |
|
"loss": 1.2646, |
|
"step": 23100 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"eval_cer": 0.45129247976223064, |
|
"eval_loss": 1.4810179471969604, |
|
"eval_runtime": 361.7355, |
|
"eval_samples_per_second": 26.201, |
|
"eval_steps_per_second": 3.276, |
|
"step": 23100 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"grad_norm": 1.6551660299301147, |
|
"learning_rate": 3.396749116607774e-05, |
|
"loss": 1.2812, |
|
"step": 23200 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"eval_cer": 0.4401079347698565, |
|
"eval_loss": 1.4080007076263428, |
|
"eval_runtime": 387.5504, |
|
"eval_samples_per_second": 24.456, |
|
"eval_steps_per_second": 3.058, |
|
"step": 23200 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"grad_norm": 1.3945401906967163, |
|
"learning_rate": 3.389681978798587e-05, |
|
"loss": 1.2539, |
|
"step": 23300 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"eval_cer": 0.44230280395760824, |
|
"eval_loss": 1.8802071809768677, |
|
"eval_runtime": 376.3421, |
|
"eval_samples_per_second": 25.185, |
|
"eval_steps_per_second": 3.149, |
|
"step": 23300 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"grad_norm": 19.793508529663086, |
|
"learning_rate": 3.382614840989399e-05, |
|
"loss": 1.9455, |
|
"step": 23400 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"eval_cer": 0.45229947987955105, |
|
"eval_loss": 1.607908010482788, |
|
"eval_runtime": 388.4585, |
|
"eval_samples_per_second": 24.399, |
|
"eval_steps_per_second": 3.051, |
|
"step": 23400 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"grad_norm": 25.258953094482422, |
|
"learning_rate": 3.3755477031802126e-05, |
|
"loss": 1.2352, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"eval_cer": 0.43993439834187165, |
|
"eval_loss": 1.3818862438201904, |
|
"eval_runtime": 363.0875, |
|
"eval_samples_per_second": 26.104, |
|
"eval_steps_per_second": 3.264, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"grad_norm": 9.951347351074219, |
|
"learning_rate": 3.368480565371025e-05, |
|
"loss": 1.5735, |
|
"step": 23600 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"eval_cer": 0.4422074811309687, |
|
"eval_loss": 1.6464565992355347, |
|
"eval_runtime": 386.065, |
|
"eval_samples_per_second": 24.55, |
|
"eval_steps_per_second": 3.069, |
|
"step": 23600 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"grad_norm": 4.360719203948975, |
|
"learning_rate": 3.3614134275618375e-05, |
|
"loss": 1.6236, |
|
"step": 23700 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"eval_cer": 0.44366909780610847, |
|
"eval_loss": 1.770129919052124, |
|
"eval_runtime": 361.9008, |
|
"eval_samples_per_second": 26.189, |
|
"eval_steps_per_second": 3.274, |
|
"step": 23700 |
|
}, |
|
{ |
|
"epoch": 10.02, |
|
"grad_norm": 12.59276294708252, |
|
"learning_rate": 3.35434628975265e-05, |
|
"loss": 1.2522, |
|
"step": 23800 |
|
}, |
|
{ |
|
"epoch": 10.02, |
|
"eval_cer": 0.43885651714833207, |
|
"eval_loss": 2.233562707901001, |
|
"eval_runtime": 381.3188, |
|
"eval_samples_per_second": 24.856, |
|
"eval_steps_per_second": 3.108, |
|
"step": 23800 |
|
}, |
|
{ |
|
"epoch": 10.06, |
|
"grad_norm": 6.893917560577393, |
|
"learning_rate": 3.347279151943463e-05, |
|
"loss": 1.2052, |
|
"step": 23900 |
|
}, |
|
{ |
|
"epoch": 10.06, |
|
"eval_cer": 0.44252522388643384, |
|
"eval_loss": 2.4695940017700195, |
|
"eval_runtime": 366.61, |
|
"eval_samples_per_second": 25.853, |
|
"eval_steps_per_second": 3.232, |
|
"step": 23900 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"grad_norm": 2.430925130844116, |
|
"learning_rate": 3.340212014134276e-05, |
|
"loss": 1.185, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"eval_cer": 0.4357133080442689, |
|
"eval_loss": 1.5434983968734741, |
|
"eval_runtime": 385.0716, |
|
"eval_samples_per_second": 24.614, |
|
"eval_steps_per_second": 3.077, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 10.15, |
|
"grad_norm": 2.367770195007324, |
|
"learning_rate": 3.333144876325088e-05, |
|
"loss": 1.2225, |
|
"step": 24100 |
|
}, |
|
{ |
|
"epoch": 10.15, |
|
"eval_cer": 0.4268898361425052, |
|
"eval_loss": 1.602295160293579, |
|
"eval_runtime": 368.0631, |
|
"eval_samples_per_second": 25.751, |
|
"eval_steps_per_second": 3.22, |
|
"step": 24100 |
|
}, |
|
{ |
|
"epoch": 10.19, |
|
"grad_norm": 2.6913535594940186, |
|
"learning_rate": 3.3260777385159016e-05, |
|
"loss": 1.2071, |
|
"step": 24200 |
|
}, |
|
{ |
|
"epoch": 10.19, |
|
"eval_cer": 0.43098382933792184, |
|
"eval_loss": 1.9375296831130981, |
|
"eval_runtime": 388.5754, |
|
"eval_samples_per_second": 24.392, |
|
"eval_steps_per_second": 3.05, |
|
"step": 24200 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"grad_norm": 2.8861985206604004, |
|
"learning_rate": 3.319010600706714e-05, |
|
"loss": 1.2745, |
|
"step": 24300 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"eval_cer": 0.43681563098822884, |
|
"eval_loss": 1.8604010343551636, |
|
"eval_runtime": 365.4924, |
|
"eval_samples_per_second": 25.932, |
|
"eval_steps_per_second": 3.242, |
|
"step": 24300 |
|
}, |
|
{ |
|
"epoch": 10.27, |
|
"grad_norm": 3.54598069190979, |
|
"learning_rate": 3.3119434628975265e-05, |
|
"loss": 1.3486, |
|
"step": 24400 |
|
}, |
|
{ |
|
"epoch": 10.27, |
|
"eval_cer": 0.4357133080442689, |
|
"eval_loss": 1.3347864151000977, |
|
"eval_runtime": 380.5736, |
|
"eval_samples_per_second": 24.905, |
|
"eval_steps_per_second": 3.114, |
|
"step": 24400 |
|
}, |
|
{ |
|
"epoch": 10.31, |
|
"grad_norm": 2.1198437213897705, |
|
"learning_rate": 3.304876325088339e-05, |
|
"loss": 1.1866, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 10.31, |
|
"eval_cer": 0.4299792733956435, |
|
"eval_loss": 1.308254361152649, |
|
"eval_runtime": 368.9525, |
|
"eval_samples_per_second": 25.689, |
|
"eval_steps_per_second": 3.212, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 10.36, |
|
"grad_norm": 2.029782295227051, |
|
"learning_rate": 3.297809187279152e-05, |
|
"loss": 1.1697, |
|
"step": 24600 |
|
}, |
|
{ |
|
"epoch": 10.36, |
|
"eval_cer": 0.42697293809393455, |
|
"eval_loss": 1.4594690799713135, |
|
"eval_runtime": 389.4322, |
|
"eval_samples_per_second": 24.338, |
|
"eval_steps_per_second": 3.043, |
|
"step": 24600 |
|
}, |
|
{ |
|
"epoch": 10.4, |
|
"grad_norm": 2.0395631790161133, |
|
"learning_rate": 3.290742049469965e-05, |
|
"loss": 1.1793, |
|
"step": 24700 |
|
}, |
|
{ |
|
"epoch": 10.4, |
|
"eval_cer": 0.42983506706816316, |
|
"eval_loss": 1.475653886795044, |
|
"eval_runtime": 374.0567, |
|
"eval_samples_per_second": 25.338, |
|
"eval_steps_per_second": 3.168, |
|
"step": 24700 |
|
}, |
|
{ |
|
"epoch": 10.44, |
|
"grad_norm": 6.735607624053955, |
|
"learning_rate": 3.283674911660777e-05, |
|
"loss": 1.4708, |
|
"step": 24800 |
|
}, |
|
{ |
|
"epoch": 10.44, |
|
"eval_cer": 0.4388076336474913, |
|
"eval_loss": 1.9250705242156982, |
|
"eval_runtime": 386.263, |
|
"eval_samples_per_second": 24.538, |
|
"eval_steps_per_second": 3.068, |
|
"step": 24800 |
|
}, |
|
{ |
|
"epoch": 10.48, |
|
"grad_norm": 1.8092114925384521, |
|
"learning_rate": 3.2766077738515906e-05, |
|
"loss": 1.1854, |
|
"step": 24900 |
|
}, |
|
{ |
|
"epoch": 10.48, |
|
"eval_cer": 0.4322132493840679, |
|
"eval_loss": 1.8587048053741455, |
|
"eval_runtime": 371.4948, |
|
"eval_samples_per_second": 25.513, |
|
"eval_steps_per_second": 3.19, |
|
"step": 24900 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"grad_norm": 3.406538724899292, |
|
"learning_rate": 3.269540636042403e-05, |
|
"loss": 1.2144, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"eval_cer": 0.4353711235383833, |
|
"eval_loss": 1.5310605764389038, |
|
"eval_runtime": 388.4081, |
|
"eval_samples_per_second": 24.402, |
|
"eval_steps_per_second": 3.051, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"grad_norm": 21.588300704956055, |
|
"learning_rate": 3.2624734982332155e-05, |
|
"loss": 1.1869, |
|
"step": 25100 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"eval_cer": 0.41679294904383873, |
|
"eval_loss": 1.753686785697937, |
|
"eval_runtime": 363.8235, |
|
"eval_samples_per_second": 26.051, |
|
"eval_steps_per_second": 3.257, |
|
"step": 25100 |
|
}, |
|
{ |
|
"epoch": 10.61, |
|
"grad_norm": 2.433943033218384, |
|
"learning_rate": 3.255406360424029e-05, |
|
"loss": 1.186, |
|
"step": 25200 |
|
}, |
|
{ |
|
"epoch": 10.61, |
|
"eval_cer": 0.4293755621602597, |
|
"eval_loss": 2.9843032360076904, |
|
"eval_runtime": 383.8866, |
|
"eval_samples_per_second": 24.69, |
|
"eval_steps_per_second": 3.087, |
|
"step": 25200 |
|
}, |
|
{ |
|
"epoch": 10.65, |
|
"grad_norm": 2.10840106010437, |
|
"learning_rate": 3.248339222614841e-05, |
|
"loss": 1.2008, |
|
"step": 25300 |
|
}, |
|
{ |
|
"epoch": 10.65, |
|
"eval_cer": 0.42626657150678504, |
|
"eval_loss": 2.721179485321045, |
|
"eval_runtime": 367.0761, |
|
"eval_samples_per_second": 25.82, |
|
"eval_steps_per_second": 3.228, |
|
"step": 25300 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"grad_norm": 2.1997592449188232, |
|
"learning_rate": 3.241272084805654e-05, |
|
"loss": 1.1923, |
|
"step": 25400 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"eval_cer": 0.42187438895623947, |
|
"eval_loss": 2.4305357933044434, |
|
"eval_runtime": 385.3951, |
|
"eval_samples_per_second": 24.593, |
|
"eval_steps_per_second": 3.075, |
|
"step": 25400 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"grad_norm": 1.7977826595306396, |
|
"learning_rate": 3.234204946996466e-05, |
|
"loss": 1.7363, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"eval_cer": 0.4337506354855109, |
|
"eval_loss": 2.236682176589966, |
|
"eval_runtime": 370.2296, |
|
"eval_samples_per_second": 25.6, |
|
"eval_steps_per_second": 3.201, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"grad_norm": 3.47428035736084, |
|
"learning_rate": 3.2271378091872796e-05, |
|
"loss": 1.3785, |
|
"step": 25600 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"eval_cer": 0.4252571272144226, |
|
"eval_loss": 2.5334064960479736, |
|
"eval_runtime": 380.018, |
|
"eval_samples_per_second": 24.941, |
|
"eval_steps_per_second": 3.118, |
|
"step": 25600 |
|
}, |
|
{ |
|
"epoch": 10.82, |
|
"grad_norm": 2.5327584743499756, |
|
"learning_rate": 3.2200706713780924e-05, |
|
"loss": 1.518, |
|
"step": 25700 |
|
}, |
|
{ |
|
"epoch": 10.82, |
|
"eval_cer": 0.42553087481913104, |
|
"eval_loss": 2.6629116535186768, |
|
"eval_runtime": 359.4773, |
|
"eval_samples_per_second": 26.366, |
|
"eval_steps_per_second": 3.296, |
|
"step": 25700 |
|
}, |
|
{ |
|
"epoch": 10.86, |
|
"grad_norm": 5.47965145111084, |
|
"learning_rate": 3.2130035335689045e-05, |
|
"loss": 1.1865, |
|
"step": 25800 |
|
}, |
|
{ |
|
"epoch": 10.86, |
|
"eval_cer": 0.4287083023737828, |
|
"eval_loss": 1.6172243356704712, |
|
"eval_runtime": 386.7546, |
|
"eval_samples_per_second": 24.506, |
|
"eval_steps_per_second": 3.064, |
|
"step": 25800 |
|
}, |
|
{ |
|
"epoch": 10.9, |
|
"grad_norm": 7.293168544769287, |
|
"learning_rate": 3.2060070671378093e-05, |
|
"loss": 1.3999, |
|
"step": 25900 |
|
}, |
|
{ |
|
"epoch": 10.9, |
|
"eval_cer": 0.42187194478119744, |
|
"eval_loss": 1.8544808626174927, |
|
"eval_runtime": 365.482, |
|
"eval_samples_per_second": 25.933, |
|
"eval_steps_per_second": 3.242, |
|
"step": 25900 |
|
}, |
|
{ |
|
"epoch": 10.95, |
|
"grad_norm": 3.8992362022399902, |
|
"learning_rate": 3.198939929328622e-05, |
|
"loss": 1.1808, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 10.95, |
|
"eval_cer": 0.419022036682179, |
|
"eval_loss": 1.6932332515716553, |
|
"eval_runtime": 384.5535, |
|
"eval_samples_per_second": 24.647, |
|
"eval_steps_per_second": 3.081, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"grad_norm": 64.95602416992188, |
|
"learning_rate": 3.191872791519435e-05, |
|
"loss": 1.2023, |
|
"step": 26100 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"eval_cer": 0.4221408040358218, |
|
"eval_loss": 1.5864986181259155, |
|
"eval_runtime": 363.5244, |
|
"eval_samples_per_second": 26.073, |
|
"eval_steps_per_second": 3.26, |
|
"step": 26100 |
|
}, |
|
{ |
|
"epoch": 11.03, |
|
"grad_norm": 1.6677500009536743, |
|
"learning_rate": 3.184805653710248e-05, |
|
"loss": 1.1585, |
|
"step": 26200 |
|
}, |
|
{ |
|
"epoch": 11.03, |
|
"eval_cer": 0.4356106526925032, |
|
"eval_loss": 1.4082372188568115, |
|
"eval_runtime": 386.3668, |
|
"eval_samples_per_second": 24.531, |
|
"eval_steps_per_second": 3.067, |
|
"step": 26200 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"grad_norm": 1.4230600595474243, |
|
"learning_rate": 3.1777385159010606e-05, |
|
"loss": 1.2774, |
|
"step": 26300 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"eval_cer": 0.41568573774979467, |
|
"eval_loss": 1.6947673559188843, |
|
"eval_runtime": 370.2625, |
|
"eval_samples_per_second": 25.598, |
|
"eval_steps_per_second": 3.2, |
|
"step": 26300 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"grad_norm": 10.38554859161377, |
|
"learning_rate": 3.170671378091873e-05, |
|
"loss": 1.1303, |
|
"step": 26400 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"eval_cer": 0.42248543271674943, |
|
"eval_loss": 2.5170605182647705, |
|
"eval_runtime": 383.496, |
|
"eval_samples_per_second": 24.715, |
|
"eval_steps_per_second": 3.09, |
|
"step": 26400 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"grad_norm": 1.8860437870025635, |
|
"learning_rate": 3.1636042402826855e-05, |
|
"loss": 1.1186, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"eval_cer": 0.4318832857533925, |
|
"eval_loss": 1.2920070886611938, |
|
"eval_runtime": 374.2965, |
|
"eval_samples_per_second": 25.322, |
|
"eval_steps_per_second": 3.166, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 11.2, |
|
"grad_norm": 2.1193289756774902, |
|
"learning_rate": 3.156537102473498e-05, |
|
"loss": 1.1587, |
|
"step": 26600 |
|
}, |
|
{ |
|
"epoch": 11.2, |
|
"eval_cer": 0.4244921004262641, |
|
"eval_loss": 2.2303755283355713, |
|
"eval_runtime": 390.1983, |
|
"eval_samples_per_second": 24.29, |
|
"eval_steps_per_second": 3.037, |
|
"step": 26600 |
|
}, |
|
{ |
|
"epoch": 11.24, |
|
"grad_norm": 13.682442665100098, |
|
"learning_rate": 3.149469964664311e-05, |
|
"loss": 1.1569, |
|
"step": 26700 |
|
}, |
|
{ |
|
"epoch": 11.24, |
|
"eval_cer": 0.4308762856360721, |
|
"eval_loss": 1.715321660041809, |
|
"eval_runtime": 368.2313, |
|
"eval_samples_per_second": 25.739, |
|
"eval_steps_per_second": 3.218, |
|
"step": 26700 |
|
}, |
|
{ |
|
"epoch": 11.28, |
|
"grad_norm": 1.7043545246124268, |
|
"learning_rate": 3.142402826855124e-05, |
|
"loss": 1.1348, |
|
"step": 26800 |
|
}, |
|
{ |
|
"epoch": 11.28, |
|
"eval_cer": 0.41776573071057055, |
|
"eval_loss": 2.2900288105010986, |
|
"eval_runtime": 394.8533, |
|
"eval_samples_per_second": 24.004, |
|
"eval_steps_per_second": 3.001, |
|
"step": 26800 |
|
}, |
|
{ |
|
"epoch": 11.32, |
|
"grad_norm": 1.8619517087936401, |
|
"learning_rate": 3.135335689045937e-05, |
|
"loss": 1.1366, |
|
"step": 26900 |
|
}, |
|
{ |
|
"epoch": 11.32, |
|
"eval_cer": 0.42347532360877554, |
|
"eval_loss": 1.863010287284851, |
|
"eval_runtime": 370.2336, |
|
"eval_samples_per_second": 25.6, |
|
"eval_steps_per_second": 3.201, |
|
"step": 26900 |
|
}, |
|
{ |
|
"epoch": 11.37, |
|
"grad_norm": 1.628806710243225, |
|
"learning_rate": 3.1282685512367496e-05, |
|
"loss": 1.1399, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 11.37, |
|
"eval_cer": 0.4168051699190489, |
|
"eval_loss": 1.782639741897583, |
|
"eval_runtime": 391.1594, |
|
"eval_samples_per_second": 24.231, |
|
"eval_steps_per_second": 3.029, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 11.41, |
|
"grad_norm": 3.158604621887207, |
|
"learning_rate": 3.121201413427562e-05, |
|
"loss": 1.1373, |
|
"step": 27100 |
|
}, |
|
{ |
|
"epoch": 11.41, |
|
"eval_cer": 0.4201585780767275, |
|
"eval_loss": 1.5199880599975586, |
|
"eval_runtime": 368.0376, |
|
"eval_samples_per_second": 25.753, |
|
"eval_steps_per_second": 3.22, |
|
"step": 27100 |
|
}, |
|
{ |
|
"epoch": 11.45, |
|
"grad_norm": 1.9599921703338623, |
|
"learning_rate": 3.1141342756183745e-05, |
|
"loss": 1.1358, |
|
"step": 27200 |
|
}, |
|
{ |
|
"epoch": 11.45, |
|
"eval_cer": 0.4173404442532556, |
|
"eval_loss": 1.598440170288086, |
|
"eval_runtime": 388.8994, |
|
"eval_samples_per_second": 24.371, |
|
"eval_steps_per_second": 3.047, |
|
"step": 27200 |
|
}, |
|
{ |
|
"epoch": 11.49, |
|
"grad_norm": 1.7773711681365967, |
|
"learning_rate": 3.107067137809187e-05, |
|
"loss": 1.1567, |
|
"step": 27300 |
|
}, |
|
{ |
|
"epoch": 11.49, |
|
"eval_cer": 0.4288060693754644, |
|
"eval_loss": 3.0100185871124268, |
|
"eval_runtime": 392.5508, |
|
"eval_samples_per_second": 24.145, |
|
"eval_steps_per_second": 3.019, |
|
"step": 27300 |
|
}, |
|
{ |
|
"epoch": 11.53, |
|
"grad_norm": 19.142953872680664, |
|
"learning_rate": 3.1e-05, |
|
"loss": 1.1284, |
|
"step": 27400 |
|
}, |
|
{ |
|
"epoch": 11.53, |
|
"eval_cer": 0.43251877126432287, |
|
"eval_loss": 2.1405186653137207, |
|
"eval_runtime": 413.4099, |
|
"eval_samples_per_second": 22.926, |
|
"eval_steps_per_second": 2.866, |
|
"step": 27400 |
|
}, |
|
{ |
|
"epoch": 11.58, |
|
"grad_norm": 9.353096961975098, |
|
"learning_rate": 3.092932862190813e-05, |
|
"loss": 1.3106, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 11.58, |
|
"eval_cer": 0.4250787024363537, |
|
"eval_loss": 2.7651588916778564, |
|
"eval_runtime": 372.5808, |
|
"eval_samples_per_second": 25.439, |
|
"eval_steps_per_second": 3.181, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 11.62, |
|
"grad_norm": 2.0867862701416016, |
|
"learning_rate": 3.085865724381626e-05, |
|
"loss": 1.1146, |
|
"step": 27600 |
|
}, |
|
{ |
|
"epoch": 11.62, |
|
"eval_cer": 0.41358374721364044, |
|
"eval_loss": 2.7960143089294434, |
|
"eval_runtime": 392.1126, |
|
"eval_samples_per_second": 24.172, |
|
"eval_steps_per_second": 3.022, |
|
"step": 27600 |
|
}, |
|
{ |
|
"epoch": 11.66, |
|
"grad_norm": 1.8952158689498901, |
|
"learning_rate": 3.0787985865724386e-05, |
|
"loss": 1.1186, |
|
"step": 27700 |
|
}, |
|
{ |
|
"epoch": 11.66, |
|
"eval_cer": 0.41495004106214073, |
|
"eval_loss": 2.6641368865966797, |
|
"eval_runtime": 367.6488, |
|
"eval_samples_per_second": 25.78, |
|
"eval_steps_per_second": 3.223, |
|
"step": 27700 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"grad_norm": 6.817134380340576, |
|
"learning_rate": 3.071802120141343e-05, |
|
"loss": 1.581, |
|
"step": 27800 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"eval_cer": 0.4181641312424231, |
|
"eval_loss": 2.996872901916504, |
|
"eval_runtime": 382.8066, |
|
"eval_samples_per_second": 24.759, |
|
"eval_steps_per_second": 3.096, |
|
"step": 27800 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"grad_norm": 4.087586879730225, |
|
"learning_rate": 3.0647349823321555e-05, |
|
"loss": 1.1385, |
|
"step": 27900 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"eval_cer": 0.42436255914903603, |
|
"eval_loss": 2.688666343688965, |
|
"eval_runtime": 375.5428, |
|
"eval_samples_per_second": 25.238, |
|
"eval_steps_per_second": 3.155, |
|
"step": 27900 |
|
}, |
|
{ |
|
"epoch": 11.79, |
|
"grad_norm": 2.216014862060547, |
|
"learning_rate": 3.0576678445229683e-05, |
|
"loss": 1.2095, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 11.79, |
|
"eval_cer": 0.42844188729420046, |
|
"eval_loss": 1.8659263849258423, |
|
"eval_runtime": 392.5017, |
|
"eval_samples_per_second": 24.148, |
|
"eval_steps_per_second": 3.019, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"grad_norm": 6.676244258880615, |
|
"learning_rate": 3.0506007067137808e-05, |
|
"loss": 1.4826, |
|
"step": 28100 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"eval_cer": 0.4170275898478745, |
|
"eval_loss": 1.5196857452392578, |
|
"eval_runtime": 370.7857, |
|
"eval_samples_per_second": 25.562, |
|
"eval_steps_per_second": 3.196, |
|
"step": 28100 |
|
}, |
|
{ |
|
"epoch": 11.87, |
|
"grad_norm": 5.378576755523682, |
|
"learning_rate": 3.0435335689045936e-05, |
|
"loss": 1.125, |
|
"step": 28200 |
|
}, |
|
{ |
|
"epoch": 11.87, |
|
"eval_cer": 0.4203663329553009, |
|
"eval_loss": 1.5670437812805176, |
|
"eval_runtime": 388.8978, |
|
"eval_samples_per_second": 24.371, |
|
"eval_steps_per_second": 3.047, |
|
"step": 28200 |
|
}, |
|
{ |
|
"epoch": 11.91, |
|
"grad_norm": 4.532820701599121, |
|
"learning_rate": 3.0364664310954068e-05, |
|
"loss": 1.1398, |
|
"step": 28300 |
|
}, |
|
{ |
|
"epoch": 11.91, |
|
"eval_cer": 0.42046654413202456, |
|
"eval_loss": 1.8523951768875122, |
|
"eval_runtime": 367.1427, |
|
"eval_samples_per_second": 25.816, |
|
"eval_steps_per_second": 3.228, |
|
"step": 28300 |
|
}, |
|
{ |
|
"epoch": 11.96, |
|
"grad_norm": 13.059289932250977, |
|
"learning_rate": 3.0293992932862192e-05, |
|
"loss": 1.1318, |
|
"step": 28400 |
|
}, |
|
{ |
|
"epoch": 11.96, |
|
"eval_cer": 0.4224341050408666, |
|
"eval_loss": 2.1082444190979004, |
|
"eval_runtime": 393.3473, |
|
"eval_samples_per_second": 24.096, |
|
"eval_steps_per_second": 3.013, |
|
"step": 28400 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"grad_norm": 5.903038501739502, |
|
"learning_rate": 3.0223321554770317e-05, |
|
"loss": 1.1131, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_cer": 0.41807125259082556, |
|
"eval_loss": 1.7783021926879883, |
|
"eval_runtime": 365.8673, |
|
"eval_samples_per_second": 25.906, |
|
"eval_steps_per_second": 3.239, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 12.04, |
|
"grad_norm": 7.630153656005859, |
|
"learning_rate": 3.015265017667845e-05, |
|
"loss": 1.0524, |
|
"step": 28600 |
|
}, |
|
{ |
|
"epoch": 12.04, |
|
"eval_cer": 0.4113008877243753, |
|
"eval_loss": 1.9460973739624023, |
|
"eval_runtime": 389.3684, |
|
"eval_samples_per_second": 24.342, |
|
"eval_steps_per_second": 3.043, |
|
"step": 28600 |
|
}, |
|
{ |
|
"epoch": 12.08, |
|
"grad_norm": 16.8402099609375, |
|
"learning_rate": 3.0081978798586573e-05, |
|
"loss": 1.073, |
|
"step": 28700 |
|
}, |
|
{ |
|
"epoch": 12.08, |
|
"eval_cer": 0.4151773493410504, |
|
"eval_loss": 2.0268595218658447, |
|
"eval_runtime": 367.5014, |
|
"eval_samples_per_second": 25.79, |
|
"eval_steps_per_second": 3.224, |
|
"step": 28700 |
|
}, |
|
{ |
|
"epoch": 12.12, |
|
"grad_norm": 1.9278266429901123, |
|
"learning_rate": 3.0011307420494698e-05, |
|
"loss": 1.0857, |
|
"step": 28800 |
|
}, |
|
{ |
|
"epoch": 12.12, |
|
"eval_cer": 0.4125180868953111, |
|
"eval_loss": 1.7026644945144653, |
|
"eval_runtime": 390.3027, |
|
"eval_samples_per_second": 24.284, |
|
"eval_steps_per_second": 3.036, |
|
"step": 28800 |
|
}, |
|
{ |
|
"epoch": 12.17, |
|
"grad_norm": 1.4629472494125366, |
|
"learning_rate": 2.994063604240283e-05, |
|
"loss": 1.1243, |
|
"step": 28900 |
|
}, |
|
{ |
|
"epoch": 12.17, |
|
"eval_cer": 0.411283778499081, |
|
"eval_loss": 2.061549663543701, |
|
"eval_runtime": 372.4148, |
|
"eval_samples_per_second": 25.45, |
|
"eval_steps_per_second": 3.182, |
|
"step": 28900 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"grad_norm": 15.632731437683105, |
|
"learning_rate": 2.9869964664310958e-05, |
|
"loss": 1.0708, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"eval_cer": 0.41651920143913024, |
|
"eval_loss": 2.2883496284484863, |
|
"eval_runtime": 388.5509, |
|
"eval_samples_per_second": 24.393, |
|
"eval_steps_per_second": 3.05, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 12.25, |
|
"grad_norm": 6.1355180740356445, |
|
"learning_rate": 2.9799293286219082e-05, |
|
"loss": 1.0688, |
|
"step": 29100 |
|
}, |
|
{ |
|
"epoch": 12.25, |
|
"eval_cer": 0.4098588244495718, |
|
"eval_loss": 2.406083822250366, |
|
"eval_runtime": 374.5529, |
|
"eval_samples_per_second": 25.305, |
|
"eval_steps_per_second": 3.164, |
|
"step": 29100 |
|
}, |
|
{ |
|
"epoch": 12.29, |
|
"grad_norm": 26.448299407958984, |
|
"learning_rate": 2.9728621908127207e-05, |
|
"loss": 1.091, |
|
"step": 29200 |
|
}, |
|
{ |
|
"epoch": 12.29, |
|
"eval_cer": 0.41021811818075166, |
|
"eval_loss": 1.8954789638519287, |
|
"eval_runtime": 388.9635, |
|
"eval_samples_per_second": 24.367, |
|
"eval_steps_per_second": 3.047, |
|
"step": 29200 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"grad_norm": 19.144874572753906, |
|
"learning_rate": 2.965795053003534e-05, |
|
"loss": 1.0624, |
|
"step": 29300 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"eval_cer": 0.41046497985999764, |
|
"eval_loss": 1.6875534057617188, |
|
"eval_runtime": 368.9225, |
|
"eval_samples_per_second": 25.691, |
|
"eval_steps_per_second": 3.212, |
|
"step": 29300 |
|
}, |
|
{ |
|
"epoch": 12.38, |
|
"grad_norm": 2.1881866455078125, |
|
"learning_rate": 2.9587279151943463e-05, |
|
"loss": 1.0462, |
|
"step": 29400 |
|
}, |
|
{ |
|
"epoch": 12.38, |
|
"eval_cer": 0.40699180712525906, |
|
"eval_loss": 1.977940320968628, |
|
"eval_runtime": 389.1348, |
|
"eval_samples_per_second": 24.357, |
|
"eval_steps_per_second": 3.045, |
|
"step": 29400 |
|
}, |
|
{ |
|
"epoch": 12.42, |
|
"grad_norm": 2.38139009475708, |
|
"learning_rate": 2.951660777385159e-05, |
|
"loss": 1.0711, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 12.42, |
|
"eval_cer": 0.4067278362207188, |
|
"eval_loss": 1.981135606765747, |
|
"eval_runtime": 374.3024, |
|
"eval_samples_per_second": 25.322, |
|
"eval_steps_per_second": 3.166, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 12.46, |
|
"grad_norm": 13.329228401184082, |
|
"learning_rate": 2.944593639575972e-05, |
|
"loss": 1.041, |
|
"step": 29600 |
|
}, |
|
{ |
|
"epoch": 12.46, |
|
"eval_cer": 0.41142309647647723, |
|
"eval_loss": 2.5235061645507812, |
|
"eval_runtime": 396.1248, |
|
"eval_samples_per_second": 23.927, |
|
"eval_steps_per_second": 2.991, |
|
"step": 29600 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"grad_norm": 3.899754524230957, |
|
"learning_rate": 2.9375265017667848e-05, |
|
"loss": 1.1686, |
|
"step": 29700 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"eval_cer": 0.4094359821672989, |
|
"eval_loss": 1.6854169368743896, |
|
"eval_runtime": 371.974, |
|
"eval_samples_per_second": 25.48, |
|
"eval_steps_per_second": 3.186, |
|
"step": 29700 |
|
}, |
|
{ |
|
"epoch": 12.54, |
|
"grad_norm": 4.518542766571045, |
|
"learning_rate": 2.9304593639575972e-05, |
|
"loss": 1.0679, |
|
"step": 29800 |
|
}, |
|
{ |
|
"epoch": 12.54, |
|
"eval_cer": 0.4047798287122131, |
|
"eval_loss": 2.345759868621826, |
|
"eval_runtime": 387.3324, |
|
"eval_samples_per_second": 24.47, |
|
"eval_steps_per_second": 3.059, |
|
"step": 29800 |
|
}, |
|
{ |
|
"epoch": 12.59, |
|
"grad_norm": 3.159818410873413, |
|
"learning_rate": 2.9233922261484097e-05, |
|
"loss": 1.0724, |
|
"step": 29900 |
|
}, |
|
{ |
|
"epoch": 12.59, |
|
"eval_cer": 0.4022965468695006, |
|
"eval_loss": 2.1892457008361816, |
|
"eval_runtime": 368.124, |
|
"eval_samples_per_second": 25.747, |
|
"eval_steps_per_second": 3.219, |
|
"step": 29900 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"grad_norm": 5.468438148498535, |
|
"learning_rate": 2.916395759717315e-05, |
|
"loss": 1.3109, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"eval_cer": 0.4083043291228345, |
|
"eval_loss": 2.8631815910339355, |
|
"eval_runtime": 397.7294, |
|
"eval_samples_per_second": 23.83, |
|
"eval_steps_per_second": 2.979, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"grad_norm": 3.4033985137939453, |
|
"learning_rate": 2.9093286219081274e-05, |
|
"loss": 1.0724, |
|
"step": 30100 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"eval_cer": 0.4132855578585116, |
|
"eval_loss": 2.082000732421875, |
|
"eval_runtime": 370.8747, |
|
"eval_samples_per_second": 25.556, |
|
"eval_steps_per_second": 3.195, |
|
"step": 30100 |
|
}, |
|
{ |
|
"epoch": 12.71, |
|
"grad_norm": 8.163016319274902, |
|
"learning_rate": 2.9022614840989398e-05, |
|
"loss": 1.2078, |
|
"step": 30200 |
|
}, |
|
{ |
|
"epoch": 12.71, |
|
"eval_cer": 0.41148908920261235, |
|
"eval_loss": 2.2178566455841064, |
|
"eval_runtime": 392.2715, |
|
"eval_samples_per_second": 24.162, |
|
"eval_steps_per_second": 3.021, |
|
"step": 30200 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"grad_norm": 7.318975448608398, |
|
"learning_rate": 2.895194346289753e-05, |
|
"loss": 1.0775, |
|
"step": 30300 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"eval_cer": 0.4067913847718118, |
|
"eval_loss": 2.1497671604156494, |
|
"eval_runtime": 374.4081, |
|
"eval_samples_per_second": 25.315, |
|
"eval_steps_per_second": 3.165, |
|
"step": 30300 |
|
}, |
|
{ |
|
"epoch": 12.8, |
|
"grad_norm": 1.8343230485916138, |
|
"learning_rate": 2.8881272084805654e-05, |
|
"loss": 1.0572, |
|
"step": 30400 |
|
}, |
|
{ |
|
"epoch": 12.8, |
|
"eval_cer": 0.40438142818036055, |
|
"eval_loss": 2.247307062149048, |
|
"eval_runtime": 396.7902, |
|
"eval_samples_per_second": 23.887, |
|
"eval_steps_per_second": 2.986, |
|
"step": 30400 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"grad_norm": 2.385056495666504, |
|
"learning_rate": 2.881060070671378e-05, |
|
"loss": 1.0668, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"eval_cer": 0.40543242344843766, |
|
"eval_loss": 2.0315330028533936, |
|
"eval_runtime": 370.6236, |
|
"eval_samples_per_second": 25.573, |
|
"eval_steps_per_second": 3.197, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 12.88, |
|
"grad_norm": 3.699707508087158, |
|
"learning_rate": 2.873992932862191e-05, |
|
"loss": 1.2704, |
|
"step": 30600 |
|
}, |
|
{ |
|
"epoch": 12.88, |
|
"eval_cer": 0.40483360056313794, |
|
"eval_loss": 3.221102476119995, |
|
"eval_runtime": 396.883, |
|
"eval_samples_per_second": 23.881, |
|
"eval_steps_per_second": 2.986, |
|
"step": 30600 |
|
}, |
|
{ |
|
"epoch": 12.92, |
|
"grad_norm": 2.9071247577667236, |
|
"learning_rate": 2.866925795053004e-05, |
|
"loss": 2.4896, |
|
"step": 30700 |
|
}, |
|
{ |
|
"epoch": 12.92, |
|
"eval_cer": 0.4067840522466857, |
|
"eval_loss": 2.3889622688293457, |
|
"eval_runtime": 374.4971, |
|
"eval_samples_per_second": 25.309, |
|
"eval_steps_per_second": 3.164, |
|
"step": 30700 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"grad_norm": 2.6603596210479736, |
|
"learning_rate": 2.8598586572438163e-05, |
|
"loss": 1.0564, |
|
"step": 30800 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"eval_cer": 0.40476760783700283, |
|
"eval_loss": 2.143598794937134, |
|
"eval_runtime": 399.5314, |
|
"eval_samples_per_second": 23.723, |
|
"eval_steps_per_second": 2.966, |
|
"step": 30800 |
|
}, |
|
{ |
|
"epoch": 13.01, |
|
"grad_norm": 2.8563497066497803, |
|
"learning_rate": 2.8527915194346288e-05, |
|
"loss": 1.0411, |
|
"step": 30900 |
|
}, |
|
{ |
|
"epoch": 13.01, |
|
"eval_cer": 0.40590170505650935, |
|
"eval_loss": 2.6135616302490234, |
|
"eval_runtime": 370.2003, |
|
"eval_samples_per_second": 25.602, |
|
"eval_steps_per_second": 3.201, |
|
"step": 30900 |
|
}, |
|
{ |
|
"epoch": 13.05, |
|
"grad_norm": 3.738598585128784, |
|
"learning_rate": 2.845724381625442e-05, |
|
"loss": 0.9841, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 13.05, |
|
"eval_cer": 0.4061705643111337, |
|
"eval_loss": 2.7012014389038086, |
|
"eval_runtime": 394.4654, |
|
"eval_samples_per_second": 24.027, |
|
"eval_steps_per_second": 3.004, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 13.09, |
|
"grad_norm": 3.3957040309906006, |
|
"learning_rate": 2.8386572438162544e-05, |
|
"loss": 1.444, |
|
"step": 31100 |
|
}, |
|
{ |
|
"epoch": 13.09, |
|
"eval_cer": 0.3994588596456924, |
|
"eval_loss": 2.732797861099243, |
|
"eval_runtime": 371.9235, |
|
"eval_samples_per_second": 25.484, |
|
"eval_steps_per_second": 3.186, |
|
"step": 31100 |
|
}, |
|
{ |
|
"epoch": 13.13, |
|
"grad_norm": 2.130281448364258, |
|
"learning_rate": 2.8315901060070672e-05, |
|
"loss": 0.9865, |
|
"step": 31200 |
|
}, |
|
{ |
|
"epoch": 13.13, |
|
"eval_cer": 0.4065005279418091, |
|
"eval_loss": 2.812870502471924, |
|
"eval_runtime": 395.591, |
|
"eval_samples_per_second": 23.959, |
|
"eval_steps_per_second": 2.996, |
|
"step": 31200 |
|
}, |
|
{ |
|
"epoch": 13.18, |
|
"grad_norm": 3.105449676513672, |
|
"learning_rate": 2.82452296819788e-05, |
|
"loss": 0.9784, |
|
"step": 31300 |
|
}, |
|
{ |
|
"epoch": 13.18, |
|
"eval_cer": 0.3964916311446561, |
|
"eval_loss": 2.1949923038482666, |
|
"eval_runtime": 377.7569, |
|
"eval_samples_per_second": 25.09, |
|
"eval_steps_per_second": 3.137, |
|
"step": 31300 |
|
}, |
|
{ |
|
"epoch": 13.22, |
|
"grad_norm": 3.4888200759887695, |
|
"learning_rate": 2.817455830388693e-05, |
|
"loss": 0.9847, |
|
"step": 31400 |
|
}, |
|
{ |
|
"epoch": 13.22, |
|
"eval_cer": 0.39935864846896874, |
|
"eval_loss": 2.095374584197998, |
|
"eval_runtime": 395.2097, |
|
"eval_samples_per_second": 23.982, |
|
"eval_steps_per_second": 2.998, |
|
"step": 31400 |
|
}, |
|
{ |
|
"epoch": 13.26, |
|
"grad_norm": 1.688668966293335, |
|
"learning_rate": 2.8103886925795053e-05, |
|
"loss": 1.0167, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 13.26, |
|
"eval_cer": 0.40794747956669664, |
|
"eval_loss": 1.7657544612884521, |
|
"eval_runtime": 377.384, |
|
"eval_samples_per_second": 25.115, |
|
"eval_steps_per_second": 3.14, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 13.3, |
|
"grad_norm": 2.9201929569244385, |
|
"learning_rate": 2.8033215547703178e-05, |
|
"loss": 0.9921, |
|
"step": 31600 |
|
}, |
|
{ |
|
"epoch": 13.3, |
|
"eval_cer": 0.40619256188651204, |
|
"eval_loss": 1.6753425598144531, |
|
"eval_runtime": 390.1339, |
|
"eval_samples_per_second": 24.294, |
|
"eval_steps_per_second": 3.037, |
|
"step": 31600 |
|
}, |
|
{ |
|
"epoch": 13.34, |
|
"grad_norm": 3.985402822494507, |
|
"learning_rate": 2.796254416961131e-05, |
|
"loss": 0.9951, |
|
"step": 31700 |
|
}, |
|
{ |
|
"epoch": 13.34, |
|
"eval_cer": 0.4047969379375073, |
|
"eval_loss": 1.8533233404159546, |
|
"eval_runtime": 373.1578, |
|
"eval_samples_per_second": 25.399, |
|
"eval_steps_per_second": 3.176, |
|
"step": 31700 |
|
}, |
|
{ |
|
"epoch": 13.39, |
|
"grad_norm": 2.613786458969116, |
|
"learning_rate": 2.7891872791519434e-05, |
|
"loss": 1.002, |
|
"step": 31800 |
|
}, |
|
{ |
|
"epoch": 13.39, |
|
"eval_cer": 0.39678737632474287, |
|
"eval_loss": 2.017918109893799, |
|
"eval_runtime": 398.8604, |
|
"eval_samples_per_second": 23.763, |
|
"eval_steps_per_second": 2.971, |
|
"step": 31800 |
|
}, |
|
{ |
|
"epoch": 13.43, |
|
"grad_norm": 5.32037878036499, |
|
"learning_rate": 2.7821201413427562e-05, |
|
"loss": 1.0059, |
|
"step": 31900 |
|
}, |
|
{ |
|
"epoch": 13.43, |
|
"eval_cer": 0.40224277501857575, |
|
"eval_loss": 2.3004064559936523, |
|
"eval_runtime": 375.8974, |
|
"eval_samples_per_second": 25.214, |
|
"eval_steps_per_second": 3.152, |
|
"step": 31900 |
|
}, |
|
{ |
|
"epoch": 13.47, |
|
"grad_norm": 1.963639736175537, |
|
"learning_rate": 2.7750530035335694e-05, |
|
"loss": 0.9866, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 13.47, |
|
"eval_cer": 0.4031495639591725, |
|
"eval_loss": 2.7950069904327393, |
|
"eval_runtime": 392.4567, |
|
"eval_samples_per_second": 24.15, |
|
"eval_steps_per_second": 3.019, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 13.51, |
|
"grad_norm": 10.024706840515137, |
|
"learning_rate": 2.767985865724382e-05, |
|
"loss": 1.0195, |
|
"step": 32100 |
|
}, |
|
{ |
|
"epoch": 13.51, |
|
"eval_cer": 0.409443314692425, |
|
"eval_loss": 2.4231760501861572, |
|
"eval_runtime": 370.8807, |
|
"eval_samples_per_second": 25.555, |
|
"eval_steps_per_second": 3.195, |
|
"step": 32100 |
|
}, |
|
{ |
|
"epoch": 13.56, |
|
"grad_norm": 4.829713344573975, |
|
"learning_rate": 2.7609187279151943e-05, |
|
"loss": 1.0437, |
|
"step": 32200 |
|
}, |
|
{ |
|
"epoch": 13.56, |
|
"eval_cer": 0.4043276563294357, |
|
"eval_loss": 1.8479336500167847, |
|
"eval_runtime": 398.7479, |
|
"eval_samples_per_second": 23.769, |
|
"eval_steps_per_second": 2.972, |
|
"step": 32200 |
|
}, |
|
{ |
|
"epoch": 13.6, |
|
"grad_norm": 3.0680336952209473, |
|
"learning_rate": 2.7538515901060075e-05, |
|
"loss": 1.4222, |
|
"step": 32300 |
|
}, |
|
{ |
|
"epoch": 13.6, |
|
"eval_cer": 0.4040881271753158, |
|
"eval_loss": 2.828200101852417, |
|
"eval_runtime": 367.3731, |
|
"eval_samples_per_second": 25.799, |
|
"eval_steps_per_second": 3.226, |
|
"step": 32300 |
|
}, |
|
{ |
|
"epoch": 13.64, |
|
"grad_norm": 2.740971565246582, |
|
"learning_rate": 2.74678445229682e-05, |
|
"loss": 1.0443, |
|
"step": 32400 |
|
}, |
|
{ |
|
"epoch": 13.64, |
|
"eval_cer": 0.422089476359939, |
|
"eval_loss": 2.9092776775360107, |
|
"eval_runtime": 389.1697, |
|
"eval_samples_per_second": 24.354, |
|
"eval_steps_per_second": 3.045, |
|
"step": 32400 |
|
}, |
|
{ |
|
"epoch": 13.68, |
|
"grad_norm": 7.612214088439941, |
|
"learning_rate": 2.7397173144876324e-05, |
|
"loss": 1.0561, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 13.68, |
|
"eval_cer": 0.4040587970748113, |
|
"eval_loss": 1.9982529878616333, |
|
"eval_runtime": 371.4689, |
|
"eval_samples_per_second": 25.515, |
|
"eval_steps_per_second": 3.19, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 13.72, |
|
"grad_norm": 2.935277223587036, |
|
"learning_rate": 2.7326501766784452e-05, |
|
"loss": 1.0508, |
|
"step": 32600 |
|
}, |
|
{ |
|
"epoch": 13.72, |
|
"eval_cer": 0.40732665910601856, |
|
"eval_loss": 2.4883534908294678, |
|
"eval_runtime": 395.2638, |
|
"eval_samples_per_second": 23.979, |
|
"eval_steps_per_second": 2.998, |
|
"step": 32600 |
|
}, |
|
{ |
|
"epoch": 13.77, |
|
"grad_norm": 2.7527854442596436, |
|
"learning_rate": 2.7255830388692584e-05, |
|
"loss": 1.0314, |
|
"step": 32700 |
|
}, |
|
{ |
|
"epoch": 13.77, |
|
"eval_cer": 0.40216944976731456, |
|
"eval_loss": 2.4499011039733887, |
|
"eval_runtime": 369.8082, |
|
"eval_samples_per_second": 25.63, |
|
"eval_steps_per_second": 3.204, |
|
"step": 32700 |
|
}, |
|
{ |
|
"epoch": 13.81, |
|
"grad_norm": 1.3860234022140503, |
|
"learning_rate": 2.718515901060071e-05, |
|
"loss": 1.7756, |
|
"step": 32800 |
|
}, |
|
{ |
|
"epoch": 13.81, |
|
"eval_cer": 0.41578839310156035, |
|
"eval_loss": 2.565376043319702, |
|
"eval_runtime": 390.8183, |
|
"eval_samples_per_second": 24.252, |
|
"eval_steps_per_second": 3.032, |
|
"step": 32800 |
|
}, |
|
{ |
|
"epoch": 13.85, |
|
"grad_norm": 9.211983680725098, |
|
"learning_rate": 2.7114487632508833e-05, |
|
"loss": 1.0164, |
|
"step": 32900 |
|
}, |
|
{ |
|
"epoch": 13.85, |
|
"eval_cer": 0.39727621133315083, |
|
"eval_loss": 2.448713779449463, |
|
"eval_runtime": 372.957, |
|
"eval_samples_per_second": 25.413, |
|
"eval_steps_per_second": 3.177, |
|
"step": 32900 |
|
}, |
|
{ |
|
"epoch": 13.89, |
|
"grad_norm": 73.3850326538086, |
|
"learning_rate": 2.7043816254416965e-05, |
|
"loss": 1.0211, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 13.89, |
|
"eval_cer": 0.405021802041375, |
|
"eval_loss": 2.4291627407073975, |
|
"eval_runtime": 390.776, |
|
"eval_samples_per_second": 24.254, |
|
"eval_steps_per_second": 3.032, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 13.93, |
|
"grad_norm": 2.177152395248413, |
|
"learning_rate": 2.697314487632509e-05, |
|
"loss": 1.2605, |
|
"step": 33100 |
|
}, |
|
{ |
|
"epoch": 13.93, |
|
"eval_cer": 0.40312512220875213, |
|
"eval_loss": 2.450680732727051, |
|
"eval_runtime": 369.1148, |
|
"eval_samples_per_second": 25.678, |
|
"eval_steps_per_second": 3.21, |
|
"step": 33100 |
|
}, |
|
{ |
|
"epoch": 13.98, |
|
"grad_norm": 2.1010019779205322, |
|
"learning_rate": 2.6902473498233218e-05, |
|
"loss": 0.9974, |
|
"step": 33200 |
|
}, |
|
{ |
|
"epoch": 13.98, |
|
"eval_cer": 0.4032106683352235, |
|
"eval_loss": 2.416074514389038, |
|
"eval_runtime": 395.8532, |
|
"eval_samples_per_second": 23.943, |
|
"eval_steps_per_second": 2.994, |
|
"step": 33200 |
|
}, |
|
{ |
|
"epoch": 14.02, |
|
"grad_norm": 5.9587812423706055, |
|
"learning_rate": 2.6831802120141342e-05, |
|
"loss": 0.9724, |
|
"step": 33300 |
|
}, |
|
{ |
|
"epoch": 14.02, |
|
"eval_cer": 0.3974839662117242, |
|
"eval_loss": 2.979396343231201, |
|
"eval_runtime": 370.8376, |
|
"eval_samples_per_second": 25.558, |
|
"eval_steps_per_second": 3.195, |
|
"step": 33300 |
|
}, |
|
{ |
|
"epoch": 14.06, |
|
"grad_norm": 2.916482925415039, |
|
"learning_rate": 2.6761130742049474e-05, |
|
"loss": 0.9488, |
|
"step": 33400 |
|
}, |
|
{ |
|
"epoch": 14.06, |
|
"eval_cer": 0.39579504125767473, |
|
"eval_loss": 2.687380075454712, |
|
"eval_runtime": 390.5164, |
|
"eval_samples_per_second": 24.27, |
|
"eval_steps_per_second": 3.034, |
|
"step": 33400 |
|
}, |
|
{ |
|
"epoch": 14.1, |
|
"grad_norm": 5.404353618621826, |
|
"learning_rate": 2.66904593639576e-05, |
|
"loss": 0.9346, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 14.1, |
|
"eval_cer": 0.39129775918032145, |
|
"eval_loss": 2.3900837898254395, |
|
"eval_runtime": 372.0348, |
|
"eval_samples_per_second": 25.476, |
|
"eval_steps_per_second": 3.185, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 14.14, |
|
"grad_norm": 2.114933729171753, |
|
"learning_rate": 2.6619787985865723e-05, |
|
"loss": 0.9445, |
|
"step": 33600 |
|
}, |
|
{ |
|
"epoch": 14.14, |
|
"eval_cer": 0.3915470650346095, |
|
"eval_loss": 2.192039728164673, |
|
"eval_runtime": 401.3991, |
|
"eval_samples_per_second": 23.612, |
|
"eval_steps_per_second": 2.952, |
|
"step": 33600 |
|
}, |
|
{ |
|
"epoch": 14.19, |
|
"grad_norm": 22.767732620239258, |
|
"learning_rate": 2.6549116607773855e-05, |
|
"loss": 0.9435, |
|
"step": 33700 |
|
}, |
|
{ |
|
"epoch": 14.19, |
|
"eval_cer": 0.39616655586406474, |
|
"eval_loss": 2.0086452960968018, |
|
"eval_runtime": 374.9441, |
|
"eval_samples_per_second": 25.278, |
|
"eval_steps_per_second": 3.16, |
|
"step": 33700 |
|
}, |
|
{ |
|
"epoch": 14.23, |
|
"grad_norm": 2.4856455326080322, |
|
"learning_rate": 2.647844522968198e-05, |
|
"loss": 0.9387, |
|
"step": 33800 |
|
}, |
|
{ |
|
"epoch": 14.23, |
|
"eval_cer": 0.3982514371749247, |
|
"eval_loss": 1.5301440954208374, |
|
"eval_runtime": 391.5929, |
|
"eval_samples_per_second": 24.204, |
|
"eval_steps_per_second": 3.026, |
|
"step": 33800 |
|
}, |
|
{ |
|
"epoch": 14.27, |
|
"grad_norm": 21.76763916015625, |
|
"learning_rate": 2.6407773851590107e-05, |
|
"loss": 0.9334, |
|
"step": 33900 |
|
}, |
|
{ |
|
"epoch": 14.27, |
|
"eval_cer": 0.3909311329240155, |
|
"eval_loss": 2.4011144638061523, |
|
"eval_runtime": 373.6109, |
|
"eval_samples_per_second": 25.369, |
|
"eval_steps_per_second": 3.172, |
|
"step": 33900 |
|
}, |
|
{ |
|
"epoch": 14.31, |
|
"grad_norm": 6.195644855499268, |
|
"learning_rate": 2.6337102473498232e-05, |
|
"loss": 0.9263, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 14.31, |
|
"eval_cer": 0.39136375190645656, |
|
"eval_loss": 2.636327028274536, |
|
"eval_runtime": 397.3915, |
|
"eval_samples_per_second": 23.851, |
|
"eval_steps_per_second": 2.982, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 14.35, |
|
"grad_norm": 8.33462905883789, |
|
"learning_rate": 2.6266431095406364e-05, |
|
"loss": 1.2049, |
|
"step": 34100 |
|
}, |
|
{ |
|
"epoch": 14.35, |
|
"eval_cer": 0.38994857455711546, |
|
"eval_loss": 1.6413642168045044, |
|
"eval_runtime": 373.2342, |
|
"eval_samples_per_second": 25.394, |
|
"eval_steps_per_second": 3.175, |
|
"step": 34100 |
|
}, |
|
{ |
|
"epoch": 14.4, |
|
"grad_norm": 2.263068675994873, |
|
"learning_rate": 2.619575971731449e-05, |
|
"loss": 0.9314, |
|
"step": 34200 |
|
}, |
|
{ |
|
"epoch": 14.4, |
|
"eval_cer": 0.3896846036525752, |
|
"eval_loss": 1.7732062339782715, |
|
"eval_runtime": 392.0357, |
|
"eval_samples_per_second": 24.176, |
|
"eval_steps_per_second": 3.023, |
|
"step": 34200 |
|
}, |
|
{ |
|
"epoch": 14.44, |
|
"grad_norm": 3.9732091426849365, |
|
"learning_rate": 2.6125088339222613e-05, |
|
"loss": 0.9228, |
|
"step": 34300 |
|
}, |
|
{ |
|
"epoch": 14.44, |
|
"eval_cer": 0.3950935630206093, |
|
"eval_loss": 1.7944254875183105, |
|
"eval_runtime": 369.9816, |
|
"eval_samples_per_second": 25.617, |
|
"eval_steps_per_second": 3.203, |
|
"step": 34300 |
|
}, |
|
{ |
|
"epoch": 14.48, |
|
"grad_norm": 1.9228265285491943, |
|
"learning_rate": 2.6054416961130745e-05, |
|
"loss": 0.9234, |
|
"step": 34400 |
|
}, |
|
{ |
|
"epoch": 14.48, |
|
"eval_cer": 0.39062316686871845, |
|
"eval_loss": 1.7525721788406372, |
|
"eval_runtime": 386.1261, |
|
"eval_samples_per_second": 24.546, |
|
"eval_steps_per_second": 3.069, |
|
"step": 34400 |
|
}, |
|
{ |
|
"epoch": 14.52, |
|
"grad_norm": 10.801593780517578, |
|
"learning_rate": 2.598374558303887e-05, |
|
"loss": 0.9669, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 14.52, |
|
"eval_cer": 0.3913124242305737, |
|
"eval_loss": 1.5855803489685059, |
|
"eval_runtime": 376.3532, |
|
"eval_samples_per_second": 25.184, |
|
"eval_steps_per_second": 3.149, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 14.57, |
|
"grad_norm": 2.1704392433166504, |
|
"learning_rate": 2.5913074204946997e-05, |
|
"loss": 0.9261, |
|
"step": 34600 |
|
}, |
|
{ |
|
"epoch": 14.57, |
|
"eval_cer": 0.38692268585507017, |
|
"eval_loss": 1.6806480884552002, |
|
"eval_runtime": 393.3479, |
|
"eval_samples_per_second": 24.096, |
|
"eval_steps_per_second": 3.013, |
|
"step": 34600 |
|
}, |
|
{ |
|
"epoch": 14.61, |
|
"grad_norm": 4.049818515777588, |
|
"learning_rate": 2.5842402826855122e-05, |
|
"loss": 0.9017, |
|
"step": 34700 |
|
}, |
|
{ |
|
"epoch": 14.61, |
|
"eval_cer": 0.3871744358844003, |
|
"eval_loss": 1.6744087934494019, |
|
"eval_runtime": 365.5955, |
|
"eval_samples_per_second": 25.925, |
|
"eval_steps_per_second": 3.241, |
|
"step": 34700 |
|
}, |
|
{ |
|
"epoch": 14.65, |
|
"grad_norm": 3.3934333324432373, |
|
"learning_rate": 2.5771731448763254e-05, |
|
"loss": 0.9375, |
|
"step": 34800 |
|
}, |
|
{ |
|
"epoch": 14.65, |
|
"eval_cer": 0.3903005357631692, |
|
"eval_loss": 1.944000005722046, |
|
"eval_runtime": 404.5333, |
|
"eval_samples_per_second": 23.429, |
|
"eval_steps_per_second": 2.929, |
|
"step": 34800 |
|
}, |
|
{ |
|
"epoch": 14.69, |
|
"grad_norm": 2.0570108890533447, |
|
"learning_rate": 2.5701060070671378e-05, |
|
"loss": 0.9158, |
|
"step": 34900 |
|
}, |
|
{ |
|
"epoch": 14.69, |
|
"eval_cer": 0.3864998435727973, |
|
"eval_loss": 1.8147820234298706, |
|
"eval_runtime": 366.6552, |
|
"eval_samples_per_second": 25.85, |
|
"eval_steps_per_second": 3.232, |
|
"step": 34900 |
|
}, |
|
{ |
|
"epoch": 14.73, |
|
"grad_norm": 2.4123330116271973, |
|
"learning_rate": 2.5631095406360423e-05, |
|
"loss": 1.0998, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 14.73, |
|
"eval_cer": 0.3869789018810371, |
|
"eval_loss": 1.8494433164596558, |
|
"eval_runtime": 395.7827, |
|
"eval_samples_per_second": 23.947, |
|
"eval_steps_per_second": 2.994, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 14.78, |
|
"grad_norm": 2.7764666080474854, |
|
"learning_rate": 2.5560424028268555e-05, |
|
"loss": 0.9243, |
|
"step": 35100 |
|
}, |
|
{ |
|
"epoch": 14.78, |
|
"eval_cer": 0.38634097219506475, |
|
"eval_loss": 1.8553264141082764, |
|
"eval_runtime": 368.5207, |
|
"eval_samples_per_second": 25.719, |
|
"eval_steps_per_second": 3.216, |
|
"step": 35100 |
|
}, |
|
{ |
|
"epoch": 14.82, |
|
"grad_norm": 37.1422119140625, |
|
"learning_rate": 2.548975265017668e-05, |
|
"loss": 0.9169, |
|
"step": 35200 |
|
}, |
|
{ |
|
"epoch": 14.82, |
|
"eval_cer": 0.3885431739079426, |
|
"eval_loss": 2.9854750633239746, |
|
"eval_runtime": 393.2259, |
|
"eval_samples_per_second": 24.103, |
|
"eval_steps_per_second": 3.014, |
|
"step": 35200 |
|
}, |
|
{ |
|
"epoch": 14.86, |
|
"grad_norm": 2.0946056842803955, |
|
"learning_rate": 2.5419081272084804e-05, |
|
"loss": 0.9188, |
|
"step": 35300 |
|
}, |
|
{ |
|
"epoch": 14.86, |
|
"eval_cer": 0.38827187047827616, |
|
"eval_loss": 2.4621119499206543, |
|
"eval_runtime": 369.124, |
|
"eval_samples_per_second": 25.677, |
|
"eval_steps_per_second": 3.21, |
|
"step": 35300 |
|
}, |
|
{ |
|
"epoch": 14.9, |
|
"grad_norm": 2.0357742309570312, |
|
"learning_rate": 2.5348409893992936e-05, |
|
"loss": 0.9183, |
|
"step": 35400 |
|
}, |
|
{ |
|
"epoch": 14.9, |
|
"eval_cer": 0.38374036995033434, |
|
"eval_loss": 1.656967043876648, |
|
"eval_runtime": 397.837, |
|
"eval_samples_per_second": 23.824, |
|
"eval_steps_per_second": 2.979, |
|
"step": 35400 |
|
}, |
|
{ |
|
"epoch": 14.94, |
|
"grad_norm": 2.4356577396392822, |
|
"learning_rate": 2.527773851590106e-05, |
|
"loss": 0.9208, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 14.94, |
|
"eval_cer": 0.3846764889914356, |
|
"eval_loss": 1.6596330404281616, |
|
"eval_runtime": 368.5045, |
|
"eval_samples_per_second": 25.72, |
|
"eval_steps_per_second": 3.216, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 14.99, |
|
"grad_norm": 3.9734749794006348, |
|
"learning_rate": 2.520706713780919e-05, |
|
"loss": 0.9122, |
|
"step": 35600 |
|
}, |
|
{ |
|
"epoch": 14.99, |
|
"eval_cer": 0.3868713581791874, |
|
"eval_loss": 2.068138360977173, |
|
"eval_runtime": 401.8562, |
|
"eval_samples_per_second": 23.586, |
|
"eval_steps_per_second": 2.949, |
|
"step": 35600 |
|
}, |
|
{ |
|
"epoch": 15.03, |
|
"grad_norm": 1.6432957649230957, |
|
"learning_rate": 2.513639575971732e-05, |
|
"loss": 0.872, |
|
"step": 35700 |
|
}, |
|
{ |
|
"epoch": 15.03, |
|
"eval_cer": 0.3834226271948692, |
|
"eval_loss": 2.0865533351898193, |
|
"eval_runtime": 372.0301, |
|
"eval_samples_per_second": 25.476, |
|
"eval_steps_per_second": 3.185, |
|
"step": 35700 |
|
}, |
|
{ |
|
"epoch": 15.07, |
|
"grad_norm": 2.7166266441345215, |
|
"learning_rate": 2.5065724381625445e-05, |
|
"loss": 0.8238, |
|
"step": 35800 |
|
}, |
|
{ |
|
"epoch": 15.07, |
|
"eval_cer": 0.38250361737906224, |
|
"eval_loss": 2.33854079246521, |
|
"eval_runtime": 394.007, |
|
"eval_samples_per_second": 24.055, |
|
"eval_steps_per_second": 3.008, |
|
"step": 35800 |
|
}, |
|
{ |
|
"epoch": 15.11, |
|
"grad_norm": 5.8333282470703125, |
|
"learning_rate": 2.499505300353357e-05, |
|
"loss": 0.8495, |
|
"step": 35900 |
|
}, |
|
{ |
|
"epoch": 15.11, |
|
"eval_cer": 0.38729908881154435, |
|
"eval_loss": 1.496200680732727, |
|
"eval_runtime": 379.9915, |
|
"eval_samples_per_second": 24.943, |
|
"eval_steps_per_second": 3.118, |
|
"step": 35900 |
|
}, |
|
{ |
|
"epoch": 15.15, |
|
"grad_norm": 8.684925079345703, |
|
"learning_rate": 2.4924381625441698e-05, |
|
"loss": 0.8427, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 15.15, |
|
"eval_cer": 0.3852386492511048, |
|
"eval_loss": 2.0919713973999023, |
|
"eval_runtime": 387.8899, |
|
"eval_samples_per_second": 24.435, |
|
"eval_steps_per_second": 3.055, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 15.2, |
|
"grad_norm": 2.094461441040039, |
|
"learning_rate": 2.4853710247349822e-05, |
|
"loss": 0.8353, |
|
"step": 36100 |
|
}, |
|
{ |
|
"epoch": 15.2, |
|
"eval_cer": 0.38358149857260176, |
|
"eval_loss": 1.544800043106079, |
|
"eval_runtime": 374.0519, |
|
"eval_samples_per_second": 25.339, |
|
"eval_steps_per_second": 3.168, |
|
"step": 36100 |
|
}, |
|
{ |
|
"epoch": 15.24, |
|
"grad_norm": 7.441462516784668, |
|
"learning_rate": 2.478303886925795e-05, |
|
"loss": 0.8271, |
|
"step": 36200 |
|
}, |
|
{ |
|
"epoch": 15.24, |
|
"eval_cer": 0.3848451370693364, |
|
"eval_loss": 2.453676700592041, |
|
"eval_runtime": 392.8332, |
|
"eval_samples_per_second": 24.127, |
|
"eval_steps_per_second": 3.017, |
|
"step": 36200 |
|
}, |
|
{ |
|
"epoch": 15.28, |
|
"grad_norm": 1.4820858240127563, |
|
"learning_rate": 2.4712367491166082e-05, |
|
"loss": 0.8265, |
|
"step": 36300 |
|
}, |
|
{ |
|
"epoch": 15.28, |
|
"eval_cer": 0.3837550350005866, |
|
"eval_loss": 2.4253969192504883, |
|
"eval_runtime": 373.2352, |
|
"eval_samples_per_second": 25.394, |
|
"eval_steps_per_second": 3.175, |
|
"step": 36300 |
|
}, |
|
{ |
|
"epoch": 15.32, |
|
"grad_norm": 4.485095024108887, |
|
"learning_rate": 2.4641696113074207e-05, |
|
"loss": 0.8412, |
|
"step": 36400 |
|
}, |
|
{ |
|
"epoch": 15.32, |
|
"eval_cer": 0.3884747370067655, |
|
"eval_loss": 2.459603786468506, |
|
"eval_runtime": 390.2769, |
|
"eval_samples_per_second": 24.285, |
|
"eval_steps_per_second": 3.036, |
|
"step": 36400 |
|
}, |
|
{ |
|
"epoch": 15.37, |
|
"grad_norm": 7.8216471672058105, |
|
"learning_rate": 2.4571024734982335e-05, |
|
"loss": 0.8567, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 15.37, |
|
"eval_cer": 0.3837354816002503, |
|
"eval_loss": 3.254591226577759, |
|
"eval_runtime": 377.582, |
|
"eval_samples_per_second": 25.102, |
|
"eval_steps_per_second": 3.138, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 15.41, |
|
"grad_norm": 2.479659080505371, |
|
"learning_rate": 2.450035335689046e-05, |
|
"loss": 0.8539, |
|
"step": 36600 |
|
}, |
|
{ |
|
"epoch": 15.41, |
|
"eval_cer": 0.3853241953775762, |
|
"eval_loss": 1.766408085823059, |
|
"eval_runtime": 396.6796, |
|
"eval_samples_per_second": 23.893, |
|
"eval_steps_per_second": 2.987, |
|
"step": 36600 |
|
}, |
|
{ |
|
"epoch": 15.45, |
|
"grad_norm": 3.3775389194488525, |
|
"learning_rate": 2.4429681978798587e-05, |
|
"loss": 0.8428, |
|
"step": 36700 |
|
}, |
|
{ |
|
"epoch": 15.45, |
|
"eval_cer": 0.3814917289116577, |
|
"eval_loss": 1.7772597074508667, |
|
"eval_runtime": 373.2028, |
|
"eval_samples_per_second": 25.396, |
|
"eval_steps_per_second": 3.175, |
|
"step": 36700 |
|
}, |
|
{ |
|
"epoch": 15.49, |
|
"grad_norm": 1.7568877935409546, |
|
"learning_rate": 2.4359010600706712e-05, |
|
"loss": 0.8502, |
|
"step": 36800 |
|
}, |
|
{ |
|
"epoch": 15.49, |
|
"eval_cer": 0.3838430253021, |
|
"eval_loss": 2.477693796157837, |
|
"eval_runtime": 393.1117, |
|
"eval_samples_per_second": 24.11, |
|
"eval_steps_per_second": 3.014, |
|
"step": 36800 |
|
}, |
|
{ |
|
"epoch": 15.53, |
|
"grad_norm": 1.5188319683074951, |
|
"learning_rate": 2.4288339222614844e-05, |
|
"loss": 1.1122, |
|
"step": 36900 |
|
}, |
|
{ |
|
"epoch": 15.53, |
|
"eval_cer": 0.3783705173829729, |
|
"eval_loss": 2.0704104900360107, |
|
"eval_runtime": 376.2486, |
|
"eval_samples_per_second": 25.191, |
|
"eval_steps_per_second": 3.15, |
|
"step": 36900 |
|
}, |
|
{ |
|
"epoch": 15.58, |
|
"grad_norm": 5.990358352661133, |
|
"learning_rate": 2.4217667844522972e-05, |
|
"loss": 0.8376, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 15.58, |
|
"eval_cer": 0.38022809041492317, |
|
"eval_loss": 1.795945405960083, |
|
"eval_runtime": 393.6062, |
|
"eval_samples_per_second": 24.08, |
|
"eval_steps_per_second": 3.011, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 15.62, |
|
"grad_norm": 37.267982482910156, |
|
"learning_rate": 2.4146996466431096e-05, |
|
"loss": 0.8422, |
|
"step": 37100 |
|
}, |
|
{ |
|
"epoch": 15.62, |
|
"eval_cer": 0.38063137929685975, |
|
"eval_loss": 2.6164886951446533, |
|
"eval_runtime": 378.7126, |
|
"eval_samples_per_second": 25.027, |
|
"eval_steps_per_second": 3.129, |
|
"step": 37100 |
|
}, |
|
{ |
|
"epoch": 15.66, |
|
"grad_norm": 51.162017822265625, |
|
"learning_rate": 2.4076325088339225e-05, |
|
"loss": 0.8849, |
|
"step": 37200 |
|
}, |
|
{ |
|
"epoch": 15.66, |
|
"eval_cer": 0.38121553713190726, |
|
"eval_loss": 1.649156928062439, |
|
"eval_runtime": 393.043, |
|
"eval_samples_per_second": 24.114, |
|
"eval_steps_per_second": 3.015, |
|
"step": 37200 |
|
}, |
|
{ |
|
"epoch": 15.7, |
|
"grad_norm": 1.459133505821228, |
|
"learning_rate": 2.400565371024735e-05, |
|
"loss": 0.8247, |
|
"step": 37300 |
|
}, |
|
{ |
|
"epoch": 15.7, |
|
"eval_cer": 0.37826297368112316, |
|
"eval_loss": 1.726231575012207, |
|
"eval_runtime": 372.5489, |
|
"eval_samples_per_second": 25.441, |
|
"eval_steps_per_second": 3.181, |
|
"step": 37300 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"grad_norm": 1.8703219890594482, |
|
"learning_rate": 2.3934982332155477e-05, |
|
"loss": 0.8085, |
|
"step": 37400 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"eval_cer": 0.38297778733721793, |
|
"eval_loss": 2.4081192016601562, |
|
"eval_runtime": 395.9622, |
|
"eval_samples_per_second": 23.937, |
|
"eval_steps_per_second": 2.993, |
|
"step": 37400 |
|
}, |
|
{ |
|
"epoch": 15.79, |
|
"grad_norm": 1.6956226825714111, |
|
"learning_rate": 2.3864310954063605e-05, |
|
"loss": 0.8194, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 15.79, |
|
"eval_cer": 0.3802305345899652, |
|
"eval_loss": 2.873920202255249, |
|
"eval_runtime": 373.9204, |
|
"eval_samples_per_second": 25.348, |
|
"eval_steps_per_second": 3.169, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 15.83, |
|
"grad_norm": 10.501407623291016, |
|
"learning_rate": 2.3793639575971734e-05, |
|
"loss": 0.843, |
|
"step": 37600 |
|
}, |
|
{ |
|
"epoch": 15.83, |
|
"eval_cer": 0.37860271401196666, |
|
"eval_loss": 1.4808061122894287, |
|
"eval_runtime": 394.8535, |
|
"eval_samples_per_second": 24.004, |
|
"eval_steps_per_second": 3.001, |
|
"step": 37600 |
|
}, |
|
{ |
|
"epoch": 15.87, |
|
"grad_norm": 4.368974208831787, |
|
"learning_rate": 2.372296819787986e-05, |
|
"loss": 0.824, |
|
"step": 37700 |
|
}, |
|
{ |
|
"epoch": 15.87, |
|
"eval_cer": 0.3751050995268077, |
|
"eval_loss": 1.6123182773590088, |
|
"eval_runtime": 373.2585, |
|
"eval_samples_per_second": 25.393, |
|
"eval_steps_per_second": 3.175, |
|
"step": 37700 |
|
}, |
|
{ |
|
"epoch": 15.91, |
|
"grad_norm": 17.767349243164062, |
|
"learning_rate": 2.3652296819787986e-05, |
|
"loss": 0.8418, |
|
"step": 37800 |
|
}, |
|
{ |
|
"epoch": 15.91, |
|
"eval_cer": 0.37160748504164876, |
|
"eval_loss": 1.7542308568954468, |
|
"eval_runtime": 389.6677, |
|
"eval_samples_per_second": 24.323, |
|
"eval_steps_per_second": 3.041, |
|
"step": 37800 |
|
}, |
|
{ |
|
"epoch": 15.95, |
|
"grad_norm": 6.326321601867676, |
|
"learning_rate": 2.3581625441696114e-05, |
|
"loss": 0.8193, |
|
"step": 37900 |
|
}, |
|
{ |
|
"epoch": 15.95, |
|
"eval_cer": 0.3725704900082124, |
|
"eval_loss": 2.059025764465332, |
|
"eval_runtime": 377.283, |
|
"eval_samples_per_second": 25.122, |
|
"eval_steps_per_second": 3.141, |
|
"step": 37900 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"grad_norm": 2.2139954566955566, |
|
"learning_rate": 2.351095406360424e-05, |
|
"loss": 0.8378, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_cer": 0.37425452661217784, |
|
"eval_loss": 1.9064280986785889, |
|
"eval_runtime": 411.9001, |
|
"eval_samples_per_second": 23.01, |
|
"eval_steps_per_second": 2.877, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 16.04, |
|
"grad_norm": 2.1273019313812256, |
|
"learning_rate": 2.3440282685512367e-05, |
|
"loss": 0.7478, |
|
"step": 38100 |
|
}, |
|
{ |
|
"epoch": 16.04, |
|
"eval_cer": 0.3710159946814751, |
|
"eval_loss": 1.7977242469787598, |
|
"eval_runtime": 380.1262, |
|
"eval_samples_per_second": 24.934, |
|
"eval_steps_per_second": 3.117, |
|
"step": 38100 |
|
}, |
|
{ |
|
"epoch": 16.08, |
|
"grad_norm": 3.238290786743164, |
|
"learning_rate": 2.3369611307420495e-05, |
|
"loss": 0.738, |
|
"step": 38200 |
|
}, |
|
{ |
|
"epoch": 16.08, |
|
"eval_cer": 0.37189589769660947, |
|
"eval_loss": 1.8234237432479858, |
|
"eval_runtime": 391.4385, |
|
"eval_samples_per_second": 24.213, |
|
"eval_steps_per_second": 3.027, |
|
"step": 38200 |
|
}, |
|
{ |
|
"epoch": 16.12, |
|
"grad_norm": 3.291322708129883, |
|
"learning_rate": 2.3298939929328624e-05, |
|
"loss": 0.7327, |
|
"step": 38300 |
|
}, |
|
{ |
|
"epoch": 16.12, |
|
"eval_cer": 0.3755817136600055, |
|
"eval_loss": 1.935049057006836, |
|
"eval_runtime": 376.3761, |
|
"eval_samples_per_second": 25.182, |
|
"eval_steps_per_second": 3.148, |
|
"step": 38300 |
|
}, |
|
{ |
|
"epoch": 16.17, |
|
"grad_norm": 3.3271946907043457, |
|
"learning_rate": 2.322826855123675e-05, |
|
"loss": 0.7428, |
|
"step": 38400 |
|
}, |
|
{ |
|
"epoch": 16.17, |
|
"eval_cer": 0.37296644636502285, |
|
"eval_loss": 1.7951716184616089, |
|
"eval_runtime": 401.591, |
|
"eval_samples_per_second": 23.601, |
|
"eval_steps_per_second": 2.951, |
|
"step": 38400 |
|
}, |
|
{ |
|
"epoch": 16.21, |
|
"grad_norm": 2.013002395629883, |
|
"learning_rate": 2.3157597173144876e-05, |
|
"loss": 0.7499, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 16.21, |
|
"eval_cer": 0.37256560165812835, |
|
"eval_loss": 1.7327371835708618, |
|
"eval_runtime": 380.9473, |
|
"eval_samples_per_second": 24.88, |
|
"eval_steps_per_second": 3.111, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 16.25, |
|
"grad_norm": 3.8447253704071045, |
|
"learning_rate": 2.3086925795053004e-05, |
|
"loss": 0.7266, |
|
"step": 38600 |
|
}, |
|
{ |
|
"epoch": 16.25, |
|
"eval_cer": 0.37181523992022214, |
|
"eval_loss": 2.117140293121338, |
|
"eval_runtime": 398.2953, |
|
"eval_samples_per_second": 23.796, |
|
"eval_steps_per_second": 2.975, |
|
"step": 38600 |
|
}, |
|
{ |
|
"epoch": 16.29, |
|
"grad_norm": 2.052280902862549, |
|
"learning_rate": 2.301625441696113e-05, |
|
"loss": 0.737, |
|
"step": 38700 |
|
}, |
|
{ |
|
"epoch": 16.29, |
|
"eval_cer": 0.37181768409526417, |
|
"eval_loss": 2.6678144931793213, |
|
"eval_runtime": 376.034, |
|
"eval_samples_per_second": 25.205, |
|
"eval_steps_per_second": 3.151, |
|
"step": 38700 |
|
}, |
|
{ |
|
"epoch": 16.33, |
|
"grad_norm": 1.934323787689209, |
|
"learning_rate": 2.2945583038869257e-05, |
|
"loss": 0.7233, |
|
"step": 38800 |
|
}, |
|
{ |
|
"epoch": 16.33, |
|
"eval_cer": 0.3704929412224786, |
|
"eval_loss": 2.179749011993408, |
|
"eval_runtime": 389.749, |
|
"eval_samples_per_second": 24.318, |
|
"eval_steps_per_second": 3.04, |
|
"step": 38800 |
|
}, |
|
{ |
|
"epoch": 16.38, |
|
"grad_norm": 2.3759796619415283, |
|
"learning_rate": 2.287491166077739e-05, |
|
"loss": 0.743, |
|
"step": 38900 |
|
}, |
|
{ |
|
"epoch": 16.38, |
|
"eval_cer": 0.3723774001798913, |
|
"eval_loss": 2.006964683532715, |
|
"eval_runtime": 381.2342, |
|
"eval_samples_per_second": 24.861, |
|
"eval_steps_per_second": 3.108, |
|
"step": 38900 |
|
}, |
|
{ |
|
"epoch": 16.42, |
|
"grad_norm": 1.9076517820358276, |
|
"learning_rate": 2.2804240282685513e-05, |
|
"loss": 0.7521, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 16.42, |
|
"eval_cer": 0.3758725704900082, |
|
"eval_loss": 1.847259759902954, |
|
"eval_runtime": 394.9054, |
|
"eval_samples_per_second": 24.001, |
|
"eval_steps_per_second": 3.001, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 16.46, |
|
"grad_norm": 7.669488906860352, |
|
"learning_rate": 2.273356890459364e-05, |
|
"loss": 0.7523, |
|
"step": 39100 |
|
}, |
|
{ |
|
"epoch": 16.46, |
|
"eval_cer": 0.3722576356028313, |
|
"eval_loss": 1.65703284740448, |
|
"eval_runtime": 378.1244, |
|
"eval_samples_per_second": 25.066, |
|
"eval_steps_per_second": 3.134, |
|
"step": 39100 |
|
}, |
|
{ |
|
"epoch": 16.5, |
|
"grad_norm": 2.213913679122925, |
|
"learning_rate": 2.2662897526501766e-05, |
|
"loss": 0.7423, |
|
"step": 39200 |
|
}, |
|
{ |
|
"epoch": 16.5, |
|
"eval_cer": 0.3694517226545696, |
|
"eval_loss": 1.7742842435836792, |
|
"eval_runtime": 396.973, |
|
"eval_samples_per_second": 23.876, |
|
"eval_steps_per_second": 2.985, |
|
"step": 39200 |
|
}, |
|
{ |
|
"epoch": 16.54, |
|
"grad_norm": 3.6008243560791016, |
|
"learning_rate": 2.2592226148409894e-05, |
|
"loss": 0.731, |
|
"step": 39300 |
|
}, |
|
{ |
|
"epoch": 16.54, |
|
"eval_cer": 0.36915842164952484, |
|
"eval_loss": 1.7430915832519531, |
|
"eval_runtime": 370.2202, |
|
"eval_samples_per_second": 25.601, |
|
"eval_steps_per_second": 3.201, |
|
"step": 39300 |
|
}, |
|
{ |
|
"epoch": 16.59, |
|
"grad_norm": 2.914562463760376, |
|
"learning_rate": 2.2521554770318022e-05, |
|
"loss": 0.7409, |
|
"step": 39400 |
|
}, |
|
{ |
|
"epoch": 16.59, |
|
"eval_cer": 0.36913153572406243, |
|
"eval_loss": 1.8427553176879883, |
|
"eval_runtime": 393.2137, |
|
"eval_samples_per_second": 24.104, |
|
"eval_steps_per_second": 3.014, |
|
"step": 39400 |
|
}, |
|
{ |
|
"epoch": 16.63, |
|
"grad_norm": 4.166165828704834, |
|
"learning_rate": 2.245088339222615e-05, |
|
"loss": 0.7502, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 16.63, |
|
"eval_cer": 0.366770462633452, |
|
"eval_loss": 1.7825747728347778, |
|
"eval_runtime": 387.4051, |
|
"eval_samples_per_second": 24.465, |
|
"eval_steps_per_second": 3.059, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"grad_norm": 4.028792858123779, |
|
"learning_rate": 2.238021201413428e-05, |
|
"loss": 0.738, |
|
"step": 39600 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"eval_cer": 0.36522085565679874, |
|
"eval_loss": 1.9882181882858276, |
|
"eval_runtime": 394.101, |
|
"eval_samples_per_second": 24.05, |
|
"eval_steps_per_second": 3.007, |
|
"step": 39600 |
|
}, |
|
{ |
|
"epoch": 16.71, |
|
"grad_norm": 2.6417016983032227, |
|
"learning_rate": 2.2309540636042403e-05, |
|
"loss": 0.7357, |
|
"step": 39700 |
|
}, |
|
{ |
|
"epoch": 16.71, |
|
"eval_cer": 0.3660787610965547, |
|
"eval_loss": 2.067323923110962, |
|
"eval_runtime": 386.7519, |
|
"eval_samples_per_second": 24.507, |
|
"eval_steps_per_second": 3.064, |
|
"step": 39700 |
|
}, |
|
{ |
|
"epoch": 16.75, |
|
"grad_norm": 3.9978723526000977, |
|
"learning_rate": 2.223886925795053e-05, |
|
"loss": 0.7286, |
|
"step": 39800 |
|
}, |
|
{ |
|
"epoch": 16.75, |
|
"eval_cer": 0.3679216690782527, |
|
"eval_loss": 1.6313321590423584, |
|
"eval_runtime": 398.1769, |
|
"eval_samples_per_second": 23.803, |
|
"eval_steps_per_second": 2.976, |
|
"step": 39800 |
|
}, |
|
{ |
|
"epoch": 16.8, |
|
"grad_norm": 1.9248440265655518, |
|
"learning_rate": 2.2168197879858656e-05, |
|
"loss": 0.7211, |
|
"step": 39900 |
|
}, |
|
{ |
|
"epoch": 16.8, |
|
"eval_cer": 0.365851452817645, |
|
"eval_loss": 1.687284231185913, |
|
"eval_runtime": 376.8233, |
|
"eval_samples_per_second": 25.152, |
|
"eval_steps_per_second": 3.145, |
|
"step": 39900 |
|
}, |
|
{ |
|
"epoch": 16.84, |
|
"grad_norm": 10.867044448852539, |
|
"learning_rate": 2.2097526501766784e-05, |
|
"loss": 0.7286, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 16.84, |
|
"eval_cer": 0.36576346251613157, |
|
"eval_loss": 2.3375942707061768, |
|
"eval_runtime": 391.3944, |
|
"eval_samples_per_second": 24.216, |
|
"eval_steps_per_second": 3.028, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 16.88, |
|
"grad_norm": 2.3577775955200195, |
|
"learning_rate": 2.2026855123674912e-05, |
|
"loss": 0.7599, |
|
"step": 40100 |
|
}, |
|
{ |
|
"epoch": 16.88, |
|
"eval_cer": 0.36309197919518205, |
|
"eval_loss": 1.8121538162231445, |
|
"eval_runtime": 378.3056, |
|
"eval_samples_per_second": 25.054, |
|
"eval_steps_per_second": 3.132, |
|
"step": 40100 |
|
}, |
|
{ |
|
"epoch": 16.92, |
|
"grad_norm": 3.6556479930877686, |
|
"learning_rate": 2.1956890459363957e-05, |
|
"loss": 1.0403, |
|
"step": 40200 |
|
}, |
|
{ |
|
"epoch": 16.92, |
|
"eval_cer": 0.3646293652966251, |
|
"eval_loss": 2.4574332237243652, |
|
"eval_runtime": 392.3067, |
|
"eval_samples_per_second": 24.16, |
|
"eval_steps_per_second": 3.021, |
|
"step": 40200 |
|
}, |
|
{ |
|
"epoch": 16.96, |
|
"grad_norm": 1.9411290884017944, |
|
"learning_rate": 2.1886219081272085e-05, |
|
"loss": 0.7538, |
|
"step": 40300 |
|
}, |
|
{ |
|
"epoch": 16.96, |
|
"eval_cer": 0.36607387274647063, |
|
"eval_loss": 1.7143244743347168, |
|
"eval_runtime": 378.8409, |
|
"eval_samples_per_second": 25.018, |
|
"eval_steps_per_second": 3.128, |
|
"step": 40300 |
|
}, |
|
{ |
|
"epoch": 17.01, |
|
"grad_norm": 7.614956378936768, |
|
"learning_rate": 2.1815547703180214e-05, |
|
"loss": 0.7241, |
|
"step": 40400 |
|
}, |
|
{ |
|
"epoch": 17.01, |
|
"eval_cer": 0.3618430057486997, |
|
"eval_loss": 1.6417515277862549, |
|
"eval_runtime": 391.0073, |
|
"eval_samples_per_second": 24.24, |
|
"eval_steps_per_second": 3.031, |
|
"step": 40400 |
|
}, |
|
{ |
|
"epoch": 17.05, |
|
"grad_norm": 1.9776512384414673, |
|
"learning_rate": 2.1744876325088338e-05, |
|
"loss": 0.6509, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 17.05, |
|
"eval_cer": 0.3647295764733487, |
|
"eval_loss": 2.075551986694336, |
|
"eval_runtime": 398.6714, |
|
"eval_samples_per_second": 23.774, |
|
"eval_steps_per_second": 2.972, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 17.09, |
|
"grad_norm": 3.229382276535034, |
|
"learning_rate": 2.167420494699647e-05, |
|
"loss": 0.6699, |
|
"step": 40600 |
|
}, |
|
{ |
|
"epoch": 17.09, |
|
"eval_cer": 0.36611786789722733, |
|
"eval_loss": 1.8951997756958008, |
|
"eval_runtime": 377.7399, |
|
"eval_samples_per_second": 25.091, |
|
"eval_steps_per_second": 3.137, |
|
"step": 40600 |
|
}, |
|
{ |
|
"epoch": 17.13, |
|
"grad_norm": 7.590035915374756, |
|
"learning_rate": 2.1603533568904594e-05, |
|
"loss": 0.6376, |
|
"step": 40700 |
|
}, |
|
{ |
|
"epoch": 17.13, |
|
"eval_cer": 0.36229762230651913, |
|
"eval_loss": 2.3001914024353027, |
|
"eval_runtime": 393.9708, |
|
"eval_samples_per_second": 24.058, |
|
"eval_steps_per_second": 3.008, |
|
"step": 40700 |
|
}, |
|
{ |
|
"epoch": 17.18, |
|
"grad_norm": 3.8723325729370117, |
|
"learning_rate": 2.1532862190812723e-05, |
|
"loss": 0.6424, |
|
"step": 40800 |
|
}, |
|
{ |
|
"epoch": 17.18, |
|
"eval_cer": 0.3627106878886238, |
|
"eval_loss": 1.9817591905593872, |
|
"eval_runtime": 380.3308, |
|
"eval_samples_per_second": 24.92, |
|
"eval_steps_per_second": 3.116, |
|
"step": 40800 |
|
}, |
|
{ |
|
"epoch": 17.22, |
|
"grad_norm": 7.374648094177246, |
|
"learning_rate": 2.146219081272085e-05, |
|
"loss": 0.6245, |
|
"step": 40900 |
|
}, |
|
{ |
|
"epoch": 17.22, |
|
"eval_cer": 0.36244916115912557, |
|
"eval_loss": 2.089902400970459, |
|
"eval_runtime": 399.0568, |
|
"eval_samples_per_second": 23.751, |
|
"eval_steps_per_second": 2.97, |
|
"step": 40900 |
|
}, |
|
{ |
|
"epoch": 17.26, |
|
"grad_norm": 27.956933975219727, |
|
"learning_rate": 2.1391519434628975e-05, |
|
"loss": 0.6492, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 17.26, |
|
"eval_cer": 0.36416741621367954, |
|
"eval_loss": 2.015270948410034, |
|
"eval_runtime": 376.6268, |
|
"eval_samples_per_second": 25.165, |
|
"eval_steps_per_second": 3.146, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 17.3, |
|
"grad_norm": 5.671442031860352, |
|
"learning_rate": 2.1320848056537103e-05, |
|
"loss": 0.6544, |
|
"step": 41100 |
|
}, |
|
{ |
|
"epoch": 17.3, |
|
"eval_cer": 0.3657659066911736, |
|
"eval_loss": 1.7072508335113525, |
|
"eval_runtime": 397.2129, |
|
"eval_samples_per_second": 23.861, |
|
"eval_steps_per_second": 2.983, |
|
"step": 41100 |
|
}, |
|
{ |
|
"epoch": 17.34, |
|
"grad_norm": 1.6116443872451782, |
|
"learning_rate": 2.125017667844523e-05, |
|
"loss": 0.6304, |
|
"step": 41200 |
|
}, |
|
{ |
|
"epoch": 17.34, |
|
"eval_cer": 0.36735950881858354, |
|
"eval_loss": 2.110800266265869, |
|
"eval_runtime": 379.2769, |
|
"eval_samples_per_second": 24.99, |
|
"eval_steps_per_second": 3.124, |
|
"step": 41200 |
|
}, |
|
{ |
|
"epoch": 17.39, |
|
"grad_norm": 2.9379818439483643, |
|
"learning_rate": 2.117950530035336e-05, |
|
"loss": 0.6426, |
|
"step": 41300 |
|
}, |
|
{ |
|
"epoch": 17.39, |
|
"eval_cer": 0.3644313871182199, |
|
"eval_loss": 2.184722900390625, |
|
"eval_runtime": 397.6084, |
|
"eval_samples_per_second": 23.838, |
|
"eval_steps_per_second": 2.98, |
|
"step": 41300 |
|
}, |
|
{ |
|
"epoch": 17.43, |
|
"grad_norm": 2.8842365741729736, |
|
"learning_rate": 2.1108833922261484e-05, |
|
"loss": 0.6913, |
|
"step": 41400 |
|
}, |
|
{ |
|
"epoch": 17.43, |
|
"eval_cer": 0.36425785069023503, |
|
"eval_loss": 1.9356818199157715, |
|
"eval_runtime": 381.9605, |
|
"eval_samples_per_second": 24.814, |
|
"eval_steps_per_second": 3.102, |
|
"step": 41400 |
|
}, |
|
{ |
|
"epoch": 17.47, |
|
"grad_norm": 2.586341142654419, |
|
"learning_rate": 2.1038162544169613e-05, |
|
"loss": 0.6534, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 17.47, |
|
"eval_cer": 0.36276690391459077, |
|
"eval_loss": 1.7679296731948853, |
|
"eval_runtime": 404.4195, |
|
"eval_samples_per_second": 23.436, |
|
"eval_steps_per_second": 2.93, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 17.51, |
|
"grad_norm": 12.988226890563965, |
|
"learning_rate": 2.096749116607774e-05, |
|
"loss": 0.655, |
|
"step": 41600 |
|
}, |
|
{ |
|
"epoch": 17.51, |
|
"eval_cer": 0.3648982245512495, |
|
"eval_loss": 1.7326639890670776, |
|
"eval_runtime": 382.6806, |
|
"eval_samples_per_second": 24.767, |
|
"eval_steps_per_second": 3.097, |
|
"step": 41600 |
|
}, |
|
{ |
|
"epoch": 17.55, |
|
"grad_norm": 3.8716864585876465, |
|
"learning_rate": 2.0896819787985865e-05, |
|
"loss": 0.6464, |
|
"step": 41700 |
|
}, |
|
{ |
|
"epoch": 17.55, |
|
"eval_cer": 0.36285245004106215, |
|
"eval_loss": 2.095261573791504, |
|
"eval_runtime": 400.7202, |
|
"eval_samples_per_second": 23.652, |
|
"eval_steps_per_second": 2.957, |
|
"step": 41700 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"grad_norm": 1.966536521911621, |
|
"learning_rate": 2.0826148409893993e-05, |
|
"loss": 0.6675, |
|
"step": 41800 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"eval_cer": 0.3588268937468226, |
|
"eval_loss": 1.8380521535873413, |
|
"eval_runtime": 382.7754, |
|
"eval_samples_per_second": 24.761, |
|
"eval_steps_per_second": 3.096, |
|
"step": 41800 |
|
}, |
|
{ |
|
"epoch": 17.64, |
|
"grad_norm": 2.0896823406219482, |
|
"learning_rate": 2.075547703180212e-05, |
|
"loss": 0.6531, |
|
"step": 41900 |
|
}, |
|
{ |
|
"epoch": 17.64, |
|
"eval_cer": 0.3638618943334246, |
|
"eval_loss": 1.911123514175415, |
|
"eval_runtime": 401.57, |
|
"eval_samples_per_second": 23.602, |
|
"eval_steps_per_second": 2.951, |
|
"step": 41900 |
|
}, |
|
{ |
|
"epoch": 17.68, |
|
"grad_norm": 2.2857401371002197, |
|
"learning_rate": 2.068480565371025e-05, |
|
"loss": 0.6536, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 17.68, |
|
"eval_cer": 0.3620947557780298, |
|
"eval_loss": 1.524117112159729, |
|
"eval_runtime": 379.6794, |
|
"eval_samples_per_second": 24.963, |
|
"eval_steps_per_second": 3.121, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 17.72, |
|
"grad_norm": 3.275186777114868, |
|
"learning_rate": 2.0614134275618374e-05, |
|
"loss": 0.641, |
|
"step": 42100 |
|
}, |
|
{ |
|
"epoch": 17.72, |
|
"eval_cer": 0.36079689883070665, |
|
"eval_loss": 1.8855453729629517, |
|
"eval_runtime": 405.5391, |
|
"eval_samples_per_second": 23.371, |
|
"eval_steps_per_second": 2.922, |
|
"step": 42100 |
|
}, |
|
{ |
|
"epoch": 17.76, |
|
"grad_norm": 3.293827533721924, |
|
"learning_rate": 2.054416961130742e-05, |
|
"loss": 0.755, |
|
"step": 42200 |
|
}, |
|
{ |
|
"epoch": 17.76, |
|
"eval_cer": 0.3628133432403895, |
|
"eval_loss": 2.0661118030548096, |
|
"eval_runtime": 378.6947, |
|
"eval_samples_per_second": 25.028, |
|
"eval_steps_per_second": 3.129, |
|
"step": 42200 |
|
}, |
|
{ |
|
"epoch": 17.81, |
|
"grad_norm": 2.2286911010742188, |
|
"learning_rate": 2.0473498233215547e-05, |
|
"loss": 0.6528, |
|
"step": 42300 |
|
}, |
|
{ |
|
"epoch": 17.81, |
|
"eval_cer": 0.3575412576747096, |
|
"eval_loss": 1.9768012762069702, |
|
"eval_runtime": 400.1546, |
|
"eval_samples_per_second": 23.686, |
|
"eval_steps_per_second": 2.961, |
|
"step": 42300 |
|
}, |
|
{ |
|
"epoch": 17.85, |
|
"grad_norm": 1.9678279161453247, |
|
"learning_rate": 2.0402826855123676e-05, |
|
"loss": 0.641, |
|
"step": 42400 |
|
}, |
|
{ |
|
"epoch": 17.85, |
|
"eval_cer": 0.3585751437174925, |
|
"eval_loss": 2.0739636421203613, |
|
"eval_runtime": 379.1375, |
|
"eval_samples_per_second": 24.999, |
|
"eval_steps_per_second": 3.126, |
|
"step": 42400 |
|
}, |
|
{ |
|
"epoch": 17.89, |
|
"grad_norm": 4.927220344543457, |
|
"learning_rate": 2.0332155477031804e-05, |
|
"loss": 0.6499, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 17.89, |
|
"eval_cer": 0.3600440929177584, |
|
"eval_loss": 2.656733512878418, |
|
"eval_runtime": 405.5316, |
|
"eval_samples_per_second": 23.372, |
|
"eval_steps_per_second": 2.922, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 17.93, |
|
"grad_norm": 1.9719730615615845, |
|
"learning_rate": 2.0261484098939932e-05, |
|
"loss": 0.6453, |
|
"step": 42600 |
|
}, |
|
{ |
|
"epoch": 17.93, |
|
"eval_cer": 0.3558694419459544, |
|
"eval_loss": 3.2607502937316895, |
|
"eval_runtime": 381.4619, |
|
"eval_samples_per_second": 24.847, |
|
"eval_steps_per_second": 3.106, |
|
"step": 42600 |
|
}, |
|
{ |
|
"epoch": 17.98, |
|
"grad_norm": 4.013895034790039, |
|
"learning_rate": 2.0190812720848056e-05, |
|
"loss": 0.6373, |
|
"step": 42700 |
|
}, |
|
{ |
|
"epoch": 17.98, |
|
"eval_cer": 0.3598925540651519, |
|
"eval_loss": 3.0558464527130127, |
|
"eval_runtime": 405.1328, |
|
"eval_samples_per_second": 23.395, |
|
"eval_steps_per_second": 2.925, |
|
"step": 42700 |
|
}, |
|
{ |
|
"epoch": 18.02, |
|
"grad_norm": 5.639492988586426, |
|
"learning_rate": 2.0120141342756185e-05, |
|
"loss": 0.6082, |
|
"step": 42800 |
|
}, |
|
{ |
|
"epoch": 18.02, |
|
"eval_cer": 0.3542000703922412, |
|
"eval_loss": 2.6185314655303955, |
|
"eval_runtime": 382.1723, |
|
"eval_samples_per_second": 24.8, |
|
"eval_steps_per_second": 3.101, |
|
"step": 42800 |
|
}, |
|
{ |
|
"epoch": 18.06, |
|
"grad_norm": 4.65023946762085, |
|
"learning_rate": 2.004946996466431e-05, |
|
"loss": 0.5527, |
|
"step": 42900 |
|
}, |
|
{ |
|
"epoch": 18.06, |
|
"eval_cer": 0.35880245199640215, |
|
"eval_loss": 2.8290772438049316, |
|
"eval_runtime": 400.9741, |
|
"eval_samples_per_second": 23.637, |
|
"eval_steps_per_second": 2.955, |
|
"step": 42900 |
|
}, |
|
{ |
|
"epoch": 18.1, |
|
"grad_norm": 1.5041050910949707, |
|
"learning_rate": 1.997879858657244e-05, |
|
"loss": 0.5502, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 18.1, |
|
"eval_cer": 0.3574239372726917, |
|
"eval_loss": 2.1965556144714355, |
|
"eval_runtime": 380.8445, |
|
"eval_samples_per_second": 24.887, |
|
"eval_steps_per_second": 3.112, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 18.14, |
|
"grad_norm": 2.088670015335083, |
|
"learning_rate": 1.990812720848057e-05, |
|
"loss": 0.5513, |
|
"step": 43100 |
|
}, |
|
{ |
|
"epoch": 18.14, |
|
"eval_cer": 0.35743126979781786, |
|
"eval_loss": 3.069037914276123, |
|
"eval_runtime": 401.5683, |
|
"eval_samples_per_second": 23.602, |
|
"eval_steps_per_second": 2.951, |
|
"step": 43100 |
|
}, |
|
{ |
|
"epoch": 18.19, |
|
"grad_norm": 13.482215881347656, |
|
"learning_rate": 1.9837455830388694e-05, |
|
"loss": 0.5517, |
|
"step": 43200 |
|
}, |
|
{ |
|
"epoch": 18.19, |
|
"eval_cer": 0.35501398068124046, |
|
"eval_loss": 3.0275192260742188, |
|
"eval_runtime": 382.7019, |
|
"eval_samples_per_second": 24.766, |
|
"eval_steps_per_second": 3.096, |
|
"step": 43200 |
|
}, |
|
{ |
|
"epoch": 18.23, |
|
"grad_norm": 2.318678140640259, |
|
"learning_rate": 1.976749116607774e-05, |
|
"loss": 0.7104, |
|
"step": 43300 |
|
}, |
|
{ |
|
"epoch": 18.23, |
|
"eval_cer": 0.35606253177427555, |
|
"eval_loss": 3.2951183319091797, |
|
"eval_runtime": 399.8798, |
|
"eval_samples_per_second": 23.702, |
|
"eval_steps_per_second": 2.963, |
|
"step": 43300 |
|
}, |
|
{ |
|
"epoch": 18.27, |
|
"grad_norm": 2.007765769958496, |
|
"learning_rate": 1.9696819787985867e-05, |
|
"loss": 0.551, |
|
"step": 43400 |
|
}, |
|
{ |
|
"epoch": 18.27, |
|
"eval_cer": 0.35518996128426733, |
|
"eval_loss": 3.7015881538391113, |
|
"eval_runtime": 380.644, |
|
"eval_samples_per_second": 24.9, |
|
"eval_steps_per_second": 3.113, |
|
"step": 43400 |
|
}, |
|
{ |
|
"epoch": 18.31, |
|
"grad_norm": 1.9974658489227295, |
|
"learning_rate": 1.9626148409893995e-05, |
|
"loss": 0.5745, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 18.31, |
|
"eval_cer": 0.35752903679949943, |
|
"eval_loss": 3.192854404449463, |
|
"eval_runtime": 397.0976, |
|
"eval_samples_per_second": 23.868, |
|
"eval_steps_per_second": 2.984, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 18.35, |
|
"grad_norm": 1.8913908004760742, |
|
"learning_rate": 1.9555477031802123e-05, |
|
"loss": 0.5569, |
|
"step": 43600 |
|
}, |
|
{ |
|
"epoch": 18.35, |
|
"eval_cer": 0.35657092018301983, |
|
"eval_loss": 3.314192771911621, |
|
"eval_runtime": 382.2019, |
|
"eval_samples_per_second": 24.798, |
|
"eval_steps_per_second": 3.1, |
|
"step": 43600 |
|
}, |
|
{ |
|
"epoch": 18.4, |
|
"grad_norm": 5.3773956298828125, |
|
"learning_rate": 1.9484805653710248e-05, |
|
"loss": 0.5594, |
|
"step": 43700 |
|
}, |
|
{ |
|
"epoch": 18.4, |
|
"eval_cer": 0.3562653983027648, |
|
"eval_loss": 2.0783262252807617, |
|
"eval_runtime": 404.1105, |
|
"eval_samples_per_second": 23.454, |
|
"eval_steps_per_second": 2.932, |
|
"step": 43700 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"grad_norm": 1.7726545333862305, |
|
"learning_rate": 1.9414134275618376e-05, |
|
"loss": 0.5482, |
|
"step": 43800 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"eval_cer": 0.3563827187047828, |
|
"eval_loss": 2.1228716373443604, |
|
"eval_runtime": 379.1678, |
|
"eval_samples_per_second": 24.997, |
|
"eval_steps_per_second": 3.125, |
|
"step": 43800 |
|
}, |
|
{ |
|
"epoch": 18.48, |
|
"grad_norm": 8.040356636047363, |
|
"learning_rate": 1.93434628975265e-05, |
|
"loss": 0.5427, |
|
"step": 43900 |
|
}, |
|
{ |
|
"epoch": 18.48, |
|
"eval_cer": 0.3585995854679129, |
|
"eval_loss": 2.0755696296691895, |
|
"eval_runtime": 399.7267, |
|
"eval_samples_per_second": 23.711, |
|
"eval_steps_per_second": 2.965, |
|
"step": 43900 |
|
}, |
|
{ |
|
"epoch": 18.52, |
|
"grad_norm": 7.321507930755615, |
|
"learning_rate": 1.927279151943463e-05, |
|
"loss": 0.5651, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 18.52, |
|
"eval_cer": 0.35462046849947204, |
|
"eval_loss": 2.0843379497528076, |
|
"eval_runtime": 380.2237, |
|
"eval_samples_per_second": 24.927, |
|
"eval_steps_per_second": 3.117, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 18.56, |
|
"grad_norm": 2.2667276859283447, |
|
"learning_rate": 1.9202120141342757e-05, |
|
"loss": 0.5645, |
|
"step": 44100 |
|
}, |
|
{ |
|
"epoch": 18.56, |
|
"eval_cer": 0.35216651675726407, |
|
"eval_loss": 1.9949140548706055, |
|
"eval_runtime": 407.2675, |
|
"eval_samples_per_second": 23.272, |
|
"eval_steps_per_second": 2.91, |
|
"step": 44100 |
|
}, |
|
{ |
|
"epoch": 18.61, |
|
"grad_norm": 2.5292446613311768, |
|
"learning_rate": 1.9131448763250885e-05, |
|
"loss": 0.5488, |
|
"step": 44200 |
|
}, |
|
{ |
|
"epoch": 18.61, |
|
"eval_cer": 0.35419029369207305, |
|
"eval_loss": 1.832000732421875, |
|
"eval_runtime": 383.3497, |
|
"eval_samples_per_second": 24.724, |
|
"eval_steps_per_second": 3.091, |
|
"step": 44200 |
|
}, |
|
{ |
|
"epoch": 18.65, |
|
"grad_norm": 4.608269691467285, |
|
"learning_rate": 1.9060777385159013e-05, |
|
"loss": 0.5694, |
|
"step": 44300 |
|
}, |
|
{ |
|
"epoch": 18.65, |
|
"eval_cer": 0.3574825974737007, |
|
"eval_loss": 2.2215840816497803, |
|
"eval_runtime": 404.7052, |
|
"eval_samples_per_second": 23.42, |
|
"eval_steps_per_second": 2.928, |
|
"step": 44300 |
|
}, |
|
{ |
|
"epoch": 18.69, |
|
"grad_norm": 3.4327402114868164, |
|
"learning_rate": 1.8990106007067137e-05, |
|
"loss": 0.5747, |
|
"step": 44400 |
|
}, |
|
{ |
|
"epoch": 18.69, |
|
"eval_cer": 0.3554588205388917, |
|
"eval_loss": 2.0720789432525635, |
|
"eval_runtime": 385.2409, |
|
"eval_samples_per_second": 24.603, |
|
"eval_steps_per_second": 3.076, |
|
"step": 44400 |
|
}, |
|
{ |
|
"epoch": 18.73, |
|
"grad_norm": 2.587009906768799, |
|
"learning_rate": 1.8919434628975266e-05, |
|
"loss": 0.5442, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 18.73, |
|
"eval_cer": 0.35546126471393374, |
|
"eval_loss": 2.185746192932129, |
|
"eval_runtime": 399.8878, |
|
"eval_samples_per_second": 23.702, |
|
"eval_steps_per_second": 2.963, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 18.77, |
|
"grad_norm": 2.142084836959839, |
|
"learning_rate": 1.8849469964664314e-05, |
|
"loss": 0.9587, |
|
"step": 44600 |
|
}, |
|
{ |
|
"epoch": 18.77, |
|
"eval_cer": 0.353083082398029, |
|
"eval_loss": 2.363218307495117, |
|
"eval_runtime": 384.8343, |
|
"eval_samples_per_second": 24.629, |
|
"eval_steps_per_second": 3.079, |
|
"step": 44600 |
|
}, |
|
{ |
|
"epoch": 18.82, |
|
"grad_norm": 8.04695987701416, |
|
"learning_rate": 1.877879858657244e-05, |
|
"loss": 0.5773, |
|
"step": 44700 |
|
}, |
|
{ |
|
"epoch": 18.82, |
|
"eval_cer": 0.35235960658558524, |
|
"eval_loss": 2.419173002243042, |
|
"eval_runtime": 407.1529, |
|
"eval_samples_per_second": 23.279, |
|
"eval_steps_per_second": 2.91, |
|
"step": 44700 |
|
}, |
|
{ |
|
"epoch": 18.86, |
|
"grad_norm": 2.3344297409057617, |
|
"learning_rate": 1.8708127208480567e-05, |
|
"loss": 0.658, |
|
"step": 44800 |
|
}, |
|
{ |
|
"epoch": 18.86, |
|
"eval_cer": 0.3527897813929842, |
|
"eval_loss": 2.6321187019348145, |
|
"eval_runtime": 378.5146, |
|
"eval_samples_per_second": 25.04, |
|
"eval_steps_per_second": 3.131, |
|
"step": 44800 |
|
}, |
|
{ |
|
"epoch": 18.9, |
|
"grad_norm": 1.822228193283081, |
|
"learning_rate": 1.8637455830388695e-05, |
|
"loss": 0.5733, |
|
"step": 44900 |
|
}, |
|
{ |
|
"epoch": 18.9, |
|
"eval_cer": 0.34930194360799344, |
|
"eval_loss": 2.7781569957733154, |
|
"eval_runtime": 405.1046, |
|
"eval_samples_per_second": 23.396, |
|
"eval_steps_per_second": 2.925, |
|
"step": 44900 |
|
}, |
|
{ |
|
"epoch": 18.94, |
|
"grad_norm": 15.387603759765625, |
|
"learning_rate": 1.856678445229682e-05, |
|
"loss": 0.5554, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 18.94, |
|
"eval_cer": 0.35281422314340466, |
|
"eval_loss": 2.2615067958831787, |
|
"eval_runtime": 386.1566, |
|
"eval_samples_per_second": 24.544, |
|
"eval_steps_per_second": 3.069, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 18.99, |
|
"grad_norm": 2.5482892990112305, |
|
"learning_rate": 1.8496113074204948e-05, |
|
"loss": 0.5893, |
|
"step": 45100 |
|
}, |
|
{ |
|
"epoch": 18.99, |
|
"eval_cer": 0.3533568300027375, |
|
"eval_loss": 2.9341318607330322, |
|
"eval_runtime": 402.0749, |
|
"eval_samples_per_second": 23.573, |
|
"eval_steps_per_second": 2.947, |
|
"step": 45100 |
|
}, |
|
{ |
|
"epoch": 19.03, |
|
"grad_norm": 1.673799991607666, |
|
"learning_rate": 1.8425441696113076e-05, |
|
"loss": 0.5004, |
|
"step": 45200 |
|
}, |
|
{ |
|
"epoch": 19.03, |
|
"eval_cer": 0.35489666027922256, |
|
"eval_loss": 2.622223138809204, |
|
"eval_runtime": 462.6029, |
|
"eval_samples_per_second": 20.488, |
|
"eval_steps_per_second": 2.562, |
|
"step": 45200 |
|
}, |
|
{ |
|
"epoch": 19.07, |
|
"grad_norm": 2.5973594188690186, |
|
"learning_rate": 1.8354770318021204e-05, |
|
"loss": 0.476, |
|
"step": 45300 |
|
}, |
|
{ |
|
"epoch": 19.07, |
|
"eval_cer": 0.3551215243830902, |
|
"eval_loss": 2.0877623558044434, |
|
"eval_runtime": 371.4246, |
|
"eval_samples_per_second": 25.518, |
|
"eval_steps_per_second": 3.19, |
|
"step": 45300 |
|
}, |
|
{ |
|
"epoch": 19.11, |
|
"grad_norm": 2.274106502532959, |
|
"learning_rate": 1.828409893992933e-05, |
|
"loss": 0.4914, |
|
"step": 45400 |
|
}, |
|
{ |
|
"epoch": 19.11, |
|
"eval_cer": 0.35471090297602753, |
|
"eval_loss": 2.134805917739868, |
|
"eval_runtime": 380.1079, |
|
"eval_samples_per_second": 24.935, |
|
"eval_steps_per_second": 3.118, |
|
"step": 45400 |
|
}, |
|
{ |
|
"epoch": 19.15, |
|
"grad_norm": 16.124771118164062, |
|
"learning_rate": 1.8213427561837457e-05, |
|
"loss": 1.2144, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 19.15, |
|
"eval_cer": 0.35294376442063274, |
|
"eval_loss": 1.9654364585876465, |
|
"eval_runtime": 365.9062, |
|
"eval_samples_per_second": 25.903, |
|
"eval_steps_per_second": 3.239, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 19.2, |
|
"grad_norm": 5.378861427307129, |
|
"learning_rate": 1.8142756183745585e-05, |
|
"loss": 0.4807, |
|
"step": 45600 |
|
}, |
|
{ |
|
"epoch": 19.2, |
|
"eval_cer": 0.35123773024128896, |
|
"eval_loss": 2.7749032974243164, |
|
"eval_runtime": 380.1844, |
|
"eval_samples_per_second": 24.93, |
|
"eval_steps_per_second": 3.117, |
|
"step": 45600 |
|
}, |
|
{ |
|
"epoch": 19.24, |
|
"grad_norm": 1.9357730150222778, |
|
"learning_rate": 1.807208480565371e-05, |
|
"loss": 0.4868, |
|
"step": 45700 |
|
}, |
|
{ |
|
"epoch": 19.24, |
|
"eval_cer": 0.35421473544249343, |
|
"eval_loss": 2.5809061527252197, |
|
"eval_runtime": 368.1076, |
|
"eval_samples_per_second": 25.748, |
|
"eval_steps_per_second": 3.219, |
|
"step": 45700 |
|
}, |
|
{ |
|
"epoch": 19.28, |
|
"grad_norm": 2.2689895629882812, |
|
"learning_rate": 1.8001413427561838e-05, |
|
"loss": 0.471, |
|
"step": 45800 |
|
}, |
|
{ |
|
"epoch": 19.28, |
|
"eval_cer": 0.3532492863008877, |
|
"eval_loss": 2.3049328327178955, |
|
"eval_runtime": 377.068, |
|
"eval_samples_per_second": 25.136, |
|
"eval_steps_per_second": 3.143, |
|
"step": 45800 |
|
}, |
|
{ |
|
"epoch": 19.32, |
|
"grad_norm": 6.475673198699951, |
|
"learning_rate": 1.7930742049469966e-05, |
|
"loss": 0.4834, |
|
"step": 45900 |
|
}, |
|
{ |
|
"epoch": 19.32, |
|
"eval_cer": 0.3522642837589457, |
|
"eval_loss": 2.4670591354370117, |
|
"eval_runtime": 369.8781, |
|
"eval_samples_per_second": 25.625, |
|
"eval_steps_per_second": 3.204, |
|
"step": 45900 |
|
}, |
|
{ |
|
"epoch": 19.36, |
|
"grad_norm": 2.3827292919158936, |
|
"learning_rate": 1.7860070671378094e-05, |
|
"loss": 0.471, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 19.36, |
|
"eval_cer": 0.3508588831097728, |
|
"eval_loss": 2.3617963790893555, |
|
"eval_runtime": 380.0492, |
|
"eval_samples_per_second": 24.939, |
|
"eval_steps_per_second": 3.118, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 19.41, |
|
"grad_norm": 1.383456826210022, |
|
"learning_rate": 1.778939929328622e-05, |
|
"loss": 0.494, |
|
"step": 46100 |
|
}, |
|
{ |
|
"epoch": 19.41, |
|
"eval_cer": 0.3493679363341285, |
|
"eval_loss": 2.267214298248291, |
|
"eval_runtime": 370.4047, |
|
"eval_samples_per_second": 25.588, |
|
"eval_steps_per_second": 3.199, |
|
"step": 46100 |
|
}, |
|
{ |
|
"epoch": 19.45, |
|
"grad_norm": 5.505711555480957, |
|
"learning_rate": 1.7718727915194347e-05, |
|
"loss": 0.4749, |
|
"step": 46200 |
|
}, |
|
{ |
|
"epoch": 19.45, |
|
"eval_cer": 0.3519318759532283, |
|
"eval_loss": 3.048175573348999, |
|
"eval_runtime": 386.8635, |
|
"eval_samples_per_second": 24.5, |
|
"eval_steps_per_second": 3.063, |
|
"step": 46200 |
|
}, |
|
{ |
|
"epoch": 19.49, |
|
"grad_norm": 11.937617301940918, |
|
"learning_rate": 1.7648056537102475e-05, |
|
"loss": 0.4895, |
|
"step": 46300 |
|
}, |
|
{ |
|
"epoch": 19.49, |
|
"eval_cer": 0.3529168784951703, |
|
"eval_loss": 2.974360942840576, |
|
"eval_runtime": 369.5199, |
|
"eval_samples_per_second": 25.65, |
|
"eval_steps_per_second": 3.207, |
|
"step": 46300 |
|
}, |
|
{ |
|
"epoch": 19.53, |
|
"grad_norm": 2.0051708221435547, |
|
"learning_rate": 1.75773851590106e-05, |
|
"loss": 0.4878, |
|
"step": 46400 |
|
}, |
|
{ |
|
"epoch": 19.53, |
|
"eval_cer": 0.3492921669078253, |
|
"eval_loss": 3.078077793121338, |
|
"eval_runtime": 380.6797, |
|
"eval_samples_per_second": 24.898, |
|
"eval_steps_per_second": 3.113, |
|
"step": 46400 |
|
}, |
|
{ |
|
"epoch": 19.57, |
|
"grad_norm": 2.106623888015747, |
|
"learning_rate": 1.750671378091873e-05, |
|
"loss": 0.4886, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 19.57, |
|
"eval_cer": 0.34946814751085215, |
|
"eval_loss": 3.198930263519287, |
|
"eval_runtime": 372.2046, |
|
"eval_samples_per_second": 25.464, |
|
"eval_steps_per_second": 3.184, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 19.62, |
|
"grad_norm": 2.0272178649902344, |
|
"learning_rate": 1.7436042402826856e-05, |
|
"loss": 0.4745, |
|
"step": 46600 |
|
}, |
|
{ |
|
"epoch": 19.62, |
|
"eval_cer": 0.3511057447890188, |
|
"eval_loss": 3.3171424865722656, |
|
"eval_runtime": 381.183, |
|
"eval_samples_per_second": 24.865, |
|
"eval_steps_per_second": 3.109, |
|
"step": 46600 |
|
}, |
|
{ |
|
"epoch": 19.66, |
|
"grad_norm": 4.2316412925720215, |
|
"learning_rate": 1.7365371024734984e-05, |
|
"loss": 0.4875, |
|
"step": 46700 |
|
}, |
|
{ |
|
"epoch": 19.66, |
|
"eval_cer": 0.3484904774940362, |
|
"eval_loss": 2.8240554332733154, |
|
"eval_runtime": 368.9178, |
|
"eval_samples_per_second": 25.691, |
|
"eval_steps_per_second": 3.212, |
|
"step": 46700 |
|
}, |
|
{ |
|
"epoch": 19.7, |
|
"grad_norm": 2.5890541076660156, |
|
"learning_rate": 1.7294699646643112e-05, |
|
"loss": 0.4757, |
|
"step": 46800 |
|
}, |
|
{ |
|
"epoch": 19.7, |
|
"eval_cer": 0.3484415939931954, |
|
"eval_loss": 2.6541638374328613, |
|
"eval_runtime": 380.984, |
|
"eval_samples_per_second": 24.878, |
|
"eval_steps_per_second": 3.11, |
|
"step": 46800 |
|
}, |
|
{ |
|
"epoch": 19.74, |
|
"grad_norm": 5.675697326660156, |
|
"learning_rate": 1.7224028268551237e-05, |
|
"loss": 0.506, |
|
"step": 46900 |
|
}, |
|
{ |
|
"epoch": 19.74, |
|
"eval_cer": 0.3504775918032146, |
|
"eval_loss": 2.4880096912384033, |
|
"eval_runtime": 369.3344, |
|
"eval_samples_per_second": 25.662, |
|
"eval_steps_per_second": 3.208, |
|
"step": 46900 |
|
}, |
|
{ |
|
"epoch": 19.79, |
|
"grad_norm": 2.5223989486694336, |
|
"learning_rate": 1.7153356890459365e-05, |
|
"loss": 0.4852, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 19.79, |
|
"eval_cer": 0.34921639748152206, |
|
"eval_loss": 2.3917760848999023, |
|
"eval_runtime": 383.3871, |
|
"eval_samples_per_second": 24.722, |
|
"eval_steps_per_second": 3.091, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 19.83, |
|
"grad_norm": 13.549793243408203, |
|
"learning_rate": 1.7082685512367493e-05, |
|
"loss": 0.4983, |
|
"step": 47100 |
|
}, |
|
{ |
|
"epoch": 19.83, |
|
"eval_cer": 0.34820450901411754, |
|
"eval_loss": 2.331942319869995, |
|
"eval_runtime": 370.3076, |
|
"eval_samples_per_second": 25.595, |
|
"eval_steps_per_second": 3.2, |
|
"step": 47100 |
|
}, |
|
{ |
|
"epoch": 19.87, |
|
"grad_norm": 1.6992497444152832, |
|
"learning_rate": 1.701201413427562e-05, |
|
"loss": 0.4712, |
|
"step": 47200 |
|
}, |
|
{ |
|
"epoch": 19.87, |
|
"eval_cer": 0.34815073716319267, |
|
"eval_loss": 2.558990001678467, |
|
"eval_runtime": 412.1751, |
|
"eval_samples_per_second": 22.995, |
|
"eval_steps_per_second": 2.875, |
|
"step": 47200 |
|
}, |
|
{ |
|
"epoch": 19.91, |
|
"grad_norm": 3.102717399597168, |
|
"learning_rate": 1.6941342756183746e-05, |
|
"loss": 0.4767, |
|
"step": 47300 |
|
}, |
|
{ |
|
"epoch": 19.91, |
|
"eval_cer": 0.3449659770834148, |
|
"eval_loss": 2.632328748703003, |
|
"eval_runtime": 385.8283, |
|
"eval_samples_per_second": 24.565, |
|
"eval_steps_per_second": 3.071, |
|
"step": 47300 |
|
}, |
|
{ |
|
"epoch": 19.95, |
|
"grad_norm": 3.6561086177825928, |
|
"learning_rate": 1.6870671378091874e-05, |
|
"loss": 0.4914, |
|
"step": 47400 |
|
}, |
|
{ |
|
"epoch": 19.95, |
|
"eval_cer": 0.3475079191271362, |
|
"eval_loss": 2.747345447540283, |
|
"eval_runtime": 406.0174, |
|
"eval_samples_per_second": 23.344, |
|
"eval_steps_per_second": 2.919, |
|
"step": 47400 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"grad_norm": 2.65822696685791, |
|
"learning_rate": 1.6800000000000002e-05, |
|
"loss": 0.4636, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"eval_cer": 0.34717795549646085, |
|
"eval_loss": 3.2108983993530273, |
|
"eval_runtime": 391.3482, |
|
"eval_samples_per_second": 24.219, |
|
"eval_steps_per_second": 3.028, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 20.04, |
|
"grad_norm": 2.881988525390625, |
|
"learning_rate": 1.6729328621908126e-05, |
|
"loss": 0.4024, |
|
"step": 47600 |
|
}, |
|
{ |
|
"epoch": 20.04, |
|
"eval_cer": 0.3465546908607407, |
|
"eval_loss": 2.992396831512451, |
|
"eval_runtime": 406.9581, |
|
"eval_samples_per_second": 23.29, |
|
"eval_steps_per_second": 2.912, |
|
"step": 47600 |
|
}, |
|
{ |
|
"epoch": 20.08, |
|
"grad_norm": 2.220200777053833, |
|
"learning_rate": 1.6658657243816255e-05, |
|
"loss": 0.4103, |
|
"step": 47700 |
|
}, |
|
{ |
|
"epoch": 20.08, |
|
"eval_cer": 0.3478818779085683, |
|
"eval_loss": 3.0730020999908447, |
|
"eval_runtime": 382.3538, |
|
"eval_samples_per_second": 24.789, |
|
"eval_steps_per_second": 3.099, |
|
"step": 47700 |
|
}, |
|
{ |
|
"epoch": 20.12, |
|
"grad_norm": 2.015397071838379, |
|
"learning_rate": 1.6587985865724383e-05, |
|
"loss": 0.4211, |
|
"step": 47800 |
|
}, |
|
{ |
|
"epoch": 20.12, |
|
"eval_cer": 0.3458312150482969, |
|
"eval_loss": 2.9953062534332275, |
|
"eval_runtime": 408.1484, |
|
"eval_samples_per_second": 23.222, |
|
"eval_steps_per_second": 2.903, |
|
"step": 47800 |
|
}, |
|
{ |
|
"epoch": 20.16, |
|
"grad_norm": 7.887564182281494, |
|
"learning_rate": 1.651731448763251e-05, |
|
"loss": 0.415, |
|
"step": 47900 |
|
}, |
|
{ |
|
"epoch": 20.16, |
|
"eval_cer": 0.3473783778499081, |
|
"eval_loss": 3.28759765625, |
|
"eval_runtime": 390.4889, |
|
"eval_samples_per_second": 24.272, |
|
"eval_steps_per_second": 3.035, |
|
"step": 47900 |
|
}, |
|
{ |
|
"epoch": 20.21, |
|
"grad_norm": 2.610433578491211, |
|
"learning_rate": 1.644664310954064e-05, |
|
"loss": 0.4143, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 20.21, |
|
"eval_cer": 0.3463151617066208, |
|
"eval_loss": 2.6613733768463135, |
|
"eval_runtime": 407.7959, |
|
"eval_samples_per_second": 23.242, |
|
"eval_steps_per_second": 2.906, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 20.25, |
|
"grad_norm": 2.2651526927948, |
|
"learning_rate": 1.637667844522968e-05, |
|
"loss": 0.5371, |
|
"step": 48100 |
|
}, |
|
{ |
|
"epoch": 20.25, |
|
"eval_cer": 0.3461636228540143, |
|
"eval_loss": 2.631626605987549, |
|
"eval_runtime": 388.4014, |
|
"eval_samples_per_second": 24.403, |
|
"eval_steps_per_second": 3.051, |
|
"step": 48100 |
|
}, |
|
{ |
|
"epoch": 20.29, |
|
"grad_norm": 2.476924180984497, |
|
"learning_rate": 1.630600706713781e-05, |
|
"loss": 0.4173, |
|
"step": 48200 |
|
}, |
|
{ |
|
"epoch": 20.29, |
|
"eval_cer": 0.3475372492276407, |
|
"eval_loss": 2.8535077571868896, |
|
"eval_runtime": 407.6688, |
|
"eval_samples_per_second": 23.249, |
|
"eval_steps_per_second": 2.907, |
|
"step": 48200 |
|
}, |
|
{ |
|
"epoch": 20.33, |
|
"grad_norm": 2.1427981853485107, |
|
"learning_rate": 1.623533568904594e-05, |
|
"loss": 0.4149, |
|
"step": 48300 |
|
}, |
|
{ |
|
"epoch": 20.33, |
|
"eval_cer": 0.3440518556176919, |
|
"eval_loss": 2.9584333896636963, |
|
"eval_runtime": 385.1561, |
|
"eval_samples_per_second": 24.608, |
|
"eval_steps_per_second": 3.077, |
|
"step": 48300 |
|
}, |
|
{ |
|
"epoch": 20.37, |
|
"grad_norm": 6.375877857208252, |
|
"learning_rate": 1.6164664310954065e-05, |
|
"loss": 0.4005, |
|
"step": 48400 |
|
}, |
|
{ |
|
"epoch": 20.37, |
|
"eval_cer": 0.3466133510617496, |
|
"eval_loss": 3.129476547241211, |
|
"eval_runtime": 408.2033, |
|
"eval_samples_per_second": 23.219, |
|
"eval_steps_per_second": 2.903, |
|
"step": 48400 |
|
}, |
|
{ |
|
"epoch": 20.42, |
|
"grad_norm": 8.823423385620117, |
|
"learning_rate": 1.6093992932862193e-05, |
|
"loss": 0.3974, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 20.42, |
|
"eval_cer": 0.3472977200735208, |
|
"eval_loss": 2.675215482711792, |
|
"eval_runtime": 389.4912, |
|
"eval_samples_per_second": 24.334, |
|
"eval_steps_per_second": 3.042, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 20.46, |
|
"grad_norm": 4.963177680969238, |
|
"learning_rate": 1.6023321554770318e-05, |
|
"loss": 0.456, |
|
"step": 48600 |
|
}, |
|
{ |
|
"epoch": 20.46, |
|
"eval_cer": 0.34791120800907277, |
|
"eval_loss": 2.5059995651245117, |
|
"eval_runtime": 407.1464, |
|
"eval_samples_per_second": 23.279, |
|
"eval_steps_per_second": 2.911, |
|
"step": 48600 |
|
}, |
|
{ |
|
"epoch": 20.5, |
|
"grad_norm": 1.5969637632369995, |
|
"learning_rate": 1.5952650176678446e-05, |
|
"loss": 0.3946, |
|
"step": 48700 |
|
}, |
|
{ |
|
"epoch": 20.5, |
|
"eval_cer": 0.34289331664776507, |
|
"eval_loss": 3.0652801990509033, |
|
"eval_runtime": 388.6588, |
|
"eval_samples_per_second": 24.386, |
|
"eval_steps_per_second": 3.049, |
|
"step": 48700 |
|
}, |
|
{ |
|
"epoch": 20.54, |
|
"grad_norm": 5.763105392456055, |
|
"learning_rate": 1.588197879858657e-05, |
|
"loss": 0.4039, |
|
"step": 48800 |
|
}, |
|
{ |
|
"epoch": 20.54, |
|
"eval_cer": 0.34577988737241405, |
|
"eval_loss": 2.2274067401885986, |
|
"eval_runtime": 406.2895, |
|
"eval_samples_per_second": 23.328, |
|
"eval_steps_per_second": 2.917, |
|
"step": 48800 |
|
}, |
|
{ |
|
"epoch": 20.59, |
|
"grad_norm": 4.299105167388916, |
|
"learning_rate": 1.5811307420494702e-05, |
|
"loss": 0.4253, |
|
"step": 48900 |
|
}, |
|
{ |
|
"epoch": 20.59, |
|
"eval_cer": 0.3451859528371984, |
|
"eval_loss": 3.0053601264953613, |
|
"eval_runtime": 386.4818, |
|
"eval_samples_per_second": 24.524, |
|
"eval_steps_per_second": 3.066, |
|
"step": 48900 |
|
}, |
|
{ |
|
"epoch": 20.63, |
|
"grad_norm": 1.9635140895843506, |
|
"learning_rate": 1.574063604240283e-05, |
|
"loss": 0.4048, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 20.63, |
|
"eval_cer": 0.34388076336474915, |
|
"eval_loss": 2.86527156829834, |
|
"eval_runtime": 408.1952, |
|
"eval_samples_per_second": 23.219, |
|
"eval_steps_per_second": 2.903, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 20.67, |
|
"grad_norm": 2.3898985385894775, |
|
"learning_rate": 1.5669964664310955e-05, |
|
"loss": 0.4189, |
|
"step": 49100 |
|
}, |
|
{ |
|
"epoch": 20.67, |
|
"eval_cer": 0.34160034805052597, |
|
"eval_loss": 3.328908681869507, |
|
"eval_runtime": 391.6176, |
|
"eval_samples_per_second": 24.202, |
|
"eval_steps_per_second": 3.026, |
|
"step": 49100 |
|
}, |
|
{ |
|
"epoch": 20.71, |
|
"grad_norm": 5.6570048332214355, |
|
"learning_rate": 1.5599293286219083e-05, |
|
"loss": 0.4067, |
|
"step": 49200 |
|
}, |
|
{ |
|
"epoch": 20.71, |
|
"eval_cer": 0.3445235814008056, |
|
"eval_loss": 3.190380811691284, |
|
"eval_runtime": 410.6709, |
|
"eval_samples_per_second": 23.079, |
|
"eval_steps_per_second": 2.886, |
|
"step": 49200 |
|
}, |
|
{ |
|
"epoch": 20.75, |
|
"grad_norm": 6.448336124420166, |
|
"learning_rate": 1.5528621908127208e-05, |
|
"loss": 0.428, |
|
"step": 49300 |
|
}, |
|
{ |
|
"epoch": 20.75, |
|
"eval_cer": 0.3451126275859372, |
|
"eval_loss": 3.366471529006958, |
|
"eval_runtime": 393.2567, |
|
"eval_samples_per_second": 24.101, |
|
"eval_steps_per_second": 3.013, |
|
"step": 49300 |
|
}, |
|
{ |
|
"epoch": 20.8, |
|
"grad_norm": 3.681830406188965, |
|
"learning_rate": 1.5457950530035336e-05, |
|
"loss": 0.4141, |
|
"step": 49400 |
|
}, |
|
{ |
|
"epoch": 20.8, |
|
"eval_cer": 0.3429764185991944, |
|
"eval_loss": 3.280654191970825, |
|
"eval_runtime": 408.0304, |
|
"eval_samples_per_second": 23.229, |
|
"eval_steps_per_second": 2.904, |
|
"step": 49400 |
|
}, |
|
{ |
|
"epoch": 20.84, |
|
"grad_norm": 3.107179641723633, |
|
"learning_rate": 1.5387279151943464e-05, |
|
"loss": 0.4127, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 20.84, |
|
"eval_cer": 0.34173233350279614, |
|
"eval_loss": 2.8979499340057373, |
|
"eval_runtime": 387.9855, |
|
"eval_samples_per_second": 24.429, |
|
"eval_steps_per_second": 3.054, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 20.88, |
|
"grad_norm": 2.1260199546813965, |
|
"learning_rate": 1.5316607773851592e-05, |
|
"loss": 0.4222, |
|
"step": 49600 |
|
}, |
|
{ |
|
"epoch": 20.88, |
|
"eval_cer": 0.3436339016855031, |
|
"eval_loss": 2.4059560298919678, |
|
"eval_runtime": 412.8035, |
|
"eval_samples_per_second": 22.96, |
|
"eval_steps_per_second": 2.871, |
|
"step": 49600 |
|
}, |
|
{ |
|
"epoch": 20.92, |
|
"grad_norm": 1.9174062013626099, |
|
"learning_rate": 1.5245936395759718e-05, |
|
"loss": 0.4093, |
|
"step": 49700 |
|
}, |
|
{ |
|
"epoch": 20.92, |
|
"eval_cer": 0.3437194478119745, |
|
"eval_loss": 2.5948760509490967, |
|
"eval_runtime": 390.0559, |
|
"eval_samples_per_second": 24.299, |
|
"eval_steps_per_second": 3.038, |
|
"step": 49700 |
|
}, |
|
{ |
|
"epoch": 20.96, |
|
"grad_norm": 2.362006902694702, |
|
"learning_rate": 1.5175265017667845e-05, |
|
"loss": 0.4054, |
|
"step": 49800 |
|
}, |
|
{ |
|
"epoch": 20.96, |
|
"eval_cer": 0.34167611747682924, |
|
"eval_loss": 2.4431283473968506, |
|
"eval_runtime": 408.3684, |
|
"eval_samples_per_second": 23.209, |
|
"eval_steps_per_second": 2.902, |
|
"step": 49800 |
|
}, |
|
{ |
|
"epoch": 21.01, |
|
"grad_norm": 1.828452706336975, |
|
"learning_rate": 1.5104593639575973e-05, |
|
"loss": 0.389, |
|
"step": 49900 |
|
}, |
|
{ |
|
"epoch": 21.01, |
|
"eval_cer": 0.3424289233897775, |
|
"eval_loss": 2.5479896068573, |
|
"eval_runtime": 390.6666, |
|
"eval_samples_per_second": 24.261, |
|
"eval_steps_per_second": 3.033, |
|
"step": 49900 |
|
}, |
|
{ |
|
"epoch": 21.05, |
|
"grad_norm": 4.511066913604736, |
|
"learning_rate": 1.50339222614841e-05, |
|
"loss": 0.3451, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 21.05, |
|
"eval_cer": 0.34278332877087325, |
|
"eval_loss": 2.672588348388672, |
|
"eval_runtime": 415.0883, |
|
"eval_samples_per_second": 22.834, |
|
"eval_steps_per_second": 2.855, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 21.09, |
|
"grad_norm": 3.952530860900879, |
|
"learning_rate": 1.4963250883392227e-05, |
|
"loss": 0.3449, |
|
"step": 50100 |
|
}, |
|
{ |
|
"epoch": 21.09, |
|
"eval_cer": 0.3425169136912909, |
|
"eval_loss": 2.945607900619507, |
|
"eval_runtime": 386.6565, |
|
"eval_samples_per_second": 24.513, |
|
"eval_steps_per_second": 3.065, |
|
"step": 50100 |
|
}, |
|
{ |
|
"epoch": 21.13, |
|
"grad_norm": 2.7008559703826904, |
|
"learning_rate": 1.4892579505300355e-05, |
|
"loss": 0.3479, |
|
"step": 50200 |
|
}, |
|
{ |
|
"epoch": 21.13, |
|
"eval_cer": 0.34289331664776507, |
|
"eval_loss": 2.209360122680664, |
|
"eval_runtime": 412.345, |
|
"eval_samples_per_second": 22.986, |
|
"eval_steps_per_second": 2.874, |
|
"step": 50200 |
|
}, |
|
{ |
|
"epoch": 21.17, |
|
"grad_norm": 1.4452073574066162, |
|
"learning_rate": 1.482190812720848e-05, |
|
"loss": 0.3465, |
|
"step": 50300 |
|
}, |
|
{ |
|
"epoch": 21.17, |
|
"eval_cer": 0.33977454929412226, |
|
"eval_loss": 2.268566370010376, |
|
"eval_runtime": 390.1756, |
|
"eval_samples_per_second": 24.292, |
|
"eval_steps_per_second": 3.037, |
|
"step": 50300 |
|
}, |
|
{ |
|
"epoch": 21.22, |
|
"grad_norm": 4.574779987335205, |
|
"learning_rate": 1.475123674911661e-05, |
|
"loss": 0.3408, |
|
"step": 50400 |
|
}, |
|
{ |
|
"epoch": 21.22, |
|
"eval_cer": 0.34143414414766726, |
|
"eval_loss": 2.403733491897583, |
|
"eval_runtime": 415.1727, |
|
"eval_samples_per_second": 22.829, |
|
"eval_steps_per_second": 2.854, |
|
"step": 50400 |
|
}, |
|
{ |
|
"epoch": 21.26, |
|
"grad_norm": 1.5709911584854126, |
|
"learning_rate": 1.4680565371024735e-05, |
|
"loss": 0.3363, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 21.26, |
|
"eval_cer": 0.3419229791560752, |
|
"eval_loss": 2.4089136123657227, |
|
"eval_runtime": 388.419, |
|
"eval_samples_per_second": 24.401, |
|
"eval_steps_per_second": 3.051, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 21.3, |
|
"grad_norm": 4.430927276611328, |
|
"learning_rate": 1.4609893992932863e-05, |
|
"loss": 0.3461, |
|
"step": 50600 |
|
}, |
|
{ |
|
"epoch": 21.3, |
|
"eval_cer": 0.34185698642994017, |
|
"eval_loss": 2.8909881114959717, |
|
"eval_runtime": 408.6465, |
|
"eval_samples_per_second": 23.194, |
|
"eval_steps_per_second": 2.9, |
|
"step": 50600 |
|
}, |
|
{ |
|
"epoch": 21.34, |
|
"grad_norm": 1.9088200330734253, |
|
"learning_rate": 1.4539222614840989e-05, |
|
"loss": 0.3561, |
|
"step": 50700 |
|
}, |
|
{ |
|
"epoch": 21.34, |
|
"eval_cer": 0.3429764185991944, |
|
"eval_loss": 2.6278483867645264, |
|
"eval_runtime": 388.0807, |
|
"eval_samples_per_second": 24.423, |
|
"eval_steps_per_second": 3.053, |
|
"step": 50700 |
|
}, |
|
{ |
|
"epoch": 21.38, |
|
"grad_norm": 5.978403568267822, |
|
"learning_rate": 1.4468551236749117e-05, |
|
"loss": 0.3601, |
|
"step": 50800 |
|
}, |
|
{ |
|
"epoch": 21.38, |
|
"eval_cer": 0.34170789175237576, |
|
"eval_loss": 2.4659318923950195, |
|
"eval_runtime": 412.615, |
|
"eval_samples_per_second": 22.971, |
|
"eval_steps_per_second": 2.872, |
|
"step": 50800 |
|
}, |
|
{ |
|
"epoch": 21.43, |
|
"grad_norm": 1.794754981994629, |
|
"learning_rate": 1.4397879858657245e-05, |
|
"loss": 0.3487, |
|
"step": 50900 |
|
}, |
|
{ |
|
"epoch": 21.43, |
|
"eval_cer": 0.34158079465018965, |
|
"eval_loss": 2.544365406036377, |
|
"eval_runtime": 390.5089, |
|
"eval_samples_per_second": 24.271, |
|
"eval_steps_per_second": 3.035, |
|
"step": 50900 |
|
}, |
|
{ |
|
"epoch": 21.47, |
|
"grad_norm": 3.5707991123199463, |
|
"learning_rate": 1.4327208480565372e-05, |
|
"loss": 0.349, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 21.47, |
|
"eval_cer": 0.338039185014274, |
|
"eval_loss": 2.3195223808288574, |
|
"eval_runtime": 421.6099, |
|
"eval_samples_per_second": 22.48, |
|
"eval_steps_per_second": 2.811, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 21.51, |
|
"grad_norm": 1.6567233800888062, |
|
"learning_rate": 1.42565371024735e-05, |
|
"loss": 0.3534, |
|
"step": 51100 |
|
}, |
|
{ |
|
"epoch": 21.51, |
|
"eval_cer": 0.33963767549176804, |
|
"eval_loss": 2.4492955207824707, |
|
"eval_runtime": 387.5977, |
|
"eval_samples_per_second": 24.453, |
|
"eval_steps_per_second": 3.057, |
|
"step": 51100 |
|
}, |
|
{ |
|
"epoch": 21.55, |
|
"grad_norm": 2.3857908248901367, |
|
"learning_rate": 1.4185865724381625e-05, |
|
"loss": 0.3469, |
|
"step": 51200 |
|
}, |
|
{ |
|
"epoch": 21.55, |
|
"eval_cer": 0.3406764498846349, |
|
"eval_loss": 2.440829277038574, |
|
"eval_runtime": 418.2267, |
|
"eval_samples_per_second": 22.662, |
|
"eval_steps_per_second": 2.833, |
|
"step": 51200 |
|
}, |
|
{ |
|
"epoch": 21.6, |
|
"grad_norm": 1.5880221128463745, |
|
"learning_rate": 1.4115194346289753e-05, |
|
"loss": 0.3514, |
|
"step": 51300 |
|
}, |
|
{ |
|
"epoch": 21.6, |
|
"eval_cer": 0.34056157365765904, |
|
"eval_loss": 2.3072359561920166, |
|
"eval_runtime": 388.3106, |
|
"eval_samples_per_second": 24.408, |
|
"eval_steps_per_second": 3.052, |
|
"step": 51300 |
|
}, |
|
{ |
|
"epoch": 21.64, |
|
"grad_norm": 1.9676817655563354, |
|
"learning_rate": 1.4044522968197882e-05, |
|
"loss": 0.3469, |
|
"step": 51400 |
|
}, |
|
{ |
|
"epoch": 21.64, |
|
"eval_cer": 0.34236293066364243, |
|
"eval_loss": 2.332427740097046, |
|
"eval_runtime": 411.3092, |
|
"eval_samples_per_second": 23.043, |
|
"eval_steps_per_second": 2.881, |
|
"step": 51400 |
|
}, |
|
{ |
|
"epoch": 21.68, |
|
"grad_norm": 2.4417457580566406, |
|
"learning_rate": 1.3973851590106007e-05, |
|
"loss": 0.3535, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 21.68, |
|
"eval_cer": 0.3423751515388526, |
|
"eval_loss": 2.3060784339904785, |
|
"eval_runtime": 391.5586, |
|
"eval_samples_per_second": 24.206, |
|
"eval_steps_per_second": 3.026, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 21.72, |
|
"grad_norm": 5.330277919769287, |
|
"learning_rate": 1.3903180212014135e-05, |
|
"loss": 0.3529, |
|
"step": 51600 |
|
}, |
|
{ |
|
"epoch": 21.72, |
|
"eval_cer": 0.34077421688631654, |
|
"eval_loss": 2.4329657554626465, |
|
"eval_runtime": 422.4059, |
|
"eval_samples_per_second": 22.438, |
|
"eval_steps_per_second": 2.805, |
|
"step": 51600 |
|
}, |
|
{ |
|
"epoch": 21.76, |
|
"grad_norm": 2.12868595123291, |
|
"learning_rate": 1.3833215547703182e-05, |
|
"loss": 0.3733, |
|
"step": 51700 |
|
}, |
|
{ |
|
"epoch": 21.76, |
|
"eval_cer": 0.34047358335614564, |
|
"eval_loss": 2.4534361362457275, |
|
"eval_runtime": 382.1988, |
|
"eval_samples_per_second": 24.799, |
|
"eval_steps_per_second": 3.1, |
|
"step": 51700 |
|
}, |
|
{ |
|
"epoch": 21.81, |
|
"grad_norm": 1.6962841749191284, |
|
"learning_rate": 1.3762544169611308e-05, |
|
"loss": 0.3523, |
|
"step": 51800 |
|
}, |
|
{ |
|
"epoch": 21.81, |
|
"eval_cer": 0.3399554182472332, |
|
"eval_loss": 2.549654483795166, |
|
"eval_runtime": 415.7902, |
|
"eval_samples_per_second": 22.795, |
|
"eval_steps_per_second": 2.85, |
|
"step": 51800 |
|
}, |
|
{ |
|
"epoch": 21.85, |
|
"grad_norm": 1.762255072593689, |
|
"learning_rate": 1.3691872791519436e-05, |
|
"loss": 0.3523, |
|
"step": 51900 |
|
}, |
|
{ |
|
"epoch": 21.85, |
|
"eval_cer": 0.33849624574713544, |
|
"eval_loss": 2.5730228424072266, |
|
"eval_runtime": 391.5055, |
|
"eval_samples_per_second": 24.209, |
|
"eval_steps_per_second": 3.027, |
|
"step": 51900 |
|
}, |
|
{ |
|
"epoch": 21.89, |
|
"grad_norm": 1.794063687324524, |
|
"learning_rate": 1.3621201413427561e-05, |
|
"loss": 0.3608, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 21.89, |
|
"eval_cer": 0.3389410856047867, |
|
"eval_loss": 2.29063081741333, |
|
"eval_runtime": 429.6088, |
|
"eval_samples_per_second": 22.062, |
|
"eval_steps_per_second": 2.758, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 21.93, |
|
"grad_norm": 5.02766227722168, |
|
"learning_rate": 1.3550530035335691e-05, |
|
"loss": 0.3614, |
|
"step": 52100 |
|
}, |
|
{ |
|
"epoch": 21.93, |
|
"eval_cer": 0.33830315591881427, |
|
"eval_loss": 2.339839458465576, |
|
"eval_runtime": 388.2936, |
|
"eval_samples_per_second": 24.409, |
|
"eval_steps_per_second": 3.052, |
|
"step": 52100 |
|
}, |
|
{ |
|
"epoch": 21.97, |
|
"grad_norm": 1.7431000471115112, |
|
"learning_rate": 1.3479858657243816e-05, |
|
"loss": 0.3401, |
|
"step": 52200 |
|
}, |
|
{ |
|
"epoch": 21.97, |
|
"eval_cer": 0.3359225294278675, |
|
"eval_loss": 2.4058258533477783, |
|
"eval_runtime": 412.0183, |
|
"eval_samples_per_second": 23.004, |
|
"eval_steps_per_second": 2.876, |
|
"step": 52200 |
|
}, |
|
{ |
|
"epoch": 22.02, |
|
"grad_norm": 2.714993953704834, |
|
"learning_rate": 1.3409187279151944e-05, |
|
"loss": 0.3222, |
|
"step": 52300 |
|
}, |
|
{ |
|
"epoch": 22.02, |
|
"eval_cer": 0.33601051972938095, |
|
"eval_loss": 2.8305118083953857, |
|
"eval_runtime": 391.1586, |
|
"eval_samples_per_second": 24.231, |
|
"eval_steps_per_second": 3.029, |
|
"step": 52300 |
|
}, |
|
{ |
|
"epoch": 22.06, |
|
"grad_norm": 1.8999407291412354, |
|
"learning_rate": 1.3338515901060072e-05, |
|
"loss": 0.3022, |
|
"step": 52400 |
|
}, |
|
{ |
|
"epoch": 22.06, |
|
"eval_cer": 0.33706151499745807, |
|
"eval_loss": 2.4940497875213623, |
|
"eval_runtime": 422.3325, |
|
"eval_samples_per_second": 22.442, |
|
"eval_steps_per_second": 2.806, |
|
"step": 52400 |
|
}, |
|
{ |
|
"epoch": 22.1, |
|
"grad_norm": 1.6719143390655518, |
|
"learning_rate": 1.3267844522968198e-05, |
|
"loss": 0.3009, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 22.1, |
|
"eval_cer": 0.3367682139924133, |
|
"eval_loss": 2.5411734580993652, |
|
"eval_runtime": 390.8575, |
|
"eval_samples_per_second": 24.249, |
|
"eval_steps_per_second": 3.032, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 22.14, |
|
"grad_norm": 2.155449151992798, |
|
"learning_rate": 1.3197173144876326e-05, |
|
"loss": 0.2968, |
|
"step": 52600 |
|
}, |
|
{ |
|
"epoch": 22.14, |
|
"eval_cer": 0.3380514058894842, |
|
"eval_loss": 2.6296260356903076, |
|
"eval_runtime": 425.2856, |
|
"eval_samples_per_second": 22.286, |
|
"eval_steps_per_second": 2.786, |
|
"step": 52600 |
|
}, |
|
{ |
|
"epoch": 22.18, |
|
"grad_norm": 11.153074264526367, |
|
"learning_rate": 1.3126501766784453e-05, |
|
"loss": 0.2936, |
|
"step": 52700 |
|
}, |
|
{ |
|
"epoch": 22.18, |
|
"eval_cer": 0.33938836963748, |
|
"eval_loss": 2.600175142288208, |
|
"eval_runtime": 394.8419, |
|
"eval_samples_per_second": 24.005, |
|
"eval_steps_per_second": 3.001, |
|
"step": 52700 |
|
}, |
|
{ |
|
"epoch": 22.23, |
|
"grad_norm": 1.9212164878845215, |
|
"learning_rate": 1.305583038869258e-05, |
|
"loss": 0.3058, |
|
"step": 52800 |
|
}, |
|
{ |
|
"epoch": 22.23, |
|
"eval_cer": 0.3395081342145399, |
|
"eval_loss": 2.419398546218872, |
|
"eval_runtime": 417.7544, |
|
"eval_samples_per_second": 22.688, |
|
"eval_steps_per_second": 2.837, |
|
"step": 52800 |
|
}, |
|
{ |
|
"epoch": 22.27, |
|
"grad_norm": 4.1538004875183105, |
|
"learning_rate": 1.2985159010600709e-05, |
|
"loss": 0.2954, |
|
"step": 52900 |
|
}, |
|
{ |
|
"epoch": 22.27, |
|
"eval_cer": 0.33905107348167846, |
|
"eval_loss": 2.4820761680603027, |
|
"eval_runtime": 388.8242, |
|
"eval_samples_per_second": 24.376, |
|
"eval_steps_per_second": 3.048, |
|
"step": 52900 |
|
}, |
|
{ |
|
"epoch": 22.31, |
|
"grad_norm": 2.294267177581787, |
|
"learning_rate": 1.2914487632508834e-05, |
|
"loss": 0.3038, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 22.31, |
|
"eval_cer": 0.3415172460990966, |
|
"eval_loss": 2.524266004562378, |
|
"eval_runtime": 399.4446, |
|
"eval_samples_per_second": 23.728, |
|
"eval_steps_per_second": 2.967, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 22.35, |
|
"grad_norm": 2.0384035110473633, |
|
"learning_rate": 1.2843816254416963e-05, |
|
"loss": 0.2994, |
|
"step": 53100 |
|
}, |
|
{ |
|
"epoch": 22.35, |
|
"eval_cer": 0.3371079543232568, |
|
"eval_loss": 2.487698554992676, |
|
"eval_runtime": 374.207, |
|
"eval_samples_per_second": 25.328, |
|
"eval_steps_per_second": 3.167, |
|
"step": 53100 |
|
}, |
|
{ |
|
"epoch": 22.4, |
|
"grad_norm": 1.3863327503204346, |
|
"learning_rate": 1.2773144876325088e-05, |
|
"loss": 0.2997, |
|
"step": 53200 |
|
}, |
|
{ |
|
"epoch": 22.4, |
|
"eval_cer": 0.33839359039536976, |
|
"eval_loss": 2.4660918712615967, |
|
"eval_runtime": 423.5986, |
|
"eval_samples_per_second": 22.375, |
|
"eval_steps_per_second": 2.797, |
|
"step": 53200 |
|
}, |
|
{ |
|
"epoch": 22.44, |
|
"grad_norm": 1.9845781326293945, |
|
"learning_rate": 1.2702473498233216e-05, |
|
"loss": 0.2999, |
|
"step": 53300 |
|
}, |
|
{ |
|
"epoch": 22.44, |
|
"eval_cer": 0.3362524930585429, |
|
"eval_loss": 2.4160196781158447, |
|
"eval_runtime": 387.3168, |
|
"eval_samples_per_second": 24.471, |
|
"eval_steps_per_second": 3.06, |
|
"step": 53300 |
|
}, |
|
{ |
|
"epoch": 22.48, |
|
"grad_norm": 2.225783586502075, |
|
"learning_rate": 1.2631802120141343e-05, |
|
"loss": 0.2854, |
|
"step": 53400 |
|
}, |
|
{ |
|
"epoch": 22.48, |
|
"eval_cer": 0.33407228892104335, |
|
"eval_loss": 2.5062761306762695, |
|
"eval_runtime": 421.3473, |
|
"eval_samples_per_second": 22.495, |
|
"eval_steps_per_second": 2.812, |
|
"step": 53400 |
|
}, |
|
{ |
|
"epoch": 22.52, |
|
"grad_norm": 9.648059844970703, |
|
"learning_rate": 1.256113074204947e-05, |
|
"loss": 0.2985, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 22.52, |
|
"eval_cer": 0.3380709592898205, |
|
"eval_loss": 2.483365774154663, |
|
"eval_runtime": 392.706, |
|
"eval_samples_per_second": 24.135, |
|
"eval_steps_per_second": 3.018, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 22.56, |
|
"grad_norm": 1.8772177696228027, |
|
"learning_rate": 1.2490459363957597e-05, |
|
"loss": 0.3069, |
|
"step": 53600 |
|
}, |
|
{ |
|
"epoch": 22.56, |
|
"eval_cer": 0.3383080442688984, |
|
"eval_loss": 2.481276035308838, |
|
"eval_runtime": 425.0385, |
|
"eval_samples_per_second": 22.299, |
|
"eval_steps_per_second": 2.788, |
|
"step": 53600 |
|
}, |
|
{ |
|
"epoch": 22.61, |
|
"grad_norm": 1.4407734870910645, |
|
"learning_rate": 1.2419787985865725e-05, |
|
"loss": 0.3018, |
|
"step": 53700 |
|
}, |
|
{ |
|
"epoch": 22.61, |
|
"eval_cer": 0.3348739783348324, |
|
"eval_loss": 2.4129292964935303, |
|
"eval_runtime": 399.5718, |
|
"eval_samples_per_second": 23.72, |
|
"eval_steps_per_second": 2.966, |
|
"step": 53700 |
|
}, |
|
{ |
|
"epoch": 22.65, |
|
"grad_norm": 5.736636638641357, |
|
"learning_rate": 1.2349116607773852e-05, |
|
"loss": 0.2925, |
|
"step": 53800 |
|
}, |
|
{ |
|
"epoch": 22.65, |
|
"eval_cer": 0.3356121191975284, |
|
"eval_loss": 2.4841222763061523, |
|
"eval_runtime": 427.1982, |
|
"eval_samples_per_second": 22.186, |
|
"eval_steps_per_second": 2.774, |
|
"step": 53800 |
|
}, |
|
{ |
|
"epoch": 22.69, |
|
"grad_norm": 6.094464302062988, |
|
"learning_rate": 1.227844522968198e-05, |
|
"loss": 0.2981, |
|
"step": 53900 |
|
}, |
|
{ |
|
"epoch": 22.69, |
|
"eval_cer": 0.3367071096163623, |
|
"eval_loss": 2.4353983402252197, |
|
"eval_runtime": 389.5123, |
|
"eval_samples_per_second": 24.333, |
|
"eval_steps_per_second": 3.042, |
|
"step": 53900 |
|
}, |
|
{ |
|
"epoch": 22.73, |
|
"grad_norm": 1.8495489358901978, |
|
"learning_rate": 1.2207773851590106e-05, |
|
"loss": 0.2977, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 22.73, |
|
"eval_cer": 0.3380245199640217, |
|
"eval_loss": 2.4169256687164307, |
|
"eval_runtime": 432.3991, |
|
"eval_samples_per_second": 21.92, |
|
"eval_steps_per_second": 2.741, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 22.77, |
|
"grad_norm": 1.7505403757095337, |
|
"learning_rate": 1.2137102473498234e-05, |
|
"loss": 0.3062, |
|
"step": 54100 |
|
}, |
|
{ |
|
"epoch": 22.77, |
|
"eval_cer": 0.3384766923467991, |
|
"eval_loss": 2.552931070327759, |
|
"eval_runtime": 394.4096, |
|
"eval_samples_per_second": 24.031, |
|
"eval_steps_per_second": 3.004, |
|
"step": 54100 |
|
}, |
|
{ |
|
"epoch": 22.82, |
|
"grad_norm": 3.5150487422943115, |
|
"learning_rate": 1.206643109540636e-05, |
|
"loss": 0.3035, |
|
"step": 54200 |
|
}, |
|
{ |
|
"epoch": 22.82, |
|
"eval_cer": 0.3343362598255837, |
|
"eval_loss": 2.274048328399658, |
|
"eval_runtime": 434.158, |
|
"eval_samples_per_second": 21.831, |
|
"eval_steps_per_second": 2.729, |
|
"step": 54200 |
|
}, |
|
{ |
|
"epoch": 22.86, |
|
"grad_norm": 2.194784641265869, |
|
"learning_rate": 1.1995759717314487e-05, |
|
"loss": 0.3109, |
|
"step": 54300 |
|
}, |
|
{ |
|
"epoch": 22.86, |
|
"eval_cer": 0.33527482304172695, |
|
"eval_loss": 2.228667736053467, |
|
"eval_runtime": 394.9356, |
|
"eval_samples_per_second": 23.999, |
|
"eval_steps_per_second": 3.0, |
|
"step": 54300 |
|
}, |
|
{ |
|
"epoch": 22.9, |
|
"grad_norm": 4.459865570068359, |
|
"learning_rate": 1.1925088339222615e-05, |
|
"loss": 0.3105, |
|
"step": 54400 |
|
}, |
|
{ |
|
"epoch": 22.9, |
|
"eval_cer": 0.33500596378710257, |
|
"eval_loss": 2.3388936519622803, |
|
"eval_runtime": 432.3816, |
|
"eval_samples_per_second": 21.92, |
|
"eval_steps_per_second": 2.741, |
|
"step": 54400 |
|
}, |
|
{ |
|
"epoch": 22.94, |
|
"grad_norm": 2.0811266899108887, |
|
"learning_rate": 1.1854416961130743e-05, |
|
"loss": 0.3078, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 22.94, |
|
"eval_cer": 0.3347126627820578, |
|
"eval_loss": 2.5939130783081055, |
|
"eval_runtime": 402.0697, |
|
"eval_samples_per_second": 23.573, |
|
"eval_steps_per_second": 2.947, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 22.98, |
|
"grad_norm": 4.0244011878967285, |
|
"learning_rate": 1.178374558303887e-05, |
|
"loss": 0.3026, |
|
"step": 54600 |
|
}, |
|
{ |
|
"epoch": 22.98, |
|
"eval_cer": 0.33435581322592, |
|
"eval_loss": 2.545149564743042, |
|
"eval_runtime": 441.4018, |
|
"eval_samples_per_second": 21.473, |
|
"eval_steps_per_second": 2.685, |
|
"step": 54600 |
|
}, |
|
{ |
|
"epoch": 23.03, |
|
"grad_norm": 2.544414758682251, |
|
"learning_rate": 1.1713074204946998e-05, |
|
"loss": 0.2848, |
|
"step": 54700 |
|
}, |
|
{ |
|
"epoch": 23.03, |
|
"eval_cer": 0.33448291032810606, |
|
"eval_loss": 2.603243112564087, |
|
"eval_runtime": 394.5283, |
|
"eval_samples_per_second": 24.024, |
|
"eval_steps_per_second": 3.004, |
|
"step": 54700 |
|
}, |
|
{ |
|
"epoch": 23.07, |
|
"grad_norm": 1.4476730823516846, |
|
"learning_rate": 1.1642402826855124e-05, |
|
"loss": 0.2693, |
|
"step": 54800 |
|
}, |
|
{ |
|
"epoch": 23.07, |
|
"eval_cer": 0.3340185170701185, |
|
"eval_loss": 2.598785638809204, |
|
"eval_runtime": 422.01, |
|
"eval_samples_per_second": 22.459, |
|
"eval_steps_per_second": 2.808, |
|
"step": 54800 |
|
}, |
|
{ |
|
"epoch": 23.11, |
|
"grad_norm": 2.027191400527954, |
|
"learning_rate": 1.157173144876325e-05, |
|
"loss": 0.2632, |
|
"step": 54900 |
|
}, |
|
{ |
|
"epoch": 23.11, |
|
"eval_cer": 0.33527237886668493, |
|
"eval_loss": 2.615438461303711, |
|
"eval_runtime": 400.4466, |
|
"eval_samples_per_second": 23.669, |
|
"eval_steps_per_second": 2.959, |
|
"step": 54900 |
|
}, |
|
{ |
|
"epoch": 23.15, |
|
"grad_norm": 2.888965606689453, |
|
"learning_rate": 1.1501060070671379e-05, |
|
"loss": 0.2584, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 23.15, |
|
"eval_cer": 0.3347419928825623, |
|
"eval_loss": 2.6486945152282715, |
|
"eval_runtime": 425.0006, |
|
"eval_samples_per_second": 22.301, |
|
"eval_steps_per_second": 2.788, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 23.2, |
|
"grad_norm": 1.428572654724121, |
|
"learning_rate": 1.1430388692579507e-05, |
|
"loss": 0.2592, |
|
"step": 55100 |
|
}, |
|
{ |
|
"epoch": 23.2, |
|
"eval_cer": 0.33205340033631847, |
|
"eval_loss": 2.5768284797668457, |
|
"eval_runtime": 410.5876, |
|
"eval_samples_per_second": 23.084, |
|
"eval_steps_per_second": 2.886, |
|
"step": 55100 |
|
}, |
|
{ |
|
"epoch": 23.24, |
|
"grad_norm": 1.8620479106903076, |
|
"learning_rate": 1.1359717314487633e-05, |
|
"loss": 0.2719, |
|
"step": 55200 |
|
}, |
|
{ |
|
"epoch": 23.24, |
|
"eval_cer": 0.3328697548003598, |
|
"eval_loss": 3.229948043823242, |
|
"eval_runtime": 423.4357, |
|
"eval_samples_per_second": 22.384, |
|
"eval_steps_per_second": 2.799, |
|
"step": 55200 |
|
}, |
|
{ |
|
"epoch": 23.28, |
|
"grad_norm": 1.727643609046936, |
|
"learning_rate": 1.128904593639576e-05, |
|
"loss": 0.2628, |
|
"step": 55300 |
|
}, |
|
{ |
|
"epoch": 23.28, |
|
"eval_cer": 0.33284286887489734, |
|
"eval_loss": 2.7340400218963623, |
|
"eval_runtime": 394.6323, |
|
"eval_samples_per_second": 24.017, |
|
"eval_steps_per_second": 3.003, |
|
"step": 55300 |
|
}, |
|
{ |
|
"epoch": 23.32, |
|
"grad_norm": 2.5659921169281006, |
|
"learning_rate": 1.1218374558303888e-05, |
|
"loss": 0.2612, |
|
"step": 55400 |
|
}, |
|
{ |
|
"epoch": 23.32, |
|
"eval_cer": 0.3338303155918814, |
|
"eval_loss": 2.4699885845184326, |
|
"eval_runtime": 423.6808, |
|
"eval_samples_per_second": 22.371, |
|
"eval_steps_per_second": 2.797, |
|
"step": 55400 |
|
}, |
|
{ |
|
"epoch": 23.36, |
|
"grad_norm": 1.429592490196228, |
|
"learning_rate": 1.1147703180212014e-05, |
|
"loss": 0.2702, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 23.36, |
|
"eval_cer": 0.33553879394626723, |
|
"eval_loss": 2.7780275344848633, |
|
"eval_runtime": 398.5381, |
|
"eval_samples_per_second": 23.782, |
|
"eval_steps_per_second": 2.973, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 23.41, |
|
"grad_norm": 2.3033080101013184, |
|
"learning_rate": 1.107703180212014e-05, |
|
"loss": 0.2574, |
|
"step": 55600 |
|
}, |
|
{ |
|
"epoch": 23.41, |
|
"eval_cer": 0.3336567791638966, |
|
"eval_loss": 2.5886073112487793, |
|
"eval_runtime": 422.633, |
|
"eval_samples_per_second": 22.426, |
|
"eval_steps_per_second": 2.804, |
|
"step": 55600 |
|
}, |
|
{ |
|
"epoch": 23.45, |
|
"grad_norm": 3.2907676696777344, |
|
"learning_rate": 1.100636042402827e-05, |
|
"loss": 0.2612, |
|
"step": 55700 |
|
}, |
|
{ |
|
"epoch": 23.45, |
|
"eval_cer": 0.33410161902154784, |
|
"eval_loss": 2.841667413711548, |
|
"eval_runtime": 403.4827, |
|
"eval_samples_per_second": 23.49, |
|
"eval_steps_per_second": 2.937, |
|
"step": 55700 |
|
}, |
|
{ |
|
"epoch": 23.49, |
|
"grad_norm": 1.8252880573272705, |
|
"learning_rate": 1.0935689045936397e-05, |
|
"loss": 0.2634, |
|
"step": 55800 |
|
}, |
|
{ |
|
"epoch": 23.49, |
|
"eval_cer": 0.3340087403699503, |
|
"eval_loss": 2.7159037590026855, |
|
"eval_runtime": 426.7576, |
|
"eval_samples_per_second": 22.209, |
|
"eval_steps_per_second": 2.777, |
|
"step": 55800 |
|
}, |
|
{ |
|
"epoch": 23.53, |
|
"grad_norm": 1.250098466873169, |
|
"learning_rate": 1.0865017667844523e-05, |
|
"loss": 0.2592, |
|
"step": 55900 |
|
}, |
|
{ |
|
"epoch": 23.53, |
|
"eval_cer": 0.3338596456923859, |
|
"eval_loss": 2.6912946701049805, |
|
"eval_runtime": 397.5959, |
|
"eval_samples_per_second": 23.838, |
|
"eval_steps_per_second": 2.98, |
|
"step": 55900 |
|
}, |
|
{ |
|
"epoch": 23.57, |
|
"grad_norm": 3.1888105869293213, |
|
"learning_rate": 1.0794346289752651e-05, |
|
"loss": 0.2537, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 23.57, |
|
"eval_cer": 0.3346197841304603, |
|
"eval_loss": 2.708544969558716, |
|
"eval_runtime": 427.2487, |
|
"eval_samples_per_second": 22.184, |
|
"eval_steps_per_second": 2.774, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 23.62, |
|
"grad_norm": 9.785385131835938, |
|
"learning_rate": 1.0723674911660778e-05, |
|
"loss": 0.2622, |
|
"step": 56100 |
|
}, |
|
{ |
|
"epoch": 23.62, |
|
"eval_cer": 0.3341309491220523, |
|
"eval_loss": 2.625296115875244, |
|
"eval_runtime": 401.2011, |
|
"eval_samples_per_second": 23.624, |
|
"eval_steps_per_second": 2.954, |
|
"step": 56100 |
|
}, |
|
{ |
|
"epoch": 23.66, |
|
"grad_norm": 1.4341765642166138, |
|
"learning_rate": 1.0653003533568904e-05, |
|
"loss": 0.26, |
|
"step": 56200 |
|
}, |
|
{ |
|
"epoch": 23.66, |
|
"eval_cer": 0.33526260216651677, |
|
"eval_loss": 2.6399149894714355, |
|
"eval_runtime": 433.4463, |
|
"eval_samples_per_second": 21.867, |
|
"eval_steps_per_second": 2.734, |
|
"step": 56200 |
|
}, |
|
{ |
|
"epoch": 23.7, |
|
"grad_norm": 1.7979743480682373, |
|
"learning_rate": 1.0582332155477032e-05, |
|
"loss": 0.2688, |
|
"step": 56300 |
|
}, |
|
{ |
|
"epoch": 23.7, |
|
"eval_cer": 0.3343387040006257, |
|
"eval_loss": 2.507894515991211, |
|
"eval_runtime": 402.3559, |
|
"eval_samples_per_second": 23.556, |
|
"eval_steps_per_second": 2.945, |
|
"step": 56300 |
|
}, |
|
{ |
|
"epoch": 23.74, |
|
"grad_norm": 2.900815725326538, |
|
"learning_rate": 1.051166077738516e-05, |
|
"loss": 0.2671, |
|
"step": 56400 |
|
}, |
|
{ |
|
"epoch": 23.74, |
|
"eval_cer": 0.33364211411364436, |
|
"eval_loss": 2.6333584785461426, |
|
"eval_runtime": 425.9033, |
|
"eval_samples_per_second": 22.254, |
|
"eval_steps_per_second": 2.782, |
|
"step": 56400 |
|
}, |
|
{ |
|
"epoch": 23.78, |
|
"grad_norm": 1.366709589958191, |
|
"learning_rate": 1.0440989399293287e-05, |
|
"loss": 0.2509, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 23.78, |
|
"eval_cer": 0.3335125728364163, |
|
"eval_loss": 2.557394504547119, |
|
"eval_runtime": 402.0125, |
|
"eval_samples_per_second": 23.576, |
|
"eval_steps_per_second": 2.948, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 23.83, |
|
"grad_norm": 3.0282020568847656, |
|
"learning_rate": 1.0370318021201413e-05, |
|
"loss": 0.2639, |
|
"step": 56600 |
|
}, |
|
{ |
|
"epoch": 23.83, |
|
"eval_cer": 0.33265222322161825, |
|
"eval_loss": 2.564777374267578, |
|
"eval_runtime": 430.1757, |
|
"eval_samples_per_second": 22.033, |
|
"eval_steps_per_second": 2.755, |
|
"step": 56600 |
|
}, |
|
{ |
|
"epoch": 23.87, |
|
"grad_norm": 1.940981388092041, |
|
"learning_rate": 1.0299646643109541e-05, |
|
"loss": 0.2606, |
|
"step": 56700 |
|
}, |
|
{ |
|
"epoch": 23.87, |
|
"eval_cer": 0.33390608501818464, |
|
"eval_loss": 2.5213394165039062, |
|
"eval_runtime": 397.5174, |
|
"eval_samples_per_second": 23.843, |
|
"eval_steps_per_second": 2.981, |
|
"step": 56700 |
|
}, |
|
{ |
|
"epoch": 23.91, |
|
"grad_norm": 1.5934607982635498, |
|
"learning_rate": 1.0228975265017668e-05, |
|
"loss": 0.256, |
|
"step": 56800 |
|
}, |
|
{ |
|
"epoch": 23.91, |
|
"eval_cer": 0.33383764811700756, |
|
"eval_loss": 2.509065866470337, |
|
"eval_runtime": 427.9199, |
|
"eval_samples_per_second": 22.149, |
|
"eval_steps_per_second": 2.769, |
|
"step": 56800 |
|
}, |
|
{ |
|
"epoch": 23.95, |
|
"grad_norm": 6.75713586807251, |
|
"learning_rate": 1.0158303886925796e-05, |
|
"loss": 0.262, |
|
"step": 56900 |
|
}, |
|
{ |
|
"epoch": 23.95, |
|
"eval_cer": 0.33297974267725156, |
|
"eval_loss": 2.544567108154297, |
|
"eval_runtime": 400.6912, |
|
"eval_samples_per_second": 23.654, |
|
"eval_steps_per_second": 2.957, |
|
"step": 56900 |
|
}, |
|
{ |
|
"epoch": 23.99, |
|
"grad_norm": 1.9022639989852905, |
|
"learning_rate": 1.0087632508833924e-05, |
|
"loss": 0.2657, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 23.99, |
|
"eval_cer": 0.33296507762699934, |
|
"eval_loss": 2.5121145248413086, |
|
"eval_runtime": 423.803, |
|
"eval_samples_per_second": 22.364, |
|
"eval_steps_per_second": 2.796, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 24.04, |
|
"grad_norm": 1.854609727859497, |
|
"learning_rate": 1.001696113074205e-05, |
|
"loss": 0.2253, |
|
"step": 57100 |
|
}, |
|
{ |
|
"epoch": 24.04, |
|
"eval_cer": 0.3338425364670916, |
|
"eval_loss": 2.654775381088257, |
|
"eval_runtime": 395.2094, |
|
"eval_samples_per_second": 23.982, |
|
"eval_steps_per_second": 2.998, |
|
"step": 57100 |
|
}, |
|
{ |
|
"epoch": 24.08, |
|
"grad_norm": 5.454607009887695, |
|
"learning_rate": 9.946289752650177e-06, |
|
"loss": 0.2339, |
|
"step": 57200 |
|
}, |
|
{ |
|
"epoch": 24.08, |
|
"eval_cer": 0.3325251261194322, |
|
"eval_loss": 2.6627321243286133, |
|
"eval_runtime": 422.5781, |
|
"eval_samples_per_second": 22.429, |
|
"eval_steps_per_second": 2.804, |
|
"step": 57200 |
|
}, |
|
{ |
|
"epoch": 24.12, |
|
"grad_norm": 4.305576801300049, |
|
"learning_rate": 9.875618374558305e-06, |
|
"loss": 0.2229, |
|
"step": 57300 |
|
}, |
|
{ |
|
"epoch": 24.12, |
|
"eval_cer": 0.33326815533221227, |
|
"eval_loss": 2.737283945083618, |
|
"eval_runtime": 400.1034, |
|
"eval_samples_per_second": 23.689, |
|
"eval_steps_per_second": 2.962, |
|
"step": 57300 |
|
} |
|
], |
|
"logging_steps": 100, |
|
"max_steps": 71250, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 30, |
|
"save_steps": 100, |
|
"total_flos": 6.279302169302917e+20, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|