whisper-medium-pt-cv16-fleurs / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.6690790241624835,
"eval_steps": 1000,
"global_step": 5000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.02,
"grad_norm": 38.9212532043457,
"learning_rate": 4.6e-09,
"loss": 1.2718,
"step": 25
},
{
"epoch": 0.05,
"grad_norm": 38.4885368347168,
"learning_rate": 9.599999999999998e-09,
"loss": 1.2556,
"step": 50
},
{
"epoch": 0.07,
"grad_norm": 41.782493591308594,
"learning_rate": 1.46e-08,
"loss": 1.2688,
"step": 75
},
{
"epoch": 0.09,
"grad_norm": 37.6844367980957,
"learning_rate": 1.9599999999999997e-08,
"loss": 1.2761,
"step": 100
},
{
"epoch": 0.12,
"grad_norm": 33.974815368652344,
"learning_rate": 2.46e-08,
"loss": 1.2864,
"step": 125
},
{
"epoch": 0.14,
"grad_norm": 35.566436767578125,
"learning_rate": 2.96e-08,
"loss": 1.2951,
"step": 150
},
{
"epoch": 0.16,
"grad_norm": 36.60067367553711,
"learning_rate": 3.46e-08,
"loss": 1.3226,
"step": 175
},
{
"epoch": 0.19,
"grad_norm": 34.72969055175781,
"learning_rate": 3.9600000000000004e-08,
"loss": 1.2698,
"step": 200
},
{
"epoch": 0.21,
"grad_norm": 34.141136169433594,
"learning_rate": 4.4599999999999996e-08,
"loss": 1.2497,
"step": 225
},
{
"epoch": 0.23,
"grad_norm": 32.15596389770508,
"learning_rate": 4.9599999999999994e-08,
"loss": 1.2155,
"step": 250
},
{
"epoch": 0.26,
"grad_norm": 31.284732818603516,
"learning_rate": 5.46e-08,
"loss": 1.2187,
"step": 275
},
{
"epoch": 0.28,
"grad_norm": 29.053382873535156,
"learning_rate": 5.96e-08,
"loss": 1.1431,
"step": 300
},
{
"epoch": 0.3,
"grad_norm": 28.53986167907715,
"learning_rate": 6.46e-08,
"loss": 1.1342,
"step": 325
},
{
"epoch": 0.33,
"grad_norm": 26.937896728515625,
"learning_rate": 6.959999999999999e-08,
"loss": 1.0694,
"step": 350
},
{
"epoch": 0.35,
"grad_norm": 28.588802337646484,
"learning_rate": 7.459999999999999e-08,
"loss": 0.9657,
"step": 375
},
{
"epoch": 0.37,
"grad_norm": 28.448694229125977,
"learning_rate": 7.96e-08,
"loss": 0.9004,
"step": 400
},
{
"epoch": 0.4,
"grad_norm": 27.52383804321289,
"learning_rate": 8.459999999999999e-08,
"loss": 0.8309,
"step": 425
},
{
"epoch": 0.42,
"grad_norm": 30.11163902282715,
"learning_rate": 8.96e-08,
"loss": 0.6978,
"step": 450
},
{
"epoch": 0.44,
"grad_norm": 23.116390228271484,
"learning_rate": 9.46e-08,
"loss": 0.6042,
"step": 475
},
{
"epoch": 0.47,
"grad_norm": 22.033464431762695,
"learning_rate": 9.959999999999999e-08,
"loss": 0.5676,
"step": 500
},
{
"epoch": 0.49,
"grad_norm": 21.632957458496094,
"learning_rate": 1.046e-07,
"loss": 0.4607,
"step": 525
},
{
"epoch": 0.51,
"grad_norm": 17.29313087463379,
"learning_rate": 1.096e-07,
"loss": 0.4817,
"step": 550
},
{
"epoch": 0.54,
"grad_norm": 19.6722412109375,
"learning_rate": 1.146e-07,
"loss": 0.4447,
"step": 575
},
{
"epoch": 0.56,
"grad_norm": 16.24555206298828,
"learning_rate": 1.1959999999999999e-07,
"loss": 0.4028,
"step": 600
},
{
"epoch": 0.58,
"grad_norm": 15.58441162109375,
"learning_rate": 1.246e-07,
"loss": 0.3872,
"step": 625
},
{
"epoch": 0.61,
"grad_norm": 14.177542686462402,
"learning_rate": 1.296e-07,
"loss": 0.3831,
"step": 650
},
{
"epoch": 0.63,
"grad_norm": 14.774901390075684,
"learning_rate": 1.346e-07,
"loss": 0.3505,
"step": 675
},
{
"epoch": 0.65,
"grad_norm": 13.562920570373535,
"learning_rate": 1.396e-07,
"loss": 0.3468,
"step": 700
},
{
"epoch": 0.68,
"grad_norm": 12.933964729309082,
"learning_rate": 1.446e-07,
"loss": 0.3402,
"step": 725
},
{
"epoch": 0.7,
"grad_norm": 12.395262718200684,
"learning_rate": 1.4960000000000002e-07,
"loss": 0.3317,
"step": 750
},
{
"epoch": 0.72,
"grad_norm": 14.720085144042969,
"learning_rate": 1.5459999999999997e-07,
"loss": 0.2923,
"step": 775
},
{
"epoch": 0.75,
"grad_norm": 14.832243919372559,
"learning_rate": 1.5959999999999997e-07,
"loss": 0.3061,
"step": 800
},
{
"epoch": 0.77,
"grad_norm": 11.47397518157959,
"learning_rate": 1.6459999999999998e-07,
"loss": 0.2994,
"step": 825
},
{
"epoch": 0.79,
"grad_norm": 17.4206485748291,
"learning_rate": 1.6959999999999998e-07,
"loss": 0.2873,
"step": 850
},
{
"epoch": 0.82,
"grad_norm": 12.757941246032715,
"learning_rate": 1.746e-07,
"loss": 0.263,
"step": 875
},
{
"epoch": 0.84,
"grad_norm": 15.563201904296875,
"learning_rate": 1.796e-07,
"loss": 0.2659,
"step": 900
},
{
"epoch": 0.86,
"grad_norm": 12.566412925720215,
"learning_rate": 1.8459999999999997e-07,
"loss": 0.269,
"step": 925
},
{
"epoch": 0.89,
"grad_norm": 11.327591896057129,
"learning_rate": 1.8959999999999998e-07,
"loss": 0.2678,
"step": 950
},
{
"epoch": 0.91,
"grad_norm": 11.70463752746582,
"learning_rate": 1.9459999999999998e-07,
"loss": 0.2567,
"step": 975
},
{
"epoch": 0.93,
"grad_norm": 11.723058700561523,
"learning_rate": 1.996e-07,
"loss": 0.2552,
"step": 1000
},
{
"epoch": 0.93,
"eval_loss": 0.21999725699424744,
"eval_runtime": 7789.979,
"eval_samples_per_second": 1.208,
"eval_steps_per_second": 0.604,
"eval_wer": 0.12200871952204101,
"step": 1000
},
{
"epoch": 0.96,
"grad_norm": 15.0565824508667,
"learning_rate": 2.046e-07,
"loss": 0.2629,
"step": 1025
},
{
"epoch": 0.98,
"grad_norm": 12.149203300476074,
"learning_rate": 2.096e-07,
"loss": 0.2535,
"step": 1050
},
{
"epoch": 1.0,
"grad_norm": 4.3349385261535645,
"learning_rate": 2.146e-07,
"loss": 0.2429,
"step": 1075
},
{
"epoch": 1.03,
"grad_norm": 4.134186744689941,
"learning_rate": 2.1959999999999998e-07,
"loss": 0.2214,
"step": 1100
},
{
"epoch": 1.05,
"grad_norm": 3.6862359046936035,
"learning_rate": 2.2459999999999999e-07,
"loss": 0.2307,
"step": 1125
},
{
"epoch": 1.07,
"grad_norm": 4.983838081359863,
"learning_rate": 2.296e-07,
"loss": 0.2327,
"step": 1150
},
{
"epoch": 1.1,
"grad_norm": 5.041070461273193,
"learning_rate": 2.346e-07,
"loss": 0.224,
"step": 1175
},
{
"epoch": 1.12,
"grad_norm": 4.438192844390869,
"learning_rate": 2.396e-07,
"loss": 0.2376,
"step": 1200
},
{
"epoch": 1.14,
"grad_norm": 5.2538161277771,
"learning_rate": 2.446e-07,
"loss": 0.2234,
"step": 1225
},
{
"epoch": 1.17,
"grad_norm": 4.371279239654541,
"learning_rate": 2.4959999999999996e-07,
"loss": 0.2104,
"step": 1250
},
{
"epoch": 1.19,
"grad_norm": 3.8350584506988525,
"learning_rate": 2.5459999999999996e-07,
"loss": 0.2221,
"step": 1275
},
{
"epoch": 1.21,
"grad_norm": 4.0391845703125,
"learning_rate": 2.5959999999999997e-07,
"loss": 0.2147,
"step": 1300
},
{
"epoch": 1.24,
"grad_norm": 4.656424045562744,
"learning_rate": 2.646e-07,
"loss": 0.2259,
"step": 1325
},
{
"epoch": 1.26,
"grad_norm": 4.003029823303223,
"learning_rate": 2.696e-07,
"loss": 0.2073,
"step": 1350
},
{
"epoch": 1.28,
"grad_norm": 3.689852476119995,
"learning_rate": 2.746e-07,
"loss": 0.1964,
"step": 1375
},
{
"epoch": 1.31,
"grad_norm": 2.934403419494629,
"learning_rate": 2.796e-07,
"loss": 0.1997,
"step": 1400
},
{
"epoch": 1.33,
"grad_norm": 4.054893493652344,
"learning_rate": 2.846e-07,
"loss": 0.1945,
"step": 1425
},
{
"epoch": 1.35,
"grad_norm": 4.135422229766846,
"learning_rate": 2.896e-07,
"loss": 0.2014,
"step": 1450
},
{
"epoch": 1.38,
"grad_norm": 4.4028706550598145,
"learning_rate": 2.9459999999999995e-07,
"loss": 0.2013,
"step": 1475
},
{
"epoch": 1.4,
"grad_norm": 4.187674045562744,
"learning_rate": 2.9959999999999996e-07,
"loss": 0.2108,
"step": 1500
},
{
"epoch": 1.42,
"grad_norm": 5.227431297302246,
"learning_rate": 3.0459999999999996e-07,
"loss": 0.21,
"step": 1525
},
{
"epoch": 1.45,
"grad_norm": 3.673109769821167,
"learning_rate": 3.0959999999999997e-07,
"loss": 0.1994,
"step": 1550
},
{
"epoch": 1.47,
"grad_norm": 4.424607753753662,
"learning_rate": 3.1459999999999997e-07,
"loss": 0.2143,
"step": 1575
},
{
"epoch": 1.49,
"grad_norm": 4.143054485321045,
"learning_rate": 3.196e-07,
"loss": 0.1961,
"step": 1600
},
{
"epoch": 1.52,
"grad_norm": 5.010011672973633,
"learning_rate": 3.246e-07,
"loss": 0.1935,
"step": 1625
},
{
"epoch": 1.54,
"grad_norm": 4.047982215881348,
"learning_rate": 3.296e-07,
"loss": 0.1925,
"step": 1650
},
{
"epoch": 1.56,
"grad_norm": 3.367611885070801,
"learning_rate": 3.346e-07,
"loss": 0.2028,
"step": 1675
},
{
"epoch": 1.59,
"grad_norm": 3.4524459838867188,
"learning_rate": 3.396e-07,
"loss": 0.2027,
"step": 1700
},
{
"epoch": 1.61,
"grad_norm": 4.717390537261963,
"learning_rate": 3.446e-07,
"loss": 0.2,
"step": 1725
},
{
"epoch": 1.63,
"grad_norm": 4.299264430999756,
"learning_rate": 3.496e-07,
"loss": 0.2117,
"step": 1750
},
{
"epoch": 1.66,
"grad_norm": 3.89416241645813,
"learning_rate": 3.546e-07,
"loss": 0.2055,
"step": 1775
},
{
"epoch": 1.68,
"grad_norm": 4.462187767028809,
"learning_rate": 3.5959999999999996e-07,
"loss": 0.2061,
"step": 1800
},
{
"epoch": 1.7,
"grad_norm": 4.449103355407715,
"learning_rate": 3.6459999999999997e-07,
"loss": 0.1999,
"step": 1825
},
{
"epoch": 1.73,
"grad_norm": 2.920868158340454,
"learning_rate": 3.696e-07,
"loss": 0.2063,
"step": 1850
},
{
"epoch": 1.75,
"grad_norm": 3.8898050785064697,
"learning_rate": 3.746e-07,
"loss": 0.1862,
"step": 1875
},
{
"epoch": 1.77,
"grad_norm": 3.56378173828125,
"learning_rate": 3.796e-07,
"loss": 0.1974,
"step": 1900
},
{
"epoch": 1.8,
"grad_norm": 3.6201984882354736,
"learning_rate": 3.846e-07,
"loss": 0.2001,
"step": 1925
},
{
"epoch": 1.82,
"grad_norm": 4.066067218780518,
"learning_rate": 3.896e-07,
"loss": 0.1954,
"step": 1950
},
{
"epoch": 1.84,
"grad_norm": 3.4573721885681152,
"learning_rate": 3.946e-07,
"loss": 0.1885,
"step": 1975
},
{
"epoch": 1.87,
"grad_norm": 3.976249933242798,
"learning_rate": 3.996e-07,
"loss": 0.1928,
"step": 2000
},
{
"epoch": 1.87,
"eval_loss": 0.1644940823316574,
"eval_runtime": 8054.0522,
"eval_samples_per_second": 1.169,
"eval_steps_per_second": 0.584,
"eval_wer": 0.10624899079606007,
"step": 2000
},
{
"epoch": 1.89,
"grad_norm": 3.10294246673584,
"learning_rate": 4.0439999999999994e-07,
"loss": 0.1868,
"step": 2025
},
{
"epoch": 1.91,
"grad_norm": 3.2514965534210205,
"learning_rate": 4.0939999999999995e-07,
"loss": 0.194,
"step": 2050
},
{
"epoch": 1.94,
"grad_norm": 4.614846229553223,
"learning_rate": 4.1439999999999995e-07,
"loss": 0.1998,
"step": 2075
},
{
"epoch": 1.96,
"grad_norm": 3.231562852859497,
"learning_rate": 4.1939999999999996e-07,
"loss": 0.1841,
"step": 2100
},
{
"epoch": 1.98,
"grad_norm": 4.466641902923584,
"learning_rate": 4.2439999999999996e-07,
"loss": 0.175,
"step": 2125
},
{
"epoch": 2.01,
"grad_norm": 3.2693140506744385,
"learning_rate": 4.2939999999999997e-07,
"loss": 0.1824,
"step": 2150
},
{
"epoch": 2.03,
"grad_norm": 3.7038068771362305,
"learning_rate": 4.3439999999999997e-07,
"loss": 0.1779,
"step": 2175
},
{
"epoch": 2.05,
"grad_norm": 4.4231181144714355,
"learning_rate": 4.394e-07,
"loss": 0.1704,
"step": 2200
},
{
"epoch": 2.08,
"grad_norm": 3.4143128395080566,
"learning_rate": 4.444e-07,
"loss": 0.1559,
"step": 2225
},
{
"epoch": 2.1,
"grad_norm": 3.6965668201446533,
"learning_rate": 4.494e-07,
"loss": 0.1708,
"step": 2250
},
{
"epoch": 2.12,
"grad_norm": 4.041949272155762,
"learning_rate": 4.544e-07,
"loss": 0.1625,
"step": 2275
},
{
"epoch": 2.15,
"grad_norm": 3.2054636478424072,
"learning_rate": 4.5939999999999994e-07,
"loss": 0.1631,
"step": 2300
},
{
"epoch": 2.17,
"grad_norm": 4.389201641082764,
"learning_rate": 4.6439999999999995e-07,
"loss": 0.1833,
"step": 2325
},
{
"epoch": 2.19,
"grad_norm": 3.396207094192505,
"learning_rate": 4.6939999999999995e-07,
"loss": 0.1629,
"step": 2350
},
{
"epoch": 2.22,
"grad_norm": 3.7798011302948,
"learning_rate": 4.7439999999999996e-07,
"loss": 0.158,
"step": 2375
},
{
"epoch": 2.24,
"grad_norm": 3.4497921466827393,
"learning_rate": 4.794e-07,
"loss": 0.1659,
"step": 2400
},
{
"epoch": 2.26,
"grad_norm": 4.368722915649414,
"learning_rate": 4.844e-07,
"loss": 0.1624,
"step": 2425
},
{
"epoch": 2.29,
"grad_norm": 3.625420570373535,
"learning_rate": 4.894e-07,
"loss": 0.1729,
"step": 2450
},
{
"epoch": 2.31,
"grad_norm": 4.549854278564453,
"learning_rate": 4.944e-07,
"loss": 0.1647,
"step": 2475
},
{
"epoch": 2.33,
"grad_norm": 3.4779980182647705,
"learning_rate": 4.994e-07,
"loss": 0.1638,
"step": 2500
},
{
"epoch": 2.36,
"grad_norm": 6.931514739990234,
"learning_rate": 5.043999999999999e-07,
"loss": 0.1617,
"step": 2525
},
{
"epoch": 2.38,
"grad_norm": 5.141421318054199,
"learning_rate": 5.093999999999999e-07,
"loss": 0.1682,
"step": 2550
},
{
"epoch": 2.4,
"grad_norm": 3.208487033843994,
"learning_rate": 5.143999999999999e-07,
"loss": 0.1478,
"step": 2575
},
{
"epoch": 2.43,
"grad_norm": 4.365899562835693,
"learning_rate": 5.194e-07,
"loss": 0.1542,
"step": 2600
},
{
"epoch": 2.45,
"grad_norm": 3.7716119289398193,
"learning_rate": 5.243999999999999e-07,
"loss": 0.1668,
"step": 2625
},
{
"epoch": 2.47,
"grad_norm": 4.948827743530273,
"learning_rate": 5.294e-07,
"loss": 0.1583,
"step": 2650
},
{
"epoch": 2.5,
"grad_norm": 4.176829814910889,
"learning_rate": 5.343999999999999e-07,
"loss": 0.1607,
"step": 2675
},
{
"epoch": 2.52,
"grad_norm": 4.278001308441162,
"learning_rate": 5.394e-07,
"loss": 0.1558,
"step": 2700
},
{
"epoch": 2.54,
"grad_norm": 3.226017951965332,
"learning_rate": 5.443999999999999e-07,
"loss": 0.1495,
"step": 2725
},
{
"epoch": 2.57,
"grad_norm": 3.3415420055389404,
"learning_rate": 5.494e-07,
"loss": 0.1762,
"step": 2750
},
{
"epoch": 2.59,
"grad_norm": 3.7578542232513428,
"learning_rate": 5.543999999999999e-07,
"loss": 0.1684,
"step": 2775
},
{
"epoch": 2.61,
"grad_norm": 4.540279388427734,
"learning_rate": 5.594e-07,
"loss": 0.1629,
"step": 2800
},
{
"epoch": 2.64,
"grad_norm": 2.953626871109009,
"learning_rate": 5.643999999999999e-07,
"loss": 0.1539,
"step": 2825
},
{
"epoch": 2.66,
"grad_norm": 4.061501502990723,
"learning_rate": 5.694e-07,
"loss": 0.1428,
"step": 2850
},
{
"epoch": 2.68,
"grad_norm": 3.305874824523926,
"learning_rate": 5.744e-07,
"loss": 0.1713,
"step": 2875
},
{
"epoch": 2.71,
"grad_norm": 3.4820492267608643,
"learning_rate": 5.794e-07,
"loss": 0.1712,
"step": 2900
},
{
"epoch": 2.73,
"grad_norm": 4.517523765563965,
"learning_rate": 5.844e-07,
"loss": 0.1626,
"step": 2925
},
{
"epoch": 2.75,
"grad_norm": 5.121035099029541,
"learning_rate": 5.894e-07,
"loss": 0.1591,
"step": 2950
},
{
"epoch": 2.78,
"grad_norm": 3.2896835803985596,
"learning_rate": 5.944e-07,
"loss": 0.1652,
"step": 2975
},
{
"epoch": 2.8,
"grad_norm": 3.9433748722076416,
"learning_rate": 5.994e-07,
"loss": 0.1646,
"step": 3000
},
{
"epoch": 2.8,
"eval_loss": 0.15075725317001343,
"eval_runtime": 7921.0228,
"eval_samples_per_second": 1.188,
"eval_steps_per_second": 0.594,
"eval_wer": 0.10159857904085258,
"step": 3000
},
{
"epoch": 2.82,
"grad_norm": 6.672876834869385,
"learning_rate": 6.044e-07,
"loss": 0.1694,
"step": 3025
},
{
"epoch": 2.85,
"grad_norm": 3.1442010402679443,
"learning_rate": 6.094e-07,
"loss": 0.1655,
"step": 3050
},
{
"epoch": 2.87,
"grad_norm": 4.091799736022949,
"learning_rate": 6.143999999999999e-07,
"loss": 0.1568,
"step": 3075
},
{
"epoch": 2.89,
"grad_norm": 3.820805311203003,
"learning_rate": 6.194e-07,
"loss": 0.1635,
"step": 3100
},
{
"epoch": 2.92,
"grad_norm": 3.704472303390503,
"learning_rate": 6.242e-07,
"loss": 0.1587,
"step": 3125
},
{
"epoch": 2.94,
"grad_norm": 3.817859649658203,
"learning_rate": 6.291999999999999e-07,
"loss": 0.1443,
"step": 3150
},
{
"epoch": 2.96,
"grad_norm": 4.0161452293396,
"learning_rate": 6.342e-07,
"loss": 0.147,
"step": 3175
},
{
"epoch": 2.99,
"grad_norm": 5.137659072875977,
"learning_rate": 6.392e-07,
"loss": 0.1574,
"step": 3200
},
{
"epoch": 3.01,
"grad_norm": 3.7066938877105713,
"learning_rate": 6.442e-07,
"loss": 0.142,
"step": 3225
},
{
"epoch": 3.03,
"grad_norm": 4.4287919998168945,
"learning_rate": 6.492e-07,
"loss": 0.1367,
"step": 3250
},
{
"epoch": 3.06,
"grad_norm": 4.2995381355285645,
"learning_rate": 6.542e-07,
"loss": 0.1226,
"step": 3275
},
{
"epoch": 3.08,
"grad_norm": 3.6767024993896484,
"learning_rate": 6.592e-07,
"loss": 0.1533,
"step": 3300
},
{
"epoch": 3.1,
"grad_norm": 4.893792152404785,
"learning_rate": 6.642e-07,
"loss": 0.1304,
"step": 3325
},
{
"epoch": 3.13,
"grad_norm": 1.7213168144226074,
"learning_rate": 6.692e-07,
"loss": 0.143,
"step": 3350
},
{
"epoch": 3.15,
"grad_norm": 4.113219738006592,
"learning_rate": 6.742e-07,
"loss": 0.1311,
"step": 3375
},
{
"epoch": 3.17,
"grad_norm": 4.480813980102539,
"learning_rate": 6.792e-07,
"loss": 0.1429,
"step": 3400
},
{
"epoch": 3.2,
"grad_norm": 4.102989196777344,
"learning_rate": 6.842e-07,
"loss": 0.1361,
"step": 3425
},
{
"epoch": 3.22,
"grad_norm": 4.409080982208252,
"learning_rate": 6.892e-07,
"loss": 0.1271,
"step": 3450
},
{
"epoch": 3.25,
"grad_norm": 3.579719305038452,
"learning_rate": 6.942e-07,
"loss": 0.1303,
"step": 3475
},
{
"epoch": 3.27,
"grad_norm": 5.013184547424316,
"learning_rate": 6.992e-07,
"loss": 0.1359,
"step": 3500
},
{
"epoch": 3.29,
"grad_norm": 4.512373447418213,
"learning_rate": 7.042e-07,
"loss": 0.1359,
"step": 3525
},
{
"epoch": 3.32,
"grad_norm": 4.140831470489502,
"learning_rate": 7.092e-07,
"loss": 0.1239,
"step": 3550
},
{
"epoch": 3.34,
"grad_norm": 3.4033215045928955,
"learning_rate": 7.141999999999999e-07,
"loss": 0.133,
"step": 3575
},
{
"epoch": 3.36,
"grad_norm": 3.3019678592681885,
"learning_rate": 7.191999999999999e-07,
"loss": 0.1302,
"step": 3600
},
{
"epoch": 3.39,
"grad_norm": 3.88134765625,
"learning_rate": 7.241999999999999e-07,
"loss": 0.1437,
"step": 3625
},
{
"epoch": 3.41,
"grad_norm": 4.493922710418701,
"learning_rate": 7.291999999999999e-07,
"loss": 0.1346,
"step": 3650
},
{
"epoch": 3.43,
"grad_norm": 4.8288421630859375,
"learning_rate": 7.341999999999999e-07,
"loss": 0.1331,
"step": 3675
},
{
"epoch": 3.46,
"grad_norm": 4.28986120223999,
"learning_rate": 7.392e-07,
"loss": 0.1309,
"step": 3700
},
{
"epoch": 3.48,
"grad_norm": 3.411266326904297,
"learning_rate": 7.441999999999999e-07,
"loss": 0.1549,
"step": 3725
},
{
"epoch": 3.5,
"grad_norm": 3.317307949066162,
"learning_rate": 7.492e-07,
"loss": 0.1265,
"step": 3750
},
{
"epoch": 3.53,
"grad_norm": 4.695957183837891,
"learning_rate": 7.541999999999999e-07,
"loss": 0.1422,
"step": 3775
},
{
"epoch": 3.55,
"grad_norm": 3.6588873863220215,
"learning_rate": 7.592e-07,
"loss": 0.1437,
"step": 3800
},
{
"epoch": 3.57,
"grad_norm": 5.599457740783691,
"learning_rate": 7.641999999999999e-07,
"loss": 0.1436,
"step": 3825
},
{
"epoch": 3.6,
"grad_norm": 4.065281391143799,
"learning_rate": 7.692e-07,
"loss": 0.1281,
"step": 3850
},
{
"epoch": 3.62,
"grad_norm": 4.9737935066223145,
"learning_rate": 7.741999999999999e-07,
"loss": 0.1293,
"step": 3875
},
{
"epoch": 3.64,
"grad_norm": 3.3383023738861084,
"learning_rate": 7.792e-07,
"loss": 0.1329,
"step": 3900
},
{
"epoch": 3.67,
"grad_norm": 3.6315879821777344,
"learning_rate": 7.841999999999999e-07,
"loss": 0.1273,
"step": 3925
},
{
"epoch": 3.69,
"grad_norm": 4.887773036956787,
"learning_rate": 7.892e-07,
"loss": 0.136,
"step": 3950
},
{
"epoch": 3.71,
"grad_norm": 4.8274455070495605,
"learning_rate": 7.942e-07,
"loss": 0.1438,
"step": 3975
},
{
"epoch": 3.74,
"grad_norm": 4.641810417175293,
"learning_rate": 7.992e-07,
"loss": 0.1333,
"step": 4000
},
{
"epoch": 3.74,
"eval_loss": 0.14375422894954681,
"eval_runtime": 7811.8504,
"eval_samples_per_second": 1.205,
"eval_steps_per_second": 0.603,
"eval_wer": 0.09701275633780074,
"step": 4000
},
{
"epoch": 3.76,
"grad_norm": 5.432655334472656,
"learning_rate": 8.042e-07,
"loss": 0.1238,
"step": 4025
},
{
"epoch": 3.78,
"grad_norm": 4.146833419799805,
"learning_rate": 8.092e-07,
"loss": 0.1234,
"step": 4050
},
{
"epoch": 3.81,
"grad_norm": 4.496214389801025,
"learning_rate": 8.142e-07,
"loss": 0.1385,
"step": 4075
},
{
"epoch": 3.83,
"grad_norm": 4.550703525543213,
"learning_rate": 8.192e-07,
"loss": 0.1327,
"step": 4100
},
{
"epoch": 3.85,
"grad_norm": 4.395373821258545,
"learning_rate": 8.242e-07,
"loss": 0.1336,
"step": 4125
},
{
"epoch": 3.88,
"grad_norm": 3.292907238006592,
"learning_rate": 8.292e-07,
"loss": 0.1243,
"step": 4150
},
{
"epoch": 3.9,
"grad_norm": 3.963186502456665,
"learning_rate": 8.342e-07,
"loss": 0.1287,
"step": 4175
},
{
"epoch": 3.92,
"grad_norm": 3.482177972793579,
"learning_rate": 8.391999999999999e-07,
"loss": 0.1305,
"step": 4200
},
{
"epoch": 3.95,
"grad_norm": 3.331468105316162,
"learning_rate": 8.441999999999999e-07,
"loss": 0.1308,
"step": 4225
},
{
"epoch": 3.97,
"grad_norm": 5.990370750427246,
"learning_rate": 8.492e-07,
"loss": 0.1411,
"step": 4250
},
{
"epoch": 3.99,
"grad_norm": 3.512251615524292,
"learning_rate": 8.541999999999999e-07,
"loss": 0.132,
"step": 4275
},
{
"epoch": 4.02,
"grad_norm": 3.779296398162842,
"learning_rate": 8.592e-07,
"loss": 0.1277,
"step": 4300
},
{
"epoch": 4.04,
"grad_norm": 4.156597137451172,
"learning_rate": 8.641999999999999e-07,
"loss": 0.1082,
"step": 4325
},
{
"epoch": 4.06,
"grad_norm": 3.5820066928863525,
"learning_rate": 8.692e-07,
"loss": 0.117,
"step": 4350
},
{
"epoch": 4.09,
"grad_norm": 3.3737404346466064,
"learning_rate": 8.741999999999999e-07,
"loss": 0.1068,
"step": 4375
},
{
"epoch": 4.11,
"grad_norm": 4.2788166999816895,
"learning_rate": 8.792e-07,
"loss": 0.1129,
"step": 4400
},
{
"epoch": 4.13,
"grad_norm": 3.872753381729126,
"learning_rate": 8.841999999999999e-07,
"loss": 0.1124,
"step": 4425
},
{
"epoch": 4.16,
"grad_norm": 5.087253570556641,
"learning_rate": 8.892e-07,
"loss": 0.1149,
"step": 4450
},
{
"epoch": 4.18,
"grad_norm": 4.363770484924316,
"learning_rate": 8.941999999999999e-07,
"loss": 0.1105,
"step": 4475
},
{
"epoch": 4.2,
"grad_norm": 3.211000680923462,
"learning_rate": 8.992e-07,
"loss": 0.1162,
"step": 4500
},
{
"epoch": 4.23,
"grad_norm": 4.609809875488281,
"learning_rate": 9.042e-07,
"loss": 0.1165,
"step": 4525
},
{
"epoch": 4.25,
"grad_norm": 3.679553508758545,
"learning_rate": 9.092e-07,
"loss": 0.111,
"step": 4550
},
{
"epoch": 4.27,
"grad_norm": 3.6184656620025635,
"learning_rate": 9.142e-07,
"loss": 0.1243,
"step": 4575
},
{
"epoch": 4.3,
"grad_norm": 4.844442367553711,
"learning_rate": 9.192e-07,
"loss": 0.1112,
"step": 4600
},
{
"epoch": 4.32,
"grad_norm": 5.363363265991211,
"learning_rate": 9.242e-07,
"loss": 0.1193,
"step": 4625
},
{
"epoch": 4.34,
"grad_norm": 2.0149080753326416,
"learning_rate": 9.292e-07,
"loss": 0.1143,
"step": 4650
},
{
"epoch": 4.37,
"grad_norm": 5.45695161819458,
"learning_rate": 9.342e-07,
"loss": 0.1056,
"step": 4675
},
{
"epoch": 4.39,
"grad_norm": 5.325240612030029,
"learning_rate": 9.391999999999999e-07,
"loss": 0.114,
"step": 4700
},
{
"epoch": 4.41,
"grad_norm": 3.116534471511841,
"learning_rate": 9.442e-07,
"loss": 0.0995,
"step": 4725
},
{
"epoch": 4.44,
"grad_norm": 2.405822515487671,
"learning_rate": 9.492e-07,
"loss": 0.1165,
"step": 4750
},
{
"epoch": 4.46,
"grad_norm": 4.517841339111328,
"learning_rate": 9.542e-07,
"loss": 0.1052,
"step": 4775
},
{
"epoch": 4.48,
"grad_norm": 2.7329490184783936,
"learning_rate": 9.592e-07,
"loss": 0.102,
"step": 4800
},
{
"epoch": 4.51,
"grad_norm": 3.871314287185669,
"learning_rate": 9.641999999999998e-07,
"loss": 0.1081,
"step": 4825
},
{
"epoch": 4.53,
"grad_norm": 3.3892979621887207,
"learning_rate": 9.692e-07,
"loss": 0.1138,
"step": 4850
},
{
"epoch": 4.55,
"grad_norm": 4.42065954208374,
"learning_rate": 9.742e-07,
"loss": 0.106,
"step": 4875
},
{
"epoch": 4.58,
"grad_norm": 4.373811721801758,
"learning_rate": 9.791999999999999e-07,
"loss": 0.1043,
"step": 4900
},
{
"epoch": 4.6,
"grad_norm": 5.577483177185059,
"learning_rate": 9.841999999999998e-07,
"loss": 0.0962,
"step": 4925
},
{
"epoch": 4.62,
"grad_norm": 3.65592360496521,
"learning_rate": 9.892e-07,
"loss": 0.1161,
"step": 4950
},
{
"epoch": 4.65,
"grad_norm": 4.68386173248291,
"learning_rate": 9.942e-07,
"loss": 0.1071,
"step": 4975
},
{
"epoch": 4.67,
"grad_norm": 3.9556305408477783,
"learning_rate": 9.992e-07,
"loss": 0.1027,
"step": 5000
},
{
"epoch": 4.67,
"eval_loss": 0.14086556434631348,
"eval_runtime": 7847.8302,
"eval_samples_per_second": 1.2,
"eval_steps_per_second": 0.6,
"eval_wer": 0.09421927983206846,
"step": 5000
},
{
"epoch": 4.67,
"step": 5000,
"total_flos": 1.632967868976988e+20,
"train_loss": 0.2694411336898804,
"train_runtime": 106369.5753,
"train_samples_per_second": 1.504,
"train_steps_per_second": 0.047
}
],
"logging_steps": 25,
"max_steps": 5000,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 1000,
"total_flos": 1.632967868976988e+20,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
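
A minimal sketch (not part of the original repository) of how this trainer state could be inspected: it assumes the JSON above is saved locally as `trainer_state.json` and only relies on keys that appear in it (`log_history`, `step`, `eval_loss`, `eval_wer`).

```python
# Hypothetical helper, for illustration only: print the evaluation checkpoints
# recorded in log_history (the entries that carry an "eval_wer" key).
import json

with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

for entry in state["log_history"]:
    if "eval_wer" in entry:
        print(
            f"step {entry['step']:>5}  "
            f"eval_loss {entry['eval_loss']:.4f}  "
            f"WER {entry['eval_wer'] * 100:.2f}%"
        )
```

Run against this file, such a script would show the word error rate improving from roughly 12.20% at step 1000 to 9.42% at step 5000.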