{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.9966329966329965,
"eval_steps": 500,
"global_step": 740,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.006734006734006734,
"grad_norm": 0.4674329161643982,
"learning_rate": 2.702702702702703e-07,
"loss": 0.8878,
"step": 1
},
{
"epoch": 0.013468013468013467,
"grad_norm": 0.4288882613182068,
"learning_rate": 5.405405405405406e-07,
"loss": 0.9554,
"step": 2
},
{
"epoch": 0.020202020202020204,
"grad_norm": 0.4333876669406891,
"learning_rate": 8.108108108108109e-07,
"loss": 0.9565,
"step": 3
},
{
"epoch": 0.026936026936026935,
"grad_norm": 0.4596662223339081,
"learning_rate": 1.0810810810810812e-06,
"loss": 0.9475,
"step": 4
},
{
"epoch": 0.03367003367003367,
"grad_norm": 0.476782888174057,
"learning_rate": 1.3513513513513515e-06,
"loss": 0.9307,
"step": 5
},
{
"epoch": 0.04040404040404041,
"grad_norm": 0.4502452313899994,
"learning_rate": 1.6216216216216219e-06,
"loss": 0.9489,
"step": 6
},
{
"epoch": 0.04713804713804714,
"grad_norm": 0.4531259536743164,
"learning_rate": 1.8918918918918922e-06,
"loss": 0.9744,
"step": 7
},
{
"epoch": 0.05387205387205387,
"grad_norm": 0.45402348041534424,
"learning_rate": 2.1621621621621623e-06,
"loss": 0.9425,
"step": 8
},
{
"epoch": 0.06060606060606061,
"grad_norm": 0.44291701912879944,
"learning_rate": 2.432432432432433e-06,
"loss": 0.9487,
"step": 9
},
{
"epoch": 0.06734006734006734,
"grad_norm": 0.4839108884334564,
"learning_rate": 2.702702702702703e-06,
"loss": 0.9382,
"step": 10
},
{
"epoch": 0.07407407407407407,
"grad_norm": 0.44631311297416687,
"learning_rate": 2.9729729729729736e-06,
"loss": 0.8985,
"step": 11
},
{
"epoch": 0.08080808080808081,
"grad_norm": 0.4725789427757263,
"learning_rate": 3.2432432432432437e-06,
"loss": 0.9924,
"step": 12
},
{
"epoch": 0.08754208754208755,
"grad_norm": 0.4641851782798767,
"learning_rate": 3.513513513513514e-06,
"loss": 0.9142,
"step": 13
},
{
"epoch": 0.09427609427609428,
"grad_norm": 0.46973735094070435,
"learning_rate": 3.7837837837837844e-06,
"loss": 0.9383,
"step": 14
},
{
"epoch": 0.10101010101010101,
"grad_norm": 0.4747369885444641,
"learning_rate": 4.0540540540540545e-06,
"loss": 0.9133,
"step": 15
},
{
"epoch": 0.10774410774410774,
"grad_norm": 0.4584101140499115,
"learning_rate": 4.324324324324325e-06,
"loss": 0.927,
"step": 16
},
{
"epoch": 0.11447811447811448,
"grad_norm": 0.4609006345272064,
"learning_rate": 4.594594594594596e-06,
"loss": 0.932,
"step": 17
},
{
"epoch": 0.12121212121212122,
"grad_norm": 0.4389339089393616,
"learning_rate": 4.864864864864866e-06,
"loss": 0.9318,
"step": 18
},
{
"epoch": 0.12794612794612795,
"grad_norm": 0.47249212861061096,
"learning_rate": 5.135135135135135e-06,
"loss": 0.9359,
"step": 19
},
{
"epoch": 0.13468013468013468,
"grad_norm": 0.46650782227516174,
"learning_rate": 5.405405405405406e-06,
"loss": 0.9073,
"step": 20
},
{
"epoch": 0.1414141414141414,
"grad_norm": 0.4591180980205536,
"learning_rate": 5.675675675675676e-06,
"loss": 0.9505,
"step": 21
},
{
"epoch": 0.14814814814814814,
"grad_norm": 0.49153509736061096,
"learning_rate": 5.945945945945947e-06,
"loss": 0.9104,
"step": 22
},
{
"epoch": 0.15488215488215487,
"grad_norm": 0.4966362416744232,
"learning_rate": 6.2162162162162164e-06,
"loss": 0.905,
"step": 23
},
{
"epoch": 0.16161616161616163,
"grad_norm": 0.4932939112186432,
"learning_rate": 6.486486486486487e-06,
"loss": 0.9592,
"step": 24
},
{
"epoch": 0.16835016835016836,
"grad_norm": 0.4675721824169159,
"learning_rate": 6.7567567567567575e-06,
"loss": 0.9215,
"step": 25
},
{
"epoch": 0.1750841750841751,
"grad_norm": 0.47080472111701965,
"learning_rate": 7.027027027027028e-06,
"loss": 0.9972,
"step": 26
},
{
"epoch": 0.18181818181818182,
"grad_norm": 0.4835442304611206,
"learning_rate": 7.297297297297298e-06,
"loss": 0.9141,
"step": 27
},
{
"epoch": 0.18855218855218855,
"grad_norm": 0.47605717182159424,
"learning_rate": 7.567567567567569e-06,
"loss": 0.9398,
"step": 28
},
{
"epoch": 0.19528619528619529,
"grad_norm": 0.4614209830760956,
"learning_rate": 7.837837837837838e-06,
"loss": 0.9129,
"step": 29
},
{
"epoch": 0.20202020202020202,
"grad_norm": 0.4712672531604767,
"learning_rate": 8.108108108108109e-06,
"loss": 0.9007,
"step": 30
},
{
"epoch": 0.20875420875420875,
"grad_norm": 0.4813186228275299,
"learning_rate": 8.378378378378378e-06,
"loss": 0.9331,
"step": 31
},
{
"epoch": 0.21548821548821548,
"grad_norm": 0.466824471950531,
"learning_rate": 8.64864864864865e-06,
"loss": 0.9479,
"step": 32
},
{
"epoch": 0.2222222222222222,
"grad_norm": 0.47713014483451843,
"learning_rate": 8.91891891891892e-06,
"loss": 0.9298,
"step": 33
},
{
"epoch": 0.22895622895622897,
"grad_norm": 0.4705861210823059,
"learning_rate": 9.189189189189191e-06,
"loss": 0.9446,
"step": 34
},
{
"epoch": 0.2356902356902357,
"grad_norm": 0.44828763604164124,
"learning_rate": 9.45945945945946e-06,
"loss": 0.9153,
"step": 35
},
{
"epoch": 0.24242424242424243,
"grad_norm": 0.4529784321784973,
"learning_rate": 9.729729729729732e-06,
"loss": 0.8959,
"step": 36
},
{
"epoch": 0.24915824915824916,
"grad_norm": 0.4544116258621216,
"learning_rate": 1e-05,
"loss": 0.9358,
"step": 37
},
{
"epoch": 0.2558922558922559,
"grad_norm": 0.47299325466156006,
"learning_rate": 9.999950073815046e-06,
"loss": 0.913,
"step": 38
},
{
"epoch": 0.26262626262626265,
"grad_norm": 0.46356692910194397,
"learning_rate": 9.999800296257234e-06,
"loss": 0.9063,
"step": 39
},
{
"epoch": 0.26936026936026936,
"grad_norm": 0.4557459056377411,
"learning_rate": 9.99955067031769e-06,
"loss": 0.9597,
"step": 40
},
{
"epoch": 0.2760942760942761,
"grad_norm": 0.4265868365764618,
"learning_rate": 9.999201200981566e-06,
"loss": 0.8871,
"step": 41
},
{
"epoch": 0.2828282828282828,
"grad_norm": 0.4292503297328949,
"learning_rate": 9.998751895227927e-06,
"loss": 0.8867,
"step": 42
},
{
"epoch": 0.2895622895622896,
"grad_norm": 0.3973385989665985,
"learning_rate": 9.998202762029626e-06,
"loss": 0.8663,
"step": 43
},
{
"epoch": 0.2962962962962963,
"grad_norm": 0.4129696190357208,
"learning_rate": 9.997553812353106e-06,
"loss": 0.8981,
"step": 44
},
{
"epoch": 0.30303030303030304,
"grad_norm": 0.4138651192188263,
"learning_rate": 9.996805059158208e-06,
"loss": 0.8688,
"step": 45
},
{
"epoch": 0.30976430976430974,
"grad_norm": 0.4028445780277252,
"learning_rate": 9.995956517397884e-06,
"loss": 0.9049,
"step": 46
},
{
"epoch": 0.3164983164983165,
"grad_norm": 0.3888985812664032,
"learning_rate": 9.995008204017914e-06,
"loss": 0.8332,
"step": 47
},
{
"epoch": 0.32323232323232326,
"grad_norm": 0.36326923966407776,
"learning_rate": 9.99396013795657e-06,
"loss": 0.8285,
"step": 48
},
{
"epoch": 0.32996632996632996,
"grad_norm": 0.343104749917984,
"learning_rate": 9.992812340144225e-06,
"loss": 0.8576,
"step": 49
},
{
"epoch": 0.3367003367003367,
"grad_norm": 0.3436315059661865,
"learning_rate": 9.991564833502944e-06,
"loss": 0.8509,
"step": 50
},
{
"epoch": 0.3434343434343434,
"grad_norm": 0.33012041449546814,
"learning_rate": 9.990217642946028e-06,
"loss": 0.8635,
"step": 51
},
{
"epoch": 0.3501683501683502,
"grad_norm": 0.32571494579315186,
"learning_rate": 9.988770795377512e-06,
"loss": 0.8585,
"step": 52
},
{
"epoch": 0.3569023569023569,
"grad_norm": 0.29031604528427124,
"learning_rate": 9.987224319691624e-06,
"loss": 0.8599,
"step": 53
},
{
"epoch": 0.36363636363636365,
"grad_norm": 0.2973954975605011,
"learning_rate": 9.98557824677222e-06,
"loss": 0.8528,
"step": 54
},
{
"epoch": 0.37037037037037035,
"grad_norm": 0.2861897945404053,
"learning_rate": 9.983832609492154e-06,
"loss": 0.8689,
"step": 55
},
{
"epoch": 0.3771043771043771,
"grad_norm": 0.2961727976799011,
"learning_rate": 9.981987442712634e-06,
"loss": 0.837,
"step": 56
},
{
"epoch": 0.3838383838383838,
"grad_norm": 0.28357475996017456,
"learning_rate": 9.98004278328251e-06,
"loss": 0.8244,
"step": 57
},
{
"epoch": 0.39057239057239057,
"grad_norm": 0.2743470370769501,
"learning_rate": 9.977998670037554e-06,
"loss": 0.8737,
"step": 58
},
{
"epoch": 0.39730639730639733,
"grad_norm": 0.2749946117401123,
"learning_rate": 9.97585514379968e-06,
"loss": 0.8848,
"step": 59
},
{
"epoch": 0.40404040404040403,
"grad_norm": 0.2607375979423523,
"learning_rate": 9.973612247376118e-06,
"loss": 0.8432,
"step": 60
},
{
"epoch": 0.4107744107744108,
"grad_norm": 0.25835004448890686,
"learning_rate": 9.971270025558576e-06,
"loss": 0.8362,
"step": 61
},
{
"epoch": 0.4175084175084175,
"grad_norm": 0.24998688697814941,
"learning_rate": 9.968828525122331e-06,
"loss": 0.7833,
"step": 62
},
{
"epoch": 0.42424242424242425,
"grad_norm": 0.25704672932624817,
"learning_rate": 9.966287794825305e-06,
"loss": 0.8186,
"step": 63
},
{
"epoch": 0.43097643097643096,
"grad_norm": 0.2764415740966797,
"learning_rate": 9.963647885407088e-06,
"loss": 0.798,
"step": 64
},
{
"epoch": 0.4377104377104377,
"grad_norm": 0.2261701226234436,
"learning_rate": 9.960908849587922e-06,
"loss": 0.7783,
"step": 65
},
{
"epoch": 0.4444444444444444,
"grad_norm": 0.26680904626846313,
"learning_rate": 9.958070742067649e-06,
"loss": 0.836,
"step": 66
},
{
"epoch": 0.4511784511784512,
"grad_norm": 0.23155707120895386,
"learning_rate": 9.955133619524623e-06,
"loss": 0.7938,
"step": 67
},
{
"epoch": 0.45791245791245794,
"grad_norm": 0.2517130374908447,
"learning_rate": 9.952097540614571e-06,
"loss": 0.8131,
"step": 68
},
{
"epoch": 0.46464646464646464,
"grad_norm": 0.2457548826932907,
"learning_rate": 9.948962565969431e-06,
"loss": 0.8164,
"step": 69
},
{
"epoch": 0.4713804713804714,
"grad_norm": 0.22898763418197632,
"learning_rate": 9.945728758196129e-06,
"loss": 0.7757,
"step": 70
},
{
"epoch": 0.4781144781144781,
"grad_norm": 0.2146165817975998,
"learning_rate": 9.942396181875342e-06,
"loss": 0.8254,
"step": 71
},
{
"epoch": 0.48484848484848486,
"grad_norm": 0.232045978307724,
"learning_rate": 9.938964903560198e-06,
"loss": 0.8173,
"step": 72
},
{
"epoch": 0.49158249158249157,
"grad_norm": 0.22347578406333923,
"learning_rate": 9.935434991774951e-06,
"loss": 0.8228,
"step": 73
},
{
"epoch": 0.4983164983164983,
"grad_norm": 0.22920922935009003,
"learning_rate": 9.931806517013612e-06,
"loss": 0.8275,
"step": 74
},
{
"epoch": 0.5050505050505051,
"grad_norm": 0.24698615074157715,
"learning_rate": 9.928079551738542e-06,
"loss": 0.8209,
"step": 75
},
{
"epoch": 0.5117845117845118,
"grad_norm": 0.23692172765731812,
"learning_rate": 9.924254170379007e-06,
"loss": 0.8215,
"step": 76
},
{
"epoch": 0.5185185185185185,
"grad_norm": 0.24865996837615967,
"learning_rate": 9.92033044932968e-06,
"loss": 0.7984,
"step": 77
},
{
"epoch": 0.5252525252525253,
"grad_norm": 0.23505768179893494,
"learning_rate": 9.916308466949134e-06,
"loss": 0.8014,
"step": 78
},
{
"epoch": 0.531986531986532,
"grad_norm": 0.2524789869785309,
"learning_rate": 9.912188303558263e-06,
"loss": 0.8097,
"step": 79
},
{
"epoch": 0.5387205387205387,
"grad_norm": 0.25673165917396545,
"learning_rate": 9.907970041438683e-06,
"loss": 0.8022,
"step": 80
},
{
"epoch": 0.5454545454545454,
"grad_norm": 0.2574709355831146,
"learning_rate": 9.903653764831088e-06,
"loss": 0.7995,
"step": 81
},
{
"epoch": 0.5521885521885522,
"grad_norm": 0.2411055862903595,
"learning_rate": 9.899239559933566e-06,
"loss": 0.7819,
"step": 82
},
{
"epoch": 0.5589225589225589,
"grad_norm": 0.24539045989513397,
"learning_rate": 9.894727514899883e-06,
"loss": 0.7956,
"step": 83
},
{
"epoch": 0.5656565656565656,
"grad_norm": 0.23138682544231415,
"learning_rate": 9.890117719837716e-06,
"loss": 0.7713,
"step": 84
},
{
"epoch": 0.5723905723905723,
"grad_norm": 0.21052764356136322,
"learning_rate": 9.885410266806858e-06,
"loss": 0.8155,
"step": 85
},
{
"epoch": 0.5791245791245792,
"grad_norm": 0.2486884891986847,
"learning_rate": 9.880605249817377e-06,
"loss": 0.8076,
"step": 86
},
{
"epoch": 0.5858585858585859,
"grad_norm": 0.2390533983707428,
"learning_rate": 9.875702764827739e-06,
"loss": 0.7993,
"step": 87
},
{
"epoch": 0.5925925925925926,
"grad_norm": 0.2276839017868042,
"learning_rate": 9.870702909742893e-06,
"loss": 0.776,
"step": 88
},
{
"epoch": 0.5993265993265994,
"grad_norm": 0.25543203949928284,
"learning_rate": 9.865605784412316e-06,
"loss": 0.7935,
"step": 89
},
{
"epoch": 0.6060606060606061,
"grad_norm": 0.23849958181381226,
"learning_rate": 9.860411490628017e-06,
"loss": 0.8,
"step": 90
},
{
"epoch": 0.6127946127946128,
"grad_norm": 0.22977019846439362,
"learning_rate": 9.855120132122503e-06,
"loss": 0.7497,
"step": 91
},
{
"epoch": 0.6195286195286195,
"grad_norm": 0.21937815845012665,
"learning_rate": 9.849731814566713e-06,
"loss": 0.7862,
"step": 92
},
{
"epoch": 0.6262626262626263,
"grad_norm": 0.23629795014858246,
"learning_rate": 9.844246645567903e-06,
"loss": 0.7901,
"step": 93
},
{
"epoch": 0.632996632996633,
"grad_norm": 0.22892089188098907,
"learning_rate": 9.838664734667496e-06,
"loss": 0.7674,
"step": 94
},
{
"epoch": 0.6397306397306397,
"grad_norm": 0.21019472181797028,
"learning_rate": 9.832986193338898e-06,
"loss": 0.779,
"step": 95
},
{
"epoch": 0.6464646464646465,
"grad_norm": 0.2447926104068756,
"learning_rate": 9.827211134985273e-06,
"loss": 0.7801,
"step": 96
},
{
"epoch": 0.6531986531986532,
"grad_norm": 0.23594583570957184,
"learning_rate": 9.821339674937274e-06,
"loss": 0.8055,
"step": 97
},
{
"epoch": 0.6599326599326599,
"grad_norm": 0.22600403428077698,
"learning_rate": 9.815371930450737e-06,
"loss": 0.8169,
"step": 98
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.24343664944171906,
"learning_rate": 9.809308020704353e-06,
"loss": 0.7987,
"step": 99
},
{
"epoch": 0.6734006734006734,
"grad_norm": 0.22726725041866302,
"learning_rate": 9.80314806679727e-06,
"loss": 0.7347,
"step": 100
},
{
"epoch": 0.6801346801346801,
"grad_norm": 0.22291769087314606,
"learning_rate": 9.79689219174669e-06,
"loss": 0.7644,
"step": 101
},
{
"epoch": 0.6868686868686869,
"grad_norm": 0.24143223464488983,
"learning_rate": 9.790540520485402e-06,
"loss": 0.7759,
"step": 102
},
{
"epoch": 0.6936026936026936,
"grad_norm": 0.2362246811389923,
"learning_rate": 9.78409317985929e-06,
"loss": 0.8292,
"step": 103
},
{
"epoch": 0.7003367003367004,
"grad_norm": 0.2230088859796524,
"learning_rate": 9.777550298624805e-06,
"loss": 0.7516,
"step": 104
},
{
"epoch": 0.7070707070707071,
"grad_norm": 0.22827690839767456,
"learning_rate": 9.770912007446385e-06,
"loss": 0.7386,
"step": 105
},
{
"epoch": 0.7138047138047138,
"grad_norm": 0.2498016208410263,
"learning_rate": 9.76417843889385e-06,
"loss": 0.7624,
"step": 106
},
{
"epoch": 0.7205387205387206,
"grad_norm": 0.21709798276424408,
"learning_rate": 9.757349727439759e-06,
"loss": 0.764,
"step": 107
},
{
"epoch": 0.7272727272727273,
"grad_norm": 0.21755844354629517,
"learning_rate": 9.750426009456713e-06,
"loss": 0.7877,
"step": 108
},
{
"epoch": 0.734006734006734,
"grad_norm": 0.227566659450531,
"learning_rate": 9.743407423214643e-06,
"loss": 0.7611,
"step": 109
},
{
"epoch": 0.7407407407407407,
"grad_norm": 0.2640649676322937,
"learning_rate": 9.736294108878044e-06,
"loss": 0.7729,
"step": 110
},
{
"epoch": 0.7474747474747475,
"grad_norm": 0.2128470242023468,
"learning_rate": 9.729086208503174e-06,
"loss": 0.7576,
"step": 111
},
{
"epoch": 0.7542087542087542,
"grad_norm": 0.22666677832603455,
"learning_rate": 9.72178386603522e-06,
"loss": 0.7779,
"step": 112
},
{
"epoch": 0.7609427609427609,
"grad_norm": 0.21836517751216888,
"learning_rate": 9.714387227305422e-06,
"loss": 0.7299,
"step": 113
},
{
"epoch": 0.7676767676767676,
"grad_norm": 0.2525302469730377,
"learning_rate": 9.70689644002816e-06,
"loss": 0.7633,
"step": 114
},
{
"epoch": 0.7744107744107744,
"grad_norm": 0.24227039515972137,
"learning_rate": 9.69931165379801e-06,
"loss": 0.785,
"step": 115
},
{
"epoch": 0.7811447811447811,
"grad_norm": 0.22788166999816895,
"learning_rate": 9.691633020086745e-06,
"loss": 0.7537,
"step": 116
},
{
"epoch": 0.7878787878787878,
"grad_norm": 0.24258917570114136,
"learning_rate": 9.683860692240322e-06,
"loss": 0.7695,
"step": 117
},
{
"epoch": 0.7946127946127947,
"grad_norm": 0.23187682032585144,
"learning_rate": 9.67599482547581e-06,
"loss": 0.7411,
"step": 118
},
{
"epoch": 0.8013468013468014,
"grad_norm": 0.23363040387630463,
"learning_rate": 9.668035576878296e-06,
"loss": 0.749,
"step": 119
},
{
"epoch": 0.8080808080808081,
"grad_norm": 0.2519008219242096,
"learning_rate": 9.65998310539775e-06,
"loss": 0.7784,
"step": 120
},
{
"epoch": 0.8148148148148148,
"grad_norm": 0.22125515341758728,
"learning_rate": 9.651837571845842e-06,
"loss": 0.7455,
"step": 121
},
{
"epoch": 0.8215488215488216,
"grad_norm": 0.25824788212776184,
"learning_rate": 9.643599138892737e-06,
"loss": 0.7614,
"step": 122
},
{
"epoch": 0.8282828282828283,
"grad_norm": 0.23877231776714325,
"learning_rate": 9.635267971063848e-06,
"loss": 0.765,
"step": 123
},
{
"epoch": 0.835016835016835,
"grad_norm": 0.2506805956363678,
"learning_rate": 9.626844234736546e-06,
"loss": 0.7746,
"step": 124
},
{
"epoch": 0.8417508417508418,
"grad_norm": 0.2351042479276657,
"learning_rate": 9.618328098136838e-06,
"loss": 0.7471,
"step": 125
},
{
"epoch": 0.8484848484848485,
"grad_norm": 0.25443699955940247,
"learning_rate": 9.609719731336005e-06,
"loss": 0.77,
"step": 126
},
{
"epoch": 0.8552188552188552,
"grad_norm": 0.23053216934204102,
"learning_rate": 9.601019306247214e-06,
"loss": 0.7415,
"step": 127
},
{
"epoch": 0.8619528619528619,
"grad_norm": 0.24876555800437927,
"learning_rate": 9.59222699662208e-06,
"loss": 0.7723,
"step": 128
},
{
"epoch": 0.8686868686868687,
"grad_norm": 0.230879545211792,
"learning_rate": 9.58334297804719e-06,
"loss": 0.773,
"step": 129
},
{
"epoch": 0.8754208754208754,
"grad_norm": 0.267036110162735,
"learning_rate": 9.574367427940609e-06,
"loss": 0.7782,
"step": 130
},
{
"epoch": 0.8821548821548821,
"grad_norm": 0.2757895588874817,
"learning_rate": 9.565300525548327e-06,
"loss": 0.7624,
"step": 131
},
{
"epoch": 0.8888888888888888,
"grad_norm": 0.2372322976589203,
"learning_rate": 9.55614245194068e-06,
"loss": 0.7282,
"step": 132
},
{
"epoch": 0.8956228956228957,
"grad_norm": 0.23569901287555695,
"learning_rate": 9.546893390008737e-06,
"loss": 0.75,
"step": 133
},
{
"epoch": 0.9023569023569024,
"grad_norm": 0.24758505821228027,
"learning_rate": 9.537553524460656e-06,
"loss": 0.7947,
"step": 134
},
{
"epoch": 0.9090909090909091,
"grad_norm": 0.23305581510066986,
"learning_rate": 9.528123041817972e-06,
"loss": 0.7617,
"step": 135
},
{
"epoch": 0.9158249158249159,
"grad_norm": 0.23048968613147736,
"learning_rate": 9.518602130411894e-06,
"loss": 0.7707,
"step": 136
},
{
"epoch": 0.9225589225589226,
"grad_norm": 0.2431011199951172,
"learning_rate": 9.508990980379537e-06,
"loss": 0.742,
"step": 137
},
{
"epoch": 0.9292929292929293,
"grad_norm": 0.23194094002246857,
"learning_rate": 9.499289783660126e-06,
"loss": 0.7617,
"step": 138
},
{
"epoch": 0.936026936026936,
"grad_norm": 0.24811971187591553,
"learning_rate": 9.489498733991151e-06,
"loss": 0.7579,
"step": 139
},
{
"epoch": 0.9427609427609428,
"grad_norm": 0.24215534329414368,
"learning_rate": 9.47961802690452e-06,
"loss": 0.7752,
"step": 140
},
{
"epoch": 0.9494949494949495,
"grad_norm": 0.22330717742443085,
"learning_rate": 9.469647859722634e-06,
"loss": 0.738,
"step": 141
},
{
"epoch": 0.9562289562289562,
"grad_norm": 0.23054972290992737,
"learning_rate": 9.459588431554458e-06,
"loss": 0.7754,
"step": 142
},
{
"epoch": 0.9629629629629629,
"grad_norm": 0.2682708203792572,
"learning_rate": 9.449439943291541e-06,
"loss": 0.6985,
"step": 143
},
{
"epoch": 0.9696969696969697,
"grad_norm": 0.23973660171031952,
"learning_rate": 9.439202597604004e-06,
"loss": 0.7323,
"step": 144
},
{
"epoch": 0.9764309764309764,
"grad_norm": 0.23046766221523285,
"learning_rate": 9.42887659893649e-06,
"loss": 0.7203,
"step": 145
},
{
"epoch": 0.9831649831649831,
"grad_norm": 0.24555841088294983,
"learning_rate": 9.41846215350409e-06,
"loss": 0.7499,
"step": 146
},
{
"epoch": 0.98989898989899,
"grad_norm": 0.25212836265563965,
"learning_rate": 9.407959469288215e-06,
"loss": 0.745,
"step": 147
},
{
"epoch": 0.9966329966329966,
"grad_norm": 0.23800650238990784,
"learning_rate": 9.397368756032445e-06,
"loss": 0.7439,
"step": 148
},
{
"epoch": 1.0067340067340067,
"grad_norm": 0.28960976004600525,
"learning_rate": 9.386690225238346e-06,
"loss": 0.9352,
"step": 149
},
{
"epoch": 1.0134680134680134,
"grad_norm": 0.23404806852340698,
"learning_rate": 9.375924090161238e-06,
"loss": 0.7395,
"step": 150
},
{
"epoch": 1.02020202020202,
"grad_norm": 0.2719663381576538,
"learning_rate": 9.365070565805941e-06,
"loss": 0.7852,
"step": 151
},
{
"epoch": 1.026936026936027,
"grad_norm": 0.2808714807033539,
"learning_rate": 9.354129868922483e-06,
"loss": 0.7412,
"step": 152
},
{
"epoch": 1.0336700336700337,
"grad_norm": 0.23448531329631805,
"learning_rate": 9.343102218001763e-06,
"loss": 0.7073,
"step": 153
},
{
"epoch": 1.0404040404040404,
"grad_norm": 0.24488256871700287,
"learning_rate": 9.331987833271199e-06,
"loss": 0.7665,
"step": 154
},
{
"epoch": 1.0471380471380471,
"grad_norm": 0.27663859724998474,
"learning_rate": 9.32078693669032e-06,
"loss": 0.7074,
"step": 155
},
{
"epoch": 1.0538720538720538,
"grad_norm": 0.25091904401779175,
"learning_rate": 9.309499751946345e-06,
"loss": 0.7394,
"step": 156
},
{
"epoch": 1.0606060606060606,
"grad_norm": 0.25539129972457886,
"learning_rate": 9.298126504449697e-06,
"loss": 0.7411,
"step": 157
},
{
"epoch": 1.0673400673400673,
"grad_norm": 0.24508924782276154,
"learning_rate": 9.286667421329523e-06,
"loss": 0.7413,
"step": 158
},
{
"epoch": 1.074074074074074,
"grad_norm": 0.2480177879333496,
"learning_rate": 9.275122731429142e-06,
"loss": 0.773,
"step": 159
},
{
"epoch": 1.0808080808080809,
"grad_norm": 0.23678265511989594,
"learning_rate": 9.263492665301486e-06,
"loss": 0.7516,
"step": 160
},
{
"epoch": 1.0875420875420876,
"grad_norm": 0.24282552301883698,
"learning_rate": 9.251777455204485e-06,
"loss": 0.7634,
"step": 161
},
{
"epoch": 1.0942760942760943,
"grad_norm": 0.2432844042778015,
"learning_rate": 9.239977335096439e-06,
"loss": 0.7366,
"step": 162
},
{
"epoch": 1.101010101010101,
"grad_norm": 0.30914798378944397,
"learning_rate": 9.228092540631342e-06,
"loss": 0.7038,
"step": 163
},
{
"epoch": 1.1077441077441077,
"grad_norm": 0.24757800996303558,
"learning_rate": 9.216123309154169e-06,
"loss": 0.7019,
"step": 164
},
{
"epoch": 1.1144781144781144,
"grad_norm": 0.25173789262771606,
"learning_rate": 9.204069879696144e-06,
"loss": 0.7032,
"step": 165
},
{
"epoch": 1.121212121212121,
"grad_norm": 0.25156348943710327,
"learning_rate": 9.191932492969972e-06,
"loss": 0.7092,
"step": 166
},
{
"epoch": 1.127946127946128,
"grad_norm": 0.23508214950561523,
"learning_rate": 9.179711391365015e-06,
"loss": 0.6959,
"step": 167
},
{
"epoch": 1.1346801346801347,
"grad_norm": 0.24067312479019165,
"learning_rate": 9.167406818942468e-06,
"loss": 0.6855,
"step": 168
},
{
"epoch": 1.1414141414141414,
"grad_norm": 0.24786119163036346,
"learning_rate": 9.155019021430469e-06,
"loss": 0.7041,
"step": 169
},
{
"epoch": 1.1481481481481481,
"grad_norm": 0.24527166783809662,
"learning_rate": 9.142548246219212e-06,
"loss": 0.7275,
"step": 170
},
{
"epoch": 1.1548821548821548,
"grad_norm": 0.25038015842437744,
"learning_rate": 9.129994742355985e-06,
"loss": 0.7218,
"step": 171
},
{
"epoch": 1.1616161616161615,
"grad_norm": 0.2645305395126343,
"learning_rate": 9.117358760540211e-06,
"loss": 0.7219,
"step": 172
},
{
"epoch": 1.1683501683501682,
"grad_norm": 0.2419489324092865,
"learning_rate": 9.104640553118436e-06,
"loss": 0.7023,
"step": 173
},
{
"epoch": 1.1750841750841752,
"grad_norm": 0.2727682888507843,
"learning_rate": 9.09184037407929e-06,
"loss": 0.7432,
"step": 174
},
{
"epoch": 1.1818181818181819,
"grad_norm": 0.23827053606510162,
"learning_rate": 9.078958479048419e-06,
"loss": 0.7002,
"step": 175
},
{
"epoch": 1.1885521885521886,
"grad_norm": 0.2524107098579407,
"learning_rate": 9.065995125283367e-06,
"loss": 0.7059,
"step": 176
},
{
"epoch": 1.1952861952861953,
"grad_norm": 0.2655806243419647,
"learning_rate": 9.052950571668458e-06,
"loss": 0.7418,
"step": 177
},
{
"epoch": 1.202020202020202,
"grad_norm": 0.249600350856781,
"learning_rate": 9.039825078709606e-06,
"loss": 0.7192,
"step": 178
},
{
"epoch": 1.2087542087542087,
"grad_norm": 0.23730139434337616,
"learning_rate": 9.026618908529132e-06,
"loss": 0.6776,
"step": 179
},
{
"epoch": 1.2154882154882154,
"grad_norm": 0.24353739619255066,
"learning_rate": 9.013332324860508e-06,
"loss": 0.7146,
"step": 180
},
{
"epoch": 1.2222222222222223,
"grad_norm": 0.26473721861839294,
"learning_rate": 8.999965593043113e-06,
"loss": 0.7188,
"step": 181
},
{
"epoch": 1.228956228956229,
"grad_norm": 0.27650538086891174,
"learning_rate": 8.986518980016914e-06,
"loss": 0.699,
"step": 182
},
{
"epoch": 1.2356902356902357,
"grad_norm": 0.2610260248184204,
"learning_rate": 8.972992754317144e-06,
"loss": 0.7126,
"step": 183
},
{
"epoch": 1.2424242424242424,
"grad_norm": 0.2537449896335602,
"learning_rate": 8.95938718606895e-06,
"loss": 0.7251,
"step": 184
},
{
"epoch": 1.2491582491582491,
"grad_norm": 0.25600242614746094,
"learning_rate": 8.94570254698197e-06,
"loss": 0.71,
"step": 185
},
{
"epoch": 1.2558922558922558,
"grad_norm": 0.28031960129737854,
"learning_rate": 8.931939110344935e-06,
"loss": 0.7254,
"step": 186
},
{
"epoch": 1.2626262626262625,
"grad_norm": 0.2574886679649353,
"learning_rate": 8.9180971510202e-06,
"loss": 0.7297,
"step": 187
},
{
"epoch": 1.2693602693602695,
"grad_norm": 0.25932928919792175,
"learning_rate": 8.904176945438255e-06,
"loss": 0.7118,
"step": 188
},
{
"epoch": 1.2760942760942762,
"grad_norm": 0.2755904197692871,
"learning_rate": 8.890178771592198e-06,
"loss": 0.7173,
"step": 189
},
{
"epoch": 1.2828282828282829,
"grad_norm": 0.26966917514801025,
"learning_rate": 8.8761029090322e-06,
"loss": 0.725,
"step": 190
},
{
"epoch": 1.2895622895622896,
"grad_norm": 0.27923187613487244,
"learning_rate": 8.861949638859908e-06,
"loss": 0.7216,
"step": 191
},
{
"epoch": 1.2962962962962963,
"grad_norm": 0.2591821551322937,
"learning_rate": 8.847719243722835e-06,
"loss": 0.7067,
"step": 192
},
{
"epoch": 1.303030303030303,
"grad_norm": 0.284316748380661,
"learning_rate": 8.833412007808714e-06,
"loss": 0.7303,
"step": 193
},
{
"epoch": 1.3097643097643097,
"grad_norm": 0.25714966654777527,
"learning_rate": 8.819028216839831e-06,
"loss": 0.7035,
"step": 194
},
{
"epoch": 1.3164983164983166,
"grad_norm": 0.2684500813484192,
"learning_rate": 8.804568158067308e-06,
"loss": 0.7089,
"step": 195
},
{
"epoch": 1.3232323232323233,
"grad_norm": 0.25351622700691223,
"learning_rate": 8.790032120265373e-06,
"loss": 0.7044,
"step": 196
},
{
"epoch": 1.32996632996633,
"grad_norm": 0.25690147280693054,
"learning_rate": 8.775420393725592e-06,
"loss": 0.7208,
"step": 197
},
{
"epoch": 1.3367003367003367,
"grad_norm": 0.2771032154560089,
"learning_rate": 8.760733270251065e-06,
"loss": 0.716,
"step": 198
},
{
"epoch": 1.3434343434343434,
"grad_norm": 0.2745297849178314,
"learning_rate": 8.745971043150614e-06,
"loss": 0.7165,
"step": 199
},
{
"epoch": 1.3501683501683501,
"grad_norm": 0.275926411151886,
"learning_rate": 8.73113400723291e-06,
"loss": 0.7015,
"step": 200
},
{
"epoch": 1.3569023569023568,
"grad_norm": 0.2907668650150299,
"learning_rate": 8.716222458800591e-06,
"loss": 0.738,
"step": 201
},
{
"epoch": 1.3636363636363638,
"grad_norm": 0.2874827980995178,
"learning_rate": 8.70123669564435e-06,
"loss": 0.6867,
"step": 202
},
{
"epoch": 1.3703703703703702,
"grad_norm": 0.27132922410964966,
"learning_rate": 8.686177017036979e-06,
"loss": 0.7107,
"step": 203
},
{
"epoch": 1.3771043771043772,
"grad_norm": 0.30173900723457336,
"learning_rate": 8.671043723727396e-06,
"loss": 0.6803,
"step": 204
},
{
"epoch": 1.3838383838383839,
"grad_norm": 0.29518023133277893,
"learning_rate": 8.655837117934642e-06,
"loss": 0.7151,
"step": 205
},
{
"epoch": 1.3905723905723906,
"grad_norm": 0.2551437318325043,
"learning_rate": 8.640557503341843e-06,
"loss": 0.6806,
"step": 206
},
{
"epoch": 1.3973063973063973,
"grad_norm": 0.261708527803421,
"learning_rate": 8.625205185090147e-06,
"loss": 0.7056,
"step": 207
},
{
"epoch": 1.404040404040404,
"grad_norm": 0.2630426287651062,
"learning_rate": 8.609780469772623e-06,
"loss": 0.6659,
"step": 208
},
{
"epoch": 1.410774410774411,
"grad_norm": 0.3132041394710541,
"learning_rate": 8.594283665428147e-06,
"loss": 0.7101,
"step": 209
},
{
"epoch": 1.4175084175084174,
"grad_norm": 0.26227623224258423,
"learning_rate": 8.57871508153525e-06,
"loss": 0.7279,
"step": 210
},
{
"epoch": 1.4242424242424243,
"grad_norm": 0.3779846131801605,
"learning_rate": 8.563075029005924e-06,
"loss": 0.7379,
"step": 211
},
{
"epoch": 1.430976430976431,
"grad_norm": 0.2800910472869873,
"learning_rate": 8.547363820179442e-06,
"loss": 0.7168,
"step": 212
},
{
"epoch": 1.4377104377104377,
"grad_norm": 0.2794981002807617,
"learning_rate": 8.531581768816085e-06,
"loss": 0.6948,
"step": 213
},
{
"epoch": 1.4444444444444444,
"grad_norm": 0.3207724392414093,
"learning_rate": 8.515729190090895e-06,
"loss": 0.7106,
"step": 214
},
{
"epoch": 1.4511784511784511,
"grad_norm": 0.27082759141921997,
"learning_rate": 8.499806400587391e-06,
"loss": 0.7566,
"step": 215
},
{
"epoch": 1.457912457912458,
"grad_norm": 0.30250316858291626,
"learning_rate": 8.483813718291223e-06,
"loss": 0.679,
"step": 216
},
{
"epoch": 1.4646464646464645,
"grad_norm": 0.28343290090560913,
"learning_rate": 8.467751462583837e-06,
"loss": 0.726,
"step": 217
},
{
"epoch": 1.4713804713804715,
"grad_norm": 0.2696777582168579,
"learning_rate": 8.451619954236093e-06,
"loss": 0.7017,
"step": 218
},
{
"epoch": 1.4781144781144782,
"grad_norm": 0.2962685823440552,
"learning_rate": 8.435419515401856e-06,
"loss": 0.7455,
"step": 219
},
{
"epoch": 1.4848484848484849,
"grad_norm": 0.28010743856430054,
"learning_rate": 8.419150469611572e-06,
"loss": 0.6767,
"step": 220
},
{
"epoch": 1.4915824915824916,
"grad_norm": 0.2887760102748871,
"learning_rate": 8.402813141765796e-06,
"loss": 0.6452,
"step": 221
},
{
"epoch": 1.4983164983164983,
"grad_norm": 0.276254802942276,
"learning_rate": 8.386407858128707e-06,
"loss": 0.6759,
"step": 222
},
{
"epoch": 1.5050505050505052,
"grad_norm": 0.27473849058151245,
"learning_rate": 8.369934946321594e-06,
"loss": 0.7207,
"step": 223
},
{
"epoch": 1.5117845117845117,
"grad_norm": 0.26720133423805237,
"learning_rate": 8.353394735316317e-06,
"loss": 0.6672,
"step": 224
},
{
"epoch": 1.5185185185185186,
"grad_norm": 0.2742222547531128,
"learning_rate": 8.336787555428728e-06,
"loss": 0.7073,
"step": 225
},
{
"epoch": 1.5252525252525253,
"grad_norm": 0.27671125531196594,
"learning_rate": 8.320113738312081e-06,
"loss": 0.7018,
"step": 226
},
{
"epoch": 1.531986531986532,
"grad_norm": 0.266973614692688,
"learning_rate": 8.303373616950408e-06,
"loss": 0.6699,
"step": 227
},
{
"epoch": 1.5387205387205387,
"grad_norm": 0.26349687576293945,
"learning_rate": 8.286567525651865e-06,
"loss": 0.7135,
"step": 228
},
{
"epoch": 1.5454545454545454,
"grad_norm": 0.30225032567977905,
"learning_rate": 8.269695800042061e-06,
"loss": 0.7318,
"step": 229
},
{
"epoch": 1.5521885521885523,
"grad_norm": 0.28462594747543335,
"learning_rate": 8.252758777057355e-06,
"loss": 0.6692,
"step": 230
},
{
"epoch": 1.5589225589225588,
"grad_norm": 0.3298182189464569,
"learning_rate": 8.235756794938123e-06,
"loss": 0.7219,
"step": 231
},
{
"epoch": 1.5656565656565657,
"grad_norm": 0.26634180545806885,
"learning_rate": 8.218690193222007e-06,
"loss": 0.6879,
"step": 232
},
{
"epoch": 1.5723905723905722,
"grad_norm": 0.2943713366985321,
"learning_rate": 8.201559312737131e-06,
"loss": 0.6677,
"step": 233
},
{
"epoch": 1.5791245791245792,
"grad_norm": 0.2855622470378876,
"learning_rate": 8.1843644955953e-06,
"loss": 0.6951,
"step": 234
},
{
"epoch": 1.5858585858585859,
"grad_norm": 0.2742707133293152,
"learning_rate": 8.167106085185161e-06,
"loss": 0.6899,
"step": 235
},
{
"epoch": 1.5925925925925926,
"grad_norm": 0.2689763307571411,
"learning_rate": 8.149784426165351e-06,
"loss": 0.6886,
"step": 236
},
{
"epoch": 1.5993265993265995,
"grad_norm": 0.3123777210712433,
"learning_rate": 8.13239986445761e-06,
"loss": 0.7141,
"step": 237
},
{
"epoch": 1.606060606060606,
"grad_norm": 0.284868448972702,
"learning_rate": 8.114952747239876e-06,
"loss": 0.6793,
"step": 238
},
{
"epoch": 1.612794612794613,
"grad_norm": 0.28802135586738586,
"learning_rate": 8.09744342293935e-06,
"loss": 0.6831,
"step": 239
},
{
"epoch": 1.6195286195286194,
"grad_norm": 0.2741600573062897,
"learning_rate": 8.079872241225534e-06,
"loss": 0.6887,
"step": 240
},
{
"epoch": 1.6262626262626263,
"grad_norm": 0.3155152499675751,
"learning_rate": 8.06223955300326e-06,
"loss": 0.7127,
"step": 241
},
{
"epoch": 1.632996632996633,
"grad_norm": 0.29525065422058105,
"learning_rate": 8.044545710405666e-06,
"loss": 0.7051,
"step": 242
},
{
"epoch": 1.6397306397306397,
"grad_norm": 0.3008731007575989,
"learning_rate": 8.026791066787177e-06,
"loss": 0.7012,
"step": 243
},
{
"epoch": 1.6464646464646466,
"grad_norm": 0.3014560639858246,
"learning_rate": 8.00897597671644e-06,
"loss": 0.6887,
"step": 244
},
{
"epoch": 1.6531986531986531,
"grad_norm": 0.28930526971817017,
"learning_rate": 7.991100795969248e-06,
"loss": 0.7171,
"step": 245
},
{
"epoch": 1.65993265993266,
"grad_norm": 0.3083915114402771,
"learning_rate": 7.973165881521435e-06,
"loss": 0.7202,
"step": 246
},
{
"epoch": 1.6666666666666665,
"grad_norm": 0.3567061126232147,
"learning_rate": 7.955171591541739e-06,
"loss": 0.7043,
"step": 247
},
{
"epoch": 1.6734006734006734,
"grad_norm": 0.33908185362815857,
"learning_rate": 7.937118285384666e-06,
"loss": 0.6687,
"step": 248
},
{
"epoch": 1.6801346801346801,
"grad_norm": 0.28633400797843933,
"learning_rate": 7.91900632358329e-06,
"loss": 0.7112,
"step": 249
},
{
"epoch": 1.6868686868686869,
"grad_norm": 0.3234066069126129,
"learning_rate": 7.90083606784208e-06,
"loss": 0.7095,
"step": 250
},
{
"epoch": 1.6936026936026936,
"grad_norm": 0.2888222932815552,
"learning_rate": 7.882607881029652e-06,
"loss": 0.7193,
"step": 251
},
{
"epoch": 1.7003367003367003,
"grad_norm": 0.29027071595191956,
"learning_rate": 7.864322127171535e-06,
"loss": 0.7157,
"step": 252
},
{
"epoch": 1.7070707070707072,
"grad_norm": 0.29393255710601807,
"learning_rate": 7.8459791714429e-06,
"loss": 0.7104,
"step": 253
},
{
"epoch": 1.7138047138047137,
"grad_norm": 0.3067580759525299,
"learning_rate": 7.827579380161272e-06,
"loss": 0.6964,
"step": 254
},
{
"epoch": 1.7205387205387206,
"grad_norm": 0.2851170003414154,
"learning_rate": 7.809123120779201e-06,
"loss": 0.6804,
"step": 255
},
{
"epoch": 1.7272727272727273,
"grad_norm": 0.3147255778312683,
"learning_rate": 7.790610761876936e-06,
"loss": 0.6992,
"step": 256
},
{
"epoch": 1.734006734006734,
"grad_norm": 0.28082358837127686,
"learning_rate": 7.772042673155057e-06,
"loss": 0.664,
"step": 257
},
{
"epoch": 1.7407407407407407,
"grad_norm": 0.3215820789337158,
"learning_rate": 7.753419225427097e-06,
"loss": 0.6968,
"step": 258
},
{
"epoch": 1.7474747474747474,
"grad_norm": 0.30421900749206543,
"learning_rate": 7.734740790612137e-06,
"loss": 0.6951,
"step": 259
},
{
"epoch": 1.7542087542087543,
"grad_norm": 0.292523592710495,
"learning_rate": 7.716007741727368e-06,
"loss": 0.6859,
"step": 260
},
{
"epoch": 1.7609427609427608,
"grad_norm": 0.301112562417984,
"learning_rate": 7.69722045288066e-06,
"loss": 0.6942,
"step": 261
},
{
"epoch": 1.7676767676767677,
"grad_norm": 0.31593406200408936,
"learning_rate": 7.678379299263076e-06,
"loss": 0.689,
"step": 262
},
{
"epoch": 1.7744107744107744,
"grad_norm": 0.33452534675598145,
"learning_rate": 7.659484657141382e-06,
"loss": 0.752,
"step": 263
},
{
"epoch": 1.7811447811447811,
"grad_norm": 0.3017633259296417,
"learning_rate": 7.64053690385054e-06,
"loss": 0.7052,
"step": 264
},
{
"epoch": 1.7878787878787878,
"grad_norm": 0.28459274768829346,
"learning_rate": 7.621536417786159e-06,
"loss": 0.6738,
"step": 265
},
{
"epoch": 1.7946127946127945,
"grad_norm": 0.2791585624217987,
"learning_rate": 7.602483578396955e-06,
"loss": 0.6819,
"step": 266
},
{
"epoch": 1.8013468013468015,
"grad_norm": 0.3020349144935608,
"learning_rate": 7.583378766177163e-06,
"loss": 0.72,
"step": 267
},
{
"epoch": 1.808080808080808,
"grad_norm": 0.28048840165138245,
"learning_rate": 7.564222362658935e-06,
"loss": 0.6961,
"step": 268
},
{
"epoch": 1.8148148148148149,
"grad_norm": 0.29612207412719727,
"learning_rate": 7.54501475040473e-06,
"loss": 0.6902,
"step": 269
},
{
"epoch": 1.8215488215488216,
"grad_norm": 0.27847015857696533,
"learning_rate": 7.52575631299967e-06,
"loss": 0.706,
"step": 270
},
{
"epoch": 1.8282828282828283,
"grad_norm": 0.29833778738975525,
"learning_rate": 7.5064474350438755e-06,
"loss": 0.6795,
"step": 271
},
{
"epoch": 1.835016835016835,
"grad_norm": 0.30200687050819397,
"learning_rate": 7.487088502144793e-06,
"loss": 0.672,
"step": 272
},
{
"epoch": 1.8417508417508417,
"grad_norm": 0.2916306257247925,
"learning_rate": 7.467679900909489e-06,
"loss": 0.6495,
"step": 273
},
{
"epoch": 1.8484848484848486,
"grad_norm": 0.28734859824180603,
"learning_rate": 7.4482220189369295e-06,
"loss": 0.6752,
"step": 274
},
{
"epoch": 1.855218855218855,
"grad_norm": 0.31577038764953613,
"learning_rate": 7.428715244810238e-06,
"loss": 0.7235,
"step": 275
},
{
"epoch": 1.861952861952862,
"grad_norm": 0.31323179602622986,
"learning_rate": 7.4091599680889425e-06,
"loss": 0.6839,
"step": 276
},
{
"epoch": 1.8686868686868687,
"grad_norm": 0.28605639934539795,
"learning_rate": 7.389556579301186e-06,
"loss": 0.6638,
"step": 277
},
{
"epoch": 1.8754208754208754,
"grad_norm": 0.2822740077972412,
"learning_rate": 7.369905469935935e-06,
"loss": 0.6982,
"step": 278
},
{
"epoch": 1.8821548821548821,
"grad_norm": 0.290394127368927,
"learning_rate": 7.350207032435157e-06,
"loss": 0.7118,
"step": 279
},
{
"epoch": 1.8888888888888888,
"grad_norm": 0.2912713587284088,
"learning_rate": 7.330461660185987e-06,
"loss": 0.6776,
"step": 280
},
{
"epoch": 1.8956228956228958,
"grad_norm": 0.32215046882629395,
"learning_rate": 7.3106697475128655e-06,
"loss": 0.6909,
"step": 281
},
{
"epoch": 1.9023569023569022,
"grad_norm": 0.28878840804100037,
"learning_rate": 7.2908316896696725e-06,
"loss": 0.6768,
"step": 282
},
{
"epoch": 1.9090909090909092,
"grad_norm": 0.32832571864128113,
"learning_rate": 7.270947882831823e-06,
"loss": 0.7195,
"step": 283
},
{
"epoch": 1.9158249158249159,
"grad_norm": 0.32346925139427185,
"learning_rate": 7.251018724088367e-06,
"loss": 0.6962,
"step": 284
},
{
"epoch": 1.9225589225589226,
"grad_norm": 0.32384803891181946,
"learning_rate": 7.231044611434049e-06,
"loss": 0.6501,
"step": 285
},
{
"epoch": 1.9292929292929293,
"grad_norm": 0.3192203640937805,
"learning_rate": 7.211025943761367e-06,
"loss": 0.712,
"step": 286
},
{
"epoch": 1.936026936026936,
"grad_norm": 0.2892184853553772,
"learning_rate": 7.190963120852601e-06,
"loss": 0.6559,
"step": 287
},
{
"epoch": 1.942760942760943,
"grad_norm": 0.30964168906211853,
"learning_rate": 7.1708565433718354e-06,
"loss": 0.7041,
"step": 288
},
{
"epoch": 1.9494949494949494,
"grad_norm": 0.3227485418319702,
"learning_rate": 7.150706612856952e-06,
"loss": 0.6702,
"step": 289
},
{
"epoch": 1.9562289562289563,
"grad_norm": 0.30388596653938293,
"learning_rate": 7.130513731711616e-06,
"loss": 0.7115,
"step": 290
},
{
"epoch": 1.9629629629629628,
"grad_norm": 0.30058157444000244,
"learning_rate": 7.1102783031972326e-06,
"loss": 0.6723,
"step": 291
},
{
"epoch": 1.9696969696969697,
"grad_norm": 0.2957688570022583,
"learning_rate": 7.0900007314249e-06,
"loss": 0.6609,
"step": 292
},
{
"epoch": 1.9764309764309764,
"grad_norm": 0.32087063789367676,
"learning_rate": 7.06968142134734e-06,
"loss": 0.6666,
"step": 293
},
{
"epoch": 1.9831649831649831,
"grad_norm": 0.31820395588874817,
"learning_rate": 7.0493207787508034e-06,
"loss": 0.6853,
"step": 294
},
{
"epoch": 1.98989898989899,
"grad_norm": 0.32074615359306335,
"learning_rate": 7.028919210246975e-06,
"loss": 0.6757,
"step": 295
},
{
"epoch": 1.9966329966329965,
"grad_norm": 0.3086588680744171,
"learning_rate": 7.008477123264849e-06,
"loss": 0.6955,
"step": 296
},
{
"epoch": 2.006734006734007,
"grad_norm": 0.34101998805999756,
"learning_rate": 6.987994926042588e-06,
"loss": 0.8372,
"step": 297
},
{
"epoch": 2.0134680134680134,
"grad_norm": 0.30261072516441345,
"learning_rate": 6.967473027619381e-06,
"loss": 0.6775,
"step": 298
},
{
"epoch": 2.0202020202020203,
"grad_norm": 0.3041035532951355,
"learning_rate": 6.946911837827267e-06,
"loss": 0.6918,
"step": 299
},
{
"epoch": 2.026936026936027,
"grad_norm": 0.3256051242351532,
"learning_rate": 6.926311767282951e-06,
"loss": 0.7202,
"step": 300
},
{
"epoch": 2.0336700336700337,
"grad_norm": 0.31417331099510193,
"learning_rate": 6.905673227379606e-06,
"loss": 0.7169,
"step": 301
},
{
"epoch": 2.04040404040404,
"grad_norm": 0.30272215604782104,
"learning_rate": 6.884996630278654e-06,
"loss": 0.6821,
"step": 302
},
{
"epoch": 2.047138047138047,
"grad_norm": 0.3101252019405365,
"learning_rate": 6.864282388901544e-06,
"loss": 0.6856,
"step": 303
},
{
"epoch": 2.053872053872054,
"grad_norm": 0.2862723767757416,
"learning_rate": 6.84353091692149e-06,
"loss": 0.6475,
"step": 304
},
{
"epoch": 2.0606060606060606,
"grad_norm": 0.30249181389808655,
"learning_rate": 6.822742628755228e-06,
"loss": 0.6957,
"step": 305
},
{
"epoch": 2.0673400673400675,
"grad_norm": 0.3228001892566681,
"learning_rate": 6.801917939554721e-06,
"loss": 0.6636,
"step": 306
},
{
"epoch": 2.074074074074074,
"grad_norm": 0.32464271783828735,
"learning_rate": 6.781057265198885e-06,
"loss": 0.6734,
"step": 307
},
{
"epoch": 2.080808080808081,
"grad_norm": 0.30116304755210876,
"learning_rate": 6.760161022285274e-06,
"loss": 0.6762,
"step": 308
},
{
"epoch": 2.0875420875420874,
"grad_norm": 0.31851866841316223,
"learning_rate": 6.739229628121765e-06,
"loss": 0.679,
"step": 309
},
{
"epoch": 2.0942760942760943,
"grad_norm": 0.30224835872650146,
"learning_rate": 6.7182635007182186e-06,
"loss": 0.6745,
"step": 310
},
{
"epoch": 2.101010101010101,
"grad_norm": 0.3195047676563263,
"learning_rate": 6.6972630587781385e-06,
"loss": 0.7181,
"step": 311
},
{
"epoch": 2.1077441077441077,
"grad_norm": 0.3004336357116699,
"learning_rate": 6.676228721690301e-06,
"loss": 0.6547,
"step": 312
},
{
"epoch": 2.1144781144781146,
"grad_norm": 0.2977725565433502,
"learning_rate": 6.655160909520391e-06,
"loss": 0.6855,
"step": 313
},
{
"epoch": 2.121212121212121,
"grad_norm": 0.3015722334384918,
"learning_rate": 6.634060043002603e-06,
"loss": 0.688,
"step": 314
},
{
"epoch": 2.127946127946128,
"grad_norm": 0.3413928151130676,
"learning_rate": 6.6129265435312405e-06,
"loss": 0.7018,
"step": 315
},
{
"epoch": 2.1346801346801345,
"grad_norm": 0.30066797137260437,
"learning_rate": 6.591760833152306e-06,
"loss": 0.6873,
"step": 316
},
{
"epoch": 2.1414141414141414,
"grad_norm": 0.3292277157306671,
"learning_rate": 6.570563334555068e-06,
"loss": 0.6709,
"step": 317
},
{
"epoch": 2.148148148148148,
"grad_norm": 0.29609090089797974,
"learning_rate": 6.54933447106362e-06,
"loss": 0.6582,
"step": 318
},
{
"epoch": 2.154882154882155,
"grad_norm": 0.3019602596759796,
"learning_rate": 6.52807466662843e-06,
"loss": 0.7016,
"step": 319
},
{
"epoch": 2.1616161616161618,
"grad_norm": 0.31941643357276917,
"learning_rate": 6.506784345817867e-06,
"loss": 0.6852,
"step": 320
},
{
"epoch": 2.1683501683501682,
"grad_norm": 0.33759576082229614,
"learning_rate": 6.48546393380973e-06,
"loss": 0.7457,
"step": 321
},
{
"epoch": 2.175084175084175,
"grad_norm": 0.32998767495155334,
"learning_rate": 6.464113856382752e-06,
"loss": 0.6925,
"step": 322
},
{
"epoch": 2.1818181818181817,
"grad_norm": 0.3006787598133087,
"learning_rate": 6.4427345399081e-06,
"loss": 0.6636,
"step": 323
},
{
"epoch": 2.1885521885521886,
"grad_norm": 0.31251853704452515,
"learning_rate": 6.421326411340855e-06,
"loss": 0.663,
"step": 324
},
{
"epoch": 2.1952861952861955,
"grad_norm": 0.31268492341041565,
"learning_rate": 6.399889898211495e-06,
"loss": 0.6522,
"step": 325
},
{
"epoch": 2.202020202020202,
"grad_norm": 0.31962957978248596,
"learning_rate": 6.378425428617343e-06,
"loss": 0.6674,
"step": 326
},
{
"epoch": 2.208754208754209,
"grad_norm": 0.29788488149642944,
"learning_rate": 6.356933431214034e-06,
"loss": 0.6994,
"step": 327
},
{
"epoch": 2.2154882154882154,
"grad_norm": 0.3490550220012665,
"learning_rate": 6.3354143352069415e-06,
"loss": 0.6686,
"step": 328
},
{
"epoch": 2.2222222222222223,
"grad_norm": 0.31461504101753235,
"learning_rate": 6.313868570342614e-06,
"loss": 0.6635,
"step": 329
},
{
"epoch": 2.228956228956229,
"grad_norm": 0.3282307982444763,
"learning_rate": 6.292296566900187e-06,
"loss": 0.689,
"step": 330
},
{
"epoch": 2.2356902356902357,
"grad_norm": 0.3187669813632965,
"learning_rate": 6.270698755682792e-06,
"loss": 0.6758,
"step": 331
},
{
"epoch": 2.242424242424242,
"grad_norm": 0.3053145706653595,
"learning_rate": 6.249075568008961e-06,
"loss": 0.675,
"step": 332
},
{
"epoch": 2.249158249158249,
"grad_norm": 0.3063344359397888,
"learning_rate": 6.227427435703997e-06,
"loss": 0.6877,
"step": 333
},
{
"epoch": 2.255892255892256,
"grad_norm": 0.3036075532436371,
"learning_rate": 6.205754791091364e-06,
"loss": 0.6875,
"step": 334
},
{
"epoch": 2.2626262626262625,
"grad_norm": 0.34430813789367676,
"learning_rate": 6.1840580669840455e-06,
"loss": 0.7325,
"step": 335
},
{
"epoch": 2.2693602693602695,
"grad_norm": 0.3094182312488556,
"learning_rate": 6.162337696675909e-06,
"loss": 0.6818,
"step": 336
},
{
"epoch": 2.276094276094276,
"grad_norm": 0.3417794704437256,
"learning_rate": 6.140594113933043e-06,
"loss": 0.6773,
"step": 337
},
{
"epoch": 2.282828282828283,
"grad_norm": 0.33146169781684875,
"learning_rate": 6.1188277529851015e-06,
"loss": 0.6962,
"step": 338
},
{
"epoch": 2.28956228956229,
"grad_norm": 0.3180915415287018,
"learning_rate": 6.097039048516628e-06,
"loss": 0.661,
"step": 339
},
{
"epoch": 2.2962962962962963,
"grad_norm": 0.30588653683662415,
"learning_rate": 6.075228435658379e-06,
"loss": 0.6746,
"step": 340
},
{
"epoch": 2.303030303030303,
"grad_norm": 0.3140996992588043,
"learning_rate": 6.053396349978632e-06,
"loss": 0.662,
"step": 341
},
{
"epoch": 2.3097643097643097,
"grad_norm": 0.2963620722293854,
"learning_rate": 6.031543227474486e-06,
"loss": 0.62,
"step": 342
},
{
"epoch": 2.3164983164983166,
"grad_norm": 0.32323580980300903,
"learning_rate": 6.009669504563154e-06,
"loss": 0.6728,
"step": 343
},
{
"epoch": 2.323232323232323,
"grad_norm": 0.33873456716537476,
"learning_rate": 5.9877756180732505e-06,
"loss": 0.6658,
"step": 344
},
{
"epoch": 2.32996632996633,
"grad_norm": 0.3323650658130646,
"learning_rate": 5.965862005236067e-06,
"loss": 0.7106,
"step": 345
},
{
"epoch": 2.3367003367003365,
"grad_norm": 0.30574050545692444,
"learning_rate": 5.943929103676839e-06,
"loss": 0.6791,
"step": 346
},
{
"epoch": 2.3434343434343434,
"grad_norm": 0.3394505977630615,
"learning_rate": 5.921977351406004e-06,
"loss": 0.6929,
"step": 347
},
{
"epoch": 2.3501683501683504,
"grad_norm": 0.3277929425239563,
"learning_rate": 5.900007186810461e-06,
"loss": 0.6757,
"step": 348
},
{
"epoch": 2.356902356902357,
"grad_norm": 0.33306998014450073,
"learning_rate": 5.878019048644812e-06,
"loss": 0.6909,
"step": 349
},
{
"epoch": 2.3636363636363638,
"grad_norm": 0.34115421772003174,
"learning_rate": 5.856013376022594e-06,
"loss": 0.7159,
"step": 350
},
{
"epoch": 2.3703703703703702,
"grad_norm": 0.32741618156433105,
"learning_rate": 5.833990608407525e-06,
"loss": 0.6522,
"step": 351
},
{
"epoch": 2.377104377104377,
"grad_norm": 0.30264726281166077,
"learning_rate": 5.811951185604709e-06,
"loss": 0.6774,
"step": 352
},
{
"epoch": 2.3838383838383836,
"grad_norm": 0.31635618209838867,
"learning_rate": 5.789895547751867e-06,
"loss": 0.6415,
"step": 353
},
{
"epoch": 2.3905723905723906,
"grad_norm": 0.3575221598148346,
"learning_rate": 5.767824135310538e-06,
"loss": 0.6525,
"step": 354
},
{
"epoch": 2.3973063973063975,
"grad_norm": 0.3207171857357025,
"learning_rate": 5.745737389057294e-06,
"loss": 0.6584,
"step": 355
},
{
"epoch": 2.404040404040404,
"grad_norm": 0.3243761360645294,
"learning_rate": 5.723635750074924e-06,
"loss": 0.6651,
"step": 356
},
{
"epoch": 2.410774410774411,
"grad_norm": 0.318534255027771,
"learning_rate": 5.701519659743636e-06,
"loss": 0.6829,
"step": 357
},
{
"epoch": 2.4175084175084174,
"grad_norm": 0.33522891998291016,
"learning_rate": 5.679389559732234e-06,
"loss": 0.6908,
"step": 358
},
{
"epoch": 2.4242424242424243,
"grad_norm": 0.32205212116241455,
"learning_rate": 5.657245891989307e-06,
"loss": 0.6681,
"step": 359
},
{
"epoch": 2.430976430976431,
"grad_norm": 0.3299495279788971,
"learning_rate": 5.635089098734394e-06,
"loss": 0.6678,
"step": 360
},
{
"epoch": 2.4377104377104377,
"grad_norm": 0.329253226518631,
"learning_rate": 5.61291962244916e-06,
"loss": 0.668,
"step": 361
},
{
"epoch": 2.4444444444444446,
"grad_norm": 0.3404013216495514,
"learning_rate": 5.59073790586855e-06,
"loss": 0.6334,
"step": 362
},
{
"epoch": 2.451178451178451,
"grad_norm": 0.33572322130203247,
"learning_rate": 5.568544391971964e-06,
"loss": 0.6728,
"step": 363
},
{
"epoch": 2.457912457912458,
"grad_norm": 0.33126330375671387,
"learning_rate": 5.546339523974389e-06,
"loss": 0.6262,
"step": 364
},
{
"epoch": 2.4646464646464645,
"grad_norm": 0.3154200613498688,
"learning_rate": 5.5241237453175664e-06,
"loss": 0.6421,
"step": 365
},
{
"epoch": 2.4713804713804715,
"grad_norm": 0.3080921173095703,
"learning_rate": 5.501897499661123e-06,
"loss": 0.66,
"step": 366
},
{
"epoch": 2.478114478114478,
"grad_norm": 0.3175220191478729,
"learning_rate": 5.4796612308737225e-06,
"loss": 0.6566,
"step": 367
},
{
"epoch": 2.484848484848485,
"grad_norm": 0.3048228621482849,
"learning_rate": 5.4574153830241905e-06,
"loss": 0.6684,
"step": 368
},
{
"epoch": 2.4915824915824913,
"grad_norm": 0.3418152630329132,
"learning_rate": 5.435160400372653e-06,
"loss": 0.7178,
"step": 369
},
{
"epoch": 2.4983164983164983,
"grad_norm": 0.34362682700157166,
"learning_rate": 5.412896727361663e-06,
"loss": 0.6925,
"step": 370
},
{
"epoch": 2.505050505050505,
"grad_norm": 0.30840158462524414,
"learning_rate": 5.390624808607321e-06,
"loss": 0.637,
"step": 371
},
{
"epoch": 2.5117845117845117,
"grad_norm": 0.33842259645462036,
"learning_rate": 5.368345088890401e-06,
"loss": 0.7088,
"step": 372
},
{
"epoch": 2.5185185185185186,
"grad_norm": 0.34740138053894043,
"learning_rate": 5.34605801314747e-06,
"loss": 0.6908,
"step": 373
},
{
"epoch": 2.525252525252525,
"grad_norm": 0.35594460368156433,
"learning_rate": 5.323764026461988e-06,
"loss": 0.6766,
"step": 374
},
{
"epoch": 2.531986531986532,
"grad_norm": 0.3934316635131836,
"learning_rate": 5.301463574055441e-06,
"loss": 0.6603,
"step": 375
},
{
"epoch": 2.538720538720539,
"grad_norm": 0.3513649106025696,
"learning_rate": 5.279157101278433e-06,
"loss": 0.6872,
"step": 376
},
{
"epoch": 2.5454545454545454,
"grad_norm": 0.38326022028923035,
"learning_rate": 5.256845053601795e-06,
"loss": 0.6723,
"step": 377
},
{
"epoch": 2.5521885521885523,
"grad_norm": 0.35960447788238525,
"learning_rate": 5.234527876607698e-06,
"loss": 0.6664,
"step": 378
},
{
"epoch": 2.558922558922559,
"grad_norm": 0.3370625674724579,
"learning_rate": 5.212206015980742e-06,
"loss": 0.6935,
"step": 379
},
{
"epoch": 2.5656565656565657,
"grad_norm": 0.3355901837348938,
"learning_rate": 5.189879917499067e-06,
"loss": 0.6628,
"step": 380
},
{
"epoch": 2.5723905723905722,
"grad_norm": 0.33717080950737,
"learning_rate": 5.1675500270254385e-06,
"loss": 0.6683,
"step": 381
},
{
"epoch": 2.579124579124579,
"grad_norm": 0.3160232603549957,
"learning_rate": 5.145216790498355e-06,
"loss": 0.6727,
"step": 382
},
{
"epoch": 2.5858585858585856,
"grad_norm": 0.3072569668292999,
"learning_rate": 5.122880653923134e-06,
"loss": 0.6664,
"step": 383
},
{
"epoch": 2.5925925925925926,
"grad_norm": 0.3160644471645355,
"learning_rate": 5.100542063363013e-06,
"loss": 0.6614,
"step": 384
},
{
"epoch": 2.5993265993265995,
"grad_norm": 0.33290305733680725,
"learning_rate": 5.07820146493023e-06,
"loss": 0.6872,
"step": 385
},
{
"epoch": 2.606060606060606,
"grad_norm": 0.3331235647201538,
"learning_rate": 5.055859304777127e-06,
"loss": 0.6443,
"step": 386
},
{
"epoch": 2.612794612794613,
"grad_norm": 0.3328459560871124,
"learning_rate": 5.033516029087231e-06,
"loss": 0.6589,
"step": 387
},
{
"epoch": 2.6195286195286194,
"grad_norm": 0.3131551742553711,
"learning_rate": 5.011172084066349e-06,
"loss": 0.648,
"step": 388
},
{
"epoch": 2.6262626262626263,
"grad_norm": 0.318933367729187,
"learning_rate": 4.988827915933652e-06,
"loss": 0.648,
"step": 389
},
{
"epoch": 2.6329966329966332,
"grad_norm": 0.3695783019065857,
"learning_rate": 4.966483970912769e-06,
"loss": 0.6652,
"step": 390
},
{
"epoch": 2.6397306397306397,
"grad_norm": 0.34661394357681274,
"learning_rate": 4.944140695222874e-06,
"loss": 0.6863,
"step": 391
},
{
"epoch": 2.6464646464646466,
"grad_norm": 0.3176628351211548,
"learning_rate": 4.92179853506977e-06,
"loss": 0.6591,
"step": 392
},
{
"epoch": 2.653198653198653,
"grad_norm": 0.3425232470035553,
"learning_rate": 4.899457936636988e-06,
"loss": 0.6772,
"step": 393
},
{
"epoch": 2.65993265993266,
"grad_norm": 0.32734331488609314,
"learning_rate": 4.877119346076868e-06,
"loss": 0.6877,
"step": 394
},
{
"epoch": 2.6666666666666665,
"grad_norm": 0.35816332697868347,
"learning_rate": 4.854783209501646e-06,
"loss": 0.6625,
"step": 395
},
{
"epoch": 2.6734006734006734,
"grad_norm": 0.322437584400177,
"learning_rate": 4.832449972974564e-06,
"loss": 0.6639,
"step": 396
},
{
"epoch": 2.68013468013468,
"grad_norm": 0.3232383131980896,
"learning_rate": 4.810120082500934e-06,
"loss": 0.6987,
"step": 397
},
{
"epoch": 2.686868686868687,
"grad_norm": 0.33261874318122864,
"learning_rate": 4.78779398401926e-06,
"loss": 0.6815,
"step": 398
},
{
"epoch": 2.6936026936026938,
"grad_norm": 0.3170070946216583,
"learning_rate": 4.765472123392304e-06,
"loss": 0.6881,
"step": 399
},
{
"epoch": 2.7003367003367003,
"grad_norm": 0.32975471019744873,
"learning_rate": 4.743154946398207e-06,
"loss": 0.6482,
"step": 400
},
{
"epoch": 2.707070707070707,
"grad_norm": 0.3293749988079071,
"learning_rate": 4.720842898721569e-06,
"loss": 0.629,
"step": 401
},
{
"epoch": 2.7138047138047137,
"grad_norm": 0.3324597477912903,
"learning_rate": 4.698536425944561e-06,
"loss": 0.6978,
"step": 402
},
{
"epoch": 2.7205387205387206,
"grad_norm": 0.3278536796569824,
"learning_rate": 4.6762359735380135e-06,
"loss": 0.6663,
"step": 403
},
{
"epoch": 2.7272727272727275,
"grad_norm": 0.33780574798583984,
"learning_rate": 4.653941986852533e-06,
"loss": 0.6926,
"step": 404
},
{
"epoch": 2.734006734006734,
"grad_norm": 0.35599419474601746,
"learning_rate": 4.6316549111096e-06,
"loss": 0.6759,
"step": 405
},
{
"epoch": 2.7407407407407405,
"grad_norm": 0.32220327854156494,
"learning_rate": 4.60937519139268e-06,
"loss": 0.6677,
"step": 406
},
{
"epoch": 2.7474747474747474,
"grad_norm": 0.32263022661209106,
"learning_rate": 4.587103272638339e-06,
"loss": 0.6727,
"step": 407
},
{
"epoch": 2.7542087542087543,
"grad_norm": 0.32157519459724426,
"learning_rate": 4.564839599627347e-06,
"loss": 0.6603,
"step": 408
},
{
"epoch": 2.760942760942761,
"grad_norm": 0.32805973291397095,
"learning_rate": 4.542584616975811e-06,
"loss": 0.6964,
"step": 409
},
{
"epoch": 2.7676767676767677,
"grad_norm": 0.37896931171417236,
"learning_rate": 4.5203387691262774e-06,
"loss": 0.6656,
"step": 410
},
{
"epoch": 2.774410774410774,
"grad_norm": 0.35789135098457336,
"learning_rate": 4.498102500338879e-06,
"loss": 0.6379,
"step": 411
},
{
"epoch": 2.781144781144781,
"grad_norm": 0.3611665368080139,
"learning_rate": 4.475876254682436e-06,
"loss": 0.6765,
"step": 412
},
{
"epoch": 2.787878787878788,
"grad_norm": 0.3591862618923187,
"learning_rate": 4.453660476025612e-06,
"loss": 0.6686,
"step": 413
},
{
"epoch": 2.7946127946127945,
"grad_norm": 0.32585394382476807,
"learning_rate": 4.431455608028038e-06,
"loss": 0.6655,
"step": 414
},
{
"epoch": 2.8013468013468015,
"grad_norm": 0.32006388902664185,
"learning_rate": 4.40926209413145e-06,
"loss": 0.6695,
"step": 415
},
{
"epoch": 2.808080808080808,
"grad_norm": 0.3642265200614929,
"learning_rate": 4.387080377550843e-06,
"loss": 0.643,
"step": 416
},
{
"epoch": 2.814814814814815,
"grad_norm": 0.3640241026878357,
"learning_rate": 4.364910901265607e-06,
"loss": 0.6859,
"step": 417
},
{
"epoch": 2.821548821548822,
"grad_norm": 0.31810638308525085,
"learning_rate": 4.342754108010695e-06,
"loss": 0.6802,
"step": 418
},
{
"epoch": 2.8282828282828283,
"grad_norm": 0.3474580645561218,
"learning_rate": 4.320610440267766e-06,
"loss": 0.6944,
"step": 419
},
{
"epoch": 2.8350168350168348,
"grad_norm": 0.340483695268631,
"learning_rate": 4.298480340256365e-06,
"loss": 0.663,
"step": 420
},
{
"epoch": 2.8417508417508417,
"grad_norm": 0.320322185754776,
"learning_rate": 4.2763642499250765e-06,
"loss": 0.6115,
"step": 421
},
{
"epoch": 2.8484848484848486,
"grad_norm": 0.33514732122421265,
"learning_rate": 4.254262610942707e-06,
"loss": 0.6722,
"step": 422
},
{
"epoch": 2.855218855218855,
"grad_norm": 0.3475134074687958,
"learning_rate": 4.232175864689464e-06,
"loss": 0.6517,
"step": 423
},
{
"epoch": 2.861952861952862,
"grad_norm": 0.32075583934783936,
"learning_rate": 4.210104452248135e-06,
"loss": 0.6677,
"step": 424
},
{
"epoch": 2.8686868686868685,
"grad_norm": 0.350498229265213,
"learning_rate": 4.188048814395293e-06,
"loss": 0.6616,
"step": 425
},
{
"epoch": 2.8754208754208754,
"grad_norm": 0.3592052161693573,
"learning_rate": 4.166009391592476e-06,
"loss": 0.6494,
"step": 426
},
{
"epoch": 2.8821548821548824,
"grad_norm": 0.32770347595214844,
"learning_rate": 4.1439866239774065e-06,
"loss": 0.6604,
"step": 427
},
{
"epoch": 2.888888888888889,
"grad_norm": 0.34549760818481445,
"learning_rate": 4.12198095135519e-06,
"loss": 0.657,
"step": 428
},
{
"epoch": 2.8956228956228958,
"grad_norm": 0.3358047604560852,
"learning_rate": 4.09999281318954e-06,
"loss": 0.6635,
"step": 429
},
{
"epoch": 2.9023569023569022,
"grad_norm": 0.3430233299732208,
"learning_rate": 4.078022648593997e-06,
"loss": 0.6589,
"step": 430
},
{
"epoch": 2.909090909090909,
"grad_norm": 0.3354911208152771,
"learning_rate": 4.056070896323163e-06,
"loss": 0.6881,
"step": 431
},
{
"epoch": 2.915824915824916,
"grad_norm": 0.3450566828250885,
"learning_rate": 4.034137994763934e-06,
"loss": 0.6407,
"step": 432
},
{
"epoch": 2.9225589225589226,
"grad_norm": 0.32240331172943115,
"learning_rate": 4.01222438192675e-06,
"loss": 0.6892,
"step": 433
},
{
"epoch": 2.929292929292929,
"grad_norm": 0.3347194790840149,
"learning_rate": 3.990330495436848e-06,
"loss": 0.6664,
"step": 434
},
{
"epoch": 2.936026936026936,
"grad_norm": 0.354972779750824,
"learning_rate": 3.968456772525515e-06,
"loss": 0.6419,
"step": 435
},
{
"epoch": 2.942760942760943,
"grad_norm": 0.3557754456996918,
"learning_rate": 3.94660365002137e-06,
"loss": 0.6996,
"step": 436
},
{
"epoch": 2.9494949494949494,
"grad_norm": 0.35374686121940613,
"learning_rate": 3.924771564341621e-06,
"loss": 0.6786,
"step": 437
},
{
"epoch": 2.9562289562289563,
"grad_norm": 0.3219892680644989,
"learning_rate": 3.902960951483375e-06,
"loss": 0.6512,
"step": 438
},
{
"epoch": 2.962962962962963,
"grad_norm": 0.36575546860694885,
"learning_rate": 3.881172247014899e-06,
"loss": 0.6893,
"step": 439
},
{
"epoch": 2.9696969696969697,
"grad_norm": 0.3633718490600586,
"learning_rate": 3.859405886066959e-06,
"loss": 0.6841,
"step": 440
},
{
"epoch": 2.9764309764309766,
"grad_norm": 0.3494553565979004,
"learning_rate": 3.837662303324093e-06,
"loss": 0.6643,
"step": 441
},
{
"epoch": 2.983164983164983,
"grad_norm": 0.3369433283805847,
"learning_rate": 3.815941933015956e-06,
"loss": 0.648,
"step": 442
},
{
"epoch": 2.98989898989899,
"grad_norm": 0.33903154730796814,
"learning_rate": 3.794245208908639e-06,
"loss": 0.6514,
"step": 443
},
{
"epoch": 2.9966329966329965,
"grad_norm": 0.3544797897338867,
"learning_rate": 3.7725725642960047e-06,
"loss": 0.6624,
"step": 444
},
{
"epoch": 3.006734006734007,
"grad_norm": 0.3794746398925781,
"learning_rate": 3.750924431991041e-06,
"loss": 0.7897,
"step": 445
},
{
"epoch": 3.0134680134680134,
"grad_norm": 0.36432963609695435,
"learning_rate": 3.729301244317208e-06,
"loss": 0.6377,
"step": 446
},
{
"epoch": 3.0202020202020203,
"grad_norm": 0.3496221601963043,
"learning_rate": 3.7077034330998154e-06,
"loss": 0.6642,
"step": 447
},
{
"epoch": 3.026936026936027,
"grad_norm": 0.3397136628627777,
"learning_rate": 3.686131429657387e-06,
"loss": 0.6914,
"step": 448
},
{
"epoch": 3.0336700336700337,
"grad_norm": 0.3500446081161499,
"learning_rate": 3.6645856647930593e-06,
"loss": 0.6865,
"step": 449
},
{
"epoch": 3.04040404040404,
"grad_norm": 0.3360905349254608,
"learning_rate": 3.643066568785969e-06,
"loss": 0.695,
"step": 450
},
{
"epoch": 3.047138047138047,
"grad_norm": 0.31209760904312134,
"learning_rate": 3.6215745713826585e-06,
"loss": 0.6644,
"step": 451
},
{
"epoch": 3.053872053872054,
"grad_norm": 0.33437401056289673,
"learning_rate": 3.6001101017885086e-06,
"loss": 0.6565,
"step": 452
},
{
"epoch": 3.0606060606060606,
"grad_norm": 0.34896934032440186,
"learning_rate": 3.578673588659145e-06,
"loss": 0.6267,
"step": 453
},
{
"epoch": 3.0673400673400675,
"grad_norm": 0.3185829520225525,
"learning_rate": 3.557265460091902e-06,
"loss": 0.6679,
"step": 454
},
{
"epoch": 3.074074074074074,
"grad_norm": 0.34871649742126465,
"learning_rate": 3.5358861436172487e-06,
"loss": 0.6655,
"step": 455
},
{
"epoch": 3.080808080808081,
"grad_norm": 0.3568170964717865,
"learning_rate": 3.5145360661902717e-06,
"loss": 0.669,
"step": 456
},
{
"epoch": 3.0875420875420874,
"grad_norm": 0.33438074588775635,
"learning_rate": 3.493215654182134e-06,
"loss": 0.6867,
"step": 457
},
{
"epoch": 3.0942760942760943,
"grad_norm": 0.3569088876247406,
"learning_rate": 3.471925333371572e-06,
"loss": 0.6719,
"step": 458
},
{
"epoch": 3.101010101010101,
"grad_norm": 0.34062451124191284,
"learning_rate": 3.4506655289363815e-06,
"loss": 0.671,
"step": 459
},
{
"epoch": 3.1077441077441077,
"grad_norm": 0.3303685486316681,
"learning_rate": 3.429436665444934e-06,
"loss": 0.6606,
"step": 460
},
{
"epoch": 3.1144781144781146,
"grad_norm": 0.341522753238678,
"learning_rate": 3.408239166847696e-06,
"loss": 0.6535,
"step": 461
},
{
"epoch": 3.121212121212121,
"grad_norm": 0.3573176860809326,
"learning_rate": 3.387073456468761e-06,
"loss": 0.6317,
"step": 462
},
{
"epoch": 3.127946127946128,
"grad_norm": 0.3084196150302887,
"learning_rate": 3.365939956997399e-06,
"loss": 0.6747,
"step": 463
},
{
"epoch": 3.1346801346801345,
"grad_norm": 0.3253374993801117,
"learning_rate": 3.344839090479609e-06,
"loss": 0.6341,
"step": 464
},
{
"epoch": 3.1414141414141414,
"grad_norm": 0.33250677585601807,
"learning_rate": 3.3237712783097003e-06,
"loss": 0.6536,
"step": 465
},
{
"epoch": 3.148148148148148,
"grad_norm": 0.34407663345336914,
"learning_rate": 3.3027369412218623e-06,
"loss": 0.6628,
"step": 466
},
{
"epoch": 3.154882154882155,
"grad_norm": 0.3363301455974579,
"learning_rate": 3.2817364992817835e-06,
"loss": 0.6471,
"step": 467
},
{
"epoch": 3.1616161616161618,
"grad_norm": 0.3450077176094055,
"learning_rate": 3.260770371878236e-06,
"loss": 0.6546,
"step": 468
},
{
"epoch": 3.1683501683501682,
"grad_norm": 0.35277360677719116,
"learning_rate": 3.239838977714728e-06,
"loss": 0.66,
"step": 469
},
{
"epoch": 3.175084175084175,
"grad_norm": 0.3627408742904663,
"learning_rate": 3.2189427348011174e-06,
"loss": 0.6818,
"step": 470
},
{
"epoch": 3.1818181818181817,
"grad_norm": 0.36723682284355164,
"learning_rate": 3.198082060445281e-06,
"loss": 0.6656,
"step": 471
},
{
"epoch": 3.1885521885521886,
"grad_norm": 0.3304051160812378,
"learning_rate": 3.1772573712447753e-06,
"loss": 0.694,
"step": 472
},
{
"epoch": 3.1952861952861955,
"grad_norm": 0.34227272868156433,
"learning_rate": 3.1564690830785106e-06,
"loss": 0.6639,
"step": 473
},
{
"epoch": 3.202020202020202,
"grad_norm": 0.36177343130111694,
"learning_rate": 3.1357176110984578e-06,
"loss": 0.6562,
"step": 474
},
{
"epoch": 3.208754208754209,
"grad_norm": 0.37928614020347595,
"learning_rate": 3.115003369721346e-06,
"loss": 0.6941,
"step": 475
},
{
"epoch": 3.2154882154882154,
"grad_norm": 0.34737908840179443,
"learning_rate": 3.0943267726203965e-06,
"loss": 0.6531,
"step": 476
},
{
"epoch": 3.2222222222222223,
"grad_norm": 0.36090391874313354,
"learning_rate": 3.0736882327170502e-06,
"loss": 0.6559,
"step": 477
},
{
"epoch": 3.228956228956229,
"grad_norm": 0.3377262353897095,
"learning_rate": 3.053088162172734e-06,
"loss": 0.6387,
"step": 478
},
{
"epoch": 3.2356902356902357,
"grad_norm": 0.36763814091682434,
"learning_rate": 3.0325269723806212e-06,
"loss": 0.6775,
"step": 479
},
{
"epoch": 3.242424242424242,
"grad_norm": 0.33776357769966125,
"learning_rate": 3.012005073957413e-06,
"loss": 0.6881,
"step": 480
},
{
"epoch": 3.249158249158249,
"grad_norm": 0.3867388963699341,
"learning_rate": 2.991522876735154e-06,
"loss": 0.6742,
"step": 481
},
{
"epoch": 3.255892255892256,
"grad_norm": 0.3418063223361969,
"learning_rate": 2.9710807897530257e-06,
"loss": 0.6363,
"step": 482
},
{
"epoch": 3.2626262626262625,
"grad_norm": 0.34139132499694824,
"learning_rate": 2.9506792212491987e-06,
"loss": 0.6546,
"step": 483
},
{
"epoch": 3.2693602693602695,
"grad_norm": 0.36169758439064026,
"learning_rate": 2.9303185786526617e-06,
"loss": 0.6599,
"step": 484
},
{
"epoch": 3.276094276094276,
"grad_norm": 0.38258957862854004,
"learning_rate": 2.9099992685751015e-06,
"loss": 0.6774,
"step": 485
},
{
"epoch": 3.282828282828283,
"grad_norm": 0.34986090660095215,
"learning_rate": 2.889721696802768e-06,
"loss": 0.6995,
"step": 486
},
{
"epoch": 3.28956228956229,
"grad_norm": 0.33852121233940125,
"learning_rate": 2.8694862682883867e-06,
"loss": 0.6521,
"step": 487
},
{
"epoch": 3.2962962962962963,
"grad_norm": 0.34015560150146484,
"learning_rate": 2.84929338714305e-06,
"loss": 0.6349,
"step": 488
},
{
"epoch": 3.303030303030303,
"grad_norm": 0.34431853890419006,
"learning_rate": 2.8291434566281654e-06,
"loss": 0.7103,
"step": 489
},
{
"epoch": 3.3097643097643097,
"grad_norm": 0.3356325924396515,
"learning_rate": 2.809036879147401e-06,
"loss": 0.7099,
"step": 490
},
{
"epoch": 3.3164983164983166,
"grad_norm": 0.33619171380996704,
"learning_rate": 2.7889740562386357e-06,
"loss": 0.6572,
"step": 491
},
{
"epoch": 3.323232323232323,
"grad_norm": 0.3263258934020996,
"learning_rate": 2.768955388565953e-06,
"loss": 0.6563,
"step": 492
},
{
"epoch": 3.32996632996633,
"grad_norm": 0.3441072702407837,
"learning_rate": 2.748981275911633e-06,
"loss": 0.657,
"step": 493
},
{
"epoch": 3.3367003367003365,
"grad_norm": 0.3515641689300537,
"learning_rate": 2.7290521171681772e-06,
"loss": 0.6828,
"step": 494
},
{
"epoch": 3.3434343434343434,
"grad_norm": 0.3332815170288086,
"learning_rate": 2.709168310330329e-06,
"loss": 0.6681,
"step": 495
},
{
"epoch": 3.3501683501683504,
"grad_norm": 0.3621049225330353,
"learning_rate": 2.6893302524871357e-06,
"loss": 0.637,
"step": 496
},
{
"epoch": 3.356902356902357,
"grad_norm": 0.36041849851608276,
"learning_rate": 2.6695383398140155e-06,
"loss": 0.6604,
"step": 497
},
{
"epoch": 3.3636363636363638,
"grad_norm": 0.3576204776763916,
"learning_rate": 2.6497929675648435e-06,
"loss": 0.6803,
"step": 498
},
{
"epoch": 3.3703703703703702,
"grad_norm": 0.35570481419563293,
"learning_rate": 2.6300945300640678e-06,
"loss": 0.6428,
"step": 499
},
{
"epoch": 3.377104377104377,
"grad_norm": 0.3621430993080139,
"learning_rate": 2.610443420698815e-06,
"loss": 0.6608,
"step": 500
},
{
"epoch": 3.3838383838383836,
"grad_norm": 0.33905336260795593,
"learning_rate": 2.5908400319110588e-06,
"loss": 0.705,
"step": 501
},
{
"epoch": 3.3905723905723906,
"grad_norm": 0.340137779712677,
"learning_rate": 2.5712847551897613e-06,
"loss": 0.6851,
"step": 502
},
{
"epoch": 3.3973063973063975,
"grad_norm": 0.334957480430603,
"learning_rate": 2.5517779810630725e-06,
"loss": 0.6364,
"step": 503
},
{
"epoch": 3.404040404040404,
"grad_norm": 0.36302581429481506,
"learning_rate": 2.5323200990905106e-06,
"loss": 0.6379,
"step": 504
},
{
"epoch": 3.410774410774411,
"grad_norm": 0.3351585268974304,
"learning_rate": 2.512911497855207e-06,
"loss": 0.6672,
"step": 505
},
{
"epoch": 3.4175084175084174,
"grad_norm": 0.3567354381084442,
"learning_rate": 2.493552564956126e-06,
"loss": 0.6523,
"step": 506
},
{
"epoch": 3.4242424242424243,
"grad_norm": 0.32313767075538635,
"learning_rate": 2.4742436870003326e-06,
"loss": 0.6684,
"step": 507
},
{
"epoch": 3.430976430976431,
"grad_norm": 0.3529592454433441,
"learning_rate": 2.4549852495952727e-06,
"loss": 0.6598,
"step": 508
},
{
"epoch": 3.4377104377104377,
"grad_norm": 0.37786442041397095,
"learning_rate": 2.4357776373410656e-06,
"loss": 0.6633,
"step": 509
},
{
"epoch": 3.4444444444444446,
"grad_norm": 0.361615389585495,
"learning_rate": 2.4166212338228384e-06,
"loss": 0.6698,
"step": 510
},
{
"epoch": 3.451178451178451,
"grad_norm": 0.328656405210495,
"learning_rate": 2.3975164216030456e-06,
"loss": 0.6614,
"step": 511
},
{
"epoch": 3.457912457912458,
"grad_norm": 0.33849841356277466,
"learning_rate": 2.3784635822138424e-06,
"loss": 0.6817,
"step": 512
},
{
"epoch": 3.4646464646464645,
"grad_norm": 0.341377854347229,
"learning_rate": 2.3594630961494615e-06,
"loss": 0.678,
"step": 513
},
{
"epoch": 3.4713804713804715,
"grad_norm": 0.37352806329727173,
"learning_rate": 2.340515342858618e-06,
"loss": 0.6711,
"step": 514
},
{
"epoch": 3.478114478114478,
"grad_norm": 0.3643670678138733,
"learning_rate": 2.3216207007369247e-06,
"loss": 0.6759,
"step": 515
},
{
"epoch": 3.484848484848485,
"grad_norm": 0.3378360867500305,
"learning_rate": 2.3027795471193404e-06,
"loss": 0.644,
"step": 516
},
{
"epoch": 3.4915824915824913,
"grad_norm": 0.32422709465026855,
"learning_rate": 2.283992258272634e-06,
"loss": 0.6352,
"step": 517
},
{
"epoch": 3.4983164983164983,
"grad_norm": 0.37651920318603516,
"learning_rate": 2.265259209387867e-06,
"loss": 0.6611,
"step": 518
},
{
"epoch": 3.505050505050505,
"grad_norm": 0.37039998173713684,
"learning_rate": 2.2465807745729057e-06,
"loss": 0.6201,
"step": 519
},
{
"epoch": 3.5117845117845117,
"grad_norm": 0.3663279116153717,
"learning_rate": 2.2279573268449447e-06,
"loss": 0.6561,
"step": 520
},
{
"epoch": 3.5185185185185186,
"grad_norm": 0.4385327100753784,
"learning_rate": 2.209389238123066e-06,
"loss": 0.6281,
"step": 521
},
{
"epoch": 3.525252525252525,
"grad_norm": 0.35003790259361267,
"learning_rate": 2.1908768792208e-06,
"loss": 0.6357,
"step": 522
},
{
"epoch": 3.531986531986532,
"grad_norm": 0.35347285866737366,
"learning_rate": 2.172420619838729e-06,
"loss": 0.6702,
"step": 523
},
{
"epoch": 3.538720538720539,
"grad_norm": 0.37107527256011963,
"learning_rate": 2.1540208285570997e-06,
"loss": 0.6652,
"step": 524
},
{
"epoch": 3.5454545454545454,
"grad_norm": 0.3373602628707886,
"learning_rate": 2.135677872828467e-06,
"loss": 0.6556,
"step": 525
},
{
"epoch": 3.5521885521885523,
"grad_norm": 0.3483744263648987,
"learning_rate": 2.1173921189703523e-06,
"loss": 0.6596,
"step": 526
},
{
"epoch": 3.558922558922559,
"grad_norm": 0.3399178683757782,
"learning_rate": 2.0991639321579214e-06,
"loss": 0.6438,
"step": 527
},
{
"epoch": 3.5656565656565657,
"grad_norm": 0.3555428385734558,
"learning_rate": 2.0809936764167106e-06,
"loss": 0.6733,
"step": 528
},
{
"epoch": 3.5723905723905722,
"grad_norm": 0.327832967042923,
"learning_rate": 2.0628817146153353e-06,
"loss": 0.6709,
"step": 529
},
{
"epoch": 3.579124579124579,
"grad_norm": 0.36541980504989624,
"learning_rate": 2.0448284084582626e-06,
"loss": 0.6506,
"step": 530
},
{
"epoch": 3.5858585858585856,
"grad_norm": 0.3318157494068146,
"learning_rate": 2.0268341184785674e-06,
"loss": 0.6574,
"step": 531
},
{
"epoch": 3.5925925925925926,
"grad_norm": 0.3311092257499695,
"learning_rate": 2.0088992040307532e-06,
"loss": 0.6492,
"step": 532
},
{
"epoch": 3.5993265993265995,
"grad_norm": 0.3647598326206207,
"learning_rate": 1.991024023283562e-06,
"loss": 0.6976,
"step": 533
},
{
"epoch": 3.606060606060606,
"grad_norm": 0.38615575432777405,
"learning_rate": 1.9732089332128256e-06,
"loss": 0.6697,
"step": 534
},
{
"epoch": 3.612794612794613,
"grad_norm": 0.32451504468917847,
"learning_rate": 1.955454289594336e-06,
"loss": 0.6649,
"step": 535
},
{
"epoch": 3.6195286195286194,
"grad_norm": 0.3254964053630829,
"learning_rate": 1.937760446996741e-06,
"loss": 0.6555,
"step": 536
},
{
"epoch": 3.6262626262626263,
"grad_norm": 0.37884649634361267,
"learning_rate": 1.920127758774466e-06,
"loss": 0.6872,
"step": 537
},
{
"epoch": 3.6329966329966332,
"grad_norm": 0.33180147409439087,
"learning_rate": 1.902556577060652e-06,
"loss": 0.6525,
"step": 538
},
{
"epoch": 3.6397306397306397,
"grad_norm": 0.331493079662323,
"learning_rate": 1.8850472527601249e-06,
"loss": 0.6473,
"step": 539
},
{
"epoch": 3.6464646464646466,
"grad_norm": 0.37020570039749146,
"learning_rate": 1.8676001355423896e-06,
"loss": 0.6347,
"step": 540
},
{
"epoch": 3.653198653198653,
"grad_norm": 0.3622906506061554,
"learning_rate": 1.8502155738346488e-06,
"loss": 0.6535,
"step": 541
},
{
"epoch": 3.65993265993266,
"grad_norm": 0.35044801235198975,
"learning_rate": 1.8328939148148396e-06,
"loss": 0.6701,
"step": 542
},
{
"epoch": 3.6666666666666665,
"grad_norm": 0.3409467041492462,
"learning_rate": 1.8156355044047008e-06,
"loss": 0.6502,
"step": 543
},
{
"epoch": 3.6734006734006734,
"grad_norm": 0.4045044779777527,
"learning_rate": 1.7984406872628702e-06,
"loss": 0.6462,
"step": 544
},
{
"epoch": 3.68013468013468,
"grad_norm": 0.34429696202278137,
"learning_rate": 1.7813098067779949e-06,
"loss": 0.6579,
"step": 545
},
{
"epoch": 3.686868686868687,
"grad_norm": 0.3808661699295044,
"learning_rate": 1.764243205061879e-06,
"loss": 0.6805,
"step": 546
},
{
"epoch": 3.6936026936026938,
"grad_norm": 0.349867582321167,
"learning_rate": 1.7472412229426456e-06,
"loss": 0.6472,
"step": 547
},
{
"epoch": 3.7003367003367003,
"grad_norm": 0.3594163656234741,
"learning_rate": 1.7303041999579395e-06,
"loss": 0.6308,
"step": 548
},
{
"epoch": 3.707070707070707,
"grad_norm": 0.3346593976020813,
"learning_rate": 1.7134324743481367e-06,
"loss": 0.6296,
"step": 549
},
{
"epoch": 3.7138047138047137,
"grad_norm": 0.33241137862205505,
"learning_rate": 1.6966263830495939e-06,
"loss": 0.6743,
"step": 550
},
{
"epoch": 3.7205387205387206,
"grad_norm": 0.3571833074092865,
"learning_rate": 1.6798862616879185e-06,
"loss": 0.6987,
"step": 551
},
{
"epoch": 3.7272727272727275,
"grad_norm": 0.33375877141952515,
"learning_rate": 1.6632124445712717e-06,
"loss": 0.6712,
"step": 552
},
{
"epoch": 3.734006734006734,
"grad_norm": 0.33569273352622986,
"learning_rate": 1.6466052646836834e-06,
"loss": 0.6404,
"step": 553
},
{
"epoch": 3.7407407407407405,
"grad_norm": 0.37933966517448425,
"learning_rate": 1.630065053678407e-06,
"loss": 0.6713,
"step": 554
},
{
"epoch": 3.7474747474747474,
"grad_norm": 0.34397804737091064,
"learning_rate": 1.6135921418712959e-06,
"loss": 0.6684,
"step": 555
},
{
"epoch": 3.7542087542087543,
"grad_norm": 0.3571912348270416,
"learning_rate": 1.5971868582342047e-06,
"loss": 0.6493,
"step": 556
},
{
"epoch": 3.760942760942761,
"grad_norm": 0.3806966245174408,
"learning_rate": 1.5808495303884297e-06,
"loss": 0.6655,
"step": 557
},
{
"epoch": 3.7676767676767677,
"grad_norm": 0.3504544794559479,
"learning_rate": 1.5645804845981443e-06,
"loss": 0.6584,
"step": 558
},
{
"epoch": 3.774410774410774,
"grad_norm": 0.36383214592933655,
"learning_rate": 1.5483800457639092e-06,
"loss": 0.6584,
"step": 559
},
{
"epoch": 3.781144781144781,
"grad_norm": 0.34493446350097656,
"learning_rate": 1.5322485374161627e-06,
"loss": 0.6298,
"step": 560
},
{
"epoch": 3.787878787878788,
"grad_norm": 0.3173808753490448,
"learning_rate": 1.516186281708778e-06,
"loss": 0.6626,
"step": 561
},
{
"epoch": 3.7946127946127945,
"grad_norm": 0.35654887557029724,
"learning_rate": 1.5001935994126105e-06,
"loss": 0.6376,
"step": 562
},
{
"epoch": 3.8013468013468015,
"grad_norm": 0.35921257734298706,
"learning_rate": 1.4842708099091046e-06,
"loss": 0.6613,
"step": 563
},
{
"epoch": 3.808080808080808,
"grad_norm": 0.3472289741039276,
"learning_rate": 1.468418231183918e-06,
"loss": 0.6449,
"step": 564
},
{
"epoch": 3.814814814814815,
"grad_norm": 0.34949102997779846,
"learning_rate": 1.4526361798205597e-06,
"loss": 0.6715,
"step": 565
},
{
"epoch": 3.821548821548822,
"grad_norm": 0.3607044816017151,
"learning_rate": 1.4369249709940759e-06,
"loss": 0.6407,
"step": 566
},
{
"epoch": 3.8282828282828283,
"grad_norm": 0.3401264548301697,
"learning_rate": 1.4212849184647521e-06,
"loss": 0.6821,
"step": 567
},
{
"epoch": 3.8350168350168348,
"grad_norm": 0.367316335439682,
"learning_rate": 1.4057163345718532e-06,
"loss": 0.6716,
"step": 568
},
{
"epoch": 3.8417508417508417,
"grad_norm": 0.36435818672180176,
"learning_rate": 1.390219530227378e-06,
"loss": 0.6927,
"step": 569
},
{
"epoch": 3.8484848484848486,
"grad_norm": 0.35164639353752136,
"learning_rate": 1.374794814909854e-06,
"loss": 0.6816,
"step": 570
},
{
"epoch": 3.855218855218855,
"grad_norm": 0.3416944146156311,
"learning_rate": 1.3594424966581555e-06,
"loss": 0.6842,
"step": 571
},
{
"epoch": 3.861952861952862,
"grad_norm": 0.3335227370262146,
"learning_rate": 1.344162882065359e-06,
"loss": 0.6905,
"step": 572
},
{
"epoch": 3.8686868686868685,
"grad_norm": 0.33466219902038574,
"learning_rate": 1.328956276272606e-06,
"loss": 0.6199,
"step": 573
},
{
"epoch": 3.8754208754208754,
"grad_norm": 0.3441416323184967,
"learning_rate": 1.3138229829630222e-06,
"loss": 0.6286,
"step": 574
},
{
"epoch": 3.8821548821548824,
"grad_norm": 0.3628382980823517,
"learning_rate": 1.2987633043556507e-06,
"loss": 0.6922,
"step": 575
},
{
"epoch": 3.888888888888889,
"grad_norm": 0.34411177039146423,
"learning_rate": 1.2837775411994092e-06,
"loss": 0.6772,
"step": 576
},
{
"epoch": 3.8956228956228958,
"grad_norm": 0.36984753608703613,
"learning_rate": 1.2688659927670916e-06,
"loss": 0.6648,
"step": 577
},
{
"epoch": 3.9023569023569022,
"grad_norm": 0.33258670568466187,
"learning_rate": 1.2540289568493862e-06,
"loss": 0.6833,
"step": 578
},
{
"epoch": 3.909090909090909,
"grad_norm": 0.3239952027797699,
"learning_rate": 1.2392667297489358e-06,
"loss": 0.6492,
"step": 579
},
{
"epoch": 3.915824915824916,
"grad_norm": 0.33867695927619934,
"learning_rate": 1.2245796062744103e-06,
"loss": 0.6806,
"step": 580
},
{
"epoch": 3.9225589225589226,
"grad_norm": 0.3413620889186859,
"learning_rate": 1.2099678797346282e-06,
"loss": 0.681,
"step": 581
},
{
"epoch": 3.929292929292929,
"grad_norm": 0.39322641491889954,
"learning_rate": 1.1954318419326938e-06,
"loss": 0.6874,
"step": 582
},
{
"epoch": 3.936026936026936,
"grad_norm": 0.35259631276130676,
"learning_rate": 1.1809717831601697e-06,
"loss": 0.6163,
"step": 583
},
{
"epoch": 3.942760942760943,
"grad_norm": 0.3440069854259491,
"learning_rate": 1.1665879921912887e-06,
"loss": 0.7002,
"step": 584
},
{
"epoch": 3.9494949494949494,
"grad_norm": 0.3570648729801178,
"learning_rate": 1.1522807562771676e-06,
"loss": 0.6548,
"step": 585
},
{
"epoch": 3.9562289562289563,
"grad_norm": 0.36446353793144226,
"learning_rate": 1.1380503611400933e-06,
"loss": 0.6517,
"step": 586
},
{
"epoch": 3.962962962962963,
"grad_norm": 0.35223159193992615,
"learning_rate": 1.1238970909677993e-06,
"loss": 0.6607,
"step": 587
},
{
"epoch": 3.9696969696969697,
"grad_norm": 0.35263752937316895,
"learning_rate": 1.1098212284078037e-06,
"loss": 0.6274,
"step": 588
},
{
"epoch": 3.9764309764309766,
"grad_norm": 0.3411799371242523,
"learning_rate": 1.095823054561747e-06,
"loss": 0.6384,
"step": 589
},
{
"epoch": 3.983164983164983,
"grad_norm": 0.3355887830257416,
"learning_rate": 1.0819028489798006e-06,
"loss": 0.6544,
"step": 590
},
{
"epoch": 3.98989898989899,
"grad_norm": 0.3442433774471283,
"learning_rate": 1.068060889655066e-06,
"loss": 0.6583,
"step": 591
},
{
"epoch": 3.9966329966329965,
"grad_norm": 0.3523848056793213,
"learning_rate": 1.0542974530180327e-06,
"loss": 0.6097,
"step": 592
},
{
"epoch": 4.006734006734007,
"grad_norm": 0.3924785256385803,
"learning_rate": 1.0406128139310534e-06,
"loss": 0.846,
"step": 593
},
{
"epoch": 4.013468013468014,
"grad_norm": 0.3342352509498596,
"learning_rate": 1.027007245682855e-06,
"loss": 0.6714,
"step": 594
},
{
"epoch": 4.02020202020202,
"grad_norm": 0.339802622795105,
"learning_rate": 1.013481019983088e-06,
"loss": 0.6192,
"step": 595
},
{
"epoch": 4.026936026936027,
"grad_norm": 0.33818167448043823,
"learning_rate": 1.0000344069568885e-06,
"loss": 0.6688,
"step": 596
},
{
"epoch": 4.033670033670034,
"grad_norm": 0.34660354256629944,
"learning_rate": 9.866676751394927e-07,
"loss": 0.6628,
"step": 597
},
{
"epoch": 4.040404040404041,
"grad_norm": 0.35256409645080566,
"learning_rate": 9.733810914708692e-07,
"loss": 0.6967,
"step": 598
},
{
"epoch": 4.047138047138047,
"grad_norm": 0.35528430342674255,
"learning_rate": 9.601749212903937e-07,
"loss": 0.664,
"step": 599
},
{
"epoch": 4.053872053872054,
"grad_norm": 0.3445955216884613,
"learning_rate": 9.470494283315451e-07,
"loss": 0.624,
"step": 600
},
{
"epoch": 4.0606060606060606,
"grad_norm": 0.33463314175605774,
"learning_rate": 9.340048747166341e-07,
"loss": 0.6553,
"step": 601
},
{
"epoch": 4.0673400673400675,
"grad_norm": 0.3332568109035492,
"learning_rate": 9.210415209515833e-07,
"loss": 0.6406,
"step": 602
},
{
"epoch": 4.074074074074074,
"grad_norm": 0.3433932065963745,
"learning_rate": 9.08159625920711e-07,
"loss": 0.6914,
"step": 603
},
{
"epoch": 4.08080808080808,
"grad_norm": 0.3482319712638855,
"learning_rate": 8.953594468815663e-07,
"loss": 0.6554,
"step": 604
},
{
"epoch": 4.087542087542087,
"grad_norm": 0.3479653000831604,
"learning_rate": 8.826412394597906e-07,
"loss": 0.6565,
"step": 605
},
{
"epoch": 4.094276094276094,
"grad_norm": 0.321195513010025,
"learning_rate": 8.700052576440166e-07,
"loss": 0.6451,
"step": 606
},
{
"epoch": 4.101010101010101,
"grad_norm": 0.3691120743751526,
"learning_rate": 8.574517537807897e-07,
"loss": 0.6492,
"step": 607
},
{
"epoch": 4.107744107744108,
"grad_norm": 0.3686451017856598,
"learning_rate": 8.449809785695318e-07,
"loss": 0.688,
"step": 608
},
{
"epoch": 4.114478114478114,
"grad_norm": 0.3513583838939667,
"learning_rate": 8.325931810575344e-07,
"loss": 0.6408,
"step": 609
},
{
"epoch": 4.121212121212121,
"grad_norm": 0.3904244303703308,
"learning_rate": 8.202886086349848e-07,
"loss": 0.673,
"step": 610
},
{
"epoch": 4.127946127946128,
"grad_norm": 0.3348163664340973,
"learning_rate": 8.080675070300303e-07,
"loss": 0.6526,
"step": 611
},
{
"epoch": 4.134680134680135,
"grad_norm": 0.3590138256549835,
"learning_rate": 7.959301203038566e-07,
"loss": 0.6592,
"step": 612
},
{
"epoch": 4.141414141414141,
"grad_norm": 0.34956321120262146,
"learning_rate": 7.838766908458339e-07,
"loss": 0.65,
"step": 613
},
{
"epoch": 4.148148148148148,
"grad_norm": 0.3524308204650879,
"learning_rate": 7.719074593686593e-07,
"loss": 0.6401,
"step": 614
},
{
"epoch": 4.154882154882155,
"grad_norm": 0.36425477266311646,
"learning_rate": 7.600226649035619e-07,
"loss": 0.6332,
"step": 615
},
{
"epoch": 4.161616161616162,
"grad_norm": 0.35078221559524536,
"learning_rate": 7.482225447955155e-07,
"loss": 0.6335,
"step": 616
},
{
"epoch": 4.168350168350169,
"grad_norm": 0.35954156517982483,
"learning_rate": 7.365073346985158e-07,
"loss": 0.6601,
"step": 617
},
{
"epoch": 4.175084175084175,
"grad_norm": 0.3586035668849945,
"learning_rate": 7.248772685708589e-07,
"loss": 0.692,
"step": 618
},
{
"epoch": 4.181818181818182,
"grad_norm": 0.3455102741718292,
"learning_rate": 7.133325786704792e-07,
"loss": 0.6438,
"step": 619
},
{
"epoch": 4.188552188552189,
"grad_norm": 0.35879337787628174,
"learning_rate": 7.018734955503048e-07,
"loss": 0.6797,
"step": 620
},
{
"epoch": 4.1952861952861955,
"grad_norm": 0.3559771776199341,
"learning_rate": 6.905002480536565e-07,
"loss": 0.6538,
"step": 621
},
{
"epoch": 4.202020202020202,
"grad_norm": 0.34636038541793823,
"learning_rate": 6.7921306330968e-07,
"loss": 0.6727,
"step": 622
},
{
"epoch": 4.2087542087542085,
"grad_norm": 0.35275956988334656,
"learning_rate": 6.680121667288026e-07,
"loss": 0.6649,
"step": 623
},
{
"epoch": 4.215488215488215,
"grad_norm": 0.36299997568130493,
"learning_rate": 6.568977819982386e-07,
"loss": 0.6465,
"step": 624
},
{
"epoch": 4.222222222222222,
"grad_norm": 0.3893291652202606,
"learning_rate": 6.458701310775184e-07,
"loss": 0.6528,
"step": 625
},
{
"epoch": 4.228956228956229,
"grad_norm": 0.36376526951789856,
"learning_rate": 6.349294341940593e-07,
"loss": 0.6147,
"step": 626
},
{
"epoch": 4.235690235690235,
"grad_norm": 0.3742962181568146,
"learning_rate": 6.240759098387628e-07,
"loss": 0.6785,
"step": 627
},
{
"epoch": 4.242424242424242,
"grad_norm": 0.35815274715423584,
"learning_rate": 6.133097747616546e-07,
"loss": 0.6711,
"step": 628
},
{
"epoch": 4.249158249158249,
"grad_norm": 0.38192108273506165,
"learning_rate": 6.026312439675553e-07,
"loss": 0.6642,
"step": 629
},
{
"epoch": 4.255892255892256,
"grad_norm": 0.3495500683784485,
"learning_rate": 5.92040530711786e-07,
"loss": 0.6516,
"step": 630
},
{
"epoch": 4.262626262626263,
"grad_norm": 0.35708263516426086,
"learning_rate": 5.815378464959109e-07,
"loss": 0.6885,
"step": 631
},
{
"epoch": 4.269360269360269,
"grad_norm": 0.3522356450557709,
"learning_rate": 5.711234010635103e-07,
"loss": 0.6596,
"step": 632
},
{
"epoch": 4.276094276094276,
"grad_norm": 0.3516038954257965,
"learning_rate": 5.607974023959977e-07,
"loss": 0.6948,
"step": 633
},
{
"epoch": 4.282828282828283,
"grad_norm": 0.36104774475097656,
"learning_rate": 5.505600567084602e-07,
"loss": 0.6342,
"step": 634
},
{
"epoch": 4.28956228956229,
"grad_norm": 0.3525451123714447,
"learning_rate": 5.40411568445543e-07,
"loss": 0.6697,
"step": 635
},
{
"epoch": 4.296296296296296,
"grad_norm": 0.41006627678871155,
"learning_rate": 5.303521402773665e-07,
"loss": 0.6251,
"step": 636
},
{
"epoch": 4.303030303030303,
"grad_norm": 0.37305814027786255,
"learning_rate": 5.203819730954807e-07,
"loss": 0.6732,
"step": 637
},
{
"epoch": 4.30976430976431,
"grad_norm": 0.35154303908348083,
"learning_rate": 5.105012660088493e-07,
"loss": 0.6548,
"step": 638
},
{
"epoch": 4.316498316498317,
"grad_norm": 0.33511778712272644,
"learning_rate": 5.007102163398758e-07,
"loss": 0.6634,
"step": 639
},
{
"epoch": 4.3232323232323235,
"grad_norm": 0.32016995549201965,
"learning_rate": 4.910090196204626e-07,
"loss": 0.654,
"step": 640
},
{
"epoch": 4.32996632996633,
"grad_norm": 0.375517338514328,
"learning_rate": 4.81397869588106e-07,
"loss": 0.7066,
"step": 641
},
{
"epoch": 4.3367003367003365,
"grad_norm": 0.31980428099632263,
"learning_rate": 4.718769581820309e-07,
"loss": 0.6498,
"step": 642
},
{
"epoch": 4.343434343434343,
"grad_norm": 0.3597594201564789,
"learning_rate": 4.6244647553934594e-07,
"loss": 0.6381,
"step": 643
},
{
"epoch": 4.35016835016835,
"grad_norm": 0.35346564650535583,
"learning_rate": 4.531066099912623e-07,
"loss": 0.6654,
"step": 644
},
{
"epoch": 4.356902356902357,
"grad_norm": 0.3588011860847473,
"learning_rate": 4.43857548059321e-07,
"loss": 0.6815,
"step": 645
},
{
"epoch": 4.363636363636363,
"grad_norm": 0.36538439989089966,
"learning_rate": 4.346994744516747e-07,
"loss": 0.6669,
"step": 646
},
{
"epoch": 4.37037037037037,
"grad_norm": 0.33307260274887085,
"learning_rate": 4.2563257205939124e-07,
"loss": 0.6248,
"step": 647
},
{
"epoch": 4.377104377104377,
"grad_norm": 0.34800708293914795,
"learning_rate": 4.1665702195280986e-07,
"loss": 0.6518,
"step": 648
},
{
"epoch": 4.383838383838384,
"grad_norm": 0.3960654139518738,
"learning_rate": 4.077730033779215e-07,
"loss": 0.6594,
"step": 649
},
{
"epoch": 4.390572390572391,
"grad_norm": 0.36860403418540955,
"learning_rate": 3.989806937527868e-07,
"loss": 0.6818,
"step": 650
},
{
"epoch": 4.397306397306397,
"grad_norm": 0.37295079231262207,
"learning_rate": 3.902802686639967e-07,
"loss": 0.6899,
"step": 651
},
{
"epoch": 4.404040404040404,
"grad_norm": 0.361380934715271,
"learning_rate": 3.816719018631637e-07,
"loss": 0.6926,
"step": 652
},
{
"epoch": 4.410774410774411,
"grad_norm": 0.3532719314098358,
"learning_rate": 3.7315576526345433e-07,
"loss": 0.6565,
"step": 653
},
{
"epoch": 4.417508417508418,
"grad_norm": 0.3923482894897461,
"learning_rate": 3.647320289361517e-07,
"loss": 0.6789,
"step": 654
},
{
"epoch": 4.424242424242424,
"grad_norm": 0.37747353315353394,
"learning_rate": 3.5640086110726337e-07,
"loss": 0.6913,
"step": 655
},
{
"epoch": 4.430976430976431,
"grad_norm": 0.34756359457969666,
"learning_rate": 3.4816242815416014e-07,
"loss": 0.6637,
"step": 656
},
{
"epoch": 4.437710437710438,
"grad_norm": 0.33792203664779663,
"learning_rate": 3.4001689460225197e-07,
"loss": 0.6322,
"step": 657
},
{
"epoch": 4.444444444444445,
"grad_norm": 0.3363687992095947,
"learning_rate": 3.3196442312170563e-07,
"loss": 0.6602,
"step": 658
},
{
"epoch": 4.451178451178452,
"grad_norm": 0.3448931872844696,
"learning_rate": 3.2400517452419176e-07,
"loss": 0.6821,
"step": 659
},
{
"epoch": 4.457912457912458,
"grad_norm": 0.3571867346763611,
"learning_rate": 3.161393077596797e-07,
"loss": 0.6489,
"step": 660
},
{
"epoch": 4.4646464646464645,
"grad_norm": 0.34630197286605835,
"learning_rate": 3.0836697991325547e-07,
"loss": 0.6547,
"step": 661
},
{
"epoch": 4.4713804713804715,
"grad_norm": 0.34990304708480835,
"learning_rate": 3.0068834620199106e-07,
"loss": 0.6487,
"step": 662
},
{
"epoch": 4.478114478114478,
"grad_norm": 0.37092679738998413,
"learning_rate": 2.931035599718396e-07,
"loss": 0.6768,
"step": 663
},
{
"epoch": 4.484848484848484,
"grad_norm": 0.3655143082141876,
"learning_rate": 2.85612772694579e-07,
"loss": 0.6247,
"step": 664
},
{
"epoch": 4.491582491582491,
"grad_norm": 0.35681506991386414,
"learning_rate": 2.7821613396478097e-07,
"loss": 0.6486,
"step": 665
},
{
"epoch": 4.498316498316498,
"grad_norm": 0.4038345515727997,
"learning_rate": 2.7091379149682683e-07,
"loss": 0.6404,
"step": 666
},
{
"epoch": 4.505050505050505,
"grad_norm": 0.3866751492023468,
"learning_rate": 2.63705891121957e-07,
"loss": 0.6327,
"step": 667
},
{
"epoch": 4.511784511784512,
"grad_norm": 0.3445982336997986,
"learning_rate": 2.5659257678535664e-07,
"loss": 0.6394,
"step": 668
},
{
"epoch": 4.518518518518518,
"grad_norm": 0.3618682622909546,
"learning_rate": 2.4957399054328815e-07,
"loss": 0.6162,
"step": 669
},
{
"epoch": 4.525252525252525,
"grad_norm": 0.3541615307331085,
"learning_rate": 2.42650272560242e-07,
"loss": 0.6997,
"step": 670
},
{
"epoch": 4.531986531986532,
"grad_norm": 0.3412024974822998,
"learning_rate": 2.3582156110614985e-07,
"loss": 0.6743,
"step": 671
},
{
"epoch": 4.538720538720539,
"grad_norm": 0.32731711864471436,
"learning_rate": 2.2908799255361546e-07,
"loss": 0.6335,
"step": 672
},
{
"epoch": 4.545454545454545,
"grad_norm": 0.35412031412124634,
"learning_rate": 2.2244970137519585e-07,
"loss": 0.6352,
"step": 673
},
{
"epoch": 4.552188552188552,
"grad_norm": 0.35853320360183716,
"learning_rate": 2.1590682014070997e-07,
"loss": 0.6372,
"step": 674
},
{
"epoch": 4.558922558922559,
"grad_norm": 0.34030717611312866,
"learning_rate": 2.0945947951459876e-07,
"loss": 0.6594,
"step": 675
},
{
"epoch": 4.565656565656566,
"grad_norm": 0.35133013129234314,
"learning_rate": 2.0310780825331056e-07,
"loss": 0.6511,
"step": 676
},
{
"epoch": 4.572390572390573,
"grad_norm": 0.3555893003940582,
"learning_rate": 1.968519332027302e-07,
"loss": 0.655,
"step": 677
},
{
"epoch": 4.57912457912458,
"grad_norm": 0.3459097146987915,
"learning_rate": 1.9069197929564854e-07,
"loss": 0.6512,
"step": 678
},
{
"epoch": 4.585858585858586,
"grad_norm": 0.343435674905777,
"learning_rate": 1.8462806954926306e-07,
"loss": 0.6377,
"step": 679
},
{
"epoch": 4.592592592592593,
"grad_norm": 0.3426160514354706,
"learning_rate": 1.786603250627278e-07,
"loss": 0.6625,
"step": 680
},
{
"epoch": 4.5993265993265995,
"grad_norm": 0.35558924078941345,
"learning_rate": 1.7278886501472804e-07,
"loss": 0.6585,
"step": 681
},
{
"epoch": 4.606060606060606,
"grad_norm": 0.34621286392211914,
"learning_rate": 1.6701380666110323e-07,
"loss": 0.6896,
"step": 682
},
{
"epoch": 4.6127946127946124,
"grad_norm": 0.378628671169281,
"learning_rate": 1.6133526533250566e-07,
"loss": 0.672,
"step": 683
},
{
"epoch": 4.619528619528619,
"grad_norm": 0.3545064926147461,
"learning_rate": 1.5575335443209882e-07,
"loss": 0.6493,
"step": 684
},
{
"epoch": 4.626262626262626,
"grad_norm": 0.33913904428482056,
"learning_rate": 1.5026818543328826e-07,
"loss": 0.6812,
"step": 685
},
{
"epoch": 4.632996632996633,
"grad_norm": 0.34131354093551636,
"learning_rate": 1.4487986787749763e-07,
"loss": 0.6662,
"step": 686
},
{
"epoch": 4.63973063973064,
"grad_norm": 0.44563478231430054,
"learning_rate": 1.3958850937198454e-07,
"loss": 0.692,
"step": 687
},
{
"epoch": 4.646464646464646,
"grad_norm": 0.3565541207790375,
"learning_rate": 1.3439421558768484e-07,
"loss": 0.6244,
"step": 688
},
{
"epoch": 4.653198653198653,
"grad_norm": 0.36837512254714966,
"learning_rate": 1.292970902571078e-07,
"loss": 0.6887,
"step": 689
},
{
"epoch": 4.65993265993266,
"grad_norm": 0.36443689465522766,
"learning_rate": 1.2429723517226212e-07,
"loss": 0.656,
"step": 690
},
{
"epoch": 4.666666666666667,
"grad_norm": 0.3696683347225189,
"learning_rate": 1.1939475018262481e-07,
"loss": 0.6817,
"step": 691
},
{
"epoch": 4.673400673400673,
"grad_norm": 0.38500481843948364,
"learning_rate": 1.1458973319314337e-07,
"loss": 0.6831,
"step": 692
},
{
"epoch": 4.68013468013468,
"grad_norm": 0.3367100656032562,
"learning_rate": 1.0988228016228508e-07,
"loss": 0.6722,
"step": 693
},
{
"epoch": 4.686868686868687,
"grad_norm": 0.3433123826980591,
"learning_rate": 1.05272485100118e-07,
"loss": 0.653,
"step": 694
},
{
"epoch": 4.693602693602694,
"grad_norm": 0.3778822720050812,
"learning_rate": 1.007604400664347e-07,
"loss": 0.6651,
"step": 695
},
{
"epoch": 4.700336700336701,
"grad_norm": 0.3530823588371277,
"learning_rate": 9.634623516891372e-08,
"loss": 0.7046,
"step": 696
},
{
"epoch": 4.707070707070707,
"grad_norm": 0.3648776113986969,
"learning_rate": 9.202995856131769e-08,
"loss": 0.6723,
"step": 697
},
{
"epoch": 4.713804713804714,
"grad_norm": 0.3672958016395569,
"learning_rate": 8.781169644173748e-08,
"loss": 0.621,
"step": 698
},
{
"epoch": 4.720538720538721,
"grad_norm": 0.39199933409690857,
"learning_rate": 8.369153305086641e-08,
"loss": 0.6699,
"step": 699
},
{
"epoch": 4.7272727272727275,
"grad_norm": 0.3422453999519348,
"learning_rate": 7.966955067032101e-08,
"loss": 0.6636,
"step": 700
},
{
"epoch": 4.7340067340067336,
"grad_norm": 0.3442992866039276,
"learning_rate": 7.574582962099508e-08,
"loss": 0.6333,
"step": 701
},
{
"epoch": 4.7407407407407405,
"grad_norm": 0.3297479450702667,
"learning_rate": 7.192044826145772e-08,
"loss": 0.6338,
"step": 702
},
{
"epoch": 4.747474747474747,
"grad_norm": 0.3699067234992981,
"learning_rate": 6.819348298638839e-08,
"loss": 0.6524,
"step": 703
},
{
"epoch": 4.754208754208754,
"grad_norm": 0.35329344868659973,
"learning_rate": 6.45650082250493e-08,
"loss": 0.6707,
"step": 704
},
{
"epoch": 4.760942760942761,
"grad_norm": 0.34827712178230286,
"learning_rate": 6.10350964398021e-08,
"loss": 0.6871,
"step": 705
},
{
"epoch": 4.767676767676767,
"grad_norm": 0.3724733591079712,
"learning_rate": 5.7603818124657984e-08,
"loss": 0.6606,
"step": 706
},
{
"epoch": 4.774410774410774,
"grad_norm": 0.35059911012649536,
"learning_rate": 5.4271241803871e-08,
"loss": 0.6445,
"step": 707
},
{
"epoch": 4.781144781144781,
"grad_norm": 0.3768954873085022,
"learning_rate": 5.103743403057027e-08,
"loss": 0.6711,
"step": 708
},
{
"epoch": 4.787878787878788,
"grad_norm": 0.3454023003578186,
"learning_rate": 4.7902459385429364e-08,
"loss": 0.628,
"step": 709
},
{
"epoch": 4.794612794612795,
"grad_norm": 0.37522533535957336,
"learning_rate": 4.486638047537795e-08,
"loss": 0.6669,
"step": 710
},
{
"epoch": 4.801346801346801,
"grad_norm": 0.3776388168334961,
"learning_rate": 4.192925793235159e-08,
"loss": 0.6872,
"step": 711
},
{
"epoch": 4.808080808080808,
"grad_norm": 0.3489745259284973,
"learning_rate": 3.909115041207889e-08,
"loss": 0.6755,
"step": 712
},
{
"epoch": 4.814814814814815,
"grad_norm": 0.3340746760368347,
"learning_rate": 3.635211459291188e-08,
"loss": 0.6406,
"step": 713
},
{
"epoch": 4.821548821548822,
"grad_norm": 0.3524295389652252,
"learning_rate": 3.37122051746952e-08,
"loss": 0.6712,
"step": 714
},
{
"epoch": 4.828282828282829,
"grad_norm": 0.33559226989746094,
"learning_rate": 3.117147487767092e-08,
"loss": 0.663,
"step": 715
},
{
"epoch": 4.835016835016835,
"grad_norm": 0.36806800961494446,
"learning_rate": 2.8729974441426557e-08,
"loss": 0.6637,
"step": 716
},
{
"epoch": 4.841750841750842,
"grad_norm": 0.34991908073425293,
"learning_rate": 2.6387752623883158e-08,
"loss": 0.6445,
"step": 717
},
{
"epoch": 4.848484848484849,
"grad_norm": 0.3314189016819,
"learning_rate": 2.4144856200321587e-08,
"loss": 0.7059,
"step": 718
},
{
"epoch": 4.8552188552188555,
"grad_norm": 0.3607062101364136,
"learning_rate": 2.2001329962446082e-08,
"loss": 0.6421,
"step": 719
},
{
"epoch": 4.861952861952862,
"grad_norm": 0.36060237884521484,
"learning_rate": 1.9957216717491067e-08,
"loss": 0.6548,
"step": 720
},
{
"epoch": 4.8686868686868685,
"grad_norm": 0.34536007046699524,
"learning_rate": 1.8012557287367394e-08,
"loss": 0.6049,
"step": 721
},
{
"epoch": 4.875420875420875,
"grad_norm": 0.3376261591911316,
"learning_rate": 1.616739050784577e-08,
"loss": 0.6078,
"step": 722
},
{
"epoch": 4.882154882154882,
"grad_norm": 0.35385504364967346,
"learning_rate": 1.4421753227780721e-08,
"loss": 0.6696,
"step": 723
},
{
"epoch": 4.888888888888889,
"grad_norm": 0.3818473219871521,
"learning_rate": 1.2775680308376726e-08,
"loss": 0.7137,
"step": 724
},
{
"epoch": 4.895622895622895,
"grad_norm": 0.37847810983657837,
"learning_rate": 1.1229204622489886e-08,
"loss": 0.6533,
"step": 725
},
{
"epoch": 4.902356902356902,
"grad_norm": 0.35258740186691284,
"learning_rate": 9.782357053972902e-09,
"loss": 0.6657,
"step": 726
},
{
"epoch": 4.909090909090909,
"grad_norm": 0.3426527678966522,
"learning_rate": 8.435166497057223e-09,
"loss": 0.6742,
"step": 727
},
{
"epoch": 4.915824915824916,
"grad_norm": 0.37788406014442444,
"learning_rate": 7.187659855776852e-09,
"loss": 0.6237,
"step": 728
},
{
"epoch": 4.922558922558922,
"grad_norm": 0.34548109769821167,
"learning_rate": 6.039862043430989e-09,
"loss": 0.6498,
"step": 729
},
{
"epoch": 4.929292929292929,
"grad_norm": 0.3507046103477478,
"learning_rate": 4.991795982085546e-09,
"loss": 0.672,
"step": 730
},
{
"epoch": 4.936026936026936,
"grad_norm": 0.35069385170936584,
"learning_rate": 4.043482602116844e-09,
"loss": 0.648,
"step": 731
},
{
"epoch": 4.942760942760943,
"grad_norm": 0.3565804362297058,
"learning_rate": 3.1949408417925043e-09,
"loss": 0.6355,
"step": 732
},
{
"epoch": 4.94949494949495,
"grad_norm": 0.343685120344162,
"learning_rate": 2.4461876468934164e-09,
"loss": 0.6055,
"step": 733
},
{
"epoch": 4.956228956228956,
"grad_norm": 0.3296184539794922,
"learning_rate": 1.797237970376231e-09,
"loss": 0.6438,
"step": 734
},
{
"epoch": 4.962962962962963,
"grad_norm": 0.3328549563884735,
"learning_rate": 1.2481047720735995e-09,
"loss": 0.6357,
"step": 735
},
{
"epoch": 4.96969696969697,
"grad_norm": 0.33267027139663696,
"learning_rate": 7.987990184354921e-10,
"loss": 0.6472,
"step": 736
},
{
"epoch": 4.976430976430977,
"grad_norm": 0.34349173307418823,
"learning_rate": 4.4932968231048426e-10,
"loss": 0.6738,
"step": 737
},
{
"epoch": 4.983164983164983,
"grad_norm": 0.3447794020175934,
"learning_rate": 1.997037427675652e-10,
"loss": 0.6348,
"step": 738
},
{
"epoch": 4.98989898989899,
"grad_norm": 0.3507779836654663,
"learning_rate": 4.992618495403001e-11,
"loss": 0.6163,
"step": 739
},
{
"epoch": 4.9966329966329965,
"grad_norm": 0.3817089796066284,
"learning_rate": 0.0,
"loss": 0.6669,
"step": 740
}
],
"logging_steps": 1,
"max_steps": 740,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 6.536774873136497e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}