wtocLoRA_0126 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.980744544287548,
"eval_steps": 500,
"global_step": 485,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"learning_rate": 2.0000000000000003e-06,
"loss": 2.096,
"step": 1
},
{
"epoch": 0.02,
"learning_rate": 4.000000000000001e-06,
"loss": 2.1685,
"step": 2
},
{
"epoch": 0.03,
"learning_rate": 6e-06,
"loss": 1.9304,
"step": 3
},
{
"epoch": 0.04,
"learning_rate": 8.000000000000001e-06,
"loss": 2.3691,
"step": 4
},
{
"epoch": 0.05,
"learning_rate": 1e-05,
"loss": 2.073,
"step": 5
},
{
"epoch": 0.06,
"learning_rate": 9.999892908320647e-06,
"loss": 2.2502,
"step": 6
},
{
"epoch": 0.07,
"learning_rate": 9.999571637870035e-06,
"loss": 2.2303,
"step": 7
},
{
"epoch": 0.08,
"learning_rate": 9.999036202410324e-06,
"loss": 2.2856,
"step": 8
},
{
"epoch": 0.09,
"learning_rate": 9.998286624877786e-06,
"loss": 2.4648,
"step": 9
},
{
"epoch": 0.1,
"learning_rate": 9.997322937381829e-06,
"loss": 2.1098,
"step": 10
},
{
"epoch": 0.11,
"learning_rate": 9.996145181203616e-06,
"loss": 2.113,
"step": 11
},
{
"epoch": 0.12,
"learning_rate": 9.994753406794303e-06,
"loss": 2.4146,
"step": 12
},
{
"epoch": 0.13,
"learning_rate": 9.993147673772869e-06,
"loss": 2.5792,
"step": 13
},
{
"epoch": 0.14,
"learning_rate": 9.99132805092358e-06,
"loss": 2.2765,
"step": 14
},
{
"epoch": 0.15,
"learning_rate": 9.989294616193018e-06,
"loss": 2.1898,
"step": 15
},
{
"epoch": 0.16,
"learning_rate": 9.98704745668676e-06,
"loss": 2.1708,
"step": 16
},
{
"epoch": 0.17,
"learning_rate": 9.984586668665641e-06,
"loss": 1.8872,
"step": 17
},
{
"epoch": 0.18,
"learning_rate": 9.981912357541628e-06,
"loss": 2.389,
"step": 18
},
{
"epoch": 0.2,
"learning_rate": 9.979024637873309e-06,
"loss": 2.2232,
"step": 19
},
{
"epoch": 0.21,
"learning_rate": 9.975923633360985e-06,
"loss": 1.8677,
"step": 20
},
{
"epoch": 0.22,
"learning_rate": 9.972609476841368e-06,
"loss": 2.228,
"step": 21
},
{
"epoch": 0.23,
"learning_rate": 9.96908231028189e-06,
"loss": 1.8441,
"step": 22
},
{
"epoch": 0.24,
"learning_rate": 9.965342284774633e-06,
"loss": 2.2513,
"step": 23
},
{
"epoch": 0.25,
"learning_rate": 9.961389560529835e-06,
"loss": 2.145,
"step": 24
},
{
"epoch": 0.26,
"learning_rate": 9.957224306869053e-06,
"loss": 2.4188,
"step": 25
},
{
"epoch": 0.27,
"learning_rate": 9.952846702217886e-06,
"loss": 2.102,
"step": 26
},
{
"epoch": 0.28,
"learning_rate": 9.948256934098353e-06,
"loss": 2.326,
"step": 27
},
{
"epoch": 0.29,
"learning_rate": 9.943455199120836e-06,
"loss": 2.045,
"step": 28
},
{
"epoch": 0.3,
"learning_rate": 9.938441702975689e-06,
"loss": 2.4204,
"step": 29
},
{
"epoch": 0.31,
"learning_rate": 9.933216660424396e-06,
"loss": 2.4973,
"step": 30
},
{
"epoch": 0.32,
"learning_rate": 9.92778029529039e-06,
"loss": 1.7761,
"step": 31
},
{
"epoch": 0.33,
"learning_rate": 9.922132840449459e-06,
"loss": 2.3172,
"step": 32
},
{
"epoch": 0.34,
"learning_rate": 9.916274537819774e-06,
"loss": 2.2334,
"step": 33
},
{
"epoch": 0.35,
"learning_rate": 9.91020563835152e-06,
"loss": 2.2044,
"step": 34
},
{
"epoch": 0.36,
"learning_rate": 9.903926402016153e-06,
"loss": 1.8726,
"step": 35
},
{
"epoch": 0.37,
"learning_rate": 9.897437097795257e-06,
"loss": 2.2751,
"step": 36
},
{
"epoch": 0.38,
"learning_rate": 9.890738003669029e-06,
"loss": 1.5207,
"step": 37
},
{
"epoch": 0.39,
"learning_rate": 9.883829406604363e-06,
"loss": 2.0521,
"step": 38
},
{
"epoch": 0.4,
"learning_rate": 9.876711602542564e-06,
"loss": 2.2236,
"step": 39
},
{
"epoch": 0.41,
"learning_rate": 9.869384896386669e-06,
"loss": 2.0624,
"step": 40
},
{
"epoch": 0.42,
"learning_rate": 9.861849601988384e-06,
"loss": 2.2089,
"step": 41
},
{
"epoch": 0.43,
"learning_rate": 9.854106042134642e-06,
"loss": 1.7589,
"step": 42
},
{
"epoch": 0.44,
"learning_rate": 9.846154548533773e-06,
"loss": 1.9684,
"step": 43
},
{
"epoch": 0.45,
"learning_rate": 9.8379954618013e-06,
"loss": 1.9629,
"step": 44
},
{
"epoch": 0.46,
"learning_rate": 9.829629131445342e-06,
"loss": 2.2497,
"step": 45
},
{
"epoch": 0.47,
"learning_rate": 9.821055915851647e-06,
"loss": 2.1987,
"step": 46
},
{
"epoch": 0.48,
"learning_rate": 9.812276182268236e-06,
"loss": 2.3049,
"step": 47
},
{
"epoch": 0.49,
"learning_rate": 9.803290306789676e-06,
"loss": 1.8745,
"step": 48
},
{
"epoch": 0.5,
"learning_rate": 9.794098674340966e-06,
"loss": 2.0944,
"step": 49
},
{
"epoch": 0.51,
"learning_rate": 9.784701678661045e-06,
"loss": 1.7587,
"step": 50
},
{
"epoch": 0.52,
"learning_rate": 9.775099722285934e-06,
"loss": 1.7021,
"step": 51
},
{
"epoch": 0.53,
"learning_rate": 9.765293216531486e-06,
"loss": 2.0724,
"step": 52
},
{
"epoch": 0.54,
"learning_rate": 9.755282581475769e-06,
"loss": 1.65,
"step": 53
},
{
"epoch": 0.55,
"learning_rate": 9.745068245941071e-06,
"loss": 2.0913,
"step": 54
},
{
"epoch": 0.56,
"learning_rate": 9.73465064747553e-06,
"loss": 1.9629,
"step": 55
},
{
"epoch": 0.58,
"learning_rate": 9.72403023233439e-06,
"loss": 2.3623,
"step": 56
},
{
"epoch": 0.59,
"learning_rate": 9.713207455460893e-06,
"loss": 2.2547,
"step": 57
},
{
"epoch": 0.6,
"learning_rate": 9.702182780466775e-06,
"loss": 1.9144,
"step": 58
},
{
"epoch": 0.61,
"learning_rate": 9.690956679612422e-06,
"loss": 2.1871,
"step": 59
},
{
"epoch": 0.62,
"learning_rate": 9.67952963378663e-06,
"loss": 1.8868,
"step": 60
},
{
"epoch": 0.63,
"learning_rate": 9.667902132486009e-06,
"loss": 1.9261,
"step": 61
},
{
"epoch": 0.64,
"learning_rate": 9.656074673794018e-06,
"loss": 1.9513,
"step": 62
},
{
"epoch": 0.65,
"learning_rate": 9.644047764359623e-06,
"loss": 1.8614,
"step": 63
},
{
"epoch": 0.66,
"learning_rate": 9.63182191937559e-06,
"loss": 2.2613,
"step": 64
},
{
"epoch": 0.67,
"learning_rate": 9.619397662556434e-06,
"loss": 2.0888,
"step": 65
},
{
"epoch": 0.68,
"learning_rate": 9.606775526115963e-06,
"loss": 1.9784,
"step": 66
},
{
"epoch": 0.69,
"learning_rate": 9.593956050744493e-06,
"loss": 2.2996,
"step": 67
},
{
"epoch": 0.7,
"learning_rate": 9.58093978558568e-06,
"loss": 1.9712,
"step": 68
},
{
"epoch": 0.71,
"learning_rate": 9.567727288213005e-06,
"loss": 1.9814,
"step": 69
},
{
"epoch": 0.72,
"learning_rate": 9.55431912460588e-06,
"loss": 2.1213,
"step": 70
},
{
"epoch": 0.73,
"learning_rate": 9.540715869125407e-06,
"loss": 1.7144,
"step": 71
},
{
"epoch": 0.74,
"learning_rate": 9.526918104489777e-06,
"loss": 2.0376,
"step": 72
},
{
"epoch": 0.75,
"learning_rate": 9.512926421749305e-06,
"loss": 1.9465,
"step": 73
},
{
"epoch": 0.76,
"learning_rate": 9.498741420261109e-06,
"loss": 1.5664,
"step": 74
},
{
"epoch": 0.77,
"learning_rate": 9.484363707663443e-06,
"loss": 2.1632,
"step": 75
},
{
"epoch": 0.78,
"learning_rate": 9.469793899849663e-06,
"loss": 1.7367,
"step": 76
},
{
"epoch": 0.79,
"learning_rate": 9.45503262094184e-06,
"loss": 2.155,
"step": 77
},
{
"epoch": 0.8,
"learning_rate": 9.440080503264038e-06,
"loss": 2.1775,
"step": 78
},
{
"epoch": 0.81,
"learning_rate": 9.42493818731521e-06,
"loss": 2.2435,
"step": 79
},
{
"epoch": 0.82,
"learning_rate": 9.409606321741776e-06,
"loss": 2.0983,
"step": 80
},
{
"epoch": 0.83,
"learning_rate": 9.394085563309827e-06,
"loss": 1.8806,
"step": 81
},
{
"epoch": 0.84,
"learning_rate": 9.378376576876999e-06,
"loss": 2.1435,
"step": 82
},
{
"epoch": 0.85,
"learning_rate": 9.362480035363987e-06,
"loss": 1.9698,
"step": 83
},
{
"epoch": 0.86,
"learning_rate": 9.34639661972572e-06,
"loss": 1.9052,
"step": 84
},
{
"epoch": 0.87,
"learning_rate": 9.330127018922195e-06,
"loss": 1.916,
"step": 85
},
{
"epoch": 0.88,
"learning_rate": 9.31367192988896e-06,
"loss": 1.7788,
"step": 86
},
{
"epoch": 0.89,
"learning_rate": 9.297032057507264e-06,
"loss": 1.8848,
"step": 87
},
{
"epoch": 0.9,
"learning_rate": 9.280208114573859e-06,
"loss": 2.5085,
"step": 88
},
{
"epoch": 0.91,
"learning_rate": 9.263200821770462e-06,
"loss": 1.8004,
"step": 89
},
{
"epoch": 0.92,
"learning_rate": 9.246010907632894e-06,
"loss": 1.8324,
"step": 90
},
{
"epoch": 0.93,
"learning_rate": 9.228639108519867e-06,
"loss": 1.8939,
"step": 91
},
{
"epoch": 0.94,
"learning_rate": 9.211086168581433e-06,
"loss": 1.8893,
"step": 92
},
{
"epoch": 0.96,
"learning_rate": 9.193352839727122e-06,
"loss": 1.8633,
"step": 93
},
{
"epoch": 0.97,
"learning_rate": 9.175439881593716e-06,
"loss": 1.8647,
"step": 94
},
{
"epoch": 0.98,
"learning_rate": 9.157348061512728e-06,
"loss": 1.7561,
"step": 95
},
{
"epoch": 0.99,
"learning_rate": 9.139078154477512e-06,
"loss": 2.0783,
"step": 96
},
{
"epoch": 1.0,
"learning_rate": 9.120630943110078e-06,
"loss": 1.8299,
"step": 97
},
{
"epoch": 1.01,
"learning_rate": 9.102007217627568e-06,
"loss": 1.7287,
"step": 98
},
{
"epoch": 1.02,
"learning_rate": 9.083207775808395e-06,
"loss": 1.7407,
"step": 99
},
{
"epoch": 1.03,
"learning_rate": 9.064233422958078e-06,
"loss": 1.9198,
"step": 100
},
{
"epoch": 1.04,
"learning_rate": 9.045084971874738e-06,
"loss": 2.0098,
"step": 101
},
{
"epoch": 1.05,
"learning_rate": 9.025763242814291e-06,
"loss": 2.0738,
"step": 102
},
{
"epoch": 1.06,
"learning_rate": 9.006269063455305e-06,
"loss": 1.7076,
"step": 103
},
{
"epoch": 1.07,
"learning_rate": 8.986603268863536e-06,
"loss": 1.5768,
"step": 104
},
{
"epoch": 1.08,
"learning_rate": 8.966766701456177e-06,
"loss": 1.9178,
"step": 105
},
{
"epoch": 1.09,
"learning_rate": 8.94676021096575e-06,
"loss": 2.0072,
"step": 106
},
{
"epoch": 1.1,
"learning_rate": 8.926584654403725e-06,
"loss": 1.8287,
"step": 107
},
{
"epoch": 1.11,
"learning_rate": 8.906240896023794e-06,
"loss": 2.0671,
"step": 108
},
{
"epoch": 1.12,
"learning_rate": 8.885729807284855e-06,
"loss": 2.071,
"step": 109
},
{
"epoch": 1.13,
"learning_rate": 8.865052266813686e-06,
"loss": 1.7935,
"step": 110
},
{
"epoch": 1.14,
"learning_rate": 8.844209160367298e-06,
"loss": 2.0569,
"step": 111
},
{
"epoch": 1.15,
"learning_rate": 8.823201380795003e-06,
"loss": 1.6801,
"step": 112
},
{
"epoch": 1.16,
"learning_rate": 8.802029828000157e-06,
"loss": 1.8642,
"step": 113
},
{
"epoch": 1.17,
"learning_rate": 8.780695408901613e-06,
"loss": 1.9185,
"step": 114
},
{
"epoch": 1.18,
"learning_rate": 8.759199037394888e-06,
"loss": 1.8856,
"step": 115
},
{
"epoch": 1.19,
"learning_rate": 8.737541634312985e-06,
"loss": 1.8935,
"step": 116
},
{
"epoch": 1.2,
"learning_rate": 8.715724127386971e-06,
"loss": 2.1943,
"step": 117
},
{
"epoch": 1.21,
"learning_rate": 8.693747451206231e-06,
"loss": 2.0001,
"step": 118
},
{
"epoch": 1.22,
"learning_rate": 8.671612547178428e-06,
"loss": 1.5899,
"step": 119
},
{
"epoch": 1.23,
"learning_rate": 8.649320363489178e-06,
"loss": 2.0746,
"step": 120
},
{
"epoch": 1.24,
"learning_rate": 8.626871855061438e-06,
"loss": 2.1153,
"step": 121
},
{
"epoch": 1.25,
"learning_rate": 8.604267983514595e-06,
"loss": 1.8988,
"step": 122
},
{
"epoch": 1.26,
"learning_rate": 8.581509717123272e-06,
"loss": 1.5446,
"step": 123
},
{
"epoch": 1.27,
"learning_rate": 8.558598030775857e-06,
"loss": 1.7919,
"step": 124
},
{
"epoch": 1.28,
"learning_rate": 8.535533905932739e-06,
"loss": 1.8675,
"step": 125
},
{
"epoch": 1.29,
"learning_rate": 8.51231833058426e-06,
"loss": 2.081,
"step": 126
},
{
"epoch": 1.3,
"learning_rate": 8.488952299208402e-06,
"loss": 1.9819,
"step": 127
},
{
"epoch": 1.31,
"learning_rate": 8.465436812728181e-06,
"loss": 1.6908,
"step": 128
},
{
"epoch": 1.32,
"learning_rate": 8.44177287846877e-06,
"loss": 1.9568,
"step": 129
},
{
"epoch": 1.34,
"learning_rate": 8.417961510114357e-06,
"loss": 2.0137,
"step": 130
},
{
"epoch": 1.35,
"learning_rate": 8.39400372766471e-06,
"loss": 1.563,
"step": 131
},
{
"epoch": 1.36,
"learning_rate": 8.36990055739149e-06,
"loss": 1.5122,
"step": 132
},
{
"epoch": 1.37,
"learning_rate": 8.345653031794292e-06,
"loss": 1.6892,
"step": 133
},
{
"epoch": 1.38,
"learning_rate": 8.32126218955641e-06,
"loss": 1.6607,
"step": 134
},
{
"epoch": 1.39,
"learning_rate": 8.296729075500345e-06,
"loss": 1.783,
"step": 135
},
{
"epoch": 1.4,
"learning_rate": 8.272054740543053e-06,
"loss": 1.7135,
"step": 136
},
{
"epoch": 1.41,
"learning_rate": 8.247240241650918e-06,
"loss": 1.8907,
"step": 137
},
{
"epoch": 1.42,
"learning_rate": 8.222286641794488e-06,
"loss": 2.1797,
"step": 138
},
{
"epoch": 1.43,
"learning_rate": 8.197195009902924e-06,
"loss": 1.7325,
"step": 139
},
{
"epoch": 1.44,
"learning_rate": 8.171966420818227e-06,
"loss": 1.7757,
"step": 140
},
{
"epoch": 1.45,
"learning_rate": 8.146601955249187e-06,
"loss": 1.7632,
"step": 141
},
{
"epoch": 1.46,
"learning_rate": 8.12110269972509e-06,
"loss": 1.9396,
"step": 142
},
{
"epoch": 1.47,
"learning_rate": 8.095469746549172e-06,
"loss": 2.1597,
"step": 143
},
{
"epoch": 1.48,
"learning_rate": 8.069704193751834e-06,
"loss": 1.9464,
"step": 144
},
{
"epoch": 1.49,
"learning_rate": 8.043807145043604e-06,
"loss": 2.2462,
"step": 145
},
{
"epoch": 1.5,
"learning_rate": 8.017779709767857e-06,
"loss": 1.7391,
"step": 146
},
{
"epoch": 1.51,
"learning_rate": 7.991623002853296e-06,
"loss": 1.8469,
"step": 147
},
{
"epoch": 1.52,
"learning_rate": 7.965338144766186e-06,
"loss": 1.9731,
"step": 148
},
{
"epoch": 1.53,
"learning_rate": 7.938926261462366e-06,
"loss": 1.656,
"step": 149
},
{
"epoch": 1.54,
"learning_rate": 7.912388484339012e-06,
"loss": 1.7054,
"step": 150
},
{
"epoch": 1.55,
"learning_rate": 7.88572595018617e-06,
"loss": 1.9707,
"step": 151
},
{
"epoch": 1.56,
"learning_rate": 7.858939801138061e-06,
"loss": 1.8721,
"step": 152
},
{
"epoch": 1.57,
"learning_rate": 7.832031184624165e-06,
"loss": 1.8131,
"step": 153
},
{
"epoch": 1.58,
"learning_rate": 7.80500125332005e-06,
"loss": 1.906,
"step": 154
},
{
"epoch": 1.59,
"learning_rate": 7.777851165098012e-06,
"loss": 1.5043,
"step": 155
},
{
"epoch": 1.6,
"learning_rate": 7.750582082977468e-06,
"loss": 1.893,
"step": 156
},
{
"epoch": 1.61,
"learning_rate": 7.723195175075136e-06,
"loss": 1.5617,
"step": 157
},
{
"epoch": 1.62,
"learning_rate": 7.695691614555002e-06,
"loss": 1.9453,
"step": 158
},
{
"epoch": 1.63,
"learning_rate": 7.66807257957806e-06,
"loss": 2.1013,
"step": 159
},
{
"epoch": 1.64,
"learning_rate": 7.64033925325184e-06,
"loss": 1.9608,
"step": 160
},
{
"epoch": 1.65,
"learning_rate": 7.612492823579744e-06,
"loss": 1.8601,
"step": 161
},
{
"epoch": 1.66,
"learning_rate": 7.584534483410137e-06,
"loss": 2.0612,
"step": 162
},
{
"epoch": 1.67,
"learning_rate": 7.55646543038526e-06,
"loss": 1.5687,
"step": 163
},
{
"epoch": 1.68,
"learning_rate": 7.528286866889924e-06,
"loss": 1.8509,
"step": 164
},
{
"epoch": 1.69,
"learning_rate": 7.500000000000001e-06,
"loss": 1.9952,
"step": 165
},
{
"epoch": 1.7,
"learning_rate": 7.471606041430724e-06,
"loss": 1.7273,
"step": 166
},
{
"epoch": 1.72,
"learning_rate": 7.443106207484776e-06,
"loss": 2.0801,
"step": 167
},
{
"epoch": 1.73,
"learning_rate": 7.414501719000187e-06,
"loss": 1.8467,
"step": 168
},
{
"epoch": 1.74,
"learning_rate": 7.3857938012980425e-06,
"loss": 2.0655,
"step": 169
},
{
"epoch": 1.75,
"learning_rate": 7.3569836841299905e-06,
"loss": 1.5439,
"step": 170
},
{
"epoch": 1.76,
"learning_rate": 7.328072601625558e-06,
"loss": 2.0488,
"step": 171
},
{
"epoch": 1.77,
"learning_rate": 7.2990617922393e-06,
"loss": 2.1353,
"step": 172
},
{
"epoch": 1.78,
"learning_rate": 7.269952498697734e-06,
"loss": 1.9332,
"step": 173
},
{
"epoch": 1.79,
"learning_rate": 7.240745967946113e-06,
"loss": 1.733,
"step": 174
},
{
"epoch": 1.8,
"learning_rate": 7.211443451095007e-06,
"loss": 1.9494,
"step": 175
},
{
"epoch": 1.81,
"learning_rate": 7.18204620336671e-06,
"loss": 1.9574,
"step": 176
},
{
"epoch": 1.82,
"learning_rate": 7.1525554840414765e-06,
"loss": 1.6955,
"step": 177
},
{
"epoch": 1.83,
"learning_rate": 7.1229725564035665e-06,
"loss": 1.7218,
"step": 178
},
{
"epoch": 1.84,
"learning_rate": 7.093298687687141e-06,
"loss": 2.0606,
"step": 179
},
{
"epoch": 1.85,
"learning_rate": 7.063535149021974e-06,
"loss": 1.8343,
"step": 180
},
{
"epoch": 1.86,
"learning_rate": 7.033683215379002e-06,
"loss": 1.9391,
"step": 181
},
{
"epoch": 1.87,
"learning_rate": 7.0037441655157045e-06,
"loss": 1.6598,
"step": 182
},
{
"epoch": 1.88,
"learning_rate": 6.973719281921336e-06,
"loss": 1.9021,
"step": 183
},
{
"epoch": 1.89,
"learning_rate": 6.943609850761979e-06,
"loss": 1.7992,
"step": 184
},
{
"epoch": 1.9,
"learning_rate": 6.913417161825449e-06,
"loss": 1.6588,
"step": 185
},
{
"epoch": 1.91,
"learning_rate": 6.883142508466054e-06,
"loss": 1.9947,
"step": 186
},
{
"epoch": 1.92,
"learning_rate": 6.852787187549182e-06,
"loss": 1.6098,
"step": 187
},
{
"epoch": 1.93,
"learning_rate": 6.822352499395751e-06,
"loss": 1.7802,
"step": 188
},
{
"epoch": 1.94,
"learning_rate": 6.7918397477265e-06,
"loss": 1.7142,
"step": 189
},
{
"epoch": 1.95,
"learning_rate": 6.7612502396061685e-06,
"loss": 1.6245,
"step": 190
},
{
"epoch": 1.96,
"learning_rate": 6.730585285387465e-06,
"loss": 1.9101,
"step": 191
},
{
"epoch": 1.97,
"learning_rate": 6.6998461986549715e-06,
"loss": 1.8337,
"step": 192
},
{
"epoch": 1.98,
"learning_rate": 6.669034296168855e-06,
"loss": 1.6132,
"step": 193
},
{
"epoch": 1.99,
"learning_rate": 6.638150897808469e-06,
"loss": 2.017,
"step": 194
},
{
"epoch": 2.0,
"learning_rate": 6.607197326515808e-06,
"loss": 1.914,
"step": 195
},
{
"epoch": 2.01,
"learning_rate": 6.57617490823885e-06,
"loss": 1.9561,
"step": 196
},
{
"epoch": 2.02,
"learning_rate": 6.545084971874738e-06,
"loss": 1.3971,
"step": 197
},
{
"epoch": 2.03,
"learning_rate": 6.513928849212874e-06,
"loss": 1.867,
"step": 198
},
{
"epoch": 2.04,
"learning_rate": 6.482707874877855e-06,
"loss": 2.0052,
"step": 199
},
{
"epoch": 2.05,
"learning_rate": 6.451423386272312e-06,
"loss": 1.7125,
"step": 200
},
{
"epoch": 2.06,
"learning_rate": 6.420076723519615e-06,
"loss": 1.9154,
"step": 201
},
{
"epoch": 2.07,
"learning_rate": 6.388669229406462e-06,
"loss": 1.8889,
"step": 202
},
{
"epoch": 2.08,
"learning_rate": 6.3572022493253715e-06,
"loss": 2.2509,
"step": 203
},
{
"epoch": 2.09,
"learning_rate": 6.325677131217041e-06,
"loss": 1.8072,
"step": 204
},
{
"epoch": 2.11,
"learning_rate": 6.294095225512604e-06,
"loss": 1.9041,
"step": 205
},
{
"epoch": 2.12,
"learning_rate": 6.26245788507579e-06,
"loss": 1.87,
"step": 206
},
{
"epoch": 2.13,
"learning_rate": 6.230766465144966e-06,
"loss": 1.748,
"step": 207
},
{
"epoch": 2.14,
"learning_rate": 6.199022323275083e-06,
"loss": 1.6786,
"step": 208
},
{
"epoch": 2.15,
"learning_rate": 6.1672268192795285e-06,
"loss": 1.6332,
"step": 209
},
{
"epoch": 2.16,
"learning_rate": 6.135381315171867e-06,
"loss": 1.9005,
"step": 210
},
{
"epoch": 2.17,
"learning_rate": 6.103487175107508e-06,
"loss": 2.0861,
"step": 211
},
{
"epoch": 2.18,
"learning_rate": 6.071545765325254e-06,
"loss": 2.0755,
"step": 212
},
{
"epoch": 2.19,
"learning_rate": 6.039558454088796e-06,
"loss": 1.9971,
"step": 213
},
{
"epoch": 2.2,
"learning_rate": 6.0075266116280865e-06,
"loss": 1.3869,
"step": 214
},
{
"epoch": 2.21,
"learning_rate": 5.975451610080643e-06,
"loss": 1.6617,
"step": 215
},
{
"epoch": 2.22,
"learning_rate": 5.943334823432777e-06,
"loss": 1.9629,
"step": 216
},
{
"epoch": 2.23,
"learning_rate": 5.911177627460739e-06,
"loss": 1.7073,
"step": 217
},
{
"epoch": 2.24,
"learning_rate": 5.878981399671774e-06,
"loss": 1.5506,
"step": 218
},
{
"epoch": 2.25,
"learning_rate": 5.846747519245123e-06,
"loss": 2.0415,
"step": 219
},
{
"epoch": 2.26,
"learning_rate": 5.814477366972945e-06,
"loss": 1.7911,
"step": 220
},
{
"epoch": 2.27,
"learning_rate": 5.782172325201155e-06,
"loss": 1.8445,
"step": 221
},
{
"epoch": 2.28,
"learning_rate": 5.749833777770225e-06,
"loss": 1.6548,
"step": 222
},
{
"epoch": 2.29,
"learning_rate": 5.717463109955896e-06,
"loss": 1.7875,
"step": 223
},
{
"epoch": 2.3,
"learning_rate": 5.6850617084098416e-06,
"loss": 2.066,
"step": 224
},
{
"epoch": 2.31,
"learning_rate": 5.65263096110026e-06,
"loss": 1.6987,
"step": 225
},
{
"epoch": 2.32,
"learning_rate": 5.620172257252427e-06,
"loss": 1.7769,
"step": 226
},
{
"epoch": 2.33,
"learning_rate": 5.587686987289189e-06,
"loss": 2.0361,
"step": 227
},
{
"epoch": 2.34,
"learning_rate": 5.555176542771389e-06,
"loss": 1.9782,
"step": 228
},
{
"epoch": 2.35,
"learning_rate": 5.522642316338268e-06,
"loss": 1.8761,
"step": 229
},
{
"epoch": 2.36,
"learning_rate": 5.490085701647805e-06,
"loss": 2.0268,
"step": 230
},
{
"epoch": 2.37,
"learning_rate": 5.457508093317013e-06,
"loss": 1.8907,
"step": 231
},
{
"epoch": 2.38,
"learning_rate": 5.4249108868622095e-06,
"loss": 1.9567,
"step": 232
},
{
"epoch": 2.39,
"learning_rate": 5.392295478639226e-06,
"loss": 1.6737,
"step": 233
},
{
"epoch": 2.4,
"learning_rate": 5.3596632657835975e-06,
"loss": 1.5955,
"step": 234
},
{
"epoch": 2.41,
"learning_rate": 5.327015646150716e-06,
"loss": 1.874,
"step": 235
},
{
"epoch": 2.42,
"learning_rate": 5.294354018255945e-06,
"loss": 1.9019,
"step": 236
},
{
"epoch": 2.43,
"learning_rate": 5.2616797812147205e-06,
"loss": 1.694,
"step": 237
},
{
"epoch": 2.44,
"learning_rate": 5.228994334682605e-06,
"loss": 1.6992,
"step": 238
},
{
"epoch": 2.45,
"learning_rate": 5.1962990787953436e-06,
"loss": 1.3999,
"step": 239
},
{
"epoch": 2.46,
"learning_rate": 5.1635954141088815e-06,
"loss": 2.0097,
"step": 240
},
{
"epoch": 2.47,
"learning_rate": 5.130884741539367e-06,
"loss": 1.2645,
"step": 241
},
{
"epoch": 2.49,
"learning_rate": 5.098168462303141e-06,
"loss": 1.7132,
"step": 242
},
{
"epoch": 2.5,
"learning_rate": 5.065447977856723e-06,
"loss": 2.0156,
"step": 243
},
{
"epoch": 2.51,
"learning_rate": 5.0327246898367595e-06,
"loss": 1.7722,
"step": 244
},
{
"epoch": 2.52,
"learning_rate": 5e-06,
"loss": 1.7103,
"step": 245
},
{
"epoch": 2.53,
"learning_rate": 4.967275310163241e-06,
"loss": 1.7698,
"step": 246
},
{
"epoch": 2.54,
"learning_rate": 4.934552022143279e-06,
"loss": 1.8627,
"step": 247
},
{
"epoch": 2.55,
"learning_rate": 4.90183153769686e-06,
"loss": 1.4709,
"step": 248
},
{
"epoch": 2.56,
"learning_rate": 4.869115258460636e-06,
"loss": 1.8202,
"step": 249
},
{
"epoch": 2.57,
"learning_rate": 4.83640458589112e-06,
"loss": 2.1629,
"step": 250
},
{
"epoch": 2.58,
"learning_rate": 4.803700921204659e-06,
"loss": 1.8219,
"step": 251
},
{
"epoch": 2.59,
"learning_rate": 4.771005665317398e-06,
"loss": 1.6865,
"step": 252
},
{
"epoch": 2.6,
"learning_rate": 4.738320218785281e-06,
"loss": 1.6643,
"step": 253
},
{
"epoch": 2.61,
"learning_rate": 4.705645981744055e-06,
"loss": 1.9281,
"step": 254
},
{
"epoch": 2.62,
"learning_rate": 4.672984353849285e-06,
"loss": 1.6741,
"step": 255
},
{
"epoch": 2.63,
"learning_rate": 4.640336734216403e-06,
"loss": 1.6219,
"step": 256
},
{
"epoch": 2.64,
"learning_rate": 4.6077045213607765e-06,
"loss": 1.8155,
"step": 257
},
{
"epoch": 2.65,
"learning_rate": 4.575089113137792e-06,
"loss": 2.274,
"step": 258
},
{
"epoch": 2.66,
"learning_rate": 4.542491906682988e-06,
"loss": 1.7379,
"step": 259
},
{
"epoch": 2.67,
"learning_rate": 4.509914298352197e-06,
"loss": 1.9472,
"step": 260
},
{
"epoch": 2.68,
"learning_rate": 4.477357683661734e-06,
"loss": 1.8948,
"step": 261
},
{
"epoch": 2.69,
"learning_rate": 4.4448234572286126e-06,
"loss": 1.7278,
"step": 262
},
{
"epoch": 2.7,
"learning_rate": 4.4123130127108125e-06,
"loss": 1.6116,
"step": 263
},
{
"epoch": 2.71,
"learning_rate": 4.379827742747575e-06,
"loss": 1.3439,
"step": 264
},
{
"epoch": 2.72,
"learning_rate": 4.347369038899744e-06,
"loss": 1.5077,
"step": 265
},
{
"epoch": 2.73,
"learning_rate": 4.314938291590161e-06,
"loss": 2.1546,
"step": 266
},
{
"epoch": 2.74,
"learning_rate": 4.282536890044105e-06,
"loss": 1.6388,
"step": 267
},
{
"epoch": 2.75,
"learning_rate": 4.250166222229775e-06,
"loss": 2.0773,
"step": 268
},
{
"epoch": 2.76,
"learning_rate": 4.217827674798845e-06,
"loss": 1.8691,
"step": 269
},
{
"epoch": 2.77,
"learning_rate": 4.185522633027057e-06,
"loss": 1.8167,
"step": 270
},
{
"epoch": 2.78,
"learning_rate": 4.1532524807548776e-06,
"loss": 1.6949,
"step": 271
},
{
"epoch": 2.79,
"learning_rate": 4.1210186003282275e-06,
"loss": 1.7713,
"step": 272
},
{
"epoch": 2.8,
"learning_rate": 4.088822372539263e-06,
"loss": 1.8129,
"step": 273
},
{
"epoch": 2.81,
"learning_rate": 4.056665176567225e-06,
"loss": 1.8503,
"step": 274
},
{
"epoch": 2.82,
"learning_rate": 4.02454838991936e-06,
"loss": 2.165,
"step": 275
},
{
"epoch": 2.83,
"learning_rate": 3.992473388371914e-06,
"loss": 1.8886,
"step": 276
},
{
"epoch": 2.84,
"learning_rate": 3.960441545911205e-06,
"loss": 1.7817,
"step": 277
},
{
"epoch": 2.85,
"learning_rate": 3.928454234674748e-06,
"loss": 1.9154,
"step": 278
},
{
"epoch": 2.87,
"learning_rate": 3.8965128248924956e-06,
"loss": 1.8407,
"step": 279
},
{
"epoch": 2.88,
"learning_rate": 3.864618684828135e-06,
"loss": 1.7042,
"step": 280
},
{
"epoch": 2.89,
"learning_rate": 3.832773180720475e-06,
"loss": 1.5406,
"step": 281
},
{
"epoch": 2.9,
"learning_rate": 3.800977676724919e-06,
"loss": 2.158,
"step": 282
},
{
"epoch": 2.91,
"learning_rate": 3.769233534855035e-06,
"loss": 1.6803,
"step": 283
},
{
"epoch": 2.92,
"learning_rate": 3.7375421149242102e-06,
"loss": 1.6978,
"step": 284
},
{
"epoch": 2.93,
"learning_rate": 3.705904774487396e-06,
"loss": 1.7548,
"step": 285
},
{
"epoch": 2.94,
"learning_rate": 3.6743228687829596e-06,
"loss": 1.8232,
"step": 286
},
{
"epoch": 2.95,
"learning_rate": 3.6427977506746293e-06,
"loss": 1.791,
"step": 287
},
{
"epoch": 2.96,
"learning_rate": 3.6113307705935398e-06,
"loss": 1.5677,
"step": 288
},
{
"epoch": 2.97,
"learning_rate": 3.579923276480387e-06,
"loss": 1.6384,
"step": 289
},
{
"epoch": 2.98,
"learning_rate": 3.5485766137276894e-06,
"loss": 1.7552,
"step": 290
},
{
"epoch": 2.99,
"learning_rate": 3.517292125122146e-06,
"loss": 1.803,
"step": 291
},
{
"epoch": 3.0,
"learning_rate": 3.486071150787128e-06,
"loss": 1.8583,
"step": 292
},
{
"epoch": 3.01,
"learning_rate": 3.4549150281252635e-06,
"loss": 1.6726,
"step": 293
},
{
"epoch": 3.02,
"learning_rate": 3.4238250917611533e-06,
"loss": 1.8941,
"step": 294
},
{
"epoch": 3.03,
"learning_rate": 3.3928026734841935e-06,
"loss": 1.7255,
"step": 295
},
{
"epoch": 3.04,
"learning_rate": 3.3618491021915334e-06,
"loss": 1.9277,
"step": 296
},
{
"epoch": 3.05,
"learning_rate": 3.330965703831146e-06,
"loss": 1.8718,
"step": 297
},
{
"epoch": 3.06,
"learning_rate": 3.3001538013450285e-06,
"loss": 2.1211,
"step": 298
},
{
"epoch": 3.07,
"learning_rate": 3.269414714612534e-06,
"loss": 1.489,
"step": 299
},
{
"epoch": 3.08,
"learning_rate": 3.2387497603938327e-06,
"loss": 1.5291,
"step": 300
},
{
"epoch": 3.09,
"learning_rate": 3.2081602522734987e-06,
"loss": 1.7413,
"step": 301
},
{
"epoch": 3.1,
"learning_rate": 3.177647500604252e-06,
"loss": 1.8069,
"step": 302
},
{
"epoch": 3.11,
"learning_rate": 3.147212812450819e-06,
"loss": 1.7031,
"step": 303
},
{
"epoch": 3.12,
"learning_rate": 3.1168574915339465e-06,
"loss": 1.8848,
"step": 304
},
{
"epoch": 3.13,
"learning_rate": 3.0865828381745515e-06,
"loss": 1.8549,
"step": 305
},
{
"epoch": 3.14,
"learning_rate": 3.056390149238022e-06,
"loss": 1.877,
"step": 306
},
{
"epoch": 3.15,
"learning_rate": 3.0262807180786647e-06,
"loss": 1.8691,
"step": 307
},
{
"epoch": 3.16,
"learning_rate": 2.9962558344842963e-06,
"loss": 1.8115,
"step": 308
},
{
"epoch": 3.17,
"learning_rate": 2.966316784621e-06,
"loss": 1.7895,
"step": 309
},
{
"epoch": 3.18,
"learning_rate": 2.936464850978027e-06,
"loss": 1.4557,
"step": 310
},
{
"epoch": 3.19,
"learning_rate": 2.906701312312861e-06,
"loss": 1.606,
"step": 311
},
{
"epoch": 3.2,
"learning_rate": 2.8770274435964356e-06,
"loss": 1.734,
"step": 312
},
{
"epoch": 3.21,
"learning_rate": 2.8474445159585235e-06,
"loss": 1.6564,
"step": 313
},
{
"epoch": 3.22,
"learning_rate": 2.817953796633289e-06,
"loss": 1.7049,
"step": 314
},
{
"epoch": 3.23,
"learning_rate": 2.7885565489049948e-06,
"loss": 1.9666,
"step": 315
},
{
"epoch": 3.25,
"learning_rate": 2.759254032053888e-06,
"loss": 1.7699,
"step": 316
},
{
"epoch": 3.26,
"learning_rate": 2.7300475013022666e-06,
"loss": 1.885,
"step": 317
},
{
"epoch": 3.27,
"learning_rate": 2.700938207760701e-06,
"loss": 1.402,
"step": 318
},
{
"epoch": 3.28,
"learning_rate": 2.671927398374443e-06,
"loss": 1.7307,
"step": 319
},
{
"epoch": 3.29,
"learning_rate": 2.6430163158700116e-06,
"loss": 2.0606,
"step": 320
},
{
"epoch": 3.3,
"learning_rate": 2.614206198701958e-06,
"loss": 1.9808,
"step": 321
},
{
"epoch": 3.31,
"learning_rate": 2.5854982809998154e-06,
"loss": 2.2166,
"step": 322
},
{
"epoch": 3.32,
"learning_rate": 2.5568937925152272e-06,
"loss": 1.9819,
"step": 323
},
{
"epoch": 3.33,
"learning_rate": 2.5283939585692787e-06,
"loss": 1.7913,
"step": 324
},
{
"epoch": 3.34,
"learning_rate": 2.5000000000000015e-06,
"loss": 1.6969,
"step": 325
},
{
"epoch": 3.35,
"learning_rate": 2.471713133110078e-06,
"loss": 1.8397,
"step": 326
},
{
"epoch": 3.36,
"learning_rate": 2.4435345696147404e-06,
"loss": 1.986,
"step": 327
},
{
"epoch": 3.37,
"learning_rate": 2.4154655165898626e-06,
"loss": 1.8169,
"step": 328
},
{
"epoch": 3.38,
"learning_rate": 2.387507176420256e-06,
"loss": 1.7352,
"step": 329
},
{
"epoch": 3.39,
"learning_rate": 2.3596607467481602e-06,
"loss": 1.866,
"step": 330
},
{
"epoch": 3.4,
"learning_rate": 2.3319274204219427e-06,
"loss": 1.6605,
"step": 331
},
{
"epoch": 3.41,
"learning_rate": 2.304308385444999e-06,
"loss": 2.0025,
"step": 332
},
{
"epoch": 3.42,
"learning_rate": 2.2768048249248648e-06,
"loss": 1.7726,
"step": 333
},
{
"epoch": 3.43,
"learning_rate": 2.2494179170225333e-06,
"loss": 1.9575,
"step": 334
},
{
"epoch": 3.44,
"learning_rate": 2.2221488349019903e-06,
"loss": 1.9168,
"step": 335
},
{
"epoch": 3.45,
"learning_rate": 2.1949987466799524e-06,
"loss": 1.9093,
"step": 336
},
{
"epoch": 3.46,
"learning_rate": 2.1679688153758373e-06,
"loss": 2.3,
"step": 337
},
{
"epoch": 3.47,
"learning_rate": 2.1410601988619394e-06,
"loss": 2.0302,
"step": 338
},
{
"epoch": 3.48,
"learning_rate": 2.1142740498138327e-06,
"loss": 1.6836,
"step": 339
},
{
"epoch": 3.49,
"learning_rate": 2.08761151566099e-06,
"loss": 1.7969,
"step": 340
},
{
"epoch": 3.5,
"learning_rate": 2.061073738537635e-06,
"loss": 1.7367,
"step": 341
},
{
"epoch": 3.51,
"learning_rate": 2.034661855233815e-06,
"loss": 1.7379,
"step": 342
},
{
"epoch": 3.52,
"learning_rate": 2.008376997146705e-06,
"loss": 1.5373,
"step": 343
},
{
"epoch": 3.53,
"learning_rate": 1.982220290232143e-06,
"loss": 1.6176,
"step": 344
},
{
"epoch": 3.54,
"learning_rate": 1.956192854956397e-06,
"loss": 1.8133,
"step": 345
},
{
"epoch": 3.55,
"learning_rate": 1.9302958062481673e-06,
"loss": 1.9453,
"step": 346
},
{
"epoch": 3.56,
"learning_rate": 1.9045302534508298e-06,
"loss": 1.7963,
"step": 347
},
{
"epoch": 3.57,
"learning_rate": 1.8788973002749112e-06,
"loss": 1.5473,
"step": 348
},
{
"epoch": 3.58,
"learning_rate": 1.8533980447508138e-06,
"loss": 1.6311,
"step": 349
},
{
"epoch": 3.59,
"learning_rate": 1.8280335791817733e-06,
"loss": 1.9016,
"step": 350
},
{
"epoch": 3.6,
"learning_rate": 1.8028049900970768e-06,
"loss": 1.9685,
"step": 351
},
{
"epoch": 3.61,
"learning_rate": 1.777713358205514e-06,
"loss": 1.7867,
"step": 352
},
{
"epoch": 3.63,
"learning_rate": 1.7527597583490825e-06,
"loss": 1.8788,
"step": 353
},
{
"epoch": 3.64,
"learning_rate": 1.7279452594569484e-06,
"loss": 1.8419,
"step": 354
},
{
"epoch": 3.65,
"learning_rate": 1.7032709244996559e-06,
"loss": 1.9689,
"step": 355
},
{
"epoch": 3.66,
"learning_rate": 1.6787378104435931e-06,
"loss": 1.8386,
"step": 356
},
{
"epoch": 3.67,
"learning_rate": 1.6543469682057105e-06,
"loss": 1.6358,
"step": 357
},
{
"epoch": 3.68,
"learning_rate": 1.6300994426085103e-06,
"loss": 1.8798,
"step": 358
},
{
"epoch": 3.69,
"learning_rate": 1.6059962723352912e-06,
"loss": 1.6017,
"step": 359
},
{
"epoch": 3.7,
"learning_rate": 1.5820384898856433e-06,
"loss": 2.1665,
"step": 360
},
{
"epoch": 3.71,
"learning_rate": 1.5582271215312294e-06,
"loss": 1.7402,
"step": 361
},
{
"epoch": 3.72,
"learning_rate": 1.5345631872718214e-06,
"loss": 1.5392,
"step": 362
},
{
"epoch": 3.73,
"learning_rate": 1.5110477007916002e-06,
"loss": 1.3907,
"step": 363
},
{
"epoch": 3.74,
"learning_rate": 1.487681669415742e-06,
"loss": 1.7261,
"step": 364
},
{
"epoch": 3.75,
"learning_rate": 1.4644660940672628e-06,
"loss": 1.6662,
"step": 365
},
{
"epoch": 3.76,
"learning_rate": 1.4414019692241437e-06,
"loss": 1.6629,
"step": 366
},
{
"epoch": 3.77,
"learning_rate": 1.4184902828767288e-06,
"loss": 1.6335,
"step": 367
},
{
"epoch": 3.78,
"learning_rate": 1.395732016485406e-06,
"loss": 1.875,
"step": 368
},
{
"epoch": 3.79,
"learning_rate": 1.373128144938563e-06,
"loss": 1.8328,
"step": 369
},
{
"epoch": 3.8,
"learning_rate": 1.3506796365108232e-06,
"loss": 1.773,
"step": 370
},
{
"epoch": 3.81,
"learning_rate": 1.3283874528215735e-06,
"loss": 1.8612,
"step": 371
},
{
"epoch": 3.82,
"learning_rate": 1.30625254879377e-06,
"loss": 1.8199,
"step": 372
},
{
"epoch": 3.83,
"learning_rate": 1.2842758726130283e-06,
"loss": 1.7815,
"step": 373
},
{
"epoch": 3.84,
"learning_rate": 1.2624583656870153e-06,
"loss": 1.8004,
"step": 374
},
{
"epoch": 3.85,
"learning_rate": 1.2408009626051137e-06,
"loss": 1.7107,
"step": 375
},
{
"epoch": 3.86,
"learning_rate": 1.2193045910983864e-06,
"loss": 1.698,
"step": 376
},
{
"epoch": 3.87,
"learning_rate": 1.1979701719998454e-06,
"loss": 1.8479,
"step": 377
},
{
"epoch": 3.88,
"learning_rate": 1.1767986192049986e-06,
"loss": 1.7407,
"step": 378
},
{
"epoch": 3.89,
"learning_rate": 1.1557908396327028e-06,
"loss": 1.69,
"step": 379
},
{
"epoch": 3.9,
"learning_rate": 1.134947733186315e-06,
"loss": 1.7125,
"step": 380
},
{
"epoch": 3.91,
"learning_rate": 1.1142701927151456e-06,
"loss": 1.8915,
"step": 381
},
{
"epoch": 3.92,
"learning_rate": 1.0937591039762086e-06,
"loss": 1.7312,
"step": 382
},
{
"epoch": 3.93,
"learning_rate": 1.0734153455962765e-06,
"loss": 1.6998,
"step": 383
},
{
"epoch": 3.94,
"learning_rate": 1.0532397890342506e-06,
"loss": 1.8645,
"step": 384
},
{
"epoch": 3.95,
"learning_rate": 1.0332332985438248e-06,
"loss": 1.6769,
"step": 385
},
{
"epoch": 3.96,
"learning_rate": 1.013396731136465e-06,
"loss": 1.7662,
"step": 386
},
{
"epoch": 3.97,
"learning_rate": 9.937309365446973e-07,
"loss": 1.5442,
"step": 387
},
{
"epoch": 3.98,
"learning_rate": 9.742367571857092e-07,
"loss": 1.9731,
"step": 388
},
{
"epoch": 3.99,
"learning_rate": 9.549150281252633e-07,
"loss": 2.015,
"step": 389
},
{
"epoch": 4.01,
"learning_rate": 9.357665770419244e-07,
"loss": 1.8496,
"step": 390
},
{
"epoch": 4.02,
"learning_rate": 9.167922241916055e-07,
"loss": 2.0151,
"step": 391
},
{
"epoch": 4.03,
"learning_rate": 8.979927823724321e-07,
"loss": 1.5833,
"step": 392
},
{
"epoch": 4.04,
"learning_rate": 8.793690568899216e-07,
"loss": 1.6405,
"step": 393
},
{
"epoch": 4.05,
"learning_rate": 8.609218455224893e-07,
"loss": 1.6127,
"step": 394
},
{
"epoch": 4.06,
"learning_rate": 8.426519384872733e-07,
"loss": 1.5767,
"step": 395
},
{
"epoch": 4.07,
"learning_rate": 8.245601184062851e-07,
"loss": 1.799,
"step": 396
},
{
"epoch": 4.08,
"learning_rate": 8.066471602728804e-07,
"loss": 1.7683,
"step": 397
},
{
"epoch": 4.09,
"learning_rate": 7.88913831418568e-07,
"loss": 1.6665,
"step": 398
},
{
"epoch": 4.1,
"learning_rate": 7.71360891480134e-07,
"loss": 1.7135,
"step": 399
},
{
"epoch": 4.11,
"learning_rate": 7.539890923671061e-07,
"loss": 1.5239,
"step": 400
},
{
"epoch": 4.12,
"learning_rate": 7.367991782295392e-07,
"loss": 1.5723,
"step": 401
},
{
"epoch": 4.13,
"learning_rate": 7.197918854261432e-07,
"loss": 1.565,
"step": 402
},
{
"epoch": 4.14,
"learning_rate": 7.029679424927366e-07,
"loss": 1.8298,
"step": 403
},
{
"epoch": 4.15,
"learning_rate": 6.863280701110409e-07,
"loss": 1.7697,
"step": 404
},
{
"epoch": 4.16,
"learning_rate": 6.698729810778065e-07,
"loss": 1.6768,
"step": 405
},
{
"epoch": 4.17,
"learning_rate": 6.536033802742814e-07,
"loss": 1.6173,
"step": 406
},
{
"epoch": 4.18,
"learning_rate": 6.375199646360142e-07,
"loss": 2.0991,
"step": 407
},
{
"epoch": 4.19,
"learning_rate": 6.216234231230012e-07,
"loss": 1.6167,
"step": 408
},
{
"epoch": 4.2,
"learning_rate": 6.059144366901737e-07,
"loss": 1.8663,
"step": 409
},
{
"epoch": 4.21,
"learning_rate": 5.903936782582253e-07,
"loss": 1.653,
"step": 410
},
{
"epoch": 4.22,
"learning_rate": 5.750618126847912e-07,
"loss": 1.6468,
"step": 411
},
{
"epoch": 4.23,
"learning_rate": 5.599194967359639e-07,
"loss": 1.7839,
"step": 412
},
{
"epoch": 4.24,
"learning_rate": 5.449673790581611e-07,
"loss": 1.9525,
"step": 413
},
{
"epoch": 4.25,
"learning_rate": 5.302061001503395e-07,
"loss": 1.694,
"step": 414
},
{
"epoch": 4.26,
"learning_rate": 5.156362923365587e-07,
"loss": 2.1018,
"step": 415
},
{
"epoch": 4.27,
"learning_rate": 5.012585797388936e-07,
"loss": 1.8602,
"step": 416
},
{
"epoch": 4.28,
"learning_rate": 4.87073578250698e-07,
"loss": 1.8102,
"step": 417
},
{
"epoch": 4.29,
"learning_rate": 4.730818955102234e-07,
"loss": 1.528,
"step": 418
},
{
"epoch": 4.3,
"learning_rate": 4.5928413087459325e-07,
"loss": 1.9294,
"step": 419
},
{
"epoch": 4.31,
"learning_rate": 4.456808753941205e-07,
"loss": 1.4773,
"step": 420
},
{
"epoch": 4.32,
"learning_rate": 4.322727117869951e-07,
"loss": 1.5413,
"step": 421
},
{
"epoch": 4.33,
"learning_rate": 4.1906021441432074e-07,
"loss": 1.9885,
"step": 422
},
{
"epoch": 4.34,
"learning_rate": 4.0604394925550906e-07,
"loss": 1.6872,
"step": 423
},
{
"epoch": 4.35,
"learning_rate": 3.9322447388403796e-07,
"loss": 1.692,
"step": 424
},
{
"epoch": 4.36,
"learning_rate": 3.8060233744356634e-07,
"loss": 1.8052,
"step": 425
},
{
"epoch": 4.37,
"learning_rate": 3.6817808062440953e-07,
"loss": 1.8786,
"step": 426
},
{
"epoch": 4.39,
"learning_rate": 3.5595223564037884e-07,
"loss": 1.9182,
"step": 427
},
{
"epoch": 4.4,
"learning_rate": 3.439253262059822e-07,
"loss": 1.6238,
"step": 428
},
{
"epoch": 4.41,
"learning_rate": 3.320978675139919e-07,
"loss": 1.8523,
"step": 429
},
{
"epoch": 4.42,
"learning_rate": 3.204703662133724e-07,
"loss": 1.7301,
"step": 430
},
{
"epoch": 4.43,
"learning_rate": 3.0904332038757977e-07,
"loss": 1.5696,
"step": 431
},
{
"epoch": 4.44,
"learning_rate": 2.9781721953322627e-07,
"loss": 1.8397,
"step": 432
},
{
"epoch": 4.45,
"learning_rate": 2.867925445391079e-07,
"loss": 1.6669,
"step": 433
},
{
"epoch": 4.46,
"learning_rate": 2.7596976766560977e-07,
"loss": 1.7799,
"step": 434
},
{
"epoch": 4.47,
"learning_rate": 2.653493525244721e-07,
"loss": 1.9204,
"step": 435
},
{
"epoch": 4.48,
"learning_rate": 2.5493175405893076e-07,
"loss": 2.3138,
"step": 436
},
{
"epoch": 4.49,
"learning_rate": 2.447174185242324e-07,
"loss": 1.9517,
"step": 437
},
{
"epoch": 4.5,
"learning_rate": 2.3470678346851517e-07,
"loss": 1.9572,
"step": 438
},
{
"epoch": 4.51,
"learning_rate": 2.2490027771406686e-07,
"loss": 1.6796,
"step": 439
},
{
"epoch": 4.52,
"learning_rate": 2.152983213389559e-07,
"loss": 1.9822,
"step": 440
},
{
"epoch": 4.53,
"learning_rate": 2.0590132565903475e-07,
"loss": 1.9031,
"step": 441
},
{
"epoch": 4.54,
"learning_rate": 1.9670969321032407e-07,
"loss": 1.7692,
"step": 442
},
{
"epoch": 4.55,
"learning_rate": 1.8772381773176417e-07,
"loss": 1.7435,
"step": 443
},
{
"epoch": 4.56,
"learning_rate": 1.7894408414835362e-07,
"loss": 1.5606,
"step": 444
},
{
"epoch": 4.57,
"learning_rate": 1.7037086855465902e-07,
"loss": 1.9003,
"step": 445
},
{
"epoch": 4.58,
"learning_rate": 1.6200453819870122e-07,
"loss": 1.603,
"step": 446
},
{
"epoch": 4.59,
"learning_rate": 1.5384545146622854e-07,
"loss": 1.5294,
"step": 447
},
{
"epoch": 4.6,
"learning_rate": 1.4589395786535954e-07,
"loss": 1.7242,
"step": 448
},
{
"epoch": 4.61,
"learning_rate": 1.3815039801161723e-07,
"loss": 1.958,
"step": 449
},
{
"epoch": 4.62,
"learning_rate": 1.3061510361333186e-07,
"loss": 2.2857,
"step": 450
},
{
"epoch": 4.63,
"learning_rate": 1.232883974574367e-07,
"loss": 1.6402,
"step": 451
},
{
"epoch": 4.64,
"learning_rate": 1.1617059339563807e-07,
"loss": 1.9478,
"step": 452
},
{
"epoch": 4.65,
"learning_rate": 1.0926199633097156e-07,
"loss": 1.9499,
"step": 453
},
{
"epoch": 4.66,
"learning_rate": 1.0256290220474308e-07,
"loss": 2.0481,
"step": 454
},
{
"epoch": 4.67,
"learning_rate": 9.607359798384785e-08,
"loss": 2.0789,
"step": 455
},
{
"epoch": 4.68,
"learning_rate": 8.979436164848088e-08,
"loss": 1.7505,
"step": 456
},
{
"epoch": 4.69,
"learning_rate": 8.372546218022747e-08,
"loss": 1.4945,
"step": 457
},
{
"epoch": 4.7,
"learning_rate": 7.786715955054202e-08,
"loss": 1.8899,
"step": 458
},
{
"epoch": 4.71,
"learning_rate": 7.221970470961125e-08,
"loss": 1.7366,
"step": 459
},
{
"epoch": 4.72,
"learning_rate": 6.678333957560513e-08,
"loss": 1.8401,
"step": 460
},
{
"epoch": 4.73,
"learning_rate": 6.15582970243117e-08,
"loss": 1.6992,
"step": 461
},
{
"epoch": 4.74,
"learning_rate": 5.654480087916303e-08,
"loss": 2.0905,
"step": 462
},
{
"epoch": 4.75,
"learning_rate": 5.174306590164879e-08,
"loss": 1.8036,
"step": 463
},
{
"epoch": 4.77,
"learning_rate": 4.715329778211375e-08,
"loss": 1.7985,
"step": 464
},
{
"epoch": 4.78,
"learning_rate": 4.2775693130948094e-08,
"loss": 1.5621,
"step": 465
},
{
"epoch": 4.79,
"learning_rate": 3.861043947016474e-08,
"loss": 1.7846,
"step": 466
},
{
"epoch": 4.8,
"learning_rate": 3.465771522536854e-08,
"loss": 1.5247,
"step": 467
},
{
"epoch": 4.81,
"learning_rate": 3.09176897181096e-08,
"loss": 1.8658,
"step": 468
},
{
"epoch": 4.82,
"learning_rate": 2.7390523158633552e-08,
"loss": 1.6407,
"step": 469
},
{
"epoch": 4.83,
"learning_rate": 2.4076366639015914e-08,
"loss": 1.8986,
"step": 470
},
{
"epoch": 4.84,
"learning_rate": 2.097536212669171e-08,
"loss": 1.9711,
"step": 471
},
{
"epoch": 4.85,
"learning_rate": 1.8087642458373135e-08,
"loss": 2.034,
"step": 472
},
{
"epoch": 4.86,
"learning_rate": 1.541333133436018e-08,
"loss": 1.5749,
"step": 473
},
{
"epoch": 4.87,
"learning_rate": 1.2952543313240474e-08,
"loss": 1.6367,
"step": 474
},
{
"epoch": 4.88,
"learning_rate": 1.0705383806982606e-08,
"loss": 1.8309,
"step": 475
},
{
"epoch": 4.89,
"learning_rate": 8.671949076420883e-09,
"loss": 1.8067,
"step": 476
},
{
"epoch": 4.9,
"learning_rate": 6.852326227130835e-09,
"loss": 2.1646,
"step": 477
},
{
"epoch": 4.91,
"learning_rate": 5.246593205699424e-09,
"loss": 2.0148,
"step": 478
},
{
"epoch": 4.92,
"learning_rate": 3.854818796385495e-09,
"loss": 1.6979,
"step": 479
},
{
"epoch": 4.93,
"learning_rate": 2.6770626181715776e-09,
"loss": 1.7767,
"step": 480
},
{
"epoch": 4.94,
"learning_rate": 1.7133751222137007e-09,
"loss": 1.8102,
"step": 481
},
{
"epoch": 4.95,
"learning_rate": 9.637975896759077e-10,
"loss": 1.6763,
"step": 482
},
{
"epoch": 4.96,
"learning_rate": 4.283621299649987e-10,
"loss": 1.9275,
"step": 483
},
{
"epoch": 4.97,
"learning_rate": 1.0709167935385456e-10,
"loss": 1.9208,
"step": 484
},
{
"epoch": 4.98,
"learning_rate": 0.0,
"loss": 1.9557,
"step": 485
},
{
"epoch": 4.98,
"step": 485,
"total_flos": 4.3609614855438336e+17,
"train_loss": 1.863322993160523,
"train_runtime": 8795.9312,
"train_samples_per_second": 0.443,
"train_steps_per_second": 0.055
}
],
"logging_steps": 1.0,
"max_steps": 485,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 1000,
"total_flos": 4.3609614855438336e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}