{
"best_metric": 0.4402177035808563,
"best_model_checkpoint": "NHS-BiomedNLP-BiomedBERT-hypop-512\\checkpoint-794",
"epoch": 3.0,
"eval_steps": 500,
"global_step": 1191,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 10.960957527160645,
"learning_rate": 2.9987405541561713e-05,
"loss": 0.7342,
"step": 1
},
{
"epoch": 0.01,
"grad_norm": 9.521003723144531,
"learning_rate": 2.997481108312343e-05,
"loss": 0.7329,
"step": 2
},
{
"epoch": 0.01,
"grad_norm": 8.874771118164062,
"learning_rate": 2.9962216624685138e-05,
"loss": 0.7373,
"step": 3
},
{
"epoch": 0.01,
"grad_norm": 7.235786437988281,
"learning_rate": 2.994962216624685e-05,
"loss": 0.6377,
"step": 4
},
{
"epoch": 0.01,
"grad_norm": 13.307806968688965,
"learning_rate": 2.9937027707808566e-05,
"loss": 0.7346,
"step": 5
},
{
"epoch": 0.02,
"grad_norm": 5.770761013031006,
"learning_rate": 2.992443324937028e-05,
"loss": 0.6492,
"step": 6
},
{
"epoch": 0.02,
"grad_norm": 6.9356560707092285,
"learning_rate": 2.9911838790931988e-05,
"loss": 0.6832,
"step": 7
},
{
"epoch": 0.02,
"grad_norm": 13.655845642089844,
"learning_rate": 2.9899244332493704e-05,
"loss": 0.8057,
"step": 8
},
{
"epoch": 0.02,
"grad_norm": 8.541060447692871,
"learning_rate": 2.9886649874055416e-05,
"loss": 0.6656,
"step": 9
},
{
"epoch": 0.03,
"grad_norm": 6.785902500152588,
"learning_rate": 2.987405541561713e-05,
"loss": 0.6787,
"step": 10
},
{
"epoch": 0.03,
"grad_norm": 6.900331974029541,
"learning_rate": 2.9861460957178844e-05,
"loss": 0.6868,
"step": 11
},
{
"epoch": 0.03,
"grad_norm": 9.852057456970215,
"learning_rate": 2.9848866498740553e-05,
"loss": 0.6991,
"step": 12
},
{
"epoch": 0.03,
"grad_norm": 6.37741756439209,
"learning_rate": 2.9836272040302266e-05,
"loss": 0.6302,
"step": 13
},
{
"epoch": 0.04,
"grad_norm": 5.589299201965332,
"learning_rate": 2.982367758186398e-05,
"loss": 0.6055,
"step": 14
},
{
"epoch": 0.04,
"grad_norm": 10.325979232788086,
"learning_rate": 2.9811083123425694e-05,
"loss": 0.7367,
"step": 15
},
{
"epoch": 0.04,
"grad_norm": 10.072173118591309,
"learning_rate": 2.9798488664987406e-05,
"loss": 0.6229,
"step": 16
},
{
"epoch": 0.04,
"grad_norm": 10.84774398803711,
"learning_rate": 2.978589420654912e-05,
"loss": 0.601,
"step": 17
},
{
"epoch": 0.05,
"grad_norm": 12.358999252319336,
"learning_rate": 2.977329974811083e-05,
"loss": 0.6277,
"step": 18
},
{
"epoch": 0.05,
"grad_norm": 13.19227409362793,
"learning_rate": 2.9760705289672544e-05,
"loss": 0.5326,
"step": 19
},
{
"epoch": 0.05,
"grad_norm": 6.927993297576904,
"learning_rate": 2.974811083123426e-05,
"loss": 0.5725,
"step": 20
},
{
"epoch": 0.05,
"grad_norm": 11.326826095581055,
"learning_rate": 2.973551637279597e-05,
"loss": 0.6021,
"step": 21
},
{
"epoch": 0.06,
"grad_norm": 7.477938652038574,
"learning_rate": 2.972292191435768e-05,
"loss": 0.4767,
"step": 22
},
{
"epoch": 0.06,
"grad_norm": 17.05657958984375,
"learning_rate": 2.9710327455919397e-05,
"loss": 0.4546,
"step": 23
},
{
"epoch": 0.06,
"grad_norm": 10.406664848327637,
"learning_rate": 2.969773299748111e-05,
"loss": 0.4608,
"step": 24
},
{
"epoch": 0.06,
"grad_norm": 7.126372337341309,
"learning_rate": 2.968513853904282e-05,
"loss": 0.5253,
"step": 25
},
{
"epoch": 0.07,
"grad_norm": 22.958934783935547,
"learning_rate": 2.9672544080604534e-05,
"loss": 0.7278,
"step": 26
},
{
"epoch": 0.07,
"grad_norm": 15.145363807678223,
"learning_rate": 2.9659949622166247e-05,
"loss": 0.6793,
"step": 27
},
{
"epoch": 0.07,
"grad_norm": 6.81843900680542,
"learning_rate": 2.964735516372796e-05,
"loss": 0.5066,
"step": 28
},
{
"epoch": 0.07,
"grad_norm": 5.444020748138428,
"learning_rate": 2.9634760705289675e-05,
"loss": 0.2889,
"step": 29
},
{
"epoch": 0.08,
"grad_norm": 20.001272201538086,
"learning_rate": 2.9622166246851387e-05,
"loss": 0.4007,
"step": 30
},
{
"epoch": 0.08,
"grad_norm": 9.261313438415527,
"learning_rate": 2.9609571788413096e-05,
"loss": 0.4314,
"step": 31
},
{
"epoch": 0.08,
"grad_norm": 9.045961380004883,
"learning_rate": 2.9596977329974812e-05,
"loss": 0.7514,
"step": 32
},
{
"epoch": 0.08,
"grad_norm": 8.198954582214355,
"learning_rate": 2.9584382871536524e-05,
"loss": 0.498,
"step": 33
},
{
"epoch": 0.09,
"grad_norm": 13.375472068786621,
"learning_rate": 2.9571788413098237e-05,
"loss": 0.3819,
"step": 34
},
{
"epoch": 0.09,
"grad_norm": 9.266693115234375,
"learning_rate": 2.955919395465995e-05,
"loss": 0.4062,
"step": 35
},
{
"epoch": 0.09,
"grad_norm": 10.688496589660645,
"learning_rate": 2.9546599496221662e-05,
"loss": 0.37,
"step": 36
},
{
"epoch": 0.09,
"grad_norm": 4.4655046463012695,
"learning_rate": 2.9534005037783378e-05,
"loss": 0.1862,
"step": 37
},
{
"epoch": 0.1,
"grad_norm": 20.452878952026367,
"learning_rate": 2.952141057934509e-05,
"loss": 0.6387,
"step": 38
},
{
"epoch": 0.1,
"grad_norm": 6.115667819976807,
"learning_rate": 2.9508816120906802e-05,
"loss": 0.404,
"step": 39
},
{
"epoch": 0.1,
"grad_norm": 18.10906982421875,
"learning_rate": 2.9496221662468515e-05,
"loss": 0.5456,
"step": 40
},
{
"epoch": 0.1,
"grad_norm": 11.372830390930176,
"learning_rate": 2.9483627204030227e-05,
"loss": 0.2977,
"step": 41
},
{
"epoch": 0.11,
"grad_norm": 8.976436614990234,
"learning_rate": 2.947103274559194e-05,
"loss": 0.4582,
"step": 42
},
{
"epoch": 0.11,
"grad_norm": 16.73163414001465,
"learning_rate": 2.9458438287153656e-05,
"loss": 0.3448,
"step": 43
},
{
"epoch": 0.11,
"grad_norm": 8.729334831237793,
"learning_rate": 2.9445843828715368e-05,
"loss": 0.1669,
"step": 44
},
{
"epoch": 0.11,
"grad_norm": 4.726550579071045,
"learning_rate": 2.9433249370277077e-05,
"loss": 0.1398,
"step": 45
},
{
"epoch": 0.12,
"grad_norm": 4.698641300201416,
"learning_rate": 2.9420654911838793e-05,
"loss": 0.1377,
"step": 46
},
{
"epoch": 0.12,
"grad_norm": 15.612041473388672,
"learning_rate": 2.9408060453400505e-05,
"loss": 0.5077,
"step": 47
},
{
"epoch": 0.12,
"grad_norm": 18.663637161254883,
"learning_rate": 2.9395465994962218e-05,
"loss": 0.5674,
"step": 48
},
{
"epoch": 0.12,
"grad_norm": 24.12005043029785,
"learning_rate": 2.938287153652393e-05,
"loss": 0.546,
"step": 49
},
{
"epoch": 0.13,
"grad_norm": 16.070539474487305,
"learning_rate": 2.9370277078085643e-05,
"loss": 0.4623,
"step": 50
},
{
"epoch": 0.13,
"grad_norm": 26.91022491455078,
"learning_rate": 2.9357682619647355e-05,
"loss": 0.3909,
"step": 51
},
{
"epoch": 0.13,
"grad_norm": 13.222580909729004,
"learning_rate": 2.934508816120907e-05,
"loss": 0.5905,
"step": 52
},
{
"epoch": 0.13,
"grad_norm": 13.613443374633789,
"learning_rate": 2.9332493702770783e-05,
"loss": 0.3731,
"step": 53
},
{
"epoch": 0.14,
"grad_norm": 20.913328170776367,
"learning_rate": 2.9319899244332492e-05,
"loss": 0.789,
"step": 54
},
{
"epoch": 0.14,
"grad_norm": 10.94701099395752,
"learning_rate": 2.9307304785894208e-05,
"loss": 0.1207,
"step": 55
},
{
"epoch": 0.14,
"grad_norm": 10.41066837310791,
"learning_rate": 2.929471032745592e-05,
"loss": 0.4236,
"step": 56
},
{
"epoch": 0.14,
"grad_norm": 19.012123107910156,
"learning_rate": 2.9282115869017633e-05,
"loss": 0.474,
"step": 57
},
{
"epoch": 0.15,
"grad_norm": 6.834192276000977,
"learning_rate": 2.926952141057935e-05,
"loss": 0.3311,
"step": 58
},
{
"epoch": 0.15,
"grad_norm": 9.958150863647461,
"learning_rate": 2.9256926952141058e-05,
"loss": 0.3635,
"step": 59
},
{
"epoch": 0.15,
"grad_norm": 7.105954647064209,
"learning_rate": 2.924433249370277e-05,
"loss": 0.3664,
"step": 60
},
{
"epoch": 0.15,
"grad_norm": 10.265762329101562,
"learning_rate": 2.9231738035264486e-05,
"loss": 0.4841,
"step": 61
},
{
"epoch": 0.16,
"grad_norm": 7.81852912902832,
"learning_rate": 2.92191435768262e-05,
"loss": 0.2711,
"step": 62
},
{
"epoch": 0.16,
"grad_norm": 8.933136940002441,
"learning_rate": 2.9206549118387908e-05,
"loss": 0.4243,
"step": 63
},
{
"epoch": 0.16,
"grad_norm": 12.95289421081543,
"learning_rate": 2.9193954659949623e-05,
"loss": 0.3405,
"step": 64
},
{
"epoch": 0.16,
"grad_norm": 6.353665828704834,
"learning_rate": 2.9181360201511336e-05,
"loss": 0.3388,
"step": 65
},
{
"epoch": 0.17,
"grad_norm": 6.9620795249938965,
"learning_rate": 2.9168765743073048e-05,
"loss": 0.2494,
"step": 66
},
{
"epoch": 0.17,
"grad_norm": 21.418628692626953,
"learning_rate": 2.9156171284634764e-05,
"loss": 0.7494,
"step": 67
},
{
"epoch": 0.17,
"grad_norm": 11.0220308303833,
"learning_rate": 2.9143576826196473e-05,
"loss": 0.3143,
"step": 68
},
{
"epoch": 0.17,
"grad_norm": 17.424291610717773,
"learning_rate": 2.9130982367758185e-05,
"loss": 0.9001,
"step": 69
},
{
"epoch": 0.18,
"grad_norm": 20.80506134033203,
"learning_rate": 2.91183879093199e-05,
"loss": 0.6554,
"step": 70
},
{
"epoch": 0.18,
"grad_norm": 10.28822135925293,
"learning_rate": 2.9105793450881614e-05,
"loss": 0.6043,
"step": 71
},
{
"epoch": 0.18,
"grad_norm": 15.434713363647461,
"learning_rate": 2.9093198992443326e-05,
"loss": 0.4225,
"step": 72
},
{
"epoch": 0.18,
"grad_norm": 12.180867195129395,
"learning_rate": 2.908060453400504e-05,
"loss": 0.3573,
"step": 73
},
{
"epoch": 0.19,
"grad_norm": 11.896459579467773,
"learning_rate": 2.906801007556675e-05,
"loss": 0.5342,
"step": 74
},
{
"epoch": 0.19,
"grad_norm": 7.4386444091796875,
"learning_rate": 2.9055415617128463e-05,
"loss": 0.4236,
"step": 75
},
{
"epoch": 0.19,
"grad_norm": 11.402010917663574,
"learning_rate": 2.904282115869018e-05,
"loss": 0.3661,
"step": 76
},
{
"epoch": 0.19,
"grad_norm": 9.488443374633789,
"learning_rate": 2.903022670025189e-05,
"loss": 0.7061,
"step": 77
},
{
"epoch": 0.2,
"grad_norm": 15.988265991210938,
"learning_rate": 2.90176322418136e-05,
"loss": 0.6259,
"step": 78
},
{
"epoch": 0.2,
"grad_norm": 11.976941108703613,
"learning_rate": 2.9005037783375317e-05,
"loss": 0.5461,
"step": 79
},
{
"epoch": 0.2,
"grad_norm": 10.190409660339355,
"learning_rate": 2.899244332493703e-05,
"loss": 0.5816,
"step": 80
},
{
"epoch": 0.2,
"grad_norm": 6.492282867431641,
"learning_rate": 2.897984886649874e-05,
"loss": 0.2346,
"step": 81
},
{
"epoch": 0.21,
"grad_norm": 13.058635711669922,
"learning_rate": 2.8967254408060454e-05,
"loss": 0.3657,
"step": 82
},
{
"epoch": 0.21,
"grad_norm": 14.294368743896484,
"learning_rate": 2.8954659949622166e-05,
"loss": 0.5426,
"step": 83
},
{
"epoch": 0.21,
"grad_norm": 11.543447494506836,
"learning_rate": 2.894206549118388e-05,
"loss": 0.5771,
"step": 84
},
{
"epoch": 0.21,
"grad_norm": 7.160464286804199,
"learning_rate": 2.8929471032745595e-05,
"loss": 0.3088,
"step": 85
},
{
"epoch": 0.22,
"grad_norm": 16.53839683532715,
"learning_rate": 2.8916876574307307e-05,
"loss": 0.7966,
"step": 86
},
{
"epoch": 0.22,
"grad_norm": 10.61913013458252,
"learning_rate": 2.8904282115869016e-05,
"loss": 0.507,
"step": 87
},
{
"epoch": 0.22,
"grad_norm": 5.3180084228515625,
"learning_rate": 2.8891687657430732e-05,
"loss": 0.4556,
"step": 88
},
{
"epoch": 0.22,
"grad_norm": 7.9845805168151855,
"learning_rate": 2.8879093198992444e-05,
"loss": 0.462,
"step": 89
},
{
"epoch": 0.23,
"grad_norm": 19.176565170288086,
"learning_rate": 2.8866498740554157e-05,
"loss": 0.6049,
"step": 90
},
{
"epoch": 0.23,
"grad_norm": 6.490673065185547,
"learning_rate": 2.885390428211587e-05,
"loss": 0.4461,
"step": 91
},
{
"epoch": 0.23,
"grad_norm": 10.937054634094238,
"learning_rate": 2.884130982367758e-05,
"loss": 0.4222,
"step": 92
},
{
"epoch": 0.23,
"grad_norm": 14.133673667907715,
"learning_rate": 2.8828715365239294e-05,
"loss": 0.6477,
"step": 93
},
{
"epoch": 0.24,
"grad_norm": 7.424984931945801,
"learning_rate": 2.881612090680101e-05,
"loss": 0.3932,
"step": 94
},
{
"epoch": 0.24,
"grad_norm": 6.378269195556641,
"learning_rate": 2.8803526448362722e-05,
"loss": 0.3421,
"step": 95
},
{
"epoch": 0.24,
"grad_norm": 14.893821716308594,
"learning_rate": 2.879093198992443e-05,
"loss": 0.4213,
"step": 96
},
{
"epoch": 0.24,
"grad_norm": 7.521154880523682,
"learning_rate": 2.8778337531486147e-05,
"loss": 0.3938,
"step": 97
},
{
"epoch": 0.25,
"grad_norm": 7.131565093994141,
"learning_rate": 2.876574307304786e-05,
"loss": 0.4146,
"step": 98
},
{
"epoch": 0.25,
"grad_norm": 17.402559280395508,
"learning_rate": 2.8753148614609572e-05,
"loss": 0.6407,
"step": 99
},
{
"epoch": 0.25,
"grad_norm": 17.919559478759766,
"learning_rate": 2.8740554156171288e-05,
"loss": 0.5773,
"step": 100
},
{
"epoch": 0.25,
"grad_norm": 18.85222625732422,
"learning_rate": 2.8727959697732997e-05,
"loss": 0.8411,
"step": 101
},
{
"epoch": 0.26,
"grad_norm": 17.24286651611328,
"learning_rate": 2.871536523929471e-05,
"loss": 0.6594,
"step": 102
},
{
"epoch": 0.26,
"grad_norm": 8.199654579162598,
"learning_rate": 2.8702770780856425e-05,
"loss": 0.2904,
"step": 103
},
{
"epoch": 0.26,
"grad_norm": 11.349382400512695,
"learning_rate": 2.8690176322418137e-05,
"loss": 0.4303,
"step": 104
},
{
"epoch": 0.26,
"grad_norm": 5.197649002075195,
"learning_rate": 2.8677581863979846e-05,
"loss": 0.3887,
"step": 105
},
{
"epoch": 0.27,
"grad_norm": 6.405046463012695,
"learning_rate": 2.8664987405541562e-05,
"loss": 0.2129,
"step": 106
},
{
"epoch": 0.27,
"grad_norm": 11.765724182128906,
"learning_rate": 2.8652392947103275e-05,
"loss": 0.6096,
"step": 107
},
{
"epoch": 0.27,
"grad_norm": 15.173542022705078,
"learning_rate": 2.8639798488664987e-05,
"loss": 0.8199,
"step": 108
},
{
"epoch": 0.27,
"grad_norm": 15.437769889831543,
"learning_rate": 2.8627204030226703e-05,
"loss": 0.6021,
"step": 109
},
{
"epoch": 0.28,
"grad_norm": 8.98073673248291,
"learning_rate": 2.8614609571788412e-05,
"loss": 0.4355,
"step": 110
},
{
"epoch": 0.28,
"grad_norm": 8.301546096801758,
"learning_rate": 2.8602015113350128e-05,
"loss": 0.6601,
"step": 111
},
{
"epoch": 0.28,
"grad_norm": 14.387860298156738,
"learning_rate": 2.858942065491184e-05,
"loss": 0.482,
"step": 112
},
{
"epoch": 0.28,
"grad_norm": 14.607189178466797,
"learning_rate": 2.8576826196473553e-05,
"loss": 0.5696,
"step": 113
},
{
"epoch": 0.29,
"grad_norm": 8.80168628692627,
"learning_rate": 2.8564231738035265e-05,
"loss": 0.3216,
"step": 114
},
{
"epoch": 0.29,
"grad_norm": 10.599414825439453,
"learning_rate": 2.8551637279596978e-05,
"loss": 0.6489,
"step": 115
},
{
"epoch": 0.29,
"grad_norm": 11.09121322631836,
"learning_rate": 2.853904282115869e-05,
"loss": 0.2516,
"step": 116
},
{
"epoch": 0.29,
"grad_norm": 7.300434112548828,
"learning_rate": 2.8526448362720406e-05,
"loss": 0.4725,
"step": 117
},
{
"epoch": 0.3,
"grad_norm": 16.0921688079834,
"learning_rate": 2.8513853904282118e-05,
"loss": 0.6409,
"step": 118
},
{
"epoch": 0.3,
"grad_norm": 6.426705360412598,
"learning_rate": 2.8501259445843827e-05,
"loss": 0.5262,
"step": 119
},
{
"epoch": 0.3,
"grad_norm": 7.335501670837402,
"learning_rate": 2.8488664987405543e-05,
"loss": 0.4493,
"step": 120
},
{
"epoch": 0.3,
"grad_norm": 7.7583794593811035,
"learning_rate": 2.8476070528967256e-05,
"loss": 0.4348,
"step": 121
},
{
"epoch": 0.31,
"grad_norm": 6.631812572479248,
"learning_rate": 2.8463476070528968e-05,
"loss": 0.2344,
"step": 122
},
{
"epoch": 0.31,
"grad_norm": 19.749597549438477,
"learning_rate": 2.8450881612090684e-05,
"loss": 0.6976,
"step": 123
},
{
"epoch": 0.31,
"grad_norm": 8.363977432250977,
"learning_rate": 2.8438287153652393e-05,
"loss": 0.4216,
"step": 124
},
{
"epoch": 0.31,
"grad_norm": 10.750311851501465,
"learning_rate": 2.8425692695214105e-05,
"loss": 0.5134,
"step": 125
},
{
"epoch": 0.32,
"grad_norm": 8.392335891723633,
"learning_rate": 2.841309823677582e-05,
"loss": 0.485,
"step": 126
},
{
"epoch": 0.32,
"grad_norm": 9.613751411437988,
"learning_rate": 2.8400503778337533e-05,
"loss": 0.5329,
"step": 127
},
{
"epoch": 0.32,
"grad_norm": 5.709622859954834,
"learning_rate": 2.8387909319899243e-05,
"loss": 0.4024,
"step": 128
},
{
"epoch": 0.32,
"grad_norm": 11.972945213317871,
"learning_rate": 2.837531486146096e-05,
"loss": 0.3772,
"step": 129
},
{
"epoch": 0.33,
"grad_norm": 9.365653991699219,
"learning_rate": 2.836272040302267e-05,
"loss": 0.9204,
"step": 130
},
{
"epoch": 0.33,
"grad_norm": 8.527847290039062,
"learning_rate": 2.8350125944584383e-05,
"loss": 0.3677,
"step": 131
},
{
"epoch": 0.33,
"grad_norm": 10.193449020385742,
"learning_rate": 2.83375314861461e-05,
"loss": 0.4139,
"step": 132
},
{
"epoch": 0.34,
"grad_norm": 9.360228538513184,
"learning_rate": 2.8324937027707808e-05,
"loss": 0.1871,
"step": 133
},
{
"epoch": 0.34,
"grad_norm": 11.814716339111328,
"learning_rate": 2.831234256926952e-05,
"loss": 0.547,
"step": 134
},
{
"epoch": 0.34,
"grad_norm": 6.990214824676514,
"learning_rate": 2.8299748110831236e-05,
"loss": 0.229,
"step": 135
},
{
"epoch": 0.34,
"grad_norm": 9.345853805541992,
"learning_rate": 2.828715365239295e-05,
"loss": 0.3516,
"step": 136
},
{
"epoch": 0.35,
"grad_norm": 8.358905792236328,
"learning_rate": 2.827455919395466e-05,
"loss": 0.4395,
"step": 137
},
{
"epoch": 0.35,
"grad_norm": 4.4381327629089355,
"learning_rate": 2.8261964735516374e-05,
"loss": 0.4123,
"step": 138
},
{
"epoch": 0.35,
"grad_norm": 5.183178424835205,
"learning_rate": 2.8249370277078086e-05,
"loss": 0.2701,
"step": 139
},
{
"epoch": 0.35,
"grad_norm": 4.981905460357666,
"learning_rate": 2.82367758186398e-05,
"loss": 0.3501,
"step": 140
},
{
"epoch": 0.36,
"grad_norm": 4.92559814453125,
"learning_rate": 2.8224181360201514e-05,
"loss": 0.3215,
"step": 141
},
{
"epoch": 0.36,
"grad_norm": 10.248849868774414,
"learning_rate": 2.8211586901763223e-05,
"loss": 0.4752,
"step": 142
},
{
"epoch": 0.36,
"grad_norm": 24.566904067993164,
"learning_rate": 2.8198992443324936e-05,
"loss": 0.4138,
"step": 143
},
{
"epoch": 0.36,
"grad_norm": 14.266925811767578,
"learning_rate": 2.818639798488665e-05,
"loss": 0.2534,
"step": 144
},
{
"epoch": 0.37,
"grad_norm": 12.045469284057617,
"learning_rate": 2.8173803526448364e-05,
"loss": 0.3652,
"step": 145
},
{
"epoch": 0.37,
"grad_norm": 10.166648864746094,
"learning_rate": 2.8161209068010076e-05,
"loss": 0.4335,
"step": 146
},
{
"epoch": 0.37,
"grad_norm": 6.525755882263184,
"learning_rate": 2.814861460957179e-05,
"loss": 0.3855,
"step": 147
},
{
"epoch": 0.37,
"grad_norm": 7.986118316650391,
"learning_rate": 2.81360201511335e-05,
"loss": 0.3791,
"step": 148
},
{
"epoch": 0.38,
"grad_norm": 10.436816215515137,
"learning_rate": 2.8123425692695214e-05,
"loss": 0.5131,
"step": 149
},
{
"epoch": 0.38,
"grad_norm": 10.12028694152832,
"learning_rate": 2.811083123425693e-05,
"loss": 0.4801,
"step": 150
},
{
"epoch": 0.38,
"grad_norm": 11.41160774230957,
"learning_rate": 2.8098236775818642e-05,
"loss": 0.4166,
"step": 151
},
{
"epoch": 0.38,
"grad_norm": 11.20549201965332,
"learning_rate": 2.808564231738035e-05,
"loss": 0.5492,
"step": 152
},
{
"epoch": 0.39,
"grad_norm": 8.953025817871094,
"learning_rate": 2.8073047858942067e-05,
"loss": 0.5637,
"step": 153
},
{
"epoch": 0.39,
"grad_norm": 10.427567481994629,
"learning_rate": 2.806045340050378e-05,
"loss": 0.3726,
"step": 154
},
{
"epoch": 0.39,
"grad_norm": 10.698348999023438,
"learning_rate": 2.804785894206549e-05,
"loss": 0.581,
"step": 155
},
{
"epoch": 0.39,
"grad_norm": 11.016364097595215,
"learning_rate": 2.8035264483627204e-05,
"loss": 0.4332,
"step": 156
},
{
"epoch": 0.4,
"grad_norm": 9.223053932189941,
"learning_rate": 2.8022670025188917e-05,
"loss": 0.325,
"step": 157
},
{
"epoch": 0.4,
"grad_norm": 7.178832054138184,
"learning_rate": 2.801007556675063e-05,
"loss": 0.4933,
"step": 158
},
{
"epoch": 0.4,
"grad_norm": 7.114950180053711,
"learning_rate": 2.7997481108312345e-05,
"loss": 0.3691,
"step": 159
},
{
"epoch": 0.4,
"grad_norm": 11.535995483398438,
"learning_rate": 2.7984886649874057e-05,
"loss": 0.3785,
"step": 160
},
{
"epoch": 0.41,
"grad_norm": 11.003711700439453,
"learning_rate": 2.7972292191435766e-05,
"loss": 0.458,
"step": 161
},
{
"epoch": 0.41,
"grad_norm": 7.845949649810791,
"learning_rate": 2.7959697732997482e-05,
"loss": 0.3575,
"step": 162
},
{
"epoch": 0.41,
"grad_norm": 7.582950592041016,
"learning_rate": 2.7947103274559194e-05,
"loss": 0.5969,
"step": 163
},
{
"epoch": 0.41,
"grad_norm": 10.127944946289062,
"learning_rate": 2.7934508816120907e-05,
"loss": 0.4724,
"step": 164
},
{
"epoch": 0.42,
"grad_norm": 7.671854496002197,
"learning_rate": 2.7921914357682623e-05,
"loss": 0.5269,
"step": 165
},
{
"epoch": 0.42,
"grad_norm": 8.781110763549805,
"learning_rate": 2.7909319899244332e-05,
"loss": 0.7148,
"step": 166
},
{
"epoch": 0.42,
"grad_norm": 8.324240684509277,
"learning_rate": 2.7896725440806044e-05,
"loss": 0.428,
"step": 167
},
{
"epoch": 0.42,
"grad_norm": 9.713485717773438,
"learning_rate": 2.788413098236776e-05,
"loss": 0.5671,
"step": 168
},
{
"epoch": 0.43,
"grad_norm": 6.988973617553711,
"learning_rate": 2.7871536523929472e-05,
"loss": 0.506,
"step": 169
},
{
"epoch": 0.43,
"grad_norm": 19.869598388671875,
"learning_rate": 2.785894206549118e-05,
"loss": 0.5968,
"step": 170
},
{
"epoch": 0.43,
"grad_norm": 10.117294311523438,
"learning_rate": 2.7846347607052897e-05,
"loss": 0.413,
"step": 171
},
{
"epoch": 0.43,
"grad_norm": 7.587430477142334,
"learning_rate": 2.783375314861461e-05,
"loss": 0.4436,
"step": 172
},
{
"epoch": 0.44,
"grad_norm": 3.9757165908813477,
"learning_rate": 2.7821158690176322e-05,
"loss": 0.2495,
"step": 173
},
{
"epoch": 0.44,
"grad_norm": 6.600593566894531,
"learning_rate": 2.7808564231738038e-05,
"loss": 0.3974,
"step": 174
},
{
"epoch": 0.44,
"grad_norm": 7.599936485290527,
"learning_rate": 2.7795969773299747e-05,
"loss": 0.4323,
"step": 175
},
{
"epoch": 0.44,
"grad_norm": 8.428034782409668,
"learning_rate": 2.778337531486146e-05,
"loss": 0.48,
"step": 176
},
{
"epoch": 0.45,
"grad_norm": 10.259221076965332,
"learning_rate": 2.7770780856423175e-05,
"loss": 0.4639,
"step": 177
},
{
"epoch": 0.45,
"grad_norm": 8.270182609558105,
"learning_rate": 2.7758186397984888e-05,
"loss": 0.3487,
"step": 178
},
{
"epoch": 0.45,
"grad_norm": 5.60026741027832,
"learning_rate": 2.77455919395466e-05,
"loss": 0.4958,
"step": 179
},
{
"epoch": 0.45,
"grad_norm": 9.737464904785156,
"learning_rate": 2.7732997481108313e-05,
"loss": 0.4293,
"step": 180
},
{
"epoch": 0.46,
"grad_norm": 10.6303129196167,
"learning_rate": 2.7720403022670025e-05,
"loss": 0.4395,
"step": 181
},
{
"epoch": 0.46,
"grad_norm": 8.617947578430176,
"learning_rate": 2.770780856423174e-05,
"loss": 0.5972,
"step": 182
},
{
"epoch": 0.46,
"grad_norm": 9.682026863098145,
"learning_rate": 2.7695214105793453e-05,
"loss": 0.2741,
"step": 183
},
{
"epoch": 0.46,
"grad_norm": 5.532878398895264,
"learning_rate": 2.7682619647355162e-05,
"loss": 0.4699,
"step": 184
},
{
"epoch": 0.47,
"grad_norm": 11.166542053222656,
"learning_rate": 2.7670025188916878e-05,
"loss": 0.5117,
"step": 185
},
{
"epoch": 0.47,
"grad_norm": 6.78548526763916,
"learning_rate": 2.765743073047859e-05,
"loss": 0.3471,
"step": 186
},
{
"epoch": 0.47,
"grad_norm": 12.514228820800781,
"learning_rate": 2.7644836272040303e-05,
"loss": 0.6523,
"step": 187
},
{
"epoch": 0.47,
"grad_norm": 7.483336925506592,
"learning_rate": 2.763224181360202e-05,
"loss": 0.5714,
"step": 188
},
{
"epoch": 0.48,
"grad_norm": 8.872393608093262,
"learning_rate": 2.7619647355163728e-05,
"loss": 0.4711,
"step": 189
},
{
"epoch": 0.48,
"grad_norm": 5.779665946960449,
"learning_rate": 2.760705289672544e-05,
"loss": 0.3747,
"step": 190
},
{
"epoch": 0.48,
"grad_norm": 6.670152187347412,
"learning_rate": 2.7594458438287156e-05,
"loss": 0.3524,
"step": 191
},
{
"epoch": 0.48,
"grad_norm": 8.483373641967773,
"learning_rate": 2.758186397984887e-05,
"loss": 0.2449,
"step": 192
},
{
"epoch": 0.49,
"grad_norm": 7.961008548736572,
"learning_rate": 2.756926952141058e-05,
"loss": 0.5184,
"step": 193
},
{
"epoch": 0.49,
"grad_norm": 8.475528717041016,
"learning_rate": 2.7556675062972293e-05,
"loss": 0.4057,
"step": 194
},
{
"epoch": 0.49,
"grad_norm": 5.382789611816406,
"learning_rate": 2.7544080604534006e-05,
"loss": 0.3483,
"step": 195
},
{
"epoch": 0.49,
"grad_norm": 11.06598949432373,
"learning_rate": 2.7531486146095718e-05,
"loss": 0.4566,
"step": 196
},
{
"epoch": 0.5,
"grad_norm": 7.172100067138672,
"learning_rate": 2.7518891687657434e-05,
"loss": 0.5069,
"step": 197
},
{
"epoch": 0.5,
"grad_norm": 8.69959545135498,
"learning_rate": 2.7506297229219143e-05,
"loss": 0.4024,
"step": 198
},
{
"epoch": 0.5,
"grad_norm": 3.8215749263763428,
"learning_rate": 2.7493702770780855e-05,
"loss": 0.1903,
"step": 199
},
{
"epoch": 0.5,
"grad_norm": 6.7800726890563965,
"learning_rate": 2.748110831234257e-05,
"loss": 0.4091,
"step": 200
},
{
"epoch": 0.51,
"grad_norm": 11.993806838989258,
"learning_rate": 2.7468513853904284e-05,
"loss": 0.4989,
"step": 201
},
{
"epoch": 0.51,
"grad_norm": 14.418086051940918,
"learning_rate": 2.7455919395465996e-05,
"loss": 0.5218,
"step": 202
},
{
"epoch": 0.51,
"grad_norm": 9.064515113830566,
"learning_rate": 2.744332493702771e-05,
"loss": 0.357,
"step": 203
},
{
"epoch": 0.51,
"grad_norm": 4.245691299438477,
"learning_rate": 2.743073047858942e-05,
"loss": 0.3012,
"step": 204
},
{
"epoch": 0.52,
"grad_norm": 10.50638198852539,
"learning_rate": 2.7418136020151133e-05,
"loss": 0.3103,
"step": 205
},
{
"epoch": 0.52,
"grad_norm": 7.120365619659424,
"learning_rate": 2.740554156171285e-05,
"loss": 0.3018,
"step": 206
},
{
"epoch": 0.52,
"grad_norm": 14.648916244506836,
"learning_rate": 2.7392947103274562e-05,
"loss": 0.8458,
"step": 207
},
{
"epoch": 0.52,
"grad_norm": 11.240401268005371,
"learning_rate": 2.738035264483627e-05,
"loss": 0.2182,
"step": 208
},
{
"epoch": 0.53,
"grad_norm": 15.59676456451416,
"learning_rate": 2.7367758186397987e-05,
"loss": 0.2614,
"step": 209
},
{
"epoch": 0.53,
"grad_norm": 2.611619472503662,
"learning_rate": 2.73551637279597e-05,
"loss": 0.0725,
"step": 210
},
{
"epoch": 0.53,
"grad_norm": 15.017423629760742,
"learning_rate": 2.734256926952141e-05,
"loss": 0.6751,
"step": 211
},
{
"epoch": 0.53,
"grad_norm": 8.739672660827637,
"learning_rate": 2.7329974811083124e-05,
"loss": 0.1393,
"step": 212
},
{
"epoch": 0.54,
"grad_norm": 13.18393611907959,
"learning_rate": 2.7317380352644836e-05,
"loss": 0.4262,
"step": 213
},
{
"epoch": 0.54,
"grad_norm": 15.25338363647461,
"learning_rate": 2.730478589420655e-05,
"loss": 0.4582,
"step": 214
},
{
"epoch": 0.54,
"grad_norm": 4.455483913421631,
"learning_rate": 2.7292191435768265e-05,
"loss": 0.2272,
"step": 215
},
{
"epoch": 0.54,
"grad_norm": 5.5019145011901855,
"learning_rate": 2.7279596977329977e-05,
"loss": 0.2219,
"step": 216
},
{
"epoch": 0.55,
"grad_norm": 6.36189603805542,
"learning_rate": 2.7267002518891686e-05,
"loss": 0.3612,
"step": 217
},
{
"epoch": 0.55,
"grad_norm": 11.216201782226562,
"learning_rate": 2.7254408060453402e-05,
"loss": 0.4951,
"step": 218
},
{
"epoch": 0.55,
"grad_norm": 7.516535758972168,
"learning_rate": 2.7241813602015114e-05,
"loss": 0.3473,
"step": 219
},
{
"epoch": 0.55,
"grad_norm": 12.50316047668457,
"learning_rate": 2.7229219143576827e-05,
"loss": 0.5692,
"step": 220
},
{
"epoch": 0.56,
"grad_norm": 7.9396562576293945,
"learning_rate": 2.721662468513854e-05,
"loss": 0.3818,
"step": 221
},
{
"epoch": 0.56,
"grad_norm": 10.905122756958008,
"learning_rate": 2.720403022670025e-05,
"loss": 0.1508,
"step": 222
},
{
"epoch": 0.56,
"grad_norm": 11.742682456970215,
"learning_rate": 2.7191435768261964e-05,
"loss": 0.5067,
"step": 223
},
{
"epoch": 0.56,
"grad_norm": 9.895332336425781,
"learning_rate": 2.717884130982368e-05,
"loss": 0.4384,
"step": 224
},
{
"epoch": 0.57,
"grad_norm": 13.22032642364502,
"learning_rate": 2.7166246851385392e-05,
"loss": 0.2769,
"step": 225
},
{
"epoch": 0.57,
"grad_norm": 8.62972354888916,
"learning_rate": 2.71536523929471e-05,
"loss": 0.141,
"step": 226
},
{
"epoch": 0.57,
"grad_norm": 13.368087768554688,
"learning_rate": 2.7141057934508817e-05,
"loss": 0.485,
"step": 227
},
{
"epoch": 0.57,
"grad_norm": 5.622757911682129,
"learning_rate": 2.712846347607053e-05,
"loss": 0.3319,
"step": 228
},
{
"epoch": 0.58,
"grad_norm": 6.637231349945068,
"learning_rate": 2.7115869017632242e-05,
"loss": 0.5557,
"step": 229
},
{
"epoch": 0.58,
"grad_norm": 9.410402297973633,
"learning_rate": 2.7103274559193958e-05,
"loss": 0.4845,
"step": 230
},
{
"epoch": 0.58,
"grad_norm": 18.66705894470215,
"learning_rate": 2.7090680100755667e-05,
"loss": 0.9765,
"step": 231
},
{
"epoch": 0.58,
"grad_norm": 4.065969467163086,
"learning_rate": 2.707808564231738e-05,
"loss": 0.2138,
"step": 232
},
{
"epoch": 0.59,
"grad_norm": 12.393918991088867,
"learning_rate": 2.7065491183879095e-05,
"loss": 0.6254,
"step": 233
},
{
"epoch": 0.59,
"grad_norm": 13.665956497192383,
"learning_rate": 2.7052896725440807e-05,
"loss": 0.6912,
"step": 234
},
{
"epoch": 0.59,
"grad_norm": 6.5648345947265625,
"learning_rate": 2.7040302267002517e-05,
"loss": 0.4344,
"step": 235
},
{
"epoch": 0.59,
"grad_norm": 4.4842529296875,
"learning_rate": 2.7027707808564232e-05,
"loss": 0.3209,
"step": 236
},
{
"epoch": 0.6,
"grad_norm": 3.8340840339660645,
"learning_rate": 2.7015113350125945e-05,
"loss": 0.4045,
"step": 237
},
{
"epoch": 0.6,
"grad_norm": 10.3326997756958,
"learning_rate": 2.7002518891687657e-05,
"loss": 0.4998,
"step": 238
},
{
"epoch": 0.6,
"grad_norm": 8.519319534301758,
"learning_rate": 2.6989924433249373e-05,
"loss": 0.3126,
"step": 239
},
{
"epoch": 0.6,
"grad_norm": 5.563427448272705,
"learning_rate": 2.6977329974811082e-05,
"loss": 0.2947,
"step": 240
},
{
"epoch": 0.61,
"grad_norm": 4.757438659667969,
"learning_rate": 2.6964735516372794e-05,
"loss": 0.3535,
"step": 241
},
{
"epoch": 0.61,
"grad_norm": 7.1805419921875,
"learning_rate": 2.695214105793451e-05,
"loss": 0.4377,
"step": 242
},
{
"epoch": 0.61,
"grad_norm": 8.782963752746582,
"learning_rate": 2.6939546599496223e-05,
"loss": 0.5135,
"step": 243
},
{
"epoch": 0.61,
"grad_norm": 6.521146774291992,
"learning_rate": 2.6926952141057935e-05,
"loss": 0.4571,
"step": 244
},
{
"epoch": 0.62,
"grad_norm": 9.651432991027832,
"learning_rate": 2.6914357682619648e-05,
"loss": 0.3851,
"step": 245
},
{
"epoch": 0.62,
"grad_norm": 13.755444526672363,
"learning_rate": 2.690176322418136e-05,
"loss": 0.6536,
"step": 246
},
{
"epoch": 0.62,
"grad_norm": 6.192646503448486,
"learning_rate": 2.6889168765743072e-05,
"loss": 0.3358,
"step": 247
},
{
"epoch": 0.62,
"grad_norm": 10.30482292175293,
"learning_rate": 2.6876574307304788e-05,
"loss": 0.4194,
"step": 248
},
{
"epoch": 0.63,
"grad_norm": 6.788898944854736,
"learning_rate": 2.6863979848866497e-05,
"loss": 0.5712,
"step": 249
},
{
"epoch": 0.63,
"grad_norm": 10.21720027923584,
"learning_rate": 2.685138539042821e-05,
"loss": 0.3626,
"step": 250
},
{
"epoch": 0.63,
"grad_norm": 7.160099029541016,
"learning_rate": 2.6838790931989926e-05,
"loss": 0.26,
"step": 251
},
{
"epoch": 0.63,
"grad_norm": 10.605951309204102,
"learning_rate": 2.6826196473551638e-05,
"loss": 0.7586,
"step": 252
},
{
"epoch": 0.64,
"grad_norm": 10.67288589477539,
"learning_rate": 2.681360201511335e-05,
"loss": 0.5889,
"step": 253
},
{
"epoch": 0.64,
"grad_norm": 5.152278423309326,
"learning_rate": 2.6801007556675063e-05,
"loss": 0.2835,
"step": 254
},
{
"epoch": 0.64,
"grad_norm": 6.4969563484191895,
"learning_rate": 2.6788413098236775e-05,
"loss": 0.3987,
"step": 255
},
{
"epoch": 0.64,
"grad_norm": 8.223746299743652,
"learning_rate": 2.677581863979849e-05,
"loss": 0.3398,
"step": 256
},
{
"epoch": 0.65,
"grad_norm": 13.44676399230957,
"learning_rate": 2.6763224181360204e-05,
"loss": 0.5064,
"step": 257
},
{
"epoch": 0.65,
"grad_norm": 8.834419250488281,
"learning_rate": 2.6750629722921916e-05,
"loss": 0.4247,
"step": 258
},
{
"epoch": 0.65,
"grad_norm": 7.546679973602295,
"learning_rate": 2.673803526448363e-05,
"loss": 0.3359,
"step": 259
},
{
"epoch": 0.65,
"grad_norm": 11.630725860595703,
"learning_rate": 2.672544080604534e-05,
"loss": 0.4661,
"step": 260
},
{
"epoch": 0.66,
"grad_norm": 6.135303497314453,
"learning_rate": 2.6712846347607053e-05,
"loss": 0.2913,
"step": 261
},
{
"epoch": 0.66,
"grad_norm": 7.024556636810303,
"learning_rate": 2.670025188916877e-05,
"loss": 0.2892,
"step": 262
},
{
"epoch": 0.66,
"grad_norm": 11.134332656860352,
"learning_rate": 2.6687657430730478e-05,
"loss": 0.2884,
"step": 263
},
{
"epoch": 0.66,
"grad_norm": 19.467782974243164,
"learning_rate": 2.667506297229219e-05,
"loss": 0.5973,
"step": 264
},
{
"epoch": 0.67,
"grad_norm": 6.133577346801758,
"learning_rate": 2.6662468513853906e-05,
"loss": 0.4258,
"step": 265
},
{
"epoch": 0.67,
"grad_norm": 12.55841064453125,
"learning_rate": 2.664987405541562e-05,
"loss": 0.4355,
"step": 266
},
{
"epoch": 0.67,
"grad_norm": 17.070459365844727,
"learning_rate": 2.663727959697733e-05,
"loss": 0.7106,
"step": 267
},
{
"epoch": 0.68,
"grad_norm": 13.686192512512207,
"learning_rate": 2.6624685138539044e-05,
"loss": 0.526,
"step": 268
},
{
"epoch": 0.68,
"grad_norm": 11.354989051818848,
"learning_rate": 2.6612090680100756e-05,
"loss": 0.3703,
"step": 269
},
{
"epoch": 0.68,
"grad_norm": 9.988239288330078,
"learning_rate": 2.659949622166247e-05,
"loss": 0.4727,
"step": 270
},
{
"epoch": 0.68,
"grad_norm": 8.656394958496094,
"learning_rate": 2.6586901763224184e-05,
"loss": 0.1363,
"step": 271
},
{
"epoch": 0.69,
"grad_norm": 7.235680103302002,
"learning_rate": 2.6574307304785897e-05,
"loss": 0.2889,
"step": 272
},
{
"epoch": 0.69,
"grad_norm": 18.26453399658203,
"learning_rate": 2.6561712846347606e-05,
"loss": 0.6898,
"step": 273
},
{
"epoch": 0.69,
"grad_norm": 8.426389694213867,
"learning_rate": 2.654911838790932e-05,
"loss": 0.3522,
"step": 274
},
{
"epoch": 0.69,
"grad_norm": 9.811342239379883,
"learning_rate": 2.6536523929471034e-05,
"loss": 0.5926,
"step": 275
},
{
"epoch": 0.7,
"grad_norm": 6.944952011108398,
"learning_rate": 2.6523929471032746e-05,
"loss": 0.281,
"step": 276
},
{
"epoch": 0.7,
"grad_norm": 7.4408979415893555,
"learning_rate": 2.651133501259446e-05,
"loss": 0.5496,
"step": 277
},
{
"epoch": 0.7,
"grad_norm": 8.388789176940918,
"learning_rate": 2.649874055415617e-05,
"loss": 0.2775,
"step": 278
},
{
"epoch": 0.7,
"grad_norm": 4.758674144744873,
"learning_rate": 2.6486146095717884e-05,
"loss": 0.4558,
"step": 279
},
{
"epoch": 0.71,
"grad_norm": 6.666047096252441,
"learning_rate": 2.64735516372796e-05,
"loss": 0.4307,
"step": 280
},
{
"epoch": 0.71,
"grad_norm": 13.093096733093262,
"learning_rate": 2.6460957178841312e-05,
"loss": 0.5806,
"step": 281
},
{
"epoch": 0.71,
"grad_norm": 12.456526756286621,
"learning_rate": 2.644836272040302e-05,
"loss": 0.5669,
"step": 282
},
{
"epoch": 0.71,
"grad_norm": 6.2549004554748535,
"learning_rate": 2.6435768261964737e-05,
"loss": 0.3338,
"step": 283
},
{
"epoch": 0.72,
"grad_norm": 6.807823657989502,
"learning_rate": 2.642317380352645e-05,
"loss": 0.5899,
"step": 284
},
{
"epoch": 0.72,
"grad_norm": 5.582499027252197,
"learning_rate": 2.641057934508816e-05,
"loss": 0.4312,
"step": 285
},
{
"epoch": 0.72,
"grad_norm": 7.518726348876953,
"learning_rate": 2.6397984886649878e-05,
"loss": 0.3892,
"step": 286
},
{
"epoch": 0.72,
"grad_norm": 5.416556358337402,
"learning_rate": 2.6385390428211587e-05,
"loss": 0.2505,
"step": 287
},
{
"epoch": 0.73,
"grad_norm": 6.679108619689941,
"learning_rate": 2.63727959697733e-05,
"loss": 0.4348,
"step": 288
},
{
"epoch": 0.73,
"grad_norm": 4.240274429321289,
"learning_rate": 2.6360201511335015e-05,
"loss": 0.5039,
"step": 289
},
{
"epoch": 0.73,
"grad_norm": 9.389612197875977,
"learning_rate": 2.6347607052896727e-05,
"loss": 0.5208,
"step": 290
},
{
"epoch": 0.73,
"grad_norm": 7.322227478027344,
"learning_rate": 2.6335012594458436e-05,
"loss": 0.5255,
"step": 291
},
{
"epoch": 0.74,
"grad_norm": 6.759903430938721,
"learning_rate": 2.6322418136020152e-05,
"loss": 0.3596,
"step": 292
},
{
"epoch": 0.74,
"grad_norm": 4.8109822273254395,
"learning_rate": 2.6309823677581865e-05,
"loss": 0.2835,
"step": 293
},
{
"epoch": 0.74,
"grad_norm": 4.74676513671875,
"learning_rate": 2.6297229219143577e-05,
"loss": 0.321,
"step": 294
},
{
"epoch": 0.74,
"grad_norm": 8.768762588500977,
"learning_rate": 2.6284634760705293e-05,
"loss": 0.3736,
"step": 295
},
{
"epoch": 0.75,
"grad_norm": 6.698115825653076,
"learning_rate": 2.6272040302267002e-05,
"loss": 0.2979,
"step": 296
},
{
"epoch": 0.75,
"grad_norm": 9.396990776062012,
"learning_rate": 2.6259445843828714e-05,
"loss": 0.6949,
"step": 297
},
{
"epoch": 0.75,
"grad_norm": 6.292881011962891,
"learning_rate": 2.624685138539043e-05,
"loss": 0.5847,
"step": 298
},
{
"epoch": 0.75,
"grad_norm": 5.798967361450195,
"learning_rate": 2.6234256926952142e-05,
"loss": 0.413,
"step": 299
},
{
"epoch": 0.76,
"grad_norm": 8.518070220947266,
"learning_rate": 2.6221662468513855e-05,
"loss": 0.4463,
"step": 300
},
{
"epoch": 0.76,
"grad_norm": 7.179047584533691,
"learning_rate": 2.6209068010075567e-05,
"loss": 0.4101,
"step": 301
},
{
"epoch": 0.76,
"grad_norm": 8.893898963928223,
"learning_rate": 2.619647355163728e-05,
"loss": 0.3712,
"step": 302
},
{
"epoch": 0.76,
"grad_norm": 7.180310249328613,
"learning_rate": 2.6183879093198992e-05,
"loss": 0.4253,
"step": 303
},
{
"epoch": 0.77,
"grad_norm": 11.03390884399414,
"learning_rate": 2.6171284634760708e-05,
"loss": 0.3372,
"step": 304
},
{
"epoch": 0.77,
"grad_norm": 6.37282133102417,
"learning_rate": 2.6158690176322417e-05,
"loss": 0.2729,
"step": 305
},
{
"epoch": 0.77,
"grad_norm": 8.934061050415039,
"learning_rate": 2.614609571788413e-05,
"loss": 0.2433,
"step": 306
},
{
"epoch": 0.77,
"grad_norm": 7.522178649902344,
"learning_rate": 2.6133501259445845e-05,
"loss": 0.4284,
"step": 307
},
{
"epoch": 0.78,
"grad_norm": 8.500354766845703,
"learning_rate": 2.6120906801007558e-05,
"loss": 0.3639,
"step": 308
},
{
"epoch": 0.78,
"grad_norm": 11.417220115661621,
"learning_rate": 2.610831234256927e-05,
"loss": 0.7663,
"step": 309
},
{
"epoch": 0.78,
"grad_norm": 6.737428665161133,
"learning_rate": 2.6095717884130983e-05,
"loss": 0.2624,
"step": 310
},
{
"epoch": 0.78,
"grad_norm": 6.082789897918701,
"learning_rate": 2.6083123425692695e-05,
"loss": 0.4592,
"step": 311
},
{
"epoch": 0.79,
"grad_norm": 13.96414566040039,
"learning_rate": 2.6070528967254407e-05,
"loss": 0.7136,
"step": 312
},
{
"epoch": 0.79,
"grad_norm": 14.575349807739258,
"learning_rate": 2.6057934508816123e-05,
"loss": 0.5151,
"step": 313
},
{
"epoch": 0.79,
"grad_norm": 9.101264953613281,
"learning_rate": 2.6045340050377836e-05,
"loss": 0.3629,
"step": 314
},
{
"epoch": 0.79,
"grad_norm": 14.307868957519531,
"learning_rate": 2.6032745591939545e-05,
"loss": 0.2515,
"step": 315
},
{
"epoch": 0.8,
"grad_norm": 4.331357955932617,
"learning_rate": 2.602015113350126e-05,
"loss": 0.1049,
"step": 316
},
{
"epoch": 0.8,
"grad_norm": 10.65372371673584,
"learning_rate": 2.6007556675062973e-05,
"loss": 0.7491,
"step": 317
},
{
"epoch": 0.8,
"grad_norm": 7.100830554962158,
"learning_rate": 2.5994962216624685e-05,
"loss": 0.2969,
"step": 318
},
{
"epoch": 0.8,
"grad_norm": 8.876154899597168,
"learning_rate": 2.5982367758186398e-05,
"loss": 0.2077,
"step": 319
},
{
"epoch": 0.81,
"grad_norm": 9.560657501220703,
"learning_rate": 2.596977329974811e-05,
"loss": 0.6008,
"step": 320
},
{
"epoch": 0.81,
"grad_norm": 6.376749515533447,
"learning_rate": 2.5957178841309823e-05,
"loss": 0.2282,
"step": 321
},
{
"epoch": 0.81,
"grad_norm": 7.339711666107178,
"learning_rate": 2.594458438287154e-05,
"loss": 0.3252,
"step": 322
},
{
"epoch": 0.81,
"grad_norm": 4.212242126464844,
"learning_rate": 2.593198992443325e-05,
"loss": 0.1242,
"step": 323
},
{
"epoch": 0.82,
"grad_norm": 12.86931037902832,
"learning_rate": 2.5919395465994963e-05,
"loss": 0.5819,
"step": 324
},
{
"epoch": 0.82,
"grad_norm": 9.049539566040039,
"learning_rate": 2.5906801007556676e-05,
"loss": 0.5282,
"step": 325
},
{
"epoch": 0.82,
"grad_norm": 7.5245490074157715,
"learning_rate": 2.5894206549118388e-05,
"loss": 0.2818,
"step": 326
},
{
"epoch": 0.82,
"grad_norm": 5.835328578948975,
"learning_rate": 2.5881612090680104e-05,
"loss": 0.2127,
"step": 327
},
{
"epoch": 0.83,
"grad_norm": 6.4401469230651855,
"learning_rate": 2.5869017632241813e-05,
"loss": 0.327,
"step": 328
},
{
"epoch": 0.83,
"grad_norm": 8.760528564453125,
"learning_rate": 2.5856423173803526e-05,
"loss": 0.533,
"step": 329
},
{
"epoch": 0.83,
"grad_norm": 8.962298393249512,
"learning_rate": 2.584382871536524e-05,
"loss": 0.6162,
"step": 330
},
{
"epoch": 0.83,
"grad_norm": 4.031172275543213,
"learning_rate": 2.5831234256926954e-05,
"loss": 0.2266,
"step": 331
},
{
"epoch": 0.84,
"grad_norm": 12.244653701782227,
"learning_rate": 2.5818639798488666e-05,
"loss": 0.5697,
"step": 332
},
{
"epoch": 0.84,
"grad_norm": 4.518488883972168,
"learning_rate": 2.580604534005038e-05,
"loss": 0.1391,
"step": 333
},
{
"epoch": 0.84,
"grad_norm": 4.96370792388916,
"learning_rate": 2.579345088161209e-05,
"loss": 0.2026,
"step": 334
},
{
"epoch": 0.84,
"grad_norm": 8.518425941467285,
"learning_rate": 2.5780856423173803e-05,
"loss": 0.392,
"step": 335
},
{
"epoch": 0.85,
"grad_norm": 8.354524612426758,
"learning_rate": 2.576826196473552e-05,
"loss": 0.3324,
"step": 336
},
{
"epoch": 0.85,
"grad_norm": 5.117928981781006,
"learning_rate": 2.5755667506297232e-05,
"loss": 0.3849,
"step": 337
},
{
"epoch": 0.85,
"grad_norm": 5.336648464202881,
"learning_rate": 2.574307304785894e-05,
"loss": 0.241,
"step": 338
},
{
"epoch": 0.85,
"grad_norm": 3.6382648944854736,
"learning_rate": 2.5730478589420657e-05,
"loss": 0.3806,
"step": 339
},
{
"epoch": 0.86,
"grad_norm": 9.698990821838379,
"learning_rate": 2.571788413098237e-05,
"loss": 0.4922,
"step": 340
},
{
"epoch": 0.86,
"grad_norm": 7.453464984893799,
"learning_rate": 2.570528967254408e-05,
"loss": 0.323,
"step": 341
},
{
"epoch": 0.86,
"grad_norm": 5.546812534332275,
"learning_rate": 2.5692695214105794e-05,
"loss": 0.17,
"step": 342
},
{
"epoch": 0.86,
"grad_norm": 4.6894941329956055,
"learning_rate": 2.5680100755667506e-05,
"loss": 0.3012,
"step": 343
},
{
"epoch": 0.87,
"grad_norm": 12.336396217346191,
"learning_rate": 2.566750629722922e-05,
"loss": 0.7455,
"step": 344
},
{
"epoch": 0.87,
"grad_norm": 10.94264030456543,
"learning_rate": 2.5654911838790935e-05,
"loss": 0.2863,
"step": 345
},
{
"epoch": 0.87,
"grad_norm": 12.642402648925781,
"learning_rate": 2.5642317380352647e-05,
"loss": 0.2087,
"step": 346
},
{
"epoch": 0.87,
"grad_norm": 1.6862198114395142,
"learning_rate": 2.5629722921914356e-05,
"loss": 0.0703,
"step": 347
},
{
"epoch": 0.88,
"grad_norm": 6.8891191482543945,
"learning_rate": 2.5617128463476072e-05,
"loss": 0.3455,
"step": 348
},
{
"epoch": 0.88,
"grad_norm": 7.763698577880859,
"learning_rate": 2.5604534005037784e-05,
"loss": 0.3147,
"step": 349
},
{
"epoch": 0.88,
"grad_norm": 12.52840518951416,
"learning_rate": 2.5591939546599497e-05,
"loss": 0.6684,
"step": 350
},
{
"epoch": 0.88,
"grad_norm": 11.723652839660645,
"learning_rate": 2.5579345088161213e-05,
"loss": 0.3871,
"step": 351
},
{
"epoch": 0.89,
"grad_norm": 9.9241361618042,
"learning_rate": 2.556675062972292e-05,
"loss": 0.3243,
"step": 352
},
{
"epoch": 0.89,
"grad_norm": 8.628238677978516,
"learning_rate": 2.5554156171284634e-05,
"loss": 0.82,
"step": 353
},
{
"epoch": 0.89,
"grad_norm": 4.368703365325928,
"learning_rate": 2.554156171284635e-05,
"loss": 0.3554,
"step": 354
},
{
"epoch": 0.89,
"grad_norm": 4.239711761474609,
"learning_rate": 2.5528967254408062e-05,
"loss": 0.1529,
"step": 355
},
{
"epoch": 0.9,
"grad_norm": 9.269442558288574,
"learning_rate": 2.551637279596977e-05,
"loss": 0.4518,
"step": 356
},
{
"epoch": 0.9,
"grad_norm": 5.514954566955566,
"learning_rate": 2.5503778337531487e-05,
"loss": 0.1421,
"step": 357
},
{
"epoch": 0.9,
"grad_norm": 7.544525146484375,
"learning_rate": 2.54911838790932e-05,
"loss": 0.6922,
"step": 358
},
{
"epoch": 0.9,
"grad_norm": 4.167691707611084,
"learning_rate": 2.5478589420654912e-05,
"loss": 0.0966,
"step": 359
},
{
"epoch": 0.91,
"grad_norm": 7.282542705535889,
"learning_rate": 2.5465994962216628e-05,
"loss": 0.494,
"step": 360
},
{
"epoch": 0.91,
"grad_norm": 12.464173316955566,
"learning_rate": 2.5453400503778337e-05,
"loss": 0.4292,
"step": 361
},
{
"epoch": 0.91,
"grad_norm": 10.870161056518555,
"learning_rate": 2.544080604534005e-05,
"loss": 0.625,
"step": 362
},
{
"epoch": 0.91,
"grad_norm": 8.240769386291504,
"learning_rate": 2.5428211586901765e-05,
"loss": 0.7351,
"step": 363
},
{
"epoch": 0.92,
"grad_norm": 9.801168441772461,
"learning_rate": 2.5415617128463477e-05,
"loss": 0.2285,
"step": 364
},
{
"epoch": 0.92,
"grad_norm": 11.260651588439941,
"learning_rate": 2.540302267002519e-05,
"loss": 0.4224,
"step": 365
},
{
"epoch": 0.92,
"grad_norm": 14.770816802978516,
"learning_rate": 2.5390428211586902e-05,
"loss": 0.6083,
"step": 366
},
{
"epoch": 0.92,
"grad_norm": 12.272558212280273,
"learning_rate": 2.5377833753148615e-05,
"loss": 0.4445,
"step": 367
},
{
"epoch": 0.93,
"grad_norm": 6.899696350097656,
"learning_rate": 2.5365239294710327e-05,
"loss": 0.3629,
"step": 368
},
{
"epoch": 0.93,
"grad_norm": 8.54549789428711,
"learning_rate": 2.5352644836272043e-05,
"loss": 0.4865,
"step": 369
},
{
"epoch": 0.93,
"grad_norm": 8.818639755249023,
"learning_rate": 2.5340050377833752e-05,
"loss": 0.4459,
"step": 370
},
{
"epoch": 0.93,
"grad_norm": 10.441351890563965,
"learning_rate": 2.5327455919395464e-05,
"loss": 0.3823,
"step": 371
},
{
"epoch": 0.94,
"grad_norm": 10.242267608642578,
"learning_rate": 2.531486146095718e-05,
"loss": 0.4975,
"step": 372
},
{
"epoch": 0.94,
"grad_norm": 9.25827407836914,
"learning_rate": 2.5302267002518893e-05,
"loss": 0.3961,
"step": 373
},
{
"epoch": 0.94,
"grad_norm": 5.644846439361572,
"learning_rate": 2.5289672544080605e-05,
"loss": 0.1543,
"step": 374
},
{
"epoch": 0.94,
"grad_norm": 8.441045761108398,
"learning_rate": 2.5277078085642318e-05,
"loss": 0.3673,
"step": 375
},
{
"epoch": 0.95,
"grad_norm": 9.566481590270996,
"learning_rate": 2.526448362720403e-05,
"loss": 0.3034,
"step": 376
},
{
"epoch": 0.95,
"grad_norm": 11.111581802368164,
"learning_rate": 2.5251889168765742e-05,
"loss": 0.4208,
"step": 377
},
{
"epoch": 0.95,
"grad_norm": 10.74457836151123,
"learning_rate": 2.5239294710327458e-05,
"loss": 0.5335,
"step": 378
},
{
"epoch": 0.95,
"grad_norm": 7.5216450691223145,
"learning_rate": 2.522670025188917e-05,
"loss": 0.4418,
"step": 379
},
{
"epoch": 0.96,
"grad_norm": 10.419405937194824,
"learning_rate": 2.521410579345088e-05,
"loss": 0.4507,
"step": 380
},
{
"epoch": 0.96,
"grad_norm": 6.768444061279297,
"learning_rate": 2.5201511335012596e-05,
"loss": 0.6152,
"step": 381
},
{
"epoch": 0.96,
"grad_norm": 9.090213775634766,
"learning_rate": 2.5188916876574308e-05,
"loss": 0.6423,
"step": 382
},
{
"epoch": 0.96,
"grad_norm": 7.854204177856445,
"learning_rate": 2.517632241813602e-05,
"loss": 0.3511,
"step": 383
},
{
"epoch": 0.97,
"grad_norm": 8.918037414550781,
"learning_rate": 2.5163727959697733e-05,
"loss": 0.4304,
"step": 384
},
{
"epoch": 0.97,
"grad_norm": 7.300523281097412,
"learning_rate": 2.5151133501259445e-05,
"loss": 0.2343,
"step": 385
},
{
"epoch": 0.97,
"grad_norm": 5.29052734375,
"learning_rate": 2.5138539042821158e-05,
"loss": 0.2357,
"step": 386
},
{
"epoch": 0.97,
"grad_norm": 13.124969482421875,
"learning_rate": 2.5125944584382874e-05,
"loss": 0.4188,
"step": 387
},
{
"epoch": 0.98,
"grad_norm": 13.79240894317627,
"learning_rate": 2.5113350125944586e-05,
"loss": 0.4014,
"step": 388
},
{
"epoch": 0.98,
"grad_norm": 5.569485664367676,
"learning_rate": 2.5100755667506295e-05,
"loss": 0.2489,
"step": 389
},
{
"epoch": 0.98,
"grad_norm": 9.553081512451172,
"learning_rate": 2.508816120906801e-05,
"loss": 0.3377,
"step": 390
},
{
"epoch": 0.98,
"grad_norm": 13.31069564819336,
"learning_rate": 2.5075566750629723e-05,
"loss": 0.3421,
"step": 391
},
{
"epoch": 0.99,
"grad_norm": 8.653861999511719,
"learning_rate": 2.5062972292191436e-05,
"loss": 0.3805,
"step": 392
},
{
"epoch": 0.99,
"grad_norm": 8.786714553833008,
"learning_rate": 2.505037783375315e-05,
"loss": 0.2799,
"step": 393
},
{
"epoch": 0.99,
"grad_norm": 6.493752479553223,
"learning_rate": 2.503778337531486e-05,
"loss": 0.2796,
"step": 394
},
{
"epoch": 0.99,
"grad_norm": 6.619555473327637,
"learning_rate": 2.5025188916876573e-05,
"loss": 0.2407,
"step": 395
},
{
"epoch": 1.0,
"grad_norm": 6.245583534240723,
"learning_rate": 2.501259445843829e-05,
"loss": 0.2345,
"step": 396
},
{
"epoch": 1.0,
"grad_norm": 97.71422576904297,
"learning_rate": 2.5e-05,
"loss": 1.7921,
"step": 397
},
{
"epoch": 1.0,
"eval_accuracy": 0.8195583596214511,
"eval_f1": 0.818793132119398,
"eval_loss": 0.44511958956718445,
"eval_precision": 0.8183509587189882,
"eval_recall": 0.8213043799940811,
"eval_runtime": 4442.9836,
"eval_samples_per_second": 0.357,
"eval_steps_per_second": 0.023,
"step": 397
},
{
"epoch": 1.0,
"grad_norm": 7.2050909996032715,
"learning_rate": 2.4987405541561714e-05,
"loss": 0.2411,
"step": 398
},
{
"epoch": 1.01,
"grad_norm": 5.562217712402344,
"learning_rate": 2.4974811083123426e-05,
"loss": 0.2874,
"step": 399
},
{
"epoch": 1.01,
"grad_norm": 10.240361213684082,
"learning_rate": 2.496221662468514e-05,
"loss": 0.2287,
"step": 400
},
{
"epoch": 1.01,
"grad_norm": 11.001642227172852,
"learning_rate": 2.4949622166246854e-05,
"loss": 0.7281,
"step": 401
},
{
"epoch": 1.01,
"grad_norm": 14.056950569152832,
"learning_rate": 2.4937027707808567e-05,
"loss": 0.3952,
"step": 402
},
{
"epoch": 1.02,
"grad_norm": 7.004691123962402,
"learning_rate": 2.4924433249370276e-05,
"loss": 0.3898,
"step": 403
},
{
"epoch": 1.02,
"grad_norm": 5.306135654449463,
"learning_rate": 2.491183879093199e-05,
"loss": 0.3549,
"step": 404
},
{
"epoch": 1.02,
"grad_norm": 7.861663818359375,
"learning_rate": 2.4899244332493704e-05,
"loss": 0.2773,
"step": 405
},
{
"epoch": 1.02,
"grad_norm": 3.698382616043091,
"learning_rate": 2.4886649874055416e-05,
"loss": 0.2846,
"step": 406
},
{
"epoch": 1.03,
"grad_norm": 14.958970069885254,
"learning_rate": 2.4874055415617132e-05,
"loss": 0.2251,
"step": 407
},
{
"epoch": 1.03,
"grad_norm": 8.70470905303955,
"learning_rate": 2.486146095717884e-05,
"loss": 0.3024,
"step": 408
},
{
"epoch": 1.03,
"grad_norm": 7.765173435211182,
"learning_rate": 2.4848866498740554e-05,
"loss": 0.346,
"step": 409
},
{
"epoch": 1.03,
"grad_norm": 19.742538452148438,
"learning_rate": 2.483627204030227e-05,
"loss": 0.459,
"step": 410
},
{
"epoch": 1.04,
"grad_norm": 8.575105667114258,
"learning_rate": 2.4823677581863982e-05,
"loss": 0.4486,
"step": 411
},
{
"epoch": 1.04,
"grad_norm": 21.82974624633789,
"learning_rate": 2.481108312342569e-05,
"loss": 0.4693,
"step": 412
},
{
"epoch": 1.04,
"grad_norm": 6.933416366577148,
"learning_rate": 2.4798488664987407e-05,
"loss": 0.2311,
"step": 413
},
{
"epoch": 1.04,
"grad_norm": 4.775768756866455,
"learning_rate": 2.478589420654912e-05,
"loss": 0.2769,
"step": 414
},
{
"epoch": 1.05,
"grad_norm": 1.4635595083236694,
"learning_rate": 2.4773299748110832e-05,
"loss": 0.0557,
"step": 415
},
{
"epoch": 1.05,
"grad_norm": 17.7808895111084,
"learning_rate": 2.4760705289672548e-05,
"loss": 0.6811,
"step": 416
},
{
"epoch": 1.05,
"grad_norm": 16.204805374145508,
"learning_rate": 2.4748110831234257e-05,
"loss": 0.4921,
"step": 417
},
{
"epoch": 1.05,
"grad_norm": 3.415022611618042,
"learning_rate": 2.473551637279597e-05,
"loss": 0.2516,
"step": 418
},
{
"epoch": 1.06,
"grad_norm": 12.413846969604492,
"learning_rate": 2.4722921914357685e-05,
"loss": 0.2815,
"step": 419
},
{
"epoch": 1.06,
"grad_norm": 7.598104476928711,
"learning_rate": 2.4710327455919397e-05,
"loss": 0.1306,
"step": 420
},
{
"epoch": 1.06,
"grad_norm": 5.7529120445251465,
"learning_rate": 2.469773299748111e-05,
"loss": 0.1619,
"step": 421
},
{
"epoch": 1.06,
"grad_norm": 4.8923516273498535,
"learning_rate": 2.4685138539042822e-05,
"loss": 0.1385,
"step": 422
},
{
"epoch": 1.07,
"grad_norm": 3.9952762126922607,
"learning_rate": 2.4672544080604535e-05,
"loss": 0.0965,
"step": 423
},
{
"epoch": 1.07,
"grad_norm": 14.615901947021484,
"learning_rate": 2.4659949622166247e-05,
"loss": 0.3344,
"step": 424
},
{
"epoch": 1.07,
"grad_norm": 11.933664321899414,
"learning_rate": 2.4647355163727963e-05,
"loss": 0.4366,
"step": 425
},
{
"epoch": 1.07,
"grad_norm": 6.072282314300537,
"learning_rate": 2.4634760705289672e-05,
"loss": 0.1211,
"step": 426
},
{
"epoch": 1.08,
"grad_norm": 12.289721488952637,
"learning_rate": 2.4622166246851384e-05,
"loss": 0.4488,
"step": 427
},
{
"epoch": 1.08,
"grad_norm": 24.086645126342773,
"learning_rate": 2.46095717884131e-05,
"loss": 0.977,
"step": 428
},
{
"epoch": 1.08,
"grad_norm": 13.474416732788086,
"learning_rate": 2.4596977329974812e-05,
"loss": 0.6102,
"step": 429
},
{
"epoch": 1.08,
"grad_norm": 5.976496696472168,
"learning_rate": 2.4584382871536525e-05,
"loss": 0.2712,
"step": 430
},
{
"epoch": 1.09,
"grad_norm": 21.28632926940918,
"learning_rate": 2.4571788413098237e-05,
"loss": 0.4736,
"step": 431
},
{
"epoch": 1.09,
"grad_norm": 10.714360237121582,
"learning_rate": 2.455919395465995e-05,
"loss": 0.7356,
"step": 432
},
{
"epoch": 1.09,
"grad_norm": 15.236488342285156,
"learning_rate": 2.4546599496221662e-05,
"loss": 0.7644,
"step": 433
},
{
"epoch": 1.09,
"grad_norm": 11.787192344665527,
"learning_rate": 2.4534005037783378e-05,
"loss": 0.7589,
"step": 434
},
{
"epoch": 1.1,
"grad_norm": 12.629518508911133,
"learning_rate": 2.4521410579345087e-05,
"loss": 0.5611,
"step": 435
},
{
"epoch": 1.1,
"grad_norm": 10.646512985229492,
"learning_rate": 2.45088161209068e-05,
"loss": 0.5095,
"step": 436
},
{
"epoch": 1.1,
"grad_norm": 10.494978904724121,
"learning_rate": 2.4496221662468515e-05,
"loss": 0.2447,
"step": 437
},
{
"epoch": 1.1,
"grad_norm": 4.493246078491211,
"learning_rate": 2.4483627204030228e-05,
"loss": 0.0987,
"step": 438
},
{
"epoch": 1.11,
"grad_norm": 15.06545639038086,
"learning_rate": 2.447103274559194e-05,
"loss": 0.4021,
"step": 439
},
{
"epoch": 1.11,
"grad_norm": 1.9468973875045776,
"learning_rate": 2.4458438287153653e-05,
"loss": 0.0678,
"step": 440
},
{
"epoch": 1.11,
"grad_norm": 6.745187759399414,
"learning_rate": 2.4445843828715365e-05,
"loss": 0.2559,
"step": 441
},
{
"epoch": 1.11,
"grad_norm": 9.412677764892578,
"learning_rate": 2.4433249370277077e-05,
"loss": 0.5198,
"step": 442
},
{
"epoch": 1.12,
"grad_norm": 7.285678863525391,
"learning_rate": 2.4420654911838793e-05,
"loss": 0.5288,
"step": 443
},
{
"epoch": 1.12,
"grad_norm": 11.449817657470703,
"learning_rate": 2.4408060453400506e-05,
"loss": 0.5802,
"step": 444
},
{
"epoch": 1.12,
"grad_norm": 12.689784049987793,
"learning_rate": 2.4395465994962215e-05,
"loss": 0.2298,
"step": 445
},
{
"epoch": 1.12,
"grad_norm": 25.520742416381836,
"learning_rate": 2.438287153652393e-05,
"loss": 0.6149,
"step": 446
},
{
"epoch": 1.13,
"grad_norm": 7.158485412597656,
"learning_rate": 2.4370277078085643e-05,
"loss": 0.3499,
"step": 447
},
{
"epoch": 1.13,
"grad_norm": 5.239826679229736,
"learning_rate": 2.4357682619647355e-05,
"loss": 0.1604,
"step": 448
},
{
"epoch": 1.13,
"grad_norm": 9.69916820526123,
"learning_rate": 2.4345088161209068e-05,
"loss": 0.5048,
"step": 449
},
{
"epoch": 1.13,
"grad_norm": 7.610560417175293,
"learning_rate": 2.433249370277078e-05,
"loss": 0.345,
"step": 450
},
{
"epoch": 1.14,
"grad_norm": 9.45479965209961,
"learning_rate": 2.4319899244332493e-05,
"loss": 0.4998,
"step": 451
},
{
"epoch": 1.14,
"grad_norm": 6.088526725769043,
"learning_rate": 2.430730478589421e-05,
"loss": 0.3311,
"step": 452
},
{
"epoch": 1.14,
"grad_norm": 8.135771751403809,
"learning_rate": 2.429471032745592e-05,
"loss": 0.5105,
"step": 453
},
{
"epoch": 1.14,
"grad_norm": 4.41991662979126,
"learning_rate": 2.428211586901763e-05,
"loss": 0.3346,
"step": 454
},
{
"epoch": 1.15,
"grad_norm": 6.451519966125488,
"learning_rate": 2.4269521410579346e-05,
"loss": 0.2548,
"step": 455
},
{
"epoch": 1.15,
"grad_norm": 7.247366428375244,
"learning_rate": 2.4256926952141058e-05,
"loss": 0.5191,
"step": 456
},
{
"epoch": 1.15,
"grad_norm": 11.858521461486816,
"learning_rate": 2.424433249370277e-05,
"loss": 0.4406,
"step": 457
},
{
"epoch": 1.15,
"grad_norm": 9.090578079223633,
"learning_rate": 2.4231738035264486e-05,
"loss": 0.6375,
"step": 458
},
{
"epoch": 1.16,
"grad_norm": 9.295748710632324,
"learning_rate": 2.4219143576826196e-05,
"loss": 0.5871,
"step": 459
},
{
"epoch": 1.16,
"grad_norm": 5.650725364685059,
"learning_rate": 2.4206549118387908e-05,
"loss": 0.1907,
"step": 460
},
{
"epoch": 1.16,
"grad_norm": 14.45252513885498,
"learning_rate": 2.4193954659949624e-05,
"loss": 0.2786,
"step": 461
},
{
"epoch": 1.16,
"grad_norm": 6.475660800933838,
"learning_rate": 2.4181360201511336e-05,
"loss": 0.3618,
"step": 462
},
{
"epoch": 1.17,
"grad_norm": 8.952823638916016,
"learning_rate": 2.4168765743073045e-05,
"loss": 0.3385,
"step": 463
},
{
"epoch": 1.17,
"grad_norm": 4.224743843078613,
"learning_rate": 2.415617128463476e-05,
"loss": 0.3246,
"step": 464
},
{
"epoch": 1.17,
"grad_norm": 7.692142963409424,
"learning_rate": 2.4143576826196473e-05,
"loss": 0.4094,
"step": 465
},
{
"epoch": 1.17,
"grad_norm": 5.044216156005859,
"learning_rate": 2.4130982367758186e-05,
"loss": 0.2324,
"step": 466
},
{
"epoch": 1.18,
"grad_norm": 5.303596496582031,
"learning_rate": 2.4118387909319902e-05,
"loss": 0.2725,
"step": 467
},
{
"epoch": 1.18,
"grad_norm": 8.907066345214844,
"learning_rate": 2.410579345088161e-05,
"loss": 0.371,
"step": 468
},
{
"epoch": 1.18,
"grad_norm": 5.100632667541504,
"learning_rate": 2.4093198992443327e-05,
"loss": 0.3409,
"step": 469
},
{
"epoch": 1.18,
"grad_norm": 8.859848022460938,
"learning_rate": 2.408060453400504e-05,
"loss": 0.3745,
"step": 470
},
{
"epoch": 1.19,
"grad_norm": 7.951125144958496,
"learning_rate": 2.406801007556675e-05,
"loss": 0.5921,
"step": 471
},
{
"epoch": 1.19,
"grad_norm": 11.735611915588379,
"learning_rate": 2.4055415617128467e-05,
"loss": 0.359,
"step": 472
},
{
"epoch": 1.19,
"grad_norm": 7.200911998748779,
"learning_rate": 2.4042821158690176e-05,
"loss": 0.2809,
"step": 473
},
{
"epoch": 1.19,
"grad_norm": 11.294443130493164,
"learning_rate": 2.403022670025189e-05,
"loss": 0.5223,
"step": 474
},
{
"epoch": 1.2,
"grad_norm": 7.186520099639893,
"learning_rate": 2.4017632241813605e-05,
"loss": 0.2466,
"step": 475
},
{
"epoch": 1.2,
"grad_norm": 13.5769624710083,
"learning_rate": 2.4005037783375317e-05,
"loss": 0.5432,
"step": 476
},
{
"epoch": 1.2,
"grad_norm": 6.9906768798828125,
"learning_rate": 2.3992443324937026e-05,
"loss": 0.1902,
"step": 477
},
{
"epoch": 1.2,
"grad_norm": 5.3905110359191895,
"learning_rate": 2.3979848866498742e-05,
"loss": 0.2713,
"step": 478
},
{
"epoch": 1.21,
"grad_norm": 4.828819274902344,
"learning_rate": 2.3967254408060454e-05,
"loss": 0.3166,
"step": 479
},
{
"epoch": 1.21,
"grad_norm": 9.989480018615723,
"learning_rate": 2.3954659949622167e-05,
"loss": 0.4494,
"step": 480
},
{
"epoch": 1.21,
"grad_norm": 9.79456615447998,
"learning_rate": 2.3942065491183883e-05,
"loss": 0.4268,
"step": 481
},
{
"epoch": 1.21,
"grad_norm": 7.9519267082214355,
"learning_rate": 2.392947103274559e-05,
"loss": 0.3281,
"step": 482
},
{
"epoch": 1.22,
"grad_norm": 6.644056797027588,
"learning_rate": 2.3916876574307304e-05,
"loss": 0.1665,
"step": 483
},
{
"epoch": 1.22,
"grad_norm": 13.469619750976562,
"learning_rate": 2.390428211586902e-05,
"loss": 0.3806,
"step": 484
},
{
"epoch": 1.22,
"grad_norm": 8.886473655700684,
"learning_rate": 2.3891687657430732e-05,
"loss": 0.2951,
"step": 485
},
{
"epoch": 1.22,
"grad_norm": 16.603439331054688,
"learning_rate": 2.3879093198992445e-05,
"loss": 0.3215,
"step": 486
},
{
"epoch": 1.23,
"grad_norm": 5.1792192459106445,
"learning_rate": 2.3866498740554157e-05,
"loss": 0.2263,
"step": 487
},
{
"epoch": 1.23,
"grad_norm": 22.057132720947266,
"learning_rate": 2.385390428211587e-05,
"loss": 0.5097,
"step": 488
},
{
"epoch": 1.23,
"grad_norm": 9.38779067993164,
"learning_rate": 2.3841309823677582e-05,
"loss": 0.4057,
"step": 489
},
{
"epoch": 1.23,
"grad_norm": 4.386843681335449,
"learning_rate": 2.3828715365239298e-05,
"loss": 0.0996,
"step": 490
},
{
"epoch": 1.24,
"grad_norm": 21.32297706604004,
"learning_rate": 2.3816120906801007e-05,
"loss": 1.2138,
"step": 491
},
{
"epoch": 1.24,
"grad_norm": 5.6553874015808105,
"learning_rate": 2.380352644836272e-05,
"loss": 0.1967,
"step": 492
},
{
"epoch": 1.24,
"grad_norm": 6.671334266662598,
"learning_rate": 2.3790931989924435e-05,
"loss": 0.1301,
"step": 493
},
{
"epoch": 1.24,
"grad_norm": 10.686269760131836,
"learning_rate": 2.3778337531486147e-05,
"loss": 0.2715,
"step": 494
},
{
"epoch": 1.25,
"grad_norm": 6.189944744110107,
"learning_rate": 2.376574307304786e-05,
"loss": 0.4195,
"step": 495
},
{
"epoch": 1.25,
"grad_norm": 11.906808853149414,
"learning_rate": 2.3753148614609572e-05,
"loss": 0.3496,
"step": 496
},
{
"epoch": 1.25,
"grad_norm": 6.705618381500244,
"learning_rate": 2.3740554156171285e-05,
"loss": 0.2344,
"step": 497
},
{
"epoch": 1.25,
"grad_norm": 8.328521728515625,
"learning_rate": 2.3727959697732997e-05,
"loss": 0.1997,
"step": 498
},
{
"epoch": 1.26,
"grad_norm": 11.336451530456543,
"learning_rate": 2.3715365239294713e-05,
"loss": 0.5954,
"step": 499
},
{
"epoch": 1.26,
"grad_norm": 11.99789810180664,
"learning_rate": 2.3702770780856425e-05,
"loss": 0.355,
"step": 500
},
{
"epoch": 1.26,
"grad_norm": 17.559165954589844,
"learning_rate": 2.3690176322418134e-05,
"loss": 0.3469,
"step": 501
},
{
"epoch": 1.26,
"grad_norm": 7.059667110443115,
"learning_rate": 2.367758186397985e-05,
"loss": 0.1771,
"step": 502
},
{
"epoch": 1.27,
"grad_norm": 5.824542045593262,
"learning_rate": 2.3664987405541563e-05,
"loss": 0.1534,
"step": 503
},
{
"epoch": 1.27,
"grad_norm": 8.428627967834473,
"learning_rate": 2.3652392947103275e-05,
"loss": 0.2015,
"step": 504
},
{
"epoch": 1.27,
"grad_norm": 14.697854042053223,
"learning_rate": 2.3639798488664988e-05,
"loss": 0.6048,
"step": 505
},
{
"epoch": 1.27,
"grad_norm": 10.655426025390625,
"learning_rate": 2.36272040302267e-05,
"loss": 0.6308,
"step": 506
},
{
"epoch": 1.28,
"grad_norm": 6.204010009765625,
"learning_rate": 2.3614609571788412e-05,
"loss": 0.145,
"step": 507
},
{
"epoch": 1.28,
"grad_norm": 5.01179838180542,
"learning_rate": 2.3602015113350128e-05,
"loss": 0.1016,
"step": 508
},
{
"epoch": 1.28,
"grad_norm": 5.69111442565918,
"learning_rate": 2.358942065491184e-05,
"loss": 0.1569,
"step": 509
},
{
"epoch": 1.28,
"grad_norm": 8.344858169555664,
"learning_rate": 2.357682619647355e-05,
"loss": 0.1635,
"step": 510
},
{
"epoch": 1.29,
"grad_norm": 2.3715789318084717,
"learning_rate": 2.3564231738035266e-05,
"loss": 0.06,
"step": 511
},
{
"epoch": 1.29,
"grad_norm": 4.8666605949401855,
"learning_rate": 2.3551637279596978e-05,
"loss": 0.1141,
"step": 512
},
{
"epoch": 1.29,
"grad_norm": 9.464341163635254,
"learning_rate": 2.353904282115869e-05,
"loss": 0.2374,
"step": 513
},
{
"epoch": 1.29,
"grad_norm": 5.035337924957275,
"learning_rate": 2.3526448362720406e-05,
"loss": 0.3652,
"step": 514
},
{
"epoch": 1.3,
"grad_norm": 2.6235170364379883,
"learning_rate": 2.3513853904282115e-05,
"loss": 0.0372,
"step": 515
},
{
"epoch": 1.3,
"grad_norm": 14.479222297668457,
"learning_rate": 2.3501259445843828e-05,
"loss": 0.6558,
"step": 516
},
{
"epoch": 1.3,
"grad_norm": 9.507341384887695,
"learning_rate": 2.3488664987405544e-05,
"loss": 0.293,
"step": 517
},
{
"epoch": 1.3,
"grad_norm": 12.590784072875977,
"learning_rate": 2.3476070528967256e-05,
"loss": 0.1925,
"step": 518
},
{
"epoch": 1.31,
"grad_norm": 8.395529747009277,
"learning_rate": 2.3463476070528965e-05,
"loss": 0.3387,
"step": 519
},
{
"epoch": 1.31,
"grad_norm": 9.460827827453613,
"learning_rate": 2.345088161209068e-05,
"loss": 0.2957,
"step": 520
},
{
"epoch": 1.31,
"grad_norm": 12.13961410522461,
"learning_rate": 2.3438287153652393e-05,
"loss": 0.3519,
"step": 521
},
{
"epoch": 1.31,
"grad_norm": 3.21748685836792,
"learning_rate": 2.3425692695214106e-05,
"loss": 0.0368,
"step": 522
},
{
"epoch": 1.32,
"grad_norm": 13.214022636413574,
"learning_rate": 2.341309823677582e-05,
"loss": 0.4089,
"step": 523
},
{
"epoch": 1.32,
"grad_norm": 15.640990257263184,
"learning_rate": 2.340050377833753e-05,
"loss": 0.3837,
"step": 524
},
{
"epoch": 1.32,
"grad_norm": 7.136631011962891,
"learning_rate": 2.3387909319899243e-05,
"loss": 0.3011,
"step": 525
},
{
"epoch": 1.32,
"grad_norm": 8.388585090637207,
"learning_rate": 2.337531486146096e-05,
"loss": 0.2053,
"step": 526
},
{
"epoch": 1.33,
"grad_norm": 12.422948837280273,
"learning_rate": 2.336272040302267e-05,
"loss": 0.7142,
"step": 527
},
{
"epoch": 1.33,
"grad_norm": 6.136419773101807,
"learning_rate": 2.3350125944584384e-05,
"loss": 0.5098,
"step": 528
},
{
"epoch": 1.33,
"grad_norm": 14.847418785095215,
"learning_rate": 2.3337531486146096e-05,
"loss": 0.2521,
"step": 529
},
{
"epoch": 1.34,
"grad_norm": 9.92980670928955,
"learning_rate": 2.332493702770781e-05,
"loss": 0.3919,
"step": 530
},
{
"epoch": 1.34,
"grad_norm": 25.052722930908203,
"learning_rate": 2.331234256926952e-05,
"loss": 0.5285,
"step": 531
},
{
"epoch": 1.34,
"grad_norm": 11.806641578674316,
"learning_rate": 2.3299748110831237e-05,
"loss": 0.514,
"step": 532
},
{
"epoch": 1.34,
"grad_norm": 21.424930572509766,
"learning_rate": 2.3287153652392946e-05,
"loss": 0.2707,
"step": 533
},
{
"epoch": 1.35,
"grad_norm": 19.599058151245117,
"learning_rate": 2.3274559193954658e-05,
"loss": 0.3077,
"step": 534
},
{
"epoch": 1.35,
"grad_norm": 20.604076385498047,
"learning_rate": 2.3261964735516374e-05,
"loss": 1.0837,
"step": 535
},
{
"epoch": 1.35,
"grad_norm": 2.573046922683716,
"learning_rate": 2.3249370277078086e-05,
"loss": 0.0493,
"step": 536
},
{
"epoch": 1.35,
"grad_norm": 6.39470100402832,
"learning_rate": 2.32367758186398e-05,
"loss": 0.1642,
"step": 537
},
{
"epoch": 1.36,
"grad_norm": 8.209395408630371,
"learning_rate": 2.322418136020151e-05,
"loss": 0.1703,
"step": 538
},
{
"epoch": 1.36,
"grad_norm": 12.432522773742676,
"learning_rate": 2.3211586901763224e-05,
"loss": 0.3736,
"step": 539
},
{
"epoch": 1.36,
"grad_norm": 7.828361511230469,
"learning_rate": 2.3198992443324936e-05,
"loss": 0.1279,
"step": 540
},
{
"epoch": 1.36,
"grad_norm": 14.346171379089355,
"learning_rate": 2.3186397984886652e-05,
"loss": 0.2658,
"step": 541
},
{
"epoch": 1.37,
"grad_norm": 15.226658821105957,
"learning_rate": 2.317380352644836e-05,
"loss": 0.5368,
"step": 542
},
{
"epoch": 1.37,
"grad_norm": 12.5059175491333,
"learning_rate": 2.3161209068010077e-05,
"loss": 0.3146,
"step": 543
},
{
"epoch": 1.37,
"grad_norm": 8.119623184204102,
"learning_rate": 2.314861460957179e-05,
"loss": 0.1653,
"step": 544
},
{
"epoch": 1.37,
"grad_norm": 10.031901359558105,
"learning_rate": 2.3136020151133502e-05,
"loss": 0.3746,
"step": 545
},
{
"epoch": 1.38,
"grad_norm": 17.581632614135742,
"learning_rate": 2.3123425692695218e-05,
"loss": 0.6627,
"step": 546
},
{
"epoch": 1.38,
"grad_norm": 10.162378311157227,
"learning_rate": 2.3110831234256927e-05,
"loss": 0.5804,
"step": 547
},
{
"epoch": 1.38,
"grad_norm": 3.8320300579071045,
"learning_rate": 2.309823677581864e-05,
"loss": 0.0739,
"step": 548
},
{
"epoch": 1.38,
"grad_norm": 5.822529315948486,
"learning_rate": 2.3085642317380355e-05,
"loss": 0.075,
"step": 549
},
{
"epoch": 1.39,
"grad_norm": 4.666683673858643,
"learning_rate": 2.3073047858942067e-05,
"loss": 0.2555,
"step": 550
},
{
"epoch": 1.39,
"grad_norm": 6.855231285095215,
"learning_rate": 2.306045340050378e-05,
"loss": 0.316,
"step": 551
},
{
"epoch": 1.39,
"grad_norm": 14.629341125488281,
"learning_rate": 2.3047858942065492e-05,
"loss": 0.6001,
"step": 552
},
{
"epoch": 1.39,
"grad_norm": 6.551723480224609,
"learning_rate": 2.3035264483627205e-05,
"loss": 0.2065,
"step": 553
},
{
"epoch": 1.4,
"grad_norm": 10.324251174926758,
"learning_rate": 2.3022670025188917e-05,
"loss": 0.253,
"step": 554
},
{
"epoch": 1.4,
"grad_norm": 7.71247673034668,
"learning_rate": 2.3010075566750633e-05,
"loss": 0.1637,
"step": 555
},
{
"epoch": 1.4,
"grad_norm": 9.804146766662598,
"learning_rate": 2.2997481108312342e-05,
"loss": 0.323,
"step": 556
},
{
"epoch": 1.4,
"grad_norm": 14.511861801147461,
"learning_rate": 2.2984886649874054e-05,
"loss": 0.5289,
"step": 557
},
{
"epoch": 1.41,
"grad_norm": 4.078545570373535,
"learning_rate": 2.297229219143577e-05,
"loss": 0.1315,
"step": 558
},
{
"epoch": 1.41,
"grad_norm": 9.631327629089355,
"learning_rate": 2.2959697732997483e-05,
"loss": 0.2195,
"step": 559
},
{
"epoch": 1.41,
"grad_norm": 8.578085899353027,
"learning_rate": 2.2947103274559195e-05,
"loss": 0.4047,
"step": 560
},
{
"epoch": 1.41,
"grad_norm": 13.405282020568848,
"learning_rate": 2.2934508816120907e-05,
"loss": 0.9347,
"step": 561
},
{
"epoch": 1.42,
"grad_norm": 11.71997356414795,
"learning_rate": 2.292191435768262e-05,
"loss": 0.4144,
"step": 562
},
{
"epoch": 1.42,
"grad_norm": 5.992588043212891,
"learning_rate": 2.2909319899244332e-05,
"loss": 0.2267,
"step": 563
},
{
"epoch": 1.42,
"grad_norm": 6.550726890563965,
"learning_rate": 2.2896725440806048e-05,
"loss": 0.213,
"step": 564
},
{
"epoch": 1.42,
"grad_norm": 6.907693386077881,
"learning_rate": 2.288413098236776e-05,
"loss": 0.2641,
"step": 565
},
{
"epoch": 1.43,
"grad_norm": 20.506135940551758,
"learning_rate": 2.287153652392947e-05,
"loss": 0.6865,
"step": 566
},
{
"epoch": 1.43,
"grad_norm": 5.674739360809326,
"learning_rate": 2.2858942065491185e-05,
"loss": 0.5013,
"step": 567
},
{
"epoch": 1.43,
"grad_norm": 7.887065887451172,
"learning_rate": 2.2846347607052898e-05,
"loss": 0.3375,
"step": 568
},
{
"epoch": 1.43,
"grad_norm": 7.742120742797852,
"learning_rate": 2.283375314861461e-05,
"loss": 0.2199,
"step": 569
},
{
"epoch": 1.44,
"grad_norm": 5.057669162750244,
"learning_rate": 2.2821158690176323e-05,
"loss": 0.2353,
"step": 570
},
{
"epoch": 1.44,
"grad_norm": 11.653826713562012,
"learning_rate": 2.2808564231738035e-05,
"loss": 0.1651,
"step": 571
},
{
"epoch": 1.44,
"grad_norm": 6.661721229553223,
"learning_rate": 2.2795969773299747e-05,
"loss": 0.1668,
"step": 572
},
{
"epoch": 1.44,
"grad_norm": 13.69086742401123,
"learning_rate": 2.2783375314861463e-05,
"loss": 0.3931,
"step": 573
},
{
"epoch": 1.45,
"grad_norm": 4.947517395019531,
"learning_rate": 2.2770780856423176e-05,
"loss": 0.1813,
"step": 574
},
{
"epoch": 1.45,
"grad_norm": 9.176385879516602,
"learning_rate": 2.2758186397984885e-05,
"loss": 0.5455,
"step": 575
},
{
"epoch": 1.45,
"grad_norm": 14.556722640991211,
"learning_rate": 2.27455919395466e-05,
"loss": 0.4582,
"step": 576
},
{
"epoch": 1.45,
"grad_norm": 8.501097679138184,
"learning_rate": 2.2732997481108313e-05,
"loss": 0.4377,
"step": 577
},
{
"epoch": 1.46,
"grad_norm": 8.8375244140625,
"learning_rate": 2.2720403022670025e-05,
"loss": 0.4837,
"step": 578
},
{
"epoch": 1.46,
"grad_norm": 7.6127400398254395,
"learning_rate": 2.270780856423174e-05,
"loss": 0.2413,
"step": 579
},
{
"epoch": 1.46,
"grad_norm": 5.552041530609131,
"learning_rate": 2.269521410579345e-05,
"loss": 0.3138,
"step": 580
},
{
"epoch": 1.46,
"grad_norm": 11.838778495788574,
"learning_rate": 2.2682619647355163e-05,
"loss": 0.5564,
"step": 581
},
{
"epoch": 1.47,
"grad_norm": 10.92528247833252,
"learning_rate": 2.267002518891688e-05,
"loss": 0.4416,
"step": 582
},
{
"epoch": 1.47,
"grad_norm": 15.060375213623047,
"learning_rate": 2.265743073047859e-05,
"loss": 0.5191,
"step": 583
},
{
"epoch": 1.47,
"grad_norm": 6.89783239364624,
"learning_rate": 2.26448362720403e-05,
"loss": 0.2395,
"step": 584
},
{
"epoch": 1.47,
"grad_norm": 6.548165798187256,
"learning_rate": 2.2632241813602016e-05,
"loss": 0.16,
"step": 585
},
{
"epoch": 1.48,
"grad_norm": 7.645367622375488,
"learning_rate": 2.2619647355163728e-05,
"loss": 0.2274,
"step": 586
},
{
"epoch": 1.48,
"grad_norm": 6.4248247146606445,
"learning_rate": 2.260705289672544e-05,
"loss": 0.2073,
"step": 587
},
{
"epoch": 1.48,
"grad_norm": 16.396223068237305,
"learning_rate": 2.2594458438287157e-05,
"loss": 0.4331,
"step": 588
},
{
"epoch": 1.48,
"grad_norm": 15.62296199798584,
"learning_rate": 2.2581863979848866e-05,
"loss": 0.2843,
"step": 589
},
{
"epoch": 1.49,
"grad_norm": 9.540681838989258,
"learning_rate": 2.2569269521410578e-05,
"loss": 0.5034,
"step": 590
},
{
"epoch": 1.49,
"grad_norm": 6.93734073638916,
"learning_rate": 2.2556675062972294e-05,
"loss": 0.1416,
"step": 591
},
{
"epoch": 1.49,
"grad_norm": 14.586864471435547,
"learning_rate": 2.2544080604534006e-05,
"loss": 0.6036,
"step": 592
},
{
"epoch": 1.49,
"grad_norm": 10.943079948425293,
"learning_rate": 2.253148614609572e-05,
"loss": 0.3713,
"step": 593
},
{
"epoch": 1.5,
"grad_norm": 10.702349662780762,
"learning_rate": 2.251889168765743e-05,
"loss": 0.4334,
"step": 594
},
{
"epoch": 1.5,
"grad_norm": 5.976529121398926,
"learning_rate": 2.2506297229219144e-05,
"loss": 0.2093,
"step": 595
},
{
"epoch": 1.5,
"grad_norm": 8.961341857910156,
"learning_rate": 2.2493702770780856e-05,
"loss": 0.2841,
"step": 596
},
{
"epoch": 1.5,
"grad_norm": 23.375741958618164,
"learning_rate": 2.2481108312342572e-05,
"loss": 0.5659,
"step": 597
},
{
"epoch": 1.51,
"grad_norm": 5.276069164276123,
"learning_rate": 2.246851385390428e-05,
"loss": 0.1247,
"step": 598
},
{
"epoch": 1.51,
"grad_norm": 10.108563423156738,
"learning_rate": 2.2455919395465993e-05,
"loss": 0.2897,
"step": 599
},
{
"epoch": 1.51,
"grad_norm": 9.869269371032715,
"learning_rate": 2.244332493702771e-05,
"loss": 0.2426,
"step": 600
},
{
"epoch": 1.51,
"grad_norm": 3.3714253902435303,
"learning_rate": 2.243073047858942e-05,
"loss": 0.2662,
"step": 601
},
{
"epoch": 1.52,
"grad_norm": 2.3957178592681885,
"learning_rate": 2.2418136020151134e-05,
"loss": 0.0724,
"step": 602
},
{
"epoch": 1.52,
"grad_norm": 4.764224529266357,
"learning_rate": 2.2405541561712846e-05,
"loss": 0.0976,
"step": 603
},
{
"epoch": 1.52,
"grad_norm": 12.71890640258789,
"learning_rate": 2.239294710327456e-05,
"loss": 0.3899,
"step": 604
},
{
"epoch": 1.52,
"grad_norm": 13.903827667236328,
"learning_rate": 2.238035264483627e-05,
"loss": 0.566,
"step": 605
},
{
"epoch": 1.53,
"grad_norm": 9.071674346923828,
"learning_rate": 2.2367758186397987e-05,
"loss": 0.5847,
"step": 606
},
{
"epoch": 1.53,
"grad_norm": 21.84575843811035,
"learning_rate": 2.23551637279597e-05,
"loss": 0.4494,
"step": 607
},
{
"epoch": 1.53,
"grad_norm": 10.90080451965332,
"learning_rate": 2.234256926952141e-05,
"loss": 0.2771,
"step": 608
},
{
"epoch": 1.53,
"grad_norm": 7.766908168792725,
"learning_rate": 2.2329974811083124e-05,
"loss": 0.1046,
"step": 609
},
{
"epoch": 1.54,
"grad_norm": 11.402420043945312,
"learning_rate": 2.2317380352644837e-05,
"loss": 0.4345,
"step": 610
},
{
"epoch": 1.54,
"grad_norm": 6.0967607498168945,
"learning_rate": 2.230478589420655e-05,
"loss": 0.0931,
"step": 611
},
{
"epoch": 1.54,
"grad_norm": 9.602745056152344,
"learning_rate": 2.229219143576826e-05,
"loss": 0.4619,
"step": 612
},
{
"epoch": 1.54,
"grad_norm": 7.642356872558594,
"learning_rate": 2.2279596977329974e-05,
"loss": 0.0986,
"step": 613
},
{
"epoch": 1.55,
"grad_norm": 11.6745023727417,
"learning_rate": 2.226700251889169e-05,
"loss": 0.2138,
"step": 614
},
{
"epoch": 1.55,
"grad_norm": 4.516753673553467,
"learning_rate": 2.2254408060453402e-05,
"loss": 0.255,
"step": 615
},
{
"epoch": 1.55,
"grad_norm": 6.529497146606445,
"learning_rate": 2.2241813602015115e-05,
"loss": 0.0866,
"step": 616
},
{
"epoch": 1.55,
"grad_norm": 14.750448226928711,
"learning_rate": 2.2229219143576827e-05,
"loss": 0.4606,
"step": 617
},
{
"epoch": 1.56,
"grad_norm": 9.433393478393555,
"learning_rate": 2.221662468513854e-05,
"loss": 0.2749,
"step": 618
},
{
"epoch": 1.56,
"grad_norm": 13.138323783874512,
"learning_rate": 2.2204030226700252e-05,
"loss": 0.4306,
"step": 619
},
{
"epoch": 1.56,
"grad_norm": 9.385525703430176,
"learning_rate": 2.2191435768261968e-05,
"loss": 0.435,
"step": 620
},
{
"epoch": 1.56,
"grad_norm": 9.789994239807129,
"learning_rate": 2.217884130982368e-05,
"loss": 0.3421,
"step": 621
},
{
"epoch": 1.57,
"grad_norm": 12.882686614990234,
"learning_rate": 2.216624685138539e-05,
"loss": 0.5238,
"step": 622
},
{
"epoch": 1.57,
"grad_norm": 9.727291107177734,
"learning_rate": 2.2153652392947105e-05,
"loss": 0.2041,
"step": 623
},
{
"epoch": 1.57,
"grad_norm": 9.766213417053223,
"learning_rate": 2.2141057934508818e-05,
"loss": 0.3109,
"step": 624
},
{
"epoch": 1.57,
"grad_norm": 10.814486503601074,
"learning_rate": 2.212846347607053e-05,
"loss": 0.2085,
"step": 625
},
{
"epoch": 1.58,
"grad_norm": 6.508476734161377,
"learning_rate": 2.2115869017632242e-05,
"loss": 0.3238,
"step": 626
},
{
"epoch": 1.58,
"grad_norm": 13.337801933288574,
"learning_rate": 2.2103274559193955e-05,
"loss": 0.157,
"step": 627
},
{
"epoch": 1.58,
"grad_norm": 5.9723405838012695,
"learning_rate": 2.2090680100755667e-05,
"loss": 0.4441,
"step": 628
},
{
"epoch": 1.58,
"grad_norm": 6.157145023345947,
"learning_rate": 2.2078085642317383e-05,
"loss": 0.0856,
"step": 629
},
{
"epoch": 1.59,
"grad_norm": 6.4899373054504395,
"learning_rate": 2.2065491183879095e-05,
"loss": 0.3309,
"step": 630
},
{
"epoch": 1.59,
"grad_norm": 3.3382256031036377,
"learning_rate": 2.2052896725440805e-05,
"loss": 0.0748,
"step": 631
},
{
"epoch": 1.59,
"grad_norm": 7.103469371795654,
"learning_rate": 2.204030226700252e-05,
"loss": 0.2311,
"step": 632
},
{
"epoch": 1.59,
"grad_norm": 6.943920135498047,
"learning_rate": 2.2027707808564233e-05,
"loss": 0.1224,
"step": 633
},
{
"epoch": 1.6,
"grad_norm": 7.204896450042725,
"learning_rate": 2.2015113350125945e-05,
"loss": 0.3382,
"step": 634
},
{
"epoch": 1.6,
"grad_norm": 10.163846015930176,
"learning_rate": 2.200251889168766e-05,
"loss": 0.4018,
"step": 635
},
{
"epoch": 1.6,
"grad_norm": 14.589173316955566,
"learning_rate": 2.198992443324937e-05,
"loss": 0.2431,
"step": 636
},
{
"epoch": 1.6,
"grad_norm": 8.574972152709961,
"learning_rate": 2.1977329974811082e-05,
"loss": 0.15,
"step": 637
},
{
"epoch": 1.61,
"grad_norm": 5.912990093231201,
"learning_rate": 2.1964735516372798e-05,
"loss": 0.2588,
"step": 638
},
{
"epoch": 1.61,
"grad_norm": 9.575433731079102,
"learning_rate": 2.195214105793451e-05,
"loss": 0.3173,
"step": 639
},
{
"epoch": 1.61,
"grad_norm": 6.566403865814209,
"learning_rate": 2.193954659949622e-05,
"loss": 0.2146,
"step": 640
},
{
"epoch": 1.61,
"grad_norm": 15.933618545532227,
"learning_rate": 2.1926952141057936e-05,
"loss": 0.6077,
"step": 641
},
{
"epoch": 1.62,
"grad_norm": 6.235144138336182,
"learning_rate": 2.1914357682619648e-05,
"loss": 0.1017,
"step": 642
},
{
"epoch": 1.62,
"grad_norm": 14.474084854125977,
"learning_rate": 2.190176322418136e-05,
"loss": 0.4763,
"step": 643
},
{
"epoch": 1.62,
"grad_norm": 10.936660766601562,
"learning_rate": 2.1889168765743076e-05,
"loss": 0.3788,
"step": 644
},
{
"epoch": 1.62,
"grad_norm": 15.92046070098877,
"learning_rate": 2.1876574307304785e-05,
"loss": 0.5219,
"step": 645
},
{
"epoch": 1.63,
"grad_norm": 10.802763938903809,
"learning_rate": 2.1863979848866498e-05,
"loss": 0.5354,
"step": 646
},
{
"epoch": 1.63,
"grad_norm": 12.62859058380127,
"learning_rate": 2.1851385390428214e-05,
"loss": 0.2161,
"step": 647
},
{
"epoch": 1.63,
"grad_norm": 11.541522026062012,
"learning_rate": 2.1838790931989926e-05,
"loss": 0.2553,
"step": 648
},
{
"epoch": 1.63,
"grad_norm": 8.991678237915039,
"learning_rate": 2.182619647355164e-05,
"loss": 0.1912,
"step": 649
},
{
"epoch": 1.64,
"grad_norm": 11.52978515625,
"learning_rate": 2.181360201511335e-05,
"loss": 0.8781,
"step": 650
},
{
"epoch": 1.64,
"grad_norm": 15.376463890075684,
"learning_rate": 2.1801007556675063e-05,
"loss": 0.5357,
"step": 651
},
{
"epoch": 1.64,
"grad_norm": 9.569962501525879,
"learning_rate": 2.1788413098236776e-05,
"loss": 0.2626,
"step": 652
},
{
"epoch": 1.64,
"grad_norm": 11.40342903137207,
"learning_rate": 2.177581863979849e-05,
"loss": 0.5519,
"step": 653
},
{
"epoch": 1.65,
"grad_norm": 15.95056438446045,
"learning_rate": 2.17632241813602e-05,
"loss": 0.6138,
"step": 654
},
{
"epoch": 1.65,
"grad_norm": 8.395648002624512,
"learning_rate": 2.1750629722921913e-05,
"loss": 0.1949,
"step": 655
},
{
"epoch": 1.65,
"grad_norm": 12.559154510498047,
"learning_rate": 2.173803526448363e-05,
"loss": 0.443,
"step": 656
},
{
"epoch": 1.65,
"grad_norm": 9.407703399658203,
"learning_rate": 2.172544080604534e-05,
"loss": 0.3019,
"step": 657
},
{
"epoch": 1.66,
"grad_norm": 6.948453426361084,
"learning_rate": 2.1712846347607054e-05,
"loss": 0.2083,
"step": 658
},
{
"epoch": 1.66,
"grad_norm": 11.148189544677734,
"learning_rate": 2.1700251889168766e-05,
"loss": 0.3048,
"step": 659
},
{
"epoch": 1.66,
"grad_norm": 7.73734712600708,
"learning_rate": 2.168765743073048e-05,
"loss": 0.2406,
"step": 660
},
{
"epoch": 1.66,
"grad_norm": 8.320653915405273,
"learning_rate": 2.167506297229219e-05,
"loss": 0.2978,
"step": 661
},
{
"epoch": 1.67,
"grad_norm": 6.541665077209473,
"learning_rate": 2.1662468513853907e-05,
"loss": 0.2485,
"step": 662
},
{
"epoch": 1.67,
"grad_norm": 7.9342732429504395,
"learning_rate": 2.1649874055415616e-05,
"loss": 0.2664,
"step": 663
},
{
"epoch": 1.67,
"grad_norm": 10.804299354553223,
"learning_rate": 2.1637279596977328e-05,
"loss": 0.3781,
"step": 664
},
{
"epoch": 1.68,
"grad_norm": 8.100717544555664,
"learning_rate": 2.1624685138539044e-05,
"loss": 0.3833,
"step": 665
},
{
"epoch": 1.68,
"grad_norm": 6.31338357925415,
"learning_rate": 2.1612090680100756e-05,
"loss": 0.326,
"step": 666
},
{
"epoch": 1.68,
"grad_norm": 13.130090713500977,
"learning_rate": 2.159949622166247e-05,
"loss": 0.3128,
"step": 667
},
{
"epoch": 1.68,
"grad_norm": 8.29228401184082,
"learning_rate": 2.158690176322418e-05,
"loss": 0.1476,
"step": 668
},
{
"epoch": 1.69,
"grad_norm": 17.379844665527344,
"learning_rate": 2.1574307304785894e-05,
"loss": 0.325,
"step": 669
},
{
"epoch": 1.69,
"grad_norm": 3.1806654930114746,
"learning_rate": 2.1561712846347606e-05,
"loss": 0.0877,
"step": 670
},
{
"epoch": 1.69,
"grad_norm": 6.332939624786377,
"learning_rate": 2.1549118387909322e-05,
"loss": 0.3275,
"step": 671
},
{
"epoch": 1.69,
"grad_norm": 8.985432624816895,
"learning_rate": 2.1536523929471034e-05,
"loss": 0.2804,
"step": 672
},
{
"epoch": 1.7,
"grad_norm": 4.4867730140686035,
"learning_rate": 2.1523929471032743e-05,
"loss": 0.2085,
"step": 673
},
{
"epoch": 1.7,
"grad_norm": 8.214529991149902,
"learning_rate": 2.151133501259446e-05,
"loss": 0.2132,
"step": 674
},
{
"epoch": 1.7,
"grad_norm": 13.838793754577637,
"learning_rate": 2.1498740554156172e-05,
"loss": 0.3482,
"step": 675
},
{
"epoch": 1.7,
"grad_norm": 5.29369592666626,
"learning_rate": 2.1486146095717884e-05,
"loss": 0.1623,
"step": 676
},
{
"epoch": 1.71,
"grad_norm": 7.333521366119385,
"learning_rate": 2.1473551637279597e-05,
"loss": 0.1188,
"step": 677
},
{
"epoch": 1.71,
"grad_norm": 14.213542938232422,
"learning_rate": 2.146095717884131e-05,
"loss": 0.2021,
"step": 678
},
{
"epoch": 1.71,
"grad_norm": 12.950404167175293,
"learning_rate": 2.144836272040302e-05,
"loss": 0.3876,
"step": 679
},
{
"epoch": 1.71,
"grad_norm": 4.482484817504883,
"learning_rate": 2.1435768261964737e-05,
"loss": 0.1161,
"step": 680
},
{
"epoch": 1.72,
"grad_norm": 9.34411907196045,
"learning_rate": 2.142317380352645e-05,
"loss": 0.4212,
"step": 681
},
{
"epoch": 1.72,
"grad_norm": 9.932082176208496,
"learning_rate": 2.141057934508816e-05,
"loss": 0.3787,
"step": 682
},
{
"epoch": 1.72,
"grad_norm": 11.614188194274902,
"learning_rate": 2.1397984886649875e-05,
"loss": 0.4952,
"step": 683
},
{
"epoch": 1.72,
"grad_norm": 15.483943939208984,
"learning_rate": 2.1385390428211587e-05,
"loss": 0.7035,
"step": 684
},
{
"epoch": 1.73,
"grad_norm": 9.446450233459473,
"learning_rate": 2.13727959697733e-05,
"loss": 0.2137,
"step": 685
},
{
"epoch": 1.73,
"grad_norm": 5.429463863372803,
"learning_rate": 2.1360201511335015e-05,
"loss": 0.0765,
"step": 686
},
{
"epoch": 1.73,
"grad_norm": 7.334542274475098,
"learning_rate": 2.1347607052896724e-05,
"loss": 0.6548,
"step": 687
},
{
"epoch": 1.73,
"grad_norm": 4.637013912200928,
"learning_rate": 2.133501259445844e-05,
"loss": 0.0695,
"step": 688
},
{
"epoch": 1.74,
"grad_norm": 9.739296913146973,
"learning_rate": 2.1322418136020153e-05,
"loss": 0.8203,
"step": 689
},
{
"epoch": 1.74,
"grad_norm": 12.037601470947266,
"learning_rate": 2.1309823677581865e-05,
"loss": 0.2714,
"step": 690
},
{
"epoch": 1.74,
"grad_norm": 9.018967628479004,
"learning_rate": 2.1297229219143577e-05,
"loss": 0.1763,
"step": 691
},
{
"epoch": 1.74,
"grad_norm": 10.764039993286133,
"learning_rate": 2.128463476070529e-05,
"loss": 0.29,
"step": 692
},
{
"epoch": 1.75,
"grad_norm": 7.258769512176514,
"learning_rate": 2.1272040302267002e-05,
"loss": 0.2764,
"step": 693
},
{
"epoch": 1.75,
"grad_norm": 5.780182361602783,
"learning_rate": 2.1259445843828718e-05,
"loss": 0.2163,
"step": 694
},
{
"epoch": 1.75,
"grad_norm": 8.747794151306152,
"learning_rate": 2.124685138539043e-05,
"loss": 0.2305,
"step": 695
},
{
"epoch": 1.75,
"grad_norm": 6.942091464996338,
"learning_rate": 2.123425692695214e-05,
"loss": 0.0966,
"step": 696
},
{
"epoch": 1.76,
"grad_norm": 9.203085899353027,
"learning_rate": 2.1221662468513855e-05,
"loss": 0.1701,
"step": 697
},
{
"epoch": 1.76,
"grad_norm": 12.736608505249023,
"learning_rate": 2.1209068010075568e-05,
"loss": 0.5168,
"step": 698
},
{
"epoch": 1.76,
"grad_norm": 16.280555725097656,
"learning_rate": 2.119647355163728e-05,
"loss": 0.2754,
"step": 699
},
{
"epoch": 1.76,
"grad_norm": 19.578857421875,
"learning_rate": 2.1183879093198996e-05,
"loss": 0.7287,
"step": 700
},
{
"epoch": 1.77,
"grad_norm": 13.128076553344727,
"learning_rate": 2.1171284634760705e-05,
"loss": 0.5267,
"step": 701
},
{
"epoch": 1.77,
"grad_norm": 9.765761375427246,
"learning_rate": 2.1158690176322417e-05,
"loss": 0.241,
"step": 702
},
{
"epoch": 1.77,
"grad_norm": 4.671734809875488,
"learning_rate": 2.1146095717884133e-05,
"loss": 0.097,
"step": 703
},
{
"epoch": 1.77,
"grad_norm": 4.864780426025391,
"learning_rate": 2.1133501259445846e-05,
"loss": 0.279,
"step": 704
},
{
"epoch": 1.78,
"grad_norm": 9.086429595947266,
"learning_rate": 2.1120906801007555e-05,
"loss": 0.383,
"step": 705
},
{
"epoch": 1.78,
"grad_norm": 8.034072875976562,
"learning_rate": 2.110831234256927e-05,
"loss": 0.3027,
"step": 706
},
{
"epoch": 1.78,
"grad_norm": 17.50482177734375,
"learning_rate": 2.1095717884130983e-05,
"loss": 0.7063,
"step": 707
},
{
"epoch": 1.78,
"grad_norm": 15.067398071289062,
"learning_rate": 2.1083123425692695e-05,
"loss": 0.2317,
"step": 708
},
{
"epoch": 1.79,
"grad_norm": 12.081408500671387,
"learning_rate": 2.107052896725441e-05,
"loss": 0.6775,
"step": 709
},
{
"epoch": 1.79,
"grad_norm": 8.175625801086426,
"learning_rate": 2.105793450881612e-05,
"loss": 0.2379,
"step": 710
},
{
"epoch": 1.79,
"grad_norm": 11.485807418823242,
"learning_rate": 2.1045340050377833e-05,
"loss": 0.3477,
"step": 711
},
{
"epoch": 1.79,
"grad_norm": 10.902219772338867,
"learning_rate": 2.103274559193955e-05,
"loss": 0.3242,
"step": 712
},
{
"epoch": 1.8,
"grad_norm": 7.6868205070495605,
"learning_rate": 2.102015113350126e-05,
"loss": 0.2112,
"step": 713
},
{
"epoch": 1.8,
"grad_norm": 4.256736755371094,
"learning_rate": 2.1007556675062973e-05,
"loss": 0.1194,
"step": 714
},
{
"epoch": 1.8,
"grad_norm": 4.6595659255981445,
"learning_rate": 2.0994962216624686e-05,
"loss": 0.1548,
"step": 715
},
{
"epoch": 1.8,
"grad_norm": 12.510821342468262,
"learning_rate": 2.0982367758186398e-05,
"loss": 0.2877,
"step": 716
},
{
"epoch": 1.81,
"grad_norm": 5.698768138885498,
"learning_rate": 2.096977329974811e-05,
"loss": 0.3862,
"step": 717
},
{
"epoch": 1.81,
"grad_norm": 8.279985427856445,
"learning_rate": 2.0957178841309827e-05,
"loss": 0.3283,
"step": 718
},
{
"epoch": 1.81,
"grad_norm": 10.287877082824707,
"learning_rate": 2.0944584382871536e-05,
"loss": 0.3959,
"step": 719
},
{
"epoch": 1.81,
"grad_norm": 8.201375961303711,
"learning_rate": 2.0931989924433248e-05,
"loss": 0.1885,
"step": 720
},
{
"epoch": 1.82,
"grad_norm": 9.439096450805664,
"learning_rate": 2.0919395465994964e-05,
"loss": 0.1917,
"step": 721
},
{
"epoch": 1.82,
"grad_norm": 10.867537498474121,
"learning_rate": 2.0906801007556676e-05,
"loss": 0.3561,
"step": 722
},
{
"epoch": 1.82,
"grad_norm": 10.648015975952148,
"learning_rate": 2.089420654911839e-05,
"loss": 0.3826,
"step": 723
},
{
"epoch": 1.82,
"grad_norm": 20.001815795898438,
"learning_rate": 2.08816120906801e-05,
"loss": 0.4748,
"step": 724
},
{
"epoch": 1.83,
"grad_norm": 7.351145267486572,
"learning_rate": 2.0869017632241814e-05,
"loss": 0.0979,
"step": 725
},
{
"epoch": 1.83,
"grad_norm": 7.364979267120361,
"learning_rate": 2.0856423173803526e-05,
"loss": 0.3825,
"step": 726
},
{
"epoch": 1.83,
"grad_norm": 14.710403442382812,
"learning_rate": 2.0843828715365242e-05,
"loss": 0.472,
"step": 727
},
{
"epoch": 1.83,
"grad_norm": 3.746670961380005,
"learning_rate": 2.0831234256926954e-05,
"loss": 0.2175,
"step": 728
},
{
"epoch": 1.84,
"grad_norm": 9.130343437194824,
"learning_rate": 2.0818639798488663e-05,
"loss": 0.4235,
"step": 729
},
{
"epoch": 1.84,
"grad_norm": 13.102235794067383,
"learning_rate": 2.080604534005038e-05,
"loss": 0.615,
"step": 730
},
{
"epoch": 1.84,
"grad_norm": 6.6438822746276855,
"learning_rate": 2.079345088161209e-05,
"loss": 0.1462,
"step": 731
},
{
"epoch": 1.84,
"grad_norm": 15.286580085754395,
"learning_rate": 2.0780856423173804e-05,
"loss": 0.3657,
"step": 732
},
{
"epoch": 1.85,
"grad_norm": 12.244285583496094,
"learning_rate": 2.0768261964735516e-05,
"loss": 0.3817,
"step": 733
},
{
"epoch": 1.85,
"grad_norm": 6.858506679534912,
"learning_rate": 2.075566750629723e-05,
"loss": 0.3086,
"step": 734
},
{
"epoch": 1.85,
"grad_norm": 13.334952354431152,
"learning_rate": 2.074307304785894e-05,
"loss": 0.5208,
"step": 735
},
{
"epoch": 1.85,
"grad_norm": 14.200685501098633,
"learning_rate": 2.0730478589420657e-05,
"loss": 0.5627,
"step": 736
},
{
"epoch": 1.86,
"grad_norm": 5.623879909515381,
"learning_rate": 2.071788413098237e-05,
"loss": 0.1789,
"step": 737
},
{
"epoch": 1.86,
"grad_norm": 13.440046310424805,
"learning_rate": 2.070528967254408e-05,
"loss": 0.3759,
"step": 738
},
{
"epoch": 1.86,
"grad_norm": 8.08634090423584,
"learning_rate": 2.0692695214105794e-05,
"loss": 0.2264,
"step": 739
},
{
"epoch": 1.86,
"grad_norm": 15.935822486877441,
"learning_rate": 2.0680100755667507e-05,
"loss": 0.2984,
"step": 740
},
{
"epoch": 1.87,
"grad_norm": 8.345213890075684,
"learning_rate": 2.066750629722922e-05,
"loss": 0.1735,
"step": 741
},
{
"epoch": 1.87,
"grad_norm": 4.334136486053467,
"learning_rate": 2.0654911838790935e-05,
"loss": 0.1256,
"step": 742
},
{
"epoch": 1.87,
"grad_norm": 8.379855155944824,
"learning_rate": 2.0642317380352644e-05,
"loss": 0.1301,
"step": 743
},
{
"epoch": 1.87,
"grad_norm": 6.75809383392334,
"learning_rate": 2.0629722921914356e-05,
"loss": 0.2219,
"step": 744
},
{
"epoch": 1.88,
"grad_norm": 15.786321640014648,
"learning_rate": 2.0617128463476072e-05,
"loss": 0.2864,
"step": 745
},
{
"epoch": 1.88,
"grad_norm": 10.396544456481934,
"learning_rate": 2.0604534005037785e-05,
"loss": 0.7485,
"step": 746
},
{
"epoch": 1.88,
"grad_norm": 12.27066707611084,
"learning_rate": 2.0591939546599494e-05,
"loss": 0.2798,
"step": 747
},
{
"epoch": 1.88,
"grad_norm": 10.269973754882812,
"learning_rate": 2.057934508816121e-05,
"loss": 0.2683,
"step": 748
},
{
"epoch": 1.89,
"grad_norm": 11.86515998840332,
"learning_rate": 2.0566750629722922e-05,
"loss": 0.2667,
"step": 749
},
{
"epoch": 1.89,
"grad_norm": 14.70499038696289,
"learning_rate": 2.0554156171284634e-05,
"loss": 0.6922,
"step": 750
},
{
"epoch": 1.89,
"grad_norm": 8.275458335876465,
"learning_rate": 2.054156171284635e-05,
"loss": 0.2484,
"step": 751
},
{
"epoch": 1.89,
"grad_norm": 17.040943145751953,
"learning_rate": 2.052896725440806e-05,
"loss": 0.8717,
"step": 752
},
{
"epoch": 1.9,
"grad_norm": 13.45297622680664,
"learning_rate": 2.0516372795969772e-05,
"loss": 0.265,
"step": 753
},
{
"epoch": 1.9,
"grad_norm": 17.105192184448242,
"learning_rate": 2.0503778337531488e-05,
"loss": 0.6025,
"step": 754
},
{
"epoch": 1.9,
"grad_norm": 9.762206077575684,
"learning_rate": 2.04911838790932e-05,
"loss": 0.1654,
"step": 755
},
{
"epoch": 1.9,
"grad_norm": 9.832480430603027,
"learning_rate": 2.0478589420654912e-05,
"loss": 0.2347,
"step": 756
},
{
"epoch": 1.91,
"grad_norm": 4.055430889129639,
"learning_rate": 2.0465994962216625e-05,
"loss": 0.1007,
"step": 757
},
{
"epoch": 1.91,
"grad_norm": 11.626514434814453,
"learning_rate": 2.0453400503778337e-05,
"loss": 0.2964,
"step": 758
},
{
"epoch": 1.91,
"grad_norm": 4.129045009613037,
"learning_rate": 2.0440806045340053e-05,
"loss": 0.0867,
"step": 759
},
{
"epoch": 1.91,
"grad_norm": 9.8018798828125,
"learning_rate": 2.0428211586901765e-05,
"loss": 0.1439,
"step": 760
},
{
"epoch": 1.92,
"grad_norm": 17.625484466552734,
"learning_rate": 2.0415617128463475e-05,
"loss": 0.6171,
"step": 761
},
{
"epoch": 1.92,
"grad_norm": 13.455811500549316,
"learning_rate": 2.040302267002519e-05,
"loss": 0.1957,
"step": 762
},
{
"epoch": 1.92,
"grad_norm": 13.38533878326416,
"learning_rate": 2.0390428211586903e-05,
"loss": 0.4046,
"step": 763
},
{
"epoch": 1.92,
"grad_norm": 7.502546787261963,
"learning_rate": 2.0377833753148615e-05,
"loss": 0.1991,
"step": 764
},
{
"epoch": 1.93,
"grad_norm": 8.860077857971191,
"learning_rate": 2.036523929471033e-05,
"loss": 0.3378,
"step": 765
},
{
"epoch": 1.93,
"grad_norm": 19.337848663330078,
"learning_rate": 2.035264483627204e-05,
"loss": 0.6095,
"step": 766
},
{
"epoch": 1.93,
"grad_norm": 6.279582500457764,
"learning_rate": 2.0340050377833752e-05,
"loss": 0.3184,
"step": 767
},
{
"epoch": 1.93,
"grad_norm": 4.984986305236816,
"learning_rate": 2.032745591939547e-05,
"loss": 0.3696,
"step": 768
},
{
"epoch": 1.94,
"grad_norm": 11.388147354125977,
"learning_rate": 2.031486146095718e-05,
"loss": 0.4244,
"step": 769
},
{
"epoch": 1.94,
"grad_norm": 8.785746574401855,
"learning_rate": 2.030226700251889e-05,
"loss": 0.1574,
"step": 770
},
{
"epoch": 1.94,
"grad_norm": 17.234216690063477,
"learning_rate": 2.0289672544080606e-05,
"loss": 0.6394,
"step": 771
},
{
"epoch": 1.94,
"grad_norm": 9.10766887664795,
"learning_rate": 2.0277078085642318e-05,
"loss": 0.7971,
"step": 772
},
{
"epoch": 1.95,
"grad_norm": 10.063255310058594,
"learning_rate": 2.026448362720403e-05,
"loss": 0.1751,
"step": 773
},
{
"epoch": 1.95,
"grad_norm": 8.564946174621582,
"learning_rate": 2.0251889168765746e-05,
"loss": 0.2599,
"step": 774
},
{
"epoch": 1.95,
"grad_norm": 7.084752559661865,
"learning_rate": 2.0239294710327455e-05,
"loss": 0.2425,
"step": 775
},
{
"epoch": 1.95,
"grad_norm": 12.423927307128906,
"learning_rate": 2.0226700251889168e-05,
"loss": 0.5981,
"step": 776
},
{
"epoch": 1.96,
"grad_norm": 7.133967876434326,
"learning_rate": 2.0214105793450884e-05,
"loss": 0.4236,
"step": 777
},
{
"epoch": 1.96,
"grad_norm": 12.152637481689453,
"learning_rate": 2.0201511335012596e-05,
"loss": 0.9084,
"step": 778
},
{
"epoch": 1.96,
"grad_norm": 8.058826446533203,
"learning_rate": 2.018891687657431e-05,
"loss": 0.1888,
"step": 779
},
{
"epoch": 1.96,
"grad_norm": 12.304523468017578,
"learning_rate": 2.017632241813602e-05,
"loss": 0.3763,
"step": 780
},
{
"epoch": 1.97,
"grad_norm": 6.993175029754639,
"learning_rate": 2.0163727959697733e-05,
"loss": 0.3192,
"step": 781
},
{
"epoch": 1.97,
"grad_norm": 3.840372323989868,
"learning_rate": 2.0151133501259446e-05,
"loss": 0.116,
"step": 782
},
{
"epoch": 1.97,
"grad_norm": 6.5427446365356445,
"learning_rate": 2.013853904282116e-05,
"loss": 0.3154,
"step": 783
},
{
"epoch": 1.97,
"grad_norm": 6.756482124328613,
"learning_rate": 2.012594458438287e-05,
"loss": 0.3972,
"step": 784
},
{
"epoch": 1.98,
"grad_norm": 10.90357780456543,
"learning_rate": 2.0113350125944583e-05,
"loss": 0.2908,
"step": 785
},
{
"epoch": 1.98,
"grad_norm": 17.345783233642578,
"learning_rate": 2.01007556675063e-05,
"loss": 0.4357,
"step": 786
},
{
"epoch": 1.98,
"grad_norm": 4.750704288482666,
"learning_rate": 2.008816120906801e-05,
"loss": 0.212,
"step": 787
},
{
"epoch": 1.98,
"grad_norm": 13.136444091796875,
"learning_rate": 2.0075566750629724e-05,
"loss": 0.2416,
"step": 788
},
{
"epoch": 1.99,
"grad_norm": 6.8181376457214355,
"learning_rate": 2.0062972292191436e-05,
"loss": 0.1993,
"step": 789
},
{
"epoch": 1.99,
"grad_norm": 17.32404899597168,
"learning_rate": 2.005037783375315e-05,
"loss": 0.7192,
"step": 790
},
{
"epoch": 1.99,
"grad_norm": 10.616106986999512,
"learning_rate": 2.003778337531486e-05,
"loss": 0.3684,
"step": 791
},
{
"epoch": 1.99,
"grad_norm": 6.223758220672607,
"learning_rate": 2.0025188916876577e-05,
"loss": 0.2985,
"step": 792
},
{
"epoch": 2.0,
"grad_norm": 9.101003646850586,
"learning_rate": 2.001259445843829e-05,
"loss": 0.3622,
"step": 793
},
{
"epoch": 2.0,
"grad_norm": 11.418810844421387,
"learning_rate": 1.9999999999999998e-05,
"loss": 0.0876,
"step": 794
},
{
"epoch": 2.0,
"eval_accuracy": 0.804416403785489,
"eval_f1": 0.8032698800120437,
"eval_loss": 0.4402177035808563,
"eval_precision": 0.8026196885367871,
"eval_recall": 0.8048576887939576,
"eval_runtime": 1213.5699,
"eval_samples_per_second": 1.306,
"eval_steps_per_second": 0.082,
"step": 794
},
{
"epoch": 2.0,
"grad_norm": 5.976737976074219,
"learning_rate": 1.9987405541561714e-05,
"loss": 0.1186,
"step": 795
},
{
"epoch": 2.01,
"grad_norm": 9.083830833435059,
"learning_rate": 1.9974811083123426e-05,
"loss": 0.1377,
"step": 796
},
{
"epoch": 2.01,
"grad_norm": 5.250425338745117,
"learning_rate": 1.996221662468514e-05,
"loss": 0.1186,
"step": 797
},
{
"epoch": 2.01,
"grad_norm": 5.4060797691345215,
"learning_rate": 1.994962216624685e-05,
"loss": 0.1543,
"step": 798
},
{
"epoch": 2.01,
"grad_norm": 3.3773229122161865,
"learning_rate": 1.9937027707808564e-05,
"loss": 0.2486,
"step": 799
},
{
"epoch": 2.02,
"grad_norm": 10.780204772949219,
"learning_rate": 1.9924433249370276e-05,
"loss": 0.3412,
"step": 800
},
{
"epoch": 2.02,
"grad_norm": 7.569768905639648,
"learning_rate": 1.9911838790931992e-05,
"loss": 0.2412,
"step": 801
},
{
"epoch": 2.02,
"grad_norm": 3.2330408096313477,
"learning_rate": 1.9899244332493704e-05,
"loss": 0.0898,
"step": 802
},
{
"epoch": 2.02,
"grad_norm": 5.828372001647949,
"learning_rate": 1.9886649874055413e-05,
"loss": 0.419,
"step": 803
},
{
"epoch": 2.03,
"grad_norm": 8.416691780090332,
"learning_rate": 1.987405541561713e-05,
"loss": 0.5116,
"step": 804
},
{
"epoch": 2.03,
"grad_norm": 4.302773952484131,
"learning_rate": 1.9861460957178842e-05,
"loss": 0.0885,
"step": 805
},
{
"epoch": 2.03,
"grad_norm": 21.357067108154297,
"learning_rate": 1.9848866498740554e-05,
"loss": 0.256,
"step": 806
},
{
"epoch": 2.03,
"grad_norm": 12.905417442321777,
"learning_rate": 1.983627204030227e-05,
"loss": 0.409,
"step": 807
},
{
"epoch": 2.04,
"grad_norm": 6.17220401763916,
"learning_rate": 1.982367758186398e-05,
"loss": 0.1705,
"step": 808
},
{
"epoch": 2.04,
"grad_norm": 7.837858200073242,
"learning_rate": 1.981108312342569e-05,
"loss": 0.1145,
"step": 809
},
{
"epoch": 2.04,
"grad_norm": 4.5199809074401855,
"learning_rate": 1.9798488664987407e-05,
"loss": 0.1877,
"step": 810
},
{
"epoch": 2.04,
"grad_norm": 10.4535551071167,
"learning_rate": 1.978589420654912e-05,
"loss": 0.238,
"step": 811
},
{
"epoch": 2.05,
"grad_norm": 4.682440280914307,
"learning_rate": 1.977329974811083e-05,
"loss": 0.2157,
"step": 812
},
{
"epoch": 2.05,
"grad_norm": 3.6102426052093506,
"learning_rate": 1.9760705289672545e-05,
"loss": 0.0548,
"step": 813
},
{
"epoch": 2.05,
"grad_norm": 8.517151832580566,
"learning_rate": 1.9748110831234257e-05,
"loss": 0.1414,
"step": 814
},
{
"epoch": 2.05,
"grad_norm": 1.7788081169128418,
"learning_rate": 1.973551637279597e-05,
"loss": 0.0507,
"step": 815
},
{
"epoch": 2.06,
"grad_norm": 11.008373260498047,
"learning_rate": 1.9722921914357685e-05,
"loss": 0.4553,
"step": 816
},
{
"epoch": 2.06,
"grad_norm": 6.783651828765869,
"learning_rate": 1.9710327455919394e-05,
"loss": 0.1793,
"step": 817
},
{
"epoch": 2.06,
"grad_norm": 4.473495960235596,
"learning_rate": 1.9697732997481107e-05,
"loss": 0.1944,
"step": 818
},
{
"epoch": 2.06,
"grad_norm": 7.093316555023193,
"learning_rate": 1.9685138539042823e-05,
"loss": 0.3245,
"step": 819
},
{
"epoch": 2.07,
"grad_norm": 8.107620239257812,
"learning_rate": 1.9672544080604535e-05,
"loss": 0.074,
"step": 820
},
{
"epoch": 2.07,
"grad_norm": 7.021946907043457,
"learning_rate": 1.9659949622166247e-05,
"loss": 0.3628,
"step": 821
},
{
"epoch": 2.07,
"grad_norm": 6.592014312744141,
"learning_rate": 1.964735516372796e-05,
"loss": 0.3116,
"step": 822
},
{
"epoch": 2.07,
"grad_norm": 9.092830657958984,
"learning_rate": 1.9634760705289672e-05,
"loss": 0.3528,
"step": 823
},
{
"epoch": 2.08,
"grad_norm": 4.819520950317383,
"learning_rate": 1.9622166246851385e-05,
"loss": 0.0542,
"step": 824
},
{
"epoch": 2.08,
"grad_norm": 20.10268783569336,
"learning_rate": 1.96095717884131e-05,
"loss": 0.506,
"step": 825
},
{
"epoch": 2.08,
"grad_norm": 15.67265796661377,
"learning_rate": 1.959697732997481e-05,
"loss": 0.1978,
"step": 826
},
{
"epoch": 2.08,
"grad_norm": 6.482569217681885,
"learning_rate": 1.9584382871536522e-05,
"loss": 0.2595,
"step": 827
},
{
"epoch": 2.09,
"grad_norm": 7.855728626251221,
"learning_rate": 1.9571788413098238e-05,
"loss": 0.0733,
"step": 828
},
{
"epoch": 2.09,
"grad_norm": 14.004561424255371,
"learning_rate": 1.955919395465995e-05,
"loss": 0.2764,
"step": 829
},
{
"epoch": 2.09,
"grad_norm": 5.431961536407471,
"learning_rate": 1.9546599496221663e-05,
"loss": 0.0654,
"step": 830
},
{
"epoch": 2.09,
"grad_norm": 13.324678421020508,
"learning_rate": 1.9534005037783375e-05,
"loss": 0.3888,
"step": 831
},
{
"epoch": 2.1,
"grad_norm": 14.44890022277832,
"learning_rate": 1.9521410579345088e-05,
"loss": 0.2096,
"step": 832
},
{
"epoch": 2.1,
"grad_norm": 11.987161636352539,
"learning_rate": 1.9508816120906803e-05,
"loss": 0.119,
"step": 833
},
{
"epoch": 2.1,
"grad_norm": 15.736373901367188,
"learning_rate": 1.9496221662468516e-05,
"loss": 0.2762,
"step": 834
},
{
"epoch": 2.1,
"grad_norm": 8.252238273620605,
"learning_rate": 1.9483627204030228e-05,
"loss": 0.0847,
"step": 835
},
{
"epoch": 2.11,
"grad_norm": 4.142892360687256,
"learning_rate": 1.947103274559194e-05,
"loss": 0.0768,
"step": 836
},
{
"epoch": 2.11,
"grad_norm": 11.67130184173584,
"learning_rate": 1.9458438287153653e-05,
"loss": 0.2269,
"step": 837
},
{
"epoch": 2.11,
"grad_norm": 8.394164085388184,
"learning_rate": 1.9445843828715365e-05,
"loss": 0.1033,
"step": 838
},
{
"epoch": 2.11,
"grad_norm": 7.672865390777588,
"learning_rate": 1.943324937027708e-05,
"loss": 0.1247,
"step": 839
},
{
"epoch": 2.12,
"grad_norm": 4.08453893661499,
"learning_rate": 1.942065491183879e-05,
"loss": 0.0456,
"step": 840
},
{
"epoch": 2.12,
"grad_norm": 1.0883690118789673,
"learning_rate": 1.9408060453400503e-05,
"loss": 0.0326,
"step": 841
},
{
"epoch": 2.12,
"grad_norm": 20.027908325195312,
"learning_rate": 1.939546599496222e-05,
"loss": 0.1281,
"step": 842
},
{
"epoch": 2.12,
"grad_norm": 13.216873168945312,
"learning_rate": 1.938287153652393e-05,
"loss": 0.5115,
"step": 843
},
{
"epoch": 2.13,
"grad_norm": 12.752791404724121,
"learning_rate": 1.9370277078085643e-05,
"loss": 0.6858,
"step": 844
},
{
"epoch": 2.13,
"grad_norm": 18.067123413085938,
"learning_rate": 1.9357682619647356e-05,
"loss": 0.1749,
"step": 845
},
{
"epoch": 2.13,
"grad_norm": 4.583468914031982,
"learning_rate": 1.9345088161209068e-05,
"loss": 0.0346,
"step": 846
},
{
"epoch": 2.13,
"grad_norm": 12.90953540802002,
"learning_rate": 1.933249370277078e-05,
"loss": 0.2077,
"step": 847
},
{
"epoch": 2.14,
"grad_norm": 5.2107110023498535,
"learning_rate": 1.9319899244332497e-05,
"loss": 0.0584,
"step": 848
},
{
"epoch": 2.14,
"grad_norm": 8.783427238464355,
"learning_rate": 1.930730478589421e-05,
"loss": 0.5019,
"step": 849
},
{
"epoch": 2.14,
"grad_norm": 6.2126288414001465,
"learning_rate": 1.9294710327455918e-05,
"loss": 0.0539,
"step": 850
},
{
"epoch": 2.14,
"grad_norm": 13.92211627960205,
"learning_rate": 1.9282115869017634e-05,
"loss": 0.1497,
"step": 851
},
{
"epoch": 2.15,
"grad_norm": 28.381793975830078,
"learning_rate": 1.9269521410579346e-05,
"loss": 0.5454,
"step": 852
},
{
"epoch": 2.15,
"grad_norm": 16.814462661743164,
"learning_rate": 1.925692695214106e-05,
"loss": 0.2194,
"step": 853
},
{
"epoch": 2.15,
"grad_norm": 7.227400302886963,
"learning_rate": 1.924433249370277e-05,
"loss": 0.0549,
"step": 854
},
{
"epoch": 2.15,
"grad_norm": 15.914610862731934,
"learning_rate": 1.9231738035264484e-05,
"loss": 0.1357,
"step": 855
},
{
"epoch": 2.16,
"grad_norm": 16.585025787353516,
"learning_rate": 1.9219143576826196e-05,
"loss": 0.378,
"step": 856
},
{
"epoch": 2.16,
"grad_norm": 1.9572542905807495,
"learning_rate": 1.9206549118387912e-05,
"loss": 0.0286,
"step": 857
},
{
"epoch": 2.16,
"grad_norm": 17.232242584228516,
"learning_rate": 1.9193954659949624e-05,
"loss": 0.2815,
"step": 858
},
{
"epoch": 2.16,
"grad_norm": 3.981262683868408,
"learning_rate": 1.9181360201511333e-05,
"loss": 0.0536,
"step": 859
},
{
"epoch": 2.17,
"grad_norm": 26.572742462158203,
"learning_rate": 1.916876574307305e-05,
"loss": 0.2345,
"step": 860
},
{
"epoch": 2.17,
"grad_norm": 15.456450462341309,
"learning_rate": 1.915617128463476e-05,
"loss": 0.2391,
"step": 861
},
{
"epoch": 2.17,
"grad_norm": 16.02436065673828,
"learning_rate": 1.9143576826196474e-05,
"loss": 0.1117,
"step": 862
},
{
"epoch": 2.17,
"grad_norm": 3.077558994293213,
"learning_rate": 1.913098236775819e-05,
"loss": 0.0319,
"step": 863
},
{
"epoch": 2.18,
"grad_norm": 24.19316864013672,
"learning_rate": 1.91183879093199e-05,
"loss": 0.1006,
"step": 864
},
{
"epoch": 2.18,
"grad_norm": 14.604350090026855,
"learning_rate": 1.910579345088161e-05,
"loss": 0.2585,
"step": 865
},
{
"epoch": 2.18,
"grad_norm": 10.65208911895752,
"learning_rate": 1.9093198992443327e-05,
"loss": 0.0927,
"step": 866
},
{
"epoch": 2.18,
"grad_norm": 10.185251235961914,
"learning_rate": 1.908060453400504e-05,
"loss": 0.054,
"step": 867
},
{
"epoch": 2.19,
"grad_norm": 4.103867053985596,
"learning_rate": 1.906801007556675e-05,
"loss": 0.0463,
"step": 868
},
{
"epoch": 2.19,
"grad_norm": 17.59889030456543,
"learning_rate": 1.9055415617128464e-05,
"loss": 0.0794,
"step": 869
},
{
"epoch": 2.19,
"grad_norm": 7.415209770202637,
"learning_rate": 1.9042821158690177e-05,
"loss": 0.0596,
"step": 870
},
{
"epoch": 2.19,
"grad_norm": 11.936656951904297,
"learning_rate": 1.903022670025189e-05,
"loss": 0.1241,
"step": 871
},
{
"epoch": 2.2,
"grad_norm": 12.43713665008545,
"learning_rate": 1.9017632241813605e-05,
"loss": 0.2765,
"step": 872
},
{
"epoch": 2.2,
"grad_norm": 8.106919288635254,
"learning_rate": 1.9005037783375314e-05,
"loss": 0.2905,
"step": 873
},
{
"epoch": 2.2,
"grad_norm": 6.52731466293335,
"learning_rate": 1.8992443324937026e-05,
"loss": 0.0641,
"step": 874
},
{
"epoch": 2.2,
"grad_norm": 2.299429416656494,
"learning_rate": 1.8979848866498742e-05,
"loss": 0.0356,
"step": 875
},
{
"epoch": 2.21,
"grad_norm": 23.403858184814453,
"learning_rate": 1.8967254408060455e-05,
"loss": 0.4673,
"step": 876
},
{
"epoch": 2.21,
"grad_norm": 14.969141006469727,
"learning_rate": 1.8954659949622164e-05,
"loss": 0.1005,
"step": 877
},
{
"epoch": 2.21,
"grad_norm": 7.990150451660156,
"learning_rate": 1.894206549118388e-05,
"loss": 0.2583,
"step": 878
},
{
"epoch": 2.21,
"grad_norm": 4.63574743270874,
"learning_rate": 1.8929471032745592e-05,
"loss": 0.2944,
"step": 879
},
{
"epoch": 2.22,
"grad_norm": 15.271684646606445,
"learning_rate": 1.8916876574307304e-05,
"loss": 0.2335,
"step": 880
},
{
"epoch": 2.22,
"grad_norm": 18.186588287353516,
"learning_rate": 1.890428211586902e-05,
"loss": 0.1945,
"step": 881
},
{
"epoch": 2.22,
"grad_norm": 12.56149959564209,
"learning_rate": 1.889168765743073e-05,
"loss": 0.1999,
"step": 882
},
{
"epoch": 2.22,
"grad_norm": 2.1376261711120605,
"learning_rate": 1.8879093198992442e-05,
"loss": 0.0223,
"step": 883
},
{
"epoch": 2.23,
"grad_norm": 22.14487075805664,
"learning_rate": 1.8866498740554158e-05,
"loss": 0.1304,
"step": 884
},
{
"epoch": 2.23,
"grad_norm": 8.436043739318848,
"learning_rate": 1.885390428211587e-05,
"loss": 0.2673,
"step": 885
},
{
"epoch": 2.23,
"grad_norm": 11.742157936096191,
"learning_rate": 1.8841309823677582e-05,
"loss": 0.1181,
"step": 886
},
{
"epoch": 2.23,
"grad_norm": 38.36399459838867,
"learning_rate": 1.8828715365239295e-05,
"loss": 0.4267,
"step": 887
},
{
"epoch": 2.24,
"grad_norm": 15.271390914916992,
"learning_rate": 1.8816120906801007e-05,
"loss": 0.1224,
"step": 888
},
{
"epoch": 2.24,
"grad_norm": 11.08383846282959,
"learning_rate": 1.880352644836272e-05,
"loss": 0.106,
"step": 889
},
{
"epoch": 2.24,
"grad_norm": 29.323549270629883,
"learning_rate": 1.8790931989924436e-05,
"loss": 0.155,
"step": 890
},
{
"epoch": 2.24,
"grad_norm": 9.078880310058594,
"learning_rate": 1.8778337531486145e-05,
"loss": 0.0812,
"step": 891
},
{
"epoch": 2.25,
"grad_norm": 25.136865615844727,
"learning_rate": 1.8765743073047857e-05,
"loss": 0.2506,
"step": 892
},
{
"epoch": 2.25,
"grad_norm": 16.840970993041992,
"learning_rate": 1.8753148614609573e-05,
"loss": 0.1291,
"step": 893
},
{
"epoch": 2.25,
"grad_norm": 17.921939849853516,
"learning_rate": 1.8740554156171285e-05,
"loss": 0.3605,
"step": 894
},
{
"epoch": 2.25,
"grad_norm": 11.916851997375488,
"learning_rate": 1.8727959697732998e-05,
"loss": 0.1311,
"step": 895
},
{
"epoch": 2.26,
"grad_norm": 9.328707695007324,
"learning_rate": 1.871536523929471e-05,
"loss": 0.1106,
"step": 896
},
{
"epoch": 2.26,
"grad_norm": 13.462206840515137,
"learning_rate": 1.8702770780856423e-05,
"loss": 0.6508,
"step": 897
},
{
"epoch": 2.26,
"grad_norm": 5.1701741218566895,
"learning_rate": 1.8690176322418135e-05,
"loss": 0.0745,
"step": 898
},
{
"epoch": 2.26,
"grad_norm": 27.52861213684082,
"learning_rate": 1.867758186397985e-05,
"loss": 0.1263,
"step": 899
},
{
"epoch": 2.27,
"grad_norm": 3.011115074157715,
"learning_rate": 1.8664987405541563e-05,
"loss": 0.0328,
"step": 900
},
{
"epoch": 2.27,
"grad_norm": 20.727210998535156,
"learning_rate": 1.8652392947103272e-05,
"loss": 0.1041,
"step": 901
},
{
"epoch": 2.27,
"grad_norm": 16.751338958740234,
"learning_rate": 1.8639798488664988e-05,
"loss": 0.1396,
"step": 902
},
{
"epoch": 2.27,
"grad_norm": 11.478860855102539,
"learning_rate": 1.86272040302267e-05,
"loss": 0.4094,
"step": 903
},
{
"epoch": 2.28,
"grad_norm": 2.986703395843506,
"learning_rate": 1.8614609571788416e-05,
"loss": 0.0394,
"step": 904
},
{
"epoch": 2.28,
"grad_norm": 9.955742835998535,
"learning_rate": 1.8602015113350125e-05,
"loss": 0.2274,
"step": 905
},
{
"epoch": 2.28,
"grad_norm": 3.6407182216644287,
"learning_rate": 1.8589420654911838e-05,
"loss": 0.2928,
"step": 906
},
{
"epoch": 2.28,
"grad_norm": 21.52337074279785,
"learning_rate": 1.8576826196473554e-05,
"loss": 0.3569,
"step": 907
},
{
"epoch": 2.29,
"grad_norm": 0.8511970639228821,
"learning_rate": 1.8564231738035266e-05,
"loss": 0.0187,
"step": 908
},
{
"epoch": 2.29,
"grad_norm": 11.474825859069824,
"learning_rate": 1.855163727959698e-05,
"loss": 0.1168,
"step": 909
},
{
"epoch": 2.29,
"grad_norm": 5.235669136047363,
"learning_rate": 1.853904282115869e-05,
"loss": 0.0495,
"step": 910
},
{
"epoch": 2.29,
"grad_norm": 10.870506286621094,
"learning_rate": 1.8526448362720403e-05,
"loss": 0.2467,
"step": 911
},
{
"epoch": 2.3,
"grad_norm": 31.614450454711914,
"learning_rate": 1.8513853904282116e-05,
"loss": 0.2447,
"step": 912
},
{
"epoch": 2.3,
"grad_norm": 11.93704605102539,
"learning_rate": 1.850125944584383e-05,
"loss": 0.3164,
"step": 913
},
{
"epoch": 2.3,
"grad_norm": 21.598426818847656,
"learning_rate": 1.8488664987405544e-05,
"loss": 0.0576,
"step": 914
},
{
"epoch": 2.3,
"grad_norm": 5.371317386627197,
"learning_rate": 1.8476070528967253e-05,
"loss": 0.0423,
"step": 915
},
{
"epoch": 2.31,
"grad_norm": 29.41996192932129,
"learning_rate": 1.846347607052897e-05,
"loss": 0.1859,
"step": 916
},
{
"epoch": 2.31,
"grad_norm": 7.363552570343018,
"learning_rate": 1.845088161209068e-05,
"loss": 0.557,
"step": 917
},
{
"epoch": 2.31,
"grad_norm": 32.41464614868164,
"learning_rate": 1.8438287153652394e-05,
"loss": 0.4631,
"step": 918
},
{
"epoch": 2.31,
"grad_norm": 13.250725746154785,
"learning_rate": 1.8425692695214106e-05,
"loss": 0.1992,
"step": 919
},
{
"epoch": 2.32,
"grad_norm": 34.75584030151367,
"learning_rate": 1.841309823677582e-05,
"loss": 0.4553,
"step": 920
},
{
"epoch": 2.32,
"grad_norm": 7.2384233474731445,
"learning_rate": 1.840050377833753e-05,
"loss": 0.2492,
"step": 921
},
{
"epoch": 2.32,
"grad_norm": 4.95755672454834,
"learning_rate": 1.8387909319899247e-05,
"loss": 0.2653,
"step": 922
},
{
"epoch": 2.32,
"grad_norm": 44.550514221191406,
"learning_rate": 1.837531486146096e-05,
"loss": 0.5278,
"step": 923
},
{
"epoch": 2.33,
"grad_norm": 13.902865409851074,
"learning_rate": 1.8362720403022668e-05,
"loss": 0.3654,
"step": 924
},
{
"epoch": 2.33,
"grad_norm": 2.8645730018615723,
"learning_rate": 1.8350125944584384e-05,
"loss": 0.024,
"step": 925
},
{
"epoch": 2.33,
"grad_norm": 2.747000217437744,
"learning_rate": 1.8337531486146097e-05,
"loss": 0.0349,
"step": 926
},
{
"epoch": 2.34,
"grad_norm": 8.224370956420898,
"learning_rate": 1.832493702770781e-05,
"loss": 0.4737,
"step": 927
},
{
"epoch": 2.34,
"grad_norm": 30.833358764648438,
"learning_rate": 1.8312342569269525e-05,
"loss": 0.094,
"step": 928
},
{
"epoch": 2.34,
"grad_norm": 3.3213651180267334,
"learning_rate": 1.8299748110831234e-05,
"loss": 0.0378,
"step": 929
},
{
"epoch": 2.34,
"grad_norm": 25.978818893432617,
"learning_rate": 1.8287153652392946e-05,
"loss": 0.2553,
"step": 930
},
{
"epoch": 2.35,
"grad_norm": 34.23870086669922,
"learning_rate": 1.8274559193954662e-05,
"loss": 0.406,
"step": 931
},
{
"epoch": 2.35,
"grad_norm": 24.33919906616211,
"learning_rate": 1.8261964735516374e-05,
"loss": 0.5579,
"step": 932
},
{
"epoch": 2.35,
"grad_norm": 10.28875732421875,
"learning_rate": 1.8249370277078084e-05,
"loss": 0.2994,
"step": 933
},
{
"epoch": 2.35,
"grad_norm": 18.206113815307617,
"learning_rate": 1.82367758186398e-05,
"loss": 0.7409,
"step": 934
},
{
"epoch": 2.36,
"grad_norm": 7.450280666351318,
"learning_rate": 1.8224181360201512e-05,
"loss": 0.5185,
"step": 935
},
{
"epoch": 2.36,
"grad_norm": 26.665252685546875,
"learning_rate": 1.8211586901763224e-05,
"loss": 0.279,
"step": 936
},
{
"epoch": 2.36,
"grad_norm": 32.06397247314453,
"learning_rate": 1.819899244332494e-05,
"loss": 0.2519,
"step": 937
},
{
"epoch": 2.36,
"grad_norm": 20.590030670166016,
"learning_rate": 1.818639798488665e-05,
"loss": 0.1598,
"step": 938
},
{
"epoch": 2.37,
"grad_norm": 13.61314868927002,
"learning_rate": 1.817380352644836e-05,
"loss": 0.1431,
"step": 939
},
{
"epoch": 2.37,
"grad_norm": 16.68808937072754,
"learning_rate": 1.8161209068010077e-05,
"loss": 0.0691,
"step": 940
},
{
"epoch": 2.37,
"grad_norm": 21.29247283935547,
"learning_rate": 1.814861460957179e-05,
"loss": 0.2522,
"step": 941
},
{
"epoch": 2.37,
"grad_norm": 12.311421394348145,
"learning_rate": 1.8136020151133502e-05,
"loss": 0.223,
"step": 942
},
{
"epoch": 2.38,
"grad_norm": 10.444890975952148,
"learning_rate": 1.8123425692695215e-05,
"loss": 0.1636,
"step": 943
},
{
"epoch": 2.38,
"grad_norm": 35.49920654296875,
"learning_rate": 1.8110831234256927e-05,
"loss": 0.8437,
"step": 944
},
{
"epoch": 2.38,
"grad_norm": 7.902152061462402,
"learning_rate": 1.809823677581864e-05,
"loss": 0.1288,
"step": 945
},
{
"epoch": 2.38,
"grad_norm": 4.148161888122559,
"learning_rate": 1.8085642317380355e-05,
"loss": 0.046,
"step": 946
},
{
"epoch": 2.39,
"grad_norm": 14.699132919311523,
"learning_rate": 1.8073047858942064e-05,
"loss": 0.1743,
"step": 947
},
{
"epoch": 2.39,
"grad_norm": 3.610971450805664,
"learning_rate": 1.8060453400503777e-05,
"loss": 0.0522,
"step": 948
},
{
"epoch": 2.39,
"grad_norm": 4.12473201751709,
"learning_rate": 1.8047858942065493e-05,
"loss": 0.0616,
"step": 949
},
{
"epoch": 2.39,
"grad_norm": 5.2726616859436035,
"learning_rate": 1.8035264483627205e-05,
"loss": 0.0438,
"step": 950
},
{
"epoch": 2.4,
"grad_norm": 13.556662559509277,
"learning_rate": 1.8022670025188917e-05,
"loss": 0.1472,
"step": 951
},
{
"epoch": 2.4,
"grad_norm": 45.24021530151367,
"learning_rate": 1.801007556675063e-05,
"loss": 0.1432,
"step": 952
},
{
"epoch": 2.4,
"grad_norm": 19.90587043762207,
"learning_rate": 1.7997481108312342e-05,
"loss": 0.3582,
"step": 953
},
{
"epoch": 2.4,
"grad_norm": 10.678705215454102,
"learning_rate": 1.7984886649874055e-05,
"loss": 0.0898,
"step": 954
},
{
"epoch": 2.41,
"grad_norm": 12.450896263122559,
"learning_rate": 1.797229219143577e-05,
"loss": 0.1159,
"step": 955
},
{
"epoch": 2.41,
"grad_norm": 4.022710800170898,
"learning_rate": 1.7959697732997483e-05,
"loss": 0.054,
"step": 956
},
{
"epoch": 2.41,
"grad_norm": 18.864871978759766,
"learning_rate": 1.7947103274559192e-05,
"loss": 0.2458,
"step": 957
},
{
"epoch": 2.41,
"grad_norm": 21.411914825439453,
"learning_rate": 1.7934508816120908e-05,
"loss": 0.1763,
"step": 958
},
{
"epoch": 2.42,
"grad_norm": 14.408418655395508,
"learning_rate": 1.792191435768262e-05,
"loss": 0.2913,
"step": 959
},
{
"epoch": 2.42,
"grad_norm": 10.474907875061035,
"learning_rate": 1.7909319899244333e-05,
"loss": 0.0939,
"step": 960
},
{
"epoch": 2.42,
"grad_norm": 32.35608673095703,
"learning_rate": 1.7896725440806045e-05,
"loss": 0.4675,
"step": 961
},
{
"epoch": 2.42,
"grad_norm": 0.9104124307632446,
"learning_rate": 1.7884130982367758e-05,
"loss": 0.0231,
"step": 962
},
{
"epoch": 2.43,
"grad_norm": 0.9205286502838135,
"learning_rate": 1.787153652392947e-05,
"loss": 0.0203,
"step": 963
},
{
"epoch": 2.43,
"grad_norm": 15.181899070739746,
"learning_rate": 1.7858942065491186e-05,
"loss": 0.1106,
"step": 964
},
{
"epoch": 2.43,
"grad_norm": 4.9965033531188965,
"learning_rate": 1.7846347607052898e-05,
"loss": 0.0602,
"step": 965
},
{
"epoch": 2.43,
"grad_norm": 4.398914813995361,
"learning_rate": 1.7833753148614607e-05,
"loss": 0.0526,
"step": 966
},
{
"epoch": 2.44,
"grad_norm": 29.782489776611328,
"learning_rate": 1.7821158690176323e-05,
"loss": 0.1758,
"step": 967
},
{
"epoch": 2.44,
"grad_norm": 11.924981117248535,
"learning_rate": 1.7808564231738035e-05,
"loss": 0.3013,
"step": 968
},
{
"epoch": 2.44,
"grad_norm": 15.300978660583496,
"learning_rate": 1.7795969773299748e-05,
"loss": 0.0943,
"step": 969
},
{
"epoch": 2.44,
"grad_norm": 8.09597396850586,
"learning_rate": 1.7783375314861464e-05,
"loss": 0.0648,
"step": 970
},
{
"epoch": 2.45,
"grad_norm": 20.45931625366211,
"learning_rate": 1.7770780856423173e-05,
"loss": 0.4196,
"step": 971
},
{
"epoch": 2.45,
"grad_norm": 5.484227180480957,
"learning_rate": 1.7758186397984885e-05,
"loss": 0.0419,
"step": 972
},
{
"epoch": 2.45,
"grad_norm": 10.987439155578613,
"learning_rate": 1.77455919395466e-05,
"loss": 0.0789,
"step": 973
},
{
"epoch": 2.45,
"grad_norm": 20.312944412231445,
"learning_rate": 1.7732997481108313e-05,
"loss": 0.5257,
"step": 974
},
{
"epoch": 2.46,
"grad_norm": 25.339521408081055,
"learning_rate": 1.7720403022670026e-05,
"loss": 0.1372,
"step": 975
},
{
"epoch": 2.46,
"grad_norm": 17.586299896240234,
"learning_rate": 1.7707808564231738e-05,
"loss": 0.1025,
"step": 976
},
{
"epoch": 2.46,
"grad_norm": 18.70049476623535,
"learning_rate": 1.769521410579345e-05,
"loss": 0.055,
"step": 977
},
{
"epoch": 2.46,
"grad_norm": 9.797844886779785,
"learning_rate": 1.7682619647355167e-05,
"loss": 0.6287,
"step": 978
},
{
"epoch": 2.47,
"grad_norm": 1.4952588081359863,
"learning_rate": 1.767002518891688e-05,
"loss": 0.0154,
"step": 979
},
{
"epoch": 2.47,
"grad_norm": 0.32095134258270264,
"learning_rate": 1.7657430730478588e-05,
"loss": 0.0106,
"step": 980
},
{
"epoch": 2.47,
"grad_norm": 1.3020259141921997,
"learning_rate": 1.7644836272040304e-05,
"loss": 0.0136,
"step": 981
},
{
"epoch": 2.47,
"grad_norm": 44.08338928222656,
"learning_rate": 1.7632241813602016e-05,
"loss": 0.8547,
"step": 982
},
{
"epoch": 2.48,
"grad_norm": 21.83006477355957,
"learning_rate": 1.761964735516373e-05,
"loss": 0.3798,
"step": 983
},
{
"epoch": 2.48,
"grad_norm": 43.243038177490234,
"learning_rate": 1.760705289672544e-05,
"loss": 0.692,
"step": 984
},
{
"epoch": 2.48,
"grad_norm": 16.481504440307617,
"learning_rate": 1.7594458438287154e-05,
"loss": 0.548,
"step": 985
},
{
"epoch": 2.48,
"grad_norm": 26.669479370117188,
"learning_rate": 1.7581863979848866e-05,
"loss": 1.2603,
"step": 986
},
{
"epoch": 2.49,
"grad_norm": 17.41109275817871,
"learning_rate": 1.7569269521410582e-05,
"loss": 0.0898,
"step": 987
},
{
"epoch": 2.49,
"grad_norm": 11.004029273986816,
"learning_rate": 1.7556675062972294e-05,
"loss": 0.0521,
"step": 988
},
{
"epoch": 2.49,
"grad_norm": 16.72394561767578,
"learning_rate": 1.7544080604534003e-05,
"loss": 0.4888,
"step": 989
},
{
"epoch": 2.49,
"grad_norm": 7.188633441925049,
"learning_rate": 1.753148614609572e-05,
"loss": 0.031,
"step": 990
},
{
"epoch": 2.5,
"grad_norm": 0.7417293787002563,
"learning_rate": 1.751889168765743e-05,
"loss": 0.014,
"step": 991
},
{
"epoch": 2.5,
"grad_norm": 4.745822906494141,
"learning_rate": 1.7506297229219144e-05,
"loss": 0.0298,
"step": 992
},
{
"epoch": 2.5,
"grad_norm": 20.97715187072754,
"learning_rate": 1.749370277078086e-05,
"loss": 0.2558,
"step": 993
},
{
"epoch": 2.5,
"grad_norm": 27.40770149230957,
"learning_rate": 1.748110831234257e-05,
"loss": 0.1127,
"step": 994
},
{
"epoch": 2.51,
"grad_norm": 19.053752899169922,
"learning_rate": 1.746851385390428e-05,
"loss": 0.2859,
"step": 995
},
{
"epoch": 2.51,
"grad_norm": 41.415008544921875,
"learning_rate": 1.7455919395465997e-05,
"loss": 0.3327,
"step": 996
},
{
"epoch": 2.51,
"grad_norm": 10.322839736938477,
"learning_rate": 1.744332493702771e-05,
"loss": 0.2526,
"step": 997
},
{
"epoch": 2.51,
"grad_norm": 12.210159301757812,
"learning_rate": 1.743073047858942e-05,
"loss": 0.2203,
"step": 998
},
{
"epoch": 2.52,
"grad_norm": 21.103782653808594,
"learning_rate": 1.7418136020151134e-05,
"loss": 0.6672,
"step": 999
},
{
"epoch": 2.52,
"grad_norm": 3.4252419471740723,
"learning_rate": 1.7405541561712847e-05,
"loss": 0.025,
"step": 1000
},
{
"epoch": 2.52,
"grad_norm": 26.5319766998291,
"learning_rate": 1.739294710327456e-05,
"loss": 0.4177,
"step": 1001
},
{
"epoch": 2.52,
"grad_norm": 11.68613338470459,
"learning_rate": 1.7380352644836275e-05,
"loss": 0.2262,
"step": 1002
},
{
"epoch": 2.53,
"grad_norm": 14.039557456970215,
"learning_rate": 1.7367758186397984e-05,
"loss": 0.2382,
"step": 1003
},
{
"epoch": 2.53,
"grad_norm": 20.03414535522461,
"learning_rate": 1.7355163727959696e-05,
"loss": 0.2475,
"step": 1004
},
{
"epoch": 2.53,
"grad_norm": 29.128925323486328,
"learning_rate": 1.7342569269521412e-05,
"loss": 0.1902,
"step": 1005
},
{
"epoch": 2.53,
"grad_norm": 7.585634231567383,
"learning_rate": 1.7329974811083125e-05,
"loss": 0.0359,
"step": 1006
},
{
"epoch": 2.54,
"grad_norm": 0.3999019265174866,
"learning_rate": 1.7317380352644837e-05,
"loss": 0.0117,
"step": 1007
},
{
"epoch": 2.54,
"grad_norm": 12.283281326293945,
"learning_rate": 1.730478589420655e-05,
"loss": 0.0946,
"step": 1008
},
{
"epoch": 2.54,
"grad_norm": 9.220109939575195,
"learning_rate": 1.7292191435768262e-05,
"loss": 0.0397,
"step": 1009
},
{
"epoch": 2.54,
"grad_norm": 3.8018109798431396,
"learning_rate": 1.7279596977329974e-05,
"loss": 0.0305,
"step": 1010
},
{
"epoch": 2.55,
"grad_norm": 40.12253189086914,
"learning_rate": 1.726700251889169e-05,
"loss": 0.4056,
"step": 1011
},
{
"epoch": 2.55,
"grad_norm": 13.994961738586426,
"learning_rate": 1.72544080604534e-05,
"loss": 0.3435,
"step": 1012
},
{
"epoch": 2.55,
"grad_norm": 19.624113082885742,
"learning_rate": 1.7241813602015112e-05,
"loss": 0.4751,
"step": 1013
},
{
"epoch": 2.55,
"grad_norm": 20.66043472290039,
"learning_rate": 1.7229219143576828e-05,
"loss": 0.1713,
"step": 1014
},
{
"epoch": 2.56,
"grad_norm": 48.67464065551758,
"learning_rate": 1.721662468513854e-05,
"loss": 0.3976,
"step": 1015
},
{
"epoch": 2.56,
"grad_norm": 30.28778839111328,
"learning_rate": 1.7204030226700252e-05,
"loss": 0.3882,
"step": 1016
},
{
"epoch": 2.56,
"grad_norm": 31.674646377563477,
"learning_rate": 1.7191435768261965e-05,
"loss": 0.6509,
"step": 1017
},
{
"epoch": 2.56,
"grad_norm": 15.582225799560547,
"learning_rate": 1.7178841309823677e-05,
"loss": 0.5633,
"step": 1018
},
{
"epoch": 2.57,
"grad_norm": 8.46904182434082,
"learning_rate": 1.716624685138539e-05,
"loss": 0.0472,
"step": 1019
},
{
"epoch": 2.57,
"grad_norm": 6.153964519500732,
"learning_rate": 1.7153652392947106e-05,
"loss": 0.2258,
"step": 1020
},
{
"epoch": 2.57,
"grad_norm": 18.53000259399414,
"learning_rate": 1.7141057934508818e-05,
"loss": 0.2525,
"step": 1021
},
{
"epoch": 2.57,
"grad_norm": 7.518229007720947,
"learning_rate": 1.7128463476070527e-05,
"loss": 0.052,
"step": 1022
},
{
"epoch": 2.58,
"grad_norm": 16.217754364013672,
"learning_rate": 1.7115869017632243e-05,
"loss": 0.2544,
"step": 1023
},
{
"epoch": 2.58,
"grad_norm": 21.273475646972656,
"learning_rate": 1.7103274559193955e-05,
"loss": 0.1248,
"step": 1024
},
{
"epoch": 2.58,
"grad_norm": 10.482606887817383,
"learning_rate": 1.7090680100755668e-05,
"loss": 0.0675,
"step": 1025
},
{
"epoch": 2.58,
"grad_norm": 18.854312896728516,
"learning_rate": 1.707808564231738e-05,
"loss": 0.1067,
"step": 1026
},
{
"epoch": 2.59,
"grad_norm": 11.522710800170898,
"learning_rate": 1.7065491183879093e-05,
"loss": 0.2552,
"step": 1027
},
{
"epoch": 2.59,
"grad_norm": 15.483019828796387,
"learning_rate": 1.7052896725440805e-05,
"loss": 0.3915,
"step": 1028
},
{
"epoch": 2.59,
"grad_norm": 8.645544052124023,
"learning_rate": 1.704030226700252e-05,
"loss": 0.0815,
"step": 1029
},
{
"epoch": 2.59,
"grad_norm": 18.710952758789062,
"learning_rate": 1.7027707808564233e-05,
"loss": 0.3915,
"step": 1030
},
{
"epoch": 2.6,
"grad_norm": 14.992968559265137,
"learning_rate": 1.7015113350125942e-05,
"loss": 0.2023,
"step": 1031
},
{
"epoch": 2.6,
"grad_norm": 14.996759414672852,
"learning_rate": 1.7002518891687658e-05,
"loss": 0.2511,
"step": 1032
},
{
"epoch": 2.6,
"grad_norm": 6.331859111785889,
"learning_rate": 1.698992443324937e-05,
"loss": 0.0837,
"step": 1033
},
{
"epoch": 2.6,
"grad_norm": 2.4125723838806152,
"learning_rate": 1.6977329974811083e-05,
"loss": 0.0321,
"step": 1034
},
{
"epoch": 2.61,
"grad_norm": 14.080228805541992,
"learning_rate": 1.69647355163728e-05,
"loss": 0.8303,
"step": 1035
},
{
"epoch": 2.61,
"grad_norm": 11.021700859069824,
"learning_rate": 1.6952141057934508e-05,
"loss": 0.3159,
"step": 1036
},
{
"epoch": 2.61,
"grad_norm": 1.5516949892044067,
"learning_rate": 1.693954659949622e-05,
"loss": 0.0211,
"step": 1037
},
{
"epoch": 2.61,
"grad_norm": 24.504554748535156,
"learning_rate": 1.6926952141057936e-05,
"loss": 0.2686,
"step": 1038
},
{
"epoch": 2.62,
"grad_norm": 16.056262969970703,
"learning_rate": 1.691435768261965e-05,
"loss": 0.1862,
"step": 1039
},
{
"epoch": 2.62,
"grad_norm": 23.649192810058594,
"learning_rate": 1.6901763224181357e-05,
"loss": 0.7134,
"step": 1040
},
{
"epoch": 2.62,
"grad_norm": 4.870745658874512,
"learning_rate": 1.6889168765743073e-05,
"loss": 0.0574,
"step": 1041
},
{
"epoch": 2.62,
"grad_norm": 4.79144287109375,
"learning_rate": 1.6876574307304786e-05,
"loss": 0.1917,
"step": 1042
},
{
"epoch": 2.63,
"grad_norm": 6.743935585021973,
"learning_rate": 1.6863979848866498e-05,
"loss": 0.2646,
"step": 1043
},
{
"epoch": 2.63,
"grad_norm": 19.601837158203125,
"learning_rate": 1.6851385390428214e-05,
"loss": 0.2423,
"step": 1044
},
{
"epoch": 2.63,
"grad_norm": 2.839597225189209,
"learning_rate": 1.6838790931989923e-05,
"loss": 0.0257,
"step": 1045
},
{
"epoch": 2.63,
"grad_norm": 5.611330509185791,
"learning_rate": 1.6826196473551635e-05,
"loss": 0.2994,
"step": 1046
},
{
"epoch": 2.64,
"grad_norm": 9.278888702392578,
"learning_rate": 1.681360201511335e-05,
"loss": 0.311,
"step": 1047
},
{
"epoch": 2.64,
"grad_norm": 14.311319351196289,
"learning_rate": 1.6801007556675064e-05,
"loss": 0.0693,
"step": 1048
},
{
"epoch": 2.64,
"grad_norm": 22.729650497436523,
"learning_rate": 1.678841309823678e-05,
"loss": 0.2257,
"step": 1049
},
{
"epoch": 2.64,
"grad_norm": 12.39243221282959,
"learning_rate": 1.677581863979849e-05,
"loss": 0.3833,
"step": 1050
},
{
"epoch": 2.65,
"grad_norm": 14.148759841918945,
"learning_rate": 1.67632241813602e-05,
"loss": 0.2577,
"step": 1051
},
{
"epoch": 2.65,
"grad_norm": 20.707704544067383,
"learning_rate": 1.6750629722921917e-05,
"loss": 0.4987,
"step": 1052
},
{
"epoch": 2.65,
"grad_norm": 9.586409568786621,
"learning_rate": 1.673803526448363e-05,
"loss": 0.8248,
"step": 1053
},
{
"epoch": 2.65,
"grad_norm": 6.405531406402588,
"learning_rate": 1.6725440806045338e-05,
"loss": 0.3748,
"step": 1054
},
{
"epoch": 2.66,
"grad_norm": 9.89402961730957,
"learning_rate": 1.6712846347607054e-05,
"loss": 0.0682,
"step": 1055
},
{
"epoch": 2.66,
"grad_norm": 2.5575547218322754,
"learning_rate": 1.6700251889168767e-05,
"loss": 0.0412,
"step": 1056
},
{
"epoch": 2.66,
"grad_norm": 13.999021530151367,
"learning_rate": 1.668765743073048e-05,
"loss": 0.3433,
"step": 1057
},
{
"epoch": 2.66,
"grad_norm": 17.20004653930664,
"learning_rate": 1.6675062972292195e-05,
"loss": 0.1682,
"step": 1058
},
{
"epoch": 2.67,
"grad_norm": 15.041619300842285,
"learning_rate": 1.6662468513853904e-05,
"loss": 0.3703,
"step": 1059
},
{
"epoch": 2.67,
"grad_norm": 19.173490524291992,
"learning_rate": 1.6649874055415616e-05,
"loss": 0.234,
"step": 1060
},
{
"epoch": 2.67,
"grad_norm": 4.27054500579834,
"learning_rate": 1.6637279596977332e-05,
"loss": 0.0553,
"step": 1061
},
{
"epoch": 2.68,
"grad_norm": 19.725751876831055,
"learning_rate": 1.6624685138539044e-05,
"loss": 0.3931,
"step": 1062
},
{
"epoch": 2.68,
"grad_norm": 12.060813903808594,
"learning_rate": 1.6612090680100757e-05,
"loss": 0.1505,
"step": 1063
},
{
"epoch": 2.68,
"grad_norm": 29.19813346862793,
"learning_rate": 1.659949622166247e-05,
"loss": 0.1956,
"step": 1064
},
{
"epoch": 2.68,
"grad_norm": 5.451413154602051,
"learning_rate": 1.6586901763224182e-05,
"loss": 0.0523,
"step": 1065
},
{
"epoch": 2.69,
"grad_norm": 21.697498321533203,
"learning_rate": 1.6574307304785894e-05,
"loss": 0.3233,
"step": 1066
},
{
"epoch": 2.69,
"grad_norm": 16.664989471435547,
"learning_rate": 1.656171284634761e-05,
"loss": 0.1331,
"step": 1067
},
{
"epoch": 2.69,
"grad_norm": 9.683480262756348,
"learning_rate": 1.654911838790932e-05,
"loss": 0.221,
"step": 1068
},
{
"epoch": 2.69,
"grad_norm": 12.325887680053711,
"learning_rate": 1.653652392947103e-05,
"loss": 0.0978,
"step": 1069
},
{
"epoch": 2.7,
"grad_norm": 6.554139614105225,
"learning_rate": 1.6523929471032747e-05,
"loss": 0.1858,
"step": 1070
},
{
"epoch": 2.7,
"grad_norm": 18.414140701293945,
"learning_rate": 1.651133501259446e-05,
"loss": 0.4893,
"step": 1071
},
{
"epoch": 2.7,
"grad_norm": 9.108610153198242,
"learning_rate": 1.6498740554156172e-05,
"loss": 0.0853,
"step": 1072
},
{
"epoch": 2.7,
"grad_norm": 12.5266752243042,
"learning_rate": 1.6486146095717885e-05,
"loss": 0.478,
"step": 1073
},
{
"epoch": 2.71,
"grad_norm": 1.9131548404693604,
"learning_rate": 1.6473551637279597e-05,
"loss": 0.0385,
"step": 1074
},
{
"epoch": 2.71,
"grad_norm": 22.296390533447266,
"learning_rate": 1.646095717884131e-05,
"loss": 0.3907,
"step": 1075
},
{
"epoch": 2.71,
"grad_norm": 7.380468368530273,
"learning_rate": 1.6448362720403025e-05,
"loss": 0.2112,
"step": 1076
},
{
"epoch": 2.71,
"grad_norm": 8.03321647644043,
"learning_rate": 1.6435768261964738e-05,
"loss": 0.268,
"step": 1077
},
{
"epoch": 2.72,
"grad_norm": 28.10780906677246,
"learning_rate": 1.6423173803526447e-05,
"loss": 0.1584,
"step": 1078
},
{
"epoch": 2.72,
"grad_norm": 14.223876953125,
"learning_rate": 1.6410579345088163e-05,
"loss": 0.28,
"step": 1079
},
{
"epoch": 2.72,
"grad_norm": 6.363096237182617,
"learning_rate": 1.6397984886649875e-05,
"loss": 0.3052,
"step": 1080
},
{
"epoch": 2.72,
"grad_norm": 11.782586097717285,
"learning_rate": 1.6385390428211587e-05,
"loss": 0.2498,
"step": 1081
},
{
"epoch": 2.73,
"grad_norm": 6.359012126922607,
"learning_rate": 1.63727959697733e-05,
"loss": 0.3136,
"step": 1082
},
{
"epoch": 2.73,
"grad_norm": 6.206680774688721,
"learning_rate": 1.6360201511335012e-05,
"loss": 0.106,
"step": 1083
},
{
"epoch": 2.73,
"grad_norm": 13.259072303771973,
"learning_rate": 1.6347607052896725e-05,
"loss": 0.4578,
"step": 1084
},
{
"epoch": 2.73,
"grad_norm": 11.283618927001953,
"learning_rate": 1.633501259445844e-05,
"loss": 0.1433,
"step": 1085
},
{
"epoch": 2.74,
"grad_norm": 14.00245475769043,
"learning_rate": 1.6322418136020153e-05,
"loss": 0.1216,
"step": 1086
},
{
"epoch": 2.74,
"grad_norm": 20.484031677246094,
"learning_rate": 1.6309823677581862e-05,
"loss": 0.6095,
"step": 1087
},
{
"epoch": 2.74,
"grad_norm": 4.319401264190674,
"learning_rate": 1.6297229219143578e-05,
"loss": 0.3035,
"step": 1088
},
{
"epoch": 2.74,
"grad_norm": 8.893186569213867,
"learning_rate": 1.628463476070529e-05,
"loss": 0.2453,
"step": 1089
},
{
"epoch": 2.75,
"grad_norm": 20.96607208251953,
"learning_rate": 1.6272040302267003e-05,
"loss": 0.3969,
"step": 1090
},
{
"epoch": 2.75,
"grad_norm": 13.960694313049316,
"learning_rate": 1.6259445843828715e-05,
"loss": 0.2462,
"step": 1091
},
{
"epoch": 2.75,
"grad_norm": 9.796236991882324,
"learning_rate": 1.6246851385390428e-05,
"loss": 0.2312,
"step": 1092
},
{
"epoch": 2.75,
"grad_norm": 23.649677276611328,
"learning_rate": 1.623425692695214e-05,
"loss": 0.2756,
"step": 1093
},
{
"epoch": 2.76,
"grad_norm": 11.236509323120117,
"learning_rate": 1.6221662468513856e-05,
"loss": 0.1333,
"step": 1094
},
{
"epoch": 2.76,
"grad_norm": 19.394977569580078,
"learning_rate": 1.6209068010075568e-05,
"loss": 0.6215,
"step": 1095
},
{
"epoch": 2.76,
"grad_norm": 5.315592288970947,
"learning_rate": 1.6196473551637277e-05,
"loss": 0.0702,
"step": 1096
},
{
"epoch": 2.76,
"grad_norm": 18.263957977294922,
"learning_rate": 1.6183879093198993e-05,
"loss": 0.3489,
"step": 1097
},
{
"epoch": 2.77,
"grad_norm": 12.144902229309082,
"learning_rate": 1.6171284634760705e-05,
"loss": 0.1314,
"step": 1098
},
{
"epoch": 2.77,
"grad_norm": 10.130423545837402,
"learning_rate": 1.6158690176322418e-05,
"loss": 0.1701,
"step": 1099
},
{
"epoch": 2.77,
"grad_norm": 15.319389343261719,
"learning_rate": 1.6146095717884134e-05,
"loss": 0.1384,
"step": 1100
},
{
"epoch": 2.77,
"grad_norm": 3.53900146484375,
"learning_rate": 1.6133501259445843e-05,
"loss": 0.3098,
"step": 1101
},
{
"epoch": 2.78,
"grad_norm": 2.5617988109588623,
"learning_rate": 1.6120906801007555e-05,
"loss": 0.0469,
"step": 1102
},
{
"epoch": 2.78,
"grad_norm": 7.703180313110352,
"learning_rate": 1.610831234256927e-05,
"loss": 0.2913,
"step": 1103
},
{
"epoch": 2.78,
"grad_norm": 9.907478332519531,
"learning_rate": 1.6095717884130983e-05,
"loss": 0.34,
"step": 1104
},
{
"epoch": 2.78,
"grad_norm": 6.849803924560547,
"learning_rate": 1.6083123425692692e-05,
"loss": 0.253,
"step": 1105
},
{
"epoch": 2.79,
"grad_norm": 13.280983924865723,
"learning_rate": 1.607052896725441e-05,
"loss": 0.3651,
"step": 1106
},
{
"epoch": 2.79,
"grad_norm": 13.153647422790527,
"learning_rate": 1.605793450881612e-05,
"loss": 0.3285,
"step": 1107
},
{
"epoch": 2.79,
"grad_norm": 8.686355590820312,
"learning_rate": 1.6045340050377833e-05,
"loss": 0.3197,
"step": 1108
},
{
"epoch": 2.79,
"grad_norm": 7.679217338562012,
"learning_rate": 1.603274559193955e-05,
"loss": 0.3772,
"step": 1109
},
{
"epoch": 2.8,
"grad_norm": 3.92250657081604,
"learning_rate": 1.6020151133501258e-05,
"loss": 0.0491,
"step": 1110
},
{
"epoch": 2.8,
"grad_norm": 8.080659866333008,
"learning_rate": 1.600755667506297e-05,
"loss": 0.1022,
"step": 1111
},
{
"epoch": 2.8,
"grad_norm": 10.759270668029785,
"learning_rate": 1.5994962216624686e-05,
"loss": 0.2858,
"step": 1112
},
{
"epoch": 2.8,
"grad_norm": 2.1772842407226562,
"learning_rate": 1.59823677581864e-05,
"loss": 0.0408,
"step": 1113
},
{
"epoch": 2.81,
"grad_norm": 17.511062622070312,
"learning_rate": 1.596977329974811e-05,
"loss": 0.2311,
"step": 1114
},
{
"epoch": 2.81,
"grad_norm": 3.6058473587036133,
"learning_rate": 1.5957178841309824e-05,
"loss": 0.0536,
"step": 1115
},
{
"epoch": 2.81,
"grad_norm": 12.71088981628418,
"learning_rate": 1.5944584382871536e-05,
"loss": 0.1877,
"step": 1116
},
{
"epoch": 2.81,
"grad_norm": 4.7909345626831055,
"learning_rate": 1.593198992443325e-05,
"loss": 0.2639,
"step": 1117
},
{
"epoch": 2.82,
"grad_norm": 19.712451934814453,
"learning_rate": 1.5919395465994964e-05,
"loss": 0.3709,
"step": 1118
},
{
"epoch": 2.82,
"grad_norm": 25.3231258392334,
"learning_rate": 1.5906801007556673e-05,
"loss": 0.3071,
"step": 1119
},
{
"epoch": 2.82,
"grad_norm": 23.117774963378906,
"learning_rate": 1.589420654911839e-05,
"loss": 0.3033,
"step": 1120
},
{
"epoch": 2.82,
"grad_norm": 16.745607376098633,
"learning_rate": 1.58816120906801e-05,
"loss": 0.3242,
"step": 1121
},
{
"epoch": 2.83,
"grad_norm": 12.918922424316406,
"learning_rate": 1.5869017632241814e-05,
"loss": 0.1945,
"step": 1122
},
{
"epoch": 2.83,
"grad_norm": 12.439324378967285,
"learning_rate": 1.585642317380353e-05,
"loss": 0.3462,
"step": 1123
},
{
"epoch": 2.83,
"grad_norm": 12.006231307983398,
"learning_rate": 1.584382871536524e-05,
"loss": 0.3037,
"step": 1124
},
{
"epoch": 2.83,
"grad_norm": 19.369873046875,
"learning_rate": 1.583123425692695e-05,
"loss": 0.4054,
"step": 1125
},
{
"epoch": 2.84,
"grad_norm": 7.270785331726074,
"learning_rate": 1.5818639798488667e-05,
"loss": 0.1068,
"step": 1126
},
{
"epoch": 2.84,
"grad_norm": 16.919490814208984,
"learning_rate": 1.580604534005038e-05,
"loss": 0.1851,
"step": 1127
},
{
"epoch": 2.84,
"grad_norm": 14.456689834594727,
"learning_rate": 1.5793450881612092e-05,
"loss": 0.2693,
"step": 1128
},
{
"epoch": 2.84,
"grad_norm": 32.42156982421875,
"learning_rate": 1.5780856423173804e-05,
"loss": 0.4212,
"step": 1129
},
{
"epoch": 2.85,
"grad_norm": 14.696311950683594,
"learning_rate": 1.5768261964735517e-05,
"loss": 0.3479,
"step": 1130
},
{
"epoch": 2.85,
"grad_norm": 5.063379287719727,
"learning_rate": 1.575566750629723e-05,
"loss": 0.0615,
"step": 1131
},
{
"epoch": 2.85,
"grad_norm": 18.039127349853516,
"learning_rate": 1.5743073047858945e-05,
"loss": 0.2822,
"step": 1132
},
{
"epoch": 2.85,
"grad_norm": 10.71660327911377,
"learning_rate": 1.5730478589420654e-05,
"loss": 0.0876,
"step": 1133
},
{
"epoch": 2.86,
"grad_norm": 13.763422012329102,
"learning_rate": 1.5717884130982367e-05,
"loss": 0.1789,
"step": 1134
},
{
"epoch": 2.86,
"grad_norm": 18.239368438720703,
"learning_rate": 1.5705289672544082e-05,
"loss": 0.621,
"step": 1135
},
{
"epoch": 2.86,
"grad_norm": 19.79184341430664,
"learning_rate": 1.5692695214105795e-05,
"loss": 0.3946,
"step": 1136
},
{
"epoch": 2.86,
"grad_norm": 13.332741737365723,
"learning_rate": 1.5680100755667507e-05,
"loss": 0.3358,
"step": 1137
},
{
"epoch": 2.87,
"grad_norm": 12.152838706970215,
"learning_rate": 1.566750629722922e-05,
"loss": 0.3625,
"step": 1138
},
{
"epoch": 2.87,
"grad_norm": 19.56678009033203,
"learning_rate": 1.5654911838790932e-05,
"loss": 0.3858,
"step": 1139
},
{
"epoch": 2.87,
"grad_norm": 25.041582107543945,
"learning_rate": 1.5642317380352644e-05,
"loss": 0.374,
"step": 1140
},
{
"epoch": 2.87,
"grad_norm": 12.966362953186035,
"learning_rate": 1.562972292191436e-05,
"loss": 0.1982,
"step": 1141
},
{
"epoch": 2.88,
"grad_norm": 4.819403648376465,
"learning_rate": 1.5617128463476073e-05,
"loss": 0.059,
"step": 1142
},
{
"epoch": 2.88,
"grad_norm": 11.310750007629395,
"learning_rate": 1.5604534005037782e-05,
"loss": 0.2802,
"step": 1143
},
{
"epoch": 2.88,
"grad_norm": 18.544479370117188,
"learning_rate": 1.5591939546599498e-05,
"loss": 0.3296,
"step": 1144
},
{
"epoch": 2.88,
"grad_norm": 5.440631866455078,
"learning_rate": 1.557934508816121e-05,
"loss": 0.0817,
"step": 1145
},
{
"epoch": 2.89,
"grad_norm": 17.53709602355957,
"learning_rate": 1.5566750629722922e-05,
"loss": 0.2045,
"step": 1146
},
{
"epoch": 2.89,
"grad_norm": 5.029760360717773,
"learning_rate": 1.5554156171284635e-05,
"loss": 0.0772,
"step": 1147
},
{
"epoch": 2.89,
"grad_norm": 15.92929744720459,
"learning_rate": 1.5541561712846347e-05,
"loss": 0.2221,
"step": 1148
},
{
"epoch": 2.89,
"grad_norm": 14.654315948486328,
"learning_rate": 1.552896725440806e-05,
"loss": 0.194,
"step": 1149
},
{
"epoch": 2.9,
"grad_norm": 10.082984924316406,
"learning_rate": 1.5516372795969776e-05,
"loss": 0.3038,
"step": 1150
},
{
"epoch": 2.9,
"grad_norm": 12.362741470336914,
"learning_rate": 1.5503778337531488e-05,
"loss": 0.1464,
"step": 1151
},
{
"epoch": 2.9,
"grad_norm": 11.521105766296387,
"learning_rate": 1.5491183879093197e-05,
"loss": 0.2223,
"step": 1152
},
{
"epoch": 2.9,
"grad_norm": 3.9992454051971436,
"learning_rate": 1.5478589420654913e-05,
"loss": 0.0534,
"step": 1153
},
{
"epoch": 2.91,
"grad_norm": 9.422941207885742,
"learning_rate": 1.5465994962216625e-05,
"loss": 0.1071,
"step": 1154
},
{
"epoch": 2.91,
"grad_norm": 6.80059289932251,
"learning_rate": 1.5453400503778338e-05,
"loss": 0.3005,
"step": 1155
},
{
"epoch": 2.91,
"grad_norm": 5.678284168243408,
"learning_rate": 1.5440806045340053e-05,
"loss": 0.0574,
"step": 1156
},
{
"epoch": 2.91,
"grad_norm": 4.542283058166504,
"learning_rate": 1.5428211586901763e-05,
"loss": 0.2604,
"step": 1157
},
{
"epoch": 2.92,
"grad_norm": 4.583611011505127,
"learning_rate": 1.5415617128463475e-05,
"loss": 0.0593,
"step": 1158
},
{
"epoch": 2.92,
"grad_norm": 9.688311576843262,
"learning_rate": 1.540302267002519e-05,
"loss": 0.3942,
"step": 1159
},
{
"epoch": 2.92,
"grad_norm": 13.455863952636719,
"learning_rate": 1.5390428211586903e-05,
"loss": 0.0816,
"step": 1160
},
{
"epoch": 2.92,
"grad_norm": 18.235370635986328,
"learning_rate": 1.5377833753148612e-05,
"loss": 0.1245,
"step": 1161
},
{
"epoch": 2.93,
"grad_norm": 11.47364330291748,
"learning_rate": 1.5365239294710328e-05,
"loss": 0.2704,
"step": 1162
},
{
"epoch": 2.93,
"grad_norm": 9.850740432739258,
"learning_rate": 1.535264483627204e-05,
"loss": 0.2232,
"step": 1163
},
{
"epoch": 2.93,
"grad_norm": 12.394991874694824,
"learning_rate": 1.5340050377833753e-05,
"loss": 0.0972,
"step": 1164
},
{
"epoch": 2.93,
"grad_norm": 6.109569072723389,
"learning_rate": 1.532745591939547e-05,
"loss": 0.0611,
"step": 1165
},
{
"epoch": 2.94,
"grad_norm": 15.410727500915527,
"learning_rate": 1.5314861460957178e-05,
"loss": 0.2336,
"step": 1166
},
{
"epoch": 2.94,
"grad_norm": 9.841590881347656,
"learning_rate": 1.530226700251889e-05,
"loss": 0.2135,
"step": 1167
},
{
"epoch": 2.94,
"grad_norm": 2.900602102279663,
"learning_rate": 1.5289672544080606e-05,
"loss": 0.2371,
"step": 1168
},
{
"epoch": 2.94,
"grad_norm": 9.744848251342773,
"learning_rate": 1.527707808564232e-05,
"loss": 0.117,
"step": 1169
},
{
"epoch": 2.95,
"grad_norm": 20.94938850402832,
"learning_rate": 1.526448362720403e-05,
"loss": 0.2436,
"step": 1170
},
{
"epoch": 2.95,
"grad_norm": 4.247443199157715,
"learning_rate": 1.5251889168765742e-05,
"loss": 0.1908,
"step": 1171
},
{
"epoch": 2.95,
"grad_norm": 11.222188949584961,
"learning_rate": 1.5239294710327456e-05,
"loss": 0.1171,
"step": 1172
},
{
"epoch": 2.95,
"grad_norm": 0.8100425601005554,
"learning_rate": 1.522670025188917e-05,
"loss": 0.0229,
"step": 1173
},
{
"epoch": 2.96,
"grad_norm": 13.735158920288086,
"learning_rate": 1.5214105793450882e-05,
"loss": 0.4922,
"step": 1174
},
{
"epoch": 2.96,
"grad_norm": 40.11631774902344,
"learning_rate": 1.5201511335012593e-05,
"loss": 0.1388,
"step": 1175
},
{
"epoch": 2.96,
"grad_norm": 29.13542938232422,
"learning_rate": 1.5188916876574307e-05,
"loss": 0.1924,
"step": 1176
},
{
"epoch": 2.96,
"grad_norm": 7.6980767250061035,
"learning_rate": 1.5176322418136021e-05,
"loss": 0.0766,
"step": 1177
},
{
"epoch": 2.97,
"grad_norm": 14.650445938110352,
"learning_rate": 1.5163727959697734e-05,
"loss": 0.211,
"step": 1178
},
{
"epoch": 2.97,
"grad_norm": 15.319862365722656,
"learning_rate": 1.5151133501259448e-05,
"loss": 0.2816,
"step": 1179
},
{
"epoch": 2.97,
"grad_norm": 5.754809856414795,
"learning_rate": 1.5138539042821159e-05,
"loss": 0.263,
"step": 1180
},
{
"epoch": 2.97,
"grad_norm": 16.632143020629883,
"learning_rate": 1.5125944584382871e-05,
"loss": 0.2188,
"step": 1181
},
{
"epoch": 2.98,
"grad_norm": 12.786446571350098,
"learning_rate": 1.5113350125944585e-05,
"loss": 0.235,
"step": 1182
},
{
"epoch": 2.98,
"grad_norm": 16.24946403503418,
"learning_rate": 1.51007556675063e-05,
"loss": 0.3029,
"step": 1183
},
{
"epoch": 2.98,
"grad_norm": 12.54625129699707,
"learning_rate": 1.5088161209068012e-05,
"loss": 0.2153,
"step": 1184
},
{
"epoch": 2.98,
"grad_norm": 18.512767791748047,
"learning_rate": 1.5075566750629722e-05,
"loss": 0.6106,
"step": 1185
},
{
"epoch": 2.99,
"grad_norm": 22.6643009185791,
"learning_rate": 1.5062972292191437e-05,
"loss": 0.3158,
"step": 1186
},
{
"epoch": 2.99,
"grad_norm": 3.4380409717559814,
"learning_rate": 1.5050377833753149e-05,
"loss": 0.2336,
"step": 1187
},
{
"epoch": 2.99,
"grad_norm": 18.3015079498291,
"learning_rate": 1.5037783375314863e-05,
"loss": 0.3209,
"step": 1188
},
{
"epoch": 2.99,
"grad_norm": 10.597107887268066,
"learning_rate": 1.5025188916876574e-05,
"loss": 0.0967,
"step": 1189
},
{
"epoch": 3.0,
"grad_norm": 10.336237907409668,
"learning_rate": 1.5012594458438286e-05,
"loss": 0.1293,
"step": 1190
},
{
"epoch": 3.0,
"grad_norm": 2.06474232673645,
"learning_rate": 1.5e-05,
"loss": 0.0459,
"step": 1191
},
{
"epoch": 3.0,
"eval_accuracy": 0.8088328075709779,
"eval_f1": 0.8080378252480893,
"eval_loss": 0.5197204947471619,
"eval_precision": 0.8076567604174967,
"eval_recall": 0.8105337313586474,
"eval_runtime": 1211.4212,
"eval_samples_per_second": 1.308,
"eval_steps_per_second": 0.083,
"step": 1191
}
],
"logging_steps": 1,
"max_steps": 2382,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 500,
"total_flos": 5002004273448960.0,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}