{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.4,
"eval_steps": 500,
"global_step": 1200,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.002,
"grad_norm": 0.15426555275917053,
"learning_rate": 4.999e-05,
"loss": 1.05,
"step": 1
},
{
"epoch": 0.004,
"grad_norm": 0.12709155678749084,
"learning_rate": 4.9980000000000006e-05,
"loss": 1.0572,
"step": 2
},
{
"epoch": 0.006,
"grad_norm": 0.1603364646434784,
"learning_rate": 4.997e-05,
"loss": 1.0282,
"step": 3
},
{
"epoch": 0.008,
"grad_norm": 0.2332337498664856,
"learning_rate": 4.996e-05,
"loss": 1.1703,
"step": 4
},
{
"epoch": 0.01,
"grad_norm": 0.1483815610408783,
"learning_rate": 4.995e-05,
"loss": 1.1163,
"step": 5
},
{
"epoch": 0.012,
"grad_norm": 0.12220710515975952,
"learning_rate": 4.9940000000000006e-05,
"loss": 0.8897,
"step": 6
},
{
"epoch": 0.014,
"grad_norm": 0.15056240558624268,
"learning_rate": 4.9930000000000005e-05,
"loss": 0.8998,
"step": 7
},
{
"epoch": 0.016,
"grad_norm": 0.13436363637447357,
"learning_rate": 4.992e-05,
"loss": 0.9671,
"step": 8
},
{
"epoch": 0.018,
"grad_norm": 0.13434761762619019,
"learning_rate": 4.991e-05,
"loss": 0.9692,
"step": 9
},
{
"epoch": 0.02,
"grad_norm": 0.1641756296157837,
"learning_rate": 4.99e-05,
"loss": 1.0044,
"step": 10
},
{
"epoch": 0.022,
"grad_norm": 0.19019633531570435,
"learning_rate": 4.9890000000000005e-05,
"loss": 0.925,
"step": 11
},
{
"epoch": 0.024,
"grad_norm": 0.11101183295249939,
"learning_rate": 4.9880000000000004e-05,
"loss": 0.9182,
"step": 12
},
{
"epoch": 0.026,
"grad_norm": 0.16728107631206512,
"learning_rate": 4.987e-05,
"loss": 0.9582,
"step": 13
},
{
"epoch": 0.028,
"grad_norm": 0.13117073476314545,
"learning_rate": 4.986e-05,
"loss": 0.9796,
"step": 14
},
{
"epoch": 0.03,
"grad_norm": 0.13628996908664703,
"learning_rate": 4.9850000000000006e-05,
"loss": 1.0854,
"step": 15
},
{
"epoch": 0.032,
"grad_norm": 0.244254007935524,
"learning_rate": 4.9840000000000004e-05,
"loss": 0.98,
"step": 16
},
{
"epoch": 0.034,
"grad_norm": 0.15982504189014435,
"learning_rate": 4.983e-05,
"loss": 1.0,
"step": 17
},
{
"epoch": 0.036,
"grad_norm": 0.19700302183628082,
"learning_rate": 4.982e-05,
"loss": 0.976,
"step": 18
},
{
"epoch": 0.038,
"grad_norm": 0.13975825905799866,
"learning_rate": 4.981e-05,
"loss": 0.9673,
"step": 19
},
{
"epoch": 0.04,
"grad_norm": 0.19986604154109955,
"learning_rate": 4.9800000000000004e-05,
"loss": 1.0941,
"step": 20
},
{
"epoch": 0.042,
"grad_norm": 0.13514864444732666,
"learning_rate": 4.979e-05,
"loss": 0.9293,
"step": 21
},
{
"epoch": 0.044,
"grad_norm": 0.17641392350196838,
"learning_rate": 4.978e-05,
"loss": 1.0845,
"step": 22
},
{
"epoch": 0.046,
"grad_norm": 0.14343786239624023,
"learning_rate": 4.977e-05,
"loss": 0.9832,
"step": 23
},
{
"epoch": 0.048,
"grad_norm": 0.19530853629112244,
"learning_rate": 4.976e-05,
"loss": 1.0003,
"step": 24
},
{
"epoch": 0.05,
"grad_norm": 0.1906237006187439,
"learning_rate": 4.975e-05,
"loss": 1.0535,
"step": 25
},
{
"epoch": 0.052,
"grad_norm": 0.16383886337280273,
"learning_rate": 4.974e-05,
"loss": 0.9403,
"step": 26
},
{
"epoch": 0.054,
"grad_norm": 0.18397654592990875,
"learning_rate": 4.973000000000001e-05,
"loss": 0.9986,
"step": 27
},
{
"epoch": 0.056,
"grad_norm": 0.1469949334859848,
"learning_rate": 4.972e-05,
"loss": 0.8989,
"step": 28
},
{
"epoch": 0.058,
"grad_norm": 0.14918571710586548,
"learning_rate": 4.9710000000000003e-05,
"loss": 1.0134,
"step": 29
},
{
"epoch": 0.06,
"grad_norm": 0.17474521696567535,
"learning_rate": 4.97e-05,
"loss": 0.8832,
"step": 30
},
{
"epoch": 0.062,
"grad_norm": 0.1665133684873581,
"learning_rate": 4.969e-05,
"loss": 0.852,
"step": 31
},
{
"epoch": 0.064,
"grad_norm": 0.16474837064743042,
"learning_rate": 4.9680000000000005e-05,
"loss": 0.8692,
"step": 32
},
{
"epoch": 0.066,
"grad_norm": 0.1703304797410965,
"learning_rate": 4.967e-05,
"loss": 0.9744,
"step": 33
},
{
"epoch": 0.068,
"grad_norm": 0.363540917634964,
"learning_rate": 4.966e-05,
"loss": 0.9743,
"step": 34
},
{
"epoch": 0.07,
"grad_norm": 0.15182030200958252,
"learning_rate": 4.965e-05,
"loss": 0.8621,
"step": 35
},
{
"epoch": 0.072,
"grad_norm": 0.18470583856105804,
"learning_rate": 4.9640000000000006e-05,
"loss": 0.8994,
"step": 36
},
{
"epoch": 0.074,
"grad_norm": 0.17034071683883667,
"learning_rate": 4.9630000000000004e-05,
"loss": 0.8815,
"step": 37
},
{
"epoch": 0.076,
"grad_norm": 0.1921834647655487,
"learning_rate": 4.962e-05,
"loss": 0.8571,
"step": 38
},
{
"epoch": 0.078,
"grad_norm": 0.5177106857299805,
"learning_rate": 4.961e-05,
"loss": 0.8526,
"step": 39
},
{
"epoch": 0.08,
"grad_norm": 0.17441946268081665,
"learning_rate": 4.96e-05,
"loss": 0.9106,
"step": 40
},
{
"epoch": 0.082,
"grad_norm": 0.1906650960445404,
"learning_rate": 4.9590000000000005e-05,
"loss": 0.9408,
"step": 41
},
{
"epoch": 0.084,
"grad_norm": 0.15480925142765045,
"learning_rate": 4.958e-05,
"loss": 0.94,
"step": 42
},
{
"epoch": 0.086,
"grad_norm": 0.1611844003200531,
"learning_rate": 4.957e-05,
"loss": 0.8976,
"step": 43
},
{
"epoch": 0.088,
"grad_norm": 0.1564462035894394,
"learning_rate": 4.956e-05,
"loss": 0.8556,
"step": 44
},
{
"epoch": 0.09,
"grad_norm": 0.19315579533576965,
"learning_rate": 4.9550000000000005e-05,
"loss": 0.9154,
"step": 45
},
{
"epoch": 0.092,
"grad_norm": 0.19190064072608948,
"learning_rate": 4.9540000000000003e-05,
"loss": 0.8546,
"step": 46
},
{
"epoch": 0.094,
"grad_norm": 0.18955475091934204,
"learning_rate": 4.953e-05,
"loss": 0.9515,
"step": 47
},
{
"epoch": 0.096,
"grad_norm": 0.18206185102462769,
"learning_rate": 4.952e-05,
"loss": 0.9668,
"step": 48
},
{
"epoch": 0.098,
"grad_norm": 0.17476564645767212,
"learning_rate": 4.951e-05,
"loss": 0.8894,
"step": 49
},
{
"epoch": 0.1,
"grad_norm": 0.16770018637180328,
"learning_rate": 4.9500000000000004e-05,
"loss": 0.864,
"step": 50
},
{
"epoch": 0.102,
"grad_norm": 0.1613185703754425,
"learning_rate": 4.949e-05,
"loss": 0.8604,
"step": 51
},
{
"epoch": 0.104,
"grad_norm": 0.18597957491874695,
"learning_rate": 4.948000000000001e-05,
"loss": 1.0068,
"step": 52
},
{
"epoch": 0.106,
"grad_norm": 0.15847554802894592,
"learning_rate": 4.947e-05,
"loss": 0.8898,
"step": 53
},
{
"epoch": 0.108,
"grad_norm": 0.14919398725032806,
"learning_rate": 4.946e-05,
"loss": 0.8268,
"step": 54
},
{
"epoch": 0.11,
"grad_norm": 0.15170955657958984,
"learning_rate": 4.945e-05,
"loss": 0.7413,
"step": 55
},
{
"epoch": 0.112,
"grad_norm": 0.1621982753276825,
"learning_rate": 4.944e-05,
"loss": 0.9099,
"step": 56
},
{
"epoch": 0.114,
"grad_norm": 0.1732129007577896,
"learning_rate": 4.9430000000000006e-05,
"loss": 0.7594,
"step": 57
},
{
"epoch": 0.116,
"grad_norm": 0.17541876435279846,
"learning_rate": 4.942e-05,
"loss": 0.8599,
"step": 58
},
{
"epoch": 0.118,
"grad_norm": 0.20780043303966522,
"learning_rate": 4.941e-05,
"loss": 0.8937,
"step": 59
},
{
"epoch": 0.12,
"grad_norm": 0.15124286711215973,
"learning_rate": 4.94e-05,
"loss": 0.8622,
"step": 60
},
{
"epoch": 0.122,
"grad_norm": 0.15645167231559753,
"learning_rate": 4.939e-05,
"loss": 0.9051,
"step": 61
},
{
"epoch": 0.124,
"grad_norm": 0.29350391030311584,
"learning_rate": 4.9380000000000005e-05,
"loss": 0.8143,
"step": 62
},
{
"epoch": 0.126,
"grad_norm": 0.21682289242744446,
"learning_rate": 4.937e-05,
"loss": 0.919,
"step": 63
},
{
"epoch": 0.128,
"grad_norm": 0.1551298350095749,
"learning_rate": 4.936e-05,
"loss": 0.9609,
"step": 64
},
{
"epoch": 0.13,
"grad_norm": 0.18353714048862457,
"learning_rate": 4.935e-05,
"loss": 0.8417,
"step": 65
},
{
"epoch": 0.132,
"grad_norm": 0.16884168982505798,
"learning_rate": 4.9340000000000005e-05,
"loss": 0.8245,
"step": 66
},
{
"epoch": 0.134,
"grad_norm": 0.16556158661842346,
"learning_rate": 4.9330000000000004e-05,
"loss": 0.861,
"step": 67
},
{
"epoch": 0.136,
"grad_norm": 0.1595425009727478,
"learning_rate": 4.932e-05,
"loss": 0.8105,
"step": 68
},
{
"epoch": 0.138,
"grad_norm": 0.16665107011795044,
"learning_rate": 4.931e-05,
"loss": 0.9665,
"step": 69
},
{
"epoch": 0.14,
"grad_norm": 0.16219840943813324,
"learning_rate": 4.93e-05,
"loss": 0.8015,
"step": 70
},
{
"epoch": 0.142,
"grad_norm": 0.17223553359508514,
"learning_rate": 4.9290000000000004e-05,
"loss": 0.8522,
"step": 71
},
{
"epoch": 0.144,
"grad_norm": 0.15540814399719238,
"learning_rate": 4.928e-05,
"loss": 0.6988,
"step": 72
},
{
"epoch": 0.146,
"grad_norm": 0.17961961030960083,
"learning_rate": 4.927000000000001e-05,
"loss": 0.9313,
"step": 73
},
{
"epoch": 0.148,
"grad_norm": 0.17895761132240295,
"learning_rate": 4.926e-05,
"loss": 0.8712,
"step": 74
},
{
"epoch": 0.15,
"grad_norm": 0.17795026302337646,
"learning_rate": 4.9250000000000004e-05,
"loss": 0.8601,
"step": 75
},
{
"epoch": 0.152,
"grad_norm": 0.16494092345237732,
"learning_rate": 4.924e-05,
"loss": 0.8595,
"step": 76
},
{
"epoch": 0.154,
"grad_norm": 0.17727716267108917,
"learning_rate": 4.923e-05,
"loss": 0.7616,
"step": 77
},
{
"epoch": 0.156,
"grad_norm": 0.1806144267320633,
"learning_rate": 4.9220000000000006e-05,
"loss": 0.771,
"step": 78
},
{
"epoch": 0.158,
"grad_norm": 0.16605085134506226,
"learning_rate": 4.921e-05,
"loss": 0.7771,
"step": 79
},
{
"epoch": 0.16,
"grad_norm": 0.2618866264820099,
"learning_rate": 4.92e-05,
"loss": 0.9218,
"step": 80
},
{
"epoch": 0.162,
"grad_norm": 0.20908960700035095,
"learning_rate": 4.919e-05,
"loss": 0.7781,
"step": 81
},
{
"epoch": 0.164,
"grad_norm": 0.16217266023159027,
"learning_rate": 4.918000000000001e-05,
"loss": 0.8979,
"step": 82
},
{
"epoch": 0.166,
"grad_norm": 0.161507248878479,
"learning_rate": 4.9170000000000005e-05,
"loss": 0.6754,
"step": 83
},
{
"epoch": 0.168,
"grad_norm": 0.17677399516105652,
"learning_rate": 4.9160000000000004e-05,
"loss": 0.7224,
"step": 84
},
{
"epoch": 0.17,
"grad_norm": 0.2032463252544403,
"learning_rate": 4.915e-05,
"loss": 0.7824,
"step": 85
},
{
"epoch": 0.172,
"grad_norm": 0.18544171750545502,
"learning_rate": 4.914e-05,
"loss": 0.8323,
"step": 86
},
{
"epoch": 0.174,
"grad_norm": 0.17022430896759033,
"learning_rate": 4.9130000000000006e-05,
"loss": 0.8678,
"step": 87
},
{
"epoch": 0.176,
"grad_norm": 0.16055932641029358,
"learning_rate": 4.9120000000000004e-05,
"loss": 0.7293,
"step": 88
},
{
"epoch": 0.178,
"grad_norm": 0.1739533543586731,
"learning_rate": 4.911e-05,
"loss": 0.6897,
"step": 89
},
{
"epoch": 0.18,
"grad_norm": 0.17253080010414124,
"learning_rate": 4.91e-05,
"loss": 0.7663,
"step": 90
},
{
"epoch": 0.182,
"grad_norm": 0.18093682825565338,
"learning_rate": 4.9090000000000006e-05,
"loss": 0.8966,
"step": 91
},
{
"epoch": 0.184,
"grad_norm": 0.39482560753822327,
"learning_rate": 4.9080000000000004e-05,
"loss": 0.7794,
"step": 92
},
{
"epoch": 0.186,
"grad_norm": 0.1769993156194687,
"learning_rate": 4.907e-05,
"loss": 0.7503,
"step": 93
},
{
"epoch": 0.188,
"grad_norm": 0.1584313064813614,
"learning_rate": 4.906e-05,
"loss": 0.7138,
"step": 94
},
{
"epoch": 0.19,
"grad_norm": 0.21162880957126617,
"learning_rate": 4.905e-05,
"loss": 0.8229,
"step": 95
},
{
"epoch": 0.192,
"grad_norm": 0.1863650381565094,
"learning_rate": 4.9040000000000005e-05,
"loss": 0.7932,
"step": 96
},
{
"epoch": 0.194,
"grad_norm": 0.19603383541107178,
"learning_rate": 4.903e-05,
"loss": 0.7973,
"step": 97
},
{
"epoch": 0.196,
"grad_norm": 0.16941046714782715,
"learning_rate": 4.902e-05,
"loss": 0.7216,
"step": 98
},
{
"epoch": 0.198,
"grad_norm": 0.17508655786514282,
"learning_rate": 4.901e-05,
"loss": 0.6917,
"step": 99
},
{
"epoch": 0.2,
"grad_norm": 0.20799246430397034,
"learning_rate": 4.9e-05,
"loss": 0.7623,
"step": 100
},
{
"epoch": 0.202,
"grad_norm": 0.17266519367694855,
"learning_rate": 4.8990000000000004e-05,
"loss": 0.7838,
"step": 101
},
{
"epoch": 0.204,
"grad_norm": 0.15804770588874817,
"learning_rate": 4.898e-05,
"loss": 0.7198,
"step": 102
},
{
"epoch": 0.206,
"grad_norm": 0.18841996788978577,
"learning_rate": 4.897000000000001e-05,
"loss": 0.8427,
"step": 103
},
{
"epoch": 0.208,
"grad_norm": 0.2060270458459854,
"learning_rate": 4.896e-05,
"loss": 0.732,
"step": 104
},
{
"epoch": 0.21,
"grad_norm": 0.16148711740970612,
"learning_rate": 4.8950000000000004e-05,
"loss": 0.7576,
"step": 105
},
{
"epoch": 0.212,
"grad_norm": 0.16729658842086792,
"learning_rate": 4.894e-05,
"loss": 0.7049,
"step": 106
},
{
"epoch": 0.214,
"grad_norm": 0.18130692839622498,
"learning_rate": 4.893e-05,
"loss": 0.7943,
"step": 107
},
{
"epoch": 0.216,
"grad_norm": 0.16856049001216888,
"learning_rate": 4.8920000000000006e-05,
"loss": 0.7507,
"step": 108
},
{
"epoch": 0.218,
"grad_norm": 0.17912209033966064,
"learning_rate": 4.891e-05,
"loss": 0.7448,
"step": 109
},
{
"epoch": 0.22,
"grad_norm": 0.16425421833992004,
"learning_rate": 4.89e-05,
"loss": 0.7518,
"step": 110
},
{
"epoch": 0.222,
"grad_norm": 0.18968652188777924,
"learning_rate": 4.889e-05,
"loss": 0.7387,
"step": 111
},
{
"epoch": 0.224,
"grad_norm": 0.18210746347904205,
"learning_rate": 4.8880000000000006e-05,
"loss": 0.8284,
"step": 112
},
{
"epoch": 0.226,
"grad_norm": 0.16067589819431305,
"learning_rate": 4.8870000000000005e-05,
"loss": 0.7161,
"step": 113
},
{
"epoch": 0.228,
"grad_norm": 0.21508732438087463,
"learning_rate": 4.886e-05,
"loss": 0.7559,
"step": 114
},
{
"epoch": 0.23,
"grad_norm": 0.20675824582576752,
"learning_rate": 4.885e-05,
"loss": 0.7717,
"step": 115
},
{
"epoch": 0.232,
"grad_norm": 0.1855594366788864,
"learning_rate": 4.884e-05,
"loss": 0.7304,
"step": 116
},
{
"epoch": 0.234,
"grad_norm": 0.17346052825450897,
"learning_rate": 4.8830000000000005e-05,
"loss": 0.7946,
"step": 117
},
{
"epoch": 0.236,
"grad_norm": 0.16850675642490387,
"learning_rate": 4.8820000000000004e-05,
"loss": 0.8264,
"step": 118
},
{
"epoch": 0.238,
"grad_norm": 0.17360711097717285,
"learning_rate": 4.881e-05,
"loss": 0.7152,
"step": 119
},
{
"epoch": 0.24,
"grad_norm": 0.19422297179698944,
"learning_rate": 4.88e-05,
"loss": 0.7704,
"step": 120
},
{
"epoch": 0.242,
"grad_norm": 0.16386844217777252,
"learning_rate": 4.8790000000000006e-05,
"loss": 0.8024,
"step": 121
},
{
"epoch": 0.244,
"grad_norm": 0.18992245197296143,
"learning_rate": 4.8780000000000004e-05,
"loss": 0.6937,
"step": 122
},
{
"epoch": 0.246,
"grad_norm": 0.17040878534317017,
"learning_rate": 4.877e-05,
"loss": 0.7349,
"step": 123
},
{
"epoch": 0.248,
"grad_norm": 0.187296524643898,
"learning_rate": 4.876e-05,
"loss": 0.6242,
"step": 124
},
{
"epoch": 0.25,
"grad_norm": 0.17259983718395233,
"learning_rate": 4.875e-05,
"loss": 0.6656,
"step": 125
},
{
"epoch": 0.252,
"grad_norm": 0.16188134253025055,
"learning_rate": 4.8740000000000004e-05,
"loss": 0.7009,
"step": 126
},
{
"epoch": 0.254,
"grad_norm": 0.17831256985664368,
"learning_rate": 4.873e-05,
"loss": 0.7098,
"step": 127
},
{
"epoch": 0.256,
"grad_norm": 0.18309949338436127,
"learning_rate": 4.872000000000001e-05,
"loss": 0.8171,
"step": 128
},
{
"epoch": 0.258,
"grad_norm": 0.17562147974967957,
"learning_rate": 4.871e-05,
"loss": 0.7185,
"step": 129
},
{
"epoch": 0.26,
"grad_norm": 0.18238484859466553,
"learning_rate": 4.87e-05,
"loss": 0.7824,
"step": 130
},
{
"epoch": 0.262,
"grad_norm": 0.1970655918121338,
"learning_rate": 4.869e-05,
"loss": 0.6794,
"step": 131
},
{
"epoch": 0.264,
"grad_norm": 0.18091806769371033,
"learning_rate": 4.868e-05,
"loss": 0.7149,
"step": 132
},
{
"epoch": 0.266,
"grad_norm": 0.16607695817947388,
"learning_rate": 4.867000000000001e-05,
"loss": 0.6596,
"step": 133
},
{
"epoch": 0.268,
"grad_norm": 0.14630946516990662,
"learning_rate": 4.866e-05,
"loss": 0.5551,
"step": 134
},
{
"epoch": 0.27,
"grad_norm": 0.1951550394296646,
"learning_rate": 4.8650000000000003e-05,
"loss": 0.6972,
"step": 135
},
{
"epoch": 0.272,
"grad_norm": 0.16738222539424896,
"learning_rate": 4.864e-05,
"loss": 0.6976,
"step": 136
},
{
"epoch": 0.274,
"grad_norm": 0.17754323780536652,
"learning_rate": 4.863e-05,
"loss": 0.7231,
"step": 137
},
{
"epoch": 0.276,
"grad_norm": 0.19366562366485596,
"learning_rate": 4.8620000000000005e-05,
"loss": 0.7682,
"step": 138
},
{
"epoch": 0.278,
"grad_norm": 0.19526396691799164,
"learning_rate": 4.861e-05,
"loss": 0.6572,
"step": 139
},
{
"epoch": 0.28,
"grad_norm": 0.18090026080608368,
"learning_rate": 4.86e-05,
"loss": 0.7206,
"step": 140
},
{
"epoch": 0.282,
"grad_norm": 0.17786529660224915,
"learning_rate": 4.859e-05,
"loss": 0.7527,
"step": 141
},
{
"epoch": 0.284,
"grad_norm": 0.17932577431201935,
"learning_rate": 4.8580000000000006e-05,
"loss": 0.7523,
"step": 142
},
{
"epoch": 0.286,
"grad_norm": 0.17716765403747559,
"learning_rate": 4.8570000000000004e-05,
"loss": 0.66,
"step": 143
},
{
"epoch": 0.288,
"grad_norm": 0.19830791652202606,
"learning_rate": 4.856e-05,
"loss": 0.7504,
"step": 144
},
{
"epoch": 0.29,
"grad_norm": 0.19970470666885376,
"learning_rate": 4.855e-05,
"loss": 0.8374,
"step": 145
},
{
"epoch": 0.292,
"grad_norm": 0.17745786905288696,
"learning_rate": 4.854e-05,
"loss": 0.7288,
"step": 146
},
{
"epoch": 0.294,
"grad_norm": 0.16594083607196808,
"learning_rate": 4.8530000000000005e-05,
"loss": 0.6372,
"step": 147
},
{
"epoch": 0.296,
"grad_norm": 0.19067466259002686,
"learning_rate": 4.852e-05,
"loss": 0.7761,
"step": 148
},
{
"epoch": 0.298,
"grad_norm": 0.17589321732521057,
"learning_rate": 4.851e-05,
"loss": 0.7475,
"step": 149
},
{
"epoch": 0.3,
"grad_norm": 0.16668495535850525,
"learning_rate": 4.85e-05,
"loss": 0.655,
"step": 150
},
{
"epoch": 0.302,
"grad_norm": 0.18038712441921234,
"learning_rate": 4.8490000000000005e-05,
"loss": 0.7447,
"step": 151
},
{
"epoch": 0.304,
"grad_norm": 0.17747431993484497,
"learning_rate": 4.8480000000000003e-05,
"loss": 0.6821,
"step": 152
},
{
"epoch": 0.306,
"grad_norm": 0.18808145821094513,
"learning_rate": 4.847e-05,
"loss": 0.6878,
"step": 153
},
{
"epoch": 0.308,
"grad_norm": 0.16301684081554413,
"learning_rate": 4.846e-05,
"loss": 0.6687,
"step": 154
},
{
"epoch": 0.31,
"grad_norm": 0.1757572591304779,
"learning_rate": 4.845e-05,
"loss": 0.7166,
"step": 155
},
{
"epoch": 0.312,
"grad_norm": 0.16521325707435608,
"learning_rate": 4.8440000000000004e-05,
"loss": 0.6626,
"step": 156
},
{
"epoch": 0.314,
"grad_norm": 0.17104895412921906,
"learning_rate": 4.843e-05,
"loss": 0.694,
"step": 157
},
{
"epoch": 0.316,
"grad_norm": 0.2105347067117691,
"learning_rate": 4.842000000000001e-05,
"loss": 0.7123,
"step": 158
},
{
"epoch": 0.318,
"grad_norm": 0.1545153558254242,
"learning_rate": 4.841e-05,
"loss": 0.7374,
"step": 159
},
{
"epoch": 0.32,
"grad_norm": 0.18015415966510773,
"learning_rate": 4.8400000000000004e-05,
"loss": 0.8343,
"step": 160
},
{
"epoch": 0.322,
"grad_norm": 0.1678248643875122,
"learning_rate": 4.839e-05,
"loss": 0.677,
"step": 161
},
{
"epoch": 0.324,
"grad_norm": 0.2440311163663864,
"learning_rate": 4.838e-05,
"loss": 0.7022,
"step": 162
},
{
"epoch": 0.326,
"grad_norm": 0.1826225370168686,
"learning_rate": 4.8370000000000006e-05,
"loss": 0.7169,
"step": 163
},
{
"epoch": 0.328,
"grad_norm": 0.1914544403553009,
"learning_rate": 4.836e-05,
"loss": 0.745,
"step": 164
},
{
"epoch": 0.33,
"grad_norm": 0.18756432831287384,
"learning_rate": 4.835e-05,
"loss": 0.6353,
"step": 165
},
{
"epoch": 0.332,
"grad_norm": 0.16854585707187653,
"learning_rate": 4.834e-05,
"loss": 0.7072,
"step": 166
},
{
"epoch": 0.334,
"grad_norm": 0.2005048245191574,
"learning_rate": 4.833e-05,
"loss": 0.7651,
"step": 167
},
{
"epoch": 0.336,
"grad_norm": 0.19548290967941284,
"learning_rate": 4.8320000000000005e-05,
"loss": 0.736,
"step": 168
},
{
"epoch": 0.338,
"grad_norm": 0.17135733366012573,
"learning_rate": 4.8309999999999997e-05,
"loss": 0.8211,
"step": 169
},
{
"epoch": 0.34,
"grad_norm": 0.16657552123069763,
"learning_rate": 4.83e-05,
"loss": 0.7372,
"step": 170
},
{
"epoch": 0.342,
"grad_norm": 0.17498229444026947,
"learning_rate": 4.829e-05,
"loss": 0.7333,
"step": 171
},
{
"epoch": 0.344,
"grad_norm": 0.18211571872234344,
"learning_rate": 4.8280000000000005e-05,
"loss": 0.7547,
"step": 172
},
{
"epoch": 0.346,
"grad_norm": 0.1906624734401703,
"learning_rate": 4.8270000000000004e-05,
"loss": 0.7839,
"step": 173
},
{
"epoch": 0.348,
"grad_norm": 0.1763816624879837,
"learning_rate": 4.826e-05,
"loss": 0.6486,
"step": 174
},
{
"epoch": 0.35,
"grad_norm": 0.16783268749713898,
"learning_rate": 4.825e-05,
"loss": 0.6533,
"step": 175
},
{
"epoch": 0.352,
"grad_norm": 0.17041102051734924,
"learning_rate": 4.824e-05,
"loss": 0.7981,
"step": 176
},
{
"epoch": 0.354,
"grad_norm": 0.1828545331954956,
"learning_rate": 4.8230000000000004e-05,
"loss": 0.7116,
"step": 177
},
{
"epoch": 0.356,
"grad_norm": 0.20127569139003754,
"learning_rate": 4.822e-05,
"loss": 0.7525,
"step": 178
},
{
"epoch": 0.358,
"grad_norm": 0.1801321655511856,
"learning_rate": 4.821e-05,
"loss": 0.6858,
"step": 179
},
{
"epoch": 0.36,
"grad_norm": 0.18023674190044403,
"learning_rate": 4.82e-05,
"loss": 0.7719,
"step": 180
},
{
"epoch": 0.362,
"grad_norm": 0.19391784071922302,
"learning_rate": 4.8190000000000004e-05,
"loss": 0.7177,
"step": 181
},
{
"epoch": 0.364,
"grad_norm": 0.17303819954395294,
"learning_rate": 4.818e-05,
"loss": 0.6717,
"step": 182
},
{
"epoch": 0.366,
"grad_norm": 0.16896046698093414,
"learning_rate": 4.817e-05,
"loss": 0.6847,
"step": 183
},
{
"epoch": 0.368,
"grad_norm": 0.19223248958587646,
"learning_rate": 4.816e-05,
"loss": 0.6312,
"step": 184
},
{
"epoch": 0.37,
"grad_norm": 0.20020322501659393,
"learning_rate": 4.815e-05,
"loss": 0.6391,
"step": 185
},
{
"epoch": 0.372,
"grad_norm": 0.16758307814598083,
"learning_rate": 4.814e-05,
"loss": 0.6659,
"step": 186
},
{
"epoch": 0.374,
"grad_norm": 0.176300048828125,
"learning_rate": 4.813e-05,
"loss": 0.6823,
"step": 187
},
{
"epoch": 0.376,
"grad_norm": 0.17282813787460327,
"learning_rate": 4.812000000000001e-05,
"loss": 0.6885,
"step": 188
},
{
"epoch": 0.378,
"grad_norm": 0.1966462880373001,
"learning_rate": 4.8110000000000005e-05,
"loss": 0.6833,
"step": 189
},
{
"epoch": 0.38,
"grad_norm": 0.18239817023277283,
"learning_rate": 4.8100000000000004e-05,
"loss": 0.6464,
"step": 190
},
{
"epoch": 0.382,
"grad_norm": 0.17479926347732544,
"learning_rate": 4.809e-05,
"loss": 0.6868,
"step": 191
},
{
"epoch": 0.384,
"grad_norm": 0.1797116994857788,
"learning_rate": 4.808e-05,
"loss": 0.612,
"step": 192
},
{
"epoch": 0.386,
"grad_norm": 0.17411945760250092,
"learning_rate": 4.8070000000000006e-05,
"loss": 0.6431,
"step": 193
},
{
"epoch": 0.388,
"grad_norm": 0.20542018115520477,
"learning_rate": 4.8060000000000004e-05,
"loss": 0.6994,
"step": 194
},
{
"epoch": 0.39,
"grad_norm": 0.18406227231025696,
"learning_rate": 4.805e-05,
"loss": 0.6693,
"step": 195
},
{
"epoch": 0.392,
"grad_norm": 0.16543979942798615,
"learning_rate": 4.804e-05,
"loss": 0.5446,
"step": 196
},
{
"epoch": 0.394,
"grad_norm": 0.17976847290992737,
"learning_rate": 4.8030000000000006e-05,
"loss": 0.6433,
"step": 197
},
{
"epoch": 0.396,
"grad_norm": 0.26321882009506226,
"learning_rate": 4.8020000000000004e-05,
"loss": 0.73,
"step": 198
},
{
"epoch": 0.398,
"grad_norm": 0.19417919218540192,
"learning_rate": 4.801e-05,
"loss": 0.6931,
"step": 199
},
{
"epoch": 0.4,
"grad_norm": 0.17781345546245575,
"learning_rate": 4.8e-05,
"loss": 0.6867,
"step": 200
},
{
"epoch": 0.402,
"grad_norm": 0.188642680644989,
"learning_rate": 4.799e-05,
"loss": 0.6491,
"step": 201
},
{
"epoch": 0.404,
"grad_norm": 0.18071314692497253,
"learning_rate": 4.7980000000000005e-05,
"loss": 0.6237,
"step": 202
},
{
"epoch": 0.406,
"grad_norm": 0.21487824618816376,
"learning_rate": 4.797e-05,
"loss": 0.7617,
"step": 203
},
{
"epoch": 0.408,
"grad_norm": 0.17277319729328156,
"learning_rate": 4.796e-05,
"loss": 0.5965,
"step": 204
},
{
"epoch": 0.41,
"grad_norm": 0.18421082198619843,
"learning_rate": 4.795e-05,
"loss": 0.679,
"step": 205
},
{
"epoch": 0.412,
"grad_norm": 0.15729452669620514,
"learning_rate": 4.794e-05,
"loss": 0.6262,
"step": 206
},
{
"epoch": 0.414,
"grad_norm": 0.2214370220899582,
"learning_rate": 4.7930000000000004e-05,
"loss": 0.6288,
"step": 207
},
{
"epoch": 0.416,
"grad_norm": 0.17312027513980865,
"learning_rate": 4.792e-05,
"loss": 0.6782,
"step": 208
},
{
"epoch": 0.418,
"grad_norm": 0.21297283470630646,
"learning_rate": 4.791000000000001e-05,
"loss": 0.7311,
"step": 209
},
{
"epoch": 0.42,
"grad_norm": 0.20006674528121948,
"learning_rate": 4.79e-05,
"loss": 0.7033,
"step": 210
},
{
"epoch": 0.422,
"grad_norm": 0.19188763201236725,
"learning_rate": 4.7890000000000004e-05,
"loss": 0.6777,
"step": 211
},
{
"epoch": 0.424,
"grad_norm": 0.17428816854953766,
"learning_rate": 4.788e-05,
"loss": 0.6487,
"step": 212
},
{
"epoch": 0.426,
"grad_norm": 0.20363588631153107,
"learning_rate": 4.787e-05,
"loss": 0.8352,
"step": 213
},
{
"epoch": 0.428,
"grad_norm": 0.171103835105896,
"learning_rate": 4.7860000000000006e-05,
"loss": 0.5961,
"step": 214
},
{
"epoch": 0.43,
"grad_norm": 0.17176367342472076,
"learning_rate": 4.785e-05,
"loss": 0.6979,
"step": 215
},
{
"epoch": 0.432,
"grad_norm": 0.17316070199012756,
"learning_rate": 4.784e-05,
"loss": 0.592,
"step": 216
},
{
"epoch": 0.434,
"grad_norm": 0.17615540325641632,
"learning_rate": 4.783e-05,
"loss": 0.7628,
"step": 217
},
{
"epoch": 0.436,
"grad_norm": 0.1740606278181076,
"learning_rate": 4.7820000000000006e-05,
"loss": 0.6561,
"step": 218
},
{
"epoch": 0.438,
"grad_norm": 0.1783115118741989,
"learning_rate": 4.7810000000000005e-05,
"loss": 0.6712,
"step": 219
},
{
"epoch": 0.44,
"grad_norm": 0.1763063669204712,
"learning_rate": 4.78e-05,
"loss": 0.7533,
"step": 220
},
{
"epoch": 0.442,
"grad_norm": 0.19785965979099274,
"learning_rate": 4.779e-05,
"loss": 0.7371,
"step": 221
},
{
"epoch": 0.444,
"grad_norm": 0.2079770267009735,
"learning_rate": 4.778e-05,
"loss": 0.7435,
"step": 222
},
{
"epoch": 0.446,
"grad_norm": 0.18670807778835297,
"learning_rate": 4.7770000000000005e-05,
"loss": 0.6834,
"step": 223
},
{
"epoch": 0.448,
"grad_norm": 0.18333281576633453,
"learning_rate": 4.7760000000000004e-05,
"loss": 0.6122,
"step": 224
},
{
"epoch": 0.45,
"grad_norm": 0.19812564551830292,
"learning_rate": 4.775e-05,
"loss": 0.6303,
"step": 225
},
{
"epoch": 0.452,
"grad_norm": 0.20050670206546783,
"learning_rate": 4.774e-05,
"loss": 0.769,
"step": 226
},
{
"epoch": 0.454,
"grad_norm": 0.1639266014099121,
"learning_rate": 4.7730000000000005e-05,
"loss": 0.6178,
"step": 227
},
{
"epoch": 0.456,
"grad_norm": 0.17916633188724518,
"learning_rate": 4.7720000000000004e-05,
"loss": 0.6541,
"step": 228
},
{
"epoch": 0.458,
"grad_norm": 0.22116564214229584,
"learning_rate": 4.771e-05,
"loss": 0.7461,
"step": 229
},
{
"epoch": 0.46,
"grad_norm": 0.176322802901268,
"learning_rate": 4.77e-05,
"loss": 0.6412,
"step": 230
},
{
"epoch": 0.462,
"grad_norm": 0.19817325472831726,
"learning_rate": 4.769e-05,
"loss": 0.6134,
"step": 231
},
{
"epoch": 0.464,
"grad_norm": 0.1950073093175888,
"learning_rate": 4.7680000000000004e-05,
"loss": 0.6869,
"step": 232
},
{
"epoch": 0.466,
"grad_norm": 0.1868954300880432,
"learning_rate": 4.767e-05,
"loss": 0.6438,
"step": 233
},
{
"epoch": 0.468,
"grad_norm": 0.17018316686153412,
"learning_rate": 4.766000000000001e-05,
"loss": 0.6171,
"step": 234
},
{
"epoch": 0.47,
"grad_norm": 0.2001843899488449,
"learning_rate": 4.765e-05,
"loss": 0.6735,
"step": 235
},
{
"epoch": 0.472,
"grad_norm": 0.17621289193630219,
"learning_rate": 4.7640000000000005e-05,
"loss": 0.5407,
"step": 236
},
{
"epoch": 0.474,
"grad_norm": 0.19587376713752747,
"learning_rate": 4.763e-05,
"loss": 0.7755,
"step": 237
},
{
"epoch": 0.476,
"grad_norm": 0.1757344752550125,
"learning_rate": 4.762e-05,
"loss": 0.6121,
"step": 238
},
{
"epoch": 0.478,
"grad_norm": 0.19122175872325897,
"learning_rate": 4.761000000000001e-05,
"loss": 0.7537,
"step": 239
},
{
"epoch": 0.48,
"grad_norm": 0.1687469482421875,
"learning_rate": 4.76e-05,
"loss": 0.6598,
"step": 240
},
{
"epoch": 0.482,
"grad_norm": 0.16485217213630676,
"learning_rate": 4.7590000000000003e-05,
"loss": 0.545,
"step": 241
},
{
"epoch": 0.484,
"grad_norm": 0.1940855085849762,
"learning_rate": 4.758e-05,
"loss": 0.7865,
"step": 242
},
{
"epoch": 0.486,
"grad_norm": 0.2028513252735138,
"learning_rate": 4.757e-05,
"loss": 0.6879,
"step": 243
},
{
"epoch": 0.488,
"grad_norm": 0.15615610778331757,
"learning_rate": 4.7560000000000005e-05,
"loss": 0.559,
"step": 244
},
{
"epoch": 0.49,
"grad_norm": 0.18467123806476593,
"learning_rate": 4.755e-05,
"loss": 0.7475,
"step": 245
},
{
"epoch": 0.492,
"grad_norm": 0.1865721493959427,
"learning_rate": 4.754e-05,
"loss": 0.7141,
"step": 246
},
{
"epoch": 0.494,
"grad_norm": 0.18532711267471313,
"learning_rate": 4.753e-05,
"loss": 0.7313,
"step": 247
},
{
"epoch": 0.496,
"grad_norm": 0.1919030398130417,
"learning_rate": 4.7520000000000006e-05,
"loss": 0.6593,
"step": 248
},
{
"epoch": 0.498,
"grad_norm": 0.18372733891010284,
"learning_rate": 4.7510000000000004e-05,
"loss": 0.6839,
"step": 249
},
{
"epoch": 0.5,
"grad_norm": 0.18965525925159454,
"learning_rate": 4.75e-05,
"loss": 0.7376,
"step": 250
},
{
"epoch": 0.502,
"grad_norm": 0.20535258948802948,
"learning_rate": 4.749e-05,
"loss": 0.7515,
"step": 251
},
{
"epoch": 0.504,
"grad_norm": 0.16773927211761475,
"learning_rate": 4.748e-05,
"loss": 0.5974,
"step": 252
},
{
"epoch": 0.506,
"grad_norm": 0.16834793984889984,
"learning_rate": 4.7470000000000005e-05,
"loss": 0.5599,
"step": 253
},
{
"epoch": 0.508,
"grad_norm": 0.22490134835243225,
"learning_rate": 4.746e-05,
"loss": 0.7511,
"step": 254
},
{
"epoch": 0.51,
"grad_norm": 0.1897481083869934,
"learning_rate": 4.745e-05,
"loss": 0.6008,
"step": 255
},
{
"epoch": 0.512,
"grad_norm": 0.21182553470134735,
"learning_rate": 4.744e-05,
"loss": 0.7087,
"step": 256
},
{
"epoch": 0.514,
"grad_norm": 0.2457868903875351,
"learning_rate": 4.7430000000000005e-05,
"loss": 0.622,
"step": 257
},
{
"epoch": 0.516,
"grad_norm": 0.18708348274230957,
"learning_rate": 4.742e-05,
"loss": 0.6574,
"step": 258
},
{
"epoch": 0.518,
"grad_norm": 0.20146219432353973,
"learning_rate": 4.741e-05,
"loss": 0.6787,
"step": 259
},
{
"epoch": 0.52,
"grad_norm": 0.18447726964950562,
"learning_rate": 4.74e-05,
"loss": 0.6803,
"step": 260
},
{
"epoch": 0.522,
"grad_norm": 0.18880298733711243,
"learning_rate": 4.739e-05,
"loss": 0.7279,
"step": 261
},
{
"epoch": 0.524,
"grad_norm": 0.2038155347108841,
"learning_rate": 4.7380000000000004e-05,
"loss": 0.6424,
"step": 262
},
{
"epoch": 0.526,
"grad_norm": 0.18949934840202332,
"learning_rate": 4.737e-05,
"loss": 0.6297,
"step": 263
},
{
"epoch": 0.528,
"grad_norm": 0.19548696279525757,
"learning_rate": 4.736000000000001e-05,
"loss": 0.5877,
"step": 264
},
{
"epoch": 0.53,
"grad_norm": 0.171523779630661,
"learning_rate": 4.735e-05,
"loss": 0.6716,
"step": 265
},
{
"epoch": 0.532,
"grad_norm": 0.18637031316757202,
"learning_rate": 4.7340000000000004e-05,
"loss": 0.6865,
"step": 266
},
{
"epoch": 0.534,
"grad_norm": 0.18773992359638214,
"learning_rate": 4.733e-05,
"loss": 0.6441,
"step": 267
},
{
"epoch": 0.536,
"grad_norm": 0.1926441490650177,
"learning_rate": 4.732e-05,
"loss": 0.7257,
"step": 268
},
{
"epoch": 0.538,
"grad_norm": 0.1823345571756363,
"learning_rate": 4.7310000000000006e-05,
"loss": 0.6777,
"step": 269
},
{
"epoch": 0.54,
"grad_norm": 0.19136260449886322,
"learning_rate": 4.73e-05,
"loss": 0.6748,
"step": 270
},
{
"epoch": 0.542,
"grad_norm": 0.20468977093696594,
"learning_rate": 4.729e-05,
"loss": 0.6638,
"step": 271
},
{
"epoch": 0.544,
"grad_norm": 0.18765564262866974,
"learning_rate": 4.728e-05,
"loss": 0.6411,
"step": 272
},
{
"epoch": 0.546,
"grad_norm": 0.19151361286640167,
"learning_rate": 4.7270000000000007e-05,
"loss": 0.6509,
"step": 273
},
{
"epoch": 0.548,
"grad_norm": 0.1872202306985855,
"learning_rate": 4.7260000000000005e-05,
"loss": 0.6721,
"step": 274
},
{
"epoch": 0.55,
"grad_norm": 0.1839660257101059,
"learning_rate": 4.7249999999999997e-05,
"loss": 0.5901,
"step": 275
},
{
"epoch": 0.552,
"grad_norm": 0.17300380766391754,
"learning_rate": 4.724e-05,
"loss": 0.7104,
"step": 276
},
{
"epoch": 0.554,
"grad_norm": 0.20044074952602386,
"learning_rate": 4.723e-05,
"loss": 0.755,
"step": 277
},
{
"epoch": 0.556,
"grad_norm": 0.1961713582277298,
"learning_rate": 4.7220000000000005e-05,
"loss": 0.708,
"step": 278
},
{
"epoch": 0.558,
"grad_norm": 0.18064777553081512,
"learning_rate": 4.7210000000000004e-05,
"loss": 0.6549,
"step": 279
},
{
"epoch": 0.56,
"grad_norm": 0.17640075087547302,
"learning_rate": 4.72e-05,
"loss": 0.6918,
"step": 280
},
{
"epoch": 0.562,
"grad_norm": 0.17156340181827545,
"learning_rate": 4.719e-05,
"loss": 0.5443,
"step": 281
},
{
"epoch": 0.564,
"grad_norm": 0.17006738483905792,
"learning_rate": 4.718e-05,
"loss": 0.731,
"step": 282
},
{
"epoch": 0.566,
"grad_norm": 0.20639865100383759,
"learning_rate": 4.7170000000000004e-05,
"loss": 0.6952,
"step": 283
},
{
"epoch": 0.568,
"grad_norm": 0.17628632485866547,
"learning_rate": 4.716e-05,
"loss": 0.6038,
"step": 284
},
{
"epoch": 0.57,
"grad_norm": 0.20290911197662354,
"learning_rate": 4.715e-05,
"loss": 0.6803,
"step": 285
},
{
"epoch": 0.572,
"grad_norm": 0.24859286844730377,
"learning_rate": 4.714e-05,
"loss": 0.6594,
"step": 286
},
{
"epoch": 0.574,
"grad_norm": 0.18021917343139648,
"learning_rate": 4.7130000000000004e-05,
"loss": 0.7659,
"step": 287
},
{
"epoch": 0.576,
"grad_norm": 0.20608916878700256,
"learning_rate": 4.712e-05,
"loss": 0.7284,
"step": 288
},
{
"epoch": 0.578,
"grad_norm": 0.17671485245227814,
"learning_rate": 4.711e-05,
"loss": 0.6577,
"step": 289
},
{
"epoch": 0.58,
"grad_norm": 0.1899622082710266,
"learning_rate": 4.71e-05,
"loss": 0.6012,
"step": 290
},
{
"epoch": 0.582,
"grad_norm": 0.19019369781017303,
"learning_rate": 4.709e-05,
"loss": 0.6374,
"step": 291
},
{
"epoch": 0.584,
"grad_norm": 0.19268132746219635,
"learning_rate": 4.708e-05,
"loss": 0.6448,
"step": 292
},
{
"epoch": 0.586,
"grad_norm": 0.16056692600250244,
"learning_rate": 4.707e-05,
"loss": 0.5231,
"step": 293
},
{
"epoch": 0.588,
"grad_norm": 0.18093591928482056,
"learning_rate": 4.706000000000001e-05,
"loss": 0.6451,
"step": 294
},
{
"epoch": 0.59,
"grad_norm": 0.19243648648262024,
"learning_rate": 4.705e-05,
"loss": 0.6161,
"step": 295
},
{
"epoch": 0.592,
"grad_norm": 0.17207425832748413,
"learning_rate": 4.7040000000000004e-05,
"loss": 0.5465,
"step": 296
},
{
"epoch": 0.594,
"grad_norm": 0.17885583639144897,
"learning_rate": 4.703e-05,
"loss": 0.7451,
"step": 297
},
{
"epoch": 0.596,
"grad_norm": 0.19622336328029633,
"learning_rate": 4.702e-05,
"loss": 0.6251,
"step": 298
},
{
"epoch": 0.598,
"grad_norm": 0.19896407425403595,
"learning_rate": 4.7010000000000006e-05,
"loss": 0.6579,
"step": 299
},
{
"epoch": 0.6,
"grad_norm": 0.21171756088733673,
"learning_rate": 4.7e-05,
"loss": 0.6662,
"step": 300
},
{
"epoch": 0.602,
"grad_norm": 0.18232490122318268,
"learning_rate": 4.699e-05,
"loss": 0.5088,
"step": 301
},
{
"epoch": 0.604,
"grad_norm": 0.1896355003118515,
"learning_rate": 4.698e-05,
"loss": 0.6084,
"step": 302
},
{
"epoch": 0.606,
"grad_norm": 0.1955353021621704,
"learning_rate": 4.6970000000000006e-05,
"loss": 0.6105,
"step": 303
},
{
"epoch": 0.608,
"grad_norm": 0.22242772579193115,
"learning_rate": 4.6960000000000004e-05,
"loss": 0.6991,
"step": 304
},
{
"epoch": 0.61,
"grad_norm": 0.1831687092781067,
"learning_rate": 4.695e-05,
"loss": 0.6907,
"step": 305
},
{
"epoch": 0.612,
"grad_norm": 0.1904367059469223,
"learning_rate": 4.694e-05,
"loss": 0.6295,
"step": 306
},
{
"epoch": 0.614,
"grad_norm": 0.1965903788805008,
"learning_rate": 4.693e-05,
"loss": 0.6932,
"step": 307
},
{
"epoch": 0.616,
"grad_norm": 0.2138614058494568,
"learning_rate": 4.6920000000000005e-05,
"loss": 0.7003,
"step": 308
},
{
"epoch": 0.618,
"grad_norm": 0.18636015057563782,
"learning_rate": 4.691e-05,
"loss": 0.6633,
"step": 309
},
{
"epoch": 0.62,
"grad_norm": 0.18050327897071838,
"learning_rate": 4.69e-05,
"loss": 0.6142,
"step": 310
},
{
"epoch": 0.622,
"grad_norm": 0.1982421725988388,
"learning_rate": 4.689e-05,
"loss": 0.6773,
"step": 311
},
{
"epoch": 0.624,
"grad_norm": 0.1651502251625061,
"learning_rate": 4.688e-05,
"loss": 0.565,
"step": 312
},
{
"epoch": 0.626,
"grad_norm": 0.17334266006946564,
"learning_rate": 4.6870000000000004e-05,
"loss": 0.6226,
"step": 313
},
{
"epoch": 0.628,
"grad_norm": 0.17907755076885223,
"learning_rate": 4.686e-05,
"loss": 0.679,
"step": 314
},
{
"epoch": 0.63,
"grad_norm": 0.25513723492622375,
"learning_rate": 4.685000000000001e-05,
"loss": 0.5717,
"step": 315
},
{
"epoch": 0.632,
"grad_norm": 0.18325062096118927,
"learning_rate": 4.684e-05,
"loss": 0.6351,
"step": 316
},
{
"epoch": 0.634,
"grad_norm": 0.20759661495685577,
"learning_rate": 4.6830000000000004e-05,
"loss": 0.6668,
"step": 317
},
{
"epoch": 0.636,
"grad_norm": 0.19245915114879608,
"learning_rate": 4.682e-05,
"loss": 0.6529,
"step": 318
},
{
"epoch": 0.638,
"grad_norm": 0.1865272969007492,
"learning_rate": 4.681e-05,
"loss": 0.5857,
"step": 319
},
{
"epoch": 0.64,
"grad_norm": 0.2123071849346161,
"learning_rate": 4.6800000000000006e-05,
"loss": 0.65,
"step": 320
},
{
"epoch": 0.642,
"grad_norm": 0.1932321935892105,
"learning_rate": 4.679e-05,
"loss": 0.6381,
"step": 321
},
{
"epoch": 0.644,
"grad_norm": 0.19530931115150452,
"learning_rate": 4.678e-05,
"loss": 0.7162,
"step": 322
},
{
"epoch": 0.646,
"grad_norm": 0.17521904408931732,
"learning_rate": 4.677e-05,
"loss": 0.5582,
"step": 323
},
{
"epoch": 0.648,
"grad_norm": 0.19137729704380035,
"learning_rate": 4.6760000000000006e-05,
"loss": 0.5357,
"step": 324
},
{
"epoch": 0.65,
"grad_norm": 0.19260117411613464,
"learning_rate": 4.6750000000000005e-05,
"loss": 0.6176,
"step": 325
},
{
"epoch": 0.652,
"grad_norm": 0.1776568442583084,
"learning_rate": 4.674e-05,
"loss": 0.5889,
"step": 326
},
{
"epoch": 0.654,
"grad_norm": 0.2181580364704132,
"learning_rate": 4.673e-05,
"loss": 0.557,
"step": 327
},
{
"epoch": 0.656,
"grad_norm": 0.22205041348934174,
"learning_rate": 4.672e-05,
"loss": 0.6275,
"step": 328
},
{
"epoch": 0.658,
"grad_norm": 0.18241576850414276,
"learning_rate": 4.6710000000000005e-05,
"loss": 0.5873,
"step": 329
},
{
"epoch": 0.66,
"grad_norm": 0.17380136251449585,
"learning_rate": 4.6700000000000003e-05,
"loss": 0.5892,
"step": 330
},
{
"epoch": 0.662,
"grad_norm": 0.16387759149074554,
"learning_rate": 4.669e-05,
"loss": 0.5868,
"step": 331
},
{
"epoch": 0.664,
"grad_norm": 0.18557021021842957,
"learning_rate": 4.668e-05,
"loss": 0.5483,
"step": 332
},
{
"epoch": 0.666,
"grad_norm": 0.19413799047470093,
"learning_rate": 4.6670000000000005e-05,
"loss": 0.6387,
"step": 333
},
{
"epoch": 0.668,
"grad_norm": 0.17195573449134827,
"learning_rate": 4.6660000000000004e-05,
"loss": 0.607,
"step": 334
},
{
"epoch": 0.67,
"grad_norm": 0.17779302597045898,
"learning_rate": 4.665e-05,
"loss": 0.5974,
"step": 335
},
{
"epoch": 0.672,
"grad_norm": 0.18037612736225128,
"learning_rate": 4.664e-05,
"loss": 0.5918,
"step": 336
},
{
"epoch": 0.674,
"grad_norm": 0.1866716593503952,
"learning_rate": 4.663e-05,
"loss": 0.6466,
"step": 337
},
{
"epoch": 0.676,
"grad_norm": 0.20922498404979706,
"learning_rate": 4.6620000000000004e-05,
"loss": 0.651,
"step": 338
},
{
"epoch": 0.678,
"grad_norm": 0.18638573586940765,
"learning_rate": 4.661e-05,
"loss": 0.5522,
"step": 339
},
{
"epoch": 0.68,
"grad_norm": 0.2101825624704361,
"learning_rate": 4.660000000000001e-05,
"loss": 0.6816,
"step": 340
},
{
"epoch": 0.682,
"grad_norm": 0.17060095071792603,
"learning_rate": 4.659e-05,
"loss": 0.6462,
"step": 341
},
{
"epoch": 0.684,
"grad_norm": 0.1735481470823288,
"learning_rate": 4.6580000000000005e-05,
"loss": 0.6128,
"step": 342
},
{
"epoch": 0.686,
"grad_norm": 0.1967155635356903,
"learning_rate": 4.657e-05,
"loss": 0.597,
"step": 343
},
{
"epoch": 0.688,
"grad_norm": 0.18372522294521332,
"learning_rate": 4.656e-05,
"loss": 0.6613,
"step": 344
},
{
"epoch": 0.69,
"grad_norm": 0.16918990015983582,
"learning_rate": 4.655000000000001e-05,
"loss": 0.5559,
"step": 345
},
{
"epoch": 0.692,
"grad_norm": 0.1488986760377884,
"learning_rate": 4.654e-05,
"loss": 0.5447,
"step": 346
},
{
"epoch": 0.694,
"grad_norm": 0.1725722849369049,
"learning_rate": 4.6530000000000003e-05,
"loss": 0.6245,
"step": 347
},
{
"epoch": 0.696,
"grad_norm": 0.17919033765792847,
"learning_rate": 4.652e-05,
"loss": 0.5695,
"step": 348
},
{
"epoch": 0.698,
"grad_norm": 0.1841966211795807,
"learning_rate": 4.651e-05,
"loss": 0.5604,
"step": 349
},
{
"epoch": 0.7,
"grad_norm": 0.22056150436401367,
"learning_rate": 4.6500000000000005e-05,
"loss": 0.6476,
"step": 350
},
{
"epoch": 0.702,
"grad_norm": 0.2027232050895691,
"learning_rate": 4.649e-05,
"loss": 0.5757,
"step": 351
},
{
"epoch": 0.704,
"grad_norm": 0.18255826830863953,
"learning_rate": 4.648e-05,
"loss": 0.637,
"step": 352
},
{
"epoch": 0.706,
"grad_norm": 0.19408002495765686,
"learning_rate": 4.647e-05,
"loss": 0.6453,
"step": 353
},
{
"epoch": 0.708,
"grad_norm": 0.1874883621931076,
"learning_rate": 4.6460000000000006e-05,
"loss": 0.4938,
"step": 354
},
{
"epoch": 0.71,
"grad_norm": 0.20282092690467834,
"learning_rate": 4.6450000000000004e-05,
"loss": 0.594,
"step": 355
},
{
"epoch": 0.712,
"grad_norm": 0.19063018262386322,
"learning_rate": 4.644e-05,
"loss": 0.7343,
"step": 356
},
{
"epoch": 0.714,
"grad_norm": 0.1925002485513687,
"learning_rate": 4.643e-05,
"loss": 0.6524,
"step": 357
},
{
"epoch": 0.716,
"grad_norm": 0.18297319114208221,
"learning_rate": 4.642e-05,
"loss": 0.6049,
"step": 358
},
{
"epoch": 0.718,
"grad_norm": 0.17188158631324768,
"learning_rate": 4.6410000000000005e-05,
"loss": 0.5932,
"step": 359
},
{
"epoch": 0.72,
"grad_norm": 0.23126398026943207,
"learning_rate": 4.64e-05,
"loss": 0.7189,
"step": 360
},
{
"epoch": 0.722,
"grad_norm": 0.21801422536373138,
"learning_rate": 4.639e-05,
"loss": 0.642,
"step": 361
},
{
"epoch": 0.724,
"grad_norm": 0.19002754986286163,
"learning_rate": 4.638e-05,
"loss": 0.6988,
"step": 362
},
{
"epoch": 0.726,
"grad_norm": 0.18377232551574707,
"learning_rate": 4.6370000000000005e-05,
"loss": 0.652,
"step": 363
},
{
"epoch": 0.728,
"grad_norm": 0.1825602501630783,
"learning_rate": 4.636e-05,
"loss": 0.578,
"step": 364
},
{
"epoch": 0.73,
"grad_norm": 0.17906206846237183,
"learning_rate": 4.635e-05,
"loss": 0.5577,
"step": 365
},
{
"epoch": 0.732,
"grad_norm": 0.17708086967468262,
"learning_rate": 4.634e-05,
"loss": 0.5671,
"step": 366
},
{
"epoch": 0.734,
"grad_norm": 0.2228001058101654,
"learning_rate": 4.633e-05,
"loss": 0.648,
"step": 367
},
{
"epoch": 0.736,
"grad_norm": 0.1861792653799057,
"learning_rate": 4.6320000000000004e-05,
"loss": 0.6291,
"step": 368
},
{
"epoch": 0.738,
"grad_norm": 0.19840364158153534,
"learning_rate": 4.631e-05,
"loss": 0.5629,
"step": 369
},
{
"epoch": 0.74,
"grad_norm": 0.204924076795578,
"learning_rate": 4.630000000000001e-05,
"loss": 0.7094,
"step": 370
},
{
"epoch": 0.742,
"grad_norm": 0.18317362666130066,
"learning_rate": 4.629e-05,
"loss": 0.5192,
"step": 371
},
{
"epoch": 0.744,
"grad_norm": 0.20287658274173737,
"learning_rate": 4.6280000000000004e-05,
"loss": 0.6877,
"step": 372
},
{
"epoch": 0.746,
"grad_norm": 0.206755593419075,
"learning_rate": 4.627e-05,
"loss": 0.671,
"step": 373
},
{
"epoch": 0.748,
"grad_norm": 0.21340832114219666,
"learning_rate": 4.626e-05,
"loss": 0.644,
"step": 374
},
{
"epoch": 0.75,
"grad_norm": 0.20817971229553223,
"learning_rate": 4.6250000000000006e-05,
"loss": 0.7255,
"step": 375
},
{
"epoch": 0.752,
"grad_norm": 0.17707408964633942,
"learning_rate": 4.624e-05,
"loss": 0.5997,
"step": 376
},
{
"epoch": 0.754,
"grad_norm": 0.2087557464838028,
"learning_rate": 4.623e-05,
"loss": 0.6961,
"step": 377
},
{
"epoch": 0.756,
"grad_norm": 0.1889437437057495,
"learning_rate": 4.622e-05,
"loss": 0.5635,
"step": 378
},
{
"epoch": 0.758,
"grad_norm": 0.17022623121738434,
"learning_rate": 4.6210000000000006e-05,
"loss": 0.5472,
"step": 379
},
{
"epoch": 0.76,
"grad_norm": 0.1839427947998047,
"learning_rate": 4.6200000000000005e-05,
"loss": 0.5968,
"step": 380
},
{
"epoch": 0.762,
"grad_norm": 0.19771188497543335,
"learning_rate": 4.619e-05,
"loss": 0.5682,
"step": 381
},
{
"epoch": 0.764,
"grad_norm": 0.2051680088043213,
"learning_rate": 4.618e-05,
"loss": 0.6268,
"step": 382
},
{
"epoch": 0.766,
"grad_norm": 0.2551143765449524,
"learning_rate": 4.617e-05,
"loss": 0.6234,
"step": 383
},
{
"epoch": 0.768,
"grad_norm": 0.18750117719173431,
"learning_rate": 4.6160000000000005e-05,
"loss": 0.7146,
"step": 384
},
{
"epoch": 0.77,
"grad_norm": 0.19697488844394684,
"learning_rate": 4.6150000000000004e-05,
"loss": 0.654,
"step": 385
},
{
"epoch": 0.772,
"grad_norm": 0.20981992781162262,
"learning_rate": 4.614e-05,
"loss": 0.5874,
"step": 386
},
{
"epoch": 0.774,
"grad_norm": 0.1924438178539276,
"learning_rate": 4.613e-05,
"loss": 0.6167,
"step": 387
},
{
"epoch": 0.776,
"grad_norm": 0.1714833378791809,
"learning_rate": 4.612e-05,
"loss": 0.5145,
"step": 388
},
{
"epoch": 0.778,
"grad_norm": 0.17212292551994324,
"learning_rate": 4.6110000000000004e-05,
"loss": 0.5355,
"step": 389
},
{
"epoch": 0.78,
"grad_norm": 0.20480036735534668,
"learning_rate": 4.61e-05,
"loss": 0.6193,
"step": 390
},
{
"epoch": 0.782,
"grad_norm": 0.21422697603702545,
"learning_rate": 4.609e-05,
"loss": 0.6715,
"step": 391
},
{
"epoch": 0.784,
"grad_norm": 0.2247288078069687,
"learning_rate": 4.608e-05,
"loss": 0.597,
"step": 392
},
{
"epoch": 0.786,
"grad_norm": 0.1962817907333374,
"learning_rate": 4.6070000000000004e-05,
"loss": 0.5673,
"step": 393
},
{
"epoch": 0.788,
"grad_norm": 0.21063271164894104,
"learning_rate": 4.606e-05,
"loss": 0.6286,
"step": 394
},
{
"epoch": 0.79,
"grad_norm": 0.21100153028964996,
"learning_rate": 4.605e-05,
"loss": 0.6427,
"step": 395
},
{
"epoch": 0.792,
"grad_norm": 0.1841425895690918,
"learning_rate": 4.604e-05,
"loss": 0.5852,
"step": 396
},
{
"epoch": 0.794,
"grad_norm": 0.21620410680770874,
"learning_rate": 4.603e-05,
"loss": 0.6688,
"step": 397
},
{
"epoch": 0.796,
"grad_norm": 0.2009444534778595,
"learning_rate": 4.602e-05,
"loss": 0.5766,
"step": 398
},
{
"epoch": 0.798,
"grad_norm": 0.18764597177505493,
"learning_rate": 4.601e-05,
"loss": 0.5836,
"step": 399
},
{
"epoch": 0.8,
"grad_norm": 0.1796872466802597,
"learning_rate": 4.600000000000001e-05,
"loss": 0.6051,
"step": 400
},
{
"epoch": 0.802,
"grad_norm": 0.18313618004322052,
"learning_rate": 4.599e-05,
"loss": 0.5459,
"step": 401
},
{
"epoch": 0.804,
"grad_norm": 0.1805708110332489,
"learning_rate": 4.5980000000000004e-05,
"loss": 0.5259,
"step": 402
},
{
"epoch": 0.806,
"grad_norm": 0.14790281653404236,
"learning_rate": 4.597e-05,
"loss": 0.5112,
"step": 403
},
{
"epoch": 0.808,
"grad_norm": 0.19898934662342072,
"learning_rate": 4.596e-05,
"loss": 0.6487,
"step": 404
},
{
"epoch": 0.81,
"grad_norm": 0.2173125445842743,
"learning_rate": 4.5950000000000006e-05,
"loss": 0.5429,
"step": 405
},
{
"epoch": 0.812,
"grad_norm": 0.22148528695106506,
"learning_rate": 4.594e-05,
"loss": 0.6078,
"step": 406
},
{
"epoch": 0.814,
"grad_norm": 0.1788310557603836,
"learning_rate": 4.593e-05,
"loss": 0.5902,
"step": 407
},
{
"epoch": 0.816,
"grad_norm": 0.17214561998844147,
"learning_rate": 4.592e-05,
"loss": 0.6102,
"step": 408
},
{
"epoch": 0.818,
"grad_norm": 0.20561139285564423,
"learning_rate": 4.5910000000000006e-05,
"loss": 0.6989,
"step": 409
},
{
"epoch": 0.82,
"grad_norm": 0.1942395567893982,
"learning_rate": 4.5900000000000004e-05,
"loss": 0.5499,
"step": 410
},
{
"epoch": 0.822,
"grad_norm": 0.2051408886909485,
"learning_rate": 4.589e-05,
"loss": 0.5664,
"step": 411
},
{
"epoch": 0.824,
"grad_norm": 0.18937014043331146,
"learning_rate": 4.588e-05,
"loss": 0.6156,
"step": 412
},
{
"epoch": 0.826,
"grad_norm": 0.16688166558742523,
"learning_rate": 4.587e-05,
"loss": 0.5419,
"step": 413
},
{
"epoch": 0.828,
"grad_norm": 0.15259812772274017,
"learning_rate": 4.5860000000000005e-05,
"loss": 0.5237,
"step": 414
},
{
"epoch": 0.83,
"grad_norm": 0.18324361741542816,
"learning_rate": 4.585e-05,
"loss": 0.5555,
"step": 415
},
{
"epoch": 0.832,
"grad_norm": 0.20461533963680267,
"learning_rate": 4.584e-05,
"loss": 0.5434,
"step": 416
},
{
"epoch": 0.834,
"grad_norm": 0.1844327300786972,
"learning_rate": 4.583e-05,
"loss": 0.6336,
"step": 417
},
{
"epoch": 0.836,
"grad_norm": 0.18570420145988464,
"learning_rate": 4.5820000000000005e-05,
"loss": 0.5067,
"step": 418
},
{
"epoch": 0.838,
"grad_norm": 0.21089297533035278,
"learning_rate": 4.5810000000000004e-05,
"loss": 0.5911,
"step": 419
},
{
"epoch": 0.84,
"grad_norm": 0.21014462411403656,
"learning_rate": 4.58e-05,
"loss": 0.6797,
"step": 420
},
{
"epoch": 0.842,
"grad_norm": 0.20022115111351013,
"learning_rate": 4.579e-05,
"loss": 0.6301,
"step": 421
},
{
"epoch": 0.844,
"grad_norm": 0.2022683173418045,
"learning_rate": 4.578e-05,
"loss": 0.6682,
"step": 422
},
{
"epoch": 0.846,
"grad_norm": 0.19390150904655457,
"learning_rate": 4.5770000000000004e-05,
"loss": 0.6788,
"step": 423
},
{
"epoch": 0.848,
"grad_norm": 0.18870306015014648,
"learning_rate": 4.576e-05,
"loss": 0.656,
"step": 424
},
{
"epoch": 0.85,
"grad_norm": 0.1926574409008026,
"learning_rate": 4.575e-05,
"loss": 0.6379,
"step": 425
},
{
"epoch": 0.852,
"grad_norm": 0.22433732450008392,
"learning_rate": 4.574e-05,
"loss": 0.6082,
"step": 426
},
{
"epoch": 0.854,
"grad_norm": 0.18340998888015747,
"learning_rate": 4.573e-05,
"loss": 0.5933,
"step": 427
},
{
"epoch": 0.856,
"grad_norm": 0.19337745010852814,
"learning_rate": 4.572e-05,
"loss": 0.5538,
"step": 428
},
{
"epoch": 0.858,
"grad_norm": 0.207818865776062,
"learning_rate": 4.571e-05,
"loss": 0.6633,
"step": 429
},
{
"epoch": 0.86,
"grad_norm": 0.17644749581813812,
"learning_rate": 4.5700000000000006e-05,
"loss": 0.6244,
"step": 430
},
{
"epoch": 0.862,
"grad_norm": 0.19277946650981903,
"learning_rate": 4.569e-05,
"loss": 0.6499,
"step": 431
},
{
"epoch": 0.864,
"grad_norm": 0.18463121354579926,
"learning_rate": 4.568e-05,
"loss": 0.6349,
"step": 432
},
{
"epoch": 0.866,
"grad_norm": 0.1824328601360321,
"learning_rate": 4.567e-05,
"loss": 0.5354,
"step": 433
},
{
"epoch": 0.868,
"grad_norm": 0.19813786447048187,
"learning_rate": 4.566e-05,
"loss": 0.6313,
"step": 434
},
{
"epoch": 0.87,
"grad_norm": 0.1948874592781067,
"learning_rate": 4.5650000000000005e-05,
"loss": 0.6559,
"step": 435
},
{
"epoch": 0.872,
"grad_norm": 0.21239355206489563,
"learning_rate": 4.564e-05,
"loss": 0.7408,
"step": 436
},
{
"epoch": 0.874,
"grad_norm": 0.19978439807891846,
"learning_rate": 4.563e-05,
"loss": 0.6459,
"step": 437
},
{
"epoch": 0.876,
"grad_norm": 0.19137194752693176,
"learning_rate": 4.562e-05,
"loss": 0.5706,
"step": 438
},
{
"epoch": 0.878,
"grad_norm": 0.21602730453014374,
"learning_rate": 4.5610000000000005e-05,
"loss": 0.7547,
"step": 439
},
{
"epoch": 0.88,
"grad_norm": 0.18226025998592377,
"learning_rate": 4.5600000000000004e-05,
"loss": 0.5271,
"step": 440
},
{
"epoch": 0.882,
"grad_norm": 0.18746662139892578,
"learning_rate": 4.559e-05,
"loss": 0.6545,
"step": 441
},
{
"epoch": 0.884,
"grad_norm": 0.19350747764110565,
"learning_rate": 4.558e-05,
"loss": 0.4961,
"step": 442
},
{
"epoch": 0.886,
"grad_norm": 0.1879873424768448,
"learning_rate": 4.557e-05,
"loss": 0.6219,
"step": 443
},
{
"epoch": 0.888,
"grad_norm": 0.1813156008720398,
"learning_rate": 4.5560000000000004e-05,
"loss": 0.5553,
"step": 444
},
{
"epoch": 0.89,
"grad_norm": 0.19688071310520172,
"learning_rate": 4.555e-05,
"loss": 0.5252,
"step": 445
},
{
"epoch": 0.892,
"grad_norm": 0.21453718841075897,
"learning_rate": 4.554000000000001e-05,
"loss": 0.627,
"step": 446
},
{
"epoch": 0.894,
"grad_norm": 0.22127218544483185,
"learning_rate": 4.553e-05,
"loss": 0.5629,
"step": 447
},
{
"epoch": 0.896,
"grad_norm": 0.1822357475757599,
"learning_rate": 4.5520000000000005e-05,
"loss": 0.551,
"step": 448
},
{
"epoch": 0.898,
"grad_norm": 0.1829032003879547,
"learning_rate": 4.551e-05,
"loss": 0.5749,
"step": 449
},
{
"epoch": 0.9,
"grad_norm": 0.1852453649044037,
"learning_rate": 4.55e-05,
"loss": 0.5672,
"step": 450
},
{
"epoch": 0.902,
"grad_norm": 0.20939582586288452,
"learning_rate": 4.549000000000001e-05,
"loss": 0.6513,
"step": 451
},
{
"epoch": 0.904,
"grad_norm": 0.18649578094482422,
"learning_rate": 4.548e-05,
"loss": 0.5475,
"step": 452
},
{
"epoch": 0.906,
"grad_norm": 0.17368032038211823,
"learning_rate": 4.5470000000000003e-05,
"loss": 0.6117,
"step": 453
},
{
"epoch": 0.908,
"grad_norm": 0.20324939489364624,
"learning_rate": 4.546e-05,
"loss": 0.4885,
"step": 454
},
{
"epoch": 0.91,
"grad_norm": 0.19989271461963654,
"learning_rate": 4.545000000000001e-05,
"loss": 0.6796,
"step": 455
},
{
"epoch": 0.912,
"grad_norm": 0.23006810247898102,
"learning_rate": 4.5440000000000005e-05,
"loss": 0.6818,
"step": 456
},
{
"epoch": 0.914,
"grad_norm": 0.20702069997787476,
"learning_rate": 4.543e-05,
"loss": 0.6044,
"step": 457
},
{
"epoch": 0.916,
"grad_norm": 0.20405983924865723,
"learning_rate": 4.542e-05,
"loss": 0.6659,
"step": 458
},
{
"epoch": 0.918,
"grad_norm": 0.20356738567352295,
"learning_rate": 4.541e-05,
"loss": 0.6003,
"step": 459
},
{
"epoch": 0.92,
"grad_norm": 0.20328208804130554,
"learning_rate": 4.5400000000000006e-05,
"loss": 0.649,
"step": 460
},
{
"epoch": 0.922,
"grad_norm": 0.198821559548378,
"learning_rate": 4.5390000000000004e-05,
"loss": 0.5452,
"step": 461
},
{
"epoch": 0.924,
"grad_norm": 0.17357122898101807,
"learning_rate": 4.538e-05,
"loss": 0.6672,
"step": 462
},
{
"epoch": 0.926,
"grad_norm": 0.18239179253578186,
"learning_rate": 4.537e-05,
"loss": 0.6415,
"step": 463
},
{
"epoch": 0.928,
"grad_norm": 0.21714146435260773,
"learning_rate": 4.536e-05,
"loss": 0.6111,
"step": 464
},
{
"epoch": 0.93,
"grad_norm": 0.19968821108341217,
"learning_rate": 4.5350000000000005e-05,
"loss": 0.6068,
"step": 465
},
{
"epoch": 0.932,
"grad_norm": 0.21064497530460358,
"learning_rate": 4.534e-05,
"loss": 0.5667,
"step": 466
},
{
"epoch": 0.934,
"grad_norm": 0.19035053253173828,
"learning_rate": 4.533e-05,
"loss": 0.608,
"step": 467
},
{
"epoch": 0.936,
"grad_norm": 0.1872062236070633,
"learning_rate": 4.532e-05,
"loss": 0.4219,
"step": 468
},
{
"epoch": 0.938,
"grad_norm": 0.21659088134765625,
"learning_rate": 4.5310000000000005e-05,
"loss": 0.6364,
"step": 469
},
{
"epoch": 0.94,
"grad_norm": 0.2094154953956604,
"learning_rate": 4.53e-05,
"loss": 0.609,
"step": 470
},
{
"epoch": 0.942,
"grad_norm": 0.19277390837669373,
"learning_rate": 4.529e-05,
"loss": 0.5491,
"step": 471
},
{
"epoch": 0.944,
"grad_norm": 0.1932992786169052,
"learning_rate": 4.528e-05,
"loss": 0.6397,
"step": 472
},
{
"epoch": 0.946,
"grad_norm": 0.18232814967632294,
"learning_rate": 4.527e-05,
"loss": 0.6154,
"step": 473
},
{
"epoch": 0.948,
"grad_norm": 0.21726353466510773,
"learning_rate": 4.5260000000000004e-05,
"loss": 0.5741,
"step": 474
},
{
"epoch": 0.95,
"grad_norm": 0.1735064834356308,
"learning_rate": 4.525e-05,
"loss": 0.5322,
"step": 475
},
{
"epoch": 0.952,
"grad_norm": 0.22087444365024567,
"learning_rate": 4.524000000000001e-05,
"loss": 0.6378,
"step": 476
},
{
"epoch": 0.954,
"grad_norm": 0.2162839025259018,
"learning_rate": 4.523e-05,
"loss": 0.5818,
"step": 477
},
{
"epoch": 0.956,
"grad_norm": 0.21821214258670807,
"learning_rate": 4.5220000000000004e-05,
"loss": 0.6076,
"step": 478
},
{
"epoch": 0.958,
"grad_norm": 0.1862291395664215,
"learning_rate": 4.521e-05,
"loss": 0.5263,
"step": 479
},
{
"epoch": 0.96,
"grad_norm": 0.1847664713859558,
"learning_rate": 4.52e-05,
"loss": 0.5983,
"step": 480
},
{
"epoch": 0.962,
"grad_norm": 0.18088018894195557,
"learning_rate": 4.5190000000000006e-05,
"loss": 0.4899,
"step": 481
},
{
"epoch": 0.964,
"grad_norm": 0.22544273734092712,
"learning_rate": 4.518e-05,
"loss": 0.6154,
"step": 482
},
{
"epoch": 0.966,
"grad_norm": 0.23055431246757507,
"learning_rate": 4.517e-05,
"loss": 0.6242,
"step": 483
},
{
"epoch": 0.968,
"grad_norm": 0.20244711637496948,
"learning_rate": 4.516e-05,
"loss": 0.6026,
"step": 484
},
{
"epoch": 0.97,
"grad_norm": 0.21975699067115784,
"learning_rate": 4.5150000000000006e-05,
"loss": 0.6892,
"step": 485
},
{
"epoch": 0.972,
"grad_norm": 0.18714089691638947,
"learning_rate": 4.5140000000000005e-05,
"loss": 0.5943,
"step": 486
},
{
"epoch": 0.974,
"grad_norm": 0.21303005516529083,
"learning_rate": 4.513e-05,
"loss": 0.5504,
"step": 487
},
{
"epoch": 0.976,
"grad_norm": 0.2207714468240738,
"learning_rate": 4.512e-05,
"loss": 0.6564,
"step": 488
},
{
"epoch": 0.978,
"grad_norm": 0.17340348660945892,
"learning_rate": 4.511e-05,
"loss": 0.527,
"step": 489
},
{
"epoch": 0.98,
"grad_norm": 0.20630691945552826,
"learning_rate": 4.5100000000000005e-05,
"loss": 0.64,
"step": 490
},
{
"epoch": 0.982,
"grad_norm": 0.2178223878145218,
"learning_rate": 4.5090000000000004e-05,
"loss": 0.6673,
"step": 491
},
{
"epoch": 0.984,
"grad_norm": 0.1884588748216629,
"learning_rate": 4.508e-05,
"loss": 0.5482,
"step": 492
},
{
"epoch": 0.986,
"grad_norm": 0.189361572265625,
"learning_rate": 4.507e-05,
"loss": 0.6116,
"step": 493
},
{
"epoch": 0.988,
"grad_norm": 0.20682279765605927,
"learning_rate": 4.506e-05,
"loss": 0.5871,
"step": 494
},
{
"epoch": 0.99,
"grad_norm": 0.22821040451526642,
"learning_rate": 4.5050000000000004e-05,
"loss": 0.6539,
"step": 495
},
{
"epoch": 0.992,
"grad_norm": 0.21789148449897766,
"learning_rate": 4.504e-05,
"loss": 0.6111,
"step": 496
},
{
"epoch": 0.994,
"grad_norm": 0.1959594488143921,
"learning_rate": 4.503e-05,
"loss": 0.5606,
"step": 497
},
{
"epoch": 0.996,
"grad_norm": 0.2024727165699005,
"learning_rate": 4.502e-05,
"loss": 0.5868,
"step": 498
},
{
"epoch": 0.998,
"grad_norm": 0.2144571989774704,
"learning_rate": 4.5010000000000004e-05,
"loss": 0.6423,
"step": 499
},
{
"epoch": 1.0,
"grad_norm": 0.1946929693222046,
"learning_rate": 4.5e-05,
"loss": 0.6486,
"step": 500
},
{
"epoch": 1.002,
"grad_norm": 0.22168385982513428,
"learning_rate": 4.499e-05,
"loss": 0.6445,
"step": 501
},
{
"epoch": 1.004,
"grad_norm": 0.2581266462802887,
"learning_rate": 4.498e-05,
"loss": 0.597,
"step": 502
},
{
"epoch": 1.006,
"grad_norm": 0.2223937064409256,
"learning_rate": 4.497e-05,
"loss": 0.5913,
"step": 503
},
{
"epoch": 1.008,
"grad_norm": 0.19220681488513947,
"learning_rate": 4.496e-05,
"loss": 0.5636,
"step": 504
},
{
"epoch": 1.01,
"grad_norm": 0.19889682531356812,
"learning_rate": 4.495e-05,
"loss": 0.5497,
"step": 505
},
{
"epoch": 1.012,
"grad_norm": 0.19532665610313416,
"learning_rate": 4.494000000000001e-05,
"loss": 0.6121,
"step": 506
},
{
"epoch": 1.014,
"grad_norm": 0.21716678142547607,
"learning_rate": 4.493e-05,
"loss": 0.6513,
"step": 507
},
{
"epoch": 1.016,
"grad_norm": 0.18669895827770233,
"learning_rate": 4.4920000000000004e-05,
"loss": 0.5941,
"step": 508
},
{
"epoch": 1.018,
"grad_norm": 0.18874263763427734,
"learning_rate": 4.491e-05,
"loss": 0.5748,
"step": 509
},
{
"epoch": 1.02,
"grad_norm": 0.2070489227771759,
"learning_rate": 4.49e-05,
"loss": 0.6097,
"step": 510
},
{
"epoch": 1.022,
"grad_norm": 0.21579885482788086,
"learning_rate": 4.4890000000000006e-05,
"loss": 0.5662,
"step": 511
},
{
"epoch": 1.024,
"grad_norm": 0.18448017537593842,
"learning_rate": 4.488e-05,
"loss": 0.5953,
"step": 512
},
{
"epoch": 1.026,
"grad_norm": 0.201309934258461,
"learning_rate": 4.487e-05,
"loss": 0.5791,
"step": 513
},
{
"epoch": 1.028,
"grad_norm": 0.21169063448905945,
"learning_rate": 4.486e-05,
"loss": 0.6305,
"step": 514
},
{
"epoch": 1.03,
"grad_norm": 0.18058130145072937,
"learning_rate": 4.4850000000000006e-05,
"loss": 0.5972,
"step": 515
},
{
"epoch": 1.032,
"grad_norm": 0.2007116824388504,
"learning_rate": 4.4840000000000004e-05,
"loss": 0.5788,
"step": 516
},
{
"epoch": 1.034,
"grad_norm": 0.21135172247886658,
"learning_rate": 4.483e-05,
"loss": 0.62,
"step": 517
},
{
"epoch": 1.036,
"grad_norm": 0.19215519726276398,
"learning_rate": 4.482e-05,
"loss": 0.5778,
"step": 518
},
{
"epoch": 1.038,
"grad_norm": 0.1865689903497696,
"learning_rate": 4.481e-05,
"loss": 0.6138,
"step": 519
},
{
"epoch": 1.04,
"grad_norm": 0.20218238234519958,
"learning_rate": 4.4800000000000005e-05,
"loss": 0.5906,
"step": 520
},
{
"epoch": 1.042,
"grad_norm": 0.21387755870819092,
"learning_rate": 4.479e-05,
"loss": 0.5725,
"step": 521
},
{
"epoch": 1.044,
"grad_norm": 0.19877299666404724,
"learning_rate": 4.478e-05,
"loss": 0.5005,
"step": 522
},
{
"epoch": 1.046,
"grad_norm": 0.21403655409812927,
"learning_rate": 4.477e-05,
"loss": 0.6002,
"step": 523
},
{
"epoch": 1.048,
"grad_norm": 0.17705759406089783,
"learning_rate": 4.4760000000000005e-05,
"loss": 0.6179,
"step": 524
},
{
"epoch": 1.05,
"grad_norm": 0.18957218527793884,
"learning_rate": 4.4750000000000004e-05,
"loss": 0.5991,
"step": 525
},
{
"epoch": 1.052,
"grad_norm": 0.1773412674665451,
"learning_rate": 4.474e-05,
"loss": 0.591,
"step": 526
},
{
"epoch": 1.054,
"grad_norm": 0.20133773982524872,
"learning_rate": 4.473e-05,
"loss": 0.5994,
"step": 527
},
{
"epoch": 1.056,
"grad_norm": 0.20997783541679382,
"learning_rate": 4.472e-05,
"loss": 0.5802,
"step": 528
},
{
"epoch": 1.058,
"grad_norm": 0.17510485649108887,
"learning_rate": 4.4710000000000004e-05,
"loss": 0.6102,
"step": 529
},
{
"epoch": 1.06,
"grad_norm": 0.19752104580402374,
"learning_rate": 4.47e-05,
"loss": 0.7215,
"step": 530
},
{
"epoch": 1.062,
"grad_norm": 0.2410070151090622,
"learning_rate": 4.469e-05,
"loss": 0.6564,
"step": 531
},
{
"epoch": 1.064,
"grad_norm": 0.21721455454826355,
"learning_rate": 4.468e-05,
"loss": 0.548,
"step": 532
},
{
"epoch": 1.066,
"grad_norm": 0.23381918668746948,
"learning_rate": 4.467e-05,
"loss": 0.66,
"step": 533
},
{
"epoch": 1.068,
"grad_norm": 0.1823287308216095,
"learning_rate": 4.466e-05,
"loss": 0.521,
"step": 534
},
{
"epoch": 1.07,
"grad_norm": 0.20097286999225616,
"learning_rate": 4.465e-05,
"loss": 0.5167,
"step": 535
},
{
"epoch": 1.072,
"grad_norm": 0.19682855904102325,
"learning_rate": 4.4640000000000006e-05,
"loss": 0.5618,
"step": 536
},
{
"epoch": 1.074,
"grad_norm": 0.2131110429763794,
"learning_rate": 4.463e-05,
"loss": 0.7303,
"step": 537
},
{
"epoch": 1.076,
"grad_norm": 0.2144109010696411,
"learning_rate": 4.462e-05,
"loss": 0.6177,
"step": 538
},
{
"epoch": 1.078,
"grad_norm": 0.19851645827293396,
"learning_rate": 4.461e-05,
"loss": 0.6451,
"step": 539
},
{
"epoch": 1.08,
"grad_norm": 0.19800704717636108,
"learning_rate": 4.46e-05,
"loss": 0.6469,
"step": 540
},
{
"epoch": 1.082,
"grad_norm": 0.19389107823371887,
"learning_rate": 4.4590000000000005e-05,
"loss": 0.6013,
"step": 541
},
{
"epoch": 1.084,
"grad_norm": 0.20725345611572266,
"learning_rate": 4.458e-05,
"loss": 0.5639,
"step": 542
},
{
"epoch": 1.086,
"grad_norm": 0.20398126542568207,
"learning_rate": 4.457e-05,
"loss": 0.7066,
"step": 543
},
{
"epoch": 1.088,
"grad_norm": 0.23301570117473602,
"learning_rate": 4.456e-05,
"loss": 0.683,
"step": 544
},
{
"epoch": 1.09,
"grad_norm": 0.18302753567695618,
"learning_rate": 4.4550000000000005e-05,
"loss": 0.5672,
"step": 545
},
{
"epoch": 1.092,
"grad_norm": 0.19786565005779266,
"learning_rate": 4.4540000000000004e-05,
"loss": 0.6621,
"step": 546
},
{
"epoch": 1.094,
"grad_norm": 0.19150419533252716,
"learning_rate": 4.453e-05,
"loss": 0.5648,
"step": 547
},
{
"epoch": 1.096,
"grad_norm": 0.20977294445037842,
"learning_rate": 4.452e-05,
"loss": 0.5941,
"step": 548
},
{
"epoch": 1.098,
"grad_norm": 0.19033905863761902,
"learning_rate": 4.451e-05,
"loss": 0.5862,
"step": 549
},
{
"epoch": 1.1,
"grad_norm": 0.17870080471038818,
"learning_rate": 4.4500000000000004e-05,
"loss": 0.5119,
"step": 550
},
{
"epoch": 1.102,
"grad_norm": 0.19028523564338684,
"learning_rate": 4.449e-05,
"loss": 0.6146,
"step": 551
},
{
"epoch": 1.104,
"grad_norm": 0.20481224358081818,
"learning_rate": 4.448e-05,
"loss": 0.6182,
"step": 552
},
{
"epoch": 1.106,
"grad_norm": 0.5263721942901611,
"learning_rate": 4.447e-05,
"loss": 0.6316,
"step": 553
},
{
"epoch": 1.108,
"grad_norm": 0.20338623225688934,
"learning_rate": 4.4460000000000005e-05,
"loss": 0.6326,
"step": 554
},
{
"epoch": 1.11,
"grad_norm": 0.1929793357849121,
"learning_rate": 4.445e-05,
"loss": 0.5357,
"step": 555
},
{
"epoch": 1.112,
"grad_norm": 0.2107112854719162,
"learning_rate": 4.444e-05,
"loss": 0.6165,
"step": 556
},
{
"epoch": 1.114,
"grad_norm": 0.2129804641008377,
"learning_rate": 4.443e-05,
"loss": 0.6724,
"step": 557
},
{
"epoch": 1.116,
"grad_norm": 0.20006398856639862,
"learning_rate": 4.442e-05,
"loss": 0.5899,
"step": 558
},
{
"epoch": 1.1179999999999999,
"grad_norm": 0.20823417603969574,
"learning_rate": 4.4410000000000003e-05,
"loss": 0.5449,
"step": 559
},
{
"epoch": 1.12,
"grad_norm": 0.20882166922092438,
"learning_rate": 4.44e-05,
"loss": 0.5296,
"step": 560
},
{
"epoch": 1.1219999999999999,
"grad_norm": 0.19584539532661438,
"learning_rate": 4.439000000000001e-05,
"loss": 0.5562,
"step": 561
},
{
"epoch": 1.124,
"grad_norm": 0.19048091769218445,
"learning_rate": 4.438e-05,
"loss": 0.5809,
"step": 562
},
{
"epoch": 1.126,
"grad_norm": 0.19735245406627655,
"learning_rate": 4.4370000000000004e-05,
"loss": 0.6672,
"step": 563
},
{
"epoch": 1.1280000000000001,
"grad_norm": 0.20549283921718597,
"learning_rate": 4.436e-05,
"loss": 0.6006,
"step": 564
},
{
"epoch": 1.13,
"grad_norm": 0.19877098500728607,
"learning_rate": 4.435e-05,
"loss": 0.534,
"step": 565
},
{
"epoch": 1.1320000000000001,
"grad_norm": 0.20947475731372833,
"learning_rate": 4.4340000000000006e-05,
"loss": 0.5859,
"step": 566
},
{
"epoch": 1.134,
"grad_norm": 0.18569184839725494,
"learning_rate": 4.4330000000000004e-05,
"loss": 0.5441,
"step": 567
},
{
"epoch": 1.1360000000000001,
"grad_norm": 0.1995682269334793,
"learning_rate": 4.432e-05,
"loss": 0.5323,
"step": 568
},
{
"epoch": 1.138,
"grad_norm": 0.22524245083332062,
"learning_rate": 4.431e-05,
"loss": 0.6062,
"step": 569
},
{
"epoch": 1.1400000000000001,
"grad_norm": 0.19676834344863892,
"learning_rate": 4.43e-05,
"loss": 0.5421,
"step": 570
},
{
"epoch": 1.142,
"grad_norm": 0.20001918077468872,
"learning_rate": 4.4290000000000005e-05,
"loss": 0.503,
"step": 571
},
{
"epoch": 1.144,
"grad_norm": 0.19310520589351654,
"learning_rate": 4.428e-05,
"loss": 0.5714,
"step": 572
},
{
"epoch": 1.146,
"grad_norm": 0.1684369444847107,
"learning_rate": 4.427e-05,
"loss": 0.4657,
"step": 573
},
{
"epoch": 1.148,
"grad_norm": 0.2464480698108673,
"learning_rate": 4.426e-05,
"loss": 0.6095,
"step": 574
},
{
"epoch": 1.15,
"grad_norm": 0.21196356415748596,
"learning_rate": 4.4250000000000005e-05,
"loss": 0.486,
"step": 575
},
{
"epoch": 1.152,
"grad_norm": 0.18600516021251678,
"learning_rate": 4.424e-05,
"loss": 0.588,
"step": 576
},
{
"epoch": 1.154,
"grad_norm": 0.19391140341758728,
"learning_rate": 4.423e-05,
"loss": 0.5791,
"step": 577
},
{
"epoch": 1.156,
"grad_norm": 0.20038722455501556,
"learning_rate": 4.422e-05,
"loss": 0.5251,
"step": 578
},
{
"epoch": 1.158,
"grad_norm": 0.17348361015319824,
"learning_rate": 4.421e-05,
"loss": 0.5648,
"step": 579
},
{
"epoch": 1.16,
"grad_norm": 0.2228512465953827,
"learning_rate": 4.4200000000000004e-05,
"loss": 0.5741,
"step": 580
},
{
"epoch": 1.162,
"grad_norm": 0.1962224692106247,
"learning_rate": 4.419e-05,
"loss": 0.5391,
"step": 581
},
{
"epoch": 1.164,
"grad_norm": 0.20475374162197113,
"learning_rate": 4.418000000000001e-05,
"loss": 0.6273,
"step": 582
},
{
"epoch": 1.166,
"grad_norm": 0.20558544993400574,
"learning_rate": 4.417e-05,
"loss": 0.6236,
"step": 583
},
{
"epoch": 1.168,
"grad_norm": 0.23516933619976044,
"learning_rate": 4.4160000000000004e-05,
"loss": 0.5887,
"step": 584
},
{
"epoch": 1.17,
"grad_norm": 0.20617741346359253,
"learning_rate": 4.415e-05,
"loss": 0.617,
"step": 585
},
{
"epoch": 1.172,
"grad_norm": 0.216308131814003,
"learning_rate": 4.414e-05,
"loss": 0.6356,
"step": 586
},
{
"epoch": 1.174,
"grad_norm": 0.19473914802074432,
"learning_rate": 4.4130000000000006e-05,
"loss": 0.5473,
"step": 587
},
{
"epoch": 1.176,
"grad_norm": 0.20949441194534302,
"learning_rate": 4.412e-05,
"loss": 0.5388,
"step": 588
},
{
"epoch": 1.178,
"grad_norm": 0.21411944925785065,
"learning_rate": 4.411e-05,
"loss": 0.6231,
"step": 589
},
{
"epoch": 1.18,
"grad_norm": 0.18295326828956604,
"learning_rate": 4.41e-05,
"loss": 0.5703,
"step": 590
},
{
"epoch": 1.182,
"grad_norm": 0.20706209540367126,
"learning_rate": 4.4090000000000006e-05,
"loss": 0.6102,
"step": 591
},
{
"epoch": 1.184,
"grad_norm": 0.19791294634342194,
"learning_rate": 4.4080000000000005e-05,
"loss": 0.5409,
"step": 592
},
{
"epoch": 1.186,
"grad_norm": 0.2113964557647705,
"learning_rate": 4.407e-05,
"loss": 0.5932,
"step": 593
},
{
"epoch": 1.188,
"grad_norm": 0.17157748341560364,
"learning_rate": 4.406e-05,
"loss": 0.5442,
"step": 594
},
{
"epoch": 1.19,
"grad_norm": 0.2267071008682251,
"learning_rate": 4.405e-05,
"loss": 0.6715,
"step": 595
},
{
"epoch": 1.192,
"grad_norm": 0.22745326161384583,
"learning_rate": 4.4040000000000005e-05,
"loss": 0.6379,
"step": 596
},
{
"epoch": 1.194,
"grad_norm": 0.19248466193675995,
"learning_rate": 4.4030000000000004e-05,
"loss": 0.5462,
"step": 597
},
{
"epoch": 1.196,
"grad_norm": 0.1977124959230423,
"learning_rate": 4.402e-05,
"loss": 0.5127,
"step": 598
},
{
"epoch": 1.198,
"grad_norm": 0.2224949151277542,
"learning_rate": 4.401e-05,
"loss": 0.6466,
"step": 599
},
{
"epoch": 1.2,
"grad_norm": 0.2159275859594345,
"learning_rate": 4.4000000000000006e-05,
"loss": 0.6082,
"step": 600
},
{
"epoch": 1.202,
"grad_norm": 0.20515945553779602,
"learning_rate": 4.3990000000000004e-05,
"loss": 0.5903,
"step": 601
},
{
"epoch": 1.204,
"grad_norm": 0.19639264047145844,
"learning_rate": 4.398e-05,
"loss": 0.5765,
"step": 602
},
{
"epoch": 1.206,
"grad_norm": 0.22909431159496307,
"learning_rate": 4.397e-05,
"loss": 0.6025,
"step": 603
},
{
"epoch": 1.208,
"grad_norm": 0.2259034514427185,
"learning_rate": 4.396e-05,
"loss": 0.6427,
"step": 604
},
{
"epoch": 1.21,
"grad_norm": 0.19979901611804962,
"learning_rate": 4.3950000000000004e-05,
"loss": 0.5271,
"step": 605
},
{
"epoch": 1.212,
"grad_norm": 0.21502630412578583,
"learning_rate": 4.394e-05,
"loss": 0.5954,
"step": 606
},
{
"epoch": 1.214,
"grad_norm": 0.20371314883232117,
"learning_rate": 4.393e-05,
"loss": 0.5828,
"step": 607
},
{
"epoch": 1.216,
"grad_norm": 0.21107852458953857,
"learning_rate": 4.392e-05,
"loss": 0.6029,
"step": 608
},
{
"epoch": 1.218,
"grad_norm": 0.17909643054008484,
"learning_rate": 4.391e-05,
"loss": 0.6587,
"step": 609
},
{
"epoch": 1.22,
"grad_norm": 0.21194545924663544,
"learning_rate": 4.39e-05,
"loss": 0.5606,
"step": 610
},
{
"epoch": 1.222,
"grad_norm": 0.18832319974899292,
"learning_rate": 4.389e-05,
"loss": 0.5733,
"step": 611
},
{
"epoch": 1.224,
"grad_norm": 0.21508416533470154,
"learning_rate": 4.388000000000001e-05,
"loss": 0.5837,
"step": 612
},
{
"epoch": 1.226,
"grad_norm": 0.21378885209560394,
"learning_rate": 4.387e-05,
"loss": 0.5558,
"step": 613
},
{
"epoch": 1.228,
"grad_norm": 0.24820692837238312,
"learning_rate": 4.3860000000000004e-05,
"loss": 0.6971,
"step": 614
},
{
"epoch": 1.23,
"grad_norm": 0.1849009394645691,
"learning_rate": 4.385e-05,
"loss": 0.5712,
"step": 615
},
{
"epoch": 1.232,
"grad_norm": 0.23946769535541534,
"learning_rate": 4.384e-05,
"loss": 0.6747,
"step": 616
},
{
"epoch": 1.234,
"grad_norm": 0.220892533659935,
"learning_rate": 4.3830000000000006e-05,
"loss": 0.6101,
"step": 617
},
{
"epoch": 1.236,
"grad_norm": 0.18981412053108215,
"learning_rate": 4.382e-05,
"loss": 0.4892,
"step": 618
},
{
"epoch": 1.238,
"grad_norm": 0.23685996234416962,
"learning_rate": 4.381e-05,
"loss": 0.5582,
"step": 619
},
{
"epoch": 1.24,
"grad_norm": 0.19691988825798035,
"learning_rate": 4.38e-05,
"loss": 0.5823,
"step": 620
},
{
"epoch": 1.242,
"grad_norm": 0.23621264100074768,
"learning_rate": 4.3790000000000006e-05,
"loss": 0.6687,
"step": 621
},
{
"epoch": 1.244,
"grad_norm": 0.2227286845445633,
"learning_rate": 4.3780000000000004e-05,
"loss": 0.6677,
"step": 622
},
{
"epoch": 1.246,
"grad_norm": 0.20094943046569824,
"learning_rate": 4.377e-05,
"loss": 0.5783,
"step": 623
},
{
"epoch": 1.248,
"grad_norm": 0.2083892971277237,
"learning_rate": 4.376e-05,
"loss": 0.559,
"step": 624
},
{
"epoch": 1.25,
"grad_norm": 0.21730823814868927,
"learning_rate": 4.375e-05,
"loss": 0.542,
"step": 625
},
{
"epoch": 1.252,
"grad_norm": 0.21850605309009552,
"learning_rate": 4.3740000000000005e-05,
"loss": 0.6211,
"step": 626
},
{
"epoch": 1.254,
"grad_norm": 0.20119699835777283,
"learning_rate": 4.373e-05,
"loss": 0.5272,
"step": 627
},
{
"epoch": 1.256,
"grad_norm": 0.23064668476581573,
"learning_rate": 4.372e-05,
"loss": 0.6462,
"step": 628
},
{
"epoch": 1.258,
"grad_norm": 0.21558353304862976,
"learning_rate": 4.371e-05,
"loss": 0.5547,
"step": 629
},
{
"epoch": 1.26,
"grad_norm": 0.25317391753196716,
"learning_rate": 4.3700000000000005e-05,
"loss": 0.6799,
"step": 630
},
{
"epoch": 1.262,
"grad_norm": 0.1989932358264923,
"learning_rate": 4.3690000000000004e-05,
"loss": 0.5812,
"step": 631
},
{
"epoch": 1.264,
"grad_norm": 0.21172182261943817,
"learning_rate": 4.368e-05,
"loss": 0.62,
"step": 632
},
{
"epoch": 1.266,
"grad_norm": 0.21126477420330048,
"learning_rate": 4.367e-05,
"loss": 0.5217,
"step": 633
},
{
"epoch": 1.268,
"grad_norm": 0.20479610562324524,
"learning_rate": 4.366e-05,
"loss": 0.6741,
"step": 634
},
{
"epoch": 1.27,
"grad_norm": 0.2281307876110077,
"learning_rate": 4.3650000000000004e-05,
"loss": 0.5584,
"step": 635
},
{
"epoch": 1.272,
"grad_norm": 0.20038704574108124,
"learning_rate": 4.364e-05,
"loss": 0.5471,
"step": 636
},
{
"epoch": 1.274,
"grad_norm": 0.21315504610538483,
"learning_rate": 4.363000000000001e-05,
"loss": 0.5367,
"step": 637
},
{
"epoch": 1.276,
"grad_norm": 0.20510873198509216,
"learning_rate": 4.362e-05,
"loss": 0.6416,
"step": 638
},
{
"epoch": 1.278,
"grad_norm": 0.18110395967960358,
"learning_rate": 4.361e-05,
"loss": 0.6165,
"step": 639
},
{
"epoch": 1.28,
"grad_norm": 0.19763672351837158,
"learning_rate": 4.36e-05,
"loss": 0.548,
"step": 640
},
{
"epoch": 1.282,
"grad_norm": 0.22199787199497223,
"learning_rate": 4.359e-05,
"loss": 0.5948,
"step": 641
},
{
"epoch": 1.284,
"grad_norm": 0.21400614082813263,
"learning_rate": 4.3580000000000006e-05,
"loss": 0.476,
"step": 642
},
{
"epoch": 1.286,
"grad_norm": 0.1921880692243576,
"learning_rate": 4.357e-05,
"loss": 0.5716,
"step": 643
},
{
"epoch": 1.288,
"grad_norm": 0.1940234899520874,
"learning_rate": 4.356e-05,
"loss": 0.621,
"step": 644
},
{
"epoch": 1.29,
"grad_norm": 0.22476941347122192,
"learning_rate": 4.355e-05,
"loss": 0.586,
"step": 645
},
{
"epoch": 1.292,
"grad_norm": 0.199696347117424,
"learning_rate": 4.354e-05,
"loss": 0.6177,
"step": 646
},
{
"epoch": 1.294,
"grad_norm": 0.19897620379924774,
"learning_rate": 4.3530000000000005e-05,
"loss": 0.5981,
"step": 647
},
{
"epoch": 1.296,
"grad_norm": 0.18877290189266205,
"learning_rate": 4.352e-05,
"loss": 0.5264,
"step": 648
},
{
"epoch": 1.298,
"grad_norm": 0.1996156871318817,
"learning_rate": 4.351e-05,
"loss": 0.5537,
"step": 649
},
{
"epoch": 1.3,
"grad_norm": 0.19752252101898193,
"learning_rate": 4.35e-05,
"loss": 0.5518,
"step": 650
},
{
"epoch": 1.302,
"grad_norm": 0.19172196090221405,
"learning_rate": 4.3490000000000005e-05,
"loss": 0.517,
"step": 651
},
{
"epoch": 1.304,
"grad_norm": 0.1916598230600357,
"learning_rate": 4.3480000000000004e-05,
"loss": 0.6194,
"step": 652
},
{
"epoch": 1.306,
"grad_norm": 0.2249755561351776,
"learning_rate": 4.347e-05,
"loss": 0.5648,
"step": 653
},
{
"epoch": 1.308,
"grad_norm": 0.21257826685905457,
"learning_rate": 4.346e-05,
"loss": 0.5851,
"step": 654
},
{
"epoch": 1.31,
"grad_norm": 0.22375406324863434,
"learning_rate": 4.345e-05,
"loss": 0.563,
"step": 655
},
{
"epoch": 1.312,
"grad_norm": 0.2560722827911377,
"learning_rate": 4.3440000000000004e-05,
"loss": 0.7102,
"step": 656
},
{
"epoch": 1.314,
"grad_norm": 0.2261979579925537,
"learning_rate": 4.343e-05,
"loss": 0.6269,
"step": 657
},
{
"epoch": 1.316,
"grad_norm": 0.22072796523571014,
"learning_rate": 4.342e-05,
"loss": 0.585,
"step": 658
},
{
"epoch": 1.318,
"grad_norm": 0.2084806263446808,
"learning_rate": 4.341e-05,
"loss": 0.5933,
"step": 659
},
{
"epoch": 1.32,
"grad_norm": 0.1984473168849945,
"learning_rate": 4.3400000000000005e-05,
"loss": 0.5641,
"step": 660
},
{
"epoch": 1.322,
"grad_norm": 0.20633773505687714,
"learning_rate": 4.339e-05,
"loss": 0.6262,
"step": 661
},
{
"epoch": 1.324,
"grad_norm": 0.1869378536939621,
"learning_rate": 4.338e-05,
"loss": 0.4613,
"step": 662
},
{
"epoch": 1.326,
"grad_norm": 0.19371822476387024,
"learning_rate": 4.337e-05,
"loss": 0.6016,
"step": 663
},
{
"epoch": 1.328,
"grad_norm": 0.22572529315948486,
"learning_rate": 4.336e-05,
"loss": 0.6891,
"step": 664
},
{
"epoch": 1.33,
"grad_norm": 0.19980907440185547,
"learning_rate": 4.335e-05,
"loss": 0.5174,
"step": 665
},
{
"epoch": 1.332,
"grad_norm": 0.1863400638103485,
"learning_rate": 4.334e-05,
"loss": 0.5886,
"step": 666
},
{
"epoch": 1.334,
"grad_norm": 0.2504555583000183,
"learning_rate": 4.333000000000001e-05,
"loss": 0.5608,
"step": 667
},
{
"epoch": 1.336,
"grad_norm": 0.23737183213233948,
"learning_rate": 4.332e-05,
"loss": 0.6351,
"step": 668
},
{
"epoch": 1.338,
"grad_norm": 0.24397732317447662,
"learning_rate": 4.3310000000000004e-05,
"loss": 0.6332,
"step": 669
},
{
"epoch": 1.34,
"grad_norm": 0.186910942196846,
"learning_rate": 4.33e-05,
"loss": 0.4916,
"step": 670
},
{
"epoch": 1.342,
"grad_norm": 0.2204650193452835,
"learning_rate": 4.329e-05,
"loss": 0.6193,
"step": 671
},
{
"epoch": 1.3439999999999999,
"grad_norm": 0.20329062640666962,
"learning_rate": 4.3280000000000006e-05,
"loss": 0.6311,
"step": 672
},
{
"epoch": 1.346,
"grad_norm": 0.17689010500907898,
"learning_rate": 4.327e-05,
"loss": 0.5129,
"step": 673
},
{
"epoch": 1.3479999999999999,
"grad_norm": 0.19227999448776245,
"learning_rate": 4.326e-05,
"loss": 0.585,
"step": 674
},
{
"epoch": 1.35,
"grad_norm": 0.2173619568347931,
"learning_rate": 4.325e-05,
"loss": 0.5513,
"step": 675
},
{
"epoch": 1.3519999999999999,
"grad_norm": 0.21630556881427765,
"learning_rate": 4.324e-05,
"loss": 0.612,
"step": 676
},
{
"epoch": 1.354,
"grad_norm": 0.23232296109199524,
"learning_rate": 4.3230000000000005e-05,
"loss": 0.6401,
"step": 677
},
{
"epoch": 1.3559999999999999,
"grad_norm": 0.2330378144979477,
"learning_rate": 4.3219999999999996e-05,
"loss": 0.7614,
"step": 678
},
{
"epoch": 1.358,
"grad_norm": 0.1954951286315918,
"learning_rate": 4.321e-05,
"loss": 0.5531,
"step": 679
},
{
"epoch": 1.3599999999999999,
"grad_norm": 0.1955898106098175,
"learning_rate": 4.32e-05,
"loss": 0.4924,
"step": 680
},
{
"epoch": 1.362,
"grad_norm": 0.2058050036430359,
"learning_rate": 4.3190000000000005e-05,
"loss": 0.5021,
"step": 681
},
{
"epoch": 1.3639999999999999,
"grad_norm": 0.263340562582016,
"learning_rate": 4.318e-05,
"loss": 0.567,
"step": 682
},
{
"epoch": 1.366,
"grad_norm": 0.20733140408992767,
"learning_rate": 4.317e-05,
"loss": 0.6279,
"step": 683
},
{
"epoch": 1.3679999999999999,
"grad_norm": 0.22644178569316864,
"learning_rate": 4.316e-05,
"loss": 0.5664,
"step": 684
},
{
"epoch": 1.37,
"grad_norm": 0.27613282203674316,
"learning_rate": 4.315e-05,
"loss": 0.5829,
"step": 685
},
{
"epoch": 1.3719999999999999,
"grad_norm": 0.19206145405769348,
"learning_rate": 4.3140000000000004e-05,
"loss": 0.5515,
"step": 686
},
{
"epoch": 1.374,
"grad_norm": 0.25372427701950073,
"learning_rate": 4.313e-05,
"loss": 0.6937,
"step": 687
},
{
"epoch": 1.376,
"grad_norm": 0.22427460551261902,
"learning_rate": 4.312000000000001e-05,
"loss": 0.6961,
"step": 688
},
{
"epoch": 1.3780000000000001,
"grad_norm": 0.22864195704460144,
"learning_rate": 4.311e-05,
"loss": 0.7201,
"step": 689
},
{
"epoch": 1.38,
"grad_norm": 0.15411259233951569,
"learning_rate": 4.3100000000000004e-05,
"loss": 0.3898,
"step": 690
},
{
"epoch": 1.3820000000000001,
"grad_norm": 0.21335478127002716,
"learning_rate": 4.309e-05,
"loss": 0.6347,
"step": 691
},
{
"epoch": 1.384,
"grad_norm": 0.1923125833272934,
"learning_rate": 4.308e-05,
"loss": 0.6299,
"step": 692
},
{
"epoch": 1.3860000000000001,
"grad_norm": 0.20960748195648193,
"learning_rate": 4.3070000000000006e-05,
"loss": 0.606,
"step": 693
},
{
"epoch": 1.388,
"grad_norm": 0.20630641281604767,
"learning_rate": 4.306e-05,
"loss": 0.5883,
"step": 694
},
{
"epoch": 1.3900000000000001,
"grad_norm": 0.1649722456932068,
"learning_rate": 4.305e-05,
"loss": 0.4467,
"step": 695
},
{
"epoch": 1.392,
"grad_norm": 0.21387261152267456,
"learning_rate": 4.304e-05,
"loss": 0.6852,
"step": 696
},
{
"epoch": 1.3940000000000001,
"grad_norm": 0.22639335691928864,
"learning_rate": 4.3030000000000006e-05,
"loss": 0.542,
"step": 697
},
{
"epoch": 1.396,
"grad_norm": 0.21238407492637634,
"learning_rate": 4.3020000000000005e-05,
"loss": 0.6011,
"step": 698
},
{
"epoch": 1.3980000000000001,
"grad_norm": 0.2178695648908615,
"learning_rate": 4.301e-05,
"loss": 0.5566,
"step": 699
},
{
"epoch": 1.4,
"grad_norm": 0.1901402771472931,
"learning_rate": 4.3e-05,
"loss": 0.5757,
"step": 700
},
{
"epoch": 1.4020000000000001,
"grad_norm": 0.2045552283525467,
"learning_rate": 4.299e-05,
"loss": 0.5678,
"step": 701
},
{
"epoch": 1.404,
"grad_norm": 0.19475097954273224,
"learning_rate": 4.2980000000000005e-05,
"loss": 0.588,
"step": 702
},
{
"epoch": 1.4060000000000001,
"grad_norm": 0.2065313756465912,
"learning_rate": 4.2970000000000004e-05,
"loss": 0.6748,
"step": 703
},
{
"epoch": 1.408,
"grad_norm": 0.2813938558101654,
"learning_rate": 4.296e-05,
"loss": 0.601,
"step": 704
},
{
"epoch": 1.41,
"grad_norm": 0.23802025616168976,
"learning_rate": 4.295e-05,
"loss": 0.7323,
"step": 705
},
{
"epoch": 1.412,
"grad_norm": 0.23389388620853424,
"learning_rate": 4.2940000000000006e-05,
"loss": 0.5336,
"step": 706
},
{
"epoch": 1.414,
"grad_norm": 0.21070720255374908,
"learning_rate": 4.2930000000000004e-05,
"loss": 0.6171,
"step": 707
},
{
"epoch": 1.416,
"grad_norm": 0.21459278464317322,
"learning_rate": 4.292e-05,
"loss": 0.5903,
"step": 708
},
{
"epoch": 1.418,
"grad_norm": 0.21212553977966309,
"learning_rate": 4.291e-05,
"loss": 0.6458,
"step": 709
},
{
"epoch": 1.42,
"grad_norm": 0.212654247879982,
"learning_rate": 4.29e-05,
"loss": 0.5955,
"step": 710
},
{
"epoch": 1.422,
"grad_norm": 0.20657028257846832,
"learning_rate": 4.2890000000000004e-05,
"loss": 0.5186,
"step": 711
},
{
"epoch": 1.424,
"grad_norm": 0.2148103266954422,
"learning_rate": 4.288e-05,
"loss": 0.635,
"step": 712
},
{
"epoch": 1.426,
"grad_norm": 0.21794739365577698,
"learning_rate": 4.287000000000001e-05,
"loss": 0.5263,
"step": 713
},
{
"epoch": 1.428,
"grad_norm": 0.22709821164608002,
"learning_rate": 4.286e-05,
"loss": 0.5933,
"step": 714
},
{
"epoch": 1.43,
"grad_norm": 0.1945171058177948,
"learning_rate": 4.285e-05,
"loss": 0.5093,
"step": 715
},
{
"epoch": 1.432,
"grad_norm": 0.2270834892988205,
"learning_rate": 4.284e-05,
"loss": 0.5716,
"step": 716
},
{
"epoch": 1.434,
"grad_norm": 0.21322421729564667,
"learning_rate": 4.283e-05,
"loss": 0.6758,
"step": 717
},
{
"epoch": 1.436,
"grad_norm": 0.22180742025375366,
"learning_rate": 4.282000000000001e-05,
"loss": 0.6433,
"step": 718
},
{
"epoch": 1.438,
"grad_norm": 0.1959298551082611,
"learning_rate": 4.281e-05,
"loss": 0.6108,
"step": 719
},
{
"epoch": 1.44,
"grad_norm": 0.21371756494045258,
"learning_rate": 4.2800000000000004e-05,
"loss": 0.6228,
"step": 720
},
{
"epoch": 1.442,
"grad_norm": 0.2120112180709839,
"learning_rate": 4.279e-05,
"loss": 0.6034,
"step": 721
},
{
"epoch": 1.444,
"grad_norm": 0.20808736979961395,
"learning_rate": 4.278e-05,
"loss": 0.6471,
"step": 722
},
{
"epoch": 1.446,
"grad_norm": 0.20965223014354706,
"learning_rate": 4.2770000000000006e-05,
"loss": 0.5389,
"step": 723
},
{
"epoch": 1.448,
"grad_norm": 0.21139630675315857,
"learning_rate": 4.276e-05,
"loss": 0.5244,
"step": 724
},
{
"epoch": 1.45,
"grad_norm": 0.19360531866550446,
"learning_rate": 4.275e-05,
"loss": 0.5875,
"step": 725
},
{
"epoch": 1.452,
"grad_norm": 0.21325168013572693,
"learning_rate": 4.274e-05,
"loss": 0.5826,
"step": 726
},
{
"epoch": 1.454,
"grad_norm": 0.2077636867761612,
"learning_rate": 4.2730000000000006e-05,
"loss": 0.6809,
"step": 727
},
{
"epoch": 1.456,
"grad_norm": 0.20011232793331146,
"learning_rate": 4.2720000000000004e-05,
"loss": 0.4634,
"step": 728
},
{
"epoch": 1.458,
"grad_norm": 0.20724835991859436,
"learning_rate": 4.271e-05,
"loss": 0.5878,
"step": 729
},
{
"epoch": 1.46,
"grad_norm": 0.20728984475135803,
"learning_rate": 4.27e-05,
"loss": 0.4541,
"step": 730
},
{
"epoch": 1.462,
"grad_norm": 0.23507185280323029,
"learning_rate": 4.269e-05,
"loss": 0.6409,
"step": 731
},
{
"epoch": 1.464,
"grad_norm": 0.23297451436519623,
"learning_rate": 4.2680000000000005e-05,
"loss": 0.6633,
"step": 732
},
{
"epoch": 1.466,
"grad_norm": 0.2090078741312027,
"learning_rate": 4.267e-05,
"loss": 0.6347,
"step": 733
},
{
"epoch": 1.468,
"grad_norm": 0.20409215986728668,
"learning_rate": 4.266e-05,
"loss": 0.5472,
"step": 734
},
{
"epoch": 1.47,
"grad_norm": 0.21352814137935638,
"learning_rate": 4.265e-05,
"loss": 0.6449,
"step": 735
},
{
"epoch": 1.472,
"grad_norm": 0.20980428159236908,
"learning_rate": 4.2640000000000005e-05,
"loss": 0.6371,
"step": 736
},
{
"epoch": 1.474,
"grad_norm": 0.1911533921957016,
"learning_rate": 4.2630000000000004e-05,
"loss": 0.5392,
"step": 737
},
{
"epoch": 1.476,
"grad_norm": 0.21656915545463562,
"learning_rate": 4.262e-05,
"loss": 0.627,
"step": 738
},
{
"epoch": 1.478,
"grad_norm": 0.19826734066009521,
"learning_rate": 4.261e-05,
"loss": 0.6034,
"step": 739
},
{
"epoch": 1.48,
"grad_norm": 0.20627282559871674,
"learning_rate": 4.26e-05,
"loss": 0.5495,
"step": 740
},
{
"epoch": 1.482,
"grad_norm": 0.24123527109622955,
"learning_rate": 4.2590000000000004e-05,
"loss": 0.699,
"step": 741
},
{
"epoch": 1.484,
"grad_norm": 0.20298898220062256,
"learning_rate": 4.258e-05,
"loss": 0.5217,
"step": 742
},
{
"epoch": 1.486,
"grad_norm": 0.218084916472435,
"learning_rate": 4.257000000000001e-05,
"loss": 0.603,
"step": 743
},
{
"epoch": 1.488,
"grad_norm": 0.22439178824424744,
"learning_rate": 4.256e-05,
"loss": 0.5046,
"step": 744
},
{
"epoch": 1.49,
"grad_norm": 0.20961345732212067,
"learning_rate": 4.2550000000000004e-05,
"loss": 0.5791,
"step": 745
},
{
"epoch": 1.492,
"grad_norm": 0.21735183894634247,
"learning_rate": 4.254e-05,
"loss": 0.5744,
"step": 746
},
{
"epoch": 1.494,
"grad_norm": 0.2141823023557663,
"learning_rate": 4.253e-05,
"loss": 0.6365,
"step": 747
},
{
"epoch": 1.496,
"grad_norm": 0.19389428198337555,
"learning_rate": 4.2520000000000006e-05,
"loss": 0.5767,
"step": 748
},
{
"epoch": 1.498,
"grad_norm": 0.24830563366413116,
"learning_rate": 4.251e-05,
"loss": 0.6411,
"step": 749
},
{
"epoch": 1.5,
"grad_norm": 0.24806180596351624,
"learning_rate": 4.25e-05,
"loss": 0.641,
"step": 750
},
{
"epoch": 1.502,
"grad_norm": 0.19207115471363068,
"learning_rate": 4.249e-05,
"loss": 0.4554,
"step": 751
},
{
"epoch": 1.504,
"grad_norm": 0.21739742159843445,
"learning_rate": 4.248e-05,
"loss": 0.5802,
"step": 752
},
{
"epoch": 1.506,
"grad_norm": 0.20979043841362,
"learning_rate": 4.2470000000000005e-05,
"loss": 0.5499,
"step": 753
},
{
"epoch": 1.508,
"grad_norm": 0.18834327161312103,
"learning_rate": 4.246e-05,
"loss": 0.558,
"step": 754
},
{
"epoch": 1.51,
"grad_norm": 0.1966605931520462,
"learning_rate": 4.245e-05,
"loss": 0.5573,
"step": 755
},
{
"epoch": 1.512,
"grad_norm": 0.2214249223470688,
"learning_rate": 4.244e-05,
"loss": 0.5933,
"step": 756
},
{
"epoch": 1.514,
"grad_norm": 0.20649488270282745,
"learning_rate": 4.2430000000000005e-05,
"loss": 0.486,
"step": 757
},
{
"epoch": 1.516,
"grad_norm": 0.23040921986103058,
"learning_rate": 4.2420000000000004e-05,
"loss": 0.5451,
"step": 758
},
{
"epoch": 1.518,
"grad_norm": 0.19081412255764008,
"learning_rate": 4.241e-05,
"loss": 0.6122,
"step": 759
},
{
"epoch": 1.52,
"grad_norm": 0.20429189503192902,
"learning_rate": 4.24e-05,
"loss": 0.4951,
"step": 760
},
{
"epoch": 1.522,
"grad_norm": 0.2081070989370346,
"learning_rate": 4.239e-05,
"loss": 0.6102,
"step": 761
},
{
"epoch": 1.524,
"grad_norm": 0.19600601494312286,
"learning_rate": 4.2380000000000004e-05,
"loss": 0.5609,
"step": 762
},
{
"epoch": 1.526,
"grad_norm": 0.22128801047801971,
"learning_rate": 4.237e-05,
"loss": 0.514,
"step": 763
},
{
"epoch": 1.528,
"grad_norm": 0.21633316576480865,
"learning_rate": 4.236e-05,
"loss": 0.5348,
"step": 764
},
{
"epoch": 1.53,
"grad_norm": 0.23707932233810425,
"learning_rate": 4.235e-05,
"loss": 0.5494,
"step": 765
},
{
"epoch": 1.532,
"grad_norm": 0.2444704920053482,
"learning_rate": 4.2340000000000005e-05,
"loss": 0.5086,
"step": 766
},
{
"epoch": 1.534,
"grad_norm": 0.2355782687664032,
"learning_rate": 4.233e-05,
"loss": 0.6334,
"step": 767
},
{
"epoch": 1.536,
"grad_norm": 0.20617908239364624,
"learning_rate": 4.232e-05,
"loss": 0.5296,
"step": 768
},
{
"epoch": 1.538,
"grad_norm": 0.18543358147144318,
"learning_rate": 4.231e-05,
"loss": 0.4944,
"step": 769
},
{
"epoch": 1.54,
"grad_norm": 0.24593642354011536,
"learning_rate": 4.23e-05,
"loss": 0.6141,
"step": 770
},
{
"epoch": 1.542,
"grad_norm": 0.21925920248031616,
"learning_rate": 4.229e-05,
"loss": 0.5704,
"step": 771
},
{
"epoch": 1.544,
"grad_norm": 0.23085656762123108,
"learning_rate": 4.228e-05,
"loss": 0.5748,
"step": 772
},
{
"epoch": 1.546,
"grad_norm": 0.2114836573600769,
"learning_rate": 4.227000000000001e-05,
"loss": 0.5582,
"step": 773
},
{
"epoch": 1.548,
"grad_norm": 0.2578122019767761,
"learning_rate": 4.226e-05,
"loss": 0.5869,
"step": 774
},
{
"epoch": 1.55,
"grad_norm": 0.21830229461193085,
"learning_rate": 4.2250000000000004e-05,
"loss": 0.5825,
"step": 775
},
{
"epoch": 1.552,
"grad_norm": 0.2558196783065796,
"learning_rate": 4.224e-05,
"loss": 0.6324,
"step": 776
},
{
"epoch": 1.554,
"grad_norm": 0.19135409593582153,
"learning_rate": 4.223e-05,
"loss": 0.5364,
"step": 777
},
{
"epoch": 1.556,
"grad_norm": 0.22118104994297028,
"learning_rate": 4.2220000000000006e-05,
"loss": 0.5579,
"step": 778
},
{
"epoch": 1.558,
"grad_norm": 0.2239237129688263,
"learning_rate": 4.221e-05,
"loss": 0.5025,
"step": 779
},
{
"epoch": 1.56,
"grad_norm": 0.1993967741727829,
"learning_rate": 4.22e-05,
"loss": 0.5327,
"step": 780
},
{
"epoch": 1.562,
"grad_norm": 0.20883196592330933,
"learning_rate": 4.219e-05,
"loss": 0.6594,
"step": 781
},
{
"epoch": 1.564,
"grad_norm": 0.20599497854709625,
"learning_rate": 4.2180000000000006e-05,
"loss": 0.591,
"step": 782
},
{
"epoch": 1.5659999999999998,
"grad_norm": 0.2093878537416458,
"learning_rate": 4.2170000000000005e-05,
"loss": 0.4834,
"step": 783
},
{
"epoch": 1.568,
"grad_norm": 0.24116116762161255,
"learning_rate": 4.2159999999999996e-05,
"loss": 0.5273,
"step": 784
},
{
"epoch": 1.5699999999999998,
"grad_norm": 0.2020028680562973,
"learning_rate": 4.215e-05,
"loss": 0.526,
"step": 785
},
{
"epoch": 1.572,
"grad_norm": 0.20167556405067444,
"learning_rate": 4.214e-05,
"loss": 0.6276,
"step": 786
},
{
"epoch": 1.5739999999999998,
"grad_norm": 0.2037021368741989,
"learning_rate": 4.2130000000000005e-05,
"loss": 0.5864,
"step": 787
},
{
"epoch": 1.576,
"grad_norm": 0.22905926406383514,
"learning_rate": 4.212e-05,
"loss": 0.5766,
"step": 788
},
{
"epoch": 1.5779999999999998,
"grad_norm": 0.21906892955303192,
"learning_rate": 4.211e-05,
"loss": 0.5296,
"step": 789
},
{
"epoch": 1.58,
"grad_norm": 0.2502303421497345,
"learning_rate": 4.21e-05,
"loss": 0.5491,
"step": 790
},
{
"epoch": 1.5819999999999999,
"grad_norm": 0.2179962396621704,
"learning_rate": 4.209e-05,
"loss": 0.6227,
"step": 791
},
{
"epoch": 1.584,
"grad_norm": 0.2146979421377182,
"learning_rate": 4.2080000000000004e-05,
"loss": 0.5965,
"step": 792
},
{
"epoch": 1.5859999999999999,
"grad_norm": 0.2131640762090683,
"learning_rate": 4.207e-05,
"loss": 0.615,
"step": 793
},
{
"epoch": 1.588,
"grad_norm": 0.22561247646808624,
"learning_rate": 4.206e-05,
"loss": 0.4323,
"step": 794
},
{
"epoch": 1.5899999999999999,
"grad_norm": 0.22139142453670502,
"learning_rate": 4.205e-05,
"loss": 0.6603,
"step": 795
},
{
"epoch": 1.592,
"grad_norm": 0.20566140115261078,
"learning_rate": 4.2040000000000004e-05,
"loss": 0.5087,
"step": 796
},
{
"epoch": 1.5939999999999999,
"grad_norm": 0.24742665886878967,
"learning_rate": 4.203e-05,
"loss": 0.5964,
"step": 797
},
{
"epoch": 1.596,
"grad_norm": 0.20380236208438873,
"learning_rate": 4.202e-05,
"loss": 0.5487,
"step": 798
},
{
"epoch": 1.5979999999999999,
"grad_norm": 0.20481078326702118,
"learning_rate": 4.201e-05,
"loss": 0.5881,
"step": 799
},
{
"epoch": 1.6,
"grad_norm": 0.25701841711997986,
"learning_rate": 4.2e-05,
"loss": 0.6234,
"step": 800
},
{
"epoch": 1.6019999999999999,
"grad_norm": 0.1961895078420639,
"learning_rate": 4.199e-05,
"loss": 0.5342,
"step": 801
},
{
"epoch": 1.604,
"grad_norm": 0.203657329082489,
"learning_rate": 4.198e-05,
"loss": 0.6276,
"step": 802
},
{
"epoch": 1.6059999999999999,
"grad_norm": 0.21167868375778198,
"learning_rate": 4.1970000000000006e-05,
"loss": 0.5694,
"step": 803
},
{
"epoch": 1.608,
"grad_norm": 0.2066878229379654,
"learning_rate": 4.196e-05,
"loss": 0.4994,
"step": 804
},
{
"epoch": 1.6099999999999999,
"grad_norm": 0.21661029756069183,
"learning_rate": 4.195e-05,
"loss": 0.5586,
"step": 805
},
{
"epoch": 1.612,
"grad_norm": 0.1986166387796402,
"learning_rate": 4.194e-05,
"loss": 0.4946,
"step": 806
},
{
"epoch": 1.6139999999999999,
"grad_norm": 0.2023763805627823,
"learning_rate": 4.193e-05,
"loss": 0.4492,
"step": 807
},
{
"epoch": 1.616,
"grad_norm": 0.22971948981285095,
"learning_rate": 4.1920000000000005e-05,
"loss": 0.5701,
"step": 808
},
{
"epoch": 1.6179999999999999,
"grad_norm": 0.2328362613916397,
"learning_rate": 4.191e-05,
"loss": 0.5548,
"step": 809
},
{
"epoch": 1.62,
"grad_norm": 0.1914706826210022,
"learning_rate": 4.19e-05,
"loss": 0.5186,
"step": 810
},
{
"epoch": 1.6219999999999999,
"grad_norm": 0.20274528861045837,
"learning_rate": 4.189e-05,
"loss": 0.4636,
"step": 811
},
{
"epoch": 1.624,
"grad_norm": 0.19216802716255188,
"learning_rate": 4.1880000000000006e-05,
"loss": 0.5209,
"step": 812
},
{
"epoch": 1.626,
"grad_norm": 0.2118818759918213,
"learning_rate": 4.1870000000000004e-05,
"loss": 0.5066,
"step": 813
},
{
"epoch": 1.6280000000000001,
"grad_norm": 0.24689842760562897,
"learning_rate": 4.186e-05,
"loss": 0.6412,
"step": 814
},
{
"epoch": 1.63,
"grad_norm": 0.2002333402633667,
"learning_rate": 4.185e-05,
"loss": 0.6256,
"step": 815
},
{
"epoch": 1.6320000000000001,
"grad_norm": 0.2199128419160843,
"learning_rate": 4.184e-05,
"loss": 0.6407,
"step": 816
},
{
"epoch": 1.634,
"grad_norm": 0.21204575896263123,
"learning_rate": 4.1830000000000004e-05,
"loss": 0.5561,
"step": 817
},
{
"epoch": 1.6360000000000001,
"grad_norm": 0.19997568428516388,
"learning_rate": 4.182e-05,
"loss": 0.5599,
"step": 818
},
{
"epoch": 1.638,
"grad_norm": 0.22140006721019745,
"learning_rate": 4.181000000000001e-05,
"loss": 0.5745,
"step": 819
},
{
"epoch": 1.6400000000000001,
"grad_norm": 0.22329610586166382,
"learning_rate": 4.18e-05,
"loss": 0.5873,
"step": 820
},
{
"epoch": 1.642,
"grad_norm": 0.19843119382858276,
"learning_rate": 4.179e-05,
"loss": 0.5501,
"step": 821
},
{
"epoch": 1.6440000000000001,
"grad_norm": 0.2290477603673935,
"learning_rate": 4.178e-05,
"loss": 0.5568,
"step": 822
},
{
"epoch": 1.646,
"grad_norm": 0.24019767343997955,
"learning_rate": 4.177e-05,
"loss": 0.6133,
"step": 823
},
{
"epoch": 1.6480000000000001,
"grad_norm": 0.19814185798168182,
"learning_rate": 4.176000000000001e-05,
"loss": 0.5102,
"step": 824
},
{
"epoch": 1.65,
"grad_norm": 0.2241322547197342,
"learning_rate": 4.175e-05,
"loss": 0.5887,
"step": 825
},
{
"epoch": 1.6520000000000001,
"grad_norm": 0.19099442660808563,
"learning_rate": 4.1740000000000004e-05,
"loss": 0.5056,
"step": 826
},
{
"epoch": 1.654,
"grad_norm": 0.20805147290229797,
"learning_rate": 4.173e-05,
"loss": 0.472,
"step": 827
},
{
"epoch": 1.6560000000000001,
"grad_norm": 0.2212725132703781,
"learning_rate": 4.172e-05,
"loss": 0.6282,
"step": 828
},
{
"epoch": 1.658,
"grad_norm": 0.37296777963638306,
"learning_rate": 4.1710000000000006e-05,
"loss": 0.5325,
"step": 829
},
{
"epoch": 1.6600000000000001,
"grad_norm": 0.20890404284000397,
"learning_rate": 4.17e-05,
"loss": 0.4846,
"step": 830
},
{
"epoch": 1.662,
"grad_norm": 0.21045196056365967,
"learning_rate": 4.169e-05,
"loss": 0.4938,
"step": 831
},
{
"epoch": 1.6640000000000001,
"grad_norm": 0.21068882942199707,
"learning_rate": 4.168e-05,
"loss": 0.5429,
"step": 832
},
{
"epoch": 1.666,
"grad_norm": 0.24714180827140808,
"learning_rate": 4.1670000000000006e-05,
"loss": 0.6145,
"step": 833
},
{
"epoch": 1.6680000000000001,
"grad_norm": 0.22348974645137787,
"learning_rate": 4.1660000000000004e-05,
"loss": 0.5867,
"step": 834
},
{
"epoch": 1.67,
"grad_norm": 0.2049664705991745,
"learning_rate": 4.165e-05,
"loss": 0.5407,
"step": 835
},
{
"epoch": 1.6720000000000002,
"grad_norm": 0.2059626579284668,
"learning_rate": 4.164e-05,
"loss": 0.5682,
"step": 836
},
{
"epoch": 1.674,
"grad_norm": 0.26519888639450073,
"learning_rate": 4.163e-05,
"loss": 0.5899,
"step": 837
},
{
"epoch": 1.6760000000000002,
"grad_norm": 0.2156396508216858,
"learning_rate": 4.1620000000000005e-05,
"loss": 0.5087,
"step": 838
},
{
"epoch": 1.678,
"grad_norm": 0.20123973488807678,
"learning_rate": 4.161e-05,
"loss": 0.5533,
"step": 839
},
{
"epoch": 1.6800000000000002,
"grad_norm": 0.22913528978824615,
"learning_rate": 4.16e-05,
"loss": 0.5258,
"step": 840
},
{
"epoch": 1.682,
"grad_norm": 0.2171671986579895,
"learning_rate": 4.159e-05,
"loss": 0.543,
"step": 841
},
{
"epoch": 1.6840000000000002,
"grad_norm": 0.21813921630382538,
"learning_rate": 4.1580000000000005e-05,
"loss": 0.5416,
"step": 842
},
{
"epoch": 1.686,
"grad_norm": 0.22544947266578674,
"learning_rate": 4.1570000000000003e-05,
"loss": 0.6301,
"step": 843
},
{
"epoch": 1.688,
"grad_norm": 0.21432709693908691,
"learning_rate": 4.156e-05,
"loss": 0.5973,
"step": 844
},
{
"epoch": 1.69,
"grad_norm": 0.20817779004573822,
"learning_rate": 4.155e-05,
"loss": 0.5521,
"step": 845
},
{
"epoch": 1.692,
"grad_norm": 0.22415509819984436,
"learning_rate": 4.154e-05,
"loss": 0.5536,
"step": 846
},
{
"epoch": 1.694,
"grad_norm": 0.21055614948272705,
"learning_rate": 4.1530000000000004e-05,
"loss": 0.6203,
"step": 847
},
{
"epoch": 1.696,
"grad_norm": 0.23882174491882324,
"learning_rate": 4.152e-05,
"loss": 0.5853,
"step": 848
},
{
"epoch": 1.698,
"grad_norm": 0.20354397594928741,
"learning_rate": 4.151000000000001e-05,
"loss": 0.5458,
"step": 849
},
{
"epoch": 1.7,
"grad_norm": 0.22530478239059448,
"learning_rate": 4.15e-05,
"loss": 0.7185,
"step": 850
},
{
"epoch": 1.702,
"grad_norm": 0.25041744112968445,
"learning_rate": 4.1490000000000004e-05,
"loss": 0.5678,
"step": 851
},
{
"epoch": 1.704,
"grad_norm": 0.2135017216205597,
"learning_rate": 4.148e-05,
"loss": 0.6186,
"step": 852
},
{
"epoch": 1.706,
"grad_norm": 0.23338288068771362,
"learning_rate": 4.147e-05,
"loss": 0.6402,
"step": 853
},
{
"epoch": 1.708,
"grad_norm": 0.21697267889976501,
"learning_rate": 4.1460000000000006e-05,
"loss": 0.6018,
"step": 854
},
{
"epoch": 1.71,
"grad_norm": 0.2361728698015213,
"learning_rate": 4.145e-05,
"loss": 0.5507,
"step": 855
},
{
"epoch": 1.712,
"grad_norm": 0.23075933754444122,
"learning_rate": 4.144e-05,
"loss": 0.6301,
"step": 856
},
{
"epoch": 1.714,
"grad_norm": 0.2399604618549347,
"learning_rate": 4.143e-05,
"loss": 0.5621,
"step": 857
},
{
"epoch": 1.716,
"grad_norm": 0.22412839531898499,
"learning_rate": 4.142000000000001e-05,
"loss": 0.5519,
"step": 858
},
{
"epoch": 1.718,
"grad_norm": 0.22783443331718445,
"learning_rate": 4.1410000000000005e-05,
"loss": 0.5019,
"step": 859
},
{
"epoch": 1.72,
"grad_norm": 0.2074798047542572,
"learning_rate": 4.14e-05,
"loss": 0.5808,
"step": 860
},
{
"epoch": 1.722,
"grad_norm": 0.21460920572280884,
"learning_rate": 4.139e-05,
"loss": 0.496,
"step": 861
},
{
"epoch": 1.724,
"grad_norm": 0.2521255612373352,
"learning_rate": 4.138e-05,
"loss": 0.6452,
"step": 862
},
{
"epoch": 1.726,
"grad_norm": 0.21282966434955597,
"learning_rate": 4.1370000000000005e-05,
"loss": 0.547,
"step": 863
},
{
"epoch": 1.728,
"grad_norm": 0.1929510086774826,
"learning_rate": 4.1360000000000004e-05,
"loss": 0.5167,
"step": 864
},
{
"epoch": 1.73,
"grad_norm": 0.21537943184375763,
"learning_rate": 4.135e-05,
"loss": 0.5226,
"step": 865
},
{
"epoch": 1.732,
"grad_norm": 0.2190752625465393,
"learning_rate": 4.134e-05,
"loss": 0.6551,
"step": 866
},
{
"epoch": 1.734,
"grad_norm": 0.23043037950992584,
"learning_rate": 4.133e-05,
"loss": 0.5818,
"step": 867
},
{
"epoch": 1.736,
"grad_norm": 0.1956707090139389,
"learning_rate": 4.1320000000000004e-05,
"loss": 0.5653,
"step": 868
},
{
"epoch": 1.738,
"grad_norm": 0.20191261172294617,
"learning_rate": 4.131e-05,
"loss": 0.5275,
"step": 869
},
{
"epoch": 1.74,
"grad_norm": 0.217775359749794,
"learning_rate": 4.13e-05,
"loss": 0.5777,
"step": 870
},
{
"epoch": 1.742,
"grad_norm": 0.2121749073266983,
"learning_rate": 4.129e-05,
"loss": 0.5817,
"step": 871
},
{
"epoch": 1.744,
"grad_norm": 0.23671622574329376,
"learning_rate": 4.1280000000000005e-05,
"loss": 0.5791,
"step": 872
},
{
"epoch": 1.746,
"grad_norm": 0.21338392794132233,
"learning_rate": 4.127e-05,
"loss": 0.5255,
"step": 873
},
{
"epoch": 1.748,
"grad_norm": 0.2075783610343933,
"learning_rate": 4.126e-05,
"loss": 0.5348,
"step": 874
},
{
"epoch": 1.75,
"grad_norm": 0.21902282536029816,
"learning_rate": 4.125e-05,
"loss": 0.5297,
"step": 875
},
{
"epoch": 1.752,
"grad_norm": 0.22228483855724335,
"learning_rate": 4.124e-05,
"loss": 0.6283,
"step": 876
},
{
"epoch": 1.754,
"grad_norm": 0.20441770553588867,
"learning_rate": 4.123e-05,
"loss": 0.604,
"step": 877
},
{
"epoch": 1.756,
"grad_norm": 0.20509788393974304,
"learning_rate": 4.122e-05,
"loss": 0.5171,
"step": 878
},
{
"epoch": 1.758,
"grad_norm": 0.19927029311656952,
"learning_rate": 4.121000000000001e-05,
"loss": 0.5345,
"step": 879
},
{
"epoch": 1.76,
"grad_norm": 0.22879639267921448,
"learning_rate": 4.12e-05,
"loss": 0.7277,
"step": 880
},
{
"epoch": 1.762,
"grad_norm": 0.19165241718292236,
"learning_rate": 4.1190000000000004e-05,
"loss": 0.5839,
"step": 881
},
{
"epoch": 1.764,
"grad_norm": 0.22788327932357788,
"learning_rate": 4.118e-05,
"loss": 0.5701,
"step": 882
},
{
"epoch": 1.766,
"grad_norm": 0.24636496603488922,
"learning_rate": 4.117e-05,
"loss": 0.5136,
"step": 883
},
{
"epoch": 1.768,
"grad_norm": 0.19353416562080383,
"learning_rate": 4.1160000000000006e-05,
"loss": 0.5419,
"step": 884
},
{
"epoch": 1.77,
"grad_norm": 0.25121909379959106,
"learning_rate": 4.115e-05,
"loss": 0.5708,
"step": 885
},
{
"epoch": 1.772,
"grad_norm": 0.19388718903064728,
"learning_rate": 4.114e-05,
"loss": 0.5358,
"step": 886
},
{
"epoch": 1.774,
"grad_norm": 0.2159653753042221,
"learning_rate": 4.113e-05,
"loss": 0.5699,
"step": 887
},
{
"epoch": 1.776,
"grad_norm": 0.22405487298965454,
"learning_rate": 4.1120000000000006e-05,
"loss": 0.5936,
"step": 888
},
{
"epoch": 1.778,
"grad_norm": 0.23130780458450317,
"learning_rate": 4.1110000000000005e-05,
"loss": 0.5559,
"step": 889
},
{
"epoch": 1.78,
"grad_norm": 0.2627403438091278,
"learning_rate": 4.11e-05,
"loss": 0.718,
"step": 890
},
{
"epoch": 1.782,
"grad_norm": 0.2019093930721283,
"learning_rate": 4.109e-05,
"loss": 0.5455,
"step": 891
},
{
"epoch": 1.784,
"grad_norm": 0.2051168829202652,
"learning_rate": 4.108e-05,
"loss": 0.5509,
"step": 892
},
{
"epoch": 1.786,
"grad_norm": 0.2552720308303833,
"learning_rate": 4.1070000000000005e-05,
"loss": 0.6097,
"step": 893
},
{
"epoch": 1.788,
"grad_norm": 0.21365538239479065,
"learning_rate": 4.106e-05,
"loss": 0.6256,
"step": 894
},
{
"epoch": 1.79,
"grad_norm": 0.22518736124038696,
"learning_rate": 4.105e-05,
"loss": 0.5823,
"step": 895
},
{
"epoch": 1.792,
"grad_norm": 0.22332648932933807,
"learning_rate": 4.104e-05,
"loss": 0.5949,
"step": 896
},
{
"epoch": 1.794,
"grad_norm": 0.20917260646820068,
"learning_rate": 4.103e-05,
"loss": 0.5124,
"step": 897
},
{
"epoch": 1.796,
"grad_norm": 0.20652370154857635,
"learning_rate": 4.1020000000000004e-05,
"loss": 0.5732,
"step": 898
},
{
"epoch": 1.798,
"grad_norm": 0.22270749509334564,
"learning_rate": 4.101e-05,
"loss": 0.5921,
"step": 899
},
{
"epoch": 1.8,
"grad_norm": 0.25644269585609436,
"learning_rate": 4.1e-05,
"loss": 0.5824,
"step": 900
},
{
"epoch": 1.802,
"grad_norm": 0.2324872463941574,
"learning_rate": 4.099e-05,
"loss": 0.6288,
"step": 901
},
{
"epoch": 1.804,
"grad_norm": 0.2203238308429718,
"learning_rate": 4.0980000000000004e-05,
"loss": 0.6001,
"step": 902
},
{
"epoch": 1.806,
"grad_norm": 0.1977611482143402,
"learning_rate": 4.097e-05,
"loss": 0.5367,
"step": 903
},
{
"epoch": 1.808,
"grad_norm": 0.20962348580360413,
"learning_rate": 4.096e-05,
"loss": 0.5974,
"step": 904
},
{
"epoch": 1.81,
"grad_norm": 0.20759950578212738,
"learning_rate": 4.095e-05,
"loss": 0.5846,
"step": 905
},
{
"epoch": 1.812,
"grad_norm": 0.20946946740150452,
"learning_rate": 4.094e-05,
"loss": 0.5357,
"step": 906
},
{
"epoch": 1.814,
"grad_norm": 0.2401723712682724,
"learning_rate": 4.093e-05,
"loss": 0.6251,
"step": 907
},
{
"epoch": 1.8159999999999998,
"grad_norm": 0.183771014213562,
"learning_rate": 4.092e-05,
"loss": 0.4638,
"step": 908
},
{
"epoch": 1.818,
"grad_norm": 0.22831161320209503,
"learning_rate": 4.0910000000000006e-05,
"loss": 0.5606,
"step": 909
},
{
"epoch": 1.8199999999999998,
"grad_norm": 0.2133151739835739,
"learning_rate": 4.09e-05,
"loss": 0.6701,
"step": 910
},
{
"epoch": 1.822,
"grad_norm": 0.2002599835395813,
"learning_rate": 4.089e-05,
"loss": 0.5428,
"step": 911
},
{
"epoch": 1.8239999999999998,
"grad_norm": 0.21106943488121033,
"learning_rate": 4.088e-05,
"loss": 0.5172,
"step": 912
},
{
"epoch": 1.826,
"grad_norm": 0.22433418035507202,
"learning_rate": 4.087e-05,
"loss": 0.4971,
"step": 913
},
{
"epoch": 1.8279999999999998,
"grad_norm": 0.29436904191970825,
"learning_rate": 4.0860000000000005e-05,
"loss": 0.4824,
"step": 914
},
{
"epoch": 1.83,
"grad_norm": 0.2517542243003845,
"learning_rate": 4.085e-05,
"loss": 0.6387,
"step": 915
},
{
"epoch": 1.8319999999999999,
"grad_norm": 0.20705367624759674,
"learning_rate": 4.084e-05,
"loss": 0.546,
"step": 916
},
{
"epoch": 1.834,
"grad_norm": 0.21476560831069946,
"learning_rate": 4.083e-05,
"loss": 0.5628,
"step": 917
},
{
"epoch": 1.8359999999999999,
"grad_norm": 0.2123742401599884,
"learning_rate": 4.0820000000000006e-05,
"loss": 0.5286,
"step": 918
},
{
"epoch": 1.838,
"grad_norm": 0.20832893252372742,
"learning_rate": 4.0810000000000004e-05,
"loss": 0.5871,
"step": 919
},
{
"epoch": 1.8399999999999999,
"grad_norm": 0.1990961730480194,
"learning_rate": 4.08e-05,
"loss": 0.5139,
"step": 920
},
{
"epoch": 1.842,
"grad_norm": 0.2340562492609024,
"learning_rate": 4.079e-05,
"loss": 0.6197,
"step": 921
},
{
"epoch": 1.8439999999999999,
"grad_norm": 0.18560324609279633,
"learning_rate": 4.078e-05,
"loss": 0.398,
"step": 922
},
{
"epoch": 1.846,
"grad_norm": 0.2281452864408493,
"learning_rate": 4.0770000000000004e-05,
"loss": 0.6096,
"step": 923
},
{
"epoch": 1.8479999999999999,
"grad_norm": 0.22291038930416107,
"learning_rate": 4.076e-05,
"loss": 0.5313,
"step": 924
},
{
"epoch": 1.85,
"grad_norm": 0.20964789390563965,
"learning_rate": 4.075e-05,
"loss": 0.5965,
"step": 925
},
{
"epoch": 1.8519999999999999,
"grad_norm": 0.24047353863716125,
"learning_rate": 4.074e-05,
"loss": 0.6198,
"step": 926
},
{
"epoch": 1.854,
"grad_norm": 0.23723970353603363,
"learning_rate": 4.0730000000000005e-05,
"loss": 0.6799,
"step": 927
},
{
"epoch": 1.8559999999999999,
"grad_norm": 0.21580907702445984,
"learning_rate": 4.072e-05,
"loss": 0.5481,
"step": 928
},
{
"epoch": 1.858,
"grad_norm": 0.2145693600177765,
"learning_rate": 4.071e-05,
"loss": 0.552,
"step": 929
},
{
"epoch": 1.8599999999999999,
"grad_norm": 0.2449941337108612,
"learning_rate": 4.07e-05,
"loss": 0.5838,
"step": 930
},
{
"epoch": 1.862,
"grad_norm": 0.2502371370792389,
"learning_rate": 4.069e-05,
"loss": 0.5497,
"step": 931
},
{
"epoch": 1.8639999999999999,
"grad_norm": 0.18988710641860962,
"learning_rate": 4.0680000000000004e-05,
"loss": 0.5292,
"step": 932
},
{
"epoch": 1.866,
"grad_norm": 0.23330973088741302,
"learning_rate": 4.067e-05,
"loss": 0.5252,
"step": 933
},
{
"epoch": 1.8679999999999999,
"grad_norm": 0.2083967626094818,
"learning_rate": 4.066e-05,
"loss": 0.5631,
"step": 934
},
{
"epoch": 1.87,
"grad_norm": 0.2037411481142044,
"learning_rate": 4.065e-05,
"loss": 0.5596,
"step": 935
},
{
"epoch": 1.8719999999999999,
"grad_norm": 0.22660300135612488,
"learning_rate": 4.064e-05,
"loss": 0.587,
"step": 936
},
{
"epoch": 1.874,
"grad_norm": 0.24138827621936798,
"learning_rate": 4.063e-05,
"loss": 0.511,
"step": 937
},
{
"epoch": 1.876,
"grad_norm": 0.23298485577106476,
"learning_rate": 4.062e-05,
"loss": 0.5614,
"step": 938
},
{
"epoch": 1.8780000000000001,
"grad_norm": 0.2537238299846649,
"learning_rate": 4.0610000000000006e-05,
"loss": 0.5249,
"step": 939
},
{
"epoch": 1.88,
"grad_norm": 0.23439259827136993,
"learning_rate": 4.0600000000000004e-05,
"loss": 0.5895,
"step": 940
},
{
"epoch": 1.8820000000000001,
"grad_norm": 0.2297605574131012,
"learning_rate": 4.059e-05,
"loss": 0.5568,
"step": 941
},
{
"epoch": 1.884,
"grad_norm": 0.22932069003582,
"learning_rate": 4.058e-05,
"loss": 0.6007,
"step": 942
},
{
"epoch": 1.8860000000000001,
"grad_norm": 0.19363686442375183,
"learning_rate": 4.057e-05,
"loss": 0.526,
"step": 943
},
{
"epoch": 1.888,
"grad_norm": 0.22038032114505768,
"learning_rate": 4.0560000000000005e-05,
"loss": 0.6154,
"step": 944
},
{
"epoch": 1.8900000000000001,
"grad_norm": 0.20162424445152283,
"learning_rate": 4.055e-05,
"loss": 0.5284,
"step": 945
},
{
"epoch": 1.892,
"grad_norm": 0.24655213952064514,
"learning_rate": 4.054e-05,
"loss": 0.5562,
"step": 946
},
{
"epoch": 1.8940000000000001,
"grad_norm": 0.23588095605373383,
"learning_rate": 4.053e-05,
"loss": 0.7056,
"step": 947
},
{
"epoch": 1.896,
"grad_norm": 0.21690240502357483,
"learning_rate": 4.0520000000000005e-05,
"loss": 0.5517,
"step": 948
},
{
"epoch": 1.8980000000000001,
"grad_norm": 0.19915015995502472,
"learning_rate": 4.0510000000000003e-05,
"loss": 0.5044,
"step": 949
},
{
"epoch": 1.9,
"grad_norm": 0.22316230833530426,
"learning_rate": 4.05e-05,
"loss": 0.557,
"step": 950
},
{
"epoch": 1.9020000000000001,
"grad_norm": 0.23766140639781952,
"learning_rate": 4.049e-05,
"loss": 0.5601,
"step": 951
},
{
"epoch": 1.904,
"grad_norm": 0.25181880593299866,
"learning_rate": 4.048e-05,
"loss": 0.5983,
"step": 952
},
{
"epoch": 1.9060000000000001,
"grad_norm": 0.21048982441425323,
"learning_rate": 4.0470000000000004e-05,
"loss": 0.5901,
"step": 953
},
{
"epoch": 1.908,
"grad_norm": 0.19192460179328918,
"learning_rate": 4.046e-05,
"loss": 0.4686,
"step": 954
},
{
"epoch": 1.9100000000000001,
"grad_norm": 0.23322626948356628,
"learning_rate": 4.045000000000001e-05,
"loss": 0.6613,
"step": 955
},
{
"epoch": 1.912,
"grad_norm": 0.21661929786205292,
"learning_rate": 4.044e-05,
"loss": 0.5491,
"step": 956
},
{
"epoch": 1.9140000000000001,
"grad_norm": 0.21244138479232788,
"learning_rate": 4.0430000000000004e-05,
"loss": 0.5141,
"step": 957
},
{
"epoch": 1.916,
"grad_norm": 0.22383420169353485,
"learning_rate": 4.042e-05,
"loss": 0.5722,
"step": 958
},
{
"epoch": 1.9180000000000001,
"grad_norm": 0.234762042760849,
"learning_rate": 4.041e-05,
"loss": 0.5825,
"step": 959
},
{
"epoch": 1.92,
"grad_norm": 0.20146997272968292,
"learning_rate": 4.0400000000000006e-05,
"loss": 0.5272,
"step": 960
},
{
"epoch": 1.9220000000000002,
"grad_norm": 0.22217825055122375,
"learning_rate": 4.039e-05,
"loss": 0.6413,
"step": 961
},
{
"epoch": 1.924,
"grad_norm": 0.2346162497997284,
"learning_rate": 4.038e-05,
"loss": 0.6417,
"step": 962
},
{
"epoch": 1.9260000000000002,
"grad_norm": 0.2526434063911438,
"learning_rate": 4.037e-05,
"loss": 0.5815,
"step": 963
},
{
"epoch": 1.928,
"grad_norm": 0.22428862750530243,
"learning_rate": 4.0360000000000007e-05,
"loss": 0.6114,
"step": 964
},
{
"epoch": 1.9300000000000002,
"grad_norm": 0.21866799890995026,
"learning_rate": 4.0350000000000005e-05,
"loss": 0.542,
"step": 965
},
{
"epoch": 1.932,
"grad_norm": 0.22071845829486847,
"learning_rate": 4.034e-05,
"loss": 0.5551,
"step": 966
},
{
"epoch": 1.9340000000000002,
"grad_norm": 0.25092819333076477,
"learning_rate": 4.033e-05,
"loss": 0.6824,
"step": 967
},
{
"epoch": 1.936,
"grad_norm": 0.21841230988502502,
"learning_rate": 4.032e-05,
"loss": 0.604,
"step": 968
},
{
"epoch": 1.938,
"grad_norm": 0.22759634256362915,
"learning_rate": 4.0310000000000005e-05,
"loss": 0.6722,
"step": 969
},
{
"epoch": 1.94,
"grad_norm": 0.24783313274383545,
"learning_rate": 4.0300000000000004e-05,
"loss": 0.5625,
"step": 970
},
{
"epoch": 1.942,
"grad_norm": 0.22955209016799927,
"learning_rate": 4.029e-05,
"loss": 0.653,
"step": 971
},
{
"epoch": 1.944,
"grad_norm": 0.2600350081920624,
"learning_rate": 4.028e-05,
"loss": 0.6628,
"step": 972
},
{
"epoch": 1.946,
"grad_norm": 0.21005043387413025,
"learning_rate": 4.027e-05,
"loss": 0.4254,
"step": 973
},
{
"epoch": 1.948,
"grad_norm": 0.20057177543640137,
"learning_rate": 4.0260000000000004e-05,
"loss": 0.5552,
"step": 974
},
{
"epoch": 1.95,
"grad_norm": 0.2378489077091217,
"learning_rate": 4.025e-05,
"loss": 0.5861,
"step": 975
},
{
"epoch": 1.952,
"grad_norm": 0.27887988090515137,
"learning_rate": 4.024e-05,
"loss": 0.5742,
"step": 976
},
{
"epoch": 1.954,
"grad_norm": 0.19681380689144135,
"learning_rate": 4.023e-05,
"loss": 0.5129,
"step": 977
},
{
"epoch": 1.956,
"grad_norm": 0.21786653995513916,
"learning_rate": 4.0220000000000005e-05,
"loss": 0.5089,
"step": 978
},
{
"epoch": 1.958,
"grad_norm": 0.2101248949766159,
"learning_rate": 4.021e-05,
"loss": 0.6421,
"step": 979
},
{
"epoch": 1.96,
"grad_norm": 0.22144469618797302,
"learning_rate": 4.02e-05,
"loss": 0.5279,
"step": 980
},
{
"epoch": 1.962,
"grad_norm": 0.2304532378911972,
"learning_rate": 4.019e-05,
"loss": 0.5908,
"step": 981
},
{
"epoch": 1.964,
"grad_norm": 0.28714126348495483,
"learning_rate": 4.018e-05,
"loss": 0.6038,
"step": 982
},
{
"epoch": 1.966,
"grad_norm": 0.2407093048095703,
"learning_rate": 4.017e-05,
"loss": 0.5684,
"step": 983
},
{
"epoch": 1.968,
"grad_norm": 0.20832942426204681,
"learning_rate": 4.016e-05,
"loss": 0.5724,
"step": 984
},
{
"epoch": 1.97,
"grad_norm": 0.20581866800785065,
"learning_rate": 4.015000000000001e-05,
"loss": 0.5205,
"step": 985
},
{
"epoch": 1.972,
"grad_norm": 0.2234366089105606,
"learning_rate": 4.014e-05,
"loss": 0.6269,
"step": 986
},
{
"epoch": 1.974,
"grad_norm": 0.23339971899986267,
"learning_rate": 4.0130000000000004e-05,
"loss": 0.5409,
"step": 987
},
{
"epoch": 1.976,
"grad_norm": 0.21210043132305145,
"learning_rate": 4.012e-05,
"loss": 0.502,
"step": 988
},
{
"epoch": 1.978,
"grad_norm": 0.2021539956331253,
"learning_rate": 4.011e-05,
"loss": 0.5456,
"step": 989
},
{
"epoch": 1.98,
"grad_norm": 0.24333329498767853,
"learning_rate": 4.0100000000000006e-05,
"loss": 0.6098,
"step": 990
},
{
"epoch": 1.982,
"grad_norm": 0.19317735731601715,
"learning_rate": 4.009e-05,
"loss": 0.4954,
"step": 991
},
{
"epoch": 1.984,
"grad_norm": 0.28837406635284424,
"learning_rate": 4.008e-05,
"loss": 0.5882,
"step": 992
},
{
"epoch": 1.986,
"grad_norm": 0.233433797955513,
"learning_rate": 4.007e-05,
"loss": 0.5944,
"step": 993
},
{
"epoch": 1.988,
"grad_norm": 0.2529718577861786,
"learning_rate": 4.0060000000000006e-05,
"loss": 0.5822,
"step": 994
},
{
"epoch": 1.99,
"grad_norm": 0.2050531506538391,
"learning_rate": 4.0050000000000004e-05,
"loss": 0.5316,
"step": 995
},
{
"epoch": 1.992,
"grad_norm": 0.20875830948352814,
"learning_rate": 4.004e-05,
"loss": 0.5767,
"step": 996
},
{
"epoch": 1.994,
"grad_norm": 0.21856260299682617,
"learning_rate": 4.003e-05,
"loss": 0.576,
"step": 997
},
{
"epoch": 1.996,
"grad_norm": 0.2040548473596573,
"learning_rate": 4.002e-05,
"loss": 0.5035,
"step": 998
},
{
"epoch": 1.998,
"grad_norm": 0.22710789740085602,
"learning_rate": 4.0010000000000005e-05,
"loss": 0.6112,
"step": 999
},
{
"epoch": 2.0,
"grad_norm": 0.20931439101696014,
"learning_rate": 4e-05,
"loss": 0.5229,
"step": 1000
},
{
"epoch": 2.002,
"grad_norm": 0.2300502359867096,
"learning_rate": 3.999e-05,
"loss": 0.5785,
"step": 1001
},
{
"epoch": 2.004,
"grad_norm": 0.22193074226379395,
"learning_rate": 3.998e-05,
"loss": 0.5755,
"step": 1002
},
{
"epoch": 2.006,
"grad_norm": 0.24395062029361725,
"learning_rate": 3.9970000000000005e-05,
"loss": 0.5643,
"step": 1003
},
{
"epoch": 2.008,
"grad_norm": 0.19612465798854828,
"learning_rate": 3.9960000000000004e-05,
"loss": 0.5181,
"step": 1004
},
{
"epoch": 2.01,
"grad_norm": 0.2229042500257492,
"learning_rate": 3.995e-05,
"loss": 0.5675,
"step": 1005
},
{
"epoch": 2.012,
"grad_norm": 0.2308545857667923,
"learning_rate": 3.994e-05,
"loss": 0.5175,
"step": 1006
},
{
"epoch": 2.014,
"grad_norm": 0.22244416177272797,
"learning_rate": 3.993e-05,
"loss": 0.5007,
"step": 1007
},
{
"epoch": 2.016,
"grad_norm": 0.20587439835071564,
"learning_rate": 3.9920000000000004e-05,
"loss": 0.5403,
"step": 1008
},
{
"epoch": 2.018,
"grad_norm": 0.21140223741531372,
"learning_rate": 3.991e-05,
"loss": 0.5211,
"step": 1009
},
{
"epoch": 2.02,
"grad_norm": 0.2223360389471054,
"learning_rate": 3.99e-05,
"loss": 0.701,
"step": 1010
},
{
"epoch": 2.022,
"grad_norm": 0.22314807772636414,
"learning_rate": 3.989e-05,
"loss": 0.6075,
"step": 1011
},
{
"epoch": 2.024,
"grad_norm": 0.23748956620693207,
"learning_rate": 3.988e-05,
"loss": 0.5044,
"step": 1012
},
{
"epoch": 2.026,
"grad_norm": 0.23993243277072906,
"learning_rate": 3.987e-05,
"loss": 0.5213,
"step": 1013
},
{
"epoch": 2.028,
"grad_norm": 0.24043142795562744,
"learning_rate": 3.986e-05,
"loss": 0.5525,
"step": 1014
},
{
"epoch": 2.03,
"grad_norm": 0.23012974858283997,
"learning_rate": 3.9850000000000006e-05,
"loss": 0.5964,
"step": 1015
},
{
"epoch": 2.032,
"grad_norm": 0.2021389603614807,
"learning_rate": 3.984e-05,
"loss": 0.5016,
"step": 1016
},
{
"epoch": 2.034,
"grad_norm": 0.22271737456321716,
"learning_rate": 3.983e-05,
"loss": 0.5695,
"step": 1017
},
{
"epoch": 2.036,
"grad_norm": 0.2237047702074051,
"learning_rate": 3.982e-05,
"loss": 0.5098,
"step": 1018
},
{
"epoch": 2.038,
"grad_norm": 0.20218700170516968,
"learning_rate": 3.981e-05,
"loss": 0.4349,
"step": 1019
},
{
"epoch": 2.04,
"grad_norm": 0.24619650840759277,
"learning_rate": 3.9800000000000005e-05,
"loss": 0.6199,
"step": 1020
},
{
"epoch": 2.042,
"grad_norm": 0.24785549938678741,
"learning_rate": 3.979e-05,
"loss": 0.5889,
"step": 1021
},
{
"epoch": 2.044,
"grad_norm": 0.22918753325939178,
"learning_rate": 3.978e-05,
"loss": 0.5206,
"step": 1022
},
{
"epoch": 2.046,
"grad_norm": 0.22109267115592957,
"learning_rate": 3.977e-05,
"loss": 0.553,
"step": 1023
},
{
"epoch": 2.048,
"grad_norm": 0.22089530527591705,
"learning_rate": 3.9760000000000006e-05,
"loss": 0.5653,
"step": 1024
},
{
"epoch": 2.05,
"grad_norm": 0.2215888500213623,
"learning_rate": 3.9750000000000004e-05,
"loss": 0.6286,
"step": 1025
},
{
"epoch": 2.052,
"grad_norm": 0.2496936172246933,
"learning_rate": 3.974e-05,
"loss": 0.7117,
"step": 1026
},
{
"epoch": 2.054,
"grad_norm": 0.20800064504146576,
"learning_rate": 3.973e-05,
"loss": 0.498,
"step": 1027
},
{
"epoch": 2.056,
"grad_norm": 0.23079819977283478,
"learning_rate": 3.972e-05,
"loss": 0.6061,
"step": 1028
},
{
"epoch": 2.058,
"grad_norm": 0.23401056230068207,
"learning_rate": 3.9710000000000004e-05,
"loss": 0.5678,
"step": 1029
},
{
"epoch": 2.06,
"grad_norm": 0.27760186791419983,
"learning_rate": 3.97e-05,
"loss": 0.684,
"step": 1030
},
{
"epoch": 2.062,
"grad_norm": 0.22640343010425568,
"learning_rate": 3.969e-05,
"loss": 0.5338,
"step": 1031
},
{
"epoch": 2.064,
"grad_norm": 0.1947317123413086,
"learning_rate": 3.968e-05,
"loss": 0.4795,
"step": 1032
},
{
"epoch": 2.066,
"grad_norm": 0.22717192769050598,
"learning_rate": 3.9670000000000005e-05,
"loss": 0.5183,
"step": 1033
},
{
"epoch": 2.068,
"grad_norm": 0.21169620752334595,
"learning_rate": 3.966e-05,
"loss": 0.5641,
"step": 1034
},
{
"epoch": 2.07,
"grad_norm": 0.9110057950019836,
"learning_rate": 3.965e-05,
"loss": 0.6311,
"step": 1035
},
{
"epoch": 2.072,
"grad_norm": 0.2137368768453598,
"learning_rate": 3.964e-05,
"loss": 0.5862,
"step": 1036
},
{
"epoch": 2.074,
"grad_norm": 0.23863641917705536,
"learning_rate": 3.963e-05,
"loss": 0.5416,
"step": 1037
},
{
"epoch": 2.076,
"grad_norm": 0.2136143296957016,
"learning_rate": 3.9620000000000004e-05,
"loss": 0.5665,
"step": 1038
},
{
"epoch": 2.078,
"grad_norm": 0.22708338499069214,
"learning_rate": 3.961e-05,
"loss": 0.6251,
"step": 1039
},
{
"epoch": 2.08,
"grad_norm": 0.24242404103279114,
"learning_rate": 3.960000000000001e-05,
"loss": 0.5365,
"step": 1040
},
{
"epoch": 2.082,
"grad_norm": 0.24977032840251923,
"learning_rate": 3.959e-05,
"loss": 0.5447,
"step": 1041
},
{
"epoch": 2.084,
"grad_norm": 0.24284620583057404,
"learning_rate": 3.958e-05,
"loss": 0.6634,
"step": 1042
},
{
"epoch": 2.086,
"grad_norm": 0.23839087784290314,
"learning_rate": 3.957e-05,
"loss": 0.6501,
"step": 1043
},
{
"epoch": 2.088,
"grad_norm": 0.213258296251297,
"learning_rate": 3.956e-05,
"loss": 0.5919,
"step": 1044
},
{
"epoch": 2.09,
"grad_norm": 0.2643541097640991,
"learning_rate": 3.9550000000000006e-05,
"loss": 0.5912,
"step": 1045
},
{
"epoch": 2.092,
"grad_norm": 0.24359184503555298,
"learning_rate": 3.954e-05,
"loss": 0.5563,
"step": 1046
},
{
"epoch": 2.094,
"grad_norm": 0.19231514632701874,
"learning_rate": 3.953e-05,
"loss": 0.5082,
"step": 1047
},
{
"epoch": 2.096,
"grad_norm": 0.20474958419799805,
"learning_rate": 3.952e-05,
"loss": 0.4931,
"step": 1048
},
{
"epoch": 2.098,
"grad_norm": 0.22740337252616882,
"learning_rate": 3.951e-05,
"loss": 0.5513,
"step": 1049
},
{
"epoch": 2.1,
"grad_norm": 0.23002669215202332,
"learning_rate": 3.9500000000000005e-05,
"loss": 0.5292,
"step": 1050
},
{
"epoch": 2.102,
"grad_norm": 0.22021901607513428,
"learning_rate": 3.9489999999999996e-05,
"loss": 0.5926,
"step": 1051
},
{
"epoch": 2.104,
"grad_norm": 0.2307133972644806,
"learning_rate": 3.948e-05,
"loss": 0.5864,
"step": 1052
},
{
"epoch": 2.106,
"grad_norm": 0.2517574727535248,
"learning_rate": 3.947e-05,
"loss": 0.6357,
"step": 1053
},
{
"epoch": 2.108,
"grad_norm": 0.220330610871315,
"learning_rate": 3.9460000000000005e-05,
"loss": 0.4541,
"step": 1054
},
{
"epoch": 2.11,
"grad_norm": 0.24779756367206573,
"learning_rate": 3.9450000000000003e-05,
"loss": 0.5963,
"step": 1055
},
{
"epoch": 2.112,
"grad_norm": 0.22897957265377045,
"learning_rate": 3.944e-05,
"loss": 0.4901,
"step": 1056
},
{
"epoch": 2.114,
"grad_norm": 0.21515005826950073,
"learning_rate": 3.943e-05,
"loss": 0.5122,
"step": 1057
},
{
"epoch": 2.116,
"grad_norm": 0.23861292004585266,
"learning_rate": 3.942e-05,
"loss": 0.5649,
"step": 1058
},
{
"epoch": 2.118,
"grad_norm": 0.2284981906414032,
"learning_rate": 3.9410000000000004e-05,
"loss": 0.6206,
"step": 1059
},
{
"epoch": 2.12,
"grad_norm": 0.2444058358669281,
"learning_rate": 3.94e-05,
"loss": 0.5106,
"step": 1060
},
{
"epoch": 2.122,
"grad_norm": 0.22870151698589325,
"learning_rate": 3.939e-05,
"loss": 0.5614,
"step": 1061
},
{
"epoch": 2.124,
"grad_norm": 0.22062517702579498,
"learning_rate": 3.938e-05,
"loss": 0.6262,
"step": 1062
},
{
"epoch": 2.126,
"grad_norm": 0.24075838923454285,
"learning_rate": 3.9370000000000004e-05,
"loss": 0.5642,
"step": 1063
},
{
"epoch": 2.128,
"grad_norm": 0.2970450222492218,
"learning_rate": 3.936e-05,
"loss": 0.4678,
"step": 1064
},
{
"epoch": 2.13,
"grad_norm": 0.21903452277183533,
"learning_rate": 3.935e-05,
"loss": 0.5021,
"step": 1065
},
{
"epoch": 2.132,
"grad_norm": 0.23422181606292725,
"learning_rate": 3.9340000000000006e-05,
"loss": 0.519,
"step": 1066
},
{
"epoch": 2.134,
"grad_norm": 0.2115645855665207,
"learning_rate": 3.933e-05,
"loss": 0.541,
"step": 1067
},
{
"epoch": 2.136,
"grad_norm": 0.21695291996002197,
"learning_rate": 3.932e-05,
"loss": 0.5288,
"step": 1068
},
{
"epoch": 2.138,
"grad_norm": 0.23118314146995544,
"learning_rate": 3.931e-05,
"loss": 0.6113,
"step": 1069
},
{
"epoch": 2.14,
"grad_norm": 0.199484184384346,
"learning_rate": 3.9300000000000007e-05,
"loss": 0.4818,
"step": 1070
},
{
"epoch": 2.142,
"grad_norm": 0.22116996347904205,
"learning_rate": 3.9290000000000005e-05,
"loss": 0.5796,
"step": 1071
},
{
"epoch": 2.144,
"grad_norm": 0.20206792652606964,
"learning_rate": 3.9280000000000003e-05,
"loss": 0.4504,
"step": 1072
},
{
"epoch": 2.146,
"grad_norm": 0.22893157601356506,
"learning_rate": 3.927e-05,
"loss": 0.6186,
"step": 1073
},
{
"epoch": 2.148,
"grad_norm": 0.21714241802692413,
"learning_rate": 3.926e-05,
"loss": 0.4772,
"step": 1074
},
{
"epoch": 2.15,
"grad_norm": 0.22640830278396606,
"learning_rate": 3.9250000000000005e-05,
"loss": 0.5859,
"step": 1075
},
{
"epoch": 2.152,
"grad_norm": 0.23758919537067413,
"learning_rate": 3.9240000000000004e-05,
"loss": 0.5819,
"step": 1076
},
{
"epoch": 2.154,
"grad_norm": 0.22804389894008636,
"learning_rate": 3.923e-05,
"loss": 0.6115,
"step": 1077
},
{
"epoch": 2.156,
"grad_norm": 0.22379794716835022,
"learning_rate": 3.922e-05,
"loss": 0.5003,
"step": 1078
},
{
"epoch": 2.158,
"grad_norm": 0.21899166703224182,
"learning_rate": 3.921e-05,
"loss": 0.5034,
"step": 1079
},
{
"epoch": 2.16,
"grad_norm": 0.23076632618904114,
"learning_rate": 3.9200000000000004e-05,
"loss": 0.5094,
"step": 1080
},
{
"epoch": 2.162,
"grad_norm": 0.23180313408374786,
"learning_rate": 3.919e-05,
"loss": 0.5651,
"step": 1081
},
{
"epoch": 2.164,
"grad_norm": 0.22800596058368683,
"learning_rate": 3.918e-05,
"loss": 0.4437,
"step": 1082
},
{
"epoch": 2.166,
"grad_norm": 0.2834121584892273,
"learning_rate": 3.917e-05,
"loss": 0.5514,
"step": 1083
},
{
"epoch": 2.168,
"grad_norm": 0.21410004794597626,
"learning_rate": 3.9160000000000005e-05,
"loss": 0.5601,
"step": 1084
},
{
"epoch": 2.17,
"grad_norm": 0.23435719311237335,
"learning_rate": 3.915e-05,
"loss": 0.4677,
"step": 1085
},
{
"epoch": 2.172,
"grad_norm": 0.24208413064479828,
"learning_rate": 3.914e-05,
"loss": 0.575,
"step": 1086
},
{
"epoch": 2.174,
"grad_norm": 0.2928978502750397,
"learning_rate": 3.913e-05,
"loss": 0.6441,
"step": 1087
},
{
"epoch": 2.176,
"grad_norm": 0.24352501332759857,
"learning_rate": 3.912e-05,
"loss": 0.4845,
"step": 1088
},
{
"epoch": 2.178,
"grad_norm": 0.23127582669258118,
"learning_rate": 3.911e-05,
"loss": 0.5877,
"step": 1089
},
{
"epoch": 2.18,
"grad_norm": 0.2425108104944229,
"learning_rate": 3.91e-05,
"loss": 0.5549,
"step": 1090
},
{
"epoch": 2.182,
"grad_norm": 0.22946766018867493,
"learning_rate": 3.909000000000001e-05,
"loss": 0.5377,
"step": 1091
},
{
"epoch": 2.184,
"grad_norm": 0.22083094716072083,
"learning_rate": 3.908e-05,
"loss": 0.5841,
"step": 1092
},
{
"epoch": 2.186,
"grad_norm": 0.2550792098045349,
"learning_rate": 3.9070000000000004e-05,
"loss": 0.5348,
"step": 1093
},
{
"epoch": 2.188,
"grad_norm": 0.22484949231147766,
"learning_rate": 3.906e-05,
"loss": 0.5632,
"step": 1094
},
{
"epoch": 2.19,
"grad_norm": 0.21601122617721558,
"learning_rate": 3.905e-05,
"loss": 0.5059,
"step": 1095
},
{
"epoch": 2.192,
"grad_norm": 0.22118137776851654,
"learning_rate": 3.9040000000000006e-05,
"loss": 0.5811,
"step": 1096
},
{
"epoch": 2.194,
"grad_norm": 0.26987406611442566,
"learning_rate": 3.903e-05,
"loss": 0.6285,
"step": 1097
},
{
"epoch": 2.196,
"grad_norm": 0.20037396252155304,
"learning_rate": 3.902e-05,
"loss": 0.5504,
"step": 1098
},
{
"epoch": 2.198,
"grad_norm": 0.2180766761302948,
"learning_rate": 3.901e-05,
"loss": 0.594,
"step": 1099
},
{
"epoch": 2.2,
"grad_norm": 0.219328373670578,
"learning_rate": 3.9000000000000006e-05,
"loss": 0.5788,
"step": 1100
},
{
"epoch": 2.202,
"grad_norm": 0.21294790506362915,
"learning_rate": 3.8990000000000004e-05,
"loss": 0.5512,
"step": 1101
},
{
"epoch": 2.204,
"grad_norm": 0.2298852503299713,
"learning_rate": 3.898e-05,
"loss": 0.5362,
"step": 1102
},
{
"epoch": 2.206,
"grad_norm": 0.22244185209274292,
"learning_rate": 3.897e-05,
"loss": 0.5054,
"step": 1103
},
{
"epoch": 2.208,
"grad_norm": 0.20233727991580963,
"learning_rate": 3.896e-05,
"loss": 0.518,
"step": 1104
},
{
"epoch": 2.21,
"grad_norm": 0.22405940294265747,
"learning_rate": 3.8950000000000005e-05,
"loss": 0.5718,
"step": 1105
},
{
"epoch": 2.212,
"grad_norm": 0.2102762758731842,
"learning_rate": 3.894e-05,
"loss": 0.4936,
"step": 1106
},
{
"epoch": 2.214,
"grad_norm": 0.19645659625530243,
"learning_rate": 3.893e-05,
"loss": 0.432,
"step": 1107
},
{
"epoch": 2.216,
"grad_norm": 0.23301316797733307,
"learning_rate": 3.892e-05,
"loss": 0.5298,
"step": 1108
},
{
"epoch": 2.218,
"grad_norm": 0.2393760234117508,
"learning_rate": 3.8910000000000005e-05,
"loss": 0.5487,
"step": 1109
},
{
"epoch": 2.22,
"grad_norm": 0.22411087155342102,
"learning_rate": 3.8900000000000004e-05,
"loss": 0.5053,
"step": 1110
},
{
"epoch": 2.222,
"grad_norm": 0.2210693508386612,
"learning_rate": 3.889e-05,
"loss": 0.6067,
"step": 1111
},
{
"epoch": 2.224,
"grad_norm": 0.235130175948143,
"learning_rate": 3.888e-05,
"loss": 0.576,
"step": 1112
},
{
"epoch": 2.226,
"grad_norm": 0.2377382218837738,
"learning_rate": 3.887e-05,
"loss": 0.5263,
"step": 1113
},
{
"epoch": 2.228,
"grad_norm": 0.2421228140592575,
"learning_rate": 3.8860000000000004e-05,
"loss": 0.6216,
"step": 1114
},
{
"epoch": 2.23,
"grad_norm": 0.24329040944576263,
"learning_rate": 3.885e-05,
"loss": 0.5613,
"step": 1115
},
{
"epoch": 2.232,
"grad_norm": 0.20890939235687256,
"learning_rate": 3.884e-05,
"loss": 0.5432,
"step": 1116
},
{
"epoch": 2.234,
"grad_norm": 0.24169021844863892,
"learning_rate": 3.883e-05,
"loss": 0.5293,
"step": 1117
},
{
"epoch": 2.2359999999999998,
"grad_norm": 0.21673136949539185,
"learning_rate": 3.882e-05,
"loss": 0.5403,
"step": 1118
},
{
"epoch": 2.238,
"grad_norm": 0.2557161748409271,
"learning_rate": 3.881e-05,
"loss": 0.5339,
"step": 1119
},
{
"epoch": 2.24,
"grad_norm": 0.23125320672988892,
"learning_rate": 3.88e-05,
"loss": 0.592,
"step": 1120
},
{
"epoch": 2.242,
"grad_norm": 0.20030373334884644,
"learning_rate": 3.8790000000000006e-05,
"loss": 0.5503,
"step": 1121
},
{
"epoch": 2.2439999999999998,
"grad_norm": 0.24123123288154602,
"learning_rate": 3.878e-05,
"loss": 0.4135,
"step": 1122
},
{
"epoch": 2.246,
"grad_norm": 0.2905665636062622,
"learning_rate": 3.877e-05,
"loss": 0.5786,
"step": 1123
},
{
"epoch": 2.248,
"grad_norm": 0.2328137904405594,
"learning_rate": 3.876e-05,
"loss": 0.5716,
"step": 1124
},
{
"epoch": 2.25,
"grad_norm": 0.2417079508304596,
"learning_rate": 3.875e-05,
"loss": 0.5863,
"step": 1125
},
{
"epoch": 2.252,
"grad_norm": 0.23080268502235413,
"learning_rate": 3.8740000000000005e-05,
"loss": 0.4844,
"step": 1126
},
{
"epoch": 2.254,
"grad_norm": 0.21996165812015533,
"learning_rate": 3.873e-05,
"loss": 0.5092,
"step": 1127
},
{
"epoch": 2.2560000000000002,
"grad_norm": 0.2577206492424011,
"learning_rate": 3.872e-05,
"loss": 0.5431,
"step": 1128
},
{
"epoch": 2.258,
"grad_norm": 0.25457316637039185,
"learning_rate": 3.871e-05,
"loss": 0.6858,
"step": 1129
},
{
"epoch": 2.26,
"grad_norm": 0.2645741105079651,
"learning_rate": 3.8700000000000006e-05,
"loss": 0.5619,
"step": 1130
},
{
"epoch": 2.262,
"grad_norm": 0.22185394167900085,
"learning_rate": 3.8690000000000004e-05,
"loss": 0.559,
"step": 1131
},
{
"epoch": 2.2640000000000002,
"grad_norm": 0.2066442221403122,
"learning_rate": 3.868e-05,
"loss": 0.5291,
"step": 1132
},
{
"epoch": 2.266,
"grad_norm": 0.2601262032985687,
"learning_rate": 3.867e-05,
"loss": 0.6364,
"step": 1133
},
{
"epoch": 2.268,
"grad_norm": 0.22718307375907898,
"learning_rate": 3.866e-05,
"loss": 0.5746,
"step": 1134
},
{
"epoch": 2.27,
"grad_norm": 0.2720508873462677,
"learning_rate": 3.8650000000000004e-05,
"loss": 0.6349,
"step": 1135
},
{
"epoch": 2.2720000000000002,
"grad_norm": 0.22701649367809296,
"learning_rate": 3.864e-05,
"loss": 0.6053,
"step": 1136
},
{
"epoch": 2.274,
"grad_norm": 0.2616439163684845,
"learning_rate": 3.863e-05,
"loss": 0.5182,
"step": 1137
},
{
"epoch": 2.276,
"grad_norm": 0.27830225229263306,
"learning_rate": 3.862e-05,
"loss": 0.5754,
"step": 1138
},
{
"epoch": 2.278,
"grad_norm": 0.20636102557182312,
"learning_rate": 3.8610000000000005e-05,
"loss": 0.5245,
"step": 1139
},
{
"epoch": 2.2800000000000002,
"grad_norm": 0.21962614357471466,
"learning_rate": 3.86e-05,
"loss": 0.59,
"step": 1140
},
{
"epoch": 2.282,
"grad_norm": 0.21946291625499725,
"learning_rate": 3.859e-05,
"loss": 0.5318,
"step": 1141
},
{
"epoch": 2.284,
"grad_norm": 0.22070477902889252,
"learning_rate": 3.858e-05,
"loss": 0.5564,
"step": 1142
},
{
"epoch": 2.286,
"grad_norm": 0.27207010984420776,
"learning_rate": 3.857e-05,
"loss": 0.5756,
"step": 1143
},
{
"epoch": 2.288,
"grad_norm": 0.27739766240119934,
"learning_rate": 3.8560000000000004e-05,
"loss": 0.5278,
"step": 1144
},
{
"epoch": 2.29,
"grad_norm": 0.22324499487876892,
"learning_rate": 3.855e-05,
"loss": 0.5717,
"step": 1145
},
{
"epoch": 2.292,
"grad_norm": 0.23215104639530182,
"learning_rate": 3.854000000000001e-05,
"loss": 0.6419,
"step": 1146
},
{
"epoch": 2.294,
"grad_norm": 0.20787887275218964,
"learning_rate": 3.853e-05,
"loss": 0.582,
"step": 1147
},
{
"epoch": 2.296,
"grad_norm": 0.20145432651042938,
"learning_rate": 3.8520000000000004e-05,
"loss": 0.4721,
"step": 1148
},
{
"epoch": 2.298,
"grad_norm": 0.23586659133434296,
"learning_rate": 3.851e-05,
"loss": 0.5895,
"step": 1149
},
{
"epoch": 2.3,
"grad_norm": 0.2162531614303589,
"learning_rate": 3.85e-05,
"loss": 0.4895,
"step": 1150
},
{
"epoch": 2.302,
"grad_norm": 0.21131418645381927,
"learning_rate": 3.8490000000000006e-05,
"loss": 0.5607,
"step": 1151
},
{
"epoch": 2.304,
"grad_norm": 0.21754594147205353,
"learning_rate": 3.848e-05,
"loss": 0.4685,
"step": 1152
},
{
"epoch": 2.306,
"grad_norm": 0.23733021318912506,
"learning_rate": 3.847e-05,
"loss": 0.5187,
"step": 1153
},
{
"epoch": 2.308,
"grad_norm": 0.22440409660339355,
"learning_rate": 3.846e-05,
"loss": 0.6549,
"step": 1154
},
{
"epoch": 2.31,
"grad_norm": 0.21810773015022278,
"learning_rate": 3.845e-05,
"loss": 0.5227,
"step": 1155
},
{
"epoch": 2.312,
"grad_norm": 0.22836032509803772,
"learning_rate": 3.8440000000000005e-05,
"loss": 0.5562,
"step": 1156
},
{
"epoch": 2.314,
"grad_norm": 0.2178371548652649,
"learning_rate": 3.8429999999999996e-05,
"loss": 0.4847,
"step": 1157
},
{
"epoch": 2.316,
"grad_norm": 0.22366905212402344,
"learning_rate": 3.842e-05,
"loss": 0.5684,
"step": 1158
},
{
"epoch": 2.318,
"grad_norm": 0.21383610367774963,
"learning_rate": 3.841e-05,
"loss": 0.5217,
"step": 1159
},
{
"epoch": 2.32,
"grad_norm": 0.2699750065803528,
"learning_rate": 3.8400000000000005e-05,
"loss": 0.5617,
"step": 1160
},
{
"epoch": 2.322,
"grad_norm": 0.2394636571407318,
"learning_rate": 3.8390000000000003e-05,
"loss": 0.5369,
"step": 1161
},
{
"epoch": 2.324,
"grad_norm": 0.22675740718841553,
"learning_rate": 3.838e-05,
"loss": 0.5482,
"step": 1162
},
{
"epoch": 2.326,
"grad_norm": 0.2006162852048874,
"learning_rate": 3.837e-05,
"loss": 0.509,
"step": 1163
},
{
"epoch": 2.328,
"grad_norm": 0.2551890015602112,
"learning_rate": 3.836e-05,
"loss": 0.488,
"step": 1164
},
{
"epoch": 2.33,
"grad_norm": 0.20632576942443848,
"learning_rate": 3.8350000000000004e-05,
"loss": 0.4869,
"step": 1165
},
{
"epoch": 2.332,
"grad_norm": 0.20424461364746094,
"learning_rate": 3.834e-05,
"loss": 0.5745,
"step": 1166
},
{
"epoch": 2.334,
"grad_norm": 0.2168879508972168,
"learning_rate": 3.833e-05,
"loss": 0.5208,
"step": 1167
},
{
"epoch": 2.336,
"grad_norm": 0.2205127477645874,
"learning_rate": 3.832e-05,
"loss": 0.5025,
"step": 1168
},
{
"epoch": 2.338,
"grad_norm": 0.23642250895500183,
"learning_rate": 3.8310000000000004e-05,
"loss": 0.5903,
"step": 1169
},
{
"epoch": 2.34,
"grad_norm": 0.22600825130939484,
"learning_rate": 3.83e-05,
"loss": 0.4807,
"step": 1170
},
{
"epoch": 2.342,
"grad_norm": 0.22957386076450348,
"learning_rate": 3.829e-05,
"loss": 0.5777,
"step": 1171
},
{
"epoch": 2.344,
"grad_norm": 0.24798931181430817,
"learning_rate": 3.828e-05,
"loss": 0.5857,
"step": 1172
},
{
"epoch": 2.346,
"grad_norm": 0.2501101493835449,
"learning_rate": 3.827e-05,
"loss": 0.5004,
"step": 1173
},
{
"epoch": 2.348,
"grad_norm": 0.24327729642391205,
"learning_rate": 3.826e-05,
"loss": 0.6353,
"step": 1174
},
{
"epoch": 2.35,
"grad_norm": 0.26182571053504944,
"learning_rate": 3.825e-05,
"loss": 0.5299,
"step": 1175
},
{
"epoch": 2.352,
"grad_norm": 0.24347805976867676,
"learning_rate": 3.8240000000000007e-05,
"loss": 0.6362,
"step": 1176
},
{
"epoch": 2.354,
"grad_norm": 0.23492677509784698,
"learning_rate": 3.823e-05,
"loss": 0.5888,
"step": 1177
},
{
"epoch": 2.356,
"grad_norm": 0.2524511516094208,
"learning_rate": 3.822e-05,
"loss": 0.6127,
"step": 1178
},
{
"epoch": 2.358,
"grad_norm": 0.23998746275901794,
"learning_rate": 3.821e-05,
"loss": 0.5543,
"step": 1179
},
{
"epoch": 2.36,
"grad_norm": 0.22162604331970215,
"learning_rate": 3.82e-05,
"loss": 0.58,
"step": 1180
},
{
"epoch": 2.362,
"grad_norm": 0.21816958487033844,
"learning_rate": 3.8190000000000005e-05,
"loss": 0.4873,
"step": 1181
},
{
"epoch": 2.364,
"grad_norm": 0.20614151656627655,
"learning_rate": 3.818e-05,
"loss": 0.5233,
"step": 1182
},
{
"epoch": 2.366,
"grad_norm": 0.22421181201934814,
"learning_rate": 3.817e-05,
"loss": 0.5244,
"step": 1183
},
{
"epoch": 2.368,
"grad_norm": 0.23380468785762787,
"learning_rate": 3.816e-05,
"loss": 0.4835,
"step": 1184
},
{
"epoch": 2.37,
"grad_norm": 0.23658302426338196,
"learning_rate": 3.8150000000000006e-05,
"loss": 0.6125,
"step": 1185
},
{
"epoch": 2.372,
"grad_norm": 0.2474319487810135,
"learning_rate": 3.8140000000000004e-05,
"loss": 0.6016,
"step": 1186
},
{
"epoch": 2.374,
"grad_norm": 0.2601548135280609,
"learning_rate": 3.8129999999999996e-05,
"loss": 0.5145,
"step": 1187
},
{
"epoch": 2.376,
"grad_norm": 0.2442774921655655,
"learning_rate": 3.812e-05,
"loss": 0.5756,
"step": 1188
},
{
"epoch": 2.378,
"grad_norm": 0.2299203872680664,
"learning_rate": 3.811e-05,
"loss": 0.5553,
"step": 1189
},
{
"epoch": 2.38,
"grad_norm": 0.21725517511367798,
"learning_rate": 3.8100000000000005e-05,
"loss": 0.5216,
"step": 1190
},
{
"epoch": 2.382,
"grad_norm": 0.23462055623531342,
"learning_rate": 3.809e-05,
"loss": 0.5489,
"step": 1191
},
{
"epoch": 2.384,
"grad_norm": 0.21845324337482452,
"learning_rate": 3.808e-05,
"loss": 0.5335,
"step": 1192
},
{
"epoch": 2.386,
"grad_norm": 0.23328767716884613,
"learning_rate": 3.807e-05,
"loss": 0.5462,
"step": 1193
},
{
"epoch": 2.388,
"grad_norm": 0.23643261194229126,
"learning_rate": 3.806e-05,
"loss": 0.5572,
"step": 1194
},
{
"epoch": 2.39,
"grad_norm": 0.2143084853887558,
"learning_rate": 3.805e-05,
"loss": 0.4858,
"step": 1195
},
{
"epoch": 2.392,
"grad_norm": 0.21531163156032562,
"learning_rate": 3.804e-05,
"loss": 0.4647,
"step": 1196
},
{
"epoch": 2.394,
"grad_norm": 0.24806390702724457,
"learning_rate": 3.803000000000001e-05,
"loss": 0.5758,
"step": 1197
},
{
"epoch": 2.396,
"grad_norm": 0.24888557195663452,
"learning_rate": 3.802e-05,
"loss": 0.5831,
"step": 1198
},
{
"epoch": 2.398,
"grad_norm": 0.21910586953163147,
"learning_rate": 3.8010000000000004e-05,
"loss": 0.5288,
"step": 1199
},
{
"epoch": 2.4,
"grad_norm": 0.24126410484313965,
"learning_rate": 3.8e-05,
"loss": 0.5372,
"step": 1200
}
],
"logging_steps": 1,
"max_steps": 5000,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 300,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 2.601053976526848e+16,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}