{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.3,
"eval_steps": 500,
"global_step": 1875,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0008,
"grad_norm": 1.40625,
"learning_rate": 0.0001,
"loss": 4.689,
"step": 5
},
{
"epoch": 0.0016,
"grad_norm": 0.84765625,
"learning_rate": 0.0001,
"loss": 1.7346,
"step": 10
},
{
"epoch": 0.0024,
"grad_norm": 0.474609375,
"learning_rate": 0.0001,
"loss": 1.0881,
"step": 15
},
{
"epoch": 0.0032,
"grad_norm": 0.44140625,
"learning_rate": 0.0001,
"loss": 0.7694,
"step": 20
},
{
"epoch": 0.004,
"grad_norm": 0.404296875,
"learning_rate": 0.0001,
"loss": 0.6821,
"step": 25
},
{
"epoch": 0.0048,
"grad_norm": 0.322265625,
"learning_rate": 0.0001,
"loss": 0.5054,
"step": 30
},
{
"epoch": 0.0056,
"grad_norm": 0.255859375,
"learning_rate": 0.0001,
"loss": 0.4251,
"step": 35
},
{
"epoch": 0.0064,
"grad_norm": 0.26171875,
"learning_rate": 0.0001,
"loss": 0.3436,
"step": 40
},
{
"epoch": 0.0072,
"grad_norm": 0.29296875,
"learning_rate": 0.0001,
"loss": 0.2863,
"step": 45
},
{
"epoch": 0.008,
"grad_norm": 0.2060546875,
"learning_rate": 0.0001,
"loss": 0.2451,
"step": 50
},
{
"epoch": 0.0088,
"grad_norm": 0.34375,
"learning_rate": 0.0001,
"loss": 0.2673,
"step": 55
},
{
"epoch": 0.0096,
"grad_norm": 0.263671875,
"learning_rate": 0.0001,
"loss": 0.2696,
"step": 60
},
{
"epoch": 0.0104,
"grad_norm": 0.53125,
"learning_rate": 0.0001,
"loss": 0.3373,
"step": 65
},
{
"epoch": 0.0112,
"grad_norm": 0.30078125,
"learning_rate": 0.0001,
"loss": 0.3218,
"step": 70
},
{
"epoch": 0.012,
"grad_norm": 0.1416015625,
"learning_rate": 0.0001,
"loss": 0.2769,
"step": 75
},
{
"epoch": 0.0128,
"grad_norm": 0.16796875,
"learning_rate": 0.0001,
"loss": 0.2303,
"step": 80
},
{
"epoch": 0.0136,
"grad_norm": 0.1904296875,
"learning_rate": 0.0001,
"loss": 0.3289,
"step": 85
},
{
"epoch": 0.0144,
"grad_norm": 0.2041015625,
"learning_rate": 0.0001,
"loss": 0.2959,
"step": 90
},
{
"epoch": 0.0152,
"grad_norm": 0.248046875,
"learning_rate": 0.0001,
"loss": 0.3123,
"step": 95
},
{
"epoch": 0.016,
"grad_norm": 0.26171875,
"learning_rate": 0.0001,
"loss": 0.2761,
"step": 100
},
{
"epoch": 0.0168,
"grad_norm": 0.203125,
"learning_rate": 0.0001,
"loss": 0.1851,
"step": 105
},
{
"epoch": 0.0176,
"grad_norm": 0.19921875,
"learning_rate": 0.0001,
"loss": 0.2661,
"step": 110
},
{
"epoch": 0.0184,
"grad_norm": 0.1865234375,
"learning_rate": 0.0001,
"loss": 0.264,
"step": 115
},
{
"epoch": 0.0192,
"grad_norm": 0.21875,
"learning_rate": 0.0001,
"loss": 0.3102,
"step": 120
},
{
"epoch": 0.02,
"grad_norm": 0.248046875,
"learning_rate": 0.0001,
"loss": 0.2606,
"step": 125
},
{
"epoch": 0.0208,
"grad_norm": 0.26171875,
"learning_rate": 0.0001,
"loss": 0.29,
"step": 130
},
{
"epoch": 0.0216,
"grad_norm": 0.1875,
"learning_rate": 0.0001,
"loss": 0.2738,
"step": 135
},
{
"epoch": 0.0224,
"grad_norm": 0.1259765625,
"learning_rate": 0.0001,
"loss": 0.3099,
"step": 140
},
{
"epoch": 0.0232,
"grad_norm": 0.1328125,
"learning_rate": 0.0001,
"loss": 0.2767,
"step": 145
},
{
"epoch": 0.024,
"grad_norm": 0.2109375,
"learning_rate": 0.0001,
"loss": 0.1845,
"step": 150
},
{
"epoch": 0.0248,
"grad_norm": 0.1416015625,
"learning_rate": 0.0001,
"loss": 0.1394,
"step": 155
},
{
"epoch": 0.0256,
"grad_norm": 0.140625,
"learning_rate": 0.0001,
"loss": 0.2288,
"step": 160
},
{
"epoch": 0.0264,
"grad_norm": 0.45703125,
"learning_rate": 0.0001,
"loss": 0.2108,
"step": 165
},
{
"epoch": 0.0272,
"grad_norm": 0.80859375,
"learning_rate": 0.0001,
"loss": 0.2433,
"step": 170
},
{
"epoch": 0.028,
"grad_norm": 0.1513671875,
"learning_rate": 0.0001,
"loss": 0.2611,
"step": 175
},
{
"epoch": 0.0288,
"grad_norm": 0.205078125,
"learning_rate": 0.0001,
"loss": 0.2008,
"step": 180
},
{
"epoch": 0.0296,
"grad_norm": 0.150390625,
"learning_rate": 0.0001,
"loss": 0.2414,
"step": 185
},
{
"epoch": 0.0304,
"grad_norm": 0.205078125,
"learning_rate": 0.0001,
"loss": 0.2327,
"step": 190
},
{
"epoch": 0.0312,
"grad_norm": 0.169921875,
"learning_rate": 0.0001,
"loss": 0.2349,
"step": 195
},
{
"epoch": 0.032,
"grad_norm": 0.185546875,
"learning_rate": 0.0001,
"loss": 0.2028,
"step": 200
},
{
"epoch": 0.0328,
"grad_norm": 0.173828125,
"learning_rate": 0.0001,
"loss": 0.2034,
"step": 205
},
{
"epoch": 0.0336,
"grad_norm": 0.169921875,
"learning_rate": 0.0001,
"loss": 0.2465,
"step": 210
},
{
"epoch": 0.0344,
"grad_norm": 0.09228515625,
"learning_rate": 0.0001,
"loss": 0.2518,
"step": 215
},
{
"epoch": 0.0352,
"grad_norm": 0.171875,
"learning_rate": 0.0001,
"loss": 0.2545,
"step": 220
},
{
"epoch": 0.036,
"grad_norm": 0.15625,
"learning_rate": 0.0001,
"loss": 0.2445,
"step": 225
},
{
"epoch": 0.0368,
"grad_norm": 0.345703125,
"learning_rate": 0.0001,
"loss": 0.2106,
"step": 230
},
{
"epoch": 0.0376,
"grad_norm": 0.1640625,
"learning_rate": 0.0001,
"loss": 0.2527,
"step": 235
},
{
"epoch": 0.0384,
"grad_norm": 0.14453125,
"learning_rate": 0.0001,
"loss": 0.2366,
"step": 240
},
{
"epoch": 0.0392,
"grad_norm": 0.17578125,
"learning_rate": 0.0001,
"loss": 0.2989,
"step": 245
},
{
"epoch": 0.04,
"grad_norm": 0.158203125,
"learning_rate": 0.0001,
"loss": 0.1834,
"step": 250
},
{
"epoch": 0.0408,
"grad_norm": 0.412109375,
"learning_rate": 0.0001,
"loss": 0.16,
"step": 255
},
{
"epoch": 0.0416,
"grad_norm": 0.3046875,
"learning_rate": 0.0001,
"loss": 0.2272,
"step": 260
},
{
"epoch": 0.0424,
"grad_norm": 0.216796875,
"learning_rate": 0.0001,
"loss": 0.2389,
"step": 265
},
{
"epoch": 0.0432,
"grad_norm": 0.14453125,
"learning_rate": 0.0001,
"loss": 0.2117,
"step": 270
},
{
"epoch": 0.044,
"grad_norm": 0.19140625,
"learning_rate": 0.0001,
"loss": 0.2676,
"step": 275
},
{
"epoch": 0.0448,
"grad_norm": 0.1494140625,
"learning_rate": 0.0001,
"loss": 0.2782,
"step": 280
},
{
"epoch": 0.0456,
"grad_norm": 0.13671875,
"learning_rate": 0.0001,
"loss": 0.2493,
"step": 285
},
{
"epoch": 0.0464,
"grad_norm": 0.302734375,
"learning_rate": 0.0001,
"loss": 0.2342,
"step": 290
},
{
"epoch": 0.0472,
"grad_norm": 0.1748046875,
"learning_rate": 0.0001,
"loss": 0.2198,
"step": 295
},
{
"epoch": 0.048,
"grad_norm": 0.2333984375,
"learning_rate": 0.0001,
"loss": 0.2454,
"step": 300
},
{
"epoch": 0.0488,
"grad_norm": 0.267578125,
"learning_rate": 0.0001,
"loss": 0.1626,
"step": 305
},
{
"epoch": 0.0496,
"grad_norm": 0.2353515625,
"learning_rate": 0.0001,
"loss": 0.2629,
"step": 310
},
{
"epoch": 0.0504,
"grad_norm": 0.189453125,
"learning_rate": 0.0001,
"loss": 0.2548,
"step": 315
},
{
"epoch": 0.0512,
"grad_norm": 0.111328125,
"learning_rate": 0.0001,
"loss": 0.2304,
"step": 320
},
{
"epoch": 0.052,
"grad_norm": 0.126953125,
"learning_rate": 0.0001,
"loss": 0.2173,
"step": 325
},
{
"epoch": 0.0528,
"grad_norm": 0.154296875,
"learning_rate": 0.0001,
"loss": 0.2702,
"step": 330
},
{
"epoch": 0.0536,
"grad_norm": 0.158203125,
"learning_rate": 0.0001,
"loss": 0.2952,
"step": 335
},
{
"epoch": 0.0544,
"grad_norm": 0.1953125,
"learning_rate": 0.0001,
"loss": 0.2542,
"step": 340
},
{
"epoch": 0.0552,
"grad_norm": 0.1513671875,
"learning_rate": 0.0001,
"loss": 0.2366,
"step": 345
},
{
"epoch": 0.056,
"grad_norm": 0.10888671875,
"learning_rate": 0.0001,
"loss": 0.1971,
"step": 350
},
{
"epoch": 0.0568,
"grad_norm": 0.2392578125,
"learning_rate": 0.0001,
"loss": 0.1493,
"step": 355
},
{
"epoch": 0.0576,
"grad_norm": 0.1064453125,
"learning_rate": 0.0001,
"loss": 0.1867,
"step": 360
},
{
"epoch": 0.0584,
"grad_norm": 0.169921875,
"learning_rate": 0.0001,
"loss": 0.243,
"step": 365
},
{
"epoch": 0.0592,
"grad_norm": 0.181640625,
"learning_rate": 0.0001,
"loss": 0.303,
"step": 370
},
{
"epoch": 0.06,
"grad_norm": 0.181640625,
"learning_rate": 0.0001,
"loss": 0.2186,
"step": 375
},
{
"epoch": 0.0608,
"grad_norm": 0.216796875,
"learning_rate": 0.0001,
"loss": 0.2225,
"step": 380
},
{
"epoch": 0.0616,
"grad_norm": 0.1640625,
"learning_rate": 0.0001,
"loss": 0.2671,
"step": 385
},
{
"epoch": 0.0624,
"grad_norm": 0.1474609375,
"learning_rate": 0.0001,
"loss": 0.2023,
"step": 390
},
{
"epoch": 0.0632,
"grad_norm": 0.1455078125,
"learning_rate": 0.0001,
"loss": 0.2071,
"step": 395
},
{
"epoch": 0.064,
"grad_norm": 0.177734375,
"learning_rate": 0.0001,
"loss": 0.1637,
"step": 400
},
{
"epoch": 0.0648,
"grad_norm": 0.1552734375,
"learning_rate": 0.0001,
"loss": 0.1475,
"step": 405
},
{
"epoch": 0.0656,
"grad_norm": 0.1533203125,
"learning_rate": 0.0001,
"loss": 0.2273,
"step": 410
},
{
"epoch": 0.0664,
"grad_norm": 0.1884765625,
"learning_rate": 0.0001,
"loss": 0.2323,
"step": 415
},
{
"epoch": 0.0672,
"grad_norm": 0.16796875,
"learning_rate": 0.0001,
"loss": 0.1927,
"step": 420
},
{
"epoch": 0.068,
"grad_norm": 0.1435546875,
"learning_rate": 0.0001,
"loss": 0.2496,
"step": 425
},
{
"epoch": 0.0688,
"grad_norm": 0.11962890625,
"learning_rate": 0.0001,
"loss": 0.2464,
"step": 430
},
{
"epoch": 0.0696,
"grad_norm": 0.12255859375,
"learning_rate": 0.0001,
"loss": 0.1749,
"step": 435
},
{
"epoch": 0.0704,
"grad_norm": 0.1962890625,
"learning_rate": 0.0001,
"loss": 0.2095,
"step": 440
},
{
"epoch": 0.0712,
"grad_norm": 0.16796875,
"learning_rate": 0.0001,
"loss": 0.2257,
"step": 445
},
{
"epoch": 0.072,
"grad_norm": 0.2119140625,
"learning_rate": 0.0001,
"loss": 0.2231,
"step": 450
},
{
"epoch": 0.0728,
"grad_norm": 0.10302734375,
"learning_rate": 0.0001,
"loss": 0.1399,
"step": 455
},
{
"epoch": 0.0736,
"grad_norm": 0.171875,
"learning_rate": 0.0001,
"loss": 0.2596,
"step": 460
},
{
"epoch": 0.0744,
"grad_norm": 0.17578125,
"learning_rate": 0.0001,
"loss": 0.2048,
"step": 465
},
{
"epoch": 0.0752,
"grad_norm": 0.205078125,
"learning_rate": 0.0001,
"loss": 0.2685,
"step": 470
},
{
"epoch": 0.076,
"grad_norm": 0.1552734375,
"learning_rate": 0.0001,
"loss": 0.2172,
"step": 475
},
{
"epoch": 0.0768,
"grad_norm": 0.265625,
"learning_rate": 0.0001,
"loss": 0.1859,
"step": 480
},
{
"epoch": 0.0776,
"grad_norm": 0.169921875,
"learning_rate": 0.0001,
"loss": 0.2264,
"step": 485
},
{
"epoch": 0.0784,
"grad_norm": 0.140625,
"learning_rate": 0.0001,
"loss": 0.2229,
"step": 490
},
{
"epoch": 0.0792,
"grad_norm": 0.1455078125,
"learning_rate": 0.0001,
"loss": 0.2411,
"step": 495
},
{
"epoch": 0.08,
"grad_norm": 0.095703125,
"learning_rate": 0.0001,
"loss": 0.1823,
"step": 500
},
{
"epoch": 0.0808,
"grad_norm": 0.12353515625,
"learning_rate": 0.0001,
"loss": 0.117,
"step": 505
},
{
"epoch": 0.0816,
"grad_norm": 0.130859375,
"learning_rate": 0.0001,
"loss": 0.2148,
"step": 510
},
{
"epoch": 0.0824,
"grad_norm": 0.173828125,
"learning_rate": 0.0001,
"loss": 0.2129,
"step": 515
},
{
"epoch": 0.0832,
"grad_norm": 0.1572265625,
"learning_rate": 0.0001,
"loss": 0.2379,
"step": 520
},
{
"epoch": 0.084,
"grad_norm": 0.14453125,
"learning_rate": 0.0001,
"loss": 0.2156,
"step": 525
},
{
"epoch": 0.0848,
"grad_norm": 0.11181640625,
"learning_rate": 0.0001,
"loss": 0.2384,
"step": 530
},
{
"epoch": 0.0856,
"grad_norm": 0.1337890625,
"learning_rate": 0.0001,
"loss": 0.2178,
"step": 535
},
{
"epoch": 0.0864,
"grad_norm": 0.1416015625,
"learning_rate": 0.0001,
"loss": 0.1884,
"step": 540
},
{
"epoch": 0.0872,
"grad_norm": 0.1630859375,
"learning_rate": 0.0001,
"loss": 0.1909,
"step": 545
},
{
"epoch": 0.088,
"grad_norm": 0.0908203125,
"learning_rate": 0.0001,
"loss": 0.1794,
"step": 550
},
{
"epoch": 0.0888,
"grad_norm": 0.130859375,
"learning_rate": 0.0001,
"loss": 0.1083,
"step": 555
},
{
"epoch": 0.0896,
"grad_norm": 0.1689453125,
"learning_rate": 0.0001,
"loss": 0.212,
"step": 560
},
{
"epoch": 0.0904,
"grad_norm": 0.1162109375,
"learning_rate": 0.0001,
"loss": 0.2142,
"step": 565
},
{
"epoch": 0.0912,
"grad_norm": 0.11083984375,
"learning_rate": 0.0001,
"loss": 0.2533,
"step": 570
},
{
"epoch": 0.092,
"grad_norm": 0.234375,
"learning_rate": 0.0001,
"loss": 0.2238,
"step": 575
},
{
"epoch": 0.0928,
"grad_norm": 0.0810546875,
"learning_rate": 0.0001,
"loss": 0.2452,
"step": 580
},
{
"epoch": 0.0936,
"grad_norm": 0.1357421875,
"learning_rate": 0.0001,
"loss": 0.2685,
"step": 585
},
{
"epoch": 0.0944,
"grad_norm": 0.1455078125,
"learning_rate": 0.0001,
"loss": 0.2388,
"step": 590
},
{
"epoch": 0.0952,
"grad_norm": 0.2314453125,
"learning_rate": 0.0001,
"loss": 0.2536,
"step": 595
},
{
"epoch": 0.096,
"grad_norm": 0.125,
"learning_rate": 0.0001,
"loss": 0.145,
"step": 600
},
{
"epoch": 0.0968,
"grad_norm": 0.12890625,
"learning_rate": 0.0001,
"loss": 0.1522,
"step": 605
},
{
"epoch": 0.0976,
"grad_norm": 0.1025390625,
"learning_rate": 0.0001,
"loss": 0.1953,
"step": 610
},
{
"epoch": 0.0984,
"grad_norm": 0.1435546875,
"learning_rate": 0.0001,
"loss": 0.2179,
"step": 615
},
{
"epoch": 0.0992,
"grad_norm": 0.1328125,
"learning_rate": 0.0001,
"loss": 0.2185,
"step": 620
},
{
"epoch": 0.1,
"grad_norm": 0.10986328125,
"learning_rate": 0.0001,
"loss": 0.2195,
"step": 625
},
{
"epoch": 0.1008,
"grad_norm": 0.1640625,
"learning_rate": 0.0001,
"loss": 0.2413,
"step": 630
},
{
"epoch": 0.1016,
"grad_norm": 0.232421875,
"learning_rate": 0.0001,
"loss": 0.2475,
"step": 635
},
{
"epoch": 0.1024,
"grad_norm": 0.10107421875,
"learning_rate": 0.0001,
"loss": 0.2092,
"step": 640
},
{
"epoch": 0.1032,
"grad_norm": 0.380859375,
"learning_rate": 0.0001,
"loss": 0.1856,
"step": 645
},
{
"epoch": 0.104,
"grad_norm": 0.1630859375,
"learning_rate": 0.0001,
"loss": 0.1842,
"step": 650
},
{
"epoch": 0.1048,
"grad_norm": 0.2041015625,
"learning_rate": 0.0001,
"loss": 0.1487,
"step": 655
},
{
"epoch": 0.1056,
"grad_norm": 0.369140625,
"learning_rate": 0.0001,
"loss": 0.2089,
"step": 660
},
{
"epoch": 0.1064,
"grad_norm": 0.1728515625,
"learning_rate": 0.0001,
"loss": 0.2037,
"step": 665
},
{
"epoch": 0.1072,
"grad_norm": 0.1611328125,
"learning_rate": 0.0001,
"loss": 0.2317,
"step": 670
},
{
"epoch": 0.108,
"grad_norm": 0.1787109375,
"learning_rate": 0.0001,
"loss": 0.2087,
"step": 675
},
{
"epoch": 0.1088,
"grad_norm": 0.1669921875,
"learning_rate": 0.0001,
"loss": 0.1762,
"step": 680
},
{
"epoch": 0.1096,
"grad_norm": 0.1337890625,
"learning_rate": 0.0001,
"loss": 0.2633,
"step": 685
},
{
"epoch": 0.1104,
"grad_norm": 0.130859375,
"learning_rate": 0.0001,
"loss": 0.2312,
"step": 690
},
{
"epoch": 0.1112,
"grad_norm": 0.353515625,
"learning_rate": 0.0001,
"loss": 0.2411,
"step": 695
},
{
"epoch": 0.112,
"grad_norm": 0.126953125,
"learning_rate": 0.0001,
"loss": 0.273,
"step": 700
},
{
"epoch": 0.1128,
"grad_norm": 0.1181640625,
"learning_rate": 0.0001,
"loss": 0.1281,
"step": 705
},
{
"epoch": 0.1136,
"grad_norm": 0.1630859375,
"learning_rate": 0.0001,
"loss": 0.1653,
"step": 710
},
{
"epoch": 0.1144,
"grad_norm": 0.10205078125,
"learning_rate": 0.0001,
"loss": 0.2346,
"step": 715
},
{
"epoch": 0.1152,
"grad_norm": 0.1552734375,
"learning_rate": 0.0001,
"loss": 0.2471,
"step": 720
},
{
"epoch": 0.116,
"grad_norm": 0.1318359375,
"learning_rate": 0.0001,
"loss": 0.2109,
"step": 725
},
{
"epoch": 0.1168,
"grad_norm": 0.1259765625,
"learning_rate": 0.0001,
"loss": 0.1469,
"step": 730
},
{
"epoch": 0.1176,
"grad_norm": 0.146484375,
"learning_rate": 0.0001,
"loss": 0.1833,
"step": 735
},
{
"epoch": 0.1184,
"grad_norm": 0.1259765625,
"learning_rate": 0.0001,
"loss": 0.2344,
"step": 740
},
{
"epoch": 0.1192,
"grad_norm": 0.107421875,
"learning_rate": 0.0001,
"loss": 0.215,
"step": 745
},
{
"epoch": 0.12,
"grad_norm": 0.1611328125,
"learning_rate": 0.0001,
"loss": 0.2045,
"step": 750
},
{
"epoch": 0.1208,
"grad_norm": 0.13671875,
"learning_rate": 0.0001,
"loss": 0.1123,
"step": 755
},
{
"epoch": 0.1216,
"grad_norm": 0.11767578125,
"learning_rate": 0.0001,
"loss": 0.2355,
"step": 760
},
{
"epoch": 0.1224,
"grad_norm": 0.12890625,
"learning_rate": 0.0001,
"loss": 0.2037,
"step": 765
},
{
"epoch": 0.1232,
"grad_norm": 0.0986328125,
"learning_rate": 0.0001,
"loss": 0.1903,
"step": 770
},
{
"epoch": 0.124,
"grad_norm": 0.197265625,
"learning_rate": 0.0001,
"loss": 0.237,
"step": 775
},
{
"epoch": 0.1248,
"grad_norm": 0.1318359375,
"learning_rate": 0.0001,
"loss": 0.2354,
"step": 780
},
{
"epoch": 0.1256,
"grad_norm": 0.3125,
"learning_rate": 0.0001,
"loss": 0.1892,
"step": 785
},
{
"epoch": 0.1264,
"grad_norm": 0.125,
"learning_rate": 0.0001,
"loss": 0.2252,
"step": 790
},
{
"epoch": 0.1272,
"grad_norm": 0.1708984375,
"learning_rate": 0.0001,
"loss": 0.2018,
"step": 795
},
{
"epoch": 0.128,
"grad_norm": 0.12353515625,
"learning_rate": 0.0001,
"loss": 0.1534,
"step": 800
},
{
"epoch": 0.1288,
"grad_norm": 0.107421875,
"learning_rate": 0.0001,
"loss": 0.0891,
"step": 805
},
{
"epoch": 0.1296,
"grad_norm": 0.1591796875,
"learning_rate": 0.0001,
"loss": 0.1928,
"step": 810
},
{
"epoch": 0.1304,
"grad_norm": 0.1435546875,
"learning_rate": 0.0001,
"loss": 0.2226,
"step": 815
},
{
"epoch": 0.1312,
"grad_norm": 0.2001953125,
"learning_rate": 0.0001,
"loss": 0.2488,
"step": 820
},
{
"epoch": 0.132,
"grad_norm": 0.12255859375,
"learning_rate": 0.0001,
"loss": 0.1882,
"step": 825
},
{
"epoch": 0.1328,
"grad_norm": 0.1328125,
"learning_rate": 0.0001,
"loss": 0.1771,
"step": 830
},
{
"epoch": 0.1336,
"grad_norm": 0.11083984375,
"learning_rate": 0.0001,
"loss": 0.2116,
"step": 835
},
{
"epoch": 0.1344,
"grad_norm": 0.15625,
"learning_rate": 0.0001,
"loss": 0.1891,
"step": 840
},
{
"epoch": 0.1352,
"grad_norm": 0.140625,
"learning_rate": 0.0001,
"loss": 0.2205,
"step": 845
},
{
"epoch": 0.136,
"grad_norm": 0.150390625,
"learning_rate": 0.0001,
"loss": 0.2217,
"step": 850
},
{
"epoch": 0.1368,
"grad_norm": 0.10986328125,
"learning_rate": 0.0001,
"loss": 0.126,
"step": 855
},
{
"epoch": 0.1376,
"grad_norm": 0.7890625,
"learning_rate": 0.0001,
"loss": 0.2353,
"step": 860
},
{
"epoch": 0.1384,
"grad_norm": 0.1826171875,
"learning_rate": 0.0001,
"loss": 0.2245,
"step": 865
},
{
"epoch": 0.1392,
"grad_norm": 0.1162109375,
"learning_rate": 0.0001,
"loss": 0.1781,
"step": 870
},
{
"epoch": 0.14,
"grad_norm": 0.115234375,
"learning_rate": 0.0001,
"loss": 0.2102,
"step": 875
},
{
"epoch": 0.1408,
"grad_norm": 0.10498046875,
"learning_rate": 0.0001,
"loss": 0.2161,
"step": 880
},
{
"epoch": 0.1416,
"grad_norm": 0.1279296875,
"learning_rate": 0.0001,
"loss": 0.1987,
"step": 885
},
{
"epoch": 0.1424,
"grad_norm": 0.1279296875,
"learning_rate": 0.0001,
"loss": 0.2455,
"step": 890
},
{
"epoch": 0.1432,
"grad_norm": 0.09912109375,
"learning_rate": 0.0001,
"loss": 0.1994,
"step": 895
},
{
"epoch": 0.144,
"grad_norm": 0.09814453125,
"learning_rate": 0.0001,
"loss": 0.2113,
"step": 900
},
{
"epoch": 0.1448,
"grad_norm": 0.09033203125,
"learning_rate": 0.0001,
"loss": 0.1152,
"step": 905
},
{
"epoch": 0.1456,
"grad_norm": 0.11328125,
"learning_rate": 0.0001,
"loss": 0.2258,
"step": 910
},
{
"epoch": 0.1464,
"grad_norm": 0.10986328125,
"learning_rate": 0.0001,
"loss": 0.2143,
"step": 915
},
{
"epoch": 0.1472,
"grad_norm": 0.1572265625,
"learning_rate": 0.0001,
"loss": 0.1933,
"step": 920
},
{
"epoch": 0.148,
"grad_norm": 0.12109375,
"learning_rate": 0.0001,
"loss": 0.2115,
"step": 925
},
{
"epoch": 0.1488,
"grad_norm": 0.2177734375,
"learning_rate": 0.0001,
"loss": 0.2349,
"step": 930
},
{
"epoch": 0.1496,
"grad_norm": 0.12353515625,
"learning_rate": 0.0001,
"loss": 0.1956,
"step": 935
},
{
"epoch": 0.1504,
"grad_norm": 0.12890625,
"learning_rate": 0.0001,
"loss": 0.2414,
"step": 940
},
{
"epoch": 0.1512,
"grad_norm": 0.1328125,
"learning_rate": 0.0001,
"loss": 0.1862,
"step": 945
},
{
"epoch": 0.152,
"grad_norm": 0.08447265625,
"learning_rate": 0.0001,
"loss": 0.1109,
"step": 950
},
{
"epoch": 0.1528,
"grad_norm": 0.1826171875,
"learning_rate": 0.0001,
"loss": 0.1174,
"step": 955
},
{
"epoch": 0.1536,
"grad_norm": 0.1318359375,
"learning_rate": 0.0001,
"loss": 0.1666,
"step": 960
},
{
"epoch": 0.1544,
"grad_norm": 0.1513671875,
"learning_rate": 0.0001,
"loss": 0.2178,
"step": 965
},
{
"epoch": 0.1552,
"grad_norm": 0.1591796875,
"learning_rate": 0.0001,
"loss": 0.1918,
"step": 970
},
{
"epoch": 0.156,
"grad_norm": 0.1416015625,
"learning_rate": 0.0001,
"loss": 0.1925,
"step": 975
},
{
"epoch": 0.1568,
"grad_norm": 0.16015625,
"learning_rate": 0.0001,
"loss": 0.1819,
"step": 980
},
{
"epoch": 0.1576,
"grad_norm": 0.1748046875,
"learning_rate": 0.0001,
"loss": 0.2156,
"step": 985
},
{
"epoch": 0.1584,
"grad_norm": 0.1357421875,
"learning_rate": 0.0001,
"loss": 0.1961,
"step": 990
},
{
"epoch": 0.1592,
"grad_norm": 0.134765625,
"learning_rate": 0.0001,
"loss": 0.1953,
"step": 995
},
{
"epoch": 0.16,
"grad_norm": 0.12890625,
"learning_rate": 0.0001,
"loss": 0.1646,
"step": 1000
},
{
"epoch": 0.1608,
"grad_norm": 0.11083984375,
"learning_rate": 0.0001,
"loss": 0.1414,
"step": 1005
},
{
"epoch": 0.1616,
"grad_norm": 0.0888671875,
"learning_rate": 0.0001,
"loss": 0.169,
"step": 1010
},
{
"epoch": 0.1624,
"grad_norm": 0.234375,
"learning_rate": 0.0001,
"loss": 0.2255,
"step": 1015
},
{
"epoch": 0.1632,
"grad_norm": 0.1220703125,
"learning_rate": 0.0001,
"loss": 0.197,
"step": 1020
},
{
"epoch": 0.164,
"grad_norm": 0.12890625,
"learning_rate": 0.0001,
"loss": 0.2226,
"step": 1025
},
{
"epoch": 0.1648,
"grad_norm": 0.1396484375,
"learning_rate": 0.0001,
"loss": 0.2078,
"step": 1030
},
{
"epoch": 0.1656,
"grad_norm": 0.267578125,
"learning_rate": 0.0001,
"loss": 0.2379,
"step": 1035
},
{
"epoch": 0.1664,
"grad_norm": 0.1806640625,
"learning_rate": 0.0001,
"loss": 0.2106,
"step": 1040
},
{
"epoch": 0.1672,
"grad_norm": 0.1748046875,
"learning_rate": 0.0001,
"loss": 0.2174,
"step": 1045
},
{
"epoch": 0.168,
"grad_norm": 0.158203125,
"learning_rate": 0.0001,
"loss": 0.2092,
"step": 1050
},
{
"epoch": 0.1688,
"grad_norm": 0.1416015625,
"learning_rate": 0.0001,
"loss": 0.1411,
"step": 1055
},
{
"epoch": 0.1696,
"grad_norm": 0.1279296875,
"learning_rate": 0.0001,
"loss": 0.1985,
"step": 1060
},
{
"epoch": 0.1704,
"grad_norm": 0.11962890625,
"learning_rate": 0.0001,
"loss": 0.1921,
"step": 1065
},
{
"epoch": 0.1712,
"grad_norm": 0.1337890625,
"learning_rate": 0.0001,
"loss": 0.2236,
"step": 1070
},
{
"epoch": 0.172,
"grad_norm": 0.11669921875,
"learning_rate": 0.0001,
"loss": 0.2522,
"step": 1075
},
{
"epoch": 0.1728,
"grad_norm": 0.08349609375,
"learning_rate": 0.0001,
"loss": 0.2513,
"step": 1080
},
{
"epoch": 0.1736,
"grad_norm": 0.11181640625,
"learning_rate": 0.0001,
"loss": 0.1741,
"step": 1085
},
{
"epoch": 0.1744,
"grad_norm": 0.1796875,
"learning_rate": 0.0001,
"loss": 0.2359,
"step": 1090
},
{
"epoch": 0.1752,
"grad_norm": 0.1201171875,
"learning_rate": 0.0001,
"loss": 0.2261,
"step": 1095
},
{
"epoch": 0.176,
"grad_norm": 0.12109375,
"learning_rate": 0.0001,
"loss": 0.171,
"step": 1100
},
{
"epoch": 0.1768,
"grad_norm": 0.158203125,
"learning_rate": 0.0001,
"loss": 0.1164,
"step": 1105
},
{
"epoch": 0.1776,
"grad_norm": 0.1396484375,
"learning_rate": 0.0001,
"loss": 0.1954,
"step": 1110
},
{
"epoch": 0.1784,
"grad_norm": 0.1435546875,
"learning_rate": 0.0001,
"loss": 0.1708,
"step": 1115
},
{
"epoch": 0.1792,
"grad_norm": 0.10400390625,
"learning_rate": 0.0001,
"loss": 0.1968,
"step": 1120
},
{
"epoch": 0.18,
"grad_norm": 0.08154296875,
"learning_rate": 0.0001,
"loss": 0.1896,
"step": 1125
},
{
"epoch": 0.1808,
"grad_norm": 0.134765625,
"learning_rate": 0.0001,
"loss": 0.1875,
"step": 1130
},
{
"epoch": 0.1816,
"grad_norm": 0.1767578125,
"learning_rate": 0.0001,
"loss": 0.1642,
"step": 1135
},
{
"epoch": 0.1824,
"grad_norm": 0.2216796875,
"learning_rate": 0.0001,
"loss": 0.2241,
"step": 1140
},
{
"epoch": 0.1832,
"grad_norm": 0.1328125,
"learning_rate": 0.0001,
"loss": 0.2052,
"step": 1145
},
{
"epoch": 0.184,
"grad_norm": 0.1337890625,
"learning_rate": 0.0001,
"loss": 0.147,
"step": 1150
},
{
"epoch": 0.1848,
"grad_norm": 0.240234375,
"learning_rate": 0.0001,
"loss": 0.1134,
"step": 1155
},
{
"epoch": 0.1856,
"grad_norm": 0.1259765625,
"learning_rate": 0.0001,
"loss": 0.1712,
"step": 1160
},
{
"epoch": 0.1864,
"grad_norm": 0.10400390625,
"learning_rate": 0.0001,
"loss": 0.1978,
"step": 1165
},
{
"epoch": 0.1872,
"grad_norm": 0.138671875,
"learning_rate": 0.0001,
"loss": 0.1976,
"step": 1170
},
{
"epoch": 0.188,
"grad_norm": 0.0732421875,
"learning_rate": 0.0001,
"loss": 0.2522,
"step": 1175
},
{
"epoch": 0.1888,
"grad_norm": 0.1474609375,
"learning_rate": 0.0001,
"loss": 0.1786,
"step": 1180
},
{
"epoch": 0.1896,
"grad_norm": 0.10498046875,
"learning_rate": 0.0001,
"loss": 0.2357,
"step": 1185
},
{
"epoch": 0.1904,
"grad_norm": 0.11181640625,
"learning_rate": 0.0001,
"loss": 0.2115,
"step": 1190
},
{
"epoch": 0.1912,
"grad_norm": 0.1552734375,
"learning_rate": 0.0001,
"loss": 0.1783,
"step": 1195
},
{
"epoch": 0.192,
"grad_norm": 0.15625,
"learning_rate": 0.0001,
"loss": 0.1781,
"step": 1200
},
{
"epoch": 0.1928,
"grad_norm": 0.1552734375,
"learning_rate": 0.0001,
"loss": 0.1219,
"step": 1205
},
{
"epoch": 0.1936,
"grad_norm": 0.11572265625,
"learning_rate": 0.0001,
"loss": 0.2014,
"step": 1210
},
{
"epoch": 0.1944,
"grad_norm": 0.1650390625,
"learning_rate": 0.0001,
"loss": 0.1604,
"step": 1215
},
{
"epoch": 0.1952,
"grad_norm": 0.10986328125,
"learning_rate": 0.0001,
"loss": 0.1931,
"step": 1220
},
{
"epoch": 0.196,
"grad_norm": 0.1240234375,
"learning_rate": 0.0001,
"loss": 0.1978,
"step": 1225
},
{
"epoch": 0.1968,
"grad_norm": 0.1240234375,
"learning_rate": 0.0001,
"loss": 0.2102,
"step": 1230
},
{
"epoch": 0.1976,
"grad_norm": 0.1728515625,
"learning_rate": 0.0001,
"loss": 0.1988,
"step": 1235
},
{
"epoch": 0.1984,
"grad_norm": 0.07861328125,
"learning_rate": 0.0001,
"loss": 0.1926,
"step": 1240
},
{
"epoch": 0.1992,
"grad_norm": 0.119140625,
"learning_rate": 0.0001,
"loss": 0.286,
"step": 1245
},
{
"epoch": 0.2,
"grad_norm": 0.2021484375,
"learning_rate": 0.0001,
"loss": 0.1553,
"step": 1250
},
{
"epoch": 0.2008,
"grad_norm": 0.12451171875,
"learning_rate": 0.0001,
"loss": 0.1009,
"step": 1255
},
{
"epoch": 0.2016,
"grad_norm": 0.126953125,
"learning_rate": 0.0001,
"loss": 0.2404,
"step": 1260
},
{
"epoch": 0.2024,
"grad_norm": 0.1484375,
"learning_rate": 0.0001,
"loss": 0.1902,
"step": 1265
},
{
"epoch": 0.2032,
"grad_norm": 0.1611328125,
"learning_rate": 0.0001,
"loss": 0.2331,
"step": 1270
},
{
"epoch": 0.204,
"grad_norm": 0.09814453125,
"learning_rate": 0.0001,
"loss": 0.2154,
"step": 1275
},
{
"epoch": 0.2048,
"grad_norm": 0.1123046875,
"learning_rate": 0.0001,
"loss": 0.2078,
"step": 1280
},
{
"epoch": 0.2056,
"grad_norm": 0.146484375,
"learning_rate": 0.0001,
"loss": 0.1657,
"step": 1285
},
{
"epoch": 0.2064,
"grad_norm": 0.1484375,
"learning_rate": 0.0001,
"loss": 0.2469,
"step": 1290
},
{
"epoch": 0.2072,
"grad_norm": 0.1083984375,
"learning_rate": 0.0001,
"loss": 0.202,
"step": 1295
},
{
"epoch": 0.208,
"grad_norm": 0.1337890625,
"learning_rate": 0.0001,
"loss": 0.2011,
"step": 1300
},
{
"epoch": 0.2088,
"grad_norm": 0.1279296875,
"learning_rate": 0.0001,
"loss": 0.1259,
"step": 1305
},
{
"epoch": 0.2096,
"grad_norm": 0.11279296875,
"learning_rate": 0.0001,
"loss": 0.1975,
"step": 1310
},
{
"epoch": 0.2104,
"grad_norm": 0.14453125,
"learning_rate": 0.0001,
"loss": 0.2325,
"step": 1315
},
{
"epoch": 0.2112,
"grad_norm": 0.10888671875,
"learning_rate": 0.0001,
"loss": 0.1985,
"step": 1320
},
{
"epoch": 0.212,
"grad_norm": 0.1064453125,
"learning_rate": 0.0001,
"loss": 0.1901,
"step": 1325
},
{
"epoch": 0.2128,
"grad_norm": 0.1474609375,
"learning_rate": 0.0001,
"loss": 0.2173,
"step": 1330
},
{
"epoch": 0.2136,
"grad_norm": 0.12255859375,
"learning_rate": 0.0001,
"loss": 0.1987,
"step": 1335
},
{
"epoch": 0.2144,
"grad_norm": 0.1416015625,
"learning_rate": 0.0001,
"loss": 0.2163,
"step": 1340
},
{
"epoch": 0.2152,
"grad_norm": 0.10791015625,
"learning_rate": 0.0001,
"loss": 0.2295,
"step": 1345
},
{
"epoch": 0.216,
"grad_norm": 0.1298828125,
"learning_rate": 0.0001,
"loss": 0.174,
"step": 1350
},
{
"epoch": 0.2168,
"grad_norm": 1.125,
"learning_rate": 0.0001,
"loss": 0.1232,
"step": 1355
},
{
"epoch": 0.2176,
"grad_norm": 0.0927734375,
"learning_rate": 0.0001,
"loss": 0.1548,
"step": 1360
},
{
"epoch": 0.2184,
"grad_norm": 0.1494140625,
"learning_rate": 0.0001,
"loss": 0.2059,
"step": 1365
},
{
"epoch": 0.2192,
"grad_norm": 0.150390625,
"learning_rate": 0.0001,
"loss": 0.2136,
"step": 1370
},
{
"epoch": 0.22,
"grad_norm": 0.12109375,
"learning_rate": 0.0001,
"loss": 0.2013,
"step": 1375
},
{
"epoch": 0.2208,
"grad_norm": 0.12890625,
"learning_rate": 0.0001,
"loss": 0.2255,
"step": 1380
},
{
"epoch": 0.2216,
"grad_norm": 0.0810546875,
"learning_rate": 0.0001,
"loss": 0.1901,
"step": 1385
},
{
"epoch": 0.2224,
"grad_norm": 0.10791015625,
"learning_rate": 0.0001,
"loss": 0.156,
"step": 1390
},
{
"epoch": 0.2232,
"grad_norm": 0.1171875,
"learning_rate": 0.0001,
"loss": 0.184,
"step": 1395
},
{
"epoch": 0.224,
"grad_norm": 0.12890625,
"learning_rate": 0.0001,
"loss": 0.1995,
"step": 1400
},
{
"epoch": 0.2248,
"grad_norm": 0.09521484375,
"learning_rate": 0.0001,
"loss": 0.0734,
"step": 1405
},
{
"epoch": 0.2256,
"grad_norm": 0.103515625,
"learning_rate": 0.0001,
"loss": 0.1533,
"step": 1410
},
{
"epoch": 0.2264,
"grad_norm": 0.12060546875,
"learning_rate": 0.0001,
"loss": 0.2087,
"step": 1415
},
{
"epoch": 0.2272,
"grad_norm": 0.1064453125,
"learning_rate": 0.0001,
"loss": 0.2178,
"step": 1420
},
{
"epoch": 0.228,
"grad_norm": 0.109375,
"learning_rate": 0.0001,
"loss": 0.2299,
"step": 1425
},
{
"epoch": 0.2288,
"grad_norm": 0.1640625,
"learning_rate": 0.0001,
"loss": 0.2153,
"step": 1430
},
{
"epoch": 0.2296,
"grad_norm": 0.07958984375,
"learning_rate": 0.0001,
"loss": 0.1606,
"step": 1435
},
{
"epoch": 0.2304,
"grad_norm": 0.1396484375,
"learning_rate": 0.0001,
"loss": 0.2145,
"step": 1440
},
{
"epoch": 0.2312,
"grad_norm": 0.111328125,
"learning_rate": 0.0001,
"loss": 0.2021,
"step": 1445
},
{
"epoch": 0.232,
"grad_norm": 0.1005859375,
"learning_rate": 0.0001,
"loss": 0.1507,
"step": 1450
},
{
"epoch": 0.2328,
"grad_norm": 0.10888671875,
"learning_rate": 0.0001,
"loss": 0.1108,
"step": 1455
},
{
"epoch": 0.2336,
"grad_norm": 0.1025390625,
"learning_rate": 0.0001,
"loss": 0.166,
"step": 1460
},
{
"epoch": 0.2344,
"grad_norm": 0.1591796875,
"learning_rate": 0.0001,
"loss": 0.2308,
"step": 1465
},
{
"epoch": 0.2352,
"grad_norm": 0.07421875,
"learning_rate": 0.0001,
"loss": 0.2085,
"step": 1470
},
{
"epoch": 0.236,
"grad_norm": 0.1201171875,
"learning_rate": 0.0001,
"loss": 0.2002,
"step": 1475
},
{
"epoch": 0.2368,
"grad_norm": 0.11279296875,
"learning_rate": 0.0001,
"loss": 0.1967,
"step": 1480
},
{
"epoch": 0.2376,
"grad_norm": 0.1240234375,
"learning_rate": 0.0001,
"loss": 0.1852,
"step": 1485
},
{
"epoch": 0.2384,
"grad_norm": 0.16796875,
"learning_rate": 0.0001,
"loss": 0.1941,
"step": 1490
},
{
"epoch": 0.2392,
"grad_norm": 0.2109375,
"learning_rate": 0.0001,
"loss": 0.2287,
"step": 1495
},
{
"epoch": 0.24,
"grad_norm": 0.10693359375,
"learning_rate": 0.0001,
"loss": 0.1463,
"step": 1500
},
{
"epoch": 0.2408,
"grad_norm": 0.12890625,
"learning_rate": 0.0001,
"loss": 0.0999,
"step": 1505
},
{
"epoch": 0.2416,
"grad_norm": 0.1142578125,
"learning_rate": 0.0001,
"loss": 0.1879,
"step": 1510
},
{
"epoch": 0.2424,
"grad_norm": 0.1513671875,
"learning_rate": 0.0001,
"loss": 0.2169,
"step": 1515
},
{
"epoch": 0.2432,
"grad_norm": 0.2177734375,
"learning_rate": 0.0001,
"loss": 0.2279,
"step": 1520
},
{
"epoch": 0.244,
"grad_norm": 0.10986328125,
"learning_rate": 0.0001,
"loss": 0.1754,
"step": 1525
},
{
"epoch": 0.2448,
"grad_norm": 0.1142578125,
"learning_rate": 0.0001,
"loss": 0.1734,
"step": 1530
},
{
"epoch": 0.2456,
"grad_norm": 0.166015625,
"learning_rate": 0.0001,
"loss": 0.2161,
"step": 1535
},
{
"epoch": 0.2464,
"grad_norm": 0.1259765625,
"learning_rate": 0.0001,
"loss": 0.1849,
"step": 1540
},
{
"epoch": 0.2472,
"grad_norm": 0.1279296875,
"learning_rate": 0.0001,
"loss": 0.1913,
"step": 1545
},
{
"epoch": 0.248,
"grad_norm": 0.1865234375,
"learning_rate": 0.0001,
"loss": 0.1725,
"step": 1550
},
{
"epoch": 0.2488,
"grad_norm": 0.0966796875,
"learning_rate": 0.0001,
"loss": 0.113,
"step": 1555
},
{
"epoch": 0.2496,
"grad_norm": 0.158203125,
"learning_rate": 0.0001,
"loss": 0.2416,
"step": 1560
},
{
"epoch": 0.2504,
"grad_norm": 0.1015625,
"learning_rate": 0.0001,
"loss": 0.1264,
"step": 1565
},
{
"epoch": 0.2512,
"grad_norm": 0.08349609375,
"learning_rate": 0.0001,
"loss": 0.1759,
"step": 1570
},
{
"epoch": 0.252,
"grad_norm": 0.140625,
"learning_rate": 0.0001,
"loss": 0.2139,
"step": 1575
},
{
"epoch": 0.2528,
"grad_norm": 0.06591796875,
"learning_rate": 0.0001,
"loss": 0.205,
"step": 1580
},
{
"epoch": 0.2536,
"grad_norm": 0.125,
"learning_rate": 0.0001,
"loss": 0.2081,
"step": 1585
},
{
"epoch": 0.2544,
"grad_norm": 0.1318359375,
"learning_rate": 0.0001,
"loss": 0.1647,
"step": 1590
},
{
"epoch": 0.2552,
"grad_norm": 0.1455078125,
"learning_rate": 0.0001,
"loss": 0.1911,
"step": 1595
},
{
"epoch": 0.256,
"grad_norm": 0.1435546875,
"learning_rate": 0.0001,
"loss": 0.1502,
"step": 1600
},
{
"epoch": 0.2568,
"grad_norm": 0.08740234375,
"learning_rate": 0.0001,
"loss": 0.111,
"step": 1605
},
{
"epoch": 0.2576,
"grad_norm": 0.126953125,
"learning_rate": 0.0001,
"loss": 0.2303,
"step": 1610
},
{
"epoch": 0.2584,
"grad_norm": 0.09814453125,
"learning_rate": 0.0001,
"loss": 0.1932,
"step": 1615
},
{
"epoch": 0.2592,
"grad_norm": 0.13671875,
"learning_rate": 0.0001,
"loss": 0.1406,
"step": 1620
},
{
"epoch": 0.26,
"grad_norm": 0.1494140625,
"learning_rate": 0.0001,
"loss": 0.1987,
"step": 1625
},
{
"epoch": 0.2608,
"grad_norm": 0.09912109375,
"learning_rate": 0.0001,
"loss": 0.1881,
"step": 1630
},
{
"epoch": 0.2616,
"grad_norm": 0.1171875,
"learning_rate": 0.0001,
"loss": 0.2512,
"step": 1635
},
{
"epoch": 0.2624,
"grad_norm": 0.158203125,
"learning_rate": 0.0001,
"loss": 0.2542,
"step": 1640
},
{
"epoch": 0.2632,
"grad_norm": 0.162109375,
"learning_rate": 0.0001,
"loss": 0.2546,
"step": 1645
},
{
"epoch": 0.264,
"grad_norm": 0.125,
"learning_rate": 0.0001,
"loss": 0.1859,
"step": 1650
},
{
"epoch": 0.2648,
"grad_norm": 0.1279296875,
"learning_rate": 0.0001,
"loss": 0.0963,
"step": 1655
},
{
"epoch": 0.2656,
"grad_norm": 0.10595703125,
"learning_rate": 0.0001,
"loss": 0.1543,
"step": 1660
},
{
"epoch": 0.2664,
"grad_norm": 0.13671875,
"learning_rate": 0.0001,
"loss": 0.2276,
"step": 1665
},
{
"epoch": 0.2672,
"grad_norm": 0.11181640625,
"learning_rate": 0.0001,
"loss": 0.2059,
"step": 1670
},
{
"epoch": 0.268,
"grad_norm": 0.08642578125,
"learning_rate": 0.0001,
"loss": 0.2119,
"step": 1675
},
{
"epoch": 0.2688,
"grad_norm": 0.12451171875,
"learning_rate": 0.0001,
"loss": 0.206,
"step": 1680
},
{
"epoch": 0.2696,
"grad_norm": 0.1474609375,
"learning_rate": 0.0001,
"loss": 0.1986,
"step": 1685
},
{
"epoch": 0.2704,
"grad_norm": 0.10302734375,
"learning_rate": 0.0001,
"loss": 0.2108,
"step": 1690
},
{
"epoch": 0.2712,
"grad_norm": 0.09228515625,
"learning_rate": 0.0001,
"loss": 0.2247,
"step": 1695
},
{
"epoch": 0.272,
"grad_norm": 0.0576171875,
"learning_rate": 0.0001,
"loss": 0.129,
"step": 1700
},
{
"epoch": 0.2728,
"grad_norm": 0.115234375,
"learning_rate": 0.0001,
"loss": 0.1472,
"step": 1705
},
{
"epoch": 0.2736,
"grad_norm": 0.13671875,
"learning_rate": 0.0001,
"loss": 0.1784,
"step": 1710
},
{
"epoch": 0.2744,
"grad_norm": 0.1630859375,
"learning_rate": 0.0001,
"loss": 0.1691,
"step": 1715
},
{
"epoch": 0.2752,
"grad_norm": 0.1513671875,
"learning_rate": 0.0001,
"loss": 0.1593,
"step": 1720
},
{
"epoch": 0.276,
"grad_norm": 0.1435546875,
"learning_rate": 0.0001,
"loss": 0.211,
"step": 1725
},
{
"epoch": 0.2768,
"grad_norm": 0.1796875,
"learning_rate": 0.0001,
"loss": 0.1739,
"step": 1730
},
{
"epoch": 0.2776,
"grad_norm": 0.0966796875,
"learning_rate": 0.0001,
"loss": 0.2385,
"step": 1735
},
{
"epoch": 0.2784,
"grad_norm": 0.13671875,
"learning_rate": 0.0001,
"loss": 0.2223,
"step": 1740
},
{
"epoch": 0.2792,
"grad_norm": 0.177734375,
"learning_rate": 0.0001,
"loss": 0.2089,
"step": 1745
},
{
"epoch": 0.28,
"grad_norm": 0.11474609375,
"learning_rate": 0.0001,
"loss": 0.1277,
"step": 1750
},
{
"epoch": 0.2808,
"grad_norm": 0.138671875,
"learning_rate": 0.0001,
"loss": 0.1222,
"step": 1755
},
{
"epoch": 0.2816,
"grad_norm": 0.11962890625,
"learning_rate": 0.0001,
"loss": 0.1997,
"step": 1760
},
{
"epoch": 0.2824,
"grad_norm": 0.1484375,
"learning_rate": 0.0001,
"loss": 0.1764,
"step": 1765
},
{
"epoch": 0.2832,
"grad_norm": 0.11376953125,
"learning_rate": 0.0001,
"loss": 0.1939,
"step": 1770
},
{
"epoch": 0.284,
"grad_norm": 0.1982421875,
"learning_rate": 0.0001,
"loss": 0.247,
"step": 1775
},
{
"epoch": 0.2848,
"grad_norm": 0.1318359375,
"learning_rate": 0.0001,
"loss": 0.1891,
"step": 1780
},
{
"epoch": 0.2856,
"grad_norm": 0.1298828125,
"learning_rate": 0.0001,
"loss": 0.2157,
"step": 1785
},
{
"epoch": 0.2864,
"grad_norm": 0.12109375,
"learning_rate": 0.0001,
"loss": 0.2058,
"step": 1790
},
{
"epoch": 0.2872,
"grad_norm": 0.09765625,
"learning_rate": 0.0001,
"loss": 0.2398,
"step": 1795
},
{
"epoch": 0.288,
"grad_norm": 0.07421875,
"learning_rate": 0.0001,
"loss": 0.1324,
"step": 1800
},
{
"epoch": 0.2888,
"grad_norm": 0.09130859375,
"learning_rate": 0.0001,
"loss": 0.1546,
"step": 1805
},
{
"epoch": 0.2896,
"grad_norm": 0.166015625,
"learning_rate": 0.0001,
"loss": 0.1853,
"step": 1810
},
{
"epoch": 0.2904,
"grad_norm": 0.12158203125,
"learning_rate": 0.0001,
"loss": 0.2209,
"step": 1815
},
{
"epoch": 0.2912,
"grad_norm": 0.109375,
"learning_rate": 0.0001,
"loss": 0.215,
"step": 1820
},
{
"epoch": 0.292,
"grad_norm": 0.150390625,
"learning_rate": 0.0001,
"loss": 0.2315,
"step": 1825
},
{
"epoch": 0.2928,
"grad_norm": 0.09326171875,
"learning_rate": 0.0001,
"loss": 0.2194,
"step": 1830
},
{
"epoch": 0.2936,
"grad_norm": 0.11669921875,
"learning_rate": 0.0001,
"loss": 0.202,
"step": 1835
},
{
"epoch": 0.2944,
"grad_norm": 0.09521484375,
"learning_rate": 0.0001,
"loss": 0.1646,
"step": 1840
},
{
"epoch": 0.2952,
"grad_norm": 0.10400390625,
"learning_rate": 0.0001,
"loss": 0.1569,
"step": 1845
},
{
"epoch": 0.296,
"grad_norm": 0.06005859375,
"learning_rate": 0.0001,
"loss": 0.164,
"step": 1850
},
{
"epoch": 0.2968,
"grad_norm": 0.169921875,
"learning_rate": 0.0001,
"loss": 0.1277,
"step": 1855
},
{
"epoch": 0.2976,
"grad_norm": 0.0654296875,
"learning_rate": 0.0001,
"loss": 0.1731,
"step": 1860
},
{
"epoch": 0.2984,
"grad_norm": 0.1142578125,
"learning_rate": 0.0001,
"loss": 0.2107,
"step": 1865
},
{
"epoch": 0.2992,
"grad_norm": 0.11474609375,
"learning_rate": 0.0001,
"loss": 0.1906,
"step": 1870
},
{
"epoch": 0.3,
"grad_norm": 0.1220703125,
"learning_rate": 0.0001,
"loss": 0.1658,
"step": 1875
},
{
"epoch": 0.3,
"step": 1875,
"total_flos": 7.012531902575002e+17,
"train_loss": 0.22804062151908874,
"train_runtime": 46172.2448,
"train_samples_per_second": 0.65,
"train_steps_per_second": 0.041
}
],
"logging_steps": 5,
"max_steps": 1875,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 90,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 7.012531902575002e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}