diff --git "a/checkpoint-2289600/trainer_state.json" "b/checkpoint-2289600/trainer_state.json"
new file mode 100644
--- /dev/null
+++ "b/checkpoint-2289600/trainer_state.json"
@@ -0,0 +1,27091 @@
+{
+  "best_metric": 3.843202590942383,
+  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/npi-sim-ques/transformer/0/checkpoints/checkpoint-915840",
+  "epoch": 1.0250006060157382,
+  "eval_steps": 10,
+  "global_step": 2289600,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.999998362119627e-05,
+      "loss": 10.859,
+      "step": 1
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.999161405248948e-05,
+      "loss": 6.8241,
+      "step": 512
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.998322810497896e-05,
+      "loss": 6.1916,
+      "step": 1024
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.997484215746844e-05,
+      "loss": 5.9753,
+      "step": 1536
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.996645620995792e-05,
+      "loss": 5.8086,
+      "step": 2048
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.99580702624474e-05,
+      "loss": 5.7055,
+      "step": 2560
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.994968431493688e-05,
+      "loss": 5.5974,
+      "step": 3072
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.994129836742636e-05,
+      "loss": 5.527,
+      "step": 3584
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.993291241991584e-05,
+      "loss": 5.4481,
+      "step": 4096
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.992452647240532e-05,
+      "loss": 5.3978,
+      "step": 4608
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.99161405248948e-05,
+      "loss": 5.3376,
+      "step": 5120
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.990775457738428e-05,
+      "loss": 5.3155,
+      "step": 5632
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.989938500867749e-05,
+      "loss": 5.2639,
+      "step": 6144
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.989099906116697e-05,
+      "loss": 5.1982,
+      "step": 6656
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.988261311365645e-05,
+      "loss": 5.1727,
+      "step": 7168
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.987422716614593e-05,
+      "loss": 5.1287,
+      "step": 7680
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.986584121863541e-05,
+      "loss": 5.1115,
+      "step": 8192
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.985745527112489e-05,
+      "loss": 5.0679,
+      "step": 8704
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.984906932361437e-05,
+      "loss": 5.0395,
+      "step": 9216
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.984068337610385e-05,
+      "loss": 5.0152,
+      "step": 9728
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.983229742859333e-05,
+      "loss": 4.9959,
+      "step": 10240
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.9823927859886547e-05,
+      "loss": 4.974,
+      "step": 10752
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.9815541912376026e-05,
+      "loss": 4.9529,
+      "step": 11264
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.9807155964865506e-05,
+      "loss": 4.9343,
+      "step": 11776
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.9798770017354986e-05,
+      "loss": 4.9132,
+      "step": 12288
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.9790384069844466e-05,
+      "loss": 4.8913,
+      "step": 12800
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.9782014501137675e-05,
+      "loss": 4.8642,
+      "step": 13312
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.9773628553627155e-05,
+      "loss": 4.85,
+      "step": 13824
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.9765242606116635e-05,
+      "loss": 4.8278,
+      "step": 14336
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.9756856658606115e-05,
+      "loss": 4.8214,
+      "step": 14848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9748487089899324e-05,
+      "loss": 4.8035,
+      "step": 15360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9740101142388804e-05,
+      "loss": 4.7879,
+      "step": 15872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9731715194878284e-05,
+      "loss": 4.7779,
+      "step": 16384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.97233456261715e-05,
+      "loss": 4.7592,
+      "step": 16896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.971495967866098e-05,
+      "loss": 4.7561,
+      "step": 17408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.970657373115046e-05,
+      "loss": 4.7505,
+      "step": 17920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.969820416244367e-05,
+      "loss": 4.7318,
+      "step": 18432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.968981821493315e-05,
+      "loss": 4.6964,
+      "step": 18944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.968143226742263e-05,
+      "loss": 4.7076,
+      "step": 19456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.967304631991211e-05,
+      "loss": 4.6806,
+      "step": 19968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.966466037240159e-05,
+      "loss": 4.6745,
+      "step": 20480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.965627442489107e-05,
+      "loss": 4.6717,
+      "step": 20992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.964788847738054e-05,
+      "loss": 4.6646,
+      "step": 21504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.963950252987002e-05,
+      "loss": 4.6471,
+      "step": 22016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.96311165823595e-05,
+      "loss": 4.6479,
+      "step": 22528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.962273063484898e-05,
+      "loss": 4.6399,
+      "step": 23040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.961434468733847e-05,
+      "loss": 4.6312,
+      "step": 23552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.960595873982795e-05,
+      "loss": 4.6228,
+      "step": 24064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.959757279231743e-05,
+      "loss": 4.6012,
+      "step": 24576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9589219602414374e-05,
+      "loss": 4.5941,
+      "step": 25088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.958083365490385e-05,
+      "loss": 4.5998,
+      "step": 25600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.957244770739333e-05,
+      "loss": 4.5853,
+      "step": 26112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.956406175988281e-05,
+      "loss": 4.5702,
+      "step": 26624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.955567581237229e-05,
+      "loss": 4.5583,
+      "step": 27136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.954728986486177e-05,
+      "loss": 4.5627,
+      "step": 27648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.953890391735125e-05,
+      "loss": 4.5466,
+      "step": 28160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9530517969840727e-05,
+      "loss": 4.5598,
+      "step": 28672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9522148401133936e-05,
+      "loss": 4.5335,
+      "step": 29184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.951376245362342e-05,
+      "loss": 4.5394,
+      "step": 29696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.95053765061129e-05,
+      "loss": 4.5325,
+      "step": 30208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.949699055860238e-05,
+      "loss": 4.5083,
+      "step": 30720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.948862098989559e-05,
+      "loss": 4.5159,
+      "step": 31232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.948023504238507e-05,
+      "loss": 4.5044,
+      "step": 31744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.947184909487455e-05,
+      "loss": 4.4941,
+      "step": 32256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.946346314736403e-05,
+      "loss": 4.4924,
+      "step": 32768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.945509357865724e-05,
+      "loss": 4.508,
+      "step": 33280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.944670763114672e-05,
+      "loss": 4.4837,
+      "step": 33792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.94383216836362e-05,
+      "loss": 4.4751,
+      "step": 34304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.942993573612568e-05,
+      "loss": 4.4607,
+      "step": 34816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.942154978861516e-05,
+      "loss": 4.4608,
+      "step": 35328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.941318021990837e-05,
+      "loss": 4.4767,
+      "step": 35840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9404794272397856e-05,
+      "loss": 4.477,
+      "step": 36352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9396408324887336e-05,
+      "loss": 4.4497,
+      "step": 36864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9388022377376816e-05,
+      "loss": 4.4677,
+      "step": 37376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9379652808670025e-05,
+      "loss": 4.4606,
+      "step": 37888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9371266861159505e-05,
+      "loss": 4.4457,
+      "step": 38400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9362897292452714e-05,
+      "loss": 4.433,
+      "step": 38912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9354511344942194e-05,
+      "loss": 4.4378,
+      "step": 39424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9346125397431674e-05,
+      "loss": 4.4264,
+      "step": 39936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9337739449921154e-05,
+      "loss": 4.4213,
+      "step": 40448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9329353502410634e-05,
+      "loss": 4.4215,
+      "step": 40960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9320967554900114e-05,
+      "loss": 4.4169,
+      "step": 41472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9312581607389594e-05,
+      "loss": 4.421,
+      "step": 41984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9304195659879074e-05,
+      "loss": 4.4168,
+      "step": 42496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.929582609117229e-05,
+      "loss": 4.398,
+      "step": 43008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.928744014366177e-05,
+      "loss": 4.4032,
+      "step": 43520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.927905419615125e-05,
+      "loss": 4.4029,
+      "step": 44032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.927066824864073e-05,
+      "loss": 4.3942,
+      "step": 44544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.926229867993394e-05,
+      "loss": 4.3879,
+      "step": 45056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.925392911122715e-05,
+      "loss": 4.386,
+      "step": 45568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.924554316371663e-05,
+      "loss": 4.388,
+      "step": 46080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.923715721620611e-05,
+      "loss": 4.3754,
+      "step": 46592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.922877126869559e-05,
+      "loss": 4.3768,
+      "step": 47104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.922038532118507e-05,
+      "loss": 4.3698,
+      "step": 47616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.921199937367455e-05,
+      "loss": 4.3743,
+      "step": 48128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.920361342616403e-05,
+      "loss": 4.3679,
+      "step": 48640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.919522747865351e-05,
+      "loss": 4.3635,
+      "step": 49152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.918685790994672e-05,
+      "loss": 4.3462,
+      "step": 49664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.91784719624362e-05,
+      "loss": 4.3507,
+      "step": 50176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9170086014925676e-05,
+      "loss": 4.3537,
+      "step": 50688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9161700067415156e-05,
+      "loss": 4.3606,
+      "step": 51200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.915333049870837e-05,
+      "loss": 4.3463,
+      "step": 51712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.914496093000158e-05,
+      "loss": 4.3371,
+      "step": 52224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.913657498249106e-05,
+      "loss": 4.3322,
+      "step": 52736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.912818903498054e-05,
+      "loss": 4.3405,
+      "step": 53248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.911980308747002e-05,
+      "loss": 4.3093,
+      "step": 53760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.911143351876323e-05,
+      "loss": 4.3196,
+      "step": 54272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.910304757125272e-05,
+      "loss": 4.3223,
+      "step": 54784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.90946616237422e-05,
+      "loss": 4.3215,
+      "step": 55296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.908627567623168e-05,
+      "loss": 4.3127,
+      "step": 55808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.907788972872115e-05,
+      "loss": 4.3199,
+      "step": 56320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.906950378121063e-05,
+      "loss": 4.301,
+      "step": 56832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.906111783370011e-05,
+      "loss": 4.3264,
+      "step": 57344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9052748264993326e-05,
+      "loss": 4.3061,
+      "step": 57856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.90443623174828e-05,
+      "loss": 4.3094,
+      "step": 58368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.903597636997228e-05,
+      "loss": 4.3134,
+      "step": 58880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.902759042246176e-05,
+      "loss": 4.3225,
+      "step": 59392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9019220853754975e-05,
+      "loss": 4.2985,
+      "step": 59904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9010834906244455e-05,
+      "loss": 4.2929,
+      "step": 60416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9002448958733935e-05,
+      "loss": 4.2985,
+      "step": 60928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8994063011223415e-05,
+      "loss": 4.2883,
+      "step": 61440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8985677063712895e-05,
+      "loss": 4.294,
+      "step": 61952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8977291116202375e-05,
+      "loss": 4.2895,
+      "step": 62464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8968905168691855e-05,
+      "loss": 4.2783,
+      "step": 62976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8960535599985064e-05,
+      "loss": 4.2954,
+      "step": 63488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8952149652474544e-05,
+      "loss": 4.2775,
+      "step": 64000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8943763704964024e-05,
+      "loss": 4.2749,
+      "step": 64512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8935377757453504e-05,
+      "loss": 4.2722,
+      "step": 65024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.892700818874671e-05,
+      "loss": 4.2758,
+      "step": 65536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.891862224123619e-05,
+      "loss": 4.2638,
+      "step": 66048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.891023629372567e-05,
+      "loss": 4.2809,
+      "step": 66560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.890185034621515e-05,
+      "loss": 4.2717,
+      "step": 67072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.889346439870464e-05,
+      "loss": 4.26,
+      "step": 67584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.888507845119412e-05,
+      "loss": 4.2531,
+      "step": 68096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.887670888248733e-05,
+      "loss": 4.2636,
+      "step": 68608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.886832293497681e-05,
+      "loss": 4.2597,
+      "step": 69120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.885993698746629e-05,
+      "loss": 4.2631,
+      "step": 69632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.885155103995577e-05,
+      "loss": 4.2578,
+      "step": 70144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.884316509244525e-05,
+      "loss": 4.26,
+      "step": 70656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.883479552373846e-05,
+      "loss": 4.2572,
+      "step": 71168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.882640957622794e-05,
+      "loss": 4.2506,
+      "step": 71680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.881802362871742e-05,
+      "loss": 4.2419,
+      "step": 72192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.88096376812069e-05,
+      "loss": 4.2401,
+      "step": 72704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.880125173369638e-05,
+      "loss": 4.2462,
+      "step": 73216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8792882164989586e-05,
+      "loss": 4.2255,
+      "step": 73728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.878449621747907e-05,
+      "loss": 4.247,
+      "step": 74240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.877611026996855e-05,
+      "loss": 4.2425,
+      "step": 74752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.876772432245803e-05,
+      "loss": 4.2343,
+      "step": 75264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.875933837494751e-05,
+      "loss": 4.236,
+      "step": 75776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.875096880624072e-05,
+      "loss": 4.2355,
+      "step": 76288
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.195679664611816,
+      "eval_runtime": 306.1733,
+      "eval_samples_per_second": 1246.324,
+      "eval_steps_per_second": 38.949,
+      "step": 76320
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.87425828587302e-05,
+      "loss": 4.2099,
+      "step": 76800
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.873419691121968e-05,
+      "loss": 4.2183,
+      "step": 77312
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.872581096370916e-05,
+      "loss": 4.2299,
+      "step": 77824
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.871744139500237e-05,
+      "loss": 4.2151,
+      "step": 78336
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.870905544749185e-05,
+      "loss": 4.2204,
+      "step": 78848
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.870066949998133e-05,
+      "loss": 4.2106,
+      "step": 79360
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.869228355247081e-05,
+      "loss": 4.2069,
+      "step": 79872
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.868389760496029e-05,
+      "loss": 4.2028,
+      "step": 80384
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.867551165744977e-05,
+      "loss": 4.2094,
+      "step": 80896
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8667142088742986e-05,
+      "loss": 4.1963,
+      "step": 81408
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.865875614123246e-05,
+      "loss": 4.2179,
+      "step": 81920
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.865037019372194e-05,
+      "loss": 4.2123,
+      "step": 82432
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8642000625015155e-05,
+      "loss": 4.1944,
+      "step": 82944
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8633614677504635e-05,
+      "loss": 4.1901,
+      "step": 83456
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.862522872999411e-05,
+      "loss": 4.188,
+      "step": 83968
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.861684278248359e-05,
+      "loss": 4.1877,
+      "step": 84480
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.860845683497307e-05,
+      "loss": 4.184,
+      "step": 84992
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.860007088746255e-05,
+      "loss": 4.1855,
+      "step": 85504
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.859168493995203e-05,
+      "loss": 4.1782,
+      "step": 86016
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.858329899244151e-05,
+      "loss": 4.1954,
+      "step": 86528
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8574929423734724e-05,
+      "loss": 4.1831,
+      "step": 87040
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8566543476224204e-05,
+      "loss": 4.1864,
+      "step": 87552
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8558157528713684e-05,
+      "loss": 4.1825,
+      "step": 88064
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8549771581203164e-05,
+      "loss": 4.1921,
+      "step": 88576
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.854140201249637e-05,
+      "loss": 4.168,
+      "step": 89088
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.853301606498585e-05,
+      "loss": 4.1713,
+      "step": 89600
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.852463011747533e-05,
+      "loss": 4.1667,
+      "step": 90112
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.851624416996481e-05,
+      "loss": 4.1644,
+      "step": 90624
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.850785822245429e-05,
+      "loss": 4.1661,
+      "step": 91136
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.84994886537475e-05,
+      "loss": 4.1617,
+      "step": 91648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.849111908504072e-05,
+      "loss": 4.1652,
+      "step": 92160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.84827331375302e-05,
+      "loss": 4.167,
+      "step": 92672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.847434719001968e-05,
+      "loss": 4.1554,
+      "step": 93184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.846596124250916e-05,
+      "loss": 4.1636,
+      "step": 93696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.845757529499864e-05,
+      "loss": 4.1702,
+      "step": 94208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.844918934748812e-05,
+      "loss": 4.1568,
+      "step": 94720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.84408033999776e-05,
+      "loss": 4.14,
+      "step": 95232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.843241745246708e-05,
+      "loss": 4.1541,
+      "step": 95744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.842404788376029e-05,
+      "loss": 4.1364,
+      "step": 96256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.841566193624977e-05,
+      "loss": 4.1425,
+      "step": 96768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.840727598873925e-05,
+      "loss": 4.1426,
+      "step": 97280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.839889004122873e-05,
+      "loss": 4.1511,
+      "step": 97792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.839050409371821e-05,
+      "loss": 4.1387,
+      "step": 98304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8382134525011416e-05,
+      "loss": 4.144,
+      "step": 98816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.83737485775009e-05,
+      "loss": 4.1485,
+      "step": 99328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.836536262999038e-05,
+      "loss": 4.1401,
+      "step": 99840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.835697668247986e-05,
+      "loss": 4.1413,
+      "step": 100352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.834860711377307e-05,
+      "loss": 4.1238,
+      "step": 100864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.834022116626255e-05,
+      "loss": 4.1316,
+      "step": 101376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.833183521875203e-05,
+      "loss": 4.138,
+      "step": 101888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.832344927124151e-05,
+      "loss": 4.1298,
+      "step": 102400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.831507970253472e-05,
+      "loss": 4.1258,
+      "step": 102912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.83066937550242e-05,
+      "loss": 4.1113,
+      "step": 103424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.829830780751368e-05,
+      "loss": 4.1231,
+      "step": 103936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.828992186000316e-05,
+      "loss": 4.1128,
+      "step": 104448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.828155229129637e-05,
+      "loss": 4.1277,
+      "step": 104960
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8273166343785856e-05,
+      "loss": 4.1136,
+      "step": 105472
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8264780396275336e-05,
+      "loss": 4.1221,
+      "step": 105984
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8256394448764816e-05,
+      "loss": 4.1235,
+      "step": 106496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8248008501254296e-05,
+      "loss": 4.0997,
+      "step": 107008
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8239638932547505e-05,
+      "loss": 4.1141,
+      "step": 107520
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8231252985036985e-05,
+      "loss": 4.111,
+      "step": 108032
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8222867037526465e-05,
+      "loss": 4.1008,
+      "step": 108544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8214481090015945e-05,
+      "loss": 4.1014,
+      "step": 109056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8206111521309154e-05,
+      "loss": 4.1191,
+      "step": 109568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8197725573798634e-05,
+      "loss": 4.1066,
+      "step": 110080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8189339626288114e-05,
+      "loss": 4.0947,
+      "step": 110592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8180953678777594e-05,
+      "loss": 4.0892,
+      "step": 111104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.817258411007081e-05,
+      "loss": 4.0982,
+      "step": 111616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.816419816256029e-05,
+      "loss": 4.11,
+      "step": 112128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.815581221504977e-05,
+      "loss": 4.1146,
+      "step": 112640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.814742626753924e-05,
+      "loss": 4.0979,
+      "step": 113152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.813905669883246e-05,
+      "loss": 4.1118,
+      "step": 113664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.813067075132194e-05,
+      "loss": 4.1066,
+      "step": 114176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.812228480381141e-05,
+      "loss": 4.1017,
+      "step": 114688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.811389885630089e-05,
+      "loss": 4.0869,
+      "step": 115200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.810552928759411e-05,
+      "loss": 4.1021,
+      "step": 115712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.809714334008359e-05,
+      "loss": 4.0914,
+      "step": 116224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.808875739257306e-05,
+      "loss": 4.0878,
+      "step": 116736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.808037144506255e-05,
+      "loss": 4.0904,
+      "step": 117248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.807200187635576e-05,
+      "loss": 4.0867,
+      "step": 117760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8063615928845243e-05,
+      "loss": 4.1003,
+      "step": 118272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8055229981334717e-05,
+      "loss": 4.0886,
+      "step": 118784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8046844033824197e-05,
+      "loss": 4.082,
+      "step": 119296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.803847446511741e-05,
+      "loss": 4.0839,
+      "step": 119808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8030088517606886e-05,
+      "loss": 4.0873,
+      "step": 120320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8021702570096366e-05,
+      "loss": 4.0813,
+      "step": 120832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8013316622585846e-05,
+      "loss": 4.0828,
+      "step": 121344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.800494705387906e-05,
+      "loss": 4.0737,
+      "step": 121856
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7996561106368535e-05,
+      "loss": 4.0888,
+      "step": 122368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7988175158858015e-05,
+      "loss": 4.0726,
+      "step": 122880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7979789211347494e-05,
+      "loss": 4.075,
+      "step": 123392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.797140326383698e-05,
+      "loss": 4.0692,
+      "step": 123904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.796301731632646e-05,
+      "loss": 4.0834,
+      "step": 124416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.795463136881594e-05,
+      "loss": 4.0807,
+      "step": 124928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.794624542130542e-05,
+      "loss": 4.0738,
+      "step": 125440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.793787585259863e-05,
+      "loss": 4.0605,
+      "step": 125952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.792948990508811e-05,
+      "loss": 4.0691,
+      "step": 126464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.792110395757759e-05,
+      "loss": 4.0711,
+      "step": 126976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.791271801006707e-05,
+      "loss": 4.0783,
+      "step": 127488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.790434844136028e-05,
+      "loss": 4.0715,
+      "step": 128000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.789596249384976e-05,
+      "loss": 4.0619,
+      "step": 128512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.788757654633924e-05,
+      "loss": 4.0563,
+      "step": 129024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.787920697763245e-05,
+      "loss": 4.069,
+      "step": 129536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7870821030121935e-05,
+      "loss": 4.0394,
+      "step": 130048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7862451461415144e-05,
+      "loss": 4.0555,
+      "step": 130560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7854065513904624e-05,
+      "loss": 4.058,
+      "step": 131072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7845679566394104e-05,
+      "loss": 4.0572,
+      "step": 131584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7837293618883584e-05,
+      "loss": 4.0478,
+      "step": 132096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7828907671373064e-05,
+      "loss": 4.0576,
+      "step": 132608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7820521723862544e-05,
+      "loss": 4.0495,
+      "step": 133120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7812135776352024e-05,
+      "loss": 4.0667,
+      "step": 133632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7803749828841504e-05,
+      "loss": 4.0516,
+      "step": 134144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.779538026013471e-05,
+      "loss": 4.0521,
+      "step": 134656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.778699431262419e-05,
+      "loss": 4.0681,
+      "step": 135168
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.777860836511367e-05,
+      "loss": 4.0724,
+      "step": 135680
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.777023879640689e-05,
+      "loss": 4.05,
+      "step": 136192
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.776185284889637e-05,
+      "loss": 4.0476,
+      "step": 136704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.775346690138585e-05,
+      "loss": 4.0551,
+      "step": 137216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.774508095387533e-05,
+      "loss": 4.0442,
+      "step": 137728
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.773669500636481e-05,
+      "loss": 4.0478,
+      "step": 138240
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.772830905885429e-05,
+      "loss": 4.0527,
+      "step": 138752
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.771992311134377e-05,
+      "loss": 4.0385,
+      "step": 139264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.771155354263698e-05,
+      "loss": 4.0584,
+      "step": 139776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.770316759512646e-05,
+      "loss": 4.0412,
+      "step": 140288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.769478164761594e-05,
+      "loss": 4.045,
+      "step": 140800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.768639570010542e-05,
+      "loss": 4.0367,
+      "step": 141312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7678026131398626e-05,
+      "loss": 4.0429,
+      "step": 141824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7669640183888106e-05,
+      "loss": 4.0373,
+      "step": 142336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7661254236377586e-05,
+      "loss": 4.0515,
+      "step": 142848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.765286828886707e-05,
+      "loss": 4.0472,
+      "step": 143360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.764449872016028e-05,
+      "loss": 4.0362,
+      "step": 143872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.763611277264976e-05,
+      "loss": 4.0216,
+      "step": 144384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.762772682513924e-05,
+      "loss": 4.0452,
+      "step": 144896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.761934087762872e-05,
+      "loss": 4.036,
+      "step": 145408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.761097130892193e-05,
+      "loss": 4.0435,
+      "step": 145920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.760258536141141e-05,
+      "loss": 4.0388,
+      "step": 146432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.759419941390089e-05,
+      "loss": 4.0382,
+      "step": 146944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.758581346639037e-05,
+      "loss": 4.0391,
+      "step": 147456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.757744389768358e-05,
+      "loss": 4.0408,
+      "step": 147968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.756905795017306e-05,
+      "loss": 4.0272,
+      "step": 148480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.756067200266254e-05,
+      "loss": 4.0257,
+      "step": 148992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.755228605515202e-05,
+      "loss": 4.0354,
+      "step": 149504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7543916486445236e-05,
+      "loss": 4.0181,
+      "step": 150016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7535530538934716e-05,
+      "loss": 4.0338,
+      "step": 150528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7527144591424196e-05,
+      "loss": 4.0401,
+      "step": 151040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.751875864391367e-05,
+      "loss": 4.0274,
+      "step": 151552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7510389075206885e-05,
+      "loss": 4.025,
+      "step": 152064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7502003127696365e-05,
+      "loss": 4.0283,
+      "step": 152576
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 4.027114391326904,
+      "eval_runtime": 305.7714,
+      "eval_samples_per_second": 1247.962,
+      "eval_steps_per_second": 39.0,
+      "step": 152640
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.7493617180185845e-05,
+      "loss": 4.0066,
+      "step": 153088
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.748523123267532e-05,
+      "loss": 4.0157,
+      "step": 153600
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.74768452851648e-05,
+      "loss": 4.0274,
+      "step": 154112
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.746845933765428e-05,
+      "loss": 4.0189,
+      "step": 154624
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.7460073390143764e-05,
+      "loss": 4.025,
+      "step": 155136
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.7451687442633244e-05,
+      "loss": 4.0076,
+      "step": 155648
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.7443301495122724e-05,
+      "loss": 4.0186,
+      "step": 156160
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.7434915547612204e-05,
+      "loss": 4.0031,
+      "step": 156672
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.7426529600101684e-05,
+      "loss": 4.0153,
+      "step": 157184
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.7418143652591164e-05,
+      "loss": 4.0119,
+      "step": 157696
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.740977408388437e-05,
+      "loss": 4.0205,
+      "step": 158208
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.740138813637385e-05,
+      "loss": 4.0239,
+      "step": 158720
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.739301856766706e-05,
+      "loss": 4.0048,
+      "step": 159232
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.738463262015654e-05,
+      "loss": 4.0063,
+      "step": 159744
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.737624667264602e-05,
+      "loss": 4.0051,
+      "step": 160256
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.73678607251355e-05,
+      "loss": 4.0007,
+      "step": 160768
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.735947477762498e-05,
+      "loss": 3.9988,
+      "step": 161280
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.735108883011446e-05,
+      "loss": 4.0022,
+      "step": 161792
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.734270288260394e-05,
+      "loss": 3.9963,
+      "step": 162304
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.733431693509343e-05,
+      "loss": 4.0199,
+      "step": 162816
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.732593098758291e-05,
+      "loss": 3.9983,
+      "step": 163328
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.731754504007238e-05,
+      "loss": 4.013,
+      "step": 163840
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.730915909256186e-05,
+      "loss": 4.0055,
+      "step": 164352
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.730078952385508e-05,
+      "loss": 4.0122,
+      "step": 164864
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.729240357634456e-05,
+      "loss": 3.9867,
+      "step": 165376
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.728401762883403e-05,
+      "loss": 3.9994,
+      "step": 165888
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.727563168132351e-05,
+      "loss": 3.9882,
+      "step": 166400
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.726726211261673e-05,
+      "loss": 3.9957,
+      "step": 166912
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.725887616510621e-05,
+      "loss": 3.9895,
+      "step": 167424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.725049021759568e-05,
+      "loss": 3.989,
+      "step": 167936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7242120648888896e-05,
+      "loss": 4.0,
+      "step": 168448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.723373470137838e-05,
+      "loss": 3.9972,
+      "step": 168960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7225348753867856e-05,
+      "loss": 3.9833,
+      "step": 169472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7216962806357336e-05,
+      "loss": 3.9977,
+      "step": 169984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7208576858846816e-05,
+      "loss": 3.9958,
+      "step": 170496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7200190911336296e-05,
+      "loss": 3.9908,
+      "step": 171008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7191804963825775e-05,
+      "loss": 3.9784,
+      "step": 171520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7183419016315255e-05,
+      "loss": 3.989,
+      "step": 172032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7175049447608465e-05,
+      "loss": 3.9701,
+      "step": 172544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7166663500097945e-05,
+      "loss": 3.9844,
+      "step": 173056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7158277552587424e-05,
+      "loss": 3.9794,
+      "step": 173568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7149891605076904e-05,
+      "loss": 3.9896,
+      "step": 174080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.714152203637012e-05,
+      "loss": 3.9799,
+      "step": 174592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.71331360888596e-05,
+      "loss": 3.9836,
+      "step": 175104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.712476652015281e-05,
+      "loss": 3.9899,
+      "step": 175616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.711638057264229e-05,
+      "loss": 3.9846,
+      "step": 176128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.710799462513177e-05,
+      "loss": 3.9836,
+      "step": 176640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.709960867762125e-05,
+      "loss": 3.9665,
+      "step": 177152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.709122273011073e-05,
+      "loss": 3.9718,
+      "step": 177664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.708283678260021e-05,
+      "loss": 3.9847,
+      "step": 178176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.707445083508969e-05,
+      "loss": 3.9742,
+      "step": 178688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.706606488757917e-05,
+      "loss": 3.9691,
+      "step": 179200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.705769531887238e-05,
+      "loss": 3.9611,
+      "step": 179712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.704930937136186e-05,
+      "loss": 3.9743,
+      "step": 180224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.704092342385134e-05,
+      "loss": 3.9576,
+      "step": 180736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.703253747634082e-05,
+      "loss": 3.9771,
+      "step": 181248
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7024167907634034e-05,
+      "loss": 3.9641,
+      "step": 181760
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7015781960123514e-05,
+      "loss": 3.9754,
+      "step": 182272
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7007396012612994e-05,
+      "loss": 3.9722,
+      "step": 182784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6999010065102474e-05,
+      "loss": 3.9547,
+      "step": 183296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.699064049639568e-05,
+      "loss": 3.9638,
+      "step": 183808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.698225454888516e-05,
+      "loss": 3.9653,
+      "step": 184320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.697386860137464e-05,
+      "loss": 3.9565,
+      "step": 184832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.696548265386412e-05,
+      "loss": 3.9618,
+      "step": 185344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.695711308515733e-05,
+      "loss": 3.9693,
+      "step": 185856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.694872713764681e-05,
+      "loss": 3.9652,
+      "step": 186368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.694034119013629e-05,
+      "loss": 3.9542,
+      "step": 186880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.693195524262577e-05,
+      "loss": 3.9449,
+      "step": 187392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.692358567391899e-05,
+      "loss": 3.9542,
+      "step": 187904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.691519972640847e-05,
+      "loss": 3.9666,
+      "step": 188416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.690681377889795e-05,
+      "loss": 3.9735,
+      "step": 188928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6898444210191157e-05,
+      "loss": 3.9576,
+      "step": 189440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6890074641484366e-05,
+      "loss": 3.9701,
+      "step": 189952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6881688693973846e-05,
+      "loss": 3.968,
+      "step": 190464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6873302746463326e-05,
+      "loss": 3.9671,
+      "step": 190976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6864916798952806e-05,
+      "loss": 3.9428,
+      "step": 191488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6856530851442285e-05,
+      "loss": 3.967,
+      "step": 192000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6848144903931765e-05,
+      "loss": 3.9557,
+      "step": 192512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6839758956421245e-05,
+      "loss": 3.9491,
+      "step": 193024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.683138938771446e-05,
+      "loss": 3.9573,
+      "step": 193536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.682300344020394e-05,
+      "loss": 3.951,
+      "step": 194048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.681461749269342e-05,
+      "loss": 3.9615,
+      "step": 194560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.68062315451829e-05,
+      "loss": 3.9561,
+      "step": 195072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.679784559767238e-05,
+      "loss": 3.9447,
+      "step": 195584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.678947602896559e-05,
+      "loss": 3.9512,
+      "step": 196096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.678109008145507e-05,
+      "loss": 3.9518,
+      "step": 196608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.677270413394455e-05,
+      "loss": 3.9501,
+      "step": 197120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.676431818643403e-05,
+      "loss": 3.9507,
+      "step": 197632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.675593223892351e-05,
+      "loss": 3.9444,
+      "step": 198144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.674756267021672e-05,
+      "loss": 3.9541,
+      "step": 198656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.67391767227062e-05,
+      "loss": 3.9436,
+      "step": 199168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.673079077519568e-05,
+      "loss": 3.9457,
+      "step": 199680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.672240482768516e-05,
+      "loss": 3.9401,
+      "step": 200192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.671401888017464e-05,
+      "loss": 3.9563,
+      "step": 200704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.670563293266412e-05,
+      "loss": 3.9513,
+      "step": 201216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.66972469851536e-05,
+      "loss": 3.9463,
+      "step": 201728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6688877416446815e-05,
+      "loss": 3.9374,
+      "step": 202240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.668049146893629e-05,
+      "loss": 3.9461,
+      "step": 202752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.667210552142577e-05,
+      "loss": 3.9382,
+      "step": 203264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.666371957391525e-05,
+      "loss": 3.956,
+      "step": 203776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.665533362640473e-05,
+      "loss": 3.9429,
+      "step": 204288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.664696405769794e-05,
+      "loss": 3.9382,
+      "step": 204800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.663857811018742e-05,
+      "loss": 3.9318,
+      "step": 205312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.66301921626769e-05,
+      "loss": 3.9422,
+      "step": 205824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6621806215166383e-05,
+      "loss": 3.9198,
+      "step": 206336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6613420267655863e-05,
+      "loss": 3.9332,
+      "step": 206848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.660505069894907e-05,
+      "loss": 3.9349,
+      "step": 207360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.659666475143855e-05,
+      "loss": 3.9359,
+      "step": 207872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.658827880392803e-05,
+      "loss": 3.9236,
+      "step": 208384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.657989285641751e-05,
+      "loss": 3.9406,
+      "step": 208896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.657150690890699e-05,
+      "loss": 3.9254,
+      "step": 209408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.656312096139647e-05,
+      "loss": 3.9464,
+      "step": 209920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.655473501388595e-05,
+      "loss": 3.9331,
+      "step": 210432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.654636544517916e-05,
+      "loss": 3.9315,
+      "step": 210944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.653797949766864e-05,
+      "loss": 3.9478,
+      "step": 211456
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.652959355015812e-05,
+      "loss": 3.9541,
+      "step": 211968
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.652122398145134e-05,
+      "loss": 3.9336,
+      "step": 212480
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.651283803394082e-05,
+      "loss": 3.9267,
+      "step": 212992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.65044520864303e-05,
+      "loss": 3.9343,
+      "step": 213504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.649606613891978e-05,
+      "loss": 3.9284,
+      "step": 214016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.648768019140926e-05,
+      "loss": 3.929,
+      "step": 214528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.647929424389874e-05,
+      "loss": 3.94,
+      "step": 215040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.647090829638822e-05,
+      "loss": 3.9199,
+      "step": 215552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.64625223488777e-05,
+      "loss": 3.9418,
+      "step": 216064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6454152780170906e-05,
+      "loss": 3.9291,
+      "step": 216576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6445766832660386e-05,
+      "loss": 3.9304,
+      "step": 217088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6437380885149866e-05,
+      "loss": 3.9215,
+      "step": 217600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6429011316443075e-05,
+      "loss": 3.9295,
+      "step": 218112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6420625368932555e-05,
+      "loss": 3.9211,
+      "step": 218624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6412239421422035e-05,
+      "loss": 3.9401,
+      "step": 219136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.640385347391152e-05,
+      "loss": 3.9307,
+      "step": 219648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.639548390520473e-05,
+      "loss": 3.9267,
+      "step": 220160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.638709795769421e-05,
+      "loss": 3.9092,
+      "step": 220672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.637871201018369e-05,
+      "loss": 3.9334,
+      "step": 221184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.637032606267317e-05,
+      "loss": 3.9268,
+      "step": 221696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.636194011516265e-05,
+      "loss": 3.9261,
+      "step": 222208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6353554167652124e-05,
+      "loss": 3.9233,
+      "step": 222720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6345168220141604e-05,
+      "loss": 3.9318,
+      "step": 223232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.633679865143482e-05,
+      "loss": 3.9291,
+      "step": 223744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.632841270392429e-05,
+      "loss": 3.9341,
+      "step": 224256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.632002675641377e-05,
+      "loss": 3.915,
+      "step": 224768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.631164080890326e-05,
+      "loss": 3.919,
+      "step": 225280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.630325486139274e-05,
+      "loss": 3.9251,
+      "step": 225792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.629488529268595e-05,
+      "loss": 3.9072,
+      "step": 226304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.628649934517543e-05,
+      "loss": 3.9237,
+      "step": 226816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.627811339766491e-05,
+      "loss": 3.9316,
+      "step": 227328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.626972745015439e-05,
+      "loss": 3.9194,
+      "step": 227840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.626134150264387e-05,
+      "loss": 3.9137,
+      "step": 228352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.625297193393708e-05,
+      "loss": 3.9226,
+      "step": 228864
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.9522695541381836,
+      "eval_runtime": 305.3529,
+      "eval_samples_per_second": 1249.672,
+      "eval_steps_per_second": 39.053,
+      "step": 228960
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.624458598642656e-05,
+      "loss": 3.9115,
+      "step": 229376
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.623620003891604e-05,
+      "loss": 3.9093,
+      "step": 229888
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.622781409140552e-05,
+      "loss": 3.9212,
+      "step": 230400
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6219428143895e-05,
+      "loss": 3.9106,
+      "step": 230912
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.621105857518821e-05,
+      "loss": 3.9239,
+      "step": 231424
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.620267262767769e-05,
+      "loss": 3.9023,
+      "step": 231936
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.619428668016717e-05,
+      "loss": 3.9152,
+      "step": 232448
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.618590073265665e-05,
+      "loss": 3.8988,
+      "step": 232960
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.617753116394986e-05,
+      "loss": 3.9126,
+      "step": 233472
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.616914521643934e-05,
+      "loss": 3.9062,
+      "step": 233984
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.616075926892882e-05,
+      "loss": 3.9179,
+      "step": 234496
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.61523733214183e-05,
+      "loss": 3.9188,
+      "step": 235008
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.614402013151524e-05,
+      "loss": 3.9034,
+      "step": 235520
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.613563418400472e-05,
+      "loss": 3.907,
+      "step": 236032
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.61272482364942e-05,
+      "loss": 3.9025,
+      "step": 236544
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.611886228898368e-05,
+      "loss": 3.9034,
+      "step": 237056
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.611047634147316e-05,
+      "loss": 3.895,
+      "step": 237568
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.610209039396265e-05,
+      "loss": 3.9025,
+      "step": 238080
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.609370444645213e-05,
+      "loss": 3.8966,
+      "step": 238592
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6085318498941607e-05,
+      "loss": 3.9249,
+      "step": 239104
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6076932551431087e-05,
+      "loss": 3.9003,
+      "step": 239616
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6068562982724296e-05,
+      "loss": 3.9081,
+      "step": 240128
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6060177035213776e-05,
+      "loss": 3.9109,
+      "step": 240640
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6051791087703256e-05,
+      "loss": 3.9114,
+      "step": 241152
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6043405140192735e-05,
+      "loss": 3.8906,
+      "step": 241664
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6035019192682215e-05,
+      "loss": 3.9003,
+      "step": 242176
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6026649623975425e-05,
+      "loss": 3.8927,
+      "step": 242688
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6018263676464904e-05,
+      "loss": 3.8981,
+      "step": 243200
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6009877728954384e-05,
+      "loss": 3.8896,
+      "step": 243712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.6001491781443864e-05,
+      "loss": 3.8902,
+      "step": 244224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5993105833933344e-05,
+      "loss": 3.9009,
+      "step": 244736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.598473626522656e-05,
+      "loss": 3.9028,
+      "step": 245248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.597635031771604e-05,
+      "loss": 3.8941,
+      "step": 245760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.596796437020552e-05,
+      "loss": 3.8994,
+      "step": 246272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5959578422695e-05,
+      "loss": 3.9013,
+      "step": 246784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.595120885398821e-05,
+      "loss": 3.8952,
+      "step": 247296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.594282290647769e-05,
+      "loss": 3.8816,
+      "step": 247808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.593443695896717e-05,
+      "loss": 3.8956,
+      "step": 248320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.592605101145665e-05,
+      "loss": 3.8748,
+      "step": 248832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.591768144274986e-05,
+      "loss": 3.8915,
+      "step": 249344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.590929549523934e-05,
+      "loss": 3.885,
+      "step": 249856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.590090954772882e-05,
+      "loss": 3.8983,
+      "step": 250368
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.58925236002183e-05,
+      "loss": 3.8947,
+      "step": 250880
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5884137652707785e-05,
+      "loss": 3.887,
+      "step": 251392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5875768084000994e-05,
+      "loss": 3.8974,
+      "step": 251904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5867382136490474e-05,
+      "loss": 3.8939,
+      "step": 252416
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5858996188979954e-05,
+      "loss": 3.8908,
+      "step": 252928
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.585061024146943e-05,
+      "loss": 3.876,
+      "step": 253440
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.584222429395891e-05,
+      "loss": 3.8793,
+      "step": 253952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.583385472525212e-05,
+      "loss": 3.8957,
+      "step": 254464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.58254687777416e-05,
+      "loss": 3.8792,
+      "step": 254976
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5817082830231076e-05,
+      "loss": 3.8774,
+      "step": 255488
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5808696882720556e-05,
+      "loss": 3.874,
+      "step": 256000
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5800310935210036e-05,
+      "loss": 3.8854,
+      "step": 256512
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.579192498769952e-05,
+      "loss": 3.8706,
+      "step": 257024
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.578355541899273e-05,
+      "loss": 3.8835,
+      "step": 257536
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.577516947148221e-05,
+      "loss": 3.8784,
+      "step": 258048
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.576678352397169e-05,
+      "loss": 3.8835,
+      "step": 258560
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.575839757646117e-05,
+      "loss": 3.8843,
+      "step": 259072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.575001162895065e-05,
+      "loss": 3.8703,
+      "step": 259584
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.574164206024386e-05,
+      "loss": 3.8712,
+      "step": 260096
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.573325611273334e-05,
+      "loss": 3.8831,
+      "step": 260608
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.572487016522282e-05,
+      "loss": 3.8665,
+      "step": 261120
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.57164842177123e-05,
+      "loss": 3.874,
+      "step": 261632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.570809827020178e-05,
+      "loss": 3.8836,
+      "step": 262144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.569972870149499e-05,
+      "loss": 3.8823,
+      "step": 262656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5691342753984476e-05,
+      "loss": 3.8634,
+      "step": 263168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5682956806473956e-05,
+      "loss": 3.8613,
+      "step": 263680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5674570858963436e-05,
+      "loss": 3.8674,
+      "step": 264192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5666184911452916e-05,
+      "loss": 3.8771,
+      "step": 264704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5657815342746125e-05,
+      "loss": 3.8853,
+      "step": 265216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5649429395235605e-05,
+      "loss": 3.8726,
+      "step": 265728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5641043447725085e-05,
+      "loss": 3.8836,
+      "step": 266240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5632657500214565e-05,
+      "loss": 3.8841,
+      "step": 266752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5624271552704045e-05,
+      "loss": 3.8826,
+      "step": 267264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5615901983997254e-05,
+      "loss": 3.861,
+      "step": 267776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5607516036486734e-05,
+      "loss": 3.8811,
+      "step": 268288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5599130088976214e-05,
+      "loss": 3.8724,
+      "step": 268800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5590744141465694e-05,
+      "loss": 3.8661,
+      "step": 269312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.558237457275891e-05,
+      "loss": 3.8733,
+      "step": 269824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.557398862524839e-05,
+      "loss": 3.8707,
+      "step": 270336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.556560267773787e-05,
+      "loss": 3.8752,
+      "step": 270848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.555721673022735e-05,
+      "loss": 3.8718,
+      "step": 271360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.554883078271683e-05,
+      "loss": 3.8636,
+      "step": 271872
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.554046121401004e-05,
+      "loss": 3.8712,
+      "step": 272384
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.553207526649952e-05,
+      "loss": 3.8663,
+      "step": 272896
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5523689318989e-05,
+      "loss": 3.8685,
+      "step": 273408
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.551530337147848e-05,
+      "loss": 3.8672,
+      "step": 273920
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.550693380277169e-05,
+      "loss": 3.8619,
+      "step": 274432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.549854785526117e-05,
+      "loss": 3.8737,
+      "step": 274944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.549016190775065e-05,
+      "loss": 3.8671,
+      "step": 275456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.548177596024013e-05,
+      "loss": 3.8606,
+      "step": 275968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5473406391533343e-05,
+      "loss": 3.8606,
+      "step": 276480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5465020444022823e-05,
+      "loss": 3.8783,
+      "step": 276992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.54566344965123e-05,
+      "loss": 3.8697,
+      "step": 277504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.544824854900178e-05,
+      "loss": 3.8641,
+      "step": 278016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.543987898029499e-05,
+      "loss": 3.8545,
+      "step": 278528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.543149303278447e-05,
+      "loss": 3.8637,
+      "step": 279040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.542310708527395e-05,
+      "loss": 3.8566,
+      "step": 279552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.541472113776343e-05,
+      "loss": 3.8783,
+      "step": 280064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.540633519025291e-05,
+      "loss": 3.8629,
+      "step": 280576
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.539796562154612e-05,
+      "loss": 3.8604,
+      "step": 281088
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.53895796740356e-05,
+      "loss": 3.8548,
+      "step": 281600
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.538119372652508e-05,
+      "loss": 3.8647,
+      "step": 282112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.537280777901456e-05,
+      "loss": 3.8368,
+      "step": 282624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.536443821030778e-05,
+      "loss": 3.8566,
+      "step": 283136
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5356068641600986e-05,
+      "loss": 3.8553,
+      "step": 283648
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5347682694090466e-05,
+      "loss": 3.8591,
+      "step": 284160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5339296746579946e-05,
+      "loss": 3.8439,
+      "step": 284672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5330910799069426e-05,
+      "loss": 3.8626,
+      "step": 285184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5322524851558906e-05,
+      "loss": 3.8481,
+      "step": 285696
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5314138904048386e-05,
+      "loss": 3.8657,
+      "step": 286208
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.530575295653786e-05,
+      "loss": 3.8596,
+      "step": 286720
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.529736700902734e-05,
+      "loss": 3.8512,
+      "step": 287232
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5288997440320555e-05,
+      "loss": 3.8684,
+      "step": 287744
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5280611492810035e-05,
+      "loss": 3.8769,
+      "step": 288256
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5272225545299515e-05,
+      "loss": 3.8581,
+      "step": 288768
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5263839597788995e-05,
+      "loss": 3.8522,
+      "step": 289280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.525547002908221e-05,
+      "loss": 3.858,
+      "step": 289792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5247084081571684e-05,
+      "loss": 3.8492,
+      "step": 290304
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5238698134061164e-05,
+      "loss": 3.8518,
+      "step": 290816
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5230312186550644e-05,
+      "loss": 3.8648,
+      "step": 291328
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5221926239040124e-05,
+      "loss": 3.8473,
+      "step": 291840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.521355667033333e-05,
+      "loss": 3.8614,
+      "step": 292352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.520517072282281e-05,
+      "loss": 3.8543,
+      "step": 292864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.519678477531229e-05,
+      "loss": 3.8578,
+      "step": 293376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.518839882780177e-05,
+      "loss": 3.8394,
+      "step": 293888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.518002925909499e-05,
+      "loss": 3.8585,
+      "step": 294400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.517164331158447e-05,
+      "loss": 3.845,
+      "step": 294912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.516325736407395e-05,
+      "loss": 3.8624,
+      "step": 295424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.515487141656343e-05,
+      "loss": 3.8528,
+      "step": 295936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.514650184785664e-05,
+      "loss": 3.8583,
+      "step": 296448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.513811590034612e-05,
+      "loss": 3.8355,
+      "step": 296960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.51297299528356e-05,
+      "loss": 3.8614,
+      "step": 297472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.512134400532508e-05,
+      "loss": 3.85,
+      "step": 297984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5112974436618287e-05,
+      "loss": 3.853,
+      "step": 298496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5104588489107766e-05,
+      "loss": 3.85,
+      "step": 299008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5096202541597246e-05,
+      "loss": 3.8594,
+      "step": 299520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5087816594086726e-05,
+      "loss": 3.8563,
+      "step": 300032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.507944702537994e-05,
+      "loss": 3.858,
+      "step": 300544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.507106107786942e-05,
+      "loss": 3.842,
+      "step": 301056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.50626751303589e-05,
+      "loss": 3.848,
+      "step": 301568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.505428918284838e-05,
+      "loss": 3.8483,
+      "step": 302080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.504591961414159e-05,
+      "loss": 3.8381,
+      "step": 302592
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.503753366663107e-05,
+      "loss": 3.85,
+      "step": 303104
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.502914771912055e-05,
+      "loss": 3.8568,
+      "step": 303616
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.502076177161003e-05,
+      "loss": 3.8492,
+      "step": 304128
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.501239220290324e-05,
+      "loss": 3.8421,
+      "step": 304640
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.500400625539272e-05,
+      "loss": 3.8525,
+      "step": 305152
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.911241292953491,
+      "eval_runtime": 306.3641,
+      "eval_samples_per_second": 1245.547,
+      "eval_steps_per_second": 38.924,
+      "step": 305280
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.49956203078822e-05,
+      "loss": 3.8319,
+      "step": 305664
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.498723436037168e-05,
+      "loss": 3.8381,
+      "step": 306176
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.497884841286116e-05,
+      "loss": 3.8475,
+      "step": 306688
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.497046246535065e-05,
+      "loss": 3.8442,
+      "step": 307200
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.496207651784013e-05,
+      "loss": 3.8552,
+      "step": 307712
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.495369057032961e-05,
+      "loss": 3.8331,
+      "step": 308224
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4945304622819087e-05,
+      "loss": 3.8433,
+      "step": 308736
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4936918675308567e-05,
+      "loss": 3.829,
+      "step": 309248
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4928549106601776e-05,
+      "loss": 3.8446,
+      "step": 309760
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4920163159091256e-05,
+      "loss": 3.8374,
+      "step": 310272
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4911777211580736e-05,
+      "loss": 3.8412,
+      "step": 310784
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4903391264070216e-05,
+      "loss": 3.8502,
+      "step": 311296
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4895038074167154e-05,
+      "loss": 3.8302,
+      "step": 311808
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4886652126656634e-05,
+      "loss": 3.8402,
+      "step": 312320
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4878266179146114e-05,
+      "loss": 3.8316,
+      "step": 312832
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.48698802316356e-05,
+      "loss": 3.8354,
+      "step": 313344
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.486149428412508e-05,
+      "loss": 3.8241,
+      "step": 313856
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.485310833661456e-05,
+      "loss": 3.8335,
+      "step": 314368
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.484472238910404e-05,
+      "loss": 3.8309,
+      "step": 314880
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.483633644159352e-05,
+      "loss": 3.8528,
+      "step": 315392
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.482796687288673e-05,
+      "loss": 3.8306,
+      "step": 315904
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.481958092537621e-05,
+      "loss": 3.8392,
+      "step": 316416
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.481119497786569e-05,
+      "loss": 3.8439,
+      "step": 316928
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.480280903035516e-05,
+      "loss": 3.8452,
+      "step": 317440
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.479443946164838e-05,
+      "loss": 3.8225,
+      "step": 317952
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.478605351413786e-05,
+      "loss": 3.8355,
+      "step": 318464
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.477766756662734e-05,
+      "loss": 3.824,
+      "step": 318976
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.476928161911682e-05,
+      "loss": 3.8337,
+      "step": 319488
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4760912050410034e-05,
+      "loss": 3.8208,
+      "step": 320000
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4752526102899514e-05,
+      "loss": 3.8259,
+      "step": 320512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4744140155388994e-05,
+      "loss": 3.8349,
+      "step": 321024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.47357705866822e-05,
+      "loss": 3.8361,
+      "step": 321536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.472738463917168e-05,
+      "loss": 3.8263,
+      "step": 322048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.471899869166116e-05,
+      "loss": 3.8363,
+      "step": 322560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4710612744150636e-05,
+      "loss": 3.8283,
+      "step": 323072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4702226796640116e-05,
+      "loss": 3.835,
+      "step": 323584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4693840849129596e-05,
+      "loss": 3.8163,
+      "step": 324096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4685454901619076e-05,
+      "loss": 3.8283,
+      "step": 324608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4677085332912285e-05,
+      "loss": 3.8144,
+      "step": 325120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.466869938540177e-05,
+      "loss": 3.8218,
+      "step": 325632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.466031343789125e-05,
+      "loss": 3.8193,
+      "step": 326144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.465192749038073e-05,
+      "loss": 3.833,
+      "step": 326656
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.464355792167394e-05,
+      "loss": 3.8276,
+      "step": 327168
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.463517197416342e-05,
+      "loss": 3.8273,
+      "step": 327680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.46267860266529e-05,
+      "loss": 3.8304,
+      "step": 328192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.461840007914238e-05,
+      "loss": 3.8298,
+      "step": 328704
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.461003051043559e-05,
+      "loss": 3.825,
+      "step": 329216
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.460164456292507e-05,
+      "loss": 3.8173,
+      "step": 329728
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.459325861541455e-05,
+      "loss": 3.8084,
+      "step": 330240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.458487266790403e-05,
+      "loss": 3.8311,
+      "step": 330752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.457650309919724e-05,
+      "loss": 3.8191,
+      "step": 331264
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4568117151686725e-05,
+      "loss": 3.8138,
+      "step": 331776
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4559731204176205e-05,
+      "loss": 3.8147,
+      "step": 332288
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4551345256665685e-05,
+      "loss": 3.8211,
+      "step": 332800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4542975687958895e-05,
+      "loss": 3.8067,
+      "step": 333312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4534589740448374e-05,
+      "loss": 3.815,
+      "step": 333824
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4526203792937854e-05,
+      "loss": 3.8154,
+      "step": 334336
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4517817845427334e-05,
+      "loss": 3.815,
+      "step": 334848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4509448276720543e-05,
+      "loss": 3.8267,
+      "step": 335360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4501062329210023e-05,
+      "loss": 3.8071,
+      "step": 335872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.44926763816995e-05,
+      "loss": 3.8088,
+      "step": 336384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.448429043418898e-05,
+      "loss": 3.8219,
+      "step": 336896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.447592086548219e-05,
+      "loss": 3.8072,
+      "step": 337408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.446753491797168e-05,
+      "loss": 3.8104,
+      "step": 337920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.445914897046116e-05,
+      "loss": 3.8209,
+      "step": 338432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.445076302295064e-05,
+      "loss": 3.8202,
+      "step": 338944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.444239345424385e-05,
+      "loss": 3.8049,
+      "step": 339456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.443400750673333e-05,
+      "loss": 3.799,
+      "step": 339968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.442562155922281e-05,
+      "loss": 3.8015,
+      "step": 340480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.441723561171229e-05,
+      "loss": 3.815,
+      "step": 340992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.44088660430055e-05,
+      "loss": 3.8258,
+      "step": 341504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.440048009549498e-05,
+      "loss": 3.8103,
+      "step": 342016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.439209414798446e-05,
+      "loss": 3.8225,
+      "step": 342528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.438370820047394e-05,
+      "loss": 3.8201,
+      "step": 343040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4375338631767146e-05,
+      "loss": 3.8249,
+      "step": 343552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.436695268425663e-05,
+      "loss": 3.8013,
+      "step": 344064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.435856673674611e-05,
+      "loss": 3.817,
+      "step": 344576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.435018078923559e-05,
+      "loss": 3.8096,
+      "step": 345088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.43418112205288e-05,
+      "loss": 3.8028,
+      "step": 345600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.433342527301828e-05,
+      "loss": 3.8178,
+      "step": 346112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.432503932550776e-05,
+      "loss": 3.8109,
+      "step": 346624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.431665337799724e-05,
+      "loss": 3.8089,
+      "step": 347136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.430828380929045e-05,
+      "loss": 3.8139,
+      "step": 347648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.429989786177993e-05,
+      "loss": 3.8079,
+      "step": 348160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.429151191426941e-05,
+      "loss": 3.8097,
+      "step": 348672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.428312596675889e-05,
+      "loss": 3.8063,
+      "step": 349184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.42747563980521e-05,
+      "loss": 3.8107,
+      "step": 349696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4266370450541586e-05,
+      "loss": 3.804,
+      "step": 350208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4257984503031066e-05,
+      "loss": 3.804,
+      "step": 350720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4249614934324276e-05,
+      "loss": 3.8121,
+      "step": 351232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4241228986813756e-05,
+      "loss": 3.8107,
+      "step": 351744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4232843039303235e-05,
+      "loss": 3.7989,
+      "step": 352256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4224457091792715e-05,
+      "loss": 3.8007,
+      "step": 352768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4216071144282195e-05,
+      "loss": 3.8191,
+      "step": 353280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4207685196771675e-05,
+      "loss": 3.8115,
+      "step": 353792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4199299249261155e-05,
+      "loss": 3.8091,
+      "step": 354304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4190913301750635e-05,
+      "loss": 3.7965,
+      "step": 354816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4182543733043844e-05,
+      "loss": 3.8048,
+      "step": 355328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4174157785533324e-05,
+      "loss": 3.8008,
+      "step": 355840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4165771838022804e-05,
+      "loss": 3.8148,
+      "step": 356352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4157385890512284e-05,
+      "loss": 3.8129,
+      "step": 356864
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.41490163218055e-05,
+      "loss": 3.8011,
+      "step": 357376
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.414063037429498e-05,
+      "loss": 3.7958,
+      "step": 357888
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.413224442678446e-05,
+      "loss": 3.8085,
+      "step": 358400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.412385847927394e-05,
+      "loss": 3.7782,
+      "step": 358912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.411548891056715e-05,
+      "loss": 3.8026,
+      "step": 359424
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.410710296305663e-05,
+      "loss": 3.7934,
+      "step": 359936
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.409871701554611e-05,
+      "loss": 3.8012,
+      "step": 360448
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.409033106803559e-05,
+      "loss": 3.7877,
+      "step": 360960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.40819614993288e-05,
+      "loss": 3.8055,
+      "step": 361472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.407357555181828e-05,
+      "loss": 3.7981,
+      "step": 361984
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.406518960430776e-05,
+      "loss": 3.8002,
+      "step": 362496
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.405680365679724e-05,
+      "loss": 3.8047,
+      "step": 363008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4048434088090454e-05,
+      "loss": 3.7932,
+      "step": 363520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4040048140579934e-05,
+      "loss": 3.8115,
+      "step": 364032
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4031662193069414e-05,
+      "loss": 3.8228,
+      "step": 364544
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4023276245558894e-05,
+      "loss": 3.8003,
+      "step": 365056
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.40149066768521e-05,
+      "loss": 3.7939,
+      "step": 365568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.400652072934158e-05,
+      "loss": 3.8045,
+      "step": 366080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.399813478183106e-05,
+      "loss": 3.7977,
+      "step": 366592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.398974883432054e-05,
+      "loss": 3.7916,
+      "step": 367104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.398137926561375e-05,
+      "loss": 3.8108,
+      "step": 367616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.397299331810323e-05,
+      "loss": 3.7898,
+      "step": 368128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.396460737059271e-05,
+      "loss": 3.8039,
+      "step": 368640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.395623780188593e-05,
+      "loss": 3.8003,
+      "step": 369152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.394785185437541e-05,
+      "loss": 3.8014,
+      "step": 369664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.393946590686489e-05,
+      "loss": 3.7911,
+      "step": 370176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.393107995935437e-05,
+      "loss": 3.7952,
+      "step": 370688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3922710390647576e-05,
+      "loss": 3.7939,
+      "step": 371200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3914324443137056e-05,
+      "loss": 3.8071,
+      "step": 371712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3905938495626536e-05,
+      "loss": 3.796,
+      "step": 372224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3897552548116016e-05,
+      "loss": 3.8007,
+      "step": 372736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3889182979409225e-05,
+      "loss": 3.7824,
+      "step": 373248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3880797031898705e-05,
+      "loss": 3.8046,
+      "step": 373760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3872411084388185e-05,
+      "loss": 3.7967,
+      "step": 374272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3864025136877665e-05,
+      "loss": 3.8011,
+      "step": 374784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.385565556817088e-05,
+      "loss": 3.7939,
+      "step": 375296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.384726962066036e-05,
+      "loss": 3.8049,
+      "step": 375808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.383888367314984e-05,
+      "loss": 3.8004,
+      "step": 376320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3830497725639314e-05,
+      "loss": 3.8074,
+      "step": 376832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.382212815693253e-05,
+      "loss": 3.7845,
+      "step": 377344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.381374220942201e-05,
+      "loss": 3.7958,
+      "step": 377856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.380535626191148e-05,
+      "loss": 3.7914,
+      "step": 378368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.379697031440096e-05,
+      "loss": 3.7859,
+      "step": 378880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.378860074569418e-05,
+      "loss": 3.7981,
+      "step": 379392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.378021479818366e-05,
+      "loss": 3.7987,
+      "step": 379904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.377182885067313e-05,
+      "loss": 3.8004,
+      "step": 380416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.376344290316262e-05,
+      "loss": 3.7805,
+      "step": 380928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3755073334455835e-05,
+      "loss": 3.7987,
+      "step": 381440
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.887401580810547,
+      "eval_runtime": 319.04,
+      "eval_samples_per_second": 1196.06,
+      "eval_steps_per_second": 37.378,
+      "step": 381600
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.3746687386945315e-05,
+      "loss": 3.7823,
+      "step": 381952
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.373830143943479e-05,
+      "loss": 3.7811,
+      "step": 382464
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.372991549192427e-05,
+      "loss": 3.7973,
+      "step": 382976
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.372152954441375e-05,
+      "loss": 3.7922,
+      "step": 383488
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.371314359690323e-05,
+      "loss": 3.7973,
+      "step": 384000
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.370475764939271e-05,
+      "loss": 3.7837,
+      "step": 384512
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.369637170188219e-05,
+      "loss": 3.7896,
+      "step": 385024
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.368798575437167e-05,
+      "loss": 3.7781,
+      "step": 385536
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.367961618566488e-05,
+      "loss": 3.7895,
+      "step": 386048
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.367123023815436e-05,
+      "loss": 3.7862,
+      "step": 386560
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.366284429064384e-05,
+      "loss": 3.7883,
+      "step": 387072
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.365445834313332e-05,
+      "loss": 3.7974,
+      "step": 387584
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.364610515323026e-05,
+      "loss": 3.7804,
+      "step": 388096
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.363771920571974e-05,
+      "loss": 3.7907,
+      "step": 388608
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.362933325820922e-05,
+      "loss": 3.7816,
+      "step": 389120
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.36209473106987e-05,
+      "loss": 3.7819,
+      "step": 389632
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.361256136318818e-05,
+      "loss": 3.7737,
+      "step": 390144
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.360417541567766e-05,
+      "loss": 3.7835,
+      "step": 390656
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.359578946816714e-05,
+      "loss": 3.7775,
+      "step": 391168
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.358740352065662e-05,
+      "loss": 3.798,
+      "step": 391680
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.357903395194983e-05,
+      "loss": 3.7854,
+      "step": 392192
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.357064800443931e-05,
+      "loss": 3.7871,
+      "step": 392704
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.356226205692879e-05,
+      "loss": 3.7934,
+      "step": 393216
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.355387610941827e-05,
+      "loss": 3.7932,
+      "step": 393728
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.3545506540711486e-05,
+      "loss": 3.7684,
+      "step": 394240
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.3537120593200966e-05,
+      "loss": 3.7852,
+      "step": 394752
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.3528734645690446e-05,
+      "loss": 3.7766,
+      "step": 395264
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.3520365076983655e-05,
+      "loss": 3.7803,
+      "step": 395776
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.3511979129473135e-05,
+      "loss": 3.771,
+      "step": 396288
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.3503593181962615e-05,
+      "loss": 3.773,
+      "step": 396800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3495207234452095e-05,
+      "loss": 3.7863,
+      "step": 397312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3486837665745304e-05,
+      "loss": 3.7834,
+      "step": 397824
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3478451718234784e-05,
+      "loss": 3.7783,
+      "step": 398336
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3470065770724264e-05,
+      "loss": 3.7818,
+      "step": 398848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3461679823213744e-05,
+      "loss": 3.7804,
+      "step": 399360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3453293875703224e-05,
+      "loss": 3.7819,
+      "step": 399872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.344490792819271e-05,
+      "loss": 3.7701,
+      "step": 400384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.343652198068219e-05,
+      "loss": 3.7708,
+      "step": 400896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.34281524119754e-05,
+      "loss": 3.7693,
+      "step": 401408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.341976646446488e-05,
+      "loss": 3.7692,
+      "step": 401920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.341138051695436e-05,
+      "loss": 3.7699,
+      "step": 402432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.340299456944384e-05,
+      "loss": 3.7819,
+      "step": 402944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.339462500073705e-05,
+      "loss": 3.7799,
+      "step": 403456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.338623905322653e-05,
+      "loss": 3.7771,
+      "step": 403968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.337785310571601e-05,
+      "loss": 3.786,
+      "step": 404480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.336946715820549e-05,
+      "loss": 3.7757,
+      "step": 404992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.33610975894987e-05,
+      "loss": 3.7785,
+      "step": 405504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.335271164198818e-05,
+      "loss": 3.7679,
+      "step": 406016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3344325694477664e-05,
+      "loss": 3.7558,
+      "step": 406528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3335939746967144e-05,
+      "loss": 3.7832,
+      "step": 407040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3327570178260353e-05,
+      "loss": 3.7695,
+      "step": 407552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.331918423074983e-05,
+      "loss": 3.7667,
+      "step": 408064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.331079828323931e-05,
+      "loss": 3.7664,
+      "step": 408576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.330241233572879e-05,
+      "loss": 3.7689,
+      "step": 409088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3294042767022e-05,
+      "loss": 3.7592,
+      "step": 409600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.328565681951148e-05,
+      "loss": 3.7668,
+      "step": 410112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.327727087200096e-05,
+      "loss": 3.7693,
+      "step": 410624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.326888492449044e-05,
+      "loss": 3.7661,
+      "step": 411136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.326051535578365e-05,
+      "loss": 3.7774,
+      "step": 411648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.325212940827313e-05,
+      "loss": 3.7597,
+      "step": 412160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.324374346076261e-05,
+      "loss": 3.7607,
+      "step": 412672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.32353575132521e-05,
+      "loss": 3.7751,
+      "step": 413184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.322698794454531e-05,
+      "loss": 3.7586,
+      "step": 413696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.321860199703479e-05,
+      "loss": 3.761,
+      "step": 414208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.321021604952427e-05,
+      "loss": 3.7688,
+      "step": 414720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.320183010201374e-05,
+      "loss": 3.7737,
+      "step": 415232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3193460533306956e-05,
+      "loss": 3.7573,
+      "step": 415744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3185074585796436e-05,
+      "loss": 3.7564,
+      "step": 416256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3176688638285916e-05,
+      "loss": 3.7504,
+      "step": 416768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.316830269077539e-05,
+      "loss": 3.7691,
+      "step": 417280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3159933122068605e-05,
+      "loss": 3.7771,
+      "step": 417792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3151547174558085e-05,
+      "loss": 3.7661,
+      "step": 418304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3143161227047565e-05,
+      "loss": 3.7691,
+      "step": 418816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3134775279537045e-05,
+      "loss": 3.7712,
+      "step": 419328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.312640571083026e-05,
+      "loss": 3.7812,
+      "step": 419840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.311801976331974e-05,
+      "loss": 3.7557,
+      "step": 420352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3109633815809214e-05,
+      "loss": 3.772,
+      "step": 420864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3101247868298694e-05,
+      "loss": 3.7635,
+      "step": 421376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.309287829959191e-05,
+      "loss": 3.7533,
+      "step": 421888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.308449235208139e-05,
+      "loss": 3.771,
+      "step": 422400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.307610640457086e-05,
+      "loss": 3.7652,
+      "step": 422912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.306772045706034e-05,
+      "loss": 3.7638,
+      "step": 423424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.305935088835356e-05,
+      "loss": 3.7685,
+      "step": 423936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.305096494084304e-05,
+      "loss": 3.7627,
+      "step": 424448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.304257899333252e-05,
+      "loss": 3.7571,
+      "step": 424960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3034193045822e-05,
+      "loss": 3.7621,
+      "step": 425472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3025823477115214e-05,
+      "loss": 3.766,
+      "step": 425984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.301743752960469e-05,
+      "loss": 3.7532,
+      "step": 426496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.3009067960897903e-05,
+      "loss": 3.7593,
+      "step": 427008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3000682013387383e-05,
+      "loss": 3.768,
+      "step": 427520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.299229606587686e-05,
+      "loss": 3.7635,
+      "step": 428032
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2983910118366337e-05,
+      "loss": 3.7538,
+      "step": 428544
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2975524170855817e-05,
+      "loss": 3.7575,
+      "step": 429056
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2967138223345296e-05,
+      "loss": 3.7731,
+      "step": 429568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2958752275834776e-05,
+      "loss": 3.7661,
+      "step": 430080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2950366328324256e-05,
+      "loss": 3.7654,
+      "step": 430592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.294199675961747e-05,
+      "loss": 3.7465,
+      "step": 431104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.293361081210695e-05,
+      "loss": 3.7601,
+      "step": 431616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.292522486459643e-05,
+      "loss": 3.7571,
+      "step": 432128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.291683891708591e-05,
+      "loss": 3.7657,
+      "step": 432640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.290846934837912e-05,
+      "loss": 3.7676,
+      "step": 433152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.29000834008686e-05,
+      "loss": 3.7564,
+      "step": 433664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.289169745335808e-05,
+      "loss": 3.7485,
+      "step": 434176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.288332788465129e-05,
+      "loss": 3.7623,
+      "step": 434688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.287494193714077e-05,
+      "loss": 3.7343,
+      "step": 435200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.286655598963025e-05,
+      "loss": 3.7562,
+      "step": 435712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.285817004211973e-05,
+      "loss": 3.7453,
+      "step": 436224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2849800473412946e-05,
+      "loss": 3.7603,
+      "step": 436736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2841414525902426e-05,
+      "loss": 3.7433,
+      "step": 437248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2833028578391906e-05,
+      "loss": 3.7595,
+      "step": 437760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2824642630881386e-05,
+      "loss": 3.7506,
+      "step": 438272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2816273062174595e-05,
+      "loss": 3.7565,
+      "step": 438784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2807887114664075e-05,
+      "loss": 3.7606,
+      "step": 439296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2799501167153555e-05,
+      "loss": 3.7544,
+      "step": 439808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2791115219643035e-05,
+      "loss": 3.7652,
+      "step": 440320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2782745650936244e-05,
+      "loss": 3.7748,
+      "step": 440832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2774359703425724e-05,
+      "loss": 3.7568,
+      "step": 441344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2765973755915204e-05,
+      "loss": 3.7516,
+      "step": 441856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.2757587808404684e-05,
+      "loss": 3.7529,
+      "step": 442368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.27492182396979e-05,
+      "loss": 3.7562,
+      "step": 442880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.274083229218738e-05,
+      "loss": 3.7439,
+      "step": 443392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.273244634467686e-05,
+      "loss": 3.7705,
+      "step": 443904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.272406039716634e-05,
+      "loss": 3.7414,
+      "step": 444416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.271569082845955e-05,
+      "loss": 3.7639,
+      "step": 444928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.270730488094903e-05,
+      "loss": 3.7551,
+      "step": 445440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.269891893343851e-05,
+      "loss": 3.7561,
+      "step": 445952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.269053298592799e-05,
+      "loss": 3.7468,
+      "step": 446464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.26821634172212e-05,
+      "loss": 3.7541,
+      "step": 446976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.267377746971068e-05,
+      "loss": 3.7472,
+      "step": 447488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.266539152220016e-05,
+      "loss": 3.7645,
+      "step": 448000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.265700557468964e-05,
+      "loss": 3.7519,
+      "step": 448512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.264863600598285e-05,
+      "loss": 3.7601,
+      "step": 449024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.264026643727606e-05,
+      "loss": 3.7405,
+      "step": 449536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.263188048976554e-05,
+      "loss": 3.7611,
+      "step": 450048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.262349454225502e-05,
+      "loss": 3.751,
+      "step": 450560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.26151085947445e-05,
+      "loss": 3.7563,
+      "step": 451072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.260673902603771e-05,
+      "loss": 3.7519,
+      "step": 451584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.259835307852719e-05,
+      "loss": 3.7593,
+      "step": 452096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.258996713101667e-05,
+      "loss": 3.7542,
+      "step": 452608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.258158118350615e-05,
+      "loss": 3.7689,
+      "step": 453120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.257319523599563e-05,
+      "loss": 3.7401,
+      "step": 453632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.256480928848511e-05,
+      "loss": 3.7535,
+      "step": 454144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.255643971977833e-05,
+      "loss": 3.7474,
+      "step": 454656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.254805377226781e-05,
+      "loss": 3.747,
+      "step": 455168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.253966782475729e-05,
+      "loss": 3.7489,
+      "step": 455680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.253128187724677e-05,
+      "loss": 3.7572,
+      "step": 456192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.252289592973625e-05,
+      "loss": 3.7582,
+      "step": 456704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.251450998222573e-05,
+      "loss": 3.7377,
+      "step": 457216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.250612403471521e-05,
+      "loss": 3.7585,
+      "step": 457728
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.8706963062286377,
+      "eval_runtime": 304.4588,
+      "eval_samples_per_second": 1253.342,
+      "eval_steps_per_second": 39.168,
+      "step": 457920
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.249773808720469e-05,
+      "loss": 3.7489,
+      "step": 458240
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.2489368518497896e-05,
+      "loss": 3.7356,
+      "step": 458752
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.2480982570987376e-05,
+      "loss": 3.7563,
+      "step": 459264
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.2472596623476856e-05,
+      "loss": 3.749,
+      "step": 459776
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.2464210675966336e-05,
+      "loss": 3.7559,
+      "step": 460288
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.2455841107259545e-05,
+      "loss": 3.7414,
+      "step": 460800
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.2447455159749025e-05,
+      "loss": 3.7422,
+      "step": 461312
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.243906921223851e-05,
+      "loss": 3.7395,
+      "step": 461824
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.243068326472799e-05,
+      "loss": 3.7463,
+      "step": 462336
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.24223136960212e-05,
+      "loss": 3.7443,
+      "step": 462848
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.241392774851068e-05,
+      "loss": 3.7479,
+      "step": 463360
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.240555817980389e-05,
+      "loss": 3.7553,
+      "step": 463872
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.23971886110971e-05,
+      "loss": 3.7412,
+      "step": 464384
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.238880266358658e-05,
+      "loss": 3.743,
+      "step": 464896
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.238041671607606e-05,
+      "loss": 3.7433,
+      "step": 465408
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.237203076856554e-05,
+      "loss": 3.7384,
+      "step": 465920
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.236364482105502e-05,
+      "loss": 3.7309,
+      "step": 466432
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.23552588735445e-05,
+      "loss": 3.74,
+      "step": 466944
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.234687292603398e-05,
+      "loss": 3.7369,
+      "step": 467456
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.233848697852346e-05,
+      "loss": 3.7552,
+      "step": 467968
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.2330101031012945e-05,
+      "loss": 3.7446,
+      "step": 468480
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.232171508350242e-05,
+      "loss": 3.7482,
+      "step": 468992
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.2313345514795634e-05,
+      "loss": 3.7495,
+      "step": 469504
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.2304959567285114e-05,
+      "loss": 3.7496,
+      "step": 470016
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.229657361977459e-05,
+      "loss": 3.7321,
+      "step": 470528
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.228818767226407e-05,
+      "loss": 3.7451,
+      "step": 471040
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.227980172475355e-05,
+      "loss": 3.7311,
+      "step": 471552
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.227143215604676e-05,
+      "loss": 3.7394,
+      "step": 472064
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.2263046208536236e-05,
+      "loss": 3.7345,
+      "step": 472576
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.2254660261025716e-05,
+      "loss": 3.7303,
+      "step": 473088
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2246274313515196e-05,
+      "loss": 3.746,
+      "step": 473600
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.223790474480841e-05,
+      "loss": 3.7423,
+      "step": 474112
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.222951879729789e-05,
+      "loss": 3.7365,
+      "step": 474624
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.222113284978737e-05,
+      "loss": 3.7423,
+      "step": 475136
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.221274690227685e-05,
+      "loss": 3.7375,
+      "step": 475648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.220436095476633e-05,
+      "loss": 3.7434,
+      "step": 476160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.219597500725581e-05,
+      "loss": 3.7316,
+      "step": 476672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.218758905974529e-05,
+      "loss": 3.7311,
+      "step": 477184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.217920311223477e-05,
+      "loss": 3.7258,
+      "step": 477696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.217084992233171e-05,
+      "loss": 3.732,
+      "step": 478208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.216246397482119e-05,
+      "loss": 3.7317,
+      "step": 478720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.215407802731067e-05,
+      "loss": 3.7421,
+      "step": 479232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.214569207980015e-05,
+      "loss": 3.7376,
+      "step": 479744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2137306132289637e-05,
+      "loss": 3.7372,
+      "step": 480256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2128920184779116e-05,
+      "loss": 3.7466,
+      "step": 480768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2120534237268596e-05,
+      "loss": 3.7367,
+      "step": 481280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2112164668561806e-05,
+      "loss": 3.7386,
+      "step": 481792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2103795099855015e-05,
+      "loss": 3.7256,
+      "step": 482304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2095409152344495e-05,
+      "loss": 3.7149,
+      "step": 482816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2087023204833975e-05,
+      "loss": 3.7462,
+      "step": 483328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2078637257323454e-05,
+      "loss": 3.7314,
+      "step": 483840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2070251309812934e-05,
+      "loss": 3.7238,
+      "step": 484352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2061865362302414e-05,
+      "loss": 3.7317,
+      "step": 484864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2053479414791894e-05,
+      "loss": 3.7246,
+      "step": 485376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.2045093467281374e-05,
+      "loss": 3.7247,
+      "step": 485888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.203672389857459e-05,
+      "loss": 3.7229,
+      "step": 486400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.20283543298678e-05,
+      "loss": 3.7285,
+      "step": 486912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.201996838235728e-05,
+      "loss": 3.7282,
+      "step": 487424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.201158243484676e-05,
+      "loss": 3.7417,
+      "step": 487936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.200319648733624e-05,
+      "loss": 3.7183,
+      "step": 488448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.199481053982572e-05,
+      "loss": 3.7182,
+      "step": 488960
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.198644097111893e-05,
+      "loss": 3.7433,
+      "step": 489472
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.197805502360841e-05,
+      "loss": 3.7138,
+      "step": 489984
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.196966907609789e-05,
+      "loss": 3.7213,
+      "step": 490496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.196128312858737e-05,
+      "loss": 3.7313,
+      "step": 491008
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.195289718107685e-05,
+      "loss": 3.7372,
+      "step": 491520
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.194451123356633e-05,
+      "loss": 3.7174,
+      "step": 492032
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.193612528605581e-05,
+      "loss": 3.718,
+      "step": 492544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.192773933854529e-05,
+      "loss": 3.7106,
+      "step": 493056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.1919369769838504e-05,
+      "loss": 3.725,
+      "step": 493568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.1910983822327984e-05,
+      "loss": 3.7402,
+      "step": 494080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.1902597874817464e-05,
+      "loss": 3.7271,
+      "step": 494592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.1894211927306944e-05,
+      "loss": 3.7287,
+      "step": 495104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.188584235860015e-05,
+      "loss": 3.7344,
+      "step": 495616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.187747278989336e-05,
+      "loss": 3.7407,
+      "step": 496128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.186908684238284e-05,
+      "loss": 3.7191,
+      "step": 496640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.186070089487232e-05,
+      "loss": 3.7321,
+      "step": 497152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.18523149473618e-05,
+      "loss": 3.7242,
+      "step": 497664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.184392899985128e-05,
+      "loss": 3.7144,
+      "step": 498176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.183554305234076e-05,
+      "loss": 3.7312,
+      "step": 498688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.182715710483024e-05,
+      "loss": 3.7291,
+      "step": 499200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.181877115731972e-05,
+      "loss": 3.7283,
+      "step": 499712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.181040158861294e-05,
+      "loss": 3.7273,
+      "step": 500224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.180201564110242e-05,
+      "loss": 3.728,
+      "step": 500736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.17936296935919e-05,
+      "loss": 3.7175,
+      "step": 501248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.178524374608137e-05,
+      "loss": 3.7265,
+      "step": 501760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.1776874177374586e-05,
+      "loss": 3.7271,
+      "step": 502272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.1768488229864066e-05,
+      "loss": 3.7192,
+      "step": 502784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.1760102282353546e-05,
+      "loss": 3.7224,
+      "step": 503296
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1751732713646755e-05,
+      "loss": 3.7246,
+      "step": 503808
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1743346766136235e-05,
+      "loss": 3.7299,
+      "step": 504320
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1734960818625715e-05,
+      "loss": 3.7183,
+      "step": 504832
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1726574871115195e-05,
+      "loss": 3.7165,
+      "step": 505344
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1718188923604675e-05,
+      "loss": 3.7353,
+      "step": 505856
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1709802976094155e-05,
+      "loss": 3.7276,
+      "step": 506368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.170143340738737e-05,
+      "loss": 3.7272,
+      "step": 506880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1693047459876844e-05,
+      "loss": 3.7095,
+      "step": 507392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1684661512366324e-05,
+      "loss": 3.7258,
+      "step": 507904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1676275564855804e-05,
+      "loss": 3.7173,
+      "step": 508416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.166790599614902e-05,
+      "loss": 3.726,
+      "step": 508928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.165952004863849e-05,
+      "loss": 3.7339,
+      "step": 509440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.165113410112797e-05,
+      "loss": 3.7174,
+      "step": 509952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.164274815361745e-05,
+      "loss": 3.7154,
+      "step": 510464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.163436220610693e-05,
+      "loss": 3.7217,
+      "step": 510976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.162597625859642e-05,
+      "loss": 3.6972,
+      "step": 511488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.16175903110859e-05,
+      "loss": 3.7241,
+      "step": 512000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.160920436357538e-05,
+      "loss": 3.7066,
+      "step": 512512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.160083479486859e-05,
+      "loss": 3.7243,
+      "step": 513024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.159244884735807e-05,
+      "loss": 3.7075,
+      "step": 513536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.158406289984755e-05,
+      "loss": 3.722,
+      "step": 514048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.157567695233703e-05,
+      "loss": 3.7152,
+      "step": 514560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.156729100482651e-05,
+      "loss": 3.7172,
+      "step": 515072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.155890505731599e-05,
+      "loss": 3.729,
+      "step": 515584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.155051910980547e-05,
+      "loss": 3.7138,
+      "step": 516096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.154213316229495e-05,
+      "loss": 3.7277,
+      "step": 516608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.153377997239189e-05,
+      "loss": 3.7357,
+      "step": 517120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.152539402488137e-05,
+      "loss": 3.7186,
+      "step": 517632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.151700807737085e-05,
+      "loss": 3.7209,
+      "step": 518144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.150862212986033e-05,
+      "loss": 3.7118,
+      "step": 518656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.150023618234981e-05,
+      "loss": 3.7202,
+      "step": 519168
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.149185023483929e-05,
+      "loss": 3.7122,
+      "step": 519680
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.14834806661325e-05,
+      "loss": 3.7308,
+      "step": 520192
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.147509471862198e-05,
+      "loss": 3.7017,
+      "step": 520704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.146670877111146e-05,
+      "loss": 3.7262,
+      "step": 521216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.145832282360094e-05,
+      "loss": 3.7224,
+      "step": 521728
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.144995325489415e-05,
+      "loss": 3.7185,
+      "step": 522240
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.144156730738363e-05,
+      "loss": 3.709,
+      "step": 522752
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.143318135987311e-05,
+      "loss": 3.7213,
+      "step": 523264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.142479541236259e-05,
+      "loss": 3.7078,
+      "step": 523776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.141640946485207e-05,
+      "loss": 3.7301,
+      "step": 524288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.140802351734155e-05,
+      "loss": 3.7134,
+      "step": 524800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.139965394863477e-05,
+      "loss": 3.7243,
+      "step": 525312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.139126800112425e-05,
+      "loss": 3.7009,
+      "step": 525824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.138288205361373e-05,
+      "loss": 3.7291,
+      "step": 526336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.137449610610321e-05,
+      "loss": 3.7166,
+      "step": 526848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1366126537396416e-05,
+      "loss": 3.722,
+      "step": 527360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1357740589885896e-05,
+      "loss": 3.7149,
+      "step": 527872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1349354642375376e-05,
+      "loss": 3.7159,
+      "step": 528384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1340968694864856e-05,
+      "loss": 3.7228,
+      "step": 528896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.133258274735433e-05,
+      "loss": 3.735,
+      "step": 529408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.132419679984381e-05,
+      "loss": 3.7012,
+      "step": 529920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1315827231137025e-05,
+      "loss": 3.7176,
+      "step": 530432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1307441283626505e-05,
+      "loss": 3.7117,
+      "step": 530944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1299055336115985e-05,
+      "loss": 3.7148,
+      "step": 531456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1290669388605465e-05,
+      "loss": 3.7095,
+      "step": 531968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1282283441094945e-05,
+      "loss": 3.7203,
+      "step": 532480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1273897493584425e-05,
+      "loss": 3.7206,
+      "step": 532992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1265527924877634e-05,
+      "loss": 3.7048,
+      "step": 533504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.1257141977367114e-05,
+      "loss": 3.7256,
+      "step": 534016
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.860018014907837,
+      "eval_runtime": 304.9784,
+      "eval_samples_per_second": 1251.207,
+      "eval_steps_per_second": 39.101,
+      "step": 534240
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.1248756029856594e-05,
+      "loss": 3.7097,
+      "step": 534528
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.1240370082346074e-05,
+      "loss": 3.7025,
+      "step": 535040
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.1231984134835553e-05,
+      "loss": 3.7194,
+      "step": 535552
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.1223598187325033e-05,
+      "loss": 3.7099,
+      "step": 536064
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.121522861861824e-05,
+      "loss": 3.7192,
+      "step": 536576
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.120684267110773e-05,
+      "loss": 3.7074,
+      "step": 537088
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.119845672359721e-05,
+      "loss": 3.7111,
+      "step": 537600
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.119007077608669e-05,
+      "loss": 3.7022,
+      "step": 538112
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.118168482857617e-05,
+      "loss": 3.7134,
+      "step": 538624
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.117329888106565e-05,
+      "loss": 3.7048,
+      "step": 539136
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.116491293355513e-05,
+      "loss": 3.7141,
+      "step": 539648
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.115654336484834e-05,
+      "loss": 3.7206,
+      "step": 540160
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.114817379614155e-05,
+      "loss": 3.7108,
+      "step": 540672
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.113978784863103e-05,
+      "loss": 3.7069,
+      "step": 541184
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.113140190112051e-05,
+      "loss": 3.7061,
+      "step": 541696
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.112301595360999e-05,
+      "loss": 3.7011,
+      "step": 542208
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.111463000609947e-05,
+      "loss": 3.6991,
+      "step": 542720
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.110624405858895e-05,
+      "loss": 3.7039,
+      "step": 543232
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.109785811107843e-05,
+      "loss": 3.7058,
+      "step": 543744
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.108947216356791e-05,
+      "loss": 3.7136,
+      "step": 544256
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.1081086216057394e-05,
+      "loss": 3.7145,
+      "step": 544768
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.107270026854687e-05,
+      "loss": 3.7117,
+      "step": 545280
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.106433069984008e-05,
+      "loss": 3.7152,
+      "step": 545792
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.105594475232956e-05,
+      "loss": 3.7144,
+      "step": 546304
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.104755880481904e-05,
+      "loss": 3.6965,
+      "step": 546816
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.1039172857308516e-05,
+      "loss": 3.7136,
+      "step": 547328
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.1030786909797996e-05,
+      "loss": 3.7013,
+      "step": 547840
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.102241734109121e-05,
+      "loss": 3.7041,
+      "step": 548352
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.1014031393580685e-05,
+      "loss": 3.6981,
+      "step": 548864
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.1005645446070165e-05,
+      "loss": 3.7017,
+      "step": 549376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0997259498559645e-05,
+      "loss": 3.707,
+      "step": 549888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.098888992985286e-05,
+      "loss": 3.7122,
+      "step": 550400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.098050398234234e-05,
+      "loss": 3.7057,
+      "step": 550912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.097211803483182e-05,
+      "loss": 3.7084,
+      "step": 551424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.09637320873213e-05,
+      "loss": 3.6998,
+      "step": 551936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.095534613981078e-05,
+      "loss": 3.7098,
+      "step": 552448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.094696019230026e-05,
+      "loss": 3.7055,
+      "step": 552960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.093857424478974e-05,
+      "loss": 3.691,
+      "step": 553472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.093020467608295e-05,
+      "loss": 3.6925,
+      "step": 553984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.092181872857243e-05,
+      "loss": 3.6963,
+      "step": 554496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.091343278106191e-05,
+      "loss": 3.7022,
+      "step": 555008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.090504683355139e-05,
+      "loss": 3.7044,
+      "step": 555520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.08966772648446e-05,
+      "loss": 3.7061,
+      "step": 556032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0888291317334085e-05,
+      "loss": 3.7015,
+      "step": 556544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0879905369823565e-05,
+      "loss": 3.7147,
+      "step": 557056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0871519422313045e-05,
+      "loss": 3.7024,
+      "step": 557568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0863133474802525e-05,
+      "loss": 3.7073,
+      "step": 558080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0854763906095734e-05,
+      "loss": 3.693,
+      "step": 558592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0846377958585214e-05,
+      "loss": 3.6776,
+      "step": 559104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0837992011074694e-05,
+      "loss": 3.7132,
+      "step": 559616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0829606063564174e-05,
+      "loss": 3.6976,
+      "step": 560128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0821220116053654e-05,
+      "loss": 3.6966,
+      "step": 560640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.081285054734686e-05,
+      "loss": 3.6955,
+      "step": 561152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.080446459983634e-05,
+      "loss": 3.6928,
+      "step": 561664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.079607865232582e-05,
+      "loss": 3.6909,
+      "step": 562176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.07876927048153e-05,
+      "loss": 3.6892,
+      "step": 562688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.077932313610852e-05,
+      "loss": 3.6964,
+      "step": 563200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0770937188598e-05,
+      "loss": 3.6912,
+      "step": 563712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.076255124108748e-05,
+      "loss": 3.7101,
+      "step": 564224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.075416529357696e-05,
+      "loss": 3.6884,
+      "step": 564736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.074577934606644e-05,
+      "loss": 3.6865,
+      "step": 565248
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.073740977735965e-05,
+      "loss": 3.7086,
+      "step": 565760
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.072902382984913e-05,
+      "loss": 3.6801,
+      "step": 566272
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.072063788233861e-05,
+      "loss": 3.688,
+      "step": 566784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.071225193482809e-05,
+      "loss": 3.7002,
+      "step": 567296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.070386598731757e-05,
+      "loss": 3.7031,
+      "step": 567808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.069548003980705e-05,
+      "loss": 3.6861,
+      "step": 568320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.068709409229652e-05,
+      "loss": 3.6864,
+      "step": 568832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0678724523589736e-05,
+      "loss": 3.6806,
+      "step": 569344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.067033857607922e-05,
+      "loss": 3.6927,
+      "step": 569856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.06619526285687e-05,
+      "loss": 3.7065,
+      "step": 570368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0653566681058176e-05,
+      "loss": 3.694,
+      "step": 570880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0645180733547656e-05,
+      "loss": 3.6959,
+      "step": 571392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.063681116484087e-05,
+      "loss": 3.7025,
+      "step": 571904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0628425217330345e-05,
+      "loss": 3.708,
+      "step": 572416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0620039269819825e-05,
+      "loss": 3.6848,
+      "step": 572928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0611653322309305e-05,
+      "loss": 3.7048,
+      "step": 573440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.060328375360252e-05,
+      "loss": 3.6875,
+      "step": 573952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0594897806091994e-05,
+      "loss": 3.6884,
+      "step": 574464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.0586511858581474e-05,
+      "loss": 3.6941,
+      "step": 574976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.057812591107096e-05,
+      "loss": 3.7011,
+      "step": 575488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.056973996356044e-05,
+      "loss": 3.6926,
+      "step": 576000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.056137039485365e-05,
+      "loss": 3.6993,
+      "step": 576512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.055298444734313e-05,
+      "loss": 3.6929,
+      "step": 577024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.054459849983261e-05,
+      "loss": 3.683,
+      "step": 577536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.053621255232209e-05,
+      "loss": 3.6967,
+      "step": 578048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.052782660481157e-05,
+      "loss": 3.6941,
+      "step": 578560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.051945703610478e-05,
+      "loss": 3.6856,
+      "step": 579072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.051107108859426e-05,
+      "loss": 3.6912,
+      "step": 579584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.050268514108374e-05,
+      "loss": 3.6942,
+      "step": 580096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.049429919357322e-05,
+      "loss": 3.6986,
+      "step": 580608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.048592962486643e-05,
+      "loss": 3.6865,
+      "step": 581120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0477543677355915e-05,
+      "loss": 3.6843,
+      "step": 581632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0469157729845395e-05,
+      "loss": 3.7009,
+      "step": 582144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0460771782334875e-05,
+      "loss": 3.698,
+      "step": 582656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0452402213628084e-05,
+      "loss": 3.6945,
+      "step": 583168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0444016266117564e-05,
+      "loss": 3.6825,
+      "step": 583680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0435630318607044e-05,
+      "loss": 3.6898,
+      "step": 584192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0427244371096524e-05,
+      "loss": 3.6838,
+      "step": 584704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.041887480238973e-05,
+      "loss": 3.6953,
+      "step": 585216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.041048885487921e-05,
+      "loss": 3.6989,
+      "step": 585728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.040210290736869e-05,
+      "loss": 3.6905,
+      "step": 586240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.039371695985817e-05,
+      "loss": 3.6828,
+      "step": 586752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.038533101234765e-05,
+      "loss": 3.6847,
+      "step": 587264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.037696144364087e-05,
+      "loss": 3.6714,
+      "step": 587776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.036857549613035e-05,
+      "loss": 3.6869,
+      "step": 588288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.036018954861983e-05,
+      "loss": 3.6758,
+      "step": 588800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.035180360110931e-05,
+      "loss": 3.6942,
+      "step": 589312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.034343403240252e-05,
+      "loss": 3.6764,
+      "step": 589824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0335048084892e-05,
+      "loss": 3.6885,
+      "step": 590336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.032666213738148e-05,
+      "loss": 3.6833,
+      "step": 590848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.031827618987096e-05,
+      "loss": 3.6883,
+      "step": 591360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.030989024236044e-05,
+      "loss": 3.6921,
+      "step": 591872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0301520673653646e-05,
+      "loss": 3.6836,
+      "step": 592384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0293134726143126e-05,
+      "loss": 3.6964,
+      "step": 592896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0284748778632606e-05,
+      "loss": 3.7015,
+      "step": 593408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0276362831122086e-05,
+      "loss": 3.6871,
+      "step": 593920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.02679932624153e-05,
+      "loss": 3.6908,
+      "step": 594432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.025960731490478e-05,
+      "loss": 3.6808,
+      "step": 594944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.025122136739426e-05,
+      "loss": 3.6882,
+      "step": 595456
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.024283541988374e-05,
+      "loss": 3.6852,
+      "step": 595968
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.023444947237322e-05,
+      "loss": 3.693,
+      "step": 596480
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.022607990366643e-05,
+      "loss": 3.6751,
+      "step": 596992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.021769395615591e-05,
+      "loss": 3.6949,
+      "step": 597504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.020930800864539e-05,
+      "loss": 3.6911,
+      "step": 598016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.020092206113487e-05,
+      "loss": 3.6872,
+      "step": 598528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.019255249242808e-05,
+      "loss": 3.6759,
+      "step": 599040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.018416654491756e-05,
+      "loss": 3.6916,
+      "step": 599552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.017578059740704e-05,
+      "loss": 3.678,
+      "step": 600064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.016739464989652e-05,
+      "loss": 3.6966,
+      "step": 600576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0159025081189736e-05,
+      "loss": 3.6867,
+      "step": 601088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0150639133679216e-05,
+      "loss": 3.691,
+      "step": 601600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0142253186168696e-05,
+      "loss": 3.6705,
+      "step": 602112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0133867238658175e-05,
+      "loss": 3.7001,
+      "step": 602624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0125497669951385e-05,
+      "loss": 3.6828,
+      "step": 603136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0117111722440865e-05,
+      "loss": 3.6893,
+      "step": 603648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0108725774930344e-05,
+      "loss": 3.685,
+      "step": 604160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0100339827419824e-05,
+      "loss": 3.6852,
+      "step": 604672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.0091953879909304e-05,
+      "loss": 3.6926,
+      "step": 605184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.008356793239878e-05,
+      "loss": 3.7019,
+      "step": 605696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.007518198488826e-05,
+      "loss": 3.6722,
+      "step": 606208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.006681241618147e-05,
+      "loss": 3.6883,
+      "step": 606720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.005842646867095e-05,
+      "loss": 3.6822,
+      "step": 607232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.005004052116043e-05,
+      "loss": 3.6838,
+      "step": 607744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.004165457364991e-05,
+      "loss": 3.6782,
+      "step": 608256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.003328500494313e-05,
+      "loss": 3.6903,
+      "step": 608768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.00248990574326e-05,
+      "loss": 3.6917,
+      "step": 609280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.001651310992208e-05,
+      "loss": 3.6724,
+      "step": 609792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.000812716241156e-05,
+      "loss": 3.6925,
+      "step": 610304
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.8526084423065186,
+      "eval_runtime": 311.9659,
+      "eval_samples_per_second": 1223.182,
+      "eval_steps_per_second": 38.225,
+      "step": 610560
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.999974121490104e-05,
+      "loss": 3.6811,
+      "step": 610816
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.999137164619425e-05,
+      "loss": 3.6753,
+      "step": 611328
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.998298569868373e-05,
+      "loss": 3.6848,
+      "step": 611840
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.997459975117321e-05,
+      "loss": 3.6796,
+      "step": 612352
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.996621380366269e-05,
+      "loss": 3.6905,
+      "step": 612864
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.995782785615218e-05,
+      "loss": 3.6734,
+      "step": 613376
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.994945828744539e-05,
+      "loss": 3.6875,
+      "step": 613888
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.994107233993487e-05,
+      "loss": 3.6722,
+      "step": 614400
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.993268639242435e-05,
+      "loss": 3.6814,
+      "step": 614912
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.992430044491383e-05,
+      "loss": 3.677,
+      "step": 615424
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.9915930876207036e-05,
+      "loss": 3.6787,
+      "step": 615936
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.9907544928696516e-05,
+      "loss": 3.6906,
+      "step": 616448
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.9899175359989725e-05,
+      "loss": 3.6791,
+      "step": 616960
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.9890789412479205e-05,
+      "loss": 3.6785,
+      "step": 617472
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.9882403464968685e-05,
+      "loss": 3.681,
+      "step": 617984
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.9874017517458165e-05,
+      "loss": 3.6679,
+      "step": 618496
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.9865631569947645e-05,
+      "loss": 3.6685,
+      "step": 619008
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.985724562243713e-05,
+      "loss": 3.6766,
+      "step": 619520
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.984885967492661e-05,
+      "loss": 3.6745,
+      "step": 620032
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.984047372741609e-05,
+      "loss": 3.6778,
+      "step": 620544
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.983208777990557e-05,
+      "loss": 3.6857,
+      "step": 621056
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.982370183239505e-05,
+      "loss": 3.6838,
+      "step": 621568
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.981533226368826e-05,
+      "loss": 3.6859,
+      "step": 622080
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.980694631617774e-05,
+      "loss": 3.6826,
+      "step": 622592
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.979856036866722e-05,
+      "loss": 3.669,
+      "step": 623104
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.97901744211567e-05,
+      "loss": 3.6823,
+      "step": 623616
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.978180485244991e-05,
+      "loss": 3.6687,
+      "step": 624128
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.977341890493939e-05,
+      "loss": 3.6769,
+      "step": 624640
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.976503295742887e-05,
+      "loss": 3.6721,
+      "step": 625152
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.975664700991835e-05,
+      "loss": 3.6696,
+      "step": 625664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.974826106240783e-05,
+      "loss": 3.6759,
+      "step": 626176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9739891493701045e-05,
+      "loss": 3.6819,
+      "step": 626688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9731505546190525e-05,
+      "loss": 3.675,
+      "step": 627200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9723119598680005e-05,
+      "loss": 3.6783,
+      "step": 627712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9714733651169485e-05,
+      "loss": 3.6703,
+      "step": 628224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9706364082462694e-05,
+      "loss": 3.6829,
+      "step": 628736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9697978134952174e-05,
+      "loss": 3.6761,
+      "step": 629248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9689592187441654e-05,
+      "loss": 3.6637,
+      "step": 629760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9681206239931134e-05,
+      "loss": 3.6647,
+      "step": 630272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.967283667122434e-05,
+      "loss": 3.6638,
+      "step": 630784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.966445072371382e-05,
+      "loss": 3.6711,
+      "step": 631296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.96560647762033e-05,
+      "loss": 3.6763,
+      "step": 631808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.964767882869278e-05,
+      "loss": 3.6757,
+      "step": 632320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.963929288118226e-05,
+      "loss": 3.6745,
+      "step": 632832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.963092331247548e-05,
+      "loss": 3.6818,
+      "step": 633344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.962253736496496e-05,
+      "loss": 3.6738,
+      "step": 633856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.961415141745444e-05,
+      "loss": 3.6783,
+      "step": 634368
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.960576546994391e-05,
+      "loss": 3.6681,
+      "step": 634880
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.959739590123713e-05,
+      "loss": 3.6428,
+      "step": 635392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.958900995372661e-05,
+      "loss": 3.6904,
+      "step": 635904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.958062400621609e-05,
+      "loss": 3.6654,
+      "step": 636416
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.957223805870556e-05,
+      "loss": 3.6696,
+      "step": 636928
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.956386848999878e-05,
+      "loss": 3.6678,
+      "step": 637440
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.955548254248826e-05,
+      "loss": 3.6635,
+      "step": 637952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9547096594977737e-05,
+      "loss": 3.6583,
+      "step": 638464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9538710647467217e-05,
+      "loss": 3.6603,
+      "step": 638976
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9530324699956696e-05,
+      "loss": 3.6687,
+      "step": 639488
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.952195513124991e-05,
+      "loss": 3.6622,
+      "step": 640000
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9513569183739386e-05,
+      "loss": 3.6833,
+      "step": 640512
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9505183236228865e-05,
+      "loss": 3.6613,
+      "step": 641024
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9496797288718345e-05,
+      "loss": 3.6545,
+      "step": 641536
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9488411341207825e-05,
+      "loss": 3.6784,
+      "step": 642048
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9480041772501035e-05,
+      "loss": 3.6547,
+      "step": 642560
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9471655824990514e-05,
+      "loss": 3.6585,
+      "step": 643072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9463269877479994e-05,
+      "loss": 3.674,
+      "step": 643584
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.945490030877321e-05,
+      "loss": 3.6739,
+      "step": 644096
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.944651436126269e-05,
+      "loss": 3.6581,
+      "step": 644608
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.943812841375217e-05,
+      "loss": 3.658,
+      "step": 645120
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.942974246624165e-05,
+      "loss": 3.6524,
+      "step": 645632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.942137289753486e-05,
+      "loss": 3.6638,
+      "step": 646144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.941298695002434e-05,
+      "loss": 3.678,
+      "step": 646656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.940460100251382e-05,
+      "loss": 3.667,
+      "step": 647168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.93962150550033e-05,
+      "loss": 3.6704,
+      "step": 647680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.938782910749278e-05,
+      "loss": 3.6726,
+      "step": 648192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.937945953878599e-05,
+      "loss": 3.6791,
+      "step": 648704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.937107359127547e-05,
+      "loss": 3.6597,
+      "step": 649216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.936268764376495e-05,
+      "loss": 3.6752,
+      "step": 649728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.935430169625443e-05,
+      "loss": 3.6587,
+      "step": 650240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9345932127547644e-05,
+      "loss": 3.6618,
+      "step": 650752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9337546180037124e-05,
+      "loss": 3.6663,
+      "step": 651264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9329160232526604e-05,
+      "loss": 3.6712,
+      "step": 651776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.9320774285016084e-05,
+      "loss": 3.6656,
+      "step": 652288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.931240471630929e-05,
+      "loss": 3.6752,
+      "step": 652800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.930401876879877e-05,
+      "loss": 3.6588,
+      "step": 653312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.929563282128825e-05,
+      "loss": 3.657,
+      "step": 653824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.928724687377773e-05,
+      "loss": 3.6706,
+      "step": 654336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.927886092626721e-05,
+      "loss": 3.6669,
+      "step": 654848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.927049135756042e-05,
+      "loss": 3.6526,
+      "step": 655360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.92621054100499e-05,
+      "loss": 3.6662,
+      "step": 655872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.925371946253938e-05,
+      "loss": 3.6653,
+      "step": 656384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.924533351502886e-05,
+      "loss": 3.6736,
+      "step": 656896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.923696394632208e-05,
+      "loss": 3.6571,
+      "step": 657408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.922857799881156e-05,
+      "loss": 3.6558,
+      "step": 657920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.922019205130104e-05,
+      "loss": 3.6743,
+      "step": 658432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.921180610379052e-05,
+      "loss": 3.6676,
+      "step": 658944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.920342015628e-05,
+      "loss": 3.666,
+      "step": 659456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.9195050587573206e-05,
+      "loss": 3.6565,
+      "step": 659968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.9186664640062686e-05,
+      "loss": 3.6617,
+      "step": 660480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.9178278692552166e-05,
+      "loss": 3.6561,
+      "step": 660992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.9169892745041646e-05,
+      "loss": 3.6673,
+      "step": 661504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.9161523176334855e-05,
+      "loss": 3.6701,
+      "step": 662016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.9153137228824335e-05,
+      "loss": 3.6625,
+      "step": 662528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.9144751281313815e-05,
+      "loss": 3.6573,
+      "step": 663040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.91363653338033e-05,
+      "loss": 3.6531,
+      "step": 663552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.912799576509651e-05,
+      "loss": 3.6511,
+      "step": 664064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.911960981758599e-05,
+      "loss": 3.6569,
+      "step": 664576
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.911122387007547e-05,
+      "loss": 3.6499,
+      "step": 665088
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.910283792256495e-05,
+      "loss": 3.6658,
+      "step": 665600
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.909446835385816e-05,
+      "loss": 3.6498,
+      "step": 666112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.908608240634764e-05,
+      "loss": 3.6591,
+      "step": 666624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.907769645883712e-05,
+      "loss": 3.654,
+      "step": 667136
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.90693105113266e-05,
+      "loss": 3.6628,
+      "step": 667648
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.906092456381608e-05,
+      "loss": 3.6663,
+      "step": 668160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.905255499510929e-05,
+      "loss": 3.6556,
+      "step": 668672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.904416904759877e-05,
+      "loss": 3.6725,
+      "step": 669184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.9035783100088256e-05,
+      "loss": 3.6755,
+      "step": 669696
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.9027397152577736e-05,
+      "loss": 3.66,
+      "step": 670208
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.9019027583870945e-05,
+      "loss": 3.6632,
+      "step": 670720
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.9010641636360425e-05,
+      "loss": 3.6544,
+      "step": 671232
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.9002255688849905e-05,
+      "loss": 3.6561,
+      "step": 671744
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.8993869741339385e-05,
+      "loss": 3.664,
+      "step": 672256
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.8985500172632594e-05,
+      "loss": 3.6635,
+      "step": 672768
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.8977114225122074e-05,
+      "loss": 3.647,
+      "step": 673280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.8968728277611554e-05,
+      "loss": 3.6682,
+      "step": 673792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.8960342330101034e-05,
+      "loss": 3.6663,
+      "step": 674304
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.895197276139424e-05,
+      "loss": 3.6607,
+      "step": 674816
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.894358681388372e-05,
+      "loss": 3.6522,
+      "step": 675328
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.89352008663732e-05,
+      "loss": 3.6593,
+      "step": 675840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.892681491886269e-05,
+      "loss": 3.6525,
+      "step": 676352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.89184453501559e-05,
+      "loss": 3.6684,
+      "step": 676864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.891005940264538e-05,
+      "loss": 3.6594,
+      "step": 677376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.890167345513486e-05,
+      "loss": 3.667,
+      "step": 677888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.889328750762434e-05,
+      "loss": 3.6449,
+      "step": 678400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.888491793891755e-05,
+      "loss": 3.6733,
+      "step": 678912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.887653199140703e-05,
+      "loss": 3.652,
+      "step": 679424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.886814604389651e-05,
+      "loss": 3.6658,
+      "step": 679936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.885976009638599e-05,
+      "loss": 3.6606,
+      "step": 680448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.8851390527679196e-05,
+      "loss": 3.6551,
+      "step": 680960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.8843004580168676e-05,
+      "loss": 3.6658,
+      "step": 681472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.8834618632658156e-05,
+      "loss": 3.6761,
+      "step": 681984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.882623268514764e-05,
+      "loss": 3.6449,
+      "step": 682496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.881786311644085e-05,
+      "loss": 3.6637,
+      "step": 683008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.880947716893033e-05,
+      "loss": 3.6554,
+      "step": 683520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.880109122141981e-05,
+      "loss": 3.6545,
+      "step": 684032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.8792705273909285e-05,
+      "loss": 3.6547,
+      "step": 684544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.87843357052025e-05,
+      "loss": 3.6613,
+      "step": 685056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.877594975769198e-05,
+      "loss": 3.6627,
+      "step": 685568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.876756381018146e-05,
+      "loss": 3.653,
+      "step": 686080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.8759177862670934e-05,
+      "loss": 3.6629,
+      "step": 686592
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.8488738536834717,
+      "eval_runtime": 310.9473,
+      "eval_samples_per_second": 1227.189,
+      "eval_steps_per_second": 38.351,
+      "step": 686880
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.8750791915160414e-05,
+      "loss": 3.6492,
+      "step": 687104
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.8742405967649894e-05,
+      "loss": 3.6494,
+      "step": 687616
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.873402002013938e-05,
+      "loss": 3.6594,
+      "step": 688128
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.872563407262886e-05,
+      "loss": 3.6561,
+      "step": 688640
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.871726450392207e-05,
+      "loss": 3.6637,
+      "step": 689152
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.870887855641155e-05,
+      "loss": 3.6477,
+      "step": 689664
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.870049260890103e-05,
+      "loss": 3.662,
+      "step": 690176
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.869210666139051e-05,
+      "loss": 3.648,
+      "step": 690688
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.868372071387999e-05,
+      "loss": 3.6526,
+      "step": 691200
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.86753511451732e-05,
+      "loss": 3.6528,
+      "step": 691712
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.866696519766268e-05,
+      "loss": 3.6526,
+      "step": 692224
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.865857925015216e-05,
+      "loss": 3.6625,
+      "step": 692736
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.865020968144537e-05,
+      "loss": 3.6638,
+      "step": 693248
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.864182373393485e-05,
+      "loss": 3.6448,
+      "step": 693760
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.8633437786424334e-05,
+      "loss": 3.6508,
+      "step": 694272
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.8625051838913814e-05,
+      "loss": 3.6435,
+      "step": 694784
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.8616665891403294e-05,
+      "loss": 3.6418,
+      "step": 695296
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.8608279943892774e-05,
+      "loss": 3.6516,
+      "step": 695808
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.8599893996382254e-05,
+      "loss": 3.645,
+      "step": 696320
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.8591508048871734e-05,
+      "loss": 3.6518,
+      "step": 696832
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.858313848016494e-05,
+      "loss": 3.6625,
+      "step": 697344
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.857475253265442e-05,
+      "loss": 3.6608,
+      "step": 697856
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.85663665851439e-05,
+      "loss": 3.66,
+      "step": 698368
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.855798063763338e-05,
+      "loss": 3.6544,
+      "step": 698880
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.854961106892659e-05,
+      "loss": 3.6417,
+      "step": 699392
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.854122512141607e-05,
+      "loss": 3.6557,
+      "step": 699904
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.853283917390555e-05,
+      "loss": 3.6459,
+      "step": 700416
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.852445322639503e-05,
+      "loss": 3.6542,
+      "step": 700928
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.851608365768825e-05,
+      "loss": 3.6435,
+      "step": 701440
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.850769771017773e-05,
+      "loss": 3.6441,
+      "step": 701952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.849931176266721e-05,
+      "loss": 3.6486,
+      "step": 702464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.849092581515669e-05,
+      "loss": 3.6548,
+      "step": 702976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.84825562464499e-05,
+      "loss": 3.6522,
+      "step": 703488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.847417029893938e-05,
+      "loss": 3.6501,
+      "step": 704000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.846578435142886e-05,
+      "loss": 3.6462,
+      "step": 704512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.845739840391834e-05,
+      "loss": 3.6544,
+      "step": 705024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8449028835211546e-05,
+      "loss": 3.652,
+      "step": 705536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8440642887701026e-05,
+      "loss": 3.6391,
+      "step": 706048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8432256940190506e-05,
+      "loss": 3.6394,
+      "step": 706560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8423870992679986e-05,
+      "loss": 3.6375,
+      "step": 707072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.841548504516947e-05,
+      "loss": 3.6416,
+      "step": 707584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8407099097658946e-05,
+      "loss": 3.6491,
+      "step": 708096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.839872952895216e-05,
+      "loss": 3.653,
+      "step": 708608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.839034358144164e-05,
+      "loss": 3.6486,
+      "step": 709120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.838195763393112e-05,
+      "loss": 3.6542,
+      "step": 709632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8373571686420595e-05,
+      "loss": 3.6515,
+      "step": 710144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.836520211771381e-05,
+      "loss": 3.6505,
+      "step": 710656
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.835681617020329e-05,
+      "loss": 3.6419,
+      "step": 711168
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.834843022269277e-05,
+      "loss": 3.6206,
+      "step": 711680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8340044275182244e-05,
+      "loss": 3.6619,
+      "step": 712192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.833167470647546e-05,
+      "loss": 3.6396,
+      "step": 712704
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.832328875896494e-05,
+      "loss": 3.644,
+      "step": 713216
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.831490281145442e-05,
+      "loss": 3.6458,
+      "step": 713728
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.83065168639439e-05,
+      "loss": 3.6363,
+      "step": 714240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8298147295237115e-05,
+      "loss": 3.6337,
+      "step": 714752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8289761347726595e-05,
+      "loss": 3.6351,
+      "step": 715264
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.828137540021607e-05,
+      "loss": 3.6465,
+      "step": 715776
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.827298945270555e-05,
+      "loss": 3.6358,
+      "step": 716288
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8264619883998764e-05,
+      "loss": 3.6577,
+      "step": 716800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8256233936488244e-05,
+      "loss": 3.6385,
+      "step": 717312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.824784798897772e-05,
+      "loss": 3.6253,
+      "step": 717824
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.82394620414672e-05,
+      "loss": 3.6545,
+      "step": 718336
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.823107609395668e-05,
+      "loss": 3.6349,
+      "step": 718848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.822270652524989e-05,
+      "loss": 3.6316,
+      "step": 719360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.821432057773937e-05,
+      "loss": 3.6472,
+      "step": 719872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.820593463022885e-05,
+      "loss": 3.6506,
+      "step": 720384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.819754868271833e-05,
+      "loss": 3.6324,
+      "step": 720896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.818917911401154e-05,
+      "loss": 3.6304,
+      "step": 721408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.818079316650102e-05,
+      "loss": 3.628,
+      "step": 721920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.81724072189905e-05,
+      "loss": 3.6401,
+      "step": 722432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.816402127147998e-05,
+      "loss": 3.6511,
+      "step": 722944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.815565170277319e-05,
+      "loss": 3.6412,
+      "step": 723456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.814726575526267e-05,
+      "loss": 3.647,
+      "step": 723968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.813887980775215e-05,
+      "loss": 3.6439,
+      "step": 724480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.813049386024163e-05,
+      "loss": 3.6529,
+      "step": 724992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.812212429153485e-05,
+      "loss": 3.6375,
+      "step": 725504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.811373834402433e-05,
+      "loss": 3.6543,
+      "step": 726016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.810535239651381e-05,
+      "loss": 3.6278,
+      "step": 726528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.809696644900329e-05,
+      "loss": 3.638,
+      "step": 727040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8088596880296496e-05,
+      "loss": 3.6414,
+      "step": 727552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8080210932785976e-05,
+      "loss": 3.6435,
+      "step": 728064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8071824985275456e-05,
+      "loss": 3.6432,
+      "step": 728576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8063439037764936e-05,
+      "loss": 3.6478,
+      "step": 729088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8055069469058145e-05,
+      "loss": 3.6337,
+      "step": 729600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8046683521547625e-05,
+      "loss": 3.634,
+      "step": 730112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8038297574037105e-05,
+      "loss": 3.6433,
+      "step": 730624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.8029911626526585e-05,
+      "loss": 3.6464,
+      "step": 731136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.80215420578198e-05,
+      "loss": 3.6279,
+      "step": 731648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.801315611030928e-05,
+      "loss": 3.6368,
+      "step": 732160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.800477016279876e-05,
+      "loss": 3.6427,
+      "step": 732672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.799638421528824e-05,
+      "loss": 3.6432,
+      "step": 733184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.798801464658145e-05,
+      "loss": 3.6383,
+      "step": 733696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.797962869907093e-05,
+      "loss": 3.632,
+      "step": 734208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.797124275156041e-05,
+      "loss": 3.6473,
+      "step": 734720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.796285680404989e-05,
+      "loss": 3.6457,
+      "step": 735232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.79544872353431e-05,
+      "loss": 3.641,
+      "step": 735744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.794610128783258e-05,
+      "loss": 3.6319,
+      "step": 736256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.793771534032206e-05,
+      "loss": 3.6371,
+      "step": 736768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.792932939281154e-05,
+      "loss": 3.6305,
+      "step": 737280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7920959824104754e-05,
+      "loss": 3.6459,
+      "step": 737792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7912573876594234e-05,
+      "loss": 3.6439,
+      "step": 738304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7904187929083714e-05,
+      "loss": 3.6364,
+      "step": 738816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7895801981573194e-05,
+      "loss": 3.6372,
+      "step": 739328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.78874324128664e-05,
+      "loss": 3.6264,
+      "step": 739840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.787904646535588e-05,
+      "loss": 3.6286,
+      "step": 740352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.787066051784536e-05,
+      "loss": 3.6299,
+      "step": 740864
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.786227457033484e-05,
+      "loss": 3.629,
+      "step": 741376
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.785390500162805e-05,
+      "loss": 3.6404,
+      "step": 741888
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.784551905411753e-05,
+      "loss": 3.6262,
+      "step": 742400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.783713310660701e-05,
+      "loss": 3.633,
+      "step": 742912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.782874715909649e-05,
+      "loss": 3.6324,
+      "step": 743424
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.782037759038971e-05,
+      "loss": 3.6319,
+      "step": 743936
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.781199164287919e-05,
+      "loss": 3.6433,
+      "step": 744448
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.780360569536867e-05,
+      "loss": 3.6312,
+      "step": 744960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.779521974785815e-05,
+      "loss": 3.6467,
+      "step": 745472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.778685017915136e-05,
+      "loss": 3.6522,
+      "step": 745984
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.777846423164084e-05,
+      "loss": 3.6385,
+      "step": 746496
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.777007828413032e-05,
+      "loss": 3.6357,
+      "step": 747008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.77616923366198e-05,
+      "loss": 3.6327,
+      "step": 747520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7753322767913006e-05,
+      "loss": 3.6346,
+      "step": 748032
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7744936820402486e-05,
+      "loss": 3.6389,
+      "step": 748544
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7736550872891966e-05,
+      "loss": 3.6368,
+      "step": 749056
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7728164925381446e-05,
+      "loss": 3.6261,
+      "step": 749568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.771979535667466e-05,
+      "loss": 3.6445,
+      "step": 750080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.771140940916414e-05,
+      "loss": 3.6453,
+      "step": 750592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.770302346165362e-05,
+      "loss": 3.6322,
+      "step": 751104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.76946375141431e-05,
+      "loss": 3.631,
+      "step": 751616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.768626794543631e-05,
+      "loss": 3.6387,
+      "step": 752128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.767788199792579e-05,
+      "loss": 3.6262,
+      "step": 752640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.766949605041527e-05,
+      "loss": 3.6477,
+      "step": 753152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.766111010290475e-05,
+      "loss": 3.6341,
+      "step": 753664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.765274053419796e-05,
+      "loss": 3.6437,
+      "step": 754176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.764435458668744e-05,
+      "loss": 3.6206,
+      "step": 754688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.763596863917692e-05,
+      "loss": 3.6442,
+      "step": 755200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.76275826916664e-05,
+      "loss": 3.6389,
+      "step": 755712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7619213122959615e-05,
+      "loss": 3.6349,
+      "step": 756224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7610827175449095e-05,
+      "loss": 3.6364,
+      "step": 756736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7602441227938575e-05,
+      "loss": 3.6293,
+      "step": 757248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7594055280428055e-05,
+      "loss": 3.6391,
+      "step": 757760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7585669332917535e-05,
+      "loss": 3.6576,
+      "step": 758272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7577299764210744e-05,
+      "loss": 3.6193,
+      "step": 758784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7568913816700224e-05,
+      "loss": 3.6409,
+      "step": 759296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7560527869189704e-05,
+      "loss": 3.6323,
+      "step": 759808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.7552141921679184e-05,
+      "loss": 3.6275,
+      "step": 760320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.754377235297239e-05,
+      "loss": 3.6332,
+      "step": 760832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.753538640546187e-05,
+      "loss": 3.6357,
+      "step": 761344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.752700045795135e-05,
+      "loss": 3.6399,
+      "step": 761856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.751861451044083e-05,
+      "loss": 3.6321,
+      "step": 762368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.751024494173405e-05,
+      "loss": 3.638,
+      "step": 762880
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.8461172580718994,
+      "eval_runtime": 306.6052,
+      "eval_samples_per_second": 1244.568,
+      "eval_steps_per_second": 38.894,
+      "step": 763200
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.750185899422353e-05,
+      "loss": 3.6282,
+      "step": 763392
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.749347304671301e-05,
+      "loss": 3.6262,
+      "step": 763904
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.748508709920249e-05,
+      "loss": 3.6346,
+      "step": 764416
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.74767175304957e-05,
+      "loss": 3.6322,
+      "step": 764928
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.746833158298518e-05,
+      "loss": 3.6427,
+      "step": 765440
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.745994563547466e-05,
+      "loss": 3.6256,
+      "step": 765952
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.745155968796414e-05,
+      "loss": 3.6352,
+      "step": 766464
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.744319011925735e-05,
+      "loss": 3.6239,
+      "step": 766976
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.743480417174683e-05,
+      "loss": 3.6314,
+      "step": 767488
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.742641822423631e-05,
+      "loss": 3.6278,
+      "step": 768000
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.7418032276725787e-05,
+      "loss": 3.626,
+      "step": 768512
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.7409662708019e-05,
+      "loss": 3.6411,
+      "step": 769024
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.740129313931221e-05,
+      "loss": 3.6432,
+      "step": 769536
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.739290719180169e-05,
+      "loss": 3.6202,
+      "step": 770048
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.738452124429117e-05,
+      "loss": 3.6305,
+      "step": 770560
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.737613529678065e-05,
+      "loss": 3.6163,
+      "step": 771072
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.736774934927013e-05,
+      "loss": 3.6186,
+      "step": 771584
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.735936340175961e-05,
+      "loss": 3.6278,
+      "step": 772096
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.735097745424909e-05,
+      "loss": 3.621,
+      "step": 772608
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.7342591506738564e-05,
+      "loss": 3.6305,
+      "step": 773120
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.7334205559228044e-05,
+      "loss": 3.6392,
+      "step": 773632
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.732583599052126e-05,
+      "loss": 3.6341,
+      "step": 774144
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.731745004301074e-05,
+      "loss": 3.6373,
+      "step": 774656
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.730906409550022e-05,
+      "loss": 3.6312,
+      "step": 775168
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.73006781479897e-05,
+      "loss": 3.6239,
+      "step": 775680
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.7292308579282916e-05,
+      "loss": 3.6296,
+      "step": 776192
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.728392263177239e-05,
+      "loss": 3.6251,
+      "step": 776704
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.727553668426187e-05,
+      "loss": 3.6322,
+      "step": 777216
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.726715073675135e-05,
+      "loss": 3.6173,
+      "step": 777728
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.7258781168044565e-05,
+      "loss": 3.6231,
+      "step": 778240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.725039522053404e-05,
+      "loss": 3.626,
+      "step": 778752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.724200927302352e-05,
+      "loss": 3.6321,
+      "step": 779264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7233623325513e-05,
+      "loss": 3.6331,
+      "step": 779776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7225253756806214e-05,
+      "loss": 3.6235,
+      "step": 780288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7216867809295694e-05,
+      "loss": 3.6244,
+      "step": 780800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7208481861785174e-05,
+      "loss": 3.629,
+      "step": 781312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7200095914274654e-05,
+      "loss": 3.63,
+      "step": 781824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.719172634556786e-05,
+      "loss": 3.6154,
+      "step": 782336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.718334039805734e-05,
+      "loss": 3.6162,
+      "step": 782848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.717495445054682e-05,
+      "loss": 3.6173,
+      "step": 783360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.71665685030363e-05,
+      "loss": 3.6177,
+      "step": 783872
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.715819893432951e-05,
+      "loss": 3.6232,
+      "step": 784384
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.714981298681899e-05,
+      "loss": 3.6305,
+      "step": 784896
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.714142703930847e-05,
+      "loss": 3.6263,
+      "step": 785408
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.713304109179795e-05,
+      "loss": 3.6313,
+      "step": 785920
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.712467152309117e-05,
+      "loss": 3.6277,
+      "step": 786432
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.711628557558065e-05,
+      "loss": 3.6291,
+      "step": 786944
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.710789962807013e-05,
+      "loss": 3.6206,
+      "step": 787456
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.709951368055961e-05,
+      "loss": 3.5979,
+      "step": 787968
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7091144111852817e-05,
+      "loss": 3.6377,
+      "step": 788480
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7082758164342297e-05,
+      "loss": 3.6145,
+      "step": 788992
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7074372216831777e-05,
+      "loss": 3.6229,
+      "step": 789504
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7065986269321256e-05,
+      "loss": 3.6227,
+      "step": 790016
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7057616700614466e-05,
+      "loss": 3.6126,
+      "step": 790528
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7049230753103946e-05,
+      "loss": 3.615,
+      "step": 791040
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7040844805593425e-05,
+      "loss": 3.611,
+      "step": 791552
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.7032458858082905e-05,
+      "loss": 3.6259,
+      "step": 792064
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.702408928937612e-05,
+      "loss": 3.6115,
+      "step": 792576
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.70157033418656e-05,
+      "loss": 3.6344,
+      "step": 793088
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.700731739435508e-05,
+      "loss": 3.6165,
+      "step": 793600
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.699893144684456e-05,
+      "loss": 3.6027,
+      "step": 794112
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.699056187813777e-05,
+      "loss": 3.6333,
+      "step": 794624
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.698217593062725e-05,
+      "loss": 3.6101,
+      "step": 795136
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.697378998311673e-05,
+      "loss": 3.6064,
+      "step": 795648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.696540403560621e-05,
+      "loss": 3.6268,
+      "step": 796160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.695703446689942e-05,
+      "loss": 3.6277,
+      "step": 796672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.69486485193889e-05,
+      "loss": 3.6091,
+      "step": 797184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.694026257187838e-05,
+      "loss": 3.6115,
+      "step": 797696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.693187662436786e-05,
+      "loss": 3.6058,
+      "step": 798208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.6923507055661075e-05,
+      "loss": 3.6139,
+      "step": 798720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.6915121108150555e-05,
+      "loss": 3.6291,
+      "step": 799232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.6906735160640035e-05,
+      "loss": 3.6201,
+      "step": 799744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.6898349213129515e-05,
+      "loss": 3.6225,
+      "step": 800256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.6889979644422724e-05,
+      "loss": 3.6211,
+      "step": 800768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.6881593696912204e-05,
+      "loss": 3.6303,
+      "step": 801280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.6873207749401684e-05,
+      "loss": 3.6178,
+      "step": 801792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.6864821801891164e-05,
+      "loss": 3.6294,
+      "step": 802304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.685645223318437e-05,
+      "loss": 3.6062,
+      "step": 802816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.684806628567385e-05,
+      "loss": 3.6148,
+      "step": 803328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.683968033816333e-05,
+      "loss": 3.6214,
+      "step": 803840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.683129439065281e-05,
+      "loss": 3.6202,
+      "step": 804352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.682292482194603e-05,
+      "loss": 3.6252,
+      "step": 804864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.681453887443551e-05,
+      "loss": 3.6254,
+      "step": 805376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.680615292692499e-05,
+      "loss": 3.6154,
+      "step": 805888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.679776697941447e-05,
+      "loss": 3.6096,
+      "step": 806400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.678939741070768e-05,
+      "loss": 3.6194,
+      "step": 806912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.678101146319716e-05,
+      "loss": 3.6236,
+      "step": 807424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.677262551568664e-05,
+      "loss": 3.6061,
+      "step": 807936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.676423956817612e-05,
+      "loss": 3.6176,
+      "step": 808448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.6755869999469327e-05,
+      "loss": 3.6243,
+      "step": 808960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6747484051958807e-05,
+      "loss": 3.6165,
+      "step": 809472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6739098104448286e-05,
+      "loss": 3.6145,
+      "step": 809984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6730712156937766e-05,
+      "loss": 3.6115,
+      "step": 810496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.672234258823098e-05,
+      "loss": 3.6223,
+      "step": 811008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.671395664072046e-05,
+      "loss": 3.6269,
+      "step": 811520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.670557069320994e-05,
+      "loss": 3.6196,
+      "step": 812032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.669718474569942e-05,
+      "loss": 3.6122,
+      "step": 812544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.668881517699263e-05,
+      "loss": 3.6132,
+      "step": 813056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.668042922948211e-05,
+      "loss": 3.6115,
+      "step": 813568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.667204328197159e-05,
+      "loss": 3.6221,
+      "step": 814080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.666365733446107e-05,
+      "loss": 3.6229,
+      "step": 814592
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.665527138695055e-05,
+      "loss": 3.6132,
+      "step": 815104
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.664690181824376e-05,
+      "loss": 3.6131,
+      "step": 815616
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.663851587073324e-05,
+      "loss": 3.6103,
+      "step": 816128
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.663012992322272e-05,
+      "loss": 3.6044,
+      "step": 816640
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.66217439757122e-05,
+      "loss": 3.6109,
+      "step": 817152
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6613374407005416e-05,
+      "loss": 3.6074,
+      "step": 817664
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6604988459494896e-05,
+      "loss": 3.6125,
+      "step": 818176
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6596602511984376e-05,
+      "loss": 3.6102,
+      "step": 818688
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6588216564473856e-05,
+      "loss": 3.6067,
+      "step": 819200
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6579846995767065e-05,
+      "loss": 3.6123,
+      "step": 819712
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6571461048256545e-05,
+      "loss": 3.6096,
+      "step": 820224
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6563075100746025e-05,
+      "loss": 3.6211,
+      "step": 820736
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6554689153235505e-05,
+      "loss": 3.6073,
+      "step": 821248
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6546319584528714e-05,
+      "loss": 3.6276,
+      "step": 821760
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6537933637018194e-05,
+      "loss": 3.6303,
+      "step": 822272
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6529547689507674e-05,
+      "loss": 3.6174,
+      "step": 822784
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6521161741997154e-05,
+      "loss": 3.6123,
+      "step": 823296
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.651279217329037e-05,
+      "loss": 3.6111,
+      "step": 823808
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.650440622577985e-05,
+      "loss": 3.6124,
+      "step": 824320
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.649602027826933e-05,
+      "loss": 3.6216,
+      "step": 824832
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.648763433075881e-05,
+      "loss": 3.6142,
+      "step": 825344
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.647926476205202e-05,
+      "loss": 3.6058,
+      "step": 825856
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.64708788145415e-05,
+      "loss": 3.618,
+      "step": 826368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.646249286703098e-05,
+      "loss": 3.6253,
+      "step": 826880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.645410691952046e-05,
+      "loss": 3.6078,
+      "step": 827392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.644573735081367e-05,
+      "loss": 3.6109,
+      "step": 827904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.643735140330315e-05,
+      "loss": 3.6171,
+      "step": 828416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.642896545579263e-05,
+      "loss": 3.6039,
+      "step": 828928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.642057950828211e-05,
+      "loss": 3.6281,
+      "step": 829440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.641220993957532e-05,
+      "loss": 3.6094,
+      "step": 829952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.64038239920648e-05,
+      "loss": 3.6225,
+      "step": 830464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.639543804455428e-05,
+      "loss": 3.598,
+      "step": 830976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.638705209704376e-05,
+      "loss": 3.622,
+      "step": 831488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.637868252833697e-05,
+      "loss": 3.6145,
+      "step": 832000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.637029658082645e-05,
+      "loss": 3.6158,
+      "step": 832512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.636191063331593e-05,
+      "loss": 3.6136,
+      "step": 833024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.635352468580541e-05,
+      "loss": 3.6103,
+      "step": 833536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.634515511709862e-05,
+      "loss": 3.6203,
+      "step": 834048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.63367691695881e-05,
+      "loss": 3.6335,
+      "step": 834560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.632838322207758e-05,
+      "loss": 3.6016,
+      "step": 835072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.631999727456706e-05,
+      "loss": 3.6137,
+      "step": 835584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.631162770586028e-05,
+      "loss": 3.6147,
+      "step": 836096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.630324175834976e-05,
+      "loss": 3.6069,
+      "step": 836608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.629485581083924e-05,
+      "loss": 3.612,
+      "step": 837120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.628646986332871e-05,
+      "loss": 3.6144,
+      "step": 837632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6278100294621926e-05,
+      "loss": 3.6162,
+      "step": 838144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6269714347111406e-05,
+      "loss": 3.6138,
+      "step": 838656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.6261328399600886e-05,
+      "loss": 3.6149,
+      "step": 839168
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.8437752723693848,
+      "eval_runtime": 313.9789,
+      "eval_samples_per_second": 1215.34,
+      "eval_steps_per_second": 37.98,
+      "step": 839520
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.625294245209036e-05,
+      "loss": 3.5974,
+      "step": 839680
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.624455650457984e-05,
+      "loss": 3.6025,
+      "step": 840192
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.623617055706932e-05,
+      "loss": 3.6165,
+      "step": 840704
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.62277846095588e-05,
+      "loss": 3.609,
+      "step": 841216
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.6219415040852015e-05,
+      "loss": 3.6209,
+      "step": 841728
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.6211029093341495e-05,
+      "loss": 3.6043,
+      "step": 842240
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.6202643145830975e-05,
+      "loss": 3.6118,
+      "step": 842752
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.6194257198320455e-05,
+      "loss": 3.6055,
+      "step": 843264
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.6185887629613664e-05,
+      "loss": 3.6106,
+      "step": 843776
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.6177501682103144e-05,
+      "loss": 3.6034,
+      "step": 844288
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.6169115734592624e-05,
+      "loss": 3.6061,
+      "step": 844800
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.6160729787082104e-05,
+      "loss": 3.6243,
+      "step": 845312
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.615236021837531e-05,
+      "loss": 3.6184,
+      "step": 845824
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.614397427086479e-05,
+      "loss": 3.5993,
+      "step": 846336
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.613558832335427e-05,
+      "loss": 3.61,
+      "step": 846848
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.612720237584375e-05,
+      "loss": 3.5944,
+      "step": 847360
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.611881642833323e-05,
+      "loss": 3.5988,
+      "step": 847872
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.611043048082271e-05,
+      "loss": 3.6103,
+      "step": 848384
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.61020445333122e-05,
+      "loss": 3.5981,
+      "step": 848896
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.609365858580168e-05,
+      "loss": 3.6086,
+      "step": 849408
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.608528901709489e-05,
+      "loss": 3.6151,
+      "step": 849920
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.607690306958437e-05,
+      "loss": 3.6134,
+      "step": 850432
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.606851712207385e-05,
+      "loss": 3.6184,
+      "step": 850944
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.606013117456333e-05,
+      "loss": 3.6119,
+      "step": 851456
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.605176160585654e-05,
+      "loss": 3.6029,
+      "step": 851968
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.604337565834602e-05,
+      "loss": 3.6064,
+      "step": 852480
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.60349897108355e-05,
+      "loss": 3.6012,
+      "step": 852992
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.602660376332498e-05,
+      "loss": 3.6097,
+      "step": 853504
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.6018234194618186e-05,
+      "loss": 3.5988,
+      "step": 854016
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.6009848247107666e-05,
+      "loss": 3.6032,
+      "step": 854528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.600146229959715e-05,
+      "loss": 3.6064,
+      "step": 855040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.599307635208663e-05,
+      "loss": 3.6066,
+      "step": 855552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.598470678337984e-05,
+      "loss": 3.6103,
+      "step": 856064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.597632083586932e-05,
+      "loss": 3.6074,
+      "step": 856576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.59679348883588e-05,
+      "loss": 3.6014,
+      "step": 857088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.595954894084828e-05,
+      "loss": 3.6099,
+      "step": 857600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.595117937214149e-05,
+      "loss": 3.6136,
+      "step": 858112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.594279342463097e-05,
+      "loss": 3.5935,
+      "step": 858624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.593440747712045e-05,
+      "loss": 3.5943,
+      "step": 859136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.592602152960993e-05,
+      "loss": 3.5962,
+      "step": 859648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.591765196090314e-05,
+      "loss": 3.5949,
+      "step": 860160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.590926601339262e-05,
+      "loss": 3.6029,
+      "step": 860672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5900880065882106e-05,
+      "loss": 3.607,
+      "step": 861184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5892494118371586e-05,
+      "loss": 3.611,
+      "step": 861696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5884124549664796e-05,
+      "loss": 3.6066,
+      "step": 862208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5875738602154276e-05,
+      "loss": 3.6068,
+      "step": 862720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5867352654643755e-05,
+      "loss": 3.6071,
+      "step": 863232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5858966707133235e-05,
+      "loss": 3.6023,
+      "step": 863744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5850597138426445e-05,
+      "loss": 3.5778,
+      "step": 864256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5842211190915924e-05,
+      "loss": 3.6134,
+      "step": 864768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5833825243405404e-05,
+      "loss": 3.5936,
+      "step": 865280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5825439295894884e-05,
+      "loss": 3.6021,
+      "step": 865792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5817069727188093e-05,
+      "loss": 3.5989,
+      "step": 866304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5808683779677573e-05,
+      "loss": 3.5925,
+      "step": 866816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.580029783216706e-05,
+      "loss": 3.5948,
+      "step": 867328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.579191188465654e-05,
+      "loss": 3.5932,
+      "step": 867840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.578354231594975e-05,
+      "loss": 3.6056,
+      "step": 868352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.577515636843923e-05,
+      "loss": 3.5893,
+      "step": 868864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.576677042092871e-05,
+      "loss": 3.6142,
+      "step": 869376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.575838447341819e-05,
+      "loss": 3.6003,
+      "step": 869888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.57500149047114e-05,
+      "loss": 3.576,
+      "step": 870400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.574162895720088e-05,
+      "loss": 3.6129,
+      "step": 870912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.573324300969036e-05,
+      "loss": 3.5898,
+      "step": 871424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.572485706217984e-05,
+      "loss": 3.591,
+      "step": 871936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.571648749347305e-05,
+      "loss": 3.6022,
+      "step": 872448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.570810154596253e-05,
+      "loss": 3.6086,
+      "step": 872960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.569971559845201e-05,
+      "loss": 3.5889,
+      "step": 873472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5691329650941494e-05,
+      "loss": 3.595,
+      "step": 873984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.56829600822347e-05,
+      "loss": 3.5832,
+      "step": 874496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.567457413472418e-05,
+      "loss": 3.5929,
+      "step": 875008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.566618818721366e-05,
+      "loss": 3.6113,
+      "step": 875520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.565780223970314e-05,
+      "loss": 3.5996,
+      "step": 876032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.564943267099635e-05,
+      "loss": 3.6032,
+      "step": 876544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.564104672348583e-05,
+      "loss": 3.6024,
+      "step": 877056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.563266077597531e-05,
+      "loss": 3.6105,
+      "step": 877568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5624274828464785e-05,
+      "loss": 3.5998,
+      "step": 878080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5615905259758e-05,
+      "loss": 3.6085,
+      "step": 878592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.560751931224748e-05,
+      "loss": 3.5888,
+      "step": 879104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.559913336473696e-05,
+      "loss": 3.5935,
+      "step": 879616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.559074741722644e-05,
+      "loss": 3.5989,
+      "step": 880128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5582377848519657e-05,
+      "loss": 3.6005,
+      "step": 880640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5573991901009137e-05,
+      "loss": 3.6021,
+      "step": 881152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.556560595349861e-05,
+      "loss": 3.6066,
+      "step": 881664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.555722000598809e-05,
+      "loss": 3.5998,
+      "step": 882176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5548850437281306e-05,
+      "loss": 3.5868,
+      "step": 882688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5540464489770785e-05,
+      "loss": 3.599,
+      "step": 883200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.553207854226026e-05,
+      "loss": 3.6032,
+      "step": 883712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.552369259474974e-05,
+      "loss": 3.5865,
+      "step": 884224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5515323026042954e-05,
+      "loss": 3.5964,
+      "step": 884736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.5506937078532434e-05,
+      "loss": 3.6036,
+      "step": 885248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5498551131021914e-05,
+      "loss": 3.5961,
+      "step": 885760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5490165183511394e-05,
+      "loss": 3.5972,
+      "step": 886272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.548179561480461e-05,
+      "loss": 3.5883,
+      "step": 886784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5473409667294083e-05,
+      "loss": 3.603,
+      "step": 887296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.546502371978356e-05,
+      "loss": 3.6039,
+      "step": 887808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.545663777227304e-05,
+      "loss": 3.6061,
+      "step": 888320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.544826820356626e-05,
+      "loss": 3.5852,
+      "step": 888832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.543988225605573e-05,
+      "loss": 3.5981,
+      "step": 889344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.543149630854521e-05,
+      "loss": 3.5892,
+      "step": 889856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.542311036103469e-05,
+      "loss": 3.6016,
+      "step": 890368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.541474079232791e-05,
+      "loss": 3.6052,
+      "step": 890880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.540635484481739e-05,
+      "loss": 3.59,
+      "step": 891392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.539796889730687e-05,
+      "loss": 3.5957,
+      "step": 891904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.538958294979635e-05,
+      "loss": 3.5886,
+      "step": 892416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.538121338108956e-05,
+      "loss": 3.5863,
+      "step": 892928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.537282743357904e-05,
+      "loss": 3.5916,
+      "step": 893440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.536444148606852e-05,
+      "loss": 3.5893,
+      "step": 893952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5356055538558e-05,
+      "loss": 3.5937,
+      "step": 894464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5347685969851206e-05,
+      "loss": 3.5906,
+      "step": 894976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5339300022340686e-05,
+      "loss": 3.5883,
+      "step": 895488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5330914074830166e-05,
+      "loss": 3.5867,
+      "step": 896000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5322528127319646e-05,
+      "loss": 3.5904,
+      "step": 896512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.531415855861286e-05,
+      "loss": 3.6067,
+      "step": 897024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.530577261110234e-05,
+      "loss": 3.592,
+      "step": 897536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.529738666359182e-05,
+      "loss": 3.6028,
+      "step": 898048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.52890007160813e-05,
+      "loss": 3.6062,
+      "step": 898560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.528063114737451e-05,
+      "loss": 3.6068,
+      "step": 899072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.527224519986399e-05,
+      "loss": 3.5922,
+      "step": 899584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.526385925235347e-05,
+      "loss": 3.5923,
+      "step": 900096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.525547330484295e-05,
+      "loss": 3.5945,
+      "step": 900608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.524710373613616e-05,
+      "loss": 3.5988,
+      "step": 901120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.523871778862564e-05,
+      "loss": 3.5968,
+      "step": 901632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.523033184111512e-05,
+      "loss": 3.5893,
+      "step": 902144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.52219458936046e-05,
+      "loss": 3.5939,
+      "step": 902656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5213576324897815e-05,
+      "loss": 3.6047,
+      "step": 903168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5205190377387295e-05,
+      "loss": 3.5946,
+      "step": 903680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5196804429876775e-05,
+      "loss": 3.5867,
+      "step": 904192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5188418482366255e-05,
+      "loss": 3.5943,
+      "step": 904704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5180048913659464e-05,
+      "loss": 3.5862,
+      "step": 905216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5171662966148944e-05,
+      "loss": 3.6069,
+      "step": 905728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5163277018638424e-05,
+      "loss": 3.5918,
+      "step": 906240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5154891071127904e-05,
+      "loss": 3.6041,
+      "step": 906752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.5146521502421113e-05,
+      "loss": 3.5793,
+      "step": 907264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.513813555491059e-05,
+      "loss": 3.6004,
+      "step": 907776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.512974960740007e-05,
+      "loss": 3.5949,
+      "step": 908288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.512136365988955e-05,
+      "loss": 3.5945,
+      "step": 908800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.511299409118277e-05,
+      "loss": 3.5962,
+      "step": 909312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.510460814367225e-05,
+      "loss": 3.5876,
+      "step": 909824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.509622219616173e-05,
+      "loss": 3.6019,
+      "step": 910336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.508783624865121e-05,
+      "loss": 3.6124,
+      "step": 910848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.507946667994442e-05,
+      "loss": 3.5847,
+      "step": 911360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.50710807324339e-05,
+      "loss": 3.5934,
+      "step": 911872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.506269478492338e-05,
+      "loss": 3.5968,
+      "step": 912384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.505430883741286e-05,
+      "loss": 3.5901,
+      "step": 912896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.504593926870607e-05,
+      "loss": 3.5874,
+      "step": 913408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.503755332119555e-05,
+      "loss": 3.5985,
+      "step": 913920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.502916737368503e-05,
+      "loss": 3.5919,
+      "step": 914432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.502078142617451e-05,
+      "loss": 3.5943,
+      "step": 914944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.501241185746772e-05,
+      "loss": 3.5961,
+      "step": 915456
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.843202590942383,
+      "eval_runtime": 302.9232,
+      "eval_samples_per_second": 1259.695,
+      "eval_steps_per_second": 39.366,
+      "step": 915840
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.50040259099572e-05,
+      "loss": 3.5907,
+      "step": 915968
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.499563996244668e-05,
+      "loss": 3.5799,
+      "step": 916480
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.498725401493616e-05,
+      "loss": 3.5979,
+      "step": 916992
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.497888444622937e-05,
+      "loss": 3.5918,
+      "step": 917504
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.497049849871885e-05,
+      "loss": 3.598,
+      "step": 918016
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.496211255120833e-05,
+      "loss": 3.5862,
+      "step": 918528
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.495372660369781e-05,
+      "loss": 3.6008,
+      "step": 919040
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.494535703499102e-05,
+      "loss": 3.587,
+      "step": 919552
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.49369710874805e-05,
+      "loss": 3.5878,
+      "step": 920064
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.492858513996998e-05,
+      "loss": 3.5836,
+      "step": 920576
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.492019919245946e-05,
+      "loss": 3.5884,
+      "step": 921088
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4911829623752676e-05,
+      "loss": 3.6001,
+      "step": 921600
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4903460055045886e-05,
+      "loss": 3.6011,
+      "step": 922112
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4895074107535366e-05,
+      "loss": 3.5791,
+      "step": 922624
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4886688160024845e-05,
+      "loss": 3.5913,
+      "step": 923136
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4878302212514325e-05,
+      "loss": 3.5754,
+      "step": 923648
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4869916265003805e-05,
+      "loss": 3.5839,
+      "step": 924160
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4861530317493285e-05,
+      "loss": 3.5853,
+      "step": 924672
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4853144369982765e-05,
+      "loss": 3.5792,
+      "step": 925184
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4844758422472245e-05,
+      "loss": 3.5897,
+      "step": 925696
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4836388853765454e-05,
+      "loss": 3.5961,
+      "step": 926208
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4828002906254934e-05,
+      "loss": 3.5927,
+      "step": 926720
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4819616958744414e-05,
+      "loss": 3.6013,
+      "step": 927232
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.4811231011233894e-05,
+      "loss": 3.5938,
+      "step": 927744
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.480286144252711e-05,
+      "loss": 3.5828,
+      "step": 928256
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.479447549501659e-05,
+      "loss": 3.5913,
+      "step": 928768
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.478608954750607e-05,
+      "loss": 3.584,
+      "step": 929280
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.477770359999555e-05,
+      "loss": 3.5878,
+      "step": 929792
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.476933403128876e-05,
+      "loss": 3.5802,
+      "step": 930304
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.476094808377824e-05,
+      "loss": 3.583,
+      "step": 930816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.475256213626772e-05,
+      "loss": 3.5913,
+      "step": 931328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.47441761887572e-05,
+      "loss": 3.5863,
+      "step": 931840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.473580662005041e-05,
+      "loss": 3.5865,
+      "step": 932352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.472742067253989e-05,
+      "loss": 3.5939,
+      "step": 932864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.471903472502937e-05,
+      "loss": 3.5827,
+      "step": 933376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.471064877751885e-05,
+      "loss": 3.5868,
+      "step": 933888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4702279208812064e-05,
+      "loss": 3.595,
+      "step": 934400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4693893261301544e-05,
+      "loss": 3.5724,
+      "step": 934912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4685507313791024e-05,
+      "loss": 3.5799,
+      "step": 935424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4677121366280504e-05,
+      "loss": 3.5781,
+      "step": 935936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.466875179757371e-05,
+      "loss": 3.5766,
+      "step": 936448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.466036585006319e-05,
+      "loss": 3.5829,
+      "step": 936960
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.465197990255267e-05,
+      "loss": 3.5887,
+      "step": 937472
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.464359395504215e-05,
+      "loss": 3.5911,
+      "step": 937984
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.463522438633536e-05,
+      "loss": 3.5894,
+      "step": 938496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.462683843882484e-05,
+      "loss": 3.5896,
+      "step": 939008
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.461845249131432e-05,
+      "loss": 3.5907,
+      "step": 939520
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.46100665438038e-05,
+      "loss": 3.5821,
+      "step": 940032
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.460169697509702e-05,
+      "loss": 3.5646,
+      "step": 940544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.45933110275865e-05,
+      "loss": 3.5928,
+      "step": 941056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.458492508007598e-05,
+      "loss": 3.5782,
+      "step": 941568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.457653913256546e-05,
+      "loss": 3.5822,
+      "step": 942080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4568169563858666e-05,
+      "loss": 3.5821,
+      "step": 942592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4559783616348146e-05,
+      "loss": 3.5743,
+      "step": 943104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4551397668837626e-05,
+      "loss": 3.5757,
+      "step": 943616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4543011721327106e-05,
+      "loss": 3.5735,
+      "step": 944128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4534642152620315e-05,
+      "loss": 3.5876,
+      "step": 944640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4526256205109795e-05,
+      "loss": 3.5685,
+      "step": 945152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4517870257599275e-05,
+      "loss": 3.5974,
+      "step": 945664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4509484310088755e-05,
+      "loss": 3.5824,
+      "step": 946176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.450111474138197e-05,
+      "loss": 3.5603,
+      "step": 946688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.449272879387145e-05,
+      "loss": 3.5928,
+      "step": 947200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.448434284636093e-05,
+      "loss": 3.5694,
+      "step": 947712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.447595689885041e-05,
+      "loss": 3.5759,
+      "step": 948224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.446758733014362e-05,
+      "loss": 3.5805,
+      "step": 948736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.44592013826331e-05,
+      "loss": 3.59,
+      "step": 949248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.445081543512258e-05,
+      "loss": 3.5729,
+      "step": 949760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.444242948761205e-05,
+      "loss": 3.5752,
+      "step": 950272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.443405991890527e-05,
+      "loss": 3.5619,
+      "step": 950784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.442567397139475e-05,
+      "loss": 3.5795,
+      "step": 951296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.441728802388423e-05,
+      "loss": 3.5887,
+      "step": 951808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.440890207637371e-05,
+      "loss": 3.5844,
+      "step": 952320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4400532507666925e-05,
+      "loss": 3.5838,
+      "step": 952832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4392146560156405e-05,
+      "loss": 3.5848,
+      "step": 953344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4383760612645885e-05,
+      "loss": 3.5918,
+      "step": 953856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.437537466513536e-05,
+      "loss": 3.5805,
+      "step": 954368
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4367005096428574e-05,
+      "loss": 3.5882,
+      "step": 954880
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4358619148918054e-05,
+      "loss": 3.5728,
+      "step": 955392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.435023320140753e-05,
+      "loss": 3.5773,
+      "step": 955904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.434184725389701e-05,
+      "loss": 3.5788,
+      "step": 956416
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.433347768519022e-05,
+      "loss": 3.5778,
+      "step": 956928
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.43250917376797e-05,
+      "loss": 3.5872,
+      "step": 957440
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4316705790169176e-05,
+      "loss": 3.5836,
+      "step": 957952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.430831984265866e-05,
+      "loss": 3.5868,
+      "step": 958464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.429995027395188e-05,
+      "loss": 3.5683,
+      "step": 958976
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.429156432644136e-05,
+      "loss": 3.5802,
+      "step": 959488
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.428317837893083e-05,
+      "loss": 3.5848,
+      "step": 960000
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.427479243142031e-05,
+      "loss": 3.5708,
+      "step": 960512
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.426642286271353e-05,
+      "loss": 3.5755,
+      "step": 961024
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.4258036915203e-05,
+      "loss": 3.5846,
+      "step": 961536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.424965096769248e-05,
+      "loss": 3.5793,
+      "step": 962048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.424126502018196e-05,
+      "loss": 3.577,
+      "step": 962560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.4232895451475176e-05,
+      "loss": 3.5743,
+      "step": 963072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.422450950396465e-05,
+      "loss": 3.5834,
+      "step": 963584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.421612355645413e-05,
+      "loss": 3.5891,
+      "step": 964096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.4207737608943616e-05,
+      "loss": 3.5848,
+      "step": 964608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.4199368040236825e-05,
+      "loss": 3.5674,
+      "step": 965120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.4190982092726305e-05,
+      "loss": 3.5795,
+      "step": 965632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.4182596145215785e-05,
+      "loss": 3.5712,
+      "step": 966144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.4174210197705265e-05,
+      "loss": 3.5819,
+      "step": 966656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.4165840628998474e-05,
+      "loss": 3.5906,
+      "step": 967168
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.4157454681487954e-05,
+      "loss": 3.572,
+      "step": 967680
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.4149068733977434e-05,
+      "loss": 3.5726,
+      "step": 968192
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.4140682786466914e-05,
+      "loss": 3.5737,
+      "step": 968704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.413231321776012e-05,
+      "loss": 3.5684,
+      "step": 969216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.41239272702496e-05,
+      "loss": 3.5681,
+      "step": 969728
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.411554132273908e-05,
+      "loss": 3.5754,
+      "step": 970240
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.410715537522857e-05,
+      "loss": 3.574,
+      "step": 970752
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.409878580652178e-05,
+      "loss": 3.5754,
+      "step": 971264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.409039985901126e-05,
+      "loss": 3.5686,
+      "step": 971776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.408201391150074e-05,
+      "loss": 3.5714,
+      "step": 972288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.407362796399022e-05,
+      "loss": 3.5682,
+      "step": 972800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.406525839528343e-05,
+      "loss": 3.5861,
+      "step": 973312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.405687244777291e-05,
+      "loss": 3.5715,
+      "step": 973824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.404848650026239e-05,
+      "loss": 3.5868,
+      "step": 974336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.404010055275187e-05,
+      "loss": 3.5851,
+      "step": 974848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.403173098404508e-05,
+      "loss": 3.5923,
+      "step": 975360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.402334503653456e-05,
+      "loss": 3.5759,
+      "step": 975872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.401495908902404e-05,
+      "loss": 3.5732,
+      "step": 976384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.4006573141513524e-05,
+      "loss": 3.577,
+      "step": 976896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.399820357280673e-05,
+      "loss": 3.5811,
+      "step": 977408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.398981762529621e-05,
+      "loss": 3.5746,
+      "step": 977920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.398143167778569e-05,
+      "loss": 3.577,
+      "step": 978432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.397304573027517e-05,
+      "loss": 3.5737,
+      "step": 978944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.396467616156838e-05,
+      "loss": 3.5861,
+      "step": 979456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.395629021405786e-05,
+      "loss": 3.5739,
+      "step": 979968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.394790426654734e-05,
+      "loss": 3.5712,
+      "step": 980480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.393951831903682e-05,
+      "loss": 3.5776,
+      "step": 980992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.393114875033003e-05,
+      "loss": 3.5699,
+      "step": 981504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.392276280281951e-05,
+      "loss": 3.5869,
+      "step": 982016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.391437685530899e-05,
+      "loss": 3.5719,
+      "step": 982528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.390599090779848e-05,
+      "loss": 3.5852,
+      "step": 983040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.3897621339091686e-05,
+      "loss": 3.5665,
+      "step": 983552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.3889235391581166e-05,
+      "loss": 3.5796,
+      "step": 984064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.3880849444070646e-05,
+      "loss": 3.5797,
+      "step": 984576
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.3872479875363855e-05,
+      "loss": 3.5762,
+      "step": 985088
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.3864093927853335e-05,
+      "loss": 3.5732,
+      "step": 985600
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.3855707980342815e-05,
+      "loss": 3.5788,
+      "step": 986112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.3847322032832295e-05,
+      "loss": 3.5849,
+      "step": 986624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.3838952464125504e-05,
+      "loss": 3.5914,
+      "step": 987136
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.3830566516614984e-05,
+      "loss": 3.5699,
+      "step": 987648
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.3822180569104464e-05,
+      "loss": 3.5738,
+      "step": 988160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.3813794621593944e-05,
+      "loss": 3.5776,
+      "step": 988672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.380542505288716e-05,
+      "loss": 3.5724,
+      "step": 989184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.379703910537664e-05,
+      "loss": 3.5682,
+      "step": 989696
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.378865315786612e-05,
+      "loss": 3.5843,
+      "step": 990208
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.37802672103556e-05,
+      "loss": 3.5739,
+      "step": 990720
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.377188126284508e-05,
+      "loss": 3.583,
+      "step": 991232
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.376351169413829e-05,
+      "loss": 3.5736,
+      "step": 991744
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.84389591217041,
+      "eval_runtime": 302.91,
+      "eval_samples_per_second": 1259.751,
+      "eval_steps_per_second": 39.368,
+      "step": 992160
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.375512574662777e-05,
+      "loss": 3.5769,
+      "step": 992256
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.374673979911725e-05,
+      "loss": 3.5605,
+      "step": 992768
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.373835385160673e-05,
+      "loss": 3.5794,
+      "step": 993280
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.372996790409621e-05,
+      "loss": 3.5785,
+      "step": 993792
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.372159833538942e-05,
+      "loss": 3.5789,
+      "step": 994304
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.37132123878789e-05,
+      "loss": 3.5725,
+      "step": 994816
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.370482644036838e-05,
+      "loss": 3.5775,
+      "step": 995328
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.3696440492857865e-05,
+      "loss": 3.5706,
+      "step": 995840
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.3688070924151074e-05,
+      "loss": 3.5655,
+      "step": 996352
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.3679684976640554e-05,
+      "loss": 3.5719,
+      "step": 996864
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.3671299029130034e-05,
+      "loss": 3.5689,
+      "step": 997376
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.3662913081619513e-05,
+      "loss": 3.5816,
+      "step": 997888
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.365455989171645e-05,
+      "loss": 3.5863,
+      "step": 998400
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.364617394420593e-05,
+      "loss": 3.5607,
+      "step": 998912
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.363778799669541e-05,
+      "loss": 3.5674,
+      "step": 999424
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.362940204918489e-05,
+      "loss": 3.5618,
+      "step": 999936
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.362101610167437e-05,
+      "loss": 3.5667,
+      "step": 1000448
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.361263015416385e-05,
+      "loss": 3.5698,
+      "step": 1000960
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.360424420665333e-05,
+      "loss": 3.5618,
+      "step": 1001472
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.359585825914282e-05,
+      "loss": 3.5666,
+      "step": 1001984
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.358748869043603e-05,
+      "loss": 3.5841,
+      "step": 1002496
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.357910274292551e-05,
+      "loss": 3.5716,
+      "step": 1003008
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.357071679541499e-05,
+      "loss": 3.5823,
+      "step": 1003520
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.356233084790447e-05,
+      "loss": 3.5782,
+      "step": 1004032
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.3553961279197676e-05,
+      "loss": 3.5664,
+      "step": 1004544
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.3545575331687156e-05,
+      "loss": 3.575,
+      "step": 1005056
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.3537189384176636e-05,
+      "loss": 3.5626,
+      "step": 1005568
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.3528803436666116e-05,
+      "loss": 3.5714,
+      "step": 1006080
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.3520433867959325e-05,
+      "loss": 3.5641,
+      "step": 1006592
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.3512047920448805e-05,
+      "loss": 3.562,
+      "step": 1007104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3503661972938285e-05,
+      "loss": 3.5744,
+      "step": 1007616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.349527602542777e-05,
+      "loss": 3.5697,
+      "step": 1008128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.348690645672098e-05,
+      "loss": 3.5748,
+      "step": 1008640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.347852050921046e-05,
+      "loss": 3.5689,
+      "step": 1009152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.347013456169994e-05,
+      "loss": 3.5669,
+      "step": 1009664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.346174861418942e-05,
+      "loss": 3.5739,
+      "step": 1010176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.345337904548263e-05,
+      "loss": 3.5794,
+      "step": 1010688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.344499309797211e-05,
+      "loss": 3.5506,
+      "step": 1011200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.343660715046159e-05,
+      "loss": 3.5666,
+      "step": 1011712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.342822120295107e-05,
+      "loss": 3.5638,
+      "step": 1012224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.341985163424428e-05,
+      "loss": 3.5558,
+      "step": 1012736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.341146568673376e-05,
+      "loss": 3.5617,
+      "step": 1013248
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.340307973922324e-05,
+      "loss": 3.5764,
+      "step": 1013760
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3394693791712726e-05,
+      "loss": 3.5715,
+      "step": 1014272
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3386324223005935e-05,
+      "loss": 3.5725,
+      "step": 1014784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3377938275495415e-05,
+      "loss": 3.5705,
+      "step": 1015296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3369552327984895e-05,
+      "loss": 3.5721,
+      "step": 1015808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3361166380474374e-05,
+      "loss": 3.5639,
+      "step": 1016320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3352796811767584e-05,
+      "loss": 3.549,
+      "step": 1016832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3344410864257064e-05,
+      "loss": 3.5749,
+      "step": 1017344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3336024916746543e-05,
+      "loss": 3.5612,
+      "step": 1017856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3327638969236023e-05,
+      "loss": 3.5671,
+      "step": 1018368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.331926940052923e-05,
+      "loss": 3.563,
+      "step": 1018880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.331088345301871e-05,
+      "loss": 3.5575,
+      "step": 1019392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.330249750550819e-05,
+      "loss": 3.5558,
+      "step": 1019904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.329411155799768e-05,
+      "loss": 3.555,
+      "step": 1020416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.328574198929089e-05,
+      "loss": 3.5718,
+      "step": 1020928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.327735604178037e-05,
+      "loss": 3.5544,
+      "step": 1021440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.326897009426985e-05,
+      "loss": 3.5729,
+      "step": 1021952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.326058414675932e-05,
+      "loss": 3.5676,
+      "step": 1022464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.325221457805254e-05,
+      "loss": 3.547,
+      "step": 1022976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.324382863054202e-05,
+      "loss": 3.5725,
+      "step": 1023488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.32354426830315e-05,
+      "loss": 3.555,
+      "step": 1024000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.322705673552097e-05,
+      "loss": 3.5587,
+      "step": 1024512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3218687166814186e-05,
+      "loss": 3.559,
+      "step": 1025024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3210301219303666e-05,
+      "loss": 3.5726,
+      "step": 1025536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3201915271793146e-05,
+      "loss": 3.5579,
+      "step": 1026048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3193529324282626e-05,
+      "loss": 3.5561,
+      "step": 1026560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.318515975557584e-05,
+      "loss": 3.5432,
+      "step": 1027072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.317677380806532e-05,
+      "loss": 3.5606,
+      "step": 1027584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3168387860554795e-05,
+      "loss": 3.5718,
+      "step": 1028096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3160001913044275e-05,
+      "loss": 3.5711,
+      "step": 1028608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.315163234433749e-05,
+      "loss": 3.5623,
+      "step": 1029120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.314324639682697e-05,
+      "loss": 3.5693,
+      "step": 1029632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3134860449316444e-05,
+      "loss": 3.5753,
+      "step": 1030144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3126474501805924e-05,
+      "loss": 3.56,
+      "step": 1030656
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.311810493309914e-05,
+      "loss": 3.568,
+      "step": 1031168
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.310971898558862e-05,
+      "loss": 3.5625,
+      "step": 1031680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.31013330380781e-05,
+      "loss": 3.5578,
+      "step": 1032192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.309294709056758e-05,
+      "loss": 3.5603,
+      "step": 1032704
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3084577521860796e-05,
+      "loss": 3.5625,
+      "step": 1033216
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.307619157435027e-05,
+      "loss": 3.5703,
+      "step": 1033728
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.306780562683975e-05,
+      "loss": 3.5675,
+      "step": 1034240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.305941967932923e-05,
+      "loss": 3.5691,
+      "step": 1034752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3051050110622445e-05,
+      "loss": 3.5499,
+      "step": 1035264
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.304266416311192e-05,
+      "loss": 3.5655,
+      "step": 1035776
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.30342782156014e-05,
+      "loss": 3.5668,
+      "step": 1036288
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.302589226809088e-05,
+      "loss": 3.5561,
+      "step": 1036800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3017522699384094e-05,
+      "loss": 3.5577,
+      "step": 1037312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.3009136751873574e-05,
+      "loss": 3.5647,
+      "step": 1037824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.3000750804363053e-05,
+      "loss": 3.5608,
+      "step": 1038336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2992364856852533e-05,
+      "loss": 3.5615,
+      "step": 1038848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.298399528814574e-05,
+      "loss": 3.5583,
+      "step": 1039360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.297560934063522e-05,
+      "loss": 3.5693,
+      "step": 1039872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.29672233931247e-05,
+      "loss": 3.5688,
+      "step": 1040384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.295883744561418e-05,
+      "loss": 3.5689,
+      "step": 1040896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.295046787690739e-05,
+      "loss": 3.5499,
+      "step": 1041408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.294208192939687e-05,
+      "loss": 3.562,
+      "step": 1041920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.293369598188635e-05,
+      "loss": 3.5578,
+      "step": 1042432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.292531003437583e-05,
+      "loss": 3.5643,
+      "step": 1042944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.291694046566905e-05,
+      "loss": 3.5727,
+      "step": 1043456
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.290855451815853e-05,
+      "loss": 3.5535,
+      "step": 1043968
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.290016857064801e-05,
+      "loss": 3.5556,
+      "step": 1044480
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.289178262313749e-05,
+      "loss": 3.5543,
+      "step": 1044992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2883413054430696e-05,
+      "loss": 3.5561,
+      "step": 1045504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2875027106920176e-05,
+      "loss": 3.5499,
+      "step": 1046016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2866641159409656e-05,
+      "loss": 3.5579,
+      "step": 1046528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2858255211899136e-05,
+      "loss": 3.5584,
+      "step": 1047040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2849885643192345e-05,
+      "loss": 3.5583,
+      "step": 1047552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2841499695681825e-05,
+      "loss": 3.5512,
+      "step": 1048064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2833113748171305e-05,
+      "loss": 3.5559,
+      "step": 1048576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2824727800660785e-05,
+      "loss": 3.5518,
+      "step": 1049088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2816358231954e-05,
+      "loss": 3.5694,
+      "step": 1049600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.280797228444348e-05,
+      "loss": 3.5573,
+      "step": 1050112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.279958633693296e-05,
+      "loss": 3.5665,
+      "step": 1050624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.279120038942244e-05,
+      "loss": 3.5679,
+      "step": 1051136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.278283082071565e-05,
+      "loss": 3.5783,
+      "step": 1051648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.277444487320513e-05,
+      "loss": 3.5577,
+      "step": 1052160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.276605892569461e-05,
+      "loss": 3.5591,
+      "step": 1052672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.275767297818409e-05,
+      "loss": 3.5608,
+      "step": 1053184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.27493034094773e-05,
+      "loss": 3.5593,
+      "step": 1053696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.274091746196678e-05,
+      "loss": 3.5584,
+      "step": 1054208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.273253151445626e-05,
+      "loss": 3.5605,
+      "step": 1054720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.272414556694574e-05,
+      "loss": 3.5571,
+      "step": 1055232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.271577599823895e-05,
+      "loss": 3.5667,
+      "step": 1055744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2707390050728435e-05,
+      "loss": 3.5587,
+      "step": 1056256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2699004103217914e-05,
+      "loss": 3.555,
+      "step": 1056768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2690618155707394e-05,
+      "loss": 3.5603,
+      "step": 1057280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2682248587000604e-05,
+      "loss": 3.5556,
+      "step": 1057792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2673862639490083e-05,
+      "loss": 3.5647,
+      "step": 1058304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2665476691979563e-05,
+      "loss": 3.5605,
+      "step": 1058816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.265709074446904e-05,
+      "loss": 3.5665,
+      "step": 1059328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.264872117576225e-05,
+      "loss": 3.5546,
+      "step": 1059840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.264033522825173e-05,
+      "loss": 3.5598,
+      "step": 1060352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.263194928074121e-05,
+      "loss": 3.5639,
+      "step": 1060864
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.262356333323069e-05,
+      "loss": 3.5584,
+      "step": 1061376
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.26151937645239e-05,
+      "loss": 3.5577,
+      "step": 1061888
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.260680781701339e-05,
+      "loss": 3.5584,
+      "step": 1062400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.259842186950287e-05,
+      "loss": 3.5667,
+      "step": 1062912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.259003592199235e-05,
+      "loss": 3.5762,
+      "step": 1063424
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.258166635328556e-05,
+      "loss": 3.5548,
+      "step": 1063936
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.257328040577504e-05,
+      "loss": 3.5543,
+      "step": 1064448
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.256489445826452e-05,
+      "loss": 3.5584,
+      "step": 1064960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2556508510754e-05,
+      "loss": 3.559,
+      "step": 1065472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2548138942047206e-05,
+      "loss": 3.5478,
+      "step": 1065984
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2539752994536686e-05,
+      "loss": 3.5663,
+      "step": 1066496
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2531367047026166e-05,
+      "loss": 3.5583,
+      "step": 1067008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2522981099515646e-05,
+      "loss": 3.5674,
+      "step": 1067520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.2514611530808855e-05,
+      "loss": 3.5512,
+      "step": 1068032
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.843731164932251,
+      "eval_runtime": 331.8069,
+      "eval_samples_per_second": 1150.039,
+      "eval_steps_per_second": 35.94,
+      "step": 1068480
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.250622558329834e-05,
+      "loss": 3.5613,
+      "step": 1068544
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.249783963578782e-05,
+      "loss": 3.5488,
+      "step": 1069056
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.24894536882773e-05,
+      "loss": 3.5583,
+      "step": 1069568
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.248108411957051e-05,
+      "loss": 3.5618,
+      "step": 1070080
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.247269817205999e-05,
+      "loss": 3.5624,
+      "step": 1070592
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.246431222454947e-05,
+      "loss": 3.5536,
+      "step": 1071104
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.245592627703895e-05,
+      "loss": 3.5552,
+      "step": 1071616
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.244755670833216e-05,
+      "loss": 3.5549,
+      "step": 1072128
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.243917076082164e-05,
+      "loss": 3.5515,
+      "step": 1072640
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.243078481331112e-05,
+      "loss": 3.5553,
+      "step": 1073152
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.24223988658006e-05,
+      "loss": 3.5539,
+      "step": 1073664
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.241402929709381e-05,
+      "loss": 3.568,
+      "step": 1074176
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.2405659728387025e-05,
+      "loss": 3.5714,
+      "step": 1074688
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.2397273780876505e-05,
+      "loss": 3.5453,
+      "step": 1075200
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.2388887833365985e-05,
+      "loss": 3.5531,
+      "step": 1075712
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.2380501885855465e-05,
+      "loss": 3.5425,
+      "step": 1076224
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.2372115938344944e-05,
+      "loss": 3.5542,
+      "step": 1076736
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.2363729990834424e-05,
+      "loss": 3.5488,
+      "step": 1077248
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.2355344043323904e-05,
+      "loss": 3.5488,
+      "step": 1077760
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.2346958095813384e-05,
+      "loss": 3.5477,
+      "step": 1078272
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.2338588527106593e-05,
+      "loss": 3.5675,
+      "step": 1078784
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.2330202579596073e-05,
+      "loss": 3.556,
+      "step": 1079296
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.232181663208555e-05,
+      "loss": 3.5638,
+      "step": 1079808
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.231343068457503e-05,
+      "loss": 3.5586,
+      "step": 1080320
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.230506111586825e-05,
+      "loss": 3.5583,
+      "step": 1080832
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.229667516835773e-05,
+      "loss": 3.5536,
+      "step": 1081344
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.228828922084721e-05,
+      "loss": 3.547,
+      "step": 1081856
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.227990327333669e-05,
+      "loss": 3.5539,
+      "step": 1082368
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.22715337046299e-05,
+      "loss": 3.5498,
+      "step": 1082880
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.226314775711938e-05,
+      "loss": 3.5477,
+      "step": 1083392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.225476180960886e-05,
+      "loss": 3.5557,
+      "step": 1083904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.224637586209834e-05,
+      "loss": 3.5492,
+      "step": 1084416
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.223800629339155e-05,
+      "loss": 3.5613,
+      "step": 1084928
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.222962034588103e-05,
+      "loss": 3.5503,
+      "step": 1085440
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.222123439837051e-05,
+      "loss": 3.5512,
+      "step": 1085952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.221284845085999e-05,
+      "loss": 3.5622,
+      "step": 1086464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.22044788821532e-05,
+      "loss": 3.5564,
+      "step": 1086976
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.219609293464268e-05,
+      "loss": 3.5347,
+      "step": 1087488
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.218770698713216e-05,
+      "loss": 3.551,
+      "step": 1088000
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.217932103962164e-05,
+      "loss": 3.5461,
+      "step": 1088512
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.217095147091485e-05,
+      "loss": 3.5387,
+      "step": 1089024
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.216256552340433e-05,
+      "loss": 3.5506,
+      "step": 1089536
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.215417957589381e-05,
+      "loss": 3.563,
+      "step": 1090048
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.214579362838329e-05,
+      "loss": 3.5507,
+      "step": 1090560
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.21374240596765e-05,
+      "loss": 3.5555,
+      "step": 1091072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.212903811216598e-05,
+      "loss": 3.5546,
+      "step": 1091584
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.212065216465546e-05,
+      "loss": 3.5544,
+      "step": 1092096
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.211226621714494e-05,
+      "loss": 3.547,
+      "step": 1092608
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.2103896648438157e-05,
+      "loss": 3.5357,
+      "step": 1093120
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.2095510700927636e-05,
+      "loss": 3.5543,
+      "step": 1093632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.2087124753417116e-05,
+      "loss": 3.547,
+      "step": 1094144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.207873880590659e-05,
+      "loss": 3.5501,
+      "step": 1094656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.2070369237199805e-05,
+      "loss": 3.5465,
+      "step": 1095168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.2061983289689285e-05,
+      "loss": 3.5434,
+      "step": 1095680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.2053597342178765e-05,
+      "loss": 3.5355,
+      "step": 1096192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.204521139466824e-05,
+      "loss": 3.5393,
+      "step": 1096704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.2036841825961454e-05,
+      "loss": 3.5604,
+      "step": 1097216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.2028455878450934e-05,
+      "loss": 3.537,
+      "step": 1097728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.2020069930940414e-05,
+      "loss": 3.5551,
+      "step": 1098240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.2011683983429894e-05,
+      "loss": 3.5494,
+      "step": 1098752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.2003314414723103e-05,
+      "loss": 3.5335,
+      "step": 1099264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.199492846721259e-05,
+      "loss": 3.5489,
+      "step": 1099776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.198654251970206e-05,
+      "loss": 3.5415,
+      "step": 1100288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.197815657219154e-05,
+      "loss": 3.5456,
+      "step": 1100800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.196978700348476e-05,
+      "loss": 3.5402,
+      "step": 1101312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.196140105597424e-05,
+      "loss": 3.5585,
+      "step": 1101824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.195301510846371e-05,
+      "loss": 3.5463,
+      "step": 1102336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.194462916095319e-05,
+      "loss": 3.5354,
+      "step": 1102848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.193625959224641e-05,
+      "loss": 3.5315,
+      "step": 1103360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.192787364473589e-05,
+      "loss": 3.5439,
+      "step": 1103872
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.191948769722536e-05,
+      "loss": 3.5516,
+      "step": 1104384
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.191110174971484e-05,
+      "loss": 3.5609,
+      "step": 1104896
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.190273218100806e-05,
+      "loss": 3.5441,
+      "step": 1105408
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.189434623349754e-05,
+      "loss": 3.5571,
+      "step": 1105920
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.188596028598702e-05,
+      "loss": 3.5565,
+      "step": 1106432
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.18775743384765e-05,
+      "loss": 3.5453,
+      "step": 1106944
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.186920476976971e-05,
+      "loss": 3.549,
+      "step": 1107456
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.1860818822259186e-05,
+      "loss": 3.5466,
+      "step": 1107968
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.1852432874748666e-05,
+      "loss": 3.5451,
+      "step": 1108480
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.1844046927238146e-05,
+      "loss": 3.5438,
+      "step": 1108992
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.183567735853136e-05,
+      "loss": 3.546,
+      "step": 1109504
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.1827291411020835e-05,
+      "loss": 3.5561,
+      "step": 1110016
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.1818905463510315e-05,
+      "loss": 3.5559,
+      "step": 1110528
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.1810519515999795e-05,
+      "loss": 3.5495,
+      "step": 1111040
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.180214994729301e-05,
+      "loss": 3.5344,
+      "step": 1111552
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.179376399978249e-05,
+      "loss": 3.5471,
+      "step": 1112064
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.178537805227197e-05,
+      "loss": 3.549,
+      "step": 1112576
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.177699210476145e-05,
+      "loss": 3.5441,
+      "step": 1113088
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.176862253605466e-05,
+      "loss": 3.5417,
+      "step": 1113600
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.176023658854414e-05,
+      "loss": 3.5492,
+      "step": 1114112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.175185064103362e-05,
+      "loss": 3.5483,
+      "step": 1114624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.17434646935231e-05,
+      "loss": 3.5397,
+      "step": 1115136
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.173509512481631e-05,
+      "loss": 3.5442,
+      "step": 1115648
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.172670917730579e-05,
+      "loss": 3.55,
+      "step": 1116160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.171832322979527e-05,
+      "loss": 3.5513,
+      "step": 1116672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.170993728228475e-05,
+      "loss": 3.5565,
+      "step": 1117184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1701567713577964e-05,
+      "loss": 3.5335,
+      "step": 1117696
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1693181766067444e-05,
+      "loss": 3.5458,
+      "step": 1118208
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1684795818556924e-05,
+      "loss": 3.5465,
+      "step": 1118720
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1676409871046404e-05,
+      "loss": 3.5427,
+      "step": 1119232
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.166804030233961e-05,
+      "loss": 3.5596,
+      "step": 1119744
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.165965435482909e-05,
+      "loss": 3.5418,
+      "step": 1120256
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.165126840731857e-05,
+      "loss": 3.5363,
+      "step": 1120768
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.164288245980805e-05,
+      "loss": 3.5423,
+      "step": 1121280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.163451289110126e-05,
+      "loss": 3.5417,
+      "step": 1121792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.162612694359074e-05,
+      "loss": 3.5303,
+      "step": 1122304
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.161774099608022e-05,
+      "loss": 3.5428,
+      "step": 1122816
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.16093550485697e-05,
+      "loss": 3.5423,
+      "step": 1123328
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.160098547986292e-05,
+      "loss": 3.5407,
+      "step": 1123840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.15925995323524e-05,
+      "loss": 3.5335,
+      "step": 1124352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.158421358484188e-05,
+      "loss": 3.5439,
+      "step": 1124864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.157582763733136e-05,
+      "loss": 3.5315,
+      "step": 1125376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.156745806862457e-05,
+      "loss": 3.5518,
+      "step": 1125888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.155907212111405e-05,
+      "loss": 3.5413,
+      "step": 1126400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.155068617360353e-05,
+      "loss": 3.5472,
+      "step": 1126912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.154230022609301e-05,
+      "loss": 3.5536,
+      "step": 1127424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1533930657386216e-05,
+      "loss": 3.5641,
+      "step": 1127936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1525544709875696e-05,
+      "loss": 3.5422,
+      "step": 1128448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1517158762365176e-05,
+      "loss": 3.54,
+      "step": 1128960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1508772814854656e-05,
+      "loss": 3.5476,
+      "step": 1129472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.150040324614787e-05,
+      "loss": 3.5388,
+      "step": 1129984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.149201729863735e-05,
+      "loss": 3.5444,
+      "step": 1130496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.148363135112683e-05,
+      "loss": 3.5449,
+      "step": 1131008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.147524540361631e-05,
+      "loss": 3.5422,
+      "step": 1131520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.146687583490952e-05,
+      "loss": 3.5523,
+      "step": 1132032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1458489887399e-05,
+      "loss": 3.5444,
+      "step": 1132544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.145010393988848e-05,
+      "loss": 3.5377,
+      "step": 1133056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.144171799237796e-05,
+      "loss": 3.5444,
+      "step": 1133568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.143334842367117e-05,
+      "loss": 3.547,
+      "step": 1134080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.142496247616065e-05,
+      "loss": 3.5424,
+      "step": 1134592
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.141657652865013e-05,
+      "loss": 3.5461,
+      "step": 1135104
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.140819058113961e-05,
+      "loss": 3.5494,
+      "step": 1135616
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1399821012432825e-05,
+      "loss": 3.537,
+      "step": 1136128
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1391435064922305e-05,
+      "loss": 3.5428,
+      "step": 1136640
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1383049117411785e-05,
+      "loss": 3.5478,
+      "step": 1137152
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1374663169901265e-05,
+      "loss": 3.5495,
+      "step": 1137664
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1366293601194474e-05,
+      "loss": 3.5397,
+      "step": 1138176
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1357907653683954e-05,
+      "loss": 3.544,
+      "step": 1138688
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1349521706173434e-05,
+      "loss": 3.5526,
+      "step": 1139200
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.1341135758662914e-05,
+      "loss": 3.5533,
+      "step": 1139712
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.133276618995612e-05,
+      "loss": 3.5427,
+      "step": 1140224
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.13243802424456e-05,
+      "loss": 3.5433,
+      "step": 1140736
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.131599429493508e-05,
+      "loss": 3.5397,
+      "step": 1141248
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.130760834742456e-05,
+      "loss": 3.5442,
+      "step": 1141760
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.129923877871778e-05,
+      "loss": 3.5325,
+      "step": 1142272
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.129085283120726e-05,
+      "loss": 3.5533,
+      "step": 1142784
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.128246688369674e-05,
+      "loss": 3.5421,
+      "step": 1143296
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.127408093618622e-05,
+      "loss": 3.5477,
+      "step": 1143808
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.126571136747943e-05,
+      "loss": 3.5368,
+      "step": 1144320
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.8446173667907715,
+      "eval_runtime": 317.0712,
+      "eval_samples_per_second": 1203.487,
+      "eval_steps_per_second": 37.61,
+      "step": 1144800
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.125732541996891e-05,
+      "loss": 3.5019,
+      "step": 1144832
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.124893947245839e-05,
+      "loss": 3.534,
+      "step": 1145344
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.124055352494787e-05,
+      "loss": 3.5436,
+      "step": 1145856
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.123216757743735e-05,
+      "loss": 3.5468,
+      "step": 1146368
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.122378162992683e-05,
+      "loss": 3.5443,
+      "step": 1146880
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.121541206122004e-05,
+      "loss": 3.5438,
+      "step": 1147392
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.120702611370952e-05,
+      "loss": 3.5386,
+      "step": 1147904
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.1198640166199004e-05,
+      "loss": 3.5373,
+      "step": 1148416
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.119027059749221e-05,
+      "loss": 3.5392,
+      "step": 1148928
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.118188464998169e-05,
+      "loss": 3.5397,
+      "step": 1149440
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.117349870247117e-05,
+      "loss": 3.5389,
+      "step": 1149952
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.116511275496065e-05,
+      "loss": 3.5519,
+      "step": 1150464
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.115674318625386e-05,
+      "loss": 3.5525,
+      "step": 1150976
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.114835723874334e-05,
+      "loss": 3.5302,
+      "step": 1151488
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.113997129123282e-05,
+      "loss": 3.5382,
+      "step": 1152000
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.11315853437223e-05,
+      "loss": 3.5271,
+      "step": 1152512
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.112319939621178e-05,
+      "loss": 3.54,
+      "step": 1153024
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.111482982750499e-05,
+      "loss": 3.5286,
+      "step": 1153536
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.110644387999447e-05,
+      "loss": 3.5364,
+      "step": 1154048
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.109805793248395e-05,
+      "loss": 3.5353,
+      "step": 1154560
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.108967198497344e-05,
+      "loss": 3.5492,
+      "step": 1155072
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.1081302416266646e-05,
+      "loss": 3.5437,
+      "step": 1155584
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.1072916468756126e-05,
+      "loss": 3.544,
+      "step": 1156096
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.1064530521245606e-05,
+      "loss": 3.546,
+      "step": 1156608
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.1056144573735086e-05,
+      "loss": 3.5446,
+      "step": 1157120
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.1047775005028295e-05,
+      "loss": 3.5374,
+      "step": 1157632
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.1039389057517775e-05,
+      "loss": 3.5311,
+      "step": 1158144
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.1031003110007255e-05,
+      "loss": 3.5399,
+      "step": 1158656
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.1022617162496735e-05,
+      "loss": 3.5332,
+      "step": 1159168
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.1014247593789944e-05,
+      "loss": 3.532,
+      "step": 1159680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.1005861646279424e-05,
+      "loss": 3.5397,
+      "step": 1160192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0997475698768904e-05,
+      "loss": 3.5363,
+      "step": 1160704
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0989089751258384e-05,
+      "loss": 3.5444,
+      "step": 1161216
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.09807201825516e-05,
+      "loss": 3.534,
+      "step": 1161728
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.097233423504108e-05,
+      "loss": 3.5376,
+      "step": 1162240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.096394828753056e-05,
+      "loss": 3.5481,
+      "step": 1162752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.095556234002003e-05,
+      "loss": 3.5409,
+      "step": 1163264
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.094719277131325e-05,
+      "loss": 3.5184,
+      "step": 1163776
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.093880682380273e-05,
+      "loss": 3.5437,
+      "step": 1164288
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.093042087629221e-05,
+      "loss": 3.5269,
+      "step": 1164800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.092203492878168e-05,
+      "loss": 3.5223,
+      "step": 1165312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.09136653600749e-05,
+      "loss": 3.535,
+      "step": 1165824
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.090527941256438e-05,
+      "loss": 3.5491,
+      "step": 1166336
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.089689346505386e-05,
+      "loss": 3.5328,
+      "step": 1166848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.088850751754334e-05,
+      "loss": 3.5382,
+      "step": 1167360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0880137948836554e-05,
+      "loss": 3.5441,
+      "step": 1167872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0871752001326034e-05,
+      "loss": 3.541,
+      "step": 1168384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.086336605381551e-05,
+      "loss": 3.53,
+      "step": 1168896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.085498010630499e-05,
+      "loss": 3.5201,
+      "step": 1169408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.08466105375982e-05,
+      "loss": 3.5391,
+      "step": 1169920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.083822459008768e-05,
+      "loss": 3.5385,
+      "step": 1170432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0829838642577156e-05,
+      "loss": 3.5334,
+      "step": 1170944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0821452695066636e-05,
+      "loss": 3.5284,
+      "step": 1171456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.081308312635985e-05,
+      "loss": 3.5259,
+      "step": 1171968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.080469717884933e-05,
+      "loss": 3.527,
+      "step": 1172480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.079631123133881e-05,
+      "loss": 3.5216,
+      "step": 1172992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.078792528382829e-05,
+      "loss": 3.5437,
+      "step": 1173504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.077955571512151e-05,
+      "loss": 3.5241,
+      "step": 1174016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.077116976761098e-05,
+      "loss": 3.5391,
+      "step": 1174528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.076278382010046e-05,
+      "loss": 3.5355,
+      "step": 1175040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.075439787258994e-05,
+      "loss": 3.5202,
+      "step": 1175552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0746028303883156e-05,
+      "loss": 3.5346,
+      "step": 1176064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.073764235637263e-05,
+      "loss": 3.5288,
+      "step": 1176576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.072925640886211e-05,
+      "loss": 3.526,
+      "step": 1177088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.072087046135159e-05,
+      "loss": 3.5225,
+      "step": 1177600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0712500892644805e-05,
+      "loss": 3.5481,
+      "step": 1178112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0704114945134285e-05,
+      "loss": 3.5293,
+      "step": 1178624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0695728997623765e-05,
+      "loss": 3.5242,
+      "step": 1179136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0687343050113245e-05,
+      "loss": 3.5159,
+      "step": 1179648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0678973481406454e-05,
+      "loss": 3.5279,
+      "step": 1180160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0670587533895934e-05,
+      "loss": 3.5362,
+      "step": 1180672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0662201586385414e-05,
+      "loss": 3.5425,
+      "step": 1181184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.0653815638874894e-05,
+      "loss": 3.5282,
+      "step": 1181696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.06454460701681e-05,
+      "loss": 3.5413,
+      "step": 1182208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.063706012265758e-05,
+      "loss": 3.5422,
+      "step": 1182720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.062867417514706e-05,
+      "loss": 3.5333,
+      "step": 1183232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.062028822763654e-05,
+      "loss": 3.532,
+      "step": 1183744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.061191865892976e-05,
+      "loss": 3.5317,
+      "step": 1184256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.060353271141924e-05,
+      "loss": 3.5321,
+      "step": 1184768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.059514676390872e-05,
+      "loss": 3.5303,
+      "step": 1185280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.05867608163982e-05,
+      "loss": 3.529,
+      "step": 1185792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.057839124769141e-05,
+      "loss": 3.541,
+      "step": 1186304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.057000530018089e-05,
+      "loss": 3.5361,
+      "step": 1186816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.056161935267037e-05,
+      "loss": 3.5371,
+      "step": 1187328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.055323340515985e-05,
+      "loss": 3.5209,
+      "step": 1187840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.054486383645306e-05,
+      "loss": 3.5305,
+      "step": 1188352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.053647788894254e-05,
+      "loss": 3.5387,
+      "step": 1188864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.052809194143202e-05,
+      "loss": 3.5317,
+      "step": 1189376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.05197059939215e-05,
+      "loss": 3.5259,
+      "step": 1189888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.051133642521471e-05,
+      "loss": 3.5319,
+      "step": 1190400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.050295047770419e-05,
+      "loss": 3.5369,
+      "step": 1190912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.049456453019367e-05,
+      "loss": 3.5226,
+      "step": 1191424
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.048617858268315e-05,
+      "loss": 3.5282,
+      "step": 1191936
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.047780901397636e-05,
+      "loss": 3.5359,
+      "step": 1192448
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.046942306646584e-05,
+      "loss": 3.534,
+      "step": 1192960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.046103711895532e-05,
+      "loss": 3.5393,
+      "step": 1193472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.04526511714448e-05,
+      "loss": 3.5273,
+      "step": 1193984
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.044428160273801e-05,
+      "loss": 3.5276,
+      "step": 1194496
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.043589565522749e-05,
+      "loss": 3.5315,
+      "step": 1195008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0427509707716974e-05,
+      "loss": 3.5336,
+      "step": 1195520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0419123760206454e-05,
+      "loss": 3.5404,
+      "step": 1196032
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0410754191499663e-05,
+      "loss": 3.531,
+      "step": 1196544
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0402368243989143e-05,
+      "loss": 3.5196,
+      "step": 1197056
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0393982296478623e-05,
+      "loss": 3.5256,
+      "step": 1197568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0385596348968103e-05,
+      "loss": 3.5364,
+      "step": 1198080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0377226780261315e-05,
+      "loss": 3.5082,
+      "step": 1198592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0368840832750795e-05,
+      "loss": 3.5298,
+      "step": 1199104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0360454885240275e-05,
+      "loss": 3.532,
+      "step": 1199616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0352068937729755e-05,
+      "loss": 3.5241,
+      "step": 1200128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0343699369022964e-05,
+      "loss": 3.5181,
+      "step": 1200640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0335313421512444e-05,
+      "loss": 3.5312,
+      "step": 1201152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0326927474001927e-05,
+      "loss": 3.5176,
+      "step": 1201664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0318541526491407e-05,
+      "loss": 3.5381,
+      "step": 1202176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0310171957784617e-05,
+      "loss": 3.5255,
+      "step": 1202688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0301786010274096e-05,
+      "loss": 3.5324,
+      "step": 1203200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0293400062763576e-05,
+      "loss": 3.5376,
+      "step": 1203712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0285014115253056e-05,
+      "loss": 3.5506,
+      "step": 1204224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.027664454654627e-05,
+      "loss": 3.5285,
+      "step": 1204736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.026825859903575e-05,
+      "loss": 3.5244,
+      "step": 1205248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.025987265152523e-05,
+      "loss": 3.5336,
+      "step": 1205760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.025148670401471e-05,
+      "loss": 3.5267,
+      "step": 1206272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0243117135307918e-05,
+      "loss": 3.529,
+      "step": 1206784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0234731187797398e-05,
+      "loss": 3.5306,
+      "step": 1207296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.022634524028688e-05,
+      "loss": 3.5282,
+      "step": 1207808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.021795929277636e-05,
+      "loss": 3.5379,
+      "step": 1208320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.020958972406957e-05,
+      "loss": 3.5293,
+      "step": 1208832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.020120377655905e-05,
+      "loss": 3.524,
+      "step": 1209344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.019281782904853e-05,
+      "loss": 3.5317,
+      "step": 1209856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.018443188153801e-05,
+      "loss": 3.5304,
+      "step": 1210368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0176062312831223e-05,
+      "loss": 3.5242,
+      "step": 1210880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0167676365320702e-05,
+      "loss": 3.5335,
+      "step": 1211392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0159290417810182e-05,
+      "loss": 3.5372,
+      "step": 1211904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0150904470299662e-05,
+      "loss": 3.5266,
+      "step": 1212416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.014253490159287e-05,
+      "loss": 3.5196,
+      "step": 1212928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.013414895408235e-05,
+      "loss": 3.5336,
+      "step": 1213440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0125763006571835e-05,
+      "loss": 3.5307,
+      "step": 1213952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0117377059061315e-05,
+      "loss": 3.5284,
+      "step": 1214464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0109007490354524e-05,
+      "loss": 3.5322,
+      "step": 1214976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0100621542844004e-05,
+      "loss": 3.5396,
+      "step": 1215488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0092235595333484e-05,
+      "loss": 3.538,
+      "step": 1216000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0083849647822964e-05,
+      "loss": 3.5317,
+      "step": 1216512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0075480079116176e-05,
+      "loss": 3.5271,
+      "step": 1217024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0067094131605656e-05,
+      "loss": 3.5261,
+      "step": 1217536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0058708184095136e-05,
+      "loss": 3.5307,
+      "step": 1218048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0050322236584616e-05,
+      "loss": 3.5154,
+      "step": 1218560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0041952667877825e-05,
+      "loss": 3.538,
+      "step": 1219072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0033566720367305e-05,
+      "loss": 3.5352,
+      "step": 1219584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.002518077285679e-05,
+      "loss": 3.531,
+      "step": 1220096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.0016811204149998e-05,
+      "loss": 3.5224,
+      "step": 1220608
+    },
+    {
+      "epoch": 0.03,
+      "learning_rate": 3.0008425256639478e-05,
+      "loss": 3.5316,
+      "step": 1221120
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.846557378768921,
+      "eval_runtime": 305.296,
+      "eval_samples_per_second": 1249.905,
+      "eval_steps_per_second": 39.06,
+      "step": 1221120
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.0000039309128957e-05,
+      "loss": 3.5181,
+      "step": 1221632
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9991653361618437e-05,
+      "loss": 3.5259,
+      "step": 1222144
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.998328379291165e-05,
+      "loss": 3.5309,
+      "step": 1222656
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.997489784540113e-05,
+      "loss": 3.533,
+      "step": 1223168
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.996651189789061e-05,
+      "loss": 3.5309,
+      "step": 1223680
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.995812595038009e-05,
+      "loss": 3.5242,
+      "step": 1224192
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.99497563816733e-05,
+      "loss": 3.5252,
+      "step": 1224704
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.994137043416278e-05,
+      "loss": 3.5193,
+      "step": 1225216
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.993298448665226e-05,
+      "loss": 3.5311,
+      "step": 1225728
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9924598539141742e-05,
+      "loss": 3.519,
+      "step": 1226240
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.991622897043495e-05,
+      "loss": 3.538,
+      "step": 1226752
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.990785940172816e-05,
+      "loss": 3.5427,
+      "step": 1227264
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.989947345421764e-05,
+      "loss": 3.5165,
+      "step": 1227776
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.989108750670712e-05,
+      "loss": 3.5212,
+      "step": 1228288
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9882701559196604e-05,
+      "loss": 3.513,
+      "step": 1228800
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9874315611686084e-05,
+      "loss": 3.5256,
+      "step": 1229312
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9865929664175563e-05,
+      "loss": 3.5188,
+      "step": 1229824
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9857543716665043e-05,
+      "loss": 3.5197,
+      "step": 1230336
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9849157769154523e-05,
+      "loss": 3.5207,
+      "step": 1230848
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9840788200447733e-05,
+      "loss": 3.5319,
+      "step": 1231360
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9832402252937212e-05,
+      "loss": 3.5265,
+      "step": 1231872
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9824016305426696e-05,
+      "loss": 3.5303,
+      "step": 1232384
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9815630357916176e-05,
+      "loss": 3.53,
+      "step": 1232896
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9807260789209385e-05,
+      "loss": 3.5337,
+      "step": 1233408
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9798874841698865e-05,
+      "loss": 3.521,
+      "step": 1233920
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9790488894188345e-05,
+      "loss": 3.5182,
+      "step": 1234432
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9782102946677825e-05,
+      "loss": 3.5242,
+      "step": 1234944
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9773733377971037e-05,
+      "loss": 3.5186,
+      "step": 1235456
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.9765347430460517e-05,
+      "loss": 3.5178,
+      "step": 1235968
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9756961482949997e-05,
+      "loss": 3.5205,
+      "step": 1236480
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9748575535439477e-05,
+      "loss": 3.5258,
+      "step": 1236992
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9740205966732686e-05,
+      "loss": 3.53,
+      "step": 1237504
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9731820019222166e-05,
+      "loss": 3.5166,
+      "step": 1238016
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9723434071711646e-05,
+      "loss": 3.5228,
+      "step": 1238528
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.971504812420113e-05,
+      "loss": 3.5337,
+      "step": 1239040
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.970667855549434e-05,
+      "loss": 3.5249,
+      "step": 1239552
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.969829260798382e-05,
+      "loss": 3.5087,
+      "step": 1240064
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.96899066604733e-05,
+      "loss": 3.5265,
+      "step": 1240576
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9681520712962775e-05,
+      "loss": 3.5103,
+      "step": 1241088
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.967315114425599e-05,
+      "loss": 3.5126,
+      "step": 1241600
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.966476519674547e-05,
+      "loss": 3.5186,
+      "step": 1242112
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.965637924923495e-05,
+      "loss": 3.5312,
+      "step": 1242624
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9647993301724424e-05,
+      "loss": 3.5215,
+      "step": 1243136
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.963962373301764e-05,
+      "loss": 3.5254,
+      "step": 1243648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.963123778550712e-05,
+      "loss": 3.5305,
+      "step": 1244160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9622851837996596e-05,
+      "loss": 3.5259,
+      "step": 1244672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9614465890486076e-05,
+      "loss": 3.5195,
+      "step": 1245184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9606096321779292e-05,
+      "loss": 3.5049,
+      "step": 1245696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9597710374268772e-05,
+      "loss": 3.5232,
+      "step": 1246208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9589324426758245e-05,
+      "loss": 3.5233,
+      "step": 1246720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.958093847924773e-05,
+      "loss": 3.5178,
+      "step": 1247232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9572568910540945e-05,
+      "loss": 3.5125,
+      "step": 1247744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9564182963030424e-05,
+      "loss": 3.5116,
+      "step": 1248256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9555797015519898e-05,
+      "loss": 3.5158,
+      "step": 1248768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9547411068009378e-05,
+      "loss": 3.5056,
+      "step": 1249280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9539041499302594e-05,
+      "loss": 3.5281,
+      "step": 1249792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.953065555179207e-05,
+      "loss": 3.5121,
+      "step": 1250304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.952226960428155e-05,
+      "loss": 3.525,
+      "step": 1250816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.951388365677103e-05,
+      "loss": 3.5201,
+      "step": 1251328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9505514088064246e-05,
+      "loss": 3.503,
+      "step": 1251840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.949712814055372e-05,
+      "loss": 3.5223,
+      "step": 1252352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.94887421930432e-05,
+      "loss": 3.515,
+      "step": 1252864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9480356245532682e-05,
+      "loss": 3.5117,
+      "step": 1253376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9471986676825898e-05,
+      "loss": 3.5081,
+      "step": 1253888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.946360072931537e-05,
+      "loss": 3.5325,
+      "step": 1254400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.945521478180485e-05,
+      "loss": 3.5189,
+      "step": 1254912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.944682883429433e-05,
+      "loss": 3.509,
+      "step": 1255424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9438459265587544e-05,
+      "loss": 3.5045,
+      "step": 1255936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9430073318077024e-05,
+      "loss": 3.5113,
+      "step": 1256448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9421687370566504e-05,
+      "loss": 3.5221,
+      "step": 1256960
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9413301423055984e-05,
+      "loss": 3.5308,
+      "step": 1257472
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9404931854349193e-05,
+      "loss": 3.5138,
+      "step": 1257984
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9396545906838673e-05,
+      "loss": 3.5284,
+      "step": 1258496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9388159959328153e-05,
+      "loss": 3.5251,
+      "step": 1259008
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9379774011817636e-05,
+      "loss": 3.521,
+      "step": 1259520
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9371404443110845e-05,
+      "loss": 3.5147,
+      "step": 1260032
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9363018495600325e-05,
+      "loss": 3.52,
+      "step": 1260544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9354632548089805e-05,
+      "loss": 3.5156,
+      "step": 1261056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9346246600579285e-05,
+      "loss": 3.5167,
+      "step": 1261568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9337877031872497e-05,
+      "loss": 3.5172,
+      "step": 1262080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9329491084361977e-05,
+      "loss": 3.5202,
+      "step": 1262592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9321105136851457e-05,
+      "loss": 3.5246,
+      "step": 1263104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9312719189340937e-05,
+      "loss": 3.5233,
+      "step": 1263616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9304349620634146e-05,
+      "loss": 3.5125,
+      "step": 1264128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9295963673123626e-05,
+      "loss": 3.5142,
+      "step": 1264640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.9287577725613106e-05,
+      "loss": 3.5252,
+      "step": 1265152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.927919177810259e-05,
+      "loss": 3.5154,
+      "step": 1265664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.92708222093958e-05,
+      "loss": 3.5146,
+      "step": 1266176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.926243626188528e-05,
+      "loss": 3.5152,
+      "step": 1266688
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.925405031437476e-05,
+      "loss": 3.5244,
+      "step": 1267200
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.924566436686424e-05,
+      "loss": 3.5094,
+      "step": 1267712
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.923729479815745e-05,
+      "loss": 3.5146,
+      "step": 1268224
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.922890885064693e-05,
+      "loss": 3.5171,
+      "step": 1268736
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.922052290313641e-05,
+      "loss": 3.525,
+      "step": 1269248
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.921213695562589e-05,
+      "loss": 3.5189,
+      "step": 1269760
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.92037673869191e-05,
+      "loss": 3.5217,
+      "step": 1270272
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.919538143940858e-05,
+      "loss": 3.508,
+      "step": 1270784
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.918699549189806e-05,
+      "loss": 3.5194,
+      "step": 1271296
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9178625923191272e-05,
+      "loss": 3.5191,
+      "step": 1271808
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9170239975680752e-05,
+      "loss": 3.5258,
+      "step": 1272320
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9161854028170232e-05,
+      "loss": 3.5188,
+      "step": 1272832
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9153468080659712e-05,
+      "loss": 3.5088,
+      "step": 1273344
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.914509851195292e-05,
+      "loss": 3.5081,
+      "step": 1273856
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9136712564442405e-05,
+      "loss": 3.5187,
+      "step": 1274368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9128326616931885e-05,
+      "loss": 3.4983,
+      "step": 1274880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9119940669421365e-05,
+      "loss": 3.5113,
+      "step": 1275392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9111571100714574e-05,
+      "loss": 3.5157,
+      "step": 1275904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9103185153204054e-05,
+      "loss": 3.5158,
+      "step": 1276416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9094799205693534e-05,
+      "loss": 3.5038,
+      "step": 1276928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9086413258183014e-05,
+      "loss": 3.5185,
+      "step": 1277440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9078043689476226e-05,
+      "loss": 3.5041,
+      "step": 1277952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9069657741965706e-05,
+      "loss": 3.5264,
+      "step": 1278464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9061271794455186e-05,
+      "loss": 3.5106,
+      "step": 1278976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9052885846944666e-05,
+      "loss": 3.5194,
+      "step": 1279488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9044516278237875e-05,
+      "loss": 3.5236,
+      "step": 1280000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.903613033072736e-05,
+      "loss": 3.5381,
+      "step": 1280512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.902774438321684e-05,
+      "loss": 3.5114,
+      "step": 1281024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.901935843570632e-05,
+      "loss": 3.515,
+      "step": 1281536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9010988866999527e-05,
+      "loss": 3.5189,
+      "step": 1282048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.9002602919489007e-05,
+      "loss": 3.5059,
+      "step": 1282560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8994216971978487e-05,
+      "loss": 3.5182,
+      "step": 1283072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8985831024467967e-05,
+      "loss": 3.514,
+      "step": 1283584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.897746145576118e-05,
+      "loss": 3.5102,
+      "step": 1284096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.896907550825066e-05,
+      "loss": 3.5274,
+      "step": 1284608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.896068956074014e-05,
+      "loss": 3.5172,
+      "step": 1285120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.895230361322962e-05,
+      "loss": 3.5104,
+      "step": 1285632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.894393404452283e-05,
+      "loss": 3.5129,
+      "step": 1286144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8935548097012312e-05,
+      "loss": 3.5157,
+      "step": 1286656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8927162149501792e-05,
+      "loss": 3.5094,
+      "step": 1287168
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8918776201991272e-05,
+      "loss": 3.5229,
+      "step": 1287680
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.891040663328448e-05,
+      "loss": 3.5229,
+      "step": 1288192
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.890202068577396e-05,
+      "loss": 3.5118,
+      "step": 1288704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.889363473826344e-05,
+      "loss": 3.5055,
+      "step": 1289216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.888524879075292e-05,
+      "loss": 3.5207,
+      "step": 1289728
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8876879222046133e-05,
+      "loss": 3.5162,
+      "step": 1290240
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8868493274535613e-05,
+      "loss": 3.5185,
+      "step": 1290752
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8860107327025093e-05,
+      "loss": 3.5172,
+      "step": 1291264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8851721379514573e-05,
+      "loss": 3.5266,
+      "step": 1291776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8843351810807782e-05,
+      "loss": 3.5203,
+      "step": 1292288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8834965863297266e-05,
+      "loss": 3.5162,
+      "step": 1292800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8826579915786746e-05,
+      "loss": 3.5146,
+      "step": 1293312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8818193968276226e-05,
+      "loss": 3.5115,
+      "step": 1293824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8809824399569435e-05,
+      "loss": 3.52,
+      "step": 1294336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8801438452058915e-05,
+      "loss": 3.5003,
+      "step": 1294848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8793052504548395e-05,
+      "loss": 3.5237,
+      "step": 1295360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8784666557037875e-05,
+      "loss": 3.5219,
+      "step": 1295872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8776296988331087e-05,
+      "loss": 3.5169,
+      "step": 1296384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8767911040820567e-05,
+      "loss": 3.5096,
+      "step": 1296896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8759525093310047e-05,
+      "loss": 3.5167,
+      "step": 1297408
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.8474502563476562,
+      "eval_runtime": 305.6642,
+      "eval_samples_per_second": 1248.399,
+      "eval_steps_per_second": 39.013,
+      "step": 1297440
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8751139145799527e-05,
+      "loss": 3.5031,
+      "step": 1297920
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8742753198289007e-05,
+      "loss": 3.5133,
+      "step": 1298432
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8734367250778487e-05,
+      "loss": 3.522,
+      "step": 1298944
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8725981303267967e-05,
+      "loss": 3.5114,
+      "step": 1299456
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.871761173456118e-05,
+      "loss": 3.5212,
+      "step": 1299968
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.870922578705066e-05,
+      "loss": 3.5096,
+      "step": 1300480
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.870083983954014e-05,
+      "loss": 3.5093,
+      "step": 1300992
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.869245389202962e-05,
+      "loss": 3.5067,
+      "step": 1301504
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.868408432332283e-05,
+      "loss": 3.5161,
+      "step": 1302016
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8675698375812308e-05,
+      "loss": 3.5056,
+      "step": 1302528
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.866731242830179e-05,
+      "loss": 3.5246,
+      "step": 1303040
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.865892648079127e-05,
+      "loss": 3.5226,
+      "step": 1303552
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.865055691208448e-05,
+      "loss": 3.512,
+      "step": 1304064
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.864217096457396e-05,
+      "loss": 3.504,
+      "step": 1304576
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.863378501706344e-05,
+      "loss": 3.5077,
+      "step": 1305088
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.862539906955292e-05,
+      "loss": 3.5067,
+      "step": 1305600
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8617029500846133e-05,
+      "loss": 3.5046,
+      "step": 1306112
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8608643553335613e-05,
+      "loss": 3.5108,
+      "step": 1306624
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8600257605825093e-05,
+      "loss": 3.5047,
+      "step": 1307136
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8591888037118302e-05,
+      "loss": 3.5216,
+      "step": 1307648
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8583502089607782e-05,
+      "loss": 3.5131,
+      "step": 1308160
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8575116142097262e-05,
+      "loss": 3.5181,
+      "step": 1308672
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8566730194586745e-05,
+      "loss": 3.5157,
+      "step": 1309184
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8558360625879954e-05,
+      "loss": 3.5238,
+      "step": 1309696
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8549974678369434e-05,
+      "loss": 3.5025,
+      "step": 1310208
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8541588730858914e-05,
+      "loss": 3.5106,
+      "step": 1310720
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8533202783348394e-05,
+      "loss": 3.5063,
+      "step": 1311232
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.852481683583787e-05,
+      "loss": 3.5075,
+      "step": 1311744
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.8516447267131087e-05,
+      "loss": 3.5049,
+      "step": 1312256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8508061319620567e-05,
+      "loss": 3.5078,
+      "step": 1312768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.849967537211004e-05,
+      "loss": 3.5147,
+      "step": 1313280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.849128942459952e-05,
+      "loss": 3.5159,
+      "step": 1313792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8482919855892736e-05,
+      "loss": 3.5016,
+      "step": 1314304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8474533908382216e-05,
+      "loss": 3.5137,
+      "step": 1314816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8466147960871692e-05,
+      "loss": 3.5207,
+      "step": 1315328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8457762013361172e-05,
+      "loss": 3.5092,
+      "step": 1315840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8449392444654388e-05,
+      "loss": 3.4962,
+      "step": 1316352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8441006497143868e-05,
+      "loss": 3.5133,
+      "step": 1316864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8432620549633345e-05,
+      "loss": 3.4939,
+      "step": 1317376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8424234602122824e-05,
+      "loss": 3.5027,
+      "step": 1317888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.841586503341604e-05,
+      "loss": 3.5047,
+      "step": 1318400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8407479085905514e-05,
+      "loss": 3.5192,
+      "step": 1318912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8399093138394993e-05,
+      "loss": 3.505,
+      "step": 1319424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.839072356968821e-05,
+      "loss": 3.5103,
+      "step": 1319936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.838233762217769e-05,
+      "loss": 3.517,
+      "step": 1320448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8373951674667166e-05,
+      "loss": 3.51,
+      "step": 1320960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8365565727156646e-05,
+      "loss": 3.5105,
+      "step": 1321472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8357196158449862e-05,
+      "loss": 3.4925,
+      "step": 1321984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8348810210939335e-05,
+      "loss": 3.5099,
+      "step": 1322496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8340424263428815e-05,
+      "loss": 3.5099,
+      "step": 1323008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8332038315918298e-05,
+      "loss": 3.5069,
+      "step": 1323520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8323668747211514e-05,
+      "loss": 3.5052,
+      "step": 1324032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8315282799700987e-05,
+      "loss": 3.4944,
+      "step": 1324544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8306896852190467e-05,
+      "loss": 3.5051,
+      "step": 1325056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8298510904679947e-05,
+      "loss": 3.4936,
+      "step": 1325568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8290141335973163e-05,
+      "loss": 3.5102,
+      "step": 1326080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.828175538846264e-05,
+      "loss": 3.5001,
+      "step": 1326592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.827336944095212e-05,
+      "loss": 3.5117,
+      "step": 1327104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.82649834934416e-05,
+      "loss": 3.5115,
+      "step": 1327616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.825661392473481e-05,
+      "loss": 3.49,
+      "step": 1328128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.824822797722429e-05,
+      "loss": 3.5041,
+      "step": 1328640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.823984202971377e-05,
+      "loss": 3.5067,
+      "step": 1329152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8231456082203252e-05,
+      "loss": 3.4969,
+      "step": 1329664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.822308651349646e-05,
+      "loss": 3.4975,
+      "step": 1330176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.821470056598594e-05,
+      "loss": 3.5158,
+      "step": 1330688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.820631461847542e-05,
+      "loss": 3.5065,
+      "step": 1331200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.81979286709649e-05,
+      "loss": 3.4953,
+      "step": 1331712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8189559102258113e-05,
+      "loss": 3.4883,
+      "step": 1332224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8181173154747593e-05,
+      "loss": 3.4994,
+      "step": 1332736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8172787207237073e-05,
+      "loss": 3.5071,
+      "step": 1333248
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8164401259726553e-05,
+      "loss": 3.515,
+      "step": 1333760
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8156031691019762e-05,
+      "loss": 3.5038,
+      "step": 1334272
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8147645743509242e-05,
+      "loss": 3.5159,
+      "step": 1334784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8139259795998722e-05,
+      "loss": 3.5092,
+      "step": 1335296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8130873848488206e-05,
+      "loss": 3.5117,
+      "step": 1335808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8122504279781415e-05,
+      "loss": 3.4983,
+      "step": 1336320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8114118332270895e-05,
+      "loss": 3.5109,
+      "step": 1336832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8105732384760375e-05,
+      "loss": 3.5038,
+      "step": 1337344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8097346437249855e-05,
+      "loss": 3.5046,
+      "step": 1337856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8088976868543067e-05,
+      "loss": 3.5072,
+      "step": 1338368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8080590921032547e-05,
+      "loss": 3.5031,
+      "step": 1338880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8072204973522027e-05,
+      "loss": 3.5168,
+      "step": 1339392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8063819026011507e-05,
+      "loss": 3.504,
+      "step": 1339904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8055449457304716e-05,
+      "loss": 3.502,
+      "step": 1340416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8047063509794196e-05,
+      "loss": 3.501,
+      "step": 1340928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8038677562283676e-05,
+      "loss": 3.5097,
+      "step": 1341440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.803029161477316e-05,
+      "loss": 3.5028,
+      "step": 1341952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8021922046066368e-05,
+      "loss": 3.5016,
+      "step": 1342464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.8013536098555848e-05,
+      "loss": 3.4994,
+      "step": 1342976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8005150151045328e-05,
+      "loss": 3.5111,
+      "step": 1343488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7996764203534808e-05,
+      "loss": 3.4964,
+      "step": 1344000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.798839463482802e-05,
+      "loss": 3.5022,
+      "step": 1344512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.79800086873175e-05,
+      "loss": 3.5002,
+      "step": 1345024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.797162273980698e-05,
+      "loss": 3.5114,
+      "step": 1345536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.796323679229646e-05,
+      "loss": 3.5084,
+      "step": 1346048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.795486722358967e-05,
+      "loss": 3.5077,
+      "step": 1346560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.794648127607915e-05,
+      "loss": 3.495,
+      "step": 1347072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.793809532856863e-05,
+      "loss": 3.5068,
+      "step": 1347584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7929709381058113e-05,
+      "loss": 3.5043,
+      "step": 1348096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7921339812351322e-05,
+      "loss": 3.5116,
+      "step": 1348608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7912953864840802e-05,
+      "loss": 3.5065,
+      "step": 1349120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7904567917330282e-05,
+      "loss": 3.4972,
+      "step": 1349632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7896181969819762e-05,
+      "loss": 3.4938,
+      "step": 1350144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7887812401112974e-05,
+      "loss": 3.5086,
+      "step": 1350656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7879426453602454e-05,
+      "loss": 3.4814,
+      "step": 1351168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7871040506091934e-05,
+      "loss": 3.5026,
+      "step": 1351680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7862654558581414e-05,
+      "loss": 3.5027,
+      "step": 1352192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7854284989874623e-05,
+      "loss": 3.5021,
+      "step": 1352704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7845899042364103e-05,
+      "loss": 3.4881,
+      "step": 1353216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7837513094853583e-05,
+      "loss": 3.5036,
+      "step": 1353728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7829127147343063e-05,
+      "loss": 3.497,
+      "step": 1354240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7820757578636276e-05,
+      "loss": 3.509,
+      "step": 1354752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7812371631125756e-05,
+      "loss": 3.5,
+      "step": 1355264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7803985683615236e-05,
+      "loss": 3.5006,
+      "step": 1355776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7795599736104716e-05,
+      "loss": 3.5157,
+      "step": 1356288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7787230167397925e-05,
+      "loss": 3.5232,
+      "step": 1356800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7778844219887408e-05,
+      "loss": 3.4983,
+      "step": 1357312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7770458272376888e-05,
+      "loss": 3.5002,
+      "step": 1357824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7762072324866368e-05,
+      "loss": 3.5064,
+      "step": 1358336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7753702756159577e-05,
+      "loss": 3.4956,
+      "step": 1358848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7745316808649057e-05,
+      "loss": 3.5029,
+      "step": 1359360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7736930861138537e-05,
+      "loss": 3.5069,
+      "step": 1359872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7728544913628017e-05,
+      "loss": 3.4983,
+      "step": 1360384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.772017534492123e-05,
+      "loss": 3.5138,
+      "step": 1360896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.771178939741071e-05,
+      "loss": 3.5006,
+      "step": 1361408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.770340344990019e-05,
+      "loss": 3.5011,
+      "step": 1361920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.769501750238967e-05,
+      "loss": 3.4988,
+      "step": 1362432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7686647933682878e-05,
+      "loss": 3.5024,
+      "step": 1362944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.767826198617236e-05,
+      "loss": 3.4967,
+      "step": 1363456
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.766987603866184e-05,
+      "loss": 3.5108,
+      "step": 1363968
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.766149009115132e-05,
+      "loss": 3.5127,
+      "step": 1364480
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.765312052244453e-05,
+      "loss": 3.5008,
+      "step": 1364992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.764473457493401e-05,
+      "loss": 3.488,
+      "step": 1365504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.763634862742349e-05,
+      "loss": 3.5109,
+      "step": 1366016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.762796267991297e-05,
+      "loss": 3.5023,
+      "step": 1366528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7619593111206183e-05,
+      "loss": 3.5054,
+      "step": 1367040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7611207163695663e-05,
+      "loss": 3.5023,
+      "step": 1367552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7602821216185143e-05,
+      "loss": 3.5097,
+      "step": 1368064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7594435268674623e-05,
+      "loss": 3.5073,
+      "step": 1368576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7586065699967832e-05,
+      "loss": 3.5079,
+      "step": 1369088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7577679752457315e-05,
+      "loss": 3.4979,
+      "step": 1369600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7569293804946795e-05,
+      "loss": 3.4962,
+      "step": 1370112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7560907857436275e-05,
+      "loss": 3.5086,
+      "step": 1370624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7552538288729484e-05,
+      "loss": 3.491,
+      "step": 1371136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7544152341218964e-05,
+      "loss": 3.5058,
+      "step": 1371648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7535766393708444e-05,
+      "loss": 3.5158,
+      "step": 1372160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7527380446197924e-05,
+      "loss": 3.5031,
+      "step": 1372672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7519010877491137e-05,
+      "loss": 3.4951,
+      "step": 1373184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.7510624929980617e-05,
+      "loss": 3.5006,
+      "step": 1373696
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.849418878555298,
+      "eval_runtime": 305.8658,
+      "eval_samples_per_second": 1247.577,
+      "eval_steps_per_second": 38.988,
+      "step": 1373760
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7502238982470097e-05,
+      "loss": 3.4958,
+      "step": 1374208
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7493869413763306e-05,
+      "loss": 3.5005,
+      "step": 1374720
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7485483466252786e-05,
+      "loss": 3.506,
+      "step": 1375232
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.747709751874227e-05,
+      "loss": 3.502,
+      "step": 1375744
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.746871157123175e-05,
+      "loss": 3.5086,
+      "step": 1376256
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7460342002524958e-05,
+      "loss": 3.4949,
+      "step": 1376768
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7451956055014438e-05,
+      "loss": 3.502,
+      "step": 1377280
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7443570107503918e-05,
+      "loss": 3.4903,
+      "step": 1377792
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7435184159993398e-05,
+      "loss": 3.5013,
+      "step": 1378304
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.742681459128661e-05,
+      "loss": 3.4968,
+      "step": 1378816
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.741842864377609e-05,
+      "loss": 3.5064,
+      "step": 1379328
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.741004269626557e-05,
+      "loss": 3.5108,
+      "step": 1379840
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.740165674875505e-05,
+      "loss": 3.4992,
+      "step": 1380352
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.739328718004826e-05,
+      "loss": 3.4938,
+      "step": 1380864
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.738490123253774e-05,
+      "loss": 3.4951,
+      "step": 1381376
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7376515285027223e-05,
+      "loss": 3.4943,
+      "step": 1381888
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7368129337516703e-05,
+      "loss": 3.4894,
+      "step": 1382400
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.735975976880991e-05,
+      "loss": 3.4967,
+      "step": 1382912
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.735137382129939e-05,
+      "loss": 3.4888,
+      "step": 1383424
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.734298787378887e-05,
+      "loss": 3.5129,
+      "step": 1383936
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.733460192627835e-05,
+      "loss": 3.4971,
+      "step": 1384448
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7326232357571564e-05,
+      "loss": 3.5095,
+      "step": 1384960
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7317846410061044e-05,
+      "loss": 3.5047,
+      "step": 1385472
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7309460462550524e-05,
+      "loss": 3.5096,
+      "step": 1385984
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7301074515040004e-05,
+      "loss": 3.4861,
+      "step": 1386496
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7292704946333213e-05,
+      "loss": 3.4959,
+      "step": 1387008
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7284318998822693e-05,
+      "loss": 3.4909,
+      "step": 1387520
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7275933051312176e-05,
+      "loss": 3.4978,
+      "step": 1388032
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.7267547103801656e-05,
+      "loss": 3.4905,
+      "step": 1388544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7259177535094865e-05,
+      "loss": 3.4943,
+      "step": 1389056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7250791587584345e-05,
+      "loss": 3.5059,
+      "step": 1389568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7242405640073825e-05,
+      "loss": 3.5018,
+      "step": 1390080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7234019692563305e-05,
+      "loss": 3.4872,
+      "step": 1390592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7225650123856518e-05,
+      "loss": 3.5038,
+      "step": 1391104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7217264176345998e-05,
+      "loss": 3.5023,
+      "step": 1391616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7208878228835478e-05,
+      "loss": 3.4962,
+      "step": 1392128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7200492281324958e-05,
+      "loss": 3.4867,
+      "step": 1392640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.719210633381443e-05,
+      "loss": 3.4988,
+      "step": 1393152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7183736765107647e-05,
+      "loss": 3.4795,
+      "step": 1393664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.717535081759713e-05,
+      "loss": 3.4931,
+      "step": 1394176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.716696487008661e-05,
+      "loss": 3.4903,
+      "step": 1394688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.715859530137982e-05,
+      "loss": 3.5037,
+      "step": 1395200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.71502093538693e-05,
+      "loss": 3.4955,
+      "step": 1395712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.714182340635878e-05,
+      "loss": 3.4975,
+      "step": 1396224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7133437458848255e-05,
+      "loss": 3.5042,
+      "step": 1396736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.712506789014147e-05,
+      "loss": 3.4982,
+      "step": 1397248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.711668194263095e-05,
+      "loss": 3.4968,
+      "step": 1397760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.710829599512043e-05,
+      "loss": 3.4809,
+      "step": 1398272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7099910047609904e-05,
+      "loss": 3.4926,
+      "step": 1398784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.709154047890312e-05,
+      "loss": 3.4995,
+      "step": 1399296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.70831545313926e-05,
+      "loss": 3.4931,
+      "step": 1399808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7074768583882077e-05,
+      "loss": 3.4907,
+      "step": 1400320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7066382636371557e-05,
+      "loss": 3.4842,
+      "step": 1400832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7058013067664773e-05,
+      "loss": 3.4928,
+      "step": 1401344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7049627120154253e-05,
+      "loss": 3.4764,
+      "step": 1401856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.704124117264373e-05,
+      "loss": 3.4983,
+      "step": 1402368
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.703285522513321e-05,
+      "loss": 3.4872,
+      "step": 1402880
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7024485656426425e-05,
+      "loss": 3.4999,
+      "step": 1403392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7016099708915905e-05,
+      "loss": 3.4957,
+      "step": 1403904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.7007713761405378e-05,
+      "loss": 3.4793,
+      "step": 1404416
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6999327813894858e-05,
+      "loss": 3.4894,
+      "step": 1404928
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6990958245188074e-05,
+      "loss": 3.4927,
+      "step": 1405440
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.698257229767755e-05,
+      "loss": 3.4804,
+      "step": 1405952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.697418635016703e-05,
+      "loss": 3.4918,
+      "step": 1406464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.696580040265651e-05,
+      "loss": 3.4988,
+      "step": 1406976
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6957430833949726e-05,
+      "loss": 3.4944,
+      "step": 1407488
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.69490448864392e-05,
+      "loss": 3.4858,
+      "step": 1408000
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6940658938928683e-05,
+      "loss": 3.4761,
+      "step": 1408512
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6932272991418163e-05,
+      "loss": 3.4834,
+      "step": 1409024
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.692390342271138e-05,
+      "loss": 3.4947,
+      "step": 1409536
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6915517475200852e-05,
+      "loss": 3.5022,
+      "step": 1410048
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6907131527690332e-05,
+      "loss": 3.4892,
+      "step": 1410560
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6898745580179812e-05,
+      "loss": 3.5023,
+      "step": 1411072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6890376011473024e-05,
+      "loss": 3.4986,
+      "step": 1411584
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6881990063962504e-05,
+      "loss": 3.5004,
+      "step": 1412096
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6873604116451984e-05,
+      "loss": 3.48,
+      "step": 1412608
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6865218168941464e-05,
+      "loss": 3.4997,
+      "step": 1413120
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6856832221430944e-05,
+      "loss": 3.4926,
+      "step": 1413632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6848462652724153e-05,
+      "loss": 3.4908,
+      "step": 1414144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6840076705213633e-05,
+      "loss": 3.4963,
+      "step": 1414656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6831690757703116e-05,
+      "loss": 3.49,
+      "step": 1415168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6823321188996326e-05,
+      "loss": 3.5023,
+      "step": 1415680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6814935241485806e-05,
+      "loss": 3.4936,
+      "step": 1416192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6806549293975285e-05,
+      "loss": 3.4859,
+      "step": 1416704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6798163346464765e-05,
+      "loss": 3.4897,
+      "step": 1417216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6789793777757978e-05,
+      "loss": 3.4938,
+      "step": 1417728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6781407830247458e-05,
+      "loss": 3.4921,
+      "step": 1418240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6773021882736938e-05,
+      "loss": 3.489,
+      "step": 1418752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.6764635935226418e-05,
+      "loss": 3.4882,
+      "step": 1419264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6756266366519627e-05,
+      "loss": 3.4947,
+      "step": 1419776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6747880419009107e-05,
+      "loss": 3.4851,
+      "step": 1420288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6739494471498587e-05,
+      "loss": 3.4914,
+      "step": 1420800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.673110852398807e-05,
+      "loss": 3.4845,
+      "step": 1421312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.672273895528128e-05,
+      "loss": 3.5015,
+      "step": 1421824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.671435300777076e-05,
+      "loss": 3.4955,
+      "step": 1422336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.670596706026024e-05,
+      "loss": 3.4951,
+      "step": 1422848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.669758111274972e-05,
+      "loss": 3.4842,
+      "step": 1423360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.668921154404293e-05,
+      "loss": 3.4953,
+      "step": 1423872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.668082559653241e-05,
+      "loss": 3.4844,
+      "step": 1424384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.667243964902189e-05,
+      "loss": 3.5034,
+      "step": 1424896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.666405370151137e-05,
+      "loss": 3.4902,
+      "step": 1425408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.665568413280458e-05,
+      "loss": 3.4853,
+      "step": 1425920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.664729818529406e-05,
+      "loss": 3.4825,
+      "step": 1426432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.663891223778354e-05,
+      "loss": 3.4913,
+      "step": 1426944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6630526290273024e-05,
+      "loss": 3.4733,
+      "step": 1427456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6622156721566233e-05,
+      "loss": 3.4889,
+      "step": 1427968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6613770774055713e-05,
+      "loss": 3.4892,
+      "step": 1428480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6605384826545193e-05,
+      "loss": 3.4888,
+      "step": 1428992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6596998879034673e-05,
+      "loss": 3.4733,
+      "step": 1429504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6588629310327885e-05,
+      "loss": 3.4978,
+      "step": 1430016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6580243362817365e-05,
+      "loss": 3.4796,
+      "step": 1430528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6571857415306845e-05,
+      "loss": 3.4974,
+      "step": 1431040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6563471467796325e-05,
+      "loss": 3.4868,
+      "step": 1431552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6555101899089534e-05,
+      "loss": 3.4862,
+      "step": 1432064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6546715951579014e-05,
+      "loss": 3.5026,
+      "step": 1432576
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6538330004068494e-05,
+      "loss": 3.5121,
+      "step": 1433088
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6529944056557977e-05,
+      "loss": 3.487,
+      "step": 1433600
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6521574487851187e-05,
+      "loss": 3.4863,
+      "step": 1434112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6513188540340667e-05,
+      "loss": 3.4926,
+      "step": 1434624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6504802592830146e-05,
+      "loss": 3.4843,
+      "step": 1435136
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6496416645319626e-05,
+      "loss": 3.4877,
+      "step": 1435648
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.648804707661284e-05,
+      "loss": 3.4983,
+      "step": 1436160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.647966112910232e-05,
+      "loss": 3.4822,
+      "step": 1436672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.64712751815918e-05,
+      "loss": 3.4998,
+      "step": 1437184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.646288923408128e-05,
+      "loss": 3.4916,
+      "step": 1437696
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6454519665374488e-05,
+      "loss": 3.4908,
+      "step": 1438208
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6446133717863968e-05,
+      "loss": 3.4848,
+      "step": 1438720
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6437747770353448e-05,
+      "loss": 3.4931,
+      "step": 1439232
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.642936182284293e-05,
+      "loss": 3.4815,
+      "step": 1439744
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.642099225413614e-05,
+      "loss": 3.4995,
+      "step": 1440256
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.641260630662562e-05,
+      "loss": 3.494,
+      "step": 1440768
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.64042203591151e-05,
+      "loss": 3.4911,
+      "step": 1441280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.639583441160458e-05,
+      "loss": 3.4745,
+      "step": 1441792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6387464842897793e-05,
+      "loss": 3.498,
+      "step": 1442304
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6379078895387273e-05,
+      "loss": 3.4927,
+      "step": 1442816
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6370692947876753e-05,
+      "loss": 3.4901,
+      "step": 1443328
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6362307000366232e-05,
+      "loss": 3.487,
+      "step": 1443840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.635393743165944e-05,
+      "loss": 3.4986,
+      "step": 1444352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.634555148414892e-05,
+      "loss": 3.4952,
+      "step": 1444864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.63371655366384e-05,
+      "loss": 3.5003,
+      "step": 1445376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6328779589127885e-05,
+      "loss": 3.4835,
+      "step": 1445888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6320410020421094e-05,
+      "loss": 3.4855,
+      "step": 1446400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6312024072910574e-05,
+      "loss": 3.493,
+      "step": 1446912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6303638125400054e-05,
+      "loss": 3.4759,
+      "step": 1447424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6295252177889534e-05,
+      "loss": 3.4911,
+      "step": 1447936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6286882609182746e-05,
+      "loss": 3.5015,
+      "step": 1448448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6278496661672226e-05,
+      "loss": 3.4921,
+      "step": 1448960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6270110714161706e-05,
+      "loss": 3.4807,
+      "step": 1449472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.6261724766651186e-05,
+      "loss": 3.4911,
+      "step": 1449984
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.8496224880218506,
+      "eval_runtime": 304.0856,
+      "eval_samples_per_second": 1254.88,
+      "eval_steps_per_second": 39.216,
+      "step": 1450080
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6253338819140666e-05,
+      "loss": 3.4798,
+      "step": 1450496
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6244952871630146e-05,
+      "loss": 3.4868,
+      "step": 1451008
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6236566924119626e-05,
+      "loss": 3.4938,
+      "step": 1451520
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6228180976609103e-05,
+      "loss": 3.485,
+      "step": 1452032
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.621981140790232e-05,
+      "loss": 3.5004,
+      "step": 1452544
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.62114254603918e-05,
+      "loss": 3.4798,
+      "step": 1453056
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.620303951288128e-05,
+      "loss": 3.4908,
+      "step": 1453568
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.619465356537075e-05,
+      "loss": 3.4761,
+      "step": 1454080
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6186283996663967e-05,
+      "loss": 3.4894,
+      "step": 1454592
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6177898049153447e-05,
+      "loss": 3.4852,
+      "step": 1455104
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6169512101642927e-05,
+      "loss": 3.4934,
+      "step": 1455616
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6161126154132404e-05,
+      "loss": 3.4972,
+      "step": 1456128
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.615275658542562e-05,
+      "loss": 3.4847,
+      "step": 1456640
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.61443706379151e-05,
+      "loss": 3.4833,
+      "step": 1457152
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6135984690404576e-05,
+      "loss": 3.482,
+      "step": 1457664
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6127598742894056e-05,
+      "loss": 3.4817,
+      "step": 1458176
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6119212795383536e-05,
+      "loss": 3.4771,
+      "step": 1458688
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6110826847873016e-05,
+      "loss": 3.4831,
+      "step": 1459200
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6102440900362496e-05,
+      "loss": 3.4773,
+      "step": 1459712
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6094071331655705e-05,
+      "loss": 3.505,
+      "step": 1460224
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6085685384145185e-05,
+      "loss": 3.4839,
+      "step": 1460736
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6077299436634665e-05,
+      "loss": 3.4901,
+      "step": 1461248
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.606891348912415e-05,
+      "loss": 3.4954,
+      "step": 1461760
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6060543920417358e-05,
+      "loss": 3.4954,
+      "step": 1462272
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6052157972906837e-05,
+      "loss": 3.4753,
+      "step": 1462784
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6043772025396317e-05,
+      "loss": 3.4846,
+      "step": 1463296
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.6035386077885797e-05,
+      "loss": 3.4787,
+      "step": 1463808
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.602701650917901e-05,
+      "loss": 3.4851,
+      "step": 1464320
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.601863056166849e-05,
+      "loss": 3.4762,
+      "step": 1464832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.601024461415797e-05,
+      "loss": 3.4808,
+      "step": 1465344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.600185866664745e-05,
+      "loss": 3.4905,
+      "step": 1465856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.599348909794066e-05,
+      "loss": 3.4906,
+      "step": 1466368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.598510315043014e-05,
+      "loss": 3.4801,
+      "step": 1466880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.597671720291962e-05,
+      "loss": 3.491,
+      "step": 1467392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5968331255409102e-05,
+      "loss": 3.4886,
+      "step": 1467904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.595996168670231e-05,
+      "loss": 3.4839,
+      "step": 1468416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.595157573919179e-05,
+      "loss": 3.474,
+      "step": 1468928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.594318979168127e-05,
+      "loss": 3.4902,
+      "step": 1469440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.593480384417075e-05,
+      "loss": 3.4649,
+      "step": 1469952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5926434275463964e-05,
+      "loss": 3.4813,
+      "step": 1470464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5918048327953444e-05,
+      "loss": 3.4761,
+      "step": 1470976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5909662380442923e-05,
+      "loss": 3.4922,
+      "step": 1471488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5901276432932403e-05,
+      "loss": 3.488,
+      "step": 1472000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5892906864225613e-05,
+      "loss": 3.4818,
+      "step": 1472512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5884520916715092e-05,
+      "loss": 3.4917,
+      "step": 1473024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5876134969204572e-05,
+      "loss": 3.4897,
+      "step": 1473536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5867749021694056e-05,
+      "loss": 3.4843,
+      "step": 1474048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5859379452987265e-05,
+      "loss": 3.4694,
+      "step": 1474560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5850993505476745e-05,
+      "loss": 3.4774,
+      "step": 1475072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5842607557966225e-05,
+      "loss": 3.4895,
+      "step": 1475584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5834221610455705e-05,
+      "loss": 3.4784,
+      "step": 1476096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5825852041748917e-05,
+      "loss": 3.4739,
+      "step": 1476608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5817466094238397e-05,
+      "loss": 3.4749,
+      "step": 1477120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5809080146727877e-05,
+      "loss": 3.4813,
+      "step": 1477632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5800694199217357e-05,
+      "loss": 3.4678,
+      "step": 1478144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5792324630510566e-05,
+      "loss": 3.4823,
+      "step": 1478656
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5783938683000046e-05,
+      "loss": 3.48,
+      "step": 1479168
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5775552735489526e-05,
+      "loss": 3.4813,
+      "step": 1479680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.576716678797901e-05,
+      "loss": 3.4854,
+      "step": 1480192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.575879721927222e-05,
+      "loss": 3.4705,
+      "step": 1480704
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.57504112717617e-05,
+      "loss": 3.4733,
+      "step": 1481216
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.574202532425118e-05,
+      "loss": 3.4863,
+      "step": 1481728
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.573363937674066e-05,
+      "loss": 3.4688,
+      "step": 1482240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.572526980803387e-05,
+      "loss": 3.4786,
+      "step": 1482752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.571688386052335e-05,
+      "loss": 3.4864,
+      "step": 1483264
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.570849791301283e-05,
+      "loss": 3.4878,
+      "step": 1483776
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.570011196550231e-05,
+      "loss": 3.4706,
+      "step": 1484288
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.569174239679552e-05,
+      "loss": 3.4649,
+      "step": 1484800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5683356449285e-05,
+      "loss": 3.4717,
+      "step": 1485312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.567497050177448e-05,
+      "loss": 3.4814,
+      "step": 1485824
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5666584554263963e-05,
+      "loss": 3.4882,
+      "step": 1486336
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5658214985557172e-05,
+      "loss": 3.4765,
+      "step": 1486848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5649829038046652e-05,
+      "loss": 3.4891,
+      "step": 1487360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5641443090536132e-05,
+      "loss": 3.4854,
+      "step": 1487872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5633057143025612e-05,
+      "loss": 3.4899,
+      "step": 1488384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5624687574318825e-05,
+      "loss": 3.4699,
+      "step": 1488896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5616301626808305e-05,
+      "loss": 3.4871,
+      "step": 1489408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5607915679297784e-05,
+      "loss": 3.4804,
+      "step": 1489920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5599529731787264e-05,
+      "loss": 3.4773,
+      "step": 1490432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5591160163080474e-05,
+      "loss": 3.4826,
+      "step": 1490944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5582774215569953e-05,
+      "loss": 3.4802,
+      "step": 1491456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5574388268059433e-05,
+      "loss": 3.4847,
+      "step": 1491968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5566002320548917e-05,
+      "loss": 3.4809,
+      "step": 1492480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5557632751842126e-05,
+      "loss": 3.4762,
+      "step": 1492992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5549246804331606e-05,
+      "loss": 3.4813,
+      "step": 1493504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5540860856821086e-05,
+      "loss": 3.4787,
+      "step": 1494016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5532491288114295e-05,
+      "loss": 3.4786,
+      "step": 1494528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5524105340603778e-05,
+      "loss": 3.479,
+      "step": 1495040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.5515719393093258e-05,
+      "loss": 3.4745,
+      "step": 1495552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5507333445582738e-05,
+      "loss": 3.4835,
+      "step": 1496064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5498963876875947e-05,
+      "loss": 3.4745,
+      "step": 1496576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5490577929365427e-05,
+      "loss": 3.4721,
+      "step": 1497088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5482191981854907e-05,
+      "loss": 3.475,
+      "step": 1497600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5473806034344387e-05,
+      "loss": 3.4911,
+      "step": 1498112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.54654364656376e-05,
+      "loss": 3.484,
+      "step": 1498624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.545705051812708e-05,
+      "loss": 3.4797,
+      "step": 1499136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.544866457061656e-05,
+      "loss": 3.4715,
+      "step": 1499648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.544027862310604e-05,
+      "loss": 3.482,
+      "step": 1500160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.543190905439925e-05,
+      "loss": 3.4704,
+      "step": 1500672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5423523106888732e-05,
+      "loss": 3.4942,
+      "step": 1501184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5415137159378212e-05,
+      "loss": 3.4769,
+      "step": 1501696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5406751211867692e-05,
+      "loss": 3.476,
+      "step": 1502208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.53983816431609e-05,
+      "loss": 3.4707,
+      "step": 1502720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.538999569565038e-05,
+      "loss": 3.4818,
+      "step": 1503232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.538160974813986e-05,
+      "loss": 3.4574,
+      "step": 1503744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.537322380062934e-05,
+      "loss": 3.4789,
+      "step": 1504256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5364854231922553e-05,
+      "loss": 3.4752,
+      "step": 1504768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5356468284412033e-05,
+      "loss": 3.4772,
+      "step": 1505280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5348082336901513e-05,
+      "loss": 3.462,
+      "step": 1505792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5339696389390993e-05,
+      "loss": 3.4833,
+      "step": 1506304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5331326820684202e-05,
+      "loss": 3.4667,
+      "step": 1506816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5322940873173686e-05,
+      "loss": 3.4842,
+      "step": 1507328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5314554925663166e-05,
+      "loss": 3.4773,
+      "step": 1507840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5306168978152645e-05,
+      "loss": 3.4745,
+      "step": 1508352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5297799409445855e-05,
+      "loss": 3.4895,
+      "step": 1508864
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5289413461935335e-05,
+      "loss": 3.5007,
+      "step": 1509376
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5281027514424814e-05,
+      "loss": 3.4788,
+      "step": 1509888
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5272641566914294e-05,
+      "loss": 3.4737,
+      "step": 1510400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5264271998207507e-05,
+      "loss": 3.4798,
+      "step": 1510912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5255886050696987e-05,
+      "loss": 3.471,
+      "step": 1511424
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5247500103186467e-05,
+      "loss": 3.4746,
+      "step": 1511936
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5239114155675947e-05,
+      "loss": 3.4867,
+      "step": 1512448
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5230744586969156e-05,
+      "loss": 3.4711,
+      "step": 1512960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5222358639458636e-05,
+      "loss": 3.4861,
+      "step": 1513472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.521397269194812e-05,
+      "loss": 3.4788,
+      "step": 1513984
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.52055867444376e-05,
+      "loss": 3.4799,
+      "step": 1514496
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5197217175730808e-05,
+      "loss": 3.4664,
+      "step": 1515008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5188831228220288e-05,
+      "loss": 3.4826,
+      "step": 1515520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5180445280709768e-05,
+      "loss": 3.4708,
+      "step": 1516032
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5172059333199248e-05,
+      "loss": 3.4859,
+      "step": 1516544
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.516368976449246e-05,
+      "loss": 3.4788,
+      "step": 1517056
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.515530381698194e-05,
+      "loss": 3.484,
+      "step": 1517568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.514691786947142e-05,
+      "loss": 3.4636,
+      "step": 1518080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.51385319219609e-05,
+      "loss": 3.4898,
+      "step": 1518592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.513016235325411e-05,
+      "loss": 3.4774,
+      "step": 1519104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.512177640574359e-05,
+      "loss": 3.4774,
+      "step": 1519616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5113390458233073e-05,
+      "loss": 3.4762,
+      "step": 1520128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5105004510722546e-05,
+      "loss": 3.488,
+      "step": 1520640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5096634942015762e-05,
+      "loss": 3.4838,
+      "step": 1521152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5088248994505242e-05,
+      "loss": 3.4845,
+      "step": 1521664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5079863046994722e-05,
+      "loss": 3.4727,
+      "step": 1522176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.50714770994842e-05,
+      "loss": 3.4771,
+      "step": 1522688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5063107530777414e-05,
+      "loss": 3.4767,
+      "step": 1523200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5054721583266894e-05,
+      "loss": 3.4688,
+      "step": 1523712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5046335635756367e-05,
+      "loss": 3.4779,
+      "step": 1524224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5037949688245847e-05,
+      "loss": 3.4887,
+      "step": 1524736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5029580119539063e-05,
+      "loss": 3.4809,
+      "step": 1525248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.5021194172028543e-05,
+      "loss": 3.4687,
+      "step": 1525760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.501280822451802e-05,
+      "loss": 3.4783,
+      "step": 1526272
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.851987600326538,
+      "eval_runtime": 310.8963,
+      "eval_samples_per_second": 1227.39,
+      "eval_steps_per_second": 38.357,
+      "step": 1526400
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.50044222770075e-05,
+      "loss": 3.4733,
+      "step": 1526784
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4996052708300712e-05,
+      "loss": 3.4742,
+      "step": 1527296
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4987666760790192e-05,
+      "loss": 3.4792,
+      "step": 1527808
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4979280813279672e-05,
+      "loss": 3.4777,
+      "step": 1528320
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4970894865769155e-05,
+      "loss": 3.4886,
+      "step": 1528832
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4962525297062365e-05,
+      "loss": 3.4696,
+      "step": 1529344
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4954139349551844e-05,
+      "loss": 3.4768,
+      "step": 1529856
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4945753402041324e-05,
+      "loss": 3.4628,
+      "step": 1530368
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4937367454530804e-05,
+      "loss": 3.4814,
+      "step": 1530880
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4928997885824017e-05,
+      "loss": 3.4718,
+      "step": 1531392
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4920611938313497e-05,
+      "loss": 3.4758,
+      "step": 1531904
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4912225990802977e-05,
+      "loss": 3.4871,
+      "step": 1532416
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4903856422096186e-05,
+      "loss": 3.4703,
+      "step": 1532928
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4895470474585666e-05,
+      "loss": 3.4743,
+      "step": 1533440
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4887084527075146e-05,
+      "loss": 3.4699,
+      "step": 1533952
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4878698579564626e-05,
+      "loss": 3.4721,
+      "step": 1534464
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.487031263205411e-05,
+      "loss": 3.4621,
+      "step": 1534976
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4861926684543586e-05,
+      "loss": 3.4719,
+      "step": 1535488
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4853540737033066e-05,
+      "loss": 3.4675,
+      "step": 1536000
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4845154789522546e-05,
+      "loss": 3.4881,
+      "step": 1536512
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4836785220815758e-05,
+      "loss": 3.472,
+      "step": 1537024
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4828399273305235e-05,
+      "loss": 3.4787,
+      "step": 1537536
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4820013325794718e-05,
+      "loss": 3.4836,
+      "step": 1538048
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.481164375708793e-05,
+      "loss": 3.4844,
+      "step": 1538560
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4803257809577407e-05,
+      "loss": 3.4625,
+      "step": 1539072
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4794871862066887e-05,
+      "loss": 3.4762,
+      "step": 1539584
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.4786485914556367e-05,
+      "loss": 3.4642,
+      "step": 1540096
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.477811634584958e-05,
+      "loss": 3.4734,
+      "step": 1540608
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.476973039833906e-05,
+      "loss": 3.4609,
+      "step": 1541120
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.476134445082854e-05,
+      "loss": 3.4705,
+      "step": 1541632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.475295850331802e-05,
+      "loss": 3.4783,
+      "step": 1542144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4744588934611232e-05,
+      "loss": 3.4785,
+      "step": 1542656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.473620298710071e-05,
+      "loss": 3.4677,
+      "step": 1543168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4727817039590188e-05,
+      "loss": 3.4803,
+      "step": 1543680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.471943109207967e-05,
+      "loss": 3.4708,
+      "step": 1544192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.471106152337288e-05,
+      "loss": 3.4777,
+      "step": 1544704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.470267557586236e-05,
+      "loss": 3.4601,
+      "step": 1545216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.469428962835184e-05,
+      "loss": 3.475,
+      "step": 1545728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.468590368084132e-05,
+      "loss": 3.4588,
+      "step": 1546240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4677534112134533e-05,
+      "loss": 3.4674,
+      "step": 1546752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4669148164624013e-05,
+      "loss": 3.4648,
+      "step": 1547264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4660762217113493e-05,
+      "loss": 3.4813,
+      "step": 1547776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4652376269602973e-05,
+      "loss": 3.4754,
+      "step": 1548288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4644006700896182e-05,
+      "loss": 3.4737,
+      "step": 1548800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4635620753385662e-05,
+      "loss": 3.4775,
+      "step": 1549312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4627234805875142e-05,
+      "loss": 3.4779,
+      "step": 1549824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4618848858364625e-05,
+      "loss": 3.4704,
+      "step": 1550336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4610479289657834e-05,
+      "loss": 3.4632,
+      "step": 1550848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4602093342147314e-05,
+      "loss": 3.4597,
+      "step": 1551360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4593707394636794e-05,
+      "loss": 3.4778,
+      "step": 1551872
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4585321447126274e-05,
+      "loss": 3.4668,
+      "step": 1552384
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4576951878419487e-05,
+      "loss": 3.4625,
+      "step": 1552896
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4568565930908967e-05,
+      "loss": 3.4666,
+      "step": 1553408
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4560179983398447e-05,
+      "loss": 3.4704,
+      "step": 1553920
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4551794035887927e-05,
+      "loss": 3.4555,
+      "step": 1554432
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4543424467181136e-05,
+      "loss": 3.4632,
+      "step": 1554944
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4535038519670616e-05,
+      "loss": 3.4688,
+      "step": 1555456
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4526652572160096e-05,
+      "loss": 3.4666,
+      "step": 1555968
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.451826662464958e-05,
+      "loss": 3.4758,
+      "step": 1556480
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4509897055942788e-05,
+      "loss": 3.4599,
+      "step": 1556992
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4501511108432268e-05,
+      "loss": 3.4612,
+      "step": 1557504
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4493125160921748e-05,
+      "loss": 3.4742,
+      "step": 1558016
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4484739213411228e-05,
+      "loss": 3.4571,
+      "step": 1558528
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.447636964470444e-05,
+      "loss": 3.4648,
+      "step": 1559040
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.446798369719392e-05,
+      "loss": 3.4738,
+      "step": 1559552
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.44595977496834e-05,
+      "loss": 3.4737,
+      "step": 1560064
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.445121180217288e-05,
+      "loss": 3.4606,
+      "step": 1560576
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.444284223346609e-05,
+      "loss": 3.4536,
+      "step": 1561088
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.443445628595557e-05,
+      "loss": 3.4558,
+      "step": 1561600
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.442607033844505e-05,
+      "loss": 3.4683,
+      "step": 1562112
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4417684390934533e-05,
+      "loss": 3.4794,
+      "step": 1562624
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4409314822227742e-05,
+      "loss": 3.4654,
+      "step": 1563136
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4400928874717222e-05,
+      "loss": 3.4779,
+      "step": 1563648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.43925429272067e-05,
+      "loss": 3.4739,
+      "step": 1564160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.438415697969618e-05,
+      "loss": 3.4811,
+      "step": 1564672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4375787410989394e-05,
+      "loss": 3.4591,
+      "step": 1565184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4367401463478874e-05,
+      "loss": 3.4752,
+      "step": 1565696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4359015515968354e-05,
+      "loss": 3.4677,
+      "step": 1566208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.435062956845783e-05,
+      "loss": 3.4635,
+      "step": 1566720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4342259999751043e-05,
+      "loss": 3.476,
+      "step": 1567232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4333874052240523e-05,
+      "loss": 3.4695,
+      "step": 1567744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4325488104730003e-05,
+      "loss": 3.468,
+      "step": 1568256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4317102157219483e-05,
+      "loss": 3.4693,
+      "step": 1568768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4308732588512695e-05,
+      "loss": 3.466,
+      "step": 1569280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4300346641002175e-05,
+      "loss": 3.4696,
+      "step": 1569792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4291960693491655e-05,
+      "loss": 3.4659,
+      "step": 1570304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4283574745981132e-05,
+      "loss": 3.4695,
+      "step": 1570816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4275205177274344e-05,
+      "loss": 3.4629,
+      "step": 1571328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.4266819229763828e-05,
+      "loss": 3.4663,
+      "step": 1571840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4258433282253304e-05,
+      "loss": 3.4702,
+      "step": 1572352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4250047334742784e-05,
+      "loss": 3.4679,
+      "step": 1572864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4241677766035997e-05,
+      "loss": 3.4585,
+      "step": 1573376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4233291818525477e-05,
+      "loss": 3.4612,
+      "step": 1573888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4224905871014957e-05,
+      "loss": 3.4797,
+      "step": 1574400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4216519923504437e-05,
+      "loss": 3.4731,
+      "step": 1574912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.420815035479765e-05,
+      "loss": 3.4702,
+      "step": 1575424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.419976440728713e-05,
+      "loss": 3.4598,
+      "step": 1575936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4191378459776606e-05,
+      "loss": 3.4679,
+      "step": 1576448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4182992512266086e-05,
+      "loss": 3.4637,
+      "step": 1576960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4174622943559298e-05,
+      "loss": 3.4774,
+      "step": 1577472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4166236996048778e-05,
+      "loss": 3.4729,
+      "step": 1577984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4157851048538258e-05,
+      "loss": 3.4619,
+      "step": 1578496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4149465101027738e-05,
+      "loss": 3.4587,
+      "step": 1579008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.414109553232095e-05,
+      "loss": 3.4703,
+      "step": 1579520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4132709584810427e-05,
+      "loss": 3.4443,
+      "step": 1580032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.412432363729991e-05,
+      "loss": 3.4677,
+      "step": 1580544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.411593768978939e-05,
+      "loss": 3.4596,
+      "step": 1581056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4107568121082603e-05,
+      "loss": 3.4657,
+      "step": 1581568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.409918217357208e-05,
+      "loss": 3.4506,
+      "step": 1582080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.409079622606156e-05,
+      "loss": 3.4713,
+      "step": 1582592
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.408241027855104e-05,
+      "loss": 3.4621,
+      "step": 1583104
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4074040709844252e-05,
+      "loss": 3.4648,
+      "step": 1583616
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.406565476233373e-05,
+      "loss": 3.4687,
+      "step": 1584128
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.405726881482321e-05,
+      "loss": 3.4598,
+      "step": 1584640
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.404888286731269e-05,
+      "loss": 3.4783,
+      "step": 1585152
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.40405132986059e-05,
+      "loss": 3.4909,
+      "step": 1585664
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.403212735109538e-05,
+      "loss": 3.4642,
+      "step": 1586176
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4023741403584864e-05,
+      "loss": 3.4596,
+      "step": 1586688
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4015355456074344e-05,
+      "loss": 3.4707,
+      "step": 1587200
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.4006985887367553e-05,
+      "loss": 3.4623,
+      "step": 1587712
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3998599939857033e-05,
+      "loss": 3.4586,
+      "step": 1588224
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3990213992346513e-05,
+      "loss": 3.4763,
+      "step": 1588736
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3981828044835993e-05,
+      "loss": 3.4581,
+      "step": 1589248
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3973458476129205e-05,
+      "loss": 3.4724,
+      "step": 1589760
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3965072528618685e-05,
+      "loss": 3.4669,
+      "step": 1590272
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3956686581108165e-05,
+      "loss": 3.4696,
+      "step": 1590784
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3948317012401374e-05,
+      "loss": 3.4617,
+      "step": 1591296
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3939931064890854e-05,
+      "loss": 3.4672,
+      "step": 1591808
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3931545117380334e-05,
+      "loss": 3.4617,
+      "step": 1592320
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3923159169869818e-05,
+      "loss": 3.472,
+      "step": 1592832
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3914773222359298e-05,
+      "loss": 3.4648,
+      "step": 1593344
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3906403653652507e-05,
+      "loss": 3.4699,
+      "step": 1593856
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3898017706141987e-05,
+      "loss": 3.4535,
+      "step": 1594368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3889631758631467e-05,
+      "loss": 3.4755,
+      "step": 1594880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3881245811120947e-05,
+      "loss": 3.4658,
+      "step": 1595392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.387287624241416e-05,
+      "loss": 3.4673,
+      "step": 1595904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.386449029490364e-05,
+      "loss": 3.463,
+      "step": 1596416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.385610434739312e-05,
+      "loss": 3.4755,
+      "step": 1596928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.38477183998826e-05,
+      "loss": 3.4709,
+      "step": 1597440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3839348831175808e-05,
+      "loss": 3.4786,
+      "step": 1597952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3830962883665288e-05,
+      "loss": 3.4565,
+      "step": 1598464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3822576936154768e-05,
+      "loss": 3.4662,
+      "step": 1598976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.381420736744798e-05,
+      "loss": 3.4631,
+      "step": 1599488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.380582141993746e-05,
+      "loss": 3.4593,
+      "step": 1600000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.379743547242694e-05,
+      "loss": 3.4679,
+      "step": 1600512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.378904952491642e-05,
+      "loss": 3.4731,
+      "step": 1601024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3780679956209633e-05,
+      "loss": 3.4722,
+      "step": 1601536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3772294008699113e-05,
+      "loss": 3.4505,
+      "step": 1602048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.3763908061188593e-05,
+      "loss": 3.4688,
+      "step": 1602560
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.8532638549804688,
+      "eval_runtime": 311.7856,
+      "eval_samples_per_second": 1223.889,
+      "eval_steps_per_second": 38.247,
+      "step": 1602720
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3755522113678073e-05,
+      "loss": 3.4579,
+      "step": 1603072
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3747136166167553e-05,
+      "loss": 3.4602,
+      "step": 1603584
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.373875021865703e-05,
+      "loss": 3.4719,
+      "step": 1604096
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.373036427114651e-05,
+      "loss": 3.4654,
+      "step": 1604608
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.372199470243972e-05,
+      "loss": 3.4706,
+      "step": 1605120
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.37136087549292e-05,
+      "loss": 3.4624,
+      "step": 1605632
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.370522280741868e-05,
+      "loss": 3.4661,
+      "step": 1606144
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.369683685990816e-05,
+      "loss": 3.4519,
+      "step": 1606656
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3688467291201374e-05,
+      "loss": 3.4665,
+      "step": 1607168
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.368008134369085e-05,
+      "loss": 3.4628,
+      "step": 1607680
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3671695396180334e-05,
+      "loss": 3.4621,
+      "step": 1608192
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3663309448669814e-05,
+      "loss": 3.4744,
+      "step": 1608704
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3654939879963026e-05,
+      "loss": 3.4597,
+      "step": 1609216
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3646553932452503e-05,
+      "loss": 3.4643,
+      "step": 1609728
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3638167984941983e-05,
+      "loss": 3.4616,
+      "step": 1610240
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3629782037431463e-05,
+      "loss": 3.4575,
+      "step": 1610752
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3621412468724675e-05,
+      "loss": 3.4515,
+      "step": 1611264
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3613026521214155e-05,
+      "loss": 3.4608,
+      "step": 1611776
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3604640573703635e-05,
+      "loss": 3.4535,
+      "step": 1612288
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3596254626193115e-05,
+      "loss": 3.4747,
+      "step": 1612800
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3587885057486324e-05,
+      "loss": 3.4639,
+      "step": 1613312
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3579499109975804e-05,
+      "loss": 3.4659,
+      "step": 1613824
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3571113162465288e-05,
+      "loss": 3.4742,
+      "step": 1614336
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3562727214954767e-05,
+      "loss": 3.4711,
+      "step": 1614848
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3554357646247977e-05,
+      "loss": 3.4507,
+      "step": 1615360
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3545971698737457e-05,
+      "loss": 3.4654,
+      "step": 1615872
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3537585751226936e-05,
+      "loss": 3.4582,
+      "step": 1616384
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3529199803716416e-05,
+      "loss": 3.462,
+      "step": 1616896
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.352083023500963e-05,
+      "loss": 3.4511,
+      "step": 1617408
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.351244428749911e-05,
+      "loss": 3.4578,
+      "step": 1617920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.350405833998859e-05,
+      "loss": 3.469,
+      "step": 1618432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.349567239247807e-05,
+      "loss": 3.4638,
+      "step": 1618944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3487302823771278e-05,
+      "loss": 3.458,
+      "step": 1619456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3478916876260758e-05,
+      "loss": 3.4651,
+      "step": 1619968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.347053092875024e-05,
+      "loss": 3.4629,
+      "step": 1620480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.346214498123972e-05,
+      "loss": 3.4649,
+      "step": 1620992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.345377541253293e-05,
+      "loss": 3.4532,
+      "step": 1621504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.344538946502241e-05,
+      "loss": 3.4559,
+      "step": 1622016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.343700351751189e-05,
+      "loss": 3.4522,
+      "step": 1622528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.342861757000137e-05,
+      "loss": 3.4538,
+      "step": 1623040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3420248001294583e-05,
+      "loss": 3.4526,
+      "step": 1623552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3411862053784063e-05,
+      "loss": 3.4692,
+      "step": 1624064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3403476106273542e-05,
+      "loss": 3.4642,
+      "step": 1624576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3395090158763022e-05,
+      "loss": 3.4593,
+      "step": 1625088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.338672059005623e-05,
+      "loss": 3.472,
+      "step": 1625600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.337833464254571e-05,
+      "loss": 3.4597,
+      "step": 1626112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.336994869503519e-05,
+      "loss": 3.4617,
+      "step": 1626624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3361562747524675e-05,
+      "loss": 3.4525,
+      "step": 1627136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3353193178817884e-05,
+      "loss": 3.4453,
+      "step": 1627648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3344807231307364e-05,
+      "loss": 3.468,
+      "step": 1628160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3336421283796844e-05,
+      "loss": 3.4545,
+      "step": 1628672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3328035336286324e-05,
+      "loss": 3.4562,
+      "step": 1629184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3319665767579536e-05,
+      "loss": 3.4544,
+      "step": 1629696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3311279820069016e-05,
+      "loss": 3.4563,
+      "step": 1630208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3302893872558496e-05,
+      "loss": 3.4449,
+      "step": 1630720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3294507925047976e-05,
+      "loss": 3.4537,
+      "step": 1631232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3286138356341185e-05,
+      "loss": 3.4568,
+      "step": 1631744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3277752408830665e-05,
+      "loss": 3.4547,
+      "step": 1632256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3269366461320145e-05,
+      "loss": 3.463,
+      "step": 1632768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.326098051380963e-05,
+      "loss": 3.4462,
+      "step": 1633280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3252610945102838e-05,
+      "loss": 3.4523,
+      "step": 1633792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3244224997592318e-05,
+      "loss": 3.4666,
+      "step": 1634304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3235839050081797e-05,
+      "loss": 3.4472,
+      "step": 1634816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3227453102571274e-05,
+      "loss": 3.4508,
+      "step": 1635328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.321908353386449e-05,
+      "loss": 3.4604,
+      "step": 1635840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.321069758635397e-05,
+      "loss": 3.4631,
+      "step": 1636352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.320231163884345e-05,
+      "loss": 3.4489,
+      "step": 1636864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3193925691332926e-05,
+      "loss": 3.4457,
+      "step": 1637376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.318555612262614e-05,
+      "loss": 3.4406,
+      "step": 1637888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.317717017511562e-05,
+      "loss": 3.4567,
+      "step": 1638400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.31687842276051e-05,
+      "loss": 3.4676,
+      "step": 1638912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.316039828009458e-05,
+      "loss": 3.4548,
+      "step": 1639424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.315202871138779e-05,
+      "loss": 3.4614,
+      "step": 1639936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.314364276387727e-05,
+      "loss": 3.4603,
+      "step": 1640448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3135256816366748e-05,
+      "loss": 3.4724,
+      "step": 1640960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3126870868856228e-05,
+      "loss": 3.4485,
+      "step": 1641472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3118501300149444e-05,
+      "loss": 3.4635,
+      "step": 1641984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3110115352638924e-05,
+      "loss": 3.4554,
+      "step": 1642496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.31017294051284e-05,
+      "loss": 3.449,
+      "step": 1643008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.309334345761788e-05,
+      "loss": 3.4626,
+      "step": 1643520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3084973888911093e-05,
+      "loss": 3.4595,
+      "step": 1644032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3076587941400573e-05,
+      "loss": 3.4583,
+      "step": 1644544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3068201993890052e-05,
+      "loss": 3.4615,
+      "step": 1645056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3059832425183265e-05,
+      "loss": 3.4555,
+      "step": 1645568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3051446477672745e-05,
+      "loss": 3.4507,
+      "step": 1646080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.304306053016222e-05,
+      "loss": 3.4549,
+      "step": 1646592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.30346745826517e-05,
+      "loss": 3.46,
+      "step": 1647104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3026305013944914e-05,
+      "loss": 3.4472,
+      "step": 1647616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3017919066434397e-05,
+      "loss": 3.4539,
+      "step": 1648128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3009533118923874e-05,
+      "loss": 3.4597,
+      "step": 1648640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3001147171413354e-05,
+      "loss": 3.4572,
+      "step": 1649152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2992777602706566e-05,
+      "loss": 3.4477,
+      "step": 1649664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2984391655196043e-05,
+      "loss": 3.451,
+      "step": 1650176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2976005707685526e-05,
+      "loss": 3.4691,
+      "step": 1650688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2967619760175006e-05,
+      "loss": 3.4583,
+      "step": 1651200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.295925019146822e-05,
+      "loss": 3.4624,
+      "step": 1651712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2950864243957695e-05,
+      "loss": 3.445,
+      "step": 1652224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2942478296447175e-05,
+      "loss": 3.4582,
+      "step": 1652736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2934092348936655e-05,
+      "loss": 3.4528,
+      "step": 1653248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2925722780229868e-05,
+      "loss": 3.4619,
+      "step": 1653760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2917336832719348e-05,
+      "loss": 3.4615,
+      "step": 1654272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2908950885208827e-05,
+      "loss": 3.4508,
+      "step": 1654784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2900564937698307e-05,
+      "loss": 3.4452,
+      "step": 1655296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2892195368991517e-05,
+      "loss": 3.4597,
+      "step": 1655808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2883809421480996e-05,
+      "loss": 3.4348,
+      "step": 1656320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2875423473970476e-05,
+      "loss": 3.4561,
+      "step": 1656832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.286703752645996e-05,
+      "loss": 3.4456,
+      "step": 1657344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.285866795775317e-05,
+      "loss": 3.4587,
+      "step": 1657856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.285028201024265e-05,
+      "loss": 3.4401,
+      "step": 1658368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.284189606273213e-05,
+      "loss": 3.4593,
+      "step": 1658880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.283351011522161e-05,
+      "loss": 3.4489,
+      "step": 1659392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.282514054651482e-05,
+      "loss": 3.4561,
+      "step": 1659904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.28167545990043e-05,
+      "loss": 3.4606,
+      "step": 1660416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.280836865149378e-05,
+      "loss": 3.4511,
+      "step": 1660928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.279998270398326e-05,
+      "loss": 3.465,
+      "step": 1661440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.279161313527647e-05,
+      "loss": 3.4742,
+      "step": 1661952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.278322718776595e-05,
+      "loss": 3.4526,
+      "step": 1662464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.277484124025543e-05,
+      "loss": 3.4517,
+      "step": 1662976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2766455292744913e-05,
+      "loss": 3.4548,
+      "step": 1663488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2758085724038123e-05,
+      "loss": 3.454,
+      "step": 1664000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2749699776527603e-05,
+      "loss": 3.4433,
+      "step": 1664512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2741313829017082e-05,
+      "loss": 3.4694,
+      "step": 1665024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2732927881506562e-05,
+      "loss": 3.4459,
+      "step": 1665536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2724558312799775e-05,
+      "loss": 3.4627,
+      "step": 1666048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2716172365289255e-05,
+      "loss": 3.4546,
+      "step": 1666560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2707786417778735e-05,
+      "loss": 3.4566,
+      "step": 1667072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2699400470268215e-05,
+      "loss": 3.4495,
+      "step": 1667584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2691030901561424e-05,
+      "loss": 3.4589,
+      "step": 1668096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2682644954050904e-05,
+      "loss": 3.4508,
+      "step": 1668608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2674259006540384e-05,
+      "loss": 3.4623,
+      "step": 1669120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2665873059029867e-05,
+      "loss": 3.454,
+      "step": 1669632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2657503490323076e-05,
+      "loss": 3.4607,
+      "step": 1670144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2649117542812556e-05,
+      "loss": 3.4434,
+      "step": 1670656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2640731595302036e-05,
+      "loss": 3.4651,
+      "step": 1671168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2632345647791516e-05,
+      "loss": 3.453,
+      "step": 1671680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.262397607908473e-05,
+      "loss": 3.4581,
+      "step": 1672192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.261559013157421e-05,
+      "loss": 3.4529,
+      "step": 1672704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.260720418406369e-05,
+      "loss": 3.4608,
+      "step": 1673216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.259881823655317e-05,
+      "loss": 3.4577,
+      "step": 1673728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2590448667846378e-05,
+      "loss": 3.4693,
+      "step": 1674240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2582062720335857e-05,
+      "loss": 3.443,
+      "step": 1674752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2573676772825337e-05,
+      "loss": 3.4532,
+      "step": 1675264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.256529082531482e-05,
+      "loss": 3.453,
+      "step": 1675776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.255692125660803e-05,
+      "loss": 3.4516,
+      "step": 1676288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.254853530909751e-05,
+      "loss": 3.4511,
+      "step": 1676800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.254014936158699e-05,
+      "loss": 3.4614,
+      "step": 1677312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.253176341407647e-05,
+      "loss": 3.464,
+      "step": 1677824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2523393845369682e-05,
+      "loss": 3.4428,
+      "step": 1678336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2515007897859162e-05,
+      "loss": 3.4614,
+      "step": 1678848
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.8555819988250732,
+      "eval_runtime": 305.602,
+      "eval_samples_per_second": 1248.654,
+      "eval_steps_per_second": 39.021,
+      "step": 1679040
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2506621950348642e-05,
+      "loss": 3.4524,
+      "step": 1679360
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.249823600283812e-05,
+      "loss": 3.445,
+      "step": 1679872
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.24898500553276e-05,
+      "loss": 3.4593,
+      "step": 1680384
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.248146410781708e-05,
+      "loss": 3.4546,
+      "step": 1680896
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.247309453911029e-05,
+      "loss": 3.4605,
+      "step": 1681408
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.246470859159977e-05,
+      "loss": 3.4493,
+      "step": 1681920
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.245632264408925e-05,
+      "loss": 3.448,
+      "step": 1682432
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.244793669657873e-05,
+      "loss": 3.4442,
+      "step": 1682944
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.243956712787194e-05,
+      "loss": 3.4523,
+      "step": 1683456
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.243118118036142e-05,
+      "loss": 3.4513,
+      "step": 1683968
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.24227952328509e-05,
+      "loss": 3.4533,
+      "step": 1684480
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2414409285340383e-05,
+      "loss": 3.4632,
+      "step": 1684992
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2406039716633592e-05,
+      "loss": 3.4496,
+      "step": 1685504
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2397653769123072e-05,
+      "loss": 3.45,
+      "step": 1686016
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2389267821612552e-05,
+      "loss": 3.4533,
+      "step": 1686528
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2380881874102032e-05,
+      "loss": 3.4448,
+      "step": 1687040
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2372512305395245e-05,
+      "loss": 3.4395,
+      "step": 1687552
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2364126357884725e-05,
+      "loss": 3.4484,
+      "step": 1688064
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2355740410374205e-05,
+      "loss": 3.4439,
+      "step": 1688576
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2347354462863685e-05,
+      "loss": 3.4618,
+      "step": 1689088
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2338984894156894e-05,
+      "loss": 3.455,
+      "step": 1689600
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2330598946646374e-05,
+      "loss": 3.4555,
+      "step": 1690112
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2322212999135854e-05,
+      "loss": 3.4599,
+      "step": 1690624
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2313827051625337e-05,
+      "loss": 3.4596,
+      "step": 1691136
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2305457482918546e-05,
+      "loss": 3.4429,
+      "step": 1691648
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2297071535408026e-05,
+      "loss": 3.4553,
+      "step": 1692160
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2288685587897506e-05,
+      "loss": 3.4418,
+      "step": 1692672
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.2280299640386986e-05,
+      "loss": 3.4502,
+      "step": 1693184
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.22719300716802e-05,
+      "loss": 3.4431,
+      "step": 1693696
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.226354412416968e-05,
+      "loss": 3.4452,
+      "step": 1694208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.225515817665916e-05,
+      "loss": 3.4587,
+      "step": 1694720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.224677222914864e-05,
+      "loss": 3.4517,
+      "step": 1695232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2238402660441847e-05,
+      "loss": 3.4484,
+      "step": 1695744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2230016712931327e-05,
+      "loss": 3.4523,
+      "step": 1696256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2221630765420807e-05,
+      "loss": 3.4498,
+      "step": 1696768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.221324481791029e-05,
+      "loss": 3.4545,
+      "step": 1697280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.22048752492035e-05,
+      "loss": 3.4431,
+      "step": 1697792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.219648930169298e-05,
+      "loss": 3.444,
+      "step": 1698304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.218810335418246e-05,
+      "loss": 3.4396,
+      "step": 1698816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.217971740667194e-05,
+      "loss": 3.4409,
+      "step": 1699328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2171347837965152e-05,
+      "loss": 3.4443,
+      "step": 1699840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2162961890454632e-05,
+      "loss": 3.4543,
+      "step": 1700352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2154575942944112e-05,
+      "loss": 3.4528,
+      "step": 1700864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2146189995433592e-05,
+      "loss": 3.4498,
+      "step": 1701376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.21378204267268e-05,
+      "loss": 3.4617,
+      "step": 1701888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.212943447921628e-05,
+      "loss": 3.4501,
+      "step": 1702400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.212104853170576e-05,
+      "loss": 3.4523,
+      "step": 1702912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2112662584195244e-05,
+      "loss": 3.4395,
+      "step": 1703424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2104293015488453e-05,
+      "loss": 3.432,
+      "step": 1703936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2095907067977933e-05,
+      "loss": 3.4596,
+      "step": 1704448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2087521120467413e-05,
+      "loss": 3.443,
+      "step": 1704960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2079135172956893e-05,
+      "loss": 3.4395,
+      "step": 1705472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2070765604250106e-05,
+      "loss": 3.4479,
+      "step": 1705984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2062379656739586e-05,
+      "loss": 3.4414,
+      "step": 1706496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2053993709229066e-05,
+      "loss": 3.4372,
+      "step": 1707008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2045607761718542e-05,
+      "loss": 3.4385,
+      "step": 1707520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2037238193011755e-05,
+      "loss": 3.4462,
+      "step": 1708032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2028852245501235e-05,
+      "loss": 3.4444,
+      "step": 1708544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2020466297990715e-05,
+      "loss": 3.4561,
+      "step": 1709056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2012080350480195e-05,
+      "loss": 3.4344,
+      "step": 1709568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.2003710781773407e-05,
+      "loss": 3.4365,
+      "step": 1710080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1995324834262887e-05,
+      "loss": 3.4618,
+      "step": 1710592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1986938886752367e-05,
+      "loss": 3.4286,
+      "step": 1711104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1978552939241844e-05,
+      "loss": 3.4394,
+      "step": 1711616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.197018337053506e-05,
+      "loss": 3.4512,
+      "step": 1712128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.196179742302454e-05,
+      "loss": 3.4553,
+      "step": 1712640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1953411475514016e-05,
+      "loss": 3.435,
+      "step": 1713152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1945025528003496e-05,
+      "loss": 3.4364,
+      "step": 1713664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.193665595929671e-05,
+      "loss": 3.4291,
+      "step": 1714176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.192827001178619e-05,
+      "loss": 3.4442,
+      "step": 1714688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.191988406427567e-05,
+      "loss": 3.457,
+      "step": 1715200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1911498116765148e-05,
+      "loss": 3.4451,
+      "step": 1715712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.190312854805836e-05,
+      "loss": 3.4493,
+      "step": 1716224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.189474260054784e-05,
+      "loss": 3.4518,
+      "step": 1716736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1886356653037317e-05,
+      "loss": 3.4613,
+      "step": 1717248
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1877970705526797e-05,
+      "loss": 3.4399,
+      "step": 1717760
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1869601136820013e-05,
+      "loss": 3.4511,
+      "step": 1718272
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.186121518930949e-05,
+      "loss": 3.4448,
+      "step": 1718784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.185282924179897e-05,
+      "loss": 3.4365,
+      "step": 1719296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.184444329428845e-05,
+      "loss": 3.4515,
+      "step": 1719808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1836073725581662e-05,
+      "loss": 3.4511,
+      "step": 1720320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.182768777807114e-05,
+      "loss": 3.4513,
+      "step": 1720832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1819301830560622e-05,
+      "loss": 3.4465,
+      "step": 1721344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1810915883050102e-05,
+      "loss": 3.4464,
+      "step": 1721856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.180254631434331e-05,
+      "loss": 3.4395,
+      "step": 1722368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.179416036683279e-05,
+      "loss": 3.4481,
+      "step": 1722880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.178577441932227e-05,
+      "loss": 3.4479,
+      "step": 1723392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.177738847181175e-05,
+      "loss": 3.4394,
+      "step": 1723904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1769018903104963e-05,
+      "loss": 3.443,
+      "step": 1724416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1760632955594443e-05,
+      "loss": 3.4424,
+      "step": 1724928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1752247008083923e-05,
+      "loss": 3.4465,
+      "step": 1725440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1743861060573403e-05,
+      "loss": 3.439,
+      "step": 1725952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1735491491866612e-05,
+      "loss": 3.4368,
+      "step": 1726464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1727105544356092e-05,
+      "loss": 3.4595,
+      "step": 1726976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1718719596845576e-05,
+      "loss": 3.4472,
+      "step": 1727488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1710333649335056e-05,
+      "loss": 3.4492,
+      "step": 1728000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1701964080628265e-05,
+      "loss": 3.434,
+      "step": 1728512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1693578133117745e-05,
+      "loss": 3.4491,
+      "step": 1729024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1685192185607225e-05,
+      "loss": 3.4387,
+      "step": 1729536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1676806238096705e-05,
+      "loss": 3.4483,
+      "step": 1730048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1668436669389917e-05,
+      "loss": 3.4516,
+      "step": 1730560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1660050721879397e-05,
+      "loss": 3.4395,
+      "step": 1731072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1651664774368877e-05,
+      "loss": 3.4386,
+      "step": 1731584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1643278826858357e-05,
+      "loss": 3.4442,
+      "step": 1732096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1634909258151566e-05,
+      "loss": 3.4239,
+      "step": 1732608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1626523310641046e-05,
+      "loss": 3.4496,
+      "step": 1733120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.161813736313053e-05,
+      "loss": 3.4315,
+      "step": 1733632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.160975141562001e-05,
+      "loss": 3.4479,
+      "step": 1734144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.160138184691322e-05,
+      "loss": 3.4304,
+      "step": 1734656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.15929958994027e-05,
+      "loss": 3.4485,
+      "step": 1735168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1584609951892178e-05,
+      "loss": 3.4383,
+      "step": 1735680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1576224004381658e-05,
+      "loss": 3.4413,
+      "step": 1736192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.156785443567487e-05,
+      "loss": 3.4538,
+      "step": 1736704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.155946848816435e-05,
+      "loss": 3.4384,
+      "step": 1737216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.155108254065383e-05,
+      "loss": 3.4543,
+      "step": 1737728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.154269659314331e-05,
+      "loss": 3.462,
+      "step": 1738240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.153432702443652e-05,
+      "loss": 3.4423,
+      "step": 1738752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1525941076926e-05,
+      "loss": 3.4447,
+      "step": 1739264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1517555129415483e-05,
+      "loss": 3.4381,
+      "step": 1739776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1509169181904963e-05,
+      "loss": 3.4455,
+      "step": 1740288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1500799613198172e-05,
+      "loss": 3.4379,
+      "step": 1740800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1492413665687652e-05,
+      "loss": 3.4564,
+      "step": 1741312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1484027718177132e-05,
+      "loss": 3.4314,
+      "step": 1741824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1475641770666612e-05,
+      "loss": 3.4503,
+      "step": 1742336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1467272201959824e-05,
+      "loss": 3.4456,
+      "step": 1742848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1458886254449304e-05,
+      "loss": 3.4443,
+      "step": 1743360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1450500306938784e-05,
+      "loss": 3.436,
+      "step": 1743872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1442114359428264e-05,
+      "loss": 3.4478,
+      "step": 1744384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1433744790721473e-05,
+      "loss": 3.4369,
+      "step": 1744896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1425358843210953e-05,
+      "loss": 3.4551,
+      "step": 1745408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1416972895700437e-05,
+      "loss": 3.4406,
+      "step": 1745920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1408586948189913e-05,
+      "loss": 3.4501,
+      "step": 1746432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1400217379483126e-05,
+      "loss": 3.4305,
+      "step": 1746944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1391831431972606e-05,
+      "loss": 3.4562,
+      "step": 1747456
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1383445484462086e-05,
+      "loss": 3.4453,
+      "step": 1747968
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1375059536951562e-05,
+      "loss": 3.448,
+      "step": 1748480
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1366689968244778e-05,
+      "loss": 3.442,
+      "step": 1748992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1358304020734258e-05,
+      "loss": 3.4452,
+      "step": 1749504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1349918073223738e-05,
+      "loss": 3.4489,
+      "step": 1750016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1341548504516947e-05,
+      "loss": 3.4639,
+      "step": 1750528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1333162557006427e-05,
+      "loss": 3.4304,
+      "step": 1751040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1324776609495907e-05,
+      "loss": 3.4432,
+      "step": 1751552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1316390661985387e-05,
+      "loss": 3.4406,
+      "step": 1752064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1308004714474867e-05,
+      "loss": 3.4446,
+      "step": 1752576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.129963514576808e-05,
+      "loss": 3.436,
+      "step": 1753088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.129124919825756e-05,
+      "loss": 3.4493,
+      "step": 1753600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1282863250747036e-05,
+      "loss": 3.4514,
+      "step": 1754112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1274477303236516e-05,
+      "loss": 3.4316,
+      "step": 1754624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.1266107734529732e-05,
+      "loss": 3.4521,
+      "step": 1755136
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.8568949699401855,
+      "eval_runtime": 303.2113,
+      "eval_samples_per_second": 1258.499,
+      "eval_steps_per_second": 39.329,
+      "step": 1755360
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.125772178701921e-05,
+      "loss": 3.4388,
+      "step": 1755648
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1249335839508688e-05,
+      "loss": 3.4367,
+      "step": 1756160
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1240949891998168e-05,
+      "loss": 3.4498,
+      "step": 1756672
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.123258032329138e-05,
+      "loss": 3.4401,
+      "step": 1757184
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.122419437578086e-05,
+      "loss": 3.4505,
+      "step": 1757696
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.121580842827034e-05,
+      "loss": 3.4385,
+      "step": 1758208
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.120742248075982e-05,
+      "loss": 3.4409,
+      "step": 1758720
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1199052912053033e-05,
+      "loss": 3.4339,
+      "step": 1759232
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.119066696454251e-05,
+      "loss": 3.444,
+      "step": 1759744
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.118228101703199e-05,
+      "loss": 3.4376,
+      "step": 1760256
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.117389506952147e-05,
+      "loss": 3.4415,
+      "step": 1760768
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1165525500814682e-05,
+      "loss": 3.4504,
+      "step": 1761280
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1157155932107895e-05,
+      "loss": 3.4426,
+      "step": 1761792
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1148769984597374e-05,
+      "loss": 3.4374,
+      "step": 1762304
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1140384037086854e-05,
+      "loss": 3.4392,
+      "step": 1762816
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.113199808957633e-05,
+      "loss": 3.4318,
+      "step": 1763328
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1123612142065814e-05,
+      "loss": 3.4317,
+      "step": 1763840
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1115226194555294e-05,
+      "loss": 3.4381,
+      "step": 1764352
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1106840247044774e-05,
+      "loss": 3.436,
+      "step": 1764864
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1098454299534254e-05,
+      "loss": 3.4433,
+      "step": 1765376
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1090084730827463e-05,
+      "loss": 3.4474,
+      "step": 1765888
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1081698783316943e-05,
+      "loss": 3.4436,
+      "step": 1766400
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1073312835806423e-05,
+      "loss": 3.4467,
+      "step": 1766912
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1064926888295907e-05,
+      "loss": 3.4465,
+      "step": 1767424
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1056557319589116e-05,
+      "loss": 3.4282,
+      "step": 1767936
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1048171372078596e-05,
+      "loss": 3.4482,
+      "step": 1768448
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1039785424568076e-05,
+      "loss": 3.4332,
+      "step": 1768960
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1031399477057556e-05,
+      "loss": 3.4372,
+      "step": 1769472
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1023029908350768e-05,
+      "loss": 3.4308,
+      "step": 1769984
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.1014643960840248e-05,
+      "loss": 3.4383,
+      "step": 1770496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1006258013329728e-05,
+      "loss": 3.444,
+      "step": 1771008
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0997872065819208e-05,
+      "loss": 3.4444,
+      "step": 1771520
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0989502497112417e-05,
+      "loss": 3.4397,
+      "step": 1772032
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0981116549601897e-05,
+      "loss": 3.4445,
+      "step": 1772544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0972730602091377e-05,
+      "loss": 3.4312,
+      "step": 1773056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.096434465458086e-05,
+      "loss": 3.4446,
+      "step": 1773568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.095597508587407e-05,
+      "loss": 3.4382,
+      "step": 1774080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.094758913836355e-05,
+      "loss": 3.4285,
+      "step": 1774592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.093920319085303e-05,
+      "loss": 3.4297,
+      "step": 1775104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.093081724334251e-05,
+      "loss": 3.4311,
+      "step": 1775616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.092244767463572e-05,
+      "loss": 3.438,
+      "step": 1776128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.09140617271252e-05,
+      "loss": 3.4413,
+      "step": 1776640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.090567577961468e-05,
+      "loss": 3.4425,
+      "step": 1777152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.089728983210416e-05,
+      "loss": 3.4367,
+      "step": 1777664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.088892026339737e-05,
+      "loss": 3.4516,
+      "step": 1778176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.088053431588685e-05,
+      "loss": 3.4362,
+      "step": 1778688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.087214836837633e-05,
+      "loss": 3.445,
+      "step": 1779200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.086376242086581e-05,
+      "loss": 3.4314,
+      "step": 1779712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0855392852159023e-05,
+      "loss": 3.4151,
+      "step": 1780224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0847006904648503e-05,
+      "loss": 3.4502,
+      "step": 1780736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0838620957137983e-05,
+      "loss": 3.4333,
+      "step": 1781248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.083023500962746e-05,
+      "loss": 3.4345,
+      "step": 1781760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0821865440920675e-05,
+      "loss": 3.4356,
+      "step": 1782272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0813479493410155e-05,
+      "loss": 3.4295,
+      "step": 1782784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0805093545899635e-05,
+      "loss": 3.4278,
+      "step": 1783296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0796707598389112e-05,
+      "loss": 3.4262,
+      "step": 1783808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0788338029682324e-05,
+      "loss": 3.4361,
+      "step": 1784320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0779952082171804e-05,
+      "loss": 3.4305,
+      "step": 1784832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0771566134661284e-05,
+      "loss": 3.4482,
+      "step": 1785344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0763180187150764e-05,
+      "loss": 3.4263,
+      "step": 1785856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0754810618443977e-05,
+      "loss": 3.4255,
+      "step": 1786368
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0746424670933457e-05,
+      "loss": 3.4476,
+      "step": 1786880
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0738038723422933e-05,
+      "loss": 3.4196,
+      "step": 1787392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0729652775912413e-05,
+      "loss": 3.4263,
+      "step": 1787904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.072128320720563e-05,
+      "loss": 3.443,
+      "step": 1788416
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.071289725969511e-05,
+      "loss": 3.4428,
+      "step": 1788928
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0704511312184586e-05,
+      "loss": 3.4262,
+      "step": 1789440
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0696125364674065e-05,
+      "loss": 3.4237,
+      "step": 1789952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0687755795967278e-05,
+      "loss": 3.4201,
+      "step": 1790464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0679369848456755e-05,
+      "loss": 3.431,
+      "step": 1790976
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0670983900946238e-05,
+      "loss": 3.4471,
+      "step": 1791488
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0662597953435718e-05,
+      "loss": 3.4334,
+      "step": 1792000
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.065422838472893e-05,
+      "loss": 3.4358,
+      "step": 1792512
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0645842437218407e-05,
+      "loss": 3.442,
+      "step": 1793024
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0637456489707887e-05,
+      "loss": 3.4483,
+      "step": 1793536
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0629070542197367e-05,
+      "loss": 3.4286,
+      "step": 1794048
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.062070097349058e-05,
+      "loss": 3.4461,
+      "step": 1794560
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.061231502598006e-05,
+      "loss": 3.4279,
+      "step": 1795072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.060392907846954e-05,
+      "loss": 3.4314,
+      "step": 1795584
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.059554313095902e-05,
+      "loss": 3.4363,
+      "step": 1796096
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0587173562252228e-05,
+      "loss": 3.4438,
+      "step": 1796608
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0578787614741708e-05,
+      "loss": 3.4346,
+      "step": 1797120
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.057040166723119e-05,
+      "loss": 3.4405,
+      "step": 1797632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.056201571972067e-05,
+      "loss": 3.4319,
+      "step": 1798144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.055364615101388e-05,
+      "loss": 3.4253,
+      "step": 1798656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.054526020350336e-05,
+      "loss": 3.4381,
+      "step": 1799168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.053687425599284e-05,
+      "loss": 3.436,
+      "step": 1799680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.052848830848232e-05,
+      "loss": 3.4261,
+      "step": 1800192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.0520118739775533e-05,
+      "loss": 3.4345,
+      "step": 1800704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0511732792265013e-05,
+      "loss": 3.4341,
+      "step": 1801216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0503346844754493e-05,
+      "loss": 3.4386,
+      "step": 1801728
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0494977276047702e-05,
+      "loss": 3.4269,
+      "step": 1802240
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0486591328537182e-05,
+      "loss": 3.4277,
+      "step": 1802752
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0478205381026662e-05,
+      "loss": 3.4455,
+      "step": 1803264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0469819433516145e-05,
+      "loss": 3.4382,
+      "step": 1803776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0461449864809354e-05,
+      "loss": 3.438,
+      "step": 1804288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0453063917298834e-05,
+      "loss": 3.4277,
+      "step": 1804800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0444677969788314e-05,
+      "loss": 3.4342,
+      "step": 1805312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0436292022277794e-05,
+      "loss": 3.4269,
+      "step": 1805824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0427922453571007e-05,
+      "loss": 3.4381,
+      "step": 1806336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0419536506060487e-05,
+      "loss": 3.4404,
+      "step": 1806848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0411150558549967e-05,
+      "loss": 3.4319,
+      "step": 1807360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0402764611039447e-05,
+      "loss": 3.4276,
+      "step": 1807872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0394395042332656e-05,
+      "loss": 3.4296,
+      "step": 1808384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0386009094822136e-05,
+      "loss": 3.4192,
+      "step": 1808896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0377623147311616e-05,
+      "loss": 3.4331,
+      "step": 1809408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.03692371998011e-05,
+      "loss": 3.4234,
+      "step": 1809920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0360867631094308e-05,
+      "loss": 3.4402,
+      "step": 1810432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0352481683583788e-05,
+      "loss": 3.4211,
+      "step": 1810944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0344095736073268e-05,
+      "loss": 3.4335,
+      "step": 1811456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0335709788562748e-05,
+      "loss": 3.4279,
+      "step": 1811968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.032734021985596e-05,
+      "loss": 3.4342,
+      "step": 1812480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.031895427234544e-05,
+      "loss": 3.4395,
+      "step": 1812992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.031056832483492e-05,
+      "loss": 3.428,
+      "step": 1813504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.03021823773244e-05,
+      "loss": 3.4429,
+      "step": 1814016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.029381280861761e-05,
+      "loss": 3.4485,
+      "step": 1814528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.028542686110709e-05,
+      "loss": 3.4302,
+      "step": 1815040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.027704091359657e-05,
+      "loss": 3.4361,
+      "step": 1815552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0268654966086053e-05,
+      "loss": 3.4262,
+      "step": 1816064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.026028539737926e-05,
+      "loss": 3.4334,
+      "step": 1816576
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.025189944986874e-05,
+      "loss": 3.4327,
+      "step": 1817088
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.024351350235822e-05,
+      "loss": 3.439,
+      "step": 1817600
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.02351275548477e-05,
+      "loss": 3.4237,
+      "step": 1818112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0226757986140914e-05,
+      "loss": 3.4377,
+      "step": 1818624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0218372038630394e-05,
+      "loss": 3.4358,
+      "step": 1819136
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0209986091119874e-05,
+      "loss": 3.4348,
+      "step": 1819648
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0201600143609354e-05,
+      "loss": 3.4214,
+      "step": 1820160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0193230574902563e-05,
+      "loss": 3.4371,
+      "step": 1820672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0184844627392043e-05,
+      "loss": 3.4272,
+      "step": 1821184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0176458679881523e-05,
+      "loss": 3.4415,
+      "step": 1821696
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0168072732371003e-05,
+      "loss": 3.4336,
+      "step": 1822208
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0159703163664215e-05,
+      "loss": 3.4372,
+      "step": 1822720
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0151317216153695e-05,
+      "loss": 3.4175,
+      "step": 1823232
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0142931268643175e-05,
+      "loss": 3.4494,
+      "step": 1823744
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0134545321132652e-05,
+      "loss": 3.4296,
+      "step": 1824256
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0126175752425864e-05,
+      "loss": 3.4358,
+      "step": 1824768
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0117789804915348e-05,
+      "loss": 3.4328,
+      "step": 1825280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0109403857404828e-05,
+      "loss": 3.4327,
+      "step": 1825792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0101017909894304e-05,
+      "loss": 3.4409,
+      "step": 1826304
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0092648341187517e-05,
+      "loss": 3.4504,
+      "step": 1826816
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0084262393676997e-05,
+      "loss": 3.4223,
+      "step": 1827328
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0075876446166477e-05,
+      "loss": 3.4353,
+      "step": 1827840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0067490498655956e-05,
+      "loss": 3.4296,
+      "step": 1828352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.005912092994917e-05,
+      "loss": 3.4321,
+      "step": 1828864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.005073498243865e-05,
+      "loss": 3.4265,
+      "step": 1829376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0042349034928125e-05,
+      "loss": 3.4398,
+      "step": 1829888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0033963087417605e-05,
+      "loss": 3.442,
+      "step": 1830400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.0025593518710818e-05,
+      "loss": 3.421,
+      "step": 1830912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.00172075712003e-05,
+      "loss": 3.439,
+      "step": 1831424
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.858309745788574,
+      "eval_runtime": 302.9295,
+      "eval_samples_per_second": 1259.669,
+      "eval_steps_per_second": 39.366,
+      "step": 1831680
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.0008821623689778e-05,
+      "loss": 3.4289,
+      "step": 1831936
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.0000435676179258e-05,
+      "loss": 3.4278,
+      "step": 1832448
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9992049728668738e-05,
+      "loss": 3.434,
+      "step": 1832960
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9983663781158218e-05,
+      "loss": 3.4277,
+      "step": 1833472
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.997529421245143e-05,
+      "loss": 3.4398,
+      "step": 1833984
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.996690826494091e-05,
+      "loss": 3.4234,
+      "step": 1834496
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.995852231743039e-05,
+      "loss": 3.4375,
+      "step": 1835008
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.995013636991987e-05,
+      "loss": 3.4254,
+      "step": 1835520
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.994176680121308e-05,
+      "loss": 3.4329,
+      "step": 1836032
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.993338085370256e-05,
+      "loss": 3.4277,
+      "step": 1836544
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.992499490619204e-05,
+      "loss": 3.43,
+      "step": 1837056
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9916608958681522e-05,
+      "loss": 3.4372,
+      "step": 1837568
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.990823938997473e-05,
+      "loss": 3.4328,
+      "step": 1838080
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.989985344246421e-05,
+      "loss": 3.4286,
+      "step": 1838592
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.989146749495369e-05,
+      "loss": 3.4325,
+      "step": 1839104
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.988308154744317e-05,
+      "loss": 3.4184,
+      "step": 1839616
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9874711978736384e-05,
+      "loss": 3.4223,
+      "step": 1840128
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9866326031225864e-05,
+      "loss": 3.4276,
+      "step": 1840640
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9857940083715344e-05,
+      "loss": 3.425,
+      "step": 1841152
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9849554136204824e-05,
+      "loss": 3.429,
+      "step": 1841664
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9841184567498033e-05,
+      "loss": 3.4382,
+      "step": 1842176
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9832798619987513e-05,
+      "loss": 3.4347,
+      "step": 1842688
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9824412672476993e-05,
+      "loss": 3.4386,
+      "step": 1843200
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9816026724966476e-05,
+      "loss": 3.4348,
+      "step": 1843712
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9807657156259685e-05,
+      "loss": 3.422,
+      "step": 1844224
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9799271208749165e-05,
+      "loss": 3.4364,
+      "step": 1844736
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9790885261238645e-05,
+      "loss": 3.4215,
+      "step": 1845248
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9782499313728125e-05,
+      "loss": 3.428,
+      "step": 1845760
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9774129745021338e-05,
+      "loss": 3.4245,
+      "step": 1846272
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9765743797510817e-05,
+      "loss": 3.4245,
+      "step": 1846784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9757357850000297e-05,
+      "loss": 3.4289,
+      "step": 1847296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9748971902489777e-05,
+      "loss": 3.4343,
+      "step": 1847808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9740602333782986e-05,
+      "loss": 3.4274,
+      "step": 1848320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9732216386272466e-05,
+      "loss": 3.4299,
+      "step": 1848832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9723830438761946e-05,
+      "loss": 3.4215,
+      "step": 1849344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9715444491251426e-05,
+      "loss": 3.4347,
+      "step": 1849856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.970707492254464e-05,
+      "loss": 3.4296,
+      "step": 1850368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.969868897503412e-05,
+      "loss": 3.4177,
+      "step": 1850880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.96903030275236e-05,
+      "loss": 3.42,
+      "step": 1851392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9681917080013075e-05,
+      "loss": 3.4151,
+      "step": 1851904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9673547511306288e-05,
+      "loss": 3.4265,
+      "step": 1852416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.966516156379577e-05,
+      "loss": 3.429,
+      "step": 1852928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.965677561628525e-05,
+      "loss": 3.431,
+      "step": 1853440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9648389668774728e-05,
+      "loss": 3.4278,
+      "step": 1853952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.964002010006794e-05,
+      "loss": 3.4371,
+      "step": 1854464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.963163415255742e-05,
+      "loss": 3.428,
+      "step": 1854976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.96232482050469e-05,
+      "loss": 3.433,
+      "step": 1855488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.961486225753638e-05,
+      "loss": 3.4215,
+      "step": 1856000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9606492688829593e-05,
+      "loss": 3.401,
+      "step": 1856512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9598106741319072e-05,
+      "loss": 3.4453,
+      "step": 1857024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.958972079380855e-05,
+      "loss": 3.4184,
+      "step": 1857536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.958133484629803e-05,
+      "loss": 3.4259,
+      "step": 1858048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.957296527759124e-05,
+      "loss": 3.4249,
+      "step": 1858560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9564579330080725e-05,
+      "loss": 3.4206,
+      "step": 1859072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.95561933825702e-05,
+      "loss": 3.4136,
+      "step": 1859584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.954780743505968e-05,
+      "loss": 3.4149,
+      "step": 1860096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9539437866352894e-05,
+      "loss": 3.4273,
+      "step": 1860608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9531051918842374e-05,
+      "loss": 3.4183,
+      "step": 1861120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9522665971331854e-05,
+      "loss": 3.4373,
+      "step": 1861632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9514280023821334e-05,
+      "loss": 3.4192,
+      "step": 1862144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9505910455114546e-05,
+      "loss": 3.4141,
+      "step": 1862656
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9497524507604023e-05,
+      "loss": 3.4342,
+      "step": 1863168
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9489138560093503e-05,
+      "loss": 3.4121,
+      "step": 1863680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9480752612582983e-05,
+      "loss": 3.4163,
+      "step": 1864192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9472383043876195e-05,
+      "loss": 3.4321,
+      "step": 1864704
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9463997096365675e-05,
+      "loss": 3.4292,
+      "step": 1865216
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9455611148855155e-05,
+      "loss": 3.4166,
+      "step": 1865728
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9447225201344635e-05,
+      "loss": 3.4145,
+      "step": 1866240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9438855632637847e-05,
+      "loss": 3.41,
+      "step": 1866752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9430469685127324e-05,
+      "loss": 3.4214,
+      "step": 1867264
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9422083737616807e-05,
+      "loss": 3.4366,
+      "step": 1867776
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9413697790106287e-05,
+      "loss": 3.4225,
+      "step": 1868288
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9405328221399496e-05,
+      "loss": 3.4281,
+      "step": 1868800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9396942273888976e-05,
+      "loss": 3.4284,
+      "step": 1869312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9388556326378456e-05,
+      "loss": 3.4364,
+      "step": 1869824
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9380170378867936e-05,
+      "loss": 3.4182,
+      "step": 1870336
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.937180081016115e-05,
+      "loss": 3.4333,
+      "step": 1870848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.936341486265063e-05,
+      "loss": 3.4161,
+      "step": 1871360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.935502891514011e-05,
+      "loss": 3.422,
+      "step": 1871872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.934665934643332e-05,
+      "loss": 3.425,
+      "step": 1872384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9338273398922798e-05,
+      "loss": 3.431,
+      "step": 1872896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9329887451412278e-05,
+      "loss": 3.426,
+      "step": 1873408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.932150150390176e-05,
+      "loss": 3.4331,
+      "step": 1873920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.931313193519497e-05,
+      "loss": 3.416,
+      "step": 1874432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.930474598768445e-05,
+      "loss": 3.4195,
+      "step": 1874944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.929636004017393e-05,
+      "loss": 3.4285,
+      "step": 1875456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.928797409266341e-05,
+      "loss": 3.4263,
+      "step": 1875968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9279604523956623e-05,
+      "loss": 3.4122,
+      "step": 1876480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9271218576446102e-05,
+      "loss": 3.4265,
+      "step": 1876992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9262832628935582e-05,
+      "loss": 3.4248,
+      "step": 1877504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9254446681425062e-05,
+      "loss": 3.4299,
+      "step": 1878016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.924607711271827e-05,
+      "loss": 3.417,
+      "step": 1878528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.923769116520775e-05,
+      "loss": 3.4148,
+      "step": 1879040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.922930521769723e-05,
+      "loss": 3.4354,
+      "step": 1879552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.922091927018671e-05,
+      "loss": 3.4265,
+      "step": 1880064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9212549701479924e-05,
+      "loss": 3.4262,
+      "step": 1880576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9204180132773136e-05,
+      "loss": 3.4182,
+      "step": 1881088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9195794185262616e-05,
+      "loss": 3.4231,
+      "step": 1881600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9187408237752093e-05,
+      "loss": 3.4189,
+      "step": 1882112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9179022290241573e-05,
+      "loss": 3.4295,
+      "step": 1882624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9170636342731056e-05,
+      "loss": 3.4307,
+      "step": 1883136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9162250395220536e-05,
+      "loss": 3.4224,
+      "step": 1883648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9153864447710016e-05,
+      "loss": 3.4169,
+      "step": 1884160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9145478500199496e-05,
+      "loss": 3.4133,
+      "step": 1884672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9137108931492705e-05,
+      "loss": 3.4145,
+      "step": 1885184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9128722983982185e-05,
+      "loss": 3.4202,
+      "step": 1885696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9120337036471665e-05,
+      "loss": 3.4116,
+      "step": 1886208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9111967467764878e-05,
+      "loss": 3.4304,
+      "step": 1886720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9103581520254357e-05,
+      "loss": 3.413,
+      "step": 1887232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9095195572743837e-05,
+      "loss": 3.4208,
+      "step": 1887744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9086809625233317e-05,
+      "loss": 3.4148,
+      "step": 1888256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9078440056526526e-05,
+      "loss": 3.4276,
+      "step": 1888768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.907005410901601e-05,
+      "loss": 3.4283,
+      "step": 1889280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.906166816150549e-05,
+      "loss": 3.4153,
+      "step": 1889792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.905328221399497e-05,
+      "loss": 3.4341,
+      "step": 1890304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.904491264528818e-05,
+      "loss": 3.4399,
+      "step": 1890816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.903652669777766e-05,
+      "loss": 3.4216,
+      "step": 1891328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.902814075026714e-05,
+      "loss": 3.425,
+      "step": 1891840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.901975480275662e-05,
+      "loss": 3.4147,
+      "step": 1892352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.901138523404983e-05,
+      "loss": 3.4184,
+      "step": 1892864
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.900299928653931e-05,
+      "loss": 3.4261,
+      "step": 1893376
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.899461333902879e-05,
+      "loss": 3.4253,
+      "step": 1893888
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.898622739151827e-05,
+      "loss": 3.4124,
+      "step": 1894400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.897785782281148e-05,
+      "loss": 3.4284,
+      "step": 1894912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8969471875300963e-05,
+      "loss": 3.4265,
+      "step": 1895424
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8961085927790443e-05,
+      "loss": 3.4239,
+      "step": 1895936
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.895269998027992e-05,
+      "loss": 3.4139,
+      "step": 1896448
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8944330411573132e-05,
+      "loss": 3.424,
+      "step": 1896960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8935944464062612e-05,
+      "loss": 3.4155,
+      "step": 1897472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8927558516552092e-05,
+      "loss": 3.4312,
+      "step": 1897984
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8919172569041572e-05,
+      "loss": 3.4221,
+      "step": 1898496
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8910786621531052e-05,
+      "loss": 3.4299,
+      "step": 1899008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8902417052824265e-05,
+      "loss": 3.4084,
+      "step": 1899520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8894031105313745e-05,
+      "loss": 3.436,
+      "step": 1900032
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.888564515780322e-05,
+      "loss": 3.4159,
+      "step": 1900544
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8877275589096434e-05,
+      "loss": 3.4282,
+      "step": 1901056
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8868889641585917e-05,
+      "loss": 3.4234,
+      "step": 1901568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8860503694075394e-05,
+      "loss": 3.4184,
+      "step": 1902080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8852117746564874e-05,
+      "loss": 3.4281,
+      "step": 1902592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8843748177858086e-05,
+      "loss": 3.4415,
+      "step": 1903104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8835362230347566e-05,
+      "loss": 3.4081,
+      "step": 1903616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8826976282837046e-05,
+      "loss": 3.4282,
+      "step": 1904128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8818590335326526e-05,
+      "loss": 3.4199,
+      "step": 1904640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.881022076661974e-05,
+      "loss": 3.4175,
+      "step": 1905152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.880183481910922e-05,
+      "loss": 3.4204,
+      "step": 1905664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8793448871598695e-05,
+      "loss": 3.4259,
+      "step": 1906176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8785062924088175e-05,
+      "loss": 3.4296,
+      "step": 1906688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8776693355381387e-05,
+      "loss": 3.419,
+      "step": 1907200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8768307407870867e-05,
+      "loss": 3.4277,
+      "step": 1907712
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.8584349155426025,
+      "eval_runtime": 319.4948,
+      "eval_samples_per_second": 1194.357,
+      "eval_steps_per_second": 37.325,
+      "step": 1908000
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8759921460360347e-05,
+      "loss": 3.4161,
+      "step": 1908224
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8751535512849827e-05,
+      "loss": 3.4173,
+      "step": 1908736
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.874316594414304e-05,
+      "loss": 3.4245,
+      "step": 1909248
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8734779996632516e-05,
+      "loss": 3.4218,
+      "step": 1909760
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8726394049121996e-05,
+      "loss": 3.4301,
+      "step": 1910272
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.871800810161148e-05,
+      "loss": 3.4153,
+      "step": 1910784
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8709638532904692e-05,
+      "loss": 3.4254,
+      "step": 1911296
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.870125258539417e-05,
+      "loss": 3.4147,
+      "step": 1911808
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.869286663788365e-05,
+      "loss": 3.4172,
+      "step": 1912320
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.868448069037313e-05,
+      "loss": 3.4208,
+      "step": 1912832
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.867611112166634e-05,
+      "loss": 3.4168,
+      "step": 1913344
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.866772517415582e-05,
+      "loss": 3.4249,
+      "step": 1913856
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.86593392266453e-05,
+      "loss": 3.4305,
+      "step": 1914368
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.865095327913478e-05,
+      "loss": 3.4112,
+      "step": 1914880
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.864258371042799e-05,
+      "loss": 3.4187,
+      "step": 1915392
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.863419776291747e-05,
+      "loss": 3.4087,
+      "step": 1915904
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.862581181540695e-05,
+      "loss": 3.4084,
+      "step": 1916416
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8617425867896433e-05,
+      "loss": 3.4186,
+      "step": 1916928
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8609056299189642e-05,
+      "loss": 3.4128,
+      "step": 1917440
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8600670351679122e-05,
+      "loss": 3.4191,
+      "step": 1917952
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8592284404168602e-05,
+      "loss": 3.428,
+      "step": 1918464
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8583898456658082e-05,
+      "loss": 3.4265,
+      "step": 1918976
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8575528887951295e-05,
+      "loss": 3.4285,
+      "step": 1919488
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8567142940440775e-05,
+      "loss": 3.4225,
+      "step": 1920000
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8558756992930255e-05,
+      "loss": 3.4121,
+      "step": 1920512
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8550371045419735e-05,
+      "loss": 3.4251,
+      "step": 1921024
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8542001476712944e-05,
+      "loss": 3.4114,
+      "step": 1921536
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8533615529202424e-05,
+      "loss": 3.4207,
+      "step": 1922048
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8525229581691904e-05,
+      "loss": 3.4123,
+      "step": 1922560
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8516843634181387e-05,
+      "loss": 3.4139,
+      "step": 1923072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8508474065474596e-05,
+      "loss": 3.4184,
+      "step": 1923584
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8500088117964076e-05,
+      "loss": 3.4206,
+      "step": 1924096
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8491702170453556e-05,
+      "loss": 3.4212,
+      "step": 1924608
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8483316222943036e-05,
+      "loss": 3.4173,
+      "step": 1925120
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.847494665423625e-05,
+      "loss": 3.4143,
+      "step": 1925632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.846656070672573e-05,
+      "loss": 3.4209,
+      "step": 1926144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.845817475921521e-05,
+      "loss": 3.4224,
+      "step": 1926656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.844978881170469e-05,
+      "loss": 3.4099,
+      "step": 1927168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8441419242997897e-05,
+      "loss": 3.4104,
+      "step": 1927680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8433033295487377e-05,
+      "loss": 3.4061,
+      "step": 1928192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8424647347976857e-05,
+      "loss": 3.4138,
+      "step": 1928704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.841626140046634e-05,
+      "loss": 3.4193,
+      "step": 1929216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.840789183175955e-05,
+      "loss": 3.4231,
+      "step": 1929728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.839950588424903e-05,
+      "loss": 3.4164,
+      "step": 1930240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.839111993673851e-05,
+      "loss": 3.4251,
+      "step": 1930752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.838273398922799e-05,
+      "loss": 3.4191,
+      "step": 1931264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8374364420521202e-05,
+      "loss": 3.4204,
+      "step": 1931776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8365978473010682e-05,
+      "loss": 3.4097,
+      "step": 1932288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8357592525500162e-05,
+      "loss": 3.3928,
+      "step": 1932800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8349206577989642e-05,
+      "loss": 3.4319,
+      "step": 1933312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.834083700928285e-05,
+      "loss": 3.408,
+      "step": 1933824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.833245106177233e-05,
+      "loss": 3.4167,
+      "step": 1934336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.832406511426181e-05,
+      "loss": 3.4177,
+      "step": 1934848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.831567916675129e-05,
+      "loss": 3.4089,
+      "step": 1935360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8307309598044503e-05,
+      "loss": 3.402,
+      "step": 1935872
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8298923650533983e-05,
+      "loss": 3.4052,
+      "step": 1936384
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8290537703023463e-05,
+      "loss": 3.4188,
+      "step": 1936896
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.828215175551294e-05,
+      "loss": 3.4093,
+      "step": 1937408
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8273782186806156e-05,
+      "loss": 3.4274,
+      "step": 1937920
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8265396239295636e-05,
+      "loss": 3.4082,
+      "step": 1938432
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8257010291785116e-05,
+      "loss": 3.3972,
+      "step": 1938944
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8248624344274592e-05,
+      "loss": 3.424,
+      "step": 1939456
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8240254775567805e-05,
+      "loss": 3.4053,
+      "step": 1939968
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8231868828057285e-05,
+      "loss": 3.4037,
+      "step": 1940480
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8223482880546765e-05,
+      "loss": 3.4207,
+      "step": 1940992
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8215096933036245e-05,
+      "loss": 3.4243,
+      "step": 1941504
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8206727364329457e-05,
+      "loss": 3.4024,
+      "step": 1942016
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8198341416818937e-05,
+      "loss": 3.4037,
+      "step": 1942528
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8189955469308414e-05,
+      "loss": 3.3984,
+      "step": 1943040
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8181569521797894e-05,
+      "loss": 3.4109,
+      "step": 1943552
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.817319995309111e-05,
+      "loss": 3.4249,
+      "step": 1944064
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.816481400558059e-05,
+      "loss": 3.414,
+      "step": 1944576
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8156428058070066e-05,
+      "loss": 3.4205,
+      "step": 1945088
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8148042110559546e-05,
+      "loss": 3.4153,
+      "step": 1945600
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.813967254185276e-05,
+      "loss": 3.4247,
+      "step": 1946112
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8131286594342235e-05,
+      "loss": 3.4137,
+      "step": 1946624
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.812290064683172e-05,
+      "loss": 3.426,
+      "step": 1947136
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8114514699321198e-05,
+      "loss": 3.4022,
+      "step": 1947648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.810614513061441e-05,
+      "loss": 3.4127,
+      "step": 1948160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8097759183103887e-05,
+      "loss": 3.4149,
+      "step": 1948672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8089373235593367e-05,
+      "loss": 3.418,
+      "step": 1949184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8080987288082847e-05,
+      "loss": 3.4189,
+      "step": 1949696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8072617719376063e-05,
+      "loss": 3.4211,
+      "step": 1950208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.806423177186554e-05,
+      "loss": 3.4064,
+      "step": 1950720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.805584582435502e-05,
+      "loss": 3.4088,
+      "step": 1951232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.80474598768445e-05,
+      "loss": 3.4169,
+      "step": 1951744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.803909030813771e-05,
+      "loss": 3.4206,
+      "step": 1952256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.803070436062719e-05,
+      "loss": 3.4016,
+      "step": 1952768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8022318413116672e-05,
+      "loss": 3.4145,
+      "step": 1953280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.8013932465606152e-05,
+      "loss": 3.4161,
+      "step": 1953792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.800556289689936e-05,
+      "loss": 3.4126,
+      "step": 1954304
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.799717694938884e-05,
+      "loss": 3.4113,
+      "step": 1954816
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.798879100187832e-05,
+      "loss": 3.4041,
+      "step": 1955328
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.79804050543678e-05,
+      "loss": 3.4232,
+      "step": 1955840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7972035485661013e-05,
+      "loss": 3.4191,
+      "step": 1956352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7963649538150493e-05,
+      "loss": 3.4169,
+      "step": 1956864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7955263590639973e-05,
+      "loss": 3.4082,
+      "step": 1957376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7946877643129453e-05,
+      "loss": 3.4141,
+      "step": 1957888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7938508074422662e-05,
+      "loss": 3.408,
+      "step": 1958400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7930122126912142e-05,
+      "loss": 3.4208,
+      "step": 1958912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7921736179401626e-05,
+      "loss": 3.4186,
+      "step": 1959424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7913350231891106e-05,
+      "loss": 3.4101,
+      "step": 1959936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7904980663184315e-05,
+      "loss": 3.4124,
+      "step": 1960448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7896594715673795e-05,
+      "loss": 3.4008,
+      "step": 1960960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7888208768163275e-05,
+      "loss": 3.4051,
+      "step": 1961472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7879839199456487e-05,
+      "loss": 3.4067,
+      "step": 1961984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7871453251945967e-05,
+      "loss": 3.4058,
+      "step": 1962496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7863067304435447e-05,
+      "loss": 3.4153,
+      "step": 1963008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7854681356924927e-05,
+      "loss": 3.4056,
+      "step": 1963520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7846311788218136e-05,
+      "loss": 3.4093,
+      "step": 1964032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7837925840707616e-05,
+      "loss": 3.4073,
+      "step": 1964544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7829539893197096e-05,
+      "loss": 3.4083,
+      "step": 1965056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.782115394568658e-05,
+      "loss": 3.4199,
+      "step": 1965568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.781278437697979e-05,
+      "loss": 3.4061,
+      "step": 1966080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.780439842946927e-05,
+      "loss": 3.4243,
+      "step": 1966592
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.779601248195875e-05,
+      "loss": 3.4299,
+      "step": 1967104
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.778762653444823e-05,
+      "loss": 3.4133,
+      "step": 1967616
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.777925696574144e-05,
+      "loss": 3.4113,
+      "step": 1968128
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.777087101823092e-05,
+      "loss": 3.4091,
+      "step": 1968640
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.77624850707204e-05,
+      "loss": 3.411,
+      "step": 1969152
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.775409912320988e-05,
+      "loss": 3.4164,
+      "step": 1969664
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.774572955450309e-05,
+      "loss": 3.4135,
+      "step": 1970176
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.773734360699257e-05,
+      "loss": 3.4041,
+      "step": 1970688
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.772895765948205e-05,
+      "loss": 3.4179,
+      "step": 1971200
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7720571711971533e-05,
+      "loss": 3.4222,
+      "step": 1971712
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7712202143264742e-05,
+      "loss": 3.4078,
+      "step": 1972224
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7703816195754222e-05,
+      "loss": 3.4051,
+      "step": 1972736
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7695430248243702e-05,
+      "loss": 3.4187,
+      "step": 1973248
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7687044300733182e-05,
+      "loss": 3.4031,
+      "step": 1973760
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7678674732026394e-05,
+      "loss": 3.4231,
+      "step": 1974272
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7670288784515874e-05,
+      "loss": 3.412,
+      "step": 1974784
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7661902837005354e-05,
+      "loss": 3.4194,
+      "step": 1975296
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7653516889494834e-05,
+      "loss": 3.3981,
+      "step": 1975808
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7645147320788043e-05,
+      "loss": 3.4219,
+      "step": 1976320
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7636761373277523e-05,
+      "loss": 3.415,
+      "step": 1976832
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7628375425767003e-05,
+      "loss": 3.4136,
+      "step": 1977344
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7619989478256487e-05,
+      "loss": 3.4137,
+      "step": 1977856
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7611619909549696e-05,
+      "loss": 3.4079,
+      "step": 1978368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7603233962039176e-05,
+      "loss": 3.4195,
+      "step": 1978880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7594848014528656e-05,
+      "loss": 3.4357,
+      "step": 1979392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7586462067018132e-05,
+      "loss": 3.3965,
+      "step": 1979904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7578092498311348e-05,
+      "loss": 3.4171,
+      "step": 1980416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7569706550800828e-05,
+      "loss": 3.4085,
+      "step": 1980928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7561320603290308e-05,
+      "loss": 3.4055,
+      "step": 1981440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7552934655779785e-05,
+      "loss": 3.4106,
+      "step": 1981952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7544565087072997e-05,
+      "loss": 3.4129,
+      "step": 1982464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7536179139562477e-05,
+      "loss": 3.4184,
+      "step": 1982976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7527793192051957e-05,
+      "loss": 3.4119,
+      "step": 1983488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7519407244541437e-05,
+      "loss": 3.4129,
+      "step": 1984000
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.860267162322998,
+      "eval_runtime": 315.428,
+      "eval_samples_per_second": 1209.756,
+      "eval_steps_per_second": 37.806,
+      "step": 1984320
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7511021297030917e-05,
+      "loss": 3.3985,
+      "step": 1984512
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7502635349520397e-05,
+      "loss": 3.4065,
+      "step": 1985024
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7494249402009877e-05,
+      "loss": 3.4122,
+      "step": 1985536
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7485863454499357e-05,
+      "loss": 3.412,
+      "step": 1986048
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7477493885792566e-05,
+      "loss": 3.421,
+      "step": 1986560
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.746910793828205e-05,
+      "loss": 3.405,
+      "step": 1987072
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.746072199077153e-05,
+      "loss": 3.415,
+      "step": 1987584
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.745233604326101e-05,
+      "loss": 3.4026,
+      "step": 1988096
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7443966474554218e-05,
+      "loss": 3.4094,
+      "step": 1988608
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7435580527043698e-05,
+      "loss": 3.4117,
+      "step": 1989120
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7427194579533178e-05,
+      "loss": 3.4033,
+      "step": 1989632
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7418808632022658e-05,
+      "loss": 3.4182,
+      "step": 1990144
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.741043906331587e-05,
+      "loss": 3.4218,
+      "step": 1990656
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.740205311580535e-05,
+      "loss": 3.3995,
+      "step": 1991168
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.739366716829483e-05,
+      "loss": 3.4128,
+      "step": 1991680
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.738528122078431e-05,
+      "loss": 3.3962,
+      "step": 1992192
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.737691165207752e-05,
+      "loss": 3.4,
+      "step": 1992704
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7368525704567003e-05,
+      "loss": 3.4083,
+      "step": 1993216
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7360139757056483e-05,
+      "loss": 3.4015,
+      "step": 1993728
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7351753809545963e-05,
+      "loss": 3.4108,
+      "step": 1994240
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7343384240839172e-05,
+      "loss": 3.4172,
+      "step": 1994752
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7334998293328652e-05,
+      "loss": 3.4143,
+      "step": 1995264
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7326612345818132e-05,
+      "loss": 3.4188,
+      "step": 1995776
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7318226398307612e-05,
+      "loss": 3.4111,
+      "step": 1996288
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7309856829600824e-05,
+      "loss": 3.4043,
+      "step": 1996800
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7301470882090304e-05,
+      "loss": 3.4122,
+      "step": 1997312
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7293084934579784e-05,
+      "loss": 3.4037,
+      "step": 1997824
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.728469898706926e-05,
+      "loss": 3.4123,
+      "step": 1998336
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7276329418362473e-05,
+      "loss": 3.4,
+      "step": 1998848
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7267943470851957e-05,
+      "loss": 3.4049,
+      "step": 1999360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7259557523341437e-05,
+      "loss": 3.4076,
+      "step": 1999872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7251171575830913e-05,
+      "loss": 3.4113,
+      "step": 2000384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7242802007124126e-05,
+      "loss": 3.4152,
+      "step": 2000896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7234416059613606e-05,
+      "loss": 3.4027,
+      "step": 2001408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7226030112103082e-05,
+      "loss": 3.4068,
+      "step": 2001920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7217644164592565e-05,
+      "loss": 3.4082,
+      "step": 2002432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7209274595885778e-05,
+      "loss": 3.4126,
+      "step": 2002944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7200888648375258e-05,
+      "loss": 3.3989,
+      "step": 2003456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7192502700864734e-05,
+      "loss": 3.4012,
+      "step": 2003968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7184116753354214e-05,
+      "loss": 3.3982,
+      "step": 2004480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7175747184647427e-05,
+      "loss": 3.4025,
+      "step": 2004992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.716736123713691e-05,
+      "loss": 3.4076,
+      "step": 2005504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7158975289626387e-05,
+      "loss": 3.4135,
+      "step": 2006016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7150589342115867e-05,
+      "loss": 3.4087,
+      "step": 2006528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.714221977340908e-05,
+      "loss": 3.4138,
+      "step": 2007040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7133833825898556e-05,
+      "loss": 3.4095,
+      "step": 2007552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7125447878388036e-05,
+      "loss": 3.4121,
+      "step": 2008064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.711706193087752e-05,
+      "loss": 3.4041,
+      "step": 2008576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.710869236217073e-05,
+      "loss": 3.3811,
+      "step": 2009088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7100306414660208e-05,
+      "loss": 3.4206,
+      "step": 2009600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7091920467149688e-05,
+      "loss": 3.3979,
+      "step": 2010112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7083534519639168e-05,
+      "loss": 3.4075,
+      "step": 2010624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.707516495093238e-05,
+      "loss": 3.4081,
+      "step": 2011136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.706677900342186e-05,
+      "loss": 3.398,
+      "step": 2011648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.705839305591134e-05,
+      "loss": 3.3954,
+      "step": 2012160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7050023487204553e-05,
+      "loss": 3.3932,
+      "step": 2012672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.704163753969403e-05,
+      "loss": 3.4079,
+      "step": 2013184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.703325159218351e-05,
+      "loss": 3.3959,
+      "step": 2013696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.702486564467299e-05,
+      "loss": 3.4153,
+      "step": 2014208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7016496075966205e-05,
+      "loss": 3.4006,
+      "step": 2014720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7008110128455682e-05,
+      "loss": 3.389,
+      "step": 2015232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6999724180945162e-05,
+      "loss": 3.4179,
+      "step": 2015744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6991338233434642e-05,
+      "loss": 3.3933,
+      "step": 2016256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6982968664727854e-05,
+      "loss": 3.3927,
+      "step": 2016768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6974582717217334e-05,
+      "loss": 3.4099,
+      "step": 2017280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6966196769706814e-05,
+      "loss": 3.4135,
+      "step": 2017792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6957810822196294e-05,
+      "loss": 3.3934,
+      "step": 2018304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6949441253489503e-05,
+      "loss": 3.3957,
+      "step": 2018816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6941055305978983e-05,
+      "loss": 3.3907,
+      "step": 2019328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6932669358468463e-05,
+      "loss": 3.3966,
+      "step": 2019840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6924283410957943e-05,
+      "loss": 3.4121,
+      "step": 2020352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6915913842251156e-05,
+      "loss": 3.4047,
+      "step": 2020864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6907527894740636e-05,
+      "loss": 3.4087,
+      "step": 2021376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6899141947230115e-05,
+      "loss": 3.4057,
+      "step": 2021888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6890755999719595e-05,
+      "loss": 3.4136,
+      "step": 2022400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6882386431012805e-05,
+      "loss": 3.4062,
+      "step": 2022912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6874000483502288e-05,
+      "loss": 3.4159,
+      "step": 2023424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6865614535991768e-05,
+      "loss": 3.3898,
+      "step": 2023936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6857228588481248e-05,
+      "loss": 3.4007,
+      "step": 2024448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6848859019774457e-05,
+      "loss": 3.405,
+      "step": 2024960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6840473072263937e-05,
+      "loss": 3.407,
+      "step": 2025472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6832087124753417e-05,
+      "loss": 3.4119,
+      "step": 2025984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6823701177242897e-05,
+      "loss": 3.4103,
+      "step": 2026496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.681533160853611e-05,
+      "loss": 3.3991,
+      "step": 2027008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.680694566102559e-05,
+      "loss": 3.3982,
+      "step": 2027520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.679855971351507e-05,
+      "loss": 3.405,
+      "step": 2028032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.679017376600455e-05,
+      "loss": 3.4103,
+      "step": 2028544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6781804197297758e-05,
+      "loss": 3.3928,
+      "step": 2029056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.677341824978724e-05,
+      "loss": 3.403,
+      "step": 2029568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.676503230227672e-05,
+      "loss": 3.411,
+      "step": 2030080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.67566463547662e-05,
+      "loss": 3.4014,
+      "step": 2030592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.674827678605941e-05,
+      "loss": 3.3996,
+      "step": 2031104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.673989083854889e-05,
+      "loss": 3.3957,
+      "step": 2031616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.673150489103837e-05,
+      "loss": 3.4109,
+      "step": 2032128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.672311894352785e-05,
+      "loss": 3.412,
+      "step": 2032640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6714749374821063e-05,
+      "loss": 3.4049,
+      "step": 2033152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6706379806114275e-05,
+      "loss": 3.398,
+      "step": 2033664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6697993858603752e-05,
+      "loss": 3.4025,
+      "step": 2034176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6689607911093232e-05,
+      "loss": 3.3984,
+      "step": 2034688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6681221963582712e-05,
+      "loss": 3.4103,
+      "step": 2035200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6672836016072195e-05,
+      "loss": 3.409,
+      "step": 2035712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6664450068561675e-05,
+      "loss": 3.3976,
+      "step": 2036224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6656064121051155e-05,
+      "loss": 3.3999,
+      "step": 2036736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.664767817354063e-05,
+      "loss": 3.3964,
+      "step": 2037248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6639308604833844e-05,
+      "loss": 3.393,
+      "step": 2037760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6630922657323324e-05,
+      "loss": 3.3986,
+      "step": 2038272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6622536709812804e-05,
+      "loss": 3.3968,
+      "step": 2038784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6614150762302284e-05,
+      "loss": 3.3996,
+      "step": 2039296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6605781193595497e-05,
+      "loss": 3.3983,
+      "step": 2039808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6597395246084976e-05,
+      "loss": 3.3939,
+      "step": 2040320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6589009298574453e-05,
+      "loss": 3.4006,
+      "step": 2040832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6580623351063933e-05,
+      "loss": 3.3954,
+      "step": 2041344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.657225378235715e-05,
+      "loss": 3.4126,
+      "step": 2041856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.656386783484663e-05,
+      "loss": 3.395,
+      "step": 2042368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6555481887336105e-05,
+      "loss": 3.4153,
+      "step": 2042880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6547095939825585e-05,
+      "loss": 3.4184,
+      "step": 2043392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6538726371118798e-05,
+      "loss": 3.406,
+      "step": 2043904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6530340423608278e-05,
+      "loss": 3.4009,
+      "step": 2044416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6521954476097758e-05,
+      "loss": 3.4011,
+      "step": 2044928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6513568528587238e-05,
+      "loss": 3.4006,
+      "step": 2045440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.650519895988045e-05,
+      "loss": 3.4077,
+      "step": 2045952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6496813012369927e-05,
+      "loss": 3.403,
+      "step": 2046464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6488427064859407e-05,
+      "loss": 3.3956,
+      "step": 2046976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6480041117348887e-05,
+      "loss": 3.4062,
+      "step": 2047488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.64716715486421e-05,
+      "loss": 3.4123,
+      "step": 2048000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.646328560113158e-05,
+      "loss": 3.3977,
+      "step": 2048512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.645489965362106e-05,
+      "loss": 3.397,
+      "step": 2049024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.644651370611054e-05,
+      "loss": 3.4045,
+      "step": 2049536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.643814413740375e-05,
+      "loss": 3.3915,
+      "step": 2050048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6429758189893228e-05,
+      "loss": 3.4151,
+      "step": 2050560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.642137224238271e-05,
+      "loss": 3.3991,
+      "step": 2051072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.641298629487219e-05,
+      "loss": 3.4134,
+      "step": 2051584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.64046167261654e-05,
+      "loss": 3.3869,
+      "step": 2052096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.639623077865488e-05,
+      "loss": 3.4111,
+      "step": 2052608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.638784483114436e-05,
+      "loss": 3.4021,
+      "step": 2053120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.637945888363384e-05,
+      "loss": 3.4061,
+      "step": 2053632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6371089314927053e-05,
+      "loss": 3.4014,
+      "step": 2054144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6362703367416533e-05,
+      "loss": 3.3985,
+      "step": 2054656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6354317419906013e-05,
+      "loss": 3.4089,
+      "step": 2055168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6345947851199225e-05,
+      "loss": 3.4221,
+      "step": 2055680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6337561903688702e-05,
+      "loss": 3.3896,
+      "step": 2056192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6329175956178182e-05,
+      "loss": 3.4036,
+      "step": 2056704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6320790008667665e-05,
+      "loss": 3.4041,
+      "step": 2057216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6312420439960874e-05,
+      "loss": 3.3962,
+      "step": 2057728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6304034492450354e-05,
+      "loss": 3.4023,
+      "step": 2058240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6295648544939834e-05,
+      "loss": 3.4052,
+      "step": 2058752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6287262597429314e-05,
+      "loss": 3.4082,
+      "step": 2059264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6278893028722527e-05,
+      "loss": 3.4047,
+      "step": 2059776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6270507081212006e-05,
+      "loss": 3.4039,
+      "step": 2060288
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.8618228435516357,
+      "eval_runtime": 310.5878,
+      "eval_samples_per_second": 1228.609,
+      "eval_steps_per_second": 38.395,
+      "step": 2060640
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6262121133701486e-05,
+      "loss": 3.3954,
+      "step": 2060800
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6253735186190966e-05,
+      "loss": 3.3938,
+      "step": 2061312
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6245365617484175e-05,
+      "loss": 3.406,
+      "step": 2061824
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6236979669973655e-05,
+      "loss": 3.3994,
+      "step": 2062336
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6228593722463135e-05,
+      "loss": 3.4137,
+      "step": 2062848
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.622020777495262e-05,
+      "loss": 3.3965,
+      "step": 2063360
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6211838206245828e-05,
+      "loss": 3.4025,
+      "step": 2063872
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6203452258735308e-05,
+      "loss": 3.395,
+      "step": 2064384
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6195066311224788e-05,
+      "loss": 3.4022,
+      "step": 2064896
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6186680363714268e-05,
+      "loss": 3.3961,
+      "step": 2065408
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.617831079500748e-05,
+      "loss": 3.3967,
+      "step": 2065920
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.616992484749696e-05,
+      "loss": 3.4127,
+      "step": 2066432
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.616153889998644e-05,
+      "loss": 3.4071,
+      "step": 2066944
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.615316933127965e-05,
+      "loss": 3.3899,
+      "step": 2067456
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.614478338376913e-05,
+      "loss": 3.4021,
+      "step": 2067968
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.613639743625861e-05,
+      "loss": 3.3862,
+      "step": 2068480
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.612801148874809e-05,
+      "loss": 3.3917,
+      "step": 2068992
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6119625541237572e-05,
+      "loss": 3.4004,
+      "step": 2069504
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6111239593727052e-05,
+      "loss": 3.3898,
+      "step": 2070016
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.610285364621653e-05,
+      "loss": 3.3998,
+      "step": 2070528
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.609446769870601e-05,
+      "loss": 3.4066,
+      "step": 2071040
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.608609812999922e-05,
+      "loss": 3.4053,
+      "step": 2071552
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.60777121824887e-05,
+      "loss": 3.4096,
+      "step": 2072064
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.606932623497818e-05,
+      "loss": 3.4037,
+      "step": 2072576
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.606094028746766e-05,
+      "loss": 3.3947,
+      "step": 2073088
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6052570718760874e-05,
+      "loss": 3.3985,
+      "step": 2073600
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6044184771250354e-05,
+      "loss": 3.3916,
+      "step": 2074112
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.603579882373983e-05,
+      "loss": 3.399,
+      "step": 2074624
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.602741287622931e-05,
+      "loss": 3.3923,
+      "step": 2075136
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6019043307522523e-05,
+      "loss": 3.3965,
+      "step": 2075648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.6010657360012003e-05,
+      "loss": 3.3995,
+      "step": 2076160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.6002271412501483e-05,
+      "loss": 3.3974,
+      "step": 2076672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5993885464990963e-05,
+      "loss": 3.4016,
+      "step": 2077184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5985515896284175e-05,
+      "loss": 3.3981,
+      "step": 2077696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.597712994877365e-05,
+      "loss": 3.3937,
+      "step": 2078208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5968744001263135e-05,
+      "loss": 3.4025,
+      "step": 2078720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5960358053752615e-05,
+      "loss": 3.4051,
+      "step": 2079232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5951988485045824e-05,
+      "loss": 3.3881,
+      "step": 2079744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5943602537535304e-05,
+      "loss": 3.3894,
+      "step": 2080256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5935216590024784e-05,
+      "loss": 3.3897,
+      "step": 2080768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5926830642514264e-05,
+      "loss": 3.3894,
+      "step": 2081280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5918461073807476e-05,
+      "loss": 3.3971,
+      "step": 2081792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5910075126296956e-05,
+      "loss": 3.4018,
+      "step": 2082304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5901689178786436e-05,
+      "loss": 3.4053,
+      "step": 2082816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.589331961007965e-05,
+      "loss": 3.4027,
+      "step": 2083328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5884933662569125e-05,
+      "loss": 3.4002,
+      "step": 2083840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5876547715058605e-05,
+      "loss": 3.4021,
+      "step": 2084352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.586816176754809e-05,
+      "loss": 3.396,
+      "step": 2084864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5859792198841298e-05,
+      "loss": 3.3725,
+      "step": 2085376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5851406251330778e-05,
+      "loss": 3.4085,
+      "step": 2085888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.584303668262399e-05,
+      "loss": 3.3884,
+      "step": 2086400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.583465073511347e-05,
+      "loss": 3.3983,
+      "step": 2086912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.582626478760295e-05,
+      "loss": 3.3939,
+      "step": 2087424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.581787884009243e-05,
+      "loss": 3.3866,
+      "step": 2087936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.580949289258191e-05,
+      "loss": 3.3887,
+      "step": 2088448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.580110694507139e-05,
+      "loss": 3.3857,
+      "step": 2088960
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.579272099756087e-05,
+      "loss": 3.3994,
+      "step": 2089472
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.578433505005035e-05,
+      "loss": 3.3834,
+      "step": 2089984
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.577596548134356e-05,
+      "loss": 3.4075,
+      "step": 2090496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5767579533833042e-05,
+      "loss": 3.3927,
+      "step": 2091008
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5759193586322522e-05,
+      "loss": 3.3724,
+      "step": 2091520
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5750807638812002e-05,
+      "loss": 3.4069,
+      "step": 2092032
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.574243807010521e-05,
+      "loss": 3.3834,
+      "step": 2092544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.573405212259469e-05,
+      "loss": 3.3845,
+      "step": 2093056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.572566617508417e-05,
+      "loss": 3.3957,
+      "step": 2093568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.571728022757365e-05,
+      "loss": 3.4049,
+      "step": 2094080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5708910658866864e-05,
+      "loss": 3.384,
+      "step": 2094592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5700524711356344e-05,
+      "loss": 3.3916,
+      "step": 2095104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5692138763845824e-05,
+      "loss": 3.3771,
+      "step": 2095616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5683752816335304e-05,
+      "loss": 3.3862,
+      "step": 2096128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5675383247628513e-05,
+      "loss": 3.4065,
+      "step": 2096640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5666997300117993e-05,
+      "loss": 3.3945,
+      "step": 2097152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5658611352607476e-05,
+      "loss": 3.3991,
+      "step": 2097664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5650225405096952e-05,
+      "loss": 3.3952,
+      "step": 2098176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5641855836390165e-05,
+      "loss": 3.4045,
+      "step": 2098688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5633469888879645e-05,
+      "loss": 3.3969,
+      "step": 2099200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5625083941369125e-05,
+      "loss": 3.4046,
+      "step": 2099712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5616697993858605e-05,
+      "loss": 3.3833,
+      "step": 2100224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5608328425151817e-05,
+      "loss": 3.3904,
+      "step": 2100736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5599942477641297e-05,
+      "loss": 3.3951,
+      "step": 2101248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5591556530130777e-05,
+      "loss": 3.3951,
+      "step": 2101760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5583170582620254e-05,
+      "loss": 3.3983,
+      "step": 2102272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5574801013913466e-05,
+      "loss": 3.4015,
+      "step": 2102784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5566415066402946e-05,
+      "loss": 3.3946,
+      "step": 2103296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5558029118892426e-05,
+      "loss": 3.384,
+      "step": 2103808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5549643171381906e-05,
+      "loss": 3.3954,
+      "step": 2104320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.554127360267512e-05,
+      "loss": 3.4004,
+      "step": 2104832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.55328876551646e-05,
+      "loss": 3.3827,
+      "step": 2105344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5524501707654075e-05,
+      "loss": 3.3907,
+      "step": 2105856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.551611576014356e-05,
+      "loss": 3.4004,
+      "step": 2106368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.550774619143677e-05,
+      "loss": 3.3897,
+      "step": 2106880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.549936024392625e-05,
+      "loss": 3.392,
+      "step": 2107392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5490974296415728e-05,
+      "loss": 3.3857,
+      "step": 2107904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5482588348905207e-05,
+      "loss": 3.4015,
+      "step": 2108416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.547421878019842e-05,
+      "loss": 3.3976,
+      "step": 2108928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.54658328326879e-05,
+      "loss": 3.4013,
+      "step": 2109440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5457463263981112e-05,
+      "loss": 3.3807,
+      "step": 2109952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5449077316470592e-05,
+      "loss": 3.3957,
+      "step": 2110464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5440691368960072e-05,
+      "loss": 3.3866,
+      "step": 2110976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.543230542144955e-05,
+      "loss": 3.3984,
+      "step": 2111488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.542391947393903e-05,
+      "loss": 3.4021,
+      "step": 2112000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5415533526428512e-05,
+      "loss": 3.3849,
+      "step": 2112512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5407147578917992e-05,
+      "loss": 3.3925,
+      "step": 2113024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5398761631407472e-05,
+      "loss": 3.3832,
+      "step": 2113536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.539039206270068e-05,
+      "loss": 3.3852,
+      "step": 2114048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.538200611519016e-05,
+      "loss": 3.3882,
+      "step": 2114560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.537362016767964e-05,
+      "loss": 3.388,
+      "step": 2115072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.536523422016912e-05,
+      "loss": 3.3906,
+      "step": 2115584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5356864651462334e-05,
+      "loss": 3.3888,
+      "step": 2116096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5348478703951813e-05,
+      "loss": 3.3842,
+      "step": 2116608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5340092756441293e-05,
+      "loss": 3.385,
+      "step": 2117120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5331706808930773e-05,
+      "loss": 3.3874,
+      "step": 2117632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5323337240223982e-05,
+      "loss": 3.4026,
+      "step": 2118144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5314951292713466e-05,
+      "loss": 3.3877,
+      "step": 2118656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5306565345202946e-05,
+      "loss": 3.4002,
+      "step": 2119168
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5298179397692426e-05,
+      "loss": 3.4045,
+      "step": 2119680
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5289809828985635e-05,
+      "loss": 3.4042,
+      "step": 2120192
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5281423881475115e-05,
+      "loss": 3.3876,
+      "step": 2120704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5273037933964595e-05,
+      "loss": 3.3913,
+      "step": 2121216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5264651986454075e-05,
+      "loss": 3.3924,
+      "step": 2121728
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5256282417747286e-05,
+      "loss": 3.395,
+      "step": 2122240
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5247896470236767e-05,
+      "loss": 3.3939,
+      "step": 2122752
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5239510522726247e-05,
+      "loss": 3.3892,
+      "step": 2123264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5231124575215727e-05,
+      "loss": 3.3904,
+      "step": 2123776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5222755006508938e-05,
+      "loss": 3.4015,
+      "step": 2124288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5214369058998418e-05,
+      "loss": 3.3959,
+      "step": 2124800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5205983111487898e-05,
+      "loss": 3.3825,
+      "step": 2125312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5197597163977376e-05,
+      "loss": 3.3963,
+      "step": 2125824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5189227595270589e-05,
+      "loss": 3.3855,
+      "step": 2126336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5180841647760068e-05,
+      "loss": 3.404,
+      "step": 2126848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.517245570024955e-05,
+      "loss": 3.3903,
+      "step": 2127360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5164069752739027e-05,
+      "loss": 3.4035,
+      "step": 2127872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.515570018403224e-05,
+      "loss": 3.3773,
+      "step": 2128384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.514731423652172e-05,
+      "loss": 3.3997,
+      "step": 2128896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.51389282890112e-05,
+      "loss": 3.3948,
+      "step": 2129408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5130558720304412e-05,
+      "loss": 3.3931,
+      "step": 2129920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5122172772793892e-05,
+      "loss": 3.3958,
+      "step": 2130432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5113786825283371e-05,
+      "loss": 3.3856,
+      "step": 2130944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.510540087777285e-05,
+      "loss": 3.3993,
+      "step": 2131456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5097031309066062e-05,
+      "loss": 3.4137,
+      "step": 2131968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5088645361555542e-05,
+      "loss": 3.3809,
+      "step": 2132480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5080259414045022e-05,
+      "loss": 3.3929,
+      "step": 2132992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.50718734665345e-05,
+      "loss": 3.3946,
+      "step": 2133504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5063503897827713e-05,
+      "loss": 3.3892,
+      "step": 2134016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5055117950317193e-05,
+      "loss": 3.3864,
+      "step": 2134528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5046732002806674e-05,
+      "loss": 3.3957,
+      "step": 2135040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5038346055296151e-05,
+      "loss": 3.3931,
+      "step": 2135552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5029976486589365e-05,
+      "loss": 3.3952,
+      "step": 2136064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5021590539078845e-05,
+      "loss": 3.3952,
+      "step": 2136576
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.8635990619659424,
+      "eval_runtime": 315.5233,
+      "eval_samples_per_second": 1209.391,
+      "eval_steps_per_second": 37.794,
+      "step": 2136960
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.5013220970372054e-05,
+      "loss": 3.385,
+      "step": 2137088
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.5004835022861536e-05,
+      "loss": 3.3813,
+      "step": 2137600
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4996449075351016e-05,
+      "loss": 3.397,
+      "step": 2138112
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4988063127840496e-05,
+      "loss": 3.3915,
+      "step": 2138624
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4979693559133707e-05,
+      "loss": 3.3972,
+      "step": 2139136
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4971307611623187e-05,
+      "loss": 3.3857,
+      "step": 2139648
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4962921664112667e-05,
+      "loss": 3.4006,
+      "step": 2140160
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4954535716602147e-05,
+      "loss": 3.385,
+      "step": 2140672
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4946166147895357e-05,
+      "loss": 3.3912,
+      "step": 2141184
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4937780200384837e-05,
+      "loss": 3.3849,
+      "step": 2141696
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4929394252874317e-05,
+      "loss": 3.3879,
+      "step": 2142208
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4921008305363795e-05,
+      "loss": 3.3982,
+      "step": 2142720
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4912638736657008e-05,
+      "loss": 3.3986,
+      "step": 2143232
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4904269167950219e-05,
+      "loss": 3.3795,
+      "step": 2143744
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4895883220439699e-05,
+      "loss": 3.3919,
+      "step": 2144256
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.488749727292918e-05,
+      "loss": 3.3771,
+      "step": 2144768
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.487911132541866e-05,
+      "loss": 3.385,
+      "step": 2145280
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.487072537790814e-05,
+      "loss": 3.3876,
+      "step": 2145792
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.486233943039762e-05,
+      "loss": 3.3817,
+      "step": 2146304
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4853953482887098e-05,
+      "loss": 3.3886,
+      "step": 2146816
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4845567535376578e-05,
+      "loss": 3.396,
+      "step": 2147328
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4837197966669791e-05,
+      "loss": 3.3918,
+      "step": 2147840
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.482881201915927e-05,
+      "loss": 3.4031,
+      "step": 2148352
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4820426071648749e-05,
+      "loss": 3.3938,
+      "step": 2148864
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4812040124138229e-05,
+      "loss": 3.3853,
+      "step": 2149376
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4803670555431443e-05,
+      "loss": 3.3914,
+      "step": 2149888
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.479528460792092e-05,
+      "loss": 3.3821,
+      "step": 2150400
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.47868986604104e-05,
+      "loss": 3.3901,
+      "step": 2150912
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4778512712899881e-05,
+      "loss": 3.3812,
+      "step": 2151424
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.4770143144193094e-05,
+      "loss": 3.3825,
+      "step": 2151936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.476175719668257e-05,
+      "loss": 3.3936,
+      "step": 2152448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4753371249172052e-05,
+      "loss": 3.3855,
+      "step": 2152960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4744985301661532e-05,
+      "loss": 3.3904,
+      "step": 2153472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4736615732954743e-05,
+      "loss": 3.3931,
+      "step": 2153984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4728229785444223e-05,
+      "loss": 3.3846,
+      "step": 2154496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4719843837933703e-05,
+      "loss": 3.3884,
+      "step": 2155008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4711457890423183e-05,
+      "loss": 3.3969,
+      "step": 2155520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4703088321716394e-05,
+      "loss": 3.375,
+      "step": 2156032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4694702374205874e-05,
+      "loss": 3.3829,
+      "step": 2156544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4686316426695353e-05,
+      "loss": 3.3798,
+      "step": 2157056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4677930479184835e-05,
+      "loss": 3.3812,
+      "step": 2157568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4669560910478044e-05,
+      "loss": 3.3855,
+      "step": 2158080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4661174962967524e-05,
+      "loss": 3.3906,
+      "step": 2158592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4652789015457006e-05,
+      "loss": 3.3934,
+      "step": 2159104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4644403067946486e-05,
+      "loss": 3.3924,
+      "step": 2159616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4636033499239697e-05,
+      "loss": 3.3893,
+      "step": 2160128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4627647551729177e-05,
+      "loss": 3.393,
+      "step": 2160640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4619277983022389e-05,
+      "loss": 3.3838,
+      "step": 2161152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4610892035511867e-05,
+      "loss": 3.3688,
+      "step": 2161664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4602506088001347e-05,
+      "loss": 3.3964,
+      "step": 2162176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4594120140490827e-05,
+      "loss": 3.3801,
+      "step": 2162688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4585734192980307e-05,
+      "loss": 3.3843,
+      "step": 2163200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4577348245469789e-05,
+      "loss": 3.3843,
+      "step": 2163712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4568962297959269e-05,
+      "loss": 3.3793,
+      "step": 2164224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4560576350448745e-05,
+      "loss": 3.3769,
+      "step": 2164736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.455220678174196e-05,
+      "loss": 3.3735,
+      "step": 2165248
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.454382083423144e-05,
+      "loss": 3.3898,
+      "step": 2165760
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.453543488672092e-05,
+      "loss": 3.3732,
+      "step": 2166272
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.452706531801413e-05,
+      "loss": 3.4,
+      "step": 2166784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.451867937050361e-05,
+      "loss": 3.3852,
+      "step": 2167296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.451029342299309e-05,
+      "loss": 3.3651,
+      "step": 2167808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.450190747548257e-05,
+      "loss": 3.3961,
+      "step": 2168320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.449353790677578e-05,
+      "loss": 3.3714,
+      "step": 2168832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.448515195926526e-05,
+      "loss": 3.3806,
+      "step": 2169344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.447676601175474e-05,
+      "loss": 3.382,
+      "step": 2169856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4468380064244219e-05,
+      "loss": 3.395,
+      "step": 2170368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4460010495537431e-05,
+      "loss": 3.3768,
+      "step": 2170880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4451624548026913e-05,
+      "loss": 3.3801,
+      "step": 2171392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4443238600516393e-05,
+      "loss": 3.3646,
+      "step": 2171904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.443485265300587e-05,
+      "loss": 3.3811,
+      "step": 2172416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4426483084299084e-05,
+      "loss": 3.3902,
+      "step": 2172928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4418097136788564e-05,
+      "loss": 3.388,
+      "step": 2173440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4409711189278044e-05,
+      "loss": 3.3865,
+      "step": 2173952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4401325241767522e-05,
+      "loss": 3.3864,
+      "step": 2174464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4392955673060735e-05,
+      "loss": 3.3965,
+      "step": 2174976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4384569725550214e-05,
+      "loss": 3.3852,
+      "step": 2175488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4376183778039693e-05,
+      "loss": 3.392,
+      "step": 2176000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4367797830529173e-05,
+      "loss": 3.3767,
+      "step": 2176512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4359428261822385e-05,
+      "loss": 3.3814,
+      "step": 2177024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4351042314311867e-05,
+      "loss": 3.3851,
+      "step": 2177536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4342656366801343e-05,
+      "loss": 3.3823,
+      "step": 2178048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4334270419290823e-05,
+      "loss": 3.3922,
+      "step": 2178560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4325900850584038e-05,
+      "loss": 3.3884,
+      "step": 2179072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4317514903073517e-05,
+      "loss": 3.3899,
+      "step": 2179584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4309128955562994e-05,
+      "loss": 3.3732,
+      "step": 2180096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4300743008052476e-05,
+      "loss": 3.3859,
+      "step": 2180608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4292373439345688e-05,
+      "loss": 3.3907,
+      "step": 2181120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4283987491835166e-05,
+      "loss": 3.3739,
+      "step": 2181632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4275601544324646e-05,
+      "loss": 3.3778,
+      "step": 2182144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.4267215596814126e-05,
+      "loss": 3.3889,
+      "step": 2182656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4258846028107339e-05,
+      "loss": 3.3839,
+      "step": 2183168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4250460080596817e-05,
+      "loss": 3.3803,
+      "step": 2183680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4242074133086297e-05,
+      "loss": 3.3789,
+      "step": 2184192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4233688185575777e-05,
+      "loss": 3.3881,
+      "step": 2184704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4225318616868991e-05,
+      "loss": 3.3932,
+      "step": 2185216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4216932669358468e-05,
+      "loss": 3.3888,
+      "step": 2185728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4208563100651682e-05,
+      "loss": 3.3727,
+      "step": 2186240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4200177153141162e-05,
+      "loss": 3.3861,
+      "step": 2186752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4191791205630638e-05,
+      "loss": 3.3763,
+      "step": 2187264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.418340525812012e-05,
+      "loss": 3.3883,
+      "step": 2187776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.41750193106096e-05,
+      "loss": 3.3936,
+      "step": 2188288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.416663336309908e-05,
+      "loss": 3.3775,
+      "step": 2188800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.415824741558856e-05,
+      "loss": 3.3782,
+      "step": 2189312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.414986146807804e-05,
+      "loss": 3.3794,
+      "step": 2189824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.414149189937125e-05,
+      "loss": 3.3754,
+      "step": 2190336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.413310595186073e-05,
+      "loss": 3.3723,
+      "step": 2190848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4124720004350212e-05,
+      "loss": 3.3831,
+      "step": 2191360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4116334056839692e-05,
+      "loss": 3.3805,
+      "step": 2191872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4107964488132901e-05,
+      "loss": 3.3826,
+      "step": 2192384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4099578540622383e-05,
+      "loss": 3.3725,
+      "step": 2192896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4091192593111863e-05,
+      "loss": 3.3771,
+      "step": 2193408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4082806645601343e-05,
+      "loss": 3.3753,
+      "step": 2193920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4074437076894554e-05,
+      "loss": 3.3941,
+      "step": 2194432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4066051129384034e-05,
+      "loss": 3.3778,
+      "step": 2194944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4057665181873514e-05,
+      "loss": 3.3917,
+      "step": 2195456
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4049279234362994e-05,
+      "loss": 3.3918,
+      "step": 2195968
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4040909665656204e-05,
+      "loss": 3.3988,
+      "step": 2196480
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4032523718145684e-05,
+      "loss": 3.3799,
+      "step": 2196992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4024137770635164e-05,
+      "loss": 3.3783,
+      "step": 2197504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4015751823124646e-05,
+      "loss": 3.384,
+      "step": 2198016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.4007382254417855e-05,
+      "loss": 3.3849,
+      "step": 2198528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3998996306907337e-05,
+      "loss": 3.3808,
+      "step": 2199040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3990610359396817e-05,
+      "loss": 3.3837,
+      "step": 2199552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3982224411886293e-05,
+      "loss": 3.3777,
+      "step": 2200064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3973854843179507e-05,
+      "loss": 3.3922,
+      "step": 2200576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3965468895668987e-05,
+      "loss": 3.3814,
+      "step": 2201088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3957082948158467e-05,
+      "loss": 3.3773,
+      "step": 2201600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3948697000647946e-05,
+      "loss": 3.3844,
+      "step": 2202112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3940327431941158e-05,
+      "loss": 3.3779,
+      "step": 2202624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3931941484430638e-05,
+      "loss": 3.3913,
+      "step": 2203136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3923555536920116e-05,
+      "loss": 3.3788,
+      "step": 2203648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3915169589409596e-05,
+      "loss": 3.3925,
+      "step": 2204160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3906800020702809e-05,
+      "loss": 3.3729,
+      "step": 2204672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.389841407319229e-05,
+      "loss": 3.3877,
+      "step": 2205184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3890028125681767e-05,
+      "loss": 3.3846,
+      "step": 2205696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3881642178171247e-05,
+      "loss": 3.3835,
+      "step": 2206208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3873272609464461e-05,
+      "loss": 3.3776,
+      "step": 2206720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3864886661953941e-05,
+      "loss": 3.3847,
+      "step": 2207232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3856500714443418e-05,
+      "loss": 3.3911,
+      "step": 2207744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.38481147669329e-05,
+      "loss": 3.4009,
+      "step": 2208256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3839745198226112e-05,
+      "loss": 3.3748,
+      "step": 2208768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.383135925071559e-05,
+      "loss": 3.3803,
+      "step": 2209280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.382297330320507e-05,
+      "loss": 3.3836,
+      "step": 2209792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.381458735569455e-05,
+      "loss": 3.378,
+      "step": 2210304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3806217786987762e-05,
+      "loss": 3.3749,
+      "step": 2210816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.379783183947724e-05,
+      "loss": 3.3883,
+      "step": 2211328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.378944589196672e-05,
+      "loss": 3.3819,
+      "step": 2211840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3781076323259933e-05,
+      "loss": 3.3902,
+      "step": 2212352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.3772690375749415e-05,
+      "loss": 3.38,
+      "step": 2212864
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.8643229007720947,
+      "eval_runtime": 308.5425,
+      "eval_samples_per_second": 1236.753,
+      "eval_steps_per_second": 38.649,
+      "step": 2213280
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3764304428238891e-05,
+      "loss": 3.381,
+      "step": 2213376
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3755918480728371e-05,
+      "loss": 3.3698,
+      "step": 2213888
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3747548912021585e-05,
+      "loss": 3.3875,
+      "step": 2214400
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3739162964511062e-05,
+      "loss": 3.3839,
+      "step": 2214912
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3730777017000544e-05,
+      "loss": 3.3866,
+      "step": 2215424
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3722391069490024e-05,
+      "loss": 3.379,
+      "step": 2215936
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3714021500783236e-05,
+      "loss": 3.3839,
+      "step": 2216448
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3705635553272714e-05,
+      "loss": 3.3771,
+      "step": 2216960
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3697249605762194e-05,
+      "loss": 3.3774,
+      "step": 2217472
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3688863658251674e-05,
+      "loss": 3.3808,
+      "step": 2217984
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3680494089544887e-05,
+      "loss": 3.3772,
+      "step": 2218496
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3672108142034365e-05,
+      "loss": 3.3883,
+      "step": 2219008
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3663722194523845e-05,
+      "loss": 3.3945,
+      "step": 2219520
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3655352625817059e-05,
+      "loss": 3.3693,
+      "step": 2220032
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3646966678306536e-05,
+      "loss": 3.3776,
+      "step": 2220544
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3638580730796016e-05,
+      "loss": 3.3722,
+      "step": 2221056
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3630194783285497e-05,
+      "loss": 3.3746,
+      "step": 2221568
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3621808835774977e-05,
+      "loss": 3.3783,
+      "step": 2222080
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3613422888264457e-05,
+      "loss": 3.3712,
+      "step": 2222592
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3605036940753937e-05,
+      "loss": 3.3763,
+      "step": 2223104
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3596650993243417e-05,
+      "loss": 3.3888,
+      "step": 2223616
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3588281424536628e-05,
+      "loss": 3.3797,
+      "step": 2224128
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3579895477026108e-05,
+      "loss": 3.3887,
+      "step": 2224640
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3571509529515588e-05,
+      "loss": 3.3858,
+      "step": 2225152
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.356312358200507e-05,
+      "loss": 3.3792,
+      "step": 2225664
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3554754013298279e-05,
+      "loss": 3.383,
+      "step": 2226176
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.354636806578776e-05,
+      "loss": 3.3713,
+      "step": 2226688
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.353798211827724e-05,
+      "loss": 3.3791,
+      "step": 2227200
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3529596170766717e-05,
+      "loss": 3.3748,
+      "step": 2227712
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.3521226602059931e-05,
+      "loss": 3.3712,
+      "step": 2228224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3512840654549411e-05,
+      "loss": 3.3851,
+      "step": 2228736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.350445470703889e-05,
+      "loss": 3.3761,
+      "step": 2229248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3496068759528369e-05,
+      "loss": 3.3836,
+      "step": 2229760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3487699190821582e-05,
+      "loss": 3.3772,
+      "step": 2230272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3479313243311062e-05,
+      "loss": 3.3759,
+      "step": 2230784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3470927295800542e-05,
+      "loss": 3.3825,
+      "step": 2231296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.346254134829002e-05,
+      "loss": 3.3867,
+      "step": 2231808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3454171779583232e-05,
+      "loss": 3.3616,
+      "step": 2232320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3445785832072714e-05,
+      "loss": 3.3778,
+      "step": 2232832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.343739988456219e-05,
+      "loss": 3.3707,
+      "step": 2233344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.342901393705167e-05,
+      "loss": 3.366,
+      "step": 2233856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3420644368344885e-05,
+      "loss": 3.3738,
+      "step": 2234368
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3412258420834365e-05,
+      "loss": 3.3856,
+      "step": 2234880
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3403872473323841e-05,
+      "loss": 3.3812,
+      "step": 2235392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3395486525813323e-05,
+      "loss": 3.3819,
+      "step": 2235904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3387116957106535e-05,
+      "loss": 3.3789,
+      "step": 2236416
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3378731009596015e-05,
+      "loss": 3.3826,
+      "step": 2236928
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3370361440889226e-05,
+      "loss": 3.3723,
+      "step": 2237440
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3361975493378706e-05,
+      "loss": 3.3591,
+      "step": 2237952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3353589545868186e-05,
+      "loss": 3.3846,
+      "step": 2238464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3345203598357664e-05,
+      "loss": 3.3718,
+      "step": 2238976
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3336817650847144e-05,
+      "loss": 3.3776,
+      "step": 2239488
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3328431703336624e-05,
+      "loss": 3.3731,
+      "step": 2240000
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3320045755826106e-05,
+      "loss": 3.3699,
+      "step": 2240512
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3311659808315586e-05,
+      "loss": 3.3668,
+      "step": 2241024
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3303290239608795e-05,
+      "loss": 3.3637,
+      "step": 2241536
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3294904292098276e-05,
+      "loss": 3.3825,
+      "step": 2242048
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3286518344587756e-05,
+      "loss": 3.3676,
+      "step": 2242560
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3278148775880967e-05,
+      "loss": 3.3848,
+      "step": 2243072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3269762828370447e-05,
+      "loss": 3.3757,
+      "step": 2243584
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3261376880859927e-05,
+      "loss": 3.3592,
+      "step": 2244096
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3252990933349407e-05,
+      "loss": 3.3835,
+      "step": 2244608
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3244621364642618e-05,
+      "loss": 3.3645,
+      "step": 2245120
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3236235417132098e-05,
+      "loss": 3.3694,
+      "step": 2245632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3227849469621578e-05,
+      "loss": 3.3699,
+      "step": 2246144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3219463522111058e-05,
+      "loss": 3.3845,
+      "step": 2246656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3211093953404269e-05,
+      "loss": 3.3693,
+      "step": 2247168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3202708005893748e-05,
+      "loss": 3.3671,
+      "step": 2247680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.319432205838323e-05,
+      "loss": 3.3526,
+      "step": 2248192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.318593611087271e-05,
+      "loss": 3.3696,
+      "step": 2248704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3177566542165921e-05,
+      "loss": 3.3824,
+      "step": 2249216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.31691805946554e-05,
+      "loss": 3.3817,
+      "step": 2249728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.316079464714488e-05,
+      "loss": 3.3737,
+      "step": 2250240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.315240869963436e-05,
+      "loss": 3.3792,
+      "step": 2250752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3144039130927572e-05,
+      "loss": 3.3856,
+      "step": 2251264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3135653183417051e-05,
+      "loss": 3.3727,
+      "step": 2251776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3127267235906531e-05,
+      "loss": 3.3797,
+      "step": 2252288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3118881288396011e-05,
+      "loss": 3.3743,
+      "step": 2252800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3110511719689222e-05,
+      "loss": 3.3707,
+      "step": 2253312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3102125772178702e-05,
+      "loss": 3.3717,
+      "step": 2253824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3093739824668184e-05,
+      "loss": 3.3746,
+      "step": 2254336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3085353877157664e-05,
+      "loss": 3.3804,
+      "step": 2254848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3076984308450873e-05,
+      "loss": 3.3796,
+      "step": 2255360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3068598360940354e-05,
+      "loss": 3.3801,
+      "step": 2255872
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3060212413429834e-05,
+      "loss": 3.3624,
+      "step": 2256384
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3051826465919314e-05,
+      "loss": 3.3774,
+      "step": 2256896
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3043456897212525e-05,
+      "loss": 3.38,
+      "step": 2257408
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3035070949702005e-05,
+      "loss": 3.3672,
+      "step": 2257920
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3026685002191485e-05,
+      "loss": 3.3697,
+      "step": 2258432
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.3018299054680965e-05,
+      "loss": 3.3784,
+      "step": 2258944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.3009929485974176e-05,
+      "loss": 3.3709,
+      "step": 2259456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.3001543538463656e-05,
+      "loss": 3.3721,
+      "step": 2259968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2993157590953137e-05,
+      "loss": 3.3698,
+      "step": 2260480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2984771643442614e-05,
+      "loss": 3.3804,
+      "step": 2260992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2976402074735826e-05,
+      "loss": 3.3795,
+      "step": 2261504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2968016127225308e-05,
+      "loss": 3.38,
+      "step": 2262016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2959630179714788e-05,
+      "loss": 3.3605,
+      "step": 2262528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2951244232204265e-05,
+      "loss": 3.3763,
+      "step": 2263040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2942874663497479e-05,
+      "loss": 3.3706,
+      "step": 2263552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2934488715986959e-05,
+      "loss": 3.3773,
+      "step": 2264064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2926102768476439e-05,
+      "loss": 3.384,
+      "step": 2264576
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2917716820965917e-05,
+      "loss": 3.3647,
+      "step": 2265088
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.290934725225913e-05,
+      "loss": 3.368,
+      "step": 2265600
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.290096130474861e-05,
+      "loss": 3.3662,
+      "step": 2266112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2892575357238088e-05,
+      "loss": 3.3703,
+      "step": 2266624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2884189409727568e-05,
+      "loss": 3.3631,
+      "step": 2267136
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.287581984102078e-05,
+      "loss": 3.3706,
+      "step": 2267648
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2867433893510262e-05,
+      "loss": 3.3711,
+      "step": 2268160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2859047945999738e-05,
+      "loss": 3.3711,
+      "step": 2268672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2850661998489218e-05,
+      "loss": 3.3624,
+      "step": 2269184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2842292429782433e-05,
+      "loss": 3.3707,
+      "step": 2269696
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2833906482271912e-05,
+      "loss": 3.3635,
+      "step": 2270208
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.282552053476139e-05,
+      "loss": 3.3821,
+      "step": 2270720
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.281713458725087e-05,
+      "loss": 3.3696,
+      "step": 2271232
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2808765018544083e-05,
+      "loss": 3.3792,
+      "step": 2271744
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2800379071033561e-05,
+      "loss": 3.3836,
+      "step": 2272256
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2791993123523041e-05,
+      "loss": 3.3878,
+      "step": 2272768
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2783607176012521e-05,
+      "loss": 3.3699,
+      "step": 2273280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2775237607305734e-05,
+      "loss": 3.3706,
+      "step": 2273792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2766851659795212e-05,
+      "loss": 3.3744,
+      "step": 2274304
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2758465712284692e-05,
+      "loss": 3.3729,
+      "step": 2274816
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2750079764774172e-05,
+      "loss": 3.3707,
+      "step": 2275328
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2741710196067386e-05,
+      "loss": 3.3734,
+      "step": 2275840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2733324248556863e-05,
+      "loss": 3.3705,
+      "step": 2276352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2724938301046343e-05,
+      "loss": 3.3807,
+      "step": 2276864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2716552353535824e-05,
+      "loss": 3.3718,
+      "step": 2277376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2708182784829033e-05,
+      "loss": 3.3662,
+      "step": 2277888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2699796837318515e-05,
+      "loss": 3.3737,
+      "step": 2278400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2691410889807995e-05,
+      "loss": 3.3709,
+      "step": 2278912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2683041321101208e-05,
+      "loss": 3.3777,
+      "step": 2279424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2674655373590686e-05,
+      "loss": 3.3732,
+      "step": 2279936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2666269426080166e-05,
+      "loss": 3.3808,
+      "step": 2280448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2657883478569646e-05,
+      "loss": 3.3667,
+      "step": 2280960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2649513909862857e-05,
+      "loss": 3.3747,
+      "step": 2281472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2641127962352336e-05,
+      "loss": 3.3785,
+      "step": 2281984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2632742014841816e-05,
+      "loss": 3.3737,
+      "step": 2282496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2624356067331296e-05,
+      "loss": 3.3698,
+      "step": 2283008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2615986498624507e-05,
+      "loss": 3.3735,
+      "step": 2283520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2607600551113987e-05,
+      "loss": 3.3791,
+      "step": 2284032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2599214603603469e-05,
+      "loss": 3.3928,
+      "step": 2284544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2590828656092949e-05,
+      "loss": 3.3682,
+      "step": 2285056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.2582459087386158e-05,
+      "loss": 3.3688,
+      "step": 2285568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.257407313987564e-05,
+      "loss": 3.3733,
+      "step": 2286080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.256568719236512e-05,
+      "loss": 3.3733,
+      "step": 2286592
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.25573012448546e-05,
+      "loss": 3.3625,
+      "step": 2287104
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.254893167614781e-05,
+      "loss": 3.3797,
+      "step": 2287616
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.254054572863729e-05,
+      "loss": 3.3738,
+      "step": 2288128
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.253215978112677e-05,
+      "loss": 3.3799,
+      "step": 2288640
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.252377383361625e-05,
+      "loss": 3.3646,
+      "step": 2289152
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.86430025100708,
+      "eval_runtime": 309.6051,
+      "eval_samples_per_second": 1232.509,
+      "eval_steps_per_second": 38.517,
+      "step": 2289600
+    }
+  ],
+  "logging_steps": 512,
+  "max_steps": 3052726,
+  "num_train_epochs": 9223372036854775807,
+  "save_steps": 10,
+  "total_flos": 1.6046213234352538e+18,
+  "trial_name": null,
+  "trial_params": null
+}