{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.8902737591809481,
"eval_steps": 500,
"global_step": 1000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0008902737591809482,
"grad_norm": null,
"learning_rate": 0.0002,
"loss": 4.9194,
"step": 1
},
{
"epoch": 0.0017805475183618963,
"grad_norm": null,
"learning_rate": 0.0002,
"loss": 5.9087,
"step": 2
},
{
"epoch": 0.0026708212775428445,
"grad_norm": null,
"learning_rate": 0.0002,
"loss": 4.4946,
"step": 3
},
{
"epoch": 0.0035610950367237926,
"grad_norm": null,
"learning_rate": 0.0002,
"loss": 4.8484,
"step": 4
},
{
"epoch": 0.00445136879590474,
"grad_norm": 2.1236016750335693,
"learning_rate": 0.00019999950652018584,
"loss": 4.4794,
"step": 5
},
{
"epoch": 0.005341642555085689,
"grad_norm": null,
"learning_rate": 0.00019999950652018584,
"loss": 4.9055,
"step": 6
},
{
"epoch": 0.006231916314266637,
"grad_norm": 4.251764297485352,
"learning_rate": 0.0001999980260856137,
"loss": 5.8693,
"step": 7
},
{
"epoch": 0.007122190073447585,
"grad_norm": 6.466914653778076,
"learning_rate": 0.000199995558710895,
"loss": 4.7998,
"step": 8
},
{
"epoch": 0.008012463832628533,
"grad_norm": 4.194855690002441,
"learning_rate": 0.00019999210442038162,
"loss": 4.891,
"step": 9
},
{
"epoch": 0.00890273759180948,
"grad_norm": 4.219061851501465,
"learning_rate": 0.00019998766324816607,
"loss": 4.1163,
"step": 10
},
{
"epoch": 0.00979301135099043,
"grad_norm": 5.155824184417725,
"learning_rate": 0.0001999822352380809,
"loss": 3.4456,
"step": 11
},
{
"epoch": 0.010683285110171378,
"grad_norm": 3.160855531692505,
"learning_rate": 0.00019997582044369843,
"loss": 2.9053,
"step": 12
},
{
"epoch": 0.011573558869352326,
"grad_norm": 2.9109556674957275,
"learning_rate": 0.00019996841892833,
"loss": 2.3232,
"step": 13
},
{
"epoch": 0.012463832628533273,
"grad_norm": 3.342146635055542,
"learning_rate": 0.00019996003076502565,
"loss": 2.391,
"step": 14
},
{
"epoch": 0.013354106387714223,
"grad_norm": 2.6786203384399414,
"learning_rate": 0.00019995065603657316,
"loss": 2.292,
"step": 15
},
{
"epoch": 0.01424438014689517,
"grad_norm": 3.7010445594787598,
"learning_rate": 0.0001999402948354973,
"loss": 2.2301,
"step": 16
},
{
"epoch": 0.015134653906076118,
"grad_norm": 4.940849781036377,
"learning_rate": 0.00019992894726405893,
"loss": 2.3113,
"step": 17
},
{
"epoch": 0.016024927665257066,
"grad_norm": 4.58949613571167,
"learning_rate": 0.000199916613434254,
"loss": 2.3711,
"step": 18
},
{
"epoch": 0.016915201424438014,
"grad_norm": 5.198491096496582,
"learning_rate": 0.0001999032934678125,
"loss": 2.4678,
"step": 19
},
{
"epoch": 0.01780547518361896,
"grad_norm": 4.089502334594727,
"learning_rate": 0.00019988898749619702,
"loss": 2.104,
"step": 20
},
{
"epoch": 0.018695748942799913,
"grad_norm": 3.2676820755004883,
"learning_rate": 0.00019987369566060176,
"loss": 2.5646,
"step": 21
},
{
"epoch": 0.01958602270198086,
"grad_norm": 2.9748589992523193,
"learning_rate": 0.00019985741811195097,
"loss": 1.7189,
"step": 22
},
{
"epoch": 0.020476296461161808,
"grad_norm": 3.193584680557251,
"learning_rate": 0.00019984015501089752,
"loss": 2.7344,
"step": 23
},
{
"epoch": 0.021366570220342756,
"grad_norm": 2.9254608154296875,
"learning_rate": 0.0001998219065278212,
"loss": 1.8742,
"step": 24
},
{
"epoch": 0.022256843979523704,
"grad_norm": 3.604414701461792,
"learning_rate": 0.00019980267284282717,
"loss": 2.2673,
"step": 25
},
{
"epoch": 0.02314711773870465,
"grad_norm": 2.6772382259368896,
"learning_rate": 0.00019978245414574417,
"loss": 1.6669,
"step": 26
},
{
"epoch": 0.0240373914978856,
"grad_norm": 3.481696128845215,
"learning_rate": 0.00019976125063612252,
"loss": 2.3554,
"step": 27
},
{
"epoch": 0.024927665257066547,
"grad_norm": 2.589164972305298,
"learning_rate": 0.00019973906252323238,
"loss": 1.9393,
"step": 28
},
{
"epoch": 0.025817939016247494,
"grad_norm": 2.7414450645446777,
"learning_rate": 0.0001997158900260614,
"loss": 2.2943,
"step": 29
},
{
"epoch": 0.026708212775428446,
"grad_norm": 2.633164167404175,
"learning_rate": 0.0001996917333733128,
"loss": 1.9682,
"step": 30
},
{
"epoch": 0.027598486534609393,
"grad_norm": 2.2975387573242188,
"learning_rate": 0.00019966659280340297,
"loss": 1.7236,
"step": 31
},
{
"epoch": 0.02848876029379034,
"grad_norm": 3.209672689437866,
"learning_rate": 0.00019964046856445924,
"loss": 2.4324,
"step": 32
},
{
"epoch": 0.02937903405297129,
"grad_norm": 2.241156816482544,
"learning_rate": 0.00019961336091431727,
"loss": 1.4512,
"step": 33
},
{
"epoch": 0.030269307812152237,
"grad_norm": 1.827873706817627,
"learning_rate": 0.00019958527012051857,
"loss": 1.3515,
"step": 34
},
{
"epoch": 0.031159581571333184,
"grad_norm": 2.4487650394439697,
"learning_rate": 0.00019955619646030802,
"loss": 1.7576,
"step": 35
},
{
"epoch": 0.03204985533051413,
"grad_norm": 2.6001129150390625,
"learning_rate": 0.00019952614022063084,
"loss": 1.5846,
"step": 36
},
{
"epoch": 0.03294012908969508,
"grad_norm": 2.0996642112731934,
"learning_rate": 0.00019949510169813003,
"loss": 2.2082,
"step": 37
},
{
"epoch": 0.03383040284887603,
"grad_norm": 2.8602750301361084,
"learning_rate": 0.00019946308119914323,
"loss": 2.2053,
"step": 38
},
{
"epoch": 0.03472067660805698,
"grad_norm": 2.3887569904327393,
"learning_rate": 0.0001994300790396999,
"loss": 2.0379,
"step": 39
},
{
"epoch": 0.03561095036723792,
"grad_norm": 3.160905599594116,
"learning_rate": 0.000199396095545518,
"loss": 2.7873,
"step": 40
},
{
"epoch": 0.036501224126418874,
"grad_norm": 2.939594268798828,
"learning_rate": 0.00019936113105200085,
"loss": 2.0382,
"step": 41
},
{
"epoch": 0.037391497885599825,
"grad_norm": 4.975297451019287,
"learning_rate": 0.00019932518590423394,
"loss": 1.5076,
"step": 42
},
{
"epoch": 0.03828177164478077,
"grad_norm": 1.9363937377929688,
"learning_rate": 0.00019928826045698136,
"loss": 1.8618,
"step": 43
},
{
"epoch": 0.03917204540396172,
"grad_norm": 2.7423884868621826,
"learning_rate": 0.0001992503550746824,
"loss": 2.0692,
"step": 44
},
{
"epoch": 0.040062319163142665,
"grad_norm": 2.3734116554260254,
"learning_rate": 0.0001992114701314478,
"loss": 2.3358,
"step": 45
},
{
"epoch": 0.040952592922323616,
"grad_norm": 2.050905227661133,
"learning_rate": 0.0001991716060110563,
"loss": 1.873,
"step": 46
},
{
"epoch": 0.04184286668150456,
"grad_norm": 2.0106308460235596,
"learning_rate": 0.00019913076310695068,
"loss": 2.0759,
"step": 47
},
{
"epoch": 0.04273314044068551,
"grad_norm": 1.9395461082458496,
"learning_rate": 0.00019908894182223388,
"loss": 2.0742,
"step": 48
},
{
"epoch": 0.043623414199866456,
"grad_norm": 2.643402338027954,
"learning_rate": 0.00019904614256966512,
"loss": 2.5036,
"step": 49
},
{
"epoch": 0.04451368795904741,
"grad_norm": 2.7979204654693604,
"learning_rate": 0.00019900236577165576,
"loss": 2.5311,
"step": 50
},
{
"epoch": 0.04540396171822836,
"grad_norm": 1.904785394668579,
"learning_rate": 0.0001989576118602651,
"loss": 1.4152,
"step": 51
},
{
"epoch": 0.0462942354774093,
"grad_norm": 1.4882405996322632,
"learning_rate": 0.00019891188127719618,
"loss": 1.4164,
"step": 52
},
{
"epoch": 0.047184509236590254,
"grad_norm": 6.002376556396484,
"learning_rate": 0.0001988651744737914,
"loss": 1.9128,
"step": 53
},
{
"epoch": 0.0480747829957712,
"grad_norm": 1.935181975364685,
"learning_rate": 0.00019881749191102808,
"loss": 1.981,
"step": 54
},
{
"epoch": 0.04896505675495215,
"grad_norm": 2.011384963989258,
"learning_rate": 0.00019876883405951377,
"loss": 1.7107,
"step": 55
},
{
"epoch": 0.049855330514133094,
"grad_norm": 2.44512939453125,
"learning_rate": 0.00019871920139948192,
"loss": 2.0596,
"step": 56
},
{
"epoch": 0.050745604273314045,
"grad_norm": 2.8638131618499756,
"learning_rate": 0.0001986685944207868,
"loss": 2.5629,
"step": 57
},
{
"epoch": 0.05163587803249499,
"grad_norm": 1.969163179397583,
"learning_rate": 0.0001986170136228989,
"loss": 1.6412,
"step": 58
},
{
"epoch": 0.05252615179167594,
"grad_norm": 1.6036492586135864,
"learning_rate": 0.00019856445951489982,
"loss": 1.957,
"step": 59
},
{
"epoch": 0.05341642555085689,
"grad_norm": 2.210430860519409,
"learning_rate": 0.0001985109326154774,
"loss": 1.4708,
"step": 60
},
{
"epoch": 0.054306699310037836,
"grad_norm": 3.7153899669647217,
"learning_rate": 0.00019845643345292054,
"loss": 1.7712,
"step": 61
},
{
"epoch": 0.05519697306921879,
"grad_norm": 3.576857328414917,
"learning_rate": 0.00019840096256511398,
"loss": 2.0909,
"step": 62
},
{
"epoch": 0.05608724682839973,
"grad_norm": 3.2540183067321777,
"learning_rate": 0.00019834452049953297,
"loss": 2.0778,
"step": 63
},
{
"epoch": 0.05697752058758068,
"grad_norm": 1.8862560987472534,
"learning_rate": 0.00019828710781323792,
"loss": 1.9815,
"step": 64
},
{
"epoch": 0.05786779434676163,
"grad_norm": 3.74940824508667,
"learning_rate": 0.0001982287250728689,
"loss": 1.9903,
"step": 65
},
{
"epoch": 0.05875806810594258,
"grad_norm": 2.30810546875,
"learning_rate": 0.0001981693728546399,
"loss": 2.2339,
"step": 66
},
{
"epoch": 0.05964834186512353,
"grad_norm": 2.387211561203003,
"learning_rate": 0.0001981090517443334,
"loss": 2.708,
"step": 67
},
{
"epoch": 0.06053861562430447,
"grad_norm": 2.4890730381011963,
"learning_rate": 0.00019804776233729444,
"loss": 2.0848,
"step": 68
},
{
"epoch": 0.061428889383485424,
"grad_norm": 2.170064687728882,
"learning_rate": 0.0001979855052384247,
"loss": 2.3971,
"step": 69
},
{
"epoch": 0.06231916314266637,
"grad_norm": 1.8925060033798218,
"learning_rate": 0.00019792228106217658,
"loss": 1.8057,
"step": 70
},
{
"epoch": 0.06320943690184731,
"grad_norm": 2.75600528717041,
"learning_rate": 0.00019785809043254722,
"loss": 1.8895,
"step": 71
},
{
"epoch": 0.06409971066102826,
"grad_norm": 2.811318874359131,
"learning_rate": 0.0001977929339830722,
"loss": 2.0633,
"step": 72
},
{
"epoch": 0.06498998442020922,
"grad_norm": 2.9272348880767822,
"learning_rate": 0.00019772681235681936,
"loss": 1.9009,
"step": 73
},
{
"epoch": 0.06588025817939017,
"grad_norm": 3.1955409049987793,
"learning_rate": 0.00019765972620638248,
"loss": 2.7632,
"step": 74
},
{
"epoch": 0.0667705319385711,
"grad_norm": 1.8320505619049072,
"learning_rate": 0.00019759167619387476,
"loss": 2.0807,
"step": 75
},
{
"epoch": 0.06766080569775205,
"grad_norm": 1.975226640701294,
"learning_rate": 0.00019752266299092236,
"loss": 2.3015,
"step": 76
},
{
"epoch": 0.068551079456933,
"grad_norm": 1.833039402961731,
"learning_rate": 0.00019745268727865774,
"loss": 2.1897,
"step": 77
},
{
"epoch": 0.06944135321611396,
"grad_norm": 2.358522891998291,
"learning_rate": 0.0001973817497477129,
"loss": 2.1918,
"step": 78
},
{
"epoch": 0.07033162697529491,
"grad_norm": 2.0557165145874023,
"learning_rate": 0.00019730985109821266,
"loss": 1.9941,
"step": 79
},
{
"epoch": 0.07122190073447585,
"grad_norm": 2.551722288131714,
"learning_rate": 0.00019723699203976766,
"loss": 2.0574,
"step": 80
},
{
"epoch": 0.0721121744936568,
"grad_norm": 2.009219169616699,
"learning_rate": 0.0001971631732914674,
"loss": 2.0421,
"step": 81
},
{
"epoch": 0.07300244825283775,
"grad_norm": 1.9363999366760254,
"learning_rate": 0.0001970883955818731,
"loss": 1.8771,
"step": 82
},
{
"epoch": 0.0738927220120187,
"grad_norm": 2.5710339546203613,
"learning_rate": 0.0001970126596490106,
"loss": 2.3888,
"step": 83
},
{
"epoch": 0.07478299577119965,
"grad_norm": 1.6190274953842163,
"learning_rate": 0.00019693596624036292,
"loss": 2.0216,
"step": 84
},
{
"epoch": 0.07567326953038059,
"grad_norm": 4.4114580154418945,
"learning_rate": 0.0001968583161128631,
"loss": 1.7062,
"step": 85
},
{
"epoch": 0.07656354328956154,
"grad_norm": 2.1439876556396484,
"learning_rate": 0.00019677971003288655,
"loss": 2.2109,
"step": 86
},
{
"epoch": 0.07745381704874249,
"grad_norm": 2.3916540145874023,
"learning_rate": 0.00019670014877624353,
"loss": 2.083,
"step": 87
},
{
"epoch": 0.07834409080792344,
"grad_norm": 4.150461673736572,
"learning_rate": 0.00019661963312817148,
"loss": 1.833,
"step": 88
},
{
"epoch": 0.07923436456710438,
"grad_norm": 2.2267212867736816,
"learning_rate": 0.0001965381638833274,
"loss": 1.7452,
"step": 89
},
{
"epoch": 0.08012463832628533,
"grad_norm": 1.6420040130615234,
"learning_rate": 0.00019645574184577982,
"loss": 1.4853,
"step": 90
},
{
"epoch": 0.08101491208546628,
"grad_norm": 3.3110744953155518,
"learning_rate": 0.000196372367829001,
"loss": 1.7166,
"step": 91
},
{
"epoch": 0.08190518584464723,
"grad_norm": 2.2621548175811768,
"learning_rate": 0.00019628804265585877,
"loss": 1.9514,
"step": 92
},
{
"epoch": 0.08279545960382818,
"grad_norm": 1.7131949663162231,
"learning_rate": 0.0001962027671586086,
"loss": 1.3943,
"step": 93
},
{
"epoch": 0.08368573336300912,
"grad_norm": 2.4566714763641357,
"learning_rate": 0.0001961165421788852,
"loss": 2.0299,
"step": 94
},
{
"epoch": 0.08457600712219007,
"grad_norm": 2.615260362625122,
"learning_rate": 0.0001960293685676943,
"loss": 2.1021,
"step": 95
},
{
"epoch": 0.08546628088137102,
"grad_norm": null,
"learning_rate": 0.0001960293685676943,
"loss": 1.8427,
"step": 96
},
{
"epoch": 0.08635655464055197,
"grad_norm": 2.9707720279693604,
"learning_rate": 0.0001959412471854043,
"loss": 2.4674,
"step": 97
},
{
"epoch": 0.08724682839973291,
"grad_norm": 1.8748257160186768,
"learning_rate": 0.0001958521789017376,
"loss": 1.8866,
"step": 98
},
{
"epoch": 0.08813710215891386,
"grad_norm": 2.90433406829834,
"learning_rate": 0.00019576216459576222,
"loss": 2.4039,
"step": 99
},
{
"epoch": 0.08902737591809481,
"grad_norm": 2.7328593730926514,
"learning_rate": 0.00019567120515588308,
"loss": 1.9389,
"step": 100
},
{
"epoch": 0.08991764967727577,
"grad_norm": 2.1492199897766113,
"learning_rate": 0.00019557930147983302,
"loss": 2.5141,
"step": 101
},
{
"epoch": 0.09080792343645672,
"grad_norm": 1.7645761966705322,
"learning_rate": 0.00019548645447466431,
"loss": 1.8018,
"step": 102
},
{
"epoch": 0.09169819719563765,
"grad_norm": 2.1450297832489014,
"learning_rate": 0.00019539266505673938,
"loss": 2.2096,
"step": 103
},
{
"epoch": 0.0925884709548186,
"grad_norm": 2.1536448001861572,
"learning_rate": 0.00019529793415172192,
"loss": 2.3354,
"step": 104
},
{
"epoch": 0.09347874471399956,
"grad_norm": 2.578547716140747,
"learning_rate": 0.00019520226269456768,
"loss": 2.4987,
"step": 105
},
{
"epoch": 0.09436901847318051,
"grad_norm": 1.8539806604385376,
"learning_rate": 0.00019510565162951537,
"loss": 1.6678,
"step": 106
},
{
"epoch": 0.09525929223236144,
"grad_norm": 3.501614570617676,
"learning_rate": 0.00019500810191007718,
"loss": 2.011,
"step": 107
},
{
"epoch": 0.0961495659915424,
"grad_norm": 2.3680636882781982,
"learning_rate": 0.00019490961449902946,
"loss": 2.2364,
"step": 108
},
{
"epoch": 0.09703983975072335,
"grad_norm": 2.0228307247161865,
"learning_rate": 0.0001948101903684032,
"loss": 2.1797,
"step": 109
},
{
"epoch": 0.0979301135099043,
"grad_norm": 1.9101430177688599,
"learning_rate": 0.00019470983049947444,
"loss": 1.9929,
"step": 110
},
{
"epoch": 0.09882038726908525,
"grad_norm": 1.478506088256836,
"learning_rate": 0.00019460853588275454,
"loss": 1.5515,
"step": 111
},
{
"epoch": 0.09971066102826619,
"grad_norm": 2.007383346557617,
"learning_rate": 0.00019450630751798048,
"loss": 2.2393,
"step": 112
},
{
"epoch": 0.10060093478744714,
"grad_norm": 1.8135859966278076,
"learning_rate": 0.000194403146414105,
"loss": 1.9253,
"step": 113
},
{
"epoch": 0.10149120854662809,
"grad_norm": 2.946672201156616,
"learning_rate": 0.00019429905358928646,
"loss": 2.9314,
"step": 114
},
{
"epoch": 0.10238148230580904,
"grad_norm": 2.189742088317871,
"learning_rate": 0.00019419403007087907,
"loss": 1.7929,
"step": 115
},
{
"epoch": 0.10327175606498998,
"grad_norm": 1.661193609237671,
"learning_rate": 0.00019408807689542257,
"loss": 1.737,
"step": 116
},
{
"epoch": 0.10416202982417093,
"grad_norm": 2.0452182292938232,
"learning_rate": 0.00019398119510863197,
"loss": 2.2965,
"step": 117
},
{
"epoch": 0.10505230358335188,
"grad_norm": 2.4580020904541016,
"learning_rate": 0.00019387338576538744,
"loss": 3.0578,
"step": 118
},
{
"epoch": 0.10594257734253283,
"grad_norm": 1.7867645025253296,
"learning_rate": 0.00019376464992972356,
"loss": 1.6043,
"step": 119
},
{
"epoch": 0.10683285110171378,
"grad_norm": 2.074819326400757,
"learning_rate": 0.00019365498867481923,
"loss": 1.9777,
"step": 120
},
{
"epoch": 0.10772312486089472,
"grad_norm": 1.3542097806930542,
"learning_rate": 0.00019354440308298675,
"loss": 1.4176,
"step": 121
},
{
"epoch": 0.10861339862007567,
"grad_norm": 1.3362765312194824,
"learning_rate": 0.00019343289424566122,
"loss": 1.2639,
"step": 122
},
{
"epoch": 0.10950367237925662,
"grad_norm": 2.469452142715454,
"learning_rate": 0.00019332046326338986,
"loss": 2.6764,
"step": 123
},
{
"epoch": 0.11039394613843757,
"grad_norm": 2.0411577224731445,
"learning_rate": 0.0001932071112458211,
"loss": 2.5306,
"step": 124
},
{
"epoch": 0.11128421989761851,
"grad_norm": 1.3371831178665161,
"learning_rate": 0.00019309283931169356,
"loss": 1.647,
"step": 125
},
{
"epoch": 0.11217449365679946,
"grad_norm": 2.3767588138580322,
"learning_rate": 0.00019297764858882514,
"loss": 2.1805,
"step": 126
},
{
"epoch": 0.11306476741598041,
"grad_norm": 1.9597545862197876,
"learning_rate": 0.00019286154021410173,
"loss": 2.5409,
"step": 127
},
{
"epoch": 0.11395504117516136,
"grad_norm": 2.3952715396881104,
"learning_rate": 0.00019274451533346615,
"loss": 2.2106,
"step": 128
},
{
"epoch": 0.11484531493434232,
"grad_norm": 2.271564245223999,
"learning_rate": 0.00019262657510190666,
"loss": 1.4799,
"step": 129
},
{
"epoch": 0.11573558869352325,
"grad_norm": 2.6225390434265137,
"learning_rate": 0.0001925077206834458,
"loss": 2.0251,
"step": 130
},
{
"epoch": 0.1166258624527042,
"grad_norm": 1.9409428834915161,
"learning_rate": 0.0001923879532511287,
"loss": 2.1348,
"step": 131
},
{
"epoch": 0.11751613621188516,
"grad_norm": 3.0642130374908447,
"learning_rate": 0.0001922672739870115,
"loss": 2.6375,
"step": 132
},
{
"epoch": 0.1184064099710661,
"grad_norm": 2.3774688243865967,
"learning_rate": 0.00019214568408214985,
"loss": 2.6276,
"step": 133
},
{
"epoch": 0.11929668373024706,
"grad_norm": 1.9567856788635254,
"learning_rate": 0.00019202318473658705,
"loss": 2.6106,
"step": 134
},
{
"epoch": 0.120186957489428,
"grad_norm": 2.2974131107330322,
"learning_rate": 0.00019189977715934213,
"loss": 2.4285,
"step": 135
},
{
"epoch": 0.12107723124860895,
"grad_norm": 3.681556463241577,
"learning_rate": 0.00019177546256839812,
"loss": 2.104,
"step": 136
},
{
"epoch": 0.1219675050077899,
"grad_norm": 6.61801290512085,
"learning_rate": 0.0001916502421906898,
"loss": 1.8274,
"step": 137
},
{
"epoch": 0.12285777876697085,
"grad_norm": 2.095499277114868,
"learning_rate": 0.00019152411726209176,
"loss": 1.7257,
"step": 138
},
{
"epoch": 0.12374805252615179,
"grad_norm": 1.8911921977996826,
"learning_rate": 0.00019139708902740613,
"loss": 1.907,
"step": 139
},
{
"epoch": 0.12463832628533274,
"grad_norm": 1.6587978601455688,
"learning_rate": 0.0001912691587403503,
"loss": 1.6967,
"step": 140
},
{
"epoch": 0.1255286000445137,
"grad_norm": 2.2344250679016113,
"learning_rate": 0.00019114032766354453,
"loss": 1.841,
"step": 141
},
{
"epoch": 0.12641887380369463,
"grad_norm": 2.4435856342315674,
"learning_rate": 0.00019101059706849957,
"loss": 2.4631,
"step": 142
},
{
"epoch": 0.1273091475628756,
"grad_norm": 2.551354169845581,
"learning_rate": 0.00019087996823560402,
"loss": 2.413,
"step": 143
},
{
"epoch": 0.12819942132205653,
"grad_norm": 2.87556791305542,
"learning_rate": 0.0001907484424541117,
"loss": 1.9834,
"step": 144
},
{
"epoch": 0.1290896950812375,
"grad_norm": 2.5196003913879395,
"learning_rate": 0.00019061602102212898,
"loss": 2.9326,
"step": 145
},
{
"epoch": 0.12997996884041843,
"grad_norm": 2.4851393699645996,
"learning_rate": 0.00019048270524660196,
"loss": 1.8773,
"step": 146
},
{
"epoch": 0.13087024259959937,
"grad_norm": 2.0737171173095703,
"learning_rate": 0.0001903484964433035,
"loss": 1.9065,
"step": 147
},
{
"epoch": 0.13176051635878033,
"grad_norm": 2.818920612335205,
"learning_rate": 0.00019021339593682028,
"loss": 2.1904,
"step": 148
},
{
"epoch": 0.13265079011796127,
"grad_norm": 1.77272629737854,
"learning_rate": 0.00019007740506053983,
"loss": 1.9956,
"step": 149
},
{
"epoch": 0.1335410638771422,
"grad_norm": 1.917798638343811,
"learning_rate": 0.0001899405251566371,
"loss": 1.9516,
"step": 150
},
{
"epoch": 0.13443133763632317,
"grad_norm": 1.5387191772460938,
"learning_rate": 0.00018980275757606157,
"loss": 1.2293,
"step": 151
},
{
"epoch": 0.1353216113955041,
"grad_norm": 2.3466856479644775,
"learning_rate": 0.00018966410367852362,
"loss": 2.0018,
"step": 152
},
{
"epoch": 0.13621188515468508,
"grad_norm": 2.354393243789673,
"learning_rate": 0.00018952456483248119,
"loss": 1.9607,
"step": 153
},
{
"epoch": 0.137102158913866,
"grad_norm": 3.2391810417175293,
"learning_rate": 0.0001893841424151264,
"loss": 1.6369,
"step": 154
},
{
"epoch": 0.13799243267304695,
"grad_norm": 1.563175082206726,
"learning_rate": 0.0001892428378123718,
"loss": 1.7831,
"step": 155
},
{
"epoch": 0.13888270643222791,
"grad_norm": 2.1940622329711914,
"learning_rate": 0.0001891006524188368,
"loss": 2.1351,
"step": 156
},
{
"epoch": 0.13977298019140885,
"grad_norm": 2.7515769004821777,
"learning_rate": 0.00018895758763783383,
"loss": 2.5552,
"step": 157
},
{
"epoch": 0.14066325395058982,
"grad_norm": 2.5042002201080322,
"learning_rate": 0.00018881364488135448,
"loss": 1.6571,
"step": 158
},
{
"epoch": 0.14155352770977075,
"grad_norm": 1.2943757772445679,
"learning_rate": 0.00018866882557005567,
"loss": 1.3131,
"step": 159
},
{
"epoch": 0.1424438014689517,
"grad_norm": 1.860222339630127,
"learning_rate": 0.00018852313113324552,
"loss": 2.0416,
"step": 160
},
{
"epoch": 0.14333407522813266,
"grad_norm": 1.4477007389068604,
"learning_rate": 0.00018837656300886937,
"loss": 1.4943,
"step": 161
},
{
"epoch": 0.1442243489873136,
"grad_norm": 2.673553705215454,
"learning_rate": 0.00018822912264349534,
"loss": 2.4297,
"step": 162
},
{
"epoch": 0.14511462274649456,
"grad_norm": 2.126965284347534,
"learning_rate": 0.00018808081149230036,
"loss": 1.9287,
"step": 163
},
{
"epoch": 0.1460048965056755,
"grad_norm": 1.6320929527282715,
"learning_rate": 0.00018793163101905563,
"loss": 1.8308,
"step": 164
},
{
"epoch": 0.14689517026485643,
"grad_norm": 2.4692060947418213,
"learning_rate": 0.00018778158269611218,
"loss": 2.6697,
"step": 165
},
{
"epoch": 0.1477854440240374,
"grad_norm": 1.8720828294754028,
"learning_rate": 0.00018763066800438636,
"loss": 2.1752,
"step": 166
},
{
"epoch": 0.14867571778321834,
"grad_norm": 1.8736835718154907,
"learning_rate": 0.0001874788884333453,
"loss": 1.821,
"step": 167
},
{
"epoch": 0.1495659915423993,
"grad_norm": 1.8159123659133911,
"learning_rate": 0.00018732624548099204,
"loss": 1.7778,
"step": 168
},
{
"epoch": 0.15045626530158024,
"grad_norm": 3.0733249187469482,
"learning_rate": 0.0001871727406538509,
"loss": 2.8908,
"step": 169
},
{
"epoch": 0.15134653906076118,
"grad_norm": 1.6238402128219604,
"learning_rate": 0.0001870183754669526,
"loss": 1.6326,
"step": 170
},
{
"epoch": 0.15223681281994214,
"grad_norm": 2.544752597808838,
"learning_rate": 0.00018686315144381913,
"loss": 2.0834,
"step": 171
},
{
"epoch": 0.15312708657912308,
"grad_norm": 2.253525972366333,
"learning_rate": 0.000186707070116449,
"loss": 2.3574,
"step": 172
},
{
"epoch": 0.15401736033830402,
"grad_norm": 2.5005857944488525,
"learning_rate": 0.0001865501330253019,
"loss": 2.3706,
"step": 173
},
{
"epoch": 0.15490763409748498,
"grad_norm": 2.5505003929138184,
"learning_rate": 0.00018639234171928353,
"loss": 2.6693,
"step": 174
},
{
"epoch": 0.15579790785666592,
"grad_norm": 2.0035719871520996,
"learning_rate": 0.0001862336977557304,
"loss": 1.8442,
"step": 175
},
{
"epoch": 0.15668818161584688,
"grad_norm": 1.8089210987091064,
"learning_rate": 0.0001860742027003944,
"loss": 2.1121,
"step": 176
},
{
"epoch": 0.15757845537502782,
"grad_norm": 1.3150620460510254,
"learning_rate": 0.00018591385812742725,
"loss": 1.6413,
"step": 177
},
{
"epoch": 0.15846872913420876,
"grad_norm": 2.3389761447906494,
"learning_rate": 0.00018575266561936523,
"loss": 2.561,
"step": 178
},
{
"epoch": 0.15935900289338972,
"grad_norm": 2.1656057834625244,
"learning_rate": 0.00018559062676711332,
"loss": 2.0227,
"step": 179
},
{
"epoch": 0.16024927665257066,
"grad_norm": 1.8123457431793213,
"learning_rate": 0.0001854277431699295,
"loss": 1.8462,
"step": 180
},
{
"epoch": 0.16113955041175163,
"grad_norm": 2.1559298038482666,
"learning_rate": 0.00018526401643540922,
"loss": 2.5308,
"step": 181
},
{
"epoch": 0.16202982417093256,
"grad_norm": 1.6904340982437134,
"learning_rate": 0.00018509944817946922,
"loss": 1.6456,
"step": 182
},
{
"epoch": 0.1629200979301135,
"grad_norm": 1.3393988609313965,
"learning_rate": 0.00018493404002633166,
"loss": 1.4475,
"step": 183
},
{
"epoch": 0.16381037168929447,
"grad_norm": 2.157855749130249,
"learning_rate": 0.00018476779360850832,
"loss": 2.446,
"step": 184
},
{
"epoch": 0.1647006454484754,
"grad_norm": 2.064180612564087,
"learning_rate": 0.00018460071056678422,
"loss": 1.7074,
"step": 185
},
{
"epoch": 0.16559091920765637,
"grad_norm": 1.8045761585235596,
"learning_rate": 0.00018443279255020152,
"loss": 2.1376,
"step": 186
},
{
"epoch": 0.1664811929668373,
"grad_norm": 1.4348753690719604,
"learning_rate": 0.00018426404121604323,
"loss": 1.889,
"step": 187
},
{
"epoch": 0.16737146672601824,
"grad_norm": 2.1404173374176025,
"learning_rate": 0.00018409445822981693,
"loss": 2.1494,
"step": 188
},
{
"epoch": 0.1682617404851992,
"grad_norm": 2.041149139404297,
"learning_rate": 0.00018392404526523817,
"loss": 2.061,
"step": 189
},
{
"epoch": 0.16915201424438014,
"grad_norm": 1.5126264095306396,
"learning_rate": 0.0001837528040042142,
"loss": 2.1654,
"step": 190
},
{
"epoch": 0.17004228800356108,
"grad_norm": 2.7885916233062744,
"learning_rate": 0.00018358073613682706,
"loss": 2.3817,
"step": 191
},
{
"epoch": 0.17093256176274205,
"grad_norm": 1.7389510869979858,
"learning_rate": 0.00018340784336131713,
"loss": 1.721,
"step": 192
},
{
"epoch": 0.17182283552192298,
"grad_norm": 1.8442341089248657,
"learning_rate": 0.00018323412738406635,
"loss": 1.8906,
"step": 193
},
{
"epoch": 0.17271310928110395,
"grad_norm": 1.5204037427902222,
"learning_rate": 0.00018305958991958127,
"loss": 1.4487,
"step": 194
},
{
"epoch": 0.1736033830402849,
"grad_norm": 1.7395809888839722,
"learning_rate": 0.0001828842326904762,
"loss": 1.8568,
"step": 195
},
{
"epoch": 0.17449365679946582,
"grad_norm": 2.0284206867218018,
"learning_rate": 0.00018270805742745617,
"loss": 2.1218,
"step": 196
},
{
"epoch": 0.1753839305586468,
"grad_norm": 1.6896398067474365,
"learning_rate": 0.00018253106586929997,
"loss": 1.6777,
"step": 197
},
{
"epoch": 0.17627420431782773,
"grad_norm": 2.2234766483306885,
"learning_rate": 0.00018235325976284275,
"loss": 1.9342,
"step": 198
},
{
"epoch": 0.1771644780770087,
"grad_norm": 1.6226762533187866,
"learning_rate": 0.00018217464086295904,
"loss": 1.4723,
"step": 199
},
{
"epoch": 0.17805475183618963,
"grad_norm": 1.9177464246749878,
"learning_rate": 0.00018199521093254523,
"loss": 1.4232,
"step": 200
},
{
"epoch": 0.17894502559537057,
"grad_norm": 2.020766019821167,
"learning_rate": 0.00018181497174250236,
"loss": 1.5295,
"step": 201
},
{
"epoch": 0.17983529935455153,
"grad_norm": 2.385566473007202,
"learning_rate": 0.00018163392507171842,
"loss": 2.0807,
"step": 202
},
{
"epoch": 0.18072557311373247,
"grad_norm": 1.6531530618667603,
"learning_rate": 0.00018145207270705096,
"loss": 1.8632,
"step": 203
},
{
"epoch": 0.18161584687291343,
"grad_norm": 1.9366607666015625,
"learning_rate": 0.0001812694164433094,
"loss": 1.8869,
"step": 204
},
{
"epoch": 0.18250612063209437,
"grad_norm": 2.225327253341675,
"learning_rate": 0.00018108595808323736,
"loss": 2.2026,
"step": 205
},
{
"epoch": 0.1833963943912753,
"grad_norm": 1.5953576564788818,
"learning_rate": 0.00018090169943749476,
"loss": 1.6126,
"step": 206
},
{
"epoch": 0.18428666815045627,
"grad_norm": 1.2869915962219238,
"learning_rate": 0.00018071664232464002,
"loss": 1.2606,
"step": 207
},
{
"epoch": 0.1851769419096372,
"grad_norm": 1.9665155410766602,
"learning_rate": 0.0001805307885711122,
"loss": 1.5137,
"step": 208
},
{
"epoch": 0.18606721566881815,
"grad_norm": 1.9578527212142944,
"learning_rate": 0.00018034414001121278,
"loss": 1.8762,
"step": 209
},
{
"epoch": 0.1869574894279991,
"grad_norm": 2.5774004459381104,
"learning_rate": 0.00018015669848708767,
"loss": 2.44,
"step": 210
},
{
"epoch": 0.18784776318718005,
"grad_norm": 1.7559341192245483,
"learning_rate": 0.00017996846584870908,
"loss": 1.869,
"step": 211
},
{
"epoch": 0.18873803694636102,
"grad_norm": 1.6659877300262451,
"learning_rate": 0.0001797794439538571,
"loss": 1.7385,
"step": 212
},
{
"epoch": 0.18962831070554195,
"grad_norm": 1.969826102256775,
"learning_rate": 0.0001795896346681016,
"loss": 1.9256,
"step": 213
},
{
"epoch": 0.1905185844647229,
"grad_norm": 2.6088500022888184,
"learning_rate": 0.00017939903986478355,
"loss": 2.6625,
"step": 214
},
{
"epoch": 0.19140885822390385,
"grad_norm": 1.7767618894577026,
"learning_rate": 0.00017920766142499672,
"loss": 1.4711,
"step": 215
},
{
"epoch": 0.1922991319830848,
"grad_norm": 2.5906081199645996,
"learning_rate": 0.00017901550123756906,
"loss": 2.676,
"step": 216
},
{
"epoch": 0.19318940574226576,
"grad_norm": 1.8418434858322144,
"learning_rate": 0.00017882256119904403,
"loss": 1.8141,
"step": 217
},
{
"epoch": 0.1940796795014467,
"grad_norm": 2.3214592933654785,
"learning_rate": 0.00017862884321366188,
"loss": 1.6772,
"step": 218
},
{
"epoch": 0.19496995326062763,
"grad_norm": 2.0468828678131104,
"learning_rate": 0.000178434349193341,
"loss": 2.4774,
"step": 219
},
{
"epoch": 0.1958602270198086,
"grad_norm": 2.031306743621826,
"learning_rate": 0.0001782390810576588,
"loss": 1.6958,
"step": 220
},
{
"epoch": 0.19675050077898953,
"grad_norm": 2.217170476913452,
"learning_rate": 0.000178043040733833,
"loss": 1.8302,
"step": 221
},
{
"epoch": 0.1976407745381705,
"grad_norm": 2.470906972885132,
"learning_rate": 0.00017784623015670238,
"loss": 2.2488,
"step": 222
},
{
"epoch": 0.19853104829735144,
"grad_norm": 2.1658363342285156,
"learning_rate": 0.00017764865126870786,
"loss": 1.5873,
"step": 223
},
{
"epoch": 0.19942132205653237,
"grad_norm": 2.0716309547424316,
"learning_rate": 0.00017745030601987337,
"loss": 1.815,
"step": 224
},
{
"epoch": 0.20031159581571334,
"grad_norm": 1.9759398698806763,
"learning_rate": 0.00017725119636778644,
"loss": 2.1319,
"step": 225
},
{
"epoch": 0.20120186957489428,
"grad_norm": 1.9106535911560059,
"learning_rate": 0.00017705132427757895,
"loss": 1.5344,
"step": 226
},
{
"epoch": 0.20209214333407524,
"grad_norm": 1.441969633102417,
"learning_rate": 0.00017685069172190766,
"loss": 1.6145,
"step": 227
},
{
"epoch": 0.20298241709325618,
"grad_norm": 2.0080626010894775,
"learning_rate": 0.00017664930068093498,
"loss": 2.3943,
"step": 228
},
{
"epoch": 0.20387269085243712,
"grad_norm": 1.463238000869751,
"learning_rate": 0.00017644715314230918,
"loss": 1.6539,
"step": 229
},
{
"epoch": 0.20476296461161808,
"grad_norm": 1.6935185194015503,
"learning_rate": 0.0001762442511011448,
"loss": 1.893,
"step": 230
},
{
"epoch": 0.20565323837079902,
"grad_norm": 2.5301573276519775,
"learning_rate": 0.0001760405965600031,
"loss": 2.2857,
"step": 231
},
{
"epoch": 0.20654351212997996,
"grad_norm": 1.8854233026504517,
"learning_rate": 0.0001758361915288722,
"loss": 2.3479,
"step": 232
},
{
"epoch": 0.20743378588916092,
"grad_norm": 2.3383989334106445,
"learning_rate": 0.0001756310380251472,
"loss": 2.4742,
"step": 233
},
{
"epoch": 0.20832405964834186,
"grad_norm": 1.7122273445129395,
"learning_rate": 0.00017542513807361037,
"loss": 1.4847,
"step": 234
},
{
"epoch": 0.20921433340752282,
"grad_norm": 1.4948415756225586,
"learning_rate": 0.00017521849370641114,
"loss": 2.002,
"step": 235
},
{
"epoch": 0.21010460716670376,
"grad_norm": 1.8573172092437744,
"learning_rate": 0.00017501110696304596,
"loss": 1.5925,
"step": 236
},
{
"epoch": 0.2109948809258847,
"grad_norm": 2.2749545574188232,
"learning_rate": 0.00017480297989033825,
"loss": 1.9138,
"step": 237
},
{
"epoch": 0.21188515468506566,
"grad_norm": 1.9408509731292725,
"learning_rate": 0.00017459411454241822,
"loss": 1.8457,
"step": 238
},
{
"epoch": 0.2127754284442466,
"grad_norm": 1.870306134223938,
"learning_rate": 0.00017438451298070252,
"loss": 1.6819,
"step": 239
},
{
"epoch": 0.21366570220342757,
"grad_norm": 1.6488808393478394,
"learning_rate": 0.00017417417727387394,
"loss": 1.7439,
"step": 240
},
{
"epoch": 0.2145559759626085,
"grad_norm": 1.684953212738037,
"learning_rate": 0.000173963109497861,
"loss": 1.7115,
"step": 241
},
{
"epoch": 0.21544624972178944,
"grad_norm": 1.9300771951675415,
"learning_rate": 0.0001737513117358174,
"loss": 1.7342,
"step": 242
},
{
"epoch": 0.2163365234809704,
"grad_norm": 2.416626214981079,
"learning_rate": 0.0001735387860781016,
"loss": 2.1706,
"step": 243
},
{
"epoch": 0.21722679724015134,
"grad_norm": 3.1392476558685303,
"learning_rate": 0.00017332553462225602,
"loss": 1.9876,
"step": 244
},
{
"epoch": 0.2181170709993323,
"grad_norm": 3.1887779235839844,
"learning_rate": 0.00017311155947298643,
"loss": 1.8074,
"step": 245
},
{
"epoch": 0.21900734475851324,
"grad_norm": 2.362192153930664,
"learning_rate": 0.00017289686274214118,
"loss": 2.3624,
"step": 246
},
{
"epoch": 0.21989761851769418,
"grad_norm": 2.7475128173828125,
"learning_rate": 0.0001726814465486903,
"loss": 2.0255,
"step": 247
},
{
"epoch": 0.22078789227687515,
"grad_norm": 2.4550669193267822,
"learning_rate": 0.0001724653130187047,
"loss": 1.9455,
"step": 248
},
{
"epoch": 0.22167816603605608,
"grad_norm": 3.648386001586914,
"learning_rate": 0.00017224846428533499,
"loss": 2.045,
"step": 249
},
{
"epoch": 0.22256843979523702,
"grad_norm": 2.9079108238220215,
"learning_rate": 0.0001720309024887907,
"loss": 1.7025,
"step": 250
},
{
"epoch": 0.223458713554418,
"grad_norm": 2.9583306312561035,
"learning_rate": 0.00017181262977631888,
"loss": 1.6872,
"step": 251
},
{
"epoch": 0.22434898731359892,
"grad_norm": 2.069707155227661,
"learning_rate": 0.00017159364830218312,
"loss": 2.0011,
"step": 252
},
{
"epoch": 0.2252392610727799,
"grad_norm": 2.955867290496826,
"learning_rate": 0.00017137396022764214,
"loss": 1.8701,
"step": 253
},
{
"epoch": 0.22612953483196083,
"grad_norm": 2.031825065612793,
"learning_rate": 0.00017115356772092857,
"loss": 1.8168,
"step": 254
},
{
"epoch": 0.22701980859114176,
"grad_norm": 2.6691067218780518,
"learning_rate": 0.0001709324729572274,
"loss": 2.2007,
"step": 255
},
{
"epoch": 0.22791008235032273,
"grad_norm": 2.2941246032714844,
"learning_rate": 0.00017071067811865476,
"loss": 2.6734,
"step": 256
},
{
"epoch": 0.22880035610950367,
"grad_norm": 4.628593444824219,
"learning_rate": 0.00017048818539423615,
"loss": 2.1296,
"step": 257
},
{
"epoch": 0.22969062986868463,
"grad_norm": 1.7624280452728271,
"learning_rate": 0.00017026499697988493,
"loss": 2.0804,
"step": 258
},
{
"epoch": 0.23058090362786557,
"grad_norm": 4.2342705726623535,
"learning_rate": 0.00017004111507838064,
"loss": 1.8746,
"step": 259
},
{
"epoch": 0.2314711773870465,
"grad_norm": 5.327749252319336,
"learning_rate": 0.00016981654189934727,
"loss": 2.1781,
"step": 260
},
{
"epoch": 0.23236145114622747,
"grad_norm": 2.6609578132629395,
"learning_rate": 0.00016959127965923142,
"loss": 1.7924,
"step": 261
},
{
"epoch": 0.2332517249054084,
"grad_norm": 1.2489802837371826,
"learning_rate": 0.0001693653305812805,
"loss": 1.4193,
"step": 262
},
{
"epoch": 0.23414199866458937,
"grad_norm": 2.315696954727173,
"learning_rate": 0.00016913869689552064,
"loss": 2.1331,
"step": 263
},
{
"epoch": 0.2350322724237703,
"grad_norm": 2.0057482719421387,
"learning_rate": 0.00016891138083873487,
"loss": 1.317,
"step": 264
},
{
"epoch": 0.23592254618295125,
"grad_norm": 1.934035301208496,
"learning_rate": 0.00016868338465444085,
"loss": 2.0334,
"step": 265
},
{
"epoch": 0.2368128199421322,
"grad_norm": 1.3984287977218628,
"learning_rate": 0.00016845471059286887,
"loss": 1.3556,
"step": 266
},
{
"epoch": 0.23770309370131315,
"grad_norm": 3.7118077278137207,
"learning_rate": 0.00016822536091093965,
"loss": 2.4466,
"step": 267
},
{
"epoch": 0.23859336746049412,
"grad_norm": 3.144308090209961,
"learning_rate": 0.00016799533787224192,
"loss": 2.0156,
"step": 268
},
{
"epoch": 0.23948364121967505,
"grad_norm": 1.601457118988037,
"learning_rate": 0.00016776464374701025,
"loss": 1.6662,
"step": 269
},
{
"epoch": 0.240373914978856,
"grad_norm": 2.1832356452941895,
"learning_rate": 0.00016753328081210245,
"loss": 1.8462,
"step": 270
},
{
"epoch": 0.24126418873803696,
"grad_norm": 2.149434804916382,
"learning_rate": 0.00016730125135097735,
"loss": 2.2022,
"step": 271
},
{
"epoch": 0.2421544624972179,
"grad_norm": 2.7085752487182617,
"learning_rate": 0.000167068557653672,
"loss": 1.306,
"step": 272
},
{
"epoch": 0.24304473625639883,
"grad_norm": 2.049161434173584,
"learning_rate": 0.0001668352020167793,
"loss": 2.1773,
"step": 273
},
{
"epoch": 0.2439350100155798,
"grad_norm": 1.8990561962127686,
"learning_rate": 0.00016660118674342517,
"loss": 2.1278,
"step": 274
},
{
"epoch": 0.24482528377476073,
"grad_norm": 1.579715371131897,
"learning_rate": 0.00016636651414324587,
"loss": 1.5829,
"step": 275
},
{
"epoch": 0.2457155575339417,
"grad_norm": 2.2845969200134277,
"learning_rate": 0.00016613118653236518,
"loss": 1.7622,
"step": 276
},
{
"epoch": 0.24660583129312263,
"grad_norm": 1.9798493385314941,
"learning_rate": 0.0001658952062333717,
"loss": 2.2187,
"step": 277
},
{
"epoch": 0.24749610505230357,
"grad_norm": 2.739555835723877,
"learning_rate": 0.00016565857557529566,
"loss": 1.7902,
"step": 278
},
{
"epoch": 0.24838637881148454,
"grad_norm": 3.5172815322875977,
"learning_rate": 0.00016542129689358612,
"loss": 2.1033,
"step": 279
},
{
"epoch": 0.24927665257066547,
"grad_norm": 2.1256103515625,
"learning_rate": 0.0001651833725300879,
"loss": 2.1375,
"step": 280
},
{
"epoch": 0.25016692632984644,
"grad_norm": 1.4635076522827148,
"learning_rate": 0.00016494480483301836,
"loss": 1.6513,
"step": 281
},
{
"epoch": 0.2510572000890274,
"grad_norm": 2.5206878185272217,
"learning_rate": 0.00016470559615694446,
"loss": 1.5158,
"step": 282
},
{
"epoch": 0.2519474738482083,
"grad_norm": 1.6548582315444946,
"learning_rate": 0.00016446574886275913,
"loss": 2.1055,
"step": 283
},
{
"epoch": 0.25283774760738925,
"grad_norm": 2.238192558288574,
"learning_rate": 0.00016422526531765846,
"loss": 2.2413,
"step": 284
},
{
"epoch": 0.25372802136657024,
"grad_norm": 1.7222086191177368,
"learning_rate": 0.00016398414789511786,
"loss": 2.0776,
"step": 285
},
{
"epoch": 0.2546182951257512,
"grad_norm": 3.4774537086486816,
"learning_rate": 0.000163742398974869,
"loss": 2.3177,
"step": 286
},
{
"epoch": 0.2555085688849321,
"grad_norm": 1.5054153203964233,
"learning_rate": 0.00016350002094287609,
"loss": 1.397,
"step": 287
},
{
"epoch": 0.25639884264411306,
"grad_norm": 2.4916911125183105,
"learning_rate": 0.00016325701619131246,
"loss": 2.4185,
"step": 288
},
{
"epoch": 0.257289116403294,
"grad_norm": 1.975932240486145,
"learning_rate": 0.00016301338711853693,
"loss": 1.79,
"step": 289
},
{
"epoch": 0.258179390162475,
"grad_norm": 2.211235761642456,
"learning_rate": 0.00016276913612907007,
"loss": 2.0443,
"step": 290
},
{
"epoch": 0.2590696639216559,
"grad_norm": 2.4086029529571533,
"learning_rate": 0.00016252426563357055,
"loss": 1.9955,
"step": 291
},
{
"epoch": 0.25995993768083686,
"grad_norm": 1.6216076612472534,
"learning_rate": 0.00016227877804881127,
"loss": 1.4914,
"step": 292
},
{
"epoch": 0.2608502114400178,
"grad_norm": 1.9729924201965332,
"learning_rate": 0.00016203267579765563,
"loss": 2.484,
"step": 293
},
{
"epoch": 0.26174048519919874,
"grad_norm": 1.7074676752090454,
"learning_rate": 0.00016178596130903344,
"loss": 1.9975,
"step": 294
},
{
"epoch": 0.26263075895837973,
"grad_norm": 2.2856390476226807,
"learning_rate": 0.00016153863701791717,
"loss": 2.5118,
"step": 295
},
{
"epoch": 0.26352103271756067,
"grad_norm": 2.10971999168396,
"learning_rate": 0.00016129070536529766,
"loss": 1.9017,
"step": 296
},
{
"epoch": 0.2644113064767416,
"grad_norm": 1.7737220525741577,
"learning_rate": 0.00016104216879816026,
"loss": 1.5883,
"step": 297
},
{
"epoch": 0.26530158023592254,
"grad_norm": 2.5703797340393066,
"learning_rate": 0.00016079302976946055,
"loss": 2.4941,
"step": 298
},
{
"epoch": 0.2661918539951035,
"grad_norm": 2.0059597492218018,
"learning_rate": 0.00016054329073810015,
"loss": 2.5748,
"step": 299
},
{
"epoch": 0.2670821277542844,
"grad_norm": 1.8103333711624146,
"learning_rate": 0.00016029295416890248,
"loss": 1.7253,
"step": 300
},
{
"epoch": 0.2679724015134654,
"grad_norm": 1.659818410873413,
"learning_rate": 0.00016004202253258842,
"loss": 1.727,
"step": 301
},
{
"epoch": 0.26886267527264635,
"grad_norm": 1.6676276922225952,
"learning_rate": 0.0001597904983057519,
"loss": 1.8779,
"step": 302
},
{
"epoch": 0.2697529490318273,
"grad_norm": 2.9163947105407715,
"learning_rate": 0.00015953838397083552,
"loss": 1.7142,
"step": 303
},
{
"epoch": 0.2706432227910082,
"grad_norm": 1.6108711957931519,
"learning_rate": 0.00015928568201610595,
"loss": 1.7663,
"step": 304
},
{
"epoch": 0.27153349655018916,
"grad_norm": 1.8713996410369873,
"learning_rate": 0.00015903239493562948,
"loss": 1.8855,
"step": 305
},
{
"epoch": 0.27242377030937015,
"grad_norm": 1.5807499885559082,
"learning_rate": 0.00015877852522924732,
"loss": 2.17,
"step": 306
},
{
"epoch": 0.2733140440685511,
"grad_norm": 1.5979249477386475,
"learning_rate": 0.00015852407540255104,
"loss": 1.6459,
"step": 307
},
{
"epoch": 0.274204317827732,
"grad_norm": 2.3570306301116943,
"learning_rate": 0.00015826904796685762,
"loss": 2.2516,
"step": 308
},
{
"epoch": 0.27509459158691296,
"grad_norm": 2.0605814456939697,
"learning_rate": 0.00015801344543918495,
"loss": 1.4365,
"step": 309
},
{
"epoch": 0.2759848653460939,
"grad_norm": 2.146772861480713,
"learning_rate": 0.00015775727034222675,
"loss": 2.0198,
"step": 310
},
{
"epoch": 0.2768751391052749,
"grad_norm": 2.2532994747161865,
"learning_rate": 0.00015750052520432787,
"loss": 2.3515,
"step": 311
},
{
"epoch": 0.27776541286445583,
"grad_norm": 2.261044502258301,
"learning_rate": 0.0001572432125594591,
"loss": 1.9073,
"step": 312
},
{
"epoch": 0.27865568662363677,
"grad_norm": 1.7866798639297485,
"learning_rate": 0.00015698533494719238,
"loss": 1.4373,
"step": 313
},
{
"epoch": 0.2795459603828177,
"grad_norm": 1.5028505325317383,
"learning_rate": 0.00015672689491267567,
"loss": 1.7461,
"step": 314
},
{
"epoch": 0.28043623414199864,
"grad_norm": 2.3228518962860107,
"learning_rate": 0.00015646789500660773,
"loss": 2.063,
"step": 315
},
{
"epoch": 0.28132650790117963,
"grad_norm": 4.042271137237549,
"learning_rate": 0.00015620833778521307,
"loss": 1.6605,
"step": 316
},
{
"epoch": 0.28221678166036057,
"grad_norm": 2.26991868019104,
"learning_rate": 0.0001559482258102167,
"loss": 1.6581,
"step": 317
},
{
"epoch": 0.2831070554195415,
"grad_norm": 2.9728291034698486,
"learning_rate": 0.00015568756164881882,
"loss": 2.0517,
"step": 318
},
{
"epoch": 0.28399732917872245,
"grad_norm": 2.1857290267944336,
"learning_rate": 0.00015542634787366942,
"loss": 1.9245,
"step": 319
},
{
"epoch": 0.2848876029379034,
"grad_norm": 1.9066046476364136,
"learning_rate": 0.00015516458706284303,
"loss": 2.2897,
"step": 320
},
{
"epoch": 0.2857778766970844,
"grad_norm": 1.6206704378128052,
"learning_rate": 0.0001549022817998132,
"loss": 1.327,
"step": 321
},
{
"epoch": 0.2866681504562653,
"grad_norm": 2.0593979358673096,
"learning_rate": 0.00015463943467342693,
"loss": 1.8543,
"step": 322
},
{
"epoch": 0.28755842421544625,
"grad_norm": 1.762455940246582,
"learning_rate": 0.00015437604827787927,
"loss": 1.278,
"step": 323
},
{
"epoch": 0.2884486979746272,
"grad_norm": 2.4268031120300293,
"learning_rate": 0.00015411212521268758,
"loss": 2.4316,
"step": 324
},
{
"epoch": 0.2893389717338081,
"grad_norm": 1.6024727821350098,
"learning_rate": 0.00015384766808266602,
"loss": 1.9825,
"step": 325
},
{
"epoch": 0.2902292454929891,
"grad_norm": 2.4106545448303223,
"learning_rate": 0.00015358267949789966,
"loss": 2.4162,
"step": 326
},
{
"epoch": 0.29111951925217006,
"grad_norm": 1.7342814207077026,
"learning_rate": 0.00015331716207371888,
"loss": 2.1146,
"step": 327
},
{
"epoch": 0.292009793011351,
"grad_norm": 2.6861941814422607,
"learning_rate": 0.0001530511184306734,
"loss": 2.3119,
"step": 328
},
{
"epoch": 0.29290006677053193,
"grad_norm": 2.7844765186309814,
"learning_rate": 0.00015278455119450664,
"loss": 1.779,
"step": 329
},
{
"epoch": 0.29379034052971287,
"grad_norm": 2.8383467197418213,
"learning_rate": 0.0001525174629961296,
"loss": 2.0959,
"step": 330
},
{
"epoch": 0.29468061428889386,
"grad_norm": 1.988197922706604,
"learning_rate": 0.0001522498564715949,
"loss": 2.205,
"step": 331
},
{
"epoch": 0.2955708880480748,
"grad_norm": 2.0876247882843018,
"learning_rate": 0.00015198173426207094,
"loss": 1.6833,
"step": 332
},
{
"epoch": 0.29646116180725574,
"grad_norm": 1.5038071870803833,
"learning_rate": 0.00015171309901381572,
"loss": 1.5355,
"step": 333
},
{
"epoch": 0.2973514355664367,
"grad_norm": 1.8406671285629272,
"learning_rate": 0.00015144395337815064,
"loss": 1.6882,
"step": 334
},
{
"epoch": 0.2982417093256176,
"grad_norm": 1.7252678871154785,
"learning_rate": 0.00015117430001143452,
"loss": 1.8316,
"step": 335
},
{
"epoch": 0.2991319830847986,
"grad_norm": 2.3191328048706055,
"learning_rate": 0.00015090414157503714,
"loss": 1.7169,
"step": 336
},
{
"epoch": 0.30002225684397954,
"grad_norm": 1.5416702032089233,
"learning_rate": 0.00015063348073531324,
"loss": 1.7561,
"step": 337
},
{
"epoch": 0.3009125306031605,
"grad_norm": 2.3278818130493164,
"learning_rate": 0.0001503623201635761,
"loss": 2.284,
"step": 338
},
{
"epoch": 0.3018028043623414,
"grad_norm": 2.2224559783935547,
"learning_rate": 0.000150090662536071,
"loss": 2.1782,
"step": 339
},
{
"epoch": 0.30269307812152235,
"grad_norm": 2.172250986099243,
"learning_rate": 0.0001498185105339491,
"loss": 2.119,
"step": 340
},
{
"epoch": 0.3035833518807033,
"grad_norm": 1.8174207210540771,
"learning_rate": 0.00014954586684324078,
"loss": 1.6587,
"step": 341
},
{
"epoch": 0.3044736256398843,
"grad_norm": 1.9933828115463257,
"learning_rate": 0.00014927273415482915,
"loss": 1.8421,
"step": 342
},
{
"epoch": 0.3053638993990652,
"grad_norm": 1.7593411207199097,
"learning_rate": 0.00014899911516442365,
"loss": 2.4904,
"step": 343
},
{
"epoch": 0.30625417315824616,
"grad_norm": 2.25948429107666,
"learning_rate": 0.00014872501257253323,
"loss": 2.3422,
"step": 344
},
{
"epoch": 0.3071444469174271,
"grad_norm": 2.7912564277648926,
"learning_rate": 0.0001484504290844398,
"loss": 2.271,
"step": 345
},
{
"epoch": 0.30803472067660803,
"grad_norm": 1.8514056205749512,
"learning_rate": 0.00014817536741017152,
"loss": 2.3421,
"step": 346
},
{
"epoch": 0.308924994435789,
"grad_norm": 4.047842025756836,
"learning_rate": 0.00014789983026447612,
"loss": 1.4962,
"step": 347
},
{
"epoch": 0.30981526819496996,
"grad_norm": 2.038198471069336,
"learning_rate": 0.0001476238203667939,
"loss": 1.8679,
"step": 348
},
{
"epoch": 0.3107055419541509,
"grad_norm": 1.5603249073028564,
"learning_rate": 0.0001473473404412312,
"loss": 2.2399,
"step": 349
},
{
"epoch": 0.31159581571333184,
"grad_norm": 2.0062670707702637,
"learning_rate": 0.0001470703932165333,
"loss": 2.1111,
"step": 350
},
{
"epoch": 0.3124860894725128,
"grad_norm": 1.6404775381088257,
"learning_rate": 0.00014679298142605734,
"loss": 1.7317,
"step": 351
},
{
"epoch": 0.31337636323169377,
"grad_norm": 2.2055864334106445,
"learning_rate": 0.00014651510780774583,
"loss": 1.7962,
"step": 352
},
{
"epoch": 0.3142666369908747,
"grad_norm": 2.013460636138916,
"learning_rate": 0.00014623677510409918,
"loss": 2.1917,
"step": 353
},
{
"epoch": 0.31515691075005564,
"grad_norm": 1.7454619407653809,
"learning_rate": 0.00014595798606214882,
"loss": 1.8126,
"step": 354
},
{
"epoch": 0.3160471845092366,
"grad_norm": 2.3334221839904785,
"learning_rate": 0.00014567874343342997,
"loss": 1.5489,
"step": 355
},
{
"epoch": 0.3169374582684175,
"grad_norm": 2.6440775394439697,
"learning_rate": 0.00014539904997395468,
"loss": 2.3619,
"step": 356
},
{
"epoch": 0.3178277320275985,
"grad_norm": 2.4322640895843506,
"learning_rate": 0.00014511890844418453,
"loss": 2.1962,
"step": 357
},
{
"epoch": 0.31871800578677945,
"grad_norm": 2.2413289546966553,
"learning_rate": 0.00014483832160900326,
"loss": 1.9381,
"step": 358
},
{
"epoch": 0.3196082795459604,
"grad_norm": 2.011824369430542,
"learning_rate": 0.00014455729223768966,
"loss": 1.8238,
"step": 359
},
{
"epoch": 0.3204985533051413,
"grad_norm": 1.9649301767349243,
"learning_rate": 0.0001442758231038902,
"loss": 1.446,
"step": 360
},
{
"epoch": 0.32138882706432226,
"grad_norm": 2.035813093185425,
"learning_rate": 0.00014399391698559152,
"loss": 1.9748,
"step": 361
},
{
"epoch": 0.32227910082350325,
"grad_norm": 2.2650058269500732,
"learning_rate": 0.0001437115766650933,
"loss": 1.8778,
"step": 362
},
{
"epoch": 0.3231693745826842,
"grad_norm": 4.823574066162109,
"learning_rate": 0.00014342880492898048,
"loss": 1.3213,
"step": 363
},
{
"epoch": 0.3240596483418651,
"grad_norm": 1.6868720054626465,
"learning_rate": 0.0001431456045680959,
"loss": 1.8927,
"step": 364
},
{
"epoch": 0.32494992210104606,
"grad_norm": 2.2304537296295166,
"learning_rate": 0.00014286197837751286,
"loss": 2.1925,
"step": 365
},
{
"epoch": 0.325840195860227,
"grad_norm": 1.998257040977478,
"learning_rate": 0.00014257792915650728,
"loss": 1.9694,
"step": 366
},
{
"epoch": 0.326730469619408,
"grad_norm": 2.4832890033721924,
"learning_rate": 0.00014229345970853032,
"loss": 2.319,
"step": 367
},
{
"epoch": 0.32762074337858893,
"grad_norm": 1.5136913061141968,
"learning_rate": 0.00014200857284118066,
"loss": 1.3975,
"step": 368
},
{
"epoch": 0.32851101713776987,
"grad_norm": 1.8113616704940796,
"learning_rate": 0.00014172327136617656,
"loss": 1.6132,
"step": 369
},
{
"epoch": 0.3294012908969508,
"grad_norm": 3.2161953449249268,
"learning_rate": 0.00014143755809932845,
"loss": 1.7368,
"step": 370
},
{
"epoch": 0.33029156465613174,
"grad_norm": 2.979186534881592,
"learning_rate": 0.00014115143586051088,
"loss": 2.2553,
"step": 371
},
{
"epoch": 0.33118183841531273,
"grad_norm": 4.46711540222168,
"learning_rate": 0.00014086490747363493,
"loss": 1.6109,
"step": 372
},
{
"epoch": 0.33207211217449367,
"grad_norm": 1.9303728342056274,
"learning_rate": 0.00014057797576662,
"loss": 1.7995,
"step": 373
},
{
"epoch": 0.3329623859336746,
"grad_norm": 1.720313310623169,
"learning_rate": 0.00014029064357136628,
"loss": 1.5064,
"step": 374
},
{
"epoch": 0.33385265969285555,
"grad_norm": 1.9013950824737549,
"learning_rate": 0.00014000291372372647,
"loss": 1.8142,
"step": 375
},
{
"epoch": 0.3347429334520365,
"grad_norm": 1.8569226264953613,
"learning_rate": 0.00013971478906347806,
"loss": 1.8789,
"step": 376
},
{
"epoch": 0.3356332072112175,
"grad_norm": 2.768855333328247,
"learning_rate": 0.00013942627243429512,
"loss": 2.3762,
"step": 377
},
{
"epoch": 0.3365234809703984,
"grad_norm": 2.8080546855926514,
"learning_rate": 0.00013913736668372026,
"loss": 2.5786,
"step": 378
},
{
"epoch": 0.33741375472957935,
"grad_norm": 1.9482845067977905,
"learning_rate": 0.00013884807466313663,
"loss": 1.6996,
"step": 379
},
{
"epoch": 0.3383040284887603,
"grad_norm": 2.0869028568267822,
"learning_rate": 0.00013855839922773968,
"loss": 2.0259,
"step": 380
},
{
"epoch": 0.3391943022479412,
"grad_norm": 2.383391857147217,
"learning_rate": 0.000138268343236509,
"loss": 1.8103,
"step": 381
},
{
"epoch": 0.34008457600712216,
"grad_norm": 2.783890724182129,
"learning_rate": 0.00013797790955218014,
"loss": 1.7385,
"step": 382
},
{
"epoch": 0.34097484976630316,
"grad_norm": 1.6383908987045288,
"learning_rate": 0.00013768710104121627,
"loss": 1.5179,
"step": 383
},
{
"epoch": 0.3418651235254841,
"grad_norm": 2.065347194671631,
"learning_rate": 0.00013739592057378003,
"loss": 1.8636,
"step": 384
},
{
"epoch": 0.34275539728466503,
"grad_norm": 2.55138897895813,
"learning_rate": 0.0001371043710237051,
"loss": 2.1537,
"step": 385
},
{
"epoch": 0.34364567104384597,
"grad_norm": 2.131523609161377,
"learning_rate": 0.00013681245526846783,
"loss": 2.7836,
"step": 386
},
{
"epoch": 0.3445359448030269,
"grad_norm": 2.4860782623291016,
"learning_rate": 0.0001365201761891588,
"loss": 1.6104,
"step": 387
},
{
"epoch": 0.3454262185622079,
"grad_norm": 1.913320541381836,
"learning_rate": 0.00013622753667045457,
"loss": 1.6247,
"step": 388
},
{
"epoch": 0.34631649232138884,
"grad_norm": 3.11645245552063,
"learning_rate": 0.00013593453960058908,
"loss": 2.4805,
"step": 389
},
{
"epoch": 0.3472067660805698,
"grad_norm": 1.9025098085403442,
"learning_rate": 0.00013564118787132506,
"loss": 2.1268,
"step": 390
},
{
"epoch": 0.3480970398397507,
"grad_norm": 2.1510019302368164,
"learning_rate": 0.00013534748437792573,
"loss": 1.7887,
"step": 391
},
{
"epoch": 0.34898731359893165,
"grad_norm": 2.0571095943450928,
"learning_rate": 0.0001350534320191259,
"loss": 2.1384,
"step": 392
},
{
"epoch": 0.34987758735811264,
"grad_norm": 1.7569631338119507,
"learning_rate": 0.0001347590336971037,
"loss": 1.7031,
"step": 393
},
{
"epoch": 0.3507678611172936,
"grad_norm": 1.738153338432312,
"learning_rate": 0.0001344642923174517,
"loss": 1.9589,
"step": 394
},
{
"epoch": 0.3516581348764745,
"grad_norm": 2.1747841835021973,
"learning_rate": 0.00013416921078914835,
"loss": 2.0953,
"step": 395
},
{
"epoch": 0.35254840863565545,
"grad_norm": 1.6244169473648071,
"learning_rate": 0.00013387379202452917,
"loss": 1.2683,
"step": 396
},
{
"epoch": 0.3534386823948364,
"grad_norm": 1.4268521070480347,
"learning_rate": 0.00013357803893925807,
"loss": 1.7831,
"step": 397
},
{
"epoch": 0.3543289561540174,
"grad_norm": 2.6630098819732666,
"learning_rate": 0.00013328195445229868,
"loss": 2.2224,
"step": 398
},
{
"epoch": 0.3552192299131983,
"grad_norm": 1.8395928144454956,
"learning_rate": 0.00013298554148588528,
"loss": 1.7026,
"step": 399
},
{
"epoch": 0.35610950367237926,
"grad_norm": 1.6070834398269653,
"learning_rate": 0.00013268880296549425,
"loss": 1.6763,
"step": 400
},
{
"epoch": 0.3569997774315602,
"grad_norm": 2.3490657806396484,
"learning_rate": 0.00013239174181981495,
"loss": 1.6599,
"step": 401
},
{
"epoch": 0.35789005119074113,
"grad_norm": 3.1510345935821533,
"learning_rate": 0.00013209436098072095,
"loss": 1.247,
"step": 402
},
{
"epoch": 0.3587803249499221,
"grad_norm": 1.871864676475525,
"learning_rate": 0.00013179666338324108,
"loss": 2.056,
"step": 403
},
{
"epoch": 0.35967059870910306,
"grad_norm": 2.7115745544433594,
"learning_rate": 0.0001314986519655305,
"loss": 1.7978,
"step": 404
},
{
"epoch": 0.360560872468284,
"grad_norm": 1.72799551486969,
"learning_rate": 0.0001312003296688415,
"loss": 1.8869,
"step": 405
},
{
"epoch": 0.36145114622746494,
"grad_norm": 1.9090784788131714,
"learning_rate": 0.00013090169943749476,
"loss": 2.0729,
"step": 406
},
{
"epoch": 0.3623414199866459,
"grad_norm": 1.7954814434051514,
"learning_rate": 0.0001306027642188501,
"loss": 1.6842,
"step": 407
},
{
"epoch": 0.36323169374582687,
"grad_norm": 2.4236233234405518,
"learning_rate": 0.00013030352696327742,
"loss": 1.6774,
"step": 408
},
{
"epoch": 0.3641219675050078,
"grad_norm": 1.9163222312927246,
"learning_rate": 0.00013000399062412763,
"loss": 1.8087,
"step": 409
},
{
"epoch": 0.36501224126418874,
"grad_norm": 1.9570804834365845,
"learning_rate": 0.0001297041581577035,
"loss": 2.2163,
"step": 410
},
{
"epoch": 0.3659025150233697,
"grad_norm": 1.8605859279632568,
"learning_rate": 0.0001294040325232304,
"loss": 1.7177,
"step": 411
},
{
"epoch": 0.3667927887825506,
"grad_norm": 1.9943171739578247,
"learning_rate": 0.00012910361668282719,
"loss": 2.0689,
"step": 412
},
{
"epoch": 0.3676830625417316,
"grad_norm": 1.6847420930862427,
"learning_rate": 0.00012880291360147693,
"loss": 1.3597,
"step": 413
},
{
"epoch": 0.36857333630091255,
"grad_norm": 2.013718366622925,
"learning_rate": 0.0001285019262469976,
"loss": 2.3517,
"step": 414
},
{
"epoch": 0.3694636100600935,
"grad_norm": 1.6312776803970337,
"learning_rate": 0.00012820065759001293,
"loss": 1.4005,
"step": 415
},
{
"epoch": 0.3703538838192744,
"grad_norm": 2.1821882724761963,
"learning_rate": 0.00012789911060392294,
"loss": 1.9826,
"step": 416
},
{
"epoch": 0.37124415757845536,
"grad_norm": 2.313922166824341,
"learning_rate": 0.0001275972882648746,
"loss": 2.01,
"step": 417
},
{
"epoch": 0.3721344313376363,
"grad_norm": 3.776573419570923,
"learning_rate": 0.00012729519355173254,
"loss": 1.6818,
"step": 418
},
{
"epoch": 0.3730247050968173,
"grad_norm": 2.0204479694366455,
"learning_rate": 0.00012699282944604967,
"loss": 1.6056,
"step": 419
},
{
"epoch": 0.3739149788559982,
"grad_norm": 1.9438761472702026,
"learning_rate": 0.00012669019893203759,
"loss": 2.2081,
"step": 420
},
{
"epoch": 0.37480525261517916,
"grad_norm": 1.4874850511550903,
"learning_rate": 0.0001263873049965373,
"loss": 1.872,
"step": 421
},
{
"epoch": 0.3756955263743601,
"grad_norm": 2.692265272140503,
"learning_rate": 0.00012608415062898972,
"loss": 2.3535,
"step": 422
},
{
"epoch": 0.37658580013354104,
"grad_norm": 2.4645423889160156,
"learning_rate": 0.000125780738821406,
"loss": 2.0317,
"step": 423
},
{
"epoch": 0.37747607389272203,
"grad_norm": 1.6828348636627197,
"learning_rate": 0.00012547707256833823,
"loss": 1.5422,
"step": 424
},
{
"epoch": 0.37836634765190297,
"grad_norm": 2.2978315353393555,
"learning_rate": 0.00012517315486684972,
"loss": 1.5563,
"step": 425
},
{
"epoch": 0.3792566214110839,
"grad_norm": 2.2748594284057617,
"learning_rate": 0.0001248689887164855,
"loss": 2.0274,
"step": 426
},
{
"epoch": 0.38014689517026484,
"grad_norm": 1.9515248537063599,
"learning_rate": 0.00012456457711924266,
"loss": 1.569,
"step": 427
},
{
"epoch": 0.3810371689294458,
"grad_norm": 2.134955644607544,
"learning_rate": 0.00012425992307954075,
"loss": 2.2598,
"step": 428
},
{
"epoch": 0.3819274426886268,
"grad_norm": 4.138272285461426,
"learning_rate": 0.0001239550296041922,
"loss": 2.3726,
"step": 429
},
{
"epoch": 0.3828177164478077,
"grad_norm": 2.6821060180664062,
"learning_rate": 0.00012364989970237248,
"loss": 1.8071,
"step": 430
},
{
"epoch": 0.38370799020698865,
"grad_norm": 2.3056938648223877,
"learning_rate": 0.00012334453638559057,
"loss": 2.4385,
"step": 431
},
{
"epoch": 0.3845982639661696,
"grad_norm": 2.1225337982177734,
"learning_rate": 0.00012303894266765908,
"loss": 2.2096,
"step": 432
},
{
"epoch": 0.3854885377253505,
"grad_norm": 2.1250829696655273,
"learning_rate": 0.00012273312156466464,
"loss": 2.1836,
"step": 433
},
{
"epoch": 0.3863788114845315,
"grad_norm": 2.2559192180633545,
"learning_rate": 0.00012242707609493814,
"loss": 2.0049,
"step": 434
},
{
"epoch": 0.38726908524371245,
"grad_norm": 1.6558101177215576,
"learning_rate": 0.00012212080927902474,
"loss": 2.542,
"step": 435
},
{
"epoch": 0.3881593590028934,
"grad_norm": 2.9783380031585693,
"learning_rate": 0.00012181432413965428,
"loss": 2.2321,
"step": 436
},
{
"epoch": 0.3890496327620743,
"grad_norm": 2.465571165084839,
"learning_rate": 0.00012150762370171136,
"loss": 2.2325,
"step": 437
},
{
"epoch": 0.38993990652125526,
"grad_norm": 2.5568687915802,
"learning_rate": 0.00012120071099220549,
"loss": 1.9866,
"step": 438
},
{
"epoch": 0.39083018028043626,
"grad_norm": 1.9345122575759888,
"learning_rate": 0.00012089358904024117,
"loss": 1.977,
"step": 439
},
{
"epoch": 0.3917204540396172,
"grad_norm": 1.9170026779174805,
"learning_rate": 0.00012058626087698814,
"loss": 1.9797,
"step": 440
},
{
"epoch": 0.39261072779879813,
"grad_norm": 2.2700252532958984,
"learning_rate": 0.00012027872953565125,
"loss": 1.4902,
"step": 441
},
{
"epoch": 0.39350100155797907,
"grad_norm": 2.4803783893585205,
"learning_rate": 0.00011997099805144069,
"loss": 2.3046,
"step": 442
},
{
"epoch": 0.39439127531716,
"grad_norm": 1.6946451663970947,
"learning_rate": 0.000119663069461542,
"loss": 1.6621,
"step": 443
},
{
"epoch": 0.395281549076341,
"grad_norm": 1.673259973526001,
"learning_rate": 0.00011935494680508606,
"loss": 1.9749,
"step": 444
},
{
"epoch": 0.39617182283552194,
"grad_norm": 1.9574657678604126,
"learning_rate": 0.00011904663312311901,
"loss": 2.1803,
"step": 445
},
{
"epoch": 0.3970620965947029,
"grad_norm": 1.7250328063964844,
"learning_rate": 0.00011873813145857249,
"loss": 1.8238,
"step": 446
},
{
"epoch": 0.3979523703538838,
"grad_norm": 1.4955158233642578,
"learning_rate": 0.00011842944485623335,
"loss": 1.8026,
"step": 447
},
{
"epoch": 0.39884264411306475,
"grad_norm": 1.5842695236206055,
"learning_rate": 0.00011812057636271374,
"loss": 1.4951,
"step": 448
},
{
"epoch": 0.39973291787224574,
"grad_norm": 3.3510076999664307,
"learning_rate": 0.000117811529026421,
"loss": 2.1284,
"step": 449
},
{
"epoch": 0.4006231916314267,
"grad_norm": 1.7413403987884521,
"learning_rate": 0.00011750230589752762,
"loss": 2.1252,
"step": 450
},
{
"epoch": 0.4015134653906076,
"grad_norm": 1.716800332069397,
"learning_rate": 0.00011719291002794096,
"loss": 1.9248,
"step": 451
},
{
"epoch": 0.40240373914978855,
"grad_norm": 1.9150794744491577,
"learning_rate": 0.00011688334447127338,
"loss": 1.9823,
"step": 452
},
{
"epoch": 0.4032940129089695,
"grad_norm": 2.7563483715057373,
"learning_rate": 0.00011657361228281199,
"loss": 2.0374,
"step": 453
},
{
"epoch": 0.4041842866681505,
"grad_norm": 3.253457546234131,
"learning_rate": 0.00011626371651948838,
"loss": 2.2565,
"step": 454
},
{
"epoch": 0.4050745604273314,
"grad_norm": 1.7900830507278442,
"learning_rate": 0.00011595366023984864,
"loss": 1.7175,
"step": 455
},
{
"epoch": 0.40596483418651236,
"grad_norm": 1.7450424432754517,
"learning_rate": 0.0001156434465040231,
"loss": 1.8861,
"step": 456
},
{
"epoch": 0.4068551079456933,
"grad_norm": 3.2053165435791016,
"learning_rate": 0.00011533307837369607,
"loss": 1.8854,
"step": 457
},
{
"epoch": 0.40774538170487423,
"grad_norm": 4.137878894805908,
"learning_rate": 0.00011502255891207572,
"loss": 1.6806,
"step": 458
},
{
"epoch": 0.40863565546405517,
"grad_norm": 2.7607953548431396,
"learning_rate": 0.00011471189118386375,
"loss": 1.9632,
"step": 459
},
{
"epoch": 0.40952592922323616,
"grad_norm": 1.9842784404754639,
"learning_rate": 0.00011440107825522521,
"loss": 1.8746,
"step": 460
},
{
"epoch": 0.4104162029824171,
"grad_norm": 2.3230137825012207,
"learning_rate": 0.00011409012319375827,
"loss": 1.9887,
"step": 461
},
{
"epoch": 0.41130647674159804,
"grad_norm": 1.3049930334091187,
"learning_rate": 0.0001137790290684638,
"loss": 1.2191,
"step": 462
},
{
"epoch": 0.412196750500779,
"grad_norm": 2.8393821716308594,
"learning_rate": 0.00011346779894971527,
"loss": 2.3616,
"step": 463
},
{
"epoch": 0.4130870242599599,
"grad_norm": 1.8259540796279907,
"learning_rate": 0.00011315643590922827,
"loss": 1.755,
"step": 464
},
{
"epoch": 0.4139772980191409,
"grad_norm": 1.8513232469558716,
"learning_rate": 0.0001128449430200303,
"loss": 1.7911,
"step": 465
},
{
"epoch": 0.41486757177832184,
"grad_norm": 1.5856389999389648,
"learning_rate": 0.00011253332335643043,
"loss": 1.0047,
"step": 466
},
{
"epoch": 0.4157578455375028,
"grad_norm": 1.9679127931594849,
"learning_rate": 0.00011222157999398895,
"loss": 1.6357,
"step": 467
},
{
"epoch": 0.4166481192966837,
"grad_norm": 1.9347325563430786,
"learning_rate": 0.00011190971600948699,
"loss": 1.8921,
"step": 468
},
{
"epoch": 0.41753839305586465,
"grad_norm": 1.6752451658248901,
"learning_rate": 0.00011159773448089614,
"loss": 2.0727,
"step": 469
},
{
"epoch": 0.41842866681504565,
"grad_norm": 1.5543407201766968,
"learning_rate": 0.00011128563848734816,
"loss": 2.0131,
"step": 470
},
{
"epoch": 0.4193189405742266,
"grad_norm": 1.8420300483703613,
"learning_rate": 0.00011097343110910452,
"loss": 1.9657,
"step": 471
},
{
"epoch": 0.4202092143334075,
"grad_norm": 1.8071311712265015,
"learning_rate": 0.000110661115427526,
"loss": 1.5397,
"step": 472
},
{
"epoch": 0.42109948809258846,
"grad_norm": 2.43044114112854,
"learning_rate": 0.00011034869452504226,
"loss": 2.0463,
"step": 473
},
{
"epoch": 0.4219897618517694,
"grad_norm": 1.7965081930160522,
"learning_rate": 0.00011003617148512149,
"loss": 1.9827,
"step": 474
},
{
"epoch": 0.4228800356109504,
"grad_norm": 1.98358952999115,
"learning_rate": 0.00010972354939223996,
"loss": 1.989,
"step": 475
},
{
"epoch": 0.4237703093701313,
"grad_norm": 1.868479609489441,
"learning_rate": 0.00010941083133185146,
"loss": 2.0278,
"step": 476
},
{
"epoch": 0.42466058312931226,
"grad_norm": 1.813387393951416,
"learning_rate": 0.00010909802039035701,
"loss": 1.6557,
"step": 477
},
{
"epoch": 0.4255508568884932,
"grad_norm": 5.017477989196777,
"learning_rate": 0.00010878511965507434,
"loss": 2.2248,
"step": 478
},
{
"epoch": 0.42644113064767414,
"grad_norm": 1.644929051399231,
"learning_rate": 0.00010847213221420736,
"loss": 1.8849,
"step": 479
},
{
"epoch": 0.42733140440685513,
"grad_norm": 1.4300436973571777,
"learning_rate": 0.00010815906115681578,
"loss": 1.2436,
"step": 480
},
{
"epoch": 0.42822167816603607,
"grad_norm": 1.765423059463501,
"learning_rate": 0.0001078459095727845,
"loss": 1.8819,
"step": 481
},
{
"epoch": 0.429111951925217,
"grad_norm": 2.2176973819732666,
"learning_rate": 0.00010753268055279329,
"loss": 1.7296,
"step": 482
},
{
"epoch": 0.43000222568439794,
"grad_norm": 2.369621515274048,
"learning_rate": 0.0001072193771882861,
"loss": 2.3114,
"step": 483
},
{
"epoch": 0.4308924994435789,
"grad_norm": 2.4713504314422607,
"learning_rate": 0.00010690600257144061,
"loss": 2.204,
"step": 484
},
{
"epoch": 0.4317827732027599,
"grad_norm": 2.2376275062561035,
"learning_rate": 0.0001065925597951378,
"loss": 1.7342,
"step": 485
},
{
"epoch": 0.4326730469619408,
"grad_norm": 1.8253062963485718,
"learning_rate": 0.00010627905195293135,
"loss": 1.1762,
"step": 486
},
{
"epoch": 0.43356332072112175,
"grad_norm": 3.376502513885498,
"learning_rate": 0.00010596548213901708,
"loss": 2.3694,
"step": 487
},
{
"epoch": 0.4344535944803027,
"grad_norm": 1.9548606872558594,
"learning_rate": 0.00010565185344820247,
"loss": 2.0768,
"step": 488
},
{
"epoch": 0.4353438682394836,
"grad_norm": 1.488417148590088,
"learning_rate": 0.00010533816897587606,
"loss": 1.7886,
"step": 489
},
{
"epoch": 0.4362341419986646,
"grad_norm": 1.4256919622421265,
"learning_rate": 0.00010502443181797697,
"loss": 1.6104,
"step": 490
},
{
"epoch": 0.43712441575784555,
"grad_norm": 2.147977828979492,
"learning_rate": 0.00010471064507096426,
"loss": 1.8361,
"step": 491
},
{
"epoch": 0.4380146895170265,
"grad_norm": 1.7447888851165771,
"learning_rate": 0.0001043968118317865,
"loss": 1.9855,
"step": 492
},
{
"epoch": 0.4389049632762074,
"grad_norm": 1.9062583446502686,
"learning_rate": 0.00010408293519785101,
"loss": 2.5487,
"step": 493
},
{
"epoch": 0.43979523703538836,
"grad_norm": 2.2021713256835938,
"learning_rate": 0.00010376901826699348,
"loss": 2.1764,
"step": 494
},
{
"epoch": 0.44068551079456936,
"grad_norm": 1.2943754196166992,
"learning_rate": 0.00010345506413744726,
"loss": 1.6313,
"step": 495
},
{
"epoch": 0.4415757845537503,
"grad_norm": 1.3317290544509888,
"learning_rate": 0.00010314107590781284,
"loss": 1.5996,
"step": 496
},
{
"epoch": 0.44246605831293123,
"grad_norm": 2.0627801418304443,
"learning_rate": 0.00010282705667702734,
"loss": 1.8221,
"step": 497
},
{
"epoch": 0.44335633207211217,
"grad_norm": 2.628443717956543,
"learning_rate": 0.00010251300954433376,
"loss": 1.2766,
"step": 498
},
{
"epoch": 0.4442466058312931,
"grad_norm": 2.0734922885894775,
"learning_rate": 0.00010219893760925052,
"loss": 2.1783,
"step": 499
},
{
"epoch": 0.44513687959047404,
"grad_norm": 1.527775764465332,
"learning_rate": 0.00010188484397154084,
"loss": 1.733,
"step": 500
},
{
"epoch": 0.44602715334965504,
"grad_norm": 2.605490207672119,
"learning_rate": 0.00010157073173118208,
"loss": 2.1727,
"step": 501
},
{
"epoch": 0.446917427108836,
"grad_norm": 2.056980609893799,
"learning_rate": 0.00010125660398833528,
"loss": 2.4628,
"step": 502
},
{
"epoch": 0.4478077008680169,
"grad_norm": 3.1581008434295654,
"learning_rate": 0.00010094246384331442,
"loss": 2.2271,
"step": 503
},
{
"epoch": 0.44869797462719785,
"grad_norm": 1.937180519104004,
"learning_rate": 0.00010062831439655591,
"loss": 2.0092,
"step": 504
},
{
"epoch": 0.4495882483863788,
"grad_norm": 2.2551982402801514,
"learning_rate": 0.00010031415874858797,
"loss": 1.9285,
"step": 505
},
{
"epoch": 0.4504785221455598,
"grad_norm": 3.13543438911438,
"learning_rate": 0.0001,
"loss": 1.8066,
"step": 506
},
{
"epoch": 0.4513687959047407,
"grad_norm": 2.2084224224090576,
"learning_rate": 9.968584125141204e-05,
"loss": 2.1068,
"step": 507
},
{
"epoch": 0.45225906966392165,
"grad_norm": 2.102102518081665,
"learning_rate": 9.937168560344412e-05,
"loss": 1.8429,
"step": 508
},
{
"epoch": 0.4531493434231026,
"grad_norm": 1.972419023513794,
"learning_rate": 9.90575361566856e-05,
"loss": 1.8751,
"step": 509
},
{
"epoch": 0.45403961718228353,
"grad_norm": 1.8030009269714355,
"learning_rate": 9.874339601166473e-05,
"loss": 1.9289,
"step": 510
},
{
"epoch": 0.4549298909414645,
"grad_norm": 2.8846678733825684,
"learning_rate": 9.842926826881796e-05,
"loss": 2.224,
"step": 511
},
{
"epoch": 0.45582016470064546,
"grad_norm": 1.920257329940796,
"learning_rate": 9.81151560284592e-05,
"loss": 1.9377,
"step": 512
},
{
"epoch": 0.4567104384598264,
"grad_norm": 1.8159655332565308,
"learning_rate": 9.78010623907495e-05,
"loss": 1.6231,
"step": 513
},
{
"epoch": 0.45760071221900733,
"grad_norm": 2.1245462894439697,
"learning_rate": 9.748699045566626e-05,
"loss": 2.3452,
"step": 514
},
{
"epoch": 0.45849098597818827,
"grad_norm": 1.6165993213653564,
"learning_rate": 9.717294332297268e-05,
"loss": 1.4252,
"step": 515
},
{
"epoch": 0.45938125973736926,
"grad_norm": 1.8090025186538696,
"learning_rate": 9.685892409218717e-05,
"loss": 1.552,
"step": 516
},
{
"epoch": 0.4602715334965502,
"grad_norm": 1.4432064294815063,
"learning_rate": 9.654493586255278e-05,
"loss": 1.4914,
"step": 517
},
{
"epoch": 0.46116180725573114,
"grad_norm": 2.572908878326416,
"learning_rate": 9.623098173300654e-05,
"loss": 2.4062,
"step": 518
},
{
"epoch": 0.4620520810149121,
"grad_norm": 2.2798383235931396,
"learning_rate": 9.591706480214901e-05,
"loss": 1.9997,
"step": 519
},
{
"epoch": 0.462942354774093,
"grad_norm": 1.5268291234970093,
"learning_rate": 9.560318816821353e-05,
"loss": 1.6246,
"step": 520
},
{
"epoch": 0.463832628533274,
"grad_norm": 1.6568892002105713,
"learning_rate": 9.528935492903575e-05,
"loss": 1.4726,
"step": 521
},
{
"epoch": 0.46472290229245494,
"grad_norm": 1.6617227792739868,
"learning_rate": 9.497556818202306e-05,
"loss": 2.3641,
"step": 522
},
{
"epoch": 0.4656131760516359,
"grad_norm": 1.6580737829208374,
"learning_rate": 9.466183102412395e-05,
"loss": 1.4021,
"step": 523
},
{
"epoch": 0.4665034498108168,
"grad_norm": 2.1587393283843994,
"learning_rate": 9.434814655179755e-05,
"loss": 2.0219,
"step": 524
},
{
"epoch": 0.46739372356999775,
"grad_norm": 1.6377232074737549,
"learning_rate": 9.403451786098294e-05,
"loss": 1.7731,
"step": 525
},
{
"epoch": 0.46828399732917875,
"grad_norm": 1.7926323413848877,
"learning_rate": 9.372094804706867e-05,
"loss": 1.8779,
"step": 526
},
{
"epoch": 0.4691742710883597,
"grad_norm": 1.9899790287017822,
"learning_rate": 9.340744020486222e-05,
"loss": 1.7574,
"step": 527
},
{
"epoch": 0.4700645448475406,
"grad_norm": 3.482309341430664,
"learning_rate": 9.309399742855942e-05,
"loss": 2.1806,
"step": 528
},
{
"epoch": 0.47095481860672156,
"grad_norm": 1.9405080080032349,
"learning_rate": 9.278062281171393e-05,
"loss": 1.7881,
"step": 529
},
{
"epoch": 0.4718450923659025,
"grad_norm": 3.1491637229919434,
"learning_rate": 9.246731944720675e-05,
"loss": 2.3996,
"step": 530
},
{
"epoch": 0.4727353661250835,
"grad_norm": 2.0104711055755615,
"learning_rate": 9.215409042721552e-05,
"loss": 2.3646,
"step": 531
},
{
"epoch": 0.4736256398842644,
"grad_norm": 1.6170440912246704,
"learning_rate": 9.184093884318425e-05,
"loss": 1.6469,
"step": 532
},
{
"epoch": 0.47451591364344536,
"grad_norm": 2.090585947036743,
"learning_rate": 9.152786778579267e-05,
"loss": 1.8581,
"step": 533
},
{
"epoch": 0.4754061874026263,
"grad_norm": 2.355839490890503,
"learning_rate": 9.121488034492569e-05,
"loss": 1.8487,
"step": 534
},
{
"epoch": 0.47629646116180724,
"grad_norm": 2.266519546508789,
"learning_rate": 9.090197960964301e-05,
"loss": 2.0358,
"step": 535
},
{
"epoch": 0.47718673492098823,
"grad_norm": 2.3160934448242188,
"learning_rate": 9.058916866814858e-05,
"loss": 2.1925,
"step": 536
},
{
"epoch": 0.47807700868016917,
"grad_norm": 1.7758020162582397,
"learning_rate": 9.027645060776006e-05,
"loss": 2.1548,
"step": 537
},
{
"epoch": 0.4789672824393501,
"grad_norm": 2.3746981620788574,
"learning_rate": 8.99638285148785e-05,
"loss": 2.144,
"step": 538
},
{
"epoch": 0.47985755619853104,
"grad_norm": 2.0492055416107178,
"learning_rate": 8.965130547495776e-05,
"loss": 1.78,
"step": 539
},
{
"epoch": 0.480747829957712,
"grad_norm": 1.5570842027664185,
"learning_rate": 8.933888457247402e-05,
"loss": 1.9701,
"step": 540
},
{
"epoch": 0.4816381037168929,
"grad_norm": 1.4735817909240723,
"learning_rate": 8.902656889089548e-05,
"loss": 1.3506,
"step": 541
},
{
"epoch": 0.4825283774760739,
"grad_norm": 1.8302412033081055,
"learning_rate": 8.871436151265184e-05,
"loss": 1.6521,
"step": 542
},
{
"epoch": 0.48341865123525485,
"grad_norm": 1.834014654159546,
"learning_rate": 8.840226551910387e-05,
"loss": 1.7078,
"step": 543
},
{
"epoch": 0.4843089249944358,
"grad_norm": 1.8256818056106567,
"learning_rate": 8.809028399051302e-05,
"loss": 1.982,
"step": 544
},
{
"epoch": 0.4851991987536167,
"grad_norm": 3.1654930114746094,
"learning_rate": 8.777842000601105e-05,
"loss": 1.8923,
"step": 545
},
{
"epoch": 0.48608947251279766,
"grad_norm": 2.625009059906006,
"learning_rate": 8.746667664356956e-05,
"loss": 1.5361,
"step": 546
},
{
"epoch": 0.48697974627197865,
"grad_norm": 1.5053600072860718,
"learning_rate": 8.715505697996971e-05,
"loss": 1.7811,
"step": 547
},
{
"epoch": 0.4878700200311596,
"grad_norm": 1.6677234172821045,
"learning_rate": 8.684356409077176e-05,
"loss": 1.6811,
"step": 548
},
{
"epoch": 0.4887602937903405,
"grad_norm": 1.4328875541687012,
"learning_rate": 8.653220105028474e-05,
"loss": 1.4466,
"step": 549
},
{
"epoch": 0.48965056754952146,
"grad_norm": 6.340122222900391,
"learning_rate": 8.62209709315362e-05,
"loss": 2.122,
"step": 550
},
{
"epoch": 0.4905408413087024,
"grad_norm": 2.1618196964263916,
"learning_rate": 8.590987680624174e-05,
"loss": 1.6239,
"step": 551
},
{
"epoch": 0.4914311150678834,
"grad_norm": 1.8359202146530151,
"learning_rate": 8.559892174477479e-05,
"loss": 1.9783,
"step": 552
},
{
"epoch": 0.49232138882706433,
"grad_norm": 1.959260106086731,
"learning_rate": 8.528810881613626e-05,
"loss": 1.9082,
"step": 553
},
{
"epoch": 0.49321166258624527,
"grad_norm": 1.870314121246338,
"learning_rate": 8.497744108792429e-05,
"loss": 1.786,
"step": 554
},
{
"epoch": 0.4941019363454262,
"grad_norm": 2.359234571456909,
"learning_rate": 8.466692162630392e-05,
"loss": 2.0353,
"step": 555
},
{
"epoch": 0.49499221010460714,
"grad_norm": 1.3800303936004639,
"learning_rate": 8.435655349597689e-05,
"loss": 1.3332,
"step": 556
},
{
"epoch": 0.49588248386378814,
"grad_norm": 2.568152904510498,
"learning_rate": 8.404633976015134e-05,
"loss": 2.4055,
"step": 557
},
{
"epoch": 0.4967727576229691,
"grad_norm": 1.5831269025802612,
"learning_rate": 8.373628348051165e-05,
"loss": 1.443,
"step": 558
},
{
"epoch": 0.49766303138215,
"grad_norm": 1.4538564682006836,
"learning_rate": 8.342638771718802e-05,
"loss": 1.3316,
"step": 559
},
{
"epoch": 0.49855330514133095,
"grad_norm": 1.7724665403366089,
"learning_rate": 8.311665552872662e-05,
"loss": 1.5942,
"step": 560
},
{
"epoch": 0.4994435789005119,
"grad_norm": 1.7002063989639282,
"learning_rate": 8.280708997205904e-05,
"loss": 1.877,
"step": 561
},
{
"epoch": 0.5003338526596929,
"grad_norm": 2.1765499114990234,
"learning_rate": 8.249769410247239e-05,
"loss": 2.2905,
"step": 562
},
{
"epoch": 0.5012241264188738,
"grad_norm": 2.2722952365875244,
"learning_rate": 8.218847097357898e-05,
"loss": 1.4615,
"step": 563
},
{
"epoch": 0.5021144001780548,
"grad_norm": 1.7580784559249878,
"learning_rate": 8.187942363728625e-05,
"loss": 2.1017,
"step": 564
},
{
"epoch": 0.5030046739372357,
"grad_norm": 2.0220673084259033,
"learning_rate": 8.157055514376666e-05,
"loss": 2.2579,
"step": 565
},
{
"epoch": 0.5038949476964166,
"grad_norm": 1.9967743158340454,
"learning_rate": 8.126186854142752e-05,
"loss": 1.6888,
"step": 566
},
{
"epoch": 0.5047852214555976,
"grad_norm": 1.4775068759918213,
"learning_rate": 8.095336687688102e-05,
"loss": 1.3643,
"step": 567
},
{
"epoch": 0.5056754952147785,
"grad_norm": 5.734138011932373,
"learning_rate": 8.064505319491398e-05,
"loss": 2.1258,
"step": 568
},
{
"epoch": 0.5065657689739594,
"grad_norm": 1.27615487575531,
"learning_rate": 8.033693053845801e-05,
"loss": 1.5468,
"step": 569
},
{
"epoch": 0.5074560427331405,
"grad_norm": 1.7572683095932007,
"learning_rate": 8.002900194855932e-05,
"loss": 1.7513,
"step": 570
},
{
"epoch": 0.5083463164923214,
"grad_norm": 1.6680421829223633,
"learning_rate": 7.972127046434878e-05,
"loss": 1.6567,
"step": 571
},
{
"epoch": 0.5092365902515024,
"grad_norm": 1.8064501285552979,
"learning_rate": 7.941373912301189e-05,
"loss": 1.6988,
"step": 572
},
{
"epoch": 0.5101268640106833,
"grad_norm": 1.894484519958496,
"learning_rate": 7.910641095975886e-05,
"loss": 1.65,
"step": 573
},
{
"epoch": 0.5110171377698642,
"grad_norm": 2.205246686935425,
"learning_rate": 7.879928900779456e-05,
"loss": 2.2756,
"step": 574
},
{
"epoch": 0.5119074115290452,
"grad_norm": 2.020557403564453,
"learning_rate": 7.849237629828869e-05,
"loss": 1.8741,
"step": 575
},
{
"epoch": 0.5127976852882261,
"grad_norm": 1.7418237924575806,
"learning_rate": 7.818567586034577e-05,
"loss": 1.8595,
"step": 576
},
{
"epoch": 0.513687959047407,
"grad_norm": 2.7566726207733154,
"learning_rate": 7.787919072097531e-05,
"loss": 2.1259,
"step": 577
},
{
"epoch": 0.514578232806588,
"grad_norm": 2.0560312271118164,
"learning_rate": 7.75729239050619e-05,
"loss": 1.5462,
"step": 578
},
{
"epoch": 0.5154685065657689,
"grad_norm": 3.745788335800171,
"learning_rate": 7.726687843533538e-05,
"loss": 2.482,
"step": 579
},
{
"epoch": 0.51635878032495,
"grad_norm": 3.1230719089508057,
"learning_rate": 7.696105733234098e-05,
"loss": 1.6565,
"step": 580
},
{
"epoch": 0.5172490540841309,
"grad_norm": 5.474112033843994,
"learning_rate": 7.66554636144095e-05,
"loss": 1.6507,
"step": 581
},
{
"epoch": 0.5181393278433118,
"grad_norm": 1.4968229532241821,
"learning_rate": 7.635010029762756e-05,
"loss": 1.5576,
"step": 582
},
{
"epoch": 0.5190296016024928,
"grad_norm": 1.8227341175079346,
"learning_rate": 7.604497039580785e-05,
"loss": 2.0556,
"step": 583
},
{
"epoch": 0.5199198753616737,
"grad_norm": 1.888366937637329,
"learning_rate": 7.574007692045928e-05,
"loss": 2.0041,
"step": 584
},
{
"epoch": 0.5208101491208547,
"grad_norm": 1.5480713844299316,
"learning_rate": 7.543542288075739e-05,
"loss": 1.6337,
"step": 585
},
{
"epoch": 0.5217004228800356,
"grad_norm": 2.2726070880889893,
"learning_rate": 7.513101128351454e-05,
"loss": 1.7892,
"step": 586
},
{
"epoch": 0.5225906966392165,
"grad_norm": 2.1441900730133057,
"learning_rate": 7.48268451331503e-05,
"loss": 2.2337,
"step": 587
},
{
"epoch": 0.5234809703983975,
"grad_norm": 2.6022322177886963,
"learning_rate": 7.45229274316618e-05,
"loss": 1.742,
"step": 588
},
{
"epoch": 0.5243712441575784,
"grad_norm": 2.375544548034668,
"learning_rate": 7.421926117859403e-05,
"loss": 1.8181,
"step": 589
},
{
"epoch": 0.5252615179167595,
"grad_norm": 1.6946896314620972,
"learning_rate": 7.391584937101033e-05,
"loss": 1.84,
"step": 590
},
{
"epoch": 0.5261517916759404,
"grad_norm": 1.648368239402771,
"learning_rate": 7.361269500346274e-05,
"loss": 1.9039,
"step": 591
},
{
"epoch": 0.5270420654351213,
"grad_norm": 2.288290500640869,
"learning_rate": 7.330980106796246e-05,
"loss": 1.7733,
"step": 592
},
{
"epoch": 0.5279323391943023,
"grad_norm": 1.911532998085022,
"learning_rate": 7.300717055395039e-05,
"loss": 2.1929,
"step": 593
},
{
"epoch": 0.5288226129534832,
"grad_norm": 2.3173465728759766,
"learning_rate": 7.270480644826749e-05,
"loss": 1.7691,
"step": 594
},
{
"epoch": 0.5297128867126641,
"grad_norm": 2.155731439590454,
"learning_rate": 7.240271173512546e-05,
"loss": 2.3191,
"step": 595
},
{
"epoch": 0.5306031604718451,
"grad_norm": 2.021113634109497,
"learning_rate": 7.210088939607708e-05,
"loss": 2.4124,
"step": 596
},
{
"epoch": 0.531493434231026,
"grad_norm": 1.988287091255188,
"learning_rate": 7.179934240998706e-05,
"loss": 1.3772,
"step": 597
},
{
"epoch": 0.532383707990207,
"grad_norm": 1.78477942943573,
"learning_rate": 7.149807375300239e-05,
"loss": 1.9691,
"step": 598
},
{
"epoch": 0.5332739817493879,
"grad_norm": 2.129854679107666,
"learning_rate": 7.119708639852312e-05,
"loss": 1.5694,
"step": 599
},
{
"epoch": 0.5341642555085688,
"grad_norm": 1.9304897785186768,
"learning_rate": 7.089638331717284e-05,
"loss": 2.1899,
"step": 600
},
{
"epoch": 0.5350545292677499,
"grad_norm": 1.955014944076538,
"learning_rate": 7.059596747676962e-05,
"loss": 1.9041,
"step": 601
},
{
"epoch": 0.5359448030269308,
"grad_norm": 1.9473272562026978,
"learning_rate": 7.029584184229653e-05,
"loss": 2.2461,
"step": 602
},
{
"epoch": 0.5368350767861118,
"grad_norm": 3.0118231773376465,
"learning_rate": 6.999600937587239e-05,
"loss": 1.8597,
"step": 603
},
{
"epoch": 0.5377253505452927,
"grad_norm": 1.5499767065048218,
"learning_rate": 6.969647303672262e-05,
"loss": 1.2288,
"step": 604
},
{
"epoch": 0.5386156243044736,
"grad_norm": 2.489497661590576,
"learning_rate": 6.939723578114993e-05,
"loss": 2.0863,
"step": 605
},
{
"epoch": 0.5395058980636546,
"grad_norm": 1.4985642433166504,
"learning_rate": 6.909830056250527e-05,
"loss": 1.6077,
"step": 606
},
{
"epoch": 0.5403961718228355,
"grad_norm": 2.099515676498413,
"learning_rate": 6.879967033115853e-05,
"loss": 1.8709,
"step": 607
},
{
"epoch": 0.5412864455820164,
"grad_norm": 1.7907514572143555,
"learning_rate": 6.850134803446954e-05,
"loss": 1.7509,
"step": 608
},
{
"epoch": 0.5421767193411974,
"grad_norm": 2.593832492828369,
"learning_rate": 6.820333661675893e-05,
"loss": 1.6985,
"step": 609
},
{
"epoch": 0.5430669931003783,
"grad_norm": 1.652707815170288,
"learning_rate": 6.790563901927907e-05,
"loss": 1.7336,
"step": 610
},
{
"epoch": 0.5439572668595594,
"grad_norm": 2.1123557090759277,
"learning_rate": 6.760825818018508e-05,
"loss": 2.0235,
"step": 611
},
{
"epoch": 0.5448475406187403,
"grad_norm": 1.8303403854370117,
"learning_rate": 6.731119703450577e-05,
"loss": 1.988,
"step": 612
},
{
"epoch": 0.5457378143779212,
"grad_norm": 1.746991753578186,
"learning_rate": 6.701445851411472e-05,
"loss": 1.7563,
"step": 613
},
{
"epoch": 0.5466280881371022,
"grad_norm": 1.520955204963684,
"learning_rate": 6.671804554770135e-05,
"loss": 1.2435,
"step": 614
},
{
"epoch": 0.5475183618962831,
"grad_norm": 2.8054568767547607,
"learning_rate": 6.642196106074194e-05,
"loss": 1.6526,
"step": 615
},
{
"epoch": 0.548408635655464,
"grad_norm": 3.158360004425049,
"learning_rate": 6.612620797547087e-05,
"loss": 2.4104,
"step": 616
},
{
"epoch": 0.549298909414645,
"grad_norm": 2.212456226348877,
"learning_rate": 6.583078921085167e-05,
"loss": 2.2195,
"step": 617
},
{
"epoch": 0.5501891831738259,
"grad_norm": 2.032042980194092,
"learning_rate": 6.55357076825483e-05,
"loss": 1.5883,
"step": 618
},
{
"epoch": 0.5510794569330069,
"grad_norm": 2.144111394882202,
"learning_rate": 6.52409663028963e-05,
"loss": 2.2491,
"step": 619
},
{
"epoch": 0.5519697306921878,
"grad_norm": 1.9133466482162476,
"learning_rate": 6.494656798087412e-05,
"loss": 1.5789,
"step": 620
},
{
"epoch": 0.5528600044513688,
"grad_norm": 1.6118453741073608,
"learning_rate": 6.465251562207431e-05,
"loss": 1.7489,
"step": 621
},
{
"epoch": 0.5537502782105498,
"grad_norm": 2.499858856201172,
"learning_rate": 6.435881212867493e-05,
"loss": 2.5306,
"step": 622
},
{
"epoch": 0.5546405519697307,
"grad_norm": 2.0919814109802246,
"learning_rate": 6.406546039941094e-05,
"loss": 1.71,
"step": 623
},
{
"epoch": 0.5555308257289117,
"grad_norm": 1.7591675519943237,
"learning_rate": 6.377246332954544e-05,
"loss": 1.8629,
"step": 624
},
{
"epoch": 0.5564210994880926,
"grad_norm": 1.4329679012298584,
"learning_rate": 6.347982381084123e-05,
"loss": 1.6061,
"step": 625
},
{
"epoch": 0.5573113732472735,
"grad_norm": 2.896592140197754,
"learning_rate": 6.318754473153221e-05,
"loss": 1.7499,
"step": 626
},
{
"epoch": 0.5582016470064545,
"grad_norm": 3.4776594638824463,
"learning_rate": 6.289562897629492e-05,
"loss": 1.8892,
"step": 627
},
{
"epoch": 0.5590919207656354,
"grad_norm": 2.0120224952697754,
"learning_rate": 6.260407942621998e-05,
"loss": 2.1044,
"step": 628
},
{
"epoch": 0.5599821945248163,
"grad_norm": 1.950958251953125,
"learning_rate": 6.231289895878375e-05,
"loss": 1.7997,
"step": 629
},
{
"epoch": 0.5608724682839973,
"grad_norm": 2.242223024368286,
"learning_rate": 6.20220904478199e-05,
"loss": 1.9631,
"step": 630
},
{
"epoch": 0.5617627420431783,
"grad_norm": 3.0490360260009766,
"learning_rate": 6.173165676349103e-05,
"loss": 1.8321,
"step": 631
},
{
"epoch": 0.5626530158023593,
"grad_norm": 1.520606279373169,
"learning_rate": 6.144160077226036e-05,
"loss": 1.4851,
"step": 632
},
{
"epoch": 0.5635432895615402,
"grad_norm": 1.9609206914901733,
"learning_rate": 6.11519253368634e-05,
"loss": 1.9231,
"step": 633
},
{
"epoch": 0.5644335633207211,
"grad_norm": 1.3588906526565552,
"learning_rate": 6.086263331627976e-05,
"loss": 1.6969,
"step": 634
},
{
"epoch": 0.5653238370799021,
"grad_norm": 1.5578280687332153,
"learning_rate": 6.05737275657049e-05,
"loss": 2.2163,
"step": 635
},
{
"epoch": 0.566214110839083,
"grad_norm": 1.2065187692642212,
"learning_rate": 6.0285210936521955e-05,
"loss": 1.6729,
"step": 636
},
{
"epoch": 0.567104384598264,
"grad_norm": 1.831241488456726,
"learning_rate": 5.999708627627354e-05,
"loss": 1.762,
"step": 637
},
{
"epoch": 0.5679946583574449,
"grad_norm": 1.7721295356750488,
"learning_rate": 5.9709356428633746e-05,
"loss": 2.003,
"step": 638
},
{
"epoch": 0.5688849321166258,
"grad_norm": 1.7327338457107544,
"learning_rate": 5.9422024233380013e-05,
"loss": 2.7199,
"step": 639
},
{
"epoch": 0.5697752058758068,
"grad_norm": 2.1094236373901367,
"learning_rate": 5.913509252636511e-05,
"loss": 2.502,
"step": 640
},
{
"epoch": 0.5706654796349877,
"grad_norm": 1.6638685464859009,
"learning_rate": 5.884856413948913e-05,
"loss": 1.7315,
"step": 641
},
{
"epoch": 0.5715557533941688,
"grad_norm": 1.3953161239624023,
"learning_rate": 5.856244190067159e-05,
"loss": 1.3959,
"step": 642
},
{
"epoch": 0.5724460271533497,
"grad_norm": 1.945706844329834,
"learning_rate": 5.82767286338235e-05,
"loss": 2.1033,
"step": 643
},
{
"epoch": 0.5733363009125306,
"grad_norm": 1.9581321477890015,
"learning_rate": 5.799142715881938e-05,
"loss": 1.8766,
"step": 644
},
{
"epoch": 0.5742265746717116,
"grad_norm": 1.7764853239059448,
"learning_rate": 5.770654029146969e-05,
"loss": 1.5566,
"step": 645
},
{
"epoch": 0.5751168484308925,
"grad_norm": 2.052767038345337,
"learning_rate": 5.7422070843492734e-05,
"loss": 2.0427,
"step": 646
},
{
"epoch": 0.5760071221900734,
"grad_norm": 1.8176392316818237,
"learning_rate": 5.713802162248718e-05,
"loss": 1.5934,
"step": 647
},
{
"epoch": 0.5768973959492544,
"grad_norm": 1.4187897443771362,
"learning_rate": 5.6854395431904094e-05,
"loss": 1.5727,
"step": 648
},
{
"epoch": 0.5777876697084353,
"grad_norm": 2.4474024772644043,
"learning_rate": 5.657119507101954e-05,
"loss": 2.5993,
"step": 649
},
{
"epoch": 0.5786779434676163,
"grad_norm": 1.395693063735962,
"learning_rate": 5.6288423334906735e-05,
"loss": 1.3948,
"step": 650
},
{
"epoch": 0.5795682172267972,
"grad_norm": 1.5279170274734497,
"learning_rate": 5.6006083014408484e-05,
"loss": 1.7575,
"step": 651
},
{
"epoch": 0.5804584909859782,
"grad_norm": 1.3539286851882935,
"learning_rate": 5.572417689610987e-05,
"loss": 1.5935,
"step": 652
},
{
"epoch": 0.5813487647451592,
"grad_norm": 2.212347984313965,
"learning_rate": 5.544270776231038e-05,
"loss": 1.92,
"step": 653
},
{
"epoch": 0.5822390385043401,
"grad_norm": 1.814328670501709,
"learning_rate": 5.5161678390996796e-05,
"loss": 1.9437,
"step": 654
},
{
"epoch": 0.583129312263521,
"grad_norm": 1.3090717792510986,
"learning_rate": 5.488109155581549e-05,
"loss": 1.2791,
"step": 655
},
{
"epoch": 0.584019586022702,
"grad_norm": 1.4268810749053955,
"learning_rate": 5.4600950026045326e-05,
"loss": 1.5508,
"step": 656
},
{
"epoch": 0.5849098597818829,
"grad_norm": 1.5431543588638306,
"learning_rate": 5.4321256566570036e-05,
"loss": 1.8103,
"step": 657
},
{
"epoch": 0.5858001335410639,
"grad_norm": 1.4491729736328125,
"learning_rate": 5.404201393785122e-05,
"loss": 1.6786,
"step": 658
},
{
"epoch": 0.5866904073002448,
"grad_norm": 1.7064595222473145,
"learning_rate": 5.3763224895900846e-05,
"loss": 1.7031,
"step": 659
},
{
"epoch": 0.5875806810594257,
"grad_norm": 1.3737152814865112,
"learning_rate": 5.348489219225416e-05,
"loss": 1.98,
"step": 660
},
{
"epoch": 0.5884709548186067,
"grad_norm": 1.2751985788345337,
"learning_rate": 5.320701857394268e-05,
"loss": 1.4149,
"step": 661
},
{
"epoch": 0.5893612285777877,
"grad_norm": 1.9529341459274292,
"learning_rate": 5.292960678346675e-05,
"loss": 2.1125,
"step": 662
},
{
"epoch": 0.5902515023369687,
"grad_norm": 1.7817922830581665,
"learning_rate": 5.265265955876879e-05,
"loss": 1.4712,
"step": 663
},
{
"epoch": 0.5911417760961496,
"grad_norm": 1.8877997398376465,
"learning_rate": 5.237617963320608e-05,
"loss": 1.5736,
"step": 664
},
{
"epoch": 0.5920320498553305,
"grad_norm": 2.1079890727996826,
"learning_rate": 5.210016973552391e-05,
"loss": 2.2376,
"step": 665
},
{
"epoch": 0.5929223236145115,
"grad_norm": 1.34657883644104,
"learning_rate": 5.182463258982846e-05,
"loss": 1.539,
"step": 666
},
{
"epoch": 0.5938125973736924,
"grad_norm": 1.7172969579696655,
"learning_rate": 5.1549570915560206e-05,
"loss": 1.8295,
"step": 667
},
{
"epoch": 0.5947028711328733,
"grad_norm": 1.6305186748504639,
"learning_rate": 5.127498742746675e-05,
"loss": 1.8357,
"step": 668
},
{
"epoch": 0.5955931448920543,
"grad_norm": 1.9031035900115967,
"learning_rate": 5.100088483557634e-05,
"loss": 2.1622,
"step": 669
},
{
"epoch": 0.5964834186512352,
"grad_norm": 1.91080904006958,
"learning_rate": 5.072726584517086e-05,
"loss": 1.9162,
"step": 670
},
{
"epoch": 0.5973736924104162,
"grad_norm": 1.9471015930175781,
"learning_rate": 5.045413315675924e-05,
"loss": 1.6229,
"step": 671
},
{
"epoch": 0.5982639661695972,
"grad_norm": 2.020172595977783,
"learning_rate": 5.018148946605092e-05,
"loss": 2.0195,
"step": 672
},
{
"epoch": 0.5991542399287781,
"grad_norm": 1.5960651636123657,
"learning_rate": 4.990933746392899e-05,
"loss": 1.8776,
"step": 673
},
{
"epoch": 0.6000445136879591,
"grad_norm": 1.7368475198745728,
"learning_rate": 4.9637679836423924e-05,
"loss": 2.228,
"step": 674
},
{
"epoch": 0.60093478744714,
"grad_norm": 1.6208680868148804,
"learning_rate": 4.9366519264686725e-05,
"loss": 1.2907,
"step": 675
},
{
"epoch": 0.601825061206321,
"grad_norm": 2.604660749435425,
"learning_rate": 4.909585842496287e-05,
"loss": 1.7746,
"step": 676
},
{
"epoch": 0.6027153349655019,
"grad_norm": 2.447293519973755,
"learning_rate": 4.8825699988565485e-05,
"loss": 2.4796,
"step": 677
},
{
"epoch": 0.6036056087246828,
"grad_norm": 1.7362377643585205,
"learning_rate": 4.8556046621849346e-05,
"loss": 1.8023,
"step": 678
},
{
"epoch": 0.6044958824838638,
"grad_norm": 1.647889256477356,
"learning_rate": 4.828690098618429e-05,
"loss": 2.2236,
"step": 679
},
{
"epoch": 0.6053861562430447,
"grad_norm": 1.913087010383606,
"learning_rate": 4.8018265737929044e-05,
"loss": 2.0162,
"step": 680
},
{
"epoch": 0.6062764300022256,
"grad_norm": 1.737635612487793,
"learning_rate": 4.7750143528405126e-05,
"loss": 1.5109,
"step": 681
},
{
"epoch": 0.6071667037614066,
"grad_norm": 1.8633228540420532,
"learning_rate": 4.748253700387042e-05,
"loss": 1.7251,
"step": 682
},
{
"epoch": 0.6080569775205876,
"grad_norm": 1.7534853219985962,
"learning_rate": 4.721544880549337e-05,
"loss": 1.5481,
"step": 683
},
{
"epoch": 0.6089472512797686,
"grad_norm": 1.9271601438522339,
"learning_rate": 4.694888156932658e-05,
"loss": 2.0399,
"step": 684
},
{
"epoch": 0.6098375250389495,
"grad_norm": 2.470161199569702,
"learning_rate": 4.668283792628114e-05,
"loss": 1.8926,
"step": 685
},
{
"epoch": 0.6107277987981304,
"grad_norm": 2.124586582183838,
"learning_rate": 4.6417320502100316e-05,
"loss": 1.98,
"step": 686
},
{
"epoch": 0.6116180725573114,
"grad_norm": 2.7901787757873535,
"learning_rate": 4.615233191733398e-05,
"loss": 2.2678,
"step": 687
},
{
"epoch": 0.6125083463164923,
"grad_norm": 2.0822958946228027,
"learning_rate": 4.588787478731242e-05,
"loss": 2.0991,
"step": 688
},
{
"epoch": 0.6133986200756733,
"grad_norm": 1.843925952911377,
"learning_rate": 4.5623951722120736e-05,
"loss": 1.395,
"step": 689
},
{
"epoch": 0.6142888938348542,
"grad_norm": 1.8190912008285522,
"learning_rate": 4.5360565326573104e-05,
"loss": 1.9312,
"step": 690
},
{
"epoch": 0.6151791675940351,
"grad_norm": 3.005737781524658,
"learning_rate": 4.5097718200186814e-05,
"loss": 2.49,
"step": 691
},
{
"epoch": 0.6160694413532161,
"grad_norm": 2.127828598022461,
"learning_rate": 4.483541293715698e-05,
"loss": 2.263,
"step": 692
},
{
"epoch": 0.6169597151123971,
"grad_norm": 1.2479519844055176,
"learning_rate": 4.457365212633058e-05,
"loss": 1.4489,
"step": 693
},
{
"epoch": 0.617849988871578,
"grad_norm": 1.4654552936553955,
"learning_rate": 4.431243835118124e-05,
"loss": 1.4409,
"step": 694
},
{
"epoch": 0.618740262630759,
"grad_norm": 2.2219717502593994,
"learning_rate": 4.4051774189783315e-05,
"loss": 1.7917,
"step": 695
},
{
"epoch": 0.6196305363899399,
"grad_norm": 1.9172980785369873,
"learning_rate": 4.379166221478697e-05,
"loss": 2.211,
"step": 696
},
{
"epoch": 0.6205208101491209,
"grad_norm": 1.792610764503479,
"learning_rate": 4.3532104993392306e-05,
"loss": 1.6492,
"step": 697
},
{
"epoch": 0.6214110839083018,
"grad_norm": 2.376288414001465,
"learning_rate": 4.327310508732437e-05,
"loss": 2.417,
"step": 698
},
{
"epoch": 0.6223013576674827,
"grad_norm": 3.1200907230377197,
"learning_rate": 4.301466505280762e-05,
"loss": 2.2469,
"step": 699
},
{
"epoch": 0.6231916314266637,
"grad_norm": 2.2198028564453125,
"learning_rate": 4.2756787440540936e-05,
"loss": 1.4221,
"step": 700
},
{
"epoch": 0.6240819051858446,
"grad_norm": 1.8127583265304565,
"learning_rate": 4.249947479567218e-05,
"loss": 1.9228,
"step": 701
},
{
"epoch": 0.6249721789450255,
"grad_norm": 1.6736197471618652,
"learning_rate": 4.224272965777326e-05,
"loss": 1.7612,
"step": 702
},
{
"epoch": 0.6258624527042066,
"grad_norm": 1.928165316581726,
"learning_rate": 4.1986554560815096e-05,
"loss": 2.1468,
"step": 703
},
{
"epoch": 0.6267527264633875,
"grad_norm": 2.0662543773651123,
"learning_rate": 4.173095203314241e-05,
"loss": 2.3367,
"step": 704
},
{
"epoch": 0.6276430002225685,
"grad_norm": 2.434934377670288,
"learning_rate": 4.1475924597449024e-05,
"loss": 1.6921,
"step": 705
},
{
"epoch": 0.6285332739817494,
"grad_norm": 2.181518316268921,
"learning_rate": 4.12214747707527e-05,
"loss": 1.8562,
"step": 706
},
{
"epoch": 0.6294235477409303,
"grad_norm": 1.5964933633804321,
"learning_rate": 4.096760506437057e-05,
"loss": 1.5161,
"step": 707
},
{
"epoch": 0.6303138215001113,
"grad_norm": 1.5785959959030151,
"learning_rate": 4.071431798389408e-05,
"loss": 1.6356,
"step": 708
},
{
"epoch": 0.6312040952592922,
"grad_norm": 1.6601256132125854,
"learning_rate": 4.0461616029164526e-05,
"loss": 1.9933,
"step": 709
},
{
"epoch": 0.6320943690184732,
"grad_norm": 1.9473941326141357,
"learning_rate": 4.020950169424815e-05,
"loss": 1.6581,
"step": 710
},
{
"epoch": 0.6329846427776541,
"grad_norm": 1.2505872249603271,
"learning_rate": 3.9957977467411615e-05,
"loss": 1.5246,
"step": 711
},
{
"epoch": 0.633874916536835,
"grad_norm": 1.948551058769226,
"learning_rate": 3.9707045831097555e-05,
"loss": 1.7574,
"step": 712
},
{
"epoch": 0.6347651902960161,
"grad_norm": 1.8740789890289307,
"learning_rate": 3.945670926189987e-05,
"loss": 1.7806,
"step": 713
},
{
"epoch": 0.635655464055197,
"grad_norm": 2.870927572250366,
"learning_rate": 3.920697023053949e-05,
"loss": 1.8539,
"step": 714
},
{
"epoch": 0.636545737814378,
"grad_norm": 2.207949638366699,
"learning_rate": 3.895783120183976e-05,
"loss": 1.6277,
"step": 715
},
{
"epoch": 0.6374360115735589,
"grad_norm": 1.841289758682251,
"learning_rate": 3.8709294634702376e-05,
"loss": 1.9111,
"step": 716
},
{
"epoch": 0.6383262853327398,
"grad_norm": 1.5128315687179565,
"learning_rate": 3.846136298208285e-05,
"loss": 1.8701,
"step": 717
},
{
"epoch": 0.6392165590919208,
"grad_norm": 2.8079261779785156,
"learning_rate": 3.821403869096658e-05,
"loss": 2.3968,
"step": 718
},
{
"epoch": 0.6401068328511017,
"grad_norm": 2.5751731395721436,
"learning_rate": 3.796732420234443e-05,
"loss": 2.1135,
"step": 719
},
{
"epoch": 0.6409971066102826,
"grad_norm": 1.8290863037109375,
"learning_rate": 3.7721221951188765e-05,
"loss": 1.5428,
"step": 720
},
{
"epoch": 0.6418873803694636,
"grad_norm": 1.6619089841842651,
"learning_rate": 3.747573436642951e-05,
"loss": 2.0478,
"step": 721
},
{
"epoch": 0.6427776541286445,
"grad_norm": 5.214348316192627,
"learning_rate": 3.7230863870929964e-05,
"loss": 2.2313,
"step": 722
},
{
"epoch": 0.6436679278878255,
"grad_norm": 1.9198572635650635,
"learning_rate": 3.698661288146311e-05,
"loss": 2.1648,
"step": 723
},
{
"epoch": 0.6445582016470065,
"grad_norm": 2.1984057426452637,
"learning_rate": 3.674298380868756e-05,
"loss": 1.95,
"step": 724
},
{
"epoch": 0.6454484754061874,
"grad_norm": 1.4841092824935913,
"learning_rate": 3.649997905712396e-05,
"loss": 1.6187,
"step": 725
},
{
"epoch": 0.6463387491653684,
"grad_norm": 2.1685807704925537,
"learning_rate": 3.6257601025131026e-05,
"loss": 2.5758,
"step": 726
},
{
"epoch": 0.6472290229245493,
"grad_norm": 3.2427711486816406,
"learning_rate": 3.601585210488218e-05,
"loss": 2.4056,
"step": 727
},
{
"epoch": 0.6481192966837303,
"grad_norm": 1.696851134300232,
"learning_rate": 3.577473468234156e-05,
"loss": 1.7485,
"step": 728
},
{
"epoch": 0.6490095704429112,
"grad_norm": 2.0988123416900635,
"learning_rate": 3.553425113724088e-05,
"loss": 1.894,
"step": 729
},
{
"epoch": 0.6498998442020921,
"grad_norm": 1.6171239614486694,
"learning_rate": 3.52944038430556e-05,
"loss": 1.6306,
"step": 730
},
{
"epoch": 0.6507901179612731,
"grad_norm": 2.2144834995269775,
"learning_rate": 3.5055195166981645e-05,
"loss": 2.1848,
"step": 731
},
{
"epoch": 0.651680391720454,
"grad_norm": 1.90192711353302,
"learning_rate": 3.481662746991214e-05,
"loss": 2.0253,
"step": 732
},
{
"epoch": 0.6525706654796349,
"grad_norm": 1.956634283065796,
"learning_rate": 3.4578703106413904e-05,
"loss": 1.7608,
"step": 733
},
{
"epoch": 0.653460939238816,
"grad_norm": 1.6562048196792603,
"learning_rate": 3.4341424424704375e-05,
"loss": 1.828,
"step": 734
},
{
"epoch": 0.6543512129979969,
"grad_norm": 2.268554925918579,
"learning_rate": 3.4104793766628304e-05,
"loss": 1.9865,
"step": 735
},
{
"epoch": 0.6552414867571779,
"grad_norm": 2.734750986099243,
"learning_rate": 3.386881346763483e-05,
"loss": 1.8182,
"step": 736
},
{
"epoch": 0.6561317605163588,
"grad_norm": 1.684584379196167,
"learning_rate": 3.363348585675414e-05,
"loss": 1.4571,
"step": 737
},
{
"epoch": 0.6570220342755397,
"grad_norm": 1.4472819566726685,
"learning_rate": 3.339881325657484e-05,
"loss": 1.2802,
"step": 738
},
{
"epoch": 0.6579123080347207,
"grad_norm": 2.2351865768432617,
"learning_rate": 3.316479798322072e-05,
"loss": 1.5775,
"step": 739
},
{
"epoch": 0.6588025817939016,
"grad_norm": 2.1901495456695557,
"learning_rate": 3.2931442346328004e-05,
"loss": 1.9047,
"step": 740
},
{
"epoch": 0.6596928555530825,
"grad_norm": 2.646942377090454,
"learning_rate": 3.269874864902269e-05,
"loss": 2.0635,
"step": 741
},
{
"epoch": 0.6605831293122635,
"grad_norm": 2.0031111240386963,
"learning_rate": 3.246671918789755e-05,
"loss": 1.6451,
"step": 742
},
{
"epoch": 0.6614734030714444,
"grad_norm": 1.944433569908142,
"learning_rate": 3.223535625298979e-05,
"loss": 2.0588,
"step": 743
},
{
"epoch": 0.6623636768306255,
"grad_norm": 2.4715232849121094,
"learning_rate": 3.200466212775808e-05,
"loss": 2.2318,
"step": 744
},
{
"epoch": 0.6632539505898064,
"grad_norm": 1.5102177858352661,
"learning_rate": 3.1774639089060363e-05,
"loss": 1.6319,
"step": 745
},
{
"epoch": 0.6641442243489873,
"grad_norm": 1.6239656209945679,
"learning_rate": 3.154528940713113e-05,
"loss": 2.011,
"step": 746
},
{
"epoch": 0.6650344981081683,
"grad_norm": 1.6042510271072388,
"learning_rate": 3.1316615345559185e-05,
"loss": 1.6868,
"step": 747
},
{
"epoch": 0.6659247718673492,
"grad_norm": 1.2276384830474854,
"learning_rate": 3.108861916126518e-05,
"loss": 1.1603,
"step": 748
},
{
"epoch": 0.6668150456265302,
"grad_norm": 1.999118685722351,
"learning_rate": 3.086130310447937e-05,
"loss": 1.8479,
"step": 749
},
{
"epoch": 0.6677053193857111,
"grad_norm": 1.7513126134872437,
"learning_rate": 3.063466941871952e-05,
"loss": 2.1436,
"step": 750
},
{
"epoch": 0.668595593144892,
"grad_norm": 2.9473211765289307,
"learning_rate": 3.0408720340768572e-05,
"loss": 2.5041,
"step": 751
},
{
"epoch": 0.669485866904073,
"grad_norm": 1.2190455198287964,
"learning_rate": 3.018345810065275e-05,
"loss": 1.5207,
"step": 752
},
{
"epoch": 0.6703761406632539,
"grad_norm": 1.731210470199585,
"learning_rate": 2.9958884921619367e-05,
"loss": 1.4926,
"step": 753
},
{
"epoch": 0.671266414422435,
"grad_norm": 1.8084713220596313,
"learning_rate": 2.9735003020115092e-05,
"loss": 1.7094,
"step": 754
},
{
"epoch": 0.6721566881816159,
"grad_norm": 2.106186866760254,
"learning_rate": 2.9511814605763855e-05,
"loss": 2.2084,
"step": 755
},
{
"epoch": 0.6730469619407968,
"grad_norm": 1.5817265510559082,
"learning_rate": 2.9289321881345254e-05,
"loss": 1.2095,
"step": 756
},
{
"epoch": 0.6739372356999778,
"grad_norm": 2.0624277591705322,
"learning_rate": 2.9067527042772636e-05,
"loss": 1.7659,
"step": 757
},
{
"epoch": 0.6748275094591587,
"grad_norm": 1.7278014421463013,
"learning_rate": 2.8846432279071467e-05,
"loss": 2.2494,
"step": 758
},
{
"epoch": 0.6757177832183396,
"grad_norm": 1.3449331521987915,
"learning_rate": 2.8626039772357882e-05,
"loss": 1.503,
"step": 759
},
{
"epoch": 0.6766080569775206,
"grad_norm": 2.799799680709839,
"learning_rate": 2.840635169781688e-05,
"loss": 2.1368,
"step": 760
},
{
"epoch": 0.6774983307367015,
"grad_norm": 2.179213523864746,
"learning_rate": 2.8187370223681132e-05,
"loss": 2.0393,
"step": 761
},
{
"epoch": 0.6783886044958825,
"grad_norm": 1.5615414381027222,
"learning_rate": 2.7969097511209308e-05,
"loss": 1.4168,
"step": 762
},
{
"epoch": 0.6792788782550634,
"grad_norm": 2.0087335109710693,
"learning_rate": 2.775153571466502e-05,
"loss": 1.7341,
"step": 763
},
{
"epoch": 0.6801691520142443,
"grad_norm": 1.9231607913970947,
"learning_rate": 2.753468698129533e-05,
"loss": 1.9537,
"step": 764
},
{
"epoch": 0.6810594257734254,
"grad_norm": 2.165048599243164,
"learning_rate": 2.7318553451309726e-05,
"loss": 1.6802,
"step": 765
},
{
"epoch": 0.6819496995326063,
"grad_norm": 1.7000216245651245,
"learning_rate": 2.7103137257858868e-05,
"loss": 1.5879,
"step": 766
},
{
"epoch": 0.6828399732917872,
"grad_norm": 2.1674106121063232,
"learning_rate": 2.688844052701359e-05,
"loss": 2.4558,
"step": 767
},
{
"epoch": 0.6837302470509682,
"grad_norm": 2.2125790119171143,
"learning_rate": 2.6674465377744017e-05,
"loss": 1.5141,
"step": 768
},
{
"epoch": 0.6846205208101491,
"grad_norm": 1.5611724853515625,
"learning_rate": 2.646121392189841e-05,
"loss": 1.3355,
"step": 769
},
{
"epoch": 0.6855107945693301,
"grad_norm": 1.870052695274353,
"learning_rate": 2.624868826418262e-05,
"loss": 1.287,
"step": 770
},
{
"epoch": 0.686401068328511,
"grad_norm": 1.8563505411148071,
"learning_rate": 2.603689050213902e-05,
"loss": 2.1767,
"step": 771
},
{
"epoch": 0.6872913420876919,
"grad_norm": 1.7890762090682983,
"learning_rate": 2.582582272612609e-05,
"loss": 1.7168,
"step": 772
},
{
"epoch": 0.6881816158468729,
"grad_norm": 1.420925498008728,
"learning_rate": 2.561548701929749e-05,
"loss": 1.602,
"step": 773
},
{
"epoch": 0.6890718896060538,
"grad_norm": 2.628692865371704,
"learning_rate": 2.540588545758179e-05,
"loss": 1.9804,
"step": 774
},
{
"epoch": 0.6899621633652349,
"grad_norm": 2.270979404449463,
"learning_rate": 2.5197020109661772e-05,
"loss": 2.0805,
"step": 775
},
{
"epoch": 0.6908524371244158,
"grad_norm": 1.3668320178985596,
"learning_rate": 2.4988893036954043e-05,
"loss": 1.3515,
"step": 776
},
{
"epoch": 0.6917427108835967,
"grad_norm": 2.867128849029541,
"learning_rate": 2.4781506293588873e-05,
"loss": 1.9102,
"step": 777
},
{
"epoch": 0.6926329846427777,
"grad_norm": 2.0665462017059326,
"learning_rate": 2.4574861926389615e-05,
"loss": 1.5223,
"step": 778
},
{
"epoch": 0.6935232584019586,
"grad_norm": 1.425742506980896,
"learning_rate": 2.436896197485282e-05,
"loss": 1.3977,
"step": 779
},
{
"epoch": 0.6944135321611395,
"grad_norm": 1.6747236251831055,
"learning_rate": 2.4163808471127812e-05,
"loss": 1.7501,
"step": 780
},
{
"epoch": 0.6953038059203205,
"grad_norm": 1.8426623344421387,
"learning_rate": 2.3959403439996907e-05,
"loss": 1.8335,
"step": 781
},
{
"epoch": 0.6961940796795014,
"grad_norm": 2.007152795791626,
"learning_rate": 2.37557488988552e-05,
"loss": 2.4554,
"step": 782
},
{
"epoch": 0.6970843534386824,
"grad_norm": 2.151026964187622,
"learning_rate": 2.3552846857690846e-05,
"loss": 2.0088,
"step": 783
},
{
"epoch": 0.6979746271978633,
"grad_norm": 1.486572027206421,
"learning_rate": 2.3350699319065026e-05,
"loss": 1.6484,
"step": 784
},
{
"epoch": 0.6988649009570443,
"grad_norm": 2.3491599559783936,
"learning_rate": 2.3149308278092342e-05,
"loss": 2.1194,
"step": 785
},
{
"epoch": 0.6997551747162253,
"grad_norm": 2.2394917011260986,
"learning_rate": 2.2948675722421086e-05,
"loss": 1.646,
"step": 786
},
{
"epoch": 0.7006454484754062,
"grad_norm": 1.754228949546814,
"learning_rate": 2.2748803632213557e-05,
"loss": 2.0966,
"step": 787
},
{
"epoch": 0.7015357222345872,
"grad_norm": 2.0761775970458984,
"learning_rate": 2.254969398012663e-05,
"loss": 1.7768,
"step": 788
},
{
"epoch": 0.7024259959937681,
"grad_norm": 1.6214390993118286,
"learning_rate": 2.235134873129213e-05,
"loss": 1.799,
"step": 789
},
{
"epoch": 0.703316269752949,
"grad_norm": 2.5304348468780518,
"learning_rate": 2.2153769843297667e-05,
"loss": 2.4849,
"step": 790
},
{
"epoch": 0.70420654351213,
"grad_norm": 1.9643628597259521,
"learning_rate": 2.195695926616702e-05,
"loss": 1.5525,
"step": 791
},
{
"epoch": 0.7050968172713109,
"grad_norm": 2.729534149169922,
"learning_rate": 2.1760918942341192e-05,
"loss": 2.0789,
"step": 792
},
{
"epoch": 0.7059870910304918,
"grad_norm": 1.8052536249160767,
"learning_rate": 2.1565650806658975e-05,
"loss": 1.6703,
"step": 793
},
{
"epoch": 0.7068773647896728,
"grad_norm": 2.2261719703674316,
"learning_rate": 2.137115678633811e-05,
"loss": 2.0943,
"step": 794
},
{
"epoch": 0.7077676385488538,
"grad_norm": 1.9751397371292114,
"learning_rate": 2.1177438800956007e-05,
"loss": 2.0637,
"step": 795
},
{
"epoch": 0.7086579123080348,
"grad_norm": 1.6201308965682983,
"learning_rate": 2.098449876243096e-05,
"loss": 1.8145,
"step": 796
},
{
"epoch": 0.7095481860672157,
"grad_norm": 3.4927635192871094,
"learning_rate": 2.07923385750033e-05,
"loss": 1.2709,
"step": 797
},
{
"epoch": 0.7104384598263966,
"grad_norm": 1.7658709287643433,
"learning_rate": 2.0600960135216462e-05,
"loss": 2.021,
"step": 798
},
{
"epoch": 0.7113287335855776,
"grad_norm": 1.4747573137283325,
"learning_rate": 2.0410365331898416e-05,
"loss": 1.425,
"step": 799
},
{
"epoch": 0.7122190073447585,
"grad_norm": 2.7042531967163086,
"learning_rate": 2.0220556046142893e-05,
"loss": 1.6427,
"step": 800
},
{
"epoch": 0.7131092811039395,
"grad_norm": 3.2484536170959473,
"learning_rate": 2.0031534151290943e-05,
"loss": 3.4625,
"step": 801
},
{
"epoch": 0.7139995548631204,
"grad_norm": 2.409848928451538,
"learning_rate": 1.9843301512912327e-05,
"loss": 1.8531,
"step": 802
},
{
"epoch": 0.7148898286223013,
"grad_norm": 1.7639962434768677,
"learning_rate": 1.965585998878724e-05,
"loss": 2.042,
"step": 803
},
{
"epoch": 0.7157801023814823,
"grad_norm": 1.6976604461669922,
"learning_rate": 1.946921142888781e-05,
"loss": 1.6346,
"step": 804
},
{
"epoch": 0.7166703761406632,
"grad_norm": 2.473877191543579,
"learning_rate": 1.928335767535997e-05,
"loss": 2.1879,
"step": 805
},
{
"epoch": 0.7175606498998442,
"grad_norm": 2.551295518875122,
"learning_rate": 1.9098300562505266e-05,
"loss": 1.6309,
"step": 806
},
{
"epoch": 0.7184509236590252,
"grad_norm": 1.5652191638946533,
"learning_rate": 1.891404191676265e-05,
"loss": 1.7005,
"step": 807
},
{
"epoch": 0.7193411974182061,
"grad_norm": 2.0537121295928955,
"learning_rate": 1.8730583556690605e-05,
"loss": 1.694,
"step": 808
},
{
"epoch": 0.7202314711773871,
"grad_norm": 1.8218454122543335,
"learning_rate": 1.854792729294905e-05,
"loss": 1.7962,
"step": 809
},
{
"epoch": 0.721121744936568,
"grad_norm": 2.0948023796081543,
"learning_rate": 1.8366074928281607e-05,
"loss": 1.8436,
"step": 810
},
{
"epoch": 0.7220120186957489,
"grad_norm": 1.4422553777694702,
"learning_rate": 1.818502825749764e-05,
"loss": 1.4781,
"step": 811
},
{
"epoch": 0.7229022924549299,
"grad_norm": 2.543961763381958,
"learning_rate": 1.8004789067454764e-05,
"loss": 1.8586,
"step": 812
},
{
"epoch": 0.7237925662141108,
"grad_norm": 1.2190775871276855,
"learning_rate": 1.7825359137040988e-05,
"loss": 1.0705,
"step": 813
},
{
"epoch": 0.7246828399732917,
"grad_norm": 2.0991899967193604,
"learning_rate": 1.7646740237157256e-05,
"loss": 2.5025,
"step": 814
},
{
"epoch": 0.7255731137324727,
"grad_norm": 1.7105249166488647,
"learning_rate": 1.7468934130700044e-05,
"loss": 1.6559,
"step": 815
},
{
"epoch": 0.7264633874916537,
"grad_norm": 1.533126711845398,
"learning_rate": 1.7291942572543807e-05,
"loss": 1.6451,
"step": 816
},
{
"epoch": 0.7273536612508347,
"grad_norm": 2.375067949295044,
"learning_rate": 1.7115767309523812e-05,
"loss": 2.1343,
"step": 817
},
{
"epoch": 0.7282439350100156,
"grad_norm": 1.9037030935287476,
"learning_rate": 1.6940410080418723e-05,
"loss": 1.8725,
"step": 818
},
{
"epoch": 0.7291342087691965,
"grad_norm": 2.0840301513671875,
"learning_rate": 1.6765872615933677e-05,
"loss": 2.1541,
"step": 819
},
{
"epoch": 0.7300244825283775,
"grad_norm": 2.6141037940979004,
"learning_rate": 1.6592156638682886e-05,
"loss": 2.3102,
"step": 820
},
{
"epoch": 0.7309147562875584,
"grad_norm": 1.8828359842300415,
"learning_rate": 1.6419263863172997e-05,
"loss": 1.9035,
"step": 821
},
{
"epoch": 0.7318050300467394,
"grad_norm": 1.8121448755264282,
"learning_rate": 1.6247195995785837e-05,
"loss": 1.6091,
"step": 822
},
{
"epoch": 0.7326953038059203,
"grad_norm": 2.16495418548584,
"learning_rate": 1.6075954734761845e-05,
"loss": 1.7576,
"step": 823
},
{
"epoch": 0.7335855775651012,
"grad_norm": 1.4242593050003052,
"learning_rate": 1.5905541770183096e-05,
"loss": 1.5635,
"step": 824
},
{
"epoch": 0.7344758513242822,
"grad_norm": 2.510634183883667,
"learning_rate": 1.5735958783956794e-05,
"loss": 1.7408,
"step": 825
},
{
"epoch": 0.7353661250834632,
"grad_norm": 1.7546478509902954,
"learning_rate": 1.5567207449798515e-05,
"loss": 1.5751,
"step": 826
},
{
"epoch": 0.7362563988426442,
"grad_norm": 1.723543643951416,
"learning_rate": 1.539928943321579e-05,
"loss": 1.4336,
"step": 827
},
{
"epoch": 0.7371466726018251,
"grad_norm": 1.7334325313568115,
"learning_rate": 1.5232206391491699e-05,
"loss": 1.7615,
"step": 828
},
{
"epoch": 0.738036946361006,
"grad_norm": 1.8361815214157104,
"learning_rate": 1.5065959973668353e-05,
"loss": 1.9695,
"step": 829
},
{
"epoch": 0.738927220120187,
"grad_norm": 1.947911262512207,
"learning_rate": 1.4900551820530828e-05,
"loss": 1.851,
"step": 830
},
{
"epoch": 0.7398174938793679,
"grad_norm": 1.4021551609039307,
"learning_rate": 1.4735983564590783e-05,
"loss": 1.845,
"step": 831
},
{
"epoch": 0.7407077676385488,
"grad_norm": 1.4573607444763184,
"learning_rate": 1.4572256830070497e-05,
"loss": 1.4406,
"step": 832
},
{
"epoch": 0.7415980413977298,
"grad_norm": 1.9009228944778442,
"learning_rate": 1.4409373232886702e-05,
"loss": 2.1688,
"step": 833
},
{
"epoch": 0.7424883151569107,
"grad_norm": 2.001500129699707,
"learning_rate": 1.4247334380634792e-05,
"loss": 2.0726,
"step": 834
},
{
"epoch": 0.7433785889160917,
"grad_norm": 2.0300889015197754,
"learning_rate": 1.4086141872572789e-05,
"loss": 1.8231,
"step": 835
},
{
"epoch": 0.7442688626752726,
"grad_norm": 12.630093574523926,
"learning_rate": 1.3925797299605647e-05,
"loss": 1.6159,
"step": 836
},
{
"epoch": 0.7451591364344536,
"grad_norm": 2.9155776500701904,
"learning_rate": 1.3766302244269624e-05,
"loss": 1.6208,
"step": 837
},
{
"epoch": 0.7460494101936346,
"grad_norm": 11.810160636901855,
"learning_rate": 1.3607658280716473e-05,
"loss": 1.6429,
"step": 838
},
{
"epoch": 0.7469396839528155,
"grad_norm": 2.091851234436035,
"learning_rate": 1.3449866974698122e-05,
"loss": 1.751,
"step": 839
},
{
"epoch": 0.7478299577119965,
"grad_norm": 1.2974621057510376,
"learning_rate": 1.3292929883550998e-05,
"loss": 1.5679,
"step": 840
},
{
"epoch": 0.7487202314711774,
"grad_norm": 1.8211385011672974,
"learning_rate": 1.3136848556180892e-05,
"loss": 1.3824,
"step": 841
},
{
"epoch": 0.7496105052303583,
"grad_norm": 1.7687885761260986,
"learning_rate": 1.2981624533047432e-05,
"loss": 1.7996,
"step": 842
},
{
"epoch": 0.7505007789895393,
"grad_norm": 2.209022283554077,
"learning_rate": 1.2827259346149122e-05,
"loss": 1.6272,
"step": 843
},
{
"epoch": 0.7513910527487202,
"grad_norm": 2.4203338623046875,
"learning_rate": 1.2673754519008008e-05,
"loss": 1.564,
"step": 844
},
{
"epoch": 0.7522813265079011,
"grad_norm": 2.287649393081665,
"learning_rate": 1.2521111566654731e-05,
"loss": 1.815,
"step": 845
},
{
"epoch": 0.7531716002670821,
"grad_norm": 2.2657222747802734,
"learning_rate": 1.2369331995613665e-05,
"loss": 2.0586,
"step": 846
},
{
"epoch": 0.7540618740262631,
"grad_norm": 1.5107150077819824,
"learning_rate": 1.2218417303887842e-05,
"loss": 1.543,
"step": 847
},
{
"epoch": 0.7549521477854441,
"grad_norm": 1.3881908655166626,
"learning_rate": 1.206836898094439e-05,
"loss": 1.3387,
"step": 848
},
{
"epoch": 0.755842421544625,
"grad_norm": 2.1925411224365234,
"learning_rate": 1.191918850769964e-05,
"loss": 1.5979,
"step": 849
},
{
"epoch": 0.7567326953038059,
"grad_norm": 1.8776897192001343,
"learning_rate": 1.1770877356504683e-05,
"loss": 1.7517,
"step": 850
},
{
"epoch": 0.7576229690629869,
"grad_norm": 2.166015625,
"learning_rate": 1.1623436991130654e-05,
"loss": 1.8777,
"step": 851
},
{
"epoch": 0.7585132428221678,
"grad_norm": 1.5148497819900513,
"learning_rate": 1.1476868866754486e-05,
"loss": 1.9234,
"step": 852
},
{
"epoch": 0.7594035165813487,
"grad_norm": 2.4111993312835693,
"learning_rate": 1.1331174429944347e-05,
"loss": 1.4803,
"step": 853
},
{
"epoch": 0.7602937903405297,
"grad_norm": 1.7421929836273193,
"learning_rate": 1.1186355118645554e-05,
"loss": 1.3535,
"step": 854
},
{
"epoch": 0.7611840640997106,
"grad_norm": 1.5131878852844238,
"learning_rate": 1.1042412362166222e-05,
"loss": 1.7501,
"step": 855
},
{
"epoch": 0.7620743378588916,
"grad_norm": 6.059530735015869,
"learning_rate": 1.0899347581163221e-05,
"loss": 1.743,
"step": 856
},
{
"epoch": 0.7629646116180726,
"grad_norm": 2.317833423614502,
"learning_rate": 1.0757162187628222e-05,
"loss": 2.0051,
"step": 857
},
{
"epoch": 0.7638548853772535,
"grad_norm": 1.7132494449615479,
"learning_rate": 1.0615857584873623e-05,
"loss": 2.1177,
"step": 858
},
{
"epoch": 0.7647451591364345,
"grad_norm": 2.202890634536743,
"learning_rate": 1.0475435167518843e-05,
"loss": 1.9601,
"step": 859
},
{
"epoch": 0.7656354328956154,
"grad_norm": 3.0330166816711426,
"learning_rate": 1.0335896321476413e-05,
"loss": 1.5331,
"step": 860
},
{
"epoch": 0.7665257066547964,
"grad_norm": 1.7549813985824585,
"learning_rate": 1.0197242423938446e-05,
"loss": 1.7902,
"step": 861
},
{
"epoch": 0.7674159804139773,
"grad_norm": 2.6962575912475586,
"learning_rate": 1.0059474843362892e-05,
"loss": 2.384,
"step": 862
},
{
"epoch": 0.7683062541731582,
"grad_norm": 1.2838563919067383,
"learning_rate": 9.922594939460194e-06,
"loss": 1.1263,
"step": 863
},
{
"epoch": 0.7691965279323392,
"grad_norm": 1.1030938625335693,
"learning_rate": 9.786604063179728e-06,
"loss": 1.3106,
"step": 864
},
{
"epoch": 0.7700868016915201,
"grad_norm": 2.963934898376465,
"learning_rate": 9.651503556696516e-06,
"loss": 1.7046,
"step": 865
},
{
"epoch": 0.770977075450701,
"grad_norm": 1.6622592210769653,
"learning_rate": 9.517294753398064e-06,
"loss": 1.9185,
"step": 866
},
{
"epoch": 0.7718673492098821,
"grad_norm": 1.9364992380142212,
"learning_rate": 9.383978977871021e-06,
"loss": 1.5313,
"step": 867
},
{
"epoch": 0.772757622969063,
"grad_norm": 1.8039541244506836,
"learning_rate": 9.251557545888312e-06,
"loss": 1.9076,
"step": 868
},
{
"epoch": 0.773647896728244,
"grad_norm": 1.801265001296997,
"learning_rate": 9.120031764395987e-06,
"loss": 1.9091,
"step": 869
},
{
"epoch": 0.7745381704874249,
"grad_norm": 1.7942633628845215,
"learning_rate": 8.989402931500434e-06,
"loss": 1.9505,
"step": 870
},
{
"epoch": 0.7754284442466058,
"grad_norm": 1.9900010824203491,
"learning_rate": 8.85967233645547e-06,
"loss": 1.7563,
"step": 871
},
{
"epoch": 0.7763187180057868,
"grad_norm": 1.809123158454895,
"learning_rate": 8.730841259649725e-06,
"loss": 1.6804,
"step": 872
},
{
"epoch": 0.7772089917649677,
"grad_norm": 1.3528798818588257,
"learning_rate": 8.602910972593892e-06,
"loss": 1.6276,
"step": 873
},
{
"epoch": 0.7780992655241487,
"grad_norm": 3.139510154724121,
"learning_rate": 8.475882737908248e-06,
"loss": 2.2761,
"step": 874
},
{
"epoch": 0.7789895392833296,
"grad_norm": 2.2909891605377197,
"learning_rate": 8.34975780931021e-06,
"loss": 2.1253,
"step": 875
},
{
"epoch": 0.7798798130425105,
"grad_norm": 1.4801619052886963,
"learning_rate": 8.224537431601886e-06,
"loss": 1.5527,
"step": 876
},
{
"epoch": 0.7807700868016915,
"grad_norm": 1.4977777004241943,
"learning_rate": 8.100222840657878e-06,
"loss": 1.4283,
"step": 877
},
{
"epoch": 0.7816603605608725,
"grad_norm": 1.879576325416565,
"learning_rate": 7.976815263412963e-06,
"loss": 1.6929,
"step": 878
},
{
"epoch": 0.7825506343200535,
"grad_norm": 1.6205519437789917,
"learning_rate": 7.854315917850163e-06,
"loss": 1.8613,
"step": 879
},
{
"epoch": 0.7834409080792344,
"grad_norm": 1.9095968008041382,
"learning_rate": 7.73272601298851e-06,
"loss": 1.9905,
"step": 880
},
{
"epoch": 0.7843311818384153,
"grad_norm": 1.1806037425994873,
"learning_rate": 7.612046748871327e-06,
"loss": 1.4975,
"step": 881
},
{
"epoch": 0.7852214555975963,
"grad_norm": 1.2940500974655151,
"learning_rate": 7.492279316554207e-06,
"loss": 1.6006,
"step": 882
},
{
"epoch": 0.7861117293567772,
"grad_norm": 1.8334342241287231,
"learning_rate": 7.3734248980933395e-06,
"loss": 1.3934,
"step": 883
},
{
"epoch": 0.7870020031159581,
"grad_norm": 2.4100828170776367,
"learning_rate": 7.255484666533874e-06,
"loss": 1.692,
"step": 884
},
{
"epoch": 0.7878922768751391,
"grad_norm": 3.7759780883789062,
"learning_rate": 7.138459785898266e-06,
"loss": 1.9625,
"step": 885
},
{
"epoch": 0.78878255063432,
"grad_norm": 2.20845365524292,
"learning_rate": 7.022351411174866e-06,
"loss": 2.6353,
"step": 886
},
{
"epoch": 0.789672824393501,
"grad_norm": 1.8291293382644653,
"learning_rate": 6.907160688306425e-06,
"loss": 1.5929,
"step": 887
},
{
"epoch": 0.790563098152682,
"grad_norm": 2.1727707386016846,
"learning_rate": 6.7928887541789055e-06,
"loss": 2.1755,
"step": 888
},
{
"epoch": 0.7914533719118629,
"grad_norm": 1.7652902603149414,
"learning_rate": 6.679536736610137e-06,
"loss": 1.5469,
"step": 889
},
{
"epoch": 0.7923436456710439,
"grad_norm": 1.6923081874847412,
"learning_rate": 6.5671057543387985e-06,
"loss": 1.4707,
"step": 890
},
{
"epoch": 0.7932339194302248,
"grad_norm": 1.6656982898712158,
"learning_rate": 6.455596917013273e-06,
"loss": 1.498,
"step": 891
},
{
"epoch": 0.7941241931894057,
"grad_norm": 1.6052128076553345,
"learning_rate": 6.345011325180772e-06,
"loss": 1.6739,
"step": 892
},
{
"epoch": 0.7950144669485867,
"grad_norm": 2.754615306854248,
"learning_rate": 6.235350070276447e-06,
"loss": 2.0599,
"step": 893
},
{
"epoch": 0.7959047407077676,
"grad_norm": 1.7544282674789429,
"learning_rate": 6.126614234612593e-06,
"loss": 1.7979,
"step": 894
},
{
"epoch": 0.7967950144669486,
"grad_norm": 1.9163486957550049,
"learning_rate": 6.018804891368035e-06,
"loss": 1.7769,
"step": 895
},
{
"epoch": 0.7976852882261295,
"grad_norm": 2.594054937362671,
"learning_rate": 5.911923104577455e-06,
"loss": 2.1903,
"step": 896
},
{
"epoch": 0.7985755619853104,
"grad_norm": 2.1390740871429443,
"learning_rate": 5.805969929120947e-06,
"loss": 2.3515,
"step": 897
},
{
"epoch": 0.7994658357444915,
"grad_norm": 2.4581687450408936,
"learning_rate": 5.700946410713548e-06,
"loss": 1.7991,
"step": 898
},
{
"epoch": 0.8003561095036724,
"grad_norm": 1.9051916599273682,
"learning_rate": 5.5968535858950345e-06,
"loss": 1.8116,
"step": 899
},
{
"epoch": 0.8012463832628534,
"grad_norm": 1.8434404134750366,
"learning_rate": 5.49369248201953e-06,
"loss": 1.879,
"step": 900
},
{
"epoch": 0.8021366570220343,
"grad_norm": 2.205735206604004,
"learning_rate": 5.39146411724547e-06,
"loss": 2.0231,
"step": 901
},
{
"epoch": 0.8030269307812152,
"grad_norm": 2.084808588027954,
"learning_rate": 5.290169500525577e-06,
"loss": 2.0352,
"step": 902
},
{
"epoch": 0.8039172045403962,
"grad_norm": 2.2908926010131836,
"learning_rate": 5.189809631596798e-06,
"loss": 1.8234,
"step": 903
},
{
"epoch": 0.8048074782995771,
"grad_norm": 2.077094078063965,
"learning_rate": 5.0903855009705514e-06,
"loss": 2.0776,
"step": 904
},
{
"epoch": 0.805697752058758,
"grad_norm": 1.568692922592163,
"learning_rate": 4.991898089922819e-06,
"loss": 1.4186,
"step": 905
},
{
"epoch": 0.806588025817939,
"grad_norm": 1.4289157390594482,
"learning_rate": 4.8943483704846475e-06,
"loss": 1.1468,
"step": 906
},
{
"epoch": 0.8074782995771199,
"grad_norm": 3.5975162982940674,
"learning_rate": 4.797737305432337e-06,
"loss": 2.2185,
"step": 907
},
{
"epoch": 0.808368573336301,
"grad_norm": 1.8757998943328857,
"learning_rate": 4.702065848278126e-06,
"loss": 1.788,
"step": 908
},
{
"epoch": 0.8092588470954819,
"grad_norm": 2.071596145629883,
"learning_rate": 4.607334943260655e-06,
"loss": 1.4828,
"step": 909
},
{
"epoch": 0.8101491208546628,
"grad_norm": 1.8785046339035034,
"learning_rate": 4.513545525335705e-06,
"loss": 1.8259,
"step": 910
},
{
"epoch": 0.8110393946138438,
"grad_norm": 2.3740673065185547,
"learning_rate": 4.420698520166988e-06,
"loss": 1.9712,
"step": 911
},
{
"epoch": 0.8119296683730247,
"grad_norm": 1.753470540046692,
"learning_rate": 4.328794844116946e-06,
"loss": 1.1425,
"step": 912
},
{
"epoch": 0.8128199421322057,
"grad_norm": 2.1722190380096436,
"learning_rate": 4.237835404237778e-06,
"loss": 2.1072,
"step": 913
},
{
"epoch": 0.8137102158913866,
"grad_norm": 1.5812511444091797,
"learning_rate": 4.147821098262405e-06,
"loss": 1.8418,
"step": 914
},
{
"epoch": 0.8146004896505675,
"grad_norm": 2.335020065307617,
"learning_rate": 4.0587528145957235e-06,
"loss": 2.2968,
"step": 915
},
{
"epoch": 0.8154907634097485,
"grad_norm": 4.78397798538208,
"learning_rate": 3.970631432305694e-06,
"loss": 2.5117,
"step": 916
},
{
"epoch": 0.8163810371689294,
"grad_norm": 2.6600706577301025,
"learning_rate": 3.883457821114811e-06,
"loss": 1.7201,
"step": 917
},
{
"epoch": 0.8172713109281103,
"grad_norm": 1.75503671169281,
"learning_rate": 3.797232841391407e-06,
"loss": 2.0005,
"step": 918
},
{
"epoch": 0.8181615846872914,
"grad_norm": 1.8299975395202637,
"learning_rate": 3.711957344141237e-06,
"loss": 1.883,
"step": 919
},
{
"epoch": 0.8190518584464723,
"grad_norm": 2.177534818649292,
"learning_rate": 3.627632170999029e-06,
"loss": 1.5191,
"step": 920
},
{
"epoch": 0.8199421322056533,
"grad_norm": 1.4951258897781372,
"learning_rate": 3.5442581542201923e-06,
"loss": 1.4525,
"step": 921
},
{
"epoch": 0.8208324059648342,
"grad_norm": 2.103001117706299,
"learning_rate": 3.461836116672612e-06,
"loss": 2.0439,
"step": 922
},
{
"epoch": 0.8217226797240151,
"grad_norm": 1.925445318222046,
"learning_rate": 3.380366871828522e-06,
"loss": 2.3512,
"step": 923
},
{
"epoch": 0.8226129534831961,
"grad_norm": 1.6522331237792969,
"learning_rate": 3.2998512237565005e-06,
"loss": 1.7079,
"step": 924
},
{
"epoch": 0.823503227242377,
"grad_norm": 1.6218456029891968,
"learning_rate": 3.2202899671134546e-06,
"loss": 1.6155,
"step": 925
},
{
"epoch": 0.824393501001558,
"grad_norm": 2.5410165786743164,
"learning_rate": 3.1416838871368924e-06,
"loss": 1.8986,
"step": 926
},
{
"epoch": 0.8252837747607389,
"grad_norm": 1.5301979780197144,
"learning_rate": 3.064033759637064e-06,
"loss": 1.6242,
"step": 927
},
{
"epoch": 0.8261740485199198,
"grad_norm": 2.0955357551574707,
"learning_rate": 2.9873403509894203e-06,
"loss": 2.2897,
"step": 928
},
{
"epoch": 0.8270643222791009,
"grad_norm": 2.5796597003936768,
"learning_rate": 2.9116044181269007e-06,
"loss": 2.3979,
"step": 929
},
{
"epoch": 0.8279545960382818,
"grad_norm": 1.8714462518692017,
"learning_rate": 2.836826708532603e-06,
"loss": 2.3742,
"step": 930
},
{
"epoch": 0.8288448697974627,
"grad_norm": 2.4107353687286377,
"learning_rate": 2.7630079602323442e-06,
"loss": 1.8276,
"step": 931
},
{
"epoch": 0.8297351435566437,
"grad_norm": 1.8410996198654175,
"learning_rate": 2.690148901787337e-06,
"loss": 1.5216,
"step": 932
},
{
"epoch": 0.8306254173158246,
"grad_norm": 2.153083324432373,
"learning_rate": 2.618250252287113e-06,
"loss": 2.5236,
"step": 933
},
{
"epoch": 0.8315156910750056,
"grad_norm": 2.0374317169189453,
"learning_rate": 2.5473127213422763e-06,
"loss": 2.2,
"step": 934
},
{
"epoch": 0.8324059648341865,
"grad_norm": 1.670549988746643,
"learning_rate": 2.4773370090776626e-06,
"loss": 1.7859,
"step": 935
},
{
"epoch": 0.8332962385933674,
"grad_norm": 2.1211376190185547,
"learning_rate": 2.4083238061252567e-06,
"loss": 2.1081,
"step": 936
},
{
"epoch": 0.8341865123525484,
"grad_norm": 1.8752716779708862,
"learning_rate": 2.3402737936175425e-06,
"loss": 1.8866,
"step": 937
},
{
"epoch": 0.8350767861117293,
"grad_norm": 2.2133710384368896,
"learning_rate": 2.273187643180652e-06,
"loss": 1.6484,
"step": 938
},
{
"epoch": 0.8359670598709104,
"grad_norm": 2.0927562713623047,
"learning_rate": 2.2070660169278166e-06,
"loss": 1.9145,
"step": 939
},
{
"epoch": 0.8368573336300913,
"grad_norm": 2.2053141593933105,
"learning_rate": 2.141909567452793e-06,
"loss": 1.6497,
"step": 940
},
{
"epoch": 0.8377476073892722,
"grad_norm": 1.718266248703003,
"learning_rate": 2.0777189378234143e-06,
"loss": 1.8699,
"step": 941
},
{
"epoch": 0.8386378811484532,
"grad_norm": 1.7345627546310425,
"learning_rate": 2.014494761575314e-06,
"loss": 1.9545,
"step": 942
},
{
"epoch": 0.8395281549076341,
"grad_norm": 2.2770743370056152,
"learning_rate": 1.9522376627055583e-06,
"loss": 2.4747,
"step": 943
},
{
"epoch": 0.840418428666815,
"grad_norm": 3.990893840789795,
"learning_rate": 1.8909482556666024e-06,
"loss": 2.4903,
"step": 944
},
{
"epoch": 0.841308702425996,
"grad_norm": 1.6306087970733643,
"learning_rate": 1.8306271453601199e-06,
"loss": 1.4113,
"step": 945
},
{
"epoch": 0.8421989761851769,
"grad_norm": 1.6917046308517456,
"learning_rate": 1.771274927131139e-06,
"loss": 1.7981,
"step": 946
},
{
"epoch": 0.8430892499443579,
"grad_norm": 2.0122792720794678,
"learning_rate": 1.712892186762083e-06,
"loss": 1.671,
"step": 947
},
{
"epoch": 0.8439795237035388,
"grad_norm": 1.5916146039962769,
"learning_rate": 1.6554795004670388e-06,
"loss": 1.5368,
"step": 948
},
{
"epoch": 0.8448697974627198,
"grad_norm": 1.4738613367080688,
"learning_rate": 1.5990374348860305e-06,
"loss": 1.7981,
"step": 949
},
{
"epoch": 0.8457600712219008,
"grad_norm": 1.550717830657959,
"learning_rate": 1.543566547079467e-06,
"loss": 1.4818,
"step": 950
},
{
"epoch": 0.8466503449810817,
"grad_norm": 1.380691409111023,
"learning_rate": 1.4890673845226133e-06,
"loss": 1.5712,
"step": 951
},
{
"epoch": 0.8475406187402627,
"grad_norm": 1.9173582792282104,
"learning_rate": 1.4355404851001952e-06,
"loss": 1.7729,
"step": 952
},
{
"epoch": 0.8484308924994436,
"grad_norm": 1.9216915369033813,
"learning_rate": 1.3829863771011253e-06,
"loss": 1.767,
"step": 953
},
{
"epoch": 0.8493211662586245,
"grad_norm": 1.4184794425964355,
"learning_rate": 1.3314055792131964e-06,
"loss": 1.2916,
"step": 954
},
{
"epoch": 0.8502114400178055,
"grad_norm": 2.1560451984405518,
"learning_rate": 1.280798600518085e-06,
"loss": 1.5511,
"step": 955
},
{
"epoch": 0.8511017137769864,
"grad_norm": 1.8579264879226685,
"learning_rate": 1.231165940486234e-06,
"loss": 2.1381,
"step": 956
},
{
"epoch": 0.8519919875361673,
"grad_norm": 1.4371646642684937,
"learning_rate": 1.1825080889719563e-06,
"loss": 1.6049,
"step": 957
},
{
"epoch": 0.8528822612953483,
"grad_norm": 2.4612481594085693,
"learning_rate": 1.134825526208605e-06,
"loss": 1.804,
"step": 958
},
{
"epoch": 0.8537725350545292,
"grad_norm": 1.62546706199646,
"learning_rate": 1.0881187228038215e-06,
"loss": 1.8068,
"step": 959
},
{
"epoch": 0.8546628088137103,
"grad_norm": 2.140608787536621,
"learning_rate": 1.0423881397349068e-06,
"loss": 1.8131,
"step": 960
},
{
"epoch": 0.8555530825728912,
"grad_norm": 1.6791179180145264,
"learning_rate": 9.976342283442463e-07,
"loss": 2.104,
"step": 961
},
{
"epoch": 0.8564433563320721,
"grad_norm": 1.730907917022705,
"learning_rate": 9.538574303348813e-07,
"loss": 1.8383,
"step": 962
},
{
"epoch": 0.8573336300912531,
"grad_norm": 1.5879576206207275,
"learning_rate": 9.110581777661331e-07,
"loss": 1.705,
"step": 963
},
{
"epoch": 0.858223903850434,
"grad_norm": 2.378976821899414,
"learning_rate": 8.692368930493521e-07,
"loss": 2.2417,
"step": 964
},
{
"epoch": 0.859114177609615,
"grad_norm": 1.8429821729660034,
"learning_rate": 8.283939889437209e-07,
"loss": 2.1216,
"step": 965
},
{
"epoch": 0.8600044513687959,
"grad_norm": 3.7237606048583984,
"learning_rate": 7.885298685522235e-07,
"loss": 2.1695,
"step": 966
},
{
"epoch": 0.8608947251279768,
"grad_norm": 2.168633222579956,
"learning_rate": 7.496449253176274e-07,
"loss": 2.2198,
"step": 967
},
{
"epoch": 0.8617849988871578,
"grad_norm": 1.5449988842010498,
"learning_rate": 7.117395430186414e-07,
"loss": 1.6742,
"step": 968
},
{
"epoch": 0.8626752726463387,
"grad_norm": 1.8865556716918945,
"learning_rate": 6.748140957660631e-07,
"loss": 1.8883,
"step": 969
},
{
"epoch": 0.8635655464055197,
"grad_norm": 1.589005708694458,
"learning_rate": 6.388689479991605e-07,
"loss": 1.2163,
"step": 970
},
{
"epoch": 0.8644558201647007,
"grad_norm": 1.6373053789138794,
"learning_rate": 6.039044544820404e-07,
"loss": 1.4072,
"step": 971
},
{
"epoch": 0.8653460939238816,
"grad_norm": 2.5038976669311523,
"learning_rate": 5.699209603001076e-07,
"loss": 2.0531,
"step": 972
},
{
"epoch": 0.8662363676830626,
"grad_norm": 1.6263477802276611,
"learning_rate": 5.369188008567672e-07,
"loss": 1.5389,
"step": 973
},
{
"epoch": 0.8671266414422435,
"grad_norm": 1.6909034252166748,
"learning_rate": 5.048983018699827e-07,
"loss": 1.3181,
"step": 974
},
{
"epoch": 0.8680169152014244,
"grad_norm": 1.7488054037094116,
"learning_rate": 4.738597793691679e-07,
"loss": 1.5009,
"step": 975
},
{
"epoch": 0.8689071889606054,
"grad_norm": 1.4903292655944824,
"learning_rate": 4.438035396920004e-07,
"loss": 1.283,
"step": 976
},
{
"epoch": 0.8697974627197863,
"grad_norm": 2.250135898590088,
"learning_rate": 4.1472987948143473e-07,
"loss": 1.8839,
"step": 977
},
{
"epoch": 0.8706877364789672,
"grad_norm": 2.169273614883423,
"learning_rate": 3.866390856827495e-07,
"loss": 1.9408,
"step": 978
},
{
"epoch": 0.8715780102381482,
"grad_norm": 1.683918833732605,
"learning_rate": 3.595314355407609e-07,
"loss": 1.6478,
"step": 979
},
{
"epoch": 0.8724682839973292,
"grad_norm": 2.058692455291748,
"learning_rate": 3.3340719659701313e-07,
"loss": 2.2342,
"step": 980
},
{
"epoch": 0.8733585577565102,
"grad_norm": 1.406325101852417,
"learning_rate": 3.0826662668720364e-07,
"loss": 1.4541,
"step": 981
},
{
"epoch": 0.8742488315156911,
"grad_norm": 3.1064553260803223,
"learning_rate": 2.841099739386066e-07,
"loss": 3.0294,
"step": 982
},
{
"epoch": 0.875139105274872,
"grad_norm": 1.261873722076416,
"learning_rate": 2.609374767676309e-07,
"loss": 1.4086,
"step": 983
},
{
"epoch": 0.876029379034053,
"grad_norm": 1.0917588472366333,
"learning_rate": 2.387493638774774e-07,
"loss": 1.159,
"step": 984
},
{
"epoch": 0.8769196527932339,
"grad_norm": 1.7324159145355225,
"learning_rate": 2.175458542558517e-07,
"loss": 1.469,
"step": 985
},
{
"epoch": 0.8778099265524149,
"grad_norm": 1.3332997560501099,
"learning_rate": 1.973271571728441e-07,
"loss": 1.0762,
"step": 986
},
{
"epoch": 0.8787002003115958,
"grad_norm": 1.856528878211975,
"learning_rate": 1.7809347217881966e-07,
"loss": 2.0178,
"step": 987
},
{
"epoch": 0.8795904740707767,
"grad_norm": 2.5236916542053223,
"learning_rate": 1.598449891024978e-07,
"loss": 1.7862,
"step": 988
},
{
"epoch": 0.8804807478299577,
"grad_norm": 2.023587703704834,
"learning_rate": 1.425818880490315e-07,
"loss": 1.8994,
"step": 989
},
{
"epoch": 0.8813710215891387,
"grad_norm": 1.4901554584503174,
"learning_rate": 1.2630433939825327e-07,
"loss": 1.5095,
"step": 990
},
{
"epoch": 0.8822612953483197,
"grad_norm": 1.6693906784057617,
"learning_rate": 1.1101250380300965e-07,
"loss": 1.4956,
"step": 991
},
{
"epoch": 0.8831515691075006,
"grad_norm": 2.0669729709625244,
"learning_rate": 9.670653218752934e-08,
"loss": 2.1793,
"step": 992
},
{
"epoch": 0.8840418428666815,
"grad_norm": 2.204921007156372,
"learning_rate": 8.33865657459909e-08,
"loss": 1.751,
"step": 993
},
{
"epoch": 0.8849321166258625,
"grad_norm": 1.2985179424285889,
"learning_rate": 7.105273594107953e-08,
"loss": 1.4216,
"step": 994
},
{
"epoch": 0.8858223903850434,
"grad_norm": 1.683183193206787,
"learning_rate": 5.970516450271025e-08,
"loss": 2.0145,
"step": 995
},
{
"epoch": 0.8867126641442243,
"grad_norm": 1.415525197982788,
"learning_rate": 4.934396342684e-08,
"loss": 1.3802,
"step": 996
},
{
"epoch": 0.8876029379034053,
"grad_norm": 1.679310917854309,
"learning_rate": 3.996923497434635e-08,
"loss": 1.5799,
"step": 997
},
{
"epoch": 0.8884932116625862,
"grad_norm": 3.5218520164489746,
"learning_rate": 3.1581071670006015e-08,
"loss": 1.5458,
"step": 998
},
{
"epoch": 0.8893834854217672,
"grad_norm": 1.5060161352157593,
"learning_rate": 2.417955630159563e-08,
"loss": 1.0504,
"step": 999
},
{
"epoch": 0.8902737591809481,
"grad_norm": 2.1258907318115234,
"learning_rate": 1.7764761919103477e-08,
"loss": 2.3434,
"step": 1000
},
{
"epoch": 0.8902737591809481,
"eval_loss": 1.8668110370635986,
"eval_runtime": 52.7143,
"eval_samples_per_second": 8.973,
"eval_steps_per_second": 4.496,
"step": 1000
}
],
"logging_steps": 1,
"max_steps": 1000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 6.34658190524416e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}