diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,77196 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 1.0, + "eval_steps": 500, + "global_step": 11022, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 9.072763563781528e-05, + "grad_norm": 22.066263600331737, + "learning_rate": 3.0211480362537765e-06, + "loss": 4.2587, + "step": 1 + }, + { + "epoch": 0.00018145527127563056, + "grad_norm": 23.03299858414849, + "learning_rate": 6.042296072507553e-06, + "loss": 4.2968, + "step": 2 + }, + { + "epoch": 0.00027218290691344586, + "grad_norm": 21.998076784989955, + "learning_rate": 9.06344410876133e-06, + "loss": 4.1585, + "step": 3 + }, + { + "epoch": 0.0003629105425512611, + "grad_norm": 16.67517023046812, + "learning_rate": 1.2084592145015106e-05, + "loss": 3.9403, + "step": 4 + }, + { + "epoch": 0.0004536381781890764, + "grad_norm": 25.872989941239933, + "learning_rate": 1.5105740181268884e-05, + "loss": 3.7959, + "step": 5 + }, + { + "epoch": 0.0005443658138268917, + "grad_norm": 16.56523030178225, + "learning_rate": 1.812688821752266e-05, + "loss": 3.5333, + "step": 6 + }, + { + "epoch": 0.000635093449464707, + "grad_norm": 17.696973424464808, + "learning_rate": 2.1148036253776438e-05, + "loss": 3.3729, + "step": 7 + }, + { + "epoch": 0.0007258210851025222, + "grad_norm": 28.00548761076332, + "learning_rate": 2.4169184290030212e-05, + "loss": 3.4371, + "step": 8 + }, + { + "epoch": 0.0008165487207403375, + "grad_norm": 13.169246996542736, + "learning_rate": 2.7190332326283987e-05, + "loss": 3.3062, + "step": 9 + }, + { + "epoch": 0.0009072763563781528, + "grad_norm": 9.105098188586176, + "learning_rate": 3.0211480362537768e-05, + "loss": 3.1543, + "step": 10 + }, + { + "epoch": 0.000998003992015968, + "grad_norm": 7.186693976942599, + "learning_rate": 3.323262839879154e-05, + "loss": 3.1034, + "step": 11 + }, + { + "epoch": 0.0010887316276537834, + "grad_norm": 9.256355187730513, + "learning_rate": 3.625377643504532e-05, + "loss": 2.9672, + "step": 12 + }, + { + "epoch": 0.0011794592632915987, + "grad_norm": 24.11465329836891, + "learning_rate": 3.927492447129909e-05, + "loss": 3.0121, + "step": 13 + }, + { + "epoch": 0.001270186898929414, + "grad_norm": 24.204022991897965, + "learning_rate": 4.2296072507552875e-05, + "loss": 3.0344, + "step": 14 + }, + { + "epoch": 0.0013609145345672292, + "grad_norm": 10.635921431088422, + "learning_rate": 4.5317220543806646e-05, + "loss": 2.9912, + "step": 15 + }, + { + "epoch": 0.0014516421702050445, + "grad_norm": 5.488542242028252, + "learning_rate": 4.8338368580060424e-05, + "loss": 2.9658, + "step": 16 + }, + { + "epoch": 0.0015423698058428597, + "grad_norm": 3.6388310562344004, + "learning_rate": 5.13595166163142e-05, + "loss": 2.9478, + "step": 17 + }, + { + "epoch": 0.001633097441480675, + "grad_norm": 2.4057832215482273, + "learning_rate": 5.438066465256797e-05, + "loss": 2.9592, + "step": 18 + }, + { + "epoch": 0.0017238250771184902, + "grad_norm": 3.2343220574579, + "learning_rate": 5.740181268882176e-05, + "loss": 2.8903, + "step": 19 + }, + { + "epoch": 0.0018145527127563057, + "grad_norm": 2.0956019337658747, + "learning_rate": 6.0422960725075535e-05, + "loss": 2.9473, + "step": 20 + }, + { + "epoch": 0.001905280348394121, + "grad_norm": 4.111287663666902, + "learning_rate": 6.34441087613293e-05, + "loss": 2.838, + "step": 21 + }, + { + "epoch": 
0.001996007984031936, + "grad_norm": 1.5716963699247473, + "learning_rate": 6.646525679758308e-05, + "loss": 2.8128, + "step": 22 + }, + { + "epoch": 0.0020867356196697514, + "grad_norm": 2.203905271923883, + "learning_rate": 6.948640483383687e-05, + "loss": 2.8683, + "step": 23 + }, + { + "epoch": 0.002177463255307567, + "grad_norm": 1.3349815893245556, + "learning_rate": 7.250755287009064e-05, + "loss": 2.8442, + "step": 24 + }, + { + "epoch": 0.002268190890945382, + "grad_norm": 2.3066098728780315, + "learning_rate": 7.552870090634441e-05, + "loss": 2.8349, + "step": 25 + }, + { + "epoch": 0.0023589185265831974, + "grad_norm": 2.20141400392811, + "learning_rate": 7.854984894259818e-05, + "loss": 2.811, + "step": 26 + }, + { + "epoch": 0.0024496461622210124, + "grad_norm": 1.170457615549049, + "learning_rate": 8.157099697885197e-05, + "loss": 2.7814, + "step": 27 + }, + { + "epoch": 0.002540373797858828, + "grad_norm": 1.6853401249173157, + "learning_rate": 8.459214501510575e-05, + "loss": 2.833, + "step": 28 + }, + { + "epoch": 0.002631101433496643, + "grad_norm": 1.8373362036692347, + "learning_rate": 8.761329305135952e-05, + "loss": 2.7689, + "step": 29 + }, + { + "epoch": 0.0027218290691344584, + "grad_norm": 2.8402187670009837, + "learning_rate": 9.063444108761329e-05, + "loss": 2.7779, + "step": 30 + }, + { + "epoch": 0.0028125567047722734, + "grad_norm": 10.452698443855219, + "learning_rate": 9.365558912386706e-05, + "loss": 2.8132, + "step": 31 + }, + { + "epoch": 0.002903284340410089, + "grad_norm": 7.540478284195402, + "learning_rate": 9.667673716012085e-05, + "loss": 2.824, + "step": 32 + }, + { + "epoch": 0.0029940119760479044, + "grad_norm": 3.8268655229902473, + "learning_rate": 9.969788519637463e-05, + "loss": 2.8958, + "step": 33 + }, + { + "epoch": 0.0030847396116857194, + "grad_norm": 2.558853638035011, + "learning_rate": 0.0001027190332326284, + "loss": 2.8172, + "step": 34 + }, + { + "epoch": 0.003175467247323535, + "grad_norm": 1.665828021719714, + "learning_rate": 0.00010574018126888218, + "loss": 2.8629, + "step": 35 + }, + { + "epoch": 0.00326619488296135, + "grad_norm": 1.3305937823615495, + "learning_rate": 0.00010876132930513595, + "loss": 2.8079, + "step": 36 + }, + { + "epoch": 0.0033569225185991654, + "grad_norm": 1.5245435918578671, + "learning_rate": 0.00011178247734138973, + "loss": 2.7989, + "step": 37 + }, + { + "epoch": 0.0034476501542369804, + "grad_norm": 1.9561125173378797, + "learning_rate": 0.00011480362537764352, + "loss": 2.7759, + "step": 38 + }, + { + "epoch": 0.003538377789874796, + "grad_norm": 1.1613040128800307, + "learning_rate": 0.00011782477341389729, + "loss": 2.7844, + "step": 39 + }, + { + "epoch": 0.0036291054255126113, + "grad_norm": 1.3048494044267724, + "learning_rate": 0.00012084592145015107, + "loss": 2.8135, + "step": 40 + }, + { + "epoch": 0.0037198330611504264, + "grad_norm": 0.9712190215352975, + "learning_rate": 0.00012386706948640483, + "loss": 2.8211, + "step": 41 + }, + { + "epoch": 0.003810560696788242, + "grad_norm": 1.4272990687539946, + "learning_rate": 0.0001268882175226586, + "loss": 2.8279, + "step": 42 + }, + { + "epoch": 0.003901288332426057, + "grad_norm": 0.8350078510680371, + "learning_rate": 0.0001299093655589124, + "loss": 2.814, + "step": 43 + }, + { + "epoch": 0.003992015968063872, + "grad_norm": 0.7048235624207178, + "learning_rate": 0.00013293051359516617, + "loss": 2.7888, + "step": 44 + }, + { + "epoch": 0.004082743603701687, + "grad_norm": 0.8192285848213998, + "learning_rate": 
0.00013595166163141994, + "loss": 2.8173, + "step": 45 + }, + { + "epoch": 0.004173471239339503, + "grad_norm": 0.8122590590131664, + "learning_rate": 0.00013897280966767374, + "loss": 2.7652, + "step": 46 + }, + { + "epoch": 0.004264198874977318, + "grad_norm": 0.5443501484131978, + "learning_rate": 0.0001419939577039275, + "loss": 2.8122, + "step": 47 + }, + { + "epoch": 0.004354926510615134, + "grad_norm": 0.585359556996186, + "learning_rate": 0.00014501510574018128, + "loss": 2.7118, + "step": 48 + }, + { + "epoch": 0.004445654146252948, + "grad_norm": 0.6902350437524689, + "learning_rate": 0.00014803625377643502, + "loss": 2.7453, + "step": 49 + }, + { + "epoch": 0.004536381781890764, + "grad_norm": 0.6182357512059315, + "learning_rate": 0.00015105740181268882, + "loss": 2.761, + "step": 50 + }, + { + "epoch": 0.004627109417528579, + "grad_norm": 0.7174772143716582, + "learning_rate": 0.0001540785498489426, + "loss": 2.7035, + "step": 51 + }, + { + "epoch": 0.004717837053166395, + "grad_norm": 0.5484296744355537, + "learning_rate": 0.00015709969788519636, + "loss": 2.7865, + "step": 52 + }, + { + "epoch": 0.004808564688804209, + "grad_norm": 0.6418871704462707, + "learning_rate": 0.00016012084592145016, + "loss": 2.6964, + "step": 53 + }, + { + "epoch": 0.004899292324442025, + "grad_norm": 0.9740166498045296, + "learning_rate": 0.00016314199395770393, + "loss": 2.7318, + "step": 54 + }, + { + "epoch": 0.00499001996007984, + "grad_norm": 0.5484830897685524, + "learning_rate": 0.0001661631419939577, + "loss": 2.7583, + "step": 55 + }, + { + "epoch": 0.005080747595717656, + "grad_norm": 1.0139389188210524, + "learning_rate": 0.0001691842900302115, + "loss": 2.7051, + "step": 56 + }, + { + "epoch": 0.005171475231355471, + "grad_norm": 1.082634987810632, + "learning_rate": 0.00017220543806646527, + "loss": 2.6931, + "step": 57 + }, + { + "epoch": 0.005262202866993286, + "grad_norm": 0.7594418360808969, + "learning_rate": 0.00017522658610271904, + "loss": 2.7333, + "step": 58 + }, + { + "epoch": 0.005352930502631101, + "grad_norm": 1.1137422886225876, + "learning_rate": 0.0001782477341389728, + "loss": 2.7452, + "step": 59 + }, + { + "epoch": 0.005443658138268917, + "grad_norm": 1.330360089312767, + "learning_rate": 0.00018126888217522659, + "loss": 2.7332, + "step": 60 + }, + { + "epoch": 0.005534385773906732, + "grad_norm": 2.722740209365963, + "learning_rate": 0.00018429003021148036, + "loss": 2.6943, + "step": 61 + }, + { + "epoch": 0.005625113409544547, + "grad_norm": 4.463356825742107, + "learning_rate": 0.00018731117824773413, + "loss": 2.7913, + "step": 62 + }, + { + "epoch": 0.005715841045182362, + "grad_norm": 1.5412002520982688, + "learning_rate": 0.00019033232628398793, + "loss": 2.705, + "step": 63 + }, + { + "epoch": 0.005806568680820178, + "grad_norm": 1.4506529813674434, + "learning_rate": 0.0001933534743202417, + "loss": 2.7003, + "step": 64 + }, + { + "epoch": 0.005897296316457993, + "grad_norm": 1.70205119720204, + "learning_rate": 0.00019637462235649547, + "loss": 2.7584, + "step": 65 + }, + { + "epoch": 0.005988023952095809, + "grad_norm": 0.9867010377493848, + "learning_rate": 0.00019939577039274927, + "loss": 2.706, + "step": 66 + }, + { + "epoch": 0.006078751587733623, + "grad_norm": 1.3028563062560679, + "learning_rate": 0.00020241691842900304, + "loss": 2.7575, + "step": 67 + }, + { + "epoch": 0.006169479223371439, + "grad_norm": 0.9387518937784268, + "learning_rate": 0.0002054380664652568, + "loss": 2.7362, + "step": 68 + }, + { + "epoch": 0.006260206859009254, + 
"grad_norm": 1.0640987293086788, + "learning_rate": 0.00020845921450151058, + "loss": 2.7281, + "step": 69 + }, + { + "epoch": 0.00635093449464707, + "grad_norm": 0.4872118934472039, + "learning_rate": 0.00021148036253776435, + "loss": 2.7677, + "step": 70 + }, + { + "epoch": 0.006441662130284884, + "grad_norm": 0.7787375416814618, + "learning_rate": 0.00021450151057401812, + "loss": 2.7311, + "step": 71 + }, + { + "epoch": 0.0065323897659227, + "grad_norm": 0.5544553710882945, + "learning_rate": 0.0002175226586102719, + "loss": 2.7147, + "step": 72 + }, + { + "epoch": 0.006623117401560515, + "grad_norm": 1.0198213759338481, + "learning_rate": 0.0002205438066465257, + "loss": 2.6889, + "step": 73 + }, + { + "epoch": 0.006713845037198331, + "grad_norm": 0.688864732422647, + "learning_rate": 0.00022356495468277946, + "loss": 2.7471, + "step": 74 + }, + { + "epoch": 0.006804572672836146, + "grad_norm": 0.9942360577374387, + "learning_rate": 0.00022658610271903323, + "loss": 2.6969, + "step": 75 + }, + { + "epoch": 0.006895300308473961, + "grad_norm": 1.7681002862249326, + "learning_rate": 0.00022960725075528703, + "loss": 2.6701, + "step": 76 + }, + { + "epoch": 0.006986027944111776, + "grad_norm": 2.062561406800824, + "learning_rate": 0.0002326283987915408, + "loss": 2.6676, + "step": 77 + }, + { + "epoch": 0.007076755579749592, + "grad_norm": 2.1441942301618737, + "learning_rate": 0.00023564954682779457, + "loss": 2.733, + "step": 78 + }, + { + "epoch": 0.007167483215387407, + "grad_norm": 0.9305994755392801, + "learning_rate": 0.00023867069486404834, + "loss": 2.6762, + "step": 79 + }, + { + "epoch": 0.007258210851025223, + "grad_norm": 1.6446859950215986, + "learning_rate": 0.00024169184290030214, + "loss": 2.7413, + "step": 80 + }, + { + "epoch": 0.007348938486663037, + "grad_norm": 0.5969392062915605, + "learning_rate": 0.00024471299093655586, + "loss": 2.6697, + "step": 81 + }, + { + "epoch": 0.007439666122300853, + "grad_norm": 1.4145522198544644, + "learning_rate": 0.00024773413897280966, + "loss": 2.6933, + "step": 82 + }, + { + "epoch": 0.007530393757938668, + "grad_norm": 0.8455888930894543, + "learning_rate": 0.00025075528700906345, + "loss": 2.6072, + "step": 83 + }, + { + "epoch": 0.007621121393576484, + "grad_norm": 0.7318715400324846, + "learning_rate": 0.0002537764350453172, + "loss": 2.6637, + "step": 84 + }, + { + "epoch": 0.007711849029214298, + "grad_norm": 0.8603256749148339, + "learning_rate": 0.00025679758308157105, + "loss": 2.6369, + "step": 85 + }, + { + "epoch": 0.007802576664852114, + "grad_norm": 0.6775748368262986, + "learning_rate": 0.0002598187311178248, + "loss": 2.6441, + "step": 86 + }, + { + "epoch": 0.007893304300489928, + "grad_norm": 1.2007745505804122, + "learning_rate": 0.00026283987915407854, + "loss": 2.6239, + "step": 87 + }, + { + "epoch": 0.007984031936127744, + "grad_norm": 0.5428362959795466, + "learning_rate": 0.00026586102719033234, + "loss": 2.5955, + "step": 88 + }, + { + "epoch": 0.00807475957176556, + "grad_norm": 0.8959479956119106, + "learning_rate": 0.0002688821752265861, + "loss": 2.6403, + "step": 89 + }, + { + "epoch": 0.008165487207403375, + "grad_norm": 0.8688101920013656, + "learning_rate": 0.0002719033232628399, + "loss": 2.6538, + "step": 90 + }, + { + "epoch": 0.00825621484304119, + "grad_norm": 0.8299512788253713, + "learning_rate": 0.0002749244712990936, + "loss": 2.6543, + "step": 91 + }, + { + "epoch": 0.008346942478679006, + "grad_norm": 0.7321951516820533, + "learning_rate": 0.0002779456193353475, + "loss": 2.5984, + 
"step": 92 + }, + { + "epoch": 0.008437670114316821, + "grad_norm": 0.6286820230088039, + "learning_rate": 0.0002809667673716012, + "loss": 2.6532, + "step": 93 + }, + { + "epoch": 0.008528397749954637, + "grad_norm": 0.5941136181184642, + "learning_rate": 0.000283987915407855, + "loss": 2.6354, + "step": 94 + }, + { + "epoch": 0.008619125385592452, + "grad_norm": 0.5960854196762263, + "learning_rate": 0.00028700906344410876, + "loss": 2.5849, + "step": 95 + }, + { + "epoch": 0.008709853021230268, + "grad_norm": 0.7368503477488194, + "learning_rate": 0.00029003021148036256, + "loss": 2.6088, + "step": 96 + }, + { + "epoch": 0.008800580656868081, + "grad_norm": 0.6725131103701967, + "learning_rate": 0.0002930513595166163, + "loss": 2.6132, + "step": 97 + }, + { + "epoch": 0.008891308292505897, + "grad_norm": 0.4908794106381106, + "learning_rate": 0.00029607250755287005, + "loss": 2.633, + "step": 98 + }, + { + "epoch": 0.008982035928143712, + "grad_norm": 0.5890667740040948, + "learning_rate": 0.0002990936555891239, + "loss": 2.5842, + "step": 99 + }, + { + "epoch": 0.009072763563781528, + "grad_norm": 1.6506104990680912, + "learning_rate": 0.00030211480362537764, + "loss": 2.5891, + "step": 100 + }, + { + "epoch": 0.009163491199419343, + "grad_norm": 2.3182864868576374, + "learning_rate": 0.00030513595166163144, + "loss": 2.6624, + "step": 101 + }, + { + "epoch": 0.009254218835057159, + "grad_norm": 2.6143515052802875, + "learning_rate": 0.0003081570996978852, + "loss": 2.667, + "step": 102 + }, + { + "epoch": 0.009344946470694974, + "grad_norm": 2.044861653474928, + "learning_rate": 0.000311178247734139, + "loss": 2.6387, + "step": 103 + }, + { + "epoch": 0.00943567410633279, + "grad_norm": 1.3566532990977265, + "learning_rate": 0.00031419939577039273, + "loss": 2.5989, + "step": 104 + }, + { + "epoch": 0.009526401741970605, + "grad_norm": 1.3155159160081096, + "learning_rate": 0.0003172205438066466, + "loss": 2.6286, + "step": 105 + }, + { + "epoch": 0.009617129377608419, + "grad_norm": 0.7858577220119158, + "learning_rate": 0.0003202416918429003, + "loss": 2.6349, + "step": 106 + }, + { + "epoch": 0.009707857013246234, + "grad_norm": 0.9912903356340684, + "learning_rate": 0.00032326283987915407, + "loss": 2.5984, + "step": 107 + }, + { + "epoch": 0.00979858464888405, + "grad_norm": 0.6880569881633781, + "learning_rate": 0.00032628398791540787, + "loss": 2.6307, + "step": 108 + }, + { + "epoch": 0.009889312284521865, + "grad_norm": 0.6500768004016028, + "learning_rate": 0.0003293051359516616, + "loss": 2.6363, + "step": 109 + }, + { + "epoch": 0.00998003992015968, + "grad_norm": 0.6349564478649325, + "learning_rate": 0.0003323262839879154, + "loss": 2.6009, + "step": 110 + }, + { + "epoch": 0.010070767555797496, + "grad_norm": 0.48572905730367516, + "learning_rate": 0.00033534743202416915, + "loss": 2.5782, + "step": 111 + }, + { + "epoch": 0.010161495191435312, + "grad_norm": 0.6206512592504404, + "learning_rate": 0.000338368580060423, + "loss": 2.5525, + "step": 112 + }, + { + "epoch": 0.010252222827073127, + "grad_norm": 0.7918175400602899, + "learning_rate": 0.00034138972809667675, + "loss": 2.6088, + "step": 113 + }, + { + "epoch": 0.010342950462710943, + "grad_norm": 0.5789013492460244, + "learning_rate": 0.00034441087613293055, + "loss": 2.5919, + "step": 114 + }, + { + "epoch": 0.010433678098348756, + "grad_norm": 0.6181215724335545, + "learning_rate": 0.0003474320241691843, + "loss": 2.601, + "step": 115 + }, + { + "epoch": 0.010524405733986572, + "grad_norm": 
0.6441971834794206, + "learning_rate": 0.0003504531722054381, + "loss": 2.509, + "step": 116 + }, + { + "epoch": 0.010615133369624387, + "grad_norm": 0.6305955433268183, + "learning_rate": 0.00035347432024169183, + "loss": 2.5463, + "step": 117 + }, + { + "epoch": 0.010705861005262203, + "grad_norm": 0.6221596631591141, + "learning_rate": 0.0003564954682779456, + "loss": 2.5696, + "step": 118 + }, + { + "epoch": 0.010796588640900018, + "grad_norm": 0.7907152449619109, + "learning_rate": 0.00035951661631419943, + "loss": 2.521, + "step": 119 + }, + { + "epoch": 0.010887316276537834, + "grad_norm": 0.6226606930299683, + "learning_rate": 0.00036253776435045317, + "loss": 2.561, + "step": 120 + }, + { + "epoch": 0.010978043912175649, + "grad_norm": 0.6240036429516576, + "learning_rate": 0.00036555891238670697, + "loss": 2.547, + "step": 121 + }, + { + "epoch": 0.011068771547813465, + "grad_norm": 0.582642119516467, + "learning_rate": 0.0003685800604229607, + "loss": 2.5208, + "step": 122 + }, + { + "epoch": 0.01115949918345128, + "grad_norm": 0.5753496754620565, + "learning_rate": 0.0003716012084592145, + "loss": 2.5211, + "step": 123 + }, + { + "epoch": 0.011250226819089094, + "grad_norm": 0.9313167262117609, + "learning_rate": 0.00037462235649546826, + "loss": 2.5815, + "step": 124 + }, + { + "epoch": 0.01134095445472691, + "grad_norm": 0.7591896194572858, + "learning_rate": 0.0003776435045317221, + "loss": 2.5287, + "step": 125 + }, + { + "epoch": 0.011431682090364725, + "grad_norm": 0.5628874173718367, + "learning_rate": 0.00038066465256797585, + "loss": 2.513, + "step": 126 + }, + { + "epoch": 0.01152240972600254, + "grad_norm": 0.7758652485710196, + "learning_rate": 0.00038368580060422965, + "loss": 2.4919, + "step": 127 + }, + { + "epoch": 0.011613137361640356, + "grad_norm": 0.5408438750913123, + "learning_rate": 0.0003867069486404834, + "loss": 2.4964, + "step": 128 + }, + { + "epoch": 0.011703864997278171, + "grad_norm": 0.677844287783369, + "learning_rate": 0.00038972809667673714, + "loss": 2.5619, + "step": 129 + }, + { + "epoch": 0.011794592632915987, + "grad_norm": 0.7000988834258626, + "learning_rate": 0.00039274924471299094, + "loss": 2.5491, + "step": 130 + }, + { + "epoch": 0.011885320268553802, + "grad_norm": 1.067329275866605, + "learning_rate": 0.0003957703927492447, + "loss": 2.5195, + "step": 131 + }, + { + "epoch": 0.011976047904191617, + "grad_norm": 1.1985177512122822, + "learning_rate": 0.00039879154078549853, + "loss": 2.5431, + "step": 132 + }, + { + "epoch": 0.012066775539829431, + "grad_norm": 0.9195055909223151, + "learning_rate": 0.0004018126888217523, + "loss": 2.4361, + "step": 133 + }, + { + "epoch": 0.012157503175467247, + "grad_norm": 0.7660514520354564, + "learning_rate": 0.0004048338368580061, + "loss": 2.4809, + "step": 134 + }, + { + "epoch": 0.012248230811105062, + "grad_norm": 0.6446979990236039, + "learning_rate": 0.0004078549848942598, + "loss": 2.4951, + "step": 135 + }, + { + "epoch": 0.012338958446742878, + "grad_norm": 0.5542395607401747, + "learning_rate": 0.0004108761329305136, + "loss": 2.4779, + "step": 136 + }, + { + "epoch": 0.012429686082380693, + "grad_norm": 0.6583880235086552, + "learning_rate": 0.00041389728096676736, + "loss": 2.4728, + "step": 137 + }, + { + "epoch": 0.012520413718018509, + "grad_norm": 0.5413560829179815, + "learning_rate": 0.00041691842900302116, + "loss": 2.4663, + "step": 138 + }, + { + "epoch": 0.012611141353656324, + "grad_norm": 0.7747629418096194, + "learning_rate": 0.00041993957703927496, + "loss": 2.4776, 
+ "step": 139 + }, + { + "epoch": 0.01270186898929414, + "grad_norm": 1.0104387872772322, + "learning_rate": 0.0004229607250755287, + "loss": 2.4665, + "step": 140 + }, + { + "epoch": 0.012792596624931955, + "grad_norm": 0.7284096418474445, + "learning_rate": 0.0004259818731117825, + "loss": 2.5531, + "step": 141 + }, + { + "epoch": 0.012883324260569769, + "grad_norm": 0.5317731674011721, + "learning_rate": 0.00042900302114803624, + "loss": 2.4803, + "step": 142 + }, + { + "epoch": 0.012974051896207584, + "grad_norm": 0.7002341735481175, + "learning_rate": 0.00043202416918429004, + "loss": 2.4819, + "step": 143 + }, + { + "epoch": 0.0130647795318454, + "grad_norm": 0.49832114838531655, + "learning_rate": 0.0004350453172205438, + "loss": 2.4821, + "step": 144 + }, + { + "epoch": 0.013155507167483215, + "grad_norm": 0.4854963123273588, + "learning_rate": 0.0004380664652567976, + "loss": 2.5354, + "step": 145 + }, + { + "epoch": 0.01324623480312103, + "grad_norm": 0.5573038990541076, + "learning_rate": 0.0004410876132930514, + "loss": 2.4318, + "step": 146 + }, + { + "epoch": 0.013336962438758846, + "grad_norm": 0.5678848515030904, + "learning_rate": 0.0004441087613293052, + "loss": 2.4679, + "step": 147 + }, + { + "epoch": 0.013427690074396662, + "grad_norm": 0.5282964559320527, + "learning_rate": 0.0004471299093655589, + "loss": 2.4327, + "step": 148 + }, + { + "epoch": 0.013518417710034477, + "grad_norm": 0.48898915173806046, + "learning_rate": 0.0004501510574018127, + "loss": 2.4818, + "step": 149 + }, + { + "epoch": 0.013609145345672292, + "grad_norm": 0.5424556633704334, + "learning_rate": 0.00045317220543806646, + "loss": 2.4471, + "step": 150 + }, + { + "epoch": 0.013699872981310108, + "grad_norm": 0.4521698764560292, + "learning_rate": 0.0004561933534743202, + "loss": 2.4864, + "step": 151 + }, + { + "epoch": 0.013790600616947922, + "grad_norm": 0.8008763588935971, + "learning_rate": 0.00045921450151057406, + "loss": 2.4484, + "step": 152 + }, + { + "epoch": 0.013881328252585737, + "grad_norm": 0.6771074237588438, + "learning_rate": 0.0004622356495468278, + "loss": 2.4557, + "step": 153 + }, + { + "epoch": 0.013972055888223553, + "grad_norm": 0.5467281614796141, + "learning_rate": 0.0004652567975830816, + "loss": 2.4381, + "step": 154 + }, + { + "epoch": 0.014062783523861368, + "grad_norm": 0.8256834526173025, + "learning_rate": 0.00046827794561933535, + "loss": 2.4661, + "step": 155 + }, + { + "epoch": 0.014153511159499184, + "grad_norm": 0.8274303432156301, + "learning_rate": 0.00047129909365558915, + "loss": 2.4212, + "step": 156 + }, + { + "epoch": 0.014244238795136999, + "grad_norm": 0.8589498526917193, + "learning_rate": 0.0004743202416918429, + "loss": 2.4208, + "step": 157 + }, + { + "epoch": 0.014334966430774814, + "grad_norm": 0.9717628909343129, + "learning_rate": 0.0004773413897280967, + "loss": 2.4471, + "step": 158 + }, + { + "epoch": 0.01442569406641263, + "grad_norm": 0.9651687075067312, + "learning_rate": 0.0004803625377643505, + "loss": 2.4723, + "step": 159 + }, + { + "epoch": 0.014516421702050445, + "grad_norm": 0.8184905616770842, + "learning_rate": 0.0004833836858006043, + "loss": 2.4456, + "step": 160 + }, + { + "epoch": 0.014607149337688259, + "grad_norm": 0.6953145152630783, + "learning_rate": 0.00048640483383685803, + "loss": 2.4549, + "step": 161 + }, + { + "epoch": 0.014697876973326075, + "grad_norm": 0.7590874772769336, + "learning_rate": 0.0004894259818731117, + "loss": 2.4392, + "step": 162 + }, + { + "epoch": 0.01478860460896389, + "grad_norm": 
0.6103491423622767, + "learning_rate": 0.0004924471299093656, + "loss": 2.4263, + "step": 163 + }, + { + "epoch": 0.014879332244601706, + "grad_norm": 0.5669341936189741, + "learning_rate": 0.0004954682779456193, + "loss": 2.4453, + "step": 164 + }, + { + "epoch": 0.014970059880239521, + "grad_norm": 0.5979576753016163, + "learning_rate": 0.0004984894259818731, + "loss": 2.4195, + "step": 165 + }, + { + "epoch": 0.015060787515877336, + "grad_norm": 0.5535645024415345, + "learning_rate": 0.0005015105740181269, + "loss": 2.4522, + "step": 166 + }, + { + "epoch": 0.015151515151515152, + "grad_norm": 0.6626263117921857, + "learning_rate": 0.0005045317220543807, + "loss": 2.4409, + "step": 167 + }, + { + "epoch": 0.015242242787152967, + "grad_norm": 0.6757483450401617, + "learning_rate": 0.0005075528700906344, + "loss": 2.4102, + "step": 168 + }, + { + "epoch": 0.015332970422790783, + "grad_norm": 0.5627034390891557, + "learning_rate": 0.0005105740181268882, + "loss": 2.4307, + "step": 169 + }, + { + "epoch": 0.015423698058428597, + "grad_norm": 0.6271557755189119, + "learning_rate": 0.0005135951661631421, + "loss": 2.4154, + "step": 170 + }, + { + "epoch": 0.015514425694066412, + "grad_norm": 0.621636953792065, + "learning_rate": 0.0005166163141993958, + "loss": 2.4228, + "step": 171 + }, + { + "epoch": 0.015605153329704228, + "grad_norm": 0.4140047999633023, + "learning_rate": 0.0005196374622356496, + "loss": 2.4163, + "step": 172 + }, + { + "epoch": 0.015695880965342045, + "grad_norm": 0.6891744671585747, + "learning_rate": 0.0005226586102719033, + "loss": 2.3522, + "step": 173 + }, + { + "epoch": 0.015786608600979857, + "grad_norm": 0.5690677182841243, + "learning_rate": 0.0005256797583081571, + "loss": 2.3539, + "step": 174 + }, + { + "epoch": 0.015877336236617672, + "grad_norm": 0.6950366293789482, + "learning_rate": 0.0005287009063444109, + "loss": 2.373, + "step": 175 + }, + { + "epoch": 0.015968063872255488, + "grad_norm": 0.6801270351397959, + "learning_rate": 0.0005317220543806647, + "loss": 2.4021, + "step": 176 + }, + { + "epoch": 0.016058791507893303, + "grad_norm": 0.4945206707994025, + "learning_rate": 0.0005347432024169185, + "loss": 2.4184, + "step": 177 + }, + { + "epoch": 0.01614951914353112, + "grad_norm": 0.5357830310865889, + "learning_rate": 0.0005377643504531722, + "loss": 2.3593, + "step": 178 + }, + { + "epoch": 0.016240246779168934, + "grad_norm": 0.5716749127361722, + "learning_rate": 0.0005407854984894261, + "loss": 2.3743, + "step": 179 + }, + { + "epoch": 0.01633097441480675, + "grad_norm": 0.47914111094969436, + "learning_rate": 0.0005438066465256798, + "loss": 2.3739, + "step": 180 + }, + { + "epoch": 0.016421702050444565, + "grad_norm": 0.5288928392735144, + "learning_rate": 0.0005468277945619336, + "loss": 2.3651, + "step": 181 + }, + { + "epoch": 0.01651242968608238, + "grad_norm": 0.5601604602199268, + "learning_rate": 0.0005498489425981872, + "loss": 2.379, + "step": 182 + }, + { + "epoch": 0.016603157321720196, + "grad_norm": 0.8688511893991975, + "learning_rate": 0.000552870090634441, + "loss": 2.3846, + "step": 183 + }, + { + "epoch": 0.01669388495735801, + "grad_norm": 0.8940099453409158, + "learning_rate": 0.000555891238670695, + "loss": 2.3492, + "step": 184 + }, + { + "epoch": 0.016784612592995827, + "grad_norm": 0.650452530754241, + "learning_rate": 0.0005589123867069486, + "loss": 2.3804, + "step": 185 + }, + { + "epoch": 0.016875340228633642, + "grad_norm": 0.6188001629945242, + "learning_rate": 0.0005619335347432024, + "loss": 2.3856, + "step": 
186 + }, + { + "epoch": 0.016966067864271458, + "grad_norm": 0.5869781751235307, + "learning_rate": 0.0005649546827794561, + "loss": 2.4115, + "step": 187 + }, + { + "epoch": 0.017056795499909273, + "grad_norm": 0.6414995540205674, + "learning_rate": 0.00056797583081571, + "loss": 2.3395, + "step": 188 + }, + { + "epoch": 0.01714752313554709, + "grad_norm": 0.723495119187912, + "learning_rate": 0.0005709969788519637, + "loss": 2.3504, + "step": 189 + }, + { + "epoch": 0.017238250771184904, + "grad_norm": 0.747114477700804, + "learning_rate": 0.0005740181268882175, + "loss": 2.387, + "step": 190 + }, + { + "epoch": 0.01732897840682272, + "grad_norm": 0.5861648177223598, + "learning_rate": 0.0005770392749244713, + "loss": 2.3813, + "step": 191 + }, + { + "epoch": 0.017419706042460535, + "grad_norm": 0.588679085156933, + "learning_rate": 0.0005800604229607251, + "loss": 2.4155, + "step": 192 + }, + { + "epoch": 0.017510433678098347, + "grad_norm": 0.787971901538545, + "learning_rate": 0.0005830815709969789, + "loss": 2.4021, + "step": 193 + }, + { + "epoch": 0.017601161313736163, + "grad_norm": 0.5608126805015572, + "learning_rate": 0.0005861027190332326, + "loss": 2.3502, + "step": 194 + }, + { + "epoch": 0.017691888949373978, + "grad_norm": 0.6741244530305053, + "learning_rate": 0.0005891238670694864, + "loss": 2.3482, + "step": 195 + }, + { + "epoch": 0.017782616585011794, + "grad_norm": 0.5198600061225135, + "learning_rate": 0.0005921450151057401, + "loss": 2.3699, + "step": 196 + }, + { + "epoch": 0.01787334422064961, + "grad_norm": 0.5963963558459768, + "learning_rate": 0.000595166163141994, + "loss": 2.3016, + "step": 197 + }, + { + "epoch": 0.017964071856287425, + "grad_norm": 0.6976966035681433, + "learning_rate": 0.0005981873111782478, + "loss": 2.3475, + "step": 198 + }, + { + "epoch": 0.01805479949192524, + "grad_norm": 0.6211202749743395, + "learning_rate": 0.0006012084592145015, + "loss": 2.3466, + "step": 199 + }, + { + "epoch": 0.018145527127563055, + "grad_norm": 0.6643507739069034, + "learning_rate": 0.0006042296072507553, + "loss": 2.3369, + "step": 200 + }, + { + "epoch": 0.01823625476320087, + "grad_norm": 0.5034435320194071, + "learning_rate": 0.0006072507552870091, + "loss": 2.3274, + "step": 201 + }, + { + "epoch": 0.018326982398838686, + "grad_norm": 0.7336320301974716, + "learning_rate": 0.0006102719033232629, + "loss": 2.3642, + "step": 202 + }, + { + "epoch": 0.018417710034476502, + "grad_norm": 0.5606098667442464, + "learning_rate": 0.0006132930513595167, + "loss": 2.342, + "step": 203 + }, + { + "epoch": 0.018508437670114317, + "grad_norm": 0.577167043889983, + "learning_rate": 0.0006163141993957704, + "loss": 2.2939, + "step": 204 + }, + { + "epoch": 0.018599165305752133, + "grad_norm": 0.7791281547071632, + "learning_rate": 0.0006193353474320242, + "loss": 2.3185, + "step": 205 + }, + { + "epoch": 0.018689892941389948, + "grad_norm": 0.7342233769824503, + "learning_rate": 0.000622356495468278, + "loss": 2.3192, + "step": 206 + }, + { + "epoch": 0.018780620577027764, + "grad_norm": 0.8258867877923646, + "learning_rate": 0.0006253776435045318, + "loss": 2.3005, + "step": 207 + }, + { + "epoch": 0.01887134821266558, + "grad_norm": 0.9987906869627111, + "learning_rate": 0.0006283987915407855, + "loss": 2.3331, + "step": 208 + }, + { + "epoch": 0.018962075848303395, + "grad_norm": 1.0101688017432933, + "learning_rate": 0.0006314199395770393, + "loss": 2.3925, + "step": 209 + }, + { + "epoch": 0.01905280348394121, + "grad_norm": 1.0360941032905877, + "learning_rate": 
0.0006344410876132932, + "loss": 2.393, + "step": 210 + }, + { + "epoch": 0.019143531119579022, + "grad_norm": 0.7857175856644436, + "learning_rate": 0.0006374622356495468, + "loss": 2.3394, + "step": 211 + }, + { + "epoch": 0.019234258755216838, + "grad_norm": 0.8656947653790984, + "learning_rate": 0.0006404833836858006, + "loss": 2.3574, + "step": 212 + }, + { + "epoch": 0.019324986390854653, + "grad_norm": 0.6017082279808256, + "learning_rate": 0.0006435045317220543, + "loss": 2.2926, + "step": 213 + }, + { + "epoch": 0.01941571402649247, + "grad_norm": 0.6812412761791427, + "learning_rate": 0.0006465256797583081, + "loss": 2.3077, + "step": 214 + }, + { + "epoch": 0.019506441662130284, + "grad_norm": 0.6384436704259314, + "learning_rate": 0.0006495468277945619, + "loss": 2.3439, + "step": 215 + }, + { + "epoch": 0.0195971692977681, + "grad_norm": 0.7342962375464711, + "learning_rate": 0.0006525679758308157, + "loss": 2.3269, + "step": 216 + }, + { + "epoch": 0.019687896933405915, + "grad_norm": 0.637555832851946, + "learning_rate": 0.0006555891238670695, + "loss": 2.3086, + "step": 217 + }, + { + "epoch": 0.01977862456904373, + "grad_norm": 0.5611559197683458, + "learning_rate": 0.0006586102719033232, + "loss": 2.2726, + "step": 218 + }, + { + "epoch": 0.019869352204681546, + "grad_norm": 0.5820240574448329, + "learning_rate": 0.0006616314199395771, + "loss": 2.2962, + "step": 219 + }, + { + "epoch": 0.01996007984031936, + "grad_norm": 0.5257231508077697, + "learning_rate": 0.0006646525679758308, + "loss": 2.303, + "step": 220 + }, + { + "epoch": 0.020050807475957177, + "grad_norm": 0.47009407198159375, + "learning_rate": 0.0006676737160120846, + "loss": 2.3242, + "step": 221 + }, + { + "epoch": 0.020141535111594992, + "grad_norm": 0.4973530413228806, + "learning_rate": 0.0006706948640483383, + "loss": 2.2875, + "step": 222 + }, + { + "epoch": 0.020232262747232808, + "grad_norm": 0.4438187271127681, + "learning_rate": 0.0006737160120845922, + "loss": 2.2442, + "step": 223 + }, + { + "epoch": 0.020322990382870623, + "grad_norm": 0.5818951188170491, + "learning_rate": 0.000676737160120846, + "loss": 2.2513, + "step": 224 + }, + { + "epoch": 0.02041371801850844, + "grad_norm": 0.5691346210422503, + "learning_rate": 0.0006797583081570997, + "loss": 2.2324, + "step": 225 + }, + { + "epoch": 0.020504445654146254, + "grad_norm": 0.6786912923654439, + "learning_rate": 0.0006827794561933535, + "loss": 2.2752, + "step": 226 + }, + { + "epoch": 0.02059517328978407, + "grad_norm": 0.5833861759143858, + "learning_rate": 0.0006858006042296072, + "loss": 2.2717, + "step": 227 + }, + { + "epoch": 0.020685900925421885, + "grad_norm": 0.5318670050776141, + "learning_rate": 0.0006888217522658611, + "loss": 2.2416, + "step": 228 + }, + { + "epoch": 0.020776628561059697, + "grad_norm": 0.5089936446606753, + "learning_rate": 0.0006918429003021148, + "loss": 2.1942, + "step": 229 + }, + { + "epoch": 0.020867356196697513, + "grad_norm": 0.5449233901735893, + "learning_rate": 0.0006948640483383686, + "loss": 2.2827, + "step": 230 + }, + { + "epoch": 0.020958083832335328, + "grad_norm": 0.5711186028659666, + "learning_rate": 0.0006978851963746224, + "loss": 2.1881, + "step": 231 + }, + { + "epoch": 0.021048811467973143, + "grad_norm": 0.6750486128491063, + "learning_rate": 0.0007009063444108762, + "loss": 2.2068, + "step": 232 + }, + { + "epoch": 0.02113953910361096, + "grad_norm": 0.4943356853831513, + "learning_rate": 0.00070392749244713, + "loss": 2.2549, + "step": 233 + }, + { + "epoch": 
0.021230266739248774, + "grad_norm": 0.5616706691369335, + "learning_rate": 0.0007069486404833837, + "loss": 2.2175, + "step": 234 + }, + { + "epoch": 0.02132099437488659, + "grad_norm": 0.6153092780392164, + "learning_rate": 0.0007099697885196375, + "loss": 2.1888, + "step": 235 + }, + { + "epoch": 0.021411722010524405, + "grad_norm": 0.7992975498726147, + "learning_rate": 0.0007129909365558912, + "loss": 2.2202, + "step": 236 + }, + { + "epoch": 0.02150244964616222, + "grad_norm": 0.9596616247214971, + "learning_rate": 0.0007160120845921451, + "loss": 2.1961, + "step": 237 + }, + { + "epoch": 0.021593177281800036, + "grad_norm": 0.8513247535642892, + "learning_rate": 0.0007190332326283989, + "loss": 2.2198, + "step": 238 + }, + { + "epoch": 0.021683904917437852, + "grad_norm": 0.5754926816746111, + "learning_rate": 0.0007220543806646525, + "loss": 2.2513, + "step": 239 + }, + { + "epoch": 0.021774632553075667, + "grad_norm": 0.68767663236124, + "learning_rate": 0.0007250755287009063, + "loss": 2.2178, + "step": 240 + }, + { + "epoch": 0.021865360188713483, + "grad_norm": 0.8579705149558354, + "learning_rate": 0.0007280966767371601, + "loss": 2.1771, + "step": 241 + }, + { + "epoch": 0.021956087824351298, + "grad_norm": 0.90693852599214, + "learning_rate": 0.0007311178247734139, + "loss": 2.1972, + "step": 242 + }, + { + "epoch": 0.022046815459989114, + "grad_norm": 0.543613732372515, + "learning_rate": 0.0007341389728096676, + "loss": 2.2563, + "step": 243 + }, + { + "epoch": 0.02213754309562693, + "grad_norm": 0.6254700199811167, + "learning_rate": 0.0007371601208459214, + "loss": 2.1877, + "step": 244 + }, + { + "epoch": 0.022228270731264745, + "grad_norm": 0.5360491094279549, + "learning_rate": 0.0007401812688821753, + "loss": 2.1814, + "step": 245 + }, + { + "epoch": 0.02231899836690256, + "grad_norm": 0.6132470158692513, + "learning_rate": 0.000743202416918429, + "loss": 2.1703, + "step": 246 + }, + { + "epoch": 0.022409726002540376, + "grad_norm": 0.6673133892307503, + "learning_rate": 0.0007462235649546828, + "loss": 2.1779, + "step": 247 + }, + { + "epoch": 0.022500453638178187, + "grad_norm": 0.5842114786786279, + "learning_rate": 0.0007492447129909365, + "loss": 2.1801, + "step": 248 + }, + { + "epoch": 0.022591181273816003, + "grad_norm": 0.5372852138471861, + "learning_rate": 0.0007522658610271903, + "loss": 2.2113, + "step": 249 + }, + { + "epoch": 0.02268190890945382, + "grad_norm": 0.5600438407679923, + "learning_rate": 0.0007552870090634442, + "loss": 2.2009, + "step": 250 + }, + { + "epoch": 0.022772636545091634, + "grad_norm": 0.5288195941386873, + "learning_rate": 0.0007583081570996979, + "loss": 2.1913, + "step": 251 + }, + { + "epoch": 0.02286336418072945, + "grad_norm": 0.5002198808712656, + "learning_rate": 0.0007613293051359517, + "loss": 2.1967, + "step": 252 + }, + { + "epoch": 0.022954091816367265, + "grad_norm": 0.5532590131422718, + "learning_rate": 0.0007643504531722054, + "loss": 2.1579, + "step": 253 + }, + { + "epoch": 0.02304481945200508, + "grad_norm": 0.5055319528243327, + "learning_rate": 0.0007673716012084593, + "loss": 2.1097, + "step": 254 + }, + { + "epoch": 0.023135547087642896, + "grad_norm": 0.5315822082709729, + "learning_rate": 0.000770392749244713, + "loss": 2.1344, + "step": 255 + }, + { + "epoch": 0.02322627472328071, + "grad_norm": 0.4796046783968701, + "learning_rate": 0.0007734138972809668, + "loss": 2.1599, + "step": 256 + }, + { + "epoch": 0.023317002358918527, + "grad_norm": 0.5024103915001817, + "learning_rate": 
0.0007764350453172206, + "loss": 2.1332, + "step": 257 + }, + { + "epoch": 0.023407729994556342, + "grad_norm": 0.4442022770177176, + "learning_rate": 0.0007794561933534743, + "loss": 2.16, + "step": 258 + }, + { + "epoch": 0.023498457630194158, + "grad_norm": 0.4848034364213125, + "learning_rate": 0.0007824773413897282, + "loss": 2.0962, + "step": 259 + }, + { + "epoch": 0.023589185265831973, + "grad_norm": 0.4957795777057599, + "learning_rate": 0.0007854984894259819, + "loss": 2.0772, + "step": 260 + }, + { + "epoch": 0.02367991290146979, + "grad_norm": 0.4834878638130147, + "learning_rate": 0.0007885196374622357, + "loss": 2.0958, + "step": 261 + }, + { + "epoch": 0.023770640537107604, + "grad_norm": 0.5867617912401093, + "learning_rate": 0.0007915407854984894, + "loss": 2.1282, + "step": 262 + }, + { + "epoch": 0.02386136817274542, + "grad_norm": 0.535133799295786, + "learning_rate": 0.0007945619335347433, + "loss": 2.0774, + "step": 263 + }, + { + "epoch": 0.023952095808383235, + "grad_norm": 0.5636685531798798, + "learning_rate": 0.0007975830815709971, + "loss": 2.1162, + "step": 264 + }, + { + "epoch": 0.02404282344402105, + "grad_norm": 0.4867671133984367, + "learning_rate": 0.0008006042296072508, + "loss": 2.1359, + "step": 265 + }, + { + "epoch": 0.024133551079658862, + "grad_norm": 0.4905529159637333, + "learning_rate": 0.0008036253776435046, + "loss": 2.1025, + "step": 266 + }, + { + "epoch": 0.024224278715296678, + "grad_norm": 0.5320754453091653, + "learning_rate": 0.0008066465256797582, + "loss": 2.1116, + "step": 267 + }, + { + "epoch": 0.024315006350934493, + "grad_norm": 0.5622697424261351, + "learning_rate": 0.0008096676737160121, + "loss": 2.0589, + "step": 268 + }, + { + "epoch": 0.02440573398657231, + "grad_norm": 0.49951568071563185, + "learning_rate": 0.0008126888217522658, + "loss": 2.0959, + "step": 269 + }, + { + "epoch": 0.024496461622210124, + "grad_norm": 0.6579203449492894, + "learning_rate": 0.0008157099697885196, + "loss": 2.1517, + "step": 270 + }, + { + "epoch": 0.02458718925784794, + "grad_norm": 0.6132411303865529, + "learning_rate": 0.0008187311178247734, + "loss": 2.1377, + "step": 271 + }, + { + "epoch": 0.024677916893485755, + "grad_norm": 0.5865452303919947, + "learning_rate": 0.0008217522658610272, + "loss": 2.0525, + "step": 272 + }, + { + "epoch": 0.02476864452912357, + "grad_norm": 0.46123992530657093, + "learning_rate": 0.000824773413897281, + "loss": 2.164, + "step": 273 + }, + { + "epoch": 0.024859372164761386, + "grad_norm": 0.5980790491881796, + "learning_rate": 0.0008277945619335347, + "loss": 2.0528, + "step": 274 + }, + { + "epoch": 0.0249500998003992, + "grad_norm": 0.6586701250739158, + "learning_rate": 0.0008308157099697885, + "loss": 2.0899, + "step": 275 + }, + { + "epoch": 0.025040827436037017, + "grad_norm": 0.6081939632561792, + "learning_rate": 0.0008338368580060423, + "loss": 2.0684, + "step": 276 + }, + { + "epoch": 0.025131555071674833, + "grad_norm": 0.5482836237412636, + "learning_rate": 0.0008368580060422961, + "loss": 2.0664, + "step": 277 + }, + { + "epoch": 0.025222282707312648, + "grad_norm": 0.523504614354313, + "learning_rate": 0.0008398791540785499, + "loss": 2.1036, + "step": 278 + }, + { + "epoch": 0.025313010342950464, + "grad_norm": 0.5770276935976466, + "learning_rate": 0.0008429003021148036, + "loss": 2.0883, + "step": 279 + }, + { + "epoch": 0.02540373797858828, + "grad_norm": 0.5750816252845802, + "learning_rate": 0.0008459214501510574, + "loss": 2.0763, + "step": 280 + }, + { + "epoch": 
0.025494465614226094, + "grad_norm": 0.49821978251583754, + "learning_rate": 0.0008489425981873112, + "loss": 2.0951, + "step": 281 + }, + { + "epoch": 0.02558519324986391, + "grad_norm": 0.4721455703842688, + "learning_rate": 0.000851963746223565, + "loss": 2.0769, + "step": 282 + }, + { + "epoch": 0.025675920885501725, + "grad_norm": 0.5147130400438253, + "learning_rate": 0.0008549848942598187, + "loss": 2.0323, + "step": 283 + }, + { + "epoch": 0.025766648521139537, + "grad_norm": 0.5849276030275439, + "learning_rate": 0.0008580060422960725, + "loss": 2.118, + "step": 284 + }, + { + "epoch": 0.025857376156777353, + "grad_norm": 0.46403017794204576, + "learning_rate": 0.0008610271903323264, + "loss": 2.0749, + "step": 285 + }, + { + "epoch": 0.02594810379241517, + "grad_norm": 0.522320080706092, + "learning_rate": 0.0008640483383685801, + "loss": 2.0718, + "step": 286 + }, + { + "epoch": 0.026038831428052984, + "grad_norm": 0.577267418894682, + "learning_rate": 0.0008670694864048339, + "loss": 2.0597, + "step": 287 + }, + { + "epoch": 0.0261295590636908, + "grad_norm": 0.4432326212472034, + "learning_rate": 0.0008700906344410876, + "loss": 2.0451, + "step": 288 + }, + { + "epoch": 0.026220286699328615, + "grad_norm": 0.5674633492389448, + "learning_rate": 0.0008731117824773414, + "loss": 2.045, + "step": 289 + }, + { + "epoch": 0.02631101433496643, + "grad_norm": 0.5240796745724537, + "learning_rate": 0.0008761329305135952, + "loss": 2.035, + "step": 290 + }, + { + "epoch": 0.026401741970604246, + "grad_norm": 0.41865737731877967, + "learning_rate": 0.000879154078549849, + "loss": 2.0358, + "step": 291 + }, + { + "epoch": 0.02649246960624206, + "grad_norm": 0.5936971997047393, + "learning_rate": 0.0008821752265861028, + "loss": 2.0505, + "step": 292 + }, + { + "epoch": 0.026583197241879877, + "grad_norm": 0.5901811903439044, + "learning_rate": 0.0008851963746223565, + "loss": 2.0893, + "step": 293 + }, + { + "epoch": 0.026673924877517692, + "grad_norm": 0.5054670345603025, + "learning_rate": 0.0008882175226586104, + "loss": 2.1003, + "step": 294 + }, + { + "epoch": 0.026764652513155508, + "grad_norm": 0.4957899032787115, + "learning_rate": 0.000891238670694864, + "loss": 2.0563, + "step": 295 + }, + { + "epoch": 0.026855380148793323, + "grad_norm": 0.5269766293799505, + "learning_rate": 0.0008942598187311178, + "loss": 2.0326, + "step": 296 + }, + { + "epoch": 0.02694610778443114, + "grad_norm": 0.5002740011267993, + "learning_rate": 0.0008972809667673715, + "loss": 2.0051, + "step": 297 + }, + { + "epoch": 0.027036835420068954, + "grad_norm": 0.4772186260326764, + "learning_rate": 0.0009003021148036254, + "loss": 2.0397, + "step": 298 + }, + { + "epoch": 0.02712756305570677, + "grad_norm": 0.5276577064262151, + "learning_rate": 0.0009033232628398792, + "loss": 2.0441, + "step": 299 + }, + { + "epoch": 0.027218290691344585, + "grad_norm": 0.5516854357208114, + "learning_rate": 0.0009063444108761329, + "loss": 2.0478, + "step": 300 + }, + { + "epoch": 0.0273090183269824, + "grad_norm": 0.5577105650499187, + "learning_rate": 0.0009093655589123867, + "loss": 2.0374, + "step": 301 + }, + { + "epoch": 0.027399745962620216, + "grad_norm": 0.4348487280838952, + "learning_rate": 0.0009123867069486404, + "loss": 1.9957, + "step": 302 + }, + { + "epoch": 0.027490473598258028, + "grad_norm": 0.5275942821657473, + "learning_rate": 0.0009154078549848943, + "loss": 2.0316, + "step": 303 + }, + { + "epoch": 0.027581201233895843, + "grad_norm": 0.44148362829011245, + "learning_rate": 
0.0009184290030211481, + "loss": 1.9902, + "step": 304 + }, + { + "epoch": 0.02767192886953366, + "grad_norm": 0.43836776925996657, + "learning_rate": 0.0009214501510574018, + "loss": 1.9677, + "step": 305 + }, + { + "epoch": 0.027762656505171474, + "grad_norm": 0.5058321690035206, + "learning_rate": 0.0009244712990936556, + "loss": 2.0426, + "step": 306 + }, + { + "epoch": 0.02785338414080929, + "grad_norm": 0.4336135102898531, + "learning_rate": 0.0009274924471299094, + "loss": 1.9732, + "step": 307 + }, + { + "epoch": 0.027944111776447105, + "grad_norm": 0.43845224142452416, + "learning_rate": 0.0009305135951661632, + "loss": 2.0155, + "step": 308 + }, + { + "epoch": 0.02803483941208492, + "grad_norm": 0.43940018685969623, + "learning_rate": 0.0009335347432024169, + "loss": 1.9539, + "step": 309 + }, + { + "epoch": 0.028125567047722736, + "grad_norm": 0.4499271321990757, + "learning_rate": 0.0009365558912386707, + "loss": 1.9665, + "step": 310 + }, + { + "epoch": 0.02821629468336055, + "grad_norm": 0.40533499717515126, + "learning_rate": 0.0009395770392749245, + "loss": 1.9804, + "step": 311 + }, + { + "epoch": 0.028307022318998367, + "grad_norm": 0.44754588507708937, + "learning_rate": 0.0009425981873111783, + "loss": 1.9989, + "step": 312 + }, + { + "epoch": 0.028397749954636183, + "grad_norm": 0.396985150752333, + "learning_rate": 0.0009456193353474321, + "loss": 1.9737, + "step": 313 + }, + { + "epoch": 0.028488477590273998, + "grad_norm": 0.4343714613959068, + "learning_rate": 0.0009486404833836858, + "loss": 1.9636, + "step": 314 + }, + { + "epoch": 0.028579205225911813, + "grad_norm": 0.4356518230094401, + "learning_rate": 0.0009516616314199396, + "loss": 1.9947, + "step": 315 + }, + { + "epoch": 0.02866993286154963, + "grad_norm": 0.45589022615331964, + "learning_rate": 0.0009546827794561934, + "loss": 1.986, + "step": 316 + }, + { + "epoch": 0.028760660497187444, + "grad_norm": 0.4463402265529145, + "learning_rate": 0.0009577039274924472, + "loss": 1.9813, + "step": 317 + }, + { + "epoch": 0.02885138813282526, + "grad_norm": 0.5162481298891238, + "learning_rate": 0.000960725075528701, + "loss": 1.9592, + "step": 318 + }, + { + "epoch": 0.028942115768463075, + "grad_norm": 0.42808068165104296, + "learning_rate": 0.0009637462235649547, + "loss": 1.957, + "step": 319 + }, + { + "epoch": 0.02903284340410089, + "grad_norm": 0.40027699182610593, + "learning_rate": 0.0009667673716012086, + "loss": 1.9817, + "step": 320 + }, + { + "epoch": 0.029123571039738703, + "grad_norm": 0.4567597156160375, + "learning_rate": 0.0009697885196374623, + "loss": 2.014, + "step": 321 + }, + { + "epoch": 0.029214298675376518, + "grad_norm": 0.390434669665759, + "learning_rate": 0.0009728096676737161, + "loss": 2.0038, + "step": 322 + }, + { + "epoch": 0.029305026311014334, + "grad_norm": 0.42720301988286213, + "learning_rate": 0.0009758308157099697, + "loss": 1.9757, + "step": 323 + }, + { + "epoch": 0.02939575394665215, + "grad_norm": 0.37902577569849666, + "learning_rate": 0.0009788519637462234, + "loss": 1.9794, + "step": 324 + }, + { + "epoch": 0.029486481582289965, + "grad_norm": 0.4404861081866576, + "learning_rate": 0.0009818731117824774, + "loss": 2.0327, + "step": 325 + }, + { + "epoch": 0.02957720921792778, + "grad_norm": 0.3904077779834119, + "learning_rate": 0.0009848942598187312, + "loss": 1.9842, + "step": 326 + }, + { + "epoch": 0.029667936853565596, + "grad_norm": 0.4283273664775541, + "learning_rate": 0.0009879154078549848, + "loss": 2.0077, + "step": 327 + }, + { + "epoch": 
0.02975866448920341, + "grad_norm": 0.36916686743242594, + "learning_rate": 0.0009909365558912386, + "loss": 1.9756, + "step": 328 + }, + { + "epoch": 0.029849392124841227, + "grad_norm": 0.4499574171776433, + "learning_rate": 0.0009939577039274924, + "loss": 1.9934, + "step": 329 + }, + { + "epoch": 0.029940119760479042, + "grad_norm": 0.38895347899397625, + "learning_rate": 0.0009969788519637462, + "loss": 1.9716, + "step": 330 + }, + { + "epoch": 0.030030847396116857, + "grad_norm": 0.4055102212953553, + "learning_rate": 0.001, + "loss": 1.8954, + "step": 331 + }, + { + "epoch": 0.030121575031754673, + "grad_norm": 0.4416908810257114, + "learning_rate": 0.0009999999784124631, + "loss": 1.9537, + "step": 332 + }, + { + "epoch": 0.03021230266739249, + "grad_norm": 0.39991107279069854, + "learning_rate": 0.0009999999136498545, + "loss": 1.9614, + "step": 333 + }, + { + "epoch": 0.030303030303030304, + "grad_norm": 0.39838291155638655, + "learning_rate": 0.0009999998057121796, + "loss": 1.9766, + "step": 334 + }, + { + "epoch": 0.03039375793866812, + "grad_norm": 0.4187690942553008, + "learning_rate": 0.0009999996545994477, + "loss": 1.9834, + "step": 335 + }, + { + "epoch": 0.030484485574305935, + "grad_norm": 0.42669623819731123, + "learning_rate": 0.000999999460311672, + "loss": 1.9794, + "step": 336 + }, + { + "epoch": 0.03057521320994375, + "grad_norm": 0.5199053148615455, + "learning_rate": 0.0009999992228488692, + "loss": 1.9087, + "step": 337 + }, + { + "epoch": 0.030665940845581566, + "grad_norm": 0.4045631263572141, + "learning_rate": 0.0009999989422110598, + "loss": 1.9214, + "step": 338 + }, + { + "epoch": 0.030756668481219378, + "grad_norm": 0.4059641042076279, + "learning_rate": 0.0009999986183982681, + "loss": 1.9346, + "step": 339 + }, + { + "epoch": 0.030847396116857193, + "grad_norm": 0.4362056700772869, + "learning_rate": 0.0009999982514105222, + "loss": 1.9538, + "step": 340 + }, + { + "epoch": 0.03093812375249501, + "grad_norm": 0.4416494946193273, + "learning_rate": 0.0009999978412478531, + "loss": 1.9927, + "step": 341 + }, + { + "epoch": 0.031028851388132824, + "grad_norm": 0.3917045458266857, + "learning_rate": 0.000999997387910297, + "loss": 1.9468, + "step": 342 + }, + { + "epoch": 0.03111957902377064, + "grad_norm": 0.45698856212391603, + "learning_rate": 0.000999996891397893, + "loss": 1.9015, + "step": 343 + }, + { + "epoch": 0.031210306659408455, + "grad_norm": 0.4577634640049926, + "learning_rate": 0.0009999963517106833, + "loss": 1.9312, + "step": 344 + }, + { + "epoch": 0.03130103429504627, + "grad_norm": 0.3895297349123135, + "learning_rate": 0.0009999957688487154, + "loss": 1.9496, + "step": 345 + }, + { + "epoch": 0.03139176193068409, + "grad_norm": 0.4260547118499621, + "learning_rate": 0.000999995142812039, + "loss": 1.9622, + "step": 346 + }, + { + "epoch": 0.0314824895663219, + "grad_norm": 0.37615311208649993, + "learning_rate": 0.000999994473600708, + "loss": 1.9606, + "step": 347 + }, + { + "epoch": 0.031573217201959713, + "grad_norm": 0.4254437177501538, + "learning_rate": 0.0009999937612147807, + "loss": 1.9551, + "step": 348 + }, + { + "epoch": 0.03166394483759753, + "grad_norm": 0.39547547977877895, + "learning_rate": 0.0009999930056543184, + "loss": 1.91, + "step": 349 + }, + { + "epoch": 0.031754672473235344, + "grad_norm": 0.4016618048065583, + "learning_rate": 0.0009999922069193865, + "loss": 1.9462, + "step": 350 + }, + { + "epoch": 0.03184540010887316, + "grad_norm": 0.44110359669054783, + "learning_rate": 0.0009999913650100539, + 
"loss": 1.9442, + "step": 351 + }, + { + "epoch": 0.031936127744510975, + "grad_norm": 0.381766385474043, + "learning_rate": 0.000999990479926393, + "loss": 1.8987, + "step": 352 + }, + { + "epoch": 0.032026855380148794, + "grad_norm": 0.44167390484903063, + "learning_rate": 0.0009999895516684808, + "loss": 1.9366, + "step": 353 + }, + { + "epoch": 0.032117583015786606, + "grad_norm": 0.3527716775941824, + "learning_rate": 0.0009999885802363967, + "loss": 1.8896, + "step": 354 + }, + { + "epoch": 0.032208310651424425, + "grad_norm": 0.46738178772704597, + "learning_rate": 0.0009999875656302253, + "loss": 1.9485, + "step": 355 + }, + { + "epoch": 0.03229903828706224, + "grad_norm": 0.3761950375421787, + "learning_rate": 0.000999986507850054, + "loss": 1.9399, + "step": 356 + }, + { + "epoch": 0.032389765922700056, + "grad_norm": 0.4285506320668918, + "learning_rate": 0.0009999854068959736, + "loss": 1.9734, + "step": 357 + }, + { + "epoch": 0.03248049355833787, + "grad_norm": 0.35367114580009934, + "learning_rate": 0.00099998426276808, + "loss": 1.9003, + "step": 358 + }, + { + "epoch": 0.03257122119397569, + "grad_norm": 0.3911508356140937, + "learning_rate": 0.0009999830754664717, + "loss": 1.9285, + "step": 359 + }, + { + "epoch": 0.0326619488296135, + "grad_norm": 0.37343769944960115, + "learning_rate": 0.0009999818449912508, + "loss": 1.9531, + "step": 360 + }, + { + "epoch": 0.03275267646525132, + "grad_norm": 0.3636648050245496, + "learning_rate": 0.0009999805713425242, + "loss": 1.8838, + "step": 361 + }, + { + "epoch": 0.03284340410088913, + "grad_norm": 0.3781202785526687, + "learning_rate": 0.0009999792545204013, + "loss": 1.8983, + "step": 362 + }, + { + "epoch": 0.03293413173652695, + "grad_norm": 0.39419618634429304, + "learning_rate": 0.0009999778945249964, + "loss": 1.9198, + "step": 363 + }, + { + "epoch": 0.03302485937216476, + "grad_norm": 0.3807172301610997, + "learning_rate": 0.0009999764913564263, + "loss": 1.9524, + "step": 364 + }, + { + "epoch": 0.03311558700780258, + "grad_norm": 0.3555644479387886, + "learning_rate": 0.0009999750450148128, + "loss": 1.9165, + "step": 365 + }, + { + "epoch": 0.03320631464344039, + "grad_norm": 0.3531266756315637, + "learning_rate": 0.0009999735555002802, + "loss": 1.8925, + "step": 366 + }, + { + "epoch": 0.033297042279078204, + "grad_norm": 0.3236203471982404, + "learning_rate": 0.0009999720228129577, + "loss": 1.9264, + "step": 367 + }, + { + "epoch": 0.03338776991471602, + "grad_norm": 0.3585041005623553, + "learning_rate": 0.000999970446952977, + "loss": 1.9314, + "step": 368 + }, + { + "epoch": 0.033478497550353835, + "grad_norm": 0.3251639298647015, + "learning_rate": 0.0009999688279204745, + "loss": 1.9454, + "step": 369 + }, + { + "epoch": 0.033569225185991654, + "grad_norm": 0.32717860361469353, + "learning_rate": 0.0009999671657155904, + "loss": 1.9261, + "step": 370 + }, + { + "epoch": 0.033659952821629466, + "grad_norm": 0.33837973420301753, + "learning_rate": 0.0009999654603384677, + "loss": 1.9089, + "step": 371 + }, + { + "epoch": 0.033750680457267285, + "grad_norm": 0.3154290908851943, + "learning_rate": 0.0009999637117892538, + "loss": 1.9134, + "step": 372 + }, + { + "epoch": 0.0338414080929051, + "grad_norm": 0.35727699948902575, + "learning_rate": 0.0009999619200680996, + "loss": 1.9221, + "step": 373 + }, + { + "epoch": 0.033932135728542916, + "grad_norm": 0.35296242929868193, + "learning_rate": 0.0009999600851751597, + "loss": 1.9175, + "step": 374 + }, + { + "epoch": 0.03402286336418073, + "grad_norm": 
0.3466793371435496, + "learning_rate": 0.0009999582071105932, + "loss": 1.8781, + "step": 375 + }, + { + "epoch": 0.03411359099981855, + "grad_norm": 0.35512423590764286, + "learning_rate": 0.0009999562858745616, + "loss": 1.8784, + "step": 376 + }, + { + "epoch": 0.03420431863545636, + "grad_norm": 0.33693027607816306, + "learning_rate": 0.000999954321467231, + "loss": 1.866, + "step": 377 + }, + { + "epoch": 0.03429504627109418, + "grad_norm": 0.30538731727616736, + "learning_rate": 0.0009999523138887712, + "loss": 1.9204, + "step": 378 + }, + { + "epoch": 0.03438577390673199, + "grad_norm": 0.3249981658177716, + "learning_rate": 0.0009999502631393552, + "loss": 1.9004, + "step": 379 + }, + { + "epoch": 0.03447650154236981, + "grad_norm": 0.3306128979389927, + "learning_rate": 0.0009999481692191607, + "loss": 1.9261, + "step": 380 + }, + { + "epoch": 0.03456722917800762, + "grad_norm": 0.31787367624849516, + "learning_rate": 0.0009999460321283677, + "loss": 1.8815, + "step": 381 + }, + { + "epoch": 0.03465795681364544, + "grad_norm": 0.37474856505845716, + "learning_rate": 0.0009999438518671613, + "loss": 1.9083, + "step": 382 + }, + { + "epoch": 0.03474868444928325, + "grad_norm": 0.3092048739359093, + "learning_rate": 0.0009999416284357297, + "loss": 1.9021, + "step": 383 + }, + { + "epoch": 0.03483941208492107, + "grad_norm": 0.3144894460094649, + "learning_rate": 0.0009999393618342646, + "loss": 1.8435, + "step": 384 + }, + { + "epoch": 0.03493013972055888, + "grad_norm": 0.30128483576022785, + "learning_rate": 0.000999937052062962, + "loss": 1.9242, + "step": 385 + }, + { + "epoch": 0.035020867356196694, + "grad_norm": 0.3462475462190913, + "learning_rate": 0.0009999346991220214, + "loss": 1.8783, + "step": 386 + }, + { + "epoch": 0.03511159499183451, + "grad_norm": 0.30589381306900926, + "learning_rate": 0.0009999323030116458, + "loss": 1.925, + "step": 387 + }, + { + "epoch": 0.035202322627472325, + "grad_norm": 0.3053400532413112, + "learning_rate": 0.000999929863732042, + "loss": 1.9196, + "step": 388 + }, + { + "epoch": 0.035293050263110144, + "grad_norm": 0.3192818539653869, + "learning_rate": 0.000999927381283421, + "loss": 1.9167, + "step": 389 + }, + { + "epoch": 0.035383777898747956, + "grad_norm": 0.3352011660718059, + "learning_rate": 0.0009999248556659967, + "loss": 1.894, + "step": 390 + }, + { + "epoch": 0.035474505534385775, + "grad_norm": 0.29627849131155026, + "learning_rate": 0.0009999222868799875, + "loss": 1.8603, + "step": 391 + }, + { + "epoch": 0.03556523317002359, + "grad_norm": 0.3087593400508195, + "learning_rate": 0.0009999196749256151, + "loss": 1.8697, + "step": 392 + }, + { + "epoch": 0.035655960805661406, + "grad_norm": 0.3465585242336305, + "learning_rate": 0.0009999170198031052, + "loss": 1.8742, + "step": 393 + }, + { + "epoch": 0.03574668844129922, + "grad_norm": 0.29208743814176735, + "learning_rate": 0.000999914321512687, + "loss": 1.8595, + "step": 394 + }, + { + "epoch": 0.03583741607693704, + "grad_norm": 0.35446230650232124, + "learning_rate": 0.0009999115800545936, + "loss": 1.8977, + "step": 395 + }, + { + "epoch": 0.03592814371257485, + "grad_norm": 0.36621207759392105, + "learning_rate": 0.0009999087954290612, + "loss": 1.8736, + "step": 396 + }, + { + "epoch": 0.03601887134821267, + "grad_norm": 0.32920828148978015, + "learning_rate": 0.0009999059676363308, + "loss": 1.895, + "step": 397 + }, + { + "epoch": 0.03610959898385048, + "grad_norm": 0.32751315856154434, + "learning_rate": 0.0009999030966766464, + "loss": 1.9094, + "step": 398 
+ }, + { + "epoch": 0.0362003266194883, + "grad_norm": 0.3018652031316173, + "learning_rate": 0.0009999001825502561, + "loss": 1.8206, + "step": 399 + }, + { + "epoch": 0.03629105425512611, + "grad_norm": 0.3577949498199622, + "learning_rate": 0.0009998972252574113, + "loss": 1.8885, + "step": 400 + }, + { + "epoch": 0.03638178189076393, + "grad_norm": 0.31847297773303873, + "learning_rate": 0.0009998942247983673, + "loss": 1.8615, + "step": 401 + }, + { + "epoch": 0.03647250952640174, + "grad_norm": 0.30906762537061605, + "learning_rate": 0.0009998911811733833, + "loss": 1.9368, + "step": 402 + }, + { + "epoch": 0.036563237162039554, + "grad_norm": 0.3112400266726251, + "learning_rate": 0.0009998880943827221, + "loss": 1.8903, + "step": 403 + }, + { + "epoch": 0.03665396479767737, + "grad_norm": 0.3175675235526251, + "learning_rate": 0.0009998849644266502, + "loss": 1.8998, + "step": 404 + }, + { + "epoch": 0.036744692433315185, + "grad_norm": 0.32259158986270603, + "learning_rate": 0.0009998817913054383, + "loss": 1.896, + "step": 405 + }, + { + "epoch": 0.036835420068953004, + "grad_norm": 0.3199975461349286, + "learning_rate": 0.0009998785750193599, + "loss": 1.8793, + "step": 406 + }, + { + "epoch": 0.036926147704590816, + "grad_norm": 0.31613640364479145, + "learning_rate": 0.0009998753155686928, + "loss": 1.8314, + "step": 407 + }, + { + "epoch": 0.037016875340228635, + "grad_norm": 0.2812480943448982, + "learning_rate": 0.0009998720129537186, + "loss": 1.8772, + "step": 408 + }, + { + "epoch": 0.03710760297586645, + "grad_norm": 0.3113344449778715, + "learning_rate": 0.0009998686671747223, + "loss": 1.8998, + "step": 409 + }, + { + "epoch": 0.037198330611504266, + "grad_norm": 0.31550353164780814, + "learning_rate": 0.000999865278231993, + "loss": 1.8705, + "step": 410 + }, + { + "epoch": 0.03728905824714208, + "grad_norm": 0.32675604544034426, + "learning_rate": 0.0009998618461258232, + "loss": 1.9176, + "step": 411 + }, + { + "epoch": 0.037379785882779896, + "grad_norm": 0.2776141595802902, + "learning_rate": 0.0009998583708565093, + "loss": 1.892, + "step": 412 + }, + { + "epoch": 0.03747051351841771, + "grad_norm": 0.30513436345653505, + "learning_rate": 0.0009998548524243515, + "loss": 1.9027, + "step": 413 + }, + { + "epoch": 0.03756124115405553, + "grad_norm": 0.32427080892088833, + "learning_rate": 0.0009998512908296535, + "loss": 1.9021, + "step": 414 + }, + { + "epoch": 0.03765196878969334, + "grad_norm": 0.30808044928673506, + "learning_rate": 0.0009998476860727228, + "loss": 1.9402, + "step": 415 + }, + { + "epoch": 0.03774269642533116, + "grad_norm": 0.34882459851388437, + "learning_rate": 0.0009998440381538706, + "loss": 1.8839, + "step": 416 + }, + { + "epoch": 0.03783342406096897, + "grad_norm": 0.31806164416521804, + "learning_rate": 0.0009998403470734122, + "loss": 1.8934, + "step": 417 + }, + { + "epoch": 0.03792415169660679, + "grad_norm": 0.34838606238641867, + "learning_rate": 0.0009998366128316663, + "loss": 1.8879, + "step": 418 + }, + { + "epoch": 0.0380148793322446, + "grad_norm": 0.3454005391411906, + "learning_rate": 0.0009998328354289552, + "loss": 1.8712, + "step": 419 + }, + { + "epoch": 0.03810560696788242, + "grad_norm": 0.30789296918472125, + "learning_rate": 0.0009998290148656049, + "loss": 1.8815, + "step": 420 + }, + { + "epoch": 0.03819633460352023, + "grad_norm": 0.35778094254051446, + "learning_rate": 0.0009998251511419455, + "loss": 1.8795, + "step": 421 + }, + { + "epoch": 0.038287062239158044, + "grad_norm": 0.30442574873905, + 
"learning_rate": 0.0009998212442583108, + "loss": 1.8325, + "step": 422 + }, + { + "epoch": 0.03837778987479586, + "grad_norm": 0.3052119660963261, + "learning_rate": 0.0009998172942150378, + "loss": 1.8947, + "step": 423 + }, + { + "epoch": 0.038468517510433675, + "grad_norm": 0.3035852240485413, + "learning_rate": 0.000999813301012468, + "loss": 1.8541, + "step": 424 + }, + { + "epoch": 0.038559245146071494, + "grad_norm": 0.2870747084318162, + "learning_rate": 0.0009998092646509458, + "loss": 1.8625, + "step": 425 + }, + { + "epoch": 0.038649972781709306, + "grad_norm": 0.3096501671771233, + "learning_rate": 0.00099980518513082, + "loss": 1.9283, + "step": 426 + }, + { + "epoch": 0.038740700417347125, + "grad_norm": 0.30632667547880227, + "learning_rate": 0.0009998010624524428, + "loss": 1.8797, + "step": 427 + }, + { + "epoch": 0.03883142805298494, + "grad_norm": 0.29125178317461364, + "learning_rate": 0.00099979689661617, + "loss": 1.8907, + "step": 428 + }, + { + "epoch": 0.038922155688622756, + "grad_norm": 0.279559089948409, + "learning_rate": 0.000999792687622362, + "loss": 1.8682, + "step": 429 + }, + { + "epoch": 0.03901288332426057, + "grad_norm": 0.30605636010305604, + "learning_rate": 0.0009997884354713813, + "loss": 1.8542, + "step": 430 + }, + { + "epoch": 0.03910361095989839, + "grad_norm": 0.30521178794313936, + "learning_rate": 0.0009997841401635959, + "loss": 1.8694, + "step": 431 + }, + { + "epoch": 0.0391943385955362, + "grad_norm": 0.2841459508028394, + "learning_rate": 0.000999779801699376, + "loss": 1.9059, + "step": 432 + }, + { + "epoch": 0.03928506623117402, + "grad_norm": 0.2983530173795164, + "learning_rate": 0.0009997754200790968, + "loss": 1.923, + "step": 433 + }, + { + "epoch": 0.03937579386681183, + "grad_norm": 0.2853576776077989, + "learning_rate": 0.0009997709953031362, + "loss": 1.9011, + "step": 434 + }, + { + "epoch": 0.03946652150244965, + "grad_norm": 0.30289824790535474, + "learning_rate": 0.0009997665273718767, + "loss": 1.8553, + "step": 435 + }, + { + "epoch": 0.03955724913808746, + "grad_norm": 0.3018704424288846, + "learning_rate": 0.0009997620162857036, + "loss": 1.964, + "step": 436 + }, + { + "epoch": 0.03964797677372528, + "grad_norm": 0.31191252148184484, + "learning_rate": 0.0009997574620450073, + "loss": 1.8739, + "step": 437 + }, + { + "epoch": 0.03973870440936309, + "grad_norm": 0.30641024461428956, + "learning_rate": 0.00099975286465018, + "loss": 1.8488, + "step": 438 + }, + { + "epoch": 0.03982943204500091, + "grad_norm": 0.30417035979822554, + "learning_rate": 0.0009997482241016194, + "loss": 1.8245, + "step": 439 + }, + { + "epoch": 0.03992015968063872, + "grad_norm": 0.31074719150280267, + "learning_rate": 0.0009997435403997258, + "loss": 1.7909, + "step": 440 + }, + { + "epoch": 0.040010887316276535, + "grad_norm": 0.304875289219493, + "learning_rate": 0.0009997388135449042, + "loss": 1.8643, + "step": 441 + }, + { + "epoch": 0.040101614951914354, + "grad_norm": 0.2775161265856499, + "learning_rate": 0.000999734043537562, + "loss": 1.8229, + "step": 442 + }, + { + "epoch": 0.040192342587552166, + "grad_norm": 0.27096343265429174, + "learning_rate": 0.0009997292303781118, + "loss": 1.8571, + "step": 443 + }, + { + "epoch": 0.040283070223189985, + "grad_norm": 0.2823253992530658, + "learning_rate": 0.0009997243740669686, + "loss": 1.8226, + "step": 444 + }, + { + "epoch": 0.0403737978588278, + "grad_norm": 0.29722354294652625, + "learning_rate": 0.0009997194746045523, + "loss": 1.8604, + "step": 445 + }, + { + "epoch": 
0.040464525494465615, + "grad_norm": 0.2744723696279071, + "learning_rate": 0.0009997145319912855, + "loss": 1.8576, + "step": 446 + }, + { + "epoch": 0.04055525313010343, + "grad_norm": 0.3049709143243019, + "learning_rate": 0.0009997095462275954, + "loss": 1.8719, + "step": 447 + }, + { + "epoch": 0.040645980765741246, + "grad_norm": 0.28548316029385823, + "learning_rate": 0.0009997045173139123, + "loss": 1.8509, + "step": 448 + }, + { + "epoch": 0.04073670840137906, + "grad_norm": 0.27743247212711036, + "learning_rate": 0.0009996994452506705, + "loss": 1.8366, + "step": 449 + }, + { + "epoch": 0.04082743603701688, + "grad_norm": 0.2947686030461103, + "learning_rate": 0.0009996943300383079, + "loss": 1.8385, + "step": 450 + }, + { + "epoch": 0.04091816367265469, + "grad_norm": 0.27706420953333016, + "learning_rate": 0.0009996891716772662, + "loss": 1.8353, + "step": 451 + }, + { + "epoch": 0.04100889130829251, + "grad_norm": 0.29895106229187857, + "learning_rate": 0.000999683970167991, + "loss": 1.8793, + "step": 452 + }, + { + "epoch": 0.04109961894393032, + "grad_norm": 0.28688214143118346, + "learning_rate": 0.0009996787255109312, + "loss": 1.8931, + "step": 453 + }, + { + "epoch": 0.04119034657956814, + "grad_norm": 0.2841860083169639, + "learning_rate": 0.0009996734377065398, + "loss": 1.851, + "step": 454 + }, + { + "epoch": 0.04128107421520595, + "grad_norm": 0.29913121044646573, + "learning_rate": 0.0009996681067552735, + "loss": 1.8381, + "step": 455 + }, + { + "epoch": 0.04137180185084377, + "grad_norm": 0.27444429625630584, + "learning_rate": 0.0009996627326575925, + "loss": 1.8523, + "step": 456 + }, + { + "epoch": 0.04146252948648158, + "grad_norm": 0.2963238181339429, + "learning_rate": 0.000999657315413961, + "loss": 1.8624, + "step": 457 + }, + { + "epoch": 0.041553257122119394, + "grad_norm": 0.30806073993132926, + "learning_rate": 0.0009996518550248466, + "loss": 1.8693, + "step": 458 + }, + { + "epoch": 0.04164398475775721, + "grad_norm": 0.30378002244766283, + "learning_rate": 0.0009996463514907207, + "loss": 1.8573, + "step": 459 + }, + { + "epoch": 0.041734712393395025, + "grad_norm": 0.2772653409469799, + "learning_rate": 0.0009996408048120588, + "loss": 1.8522, + "step": 460 + }, + { + "epoch": 0.041825440029032844, + "grad_norm": 0.27071969753885494, + "learning_rate": 0.00099963521498934, + "loss": 1.9076, + "step": 461 + }, + { + "epoch": 0.041916167664670656, + "grad_norm": 0.2836827461224307, + "learning_rate": 0.0009996295820230467, + "loss": 1.8296, + "step": 462 + }, + { + "epoch": 0.042006895300308475, + "grad_norm": 0.2592223514851611, + "learning_rate": 0.000999623905913665, + "loss": 1.8784, + "step": 463 + }, + { + "epoch": 0.04209762293594629, + "grad_norm": 0.2824026731616427, + "learning_rate": 0.0009996181866616858, + "loss": 1.7887, + "step": 464 + }, + { + "epoch": 0.042188350571584106, + "grad_norm": 0.275275751186389, + "learning_rate": 0.0009996124242676022, + "loss": 1.8706, + "step": 465 + }, + { + "epoch": 0.04227907820722192, + "grad_norm": 0.2855567843205613, + "learning_rate": 0.0009996066187319124, + "loss": 1.8782, + "step": 466 + }, + { + "epoch": 0.04236980584285974, + "grad_norm": 0.27790936828242446, + "learning_rate": 0.0009996007700551173, + "loss": 1.8162, + "step": 467 + }, + { + "epoch": 0.04246053347849755, + "grad_norm": 0.2697285015247099, + "learning_rate": 0.000999594878237722, + "loss": 1.8335, + "step": 468 + }, + { + "epoch": 0.04255126111413537, + "grad_norm": 0.30677687687031835, + "learning_rate": 
0.0009995889432802354, + "loss": 1.8484, + "step": 469 + }, + { + "epoch": 0.04264198874977318, + "grad_norm": 0.2799115405148999, + "learning_rate": 0.00099958296518317, + "loss": 1.8135, + "step": 470 + }, + { + "epoch": 0.042732716385411, + "grad_norm": 0.27810879516405446, + "learning_rate": 0.000999576943947042, + "loss": 1.8372, + "step": 471 + }, + { + "epoch": 0.04282344402104881, + "grad_norm": 0.30009040129903874, + "learning_rate": 0.000999570879572371, + "loss": 1.8439, + "step": 472 + }, + { + "epoch": 0.04291417165668663, + "grad_norm": 0.2914228633722628, + "learning_rate": 0.0009995647720596813, + "loss": 1.8424, + "step": 473 + }, + { + "epoch": 0.04300489929232444, + "grad_norm": 0.3042704768802582, + "learning_rate": 0.0009995586214094996, + "loss": 1.841, + "step": 474 + }, + { + "epoch": 0.04309562692796226, + "grad_norm": 0.27374381126561265, + "learning_rate": 0.0009995524276223574, + "loss": 1.8718, + "step": 475 + }, + { + "epoch": 0.04318635456360007, + "grad_norm": 0.28302869035661776, + "learning_rate": 0.0009995461906987893, + "loss": 1.8189, + "step": 476 + }, + { + "epoch": 0.043277082199237885, + "grad_norm": 0.2625220894549383, + "learning_rate": 0.0009995399106393341, + "loss": 1.8267, + "step": 477 + }, + { + "epoch": 0.043367809834875704, + "grad_norm": 0.28062885953764827, + "learning_rate": 0.0009995335874445341, + "loss": 1.829, + "step": 478 + }, + { + "epoch": 0.043458537470513516, + "grad_norm": 0.2792504144939958, + "learning_rate": 0.000999527221114935, + "loss": 1.8456, + "step": 479 + }, + { + "epoch": 0.043549265106151334, + "grad_norm": 0.2763181499743204, + "learning_rate": 0.0009995208116510869, + "loss": 1.8906, + "step": 480 + }, + { + "epoch": 0.043639992741789146, + "grad_norm": 0.3079600259161405, + "learning_rate": 0.0009995143590535431, + "loss": 1.8572, + "step": 481 + }, + { + "epoch": 0.043730720377426965, + "grad_norm": 0.2813007998450857, + "learning_rate": 0.0009995078633228606, + "loss": 1.8668, + "step": 482 + }, + { + "epoch": 0.04382144801306478, + "grad_norm": 0.3005302006728464, + "learning_rate": 0.0009995013244596008, + "loss": 1.873, + "step": 483 + }, + { + "epoch": 0.043912175648702596, + "grad_norm": 0.2838921732823289, + "learning_rate": 0.0009994947424643277, + "loss": 1.8105, + "step": 484 + }, + { + "epoch": 0.04400290328434041, + "grad_norm": 0.28676731270955663, + "learning_rate": 0.00099948811733761, + "loss": 1.8319, + "step": 485 + }, + { + "epoch": 0.04409363091997823, + "grad_norm": 0.30359877677923613, + "learning_rate": 0.00099948144908002, + "loss": 1.7727, + "step": 486 + }, + { + "epoch": 0.04418435855561604, + "grad_norm": 0.3033597941625984, + "learning_rate": 0.000999474737692133, + "loss": 1.8571, + "step": 487 + }, + { + "epoch": 0.04427508619125386, + "grad_norm": 0.27275814527722375, + "learning_rate": 0.000999467983174529, + "loss": 1.8532, + "step": 488 + }, + { + "epoch": 0.04436581382689167, + "grad_norm": 0.28740507215627925, + "learning_rate": 0.000999461185527791, + "loss": 1.8637, + "step": 489 + }, + { + "epoch": 0.04445654146252949, + "grad_norm": 0.29755760255537556, + "learning_rate": 0.000999454344752506, + "loss": 1.8394, + "step": 490 + }, + { + "epoch": 0.0445472690981673, + "grad_norm": 0.25171220071950495, + "learning_rate": 0.0009994474608492644, + "loss": 1.8682, + "step": 491 + }, + { + "epoch": 0.04463799673380512, + "grad_norm": 0.25912693012388444, + "learning_rate": 0.0009994405338186612, + "loss": 1.8497, + "step": 492 + }, + { + "epoch": 0.04472872436944293, + 
"grad_norm": 0.2751595857261023, + "learning_rate": 0.0009994335636612944, + "loss": 1.8566, + "step": 493 + }, + { + "epoch": 0.04481945200508075, + "grad_norm": 0.265560144001781, + "learning_rate": 0.0009994265503777656, + "loss": 1.8576, + "step": 494 + }, + { + "epoch": 0.04491017964071856, + "grad_norm": 0.2658042345480418, + "learning_rate": 0.0009994194939686807, + "loss": 1.8642, + "step": 495 + }, + { + "epoch": 0.045000907276356375, + "grad_norm": 0.278856911376686, + "learning_rate": 0.0009994123944346489, + "loss": 1.831, + "step": 496 + }, + { + "epoch": 0.045091634911994194, + "grad_norm": 0.27040579052353353, + "learning_rate": 0.000999405251776283, + "loss": 1.8493, + "step": 497 + }, + { + "epoch": 0.045182362547632006, + "grad_norm": 0.28478775701909853, + "learning_rate": 0.0009993980659942002, + "loss": 1.8402, + "step": 498 + }, + { + "epoch": 0.045273090183269825, + "grad_norm": 0.2782700350072567, + "learning_rate": 0.0009993908370890209, + "loss": 1.791, + "step": 499 + }, + { + "epoch": 0.04536381781890764, + "grad_norm": 0.2647558836162775, + "learning_rate": 0.000999383565061369, + "loss": 1.924, + "step": 500 + }, + { + "epoch": 0.045454545454545456, + "grad_norm": 0.27364041191659333, + "learning_rate": 0.0009993762499118726, + "loss": 1.8047, + "step": 501 + }, + { + "epoch": 0.04554527309018327, + "grad_norm": 0.2636301921761473, + "learning_rate": 0.0009993688916411637, + "loss": 1.8608, + "step": 502 + }, + { + "epoch": 0.04563600072582109, + "grad_norm": 0.28304275854351546, + "learning_rate": 0.0009993614902498772, + "loss": 1.8225, + "step": 503 + }, + { + "epoch": 0.0457267283614589, + "grad_norm": 0.27620050392076834, + "learning_rate": 0.0009993540457386525, + "loss": 1.8488, + "step": 504 + }, + { + "epoch": 0.04581745599709672, + "grad_norm": 0.2796093809729451, + "learning_rate": 0.0009993465581081323, + "loss": 1.8621, + "step": 505 + }, + { + "epoch": 0.04590818363273453, + "grad_norm": 0.2818226336659359, + "learning_rate": 0.0009993390273589633, + "loss": 1.8283, + "step": 506 + }, + { + "epoch": 0.04599891126837235, + "grad_norm": 0.2897641041889654, + "learning_rate": 0.0009993314534917956, + "loss": 1.8377, + "step": 507 + }, + { + "epoch": 0.04608963890401016, + "grad_norm": 0.24814648845534895, + "learning_rate": 0.0009993238365072832, + "loss": 1.8201, + "step": 508 + }, + { + "epoch": 0.04618036653964798, + "grad_norm": 0.2822770036454805, + "learning_rate": 0.000999316176406084, + "loss": 1.8295, + "step": 509 + }, + { + "epoch": 0.04627109417528579, + "grad_norm": 0.2689361818990401, + "learning_rate": 0.0009993084731888596, + "loss": 1.8245, + "step": 510 + }, + { + "epoch": 0.04636182181092361, + "grad_norm": 0.26035769244175994, + "learning_rate": 0.0009993007268562749, + "loss": 1.8443, + "step": 511 + }, + { + "epoch": 0.04645254944656142, + "grad_norm": 0.29086403236148456, + "learning_rate": 0.0009992929374089984, + "loss": 1.8151, + "step": 512 + }, + { + "epoch": 0.046543277082199234, + "grad_norm": 0.2554447939624349, + "learning_rate": 0.0009992851048477036, + "loss": 1.82, + "step": 513 + }, + { + "epoch": 0.04663400471783705, + "grad_norm": 0.279642493144278, + "learning_rate": 0.0009992772291730664, + "loss": 1.8009, + "step": 514 + }, + { + "epoch": 0.046724732353474865, + "grad_norm": 0.2707217980488519, + "learning_rate": 0.0009992693103857667, + "loss": 1.8381, + "step": 515 + }, + { + "epoch": 0.046815459989112684, + "grad_norm": 0.2675853827231967, + "learning_rate": 0.0009992613484864886, + "loss": 1.7804, + 
"step": 516 + }, + { + "epoch": 0.046906187624750496, + "grad_norm": 0.2626972229880895, + "learning_rate": 0.0009992533434759194, + "loss": 1.8612, + "step": 517 + }, + { + "epoch": 0.046996915260388315, + "grad_norm": 0.26328903540014387, + "learning_rate": 0.0009992452953547504, + "loss": 1.8287, + "step": 518 + }, + { + "epoch": 0.04708764289602613, + "grad_norm": 0.2811674581192553, + "learning_rate": 0.0009992372041236766, + "loss": 1.8456, + "step": 519 + }, + { + "epoch": 0.047178370531663946, + "grad_norm": 0.30324033676648926, + "learning_rate": 0.0009992290697833966, + "loss": 1.8201, + "step": 520 + }, + { + "epoch": 0.04726909816730176, + "grad_norm": 0.312206299296192, + "learning_rate": 0.000999220892334613, + "loss": 1.8616, + "step": 521 + }, + { + "epoch": 0.04735982580293958, + "grad_norm": 0.26350187179773465, + "learning_rate": 0.0009992126717780316, + "loss": 1.8594, + "step": 522 + }, + { + "epoch": 0.04745055343857739, + "grad_norm": 0.2758417800562162, + "learning_rate": 0.0009992044081143625, + "loss": 1.8017, + "step": 523 + }, + { + "epoch": 0.04754128107421521, + "grad_norm": 0.2692859595244755, + "learning_rate": 0.0009991961013443192, + "loss": 1.8233, + "step": 524 + }, + { + "epoch": 0.04763200870985302, + "grad_norm": 0.25648130783478723, + "learning_rate": 0.0009991877514686188, + "loss": 1.8404, + "step": 525 + }, + { + "epoch": 0.04772273634549084, + "grad_norm": 0.2842247761886678, + "learning_rate": 0.0009991793584879828, + "loss": 1.8347, + "step": 526 + }, + { + "epoch": 0.04781346398112865, + "grad_norm": 0.28667378811741107, + "learning_rate": 0.0009991709224031352, + "loss": 1.817, + "step": 527 + }, + { + "epoch": 0.04790419161676647, + "grad_norm": 0.27343102333546593, + "learning_rate": 0.0009991624432148052, + "loss": 1.8447, + "step": 528 + }, + { + "epoch": 0.04799491925240428, + "grad_norm": 0.26771950085034074, + "learning_rate": 0.0009991539209237247, + "loss": 1.8452, + "step": 529 + }, + { + "epoch": 0.0480856468880421, + "grad_norm": 0.2527998362601679, + "learning_rate": 0.0009991453555306291, + "loss": 1.8664, + "step": 530 + }, + { + "epoch": 0.04817637452367991, + "grad_norm": 0.2628477629393321, + "learning_rate": 0.0009991367470362589, + "loss": 1.827, + "step": 531 + }, + { + "epoch": 0.048267102159317725, + "grad_norm": 0.2543761551965943, + "learning_rate": 0.000999128095441357, + "loss": 1.8356, + "step": 532 + }, + { + "epoch": 0.048357829794955544, + "grad_norm": 0.24612518296220542, + "learning_rate": 0.0009991194007466704, + "loss": 1.8172, + "step": 533 + }, + { + "epoch": 0.048448557430593356, + "grad_norm": 0.2513957612484892, + "learning_rate": 0.00099911066295295, + "loss": 1.8733, + "step": 534 + }, + { + "epoch": 0.048539285066231175, + "grad_norm": 0.253381682829514, + "learning_rate": 0.0009991018820609504, + "loss": 1.8422, + "step": 535 + }, + { + "epoch": 0.04863001270186899, + "grad_norm": 0.2604327911647113, + "learning_rate": 0.0009990930580714298, + "loss": 1.8033, + "step": 536 + }, + { + "epoch": 0.048720740337506806, + "grad_norm": 0.2602987638593885, + "learning_rate": 0.0009990841909851497, + "loss": 1.8527, + "step": 537 + }, + { + "epoch": 0.04881146797314462, + "grad_norm": 0.2779682419540249, + "learning_rate": 0.0009990752808028765, + "loss": 1.8202, + "step": 538 + }, + { + "epoch": 0.04890219560878244, + "grad_norm": 0.2758292052143754, + "learning_rate": 0.000999066327525379, + "loss": 1.8176, + "step": 539 + }, + { + "epoch": 0.04899292324442025, + "grad_norm": 0.24639393696939374, + 
"learning_rate": 0.0009990573311534309, + "loss": 1.8303, + "step": 540 + }, + { + "epoch": 0.04908365088005807, + "grad_norm": 0.27534299150882335, + "learning_rate": 0.0009990482916878082, + "loss": 1.8332, + "step": 541 + }, + { + "epoch": 0.04917437851569588, + "grad_norm": 0.2670786971078685, + "learning_rate": 0.0009990392091292924, + "loss": 1.805, + "step": 542 + }, + { + "epoch": 0.0492651061513337, + "grad_norm": 0.2500306442678129, + "learning_rate": 0.000999030083478667, + "loss": 1.8051, + "step": 543 + }, + { + "epoch": 0.04935583378697151, + "grad_norm": 0.2833308383074544, + "learning_rate": 0.0009990209147367206, + "loss": 1.8443, + "step": 544 + }, + { + "epoch": 0.04944656142260933, + "grad_norm": 0.2720040480484193, + "learning_rate": 0.0009990117029042445, + "loss": 1.8523, + "step": 545 + }, + { + "epoch": 0.04953728905824714, + "grad_norm": 0.2812111971141155, + "learning_rate": 0.0009990024479820346, + "loss": 1.861, + "step": 546 + }, + { + "epoch": 0.04962801669388496, + "grad_norm": 0.2681771295137318, + "learning_rate": 0.0009989931499708895, + "loss": 1.8363, + "step": 547 + }, + { + "epoch": 0.04971874432952277, + "grad_norm": 0.28715123400935444, + "learning_rate": 0.0009989838088716124, + "loss": 1.8637, + "step": 548 + }, + { + "epoch": 0.04980947196516059, + "grad_norm": 0.2673394975481993, + "learning_rate": 0.00099897442468501, + "loss": 1.86, + "step": 549 + }, + { + "epoch": 0.0499001996007984, + "grad_norm": 0.28576016232477575, + "learning_rate": 0.0009989649974118922, + "loss": 1.8223, + "step": 550 + }, + { + "epoch": 0.049990927236436215, + "grad_norm": 0.2578095506320602, + "learning_rate": 0.0009989555270530734, + "loss": 1.8202, + "step": 551 + }, + { + "epoch": 0.050081654872074034, + "grad_norm": 0.24616578717147236, + "learning_rate": 0.0009989460136093716, + "loss": 1.8397, + "step": 552 + }, + { + "epoch": 0.050172382507711846, + "grad_norm": 0.2621385911972387, + "learning_rate": 0.0009989364570816078, + "loss": 1.8077, + "step": 553 + }, + { + "epoch": 0.050263110143349665, + "grad_norm": 0.2592729234908613, + "learning_rate": 0.0009989268574706073, + "loss": 1.8297, + "step": 554 + }, + { + "epoch": 0.05035383777898748, + "grad_norm": 0.26678098211836154, + "learning_rate": 0.0009989172147771993, + "loss": 1.8548, + "step": 555 + }, + { + "epoch": 0.050444565414625296, + "grad_norm": 0.2530989271398464, + "learning_rate": 0.000998907529002216, + "loss": 1.8124, + "step": 556 + }, + { + "epoch": 0.05053529305026311, + "grad_norm": 0.24683125292981106, + "learning_rate": 0.0009988978001464943, + "loss": 1.7979, + "step": 557 + }, + { + "epoch": 0.05062602068590093, + "grad_norm": 0.281246748686838, + "learning_rate": 0.000998888028210874, + "loss": 1.8795, + "step": 558 + }, + { + "epoch": 0.05071674832153874, + "grad_norm": 0.25503939212503085, + "learning_rate": 0.000998878213196199, + "loss": 1.8251, + "step": 559 + }, + { + "epoch": 0.05080747595717656, + "grad_norm": 0.259442337622213, + "learning_rate": 0.0009988683551033165, + "loss": 1.7898, + "step": 560 + }, + { + "epoch": 0.05089820359281437, + "grad_norm": 0.2598120786420471, + "learning_rate": 0.0009988584539330782, + "loss": 1.778, + "step": 561 + }, + { + "epoch": 0.05098893122845219, + "grad_norm": 0.25591627889623425, + "learning_rate": 0.0009988485096863388, + "loss": 1.8041, + "step": 562 + }, + { + "epoch": 0.05107965886409, + "grad_norm": 0.27007517156359934, + "learning_rate": 0.0009988385223639572, + "loss": 1.8156, + "step": 563 + }, + { + "epoch": 
0.05117038649972782, + "grad_norm": 0.25828616391155945, + "learning_rate": 0.0009988284919667954, + "loss": 1.8257, + "step": 564 + }, + { + "epoch": 0.05126111413536563, + "grad_norm": 0.23918818380618026, + "learning_rate": 0.0009988184184957201, + "loss": 1.8644, + "step": 565 + }, + { + "epoch": 0.05135184177100345, + "grad_norm": 0.2382216971730792, + "learning_rate": 0.0009988083019516006, + "loss": 1.7982, + "step": 566 + }, + { + "epoch": 0.05144256940664126, + "grad_norm": 0.27140393652854866, + "learning_rate": 0.0009987981423353108, + "loss": 1.8502, + "step": 567 + }, + { + "epoch": 0.051533297042279075, + "grad_norm": 0.25099319149508514, + "learning_rate": 0.000998787939647728, + "loss": 1.8083, + "step": 568 + }, + { + "epoch": 0.051624024677916894, + "grad_norm": 0.2524177808453322, + "learning_rate": 0.000998777693889733, + "loss": 1.8198, + "step": 569 + }, + { + "epoch": 0.051714752313554706, + "grad_norm": 0.2370182281391083, + "learning_rate": 0.0009987674050622106, + "loss": 1.8291, + "step": 570 + }, + { + "epoch": 0.051805479949192525, + "grad_norm": 0.222760367890423, + "learning_rate": 0.0009987570731660495, + "loss": 1.8048, + "step": 571 + }, + { + "epoch": 0.05189620758483034, + "grad_norm": 0.25146336977982253, + "learning_rate": 0.0009987466982021413, + "loss": 1.748, + "step": 572 + }, + { + "epoch": 0.051986935220468156, + "grad_norm": 0.2338203235824519, + "learning_rate": 0.0009987362801713823, + "loss": 1.8096, + "step": 573 + }, + { + "epoch": 0.05207766285610597, + "grad_norm": 0.2409209990612872, + "learning_rate": 0.000998725819074672, + "loss": 1.8185, + "step": 574 + }, + { + "epoch": 0.05216839049174379, + "grad_norm": 0.25324763585998794, + "learning_rate": 0.0009987153149129137, + "loss": 1.841, + "step": 575 + }, + { + "epoch": 0.0522591181273816, + "grad_norm": 0.260466370281985, + "learning_rate": 0.0009987047676870147, + "loss": 1.8299, + "step": 576 + }, + { + "epoch": 0.05234984576301942, + "grad_norm": 0.2539969572730624, + "learning_rate": 0.0009986941773978853, + "loss": 1.7937, + "step": 577 + }, + { + "epoch": 0.05244057339865723, + "grad_norm": 0.24567682389648265, + "learning_rate": 0.00099868354404644, + "loss": 1.7827, + "step": 578 + }, + { + "epoch": 0.05253130103429505, + "grad_norm": 0.24878611620702304, + "learning_rate": 0.0009986728676335975, + "loss": 1.8133, + "step": 579 + }, + { + "epoch": 0.05262202866993286, + "grad_norm": 0.2547119836659784, + "learning_rate": 0.0009986621481602792, + "loss": 1.7821, + "step": 580 + }, + { + "epoch": 0.05271275630557068, + "grad_norm": 0.2339478270107706, + "learning_rate": 0.0009986513856274108, + "loss": 1.791, + "step": 581 + }, + { + "epoch": 0.05280348394120849, + "grad_norm": 0.24445138510194914, + "learning_rate": 0.0009986405800359221, + "loss": 1.798, + "step": 582 + }, + { + "epoch": 0.05289421157684631, + "grad_norm": 0.24967302407011788, + "learning_rate": 0.0009986297313867457, + "loss": 1.8209, + "step": 583 + }, + { + "epoch": 0.05298493921248412, + "grad_norm": 0.2467694368823511, + "learning_rate": 0.0009986188396808183, + "loss": 1.8412, + "step": 584 + }, + { + "epoch": 0.05307566684812194, + "grad_norm": 0.2471915790775749, + "learning_rate": 0.0009986079049190807, + "loss": 1.779, + "step": 585 + }, + { + "epoch": 0.05316639448375975, + "grad_norm": 0.25784951109413135, + "learning_rate": 0.000998596927102477, + "loss": 1.76, + "step": 586 + }, + { + "epoch": 0.053257122119397565, + "grad_norm": 0.26655252259080775, + "learning_rate": 0.0009985859062319553, + 
"loss": 1.8164, + "step": 587 + }, + { + "epoch": 0.053347849755035384, + "grad_norm": 0.2515386187493692, + "learning_rate": 0.000998574842308467, + "loss": 1.8555, + "step": 588 + }, + { + "epoch": 0.053438577390673196, + "grad_norm": 0.25563773137741563, + "learning_rate": 0.0009985637353329677, + "loss": 1.8195, + "step": 589 + }, + { + "epoch": 0.053529305026311015, + "grad_norm": 0.23826895092279576, + "learning_rate": 0.0009985525853064164, + "loss": 1.8317, + "step": 590 + }, + { + "epoch": 0.05362003266194883, + "grad_norm": 0.25100512473567566, + "learning_rate": 0.0009985413922297757, + "loss": 1.8296, + "step": 591 + }, + { + "epoch": 0.053710760297586646, + "grad_norm": 0.2519889742085497, + "learning_rate": 0.0009985301561040124, + "loss": 1.8268, + "step": 592 + }, + { + "epoch": 0.05380148793322446, + "grad_norm": 0.26207345815604915, + "learning_rate": 0.0009985188769300966, + "loss": 1.8191, + "step": 593 + }, + { + "epoch": 0.05389221556886228, + "grad_norm": 0.25749060995892753, + "learning_rate": 0.0009985075547090023, + "loss": 1.7885, + "step": 594 + }, + { + "epoch": 0.05398294320450009, + "grad_norm": 0.2456030686293609, + "learning_rate": 0.0009984961894417073, + "loss": 1.7928, + "step": 595 + }, + { + "epoch": 0.05407367084013791, + "grad_norm": 0.23569792461950037, + "learning_rate": 0.0009984847811291928, + "loss": 1.8279, + "step": 596 + }, + { + "epoch": 0.05416439847577572, + "grad_norm": 0.26136295959961586, + "learning_rate": 0.0009984733297724439, + "loss": 1.801, + "step": 597 + }, + { + "epoch": 0.05425512611141354, + "grad_norm": 0.2659915857790076, + "learning_rate": 0.0009984618353724496, + "loss": 1.7641, + "step": 598 + }, + { + "epoch": 0.05434585374705135, + "grad_norm": 0.24352155727871314, + "learning_rate": 0.0009984502979302023, + "loss": 1.8409, + "step": 599 + }, + { + "epoch": 0.05443658138268917, + "grad_norm": 0.26860200326891587, + "learning_rate": 0.0009984387174466983, + "loss": 1.8063, + "step": 600 + }, + { + "epoch": 0.05452730901832698, + "grad_norm": 0.27143158611674173, + "learning_rate": 0.0009984270939229377, + "loss": 1.8168, + "step": 601 + }, + { + "epoch": 0.0546180366539648, + "grad_norm": 0.24243595670438373, + "learning_rate": 0.000998415427359924, + "loss": 1.8112, + "step": 602 + }, + { + "epoch": 0.05470876428960261, + "grad_norm": 0.2665910082045011, + "learning_rate": 0.0009984037177586645, + "loss": 1.8472, + "step": 603 + }, + { + "epoch": 0.05479949192524043, + "grad_norm": 0.24582673051252704, + "learning_rate": 0.0009983919651201708, + "loss": 1.8096, + "step": 604 + }, + { + "epoch": 0.054890219560878244, + "grad_norm": 0.2531264272961351, + "learning_rate": 0.0009983801694454573, + "loss": 1.7975, + "step": 605 + }, + { + "epoch": 0.054980947196516056, + "grad_norm": 0.24005914004790818, + "learning_rate": 0.0009983683307355428, + "loss": 1.7665, + "step": 606 + }, + { + "epoch": 0.055071674832153875, + "grad_norm": 0.23934524935397128, + "learning_rate": 0.0009983564489914494, + "loss": 1.8341, + "step": 607 + }, + { + "epoch": 0.05516240246779169, + "grad_norm": 0.24943147375229688, + "learning_rate": 0.0009983445242142033, + "loss": 1.8042, + "step": 608 + }, + { + "epoch": 0.055253130103429506, + "grad_norm": 0.2590432247479187, + "learning_rate": 0.000998332556404834, + "loss": 1.803, + "step": 609 + }, + { + "epoch": 0.05534385773906732, + "grad_norm": 0.2666118284038789, + "learning_rate": 0.000998320545564375, + "loss": 1.7855, + "step": 610 + }, + { + "epoch": 0.055434585374705136, + "grad_norm": 
0.27024155804538297, + "learning_rate": 0.0009983084916938634, + "loss": 1.7921, + "step": 611 + }, + { + "epoch": 0.05552531301034295, + "grad_norm": 0.29445517791301395, + "learning_rate": 0.00099829639479434, + "loss": 1.8031, + "step": 612 + }, + { + "epoch": 0.05561604064598077, + "grad_norm": 0.23604420267511192, + "learning_rate": 0.0009982842548668497, + "loss": 1.8226, + "step": 613 + }, + { + "epoch": 0.05570676828161858, + "grad_norm": 0.24811848746647802, + "learning_rate": 0.0009982720719124408, + "loss": 1.8071, + "step": 614 + }, + { + "epoch": 0.0557974959172564, + "grad_norm": 0.26718178687076494, + "learning_rate": 0.0009982598459321646, + "loss": 1.8082, + "step": 615 + }, + { + "epoch": 0.05588822355289421, + "grad_norm": 0.23206845757369343, + "learning_rate": 0.0009982475769270774, + "loss": 1.7737, + "step": 616 + }, + { + "epoch": 0.05597895118853203, + "grad_norm": 0.2609995352192514, + "learning_rate": 0.0009982352648982386, + "loss": 1.8196, + "step": 617 + }, + { + "epoch": 0.05606967882416984, + "grad_norm": 0.2503549561825973, + "learning_rate": 0.000998222909846711, + "loss": 1.8016, + "step": 618 + }, + { + "epoch": 0.05616040645980766, + "grad_norm": 0.22905390069131495, + "learning_rate": 0.0009982105117735621, + "loss": 1.8474, + "step": 619 + }, + { + "epoch": 0.05625113409544547, + "grad_norm": 0.22939403907691605, + "learning_rate": 0.0009981980706798618, + "loss": 1.8195, + "step": 620 + }, + { + "epoch": 0.05634186173108329, + "grad_norm": 0.24382069944463927, + "learning_rate": 0.0009981855865666847, + "loss": 1.7748, + "step": 621 + }, + { + "epoch": 0.0564325893667211, + "grad_norm": 0.23167119723629012, + "learning_rate": 0.0009981730594351087, + "loss": 1.8003, + "step": 622 + }, + { + "epoch": 0.056523317002358915, + "grad_norm": 0.2331257100868883, + "learning_rate": 0.0009981604892862158, + "loss": 1.8107, + "step": 623 + }, + { + "epoch": 0.056614044637996734, + "grad_norm": 0.25503792085192906, + "learning_rate": 0.0009981478761210908, + "loss": 1.7929, + "step": 624 + }, + { + "epoch": 0.056704772273634546, + "grad_norm": 0.2520337189416626, + "learning_rate": 0.0009981352199408238, + "loss": 1.8108, + "step": 625 + }, + { + "epoch": 0.056795499909272365, + "grad_norm": 0.24230302130564527, + "learning_rate": 0.0009981225207465068, + "loss": 1.8065, + "step": 626 + }, + { + "epoch": 0.05688622754491018, + "grad_norm": 0.2253613402208171, + "learning_rate": 0.000998109778539237, + "loss": 1.829, + "step": 627 + }, + { + "epoch": 0.056976955180547996, + "grad_norm": 0.24729605499638554, + "learning_rate": 0.000998096993320114, + "loss": 1.7748, + "step": 628 + }, + { + "epoch": 0.05706768281618581, + "grad_norm": 0.24820701692836694, + "learning_rate": 0.0009980841650902427, + "loss": 1.7858, + "step": 629 + }, + { + "epoch": 0.05715841045182363, + "grad_norm": 0.2451871076551789, + "learning_rate": 0.00099807129385073, + "loss": 1.8069, + "step": 630 + }, + { + "epoch": 0.05724913808746144, + "grad_norm": 0.26783041341435454, + "learning_rate": 0.0009980583796026878, + "loss": 1.7605, + "step": 631 + }, + { + "epoch": 0.05733986572309926, + "grad_norm": 0.24765215641869198, + "learning_rate": 0.0009980454223472311, + "loss": 1.8439, + "step": 632 + }, + { + "epoch": 0.05743059335873707, + "grad_norm": 0.23819918707151114, + "learning_rate": 0.0009980324220854788, + "loss": 1.8151, + "step": 633 + }, + { + "epoch": 0.05752132099437489, + "grad_norm": 0.2425098053108877, + "learning_rate": 0.0009980193788185535, + "loss": 1.7813, + "step": 
634 + }, + { + "epoch": 0.0576120486300127, + "grad_norm": 0.23218095227000804, + "learning_rate": 0.0009980062925475813, + "loss": 1.7642, + "step": 635 + }, + { + "epoch": 0.05770277626565052, + "grad_norm": 0.23010492087114365, + "learning_rate": 0.0009979931632736923, + "loss": 1.7845, + "step": 636 + }, + { + "epoch": 0.05779350390128833, + "grad_norm": 0.24814867861142614, + "learning_rate": 0.0009979799909980204, + "loss": 1.8012, + "step": 637 + }, + { + "epoch": 0.05788423153692615, + "grad_norm": 0.23826562461606698, + "learning_rate": 0.0009979667757217029, + "loss": 1.8449, + "step": 638 + }, + { + "epoch": 0.05797495917256396, + "grad_norm": 0.2633798515250108, + "learning_rate": 0.0009979535174458806, + "loss": 1.7698, + "step": 639 + }, + { + "epoch": 0.05806568680820178, + "grad_norm": 0.2411066298549544, + "learning_rate": 0.0009979402161716991, + "loss": 1.8069, + "step": 640 + }, + { + "epoch": 0.058156414443839594, + "grad_norm": 0.24877907981472305, + "learning_rate": 0.0009979268719003062, + "loss": 1.783, + "step": 641 + }, + { + "epoch": 0.058247142079477406, + "grad_norm": 0.24861217863376553, + "learning_rate": 0.000997913484632855, + "loss": 1.7936, + "step": 642 + }, + { + "epoch": 0.058337869715115225, + "grad_norm": 0.2440860046092221, + "learning_rate": 0.0009979000543705006, + "loss": 1.7832, + "step": 643 + }, + { + "epoch": 0.058428597350753037, + "grad_norm": 0.2416806629655382, + "learning_rate": 0.000997886581114403, + "loss": 1.8355, + "step": 644 + }, + { + "epoch": 0.058519324986390855, + "grad_norm": 0.2582487898961905, + "learning_rate": 0.000997873064865726, + "loss": 1.8756, + "step": 645 + }, + { + "epoch": 0.05861005262202867, + "grad_norm": 0.23640538581574855, + "learning_rate": 0.0009978595056256364, + "loss": 1.7821, + "step": 646 + }, + { + "epoch": 0.058700780257666486, + "grad_norm": 0.23763689987616235, + "learning_rate": 0.0009978459033953054, + "loss": 1.7504, + "step": 647 + }, + { + "epoch": 0.0587915078933043, + "grad_norm": 0.24269510166954741, + "learning_rate": 0.000997832258175907, + "loss": 1.7909, + "step": 648 + }, + { + "epoch": 0.05888223552894212, + "grad_norm": 0.24281798922817593, + "learning_rate": 0.0009978185699686198, + "loss": 1.7959, + "step": 649 + }, + { + "epoch": 0.05897296316457993, + "grad_norm": 0.24832942440926728, + "learning_rate": 0.0009978048387746256, + "loss": 1.7979, + "step": 650 + }, + { + "epoch": 0.05906369080021775, + "grad_norm": 0.2099513065187607, + "learning_rate": 0.0009977910645951103, + "loss": 1.7698, + "step": 651 + }, + { + "epoch": 0.05915441843585556, + "grad_norm": 0.22205543564024544, + "learning_rate": 0.0009977772474312632, + "loss": 1.7608, + "step": 652 + }, + { + "epoch": 0.05924514607149338, + "grad_norm": 0.24566830294125377, + "learning_rate": 0.0009977633872842774, + "loss": 1.8129, + "step": 653 + }, + { + "epoch": 0.05933587370713119, + "grad_norm": 0.2433655816518578, + "learning_rate": 0.0009977494841553495, + "loss": 1.8244, + "step": 654 + }, + { + "epoch": 0.05942660134276901, + "grad_norm": 0.24752739121987774, + "learning_rate": 0.0009977355380456807, + "loss": 1.7637, + "step": 655 + }, + { + "epoch": 0.05951732897840682, + "grad_norm": 0.2386579255471397, + "learning_rate": 0.0009977215489564747, + "loss": 1.8314, + "step": 656 + }, + { + "epoch": 0.05960805661404464, + "grad_norm": 0.24530421080553388, + "learning_rate": 0.0009977075168889397, + "loss": 1.827, + "step": 657 + }, + { + "epoch": 0.05969878424968245, + "grad_norm": 0.233888782749824, + 
"learning_rate": 0.0009976934418442869, + "loss": 1.8247, + "step": 658 + }, + { + "epoch": 0.05978951188532027, + "grad_norm": 0.24331563160536274, + "learning_rate": 0.0009976793238237324, + "loss": 1.798, + "step": 659 + }, + { + "epoch": 0.059880239520958084, + "grad_norm": 0.2327240720835869, + "learning_rate": 0.0009976651628284948, + "loss": 1.8423, + "step": 660 + }, + { + "epoch": 0.059970967156595896, + "grad_norm": 0.22704531213402981, + "learning_rate": 0.0009976509588597968, + "loss": 1.7948, + "step": 661 + }, + { + "epoch": 0.060061694792233715, + "grad_norm": 0.2246517700752681, + "learning_rate": 0.0009976367119188655, + "loss": 1.7603, + "step": 662 + }, + { + "epoch": 0.06015242242787153, + "grad_norm": 0.23724673089910692, + "learning_rate": 0.0009976224220069306, + "loss": 1.759, + "step": 663 + }, + { + "epoch": 0.060243150063509346, + "grad_norm": 0.24197797195413287, + "learning_rate": 0.0009976080891252264, + "loss": 1.7907, + "step": 664 + }, + { + "epoch": 0.06033387769914716, + "grad_norm": 0.2423174616081188, + "learning_rate": 0.0009975937132749902, + "loss": 1.8072, + "step": 665 + }, + { + "epoch": 0.06042460533478498, + "grad_norm": 0.2366856283097312, + "learning_rate": 0.0009975792944574636, + "loss": 1.7675, + "step": 666 + }, + { + "epoch": 0.06051533297042279, + "grad_norm": 0.23834347507059106, + "learning_rate": 0.0009975648326738915, + "loss": 1.8055, + "step": 667 + }, + { + "epoch": 0.06060606060606061, + "grad_norm": 0.2240210706802623, + "learning_rate": 0.000997550327925523, + "loss": 1.8177, + "step": 668 + }, + { + "epoch": 0.06069678824169842, + "grad_norm": 0.2276647829064252, + "learning_rate": 0.00099753578021361, + "loss": 1.8349, + "step": 669 + }, + { + "epoch": 0.06078751587733624, + "grad_norm": 0.22643264544642522, + "learning_rate": 0.0009975211895394093, + "loss": 1.8274, + "step": 670 + }, + { + "epoch": 0.06087824351297405, + "grad_norm": 0.24480229184517724, + "learning_rate": 0.0009975065559041805, + "loss": 1.789, + "step": 671 + }, + { + "epoch": 0.06096897114861187, + "grad_norm": 0.2308960044962392, + "learning_rate": 0.0009974918793091872, + "loss": 1.7453, + "step": 672 + }, + { + "epoch": 0.06105969878424968, + "grad_norm": 0.2522503938318699, + "learning_rate": 0.0009974771597556968, + "loss": 1.7588, + "step": 673 + }, + { + "epoch": 0.0611504264198875, + "grad_norm": 0.23943116411237578, + "learning_rate": 0.0009974623972449804, + "loss": 1.807, + "step": 674 + }, + { + "epoch": 0.06124115405552531, + "grad_norm": 0.24290774395363757, + "learning_rate": 0.0009974475917783128, + "loss": 1.7727, + "step": 675 + }, + { + "epoch": 0.06133188169116313, + "grad_norm": 0.23907734265824435, + "learning_rate": 0.000997432743356972, + "loss": 1.8133, + "step": 676 + }, + { + "epoch": 0.061422609326800943, + "grad_norm": 0.23164112867376868, + "learning_rate": 0.0009974178519822408, + "loss": 1.7742, + "step": 677 + }, + { + "epoch": 0.061513336962438755, + "grad_norm": 0.23422722431380352, + "learning_rate": 0.0009974029176554047, + "loss": 1.7715, + "step": 678 + }, + { + "epoch": 0.061604064598076574, + "grad_norm": 0.2211469842561004, + "learning_rate": 0.0009973879403777533, + "loss": 1.7893, + "step": 679 + }, + { + "epoch": 0.061694792233714386, + "grad_norm": 0.23448224103009044, + "learning_rate": 0.00099737292015058, + "loss": 1.7245, + "step": 680 + }, + { + "epoch": 0.061785519869352205, + "grad_norm": 0.23757936266554983, + "learning_rate": 0.0009973578569751817, + "loss": 1.7819, + "step": 681 + }, + { + "epoch": 
0.06187624750499002, + "grad_norm": 0.2424577738022501, + "learning_rate": 0.0009973427508528593, + "loss": 1.7576, + "step": 682 + }, + { + "epoch": 0.061966975140627836, + "grad_norm": 0.25823687094924697, + "learning_rate": 0.0009973276017849167, + "loss": 1.8372, + "step": 683 + }, + { + "epoch": 0.06205770277626565, + "grad_norm": 0.24633873902450354, + "learning_rate": 0.0009973124097726626, + "loss": 1.7267, + "step": 684 + }, + { + "epoch": 0.06214843041190347, + "grad_norm": 0.22893046012688215, + "learning_rate": 0.0009972971748174087, + "loss": 1.7565, + "step": 685 + }, + { + "epoch": 0.06223915804754128, + "grad_norm": 0.22174813688254677, + "learning_rate": 0.0009972818969204704, + "loss": 1.7479, + "step": 686 + }, + { + "epoch": 0.0623298856831791, + "grad_norm": 0.23333682873236408, + "learning_rate": 0.000997266576083167, + "loss": 1.825, + "step": 687 + }, + { + "epoch": 0.06242061331881691, + "grad_norm": 0.2217640721359216, + "learning_rate": 0.0009972512123068214, + "loss": 1.8379, + "step": 688 + }, + { + "epoch": 0.06251134095445472, + "grad_norm": 0.24067142800137015, + "learning_rate": 0.0009972358055927604, + "loss": 1.8215, + "step": 689 + }, + { + "epoch": 0.06260206859009254, + "grad_norm": 0.24155758342534342, + "learning_rate": 0.0009972203559423143, + "loss": 1.7665, + "step": 690 + }, + { + "epoch": 0.06269279622573036, + "grad_norm": 0.2422780364453313, + "learning_rate": 0.000997204863356817, + "loss": 1.7817, + "step": 691 + }, + { + "epoch": 0.06278352386136818, + "grad_norm": 0.23062262388556606, + "learning_rate": 0.0009971893278376068, + "loss": 1.7853, + "step": 692 + }, + { + "epoch": 0.06287425149700598, + "grad_norm": 0.23531570367002141, + "learning_rate": 0.0009971737493860247, + "loss": 1.7741, + "step": 693 + }, + { + "epoch": 0.0629649791326438, + "grad_norm": 0.23084211941623972, + "learning_rate": 0.000997158128003416, + "loss": 1.7446, + "step": 694 + }, + { + "epoch": 0.06305570676828162, + "grad_norm": 0.23922399553310728, + "learning_rate": 0.0009971424636911297, + "loss": 1.7309, + "step": 695 + }, + { + "epoch": 0.06314643440391943, + "grad_norm": 0.23040173230862548, + "learning_rate": 0.0009971267564505184, + "loss": 1.7612, + "step": 696 + }, + { + "epoch": 0.06323716203955725, + "grad_norm": 0.218886864137029, + "learning_rate": 0.0009971110062829385, + "loss": 1.813, + "step": 697 + }, + { + "epoch": 0.06332788967519506, + "grad_norm": 0.22575892419570917, + "learning_rate": 0.0009970952131897499, + "loss": 1.7759, + "step": 698 + }, + { + "epoch": 0.06341861731083288, + "grad_norm": 0.2197642587433438, + "learning_rate": 0.0009970793771723163, + "loss": 1.7485, + "step": 699 + }, + { + "epoch": 0.06350934494647069, + "grad_norm": 0.22190806060351642, + "learning_rate": 0.0009970634982320052, + "loss": 1.787, + "step": 700 + }, + { + "epoch": 0.06360007258210851, + "grad_norm": 0.22654126407737996, + "learning_rate": 0.000997047576370188, + "loss": 1.7848, + "step": 701 + }, + { + "epoch": 0.06369080021774633, + "grad_norm": 0.22129284511342684, + "learning_rate": 0.0009970316115882393, + "loss": 1.7731, + "step": 702 + }, + { + "epoch": 0.06378152785338415, + "grad_norm": 0.22488915958905534, + "learning_rate": 0.0009970156038875374, + "loss": 1.7937, + "step": 703 + }, + { + "epoch": 0.06387225548902195, + "grad_norm": 0.22802609327398343, + "learning_rate": 0.0009969995532694651, + "loss": 1.7966, + "step": 704 + }, + { + "epoch": 0.06396298312465977, + "grad_norm": 0.22533219025588075, + "learning_rate": 
0.0009969834597354078, + "loss": 1.7562, + "step": 705 + }, + { + "epoch": 0.06405371076029759, + "grad_norm": 0.2237955486950884, + "learning_rate": 0.0009969673232867557, + "loss": 1.7996, + "step": 706 + }, + { + "epoch": 0.06414443839593541, + "grad_norm": 0.23131088828682872, + "learning_rate": 0.0009969511439249022, + "loss": 1.8139, + "step": 707 + }, + { + "epoch": 0.06423516603157321, + "grad_norm": 0.22368485517131734, + "learning_rate": 0.000996934921651244, + "loss": 1.8, + "step": 708 + }, + { + "epoch": 0.06432589366721103, + "grad_norm": 0.22130562877719814, + "learning_rate": 0.0009969186564671821, + "loss": 1.7622, + "step": 709 + }, + { + "epoch": 0.06441662130284885, + "grad_norm": 0.2097397775245629, + "learning_rate": 0.0009969023483741208, + "loss": 1.7352, + "step": 710 + }, + { + "epoch": 0.06450734893848667, + "grad_norm": 0.22521063057426918, + "learning_rate": 0.0009968859973734688, + "loss": 1.7831, + "step": 711 + }, + { + "epoch": 0.06459807657412447, + "grad_norm": 0.2117271568106001, + "learning_rate": 0.0009968696034666374, + "loss": 1.8552, + "step": 712 + }, + { + "epoch": 0.0646888042097623, + "grad_norm": 0.22413448763750352, + "learning_rate": 0.0009968531666550426, + "loss": 1.723, + "step": 713 + }, + { + "epoch": 0.06477953184540011, + "grad_norm": 0.2241296604421691, + "learning_rate": 0.0009968366869401038, + "loss": 1.7537, + "step": 714 + }, + { + "epoch": 0.06487025948103792, + "grad_norm": 0.2300289743289707, + "learning_rate": 0.0009968201643232436, + "loss": 1.7821, + "step": 715 + }, + { + "epoch": 0.06496098711667574, + "grad_norm": 0.23149469045953838, + "learning_rate": 0.0009968035988058893, + "loss": 1.7572, + "step": 716 + }, + { + "epoch": 0.06505171475231356, + "grad_norm": 0.21474506256949735, + "learning_rate": 0.0009967869903894707, + "loss": 1.7836, + "step": 717 + }, + { + "epoch": 0.06514244238795137, + "grad_norm": 0.22412681996010228, + "learning_rate": 0.0009967703390754224, + "loss": 1.7918, + "step": 718 + }, + { + "epoch": 0.06523317002358918, + "grad_norm": 0.2287945211920055, + "learning_rate": 0.000996753644865182, + "loss": 1.7939, + "step": 719 + }, + { + "epoch": 0.065323897659227, + "grad_norm": 0.2227501082227496, + "learning_rate": 0.0009967369077601913, + "loss": 1.8095, + "step": 720 + }, + { + "epoch": 0.06541462529486482, + "grad_norm": 0.21344902686923728, + "learning_rate": 0.0009967201277618954, + "loss": 1.8065, + "step": 721 + }, + { + "epoch": 0.06550535293050264, + "grad_norm": 0.20561607625053976, + "learning_rate": 0.0009967033048717431, + "loss": 1.7259, + "step": 722 + }, + { + "epoch": 0.06559608056614044, + "grad_norm": 0.21749622773966534, + "learning_rate": 0.0009966864390911873, + "loss": 1.7759, + "step": 723 + }, + { + "epoch": 0.06568680820177826, + "grad_norm": 0.22063095208077144, + "learning_rate": 0.0009966695304216844, + "loss": 1.7864, + "step": 724 + }, + { + "epoch": 0.06577753583741608, + "grad_norm": 0.21052304005298525, + "learning_rate": 0.0009966525788646942, + "loss": 1.7722, + "step": 725 + }, + { + "epoch": 0.0658682634730539, + "grad_norm": 0.2331981068228529, + "learning_rate": 0.0009966355844216808, + "loss": 1.7827, + "step": 726 + }, + { + "epoch": 0.0659589911086917, + "grad_norm": 0.2045634871540695, + "learning_rate": 0.0009966185470941114, + "loss": 1.7827, + "step": 727 + }, + { + "epoch": 0.06604971874432952, + "grad_norm": 0.21337941456468434, + "learning_rate": 0.0009966014668834572, + "loss": 1.7508, + "step": 728 + }, + { + "epoch": 0.06614044637996734, + 
"grad_norm": 0.21596716631556442, + "learning_rate": 0.000996584343791193, + "loss": 1.7791, + "step": 729 + }, + { + "epoch": 0.06623117401560516, + "grad_norm": 0.22823552998340055, + "learning_rate": 0.0009965671778187977, + "loss": 1.7577, + "step": 730 + }, + { + "epoch": 0.06632190165124296, + "grad_norm": 0.22811502311010073, + "learning_rate": 0.0009965499689677535, + "loss": 1.7954, + "step": 731 + }, + { + "epoch": 0.06641262928688078, + "grad_norm": 0.2378770698784086, + "learning_rate": 0.0009965327172395462, + "loss": 1.8189, + "step": 732 + }, + { + "epoch": 0.0665033569225186, + "grad_norm": 0.20990683804353893, + "learning_rate": 0.0009965154226356657, + "loss": 1.7529, + "step": 733 + }, + { + "epoch": 0.06659408455815641, + "grad_norm": 0.2273384066774868, + "learning_rate": 0.000996498085157605, + "loss": 1.8001, + "step": 734 + }, + { + "epoch": 0.06668481219379423, + "grad_norm": 0.21449698714253357, + "learning_rate": 0.0009964807048068616, + "loss": 1.7184, + "step": 735 + }, + { + "epoch": 0.06677553982943205, + "grad_norm": 0.24050830610845111, + "learning_rate": 0.000996463281584936, + "loss": 1.7861, + "step": 736 + }, + { + "epoch": 0.06686626746506986, + "grad_norm": 0.23659942155855365, + "learning_rate": 0.0009964458154933333, + "loss": 1.7726, + "step": 737 + }, + { + "epoch": 0.06695699510070767, + "grad_norm": 0.2372525910300461, + "learning_rate": 0.000996428306533561, + "loss": 1.779, + "step": 738 + }, + { + "epoch": 0.06704772273634549, + "grad_norm": 0.22501733218875153, + "learning_rate": 0.0009964107547071313, + "loss": 1.812, + "step": 739 + }, + { + "epoch": 0.06713845037198331, + "grad_norm": 0.24080868288123514, + "learning_rate": 0.0009963931600155598, + "loss": 1.7746, + "step": 740 + }, + { + "epoch": 0.06722917800762113, + "grad_norm": 0.22569507576232542, + "learning_rate": 0.0009963755224603656, + "loss": 1.8006, + "step": 741 + }, + { + "epoch": 0.06731990564325893, + "grad_norm": 0.22480890757929567, + "learning_rate": 0.0009963578420430722, + "loss": 1.7656, + "step": 742 + }, + { + "epoch": 0.06741063327889675, + "grad_norm": 0.2373041687783592, + "learning_rate": 0.0009963401187652056, + "loss": 1.7772, + "step": 743 + }, + { + "epoch": 0.06750136091453457, + "grad_norm": 0.21827172948306137, + "learning_rate": 0.0009963223526282968, + "loss": 1.7332, + "step": 744 + }, + { + "epoch": 0.06759208855017239, + "grad_norm": 0.25326949636148205, + "learning_rate": 0.0009963045436338798, + "loss": 1.7898, + "step": 745 + }, + { + "epoch": 0.0676828161858102, + "grad_norm": 0.2154672791405388, + "learning_rate": 0.0009962866917834921, + "loss": 1.7608, + "step": 746 + }, + { + "epoch": 0.06777354382144801, + "grad_norm": 0.23463808062500288, + "learning_rate": 0.0009962687970786754, + "loss": 1.8323, + "step": 747 + }, + { + "epoch": 0.06786427145708583, + "grad_norm": 0.21996423659006267, + "learning_rate": 0.0009962508595209752, + "loss": 1.7617, + "step": 748 + }, + { + "epoch": 0.06795499909272365, + "grad_norm": 0.22792708057249786, + "learning_rate": 0.00099623287911194, + "loss": 1.7362, + "step": 749 + }, + { + "epoch": 0.06804572672836146, + "grad_norm": 0.23313054003998562, + "learning_rate": 0.0009962148558531224, + "loss": 1.8002, + "step": 750 + }, + { + "epoch": 0.06813645436399927, + "grad_norm": 0.22457759783479686, + "learning_rate": 0.000996196789746079, + "loss": 1.7826, + "step": 751 + }, + { + "epoch": 0.0682271819996371, + "grad_norm": 0.21357130208926242, + "learning_rate": 0.0009961786807923697, + "loss": 1.7279, + 
"step": 752 + }, + { + "epoch": 0.0683179096352749, + "grad_norm": 0.23243386581978331, + "learning_rate": 0.0009961605289935582, + "loss": 1.7939, + "step": 753 + }, + { + "epoch": 0.06840863727091272, + "grad_norm": 0.22251568761890092, + "learning_rate": 0.0009961423343512119, + "loss": 1.7653, + "step": 754 + }, + { + "epoch": 0.06849936490655054, + "grad_norm": 0.20804228392098115, + "learning_rate": 0.0009961240968669018, + "loss": 1.7556, + "step": 755 + }, + { + "epoch": 0.06859009254218836, + "grad_norm": 0.2150760068057919, + "learning_rate": 0.0009961058165422027, + "loss": 1.7409, + "step": 756 + }, + { + "epoch": 0.06868082017782616, + "grad_norm": 0.22825634418699955, + "learning_rate": 0.0009960874933786935, + "loss": 1.7909, + "step": 757 + }, + { + "epoch": 0.06877154781346398, + "grad_norm": 0.23470585798497032, + "learning_rate": 0.000996069127377956, + "loss": 1.7389, + "step": 758 + }, + { + "epoch": 0.0688622754491018, + "grad_norm": 0.22259357365409616, + "learning_rate": 0.0009960507185415763, + "loss": 1.7692, + "step": 759 + }, + { + "epoch": 0.06895300308473962, + "grad_norm": 0.2285026021425473, + "learning_rate": 0.0009960322668711439, + "loss": 1.8187, + "step": 760 + }, + { + "epoch": 0.06904373072037742, + "grad_norm": 0.22428140871331176, + "learning_rate": 0.000996013772368252, + "loss": 1.7811, + "step": 761 + }, + { + "epoch": 0.06913445835601524, + "grad_norm": 0.20412326416943935, + "learning_rate": 0.0009959952350344981, + "loss": 1.8121, + "step": 762 + }, + { + "epoch": 0.06922518599165306, + "grad_norm": 0.21423019333076782, + "learning_rate": 0.0009959766548714823, + "loss": 1.7706, + "step": 763 + }, + { + "epoch": 0.06931591362729088, + "grad_norm": 0.21606134559588674, + "learning_rate": 0.0009959580318808093, + "loss": 1.7791, + "step": 764 + }, + { + "epoch": 0.06940664126292868, + "grad_norm": 0.20961448441391847, + "learning_rate": 0.0009959393660640872, + "loss": 1.7795, + "step": 765 + }, + { + "epoch": 0.0694973688985665, + "grad_norm": 0.2203820296115966, + "learning_rate": 0.0009959206574229277, + "loss": 1.7999, + "step": 766 + }, + { + "epoch": 0.06958809653420432, + "grad_norm": 0.2149368674673969, + "learning_rate": 0.0009959019059589463, + "loss": 1.7626, + "step": 767 + }, + { + "epoch": 0.06967882416984214, + "grad_norm": 0.20374495052905303, + "learning_rate": 0.0009958831116737622, + "loss": 1.7925, + "step": 768 + }, + { + "epoch": 0.06976955180547995, + "grad_norm": 0.23125862072046524, + "learning_rate": 0.0009958642745689983, + "loss": 1.754, + "step": 769 + }, + { + "epoch": 0.06986027944111776, + "grad_norm": 0.21779374970045845, + "learning_rate": 0.0009958453946462813, + "loss": 1.7804, + "step": 770 + }, + { + "epoch": 0.06995100707675558, + "grad_norm": 0.20797597011767663, + "learning_rate": 0.0009958264719072416, + "loss": 1.7594, + "step": 771 + }, + { + "epoch": 0.07004173471239339, + "grad_norm": 0.21594486991502582, + "learning_rate": 0.0009958075063535128, + "loss": 1.7555, + "step": 772 + }, + { + "epoch": 0.07013246234803121, + "grad_norm": 0.23964592818921968, + "learning_rate": 0.0009957884979867326, + "loss": 1.7523, + "step": 773 + }, + { + "epoch": 0.07022318998366903, + "grad_norm": 0.20950932278290457, + "learning_rate": 0.0009957694468085427, + "loss": 1.8037, + "step": 774 + }, + { + "epoch": 0.07031391761930685, + "grad_norm": 0.23113522753942342, + "learning_rate": 0.000995750352820588, + "loss": 1.7744, + "step": 775 + }, + { + "epoch": 0.07040464525494465, + "grad_norm": 0.22090121777699892, + 
"learning_rate": 0.0009957312160245172, + "loss": 1.7827, + "step": 776 + }, + { + "epoch": 0.07049537289058247, + "grad_norm": 0.22106282303140706, + "learning_rate": 0.0009957120364219828, + "loss": 1.766, + "step": 777 + }, + { + "epoch": 0.07058610052622029, + "grad_norm": 0.21701458245657818, + "learning_rate": 0.000995692814014641, + "loss": 1.7772, + "step": 778 + }, + { + "epoch": 0.07067682816185811, + "grad_norm": 0.2079572177952554, + "learning_rate": 0.0009956735488041517, + "loss": 1.8135, + "step": 779 + }, + { + "epoch": 0.07076755579749591, + "grad_norm": 0.21696586859567246, + "learning_rate": 0.0009956542407921784, + "loss": 1.802, + "step": 780 + }, + { + "epoch": 0.07085828343313373, + "grad_norm": 0.21406760133389857, + "learning_rate": 0.0009956348899803882, + "loss": 1.7611, + "step": 781 + }, + { + "epoch": 0.07094901106877155, + "grad_norm": 0.21869923028394656, + "learning_rate": 0.0009956154963704524, + "loss": 1.7373, + "step": 782 + }, + { + "epoch": 0.07103973870440937, + "grad_norm": 0.20101252986310117, + "learning_rate": 0.0009955960599640455, + "loss": 1.7575, + "step": 783 + }, + { + "epoch": 0.07113046634004717, + "grad_norm": 0.23605422283616656, + "learning_rate": 0.0009955765807628456, + "loss": 1.7329, + "step": 784 + }, + { + "epoch": 0.071221193975685, + "grad_norm": 0.2281599914399603, + "learning_rate": 0.000995557058768535, + "loss": 1.7936, + "step": 785 + }, + { + "epoch": 0.07131192161132281, + "grad_norm": 0.23685681768339728, + "learning_rate": 0.0009955374939827994, + "loss": 1.7739, + "step": 786 + }, + { + "epoch": 0.07140264924696063, + "grad_norm": 0.20742142173031664, + "learning_rate": 0.000995517886407328, + "loss": 1.752, + "step": 787 + }, + { + "epoch": 0.07149337688259844, + "grad_norm": 0.2267483011852226, + "learning_rate": 0.0009954982360438143, + "loss": 1.7244, + "step": 788 + }, + { + "epoch": 0.07158410451823626, + "grad_norm": 0.20674728147715862, + "learning_rate": 0.0009954785428939548, + "loss": 1.7854, + "step": 789 + }, + { + "epoch": 0.07167483215387407, + "grad_norm": 0.21265423452868257, + "learning_rate": 0.0009954588069594498, + "loss": 1.7381, + "step": 790 + }, + { + "epoch": 0.07176555978951188, + "grad_norm": 0.22449187668364887, + "learning_rate": 0.000995439028242004, + "loss": 1.7533, + "step": 791 + }, + { + "epoch": 0.0718562874251497, + "grad_norm": 0.214649738296938, + "learning_rate": 0.0009954192067433251, + "loss": 1.7834, + "step": 792 + }, + { + "epoch": 0.07194701506078752, + "grad_norm": 0.2293135991549427, + "learning_rate": 0.000995399342465125, + "loss": 1.758, + "step": 793 + }, + { + "epoch": 0.07203774269642534, + "grad_norm": 0.22254658073387337, + "learning_rate": 0.0009953794354091183, + "loss": 1.7119, + "step": 794 + }, + { + "epoch": 0.07212847033206314, + "grad_norm": 0.2281715720487833, + "learning_rate": 0.0009953594855770245, + "loss": 1.7542, + "step": 795 + }, + { + "epoch": 0.07221919796770096, + "grad_norm": 0.21367777764524457, + "learning_rate": 0.0009953394929705659, + "loss": 1.7907, + "step": 796 + }, + { + "epoch": 0.07230992560333878, + "grad_norm": 0.2134534021610028, + "learning_rate": 0.0009953194575914692, + "loss": 1.7693, + "step": 797 + }, + { + "epoch": 0.0724006532389766, + "grad_norm": 0.2160705573780347, + "learning_rate": 0.0009952993794414644, + "loss": 1.7664, + "step": 798 + }, + { + "epoch": 0.0724913808746144, + "grad_norm": 0.20957800287840458, + "learning_rate": 0.0009952792585222851, + "loss": 1.7248, + "step": 799 + }, + { + "epoch": 
0.07258210851025222, + "grad_norm": 0.21854522072506283, + "learning_rate": 0.0009952590948356687, + "loss": 1.7249, + "step": 800 + }, + { + "epoch": 0.07267283614589004, + "grad_norm": 0.23076686266681531, + "learning_rate": 0.0009952388883833567, + "loss": 1.7832, + "step": 801 + }, + { + "epoch": 0.07276356378152786, + "grad_norm": 0.22760815364104642, + "learning_rate": 0.0009952186391670936, + "loss": 1.7727, + "step": 802 + }, + { + "epoch": 0.07285429141716566, + "grad_norm": 0.21271517357661354, + "learning_rate": 0.0009951983471886282, + "loss": 1.8112, + "step": 803 + }, + { + "epoch": 0.07294501905280348, + "grad_norm": 0.20855419273982098, + "learning_rate": 0.0009951780124497123, + "loss": 1.7503, + "step": 804 + }, + { + "epoch": 0.0730357466884413, + "grad_norm": 0.21906230097444773, + "learning_rate": 0.0009951576349521022, + "loss": 1.7639, + "step": 805 + }, + { + "epoch": 0.07312647432407911, + "grad_norm": 0.20739784562418914, + "learning_rate": 0.0009951372146975571, + "loss": 1.7938, + "step": 806 + }, + { + "epoch": 0.07321720195971693, + "grad_norm": 0.21817254650643308, + "learning_rate": 0.0009951167516878408, + "loss": 1.7871, + "step": 807 + }, + { + "epoch": 0.07330792959535475, + "grad_norm": 0.22096296166653026, + "learning_rate": 0.0009950962459247198, + "loss": 1.7748, + "step": 808 + }, + { + "epoch": 0.07339865723099256, + "grad_norm": 0.2165937512035548, + "learning_rate": 0.0009950756974099653, + "loss": 1.752, + "step": 809 + }, + { + "epoch": 0.07348938486663037, + "grad_norm": 0.22123438595184022, + "learning_rate": 0.000995055106145351, + "loss": 1.7267, + "step": 810 + }, + { + "epoch": 0.07358011250226819, + "grad_norm": 0.20736077751586846, + "learning_rate": 0.0009950344721326556, + "loss": 1.7607, + "step": 811 + }, + { + "epoch": 0.07367084013790601, + "grad_norm": 0.21749442259429902, + "learning_rate": 0.0009950137953736605, + "loss": 1.7548, + "step": 812 + }, + { + "epoch": 0.07376156777354383, + "grad_norm": 0.2176074964376878, + "learning_rate": 0.000994993075870151, + "loss": 1.8155, + "step": 813 + }, + { + "epoch": 0.07385229540918163, + "grad_norm": 0.2139258587078529, + "learning_rate": 0.0009949723136239168, + "loss": 1.8007, + "step": 814 + }, + { + "epoch": 0.07394302304481945, + "grad_norm": 0.20387947480555937, + "learning_rate": 0.0009949515086367501, + "loss": 1.7961, + "step": 815 + }, + { + "epoch": 0.07403375068045727, + "grad_norm": 0.2054450969611483, + "learning_rate": 0.0009949306609104479, + "loss": 1.7352, + "step": 816 + }, + { + "epoch": 0.07412447831609509, + "grad_norm": 0.21252346323240195, + "learning_rate": 0.00099490977044681, + "loss": 1.8144, + "step": 817 + }, + { + "epoch": 0.0742152059517329, + "grad_norm": 0.19750314004890832, + "learning_rate": 0.0009948888372476406, + "loss": 1.7588, + "step": 818 + }, + { + "epoch": 0.07430593358737071, + "grad_norm": 0.2088002714536161, + "learning_rate": 0.0009948678613147471, + "loss": 1.7215, + "step": 819 + }, + { + "epoch": 0.07439666122300853, + "grad_norm": 0.22165135561913787, + "learning_rate": 0.0009948468426499407, + "loss": 1.7271, + "step": 820 + }, + { + "epoch": 0.07448738885864635, + "grad_norm": 0.20437536222723535, + "learning_rate": 0.0009948257812550365, + "loss": 1.7567, + "step": 821 + }, + { + "epoch": 0.07457811649428416, + "grad_norm": 0.2151783796364009, + "learning_rate": 0.0009948046771318536, + "loss": 1.765, + "step": 822 + }, + { + "epoch": 0.07466884412992197, + "grad_norm": 0.20256471114783833, + "learning_rate": 
0.0009947835302822135, + "loss": 1.7067, + "step": 823 + }, + { + "epoch": 0.07475957176555979, + "grad_norm": 0.19166638809395764, + "learning_rate": 0.0009947623407079427, + "loss": 1.7878, + "step": 824 + }, + { + "epoch": 0.0748502994011976, + "grad_norm": 0.21239887529536064, + "learning_rate": 0.000994741108410871, + "loss": 1.7522, + "step": 825 + }, + { + "epoch": 0.07494102703683542, + "grad_norm": 0.20433794065235508, + "learning_rate": 0.0009947198333928316, + "loss": 1.7579, + "step": 826 + }, + { + "epoch": 0.07503175467247324, + "grad_norm": 0.19860552385190117, + "learning_rate": 0.0009946985156556614, + "loss": 1.7134, + "step": 827 + }, + { + "epoch": 0.07512248230811105, + "grad_norm": 0.2128864567144705, + "learning_rate": 0.0009946771552012017, + "loss": 1.7541, + "step": 828 + }, + { + "epoch": 0.07521320994374886, + "grad_norm": 0.2031247750154403, + "learning_rate": 0.000994655752031297, + "loss": 1.7747, + "step": 829 + }, + { + "epoch": 0.07530393757938668, + "grad_norm": 0.22871031901657235, + "learning_rate": 0.0009946343061477947, + "loss": 1.7768, + "step": 830 + }, + { + "epoch": 0.0753946652150245, + "grad_norm": 0.20591840339649933, + "learning_rate": 0.0009946128175525475, + "loss": 1.7683, + "step": 831 + }, + { + "epoch": 0.07548539285066232, + "grad_norm": 0.2212843540320656, + "learning_rate": 0.0009945912862474105, + "loss": 1.7784, + "step": 832 + }, + { + "epoch": 0.07557612048630012, + "grad_norm": 0.22046760080404856, + "learning_rate": 0.000994569712234243, + "loss": 1.7525, + "step": 833 + }, + { + "epoch": 0.07566684812193794, + "grad_norm": 0.21130280072808802, + "learning_rate": 0.000994548095514908, + "loss": 1.7726, + "step": 834 + }, + { + "epoch": 0.07575757575757576, + "grad_norm": 0.1971430138955619, + "learning_rate": 0.0009945264360912722, + "loss": 1.7401, + "step": 835 + }, + { + "epoch": 0.07584830339321358, + "grad_norm": 0.23455764488304087, + "learning_rate": 0.0009945047339652057, + "loss": 1.769, + "step": 836 + }, + { + "epoch": 0.07593903102885138, + "grad_norm": 0.21452612197486826, + "learning_rate": 0.0009944829891385825, + "loss": 1.7526, + "step": 837 + }, + { + "epoch": 0.0760297586644892, + "grad_norm": 0.20865457533698925, + "learning_rate": 0.0009944612016132802, + "loss": 1.7821, + "step": 838 + }, + { + "epoch": 0.07612048630012702, + "grad_norm": 0.21737857529527813, + "learning_rate": 0.0009944393713911804, + "loss": 1.7344, + "step": 839 + }, + { + "epoch": 0.07621121393576484, + "grad_norm": 0.1988739857542084, + "learning_rate": 0.0009944174984741678, + "loss": 1.7824, + "step": 840 + }, + { + "epoch": 0.07630194157140265, + "grad_norm": 0.20875457174240278, + "learning_rate": 0.0009943955828641317, + "loss": 1.7498, + "step": 841 + }, + { + "epoch": 0.07639266920704046, + "grad_norm": 0.20673984389435976, + "learning_rate": 0.0009943736245629638, + "loss": 1.76, + "step": 842 + }, + { + "epoch": 0.07648339684267828, + "grad_norm": 0.1912662567935922, + "learning_rate": 0.0009943516235725606, + "loss": 1.7891, + "step": 843 + }, + { + "epoch": 0.07657412447831609, + "grad_norm": 0.19690343930784257, + "learning_rate": 0.000994329579894822, + "loss": 1.7781, + "step": 844 + }, + { + "epoch": 0.07666485211395391, + "grad_norm": 0.1981089775552474, + "learning_rate": 0.0009943074935316514, + "loss": 1.7541, + "step": 845 + }, + { + "epoch": 0.07675557974959173, + "grad_norm": 0.19589729699498618, + "learning_rate": 0.0009942853644849556, + "loss": 1.7063, + "step": 846 + }, + { + "epoch": 0.07684630738522955, + 
"grad_norm": 0.21106683910586094, + "learning_rate": 0.000994263192756646, + "loss": 1.7784, + "step": 847 + }, + { + "epoch": 0.07693703502086735, + "grad_norm": 0.1945553549963554, + "learning_rate": 0.0009942409783486367, + "loss": 1.7499, + "step": 848 + }, + { + "epoch": 0.07702776265650517, + "grad_norm": 0.20952214829982824, + "learning_rate": 0.0009942187212628462, + "loss": 1.8001, + "step": 849 + }, + { + "epoch": 0.07711849029214299, + "grad_norm": 0.18277404916397497, + "learning_rate": 0.000994196421501196, + "loss": 1.7605, + "step": 850 + }, + { + "epoch": 0.07720921792778081, + "grad_norm": 0.1884636665490286, + "learning_rate": 0.0009941740790656121, + "loss": 1.7277, + "step": 851 + }, + { + "epoch": 0.07729994556341861, + "grad_norm": 0.20385032958209762, + "learning_rate": 0.0009941516939580238, + "loss": 1.763, + "step": 852 + }, + { + "epoch": 0.07739067319905643, + "grad_norm": 0.197823831327996, + "learning_rate": 0.0009941292661803638, + "loss": 1.7987, + "step": 853 + }, + { + "epoch": 0.07748140083469425, + "grad_norm": 0.20132021080263895, + "learning_rate": 0.0009941067957345688, + "loss": 1.7166, + "step": 854 + }, + { + "epoch": 0.07757212847033207, + "grad_norm": 0.2059976583802919, + "learning_rate": 0.0009940842826225793, + "loss": 1.7785, + "step": 855 + }, + { + "epoch": 0.07766285610596987, + "grad_norm": 0.19567011512392954, + "learning_rate": 0.000994061726846339, + "loss": 1.7102, + "step": 856 + }, + { + "epoch": 0.07775358374160769, + "grad_norm": 0.21020840617499462, + "learning_rate": 0.0009940391284077958, + "loss": 1.7405, + "step": 857 + }, + { + "epoch": 0.07784431137724551, + "grad_norm": 0.20750037408480954, + "learning_rate": 0.000994016487308901, + "loss": 1.7451, + "step": 858 + }, + { + "epoch": 0.07793503901288333, + "grad_norm": 0.20792811847136164, + "learning_rate": 0.0009939938035516099, + "loss": 1.771, + "step": 859 + }, + { + "epoch": 0.07802576664852114, + "grad_norm": 0.2093544634880923, + "learning_rate": 0.000993971077137881, + "loss": 1.7567, + "step": 860 + }, + { + "epoch": 0.07811649428415895, + "grad_norm": 0.20365566691124853, + "learning_rate": 0.0009939483080696767, + "loss": 1.732, + "step": 861 + }, + { + "epoch": 0.07820722191979677, + "grad_norm": 0.19606584993958032, + "learning_rate": 0.0009939254963489633, + "loss": 1.7843, + "step": 862 + }, + { + "epoch": 0.07829794955543458, + "grad_norm": 0.20760379956777594, + "learning_rate": 0.0009939026419777104, + "loss": 1.7526, + "step": 863 + }, + { + "epoch": 0.0783886771910724, + "grad_norm": 0.1999128141072703, + "learning_rate": 0.0009938797449578916, + "loss": 1.7436, + "step": 864 + }, + { + "epoch": 0.07847940482671022, + "grad_norm": 0.2000052664581823, + "learning_rate": 0.000993856805291484, + "loss": 1.7499, + "step": 865 + }, + { + "epoch": 0.07857013246234804, + "grad_norm": 0.19385028377565774, + "learning_rate": 0.0009938338229804685, + "loss": 1.7675, + "step": 866 + }, + { + "epoch": 0.07866086009798584, + "grad_norm": 0.2029342694493701, + "learning_rate": 0.0009938107980268297, + "loss": 1.7572, + "step": 867 + }, + { + "epoch": 0.07875158773362366, + "grad_norm": 0.1973990827127814, + "learning_rate": 0.0009937877304325555, + "loss": 1.7678, + "step": 868 + }, + { + "epoch": 0.07884231536926148, + "grad_norm": 0.20771573158480117, + "learning_rate": 0.0009937646201996382, + "loss": 1.734, + "step": 869 + }, + { + "epoch": 0.0789330430048993, + "grad_norm": 0.20172083292101495, + "learning_rate": 0.000993741467330073, + "loss": 1.7802, + "step": 
870 + }, + { + "epoch": 0.0790237706405371, + "grad_norm": 0.20613637439804938, + "learning_rate": 0.0009937182718258596, + "loss": 1.8008, + "step": 871 + }, + { + "epoch": 0.07911449827617492, + "grad_norm": 0.218321591126402, + "learning_rate": 0.0009936950336890003, + "loss": 1.758, + "step": 872 + }, + { + "epoch": 0.07920522591181274, + "grad_norm": 0.19826293516694304, + "learning_rate": 0.0009936717529215023, + "loss": 1.76, + "step": 873 + }, + { + "epoch": 0.07929595354745056, + "grad_norm": 0.20371219736623086, + "learning_rate": 0.0009936484295253755, + "loss": 1.8009, + "step": 874 + }, + { + "epoch": 0.07938668118308836, + "grad_norm": 0.2022113631944225, + "learning_rate": 0.0009936250635026343, + "loss": 1.7331, + "step": 875 + }, + { + "epoch": 0.07947740881872618, + "grad_norm": 0.21088323567990172, + "learning_rate": 0.000993601654855296, + "loss": 1.7714, + "step": 876 + }, + { + "epoch": 0.079568136454364, + "grad_norm": 0.20510259919234994, + "learning_rate": 0.000993578203585382, + "loss": 1.7712, + "step": 877 + }, + { + "epoch": 0.07965886409000182, + "grad_norm": 0.20540429075166003, + "learning_rate": 0.0009935547096949174, + "loss": 1.7345, + "step": 878 + }, + { + "epoch": 0.07974959172563963, + "grad_norm": 0.21490675792427047, + "learning_rate": 0.0009935311731859308, + "loss": 1.7686, + "step": 879 + }, + { + "epoch": 0.07984031936127745, + "grad_norm": 0.19792844049399383, + "learning_rate": 0.000993507594060455, + "loss": 1.779, + "step": 880 + }, + { + "epoch": 0.07993104699691526, + "grad_norm": 0.20211454148305485, + "learning_rate": 0.0009934839723205254, + "loss": 1.7335, + "step": 881 + }, + { + "epoch": 0.08002177463255307, + "grad_norm": 0.1944046844128232, + "learning_rate": 0.000993460307968182, + "loss": 1.7916, + "step": 882 + }, + { + "epoch": 0.08011250226819089, + "grad_norm": 0.20763586627271394, + "learning_rate": 0.0009934366010054686, + "loss": 1.7392, + "step": 883 + }, + { + "epoch": 0.08020322990382871, + "grad_norm": 0.19435387769413634, + "learning_rate": 0.0009934128514344318, + "loss": 1.7269, + "step": 884 + }, + { + "epoch": 0.08029395753946653, + "grad_norm": 0.20745999113176908, + "learning_rate": 0.0009933890592571223, + "loss": 1.7704, + "step": 885 + }, + { + "epoch": 0.08038468517510433, + "grad_norm": 0.20205054294529534, + "learning_rate": 0.0009933652244755952, + "loss": 1.7157, + "step": 886 + }, + { + "epoch": 0.08047541281074215, + "grad_norm": 0.2144764034779791, + "learning_rate": 0.0009933413470919082, + "loss": 1.8177, + "step": 887 + }, + { + "epoch": 0.08056614044637997, + "grad_norm": 0.1989338506950777, + "learning_rate": 0.000993317427108123, + "loss": 1.794, + "step": 888 + }, + { + "epoch": 0.08065686808201779, + "grad_norm": 0.19254858081457135, + "learning_rate": 0.0009932934645263054, + "loss": 1.7605, + "step": 889 + }, + { + "epoch": 0.0807475957176556, + "grad_norm": 0.20665825137500435, + "learning_rate": 0.0009932694593485242, + "loss": 1.7373, + "step": 890 + }, + { + "epoch": 0.08083832335329341, + "grad_norm": 0.19496906196012298, + "learning_rate": 0.0009932454115768527, + "loss": 1.7176, + "step": 891 + }, + { + "epoch": 0.08092905098893123, + "grad_norm": 0.20477911064265283, + "learning_rate": 0.0009932213212133672, + "loss": 1.7477, + "step": 892 + }, + { + "epoch": 0.08101977862456905, + "grad_norm": 0.20942126548451198, + "learning_rate": 0.000993197188260148, + "loss": 1.7597, + "step": 893 + }, + { + "epoch": 0.08111050626020685, + "grad_norm": 0.2095424418571953, + "learning_rate": 
0.000993173012719279, + "loss": 1.8144, + "step": 894 + }, + { + "epoch": 0.08120123389584467, + "grad_norm": 0.21900585918443521, + "learning_rate": 0.0009931487945928472, + "loss": 1.6909, + "step": 895 + }, + { + "epoch": 0.08129196153148249, + "grad_norm": 0.20184866001757418, + "learning_rate": 0.0009931245338829448, + "loss": 1.7834, + "step": 896 + }, + { + "epoch": 0.08138268916712031, + "grad_norm": 0.21024315304520952, + "learning_rate": 0.0009931002305916658, + "loss": 1.8067, + "step": 897 + }, + { + "epoch": 0.08147341680275812, + "grad_norm": 0.20792604059220487, + "learning_rate": 0.0009930758847211095, + "loss": 1.735, + "step": 898 + }, + { + "epoch": 0.08156414443839594, + "grad_norm": 0.20832953953409006, + "learning_rate": 0.0009930514962733776, + "loss": 1.7547, + "step": 899 + }, + { + "epoch": 0.08165487207403375, + "grad_norm": 0.2167909008185766, + "learning_rate": 0.0009930270652505767, + "loss": 1.7245, + "step": 900 + }, + { + "epoch": 0.08174559970967156, + "grad_norm": 0.20903633516355685, + "learning_rate": 0.0009930025916548158, + "loss": 1.7071, + "step": 901 + }, + { + "epoch": 0.08183632734530938, + "grad_norm": 0.20271757982435243, + "learning_rate": 0.0009929780754882086, + "loss": 1.7392, + "step": 902 + }, + { + "epoch": 0.0819270549809472, + "grad_norm": 0.21847878691328274, + "learning_rate": 0.0009929535167528718, + "loss": 1.7628, + "step": 903 + }, + { + "epoch": 0.08201778261658502, + "grad_norm": 0.19208737771349038, + "learning_rate": 0.0009929289154509261, + "loss": 1.725, + "step": 904 + }, + { + "epoch": 0.08210851025222282, + "grad_norm": 0.19853195067141952, + "learning_rate": 0.000992904271584496, + "loss": 1.7267, + "step": 905 + }, + { + "epoch": 0.08219923788786064, + "grad_norm": 0.2033224905102553, + "learning_rate": 0.0009928795851557096, + "loss": 1.6999, + "step": 906 + }, + { + "epoch": 0.08228996552349846, + "grad_norm": 0.195219110291693, + "learning_rate": 0.0009928548561666981, + "loss": 1.7373, + "step": 907 + }, + { + "epoch": 0.08238069315913628, + "grad_norm": 0.21140775044226154, + "learning_rate": 0.0009928300846195972, + "loss": 1.7709, + "step": 908 + }, + { + "epoch": 0.08247142079477408, + "grad_norm": 0.2006471148939041, + "learning_rate": 0.0009928052705165458, + "loss": 1.7604, + "step": 909 + }, + { + "epoch": 0.0825621484304119, + "grad_norm": 0.20425323483500343, + "learning_rate": 0.0009927804138596867, + "loss": 1.8011, + "step": 910 + }, + { + "epoch": 0.08265287606604972, + "grad_norm": 0.19827792223368823, + "learning_rate": 0.0009927555146511664, + "loss": 1.7884, + "step": 911 + }, + { + "epoch": 0.08274360370168754, + "grad_norm": 0.1881387726118377, + "learning_rate": 0.0009927305728931347, + "loss": 1.67, + "step": 912 + }, + { + "epoch": 0.08283433133732535, + "grad_norm": 0.19737106069575538, + "learning_rate": 0.0009927055885877454, + "loss": 1.7701, + "step": 913 + }, + { + "epoch": 0.08292505897296316, + "grad_norm": 0.1995982092534841, + "learning_rate": 0.0009926805617371556, + "loss": 1.7136, + "step": 914 + }, + { + "epoch": 0.08301578660860098, + "grad_norm": 0.2007931523091614, + "learning_rate": 0.0009926554923435271, + "loss": 1.7438, + "step": 915 + }, + { + "epoch": 0.08310651424423879, + "grad_norm": 0.20443609609612903, + "learning_rate": 0.0009926303804090241, + "loss": 1.7538, + "step": 916 + }, + { + "epoch": 0.08319724187987661, + "grad_norm": 0.2020270798176484, + "learning_rate": 0.0009926052259358151, + "loss": 1.7718, + "step": 917 + }, + { + "epoch": 0.08328796951551443, + 
"grad_norm": 0.18638394512297715, + "learning_rate": 0.0009925800289260723, + "loss": 1.7458, + "step": 918 + }, + { + "epoch": 0.08337869715115225, + "grad_norm": 0.19355567072751878, + "learning_rate": 0.0009925547893819713, + "loss": 1.7716, + "step": 919 + }, + { + "epoch": 0.08346942478679005, + "grad_norm": 0.20246202008961417, + "learning_rate": 0.0009925295073056918, + "loss": 1.7628, + "step": 920 + }, + { + "epoch": 0.08356015242242787, + "grad_norm": 0.19990154974963512, + "learning_rate": 0.0009925041826994167, + "loss": 1.831, + "step": 921 + }, + { + "epoch": 0.08365088005806569, + "grad_norm": 0.187713355819524, + "learning_rate": 0.000992478815565333, + "loss": 1.7513, + "step": 922 + }, + { + "epoch": 0.08374160769370351, + "grad_norm": 0.20065988326729395, + "learning_rate": 0.0009924534059056306, + "loss": 1.7348, + "step": 923 + }, + { + "epoch": 0.08383233532934131, + "grad_norm": 0.19841362190450076, + "learning_rate": 0.0009924279537225043, + "loss": 1.7512, + "step": 924 + }, + { + "epoch": 0.08392306296497913, + "grad_norm": 0.20519496727457548, + "learning_rate": 0.0009924024590181515, + "loss": 1.7444, + "step": 925 + }, + { + "epoch": 0.08401379060061695, + "grad_norm": 0.19264889889699202, + "learning_rate": 0.0009923769217947739, + "loss": 1.777, + "step": 926 + }, + { + "epoch": 0.08410451823625477, + "grad_norm": 0.1990349823769215, + "learning_rate": 0.0009923513420545763, + "loss": 1.7656, + "step": 927 + }, + { + "epoch": 0.08419524587189257, + "grad_norm": 0.19949390352178267, + "learning_rate": 0.000992325719799768, + "loss": 1.7178, + "step": 928 + }, + { + "epoch": 0.08428597350753039, + "grad_norm": 0.1991842714371146, + "learning_rate": 0.000992300055032561, + "loss": 1.7317, + "step": 929 + }, + { + "epoch": 0.08437670114316821, + "grad_norm": 0.20332002623316575, + "learning_rate": 0.000992274347755172, + "loss": 1.7419, + "step": 930 + }, + { + "epoch": 0.08446742877880603, + "grad_norm": 0.19318678957955232, + "learning_rate": 0.0009922485979698202, + "loss": 1.7706, + "step": 931 + }, + { + "epoch": 0.08455815641444384, + "grad_norm": 0.2098307965949441, + "learning_rate": 0.0009922228056787295, + "loss": 1.7285, + "step": 932 + }, + { + "epoch": 0.08464888405008165, + "grad_norm": 0.19008277789171005, + "learning_rate": 0.000992196970884127, + "loss": 1.7295, + "step": 933 + }, + { + "epoch": 0.08473961168571947, + "grad_norm": 0.1983945087374935, + "learning_rate": 0.0009921710935882437, + "loss": 1.7734, + "step": 934 + }, + { + "epoch": 0.08483033932135728, + "grad_norm": 0.19385263771828137, + "learning_rate": 0.0009921451737933138, + "loss": 1.737, + "step": 935 + }, + { + "epoch": 0.0849210669569951, + "grad_norm": 0.1889548135283739, + "learning_rate": 0.0009921192115015757, + "loss": 1.6931, + "step": 936 + }, + { + "epoch": 0.08501179459263292, + "grad_norm": 0.209527510318505, + "learning_rate": 0.000992093206715271, + "loss": 1.7434, + "step": 937 + }, + { + "epoch": 0.08510252222827074, + "grad_norm": 0.18860897637430554, + "learning_rate": 0.0009920671594366454, + "loss": 1.7126, + "step": 938 + }, + { + "epoch": 0.08519324986390854, + "grad_norm": 0.20100060671015707, + "learning_rate": 0.000992041069667948, + "loss": 1.762, + "step": 939 + }, + { + "epoch": 0.08528397749954636, + "grad_norm": 0.19094156663833275, + "learning_rate": 0.000992014937411432, + "loss": 1.7716, + "step": 940 + }, + { + "epoch": 0.08537470513518418, + "grad_norm": 0.19911457083257328, + "learning_rate": 0.0009919887626693533, + "loss": 1.7395, + "step": 
941 + }, + { + "epoch": 0.085465432770822, + "grad_norm": 0.19269733594134789, + "learning_rate": 0.0009919625454439726, + "loss": 1.7472, + "step": 942 + }, + { + "epoch": 0.0855561604064598, + "grad_norm": 0.19003623455818075, + "learning_rate": 0.0009919362857375535, + "loss": 1.7135, + "step": 943 + }, + { + "epoch": 0.08564688804209762, + "grad_norm": 0.20775023522811714, + "learning_rate": 0.000991909983552364, + "loss": 1.7266, + "step": 944 + }, + { + "epoch": 0.08573761567773544, + "grad_norm": 0.1906809992326712, + "learning_rate": 0.0009918836388906744, + "loss": 1.7431, + "step": 945 + }, + { + "epoch": 0.08582834331337326, + "grad_norm": 0.19424007124997877, + "learning_rate": 0.0009918572517547603, + "loss": 1.7413, + "step": 946 + }, + { + "epoch": 0.08591907094901106, + "grad_norm": 0.19121292728893616, + "learning_rate": 0.0009918308221469002, + "loss": 1.7637, + "step": 947 + }, + { + "epoch": 0.08600979858464888, + "grad_norm": 0.19254111365330726, + "learning_rate": 0.0009918043500693758, + "loss": 1.7687, + "step": 948 + }, + { + "epoch": 0.0861005262202867, + "grad_norm": 0.18504763053184017, + "learning_rate": 0.0009917778355244735, + "loss": 1.7285, + "step": 949 + }, + { + "epoch": 0.08619125385592452, + "grad_norm": 0.19189349595252245, + "learning_rate": 0.0009917512785144824, + "loss": 1.7674, + "step": 950 + }, + { + "epoch": 0.08628198149156233, + "grad_norm": 0.18350422853178494, + "learning_rate": 0.0009917246790416962, + "loss": 1.7758, + "step": 951 + }, + { + "epoch": 0.08637270912720015, + "grad_norm": 0.1933561044106444, + "learning_rate": 0.0009916980371084112, + "loss": 1.7255, + "step": 952 + }, + { + "epoch": 0.08646343676283796, + "grad_norm": 0.1941809538569071, + "learning_rate": 0.0009916713527169285, + "loss": 1.7043, + "step": 953 + }, + { + "epoch": 0.08655416439847577, + "grad_norm": 0.19946300158540783, + "learning_rate": 0.000991644625869552, + "loss": 1.7309, + "step": 954 + }, + { + "epoch": 0.08664489203411359, + "grad_norm": 0.19806285122954834, + "learning_rate": 0.0009916178565685895, + "loss": 1.716, + "step": 955 + }, + { + "epoch": 0.08673561966975141, + "grad_norm": 0.19133294383683755, + "learning_rate": 0.0009915910448163525, + "loss": 1.7239, + "step": 956 + }, + { + "epoch": 0.08682634730538923, + "grad_norm": 0.19734737052349313, + "learning_rate": 0.0009915641906151566, + "loss": 1.7271, + "step": 957 + }, + { + "epoch": 0.08691707494102703, + "grad_norm": 0.20496816919889133, + "learning_rate": 0.0009915372939673201, + "loss": 1.7239, + "step": 958 + }, + { + "epoch": 0.08700780257666485, + "grad_norm": 0.20230544415971485, + "learning_rate": 0.000991510354875166, + "loss": 1.7441, + "step": 959 + }, + { + "epoch": 0.08709853021230267, + "grad_norm": 0.20452945292298907, + "learning_rate": 0.0009914833733410205, + "loss": 1.7454, + "step": 960 + }, + { + "epoch": 0.08718925784794049, + "grad_norm": 0.19396054036777838, + "learning_rate": 0.000991456349367213, + "loss": 1.688, + "step": 961 + }, + { + "epoch": 0.08727998548357829, + "grad_norm": 0.21736560033248586, + "learning_rate": 0.0009914292829560775, + "loss": 1.7175, + "step": 962 + }, + { + "epoch": 0.08737071311921611, + "grad_norm": 0.19523010693873746, + "learning_rate": 0.0009914021741099508, + "loss": 1.695, + "step": 963 + }, + { + "epoch": 0.08746144075485393, + "grad_norm": 0.1975924850978107, + "learning_rate": 0.000991375022831174, + "loss": 1.7313, + "step": 964 + }, + { + "epoch": 0.08755216839049175, + "grad_norm": 0.1966412538889214, + 
"learning_rate": 0.0009913478291220914, + "loss": 1.7439, + "step": 965 + }, + { + "epoch": 0.08764289602612955, + "grad_norm": 0.1948093567714034, + "learning_rate": 0.0009913205929850514, + "loss": 1.768, + "step": 966 + }, + { + "epoch": 0.08773362366176737, + "grad_norm": 0.19343233284925526, + "learning_rate": 0.0009912933144224062, + "loss": 1.8004, + "step": 967 + }, + { + "epoch": 0.08782435129740519, + "grad_norm": 0.19602355205772434, + "learning_rate": 0.0009912659934365104, + "loss": 1.7459, + "step": 968 + }, + { + "epoch": 0.08791507893304301, + "grad_norm": 0.1928733545406515, + "learning_rate": 0.000991238630029724, + "loss": 1.7558, + "step": 969 + }, + { + "epoch": 0.08800580656868082, + "grad_norm": 0.19378830023942958, + "learning_rate": 0.000991211224204409, + "loss": 1.7221, + "step": 970 + }, + { + "epoch": 0.08809653420431864, + "grad_norm": 0.18433337422759985, + "learning_rate": 0.0009911837759629329, + "loss": 1.7333, + "step": 971 + }, + { + "epoch": 0.08818726183995645, + "grad_norm": 0.19474490317620485, + "learning_rate": 0.0009911562853076653, + "loss": 1.7273, + "step": 972 + }, + { + "epoch": 0.08827798947559426, + "grad_norm": 0.1976085496541992, + "learning_rate": 0.0009911287522409798, + "loss": 1.721, + "step": 973 + }, + { + "epoch": 0.08836871711123208, + "grad_norm": 0.19692137546544816, + "learning_rate": 0.0009911011767652544, + "loss": 1.7382, + "step": 974 + }, + { + "epoch": 0.0884594447468699, + "grad_norm": 0.18997157614950602, + "learning_rate": 0.00099107355888287, + "loss": 1.7211, + "step": 975 + }, + { + "epoch": 0.08855017238250772, + "grad_norm": 0.19391216600844113, + "learning_rate": 0.0009910458985962112, + "loss": 1.7091, + "step": 976 + }, + { + "epoch": 0.08864090001814552, + "grad_norm": 0.19305331916681712, + "learning_rate": 0.0009910181959076668, + "loss": 1.7366, + "step": 977 + }, + { + "epoch": 0.08873162765378334, + "grad_norm": 0.19405189879726592, + "learning_rate": 0.0009909904508196288, + "loss": 1.7402, + "step": 978 + }, + { + "epoch": 0.08882235528942116, + "grad_norm": 0.1926697047740414, + "learning_rate": 0.000990962663334493, + "loss": 1.7808, + "step": 979 + }, + { + "epoch": 0.08891308292505898, + "grad_norm": 0.20867562162956418, + "learning_rate": 0.0009909348334546588, + "loss": 1.7622, + "step": 980 + }, + { + "epoch": 0.08900381056069678, + "grad_norm": 0.18528103496158407, + "learning_rate": 0.0009909069611825296, + "loss": 1.724, + "step": 981 + }, + { + "epoch": 0.0890945381963346, + "grad_norm": 0.18776828213820693, + "learning_rate": 0.0009908790465205117, + "loss": 1.7702, + "step": 982 + }, + { + "epoch": 0.08918526583197242, + "grad_norm": 0.188842418868517, + "learning_rate": 0.000990851089471016, + "loss": 1.7012, + "step": 983 + }, + { + "epoch": 0.08927599346761024, + "grad_norm": 0.22738932660755587, + "learning_rate": 0.0009908230900364564, + "loss": 1.6948, + "step": 984 + }, + { + "epoch": 0.08936672110324805, + "grad_norm": 0.1934197725530949, + "learning_rate": 0.0009907950482192505, + "loss": 1.7405, + "step": 985 + }, + { + "epoch": 0.08945744873888586, + "grad_norm": 0.19042866879262674, + "learning_rate": 0.0009907669640218197, + "loss": 1.7658, + "step": 986 + }, + { + "epoch": 0.08954817637452368, + "grad_norm": 0.19645007986462426, + "learning_rate": 0.0009907388374465894, + "loss": 1.7708, + "step": 987 + }, + { + "epoch": 0.0896389040101615, + "grad_norm": 0.20895519582828206, + "learning_rate": 0.000990710668495988, + "loss": 1.7384, + "step": 988 + }, + { + "epoch": 
0.08972963164579931, + "grad_norm": 0.18613346756875493, + "learning_rate": 0.0009906824571724484, + "loss": 1.7297, + "step": 989 + }, + { + "epoch": 0.08982035928143713, + "grad_norm": 0.19390453637614186, + "learning_rate": 0.000990654203478406, + "loss": 1.7435, + "step": 990 + }, + { + "epoch": 0.08991108691707494, + "grad_norm": 0.1912032216797838, + "learning_rate": 0.000990625907416301, + "loss": 1.7327, + "step": 991 + }, + { + "epoch": 0.09000181455271275, + "grad_norm": 0.20899419649534856, + "learning_rate": 0.0009905975689885765, + "loss": 1.7115, + "step": 992 + }, + { + "epoch": 0.09009254218835057, + "grad_norm": 0.19698994211061313, + "learning_rate": 0.0009905691881976796, + "loss": 1.7254, + "step": 993 + }, + { + "epoch": 0.09018326982398839, + "grad_norm": 0.20140728498537847, + "learning_rate": 0.000990540765046061, + "loss": 1.7527, + "step": 994 + }, + { + "epoch": 0.0902739974596262, + "grad_norm": 0.2059140268798838, + "learning_rate": 0.000990512299536175, + "loss": 1.741, + "step": 995 + }, + { + "epoch": 0.09036472509526401, + "grad_norm": 0.19529489750040957, + "learning_rate": 0.00099048379167048, + "loss": 1.7254, + "step": 996 + }, + { + "epoch": 0.09045545273090183, + "grad_norm": 0.19522271104877637, + "learning_rate": 0.0009904552414514366, + "loss": 1.7293, + "step": 997 + }, + { + "epoch": 0.09054618036653965, + "grad_norm": 0.17517266816004542, + "learning_rate": 0.0009904266488815114, + "loss": 1.7027, + "step": 998 + }, + { + "epoch": 0.09063690800217747, + "grad_norm": 0.20706696843704628, + "learning_rate": 0.0009903980139631726, + "loss": 1.786, + "step": 999 + }, + { + "epoch": 0.09072763563781527, + "grad_norm": 0.18663373398979893, + "learning_rate": 0.000990369336698893, + "loss": 1.7289, + "step": 1000 + }, + { + "epoch": 0.09081836327345309, + "grad_norm": 0.18571132138211544, + "learning_rate": 0.000990340617091149, + "loss": 1.7414, + "step": 1001 + }, + { + "epoch": 0.09090909090909091, + "grad_norm": 0.1916528619440021, + "learning_rate": 0.0009903118551424202, + "loss": 1.7296, + "step": 1002 + }, + { + "epoch": 0.09099981854472873, + "grad_norm": 0.19390811732545235, + "learning_rate": 0.0009902830508551907, + "loss": 1.7419, + "step": 1003 + }, + { + "epoch": 0.09109054618036654, + "grad_norm": 0.19732083639955755, + "learning_rate": 0.0009902542042319474, + "loss": 1.7276, + "step": 1004 + }, + { + "epoch": 0.09118127381600435, + "grad_norm": 0.20151147413822668, + "learning_rate": 0.0009902253152751811, + "loss": 1.7445, + "step": 1005 + }, + { + "epoch": 0.09127200145164217, + "grad_norm": 0.18328276513808242, + "learning_rate": 0.000990196383987387, + "loss": 1.7829, + "step": 1006 + }, + { + "epoch": 0.09136272908727998, + "grad_norm": 0.1795539547551327, + "learning_rate": 0.0009901674103710626, + "loss": 1.7817, + "step": 1007 + }, + { + "epoch": 0.0914534567229178, + "grad_norm": 0.20001244661312936, + "learning_rate": 0.0009901383944287102, + "loss": 1.764, + "step": 1008 + }, + { + "epoch": 0.09154418435855562, + "grad_norm": 0.18251930842675948, + "learning_rate": 0.000990109336162835, + "loss": 1.7043, + "step": 1009 + }, + { + "epoch": 0.09163491199419344, + "grad_norm": 0.1911414729104055, + "learning_rate": 0.0009900802355759467, + "loss": 1.7845, + "step": 1010 + }, + { + "epoch": 0.09172563962983124, + "grad_norm": 0.1974135908717924, + "learning_rate": 0.0009900510926705577, + "loss": 1.7687, + "step": 1011 + }, + { + "epoch": 0.09181636726546906, + "grad_norm": 0.18682726196158714, + "learning_rate": 
0.0009900219074491846, + "loss": 1.7285, + "step": 1012 + }, + { + "epoch": 0.09190709490110688, + "grad_norm": 0.19363983480305755, + "learning_rate": 0.0009899926799143476, + "loss": 1.7467, + "step": 1013 + }, + { + "epoch": 0.0919978225367447, + "grad_norm": 0.19256853253389175, + "learning_rate": 0.0009899634100685704, + "loss": 1.7582, + "step": 1014 + }, + { + "epoch": 0.0920885501723825, + "grad_norm": 0.18847194276237375, + "learning_rate": 0.0009899340979143804, + "loss": 1.7118, + "step": 1015 + }, + { + "epoch": 0.09217927780802032, + "grad_norm": 0.19718870536331468, + "learning_rate": 0.0009899047434543092, + "loss": 1.7251, + "step": 1016 + }, + { + "epoch": 0.09227000544365814, + "grad_norm": 0.18453905587701375, + "learning_rate": 0.000989875346690891, + "loss": 1.6905, + "step": 1017 + }, + { + "epoch": 0.09236073307929596, + "grad_norm": 0.1849723088031825, + "learning_rate": 0.0009898459076266642, + "loss": 1.7762, + "step": 1018 + }, + { + "epoch": 0.09245146071493376, + "grad_norm": 0.18918394838731273, + "learning_rate": 0.0009898164262641714, + "loss": 1.7322, + "step": 1019 + }, + { + "epoch": 0.09254218835057158, + "grad_norm": 0.1857541855823339, + "learning_rate": 0.0009897869026059577, + "loss": 1.7385, + "step": 1020 + }, + { + "epoch": 0.0926329159862094, + "grad_norm": 0.18154399553892459, + "learning_rate": 0.000989757336654573, + "loss": 1.7325, + "step": 1021 + }, + { + "epoch": 0.09272364362184722, + "grad_norm": 0.1834810358370309, + "learning_rate": 0.0009897277284125702, + "loss": 1.7286, + "step": 1022 + }, + { + "epoch": 0.09281437125748503, + "grad_norm": 0.19133783413672217, + "learning_rate": 0.0009896980778825058, + "loss": 1.7255, + "step": 1023 + }, + { + "epoch": 0.09290509889312284, + "grad_norm": 0.18595055852211562, + "learning_rate": 0.0009896683850669401, + "loss": 1.7381, + "step": 1024 + }, + { + "epoch": 0.09299582652876066, + "grad_norm": 0.19318329599635645, + "learning_rate": 0.000989638649968437, + "loss": 1.7875, + "step": 1025 + }, + { + "epoch": 0.09308655416439847, + "grad_norm": 0.19373212121591096, + "learning_rate": 0.0009896088725895647, + "loss": 1.7437, + "step": 1026 + }, + { + "epoch": 0.09317728180003629, + "grad_norm": 0.17744867081206278, + "learning_rate": 0.000989579052932894, + "loss": 1.7109, + "step": 1027 + }, + { + "epoch": 0.0932680094356741, + "grad_norm": 0.18478796864928818, + "learning_rate": 0.0009895491910009997, + "loss": 1.7516, + "step": 1028 + }, + { + "epoch": 0.09335873707131193, + "grad_norm": 0.18421970254148085, + "learning_rate": 0.0009895192867964608, + "loss": 1.7579, + "step": 1029 + }, + { + "epoch": 0.09344946470694973, + "grad_norm": 0.19398495064221424, + "learning_rate": 0.0009894893403218593, + "loss": 1.792, + "step": 1030 + }, + { + "epoch": 0.09354019234258755, + "grad_norm": 0.18264516580772866, + "learning_rate": 0.0009894593515797812, + "loss": 1.7275, + "step": 1031 + }, + { + "epoch": 0.09363091997822537, + "grad_norm": 0.1762847649866077, + "learning_rate": 0.0009894293205728157, + "loss": 1.7565, + "step": 1032 + }, + { + "epoch": 0.09372164761386319, + "grad_norm": 0.1994953581166063, + "learning_rate": 0.0009893992473035563, + "loss": 1.7671, + "step": 1033 + }, + { + "epoch": 0.09381237524950099, + "grad_norm": 0.19318003883806859, + "learning_rate": 0.0009893691317745998, + "loss": 1.7542, + "step": 1034 + }, + { + "epoch": 0.09390310288513881, + "grad_norm": 0.18871570620834247, + "learning_rate": 0.0009893389739885467, + "loss": 1.7195, + "step": 1035 + }, + { + 
"epoch": 0.09399383052077663, + "grad_norm": 0.18715650321624708, + "learning_rate": 0.0009893087739480011, + "loss": 1.7085, + "step": 1036 + }, + { + "epoch": 0.09408455815641445, + "grad_norm": 0.18617134316653447, + "learning_rate": 0.0009892785316555708, + "loss": 1.7277, + "step": 1037 + }, + { + "epoch": 0.09417528579205225, + "grad_norm": 0.187020466118774, + "learning_rate": 0.000989248247113867, + "loss": 1.7572, + "step": 1038 + }, + { + "epoch": 0.09426601342769007, + "grad_norm": 0.18146968327440038, + "learning_rate": 0.0009892179203255052, + "loss": 1.7499, + "step": 1039 + }, + { + "epoch": 0.09435674106332789, + "grad_norm": 0.1871241251277369, + "learning_rate": 0.0009891875512931037, + "loss": 1.6962, + "step": 1040 + }, + { + "epoch": 0.09444746869896571, + "grad_norm": 0.18340505624621598, + "learning_rate": 0.000989157140019285, + "loss": 1.7358, + "step": 1041 + }, + { + "epoch": 0.09453819633460352, + "grad_norm": 0.18468691635664197, + "learning_rate": 0.0009891266865066752, + "loss": 1.711, + "step": 1042 + }, + { + "epoch": 0.09462892397024134, + "grad_norm": 0.1812652097104228, + "learning_rate": 0.0009890961907579041, + "loss": 1.727, + "step": 1043 + }, + { + "epoch": 0.09471965160587915, + "grad_norm": 0.19352799753137434, + "learning_rate": 0.0009890656527756047, + "loss": 1.7628, + "step": 1044 + }, + { + "epoch": 0.09481037924151696, + "grad_norm": 0.17810259189221395, + "learning_rate": 0.0009890350725624143, + "loss": 1.739, + "step": 1045 + }, + { + "epoch": 0.09490110687715478, + "grad_norm": 0.19104304267493546, + "learning_rate": 0.000989004450120973, + "loss": 1.7592, + "step": 1046 + }, + { + "epoch": 0.0949918345127926, + "grad_norm": 0.18609484000728752, + "learning_rate": 0.0009889737854539254, + "loss": 1.7438, + "step": 1047 + }, + { + "epoch": 0.09508256214843042, + "grad_norm": 0.18253679468122358, + "learning_rate": 0.0009889430785639194, + "loss": 1.7329, + "step": 1048 + }, + { + "epoch": 0.09517328978406822, + "grad_norm": 0.1806133807856498, + "learning_rate": 0.0009889123294536068, + "loss": 1.6996, + "step": 1049 + }, + { + "epoch": 0.09526401741970604, + "grad_norm": 0.1860944528925207, + "learning_rate": 0.0009888815381256422, + "loss": 1.7019, + "step": 1050 + }, + { + "epoch": 0.09535474505534386, + "grad_norm": 0.17588218480152118, + "learning_rate": 0.0009888507045826846, + "loss": 1.7281, + "step": 1051 + }, + { + "epoch": 0.09544547269098168, + "grad_norm": 0.18514187022300063, + "learning_rate": 0.0009888198288273968, + "loss": 1.7512, + "step": 1052 + }, + { + "epoch": 0.09553620032661948, + "grad_norm": 0.17879669704310872, + "learning_rate": 0.000988788910862445, + "loss": 1.7254, + "step": 1053 + }, + { + "epoch": 0.0956269279622573, + "grad_norm": 0.1782348208551713, + "learning_rate": 0.0009887579506904982, + "loss": 1.7434, + "step": 1054 + }, + { + "epoch": 0.09571765559789512, + "grad_norm": 0.17504981762436558, + "learning_rate": 0.0009887269483142304, + "loss": 1.7129, + "step": 1055 + }, + { + "epoch": 0.09580838323353294, + "grad_norm": 0.18286701739225147, + "learning_rate": 0.0009886959037363188, + "loss": 1.7287, + "step": 1056 + }, + { + "epoch": 0.09589911086917075, + "grad_norm": 0.1860649765501083, + "learning_rate": 0.0009886648169594439, + "loss": 1.7076, + "step": 1057 + }, + { + "epoch": 0.09598983850480856, + "grad_norm": 0.1857687514066898, + "learning_rate": 0.00098863368798629, + "loss": 1.7269, + "step": 1058 + }, + { + "epoch": 0.09608056614044638, + "grad_norm": 0.18425330252299765, + 
"learning_rate": 0.000988602516819545, + "loss": 1.682, + "step": 1059 + }, + { + "epoch": 0.0961712937760842, + "grad_norm": 0.18658839177001976, + "learning_rate": 0.0009885713034619007, + "loss": 1.694, + "step": 1060 + }, + { + "epoch": 0.096262021411722, + "grad_norm": 0.1782743422210229, + "learning_rate": 0.0009885400479160524, + "loss": 1.717, + "step": 1061 + }, + { + "epoch": 0.09635274904735983, + "grad_norm": 0.19089337100939888, + "learning_rate": 0.000988508750184699, + "loss": 1.719, + "step": 1062 + }, + { + "epoch": 0.09644347668299764, + "grad_norm": 0.1778528186454166, + "learning_rate": 0.000988477410270543, + "loss": 1.7532, + "step": 1063 + }, + { + "epoch": 0.09653420431863545, + "grad_norm": 0.18369211031455118, + "learning_rate": 0.0009884460281762905, + "loss": 1.7181, + "step": 1064 + }, + { + "epoch": 0.09662493195427327, + "grad_norm": 0.18059103887768388, + "learning_rate": 0.0009884146039046515, + "loss": 1.7471, + "step": 1065 + }, + { + "epoch": 0.09671565958991109, + "grad_norm": 0.1805942170055446, + "learning_rate": 0.0009883831374583396, + "loss": 1.683, + "step": 1066 + }, + { + "epoch": 0.0968063872255489, + "grad_norm": 0.19051019712602968, + "learning_rate": 0.0009883516288400718, + "loss": 1.7156, + "step": 1067 + }, + { + "epoch": 0.09689711486118671, + "grad_norm": 0.18927813931282197, + "learning_rate": 0.0009883200780525687, + "loss": 1.7451, + "step": 1068 + }, + { + "epoch": 0.09698784249682453, + "grad_norm": 0.17742327344589437, + "learning_rate": 0.000988288485098555, + "loss": 1.782, + "step": 1069 + }, + { + "epoch": 0.09707857013246235, + "grad_norm": 0.18943534120555508, + "learning_rate": 0.0009882568499807586, + "loss": 1.7774, + "step": 1070 + }, + { + "epoch": 0.09716929776810017, + "grad_norm": 0.20156944560861298, + "learning_rate": 0.0009882251727019113, + "loss": 1.7912, + "step": 1071 + }, + { + "epoch": 0.09726002540373797, + "grad_norm": 0.20425417177459262, + "learning_rate": 0.0009881934532647483, + "loss": 1.756, + "step": 1072 + }, + { + "epoch": 0.09735075303937579, + "grad_norm": 0.187149859079739, + "learning_rate": 0.0009881616916720087, + "loss": 1.7019, + "step": 1073 + }, + { + "epoch": 0.09744148067501361, + "grad_norm": 0.18801687198179215, + "learning_rate": 0.0009881298879264352, + "loss": 1.7275, + "step": 1074 + }, + { + "epoch": 0.09753220831065143, + "grad_norm": 0.18483084342404893, + "learning_rate": 0.0009880980420307738, + "loss": 1.7769, + "step": 1075 + }, + { + "epoch": 0.09762293594628924, + "grad_norm": 0.185815822944051, + "learning_rate": 0.0009880661539877745, + "loss": 1.718, + "step": 1076 + }, + { + "epoch": 0.09771366358192705, + "grad_norm": 0.18173652297341594, + "learning_rate": 0.0009880342238001909, + "loss": 1.7685, + "step": 1077 + }, + { + "epoch": 0.09780439121756487, + "grad_norm": 0.1902746126224888, + "learning_rate": 0.0009880022514707804, + "loss": 1.6978, + "step": 1078 + }, + { + "epoch": 0.09789511885320269, + "grad_norm": 0.185120129595132, + "learning_rate": 0.000987970237002303, + "loss": 1.7843, + "step": 1079 + }, + { + "epoch": 0.0979858464888405, + "grad_norm": 0.18192049683356198, + "learning_rate": 0.0009879381803975242, + "loss": 1.762, + "step": 1080 + }, + { + "epoch": 0.09807657412447832, + "grad_norm": 0.1957815417774691, + "learning_rate": 0.0009879060816592115, + "loss": 1.7542, + "step": 1081 + }, + { + "epoch": 0.09816730176011614, + "grad_norm": 0.18279485088254746, + "learning_rate": 0.0009878739407901368, + "loss": 1.709, + "step": 1082 + }, + { + 
"epoch": 0.09825802939575394, + "grad_norm": 0.18004161477675595, + "learning_rate": 0.0009878417577930752, + "loss": 1.7142, + "step": 1083 + }, + { + "epoch": 0.09834875703139176, + "grad_norm": 0.18049727582630457, + "learning_rate": 0.000987809532670806, + "loss": 1.7288, + "step": 1084 + }, + { + "epoch": 0.09843948466702958, + "grad_norm": 0.19562156816245593, + "learning_rate": 0.0009877772654261119, + "loss": 1.7952, + "step": 1085 + }, + { + "epoch": 0.0985302123026674, + "grad_norm": 0.17268432437298203, + "learning_rate": 0.0009877449560617788, + "loss": 1.7687, + "step": 1086 + }, + { + "epoch": 0.0986209399383052, + "grad_norm": 0.18634539326114644, + "learning_rate": 0.0009877126045805971, + "loss": 1.7225, + "step": 1087 + }, + { + "epoch": 0.09871166757394302, + "grad_norm": 0.17913474835662205, + "learning_rate": 0.00098768021098536, + "loss": 1.7471, + "step": 1088 + }, + { + "epoch": 0.09880239520958084, + "grad_norm": 0.19322528258058833, + "learning_rate": 0.0009876477752788647, + "loss": 1.7079, + "step": 1089 + }, + { + "epoch": 0.09889312284521866, + "grad_norm": 0.18647105726062216, + "learning_rate": 0.0009876152974639123, + "loss": 1.7064, + "step": 1090 + }, + { + "epoch": 0.09898385048085646, + "grad_norm": 0.187366385354363, + "learning_rate": 0.000987582777543307, + "loss": 1.7186, + "step": 1091 + }, + { + "epoch": 0.09907457811649428, + "grad_norm": 0.1914081246697726, + "learning_rate": 0.000987550215519857, + "loss": 1.7249, + "step": 1092 + }, + { + "epoch": 0.0991653057521321, + "grad_norm": 0.17796506075074092, + "learning_rate": 0.0009875176113963739, + "loss": 1.6511, + "step": 1093 + }, + { + "epoch": 0.09925603338776992, + "grad_norm": 0.18150758094133226, + "learning_rate": 0.0009874849651756734, + "loss": 1.7846, + "step": 1094 + }, + { + "epoch": 0.09934676102340773, + "grad_norm": 0.17693201336281544, + "learning_rate": 0.0009874522768605744, + "loss": 1.7075, + "step": 1095 + }, + { + "epoch": 0.09943748865904554, + "grad_norm": 0.16848529145705407, + "learning_rate": 0.0009874195464538993, + "loss": 1.7896, + "step": 1096 + }, + { + "epoch": 0.09952821629468336, + "grad_norm": 0.18188824005611437, + "learning_rate": 0.0009873867739584746, + "loss": 1.7448, + "step": 1097 + }, + { + "epoch": 0.09961894393032118, + "grad_norm": 0.1763747905018434, + "learning_rate": 0.00098735395937713, + "loss": 1.7032, + "step": 1098 + }, + { + "epoch": 0.09970967156595899, + "grad_norm": 0.17964115634419583, + "learning_rate": 0.0009873211027126992, + "loss": 1.7302, + "step": 1099 + }, + { + "epoch": 0.0998003992015968, + "grad_norm": 0.17849750909865406, + "learning_rate": 0.0009872882039680194, + "loss": 1.7649, + "step": 1100 + }, + { + "epoch": 0.09989112683723463, + "grad_norm": 0.1796687984256201, + "learning_rate": 0.0009872552631459315, + "loss": 1.7162, + "step": 1101 + }, + { + "epoch": 0.09998185447287243, + "grad_norm": 0.1844308399524796, + "learning_rate": 0.0009872222802492796, + "loss": 1.7435, + "step": 1102 + }, + { + "epoch": 0.10007258210851025, + "grad_norm": 0.18178240544701202, + "learning_rate": 0.0009871892552809122, + "loss": 1.7571, + "step": 1103 + }, + { + "epoch": 0.10016330974414807, + "grad_norm": 0.17392352795275978, + "learning_rate": 0.0009871561882436805, + "loss": 1.7649, + "step": 1104 + }, + { + "epoch": 0.10025403737978589, + "grad_norm": 0.18344567232953918, + "learning_rate": 0.0009871230791404404, + "loss": 1.6959, + "step": 1105 + }, + { + "epoch": 0.10034476501542369, + "grad_norm": 0.17641672530587577, + 
"learning_rate": 0.0009870899279740507, + "loss": 1.7459, + "step": 1106 + }, + { + "epoch": 0.10043549265106151, + "grad_norm": 0.17789814943937604, + "learning_rate": 0.0009870567347473737, + "loss": 1.7323, + "step": 1107 + }, + { + "epoch": 0.10052622028669933, + "grad_norm": 0.18660334181459642, + "learning_rate": 0.000987023499463276, + "loss": 1.7079, + "step": 1108 + }, + { + "epoch": 0.10061694792233715, + "grad_norm": 0.1814221057559884, + "learning_rate": 0.0009869902221246272, + "loss": 1.7299, + "step": 1109 + }, + { + "epoch": 0.10070767555797495, + "grad_norm": 0.18029016950679713, + "learning_rate": 0.0009869569027343011, + "loss": 1.6544, + "step": 1110 + }, + { + "epoch": 0.10079840319361277, + "grad_norm": 0.19917455402303114, + "learning_rate": 0.0009869235412951745, + "loss": 1.6816, + "step": 1111 + }, + { + "epoch": 0.10088913082925059, + "grad_norm": 0.18628504133352766, + "learning_rate": 0.0009868901378101286, + "loss": 1.7418, + "step": 1112 + }, + { + "epoch": 0.10097985846488841, + "grad_norm": 0.17981432370425296, + "learning_rate": 0.0009868566922820474, + "loss": 1.705, + "step": 1113 + }, + { + "epoch": 0.10107058610052622, + "grad_norm": 0.18174132994533906, + "learning_rate": 0.000986823204713819, + "loss": 1.7352, + "step": 1114 + }, + { + "epoch": 0.10116131373616404, + "grad_norm": 0.18071260924357788, + "learning_rate": 0.0009867896751083352, + "loss": 1.7539, + "step": 1115 + }, + { + "epoch": 0.10125204137180185, + "grad_norm": 0.17815047436116632, + "learning_rate": 0.0009867561034684912, + "loss": 1.7304, + "step": 1116 + }, + { + "epoch": 0.10134276900743966, + "grad_norm": 0.1828472502053516, + "learning_rate": 0.000986722489797186, + "loss": 1.714, + "step": 1117 + }, + { + "epoch": 0.10143349664307748, + "grad_norm": 0.18300463379129678, + "learning_rate": 0.000986688834097322, + "loss": 1.7818, + "step": 1118 + }, + { + "epoch": 0.1015242242787153, + "grad_norm": 0.17945513159072335, + "learning_rate": 0.0009866551363718055, + "loss": 1.7103, + "step": 1119 + }, + { + "epoch": 0.10161495191435312, + "grad_norm": 0.18341031640961172, + "learning_rate": 0.000986621396623546, + "loss": 1.7574, + "step": 1120 + }, + { + "epoch": 0.10170567954999092, + "grad_norm": 0.18431500977866191, + "learning_rate": 0.0009865876148554575, + "loss": 1.7531, + "step": 1121 + }, + { + "epoch": 0.10179640718562874, + "grad_norm": 0.17482140754668168, + "learning_rate": 0.0009865537910704566, + "loss": 1.7812, + "step": 1122 + }, + { + "epoch": 0.10188713482126656, + "grad_norm": 0.18963595234638916, + "learning_rate": 0.000986519925271464, + "loss": 1.764, + "step": 1123 + }, + { + "epoch": 0.10197786245690438, + "grad_norm": 0.1828396859259966, + "learning_rate": 0.0009864860174614045, + "loss": 1.7346, + "step": 1124 + }, + { + "epoch": 0.10206859009254218, + "grad_norm": 0.17701184747798432, + "learning_rate": 0.0009864520676432053, + "loss": 1.7306, + "step": 1125 + }, + { + "epoch": 0.10215931772818, + "grad_norm": 0.17902563499313828, + "learning_rate": 0.0009864180758197988, + "loss": 1.7752, + "step": 1126 + }, + { + "epoch": 0.10225004536381782, + "grad_norm": 0.17770692470859292, + "learning_rate": 0.0009863840419941194, + "loss": 1.7852, + "step": 1127 + }, + { + "epoch": 0.10234077299945564, + "grad_norm": 0.17169212684366073, + "learning_rate": 0.0009863499661691064, + "loss": 1.7202, + "step": 1128 + }, + { + "epoch": 0.10243150063509344, + "grad_norm": 0.1819153588405013, + "learning_rate": 0.000986315848347702, + "loss": 1.7354, + "step": 1129 + 
}, + { + "epoch": 0.10252222827073126, + "grad_norm": 0.17425244701683432, + "learning_rate": 0.0009862816885328526, + "loss": 1.7593, + "step": 1130 + }, + { + "epoch": 0.10261295590636908, + "grad_norm": 0.17018691460927532, + "learning_rate": 0.0009862474867275077, + "loss": 1.7072, + "step": 1131 + }, + { + "epoch": 0.1027036835420069, + "grad_norm": 0.17546174686678292, + "learning_rate": 0.0009862132429346207, + "loss": 1.7102, + "step": 1132 + }, + { + "epoch": 0.1027944111776447, + "grad_norm": 0.17315747070782375, + "learning_rate": 0.0009861789571571483, + "loss": 1.702, + "step": 1133 + }, + { + "epoch": 0.10288513881328253, + "grad_norm": 0.17948274793910513, + "learning_rate": 0.0009861446293980517, + "loss": 1.7572, + "step": 1134 + }, + { + "epoch": 0.10297586644892034, + "grad_norm": 0.18514692600095015, + "learning_rate": 0.0009861102596602942, + "loss": 1.7182, + "step": 1135 + }, + { + "epoch": 0.10306659408455815, + "grad_norm": 0.17608030435915073, + "learning_rate": 0.0009860758479468446, + "loss": 1.6965, + "step": 1136 + }, + { + "epoch": 0.10315732172019597, + "grad_norm": 0.17366027437235365, + "learning_rate": 0.0009860413942606736, + "loss": 1.7201, + "step": 1137 + }, + { + "epoch": 0.10324804935583379, + "grad_norm": 0.1757306018695061, + "learning_rate": 0.0009860068986047566, + "loss": 1.8139, + "step": 1138 + }, + { + "epoch": 0.1033387769914716, + "grad_norm": 0.17745308832036333, + "learning_rate": 0.0009859723609820722, + "loss": 1.7169, + "step": 1139 + }, + { + "epoch": 0.10342950462710941, + "grad_norm": 0.17677863360980073, + "learning_rate": 0.000985937781395603, + "loss": 1.6862, + "step": 1140 + }, + { + "epoch": 0.10352023226274723, + "grad_norm": 0.17493476003878686, + "learning_rate": 0.0009859031598483347, + "loss": 1.7518, + "step": 1141 + }, + { + "epoch": 0.10361095989838505, + "grad_norm": 0.17466208696620839, + "learning_rate": 0.0009858684963432568, + "loss": 1.7551, + "step": 1142 + }, + { + "epoch": 0.10370168753402287, + "grad_norm": 0.16871027523745846, + "learning_rate": 0.0009858337908833627, + "loss": 1.7519, + "step": 1143 + }, + { + "epoch": 0.10379241516966067, + "grad_norm": 0.18188942972430444, + "learning_rate": 0.0009857990434716493, + "loss": 1.7453, + "step": 1144 + }, + { + "epoch": 0.10388314280529849, + "grad_norm": 0.17439154996856185, + "learning_rate": 0.0009857642541111168, + "loss": 1.7207, + "step": 1145 + }, + { + "epoch": 0.10397387044093631, + "grad_norm": 0.17417384881287912, + "learning_rate": 0.0009857294228047694, + "loss": 1.7464, + "step": 1146 + }, + { + "epoch": 0.10406459807657413, + "grad_norm": 0.16809203548667082, + "learning_rate": 0.0009856945495556146, + "loss": 1.7068, + "step": 1147 + }, + { + "epoch": 0.10415532571221194, + "grad_norm": 0.17203036688727544, + "learning_rate": 0.0009856596343666639, + "loss": 1.751, + "step": 1148 + }, + { + "epoch": 0.10424605334784975, + "grad_norm": 0.1894045409150221, + "learning_rate": 0.0009856246772409322, + "loss": 1.6816, + "step": 1149 + }, + { + "epoch": 0.10433678098348757, + "grad_norm": 0.17399359002479314, + "learning_rate": 0.0009855896781814381, + "loss": 1.7106, + "step": 1150 + }, + { + "epoch": 0.10442750861912539, + "grad_norm": 0.19065727034333077, + "learning_rate": 0.000985554637191204, + "loss": 1.7145, + "step": 1151 + }, + { + "epoch": 0.1045182362547632, + "grad_norm": 0.1723067327997499, + "learning_rate": 0.000985519554273255, + "loss": 1.7274, + "step": 1152 + }, + { + "epoch": 0.10460896389040102, + "grad_norm": 
0.17523183478728308, + "learning_rate": 0.000985484429430621, + "loss": 1.7153, + "step": 1153 + }, + { + "epoch": 0.10469969152603883, + "grad_norm": 0.16391221754922145, + "learning_rate": 0.000985449262666335, + "loss": 1.7155, + "step": 1154 + }, + { + "epoch": 0.10479041916167664, + "grad_norm": 0.1664371533384921, + "learning_rate": 0.0009854140539834338, + "loss": 1.7598, + "step": 1155 + }, + { + "epoch": 0.10488114679731446, + "grad_norm": 0.17854739526186642, + "learning_rate": 0.0009853788033849574, + "loss": 1.7164, + "step": 1156 + }, + { + "epoch": 0.10497187443295228, + "grad_norm": 0.1755431762691989, + "learning_rate": 0.0009853435108739498, + "loss": 1.7113, + "step": 1157 + }, + { + "epoch": 0.1050626020685901, + "grad_norm": 0.19109041344041489, + "learning_rate": 0.0009853081764534585, + "loss": 1.7278, + "step": 1158 + }, + { + "epoch": 0.1051533297042279, + "grad_norm": 0.1784693985014377, + "learning_rate": 0.0009852728001265344, + "loss": 1.7368, + "step": 1159 + }, + { + "epoch": 0.10524405733986572, + "grad_norm": 0.18695383424795717, + "learning_rate": 0.0009852373818962327, + "loss": 1.7222, + "step": 1160 + }, + { + "epoch": 0.10533478497550354, + "grad_norm": 0.17810213377014156, + "learning_rate": 0.0009852019217656113, + "loss": 1.6864, + "step": 1161 + }, + { + "epoch": 0.10542551261114136, + "grad_norm": 0.17304692522672752, + "learning_rate": 0.0009851664197377328, + "loss": 1.695, + "step": 1162 + }, + { + "epoch": 0.10551624024677916, + "grad_norm": 0.16860942084277344, + "learning_rate": 0.0009851308758156623, + "loss": 1.7222, + "step": 1163 + }, + { + "epoch": 0.10560696788241698, + "grad_norm": 0.1655672628451575, + "learning_rate": 0.0009850952900024692, + "loss": 1.7584, + "step": 1164 + }, + { + "epoch": 0.1056976955180548, + "grad_norm": 0.17211251827133253, + "learning_rate": 0.0009850596623012264, + "loss": 1.7127, + "step": 1165 + }, + { + "epoch": 0.10578842315369262, + "grad_norm": 0.179191983974999, + "learning_rate": 0.0009850239927150103, + "loss": 1.7436, + "step": 1166 + }, + { + "epoch": 0.10587915078933043, + "grad_norm": 0.18567554250851379, + "learning_rate": 0.0009849882812469006, + "loss": 1.698, + "step": 1167 + }, + { + "epoch": 0.10596987842496824, + "grad_norm": 0.17445889428056205, + "learning_rate": 0.0009849525278999816, + "loss": 1.6836, + "step": 1168 + }, + { + "epoch": 0.10606060606060606, + "grad_norm": 0.18210024964633983, + "learning_rate": 0.0009849167326773402, + "loss": 1.7341, + "step": 1169 + }, + { + "epoch": 0.10615133369624388, + "grad_norm": 0.17031052983706535, + "learning_rate": 0.0009848808955820675, + "loss": 1.7541, + "step": 1170 + }, + { + "epoch": 0.10624206133188169, + "grad_norm": 0.1672698341611268, + "learning_rate": 0.0009848450166172582, + "loss": 1.7215, + "step": 1171 + }, + { + "epoch": 0.1063327889675195, + "grad_norm": 0.16606879520083143, + "learning_rate": 0.00098480909578601, + "loss": 1.7036, + "step": 1172 + }, + { + "epoch": 0.10642351660315733, + "grad_norm": 0.16825332723053016, + "learning_rate": 0.0009847731330914252, + "loss": 1.7032, + "step": 1173 + }, + { + "epoch": 0.10651424423879513, + "grad_norm": 0.17912591883925358, + "learning_rate": 0.0009847371285366087, + "loss": 1.7331, + "step": 1174 + }, + { + "epoch": 0.10660497187443295, + "grad_norm": 0.17944666713535054, + "learning_rate": 0.0009847010821246698, + "loss": 1.7553, + "step": 1175 + }, + { + "epoch": 0.10669569951007077, + "grad_norm": 0.16929814358315057, + "learning_rate": 0.000984664993858721, + "loss": 
1.6904, + "step": 1176 + }, + { + "epoch": 0.10678642714570859, + "grad_norm": 0.1700857325170699, + "learning_rate": 0.0009846288637418786, + "loss": 1.6863, + "step": 1177 + }, + { + "epoch": 0.10687715478134639, + "grad_norm": 0.1756552762055619, + "learning_rate": 0.0009845926917772623, + "loss": 1.757, + "step": 1178 + }, + { + "epoch": 0.10696788241698421, + "grad_norm": 0.16819911878554697, + "learning_rate": 0.0009845564779679956, + "loss": 1.732, + "step": 1179 + }, + { + "epoch": 0.10705861005262203, + "grad_norm": 0.18232639648600016, + "learning_rate": 0.0009845202223172057, + "loss": 1.7221, + "step": 1180 + }, + { + "epoch": 0.10714933768825985, + "grad_norm": 0.1616404309152846, + "learning_rate": 0.0009844839248280232, + "loss": 1.7225, + "step": 1181 + }, + { + "epoch": 0.10724006532389765, + "grad_norm": 0.18326327282719967, + "learning_rate": 0.0009844475855035823, + "loss": 1.7464, + "step": 1182 + }, + { + "epoch": 0.10733079295953547, + "grad_norm": 0.1739256519405278, + "learning_rate": 0.000984411204347021, + "loss": 1.739, + "step": 1183 + }, + { + "epoch": 0.10742152059517329, + "grad_norm": 0.17103895535689884, + "learning_rate": 0.0009843747813614808, + "loss": 1.7329, + "step": 1184 + }, + { + "epoch": 0.10751224823081111, + "grad_norm": 0.17553472740629109, + "learning_rate": 0.000984338316550107, + "loss": 1.7325, + "step": 1185 + }, + { + "epoch": 0.10760297586644892, + "grad_norm": 0.17497663339261005, + "learning_rate": 0.000984301809916048, + "loss": 1.7223, + "step": 1186 + }, + { + "epoch": 0.10769370350208674, + "grad_norm": 0.1719223665991814, + "learning_rate": 0.0009842652614624565, + "loss": 1.7379, + "step": 1187 + }, + { + "epoch": 0.10778443113772455, + "grad_norm": 0.18404166812622755, + "learning_rate": 0.000984228671192488, + "loss": 1.7283, + "step": 1188 + }, + { + "epoch": 0.10787515877336237, + "grad_norm": 0.172674658797702, + "learning_rate": 0.0009841920391093027, + "loss": 1.662, + "step": 1189 + }, + { + "epoch": 0.10796588640900018, + "grad_norm": 0.19198240753785453, + "learning_rate": 0.0009841553652160634, + "loss": 1.7265, + "step": 1190 + }, + { + "epoch": 0.108056614044638, + "grad_norm": 0.1773281529263326, + "learning_rate": 0.0009841186495159369, + "loss": 1.7218, + "step": 1191 + }, + { + "epoch": 0.10814734168027582, + "grad_norm": 0.17691820330837346, + "learning_rate": 0.0009840818920120935, + "loss": 1.6944, + "step": 1192 + }, + { + "epoch": 0.10823806931591362, + "grad_norm": 0.17231456518332297, + "learning_rate": 0.0009840450927077076, + "loss": 1.7082, + "step": 1193 + }, + { + "epoch": 0.10832879695155144, + "grad_norm": 0.1788749632603217, + "learning_rate": 0.0009840082516059565, + "loss": 1.782, + "step": 1194 + }, + { + "epoch": 0.10841952458718926, + "grad_norm": 0.1733815267096086, + "learning_rate": 0.0009839713687100216, + "loss": 1.7025, + "step": 1195 + }, + { + "epoch": 0.10851025222282708, + "grad_norm": 0.18087396666212702, + "learning_rate": 0.0009839344440230877, + "loss": 1.7393, + "step": 1196 + }, + { + "epoch": 0.10860097985846488, + "grad_norm": 0.17987008650530525, + "learning_rate": 0.0009838974775483432, + "loss": 1.7251, + "step": 1197 + }, + { + "epoch": 0.1086917074941027, + "grad_norm": 0.17399625942378785, + "learning_rate": 0.00098386046928898, + "loss": 1.7305, + "step": 1198 + }, + { + "epoch": 0.10878243512974052, + "grad_norm": 0.170059595008116, + "learning_rate": 0.0009838234192481943, + "loss": 1.7269, + "step": 1199 + }, + { + "epoch": 0.10887316276537834, + "grad_norm": 
0.17187403352773256, + "learning_rate": 0.0009837863274291848, + "loss": 1.7336, + "step": 1200 + }, + { + "epoch": 0.10896389040101614, + "grad_norm": 0.17111017292608174, + "learning_rate": 0.0009837491938351549, + "loss": 1.7388, + "step": 1201 + }, + { + "epoch": 0.10905461803665396, + "grad_norm": 0.16774562447879976, + "learning_rate": 0.0009837120184693106, + "loss": 1.702, + "step": 1202 + }, + { + "epoch": 0.10914534567229178, + "grad_norm": 0.16641863981649987, + "learning_rate": 0.000983674801334862, + "loss": 1.696, + "step": 1203 + }, + { + "epoch": 0.1092360733079296, + "grad_norm": 0.18391388612182547, + "learning_rate": 0.0009836375424350234, + "loss": 1.76, + "step": 1204 + }, + { + "epoch": 0.1093268009435674, + "grad_norm": 0.1671809777510978, + "learning_rate": 0.0009836002417730116, + "loss": 1.7044, + "step": 1205 + }, + { + "epoch": 0.10941752857920523, + "grad_norm": 0.1731374801617371, + "learning_rate": 0.0009835628993520478, + "loss": 1.68, + "step": 1206 + }, + { + "epoch": 0.10950825621484304, + "grad_norm": 0.16830488711718655, + "learning_rate": 0.0009835255151753562, + "loss": 1.7673, + "step": 1207 + }, + { + "epoch": 0.10959898385048086, + "grad_norm": 0.17179004902817402, + "learning_rate": 0.000983488089246165, + "loss": 1.7531, + "step": 1208 + }, + { + "epoch": 0.10968971148611867, + "grad_norm": 0.17266251756545145, + "learning_rate": 0.0009834506215677062, + "loss": 1.6958, + "step": 1209 + }, + { + "epoch": 0.10978043912175649, + "grad_norm": 0.17506700641761533, + "learning_rate": 0.000983413112143215, + "loss": 1.7234, + "step": 1210 + }, + { + "epoch": 0.1098711667573943, + "grad_norm": 0.18045663133205334, + "learning_rate": 0.00098337556097593, + "loss": 1.7323, + "step": 1211 + }, + { + "epoch": 0.10996189439303211, + "grad_norm": 0.1791329119799067, + "learning_rate": 0.0009833379680690944, + "loss": 1.7193, + "step": 1212 + }, + { + "epoch": 0.11005262202866993, + "grad_norm": 0.1771566706693738, + "learning_rate": 0.000983300333425954, + "loss": 1.7502, + "step": 1213 + }, + { + "epoch": 0.11014334966430775, + "grad_norm": 0.1796757228618933, + "learning_rate": 0.0009832626570497585, + "loss": 1.7304, + "step": 1214 + }, + { + "epoch": 0.11023407729994557, + "grad_norm": 0.16800176553115453, + "learning_rate": 0.0009832249389437613, + "loss": 1.7282, + "step": 1215 + }, + { + "epoch": 0.11032480493558337, + "grad_norm": 0.16933057656890146, + "learning_rate": 0.0009831871791112195, + "loss": 1.712, + "step": 1216 + }, + { + "epoch": 0.11041553257122119, + "grad_norm": 0.16401970144383746, + "learning_rate": 0.0009831493775553934, + "loss": 1.7672, + "step": 1217 + }, + { + "epoch": 0.11050626020685901, + "grad_norm": 0.16717715251022214, + "learning_rate": 0.0009831115342795475, + "loss": 1.7065, + "step": 1218 + }, + { + "epoch": 0.11059698784249683, + "grad_norm": 0.17601911447485705, + "learning_rate": 0.0009830736492869494, + "loss": 1.7394, + "step": 1219 + }, + { + "epoch": 0.11068771547813464, + "grad_norm": 0.17492732650332493, + "learning_rate": 0.0009830357225808705, + "loss": 1.6857, + "step": 1220 + }, + { + "epoch": 0.11077844311377245, + "grad_norm": 0.17780599150493354, + "learning_rate": 0.0009829977541645856, + "loss": 1.7214, + "step": 1221 + }, + { + "epoch": 0.11086917074941027, + "grad_norm": 0.17199389778598095, + "learning_rate": 0.0009829597440413737, + "loss": 1.7157, + "step": 1222 + }, + { + "epoch": 0.11095989838504809, + "grad_norm": 0.16914872789158947, + "learning_rate": 0.0009829216922145165, + "loss": 
1.7565, + "step": 1223 + }, + { + "epoch": 0.1110506260206859, + "grad_norm": 0.16603653674865937, + "learning_rate": 0.0009828835986873002, + "loss": 1.7374, + "step": 1224 + }, + { + "epoch": 0.11114135365632372, + "grad_norm": 0.1686192499669711, + "learning_rate": 0.0009828454634630138, + "loss": 1.6951, + "step": 1225 + }, + { + "epoch": 0.11123208129196153, + "grad_norm": 0.16203418661404173, + "learning_rate": 0.0009828072865449503, + "loss": 1.6799, + "step": 1226 + }, + { + "epoch": 0.11132280892759934, + "grad_norm": 0.17812085802906216, + "learning_rate": 0.0009827690679364068, + "loss": 1.7597, + "step": 1227 + }, + { + "epoch": 0.11141353656323716, + "grad_norm": 0.1689335069096628, + "learning_rate": 0.000982730807640683, + "loss": 1.7056, + "step": 1228 + }, + { + "epoch": 0.11150426419887498, + "grad_norm": 0.16221407947074298, + "learning_rate": 0.000982692505661083, + "loss": 1.7223, + "step": 1229 + }, + { + "epoch": 0.1115949918345128, + "grad_norm": 0.17687426914276116, + "learning_rate": 0.0009826541620009137, + "loss": 1.6797, + "step": 1230 + }, + { + "epoch": 0.1116857194701506, + "grad_norm": 0.17906576969614918, + "learning_rate": 0.0009826157766634864, + "loss": 1.7714, + "step": 1231 + }, + { + "epoch": 0.11177644710578842, + "grad_norm": 0.16718189763273283, + "learning_rate": 0.000982577349652116, + "loss": 1.671, + "step": 1232 + }, + { + "epoch": 0.11186717474142624, + "grad_norm": 0.17226410737727066, + "learning_rate": 0.0009825388809701198, + "loss": 1.7454, + "step": 1233 + }, + { + "epoch": 0.11195790237706406, + "grad_norm": 0.17431120592622737, + "learning_rate": 0.0009825003706208206, + "loss": 1.6922, + "step": 1234 + }, + { + "epoch": 0.11204863001270186, + "grad_norm": 0.17372064945088686, + "learning_rate": 0.0009824618186075429, + "loss": 1.7393, + "step": 1235 + }, + { + "epoch": 0.11213935764833968, + "grad_norm": 0.1693006525783567, + "learning_rate": 0.0009824232249336163, + "loss": 1.7199, + "step": 1236 + }, + { + "epoch": 0.1122300852839775, + "grad_norm": 0.17108515735401972, + "learning_rate": 0.000982384589602373, + "loss": 1.736, + "step": 1237 + }, + { + "epoch": 0.11232081291961532, + "grad_norm": 0.16527671266549857, + "learning_rate": 0.0009823459126171495, + "loss": 1.7005, + "step": 1238 + }, + { + "epoch": 0.11241154055525313, + "grad_norm": 0.16982732517425214, + "learning_rate": 0.000982307193981285, + "loss": 1.6884, + "step": 1239 + }, + { + "epoch": 0.11250226819089094, + "grad_norm": 0.1751768253756675, + "learning_rate": 0.0009822684336981235, + "loss": 1.7311, + "step": 1240 + }, + { + "epoch": 0.11259299582652876, + "grad_norm": 0.17624362128815813, + "learning_rate": 0.0009822296317710116, + "loss": 1.7029, + "step": 1241 + }, + { + "epoch": 0.11268372346216658, + "grad_norm": 0.17330620293157492, + "learning_rate": 0.0009821907882033, + "loss": 1.7265, + "step": 1242 + }, + { + "epoch": 0.11277445109780439, + "grad_norm": 0.16836252024876744, + "learning_rate": 0.0009821519029983427, + "loss": 1.7048, + "step": 1243 + }, + { + "epoch": 0.1128651787334422, + "grad_norm": 0.17165586385004733, + "learning_rate": 0.0009821129761594972, + "loss": 1.7197, + "step": 1244 + }, + { + "epoch": 0.11295590636908003, + "grad_norm": 0.16937110711768982, + "learning_rate": 0.0009820740076901255, + "loss": 1.7659, + "step": 1245 + }, + { + "epoch": 0.11304663400471783, + "grad_norm": 0.17462353995857285, + "learning_rate": 0.0009820349975935923, + "loss": 1.7321, + "step": 1246 + }, + { + "epoch": 0.11313736164035565, + 
"grad_norm": 0.16954361133694762, + "learning_rate": 0.0009819959458732658, + "loss": 1.7481, + "step": 1247 + }, + { + "epoch": 0.11322808927599347, + "grad_norm": 0.16753386743003298, + "learning_rate": 0.0009819568525325185, + "loss": 1.7166, + "step": 1248 + }, + { + "epoch": 0.11331881691163129, + "grad_norm": 0.17199556323121912, + "learning_rate": 0.0009819177175747257, + "loss": 1.723, + "step": 1249 + }, + { + "epoch": 0.11340954454726909, + "grad_norm": 0.1718950450596678, + "learning_rate": 0.0009818785410032672, + "loss": 1.7522, + "step": 1250 + }, + { + "epoch": 0.11350027218290691, + "grad_norm": 0.17120457613004988, + "learning_rate": 0.0009818393228215253, + "loss": 1.6639, + "step": 1251 + }, + { + "epoch": 0.11359099981854473, + "grad_norm": 0.16571038562355697, + "learning_rate": 0.0009818000630328872, + "loss": 1.6842, + "step": 1252 + }, + { + "epoch": 0.11368172745418255, + "grad_norm": 0.16718739880677688, + "learning_rate": 0.0009817607616407426, + "loss": 1.7328, + "step": 1253 + }, + { + "epoch": 0.11377245508982035, + "grad_norm": 0.16528915776400369, + "learning_rate": 0.0009817214186484853, + "loss": 1.7017, + "step": 1254 + }, + { + "epoch": 0.11386318272545817, + "grad_norm": 0.16501139724234495, + "learning_rate": 0.0009816820340595124, + "loss": 1.7236, + "step": 1255 + }, + { + "epoch": 0.11395391036109599, + "grad_norm": 0.16687641205927253, + "learning_rate": 0.0009816426078772247, + "loss": 1.6482, + "step": 1256 + }, + { + "epoch": 0.11404463799673381, + "grad_norm": 0.15358917141742956, + "learning_rate": 0.0009816031401050271, + "loss": 1.7164, + "step": 1257 + }, + { + "epoch": 0.11413536563237162, + "grad_norm": 0.16563913311707196, + "learning_rate": 0.000981563630746327, + "loss": 1.7141, + "step": 1258 + }, + { + "epoch": 0.11422609326800943, + "grad_norm": 0.1670236831961389, + "learning_rate": 0.0009815240798045368, + "loss": 1.6957, + "step": 1259 + }, + { + "epoch": 0.11431682090364725, + "grad_norm": 0.16389266433877142, + "learning_rate": 0.0009814844872830712, + "loss": 1.722, + "step": 1260 + }, + { + "epoch": 0.11440754853928507, + "grad_norm": 0.17444308981986043, + "learning_rate": 0.0009814448531853493, + "loss": 1.712, + "step": 1261 + }, + { + "epoch": 0.11449827617492288, + "grad_norm": 0.16038882891924436, + "learning_rate": 0.000981405177514793, + "loss": 1.7192, + "step": 1262 + }, + { + "epoch": 0.1145890038105607, + "grad_norm": 0.1699197734223029, + "learning_rate": 0.000981365460274829, + "loss": 1.7381, + "step": 1263 + }, + { + "epoch": 0.11467973144619852, + "grad_norm": 0.1655918629960827, + "learning_rate": 0.0009813257014688866, + "loss": 1.7379, + "step": 1264 + }, + { + "epoch": 0.11477045908183632, + "grad_norm": 0.16530309848467212, + "learning_rate": 0.0009812859011003987, + "loss": 1.7287, + "step": 1265 + }, + { + "epoch": 0.11486118671747414, + "grad_norm": 0.17818473697567655, + "learning_rate": 0.0009812460591728024, + "loss": 1.7301, + "step": 1266 + }, + { + "epoch": 0.11495191435311196, + "grad_norm": 0.16275256906474633, + "learning_rate": 0.0009812061756895379, + "loss": 1.7309, + "step": 1267 + }, + { + "epoch": 0.11504264198874978, + "grad_norm": 0.16749771586563175, + "learning_rate": 0.0009811662506540493, + "loss": 1.7296, + "step": 1268 + }, + { + "epoch": 0.11513336962438758, + "grad_norm": 0.1664982415944664, + "learning_rate": 0.000981126284069784, + "loss": 1.7188, + "step": 1269 + }, + { + "epoch": 0.1152240972600254, + "grad_norm": 0.17159171730858627, + "learning_rate": 
0.0009810862759401932, + "loss": 1.7669, + "step": 1270 + }, + { + "epoch": 0.11531482489566322, + "grad_norm": 0.17885468188390433, + "learning_rate": 0.0009810462262687316, + "loss": 1.7026, + "step": 1271 + }, + { + "epoch": 0.11540555253130104, + "grad_norm": 0.1645897500830826, + "learning_rate": 0.0009810061350588573, + "loss": 1.693, + "step": 1272 + }, + { + "epoch": 0.11549628016693884, + "grad_norm": 0.17055455144237242, + "learning_rate": 0.0009809660023140326, + "loss": 1.7184, + "step": 1273 + }, + { + "epoch": 0.11558700780257666, + "grad_norm": 0.16703851707155892, + "learning_rate": 0.0009809258280377225, + "loss": 1.7501, + "step": 1274 + }, + { + "epoch": 0.11567773543821448, + "grad_norm": 0.16576569313683442, + "learning_rate": 0.0009808856122333963, + "loss": 1.7401, + "step": 1275 + }, + { + "epoch": 0.1157684630738523, + "grad_norm": 0.17070723457980533, + "learning_rate": 0.0009808453549045267, + "loss": 1.7078, + "step": 1276 + }, + { + "epoch": 0.1158591907094901, + "grad_norm": 0.15368230527423424, + "learning_rate": 0.0009808050560545895, + "loss": 1.6849, + "step": 1277 + }, + { + "epoch": 0.11594991834512793, + "grad_norm": 0.16677612971906916, + "learning_rate": 0.0009807647156870652, + "loss": 1.6938, + "step": 1278 + }, + { + "epoch": 0.11604064598076574, + "grad_norm": 0.1774914479145659, + "learning_rate": 0.0009807243338054367, + "loss": 1.6849, + "step": 1279 + }, + { + "epoch": 0.11613137361640356, + "grad_norm": 0.16954766799767335, + "learning_rate": 0.0009806839104131912, + "loss": 1.7228, + "step": 1280 + }, + { + "epoch": 0.11622210125204137, + "grad_norm": 0.17554459634606834, + "learning_rate": 0.0009806434455138194, + "loss": 1.7573, + "step": 1281 + }, + { + "epoch": 0.11631282888767919, + "grad_norm": 0.17689257525469643, + "learning_rate": 0.0009806029391108148, + "loss": 1.7315, + "step": 1282 + }, + { + "epoch": 0.116403556523317, + "grad_norm": 0.16671403301387505, + "learning_rate": 0.0009805623912076758, + "loss": 1.7477, + "step": 1283 + }, + { + "epoch": 0.11649428415895481, + "grad_norm": 0.16868156509711527, + "learning_rate": 0.0009805218018079035, + "loss": 1.7218, + "step": 1284 + }, + { + "epoch": 0.11658501179459263, + "grad_norm": 0.16505093625932996, + "learning_rate": 0.0009804811709150027, + "loss": 1.7269, + "step": 1285 + }, + { + "epoch": 0.11667573943023045, + "grad_norm": 0.16220678479120987, + "learning_rate": 0.0009804404985324822, + "loss": 1.7459, + "step": 1286 + }, + { + "epoch": 0.11676646706586827, + "grad_norm": 0.16938755574078093, + "learning_rate": 0.0009803997846638537, + "loss": 1.6992, + "step": 1287 + }, + { + "epoch": 0.11685719470150607, + "grad_norm": 0.1639546243844472, + "learning_rate": 0.000980359029312633, + "loss": 1.7334, + "step": 1288 + }, + { + "epoch": 0.11694792233714389, + "grad_norm": 0.1721830647011744, + "learning_rate": 0.0009803182324823391, + "loss": 1.7057, + "step": 1289 + }, + { + "epoch": 0.11703864997278171, + "grad_norm": 0.16749332546036447, + "learning_rate": 0.0009802773941764954, + "loss": 1.7124, + "step": 1290 + }, + { + "epoch": 0.11712937760841953, + "grad_norm": 0.1725722233332143, + "learning_rate": 0.0009802365143986276, + "loss": 1.6965, + "step": 1291 + }, + { + "epoch": 0.11722010524405733, + "grad_norm": 0.16079930952311292, + "learning_rate": 0.0009801955931522663, + "loss": 1.7171, + "step": 1292 + }, + { + "epoch": 0.11731083287969515, + "grad_norm": 0.1730177210506614, + "learning_rate": 0.0009801546304409445, + "loss": 1.7071, + "step": 1293 + }, + { + 
"epoch": 0.11740156051533297, + "grad_norm": 0.17665942167185703, + "learning_rate": 0.0009801136262681997, + "loss": 1.7388, + "step": 1294 + }, + { + "epoch": 0.11749228815097079, + "grad_norm": 0.16350102940297387, + "learning_rate": 0.0009800725806375725, + "loss": 1.709, + "step": 1295 + }, + { + "epoch": 0.1175830157866086, + "grad_norm": 0.15706991084562846, + "learning_rate": 0.0009800314935526073, + "loss": 1.6667, + "step": 1296 + }, + { + "epoch": 0.11767374342224642, + "grad_norm": 0.168421978510857, + "learning_rate": 0.0009799903650168518, + "loss": 1.704, + "step": 1297 + }, + { + "epoch": 0.11776447105788423, + "grad_norm": 0.16418038668451274, + "learning_rate": 0.0009799491950338577, + "loss": 1.7355, + "step": 1298 + }, + { + "epoch": 0.11785519869352205, + "grad_norm": 0.1726801559199094, + "learning_rate": 0.0009799079836071796, + "loss": 1.7427, + "step": 1299 + }, + { + "epoch": 0.11794592632915986, + "grad_norm": 0.16092371714975456, + "learning_rate": 0.0009798667307403767, + "loss": 1.7011, + "step": 1300 + }, + { + "epoch": 0.11803665396479768, + "grad_norm": 0.1681284635354932, + "learning_rate": 0.0009798254364370106, + "loss": 1.7077, + "step": 1301 + }, + { + "epoch": 0.1181273816004355, + "grad_norm": 0.17243698945729172, + "learning_rate": 0.0009797841007006473, + "loss": 1.7218, + "step": 1302 + }, + { + "epoch": 0.1182181092360733, + "grad_norm": 0.17521636651697575, + "learning_rate": 0.0009797427235348564, + "loss": 1.726, + "step": 1303 + }, + { + "epoch": 0.11830883687171112, + "grad_norm": 0.163768177480489, + "learning_rate": 0.0009797013049432106, + "loss": 1.7513, + "step": 1304 + }, + { + "epoch": 0.11839956450734894, + "grad_norm": 0.16398025460442012, + "learning_rate": 0.0009796598449292862, + "loss": 1.6883, + "step": 1305 + }, + { + "epoch": 0.11849029214298676, + "grad_norm": 0.16409932726293275, + "learning_rate": 0.0009796183434966636, + "loss": 1.6903, + "step": 1306 + }, + { + "epoch": 0.11858101977862456, + "grad_norm": 0.17192319083285695, + "learning_rate": 0.0009795768006489265, + "loss": 1.7598, + "step": 1307 + }, + { + "epoch": 0.11867174741426238, + "grad_norm": 0.16048067950152728, + "learning_rate": 0.0009795352163896617, + "loss": 1.6953, + "step": 1308 + }, + { + "epoch": 0.1187624750499002, + "grad_norm": 0.1710916469913902, + "learning_rate": 0.0009794935907224606, + "loss": 1.734, + "step": 1309 + }, + { + "epoch": 0.11885320268553802, + "grad_norm": 0.17231722782778508, + "learning_rate": 0.0009794519236509171, + "loss": 1.7411, + "step": 1310 + }, + { + "epoch": 0.11894393032117583, + "grad_norm": 0.17756864375591233, + "learning_rate": 0.0009794102151786295, + "loss": 1.705, + "step": 1311 + }, + { + "epoch": 0.11903465795681364, + "grad_norm": 0.1729665386993865, + "learning_rate": 0.000979368465309199, + "loss": 1.6971, + "step": 1312 + }, + { + "epoch": 0.11912538559245146, + "grad_norm": 0.16734737404097536, + "learning_rate": 0.000979326674046231, + "loss": 1.7409, + "step": 1313 + }, + { + "epoch": 0.11921611322808928, + "grad_norm": 0.16733233112084747, + "learning_rate": 0.0009792848413933339, + "loss": 1.7052, + "step": 1314 + }, + { + "epoch": 0.11930684086372709, + "grad_norm": 0.16415805002402167, + "learning_rate": 0.0009792429673541202, + "loss": 1.717, + "step": 1315 + }, + { + "epoch": 0.1193975684993649, + "grad_norm": 0.16662725019395558, + "learning_rate": 0.0009792010519322054, + "loss": 1.6738, + "step": 1316 + }, + { + "epoch": 0.11948829613500273, + "grad_norm": 0.16328170405544845, + 
"learning_rate": 0.0009791590951312094, + "loss": 1.7037, + "step": 1317 + }, + { + "epoch": 0.11957902377064054, + "grad_norm": 0.16693928258534835, + "learning_rate": 0.0009791170969547548, + "loss": 1.6878, + "step": 1318 + }, + { + "epoch": 0.11966975140627835, + "grad_norm": 0.16622122407891127, + "learning_rate": 0.0009790750574064685, + "loss": 1.7468, + "step": 1319 + }, + { + "epoch": 0.11976047904191617, + "grad_norm": 0.17348017927501044, + "learning_rate": 0.00097903297648998, + "loss": 1.7086, + "step": 1320 + }, + { + "epoch": 0.11985120667755399, + "grad_norm": 0.16668353459495724, + "learning_rate": 0.0009789908542089235, + "loss": 1.723, + "step": 1321 + }, + { + "epoch": 0.11994193431319179, + "grad_norm": 0.16650936189906979, + "learning_rate": 0.0009789486905669362, + "loss": 1.6786, + "step": 1322 + }, + { + "epoch": 0.12003266194882961, + "grad_norm": 0.16521461612273033, + "learning_rate": 0.000978906485567659, + "loss": 1.7263, + "step": 1323 + }, + { + "epoch": 0.12012338958446743, + "grad_norm": 0.1619886305126759, + "learning_rate": 0.0009788642392147362, + "loss": 1.6627, + "step": 1324 + }, + { + "epoch": 0.12021411722010525, + "grad_norm": 0.16939225478706263, + "learning_rate": 0.0009788219515118155, + "loss": 1.7186, + "step": 1325 + }, + { + "epoch": 0.12030484485574305, + "grad_norm": 0.17620922396269553, + "learning_rate": 0.0009787796224625489, + "loss": 1.7431, + "step": 1326 + }, + { + "epoch": 0.12039557249138087, + "grad_norm": 0.16104795162302576, + "learning_rate": 0.0009787372520705913, + "loss": 1.7166, + "step": 1327 + }, + { + "epoch": 0.12048630012701869, + "grad_norm": 0.16836018545908663, + "learning_rate": 0.0009786948403396015, + "loss": 1.7014, + "step": 1328 + }, + { + "epoch": 0.12057702776265651, + "grad_norm": 0.1652676380075769, + "learning_rate": 0.0009786523872732417, + "loss": 1.7289, + "step": 1329 + }, + { + "epoch": 0.12066775539829432, + "grad_norm": 0.16320511888949862, + "learning_rate": 0.0009786098928751775, + "loss": 1.7038, + "step": 1330 + }, + { + "epoch": 0.12075848303393213, + "grad_norm": 0.16162613271333615, + "learning_rate": 0.0009785673571490786, + "loss": 1.7224, + "step": 1331 + }, + { + "epoch": 0.12084921066956995, + "grad_norm": 0.16503523301835268, + "learning_rate": 0.0009785247800986178, + "loss": 1.724, + "step": 1332 + }, + { + "epoch": 0.12093993830520777, + "grad_norm": 0.16312807207250835, + "learning_rate": 0.000978482161727472, + "loss": 1.743, + "step": 1333 + }, + { + "epoch": 0.12103066594084558, + "grad_norm": 0.16592148028646148, + "learning_rate": 0.0009784395020393207, + "loss": 1.7211, + "step": 1334 + }, + { + "epoch": 0.1211213935764834, + "grad_norm": 0.16988654043780838, + "learning_rate": 0.0009783968010378481, + "loss": 1.731, + "step": 1335 + }, + { + "epoch": 0.12121212121212122, + "grad_norm": 0.17331667598326883, + "learning_rate": 0.0009783540587267412, + "loss": 1.7394, + "step": 1336 + }, + { + "epoch": 0.12130284884775902, + "grad_norm": 0.1589100185921072, + "learning_rate": 0.0009783112751096906, + "loss": 1.6987, + "step": 1337 + }, + { + "epoch": 0.12139357648339684, + "grad_norm": 0.16993903278920136, + "learning_rate": 0.000978268450190391, + "loss": 1.7103, + "step": 1338 + }, + { + "epoch": 0.12148430411903466, + "grad_norm": 0.16886062651454906, + "learning_rate": 0.0009782255839725406, + "loss": 1.723, + "step": 1339 + }, + { + "epoch": 0.12157503175467248, + "grad_norm": 0.1704036965199548, + "learning_rate": 0.0009781826764598401, + "loss": 1.7323, + "step": 1340 
+ }, + { + "epoch": 0.12166575939031028, + "grad_norm": 0.1655476977225724, + "learning_rate": 0.0009781397276559952, + "loss": 1.6995, + "step": 1341 + }, + { + "epoch": 0.1217564870259481, + "grad_norm": 0.160896046155488, + "learning_rate": 0.000978096737564714, + "loss": 1.7295, + "step": 1342 + }, + { + "epoch": 0.12184721466158592, + "grad_norm": 0.16887779846066908, + "learning_rate": 0.0009780537061897096, + "loss": 1.7121, + "step": 1343 + }, + { + "epoch": 0.12193794229722374, + "grad_norm": 0.16444499733852871, + "learning_rate": 0.0009780106335346968, + "loss": 1.7041, + "step": 1344 + }, + { + "epoch": 0.12202866993286154, + "grad_norm": 0.17198863093310304, + "learning_rate": 0.0009779675196033956, + "loss": 1.7251, + "step": 1345 + }, + { + "epoch": 0.12211939756849936, + "grad_norm": 0.16341748665434985, + "learning_rate": 0.0009779243643995286, + "loss": 1.7212, + "step": 1346 + }, + { + "epoch": 0.12221012520413718, + "grad_norm": 0.1656862595400008, + "learning_rate": 0.0009778811679268223, + "loss": 1.7193, + "step": 1347 + }, + { + "epoch": 0.122300852839775, + "grad_norm": 0.1739662828079975, + "learning_rate": 0.0009778379301890066, + "loss": 1.6956, + "step": 1348 + }, + { + "epoch": 0.1223915804754128, + "grad_norm": 0.1668495585233782, + "learning_rate": 0.0009777946511898153, + "loss": 1.6828, + "step": 1349 + }, + { + "epoch": 0.12248230811105063, + "grad_norm": 0.16748021827163315, + "learning_rate": 0.0009777513309329852, + "loss": 1.7184, + "step": 1350 + }, + { + "epoch": 0.12257303574668844, + "grad_norm": 0.17479865071816908, + "learning_rate": 0.0009777079694222575, + "loss": 1.6786, + "step": 1351 + }, + { + "epoch": 0.12266376338232626, + "grad_norm": 0.1680918559432495, + "learning_rate": 0.000977664566661376, + "loss": 1.7063, + "step": 1352 + }, + { + "epoch": 0.12275449101796407, + "grad_norm": 0.16105147164559205, + "learning_rate": 0.0009776211226540893, + "loss": 1.7179, + "step": 1353 + }, + { + "epoch": 0.12284521865360189, + "grad_norm": 0.16452619701070076, + "learning_rate": 0.0009775776374041478, + "loss": 1.7067, + "step": 1354 + }, + { + "epoch": 0.1229359462892397, + "grad_norm": 0.17526506924580884, + "learning_rate": 0.000977534110915307, + "loss": 1.743, + "step": 1355 + }, + { + "epoch": 0.12302667392487751, + "grad_norm": 0.1595904097653898, + "learning_rate": 0.0009774905431913254, + "loss": 1.7111, + "step": 1356 + }, + { + "epoch": 0.12311740156051533, + "grad_norm": 0.16002708998654838, + "learning_rate": 0.0009774469342359652, + "loss": 1.7306, + "step": 1357 + }, + { + "epoch": 0.12320812919615315, + "grad_norm": 0.1686789825151307, + "learning_rate": 0.0009774032840529916, + "loss": 1.6755, + "step": 1358 + }, + { + "epoch": 0.12329885683179097, + "grad_norm": 0.17229710994799283, + "learning_rate": 0.000977359592646174, + "loss": 1.6612, + "step": 1359 + }, + { + "epoch": 0.12338958446742877, + "grad_norm": 0.15858842647226903, + "learning_rate": 0.0009773158600192855, + "loss": 1.7042, + "step": 1360 + }, + { + "epoch": 0.12348031210306659, + "grad_norm": 0.16052398466591336, + "learning_rate": 0.000977272086176102, + "loss": 1.722, + "step": 1361 + }, + { + "epoch": 0.12357103973870441, + "grad_norm": 0.15907984619203985, + "learning_rate": 0.0009772282711204034, + "loss": 1.7194, + "step": 1362 + }, + { + "epoch": 0.12366176737434223, + "grad_norm": 0.1708088628022477, + "learning_rate": 0.0009771844148559734, + "loss": 1.7518, + "step": 1363 + }, + { + "epoch": 0.12375249500998003, + "grad_norm": 0.159348810863419, + 
"learning_rate": 0.0009771405173865989, + "loss": 1.704, + "step": 1364 + }, + { + "epoch": 0.12384322264561785, + "grad_norm": 0.1631924731381693, + "learning_rate": 0.00097709657871607, + "loss": 1.6857, + "step": 1365 + }, + { + "epoch": 0.12393395028125567, + "grad_norm": 0.17093931868162693, + "learning_rate": 0.0009770525988481817, + "loss": 1.7538, + "step": 1366 + }, + { + "epoch": 0.12402467791689349, + "grad_norm": 0.17047159452682703, + "learning_rate": 0.000977008577786731, + "loss": 1.7819, + "step": 1367 + }, + { + "epoch": 0.1241154055525313, + "grad_norm": 0.1501437590826976, + "learning_rate": 0.0009769645155355193, + "loss": 1.6944, + "step": 1368 + }, + { + "epoch": 0.12420613318816912, + "grad_norm": 0.1652680340470558, + "learning_rate": 0.0009769204120983516, + "loss": 1.7068, + "step": 1369 + }, + { + "epoch": 0.12429686082380693, + "grad_norm": 0.1619325961934043, + "learning_rate": 0.0009768762674790357, + "loss": 1.7337, + "step": 1370 + }, + { + "epoch": 0.12438758845944475, + "grad_norm": 0.15754647863669222, + "learning_rate": 0.0009768320816813838, + "loss": 1.683, + "step": 1371 + }, + { + "epoch": 0.12447831609508256, + "grad_norm": 0.16866171019223397, + "learning_rate": 0.0009767878547092114, + "loss": 1.7543, + "step": 1372 + }, + { + "epoch": 0.12456904373072038, + "grad_norm": 0.15960344851919506, + "learning_rate": 0.0009767435865663376, + "loss": 1.7285, + "step": 1373 + }, + { + "epoch": 0.1246597713663582, + "grad_norm": 0.16089967086712406, + "learning_rate": 0.000976699277256585, + "loss": 1.7036, + "step": 1374 + }, + { + "epoch": 0.124750499001996, + "grad_norm": 0.16086226803858036, + "learning_rate": 0.0009766549267837793, + "loss": 1.7142, + "step": 1375 + }, + { + "epoch": 0.12484122663763382, + "grad_norm": 0.1603449600962552, + "learning_rate": 0.0009766105351517505, + "loss": 1.7253, + "step": 1376 + }, + { + "epoch": 0.12493195427327164, + "grad_norm": 0.15895391101873596, + "learning_rate": 0.0009765661023643317, + "loss": 1.6474, + "step": 1377 + }, + { + "epoch": 0.12502268190890944, + "grad_norm": 0.16959722283670656, + "learning_rate": 0.0009765216284253598, + "loss": 1.7298, + "step": 1378 + }, + { + "epoch": 0.12511340954454728, + "grad_norm": 0.1613883040561515, + "learning_rate": 0.0009764771133386749, + "loss": 1.6769, + "step": 1379 + }, + { + "epoch": 0.12520413718018508, + "grad_norm": 0.1729503923242712, + "learning_rate": 0.0009764325571081212, + "loss": 1.7207, + "step": 1380 + }, + { + "epoch": 0.1252948648158229, + "grad_norm": 0.17499749417740101, + "learning_rate": 0.000976387959737546, + "loss": 1.723, + "step": 1381 + }, + { + "epoch": 0.12538559245146072, + "grad_norm": 0.16458824585241214, + "learning_rate": 0.0009763433212308001, + "loss": 1.6939, + "step": 1382 + }, + { + "epoch": 0.12547632008709853, + "grad_norm": 0.1590601090664154, + "learning_rate": 0.0009762986415917383, + "loss": 1.679, + "step": 1383 + }, + { + "epoch": 0.12556704772273636, + "grad_norm": 0.16334344850690477, + "learning_rate": 0.0009762539208242185, + "loss": 1.6901, + "step": 1384 + }, + { + "epoch": 0.12565777535837416, + "grad_norm": 0.15840740223575733, + "learning_rate": 0.0009762091589321025, + "loss": 1.7208, + "step": 1385 + }, + { + "epoch": 0.12574850299401197, + "grad_norm": 0.16612322061018228, + "learning_rate": 0.0009761643559192556, + "loss": 1.6851, + "step": 1386 + }, + { + "epoch": 0.1258392306296498, + "grad_norm": 0.16611556947925427, + "learning_rate": 0.0009761195117895462, + "loss": 1.6962, + "step": 1387 + }, + { 
+ "epoch": 0.1259299582652876, + "grad_norm": 0.17171027733220212, + "learning_rate": 0.0009760746265468468, + "loss": 1.6727, + "step": 1388 + }, + { + "epoch": 0.1260206859009254, + "grad_norm": 0.16159706113644154, + "learning_rate": 0.0009760297001950334, + "loss": 1.6751, + "step": 1389 + }, + { + "epoch": 0.12611141353656324, + "grad_norm": 0.17057434554489448, + "learning_rate": 0.0009759847327379849, + "loss": 1.7008, + "step": 1390 + }, + { + "epoch": 0.12620214117220105, + "grad_norm": 0.1613632253674611, + "learning_rate": 0.0009759397241795849, + "loss": 1.7327, + "step": 1391 + }, + { + "epoch": 0.12629286880783885, + "grad_norm": 0.15968931566294892, + "learning_rate": 0.0009758946745237194, + "loss": 1.7472, + "step": 1392 + }, + { + "epoch": 0.1263835964434767, + "grad_norm": 0.1546905231775517, + "learning_rate": 0.0009758495837742787, + "loss": 1.6984, + "step": 1393 + }, + { + "epoch": 0.1264743240791145, + "grad_norm": 0.16880800283295722, + "learning_rate": 0.0009758044519351562, + "loss": 1.7371, + "step": 1394 + }, + { + "epoch": 0.12656505171475232, + "grad_norm": 0.16337869276725753, + "learning_rate": 0.0009757592790102492, + "loss": 1.7122, + "step": 1395 + }, + { + "epoch": 0.12665577935039013, + "grad_norm": 0.15904884422094828, + "learning_rate": 0.0009757140650034584, + "loss": 1.7309, + "step": 1396 + }, + { + "epoch": 0.12674650698602793, + "grad_norm": 0.16028725400921887, + "learning_rate": 0.0009756688099186878, + "loss": 1.7235, + "step": 1397 + }, + { + "epoch": 0.12683723462166577, + "grad_norm": 0.16089783551492948, + "learning_rate": 0.0009756235137598457, + "loss": 1.7316, + "step": 1398 + }, + { + "epoch": 0.12692796225730357, + "grad_norm": 0.16473647216925502, + "learning_rate": 0.0009755781765308428, + "loss": 1.715, + "step": 1399 + }, + { + "epoch": 0.12701868989294138, + "grad_norm": 0.15977520796505884, + "learning_rate": 0.0009755327982355944, + "loss": 1.6448, + "step": 1400 + }, + { + "epoch": 0.1271094175285792, + "grad_norm": 0.1588179545342757, + "learning_rate": 0.0009754873788780186, + "loss": 1.6873, + "step": 1401 + }, + { + "epoch": 0.12720014516421702, + "grad_norm": 0.15197301309862743, + "learning_rate": 0.0009754419184620378, + "loss": 1.6637, + "step": 1402 + }, + { + "epoch": 0.12729087279985485, + "grad_norm": 0.1557665700521498, + "learning_rate": 0.000975396416991577, + "loss": 1.6734, + "step": 1403 + }, + { + "epoch": 0.12738160043549265, + "grad_norm": 0.16687166576833273, + "learning_rate": 0.0009753508744705657, + "loss": 1.7342, + "step": 1404 + }, + { + "epoch": 0.12747232807113046, + "grad_norm": 0.16895995136088865, + "learning_rate": 0.0009753052909029363, + "loss": 1.7153, + "step": 1405 + }, + { + "epoch": 0.1275630557067683, + "grad_norm": 0.1617057076002056, + "learning_rate": 0.0009752596662926249, + "loss": 1.7154, + "step": 1406 + }, + { + "epoch": 0.1276537833424061, + "grad_norm": 0.158125851199102, + "learning_rate": 0.0009752140006435714, + "loss": 1.6949, + "step": 1407 + }, + { + "epoch": 0.1277445109780439, + "grad_norm": 0.15409688931451657, + "learning_rate": 0.000975168293959719, + "loss": 1.7205, + "step": 1408 + }, + { + "epoch": 0.12783523861368173, + "grad_norm": 0.1713817569210781, + "learning_rate": 0.0009751225462450142, + "loss": 1.6897, + "step": 1409 + }, + { + "epoch": 0.12792596624931954, + "grad_norm": 0.16305117794575047, + "learning_rate": 0.0009750767575034075, + "loss": 1.7443, + "step": 1410 + }, + { + "epoch": 0.12801669388495734, + "grad_norm": 0.161738787079145, + 
"learning_rate": 0.0009750309277388529, + "loss": 1.7232, + "step": 1411 + }, + { + "epoch": 0.12810742152059518, + "grad_norm": 0.16185154840039714, + "learning_rate": 0.0009749850569553076, + "loss": 1.6826, + "step": 1412 + }, + { + "epoch": 0.12819814915623298, + "grad_norm": 0.16713432272383058, + "learning_rate": 0.0009749391451567325, + "loss": 1.72, + "step": 1413 + }, + { + "epoch": 0.12828887679187082, + "grad_norm": 0.16041398503179624, + "learning_rate": 0.0009748931923470923, + "loss": 1.7125, + "step": 1414 + }, + { + "epoch": 0.12837960442750862, + "grad_norm": 0.16772622891889402, + "learning_rate": 0.0009748471985303551, + "loss": 1.707, + "step": 1415 + }, + { + "epoch": 0.12847033206314643, + "grad_norm": 0.16497943230730466, + "learning_rate": 0.0009748011637104921, + "loss": 1.6799, + "step": 1416 + }, + { + "epoch": 0.12856105969878426, + "grad_norm": 0.16177325143827448, + "learning_rate": 0.0009747550878914788, + "loss": 1.7107, + "step": 1417 + }, + { + "epoch": 0.12865178733442206, + "grad_norm": 0.16787777509673918, + "learning_rate": 0.0009747089710772936, + "loss": 1.7407, + "step": 1418 + }, + { + "epoch": 0.12874251497005987, + "grad_norm": 0.1598205624432169, + "learning_rate": 0.0009746628132719188, + "loss": 1.7231, + "step": 1419 + }, + { + "epoch": 0.1288332426056977, + "grad_norm": 0.17359064290723883, + "learning_rate": 0.0009746166144793401, + "loss": 1.731, + "step": 1420 + }, + { + "epoch": 0.1289239702413355, + "grad_norm": 0.1682375888097794, + "learning_rate": 0.0009745703747035469, + "loss": 1.7532, + "step": 1421 + }, + { + "epoch": 0.12901469787697334, + "grad_norm": 0.16880994488199103, + "learning_rate": 0.0009745240939485318, + "loss": 1.6982, + "step": 1422 + }, + { + "epoch": 0.12910542551261114, + "grad_norm": 0.15539366112109046, + "learning_rate": 0.0009744777722182912, + "loss": 1.7008, + "step": 1423 + }, + { + "epoch": 0.12919615314824895, + "grad_norm": 0.1668259120996251, + "learning_rate": 0.0009744314095168252, + "loss": 1.7019, + "step": 1424 + }, + { + "epoch": 0.12928688078388678, + "grad_norm": 0.16014643871929546, + "learning_rate": 0.0009743850058481369, + "loss": 1.7243, + "step": 1425 + }, + { + "epoch": 0.1293776084195246, + "grad_norm": 0.1749122671711764, + "learning_rate": 0.0009743385612162335, + "loss": 1.7081, + "step": 1426 + }, + { + "epoch": 0.1294683360551624, + "grad_norm": 0.16951567053549402, + "learning_rate": 0.0009742920756251255, + "loss": 1.7117, + "step": 1427 + }, + { + "epoch": 0.12955906369080022, + "grad_norm": 0.16321288302546952, + "learning_rate": 0.0009742455490788267, + "loss": 1.6978, + "step": 1428 + }, + { + "epoch": 0.12964979132643803, + "grad_norm": 0.17452697516294302, + "learning_rate": 0.0009741989815813551, + "loss": 1.6751, + "step": 1429 + }, + { + "epoch": 0.12974051896207583, + "grad_norm": 0.16314616629745565, + "learning_rate": 0.0009741523731367312, + "loss": 1.7185, + "step": 1430 + }, + { + "epoch": 0.12983124659771367, + "grad_norm": 0.1620540701133212, + "learning_rate": 0.0009741057237489802, + "loss": 1.6994, + "step": 1431 + }, + { + "epoch": 0.12992197423335147, + "grad_norm": 0.17166980853891245, + "learning_rate": 0.00097405903342213, + "loss": 1.6994, + "step": 1432 + }, + { + "epoch": 0.1300127018689893, + "grad_norm": 0.15812130496850063, + "learning_rate": 0.0009740123021602126, + "loss": 1.6823, + "step": 1433 + }, + { + "epoch": 0.1301034295046271, + "grad_norm": 0.17140983301304138, + "learning_rate": 0.0009739655299672628, + "loss": 1.692, + "step": 1434 + 
}, + { + "epoch": 0.13019415714026492, + "grad_norm": 0.16707080799321564, + "learning_rate": 0.0009739187168473198, + "loss": 1.6944, + "step": 1435 + }, + { + "epoch": 0.13028488477590275, + "grad_norm": 0.15335776727672223, + "learning_rate": 0.0009738718628044256, + "loss": 1.6783, + "step": 1436 + }, + { + "epoch": 0.13037561241154055, + "grad_norm": 0.1620818787297382, + "learning_rate": 0.0009738249678426263, + "loss": 1.7349, + "step": 1437 + }, + { + "epoch": 0.13046634004717836, + "grad_norm": 0.15844991630393374, + "learning_rate": 0.0009737780319659712, + "loss": 1.6921, + "step": 1438 + }, + { + "epoch": 0.1305570676828162, + "grad_norm": 0.16023531061064286, + "learning_rate": 0.0009737310551785133, + "loss": 1.6756, + "step": 1439 + }, + { + "epoch": 0.130647795318454, + "grad_norm": 0.16614445301295513, + "learning_rate": 0.0009736840374843088, + "loss": 1.743, + "step": 1440 + }, + { + "epoch": 0.13073852295409183, + "grad_norm": 0.16410693713464422, + "learning_rate": 0.0009736369788874178, + "loss": 1.7414, + "step": 1441 + }, + { + "epoch": 0.13082925058972963, + "grad_norm": 0.15947193712290123, + "learning_rate": 0.000973589879391904, + "loss": 1.6528, + "step": 1442 + }, + { + "epoch": 0.13091997822536744, + "grad_norm": 0.1564481090074711, + "learning_rate": 0.0009735427390018343, + "loss": 1.6846, + "step": 1443 + }, + { + "epoch": 0.13101070586100527, + "grad_norm": 0.16042985370831064, + "learning_rate": 0.0009734955577212793, + "loss": 1.6854, + "step": 1444 + }, + { + "epoch": 0.13110143349664308, + "grad_norm": 0.15513192470099305, + "learning_rate": 0.0009734483355543131, + "loss": 1.7032, + "step": 1445 + }, + { + "epoch": 0.13119216113228088, + "grad_norm": 0.14907978812514314, + "learning_rate": 0.0009734010725050133, + "loss": 1.6838, + "step": 1446 + }, + { + "epoch": 0.13128288876791872, + "grad_norm": 0.15464509714277358, + "learning_rate": 0.0009733537685774612, + "loss": 1.6809, + "step": 1447 + }, + { + "epoch": 0.13137361640355652, + "grad_norm": 0.16256418445905976, + "learning_rate": 0.0009733064237757413, + "loss": 1.7307, + "step": 1448 + }, + { + "epoch": 0.13146434403919433, + "grad_norm": 0.16118255897942702, + "learning_rate": 0.000973259038103942, + "loss": 1.7232, + "step": 1449 + }, + { + "epoch": 0.13155507167483216, + "grad_norm": 0.16423804924399107, + "learning_rate": 0.000973211611566155, + "loss": 1.717, + "step": 1450 + }, + { + "epoch": 0.13164579931046996, + "grad_norm": 0.155504018274602, + "learning_rate": 0.0009731641441664756, + "loss": 1.6321, + "step": 1451 + }, + { + "epoch": 0.1317365269461078, + "grad_norm": 0.16278249106896878, + "learning_rate": 0.0009731166359090026, + "loss": 1.7092, + "step": 1452 + }, + { + "epoch": 0.1318272545817456, + "grad_norm": 0.16642646880005452, + "learning_rate": 0.0009730690867978385, + "loss": 1.7185, + "step": 1453 + }, + { + "epoch": 0.1319179822173834, + "grad_norm": 0.16099900854621363, + "learning_rate": 0.000973021496837089, + "loss": 1.6968, + "step": 1454 + }, + { + "epoch": 0.13200870985302124, + "grad_norm": 0.16061466991035425, + "learning_rate": 0.0009729738660308634, + "loss": 1.6861, + "step": 1455 + }, + { + "epoch": 0.13209943748865904, + "grad_norm": 0.1635508378486691, + "learning_rate": 0.0009729261943832748, + "loss": 1.6707, + "step": 1456 + }, + { + "epoch": 0.13219016512429685, + "grad_norm": 0.16637561604540826, + "learning_rate": 0.0009728784818984395, + "loss": 1.6831, + "step": 1457 + }, + { + "epoch": 0.13228089275993468, + "grad_norm": 0.15454332945231977, 
+ "learning_rate": 0.0009728307285804778, + "loss": 1.6977, + "step": 1458 + }, + { + "epoch": 0.1323716203955725, + "grad_norm": 0.15467528745652862, + "learning_rate": 0.0009727829344335129, + "loss": 1.7439, + "step": 1459 + }, + { + "epoch": 0.13246234803121032, + "grad_norm": 0.1645628009921849, + "learning_rate": 0.0009727350994616719, + "loss": 1.7544, + "step": 1460 + }, + { + "epoch": 0.13255307566684812, + "grad_norm": 0.16709892974797003, + "learning_rate": 0.0009726872236690856, + "loss": 1.6953, + "step": 1461 + }, + { + "epoch": 0.13264380330248593, + "grad_norm": 0.16033885378958718, + "learning_rate": 0.0009726393070598876, + "loss": 1.6814, + "step": 1462 + }, + { + "epoch": 0.13273453093812376, + "grad_norm": 0.1646276085350471, + "learning_rate": 0.0009725913496382159, + "loss": 1.6766, + "step": 1463 + }, + { + "epoch": 0.13282525857376157, + "grad_norm": 0.1706833138323584, + "learning_rate": 0.0009725433514082115, + "loss": 1.7114, + "step": 1464 + }, + { + "epoch": 0.13291598620939937, + "grad_norm": 0.15259130196488643, + "learning_rate": 0.000972495312374019, + "loss": 1.6514, + "step": 1465 + }, + { + "epoch": 0.1330067138450372, + "grad_norm": 0.1613320657048532, + "learning_rate": 0.0009724472325397868, + "loss": 1.6885, + "step": 1466 + }, + { + "epoch": 0.133097441480675, + "grad_norm": 0.16743661938960377, + "learning_rate": 0.0009723991119096662, + "loss": 1.7554, + "step": 1467 + }, + { + "epoch": 0.13318816911631282, + "grad_norm": 0.1693015250283149, + "learning_rate": 0.0009723509504878129, + "loss": 1.7484, + "step": 1468 + }, + { + "epoch": 0.13327889675195065, + "grad_norm": 0.16524532676155612, + "learning_rate": 0.0009723027482783853, + "loss": 1.6822, + "step": 1469 + }, + { + "epoch": 0.13336962438758845, + "grad_norm": 0.17111583653432713, + "learning_rate": 0.0009722545052855457, + "loss": 1.7183, + "step": 1470 + }, + { + "epoch": 0.1334603520232263, + "grad_norm": 0.16074578423098948, + "learning_rate": 0.00097220622151346, + "loss": 1.6709, + "step": 1471 + }, + { + "epoch": 0.1335510796588641, + "grad_norm": 0.1557076450066402, + "learning_rate": 0.0009721578969662976, + "loss": 1.7058, + "step": 1472 + }, + { + "epoch": 0.1336418072945019, + "grad_norm": 0.16862838607017916, + "learning_rate": 0.0009721095316482312, + "loss": 1.7001, + "step": 1473 + }, + { + "epoch": 0.13373253493013973, + "grad_norm": 0.16464041672700627, + "learning_rate": 0.0009720611255634369, + "loss": 1.7138, + "step": 1474 + }, + { + "epoch": 0.13382326256577753, + "grad_norm": 0.15716238162633417, + "learning_rate": 0.000972012678716095, + "loss": 1.7373, + "step": 1475 + }, + { + "epoch": 0.13391399020141534, + "grad_norm": 0.1668102053103429, + "learning_rate": 0.0009719641911103888, + "loss": 1.6815, + "step": 1476 + }, + { + "epoch": 0.13400471783705317, + "grad_norm": 0.16220431046595868, + "learning_rate": 0.000971915662750505, + "loss": 1.6825, + "step": 1477 + }, + { + "epoch": 0.13409544547269098, + "grad_norm": 0.16054931946577872, + "learning_rate": 0.0009718670936406344, + "loss": 1.7127, + "step": 1478 + }, + { + "epoch": 0.1341861731083288, + "grad_norm": 0.16620110305335964, + "learning_rate": 0.0009718184837849705, + "loss": 1.7094, + "step": 1479 + }, + { + "epoch": 0.13427690074396662, + "grad_norm": 0.1477814436812061, + "learning_rate": 0.000971769833187711, + "loss": 1.694, + "step": 1480 + }, + { + "epoch": 0.13436762837960442, + "grad_norm": 0.15700408911017674, + "learning_rate": 0.0009717211418530569, + "loss": 1.6977, + "step": 1481 + }, + 
{ + "epoch": 0.13445835601524225, + "grad_norm": 0.16132095821610068, + "learning_rate": 0.0009716724097852127, + "loss": 1.6733, + "step": 1482 + }, + { + "epoch": 0.13454908365088006, + "grad_norm": 0.16090812432423063, + "learning_rate": 0.0009716236369883864, + "loss": 1.6898, + "step": 1483 + }, + { + "epoch": 0.13463981128651786, + "grad_norm": 0.16240676541928903, + "learning_rate": 0.0009715748234667896, + "loss": 1.6739, + "step": 1484 + }, + { + "epoch": 0.1347305389221557, + "grad_norm": 0.17812180572135164, + "learning_rate": 0.0009715259692246372, + "loss": 1.6983, + "step": 1485 + }, + { + "epoch": 0.1348212665577935, + "grad_norm": 0.16445577182863635, + "learning_rate": 0.0009714770742661478, + "loss": 1.6461, + "step": 1486 + }, + { + "epoch": 0.1349119941934313, + "grad_norm": 0.16083839225610802, + "learning_rate": 0.0009714281385955437, + "loss": 1.7514, + "step": 1487 + }, + { + "epoch": 0.13500272182906914, + "grad_norm": 0.16407098934866815, + "learning_rate": 0.0009713791622170502, + "loss": 1.6747, + "step": 1488 + }, + { + "epoch": 0.13509344946470694, + "grad_norm": 0.15817174053629984, + "learning_rate": 0.0009713301451348968, + "loss": 1.7205, + "step": 1489 + }, + { + "epoch": 0.13518417710034478, + "grad_norm": 0.15714560975177802, + "learning_rate": 0.0009712810873533158, + "loss": 1.689, + "step": 1490 + }, + { + "epoch": 0.13527490473598258, + "grad_norm": 0.17193997038330489, + "learning_rate": 0.0009712319888765433, + "loss": 1.7088, + "step": 1491 + }, + { + "epoch": 0.1353656323716204, + "grad_norm": 0.1598551969914748, + "learning_rate": 0.0009711828497088192, + "loss": 1.7003, + "step": 1492 + }, + { + "epoch": 0.13545636000725822, + "grad_norm": 0.14760616027840193, + "learning_rate": 0.0009711336698543867, + "loss": 1.7136, + "step": 1493 + }, + { + "epoch": 0.13554708764289602, + "grad_norm": 0.16495338028328274, + "learning_rate": 0.0009710844493174922, + "loss": 1.7071, + "step": 1494 + }, + { + "epoch": 0.13563781527853383, + "grad_norm": 0.15744706616729837, + "learning_rate": 0.0009710351881023861, + "loss": 1.7076, + "step": 1495 + }, + { + "epoch": 0.13572854291417166, + "grad_norm": 0.15975810510777944, + "learning_rate": 0.0009709858862133221, + "loss": 1.7009, + "step": 1496 + }, + { + "epoch": 0.13581927054980947, + "grad_norm": 0.17617994845528057, + "learning_rate": 0.0009709365436545574, + "loss": 1.7297, + "step": 1497 + }, + { + "epoch": 0.1359099981854473, + "grad_norm": 0.15450279343362971, + "learning_rate": 0.0009708871604303528, + "loss": 1.7045, + "step": 1498 + }, + { + "epoch": 0.1360007258210851, + "grad_norm": 0.16403755948759796, + "learning_rate": 0.0009708377365449726, + "loss": 1.7188, + "step": 1499 + }, + { + "epoch": 0.1360914534567229, + "grad_norm": 0.15249269331723825, + "learning_rate": 0.0009707882720026841, + "loss": 1.6865, + "step": 1500 + }, + { + "epoch": 0.13618218109236074, + "grad_norm": 0.15277339401336945, + "learning_rate": 0.0009707387668077592, + "loss": 1.6884, + "step": 1501 + }, + { + "epoch": 0.13627290872799855, + "grad_norm": 0.16104722697628576, + "learning_rate": 0.0009706892209644725, + "loss": 1.7361, + "step": 1502 + }, + { + "epoch": 0.13636363636363635, + "grad_norm": 0.15690549726739578, + "learning_rate": 0.0009706396344771021, + "loss": 1.6929, + "step": 1503 + }, + { + "epoch": 0.1364543639992742, + "grad_norm": 0.15676719306184908, + "learning_rate": 0.0009705900073499296, + "loss": 1.7185, + "step": 1504 + }, + { + "epoch": 0.136545091634912, + "grad_norm": 0.14738956806702386, 
+ "learning_rate": 0.000970540339587241, + "loss": 1.6802, + "step": 1505 + }, + { + "epoch": 0.1366358192705498, + "grad_norm": 0.1559108587347215, + "learning_rate": 0.0009704906311933246, + "loss": 1.7127, + "step": 1506 + }, + { + "epoch": 0.13672654690618763, + "grad_norm": 0.1648409493075795, + "learning_rate": 0.0009704408821724728, + "loss": 1.7319, + "step": 1507 + }, + { + "epoch": 0.13681727454182543, + "grad_norm": 0.15670435729281124, + "learning_rate": 0.0009703910925289815, + "loss": 1.7254, + "step": 1508 + }, + { + "epoch": 0.13690800217746327, + "grad_norm": 0.1496833635577137, + "learning_rate": 0.00097034126226715, + "loss": 1.7175, + "step": 1509 + }, + { + "epoch": 0.13699872981310107, + "grad_norm": 0.16559352056471605, + "learning_rate": 0.0009702913913912812, + "loss": 1.7059, + "step": 1510 + }, + { + "epoch": 0.13708945744873888, + "grad_norm": 0.14973414174851205, + "learning_rate": 0.0009702414799056815, + "loss": 1.7118, + "step": 1511 + }, + { + "epoch": 0.1371801850843767, + "grad_norm": 0.15451856818555917, + "learning_rate": 0.0009701915278146607, + "loss": 1.6895, + "step": 1512 + }, + { + "epoch": 0.13727091272001452, + "grad_norm": 0.15477309462628794, + "learning_rate": 0.0009701415351225322, + "loss": 1.6447, + "step": 1513 + }, + { + "epoch": 0.13736164035565232, + "grad_norm": 0.16702653529365202, + "learning_rate": 0.0009700915018336127, + "loss": 1.6711, + "step": 1514 + }, + { + "epoch": 0.13745236799129015, + "grad_norm": 0.16178700099667653, + "learning_rate": 0.000970041427952223, + "loss": 1.7111, + "step": 1515 + }, + { + "epoch": 0.13754309562692796, + "grad_norm": 0.16923872105219132, + "learning_rate": 0.0009699913134826865, + "loss": 1.7363, + "step": 1516 + }, + { + "epoch": 0.1376338232625658, + "grad_norm": 0.15503282462604695, + "learning_rate": 0.0009699411584293308, + "loss": 1.7173, + "step": 1517 + }, + { + "epoch": 0.1377245508982036, + "grad_norm": 0.16137823496778994, + "learning_rate": 0.0009698909627964869, + "loss": 1.705, + "step": 1518 + }, + { + "epoch": 0.1378152785338414, + "grad_norm": 0.15640934479880275, + "learning_rate": 0.000969840726588489, + "loss": 1.679, + "step": 1519 + }, + { + "epoch": 0.13790600616947923, + "grad_norm": 0.15504919955055288, + "learning_rate": 0.0009697904498096752, + "loss": 1.7303, + "step": 1520 + }, + { + "epoch": 0.13799673380511704, + "grad_norm": 0.15360789730896704, + "learning_rate": 0.0009697401324643869, + "loss": 1.7313, + "step": 1521 + }, + { + "epoch": 0.13808746144075484, + "grad_norm": 0.15721588803774617, + "learning_rate": 0.0009696897745569688, + "loss": 1.6942, + "step": 1522 + }, + { + "epoch": 0.13817818907639268, + "grad_norm": 0.16241483562425046, + "learning_rate": 0.0009696393760917696, + "loss": 1.6883, + "step": 1523 + }, + { + "epoch": 0.13826891671203048, + "grad_norm": 0.15838330845034945, + "learning_rate": 0.0009695889370731409, + "loss": 1.6662, + "step": 1524 + }, + { + "epoch": 0.1383596443476683, + "grad_norm": 0.15605273483966497, + "learning_rate": 0.0009695384575054382, + "loss": 1.6903, + "step": 1525 + }, + { + "epoch": 0.13845037198330612, + "grad_norm": 0.16001614676469594, + "learning_rate": 0.0009694879373930207, + "loss": 1.6838, + "step": 1526 + }, + { + "epoch": 0.13854109961894392, + "grad_norm": 0.15685742578916945, + "learning_rate": 0.0009694373767402504, + "loss": 1.6838, + "step": 1527 + }, + { + "epoch": 0.13863182725458176, + "grad_norm": 0.15573276767850644, + "learning_rate": 0.0009693867755514937, + "loss": 1.703, + "step": 1528 
+ }, + { + "epoch": 0.13872255489021956, + "grad_norm": 0.1622007732841471, + "learning_rate": 0.0009693361338311195, + "loss": 1.6824, + "step": 1529 + }, + { + "epoch": 0.13881328252585737, + "grad_norm": 0.15788004350893278, + "learning_rate": 0.0009692854515835011, + "loss": 1.7302, + "step": 1530 + }, + { + "epoch": 0.1389040101614952, + "grad_norm": 0.158904283727251, + "learning_rate": 0.0009692347288130147, + "loss": 1.6951, + "step": 1531 + }, + { + "epoch": 0.138994737797133, + "grad_norm": 0.15717800606560436, + "learning_rate": 0.0009691839655240405, + "loss": 1.7096, + "step": 1532 + }, + { + "epoch": 0.1390854654327708, + "grad_norm": 0.1595585221252782, + "learning_rate": 0.0009691331617209616, + "loss": 1.6951, + "step": 1533 + }, + { + "epoch": 0.13917619306840864, + "grad_norm": 0.15383389937535225, + "learning_rate": 0.000969082317408165, + "loss": 1.7286, + "step": 1534 + }, + { + "epoch": 0.13926692070404645, + "grad_norm": 0.1531797210487208, + "learning_rate": 0.0009690314325900411, + "loss": 1.6752, + "step": 1535 + }, + { + "epoch": 0.13935764833968428, + "grad_norm": 0.15732849636281981, + "learning_rate": 0.000968980507270984, + "loss": 1.7183, + "step": 1536 + }, + { + "epoch": 0.1394483759753221, + "grad_norm": 0.15520527316943097, + "learning_rate": 0.0009689295414553909, + "loss": 1.6826, + "step": 1537 + }, + { + "epoch": 0.1395391036109599, + "grad_norm": 0.1519920916976813, + "learning_rate": 0.0009688785351476629, + "loss": 1.6812, + "step": 1538 + }, + { + "epoch": 0.13962983124659772, + "grad_norm": 0.15788570943858543, + "learning_rate": 0.0009688274883522042, + "loss": 1.7554, + "step": 1539 + }, + { + "epoch": 0.13972055888223553, + "grad_norm": 0.15343868874928648, + "learning_rate": 0.0009687764010734228, + "loss": 1.6657, + "step": 1540 + }, + { + "epoch": 0.13981128651787333, + "grad_norm": 0.15749903281499206, + "learning_rate": 0.0009687252733157301, + "loss": 1.7031, + "step": 1541 + }, + { + "epoch": 0.13990201415351117, + "grad_norm": 0.15587204163976964, + "learning_rate": 0.0009686741050835408, + "loss": 1.748, + "step": 1542 + }, + { + "epoch": 0.13999274178914897, + "grad_norm": 0.15178048180870435, + "learning_rate": 0.0009686228963812736, + "loss": 1.7145, + "step": 1543 + }, + { + "epoch": 0.14008346942478678, + "grad_norm": 0.155320917974986, + "learning_rate": 0.0009685716472133503, + "loss": 1.6799, + "step": 1544 + }, + { + "epoch": 0.1401741970604246, + "grad_norm": 0.15343588357342672, + "learning_rate": 0.0009685203575841961, + "loss": 1.7143, + "step": 1545 + }, + { + "epoch": 0.14026492469606242, + "grad_norm": 0.14618213949052128, + "learning_rate": 0.0009684690274982399, + "loss": 1.6906, + "step": 1546 + }, + { + "epoch": 0.14035565233170025, + "grad_norm": 0.15142461676354532, + "learning_rate": 0.0009684176569599143, + "loss": 1.7411, + "step": 1547 + }, + { + "epoch": 0.14044637996733805, + "grad_norm": 0.15659652552199724, + "learning_rate": 0.0009683662459736549, + "loss": 1.7053, + "step": 1548 + }, + { + "epoch": 0.14053710760297586, + "grad_norm": 0.1609788313898246, + "learning_rate": 0.000968314794543901, + "loss": 1.6847, + "step": 1549 + }, + { + "epoch": 0.1406278352386137, + "grad_norm": 0.15914993522267473, + "learning_rate": 0.0009682633026750957, + "loss": 1.7073, + "step": 1550 + }, + { + "epoch": 0.1407185628742515, + "grad_norm": 0.15375325091768494, + "learning_rate": 0.0009682117703716852, + "loss": 1.6704, + "step": 1551 + }, + { + "epoch": 0.1408092905098893, + "grad_norm": 0.1563656409934703, + 
"learning_rate": 0.0009681601976381193, + "loss": 1.6906, + "step": 1552 + }, + { + "epoch": 0.14090001814552713, + "grad_norm": 0.15238885282755357, + "learning_rate": 0.0009681085844788515, + "loss": 1.7074, + "step": 1553 + }, + { + "epoch": 0.14099074578116494, + "grad_norm": 0.1538996089439555, + "learning_rate": 0.0009680569308983382, + "loss": 1.7183, + "step": 1554 + }, + { + "epoch": 0.14108147341680277, + "grad_norm": 0.15631380709499937, + "learning_rate": 0.00096800523690104, + "loss": 1.6811, + "step": 1555 + }, + { + "epoch": 0.14117220105244058, + "grad_norm": 0.151456059982163, + "learning_rate": 0.0009679535024914207, + "loss": 1.7261, + "step": 1556 + }, + { + "epoch": 0.14126292868807838, + "grad_norm": 0.15616543672181807, + "learning_rate": 0.0009679017276739474, + "loss": 1.7279, + "step": 1557 + }, + { + "epoch": 0.14135365632371621, + "grad_norm": 0.15135294694409948, + "learning_rate": 0.000967849912453091, + "loss": 1.6923, + "step": 1558 + }, + { + "epoch": 0.14144438395935402, + "grad_norm": 0.1546856675634401, + "learning_rate": 0.0009677980568333257, + "loss": 1.712, + "step": 1559 + }, + { + "epoch": 0.14153511159499182, + "grad_norm": 0.15666385674694064, + "learning_rate": 0.0009677461608191292, + "loss": 1.6637, + "step": 1560 + }, + { + "epoch": 0.14162583923062966, + "grad_norm": 0.1551039320812694, + "learning_rate": 0.0009676942244149828, + "loss": 1.6874, + "step": 1561 + }, + { + "epoch": 0.14171656686626746, + "grad_norm": 0.15485791408923213, + "learning_rate": 0.0009676422476253713, + "loss": 1.7012, + "step": 1562 + }, + { + "epoch": 0.14180729450190527, + "grad_norm": 0.16279800148434456, + "learning_rate": 0.0009675902304547826, + "loss": 1.7559, + "step": 1563 + }, + { + "epoch": 0.1418980221375431, + "grad_norm": 0.15528225536000986, + "learning_rate": 0.0009675381729077087, + "loss": 1.7031, + "step": 1564 + }, + { + "epoch": 0.1419887497731809, + "grad_norm": 0.15180542502613809, + "learning_rate": 0.0009674860749886446, + "loss": 1.7072, + "step": 1565 + }, + { + "epoch": 0.14207947740881874, + "grad_norm": 0.16280061387506214, + "learning_rate": 0.000967433936702089, + "loss": 1.6914, + "step": 1566 + }, + { + "epoch": 0.14217020504445654, + "grad_norm": 0.161949124231421, + "learning_rate": 0.0009673817580525441, + "loss": 1.6912, + "step": 1567 + }, + { + "epoch": 0.14226093268009435, + "grad_norm": 0.14995456843942498, + "learning_rate": 0.0009673295390445156, + "loss": 1.6885, + "step": 1568 + }, + { + "epoch": 0.14235166031573218, + "grad_norm": 0.15342336503951765, + "learning_rate": 0.0009672772796825124, + "loss": 1.7077, + "step": 1569 + }, + { + "epoch": 0.14244238795137, + "grad_norm": 0.16300054677624062, + "learning_rate": 0.0009672249799710474, + "loss": 1.7214, + "step": 1570 + }, + { + "epoch": 0.1425331155870078, + "grad_norm": 0.14594535201496098, + "learning_rate": 0.0009671726399146363, + "loss": 1.6764, + "step": 1571 + }, + { + "epoch": 0.14262384322264562, + "grad_norm": 0.162540404704507, + "learning_rate": 0.0009671202595177991, + "loss": 1.7193, + "step": 1572 + }, + { + "epoch": 0.14271457085828343, + "grad_norm": 0.14926725569985033, + "learning_rate": 0.0009670678387850585, + "loss": 1.6973, + "step": 1573 + }, + { + "epoch": 0.14280529849392126, + "grad_norm": 0.1516650917461977, + "learning_rate": 0.0009670153777209413, + "loss": 1.6677, + "step": 1574 + }, + { + "epoch": 0.14289602612955907, + "grad_norm": 0.14593527234704262, + "learning_rate": 0.0009669628763299774, + "loss": 1.7179, + "step": 1575 + }, + 
{ + "epoch": 0.14298675376519687, + "grad_norm": 0.15116698739207426, + "learning_rate": 0.0009669103346167002, + "loss": 1.7041, + "step": 1576 + }, + { + "epoch": 0.1430774814008347, + "grad_norm": 0.15625191613427747, + "learning_rate": 0.0009668577525856468, + "loss": 1.6813, + "step": 1577 + }, + { + "epoch": 0.1431682090364725, + "grad_norm": 0.15380321692624488, + "learning_rate": 0.0009668051302413577, + "loss": 1.7449, + "step": 1578 + }, + { + "epoch": 0.14325893667211032, + "grad_norm": 0.15167090991634669, + "learning_rate": 0.0009667524675883767, + "loss": 1.6782, + "step": 1579 + }, + { + "epoch": 0.14334966430774815, + "grad_norm": 0.16158369601111322, + "learning_rate": 0.0009666997646312514, + "loss": 1.694, + "step": 1580 + }, + { + "epoch": 0.14344039194338595, + "grad_norm": 0.1527531809519506, + "learning_rate": 0.0009666470213745327, + "loss": 1.6917, + "step": 1581 + }, + { + "epoch": 0.14353111957902376, + "grad_norm": 0.1557583009505683, + "learning_rate": 0.000966594237822775, + "loss": 1.6765, + "step": 1582 + }, + { + "epoch": 0.1436218472146616, + "grad_norm": 0.1636299724617481, + "learning_rate": 0.000966541413980536, + "loss": 1.6827, + "step": 1583 + }, + { + "epoch": 0.1437125748502994, + "grad_norm": 0.15658877322973613, + "learning_rate": 0.000966488549852377, + "loss": 1.7239, + "step": 1584 + }, + { + "epoch": 0.14380330248593723, + "grad_norm": 0.15349787281400873, + "learning_rate": 0.0009664356454428631, + "loss": 1.6623, + "step": 1585 + }, + { + "epoch": 0.14389403012157503, + "grad_norm": 0.16162880987812017, + "learning_rate": 0.0009663827007565624, + "loss": 1.7197, + "step": 1586 + }, + { + "epoch": 0.14398475775721284, + "grad_norm": 0.15515636800635574, + "learning_rate": 0.0009663297157980468, + "loss": 1.7048, + "step": 1587 + }, + { + "epoch": 0.14407548539285067, + "grad_norm": 0.15648849321895714, + "learning_rate": 0.0009662766905718916, + "loss": 1.7601, + "step": 1588 + }, + { + "epoch": 0.14416621302848848, + "grad_norm": 0.15400279333697584, + "learning_rate": 0.0009662236250826755, + "loss": 1.7037, + "step": 1589 + }, + { + "epoch": 0.14425694066412628, + "grad_norm": 0.15423733431192885, + "learning_rate": 0.0009661705193349804, + "loss": 1.7262, + "step": 1590 + }, + { + "epoch": 0.14434766829976411, + "grad_norm": 0.15225398890784922, + "learning_rate": 0.0009661173733333925, + "loss": 1.6502, + "step": 1591 + }, + { + "epoch": 0.14443839593540192, + "grad_norm": 0.1542992926459316, + "learning_rate": 0.0009660641870825005, + "loss": 1.7085, + "step": 1592 + }, + { + "epoch": 0.14452912357103972, + "grad_norm": 0.15328417188000062, + "learning_rate": 0.0009660109605868975, + "loss": 1.7334, + "step": 1593 + }, + { + "epoch": 0.14461985120667756, + "grad_norm": 0.16355805054465528, + "learning_rate": 0.0009659576938511791, + "loss": 1.6843, + "step": 1594 + }, + { + "epoch": 0.14471057884231536, + "grad_norm": 0.15701197072667505, + "learning_rate": 0.0009659043868799454, + "loss": 1.6734, + "step": 1595 + }, + { + "epoch": 0.1448013064779532, + "grad_norm": 0.15623001137994966, + "learning_rate": 0.000965851039677799, + "loss": 1.6399, + "step": 1596 + }, + { + "epoch": 0.144892034113591, + "grad_norm": 0.1537962762946322, + "learning_rate": 0.0009657976522493468, + "loss": 1.7094, + "step": 1597 + }, + { + "epoch": 0.1449827617492288, + "grad_norm": 0.16276751502311396, + "learning_rate": 0.0009657442245991985, + "loss": 1.6764, + "step": 1598 + }, + { + "epoch": 0.14507348938486664, + "grad_norm": 0.15230750968737974, + 
"learning_rate": 0.000965690756731968, + "loss": 1.7312, + "step": 1599 + }, + { + "epoch": 0.14516421702050444, + "grad_norm": 0.15841217828497697, + "learning_rate": 0.0009656372486522719, + "loss": 1.7559, + "step": 1600 + }, + { + "epoch": 0.14525494465614225, + "grad_norm": 0.1605406347285732, + "learning_rate": 0.0009655837003647307, + "loss": 1.6761, + "step": 1601 + }, + { + "epoch": 0.14534567229178008, + "grad_norm": 0.15309450293290888, + "learning_rate": 0.0009655301118739686, + "loss": 1.7276, + "step": 1602 + }, + { + "epoch": 0.1454363999274179, + "grad_norm": 0.15723764241591073, + "learning_rate": 0.0009654764831846126, + "loss": 1.7178, + "step": 1603 + }, + { + "epoch": 0.14552712756305572, + "grad_norm": 0.16074812563825291, + "learning_rate": 0.0009654228143012936, + "loss": 1.7177, + "step": 1604 + }, + { + "epoch": 0.14561785519869352, + "grad_norm": 0.15172045962217853, + "learning_rate": 0.000965369105228646, + "loss": 1.6707, + "step": 1605 + }, + { + "epoch": 0.14570858283433133, + "grad_norm": 0.15623342920371425, + "learning_rate": 0.0009653153559713076, + "loss": 1.7005, + "step": 1606 + }, + { + "epoch": 0.14579931046996916, + "grad_norm": 0.15793744954466848, + "learning_rate": 0.0009652615665339196, + "loss": 1.7138, + "step": 1607 + }, + { + "epoch": 0.14589003810560697, + "grad_norm": 0.1529438272995232, + "learning_rate": 0.0009652077369211267, + "loss": 1.668, + "step": 1608 + }, + { + "epoch": 0.14598076574124477, + "grad_norm": 0.15187586295134395, + "learning_rate": 0.0009651538671375774, + "loss": 1.7152, + "step": 1609 + }, + { + "epoch": 0.1460714933768826, + "grad_norm": 0.1571671800833422, + "learning_rate": 0.0009650999571879229, + "loss": 1.7067, + "step": 1610 + }, + { + "epoch": 0.1461622210125204, + "grad_norm": 0.15604078457436624, + "learning_rate": 0.0009650460070768185, + "loss": 1.6933, + "step": 1611 + }, + { + "epoch": 0.14625294864815822, + "grad_norm": 0.15281399495803089, + "learning_rate": 0.000964992016808923, + "loss": 1.6747, + "step": 1612 + }, + { + "epoch": 0.14634367628379605, + "grad_norm": 0.14807461926904536, + "learning_rate": 0.0009649379863888983, + "loss": 1.6968, + "step": 1613 + }, + { + "epoch": 0.14643440391943385, + "grad_norm": 0.14618578551000588, + "learning_rate": 0.00096488391582141, + "loss": 1.6704, + "step": 1614 + }, + { + "epoch": 0.14652513155507169, + "grad_norm": 0.15473783873080157, + "learning_rate": 0.0009648298051111268, + "loss": 1.7085, + "step": 1615 + }, + { + "epoch": 0.1466158591907095, + "grad_norm": 0.15164204216716468, + "learning_rate": 0.0009647756542627218, + "loss": 1.7279, + "step": 1616 + }, + { + "epoch": 0.1467065868263473, + "grad_norm": 0.14508127543847346, + "learning_rate": 0.0009647214632808702, + "loss": 1.6993, + "step": 1617 + }, + { + "epoch": 0.14679731446198513, + "grad_norm": 0.14625273522802348, + "learning_rate": 0.0009646672321702519, + "loss": 1.688, + "step": 1618 + }, + { + "epoch": 0.14688804209762293, + "grad_norm": 0.14813831489395535, + "learning_rate": 0.0009646129609355497, + "loss": 1.6405, + "step": 1619 + }, + { + "epoch": 0.14697876973326074, + "grad_norm": 0.15175254728179613, + "learning_rate": 0.0009645586495814497, + "loss": 1.7032, + "step": 1620 + }, + { + "epoch": 0.14706949736889857, + "grad_norm": 0.14530184774471294, + "learning_rate": 0.0009645042981126419, + "loss": 1.6855, + "step": 1621 + }, + { + "epoch": 0.14716022500453638, + "grad_norm": 0.14941787283861388, + "learning_rate": 0.0009644499065338195, + "loss": 1.6674, + "step": 1622 
+ }, + { + "epoch": 0.1472509526401742, + "grad_norm": 0.15499527138876315, + "learning_rate": 0.0009643954748496793, + "loss": 1.7288, + "step": 1623 + }, + { + "epoch": 0.14734168027581201, + "grad_norm": 0.15266269353470588, + "learning_rate": 0.0009643410030649212, + "loss": 1.6954, + "step": 1624 + }, + { + "epoch": 0.14743240791144982, + "grad_norm": 0.1577346404856441, + "learning_rate": 0.0009642864911842493, + "loss": 1.6686, + "step": 1625 + }, + { + "epoch": 0.14752313554708765, + "grad_norm": 0.15643188707888087, + "learning_rate": 0.0009642319392123702, + "loss": 1.7051, + "step": 1626 + }, + { + "epoch": 0.14761386318272546, + "grad_norm": 0.15177232699257925, + "learning_rate": 0.0009641773471539949, + "loss": 1.708, + "step": 1627 + }, + { + "epoch": 0.14770459081836326, + "grad_norm": 0.15328025318194138, + "learning_rate": 0.0009641227150138372, + "loss": 1.6781, + "step": 1628 + }, + { + "epoch": 0.1477953184540011, + "grad_norm": 0.15122935119924416, + "learning_rate": 0.0009640680427966147, + "loss": 1.6909, + "step": 1629 + }, + { + "epoch": 0.1478860460896389, + "grad_norm": 0.1544751775309506, + "learning_rate": 0.0009640133305070482, + "loss": 1.7369, + "step": 1630 + }, + { + "epoch": 0.1479767737252767, + "grad_norm": 0.1544922884389043, + "learning_rate": 0.0009639585781498623, + "loss": 1.6901, + "step": 1631 + }, + { + "epoch": 0.14806750136091454, + "grad_norm": 0.15150143312835393, + "learning_rate": 0.0009639037857297847, + "loss": 1.6906, + "step": 1632 + }, + { + "epoch": 0.14815822899655234, + "grad_norm": 0.1446489000435027, + "learning_rate": 0.000963848953251547, + "loss": 1.7291, + "step": 1633 + }, + { + "epoch": 0.14824895663219018, + "grad_norm": 0.1606980865371554, + "learning_rate": 0.0009637940807198837, + "loss": 1.6881, + "step": 1634 + }, + { + "epoch": 0.14833968426782798, + "grad_norm": 0.16379672204676576, + "learning_rate": 0.0009637391681395334, + "loss": 1.6836, + "step": 1635 + }, + { + "epoch": 0.1484304119034658, + "grad_norm": 0.15446083731107932, + "learning_rate": 0.0009636842155152372, + "loss": 1.6956, + "step": 1636 + }, + { + "epoch": 0.14852113953910362, + "grad_norm": 0.16863068397776546, + "learning_rate": 0.0009636292228517409, + "loss": 1.6549, + "step": 1637 + }, + { + "epoch": 0.14861186717474142, + "grad_norm": 0.14737569363438302, + "learning_rate": 0.0009635741901537929, + "loss": 1.7185, + "step": 1638 + }, + { + "epoch": 0.14870259481037923, + "grad_norm": 0.14254120957299882, + "learning_rate": 0.0009635191174261452, + "loss": 1.7196, + "step": 1639 + }, + { + "epoch": 0.14879332244601706, + "grad_norm": 0.16318326571026834, + "learning_rate": 0.0009634640046735533, + "loss": 1.7616, + "step": 1640 + }, + { + "epoch": 0.14888405008165487, + "grad_norm": 0.16452230889141742, + "learning_rate": 0.0009634088519007764, + "loss": 1.6741, + "step": 1641 + }, + { + "epoch": 0.1489747777172927, + "grad_norm": 0.1525539422404198, + "learning_rate": 0.0009633536591125768, + "loss": 1.7333, + "step": 1642 + }, + { + "epoch": 0.1490655053529305, + "grad_norm": 0.16349985054936697, + "learning_rate": 0.0009632984263137205, + "loss": 1.6894, + "step": 1643 + }, + { + "epoch": 0.1491562329885683, + "grad_norm": 0.15838372002981804, + "learning_rate": 0.0009632431535089767, + "loss": 1.6775, + "step": 1644 + }, + { + "epoch": 0.14924696062420614, + "grad_norm": 0.15919754693847063, + "learning_rate": 0.0009631878407031183, + "loss": 1.6894, + "step": 1645 + }, + { + "epoch": 0.14933768825984395, + "grad_norm": 
0.14990974248168967, + "learning_rate": 0.0009631324879009218, + "loss": 1.6935, + "step": 1646 + }, + { + "epoch": 0.14942841589548175, + "grad_norm": 0.1438218441446177, + "learning_rate": 0.0009630770951071666, + "loss": 1.667, + "step": 1647 + }, + { + "epoch": 0.14951914353111959, + "grad_norm": 0.15549632685285159, + "learning_rate": 0.0009630216623266359, + "loss": 1.7133, + "step": 1648 + }, + { + "epoch": 0.1496098711667574, + "grad_norm": 0.15111059210278113, + "learning_rate": 0.0009629661895641165, + "loss": 1.6824, + "step": 1649 + }, + { + "epoch": 0.1497005988023952, + "grad_norm": 0.14324164866830236, + "learning_rate": 0.0009629106768243983, + "loss": 1.6353, + "step": 1650 + }, + { + "epoch": 0.14979132643803303, + "grad_norm": 0.14768124792512058, + "learning_rate": 0.0009628551241122749, + "loss": 1.671, + "step": 1651 + }, + { + "epoch": 0.14988205407367083, + "grad_norm": 0.15077221438008775, + "learning_rate": 0.0009627995314325436, + "loss": 1.7195, + "step": 1652 + }, + { + "epoch": 0.14997278170930867, + "grad_norm": 0.14991691567706267, + "learning_rate": 0.0009627438987900044, + "loss": 1.7055, + "step": 1653 + }, + { + "epoch": 0.15006350934494647, + "grad_norm": 0.1573863050166732, + "learning_rate": 0.0009626882261894612, + "loss": 1.6407, + "step": 1654 + }, + { + "epoch": 0.15015423698058428, + "grad_norm": 0.1491008457377192, + "learning_rate": 0.0009626325136357216, + "loss": 1.7124, + "step": 1655 + }, + { + "epoch": 0.1502449646162221, + "grad_norm": 0.14686903451023217, + "learning_rate": 0.0009625767611335963, + "loss": 1.6534, + "step": 1656 + }, + { + "epoch": 0.15033569225185991, + "grad_norm": 0.1519390604323493, + "learning_rate": 0.0009625209686878993, + "loss": 1.7304, + "step": 1657 + }, + { + "epoch": 0.15042641988749772, + "grad_norm": 0.15462477919376633, + "learning_rate": 0.0009624651363034487, + "loss": 1.7378, + "step": 1658 + }, + { + "epoch": 0.15051714752313555, + "grad_norm": 0.1476157904615709, + "learning_rate": 0.0009624092639850654, + "loss": 1.674, + "step": 1659 + }, + { + "epoch": 0.15060787515877336, + "grad_norm": 0.1475060934565435, + "learning_rate": 0.0009623533517375738, + "loss": 1.7037, + "step": 1660 + }, + { + "epoch": 0.1506986027944112, + "grad_norm": 0.1459782554972735, + "learning_rate": 0.0009622973995658024, + "loss": 1.6761, + "step": 1661 + }, + { + "epoch": 0.150789330430049, + "grad_norm": 0.14782076986543358, + "learning_rate": 0.0009622414074745823, + "loss": 1.6811, + "step": 1662 + }, + { + "epoch": 0.1508800580656868, + "grad_norm": 0.15150677207005897, + "learning_rate": 0.0009621853754687485, + "loss": 1.7181, + "step": 1663 + }, + { + "epoch": 0.15097078570132463, + "grad_norm": 0.14516512260885053, + "learning_rate": 0.0009621293035531395, + "loss": 1.7009, + "step": 1664 + }, + { + "epoch": 0.15106151333696244, + "grad_norm": 0.14793325200899493, + "learning_rate": 0.0009620731917325968, + "loss": 1.7038, + "step": 1665 + }, + { + "epoch": 0.15115224097260024, + "grad_norm": 0.14778645394815862, + "learning_rate": 0.0009620170400119661, + "loss": 1.7114, + "step": 1666 + }, + { + "epoch": 0.15124296860823808, + "grad_norm": 0.14328387364304213, + "learning_rate": 0.000961960848396096, + "loss": 1.7019, + "step": 1667 + }, + { + "epoch": 0.15133369624387588, + "grad_norm": 0.14784885530299943, + "learning_rate": 0.0009619046168898384, + "loss": 1.7201, + "step": 1668 + }, + { + "epoch": 0.1514244238795137, + "grad_norm": 0.1513043168868256, + "learning_rate": 0.0009618483454980491, + "loss": 
1.6778, + "step": 1669 + }, + { + "epoch": 0.15151515151515152, + "grad_norm": 0.1538810191350235, + "learning_rate": 0.000961792034225587, + "loss": 1.6764, + "step": 1670 + }, + { + "epoch": 0.15160587915078932, + "grad_norm": 0.14467183871356268, + "learning_rate": 0.0009617356830773148, + "loss": 1.7019, + "step": 1671 + }, + { + "epoch": 0.15169660678642716, + "grad_norm": 0.14035750925554324, + "learning_rate": 0.0009616792920580982, + "loss": 1.7322, + "step": 1672 + }, + { + "epoch": 0.15178733442206496, + "grad_norm": 0.14944284207933742, + "learning_rate": 0.0009616228611728069, + "loss": 1.7075, + "step": 1673 + }, + { + "epoch": 0.15187806205770277, + "grad_norm": 0.14888958629138277, + "learning_rate": 0.0009615663904263132, + "loss": 1.7087, + "step": 1674 + }, + { + "epoch": 0.1519687896933406, + "grad_norm": 0.14539724617233826, + "learning_rate": 0.0009615098798234938, + "loss": 1.668, + "step": 1675 + }, + { + "epoch": 0.1520595173289784, + "grad_norm": 0.14841624256284824, + "learning_rate": 0.0009614533293692282, + "loss": 1.6634, + "step": 1676 + }, + { + "epoch": 0.1521502449646162, + "grad_norm": 0.14526762805986732, + "learning_rate": 0.0009613967390683998, + "loss": 1.688, + "step": 1677 + }, + { + "epoch": 0.15224097260025404, + "grad_norm": 0.1542328985625987, + "learning_rate": 0.0009613401089258949, + "loss": 1.7082, + "step": 1678 + }, + { + "epoch": 0.15233170023589185, + "grad_norm": 0.15389148754016027, + "learning_rate": 0.0009612834389466034, + "loss": 1.712, + "step": 1679 + }, + { + "epoch": 0.15242242787152968, + "grad_norm": 0.145265786648367, + "learning_rate": 0.0009612267291354191, + "loss": 1.6887, + "step": 1680 + }, + { + "epoch": 0.15251315550716749, + "grad_norm": 0.14535039231234562, + "learning_rate": 0.0009611699794972389, + "loss": 1.6605, + "step": 1681 + }, + { + "epoch": 0.1526038831428053, + "grad_norm": 0.15987327530418266, + "learning_rate": 0.0009611131900369627, + "loss": 1.7079, + "step": 1682 + }, + { + "epoch": 0.15269461077844312, + "grad_norm": 0.14659827416099322, + "learning_rate": 0.0009610563607594948, + "loss": 1.7042, + "step": 1683 + }, + { + "epoch": 0.15278533841408093, + "grad_norm": 0.14899041077050787, + "learning_rate": 0.0009609994916697422, + "loss": 1.6999, + "step": 1684 + }, + { + "epoch": 0.15287606604971873, + "grad_norm": 0.14892339818795888, + "learning_rate": 0.0009609425827726154, + "loss": 1.7122, + "step": 1685 + }, + { + "epoch": 0.15296679368535657, + "grad_norm": 0.1550944122256091, + "learning_rate": 0.0009608856340730288, + "loss": 1.7004, + "step": 1686 + }, + { + "epoch": 0.15305752132099437, + "grad_norm": 0.15347621489348603, + "learning_rate": 0.0009608286455758996, + "loss": 1.6886, + "step": 1687 + }, + { + "epoch": 0.15314824895663218, + "grad_norm": 0.15388152900617708, + "learning_rate": 0.0009607716172861492, + "loss": 1.68, + "step": 1688 + }, + { + "epoch": 0.15323897659227, + "grad_norm": 0.14934504783516359, + "learning_rate": 0.0009607145492087015, + "loss": 1.6914, + "step": 1689 + }, + { + "epoch": 0.15332970422790781, + "grad_norm": 0.1476214191084938, + "learning_rate": 0.0009606574413484847, + "loss": 1.689, + "step": 1690 + }, + { + "epoch": 0.15342043186354565, + "grad_norm": 0.15323204250723613, + "learning_rate": 0.0009606002937104299, + "loss": 1.7089, + "step": 1691 + }, + { + "epoch": 0.15351115949918345, + "grad_norm": 0.1506611031406367, + "learning_rate": 0.0009605431062994718, + "loss": 1.6937, + "step": 1692 + }, + { + "epoch": 0.15360188713482126, + "grad_norm": 
0.14457253794197336, + "learning_rate": 0.0009604858791205487, + "loss": 1.7094, + "step": 1693 + }, + { + "epoch": 0.1536926147704591, + "grad_norm": 0.14667283441854154, + "learning_rate": 0.000960428612178602, + "loss": 1.6944, + "step": 1694 + }, + { + "epoch": 0.1537833424060969, + "grad_norm": 0.14899829392434658, + "learning_rate": 0.0009603713054785768, + "loss": 1.7289, + "step": 1695 + }, + { + "epoch": 0.1538740700417347, + "grad_norm": 0.1456819647212202, + "learning_rate": 0.0009603139590254215, + "loss": 1.6712, + "step": 1696 + }, + { + "epoch": 0.15396479767737253, + "grad_norm": 0.14793149226808333, + "learning_rate": 0.000960256572824088, + "loss": 1.6267, + "step": 1697 + }, + { + "epoch": 0.15405552531301034, + "grad_norm": 0.15186007840221225, + "learning_rate": 0.0009601991468795316, + "loss": 1.7035, + "step": 1698 + }, + { + "epoch": 0.15414625294864817, + "grad_norm": 0.14266762434404337, + "learning_rate": 0.000960141681196711, + "loss": 1.699, + "step": 1699 + }, + { + "epoch": 0.15423698058428598, + "grad_norm": 0.14597957344706983, + "learning_rate": 0.0009600841757805885, + "loss": 1.6458, + "step": 1700 + }, + { + "epoch": 0.15432770821992378, + "grad_norm": 0.1462365434677655, + "learning_rate": 0.0009600266306361296, + "loss": 1.6618, + "step": 1701 + }, + { + "epoch": 0.15441843585556161, + "grad_norm": 0.1457806284589882, + "learning_rate": 0.0009599690457683033, + "loss": 1.6636, + "step": 1702 + }, + { + "epoch": 0.15450916349119942, + "grad_norm": 0.14831577297165077, + "learning_rate": 0.0009599114211820821, + "loss": 1.6432, + "step": 1703 + }, + { + "epoch": 0.15459989112683722, + "grad_norm": 0.15114878038408344, + "learning_rate": 0.0009598537568824419, + "loss": 1.6973, + "step": 1704 + }, + { + "epoch": 0.15469061876247506, + "grad_norm": 0.14955275885519287, + "learning_rate": 0.0009597960528743621, + "loss": 1.6576, + "step": 1705 + }, + { + "epoch": 0.15478134639811286, + "grad_norm": 0.14931252490500332, + "learning_rate": 0.0009597383091628252, + "loss": 1.6741, + "step": 1706 + }, + { + "epoch": 0.15487207403375067, + "grad_norm": 0.1510728406715583, + "learning_rate": 0.0009596805257528177, + "loss": 1.6814, + "step": 1707 + }, + { + "epoch": 0.1549628016693885, + "grad_norm": 0.1511606062818176, + "learning_rate": 0.000959622702649329, + "loss": 1.7167, + "step": 1708 + }, + { + "epoch": 0.1550535293050263, + "grad_norm": 0.14549761132854946, + "learning_rate": 0.0009595648398573522, + "loss": 1.6996, + "step": 1709 + }, + { + "epoch": 0.15514425694066414, + "grad_norm": 0.1537327021749211, + "learning_rate": 0.0009595069373818836, + "loss": 1.6897, + "step": 1710 + }, + { + "epoch": 0.15523498457630194, + "grad_norm": 0.1436754080824039, + "learning_rate": 0.0009594489952279235, + "loss": 1.6951, + "step": 1711 + }, + { + "epoch": 0.15532571221193975, + "grad_norm": 0.148371385942544, + "learning_rate": 0.0009593910134004748, + "loss": 1.7075, + "step": 1712 + }, + { + "epoch": 0.15541643984757758, + "grad_norm": 0.1527464637677261, + "learning_rate": 0.0009593329919045444, + "loss": 1.7185, + "step": 1713 + }, + { + "epoch": 0.15550716748321539, + "grad_norm": 0.1445038438149152, + "learning_rate": 0.0009592749307451424, + "loss": 1.6663, + "step": 1714 + }, + { + "epoch": 0.1555978951188532, + "grad_norm": 0.15067087136068746, + "learning_rate": 0.0009592168299272825, + "loss": 1.6904, + "step": 1715 + }, + { + "epoch": 0.15568862275449102, + "grad_norm": 0.14950039497058462, + "learning_rate": 0.0009591586894559817, + "loss": 1.6907, 
+ "step": 1716 + }, + { + "epoch": 0.15577935039012883, + "grad_norm": 0.14926234026553634, + "learning_rate": 0.0009591005093362603, + "loss": 1.6913, + "step": 1717 + }, + { + "epoch": 0.15587007802576666, + "grad_norm": 0.15020690261120975, + "learning_rate": 0.0009590422895731422, + "loss": 1.6755, + "step": 1718 + }, + { + "epoch": 0.15596080566140447, + "grad_norm": 0.14937786006117837, + "learning_rate": 0.0009589840301716549, + "loss": 1.6712, + "step": 1719 + }, + { + "epoch": 0.15605153329704227, + "grad_norm": 0.1441269495855302, + "learning_rate": 0.0009589257311368289, + "loss": 1.6848, + "step": 1720 + }, + { + "epoch": 0.1561422609326801, + "grad_norm": 0.1513128598754201, + "learning_rate": 0.0009588673924736983, + "loss": 1.6893, + "step": 1721 + }, + { + "epoch": 0.1562329885683179, + "grad_norm": 0.1480190801594217, + "learning_rate": 0.0009588090141873007, + "loss": 1.76, + "step": 1722 + }, + { + "epoch": 0.15632371620395571, + "grad_norm": 0.14452602564611866, + "learning_rate": 0.0009587505962826773, + "loss": 1.7115, + "step": 1723 + }, + { + "epoch": 0.15641444383959355, + "grad_norm": 0.1543618983214259, + "learning_rate": 0.0009586921387648721, + "loss": 1.6798, + "step": 1724 + }, + { + "epoch": 0.15650517147523135, + "grad_norm": 0.15543780913637925, + "learning_rate": 0.0009586336416389331, + "loss": 1.6858, + "step": 1725 + }, + { + "epoch": 0.15659589911086916, + "grad_norm": 0.1544399715673706, + "learning_rate": 0.0009585751049099117, + "loss": 1.7095, + "step": 1726 + }, + { + "epoch": 0.156686626746507, + "grad_norm": 0.1502760601889345, + "learning_rate": 0.0009585165285828623, + "loss": 1.6921, + "step": 1727 + }, + { + "epoch": 0.1567773543821448, + "grad_norm": 0.15598083673934587, + "learning_rate": 0.0009584579126628432, + "loss": 1.6674, + "step": 1728 + }, + { + "epoch": 0.15686808201778263, + "grad_norm": 0.15106123821219594, + "learning_rate": 0.0009583992571549157, + "loss": 1.6604, + "step": 1729 + }, + { + "epoch": 0.15695880965342043, + "grad_norm": 0.15922119700820248, + "learning_rate": 0.0009583405620641448, + "loss": 1.6784, + "step": 1730 + }, + { + "epoch": 0.15704953728905824, + "grad_norm": 0.14627355833419797, + "learning_rate": 0.0009582818273955988, + "loss": 1.7083, + "step": 1731 + }, + { + "epoch": 0.15714026492469607, + "grad_norm": 0.14687137493440616, + "learning_rate": 0.0009582230531543494, + "loss": 1.7429, + "step": 1732 + }, + { + "epoch": 0.15723099256033388, + "grad_norm": 0.14596633480229387, + "learning_rate": 0.0009581642393454719, + "loss": 1.6833, + "step": 1733 + }, + { + "epoch": 0.15732172019597168, + "grad_norm": 0.14400548240693384, + "learning_rate": 0.0009581053859740447, + "loss": 1.6823, + "step": 1734 + }, + { + "epoch": 0.15741244783160951, + "grad_norm": 0.1478028468844674, + "learning_rate": 0.00095804649304515, + "loss": 1.7284, + "step": 1735 + }, + { + "epoch": 0.15750317546724732, + "grad_norm": 0.1452152670182663, + "learning_rate": 0.0009579875605638732, + "loss": 1.7119, + "step": 1736 + }, + { + "epoch": 0.15759390310288515, + "grad_norm": 0.15768989075305523, + "learning_rate": 0.0009579285885353029, + "loss": 1.7332, + "step": 1737 + }, + { + "epoch": 0.15768463073852296, + "grad_norm": 0.14465289562002892, + "learning_rate": 0.0009578695769645316, + "loss": 1.6792, + "step": 1738 + }, + { + "epoch": 0.15777535837416076, + "grad_norm": 0.14946275596459746, + "learning_rate": 0.0009578105258566547, + "loss": 1.6935, + "step": 1739 + }, + { + "epoch": 0.1578660860097986, + "grad_norm": 
0.14342290940876476, + "learning_rate": 0.0009577514352167715, + "loss": 1.6437, + "step": 1740 + }, + { + "epoch": 0.1579568136454364, + "grad_norm": 0.15634654957112598, + "learning_rate": 0.0009576923050499844, + "loss": 1.6927, + "step": 1741 + }, + { + "epoch": 0.1580475412810742, + "grad_norm": 0.14003124357979033, + "learning_rate": 0.0009576331353613994, + "loss": 1.6685, + "step": 1742 + }, + { + "epoch": 0.15813826891671204, + "grad_norm": 0.1481705965369682, + "learning_rate": 0.0009575739261561256, + "loss": 1.6805, + "step": 1743 + }, + { + "epoch": 0.15822899655234984, + "grad_norm": 0.13711645329730235, + "learning_rate": 0.0009575146774392758, + "loss": 1.6424, + "step": 1744 + }, + { + "epoch": 0.15831972418798765, + "grad_norm": 0.14164257951838236, + "learning_rate": 0.0009574553892159663, + "loss": 1.6801, + "step": 1745 + }, + { + "epoch": 0.15841045182362548, + "grad_norm": 0.13933651473041697, + "learning_rate": 0.0009573960614913164, + "loss": 1.7013, + "step": 1746 + }, + { + "epoch": 0.15850117945926329, + "grad_norm": 0.1474648541171814, + "learning_rate": 0.0009573366942704492, + "loss": 1.7024, + "step": 1747 + }, + { + "epoch": 0.15859190709490112, + "grad_norm": 0.15267292220923195, + "learning_rate": 0.0009572772875584911, + "loss": 1.6634, + "step": 1748 + }, + { + "epoch": 0.15868263473053892, + "grad_norm": 0.14216803278637455, + "learning_rate": 0.0009572178413605718, + "loss": 1.6913, + "step": 1749 + }, + { + "epoch": 0.15877336236617673, + "grad_norm": 0.1437892398376636, + "learning_rate": 0.0009571583556818245, + "loss": 1.6925, + "step": 1750 + }, + { + "epoch": 0.15886409000181456, + "grad_norm": 0.14494369491746498, + "learning_rate": 0.0009570988305273858, + "loss": 1.6795, + "step": 1751 + }, + { + "epoch": 0.15895481763745237, + "grad_norm": 0.15092643972943107, + "learning_rate": 0.0009570392659023957, + "loss": 1.7387, + "step": 1752 + }, + { + "epoch": 0.15904554527309017, + "grad_norm": 0.14108376323519287, + "learning_rate": 0.0009569796618119977, + "loss": 1.7735, + "step": 1753 + }, + { + "epoch": 0.159136272908728, + "grad_norm": 0.15044650156147632, + "learning_rate": 0.0009569200182613385, + "loss": 1.7209, + "step": 1754 + }, + { + "epoch": 0.1592270005443658, + "grad_norm": 0.15073076170845326, + "learning_rate": 0.0009568603352555684, + "loss": 1.6908, + "step": 1755 + }, + { + "epoch": 0.15931772818000364, + "grad_norm": 0.14717736369477358, + "learning_rate": 0.000956800612799841, + "loss": 1.6887, + "step": 1756 + }, + { + "epoch": 0.15940845581564145, + "grad_norm": 0.14270956279312597, + "learning_rate": 0.0009567408508993134, + "loss": 1.6919, + "step": 1757 + }, + { + "epoch": 0.15949918345127925, + "grad_norm": 0.1412686604934792, + "learning_rate": 0.0009566810495591459, + "loss": 1.6815, + "step": 1758 + }, + { + "epoch": 0.15958991108691709, + "grad_norm": 0.13649270243006292, + "learning_rate": 0.0009566212087845025, + "loss": 1.653, + "step": 1759 + }, + { + "epoch": 0.1596806387225549, + "grad_norm": 0.1381104842989118, + "learning_rate": 0.0009565613285805506, + "loss": 1.7026, + "step": 1760 + }, + { + "epoch": 0.1597713663581927, + "grad_norm": 0.1407142067519727, + "learning_rate": 0.0009565014089524604, + "loss": 1.666, + "step": 1761 + }, + { + "epoch": 0.15986209399383053, + "grad_norm": 0.14273554039655648, + "learning_rate": 0.0009564414499054065, + "loss": 1.7215, + "step": 1762 + }, + { + "epoch": 0.15995282162946833, + "grad_norm": 0.14556857621149075, + "learning_rate": 0.000956381451444566, + "loss": 
1.7188, + "step": 1763 + }, + { + "epoch": 0.16004354926510614, + "grad_norm": 0.14614502783496072, + "learning_rate": 0.0009563214135751199, + "loss": 1.667, + "step": 1764 + }, + { + "epoch": 0.16013427690074397, + "grad_norm": 0.14546070763109906, + "learning_rate": 0.0009562613363022526, + "loss": 1.6726, + "step": 1765 + }, + { + "epoch": 0.16022500453638178, + "grad_norm": 0.15339409532332057, + "learning_rate": 0.0009562012196311515, + "loss": 1.6409, + "step": 1766 + }, + { + "epoch": 0.1603157321720196, + "grad_norm": 0.14689359859867643, + "learning_rate": 0.0009561410635670079, + "loss": 1.6862, + "step": 1767 + }, + { + "epoch": 0.16040645980765741, + "grad_norm": 0.14776687573649272, + "learning_rate": 0.0009560808681150164, + "loss": 1.6914, + "step": 1768 + }, + { + "epoch": 0.16049718744329522, + "grad_norm": 0.1487709031722604, + "learning_rate": 0.0009560206332803745, + "loss": 1.6927, + "step": 1769 + }, + { + "epoch": 0.16058791507893305, + "grad_norm": 0.14226185905368746, + "learning_rate": 0.0009559603590682837, + "loss": 1.7021, + "step": 1770 + }, + { + "epoch": 0.16067864271457086, + "grad_norm": 0.143045624272326, + "learning_rate": 0.0009559000454839488, + "loss": 1.6871, + "step": 1771 + }, + { + "epoch": 0.16076937035020866, + "grad_norm": 0.1529181647302779, + "learning_rate": 0.0009558396925325778, + "loss": 1.698, + "step": 1772 + }, + { + "epoch": 0.1608600979858465, + "grad_norm": 0.15839956796850574, + "learning_rate": 0.000955779300219382, + "loss": 1.7247, + "step": 1773 + }, + { + "epoch": 0.1609508256214843, + "grad_norm": 0.14532990733601703, + "learning_rate": 0.0009557188685495767, + "loss": 1.7114, + "step": 1774 + }, + { + "epoch": 0.16104155325712213, + "grad_norm": 0.1527195834813221, + "learning_rate": 0.0009556583975283798, + "loss": 1.6654, + "step": 1775 + }, + { + "epoch": 0.16113228089275994, + "grad_norm": 0.14193833793645638, + "learning_rate": 0.0009555978871610131, + "loss": 1.68, + "step": 1776 + }, + { + "epoch": 0.16122300852839774, + "grad_norm": 0.14125628328522655, + "learning_rate": 0.0009555373374527016, + "loss": 1.7058, + "step": 1777 + }, + { + "epoch": 0.16131373616403558, + "grad_norm": 0.14896142514163854, + "learning_rate": 0.0009554767484086741, + "loss": 1.7105, + "step": 1778 + }, + { + "epoch": 0.16140446379967338, + "grad_norm": 0.14956099875608905, + "learning_rate": 0.0009554161200341622, + "loss": 1.7124, + "step": 1779 + }, + { + "epoch": 0.1614951914353112, + "grad_norm": 0.15197312120123854, + "learning_rate": 0.0009553554523344011, + "loss": 1.7173, + "step": 1780 + }, + { + "epoch": 0.16158591907094902, + "grad_norm": 0.1428961773448787, + "learning_rate": 0.0009552947453146297, + "loss": 1.6275, + "step": 1781 + }, + { + "epoch": 0.16167664670658682, + "grad_norm": 0.14890506739037, + "learning_rate": 0.0009552339989800898, + "loss": 1.6333, + "step": 1782 + }, + { + "epoch": 0.16176737434222463, + "grad_norm": 0.19456701562162812, + "learning_rate": 0.0009551732133360271, + "loss": 1.7042, + "step": 1783 + }, + { + "epoch": 0.16185810197786246, + "grad_norm": 0.1452661548396083, + "learning_rate": 0.0009551123883876902, + "loss": 1.74, + "step": 1784 + }, + { + "epoch": 0.16194882961350027, + "grad_norm": 0.17562312199803834, + "learning_rate": 0.0009550515241403317, + "loss": 1.6851, + "step": 1785 + }, + { + "epoch": 0.1620395572491381, + "grad_norm": 0.14847029182275637, + "learning_rate": 0.000954990620599207, + "loss": 1.6987, + "step": 1786 + }, + { + "epoch": 0.1621302848847759, + "grad_norm": 
0.1463303409921293, + "learning_rate": 0.0009549296777695748, + "loss": 1.7163, + "step": 1787 + }, + { + "epoch": 0.1622210125204137, + "grad_norm": 0.14816099123345078, + "learning_rate": 0.0009548686956566984, + "loss": 1.6975, + "step": 1788 + }, + { + "epoch": 0.16231174015605154, + "grad_norm": 0.15814076583814723, + "learning_rate": 0.0009548076742658427, + "loss": 1.6646, + "step": 1789 + }, + { + "epoch": 0.16240246779168935, + "grad_norm": 0.14097850592745753, + "learning_rate": 0.0009547466136022774, + "loss": 1.7092, + "step": 1790 + }, + { + "epoch": 0.16249319542732715, + "grad_norm": 0.14683710926064636, + "learning_rate": 0.0009546855136712752, + "loss": 1.7021, + "step": 1791 + }, + { + "epoch": 0.16258392306296499, + "grad_norm": 0.14913568745688144, + "learning_rate": 0.0009546243744781116, + "loss": 1.6705, + "step": 1792 + }, + { + "epoch": 0.1626746506986028, + "grad_norm": 0.14352774025810724, + "learning_rate": 0.0009545631960280662, + "loss": 1.7038, + "step": 1793 + }, + { + "epoch": 0.16276537833424062, + "grad_norm": 0.1530580556268464, + "learning_rate": 0.000954501978326422, + "loss": 1.6552, + "step": 1794 + }, + { + "epoch": 0.16285610596987843, + "grad_norm": 0.14502341117377815, + "learning_rate": 0.0009544407213784651, + "loss": 1.6586, + "step": 1795 + }, + { + "epoch": 0.16294683360551623, + "grad_norm": 0.14564313658457975, + "learning_rate": 0.0009543794251894847, + "loss": 1.6897, + "step": 1796 + }, + { + "epoch": 0.16303756124115407, + "grad_norm": 0.1410732559423456, + "learning_rate": 0.0009543180897647741, + "loss": 1.6728, + "step": 1797 + }, + { + "epoch": 0.16312828887679187, + "grad_norm": 0.14035728718334228, + "learning_rate": 0.0009542567151096294, + "loss": 1.6881, + "step": 1798 + }, + { + "epoch": 0.16321901651242968, + "grad_norm": 0.1476603065061341, + "learning_rate": 0.0009541953012293505, + "loss": 1.6767, + "step": 1799 + }, + { + "epoch": 0.1633097441480675, + "grad_norm": 0.14870866724958232, + "learning_rate": 0.0009541338481292404, + "loss": 1.6939, + "step": 1800 + }, + { + "epoch": 0.16340047178370531, + "grad_norm": 0.15046845323431512, + "learning_rate": 0.0009540723558146055, + "loss": 1.6978, + "step": 1801 + }, + { + "epoch": 0.16349119941934312, + "grad_norm": 0.14445305887250423, + "learning_rate": 0.0009540108242907557, + "loss": 1.6881, + "step": 1802 + }, + { + "epoch": 0.16358192705498095, + "grad_norm": 0.14746650236427555, + "learning_rate": 0.0009539492535630044, + "loss": 1.7153, + "step": 1803 + }, + { + "epoch": 0.16367265469061876, + "grad_norm": 0.14830035509718675, + "learning_rate": 0.0009538876436366681, + "loss": 1.7289, + "step": 1804 + }, + { + "epoch": 0.1637633823262566, + "grad_norm": 0.14104121819672563, + "learning_rate": 0.0009538259945170671, + "loss": 1.6764, + "step": 1805 + }, + { + "epoch": 0.1638541099618944, + "grad_norm": 0.15130122815776514, + "learning_rate": 0.0009537643062095243, + "loss": 1.6679, + "step": 1806 + }, + { + "epoch": 0.1639448375975322, + "grad_norm": 0.14498682066145402, + "learning_rate": 0.000953702578719367, + "loss": 1.7062, + "step": 1807 + }, + { + "epoch": 0.16403556523317003, + "grad_norm": 0.15014153271899505, + "learning_rate": 0.0009536408120519249, + "loss": 1.6889, + "step": 1808 + }, + { + "epoch": 0.16412629286880784, + "grad_norm": 0.1523853395031965, + "learning_rate": 0.000953579006212532, + "loss": 1.686, + "step": 1809 + }, + { + "epoch": 0.16421702050444564, + "grad_norm": 0.15368508712921186, + "learning_rate": 0.000953517161206525, + "loss": 
1.6737, + "step": 1810 + }, + { + "epoch": 0.16430774814008348, + "grad_norm": 0.14778099757906227, + "learning_rate": 0.0009534552770392444, + "loss": 1.6895, + "step": 1811 + }, + { + "epoch": 0.16439847577572128, + "grad_norm": 0.14321635017828385, + "learning_rate": 0.0009533933537160338, + "loss": 1.6915, + "step": 1812 + }, + { + "epoch": 0.1644892034113591, + "grad_norm": 0.1434970816134253, + "learning_rate": 0.0009533313912422401, + "loss": 1.6336, + "step": 1813 + }, + { + "epoch": 0.16457993104699692, + "grad_norm": 0.13965451063668347, + "learning_rate": 0.0009532693896232141, + "loss": 1.7213, + "step": 1814 + }, + { + "epoch": 0.16467065868263472, + "grad_norm": 0.1414841712816428, + "learning_rate": 0.0009532073488643094, + "loss": 1.7242, + "step": 1815 + }, + { + "epoch": 0.16476138631827256, + "grad_norm": 0.14574282660935, + "learning_rate": 0.0009531452689708833, + "loss": 1.6579, + "step": 1816 + }, + { + "epoch": 0.16485211395391036, + "grad_norm": 0.13995347093879987, + "learning_rate": 0.0009530831499482966, + "loss": 1.6522, + "step": 1817 + }, + { + "epoch": 0.16494284158954817, + "grad_norm": 0.14764940137212768, + "learning_rate": 0.000953020991801913, + "loss": 1.6943, + "step": 1818 + }, + { + "epoch": 0.165033569225186, + "grad_norm": 0.14595147236640796, + "learning_rate": 0.0009529587945371, + "loss": 1.6604, + "step": 1819 + }, + { + "epoch": 0.1651242968608238, + "grad_norm": 0.14199622230403833, + "learning_rate": 0.0009528965581592284, + "loss": 1.6682, + "step": 1820 + }, + { + "epoch": 0.1652150244964616, + "grad_norm": 0.14443979195983872, + "learning_rate": 0.0009528342826736722, + "loss": 1.6723, + "step": 1821 + }, + { + "epoch": 0.16530575213209944, + "grad_norm": 0.13954128196668233, + "learning_rate": 0.0009527719680858089, + "loss": 1.6639, + "step": 1822 + }, + { + "epoch": 0.16539647976773725, + "grad_norm": 0.14392479768338476, + "learning_rate": 0.0009527096144010196, + "loss": 1.6859, + "step": 1823 + }, + { + "epoch": 0.16548720740337508, + "grad_norm": 0.14677362087345733, + "learning_rate": 0.0009526472216246882, + "loss": 1.6985, + "step": 1824 + }, + { + "epoch": 0.16557793503901289, + "grad_norm": 0.1459780464747039, + "learning_rate": 0.0009525847897622026, + "loss": 1.7432, + "step": 1825 + }, + { + "epoch": 0.1656686626746507, + "grad_norm": 0.1499480543071059, + "learning_rate": 0.0009525223188189536, + "loss": 1.6987, + "step": 1826 + }, + { + "epoch": 0.16575939031028852, + "grad_norm": 0.14375846922663466, + "learning_rate": 0.0009524598088003357, + "loss": 1.65, + "step": 1827 + }, + { + "epoch": 0.16585011794592633, + "grad_norm": 0.1433818287014888, + "learning_rate": 0.0009523972597117467, + "loss": 1.6569, + "step": 1828 + }, + { + "epoch": 0.16594084558156413, + "grad_norm": 0.14565618621275367, + "learning_rate": 0.0009523346715585877, + "loss": 1.6813, + "step": 1829 + }, + { + "epoch": 0.16603157321720197, + "grad_norm": 0.1469781430221869, + "learning_rate": 0.0009522720443462629, + "loss": 1.6398, + "step": 1830 + }, + { + "epoch": 0.16612230085283977, + "grad_norm": 0.1487325236821966, + "learning_rate": 0.0009522093780801806, + "loss": 1.6852, + "step": 1831 + }, + { + "epoch": 0.16621302848847758, + "grad_norm": 0.15112805463241424, + "learning_rate": 0.0009521466727657518, + "loss": 1.6956, + "step": 1832 + }, + { + "epoch": 0.1663037561241154, + "grad_norm": 0.15119308841016044, + "learning_rate": 0.0009520839284083913, + "loss": 1.6958, + "step": 1833 + }, + { + "epoch": 0.16639448375975321, + "grad_norm": 
0.14374378915981723, + "learning_rate": 0.0009520211450135168, + "loss": 1.6571, + "step": 1834 + }, + { + "epoch": 0.16648521139539105, + "grad_norm": 0.1466322678705494, + "learning_rate": 0.0009519583225865498, + "loss": 1.7161, + "step": 1835 + }, + { + "epoch": 0.16657593903102885, + "grad_norm": 0.14635263576392585, + "learning_rate": 0.0009518954611329152, + "loss": 1.7077, + "step": 1836 + }, + { + "epoch": 0.16666666666666666, + "grad_norm": 0.14532429091664284, + "learning_rate": 0.0009518325606580407, + "loss": 1.7247, + "step": 1837 + }, + { + "epoch": 0.1667573943023045, + "grad_norm": 0.1402593922076757, + "learning_rate": 0.0009517696211673581, + "loss": 1.7251, + "step": 1838 + }, + { + "epoch": 0.1668481219379423, + "grad_norm": 0.13782502310402012, + "learning_rate": 0.0009517066426663021, + "loss": 1.6994, + "step": 1839 + }, + { + "epoch": 0.1669388495735801, + "grad_norm": 0.14092858405095918, + "learning_rate": 0.000951643625160311, + "loss": 1.686, + "step": 1840 + }, + { + "epoch": 0.16702957720921793, + "grad_norm": 0.1474040540696151, + "learning_rate": 0.0009515805686548262, + "loss": 1.6932, + "step": 1841 + }, + { + "epoch": 0.16712030484485574, + "grad_norm": 0.14524659382338534, + "learning_rate": 0.0009515174731552929, + "loss": 1.6731, + "step": 1842 + }, + { + "epoch": 0.16721103248049357, + "grad_norm": 0.1448753363146247, + "learning_rate": 0.000951454338667159, + "loss": 1.6296, + "step": 1843 + }, + { + "epoch": 0.16730176011613138, + "grad_norm": 0.14728706408399267, + "learning_rate": 0.0009513911651958766, + "loss": 1.6881, + "step": 1844 + }, + { + "epoch": 0.16739248775176918, + "grad_norm": 0.14293039262868418, + "learning_rate": 0.0009513279527469005, + "loss": 1.6522, + "step": 1845 + }, + { + "epoch": 0.16748321538740701, + "grad_norm": 0.13785813550774534, + "learning_rate": 0.0009512647013256892, + "loss": 1.6977, + "step": 1846 + }, + { + "epoch": 0.16757394302304482, + "grad_norm": 0.15604054491609556, + "learning_rate": 0.0009512014109377045, + "loss": 1.6654, + "step": 1847 + }, + { + "epoch": 0.16766467065868262, + "grad_norm": 0.14188820351558476, + "learning_rate": 0.0009511380815884114, + "loss": 1.6912, + "step": 1848 + }, + { + "epoch": 0.16775539829432046, + "grad_norm": 0.14270408877122204, + "learning_rate": 0.0009510747132832785, + "loss": 1.6846, + "step": 1849 + }, + { + "epoch": 0.16784612592995826, + "grad_norm": 0.15194830240285376, + "learning_rate": 0.0009510113060277774, + "loss": 1.6659, + "step": 1850 + }, + { + "epoch": 0.16793685356559607, + "grad_norm": 0.1512443122701145, + "learning_rate": 0.0009509478598273837, + "loss": 1.6668, + "step": 1851 + }, + { + "epoch": 0.1680275812012339, + "grad_norm": 0.14102999289560758, + "learning_rate": 0.0009508843746875759, + "loss": 1.6493, + "step": 1852 + }, + { + "epoch": 0.1681183088368717, + "grad_norm": 0.14613547515637282, + "learning_rate": 0.0009508208506138358, + "loss": 1.6454, + "step": 1853 + }, + { + "epoch": 0.16820903647250954, + "grad_norm": 0.14124612064635853, + "learning_rate": 0.000950757287611649, + "loss": 1.6766, + "step": 1854 + }, + { + "epoch": 0.16829976410814734, + "grad_norm": 0.13588068515557156, + "learning_rate": 0.0009506936856865038, + "loss": 1.6739, + "step": 1855 + }, + { + "epoch": 0.16839049174378515, + "grad_norm": 0.13881558234457445, + "learning_rate": 0.0009506300448438924, + "loss": 1.6887, + "step": 1856 + }, + { + "epoch": 0.16848121937942298, + "grad_norm": 0.14369210589429499, + "learning_rate": 0.0009505663650893104, + "loss": 
1.6574, + "step": 1857 + }, + { + "epoch": 0.16857194701506079, + "grad_norm": 0.14359172320836158, + "learning_rate": 0.0009505026464282563, + "loss": 1.6579, + "step": 1858 + }, + { + "epoch": 0.1686626746506986, + "grad_norm": 0.14668709191730966, + "learning_rate": 0.0009504388888662321, + "loss": 1.6168, + "step": 1859 + }, + { + "epoch": 0.16875340228633642, + "grad_norm": 0.14480702648141253, + "learning_rate": 0.0009503750924087436, + "loss": 1.7107, + "step": 1860 + }, + { + "epoch": 0.16884412992197423, + "grad_norm": 0.1489966155596639, + "learning_rate": 0.0009503112570612993, + "loss": 1.6746, + "step": 1861 + }, + { + "epoch": 0.16893485755761206, + "grad_norm": 0.1397855459093095, + "learning_rate": 0.0009502473828294118, + "loss": 1.6976, + "step": 1862 + }, + { + "epoch": 0.16902558519324987, + "grad_norm": 0.14323880300693534, + "learning_rate": 0.0009501834697185964, + "loss": 1.7172, + "step": 1863 + }, + { + "epoch": 0.16911631282888767, + "grad_norm": 0.1454229889499067, + "learning_rate": 0.0009501195177343721, + "loss": 1.7229, + "step": 1864 + }, + { + "epoch": 0.1692070404645255, + "grad_norm": 0.14389877595315725, + "learning_rate": 0.000950055526882261, + "loss": 1.6614, + "step": 1865 + }, + { + "epoch": 0.1692977681001633, + "grad_norm": 0.1401565870451138, + "learning_rate": 0.0009499914971677889, + "loss": 1.6579, + "step": 1866 + }, + { + "epoch": 0.16938849573580111, + "grad_norm": 0.14042877757304312, + "learning_rate": 0.0009499274285964846, + "loss": 1.7308, + "step": 1867 + }, + { + "epoch": 0.16947922337143895, + "grad_norm": 0.14780653616859035, + "learning_rate": 0.0009498633211738806, + "loss": 1.6838, + "step": 1868 + }, + { + "epoch": 0.16956995100707675, + "grad_norm": 0.14881831745716215, + "learning_rate": 0.0009497991749055125, + "loss": 1.6744, + "step": 1869 + }, + { + "epoch": 0.16966067864271456, + "grad_norm": 0.1463782659336543, + "learning_rate": 0.0009497349897969194, + "loss": 1.6395, + "step": 1870 + }, + { + "epoch": 0.1697514062783524, + "grad_norm": 0.14477843845106012, + "learning_rate": 0.0009496707658536435, + "loss": 1.6667, + "step": 1871 + }, + { + "epoch": 0.1698421339139902, + "grad_norm": 0.14508364035539176, + "learning_rate": 0.0009496065030812308, + "loss": 1.6787, + "step": 1872 + }, + { + "epoch": 0.16993286154962803, + "grad_norm": 0.1410838163111932, + "learning_rate": 0.0009495422014852302, + "loss": 1.6579, + "step": 1873 + }, + { + "epoch": 0.17002358918526583, + "grad_norm": 0.14010195025436123, + "learning_rate": 0.0009494778610711945, + "loss": 1.6576, + "step": 1874 + }, + { + "epoch": 0.17011431682090364, + "grad_norm": 0.1420685345454115, + "learning_rate": 0.0009494134818446789, + "loss": 1.6662, + "step": 1875 + }, + { + "epoch": 0.17020504445654147, + "grad_norm": 0.13464633876351895, + "learning_rate": 0.0009493490638112432, + "loss": 1.6536, + "step": 1876 + }, + { + "epoch": 0.17029577209217928, + "grad_norm": 0.14642231595500232, + "learning_rate": 0.0009492846069764495, + "loss": 1.6672, + "step": 1877 + }, + { + "epoch": 0.17038649972781708, + "grad_norm": 0.1401212598555627, + "learning_rate": 0.0009492201113458637, + "loss": 1.7039, + "step": 1878 + }, + { + "epoch": 0.17047722736345491, + "grad_norm": 0.15181357849768218, + "learning_rate": 0.0009491555769250552, + "loss": 1.6973, + "step": 1879 + }, + { + "epoch": 0.17056795499909272, + "grad_norm": 0.14894905520688576, + "learning_rate": 0.0009490910037195964, + "loss": 1.663, + "step": 1880 + }, + { + "epoch": 0.17065868263473055, + 
"grad_norm": 0.13767483987366025, + "learning_rate": 0.0009490263917350632, + "loss": 1.672, + "step": 1881 + }, + { + "epoch": 0.17074941027036836, + "grad_norm": 0.13643493063628082, + "learning_rate": 0.0009489617409770348, + "loss": 1.6946, + "step": 1882 + }, + { + "epoch": 0.17084013790600616, + "grad_norm": 0.14103633805352808, + "learning_rate": 0.000948897051451094, + "loss": 1.6827, + "step": 1883 + }, + { + "epoch": 0.170930865541644, + "grad_norm": 0.14126037866185318, + "learning_rate": 0.0009488323231628266, + "loss": 1.6645, + "step": 1884 + }, + { + "epoch": 0.1710215931772818, + "grad_norm": 0.14535094522394798, + "learning_rate": 0.0009487675561178221, + "loss": 1.6916, + "step": 1885 + }, + { + "epoch": 0.1711123208129196, + "grad_norm": 0.14818251943993208, + "learning_rate": 0.0009487027503216729, + "loss": 1.7106, + "step": 1886 + }, + { + "epoch": 0.17120304844855744, + "grad_norm": 0.14433278960645196, + "learning_rate": 0.000948637905779975, + "loss": 1.6775, + "step": 1887 + }, + { + "epoch": 0.17129377608419524, + "grad_norm": 0.15400401099887154, + "learning_rate": 0.000948573022498328, + "loss": 1.6912, + "step": 1888 + }, + { + "epoch": 0.17138450371983305, + "grad_norm": 0.14036557543876688, + "learning_rate": 0.0009485081004823342, + "loss": 1.6808, + "step": 1889 + }, + { + "epoch": 0.17147523135547088, + "grad_norm": 0.1430338502972256, + "learning_rate": 0.0009484431397375998, + "loss": 1.7365, + "step": 1890 + }, + { + "epoch": 0.17156595899110869, + "grad_norm": 0.1396236984014062, + "learning_rate": 0.0009483781402697344, + "loss": 1.7318, + "step": 1891 + }, + { + "epoch": 0.17165668662674652, + "grad_norm": 0.14569781124214976, + "learning_rate": 0.0009483131020843503, + "loss": 1.7164, + "step": 1892 + }, + { + "epoch": 0.17174741426238432, + "grad_norm": 0.13372599423453077, + "learning_rate": 0.0009482480251870639, + "loss": 1.6468, + "step": 1893 + }, + { + "epoch": 0.17183814189802213, + "grad_norm": 0.14439413014654656, + "learning_rate": 0.0009481829095834943, + "loss": 1.6747, + "step": 1894 + }, + { + "epoch": 0.17192886953365996, + "grad_norm": 0.14954944894981884, + "learning_rate": 0.0009481177552792645, + "loss": 1.6904, + "step": 1895 + }, + { + "epoch": 0.17201959716929777, + "grad_norm": 0.1430939265704399, + "learning_rate": 0.0009480525622800006, + "loss": 1.6684, + "step": 1896 + }, + { + "epoch": 0.17211032480493557, + "grad_norm": 0.14474688554859283, + "learning_rate": 0.0009479873305913318, + "loss": 1.6976, + "step": 1897 + }, + { + "epoch": 0.1722010524405734, + "grad_norm": 0.14124008433638205, + "learning_rate": 0.000947922060218891, + "loss": 1.7016, + "step": 1898 + }, + { + "epoch": 0.1722917800762112, + "grad_norm": 0.14606875139278067, + "learning_rate": 0.0009478567511683142, + "loss": 1.6433, + "step": 1899 + }, + { + "epoch": 0.17238250771184904, + "grad_norm": 0.13669907418907454, + "learning_rate": 0.0009477914034452411, + "loss": 1.6807, + "step": 1900 + }, + { + "epoch": 0.17247323534748685, + "grad_norm": 0.1491790046955284, + "learning_rate": 0.0009477260170553142, + "loss": 1.6852, + "step": 1901 + }, + { + "epoch": 0.17256396298312465, + "grad_norm": 0.14725234652561456, + "learning_rate": 0.0009476605920041796, + "loss": 1.6742, + "step": 1902 + }, + { + "epoch": 0.17265469061876249, + "grad_norm": 0.1374862695670332, + "learning_rate": 0.0009475951282974871, + "loss": 1.6492, + "step": 1903 + }, + { + "epoch": 0.1727454182544003, + "grad_norm": 0.13801808222459883, + "learning_rate": 0.0009475296259408892, 
+ "loss": 1.6886, + "step": 1904 + }, + { + "epoch": 0.1728361458900381, + "grad_norm": 0.14064913927646136, + "learning_rate": 0.0009474640849400422, + "loss": 1.6936, + "step": 1905 + }, + { + "epoch": 0.17292687352567593, + "grad_norm": 0.13996108807796578, + "learning_rate": 0.0009473985053006055, + "loss": 1.7019, + "step": 1906 + }, + { + "epoch": 0.17301760116131373, + "grad_norm": 0.14924787396940561, + "learning_rate": 0.000947332887028242, + "loss": 1.6877, + "step": 1907 + }, + { + "epoch": 0.17310832879695154, + "grad_norm": 0.1447415858685545, + "learning_rate": 0.0009472672301286176, + "loss": 1.6995, + "step": 1908 + }, + { + "epoch": 0.17319905643258937, + "grad_norm": 0.1506224542696062, + "learning_rate": 0.0009472015346074021, + "loss": 1.6805, + "step": 1909 + }, + { + "epoch": 0.17328978406822718, + "grad_norm": 0.14456631585093738, + "learning_rate": 0.0009471358004702682, + "loss": 1.6679, + "step": 1910 + }, + { + "epoch": 0.173380511703865, + "grad_norm": 0.13918339766370683, + "learning_rate": 0.000947070027722892, + "loss": 1.6618, + "step": 1911 + }, + { + "epoch": 0.17347123933950281, + "grad_norm": 0.14805788473237116, + "learning_rate": 0.0009470042163709529, + "loss": 1.674, + "step": 1912 + }, + { + "epoch": 0.17356196697514062, + "grad_norm": 0.13741361383600625, + "learning_rate": 0.000946938366420134, + "loss": 1.6866, + "step": 1913 + }, + { + "epoch": 0.17365269461077845, + "grad_norm": 0.14265080013123158, + "learning_rate": 0.0009468724778761212, + "loss": 1.6489, + "step": 1914 + }, + { + "epoch": 0.17374342224641626, + "grad_norm": 0.15140684634661217, + "learning_rate": 0.000946806550744604, + "loss": 1.6751, + "step": 1915 + }, + { + "epoch": 0.17383414988205406, + "grad_norm": 0.14488494148738054, + "learning_rate": 0.0009467405850312753, + "loss": 1.6696, + "step": 1916 + }, + { + "epoch": 0.1739248775176919, + "grad_norm": 0.14589545426878042, + "learning_rate": 0.0009466745807418315, + "loss": 1.6799, + "step": 1917 + }, + { + "epoch": 0.1740156051533297, + "grad_norm": 0.14598126396091793, + "learning_rate": 0.0009466085378819715, + "loss": 1.6182, + "step": 1918 + }, + { + "epoch": 0.17410633278896753, + "grad_norm": 0.13747028589537608, + "learning_rate": 0.0009465424564573985, + "loss": 1.6737, + "step": 1919 + }, + { + "epoch": 0.17419706042460534, + "grad_norm": 0.14615340920993905, + "learning_rate": 0.0009464763364738187, + "loss": 1.6858, + "step": 1920 + }, + { + "epoch": 0.17428778806024314, + "grad_norm": 0.14050493392139912, + "learning_rate": 0.0009464101779369414, + "loss": 1.671, + "step": 1921 + }, + { + "epoch": 0.17437851569588098, + "grad_norm": 0.14867708127610088, + "learning_rate": 0.0009463439808524794, + "loss": 1.7169, + "step": 1922 + }, + { + "epoch": 0.17446924333151878, + "grad_norm": 0.14090296331118898, + "learning_rate": 0.0009462777452261489, + "loss": 1.6614, + "step": 1923 + }, + { + "epoch": 0.17455997096715659, + "grad_norm": 0.1465390430493409, + "learning_rate": 0.0009462114710636694, + "loss": 1.6631, + "step": 1924 + }, + { + "epoch": 0.17465069860279442, + "grad_norm": 0.14529748324852057, + "learning_rate": 0.0009461451583707635, + "loss": 1.6655, + "step": 1925 + }, + { + "epoch": 0.17474142623843222, + "grad_norm": 0.13981402154981445, + "learning_rate": 0.0009460788071531574, + "loss": 1.7014, + "step": 1926 + }, + { + "epoch": 0.17483215387407003, + "grad_norm": 0.14536874922243906, + "learning_rate": 0.0009460124174165807, + "loss": 1.6114, + "step": 1927 + }, + { + "epoch": 
0.17492288150970786, + "grad_norm": 0.147061501281767, + "learning_rate": 0.0009459459891667659, + "loss": 1.6554, + "step": 1928 + }, + { + "epoch": 0.17501360914534567, + "grad_norm": 0.14740330936360446, + "learning_rate": 0.0009458795224094492, + "loss": 1.6552, + "step": 1929 + }, + { + "epoch": 0.1751043367809835, + "grad_norm": 0.14271165150246465, + "learning_rate": 0.0009458130171503701, + "loss": 1.6854, + "step": 1930 + }, + { + "epoch": 0.1751950644166213, + "grad_norm": 0.13941909869967858, + "learning_rate": 0.0009457464733952711, + "loss": 1.6861, + "step": 1931 + }, + { + "epoch": 0.1752857920522591, + "grad_norm": 0.13422420575830632, + "learning_rate": 0.0009456798911498985, + "loss": 1.7054, + "step": 1932 + }, + { + "epoch": 0.17537651968789694, + "grad_norm": 0.14263003671362665, + "learning_rate": 0.0009456132704200017, + "loss": 1.7338, + "step": 1933 + }, + { + "epoch": 0.17546724732353475, + "grad_norm": 0.14478582057136663, + "learning_rate": 0.0009455466112113333, + "loss": 1.6869, + "step": 1934 + }, + { + "epoch": 0.17555797495917255, + "grad_norm": 0.14520966864738882, + "learning_rate": 0.0009454799135296492, + "loss": 1.6677, + "step": 1935 + }, + { + "epoch": 0.17564870259481039, + "grad_norm": 0.13703960426882753, + "learning_rate": 0.0009454131773807091, + "loss": 1.6971, + "step": 1936 + }, + { + "epoch": 0.1757394302304482, + "grad_norm": 0.1360906519034109, + "learning_rate": 0.0009453464027702754, + "loss": 1.6929, + "step": 1937 + }, + { + "epoch": 0.17583015786608602, + "grad_norm": 0.14246643753017016, + "learning_rate": 0.0009452795897041142, + "loss": 1.6917, + "step": 1938 + }, + { + "epoch": 0.17592088550172383, + "grad_norm": 0.14277081371063347, + "learning_rate": 0.000945212738187995, + "loss": 1.6816, + "step": 1939 + }, + { + "epoch": 0.17601161313736163, + "grad_norm": 0.1405172957804813, + "learning_rate": 0.00094514584822769, + "loss": 1.7288, + "step": 1940 + }, + { + "epoch": 0.17610234077299947, + "grad_norm": 0.14131580206383498, + "learning_rate": 0.0009450789198289753, + "loss": 1.6563, + "step": 1941 + }, + { + "epoch": 0.17619306840863727, + "grad_norm": 0.13809432849086406, + "learning_rate": 0.0009450119529976304, + "loss": 1.6594, + "step": 1942 + }, + { + "epoch": 0.17628379604427508, + "grad_norm": 0.14172708504580112, + "learning_rate": 0.0009449449477394379, + "loss": 1.662, + "step": 1943 + }, + { + "epoch": 0.1763745236799129, + "grad_norm": 0.1434041542244893, + "learning_rate": 0.0009448779040601835, + "loss": 1.6913, + "step": 1944 + }, + { + "epoch": 0.17646525131555071, + "grad_norm": 0.13896538987701734, + "learning_rate": 0.0009448108219656565, + "loss": 1.6561, + "step": 1945 + }, + { + "epoch": 0.17655597895118852, + "grad_norm": 0.14146520588230627, + "learning_rate": 0.0009447437014616495, + "loss": 1.6948, + "step": 1946 + }, + { + "epoch": 0.17664670658682635, + "grad_norm": 0.14085512744721942, + "learning_rate": 0.0009446765425539582, + "loss": 1.6635, + "step": 1947 + }, + { + "epoch": 0.17673743422246416, + "grad_norm": 0.14287360105220287, + "learning_rate": 0.0009446093452483821, + "loss": 1.7054, + "step": 1948 + }, + { + "epoch": 0.176828161858102, + "grad_norm": 0.13553257523836149, + "learning_rate": 0.0009445421095507233, + "loss": 1.6335, + "step": 1949 + }, + { + "epoch": 0.1769188894937398, + "grad_norm": 0.14215957286515596, + "learning_rate": 0.000944474835466788, + "loss": 1.6895, + "step": 1950 + }, + { + "epoch": 0.1770096171293776, + "grad_norm": 0.13522091479319182, + "learning_rate": 
0.000944407523002385, + "loss": 1.6848, + "step": 1951 + }, + { + "epoch": 0.17710034476501543, + "grad_norm": 0.14016214837368804, + "learning_rate": 0.000944340172163327, + "loss": 1.6817, + "step": 1952 + }, + { + "epoch": 0.17719107240065324, + "grad_norm": 0.15138810145615716, + "learning_rate": 0.0009442727829554297, + "loss": 1.7212, + "step": 1953 + }, + { + "epoch": 0.17728180003629104, + "grad_norm": 0.14497915348570906, + "learning_rate": 0.0009442053553845119, + "loss": 1.698, + "step": 1954 + }, + { + "epoch": 0.17737252767192888, + "grad_norm": 0.13981619565189368, + "learning_rate": 0.0009441378894563963, + "loss": 1.7177, + "step": 1955 + }, + { + "epoch": 0.17746325530756668, + "grad_norm": 0.13767925620332488, + "learning_rate": 0.0009440703851769086, + "loss": 1.6803, + "step": 1956 + }, + { + "epoch": 0.1775539829432045, + "grad_norm": 0.1374447237405678, + "learning_rate": 0.0009440028425518777, + "loss": 1.6341, + "step": 1957 + }, + { + "epoch": 0.17764471057884232, + "grad_norm": 0.14439865602427426, + "learning_rate": 0.0009439352615871358, + "loss": 1.6905, + "step": 1958 + }, + { + "epoch": 0.17773543821448012, + "grad_norm": 0.13965566468223883, + "learning_rate": 0.0009438676422885185, + "loss": 1.6758, + "step": 1959 + }, + { + "epoch": 0.17782616585011796, + "grad_norm": 0.14114781544851185, + "learning_rate": 0.000943799984661865, + "loss": 1.7203, + "step": 1960 + }, + { + "epoch": 0.17791689348575576, + "grad_norm": 0.14025907941921845, + "learning_rate": 0.0009437322887130174, + "loss": 1.6979, + "step": 1961 + }, + { + "epoch": 0.17800762112139357, + "grad_norm": 0.1397309778141844, + "learning_rate": 0.0009436645544478213, + "loss": 1.6862, + "step": 1962 + }, + { + "epoch": 0.1780983487570314, + "grad_norm": 0.1390090414560206, + "learning_rate": 0.0009435967818721256, + "loss": 1.6725, + "step": 1963 + }, + { + "epoch": 0.1781890763926692, + "grad_norm": 0.13985827420736485, + "learning_rate": 0.0009435289709917822, + "loss": 1.668, + "step": 1964 + }, + { + "epoch": 0.178279804028307, + "grad_norm": 0.14006766462622183, + "learning_rate": 0.000943461121812647, + "loss": 1.7156, + "step": 1965 + }, + { + "epoch": 0.17837053166394484, + "grad_norm": 0.14274929596259078, + "learning_rate": 0.0009433932343405785, + "loss": 1.6524, + "step": 1966 + }, + { + "epoch": 0.17846125929958265, + "grad_norm": 0.14280689003352182, + "learning_rate": 0.0009433253085814388, + "loss": 1.6406, + "step": 1967 + }, + { + "epoch": 0.17855198693522048, + "grad_norm": 0.14514072465921712, + "learning_rate": 0.0009432573445410934, + "loss": 1.6607, + "step": 1968 + }, + { + "epoch": 0.17864271457085829, + "grad_norm": 0.13959739416528474, + "learning_rate": 0.000943189342225411, + "loss": 1.6614, + "step": 1969 + }, + { + "epoch": 0.1787334422064961, + "grad_norm": 0.13834853721801005, + "learning_rate": 0.0009431213016402635, + "loss": 1.6861, + "step": 1970 + }, + { + "epoch": 0.17882416984213392, + "grad_norm": 0.14328593009925378, + "learning_rate": 0.0009430532227915265, + "loss": 1.6313, + "step": 1971 + }, + { + "epoch": 0.17891489747777173, + "grad_norm": 0.13636236648418942, + "learning_rate": 0.0009429851056850782, + "loss": 1.6934, + "step": 1972 + }, + { + "epoch": 0.17900562511340953, + "grad_norm": 0.14088691646206486, + "learning_rate": 0.0009429169503268009, + "loss": 1.6536, + "step": 1973 + }, + { + "epoch": 0.17909635274904737, + "grad_norm": 0.13802451985060193, + "learning_rate": 0.0009428487567225795, + "loss": 1.7159, + "step": 1974 + }, + { + 
"epoch": 0.17918708038468517, + "grad_norm": 0.1408536292861803, + "learning_rate": 0.0009427805248783028, + "loss": 1.7288, + "step": 1975 + }, + { + "epoch": 0.179277808020323, + "grad_norm": 0.13971511841895703, + "learning_rate": 0.0009427122547998625, + "loss": 1.6901, + "step": 1976 + }, + { + "epoch": 0.1793685356559608, + "grad_norm": 0.13741969211137783, + "learning_rate": 0.0009426439464931537, + "loss": 1.6951, + "step": 1977 + }, + { + "epoch": 0.17945926329159861, + "grad_norm": 0.13849763208676674, + "learning_rate": 0.0009425755999640748, + "loss": 1.6684, + "step": 1978 + }, + { + "epoch": 0.17954999092723645, + "grad_norm": 0.15026295007947443, + "learning_rate": 0.0009425072152185278, + "loss": 1.6403, + "step": 1979 + }, + { + "epoch": 0.17964071856287425, + "grad_norm": 0.13895305334233868, + "learning_rate": 0.0009424387922624174, + "loss": 1.7256, + "step": 1980 + }, + { + "epoch": 0.17973144619851206, + "grad_norm": 0.14613646864499702, + "learning_rate": 0.0009423703311016523, + "loss": 1.6496, + "step": 1981 + }, + { + "epoch": 0.1798221738341499, + "grad_norm": 0.1444663857140953, + "learning_rate": 0.0009423018317421437, + "loss": 1.6536, + "step": 1982 + }, + { + "epoch": 0.1799129014697877, + "grad_norm": 0.13604546618600238, + "learning_rate": 0.0009422332941898067, + "loss": 1.6917, + "step": 1983 + }, + { + "epoch": 0.1800036291054255, + "grad_norm": 0.14667098014260757, + "learning_rate": 0.0009421647184505597, + "loss": 1.636, + "step": 1984 + }, + { + "epoch": 0.18009435674106333, + "grad_norm": 0.1392250884006369, + "learning_rate": 0.000942096104530324, + "loss": 1.7177, + "step": 1985 + }, + { + "epoch": 0.18018508437670114, + "grad_norm": 0.13816935686168416, + "learning_rate": 0.0009420274524350247, + "loss": 1.6723, + "step": 1986 + }, + { + "epoch": 0.18027581201233897, + "grad_norm": 0.1425739907389218, + "learning_rate": 0.0009419587621705897, + "loss": 1.6594, + "step": 1987 + }, + { + "epoch": 0.18036653964797678, + "grad_norm": 0.1373898934752449, + "learning_rate": 0.0009418900337429502, + "loss": 1.6644, + "step": 1988 + }, + { + "epoch": 0.18045726728361458, + "grad_norm": 0.1362298980620139, + "learning_rate": 0.0009418212671580413, + "loss": 1.6322, + "step": 1989 + }, + { + "epoch": 0.1805479949192524, + "grad_norm": 0.13856851253485147, + "learning_rate": 0.000941752462421801, + "loss": 1.7071, + "step": 1990 + }, + { + "epoch": 0.18063872255489022, + "grad_norm": 0.13873517064205176, + "learning_rate": 0.0009416836195401703, + "loss": 1.6764, + "step": 1991 + }, + { + "epoch": 0.18072945019052802, + "grad_norm": 0.13780162626568787, + "learning_rate": 0.0009416147385190939, + "loss": 1.6739, + "step": 1992 + }, + { + "epoch": 0.18082017782616586, + "grad_norm": 0.14074685023381597, + "learning_rate": 0.0009415458193645199, + "loss": 1.6577, + "step": 1993 + }, + { + "epoch": 0.18091090546180366, + "grad_norm": 0.14620836307848134, + "learning_rate": 0.0009414768620823993, + "loss": 1.6345, + "step": 1994 + }, + { + "epoch": 0.1810016330974415, + "grad_norm": 0.14212085772078992, + "learning_rate": 0.0009414078666786865, + "loss": 1.683, + "step": 1995 + }, + { + "epoch": 0.1810923607330793, + "grad_norm": 0.14175230930584115, + "learning_rate": 0.0009413388331593394, + "loss": 1.6768, + "step": 1996 + }, + { + "epoch": 0.1811830883687171, + "grad_norm": 0.14216266716893183, + "learning_rate": 0.000941269761530319, + "loss": 1.6509, + "step": 1997 + }, + { + "epoch": 0.18127381600435494, + "grad_norm": 0.13436220568716103, + 
"learning_rate": 0.0009412006517975898, + "loss": 1.7016, + "step": 1998 + }, + { + "epoch": 0.18136454363999274, + "grad_norm": 0.13414098087565185, + "learning_rate": 0.000941131503967119, + "loss": 1.6439, + "step": 1999 + }, + { + "epoch": 0.18145527127563055, + "grad_norm": 0.13589182071120773, + "learning_rate": 0.0009410623180448781, + "loss": 1.6799, + "step": 2000 + }, + { + "epoch": 0.18154599891126838, + "grad_norm": 0.1432466221731666, + "learning_rate": 0.0009409930940368407, + "loss": 1.6578, + "step": 2001 + }, + { + "epoch": 0.18163672654690619, + "grad_norm": 0.1369918540070777, + "learning_rate": 0.000940923831948985, + "loss": 1.6937, + "step": 2002 + }, + { + "epoch": 0.181727454182544, + "grad_norm": 0.14188902007420928, + "learning_rate": 0.0009408545317872912, + "loss": 1.6985, + "step": 2003 + }, + { + "epoch": 0.18181818181818182, + "grad_norm": 0.14758922099238098, + "learning_rate": 0.0009407851935577436, + "loss": 1.6712, + "step": 2004 + }, + { + "epoch": 0.18190890945381963, + "grad_norm": 0.13917921841145403, + "learning_rate": 0.0009407158172663298, + "loss": 1.6851, + "step": 2005 + }, + { + "epoch": 0.18199963708945746, + "grad_norm": 0.14055691748730545, + "learning_rate": 0.00094064640291904, + "loss": 1.6725, + "step": 2006 + }, + { + "epoch": 0.18209036472509527, + "grad_norm": 0.14708235487598514, + "learning_rate": 0.0009405769505218684, + "loss": 1.7203, + "step": 2007 + }, + { + "epoch": 0.18218109236073307, + "grad_norm": 0.14629196383420345, + "learning_rate": 0.0009405074600808122, + "loss": 1.7203, + "step": 2008 + }, + { + "epoch": 0.1822718199963709, + "grad_norm": 0.13864570171501586, + "learning_rate": 0.000940437931601872, + "loss": 1.6579, + "step": 2009 + }, + { + "epoch": 0.1823625476320087, + "grad_norm": 0.13561422347936677, + "learning_rate": 0.0009403683650910514, + "loss": 1.6434, + "step": 2010 + }, + { + "epoch": 0.18245327526764651, + "grad_norm": 0.13532726263465622, + "learning_rate": 0.0009402987605543576, + "loss": 1.699, + "step": 2011 + }, + { + "epoch": 0.18254400290328435, + "grad_norm": 0.13611017667485956, + "learning_rate": 0.0009402291179978009, + "loss": 1.6803, + "step": 2012 + }, + { + "epoch": 0.18263473053892215, + "grad_norm": 0.133865729404263, + "learning_rate": 0.000940159437427395, + "loss": 1.7175, + "step": 2013 + }, + { + "epoch": 0.18272545817455996, + "grad_norm": 0.13675992856269614, + "learning_rate": 0.0009400897188491568, + "loss": 1.6643, + "step": 2014 + }, + { + "epoch": 0.1828161858101978, + "grad_norm": 0.1347484382922197, + "learning_rate": 0.0009400199622691065, + "loss": 1.6302, + "step": 2015 + }, + { + "epoch": 0.1829069134458356, + "grad_norm": 0.1419798219775819, + "learning_rate": 0.0009399501676932675, + "loss": 1.6772, + "step": 2016 + }, + { + "epoch": 0.18299764108147343, + "grad_norm": 0.14789575335074615, + "learning_rate": 0.0009398803351276668, + "loss": 1.7218, + "step": 2017 + }, + { + "epoch": 0.18308836871711123, + "grad_norm": 0.14315107771919158, + "learning_rate": 0.0009398104645783344, + "loss": 1.6803, + "step": 2018 + }, + { + "epoch": 0.18317909635274904, + "grad_norm": 0.1431319609098944, + "learning_rate": 0.0009397405560513035, + "loss": 1.6464, + "step": 2019 + }, + { + "epoch": 0.18326982398838687, + "grad_norm": 0.14538131960418438, + "learning_rate": 0.0009396706095526107, + "loss": 1.7033, + "step": 2020 + }, + { + "epoch": 0.18336055162402468, + "grad_norm": 0.14424332683801397, + "learning_rate": 0.000939600625088296, + "loss": 1.6667, + "step": 2021 + }, + 
{ + "epoch": 0.18345127925966248, + "grad_norm": 0.1355881383958486, + "learning_rate": 0.0009395306026644026, + "loss": 1.6535, + "step": 2022 + }, + { + "epoch": 0.1835420068953003, + "grad_norm": 0.1426926790759815, + "learning_rate": 0.0009394605422869769, + "loss": 1.6766, + "step": 2023 + }, + { + "epoch": 0.18363273453093812, + "grad_norm": 0.14314122831993023, + "learning_rate": 0.0009393904439620685, + "loss": 1.7062, + "step": 2024 + }, + { + "epoch": 0.18372346216657595, + "grad_norm": 0.13722278349720282, + "learning_rate": 0.0009393203076957307, + "loss": 1.6884, + "step": 2025 + }, + { + "epoch": 0.18381418980221376, + "grad_norm": 0.14446592200937308, + "learning_rate": 0.0009392501334940193, + "loss": 1.6878, + "step": 2026 + }, + { + "epoch": 0.18390491743785156, + "grad_norm": 0.14537706934471553, + "learning_rate": 0.0009391799213629942, + "loss": 1.6582, + "step": 2027 + }, + { + "epoch": 0.1839956450734894, + "grad_norm": 0.13831570662303347, + "learning_rate": 0.0009391096713087183, + "loss": 1.6649, + "step": 2028 + }, + { + "epoch": 0.1840863727091272, + "grad_norm": 0.13291830670623866, + "learning_rate": 0.0009390393833372575, + "loss": 1.6664, + "step": 2029 + }, + { + "epoch": 0.184177100344765, + "grad_norm": 0.1357925387600138, + "learning_rate": 0.0009389690574546812, + "loss": 1.659, + "step": 2030 + }, + { + "epoch": 0.18426782798040284, + "grad_norm": 0.136689709805149, + "learning_rate": 0.000938898693667062, + "loss": 1.7097, + "step": 2031 + }, + { + "epoch": 0.18435855561604064, + "grad_norm": 0.13367036784537295, + "learning_rate": 0.0009388282919804761, + "loss": 1.6893, + "step": 2032 + }, + { + "epoch": 0.18444928325167845, + "grad_norm": 0.13634548455265477, + "learning_rate": 0.0009387578524010026, + "loss": 1.6796, + "step": 2033 + }, + { + "epoch": 0.18454001088731628, + "grad_norm": 0.14425739005265556, + "learning_rate": 0.0009386873749347236, + "loss": 1.6775, + "step": 2034 + }, + { + "epoch": 0.18463073852295409, + "grad_norm": 0.13253338220039163, + "learning_rate": 0.0009386168595877253, + "loss": 1.6633, + "step": 2035 + }, + { + "epoch": 0.18472146615859192, + "grad_norm": 0.13683202115753834, + "learning_rate": 0.0009385463063660964, + "loss": 1.6598, + "step": 2036 + }, + { + "epoch": 0.18481219379422972, + "grad_norm": 0.13702154425052876, + "learning_rate": 0.0009384757152759293, + "loss": 1.6612, + "step": 2037 + }, + { + "epoch": 0.18490292142986753, + "grad_norm": 0.14176205697269093, + "learning_rate": 0.0009384050863233198, + "loss": 1.6723, + "step": 2038 + }, + { + "epoch": 0.18499364906550536, + "grad_norm": 0.13938709979171784, + "learning_rate": 0.0009383344195143663, + "loss": 1.7046, + "step": 2039 + }, + { + "epoch": 0.18508437670114317, + "grad_norm": 0.13408861094167285, + "learning_rate": 0.0009382637148551711, + "loss": 1.7214, + "step": 2040 + }, + { + "epoch": 0.18517510433678097, + "grad_norm": 0.14362056466823764, + "learning_rate": 0.0009381929723518395, + "loss": 1.6672, + "step": 2041 + }, + { + "epoch": 0.1852658319724188, + "grad_norm": 0.1362767090999768, + "learning_rate": 0.0009381221920104801, + "loss": 1.6716, + "step": 2042 + }, + { + "epoch": 0.1853565596080566, + "grad_norm": 0.14355036226265291, + "learning_rate": 0.000938051373837205, + "loss": 1.7051, + "step": 2043 + }, + { + "epoch": 0.18544728724369444, + "grad_norm": 0.14037137193582683, + "learning_rate": 0.0009379805178381291, + "loss": 1.6721, + "step": 2044 + }, + { + "epoch": 0.18553801487933225, + "grad_norm": 0.13754781565063268, + 
"learning_rate": 0.000937909624019371, + "loss": 1.6442, + "step": 2045 + }, + { + "epoch": 0.18562874251497005, + "grad_norm": 0.13876809992167813, + "learning_rate": 0.0009378386923870523, + "loss": 1.637, + "step": 2046 + }, + { + "epoch": 0.18571947015060788, + "grad_norm": 0.1387313857199876, + "learning_rate": 0.0009377677229472981, + "loss": 1.6929, + "step": 2047 + }, + { + "epoch": 0.1858101977862457, + "grad_norm": 0.14100446793882157, + "learning_rate": 0.0009376967157062363, + "loss": 1.6955, + "step": 2048 + }, + { + "epoch": 0.1859009254218835, + "grad_norm": 0.13404507896181134, + "learning_rate": 0.0009376256706699986, + "loss": 1.6783, + "step": 2049 + }, + { + "epoch": 0.18599165305752133, + "grad_norm": 0.1406295659832398, + "learning_rate": 0.0009375545878447198, + "loss": 1.6654, + "step": 2050 + }, + { + "epoch": 0.18608238069315913, + "grad_norm": 0.14594426355293627, + "learning_rate": 0.0009374834672365379, + "loss": 1.6686, + "step": 2051 + }, + { + "epoch": 0.18617310832879694, + "grad_norm": 0.1445364373388697, + "learning_rate": 0.000937412308851594, + "loss": 1.6853, + "step": 2052 + }, + { + "epoch": 0.18626383596443477, + "grad_norm": 0.1421851539252192, + "learning_rate": 0.0009373411126960329, + "loss": 1.6724, + "step": 2053 + }, + { + "epoch": 0.18635456360007258, + "grad_norm": 0.1397201545021263, + "learning_rate": 0.0009372698787760021, + "loss": 1.6754, + "step": 2054 + }, + { + "epoch": 0.1864452912357104, + "grad_norm": 0.13757612511755588, + "learning_rate": 0.0009371986070976531, + "loss": 1.6961, + "step": 2055 + }, + { + "epoch": 0.1865360188713482, + "grad_norm": 0.13539551242382192, + "learning_rate": 0.0009371272976671397, + "loss": 1.6675, + "step": 2056 + }, + { + "epoch": 0.18662674650698602, + "grad_norm": 0.13649080454407608, + "learning_rate": 0.0009370559504906198, + "loss": 1.729, + "step": 2057 + }, + { + "epoch": 0.18671747414262385, + "grad_norm": 0.14307740464854293, + "learning_rate": 0.0009369845655742542, + "loss": 1.7231, + "step": 2058 + }, + { + "epoch": 0.18680820177826166, + "grad_norm": 0.14001864232147077, + "learning_rate": 0.0009369131429242068, + "loss": 1.6728, + "step": 2059 + }, + { + "epoch": 0.18689892941389946, + "grad_norm": 0.14093748408812679, + "learning_rate": 0.0009368416825466453, + "loss": 1.6949, + "step": 2060 + }, + { + "epoch": 0.1869896570495373, + "grad_norm": 0.1389293490804242, + "learning_rate": 0.00093677018444774, + "loss": 1.687, + "step": 2061 + }, + { + "epoch": 0.1870803846851751, + "grad_norm": 0.14041966602495637, + "learning_rate": 0.0009366986486336649, + "loss": 1.6623, + "step": 2062 + }, + { + "epoch": 0.18717111232081293, + "grad_norm": 0.13454246883766724, + "learning_rate": 0.0009366270751105972, + "loss": 1.6972, + "step": 2063 + }, + { + "epoch": 0.18726183995645074, + "grad_norm": 0.13153638172644827, + "learning_rate": 0.0009365554638847171, + "loss": 1.6924, + "step": 2064 + }, + { + "epoch": 0.18735256759208854, + "grad_norm": 0.13806799539709047, + "learning_rate": 0.0009364838149622086, + "loss": 1.6729, + "step": 2065 + }, + { + "epoch": 0.18744329522772638, + "grad_norm": 0.13113381251046843, + "learning_rate": 0.0009364121283492582, + "loss": 1.6375, + "step": 2066 + }, + { + "epoch": 0.18753402286336418, + "grad_norm": 0.1378215473579271, + "learning_rate": 0.000936340404052056, + "loss": 1.6418, + "step": 2067 + }, + { + "epoch": 0.18762475049900199, + "grad_norm": 0.13829392871645516, + "learning_rate": 0.0009362686420767959, + "loss": 1.7013, + "step": 2068 + }, + 
{ + "epoch": 0.18771547813463982, + "grad_norm": 0.13722113838174677, + "learning_rate": 0.000936196842429674, + "loss": 1.627, + "step": 2069 + }, + { + "epoch": 0.18780620577027762, + "grad_norm": 0.14879414789672432, + "learning_rate": 0.0009361250051168907, + "loss": 1.6872, + "step": 2070 + }, + { + "epoch": 0.18789693340591543, + "grad_norm": 0.14281800241938378, + "learning_rate": 0.0009360531301446489, + "loss": 1.6611, + "step": 2071 + }, + { + "epoch": 0.18798766104155326, + "grad_norm": 0.14743496750562313, + "learning_rate": 0.0009359812175191549, + "loss": 1.7194, + "step": 2072 + }, + { + "epoch": 0.18807838867719107, + "grad_norm": 0.13386044800659086, + "learning_rate": 0.0009359092672466185, + "loss": 1.641, + "step": 2073 + }, + { + "epoch": 0.1881691163128289, + "grad_norm": 0.13372728260054106, + "learning_rate": 0.0009358372793332528, + "loss": 1.6826, + "step": 2074 + }, + { + "epoch": 0.1882598439484667, + "grad_norm": 0.14088043451767307, + "learning_rate": 0.0009357652537852737, + "loss": 1.6667, + "step": 2075 + }, + { + "epoch": 0.1883505715841045, + "grad_norm": 0.14088003746714256, + "learning_rate": 0.0009356931906089008, + "loss": 1.6881, + "step": 2076 + }, + { + "epoch": 0.18844129921974234, + "grad_norm": 0.13844948413122893, + "learning_rate": 0.0009356210898103565, + "loss": 1.6384, + "step": 2077 + }, + { + "epoch": 0.18853202685538015, + "grad_norm": 0.15100834588187653, + "learning_rate": 0.0009355489513958671, + "loss": 1.7004, + "step": 2078 + }, + { + "epoch": 0.18862275449101795, + "grad_norm": 0.14717183596718955, + "learning_rate": 0.0009354767753716613, + "loss": 1.6903, + "step": 2079 + }, + { + "epoch": 0.18871348212665578, + "grad_norm": 0.13749022959933851, + "learning_rate": 0.0009354045617439719, + "loss": 1.6846, + "step": 2080 + }, + { + "epoch": 0.1888042097622936, + "grad_norm": 0.13401524687511188, + "learning_rate": 0.0009353323105190345, + "loss": 1.6896, + "step": 2081 + }, + { + "epoch": 0.18889493739793142, + "grad_norm": 0.137752017823974, + "learning_rate": 0.0009352600217030877, + "loss": 1.697, + "step": 2082 + }, + { + "epoch": 0.18898566503356923, + "grad_norm": 0.13095990659806772, + "learning_rate": 0.0009351876953023741, + "loss": 1.7085, + "step": 2083 + }, + { + "epoch": 0.18907639266920703, + "grad_norm": 0.13921454788929696, + "learning_rate": 0.0009351153313231389, + "loss": 1.6304, + "step": 2084 + }, + { + "epoch": 0.18916712030484487, + "grad_norm": 0.13355446313578415, + "learning_rate": 0.0009350429297716305, + "loss": 1.7021, + "step": 2085 + }, + { + "epoch": 0.18925784794048267, + "grad_norm": 0.13165678498191194, + "learning_rate": 0.0009349704906541013, + "loss": 1.6735, + "step": 2086 + }, + { + "epoch": 0.18934857557612048, + "grad_norm": 0.14615947564573414, + "learning_rate": 0.0009348980139768058, + "loss": 1.7028, + "step": 2087 + }, + { + "epoch": 0.1894393032117583, + "grad_norm": 0.14396316551029578, + "learning_rate": 0.0009348254997460028, + "loss": 1.6486, + "step": 2088 + }, + { + "epoch": 0.1895300308473961, + "grad_norm": 0.13981100620572223, + "learning_rate": 0.0009347529479679539, + "loss": 1.6813, + "step": 2089 + }, + { + "epoch": 0.18962075848303392, + "grad_norm": 0.14243016533135563, + "learning_rate": 0.0009346803586489238, + "loss": 1.674, + "step": 2090 + }, + { + "epoch": 0.18971148611867175, + "grad_norm": 0.13584830234251352, + "learning_rate": 0.0009346077317951806, + "loss": 1.6537, + "step": 2091 + }, + { + "epoch": 0.18980221375430956, + "grad_norm": 0.1426398430257892, + 
"learning_rate": 0.0009345350674129958, + "loss": 1.716, + "step": 2092 + }, + { + "epoch": 0.1898929413899474, + "grad_norm": 0.13659953054781573, + "learning_rate": 0.0009344623655086438, + "loss": 1.6578, + "step": 2093 + }, + { + "epoch": 0.1899836690255852, + "grad_norm": 0.13642464944963067, + "learning_rate": 0.0009343896260884026, + "loss": 1.6805, + "step": 2094 + }, + { + "epoch": 0.190074396661223, + "grad_norm": 0.13973872473770246, + "learning_rate": 0.0009343168491585532, + "loss": 1.696, + "step": 2095 + }, + { + "epoch": 0.19016512429686083, + "grad_norm": 0.13330186361533164, + "learning_rate": 0.0009342440347253798, + "loss": 1.6999, + "step": 2096 + }, + { + "epoch": 0.19025585193249864, + "grad_norm": 0.1416067039389149, + "learning_rate": 0.00093417118279517, + "loss": 1.691, + "step": 2097 + }, + { + "epoch": 0.19034657956813644, + "grad_norm": 0.1373537576778393, + "learning_rate": 0.0009340982933742145, + "loss": 1.6542, + "step": 2098 + }, + { + "epoch": 0.19043730720377428, + "grad_norm": 0.13836826427080468, + "learning_rate": 0.0009340253664688075, + "loss": 1.6531, + "step": 2099 + }, + { + "epoch": 0.19052803483941208, + "grad_norm": 0.13765478963119607, + "learning_rate": 0.0009339524020852461, + "loss": 1.7386, + "step": 2100 + }, + { + "epoch": 0.1906187624750499, + "grad_norm": 0.14410753287521977, + "learning_rate": 0.0009338794002298307, + "loss": 1.6769, + "step": 2101 + }, + { + "epoch": 0.19070949011068772, + "grad_norm": 0.13628150762633928, + "learning_rate": 0.0009338063609088654, + "loss": 1.6544, + "step": 2102 + }, + { + "epoch": 0.19080021774632552, + "grad_norm": 0.13375563511642535, + "learning_rate": 0.0009337332841286567, + "loss": 1.6575, + "step": 2103 + }, + { + "epoch": 0.19089094538196336, + "grad_norm": 0.1396559422766005, + "learning_rate": 0.000933660169895515, + "loss": 1.6784, + "step": 2104 + }, + { + "epoch": 0.19098167301760116, + "grad_norm": 0.130224466757507, + "learning_rate": 0.0009335870182157537, + "loss": 1.6329, + "step": 2105 + }, + { + "epoch": 0.19107240065323897, + "grad_norm": 0.13521035352398217, + "learning_rate": 0.0009335138290956896, + "loss": 1.6726, + "step": 2106 + }, + { + "epoch": 0.1911631282888768, + "grad_norm": 0.13647439485224733, + "learning_rate": 0.0009334406025416425, + "loss": 1.7064, + "step": 2107 + }, + { + "epoch": 0.1912538559245146, + "grad_norm": 0.1397429338191348, + "learning_rate": 0.0009333673385599352, + "loss": 1.658, + "step": 2108 + }, + { + "epoch": 0.1913445835601524, + "grad_norm": 0.1375090436993035, + "learning_rate": 0.0009332940371568945, + "loss": 1.6776, + "step": 2109 + }, + { + "epoch": 0.19143531119579024, + "grad_norm": 0.13696520139292728, + "learning_rate": 0.00093322069833885, + "loss": 1.6977, + "step": 2110 + }, + { + "epoch": 0.19152603883142805, + "grad_norm": 0.1374832960366677, + "learning_rate": 0.0009331473221121341, + "loss": 1.6668, + "step": 2111 + }, + { + "epoch": 0.19161676646706588, + "grad_norm": 0.14562637657516625, + "learning_rate": 0.0009330739084830832, + "loss": 1.7026, + "step": 2112 + }, + { + "epoch": 0.19170749410270368, + "grad_norm": 0.1340534292479811, + "learning_rate": 0.0009330004574580365, + "loss": 1.6676, + "step": 2113 + }, + { + "epoch": 0.1917982217383415, + "grad_norm": 0.1285094987225704, + "learning_rate": 0.0009329269690433364, + "loss": 1.6526, + "step": 2114 + }, + { + "epoch": 0.19188894937397932, + "grad_norm": 0.1326090185207664, + "learning_rate": 0.0009328534432453289, + "loss": 1.6514, + "step": 2115 + }, + { + 
"epoch": 0.19197967700961713, + "grad_norm": 0.13333763808349744, + "learning_rate": 0.0009327798800703626, + "loss": 1.658, + "step": 2116 + }, + { + "epoch": 0.19207040464525493, + "grad_norm": 0.1385137655086619, + "learning_rate": 0.00093270627952479, + "loss": 1.621, + "step": 2117 + }, + { + "epoch": 0.19216113228089277, + "grad_norm": 0.13630990181559977, + "learning_rate": 0.0009326326416149662, + "loss": 1.6426, + "step": 2118 + }, + { + "epoch": 0.19225185991653057, + "grad_norm": 0.1356905354041628, + "learning_rate": 0.0009325589663472503, + "loss": 1.6487, + "step": 2119 + }, + { + "epoch": 0.1923425875521684, + "grad_norm": 0.1384976880878789, + "learning_rate": 0.0009324852537280036, + "loss": 1.6966, + "step": 2120 + }, + { + "epoch": 0.1924333151878062, + "grad_norm": 0.1315102519624895, + "learning_rate": 0.0009324115037635917, + "loss": 1.6782, + "step": 2121 + }, + { + "epoch": 0.192524042823444, + "grad_norm": 0.13035407864488321, + "learning_rate": 0.0009323377164603825, + "loss": 1.6919, + "step": 2122 + }, + { + "epoch": 0.19261477045908185, + "grad_norm": 0.1397415625487618, + "learning_rate": 0.0009322638918247482, + "loss": 1.7103, + "step": 2123 + }, + { + "epoch": 0.19270549809471965, + "grad_norm": 0.1348435634795127, + "learning_rate": 0.0009321900298630628, + "loss": 1.683, + "step": 2124 + }, + { + "epoch": 0.19279622573035746, + "grad_norm": 0.13313732694788155, + "learning_rate": 0.0009321161305817046, + "loss": 1.646, + "step": 2125 + }, + { + "epoch": 0.1928869533659953, + "grad_norm": 0.13268530462350683, + "learning_rate": 0.0009320421939870549, + "loss": 1.668, + "step": 2126 + }, + { + "epoch": 0.1929776810016331, + "grad_norm": 0.14484615322232136, + "learning_rate": 0.0009319682200854981, + "loss": 1.6951, + "step": 2127 + }, + { + "epoch": 0.1930684086372709, + "grad_norm": 0.1337078834261737, + "learning_rate": 0.0009318942088834219, + "loss": 1.6786, + "step": 2128 + }, + { + "epoch": 0.19315913627290873, + "grad_norm": 0.1382805834084907, + "learning_rate": 0.0009318201603872169, + "loss": 1.6345, + "step": 2129 + }, + { + "epoch": 0.19324986390854654, + "grad_norm": 0.14089841973452505, + "learning_rate": 0.0009317460746032777, + "loss": 1.6852, + "step": 2130 + }, + { + "epoch": 0.19334059154418437, + "grad_norm": 0.1370226173746868, + "learning_rate": 0.0009316719515380011, + "loss": 1.6371, + "step": 2131 + }, + { + "epoch": 0.19343131917982218, + "grad_norm": 0.13186909566173324, + "learning_rate": 0.000931597791197788, + "loss": 1.6037, + "step": 2132 + }, + { + "epoch": 0.19352204681545998, + "grad_norm": 0.14040236004590642, + "learning_rate": 0.0009315235935890418, + "loss": 1.6847, + "step": 2133 + }, + { + "epoch": 0.1936127744510978, + "grad_norm": 0.1442700004396547, + "learning_rate": 0.0009314493587181699, + "loss": 1.6944, + "step": 2134 + }, + { + "epoch": 0.19370350208673562, + "grad_norm": 0.13754270482859715, + "learning_rate": 0.0009313750865915822, + "loss": 1.6834, + "step": 2135 + }, + { + "epoch": 0.19379422972237342, + "grad_norm": 0.13996170013417317, + "learning_rate": 0.0009313007772156923, + "loss": 1.6917, + "step": 2136 + }, + { + "epoch": 0.19388495735801126, + "grad_norm": 0.1396782979454884, + "learning_rate": 0.0009312264305969166, + "loss": 1.6661, + "step": 2137 + }, + { + "epoch": 0.19397568499364906, + "grad_norm": 0.13731576332476733, + "learning_rate": 0.0009311520467416751, + "loss": 1.6778, + "step": 2138 + }, + { + "epoch": 0.1940664126292869, + "grad_norm": 0.1438532192920793, + "learning_rate": 
0.0009310776256563908, + "loss": 1.709, + "step": 2139 + }, + { + "epoch": 0.1941571402649247, + "grad_norm": 0.13752371108083283, + "learning_rate": 0.00093100316734749, + "loss": 1.673, + "step": 2140 + }, + { + "epoch": 0.1942478679005625, + "grad_norm": 0.13412156938893757, + "learning_rate": 0.0009309286718214022, + "loss": 1.6622, + "step": 2141 + }, + { + "epoch": 0.19433859553620034, + "grad_norm": 0.13878484698830731, + "learning_rate": 0.0009308541390845601, + "loss": 1.6719, + "step": 2142 + }, + { + "epoch": 0.19442932317183814, + "grad_norm": 0.1395811976847181, + "learning_rate": 0.0009307795691433996, + "loss": 1.6488, + "step": 2143 + }, + { + "epoch": 0.19452005080747595, + "grad_norm": 0.14006042147787756, + "learning_rate": 0.0009307049620043597, + "loss": 1.6903, + "step": 2144 + }, + { + "epoch": 0.19461077844311378, + "grad_norm": 0.13755243237514755, + "learning_rate": 0.0009306303176738829, + "loss": 1.7201, + "step": 2145 + }, + { + "epoch": 0.19470150607875158, + "grad_norm": 0.13919242169788434, + "learning_rate": 0.0009305556361584147, + "loss": 1.6405, + "step": 2146 + }, + { + "epoch": 0.1947922337143894, + "grad_norm": 0.13876633876261393, + "learning_rate": 0.0009304809174644038, + "loss": 1.7022, + "step": 2147 + }, + { + "epoch": 0.19488296135002722, + "grad_norm": 0.13577593443572794, + "learning_rate": 0.0009304061615983024, + "loss": 1.6978, + "step": 2148 + }, + { + "epoch": 0.19497368898566503, + "grad_norm": 0.13483911766634196, + "learning_rate": 0.0009303313685665655, + "loss": 1.6492, + "step": 2149 + }, + { + "epoch": 0.19506441662130286, + "grad_norm": 0.12933407329354474, + "learning_rate": 0.0009302565383756514, + "loss": 1.696, + "step": 2150 + }, + { + "epoch": 0.19515514425694067, + "grad_norm": 0.13853055047266108, + "learning_rate": 0.0009301816710320217, + "loss": 1.622, + "step": 2151 + }, + { + "epoch": 0.19524587189257847, + "grad_norm": 0.14324130393120504, + "learning_rate": 0.0009301067665421414, + "loss": 1.7079, + "step": 2152 + }, + { + "epoch": 0.1953365995282163, + "grad_norm": 0.13410423262105792, + "learning_rate": 0.0009300318249124785, + "loss": 1.6363, + "step": 2153 + }, + { + "epoch": 0.1954273271638541, + "grad_norm": 0.13808266998664515, + "learning_rate": 0.000929956846149504, + "loss": 1.6426, + "step": 2154 + }, + { + "epoch": 0.1955180547994919, + "grad_norm": 0.13821816662083222, + "learning_rate": 0.0009298818302596926, + "loss": 1.6448, + "step": 2155 + }, + { + "epoch": 0.19560878243512975, + "grad_norm": 0.1356679602687008, + "learning_rate": 0.0009298067772495218, + "loss": 1.678, + "step": 2156 + }, + { + "epoch": 0.19569951007076755, + "grad_norm": 0.13825798341652484, + "learning_rate": 0.0009297316871254725, + "loss": 1.6962, + "step": 2157 + }, + { + "epoch": 0.19579023770640538, + "grad_norm": 0.13174166673530255, + "learning_rate": 0.0009296565598940284, + "loss": 1.6656, + "step": 2158 + }, + { + "epoch": 0.1958809653420432, + "grad_norm": 0.1427041143459169, + "learning_rate": 0.0009295813955616772, + "loss": 1.674, + "step": 2159 + }, + { + "epoch": 0.195971692977681, + "grad_norm": 0.1337321141370052, + "learning_rate": 0.0009295061941349092, + "loss": 1.6834, + "step": 2160 + }, + { + "epoch": 0.19606242061331883, + "grad_norm": 0.13920187816535862, + "learning_rate": 0.0009294309556202179, + "loss": 1.68, + "step": 2161 + }, + { + "epoch": 0.19615314824895663, + "grad_norm": 0.12646599607448616, + "learning_rate": 0.0009293556800241005, + "loss": 1.647, + "step": 2162 + }, + { + "epoch": 
0.19624387588459444, + "grad_norm": 0.1293836320922889, + "learning_rate": 0.0009292803673530567, + "loss": 1.6379, + "step": 2163 + }, + { + "epoch": 0.19633460352023227, + "grad_norm": 0.1475578403646271, + "learning_rate": 0.00092920501761359, + "loss": 1.7328, + "step": 2164 + }, + { + "epoch": 0.19642533115587008, + "grad_norm": 0.1329292831538382, + "learning_rate": 0.0009291296308122067, + "loss": 1.6571, + "step": 2165 + }, + { + "epoch": 0.19651605879150788, + "grad_norm": 0.14000465286074928, + "learning_rate": 0.0009290542069554166, + "loss": 1.6742, + "step": 2166 + }, + { + "epoch": 0.1966067864271457, + "grad_norm": 0.1389955318374318, + "learning_rate": 0.0009289787460497323, + "loss": 1.6656, + "step": 2167 + }, + { + "epoch": 0.19669751406278352, + "grad_norm": 0.14324228300102498, + "learning_rate": 0.0009289032481016704, + "loss": 1.6852, + "step": 2168 + }, + { + "epoch": 0.19678824169842135, + "grad_norm": 0.14213723208831497, + "learning_rate": 0.0009288277131177495, + "loss": 1.7185, + "step": 2169 + }, + { + "epoch": 0.19687896933405916, + "grad_norm": 0.13779376258423817, + "learning_rate": 0.0009287521411044925, + "loss": 1.6566, + "step": 2170 + }, + { + "epoch": 0.19696969696969696, + "grad_norm": 0.13672841376154313, + "learning_rate": 0.0009286765320684249, + "loss": 1.6618, + "step": 2171 + }, + { + "epoch": 0.1970604246053348, + "grad_norm": 0.13890400486299093, + "learning_rate": 0.0009286008860160755, + "loss": 1.7334, + "step": 2172 + }, + { + "epoch": 0.1971511522409726, + "grad_norm": 0.13609298239559353, + "learning_rate": 0.0009285252029539766, + "loss": 1.6695, + "step": 2173 + }, + { + "epoch": 0.1972418798766104, + "grad_norm": 0.13148224901196018, + "learning_rate": 0.0009284494828886631, + "loss": 1.6525, + "step": 2174 + }, + { + "epoch": 0.19733260751224824, + "grad_norm": 0.13851935648766725, + "learning_rate": 0.0009283737258266737, + "loss": 1.6651, + "step": 2175 + }, + { + "epoch": 0.19742333514788604, + "grad_norm": 0.13592218837377548, + "learning_rate": 0.0009282979317745499, + "loss": 1.6771, + "step": 2176 + }, + { + "epoch": 0.19751406278352387, + "grad_norm": 0.13863403096891835, + "learning_rate": 0.0009282221007388366, + "loss": 1.6736, + "step": 2177 + }, + { + "epoch": 0.19760479041916168, + "grad_norm": 0.13554105703037392, + "learning_rate": 0.0009281462327260818, + "loss": 1.7022, + "step": 2178 + }, + { + "epoch": 0.19769551805479949, + "grad_norm": 0.127308458742029, + "learning_rate": 0.0009280703277428366, + "loss": 1.6687, + "step": 2179 + }, + { + "epoch": 0.19778624569043732, + "grad_norm": 0.137807370066564, + "learning_rate": 0.0009279943857956556, + "loss": 1.6977, + "step": 2180 + }, + { + "epoch": 0.19787697332607512, + "grad_norm": 0.13474514351187614, + "learning_rate": 0.0009279184068910965, + "loss": 1.67, + "step": 2181 + }, + { + "epoch": 0.19796770096171293, + "grad_norm": 0.1320138912620313, + "learning_rate": 0.0009278423910357195, + "loss": 1.6316, + "step": 2182 + }, + { + "epoch": 0.19805842859735076, + "grad_norm": 0.13267241272801852, + "learning_rate": 0.0009277663382360892, + "loss": 1.6963, + "step": 2183 + }, + { + "epoch": 0.19814915623298857, + "grad_norm": 0.13536136121510248, + "learning_rate": 0.0009276902484987725, + "loss": 1.691, + "step": 2184 + }, + { + "epoch": 0.19823988386862637, + "grad_norm": 0.1354173152422429, + "learning_rate": 0.0009276141218303398, + "loss": 1.6765, + "step": 2185 + }, + { + "epoch": 0.1983306115042642, + "grad_norm": 0.13992920166831108, + "learning_rate": 
0.0009275379582373647, + "loss": 1.6995, + "step": 2186 + }, + { + "epoch": 0.198421339139902, + "grad_norm": 0.13783250335000422, + "learning_rate": 0.0009274617577264238, + "loss": 1.6474, + "step": 2187 + }, + { + "epoch": 0.19851206677553984, + "grad_norm": 0.14036865240004784, + "learning_rate": 0.0009273855203040973, + "loss": 1.6469, + "step": 2188 + }, + { + "epoch": 0.19860279441117765, + "grad_norm": 0.14080465442956108, + "learning_rate": 0.0009273092459769678, + "loss": 1.6752, + "step": 2189 + }, + { + "epoch": 0.19869352204681545, + "grad_norm": 0.13119782527737672, + "learning_rate": 0.0009272329347516223, + "loss": 1.6757, + "step": 2190 + }, + { + "epoch": 0.19878424968245328, + "grad_norm": 0.13640251471321813, + "learning_rate": 0.0009271565866346498, + "loss": 1.6596, + "step": 2191 + }, + { + "epoch": 0.1988749773180911, + "grad_norm": 0.13281670815701638, + "learning_rate": 0.0009270802016326429, + "loss": 1.6813, + "step": 2192 + }, + { + "epoch": 0.1989657049537289, + "grad_norm": 0.1314637486323989, + "learning_rate": 0.000927003779752198, + "loss": 1.6804, + "step": 2193 + }, + { + "epoch": 0.19905643258936673, + "grad_norm": 0.12791572751629385, + "learning_rate": 0.0009269273209999134, + "loss": 1.7128, + "step": 2194 + }, + { + "epoch": 0.19914716022500453, + "grad_norm": 0.12887889858958962, + "learning_rate": 0.000926850825382392, + "loss": 1.6527, + "step": 2195 + }, + { + "epoch": 0.19923788786064237, + "grad_norm": 0.13006521075409272, + "learning_rate": 0.0009267742929062389, + "loss": 1.7008, + "step": 2196 + }, + { + "epoch": 0.19932861549628017, + "grad_norm": 0.1318209576832188, + "learning_rate": 0.0009266977235780625, + "loss": 1.6701, + "step": 2197 + }, + { + "epoch": 0.19941934313191798, + "grad_norm": 0.13261482564261654, + "learning_rate": 0.0009266211174044749, + "loss": 1.6582, + "step": 2198 + }, + { + "epoch": 0.1995100707675558, + "grad_norm": 0.12599626037523906, + "learning_rate": 0.0009265444743920909, + "loss": 1.6016, + "step": 2199 + }, + { + "epoch": 0.1996007984031936, + "grad_norm": 0.13572809104482514, + "learning_rate": 0.0009264677945475286, + "loss": 1.6676, + "step": 2200 + }, + { + "epoch": 0.19969152603883142, + "grad_norm": 0.1338142810260303, + "learning_rate": 0.0009263910778774096, + "loss": 1.6292, + "step": 2201 + }, + { + "epoch": 0.19978225367446925, + "grad_norm": 0.13183088398472823, + "learning_rate": 0.0009263143243883581, + "loss": 1.6942, + "step": 2202 + }, + { + "epoch": 0.19987298131010706, + "grad_norm": 0.1366627536384488, + "learning_rate": 0.0009262375340870017, + "loss": 1.6575, + "step": 2203 + }, + { + "epoch": 0.19996370894574486, + "grad_norm": 0.1319722559980649, + "learning_rate": 0.0009261607069799716, + "loss": 1.6639, + "step": 2204 + }, + { + "epoch": 0.2000544365813827, + "grad_norm": 0.13565892472579125, + "learning_rate": 0.0009260838430739016, + "loss": 1.6518, + "step": 2205 + }, + { + "epoch": 0.2001451642170205, + "grad_norm": 0.1344972003086107, + "learning_rate": 0.0009260069423754289, + "loss": 1.6521, + "step": 2206 + }, + { + "epoch": 0.20023589185265833, + "grad_norm": 0.13336891480006413, + "learning_rate": 0.000925930004891194, + "loss": 1.6805, + "step": 2207 + }, + { + "epoch": 0.20032661948829614, + "grad_norm": 0.13479427997826007, + "learning_rate": 0.0009258530306278404, + "loss": 1.6564, + "step": 2208 + }, + { + "epoch": 0.20041734712393394, + "grad_norm": 0.1396338968703019, + "learning_rate": 0.0009257760195920148, + "loss": 1.6377, + "step": 2209 + }, + { + "epoch": 
0.20050807475957177, + "grad_norm": 0.13488882266799582, + "learning_rate": 0.0009256989717903671, + "loss": 1.6776, + "step": 2210 + }, + { + "epoch": 0.20059880239520958, + "grad_norm": 0.13705392995972668, + "learning_rate": 0.0009256218872295505, + "loss": 1.6564, + "step": 2211 + }, + { + "epoch": 0.20068953003084739, + "grad_norm": 0.1309688099828371, + "learning_rate": 0.0009255447659162213, + "loss": 1.6857, + "step": 2212 + }, + { + "epoch": 0.20078025766648522, + "grad_norm": 0.13638893267765317, + "learning_rate": 0.0009254676078570388, + "loss": 1.6849, + "step": 2213 + }, + { + "epoch": 0.20087098530212302, + "grad_norm": 0.1312799409458523, + "learning_rate": 0.0009253904130586654, + "loss": 1.6774, + "step": 2214 + }, + { + "epoch": 0.20096171293776086, + "grad_norm": 0.13811926104314934, + "learning_rate": 0.0009253131815277674, + "loss": 1.6434, + "step": 2215 + }, + { + "epoch": 0.20105244057339866, + "grad_norm": 0.13794222765206798, + "learning_rate": 0.0009252359132710136, + "loss": 1.6553, + "step": 2216 + }, + { + "epoch": 0.20114316820903647, + "grad_norm": 0.14212860611041167, + "learning_rate": 0.0009251586082950758, + "loss": 1.7099, + "step": 2217 + }, + { + "epoch": 0.2012338958446743, + "grad_norm": 0.13529570725152487, + "learning_rate": 0.0009250812666066295, + "loss": 1.6766, + "step": 2218 + }, + { + "epoch": 0.2013246234803121, + "grad_norm": 0.13607673833969283, + "learning_rate": 0.0009250038882123533, + "loss": 1.6229, + "step": 2219 + }, + { + "epoch": 0.2014153511159499, + "grad_norm": 0.13544290710885296, + "learning_rate": 0.0009249264731189287, + "loss": 1.6626, + "step": 2220 + }, + { + "epoch": 0.20150607875158774, + "grad_norm": 0.13097467544901653, + "learning_rate": 0.0009248490213330405, + "loss": 1.6584, + "step": 2221 + }, + { + "epoch": 0.20159680638722555, + "grad_norm": 0.13959590246918074, + "learning_rate": 0.0009247715328613766, + "loss": 1.7009, + "step": 2222 + }, + { + "epoch": 0.20168753402286335, + "grad_norm": 0.13321716808083886, + "learning_rate": 0.0009246940077106282, + "loss": 1.7134, + "step": 2223 + }, + { + "epoch": 0.20177826165850118, + "grad_norm": 0.1301374797732444, + "learning_rate": 0.0009246164458874899, + "loss": 1.6436, + "step": 2224 + }, + { + "epoch": 0.201868989294139, + "grad_norm": 0.13571988581079353, + "learning_rate": 0.0009245388473986587, + "loss": 1.6417, + "step": 2225 + }, + { + "epoch": 0.20195971692977682, + "grad_norm": 0.14243381696750368, + "learning_rate": 0.0009244612122508355, + "loss": 1.6701, + "step": 2226 + }, + { + "epoch": 0.20205044456541463, + "grad_norm": 0.13358453063913156, + "learning_rate": 0.0009243835404507242, + "loss": 1.6743, + "step": 2227 + }, + { + "epoch": 0.20214117220105243, + "grad_norm": 0.13962039163351003, + "learning_rate": 0.0009243058320050313, + "loss": 1.6925, + "step": 2228 + }, + { + "epoch": 0.20223189983669027, + "grad_norm": 0.12757408218801364, + "learning_rate": 0.0009242280869204676, + "loss": 1.6492, + "step": 2229 + }, + { + "epoch": 0.20232262747232807, + "grad_norm": 0.13046683118070906, + "learning_rate": 0.0009241503052037458, + "loss": 1.6376, + "step": 2230 + }, + { + "epoch": 0.20241335510796588, + "grad_norm": 0.13865554352447174, + "learning_rate": 0.0009240724868615828, + "loss": 1.6839, + "step": 2231 + }, + { + "epoch": 0.2025040827436037, + "grad_norm": 0.12885991620648563, + "learning_rate": 0.0009239946319006982, + "loss": 1.6519, + "step": 2232 + }, + { + "epoch": 0.2025948103792415, + "grad_norm": 0.1348226839914834, + 
"learning_rate": 0.0009239167403278143, + "loss": 1.6703, + "step": 2233 + }, + { + "epoch": 0.20268553801487932, + "grad_norm": 0.13858531341737357, + "learning_rate": 0.0009238388121496577, + "loss": 1.6729, + "step": 2234 + }, + { + "epoch": 0.20277626565051715, + "grad_norm": 0.13234068886173977, + "learning_rate": 0.0009237608473729571, + "loss": 1.7044, + "step": 2235 + }, + { + "epoch": 0.20286699328615496, + "grad_norm": 0.13348778736221661, + "learning_rate": 0.0009236828460044449, + "loss": 1.6624, + "step": 2236 + }, + { + "epoch": 0.2029577209217928, + "grad_norm": 0.1298106682228398, + "learning_rate": 0.0009236048080508564, + "loss": 1.6879, + "step": 2237 + }, + { + "epoch": 0.2030484485574306, + "grad_norm": 0.13377407500399172, + "learning_rate": 0.0009235267335189303, + "loss": 1.6587, + "step": 2238 + }, + { + "epoch": 0.2031391761930684, + "grad_norm": 0.13046437879037268, + "learning_rate": 0.0009234486224154085, + "loss": 1.6644, + "step": 2239 + }, + { + "epoch": 0.20322990382870623, + "grad_norm": 0.13387546752904125, + "learning_rate": 0.0009233704747470356, + "loss": 1.665, + "step": 2240 + }, + { + "epoch": 0.20332063146434404, + "grad_norm": 0.12891969023634528, + "learning_rate": 0.0009232922905205599, + "loss": 1.6459, + "step": 2241 + }, + { + "epoch": 0.20341135909998184, + "grad_norm": 0.13362292550816965, + "learning_rate": 0.0009232140697427324, + "loss": 1.6385, + "step": 2242 + }, + { + "epoch": 0.20350208673561967, + "grad_norm": 0.13319615069055316, + "learning_rate": 0.0009231358124203077, + "loss": 1.6946, + "step": 2243 + }, + { + "epoch": 0.20359281437125748, + "grad_norm": 0.12927499982094698, + "learning_rate": 0.0009230575185600431, + "loss": 1.6722, + "step": 2244 + }, + { + "epoch": 0.2036835420068953, + "grad_norm": 0.1286095466395052, + "learning_rate": 0.0009229791881686997, + "loss": 1.6427, + "step": 2245 + }, + { + "epoch": 0.20377426964253312, + "grad_norm": 0.12841405968307584, + "learning_rate": 0.0009229008212530409, + "loss": 1.6242, + "step": 2246 + }, + { + "epoch": 0.20386499727817092, + "grad_norm": 0.14412122273326422, + "learning_rate": 0.0009228224178198338, + "loss": 1.6661, + "step": 2247 + }, + { + "epoch": 0.20395572491380876, + "grad_norm": 0.13149570940396701, + "learning_rate": 0.0009227439778758486, + "loss": 1.6426, + "step": 2248 + }, + { + "epoch": 0.20404645254944656, + "grad_norm": 0.1287419982163936, + "learning_rate": 0.0009226655014278587, + "loss": 1.6431, + "step": 2249 + }, + { + "epoch": 0.20413718018508437, + "grad_norm": 0.12630561605791665, + "learning_rate": 0.0009225869884826403, + "loss": 1.6848, + "step": 2250 + }, + { + "epoch": 0.2042279078207222, + "grad_norm": 0.14285490641192317, + "learning_rate": 0.0009225084390469733, + "loss": 1.7029, + "step": 2251 + }, + { + "epoch": 0.20431863545636, + "grad_norm": 0.13796385097320293, + "learning_rate": 0.0009224298531276404, + "loss": 1.6512, + "step": 2252 + }, + { + "epoch": 0.2044093630919978, + "grad_norm": 0.1334973455509367, + "learning_rate": 0.0009223512307314272, + "loss": 1.6809, + "step": 2253 + }, + { + "epoch": 0.20450009072763564, + "grad_norm": 0.13068855379382432, + "learning_rate": 0.0009222725718651231, + "loss": 1.6543, + "step": 2254 + }, + { + "epoch": 0.20459081836327345, + "grad_norm": 0.1294338406547417, + "learning_rate": 0.0009221938765355202, + "loss": 1.671, + "step": 2255 + }, + { + "epoch": 0.20468154599891128, + "grad_norm": 0.13325814749626172, + "learning_rate": 0.0009221151447494138, + "loss": 1.6985, + "step": 2256 + 
}, + { + "epoch": 0.20477227363454908, + "grad_norm": 0.1345436999662351, + "learning_rate": 0.0009220363765136024, + "loss": 1.6626, + "step": 2257 + }, + { + "epoch": 0.2048630012701869, + "grad_norm": 0.13124030265766468, + "learning_rate": 0.0009219575718348879, + "loss": 1.6902, + "step": 2258 + }, + { + "epoch": 0.20495372890582472, + "grad_norm": 0.1305511401113103, + "learning_rate": 0.0009218787307200746, + "loss": 1.6293, + "step": 2259 + }, + { + "epoch": 0.20504445654146253, + "grad_norm": 0.13499570619661067, + "learning_rate": 0.0009217998531759708, + "loss": 1.6741, + "step": 2260 + }, + { + "epoch": 0.20513518417710033, + "grad_norm": 0.13119453637823303, + "learning_rate": 0.0009217209392093876, + "loss": 1.6654, + "step": 2261 + }, + { + "epoch": 0.20522591181273817, + "grad_norm": 0.12909110438043048, + "learning_rate": 0.000921641988827139, + "loss": 1.6549, + "step": 2262 + }, + { + "epoch": 0.20531663944837597, + "grad_norm": 0.13454902054452986, + "learning_rate": 0.0009215630020360427, + "loss": 1.6683, + "step": 2263 + }, + { + "epoch": 0.2054073670840138, + "grad_norm": 0.12934857660067353, + "learning_rate": 0.000921483978842919, + "loss": 1.7036, + "step": 2264 + }, + { + "epoch": 0.2054980947196516, + "grad_norm": 0.1347323696260342, + "learning_rate": 0.0009214049192545915, + "loss": 1.678, + "step": 2265 + }, + { + "epoch": 0.2055888223552894, + "grad_norm": 0.1349192109184227, + "learning_rate": 0.0009213258232778871, + "loss": 1.6531, + "step": 2266 + }, + { + "epoch": 0.20567954999092725, + "grad_norm": 0.13300031622512784, + "learning_rate": 0.000921246690919636, + "loss": 1.6622, + "step": 2267 + }, + { + "epoch": 0.20577027762656505, + "grad_norm": 0.1335458587937569, + "learning_rate": 0.0009211675221866708, + "loss": 1.6174, + "step": 2268 + }, + { + "epoch": 0.20586100526220286, + "grad_norm": 0.13659971183945308, + "learning_rate": 0.0009210883170858282, + "loss": 1.7075, + "step": 2269 + }, + { + "epoch": 0.2059517328978407, + "grad_norm": 0.13814670303732257, + "learning_rate": 0.0009210090756239471, + "loss": 1.7151, + "step": 2270 + }, + { + "epoch": 0.2060424605334785, + "grad_norm": 0.12878221186435418, + "learning_rate": 0.0009209297978078705, + "loss": 1.6442, + "step": 2271 + }, + { + "epoch": 0.2061331881691163, + "grad_norm": 0.13759269752835726, + "learning_rate": 0.0009208504836444436, + "loss": 1.6934, + "step": 2272 + }, + { + "epoch": 0.20622391580475413, + "grad_norm": 0.12844758211203372, + "learning_rate": 0.0009207711331405156, + "loss": 1.6656, + "step": 2273 + }, + { + "epoch": 0.20631464344039194, + "grad_norm": 0.13037096412212879, + "learning_rate": 0.0009206917463029381, + "loss": 1.692, + "step": 2274 + }, + { + "epoch": 0.20640537107602977, + "grad_norm": 0.1396862314307512, + "learning_rate": 0.0009206123231385665, + "loss": 1.6788, + "step": 2275 + }, + { + "epoch": 0.20649609871166758, + "grad_norm": 0.1250412727669269, + "learning_rate": 0.0009205328636542587, + "loss": 1.6618, + "step": 2276 + }, + { + "epoch": 0.20658682634730538, + "grad_norm": 0.1387022181296454, + "learning_rate": 0.000920453367856876, + "loss": 1.6431, + "step": 2277 + }, + { + "epoch": 0.2066775539829432, + "grad_norm": 0.1365219197582735, + "learning_rate": 0.0009203738357532831, + "loss": 1.6677, + "step": 2278 + }, + { + "epoch": 0.20676828161858102, + "grad_norm": 0.14399205328480397, + "learning_rate": 0.0009202942673503476, + "loss": 1.6478, + "step": 2279 + }, + { + "epoch": 0.20685900925421882, + "grad_norm": 0.13160963542528667, + 
"learning_rate": 0.0009202146626549402, + "loss": 1.6781, + "step": 2280 + }, + { + "epoch": 0.20694973688985666, + "grad_norm": 0.1321040375959952, + "learning_rate": 0.0009201350216739347, + "loss": 1.6887, + "step": 2281 + }, + { + "epoch": 0.20704046452549446, + "grad_norm": 0.13552124979887112, + "learning_rate": 0.0009200553444142081, + "loss": 1.6921, + "step": 2282 + }, + { + "epoch": 0.2071311921611323, + "grad_norm": 0.13344387664046672, + "learning_rate": 0.0009199756308826407, + "loss": 1.6146, + "step": 2283 + }, + { + "epoch": 0.2072219197967701, + "grad_norm": 0.13290827536424138, + "learning_rate": 0.0009198958810861155, + "loss": 1.641, + "step": 2284 + }, + { + "epoch": 0.2073126474324079, + "grad_norm": 0.1273378364303839, + "learning_rate": 0.0009198160950315193, + "loss": 1.6694, + "step": 2285 + }, + { + "epoch": 0.20740337506804574, + "grad_norm": 0.13304344407653493, + "learning_rate": 0.0009197362727257412, + "loss": 1.6762, + "step": 2286 + }, + { + "epoch": 0.20749410270368354, + "grad_norm": 0.14045933933083926, + "learning_rate": 0.0009196564141756743, + "loss": 1.6999, + "step": 2287 + }, + { + "epoch": 0.20758483033932135, + "grad_norm": 0.12838021440384792, + "learning_rate": 0.000919576519388214, + "loss": 1.6906, + "step": 2288 + }, + { + "epoch": 0.20767555797495918, + "grad_norm": 0.13351116195513635, + "learning_rate": 0.0009194965883702596, + "loss": 1.6451, + "step": 2289 + }, + { + "epoch": 0.20776628561059698, + "grad_norm": 0.13672998263777614, + "learning_rate": 0.000919416621128713, + "loss": 1.6702, + "step": 2290 + }, + { + "epoch": 0.2078570132462348, + "grad_norm": 0.13270624893982225, + "learning_rate": 0.0009193366176704792, + "loss": 1.6687, + "step": 2291 + }, + { + "epoch": 0.20794774088187262, + "grad_norm": 0.14286236935105454, + "learning_rate": 0.0009192565780024667, + "loss": 1.6844, + "step": 2292 + }, + { + "epoch": 0.20803846851751043, + "grad_norm": 0.12794537635133138, + "learning_rate": 0.0009191765021315868, + "loss": 1.6936, + "step": 2293 + }, + { + "epoch": 0.20812919615314826, + "grad_norm": 0.13887563217954207, + "learning_rate": 0.0009190963900647543, + "loss": 1.6551, + "step": 2294 + }, + { + "epoch": 0.20821992378878607, + "grad_norm": 0.1203222017385964, + "learning_rate": 0.0009190162418088865, + "loss": 1.6558, + "step": 2295 + }, + { + "epoch": 0.20831065142442387, + "grad_norm": 0.12823573364683014, + "learning_rate": 0.0009189360573709047, + "loss": 1.631, + "step": 2296 + }, + { + "epoch": 0.2084013790600617, + "grad_norm": 0.1293431992370953, + "learning_rate": 0.0009188558367577327, + "loss": 1.6805, + "step": 2297 + }, + { + "epoch": 0.2084921066956995, + "grad_norm": 0.1330612225433963, + "learning_rate": 0.0009187755799762972, + "loss": 1.6742, + "step": 2298 + }, + { + "epoch": 0.2085828343313373, + "grad_norm": 0.1304045400337867, + "learning_rate": 0.0009186952870335288, + "loss": 1.6236, + "step": 2299 + }, + { + "epoch": 0.20867356196697515, + "grad_norm": 0.1360317438351536, + "learning_rate": 0.0009186149579363605, + "loss": 1.7115, + "step": 2300 + }, + { + "epoch": 0.20876428960261295, + "grad_norm": 0.13579941903092568, + "learning_rate": 0.000918534592691729, + "loss": 1.6555, + "step": 2301 + }, + { + "epoch": 0.20885501723825078, + "grad_norm": 0.13420911922597833, + "learning_rate": 0.0009184541913065739, + "loss": 1.7209, + "step": 2302 + }, + { + "epoch": 0.2089457448738886, + "grad_norm": 0.1344344340257357, + "learning_rate": 0.0009183737537878374, + "loss": 1.6487, + "step": 2303 + }, + 
{ + "epoch": 0.2090364725095264, + "grad_norm": 0.13434451628627395, + "learning_rate": 0.0009182932801424657, + "loss": 1.6976, + "step": 2304 + }, + { + "epoch": 0.20912720014516423, + "grad_norm": 0.1308186239188082, + "learning_rate": 0.0009182127703774077, + "loss": 1.6456, + "step": 2305 + }, + { + "epoch": 0.20921792778080203, + "grad_norm": 0.1305716387147868, + "learning_rate": 0.0009181322244996153, + "loss": 1.6689, + "step": 2306 + }, + { + "epoch": 0.20930865541643984, + "grad_norm": 0.133162843078581, + "learning_rate": 0.0009180516425160436, + "loss": 1.6808, + "step": 2307 + }, + { + "epoch": 0.20939938305207767, + "grad_norm": 0.1290213263669741, + "learning_rate": 0.0009179710244336512, + "loss": 1.6864, + "step": 2308 + }, + { + "epoch": 0.20949011068771548, + "grad_norm": 0.12812076197970168, + "learning_rate": 0.0009178903702593991, + "loss": 1.6824, + "step": 2309 + }, + { + "epoch": 0.20958083832335328, + "grad_norm": 0.12668930815150733, + "learning_rate": 0.0009178096800002518, + "loss": 1.656, + "step": 2310 + }, + { + "epoch": 0.2096715659589911, + "grad_norm": 0.13090564136707208, + "learning_rate": 0.0009177289536631772, + "loss": 1.6455, + "step": 2311 + }, + { + "epoch": 0.20976229359462892, + "grad_norm": 0.12649345195885503, + "learning_rate": 0.0009176481912551458, + "loss": 1.5874, + "step": 2312 + }, + { + "epoch": 0.20985302123026675, + "grad_norm": 0.13074762468415685, + "learning_rate": 0.0009175673927831316, + "loss": 1.6689, + "step": 2313 + }, + { + "epoch": 0.20994374886590456, + "grad_norm": 0.13365910610362255, + "learning_rate": 0.0009174865582541115, + "loss": 1.6583, + "step": 2314 + }, + { + "epoch": 0.21003447650154236, + "grad_norm": 0.12988320166304373, + "learning_rate": 0.0009174056876750655, + "loss": 1.6558, + "step": 2315 + }, + { + "epoch": 0.2101252041371802, + "grad_norm": 0.1354043086753273, + "learning_rate": 0.0009173247810529768, + "loss": 1.6758, + "step": 2316 + }, + { + "epoch": 0.210215931772818, + "grad_norm": 0.13445599259761742, + "learning_rate": 0.0009172438383948318, + "loss": 1.6427, + "step": 2317 + }, + { + "epoch": 0.2103066594084558, + "grad_norm": 0.13604829305349425, + "learning_rate": 0.00091716285970762, + "loss": 1.6723, + "step": 2318 + }, + { + "epoch": 0.21039738704409364, + "grad_norm": 0.1308200624529931, + "learning_rate": 0.0009170818449983336, + "loss": 1.635, + "step": 2319 + }, + { + "epoch": 0.21048811467973144, + "grad_norm": 0.1338052338076211, + "learning_rate": 0.0009170007942739685, + "loss": 1.6557, + "step": 2320 + }, + { + "epoch": 0.21057884231536927, + "grad_norm": 0.12565029065345162, + "learning_rate": 0.0009169197075415233, + "loss": 1.6587, + "step": 2321 + }, + { + "epoch": 0.21066956995100708, + "grad_norm": 0.1294704247253913, + "learning_rate": 0.0009168385848080001, + "loss": 1.627, + "step": 2322 + }, + { + "epoch": 0.21076029758664488, + "grad_norm": 0.13904667946200658, + "learning_rate": 0.0009167574260804034, + "loss": 1.6306, + "step": 2323 + }, + { + "epoch": 0.21085102522228272, + "grad_norm": 0.13266902524656424, + "learning_rate": 0.0009166762313657417, + "loss": 1.6948, + "step": 2324 + }, + { + "epoch": 0.21094175285792052, + "grad_norm": 0.18525004347436091, + "learning_rate": 0.0009165950006710259, + "loss": 1.654, + "step": 2325 + }, + { + "epoch": 0.21103248049355833, + "grad_norm": 0.13674863071573004, + "learning_rate": 0.0009165137340032705, + "loss": 1.6626, + "step": 2326 + }, + { + "epoch": 0.21112320812919616, + "grad_norm": 0.13381704113432105, + 
"learning_rate": 0.0009164324313694928, + "loss": 1.6959, + "step": 2327 + }, + { + "epoch": 0.21121393576483397, + "grad_norm": 0.14810774542298646, + "learning_rate": 0.0009163510927767131, + "loss": 1.6769, + "step": 2328 + }, + { + "epoch": 0.21130466340047177, + "grad_norm": 0.13439224586927928, + "learning_rate": 0.0009162697182319553, + "loss": 1.6565, + "step": 2329 + }, + { + "epoch": 0.2113953910361096, + "grad_norm": 0.13552144577695036, + "learning_rate": 0.0009161883077422459, + "loss": 1.6409, + "step": 2330 + }, + { + "epoch": 0.2114861186717474, + "grad_norm": 0.12710323504084511, + "learning_rate": 0.0009161068613146149, + "loss": 1.6386, + "step": 2331 + }, + { + "epoch": 0.21157684630738524, + "grad_norm": 0.1303813864297804, + "learning_rate": 0.000916025378956095, + "loss": 1.6526, + "step": 2332 + }, + { + "epoch": 0.21166757394302305, + "grad_norm": 0.13641214582449449, + "learning_rate": 0.0009159438606737223, + "loss": 1.627, + "step": 2333 + }, + { + "epoch": 0.21175830157866085, + "grad_norm": 0.12946004073993464, + "learning_rate": 0.000915862306474536, + "loss": 1.6363, + "step": 2334 + }, + { + "epoch": 0.21184902921429868, + "grad_norm": 0.1320410846430972, + "learning_rate": 0.0009157807163655782, + "loss": 1.6733, + "step": 2335 + }, + { + "epoch": 0.2119397568499365, + "grad_norm": 0.1375845481357206, + "learning_rate": 0.0009156990903538944, + "loss": 1.6718, + "step": 2336 + }, + { + "epoch": 0.2120304844855743, + "grad_norm": 0.13492731540799524, + "learning_rate": 0.0009156174284465327, + "loss": 1.6681, + "step": 2337 + }, + { + "epoch": 0.21212121212121213, + "grad_norm": 0.13255225098267973, + "learning_rate": 0.0009155357306505447, + "loss": 1.6872, + "step": 2338 + }, + { + "epoch": 0.21221193975684993, + "grad_norm": 0.1346447388053369, + "learning_rate": 0.0009154539969729854, + "loss": 1.6644, + "step": 2339 + }, + { + "epoch": 0.21230266739248776, + "grad_norm": 0.13103304410993497, + "learning_rate": 0.0009153722274209121, + "loss": 1.6457, + "step": 2340 + }, + { + "epoch": 0.21239339502812557, + "grad_norm": 0.13587705954299342, + "learning_rate": 0.0009152904220013858, + "loss": 1.6883, + "step": 2341 + }, + { + "epoch": 0.21248412266376338, + "grad_norm": 0.12985893551505973, + "learning_rate": 0.0009152085807214703, + "loss": 1.6376, + "step": 2342 + }, + { + "epoch": 0.2125748502994012, + "grad_norm": 0.13260306242497083, + "learning_rate": 0.0009151267035882326, + "loss": 1.6615, + "step": 2343 + }, + { + "epoch": 0.212665577935039, + "grad_norm": 0.13546917412923803, + "learning_rate": 0.0009150447906087429, + "loss": 1.704, + "step": 2344 + }, + { + "epoch": 0.21275630557067682, + "grad_norm": 0.13659989720625634, + "learning_rate": 0.0009149628417900743, + "loss": 1.689, + "step": 2345 + }, + { + "epoch": 0.21284703320631465, + "grad_norm": 0.13143281402832413, + "learning_rate": 0.0009148808571393034, + "loss": 1.6316, + "step": 2346 + }, + { + "epoch": 0.21293776084195246, + "grad_norm": 0.12893407078545968, + "learning_rate": 0.0009147988366635092, + "loss": 1.6286, + "step": 2347 + }, + { + "epoch": 0.21302848847759026, + "grad_norm": 0.12786986262149597, + "learning_rate": 0.0009147167803697742, + "loss": 1.6459, + "step": 2348 + }, + { + "epoch": 0.2131192161132281, + "grad_norm": 0.12815445578650417, + "learning_rate": 0.0009146346882651841, + "loss": 1.6614, + "step": 2349 + }, + { + "epoch": 0.2132099437488659, + "grad_norm": 0.12606776862840738, + "learning_rate": 0.0009145525603568277, + "loss": 1.694, + "step": 2350 + }, 
+ { + "epoch": 0.21330067138450373, + "grad_norm": 0.13683932475624083, + "learning_rate": 0.0009144703966517966, + "loss": 1.6063, + "step": 2351 + }, + { + "epoch": 0.21339139902014154, + "grad_norm": 0.12899232432439064, + "learning_rate": 0.0009143881971571857, + "loss": 1.7031, + "step": 2352 + }, + { + "epoch": 0.21348212665577934, + "grad_norm": 0.12477195599007058, + "learning_rate": 0.0009143059618800927, + "loss": 1.6262, + "step": 2353 + }, + { + "epoch": 0.21357285429141717, + "grad_norm": 0.12938176524698333, + "learning_rate": 0.000914223690827619, + "loss": 1.6583, + "step": 2354 + }, + { + "epoch": 0.21366358192705498, + "grad_norm": 0.12952369749412065, + "learning_rate": 0.0009141413840068684, + "loss": 1.6762, + "step": 2355 + }, + { + "epoch": 0.21375430956269278, + "grad_norm": 0.1316423318468718, + "learning_rate": 0.0009140590414249485, + "loss": 1.6728, + "step": 2356 + }, + { + "epoch": 0.21384503719833062, + "grad_norm": 0.13363902056204724, + "learning_rate": 0.0009139766630889692, + "loss": 1.6341, + "step": 2357 + }, + { + "epoch": 0.21393576483396842, + "grad_norm": 0.12757914184878674, + "learning_rate": 0.000913894249006044, + "loss": 1.616, + "step": 2358 + }, + { + "epoch": 0.21402649246960626, + "grad_norm": 0.13724384614360394, + "learning_rate": 0.0009138117991832894, + "loss": 1.6514, + "step": 2359 + }, + { + "epoch": 0.21411722010524406, + "grad_norm": 0.12865813515578892, + "learning_rate": 0.0009137293136278248, + "loss": 1.6594, + "step": 2360 + }, + { + "epoch": 0.21420794774088187, + "grad_norm": 0.12973452096618066, + "learning_rate": 0.0009136467923467733, + "loss": 1.6649, + "step": 2361 + }, + { + "epoch": 0.2142986753765197, + "grad_norm": 0.13728724583846105, + "learning_rate": 0.0009135642353472602, + "loss": 1.6499, + "step": 2362 + }, + { + "epoch": 0.2143894030121575, + "grad_norm": 0.1268579497193104, + "learning_rate": 0.0009134816426364144, + "loss": 1.6546, + "step": 2363 + }, + { + "epoch": 0.2144801306477953, + "grad_norm": 0.1296839379419403, + "learning_rate": 0.0009133990142213678, + "loss": 1.6602, + "step": 2364 + }, + { + "epoch": 0.21457085828343314, + "grad_norm": 0.13432691312674047, + "learning_rate": 0.0009133163501092555, + "loss": 1.6866, + "step": 2365 + }, + { + "epoch": 0.21466158591907095, + "grad_norm": 0.1302266395227069, + "learning_rate": 0.0009132336503072153, + "loss": 1.6553, + "step": 2366 + }, + { + "epoch": 0.21475231355470875, + "grad_norm": 0.12925768665299553, + "learning_rate": 0.0009131509148223886, + "loss": 1.6884, + "step": 2367 + }, + { + "epoch": 0.21484304119034658, + "grad_norm": 0.13226748026165547, + "learning_rate": 0.0009130681436619193, + "loss": 1.646, + "step": 2368 + }, + { + "epoch": 0.2149337688259844, + "grad_norm": 0.1292741774395997, + "learning_rate": 0.0009129853368329552, + "loss": 1.6719, + "step": 2369 + }, + { + "epoch": 0.21502449646162222, + "grad_norm": 0.1271983874371139, + "learning_rate": 0.0009129024943426463, + "loss": 1.6554, + "step": 2370 + }, + { + "epoch": 0.21511522409726003, + "grad_norm": 0.13251315816100115, + "learning_rate": 0.0009128196161981462, + "loss": 1.6111, + "step": 2371 + }, + { + "epoch": 0.21520595173289783, + "grad_norm": 0.13688368715517357, + "learning_rate": 0.0009127367024066112, + "loss": 1.6444, + "step": 2372 + }, + { + "epoch": 0.21529667936853567, + "grad_norm": 0.13537195354970058, + "learning_rate": 0.0009126537529752012, + "loss": 1.6948, + "step": 2373 + }, + { + "epoch": 0.21538740700417347, + "grad_norm": 0.1330908379303992, + 
"learning_rate": 0.0009125707679110789, + "loss": 1.6606, + "step": 2374 + }, + { + "epoch": 0.21547813463981128, + "grad_norm": 0.1322637613453778, + "learning_rate": 0.00091248774722141, + "loss": 1.6649, + "step": 2375 + }, + { + "epoch": 0.2155688622754491, + "grad_norm": 0.13096525800420367, + "learning_rate": 0.0009124046909133633, + "loss": 1.6235, + "step": 2376 + }, + { + "epoch": 0.2156595899110869, + "grad_norm": 0.1284633919603959, + "learning_rate": 0.0009123215989941107, + "loss": 1.6878, + "step": 2377 + }, + { + "epoch": 0.21575031754672475, + "grad_norm": 0.13563959915602597, + "learning_rate": 0.0009122384714708273, + "loss": 1.6816, + "step": 2378 + }, + { + "epoch": 0.21584104518236255, + "grad_norm": 0.1317056874488702, + "learning_rate": 0.000912155308350691, + "loss": 1.6298, + "step": 2379 + }, + { + "epoch": 0.21593177281800036, + "grad_norm": 0.13754967740776897, + "learning_rate": 0.0009120721096408833, + "loss": 1.6643, + "step": 2380 + }, + { + "epoch": 0.2160225004536382, + "grad_norm": 0.13165400094154744, + "learning_rate": 0.0009119888753485881, + "loss": 1.6951, + "step": 2381 + }, + { + "epoch": 0.216113228089276, + "grad_norm": 0.1275878425316763, + "learning_rate": 0.0009119056054809929, + "loss": 1.6167, + "step": 2382 + }, + { + "epoch": 0.2162039557249138, + "grad_norm": 0.1293687188050987, + "learning_rate": 0.0009118223000452877, + "loss": 1.6395, + "step": 2383 + }, + { + "epoch": 0.21629468336055163, + "grad_norm": 0.13231226281183422, + "learning_rate": 0.0009117389590486665, + "loss": 1.6527, + "step": 2384 + }, + { + "epoch": 0.21638541099618944, + "grad_norm": 0.13326056179999207, + "learning_rate": 0.0009116555824983252, + "loss": 1.672, + "step": 2385 + }, + { + "epoch": 0.21647613863182724, + "grad_norm": 0.13339167229089424, + "learning_rate": 0.0009115721704014639, + "loss": 1.6409, + "step": 2386 + }, + { + "epoch": 0.21656686626746507, + "grad_norm": 0.12535083431070873, + "learning_rate": 0.000911488722765285, + "loss": 1.6177, + "step": 2387 + }, + { + "epoch": 0.21665759390310288, + "grad_norm": 0.13794588857843706, + "learning_rate": 0.0009114052395969942, + "loss": 1.6893, + "step": 2388 + }, + { + "epoch": 0.2167483215387407, + "grad_norm": 0.130707132259764, + "learning_rate": 0.0009113217209038003, + "loss": 1.6687, + "step": 2389 + }, + { + "epoch": 0.21683904917437852, + "grad_norm": 0.13013934806788585, + "learning_rate": 0.0009112381666929153, + "loss": 1.6707, + "step": 2390 + }, + { + "epoch": 0.21692977681001632, + "grad_norm": 0.12942166774797892, + "learning_rate": 0.0009111545769715538, + "loss": 1.7053, + "step": 2391 + }, + { + "epoch": 0.21702050444565416, + "grad_norm": 0.12375814210698258, + "learning_rate": 0.0009110709517469341, + "loss": 1.6594, + "step": 2392 + }, + { + "epoch": 0.21711123208129196, + "grad_norm": 0.13413339794516027, + "learning_rate": 0.0009109872910262771, + "loss": 1.6487, + "step": 2393 + }, + { + "epoch": 0.21720195971692977, + "grad_norm": 0.12659711881437374, + "learning_rate": 0.0009109035948168069, + "loss": 1.6992, + "step": 2394 + }, + { + "epoch": 0.2172926873525676, + "grad_norm": 0.1258168564959803, + "learning_rate": 0.0009108198631257508, + "loss": 1.6937, + "step": 2395 + }, + { + "epoch": 0.2173834149882054, + "grad_norm": 0.12736528595061866, + "learning_rate": 0.0009107360959603391, + "loss": 1.6062, + "step": 2396 + }, + { + "epoch": 0.21747414262384324, + "grad_norm": 0.12976127007966728, + "learning_rate": 0.0009106522933278047, + "loss": 1.6628, + "step": 2397 + }, + 
{ + "epoch": 0.21756487025948104, + "grad_norm": 0.1272651439723199, + "learning_rate": 0.0009105684552353844, + "loss": 1.6683, + "step": 2398 + }, + { + "epoch": 0.21765559789511885, + "grad_norm": 0.13120216817916436, + "learning_rate": 0.0009104845816903174, + "loss": 1.6636, + "step": 2399 + }, + { + "epoch": 0.21774632553075668, + "grad_norm": 0.13071343972934785, + "learning_rate": 0.0009104006726998464, + "loss": 1.6821, + "step": 2400 + }, + { + "epoch": 0.21783705316639448, + "grad_norm": 0.12895897210383805, + "learning_rate": 0.0009103167282712167, + "loss": 1.639, + "step": 2401 + }, + { + "epoch": 0.2179277808020323, + "grad_norm": 0.1298061563556032, + "learning_rate": 0.0009102327484116771, + "loss": 1.6852, + "step": 2402 + }, + { + "epoch": 0.21801850843767012, + "grad_norm": 0.13124324620072703, + "learning_rate": 0.0009101487331284792, + "loss": 1.702, + "step": 2403 + }, + { + "epoch": 0.21810923607330793, + "grad_norm": 0.13697781135582443, + "learning_rate": 0.0009100646824288778, + "loss": 1.6616, + "step": 2404 + }, + { + "epoch": 0.21819996370894573, + "grad_norm": 0.13167854044305544, + "learning_rate": 0.0009099805963201305, + "loss": 1.7121, + "step": 2405 + }, + { + "epoch": 0.21829069134458357, + "grad_norm": 0.14063130934040935, + "learning_rate": 0.0009098964748094985, + "loss": 1.7013, + "step": 2406 + }, + { + "epoch": 0.21838141898022137, + "grad_norm": 0.12915708553805533, + "learning_rate": 0.0009098123179042452, + "loss": 1.6345, + "step": 2407 + }, + { + "epoch": 0.2184721466158592, + "grad_norm": 0.12918188703483388, + "learning_rate": 0.0009097281256116381, + "loss": 1.6689, + "step": 2408 + }, + { + "epoch": 0.218562874251497, + "grad_norm": 0.13427631435927417, + "learning_rate": 0.0009096438979389468, + "loss": 1.6667, + "step": 2409 + }, + { + "epoch": 0.2186536018871348, + "grad_norm": 0.1327057424971284, + "learning_rate": 0.0009095596348934448, + "loss": 1.6801, + "step": 2410 + }, + { + "epoch": 0.21874432952277265, + "grad_norm": 0.13087578301279748, + "learning_rate": 0.0009094753364824076, + "loss": 1.6658, + "step": 2411 + }, + { + "epoch": 0.21883505715841045, + "grad_norm": 0.13318569212282255, + "learning_rate": 0.000909391002713115, + "loss": 1.6581, + "step": 2412 + }, + { + "epoch": 0.21892578479404826, + "grad_norm": 0.12894459896081237, + "learning_rate": 0.0009093066335928488, + "loss": 1.6717, + "step": 2413 + }, + { + "epoch": 0.2190165124296861, + "grad_norm": 0.13296891886789677, + "learning_rate": 0.0009092222291288947, + "loss": 1.6829, + "step": 2414 + }, + { + "epoch": 0.2191072400653239, + "grad_norm": 0.1360349549425881, + "learning_rate": 0.0009091377893285406, + "loss": 1.6437, + "step": 2415 + }, + { + "epoch": 0.21919796770096173, + "grad_norm": 0.12811882931916144, + "learning_rate": 0.000909053314199078, + "loss": 1.658, + "step": 2416 + }, + { + "epoch": 0.21928869533659953, + "grad_norm": 0.12738019344104276, + "learning_rate": 0.0009089688037478016, + "loss": 1.6563, + "step": 2417 + }, + { + "epoch": 0.21937942297223734, + "grad_norm": 0.1314930075825668, + "learning_rate": 0.0009088842579820087, + "loss": 1.6665, + "step": 2418 + }, + { + "epoch": 0.21947015060787517, + "grad_norm": 0.12211857147446842, + "learning_rate": 0.000908799676909, + "loss": 1.6292, + "step": 2419 + }, + { + "epoch": 0.21956087824351297, + "grad_norm": 0.13498935835376596, + "learning_rate": 0.0009087150605360788, + "loss": 1.6303, + "step": 2420 + }, + { + "epoch": 0.21965160587915078, + "grad_norm": 0.12987192421076948, + 
"learning_rate": 0.0009086304088705518, + "loss": 1.6717, + "step": 2421 + }, + { + "epoch": 0.2197423335147886, + "grad_norm": 0.13329970262440236, + "learning_rate": 0.0009085457219197288, + "loss": 1.7145, + "step": 2422 + }, + { + "epoch": 0.21983306115042642, + "grad_norm": 0.14046898673965844, + "learning_rate": 0.0009084609996909226, + "loss": 1.6914, + "step": 2423 + }, + { + "epoch": 0.21992378878606422, + "grad_norm": 0.13207424250905586, + "learning_rate": 0.0009083762421914489, + "loss": 1.6678, + "step": 2424 + }, + { + "epoch": 0.22001451642170206, + "grad_norm": 0.1300357268020055, + "learning_rate": 0.0009082914494286266, + "loss": 1.6401, + "step": 2425 + }, + { + "epoch": 0.22010524405733986, + "grad_norm": 0.132487777465239, + "learning_rate": 0.0009082066214097772, + "loss": 1.678, + "step": 2426 + }, + { + "epoch": 0.2201959716929777, + "grad_norm": 0.14416964694811268, + "learning_rate": 0.0009081217581422259, + "loss": 1.6795, + "step": 2427 + }, + { + "epoch": 0.2202866993286155, + "grad_norm": 0.12468191213265989, + "learning_rate": 0.0009080368596333006, + "loss": 1.6546, + "step": 2428 + }, + { + "epoch": 0.2203774269642533, + "grad_norm": 0.12064945358905192, + "learning_rate": 0.0009079519258903326, + "loss": 1.6028, + "step": 2429 + }, + { + "epoch": 0.22046815459989114, + "grad_norm": 0.12804600974154234, + "learning_rate": 0.0009078669569206555, + "loss": 1.699, + "step": 2430 + }, + { + "epoch": 0.22055888223552894, + "grad_norm": 0.13153547900666623, + "learning_rate": 0.0009077819527316066, + "loss": 1.65, + "step": 2431 + }, + { + "epoch": 0.22064960987116675, + "grad_norm": 0.1289571666752174, + "learning_rate": 0.000907696913330526, + "loss": 1.6676, + "step": 2432 + }, + { + "epoch": 0.22074033750680458, + "grad_norm": 0.1254859521234181, + "learning_rate": 0.0009076118387247568, + "loss": 1.6589, + "step": 2433 + }, + { + "epoch": 0.22083106514244238, + "grad_norm": 0.13278225292416088, + "learning_rate": 0.0009075267289216451, + "loss": 1.6681, + "step": 2434 + }, + { + "epoch": 0.22092179277808022, + "grad_norm": 0.12882821879062345, + "learning_rate": 0.0009074415839285405, + "loss": 1.6511, + "step": 2435 + }, + { + "epoch": 0.22101252041371802, + "grad_norm": 0.12561995138178958, + "learning_rate": 0.0009073564037527951, + "loss": 1.6516, + "step": 2436 + }, + { + "epoch": 0.22110324804935583, + "grad_norm": 0.129543199788461, + "learning_rate": 0.0009072711884017641, + "loss": 1.6681, + "step": 2437 + }, + { + "epoch": 0.22119397568499366, + "grad_norm": 0.128300041032359, + "learning_rate": 0.000907185937882806, + "loss": 1.6505, + "step": 2438 + }, + { + "epoch": 0.22128470332063147, + "grad_norm": 0.12573059230179692, + "learning_rate": 0.000907100652203282, + "loss": 1.697, + "step": 2439 + }, + { + "epoch": 0.22137543095626927, + "grad_norm": 0.12433236081286786, + "learning_rate": 0.0009070153313705569, + "loss": 1.6723, + "step": 2440 + }, + { + "epoch": 0.2214661585919071, + "grad_norm": 0.13220353571970953, + "learning_rate": 0.0009069299753919979, + "loss": 1.6088, + "step": 2441 + }, + { + "epoch": 0.2215568862275449, + "grad_norm": 0.1328268760751491, + "learning_rate": 0.0009068445842749754, + "loss": 1.6017, + "step": 2442 + }, + { + "epoch": 0.2216476138631827, + "grad_norm": 0.13823410625468327, + "learning_rate": 0.0009067591580268632, + "loss": 1.6386, + "step": 2443 + }, + { + "epoch": 0.22173834149882055, + "grad_norm": 0.1321339372427065, + "learning_rate": 0.0009066736966550378, + "loss": 1.6983, + "step": 2444 + }, + { + 
"epoch": 0.22182906913445835, + "grad_norm": 0.13373687908421455, + "learning_rate": 0.0009065882001668786, + "loss": 1.662, + "step": 2445 + }, + { + "epoch": 0.22191979677009618, + "grad_norm": 0.12592188655398298, + "learning_rate": 0.0009065026685697686, + "loss": 1.7297, + "step": 2446 + }, + { + "epoch": 0.222010524405734, + "grad_norm": 0.12473652707428544, + "learning_rate": 0.0009064171018710931, + "loss": 1.6598, + "step": 2447 + }, + { + "epoch": 0.2221012520413718, + "grad_norm": 0.1276447057586685, + "learning_rate": 0.0009063315000782411, + "loss": 1.6682, + "step": 2448 + }, + { + "epoch": 0.22219197967700963, + "grad_norm": 0.12598226191809955, + "learning_rate": 0.0009062458631986039, + "loss": 1.6747, + "step": 2449 + }, + { + "epoch": 0.22228270731264743, + "grad_norm": 0.1294372638598116, + "learning_rate": 0.0009061601912395767, + "loss": 1.6588, + "step": 2450 + }, + { + "epoch": 0.22237343494828524, + "grad_norm": 0.13152463977496137, + "learning_rate": 0.0009060744842085573, + "loss": 1.6551, + "step": 2451 + }, + { + "epoch": 0.22246416258392307, + "grad_norm": 0.1264799360853359, + "learning_rate": 0.0009059887421129461, + "loss": 1.6649, + "step": 2452 + }, + { + "epoch": 0.22255489021956087, + "grad_norm": 0.13751276373597782, + "learning_rate": 0.0009059029649601474, + "loss": 1.6609, + "step": 2453 + }, + { + "epoch": 0.22264561785519868, + "grad_norm": 0.1280515473860174, + "learning_rate": 0.0009058171527575676, + "loss": 1.6215, + "step": 2454 + }, + { + "epoch": 0.2227363454908365, + "grad_norm": 0.13014494591747808, + "learning_rate": 0.000905731305512617, + "loss": 1.7112, + "step": 2455 + }, + { + "epoch": 0.22282707312647432, + "grad_norm": 0.12738817597192953, + "learning_rate": 0.0009056454232327083, + "loss": 1.6595, + "step": 2456 + }, + { + "epoch": 0.22291780076211215, + "grad_norm": 0.12470016686071726, + "learning_rate": 0.0009055595059252575, + "loss": 1.6432, + "step": 2457 + }, + { + "epoch": 0.22300852839774996, + "grad_norm": 0.12558538389342458, + "learning_rate": 0.0009054735535976837, + "loss": 1.7037, + "step": 2458 + }, + { + "epoch": 0.22309925603338776, + "grad_norm": 0.13395849698484863, + "learning_rate": 0.0009053875662574087, + "loss": 1.7241, + "step": 2459 + }, + { + "epoch": 0.2231899836690256, + "grad_norm": 0.13392546318174292, + "learning_rate": 0.0009053015439118577, + "loss": 1.6752, + "step": 2460 + }, + { + "epoch": 0.2232807113046634, + "grad_norm": 0.13377735988730766, + "learning_rate": 0.0009052154865684585, + "loss": 1.6298, + "step": 2461 + }, + { + "epoch": 0.2233714389403012, + "grad_norm": 0.12849135092390684, + "learning_rate": 0.0009051293942346425, + "loss": 1.6714, + "step": 2462 + }, + { + "epoch": 0.22346216657593904, + "grad_norm": 0.12590047733217918, + "learning_rate": 0.0009050432669178434, + "loss": 1.6688, + "step": 2463 + }, + { + "epoch": 0.22355289421157684, + "grad_norm": 0.12477098176861953, + "learning_rate": 0.0009049571046254988, + "loss": 1.6413, + "step": 2464 + }, + { + "epoch": 0.22364362184721467, + "grad_norm": 0.12700927068078915, + "learning_rate": 0.0009048709073650482, + "loss": 1.6293, + "step": 2465 + }, + { + "epoch": 0.22373434948285248, + "grad_norm": 0.12430316084368626, + "learning_rate": 0.0009047846751439353, + "loss": 1.6832, + "step": 2466 + }, + { + "epoch": 0.22382507711849028, + "grad_norm": 0.12612135081661727, + "learning_rate": 0.0009046984079696059, + "loss": 1.6535, + "step": 2467 + }, + { + "epoch": 0.22391580475412812, + "grad_norm": 0.119851409512733, + 
"learning_rate": 0.0009046121058495093, + "loss": 1.6488, + "step": 2468 + }, + { + "epoch": 0.22400653238976592, + "grad_norm": 0.12627738302339622, + "learning_rate": 0.0009045257687910978, + "loss": 1.6647, + "step": 2469 + }, + { + "epoch": 0.22409726002540373, + "grad_norm": 0.1245736750267001, + "learning_rate": 0.0009044393968018265, + "loss": 1.6963, + "step": 2470 + }, + { + "epoch": 0.22418798766104156, + "grad_norm": 0.12534169985934762, + "learning_rate": 0.0009043529898891538, + "loss": 1.6221, + "step": 2471 + }, + { + "epoch": 0.22427871529667937, + "grad_norm": 0.12971435501768527, + "learning_rate": 0.0009042665480605408, + "loss": 1.6819, + "step": 2472 + }, + { + "epoch": 0.22436944293231717, + "grad_norm": 0.12546989658724986, + "learning_rate": 0.0009041800713234517, + "loss": 1.6292, + "step": 2473 + }, + { + "epoch": 0.224460170567955, + "grad_norm": 0.12469236960229878, + "learning_rate": 0.0009040935596853539, + "loss": 1.6683, + "step": 2474 + }, + { + "epoch": 0.2245508982035928, + "grad_norm": 0.13354715250136462, + "learning_rate": 0.0009040070131537177, + "loss": 1.676, + "step": 2475 + }, + { + "epoch": 0.22464162583923064, + "grad_norm": 0.12726523979252993, + "learning_rate": 0.0009039204317360163, + "loss": 1.6694, + "step": 2476 + }, + { + "epoch": 0.22473235347486845, + "grad_norm": 0.12249239469172266, + "learning_rate": 0.0009038338154397261, + "loss": 1.6366, + "step": 2477 + }, + { + "epoch": 0.22482308111050625, + "grad_norm": 0.12368270209066906, + "learning_rate": 0.0009037471642723265, + "loss": 1.6907, + "step": 2478 + }, + { + "epoch": 0.22491380874614408, + "grad_norm": 0.12730844808319958, + "learning_rate": 0.0009036604782412997, + "loss": 1.6442, + "step": 2479 + }, + { + "epoch": 0.2250045363817819, + "grad_norm": 0.12831990664833792, + "learning_rate": 0.0009035737573541312, + "loss": 1.6551, + "step": 2480 + }, + { + "epoch": 0.2250952640174197, + "grad_norm": 0.13004587547876595, + "learning_rate": 0.0009034870016183092, + "loss": 1.6627, + "step": 2481 + }, + { + "epoch": 0.22518599165305753, + "grad_norm": 0.12603883984030229, + "learning_rate": 0.0009034002110413251, + "loss": 1.6865, + "step": 2482 + }, + { + "epoch": 0.22527671928869533, + "grad_norm": 0.1253579199050264, + "learning_rate": 0.0009033133856306733, + "loss": 1.6587, + "step": 2483 + }, + { + "epoch": 0.22536744692433316, + "grad_norm": 0.13153854973612697, + "learning_rate": 0.0009032265253938513, + "loss": 1.6671, + "step": 2484 + }, + { + "epoch": 0.22545817455997097, + "grad_norm": 0.12787176948836945, + "learning_rate": 0.0009031396303383595, + "loss": 1.6616, + "step": 2485 + }, + { + "epoch": 0.22554890219560877, + "grad_norm": 0.12431701442494028, + "learning_rate": 0.0009030527004717009, + "loss": 1.6164, + "step": 2486 + }, + { + "epoch": 0.2256396298312466, + "grad_norm": 0.1299412944250751, + "learning_rate": 0.0009029657358013825, + "loss": 1.6468, + "step": 2487 + }, + { + "epoch": 0.2257303574668844, + "grad_norm": 0.12855641500614373, + "learning_rate": 0.0009028787363349133, + "loss": 1.6839, + "step": 2488 + }, + { + "epoch": 0.22582108510252222, + "grad_norm": 0.1341153013183056, + "learning_rate": 0.0009027917020798058, + "loss": 1.6782, + "step": 2489 + }, + { + "epoch": 0.22591181273816005, + "grad_norm": 0.1257432002949535, + "learning_rate": 0.0009027046330435755, + "loss": 1.6831, + "step": 2490 + }, + { + "epoch": 0.22600254037379786, + "grad_norm": 0.1385343235996702, + "learning_rate": 0.0009026175292337409, + "loss": 1.6711, + "step": 2491 
+ }, + { + "epoch": 0.22609326800943566, + "grad_norm": 0.13059103579390902, + "learning_rate": 0.0009025303906578231, + "loss": 1.677, + "step": 2492 + }, + { + "epoch": 0.2261839956450735, + "grad_norm": 0.13556987844634336, + "learning_rate": 0.0009024432173233468, + "loss": 1.6724, + "step": 2493 + }, + { + "epoch": 0.2262747232807113, + "grad_norm": 0.12694247738229375, + "learning_rate": 0.0009023560092378393, + "loss": 1.6703, + "step": 2494 + }, + { + "epoch": 0.22636545091634913, + "grad_norm": 0.12353957179010627, + "learning_rate": 0.0009022687664088314, + "loss": 1.6768, + "step": 2495 + }, + { + "epoch": 0.22645617855198694, + "grad_norm": 0.12761752450041725, + "learning_rate": 0.000902181488843856, + "loss": 1.6943, + "step": 2496 + }, + { + "epoch": 0.22654690618762474, + "grad_norm": 0.1251646339563608, + "learning_rate": 0.0009020941765504498, + "loss": 1.6232, + "step": 2497 + }, + { + "epoch": 0.22663763382326257, + "grad_norm": 0.12324937806482753, + "learning_rate": 0.0009020068295361522, + "loss": 1.6484, + "step": 2498 + }, + { + "epoch": 0.22672836145890038, + "grad_norm": 0.13016956597696852, + "learning_rate": 0.0009019194478085055, + "loss": 1.6234, + "step": 2499 + }, + { + "epoch": 0.22681908909453818, + "grad_norm": 0.12628562139878513, + "learning_rate": 0.0009018320313750554, + "loss": 1.6668, + "step": 2500 + }, + { + "epoch": 0.22690981673017602, + "grad_norm": 0.12491546529151051, + "learning_rate": 0.0009017445802433501, + "loss": 1.6636, + "step": 2501 + }, + { + "epoch": 0.22700054436581382, + "grad_norm": 0.12797667527483314, + "learning_rate": 0.0009016570944209413, + "loss": 1.6194, + "step": 2502 + }, + { + "epoch": 0.22709127200145166, + "grad_norm": 0.130734963152287, + "learning_rate": 0.000901569573915383, + "loss": 1.663, + "step": 2503 + }, + { + "epoch": 0.22718199963708946, + "grad_norm": 0.12418773554154443, + "learning_rate": 0.0009014820187342327, + "loss": 1.6342, + "step": 2504 + }, + { + "epoch": 0.22727272727272727, + "grad_norm": 0.1280087265635555, + "learning_rate": 0.0009013944288850511, + "loss": 1.6691, + "step": 2505 + }, + { + "epoch": 0.2273634549083651, + "grad_norm": 0.12556935237470243, + "learning_rate": 0.0009013068043754014, + "loss": 1.6652, + "step": 2506 + }, + { + "epoch": 0.2274541825440029, + "grad_norm": 0.12795232343310983, + "learning_rate": 0.0009012191452128499, + "loss": 1.6012, + "step": 2507 + }, + { + "epoch": 0.2275449101796407, + "grad_norm": 0.12358183575452765, + "learning_rate": 0.000901131451404966, + "loss": 1.6763, + "step": 2508 + }, + { + "epoch": 0.22763563781527854, + "grad_norm": 0.12264183712503618, + "learning_rate": 0.0009010437229593223, + "loss": 1.668, + "step": 2509 + }, + { + "epoch": 0.22772636545091635, + "grad_norm": 0.12931437877258442, + "learning_rate": 0.000900955959883494, + "loss": 1.6332, + "step": 2510 + }, + { + "epoch": 0.22781709308655415, + "grad_norm": 0.12479015921718244, + "learning_rate": 0.0009008681621850594, + "loss": 1.69, + "step": 2511 + }, + { + "epoch": 0.22790782072219198, + "grad_norm": 0.12931401460283304, + "learning_rate": 0.0009007803298716, + "loss": 1.6934, + "step": 2512 + }, + { + "epoch": 0.2279985483578298, + "grad_norm": 0.12451529960983093, + "learning_rate": 0.0009006924629507, + "loss": 1.6612, + "step": 2513 + }, + { + "epoch": 0.22808927599346762, + "grad_norm": 0.11904597294491716, + "learning_rate": 0.0009006045614299467, + "loss": 1.6306, + "step": 2514 + }, + { + "epoch": 0.22818000362910543, + "grad_norm": 0.12365737170654045, + 
"learning_rate": 0.0009005166253169307, + "loss": 1.6616, + "step": 2515 + }, + { + "epoch": 0.22827073126474323, + "grad_norm": 0.12680481014150805, + "learning_rate": 0.0009004286546192449, + "loss": 1.6649, + "step": 2516 + }, + { + "epoch": 0.22836145890038106, + "grad_norm": 0.13026354051837233, + "learning_rate": 0.0009003406493444856, + "loss": 1.6964, + "step": 2517 + }, + { + "epoch": 0.22845218653601887, + "grad_norm": 0.12639328481825213, + "learning_rate": 0.0009002526095002526, + "loss": 1.6638, + "step": 2518 + }, + { + "epoch": 0.22854291417165667, + "grad_norm": 0.1220312377940576, + "learning_rate": 0.0009001645350941475, + "loss": 1.6616, + "step": 2519 + }, + { + "epoch": 0.2286336418072945, + "grad_norm": 0.12564664909571938, + "learning_rate": 0.0009000764261337759, + "loss": 1.6323, + "step": 2520 + }, + { + "epoch": 0.2287243694429323, + "grad_norm": 0.12299888491933647, + "learning_rate": 0.000899988282626746, + "loss": 1.6448, + "step": 2521 + }, + { + "epoch": 0.22881509707857015, + "grad_norm": 0.12689722418413624, + "learning_rate": 0.0008999001045806688, + "loss": 1.6595, + "step": 2522 + }, + { + "epoch": 0.22890582471420795, + "grad_norm": 0.11961685230166434, + "learning_rate": 0.0008998118920031589, + "loss": 1.6721, + "step": 2523 + }, + { + "epoch": 0.22899655234984576, + "grad_norm": 0.12496426219435981, + "learning_rate": 0.0008997236449018328, + "loss": 1.6445, + "step": 2524 + }, + { + "epoch": 0.2290872799854836, + "grad_norm": 0.12476316871145093, + "learning_rate": 0.0008996353632843112, + "loss": 1.6476, + "step": 2525 + }, + { + "epoch": 0.2291780076211214, + "grad_norm": 0.13307912975108308, + "learning_rate": 0.0008995470471582172, + "loss": 1.6675, + "step": 2526 + }, + { + "epoch": 0.2292687352567592, + "grad_norm": 0.12794492803650465, + "learning_rate": 0.0008994586965311768, + "loss": 1.6429, + "step": 2527 + }, + { + "epoch": 0.22935946289239703, + "grad_norm": 0.12264892360147571, + "learning_rate": 0.0008993703114108189, + "loss": 1.6833, + "step": 2528 + }, + { + "epoch": 0.22945019052803484, + "grad_norm": 0.12679814834800524, + "learning_rate": 0.000899281891804776, + "loss": 1.656, + "step": 2529 + }, + { + "epoch": 0.22954091816367264, + "grad_norm": 0.12194389346764475, + "learning_rate": 0.0008991934377206828, + "loss": 1.6882, + "step": 2530 + }, + { + "epoch": 0.22963164579931047, + "grad_norm": 0.12975044861776883, + "learning_rate": 0.0008991049491661774, + "loss": 1.6519, + "step": 2531 + }, + { + "epoch": 0.22972237343494828, + "grad_norm": 0.12316021111333299, + "learning_rate": 0.0008990164261489007, + "loss": 1.6436, + "step": 2532 + }, + { + "epoch": 0.2298131010705861, + "grad_norm": 0.128367837286541, + "learning_rate": 0.0008989278686764968, + "loss": 1.6694, + "step": 2533 + }, + { + "epoch": 0.22990382870622392, + "grad_norm": 0.12796102033666762, + "learning_rate": 0.0008988392767566128, + "loss": 1.6726, + "step": 2534 + }, + { + "epoch": 0.22999455634186172, + "grad_norm": 0.12640854295331627, + "learning_rate": 0.0008987506503968984, + "loss": 1.6706, + "step": 2535 + }, + { + "epoch": 0.23008528397749956, + "grad_norm": 0.12239357010221287, + "learning_rate": 0.0008986619896050066, + "loss": 1.6714, + "step": 2536 + }, + { + "epoch": 0.23017601161313736, + "grad_norm": 0.12701111680324728, + "learning_rate": 0.0008985732943885931, + "loss": 1.6677, + "step": 2537 + }, + { + "epoch": 0.23026673924877517, + "grad_norm": 0.12159773496661302, + "learning_rate": 0.000898484564755317, + "loss": 1.6332, + "step": 
2538 + }, + { + "epoch": 0.230357466884413, + "grad_norm": 0.12504467855563187, + "learning_rate": 0.0008983958007128401, + "loss": 1.66, + "step": 2539 + }, + { + "epoch": 0.2304481945200508, + "grad_norm": 0.1220331139821401, + "learning_rate": 0.0008983070022688269, + "loss": 1.6428, + "step": 2540 + }, + { + "epoch": 0.23053892215568864, + "grad_norm": 0.12713404314620896, + "learning_rate": 0.0008982181694309455, + "loss": 1.6459, + "step": 2541 + }, + { + "epoch": 0.23062964979132644, + "grad_norm": 0.12687044619158488, + "learning_rate": 0.0008981293022068664, + "loss": 1.6462, + "step": 2542 + }, + { + "epoch": 0.23072037742696425, + "grad_norm": 0.12958458795884142, + "learning_rate": 0.0008980404006042634, + "loss": 1.6559, + "step": 2543 + }, + { + "epoch": 0.23081110506260208, + "grad_norm": 0.1264502526861309, + "learning_rate": 0.0008979514646308131, + "loss": 1.627, + "step": 2544 + }, + { + "epoch": 0.23090183269823988, + "grad_norm": 0.12665697654294694, + "learning_rate": 0.0008978624942941952, + "loss": 1.651, + "step": 2545 + }, + { + "epoch": 0.2309925603338777, + "grad_norm": 0.12295523660927168, + "learning_rate": 0.0008977734896020924, + "loss": 1.6564, + "step": 2546 + }, + { + "epoch": 0.23108328796951552, + "grad_norm": 0.12174436178532863, + "learning_rate": 0.0008976844505621899, + "loss": 1.6271, + "step": 2547 + }, + { + "epoch": 0.23117401560515333, + "grad_norm": 0.11953551669430948, + "learning_rate": 0.0008975953771821766, + "loss": 1.6264, + "step": 2548 + }, + { + "epoch": 0.23126474324079113, + "grad_norm": 0.12970555187847047, + "learning_rate": 0.000897506269469744, + "loss": 1.6571, + "step": 2549 + }, + { + "epoch": 0.23135547087642896, + "grad_norm": 0.12910991769108487, + "learning_rate": 0.0008974171274325863, + "loss": 1.6363, + "step": 2550 + }, + { + "epoch": 0.23144619851206677, + "grad_norm": 0.12481991495303521, + "learning_rate": 0.0008973279510784011, + "loss": 1.701, + "step": 2551 + }, + { + "epoch": 0.2315369261477046, + "grad_norm": 0.12221862565910348, + "learning_rate": 0.0008972387404148888, + "loss": 1.6715, + "step": 2552 + }, + { + "epoch": 0.2316276537833424, + "grad_norm": 0.12419204992295992, + "learning_rate": 0.0008971494954497527, + "loss": 1.6649, + "step": 2553 + }, + { + "epoch": 0.2317183814189802, + "grad_norm": 0.13049469817306036, + "learning_rate": 0.0008970602161906991, + "loss": 1.6675, + "step": 2554 + }, + { + "epoch": 0.23180910905461805, + "grad_norm": 0.12700195657799704, + "learning_rate": 0.0008969709026454373, + "loss": 1.6908, + "step": 2555 + }, + { + "epoch": 0.23189983669025585, + "grad_norm": 0.12355460830946663, + "learning_rate": 0.0008968815548216797, + "loss": 1.6706, + "step": 2556 + }, + { + "epoch": 0.23199056432589366, + "grad_norm": 0.124447253896416, + "learning_rate": 0.0008967921727271412, + "loss": 1.6523, + "step": 2557 + }, + { + "epoch": 0.2320812919615315, + "grad_norm": 0.12026877290910122, + "learning_rate": 0.0008967027563695401, + "loss": 1.6356, + "step": 2558 + }, + { + "epoch": 0.2321720195971693, + "grad_norm": 0.12433524030147465, + "learning_rate": 0.0008966133057565977, + "loss": 1.6414, + "step": 2559 + }, + { + "epoch": 0.23226274723280713, + "grad_norm": 0.12146274163928822, + "learning_rate": 0.0008965238208960375, + "loss": 1.6547, + "step": 2560 + }, + { + "epoch": 0.23235347486844493, + "grad_norm": 0.12321309942348946, + "learning_rate": 0.0008964343017955874, + "loss": 1.6544, + "step": 2561 + }, + { + "epoch": 0.23244420250408274, + "grad_norm": 
0.12808644489092727, + "learning_rate": 0.0008963447484629767, + "loss": 1.7066, + "step": 2562 + }, + { + "epoch": 0.23253493013972057, + "grad_norm": 0.12673777064353786, + "learning_rate": 0.0008962551609059384, + "loss": 1.6452, + "step": 2563 + }, + { + "epoch": 0.23262565777535837, + "grad_norm": 0.12777117878828606, + "learning_rate": 0.0008961655391322088, + "loss": 1.6299, + "step": 2564 + }, + { + "epoch": 0.23271638541099618, + "grad_norm": 0.12778435873366542, + "learning_rate": 0.0008960758831495264, + "loss": 1.68, + "step": 2565 + }, + { + "epoch": 0.232807113046634, + "grad_norm": 0.12256872299264239, + "learning_rate": 0.0008959861929656331, + "loss": 1.6556, + "step": 2566 + }, + { + "epoch": 0.23289784068227182, + "grad_norm": 0.13058945598960944, + "learning_rate": 0.0008958964685882736, + "loss": 1.6658, + "step": 2567 + }, + { + "epoch": 0.23298856831790962, + "grad_norm": 0.12524197537732507, + "learning_rate": 0.0008958067100251958, + "loss": 1.6314, + "step": 2568 + }, + { + "epoch": 0.23307929595354746, + "grad_norm": 0.12417032684418952, + "learning_rate": 0.0008957169172841504, + "loss": 1.6552, + "step": 2569 + }, + { + "epoch": 0.23317002358918526, + "grad_norm": 0.13249946131860213, + "learning_rate": 0.0008956270903728906, + "loss": 1.6651, + "step": 2570 + }, + { + "epoch": 0.2332607512248231, + "grad_norm": 0.12354007908164559, + "learning_rate": 0.0008955372292991734, + "loss": 1.6523, + "step": 2571 + }, + { + "epoch": 0.2333514788604609, + "grad_norm": 0.12612381393164854, + "learning_rate": 0.0008954473340707581, + "loss": 1.6539, + "step": 2572 + }, + { + "epoch": 0.2334422064960987, + "grad_norm": 0.12761383940236687, + "learning_rate": 0.0008953574046954071, + "loss": 1.6769, + "step": 2573 + }, + { + "epoch": 0.23353293413173654, + "grad_norm": 0.13260518901170906, + "learning_rate": 0.0008952674411808861, + "loss": 1.6932, + "step": 2574 + }, + { + "epoch": 0.23362366176737434, + "grad_norm": 0.12532416054800322, + "learning_rate": 0.0008951774435349634, + "loss": 1.664, + "step": 2575 + }, + { + "epoch": 0.23371438940301215, + "grad_norm": 0.13080871107095202, + "learning_rate": 0.00089508741176541, + "loss": 1.6705, + "step": 2576 + }, + { + "epoch": 0.23380511703864998, + "grad_norm": 0.12915760474285545, + "learning_rate": 0.0008949973458800003, + "loss": 1.6992, + "step": 2577 + }, + { + "epoch": 0.23389584467428778, + "grad_norm": 0.12328726431750466, + "learning_rate": 0.0008949072458865117, + "loss": 1.6835, + "step": 2578 + }, + { + "epoch": 0.23398657230992562, + "grad_norm": 0.121289130820108, + "learning_rate": 0.0008948171117927242, + "loss": 1.6869, + "step": 2579 + }, + { + "epoch": 0.23407729994556342, + "grad_norm": 0.12464116876838048, + "learning_rate": 0.000894726943606421, + "loss": 1.701, + "step": 2580 + }, + { + "epoch": 0.23416802758120123, + "grad_norm": 0.11966585656924783, + "learning_rate": 0.000894636741335388, + "loss": 1.6601, + "step": 2581 + }, + { + "epoch": 0.23425875521683906, + "grad_norm": 0.12496927026689536, + "learning_rate": 0.000894546504987414, + "loss": 1.6329, + "step": 2582 + }, + { + "epoch": 0.23434948285247686, + "grad_norm": 0.12199885450481174, + "learning_rate": 0.0008944562345702913, + "loss": 1.62, + "step": 2583 + }, + { + "epoch": 0.23444021048811467, + "grad_norm": 0.1242879315176768, + "learning_rate": 0.0008943659300918148, + "loss": 1.6691, + "step": 2584 + }, + { + "epoch": 0.2345309381237525, + "grad_norm": 0.12695072957909329, + "learning_rate": 0.0008942755915597819, + "loss": 
1.6477, + "step": 2585 + }, + { + "epoch": 0.2346216657593903, + "grad_norm": 0.12699609541291482, + "learning_rate": 0.0008941852189819936, + "loss": 1.5947, + "step": 2586 + }, + { + "epoch": 0.2347123933950281, + "grad_norm": 0.13280635651603717, + "learning_rate": 0.0008940948123662536, + "loss": 1.6943, + "step": 2587 + }, + { + "epoch": 0.23480312103066595, + "grad_norm": 0.12990556715487692, + "learning_rate": 0.0008940043717203684, + "loss": 1.6845, + "step": 2588 + }, + { + "epoch": 0.23489384866630375, + "grad_norm": 0.12472761126481946, + "learning_rate": 0.0008939138970521475, + "loss": 1.6555, + "step": 2589 + }, + { + "epoch": 0.23498457630194158, + "grad_norm": 0.12635806443229314, + "learning_rate": 0.0008938233883694038, + "loss": 1.6583, + "step": 2590 + }, + { + "epoch": 0.2350753039375794, + "grad_norm": 0.1254861938985054, + "learning_rate": 0.0008937328456799522, + "loss": 1.6719, + "step": 2591 + }, + { + "epoch": 0.2351660315732172, + "grad_norm": 0.1205319560766688, + "learning_rate": 0.0008936422689916115, + "loss": 1.6705, + "step": 2592 + }, + { + "epoch": 0.23525675920885503, + "grad_norm": 0.12689824600467525, + "learning_rate": 0.000893551658312203, + "loss": 1.6318, + "step": 2593 + }, + { + "epoch": 0.23534748684449283, + "grad_norm": 0.1272869350479097, + "learning_rate": 0.0008934610136495506, + "loss": 1.6434, + "step": 2594 + }, + { + "epoch": 0.23543821448013064, + "grad_norm": 0.130633780119154, + "learning_rate": 0.0008933703350114817, + "loss": 1.6323, + "step": 2595 + }, + { + "epoch": 0.23552894211576847, + "grad_norm": 0.12279297504011645, + "learning_rate": 0.0008932796224058264, + "loss": 1.6352, + "step": 2596 + }, + { + "epoch": 0.23561966975140627, + "grad_norm": 0.1274534539552033, + "learning_rate": 0.0008931888758404178, + "loss": 1.6307, + "step": 2597 + }, + { + "epoch": 0.2357103973870441, + "grad_norm": 0.12719398928311515, + "learning_rate": 0.0008930980953230918, + "loss": 1.6412, + "step": 2598 + }, + { + "epoch": 0.2358011250226819, + "grad_norm": 0.12540321210042724, + "learning_rate": 0.0008930072808616873, + "loss": 1.6696, + "step": 2599 + }, + { + "epoch": 0.23589185265831972, + "grad_norm": 0.12835304188852242, + "learning_rate": 0.0008929164324640462, + "loss": 1.6273, + "step": 2600 + }, + { + "epoch": 0.23598258029395755, + "grad_norm": 0.1260685999001611, + "learning_rate": 0.0008928255501380132, + "loss": 1.6933, + "step": 2601 + }, + { + "epoch": 0.23607330792959536, + "grad_norm": 0.12372140344000811, + "learning_rate": 0.0008927346338914361, + "loss": 1.6498, + "step": 2602 + }, + { + "epoch": 0.23616403556523316, + "grad_norm": 0.12059613304789847, + "learning_rate": 0.0008926436837321655, + "loss": 1.6354, + "step": 2603 + }, + { + "epoch": 0.236254763200871, + "grad_norm": 0.1274927267647292, + "learning_rate": 0.0008925526996680548, + "loss": 1.6065, + "step": 2604 + }, + { + "epoch": 0.2363454908365088, + "grad_norm": 0.12407521718978233, + "learning_rate": 0.0008924616817069608, + "loss": 1.6741, + "step": 2605 + }, + { + "epoch": 0.2364362184721466, + "grad_norm": 0.12132829191696919, + "learning_rate": 0.0008923706298567427, + "loss": 1.6887, + "step": 2606 + }, + { + "epoch": 0.23652694610778444, + "grad_norm": 0.1238116783092353, + "learning_rate": 0.0008922795441252629, + "loss": 1.6757, + "step": 2607 + }, + { + "epoch": 0.23661767374342224, + "grad_norm": 0.12277985694251806, + "learning_rate": 0.0008921884245203866, + "loss": 1.6653, + "step": 2608 + }, + { + "epoch": 0.23670840137906007, + "grad_norm": 
0.12577515105468218, + "learning_rate": 0.0008920972710499819, + "loss": 1.684, + "step": 2609 + }, + { + "epoch": 0.23679912901469788, + "grad_norm": 0.12421778082044964, + "learning_rate": 0.0008920060837219203, + "loss": 1.6757, + "step": 2610 + }, + { + "epoch": 0.23688985665033568, + "grad_norm": 0.12252263439265836, + "learning_rate": 0.0008919148625440755, + "loss": 1.6717, + "step": 2611 + }, + { + "epoch": 0.23698058428597352, + "grad_norm": 0.1255383251747309, + "learning_rate": 0.0008918236075243246, + "loss": 1.6529, + "step": 2612 + }, + { + "epoch": 0.23707131192161132, + "grad_norm": 0.12716331461312724, + "learning_rate": 0.0008917323186705474, + "loss": 1.6491, + "step": 2613 + }, + { + "epoch": 0.23716203955724913, + "grad_norm": 0.12377283745484173, + "learning_rate": 0.0008916409959906267, + "loss": 1.6304, + "step": 2614 + }, + { + "epoch": 0.23725276719288696, + "grad_norm": 0.1294106375463457, + "learning_rate": 0.0008915496394924484, + "loss": 1.6349, + "step": 2615 + }, + { + "epoch": 0.23734349482852476, + "grad_norm": 0.1281609203537657, + "learning_rate": 0.000891458249183901, + "loss": 1.7218, + "step": 2616 + }, + { + "epoch": 0.2374342224641626, + "grad_norm": 0.1260923260376898, + "learning_rate": 0.0008913668250728759, + "loss": 1.6977, + "step": 2617 + }, + { + "epoch": 0.2375249500998004, + "grad_norm": 0.12489170876784271, + "learning_rate": 0.000891275367167268, + "loss": 1.6421, + "step": 2618 + }, + { + "epoch": 0.2376156777354382, + "grad_norm": 0.12296790593798536, + "learning_rate": 0.0008911838754749743, + "loss": 1.6243, + "step": 2619 + }, + { + "epoch": 0.23770640537107604, + "grad_norm": 0.12288314337105258, + "learning_rate": 0.0008910923500038955, + "loss": 1.666, + "step": 2620 + }, + { + "epoch": 0.23779713300671385, + "grad_norm": 0.13128960738462156, + "learning_rate": 0.0008910007907619344, + "loss": 1.6389, + "step": 2621 + }, + { + "epoch": 0.23788786064235165, + "grad_norm": 0.1264304519048713, + "learning_rate": 0.0008909091977569976, + "loss": 1.6973, + "step": 2622 + }, + { + "epoch": 0.23797858827798948, + "grad_norm": 0.12590783301058822, + "learning_rate": 0.0008908175709969936, + "loss": 1.6383, + "step": 2623 + }, + { + "epoch": 0.2380693159136273, + "grad_norm": 0.12750473443872545, + "learning_rate": 0.0008907259104898352, + "loss": 1.6696, + "step": 2624 + }, + { + "epoch": 0.2381600435492651, + "grad_norm": 0.12508610412097637, + "learning_rate": 0.0008906342162434366, + "loss": 1.6646, + "step": 2625 + }, + { + "epoch": 0.23825077118490293, + "grad_norm": 0.12379206673328597, + "learning_rate": 0.000890542488265716, + "loss": 1.6478, + "step": 2626 + }, + { + "epoch": 0.23834149882054073, + "grad_norm": 0.12284326640652057, + "learning_rate": 0.0008904507265645938, + "loss": 1.6518, + "step": 2627 + }, + { + "epoch": 0.23843222645617856, + "grad_norm": 0.12079390478555423, + "learning_rate": 0.0008903589311479939, + "loss": 1.6663, + "step": 2628 + }, + { + "epoch": 0.23852295409181637, + "grad_norm": 0.12392203632901697, + "learning_rate": 0.0008902671020238427, + "loss": 1.6654, + "step": 2629 + }, + { + "epoch": 0.23861368172745417, + "grad_norm": 0.12342102530309346, + "learning_rate": 0.0008901752392000699, + "loss": 1.6512, + "step": 2630 + }, + { + "epoch": 0.238704409363092, + "grad_norm": 0.13145046966041604, + "learning_rate": 0.0008900833426846075, + "loss": 1.6448, + "step": 2631 + }, + { + "epoch": 0.2387951369987298, + "grad_norm": 0.12893830893040464, + "learning_rate": 0.000889991412485391, + "loss": 
1.6656, + "step": 2632 + }, + { + "epoch": 0.23888586463436762, + "grad_norm": 0.12229154357769578, + "learning_rate": 0.0008898994486103587, + "loss": 1.6538, + "step": 2633 + }, + { + "epoch": 0.23897659227000545, + "grad_norm": 0.1204309964503294, + "learning_rate": 0.0008898074510674515, + "loss": 1.6446, + "step": 2634 + }, + { + "epoch": 0.23906731990564326, + "grad_norm": 0.11908612708224803, + "learning_rate": 0.0008897154198646133, + "loss": 1.6123, + "step": 2635 + }, + { + "epoch": 0.2391580475412811, + "grad_norm": 0.11816724199177862, + "learning_rate": 0.0008896233550097912, + "loss": 1.6366, + "step": 2636 + }, + { + "epoch": 0.2392487751769189, + "grad_norm": 0.13170669811724434, + "learning_rate": 0.000889531256510935, + "loss": 1.6863, + "step": 2637 + }, + { + "epoch": 0.2393395028125567, + "grad_norm": 0.12140501801109939, + "learning_rate": 0.0008894391243759974, + "loss": 1.6721, + "step": 2638 + }, + { + "epoch": 0.23943023044819453, + "grad_norm": 0.12243197174283486, + "learning_rate": 0.000889346958612934, + "loss": 1.6298, + "step": 2639 + }, + { + "epoch": 0.23952095808383234, + "grad_norm": 0.12190819829496995, + "learning_rate": 0.0008892547592297033, + "loss": 1.5779, + "step": 2640 + }, + { + "epoch": 0.23961168571947014, + "grad_norm": 0.12205385221636017, + "learning_rate": 0.0008891625262342669, + "loss": 1.6509, + "step": 2641 + }, + { + "epoch": 0.23970241335510797, + "grad_norm": 0.12691714254722825, + "learning_rate": 0.0008890702596345889, + "loss": 1.6555, + "step": 2642 + }, + { + "epoch": 0.23979314099074578, + "grad_norm": 0.11863565886031271, + "learning_rate": 0.0008889779594386367, + "loss": 1.6582, + "step": 2643 + }, + { + "epoch": 0.23988386862638358, + "grad_norm": 0.12521817627170626, + "learning_rate": 0.0008888856256543804, + "loss": 1.6488, + "step": 2644 + }, + { + "epoch": 0.23997459626202142, + "grad_norm": 0.11942720371199218, + "learning_rate": 0.0008887932582897929, + "loss": 1.6684, + "step": 2645 + }, + { + "epoch": 0.24006532389765922, + "grad_norm": 0.12516337562234572, + "learning_rate": 0.0008887008573528504, + "loss": 1.6947, + "step": 2646 + }, + { + "epoch": 0.24015605153329705, + "grad_norm": 0.12033900988701039, + "learning_rate": 0.0008886084228515313, + "loss": 1.6711, + "step": 2647 + }, + { + "epoch": 0.24024677916893486, + "grad_norm": 0.1268626918938578, + "learning_rate": 0.0008885159547938178, + "loss": 1.6743, + "step": 2648 + }, + { + "epoch": 0.24033750680457266, + "grad_norm": 0.12689847749447786, + "learning_rate": 0.0008884234531876943, + "loss": 1.6482, + "step": 2649 + }, + { + "epoch": 0.2404282344402105, + "grad_norm": 0.12014773264244913, + "learning_rate": 0.0008883309180411484, + "loss": 1.6601, + "step": 2650 + }, + { + "epoch": 0.2405189620758483, + "grad_norm": 0.118642820931217, + "learning_rate": 0.0008882383493621706, + "loss": 1.6462, + "step": 2651 + }, + { + "epoch": 0.2406096897114861, + "grad_norm": 0.12327556989247739, + "learning_rate": 0.0008881457471587539, + "loss": 1.7047, + "step": 2652 + }, + { + "epoch": 0.24070041734712394, + "grad_norm": 0.11886035282738577, + "learning_rate": 0.0008880531114388948, + "loss": 1.6521, + "step": 2653 + }, + { + "epoch": 0.24079114498276175, + "grad_norm": 0.12080160679515697, + "learning_rate": 0.0008879604422105925, + "loss": 1.6112, + "step": 2654 + }, + { + "epoch": 0.24088187261839955, + "grad_norm": 0.12016274174394959, + "learning_rate": 0.0008878677394818487, + "loss": 1.6582, + "step": 2655 + }, + { + "epoch": 0.24097260025403738, + 
"grad_norm": 0.12537838226357384, + "learning_rate": 0.0008877750032606683, + "loss": 1.6469, + "step": 2656 + }, + { + "epoch": 0.2410633278896752, + "grad_norm": 0.1258324855816701, + "learning_rate": 0.0008876822335550594, + "loss": 1.7044, + "step": 2657 + }, + { + "epoch": 0.24115405552531302, + "grad_norm": 0.1262797280637909, + "learning_rate": 0.0008875894303730323, + "loss": 1.6559, + "step": 2658 + }, + { + "epoch": 0.24124478316095083, + "grad_norm": 0.12736190990857552, + "learning_rate": 0.0008874965937226009, + "loss": 1.6413, + "step": 2659 + }, + { + "epoch": 0.24133551079658863, + "grad_norm": 0.1287928931014607, + "learning_rate": 0.0008874037236117815, + "loss": 1.6918, + "step": 2660 + }, + { + "epoch": 0.24142623843222646, + "grad_norm": 0.12859245757356744, + "learning_rate": 0.0008873108200485936, + "loss": 1.6512, + "step": 2661 + }, + { + "epoch": 0.24151696606786427, + "grad_norm": 0.11919873995631454, + "learning_rate": 0.0008872178830410592, + "loss": 1.6342, + "step": 2662 + }, + { + "epoch": 0.24160769370350207, + "grad_norm": 0.12284230032559698, + "learning_rate": 0.0008871249125972035, + "loss": 1.6629, + "step": 2663 + }, + { + "epoch": 0.2416984213391399, + "grad_norm": 0.12455390942359283, + "learning_rate": 0.0008870319087250546, + "loss": 1.6332, + "step": 2664 + }, + { + "epoch": 0.2417891489747777, + "grad_norm": 0.1232963392336127, + "learning_rate": 0.0008869388714326433, + "loss": 1.6758, + "step": 2665 + }, + { + "epoch": 0.24187987661041555, + "grad_norm": 0.11586508381983424, + "learning_rate": 0.0008868458007280034, + "loss": 1.6474, + "step": 2666 + }, + { + "epoch": 0.24197060424605335, + "grad_norm": 0.12308864858743744, + "learning_rate": 0.0008867526966191716, + "loss": 1.683, + "step": 2667 + }, + { + "epoch": 0.24206133188169116, + "grad_norm": 0.12254822077816574, + "learning_rate": 0.0008866595591141875, + "loss": 1.6951, + "step": 2668 + }, + { + "epoch": 0.242152059517329, + "grad_norm": 0.12265651029657954, + "learning_rate": 0.0008865663882210935, + "loss": 1.6792, + "step": 2669 + }, + { + "epoch": 0.2422427871529668, + "grad_norm": 0.12589290546220971, + "learning_rate": 0.0008864731839479347, + "loss": 1.6127, + "step": 2670 + }, + { + "epoch": 0.2423335147886046, + "grad_norm": 0.12388289564819414, + "learning_rate": 0.0008863799463027597, + "loss": 1.6902, + "step": 2671 + }, + { + "epoch": 0.24242424242424243, + "grad_norm": 0.12360753437469225, + "learning_rate": 0.0008862866752936194, + "loss": 1.6464, + "step": 2672 + }, + { + "epoch": 0.24251497005988024, + "grad_norm": 0.1257747632656148, + "learning_rate": 0.0008861933709285677, + "loss": 1.6356, + "step": 2673 + }, + { + "epoch": 0.24260569769551804, + "grad_norm": 0.12541663729097513, + "learning_rate": 0.0008861000332156615, + "loss": 1.6236, + "step": 2674 + }, + { + "epoch": 0.24269642533115587, + "grad_norm": 0.12268305671337514, + "learning_rate": 0.0008860066621629606, + "loss": 1.6006, + "step": 2675 + }, + { + "epoch": 0.24278715296679368, + "grad_norm": 0.12371784693297329, + "learning_rate": 0.0008859132577785274, + "loss": 1.6538, + "step": 2676 + }, + { + "epoch": 0.2428778806024315, + "grad_norm": 0.12569803525150283, + "learning_rate": 0.0008858198200704275, + "loss": 1.6509, + "step": 2677 + }, + { + "epoch": 0.24296860823806932, + "grad_norm": 0.12237039590283424, + "learning_rate": 0.0008857263490467293, + "loss": 1.6449, + "step": 2678 + }, + { + "epoch": 0.24305933587370712, + "grad_norm": 0.1252757104733616, + "learning_rate": 
0.0008856328447155041, + "loss": 1.6954, + "step": 2679 + }, + { + "epoch": 0.24315006350934495, + "grad_norm": 0.12114321987029608, + "learning_rate": 0.0008855393070848258, + "loss": 1.6648, + "step": 2680 + }, + { + "epoch": 0.24324079114498276, + "grad_norm": 0.12194799179020954, + "learning_rate": 0.0008854457361627717, + "loss": 1.6479, + "step": 2681 + }, + { + "epoch": 0.24333151878062056, + "grad_norm": 0.122855427340133, + "learning_rate": 0.0008853521319574214, + "loss": 1.6017, + "step": 2682 + }, + { + "epoch": 0.2434222464162584, + "grad_norm": 0.13287420426239455, + "learning_rate": 0.0008852584944768576, + "loss": 1.6649, + "step": 2683 + }, + { + "epoch": 0.2435129740518962, + "grad_norm": 0.12450445025959855, + "learning_rate": 0.0008851648237291661, + "loss": 1.6528, + "step": 2684 + }, + { + "epoch": 0.24360370168753404, + "grad_norm": 0.12120491867063286, + "learning_rate": 0.0008850711197224353, + "loss": 1.6478, + "step": 2685 + }, + { + "epoch": 0.24369442932317184, + "grad_norm": 0.12188937416373213, + "learning_rate": 0.0008849773824647565, + "loss": 1.6356, + "step": 2686 + }, + { + "epoch": 0.24378515695880965, + "grad_norm": 0.12046238645117674, + "learning_rate": 0.000884883611964224, + "loss": 1.6267, + "step": 2687 + }, + { + "epoch": 0.24387588459444748, + "grad_norm": 0.12457157974653087, + "learning_rate": 0.0008847898082289349, + "loss": 1.678, + "step": 2688 + }, + { + "epoch": 0.24396661223008528, + "grad_norm": 0.12533943244263096, + "learning_rate": 0.0008846959712669892, + "loss": 1.6707, + "step": 2689 + }, + { + "epoch": 0.2440573398657231, + "grad_norm": 0.12813200795825788, + "learning_rate": 0.0008846021010864896, + "loss": 1.6381, + "step": 2690 + }, + { + "epoch": 0.24414806750136092, + "grad_norm": 0.12545201880775497, + "learning_rate": 0.000884508197695542, + "loss": 1.6723, + "step": 2691 + }, + { + "epoch": 0.24423879513699873, + "grad_norm": 0.11641136361359464, + "learning_rate": 0.0008844142611022548, + "loss": 1.6328, + "step": 2692 + }, + { + "epoch": 0.24432952277263653, + "grad_norm": 0.12550922743884, + "learning_rate": 0.0008843202913147394, + "loss": 1.6278, + "step": 2693 + }, + { + "epoch": 0.24442025040827436, + "grad_norm": 0.12445447035148961, + "learning_rate": 0.0008842262883411103, + "loss": 1.6768, + "step": 2694 + }, + { + "epoch": 0.24451097804391217, + "grad_norm": 0.12351875924617878, + "learning_rate": 0.0008841322521894846, + "loss": 1.6439, + "step": 2695 + }, + { + "epoch": 0.24460170567955, + "grad_norm": 0.13092939607011606, + "learning_rate": 0.0008840381828679823, + "loss": 1.6799, + "step": 2696 + }, + { + "epoch": 0.2446924333151878, + "grad_norm": 0.12460511086044061, + "learning_rate": 0.0008839440803847263, + "loss": 1.6381, + "step": 2697 + }, + { + "epoch": 0.2447831609508256, + "grad_norm": 0.1278621044709998, + "learning_rate": 0.0008838499447478423, + "loss": 1.6762, + "step": 2698 + }, + { + "epoch": 0.24487388858646345, + "grad_norm": 0.12273566078250062, + "learning_rate": 0.0008837557759654591, + "loss": 1.6566, + "step": 2699 + }, + { + "epoch": 0.24496461622210125, + "grad_norm": 0.13035465861000586, + "learning_rate": 0.000883661574045708, + "loss": 1.6197, + "step": 2700 + }, + { + "epoch": 0.24505534385773906, + "grad_norm": 0.12079307296868112, + "learning_rate": 0.0008835673389967235, + "loss": 1.6746, + "step": 2701 + }, + { + "epoch": 0.2451460714933769, + "grad_norm": 0.12179917158478584, + "learning_rate": 0.0008834730708266427, + "loss": 1.6434, + "step": 2702 + }, + { + "epoch": 
0.2452367991290147, + "grad_norm": 0.20176424399453186, + "learning_rate": 0.0008833787695436057, + "loss": 1.6418, + "step": 2703 + }, + { + "epoch": 0.24532752676465253, + "grad_norm": 0.12327555738495086, + "learning_rate": 0.0008832844351557555, + "loss": 1.6316, + "step": 2704 + }, + { + "epoch": 0.24541825440029033, + "grad_norm": 0.12501500434942375, + "learning_rate": 0.0008831900676712378, + "loss": 1.6382, + "step": 2705 + }, + { + "epoch": 0.24550898203592814, + "grad_norm": 0.12657819786663, + "learning_rate": 0.0008830956670982013, + "loss": 1.6579, + "step": 2706 + }, + { + "epoch": 0.24559970967156597, + "grad_norm": 0.12135128564531844, + "learning_rate": 0.0008830012334447975, + "loss": 1.6522, + "step": 2707 + }, + { + "epoch": 0.24569043730720377, + "grad_norm": 0.11852051599037418, + "learning_rate": 0.0008829067667191807, + "loss": 1.6574, + "step": 2708 + }, + { + "epoch": 0.24578116494284158, + "grad_norm": 0.12777242179816684, + "learning_rate": 0.0008828122669295082, + "loss": 1.6648, + "step": 2709 + }, + { + "epoch": 0.2458718925784794, + "grad_norm": 0.1255735251568175, + "learning_rate": 0.00088271773408394, + "loss": 1.7014, + "step": 2710 + }, + { + "epoch": 0.24596262021411722, + "grad_norm": 0.1257693264118278, + "learning_rate": 0.000882623168190639, + "loss": 1.6609, + "step": 2711 + }, + { + "epoch": 0.24605334784975502, + "grad_norm": 0.1253938975081252, + "learning_rate": 0.0008825285692577712, + "loss": 1.6921, + "step": 2712 + }, + { + "epoch": 0.24614407548539285, + "grad_norm": 0.12292423990781336, + "learning_rate": 0.0008824339372935048, + "loss": 1.644, + "step": 2713 + }, + { + "epoch": 0.24623480312103066, + "grad_norm": 0.12813161447194915, + "learning_rate": 0.0008823392723060117, + "loss": 1.641, + "step": 2714 + }, + { + "epoch": 0.2463255307566685, + "grad_norm": 0.12748038775043077, + "learning_rate": 0.0008822445743034661, + "loss": 1.6353, + "step": 2715 + }, + { + "epoch": 0.2464162583923063, + "grad_norm": 0.1285534911181136, + "learning_rate": 0.0008821498432940452, + "loss": 1.6561, + "step": 2716 + }, + { + "epoch": 0.2465069860279441, + "grad_norm": 0.12110592934482631, + "learning_rate": 0.000882055079285929, + "loss": 1.656, + "step": 2717 + }, + { + "epoch": 0.24659771366358194, + "grad_norm": 0.1288714441903737, + "learning_rate": 0.0008819602822873004, + "loss": 1.6696, + "step": 2718 + }, + { + "epoch": 0.24668844129921974, + "grad_norm": 0.12101136929413954, + "learning_rate": 0.0008818654523063451, + "loss": 1.6381, + "step": 2719 + }, + { + "epoch": 0.24677916893485755, + "grad_norm": 0.12204873185924803, + "learning_rate": 0.0008817705893512518, + "loss": 1.6601, + "step": 2720 + }, + { + "epoch": 0.24686989657049538, + "grad_norm": 0.12287737733193893, + "learning_rate": 0.0008816756934302117, + "loss": 1.6266, + "step": 2721 + }, + { + "epoch": 0.24696062420613318, + "grad_norm": 0.12055706265782178, + "learning_rate": 0.0008815807645514192, + "loss": 1.633, + "step": 2722 + }, + { + "epoch": 0.24705135184177102, + "grad_norm": 0.12231096000760742, + "learning_rate": 0.0008814858027230716, + "loss": 1.6557, + "step": 2723 + }, + { + "epoch": 0.24714207947740882, + "grad_norm": 0.12240991261571164, + "learning_rate": 0.0008813908079533686, + "loss": 1.6546, + "step": 2724 + }, + { + "epoch": 0.24723280711304663, + "grad_norm": 0.11997962255616408, + "learning_rate": 0.000881295780250513, + "loss": 1.6631, + "step": 2725 + }, + { + "epoch": 0.24732353474868446, + "grad_norm": 0.12707046779745626, + "learning_rate": 
0.0008812007196227108, + "loss": 1.6552, + "step": 2726 + }, + { + "epoch": 0.24741426238432226, + "grad_norm": 0.12584166134762975, + "learning_rate": 0.0008811056260781703, + "loss": 1.6001, + "step": 2727 + }, + { + "epoch": 0.24750499001996007, + "grad_norm": 0.1238735247241371, + "learning_rate": 0.0008810104996251027, + "loss": 1.6767, + "step": 2728 + }, + { + "epoch": 0.2475957176555979, + "grad_norm": 0.12059675305235405, + "learning_rate": 0.0008809153402717223, + "loss": 1.6824, + "step": 2729 + }, + { + "epoch": 0.2476864452912357, + "grad_norm": 0.12522144524816103, + "learning_rate": 0.0008808201480262461, + "loss": 1.6531, + "step": 2730 + }, + { + "epoch": 0.2477771729268735, + "grad_norm": 0.12243778411552869, + "learning_rate": 0.000880724922896894, + "loss": 1.6561, + "step": 2731 + }, + { + "epoch": 0.24786790056251135, + "grad_norm": 0.11634208283665647, + "learning_rate": 0.0008806296648918888, + "loss": 1.6561, + "step": 2732 + }, + { + "epoch": 0.24795862819814915, + "grad_norm": 0.11983543704936414, + "learning_rate": 0.0008805343740194558, + "loss": 1.6804, + "step": 2733 + }, + { + "epoch": 0.24804935583378698, + "grad_norm": 0.1483876276151793, + "learning_rate": 0.0008804390502878237, + "loss": 1.6496, + "step": 2734 + }, + { + "epoch": 0.2481400834694248, + "grad_norm": 0.11972417347559106, + "learning_rate": 0.0008803436937052234, + "loss": 1.693, + "step": 2735 + }, + { + "epoch": 0.2482308111050626, + "grad_norm": 0.12157994359627332, + "learning_rate": 0.0008802483042798891, + "loss": 1.631, + "step": 2736 + }, + { + "epoch": 0.24832153874070043, + "grad_norm": 0.12118178361079913, + "learning_rate": 0.0008801528820200577, + "loss": 1.6286, + "step": 2737 + }, + { + "epoch": 0.24841226637633823, + "grad_norm": 0.12478982905706472, + "learning_rate": 0.0008800574269339689, + "loss": 1.663, + "step": 2738 + }, + { + "epoch": 0.24850299401197604, + "grad_norm": 0.13551316952677459, + "learning_rate": 0.0008799619390298653, + "loss": 1.6523, + "step": 2739 + }, + { + "epoch": 0.24859372164761387, + "grad_norm": 0.1220154334719568, + "learning_rate": 0.0008798664183159923, + "loss": 1.6598, + "step": 2740 + }, + { + "epoch": 0.24868444928325167, + "grad_norm": 0.12151896142261251, + "learning_rate": 0.000879770864800598, + "loss": 1.6693, + "step": 2741 + }, + { + "epoch": 0.2487751769188895, + "grad_norm": 0.1213609747151695, + "learning_rate": 0.0008796752784919335, + "loss": 1.667, + "step": 2742 + }, + { + "epoch": 0.2488659045545273, + "grad_norm": 0.1241651814335016, + "learning_rate": 0.0008795796593982529, + "loss": 1.6416, + "step": 2743 + }, + { + "epoch": 0.24895663219016512, + "grad_norm": 0.11989119027245021, + "learning_rate": 0.0008794840075278127, + "loss": 1.6483, + "step": 2744 + }, + { + "epoch": 0.24904735982580295, + "grad_norm": 0.12479953457414432, + "learning_rate": 0.0008793883228888726, + "loss": 1.6961, + "step": 2745 + }, + { + "epoch": 0.24913808746144075, + "grad_norm": 0.1215124093957141, + "learning_rate": 0.0008792926054896948, + "loss": 1.656, + "step": 2746 + }, + { + "epoch": 0.24922881509707856, + "grad_norm": 0.12030423862721341, + "learning_rate": 0.0008791968553385445, + "loss": 1.6525, + "step": 2747 + }, + { + "epoch": 0.2493195427327164, + "grad_norm": 0.1217849833150302, + "learning_rate": 0.0008791010724436901, + "loss": 1.6428, + "step": 2748 + }, + { + "epoch": 0.2494102703683542, + "grad_norm": 0.12090889809960796, + "learning_rate": 0.0008790052568134021, + "loss": 1.6462, + "step": 2749 + }, + { + "epoch": 
0.249500998003992, + "grad_norm": 0.1183816678533021, + "learning_rate": 0.0008789094084559544, + "loss": 1.6898, + "step": 2750 + }, + { + "epoch": 0.24959172563962984, + "grad_norm": 0.1288600778787652, + "learning_rate": 0.0008788135273796233, + "loss": 1.6436, + "step": 2751 + }, + { + "epoch": 0.24968245327526764, + "grad_norm": 0.12633793435994353, + "learning_rate": 0.0008787176135926883, + "loss": 1.6579, + "step": 2752 + }, + { + "epoch": 0.24977318091090547, + "grad_norm": 0.12107927303956871, + "learning_rate": 0.0008786216671034316, + "loss": 1.6722, + "step": 2753 + }, + { + "epoch": 0.24986390854654328, + "grad_norm": 0.12540183302297425, + "learning_rate": 0.0008785256879201382, + "loss": 1.6722, + "step": 2754 + }, + { + "epoch": 0.24995463618218108, + "grad_norm": 0.12415981251137016, + "learning_rate": 0.0008784296760510957, + "loss": 1.5938, + "step": 2755 + }, + { + "epoch": 0.2500453638178189, + "grad_norm": 0.11792922399595075, + "learning_rate": 0.000878333631504595, + "loss": 1.6066, + "step": 2756 + }, + { + "epoch": 0.2501360914534567, + "grad_norm": 0.1270357212212167, + "learning_rate": 0.0008782375542889293, + "loss": 1.6676, + "step": 2757 + }, + { + "epoch": 0.25022681908909455, + "grad_norm": 0.12720873851798173, + "learning_rate": 0.0008781414444123953, + "loss": 1.6408, + "step": 2758 + }, + { + "epoch": 0.25031754672473233, + "grad_norm": 0.12430269450578907, + "learning_rate": 0.0008780453018832918, + "loss": 1.5994, + "step": 2759 + }, + { + "epoch": 0.25040827436037016, + "grad_norm": 0.12331697580552105, + "learning_rate": 0.0008779491267099207, + "loss": 1.6999, + "step": 2760 + }, + { + "epoch": 0.250499001996008, + "grad_norm": 0.12321773955502642, + "learning_rate": 0.0008778529189005867, + "loss": 1.6691, + "step": 2761 + }, + { + "epoch": 0.2505897296316458, + "grad_norm": 0.11942490620107932, + "learning_rate": 0.0008777566784635975, + "loss": 1.6672, + "step": 2762 + }, + { + "epoch": 0.2506804572672836, + "grad_norm": 0.12256745145232682, + "learning_rate": 0.0008776604054072637, + "loss": 1.6458, + "step": 2763 + }, + { + "epoch": 0.25077118490292144, + "grad_norm": 0.11884429844121355, + "learning_rate": 0.0008775640997398979, + "loss": 1.6518, + "step": 2764 + }, + { + "epoch": 0.2508619125385592, + "grad_norm": 0.1195380723932989, + "learning_rate": 0.0008774677614698165, + "loss": 1.7236, + "step": 2765 + }, + { + "epoch": 0.25095264017419705, + "grad_norm": 0.11900447246063169, + "learning_rate": 0.0008773713906053384, + "loss": 1.6719, + "step": 2766 + }, + { + "epoch": 0.2510433678098349, + "grad_norm": 0.11863894644343963, + "learning_rate": 0.000877274987154785, + "loss": 1.6291, + "step": 2767 + }, + { + "epoch": 0.2511340954454727, + "grad_norm": 0.11974921220278559, + "learning_rate": 0.0008771785511264809, + "loss": 1.6632, + "step": 2768 + }, + { + "epoch": 0.2512248230811105, + "grad_norm": 0.11964866532081317, + "learning_rate": 0.0008770820825287533, + "loss": 1.6782, + "step": 2769 + }, + { + "epoch": 0.2513155507167483, + "grad_norm": 0.1225379255839415, + "learning_rate": 0.0008769855813699324, + "loss": 1.6169, + "step": 2770 + }, + { + "epoch": 0.25140627835238616, + "grad_norm": 0.12222529883373148, + "learning_rate": 0.0008768890476583508, + "loss": 1.634, + "step": 2771 + }, + { + "epoch": 0.25149700598802394, + "grad_norm": 0.12467654386527847, + "learning_rate": 0.0008767924814023446, + "loss": 1.6019, + "step": 2772 + }, + { + "epoch": 0.25158773362366177, + "grad_norm": 0.129963727508364, + "learning_rate": 
0.000876695882610252, + "loss": 1.6712, + "step": 2773 + }, + { + "epoch": 0.2516784612592996, + "grad_norm": 0.12057976321290102, + "learning_rate": 0.0008765992512904144, + "loss": 1.6371, + "step": 2774 + }, + { + "epoch": 0.2517691888949374, + "grad_norm": 0.12005909637490768, + "learning_rate": 0.0008765025874511758, + "loss": 1.6146, + "step": 2775 + }, + { + "epoch": 0.2518599165305752, + "grad_norm": 0.12238169178349953, + "learning_rate": 0.0008764058911008835, + "loss": 1.6754, + "step": 2776 + }, + { + "epoch": 0.25195064416621304, + "grad_norm": 0.1250625180997474, + "learning_rate": 0.0008763091622478869, + "loss": 1.6482, + "step": 2777 + }, + { + "epoch": 0.2520413718018508, + "grad_norm": 0.12144775319110977, + "learning_rate": 0.0008762124009005388, + "loss": 1.6533, + "step": 2778 + }, + { + "epoch": 0.25213209943748865, + "grad_norm": 0.12604404438634942, + "learning_rate": 0.0008761156070671943, + "loss": 1.6738, + "step": 2779 + }, + { + "epoch": 0.2522228270731265, + "grad_norm": 0.1226391918919448, + "learning_rate": 0.0008760187807562119, + "loss": 1.6679, + "step": 2780 + }, + { + "epoch": 0.25231355470876426, + "grad_norm": 0.12404636668263992, + "learning_rate": 0.0008759219219759522, + "loss": 1.6533, + "step": 2781 + }, + { + "epoch": 0.2524042823444021, + "grad_norm": 0.14493562052525583, + "learning_rate": 0.0008758250307347792, + "loss": 1.6673, + "step": 2782 + }, + { + "epoch": 0.25249500998003993, + "grad_norm": 0.12197904727726537, + "learning_rate": 0.0008757281070410592, + "loss": 1.6851, + "step": 2783 + }, + { + "epoch": 0.2525857376156777, + "grad_norm": 0.11629482662700236, + "learning_rate": 0.000875631150903162, + "loss": 1.6547, + "step": 2784 + }, + { + "epoch": 0.25267646525131554, + "grad_norm": 0.11890120137438888, + "learning_rate": 0.0008755341623294595, + "loss": 1.6421, + "step": 2785 + }, + { + "epoch": 0.2527671928869534, + "grad_norm": 0.11858148341168012, + "learning_rate": 0.0008754371413283267, + "loss": 1.6186, + "step": 2786 + }, + { + "epoch": 0.2528579205225912, + "grad_norm": 0.11791928065694487, + "learning_rate": 0.0008753400879081414, + "loss": 1.6327, + "step": 2787 + }, + { + "epoch": 0.252948648158229, + "grad_norm": 0.1202798390897717, + "learning_rate": 0.0008752430020772844, + "loss": 1.6538, + "step": 2788 + }, + { + "epoch": 0.2530393757938668, + "grad_norm": 0.13661150943811962, + "learning_rate": 0.0008751458838441386, + "loss": 1.6169, + "step": 2789 + }, + { + "epoch": 0.25313010342950465, + "grad_norm": 0.12199525643493252, + "learning_rate": 0.0008750487332170906, + "loss": 1.6816, + "step": 2790 + }, + { + "epoch": 0.2532208310651424, + "grad_norm": 0.12173930838242415, + "learning_rate": 0.0008749515502045291, + "loss": 1.6597, + "step": 2791 + }, + { + "epoch": 0.25331155870078026, + "grad_norm": 0.12229567391172917, + "learning_rate": 0.0008748543348148461, + "loss": 1.6915, + "step": 2792 + }, + { + "epoch": 0.2534022863364181, + "grad_norm": 0.11924413090848664, + "learning_rate": 0.0008747570870564358, + "loss": 1.6361, + "step": 2793 + }, + { + "epoch": 0.25349301397205587, + "grad_norm": 0.11859872117621266, + "learning_rate": 0.0008746598069376961, + "loss": 1.6329, + "step": 2794 + }, + { + "epoch": 0.2535837416076937, + "grad_norm": 0.1275248344477003, + "learning_rate": 0.0008745624944670267, + "loss": 1.6196, + "step": 2795 + }, + { + "epoch": 0.25367446924333154, + "grad_norm": 0.1155673402999938, + "learning_rate": 0.0008744651496528308, + "loss": 1.6408, + "step": 2796 + }, + { + "epoch": 
0.2537651968789693, + "grad_norm": 0.1235907318157248, + "learning_rate": 0.000874367772503514, + "loss": 1.6441, + "step": 2797 + }, + { + "epoch": 0.25385592451460715, + "grad_norm": 0.12348455082879274, + "learning_rate": 0.0008742703630274847, + "loss": 1.6754, + "step": 2798 + }, + { + "epoch": 0.253946652150245, + "grad_norm": 0.11973783947198445, + "learning_rate": 0.0008741729212331545, + "loss": 1.683, + "step": 2799 + }, + { + "epoch": 0.25403737978588276, + "grad_norm": 0.12302441022884678, + "learning_rate": 0.0008740754471289374, + "loss": 1.6685, + "step": 2800 + }, + { + "epoch": 0.2541281074215206, + "grad_norm": 0.1243745167724175, + "learning_rate": 0.0008739779407232504, + "loss": 1.6462, + "step": 2801 + }, + { + "epoch": 0.2542188350571584, + "grad_norm": 0.12054246828181203, + "learning_rate": 0.000873880402024513, + "loss": 1.6576, + "step": 2802 + }, + { + "epoch": 0.2543095626927962, + "grad_norm": 0.12310472679355845, + "learning_rate": 0.0008737828310411477, + "loss": 1.644, + "step": 2803 + }, + { + "epoch": 0.25440029032843403, + "grad_norm": 0.12232522688768702, + "learning_rate": 0.0008736852277815801, + "loss": 1.6345, + "step": 2804 + }, + { + "epoch": 0.25449101796407186, + "grad_norm": 0.11631007500969179, + "learning_rate": 0.0008735875922542378, + "loss": 1.6766, + "step": 2805 + }, + { + "epoch": 0.2545817455997097, + "grad_norm": 0.1194609585206269, + "learning_rate": 0.0008734899244675519, + "loss": 1.6478, + "step": 2806 + }, + { + "epoch": 0.2546724732353475, + "grad_norm": 0.12359187401893268, + "learning_rate": 0.0008733922244299559, + "loss": 1.6629, + "step": 2807 + }, + { + "epoch": 0.2547632008709853, + "grad_norm": 0.12123971397590863, + "learning_rate": 0.0008732944921498864, + "loss": 1.6806, + "step": 2808 + }, + { + "epoch": 0.25485392850662314, + "grad_norm": 0.12019642674943065, + "learning_rate": 0.0008731967276357826, + "loss": 1.6401, + "step": 2809 + }, + { + "epoch": 0.2549446561422609, + "grad_norm": 0.1213333292549718, + "learning_rate": 0.0008730989308960861, + "loss": 1.6309, + "step": 2810 + }, + { + "epoch": 0.25503538377789875, + "grad_norm": 0.12469486828765741, + "learning_rate": 0.0008730011019392421, + "loss": 1.6791, + "step": 2811 + }, + { + "epoch": 0.2551261114135366, + "grad_norm": 0.12371295300865187, + "learning_rate": 0.0008729032407736979, + "loss": 1.663, + "step": 2812 + }, + { + "epoch": 0.25521683904917436, + "grad_norm": 0.12045141094259329, + "learning_rate": 0.0008728053474079039, + "loss": 1.633, + "step": 2813 + }, + { + "epoch": 0.2553075666848122, + "grad_norm": 0.1266071476586634, + "learning_rate": 0.0008727074218503133, + "loss": 1.6577, + "step": 2814 + }, + { + "epoch": 0.25539829432045, + "grad_norm": 0.12324069082792241, + "learning_rate": 0.0008726094641093818, + "loss": 1.6683, + "step": 2815 + }, + { + "epoch": 0.2554890219560878, + "grad_norm": 0.12136710902657964, + "learning_rate": 0.0008725114741935683, + "loss": 1.6506, + "step": 2816 + }, + { + "epoch": 0.25557974959172564, + "grad_norm": 0.12310893579319493, + "learning_rate": 0.0008724134521113338, + "loss": 1.6393, + "step": 2817 + }, + { + "epoch": 0.25567047722736347, + "grad_norm": 0.11861990383787886, + "learning_rate": 0.0008723153978711431, + "loss": 1.6746, + "step": 2818 + }, + { + "epoch": 0.25576120486300125, + "grad_norm": 0.12620748286527442, + "learning_rate": 0.0008722173114814628, + "loss": 1.6553, + "step": 2819 + }, + { + "epoch": 0.2558519324986391, + "grad_norm": 0.12829165844730292, + "learning_rate": 
0.0008721191929507628, + "loss": 1.6903, + "step": 2820 + }, + { + "epoch": 0.2559426601342769, + "grad_norm": 0.12489304862584569, + "learning_rate": 0.0008720210422875157, + "loss": 1.6496, + "step": 2821 + }, + { + "epoch": 0.2560333877699147, + "grad_norm": 0.12136041704782942, + "learning_rate": 0.0008719228595001967, + "loss": 1.6243, + "step": 2822 + }, + { + "epoch": 0.2561241154055525, + "grad_norm": 0.12186922221014825, + "learning_rate": 0.000871824644597284, + "loss": 1.6674, + "step": 2823 + }, + { + "epoch": 0.25621484304119035, + "grad_norm": 0.12215814301446754, + "learning_rate": 0.0008717263975872583, + "loss": 1.6618, + "step": 2824 + }, + { + "epoch": 0.2563055706768282, + "grad_norm": 0.11745027997453314, + "learning_rate": 0.0008716281184786037, + "loss": 1.6417, + "step": 2825 + }, + { + "epoch": 0.25639629831246596, + "grad_norm": 0.12260262969861754, + "learning_rate": 0.0008715298072798061, + "loss": 1.6609, + "step": 2826 + }, + { + "epoch": 0.2564870259481038, + "grad_norm": 0.11833856785640169, + "learning_rate": 0.0008714314639993548, + "loss": 1.6111, + "step": 2827 + }, + { + "epoch": 0.25657775358374163, + "grad_norm": 0.12019972479651102, + "learning_rate": 0.0008713330886457419, + "loss": 1.6473, + "step": 2828 + }, + { + "epoch": 0.2566684812193794, + "grad_norm": 0.11892311298166117, + "learning_rate": 0.0008712346812274621, + "loss": 1.6227, + "step": 2829 + }, + { + "epoch": 0.25675920885501724, + "grad_norm": 0.12025373744044861, + "learning_rate": 0.0008711362417530129, + "loss": 1.676, + "step": 2830 + }, + { + "epoch": 0.2568499364906551, + "grad_norm": 0.13006760490061917, + "learning_rate": 0.0008710377702308944, + "loss": 1.6446, + "step": 2831 + }, + { + "epoch": 0.25694066412629285, + "grad_norm": 0.12477642892963321, + "learning_rate": 0.0008709392666696098, + "loss": 1.6302, + "step": 2832 + }, + { + "epoch": 0.2570313917619307, + "grad_norm": 0.12168895605007286, + "learning_rate": 0.0008708407310776649, + "loss": 1.668, + "step": 2833 + }, + { + "epoch": 0.2571221193975685, + "grad_norm": 0.11859858234585671, + "learning_rate": 0.0008707421634635684, + "loss": 1.6453, + "step": 2834 + }, + { + "epoch": 0.2572128470332063, + "grad_norm": 0.11926675222529194, + "learning_rate": 0.0008706435638358313, + "loss": 1.6341, + "step": 2835 + }, + { + "epoch": 0.2573035746688441, + "grad_norm": 0.11521726696262488, + "learning_rate": 0.0008705449322029677, + "loss": 1.6712, + "step": 2836 + }, + { + "epoch": 0.25739430230448196, + "grad_norm": 0.11526675670297325, + "learning_rate": 0.0008704462685734946, + "loss": 1.6292, + "step": 2837 + }, + { + "epoch": 0.25748502994011974, + "grad_norm": 0.12047932789038814, + "learning_rate": 0.0008703475729559318, + "loss": 1.6591, + "step": 2838 + }, + { + "epoch": 0.25757575757575757, + "grad_norm": 0.12000376302194206, + "learning_rate": 0.0008702488453588013, + "loss": 1.652, + "step": 2839 + }, + { + "epoch": 0.2576664852113954, + "grad_norm": 0.11803489869993367, + "learning_rate": 0.0008701500857906285, + "loss": 1.6771, + "step": 2840 + }, + { + "epoch": 0.2577572128470332, + "grad_norm": 0.12087486820351007, + "learning_rate": 0.0008700512942599412, + "loss": 1.6717, + "step": 2841 + }, + { + "epoch": 0.257847940482671, + "grad_norm": 0.12216535000386551, + "learning_rate": 0.0008699524707752702, + "loss": 1.615, + "step": 2842 + }, + { + "epoch": 0.25793866811830884, + "grad_norm": 0.1209484002971405, + "learning_rate": 0.0008698536153451488, + "loss": 1.645, + "step": 2843 + }, + { + "epoch": 
0.2580293957539467, + "grad_norm": 0.1206784111968717, + "learning_rate": 0.0008697547279781132, + "loss": 1.6339, + "step": 2844 + }, + { + "epoch": 0.25812012338958445, + "grad_norm": 0.11559568343292524, + "learning_rate": 0.0008696558086827022, + "loss": 1.6234, + "step": 2845 + }, + { + "epoch": 0.2582108510252223, + "grad_norm": 0.11894509470842717, + "learning_rate": 0.000869556857467458, + "loss": 1.6867, + "step": 2846 + }, + { + "epoch": 0.2583015786608601, + "grad_norm": 0.1228385220678716, + "learning_rate": 0.0008694578743409242, + "loss": 1.6435, + "step": 2847 + }, + { + "epoch": 0.2583923062964979, + "grad_norm": 0.12152377470182536, + "learning_rate": 0.0008693588593116488, + "loss": 1.6407, + "step": 2848 + }, + { + "epoch": 0.25848303393213573, + "grad_norm": 0.12374280590606389, + "learning_rate": 0.0008692598123881814, + "loss": 1.6536, + "step": 2849 + }, + { + "epoch": 0.25857376156777356, + "grad_norm": 0.11967008205289091, + "learning_rate": 0.0008691607335790749, + "loss": 1.6285, + "step": 2850 + }, + { + "epoch": 0.25866448920341134, + "grad_norm": 0.11755221144109669, + "learning_rate": 0.0008690616228928845, + "loss": 1.6562, + "step": 2851 + }, + { + "epoch": 0.2587552168390492, + "grad_norm": 0.11964657938220992, + "learning_rate": 0.0008689624803381686, + "loss": 1.6577, + "step": 2852 + }, + { + "epoch": 0.258845944474687, + "grad_norm": 0.11821430868915549, + "learning_rate": 0.0008688633059234881, + "loss": 1.6492, + "step": 2853 + }, + { + "epoch": 0.2589366721103248, + "grad_norm": 0.12057685738165194, + "learning_rate": 0.0008687640996574068, + "loss": 1.6357, + "step": 2854 + }, + { + "epoch": 0.2590273997459626, + "grad_norm": 0.12388703167482447, + "learning_rate": 0.0008686648615484912, + "loss": 1.6171, + "step": 2855 + }, + { + "epoch": 0.25911812738160045, + "grad_norm": 0.11919816637674277, + "learning_rate": 0.0008685655916053105, + "loss": 1.6526, + "step": 2856 + }, + { + "epoch": 0.2592088550172382, + "grad_norm": 0.12219092455650028, + "learning_rate": 0.0008684662898364365, + "loss": 1.6654, + "step": 2857 + }, + { + "epoch": 0.25929958265287606, + "grad_norm": 0.1224931461755701, + "learning_rate": 0.0008683669562504441, + "loss": 1.6504, + "step": 2858 + }, + { + "epoch": 0.2593903102885139, + "grad_norm": 0.12480837583452102, + "learning_rate": 0.0008682675908559108, + "loss": 1.6305, + "step": 2859 + }, + { + "epoch": 0.25948103792415167, + "grad_norm": 0.1207438518443115, + "learning_rate": 0.0008681681936614168, + "loss": 1.6307, + "step": 2860 + }, + { + "epoch": 0.2595717655597895, + "grad_norm": 0.11908718934043339, + "learning_rate": 0.0008680687646755449, + "loss": 1.6707, + "step": 2861 + }, + { + "epoch": 0.25966249319542734, + "grad_norm": 0.11588716994577294, + "learning_rate": 0.000867969303906881, + "loss": 1.6505, + "step": 2862 + }, + { + "epoch": 0.25975322083106517, + "grad_norm": 0.11747175344688067, + "learning_rate": 0.0008678698113640135, + "loss": 1.6434, + "step": 2863 + }, + { + "epoch": 0.25984394846670295, + "grad_norm": 0.12595443200826076, + "learning_rate": 0.0008677702870555336, + "loss": 1.6249, + "step": 2864 + }, + { + "epoch": 0.2599346761023408, + "grad_norm": 0.11845948839039434, + "learning_rate": 0.000867670730990035, + "loss": 1.6726, + "step": 2865 + }, + { + "epoch": 0.2600254037379786, + "grad_norm": 0.1171234774678639, + "learning_rate": 0.0008675711431761147, + "loss": 1.6072, + "step": 2866 + }, + { + "epoch": 0.2601161313736164, + "grad_norm": 0.12079027111641814, + "learning_rate": 
0.000867471523622372, + "loss": 1.6341, + "step": 2867 + }, + { + "epoch": 0.2602068590092542, + "grad_norm": 0.1220146768809492, + "learning_rate": 0.0008673718723374091, + "loss": 1.6433, + "step": 2868 + }, + { + "epoch": 0.26029758664489205, + "grad_norm": 0.11601565987103843, + "learning_rate": 0.0008672721893298309, + "loss": 1.6313, + "step": 2869 + }, + { + "epoch": 0.26038831428052983, + "grad_norm": 0.1196285913630473, + "learning_rate": 0.000867172474608245, + "loss": 1.6658, + "step": 2870 + }, + { + "epoch": 0.26047904191616766, + "grad_norm": 0.12395222352073482, + "learning_rate": 0.0008670727281812618, + "loss": 1.6662, + "step": 2871 + }, + { + "epoch": 0.2605697695518055, + "grad_norm": 0.12301252473508277, + "learning_rate": 0.0008669729500574943, + "loss": 1.6879, + "step": 2872 + }, + { + "epoch": 0.2606604971874433, + "grad_norm": 0.12193131821747753, + "learning_rate": 0.0008668731402455586, + "loss": 1.6566, + "step": 2873 + }, + { + "epoch": 0.2607512248230811, + "grad_norm": 0.1189210029440626, + "learning_rate": 0.0008667732987540733, + "loss": 1.6521, + "step": 2874 + }, + { + "epoch": 0.26084195245871894, + "grad_norm": 0.11670674527475176, + "learning_rate": 0.0008666734255916594, + "loss": 1.6259, + "step": 2875 + }, + { + "epoch": 0.2609326800943567, + "grad_norm": 0.12222731897262136, + "learning_rate": 0.0008665735207669412, + "loss": 1.6542, + "step": 2876 + }, + { + "epoch": 0.26102340772999455, + "grad_norm": 0.11867742720522559, + "learning_rate": 0.0008664735842885455, + "loss": 1.6808, + "step": 2877 + }, + { + "epoch": 0.2611141353656324, + "grad_norm": 0.118524255229037, + "learning_rate": 0.0008663736161651017, + "loss": 1.6185, + "step": 2878 + }, + { + "epoch": 0.26120486300127016, + "grad_norm": 0.12086370247151765, + "learning_rate": 0.0008662736164052423, + "loss": 1.6439, + "step": 2879 + }, + { + "epoch": 0.261295590636908, + "grad_norm": 0.12134714139343532, + "learning_rate": 0.000866173585017602, + "loss": 1.6488, + "step": 2880 + }, + { + "epoch": 0.2613863182725458, + "grad_norm": 0.12145952831421081, + "learning_rate": 0.0008660735220108187, + "loss": 1.6216, + "step": 2881 + }, + { + "epoch": 0.26147704590818366, + "grad_norm": 0.1271308342577007, + "learning_rate": 0.0008659734273935328, + "loss": 1.6819, + "step": 2882 + }, + { + "epoch": 0.26156777354382144, + "grad_norm": 0.12454162102230691, + "learning_rate": 0.0008658733011743876, + "loss": 1.6123, + "step": 2883 + }, + { + "epoch": 0.26165850117945927, + "grad_norm": 0.1215098340645193, + "learning_rate": 0.0008657731433620289, + "loss": 1.6722, + "step": 2884 + }, + { + "epoch": 0.2617492288150971, + "grad_norm": 0.1305558321102688, + "learning_rate": 0.0008656729539651051, + "loss": 1.62, + "step": 2885 + }, + { + "epoch": 0.2618399564507349, + "grad_norm": 0.12371772967385322, + "learning_rate": 0.0008655727329922681, + "loss": 1.6475, + "step": 2886 + }, + { + "epoch": 0.2619306840863727, + "grad_norm": 0.1331938919619809, + "learning_rate": 0.0008654724804521718, + "loss": 1.6737, + "step": 2887 + }, + { + "epoch": 0.26202141172201054, + "grad_norm": 0.1343536142775054, + "learning_rate": 0.0008653721963534728, + "loss": 1.5792, + "step": 2888 + }, + { + "epoch": 0.2621121393576483, + "grad_norm": 0.12498682723166525, + "learning_rate": 0.0008652718807048307, + "loss": 1.6779, + "step": 2889 + }, + { + "epoch": 0.26220286699328615, + "grad_norm": 0.1248695417107198, + "learning_rate": 0.000865171533514908, + "loss": 1.6539, + "step": 2890 + }, + { + "epoch": 
0.262293594628924, + "grad_norm": 0.13094206441758882, + "learning_rate": 0.0008650711547923695, + "loss": 1.6973, + "step": 2891 + }, + { + "epoch": 0.26238432226456176, + "grad_norm": 0.11774161662345896, + "learning_rate": 0.0008649707445458831, + "loss": 1.6608, + "step": 2892 + }, + { + "epoch": 0.2624750499001996, + "grad_norm": 0.12138451206821779, + "learning_rate": 0.0008648703027841191, + "loss": 1.6416, + "step": 2893 + }, + { + "epoch": 0.26256577753583743, + "grad_norm": 0.11918847275765151, + "learning_rate": 0.0008647698295157505, + "loss": 1.6202, + "step": 2894 + }, + { + "epoch": 0.2626565051714752, + "grad_norm": 0.12383544276990875, + "learning_rate": 0.0008646693247494534, + "loss": 1.6447, + "step": 2895 + }, + { + "epoch": 0.26274723280711304, + "grad_norm": 0.11969711830494617, + "learning_rate": 0.0008645687884939065, + "loss": 1.6166, + "step": 2896 + }, + { + "epoch": 0.2628379604427509, + "grad_norm": 0.1259912405710093, + "learning_rate": 0.0008644682207577909, + "loss": 1.7016, + "step": 2897 + }, + { + "epoch": 0.26292868807838865, + "grad_norm": 0.11829016252469028, + "learning_rate": 0.0008643676215497909, + "loss": 1.6372, + "step": 2898 + }, + { + "epoch": 0.2630194157140265, + "grad_norm": 0.12358781282579212, + "learning_rate": 0.0008642669908785929, + "loss": 1.7087, + "step": 2899 + }, + { + "epoch": 0.2631101433496643, + "grad_norm": 0.11806784955914669, + "learning_rate": 0.0008641663287528864, + "loss": 1.6746, + "step": 2900 + }, + { + "epoch": 0.26320087098530215, + "grad_norm": 0.1190338046860206, + "learning_rate": 0.000864065635181364, + "loss": 1.6243, + "step": 2901 + }, + { + "epoch": 0.2632915986209399, + "grad_norm": 0.1155803727113984, + "learning_rate": 0.0008639649101727202, + "loss": 1.6491, + "step": 2902 + }, + { + "epoch": 0.26338232625657776, + "grad_norm": 0.1182987525712819, + "learning_rate": 0.0008638641537356529, + "loss": 1.6209, + "step": 2903 + }, + { + "epoch": 0.2634730538922156, + "grad_norm": 0.11704272172725305, + "learning_rate": 0.0008637633658788622, + "loss": 1.6825, + "step": 2904 + }, + { + "epoch": 0.26356378152785337, + "grad_norm": 0.12076356617051064, + "learning_rate": 0.0008636625466110513, + "loss": 1.6434, + "step": 2905 + }, + { + "epoch": 0.2636545091634912, + "grad_norm": 0.11849168309514892, + "learning_rate": 0.0008635616959409259, + "loss": 1.6613, + "step": 2906 + }, + { + "epoch": 0.26374523679912903, + "grad_norm": 0.12215364154085413, + "learning_rate": 0.0008634608138771942, + "loss": 1.6412, + "step": 2907 + }, + { + "epoch": 0.2638359644347668, + "grad_norm": 0.12243503076345931, + "learning_rate": 0.0008633599004285679, + "loss": 1.634, + "step": 2908 + }, + { + "epoch": 0.26392669207040464, + "grad_norm": 0.12109045407854409, + "learning_rate": 0.0008632589556037606, + "loss": 1.6816, + "step": 2909 + }, + { + "epoch": 0.2640174197060425, + "grad_norm": 0.12287812674061101, + "learning_rate": 0.000863157979411489, + "loss": 1.6642, + "step": 2910 + }, + { + "epoch": 0.26410814734168025, + "grad_norm": 0.12313825817088199, + "learning_rate": 0.0008630569718604724, + "loss": 1.6476, + "step": 2911 + }, + { + "epoch": 0.2641988749773181, + "grad_norm": 0.11579964757988676, + "learning_rate": 0.0008629559329594327, + "loss": 1.5962, + "step": 2912 + }, + { + "epoch": 0.2642896026129559, + "grad_norm": 0.12134575466583279, + "learning_rate": 0.0008628548627170947, + "loss": 1.6295, + "step": 2913 + }, + { + "epoch": 0.2643803302485937, + "grad_norm": 0.11762487230288474, + "learning_rate": 
0.0008627537611421857, + "loss": 1.6435, + "step": 2914 + }, + { + "epoch": 0.26447105788423153, + "grad_norm": 0.12385949709977533, + "learning_rate": 0.0008626526282434361, + "loss": 1.6665, + "step": 2915 + }, + { + "epoch": 0.26456178551986936, + "grad_norm": 0.11865676556920239, + "learning_rate": 0.0008625514640295786, + "loss": 1.6804, + "step": 2916 + }, + { + "epoch": 0.26465251315550714, + "grad_norm": 0.12212147871445113, + "learning_rate": 0.0008624502685093487, + "loss": 1.6649, + "step": 2917 + }, + { + "epoch": 0.264743240791145, + "grad_norm": 0.12010138464238937, + "learning_rate": 0.0008623490416914848, + "loss": 1.6488, + "step": 2918 + }, + { + "epoch": 0.2648339684267828, + "grad_norm": 0.12085500283378767, + "learning_rate": 0.0008622477835847275, + "loss": 1.6912, + "step": 2919 + }, + { + "epoch": 0.26492469606242064, + "grad_norm": 0.11832491009012154, + "learning_rate": 0.000862146494197821, + "loss": 1.6382, + "step": 2920 + }, + { + "epoch": 0.2650154236980584, + "grad_norm": 0.11807351578710429, + "learning_rate": 0.0008620451735395112, + "loss": 1.6292, + "step": 2921 + }, + { + "epoch": 0.26510615133369625, + "grad_norm": 0.12193201556571671, + "learning_rate": 0.0008619438216185473, + "loss": 1.6534, + "step": 2922 + }, + { + "epoch": 0.2651968789693341, + "grad_norm": 0.11546830863988487, + "learning_rate": 0.0008618424384436809, + "loss": 1.7265, + "step": 2923 + }, + { + "epoch": 0.26528760660497186, + "grad_norm": 0.12169222798913934, + "learning_rate": 0.0008617410240236669, + "loss": 1.6221, + "step": 2924 + }, + { + "epoch": 0.2653783342406097, + "grad_norm": 0.11569745355064502, + "learning_rate": 0.000861639578367262, + "loss": 1.6387, + "step": 2925 + }, + { + "epoch": 0.2654690618762475, + "grad_norm": 0.12607497191593264, + "learning_rate": 0.0008615381014832264, + "loss": 1.6462, + "step": 2926 + }, + { + "epoch": 0.2655597895118853, + "grad_norm": 0.11783240088392445, + "learning_rate": 0.0008614365933803222, + "loss": 1.6103, + "step": 2927 + }, + { + "epoch": 0.26565051714752314, + "grad_norm": 0.12215107193767964, + "learning_rate": 0.000861335054067315, + "loss": 1.6654, + "step": 2928 + }, + { + "epoch": 0.26574124478316097, + "grad_norm": 0.1185558677421082, + "learning_rate": 0.0008612334835529726, + "loss": 1.6411, + "step": 2929 + }, + { + "epoch": 0.26583197241879875, + "grad_norm": 0.12165563599741548, + "learning_rate": 0.0008611318818460657, + "loss": 1.6417, + "step": 2930 + }, + { + "epoch": 0.2659227000544366, + "grad_norm": 0.12578993111173079, + "learning_rate": 0.0008610302489553675, + "loss": 1.7018, + "step": 2931 + }, + { + "epoch": 0.2660134276900744, + "grad_norm": 0.11817655229292252, + "learning_rate": 0.0008609285848896542, + "loss": 1.6923, + "step": 2932 + }, + { + "epoch": 0.2661041553257122, + "grad_norm": 0.12221603200041704, + "learning_rate": 0.0008608268896577043, + "loss": 1.646, + "step": 2933 + }, + { + "epoch": 0.26619488296135, + "grad_norm": 0.11817784803738329, + "learning_rate": 0.0008607251632682993, + "loss": 1.6503, + "step": 2934 + }, + { + "epoch": 0.26628561059698785, + "grad_norm": 0.11852736202613909, + "learning_rate": 0.0008606234057302233, + "loss": 1.6888, + "step": 2935 + }, + { + "epoch": 0.26637633823262563, + "grad_norm": 0.11685978014553458, + "learning_rate": 0.0008605216170522632, + "loss": 1.6371, + "step": 2936 + }, + { + "epoch": 0.26646706586826346, + "grad_norm": 0.1183183386046145, + "learning_rate": 0.0008604197972432082, + "loss": 1.6145, + "step": 2937 + }, + { + "epoch": 
0.2665577935039013, + "grad_norm": 0.12287874321713414, + "learning_rate": 0.0008603179463118507, + "loss": 1.6366, + "step": 2938 + }, + { + "epoch": 0.26664852113953913, + "grad_norm": 0.11985806907086768, + "learning_rate": 0.0008602160642669852, + "loss": 1.641, + "step": 2939 + }, + { + "epoch": 0.2667392487751769, + "grad_norm": 0.11756478272211629, + "learning_rate": 0.0008601141511174096, + "loss": 1.6671, + "step": 2940 + }, + { + "epoch": 0.26682997641081474, + "grad_norm": 0.11488838295372862, + "learning_rate": 0.0008600122068719241, + "loss": 1.6461, + "step": 2941 + }, + { + "epoch": 0.2669207040464526, + "grad_norm": 0.11846117105593451, + "learning_rate": 0.0008599102315393313, + "loss": 1.657, + "step": 2942 + }, + { + "epoch": 0.26701143168209035, + "grad_norm": 0.1144089746537657, + "learning_rate": 0.000859808225128437, + "loss": 1.5958, + "step": 2943 + }, + { + "epoch": 0.2671021593177282, + "grad_norm": 0.11601751599086747, + "learning_rate": 0.0008597061876480495, + "loss": 1.6902, + "step": 2944 + }, + { + "epoch": 0.267192886953366, + "grad_norm": 0.12280406417767535, + "learning_rate": 0.0008596041191069795, + "loss": 1.614, + "step": 2945 + }, + { + "epoch": 0.2672836145890038, + "grad_norm": 0.11847390020867002, + "learning_rate": 0.000859502019514041, + "loss": 1.6164, + "step": 2946 + }, + { + "epoch": 0.2673743422246416, + "grad_norm": 0.1172244127775159, + "learning_rate": 0.0008593998888780501, + "loss": 1.6321, + "step": 2947 + }, + { + "epoch": 0.26746506986027946, + "grad_norm": 0.12124625632356009, + "learning_rate": 0.0008592977272078258, + "loss": 1.6714, + "step": 2948 + }, + { + "epoch": 0.26755579749591724, + "grad_norm": 0.12265591392247306, + "learning_rate": 0.0008591955345121898, + "loss": 1.6932, + "step": 2949 + }, + { + "epoch": 0.26764652513155507, + "grad_norm": 0.11680093271831402, + "learning_rate": 0.0008590933107999664, + "loss": 1.6759, + "step": 2950 + }, + { + "epoch": 0.2677372527671929, + "grad_norm": 0.1194999182321416, + "learning_rate": 0.0008589910560799827, + "loss": 1.6492, + "step": 2951 + }, + { + "epoch": 0.2678279804028307, + "grad_norm": 0.11876280493480144, + "learning_rate": 0.0008588887703610686, + "loss": 1.6536, + "step": 2952 + }, + { + "epoch": 0.2679187080384685, + "grad_norm": 0.1210872076653765, + "learning_rate": 0.000858786453652056, + "loss": 1.6569, + "step": 2953 + }, + { + "epoch": 0.26800943567410634, + "grad_norm": 0.11836663902427579, + "learning_rate": 0.0008586841059617804, + "loss": 1.6223, + "step": 2954 + }, + { + "epoch": 0.2681001633097441, + "grad_norm": 0.12325889488981077, + "learning_rate": 0.0008585817272990794, + "loss": 1.6552, + "step": 2955 + }, + { + "epoch": 0.26819089094538195, + "grad_norm": 0.11818111534186987, + "learning_rate": 0.0008584793176727933, + "loss": 1.6319, + "step": 2956 + }, + { + "epoch": 0.2682816185810198, + "grad_norm": 0.12070008959641695, + "learning_rate": 0.0008583768770917654, + "loss": 1.6818, + "step": 2957 + }, + { + "epoch": 0.2683723462166576, + "grad_norm": 0.11983387278260627, + "learning_rate": 0.0008582744055648413, + "loss": 1.672, + "step": 2958 + }, + { + "epoch": 0.2684630738522954, + "grad_norm": 0.11835853272328721, + "learning_rate": 0.0008581719031008695, + "loss": 1.5974, + "step": 2959 + }, + { + "epoch": 0.26855380148793323, + "grad_norm": 0.11489496858699728, + "learning_rate": 0.000858069369708701, + "loss": 1.6468, + "step": 2960 + }, + { + "epoch": 0.26864452912357106, + "grad_norm": 0.1210325432474675, + "learning_rate": 
0.0008579668053971896, + "loss": 1.6313, + "step": 2961 + }, + { + "epoch": 0.26873525675920884, + "grad_norm": 0.12524022497692341, + "learning_rate": 0.0008578642101751919, + "loss": 1.6082, + "step": 2962 + }, + { + "epoch": 0.2688259843948467, + "grad_norm": 0.12099869294557904, + "learning_rate": 0.0008577615840515669, + "loss": 1.6602, + "step": 2963 + }, + { + "epoch": 0.2689167120304845, + "grad_norm": 0.11740716735368377, + "learning_rate": 0.0008576589270351763, + "loss": 1.6326, + "step": 2964 + }, + { + "epoch": 0.2690074396661223, + "grad_norm": 0.11595067679350964, + "learning_rate": 0.0008575562391348847, + "loss": 1.6298, + "step": 2965 + }, + { + "epoch": 0.2690981673017601, + "grad_norm": 0.11607711515834217, + "learning_rate": 0.0008574535203595593, + "loss": 1.6491, + "step": 2966 + }, + { + "epoch": 0.26918889493739795, + "grad_norm": 0.12319936150512352, + "learning_rate": 0.0008573507707180695, + "loss": 1.6559, + "step": 2967 + }, + { + "epoch": 0.2692796225730357, + "grad_norm": 0.12038948964961942, + "learning_rate": 0.0008572479902192881, + "loss": 1.6399, + "step": 2968 + }, + { + "epoch": 0.26937035020867356, + "grad_norm": 0.121204840302166, + "learning_rate": 0.0008571451788720901, + "loss": 1.6454, + "step": 2969 + }, + { + "epoch": 0.2694610778443114, + "grad_norm": 0.11695901100440496, + "learning_rate": 0.0008570423366853532, + "loss": 1.6761, + "step": 2970 + }, + { + "epoch": 0.26955180547994917, + "grad_norm": 0.11502150922670806, + "learning_rate": 0.0008569394636679579, + "loss": 1.6565, + "step": 2971 + }, + { + "epoch": 0.269642533115587, + "grad_norm": 0.11681678361296635, + "learning_rate": 0.0008568365598287875, + "loss": 1.6302, + "step": 2972 + }, + { + "epoch": 0.26973326075122483, + "grad_norm": 0.11752965364599698, + "learning_rate": 0.0008567336251767273, + "loss": 1.6194, + "step": 2973 + }, + { + "epoch": 0.2698239883868626, + "grad_norm": 0.12112619762647948, + "learning_rate": 0.0008566306597206662, + "loss": 1.6394, + "step": 2974 + }, + { + "epoch": 0.26991471602250044, + "grad_norm": 0.11941348731570865, + "learning_rate": 0.000856527663469495, + "loss": 1.6294, + "step": 2975 + }, + { + "epoch": 0.2700054436581383, + "grad_norm": 0.1148787223078164, + "learning_rate": 0.0008564246364321074, + "loss": 1.6763, + "step": 2976 + }, + { + "epoch": 0.2700961712937761, + "grad_norm": 0.12362221623131119, + "learning_rate": 0.0008563215786174, + "loss": 1.6161, + "step": 2977 + }, + { + "epoch": 0.2701868989294139, + "grad_norm": 0.11739758193021518, + "learning_rate": 0.0008562184900342718, + "loss": 1.6882, + "step": 2978 + }, + { + "epoch": 0.2702776265650517, + "grad_norm": 0.12194901947349492, + "learning_rate": 0.0008561153706916245, + "loss": 1.6399, + "step": 2979 + }, + { + "epoch": 0.27036835420068955, + "grad_norm": 0.12311545085377591, + "learning_rate": 0.0008560122205983622, + "loss": 1.6614, + "step": 2980 + }, + { + "epoch": 0.27045908183632733, + "grad_norm": 0.12138380417593067, + "learning_rate": 0.0008559090397633925, + "loss": 1.6089, + "step": 2981 + }, + { + "epoch": 0.27054980947196516, + "grad_norm": 0.12098263210172613, + "learning_rate": 0.0008558058281956247, + "loss": 1.6321, + "step": 2982 + }, + { + "epoch": 0.270640537107603, + "grad_norm": 0.12322498187309308, + "learning_rate": 0.0008557025859039711, + "loss": 1.6214, + "step": 2983 + }, + { + "epoch": 0.2707312647432408, + "grad_norm": 0.1255236848106748, + "learning_rate": 0.0008555993128973468, + "loss": 1.6734, + "step": 2984 + }, + { + "epoch": 
0.2708219923788786, + "grad_norm": 0.12237529289121635, + "learning_rate": 0.0008554960091846695, + "loss": 1.6427, + "step": 2985 + }, + { + "epoch": 0.27091272001451644, + "grad_norm": 0.1204700586815098, + "learning_rate": 0.0008553926747748593, + "loss": 1.6478, + "step": 2986 + }, + { + "epoch": 0.2710034476501542, + "grad_norm": 0.12404252317217396, + "learning_rate": 0.0008552893096768394, + "loss": 1.7084, + "step": 2987 + }, + { + "epoch": 0.27109417528579205, + "grad_norm": 0.12144950767064351, + "learning_rate": 0.0008551859138995351, + "loss": 1.6597, + "step": 2988 + }, + { + "epoch": 0.2711849029214299, + "grad_norm": 0.1213983030103421, + "learning_rate": 0.0008550824874518749, + "loss": 1.6761, + "step": 2989 + }, + { + "epoch": 0.27127563055706766, + "grad_norm": 0.12104465772513703, + "learning_rate": 0.0008549790303427894, + "loss": 1.665, + "step": 2990 + }, + { + "epoch": 0.2713663581927055, + "grad_norm": 0.11234597354482158, + "learning_rate": 0.0008548755425812124, + "loss": 1.6183, + "step": 2991 + }, + { + "epoch": 0.2714570858283433, + "grad_norm": 0.11534638876345249, + "learning_rate": 0.0008547720241760801, + "loss": 1.6162, + "step": 2992 + }, + { + "epoch": 0.2715478134639811, + "grad_norm": 0.11650725739853199, + "learning_rate": 0.0008546684751363312, + "loss": 1.6569, + "step": 2993 + }, + { + "epoch": 0.27163854109961894, + "grad_norm": 0.11638813414031907, + "learning_rate": 0.0008545648954709071, + "loss": 1.6393, + "step": 2994 + }, + { + "epoch": 0.27172926873525677, + "grad_norm": 0.12328219523109155, + "learning_rate": 0.0008544612851887521, + "loss": 1.6439, + "step": 2995 + }, + { + "epoch": 0.2718199963708946, + "grad_norm": 0.11684440021071055, + "learning_rate": 0.0008543576442988128, + "loss": 1.6283, + "step": 2996 + }, + { + "epoch": 0.2719107240065324, + "grad_norm": 0.11832519734048409, + "learning_rate": 0.0008542539728100388, + "loss": 1.6645, + "step": 2997 + }, + { + "epoch": 0.2720014516421702, + "grad_norm": 0.11888962330279562, + "learning_rate": 0.000854150270731382, + "loss": 1.6625, + "step": 2998 + }, + { + "epoch": 0.27209217927780804, + "grad_norm": 0.12290305701861187, + "learning_rate": 0.000854046538071797, + "loss": 1.6292, + "step": 2999 + }, + { + "epoch": 0.2721829069134458, + "grad_norm": 0.11464179565765571, + "learning_rate": 0.0008539427748402415, + "loss": 1.6264, + "step": 3000 + }, + { + "epoch": 0.27227363454908365, + "grad_norm": 0.11757929978105748, + "learning_rate": 0.000853838981045675, + "loss": 1.6328, + "step": 3001 + }, + { + "epoch": 0.2723643621847215, + "grad_norm": 0.11689524466403793, + "learning_rate": 0.0008537351566970604, + "loss": 1.6336, + "step": 3002 + }, + { + "epoch": 0.27245508982035926, + "grad_norm": 0.11458077273009944, + "learning_rate": 0.0008536313018033629, + "loss": 1.6838, + "step": 3003 + }, + { + "epoch": 0.2725458174559971, + "grad_norm": 0.11653161675828907, + "learning_rate": 0.0008535274163735503, + "loss": 1.6853, + "step": 3004 + }, + { + "epoch": 0.27263654509163493, + "grad_norm": 0.11476046780260551, + "learning_rate": 0.0008534235004165933, + "loss": 1.6274, + "step": 3005 + }, + { + "epoch": 0.2727272727272727, + "grad_norm": 0.11742045153165175, + "learning_rate": 0.000853319553941465, + "loss": 1.6275, + "step": 3006 + }, + { + "epoch": 0.27281800036291054, + "grad_norm": 0.11519852294377508, + "learning_rate": 0.0008532155769571411, + "loss": 1.6065, + "step": 3007 + }, + { + "epoch": 0.2729087279985484, + "grad_norm": 0.11440258664153302, + "learning_rate": 
0.0008531115694726, + "loss": 1.675, + "step": 3008 + }, + { + "epoch": 0.27299945563418615, + "grad_norm": 0.11785479951192766, + "learning_rate": 0.0008530075314968228, + "loss": 1.6689, + "step": 3009 + }, + { + "epoch": 0.273090183269824, + "grad_norm": 0.12383509298505406, + "learning_rate": 0.0008529034630387933, + "loss": 1.6663, + "step": 3010 + }, + { + "epoch": 0.2731809109054618, + "grad_norm": 0.11719461639201273, + "learning_rate": 0.0008527993641074978, + "loss": 1.6445, + "step": 3011 + }, + { + "epoch": 0.2732716385410996, + "grad_norm": 0.11956703668253804, + "learning_rate": 0.000852695234711925, + "loss": 1.5915, + "step": 3012 + }, + { + "epoch": 0.2733623661767374, + "grad_norm": 0.12609126739412546, + "learning_rate": 0.000852591074861067, + "loss": 1.6322, + "step": 3013 + }, + { + "epoch": 0.27345309381237526, + "grad_norm": 0.11475064788234443, + "learning_rate": 0.0008524868845639175, + "loss": 1.5851, + "step": 3014 + }, + { + "epoch": 0.2735438214480131, + "grad_norm": 0.11785169102457392, + "learning_rate": 0.0008523826638294738, + "loss": 1.6657, + "step": 3015 + }, + { + "epoch": 0.27363454908365087, + "grad_norm": 0.11584820184275828, + "learning_rate": 0.0008522784126667349, + "loss": 1.6337, + "step": 3016 + }, + { + "epoch": 0.2737252767192887, + "grad_norm": 0.11574091159547932, + "learning_rate": 0.0008521741310847031, + "loss": 1.6489, + "step": 3017 + }, + { + "epoch": 0.27381600435492653, + "grad_norm": 0.1154426039530518, + "learning_rate": 0.0008520698190923834, + "loss": 1.7363, + "step": 3018 + }, + { + "epoch": 0.2739067319905643, + "grad_norm": 0.12006683704430339, + "learning_rate": 0.0008519654766987829, + "loss": 1.6723, + "step": 3019 + }, + { + "epoch": 0.27399745962620214, + "grad_norm": 0.11765356309210649, + "learning_rate": 0.0008518611039129114, + "loss": 1.6317, + "step": 3020 + }, + { + "epoch": 0.27408818726184, + "grad_norm": 0.11480604599715344, + "learning_rate": 0.000851756700743782, + "loss": 1.6471, + "step": 3021 + }, + { + "epoch": 0.27417891489747775, + "grad_norm": 0.11647282360213981, + "learning_rate": 0.0008516522672004094, + "loss": 1.6602, + "step": 3022 + }, + { + "epoch": 0.2742696425331156, + "grad_norm": 0.11304436058205114, + "learning_rate": 0.0008515478032918119, + "loss": 1.6218, + "step": 3023 + }, + { + "epoch": 0.2743603701687534, + "grad_norm": 0.1184414733252593, + "learning_rate": 0.0008514433090270096, + "loss": 1.6258, + "step": 3024 + }, + { + "epoch": 0.2744510978043912, + "grad_norm": 0.11360458255549544, + "learning_rate": 0.000851338784415026, + "loss": 1.6343, + "step": 3025 + }, + { + "epoch": 0.27454182544002903, + "grad_norm": 0.11592439557678615, + "learning_rate": 0.0008512342294648864, + "loss": 1.6627, + "step": 3026 + }, + { + "epoch": 0.27463255307566686, + "grad_norm": 0.11342463963888297, + "learning_rate": 0.0008511296441856193, + "loss": 1.651, + "step": 3027 + }, + { + "epoch": 0.27472328071130464, + "grad_norm": 0.11366375410522589, + "learning_rate": 0.0008510250285862557, + "loss": 1.6779, + "step": 3028 + }, + { + "epoch": 0.2748140083469425, + "grad_norm": 0.10928684295398203, + "learning_rate": 0.0008509203826758292, + "loss": 1.6585, + "step": 3029 + }, + { + "epoch": 0.2749047359825803, + "grad_norm": 0.11776014568793867, + "learning_rate": 0.000850815706463376, + "loss": 1.6694, + "step": 3030 + }, + { + "epoch": 0.2749954636182181, + "grad_norm": 0.11864657058440653, + "learning_rate": 0.0008507109999579348, + "loss": 1.6841, + "step": 3031 + }, + { + "epoch": 
0.2750861912538559, + "grad_norm": 0.11518823502770648, + "learning_rate": 0.0008506062631685469, + "loss": 1.6537, + "step": 3032 + }, + { + "epoch": 0.27517691888949375, + "grad_norm": 0.11915876585519358, + "learning_rate": 0.0008505014961042566, + "loss": 1.6443, + "step": 3033 + }, + { + "epoch": 0.2752676465251316, + "grad_norm": 0.11605296380343631, + "learning_rate": 0.0008503966987741105, + "loss": 1.6589, + "step": 3034 + }, + { + "epoch": 0.27535837416076936, + "grad_norm": 0.12363473917109524, + "learning_rate": 0.0008502918711871577, + "loss": 1.6714, + "step": 3035 + }, + { + "epoch": 0.2754491017964072, + "grad_norm": 0.11907179109836694, + "learning_rate": 0.0008501870133524503, + "loss": 1.6441, + "step": 3036 + }, + { + "epoch": 0.275539829432045, + "grad_norm": 0.12287454078996106, + "learning_rate": 0.0008500821252790427, + "loss": 1.6868, + "step": 3037 + }, + { + "epoch": 0.2756305570676828, + "grad_norm": 0.11782950305675052, + "learning_rate": 0.000849977206975992, + "loss": 1.628, + "step": 3038 + }, + { + "epoch": 0.27572128470332063, + "grad_norm": 0.11916935719936773, + "learning_rate": 0.0008498722584523578, + "loss": 1.618, + "step": 3039 + }, + { + "epoch": 0.27581201233895847, + "grad_norm": 0.11536163892459449, + "learning_rate": 0.0008497672797172026, + "loss": 1.6169, + "step": 3040 + }, + { + "epoch": 0.27590273997459624, + "grad_norm": 0.11344725949119597, + "learning_rate": 0.0008496622707795913, + "loss": 1.621, + "step": 3041 + }, + { + "epoch": 0.2759934676102341, + "grad_norm": 0.11945026860810828, + "learning_rate": 0.0008495572316485913, + "loss": 1.6713, + "step": 3042 + }, + { + "epoch": 0.2760841952458719, + "grad_norm": 0.12336790793681203, + "learning_rate": 0.0008494521623332727, + "loss": 1.6537, + "step": 3043 + }, + { + "epoch": 0.2761749228815097, + "grad_norm": 0.126964880500101, + "learning_rate": 0.0008493470628427085, + "loss": 1.6657, + "step": 3044 + }, + { + "epoch": 0.2762656505171475, + "grad_norm": 0.11428911246741201, + "learning_rate": 0.000849241933185974, + "loss": 1.672, + "step": 3045 + }, + { + "epoch": 0.27635637815278535, + "grad_norm": 0.1168474715077944, + "learning_rate": 0.0008491367733721471, + "loss": 1.6532, + "step": 3046 + }, + { + "epoch": 0.27644710578842313, + "grad_norm": 0.11639531972531225, + "learning_rate": 0.0008490315834103082, + "loss": 1.6199, + "step": 3047 + }, + { + "epoch": 0.27653783342406096, + "grad_norm": 0.12112827985515903, + "learning_rate": 0.0008489263633095407, + "loss": 1.6249, + "step": 3048 + }, + { + "epoch": 0.2766285610596988, + "grad_norm": 0.1145384871759643, + "learning_rate": 0.0008488211130789304, + "loss": 1.6623, + "step": 3049 + }, + { + "epoch": 0.2767192886953366, + "grad_norm": 0.1143046373342336, + "learning_rate": 0.0008487158327275655, + "loss": 1.6529, + "step": 3050 + }, + { + "epoch": 0.2768100163309744, + "grad_norm": 0.12250101049442669, + "learning_rate": 0.000848610522264537, + "loss": 1.6902, + "step": 3051 + }, + { + "epoch": 0.27690074396661224, + "grad_norm": 0.11696090193236054, + "learning_rate": 0.0008485051816989386, + "loss": 1.5913, + "step": 3052 + }, + { + "epoch": 0.2769914716022501, + "grad_norm": 0.11782081306612646, + "learning_rate": 0.0008483998110398665, + "loss": 1.6273, + "step": 3053 + }, + { + "epoch": 0.27708219923788785, + "grad_norm": 0.1178025969840187, + "learning_rate": 0.0008482944102964192, + "loss": 1.6539, + "step": 3054 + }, + { + "epoch": 0.2771729268735257, + "grad_norm": 0.12258015075509159, + "learning_rate": 
0.0008481889794776982, + "loss": 1.6155, + "step": 3055 + }, + { + "epoch": 0.2772636545091635, + "grad_norm": 0.11728204071287966, + "learning_rate": 0.0008480835185928075, + "loss": 1.6625, + "step": 3056 + }, + { + "epoch": 0.2773543821448013, + "grad_norm": 0.11878372866232215, + "learning_rate": 0.0008479780276508538, + "loss": 1.6382, + "step": 3057 + }, + { + "epoch": 0.2774451097804391, + "grad_norm": 0.11532234602504896, + "learning_rate": 0.0008478725066609461, + "loss": 1.57, + "step": 3058 + }, + { + "epoch": 0.27753583741607696, + "grad_norm": 0.12262839034740576, + "learning_rate": 0.0008477669556321961, + "loss": 1.6365, + "step": 3059 + }, + { + "epoch": 0.27762656505171474, + "grad_norm": 0.11498799498820259, + "learning_rate": 0.0008476613745737183, + "loss": 1.6134, + "step": 3060 + }, + { + "epoch": 0.27771729268735257, + "grad_norm": 0.11907344511740293, + "learning_rate": 0.0008475557634946296, + "loss": 1.5923, + "step": 3061 + }, + { + "epoch": 0.2778080203229904, + "grad_norm": 0.12209123103754829, + "learning_rate": 0.0008474501224040494, + "loss": 1.66, + "step": 3062 + }, + { + "epoch": 0.2778987479586282, + "grad_norm": 0.11916370950449673, + "learning_rate": 0.0008473444513110999, + "loss": 1.6197, + "step": 3063 + }, + { + "epoch": 0.277989475594266, + "grad_norm": 0.11574435884877225, + "learning_rate": 0.0008472387502249059, + "loss": 1.5933, + "step": 3064 + }, + { + "epoch": 0.27808020322990384, + "grad_norm": 0.11622306213522877, + "learning_rate": 0.0008471330191545947, + "loss": 1.6407, + "step": 3065 + }, + { + "epoch": 0.2781709308655416, + "grad_norm": 0.11614395484111621, + "learning_rate": 0.0008470272581092962, + "loss": 1.5902, + "step": 3066 + }, + { + "epoch": 0.27826165850117945, + "grad_norm": 0.11633472505838842, + "learning_rate": 0.0008469214670981425, + "loss": 1.6651, + "step": 3067 + }, + { + "epoch": 0.2783523861368173, + "grad_norm": 0.11279425832973089, + "learning_rate": 0.0008468156461302692, + "loss": 1.6374, + "step": 3068 + }, + { + "epoch": 0.27844311377245506, + "grad_norm": 0.11803528488728712, + "learning_rate": 0.0008467097952148138, + "loss": 1.647, + "step": 3069 + }, + { + "epoch": 0.2785338414080929, + "grad_norm": 0.12267501751815185, + "learning_rate": 0.0008466039143609163, + "loss": 1.6416, + "step": 3070 + }, + { + "epoch": 0.27862456904373073, + "grad_norm": 0.11428624020966198, + "learning_rate": 0.0008464980035777199, + "loss": 1.6524, + "step": 3071 + }, + { + "epoch": 0.27871529667936856, + "grad_norm": 0.12170462348651245, + "learning_rate": 0.0008463920628743697, + "loss": 1.6269, + "step": 3072 + }, + { + "epoch": 0.27880602431500634, + "grad_norm": 0.11663553352987063, + "learning_rate": 0.0008462860922600139, + "loss": 1.6232, + "step": 3073 + }, + { + "epoch": 0.2788967519506442, + "grad_norm": 0.1164852116406299, + "learning_rate": 0.0008461800917438029, + "loss": 1.6551, + "step": 3074 + }, + { + "epoch": 0.278987479586282, + "grad_norm": 0.1151648788568508, + "learning_rate": 0.0008460740613348899, + "loss": 1.6263, + "step": 3075 + }, + { + "epoch": 0.2790782072219198, + "grad_norm": 0.11741542389520886, + "learning_rate": 0.0008459680010424309, + "loss": 1.6529, + "step": 3076 + }, + { + "epoch": 0.2791689348575576, + "grad_norm": 0.1162106732760501, + "learning_rate": 0.0008458619108755839, + "loss": 1.6129, + "step": 3077 + }, + { + "epoch": 0.27925966249319545, + "grad_norm": 0.1183507516083421, + "learning_rate": 0.0008457557908435099, + "loss": 1.6551, + "step": 3078 + }, + { + "epoch": 
0.2793503901288332, + "grad_norm": 0.11772330320541134, + "learning_rate": 0.0008456496409553724, + "loss": 1.6722, + "step": 3079 + }, + { + "epoch": 0.27944111776447106, + "grad_norm": 0.11171547733941202, + "learning_rate": 0.0008455434612203377, + "loss": 1.6647, + "step": 3080 + }, + { + "epoch": 0.2795318454001089, + "grad_norm": 0.11716238476160588, + "learning_rate": 0.0008454372516475739, + "loss": 1.6853, + "step": 3081 + }, + { + "epoch": 0.27962257303574667, + "grad_norm": 0.11757594247882067, + "learning_rate": 0.0008453310122462526, + "loss": 1.6142, + "step": 3082 + }, + { + "epoch": 0.2797133006713845, + "grad_norm": 0.12032720562890133, + "learning_rate": 0.0008452247430255476, + "loss": 1.6391, + "step": 3083 + }, + { + "epoch": 0.27980402830702233, + "grad_norm": 0.11620486919089158, + "learning_rate": 0.0008451184439946348, + "loss": 1.6514, + "step": 3084 + }, + { + "epoch": 0.2798947559426601, + "grad_norm": 0.11421716807312589, + "learning_rate": 0.000845012115162694, + "loss": 1.6577, + "step": 3085 + }, + { + "epoch": 0.27998548357829794, + "grad_norm": 0.11523493824410674, + "learning_rate": 0.0008449057565389058, + "loss": 1.6605, + "step": 3086 + }, + { + "epoch": 0.2800762112139358, + "grad_norm": 0.11261456239769076, + "learning_rate": 0.000844799368132455, + "loss": 1.6229, + "step": 3087 + }, + { + "epoch": 0.28016693884957355, + "grad_norm": 0.11855720666400266, + "learning_rate": 0.0008446929499525277, + "loss": 1.6859, + "step": 3088 + }, + { + "epoch": 0.2802576664852114, + "grad_norm": 0.11509968928488684, + "learning_rate": 0.0008445865020083134, + "loss": 1.6102, + "step": 3089 + }, + { + "epoch": 0.2803483941208492, + "grad_norm": 0.12450893824804671, + "learning_rate": 0.0008444800243090039, + "loss": 1.631, + "step": 3090 + }, + { + "epoch": 0.28043912175648705, + "grad_norm": 0.11969312923324042, + "learning_rate": 0.0008443735168637936, + "loss": 1.6722, + "step": 3091 + }, + { + "epoch": 0.28052984939212483, + "grad_norm": 0.11533004719639282, + "learning_rate": 0.0008442669796818793, + "loss": 1.6384, + "step": 3092 + }, + { + "epoch": 0.28062057702776266, + "grad_norm": 0.11410109508696242, + "learning_rate": 0.0008441604127724607, + "loss": 1.6064, + "step": 3093 + }, + { + "epoch": 0.2807113046634005, + "grad_norm": 0.11908735753750425, + "learning_rate": 0.0008440538161447396, + "loss": 1.6549, + "step": 3094 + }, + { + "epoch": 0.2808020322990383, + "grad_norm": 0.11494134690853093, + "learning_rate": 0.0008439471898079207, + "loss": 1.6323, + "step": 3095 + }, + { + "epoch": 0.2808927599346761, + "grad_norm": 0.11875765673353265, + "learning_rate": 0.0008438405337712114, + "loss": 1.6349, + "step": 3096 + }, + { + "epoch": 0.28098348757031394, + "grad_norm": 0.11480542398415518, + "learning_rate": 0.0008437338480438214, + "loss": 1.6504, + "step": 3097 + }, + { + "epoch": 0.2810742152059517, + "grad_norm": 0.11023155413229474, + "learning_rate": 0.0008436271326349627, + "loss": 1.6549, + "step": 3098 + }, + { + "epoch": 0.28116494284158955, + "grad_norm": 0.11155656059556757, + "learning_rate": 0.0008435203875538506, + "loss": 1.618, + "step": 3099 + }, + { + "epoch": 0.2812556704772274, + "grad_norm": 0.11744141529524044, + "learning_rate": 0.0008434136128097025, + "loss": 1.6332, + "step": 3100 + }, + { + "epoch": 0.28134639811286516, + "grad_norm": 0.11864936146723877, + "learning_rate": 0.0008433068084117382, + "loss": 1.6497, + "step": 3101 + }, + { + "epoch": 0.281437125748503, + "grad_norm": 0.11948807187400629, + "learning_rate": 
0.0008431999743691804, + "loss": 1.6156, + "step": 3102 + }, + { + "epoch": 0.2815278533841408, + "grad_norm": 0.11576449456593998, + "learning_rate": 0.0008430931106912543, + "loss": 1.6373, + "step": 3103 + }, + { + "epoch": 0.2816185810197786, + "grad_norm": 0.11424465874493803, + "learning_rate": 0.0008429862173871873, + "loss": 1.6552, + "step": 3104 + }, + { + "epoch": 0.28170930865541643, + "grad_norm": 0.10956145090505305, + "learning_rate": 0.0008428792944662103, + "loss": 1.6308, + "step": 3105 + }, + { + "epoch": 0.28180003629105427, + "grad_norm": 0.11437574607685093, + "learning_rate": 0.0008427723419375553, + "loss": 1.6279, + "step": 3106 + }, + { + "epoch": 0.28189076392669205, + "grad_norm": 0.11489001124654152, + "learning_rate": 0.0008426653598104583, + "loss": 1.6368, + "step": 3107 + }, + { + "epoch": 0.2819814915623299, + "grad_norm": 0.11315861483135936, + "learning_rate": 0.0008425583480941568, + "loss": 1.6409, + "step": 3108 + }, + { + "epoch": 0.2820722191979677, + "grad_norm": 0.11625213026309375, + "learning_rate": 0.0008424513067978916, + "loss": 1.6426, + "step": 3109 + }, + { + "epoch": 0.28216294683360554, + "grad_norm": 0.12127972175849257, + "learning_rate": 0.0008423442359309054, + "loss": 1.6299, + "step": 3110 + }, + { + "epoch": 0.2822536744692433, + "grad_norm": 0.11122561870740463, + "learning_rate": 0.000842237135502444, + "loss": 1.6667, + "step": 3111 + }, + { + "epoch": 0.28234440210488115, + "grad_norm": 0.12105295176105141, + "learning_rate": 0.0008421300055217558, + "loss": 1.6448, + "step": 3112 + }, + { + "epoch": 0.282435129740519, + "grad_norm": 0.11599490789542571, + "learning_rate": 0.0008420228459980908, + "loss": 1.6144, + "step": 3113 + }, + { + "epoch": 0.28252585737615676, + "grad_norm": 0.11900695545396849, + "learning_rate": 0.000841915656940703, + "loss": 1.6291, + "step": 3114 + }, + { + "epoch": 0.2826165850117946, + "grad_norm": 0.1154061471680418, + "learning_rate": 0.0008418084383588476, + "loss": 1.665, + "step": 3115 + }, + { + "epoch": 0.28270731264743243, + "grad_norm": 0.11145640065773126, + "learning_rate": 0.0008417011902617833, + "loss": 1.651, + "step": 3116 + }, + { + "epoch": 0.2827980402830702, + "grad_norm": 0.11831980196651116, + "learning_rate": 0.0008415939126587706, + "loss": 1.5794, + "step": 3117 + }, + { + "epoch": 0.28288876791870804, + "grad_norm": 0.11706477156960567, + "learning_rate": 0.0008414866055590734, + "loss": 1.6455, + "step": 3118 + }, + { + "epoch": 0.2829794955543459, + "grad_norm": 0.12036684949120884, + "learning_rate": 0.0008413792689719575, + "loss": 1.693, + "step": 3119 + }, + { + "epoch": 0.28307022318998365, + "grad_norm": 0.12071035504459864, + "learning_rate": 0.0008412719029066915, + "loss": 1.6363, + "step": 3120 + }, + { + "epoch": 0.2831609508256215, + "grad_norm": 0.11636409748664805, + "learning_rate": 0.0008411645073725461, + "loss": 1.6372, + "step": 3121 + }, + { + "epoch": 0.2832516784612593, + "grad_norm": 0.11747393446243179, + "learning_rate": 0.0008410570823787954, + "loss": 1.6611, + "step": 3122 + }, + { + "epoch": 0.2833424060968971, + "grad_norm": 0.11410915701052618, + "learning_rate": 0.0008409496279347153, + "loss": 1.6513, + "step": 3123 + }, + { + "epoch": 0.2834331337325349, + "grad_norm": 0.12406984372285344, + "learning_rate": 0.0008408421440495847, + "loss": 1.6272, + "step": 3124 + }, + { + "epoch": 0.28352386136817276, + "grad_norm": 0.11813626963309473, + "learning_rate": 0.0008407346307326846, + "loss": 1.6347, + "step": 3125 + }, + { + "epoch": 
0.28361458900381054, + "grad_norm": 0.1279108658571367, + "learning_rate": 0.0008406270879932989, + "loss": 1.6557, + "step": 3126 + }, + { + "epoch": 0.28370531663944837, + "grad_norm": 0.11305578116937258, + "learning_rate": 0.0008405195158407142, + "loss": 1.6612, + "step": 3127 + }, + { + "epoch": 0.2837960442750862, + "grad_norm": 0.11656298492357153, + "learning_rate": 0.0008404119142842189, + "loss": 1.6358, + "step": 3128 + }, + { + "epoch": 0.28388677191072403, + "grad_norm": 0.11188857297951267, + "learning_rate": 0.0008403042833331045, + "loss": 1.654, + "step": 3129 + }, + { + "epoch": 0.2839774995463618, + "grad_norm": 0.11352265182415203, + "learning_rate": 0.0008401966229966653, + "loss": 1.6163, + "step": 3130 + }, + { + "epoch": 0.28406822718199964, + "grad_norm": 0.1124468566428149, + "learning_rate": 0.0008400889332841974, + "loss": 1.6696, + "step": 3131 + }, + { + "epoch": 0.2841589548176375, + "grad_norm": 0.11387543233710798, + "learning_rate": 0.0008399812142050003, + "loss": 1.6177, + "step": 3132 + }, + { + "epoch": 0.28424968245327525, + "grad_norm": 0.11598282145455716, + "learning_rate": 0.0008398734657683749, + "loss": 1.701, + "step": 3133 + }, + { + "epoch": 0.2843404100889131, + "grad_norm": 0.11841355105515006, + "learning_rate": 0.0008397656879836257, + "loss": 1.6729, + "step": 3134 + }, + { + "epoch": 0.2844311377245509, + "grad_norm": 0.11260919009238052, + "learning_rate": 0.0008396578808600594, + "loss": 1.6381, + "step": 3135 + }, + { + "epoch": 0.2845218653601887, + "grad_norm": 0.11313531826623464, + "learning_rate": 0.000839550044406985, + "loss": 1.6757, + "step": 3136 + }, + { + "epoch": 0.28461259299582653, + "grad_norm": 0.11722455100410002, + "learning_rate": 0.0008394421786337141, + "loss": 1.6098, + "step": 3137 + }, + { + "epoch": 0.28470332063146436, + "grad_norm": 0.11919819805288553, + "learning_rate": 0.0008393342835495612, + "loss": 1.6099, + "step": 3138 + }, + { + "epoch": 0.28479404826710214, + "grad_norm": 0.11561725122533643, + "learning_rate": 0.0008392263591638428, + "loss": 1.6198, + "step": 3139 + }, + { + "epoch": 0.28488477590274, + "grad_norm": 0.11467060792467314, + "learning_rate": 0.0008391184054858784, + "loss": 1.6484, + "step": 3140 + }, + { + "epoch": 0.2849755035383778, + "grad_norm": 0.1196543293365629, + "learning_rate": 0.0008390104225249894, + "loss": 1.6109, + "step": 3141 + }, + { + "epoch": 0.2850662311740156, + "grad_norm": 0.11486853854244072, + "learning_rate": 0.0008389024102905008, + "loss": 1.6663, + "step": 3142 + }, + { + "epoch": 0.2851569588096534, + "grad_norm": 0.11663054908541179, + "learning_rate": 0.000838794368791739, + "loss": 1.6243, + "step": 3143 + }, + { + "epoch": 0.28524768644529125, + "grad_norm": 0.11903062925032876, + "learning_rate": 0.0008386862980380335, + "loss": 1.6367, + "step": 3144 + }, + { + "epoch": 0.285338414080929, + "grad_norm": 0.12342145946863427, + "learning_rate": 0.0008385781980387163, + "loss": 1.6234, + "step": 3145 + }, + { + "epoch": 0.28542914171656686, + "grad_norm": 0.10917634294287024, + "learning_rate": 0.0008384700688031217, + "loss": 1.6707, + "step": 3146 + }, + { + "epoch": 0.2855198693522047, + "grad_norm": 0.11464404522978927, + "learning_rate": 0.0008383619103405868, + "loss": 1.6057, + "step": 3147 + }, + { + "epoch": 0.2856105969878425, + "grad_norm": 0.11412454842690467, + "learning_rate": 0.0008382537226604512, + "loss": 1.6682, + "step": 3148 + }, + { + "epoch": 0.2857013246234803, + "grad_norm": 0.11813162020586895, + "learning_rate": 
0.0008381455057720567, + "loss": 1.6421, + "step": 3149 + }, + { + "epoch": 0.28579205225911813, + "grad_norm": 0.1150290668830656, + "learning_rate": 0.0008380372596847479, + "loss": 1.6286, + "step": 3150 + }, + { + "epoch": 0.28588277989475597, + "grad_norm": 0.11894696991523049, + "learning_rate": 0.000837928984407872, + "loss": 1.6136, + "step": 3151 + }, + { + "epoch": 0.28597350753039374, + "grad_norm": 0.11809560056664199, + "learning_rate": 0.0008378206799507784, + "loss": 1.6481, + "step": 3152 + }, + { + "epoch": 0.2860642351660316, + "grad_norm": 0.117331934889814, + "learning_rate": 0.0008377123463228194, + "loss": 1.6021, + "step": 3153 + }, + { + "epoch": 0.2861549628016694, + "grad_norm": 0.12019073044711111, + "learning_rate": 0.0008376039835333495, + "loss": 1.6274, + "step": 3154 + }, + { + "epoch": 0.2862456904373072, + "grad_norm": 0.1220047506197571, + "learning_rate": 0.0008374955915917258, + "loss": 1.6659, + "step": 3155 + }, + { + "epoch": 0.286336418072945, + "grad_norm": 0.11655002460601466, + "learning_rate": 0.0008373871705073079, + "loss": 1.6143, + "step": 3156 + }, + { + "epoch": 0.28642714570858285, + "grad_norm": 0.1161073784367509, + "learning_rate": 0.0008372787202894583, + "loss": 1.6321, + "step": 3157 + }, + { + "epoch": 0.28651787334422063, + "grad_norm": 0.11446737218036326, + "learning_rate": 0.0008371702409475415, + "loss": 1.6523, + "step": 3158 + }, + { + "epoch": 0.28660860097985846, + "grad_norm": 0.11401738177514528, + "learning_rate": 0.0008370617324909246, + "loss": 1.6414, + "step": 3159 + }, + { + "epoch": 0.2866993286154963, + "grad_norm": 0.11499221318227168, + "learning_rate": 0.0008369531949289775, + "loss": 1.6145, + "step": 3160 + }, + { + "epoch": 0.2867900562511341, + "grad_norm": 0.11770829007697171, + "learning_rate": 0.0008368446282710723, + "loss": 1.6499, + "step": 3161 + }, + { + "epoch": 0.2868807838867719, + "grad_norm": 0.11996146262420951, + "learning_rate": 0.0008367360325265838, + "loss": 1.6468, + "step": 3162 + }, + { + "epoch": 0.28697151152240974, + "grad_norm": 0.11864435052983069, + "learning_rate": 0.0008366274077048894, + "loss": 1.6499, + "step": 3163 + }, + { + "epoch": 0.2870622391580475, + "grad_norm": 0.11164076138509704, + "learning_rate": 0.0008365187538153685, + "loss": 1.6194, + "step": 3164 + }, + { + "epoch": 0.28715296679368535, + "grad_norm": 0.11459763766458274, + "learning_rate": 0.0008364100708674038, + "loss": 1.6118, + "step": 3165 + }, + { + "epoch": 0.2872436944293232, + "grad_norm": 0.11618675797498466, + "learning_rate": 0.0008363013588703798, + "loss": 1.6419, + "step": 3166 + }, + { + "epoch": 0.287334422064961, + "grad_norm": 0.11668294935572061, + "learning_rate": 0.000836192617833684, + "loss": 1.6403, + "step": 3167 + }, + { + "epoch": 0.2874251497005988, + "grad_norm": 0.1171937203818761, + "learning_rate": 0.0008360838477667058, + "loss": 1.6263, + "step": 3168 + }, + { + "epoch": 0.2875158773362366, + "grad_norm": 0.11351084043544399, + "learning_rate": 0.0008359750486788382, + "loss": 1.6559, + "step": 3169 + }, + { + "epoch": 0.28760660497187446, + "grad_norm": 0.11597306092049835, + "learning_rate": 0.0008358662205794754, + "loss": 1.6663, + "step": 3170 + }, + { + "epoch": 0.28769733260751224, + "grad_norm": 0.11806158776207337, + "learning_rate": 0.0008357573634780152, + "loss": 1.6491, + "step": 3171 + }, + { + "epoch": 0.28778806024315007, + "grad_norm": 0.11625263488830585, + "learning_rate": 0.0008356484773838569, + "loss": 1.6313, + "step": 3172 + }, + { + "epoch": 
0.2878787878787879, + "grad_norm": 0.11645246253560333, + "learning_rate": 0.0008355395623064031, + "loss": 1.5586, + "step": 3173 + }, + { + "epoch": 0.2879695155144257, + "grad_norm": 0.12007203304123272, + "learning_rate": 0.0008354306182550589, + "loss": 1.6746, + "step": 3174 + }, + { + "epoch": 0.2880602431500635, + "grad_norm": 0.11733088447972352, + "learning_rate": 0.0008353216452392312, + "loss": 1.6259, + "step": 3175 + }, + { + "epoch": 0.28815097078570134, + "grad_norm": 0.11389073028321245, + "learning_rate": 0.0008352126432683299, + "loss": 1.6926, + "step": 3176 + }, + { + "epoch": 0.2882416984213391, + "grad_norm": 0.1174689216136462, + "learning_rate": 0.0008351036123517677, + "loss": 1.6254, + "step": 3177 + }, + { + "epoch": 0.28833242605697695, + "grad_norm": 0.1197052447652965, + "learning_rate": 0.0008349945524989589, + "loss": 1.6447, + "step": 3178 + }, + { + "epoch": 0.2884231536926148, + "grad_norm": 0.11741913539093006, + "learning_rate": 0.0008348854637193212, + "loss": 1.6296, + "step": 3179 + }, + { + "epoch": 0.28851388132825256, + "grad_norm": 0.11757856725608883, + "learning_rate": 0.0008347763460222746, + "loss": 1.6761, + "step": 3180 + }, + { + "epoch": 0.2886046089638904, + "grad_norm": 0.11612564079426603, + "learning_rate": 0.0008346671994172411, + "loss": 1.6808, + "step": 3181 + }, + { + "epoch": 0.28869533659952823, + "grad_norm": 0.12093609980987784, + "learning_rate": 0.0008345580239136455, + "loss": 1.6249, + "step": 3182 + }, + { + "epoch": 0.288786064235166, + "grad_norm": 0.11979110399091751, + "learning_rate": 0.0008344488195209152, + "loss": 1.6383, + "step": 3183 + }, + { + "epoch": 0.28887679187080384, + "grad_norm": 0.11430866714756072, + "learning_rate": 0.0008343395862484799, + "loss": 1.6655, + "step": 3184 + }, + { + "epoch": 0.2889675195064417, + "grad_norm": 0.11236292714581803, + "learning_rate": 0.0008342303241057723, + "loss": 1.6494, + "step": 3185 + }, + { + "epoch": 0.28905824714207945, + "grad_norm": 0.1143898503846638, + "learning_rate": 0.000834121033102227, + "loss": 1.6357, + "step": 3186 + }, + { + "epoch": 0.2891489747777173, + "grad_norm": 0.10918310899498425, + "learning_rate": 0.0008340117132472811, + "loss": 1.6573, + "step": 3187 + }, + { + "epoch": 0.2892397024133551, + "grad_norm": 0.11493603193923575, + "learning_rate": 0.0008339023645503745, + "loss": 1.6667, + "step": 3188 + }, + { + "epoch": 0.28933043004899295, + "grad_norm": 0.11402208710346333, + "learning_rate": 0.0008337929870209495, + "loss": 1.5957, + "step": 3189 + }, + { + "epoch": 0.2894211576846307, + "grad_norm": 0.11250367881242236, + "learning_rate": 0.000833683580668451, + "loss": 1.6575, + "step": 3190 + }, + { + "epoch": 0.28951188532026856, + "grad_norm": 0.120616223821837, + "learning_rate": 0.0008335741455023261, + "loss": 1.6432, + "step": 3191 + }, + { + "epoch": 0.2896026129559064, + "grad_norm": 0.11617529234321924, + "learning_rate": 0.0008334646815320246, + "loss": 1.6677, + "step": 3192 + }, + { + "epoch": 0.28969334059154417, + "grad_norm": 0.12023836697635738, + "learning_rate": 0.0008333551887669987, + "loss": 1.6403, + "step": 3193 + }, + { + "epoch": 0.289784068227182, + "grad_norm": 0.12338468542233973, + "learning_rate": 0.000833245667216703, + "loss": 1.6864, + "step": 3194 + }, + { + "epoch": 0.28987479586281983, + "grad_norm": 0.11666913998604074, + "learning_rate": 0.0008331361168905949, + "loss": 1.6336, + "step": 3195 + }, + { + "epoch": 0.2899655234984576, + "grad_norm": 0.11820364407095621, + "learning_rate": 
0.000833026537798134, + "loss": 1.6396, + "step": 3196 + }, + { + "epoch": 0.29005625113409544, + "grad_norm": 0.11788113445275006, + "learning_rate": 0.0008329169299487824, + "loss": 1.6245, + "step": 3197 + }, + { + "epoch": 0.2901469787697333, + "grad_norm": 0.11336422233934601, + "learning_rate": 0.000832807293352005, + "loss": 1.636, + "step": 3198 + }, + { + "epoch": 0.29023770640537105, + "grad_norm": 0.10817501607051683, + "learning_rate": 0.0008326976280172687, + "loss": 1.6288, + "step": 3199 + }, + { + "epoch": 0.2903284340410089, + "grad_norm": 0.11422255846035807, + "learning_rate": 0.0008325879339540432, + "loss": 1.6884, + "step": 3200 + }, + { + "epoch": 0.2904191616766467, + "grad_norm": 0.11699118249603874, + "learning_rate": 0.0008324782111718005, + "loss": 1.6735, + "step": 3201 + }, + { + "epoch": 0.2905098893122845, + "grad_norm": 0.11438682999071835, + "learning_rate": 0.0008323684596800154, + "loss": 1.628, + "step": 3202 + }, + { + "epoch": 0.29060061694792233, + "grad_norm": 0.11614553258594423, + "learning_rate": 0.0008322586794881646, + "loss": 1.6086, + "step": 3203 + }, + { + "epoch": 0.29069134458356016, + "grad_norm": 0.11683112409194045, + "learning_rate": 0.000832148870605728, + "loss": 1.6287, + "step": 3204 + }, + { + "epoch": 0.29078207221919794, + "grad_norm": 0.11345872836702904, + "learning_rate": 0.0008320390330421875, + "loss": 1.6084, + "step": 3205 + }, + { + "epoch": 0.2908727998548358, + "grad_norm": 0.10987890764630509, + "learning_rate": 0.0008319291668070274, + "loss": 1.685, + "step": 3206 + }, + { + "epoch": 0.2909635274904736, + "grad_norm": 0.10789791943090268, + "learning_rate": 0.0008318192719097351, + "loss": 1.6249, + "step": 3207 + }, + { + "epoch": 0.29105425512611144, + "grad_norm": 0.11330201977263336, + "learning_rate": 0.0008317093483597995, + "loss": 1.6324, + "step": 3208 + }, + { + "epoch": 0.2911449827617492, + "grad_norm": 0.11496300351928568, + "learning_rate": 0.000831599396166713, + "loss": 1.6545, + "step": 3209 + }, + { + "epoch": 0.29123571039738705, + "grad_norm": 0.11157964776811143, + "learning_rate": 0.0008314894153399697, + "loss": 1.5921, + "step": 3210 + }, + { + "epoch": 0.2913264380330249, + "grad_norm": 0.11473086859237153, + "learning_rate": 0.0008313794058890664, + "loss": 1.6055, + "step": 3211 + }, + { + "epoch": 0.29141716566866266, + "grad_norm": 0.11520433630366386, + "learning_rate": 0.0008312693678235026, + "loss": 1.6306, + "step": 3212 + }, + { + "epoch": 0.2915078933043005, + "grad_norm": 0.11114786525049569, + "learning_rate": 0.0008311593011527802, + "loss": 1.704, + "step": 3213 + }, + { + "epoch": 0.2915986209399383, + "grad_norm": 0.11816545011758296, + "learning_rate": 0.0008310492058864032, + "loss": 1.6445, + "step": 3214 + }, + { + "epoch": 0.2916893485755761, + "grad_norm": 0.11831112590768438, + "learning_rate": 0.0008309390820338784, + "loss": 1.6141, + "step": 3215 + }, + { + "epoch": 0.29178007621121393, + "grad_norm": 0.11226642927955767, + "learning_rate": 0.0008308289296047154, + "loss": 1.6638, + "step": 3216 + }, + { + "epoch": 0.29187080384685177, + "grad_norm": 0.114568186389413, + "learning_rate": 0.0008307187486084253, + "loss": 1.6736, + "step": 3217 + }, + { + "epoch": 0.29196153148248954, + "grad_norm": 0.11377630788696394, + "learning_rate": 0.0008306085390545226, + "loss": 1.6672, + "step": 3218 + }, + { + "epoch": 0.2920522591181274, + "grad_norm": 0.11611259495087621, + "learning_rate": 0.0008304983009525239, + "loss": 1.6776, + "step": 3219 + }, + { + "epoch": 
0.2921429867537652, + "grad_norm": 0.11486805013404751, + "learning_rate": 0.0008303880343119481, + "loss": 1.6553, + "step": 3220 + }, + { + "epoch": 0.292233714389403, + "grad_norm": 0.11815377217098667, + "learning_rate": 0.0008302777391423168, + "loss": 1.6772, + "step": 3221 + }, + { + "epoch": 0.2923244420250408, + "grad_norm": 0.11606815945764454, + "learning_rate": 0.0008301674154531542, + "loss": 1.625, + "step": 3222 + }, + { + "epoch": 0.29241516966067865, + "grad_norm": 0.11588464513162547, + "learning_rate": 0.0008300570632539865, + "loss": 1.6624, + "step": 3223 + }, + { + "epoch": 0.29250589729631643, + "grad_norm": 0.11275370540300661, + "learning_rate": 0.0008299466825543429, + "loss": 1.6321, + "step": 3224 + }, + { + "epoch": 0.29259662493195426, + "grad_norm": 0.11182902364544806, + "learning_rate": 0.0008298362733637544, + "loss": 1.5938, + "step": 3225 + }, + { + "epoch": 0.2926873525675921, + "grad_norm": 0.11479756242868545, + "learning_rate": 0.0008297258356917552, + "loss": 1.5782, + "step": 3226 + }, + { + "epoch": 0.29277808020322993, + "grad_norm": 0.1121466110072503, + "learning_rate": 0.0008296153695478816, + "loss": 1.6319, + "step": 3227 + }, + { + "epoch": 0.2928688078388677, + "grad_norm": 0.11098646443166171, + "learning_rate": 0.0008295048749416721, + "loss": 1.6284, + "step": 3228 + }, + { + "epoch": 0.29295953547450554, + "grad_norm": 0.11467551441029637, + "learning_rate": 0.0008293943518826681, + "loss": 1.6443, + "step": 3229 + }, + { + "epoch": 0.29305026311014337, + "grad_norm": 0.1154935729496479, + "learning_rate": 0.0008292838003804133, + "loss": 1.6716, + "step": 3230 + }, + { + "epoch": 0.29314099074578115, + "grad_norm": 0.11268645740924997, + "learning_rate": 0.0008291732204444537, + "loss": 1.6478, + "step": 3231 + }, + { + "epoch": 0.293231718381419, + "grad_norm": 0.11139365510212244, + "learning_rate": 0.0008290626120843382, + "loss": 1.6594, + "step": 3232 + }, + { + "epoch": 0.2933224460170568, + "grad_norm": 0.11983739604611758, + "learning_rate": 0.0008289519753096175, + "loss": 1.6333, + "step": 3233 + }, + { + "epoch": 0.2934131736526946, + "grad_norm": 0.1169143392022053, + "learning_rate": 0.0008288413101298453, + "loss": 1.6787, + "step": 3234 + }, + { + "epoch": 0.2935039012883324, + "grad_norm": 0.11894591885408563, + "learning_rate": 0.0008287306165545776, + "loss": 1.6604, + "step": 3235 + }, + { + "epoch": 0.29359462892397026, + "grad_norm": 0.11959543657656797, + "learning_rate": 0.0008286198945933725, + "loss": 1.616, + "step": 3236 + }, + { + "epoch": 0.29368535655960804, + "grad_norm": 0.11856843071506, + "learning_rate": 0.0008285091442557913, + "loss": 1.6306, + "step": 3237 + }, + { + "epoch": 0.29377608419524587, + "grad_norm": 0.11515038660554794, + "learning_rate": 0.0008283983655513969, + "loss": 1.6202, + "step": 3238 + }, + { + "epoch": 0.2938668118308837, + "grad_norm": 0.11545912094728417, + "learning_rate": 0.0008282875584897553, + "loss": 1.6339, + "step": 3239 + }, + { + "epoch": 0.2939575394665215, + "grad_norm": 0.11385850988764286, + "learning_rate": 0.0008281767230804346, + "loss": 1.6185, + "step": 3240 + }, + { + "epoch": 0.2940482671021593, + "grad_norm": 0.12541077428101233, + "learning_rate": 0.0008280658593330056, + "loss": 1.6389, + "step": 3241 + }, + { + "epoch": 0.29413899473779714, + "grad_norm": 0.11063736588466491, + "learning_rate": 0.0008279549672570413, + "loss": 1.5764, + "step": 3242 + }, + { + "epoch": 0.2942297223734349, + "grad_norm": 0.1162004089290839, + "learning_rate": 
0.0008278440468621172, + "loss": 1.6759, + "step": 3243 + }, + { + "epoch": 0.29432045000907275, + "grad_norm": 0.11364499124239465, + "learning_rate": 0.0008277330981578112, + "loss": 1.6284, + "step": 3244 + }, + { + "epoch": 0.2944111776447106, + "grad_norm": 0.1187773516632957, + "learning_rate": 0.0008276221211537039, + "loss": 1.6216, + "step": 3245 + }, + { + "epoch": 0.2945019052803484, + "grad_norm": 0.11513872715831025, + "learning_rate": 0.0008275111158593783, + "loss": 1.6401, + "step": 3246 + }, + { + "epoch": 0.2945926329159862, + "grad_norm": 0.11296855112597019, + "learning_rate": 0.0008274000822844195, + "loss": 1.6118, + "step": 3247 + }, + { + "epoch": 0.29468336055162403, + "grad_norm": 0.11582703575684651, + "learning_rate": 0.0008272890204384152, + "loss": 1.6424, + "step": 3248 + }, + { + "epoch": 0.29477408818726186, + "grad_norm": 0.11338496137521552, + "learning_rate": 0.0008271779303309561, + "loss": 1.6272, + "step": 3249 + }, + { + "epoch": 0.29486481582289964, + "grad_norm": 0.11932201661220151, + "learning_rate": 0.000827066811971634, + "loss": 1.6012, + "step": 3250 + }, + { + "epoch": 0.2949555434585375, + "grad_norm": 0.11139835513634347, + "learning_rate": 0.0008269556653700449, + "loss": 1.6011, + "step": 3251 + }, + { + "epoch": 0.2950462710941753, + "grad_norm": 0.10802954665241511, + "learning_rate": 0.0008268444905357857, + "loss": 1.6219, + "step": 3252 + }, + { + "epoch": 0.2951369987298131, + "grad_norm": 0.11486556178128189, + "learning_rate": 0.0008267332874784568, + "loss": 1.6141, + "step": 3253 + }, + { + "epoch": 0.2952277263654509, + "grad_norm": 0.11538855263338804, + "learning_rate": 0.0008266220562076601, + "loss": 1.6387, + "step": 3254 + }, + { + "epoch": 0.29531845400108875, + "grad_norm": 0.1106282568015656, + "learning_rate": 0.0008265107967330008, + "loss": 1.6518, + "step": 3255 + }, + { + "epoch": 0.2954091816367265, + "grad_norm": 0.11752586795297958, + "learning_rate": 0.0008263995090640861, + "loss": 1.656, + "step": 3256 + }, + { + "epoch": 0.29549990927236436, + "grad_norm": 0.11967670614241134, + "learning_rate": 0.0008262881932105257, + "loss": 1.6142, + "step": 3257 + }, + { + "epoch": 0.2955906369080022, + "grad_norm": 0.11875442737637121, + "learning_rate": 0.0008261768491819317, + "loss": 1.6416, + "step": 3258 + }, + { + "epoch": 0.29568136454363997, + "grad_norm": 0.1144272706674996, + "learning_rate": 0.0008260654769879186, + "loss": 1.6578, + "step": 3259 + }, + { + "epoch": 0.2957720921792778, + "grad_norm": 0.112419646811609, + "learning_rate": 0.0008259540766381037, + "loss": 1.6611, + "step": 3260 + }, + { + "epoch": 0.29586281981491563, + "grad_norm": 0.11445929299147699, + "learning_rate": 0.0008258426481421062, + "loss": 1.6595, + "step": 3261 + }, + { + "epoch": 0.2959535474505534, + "grad_norm": 0.1180944296402275, + "learning_rate": 0.0008257311915095478, + "loss": 1.6735, + "step": 3262 + }, + { + "epoch": 0.29604427508619124, + "grad_norm": 0.11336873260799263, + "learning_rate": 0.0008256197067500533, + "loss": 1.6535, + "step": 3263 + }, + { + "epoch": 0.2961350027218291, + "grad_norm": 0.11113148177631253, + "learning_rate": 0.0008255081938732489, + "loss": 1.6482, + "step": 3264 + }, + { + "epoch": 0.2962257303574669, + "grad_norm": 0.1173358847863042, + "learning_rate": 0.000825396652888764, + "loss": 1.6415, + "step": 3265 + }, + { + "epoch": 0.2963164579931047, + "grad_norm": 0.11179405205612176, + "learning_rate": 0.0008252850838062304, + "loss": 1.6236, + "step": 3266 + }, + { + "epoch": 
0.2964071856287425, + "grad_norm": 0.11345601305931088, + "learning_rate": 0.0008251734866352817, + "loss": 1.6228, + "step": 3267 + }, + { + "epoch": 0.29649791326438035, + "grad_norm": 0.12125681736382993, + "learning_rate": 0.0008250618613855546, + "loss": 1.6441, + "step": 3268 + }, + { + "epoch": 0.29658864090001813, + "grad_norm": 0.11256291893414276, + "learning_rate": 0.0008249502080666878, + "loss": 1.6683, + "step": 3269 + }, + { + "epoch": 0.29667936853565596, + "grad_norm": 0.11338833292106981, + "learning_rate": 0.0008248385266883225, + "loss": 1.6541, + "step": 3270 + }, + { + "epoch": 0.2967700961712938, + "grad_norm": 0.11661988172644899, + "learning_rate": 0.0008247268172601028, + "loss": 1.6639, + "step": 3271 + }, + { + "epoch": 0.2968608238069316, + "grad_norm": 0.11238873456080985, + "learning_rate": 0.0008246150797916742, + "loss": 1.6788, + "step": 3272 + }, + { + "epoch": 0.2969515514425694, + "grad_norm": 0.11133820524588699, + "learning_rate": 0.000824503314292686, + "loss": 1.6317, + "step": 3273 + }, + { + "epoch": 0.29704227907820724, + "grad_norm": 0.11694517507650896, + "learning_rate": 0.0008243915207727886, + "loss": 1.5961, + "step": 3274 + }, + { + "epoch": 0.297133006713845, + "grad_norm": 0.11148209925726045, + "learning_rate": 0.0008242796992416358, + "loss": 1.6795, + "step": 3275 + }, + { + "epoch": 0.29722373434948285, + "grad_norm": 0.11095501227294778, + "learning_rate": 0.0008241678497088829, + "loss": 1.6303, + "step": 3276 + }, + { + "epoch": 0.2973144619851207, + "grad_norm": 0.11440946220404129, + "learning_rate": 0.0008240559721841884, + "loss": 1.5948, + "step": 3277 + }, + { + "epoch": 0.29740518962075846, + "grad_norm": 0.11544053931456924, + "learning_rate": 0.000823944066677213, + "loss": 1.5964, + "step": 3278 + }, + { + "epoch": 0.2974959172563963, + "grad_norm": 0.11625405850765286, + "learning_rate": 0.0008238321331976197, + "loss": 1.6668, + "step": 3279 + }, + { + "epoch": 0.2975866448920341, + "grad_norm": 0.11038760129601466, + "learning_rate": 0.000823720171755074, + "loss": 1.6008, + "step": 3280 + }, + { + "epoch": 0.2976773725276719, + "grad_norm": 0.1211168543694061, + "learning_rate": 0.0008236081823592437, + "loss": 1.6294, + "step": 3281 + }, + { + "epoch": 0.29776810016330973, + "grad_norm": 0.11573719151543065, + "learning_rate": 0.0008234961650197993, + "loss": 1.6239, + "step": 3282 + }, + { + "epoch": 0.29785882779894757, + "grad_norm": 0.1162465380914093, + "learning_rate": 0.0008233841197464133, + "loss": 1.6259, + "step": 3283 + }, + { + "epoch": 0.2979495554345854, + "grad_norm": 0.11644812134092314, + "learning_rate": 0.0008232720465487608, + "loss": 1.6645, + "step": 3284 + }, + { + "epoch": 0.2980402830702232, + "grad_norm": 0.11425908608253715, + "learning_rate": 0.0008231599454365195, + "loss": 1.6429, + "step": 3285 + }, + { + "epoch": 0.298131010705861, + "grad_norm": 0.11562781131965626, + "learning_rate": 0.0008230478164193693, + "loss": 1.6199, + "step": 3286 + }, + { + "epoch": 0.29822173834149884, + "grad_norm": 0.115044090714436, + "learning_rate": 0.0008229356595069925, + "loss": 1.6555, + "step": 3287 + }, + { + "epoch": 0.2983124659771366, + "grad_norm": 0.1183258762951398, + "learning_rate": 0.0008228234747090741, + "loss": 1.6281, + "step": 3288 + }, + { + "epoch": 0.29840319361277445, + "grad_norm": 0.11149700263127273, + "learning_rate": 0.0008227112620353007, + "loss": 1.6174, + "step": 3289 + }, + { + "epoch": 0.2984939212484123, + "grad_norm": 0.11079622948099328, + "learning_rate": 
0.0008225990214953627, + "loss": 1.5795, + "step": 3290 + }, + { + "epoch": 0.29858464888405006, + "grad_norm": 0.1171221427580354, + "learning_rate": 0.0008224867530989513, + "loss": 1.6587, + "step": 3291 + }, + { + "epoch": 0.2986753765196879, + "grad_norm": 0.10842195226545541, + "learning_rate": 0.0008223744568557614, + "loss": 1.6363, + "step": 3292 + }, + { + "epoch": 0.29876610415532573, + "grad_norm": 0.11442799855866098, + "learning_rate": 0.0008222621327754895, + "loss": 1.6497, + "step": 3293 + }, + { + "epoch": 0.2988568317909635, + "grad_norm": 0.11067802244625471, + "learning_rate": 0.0008221497808678352, + "loss": 1.6631, + "step": 3294 + }, + { + "epoch": 0.29894755942660134, + "grad_norm": 0.11068726821104366, + "learning_rate": 0.0008220374011424997, + "loss": 1.6739, + "step": 3295 + }, + { + "epoch": 0.29903828706223917, + "grad_norm": 0.1102572286195157, + "learning_rate": 0.0008219249936091871, + "loss": 1.6618, + "step": 3296 + }, + { + "epoch": 0.29912901469787695, + "grad_norm": 0.1161454716989206, + "learning_rate": 0.0008218125582776039, + "loss": 1.6138, + "step": 3297 + }, + { + "epoch": 0.2992197423335148, + "grad_norm": 0.1115755431057583, + "learning_rate": 0.0008217000951574589, + "loss": 1.6361, + "step": 3298 + }, + { + "epoch": 0.2993104699691526, + "grad_norm": 0.11411263625501802, + "learning_rate": 0.0008215876042584633, + "loss": 1.5927, + "step": 3299 + }, + { + "epoch": 0.2994011976047904, + "grad_norm": 0.11526356316975947, + "learning_rate": 0.0008214750855903306, + "loss": 1.6899, + "step": 3300 + }, + { + "epoch": 0.2994919252404282, + "grad_norm": 0.11563772788923907, + "learning_rate": 0.0008213625391627767, + "loss": 1.6346, + "step": 3301 + }, + { + "epoch": 0.29958265287606606, + "grad_norm": 0.1158448087890971, + "learning_rate": 0.0008212499649855204, + "loss": 1.6231, + "step": 3302 + }, + { + "epoch": 0.2996733805117039, + "grad_norm": 0.11579030433524055, + "learning_rate": 0.0008211373630682822, + "loss": 1.6163, + "step": 3303 + }, + { + "epoch": 0.29976410814734167, + "grad_norm": 0.11341084111942533, + "learning_rate": 0.0008210247334207854, + "loss": 1.6274, + "step": 3304 + }, + { + "epoch": 0.2998548357829795, + "grad_norm": 0.10895143509473214, + "learning_rate": 0.0008209120760527555, + "loss": 1.6167, + "step": 3305 + }, + { + "epoch": 0.29994556341861733, + "grad_norm": 0.11114874094912329, + "learning_rate": 0.0008207993909739207, + "loss": 1.6862, + "step": 3306 + }, + { + "epoch": 0.3000362910542551, + "grad_norm": 0.11512126539646184, + "learning_rate": 0.0008206866781940111, + "loss": 1.6397, + "step": 3307 + }, + { + "epoch": 0.30012701868989294, + "grad_norm": 0.11313324244434185, + "learning_rate": 0.0008205739377227595, + "loss": 1.616, + "step": 3308 + }, + { + "epoch": 0.3002177463255308, + "grad_norm": 0.11109934528787756, + "learning_rate": 0.0008204611695699013, + "loss": 1.6363, + "step": 3309 + }, + { + "epoch": 0.30030847396116855, + "grad_norm": 0.11327847886666426, + "learning_rate": 0.0008203483737451737, + "loss": 1.6511, + "step": 3310 + }, + { + "epoch": 0.3003992015968064, + "grad_norm": 0.11850519668322221, + "learning_rate": 0.000820235550258317, + "loss": 1.6296, + "step": 3311 + }, + { + "epoch": 0.3004899292324442, + "grad_norm": 0.11347727546827008, + "learning_rate": 0.0008201226991190731, + "loss": 1.6668, + "step": 3312 + }, + { + "epoch": 0.300580656868082, + "grad_norm": 0.11556755550811605, + "learning_rate": 0.0008200098203371871, + "loss": 1.6234, + "step": 3313 + }, + { + "epoch": 
0.30067138450371983, + "grad_norm": 0.11166009638121412, + "learning_rate": 0.0008198969139224059, + "loss": 1.6584, + "step": 3314 + }, + { + "epoch": 0.30076211213935766, + "grad_norm": 0.11290102196165648, + "learning_rate": 0.0008197839798844791, + "loss": 1.6582, + "step": 3315 + }, + { + "epoch": 0.30085283977499544, + "grad_norm": 0.11370761500111252, + "learning_rate": 0.0008196710182331585, + "loss": 1.6653, + "step": 3316 + }, + { + "epoch": 0.3009435674106333, + "grad_norm": 0.11673621074350947, + "learning_rate": 0.0008195580289781983, + "loss": 1.607, + "step": 3317 + }, + { + "epoch": 0.3010342950462711, + "grad_norm": 0.11515687654914093, + "learning_rate": 0.0008194450121293553, + "loss": 1.7197, + "step": 3318 + }, + { + "epoch": 0.3011250226819089, + "grad_norm": 0.11084475925668169, + "learning_rate": 0.0008193319676963884, + "loss": 1.5706, + "step": 3319 + }, + { + "epoch": 0.3012157503175467, + "grad_norm": 0.11269464245210148, + "learning_rate": 0.0008192188956890589, + "loss": 1.6458, + "step": 3320 + }, + { + "epoch": 0.30130647795318455, + "grad_norm": 0.11859806061321093, + "learning_rate": 0.0008191057961171308, + "loss": 1.6351, + "step": 3321 + }, + { + "epoch": 0.3013972055888224, + "grad_norm": 0.11341543904637733, + "learning_rate": 0.0008189926689903702, + "loss": 1.6006, + "step": 3322 + }, + { + "epoch": 0.30148793322446016, + "grad_norm": 0.11296429190681746, + "learning_rate": 0.0008188795143185454, + "loss": 1.6519, + "step": 3323 + }, + { + "epoch": 0.301578660860098, + "grad_norm": 0.11894413123247538, + "learning_rate": 0.0008187663321114278, + "loss": 1.6512, + "step": 3324 + }, + { + "epoch": 0.3016693884957358, + "grad_norm": 0.11612995584148644, + "learning_rate": 0.0008186531223787903, + "loss": 1.6073, + "step": 3325 + }, + { + "epoch": 0.3017601161313736, + "grad_norm": 0.11503972970536278, + "learning_rate": 0.0008185398851304089, + "loss": 1.636, + "step": 3326 + }, + { + "epoch": 0.30185084376701143, + "grad_norm": 0.12123095132394132, + "learning_rate": 0.0008184266203760613, + "loss": 1.6482, + "step": 3327 + }, + { + "epoch": 0.30194157140264927, + "grad_norm": 0.11212759937048034, + "learning_rate": 0.0008183133281255281, + "loss": 1.6446, + "step": 3328 + }, + { + "epoch": 0.30203229903828704, + "grad_norm": 0.11476518906182952, + "learning_rate": 0.0008182000083885921, + "loss": 1.6513, + "step": 3329 + }, + { + "epoch": 0.3021230266739249, + "grad_norm": 0.11929470253773247, + "learning_rate": 0.0008180866611750386, + "loss": 1.6249, + "step": 3330 + }, + { + "epoch": 0.3022137543095627, + "grad_norm": 0.11723234177839806, + "learning_rate": 0.000817973286494655, + "loss": 1.6557, + "step": 3331 + }, + { + "epoch": 0.3023044819452005, + "grad_norm": 0.11296707256893983, + "learning_rate": 0.0008178598843572311, + "loss": 1.6352, + "step": 3332 + }, + { + "epoch": 0.3023952095808383, + "grad_norm": 0.1162189781042623, + "learning_rate": 0.0008177464547725595, + "loss": 1.6082, + "step": 3333 + }, + { + "epoch": 0.30248593721647615, + "grad_norm": 0.12452398501107735, + "learning_rate": 0.0008176329977504347, + "loss": 1.6256, + "step": 3334 + }, + { + "epoch": 0.30257666485211393, + "grad_norm": 0.11466176990729404, + "learning_rate": 0.0008175195133006537, + "loss": 1.626, + "step": 3335 + }, + { + "epoch": 0.30266739248775176, + "grad_norm": 0.11255582854272028, + "learning_rate": 0.0008174060014330158, + "loss": 1.6183, + "step": 3336 + }, + { + "epoch": 0.3027581201233896, + "grad_norm": 0.1157442088474719, + "learning_rate": 
0.000817292462157323, + "loss": 1.6487, + "step": 3337 + }, + { + "epoch": 0.3028488477590274, + "grad_norm": 0.109641706970087, + "learning_rate": 0.0008171788954833793, + "loss": 1.6491, + "step": 3338 + }, + { + "epoch": 0.3029395753946652, + "grad_norm": 0.1102026678044397, + "learning_rate": 0.0008170653014209912, + "loss": 1.6273, + "step": 3339 + }, + { + "epoch": 0.30303030303030304, + "grad_norm": 0.11080191387179049, + "learning_rate": 0.0008169516799799676, + "loss": 1.6338, + "step": 3340 + }, + { + "epoch": 0.30312103066594087, + "grad_norm": 0.1075767053659628, + "learning_rate": 0.0008168380311701198, + "loss": 1.5854, + "step": 3341 + }, + { + "epoch": 0.30321175830157865, + "grad_norm": 0.11655078968672726, + "learning_rate": 0.0008167243550012613, + "loss": 1.6783, + "step": 3342 + }, + { + "epoch": 0.3033024859372165, + "grad_norm": 0.11631249661065692, + "learning_rate": 0.000816610651483208, + "loss": 1.6518, + "step": 3343 + }, + { + "epoch": 0.3033932135728543, + "grad_norm": 0.11341012212959933, + "learning_rate": 0.0008164969206257784, + "loss": 1.6351, + "step": 3344 + }, + { + "epoch": 0.3034839412084921, + "grad_norm": 0.11224179317336372, + "learning_rate": 0.000816383162438793, + "loss": 1.6264, + "step": 3345 + }, + { + "epoch": 0.3035746688441299, + "grad_norm": 0.11441743173057008, + "learning_rate": 0.0008162693769320749, + "loss": 1.6339, + "step": 3346 + }, + { + "epoch": 0.30366539647976776, + "grad_norm": 0.11394957223694042, + "learning_rate": 0.0008161555641154492, + "loss": 1.642, + "step": 3347 + }, + { + "epoch": 0.30375612411540553, + "grad_norm": 0.11585317416861805, + "learning_rate": 0.0008160417239987443, + "loss": 1.6265, + "step": 3348 + }, + { + "epoch": 0.30384685175104337, + "grad_norm": 0.11188794371431249, + "learning_rate": 0.0008159278565917899, + "loss": 1.6289, + "step": 3349 + }, + { + "epoch": 0.3039375793866812, + "grad_norm": 0.11518980418924064, + "learning_rate": 0.0008158139619044185, + "loss": 1.6441, + "step": 3350 + }, + { + "epoch": 0.304028307022319, + "grad_norm": 0.1105981985749987, + "learning_rate": 0.0008157000399464649, + "loss": 1.6123, + "step": 3351 + }, + { + "epoch": 0.3041190346579568, + "grad_norm": 0.10789564319076662, + "learning_rate": 0.0008155860907277663, + "loss": 1.6034, + "step": 3352 + }, + { + "epoch": 0.30420976229359464, + "grad_norm": 0.11300236345884677, + "learning_rate": 0.0008154721142581622, + "loss": 1.629, + "step": 3353 + }, + { + "epoch": 0.3043004899292324, + "grad_norm": 0.11382139445713092, + "learning_rate": 0.0008153581105474947, + "loss": 1.6421, + "step": 3354 + }, + { + "epoch": 0.30439121756487025, + "grad_norm": 0.11571366157795034, + "learning_rate": 0.0008152440796056077, + "loss": 1.6472, + "step": 3355 + }, + { + "epoch": 0.3044819452005081, + "grad_norm": 0.11324134180748209, + "learning_rate": 0.0008151300214423482, + "loss": 1.6072, + "step": 3356 + }, + { + "epoch": 0.30457267283614586, + "grad_norm": 0.11292790743613298, + "learning_rate": 0.0008150159360675647, + "loss": 1.6432, + "step": 3357 + }, + { + "epoch": 0.3046634004717837, + "grad_norm": 0.11518478614445338, + "learning_rate": 0.0008149018234911088, + "loss": 1.6651, + "step": 3358 + }, + { + "epoch": 0.30475412810742153, + "grad_norm": 0.11290300506832454, + "learning_rate": 0.000814787683722834, + "loss": 1.6232, + "step": 3359 + }, + { + "epoch": 0.30484485574305936, + "grad_norm": 0.10969705645157857, + "learning_rate": 0.0008146735167725963, + "loss": 1.6145, + "step": 3360 + }, + { + "epoch": 
0.30493558337869714, + "grad_norm": 0.11487611062019132, + "learning_rate": 0.0008145593226502541, + "loss": 1.626, + "step": 3361 + }, + { + "epoch": 0.30502631101433497, + "grad_norm": 0.11524858163552652, + "learning_rate": 0.0008144451013656679, + "loss": 1.6915, + "step": 3362 + }, + { + "epoch": 0.3051170386499728, + "grad_norm": 0.11080285080065709, + "learning_rate": 0.0008143308529287009, + "loss": 1.6577, + "step": 3363 + }, + { + "epoch": 0.3052077662856106, + "grad_norm": 0.11646121723948255, + "learning_rate": 0.0008142165773492185, + "loss": 1.6448, + "step": 3364 + }, + { + "epoch": 0.3052984939212484, + "grad_norm": 0.11285909797120064, + "learning_rate": 0.0008141022746370883, + "loss": 1.6439, + "step": 3365 + }, + { + "epoch": 0.30538922155688625, + "grad_norm": 0.11464639869845139, + "learning_rate": 0.0008139879448021805, + "loss": 1.657, + "step": 3366 + }, + { + "epoch": 0.305479949192524, + "grad_norm": 0.1134105822913286, + "learning_rate": 0.0008138735878543672, + "loss": 1.631, + "step": 3367 + }, + { + "epoch": 0.30557067682816186, + "grad_norm": 0.11612161625775998, + "learning_rate": 0.0008137592038035233, + "loss": 1.6507, + "step": 3368 + }, + { + "epoch": 0.3056614044637997, + "grad_norm": 0.11647514157342331, + "learning_rate": 0.0008136447926595261, + "loss": 1.6633, + "step": 3369 + }, + { + "epoch": 0.30575213209943747, + "grad_norm": 0.11850292773229931, + "learning_rate": 0.0008135303544322547, + "loss": 1.6476, + "step": 3370 + }, + { + "epoch": 0.3058428597350753, + "grad_norm": 0.11246270788396524, + "learning_rate": 0.000813415889131591, + "loss": 1.6751, + "step": 3371 + }, + { + "epoch": 0.30593358737071313, + "grad_norm": 0.1167145171069806, + "learning_rate": 0.0008133013967674193, + "loss": 1.6122, + "step": 3372 + }, + { + "epoch": 0.3060243150063509, + "grad_norm": 0.11474217382003667, + "learning_rate": 0.0008131868773496254, + "loss": 1.6289, + "step": 3373 + }, + { + "epoch": 0.30611504264198874, + "grad_norm": 0.11618320002807905, + "learning_rate": 0.0008130723308880987, + "loss": 1.6603, + "step": 3374 + }, + { + "epoch": 0.3062057702776266, + "grad_norm": 0.11321521194404051, + "learning_rate": 0.00081295775739273, + "loss": 1.6019, + "step": 3375 + }, + { + "epoch": 0.30629649791326435, + "grad_norm": 0.11433018735694825, + "learning_rate": 0.0008128431568734131, + "loss": 1.6701, + "step": 3376 + }, + { + "epoch": 0.3063872255489022, + "grad_norm": 0.1150326248717124, + "learning_rate": 0.0008127285293400432, + "loss": 1.6662, + "step": 3377 + }, + { + "epoch": 0.30647795318454, + "grad_norm": 0.11001607274543697, + "learning_rate": 0.000812613874802519, + "loss": 1.6385, + "step": 3378 + }, + { + "epoch": 0.30656868082017785, + "grad_norm": 0.1062213459487622, + "learning_rate": 0.0008124991932707402, + "loss": 1.6411, + "step": 3379 + }, + { + "epoch": 0.30665940845581563, + "grad_norm": 0.11654298165260302, + "learning_rate": 0.0008123844847546102, + "loss": 1.6669, + "step": 3380 + }, + { + "epoch": 0.30675013609145346, + "grad_norm": 0.11448577708693818, + "learning_rate": 0.0008122697492640341, + "loss": 1.6584, + "step": 3381 + }, + { + "epoch": 0.3068408637270913, + "grad_norm": 0.11699026844726554, + "learning_rate": 0.0008121549868089189, + "loss": 1.6543, + "step": 3382 + }, + { + "epoch": 0.3069315913627291, + "grad_norm": 0.11222864366447895, + "learning_rate": 0.0008120401973991747, + "loss": 1.6556, + "step": 3383 + }, + { + "epoch": 0.3070223189983669, + "grad_norm": 0.1090582196131501, + "learning_rate": 
0.0008119253810447134, + "loss": 1.5781, + "step": 3384 + }, + { + "epoch": 0.30711304663400474, + "grad_norm": 0.11639946662084129, + "learning_rate": 0.0008118105377554495, + "loss": 1.6217, + "step": 3385 + }, + { + "epoch": 0.3072037742696425, + "grad_norm": 0.11234229975480522, + "learning_rate": 0.0008116956675412997, + "loss": 1.6174, + "step": 3386 + }, + { + "epoch": 0.30729450190528035, + "grad_norm": 0.11126176137873481, + "learning_rate": 0.0008115807704121831, + "loss": 1.6266, + "step": 3387 + }, + { + "epoch": 0.3073852295409182, + "grad_norm": 0.11120266046453907, + "learning_rate": 0.0008114658463780209, + "loss": 1.6273, + "step": 3388 + }, + { + "epoch": 0.30747595717655596, + "grad_norm": 0.11320855446751499, + "learning_rate": 0.0008113508954487371, + "loss": 1.6181, + "step": 3389 + }, + { + "epoch": 0.3075666848121938, + "grad_norm": 0.11806188444198279, + "learning_rate": 0.0008112359176342576, + "loss": 1.6182, + "step": 3390 + }, + { + "epoch": 0.3076574124478316, + "grad_norm": 0.11108796055288109, + "learning_rate": 0.0008111209129445107, + "loss": 1.6302, + "step": 3391 + }, + { + "epoch": 0.3077481400834694, + "grad_norm": 0.11018160573614116, + "learning_rate": 0.0008110058813894272, + "loss": 1.6299, + "step": 3392 + }, + { + "epoch": 0.30783886771910723, + "grad_norm": 0.11085630773237745, + "learning_rate": 0.0008108908229789399, + "loss": 1.5869, + "step": 3393 + }, + { + "epoch": 0.30792959535474507, + "grad_norm": 0.10926652838626807, + "learning_rate": 0.0008107757377229842, + "loss": 1.6644, + "step": 3394 + }, + { + "epoch": 0.30802032299038284, + "grad_norm": 0.11429483231852579, + "learning_rate": 0.0008106606256314978, + "loss": 1.686, + "step": 3395 + }, + { + "epoch": 0.3081110506260207, + "grad_norm": 0.11083270209256277, + "learning_rate": 0.0008105454867144206, + "loss": 1.6106, + "step": 3396 + }, + { + "epoch": 0.3082017782616585, + "grad_norm": 0.1145255175402067, + "learning_rate": 0.0008104303209816948, + "loss": 1.6723, + "step": 3397 + }, + { + "epoch": 0.30829250589729634, + "grad_norm": 0.11421185555894822, + "learning_rate": 0.0008103151284432651, + "loss": 1.63, + "step": 3398 + }, + { + "epoch": 0.3083832335329341, + "grad_norm": 0.1146413860325581, + "learning_rate": 0.0008101999091090781, + "loss": 1.6205, + "step": 3399 + }, + { + "epoch": 0.30847396116857195, + "grad_norm": 0.10741082010352955, + "learning_rate": 0.0008100846629890834, + "loss": 1.6514, + "step": 3400 + }, + { + "epoch": 0.3085646888042098, + "grad_norm": 0.11485688477272517, + "learning_rate": 0.0008099693900932326, + "loss": 1.646, + "step": 3401 + }, + { + "epoch": 0.30865541643984756, + "grad_norm": 0.11265535347469624, + "learning_rate": 0.0008098540904314789, + "loss": 1.6387, + "step": 3402 + }, + { + "epoch": 0.3087461440754854, + "grad_norm": 0.11159963224513018, + "learning_rate": 0.000809738764013779, + "loss": 1.6424, + "step": 3403 + }, + { + "epoch": 0.30883687171112323, + "grad_norm": 0.11071983969498515, + "learning_rate": 0.0008096234108500911, + "loss": 1.6185, + "step": 3404 + }, + { + "epoch": 0.308927599346761, + "grad_norm": 0.10853176831489872, + "learning_rate": 0.0008095080309503762, + "loss": 1.6278, + "step": 3405 + }, + { + "epoch": 0.30901832698239884, + "grad_norm": 0.11124043588746516, + "learning_rate": 0.000809392624324597, + "loss": 1.6162, + "step": 3406 + }, + { + "epoch": 0.30910905461803667, + "grad_norm": 0.11500963894698742, + "learning_rate": 0.0008092771909827193, + "loss": 1.6609, + "step": 3407 + }, + { + "epoch": 
0.30919978225367445, + "grad_norm": 0.11231687420049281, + "learning_rate": 0.0008091617309347103, + "loss": 1.6399, + "step": 3408 + }, + { + "epoch": 0.3092905098893123, + "grad_norm": 0.11567596492879192, + "learning_rate": 0.0008090462441905405, + "loss": 1.6231, + "step": 3409 + }, + { + "epoch": 0.3093812375249501, + "grad_norm": 0.11147962535719333, + "learning_rate": 0.0008089307307601819, + "loss": 1.6548, + "step": 3410 + }, + { + "epoch": 0.3094719651605879, + "grad_norm": 0.11295914263098546, + "learning_rate": 0.0008088151906536092, + "loss": 1.6738, + "step": 3411 + }, + { + "epoch": 0.3095626927962257, + "grad_norm": 0.11596055929155678, + "learning_rate": 0.0008086996238807991, + "loss": 1.6338, + "step": 3412 + }, + { + "epoch": 0.30965342043186356, + "grad_norm": 0.11257798599566543, + "learning_rate": 0.0008085840304517311, + "loss": 1.6602, + "step": 3413 + }, + { + "epoch": 0.30974414806750133, + "grad_norm": 0.11544769746601494, + "learning_rate": 0.0008084684103763866, + "loss": 1.6074, + "step": 3414 + }, + { + "epoch": 0.30983487570313917, + "grad_norm": 0.1153566085581579, + "learning_rate": 0.0008083527636647494, + "loss": 1.6423, + "step": 3415 + }, + { + "epoch": 0.309925603338777, + "grad_norm": 0.1124980929201609, + "learning_rate": 0.0008082370903268057, + "loss": 1.6198, + "step": 3416 + }, + { + "epoch": 0.31001633097441483, + "grad_norm": 0.11377966540118707, + "learning_rate": 0.0008081213903725437, + "loss": 1.6379, + "step": 3417 + }, + { + "epoch": 0.3101070586100526, + "grad_norm": 0.10958590766971779, + "learning_rate": 0.0008080056638119542, + "loss": 1.7072, + "step": 3418 + }, + { + "epoch": 0.31019778624569044, + "grad_norm": 0.11205003140027966, + "learning_rate": 0.0008078899106550303, + "loss": 1.6481, + "step": 3419 + }, + { + "epoch": 0.3102885138813283, + "grad_norm": 0.11212533087755677, + "learning_rate": 0.0008077741309117674, + "loss": 1.6604, + "step": 3420 + }, + { + "epoch": 0.31037924151696605, + "grad_norm": 0.10966147390822578, + "learning_rate": 0.0008076583245921627, + "loss": 1.6558, + "step": 3421 + }, + { + "epoch": 0.3104699691526039, + "grad_norm": 0.10985829553714652, + "learning_rate": 0.0008075424917062164, + "loss": 1.6063, + "step": 3422 + }, + { + "epoch": 0.3105606967882417, + "grad_norm": 0.11170542373206539, + "learning_rate": 0.0008074266322639305, + "loss": 1.6522, + "step": 3423 + }, + { + "epoch": 0.3106514244238795, + "grad_norm": 0.11136527642627389, + "learning_rate": 0.0008073107462753098, + "loss": 1.6363, + "step": 3424 + }, + { + "epoch": 0.31074215205951733, + "grad_norm": 0.1133250133375053, + "learning_rate": 0.0008071948337503608, + "loss": 1.6867, + "step": 3425 + }, + { + "epoch": 0.31083287969515516, + "grad_norm": 0.11194748621283061, + "learning_rate": 0.0008070788946990926, + "loss": 1.6611, + "step": 3426 + }, + { + "epoch": 0.31092360733079294, + "grad_norm": 0.11382613526409269, + "learning_rate": 0.0008069629291315166, + "loss": 1.6583, + "step": 3427 + }, + { + "epoch": 0.31101433496643077, + "grad_norm": 0.11545408762569782, + "learning_rate": 0.0008068469370576464, + "loss": 1.6176, + "step": 3428 + }, + { + "epoch": 0.3111050626020686, + "grad_norm": 0.11242585834221165, + "learning_rate": 0.0008067309184874979, + "loss": 1.6481, + "step": 3429 + }, + { + "epoch": 0.3111957902377064, + "grad_norm": 0.1053374777480855, + "learning_rate": 0.0008066148734310894, + "loss": 1.6622, + "step": 3430 + }, + { + "epoch": 0.3112865178733442, + "grad_norm": 0.11202140038833718, + "learning_rate": 
0.0008064988018984415, + "loss": 1.6191, + "step": 3431 + }, + { + "epoch": 0.31137724550898205, + "grad_norm": 0.11017060153576838, + "learning_rate": 0.0008063827038995768, + "loss": 1.6777, + "step": 3432 + }, + { + "epoch": 0.3114679731446198, + "grad_norm": 0.11114218059828809, + "learning_rate": 0.0008062665794445205, + "loss": 1.6864, + "step": 3433 + }, + { + "epoch": 0.31155870078025766, + "grad_norm": 0.11047633606182469, + "learning_rate": 0.0008061504285432999, + "loss": 1.6473, + "step": 3434 + }, + { + "epoch": 0.3116494284158955, + "grad_norm": 0.11847458196334067, + "learning_rate": 0.0008060342512059447, + "loss": 1.6787, + "step": 3435 + }, + { + "epoch": 0.3117401560515333, + "grad_norm": 0.11710740422851165, + "learning_rate": 0.0008059180474424868, + "loss": 1.5827, + "step": 3436 + }, + { + "epoch": 0.3118308836871711, + "grad_norm": 0.11189297744005756, + "learning_rate": 0.0008058018172629604, + "loss": 1.591, + "step": 3437 + }, + { + "epoch": 0.31192161132280893, + "grad_norm": 0.10909808419088964, + "learning_rate": 0.0008056855606774021, + "loss": 1.6111, + "step": 3438 + }, + { + "epoch": 0.31201233895844677, + "grad_norm": 0.1148290053314652, + "learning_rate": 0.0008055692776958503, + "loss": 1.5835, + "step": 3439 + }, + { + "epoch": 0.31210306659408454, + "grad_norm": 0.11053859948309365, + "learning_rate": 0.0008054529683283467, + "loss": 1.6541, + "step": 3440 + }, + { + "epoch": 0.3121937942297224, + "grad_norm": 0.10973388671960722, + "learning_rate": 0.0008053366325849339, + "loss": 1.6325, + "step": 3441 + }, + { + "epoch": 0.3122845218653602, + "grad_norm": 0.11068686720743053, + "learning_rate": 0.0008052202704756582, + "loss": 1.6523, + "step": 3442 + }, + { + "epoch": 0.312375249500998, + "grad_norm": 0.11795053830666774, + "learning_rate": 0.0008051038820105671, + "loss": 1.662, + "step": 3443 + }, + { + "epoch": 0.3124659771366358, + "grad_norm": 0.10972391319389385, + "learning_rate": 0.0008049874671997106, + "loss": 1.6785, + "step": 3444 + }, + { + "epoch": 0.31255670477227365, + "grad_norm": 0.10872636556744963, + "learning_rate": 0.0008048710260531416, + "loss": 1.6294, + "step": 3445 + }, + { + "epoch": 0.31264743240791143, + "grad_norm": 0.11723383445519035, + "learning_rate": 0.0008047545585809144, + "loss": 1.6145, + "step": 3446 + }, + { + "epoch": 0.31273816004354926, + "grad_norm": 0.11699108155872455, + "learning_rate": 0.0008046380647930863, + "loss": 1.6571, + "step": 3447 + }, + { + "epoch": 0.3128288876791871, + "grad_norm": 0.11230775817541414, + "learning_rate": 0.0008045215446997163, + "loss": 1.6717, + "step": 3448 + }, + { + "epoch": 0.3129196153148249, + "grad_norm": 0.11552954601667954, + "learning_rate": 0.0008044049983108661, + "loss": 1.6429, + "step": 3449 + }, + { + "epoch": 0.3130103429504627, + "grad_norm": 0.10882775307804092, + "learning_rate": 0.0008042884256365994, + "loss": 1.6468, + "step": 3450 + }, + { + "epoch": 0.31310107058610054, + "grad_norm": 0.11395968732881503, + "learning_rate": 0.0008041718266869822, + "loss": 1.6442, + "step": 3451 + }, + { + "epoch": 0.3131917982217383, + "grad_norm": 0.1093066600221801, + "learning_rate": 0.0008040552014720831, + "loss": 1.5988, + "step": 3452 + }, + { + "epoch": 0.31328252585737615, + "grad_norm": 0.11456861870777543, + "learning_rate": 0.0008039385500019725, + "loss": 1.6854, + "step": 3453 + }, + { + "epoch": 0.313373253493014, + "grad_norm": 0.1090262170490689, + "learning_rate": 0.0008038218722867232, + "loss": 1.6408, + "step": 3454 + }, + { + "epoch": 
0.3134639811286518, + "grad_norm": 0.11221479223212036, + "learning_rate": 0.0008037051683364106, + "loss": 1.6685, + "step": 3455 + }, + { + "epoch": 0.3135547087642896, + "grad_norm": 0.11082335057169995, + "learning_rate": 0.0008035884381611118, + "loss": 1.6261, + "step": 3456 + }, + { + "epoch": 0.3136454363999274, + "grad_norm": 0.1111635663384375, + "learning_rate": 0.0008034716817709067, + "loss": 1.6482, + "step": 3457 + }, + { + "epoch": 0.31373616403556526, + "grad_norm": 0.11068766886796468, + "learning_rate": 0.0008033548991758772, + "loss": 1.6028, + "step": 3458 + }, + { + "epoch": 0.31382689167120303, + "grad_norm": 0.11235585504666389, + "learning_rate": 0.0008032380903861074, + "loss": 1.6627, + "step": 3459 + }, + { + "epoch": 0.31391761930684087, + "grad_norm": 0.1106892761466869, + "learning_rate": 0.0008031212554116838, + "loss": 1.6355, + "step": 3460 + }, + { + "epoch": 0.3140083469424787, + "grad_norm": 0.11075454082982095, + "learning_rate": 0.0008030043942626951, + "loss": 1.6251, + "step": 3461 + }, + { + "epoch": 0.3140990745781165, + "grad_norm": 0.10963887268581722, + "learning_rate": 0.0008028875069492323, + "loss": 1.6451, + "step": 3462 + }, + { + "epoch": 0.3141898022137543, + "grad_norm": 0.11494931086735216, + "learning_rate": 0.0008027705934813887, + "loss": 1.6126, + "step": 3463 + }, + { + "epoch": 0.31428052984939214, + "grad_norm": 0.11154691436522135, + "learning_rate": 0.0008026536538692596, + "loss": 1.631, + "step": 3464 + }, + { + "epoch": 0.3143712574850299, + "grad_norm": 0.1130520314132625, + "learning_rate": 0.000802536688122943, + "loss": 1.6245, + "step": 3465 + }, + { + "epoch": 0.31446198512066775, + "grad_norm": 0.10793508870319199, + "learning_rate": 0.0008024196962525388, + "loss": 1.6421, + "step": 3466 + }, + { + "epoch": 0.3145527127563056, + "grad_norm": 0.11124775457424153, + "learning_rate": 0.0008023026782681492, + "loss": 1.6206, + "step": 3467 + }, + { + "epoch": 0.31464344039194336, + "grad_norm": 0.1105775550913933, + "learning_rate": 0.0008021856341798788, + "loss": 1.6475, + "step": 3468 + }, + { + "epoch": 0.3147341680275812, + "grad_norm": 0.11184095728613112, + "learning_rate": 0.0008020685639978342, + "loss": 1.6478, + "step": 3469 + }, + { + "epoch": 0.31482489566321903, + "grad_norm": 0.11308041732192302, + "learning_rate": 0.0008019514677321249, + "loss": 1.6021, + "step": 3470 + }, + { + "epoch": 0.3149156232988568, + "grad_norm": 0.11404927459084102, + "learning_rate": 0.0008018343453928616, + "loss": 1.6901, + "step": 3471 + }, + { + "epoch": 0.31500635093449464, + "grad_norm": 0.11628219117130287, + "learning_rate": 0.0008017171969901582, + "loss": 1.6391, + "step": 3472 + }, + { + "epoch": 0.31509707857013247, + "grad_norm": 0.11230156791145811, + "learning_rate": 0.0008016000225341302, + "loss": 1.6243, + "step": 3473 + }, + { + "epoch": 0.3151878062057703, + "grad_norm": 0.11986231903737642, + "learning_rate": 0.0008014828220348959, + "loss": 1.6261, + "step": 3474 + }, + { + "epoch": 0.3152785338414081, + "grad_norm": 0.11203659943702549, + "learning_rate": 0.0008013655955025757, + "loss": 1.6232, + "step": 3475 + }, + { + "epoch": 0.3153692614770459, + "grad_norm": 0.113113520058777, + "learning_rate": 0.0008012483429472916, + "loss": 1.6581, + "step": 3476 + }, + { + "epoch": 0.31545998911268375, + "grad_norm": 0.11402125586929152, + "learning_rate": 0.0008011310643791689, + "loss": 1.6406, + "step": 3477 + }, + { + "epoch": 0.3155507167483215, + "grad_norm": 0.11718287790963085, + "learning_rate": 
0.0008010137598083344, + "loss": 1.6337, + "step": 3478 + }, + { + "epoch": 0.31564144438395936, + "grad_norm": 0.10518750321107859, + "learning_rate": 0.0008008964292449172, + "loss": 1.6344, + "step": 3479 + }, + { + "epoch": 0.3157321720195972, + "grad_norm": 0.10943199542092955, + "learning_rate": 0.0008007790726990492, + "loss": 1.6485, + "step": 3480 + }, + { + "epoch": 0.31582289965523497, + "grad_norm": 0.1146859241628759, + "learning_rate": 0.0008006616901808638, + "loss": 1.6483, + "step": 3481 + }, + { + "epoch": 0.3159136272908728, + "grad_norm": 0.11016723120492317, + "learning_rate": 0.0008005442817004972, + "loss": 1.7007, + "step": 3482 + }, + { + "epoch": 0.31600435492651063, + "grad_norm": 0.10961727573834137, + "learning_rate": 0.0008004268472680875, + "loss": 1.623, + "step": 3483 + }, + { + "epoch": 0.3160950825621484, + "grad_norm": 0.11362487828981382, + "learning_rate": 0.0008003093868937754, + "loss": 1.6257, + "step": 3484 + }, + { + "epoch": 0.31618581019778624, + "grad_norm": 0.11513255512930382, + "learning_rate": 0.0008001919005877033, + "loss": 1.6573, + "step": 3485 + }, + { + "epoch": 0.3162765378334241, + "grad_norm": 0.11249341214959258, + "learning_rate": 0.0008000743883600166, + "loss": 1.6173, + "step": 3486 + }, + { + "epoch": 0.31636726546906185, + "grad_norm": 0.10920440809978825, + "learning_rate": 0.000799956850220862, + "loss": 1.6164, + "step": 3487 + }, + { + "epoch": 0.3164579931046997, + "grad_norm": 0.1131326135200938, + "learning_rate": 0.0007998392861803892, + "loss": 1.6817, + "step": 3488 + }, + { + "epoch": 0.3165487207403375, + "grad_norm": 0.10850727405566679, + "learning_rate": 0.0007997216962487499, + "loss": 1.64, + "step": 3489 + }, + { + "epoch": 0.3166394483759753, + "grad_norm": 0.11470638371391349, + "learning_rate": 0.000799604080436098, + "loss": 1.6039, + "step": 3490 + }, + { + "epoch": 0.31673017601161313, + "grad_norm": 0.11641902378382268, + "learning_rate": 0.0007994864387525896, + "loss": 1.6129, + "step": 3491 + }, + { + "epoch": 0.31682090364725096, + "grad_norm": 0.11549110481938686, + "learning_rate": 0.0007993687712083828, + "loss": 1.6435, + "step": 3492 + }, + { + "epoch": 0.3169116312828888, + "grad_norm": 0.11410479226596366, + "learning_rate": 0.0007992510778136388, + "loss": 1.6309, + "step": 3493 + }, + { + "epoch": 0.31700235891852657, + "grad_norm": 0.1137271703945991, + "learning_rate": 0.0007991333585785201, + "loss": 1.6185, + "step": 3494 + }, + { + "epoch": 0.3170930865541644, + "grad_norm": 0.1109395628491063, + "learning_rate": 0.0007990156135131917, + "loss": 1.6669, + "step": 3495 + }, + { + "epoch": 0.31718381418980224, + "grad_norm": 0.11081468742068247, + "learning_rate": 0.0007988978426278209, + "loss": 1.6549, + "step": 3496 + }, + { + "epoch": 0.31727454182544, + "grad_norm": 0.11451139974272723, + "learning_rate": 0.0007987800459325775, + "loss": 1.6397, + "step": 3497 + }, + { + "epoch": 0.31736526946107785, + "grad_norm": 0.11391151248303909, + "learning_rate": 0.0007986622234376332, + "loss": 1.6398, + "step": 3498 + }, + { + "epoch": 0.3174559970967157, + "grad_norm": 0.11296976428212288, + "learning_rate": 0.0007985443751531616, + "loss": 1.664, + "step": 3499 + }, + { + "epoch": 0.31754672473235346, + "grad_norm": 0.1108425266771971, + "learning_rate": 0.0007984265010893395, + "loss": 1.6639, + "step": 3500 + }, + { + "epoch": 0.3176374523679913, + "grad_norm": 0.10859794584637186, + "learning_rate": 0.0007983086012563449, + "loss": 1.6209, + "step": 3501 + }, + { + "epoch": 
0.3177281800036291, + "grad_norm": 0.11015441448597699, + "learning_rate": 0.0007981906756643586, + "loss": 1.6419, + "step": 3502 + }, + { + "epoch": 0.3178189076392669, + "grad_norm": 0.11489362034978577, + "learning_rate": 0.0007980727243235635, + "loss": 1.6288, + "step": 3503 + }, + { + "epoch": 0.31790963527490473, + "grad_norm": 0.11078251552731519, + "learning_rate": 0.0007979547472441447, + "loss": 1.6573, + "step": 3504 + }, + { + "epoch": 0.31800036291054257, + "grad_norm": 0.11179196969302363, + "learning_rate": 0.0007978367444362897, + "loss": 1.6912, + "step": 3505 + }, + { + "epoch": 0.31809109054618034, + "grad_norm": 0.10891903361836743, + "learning_rate": 0.0007977187159101878, + "loss": 1.6599, + "step": 3506 + }, + { + "epoch": 0.3181818181818182, + "grad_norm": 0.11021090184472525, + "learning_rate": 0.000797600661676031, + "loss": 1.6081, + "step": 3507 + }, + { + "epoch": 0.318272545817456, + "grad_norm": 0.11394207100812391, + "learning_rate": 0.000797482581744013, + "loss": 1.6437, + "step": 3508 + }, + { + "epoch": 0.3183632734530938, + "grad_norm": 0.11307531109433692, + "learning_rate": 0.0007973644761243303, + "loss": 1.6261, + "step": 3509 + }, + { + "epoch": 0.3184540010887316, + "grad_norm": 0.1113516270651849, + "learning_rate": 0.0007972463448271815, + "loss": 1.6223, + "step": 3510 + }, + { + "epoch": 0.31854472872436945, + "grad_norm": 0.11204791812187272, + "learning_rate": 0.0007971281878627667, + "loss": 1.6389, + "step": 3511 + }, + { + "epoch": 0.3186354563600073, + "grad_norm": 0.11281144933940612, + "learning_rate": 0.0007970100052412893, + "loss": 1.6259, + "step": 3512 + }, + { + "epoch": 0.31872618399564506, + "grad_norm": 0.11738031376404473, + "learning_rate": 0.0007968917969729541, + "loss": 1.6431, + "step": 3513 + }, + { + "epoch": 0.3188169116312829, + "grad_norm": 0.11918436362860102, + "learning_rate": 0.0007967735630679684, + "loss": 1.635, + "step": 3514 + }, + { + "epoch": 0.31890763926692073, + "grad_norm": 0.11477878822784414, + "learning_rate": 0.0007966553035365419, + "loss": 1.6454, + "step": 3515 + }, + { + "epoch": 0.3189983669025585, + "grad_norm": 0.11158721479123675, + "learning_rate": 0.0007965370183888863, + "loss": 1.6276, + "step": 3516 + }, + { + "epoch": 0.31908909453819634, + "grad_norm": 0.10961796679096211, + "learning_rate": 0.0007964187076352152, + "loss": 1.6693, + "step": 3517 + }, + { + "epoch": 0.31917982217383417, + "grad_norm": 0.11104430118607592, + "learning_rate": 0.0007963003712857453, + "loss": 1.6467, + "step": 3518 + }, + { + "epoch": 0.31927054980947195, + "grad_norm": 0.1034377747780095, + "learning_rate": 0.0007961820093506944, + "loss": 1.6376, + "step": 3519 + }, + { + "epoch": 0.3193612774451098, + "grad_norm": 0.10552134788255947, + "learning_rate": 0.0007960636218402834, + "loss": 1.6705, + "step": 3520 + }, + { + "epoch": 0.3194520050807476, + "grad_norm": 0.10912004695578968, + "learning_rate": 0.0007959452087647352, + "loss": 1.6802, + "step": 3521 + }, + { + "epoch": 0.3195427327163854, + "grad_norm": 0.11529693647431083, + "learning_rate": 0.0007958267701342744, + "loss": 1.6033, + "step": 3522 + }, + { + "epoch": 0.3196334603520232, + "grad_norm": 0.11302273205919737, + "learning_rate": 0.0007957083059591285, + "loss": 1.5779, + "step": 3523 + }, + { + "epoch": 0.31972418798766106, + "grad_norm": 0.10686432566540514, + "learning_rate": 0.0007955898162495267, + "loss": 1.6164, + "step": 3524 + }, + { + "epoch": 0.31981491562329883, + "grad_norm": 0.11586524832728791, + "learning_rate": 
0.0007954713010157008, + "loss": 1.6372, + "step": 3525 + }, + { + "epoch": 0.31990564325893667, + "grad_norm": 0.1124944179701029, + "learning_rate": 0.0007953527602678845, + "loss": 1.6679, + "step": 3526 + }, + { + "epoch": 0.3199963708945745, + "grad_norm": 0.10978619948497176, + "learning_rate": 0.0007952341940163137, + "loss": 1.6323, + "step": 3527 + }, + { + "epoch": 0.3200870985302123, + "grad_norm": 0.11170548987334299, + "learning_rate": 0.0007951156022712269, + "loss": 1.6887, + "step": 3528 + }, + { + "epoch": 0.3201778261658501, + "grad_norm": 0.11390886387493251, + "learning_rate": 0.0007949969850428642, + "loss": 1.6189, + "step": 3529 + }, + { + "epoch": 0.32026855380148794, + "grad_norm": 0.11116278628873806, + "learning_rate": 0.0007948783423414685, + "loss": 1.6087, + "step": 3530 + }, + { + "epoch": 0.3203592814371258, + "grad_norm": 0.10963016722321789, + "learning_rate": 0.0007947596741772844, + "loss": 1.602, + "step": 3531 + }, + { + "epoch": 0.32045000907276355, + "grad_norm": 0.10921643872606304, + "learning_rate": 0.000794640980560559, + "loss": 1.6291, + "step": 3532 + }, + { + "epoch": 0.3205407367084014, + "grad_norm": 0.10670692830520209, + "learning_rate": 0.0007945222615015416, + "loss": 1.6438, + "step": 3533 + }, + { + "epoch": 0.3206314643440392, + "grad_norm": 0.10577922965814407, + "learning_rate": 0.0007944035170104835, + "loss": 1.6123, + "step": 3534 + }, + { + "epoch": 0.320722191979677, + "grad_norm": 0.11356046315559239, + "learning_rate": 0.0007942847470976382, + "loss": 1.614, + "step": 3535 + }, + { + "epoch": 0.32081291961531483, + "grad_norm": 0.11287407537120674, + "learning_rate": 0.0007941659517732615, + "loss": 1.6567, + "step": 3536 + }, + { + "epoch": 0.32090364725095266, + "grad_norm": 0.11029876792757068, + "learning_rate": 0.0007940471310476119, + "loss": 1.6336, + "step": 3537 + }, + { + "epoch": 0.32099437488659044, + "grad_norm": 0.11937634038904824, + "learning_rate": 0.0007939282849309488, + "loss": 1.6309, + "step": 3538 + }, + { + "epoch": 0.32108510252222827, + "grad_norm": 0.11210017614093512, + "learning_rate": 0.0007938094134335352, + "loss": 1.6168, + "step": 3539 + }, + { + "epoch": 0.3211758301578661, + "grad_norm": 0.10959397473924432, + "learning_rate": 0.0007936905165656354, + "loss": 1.6212, + "step": 3540 + }, + { + "epoch": 0.3212665577935039, + "grad_norm": 0.11079390493294182, + "learning_rate": 0.0007935715943375161, + "loss": 1.6921, + "step": 3541 + }, + { + "epoch": 0.3213572854291417, + "grad_norm": 0.11165819364569783, + "learning_rate": 0.0007934526467594465, + "loss": 1.6074, + "step": 3542 + }, + { + "epoch": 0.32144801306477955, + "grad_norm": 0.11209010186535681, + "learning_rate": 0.0007933336738416976, + "loss": 1.6198, + "step": 3543 + }, + { + "epoch": 0.3215387407004173, + "grad_norm": 0.11157720387927886, + "learning_rate": 0.0007932146755945426, + "loss": 1.6161, + "step": 3544 + }, + { + "epoch": 0.32162946833605516, + "grad_norm": 0.1146721738465521, + "learning_rate": 0.0007930956520282573, + "loss": 1.6337, + "step": 3545 + }, + { + "epoch": 0.321720195971693, + "grad_norm": 0.11065995863001533, + "learning_rate": 0.0007929766031531192, + "loss": 1.6281, + "step": 3546 + }, + { + "epoch": 0.32181092360733077, + "grad_norm": 0.11474876276368164, + "learning_rate": 0.0007928575289794082, + "loss": 1.6258, + "step": 3547 + }, + { + "epoch": 0.3219016512429686, + "grad_norm": 0.10814356914017245, + "learning_rate": 0.0007927384295174065, + "loss": 1.6433, + "step": 3548 + }, + { + "epoch": 
0.32199237887860643, + "grad_norm": 0.10797439324937706, + "learning_rate": 0.0007926193047773981, + "loss": 1.6542, + "step": 3549 + }, + { + "epoch": 0.32208310651424427, + "grad_norm": 0.11380246141293127, + "learning_rate": 0.0007925001547696698, + "loss": 1.6482, + "step": 3550 + }, + { + "epoch": 0.32217383414988204, + "grad_norm": 0.10879484813573272, + "learning_rate": 0.0007923809795045098, + "loss": 1.6213, + "step": 3551 + }, + { + "epoch": 0.3222645617855199, + "grad_norm": 0.11180842048408994, + "learning_rate": 0.0007922617789922093, + "loss": 1.6284, + "step": 3552 + }, + { + "epoch": 0.3223552894211577, + "grad_norm": 0.11143555487116864, + "learning_rate": 0.0007921425532430612, + "loss": 1.6455, + "step": 3553 + }, + { + "epoch": 0.3224460170567955, + "grad_norm": 0.11367244875155325, + "learning_rate": 0.0007920233022673604, + "loss": 1.6217, + "step": 3554 + }, + { + "epoch": 0.3225367446924333, + "grad_norm": 0.10887327526643684, + "learning_rate": 0.0007919040260754045, + "loss": 1.6803, + "step": 3555 + }, + { + "epoch": 0.32262747232807115, + "grad_norm": 0.11369416579460478, + "learning_rate": 0.0007917847246774927, + "loss": 1.6477, + "step": 3556 + }, + { + "epoch": 0.32271819996370893, + "grad_norm": 0.11498633511759244, + "learning_rate": 0.0007916653980839272, + "loss": 1.6086, + "step": 3557 + }, + { + "epoch": 0.32280892759934676, + "grad_norm": 0.11162482116500132, + "learning_rate": 0.0007915460463050114, + "loss": 1.6417, + "step": 3558 + }, + { + "epoch": 0.3228996552349846, + "grad_norm": 0.1122907952132257, + "learning_rate": 0.0007914266693510517, + "loss": 1.6174, + "step": 3559 + }, + { + "epoch": 0.3229903828706224, + "grad_norm": 0.11243878546951214, + "learning_rate": 0.000791307267232356, + "loss": 1.6274, + "step": 3560 + }, + { + "epoch": 0.3230811105062602, + "grad_norm": 0.10949495799851634, + "learning_rate": 0.0007911878399592349, + "loss": 1.6281, + "step": 3561 + }, + { + "epoch": 0.32317183814189804, + "grad_norm": 0.11913883284889955, + "learning_rate": 0.0007910683875420009, + "loss": 1.6313, + "step": 3562 + }, + { + "epoch": 0.3232625657775358, + "grad_norm": 0.11346315589987338, + "learning_rate": 0.0007909489099909688, + "loss": 1.5921, + "step": 3563 + }, + { + "epoch": 0.32335329341317365, + "grad_norm": 0.11440495066222316, + "learning_rate": 0.0007908294073164554, + "loss": 1.637, + "step": 3564 + }, + { + "epoch": 0.3234440210488115, + "grad_norm": 0.10984048986914174, + "learning_rate": 0.0007907098795287798, + "loss": 1.6591, + "step": 3565 + }, + { + "epoch": 0.32353474868444926, + "grad_norm": 0.10772476413916328, + "learning_rate": 0.0007905903266382633, + "loss": 1.6496, + "step": 3566 + }, + { + "epoch": 0.3236254763200871, + "grad_norm": 0.1115282465151772, + "learning_rate": 0.0007904707486552292, + "loss": 1.6423, + "step": 3567 + }, + { + "epoch": 0.3237162039557249, + "grad_norm": 0.1080630052391363, + "learning_rate": 0.0007903511455900031, + "loss": 1.6425, + "step": 3568 + }, + { + "epoch": 0.32380693159136276, + "grad_norm": 0.11106757990659749, + "learning_rate": 0.0007902315174529128, + "loss": 1.6103, + "step": 3569 + }, + { + "epoch": 0.32389765922700053, + "grad_norm": 0.1124773597866333, + "learning_rate": 0.0007901118642542883, + "loss": 1.6395, + "step": 3570 + }, + { + "epoch": 0.32398838686263837, + "grad_norm": 0.11336166382160343, + "learning_rate": 0.0007899921860044614, + "loss": 1.617, + "step": 3571 + }, + { + "epoch": 0.3240791144982762, + "grad_norm": 0.11406015423560505, + "learning_rate": 
0.0007898724827137667, + "loss": 1.6183, + "step": 3572 + }, + { + "epoch": 0.324169842133914, + "grad_norm": 0.11454861436569984, + "learning_rate": 0.0007897527543925402, + "loss": 1.6235, + "step": 3573 + }, + { + "epoch": 0.3242605697695518, + "grad_norm": 0.11490678755061173, + "learning_rate": 0.0007896330010511208, + "loss": 1.6823, + "step": 3574 + }, + { + "epoch": 0.32435129740518964, + "grad_norm": 0.11211300755002104, + "learning_rate": 0.0007895132226998491, + "loss": 1.6638, + "step": 3575 + }, + { + "epoch": 0.3244420250408274, + "grad_norm": 0.10969556316727976, + "learning_rate": 0.0007893934193490678, + "loss": 1.6416, + "step": 3576 + }, + { + "epoch": 0.32453275267646525, + "grad_norm": 0.10901489901138291, + "learning_rate": 0.0007892735910091221, + "loss": 1.6759, + "step": 3577 + }, + { + "epoch": 0.3246234803121031, + "grad_norm": 0.10952400827355266, + "learning_rate": 0.0007891537376903592, + "loss": 1.6081, + "step": 3578 + }, + { + "epoch": 0.32471420794774086, + "grad_norm": 0.11097162861059336, + "learning_rate": 0.0007890338594031286, + "loss": 1.6721, + "step": 3579 + }, + { + "epoch": 0.3248049355833787, + "grad_norm": 0.11858664303565593, + "learning_rate": 0.0007889139561577815, + "loss": 1.6679, + "step": 3580 + }, + { + "epoch": 0.32489566321901653, + "grad_norm": 0.1105588271089469, + "learning_rate": 0.0007887940279646717, + "loss": 1.6031, + "step": 3581 + }, + { + "epoch": 0.3249863908546543, + "grad_norm": 0.11836321455788386, + "learning_rate": 0.000788674074834155, + "loss": 1.6563, + "step": 3582 + }, + { + "epoch": 0.32507711849029214, + "grad_norm": 0.11100174488387309, + "learning_rate": 0.0007885540967765895, + "loss": 1.6348, + "step": 3583 + }, + { + "epoch": 0.32516784612592997, + "grad_norm": 0.11837191276722096, + "learning_rate": 0.0007884340938023351, + "loss": 1.6386, + "step": 3584 + }, + { + "epoch": 0.32525857376156775, + "grad_norm": 0.11418913551767, + "learning_rate": 0.0007883140659217543, + "loss": 1.6031, + "step": 3585 + }, + { + "epoch": 0.3253493013972056, + "grad_norm": 0.11406754430752268, + "learning_rate": 0.0007881940131452112, + "loss": 1.6378, + "step": 3586 + }, + { + "epoch": 0.3254400290328434, + "grad_norm": 0.11141322523123105, + "learning_rate": 0.0007880739354830729, + "loss": 1.6354, + "step": 3587 + }, + { + "epoch": 0.32553075666848125, + "grad_norm": 0.11319107206531179, + "learning_rate": 0.0007879538329457076, + "loss": 1.6253, + "step": 3588 + }, + { + "epoch": 0.325621484304119, + "grad_norm": 0.10744070073666531, + "learning_rate": 0.0007878337055434864, + "loss": 1.6371, + "step": 3589 + }, + { + "epoch": 0.32571221193975686, + "grad_norm": 0.1108058138729872, + "learning_rate": 0.0007877135532867823, + "loss": 1.6179, + "step": 3590 + }, + { + "epoch": 0.3258029395753947, + "grad_norm": 0.11077692613790768, + "learning_rate": 0.0007875933761859706, + "loss": 1.6273, + "step": 3591 + }, + { + "epoch": 0.32589366721103247, + "grad_norm": 0.11024199179878864, + "learning_rate": 0.0007874731742514284, + "loss": 1.6, + "step": 3592 + }, + { + "epoch": 0.3259843948466703, + "grad_norm": 0.11080451105061859, + "learning_rate": 0.0007873529474935353, + "loss": 1.6317, + "step": 3593 + }, + { + "epoch": 0.32607512248230813, + "grad_norm": 0.1167055839850932, + "learning_rate": 0.0007872326959226727, + "loss": 1.6375, + "step": 3594 + }, + { + "epoch": 0.3261658501179459, + "grad_norm": 0.10768242892182167, + "learning_rate": 0.0007871124195492245, + "loss": 1.5891, + "step": 3595 + }, + { + "epoch": 
0.32625657775358374, + "grad_norm": 0.10947094416097143, + "learning_rate": 0.0007869921183835766, + "loss": 1.6431, + "step": 3596 + }, + { + "epoch": 0.3263473053892216, + "grad_norm": 0.1155386135415904, + "learning_rate": 0.0007868717924361168, + "loss": 1.5908, + "step": 3597 + }, + { + "epoch": 0.32643803302485935, + "grad_norm": 0.11007832844395464, + "learning_rate": 0.0007867514417172356, + "loss": 1.6589, + "step": 3598 + }, + { + "epoch": 0.3265287606604972, + "grad_norm": 0.11108406610877317, + "learning_rate": 0.0007866310662373253, + "loss": 1.6181, + "step": 3599 + }, + { + "epoch": 0.326619488296135, + "grad_norm": 0.11320127061583833, + "learning_rate": 0.00078651066600678, + "loss": 1.6049, + "step": 3600 + }, + { + "epoch": 0.3267102159317728, + "grad_norm": 0.10920401067201732, + "learning_rate": 0.0007863902410359966, + "loss": 1.5683, + "step": 3601 + }, + { + "epoch": 0.32680094356741063, + "grad_norm": 0.11287438645120079, + "learning_rate": 0.0007862697913353736, + "loss": 1.626, + "step": 3602 + }, + { + "epoch": 0.32689167120304846, + "grad_norm": 0.11760162941993352, + "learning_rate": 0.0007861493169153118, + "loss": 1.5878, + "step": 3603 + }, + { + "epoch": 0.32698239883868624, + "grad_norm": 0.11143049577542312, + "learning_rate": 0.0007860288177862145, + "loss": 1.6493, + "step": 3604 + }, + { + "epoch": 0.32707312647432407, + "grad_norm": 0.11397463588108876, + "learning_rate": 0.0007859082939584866, + "loss": 1.6463, + "step": 3605 + }, + { + "epoch": 0.3271638541099619, + "grad_norm": 0.11268544304147751, + "learning_rate": 0.0007857877454425353, + "loss": 1.6615, + "step": 3606 + }, + { + "epoch": 0.3272545817455997, + "grad_norm": 0.10708360002049683, + "learning_rate": 0.00078566717224877, + "loss": 1.6367, + "step": 3607 + }, + { + "epoch": 0.3273453093812375, + "grad_norm": 0.1108179447999257, + "learning_rate": 0.0007855465743876024, + "loss": 1.6085, + "step": 3608 + }, + { + "epoch": 0.32743603701687535, + "grad_norm": 0.11420509145944352, + "learning_rate": 0.0007854259518694459, + "loss": 1.6127, + "step": 3609 + }, + { + "epoch": 0.3275267646525132, + "grad_norm": 0.11225238342369684, + "learning_rate": 0.0007853053047047165, + "loss": 1.6347, + "step": 3610 + }, + { + "epoch": 0.32761749228815096, + "grad_norm": 0.11616848972902931, + "learning_rate": 0.000785184632903832, + "loss": 1.6518, + "step": 3611 + }, + { + "epoch": 0.3277082199237888, + "grad_norm": 0.11185228452393524, + "learning_rate": 0.0007850639364772123, + "loss": 1.6676, + "step": 3612 + }, + { + "epoch": 0.3277989475594266, + "grad_norm": 0.11634064674735224, + "learning_rate": 0.0007849432154352797, + "loss": 1.6281, + "step": 3613 + }, + { + "epoch": 0.3278896751950644, + "grad_norm": 0.1073111335629564, + "learning_rate": 0.0007848224697884585, + "loss": 1.6467, + "step": 3614 + }, + { + "epoch": 0.32798040283070223, + "grad_norm": 0.10798416292209284, + "learning_rate": 0.000784701699547175, + "loss": 1.6368, + "step": 3615 + }, + { + "epoch": 0.32807113046634007, + "grad_norm": 0.11268540148655827, + "learning_rate": 0.0007845809047218579, + "loss": 1.583, + "step": 3616 + }, + { + "epoch": 0.32816185810197784, + "grad_norm": 0.1147455491584213, + "learning_rate": 0.0007844600853229376, + "loss": 1.6545, + "step": 3617 + }, + { + "epoch": 0.3282525857376157, + "grad_norm": 0.1128957810689354, + "learning_rate": 0.000784339241360847, + "loss": 1.6408, + "step": 3618 + }, + { + "epoch": 0.3283433133732535, + "grad_norm": 0.11040518317542938, + "learning_rate": 
0.0007842183728460211, + "loss": 1.5612, + "step": 3619 + }, + { + "epoch": 0.3284340410088913, + "grad_norm": 0.11232716278096365, + "learning_rate": 0.0007840974797888967, + "loss": 1.6539, + "step": 3620 + }, + { + "epoch": 0.3285247686445291, + "grad_norm": 0.11036232061996983, + "learning_rate": 0.0007839765621999133, + "loss": 1.6046, + "step": 3621 + }, + { + "epoch": 0.32861549628016695, + "grad_norm": 0.11328170545443901, + "learning_rate": 0.0007838556200895117, + "loss": 1.604, + "step": 3622 + }, + { + "epoch": 0.32870622391580473, + "grad_norm": 0.11893214079716445, + "learning_rate": 0.0007837346534681355, + "loss": 1.6062, + "step": 3623 + }, + { + "epoch": 0.32879695155144256, + "grad_norm": 0.11045961714465047, + "learning_rate": 0.0007836136623462301, + "loss": 1.6211, + "step": 3624 + }, + { + "epoch": 0.3288876791870804, + "grad_norm": 0.10924101632351081, + "learning_rate": 0.0007834926467342433, + "loss": 1.632, + "step": 3625 + }, + { + "epoch": 0.3289784068227182, + "grad_norm": 0.11142114437339362, + "learning_rate": 0.0007833716066426246, + "loss": 1.6199, + "step": 3626 + }, + { + "epoch": 0.329069134458356, + "grad_norm": 0.10761274753811295, + "learning_rate": 0.0007832505420818259, + "loss": 1.6074, + "step": 3627 + }, + { + "epoch": 0.32915986209399384, + "grad_norm": 0.10835598315606104, + "learning_rate": 0.000783129453062301, + "loss": 1.5878, + "step": 3628 + }, + { + "epoch": 0.32925058972963167, + "grad_norm": 0.10733348522271341, + "learning_rate": 0.0007830083395945062, + "loss": 1.6376, + "step": 3629 + }, + { + "epoch": 0.32934131736526945, + "grad_norm": 0.11190633303434125, + "learning_rate": 0.0007828872016888998, + "loss": 1.5892, + "step": 3630 + }, + { + "epoch": 0.3294320450009073, + "grad_norm": 0.11347461623715682, + "learning_rate": 0.0007827660393559416, + "loss": 1.6064, + "step": 3631 + }, + { + "epoch": 0.3295227726365451, + "grad_norm": 0.11056562364906804, + "learning_rate": 0.0007826448526060942, + "loss": 1.6322, + "step": 3632 + }, + { + "epoch": 0.3296135002721829, + "grad_norm": 0.10586784607607833, + "learning_rate": 0.0007825236414498222, + "loss": 1.627, + "step": 3633 + }, + { + "epoch": 0.3297042279078207, + "grad_norm": 0.11140947397448257, + "learning_rate": 0.0007824024058975921, + "loss": 1.6097, + "step": 3634 + }, + { + "epoch": 0.32979495554345856, + "grad_norm": 0.11435879755622468, + "learning_rate": 0.0007822811459598727, + "loss": 1.581, + "step": 3635 + }, + { + "epoch": 0.32988568317909633, + "grad_norm": 0.11395151991185584, + "learning_rate": 0.0007821598616471345, + "loss": 1.6606, + "step": 3636 + }, + { + "epoch": 0.32997641081473417, + "grad_norm": 0.11407322111497237, + "learning_rate": 0.000782038552969851, + "loss": 1.6341, + "step": 3637 + }, + { + "epoch": 0.330067138450372, + "grad_norm": 0.11424123872466195, + "learning_rate": 0.0007819172199384967, + "loss": 1.6497, + "step": 3638 + }, + { + "epoch": 0.3301578660860098, + "grad_norm": 0.11024783940491757, + "learning_rate": 0.0007817958625635489, + "loss": 1.6424, + "step": 3639 + }, + { + "epoch": 0.3302485937216476, + "grad_norm": 0.10865874073630478, + "learning_rate": 0.0007816744808554867, + "loss": 1.6708, + "step": 3640 + }, + { + "epoch": 0.33033932135728544, + "grad_norm": 0.11047367454912199, + "learning_rate": 0.0007815530748247919, + "loss": 1.6205, + "step": 3641 + }, + { + "epoch": 0.3304300489929232, + "grad_norm": 0.10694338192946617, + "learning_rate": 0.0007814316444819474, + "loss": 1.647, + "step": 3642 + }, + { + "epoch": 
0.33052077662856105, + "grad_norm": 0.10887948609033017, + "learning_rate": 0.000781310189837439, + "loss": 1.6445, + "step": 3643 + }, + { + "epoch": 0.3306115042641989, + "grad_norm": 0.10553985471105616, + "learning_rate": 0.0007811887109017542, + "loss": 1.6607, + "step": 3644 + }, + { + "epoch": 0.33070223189983666, + "grad_norm": 0.10870926128641663, + "learning_rate": 0.0007810672076853827, + "loss": 1.6255, + "step": 3645 + }, + { + "epoch": 0.3307929595354745, + "grad_norm": 0.1116870611706354, + "learning_rate": 0.0007809456801988164, + "loss": 1.6012, + "step": 3646 + }, + { + "epoch": 0.33088368717111233, + "grad_norm": 0.10859279935599428, + "learning_rate": 0.0007808241284525492, + "loss": 1.6681, + "step": 3647 + }, + { + "epoch": 0.33097441480675016, + "grad_norm": 0.11575151867671458, + "learning_rate": 0.0007807025524570772, + "loss": 1.599, + "step": 3648 + }, + { + "epoch": 0.33106514244238794, + "grad_norm": 0.11162146381533843, + "learning_rate": 0.0007805809522228984, + "loss": 1.606, + "step": 3649 + }, + { + "epoch": 0.33115587007802577, + "grad_norm": 0.11409908589313654, + "learning_rate": 0.0007804593277605131, + "loss": 1.6023, + "step": 3650 + }, + { + "epoch": 0.3312465977136636, + "grad_norm": 0.10910410171997739, + "learning_rate": 0.0007803376790804233, + "loss": 1.631, + "step": 3651 + }, + { + "epoch": 0.3313373253493014, + "grad_norm": 0.112152123904596, + "learning_rate": 0.0007802160061931338, + "loss": 1.6417, + "step": 3652 + }, + { + "epoch": 0.3314280529849392, + "grad_norm": 0.1124688399135861, + "learning_rate": 0.0007800943091091508, + "loss": 1.6355, + "step": 3653 + }, + { + "epoch": 0.33151878062057705, + "grad_norm": 0.10665883978681907, + "learning_rate": 0.000779972587838983, + "loss": 1.6197, + "step": 3654 + }, + { + "epoch": 0.3316095082562148, + "grad_norm": 0.10726842179893915, + "learning_rate": 0.0007798508423931407, + "loss": 1.6021, + "step": 3655 + }, + { + "epoch": 0.33170023589185266, + "grad_norm": 0.10870702037056262, + "learning_rate": 0.0007797290727821371, + "loss": 1.6395, + "step": 3656 + }, + { + "epoch": 0.3317909635274905, + "grad_norm": 0.1054574651296614, + "learning_rate": 0.000779607279016487, + "loss": 1.6226, + "step": 3657 + }, + { + "epoch": 0.33188169116312827, + "grad_norm": 0.10481854760081422, + "learning_rate": 0.000779485461106707, + "loss": 1.6391, + "step": 3658 + }, + { + "epoch": 0.3319724187987661, + "grad_norm": 0.11046243716509228, + "learning_rate": 0.0007793636190633161, + "loss": 1.6277, + "step": 3659 + }, + { + "epoch": 0.33206314643440393, + "grad_norm": 0.11149208392181918, + "learning_rate": 0.0007792417528968357, + "loss": 1.6331, + "step": 3660 + }, + { + "epoch": 0.3321538740700417, + "grad_norm": 0.10835836493814653, + "learning_rate": 0.0007791198626177888, + "loss": 1.5982, + "step": 3661 + }, + { + "epoch": 0.33224460170567954, + "grad_norm": 0.10862392289398691, + "learning_rate": 0.0007789979482367004, + "loss": 1.646, + "step": 3662 + }, + { + "epoch": 0.3323353293413174, + "grad_norm": 0.1126544232470771, + "learning_rate": 0.0007788760097640983, + "loss": 1.6384, + "step": 3663 + }, + { + "epoch": 0.33242605697695515, + "grad_norm": 0.1125943559173475, + "learning_rate": 0.0007787540472105115, + "loss": 1.663, + "step": 3664 + }, + { + "epoch": 0.332516784612593, + "grad_norm": 0.10948811776379959, + "learning_rate": 0.0007786320605864718, + "loss": 1.686, + "step": 3665 + }, + { + "epoch": 0.3326075122482308, + "grad_norm": 0.11431021334657367, + "learning_rate": 
0.0007785100499025125, + "loss": 1.653, + "step": 3666 + }, + { + "epoch": 0.33269823988386865, + "grad_norm": 0.10736210856647585, + "learning_rate": 0.0007783880151691694, + "loss": 1.644, + "step": 3667 + }, + { + "epoch": 0.33278896751950643, + "grad_norm": 0.10775860483497596, + "learning_rate": 0.00077826595639698, + "loss": 1.5936, + "step": 3668 + }, + { + "epoch": 0.33287969515514426, + "grad_norm": 0.1115017476189224, + "learning_rate": 0.0007781438735964845, + "loss": 1.6185, + "step": 3669 + }, + { + "epoch": 0.3329704227907821, + "grad_norm": 0.10521219193587648, + "learning_rate": 0.0007780217667782243, + "loss": 1.5922, + "step": 3670 + }, + { + "epoch": 0.33306115042641987, + "grad_norm": 0.10821560319716071, + "learning_rate": 0.0007778996359527436, + "loss": 1.6396, + "step": 3671 + }, + { + "epoch": 0.3331518780620577, + "grad_norm": 0.1082141128348619, + "learning_rate": 0.0007777774811305884, + "loss": 1.6579, + "step": 3672 + }, + { + "epoch": 0.33324260569769554, + "grad_norm": 0.10485413378818496, + "learning_rate": 0.0007776553023223068, + "loss": 1.6623, + "step": 3673 + }, + { + "epoch": 0.3333333333333333, + "grad_norm": 0.10798500506291284, + "learning_rate": 0.000777533099538449, + "loss": 1.652, + "step": 3674 + }, + { + "epoch": 0.33342406096897115, + "grad_norm": 0.10796260980886146, + "learning_rate": 0.000777410872789567, + "loss": 1.6514, + "step": 3675 + }, + { + "epoch": 0.333514788604609, + "grad_norm": 0.11270562458572983, + "learning_rate": 0.0007772886220862153, + "loss": 1.6506, + "step": 3676 + }, + { + "epoch": 0.33360551624024676, + "grad_norm": 0.10557799195976586, + "learning_rate": 0.0007771663474389504, + "loss": 1.6314, + "step": 3677 + }, + { + "epoch": 0.3336962438758846, + "grad_norm": 0.1028852764051647, + "learning_rate": 0.0007770440488583301, + "loss": 1.6431, + "step": 3678 + }, + { + "epoch": 0.3337869715115224, + "grad_norm": 0.10422729145120369, + "learning_rate": 0.0007769217263549157, + "loss": 1.6134, + "step": 3679 + }, + { + "epoch": 0.3338776991471602, + "grad_norm": 0.10282218598156787, + "learning_rate": 0.0007767993799392693, + "loss": 1.6376, + "step": 3680 + }, + { + "epoch": 0.33396842678279803, + "grad_norm": 0.10258751573184119, + "learning_rate": 0.0007766770096219557, + "loss": 1.6215, + "step": 3681 + }, + { + "epoch": 0.33405915441843587, + "grad_norm": 0.1057886102194109, + "learning_rate": 0.0007765546154135417, + "loss": 1.6145, + "step": 3682 + }, + { + "epoch": 0.33414988205407364, + "grad_norm": 0.10909180280235642, + "learning_rate": 0.0007764321973245955, + "loss": 1.641, + "step": 3683 + }, + { + "epoch": 0.3342406096897115, + "grad_norm": 0.10497891753267342, + "learning_rate": 0.0007763097553656886, + "loss": 1.67, + "step": 3684 + }, + { + "epoch": 0.3343313373253493, + "grad_norm": 0.1059003507271752, + "learning_rate": 0.0007761872895473936, + "loss": 1.6047, + "step": 3685 + }, + { + "epoch": 0.33442206496098714, + "grad_norm": 0.102683485872095, + "learning_rate": 0.0007760647998802853, + "loss": 1.6364, + "step": 3686 + }, + { + "epoch": 0.3345127925966249, + "grad_norm": 0.10837588666446717, + "learning_rate": 0.0007759422863749409, + "loss": 1.5966, + "step": 3687 + }, + { + "epoch": 0.33460352023226275, + "grad_norm": 0.10669606024142889, + "learning_rate": 0.0007758197490419394, + "loss": 1.6244, + "step": 3688 + }, + { + "epoch": 0.3346942478679006, + "grad_norm": 0.10693554973692719, + "learning_rate": 0.000775697187891862, + "loss": 1.6618, + "step": 3689 + }, + { + "epoch": 
0.33478497550353836, + "grad_norm": 0.10817214547930692, + "learning_rate": 0.0007755746029352917, + "loss": 1.6649, + "step": 3690 + }, + { + "epoch": 0.3348757031391762, + "grad_norm": 0.11393793530216086, + "learning_rate": 0.0007754519941828139, + "loss": 1.637, + "step": 3691 + }, + { + "epoch": 0.33496643077481403, + "grad_norm": 0.11252757289635897, + "learning_rate": 0.0007753293616450157, + "loss": 1.6425, + "step": 3692 + }, + { + "epoch": 0.3350571584104518, + "grad_norm": 0.11515059250896555, + "learning_rate": 0.0007752067053324867, + "loss": 1.6217, + "step": 3693 + }, + { + "epoch": 0.33514788604608964, + "grad_norm": 0.11690722803910256, + "learning_rate": 0.000775084025255818, + "loss": 1.6606, + "step": 3694 + }, + { + "epoch": 0.33523861368172747, + "grad_norm": 0.11221819325439553, + "learning_rate": 0.0007749613214256033, + "loss": 1.606, + "step": 3695 + }, + { + "epoch": 0.33532934131736525, + "grad_norm": 0.11039847332996698, + "learning_rate": 0.0007748385938524378, + "loss": 1.601, + "step": 3696 + }, + { + "epoch": 0.3354200689530031, + "grad_norm": 0.11213524822966382, + "learning_rate": 0.0007747158425469194, + "loss": 1.6868, + "step": 3697 + }, + { + "epoch": 0.3355107965886409, + "grad_norm": 0.10973905922384335, + "learning_rate": 0.0007745930675196473, + "loss": 1.5923, + "step": 3698 + }, + { + "epoch": 0.3356015242242787, + "grad_norm": 0.1164354297658847, + "learning_rate": 0.0007744702687812235, + "loss": 1.6524, + "step": 3699 + }, + { + "epoch": 0.3356922518599165, + "grad_norm": 0.11095798070926759, + "learning_rate": 0.0007743474463422516, + "loss": 1.6123, + "step": 3700 + }, + { + "epoch": 0.33578297949555436, + "grad_norm": 0.11005867502394347, + "learning_rate": 0.000774224600213337, + "loss": 1.6493, + "step": 3701 + }, + { + "epoch": 0.33587370713119213, + "grad_norm": 0.1122981345897059, + "learning_rate": 0.0007741017304050879, + "loss": 1.6177, + "step": 3702 + }, + { + "epoch": 0.33596443476682997, + "grad_norm": 0.11027593597988884, + "learning_rate": 0.000773978836928114, + "loss": 1.6469, + "step": 3703 + }, + { + "epoch": 0.3360551624024678, + "grad_norm": 0.11284193386419776, + "learning_rate": 0.0007738559197930273, + "loss": 1.6343, + "step": 3704 + }, + { + "epoch": 0.33614589003810563, + "grad_norm": 0.11059541061864715, + "learning_rate": 0.0007737329790104414, + "loss": 1.5886, + "step": 3705 + }, + { + "epoch": 0.3362366176737434, + "grad_norm": 0.10674131210339831, + "learning_rate": 0.0007736100145909724, + "loss": 1.6218, + "step": 3706 + }, + { + "epoch": 0.33632734530938124, + "grad_norm": 0.10705127896138274, + "learning_rate": 0.0007734870265452382, + "loss": 1.6262, + "step": 3707 + }, + { + "epoch": 0.3364180729450191, + "grad_norm": 0.1051492301408495, + "learning_rate": 0.0007733640148838592, + "loss": 1.6173, + "step": 3708 + }, + { + "epoch": 0.33650880058065685, + "grad_norm": 0.10955177931499499, + "learning_rate": 0.000773240979617457, + "loss": 1.6385, + "step": 3709 + }, + { + "epoch": 0.3365995282162947, + "grad_norm": 0.10862082290718589, + "learning_rate": 0.0007731179207566561, + "loss": 1.6167, + "step": 3710 + }, + { + "epoch": 0.3366902558519325, + "grad_norm": 0.10805773224633272, + "learning_rate": 0.0007729948383120827, + "loss": 1.6334, + "step": 3711 + }, + { + "epoch": 0.3367809834875703, + "grad_norm": 0.10741837898577672, + "learning_rate": 0.0007728717322943643, + "loss": 1.6398, + "step": 3712 + }, + { + "epoch": 0.33687171112320813, + "grad_norm": 0.11053411240848057, + "learning_rate": 
0.0007727486027141319, + "loss": 1.6216, + "step": 3713 + }, + { + "epoch": 0.33696243875884596, + "grad_norm": 0.11292918198100296, + "learning_rate": 0.0007726254495820173, + "loss": 1.6262, + "step": 3714 + }, + { + "epoch": 0.33705316639448374, + "grad_norm": 0.10959173804338264, + "learning_rate": 0.0007725022729086551, + "loss": 1.6505, + "step": 3715 + }, + { + "epoch": 0.33714389403012157, + "grad_norm": 0.10708514051061192, + "learning_rate": 0.0007723790727046815, + "loss": 1.6201, + "step": 3716 + }, + { + "epoch": 0.3372346216657594, + "grad_norm": 0.10868173765379692, + "learning_rate": 0.0007722558489807347, + "loss": 1.6274, + "step": 3717 + }, + { + "epoch": 0.3373253493013972, + "grad_norm": 0.11586739833578528, + "learning_rate": 0.0007721326017474552, + "loss": 1.652, + "step": 3718 + }, + { + "epoch": 0.337416076937035, + "grad_norm": 0.11088823043153005, + "learning_rate": 0.0007720093310154855, + "loss": 1.6177, + "step": 3719 + }, + { + "epoch": 0.33750680457267285, + "grad_norm": 0.1104170134005954, + "learning_rate": 0.0007718860367954698, + "loss": 1.64, + "step": 3720 + }, + { + "epoch": 0.3375975322083106, + "grad_norm": 0.10794334970885297, + "learning_rate": 0.000771762719098055, + "loss": 1.6288, + "step": 3721 + }, + { + "epoch": 0.33768825984394846, + "grad_norm": 0.10838475653014029, + "learning_rate": 0.0007716393779338892, + "loss": 1.6929, + "step": 3722 + }, + { + "epoch": 0.3377789874795863, + "grad_norm": 0.10737655171094312, + "learning_rate": 0.0007715160133136232, + "loss": 1.6687, + "step": 3723 + }, + { + "epoch": 0.3378697151152241, + "grad_norm": 0.10985623057130742, + "learning_rate": 0.0007713926252479093, + "loss": 1.6484, + "step": 3724 + }, + { + "epoch": 0.3379604427508619, + "grad_norm": 0.11208518779211937, + "learning_rate": 0.0007712692137474025, + "loss": 1.6661, + "step": 3725 + }, + { + "epoch": 0.33805117038649973, + "grad_norm": 0.11331329503388032, + "learning_rate": 0.0007711457788227587, + "loss": 1.6084, + "step": 3726 + }, + { + "epoch": 0.33814189802213757, + "grad_norm": 0.1087588981189955, + "learning_rate": 0.0007710223204846372, + "loss": 1.6144, + "step": 3727 + }, + { + "epoch": 0.33823262565777534, + "grad_norm": 0.10850127763243649, + "learning_rate": 0.0007708988387436984, + "loss": 1.6213, + "step": 3728 + }, + { + "epoch": 0.3383233532934132, + "grad_norm": 0.11258364732633924, + "learning_rate": 0.0007707753336106047, + "loss": 1.6477, + "step": 3729 + }, + { + "epoch": 0.338414080929051, + "grad_norm": 0.10689328136571345, + "learning_rate": 0.0007706518050960212, + "loss": 1.603, + "step": 3730 + }, + { + "epoch": 0.3385048085646888, + "grad_norm": 0.1078333722577742, + "learning_rate": 0.0007705282532106144, + "loss": 1.6564, + "step": 3731 + }, + { + "epoch": 0.3385955362003266, + "grad_norm": 0.11167475868744149, + "learning_rate": 0.000770404677965053, + "loss": 1.5791, + "step": 3732 + }, + { + "epoch": 0.33868626383596445, + "grad_norm": 0.1081215179801604, + "learning_rate": 0.000770281079370008, + "loss": 1.643, + "step": 3733 + }, + { + "epoch": 0.33877699147160223, + "grad_norm": 0.10577490007168482, + "learning_rate": 0.0007701574574361518, + "loss": 1.6007, + "step": 3734 + }, + { + "epoch": 0.33886771910724006, + "grad_norm": 0.11078438424361906, + "learning_rate": 0.0007700338121741595, + "loss": 1.6162, + "step": 3735 + }, + { + "epoch": 0.3389584467428779, + "grad_norm": 0.10907645569693265, + "learning_rate": 0.0007699101435947077, + "loss": 1.659, + "step": 3736 + }, + { + "epoch": 
0.33904917437851567, + "grad_norm": 0.10942887816947303, + "learning_rate": 0.0007697864517084749, + "loss": 1.629, + "step": 3737 + }, + { + "epoch": 0.3391399020141535, + "grad_norm": 0.10634192027087155, + "learning_rate": 0.0007696627365261427, + "loss": 1.6555, + "step": 3738 + }, + { + "epoch": 0.33923062964979134, + "grad_norm": 0.10830063924613215, + "learning_rate": 0.0007695389980583932, + "loss": 1.6032, + "step": 3739 + }, + { + "epoch": 0.3393213572854291, + "grad_norm": 0.10995464438358402, + "learning_rate": 0.0007694152363159115, + "loss": 1.643, + "step": 3740 + }, + { + "epoch": 0.33941208492106695, + "grad_norm": 0.10991017940177043, + "learning_rate": 0.0007692914513093844, + "loss": 1.6162, + "step": 3741 + }, + { + "epoch": 0.3395028125567048, + "grad_norm": 0.11039617169456256, + "learning_rate": 0.0007691676430495007, + "loss": 1.6004, + "step": 3742 + }, + { + "epoch": 0.3395935401923426, + "grad_norm": 0.10767259492595632, + "learning_rate": 0.0007690438115469516, + "loss": 1.6326, + "step": 3743 + }, + { + "epoch": 0.3396842678279804, + "grad_norm": 0.11532638254916673, + "learning_rate": 0.0007689199568124297, + "loss": 1.6755, + "step": 3744 + }, + { + "epoch": 0.3397749954636182, + "grad_norm": 0.10809720178545654, + "learning_rate": 0.0007687960788566298, + "loss": 1.5954, + "step": 3745 + }, + { + "epoch": 0.33986572309925606, + "grad_norm": 0.10384976780549095, + "learning_rate": 0.0007686721776902488, + "loss": 1.6468, + "step": 3746 + }, + { + "epoch": 0.33995645073489383, + "grad_norm": 0.10570018565826506, + "learning_rate": 0.0007685482533239858, + "loss": 1.6474, + "step": 3747 + }, + { + "epoch": 0.34004717837053167, + "grad_norm": 0.10460092031666834, + "learning_rate": 0.0007684243057685414, + "loss": 1.6559, + "step": 3748 + }, + { + "epoch": 0.3401379060061695, + "grad_norm": 0.11418951936772032, + "learning_rate": 0.0007683003350346187, + "loss": 1.6464, + "step": 3749 + }, + { + "epoch": 0.3402286336418073, + "grad_norm": 0.10761628569204945, + "learning_rate": 0.0007681763411329226, + "loss": 1.6422, + "step": 3750 + }, + { + "epoch": 0.3403193612774451, + "grad_norm": 0.10788993686953033, + "learning_rate": 0.00076805232407416, + "loss": 1.5874, + "step": 3751 + }, + { + "epoch": 0.34041008891308294, + "grad_norm": 0.10873722065467063, + "learning_rate": 0.0007679282838690395, + "loss": 1.6483, + "step": 3752 + }, + { + "epoch": 0.3405008165487207, + "grad_norm": 0.11101572520803595, + "learning_rate": 0.0007678042205282723, + "loss": 1.5818, + "step": 3753 + }, + { + "epoch": 0.34059154418435855, + "grad_norm": 0.11586862679782176, + "learning_rate": 0.000767680134062571, + "loss": 1.6319, + "step": 3754 + }, + { + "epoch": 0.3406822718199964, + "grad_norm": 0.11062453938753976, + "learning_rate": 0.0007675560244826508, + "loss": 1.6104, + "step": 3755 + }, + { + "epoch": 0.34077299945563416, + "grad_norm": 0.1191011001337582, + "learning_rate": 0.0007674318917992287, + "loss": 1.5914, + "step": 3756 + }, + { + "epoch": 0.340863727091272, + "grad_norm": 0.10671596319926478, + "learning_rate": 0.000767307736023023, + "loss": 1.6125, + "step": 3757 + }, + { + "epoch": 0.34095445472690983, + "grad_norm": 0.10950607985553855, + "learning_rate": 0.0007671835571647549, + "loss": 1.6686, + "step": 3758 + }, + { + "epoch": 0.3410451823625476, + "grad_norm": 0.10798305320151426, + "learning_rate": 0.0007670593552351475, + "loss": 1.6772, + "step": 3759 + }, + { + "epoch": 0.34113590999818544, + "grad_norm": 0.10771625787664607, + "learning_rate": 
0.0007669351302449253, + "loss": 1.6216, + "step": 3760 + }, + { + "epoch": 0.34122663763382327, + "grad_norm": 0.110607737964297, + "learning_rate": 0.0007668108822048152, + "loss": 1.6486, + "step": 3761 + }, + { + "epoch": 0.3413173652694611, + "grad_norm": 0.10931311188530288, + "learning_rate": 0.0007666866111255461, + "loss": 1.6271, + "step": 3762 + }, + { + "epoch": 0.3414080929050989, + "grad_norm": 0.11072105805401707, + "learning_rate": 0.0007665623170178489, + "loss": 1.6584, + "step": 3763 + }, + { + "epoch": 0.3414988205407367, + "grad_norm": 0.10818489896009525, + "learning_rate": 0.0007664379998924563, + "loss": 1.6122, + "step": 3764 + }, + { + "epoch": 0.34158954817637455, + "grad_norm": 0.11139074967880734, + "learning_rate": 0.0007663136597601031, + "loss": 1.6201, + "step": 3765 + }, + { + "epoch": 0.3416802758120123, + "grad_norm": 0.10966670480412166, + "learning_rate": 0.0007661892966315262, + "loss": 1.6199, + "step": 3766 + }, + { + "epoch": 0.34177100344765016, + "grad_norm": 0.10494555759215915, + "learning_rate": 0.0007660649105174642, + "loss": 1.6332, + "step": 3767 + }, + { + "epoch": 0.341861731083288, + "grad_norm": 0.10428970184644991, + "learning_rate": 0.000765940501428658, + "loss": 1.6107, + "step": 3768 + }, + { + "epoch": 0.34195245871892577, + "grad_norm": 0.10484200266078333, + "learning_rate": 0.0007658160693758504, + "loss": 1.5822, + "step": 3769 + }, + { + "epoch": 0.3420431863545636, + "grad_norm": 0.10659533836281256, + "learning_rate": 0.000765691614369786, + "loss": 1.6567, + "step": 3770 + }, + { + "epoch": 0.34213391399020143, + "grad_norm": 0.10896600621955853, + "learning_rate": 0.0007655671364212116, + "loss": 1.6744, + "step": 3771 + }, + { + "epoch": 0.3422246416258392, + "grad_norm": 0.11130920243721269, + "learning_rate": 0.0007654426355408756, + "loss": 1.6263, + "step": 3772 + }, + { + "epoch": 0.34231536926147704, + "grad_norm": 0.11253730102385893, + "learning_rate": 0.0007653181117395292, + "loss": 1.6701, + "step": 3773 + }, + { + "epoch": 0.3424060968971149, + "grad_norm": 0.11011457494414571, + "learning_rate": 0.0007651935650279245, + "loss": 1.6735, + "step": 3774 + }, + { + "epoch": 0.34249682453275265, + "grad_norm": 0.10526852801690909, + "learning_rate": 0.0007650689954168166, + "loss": 1.6686, + "step": 3775 + }, + { + "epoch": 0.3425875521683905, + "grad_norm": 0.10601438323203811, + "learning_rate": 0.0007649444029169617, + "loss": 1.5901, + "step": 3776 + }, + { + "epoch": 0.3426782798040283, + "grad_norm": 0.10985059485720225, + "learning_rate": 0.0007648197875391185, + "loss": 1.6433, + "step": 3777 + }, + { + "epoch": 0.3427690074396661, + "grad_norm": 0.10785215262705455, + "learning_rate": 0.0007646951492940479, + "loss": 1.623, + "step": 3778 + }, + { + "epoch": 0.34285973507530393, + "grad_norm": 0.11202929208837137, + "learning_rate": 0.0007645704881925118, + "loss": 1.6552, + "step": 3779 + }, + { + "epoch": 0.34295046271094176, + "grad_norm": 0.1098471933206107, + "learning_rate": 0.0007644458042452753, + "loss": 1.624, + "step": 3780 + }, + { + "epoch": 0.3430411903465796, + "grad_norm": 0.1114951989553543, + "learning_rate": 0.0007643210974631045, + "loss": 1.617, + "step": 3781 + }, + { + "epoch": 0.34313191798221737, + "grad_norm": 0.11470997576805797, + "learning_rate": 0.000764196367856768, + "loss": 1.6511, + "step": 3782 + }, + { + "epoch": 0.3432226456178552, + "grad_norm": 0.11553030956761097, + "learning_rate": 0.0007640716154370363, + "loss": 1.6638, + "step": 3783 + }, + { + "epoch": 
0.34331337325349304, + "grad_norm": 0.10704874378990817, + "learning_rate": 0.0007639468402146814, + "loss": 1.6134, + "step": 3784 + }, + { + "epoch": 0.3434041008891308, + "grad_norm": 0.11279453446395914, + "learning_rate": 0.0007638220422004784, + "loss": 1.6357, + "step": 3785 + }, + { + "epoch": 0.34349482852476865, + "grad_norm": 0.10889157258420079, + "learning_rate": 0.0007636972214052028, + "loss": 1.6342, + "step": 3786 + }, + { + "epoch": 0.3435855561604065, + "grad_norm": 0.10627339606769613, + "learning_rate": 0.0007635723778396334, + "loss": 1.6135, + "step": 3787 + }, + { + "epoch": 0.34367628379604426, + "grad_norm": 0.10537826115736013, + "learning_rate": 0.0007634475115145502, + "loss": 1.6073, + "step": 3788 + }, + { + "epoch": 0.3437670114316821, + "grad_norm": 0.11034005188557344, + "learning_rate": 0.0007633226224407358, + "loss": 1.6009, + "step": 3789 + }, + { + "epoch": 0.3438577390673199, + "grad_norm": 0.10744472493732837, + "learning_rate": 0.000763197710628974, + "loss": 1.6247, + "step": 3790 + }, + { + "epoch": 0.3439484667029577, + "grad_norm": 0.11238778071923598, + "learning_rate": 0.0007630727760900511, + "loss": 1.6431, + "step": 3791 + }, + { + "epoch": 0.34403919433859553, + "grad_norm": 0.10843256648993797, + "learning_rate": 0.0007629478188347552, + "loss": 1.5738, + "step": 3792 + }, + { + "epoch": 0.34412992197423337, + "grad_norm": 0.10836480986967058, + "learning_rate": 0.0007628228388738765, + "loss": 1.5822, + "step": 3793 + }, + { + "epoch": 0.34422064960987114, + "grad_norm": 0.10940124503798779, + "learning_rate": 0.0007626978362182069, + "loss": 1.5996, + "step": 3794 + }, + { + "epoch": 0.344311377245509, + "grad_norm": 0.11420544635068855, + "learning_rate": 0.0007625728108785404, + "loss": 1.657, + "step": 3795 + }, + { + "epoch": 0.3444021048811468, + "grad_norm": 0.10471215621477359, + "learning_rate": 0.0007624477628656727, + "loss": 1.6297, + "step": 3796 + }, + { + "epoch": 0.3444928325167846, + "grad_norm": 0.11395583773065257, + "learning_rate": 0.0007623226921904024, + "loss": 1.6315, + "step": 3797 + }, + { + "epoch": 0.3445835601524224, + "grad_norm": 0.11198611978857535, + "learning_rate": 0.0007621975988635288, + "loss": 1.6149, + "step": 3798 + }, + { + "epoch": 0.34467428778806025, + "grad_norm": 0.11439810628565403, + "learning_rate": 0.0007620724828958539, + "loss": 1.6086, + "step": 3799 + }, + { + "epoch": 0.3447650154236981, + "grad_norm": 0.1129746229268178, + "learning_rate": 0.0007619473442981815, + "loss": 1.6245, + "step": 3800 + }, + { + "epoch": 0.34485574305933586, + "grad_norm": 0.10820571765118576, + "learning_rate": 0.0007618221830813172, + "loss": 1.6302, + "step": 3801 + }, + { + "epoch": 0.3449464706949737, + "grad_norm": 0.11108828162456723, + "learning_rate": 0.0007616969992560689, + "loss": 1.6714, + "step": 3802 + }, + { + "epoch": 0.3450371983306115, + "grad_norm": 0.10864660213351332, + "learning_rate": 0.0007615717928332462, + "loss": 1.631, + "step": 3803 + }, + { + "epoch": 0.3451279259662493, + "grad_norm": 0.1109246044783026, + "learning_rate": 0.0007614465638236605, + "loss": 1.6506, + "step": 3804 + }, + { + "epoch": 0.34521865360188714, + "grad_norm": 0.10791909553369375, + "learning_rate": 0.0007613213122381256, + "loss": 1.6202, + "step": 3805 + }, + { + "epoch": 0.34530938123752497, + "grad_norm": 0.1100931867714128, + "learning_rate": 0.0007611960380874567, + "loss": 1.632, + "step": 3806 + }, + { + "epoch": 0.34540010887316275, + "grad_norm": 0.10882498283139061, + "learning_rate": 
0.0007610707413824717, + "loss": 1.6293, + "step": 3807 + }, + { + "epoch": 0.3454908365088006, + "grad_norm": 0.11338854215206041, + "learning_rate": 0.0007609454221339895, + "loss": 1.6009, + "step": 3808 + }, + { + "epoch": 0.3455815641444384, + "grad_norm": 0.10633563235880784, + "learning_rate": 0.0007608200803528318, + "loss": 1.5818, + "step": 3809 + }, + { + "epoch": 0.3456722917800762, + "grad_norm": 0.11225300305730901, + "learning_rate": 0.0007606947160498216, + "loss": 1.6054, + "step": 3810 + }, + { + "epoch": 0.345763019415714, + "grad_norm": 0.10797624937746333, + "learning_rate": 0.000760569329235784, + "loss": 1.6302, + "step": 3811 + }, + { + "epoch": 0.34585374705135186, + "grad_norm": 0.10959132621093055, + "learning_rate": 0.0007604439199215468, + "loss": 1.6476, + "step": 3812 + }, + { + "epoch": 0.34594447468698963, + "grad_norm": 0.10968446969151151, + "learning_rate": 0.0007603184881179386, + "loss": 1.6032, + "step": 3813 + }, + { + "epoch": 0.34603520232262747, + "grad_norm": 0.11341888168908298, + "learning_rate": 0.0007601930338357908, + "loss": 1.6179, + "step": 3814 + }, + { + "epoch": 0.3461259299582653, + "grad_norm": 0.11003280634120217, + "learning_rate": 0.0007600675570859357, + "loss": 1.6374, + "step": 3815 + }, + { + "epoch": 0.3462166575939031, + "grad_norm": 0.10804572926421875, + "learning_rate": 0.000759942057879209, + "loss": 1.6495, + "step": 3816 + }, + { + "epoch": 0.3463073852295409, + "grad_norm": 0.11294367966440066, + "learning_rate": 0.0007598165362264473, + "loss": 1.5847, + "step": 3817 + }, + { + "epoch": 0.34639811286517874, + "grad_norm": 0.1098202492430197, + "learning_rate": 0.0007596909921384894, + "loss": 1.6617, + "step": 3818 + }, + { + "epoch": 0.3464888405008166, + "grad_norm": 0.10778446833432725, + "learning_rate": 0.000759565425626176, + "loss": 1.6389, + "step": 3819 + }, + { + "epoch": 0.34657956813645435, + "grad_norm": 0.11103764670961125, + "learning_rate": 0.0007594398367003498, + "loss": 1.6266, + "step": 3820 + }, + { + "epoch": 0.3466702957720922, + "grad_norm": 0.11017298875105223, + "learning_rate": 0.0007593142253718556, + "loss": 1.6525, + "step": 3821 + }, + { + "epoch": 0.34676102340773, + "grad_norm": 0.11031145332643638, + "learning_rate": 0.0007591885916515398, + "loss": 1.692, + "step": 3822 + }, + { + "epoch": 0.3468517510433678, + "grad_norm": 0.10530562989650302, + "learning_rate": 0.0007590629355502508, + "loss": 1.672, + "step": 3823 + }, + { + "epoch": 0.34694247867900563, + "grad_norm": 0.10923216622166339, + "learning_rate": 0.0007589372570788393, + "loss": 1.6045, + "step": 3824 + }, + { + "epoch": 0.34703320631464346, + "grad_norm": 0.10882129550986998, + "learning_rate": 0.0007588115562481573, + "loss": 1.6214, + "step": 3825 + }, + { + "epoch": 0.34712393395028124, + "grad_norm": 0.10246636144509302, + "learning_rate": 0.0007586858330690593, + "loss": 1.6834, + "step": 3826 + }, + { + "epoch": 0.34721466158591907, + "grad_norm": 0.10904268971489439, + "learning_rate": 0.0007585600875524016, + "loss": 1.6364, + "step": 3827 + }, + { + "epoch": 0.3473053892215569, + "grad_norm": 0.10799829589525824, + "learning_rate": 0.0007584343197090422, + "loss": 1.656, + "step": 3828 + }, + { + "epoch": 0.3473961168571947, + "grad_norm": 0.1110486814899266, + "learning_rate": 0.0007583085295498412, + "loss": 1.5794, + "step": 3829 + }, + { + "epoch": 0.3474868444928325, + "grad_norm": 0.10471651763594078, + "learning_rate": 0.0007581827170856605, + "loss": 1.5885, + "step": 3830 + }, + { + "epoch": 
0.34757757212847035, + "grad_norm": 0.10668041968447627, + "learning_rate": 0.0007580568823273643, + "loss": 1.6269, + "step": 3831 + }, + { + "epoch": 0.3476682997641081, + "grad_norm": 0.11411784130151918, + "learning_rate": 0.0007579310252858181, + "loss": 1.623, + "step": 3832 + }, + { + "epoch": 0.34775902739974596, + "grad_norm": 0.10466083235427463, + "learning_rate": 0.00075780514597189, + "loss": 1.6236, + "step": 3833 + }, + { + "epoch": 0.3478497550353838, + "grad_norm": 0.10726596569543977, + "learning_rate": 0.0007576792443964495, + "loss": 1.5783, + "step": 3834 + }, + { + "epoch": 0.34794048267102157, + "grad_norm": 0.10891587547605619, + "learning_rate": 0.0007575533205703682, + "loss": 1.6326, + "step": 3835 + }, + { + "epoch": 0.3480312103066594, + "grad_norm": 0.10622799201498262, + "learning_rate": 0.0007574273745045198, + "loss": 1.6693, + "step": 3836 + }, + { + "epoch": 0.34812193794229723, + "grad_norm": 0.10808129327837841, + "learning_rate": 0.0007573014062097796, + "loss": 1.63, + "step": 3837 + }, + { + "epoch": 0.34821266557793507, + "grad_norm": 0.11085153451985434, + "learning_rate": 0.0007571754156970252, + "loss": 1.6472, + "step": 3838 + }, + { + "epoch": 0.34830339321357284, + "grad_norm": 0.10866030977226994, + "learning_rate": 0.0007570494029771356, + "loss": 1.6249, + "step": 3839 + }, + { + "epoch": 0.3483941208492107, + "grad_norm": 0.10800851686039224, + "learning_rate": 0.0007569233680609921, + "loss": 1.604, + "step": 3840 + }, + { + "epoch": 0.3484848484848485, + "grad_norm": 0.10848180587553367, + "learning_rate": 0.0007567973109594781, + "loss": 1.6241, + "step": 3841 + }, + { + "epoch": 0.3485755761204863, + "grad_norm": 0.10774918544336037, + "learning_rate": 0.0007566712316834783, + "loss": 1.6378, + "step": 3842 + }, + { + "epoch": 0.3486663037561241, + "grad_norm": 0.10494169820584436, + "learning_rate": 0.00075654513024388, + "loss": 1.6173, + "step": 3843 + }, + { + "epoch": 0.34875703139176195, + "grad_norm": 0.11074810897944175, + "learning_rate": 0.0007564190066515717, + "loss": 1.6441, + "step": 3844 + }, + { + "epoch": 0.34884775902739973, + "grad_norm": 0.10840248409504688, + "learning_rate": 0.0007562928609174444, + "loss": 1.5983, + "step": 3845 + }, + { + "epoch": 0.34893848666303756, + "grad_norm": 0.10465024176382441, + "learning_rate": 0.0007561666930523908, + "loss": 1.6158, + "step": 3846 + }, + { + "epoch": 0.3490292142986754, + "grad_norm": 0.1049646935163911, + "learning_rate": 0.0007560405030673055, + "loss": 1.6201, + "step": 3847 + }, + { + "epoch": 0.34911994193431317, + "grad_norm": 0.10812972207179002, + "learning_rate": 0.0007559142909730851, + "loss": 1.6136, + "step": 3848 + }, + { + "epoch": 0.349210669569951, + "grad_norm": 0.10973145622340227, + "learning_rate": 0.000755788056780628, + "loss": 1.6258, + "step": 3849 + }, + { + "epoch": 0.34930139720558884, + "grad_norm": 0.11161385356796523, + "learning_rate": 0.0007556618005008343, + "loss": 1.6583, + "step": 3850 + }, + { + "epoch": 0.3493921248412266, + "grad_norm": 0.11463889048286618, + "learning_rate": 0.0007555355221446066, + "loss": 1.6618, + "step": 3851 + }, + { + "epoch": 0.34948285247686445, + "grad_norm": 0.10718046372996473, + "learning_rate": 0.0007554092217228489, + "loss": 1.6697, + "step": 3852 + }, + { + "epoch": 0.3495735801125023, + "grad_norm": 0.10411205664448174, + "learning_rate": 0.0007552828992464674, + "loss": 1.5965, + "step": 3853 + }, + { + "epoch": 0.34966430774814006, + "grad_norm": 0.10552422710141234, + "learning_rate": 
0.0007551565547263694, + "loss": 1.6032, + "step": 3854 + }, + { + "epoch": 0.3497550353837779, + "grad_norm": 0.10778175851261963, + "learning_rate": 0.0007550301881734658, + "loss": 1.6736, + "step": 3855 + }, + { + "epoch": 0.3498457630194157, + "grad_norm": 0.10745121157142858, + "learning_rate": 0.0007549037995986677, + "loss": 1.653, + "step": 3856 + }, + { + "epoch": 0.34993649065505356, + "grad_norm": 0.10926309735742185, + "learning_rate": 0.000754777389012889, + "loss": 1.5957, + "step": 3857 + }, + { + "epoch": 0.35002721829069133, + "grad_norm": 0.10844215605887954, + "learning_rate": 0.000754650956427045, + "loss": 1.653, + "step": 3858 + }, + { + "epoch": 0.35011794592632917, + "grad_norm": 0.10392847585741163, + "learning_rate": 0.0007545245018520535, + "loss": 1.6423, + "step": 3859 + }, + { + "epoch": 0.350208673561967, + "grad_norm": 0.10471321647119386, + "learning_rate": 0.0007543980252988339, + "loss": 1.6086, + "step": 3860 + }, + { + "epoch": 0.3502994011976048, + "grad_norm": 0.10508056155462073, + "learning_rate": 0.0007542715267783072, + "loss": 1.5991, + "step": 3861 + }, + { + "epoch": 0.3503901288332426, + "grad_norm": 0.11028529682226268, + "learning_rate": 0.0007541450063013966, + "loss": 1.6607, + "step": 3862 + }, + { + "epoch": 0.35048085646888044, + "grad_norm": 0.10797990410482884, + "learning_rate": 0.0007540184638790275, + "loss": 1.6364, + "step": 3863 + }, + { + "epoch": 0.3505715841045182, + "grad_norm": 0.10217226642894736, + "learning_rate": 0.0007538918995221263, + "loss": 1.621, + "step": 3864 + }, + { + "epoch": 0.35066231174015605, + "grad_norm": 0.1085034897744128, + "learning_rate": 0.0007537653132416223, + "loss": 1.6404, + "step": 3865 + }, + { + "epoch": 0.3507530393757939, + "grad_norm": 0.10226536039982344, + "learning_rate": 0.0007536387050484461, + "loss": 1.6584, + "step": 3866 + }, + { + "epoch": 0.35084376701143166, + "grad_norm": 0.10707308173579541, + "learning_rate": 0.0007535120749535304, + "loss": 1.6399, + "step": 3867 + }, + { + "epoch": 0.3509344946470695, + "grad_norm": 0.10521807368654315, + "learning_rate": 0.0007533854229678096, + "loss": 1.6235, + "step": 3868 + }, + { + "epoch": 0.3510252222827073, + "grad_norm": 0.10825689760969824, + "learning_rate": 0.0007532587491022203, + "loss": 1.6245, + "step": 3869 + }, + { + "epoch": 0.3511159499183451, + "grad_norm": 0.1042346960674158, + "learning_rate": 0.0007531320533677004, + "loss": 1.6216, + "step": 3870 + }, + { + "epoch": 0.35120667755398294, + "grad_norm": 0.10545622180640643, + "learning_rate": 0.0007530053357751906, + "loss": 1.6153, + "step": 3871 + }, + { + "epoch": 0.35129740518962077, + "grad_norm": 0.10410284660562666, + "learning_rate": 0.0007528785963356326, + "loss": 1.6166, + "step": 3872 + }, + { + "epoch": 0.35138813282525855, + "grad_norm": 0.10429969422212275, + "learning_rate": 0.0007527518350599708, + "loss": 1.5869, + "step": 3873 + }, + { + "epoch": 0.3514788604608964, + "grad_norm": 0.10597430909361166, + "learning_rate": 0.0007526250519591506, + "loss": 1.595, + "step": 3874 + }, + { + "epoch": 0.3515695880965342, + "grad_norm": 0.1100373316104339, + "learning_rate": 0.00075249824704412, + "loss": 1.6719, + "step": 3875 + }, + { + "epoch": 0.35166031573217205, + "grad_norm": 0.10640860167975986, + "learning_rate": 0.0007523714203258286, + "loss": 1.6519, + "step": 3876 + }, + { + "epoch": 0.3517510433678098, + "grad_norm": 0.10840037616010326, + "learning_rate": 0.0007522445718152278, + "loss": 1.5816, + "step": 3877 + }, + { + "epoch": 
0.35184177100344766, + "grad_norm": 0.10732151624171521, + "learning_rate": 0.000752117701523271, + "loss": 1.6586, + "step": 3878 + }, + { + "epoch": 0.3519324986390855, + "grad_norm": 0.10904764344543628, + "learning_rate": 0.0007519908094609135, + "loss": 1.6429, + "step": 3879 + }, + { + "epoch": 0.35202322627472327, + "grad_norm": 0.10733386558172475, + "learning_rate": 0.0007518638956391126, + "loss": 1.6103, + "step": 3880 + }, + { + "epoch": 0.3521139539103611, + "grad_norm": 0.10357294324927062, + "learning_rate": 0.0007517369600688271, + "loss": 1.6158, + "step": 3881 + }, + { + "epoch": 0.35220468154599893, + "grad_norm": 0.10673053531499799, + "learning_rate": 0.000751610002761018, + "loss": 1.6591, + "step": 3882 + }, + { + "epoch": 0.3522954091816367, + "grad_norm": 0.10667885301195262, + "learning_rate": 0.000751483023726648, + "loss": 1.6291, + "step": 3883 + }, + { + "epoch": 0.35238613681727454, + "grad_norm": 0.1108375273287016, + "learning_rate": 0.0007513560229766819, + "loss": 1.629, + "step": 3884 + }, + { + "epoch": 0.3524768644529124, + "grad_norm": 0.10908899927971481, + "learning_rate": 0.0007512290005220861, + "loss": 1.6406, + "step": 3885 + }, + { + "epoch": 0.35256759208855015, + "grad_norm": 0.10572825678831539, + "learning_rate": 0.0007511019563738293, + "loss": 1.633, + "step": 3886 + }, + { + "epoch": 0.352658319724188, + "grad_norm": 0.10623665384968818, + "learning_rate": 0.0007509748905428815, + "loss": 1.6323, + "step": 3887 + }, + { + "epoch": 0.3527490473598258, + "grad_norm": 0.1041650573868312, + "learning_rate": 0.0007508478030402147, + "loss": 1.5913, + "step": 3888 + }, + { + "epoch": 0.3528397749954636, + "grad_norm": 0.10876639183719405, + "learning_rate": 0.0007507206938768032, + "loss": 1.6179, + "step": 3889 + }, + { + "epoch": 0.35293050263110143, + "grad_norm": 0.10884327203859391, + "learning_rate": 0.0007505935630636229, + "loss": 1.6434, + "step": 3890 + }, + { + "epoch": 0.35302123026673926, + "grad_norm": 0.11097147091561359, + "learning_rate": 0.0007504664106116515, + "loss": 1.6448, + "step": 3891 + }, + { + "epoch": 0.35311195790237704, + "grad_norm": 0.10769795111156769, + "learning_rate": 0.0007503392365318688, + "loss": 1.6618, + "step": 3892 + }, + { + "epoch": 0.35320268553801487, + "grad_norm": 0.10593339356580186, + "learning_rate": 0.0007502120408352557, + "loss": 1.599, + "step": 3893 + }, + { + "epoch": 0.3532934131736527, + "grad_norm": 0.11247529764076503, + "learning_rate": 0.0007500848235327964, + "loss": 1.6374, + "step": 3894 + }, + { + "epoch": 0.35338414080929054, + "grad_norm": 0.11047115741347582, + "learning_rate": 0.0007499575846354755, + "loss": 1.6112, + "step": 3895 + }, + { + "epoch": 0.3534748684449283, + "grad_norm": 0.11179683305761531, + "learning_rate": 0.0007498303241542805, + "loss": 1.6432, + "step": 3896 + }, + { + "epoch": 0.35356559608056615, + "grad_norm": 0.11097529243229817, + "learning_rate": 0.0007497030421002001, + "loss": 1.6193, + "step": 3897 + }, + { + "epoch": 0.353656323716204, + "grad_norm": 0.1083160153298816, + "learning_rate": 0.0007495757384842251, + "loss": 1.6509, + "step": 3898 + }, + { + "epoch": 0.35374705135184176, + "grad_norm": 0.11163923219072093, + "learning_rate": 0.0007494484133173484, + "loss": 1.6251, + "step": 3899 + }, + { + "epoch": 0.3538377789874796, + "grad_norm": 0.11128128108094339, + "learning_rate": 0.0007493210666105646, + "loss": 1.6253, + "step": 3900 + }, + { + "epoch": 0.3539285066231174, + "grad_norm": 0.10771660517291524, + "learning_rate": 
0.0007491936983748698, + "loss": 1.6266, + "step": 3901 + }, + { + "epoch": 0.3540192342587552, + "grad_norm": 0.10293354497821613, + "learning_rate": 0.0007490663086212624, + "loss": 1.5928, + "step": 3902 + }, + { + "epoch": 0.35410996189439303, + "grad_norm": 0.10829607878991304, + "learning_rate": 0.0007489388973607425, + "loss": 1.6321, + "step": 3903 + }, + { + "epoch": 0.35420068953003087, + "grad_norm": 0.10927629443256323, + "learning_rate": 0.0007488114646043121, + "loss": 1.6485, + "step": 3904 + }, + { + "epoch": 0.35429141716566864, + "grad_norm": 0.10830916798751586, + "learning_rate": 0.0007486840103629751, + "loss": 1.5818, + "step": 3905 + }, + { + "epoch": 0.3543821448013065, + "grad_norm": 0.10649938893150925, + "learning_rate": 0.0007485565346477374, + "loss": 1.5931, + "step": 3906 + }, + { + "epoch": 0.3544728724369443, + "grad_norm": 0.11110699531648903, + "learning_rate": 0.0007484290374696061, + "loss": 1.6183, + "step": 3907 + }, + { + "epoch": 0.3545636000725821, + "grad_norm": 0.10601392047603829, + "learning_rate": 0.0007483015188395907, + "loss": 1.6232, + "step": 3908 + }, + { + "epoch": 0.3546543277082199, + "grad_norm": 0.10614580234533828, + "learning_rate": 0.0007481739787687028, + "loss": 1.6172, + "step": 3909 + }, + { + "epoch": 0.35474505534385775, + "grad_norm": 0.10547014320222453, + "learning_rate": 0.0007480464172679549, + "loss": 1.6163, + "step": 3910 + }, + { + "epoch": 0.35483578297949553, + "grad_norm": 0.10623670208213516, + "learning_rate": 0.0007479188343483626, + "loss": 1.5791, + "step": 3911 + }, + { + "epoch": 0.35492651061513336, + "grad_norm": 0.10685579276089947, + "learning_rate": 0.0007477912300209424, + "loss": 1.6386, + "step": 3912 + }, + { + "epoch": 0.3550172382507712, + "grad_norm": 0.10916819670198127, + "learning_rate": 0.0007476636042967128, + "loss": 1.6443, + "step": 3913 + }, + { + "epoch": 0.355107965886409, + "grad_norm": 0.10695471945917288, + "learning_rate": 0.0007475359571866946, + "loss": 1.6606, + "step": 3914 + }, + { + "epoch": 0.3551986935220468, + "grad_norm": 0.11372006721630169, + "learning_rate": 0.00074740828870191, + "loss": 1.6452, + "step": 3915 + }, + { + "epoch": 0.35528942115768464, + "grad_norm": 0.10774860163759438, + "learning_rate": 0.0007472805988533831, + "loss": 1.5919, + "step": 3916 + }, + { + "epoch": 0.35538014879332247, + "grad_norm": 0.10624380209759919, + "learning_rate": 0.0007471528876521402, + "loss": 1.6536, + "step": 3917 + }, + { + "epoch": 0.35547087642896025, + "grad_norm": 0.11781984268364554, + "learning_rate": 0.0007470251551092089, + "loss": 1.5819, + "step": 3918 + }, + { + "epoch": 0.3555616040645981, + "grad_norm": 0.10760042772135568, + "learning_rate": 0.0007468974012356192, + "loss": 1.5868, + "step": 3919 + }, + { + "epoch": 0.3556523317002359, + "grad_norm": 0.10678904913532891, + "learning_rate": 0.0007467696260424024, + "loss": 1.5939, + "step": 3920 + }, + { + "epoch": 0.3557430593358737, + "grad_norm": 0.10948937245356666, + "learning_rate": 0.000746641829540592, + "loss": 1.6388, + "step": 3921 + }, + { + "epoch": 0.3558337869715115, + "grad_norm": 0.11158379156050281, + "learning_rate": 0.0007465140117412233, + "loss": 1.6306, + "step": 3922 + }, + { + "epoch": 0.35592451460714936, + "grad_norm": 0.10516255707592327, + "learning_rate": 0.0007463861726553334, + "loss": 1.5872, + "step": 3923 + }, + { + "epoch": 0.35601524224278713, + "grad_norm": 0.10834444587708149, + "learning_rate": 0.0007462583122939612, + "loss": 1.6718, + "step": 3924 + }, + { + 
"epoch": 0.35610596987842497, + "grad_norm": 0.10954736406516333, + "learning_rate": 0.0007461304306681472, + "loss": 1.6129, + "step": 3925 + }, + { + "epoch": 0.3561966975140628, + "grad_norm": 0.10532738751238525, + "learning_rate": 0.0007460025277889345, + "loss": 1.6066, + "step": 3926 + }, + { + "epoch": 0.3562874251497006, + "grad_norm": 0.10262189992650457, + "learning_rate": 0.0007458746036673672, + "loss": 1.5782, + "step": 3927 + }, + { + "epoch": 0.3563781527853384, + "grad_norm": 0.10449229062812393, + "learning_rate": 0.0007457466583144915, + "loss": 1.6386, + "step": 3928 + }, + { + "epoch": 0.35646888042097624, + "grad_norm": 0.10767416355552702, + "learning_rate": 0.0007456186917413559, + "loss": 1.61, + "step": 3929 + }, + { + "epoch": 0.356559608056614, + "grad_norm": 0.1084158812009966, + "learning_rate": 0.0007454907039590098, + "loss": 1.6308, + "step": 3930 + }, + { + "epoch": 0.35665033569225185, + "grad_norm": 0.10774637384717752, + "learning_rate": 0.0007453626949785055, + "loss": 1.6169, + "step": 3931 + }, + { + "epoch": 0.3567410633278897, + "grad_norm": 0.10892902502449267, + "learning_rate": 0.0007452346648108961, + "loss": 1.6627, + "step": 3932 + }, + { + "epoch": 0.3568317909635275, + "grad_norm": 0.10741127388279223, + "learning_rate": 0.0007451066134672373, + "loss": 1.5866, + "step": 3933 + }, + { + "epoch": 0.3569225185991653, + "grad_norm": 0.10335155164789997, + "learning_rate": 0.0007449785409585863, + "loss": 1.6203, + "step": 3934 + }, + { + "epoch": 0.3570132462348031, + "grad_norm": 0.10780969843119982, + "learning_rate": 0.0007448504472960022, + "loss": 1.6957, + "step": 3935 + }, + { + "epoch": 0.35710397387044096, + "grad_norm": 0.10213993824005756, + "learning_rate": 0.0007447223324905459, + "loss": 1.6118, + "step": 3936 + }, + { + "epoch": 0.35719470150607874, + "grad_norm": 0.10833886787889549, + "learning_rate": 0.0007445941965532801, + "loss": 1.6232, + "step": 3937 + }, + { + "epoch": 0.35728542914171657, + "grad_norm": 0.10579767721374303, + "learning_rate": 0.0007444660394952694, + "loss": 1.6507, + "step": 3938 + }, + { + "epoch": 0.3573761567773544, + "grad_norm": 0.10879986710937305, + "learning_rate": 0.0007443378613275803, + "loss": 1.6335, + "step": 3939 + }, + { + "epoch": 0.3574668844129922, + "grad_norm": 0.10636495622567539, + "learning_rate": 0.0007442096620612806, + "loss": 1.5972, + "step": 3940 + }, + { + "epoch": 0.35755761204863, + "grad_norm": 0.10597536849116701, + "learning_rate": 0.0007440814417074408, + "loss": 1.6278, + "step": 3941 + }, + { + "epoch": 0.35764833968426785, + "grad_norm": 0.11225515746420973, + "learning_rate": 0.0007439532002771324, + "loss": 1.6356, + "step": 3942 + }, + { + "epoch": 0.3577390673199056, + "grad_norm": 0.10753376532297604, + "learning_rate": 0.0007438249377814293, + "loss": 1.6424, + "step": 3943 + }, + { + "epoch": 0.35782979495554346, + "grad_norm": 0.1100461507884274, + "learning_rate": 0.0007436966542314068, + "loss": 1.5937, + "step": 3944 + }, + { + "epoch": 0.3579205225911813, + "grad_norm": 0.10666693459282596, + "learning_rate": 0.0007435683496381422, + "loss": 1.599, + "step": 3945 + }, + { + "epoch": 0.35801125022681907, + "grad_norm": 0.10448935911078297, + "learning_rate": 0.0007434400240127149, + "loss": 1.6232, + "step": 3946 + }, + { + "epoch": 0.3581019778624569, + "grad_norm": 0.10439745523140503, + "learning_rate": 0.0007433116773662056, + "loss": 1.6327, + "step": 3947 + }, + { + "epoch": 0.35819270549809473, + "grad_norm": 0.10587292993767125, + 
"learning_rate": 0.000743183309709697, + "loss": 1.6037, + "step": 3948 + }, + { + "epoch": 0.3582834331337325, + "grad_norm": 0.10466135279732762, + "learning_rate": 0.0007430549210542739, + "loss": 1.6071, + "step": 3949 + }, + { + "epoch": 0.35837416076937034, + "grad_norm": 0.11039632796224585, + "learning_rate": 0.0007429265114110225, + "loss": 1.6036, + "step": 3950 + }, + { + "epoch": 0.3584648884050082, + "grad_norm": 0.11100692872376074, + "learning_rate": 0.000742798080791031, + "loss": 1.6382, + "step": 3951 + }, + { + "epoch": 0.358555616040646, + "grad_norm": 0.10958438806572476, + "learning_rate": 0.0007426696292053893, + "loss": 1.6367, + "step": 3952 + }, + { + "epoch": 0.3586463436762838, + "grad_norm": 0.10579107379158322, + "learning_rate": 0.0007425411566651897, + "loss": 1.6472, + "step": 3953 + }, + { + "epoch": 0.3587370713119216, + "grad_norm": 0.10302177405897661, + "learning_rate": 0.0007424126631815253, + "loss": 1.6559, + "step": 3954 + }, + { + "epoch": 0.35882779894755945, + "grad_norm": 0.10683736419615521, + "learning_rate": 0.0007422841487654916, + "loss": 1.6042, + "step": 3955 + }, + { + "epoch": 0.35891852658319723, + "grad_norm": 0.1132521329355465, + "learning_rate": 0.000742155613428186, + "loss": 1.5849, + "step": 3956 + }, + { + "epoch": 0.35900925421883506, + "grad_norm": 0.10442320337764506, + "learning_rate": 0.0007420270571807076, + "loss": 1.5958, + "step": 3957 + }, + { + "epoch": 0.3590999818544729, + "grad_norm": 0.10665422191087574, + "learning_rate": 0.0007418984800341571, + "loss": 1.6557, + "step": 3958 + }, + { + "epoch": 0.35919070949011067, + "grad_norm": 0.10697678345395405, + "learning_rate": 0.0007417698819996371, + "loss": 1.6631, + "step": 3959 + }, + { + "epoch": 0.3592814371257485, + "grad_norm": 0.10536269862362041, + "learning_rate": 0.0007416412630882523, + "loss": 1.5844, + "step": 3960 + }, + { + "epoch": 0.35937216476138634, + "grad_norm": 0.10622979549174472, + "learning_rate": 0.0007415126233111087, + "loss": 1.6291, + "step": 3961 + }, + { + "epoch": 0.3594628923970241, + "grad_norm": 0.10760797205868283, + "learning_rate": 0.0007413839626793145, + "loss": 1.5937, + "step": 3962 + }, + { + "epoch": 0.35955362003266195, + "grad_norm": 0.10294724053413559, + "learning_rate": 0.0007412552812039797, + "loss": 1.6376, + "step": 3963 + }, + { + "epoch": 0.3596443476682998, + "grad_norm": 0.1021289184327748, + "learning_rate": 0.0007411265788962156, + "loss": 1.5994, + "step": 3964 + }, + { + "epoch": 0.35973507530393756, + "grad_norm": 0.10451736575330182, + "learning_rate": 0.0007409978557671359, + "loss": 1.6123, + "step": 3965 + }, + { + "epoch": 0.3598258029395754, + "grad_norm": 0.10581325794724267, + "learning_rate": 0.000740869111827856, + "loss": 1.6091, + "step": 3966 + }, + { + "epoch": 0.3599165305752132, + "grad_norm": 0.10426801571023836, + "learning_rate": 0.0007407403470894926, + "loss": 1.6328, + "step": 3967 + }, + { + "epoch": 0.360007258210851, + "grad_norm": 0.10647533485390552, + "learning_rate": 0.0007406115615631649, + "loss": 1.601, + "step": 3968 + }, + { + "epoch": 0.36009798584648883, + "grad_norm": 0.10551330925291286, + "learning_rate": 0.0007404827552599933, + "loss": 1.6244, + "step": 3969 + }, + { + "epoch": 0.36018871348212667, + "grad_norm": 0.10725992741470163, + "learning_rate": 0.0007403539281911003, + "loss": 1.5985, + "step": 3970 + }, + { + "epoch": 0.3602794411177645, + "grad_norm": 0.10714960615282355, + "learning_rate": 0.0007402250803676103, + "loss": 1.6225, + "step": 3971 + }, + 
{ + "epoch": 0.3603701687534023, + "grad_norm": 0.10476682622129083, + "learning_rate": 0.0007400962118006492, + "loss": 1.59, + "step": 3972 + }, + { + "epoch": 0.3604608963890401, + "grad_norm": 0.10639147772975167, + "learning_rate": 0.0007399673225013448, + "loss": 1.6364, + "step": 3973 + }, + { + "epoch": 0.36055162402467794, + "grad_norm": 0.1117247366554156, + "learning_rate": 0.0007398384124808267, + "loss": 1.6203, + "step": 3974 + }, + { + "epoch": 0.3606423516603157, + "grad_norm": 0.10580884574846544, + "learning_rate": 0.0007397094817502263, + "loss": 1.6125, + "step": 3975 + }, + { + "epoch": 0.36073307929595355, + "grad_norm": 0.10473098304866438, + "learning_rate": 0.0007395805303206768, + "loss": 1.644, + "step": 3976 + }, + { + "epoch": 0.3608238069315914, + "grad_norm": 0.10515889216081833, + "learning_rate": 0.0007394515582033132, + "loss": 1.6164, + "step": 3977 + }, + { + "epoch": 0.36091453456722916, + "grad_norm": 0.10939855607081067, + "learning_rate": 0.0007393225654092724, + "loss": 1.6013, + "step": 3978 + }, + { + "epoch": 0.361005262202867, + "grad_norm": 0.11477259225876174, + "learning_rate": 0.0007391935519496926, + "loss": 1.6323, + "step": 3979 + }, + { + "epoch": 0.3610959898385048, + "grad_norm": 0.11126007298941609, + "learning_rate": 0.0007390645178357146, + "loss": 1.6326, + "step": 3980 + }, + { + "epoch": 0.3611867174741426, + "grad_norm": 0.11050593536730455, + "learning_rate": 0.00073893546307848, + "loss": 1.6197, + "step": 3981 + }, + { + "epoch": 0.36127744510978044, + "grad_norm": 0.1038555303717986, + "learning_rate": 0.0007388063876891331, + "loss": 1.6609, + "step": 3982 + }, + { + "epoch": 0.36136817274541827, + "grad_norm": 0.1068068357578983, + "learning_rate": 0.0007386772916788193, + "loss": 1.6278, + "step": 3983 + }, + { + "epoch": 0.36145890038105605, + "grad_norm": 0.10913874913592797, + "learning_rate": 0.0007385481750586863, + "loss": 1.6572, + "step": 3984 + }, + { + "epoch": 0.3615496280166939, + "grad_norm": 0.10533146975245165, + "learning_rate": 0.0007384190378398833, + "loss": 1.6167, + "step": 3985 + }, + { + "epoch": 0.3616403556523317, + "grad_norm": 0.10271168326644248, + "learning_rate": 0.0007382898800335612, + "loss": 1.6349, + "step": 3986 + }, + { + "epoch": 0.3617310832879695, + "grad_norm": 0.10612379113957258, + "learning_rate": 0.0007381607016508727, + "loss": 1.6421, + "step": 3987 + }, + { + "epoch": 0.3618218109236073, + "grad_norm": 0.10791757425281011, + "learning_rate": 0.0007380315027029725, + "loss": 1.6501, + "step": 3988 + }, + { + "epoch": 0.36191253855924516, + "grad_norm": 0.10094192187320628, + "learning_rate": 0.000737902283201017, + "loss": 1.6149, + "step": 3989 + }, + { + "epoch": 0.362003266194883, + "grad_norm": 0.10961249936435435, + "learning_rate": 0.0007377730431561645, + "loss": 1.6344, + "step": 3990 + }, + { + "epoch": 0.36209399383052077, + "grad_norm": 0.10863868836267271, + "learning_rate": 0.0007376437825795743, + "loss": 1.6302, + "step": 3991 + }, + { + "epoch": 0.3621847214661586, + "grad_norm": 0.10869419776964077, + "learning_rate": 0.0007375145014824089, + "loss": 1.6269, + "step": 3992 + }, + { + "epoch": 0.36227544910179643, + "grad_norm": 0.10792935597235792, + "learning_rate": 0.000737385199875831, + "loss": 1.6037, + "step": 3993 + }, + { + "epoch": 0.3623661767374342, + "grad_norm": 0.10733640590560684, + "learning_rate": 0.0007372558777710061, + "loss": 1.6419, + "step": 3994 + }, + { + "epoch": 0.36245690437307204, + "grad_norm": 0.11322478473948437, + 
"learning_rate": 0.0007371265351791012, + "loss": 1.6411, + "step": 3995 + }, + { + "epoch": 0.3625476320087099, + "grad_norm": 0.11065711108083237, + "learning_rate": 0.000736997172111285, + "loss": 1.6234, + "step": 3996 + }, + { + "epoch": 0.36263835964434765, + "grad_norm": 0.11087994481246018, + "learning_rate": 0.0007368677885787282, + "loss": 1.5981, + "step": 3997 + }, + { + "epoch": 0.3627290872799855, + "grad_norm": 0.10478239280563323, + "learning_rate": 0.0007367383845926028, + "loss": 1.6595, + "step": 3998 + }, + { + "epoch": 0.3628198149156233, + "grad_norm": 0.10483293079859671, + "learning_rate": 0.0007366089601640831, + "loss": 1.6364, + "step": 3999 + }, + { + "epoch": 0.3629105425512611, + "grad_norm": 0.10376432730328741, + "learning_rate": 0.0007364795153043448, + "loss": 1.6286, + "step": 4000 + }, + { + "epoch": 0.36300127018689893, + "grad_norm": 0.10922433949590173, + "learning_rate": 0.0007363500500245655, + "loss": 1.6318, + "step": 4001 + }, + { + "epoch": 0.36309199782253676, + "grad_norm": 0.10375565433240248, + "learning_rate": 0.0007362205643359246, + "loss": 1.6153, + "step": 4002 + }, + { + "epoch": 0.36318272545817454, + "grad_norm": 0.1019234013517, + "learning_rate": 0.000736091058249603, + "loss": 1.5925, + "step": 4003 + }, + { + "epoch": 0.36327345309381237, + "grad_norm": 0.10603361538777775, + "learning_rate": 0.0007359615317767839, + "loss": 1.6919, + "step": 4004 + }, + { + "epoch": 0.3633641807294502, + "grad_norm": 0.10873641238150629, + "learning_rate": 0.0007358319849286517, + "loss": 1.618, + "step": 4005 + }, + { + "epoch": 0.363454908365088, + "grad_norm": 0.10348008237072388, + "learning_rate": 0.0007357024177163927, + "loss": 1.6294, + "step": 4006 + }, + { + "epoch": 0.3635456360007258, + "grad_norm": 0.11532467370078672, + "learning_rate": 0.0007355728301511955, + "loss": 1.643, + "step": 4007 + }, + { + "epoch": 0.36363636363636365, + "grad_norm": 0.11426674351136622, + "learning_rate": 0.0007354432222442494, + "loss": 1.6159, + "step": 4008 + }, + { + "epoch": 0.3637270912720015, + "grad_norm": 0.11115148021488044, + "learning_rate": 0.0007353135940067465, + "loss": 1.624, + "step": 4009 + }, + { + "epoch": 0.36381781890763926, + "grad_norm": 0.1057529599731184, + "learning_rate": 0.00073518394544988, + "loss": 1.6085, + "step": 4010 + }, + { + "epoch": 0.3639085465432771, + "grad_norm": 0.10364701999769685, + "learning_rate": 0.0007350542765848452, + "loss": 1.6376, + "step": 4011 + }, + { + "epoch": 0.3639992741789149, + "grad_norm": 0.10872522288986342, + "learning_rate": 0.000734924587422839, + "loss": 1.6382, + "step": 4012 + }, + { + "epoch": 0.3640900018145527, + "grad_norm": 0.1032229593719282, + "learning_rate": 0.0007347948779750599, + "loss": 1.6538, + "step": 4013 + }, + { + "epoch": 0.36418072945019053, + "grad_norm": 0.10638666733217945, + "learning_rate": 0.0007346651482527087, + "loss": 1.6362, + "step": 4014 + }, + { + "epoch": 0.36427145708582837, + "grad_norm": 0.10341630830521503, + "learning_rate": 0.0007345353982669871, + "loss": 1.5988, + "step": 4015 + }, + { + "epoch": 0.36436218472146614, + "grad_norm": 0.10348060415498808, + "learning_rate": 0.0007344056280290995, + "loss": 1.5823, + "step": 4016 + }, + { + "epoch": 0.364452912357104, + "grad_norm": 0.10913233758734622, + "learning_rate": 0.0007342758375502514, + "loss": 1.5887, + "step": 4017 + }, + { + "epoch": 0.3645436399927418, + "grad_norm": 0.1093146095243986, + "learning_rate": 0.00073414602684165, + "loss": 1.6365, + "step": 4018 + }, + { + 
"epoch": 0.3646343676283796, + "grad_norm": 0.10715760977887787, + "learning_rate": 0.0007340161959145049, + "loss": 1.6256, + "step": 4019 + }, + { + "epoch": 0.3647250952640174, + "grad_norm": 0.11131480948632319, + "learning_rate": 0.0007338863447800267, + "loss": 1.6159, + "step": 4020 + }, + { + "epoch": 0.36481582289965525, + "grad_norm": 0.11062010425296658, + "learning_rate": 0.0007337564734494281, + "loss": 1.652, + "step": 4021 + }, + { + "epoch": 0.36490655053529303, + "grad_norm": 0.10573066939701585, + "learning_rate": 0.0007336265819339236, + "loss": 1.6153, + "step": 4022 + }, + { + "epoch": 0.36499727817093086, + "grad_norm": 0.10600270161928652, + "learning_rate": 0.0007334966702447292, + "loss": 1.627, + "step": 4023 + }, + { + "epoch": 0.3650880058065687, + "grad_norm": 0.1082803450575308, + "learning_rate": 0.0007333667383930632, + "loss": 1.6078, + "step": 4024 + }, + { + "epoch": 0.36517873344220647, + "grad_norm": 0.10823140356954901, + "learning_rate": 0.0007332367863901448, + "loss": 1.6243, + "step": 4025 + }, + { + "epoch": 0.3652694610778443, + "grad_norm": 0.10620163528391623, + "learning_rate": 0.0007331068142471955, + "loss": 1.5864, + "step": 4026 + }, + { + "epoch": 0.36536018871348214, + "grad_norm": 0.10506944903838882, + "learning_rate": 0.0007329768219754383, + "loss": 1.6122, + "step": 4027 + }, + { + "epoch": 0.3654509163491199, + "grad_norm": 0.10579974662377666, + "learning_rate": 0.0007328468095860984, + "loss": 1.6018, + "step": 4028 + }, + { + "epoch": 0.36554164398475775, + "grad_norm": 0.10662270474098437, + "learning_rate": 0.0007327167770904022, + "loss": 1.6374, + "step": 4029 + }, + { + "epoch": 0.3656323716203956, + "grad_norm": 0.10993087740471646, + "learning_rate": 0.0007325867244995776, + "loss": 1.6333, + "step": 4030 + }, + { + "epoch": 0.3657230992560334, + "grad_norm": 0.10654061093961747, + "learning_rate": 0.0007324566518248555, + "loss": 1.6316, + "step": 4031 + }, + { + "epoch": 0.3658138268916712, + "grad_norm": 0.10583400801271638, + "learning_rate": 0.0007323265590774671, + "loss": 1.6105, + "step": 4032 + }, + { + "epoch": 0.365904554527309, + "grad_norm": 0.10788142996787466, + "learning_rate": 0.0007321964462686461, + "loss": 1.5633, + "step": 4033 + }, + { + "epoch": 0.36599528216294686, + "grad_norm": 0.10718506688319121, + "learning_rate": 0.0007320663134096278, + "loss": 1.6222, + "step": 4034 + }, + { + "epoch": 0.36608600979858463, + "grad_norm": 0.1088160358868827, + "learning_rate": 0.000731936160511649, + "loss": 1.5596, + "step": 4035 + }, + { + "epoch": 0.36617673743422247, + "grad_norm": 0.1103514337901857, + "learning_rate": 0.0007318059875859487, + "loss": 1.635, + "step": 4036 + }, + { + "epoch": 0.3662674650698603, + "grad_norm": 0.10986960455915651, + "learning_rate": 0.000731675794643767, + "loss": 1.6679, + "step": 4037 + }, + { + "epoch": 0.3663581927054981, + "grad_norm": 0.10481174448497549, + "learning_rate": 0.0007315455816963465, + "loss": 1.584, + "step": 4038 + }, + { + "epoch": 0.3664489203411359, + "grad_norm": 0.10386124226426655, + "learning_rate": 0.0007314153487549308, + "loss": 1.6328, + "step": 4039 + }, + { + "epoch": 0.36653964797677374, + "grad_norm": 0.1065127528245891, + "learning_rate": 0.0007312850958307656, + "loss": 1.6268, + "step": 4040 + }, + { + "epoch": 0.3666303756124115, + "grad_norm": 0.10570710531610676, + "learning_rate": 0.0007311548229350982, + "loss": 1.6505, + "step": 4041 + }, + { + "epoch": 0.36672110324804935, + "grad_norm": 0.10867229575874776, + 
"learning_rate": 0.000731024530079178, + "loss": 1.6376, + "step": 4042 + }, + { + "epoch": 0.3668118308836872, + "grad_norm": 0.10423998421034113, + "learning_rate": 0.0007308942172742554, + "loss": 1.6263, + "step": 4043 + }, + { + "epoch": 0.36690255851932496, + "grad_norm": 0.10912322548616038, + "learning_rate": 0.0007307638845315832, + "loss": 1.5745, + "step": 4044 + }, + { + "epoch": 0.3669932861549628, + "grad_norm": 0.10953131994269258, + "learning_rate": 0.0007306335318624155, + "loss": 1.6179, + "step": 4045 + }, + { + "epoch": 0.3670840137906006, + "grad_norm": 0.1047801017674772, + "learning_rate": 0.0007305031592780085, + "loss": 1.672, + "step": 4046 + }, + { + "epoch": 0.3671747414262384, + "grad_norm": 0.10778247997777396, + "learning_rate": 0.0007303727667896195, + "loss": 1.5568, + "step": 4047 + }, + { + "epoch": 0.36726546906187624, + "grad_norm": 0.11161447304740897, + "learning_rate": 0.0007302423544085083, + "loss": 1.634, + "step": 4048 + }, + { + "epoch": 0.36735619669751407, + "grad_norm": 0.10554176253601852, + "learning_rate": 0.0007301119221459357, + "loss": 1.6086, + "step": 4049 + }, + { + "epoch": 0.3674469243331519, + "grad_norm": 0.10428450277281104, + "learning_rate": 0.0007299814700131649, + "loss": 1.6057, + "step": 4050 + }, + { + "epoch": 0.3675376519687897, + "grad_norm": 0.10822757311504672, + "learning_rate": 0.0007298509980214602, + "loss": 1.6066, + "step": 4051 + }, + { + "epoch": 0.3676283796044275, + "grad_norm": 0.10749988706201666, + "learning_rate": 0.0007297205061820879, + "loss": 1.5806, + "step": 4052 + }, + { + "epoch": 0.36771910724006535, + "grad_norm": 0.10943594998254808, + "learning_rate": 0.0007295899945063161, + "loss": 1.6627, + "step": 4053 + }, + { + "epoch": 0.3678098348757031, + "grad_norm": 0.10620003529650084, + "learning_rate": 0.0007294594630054142, + "loss": 1.5973, + "step": 4054 + }, + { + "epoch": 0.36790056251134096, + "grad_norm": 0.10948186448328626, + "learning_rate": 0.0007293289116906541, + "loss": 1.6114, + "step": 4055 + }, + { + "epoch": 0.3679912901469788, + "grad_norm": 0.11176296488289811, + "learning_rate": 0.0007291983405733087, + "loss": 1.6449, + "step": 4056 + }, + { + "epoch": 0.36808201778261657, + "grad_norm": 0.10382356815946853, + "learning_rate": 0.0007290677496646525, + "loss": 1.6227, + "step": 4057 + }, + { + "epoch": 0.3681727454182544, + "grad_norm": 0.10339122078383342, + "learning_rate": 0.0007289371389759627, + "loss": 1.5704, + "step": 4058 + }, + { + "epoch": 0.36826347305389223, + "grad_norm": 0.10626192955211308, + "learning_rate": 0.000728806508518517, + "loss": 1.6154, + "step": 4059 + }, + { + "epoch": 0.36835420068953, + "grad_norm": 0.10971798526807457, + "learning_rate": 0.0007286758583035958, + "loss": 1.5826, + "step": 4060 + }, + { + "epoch": 0.36844492832516784, + "grad_norm": 0.10633975399650249, + "learning_rate": 0.0007285451883424803, + "loss": 1.6511, + "step": 4061 + }, + { + "epoch": 0.3685356559608057, + "grad_norm": 0.10383898526411268, + "learning_rate": 0.0007284144986464541, + "loss": 1.6168, + "step": 4062 + }, + { + "epoch": 0.36862638359644345, + "grad_norm": 0.10890650048085591, + "learning_rate": 0.0007282837892268025, + "loss": 1.6179, + "step": 4063 + }, + { + "epoch": 0.3687171112320813, + "grad_norm": 0.11104566401939839, + "learning_rate": 0.0007281530600948119, + "loss": 1.6196, + "step": 4064 + }, + { + "epoch": 0.3688078388677191, + "grad_norm": 0.10548253182402588, + "learning_rate": 0.0007280223112617709, + "loss": 1.6519, + "step": 4065 + }, + 
{ + "epoch": 0.3688985665033569, + "grad_norm": 0.10913311072577912, + "learning_rate": 0.0007278915427389697, + "loss": 1.6256, + "step": 4066 + }, + { + "epoch": 0.36898929413899473, + "grad_norm": 0.10611732665264301, + "learning_rate": 0.0007277607545377003, + "loss": 1.6308, + "step": 4067 + }, + { + "epoch": 0.36908002177463256, + "grad_norm": 0.10650256214928149, + "learning_rate": 0.0007276299466692562, + "loss": 1.6353, + "step": 4068 + }, + { + "epoch": 0.3691707494102704, + "grad_norm": 0.10192589674840047, + "learning_rate": 0.0007274991191449325, + "loss": 1.6029, + "step": 4069 + }, + { + "epoch": 0.36926147704590817, + "grad_norm": 0.10636950563164359, + "learning_rate": 0.0007273682719760265, + "loss": 1.6304, + "step": 4070 + }, + { + "epoch": 0.369352204681546, + "grad_norm": 0.11096285422811286, + "learning_rate": 0.0007272374051738366, + "loss": 1.6407, + "step": 4071 + }, + { + "epoch": 0.36944293231718384, + "grad_norm": 0.10386460167846992, + "learning_rate": 0.0007271065187496633, + "loss": 1.6023, + "step": 4072 + }, + { + "epoch": 0.3695336599528216, + "grad_norm": 0.10628115315687095, + "learning_rate": 0.0007269756127148086, + "loss": 1.6349, + "step": 4073 + }, + { + "epoch": 0.36962438758845945, + "grad_norm": 0.10592719563860173, + "learning_rate": 0.0007268446870805764, + "loss": 1.6537, + "step": 4074 + }, + { + "epoch": 0.3697151152240973, + "grad_norm": 0.10659330000823024, + "learning_rate": 0.000726713741858272, + "loss": 1.6281, + "step": 4075 + }, + { + "epoch": 0.36980584285973506, + "grad_norm": 0.11271117594839156, + "learning_rate": 0.0007265827770592024, + "loss": 1.6117, + "step": 4076 + }, + { + "epoch": 0.3698965704953729, + "grad_norm": 0.10892930466295901, + "learning_rate": 0.0007264517926946769, + "loss": 1.6183, + "step": 4077 + }, + { + "epoch": 0.3699872981310107, + "grad_norm": 0.11138171266246784, + "learning_rate": 0.0007263207887760055, + "loss": 1.5868, + "step": 4078 + }, + { + "epoch": 0.3700780257666485, + "grad_norm": 0.11082606598656804, + "learning_rate": 0.0007261897653145006, + "loss": 1.5996, + "step": 4079 + }, + { + "epoch": 0.37016875340228633, + "grad_norm": 0.10591615239686446, + "learning_rate": 0.0007260587223214763, + "loss": 1.5691, + "step": 4080 + }, + { + "epoch": 0.37025948103792417, + "grad_norm": 0.11385723451185324, + "learning_rate": 0.0007259276598082479, + "loss": 1.644, + "step": 4081 + }, + { + "epoch": 0.37035020867356194, + "grad_norm": 0.10620862706125293, + "learning_rate": 0.0007257965777861329, + "loss": 1.633, + "step": 4082 + }, + { + "epoch": 0.3704409363091998, + "grad_norm": 0.10121330696611297, + "learning_rate": 0.0007256654762664501, + "loss": 1.646, + "step": 4083 + }, + { + "epoch": 0.3705316639448376, + "grad_norm": 0.10565888002764849, + "learning_rate": 0.00072553435526052, + "loss": 1.6339, + "step": 4084 + }, + { + "epoch": 0.3706223915804754, + "grad_norm": 0.11019018339153668, + "learning_rate": 0.0007254032147796652, + "loss": 1.6402, + "step": 4085 + }, + { + "epoch": 0.3707131192161132, + "grad_norm": 0.1031368181833324, + "learning_rate": 0.0007252720548352096, + "loss": 1.6016, + "step": 4086 + }, + { + "epoch": 0.37080384685175105, + "grad_norm": 0.10734558427136007, + "learning_rate": 0.0007251408754384789, + "loss": 1.6185, + "step": 4087 + }, + { + "epoch": 0.3708945744873889, + "grad_norm": 0.1060449314617186, + "learning_rate": 0.0007250096766008003, + "loss": 1.6626, + "step": 4088 + }, + { + "epoch": 0.37098530212302666, + "grad_norm": 0.1021302747153932, + 
"learning_rate": 0.000724878458333503, + "loss": 1.6476, + "step": 4089 + }, + { + "epoch": 0.3710760297586645, + "grad_norm": 0.10459712474550344, + "learning_rate": 0.0007247472206479178, + "loss": 1.617, + "step": 4090 + }, + { + "epoch": 0.3711667573943023, + "grad_norm": 0.10764603718727611, + "learning_rate": 0.0007246159635553768, + "loss": 1.658, + "step": 4091 + }, + { + "epoch": 0.3712574850299401, + "grad_norm": 0.10880503725244012, + "learning_rate": 0.0007244846870672144, + "loss": 1.658, + "step": 4092 + }, + { + "epoch": 0.37134821266557794, + "grad_norm": 0.10580711809180449, + "learning_rate": 0.0007243533911947661, + "loss": 1.6215, + "step": 4093 + }, + { + "epoch": 0.37143894030121577, + "grad_norm": 0.10705623743479623, + "learning_rate": 0.0007242220759493694, + "loss": 1.6388, + "step": 4094 + }, + { + "epoch": 0.37152966793685355, + "grad_norm": 0.1084954488374282, + "learning_rate": 0.0007240907413423634, + "loss": 1.6591, + "step": 4095 + }, + { + "epoch": 0.3716203955724914, + "grad_norm": 0.1014946670042704, + "learning_rate": 0.0007239593873850888, + "loss": 1.5927, + "step": 4096 + }, + { + "epoch": 0.3717111232081292, + "grad_norm": 0.10494985744887486, + "learning_rate": 0.0007238280140888882, + "loss": 1.5996, + "step": 4097 + }, + { + "epoch": 0.371801850843767, + "grad_norm": 0.10918142908110981, + "learning_rate": 0.0007236966214651055, + "loss": 1.6289, + "step": 4098 + }, + { + "epoch": 0.3718925784794048, + "grad_norm": 0.11663283631806354, + "learning_rate": 0.0007235652095250866, + "loss": 1.611, + "step": 4099 + }, + { + "epoch": 0.37198330611504266, + "grad_norm": 0.1054429097856415, + "learning_rate": 0.000723433778280179, + "loss": 1.6386, + "step": 4100 + }, + { + "epoch": 0.37207403375068043, + "grad_norm": 0.10585895149181283, + "learning_rate": 0.0007233023277417316, + "loss": 1.6218, + "step": 4101 + }, + { + "epoch": 0.37216476138631827, + "grad_norm": 0.10409396394256905, + "learning_rate": 0.0007231708579210954, + "loss": 1.6259, + "step": 4102 + }, + { + "epoch": 0.3722554890219561, + "grad_norm": 0.10566966059314734, + "learning_rate": 0.0007230393688296226, + "loss": 1.6368, + "step": 4103 + }, + { + "epoch": 0.3723462166575939, + "grad_norm": 0.10451143675485385, + "learning_rate": 0.0007229078604786675, + "loss": 1.562, + "step": 4104 + }, + { + "epoch": 0.3724369442932317, + "grad_norm": 0.1095158059522829, + "learning_rate": 0.0007227763328795858, + "loss": 1.6185, + "step": 4105 + }, + { + "epoch": 0.37252767192886954, + "grad_norm": 0.1065437377301357, + "learning_rate": 0.0007226447860437348, + "loss": 1.6633, + "step": 4106 + }, + { + "epoch": 0.3726183995645074, + "grad_norm": 0.11075958581284369, + "learning_rate": 0.000722513219982474, + "loss": 1.6131, + "step": 4107 + }, + { + "epoch": 0.37270912720014515, + "grad_norm": 0.1082717944168862, + "learning_rate": 0.0007223816347071635, + "loss": 1.6649, + "step": 4108 + }, + { + "epoch": 0.372799854835783, + "grad_norm": 0.10716376854385778, + "learning_rate": 0.0007222500302291663, + "loss": 1.5807, + "step": 4109 + }, + { + "epoch": 0.3728905824714208, + "grad_norm": 0.10112992771111946, + "learning_rate": 0.0007221184065598462, + "loss": 1.6004, + "step": 4110 + }, + { + "epoch": 0.3729813101070586, + "grad_norm": 0.10590635195969475, + "learning_rate": 0.0007219867637105689, + "loss": 1.6513, + "step": 4111 + }, + { + "epoch": 0.3730720377426964, + "grad_norm": 0.10700875503507547, + "learning_rate": 0.0007218551016927019, + "loss": 1.5984, + "step": 4112 + }, + { + 
"epoch": 0.37316276537833426, + "grad_norm": 0.10955726514773008, + "learning_rate": 0.0007217234205176141, + "loss": 1.6274, + "step": 4113 + }, + { + "epoch": 0.37325349301397204, + "grad_norm": 0.1065031519997901, + "learning_rate": 0.0007215917201966763, + "loss": 1.5972, + "step": 4114 + }, + { + "epoch": 0.37334422064960987, + "grad_norm": 0.10767761216434342, + "learning_rate": 0.0007214600007412607, + "loss": 1.6357, + "step": 4115 + }, + { + "epoch": 0.3734349482852477, + "grad_norm": 0.10589435326141144, + "learning_rate": 0.0007213282621627415, + "loss": 1.6463, + "step": 4116 + }, + { + "epoch": 0.3735256759208855, + "grad_norm": 0.10587845114229535, + "learning_rate": 0.0007211965044724943, + "loss": 1.6336, + "step": 4117 + }, + { + "epoch": 0.3736164035565233, + "grad_norm": 0.10404258260085772, + "learning_rate": 0.0007210647276818963, + "loss": 1.6336, + "step": 4118 + }, + { + "epoch": 0.37370713119216115, + "grad_norm": 0.10277924647857643, + "learning_rate": 0.0007209329318023264, + "loss": 1.6426, + "step": 4119 + }, + { + "epoch": 0.3737978588277989, + "grad_norm": 0.10267650537968043, + "learning_rate": 0.0007208011168451654, + "loss": 1.6366, + "step": 4120 + }, + { + "epoch": 0.37388858646343676, + "grad_norm": 0.10459726361075736, + "learning_rate": 0.0007206692828217952, + "loss": 1.6288, + "step": 4121 + }, + { + "epoch": 0.3739793140990746, + "grad_norm": 0.10622744399932273, + "learning_rate": 0.0007205374297436001, + "loss": 1.662, + "step": 4122 + }, + { + "epoch": 0.37407004173471237, + "grad_norm": 0.10681279337675888, + "learning_rate": 0.0007204055576219653, + "loss": 1.6229, + "step": 4123 + }, + { + "epoch": 0.3741607693703502, + "grad_norm": 0.10830060277665438, + "learning_rate": 0.0007202736664682783, + "loss": 1.6603, + "step": 4124 + }, + { + "epoch": 0.37425149700598803, + "grad_norm": 0.10199952022919287, + "learning_rate": 0.0007201417562939276, + "loss": 1.6099, + "step": 4125 + }, + { + "epoch": 0.37434222464162586, + "grad_norm": 0.10544763320991872, + "learning_rate": 0.0007200098271103039, + "loss": 1.6053, + "step": 4126 + }, + { + "epoch": 0.37443295227726364, + "grad_norm": 0.1055356618460069, + "learning_rate": 0.000719877878928799, + "loss": 1.6469, + "step": 4127 + }, + { + "epoch": 0.3745236799129015, + "grad_norm": 0.11014278248175056, + "learning_rate": 0.0007197459117608071, + "loss": 1.6575, + "step": 4128 + }, + { + "epoch": 0.3746144075485393, + "grad_norm": 0.10658137347801429, + "learning_rate": 0.0007196139256177233, + "loss": 1.598, + "step": 4129 + }, + { + "epoch": 0.3747051351841771, + "grad_norm": 0.10220194770958903, + "learning_rate": 0.0007194819205109446, + "loss": 1.649, + "step": 4130 + }, + { + "epoch": 0.3747958628198149, + "grad_norm": 0.10381985090215695, + "learning_rate": 0.0007193498964518696, + "loss": 1.6014, + "step": 4131 + }, + { + "epoch": 0.37488659045545275, + "grad_norm": 0.10312418341934596, + "learning_rate": 0.0007192178534518989, + "loss": 1.6232, + "step": 4132 + }, + { + "epoch": 0.37497731809109053, + "grad_norm": 0.10566826451628233, + "learning_rate": 0.000719085791522434, + "loss": 1.6544, + "step": 4133 + }, + { + "epoch": 0.37506804572672836, + "grad_norm": 0.10698230999510985, + "learning_rate": 0.000718953710674879, + "loss": 1.6658, + "step": 4134 + }, + { + "epoch": 0.3751587733623662, + "grad_norm": 0.10563061997470304, + "learning_rate": 0.0007188216109206385, + "loss": 1.6179, + "step": 4135 + }, + { + "epoch": 0.37524950099800397, + "grad_norm": 0.10339228634310992, + 
"learning_rate": 0.00071868949227112, + "loss": 1.6758, + "step": 4136 + }, + { + "epoch": 0.3753402286336418, + "grad_norm": 0.10923622024947847, + "learning_rate": 0.0007185573547377315, + "loss": 1.5762, + "step": 4137 + }, + { + "epoch": 0.37543095626927964, + "grad_norm": 0.10880342570387216, + "learning_rate": 0.000718425198331883, + "loss": 1.5883, + "step": 4138 + }, + { + "epoch": 0.3755216839049174, + "grad_norm": 0.10839246283544209, + "learning_rate": 0.0007182930230649866, + "loss": 1.6195, + "step": 4139 + }, + { + "epoch": 0.37561241154055525, + "grad_norm": 0.10813134293830913, + "learning_rate": 0.0007181608289484554, + "loss": 1.5995, + "step": 4140 + }, + { + "epoch": 0.3757031391761931, + "grad_norm": 0.10686402265190369, + "learning_rate": 0.0007180286159937046, + "loss": 1.6035, + "step": 4141 + }, + { + "epoch": 0.37579386681183086, + "grad_norm": 0.1114153317961022, + "learning_rate": 0.0007178963842121507, + "loss": 1.5775, + "step": 4142 + }, + { + "epoch": 0.3758845944474687, + "grad_norm": 0.10441832792130201, + "learning_rate": 0.0007177641336152116, + "loss": 1.6114, + "step": 4143 + }, + { + "epoch": 0.3759753220831065, + "grad_norm": 0.10675333654489969, + "learning_rate": 0.0007176318642143077, + "loss": 1.5854, + "step": 4144 + }, + { + "epoch": 0.37606604971874436, + "grad_norm": 0.10874036000082128, + "learning_rate": 0.0007174995760208603, + "loss": 1.6081, + "step": 4145 + }, + { + "epoch": 0.37615677735438213, + "grad_norm": 0.10935559735759892, + "learning_rate": 0.0007173672690462924, + "loss": 1.6196, + "step": 4146 + }, + { + "epoch": 0.37624750499001997, + "grad_norm": 0.10659201995593062, + "learning_rate": 0.0007172349433020287, + "loss": 1.6278, + "step": 4147 + }, + { + "epoch": 0.3763382326256578, + "grad_norm": 0.10897311354111057, + "learning_rate": 0.0007171025987994957, + "loss": 1.6745, + "step": 4148 + }, + { + "epoch": 0.3764289602612956, + "grad_norm": 0.10707030835843145, + "learning_rate": 0.0007169702355501213, + "loss": 1.6469, + "step": 4149 + }, + { + "epoch": 0.3765196878969334, + "grad_norm": 0.10434669011234624, + "learning_rate": 0.0007168378535653351, + "loss": 1.5944, + "step": 4150 + }, + { + "epoch": 0.37661041553257124, + "grad_norm": 0.10360389064840164, + "learning_rate": 0.0007167054528565682, + "loss": 1.6231, + "step": 4151 + }, + { + "epoch": 0.376701143168209, + "grad_norm": 0.10344722996383518, + "learning_rate": 0.0007165730334352535, + "loss": 1.6421, + "step": 4152 + }, + { + "epoch": 0.37679187080384685, + "grad_norm": 0.10451412820427079, + "learning_rate": 0.0007164405953128256, + "loss": 1.6031, + "step": 4153 + }, + { + "epoch": 0.3768825984394847, + "grad_norm": 0.10489220280187488, + "learning_rate": 0.0007163081385007201, + "loss": 1.6153, + "step": 4154 + }, + { + "epoch": 0.37697332607512246, + "grad_norm": 0.10245243261342248, + "learning_rate": 0.0007161756630103753, + "loss": 1.6053, + "step": 4155 + }, + { + "epoch": 0.3770640537107603, + "grad_norm": 0.10680928370270901, + "learning_rate": 0.0007160431688532301, + "loss": 1.6881, + "step": 4156 + }, + { + "epoch": 0.3771547813463981, + "grad_norm": 0.10459763973772426, + "learning_rate": 0.0007159106560407252, + "loss": 1.6777, + "step": 4157 + }, + { + "epoch": 0.3772455089820359, + "grad_norm": 0.11468577131010942, + "learning_rate": 0.0007157781245843035, + "loss": 1.6715, + "step": 4158 + }, + { + "epoch": 0.37733623661767374, + "grad_norm": 0.10592996541567323, + "learning_rate": 0.0007156455744954088, + "loss": 1.6095, + "step": 4159 + 
}, + { + "epoch": 0.37742696425331157, + "grad_norm": 0.10442838887037229, + "learning_rate": 0.0007155130057854871, + "loss": 1.6424, + "step": 4160 + }, + { + "epoch": 0.37751769188894935, + "grad_norm": 0.10824800342772684, + "learning_rate": 0.0007153804184659855, + "loss": 1.6038, + "step": 4161 + }, + { + "epoch": 0.3776084195245872, + "grad_norm": 0.11061960802017345, + "learning_rate": 0.0007152478125483531, + "loss": 1.6236, + "step": 4162 + }, + { + "epoch": 0.377699147160225, + "grad_norm": 0.10810337537241767, + "learning_rate": 0.0007151151880440403, + "loss": 1.6371, + "step": 4163 + }, + { + "epoch": 0.37778987479586285, + "grad_norm": 0.10075156104371738, + "learning_rate": 0.0007149825449644993, + "loss": 1.6054, + "step": 4164 + }, + { + "epoch": 0.3778806024315006, + "grad_norm": 0.10253229418312677, + "learning_rate": 0.0007148498833211838, + "loss": 1.6108, + "step": 4165 + }, + { + "epoch": 0.37797133006713846, + "grad_norm": 0.10525603133389622, + "learning_rate": 0.0007147172031255493, + "loss": 1.6263, + "step": 4166 + }, + { + "epoch": 0.3780620577027763, + "grad_norm": 0.11001872975648472, + "learning_rate": 0.0007145845043890528, + "loss": 1.6307, + "step": 4167 + }, + { + "epoch": 0.37815278533841407, + "grad_norm": 0.10567038258771953, + "learning_rate": 0.0007144517871231526, + "loss": 1.5872, + "step": 4168 + }, + { + "epoch": 0.3782435129740519, + "grad_norm": 0.10474665861313391, + "learning_rate": 0.0007143190513393089, + "loss": 1.6113, + "step": 4169 + }, + { + "epoch": 0.37833424060968973, + "grad_norm": 0.11014708330381719, + "learning_rate": 0.0007141862970489836, + "loss": 1.6582, + "step": 4170 + }, + { + "epoch": 0.3784249682453275, + "grad_norm": 0.10724004464552313, + "learning_rate": 0.0007140535242636399, + "loss": 1.6564, + "step": 4171 + }, + { + "epoch": 0.37851569588096534, + "grad_norm": 0.10243842153672879, + "learning_rate": 0.000713920732994743, + "loss": 1.6288, + "step": 4172 + }, + { + "epoch": 0.3786064235166032, + "grad_norm": 0.10864909983379042, + "learning_rate": 0.0007137879232537592, + "loss": 1.6376, + "step": 4173 + }, + { + "epoch": 0.37869715115224095, + "grad_norm": 0.10870964946904602, + "learning_rate": 0.0007136550950521566, + "loss": 1.5909, + "step": 4174 + }, + { + "epoch": 0.3787878787878788, + "grad_norm": 0.10560858027565552, + "learning_rate": 0.0007135222484014052, + "loss": 1.658, + "step": 4175 + }, + { + "epoch": 0.3788786064235166, + "grad_norm": 0.11011581641786457, + "learning_rate": 0.0007133893833129761, + "loss": 1.6731, + "step": 4176 + }, + { + "epoch": 0.3789693340591544, + "grad_norm": 0.1077057829658078, + "learning_rate": 0.0007132564997983423, + "loss": 1.6233, + "step": 4177 + }, + { + "epoch": 0.3790600616947922, + "grad_norm": 0.10575961895299907, + "learning_rate": 0.0007131235978689783, + "loss": 1.6116, + "step": 4178 + }, + { + "epoch": 0.37915078933043006, + "grad_norm": 0.10887499073483718, + "learning_rate": 0.0007129906775363603, + "loss": 1.6802, + "step": 4179 + }, + { + "epoch": 0.37924151696606784, + "grad_norm": 0.10346240394727114, + "learning_rate": 0.0007128577388119659, + "loss": 1.6083, + "step": 4180 + }, + { + "epoch": 0.37933224460170567, + "grad_norm": 0.10885615036774808, + "learning_rate": 0.0007127247817072743, + "loss": 1.6054, + "step": 4181 + }, + { + "epoch": 0.3794229722373435, + "grad_norm": 0.11228073045695289, + "learning_rate": 0.0007125918062337665, + "loss": 1.6205, + "step": 4182 + }, + { + "epoch": 0.37951369987298134, + "grad_norm": 0.10880904023644827, 
+ "learning_rate": 0.0007124588124029249, + "loss": 1.6181, + "step": 4183 + }, + { + "epoch": 0.3796044275086191, + "grad_norm": 0.11072875352898091, + "learning_rate": 0.0007123258002262333, + "loss": 1.6017, + "step": 4184 + }, + { + "epoch": 0.37969515514425695, + "grad_norm": 0.10755271950056774, + "learning_rate": 0.000712192769715178, + "loss": 1.6448, + "step": 4185 + }, + { + "epoch": 0.3797858827798948, + "grad_norm": 0.1068612711957338, + "learning_rate": 0.0007120597208812453, + "loss": 1.634, + "step": 4186 + }, + { + "epoch": 0.37987661041553256, + "grad_norm": 0.1085077004068708, + "learning_rate": 0.0007119266537359249, + "loss": 1.6112, + "step": 4187 + }, + { + "epoch": 0.3799673380511704, + "grad_norm": 0.10624269468749885, + "learning_rate": 0.0007117935682907064, + "loss": 1.6347, + "step": 4188 + }, + { + "epoch": 0.3800580656868082, + "grad_norm": 0.10277634819976794, + "learning_rate": 0.0007116604645570822, + "loss": 1.64, + "step": 4189 + }, + { + "epoch": 0.380148793322446, + "grad_norm": 0.10317882369523455, + "learning_rate": 0.0007115273425465456, + "loss": 1.6053, + "step": 4190 + }, + { + "epoch": 0.38023952095808383, + "grad_norm": 0.10476439433867096, + "learning_rate": 0.0007113942022705919, + "loss": 1.6344, + "step": 4191 + }, + { + "epoch": 0.38033024859372166, + "grad_norm": 0.10324561609519853, + "learning_rate": 0.0007112610437407177, + "loss": 1.5811, + "step": 4192 + }, + { + "epoch": 0.38042097622935944, + "grad_norm": 0.10282778917870376, + "learning_rate": 0.000711127866968421, + "loss": 1.638, + "step": 4193 + }, + { + "epoch": 0.3805117038649973, + "grad_norm": 0.10513712865638974, + "learning_rate": 0.0007109946719652021, + "loss": 1.6308, + "step": 4194 + }, + { + "epoch": 0.3806024315006351, + "grad_norm": 0.10185673728896939, + "learning_rate": 0.0007108614587425622, + "loss": 1.6459, + "step": 4195 + }, + { + "epoch": 0.3806931591362729, + "grad_norm": 0.1023626819816311, + "learning_rate": 0.0007107282273120042, + "loss": 1.6238, + "step": 4196 + }, + { + "epoch": 0.3807838867719107, + "grad_norm": 0.10808903723197288, + "learning_rate": 0.0007105949776850326, + "loss": 1.6376, + "step": 4197 + }, + { + "epoch": 0.38087461440754855, + "grad_norm": 0.1069301161314213, + "learning_rate": 0.0007104617098731537, + "loss": 1.7007, + "step": 4198 + }, + { + "epoch": 0.38096534204318633, + "grad_norm": 0.10497955925028829, + "learning_rate": 0.0007103284238878751, + "loss": 1.5833, + "step": 4199 + }, + { + "epoch": 0.38105606967882416, + "grad_norm": 0.10779135872812795, + "learning_rate": 0.0007101951197407061, + "loss": 1.6002, + "step": 4200 + }, + { + "epoch": 0.381146797314462, + "grad_norm": 0.10307206827750098, + "learning_rate": 0.0007100617974431576, + "loss": 1.665, + "step": 4201 + }, + { + "epoch": 0.3812375249500998, + "grad_norm": 0.10161438324149466, + "learning_rate": 0.000709928457006742, + "loss": 1.6323, + "step": 4202 + }, + { + "epoch": 0.3813282525857376, + "grad_norm": 0.1046503655026434, + "learning_rate": 0.0007097950984429731, + "loss": 1.5809, + "step": 4203 + }, + { + "epoch": 0.38141898022137544, + "grad_norm": 0.10340839290213437, + "learning_rate": 0.0007096617217633665, + "loss": 1.6189, + "step": 4204 + }, + { + "epoch": 0.38150970785701327, + "grad_norm": 0.10611076164686577, + "learning_rate": 0.0007095283269794394, + "loss": 1.6327, + "step": 4205 + }, + { + "epoch": 0.38160043549265105, + "grad_norm": 0.10714857538012046, + "learning_rate": 0.0007093949141027103, + "loss": 1.6024, + "step": 4206 + }, + { + 
"epoch": 0.3816911631282889, + "grad_norm": 0.10826517059046359, + "learning_rate": 0.0007092614831446996, + "loss": 1.6263, + "step": 4207 + }, + { + "epoch": 0.3817818907639267, + "grad_norm": 0.10461238475328666, + "learning_rate": 0.000709128034116929, + "loss": 1.6435, + "step": 4208 + }, + { + "epoch": 0.3818726183995645, + "grad_norm": 0.10389576567257201, + "learning_rate": 0.0007089945670309217, + "loss": 1.5662, + "step": 4209 + }, + { + "epoch": 0.3819633460352023, + "grad_norm": 0.10098426627265744, + "learning_rate": 0.0007088610818982027, + "loss": 1.6648, + "step": 4210 + }, + { + "epoch": 0.38205407367084016, + "grad_norm": 0.11099539100223453, + "learning_rate": 0.0007087275787302987, + "loss": 1.6237, + "step": 4211 + }, + { + "epoch": 0.38214480130647793, + "grad_norm": 0.10815347270629477, + "learning_rate": 0.0007085940575387376, + "loss": 1.5937, + "step": 4212 + }, + { + "epoch": 0.38223552894211577, + "grad_norm": 0.10457661208190783, + "learning_rate": 0.0007084605183350484, + "loss": 1.6072, + "step": 4213 + }, + { + "epoch": 0.3823262565777536, + "grad_norm": 0.10413790211625047, + "learning_rate": 0.0007083269611307632, + "loss": 1.6324, + "step": 4214 + }, + { + "epoch": 0.3824169842133914, + "grad_norm": 0.10576658603090675, + "learning_rate": 0.000708193385937414, + "loss": 1.6128, + "step": 4215 + }, + { + "epoch": 0.3825077118490292, + "grad_norm": 0.10579032633581208, + "learning_rate": 0.0007080597927665352, + "loss": 1.6605, + "step": 4216 + }, + { + "epoch": 0.38259843948466704, + "grad_norm": 0.10382821077071525, + "learning_rate": 0.0007079261816296627, + "loss": 1.6317, + "step": 4217 + }, + { + "epoch": 0.3826891671203048, + "grad_norm": 0.10339007418727404, + "learning_rate": 0.0007077925525383337, + "loss": 1.6185, + "step": 4218 + }, + { + "epoch": 0.38277989475594265, + "grad_norm": 0.10697078191603766, + "learning_rate": 0.0007076589055040874, + "loss": 1.6165, + "step": 4219 + }, + { + "epoch": 0.3828706223915805, + "grad_norm": 0.10180161133453526, + "learning_rate": 0.0007075252405384638, + "loss": 1.6559, + "step": 4220 + }, + { + "epoch": 0.3829613500272183, + "grad_norm": 0.1063607049072439, + "learning_rate": 0.0007073915576530051, + "loss": 1.6643, + "step": 4221 + }, + { + "epoch": 0.3830520776628561, + "grad_norm": 0.10462637473549903, + "learning_rate": 0.0007072578568592549, + "loss": 1.6416, + "step": 4222 + }, + { + "epoch": 0.3831428052984939, + "grad_norm": 0.10293899207069288, + "learning_rate": 0.0007071241381687581, + "loss": 1.5783, + "step": 4223 + }, + { + "epoch": 0.38323353293413176, + "grad_norm": 0.1049295626887884, + "learning_rate": 0.0007069904015930617, + "loss": 1.6024, + "step": 4224 + }, + { + "epoch": 0.38332426056976954, + "grad_norm": 0.1129139507311777, + "learning_rate": 0.0007068566471437132, + "loss": 1.569, + "step": 4225 + }, + { + "epoch": 0.38341498820540737, + "grad_norm": 0.10893775474319374, + "learning_rate": 0.0007067228748322631, + "loss": 1.6421, + "step": 4226 + }, + { + "epoch": 0.3835057158410452, + "grad_norm": 0.10433778616653958, + "learning_rate": 0.0007065890846702621, + "loss": 1.6221, + "step": 4227 + }, + { + "epoch": 0.383596443476683, + "grad_norm": 0.10360468294972733, + "learning_rate": 0.0007064552766692632, + "loss": 1.639, + "step": 4228 + }, + { + "epoch": 0.3836871711123208, + "grad_norm": 0.10209957021806541, + "learning_rate": 0.0007063214508408208, + "loss": 1.5805, + "step": 4229 + }, + { + "epoch": 0.38377789874795865, + "grad_norm": 0.10731099734036445, + 
"learning_rate": 0.0007061876071964905, + "loss": 1.613, + "step": 4230 + }, + { + "epoch": 0.3838686263835964, + "grad_norm": 0.10659790397323465, + "learning_rate": 0.0007060537457478303, + "loss": 1.5991, + "step": 4231 + }, + { + "epoch": 0.38395935401923426, + "grad_norm": 0.10048938991958414, + "learning_rate": 0.0007059198665063985, + "loss": 1.6226, + "step": 4232 + }, + { + "epoch": 0.3840500816548721, + "grad_norm": 0.1074775844548196, + "learning_rate": 0.000705785969483756, + "loss": 1.6268, + "step": 4233 + }, + { + "epoch": 0.38414080929050987, + "grad_norm": 0.1039230761107088, + "learning_rate": 0.0007056520546914647, + "loss": 1.6175, + "step": 4234 + }, + { + "epoch": 0.3842315369261477, + "grad_norm": 0.10719378203620698, + "learning_rate": 0.0007055181221410881, + "loss": 1.6311, + "step": 4235 + }, + { + "epoch": 0.38432226456178553, + "grad_norm": 0.10460824926808789, + "learning_rate": 0.0007053841718441914, + "loss": 1.6083, + "step": 4236 + }, + { + "epoch": 0.3844129921974233, + "grad_norm": 0.10677481024595074, + "learning_rate": 0.0007052502038123412, + "loss": 1.6367, + "step": 4237 + }, + { + "epoch": 0.38450371983306114, + "grad_norm": 0.10784305784778167, + "learning_rate": 0.0007051162180571056, + "loss": 1.6141, + "step": 4238 + }, + { + "epoch": 0.384594447468699, + "grad_norm": 0.10308860638527373, + "learning_rate": 0.0007049822145900545, + "loss": 1.6253, + "step": 4239 + }, + { + "epoch": 0.3846851751043368, + "grad_norm": 0.10251046368564176, + "learning_rate": 0.0007048481934227587, + "loss": 1.6236, + "step": 4240 + }, + { + "epoch": 0.3847759027399746, + "grad_norm": 0.10248124514319308, + "learning_rate": 0.0007047141545667915, + "loss": 1.6045, + "step": 4241 + }, + { + "epoch": 0.3848666303756124, + "grad_norm": 0.10792067951551727, + "learning_rate": 0.0007045800980337267, + "loss": 1.6179, + "step": 4242 + }, + { + "epoch": 0.38495735801125025, + "grad_norm": 0.10465238310681092, + "learning_rate": 0.0007044460238351403, + "loss": 1.6153, + "step": 4243 + }, + { + "epoch": 0.385048085646888, + "grad_norm": 0.10403534545981642, + "learning_rate": 0.0007043119319826097, + "loss": 1.6073, + "step": 4244 + }, + { + "epoch": 0.38513881328252586, + "grad_norm": 0.10281822960437302, + "learning_rate": 0.0007041778224877135, + "loss": 1.5761, + "step": 4245 + }, + { + "epoch": 0.3852295409181637, + "grad_norm": 0.10625065339140556, + "learning_rate": 0.0007040436953620324, + "loss": 1.5847, + "step": 4246 + }, + { + "epoch": 0.38532026855380147, + "grad_norm": 0.102793911950067, + "learning_rate": 0.000703909550617148, + "loss": 1.5818, + "step": 4247 + }, + { + "epoch": 0.3854109961894393, + "grad_norm": 0.10759902750156215, + "learning_rate": 0.000703775388264644, + "loss": 1.6873, + "step": 4248 + }, + { + "epoch": 0.38550172382507714, + "grad_norm": 0.10162404801286881, + "learning_rate": 0.000703641208316105, + "loss": 1.6071, + "step": 4249 + }, + { + "epoch": 0.3855924514607149, + "grad_norm": 0.10656801816565611, + "learning_rate": 0.0007035070107831177, + "loss": 1.6243, + "step": 4250 + }, + { + "epoch": 0.38568317909635275, + "grad_norm": 0.09813849942950839, + "learning_rate": 0.0007033727956772702, + "loss": 1.6055, + "step": 4251 + }, + { + "epoch": 0.3857739067319906, + "grad_norm": 0.10335667427934121, + "learning_rate": 0.0007032385630101516, + "loss": 1.6334, + "step": 4252 + }, + { + "epoch": 0.38586463436762836, + "grad_norm": 0.10948983003875151, + "learning_rate": 0.0007031043127933533, + "loss": 1.6369, + "step": 4253 + }, + { 
+ "epoch": 0.3859553620032662, + "grad_norm": 0.10653249466307457, + "learning_rate": 0.0007029700450384676, + "loss": 1.6488, + "step": 4254 + }, + { + "epoch": 0.386046089638904, + "grad_norm": 0.09868636806158507, + "learning_rate": 0.0007028357597570885, + "loss": 1.6276, + "step": 4255 + }, + { + "epoch": 0.3861368172745418, + "grad_norm": 0.10448145475222419, + "learning_rate": 0.0007027014569608117, + "loss": 1.6096, + "step": 4256 + }, + { + "epoch": 0.38622754491017963, + "grad_norm": 0.10753856766462498, + "learning_rate": 0.0007025671366612343, + "loss": 1.6211, + "step": 4257 + }, + { + "epoch": 0.38631827254581746, + "grad_norm": 0.09819072767042751, + "learning_rate": 0.0007024327988699548, + "loss": 1.5844, + "step": 4258 + }, + { + "epoch": 0.3864090001814553, + "grad_norm": 0.10494630410360216, + "learning_rate": 0.0007022984435985731, + "loss": 1.6132, + "step": 4259 + }, + { + "epoch": 0.3864997278170931, + "grad_norm": 0.10469575192360016, + "learning_rate": 0.0007021640708586911, + "loss": 1.6134, + "step": 4260 + }, + { + "epoch": 0.3865904554527309, + "grad_norm": 0.10627647255712613, + "learning_rate": 0.0007020296806619118, + "loss": 1.6063, + "step": 4261 + }, + { + "epoch": 0.38668118308836874, + "grad_norm": 0.10660461931873418, + "learning_rate": 0.0007018952730198398, + "loss": 1.6097, + "step": 4262 + }, + { + "epoch": 0.3867719107240065, + "grad_norm": 0.10478328339593933, + "learning_rate": 0.0007017608479440812, + "loss": 1.6729, + "step": 4263 + }, + { + "epoch": 0.38686263835964435, + "grad_norm": 0.11138359559001489, + "learning_rate": 0.0007016264054462435, + "loss": 1.6333, + "step": 4264 + }, + { + "epoch": 0.3869533659952822, + "grad_norm": 0.10687187221211585, + "learning_rate": 0.0007014919455379363, + "loss": 1.5998, + "step": 4265 + }, + { + "epoch": 0.38704409363091996, + "grad_norm": 0.1058091908764074, + "learning_rate": 0.0007013574682307697, + "loss": 1.6623, + "step": 4266 + }, + { + "epoch": 0.3871348212665578, + "grad_norm": 0.10570580828789668, + "learning_rate": 0.000701222973536356, + "loss": 1.6463, + "step": 4267 + }, + { + "epoch": 0.3872255489021956, + "grad_norm": 0.10624506691897638, + "learning_rate": 0.0007010884614663088, + "loss": 1.574, + "step": 4268 + }, + { + "epoch": 0.3873162765378334, + "grad_norm": 0.10440353898671012, + "learning_rate": 0.0007009539320322435, + "loss": 1.5949, + "step": 4269 + }, + { + "epoch": 0.38740700417347124, + "grad_norm": 0.10408882534692548, + "learning_rate": 0.0007008193852457767, + "loss": 1.6193, + "step": 4270 + }, + { + "epoch": 0.38749773180910907, + "grad_norm": 0.1011620401063553, + "learning_rate": 0.0007006848211185261, + "loss": 1.6604, + "step": 4271 + }, + { + "epoch": 0.38758845944474685, + "grad_norm": 0.10246862335102788, + "learning_rate": 0.0007005502396621116, + "loss": 1.6148, + "step": 4272 + }, + { + "epoch": 0.3876791870803847, + "grad_norm": 0.10179553591732526, + "learning_rate": 0.0007004156408881545, + "loss": 1.6013, + "step": 4273 + }, + { + "epoch": 0.3877699147160225, + "grad_norm": 0.10721065610363202, + "learning_rate": 0.0007002810248082773, + "loss": 1.6131, + "step": 4274 + }, + { + "epoch": 0.3878606423516603, + "grad_norm": 0.10421361876764695, + "learning_rate": 0.000700146391434104, + "loss": 1.6309, + "step": 4275 + }, + { + "epoch": 0.3879513699872981, + "grad_norm": 0.10635030032427258, + "learning_rate": 0.0007000117407772602, + "loss": 1.5716, + "step": 4276 + }, + { + "epoch": 0.38804209762293596, + "grad_norm": 0.10486661760697058, + 
"learning_rate": 0.0006998770728493734, + "loss": 1.5896, + "step": 4277 + }, + { + "epoch": 0.3881328252585738, + "grad_norm": 0.11489458297621623, + "learning_rate": 0.0006997423876620717, + "loss": 1.6512, + "step": 4278 + }, + { + "epoch": 0.38822355289421157, + "grad_norm": 0.11129199441002721, + "learning_rate": 0.0006996076852269853, + "loss": 1.6049, + "step": 4279 + }, + { + "epoch": 0.3883142805298494, + "grad_norm": 0.10848051961276338, + "learning_rate": 0.0006994729655557462, + "loss": 1.6385, + "step": 4280 + }, + { + "epoch": 0.38840500816548723, + "grad_norm": 0.10609428791073681, + "learning_rate": 0.0006993382286599868, + "loss": 1.6624, + "step": 4281 + }, + { + "epoch": 0.388495735801125, + "grad_norm": 0.10128519677409853, + "learning_rate": 0.000699203474551342, + "loss": 1.5866, + "step": 4282 + }, + { + "epoch": 0.38858646343676284, + "grad_norm": 0.10469462275495199, + "learning_rate": 0.0006990687032414479, + "loss": 1.6666, + "step": 4283 + }, + { + "epoch": 0.3886771910724007, + "grad_norm": 0.10186849990072934, + "learning_rate": 0.0006989339147419419, + "loss": 1.5564, + "step": 4284 + }, + { + "epoch": 0.38876791870803845, + "grad_norm": 0.10584056652314716, + "learning_rate": 0.0006987991090644632, + "loss": 1.6064, + "step": 4285 + }, + { + "epoch": 0.3888586463436763, + "grad_norm": 0.10225864316776831, + "learning_rate": 0.0006986642862206519, + "loss": 1.6285, + "step": 4286 + }, + { + "epoch": 0.3889493739793141, + "grad_norm": 0.1033140092362613, + "learning_rate": 0.0006985294462221503, + "loss": 1.5655, + "step": 4287 + }, + { + "epoch": 0.3890401016149519, + "grad_norm": 0.10350735340261917, + "learning_rate": 0.0006983945890806018, + "loss": 1.6044, + "step": 4288 + }, + { + "epoch": 0.3891308292505897, + "grad_norm": 0.1040900742618187, + "learning_rate": 0.0006982597148076511, + "loss": 1.6432, + "step": 4289 + }, + { + "epoch": 0.38922155688622756, + "grad_norm": 0.11882833287750665, + "learning_rate": 0.0006981248234149451, + "loss": 1.6139, + "step": 4290 + }, + { + "epoch": 0.38931228452186534, + "grad_norm": 0.1029258681457512, + "learning_rate": 0.0006979899149141311, + "loss": 1.5904, + "step": 4291 + }, + { + "epoch": 0.38940301215750317, + "grad_norm": 0.10370029523217336, + "learning_rate": 0.000697854989316859, + "loss": 1.6348, + "step": 4292 + }, + { + "epoch": 0.389493739793141, + "grad_norm": 0.10564991284354235, + "learning_rate": 0.0006977200466347794, + "loss": 1.6279, + "step": 4293 + }, + { + "epoch": 0.3895844674287788, + "grad_norm": 0.1059567489427403, + "learning_rate": 0.0006975850868795446, + "loss": 1.6187, + "step": 4294 + }, + { + "epoch": 0.3896751950644166, + "grad_norm": 0.10376070222434591, + "learning_rate": 0.0006974501100628083, + "loss": 1.6777, + "step": 4295 + }, + { + "epoch": 0.38976592270005445, + "grad_norm": 0.10537600317221164, + "learning_rate": 0.000697315116196226, + "loss": 1.6405, + "step": 4296 + }, + { + "epoch": 0.3898566503356923, + "grad_norm": 0.10775499191895416, + "learning_rate": 0.0006971801052914544, + "loss": 1.6369, + "step": 4297 + }, + { + "epoch": 0.38994737797133006, + "grad_norm": 0.1045298126173391, + "learning_rate": 0.0006970450773601516, + "loss": 1.6126, + "step": 4298 + }, + { + "epoch": 0.3900381056069679, + "grad_norm": 0.10344759012949575, + "learning_rate": 0.0006969100324139772, + "loss": 1.6484, + "step": 4299 + }, + { + "epoch": 0.3901288332426057, + "grad_norm": 0.1101317294907289, + "learning_rate": 0.0006967749704645926, + "loss": 1.6242, + "step": 4300 + }, + { + 
"epoch": 0.3902195608782435, + "grad_norm": 0.10257323263562493, + "learning_rate": 0.0006966398915236602, + "loss": 1.6526, + "step": 4301 + }, + { + "epoch": 0.39031028851388133, + "grad_norm": 0.10876102435280693, + "learning_rate": 0.0006965047956028444, + "loss": 1.6172, + "step": 4302 + }, + { + "epoch": 0.39040101614951916, + "grad_norm": 0.09910687977284666, + "learning_rate": 0.0006963696827138102, + "loss": 1.6376, + "step": 4303 + }, + { + "epoch": 0.39049174378515694, + "grad_norm": 0.10413415974211732, + "learning_rate": 0.0006962345528682254, + "loss": 1.6381, + "step": 4304 + }, + { + "epoch": 0.3905824714207948, + "grad_norm": 0.10420373091036808, + "learning_rate": 0.0006960994060777577, + "loss": 1.6496, + "step": 4305 + }, + { + "epoch": 0.3906731990564326, + "grad_norm": 0.09924068741553409, + "learning_rate": 0.0006959642423540775, + "loss": 1.5851, + "step": 4306 + }, + { + "epoch": 0.3907639266920704, + "grad_norm": 0.10434235759479335, + "learning_rate": 0.0006958290617088559, + "loss": 1.6076, + "step": 4307 + }, + { + "epoch": 0.3908546543277082, + "grad_norm": 0.10721109490632007, + "learning_rate": 0.0006956938641537663, + "loss": 1.5806, + "step": 4308 + }, + { + "epoch": 0.39094538196334605, + "grad_norm": 0.10347018357402378, + "learning_rate": 0.0006955586497004824, + "loss": 1.6201, + "step": 4309 + }, + { + "epoch": 0.3910361095989838, + "grad_norm": 0.10493285272830334, + "learning_rate": 0.0006954234183606803, + "loss": 1.6374, + "step": 4310 + }, + { + "epoch": 0.39112683723462166, + "grad_norm": 0.10755313248502267, + "learning_rate": 0.0006952881701460374, + "loss": 1.6469, + "step": 4311 + }, + { + "epoch": 0.3912175648702595, + "grad_norm": 0.10755531537858679, + "learning_rate": 0.0006951529050682322, + "loss": 1.6339, + "step": 4312 + }, + { + "epoch": 0.39130829250589727, + "grad_norm": 0.10448673895459798, + "learning_rate": 0.0006950176231389448, + "loss": 1.6163, + "step": 4313 + }, + { + "epoch": 0.3913990201415351, + "grad_norm": 0.10536687663653792, + "learning_rate": 0.0006948823243698569, + "loss": 1.6281, + "step": 4314 + }, + { + "epoch": 0.39148974777717294, + "grad_norm": 0.10468739160787077, + "learning_rate": 0.0006947470087726516, + "loss": 1.5974, + "step": 4315 + }, + { + "epoch": 0.39158047541281077, + "grad_norm": 0.1046323663172895, + "learning_rate": 0.0006946116763590133, + "loss": 1.6287, + "step": 4316 + }, + { + "epoch": 0.39167120304844855, + "grad_norm": 0.10740061575376691, + "learning_rate": 0.0006944763271406282, + "loss": 1.629, + "step": 4317 + }, + { + "epoch": 0.3917619306840864, + "grad_norm": 0.10393762292535336, + "learning_rate": 0.0006943409611291835, + "loss": 1.633, + "step": 4318 + }, + { + "epoch": 0.3918526583197242, + "grad_norm": 0.10656280770874306, + "learning_rate": 0.0006942055783363683, + "loss": 1.6226, + "step": 4319 + }, + { + "epoch": 0.391943385955362, + "grad_norm": 0.10292937083152641, + "learning_rate": 0.0006940701787738725, + "loss": 1.6693, + "step": 4320 + }, + { + "epoch": 0.3920341135909998, + "grad_norm": 0.10316372972900205, + "learning_rate": 0.0006939347624533885, + "loss": 1.6066, + "step": 4321 + }, + { + "epoch": 0.39212484122663765, + "grad_norm": 0.10363910695635278, + "learning_rate": 0.000693799329386609, + "loss": 1.6308, + "step": 4322 + }, + { + "epoch": 0.39221556886227543, + "grad_norm": 0.10237889835918675, + "learning_rate": 0.0006936638795852288, + "loss": 1.6199, + "step": 4323 + }, + { + "epoch": 0.39230629649791326, + "grad_norm": 0.10130835675930676, + 
"learning_rate": 0.0006935284130609443, + "loss": 1.5926, + "step": 4324 + }, + { + "epoch": 0.3923970241335511, + "grad_norm": 0.1035123675918707, + "learning_rate": 0.0006933929298254526, + "loss": 1.6083, + "step": 4325 + }, + { + "epoch": 0.3924877517691889, + "grad_norm": 0.10146762397363122, + "learning_rate": 0.0006932574298904528, + "loss": 1.5962, + "step": 4326 + }, + { + "epoch": 0.3925784794048267, + "grad_norm": 0.10696981863391432, + "learning_rate": 0.0006931219132676456, + "loss": 1.6083, + "step": 4327 + }, + { + "epoch": 0.39266920704046454, + "grad_norm": 0.10356303558228282, + "learning_rate": 0.0006929863799687327, + "loss": 1.6388, + "step": 4328 + }, + { + "epoch": 0.3927599346761023, + "grad_norm": 0.10448598023051268, + "learning_rate": 0.0006928508300054175, + "loss": 1.6156, + "step": 4329 + }, + { + "epoch": 0.39285066231174015, + "grad_norm": 0.10221516914315376, + "learning_rate": 0.0006927152633894046, + "loss": 1.5949, + "step": 4330 + }, + { + "epoch": 0.392941389947378, + "grad_norm": 0.10704809783903187, + "learning_rate": 0.0006925796801324005, + "loss": 1.6364, + "step": 4331 + }, + { + "epoch": 0.39303211758301576, + "grad_norm": 0.10093879093586776, + "learning_rate": 0.0006924440802461123, + "loss": 1.585, + "step": 4332 + }, + { + "epoch": 0.3931228452186536, + "grad_norm": 0.10223649186721591, + "learning_rate": 0.0006923084637422497, + "loss": 1.5692, + "step": 4333 + }, + { + "epoch": 0.3932135728542914, + "grad_norm": 0.10616845482802578, + "learning_rate": 0.0006921728306325227, + "loss": 1.6365, + "step": 4334 + }, + { + "epoch": 0.39330430048992926, + "grad_norm": 0.10479457068196486, + "learning_rate": 0.0006920371809286437, + "loss": 1.6424, + "step": 4335 + }, + { + "epoch": 0.39339502812556704, + "grad_norm": 0.10472842249014573, + "learning_rate": 0.0006919015146423258, + "loss": 1.6483, + "step": 4336 + }, + { + "epoch": 0.39348575576120487, + "grad_norm": 0.10261143321976009, + "learning_rate": 0.0006917658317852837, + "loss": 1.6308, + "step": 4337 + }, + { + "epoch": 0.3935764833968427, + "grad_norm": 0.10212164692328357, + "learning_rate": 0.0006916301323692338, + "loss": 1.6326, + "step": 4338 + }, + { + "epoch": 0.3936672110324805, + "grad_norm": 0.1040088919046243, + "learning_rate": 0.0006914944164058936, + "loss": 1.6438, + "step": 4339 + }, + { + "epoch": 0.3937579386681183, + "grad_norm": 0.10535647748621642, + "learning_rate": 0.0006913586839069825, + "loss": 1.6108, + "step": 4340 + }, + { + "epoch": 0.39384866630375615, + "grad_norm": 0.10811162340507742, + "learning_rate": 0.0006912229348842207, + "loss": 1.5988, + "step": 4341 + }, + { + "epoch": 0.3939393939393939, + "grad_norm": 0.10404204655221004, + "learning_rate": 0.0006910871693493304, + "loss": 1.6091, + "step": 4342 + }, + { + "epoch": 0.39403012157503176, + "grad_norm": 0.10015126518951938, + "learning_rate": 0.0006909513873140349, + "loss": 1.5742, + "step": 4343 + }, + { + "epoch": 0.3941208492106696, + "grad_norm": 0.10516509714457203, + "learning_rate": 0.0006908155887900588, + "loss": 1.6186, + "step": 4344 + }, + { + "epoch": 0.39421157684630737, + "grad_norm": 0.10641171699545826, + "learning_rate": 0.0006906797737891285, + "loss": 1.5859, + "step": 4345 + }, + { + "epoch": 0.3943023044819452, + "grad_norm": 0.10555448092675938, + "learning_rate": 0.0006905439423229718, + "loss": 1.6395, + "step": 4346 + }, + { + "epoch": 0.39439303211758303, + "grad_norm": 0.10603853975645115, + "learning_rate": 0.0006904080944033174, + "loss": 1.6077, + "step": 4347 + 
}, + { + "epoch": 0.3944837597532208, + "grad_norm": 0.10697788196300448, + "learning_rate": 0.0006902722300418963, + "loss": 1.5981, + "step": 4348 + }, + { + "epoch": 0.39457448738885864, + "grad_norm": 0.09977428733390542, + "learning_rate": 0.0006901363492504397, + "loss": 1.5726, + "step": 4349 + }, + { + "epoch": 0.3946652150244965, + "grad_norm": 0.11102763130031103, + "learning_rate": 0.0006900004520406814, + "loss": 1.6267, + "step": 4350 + }, + { + "epoch": 0.39475594266013425, + "grad_norm": 0.1062838272926244, + "learning_rate": 0.0006898645384243563, + "loss": 1.6172, + "step": 4351 + }, + { + "epoch": 0.3948466702957721, + "grad_norm": 0.10607860659687358, + "learning_rate": 0.0006897286084132002, + "loss": 1.6077, + "step": 4352 + }, + { + "epoch": 0.3949373979314099, + "grad_norm": 0.10422192654453873, + "learning_rate": 0.0006895926620189508, + "loss": 1.6299, + "step": 4353 + }, + { + "epoch": 0.39502812556704775, + "grad_norm": 0.10680900723120791, + "learning_rate": 0.0006894566992533469, + "loss": 1.6415, + "step": 4354 + }, + { + "epoch": 0.3951188532026855, + "grad_norm": 0.10352330832361314, + "learning_rate": 0.0006893207201281293, + "loss": 1.6027, + "step": 4355 + }, + { + "epoch": 0.39520958083832336, + "grad_norm": 0.10219710541080758, + "learning_rate": 0.0006891847246550397, + "loss": 1.5959, + "step": 4356 + }, + { + "epoch": 0.3953003084739612, + "grad_norm": 0.10238718476377394, + "learning_rate": 0.0006890487128458209, + "loss": 1.6293, + "step": 4357 + }, + { + "epoch": 0.39539103610959897, + "grad_norm": 0.10309413198419498, + "learning_rate": 0.0006889126847122182, + "loss": 1.624, + "step": 4358 + }, + { + "epoch": 0.3954817637452368, + "grad_norm": 0.10165930711358763, + "learning_rate": 0.0006887766402659773, + "loss": 1.5958, + "step": 4359 + }, + { + "epoch": 0.39557249138087464, + "grad_norm": 0.10170986158108096, + "learning_rate": 0.0006886405795188456, + "loss": 1.6072, + "step": 4360 + }, + { + "epoch": 0.3956632190165124, + "grad_norm": 0.10005857462950857, + "learning_rate": 0.0006885045024825721, + "loss": 1.6217, + "step": 4361 + }, + { + "epoch": 0.39575394665215025, + "grad_norm": 0.10052386130319627, + "learning_rate": 0.000688368409168907, + "loss": 1.6167, + "step": 4362 + }, + { + "epoch": 0.3958446742877881, + "grad_norm": 0.1071726656559452, + "learning_rate": 0.0006882322995896021, + "loss": 1.619, + "step": 4363 + }, + { + "epoch": 0.39593540192342586, + "grad_norm": 0.107548310390941, + "learning_rate": 0.0006880961737564103, + "loss": 1.6209, + "step": 4364 + }, + { + "epoch": 0.3960261295590637, + "grad_norm": 0.10940782658885896, + "learning_rate": 0.0006879600316810861, + "loss": 1.6224, + "step": 4365 + }, + { + "epoch": 0.3961168571947015, + "grad_norm": 0.10857787500947967, + "learning_rate": 0.0006878238733753856, + "loss": 1.6495, + "step": 4366 + }, + { + "epoch": 0.3962075848303393, + "grad_norm": 0.10359185390649192, + "learning_rate": 0.0006876876988510659, + "loss": 1.6199, + "step": 4367 + }, + { + "epoch": 0.39629831246597713, + "grad_norm": 0.10133457373613346, + "learning_rate": 0.0006875515081198858, + "loss": 1.6334, + "step": 4368 + }, + { + "epoch": 0.39638904010161496, + "grad_norm": 0.10474773397704965, + "learning_rate": 0.0006874153011936051, + "loss": 1.6295, + "step": 4369 + }, + { + "epoch": 0.39647976773725274, + "grad_norm": 0.10911046749716183, + "learning_rate": 0.0006872790780839858, + "loss": 1.5985, + "step": 4370 + }, + { + "epoch": 0.3965704953728906, + "grad_norm": 0.11177774528882327, + 
"learning_rate": 0.0006871428388027904, + "loss": 1.6372, + "step": 4371 + }, + { + "epoch": 0.3966612230085284, + "grad_norm": 0.10259971587305404, + "learning_rate": 0.0006870065833617833, + "loss": 1.6002, + "step": 4372 + }, + { + "epoch": 0.39675195064416624, + "grad_norm": 0.09955595048035128, + "learning_rate": 0.0006868703117727302, + "loss": 1.5989, + "step": 4373 + }, + { + "epoch": 0.396842678279804, + "grad_norm": 0.10550041529936774, + "learning_rate": 0.000686734024047398, + "loss": 1.6399, + "step": 4374 + }, + { + "epoch": 0.39693340591544185, + "grad_norm": 0.10578654880541762, + "learning_rate": 0.0006865977201975555, + "loss": 1.5846, + "step": 4375 + }, + { + "epoch": 0.3970241335510797, + "grad_norm": 0.10524503948360124, + "learning_rate": 0.0006864614002349722, + "loss": 1.6536, + "step": 4376 + }, + { + "epoch": 0.39711486118671746, + "grad_norm": 0.10669551355766321, + "learning_rate": 0.0006863250641714195, + "loss": 1.6271, + "step": 4377 + }, + { + "epoch": 0.3972055888223553, + "grad_norm": 0.10340954799877755, + "learning_rate": 0.0006861887120186701, + "loss": 1.6558, + "step": 4378 + }, + { + "epoch": 0.3972963164579931, + "grad_norm": 0.10059159405064706, + "learning_rate": 0.000686052343788498, + "loss": 1.6357, + "step": 4379 + }, + { + "epoch": 0.3973870440936309, + "grad_norm": 0.101177143026381, + "learning_rate": 0.0006859159594926786, + "loss": 1.6298, + "step": 4380 + }, + { + "epoch": 0.39747777172926874, + "grad_norm": 0.09808653613843958, + "learning_rate": 0.0006857795591429885, + "loss": 1.6119, + "step": 4381 + }, + { + "epoch": 0.39756849936490657, + "grad_norm": 0.10552677902473422, + "learning_rate": 0.0006856431427512065, + "loss": 1.647, + "step": 4382 + }, + { + "epoch": 0.39765922700054435, + "grad_norm": 0.10593541924882587, + "learning_rate": 0.0006855067103291114, + "loss": 1.6424, + "step": 4383 + }, + { + "epoch": 0.3977499546361822, + "grad_norm": 0.10192128341530407, + "learning_rate": 0.0006853702618884845, + "loss": 1.6436, + "step": 4384 + }, + { + "epoch": 0.39784068227182, + "grad_norm": 0.1016977550099834, + "learning_rate": 0.0006852337974411085, + "loss": 1.6264, + "step": 4385 + }, + { + "epoch": 0.3979314099074578, + "grad_norm": 0.10695372869887013, + "learning_rate": 0.0006850973169987664, + "loss": 1.6038, + "step": 4386 + }, + { + "epoch": 0.3980221375430956, + "grad_norm": 0.1062151840130557, + "learning_rate": 0.0006849608205732441, + "loss": 1.6126, + "step": 4387 + }, + { + "epoch": 0.39811286517873345, + "grad_norm": 0.10275837433564335, + "learning_rate": 0.0006848243081763273, + "loss": 1.6343, + "step": 4388 + }, + { + "epoch": 0.39820359281437123, + "grad_norm": 0.10427031807247955, + "learning_rate": 0.0006846877798198044, + "loss": 1.6082, + "step": 4389 + }, + { + "epoch": 0.39829432045000907, + "grad_norm": 0.10174543102387403, + "learning_rate": 0.0006845512355154646, + "loss": 1.6135, + "step": 4390 + }, + { + "epoch": 0.3983850480856469, + "grad_norm": 0.10942443078601266, + "learning_rate": 0.0006844146752750984, + "loss": 1.6413, + "step": 4391 + }, + { + "epoch": 0.39847577572128473, + "grad_norm": 0.10145538725005131, + "learning_rate": 0.0006842780991104977, + "loss": 1.6226, + "step": 4392 + }, + { + "epoch": 0.3985665033569225, + "grad_norm": 0.10415523072046733, + "learning_rate": 0.000684141507033456, + "loss": 1.6473, + "step": 4393 + }, + { + "epoch": 0.39865723099256034, + "grad_norm": 0.10712077124987067, + "learning_rate": 0.000684004899055768, + "loss": 1.6102, + "step": 4394 + }, + { 
+ "epoch": 0.3987479586281982, + "grad_norm": 0.10657164050601103, + "learning_rate": 0.00068386827518923, + "loss": 1.6506, + "step": 4395 + }, + { + "epoch": 0.39883868626383595, + "grad_norm": 0.10791013265628172, + "learning_rate": 0.0006837316354456391, + "loss": 1.6379, + "step": 4396 + }, + { + "epoch": 0.3989294138994738, + "grad_norm": 0.10107400317254354, + "learning_rate": 0.0006835949798367946, + "loss": 1.6398, + "step": 4397 + }, + { + "epoch": 0.3990201415351116, + "grad_norm": 0.1115699105806856, + "learning_rate": 0.0006834583083744964, + "loss": 1.6234, + "step": 4398 + }, + { + "epoch": 0.3991108691707494, + "grad_norm": 0.1040970121017878, + "learning_rate": 0.0006833216210705463, + "loss": 1.6048, + "step": 4399 + }, + { + "epoch": 0.3992015968063872, + "grad_norm": 0.10659223980170751, + "learning_rate": 0.0006831849179367472, + "loss": 1.6046, + "step": 4400 + }, + { + "epoch": 0.39929232444202506, + "grad_norm": 0.1058130860325431, + "learning_rate": 0.0006830481989849034, + "loss": 1.6332, + "step": 4401 + }, + { + "epoch": 0.39938305207766284, + "grad_norm": 0.10696216200153642, + "learning_rate": 0.0006829114642268209, + "loss": 1.6241, + "step": 4402 + }, + { + "epoch": 0.39947377971330067, + "grad_norm": 0.1036273484412382, + "learning_rate": 0.0006827747136743061, + "loss": 1.6435, + "step": 4403 + }, + { + "epoch": 0.3995645073489385, + "grad_norm": 0.10787172686791928, + "learning_rate": 0.0006826379473391681, + "loss": 1.6113, + "step": 4404 + }, + { + "epoch": 0.3996552349845763, + "grad_norm": 0.10206873886765884, + "learning_rate": 0.0006825011652332163, + "loss": 1.6218, + "step": 4405 + }, + { + "epoch": 0.3997459626202141, + "grad_norm": 0.10241368667090392, + "learning_rate": 0.000682364367368262, + "loss": 1.6604, + "step": 4406 + }, + { + "epoch": 0.39983669025585195, + "grad_norm": 0.10088878220614506, + "learning_rate": 0.0006822275537561178, + "loss": 1.6496, + "step": 4407 + }, + { + "epoch": 0.3999274178914897, + "grad_norm": 0.10292576218392073, + "learning_rate": 0.0006820907244085973, + "loss": 1.604, + "step": 4408 + }, + { + "epoch": 0.40001814552712756, + "grad_norm": 0.10686852700442583, + "learning_rate": 0.0006819538793375161, + "loss": 1.5792, + "step": 4409 + }, + { + "epoch": 0.4001088731627654, + "grad_norm": 0.09965642877586564, + "learning_rate": 0.0006818170185546905, + "loss": 1.6194, + "step": 4410 + }, + { + "epoch": 0.4001996007984032, + "grad_norm": 0.10269409468566941, + "learning_rate": 0.0006816801420719385, + "loss": 1.631, + "step": 4411 + }, + { + "epoch": 0.400290328434041, + "grad_norm": 0.10639900024176538, + "learning_rate": 0.0006815432499010795, + "loss": 1.596, + "step": 4412 + }, + { + "epoch": 0.40038105606967883, + "grad_norm": 0.10354225580668087, + "learning_rate": 0.0006814063420539342, + "loss": 1.6139, + "step": 4413 + }, + { + "epoch": 0.40047178370531666, + "grad_norm": 0.10556870352319606, + "learning_rate": 0.0006812694185423245, + "loss": 1.6607, + "step": 4414 + }, + { + "epoch": 0.40056251134095444, + "grad_norm": 0.10645740951645741, + "learning_rate": 0.0006811324793780738, + "loss": 1.601, + "step": 4415 + }, + { + "epoch": 0.4006532389765923, + "grad_norm": 0.10174526736960007, + "learning_rate": 0.0006809955245730068, + "loss": 1.613, + "step": 4416 + }, + { + "epoch": 0.4007439666122301, + "grad_norm": 0.10274519067774446, + "learning_rate": 0.0006808585541389495, + "loss": 1.6214, + "step": 4417 + }, + { + "epoch": 0.4008346942478679, + "grad_norm": 0.10294186757203251, + "learning_rate": 
0.0006807215680877293, + "loss": 1.5967, + "step": 4418 + }, + { + "epoch": 0.4009254218835057, + "grad_norm": 0.10611338497010604, + "learning_rate": 0.0006805845664311754, + "loss": 1.6496, + "step": 4419 + }, + { + "epoch": 0.40101614951914355, + "grad_norm": 0.10376263153004504, + "learning_rate": 0.0006804475491811172, + "loss": 1.6134, + "step": 4420 + }, + { + "epoch": 0.4011068771547813, + "grad_norm": 0.10336917450604313, + "learning_rate": 0.0006803105163493868, + "loss": 1.6206, + "step": 4421 + }, + { + "epoch": 0.40119760479041916, + "grad_norm": 0.10484635307067657, + "learning_rate": 0.0006801734679478166, + "loss": 1.6215, + "step": 4422 + }, + { + "epoch": 0.401288332426057, + "grad_norm": 0.1056214003992392, + "learning_rate": 0.0006800364039882408, + "loss": 1.594, + "step": 4423 + }, + { + "epoch": 0.40137906006169477, + "grad_norm": 0.10748545991797609, + "learning_rate": 0.0006798993244824952, + "loss": 1.635, + "step": 4424 + }, + { + "epoch": 0.4014697876973326, + "grad_norm": 0.10714957957882532, + "learning_rate": 0.0006797622294424164, + "loss": 1.5905, + "step": 4425 + }, + { + "epoch": 0.40156051533297044, + "grad_norm": 0.10456460726040777, + "learning_rate": 0.0006796251188798426, + "loss": 1.6339, + "step": 4426 + }, + { + "epoch": 0.4016512429686082, + "grad_norm": 0.10328968194873993, + "learning_rate": 0.0006794879928066131, + "loss": 1.5858, + "step": 4427 + }, + { + "epoch": 0.40174197060424605, + "grad_norm": 0.10596708270410557, + "learning_rate": 0.000679350851234569, + "loss": 1.6499, + "step": 4428 + }, + { + "epoch": 0.4018326982398839, + "grad_norm": 0.09995998357802094, + "learning_rate": 0.0006792136941755527, + "loss": 1.6339, + "step": 4429 + }, + { + "epoch": 0.4019234258755217, + "grad_norm": 0.10044396455264691, + "learning_rate": 0.0006790765216414073, + "loss": 1.6238, + "step": 4430 + }, + { + "epoch": 0.4020141535111595, + "grad_norm": 0.10399664473241985, + "learning_rate": 0.0006789393336439779, + "loss": 1.6319, + "step": 4431 + }, + { + "epoch": 0.4021048811467973, + "grad_norm": 0.10506638996052853, + "learning_rate": 0.0006788021301951107, + "loss": 1.6197, + "step": 4432 + }, + { + "epoch": 0.40219560878243515, + "grad_norm": 0.10112096632312224, + "learning_rate": 0.0006786649113066532, + "loss": 1.6083, + "step": 4433 + }, + { + "epoch": 0.40228633641807293, + "grad_norm": 0.10077458264237421, + "learning_rate": 0.0006785276769904542, + "loss": 1.5895, + "step": 4434 + }, + { + "epoch": 0.40237706405371076, + "grad_norm": 0.10650006728212442, + "learning_rate": 0.0006783904272583641, + "loss": 1.5723, + "step": 4435 + }, + { + "epoch": 0.4024677916893486, + "grad_norm": 0.10628275798193058, + "learning_rate": 0.0006782531621222343, + "loss": 1.6303, + "step": 4436 + }, + { + "epoch": 0.4025585193249864, + "grad_norm": 0.10728406340949925, + "learning_rate": 0.0006781158815939177, + "loss": 1.6139, + "step": 4437 + }, + { + "epoch": 0.4026492469606242, + "grad_norm": 0.10788400324889583, + "learning_rate": 0.0006779785856852683, + "loss": 1.6676, + "step": 4438 + }, + { + "epoch": 0.40273997459626204, + "grad_norm": 0.10297065151732454, + "learning_rate": 0.0006778412744081419, + "loss": 1.5962, + "step": 4439 + }, + { + "epoch": 0.4028307022318998, + "grad_norm": 0.10885112043127926, + "learning_rate": 0.0006777039477743953, + "loss": 1.6429, + "step": 4440 + }, + { + "epoch": 0.40292142986753765, + "grad_norm": 0.10431622451807636, + "learning_rate": 0.0006775666057958865, + "loss": 1.6124, + "step": 4441 + }, + { + "epoch": 
0.4030121575031755, + "grad_norm": 0.100799005866515, + "learning_rate": 0.0006774292484844753, + "loss": 1.6094, + "step": 4442 + }, + { + "epoch": 0.40310288513881326, + "grad_norm": 0.10579190105036565, + "learning_rate": 0.0006772918758520221, + "loss": 1.6343, + "step": 4443 + }, + { + "epoch": 0.4031936127744511, + "grad_norm": 0.1030440086941071, + "learning_rate": 0.0006771544879103895, + "loss": 1.5904, + "step": 4444 + }, + { + "epoch": 0.4032843404100889, + "grad_norm": 0.10442242933427721, + "learning_rate": 0.0006770170846714407, + "loss": 1.6102, + "step": 4445 + }, + { + "epoch": 0.4033750680457267, + "grad_norm": 0.10170610894005042, + "learning_rate": 0.0006768796661470405, + "loss": 1.6141, + "step": 4446 + }, + { + "epoch": 0.40346579568136454, + "grad_norm": 0.09968609419254089, + "learning_rate": 0.0006767422323490551, + "loss": 1.66, + "step": 4447 + }, + { + "epoch": 0.40355652331700237, + "grad_norm": 0.10602670865708255, + "learning_rate": 0.0006766047832893519, + "loss": 1.6766, + "step": 4448 + }, + { + "epoch": 0.4036472509526402, + "grad_norm": 0.10221030103404925, + "learning_rate": 0.0006764673189797996, + "loss": 1.6495, + "step": 4449 + }, + { + "epoch": 0.403737978588278, + "grad_norm": 0.10005206501969015, + "learning_rate": 0.0006763298394322683, + "loss": 1.627, + "step": 4450 + }, + { + "epoch": 0.4038287062239158, + "grad_norm": 0.10292733770877664, + "learning_rate": 0.0006761923446586294, + "loss": 1.6145, + "step": 4451 + }, + { + "epoch": 0.40391943385955364, + "grad_norm": 0.10178456590109486, + "learning_rate": 0.0006760548346707554, + "loss": 1.6751, + "step": 4452 + }, + { + "epoch": 0.4040101614951914, + "grad_norm": 0.10254685252110168, + "learning_rate": 0.0006759173094805209, + "loss": 1.6255, + "step": 4453 + }, + { + "epoch": 0.40410088913082925, + "grad_norm": 0.1029080982107371, + "learning_rate": 0.0006757797690998004, + "loss": 1.6301, + "step": 4454 + }, + { + "epoch": 0.4041916167664671, + "grad_norm": 0.10017257589214461, + "learning_rate": 0.0006756422135404711, + "loss": 1.6071, + "step": 4455 + }, + { + "epoch": 0.40428234440210487, + "grad_norm": 0.10630973311257301, + "learning_rate": 0.0006755046428144107, + "loss": 1.6333, + "step": 4456 + }, + { + "epoch": 0.4043730720377427, + "grad_norm": 0.10354226688393083, + "learning_rate": 0.0006753670569334986, + "loss": 1.6031, + "step": 4457 + }, + { + "epoch": 0.40446379967338053, + "grad_norm": 0.1035230383103849, + "learning_rate": 0.0006752294559096152, + "loss": 1.5964, + "step": 4458 + }, + { + "epoch": 0.4045545273090183, + "grad_norm": 0.10365332967194779, + "learning_rate": 0.0006750918397546425, + "loss": 1.6225, + "step": 4459 + }, + { + "epoch": 0.40464525494465614, + "grad_norm": 0.10787890582944293, + "learning_rate": 0.0006749542084804636, + "loss": 1.6428, + "step": 4460 + }, + { + "epoch": 0.404735982580294, + "grad_norm": 0.10514540351716677, + "learning_rate": 0.0006748165620989631, + "loss": 1.583, + "step": 4461 + }, + { + "epoch": 0.40482671021593175, + "grad_norm": 0.10777584365561205, + "learning_rate": 0.0006746789006220266, + "loss": 1.6225, + "step": 4462 + }, + { + "epoch": 0.4049174378515696, + "grad_norm": 0.10662517120460284, + "learning_rate": 0.0006745412240615414, + "loss": 1.6414, + "step": 4463 + }, + { + "epoch": 0.4050081654872074, + "grad_norm": 0.10613836221051338, + "learning_rate": 0.0006744035324293957, + "loss": 1.6384, + "step": 4464 + }, + { + "epoch": 0.4050988931228452, + "grad_norm": 0.10070886142691676, + "learning_rate": 
0.0006742658257374793, + "loss": 1.6277, + "step": 4465 + }, + { + "epoch": 0.405189620758483, + "grad_norm": 0.09759208251845373, + "learning_rate": 0.000674128103997683, + "loss": 1.6716, + "step": 4466 + }, + { + "epoch": 0.40528034839412086, + "grad_norm": 0.10304923098630092, + "learning_rate": 0.0006739903672218994, + "loss": 1.6637, + "step": 4467 + }, + { + "epoch": 0.40537107602975864, + "grad_norm": 0.10469040939115772, + "learning_rate": 0.0006738526154220221, + "loss": 1.6534, + "step": 4468 + }, + { + "epoch": 0.40546180366539647, + "grad_norm": 0.10325915648059078, + "learning_rate": 0.0006737148486099456, + "loss": 1.6184, + "step": 4469 + }, + { + "epoch": 0.4055525313010343, + "grad_norm": 0.1042198080898624, + "learning_rate": 0.0006735770667975665, + "loss": 1.5942, + "step": 4470 + }, + { + "epoch": 0.40564325893667214, + "grad_norm": 0.10405999176119322, + "learning_rate": 0.0006734392699967819, + "loss": 1.6345, + "step": 4471 + }, + { + "epoch": 0.4057339865723099, + "grad_norm": 0.10646017852612556, + "learning_rate": 0.0006733014582194908, + "loss": 1.5528, + "step": 4472 + }, + { + "epoch": 0.40582471420794775, + "grad_norm": 0.10533679479741645, + "learning_rate": 0.0006731636314775935, + "loss": 1.6096, + "step": 4473 + }, + { + "epoch": 0.4059154418435856, + "grad_norm": 0.10536499000048781, + "learning_rate": 0.0006730257897829908, + "loss": 1.6077, + "step": 4474 + }, + { + "epoch": 0.40600616947922336, + "grad_norm": 0.10318300539504181, + "learning_rate": 0.0006728879331475859, + "loss": 1.5956, + "step": 4475 + }, + { + "epoch": 0.4060968971148612, + "grad_norm": 0.10414342963072423, + "learning_rate": 0.0006727500615832823, + "loss": 1.5751, + "step": 4476 + }, + { + "epoch": 0.406187624750499, + "grad_norm": 0.10370697849004545, + "learning_rate": 0.0006726121751019855, + "loss": 1.6608, + "step": 4477 + }, + { + "epoch": 0.4062783523861368, + "grad_norm": 0.10490635265476608, + "learning_rate": 0.0006724742737156018, + "loss": 1.595, + "step": 4478 + }, + { + "epoch": 0.40636908002177463, + "grad_norm": 0.10564259055675596, + "learning_rate": 0.0006723363574360393, + "loss": 1.5907, + "step": 4479 + }, + { + "epoch": 0.40645980765741246, + "grad_norm": 0.10176189846382475, + "learning_rate": 0.000672198426275207, + "loss": 1.6327, + "step": 4480 + }, + { + "epoch": 0.40655053529305024, + "grad_norm": 0.10520563109149492, + "learning_rate": 0.0006720604802450151, + "loss": 1.6149, + "step": 4481 + }, + { + "epoch": 0.4066412629286881, + "grad_norm": 0.10187419535012031, + "learning_rate": 0.0006719225193573754, + "loss": 1.6386, + "step": 4482 + }, + { + "epoch": 0.4067319905643259, + "grad_norm": 0.10294588909913292, + "learning_rate": 0.0006717845436242007, + "loss": 1.599, + "step": 4483 + }, + { + "epoch": 0.4068227181999637, + "grad_norm": 0.10202328545849028, + "learning_rate": 0.0006716465530574055, + "loss": 1.6127, + "step": 4484 + }, + { + "epoch": 0.4069134458356015, + "grad_norm": 0.102308000367207, + "learning_rate": 0.0006715085476689051, + "loss": 1.5963, + "step": 4485 + }, + { + "epoch": 0.40700417347123935, + "grad_norm": 0.10451981994995503, + "learning_rate": 0.0006713705274706162, + "loss": 1.6556, + "step": 4486 + }, + { + "epoch": 0.4070949011068771, + "grad_norm": 0.10380972708172716, + "learning_rate": 0.0006712324924744572, + "loss": 1.5506, + "step": 4487 + }, + { + "epoch": 0.40718562874251496, + "grad_norm": 0.10289517322324825, + "learning_rate": 0.0006710944426923471, + "loss": 1.5986, + "step": 4488 + }, + { + "epoch": 
0.4072763563781528, + "grad_norm": 0.10423245049737886, + "learning_rate": 0.0006709563781362067, + "loss": 1.6019, + "step": 4489 + }, + { + "epoch": 0.4073670840137906, + "grad_norm": 0.1010927041509333, + "learning_rate": 0.0006708182988179579, + "loss": 1.6116, + "step": 4490 + }, + { + "epoch": 0.4074578116494284, + "grad_norm": 0.10921764371466942, + "learning_rate": 0.0006706802047495238, + "loss": 1.6488, + "step": 4491 + }, + { + "epoch": 0.40754853928506624, + "grad_norm": 0.10578373137405642, + "learning_rate": 0.0006705420959428288, + "loss": 1.6218, + "step": 4492 + }, + { + "epoch": 0.40763926692070407, + "grad_norm": 0.10002444602246092, + "learning_rate": 0.0006704039724097988, + "loss": 1.6092, + "step": 4493 + }, + { + "epoch": 0.40772999455634185, + "grad_norm": 0.10345394620818778, + "learning_rate": 0.0006702658341623606, + "loss": 1.5872, + "step": 4494 + }, + { + "epoch": 0.4078207221919797, + "grad_norm": 0.103235217319018, + "learning_rate": 0.0006701276812124424, + "loss": 1.6068, + "step": 4495 + }, + { + "epoch": 0.4079114498276175, + "grad_norm": 0.1039405404231357, + "learning_rate": 0.000669989513571974, + "loss": 1.5991, + "step": 4496 + }, + { + "epoch": 0.4080021774632553, + "grad_norm": 0.10026438083956188, + "learning_rate": 0.000669851331252886, + "loss": 1.5909, + "step": 4497 + }, + { + "epoch": 0.4080929050988931, + "grad_norm": 0.1031386131874325, + "learning_rate": 0.0006697131342671104, + "loss": 1.6135, + "step": 4498 + }, + { + "epoch": 0.40818363273453095, + "grad_norm": 0.10649560569511611, + "learning_rate": 0.000669574922626581, + "loss": 1.5918, + "step": 4499 + }, + { + "epoch": 0.40827436037016873, + "grad_norm": 0.10309770871806807, + "learning_rate": 0.0006694366963432317, + "loss": 1.5891, + "step": 4500 + }, + { + "epoch": 0.40836508800580656, + "grad_norm": 0.10657327146301186, + "learning_rate": 0.0006692984554289987, + "loss": 1.6319, + "step": 4501 + }, + { + "epoch": 0.4084558156414444, + "grad_norm": 0.10271011041695363, + "learning_rate": 0.0006691601998958193, + "loss": 1.6396, + "step": 4502 + }, + { + "epoch": 0.4085465432770822, + "grad_norm": 0.10816093756594275, + "learning_rate": 0.0006690219297556316, + "loss": 1.6206, + "step": 4503 + }, + { + "epoch": 0.40863727091272, + "grad_norm": 0.10512063817600975, + "learning_rate": 0.0006688836450203754, + "loss": 1.623, + "step": 4504 + }, + { + "epoch": 0.40872799854835784, + "grad_norm": 0.10100124176209928, + "learning_rate": 0.0006687453457019916, + "loss": 1.6014, + "step": 4505 + }, + { + "epoch": 0.4088187261839956, + "grad_norm": 0.10423385948520339, + "learning_rate": 0.0006686070318124223, + "loss": 1.6391, + "step": 4506 + }, + { + "epoch": 0.40890945381963345, + "grad_norm": 0.10281787382803084, + "learning_rate": 0.000668468703363611, + "loss": 1.6503, + "step": 4507 + }, + { + "epoch": 0.4090001814552713, + "grad_norm": 0.10110003029941128, + "learning_rate": 0.0006683303603675022, + "loss": 1.6175, + "step": 4508 + }, + { + "epoch": 0.4090909090909091, + "grad_norm": 0.10188313700388624, + "learning_rate": 0.000668192002836042, + "loss": 1.604, + "step": 4509 + }, + { + "epoch": 0.4091816367265469, + "grad_norm": 0.10621130251437172, + "learning_rate": 0.0006680536307811777, + "loss": 1.5828, + "step": 4510 + }, + { + "epoch": 0.4092723643621847, + "grad_norm": 0.10646157950026341, + "learning_rate": 0.0006679152442148574, + "loss": 1.5895, + "step": 4511 + }, + { + "epoch": 0.40936309199782256, + "grad_norm": 0.10184174621318272, + "learning_rate": 
0.0006677768431490312, + "loss": 1.5952, + "step": 4512 + }, + { + "epoch": 0.40945381963346034, + "grad_norm": 0.10264239888230378, + "learning_rate": 0.0006676384275956498, + "loss": 1.6286, + "step": 4513 + }, + { + "epoch": 0.40954454726909817, + "grad_norm": 0.10356425419341776, + "learning_rate": 0.0006674999975666654, + "loss": 1.642, + "step": 4514 + }, + { + "epoch": 0.409635274904736, + "grad_norm": 0.10459773021930228, + "learning_rate": 0.0006673615530740317, + "loss": 1.646, + "step": 4515 + }, + { + "epoch": 0.4097260025403738, + "grad_norm": 0.10155328451197297, + "learning_rate": 0.000667223094129703, + "loss": 1.6254, + "step": 4516 + }, + { + "epoch": 0.4098167301760116, + "grad_norm": 0.10765054168993216, + "learning_rate": 0.0006670846207456356, + "loss": 1.6108, + "step": 4517 + }, + { + "epoch": 0.40990745781164944, + "grad_norm": 0.10370869481551874, + "learning_rate": 0.0006669461329337866, + "loss": 1.6517, + "step": 4518 + }, + { + "epoch": 0.4099981854472872, + "grad_norm": 0.10335181063887797, + "learning_rate": 0.0006668076307061145, + "loss": 1.6347, + "step": 4519 + }, + { + "epoch": 0.41008891308292506, + "grad_norm": 0.10755378050698937, + "learning_rate": 0.0006666691140745787, + "loss": 1.6389, + "step": 4520 + }, + { + "epoch": 0.4101796407185629, + "grad_norm": 0.10198884624691282, + "learning_rate": 0.0006665305830511405, + "loss": 1.6036, + "step": 4521 + }, + { + "epoch": 0.41027036835420067, + "grad_norm": 0.10089385151145795, + "learning_rate": 0.0006663920376477618, + "loss": 1.6277, + "step": 4522 + }, + { + "epoch": 0.4103610959898385, + "grad_norm": 0.10922579293962117, + "learning_rate": 0.0006662534778764061, + "loss": 1.6015, + "step": 4523 + }, + { + "epoch": 0.41045182362547633, + "grad_norm": 0.10492718573194142, + "learning_rate": 0.0006661149037490383, + "loss": 1.5979, + "step": 4524 + }, + { + "epoch": 0.4105425512611141, + "grad_norm": 0.10623101117289885, + "learning_rate": 0.0006659763152776237, + "loss": 1.6015, + "step": 4525 + }, + { + "epoch": 0.41063327889675194, + "grad_norm": 0.10274751600341378, + "learning_rate": 0.0006658377124741301, + "loss": 1.6251, + "step": 4526 + }, + { + "epoch": 0.4107240065323898, + "grad_norm": 0.10488672449960162, + "learning_rate": 0.0006656990953505255, + "loss": 1.5634, + "step": 4527 + }, + { + "epoch": 0.4108147341680276, + "grad_norm": 0.10521296928792648, + "learning_rate": 0.0006655604639187796, + "loss": 1.6127, + "step": 4528 + }, + { + "epoch": 0.4109054618036654, + "grad_norm": 0.10381263022166061, + "learning_rate": 0.0006654218181908633, + "loss": 1.6129, + "step": 4529 + }, + { + "epoch": 0.4109961894393032, + "grad_norm": 0.10265262939338843, + "learning_rate": 0.0006652831581787485, + "loss": 1.651, + "step": 4530 + }, + { + "epoch": 0.41108691707494105, + "grad_norm": 0.10311043638673704, + "learning_rate": 0.0006651444838944088, + "loss": 1.6104, + "step": 4531 + }, + { + "epoch": 0.4111776447105788, + "grad_norm": 0.10360076530722986, + "learning_rate": 0.0006650057953498185, + "loss": 1.6132, + "step": 4532 + }, + { + "epoch": 0.41126837234621666, + "grad_norm": 0.10033975880219663, + "learning_rate": 0.0006648670925569534, + "loss": 1.6467, + "step": 4533 + }, + { + "epoch": 0.4113590999818545, + "grad_norm": 0.10227852464524365, + "learning_rate": 0.0006647283755277907, + "loss": 1.6041, + "step": 4534 + }, + { + "epoch": 0.41144982761749227, + "grad_norm": 0.09916461774313766, + "learning_rate": 0.0006645896442743082, + "loss": 1.6167, + "step": 4535 + }, + { + "epoch": 
0.4115405552531301, + "grad_norm": 0.11182907446036668, + "learning_rate": 0.000664450898808486, + "loss": 1.5918, + "step": 4536 + }, + { + "epoch": 0.41163128288876794, + "grad_norm": 0.10411025367208662, + "learning_rate": 0.0006643121391423041, + "loss": 1.6198, + "step": 4537 + }, + { + "epoch": 0.4117220105244057, + "grad_norm": 0.10035132727816333, + "learning_rate": 0.000664173365287745, + "loss": 1.6455, + "step": 4538 + }, + { + "epoch": 0.41181273816004355, + "grad_norm": 0.10492964011849881, + "learning_rate": 0.0006640345772567917, + "loss": 1.5994, + "step": 4539 + }, + { + "epoch": 0.4119034657956814, + "grad_norm": 0.10615779369949278, + "learning_rate": 0.0006638957750614282, + "loss": 1.6489, + "step": 4540 + }, + { + "epoch": 0.41199419343131916, + "grad_norm": 0.10580625429472813, + "learning_rate": 0.0006637569587136406, + "loss": 1.6269, + "step": 4541 + }, + { + "epoch": 0.412084921066957, + "grad_norm": 0.10394127197901157, + "learning_rate": 0.0006636181282254154, + "loss": 1.5646, + "step": 4542 + }, + { + "epoch": 0.4121756487025948, + "grad_norm": 0.10318730083152587, + "learning_rate": 0.0006634792836087408, + "loss": 1.659, + "step": 4543 + }, + { + "epoch": 0.4122663763382326, + "grad_norm": 0.10213036416707169, + "learning_rate": 0.0006633404248756057, + "loss": 1.6016, + "step": 4544 + }, + { + "epoch": 0.41235710397387043, + "grad_norm": 0.10479908058589432, + "learning_rate": 0.000663201552038001, + "loss": 1.5787, + "step": 4545 + }, + { + "epoch": 0.41244783160950826, + "grad_norm": 0.10110672880696746, + "learning_rate": 0.0006630626651079184, + "loss": 1.5727, + "step": 4546 + }, + { + "epoch": 0.4125385592451461, + "grad_norm": 0.10090056247928118, + "learning_rate": 0.0006629237640973504, + "loss": 1.5522, + "step": 4547 + }, + { + "epoch": 0.4126292868807839, + "grad_norm": 0.10464360548902946, + "learning_rate": 0.0006627848490182914, + "loss": 1.6017, + "step": 4548 + }, + { + "epoch": 0.4127200145164217, + "grad_norm": 0.10634799813742862, + "learning_rate": 0.0006626459198827367, + "loss": 1.6212, + "step": 4549 + }, + { + "epoch": 0.41281074215205954, + "grad_norm": 0.10602297362140867, + "learning_rate": 0.0006625069767026829, + "loss": 1.6215, + "step": 4550 + }, + { + "epoch": 0.4129014697876973, + "grad_norm": 0.10381741618655041, + "learning_rate": 0.0006623680194901278, + "loss": 1.6021, + "step": 4551 + }, + { + "epoch": 0.41299219742333515, + "grad_norm": 0.10118977124850687, + "learning_rate": 0.0006622290482570701, + "loss": 1.6407, + "step": 4552 + }, + { + "epoch": 0.413082925058973, + "grad_norm": 0.1058631667096266, + "learning_rate": 0.0006620900630155102, + "loss": 1.5909, + "step": 4553 + }, + { + "epoch": 0.41317365269461076, + "grad_norm": 0.10524653845261332, + "learning_rate": 0.0006619510637774495, + "loss": 1.6182, + "step": 4554 + }, + { + "epoch": 0.4132643803302486, + "grad_norm": 0.1040494956915391, + "learning_rate": 0.0006618120505548906, + "loss": 1.6521, + "step": 4555 + }, + { + "epoch": 0.4133551079658864, + "grad_norm": 0.10570943102028747, + "learning_rate": 0.0006616730233598373, + "loss": 1.5881, + "step": 4556 + }, + { + "epoch": 0.4134458356015242, + "grad_norm": 0.10691878221470434, + "learning_rate": 0.0006615339822042945, + "loss": 1.6006, + "step": 4557 + }, + { + "epoch": 0.41353656323716204, + "grad_norm": 0.0999132390620876, + "learning_rate": 0.0006613949271002687, + "loss": 1.599, + "step": 4558 + }, + { + "epoch": 0.41362729087279987, + "grad_norm": 0.10467289689743121, + "learning_rate": 
0.000661255858059767, + "loss": 1.5838, + "step": 4559 + }, + { + "epoch": 0.41371801850843765, + "grad_norm": 0.10160026422959006, + "learning_rate": 0.0006611167750947984, + "loss": 1.6336, + "step": 4560 + }, + { + "epoch": 0.4138087461440755, + "grad_norm": 0.10233239095134296, + "learning_rate": 0.0006609776782173723, + "loss": 1.6245, + "step": 4561 + }, + { + "epoch": 0.4138994737797133, + "grad_norm": 0.10685141978169868, + "learning_rate": 0.0006608385674395, + "loss": 1.6536, + "step": 4562 + }, + { + "epoch": 0.4139902014153511, + "grad_norm": 0.09937956942054892, + "learning_rate": 0.0006606994427731938, + "loss": 1.6127, + "step": 4563 + }, + { + "epoch": 0.4140809290509889, + "grad_norm": 0.10150477327093382, + "learning_rate": 0.0006605603042304669, + "loss": 1.6247, + "step": 4564 + }, + { + "epoch": 0.41417165668662675, + "grad_norm": 0.10450722209304493, + "learning_rate": 0.0006604211518233343, + "loss": 1.6283, + "step": 4565 + }, + { + "epoch": 0.4142623843222646, + "grad_norm": 0.10296260630836788, + "learning_rate": 0.0006602819855638113, + "loss": 1.5802, + "step": 4566 + }, + { + "epoch": 0.41435311195790236, + "grad_norm": 0.10340677912851029, + "learning_rate": 0.0006601428054639154, + "loss": 1.5961, + "step": 4567 + }, + { + "epoch": 0.4144438395935402, + "grad_norm": 0.10480071754879491, + "learning_rate": 0.0006600036115356646, + "loss": 1.6231, + "step": 4568 + }, + { + "epoch": 0.41453456722917803, + "grad_norm": 0.10233895042051179, + "learning_rate": 0.0006598644037910784, + "loss": 1.6163, + "step": 4569 + }, + { + "epoch": 0.4146252948648158, + "grad_norm": 0.1037712499011874, + "learning_rate": 0.0006597251822421774, + "loss": 1.6449, + "step": 4570 + }, + { + "epoch": 0.41471602250045364, + "grad_norm": 0.10487545099763813, + "learning_rate": 0.0006595859469009833, + "loss": 1.6273, + "step": 4571 + }, + { + "epoch": 0.4148067501360915, + "grad_norm": 0.1078128438911901, + "learning_rate": 0.000659446697779519, + "loss": 1.6348, + "step": 4572 + }, + { + "epoch": 0.41489747777172925, + "grad_norm": 0.10592167153644506, + "learning_rate": 0.0006593074348898091, + "loss": 1.6199, + "step": 4573 + }, + { + "epoch": 0.4149882054073671, + "grad_norm": 0.10615849079184003, + "learning_rate": 0.0006591681582438786, + "loss": 1.6131, + "step": 4574 + }, + { + "epoch": 0.4150789330430049, + "grad_norm": 0.10160379413445338, + "learning_rate": 0.0006590288678537542, + "loss": 1.5948, + "step": 4575 + }, + { + "epoch": 0.4151696606786427, + "grad_norm": 0.10307013802146894, + "learning_rate": 0.0006588895637314636, + "loss": 1.6034, + "step": 4576 + }, + { + "epoch": 0.4152603883142805, + "grad_norm": 0.10475639396976728, + "learning_rate": 0.0006587502458890359, + "loss": 1.612, + "step": 4577 + }, + { + "epoch": 0.41535111594991836, + "grad_norm": 0.09874713973321776, + "learning_rate": 0.000658610914338501, + "loss": 1.5832, + "step": 4578 + }, + { + "epoch": 0.41544184358555614, + "grad_norm": 0.10101905242964526, + "learning_rate": 0.00065847156909189, + "loss": 1.6101, + "step": 4579 + }, + { + "epoch": 0.41553257122119397, + "grad_norm": 0.10147071755131745, + "learning_rate": 0.000658332210161236, + "loss": 1.6221, + "step": 4580 + }, + { + "epoch": 0.4156232988568318, + "grad_norm": 0.10384642705148693, + "learning_rate": 0.0006581928375585721, + "loss": 1.6148, + "step": 4581 + }, + { + "epoch": 0.4157140264924696, + "grad_norm": 0.10369642487862692, + "learning_rate": 0.0006580534512959336, + "loss": 1.6291, + "step": 4582 + }, + { + "epoch": 
0.4158047541281074, + "grad_norm": 0.09979721080106647, + "learning_rate": 0.000657914051385356, + "loss": 1.6069, + "step": 4583 + }, + { + "epoch": 0.41589548176374525, + "grad_norm": 0.10465847671502299, + "learning_rate": 0.000657774637838877, + "loss": 1.5892, + "step": 4584 + }, + { + "epoch": 0.4159862093993831, + "grad_norm": 0.10088902628366583, + "learning_rate": 0.0006576352106685348, + "loss": 1.5771, + "step": 4585 + }, + { + "epoch": 0.41607693703502086, + "grad_norm": 0.10202864554983489, + "learning_rate": 0.0006574957698863688, + "loss": 1.5792, + "step": 4586 + }, + { + "epoch": 0.4161676646706587, + "grad_norm": 0.10123742791175892, + "learning_rate": 0.0006573563155044198, + "loss": 1.6379, + "step": 4587 + }, + { + "epoch": 0.4162583923062965, + "grad_norm": 0.10435880715106925, + "learning_rate": 0.0006572168475347299, + "loss": 1.6184, + "step": 4588 + }, + { + "epoch": 0.4163491199419343, + "grad_norm": 0.10198203399025546, + "learning_rate": 0.0006570773659893419, + "loss": 1.6097, + "step": 4589 + }, + { + "epoch": 0.41643984757757213, + "grad_norm": 0.10378785164079986, + "learning_rate": 0.0006569378708803004, + "loss": 1.6511, + "step": 4590 + }, + { + "epoch": 0.41653057521320996, + "grad_norm": 0.10102900830035147, + "learning_rate": 0.0006567983622196504, + "loss": 1.5939, + "step": 4591 + }, + { + "epoch": 0.41662130284884774, + "grad_norm": 0.10304484659647746, + "learning_rate": 0.0006566588400194388, + "loss": 1.6509, + "step": 4592 + }, + { + "epoch": 0.4167120304844856, + "grad_norm": 0.10079545766640512, + "learning_rate": 0.0006565193042917134, + "loss": 1.6103, + "step": 4593 + }, + { + "epoch": 0.4168027581201234, + "grad_norm": 0.09993271325699064, + "learning_rate": 0.0006563797550485228, + "loss": 1.6322, + "step": 4594 + }, + { + "epoch": 0.4168934857557612, + "grad_norm": 0.10113713167807935, + "learning_rate": 0.0006562401923019174, + "loss": 1.5781, + "step": 4595 + }, + { + "epoch": 0.416984213391399, + "grad_norm": 0.1041103588161689, + "learning_rate": 0.0006561006160639484, + "loss": 1.6243, + "step": 4596 + }, + { + "epoch": 0.41707494102703685, + "grad_norm": 0.11280658967380867, + "learning_rate": 0.0006559610263466683, + "loss": 1.6373, + "step": 4597 + }, + { + "epoch": 0.4171656686626746, + "grad_norm": 0.10617661207290059, + "learning_rate": 0.0006558214231621305, + "loss": 1.6587, + "step": 4598 + }, + { + "epoch": 0.41725639629831246, + "grad_norm": 0.10173163492635541, + "learning_rate": 0.0006556818065223898, + "loss": 1.6075, + "step": 4599 + }, + { + "epoch": 0.4173471239339503, + "grad_norm": 0.10861048037357982, + "learning_rate": 0.0006555421764395021, + "loss": 1.6033, + "step": 4600 + }, + { + "epoch": 0.41743785156958807, + "grad_norm": 0.10345974584641553, + "learning_rate": 0.0006554025329255246, + "loss": 1.6272, + "step": 4601 + }, + { + "epoch": 0.4175285792052259, + "grad_norm": 0.09905315440277951, + "learning_rate": 0.0006552628759925156, + "loss": 1.6624, + "step": 4602 + }, + { + "epoch": 0.41761930684086374, + "grad_norm": 0.10232201507637934, + "learning_rate": 0.000655123205652534, + "loss": 1.63, + "step": 4603 + }, + { + "epoch": 0.41771003447650157, + "grad_norm": 0.10284425750279831, + "learning_rate": 0.0006549835219176411, + "loss": 1.6127, + "step": 4604 + }, + { + "epoch": 0.41780076211213935, + "grad_norm": 0.10363620443341819, + "learning_rate": 0.0006548438247998981, + "loss": 1.6453, + "step": 4605 + }, + { + "epoch": 0.4178914897477772, + "grad_norm": 0.10247714309952788, + "learning_rate": 
0.0006547041143113681, + "loss": 1.6242, + "step": 4606 + }, + { + "epoch": 0.417982217383415, + "grad_norm": 0.10376310585499987, + "learning_rate": 0.0006545643904641149, + "loss": 1.6258, + "step": 4607 + }, + { + "epoch": 0.4180729450190528, + "grad_norm": 0.10092886249687275, + "learning_rate": 0.0006544246532702038, + "loss": 1.6026, + "step": 4608 + }, + { + "epoch": 0.4181636726546906, + "grad_norm": 0.0999646418505867, + "learning_rate": 0.0006542849027417013, + "loss": 1.6607, + "step": 4609 + }, + { + "epoch": 0.41825440029032845, + "grad_norm": 0.09805243612155055, + "learning_rate": 0.0006541451388906746, + "loss": 1.6125, + "step": 4610 + }, + { + "epoch": 0.41834512792596623, + "grad_norm": 0.09997123596629025, + "learning_rate": 0.0006540053617291924, + "loss": 1.6173, + "step": 4611 + }, + { + "epoch": 0.41843585556160406, + "grad_norm": 0.1022894504316367, + "learning_rate": 0.0006538655712693246, + "loss": 1.6461, + "step": 4612 + }, + { + "epoch": 0.4185265831972419, + "grad_norm": 0.10984281710279124, + "learning_rate": 0.0006537257675231419, + "loss": 1.6347, + "step": 4613 + }, + { + "epoch": 0.4186173108328797, + "grad_norm": 0.10315836566510586, + "learning_rate": 0.0006535859505027167, + "loss": 1.6217, + "step": 4614 + }, + { + "epoch": 0.4187080384685175, + "grad_norm": 0.10410912935707269, + "learning_rate": 0.0006534461202201219, + "loss": 1.6407, + "step": 4615 + }, + { + "epoch": 0.41879876610415534, + "grad_norm": 0.10408701651858994, + "learning_rate": 0.0006533062766874322, + "loss": 1.6064, + "step": 4616 + }, + { + "epoch": 0.4188894937397931, + "grad_norm": 0.10320596997747547, + "learning_rate": 0.0006531664199167227, + "loss": 1.6222, + "step": 4617 + }, + { + "epoch": 0.41898022137543095, + "grad_norm": 0.10067826677104691, + "learning_rate": 0.0006530265499200702, + "loss": 1.5991, + "step": 4618 + }, + { + "epoch": 0.4190709490110688, + "grad_norm": 0.10329328348324077, + "learning_rate": 0.0006528866667095528, + "loss": 1.6293, + "step": 4619 + }, + { + "epoch": 0.41916167664670656, + "grad_norm": 0.1046030752023794, + "learning_rate": 0.000652746770297249, + "loss": 1.64, + "step": 4620 + }, + { + "epoch": 0.4192524042823444, + "grad_norm": 0.10098506523196887, + "learning_rate": 0.0006526068606952394, + "loss": 1.6404, + "step": 4621 + }, + { + "epoch": 0.4193431319179822, + "grad_norm": 0.10000207785523572, + "learning_rate": 0.0006524669379156045, + "loss": 1.6072, + "step": 4622 + }, + { + "epoch": 0.41943385955362006, + "grad_norm": 0.10594501025243815, + "learning_rate": 0.0006523270019704271, + "loss": 1.6614, + "step": 4623 + }, + { + "epoch": 0.41952458718925784, + "grad_norm": 0.10213230616021554, + "learning_rate": 0.0006521870528717909, + "loss": 1.626, + "step": 4624 + }, + { + "epoch": 0.41961531482489567, + "grad_norm": 0.10323867525331075, + "learning_rate": 0.0006520470906317801, + "loss": 1.6129, + "step": 4625 + }, + { + "epoch": 0.4197060424605335, + "grad_norm": 0.1015349278916297, + "learning_rate": 0.0006519071152624805, + "loss": 1.6066, + "step": 4626 + }, + { + "epoch": 0.4197967700961713, + "grad_norm": 0.10152603176690476, + "learning_rate": 0.0006517671267759792, + "loss": 1.5417, + "step": 4627 + }, + { + "epoch": 0.4198874977318091, + "grad_norm": 0.10054473196318181, + "learning_rate": 0.0006516271251843641, + "loss": 1.6551, + "step": 4628 + }, + { + "epoch": 0.41997822536744694, + "grad_norm": 0.1023211028110914, + "learning_rate": 0.0006514871104997246, + "loss": 1.6408, + "step": 4629 + }, + { + "epoch": 
0.4200689530030847, + "grad_norm": 0.10104383731231964, + "learning_rate": 0.0006513470827341504, + "loss": 1.5932, + "step": 4630 + }, + { + "epoch": 0.42015968063872255, + "grad_norm": 0.09624082016449127, + "learning_rate": 0.0006512070418997336, + "loss": 1.62, + "step": 4631 + }, + { + "epoch": 0.4202504082743604, + "grad_norm": 0.09996454254319435, + "learning_rate": 0.0006510669880085663, + "loss": 1.6557, + "step": 4632 + }, + { + "epoch": 0.42034113590999816, + "grad_norm": 0.10767530983251129, + "learning_rate": 0.0006509269210727424, + "loss": 1.6546, + "step": 4633 + }, + { + "epoch": 0.420431863545636, + "grad_norm": 0.10546692584664136, + "learning_rate": 0.0006507868411043566, + "loss": 1.5528, + "step": 4634 + }, + { + "epoch": 0.42052259118127383, + "grad_norm": 0.10879376978009352, + "learning_rate": 0.0006506467481155048, + "loss": 1.6145, + "step": 4635 + }, + { + "epoch": 0.4206133188169116, + "grad_norm": 0.10867927793406786, + "learning_rate": 0.0006505066421182842, + "loss": 1.6366, + "step": 4636 + }, + { + "epoch": 0.42070404645254944, + "grad_norm": 0.10571324143722877, + "learning_rate": 0.0006503665231247928, + "loss": 1.6458, + "step": 4637 + }, + { + "epoch": 0.4207947740881873, + "grad_norm": 0.10644273239631992, + "learning_rate": 0.00065022639114713, + "loss": 1.6261, + "step": 4638 + }, + { + "epoch": 0.42088550172382505, + "grad_norm": 0.10701177853881161, + "learning_rate": 0.0006500862461973962, + "loss": 1.6185, + "step": 4639 + }, + { + "epoch": 0.4209762293594629, + "grad_norm": 0.10076359006317619, + "learning_rate": 0.0006499460882876929, + "loss": 1.6272, + "step": 4640 + }, + { + "epoch": 0.4210669569951007, + "grad_norm": 0.10206029588768435, + "learning_rate": 0.0006498059174301227, + "loss": 1.6379, + "step": 4641 + }, + { + "epoch": 0.42115768463073855, + "grad_norm": 0.09850003602677222, + "learning_rate": 0.0006496657336367895, + "loss": 1.6438, + "step": 4642 + }, + { + "epoch": 0.4212484122663763, + "grad_norm": 0.10504355174871519, + "learning_rate": 0.0006495255369197982, + "loss": 1.6259, + "step": 4643 + }, + { + "epoch": 0.42133913990201416, + "grad_norm": 0.0981294209352129, + "learning_rate": 0.0006493853272912547, + "loss": 1.6507, + "step": 4644 + }, + { + "epoch": 0.421429867537652, + "grad_norm": 0.1048797514385126, + "learning_rate": 0.0006492451047632661, + "loss": 1.6385, + "step": 4645 + }, + { + "epoch": 0.42152059517328977, + "grad_norm": 0.10406258853814468, + "learning_rate": 0.0006491048693479408, + "loss": 1.6049, + "step": 4646 + }, + { + "epoch": 0.4216113228089276, + "grad_norm": 0.10540351559193811, + "learning_rate": 0.0006489646210573881, + "loss": 1.6027, + "step": 4647 + }, + { + "epoch": 0.42170205044456543, + "grad_norm": 0.10180981633194883, + "learning_rate": 0.0006488243599037184, + "loss": 1.6033, + "step": 4648 + }, + { + "epoch": 0.4217927780802032, + "grad_norm": 0.10275300163182817, + "learning_rate": 0.0006486840858990432, + "loss": 1.6534, + "step": 4649 + }, + { + "epoch": 0.42188350571584105, + "grad_norm": 0.10064571791421933, + "learning_rate": 0.0006485437990554752, + "loss": 1.6025, + "step": 4650 + }, + { + "epoch": 0.4219742333514789, + "grad_norm": 0.10318254518699252, + "learning_rate": 0.0006484034993851284, + "loss": 1.603, + "step": 4651 + }, + { + "epoch": 0.42206496098711666, + "grad_norm": 0.10330221160638897, + "learning_rate": 0.0006482631869001175, + "loss": 1.5838, + "step": 4652 + }, + { + "epoch": 0.4221556886227545, + "grad_norm": 0.10007746504269067, + "learning_rate": 
0.0006481228616125588, + "loss": 1.6162, + "step": 4653 + }, + { + "epoch": 0.4222464162583923, + "grad_norm": 0.10199678891386296, + "learning_rate": 0.0006479825235345686, + "loss": 1.641, + "step": 4654 + }, + { + "epoch": 0.4223371438940301, + "grad_norm": 0.10441734361315248, + "learning_rate": 0.0006478421726782663, + "loss": 1.5921, + "step": 4655 + }, + { + "epoch": 0.42242787152966793, + "grad_norm": 0.10402921849325361, + "learning_rate": 0.0006477018090557703, + "loss": 1.611, + "step": 4656 + }, + { + "epoch": 0.42251859916530576, + "grad_norm": 0.09999590350178865, + "learning_rate": 0.0006475614326792012, + "loss": 1.6076, + "step": 4657 + }, + { + "epoch": 0.42260932680094354, + "grad_norm": 0.10434994923001772, + "learning_rate": 0.0006474210435606809, + "loss": 1.5982, + "step": 4658 + }, + { + "epoch": 0.4227000544365814, + "grad_norm": 0.10192865351365155, + "learning_rate": 0.0006472806417123316, + "loss": 1.6219, + "step": 4659 + }, + { + "epoch": 0.4227907820722192, + "grad_norm": 0.1012907667496276, + "learning_rate": 0.0006471402271462773, + "loss": 1.6134, + "step": 4660 + }, + { + "epoch": 0.42288150970785704, + "grad_norm": 0.10310002116961091, + "learning_rate": 0.0006469997998746425, + "loss": 1.6192, + "step": 4661 + }, + { + "epoch": 0.4229722373434948, + "grad_norm": 0.09926380431928428, + "learning_rate": 0.0006468593599095535, + "loss": 1.5573, + "step": 4662 + }, + { + "epoch": 0.42306296497913265, + "grad_norm": 0.10298442213557223, + "learning_rate": 0.0006467189072631372, + "loss": 1.6056, + "step": 4663 + }, + { + "epoch": 0.4231536926147705, + "grad_norm": 0.10296134755280789, + "learning_rate": 0.0006465784419475214, + "loss": 1.6571, + "step": 4664 + }, + { + "epoch": 0.42324442025040826, + "grad_norm": 0.10406259893158412, + "learning_rate": 0.0006464379639748356, + "loss": 1.6126, + "step": 4665 + }, + { + "epoch": 0.4233351478860461, + "grad_norm": 0.10455726758747885, + "learning_rate": 0.0006462974733572101, + "loss": 1.6043, + "step": 4666 + }, + { + "epoch": 0.4234258755216839, + "grad_norm": 0.10418095539859523, + "learning_rate": 0.0006461569701067762, + "loss": 1.6327, + "step": 4667 + }, + { + "epoch": 0.4235166031573217, + "grad_norm": 0.10306217163919089, + "learning_rate": 0.0006460164542356665, + "loss": 1.5887, + "step": 4668 + }, + { + "epoch": 0.42360733079295954, + "grad_norm": 0.10393296847589881, + "learning_rate": 0.0006458759257560141, + "loss": 1.5925, + "step": 4669 + }, + { + "epoch": 0.42369805842859737, + "grad_norm": 0.09936435368171882, + "learning_rate": 0.0006457353846799544, + "loss": 1.6188, + "step": 4670 + }, + { + "epoch": 0.42378878606423515, + "grad_norm": 0.10130645647720095, + "learning_rate": 0.0006455948310196226, + "loss": 1.66, + "step": 4671 + }, + { + "epoch": 0.423879513699873, + "grad_norm": 0.10559403822117551, + "learning_rate": 0.0006454542647871556, + "loss": 1.5695, + "step": 4672 + }, + { + "epoch": 0.4239702413355108, + "grad_norm": 0.1038426924030167, + "learning_rate": 0.0006453136859946915, + "loss": 1.6082, + "step": 4673 + }, + { + "epoch": 0.4240609689711486, + "grad_norm": 0.10156235926419385, + "learning_rate": 0.0006451730946543692, + "loss": 1.5918, + "step": 4674 + }, + { + "epoch": 0.4241516966067864, + "grad_norm": 0.106042360517948, + "learning_rate": 0.0006450324907783288, + "loss": 1.6367, + "step": 4675 + }, + { + "epoch": 0.42424242424242425, + "grad_norm": 0.10328687072349357, + "learning_rate": 0.0006448918743787113, + "loss": 1.6193, + "step": 4676 + }, + { + "epoch": 
0.42433315187806203, + "grad_norm": 0.10175034853305963, + "learning_rate": 0.0006447512454676593, + "loss": 1.613, + "step": 4677 + }, + { + "epoch": 0.42442387951369986, + "grad_norm": 0.1007442453774251, + "learning_rate": 0.0006446106040573158, + "loss": 1.5794, + "step": 4678 + }, + { + "epoch": 0.4245146071493377, + "grad_norm": 0.10365794446268337, + "learning_rate": 0.0006444699501598252, + "loss": 1.6466, + "step": 4679 + }, + { + "epoch": 0.42460533478497553, + "grad_norm": 0.0978892922410328, + "learning_rate": 0.0006443292837873334, + "loss": 1.6344, + "step": 4680 + }, + { + "epoch": 0.4246960624206133, + "grad_norm": 0.10231326281195491, + "learning_rate": 0.0006441886049519864, + "loss": 1.5974, + "step": 4681 + }, + { + "epoch": 0.42478679005625114, + "grad_norm": 0.09976445265692399, + "learning_rate": 0.0006440479136659323, + "loss": 1.6278, + "step": 4682 + }, + { + "epoch": 0.424877517691889, + "grad_norm": 0.10283737822948619, + "learning_rate": 0.0006439072099413195, + "loss": 1.5789, + "step": 4683 + }, + { + "epoch": 0.42496824532752675, + "grad_norm": 0.1055592082733293, + "learning_rate": 0.0006437664937902981, + "loss": 1.5831, + "step": 4684 + }, + { + "epoch": 0.4250589729631646, + "grad_norm": 0.1030353733342594, + "learning_rate": 0.0006436257652250185, + "loss": 1.6147, + "step": 4685 + }, + { + "epoch": 0.4251497005988024, + "grad_norm": 0.09947587663463621, + "learning_rate": 0.0006434850242576331, + "loss": 1.6028, + "step": 4686 + }, + { + "epoch": 0.4252404282344402, + "grad_norm": 0.10227881665704294, + "learning_rate": 0.0006433442709002948, + "loss": 1.5778, + "step": 4687 + }, + { + "epoch": 0.425331155870078, + "grad_norm": 0.10034337730720909, + "learning_rate": 0.0006432035051651574, + "loss": 1.6194, + "step": 4688 + }, + { + "epoch": 0.42542188350571586, + "grad_norm": 0.10573680360848754, + "learning_rate": 0.0006430627270643762, + "loss": 1.6363, + "step": 4689 + }, + { + "epoch": 0.42551261114135364, + "grad_norm": 0.10457497448361237, + "learning_rate": 0.0006429219366101075, + "loss": 1.6596, + "step": 4690 + }, + { + "epoch": 0.42560333877699147, + "grad_norm": 0.10093428382561732, + "learning_rate": 0.0006427811338145084, + "loss": 1.6166, + "step": 4691 + }, + { + "epoch": 0.4256940664126293, + "grad_norm": 0.10043580165685517, + "learning_rate": 0.0006426403186897376, + "loss": 1.578, + "step": 4692 + }, + { + "epoch": 0.4257847940482671, + "grad_norm": 0.1068391773463969, + "learning_rate": 0.000642499491247954, + "loss": 1.6209, + "step": 4693 + }, + { + "epoch": 0.4258755216839049, + "grad_norm": 0.10062373239467805, + "learning_rate": 0.0006423586515013185, + "loss": 1.6501, + "step": 4694 + }, + { + "epoch": 0.42596624931954274, + "grad_norm": 0.10354710537299652, + "learning_rate": 0.0006422177994619924, + "loss": 1.6487, + "step": 4695 + }, + { + "epoch": 0.4260569769551805, + "grad_norm": 0.10595134043607507, + "learning_rate": 0.0006420769351421383, + "loss": 1.5737, + "step": 4696 + }, + { + "epoch": 0.42614770459081835, + "grad_norm": 0.10201537887448085, + "learning_rate": 0.00064193605855392, + "loss": 1.6278, + "step": 4697 + }, + { + "epoch": 0.4262384322264562, + "grad_norm": 0.10432548248307974, + "learning_rate": 0.000641795169709502, + "loss": 1.6053, + "step": 4698 + }, + { + "epoch": 0.426329159862094, + "grad_norm": 0.10176455250444576, + "learning_rate": 0.0006416542686210505, + "loss": 1.5871, + "step": 4699 + }, + { + "epoch": 0.4264198874977318, + "grad_norm": 0.1004559023665071, + "learning_rate": 
0.0006415133553007317, + "loss": 1.6311, + "step": 4700 + }, + { + "epoch": 0.42651061513336963, + "grad_norm": 0.1069080714507777, + "learning_rate": 0.0006413724297607139, + "loss": 1.6174, + "step": 4701 + }, + { + "epoch": 0.42660134276900746, + "grad_norm": 0.10175322526485335, + "learning_rate": 0.000641231492013166, + "loss": 1.598, + "step": 4702 + }, + { + "epoch": 0.42669207040464524, + "grad_norm": 0.10226096864087576, + "learning_rate": 0.000641090542070258, + "loss": 1.6037, + "step": 4703 + }, + { + "epoch": 0.4267827980402831, + "grad_norm": 0.09841608809795217, + "learning_rate": 0.0006409495799441607, + "loss": 1.6062, + "step": 4704 + }, + { + "epoch": 0.4268735256759209, + "grad_norm": 0.10164850636855823, + "learning_rate": 0.0006408086056470466, + "loss": 1.6003, + "step": 4705 + }, + { + "epoch": 0.4269642533115587, + "grad_norm": 0.09944964813321422, + "learning_rate": 0.0006406676191910885, + "loss": 1.5634, + "step": 4706 + }, + { + "epoch": 0.4270549809471965, + "grad_norm": 0.10297815103792388, + "learning_rate": 0.0006405266205884609, + "loss": 1.6272, + "step": 4707 + }, + { + "epoch": 0.42714570858283435, + "grad_norm": 0.10117868001463322, + "learning_rate": 0.0006403856098513387, + "loss": 1.583, + "step": 4708 + }, + { + "epoch": 0.4272364362184721, + "grad_norm": 0.10252798514062351, + "learning_rate": 0.0006402445869918987, + "loss": 1.6208, + "step": 4709 + }, + { + "epoch": 0.42732716385410996, + "grad_norm": 0.0979383701484874, + "learning_rate": 0.0006401035520223178, + "loss": 1.5863, + "step": 4710 + }, + { + "epoch": 0.4274178914897478, + "grad_norm": 0.10433677364868994, + "learning_rate": 0.0006399625049547744, + "loss": 1.5952, + "step": 4711 + }, + { + "epoch": 0.42750861912538557, + "grad_norm": 0.09821503299441542, + "learning_rate": 0.0006398214458014482, + "loss": 1.5626, + "step": 4712 + }, + { + "epoch": 0.4275993467610234, + "grad_norm": 0.10302974909560933, + "learning_rate": 0.0006396803745745194, + "loss": 1.6365, + "step": 4713 + }, + { + "epoch": 0.42769007439666124, + "grad_norm": 0.09951529004013251, + "learning_rate": 0.0006395392912861699, + "loss": 1.5911, + "step": 4714 + }, + { + "epoch": 0.427780802032299, + "grad_norm": 0.10237131667384439, + "learning_rate": 0.0006393981959485819, + "loss": 1.6212, + "step": 4715 + }, + { + "epoch": 0.42787152966793685, + "grad_norm": 0.10267872184703578, + "learning_rate": 0.0006392570885739392, + "loss": 1.6532, + "step": 4716 + }, + { + "epoch": 0.4279622573035747, + "grad_norm": 0.10316349044625066, + "learning_rate": 0.0006391159691744263, + "loss": 1.597, + "step": 4717 + }, + { + "epoch": 0.4280529849392125, + "grad_norm": 0.10221771922081754, + "learning_rate": 0.000638974837762229, + "loss": 1.6298, + "step": 4718 + }, + { + "epoch": 0.4281437125748503, + "grad_norm": 0.10579629983483109, + "learning_rate": 0.0006388336943495339, + "loss": 1.6013, + "step": 4719 + }, + { + "epoch": 0.4282344402104881, + "grad_norm": 0.0993831382292146, + "learning_rate": 0.0006386925389485289, + "loss": 1.5998, + "step": 4720 + }, + { + "epoch": 0.42832516784612595, + "grad_norm": 0.10576887971065416, + "learning_rate": 0.0006385513715714028, + "loss": 1.6014, + "step": 4721 + }, + { + "epoch": 0.42841589548176373, + "grad_norm": 0.10487944030667845, + "learning_rate": 0.0006384101922303452, + "loss": 1.6448, + "step": 4722 + }, + { + "epoch": 0.42850662311740156, + "grad_norm": 0.10473496644906513, + "learning_rate": 0.0006382690009375471, + "loss": 1.588, + "step": 4723 + }, + { + "epoch": 
0.4285973507530394, + "grad_norm": 0.10292045203240979, + "learning_rate": 0.0006381277977052004, + "loss": 1.6233, + "step": 4724 + }, + { + "epoch": 0.4286880783886772, + "grad_norm": 0.10361973710766005, + "learning_rate": 0.0006379865825454982, + "loss": 1.6318, + "step": 4725 + }, + { + "epoch": 0.428778806024315, + "grad_norm": 0.10537048930450284, + "learning_rate": 0.0006378453554706341, + "loss": 1.6184, + "step": 4726 + }, + { + "epoch": 0.42886953365995284, + "grad_norm": 0.10472480404582094, + "learning_rate": 0.0006377041164928033, + "loss": 1.6246, + "step": 4727 + }, + { + "epoch": 0.4289602612955906, + "grad_norm": 0.10101070599827233, + "learning_rate": 0.0006375628656242017, + "loss": 1.5868, + "step": 4728 + }, + { + "epoch": 0.42905098893122845, + "grad_norm": 0.10482896000040465, + "learning_rate": 0.0006374216028770263, + "loss": 1.589, + "step": 4729 + }, + { + "epoch": 0.4291417165668663, + "grad_norm": 0.10087838013690442, + "learning_rate": 0.0006372803282634754, + "loss": 1.6116, + "step": 4730 + }, + { + "epoch": 0.42923244420250406, + "grad_norm": 0.1018292541058638, + "learning_rate": 0.0006371390417957477, + "loss": 1.6138, + "step": 4731 + }, + { + "epoch": 0.4293231718381419, + "grad_norm": 0.10088212691633536, + "learning_rate": 0.0006369977434860438, + "loss": 1.6233, + "step": 4732 + }, + { + "epoch": 0.4294138994737797, + "grad_norm": 0.10271520322475204, + "learning_rate": 0.0006368564333465645, + "loss": 1.5942, + "step": 4733 + }, + { + "epoch": 0.4295046271094175, + "grad_norm": 0.1081883217517621, + "learning_rate": 0.0006367151113895119, + "loss": 1.5999, + "step": 4734 + }, + { + "epoch": 0.42959535474505534, + "grad_norm": 0.10298229357751262, + "learning_rate": 0.0006365737776270892, + "loss": 1.6066, + "step": 4735 + }, + { + "epoch": 0.42968608238069317, + "grad_norm": 0.10133809846084472, + "learning_rate": 0.0006364324320715009, + "loss": 1.6134, + "step": 4736 + }, + { + "epoch": 0.429776810016331, + "grad_norm": 0.10432535596042813, + "learning_rate": 0.0006362910747349518, + "loss": 1.6175, + "step": 4737 + }, + { + "epoch": 0.4298675376519688, + "grad_norm": 0.10696850675575868, + "learning_rate": 0.0006361497056296486, + "loss": 1.618, + "step": 4738 + }, + { + "epoch": 0.4299582652876066, + "grad_norm": 0.10203854713674969, + "learning_rate": 0.0006360083247677979, + "loss": 1.6757, + "step": 4739 + }, + { + "epoch": 0.43004899292324444, + "grad_norm": 0.1000477412573961, + "learning_rate": 0.0006358669321616084, + "loss": 1.6479, + "step": 4740 + }, + { + "epoch": 0.4301397205588822, + "grad_norm": 0.09923941393032559, + "learning_rate": 0.0006357255278232894, + "loss": 1.6257, + "step": 4741 + }, + { + "epoch": 0.43023044819452005, + "grad_norm": 0.10367701156357424, + "learning_rate": 0.000635584111765051, + "loss": 1.6171, + "step": 4742 + }, + { + "epoch": 0.4303211758301579, + "grad_norm": 0.10455571972966836, + "learning_rate": 0.0006354426839991046, + "loss": 1.6098, + "step": 4743 + }, + { + "epoch": 0.43041190346579566, + "grad_norm": 0.10310710662244249, + "learning_rate": 0.0006353012445376624, + "loss": 1.6119, + "step": 4744 + }, + { + "epoch": 0.4305026311014335, + "grad_norm": 0.10036369035224732, + "learning_rate": 0.0006351597933929379, + "loss": 1.5811, + "step": 4745 + }, + { + "epoch": 0.43059335873707133, + "grad_norm": 0.10388302393385566, + "learning_rate": 0.0006350183305771453, + "loss": 1.5913, + "step": 4746 + }, + { + "epoch": 0.4306840863727091, + "grad_norm": 0.105584251834781, + "learning_rate": 
0.0006348768561024998, + "loss": 1.654, + "step": 4747 + }, + { + "epoch": 0.43077481400834694, + "grad_norm": 0.10698785324975324, + "learning_rate": 0.0006347353699812181, + "loss": 1.5953, + "step": 4748 + }, + { + "epoch": 0.4308655416439848, + "grad_norm": 0.10088684598183437, + "learning_rate": 0.0006345938722255171, + "loss": 1.5904, + "step": 4749 + }, + { + "epoch": 0.43095626927962255, + "grad_norm": 0.10131390751900729, + "learning_rate": 0.0006344523628476156, + "loss": 1.6886, + "step": 4750 + }, + { + "epoch": 0.4310469969152604, + "grad_norm": 0.10572315653846397, + "learning_rate": 0.0006343108418597328, + "loss": 1.6186, + "step": 4751 + }, + { + "epoch": 0.4311377245508982, + "grad_norm": 0.10108626972293672, + "learning_rate": 0.0006341693092740887, + "loss": 1.6233, + "step": 4752 + }, + { + "epoch": 0.431228452186536, + "grad_norm": 0.10218765300832304, + "learning_rate": 0.0006340277651029054, + "loss": 1.5813, + "step": 4753 + }, + { + "epoch": 0.4313191798221738, + "grad_norm": 0.09791706932881628, + "learning_rate": 0.0006338862093584046, + "loss": 1.6065, + "step": 4754 + }, + { + "epoch": 0.43140990745781166, + "grad_norm": 0.09913209730348248, + "learning_rate": 0.0006337446420528099, + "loss": 1.5834, + "step": 4755 + }, + { + "epoch": 0.4315006350934495, + "grad_norm": 0.10641082728229996, + "learning_rate": 0.0006336030631983456, + "loss": 1.6265, + "step": 4756 + }, + { + "epoch": 0.43159136272908727, + "grad_norm": 0.10099556617108875, + "learning_rate": 0.0006334614728072371, + "loss": 1.5562, + "step": 4757 + }, + { + "epoch": 0.4316820903647251, + "grad_norm": 0.09965743504705181, + "learning_rate": 0.000633319870891711, + "loss": 1.6316, + "step": 4758 + }, + { + "epoch": 0.43177281800036293, + "grad_norm": 0.09990012603266979, + "learning_rate": 0.0006331782574639939, + "loss": 1.5649, + "step": 4759 + }, + { + "epoch": 0.4318635456360007, + "grad_norm": 0.09960646314047657, + "learning_rate": 0.000633036632536315, + "loss": 1.6248, + "step": 4760 + }, + { + "epoch": 0.43195427327163854, + "grad_norm": 0.10330568490894128, + "learning_rate": 0.0006328949961209033, + "loss": 1.6407, + "step": 4761 + }, + { + "epoch": 0.4320450009072764, + "grad_norm": 0.10205018553364269, + "learning_rate": 0.000632753348229989, + "loss": 1.6189, + "step": 4762 + }, + { + "epoch": 0.43213572854291415, + "grad_norm": 0.1026081125503528, + "learning_rate": 0.0006326116888758036, + "loss": 1.633, + "step": 4763 + }, + { + "epoch": 0.432226456178552, + "grad_norm": 0.10415977666694569, + "learning_rate": 0.0006324700180705791, + "loss": 1.5932, + "step": 4764 + }, + { + "epoch": 0.4323171838141898, + "grad_norm": 0.10476030509796372, + "learning_rate": 0.0006323283358265492, + "loss": 1.6298, + "step": 4765 + }, + { + "epoch": 0.4324079114498276, + "grad_norm": 0.10653224201329545, + "learning_rate": 0.000632186642155948, + "loss": 1.6319, + "step": 4766 + }, + { + "epoch": 0.43249863908546543, + "grad_norm": 0.10314255235885422, + "learning_rate": 0.0006320449370710107, + "loss": 1.6105, + "step": 4767 + }, + { + "epoch": 0.43258936672110326, + "grad_norm": 0.1051177869343914, + "learning_rate": 0.0006319032205839737, + "loss": 1.6102, + "step": 4768 + }, + { + "epoch": 0.43268009435674104, + "grad_norm": 0.10453034017423057, + "learning_rate": 0.000631761492707074, + "loss": 1.6193, + "step": 4769 + }, + { + "epoch": 0.4327708219923789, + "grad_norm": 0.10072962768348565, + "learning_rate": 0.0006316197534525502, + "loss": 1.578, + "step": 4770 + }, + { + "epoch": 
0.4328615496280167, + "grad_norm": 0.10339653625400125, + "learning_rate": 0.0006314780028326411, + "loss": 1.5695, + "step": 4771 + }, + { + "epoch": 0.4329522772636545, + "grad_norm": 0.0999982869809694, + "learning_rate": 0.0006313362408595873, + "loss": 1.6008, + "step": 4772 + }, + { + "epoch": 0.4330430048992923, + "grad_norm": 0.09927796003128309, + "learning_rate": 0.0006311944675456296, + "loss": 1.6131, + "step": 4773 + }, + { + "epoch": 0.43313373253493015, + "grad_norm": 0.10244435921281093, + "learning_rate": 0.0006310526829030101, + "loss": 1.5057, + "step": 4774 + }, + { + "epoch": 0.433224460170568, + "grad_norm": 0.10556278883388279, + "learning_rate": 0.0006309108869439725, + "loss": 1.6205, + "step": 4775 + }, + { + "epoch": 0.43331518780620576, + "grad_norm": 0.10087207883919701, + "learning_rate": 0.0006307690796807602, + "loss": 1.5883, + "step": 4776 + }, + { + "epoch": 0.4334059154418436, + "grad_norm": 0.10543293237765773, + "learning_rate": 0.0006306272611256188, + "loss": 1.5756, + "step": 4777 + }, + { + "epoch": 0.4334966430774814, + "grad_norm": 0.10411839878109096, + "learning_rate": 0.0006304854312907941, + "loss": 1.6145, + "step": 4778 + }, + { + "epoch": 0.4335873707131192, + "grad_norm": 0.09564500931568805, + "learning_rate": 0.000630343590188533, + "loss": 1.64, + "step": 4779 + }, + { + "epoch": 0.43367809834875704, + "grad_norm": 0.0974917499888948, + "learning_rate": 0.000630201737831084, + "loss": 1.5908, + "step": 4780 + }, + { + "epoch": 0.43376882598439487, + "grad_norm": 0.10006270054808462, + "learning_rate": 0.0006300598742306955, + "loss": 1.6395, + "step": 4781 + }, + { + "epoch": 0.43385955362003265, + "grad_norm": 0.10616151036784532, + "learning_rate": 0.0006299179993996178, + "loss": 1.6293, + "step": 4782 + }, + { + "epoch": 0.4339502812556705, + "grad_norm": 0.10013370174427264, + "learning_rate": 0.0006297761133501016, + "loss": 1.6226, + "step": 4783 + }, + { + "epoch": 0.4340410088913083, + "grad_norm": 0.10065353126348055, + "learning_rate": 0.0006296342160943991, + "loss": 1.6117, + "step": 4784 + }, + { + "epoch": 0.4341317365269461, + "grad_norm": 0.10133077746240672, + "learning_rate": 0.0006294923076447628, + "loss": 1.6435, + "step": 4785 + }, + { + "epoch": 0.4342224641625839, + "grad_norm": 0.10450871073913971, + "learning_rate": 0.0006293503880134466, + "loss": 1.6117, + "step": 4786 + }, + { + "epoch": 0.43431319179822175, + "grad_norm": 0.1035525407158309, + "learning_rate": 0.0006292084572127054, + "loss": 1.5955, + "step": 4787 + }, + { + "epoch": 0.43440391943385953, + "grad_norm": 0.10711220035824995, + "learning_rate": 0.0006290665152547948, + "loss": 1.5819, + "step": 4788 + }, + { + "epoch": 0.43449464706949736, + "grad_norm": 0.10860976475619163, + "learning_rate": 0.0006289245621519717, + "loss": 1.667, + "step": 4789 + }, + { + "epoch": 0.4345853747051352, + "grad_norm": 0.1047152726213094, + "learning_rate": 0.0006287825979164936, + "loss": 1.611, + "step": 4790 + }, + { + "epoch": 0.434676102340773, + "grad_norm": 0.10115525158422937, + "learning_rate": 0.0006286406225606191, + "loss": 1.5662, + "step": 4791 + }, + { + "epoch": 0.4347668299764108, + "grad_norm": 0.10032815264270804, + "learning_rate": 0.000628498636096608, + "loss": 1.5949, + "step": 4792 + }, + { + "epoch": 0.43485755761204864, + "grad_norm": 0.0998175554584648, + "learning_rate": 0.0006283566385367207, + "loss": 1.6177, + "step": 4793 + }, + { + "epoch": 0.4349482852476865, + "grad_norm": 0.10051692269978933, + "learning_rate": 
0.0006282146298932187, + "loss": 1.6172, + "step": 4794 + }, + { + "epoch": 0.43503901288332425, + "grad_norm": 0.1051544532041998, + "learning_rate": 0.0006280726101783647, + "loss": 1.5942, + "step": 4795 + }, + { + "epoch": 0.4351297405189621, + "grad_norm": 0.10138790056289745, + "learning_rate": 0.0006279305794044218, + "loss": 1.6027, + "step": 4796 + }, + { + "epoch": 0.4352204681545999, + "grad_norm": 0.1041309905064153, + "learning_rate": 0.0006277885375836545, + "loss": 1.6333, + "step": 4797 + }, + { + "epoch": 0.4353111957902377, + "grad_norm": 0.10150911420252884, + "learning_rate": 0.000627646484728328, + "loss": 1.6506, + "step": 4798 + }, + { + "epoch": 0.4354019234258755, + "grad_norm": 0.10365884990236672, + "learning_rate": 0.0006275044208507091, + "loss": 1.6115, + "step": 4799 + }, + { + "epoch": 0.43549265106151336, + "grad_norm": 0.10279729089417627, + "learning_rate": 0.0006273623459630644, + "loss": 1.6308, + "step": 4800 + }, + { + "epoch": 0.43558337869715114, + "grad_norm": 0.10096998273814525, + "learning_rate": 0.0006272202600776624, + "loss": 1.6183, + "step": 4801 + }, + { + "epoch": 0.43567410633278897, + "grad_norm": 0.10609738021532239, + "learning_rate": 0.0006270781632067721, + "loss": 1.6039, + "step": 4802 + }, + { + "epoch": 0.4357648339684268, + "grad_norm": 0.10804704531092547, + "learning_rate": 0.0006269360553626637, + "loss": 1.5829, + "step": 4803 + }, + { + "epoch": 0.4358555616040646, + "grad_norm": 0.10382006975055338, + "learning_rate": 0.0006267939365576085, + "loss": 1.6035, + "step": 4804 + }, + { + "epoch": 0.4359462892397024, + "grad_norm": 0.10068551156103532, + "learning_rate": 0.000626651806803878, + "loss": 1.6219, + "step": 4805 + }, + { + "epoch": 0.43603701687534024, + "grad_norm": 0.09999753845461022, + "learning_rate": 0.0006265096661137452, + "loss": 1.6284, + "step": 4806 + }, + { + "epoch": 0.436127744510978, + "grad_norm": 0.10121743321402482, + "learning_rate": 0.0006263675144994841, + "loss": 1.635, + "step": 4807 + }, + { + "epoch": 0.43621847214661585, + "grad_norm": 0.09775531090266759, + "learning_rate": 0.0006262253519733696, + "loss": 1.6046, + "step": 4808 + }, + { + "epoch": 0.4363091997822537, + "grad_norm": 0.10258315553324972, + "learning_rate": 0.0006260831785476773, + "loss": 1.6173, + "step": 4809 + }, + { + "epoch": 0.43639992741789146, + "grad_norm": 0.10138639929040753, + "learning_rate": 0.000625940994234684, + "loss": 1.6619, + "step": 4810 + }, + { + "epoch": 0.4364906550535293, + "grad_norm": 0.10198527343132717, + "learning_rate": 0.0006257987990466674, + "loss": 1.5749, + "step": 4811 + }, + { + "epoch": 0.43658138268916713, + "grad_norm": 0.10248653312034518, + "learning_rate": 0.0006256565929959057, + "loss": 1.5733, + "step": 4812 + }, + { + "epoch": 0.43667211032480496, + "grad_norm": 0.10201737895703689, + "learning_rate": 0.0006255143760946788, + "loss": 1.6211, + "step": 4813 + }, + { + "epoch": 0.43676283796044274, + "grad_norm": 0.10329111774121591, + "learning_rate": 0.000625372148355267, + "loss": 1.627, + "step": 4814 + }, + { + "epoch": 0.4368535655960806, + "grad_norm": 0.10104887025214224, + "learning_rate": 0.0006252299097899517, + "loss": 1.6027, + "step": 4815 + }, + { + "epoch": 0.4369442932317184, + "grad_norm": 0.10206240783823325, + "learning_rate": 0.0006250876604110153, + "loss": 1.6327, + "step": 4816 + }, + { + "epoch": 0.4370350208673562, + "grad_norm": 0.09890690756739041, + "learning_rate": 0.000624945400230741, + "loss": 1.5839, + "step": 4817 + }, + { + "epoch": 
0.437125748502994, + "grad_norm": 0.0997848841426633, + "learning_rate": 0.0006248031292614129, + "loss": 1.6326, + "step": 4818 + }, + { + "epoch": 0.43721647613863185, + "grad_norm": 0.10346238860328048, + "learning_rate": 0.0006246608475153164, + "loss": 1.5689, + "step": 4819 + }, + { + "epoch": 0.4373072037742696, + "grad_norm": 0.10021745459956821, + "learning_rate": 0.0006245185550047371, + "loss": 1.5854, + "step": 4820 + }, + { + "epoch": 0.43739793140990746, + "grad_norm": 0.0996388857139328, + "learning_rate": 0.0006243762517419622, + "loss": 1.6078, + "step": 4821 + }, + { + "epoch": 0.4374886590455453, + "grad_norm": 0.09926589921678845, + "learning_rate": 0.0006242339377392799, + "loss": 1.5885, + "step": 4822 + }, + { + "epoch": 0.43757938668118307, + "grad_norm": 0.10284258712384416, + "learning_rate": 0.0006240916130089784, + "loss": 1.5725, + "step": 4823 + }, + { + "epoch": 0.4376701143168209, + "grad_norm": 0.10363673963064389, + "learning_rate": 0.0006239492775633482, + "loss": 1.6351, + "step": 4824 + }, + { + "epoch": 0.43776084195245873, + "grad_norm": 0.10014544286815849, + "learning_rate": 0.0006238069314146793, + "loss": 1.5726, + "step": 4825 + }, + { + "epoch": 0.4378515695880965, + "grad_norm": 0.1041272408265715, + "learning_rate": 0.0006236645745752638, + "loss": 1.5995, + "step": 4826 + }, + { + "epoch": 0.43794229722373434, + "grad_norm": 0.0993848577313028, + "learning_rate": 0.0006235222070573941, + "loss": 1.555, + "step": 4827 + }, + { + "epoch": 0.4380330248593722, + "grad_norm": 0.10464157074007667, + "learning_rate": 0.0006233798288733635, + "loss": 1.6082, + "step": 4828 + }, + { + "epoch": 0.43812375249500995, + "grad_norm": 0.10178040958078291, + "learning_rate": 0.0006232374400354664, + "loss": 1.6832, + "step": 4829 + }, + { + "epoch": 0.4382144801306478, + "grad_norm": 0.10352911951540372, + "learning_rate": 0.0006230950405559983, + "loss": 1.6279, + "step": 4830 + }, + { + "epoch": 0.4383052077662856, + "grad_norm": 0.10251930186236191, + "learning_rate": 0.0006229526304472555, + "loss": 1.6382, + "step": 4831 + }, + { + "epoch": 0.43839593540192345, + "grad_norm": 0.10813415975864826, + "learning_rate": 0.0006228102097215346, + "loss": 1.5761, + "step": 4832 + }, + { + "epoch": 0.43848666303756123, + "grad_norm": 0.10158504060162721, + "learning_rate": 0.0006226677783911342, + "loss": 1.6178, + "step": 4833 + }, + { + "epoch": 0.43857739067319906, + "grad_norm": 0.09880935378387004, + "learning_rate": 0.0006225253364683529, + "loss": 1.5962, + "step": 4834 + }, + { + "epoch": 0.4386681183088369, + "grad_norm": 0.10319362458884135, + "learning_rate": 0.000622382883965491, + "loss": 1.6069, + "step": 4835 + }, + { + "epoch": 0.4387588459444747, + "grad_norm": 0.09614862265879075, + "learning_rate": 0.0006222404208948488, + "loss": 1.5674, + "step": 4836 + }, + { + "epoch": 0.4388495735801125, + "grad_norm": 0.0984455520691922, + "learning_rate": 0.0006220979472687283, + "loss": 1.5938, + "step": 4837 + }, + { + "epoch": 0.43894030121575034, + "grad_norm": 0.10295813668148124, + "learning_rate": 0.0006219554630994322, + "loss": 1.613, + "step": 4838 + }, + { + "epoch": 0.4390310288513881, + "grad_norm": 0.09744258728062, + "learning_rate": 0.0006218129683992637, + "loss": 1.6118, + "step": 4839 + }, + { + "epoch": 0.43912175648702595, + "grad_norm": 0.10066250737221911, + "learning_rate": 0.0006216704631805274, + "loss": 1.609, + "step": 4840 + }, + { + "epoch": 0.4392124841226638, + "grad_norm": 0.1026030191352797, + "learning_rate": 
0.0006215279474555288, + "loss": 1.6251, + "step": 4841 + }, + { + "epoch": 0.43930321175830156, + "grad_norm": 0.1006713957995404, + "learning_rate": 0.000621385421236574, + "loss": 1.6247, + "step": 4842 + }, + { + "epoch": 0.4393939393939394, + "grad_norm": 0.10161326781699834, + "learning_rate": 0.0006212428845359702, + "loss": 1.5952, + "step": 4843 + }, + { + "epoch": 0.4394846670295772, + "grad_norm": 0.10072652974431984, + "learning_rate": 0.0006211003373660255, + "loss": 1.596, + "step": 4844 + }, + { + "epoch": 0.439575394665215, + "grad_norm": 0.10120125773965397, + "learning_rate": 0.0006209577797390486, + "loss": 1.6522, + "step": 4845 + }, + { + "epoch": 0.43966612230085284, + "grad_norm": 0.09639798978320534, + "learning_rate": 0.0006208152116673498, + "loss": 1.5917, + "step": 4846 + }, + { + "epoch": 0.43975684993649067, + "grad_norm": 0.10139164368038725, + "learning_rate": 0.0006206726331632397, + "loss": 1.613, + "step": 4847 + }, + { + "epoch": 0.43984757757212845, + "grad_norm": 0.10106159476793738, + "learning_rate": 0.0006205300442390298, + "loss": 1.6059, + "step": 4848 + }, + { + "epoch": 0.4399383052077663, + "grad_norm": 0.10492824950438055, + "learning_rate": 0.0006203874449070328, + "loss": 1.6527, + "step": 4849 + }, + { + "epoch": 0.4400290328434041, + "grad_norm": 0.10584795121730459, + "learning_rate": 0.0006202448351795622, + "loss": 1.6014, + "step": 4850 + }, + { + "epoch": 0.44011976047904194, + "grad_norm": 0.1042936695434151, + "learning_rate": 0.0006201022150689325, + "loss": 1.5967, + "step": 4851 + }, + { + "epoch": 0.4402104881146797, + "grad_norm": 0.10269043566283907, + "learning_rate": 0.0006199595845874586, + "loss": 1.5506, + "step": 4852 + }, + { + "epoch": 0.44030121575031755, + "grad_norm": 0.10177516211976065, + "learning_rate": 0.0006198169437474572, + "loss": 1.6177, + "step": 4853 + }, + { + "epoch": 0.4403919433859554, + "grad_norm": 0.10615115098919248, + "learning_rate": 0.0006196742925612447, + "loss": 1.6034, + "step": 4854 + }, + { + "epoch": 0.44048267102159316, + "grad_norm": 0.10427012111368507, + "learning_rate": 0.0006195316310411396, + "loss": 1.6179, + "step": 4855 + }, + { + "epoch": 0.440573398657231, + "grad_norm": 0.10121385234909454, + "learning_rate": 0.0006193889591994604, + "loss": 1.6098, + "step": 4856 + }, + { + "epoch": 0.44066412629286883, + "grad_norm": 0.10410488167637523, + "learning_rate": 0.0006192462770485272, + "loss": 1.5732, + "step": 4857 + }, + { + "epoch": 0.4407548539285066, + "grad_norm": 0.10065367758274216, + "learning_rate": 0.0006191035846006603, + "loss": 1.6362, + "step": 4858 + }, + { + "epoch": 0.44084558156414444, + "grad_norm": 0.10694045275267752, + "learning_rate": 0.0006189608818681813, + "loss": 1.6546, + "step": 4859 + }, + { + "epoch": 0.4409363091997823, + "grad_norm": 0.09860629337279941, + "learning_rate": 0.0006188181688634127, + "loss": 1.5598, + "step": 4860 + }, + { + "epoch": 0.44102703683542005, + "grad_norm": 0.09974604966915564, + "learning_rate": 0.0006186754455986776, + "loss": 1.6343, + "step": 4861 + }, + { + "epoch": 0.4411177644710579, + "grad_norm": 0.10137495662226095, + "learning_rate": 0.0006185327120863003, + "loss": 1.5825, + "step": 4862 + }, + { + "epoch": 0.4412084921066957, + "grad_norm": 0.1036595198687425, + "learning_rate": 0.0006183899683386059, + "loss": 1.6003, + "step": 4863 + }, + { + "epoch": 0.4412992197423335, + "grad_norm": 0.10489717905106301, + "learning_rate": 0.00061824721436792, + "loss": 1.5805, + "step": 4864 + }, + { + "epoch": 
0.4413899473779713, + "grad_norm": 0.0984458621541374, + "learning_rate": 0.0006181044501865702, + "loss": 1.6001, + "step": 4865 + }, + { + "epoch": 0.44148067501360916, + "grad_norm": 0.10091337848756, + "learning_rate": 0.0006179616758068834, + "loss": 1.6565, + "step": 4866 + }, + { + "epoch": 0.44157140264924694, + "grad_norm": 0.0987991317519194, + "learning_rate": 0.0006178188912411886, + "loss": 1.6355, + "step": 4867 + }, + { + "epoch": 0.44166213028488477, + "grad_norm": 0.1002079828788447, + "learning_rate": 0.0006176760965018151, + "loss": 1.6438, + "step": 4868 + }, + { + "epoch": 0.4417528579205226, + "grad_norm": 0.09650743661206221, + "learning_rate": 0.0006175332916010934, + "loss": 1.5974, + "step": 4869 + }, + { + "epoch": 0.44184358555616043, + "grad_norm": 0.10480308715601308, + "learning_rate": 0.0006173904765513547, + "loss": 1.6105, + "step": 4870 + }, + { + "epoch": 0.4419343131917982, + "grad_norm": 0.10560696736654637, + "learning_rate": 0.000617247651364931, + "loss": 1.5842, + "step": 4871 + }, + { + "epoch": 0.44202504082743604, + "grad_norm": 0.10077616644252997, + "learning_rate": 0.0006171048160541553, + "loss": 1.606, + "step": 4872 + }, + { + "epoch": 0.4421157684630739, + "grad_norm": 0.10273872127372222, + "learning_rate": 0.0006169619706313614, + "loss": 1.6241, + "step": 4873 + }, + { + "epoch": 0.44220649609871165, + "grad_norm": 0.10002384389851425, + "learning_rate": 0.0006168191151088841, + "loss": 1.6197, + "step": 4874 + }, + { + "epoch": 0.4422972237343495, + "grad_norm": 0.09959653073575761, + "learning_rate": 0.0006166762494990592, + "loss": 1.5974, + "step": 4875 + }, + { + "epoch": 0.4423879513699873, + "grad_norm": 0.10246751620010444, + "learning_rate": 0.0006165333738142227, + "loss": 1.5622, + "step": 4876 + }, + { + "epoch": 0.4424786790056251, + "grad_norm": 0.10299547469373105, + "learning_rate": 0.0006163904880667123, + "loss": 1.6471, + "step": 4877 + }, + { + "epoch": 0.44256940664126293, + "grad_norm": 0.10419984215557784, + "learning_rate": 0.0006162475922688662, + "loss": 1.598, + "step": 4878 + }, + { + "epoch": 0.44266013427690076, + "grad_norm": 0.09885547180350358, + "learning_rate": 0.0006161046864330233, + "loss": 1.5961, + "step": 4879 + }, + { + "epoch": 0.44275086191253854, + "grad_norm": 0.09939947050361433, + "learning_rate": 0.0006159617705715236, + "loss": 1.6201, + "step": 4880 + }, + { + "epoch": 0.4428415895481764, + "grad_norm": 0.10104959884639103, + "learning_rate": 0.0006158188446967079, + "loss": 1.591, + "step": 4881 + }, + { + "epoch": 0.4429323171838142, + "grad_norm": 0.0985329886825931, + "learning_rate": 0.000615675908820918, + "loss": 1.657, + "step": 4882 + }, + { + "epoch": 0.443023044819452, + "grad_norm": 0.10115737073532134, + "learning_rate": 0.0006155329629564963, + "loss": 1.6396, + "step": 4883 + }, + { + "epoch": 0.4431137724550898, + "grad_norm": 0.10270289128470962, + "learning_rate": 0.0006153900071157861, + "loss": 1.625, + "step": 4884 + }, + { + "epoch": 0.44320450009072765, + "grad_norm": 0.10204059430914246, + "learning_rate": 0.0006152470413111319, + "loss": 1.5898, + "step": 4885 + }, + { + "epoch": 0.4432952277263654, + "grad_norm": 0.10351087423384987, + "learning_rate": 0.0006151040655548785, + "loss": 1.5766, + "step": 4886 + }, + { + "epoch": 0.44338595536200326, + "grad_norm": 0.10213425113257778, + "learning_rate": 0.0006149610798593722, + "loss": 1.597, + "step": 4887 + }, + { + "epoch": 0.4434766829976411, + "grad_norm": 0.10639150563939057, + "learning_rate": 
0.0006148180842369597, + "loss": 1.6102, + "step": 4888 + }, + { + "epoch": 0.44356741063327887, + "grad_norm": 0.10086548486747268, + "learning_rate": 0.0006146750786999888, + "loss": 1.6087, + "step": 4889 + }, + { + "epoch": 0.4436581382689167, + "grad_norm": 0.10079527329884712, + "learning_rate": 0.0006145320632608079, + "loss": 1.5935, + "step": 4890 + }, + { + "epoch": 0.44374886590455453, + "grad_norm": 0.10652715355141006, + "learning_rate": 0.0006143890379317664, + "loss": 1.6243, + "step": 4891 + }, + { + "epoch": 0.44383959354019237, + "grad_norm": 0.10324886533575917, + "learning_rate": 0.0006142460027252147, + "loss": 1.6337, + "step": 4892 + }, + { + "epoch": 0.44393032117583014, + "grad_norm": 0.10039439838022991, + "learning_rate": 0.0006141029576535037, + "loss": 1.6384, + "step": 4893 + }, + { + "epoch": 0.444021048811468, + "grad_norm": 0.10198058963731468, + "learning_rate": 0.0006139599027289856, + "loss": 1.5887, + "step": 4894 + }, + { + "epoch": 0.4441117764471058, + "grad_norm": 0.10502306401310084, + "learning_rate": 0.0006138168379640131, + "loss": 1.6041, + "step": 4895 + }, + { + "epoch": 0.4442025040827436, + "grad_norm": 0.10397414781702304, + "learning_rate": 0.0006136737633709398, + "loss": 1.6058, + "step": 4896 + }, + { + "epoch": 0.4442932317183814, + "grad_norm": 0.1047534379399016, + "learning_rate": 0.0006135306789621204, + "loss": 1.547, + "step": 4897 + }, + { + "epoch": 0.44438395935401925, + "grad_norm": 0.09894796444860851, + "learning_rate": 0.0006133875847499101, + "loss": 1.5783, + "step": 4898 + }, + { + "epoch": 0.44447468698965703, + "grad_norm": 0.10441132805255235, + "learning_rate": 0.000613244480746665, + "loss": 1.61, + "step": 4899 + }, + { + "epoch": 0.44456541462529486, + "grad_norm": 0.10509980047758573, + "learning_rate": 0.0006131013669647425, + "loss": 1.5931, + "step": 4900 + }, + { + "epoch": 0.4446561422609327, + "grad_norm": 0.10218826995110095, + "learning_rate": 0.0006129582434165001, + "loss": 1.6235, + "step": 4901 + }, + { + "epoch": 0.4447468698965705, + "grad_norm": 0.10097234273751579, + "learning_rate": 0.0006128151101142969, + "loss": 1.6006, + "step": 4902 + }, + { + "epoch": 0.4448375975322083, + "grad_norm": 0.09942755008085268, + "learning_rate": 0.0006126719670704921, + "loss": 1.618, + "step": 4903 + }, + { + "epoch": 0.44492832516784614, + "grad_norm": 0.0990328085852285, + "learning_rate": 0.0006125288142974466, + "loss": 1.6209, + "step": 4904 + }, + { + "epoch": 0.4450190528034839, + "grad_norm": 0.10258946054479015, + "learning_rate": 0.0006123856518075213, + "loss": 1.5927, + "step": 4905 + }, + { + "epoch": 0.44510978043912175, + "grad_norm": 0.10430404770107234, + "learning_rate": 0.0006122424796130783, + "loss": 1.6373, + "step": 4906 + }, + { + "epoch": 0.4452005080747596, + "grad_norm": 0.09957729608416246, + "learning_rate": 0.0006120992977264808, + "loss": 1.5516, + "step": 4907 + }, + { + "epoch": 0.44529123571039736, + "grad_norm": 0.09821491523506733, + "learning_rate": 0.0006119561061600923, + "loss": 1.6029, + "step": 4908 + }, + { + "epoch": 0.4453819633460352, + "grad_norm": 0.10164180811903742, + "learning_rate": 0.0006118129049262777, + "loss": 1.5892, + "step": 4909 + }, + { + "epoch": 0.445472690981673, + "grad_norm": 0.10741658853575878, + "learning_rate": 0.0006116696940374021, + "loss": 1.628, + "step": 4910 + }, + { + "epoch": 0.44556341861731086, + "grad_norm": 0.10248501538416127, + "learning_rate": 0.000611526473505832, + "loss": 1.5921, + "step": 4911 + }, + { + "epoch": 
0.44565414625294864, + "grad_norm": 0.10006702714048132, + "learning_rate": 0.0006113832433439345, + "loss": 1.6413, + "step": 4912 + }, + { + "epoch": 0.44574487388858647, + "grad_norm": 0.10409223586188027, + "learning_rate": 0.0006112400035640775, + "loss": 1.6288, + "step": 4913 + }, + { + "epoch": 0.4458356015242243, + "grad_norm": 0.10381553436189073, + "learning_rate": 0.0006110967541786299, + "loss": 1.6648, + "step": 4914 + }, + { + "epoch": 0.4459263291598621, + "grad_norm": 0.09778840049710663, + "learning_rate": 0.000610953495199961, + "loss": 1.5834, + "step": 4915 + }, + { + "epoch": 0.4460170567954999, + "grad_norm": 0.10583987992464026, + "learning_rate": 0.0006108102266404416, + "loss": 1.6591, + "step": 4916 + }, + { + "epoch": 0.44610778443113774, + "grad_norm": 0.10091262623828691, + "learning_rate": 0.0006106669485124428, + "loss": 1.5692, + "step": 4917 + }, + { + "epoch": 0.4461985120667755, + "grad_norm": 0.09589634467244311, + "learning_rate": 0.0006105236608283366, + "loss": 1.6021, + "step": 4918 + }, + { + "epoch": 0.44628923970241335, + "grad_norm": 0.10028330859281785, + "learning_rate": 0.000610380363600496, + "loss": 1.6027, + "step": 4919 + }, + { + "epoch": 0.4463799673380512, + "grad_norm": 0.09805689293858143, + "learning_rate": 0.0006102370568412947, + "loss": 1.6073, + "step": 4920 + }, + { + "epoch": 0.44647069497368896, + "grad_norm": 0.09801313130638424, + "learning_rate": 0.0006100937405631075, + "loss": 1.5814, + "step": 4921 + }, + { + "epoch": 0.4465614226093268, + "grad_norm": 0.09871809698449042, + "learning_rate": 0.0006099504147783094, + "loss": 1.6346, + "step": 4922 + }, + { + "epoch": 0.44665215024496463, + "grad_norm": 0.10305805895879792, + "learning_rate": 0.0006098070794992768, + "loss": 1.5883, + "step": 4923 + }, + { + "epoch": 0.4467428778806024, + "grad_norm": 0.10545375172458876, + "learning_rate": 0.0006096637347383866, + "loss": 1.6242, + "step": 4924 + }, + { + "epoch": 0.44683360551624024, + "grad_norm": 0.10487494859049937, + "learning_rate": 0.0006095203805080169, + "loss": 1.5954, + "step": 4925 + }, + { + "epoch": 0.4469243331518781, + "grad_norm": 0.1083744456219487, + "learning_rate": 0.0006093770168205463, + "loss": 1.5751, + "step": 4926 + }, + { + "epoch": 0.44701506078751585, + "grad_norm": 0.10446361411341268, + "learning_rate": 0.0006092336436883539, + "loss": 1.611, + "step": 4927 + }, + { + "epoch": 0.4471057884231537, + "grad_norm": 0.10079759450576353, + "learning_rate": 0.0006090902611238203, + "loss": 1.6455, + "step": 4928 + }, + { + "epoch": 0.4471965160587915, + "grad_norm": 0.09998944656399665, + "learning_rate": 0.000608946869139327, + "loss": 1.6174, + "step": 4929 + }, + { + "epoch": 0.44728724369442935, + "grad_norm": 0.09854966363273315, + "learning_rate": 0.0006088034677472549, + "loss": 1.5482, + "step": 4930 + }, + { + "epoch": 0.4473779713300671, + "grad_norm": 0.10299930293756379, + "learning_rate": 0.0006086600569599877, + "loss": 1.6404, + "step": 4931 + }, + { + "epoch": 0.44746869896570496, + "grad_norm": 0.1010052833260613, + "learning_rate": 0.0006085166367899085, + "loss": 1.5539, + "step": 4932 + }, + { + "epoch": 0.4475594266013428, + "grad_norm": 0.10105196033112161, + "learning_rate": 0.0006083732072494017, + "loss": 1.5688, + "step": 4933 + }, + { + "epoch": 0.44765015423698057, + "grad_norm": 0.10032748997452831, + "learning_rate": 0.0006082297683508524, + "loss": 1.6045, + "step": 4934 + }, + { + "epoch": 0.4477408818726184, + "grad_norm": 0.11779470838510456, + "learning_rate": 
0.0006080863201066467, + "loss": 1.6174, + "step": 4935 + }, + { + "epoch": 0.44783160950825623, + "grad_norm": 0.10272487609381036, + "learning_rate": 0.0006079428625291714, + "loss": 1.5545, + "step": 4936 + }, + { + "epoch": 0.447922337143894, + "grad_norm": 0.10045379863364182, + "learning_rate": 0.000607799395630814, + "loss": 1.6044, + "step": 4937 + }, + { + "epoch": 0.44801306477953184, + "grad_norm": 0.10004225782384467, + "learning_rate": 0.0006076559194239628, + "loss": 1.5779, + "step": 4938 + }, + { + "epoch": 0.4481037924151697, + "grad_norm": 0.10636820119806394, + "learning_rate": 0.0006075124339210071, + "loss": 1.6361, + "step": 4939 + }, + { + "epoch": 0.44819452005080745, + "grad_norm": 0.09957158765956611, + "learning_rate": 0.000607368939134337, + "loss": 1.5753, + "step": 4940 + }, + { + "epoch": 0.4482852476864453, + "grad_norm": 0.09799849077939336, + "learning_rate": 0.0006072254350763432, + "loss": 1.6034, + "step": 4941 + }, + { + "epoch": 0.4483759753220831, + "grad_norm": 0.10213835470322201, + "learning_rate": 0.000607081921759417, + "loss": 1.6376, + "step": 4942 + }, + { + "epoch": 0.4484667029577209, + "grad_norm": 0.10413046875473608, + "learning_rate": 0.0006069383991959514, + "loss": 1.6111, + "step": 4943 + }, + { + "epoch": 0.44855743059335873, + "grad_norm": 0.09961059413254648, + "learning_rate": 0.0006067948673983392, + "loss": 1.6215, + "step": 4944 + }, + { + "epoch": 0.44864815822899656, + "grad_norm": 0.0980624180008852, + "learning_rate": 0.0006066513263789742, + "loss": 1.5329, + "step": 4945 + }, + { + "epoch": 0.44873888586463434, + "grad_norm": 0.0991423547238229, + "learning_rate": 0.0006065077761502518, + "loss": 1.6062, + "step": 4946 + }, + { + "epoch": 0.4488296135002722, + "grad_norm": 0.10069207424446217, + "learning_rate": 0.000606364216724567, + "loss": 1.6254, + "step": 4947 + }, + { + "epoch": 0.44892034113591, + "grad_norm": 0.09958694181857605, + "learning_rate": 0.0006062206481143165, + "loss": 1.663, + "step": 4948 + }, + { + "epoch": 0.44901106877154784, + "grad_norm": 0.099652773407024, + "learning_rate": 0.0006060770703318974, + "loss": 1.5734, + "step": 4949 + }, + { + "epoch": 0.4491017964071856, + "grad_norm": 0.09827991521569368, + "learning_rate": 0.0006059334833897075, + "loss": 1.597, + "step": 4950 + }, + { + "epoch": 0.44919252404282345, + "grad_norm": 0.10040837742879379, + "learning_rate": 0.0006057898873001458, + "loss": 1.6123, + "step": 4951 + }, + { + "epoch": 0.4492832516784613, + "grad_norm": 0.0997085407100611, + "learning_rate": 0.0006056462820756118, + "loss": 1.6083, + "step": 4952 + }, + { + "epoch": 0.44937397931409906, + "grad_norm": 0.09741998381456696, + "learning_rate": 0.0006055026677285058, + "loss": 1.6325, + "step": 4953 + }, + { + "epoch": 0.4494647069497369, + "grad_norm": 0.09872550829623158, + "learning_rate": 0.0006053590442712287, + "loss": 1.6421, + "step": 4954 + }, + { + "epoch": 0.4495554345853747, + "grad_norm": 0.10103035658756873, + "learning_rate": 0.0006052154117161827, + "loss": 1.6348, + "step": 4955 + }, + { + "epoch": 0.4496461622210125, + "grad_norm": 0.09934105238097313, + "learning_rate": 0.0006050717700757704, + "loss": 1.6251, + "step": 4956 + }, + { + "epoch": 0.44973688985665033, + "grad_norm": 0.09837996257474087, + "learning_rate": 0.0006049281193623953, + "loss": 1.5936, + "step": 4957 + }, + { + "epoch": 0.44982761749228817, + "grad_norm": 0.09938524265208952, + "learning_rate": 0.0006047844595884616, + "loss": 1.5967, + "step": 4958 + }, + { + "epoch": 
0.44991834512792594, + "grad_norm": 0.10246910339069124, + "learning_rate": 0.0006046407907663744, + "loss": 1.6176, + "step": 4959 + }, + { + "epoch": 0.4500090727635638, + "grad_norm": 0.10256384230609646, + "learning_rate": 0.0006044971129085395, + "loss": 1.5587, + "step": 4960 + }, + { + "epoch": 0.4500998003992016, + "grad_norm": 0.09916972377590348, + "learning_rate": 0.0006043534260273634, + "loss": 1.637, + "step": 4961 + }, + { + "epoch": 0.4501905280348394, + "grad_norm": 0.1004063278632683, + "learning_rate": 0.0006042097301352534, + "loss": 1.5724, + "step": 4962 + }, + { + "epoch": 0.4502812556704772, + "grad_norm": 0.10179208692231637, + "learning_rate": 0.0006040660252446182, + "loss": 1.5906, + "step": 4963 + }, + { + "epoch": 0.45037198330611505, + "grad_norm": 0.09939066441720473, + "learning_rate": 0.0006039223113678664, + "loss": 1.6066, + "step": 4964 + }, + { + "epoch": 0.45046271094175283, + "grad_norm": 0.09768944044940574, + "learning_rate": 0.0006037785885174076, + "loss": 1.5838, + "step": 4965 + }, + { + "epoch": 0.45055343857739066, + "grad_norm": 0.10201663378465782, + "learning_rate": 0.0006036348567056522, + "loss": 1.561, + "step": 4966 + }, + { + "epoch": 0.4506441662130285, + "grad_norm": 0.09942683589284738, + "learning_rate": 0.0006034911159450118, + "loss": 1.5754, + "step": 4967 + }, + { + "epoch": 0.45073489384866633, + "grad_norm": 0.10397309069722477, + "learning_rate": 0.0006033473662478984, + "loss": 1.6187, + "step": 4968 + }, + { + "epoch": 0.4508256214843041, + "grad_norm": 0.10115265345376848, + "learning_rate": 0.0006032036076267244, + "loss": 1.6053, + "step": 4969 + }, + { + "epoch": 0.45091634911994194, + "grad_norm": 0.09967811175813175, + "learning_rate": 0.000603059840093904, + "loss": 1.5734, + "step": 4970 + }, + { + "epoch": 0.45100707675557977, + "grad_norm": 0.09907689860820443, + "learning_rate": 0.0006029160636618511, + "loss": 1.6297, + "step": 4971 + }, + { + "epoch": 0.45109780439121755, + "grad_norm": 0.10015009228086233, + "learning_rate": 0.0006027722783429807, + "loss": 1.5753, + "step": 4972 + }, + { + "epoch": 0.4511885320268554, + "grad_norm": 0.10084760085457051, + "learning_rate": 0.0006026284841497092, + "loss": 1.5867, + "step": 4973 + }, + { + "epoch": 0.4512792596624932, + "grad_norm": 0.10099530764950679, + "learning_rate": 0.0006024846810944529, + "loss": 1.6429, + "step": 4974 + }, + { + "epoch": 0.451369987298131, + "grad_norm": 0.10015481897283009, + "learning_rate": 0.0006023408691896294, + "loss": 1.5889, + "step": 4975 + }, + { + "epoch": 0.4514607149337688, + "grad_norm": 0.10292020765623737, + "learning_rate": 0.0006021970484476565, + "loss": 1.6156, + "step": 4976 + }, + { + "epoch": 0.45155144256940666, + "grad_norm": 0.10561678468884113, + "learning_rate": 0.0006020532188809536, + "loss": 1.6179, + "step": 4977 + }, + { + "epoch": 0.45164217020504444, + "grad_norm": 0.10590399200137468, + "learning_rate": 0.0006019093805019401, + "loss": 1.589, + "step": 4978 + }, + { + "epoch": 0.45173289784068227, + "grad_norm": 0.10279298638641098, + "learning_rate": 0.0006017655333230366, + "loss": 1.5734, + "step": 4979 + }, + { + "epoch": 0.4518236254763201, + "grad_norm": 0.10401868589095399, + "learning_rate": 0.0006016216773566643, + "loss": 1.5652, + "step": 4980 + }, + { + "epoch": 0.4519143531119579, + "grad_norm": 0.09977685032163693, + "learning_rate": 0.0006014778126152452, + "loss": 1.5947, + "step": 4981 + }, + { + "epoch": 0.4520050807475957, + "grad_norm": 0.1006982976707464, + "learning_rate": 
0.000601333939111202, + "loss": 1.6038, + "step": 4982 + }, + { + "epoch": 0.45209580838323354, + "grad_norm": 0.10181330947647739, + "learning_rate": 0.0006011900568569584, + "loss": 1.6368, + "step": 4983 + }, + { + "epoch": 0.4521865360188713, + "grad_norm": 0.0998125402995899, + "learning_rate": 0.0006010461658649383, + "loss": 1.6126, + "step": 4984 + }, + { + "epoch": 0.45227726365450915, + "grad_norm": 0.09615095152731305, + "learning_rate": 0.0006009022661475668, + "loss": 1.6426, + "step": 4985 + }, + { + "epoch": 0.452367991290147, + "grad_norm": 0.09995004045959034, + "learning_rate": 0.0006007583577172698, + "loss": 1.5752, + "step": 4986 + }, + { + "epoch": 0.4524587189257848, + "grad_norm": 0.10013070176015794, + "learning_rate": 0.000600614440586474, + "loss": 1.6283, + "step": 4987 + }, + { + "epoch": 0.4525494465614226, + "grad_norm": 0.09908874932928848, + "learning_rate": 0.0006004705147676061, + "loss": 1.5915, + "step": 4988 + }, + { + "epoch": 0.45264017419706043, + "grad_norm": 0.09737181215985334, + "learning_rate": 0.0006003265802730946, + "loss": 1.6298, + "step": 4989 + }, + { + "epoch": 0.45273090183269826, + "grad_norm": 0.10206192812988855, + "learning_rate": 0.0006001826371153682, + "loss": 1.5974, + "step": 4990 + }, + { + "epoch": 0.45282162946833604, + "grad_norm": 0.10334895817082111, + "learning_rate": 0.0006000386853068561, + "loss": 1.5802, + "step": 4991 + }, + { + "epoch": 0.4529123571039739, + "grad_norm": 0.09855919139468201, + "learning_rate": 0.000599894724859989, + "loss": 1.602, + "step": 4992 + }, + { + "epoch": 0.4530030847396117, + "grad_norm": 0.10191330382706647, + "learning_rate": 0.0005997507557871975, + "loss": 1.6024, + "step": 4993 + }, + { + "epoch": 0.4530938123752495, + "grad_norm": 0.10312578430553408, + "learning_rate": 0.0005996067781009137, + "loss": 1.5982, + "step": 4994 + }, + { + "epoch": 0.4531845400108873, + "grad_norm": 0.09850412081096871, + "learning_rate": 0.0005994627918135697, + "loss": 1.6196, + "step": 4995 + }, + { + "epoch": 0.45327526764652515, + "grad_norm": 0.09583682535783962, + "learning_rate": 0.0005993187969375992, + "loss": 1.6151, + "step": 4996 + }, + { + "epoch": 0.4533659952821629, + "grad_norm": 0.1062918538212457, + "learning_rate": 0.0005991747934854358, + "loss": 1.5413, + "step": 4997 + }, + { + "epoch": 0.45345672291780076, + "grad_norm": 0.09771885748270408, + "learning_rate": 0.0005990307814695144, + "loss": 1.636, + "step": 4998 + }, + { + "epoch": 0.4535474505534386, + "grad_norm": 0.10091919371096755, + "learning_rate": 0.0005988867609022705, + "loss": 1.6162, + "step": 4999 + }, + { + "epoch": 0.45363817818907637, + "grad_norm": 0.09738950901447609, + "learning_rate": 0.0005987427317961403, + "loss": 1.592, + "step": 5000 + }, + { + "epoch": 0.4537289058247142, + "grad_norm": 0.10277351318147405, + "learning_rate": 0.0005985986941635603, + "loss": 1.6063, + "step": 5001 + }, + { + "epoch": 0.45381963346035203, + "grad_norm": 0.10314264723782465, + "learning_rate": 0.0005984546480169688, + "loss": 1.6233, + "step": 5002 + }, + { + "epoch": 0.4539103610959898, + "grad_norm": 0.10229513441080805, + "learning_rate": 0.0005983105933688039, + "loss": 1.6437, + "step": 5003 + }, + { + "epoch": 0.45400108873162764, + "grad_norm": 0.10269587367314163, + "learning_rate": 0.0005981665302315047, + "loss": 1.649, + "step": 5004 + }, + { + "epoch": 0.4540918163672655, + "grad_norm": 0.10554731090486315, + "learning_rate": 0.0005980224586175113, + "loss": 1.5632, + "step": 5005 + }, + { + "epoch": 
0.4541825440029033, + "grad_norm": 0.10780592491459857, + "learning_rate": 0.000597878378539264, + "loss": 1.5898, + "step": 5006 + }, + { + "epoch": 0.4542732716385411, + "grad_norm": 0.10332776523036245, + "learning_rate": 0.0005977342900092044, + "loss": 1.6325, + "step": 5007 + }, + { + "epoch": 0.4543639992741789, + "grad_norm": 0.10594452809294445, + "learning_rate": 0.0005975901930397742, + "loss": 1.631, + "step": 5008 + }, + { + "epoch": 0.45445472690981675, + "grad_norm": 0.10851177570363787, + "learning_rate": 0.0005974460876434168, + "loss": 1.6072, + "step": 5009 + }, + { + "epoch": 0.45454545454545453, + "grad_norm": 0.10117116836072407, + "learning_rate": 0.0005973019738325752, + "loss": 1.6319, + "step": 5010 + }, + { + "epoch": 0.45463618218109236, + "grad_norm": 0.10063647453564323, + "learning_rate": 0.0005971578516196938, + "loss": 1.5885, + "step": 5011 + }, + { + "epoch": 0.4547269098167302, + "grad_norm": 0.09510986664061002, + "learning_rate": 0.0005970137210172176, + "loss": 1.6037, + "step": 5012 + }, + { + "epoch": 0.454817637452368, + "grad_norm": 0.1011085927367238, + "learning_rate": 0.0005968695820375924, + "loss": 1.6641, + "step": 5013 + }, + { + "epoch": 0.4549083650880058, + "grad_norm": 0.09942115103740512, + "learning_rate": 0.0005967254346932644, + "loss": 1.607, + "step": 5014 + }, + { + "epoch": 0.45499909272364364, + "grad_norm": 0.10073800260133366, + "learning_rate": 0.0005965812789966809, + "loss": 1.5813, + "step": 5015 + }, + { + "epoch": 0.4550898203592814, + "grad_norm": 0.1006852501033161, + "learning_rate": 0.0005964371149602898, + "loss": 1.5835, + "step": 5016 + }, + { + "epoch": 0.45518054799491925, + "grad_norm": 0.09981930285825624, + "learning_rate": 0.0005962929425965395, + "loss": 1.6158, + "step": 5017 + }, + { + "epoch": 0.4552712756305571, + "grad_norm": 0.10166652503302964, + "learning_rate": 0.0005961487619178794, + "loss": 1.6111, + "step": 5018 + }, + { + "epoch": 0.45536200326619486, + "grad_norm": 0.1029893806117222, + "learning_rate": 0.0005960045729367596, + "loss": 1.5934, + "step": 5019 + }, + { + "epoch": 0.4554527309018327, + "grad_norm": 0.10047451613763553, + "learning_rate": 0.0005958603756656307, + "loss": 1.5952, + "step": 5020 + }, + { + "epoch": 0.4555434585374705, + "grad_norm": 0.10306394056369332, + "learning_rate": 0.0005957161701169442, + "loss": 1.5606, + "step": 5021 + }, + { + "epoch": 0.4556341861731083, + "grad_norm": 0.10869240222041014, + "learning_rate": 0.0005955719563031524, + "loss": 1.6551, + "step": 5022 + }, + { + "epoch": 0.45572491380874613, + "grad_norm": 0.10182120693022821, + "learning_rate": 0.0005954277342367082, + "loss": 1.6243, + "step": 5023 + }, + { + "epoch": 0.45581564144438397, + "grad_norm": 0.09817495479406618, + "learning_rate": 0.0005952835039300648, + "loss": 1.6179, + "step": 5024 + }, + { + "epoch": 0.4559063690800218, + "grad_norm": 0.09677867602301478, + "learning_rate": 0.000595139265395677, + "loss": 1.585, + "step": 5025 + }, + { + "epoch": 0.4559970967156596, + "grad_norm": 0.1051818143457317, + "learning_rate": 0.0005949950186459996, + "loss": 1.6212, + "step": 5026 + }, + { + "epoch": 0.4560878243512974, + "grad_norm": 0.10507502368897616, + "learning_rate": 0.0005948507636934883, + "loss": 1.5734, + "step": 5027 + }, + { + "epoch": 0.45617855198693524, + "grad_norm": 0.10043362907450756, + "learning_rate": 0.0005947065005505996, + "loss": 1.6062, + "step": 5028 + }, + { + "epoch": 0.456269279622573, + "grad_norm": 0.0997618100737745, + "learning_rate": 
0.0005945622292297905, + "loss": 1.5956, + "step": 5029 + }, + { + "epoch": 0.45636000725821085, + "grad_norm": 0.11018702751719586, + "learning_rate": 0.0005944179497435192, + "loss": 1.6305, + "step": 5030 + }, + { + "epoch": 0.4564507348938487, + "grad_norm": 0.1029076211227094, + "learning_rate": 0.0005942736621042439, + "loss": 1.6045, + "step": 5031 + }, + { + "epoch": 0.45654146252948646, + "grad_norm": 0.10246772904281046, + "learning_rate": 0.0005941293663244238, + "loss": 1.6242, + "step": 5032 + }, + { + "epoch": 0.4566321901651243, + "grad_norm": 0.10439708830016005, + "learning_rate": 0.0005939850624165193, + "loss": 1.6226, + "step": 5033 + }, + { + "epoch": 0.45672291780076213, + "grad_norm": 0.10171887591578982, + "learning_rate": 0.0005938407503929908, + "loss": 1.6117, + "step": 5034 + }, + { + "epoch": 0.4568136454363999, + "grad_norm": 0.10519066636294397, + "learning_rate": 0.0005936964302662995, + "loss": 1.5842, + "step": 5035 + }, + { + "epoch": 0.45690437307203774, + "grad_norm": 0.10490675886884698, + "learning_rate": 0.0005935521020489077, + "loss": 1.6136, + "step": 5036 + }, + { + "epoch": 0.45699510070767557, + "grad_norm": 0.0968325831638475, + "learning_rate": 0.0005934077657532782, + "loss": 1.636, + "step": 5037 + }, + { + "epoch": 0.45708582834331335, + "grad_norm": 0.10277602896332826, + "learning_rate": 0.0005932634213918744, + "loss": 1.6216, + "step": 5038 + }, + { + "epoch": 0.4571765559789512, + "grad_norm": 0.10088962635047316, + "learning_rate": 0.0005931190689771603, + "loss": 1.6112, + "step": 5039 + }, + { + "epoch": 0.457267283614589, + "grad_norm": 0.11306758950560482, + "learning_rate": 0.0005929747085216008, + "loss": 1.5947, + "step": 5040 + }, + { + "epoch": 0.4573580112502268, + "grad_norm": 0.10160615498236587, + "learning_rate": 0.0005928303400376616, + "loss": 1.6227, + "step": 5041 + }, + { + "epoch": 0.4574487388858646, + "grad_norm": 0.10336239271808634, + "learning_rate": 0.0005926859635378088, + "loss": 1.6208, + "step": 5042 + }, + { + "epoch": 0.45753946652150246, + "grad_norm": 0.09881059673705525, + "learning_rate": 0.0005925415790345095, + "loss": 1.6099, + "step": 5043 + }, + { + "epoch": 0.4576301941571403, + "grad_norm": 0.09690045823969878, + "learning_rate": 0.0005923971865402311, + "loss": 1.588, + "step": 5044 + }, + { + "epoch": 0.45772092179277807, + "grad_norm": 0.1023027248977483, + "learning_rate": 0.0005922527860674421, + "loss": 1.5912, + "step": 5045 + }, + { + "epoch": 0.4578116494284159, + "grad_norm": 0.09886296818979305, + "learning_rate": 0.0005921083776286115, + "loss": 1.5596, + "step": 5046 + }, + { + "epoch": 0.45790237706405373, + "grad_norm": 0.10340074104821595, + "learning_rate": 0.0005919639612362086, + "loss": 1.635, + "step": 5047 + }, + { + "epoch": 0.4579931046996915, + "grad_norm": 0.1049364611240682, + "learning_rate": 0.0005918195369027044, + "loss": 1.612, + "step": 5048 + }, + { + "epoch": 0.45808383233532934, + "grad_norm": 0.10341248886627892, + "learning_rate": 0.0005916751046405696, + "loss": 1.5953, + "step": 5049 + }, + { + "epoch": 0.4581745599709672, + "grad_norm": 0.09764275289781872, + "learning_rate": 0.0005915306644622759, + "loss": 1.6064, + "step": 5050 + }, + { + "epoch": 0.45826528760660495, + "grad_norm": 0.0961095293957751, + "learning_rate": 0.0005913862163802959, + "loss": 1.5848, + "step": 5051 + }, + { + "epoch": 0.4583560152422428, + "grad_norm": 0.10176729931138295, + "learning_rate": 0.0005912417604071027, + "loss": 1.5892, + "step": 5052 + }, + { + "epoch": 
0.4584467428778806, + "grad_norm": 0.09863831362189636, + "learning_rate": 0.0005910972965551701, + "loss": 1.6128, + "step": 5053 + }, + { + "epoch": 0.4585374705135184, + "grad_norm": 0.0988145742396109, + "learning_rate": 0.0005909528248369723, + "loss": 1.5843, + "step": 5054 + }, + { + "epoch": 0.45862819814915623, + "grad_norm": 0.09819018990175635, + "learning_rate": 0.0005908083452649849, + "loss": 1.5877, + "step": 5055 + }, + { + "epoch": 0.45871892578479406, + "grad_norm": 0.10089747912069363, + "learning_rate": 0.0005906638578516833, + "loss": 1.6044, + "step": 5056 + }, + { + "epoch": 0.45880965342043184, + "grad_norm": 0.09911988463168923, + "learning_rate": 0.0005905193626095442, + "loss": 1.5954, + "step": 5057 + }, + { + "epoch": 0.4589003810560697, + "grad_norm": 0.09956054138470874, + "learning_rate": 0.000590374859551045, + "loss": 1.5857, + "step": 5058 + }, + { + "epoch": 0.4589911086917075, + "grad_norm": 0.1060093841760565, + "learning_rate": 0.0005902303486886631, + "loss": 1.6552, + "step": 5059 + }, + { + "epoch": 0.4590818363273453, + "grad_norm": 0.09765993980752619, + "learning_rate": 0.0005900858300348776, + "loss": 1.6131, + "step": 5060 + }, + { + "epoch": 0.4591725639629831, + "grad_norm": 0.10048602992081156, + "learning_rate": 0.0005899413036021672, + "loss": 1.6157, + "step": 5061 + }, + { + "epoch": 0.45926329159862095, + "grad_norm": 0.09981640668127448, + "learning_rate": 0.000589796769403012, + "loss": 1.627, + "step": 5062 + }, + { + "epoch": 0.4593540192342588, + "grad_norm": 0.09970692409854268, + "learning_rate": 0.0005896522274498926, + "loss": 1.5834, + "step": 5063 + }, + { + "epoch": 0.45944474686989656, + "grad_norm": 0.11125996431299023, + "learning_rate": 0.00058950767775529, + "loss": 1.5783, + "step": 5064 + }, + { + "epoch": 0.4595354745055344, + "grad_norm": 0.09970871605224198, + "learning_rate": 0.0005893631203316865, + "loss": 1.6245, + "step": 5065 + }, + { + "epoch": 0.4596262021411722, + "grad_norm": 0.10176844545612743, + "learning_rate": 0.0005892185551915641, + "loss": 1.5859, + "step": 5066 + }, + { + "epoch": 0.45971692977681, + "grad_norm": 0.09679535509004046, + "learning_rate": 0.0005890739823474064, + "loss": 1.5832, + "step": 5067 + }, + { + "epoch": 0.45980765741244783, + "grad_norm": 0.09758441940763943, + "learning_rate": 0.0005889294018116972, + "loss": 1.5812, + "step": 5068 + }, + { + "epoch": 0.45989838504808567, + "grad_norm": 0.09983785299576527, + "learning_rate": 0.000588784813596921, + "loss": 1.595, + "step": 5069 + }, + { + "epoch": 0.45998911268372344, + "grad_norm": 0.10282839405029061, + "learning_rate": 0.0005886402177155633, + "loss": 1.6123, + "step": 5070 + }, + { + "epoch": 0.4600798403193613, + "grad_norm": 0.11017335470220753, + "learning_rate": 0.0005884956141801094, + "loss": 1.6138, + "step": 5071 + }, + { + "epoch": 0.4601705679549991, + "grad_norm": 0.10476586973281618, + "learning_rate": 0.0005883510030030465, + "loss": 1.5959, + "step": 5072 + }, + { + "epoch": 0.4602612955906369, + "grad_norm": 0.10072564965441307, + "learning_rate": 0.0005882063841968613, + "loss": 1.6098, + "step": 5073 + }, + { + "epoch": 0.4603520232262747, + "grad_norm": 0.10058782384623778, + "learning_rate": 0.0005880617577740418, + "loss": 1.6418, + "step": 5074 + }, + { + "epoch": 0.46044275086191255, + "grad_norm": 0.10231526214709166, + "learning_rate": 0.0005879171237470765, + "loss": 1.5897, + "step": 5075 + }, + { + "epoch": 0.46053347849755033, + "grad_norm": 0.09611969683433438, + "learning_rate": 
0.0005877724821284546, + "loss": 1.6061, + "step": 5076 + }, + { + "epoch": 0.46062420613318816, + "grad_norm": 0.09925325589452752, + "learning_rate": 0.0005876278329306661, + "loss": 1.6132, + "step": 5077 + }, + { + "epoch": 0.460714933768826, + "grad_norm": 0.09927522013912682, + "learning_rate": 0.0005874831761662013, + "loss": 1.6122, + "step": 5078 + }, + { + "epoch": 0.4608056614044638, + "grad_norm": 0.09728411842025016, + "learning_rate": 0.0005873385118475511, + "loss": 1.5852, + "step": 5079 + }, + { + "epoch": 0.4608963890401016, + "grad_norm": 0.0975775870268435, + "learning_rate": 0.0005871938399872078, + "loss": 1.5975, + "step": 5080 + }, + { + "epoch": 0.46098711667573944, + "grad_norm": 0.10023994877741255, + "learning_rate": 0.0005870491605976634, + "loss": 1.6175, + "step": 5081 + }, + { + "epoch": 0.46107784431137727, + "grad_norm": 0.10495317930462787, + "learning_rate": 0.0005869044736914113, + "loss": 1.5639, + "step": 5082 + }, + { + "epoch": 0.46116857194701505, + "grad_norm": 0.10171978858525797, + "learning_rate": 0.000586759779280945, + "loss": 1.5827, + "step": 5083 + }, + { + "epoch": 0.4612592995826529, + "grad_norm": 0.10810412008811185, + "learning_rate": 0.0005866150773787589, + "loss": 1.5895, + "step": 5084 + }, + { + "epoch": 0.4613500272182907, + "grad_norm": 0.10163210228357057, + "learning_rate": 0.0005864703679973482, + "loss": 1.5826, + "step": 5085 + }, + { + "epoch": 0.4614407548539285, + "grad_norm": 0.10103870276778214, + "learning_rate": 0.0005863256511492083, + "loss": 1.6195, + "step": 5086 + }, + { + "epoch": 0.4615314824895663, + "grad_norm": 0.099600809390084, + "learning_rate": 0.000586180926846836, + "loss": 1.6319, + "step": 5087 + }, + { + "epoch": 0.46162221012520416, + "grad_norm": 0.09773410771914007, + "learning_rate": 0.0005860361951027278, + "loss": 1.5386, + "step": 5088 + }, + { + "epoch": 0.46171293776084193, + "grad_norm": 0.10161891642266177, + "learning_rate": 0.0005858914559293814, + "loss": 1.5941, + "step": 5089 + }, + { + "epoch": 0.46180366539647977, + "grad_norm": 0.10163060633390021, + "learning_rate": 0.0005857467093392951, + "loss": 1.6575, + "step": 5090 + }, + { + "epoch": 0.4618943930321176, + "grad_norm": 0.10083860995400273, + "learning_rate": 0.0005856019553449681, + "loss": 1.6074, + "step": 5091 + }, + { + "epoch": 0.4619851206677554, + "grad_norm": 0.09913605032877922, + "learning_rate": 0.0005854571939588996, + "loss": 1.614, + "step": 5092 + }, + { + "epoch": 0.4620758483033932, + "grad_norm": 0.1021161409867291, + "learning_rate": 0.0005853124251935895, + "loss": 1.571, + "step": 5093 + }, + { + "epoch": 0.46216657593903104, + "grad_norm": 0.09405225499145299, + "learning_rate": 0.000585167649061539, + "loss": 1.594, + "step": 5094 + }, + { + "epoch": 0.4622573035746688, + "grad_norm": 0.10067427257383178, + "learning_rate": 0.0005850228655752496, + "loss": 1.6452, + "step": 5095 + }, + { + "epoch": 0.46234803121030665, + "grad_norm": 0.09860701154315482, + "learning_rate": 0.0005848780747472231, + "loss": 1.6024, + "step": 5096 + }, + { + "epoch": 0.4624387588459445, + "grad_norm": 0.09818963136329383, + "learning_rate": 0.0005847332765899626, + "loss": 1.6286, + "step": 5097 + }, + { + "epoch": 0.46252948648158226, + "grad_norm": 0.09897519217286159, + "learning_rate": 0.0005845884711159708, + "loss": 1.557, + "step": 5098 + }, + { + "epoch": 0.4626202141172201, + "grad_norm": 0.149172309229154, + "learning_rate": 0.0005844436583377523, + "loss": 1.6122, + "step": 5099 + }, + { + "epoch": 
0.46271094175285793, + "grad_norm": 0.09919015035106084, + "learning_rate": 0.0005842988382678114, + "loss": 1.598, + "step": 5100 + }, + { + "epoch": 0.46280166938849576, + "grad_norm": 0.16964167711719547, + "learning_rate": 0.0005841540109186533, + "loss": 1.5755, + "step": 5101 + }, + { + "epoch": 0.46289239702413354, + "grad_norm": 1.103539836800613, + "learning_rate": 0.000584009176302784, + "loss": 1.7234, + "step": 5102 + }, + { + "epoch": 0.4629831246597714, + "grad_norm": 0.10372125732453207, + "learning_rate": 0.00058386433443271, + "loss": 1.5557, + "step": 5103 + }, + { + "epoch": 0.4630738522954092, + "grad_norm": 0.11306959895004295, + "learning_rate": 0.0005837194853209384, + "loss": 1.6201, + "step": 5104 + }, + { + "epoch": 0.463164579931047, + "grad_norm": 0.11690276066424798, + "learning_rate": 0.0005835746289799768, + "loss": 1.6034, + "step": 5105 + }, + { + "epoch": 0.4632553075666848, + "grad_norm": 0.1145084912036242, + "learning_rate": 0.0005834297654223337, + "loss": 1.5671, + "step": 5106 + }, + { + "epoch": 0.46334603520232265, + "grad_norm": 0.1125404417983045, + "learning_rate": 0.0005832848946605181, + "loss": 1.5787, + "step": 5107 + }, + { + "epoch": 0.4634367628379604, + "grad_norm": 0.11803456812817109, + "learning_rate": 0.0005831400167070394, + "loss": 1.626, + "step": 5108 + }, + { + "epoch": 0.46352749047359826, + "grad_norm": 0.1136132906883892, + "learning_rate": 0.0005829951315744083, + "loss": 1.6289, + "step": 5109 + }, + { + "epoch": 0.4636182181092361, + "grad_norm": 0.11453742232494601, + "learning_rate": 0.0005828502392751351, + "loss": 1.6274, + "step": 5110 + }, + { + "epoch": 0.46370894574487387, + "grad_norm": 0.11837454255372433, + "learning_rate": 0.0005827053398217317, + "loss": 1.6321, + "step": 5111 + }, + { + "epoch": 0.4637996733805117, + "grad_norm": 0.11146045004530743, + "learning_rate": 0.0005825604332267098, + "loss": 1.636, + "step": 5112 + }, + { + "epoch": 0.46389040101614953, + "grad_norm": 0.10865787216260181, + "learning_rate": 0.0005824155195025825, + "loss": 1.6174, + "step": 5113 + }, + { + "epoch": 0.4639811286517873, + "grad_norm": 0.11192248022108356, + "learning_rate": 0.0005822705986618629, + "loss": 1.6359, + "step": 5114 + }, + { + "epoch": 0.46407185628742514, + "grad_norm": 0.1212493886327241, + "learning_rate": 0.000582125670717065, + "loss": 1.6019, + "step": 5115 + }, + { + "epoch": 0.464162583923063, + "grad_norm": 0.1127272240295738, + "learning_rate": 0.0005819807356807034, + "loss": 1.5965, + "step": 5116 + }, + { + "epoch": 0.46425331155870075, + "grad_norm": 0.10978337898818075, + "learning_rate": 0.000581835793565293, + "loss": 1.5815, + "step": 5117 + }, + { + "epoch": 0.4643440391943386, + "grad_norm": 0.1093847991733921, + "learning_rate": 0.0005816908443833499, + "loss": 1.6375, + "step": 5118 + }, + { + "epoch": 0.4644347668299764, + "grad_norm": 0.11187774228171916, + "learning_rate": 0.0005815458881473903, + "loss": 1.6115, + "step": 5119 + }, + { + "epoch": 0.46452549446561425, + "grad_norm": 0.10690139924217051, + "learning_rate": 0.0005814009248699312, + "loss": 1.5992, + "step": 5120 + }, + { + "epoch": 0.46461622210125203, + "grad_norm": 0.11200341010295299, + "learning_rate": 0.0005812559545634903, + "loss": 1.5901, + "step": 5121 + }, + { + "epoch": 0.46470694973688986, + "grad_norm": 0.1087853242065912, + "learning_rate": 0.0005811109772405858, + "loss": 1.6443, + "step": 5122 + }, + { + "epoch": 0.4647976773725277, + "grad_norm": 0.11610126417420014, + "learning_rate": 
0.0005809659929137363, + "loss": 1.6212, + "step": 5123 + }, + { + "epoch": 0.4648884050081655, + "grad_norm": 0.10581209931802354, + "learning_rate": 0.0005808210015954616, + "loss": 1.6538, + "step": 5124 + }, + { + "epoch": 0.4649791326438033, + "grad_norm": 0.10554784408215306, + "learning_rate": 0.0005806760032982813, + "loss": 1.5928, + "step": 5125 + }, + { + "epoch": 0.46506986027944114, + "grad_norm": 0.10427780435055495, + "learning_rate": 0.0005805309980347164, + "loss": 1.6076, + "step": 5126 + }, + { + "epoch": 0.4651605879150789, + "grad_norm": 0.10544458451630953, + "learning_rate": 0.0005803859858172878, + "loss": 1.5652, + "step": 5127 + }, + { + "epoch": 0.46525131555071675, + "grad_norm": 0.10621837547490787, + "learning_rate": 0.0005802409666585175, + "loss": 1.5984, + "step": 5128 + }, + { + "epoch": 0.4653420431863546, + "grad_norm": 0.10533288293015684, + "learning_rate": 0.000580095940570928, + "loss": 1.6746, + "step": 5129 + }, + { + "epoch": 0.46543277082199236, + "grad_norm": 0.10967749082131424, + "learning_rate": 0.0005799509075670421, + "loss": 1.6405, + "step": 5130 + }, + { + "epoch": 0.4655234984576302, + "grad_norm": 0.10981376455090398, + "learning_rate": 0.0005798058676593837, + "loss": 1.6431, + "step": 5131 + }, + { + "epoch": 0.465614226093268, + "grad_norm": 0.10133898712021674, + "learning_rate": 0.0005796608208604768, + "loss": 1.6546, + "step": 5132 + }, + { + "epoch": 0.4657049537289058, + "grad_norm": 0.10211209007387169, + "learning_rate": 0.0005795157671828463, + "loss": 1.6138, + "step": 5133 + }, + { + "epoch": 0.46579568136454363, + "grad_norm": 0.10478544437413503, + "learning_rate": 0.0005793707066390174, + "loss": 1.6295, + "step": 5134 + }, + { + "epoch": 0.46588640900018147, + "grad_norm": 0.10295682771861804, + "learning_rate": 0.0005792256392415165, + "loss": 1.5748, + "step": 5135 + }, + { + "epoch": 0.46597713663581924, + "grad_norm": 0.10457503086560584, + "learning_rate": 0.00057908056500287, + "loss": 1.6788, + "step": 5136 + }, + { + "epoch": 0.4660678642714571, + "grad_norm": 0.10252208726509797, + "learning_rate": 0.0005789354839356048, + "loss": 1.6274, + "step": 5137 + }, + { + "epoch": 0.4661585919070949, + "grad_norm": 0.10570256375486031, + "learning_rate": 0.0005787903960522492, + "loss": 1.5762, + "step": 5138 + }, + { + "epoch": 0.46624931954273274, + "grad_norm": 0.10469881014998589, + "learning_rate": 0.0005786453013653312, + "loss": 1.6566, + "step": 5139 + }, + { + "epoch": 0.4663400471783705, + "grad_norm": 0.10384971658987834, + "learning_rate": 0.0005785001998873798, + "loss": 1.5957, + "step": 5140 + }, + { + "epoch": 0.46643077481400835, + "grad_norm": 0.10387962719938779, + "learning_rate": 0.0005783550916309244, + "loss": 1.5842, + "step": 5141 + }, + { + "epoch": 0.4665215024496462, + "grad_norm": 0.1085561757723825, + "learning_rate": 0.0005782099766084956, + "loss": 1.6991, + "step": 5142 + }, + { + "epoch": 0.46661223008528396, + "grad_norm": 0.10328714000581239, + "learning_rate": 0.0005780648548326237, + "loss": 1.5759, + "step": 5143 + }, + { + "epoch": 0.4667029577209218, + "grad_norm": 0.10568499481914845, + "learning_rate": 0.00057791972631584, + "loss": 1.5761, + "step": 5144 + }, + { + "epoch": 0.46679368535655963, + "grad_norm": 0.10278404050088728, + "learning_rate": 0.0005777745910706765, + "loss": 1.6442, + "step": 5145 + }, + { + "epoch": 0.4668844129921974, + "grad_norm": 0.10285869579222065, + "learning_rate": 0.0005776294491096657, + "loss": 1.6228, + "step": 5146 + }, + { + "epoch": 
0.46697514062783524, + "grad_norm": 0.10604092367285686, + "learning_rate": 0.0005774843004453403, + "loss": 1.6235, + "step": 5147 + }, + { + "epoch": 0.46706586826347307, + "grad_norm": 0.10879411664190736, + "learning_rate": 0.0005773391450902344, + "loss": 1.6197, + "step": 5148 + }, + { + "epoch": 0.46715659589911085, + "grad_norm": 0.10537448264509874, + "learning_rate": 0.0005771939830568815, + "loss": 1.5768, + "step": 5149 + }, + { + "epoch": 0.4672473235347487, + "grad_norm": 0.10306978430559492, + "learning_rate": 0.0005770488143578172, + "loss": 1.5706, + "step": 5150 + }, + { + "epoch": 0.4673380511703865, + "grad_norm": 0.10805969055999169, + "learning_rate": 0.0005769036390055763, + "loss": 1.6146, + "step": 5151 + }, + { + "epoch": 0.4674287788060243, + "grad_norm": 0.10323266216338788, + "learning_rate": 0.0005767584570126949, + "loss": 1.6149, + "step": 5152 + }, + { + "epoch": 0.4675195064416621, + "grad_norm": 0.10182531918814518, + "learning_rate": 0.0005766132683917093, + "loss": 1.6389, + "step": 5153 + }, + { + "epoch": 0.46761023407729996, + "grad_norm": 0.10609271264738052, + "learning_rate": 0.0005764680731551566, + "loss": 1.607, + "step": 5154 + }, + { + "epoch": 0.46770096171293774, + "grad_norm": 0.10424745351717679, + "learning_rate": 0.0005763228713155748, + "loss": 1.6418, + "step": 5155 + }, + { + "epoch": 0.46779168934857557, + "grad_norm": 0.1071539663542735, + "learning_rate": 0.0005761776628855016, + "loss": 1.5774, + "step": 5156 + }, + { + "epoch": 0.4678824169842134, + "grad_norm": 0.10363119817933295, + "learning_rate": 0.0005760324478774759, + "loss": 1.5745, + "step": 5157 + }, + { + "epoch": 0.46797314461985123, + "grad_norm": 0.1064531283128578, + "learning_rate": 0.0005758872263040373, + "loss": 1.5706, + "step": 5158 + }, + { + "epoch": 0.468063872255489, + "grad_norm": 0.1022136692195427, + "learning_rate": 0.0005757419981777255, + "loss": 1.6104, + "step": 5159 + }, + { + "epoch": 0.46815459989112684, + "grad_norm": 0.10320192253953604, + "learning_rate": 0.000575596763511081, + "loss": 1.5753, + "step": 5160 + }, + { + "epoch": 0.4682453275267647, + "grad_norm": 0.10457759914917195, + "learning_rate": 0.0005754515223166447, + "loss": 1.5912, + "step": 5161 + }, + { + "epoch": 0.46833605516240245, + "grad_norm": 0.10802761901174747, + "learning_rate": 0.0005753062746069585, + "loss": 1.6369, + "step": 5162 + }, + { + "epoch": 0.4684267827980403, + "grad_norm": 0.10142241823765401, + "learning_rate": 0.0005751610203945644, + "loss": 1.5924, + "step": 5163 + }, + { + "epoch": 0.4685175104336781, + "grad_norm": 0.10214251861690188, + "learning_rate": 0.000575015759692005, + "loss": 1.6307, + "step": 5164 + }, + { + "epoch": 0.4686082380693159, + "grad_norm": 0.09764950189879741, + "learning_rate": 0.0005748704925118238, + "loss": 1.6085, + "step": 5165 + }, + { + "epoch": 0.46869896570495373, + "grad_norm": 0.10710965047740739, + "learning_rate": 0.0005747252188665644, + "loss": 1.6054, + "step": 5166 + }, + { + "epoch": 0.46878969334059156, + "grad_norm": 0.10230664853019718, + "learning_rate": 0.0005745799387687714, + "loss": 1.6215, + "step": 5167 + }, + { + "epoch": 0.46888042097622934, + "grad_norm": 0.10176136845367749, + "learning_rate": 0.0005744346522309897, + "loss": 1.6035, + "step": 5168 + }, + { + "epoch": 0.4689711486118672, + "grad_norm": 0.10330011886432315, + "learning_rate": 0.0005742893592657648, + "loss": 1.6335, + "step": 5169 + }, + { + "epoch": 0.469061876247505, + "grad_norm": 0.10215862242339771, + "learning_rate": 
0.0005741440598856428, + "loss": 1.6157, + "step": 5170 + }, + { + "epoch": 0.4691526038831428, + "grad_norm": 0.10232201831049186, + "learning_rate": 0.0005739987541031703, + "loss": 1.6301, + "step": 5171 + }, + { + "epoch": 0.4692433315187806, + "grad_norm": 0.10390007183910353, + "learning_rate": 0.0005738534419308945, + "loss": 1.5794, + "step": 5172 + }, + { + "epoch": 0.46933405915441845, + "grad_norm": 0.10632889337945658, + "learning_rate": 0.000573708123381363, + "loss": 1.636, + "step": 5173 + }, + { + "epoch": 0.4694247867900562, + "grad_norm": 0.10613152334177403, + "learning_rate": 0.0005735627984671242, + "loss": 1.6448, + "step": 5174 + }, + { + "epoch": 0.46951551442569406, + "grad_norm": 0.10377882833554956, + "learning_rate": 0.0005734174672007271, + "loss": 1.6039, + "step": 5175 + }, + { + "epoch": 0.4696062420613319, + "grad_norm": 0.09893752095509949, + "learning_rate": 0.0005732721295947206, + "loss": 1.6227, + "step": 5176 + }, + { + "epoch": 0.4696969696969697, + "grad_norm": 0.10292392224433947, + "learning_rate": 0.0005731267856616551, + "loss": 1.5984, + "step": 5177 + }, + { + "epoch": 0.4697876973326075, + "grad_norm": 0.10096413561924278, + "learning_rate": 0.0005729814354140808, + "loss": 1.6065, + "step": 5178 + }, + { + "epoch": 0.46987842496824533, + "grad_norm": 0.09929486801642051, + "learning_rate": 0.000572836078864549, + "loss": 1.6136, + "step": 5179 + }, + { + "epoch": 0.46996915260388317, + "grad_norm": 0.09941286895015747, + "learning_rate": 0.0005726907160256107, + "loss": 1.5928, + "step": 5180 + }, + { + "epoch": 0.47005988023952094, + "grad_norm": 0.09979625893487112, + "learning_rate": 0.0005725453469098186, + "loss": 1.596, + "step": 5181 + }, + { + "epoch": 0.4701506078751588, + "grad_norm": 0.09771880303117488, + "learning_rate": 0.0005723999715297251, + "loss": 1.5929, + "step": 5182 + }, + { + "epoch": 0.4702413355107966, + "grad_norm": 0.10072369680207482, + "learning_rate": 0.0005722545898978834, + "loss": 1.5734, + "step": 5183 + }, + { + "epoch": 0.4703320631464344, + "grad_norm": 0.1052218648237784, + "learning_rate": 0.0005721092020268471, + "loss": 1.5959, + "step": 5184 + }, + { + "epoch": 0.4704227907820722, + "grad_norm": 0.10421207042027542, + "learning_rate": 0.0005719638079291706, + "loss": 1.6092, + "step": 5185 + }, + { + "epoch": 0.47051351841771005, + "grad_norm": 0.09767521752579479, + "learning_rate": 0.0005718184076174087, + "loss": 1.6569, + "step": 5186 + }, + { + "epoch": 0.47060424605334783, + "grad_norm": 0.10229658667791994, + "learning_rate": 0.0005716730011041168, + "loss": 1.6133, + "step": 5187 + }, + { + "epoch": 0.47069497368898566, + "grad_norm": 0.10439221267379697, + "learning_rate": 0.0005715275884018505, + "loss": 1.6039, + "step": 5188 + }, + { + "epoch": 0.4707857013246235, + "grad_norm": 0.09906717412600465, + "learning_rate": 0.0005713821695231666, + "loss": 1.6149, + "step": 5189 + }, + { + "epoch": 0.4708764289602613, + "grad_norm": 0.09859927471212075, + "learning_rate": 0.0005712367444806217, + "loss": 1.6067, + "step": 5190 + }, + { + "epoch": 0.4709671565958991, + "grad_norm": 0.10015408382706471, + "learning_rate": 0.0005710913132867734, + "loss": 1.625, + "step": 5191 + }, + { + "epoch": 0.47105788423153694, + "grad_norm": 0.10264045522799797, + "learning_rate": 0.0005709458759541799, + "loss": 1.5968, + "step": 5192 + }, + { + "epoch": 0.4711486118671747, + "grad_norm": 0.10188157763986573, + "learning_rate": 0.0005708004324953994, + "loss": 1.6076, + "step": 5193 + }, + { + "epoch": 
0.47123933950281255, + "grad_norm": 0.10317359786161508, + "learning_rate": 0.0005706549829229912, + "loss": 1.5686, + "step": 5194 + }, + { + "epoch": 0.4713300671384504, + "grad_norm": 0.10125708116365777, + "learning_rate": 0.0005705095272495146, + "loss": 1.6107, + "step": 5195 + }, + { + "epoch": 0.4714207947740882, + "grad_norm": 0.10032433475826967, + "learning_rate": 0.0005703640654875302, + "loss": 1.6058, + "step": 5196 + }, + { + "epoch": 0.471511522409726, + "grad_norm": 0.1020967194171185, + "learning_rate": 0.0005702185976495984, + "loss": 1.6217, + "step": 5197 + }, + { + "epoch": 0.4716022500453638, + "grad_norm": 0.11929505136479003, + "learning_rate": 0.0005700731237482801, + "loss": 1.5787, + "step": 5198 + }, + { + "epoch": 0.47169297768100166, + "grad_norm": 0.10222682826992431, + "learning_rate": 0.0005699276437961374, + "loss": 1.5713, + "step": 5199 + }, + { + "epoch": 0.47178370531663943, + "grad_norm": 0.10352327067292807, + "learning_rate": 0.0005697821578057323, + "loss": 1.6122, + "step": 5200 + }, + { + "epoch": 0.47187443295227727, + "grad_norm": 0.10726113405107299, + "learning_rate": 0.0005696366657896276, + "loss": 1.6126, + "step": 5201 + }, + { + "epoch": 0.4719651605879151, + "grad_norm": 0.10200086160342504, + "learning_rate": 0.0005694911677603865, + "loss": 1.6232, + "step": 5202 + }, + { + "epoch": 0.4720558882235529, + "grad_norm": 0.10477905344730222, + "learning_rate": 0.0005693456637305729, + "loss": 1.604, + "step": 5203 + }, + { + "epoch": 0.4721466158591907, + "grad_norm": 0.09835960066109867, + "learning_rate": 0.000569200153712751, + "loss": 1.5924, + "step": 5204 + }, + { + "epoch": 0.47223734349482854, + "grad_norm": 0.10008876426139064, + "learning_rate": 0.0005690546377194857, + "loss": 1.6085, + "step": 5205 + }, + { + "epoch": 0.4723280711304663, + "grad_norm": 0.10237294891987911, + "learning_rate": 0.000568909115763342, + "loss": 1.6081, + "step": 5206 + }, + { + "epoch": 0.47241879876610415, + "grad_norm": 0.10259295073370367, + "learning_rate": 0.0005687635878568862, + "loss": 1.591, + "step": 5207 + }, + { + "epoch": 0.472509526401742, + "grad_norm": 0.10155601202301114, + "learning_rate": 0.0005686180540126844, + "loss": 1.6508, + "step": 5208 + }, + { + "epoch": 0.47260025403737976, + "grad_norm": 0.10691599109241846, + "learning_rate": 0.0005684725142433036, + "loss": 1.5929, + "step": 5209 + }, + { + "epoch": 0.4726909816730176, + "grad_norm": 0.10073964755205482, + "learning_rate": 0.0005683269685613111, + "loss": 1.6341, + "step": 5210 + }, + { + "epoch": 0.47278170930865543, + "grad_norm": 0.09950466360478223, + "learning_rate": 0.0005681814169792747, + "loss": 1.5865, + "step": 5211 + }, + { + "epoch": 0.4728724369442932, + "grad_norm": 0.09806597822943074, + "learning_rate": 0.0005680358595097629, + "loss": 1.5874, + "step": 5212 + }, + { + "epoch": 0.47296316457993104, + "grad_norm": 0.1002407018157081, + "learning_rate": 0.0005678902961653446, + "loss": 1.615, + "step": 5213 + }, + { + "epoch": 0.47305389221556887, + "grad_norm": 0.0978595843169049, + "learning_rate": 0.0005677447269585894, + "loss": 1.607, + "step": 5214 + }, + { + "epoch": 0.4731446198512067, + "grad_norm": 0.0970218413267668, + "learning_rate": 0.0005675991519020668, + "loss": 1.6525, + "step": 5215 + }, + { + "epoch": 0.4732353474868445, + "grad_norm": 0.0986896399193735, + "learning_rate": 0.0005674535710083476, + "loss": 1.6148, + "step": 5216 + }, + { + "epoch": 0.4733260751224823, + "grad_norm": 0.10238443696831483, + "learning_rate": 
0.0005673079842900027, + "loss": 1.5716, + "step": 5217 + }, + { + "epoch": 0.47341680275812015, + "grad_norm": 0.10188395958871693, + "learning_rate": 0.0005671623917596031, + "loss": 1.5836, + "step": 5218 + }, + { + "epoch": 0.4735075303937579, + "grad_norm": 0.10297848412967336, + "learning_rate": 0.0005670167934297214, + "loss": 1.6052, + "step": 5219 + }, + { + "epoch": 0.47359825802939576, + "grad_norm": 0.09895465059030634, + "learning_rate": 0.0005668711893129295, + "loss": 1.6528, + "step": 5220 + }, + { + "epoch": 0.4736889856650336, + "grad_norm": 0.10242135959604366, + "learning_rate": 0.0005667255794218007, + "loss": 1.5857, + "step": 5221 + }, + { + "epoch": 0.47377971330067137, + "grad_norm": 0.10346766446682336, + "learning_rate": 0.0005665799637689082, + "loss": 1.563, + "step": 5222 + }, + { + "epoch": 0.4738704409363092, + "grad_norm": 0.10100272373436614, + "learning_rate": 0.0005664343423668261, + "loss": 1.6003, + "step": 5223 + }, + { + "epoch": 0.47396116857194703, + "grad_norm": 0.10228110839922429, + "learning_rate": 0.0005662887152281286, + "loss": 1.6028, + "step": 5224 + }, + { + "epoch": 0.4740518962075848, + "grad_norm": 0.10134788275396252, + "learning_rate": 0.0005661430823653908, + "loss": 1.6268, + "step": 5225 + }, + { + "epoch": 0.47414262384322264, + "grad_norm": 0.09775708997422776, + "learning_rate": 0.0005659974437911883, + "loss": 1.6337, + "step": 5226 + }, + { + "epoch": 0.4742333514788605, + "grad_norm": 0.09995057186530086, + "learning_rate": 0.0005658517995180965, + "loss": 1.5592, + "step": 5227 + }, + { + "epoch": 0.47432407911449825, + "grad_norm": 0.10744902033351823, + "learning_rate": 0.0005657061495586924, + "loss": 1.6288, + "step": 5228 + }, + { + "epoch": 0.4744148067501361, + "grad_norm": 0.09958186000189458, + "learning_rate": 0.0005655604939255525, + "loss": 1.5895, + "step": 5229 + }, + { + "epoch": 0.4745055343857739, + "grad_norm": 0.09896471272732475, + "learning_rate": 0.0005654148326312542, + "loss": 1.6618, + "step": 5230 + }, + { + "epoch": 0.4745962620214117, + "grad_norm": 0.1020415757450849, + "learning_rate": 0.0005652691656883754, + "loss": 1.5653, + "step": 5231 + }, + { + "epoch": 0.47468698965704953, + "grad_norm": 0.10462336763018983, + "learning_rate": 0.0005651234931094946, + "loss": 1.5683, + "step": 5232 + }, + { + "epoch": 0.47477771729268736, + "grad_norm": 0.09807266721191027, + "learning_rate": 0.0005649778149071907, + "loss": 1.5694, + "step": 5233 + }, + { + "epoch": 0.4748684449283252, + "grad_norm": 0.10606039091415521, + "learning_rate": 0.0005648321310940427, + "loss": 1.6232, + "step": 5234 + }, + { + "epoch": 0.474959172563963, + "grad_norm": 0.1055036034305914, + "learning_rate": 0.0005646864416826306, + "loss": 1.5858, + "step": 5235 + }, + { + "epoch": 0.4750499001996008, + "grad_norm": 0.10171141102599293, + "learning_rate": 0.000564540746685535, + "loss": 1.6077, + "step": 5236 + }, + { + "epoch": 0.47514062783523864, + "grad_norm": 0.10019823125008687, + "learning_rate": 0.0005643950461153362, + "loss": 1.6017, + "step": 5237 + }, + { + "epoch": 0.4752313554708764, + "grad_norm": 0.10188321326412063, + "learning_rate": 0.0005642493399846158, + "loss": 1.5715, + "step": 5238 + }, + { + "epoch": 0.47532208310651425, + "grad_norm": 0.10028772767081547, + "learning_rate": 0.0005641036283059553, + "loss": 1.5983, + "step": 5239 + }, + { + "epoch": 0.4754128107421521, + "grad_norm": 0.10053513934620617, + "learning_rate": 0.000563957911091937, + "loss": 1.6025, + "step": 5240 + }, + { + "epoch": 
0.47550353837778986, + "grad_norm": 0.10098263096692577, + "learning_rate": 0.0005638121883551439, + "loss": 1.6133, + "step": 5241 + }, + { + "epoch": 0.4755942660134277, + "grad_norm": 0.10143922151662088, + "learning_rate": 0.0005636664601081587, + "loss": 1.6548, + "step": 5242 + }, + { + "epoch": 0.4756849936490655, + "grad_norm": 0.09699734281774876, + "learning_rate": 0.0005635207263635655, + "loss": 1.5823, + "step": 5243 + }, + { + "epoch": 0.4757757212847033, + "grad_norm": 0.0956405304877161, + "learning_rate": 0.0005633749871339481, + "loss": 1.6355, + "step": 5244 + }, + { + "epoch": 0.47586644892034113, + "grad_norm": 0.10411153964107966, + "learning_rate": 0.0005632292424318912, + "loss": 1.6208, + "step": 5245 + }, + { + "epoch": 0.47595717655597897, + "grad_norm": 0.09916370466933985, + "learning_rate": 0.0005630834922699799, + "loss": 1.6117, + "step": 5246 + }, + { + "epoch": 0.47604790419161674, + "grad_norm": 0.09981978786149742, + "learning_rate": 0.0005629377366607998, + "loss": 1.5767, + "step": 5247 + }, + { + "epoch": 0.4761386318272546, + "grad_norm": 0.10000152547644417, + "learning_rate": 0.0005627919756169368, + "loss": 1.6105, + "step": 5248 + }, + { + "epoch": 0.4762293594628924, + "grad_norm": 0.09980584219618494, + "learning_rate": 0.0005626462091509774, + "loss": 1.6141, + "step": 5249 + }, + { + "epoch": 0.4763200870985302, + "grad_norm": 0.10699101455557812, + "learning_rate": 0.0005625004372755087, + "loss": 1.6139, + "step": 5250 + }, + { + "epoch": 0.476410814734168, + "grad_norm": 0.1024882716160833, + "learning_rate": 0.000562354660003118, + "loss": 1.6106, + "step": 5251 + }, + { + "epoch": 0.47650154236980585, + "grad_norm": 0.09872973881578742, + "learning_rate": 0.0005622088773463933, + "loss": 1.5754, + "step": 5252 + }, + { + "epoch": 0.4765922700054437, + "grad_norm": 0.10336619487971295, + "learning_rate": 0.0005620630893179229, + "loss": 1.6093, + "step": 5253 + }, + { + "epoch": 0.47668299764108146, + "grad_norm": 0.0989789391433333, + "learning_rate": 0.0005619172959302952, + "loss": 1.6254, + "step": 5254 + }, + { + "epoch": 0.4767737252767193, + "grad_norm": 0.09978786863043444, + "learning_rate": 0.0005617714971961003, + "loss": 1.6063, + "step": 5255 + }, + { + "epoch": 0.47686445291235713, + "grad_norm": 0.10149319860859425, + "learning_rate": 0.0005616256931279274, + "loss": 1.5733, + "step": 5256 + }, + { + "epoch": 0.4769551805479949, + "grad_norm": 0.10082747365245263, + "learning_rate": 0.0005614798837383668, + "loss": 1.6126, + "step": 5257 + }, + { + "epoch": 0.47704590818363274, + "grad_norm": 0.09807483245853078, + "learning_rate": 0.0005613340690400091, + "loss": 1.5957, + "step": 5258 + }, + { + "epoch": 0.47713663581927057, + "grad_norm": 0.09859498667509933, + "learning_rate": 0.0005611882490454455, + "loss": 1.5683, + "step": 5259 + }, + { + "epoch": 0.47722736345490835, + "grad_norm": 0.10237050269735226, + "learning_rate": 0.0005610424237672678, + "loss": 1.6392, + "step": 5260 + }, + { + "epoch": 0.4773180910905462, + "grad_norm": 0.10248777309997448, + "learning_rate": 0.0005608965932180676, + "loss": 1.6201, + "step": 5261 + }, + { + "epoch": 0.477408818726184, + "grad_norm": 0.10127592569546429, + "learning_rate": 0.0005607507574104377, + "loss": 1.6265, + "step": 5262 + }, + { + "epoch": 0.4774995463618218, + "grad_norm": 0.10058812001394966, + "learning_rate": 0.0005606049163569709, + "loss": 1.6207, + "step": 5263 + }, + { + "epoch": 0.4775902739974596, + "grad_norm": 0.10251667497770328, + "learning_rate": 
0.0005604590700702605, + "loss": 1.636, + "step": 5264 + }, + { + "epoch": 0.47768100163309746, + "grad_norm": 0.09534389165239536, + "learning_rate": 0.0005603132185629007, + "loss": 1.6033, + "step": 5265 + }, + { + "epoch": 0.47777172926873523, + "grad_norm": 0.09601742941048332, + "learning_rate": 0.0005601673618474855, + "loss": 1.5853, + "step": 5266 + }, + { + "epoch": 0.47786245690437307, + "grad_norm": 0.10005567082299603, + "learning_rate": 0.0005600214999366098, + "loss": 1.6118, + "step": 5267 + }, + { + "epoch": 0.4779531845400109, + "grad_norm": 0.10100225588195677, + "learning_rate": 0.0005598756328428686, + "loss": 1.5969, + "step": 5268 + }, + { + "epoch": 0.4780439121756487, + "grad_norm": 0.09929635253806038, + "learning_rate": 0.0005597297605788578, + "loss": 1.5798, + "step": 5269 + }, + { + "epoch": 0.4781346398112865, + "grad_norm": 0.10342625216962907, + "learning_rate": 0.0005595838831571734, + "loss": 1.602, + "step": 5270 + }, + { + "epoch": 0.47822536744692434, + "grad_norm": 0.1075628815865743, + "learning_rate": 0.0005594380005904117, + "loss": 1.6022, + "step": 5271 + }, + { + "epoch": 0.4783160950825622, + "grad_norm": 0.09463207202877949, + "learning_rate": 0.0005592921128911702, + "loss": 1.6078, + "step": 5272 + }, + { + "epoch": 0.47840682271819995, + "grad_norm": 0.10109405168919501, + "learning_rate": 0.0005591462200720457, + "loss": 1.6024, + "step": 5273 + }, + { + "epoch": 0.4784975503538378, + "grad_norm": 0.09971470347816175, + "learning_rate": 0.0005590003221456366, + "loss": 1.6053, + "step": 5274 + }, + { + "epoch": 0.4785882779894756, + "grad_norm": 0.1077083472690853, + "learning_rate": 0.000558854419124541, + "loss": 1.6203, + "step": 5275 + }, + { + "epoch": 0.4786790056251134, + "grad_norm": 0.09892062407897748, + "learning_rate": 0.0005587085110213575, + "loss": 1.5845, + "step": 5276 + }, + { + "epoch": 0.47876973326075123, + "grad_norm": 0.10001856191620474, + "learning_rate": 0.0005585625978486853, + "loss": 1.6036, + "step": 5277 + }, + { + "epoch": 0.47886046089638906, + "grad_norm": 0.09960459466999944, + "learning_rate": 0.0005584166796191244, + "loss": 1.5823, + "step": 5278 + }, + { + "epoch": 0.47895118853202684, + "grad_norm": 0.09747826914379172, + "learning_rate": 0.0005582707563452744, + "loss": 1.6096, + "step": 5279 + }, + { + "epoch": 0.47904191616766467, + "grad_norm": 0.10125180442157934, + "learning_rate": 0.0005581248280397363, + "loss": 1.6126, + "step": 5280 + }, + { + "epoch": 0.4791326438033025, + "grad_norm": 0.0986157793250548, + "learning_rate": 0.0005579788947151105, + "loss": 1.6145, + "step": 5281 + }, + { + "epoch": 0.4792233714389403, + "grad_norm": 0.10499745661744099, + "learning_rate": 0.0005578329563839987, + "loss": 1.5773, + "step": 5282 + }, + { + "epoch": 0.4793140990745781, + "grad_norm": 0.09833194287700682, + "learning_rate": 0.0005576870130590025, + "loss": 1.6208, + "step": 5283 + }, + { + "epoch": 0.47940482671021595, + "grad_norm": 0.10028374776660659, + "learning_rate": 0.0005575410647527242, + "loss": 1.655, + "step": 5284 + }, + { + "epoch": 0.4794955543458537, + "grad_norm": 0.10181870800336096, + "learning_rate": 0.0005573951114777666, + "loss": 1.587, + "step": 5285 + }, + { + "epoch": 0.47958628198149156, + "grad_norm": 0.10028280604319686, + "learning_rate": 0.0005572491532467326, + "loss": 1.5532, + "step": 5286 + }, + { + "epoch": 0.4796770096171294, + "grad_norm": 0.10757869163638463, + "learning_rate": 0.0005571031900722257, + "loss": 1.6167, + "step": 5287 + }, + { + "epoch": 
0.47976773725276717, + "grad_norm": 0.10070369874873869, + "learning_rate": 0.00055695722196685, + "loss": 1.6066, + "step": 5288 + }, + { + "epoch": 0.479858464888405, + "grad_norm": 0.0999703263346717, + "learning_rate": 0.0005568112489432097, + "loss": 1.6096, + "step": 5289 + }, + { + "epoch": 0.47994919252404283, + "grad_norm": 0.10329434285484476, + "learning_rate": 0.0005566652710139098, + "loss": 1.6384, + "step": 5290 + }, + { + "epoch": 0.48003992015968067, + "grad_norm": 0.1032633227155664, + "learning_rate": 0.0005565192881915554, + "loss": 1.6503, + "step": 5291 + }, + { + "epoch": 0.48013064779531844, + "grad_norm": 0.10033415049652823, + "learning_rate": 0.0005563733004887522, + "loss": 1.5791, + "step": 5292 + }, + { + "epoch": 0.4802213754309563, + "grad_norm": 0.10063732779805919, + "learning_rate": 0.0005562273079181059, + "loss": 1.5897, + "step": 5293 + }, + { + "epoch": 0.4803121030665941, + "grad_norm": 0.09723843864558362, + "learning_rate": 0.0005560813104922237, + "loss": 1.5896, + "step": 5294 + }, + { + "epoch": 0.4804028307022319, + "grad_norm": 0.09720913994695828, + "learning_rate": 0.0005559353082237118, + "loss": 1.6481, + "step": 5295 + }, + { + "epoch": 0.4804935583378697, + "grad_norm": 0.09780426515208497, + "learning_rate": 0.0005557893011251777, + "loss": 1.5936, + "step": 5296 + }, + { + "epoch": 0.48058428597350755, + "grad_norm": 0.1027423640869888, + "learning_rate": 0.0005556432892092295, + "loss": 1.6041, + "step": 5297 + }, + { + "epoch": 0.48067501360914533, + "grad_norm": 0.1022007201831454, + "learning_rate": 0.0005554972724884748, + "loss": 1.6024, + "step": 5298 + }, + { + "epoch": 0.48076574124478316, + "grad_norm": 0.10083701993446413, + "learning_rate": 0.0005553512509755227, + "loss": 1.6112, + "step": 5299 + }, + { + "epoch": 0.480856468880421, + "grad_norm": 0.10215252893602139, + "learning_rate": 0.0005552052246829819, + "loss": 1.6206, + "step": 5300 + }, + { + "epoch": 0.4809471965160588, + "grad_norm": 0.10049206376694886, + "learning_rate": 0.0005550591936234616, + "loss": 1.6371, + "step": 5301 + }, + { + "epoch": 0.4810379241516966, + "grad_norm": 0.09799717161530118, + "learning_rate": 0.0005549131578095718, + "loss": 1.603, + "step": 5302 + }, + { + "epoch": 0.48112865178733444, + "grad_norm": 0.09766628678913118, + "learning_rate": 0.0005547671172539229, + "loss": 1.5949, + "step": 5303 + }, + { + "epoch": 0.4812193794229722, + "grad_norm": 0.09915852755385914, + "learning_rate": 0.0005546210719691255, + "loss": 1.5518, + "step": 5304 + }, + { + "epoch": 0.48131010705861005, + "grad_norm": 0.10052936862289442, + "learning_rate": 0.0005544750219677901, + "loss": 1.6036, + "step": 5305 + }, + { + "epoch": 0.4814008346942479, + "grad_norm": 0.10066093685096716, + "learning_rate": 0.0005543289672625288, + "loss": 1.6273, + "step": 5306 + }, + { + "epoch": 0.48149156232988566, + "grad_norm": 0.10036045400809851, + "learning_rate": 0.0005541829078659531, + "loss": 1.6098, + "step": 5307 + }, + { + "epoch": 0.4815822899655235, + "grad_norm": 0.10490794052047919, + "learning_rate": 0.0005540368437906753, + "loss": 1.6251, + "step": 5308 + }, + { + "epoch": 0.4816730176011613, + "grad_norm": 0.10076463113718506, + "learning_rate": 0.0005538907750493081, + "loss": 1.5878, + "step": 5309 + }, + { + "epoch": 0.4817637452367991, + "grad_norm": 0.10740102488143559, + "learning_rate": 0.0005537447016544645, + "loss": 1.6165, + "step": 5310 + }, + { + "epoch": 0.48185447287243693, + "grad_norm": 0.10085293768786723, + "learning_rate": 
0.000553598623618758, + "loss": 1.5609, + "step": 5311 + }, + { + "epoch": 0.48194520050807477, + "grad_norm": 0.09645476234891415, + "learning_rate": 0.0005534525409548024, + "loss": 1.6009, + "step": 5312 + }, + { + "epoch": 0.4820359281437126, + "grad_norm": 0.10217831236671561, + "learning_rate": 0.000553306453675212, + "loss": 1.6478, + "step": 5313 + }, + { + "epoch": 0.4821266557793504, + "grad_norm": 0.10299655161146917, + "learning_rate": 0.0005531603617926017, + "loss": 1.5579, + "step": 5314 + }, + { + "epoch": 0.4822173834149882, + "grad_norm": 0.1008208609762425, + "learning_rate": 0.0005530142653195861, + "loss": 1.6042, + "step": 5315 + }, + { + "epoch": 0.48230811105062604, + "grad_norm": 0.10063873613265775, + "learning_rate": 0.0005528681642687808, + "loss": 1.5768, + "step": 5316 + }, + { + "epoch": 0.4823988386862638, + "grad_norm": 0.09727925118811377, + "learning_rate": 0.0005527220586528019, + "loss": 1.5935, + "step": 5317 + }, + { + "epoch": 0.48248956632190165, + "grad_norm": 0.09818237840594041, + "learning_rate": 0.0005525759484842654, + "loss": 1.5596, + "step": 5318 + }, + { + "epoch": 0.4825802939575395, + "grad_norm": 0.10073934723961235, + "learning_rate": 0.0005524298337757881, + "loss": 1.5737, + "step": 5319 + }, + { + "epoch": 0.48267102159317726, + "grad_norm": 0.0984460592504534, + "learning_rate": 0.0005522837145399867, + "loss": 1.5952, + "step": 5320 + }, + { + "epoch": 0.4827617492288151, + "grad_norm": 0.0982746812961956, + "learning_rate": 0.0005521375907894791, + "loss": 1.6157, + "step": 5321 + }, + { + "epoch": 0.48285247686445293, + "grad_norm": 0.09597872794698173, + "learning_rate": 0.0005519914625368829, + "loss": 1.6298, + "step": 5322 + }, + { + "epoch": 0.4829432045000907, + "grad_norm": 0.09685170782393056, + "learning_rate": 0.0005518453297948159, + "loss": 1.6431, + "step": 5323 + }, + { + "epoch": 0.48303393213572854, + "grad_norm": 0.09665897878363387, + "learning_rate": 0.0005516991925758973, + "loss": 1.5663, + "step": 5324 + }, + { + "epoch": 0.48312465977136637, + "grad_norm": 0.10565744103102864, + "learning_rate": 0.0005515530508927456, + "loss": 1.5553, + "step": 5325 + }, + { + "epoch": 0.48321538740700415, + "grad_norm": 0.09966606971109976, + "learning_rate": 0.0005514069047579806, + "loss": 1.6177, + "step": 5326 + }, + { + "epoch": 0.483306115042642, + "grad_norm": 0.09827804950988396, + "learning_rate": 0.0005512607541842217, + "loss": 1.5716, + "step": 5327 + }, + { + "epoch": 0.4833968426782798, + "grad_norm": 0.09717315277469084, + "learning_rate": 0.000551114599184089, + "loss": 1.6262, + "step": 5328 + }, + { + "epoch": 0.4834875703139176, + "grad_norm": 0.0992849551147009, + "learning_rate": 0.0005509684397702033, + "loss": 1.6217, + "step": 5329 + }, + { + "epoch": 0.4835782979495554, + "grad_norm": 0.09821371660103471, + "learning_rate": 0.0005508222759551852, + "loss": 1.5883, + "step": 5330 + }, + { + "epoch": 0.48366902558519326, + "grad_norm": 0.09906936262189674, + "learning_rate": 0.0005506761077516562, + "loss": 1.64, + "step": 5331 + }, + { + "epoch": 0.4837597532208311, + "grad_norm": 0.09918186142954473, + "learning_rate": 0.0005505299351722376, + "loss": 1.5318, + "step": 5332 + }, + { + "epoch": 0.48385048085646887, + "grad_norm": 0.10079196333322628, + "learning_rate": 0.0005503837582295518, + "loss": 1.5861, + "step": 5333 + }, + { + "epoch": 0.4839412084921067, + "grad_norm": 0.10204210908421513, + "learning_rate": 0.0005502375769362211, + "loss": 1.619, + "step": 5334 + }, + { + "epoch": 
0.48403193612774453, + "grad_norm": 0.09666801069674752, + "learning_rate": 0.0005500913913048682, + "loss": 1.6457, + "step": 5335 + }, + { + "epoch": 0.4841226637633823, + "grad_norm": 0.0997379427561511, + "learning_rate": 0.0005499452013481162, + "loss": 1.6142, + "step": 5336 + }, + { + "epoch": 0.48421339139902014, + "grad_norm": 0.09941674977901842, + "learning_rate": 0.0005497990070785888, + "loss": 1.5964, + "step": 5337 + }, + { + "epoch": 0.484304119034658, + "grad_norm": 0.10238352621293047, + "learning_rate": 0.0005496528085089099, + "loss": 1.6164, + "step": 5338 + }, + { + "epoch": 0.48439484667029575, + "grad_norm": 0.10548467400615374, + "learning_rate": 0.0005495066056517034, + "loss": 1.6035, + "step": 5339 + }, + { + "epoch": 0.4844855743059336, + "grad_norm": 0.09909579058791122, + "learning_rate": 0.0005493603985195943, + "loss": 1.6047, + "step": 5340 + }, + { + "epoch": 0.4845763019415714, + "grad_norm": 0.1016481999626479, + "learning_rate": 0.0005492141871252075, + "loss": 1.6429, + "step": 5341 + }, + { + "epoch": 0.4846670295772092, + "grad_norm": 0.0992609879698827, + "learning_rate": 0.0005490679714811685, + "loss": 1.602, + "step": 5342 + }, + { + "epoch": 0.48475775721284703, + "grad_norm": 0.09850886840631592, + "learning_rate": 0.000548921751600103, + "loss": 1.5675, + "step": 5343 + }, + { + "epoch": 0.48484848484848486, + "grad_norm": 0.10007542061449407, + "learning_rate": 0.0005487755274946367, + "loss": 1.6024, + "step": 5344 + }, + { + "epoch": 0.48493921248412264, + "grad_norm": 0.10177620212564396, + "learning_rate": 0.0005486292991773966, + "loss": 1.6273, + "step": 5345 + }, + { + "epoch": 0.48502994011976047, + "grad_norm": 0.10035587560106603, + "learning_rate": 0.0005484830666610094, + "loss": 1.5908, + "step": 5346 + }, + { + "epoch": 0.4851206677553983, + "grad_norm": 0.09698413910569512, + "learning_rate": 0.0005483368299581022, + "loss": 1.5855, + "step": 5347 + }, + { + "epoch": 0.4852113953910361, + "grad_norm": 0.1001653179895131, + "learning_rate": 0.0005481905890813026, + "loss": 1.5992, + "step": 5348 + }, + { + "epoch": 0.4853021230266739, + "grad_norm": 0.10443063969526824, + "learning_rate": 0.0005480443440432386, + "loss": 1.5693, + "step": 5349 + }, + { + "epoch": 0.48539285066231175, + "grad_norm": 0.0987495771167287, + "learning_rate": 0.0005478980948565384, + "loss": 1.6501, + "step": 5350 + }, + { + "epoch": 0.4854835782979496, + "grad_norm": 0.09665110735382969, + "learning_rate": 0.0005477518415338305, + "loss": 1.5786, + "step": 5351 + }, + { + "epoch": 0.48557430593358736, + "grad_norm": 0.10198531712514984, + "learning_rate": 0.000547605584087744, + "loss": 1.5818, + "step": 5352 + }, + { + "epoch": 0.4856650335692252, + "grad_norm": 0.09701441440604537, + "learning_rate": 0.0005474593225309087, + "loss": 1.5914, + "step": 5353 + }, + { + "epoch": 0.485755761204863, + "grad_norm": 0.09900181128888792, + "learning_rate": 0.0005473130568759536, + "loss": 1.5956, + "step": 5354 + }, + { + "epoch": 0.4858464888405008, + "grad_norm": 0.09985183304481975, + "learning_rate": 0.0005471667871355091, + "loss": 1.619, + "step": 5355 + }, + { + "epoch": 0.48593721647613863, + "grad_norm": 0.09984926220191336, + "learning_rate": 0.0005470205133222055, + "loss": 1.6173, + "step": 5356 + }, + { + "epoch": 0.48602794411177647, + "grad_norm": 0.10283255984028757, + "learning_rate": 0.0005468742354486737, + "loss": 1.6201, + "step": 5357 + }, + { + "epoch": 0.48611867174741424, + "grad_norm": 0.1011809956808341, + "learning_rate": 
0.000546727953527545, + "loss": 1.6411, + "step": 5358 + }, + { + "epoch": 0.4862093993830521, + "grad_norm": 0.0984365133728774, + "learning_rate": 0.0005465816675714504, + "loss": 1.6542, + "step": 5359 + }, + { + "epoch": 0.4863001270186899, + "grad_norm": 0.10280429390990949, + "learning_rate": 0.000546435377593022, + "loss": 1.5921, + "step": 5360 + }, + { + "epoch": 0.4863908546543277, + "grad_norm": 0.09748965717117884, + "learning_rate": 0.0005462890836048918, + "loss": 1.6321, + "step": 5361 + }, + { + "epoch": 0.4864815822899655, + "grad_norm": 0.09987894859478766, + "learning_rate": 0.0005461427856196925, + "loss": 1.6326, + "step": 5362 + }, + { + "epoch": 0.48657230992560335, + "grad_norm": 0.09874276673212119, + "learning_rate": 0.0005459964836500568, + "loss": 1.5725, + "step": 5363 + }, + { + "epoch": 0.48666303756124113, + "grad_norm": 0.0985908251375071, + "learning_rate": 0.000545850177708618, + "loss": 1.5947, + "step": 5364 + }, + { + "epoch": 0.48675376519687896, + "grad_norm": 0.09918013837805773, + "learning_rate": 0.0005457038678080097, + "loss": 1.6251, + "step": 5365 + }, + { + "epoch": 0.4868444928325168, + "grad_norm": 0.10121164811229287, + "learning_rate": 0.0005455575539608655, + "loss": 1.6098, + "step": 5366 + }, + { + "epoch": 0.4869352204681546, + "grad_norm": 0.1002789197756171, + "learning_rate": 0.0005454112361798199, + "loss": 1.5774, + "step": 5367 + }, + { + "epoch": 0.4870259481037924, + "grad_norm": 0.09973906931228714, + "learning_rate": 0.0005452649144775073, + "loss": 1.5789, + "step": 5368 + }, + { + "epoch": 0.48711667573943024, + "grad_norm": 0.09793391714016247, + "learning_rate": 0.0005451185888665628, + "loss": 1.588, + "step": 5369 + }, + { + "epoch": 0.48720740337506807, + "grad_norm": 0.09766199926265294, + "learning_rate": 0.0005449722593596214, + "loss": 1.5806, + "step": 5370 + }, + { + "epoch": 0.48729813101070585, + "grad_norm": 0.09557633465187641, + "learning_rate": 0.0005448259259693187, + "loss": 1.6394, + "step": 5371 + }, + { + "epoch": 0.4873888586463437, + "grad_norm": 0.09510738045665418, + "learning_rate": 0.0005446795887082908, + "loss": 1.5719, + "step": 5372 + }, + { + "epoch": 0.4874795862819815, + "grad_norm": 0.0934786243047965, + "learning_rate": 0.0005445332475891738, + "loss": 1.5877, + "step": 5373 + }, + { + "epoch": 0.4875703139176193, + "grad_norm": 0.09664439856622814, + "learning_rate": 0.0005443869026246042, + "loss": 1.6037, + "step": 5374 + }, + { + "epoch": 0.4876610415532571, + "grad_norm": 0.09843623089125453, + "learning_rate": 0.0005442405538272192, + "loss": 1.6112, + "step": 5375 + }, + { + "epoch": 0.48775176918889496, + "grad_norm": 0.09922204460112674, + "learning_rate": 0.0005440942012096557, + "loss": 1.6087, + "step": 5376 + }, + { + "epoch": 0.48784249682453273, + "grad_norm": 0.09799988907623425, + "learning_rate": 0.0005439478447845516, + "loss": 1.604, + "step": 5377 + }, + { + "epoch": 0.48793322446017057, + "grad_norm": 0.09917069401824659, + "learning_rate": 0.0005438014845645446, + "loss": 1.577, + "step": 5378 + }, + { + "epoch": 0.4880239520958084, + "grad_norm": 0.10176327174257122, + "learning_rate": 0.0005436551205622728, + "loss": 1.6117, + "step": 5379 + }, + { + "epoch": 0.4881146797314462, + "grad_norm": 0.09966653279489607, + "learning_rate": 0.0005435087527903749, + "loss": 1.5494, + "step": 5380 + }, + { + "epoch": 0.488205407367084, + "grad_norm": 0.09916883369136092, + "learning_rate": 0.00054336238126149, + "loss": 1.6261, + "step": 5381 + }, + { + "epoch": 
0.48829613500272184, + "grad_norm": 0.0971877463238166, + "learning_rate": 0.0005432160059882569, + "loss": 1.5854, + "step": 5382 + }, + { + "epoch": 0.4883868626383596, + "grad_norm": 0.10080715942025142, + "learning_rate": 0.0005430696269833153, + "loss": 1.6568, + "step": 5383 + }, + { + "epoch": 0.48847759027399745, + "grad_norm": 0.09565330289788426, + "learning_rate": 0.0005429232442593053, + "loss": 1.5971, + "step": 5384 + }, + { + "epoch": 0.4885683179096353, + "grad_norm": 0.10015220314347162, + "learning_rate": 0.0005427768578288666, + "loss": 1.6228, + "step": 5385 + }, + { + "epoch": 0.48865904554527306, + "grad_norm": 0.09745489427990862, + "learning_rate": 0.00054263046770464, + "loss": 1.5963, + "step": 5386 + }, + { + "epoch": 0.4887497731809109, + "grad_norm": 0.09892383050164393, + "learning_rate": 0.0005424840738992661, + "loss": 1.6391, + "step": 5387 + }, + { + "epoch": 0.48884050081654873, + "grad_norm": 0.09908124192857701, + "learning_rate": 0.0005423376764253863, + "loss": 1.5887, + "step": 5388 + }, + { + "epoch": 0.48893122845218656, + "grad_norm": 0.09461618884437524, + "learning_rate": 0.0005421912752956419, + "loss": 1.5525, + "step": 5389 + }, + { + "epoch": 0.48902195608782434, + "grad_norm": 0.10097057828885651, + "learning_rate": 0.0005420448705226746, + "loss": 1.5821, + "step": 5390 + }, + { + "epoch": 0.48911268372346217, + "grad_norm": 0.09964804585039065, + "learning_rate": 0.0005418984621191266, + "loss": 1.6046, + "step": 5391 + }, + { + "epoch": 0.4892034113591, + "grad_norm": 0.0993444229049177, + "learning_rate": 0.0005417520500976402, + "loss": 1.5946, + "step": 5392 + }, + { + "epoch": 0.4892941389947378, + "grad_norm": 0.09842604428735408, + "learning_rate": 0.0005416056344708581, + "loss": 1.5941, + "step": 5393 + }, + { + "epoch": 0.4893848666303756, + "grad_norm": 0.09789722179328321, + "learning_rate": 0.0005414592152514232, + "loss": 1.6298, + "step": 5394 + }, + { + "epoch": 0.48947559426601345, + "grad_norm": 0.09671029495091357, + "learning_rate": 0.0005413127924519792, + "loss": 1.6211, + "step": 5395 + }, + { + "epoch": 0.4895663219016512, + "grad_norm": 0.10074139487339717, + "learning_rate": 0.0005411663660851694, + "loss": 1.6383, + "step": 5396 + }, + { + "epoch": 0.48965704953728906, + "grad_norm": 0.09411842626661801, + "learning_rate": 0.0005410199361636378, + "loss": 1.5784, + "step": 5397 + }, + { + "epoch": 0.4897477771729269, + "grad_norm": 0.09682987465576293, + "learning_rate": 0.0005408735027000285, + "loss": 1.5831, + "step": 5398 + }, + { + "epoch": 0.48983850480856467, + "grad_norm": 0.10135535521742621, + "learning_rate": 0.0005407270657069866, + "loss": 1.5477, + "step": 5399 + }, + { + "epoch": 0.4899292324442025, + "grad_norm": 0.09957155835719267, + "learning_rate": 0.0005405806251971563, + "loss": 1.6077, + "step": 5400 + }, + { + "epoch": 0.49001996007984033, + "grad_norm": 0.09710097148699896, + "learning_rate": 0.0005404341811831832, + "loss": 1.5884, + "step": 5401 + }, + { + "epoch": 0.4901106877154781, + "grad_norm": 0.10174297663735664, + "learning_rate": 0.0005402877336777123, + "loss": 1.5822, + "step": 5402 + }, + { + "epoch": 0.49020141535111594, + "grad_norm": 0.09728609132807622, + "learning_rate": 0.0005401412826933899, + "loss": 1.6129, + "step": 5403 + }, + { + "epoch": 0.4902921429867538, + "grad_norm": 0.10163890985343521, + "learning_rate": 0.0005399948282428618, + "loss": 1.6322, + "step": 5404 + }, + { + "epoch": 0.49038287062239155, + "grad_norm": 0.10706641680979372, + "learning_rate": 
0.0005398483703387743, + "loss": 1.601, + "step": 5405 + }, + { + "epoch": 0.4904735982580294, + "grad_norm": 0.10650523053606974, + "learning_rate": 0.0005397019089937742, + "loss": 1.6201, + "step": 5406 + }, + { + "epoch": 0.4905643258936672, + "grad_norm": 0.10002055239834304, + "learning_rate": 0.0005395554442205084, + "loss": 1.5758, + "step": 5407 + }, + { + "epoch": 0.49065505352930505, + "grad_norm": 0.09813688090052312, + "learning_rate": 0.0005394089760316242, + "loss": 1.5879, + "step": 5408 + }, + { + "epoch": 0.49074578116494283, + "grad_norm": 0.10422278350224515, + "learning_rate": 0.0005392625044397692, + "loss": 1.5888, + "step": 5409 + }, + { + "epoch": 0.49083650880058066, + "grad_norm": 0.10208750220777937, + "learning_rate": 0.0005391160294575908, + "loss": 1.5598, + "step": 5410 + }, + { + "epoch": 0.4909272364362185, + "grad_norm": 0.10020173281006108, + "learning_rate": 0.0005389695510977379, + "loss": 1.6031, + "step": 5411 + }, + { + "epoch": 0.49101796407185627, + "grad_norm": 0.0972086626441125, + "learning_rate": 0.0005388230693728583, + "loss": 1.5817, + "step": 5412 + }, + { + "epoch": 0.4911086917074941, + "grad_norm": 0.10357573785500201, + "learning_rate": 0.0005386765842956009, + "loss": 1.6086, + "step": 5413 + }, + { + "epoch": 0.49119941934313194, + "grad_norm": 0.10993646564245485, + "learning_rate": 0.0005385300958786149, + "loss": 1.5253, + "step": 5414 + }, + { + "epoch": 0.4912901469787697, + "grad_norm": 0.09993685964810417, + "learning_rate": 0.0005383836041345494, + "loss": 1.597, + "step": 5415 + }, + { + "epoch": 0.49138087461440755, + "grad_norm": 0.10409190706866234, + "learning_rate": 0.0005382371090760541, + "loss": 1.6178, + "step": 5416 + }, + { + "epoch": 0.4914716022500454, + "grad_norm": 0.10479895980531295, + "learning_rate": 0.0005380906107157786, + "loss": 1.5442, + "step": 5417 + }, + { + "epoch": 0.49156232988568316, + "grad_norm": 0.10111294017069315, + "learning_rate": 0.0005379441090663734, + "loss": 1.5978, + "step": 5418 + }, + { + "epoch": 0.491653057521321, + "grad_norm": 0.10081946986865387, + "learning_rate": 0.0005377976041404886, + "loss": 1.6509, + "step": 5419 + }, + { + "epoch": 0.4917437851569588, + "grad_norm": 0.10103379057272245, + "learning_rate": 0.0005376510959507753, + "loss": 1.6261, + "step": 5420 + }, + { + "epoch": 0.4918345127925966, + "grad_norm": 0.1056626751726393, + "learning_rate": 0.0005375045845098844, + "loss": 1.6279, + "step": 5421 + }, + { + "epoch": 0.49192524042823443, + "grad_norm": 0.10276755620061723, + "learning_rate": 0.0005373580698304668, + "loss": 1.6346, + "step": 5422 + }, + { + "epoch": 0.49201596806387227, + "grad_norm": 0.10393659690276516, + "learning_rate": 0.0005372115519251746, + "loss": 1.581, + "step": 5423 + }, + { + "epoch": 0.49210669569951004, + "grad_norm": 0.09997001667550112, + "learning_rate": 0.0005370650308066594, + "loss": 1.5677, + "step": 5424 + }, + { + "epoch": 0.4921974233351479, + "grad_norm": 0.10036249390695497, + "learning_rate": 0.0005369185064875731, + "loss": 1.6162, + "step": 5425 + }, + { + "epoch": 0.4922881509707857, + "grad_norm": 0.0974957638453811, + "learning_rate": 0.0005367719789805685, + "loss": 1.5663, + "step": 5426 + }, + { + "epoch": 0.49237887860642354, + "grad_norm": 0.09964837225952794, + "learning_rate": 0.0005366254482982981, + "loss": 1.5364, + "step": 5427 + }, + { + "epoch": 0.4924696062420613, + "grad_norm": 0.10164254613098486, + "learning_rate": 0.0005364789144534149, + "loss": 1.6098, + "step": 5428 + }, + { + "epoch": 
0.49256033387769915, + "grad_norm": 0.09686043457164847, + "learning_rate": 0.0005363323774585719, + "loss": 1.6068, + "step": 5429 + }, + { + "epoch": 0.492651061513337, + "grad_norm": 0.1043321655937666, + "learning_rate": 0.0005361858373264228, + "loss": 1.6282, + "step": 5430 + }, + { + "epoch": 0.49274178914897476, + "grad_norm": 0.09742098149833099, + "learning_rate": 0.0005360392940696214, + "loss": 1.6277, + "step": 5431 + }, + { + "epoch": 0.4928325167846126, + "grad_norm": 0.10160359529835308, + "learning_rate": 0.0005358927477008216, + "loss": 1.5681, + "step": 5432 + }, + { + "epoch": 0.49292324442025043, + "grad_norm": 0.09765111476254484, + "learning_rate": 0.0005357461982326777, + "loss": 1.6056, + "step": 5433 + }, + { + "epoch": 0.4930139720558882, + "grad_norm": 0.09891121553058377, + "learning_rate": 0.0005355996456778444, + "loss": 1.604, + "step": 5434 + }, + { + "epoch": 0.49310469969152604, + "grad_norm": 0.10336461779598474, + "learning_rate": 0.0005354530900489764, + "loss": 1.586, + "step": 5435 + }, + { + "epoch": 0.49319542732716387, + "grad_norm": 0.09819287055995862, + "learning_rate": 0.0005353065313587289, + "loss": 1.639, + "step": 5436 + }, + { + "epoch": 0.49328615496280165, + "grad_norm": 0.09891656429788832, + "learning_rate": 0.000535159969619757, + "loss": 1.6317, + "step": 5437 + }, + { + "epoch": 0.4933768825984395, + "grad_norm": 0.10095340171821983, + "learning_rate": 0.0005350134048447169, + "loss": 1.6176, + "step": 5438 + }, + { + "epoch": 0.4934676102340773, + "grad_norm": 0.09731630383673202, + "learning_rate": 0.0005348668370462638, + "loss": 1.5872, + "step": 5439 + }, + { + "epoch": 0.4935583378697151, + "grad_norm": 0.0973955624199667, + "learning_rate": 0.0005347202662370542, + "loss": 1.6068, + "step": 5440 + }, + { + "epoch": 0.4936490655053529, + "grad_norm": 0.09605920350784666, + "learning_rate": 0.0005345736924297447, + "loss": 1.6113, + "step": 5441 + }, + { + "epoch": 0.49373979314099076, + "grad_norm": 0.0985520259134812, + "learning_rate": 0.0005344271156369916, + "loss": 1.6082, + "step": 5442 + }, + { + "epoch": 0.49383052077662853, + "grad_norm": 0.09971043818305687, + "learning_rate": 0.0005342805358714522, + "loss": 1.5781, + "step": 5443 + }, + { + "epoch": 0.49392124841226637, + "grad_norm": 0.09706302405860284, + "learning_rate": 0.0005341339531457833, + "loss": 1.6085, + "step": 5444 + }, + { + "epoch": 0.4940119760479042, + "grad_norm": 0.0969979928093939, + "learning_rate": 0.0005339873674726426, + "loss": 1.5918, + "step": 5445 + }, + { + "epoch": 0.49410270368354203, + "grad_norm": 0.09874847369367078, + "learning_rate": 0.0005338407788646876, + "loss": 1.6102, + "step": 5446 + }, + { + "epoch": 0.4941934313191798, + "grad_norm": 0.09865141048807188, + "learning_rate": 0.0005336941873345764, + "loss": 1.588, + "step": 5447 + }, + { + "epoch": 0.49428415895481764, + "grad_norm": 0.09680118903346788, + "learning_rate": 0.0005335475928949673, + "loss": 1.5874, + "step": 5448 + }, + { + "epoch": 0.4943748865904555, + "grad_norm": 0.09819371123479312, + "learning_rate": 0.0005334009955585184, + "loss": 1.5803, + "step": 5449 + }, + { + "epoch": 0.49446561422609325, + "grad_norm": 0.09706890593123724, + "learning_rate": 0.0005332543953378888, + "loss": 1.5945, + "step": 5450 + }, + { + "epoch": 0.4945563418617311, + "grad_norm": 0.09856042037116863, + "learning_rate": 0.000533107792245737, + "loss": 1.5878, + "step": 5451 + }, + { + "epoch": 0.4946470694973689, + "grad_norm": 0.09941614351414432, + "learning_rate": 
0.0005329611862947227, + "loss": 1.596, + "step": 5452 + }, + { + "epoch": 0.4947377971330067, + "grad_norm": 0.097531253903557, + "learning_rate": 0.000532814577497505, + "loss": 1.5592, + "step": 5453 + }, + { + "epoch": 0.49482852476864453, + "grad_norm": 0.10239385660895868, + "learning_rate": 0.0005326679658667437, + "loss": 1.5846, + "step": 5454 + }, + { + "epoch": 0.49491925240428236, + "grad_norm": 0.10397146863340886, + "learning_rate": 0.0005325213514150988, + "loss": 1.5826, + "step": 5455 + }, + { + "epoch": 0.49500998003992014, + "grad_norm": 0.10319033914680069, + "learning_rate": 0.0005323747341552304, + "loss": 1.5955, + "step": 5456 + }, + { + "epoch": 0.49510070767555797, + "grad_norm": 0.0998380482680944, + "learning_rate": 0.0005322281140997989, + "loss": 1.5984, + "step": 5457 + }, + { + "epoch": 0.4951914353111958, + "grad_norm": 0.09781911694886285, + "learning_rate": 0.000532081491261465, + "loss": 1.6222, + "step": 5458 + }, + { + "epoch": 0.4952821629468336, + "grad_norm": 0.10042004738259463, + "learning_rate": 0.0005319348656528895, + "loss": 1.5642, + "step": 5459 + }, + { + "epoch": 0.4953728905824714, + "grad_norm": 0.09555094896104277, + "learning_rate": 0.0005317882372867338, + "loss": 1.6046, + "step": 5460 + }, + { + "epoch": 0.49546361821810925, + "grad_norm": 0.09574902095306477, + "learning_rate": 0.000531641606175659, + "loss": 1.6012, + "step": 5461 + }, + { + "epoch": 0.495554345853747, + "grad_norm": 0.10096578026822879, + "learning_rate": 0.000531494972332327, + "loss": 1.5653, + "step": 5462 + }, + { + "epoch": 0.49564507348938486, + "grad_norm": 0.10357546505966192, + "learning_rate": 0.0005313483357693994, + "loss": 1.5587, + "step": 5463 + }, + { + "epoch": 0.4957358011250227, + "grad_norm": 0.09724986049663031, + "learning_rate": 0.0005312016964995384, + "loss": 1.6456, + "step": 5464 + }, + { + "epoch": 0.4958265287606605, + "grad_norm": 0.10123918005214465, + "learning_rate": 0.0005310550545354063, + "loss": 1.5837, + "step": 5465 + }, + { + "epoch": 0.4959172563962983, + "grad_norm": 0.09871659024909621, + "learning_rate": 0.0005309084098896656, + "loss": 1.5743, + "step": 5466 + }, + { + "epoch": 0.49600798403193613, + "grad_norm": 0.09835016678610543, + "learning_rate": 0.0005307617625749793, + "loss": 1.6013, + "step": 5467 + }, + { + "epoch": 0.49609871166757397, + "grad_norm": 0.10052329175678817, + "learning_rate": 0.0005306151126040101, + "loss": 1.5803, + "step": 5468 + }, + { + "epoch": 0.49618943930321174, + "grad_norm": 0.09670316577706513, + "learning_rate": 0.0005304684599894215, + "loss": 1.6039, + "step": 5469 + }, + { + "epoch": 0.4962801669388496, + "grad_norm": 0.0953162399124763, + "learning_rate": 0.0005303218047438769, + "loss": 1.5742, + "step": 5470 + }, + { + "epoch": 0.4963708945744874, + "grad_norm": 0.0982999072407688, + "learning_rate": 0.00053017514688004, + "loss": 1.5807, + "step": 5471 + }, + { + "epoch": 0.4964616222101252, + "grad_norm": 0.0991579015278405, + "learning_rate": 0.0005300284864105747, + "loss": 1.6218, + "step": 5472 + }, + { + "epoch": 0.496552349845763, + "grad_norm": 0.09771355382760584, + "learning_rate": 0.0005298818233481451, + "loss": 1.6161, + "step": 5473 + }, + { + "epoch": 0.49664307748140085, + "grad_norm": 0.10138451270507244, + "learning_rate": 0.0005297351577054158, + "loss": 1.6116, + "step": 5474 + }, + { + "epoch": 0.49673380511703863, + "grad_norm": 0.09810833143709381, + "learning_rate": 0.0005295884894950513, + "loss": 1.6235, + "step": 5475 + }, + { + "epoch": 
0.49682453275267646, + "grad_norm": 0.09734203316001938, + "learning_rate": 0.000529441818729716, + "loss": 1.589, + "step": 5476 + }, + { + "epoch": 0.4969152603883143, + "grad_norm": 0.09936630774371395, + "learning_rate": 0.0005292951454220757, + "loss": 1.5923, + "step": 5477 + }, + { + "epoch": 0.49700598802395207, + "grad_norm": 0.10113310357767549, + "learning_rate": 0.0005291484695847951, + "loss": 1.6021, + "step": 5478 + }, + { + "epoch": 0.4970967156595899, + "grad_norm": 0.10302230784185776, + "learning_rate": 0.0005290017912305399, + "loss": 1.5657, + "step": 5479 + }, + { + "epoch": 0.49718744329522774, + "grad_norm": 0.0992979058028296, + "learning_rate": 0.0005288551103719757, + "loss": 1.6035, + "step": 5480 + }, + { + "epoch": 0.4972781709308655, + "grad_norm": 0.09666978869403671, + "learning_rate": 0.0005287084270217684, + "loss": 1.5962, + "step": 5481 + }, + { + "epoch": 0.49736889856650335, + "grad_norm": 0.09674236665159006, + "learning_rate": 0.0005285617411925845, + "loss": 1.568, + "step": 5482 + }, + { + "epoch": 0.4974596262021412, + "grad_norm": 0.09897300643821927, + "learning_rate": 0.0005284150528970898, + "loss": 1.599, + "step": 5483 + }, + { + "epoch": 0.497550353837779, + "grad_norm": 0.09519238203462688, + "learning_rate": 0.0005282683621479512, + "loss": 1.6157, + "step": 5484 + }, + { + "epoch": 0.4976410814734168, + "grad_norm": 0.1004326202374681, + "learning_rate": 0.0005281216689578352, + "loss": 1.6082, + "step": 5485 + }, + { + "epoch": 0.4977318091090546, + "grad_norm": 0.09807228211046493, + "learning_rate": 0.000527974973339409, + "loss": 1.5961, + "step": 5486 + }, + { + "epoch": 0.49782253674469246, + "grad_norm": 0.0970639843068602, + "learning_rate": 0.00052782827530534, + "loss": 1.616, + "step": 5487 + }, + { + "epoch": 0.49791326438033023, + "grad_norm": 0.09752408753533615, + "learning_rate": 0.0005276815748682949, + "loss": 1.5716, + "step": 5488 + }, + { + "epoch": 0.49800399201596807, + "grad_norm": 0.09843944236662071, + "learning_rate": 0.0005275348720409419, + "loss": 1.5563, + "step": 5489 + }, + { + "epoch": 0.4980947196516059, + "grad_norm": 0.09662669101196894, + "learning_rate": 0.0005273881668359488, + "loss": 1.6141, + "step": 5490 + }, + { + "epoch": 0.4981854472872437, + "grad_norm": 0.10081230351909883, + "learning_rate": 0.0005272414592659832, + "loss": 1.6012, + "step": 5491 + }, + { + "epoch": 0.4982761749228815, + "grad_norm": 0.09850399968972814, + "learning_rate": 0.0005270947493437137, + "loss": 1.59, + "step": 5492 + }, + { + "epoch": 0.49836690255851934, + "grad_norm": 0.10204898748708296, + "learning_rate": 0.0005269480370818086, + "loss": 1.5977, + "step": 5493 + }, + { + "epoch": 0.4984576301941571, + "grad_norm": 0.09684289625578822, + "learning_rate": 0.0005268013224929367, + "loss": 1.5887, + "step": 5494 + }, + { + "epoch": 0.49854835782979495, + "grad_norm": 0.09677385581691575, + "learning_rate": 0.0005266546055897664, + "loss": 1.6116, + "step": 5495 + }, + { + "epoch": 0.4986390854654328, + "grad_norm": 0.10138271307604443, + "learning_rate": 0.000526507886384967, + "loss": 1.547, + "step": 5496 + }, + { + "epoch": 0.49872981310107056, + "grad_norm": 0.09620953300352698, + "learning_rate": 0.0005263611648912078, + "loss": 1.6288, + "step": 5497 + }, + { + "epoch": 0.4988205407367084, + "grad_norm": 0.09561534921091934, + "learning_rate": 0.000526214441121158, + "loss": 1.57, + "step": 5498 + }, + { + "epoch": 0.49891126837234623, + "grad_norm": 0.10047866326964403, + "learning_rate": 
0.0005260677150874877, + "loss": 1.5843, + "step": 5499 + }, + { + "epoch": 0.499001996007984, + "grad_norm": 0.09688309175358356, + "learning_rate": 0.0005259209868028658, + "loss": 1.6038, + "step": 5500 + }, + { + "epoch": 0.49909272364362184, + "grad_norm": 0.09587902281022999, + "learning_rate": 0.0005257742562799633, + "loss": 1.5791, + "step": 5501 + }, + { + "epoch": 0.49918345127925967, + "grad_norm": 0.10177301359627802, + "learning_rate": 0.0005256275235314498, + "loss": 1.6097, + "step": 5502 + }, + { + "epoch": 0.4992741789148975, + "grad_norm": 0.09860504097554917, + "learning_rate": 0.0005254807885699958, + "loss": 1.6122, + "step": 5503 + }, + { + "epoch": 0.4993649065505353, + "grad_norm": 0.09901890602339448, + "learning_rate": 0.0005253340514082722, + "loss": 1.5931, + "step": 5504 + }, + { + "epoch": 0.4994556341861731, + "grad_norm": 0.09724359715797087, + "learning_rate": 0.0005251873120589493, + "loss": 1.6077, + "step": 5505 + }, + { + "epoch": 0.49954636182181095, + "grad_norm": 0.09630716480196781, + "learning_rate": 0.0005250405705346984, + "loss": 1.6032, + "step": 5506 + }, + { + "epoch": 0.4996370894574487, + "grad_norm": 0.10236472112606343, + "learning_rate": 0.0005248938268481904, + "loss": 1.5732, + "step": 5507 + }, + { + "epoch": 0.49972781709308656, + "grad_norm": 0.0940420911269685, + "learning_rate": 0.0005247470810120969, + "loss": 1.6147, + "step": 5508 + }, + { + "epoch": 0.4998185447287244, + "grad_norm": 0.09710665829981223, + "learning_rate": 0.0005246003330390894, + "loss": 1.5828, + "step": 5509 + }, + { + "epoch": 0.49990927236436217, + "grad_norm": 0.09862145562037325, + "learning_rate": 0.0005244535829418395, + "loss": 1.5911, + "step": 5510 + }, + { + "epoch": 0.5, + "grad_norm": 0.10069777437603349, + "learning_rate": 0.000524306830733019, + "loss": 1.6409, + "step": 5511 + }, + { + "epoch": 0.5000907276356378, + "grad_norm": 0.0952594464208466, + "learning_rate": 0.0005241600764253001, + "loss": 1.5942, + "step": 5512 + }, + { + "epoch": 0.5001814552712757, + "grad_norm": 0.09727416882992922, + "learning_rate": 0.000524013320031355, + "loss": 1.584, + "step": 5513 + }, + { + "epoch": 0.5002721829069134, + "grad_norm": 0.09906719911834228, + "learning_rate": 0.0005238665615638563, + "loss": 1.6099, + "step": 5514 + }, + { + "epoch": 0.5003629105425512, + "grad_norm": 0.11343720575884833, + "learning_rate": 0.0005237198010354763, + "loss": 1.5819, + "step": 5515 + }, + { + "epoch": 0.5004536381781891, + "grad_norm": 0.10357685011612067, + "learning_rate": 0.0005235730384588881, + "loss": 1.5891, + "step": 5516 + }, + { + "epoch": 0.5005443658138269, + "grad_norm": 0.09602329848480019, + "learning_rate": 0.0005234262738467646, + "loss": 1.5661, + "step": 5517 + }, + { + "epoch": 0.5006350934494647, + "grad_norm": 0.10033130429254214, + "learning_rate": 0.0005232795072117788, + "loss": 1.6112, + "step": 5518 + }, + { + "epoch": 0.5007258210851026, + "grad_norm": 0.0955219015803867, + "learning_rate": 0.0005231327385666043, + "loss": 1.5728, + "step": 5519 + }, + { + "epoch": 0.5008165487207403, + "grad_norm": 0.09691056680644994, + "learning_rate": 0.0005229859679239142, + "loss": 1.6489, + "step": 5520 + }, + { + "epoch": 0.5009072763563781, + "grad_norm": 0.09766110064116432, + "learning_rate": 0.0005228391952963826, + "loss": 1.6688, + "step": 5521 + }, + { + "epoch": 0.500998003992016, + "grad_norm": 0.10101899782783455, + "learning_rate": 0.000522692420696683, + "loss": 1.5864, + "step": 5522 + }, + { + "epoch": 0.5010887316276538, + 
"grad_norm": 0.1110521057175813, + "learning_rate": 0.0005225456441374895, + "loss": 1.6276, + "step": 5523 + }, + { + "epoch": 0.5011794592632915, + "grad_norm": 0.09909202398859104, + "learning_rate": 0.0005223988656314763, + "loss": 1.5943, + "step": 5524 + }, + { + "epoch": 0.5012701868989294, + "grad_norm": 0.1023754107974637, + "learning_rate": 0.0005222520851913177, + "loss": 1.554, + "step": 5525 + }, + { + "epoch": 0.5013609145345672, + "grad_norm": 0.09864642877139906, + "learning_rate": 0.0005221053028296884, + "loss": 1.6151, + "step": 5526 + }, + { + "epoch": 0.501451642170205, + "grad_norm": 0.09863259660184748, + "learning_rate": 0.0005219585185592629, + "loss": 1.6064, + "step": 5527 + }, + { + "epoch": 0.5015423698058429, + "grad_norm": 0.09675311939802021, + "learning_rate": 0.0005218117323927162, + "loss": 1.5724, + "step": 5528 + }, + { + "epoch": 0.5016330974414807, + "grad_norm": 0.10040160090060003, + "learning_rate": 0.0005216649443427231, + "loss": 1.6253, + "step": 5529 + }, + { + "epoch": 0.5017238250771184, + "grad_norm": 0.10044355084447024, + "learning_rate": 0.0005215181544219587, + "loss": 1.5906, + "step": 5530 + }, + { + "epoch": 0.5018145527127563, + "grad_norm": 0.09669596856678103, + "learning_rate": 0.0005213713626430986, + "loss": 1.583, + "step": 5531 + }, + { + "epoch": 0.5019052803483941, + "grad_norm": 0.09581838022141115, + "learning_rate": 0.0005212245690188182, + "loss": 1.5827, + "step": 5532 + }, + { + "epoch": 0.501996007984032, + "grad_norm": 0.09726879343275904, + "learning_rate": 0.0005210777735617933, + "loss": 1.5736, + "step": 5533 + }, + { + "epoch": 0.5020867356196698, + "grad_norm": 0.09801103707601337, + "learning_rate": 0.0005209309762846993, + "loss": 1.5902, + "step": 5534 + }, + { + "epoch": 0.5021774632553075, + "grad_norm": 0.10288887442549538, + "learning_rate": 0.0005207841772002126, + "loss": 1.6025, + "step": 5535 + }, + { + "epoch": 0.5022681908909454, + "grad_norm": 0.1005991982396262, + "learning_rate": 0.000520637376321009, + "loss": 1.6017, + "step": 5536 + }, + { + "epoch": 0.5023589185265832, + "grad_norm": 0.09981062743605365, + "learning_rate": 0.000520490573659765, + "loss": 1.5922, + "step": 5537 + }, + { + "epoch": 0.502449646162221, + "grad_norm": 0.09780463318804766, + "learning_rate": 0.000520343769229157, + "loss": 1.5479, + "step": 5538 + }, + { + "epoch": 0.5025403737978589, + "grad_norm": 0.09981159413168424, + "learning_rate": 0.0005201969630418612, + "loss": 1.602, + "step": 5539 + }, + { + "epoch": 0.5026311014334967, + "grad_norm": 0.09744490016872631, + "learning_rate": 0.000520050155110555, + "loss": 1.5847, + "step": 5540 + }, + { + "epoch": 0.5027218290691344, + "grad_norm": 0.1009267904831275, + "learning_rate": 0.0005199033454479148, + "loss": 1.5865, + "step": 5541 + }, + { + "epoch": 0.5028125567047723, + "grad_norm": 0.09833014143853526, + "learning_rate": 0.0005197565340666177, + "loss": 1.5986, + "step": 5542 + }, + { + "epoch": 0.5029032843404101, + "grad_norm": 0.10096668623367129, + "learning_rate": 0.0005196097209793412, + "loss": 1.6616, + "step": 5543 + }, + { + "epoch": 0.5029940119760479, + "grad_norm": 0.09946178594176358, + "learning_rate": 0.0005194629061987623, + "loss": 1.5765, + "step": 5544 + }, + { + "epoch": 0.5030847396116858, + "grad_norm": 0.09805137974729655, + "learning_rate": 0.0005193160897375588, + "loss": 1.6003, + "step": 5545 + }, + { + "epoch": 0.5031754672473235, + "grad_norm": 0.09661347493167877, + "learning_rate": 0.0005191692716084076, + "loss": 1.6105, + 
"step": 5546 + }, + { + "epoch": 0.5032661948829613, + "grad_norm": 0.09859726467538066, + "learning_rate": 0.0005190224518239874, + "loss": 1.6206, + "step": 5547 + }, + { + "epoch": 0.5033569225185992, + "grad_norm": 0.10327337455200195, + "learning_rate": 0.0005188756303969756, + "loss": 1.5744, + "step": 5548 + }, + { + "epoch": 0.503447650154237, + "grad_norm": 0.10092463237714631, + "learning_rate": 0.0005187288073400503, + "loss": 1.6281, + "step": 5549 + }, + { + "epoch": 0.5035383777898748, + "grad_norm": 0.1042419928972944, + "learning_rate": 0.0005185819826658896, + "loss": 1.5927, + "step": 5550 + }, + { + "epoch": 0.5036291054255126, + "grad_norm": 0.10044659857458932, + "learning_rate": 0.0005184351563871721, + "loss": 1.5852, + "step": 5551 + }, + { + "epoch": 0.5037198330611504, + "grad_norm": 0.10005658744922824, + "learning_rate": 0.000518288328516576, + "loss": 1.6248, + "step": 5552 + }, + { + "epoch": 0.5038105606967882, + "grad_norm": 0.10281930171483449, + "learning_rate": 0.0005181414990667804, + "loss": 1.5937, + "step": 5553 + }, + { + "epoch": 0.5039012883324261, + "grad_norm": 0.09884125542114262, + "learning_rate": 0.0005179946680504632, + "loss": 1.5398, + "step": 5554 + }, + { + "epoch": 0.5039920159680639, + "grad_norm": 0.09935384666711511, + "learning_rate": 0.000517847835480304, + "loss": 1.5798, + "step": 5555 + }, + { + "epoch": 0.5040827436037016, + "grad_norm": 0.10014470514878347, + "learning_rate": 0.0005177010013689816, + "loss": 1.5828, + "step": 5556 + }, + { + "epoch": 0.5041734712393395, + "grad_norm": 0.094333555923234, + "learning_rate": 0.000517554165729175, + "loss": 1.6243, + "step": 5557 + }, + { + "epoch": 0.5042641988749773, + "grad_norm": 0.09552504225863687, + "learning_rate": 0.0005174073285735636, + "loss": 1.57, + "step": 5558 + }, + { + "epoch": 0.5043549265106151, + "grad_norm": 0.09574029286264388, + "learning_rate": 0.000517260489914827, + "loss": 1.6461, + "step": 5559 + }, + { + "epoch": 0.504445654146253, + "grad_norm": 0.1017982161463892, + "learning_rate": 0.0005171136497656444, + "loss": 1.6005, + "step": 5560 + }, + { + "epoch": 0.5045363817818908, + "grad_norm": 0.09810833189839238, + "learning_rate": 0.0005169668081386958, + "loss": 1.5981, + "step": 5561 + }, + { + "epoch": 0.5046271094175285, + "grad_norm": 0.1008709879423238, + "learning_rate": 0.0005168199650466606, + "loss": 1.5987, + "step": 5562 + }, + { + "epoch": 0.5047178370531664, + "grad_norm": 0.10062932674946726, + "learning_rate": 0.000516673120502219, + "loss": 1.6159, + "step": 5563 + }, + { + "epoch": 0.5048085646888042, + "grad_norm": 0.10064056657974214, + "learning_rate": 0.000516526274518051, + "loss": 1.5529, + "step": 5564 + }, + { + "epoch": 0.504899292324442, + "grad_norm": 0.0974637122662692, + "learning_rate": 0.0005163794271068368, + "loss": 1.6269, + "step": 5565 + }, + { + "epoch": 0.5049900199600799, + "grad_norm": 0.0983323997749255, + "learning_rate": 0.0005162325782812565, + "loss": 1.604, + "step": 5566 + }, + { + "epoch": 0.5050807475957176, + "grad_norm": 0.10104324989330436, + "learning_rate": 0.0005160857280539909, + "loss": 1.5834, + "step": 5567 + }, + { + "epoch": 0.5051714752313554, + "grad_norm": 0.10094528950376265, + "learning_rate": 0.0005159388764377202, + "loss": 1.5804, + "step": 5568 + }, + { + "epoch": 0.5052622028669933, + "grad_norm": 0.10484673834029104, + "learning_rate": 0.0005157920234451251, + "loss": 1.5965, + "step": 5569 + }, + { + "epoch": 0.5053529305026311, + "grad_norm": 0.09498667066526656, + 
"learning_rate": 0.0005156451690888866, + "loss": 1.5804, + "step": 5570 + }, + { + "epoch": 0.505443658138269, + "grad_norm": 0.10227191022584668, + "learning_rate": 0.0005154983133816853, + "loss": 1.6355, + "step": 5571 + }, + { + "epoch": 0.5055343857739067, + "grad_norm": 0.09606716490480592, + "learning_rate": 0.0005153514563362025, + "loss": 1.5657, + "step": 5572 + }, + { + "epoch": 0.5056251134095445, + "grad_norm": 0.09920383471586006, + "learning_rate": 0.000515204597965119, + "loss": 1.5967, + "step": 5573 + }, + { + "epoch": 0.5057158410451824, + "grad_norm": 0.09846300270388296, + "learning_rate": 0.0005150577382811164, + "loss": 1.579, + "step": 5574 + }, + { + "epoch": 0.5058065686808202, + "grad_norm": 0.09543894575055117, + "learning_rate": 0.0005149108772968758, + "loss": 1.6014, + "step": 5575 + }, + { + "epoch": 0.505897296316458, + "grad_norm": 0.098178856431819, + "learning_rate": 0.0005147640150250787, + "loss": 1.6081, + "step": 5576 + }, + { + "epoch": 0.5059880239520959, + "grad_norm": 0.09837204670405843, + "learning_rate": 0.000514617151478407, + "loss": 1.6186, + "step": 5577 + }, + { + "epoch": 0.5060787515877336, + "grad_norm": 0.0970613557376345, + "learning_rate": 0.0005144702866695417, + "loss": 1.5636, + "step": 5578 + }, + { + "epoch": 0.5061694792233714, + "grad_norm": 0.09856469056347089, + "learning_rate": 0.0005143234206111654, + "loss": 1.629, + "step": 5579 + }, + { + "epoch": 0.5062602068590093, + "grad_norm": 0.10055664103294468, + "learning_rate": 0.0005141765533159592, + "loss": 1.6046, + "step": 5580 + }, + { + "epoch": 0.5063509344946471, + "grad_norm": 0.09742609568094204, + "learning_rate": 0.0005140296847966058, + "loss": 1.6056, + "step": 5581 + }, + { + "epoch": 0.5064416621302849, + "grad_norm": 0.09802500209854392, + "learning_rate": 0.000513882815065787, + "loss": 1.5737, + "step": 5582 + }, + { + "epoch": 0.5065323897659227, + "grad_norm": 0.10060799418944649, + "learning_rate": 0.0005137359441361851, + "loss": 1.5883, + "step": 5583 + }, + { + "epoch": 0.5066231174015605, + "grad_norm": 0.09689160503748812, + "learning_rate": 0.0005135890720204824, + "loss": 1.5944, + "step": 5584 + }, + { + "epoch": 0.5067138450371983, + "grad_norm": 0.1018623383615849, + "learning_rate": 0.0005134421987313613, + "loss": 1.6027, + "step": 5585 + }, + { + "epoch": 0.5068045726728362, + "grad_norm": 0.09356906720034003, + "learning_rate": 0.0005132953242815043, + "loss": 1.6013, + "step": 5586 + }, + { + "epoch": 0.506895300308474, + "grad_norm": 0.10052907602443296, + "learning_rate": 0.0005131484486835942, + "loss": 1.6126, + "step": 5587 + }, + { + "epoch": 0.5069860279441117, + "grad_norm": 0.09960676119951194, + "learning_rate": 0.0005130015719503136, + "loss": 1.6119, + "step": 5588 + }, + { + "epoch": 0.5070767555797496, + "grad_norm": 0.09662688608761832, + "learning_rate": 0.0005128546940943453, + "loss": 1.578, + "step": 5589 + }, + { + "epoch": 0.5071674832153874, + "grad_norm": 0.09660331317396924, + "learning_rate": 0.0005127078151283723, + "loss": 1.5993, + "step": 5590 + }, + { + "epoch": 0.5072582108510252, + "grad_norm": 0.09919266418909378, + "learning_rate": 0.0005125609350650776, + "loss": 1.5894, + "step": 5591 + }, + { + "epoch": 0.5073489384866631, + "grad_norm": 0.09515508328825617, + "learning_rate": 0.0005124140539171444, + "loss": 1.57, + "step": 5592 + }, + { + "epoch": 0.5074396661223008, + "grad_norm": 0.09590825893421677, + "learning_rate": 0.0005122671716972555, + "loss": 1.5989, + "step": 5593 + }, + { + "epoch": 
0.5075303937579386, + "grad_norm": 0.10256696032282134, + "learning_rate": 0.0005121202884180949, + "loss": 1.5892, + "step": 5594 + }, + { + "epoch": 0.5076211213935765, + "grad_norm": 0.10320125826117586, + "learning_rate": 0.0005119734040923454, + "loss": 1.5901, + "step": 5595 + }, + { + "epoch": 0.5077118490292143, + "grad_norm": 0.10611859839152478, + "learning_rate": 0.0005118265187326908, + "loss": 1.6051, + "step": 5596 + }, + { + "epoch": 0.5078025766648521, + "grad_norm": 0.0990541967152532, + "learning_rate": 0.0005116796323518144, + "loss": 1.6199, + "step": 5597 + }, + { + "epoch": 0.50789330430049, + "grad_norm": 0.10207043043128945, + "learning_rate": 0.0005115327449624, + "loss": 1.5883, + "step": 5598 + }, + { + "epoch": 0.5079840319361277, + "grad_norm": 0.0985986606501718, + "learning_rate": 0.0005113858565771317, + "loss": 1.5772, + "step": 5599 + }, + { + "epoch": 0.5080747595717655, + "grad_norm": 0.1010235252291212, + "learning_rate": 0.0005112389672086927, + "loss": 1.5995, + "step": 5600 + }, + { + "epoch": 0.5081654872074034, + "grad_norm": 0.09728190784878214, + "learning_rate": 0.0005110920768697674, + "loss": 1.6142, + "step": 5601 + }, + { + "epoch": 0.5082562148430412, + "grad_norm": 0.0969123040811508, + "learning_rate": 0.0005109451855730394, + "loss": 1.6161, + "step": 5602 + }, + { + "epoch": 0.508346942478679, + "grad_norm": 0.0962408419269555, + "learning_rate": 0.0005107982933311931, + "loss": 1.6204, + "step": 5603 + }, + { + "epoch": 0.5084376701143168, + "grad_norm": 0.10147691539727369, + "learning_rate": 0.0005106514001569128, + "loss": 1.6183, + "step": 5604 + }, + { + "epoch": 0.5085283977499546, + "grad_norm": 0.10414991943705676, + "learning_rate": 0.0005105045060628822, + "loss": 1.5627, + "step": 5605 + }, + { + "epoch": 0.5086191253855924, + "grad_norm": 0.09705255317942246, + "learning_rate": 0.0005103576110617863, + "loss": 1.6423, + "step": 5606 + }, + { + "epoch": 0.5087098530212303, + "grad_norm": 0.09997436984942666, + "learning_rate": 0.0005102107151663089, + "loss": 1.5897, + "step": 5607 + }, + { + "epoch": 0.5088005806568681, + "grad_norm": 0.09954566591537772, + "learning_rate": 0.0005100638183891347, + "loss": 1.641, + "step": 5608 + }, + { + "epoch": 0.508891308292506, + "grad_norm": 0.09558672758453211, + "learning_rate": 0.0005099169207429485, + "loss": 1.5922, + "step": 5609 + }, + { + "epoch": 0.5089820359281437, + "grad_norm": 0.09582290902608882, + "learning_rate": 0.0005097700222404345, + "loss": 1.6079, + "step": 5610 + }, + { + "epoch": 0.5090727635637815, + "grad_norm": 0.099461785063441, + "learning_rate": 0.0005096231228942779, + "loss": 1.6317, + "step": 5611 + }, + { + "epoch": 0.5091634911994194, + "grad_norm": 0.09470803168744764, + "learning_rate": 0.0005094762227171632, + "loss": 1.63, + "step": 5612 + }, + { + "epoch": 0.5092542188350572, + "grad_norm": 0.09808394517485144, + "learning_rate": 0.0005093293217217752, + "loss": 1.5714, + "step": 5613 + }, + { + "epoch": 0.509344946470695, + "grad_norm": 0.09695948213542732, + "learning_rate": 0.0005091824199207987, + "loss": 1.5706, + "step": 5614 + }, + { + "epoch": 0.5094356741063328, + "grad_norm": 0.09908172941531354, + "learning_rate": 0.0005090355173269191, + "loss": 1.5817, + "step": 5615 + }, + { + "epoch": 0.5095264017419706, + "grad_norm": 0.09734523689465675, + "learning_rate": 0.0005088886139528213, + "loss": 1.5592, + "step": 5616 + }, + { + "epoch": 0.5096171293776084, + "grad_norm": 0.09619455835875648, + "learning_rate": 0.0005087417098111901, + 
"loss": 1.5702, + "step": 5617 + }, + { + "epoch": 0.5097078570132463, + "grad_norm": 0.10111287483664662, + "learning_rate": 0.0005085948049147113, + "loss": 1.5934, + "step": 5618 + }, + { + "epoch": 0.5097985846488841, + "grad_norm": 0.10440085438980665, + "learning_rate": 0.0005084478992760697, + "loss": 1.6122, + "step": 5619 + }, + { + "epoch": 0.5098893122845218, + "grad_norm": 0.10327051323712438, + "learning_rate": 0.0005083009929079505, + "loss": 1.584, + "step": 5620 + }, + { + "epoch": 0.5099800399201597, + "grad_norm": 0.09966460371038967, + "learning_rate": 0.0005081540858230397, + "loss": 1.5997, + "step": 5621 + }, + { + "epoch": 0.5100707675557975, + "grad_norm": 0.09785048147666016, + "learning_rate": 0.0005080071780340223, + "loss": 1.612, + "step": 5622 + }, + { + "epoch": 0.5101614951914353, + "grad_norm": 0.10171450839935427, + "learning_rate": 0.0005078602695535837, + "loss": 1.6255, + "step": 5623 + }, + { + "epoch": 0.5102522228270732, + "grad_norm": 0.09772092052046898, + "learning_rate": 0.0005077133603944099, + "loss": 1.579, + "step": 5624 + }, + { + "epoch": 0.5103429504627109, + "grad_norm": 0.09447597507082435, + "learning_rate": 0.0005075664505691861, + "loss": 1.5889, + "step": 5625 + }, + { + "epoch": 0.5104336780983487, + "grad_norm": 0.09722147334083844, + "learning_rate": 0.0005074195400905984, + "loss": 1.5954, + "step": 5626 + }, + { + "epoch": 0.5105244057339866, + "grad_norm": 0.09675129501803917, + "learning_rate": 0.0005072726289713323, + "loss": 1.5792, + "step": 5627 + }, + { + "epoch": 0.5106151333696244, + "grad_norm": 0.09910071961621604, + "learning_rate": 0.0005071257172240735, + "loss": 1.5917, + "step": 5628 + }, + { + "epoch": 0.5107058610052622, + "grad_norm": 0.09871766628744166, + "learning_rate": 0.0005069788048615079, + "loss": 1.6175, + "step": 5629 + }, + { + "epoch": 0.5107965886409, + "grad_norm": 0.09784620628553203, + "learning_rate": 0.0005068318918963216, + "loss": 1.5939, + "step": 5630 + }, + { + "epoch": 0.5108873162765378, + "grad_norm": 0.09991036109762777, + "learning_rate": 0.0005066849783412006, + "loss": 1.6241, + "step": 5631 + }, + { + "epoch": 0.5109780439121756, + "grad_norm": 0.09713020300544535, + "learning_rate": 0.0005065380642088303, + "loss": 1.5699, + "step": 5632 + }, + { + "epoch": 0.5110687715478135, + "grad_norm": 0.10029359312064047, + "learning_rate": 0.0005063911495118975, + "loss": 1.6209, + "step": 5633 + }, + { + "epoch": 0.5111594991834513, + "grad_norm": 0.1087875265551214, + "learning_rate": 0.0005062442342630879, + "loss": 1.6309, + "step": 5634 + }, + { + "epoch": 0.511250226819089, + "grad_norm": 0.10526156656420592, + "learning_rate": 0.0005060973184750877, + "loss": 1.5674, + "step": 5635 + }, + { + "epoch": 0.5113409544547269, + "grad_norm": 0.10078059826944319, + "learning_rate": 0.0005059504021605832, + "loss": 1.5851, + "step": 5636 + }, + { + "epoch": 0.5114316820903647, + "grad_norm": 0.1009642988905601, + "learning_rate": 0.0005058034853322606, + "loss": 1.5705, + "step": 5637 + }, + { + "epoch": 0.5115224097260025, + "grad_norm": 0.09946579509039409, + "learning_rate": 0.0005056565680028063, + "loss": 1.6003, + "step": 5638 + }, + { + "epoch": 0.5116131373616404, + "grad_norm": 0.09770554992290634, + "learning_rate": 0.0005055096501849065, + "loss": 1.566, + "step": 5639 + }, + { + "epoch": 0.5117038649972782, + "grad_norm": 0.10004651771473648, + "learning_rate": 0.0005053627318912475, + "loss": 1.6199, + "step": 5640 + }, + { + "epoch": 0.5117945926329159, + "grad_norm": 
0.09937954229839334, + "learning_rate": 0.0005052158131345157, + "loss": 1.5673, + "step": 5641 + }, + { + "epoch": 0.5118853202685538, + "grad_norm": 0.09839050836709116, + "learning_rate": 0.0005050688939273978, + "loss": 1.6345, + "step": 5642 + }, + { + "epoch": 0.5119760479041916, + "grad_norm": 0.09678883822583849, + "learning_rate": 0.0005049219742825803, + "loss": 1.6081, + "step": 5643 + }, + { + "epoch": 0.5120667755398294, + "grad_norm": 0.09900825201402674, + "learning_rate": 0.0005047750542127493, + "loss": 1.5757, + "step": 5644 + }, + { + "epoch": 0.5121575031754673, + "grad_norm": 0.09759308327026003, + "learning_rate": 0.0005046281337305919, + "loss": 1.601, + "step": 5645 + }, + { + "epoch": 0.512248230811105, + "grad_norm": 0.10180435229945344, + "learning_rate": 0.0005044812128487943, + "loss": 1.5895, + "step": 5646 + }, + { + "epoch": 0.5123389584467429, + "grad_norm": 0.096471890502979, + "learning_rate": 0.0005043342915800434, + "loss": 1.6195, + "step": 5647 + }, + { + "epoch": 0.5124296860823807, + "grad_norm": 0.10148162436071946, + "learning_rate": 0.0005041873699370258, + "loss": 1.6164, + "step": 5648 + }, + { + "epoch": 0.5125204137180185, + "grad_norm": 0.0994615924562717, + "learning_rate": 0.0005040404479324282, + "loss": 1.6094, + "step": 5649 + }, + { + "epoch": 0.5126111413536564, + "grad_norm": 0.0954500391881366, + "learning_rate": 0.0005038935255789373, + "loss": 1.6208, + "step": 5650 + }, + { + "epoch": 0.5127018689892942, + "grad_norm": 0.09668504349450048, + "learning_rate": 0.00050374660288924, + "loss": 1.5855, + "step": 5651 + }, + { + "epoch": 0.5127925966249319, + "grad_norm": 0.09981263869527467, + "learning_rate": 0.0005035996798760227, + "loss": 1.5774, + "step": 5652 + }, + { + "epoch": 0.5128833242605698, + "grad_norm": 0.09562358268898334, + "learning_rate": 0.0005034527565519728, + "loss": 1.6572, + "step": 5653 + }, + { + "epoch": 0.5129740518962076, + "grad_norm": 0.09629040792903988, + "learning_rate": 0.0005033058329297767, + "loss": 1.6392, + "step": 5654 + }, + { + "epoch": 0.5130647795318454, + "grad_norm": 0.10134411678427115, + "learning_rate": 0.0005031589090221215, + "loss": 1.5721, + "step": 5655 + }, + { + "epoch": 0.5131555071674833, + "grad_norm": 0.10189082770770426, + "learning_rate": 0.000503011984841694, + "loss": 1.6137, + "step": 5656 + }, + { + "epoch": 0.513246234803121, + "grad_norm": 0.09883280670015447, + "learning_rate": 0.0005028650604011813, + "loss": 1.5951, + "step": 5657 + }, + { + "epoch": 0.5133369624387588, + "grad_norm": 0.10878320263056088, + "learning_rate": 0.00050271813571327, + "loss": 1.6224, + "step": 5658 + }, + { + "epoch": 0.5134276900743967, + "grad_norm": 0.10156939251214833, + "learning_rate": 0.0005025712107906472, + "loss": 1.6335, + "step": 5659 + }, + { + "epoch": 0.5135184177100345, + "grad_norm": 0.09907587304248602, + "learning_rate": 0.0005024242856460002, + "loss": 1.5795, + "step": 5660 + }, + { + "epoch": 0.5136091453456723, + "grad_norm": 0.10449704232201149, + "learning_rate": 0.0005022773602920159, + "loss": 1.6052, + "step": 5661 + }, + { + "epoch": 0.5136998729813101, + "grad_norm": 0.10025745062490035, + "learning_rate": 0.000502130434741381, + "loss": 1.5805, + "step": 5662 + }, + { + "epoch": 0.5137906006169479, + "grad_norm": 0.09813675533853522, + "learning_rate": 0.0005019835090067827, + "loss": 1.5964, + "step": 5663 + }, + { + "epoch": 0.5138813282525857, + "grad_norm": 0.10123822428546725, + "learning_rate": 0.0005018365831009081, + "loss": 1.5966, + "step": 
5664 + }, + { + "epoch": 0.5139720558882236, + "grad_norm": 0.0963783750888031, + "learning_rate": 0.0005016896570364445, + "loss": 1.5905, + "step": 5665 + }, + { + "epoch": 0.5140627835238614, + "grad_norm": 0.10088308498266105, + "learning_rate": 0.0005015427308260787, + "loss": 1.5604, + "step": 5666 + }, + { + "epoch": 0.5141535111594991, + "grad_norm": 0.09863801542968934, + "learning_rate": 0.0005013958044824977, + "loss": 1.6228, + "step": 5667 + }, + { + "epoch": 0.514244238795137, + "grad_norm": 0.10093243493466116, + "learning_rate": 0.0005012488780183889, + "loss": 1.634, + "step": 5668 + }, + { + "epoch": 0.5143349664307748, + "grad_norm": 0.09502288129888797, + "learning_rate": 0.0005011019514464393, + "loss": 1.5657, + "step": 5669 + }, + { + "epoch": 0.5144256940664126, + "grad_norm": 0.09620113035160276, + "learning_rate": 0.0005009550247793361, + "loss": 1.6024, + "step": 5670 + }, + { + "epoch": 0.5145164217020505, + "grad_norm": 0.09929524786752984, + "learning_rate": 0.0005008080980297661, + "loss": 1.6019, + "step": 5671 + }, + { + "epoch": 0.5146071493376883, + "grad_norm": 0.10034162506896493, + "learning_rate": 0.000500661171210417, + "loss": 1.6221, + "step": 5672 + }, + { + "epoch": 0.514697876973326, + "grad_norm": 0.09914694592100534, + "learning_rate": 0.0005005142443339755, + "loss": 1.5851, + "step": 5673 + }, + { + "epoch": 0.5147886046089639, + "grad_norm": 0.09658261117006675, + "learning_rate": 0.000500367317413129, + "loss": 1.5877, + "step": 5674 + }, + { + "epoch": 0.5148793322446017, + "grad_norm": 0.09894638915320514, + "learning_rate": 0.0005002203904605646, + "loss": 1.6412, + "step": 5675 + }, + { + "epoch": 0.5149700598802395, + "grad_norm": 0.09593980245631219, + "learning_rate": 0.0005000734634889694, + "loss": 1.6034, + "step": 5676 + }, + { + "epoch": 0.5150607875158774, + "grad_norm": 0.09914657580074196, + "learning_rate": 0.0004999265365110306, + "loss": 1.6206, + "step": 5677 + }, + { + "epoch": 0.5151515151515151, + "grad_norm": 0.10250548644105827, + "learning_rate": 0.0004997796095394354, + "loss": 1.6083, + "step": 5678 + }, + { + "epoch": 0.5152422427871529, + "grad_norm": 0.10521834018702178, + "learning_rate": 0.0004996326825868711, + "loss": 1.5549, + "step": 5679 + }, + { + "epoch": 0.5153329704227908, + "grad_norm": 0.1048816293703604, + "learning_rate": 0.0004994857556660246, + "loss": 1.5625, + "step": 5680 + }, + { + "epoch": 0.5154236980584286, + "grad_norm": 0.0951839847164587, + "learning_rate": 0.0004993388287895831, + "loss": 1.5993, + "step": 5681 + }, + { + "epoch": 0.5155144256940664, + "grad_norm": 0.09657895223887833, + "learning_rate": 0.0004991919019702339, + "loss": 1.5941, + "step": 5682 + }, + { + "epoch": 0.5156051533297042, + "grad_norm": 0.10188803694177044, + "learning_rate": 0.0004990449752206641, + "loss": 1.5535, + "step": 5683 + }, + { + "epoch": 0.515695880965342, + "grad_norm": 0.09686643272740816, + "learning_rate": 0.0004988980485535608, + "loss": 1.5919, + "step": 5684 + }, + { + "epoch": 0.5157866086009799, + "grad_norm": 0.09677343005776076, + "learning_rate": 0.0004987511219816111, + "loss": 1.583, + "step": 5685 + }, + { + "epoch": 0.5158773362366177, + "grad_norm": 0.10230740281156511, + "learning_rate": 0.0004986041955175023, + "loss": 1.5968, + "step": 5686 + }, + { + "epoch": 0.5159680638722555, + "grad_norm": 0.09686304283312384, + "learning_rate": 0.0004984572691739213, + "loss": 1.6197, + "step": 5687 + }, + { + "epoch": 0.5160587915078934, + "grad_norm": 0.0977740969599323, + 
"learning_rate": 0.0004983103429635555, + "loss": 1.606, + "step": 5688 + }, + { + "epoch": 0.5161495191435311, + "grad_norm": 0.09752388436454262, + "learning_rate": 0.0004981634168990919, + "loss": 1.6386, + "step": 5689 + }, + { + "epoch": 0.5162402467791689, + "grad_norm": 0.1004848240432961, + "learning_rate": 0.0004980164909932173, + "loss": 1.6592, + "step": 5690 + }, + { + "epoch": 0.5163309744148068, + "grad_norm": 0.09755287102300425, + "learning_rate": 0.0004978695652586191, + "loss": 1.5774, + "step": 5691 + }, + { + "epoch": 0.5164217020504446, + "grad_norm": 0.10299133547614601, + "learning_rate": 0.0004977226397079843, + "loss": 1.6261, + "step": 5692 + }, + { + "epoch": 0.5165124296860824, + "grad_norm": 0.09627219786764424, + "learning_rate": 0.0004975757143539997, + "loss": 1.5598, + "step": 5693 + }, + { + "epoch": 0.5166031573217202, + "grad_norm": 0.09483729618240153, + "learning_rate": 0.0004974287892093527, + "loss": 1.6199, + "step": 5694 + }, + { + "epoch": 0.516693884957358, + "grad_norm": 0.09836105046771756, + "learning_rate": 0.0004972818642867301, + "loss": 1.6249, + "step": 5695 + }, + { + "epoch": 0.5167846125929958, + "grad_norm": 0.09638649155869917, + "learning_rate": 0.0004971349395988189, + "loss": 1.5711, + "step": 5696 + }, + { + "epoch": 0.5168753402286337, + "grad_norm": 0.10052054026450542, + "learning_rate": 0.0004969880151583061, + "loss": 1.6162, + "step": 5697 + }, + { + "epoch": 0.5169660678642715, + "grad_norm": 0.09825801925953873, + "learning_rate": 0.0004968410909778786, + "loss": 1.5787, + "step": 5698 + }, + { + "epoch": 0.5170567954999092, + "grad_norm": 0.09518504603046739, + "learning_rate": 0.0004966941670702235, + "loss": 1.582, + "step": 5699 + }, + { + "epoch": 0.5171475231355471, + "grad_norm": 0.0980278699145611, + "learning_rate": 0.0004965472434480272, + "loss": 1.6126, + "step": 5700 + }, + { + "epoch": 0.5172382507711849, + "grad_norm": 0.09697460835933908, + "learning_rate": 0.0004964003201239773, + "loss": 1.6012, + "step": 5701 + }, + { + "epoch": 0.5173289784068227, + "grad_norm": 0.10312477824041666, + "learning_rate": 0.0004962533971107602, + "loss": 1.5886, + "step": 5702 + }, + { + "epoch": 0.5174197060424606, + "grad_norm": 0.10266356181273506, + "learning_rate": 0.0004961064744210627, + "loss": 1.5849, + "step": 5703 + }, + { + "epoch": 0.5175104336780983, + "grad_norm": 0.09538306666482971, + "learning_rate": 0.0004959595520675719, + "loss": 1.5622, + "step": 5704 + }, + { + "epoch": 0.5176011613137361, + "grad_norm": 0.09848078562068897, + "learning_rate": 0.0004958126300629743, + "loss": 1.6184, + "step": 5705 + }, + { + "epoch": 0.517691888949374, + "grad_norm": 0.09851308863801196, + "learning_rate": 0.0004956657084199567, + "loss": 1.5826, + "step": 5706 + }, + { + "epoch": 0.5177826165850118, + "grad_norm": 0.09306924655687777, + "learning_rate": 0.0004955187871512058, + "loss": 1.5768, + "step": 5707 + }, + { + "epoch": 0.5178733442206496, + "grad_norm": 0.0971237035746852, + "learning_rate": 0.0004953718662694082, + "loss": 1.5789, + "step": 5708 + }, + { + "epoch": 0.5179640718562875, + "grad_norm": 0.09912881243336237, + "learning_rate": 0.0004952249457872507, + "loss": 1.6284, + "step": 5709 + }, + { + "epoch": 0.5180547994919252, + "grad_norm": 0.09751045232938513, + "learning_rate": 0.0004950780257174199, + "loss": 1.6247, + "step": 5710 + }, + { + "epoch": 0.518145527127563, + "grad_norm": 0.1003773758097117, + "learning_rate": 0.0004949311060726023, + "loss": 1.6012, + "step": 5711 + }, + { + 
"epoch": 0.5182362547632009, + "grad_norm": 0.09847177778738919, + "learning_rate": 0.0004947841868654843, + "loss": 1.5907, + "step": 5712 + }, + { + "epoch": 0.5183269823988387, + "grad_norm": 0.09864077811585859, + "learning_rate": 0.0004946372681087527, + "loss": 1.5994, + "step": 5713 + }, + { + "epoch": 0.5184177100344765, + "grad_norm": 0.09825675609759428, + "learning_rate": 0.0004944903498150938, + "loss": 1.5586, + "step": 5714 + }, + { + "epoch": 0.5185084376701143, + "grad_norm": 0.10064653318684863, + "learning_rate": 0.0004943434319971938, + "loss": 1.5913, + "step": 5715 + }, + { + "epoch": 0.5185991653057521, + "grad_norm": 0.09955714614047201, + "learning_rate": 0.0004941965146677393, + "loss": 1.584, + "step": 5716 + }, + { + "epoch": 0.5186898929413899, + "grad_norm": 0.10105463505260899, + "learning_rate": 0.0004940495978394167, + "loss": 1.5894, + "step": 5717 + }, + { + "epoch": 0.5187806205770278, + "grad_norm": 0.1000388097304499, + "learning_rate": 0.0004939026815249124, + "loss": 1.5837, + "step": 5718 + }, + { + "epoch": 0.5188713482126656, + "grad_norm": 0.10071725578956149, + "learning_rate": 0.0004937557657369122, + "loss": 1.5666, + "step": 5719 + }, + { + "epoch": 0.5189620758483033, + "grad_norm": 0.09861401541121076, + "learning_rate": 0.0004936088504881026, + "loss": 1.5714, + "step": 5720 + }, + { + "epoch": 0.5190528034839412, + "grad_norm": 0.10309909853288227, + "learning_rate": 0.0004934619357911699, + "loss": 1.6266, + "step": 5721 + }, + { + "epoch": 0.519143531119579, + "grad_norm": 0.10187326938457963, + "learning_rate": 0.0004933150216587997, + "loss": 1.5776, + "step": 5722 + }, + { + "epoch": 0.5192342587552169, + "grad_norm": 0.09855686154005792, + "learning_rate": 0.0004931681081036786, + "loss": 1.6059, + "step": 5723 + }, + { + "epoch": 0.5193249863908547, + "grad_norm": 0.10012024878261501, + "learning_rate": 0.000493021195138492, + "loss": 1.6062, + "step": 5724 + }, + { + "epoch": 0.5194157140264924, + "grad_norm": 0.100666107087747, + "learning_rate": 0.0004928742827759266, + "loss": 1.5913, + "step": 5725 + }, + { + "epoch": 0.5195064416621303, + "grad_norm": 0.09818305190290921, + "learning_rate": 0.0004927273710286678, + "loss": 1.5573, + "step": 5726 + }, + { + "epoch": 0.5195971692977681, + "grad_norm": 0.09617075225307492, + "learning_rate": 0.0004925804599094016, + "loss": 1.6264, + "step": 5727 + }, + { + "epoch": 0.5196878969334059, + "grad_norm": 0.09760330586295153, + "learning_rate": 0.0004924335494308139, + "loss": 1.5938, + "step": 5728 + }, + { + "epoch": 0.5197786245690438, + "grad_norm": 0.09701663300764542, + "learning_rate": 0.0004922866396055902, + "loss": 1.6043, + "step": 5729 + }, + { + "epoch": 0.5198693522046816, + "grad_norm": 0.09989878672523159, + "learning_rate": 0.0004921397304464163, + "loss": 1.5733, + "step": 5730 + }, + { + "epoch": 0.5199600798403193, + "grad_norm": 0.09607843062151021, + "learning_rate": 0.0004919928219659779, + "loss": 1.5667, + "step": 5731 + }, + { + "epoch": 0.5200508074759572, + "grad_norm": 0.09734602144703933, + "learning_rate": 0.0004918459141769602, + "loss": 1.5973, + "step": 5732 + }, + { + "epoch": 0.520141535111595, + "grad_norm": 0.0998714463944715, + "learning_rate": 0.0004916990070920495, + "loss": 1.5958, + "step": 5733 + }, + { + "epoch": 0.5202322627472328, + "grad_norm": 0.10217384774085295, + "learning_rate": 0.0004915521007239304, + "loss": 1.6122, + "step": 5734 + }, + { + "epoch": 0.5203229903828707, + "grad_norm": 0.0966174231666751, + "learning_rate": 
0.0004914051950852888, + "loss": 1.6209, + "step": 5735 + }, + { + "epoch": 0.5204137180185084, + "grad_norm": 0.10090260663912179, + "learning_rate": 0.0004912582901888099, + "loss": 1.595, + "step": 5736 + }, + { + "epoch": 0.5205044456541462, + "grad_norm": 0.09779797423375709, + "learning_rate": 0.0004911113860471788, + "loss": 1.5681, + "step": 5737 + }, + { + "epoch": 0.5205951732897841, + "grad_norm": 0.09875404056426186, + "learning_rate": 0.0004909644826730811, + "loss": 1.578, + "step": 5738 + }, + { + "epoch": 0.5206859009254219, + "grad_norm": 0.0993302547170307, + "learning_rate": 0.0004908175800792012, + "loss": 1.576, + "step": 5739 + }, + { + "epoch": 0.5207766285610597, + "grad_norm": 0.09939101307973922, + "learning_rate": 0.0004906706782782249, + "loss": 1.5614, + "step": 5740 + }, + { + "epoch": 0.5208673561966976, + "grad_norm": 0.10072059702776183, + "learning_rate": 0.0004905237772828369, + "loss": 1.5796, + "step": 5741 + }, + { + "epoch": 0.5209580838323353, + "grad_norm": 0.10236299480514525, + "learning_rate": 0.0004903768771057221, + "loss": 1.6276, + "step": 5742 + }, + { + "epoch": 0.5210488114679731, + "grad_norm": 0.09799676891044144, + "learning_rate": 0.0004902299777595655, + "loss": 1.5978, + "step": 5743 + }, + { + "epoch": 0.521139539103611, + "grad_norm": 0.0993752635013165, + "learning_rate": 0.0004900830792570515, + "loss": 1.6097, + "step": 5744 + }, + { + "epoch": 0.5212302667392488, + "grad_norm": 0.10181556698524453, + "learning_rate": 0.0004899361816108653, + "loss": 1.5957, + "step": 5745 + }, + { + "epoch": 0.5213209943748865, + "grad_norm": 0.09731982797291638, + "learning_rate": 0.0004897892848336914, + "loss": 1.5845, + "step": 5746 + }, + { + "epoch": 0.5214117220105244, + "grad_norm": 0.09744862586466631, + "learning_rate": 0.0004896423889382139, + "loss": 1.6204, + "step": 5747 + }, + { + "epoch": 0.5215024496461622, + "grad_norm": 0.09672179455041405, + "learning_rate": 0.0004894954939371178, + "loss": 1.6239, + "step": 5748 + }, + { + "epoch": 0.5215931772818, + "grad_norm": 0.10211064393864613, + "learning_rate": 0.0004893485998430873, + "loss": 1.6101, + "step": 5749 + }, + { + "epoch": 0.5216839049174379, + "grad_norm": 0.09773143425718675, + "learning_rate": 0.0004892017066688069, + "loss": 1.6075, + "step": 5750 + }, + { + "epoch": 0.5217746325530757, + "grad_norm": 0.09546117012299778, + "learning_rate": 0.0004890548144269606, + "loss": 1.5525, + "step": 5751 + }, + { + "epoch": 0.5218653601887134, + "grad_norm": 0.09863218734411801, + "learning_rate": 0.0004889079231302328, + "loss": 1.5764, + "step": 5752 + }, + { + "epoch": 0.5219560878243513, + "grad_norm": 0.09739601342240592, + "learning_rate": 0.0004887610327913075, + "loss": 1.5371, + "step": 5753 + }, + { + "epoch": 0.5220468154599891, + "grad_norm": 0.09867057043927027, + "learning_rate": 0.0004886141434228686, + "loss": 1.6312, + "step": 5754 + }, + { + "epoch": 0.5221375430956269, + "grad_norm": 0.09602877950533006, + "learning_rate": 0.0004884672550375999, + "loss": 1.6067, + "step": 5755 + }, + { + "epoch": 0.5222282707312648, + "grad_norm": 0.09949697316616453, + "learning_rate": 0.0004883203676481856, + "loss": 1.5792, + "step": 5756 + }, + { + "epoch": 0.5223189983669025, + "grad_norm": 0.09625775345210073, + "learning_rate": 0.00048817348126730943, + "loss": 1.6162, + "step": 5757 + }, + { + "epoch": 0.5224097260025403, + "grad_norm": 0.09527564280953527, + "learning_rate": 0.0004880265959076547, + "loss": 1.5793, + "step": 5758 + }, + { + "epoch": 
0.5225004536381782, + "grad_norm": 0.10287211833876878, + "learning_rate": 0.00048787971158190525, + "loss": 1.6389, + "step": 5759 + }, + { + "epoch": 0.522591181273816, + "grad_norm": 0.09872695400133583, + "learning_rate": 0.00048773282830274453, + "loss": 1.6127, + "step": 5760 + }, + { + "epoch": 0.5226819089094539, + "grad_norm": 0.09564134696434329, + "learning_rate": 0.00048758594608285584, + "loss": 1.5651, + "step": 5761 + }, + { + "epoch": 0.5227726365450917, + "grad_norm": 0.09447073513042031, + "learning_rate": 0.0004874390649349225, + "loss": 1.5938, + "step": 5762 + }, + { + "epoch": 0.5228633641807294, + "grad_norm": 0.09876729581764547, + "learning_rate": 0.0004872921848716276, + "loss": 1.5975, + "step": 5763 + }, + { + "epoch": 0.5229540918163673, + "grad_norm": 0.0968538848439749, + "learning_rate": 0.0004871453059056548, + "loss": 1.5932, + "step": 5764 + }, + { + "epoch": 0.5230448194520051, + "grad_norm": 0.09919815278891418, + "learning_rate": 0.00048699842804968645, + "loss": 1.5461, + "step": 5765 + }, + { + "epoch": 0.5231355470876429, + "grad_norm": 0.09711861709581487, + "learning_rate": 0.0004868515513164058, + "loss": 1.6129, + "step": 5766 + }, + { + "epoch": 0.5232262747232808, + "grad_norm": 0.10230139516436346, + "learning_rate": 0.00048670467571849577, + "loss": 1.6158, + "step": 5767 + }, + { + "epoch": 0.5233170023589185, + "grad_norm": 0.09637027154311492, + "learning_rate": 0.0004865578012686388, + "loss": 1.5613, + "step": 5768 + }, + { + "epoch": 0.5234077299945563, + "grad_norm": 0.1001494736513091, + "learning_rate": 0.0004864109279795177, + "loss": 1.6457, + "step": 5769 + }, + { + "epoch": 0.5234984576301942, + "grad_norm": 0.10078831913353778, + "learning_rate": 0.0004862640558638151, + "loss": 1.6276, + "step": 5770 + }, + { + "epoch": 0.523589185265832, + "grad_norm": 0.09823654038009834, + "learning_rate": 0.0004861171849342129, + "loss": 1.612, + "step": 5771 + }, + { + "epoch": 0.5236799129014698, + "grad_norm": 0.09459243963095819, + "learning_rate": 0.00048597031520339427, + "loss": 1.5813, + "step": 5772 + }, + { + "epoch": 0.5237706405371076, + "grad_norm": 0.09841637777456008, + "learning_rate": 0.00048582344668404075, + "loss": 1.587, + "step": 5773 + }, + { + "epoch": 0.5238613681727454, + "grad_norm": 0.09546548680231343, + "learning_rate": 0.0004856765793888348, + "loss": 1.6234, + "step": 5774 + }, + { + "epoch": 0.5239520958083832, + "grad_norm": 0.10024411753786538, + "learning_rate": 0.0004855297133304584, + "loss": 1.5711, + "step": 5775 + }, + { + "epoch": 0.5240428234440211, + "grad_norm": 0.09969827858007083, + "learning_rate": 0.00048538284852159323, + "loss": 1.5776, + "step": 5776 + }, + { + "epoch": 0.5241335510796589, + "grad_norm": 0.10217272103423242, + "learning_rate": 0.00048523598497492143, + "loss": 1.5893, + "step": 5777 + }, + { + "epoch": 0.5242242787152966, + "grad_norm": 0.10148512073548388, + "learning_rate": 0.00048508912270312415, + "loss": 1.5958, + "step": 5778 + }, + { + "epoch": 0.5243150063509345, + "grad_norm": 0.10095586361163543, + "learning_rate": 0.0004849422617188837, + "loss": 1.6357, + "step": 5779 + }, + { + "epoch": 0.5244057339865723, + "grad_norm": 0.10079817111986769, + "learning_rate": 0.00048479540203488097, + "loss": 1.5428, + "step": 5780 + }, + { + "epoch": 0.5244964616222101, + "grad_norm": 0.10003387309622017, + "learning_rate": 0.0004846485436637976, + "loss": 1.6051, + "step": 5781 + }, + { + "epoch": 0.524587189257848, + "grad_norm": 0.10256936918809055, + "learning_rate": 
0.0004845016866183148, + "loss": 1.57, + "step": 5782 + }, + { + "epoch": 0.5246779168934858, + "grad_norm": 0.09885330721741496, + "learning_rate": 0.00048435483091111356, + "loss": 1.6008, + "step": 5783 + }, + { + "epoch": 0.5247686445291235, + "grad_norm": 0.10066408668684096, + "learning_rate": 0.0004842079765548749, + "loss": 1.6308, + "step": 5784 + }, + { + "epoch": 0.5248593721647614, + "grad_norm": 0.10034590540937893, + "learning_rate": 0.00048406112356227997, + "loss": 1.593, + "step": 5785 + }, + { + "epoch": 0.5249500998003992, + "grad_norm": 0.0990585413491822, + "learning_rate": 0.00048391427194600917, + "loss": 1.6099, + "step": 5786 + }, + { + "epoch": 0.525040827436037, + "grad_norm": 0.09643270738432606, + "learning_rate": 0.00048376742171874344, + "loss": 1.6051, + "step": 5787 + }, + { + "epoch": 0.5251315550716749, + "grad_norm": 0.10783201276134344, + "learning_rate": 0.00048362057289316323, + "loss": 1.6239, + "step": 5788 + }, + { + "epoch": 0.5252222827073126, + "grad_norm": 0.09906554711068902, + "learning_rate": 0.00048347372548194914, + "loss": 1.6037, + "step": 5789 + }, + { + "epoch": 0.5253130103429504, + "grad_norm": 0.09551716513380093, + "learning_rate": 0.0004833268794977811, + "loss": 1.6053, + "step": 5790 + }, + { + "epoch": 0.5254037379785883, + "grad_norm": 0.09201422559862117, + "learning_rate": 0.00048318003495333953, + "loss": 1.5709, + "step": 5791 + }, + { + "epoch": 0.5254944656142261, + "grad_norm": 0.0986592572439027, + "learning_rate": 0.00048303319186130447, + "loss": 1.5617, + "step": 5792 + }, + { + "epoch": 0.5255851932498639, + "grad_norm": 0.09659815970645519, + "learning_rate": 0.0004828863502343557, + "loss": 1.6188, + "step": 5793 + }, + { + "epoch": 0.5256759208855017, + "grad_norm": 0.09551222010457414, + "learning_rate": 0.00048273951008517303, + "loss": 1.5781, + "step": 5794 + }, + { + "epoch": 0.5257666485211395, + "grad_norm": 0.09913368829221986, + "learning_rate": 0.00048259267142643626, + "loss": 1.599, + "step": 5795 + }, + { + "epoch": 0.5258573761567773, + "grad_norm": 0.10016948764149772, + "learning_rate": 0.0004824458342708251, + "loss": 1.6158, + "step": 5796 + }, + { + "epoch": 0.5259481037924152, + "grad_norm": 0.104690493825064, + "learning_rate": 0.0004822989986310185, + "loss": 1.6447, + "step": 5797 + }, + { + "epoch": 0.526038831428053, + "grad_norm": 0.0988726133080388, + "learning_rate": 0.000482152164519696, + "loss": 1.6041, + "step": 5798 + }, + { + "epoch": 0.5261295590636909, + "grad_norm": 0.10293784824120382, + "learning_rate": 0.0004820053319495369, + "loss": 1.6142, + "step": 5799 + }, + { + "epoch": 0.5262202866993286, + "grad_norm": 0.10397470843918026, + "learning_rate": 0.0004818585009332199, + "loss": 1.5815, + "step": 5800 + }, + { + "epoch": 0.5263110143349664, + "grad_norm": 0.09652040687161455, + "learning_rate": 0.00048171167148342405, + "loss": 1.6032, + "step": 5801 + }, + { + "epoch": 0.5264017419706043, + "grad_norm": 0.10078836340112841, + "learning_rate": 0.0004815648436128279, + "loss": 1.6117, + "step": 5802 + }, + { + "epoch": 0.5264924696062421, + "grad_norm": 0.09740443020936557, + "learning_rate": 0.00048141801733411044, + "loss": 1.5986, + "step": 5803 + }, + { + "epoch": 0.5265831972418799, + "grad_norm": 0.10193561703694362, + "learning_rate": 0.00048127119265994986, + "loss": 1.553, + "step": 5804 + }, + { + "epoch": 0.5266739248775177, + "grad_norm": 0.09992421094489744, + "learning_rate": 0.00048112436960302455, + "loss": 1.5965, + "step": 5805 + }, + { + "epoch": 
0.5267646525131555, + "grad_norm": 0.09640778995067867, + "learning_rate": 0.0004809775481760127, + "loss": 1.5713, + "step": 5806 + }, + { + "epoch": 0.5268553801487933, + "grad_norm": 0.09778010960022446, + "learning_rate": 0.00048083072839159234, + "loss": 1.5782, + "step": 5807 + }, + { + "epoch": 0.5269461077844312, + "grad_norm": 0.10131505278151044, + "learning_rate": 0.00048068391026244146, + "loss": 1.5669, + "step": 5808 + }, + { + "epoch": 0.527036835420069, + "grad_norm": 0.10292572742896644, + "learning_rate": 0.00048053709380123784, + "loss": 1.6016, + "step": 5809 + }, + { + "epoch": 0.5271275630557067, + "grad_norm": 0.10003494864029741, + "learning_rate": 0.00048039027902065876, + "loss": 1.6102, + "step": 5810 + }, + { + "epoch": 0.5272182906913446, + "grad_norm": 0.11468516928998014, + "learning_rate": 0.0004802434659333822, + "loss": 1.6122, + "step": 5811 + }, + { + "epoch": 0.5273090183269824, + "grad_norm": 0.09908263446610667, + "learning_rate": 0.00048009665455208523, + "loss": 1.5924, + "step": 5812 + }, + { + "epoch": 0.5273997459626202, + "grad_norm": 0.09961027869516145, + "learning_rate": 0.00047994984488944505, + "loss": 1.624, + "step": 5813 + }, + { + "epoch": 0.5274904735982581, + "grad_norm": 0.09611125734532859, + "learning_rate": 0.00047980303695813886, + "loss": 1.5908, + "step": 5814 + }, + { + "epoch": 0.5275812012338958, + "grad_norm": 0.10051185661482151, + "learning_rate": 0.00047965623077084326, + "loss": 1.6069, + "step": 5815 + }, + { + "epoch": 0.5276719288695336, + "grad_norm": 0.09702570523555797, + "learning_rate": 0.00047950942634023523, + "loss": 1.5845, + "step": 5816 + }, + { + "epoch": 0.5277626565051715, + "grad_norm": 0.10140181325316931, + "learning_rate": 0.000479362623678991, + "loss": 1.6314, + "step": 5817 + }, + { + "epoch": 0.5278533841408093, + "grad_norm": 0.09765013929025898, + "learning_rate": 0.0004792158227997875, + "loss": 1.5911, + "step": 5818 + }, + { + "epoch": 0.5279441117764471, + "grad_norm": 0.09872186670070654, + "learning_rate": 0.0004790690237153007, + "loss": 1.5719, + "step": 5819 + }, + { + "epoch": 0.528034839412085, + "grad_norm": 0.10224151863131821, + "learning_rate": 0.00047892222643820684, + "loss": 1.6053, + "step": 5820 + }, + { + "epoch": 0.5281255670477227, + "grad_norm": 0.09539699422758448, + "learning_rate": 0.00047877543098118186, + "loss": 1.5933, + "step": 5821 + }, + { + "epoch": 0.5282162946833605, + "grad_norm": 0.09637272004948833, + "learning_rate": 0.00047862863735690145, + "loss": 1.5836, + "step": 5822 + }, + { + "epoch": 0.5283070223189984, + "grad_norm": 0.09730649893121791, + "learning_rate": 0.00047848184557804134, + "loss": 1.6102, + "step": 5823 + }, + { + "epoch": 0.5283977499546362, + "grad_norm": 0.09968743842684896, + "learning_rate": 0.0004783350556572772, + "loss": 1.5637, + "step": 5824 + }, + { + "epoch": 0.528488477590274, + "grad_norm": 0.09885516457408229, + "learning_rate": 0.00047818826760728393, + "loss": 1.5548, + "step": 5825 + }, + { + "epoch": 0.5285792052259118, + "grad_norm": 0.09746024332049127, + "learning_rate": 0.0004780414814407371, + "loss": 1.6035, + "step": 5826 + }, + { + "epoch": 0.5286699328615496, + "grad_norm": 0.09526554053225475, + "learning_rate": 0.0004778946971703116, + "loss": 1.5427, + "step": 5827 + }, + { + "epoch": 0.5287606604971874, + "grad_norm": 0.09249306180049768, + "learning_rate": 0.0004777479148086823, + "loss": 1.5434, + "step": 5828 + }, + { + "epoch": 0.5288513881328253, + "grad_norm": 0.09538287928955416, + 
"learning_rate": 0.00047760113436852376, + "loss": 1.5943, + "step": 5829 + }, + { + "epoch": 0.5289421157684631, + "grad_norm": 0.09462478607793416, + "learning_rate": 0.00047745435586251056, + "loss": 1.5977, + "step": 5830 + }, + { + "epoch": 0.5290328434041008, + "grad_norm": 0.09597511292789958, + "learning_rate": 0.0004773075793033172, + "loss": 1.5107, + "step": 5831 + }, + { + "epoch": 0.5291235710397387, + "grad_norm": 0.09525340371830936, + "learning_rate": 0.0004771608047036176, + "loss": 1.5829, + "step": 5832 + }, + { + "epoch": 0.5292142986753765, + "grad_norm": 0.09552229779916355, + "learning_rate": 0.00047701403207608576, + "loss": 1.6002, + "step": 5833 + }, + { + "epoch": 0.5293050263110143, + "grad_norm": 0.09651304088429259, + "learning_rate": 0.0004768672614333957, + "loss": 1.5976, + "step": 5834 + }, + { + "epoch": 0.5293957539466522, + "grad_norm": 0.09439712570156905, + "learning_rate": 0.00047672049278822123, + "loss": 1.562, + "step": 5835 + }, + { + "epoch": 0.52948648158229, + "grad_norm": 0.0946445747605777, + "learning_rate": 0.00047657372615323546, + "loss": 1.5896, + "step": 5836 + }, + { + "epoch": 0.5295772092179278, + "grad_norm": 0.09227231014830996, + "learning_rate": 0.0004764269615411119, + "loss": 1.5793, + "step": 5837 + }, + { + "epoch": 0.5296679368535656, + "grad_norm": 0.09736867195725285, + "learning_rate": 0.00047628019896452377, + "loss": 1.5336, + "step": 5838 + }, + { + "epoch": 0.5297586644892034, + "grad_norm": 0.0999905945903141, + "learning_rate": 0.0004761334384361439, + "loss": 1.595, + "step": 5839 + }, + { + "epoch": 0.5298493921248413, + "grad_norm": 0.0967981456517446, + "learning_rate": 0.0004759866799686451, + "loss": 1.5783, + "step": 5840 + }, + { + "epoch": 0.5299401197604791, + "grad_norm": 0.0980264208561004, + "learning_rate": 0.0004758399235746999, + "loss": 1.5843, + "step": 5841 + }, + { + "epoch": 0.5300308473961168, + "grad_norm": 0.10099839941025664, + "learning_rate": 0.00047569316926698103, + "loss": 1.5967, + "step": 5842 + }, + { + "epoch": 0.5301215750317547, + "grad_norm": 0.09786680706944344, + "learning_rate": 0.00047554641705816066, + "loss": 1.601, + "step": 5843 + }, + { + "epoch": 0.5302123026673925, + "grad_norm": 0.09549200299383431, + "learning_rate": 0.00047539966696091065, + "loss": 1.6065, + "step": 5844 + }, + { + "epoch": 0.5303030303030303, + "grad_norm": 0.10167583465734159, + "learning_rate": 0.00047525291898790315, + "loss": 1.5911, + "step": 5845 + }, + { + "epoch": 0.5303937579386682, + "grad_norm": 0.09777366174992534, + "learning_rate": 0.00047510617315180966, + "loss": 1.5514, + "step": 5846 + }, + { + "epoch": 0.5304844855743059, + "grad_norm": 0.0945302831175847, + "learning_rate": 0.00047495942946530176, + "loss": 1.5936, + "step": 5847 + }, + { + "epoch": 0.5305752132099437, + "grad_norm": 0.09681882625073741, + "learning_rate": 0.0004748126879410509, + "loss": 1.5671, + "step": 5848 + }, + { + "epoch": 0.5306659408455816, + "grad_norm": 0.10028572300832037, + "learning_rate": 0.00047466594859172783, + "loss": 1.5429, + "step": 5849 + }, + { + "epoch": 0.5307566684812194, + "grad_norm": 0.09938739152797565, + "learning_rate": 0.0004745192114300042, + "loss": 1.6044, + "step": 5850 + }, + { + "epoch": 0.5308473961168572, + "grad_norm": 0.09982916676064514, + "learning_rate": 0.00047437247646855025, + "loss": 1.609, + "step": 5851 + }, + { + "epoch": 0.530938123752495, + "grad_norm": 0.09995365985390975, + "learning_rate": 0.0004742257437200368, + "loss": 1.5819, + "step": 5852 + }, + 
{ + "epoch": 0.5310288513881328, + "grad_norm": 0.09543567958166997, + "learning_rate": 0.0004740790131971343, + "loss": 1.552, + "step": 5853 + }, + { + "epoch": 0.5311195790237706, + "grad_norm": 0.0998327908500229, + "learning_rate": 0.0004739322849125126, + "loss": 1.6209, + "step": 5854 + }, + { + "epoch": 0.5312103066594085, + "grad_norm": 0.0975004183939282, + "learning_rate": 0.00047378555887884214, + "loss": 1.5626, + "step": 5855 + }, + { + "epoch": 0.5313010342950463, + "grad_norm": 0.09860625951948886, + "learning_rate": 0.0004736388351087922, + "loss": 1.6592, + "step": 5856 + }, + { + "epoch": 0.531391761930684, + "grad_norm": 0.10449780898699479, + "learning_rate": 0.000473492113615033, + "loss": 1.6065, + "step": 5857 + }, + { + "epoch": 0.5314824895663219, + "grad_norm": 0.09623519931927918, + "learning_rate": 0.00047334539441023366, + "loss": 1.5655, + "step": 5858 + }, + { + "epoch": 0.5315732172019597, + "grad_norm": 0.10504587495155555, + "learning_rate": 0.00047319867750706345, + "loss": 1.5766, + "step": 5859 + }, + { + "epoch": 0.5316639448375975, + "grad_norm": 0.0981115394501624, + "learning_rate": 0.00047305196291819147, + "loss": 1.5866, + "step": 5860 + }, + { + "epoch": 0.5317546724732354, + "grad_norm": 0.09710814736575889, + "learning_rate": 0.0004729052506562864, + "loss": 1.5758, + "step": 5861 + }, + { + "epoch": 0.5318454001088732, + "grad_norm": 0.09420595207967325, + "learning_rate": 0.0004727585407340168, + "loss": 1.5682, + "step": 5862 + }, + { + "epoch": 0.5319361277445109, + "grad_norm": 0.09838143906863621, + "learning_rate": 0.00047261183316405143, + "loss": 1.5825, + "step": 5863 + }, + { + "epoch": 0.5320268553801488, + "grad_norm": 0.09917215886157196, + "learning_rate": 0.0004724651279590581, + "loss": 1.6158, + "step": 5864 + }, + { + "epoch": 0.5321175830157866, + "grad_norm": 0.09990621860616075, + "learning_rate": 0.00047231842513170507, + "loss": 1.5856, + "step": 5865 + }, + { + "epoch": 0.5322083106514244, + "grad_norm": 0.09801021744818417, + "learning_rate": 0.0004721717246946602, + "loss": 1.5976, + "step": 5866 + }, + { + "epoch": 0.5322990382870623, + "grad_norm": 0.10010671616683078, + "learning_rate": 0.00047202502666059105, + "loss": 1.5974, + "step": 5867 + }, + { + "epoch": 0.5323897659227, + "grad_norm": 0.09702357054929575, + "learning_rate": 0.0004718783310421649, + "loss": 1.5762, + "step": 5868 + }, + { + "epoch": 0.5324804935583378, + "grad_norm": 0.09836113189127176, + "learning_rate": 0.00047173163785204893, + "loss": 1.5722, + "step": 5869 + }, + { + "epoch": 0.5325712211939757, + "grad_norm": 0.09866468802466595, + "learning_rate": 0.00047158494710291036, + "loss": 1.6194, + "step": 5870 + }, + { + "epoch": 0.5326619488296135, + "grad_norm": 0.09789026357634255, + "learning_rate": 0.0004714382588074157, + "loss": 1.5703, + "step": 5871 + }, + { + "epoch": 0.5327526764652513, + "grad_norm": 0.09745201379742759, + "learning_rate": 0.00047129157297823147, + "loss": 1.6156, + "step": 5872 + }, + { + "epoch": 0.5328434041008892, + "grad_norm": 0.09791532315210448, + "learning_rate": 0.00047114488962802424, + "loss": 1.6096, + "step": 5873 + }, + { + "epoch": 0.5329341317365269, + "grad_norm": 0.0978178955997177, + "learning_rate": 0.0004709982087694602, + "loss": 1.5878, + "step": 5874 + }, + { + "epoch": 0.5330248593721648, + "grad_norm": 0.0959676496730377, + "learning_rate": 0.00047085153041520497, + "loss": 1.6111, + "step": 5875 + }, + { + "epoch": 0.5331155870078026, + "grad_norm": 0.09684153088725589, + 
"learning_rate": 0.0004707048545779244, + "loss": 1.6185, + "step": 5876 + }, + { + "epoch": 0.5332063146434404, + "grad_norm": 0.09447575734376457, + "learning_rate": 0.00047055818127028404, + "loss": 1.6026, + "step": 5877 + }, + { + "epoch": 0.5332970422790783, + "grad_norm": 0.10128632299483552, + "learning_rate": 0.000470411510504949, + "loss": 1.6284, + "step": 5878 + }, + { + "epoch": 0.533387769914716, + "grad_norm": 0.09542965548621123, + "learning_rate": 0.0004702648422945843, + "loss": 1.5835, + "step": 5879 + }, + { + "epoch": 0.5334784975503538, + "grad_norm": 0.09898102703462616, + "learning_rate": 0.0004701181766518548, + "loss": 1.5637, + "step": 5880 + }, + { + "epoch": 0.5335692251859917, + "grad_norm": 0.09686897531259761, + "learning_rate": 0.00046997151358942524, + "loss": 1.5639, + "step": 5881 + }, + { + "epoch": 0.5336599528216295, + "grad_norm": 0.09691807577517791, + "learning_rate": 0.00046982485311996, + "loss": 1.614, + "step": 5882 + }, + { + "epoch": 0.5337506804572673, + "grad_norm": 0.09578482416047567, + "learning_rate": 0.0004696781952561231, + "loss": 1.62, + "step": 5883 + }, + { + "epoch": 0.5338414080929051, + "grad_norm": 0.09451078761581204, + "learning_rate": 0.00046953154001057865, + "loss": 1.5583, + "step": 5884 + }, + { + "epoch": 0.5339321357285429, + "grad_norm": 0.09900625850531619, + "learning_rate": 0.00046938488739599, + "loss": 1.5429, + "step": 5885 + }, + { + "epoch": 0.5340228633641807, + "grad_norm": 0.09731506132401233, + "learning_rate": 0.00046923823742502086, + "loss": 1.588, + "step": 5886 + }, + { + "epoch": 0.5341135909998186, + "grad_norm": 0.10179552842338135, + "learning_rate": 0.0004690915901103346, + "loss": 1.5745, + "step": 5887 + }, + { + "epoch": 0.5342043186354564, + "grad_norm": 0.10121317818227955, + "learning_rate": 0.0004689449454645937, + "loss": 1.5757, + "step": 5888 + }, + { + "epoch": 0.5342950462710941, + "grad_norm": 0.0985578283641816, + "learning_rate": 0.0004687983035004617, + "loss": 1.5832, + "step": 5889 + }, + { + "epoch": 0.534385773906732, + "grad_norm": 0.09971169278543933, + "learning_rate": 0.0004686516642306007, + "loss": 1.6297, + "step": 5890 + }, + { + "epoch": 0.5344765015423698, + "grad_norm": 0.10079152336442058, + "learning_rate": 0.00046850502766767303, + "loss": 1.595, + "step": 5891 + }, + { + "epoch": 0.5345672291780076, + "grad_norm": 0.0955706391770076, + "learning_rate": 0.0004683583938243411, + "loss": 1.5657, + "step": 5892 + }, + { + "epoch": 0.5346579568136455, + "grad_norm": 0.0986681939075829, + "learning_rate": 0.0004682117627132663, + "loss": 1.5635, + "step": 5893 + }, + { + "epoch": 0.5347486844492833, + "grad_norm": 0.0998065801469779, + "learning_rate": 0.0004680651343471107, + "loss": 1.5774, + "step": 5894 + }, + { + "epoch": 0.534839412084921, + "grad_norm": 0.0961141224793259, + "learning_rate": 0.000467918508738535, + "loss": 1.5691, + "step": 5895 + }, + { + "epoch": 0.5349301397205589, + "grad_norm": 0.09660225427572684, + "learning_rate": 0.0004677718859002012, + "loss": 1.5919, + "step": 5896 + }, + { + "epoch": 0.5350208673561967, + "grad_norm": 0.09875611363278529, + "learning_rate": 0.0004676252658447697, + "loss": 1.6262, + "step": 5897 + }, + { + "epoch": 0.5351115949918345, + "grad_norm": 0.09855088973958352, + "learning_rate": 0.00046747864858490124, + "loss": 1.5914, + "step": 5898 + }, + { + "epoch": 0.5352023226274724, + "grad_norm": 0.09671721781776442, + "learning_rate": 0.0004673320341332564, + "loss": 1.5761, + "step": 5899 + }, + { + "epoch": 
0.5352930502631101, + "grad_norm": 0.09764769550020326, + "learning_rate": 0.0004671854225024951, + "loss": 1.6629, + "step": 5900 + }, + { + "epoch": 0.5353837778987479, + "grad_norm": 0.09924841287508514, + "learning_rate": 0.0004670388137052774, + "loss": 1.6104, + "step": 5901 + }, + { + "epoch": 0.5354745055343858, + "grad_norm": 0.10161075608895863, + "learning_rate": 0.0004668922077542631, + "loss": 1.5819, + "step": 5902 + }, + { + "epoch": 0.5355652331700236, + "grad_norm": 0.10048919879362435, + "learning_rate": 0.00046674560466211136, + "loss": 1.5683, + "step": 5903 + }, + { + "epoch": 0.5356559608056614, + "grad_norm": 0.09901708346624344, + "learning_rate": 0.00046659900444148163, + "loss": 1.6162, + "step": 5904 + }, + { + "epoch": 0.5357466884412992, + "grad_norm": 0.09603346189701355, + "learning_rate": 0.0004664524071050328, + "loss": 1.5778, + "step": 5905 + }, + { + "epoch": 0.535837416076937, + "grad_norm": 0.10214279262182994, + "learning_rate": 0.00046630581266542365, + "loss": 1.6353, + "step": 5906 + }, + { + "epoch": 0.5359281437125748, + "grad_norm": 0.09824458380119723, + "learning_rate": 0.00046615922113531243, + "loss": 1.5986, + "step": 5907 + }, + { + "epoch": 0.5360188713482127, + "grad_norm": 0.09643050574304472, + "learning_rate": 0.00046601263252735755, + "loss": 1.5874, + "step": 5908 + }, + { + "epoch": 0.5361095989838505, + "grad_norm": 0.10024854599344278, + "learning_rate": 0.00046586604685421685, + "loss": 1.5578, + "step": 5909 + }, + { + "epoch": 0.5362003266194882, + "grad_norm": 0.09874104073792701, + "learning_rate": 0.000465719464128548, + "loss": 1.5829, + "step": 5910 + }, + { + "epoch": 0.5362910542551261, + "grad_norm": 0.09776696327908485, + "learning_rate": 0.0004655728843630083, + "loss": 1.6003, + "step": 5911 + }, + { + "epoch": 0.5363817818907639, + "grad_norm": 0.09535551304836132, + "learning_rate": 0.0004654263075702553, + "loss": 1.5722, + "step": 5912 + }, + { + "epoch": 0.5364725095264018, + "grad_norm": 0.09879236745379892, + "learning_rate": 0.0004652797337629458, + "loss": 1.6047, + "step": 5913 + }, + { + "epoch": 0.5365632371620396, + "grad_norm": 0.09634982478798054, + "learning_rate": 0.0004651331629537363, + "loss": 1.6108, + "step": 5914 + }, + { + "epoch": 0.5366539647976774, + "grad_norm": 0.0987954483951113, + "learning_rate": 0.0004649865951552833, + "loss": 1.575, + "step": 5915 + }, + { + "epoch": 0.5367446924333152, + "grad_norm": 0.09717723699045111, + "learning_rate": 0.0004648400303802431, + "loss": 1.5719, + "step": 5916 + }, + { + "epoch": 0.536835420068953, + "grad_norm": 0.09635041707325365, + "learning_rate": 0.0004646934686412713, + "loss": 1.6114, + "step": 5917 + }, + { + "epoch": 0.5369261477045908, + "grad_norm": 0.09679876894055305, + "learning_rate": 0.0004645469099510238, + "loss": 1.611, + "step": 5918 + }, + { + "epoch": 0.5370168753402287, + "grad_norm": 0.09712600731850048, + "learning_rate": 0.00046440035432215567, + "loss": 1.6069, + "step": 5919 + }, + { + "epoch": 0.5371076029758665, + "grad_norm": 0.09763553767607813, + "learning_rate": 0.00046425380176732227, + "loss": 1.5902, + "step": 5920 + }, + { + "epoch": 0.5371983306115042, + "grad_norm": 0.09835477371341682, + "learning_rate": 0.0004641072522991785, + "loss": 1.5898, + "step": 5921 + }, + { + "epoch": 0.5372890582471421, + "grad_norm": 0.09819543779467779, + "learning_rate": 0.0004639607059303787, + "loss": 1.5682, + "step": 5922 + }, + { + "epoch": 0.5373797858827799, + "grad_norm": 0.0971951584395413, + "learning_rate": 
0.00046381416267357736, + "loss": 1.5632, + "step": 5923 + }, + { + "epoch": 0.5374705135184177, + "grad_norm": 0.09957295054313385, + "learning_rate": 0.00046366762254142826, + "loss": 1.5841, + "step": 5924 + }, + { + "epoch": 0.5375612411540556, + "grad_norm": 0.10975933087471562, + "learning_rate": 0.0004635210855465853, + "loss": 1.5276, + "step": 5925 + }, + { + "epoch": 0.5376519687896933, + "grad_norm": 0.10033869889546239, + "learning_rate": 0.00046337455170170213, + "loss": 1.5892, + "step": 5926 + }, + { + "epoch": 0.5377426964253311, + "grad_norm": 0.09893624187359445, + "learning_rate": 0.00046322802101943147, + "loss": 1.6144, + "step": 5927 + }, + { + "epoch": 0.537833424060969, + "grad_norm": 0.09939000462139444, + "learning_rate": 0.0004630814935124269, + "loss": 1.5598, + "step": 5928 + }, + { + "epoch": 0.5379241516966068, + "grad_norm": 0.09643942555464927, + "learning_rate": 0.0004629349691933408, + "loss": 1.5879, + "step": 5929 + }, + { + "epoch": 0.5380148793322446, + "grad_norm": 0.09761010771496194, + "learning_rate": 0.00046278844807482545, + "loss": 1.5895, + "step": 5930 + }, + { + "epoch": 0.5381056069678825, + "grad_norm": 0.09826974768627779, + "learning_rate": 0.0004626419301695333, + "loss": 1.5628, + "step": 5931 + }, + { + "epoch": 0.5381963346035202, + "grad_norm": 0.09833707136243018, + "learning_rate": 0.0004624954154901159, + "loss": 1.6392, + "step": 5932 + }, + { + "epoch": 0.538287062239158, + "grad_norm": 0.10004270494784295, + "learning_rate": 0.0004623489040492249, + "loss": 1.5737, + "step": 5933 + }, + { + "epoch": 0.5383777898747959, + "grad_norm": 0.09801137816465938, + "learning_rate": 0.0004622023958595113, + "loss": 1.5616, + "step": 5934 + }, + { + "epoch": 0.5384685175104337, + "grad_norm": 0.097311427297104, + "learning_rate": 0.0004620558909336267, + "loss": 1.5844, + "step": 5935 + }, + { + "epoch": 0.5385592451460715, + "grad_norm": 0.09812898867405657, + "learning_rate": 0.00046190938928422143, + "loss": 1.6766, + "step": 5936 + }, + { + "epoch": 0.5386499727817093, + "grad_norm": 0.09844529755605022, + "learning_rate": 0.000461762890923946, + "loss": 1.5768, + "step": 5937 + }, + { + "epoch": 0.5387407004173471, + "grad_norm": 0.09887482391212789, + "learning_rate": 0.00046161639586545075, + "loss": 1.6175, + "step": 5938 + }, + { + "epoch": 0.5388314280529849, + "grad_norm": 0.09903984293223986, + "learning_rate": 0.0004614699041213852, + "loss": 1.5868, + "step": 5939 + }, + { + "epoch": 0.5389221556886228, + "grad_norm": 0.09553145273177818, + "learning_rate": 0.00046132341570439905, + "loss": 1.6162, + "step": 5940 + }, + { + "epoch": 0.5390128833242606, + "grad_norm": 0.09717177360672581, + "learning_rate": 0.0004611769306271419, + "loss": 1.628, + "step": 5941 + }, + { + "epoch": 0.5391036109598983, + "grad_norm": 0.0973247011400886, + "learning_rate": 0.00046103044890226223, + "loss": 1.5761, + "step": 5942 + }, + { + "epoch": 0.5391943385955362, + "grad_norm": 0.09955090051732982, + "learning_rate": 0.0004608839705424091, + "loss": 1.5653, + "step": 5943 + }, + { + "epoch": 0.539285066231174, + "grad_norm": 0.09538652677744963, + "learning_rate": 0.00046073749556023094, + "loss": 1.6019, + "step": 5944 + }, + { + "epoch": 0.5393757938668118, + "grad_norm": 0.09798000625584283, + "learning_rate": 0.0004605910239683759, + "loss": 1.5833, + "step": 5945 + }, + { + "epoch": 0.5394665215024497, + "grad_norm": 0.09582011452385454, + "learning_rate": 0.0004604445557794916, + "loss": 1.585, + "step": 5946 + }, + { + "epoch": 
0.5395572491380874, + "grad_norm": 0.10043077323135288, + "learning_rate": 0.0004602980910062258, + "loss": 1.5794, + "step": 5947 + }, + { + "epoch": 0.5396479767737252, + "grad_norm": 0.09649847428076448, + "learning_rate": 0.00046015162966122583, + "loss": 1.5917, + "step": 5948 + }, + { + "epoch": 0.5397387044093631, + "grad_norm": 0.09556089559817511, + "learning_rate": 0.00046000517175713834, + "loss": 1.5762, + "step": 5949 + }, + { + "epoch": 0.5398294320450009, + "grad_norm": 0.1024339659275857, + "learning_rate": 0.0004598587173066101, + "loss": 1.5747, + "step": 5950 + }, + { + "epoch": 0.5399201596806387, + "grad_norm": 0.09690028083842366, + "learning_rate": 0.0004597122663222876, + "loss": 1.5575, + "step": 5951 + }, + { + "epoch": 0.5400108873162766, + "grad_norm": 0.09747739846900162, + "learning_rate": 0.000459565818816817, + "loss": 1.5914, + "step": 5952 + }, + { + "epoch": 0.5401016149519143, + "grad_norm": 0.09849749226611176, + "learning_rate": 0.0004594193748028438, + "loss": 1.6037, + "step": 5953 + }, + { + "epoch": 0.5401923425875522, + "grad_norm": 0.09697497215400734, + "learning_rate": 0.00045927293429301355, + "loss": 1.578, + "step": 5954 + }, + { + "epoch": 0.54028307022319, + "grad_norm": 0.09689719778528262, + "learning_rate": 0.00045912649729997146, + "loss": 1.5773, + "step": 5955 + }, + { + "epoch": 0.5403737978588278, + "grad_norm": 0.0971329255324683, + "learning_rate": 0.0004589800638363623, + "loss": 1.5601, + "step": 5956 + }, + { + "epoch": 0.5404645254944657, + "grad_norm": 0.10403594111029484, + "learning_rate": 0.00045883363391483075, + "loss": 1.6111, + "step": 5957 + }, + { + "epoch": 0.5405552531301034, + "grad_norm": 0.1010581394105225, + "learning_rate": 0.00045868720754802076, + "loss": 1.6231, + "step": 5958 + }, + { + "epoch": 0.5406459807657412, + "grad_norm": 0.10003707186995774, + "learning_rate": 0.0004585407847485766, + "loss": 1.6019, + "step": 5959 + }, + { + "epoch": 0.5407367084013791, + "grad_norm": 0.09968353213267335, + "learning_rate": 0.000458394365529142, + "loss": 1.5516, + "step": 5960 + }, + { + "epoch": 0.5408274360370169, + "grad_norm": 0.10190875977447846, + "learning_rate": 0.00045824794990235985, + "loss": 1.597, + "step": 5961 + }, + { + "epoch": 0.5409181636726547, + "grad_norm": 0.10184604036598832, + "learning_rate": 0.00045810153788087356, + "loss": 1.5955, + "step": 5962 + }, + { + "epoch": 0.5410088913082925, + "grad_norm": 0.0979679786281962, + "learning_rate": 0.00045795512947732547, + "loss": 1.5862, + "step": 5963 + }, + { + "epoch": 0.5410996189439303, + "grad_norm": 0.09858204565234428, + "learning_rate": 0.0004578087247043582, + "loss": 1.6003, + "step": 5964 + }, + { + "epoch": 0.5411903465795681, + "grad_norm": 0.09958348139212013, + "learning_rate": 0.0004576623235746139, + "loss": 1.5557, + "step": 5965 + }, + { + "epoch": 0.541281074215206, + "grad_norm": 0.09286980818642154, + "learning_rate": 0.00045751592610073383, + "loss": 1.5539, + "step": 5966 + }, + { + "epoch": 0.5413718018508438, + "grad_norm": 0.09705240394907468, + "learning_rate": 0.00045736953229536014, + "loss": 1.5628, + "step": 5967 + }, + { + "epoch": 0.5414625294864815, + "grad_norm": 0.09598109769475173, + "learning_rate": 0.00045722314217113346, + "loss": 1.5539, + "step": 5968 + }, + { + "epoch": 0.5415532571221194, + "grad_norm": 0.10142092947594622, + "learning_rate": 0.00045707675574069485, + "loss": 1.5971, + "step": 5969 + }, + { + "epoch": 0.5416439847577572, + "grad_norm": 0.09730211363023304, + "learning_rate": 
0.0004569303730166848, + "loss": 1.5884, + "step": 5970 + }, + { + "epoch": 0.541734712393395, + "grad_norm": 0.09483450704637028, + "learning_rate": 0.0004567839940117432, + "loss": 1.5684, + "step": 5971 + }, + { + "epoch": 0.5418254400290329, + "grad_norm": 0.09946177881259051, + "learning_rate": 0.00045663761873851027, + "loss": 1.5979, + "step": 5972 + }, + { + "epoch": 0.5419161676646707, + "grad_norm": 0.09909028031657435, + "learning_rate": 0.000456491247209625, + "loss": 1.5744, + "step": 5973 + }, + { + "epoch": 0.5420068953003084, + "grad_norm": 0.09995189013833654, + "learning_rate": 0.0004563448794377273, + "loss": 1.5984, + "step": 5974 + }, + { + "epoch": 0.5420976229359463, + "grad_norm": 0.09593888907313257, + "learning_rate": 0.00045619851543545555, + "loss": 1.5592, + "step": 5975 + }, + { + "epoch": 0.5421883505715841, + "grad_norm": 0.09611031772312985, + "learning_rate": 0.00045605215521544843, + "loss": 1.5689, + "step": 5976 + }, + { + "epoch": 0.5422790782072219, + "grad_norm": 0.0975860162853713, + "learning_rate": 0.00045590579879034436, + "loss": 1.5744, + "step": 5977 + }, + { + "epoch": 0.5423698058428598, + "grad_norm": 0.09778922987639739, + "learning_rate": 0.00045575944617278093, + "loss": 1.5826, + "step": 5978 + }, + { + "epoch": 0.5424605334784975, + "grad_norm": 0.09289427847899133, + "learning_rate": 0.0004556130973753958, + "loss": 1.5862, + "step": 5979 + }, + { + "epoch": 0.5425512611141353, + "grad_norm": 0.09459646121931564, + "learning_rate": 0.0004554667524108264, + "loss": 1.5556, + "step": 5980 + }, + { + "epoch": 0.5426419887497732, + "grad_norm": 0.09667058520732612, + "learning_rate": 0.00045532041129170924, + "loss": 1.5862, + "step": 5981 + }, + { + "epoch": 0.542732716385411, + "grad_norm": 0.09962933056681327, + "learning_rate": 0.00045517407403068135, + "loss": 1.5873, + "step": 5982 + }, + { + "epoch": 0.5428234440210488, + "grad_norm": 0.09780490743756592, + "learning_rate": 0.00045502774064037866, + "loss": 1.6024, + "step": 5983 + }, + { + "epoch": 0.5429141716566867, + "grad_norm": 0.09478335865305487, + "learning_rate": 0.0004548814111334374, + "loss": 1.6418, + "step": 5984 + }, + { + "epoch": 0.5430048992923244, + "grad_norm": 0.09277448737526447, + "learning_rate": 0.0004547350855224928, + "loss": 1.6269, + "step": 5985 + }, + { + "epoch": 0.5430956269279622, + "grad_norm": 0.09949693501911185, + "learning_rate": 0.00045458876382018014, + "loss": 1.5811, + "step": 5986 + }, + { + "epoch": 0.5431863545636001, + "grad_norm": 0.09862226389160593, + "learning_rate": 0.00045444244603913466, + "loss": 1.5575, + "step": 5987 + }, + { + "epoch": 0.5432770821992379, + "grad_norm": 0.10037029498602536, + "learning_rate": 0.0004542961321919905, + "loss": 1.5921, + "step": 5988 + }, + { + "epoch": 0.5433678098348756, + "grad_norm": 0.09825572487410798, + "learning_rate": 0.00045414982229138195, + "loss": 1.6439, + "step": 5989 + }, + { + "epoch": 0.5434585374705135, + "grad_norm": 0.09708849758608921, + "learning_rate": 0.00045400351634994313, + "loss": 1.6261, + "step": 5990 + }, + { + "epoch": 0.5435492651061513, + "grad_norm": 0.09518789736375259, + "learning_rate": 0.00045385721438030756, + "loss": 1.6288, + "step": 5991 + }, + { + "epoch": 0.5436399927417892, + "grad_norm": 0.10358723494996366, + "learning_rate": 0.00045371091639510825, + "loss": 1.557, + "step": 5992 + }, + { + "epoch": 0.543730720377427, + "grad_norm": 0.10022568958136974, + "learning_rate": 0.0004535646224069781, + "loss": 1.6273, + "step": 5993 + }, + { + 
"epoch": 0.5438214480130648, + "grad_norm": 0.10096529027813268, + "learning_rate": 0.0004534183324285498, + "loss": 1.6343, + "step": 5994 + }, + { + "epoch": 0.5439121756487026, + "grad_norm": 0.09872751923542233, + "learning_rate": 0.0004532720464724552, + "loss": 1.5913, + "step": 5995 + }, + { + "epoch": 0.5440029032843404, + "grad_norm": 0.09632894400959353, + "learning_rate": 0.00045312576455132624, + "loss": 1.6063, + "step": 5996 + }, + { + "epoch": 0.5440936309199782, + "grad_norm": 0.09840829846308465, + "learning_rate": 0.00045297948667779437, + "loss": 1.5802, + "step": 5997 + }, + { + "epoch": 0.5441843585556161, + "grad_norm": 0.0933955374056068, + "learning_rate": 0.0004528332128644909, + "loss": 1.5367, + "step": 5998 + }, + { + "epoch": 0.5442750861912539, + "grad_norm": 0.09775934037206743, + "learning_rate": 0.00045268694312404647, + "loss": 1.5804, + "step": 5999 + }, + { + "epoch": 0.5443658138268916, + "grad_norm": 0.0947898122554069, + "learning_rate": 0.00045254067746909143, + "loss": 1.5922, + "step": 6000 + }, + { + "epoch": 0.5444565414625295, + "grad_norm": 0.09713709416310948, + "learning_rate": 0.0004523944159122559, + "loss": 1.5921, + "step": 6001 + }, + { + "epoch": 0.5445472690981673, + "grad_norm": 0.1022387591112202, + "learning_rate": 0.00045224815846616964, + "loss": 1.579, + "step": 6002 + }, + { + "epoch": 0.5446379967338051, + "grad_norm": 0.10087880317875915, + "learning_rate": 0.00045210190514346176, + "loss": 1.5548, + "step": 6003 + }, + { + "epoch": 0.544728724369443, + "grad_norm": 0.09999993634794338, + "learning_rate": 0.00045195565595676165, + "loss": 1.5789, + "step": 6004 + }, + { + "epoch": 0.5448194520050808, + "grad_norm": 0.0988783482749872, + "learning_rate": 0.00045180941091869735, + "loss": 1.5623, + "step": 6005 + }, + { + "epoch": 0.5449101796407185, + "grad_norm": 0.09895248456887454, + "learning_rate": 0.0004516631700418978, + "loss": 1.5808, + "step": 6006 + }, + { + "epoch": 0.5450009072763564, + "grad_norm": 0.09803719619183779, + "learning_rate": 0.0004515169333389906, + "loss": 1.6112, + "step": 6007 + }, + { + "epoch": 0.5450916349119942, + "grad_norm": 0.09770040209760603, + "learning_rate": 0.0004513707008226034, + "loss": 1.5728, + "step": 6008 + }, + { + "epoch": 0.545182362547632, + "grad_norm": 0.09938798656376424, + "learning_rate": 0.0004512244725053634, + "loss": 1.5366, + "step": 6009 + }, + { + "epoch": 0.5452730901832699, + "grad_norm": 0.09848375251756623, + "learning_rate": 0.00045107824839989727, + "loss": 1.5582, + "step": 6010 + }, + { + "epoch": 0.5453638178189076, + "grad_norm": 0.09748443953408044, + "learning_rate": 0.00045093202851883174, + "loss": 1.5932, + "step": 6011 + }, + { + "epoch": 0.5454545454545454, + "grad_norm": 0.09638407191474194, + "learning_rate": 0.0004507858128747924, + "loss": 1.5524, + "step": 6012 + }, + { + "epoch": 0.5455452730901833, + "grad_norm": 0.0962282374084542, + "learning_rate": 0.0004506396014804057, + "loss": 1.603, + "step": 6013 + }, + { + "epoch": 0.5456360007258211, + "grad_norm": 0.09777741952596086, + "learning_rate": 0.0004504933943482967, + "loss": 1.5847, + "step": 6014 + }, + { + "epoch": 0.5457267283614589, + "grad_norm": 0.09828459200497519, + "learning_rate": 0.00045034719149109027, + "loss": 1.5927, + "step": 6015 + }, + { + "epoch": 0.5458174559970967, + "grad_norm": 0.09823998394449854, + "learning_rate": 0.0004502009929214113, + "loss": 1.616, + "step": 6016 + }, + { + "epoch": 0.5459081836327345, + "grad_norm": 0.10141984371034264, + 
"learning_rate": 0.00045005479865188385, + "loss": 1.5765, + "step": 6017 + }, + { + "epoch": 0.5459989112683723, + "grad_norm": 0.09971610903052257, + "learning_rate": 0.0004499086086951319, + "loss": 1.6301, + "step": 6018 + }, + { + "epoch": 0.5460896389040102, + "grad_norm": 0.0938824336498517, + "learning_rate": 0.0004497624230637791, + "loss": 1.5551, + "step": 6019 + }, + { + "epoch": 0.546180366539648, + "grad_norm": 0.09850964495274886, + "learning_rate": 0.0004496162417704482, + "loss": 1.6229, + "step": 6020 + }, + { + "epoch": 0.5462710941752857, + "grad_norm": 0.09712161565994433, + "learning_rate": 0.00044947006482776234, + "loss": 1.6094, + "step": 6021 + }, + { + "epoch": 0.5463618218109236, + "grad_norm": 0.09565512526658705, + "learning_rate": 0.0004493238922483439, + "loss": 1.6174, + "step": 6022 + }, + { + "epoch": 0.5464525494465614, + "grad_norm": 0.09632283901706817, + "learning_rate": 0.00044917772404481487, + "loss": 1.6013, + "step": 6023 + }, + { + "epoch": 0.5465432770821992, + "grad_norm": 0.09884102595911706, + "learning_rate": 0.0004490315602297968, + "loss": 1.6155, + "step": 6024 + }, + { + "epoch": 0.5466340047178371, + "grad_norm": 0.09765506872201037, + "learning_rate": 0.000448885400815911, + "loss": 1.5707, + "step": 6025 + }, + { + "epoch": 0.5467247323534749, + "grad_norm": 0.09633735903933575, + "learning_rate": 0.0004487392458157785, + "loss": 1.5443, + "step": 6026 + }, + { + "epoch": 0.5468154599891126, + "grad_norm": 0.09809578035839098, + "learning_rate": 0.00044859309524201957, + "loss": 1.5631, + "step": 6027 + }, + { + "epoch": 0.5469061876247505, + "grad_norm": 0.09757437349014725, + "learning_rate": 0.0004484469491072543, + "loss": 1.5909, + "step": 6028 + }, + { + "epoch": 0.5469969152603883, + "grad_norm": 0.10004708538520445, + "learning_rate": 0.00044830080742410273, + "loss": 1.5973, + "step": 6029 + }, + { + "epoch": 0.5470876428960262, + "grad_norm": 0.09859377085663892, + "learning_rate": 0.00044815467020518413, + "loss": 1.5745, + "step": 6030 + }, + { + "epoch": 0.547178370531664, + "grad_norm": 0.09540119668122468, + "learning_rate": 0.0004480085374631173, + "loss": 1.5749, + "step": 6031 + }, + { + "epoch": 0.5472690981673017, + "grad_norm": 0.10028988334519011, + "learning_rate": 0.000447862409210521, + "loss": 1.531, + "step": 6032 + }, + { + "epoch": 0.5473598258029396, + "grad_norm": 0.0978063901367236, + "learning_rate": 0.0004477162854600133, + "loss": 1.5991, + "step": 6033 + }, + { + "epoch": 0.5474505534385774, + "grad_norm": 0.097905654374198, + "learning_rate": 0.000447570166224212, + "loss": 1.6037, + "step": 6034 + }, + { + "epoch": 0.5475412810742152, + "grad_norm": 0.09933300874220691, + "learning_rate": 0.0004474240515157347, + "loss": 1.5781, + "step": 6035 + }, + { + "epoch": 0.5476320087098531, + "grad_norm": 0.09586371882044352, + "learning_rate": 0.00044727794134719805, + "loss": 1.5768, + "step": 6036 + }, + { + "epoch": 0.5477227363454908, + "grad_norm": 0.10024420123783243, + "learning_rate": 0.00044713183573121906, + "loss": 1.5956, + "step": 6037 + }, + { + "epoch": 0.5478134639811286, + "grad_norm": 0.09811651412955642, + "learning_rate": 0.00044698573468041406, + "loss": 1.5474, + "step": 6038 + }, + { + "epoch": 0.5479041916167665, + "grad_norm": 0.09554321497917675, + "learning_rate": 0.0004468396382073985, + "loss": 1.5928, + "step": 6039 + }, + { + "epoch": 0.5479949192524043, + "grad_norm": 0.09519079364201437, + "learning_rate": 0.0004466935463247881, + "loss": 1.5452, + "step": 6040 + }, + { 
+ "epoch": 0.5480856468880421, + "grad_norm": 0.09829147897460297, + "learning_rate": 0.00044654745904519773, + "loss": 1.6129, + "step": 6041 + }, + { + "epoch": 0.54817637452368, + "grad_norm": 0.09579381237806373, + "learning_rate": 0.00044640137638124217, + "loss": 1.6022, + "step": 6042 + }, + { + "epoch": 0.5482671021593177, + "grad_norm": 0.09905471011832472, + "learning_rate": 0.0004462552983455357, + "loss": 1.5638, + "step": 6043 + }, + { + "epoch": 0.5483578297949555, + "grad_norm": 0.09564563373438428, + "learning_rate": 0.00044610922495069195, + "loss": 1.5649, + "step": 6044 + }, + { + "epoch": 0.5484485574305934, + "grad_norm": 0.09718473619989923, + "learning_rate": 0.00044596315620932484, + "loss": 1.5284, + "step": 6045 + }, + { + "epoch": 0.5485392850662312, + "grad_norm": 0.09783276607277842, + "learning_rate": 0.00044581709213404696, + "loss": 1.5725, + "step": 6046 + }, + { + "epoch": 0.548630012701869, + "grad_norm": 0.1018488132432512, + "learning_rate": 0.0004456710327374713, + "loss": 1.6111, + "step": 6047 + }, + { + "epoch": 0.5487207403375068, + "grad_norm": 0.09743336748890676, + "learning_rate": 0.00044552497803221, + "loss": 1.6141, + "step": 6048 + }, + { + "epoch": 0.5488114679731446, + "grad_norm": 0.0968838589568733, + "learning_rate": 0.00044537892803087477, + "loss": 1.5818, + "step": 6049 + }, + { + "epoch": 0.5489021956087824, + "grad_norm": 0.1008610175398001, + "learning_rate": 0.00044523288274607724, + "loss": 1.5689, + "step": 6050 + }, + { + "epoch": 0.5489929232444203, + "grad_norm": 0.10345683817503035, + "learning_rate": 0.00044508684219042805, + "loss": 1.5841, + "step": 6051 + }, + { + "epoch": 0.5490836508800581, + "grad_norm": 0.10029837576556544, + "learning_rate": 0.0004449408063765385, + "loss": 1.5784, + "step": 6052 + }, + { + "epoch": 0.5491743785156958, + "grad_norm": 0.10053589931571172, + "learning_rate": 0.0004447947753170183, + "loss": 1.6435, + "step": 6053 + }, + { + "epoch": 0.5492651061513337, + "grad_norm": 0.10019507099344854, + "learning_rate": 0.0004446487490244774, + "loss": 1.591, + "step": 6054 + }, + { + "epoch": 0.5493558337869715, + "grad_norm": 0.09770182150391539, + "learning_rate": 0.0004445027275115252, + "loss": 1.584, + "step": 6055 + }, + { + "epoch": 0.5494465614226093, + "grad_norm": 0.09410069362645007, + "learning_rate": 0.00044435671079077067, + "loss": 1.6242, + "step": 6056 + }, + { + "epoch": 0.5495372890582472, + "grad_norm": 0.09897467882235797, + "learning_rate": 0.0004442106988748223, + "loss": 1.625, + "step": 6057 + }, + { + "epoch": 0.549628016693885, + "grad_norm": 0.09796614636041165, + "learning_rate": 0.00044406469177628836, + "loss": 1.6037, + "step": 6058 + }, + { + "epoch": 0.5497187443295227, + "grad_norm": 0.09922399384778728, + "learning_rate": 0.0004439186895077764, + "loss": 1.6191, + "step": 6059 + }, + { + "epoch": 0.5498094719651606, + "grad_norm": 0.09810877986170524, + "learning_rate": 0.00044377269208189396, + "loss": 1.5867, + "step": 6060 + }, + { + "epoch": 0.5499001996007984, + "grad_norm": 0.0980205490442198, + "learning_rate": 0.00044362669951124783, + "loss": 1.5855, + "step": 6061 + }, + { + "epoch": 0.5499909272364362, + "grad_norm": 0.09745993629629654, + "learning_rate": 0.0004434807118084446, + "loss": 1.6001, + "step": 6062 + }, + { + "epoch": 0.550081654872074, + "grad_norm": 0.0985439193029614, + "learning_rate": 0.00044333472898609017, + "loss": 1.5639, + "step": 6063 + }, + { + "epoch": 0.5501723825077118, + "grad_norm": 0.09536845507331625, + 
"learning_rate": 0.00044318875105679023, + "loss": 1.6425, + "step": 6064 + }, + { + "epoch": 0.5502631101433496, + "grad_norm": 0.09789575252494985, + "learning_rate": 0.0004430427780331501, + "loss": 1.6051, + "step": 6065 + }, + { + "epoch": 0.5503538377789875, + "grad_norm": 0.10078862181478243, + "learning_rate": 0.00044289680992777445, + "loss": 1.579, + "step": 6066 + }, + { + "epoch": 0.5504445654146253, + "grad_norm": 0.0986012911590988, + "learning_rate": 0.0004427508467532675, + "loss": 1.6222, + "step": 6067 + }, + { + "epoch": 0.5505352930502632, + "grad_norm": 0.092605814275744, + "learning_rate": 0.00044260488852223347, + "loss": 1.6117, + "step": 6068 + }, + { + "epoch": 0.5506260206859009, + "grad_norm": 0.09541105320443098, + "learning_rate": 0.00044245893524727585, + "loss": 1.5846, + "step": 6069 + }, + { + "epoch": 0.5507167483215387, + "grad_norm": 0.09841384240680077, + "learning_rate": 0.00044231298694099767, + "loss": 1.6188, + "step": 6070 + }, + { + "epoch": 0.5508074759571766, + "grad_norm": 0.10196010132593013, + "learning_rate": 0.0004421670436160015, + "loss": 1.6248, + "step": 6071 + }, + { + "epoch": 0.5508982035928144, + "grad_norm": 0.0948020199674189, + "learning_rate": 0.0004420211052848897, + "loss": 1.5622, + "step": 6072 + }, + { + "epoch": 0.5509889312284522, + "grad_norm": 0.09996439452295945, + "learning_rate": 0.0004418751719602639, + "loss": 1.6027, + "step": 6073 + }, + { + "epoch": 0.55107965886409, + "grad_norm": 0.09887434048773815, + "learning_rate": 0.0004417292436547256, + "loss": 1.5825, + "step": 6074 + }, + { + "epoch": 0.5511703864997278, + "grad_norm": 0.10291608399300638, + "learning_rate": 0.0004415833203808756, + "loss": 1.5937, + "step": 6075 + }, + { + "epoch": 0.5512611141353656, + "grad_norm": 0.09840201970586948, + "learning_rate": 0.00044143740215131456, + "loss": 1.6055, + "step": 6076 + }, + { + "epoch": 0.5513518417710035, + "grad_norm": 0.0980495260599247, + "learning_rate": 0.00044129148897864264, + "loss": 1.5687, + "step": 6077 + }, + { + "epoch": 0.5514425694066413, + "grad_norm": 0.10053126020701231, + "learning_rate": 0.0004411455808754592, + "loss": 1.618, + "step": 6078 + }, + { + "epoch": 0.551533297042279, + "grad_norm": 0.10011455302484971, + "learning_rate": 0.0004409996778543636, + "loss": 1.5829, + "step": 6079 + }, + { + "epoch": 0.5516240246779169, + "grad_norm": 0.1010640445702008, + "learning_rate": 0.0004408537799279544, + "loss": 1.6016, + "step": 6080 + }, + { + "epoch": 0.5517147523135547, + "grad_norm": 0.09532576349170187, + "learning_rate": 0.00044070788710883, + "loss": 1.5849, + "step": 6081 + }, + { + "epoch": 0.5518054799491925, + "grad_norm": 0.10078105583707968, + "learning_rate": 0.0004405619994095882, + "loss": 1.5634, + "step": 6082 + }, + { + "epoch": 0.5518962075848304, + "grad_norm": 0.09882807749020811, + "learning_rate": 0.0004404161168428266, + "loss": 1.5777, + "step": 6083 + }, + { + "epoch": 0.5519869352204682, + "grad_norm": 0.09644400625969164, + "learning_rate": 0.00044027023942114224, + "loss": 1.5826, + "step": 6084 + }, + { + "epoch": 0.5520776628561059, + "grad_norm": 0.09850923339296214, + "learning_rate": 0.0004401243671571314, + "loss": 1.5427, + "step": 6085 + }, + { + "epoch": 0.5521683904917438, + "grad_norm": 0.1019337903141255, + "learning_rate": 0.0004399785000633903, + "loss": 1.5648, + "step": 6086 + }, + { + "epoch": 0.5522591181273816, + "grad_norm": 0.10183319716610709, + "learning_rate": 0.00043983263815251464, + "loss": 1.5931, + "step": 6087 + }, + { + 
"epoch": 0.5523498457630194, + "grad_norm": 0.10468018841704692, + "learning_rate": 0.0004396867814370994, + "loss": 1.5862, + "step": 6088 + }, + { + "epoch": 0.5524405733986573, + "grad_norm": 0.09939491603033877, + "learning_rate": 0.00043954092992973964, + "loss": 1.5648, + "step": 6089 + }, + { + "epoch": 0.552531301034295, + "grad_norm": 0.10658477303461156, + "learning_rate": 0.00043939508364302917, + "loss": 1.5718, + "step": 6090 + }, + { + "epoch": 0.5526220286699328, + "grad_norm": 0.09681497504710981, + "learning_rate": 0.00043924924258956243, + "loss": 1.5802, + "step": 6091 + }, + { + "epoch": 0.5527127563055707, + "grad_norm": 0.09883528007043682, + "learning_rate": 0.00043910340678193245, + "loss": 1.5889, + "step": 6092 + }, + { + "epoch": 0.5528034839412085, + "grad_norm": 0.1018165963859731, + "learning_rate": 0.0004389575762327323, + "loss": 1.6009, + "step": 6093 + }, + { + "epoch": 0.5528942115768463, + "grad_norm": 0.09648910361016418, + "learning_rate": 0.0004388117509545545, + "loss": 1.5754, + "step": 6094 + }, + { + "epoch": 0.5529849392124842, + "grad_norm": 0.1008868797395992, + "learning_rate": 0.000438665930959991, + "loss": 1.6291, + "step": 6095 + }, + { + "epoch": 0.5530756668481219, + "grad_norm": 0.09657447645480645, + "learning_rate": 0.0004385201162616334, + "loss": 1.5701, + "step": 6096 + }, + { + "epoch": 0.5531663944837597, + "grad_norm": 0.09809801711652842, + "learning_rate": 0.0004383743068720728, + "loss": 1.5501, + "step": 6097 + }, + { + "epoch": 0.5532571221193976, + "grad_norm": 0.09501369929112805, + "learning_rate": 0.00043822850280389973, + "loss": 1.6261, + "step": 6098 + }, + { + "epoch": 0.5533478497550354, + "grad_norm": 0.09855680877233014, + "learning_rate": 0.0004380827040697047, + "loss": 1.5814, + "step": 6099 + }, + { + "epoch": 0.5534385773906731, + "grad_norm": 0.10553657881551227, + "learning_rate": 0.00043793691068207726, + "loss": 1.5696, + "step": 6100 + }, + { + "epoch": 0.553529305026311, + "grad_norm": 0.09544272889143579, + "learning_rate": 0.00043779112265360687, + "loss": 1.5596, + "step": 6101 + }, + { + "epoch": 0.5536200326619488, + "grad_norm": 0.09703432388637859, + "learning_rate": 0.00043764533999688206, + "loss": 1.569, + "step": 6102 + }, + { + "epoch": 0.5537107602975866, + "grad_norm": 0.09756982823933733, + "learning_rate": 0.00043749956272449135, + "loss": 1.5918, + "step": 6103 + }, + { + "epoch": 0.5538014879332245, + "grad_norm": 0.09845983993117806, + "learning_rate": 0.0004373537908490227, + "loss": 1.5651, + "step": 6104 + }, + { + "epoch": 0.5538922155688623, + "grad_norm": 0.09711236414311608, + "learning_rate": 0.0004372080243830634, + "loss": 1.5856, + "step": 6105 + }, + { + "epoch": 0.5539829432045001, + "grad_norm": 0.09803898495746913, + "learning_rate": 0.00043706226333920027, + "loss": 1.5806, + "step": 6106 + }, + { + "epoch": 0.5540736708401379, + "grad_norm": 0.1041020969894574, + "learning_rate": 0.0004369165077300201, + "loss": 1.5493, + "step": 6107 + }, + { + "epoch": 0.5541643984757757, + "grad_norm": 0.09874631346296688, + "learning_rate": 0.0004367707575681089, + "loss": 1.6282, + "step": 6108 + }, + { + "epoch": 0.5542551261114136, + "grad_norm": 0.09821735847568185, + "learning_rate": 0.000436625012866052, + "loss": 1.5978, + "step": 6109 + }, + { + "epoch": 0.5543458537470514, + "grad_norm": 0.09527041519438806, + "learning_rate": 0.00043647927363643456, + "loss": 1.5756, + "step": 6110 + }, + { + "epoch": 0.5544365813826891, + "grad_norm": 0.09763689875186632, + 
"learning_rate": 0.00043633353989184137, + "loss": 1.57, + "step": 6111 + }, + { + "epoch": 0.554527309018327, + "grad_norm": 0.10244505942617561, + "learning_rate": 0.00043618781164485624, + "loss": 1.5969, + "step": 6112 + }, + { + "epoch": 0.5546180366539648, + "grad_norm": 0.10377461379531075, + "learning_rate": 0.00043604208890806297, + "loss": 1.58, + "step": 6113 + }, + { + "epoch": 0.5547087642896026, + "grad_norm": 0.09498314588366934, + "learning_rate": 0.00043589637169404465, + "loss": 1.6105, + "step": 6114 + }, + { + "epoch": 0.5547994919252405, + "grad_norm": 0.09648483379151816, + "learning_rate": 0.0004357506600153842, + "loss": 1.5999, + "step": 6115 + }, + { + "epoch": 0.5548902195608783, + "grad_norm": 0.09726050517811569, + "learning_rate": 0.00043560495388466383, + "loss": 1.5678, + "step": 6116 + }, + { + "epoch": 0.554980947196516, + "grad_norm": 0.10124494043069737, + "learning_rate": 0.00043545925331446507, + "loss": 1.592, + "step": 6117 + }, + { + "epoch": 0.5550716748321539, + "grad_norm": 0.10034999596047951, + "learning_rate": 0.0004353135583173694, + "loss": 1.6444, + "step": 6118 + }, + { + "epoch": 0.5551624024677917, + "grad_norm": 0.09766669790911485, + "learning_rate": 0.00043516786890595747, + "loss": 1.5697, + "step": 6119 + }, + { + "epoch": 0.5552531301034295, + "grad_norm": 0.09845389351634912, + "learning_rate": 0.0004350221850928095, + "loss": 1.6084, + "step": 6120 + }, + { + "epoch": 0.5553438577390674, + "grad_norm": 0.09448167221271218, + "learning_rate": 0.0004348765068905054, + "loss": 1.6325, + "step": 6121 + }, + { + "epoch": 0.5554345853747051, + "grad_norm": 0.09372645146694726, + "learning_rate": 0.0004347308343116246, + "loss": 1.5647, + "step": 6122 + }, + { + "epoch": 0.5555253130103429, + "grad_norm": 0.09877497162344806, + "learning_rate": 0.000434585167368746, + "loss": 1.5337, + "step": 6123 + }, + { + "epoch": 0.5556160406459808, + "grad_norm": 0.09745580463723195, + "learning_rate": 0.0004344395060744477, + "loss": 1.5653, + "step": 6124 + }, + { + "epoch": 0.5557067682816186, + "grad_norm": 0.09793281503923042, + "learning_rate": 0.00043429385044130775, + "loss": 1.6373, + "step": 6125 + }, + { + "epoch": 0.5557974959172564, + "grad_norm": 0.10176690710759212, + "learning_rate": 0.00043414820048190357, + "loss": 1.5953, + "step": 6126 + }, + { + "epoch": 0.5558882235528942, + "grad_norm": 0.098089094133701, + "learning_rate": 0.00043400255620881193, + "loss": 1.5447, + "step": 6127 + }, + { + "epoch": 0.555978951188532, + "grad_norm": 0.09948076543038825, + "learning_rate": 0.00043385691763460935, + "loss": 1.5877, + "step": 6128 + }, + { + "epoch": 0.5560696788241698, + "grad_norm": 0.09561111037613548, + "learning_rate": 0.0004337112847718714, + "loss": 1.5808, + "step": 6129 + }, + { + "epoch": 0.5561604064598077, + "grad_norm": 0.09603795492983704, + "learning_rate": 0.000433565657633174, + "loss": 1.542, + "step": 6130 + }, + { + "epoch": 0.5562511340954455, + "grad_norm": 0.10048112697614607, + "learning_rate": 0.0004334200362310918, + "loss": 1.5658, + "step": 6131 + }, + { + "epoch": 0.5563418617310832, + "grad_norm": 0.1034804255711507, + "learning_rate": 0.0004332744205781993, + "loss": 1.6034, + "step": 6132 + }, + { + "epoch": 0.5564325893667211, + "grad_norm": 0.09601496546677467, + "learning_rate": 0.00043312881068707055, + "loss": 1.5469, + "step": 6133 + }, + { + "epoch": 0.5565233170023589, + "grad_norm": 0.09993209697148098, + "learning_rate": 0.0004329832065702788, + "loss": 1.6014, + "step": 6134 + }, + { + 
"epoch": 0.5566140446379967, + "grad_norm": 0.09793494551835767, + "learning_rate": 0.0004328376082403969, + "loss": 1.5965, + "step": 6135 + }, + { + "epoch": 0.5567047722736346, + "grad_norm": 0.09742082458491164, + "learning_rate": 0.00043269201570999763, + "loss": 1.5855, + "step": 6136 + }, + { + "epoch": 0.5567954999092724, + "grad_norm": 0.09786949914135068, + "learning_rate": 0.0004325464289916525, + "loss": 1.556, + "step": 6137 + }, + { + "epoch": 0.5568862275449101, + "grad_norm": 0.09466348589070443, + "learning_rate": 0.00043240084809793324, + "loss": 1.5747, + "step": 6138 + }, + { + "epoch": 0.556976955180548, + "grad_norm": 0.10014262735275294, + "learning_rate": 0.0004322552730414107, + "loss": 1.6087, + "step": 6139 + }, + { + "epoch": 0.5570676828161858, + "grad_norm": 0.0937605926918599, + "learning_rate": 0.0004321097038346554, + "loss": 1.5604, + "step": 6140 + }, + { + "epoch": 0.5571584104518236, + "grad_norm": 0.09803984134707869, + "learning_rate": 0.0004319641404902372, + "loss": 1.5651, + "step": 6141 + }, + { + "epoch": 0.5572491380874615, + "grad_norm": 0.0964249139563854, + "learning_rate": 0.0004318185830207254, + "loss": 1.571, + "step": 6142 + }, + { + "epoch": 0.5573398657230992, + "grad_norm": 0.09699870856758759, + "learning_rate": 0.00043167303143868915, + "loss": 1.5676, + "step": 6143 + }, + { + "epoch": 0.5574305933587371, + "grad_norm": 0.09573187898956048, + "learning_rate": 0.00043152748575669655, + "loss": 1.5366, + "step": 6144 + }, + { + "epoch": 0.5575213209943749, + "grad_norm": 0.09872916018033281, + "learning_rate": 0.00043138194598731555, + "loss": 1.5482, + "step": 6145 + }, + { + "epoch": 0.5576120486300127, + "grad_norm": 0.09660208992259975, + "learning_rate": 0.0004312364121431138, + "loss": 1.5596, + "step": 6146 + }, + { + "epoch": 0.5577027762656506, + "grad_norm": 0.09601452100399865, + "learning_rate": 0.000431090884236658, + "loss": 1.6036, + "step": 6147 + }, + { + "epoch": 0.5577935039012883, + "grad_norm": 0.09622604577895985, + "learning_rate": 0.00043094536228051447, + "loss": 1.5176, + "step": 6148 + }, + { + "epoch": 0.5578842315369261, + "grad_norm": 0.09873070727827528, + "learning_rate": 0.00043079984628724904, + "loss": 1.5798, + "step": 6149 + }, + { + "epoch": 0.557974959172564, + "grad_norm": 0.10327725186366883, + "learning_rate": 0.0004306543362694272, + "loss": 1.6583, + "step": 6150 + }, + { + "epoch": 0.5580656868082018, + "grad_norm": 0.09583105653026466, + "learning_rate": 0.00043050883223961356, + "loss": 1.5649, + "step": 6151 + }, + { + "epoch": 0.5581564144438396, + "grad_norm": 0.10344076476369775, + "learning_rate": 0.00043036333421037255, + "loss": 1.6208, + "step": 6152 + }, + { + "epoch": 0.5582471420794775, + "grad_norm": 0.09458194726010408, + "learning_rate": 0.00043021784219426766, + "loss": 1.5757, + "step": 6153 + }, + { + "epoch": 0.5583378697151152, + "grad_norm": 0.10298533798791404, + "learning_rate": 0.0004300723562038626, + "loss": 1.5711, + "step": 6154 + }, + { + "epoch": 0.558428597350753, + "grad_norm": 0.09912493331446491, + "learning_rate": 0.00042992687625171996, + "loss": 1.635, + "step": 6155 + }, + { + "epoch": 0.5585193249863909, + "grad_norm": 0.09844313347500955, + "learning_rate": 0.00042978140235040176, + "loss": 1.5887, + "step": 6156 + }, + { + "epoch": 0.5586100526220287, + "grad_norm": 0.09947037278464013, + "learning_rate": 0.0004296359345124699, + "loss": 1.5665, + "step": 6157 + }, + { + "epoch": 0.5587007802576665, + "grad_norm": 0.09917951375611961, + 
"learning_rate": 0.00042949047275048544, + "loss": 1.5889, + "step": 6158 + }, + { + "epoch": 0.5587915078933043, + "grad_norm": 0.09870469084565046, + "learning_rate": 0.00042934501707700895, + "loss": 1.5635, + "step": 6159 + }, + { + "epoch": 0.5588822355289421, + "grad_norm": 0.09748910835807138, + "learning_rate": 0.0004291995675046006, + "loss": 1.5935, + "step": 6160 + }, + { + "epoch": 0.5589729631645799, + "grad_norm": 0.10319066850444662, + "learning_rate": 0.0004290541240458201, + "loss": 1.6039, + "step": 6161 + }, + { + "epoch": 0.5590636908002178, + "grad_norm": 0.09734227023345497, + "learning_rate": 0.0004289086867132266, + "loss": 1.6134, + "step": 6162 + }, + { + "epoch": 0.5591544184358556, + "grad_norm": 0.09787744737699895, + "learning_rate": 0.00042876325551937836, + "loss": 1.5746, + "step": 6163 + }, + { + "epoch": 0.5592451460714933, + "grad_norm": 0.09403315064138466, + "learning_rate": 0.0004286178304768335, + "loss": 1.616, + "step": 6164 + }, + { + "epoch": 0.5593358737071312, + "grad_norm": 0.10003961485400534, + "learning_rate": 0.0004284724115981496, + "loss": 1.6044, + "step": 6165 + }, + { + "epoch": 0.559426601342769, + "grad_norm": 0.09850245660985218, + "learning_rate": 0.0004283269988958834, + "loss": 1.5898, + "step": 6166 + }, + { + "epoch": 0.5595173289784068, + "grad_norm": 0.10236929695078993, + "learning_rate": 0.0004281815923825915, + "loss": 1.603, + "step": 6167 + }, + { + "epoch": 0.5596080566140447, + "grad_norm": 0.09669558022286119, + "learning_rate": 0.00042803619207082937, + "loss": 1.5714, + "step": 6168 + }, + { + "epoch": 0.5596987842496824, + "grad_norm": 0.09898074620144834, + "learning_rate": 0.000427890797973153, + "loss": 1.5782, + "step": 6169 + }, + { + "epoch": 0.5597895118853202, + "grad_norm": 0.09856936119136808, + "learning_rate": 0.0004277454101021167, + "loss": 1.5944, + "step": 6170 + }, + { + "epoch": 0.5598802395209581, + "grad_norm": 0.10226977233530174, + "learning_rate": 0.00042760002847027495, + "loss": 1.5615, + "step": 6171 + }, + { + "epoch": 0.5599709671565959, + "grad_norm": 0.096757820105283, + "learning_rate": 0.0004274546530901815, + "loss": 1.5907, + "step": 6172 + }, + { + "epoch": 0.5600616947922337, + "grad_norm": 0.10001735568941729, + "learning_rate": 0.00042730928397438926, + "loss": 1.6054, + "step": 6173 + }, + { + "epoch": 0.5601524224278716, + "grad_norm": 0.1206320863211657, + "learning_rate": 0.0004271639211354512, + "loss": 1.5911, + "step": 6174 + }, + { + "epoch": 0.5602431500635093, + "grad_norm": 0.09655972306519366, + "learning_rate": 0.0004270185645859193, + "loss": 1.5809, + "step": 6175 + }, + { + "epoch": 0.5603338776991471, + "grad_norm": 0.10007272589915656, + "learning_rate": 0.0004268732143383449, + "loss": 1.577, + "step": 6176 + }, + { + "epoch": 0.560424605334785, + "grad_norm": 0.09556876417893051, + "learning_rate": 0.0004267278704052794, + "loss": 1.5704, + "step": 6177 + }, + { + "epoch": 0.5605153329704228, + "grad_norm": 0.09626573321383994, + "learning_rate": 0.000426582532799273, + "loss": 1.5732, + "step": 6178 + }, + { + "epoch": 0.5606060606060606, + "grad_norm": 0.09524632886615952, + "learning_rate": 0.00042643720153287583, + "loss": 1.5845, + "step": 6179 + }, + { + "epoch": 0.5606967882416984, + "grad_norm": 0.09533149028523158, + "learning_rate": 0.0004262918766186371, + "loss": 1.5727, + "step": 6180 + }, + { + "epoch": 0.5607875158773362, + "grad_norm": 0.09715906917430735, + "learning_rate": 0.00042614655806910565, + "loss": 1.5806, + "step": 6181 + }, + { + 
"epoch": 0.5608782435129741, + "grad_norm": 0.10050025771279907, + "learning_rate": 0.0004260012458968299, + "loss": 1.6123, + "step": 6182 + }, + { + "epoch": 0.5609689711486119, + "grad_norm": 0.09577927066883983, + "learning_rate": 0.00042585594011435734, + "loss": 1.5792, + "step": 6183 + }, + { + "epoch": 0.5610596987842497, + "grad_norm": 0.09527266568298229, + "learning_rate": 0.0004257106407342352, + "loss": 1.5328, + "step": 6184 + }, + { + "epoch": 0.5611504264198875, + "grad_norm": 0.1067075946969064, + "learning_rate": 0.0004255653477690103, + "loss": 1.5668, + "step": 6185 + }, + { + "epoch": 0.5612411540555253, + "grad_norm": 0.10339864472128686, + "learning_rate": 0.00042542006123122867, + "loss": 1.6444, + "step": 6186 + }, + { + "epoch": 0.5613318816911631, + "grad_norm": 0.10032363705036312, + "learning_rate": 0.00042527478113343567, + "loss": 1.5379, + "step": 6187 + }, + { + "epoch": 0.561422609326801, + "grad_norm": 0.09996285110560443, + "learning_rate": 0.0004251295074881763, + "loss": 1.6109, + "step": 6188 + }, + { + "epoch": 0.5615133369624388, + "grad_norm": 0.10333871660683054, + "learning_rate": 0.0004249842403079952, + "loss": 1.5696, + "step": 6189 + }, + { + "epoch": 0.5616040645980765, + "grad_norm": 0.09605987004136285, + "learning_rate": 0.00042483897960543575, + "loss": 1.6013, + "step": 6190 + }, + { + "epoch": 0.5616947922337144, + "grad_norm": 0.10221756712113657, + "learning_rate": 0.00042469372539304155, + "loss": 1.5742, + "step": 6191 + }, + { + "epoch": 0.5617855198693522, + "grad_norm": 0.09748546935146231, + "learning_rate": 0.0004245484776833551, + "loss": 1.5785, + "step": 6192 + }, + { + "epoch": 0.56187624750499, + "grad_norm": 0.09544527143906464, + "learning_rate": 0.000424403236488919, + "loss": 1.5262, + "step": 6193 + }, + { + "epoch": 0.5619669751406279, + "grad_norm": 0.0987770190232102, + "learning_rate": 0.00042425800182227454, + "loss": 1.6191, + "step": 6194 + }, + { + "epoch": 0.5620577027762657, + "grad_norm": 0.09321367911828606, + "learning_rate": 0.0004241127736959627, + "loss": 1.5669, + "step": 6195 + }, + { + "epoch": 0.5621484304119034, + "grad_norm": 0.0961033792037302, + "learning_rate": 0.00042396755212252416, + "loss": 1.6075, + "step": 6196 + }, + { + "epoch": 0.5622391580475413, + "grad_norm": 0.10039367438873897, + "learning_rate": 0.00042382233711449856, + "loss": 1.5531, + "step": 6197 + }, + { + "epoch": 0.5623298856831791, + "grad_norm": 0.09786566733572356, + "learning_rate": 0.00042367712868442544, + "loss": 1.5782, + "step": 6198 + }, + { + "epoch": 0.5624206133188169, + "grad_norm": 0.09512183610951087, + "learning_rate": 0.00042353192684484333, + "loss": 1.5826, + "step": 6199 + }, + { + "epoch": 0.5625113409544548, + "grad_norm": 0.09898865623519455, + "learning_rate": 0.0004233867316082907, + "loss": 1.5626, + "step": 6200 + }, + { + "epoch": 0.5626020685900925, + "grad_norm": 0.09716566117228474, + "learning_rate": 0.0004232415429873053, + "loss": 1.5499, + "step": 6201 + }, + { + "epoch": 0.5626927962257303, + "grad_norm": 0.10137651540272637, + "learning_rate": 0.0004230963609944238, + "loss": 1.6002, + "step": 6202 + }, + { + "epoch": 0.5627835238613682, + "grad_norm": 0.09752290839913964, + "learning_rate": 0.0004229511856421828, + "loss": 1.6005, + "step": 6203 + }, + { + "epoch": 0.562874251497006, + "grad_norm": 0.10114683552370311, + "learning_rate": 0.0004228060169431185, + "loss": 1.5717, + "step": 6204 + }, + { + "epoch": 0.5629649791326438, + "grad_norm": 0.09566987863221081, + 
"learning_rate": 0.00042266085490976586, + "loss": 1.6152, + "step": 6205 + }, + { + "epoch": 0.5630557067682816, + "grad_norm": 0.10293771339992425, + "learning_rate": 0.0004225156995546599, + "loss": 1.5636, + "step": 6206 + }, + { + "epoch": 0.5631464344039194, + "grad_norm": 0.09798264233594538, + "learning_rate": 0.00042237055089033436, + "loss": 1.6062, + "step": 6207 + }, + { + "epoch": 0.5632371620395572, + "grad_norm": 0.09962218673521177, + "learning_rate": 0.0004222254089293236, + "loss": 1.5924, + "step": 6208 + }, + { + "epoch": 0.5633278896751951, + "grad_norm": 0.09787497945684764, + "learning_rate": 0.00042208027368416, + "loss": 1.5721, + "step": 6209 + }, + { + "epoch": 0.5634186173108329, + "grad_norm": 0.09724029688551401, + "learning_rate": 0.00042193514516737635, + "loss": 1.5699, + "step": 6210 + }, + { + "epoch": 0.5635093449464706, + "grad_norm": 0.10309708569910259, + "learning_rate": 0.0004217900233915045, + "loss": 1.5859, + "step": 6211 + }, + { + "epoch": 0.5636000725821085, + "grad_norm": 0.09638971412780095, + "learning_rate": 0.0004216449083690756, + "loss": 1.567, + "step": 6212 + }, + { + "epoch": 0.5636908002177463, + "grad_norm": 0.1003265004702982, + "learning_rate": 0.00042149980011262035, + "loss": 1.6, + "step": 6213 + }, + { + "epoch": 0.5637815278533841, + "grad_norm": 0.10094501016400935, + "learning_rate": 0.00042135469863466903, + "loss": 1.5282, + "step": 6214 + }, + { + "epoch": 0.563872255489022, + "grad_norm": 0.09375617889151715, + "learning_rate": 0.0004212096039477509, + "loss": 1.5918, + "step": 6215 + }, + { + "epoch": 0.5639629831246598, + "grad_norm": 0.09634953498146445, + "learning_rate": 0.0004210645160643952, + "loss": 1.5466, + "step": 6216 + }, + { + "epoch": 0.5640537107602975, + "grad_norm": 0.0963240409340226, + "learning_rate": 0.00042091943499713007, + "loss": 1.5298, + "step": 6217 + }, + { + "epoch": 0.5641444383959354, + "grad_norm": 0.096383364167346, + "learning_rate": 0.00042077436075848364, + "loss": 1.573, + "step": 6218 + }, + { + "epoch": 0.5642351660315732, + "grad_norm": 0.09623652013680836, + "learning_rate": 0.0004206292933609826, + "loss": 1.5922, + "step": 6219 + }, + { + "epoch": 0.5643258936672111, + "grad_norm": 0.09897728190796926, + "learning_rate": 0.00042048423281715384, + "loss": 1.588, + "step": 6220 + }, + { + "epoch": 0.5644166213028489, + "grad_norm": 0.09444562240423564, + "learning_rate": 0.00042033917913952335, + "loss": 1.5719, + "step": 6221 + }, + { + "epoch": 0.5645073489384866, + "grad_norm": 0.09832145530009934, + "learning_rate": 0.0004201941323406164, + "loss": 1.5679, + "step": 6222 + }, + { + "epoch": 0.5645980765741245, + "grad_norm": 0.09734510793843042, + "learning_rate": 0.00042004909243295786, + "loss": 1.5706, + "step": 6223 + }, + { + "epoch": 0.5646888042097623, + "grad_norm": 0.0960177123174031, + "learning_rate": 0.000419904059429072, + "loss": 1.6062, + "step": 6224 + }, + { + "epoch": 0.5647795318454001, + "grad_norm": 0.09765453202335064, + "learning_rate": 0.00041975903334148253, + "loss": 1.6023, + "step": 6225 + }, + { + "epoch": 0.564870259481038, + "grad_norm": 0.09753348301604452, + "learning_rate": 0.0004196140141827122, + "loss": 1.6335, + "step": 6226 + }, + { + "epoch": 0.5649609871166758, + "grad_norm": 0.09707220530389761, + "learning_rate": 0.0004194690019652837, + "loss": 1.6203, + "step": 6227 + }, + { + "epoch": 0.5650517147523135, + "grad_norm": 0.09712413934001515, + "learning_rate": 0.00041932399670171875, + "loss": 1.6246, + "step": 6228 + }, + { + 
"epoch": 0.5651424423879514, + "grad_norm": 0.09918336131026238, + "learning_rate": 0.00041917899840453855, + "loss": 1.6044, + "step": 6229 + }, + { + "epoch": 0.5652331700235892, + "grad_norm": 0.09531921224279455, + "learning_rate": 0.0004190340070862637, + "loss": 1.5976, + "step": 6230 + }, + { + "epoch": 0.565323897659227, + "grad_norm": 0.10212422454137453, + "learning_rate": 0.00041888902275941423, + "loss": 1.5394, + "step": 6231 + }, + { + "epoch": 0.5654146252948649, + "grad_norm": 0.1012001750379454, + "learning_rate": 0.0004187440454365096, + "loss": 1.5812, + "step": 6232 + }, + { + "epoch": 0.5655053529305026, + "grad_norm": 0.09527129546452498, + "learning_rate": 0.0004185990751300688, + "loss": 1.5572, + "step": 6233 + }, + { + "epoch": 0.5655960805661404, + "grad_norm": 0.10456450245069038, + "learning_rate": 0.0004184541118526097, + "loss": 1.572, + "step": 6234 + }, + { + "epoch": 0.5656868082017783, + "grad_norm": 0.10523297511360888, + "learning_rate": 0.00041830915561665027, + "loss": 1.6008, + "step": 6235 + }, + { + "epoch": 0.5657775358374161, + "grad_norm": 0.09258443759578656, + "learning_rate": 0.0004181642064347071, + "loss": 1.556, + "step": 6236 + }, + { + "epoch": 0.5658682634730539, + "grad_norm": 0.10013310533124814, + "learning_rate": 0.0004180192643192968, + "loss": 1.5941, + "step": 6237 + }, + { + "epoch": 0.5659589911086917, + "grad_norm": 0.10269159379882085, + "learning_rate": 0.000417874329282935, + "loss": 1.5787, + "step": 6238 + }, + { + "epoch": 0.5660497187443295, + "grad_norm": 0.09997030379043914, + "learning_rate": 0.0004177294013381371, + "loss": 1.5604, + "step": 6239 + }, + { + "epoch": 0.5661404463799673, + "grad_norm": 0.0948566044985486, + "learning_rate": 0.0004175844804974176, + "loss": 1.5662, + "step": 6240 + }, + { + "epoch": 0.5662311740156052, + "grad_norm": 0.09389159113225737, + "learning_rate": 0.0004174395667732902, + "loss": 1.556, + "step": 6241 + }, + { + "epoch": 0.566321901651243, + "grad_norm": 0.09746187835041384, + "learning_rate": 0.00041729466017826846, + "loss": 1.5848, + "step": 6242 + }, + { + "epoch": 0.5664126292868807, + "grad_norm": 0.09999586091940156, + "learning_rate": 0.00041714976072486506, + "loss": 1.5867, + "step": 6243 + }, + { + "epoch": 0.5665033569225186, + "grad_norm": 0.09615714236084513, + "learning_rate": 0.0004170048684255919, + "loss": 1.5833, + "step": 6244 + }, + { + "epoch": 0.5665940845581564, + "grad_norm": 0.10000297146935548, + "learning_rate": 0.0004168599832929608, + "loss": 1.581, + "step": 6245 + }, + { + "epoch": 0.5666848121937942, + "grad_norm": 0.10548636326547632, + "learning_rate": 0.00041671510533948194, + "loss": 1.5998, + "step": 6246 + }, + { + "epoch": 0.5667755398294321, + "grad_norm": 0.09822191717350086, + "learning_rate": 0.00041657023457766643, + "loss": 1.5755, + "step": 6247 + }, + { + "epoch": 0.5668662674650699, + "grad_norm": 0.09607698342433216, + "learning_rate": 0.0004164253710200233, + "loss": 1.5819, + "step": 6248 + }, + { + "epoch": 0.5669569951007076, + "grad_norm": 0.09930342196282349, + "learning_rate": 0.0004162805146790617, + "loss": 1.6107, + "step": 6249 + }, + { + "epoch": 0.5670477227363455, + "grad_norm": 0.10119179824075338, + "learning_rate": 0.0004161356655672901, + "loss": 1.5845, + "step": 6250 + }, + { + "epoch": 0.5671384503719833, + "grad_norm": 0.09985884076089264, + "learning_rate": 0.0004159908236972161, + "loss": 1.5828, + "step": 6251 + }, + { + "epoch": 0.5672291780076211, + "grad_norm": 0.09825003202222224, + "learning_rate": 
0.00041584598908134675, + "loss": 1.5503, + "step": 6252 + }, + { + "epoch": 0.567319905643259, + "grad_norm": 0.09883516996145925, + "learning_rate": 0.0004157011617321889, + "loss": 1.5595, + "step": 6253 + }, + { + "epoch": 0.5674106332788967, + "grad_norm": 0.11165974721094948, + "learning_rate": 0.0004155563416622478, + "loss": 1.5809, + "step": 6254 + }, + { + "epoch": 0.5675013609145345, + "grad_norm": 0.0977562050597768, + "learning_rate": 0.0004154115288840292, + "loss": 1.5856, + "step": 6255 + }, + { + "epoch": 0.5675920885501724, + "grad_norm": 0.09971772022201715, + "learning_rate": 0.00041526672341003757, + "loss": 1.5764, + "step": 6256 + }, + { + "epoch": 0.5676828161858102, + "grad_norm": 0.09668466737732548, + "learning_rate": 0.0004151219252527769, + "loss": 1.5817, + "step": 6257 + }, + { + "epoch": 0.5677735438214481, + "grad_norm": 0.10291482632856147, + "learning_rate": 0.0004149771344247505, + "loss": 1.5676, + "step": 6258 + }, + { + "epoch": 0.5678642714570858, + "grad_norm": 0.10047776701886656, + "learning_rate": 0.0004148323509384609, + "loss": 1.5693, + "step": 6259 + }, + { + "epoch": 0.5679549990927236, + "grad_norm": 0.10234148294970521, + "learning_rate": 0.0004146875748064106, + "loss": 1.5587, + "step": 6260 + }, + { + "epoch": 0.5680457267283615, + "grad_norm": 0.0967792800083637, + "learning_rate": 0.00041454280604110073, + "loss": 1.5593, + "step": 6261 + }, + { + "epoch": 0.5681364543639993, + "grad_norm": 0.09933771100994024, + "learning_rate": 0.00041439804465503196, + "loss": 1.5889, + "step": 6262 + }, + { + "epoch": 0.5682271819996371, + "grad_norm": 0.10325550459642005, + "learning_rate": 0.0004142532906607047, + "loss": 1.5923, + "step": 6263 + }, + { + "epoch": 0.568317909635275, + "grad_norm": 0.09674103459456398, + "learning_rate": 0.00041410854407061864, + "loss": 1.58, + "step": 6264 + }, + { + "epoch": 0.5684086372709127, + "grad_norm": 0.10284922454281847, + "learning_rate": 0.0004139638048972723, + "loss": 1.5707, + "step": 6265 + }, + { + "epoch": 0.5684993649065505, + "grad_norm": 0.09736447960154528, + "learning_rate": 0.00041381907315316414, + "loss": 1.5466, + "step": 6266 + }, + { + "epoch": 0.5685900925421884, + "grad_norm": 0.0960563692459452, + "learning_rate": 0.00041367434885079173, + "loss": 1.5535, + "step": 6267 + }, + { + "epoch": 0.5686808201778262, + "grad_norm": 0.09362150804725809, + "learning_rate": 0.00041352963200265197, + "loss": 1.5765, + "step": 6268 + }, + { + "epoch": 0.568771547813464, + "grad_norm": 0.09461135407527455, + "learning_rate": 0.0004133849226212412, + "loss": 1.5705, + "step": 6269 + }, + { + "epoch": 0.5688622754491018, + "grad_norm": 0.10097253884686995, + "learning_rate": 0.0004132402207190551, + "loss": 1.5762, + "step": 6270 + }, + { + "epoch": 0.5689530030847396, + "grad_norm": 0.10548735379251141, + "learning_rate": 0.00041309552630858877, + "loss": 1.5673, + "step": 6271 + }, + { + "epoch": 0.5690437307203774, + "grad_norm": 0.09938895650105992, + "learning_rate": 0.0004129508394023367, + "loss": 1.5519, + "step": 6272 + }, + { + "epoch": 0.5691344583560153, + "grad_norm": 0.09532297478460267, + "learning_rate": 0.0004128061600127923, + "loss": 1.5775, + "step": 6273 + }, + { + "epoch": 0.5692251859916531, + "grad_norm": 0.09748940947531823, + "learning_rate": 0.00041266148815244895, + "loss": 1.5744, + "step": 6274 + }, + { + "epoch": 0.5693159136272908, + "grad_norm": 0.1000524282958089, + "learning_rate": 0.00041251682383379896, + "loss": 1.5976, + "step": 6275 + }, + { + "epoch": 
0.5694066412629287, + "grad_norm": 0.10160307129189088, + "learning_rate": 0.000412372167069334, + "loss": 1.6214, + "step": 6276 + }, + { + "epoch": 0.5694973688985665, + "grad_norm": 0.09244564312996045, + "learning_rate": 0.0004122275178715453, + "loss": 1.6017, + "step": 6277 + }, + { + "epoch": 0.5695880965342043, + "grad_norm": 0.09758130585132113, + "learning_rate": 0.00041208287625292344, + "loss": 1.5984, + "step": 6278 + }, + { + "epoch": 0.5696788241698422, + "grad_norm": 0.09705320767412126, + "learning_rate": 0.00041193824222595835, + "loss": 1.5197, + "step": 6279 + }, + { + "epoch": 0.56976955180548, + "grad_norm": 0.09946990530128878, + "learning_rate": 0.0004117936158031388, + "loss": 1.5383, + "step": 6280 + }, + { + "epoch": 0.5698602794411177, + "grad_norm": 0.09910958250043952, + "learning_rate": 0.0004116489969969536, + "loss": 1.5597, + "step": 6281 + }, + { + "epoch": 0.5699510070767556, + "grad_norm": 0.09844936775719645, + "learning_rate": 0.0004115043858198906, + "loss": 1.587, + "step": 6282 + }, + { + "epoch": 0.5700417347123934, + "grad_norm": 0.10215430126712438, + "learning_rate": 0.00041135978228443686, + "loss": 1.6119, + "step": 6283 + }, + { + "epoch": 0.5701324623480312, + "grad_norm": 0.0997433174232985, + "learning_rate": 0.000411215186403079, + "loss": 1.6076, + "step": 6284 + }, + { + "epoch": 0.570223189983669, + "grad_norm": 0.09720958124526728, + "learning_rate": 0.00041107059818830274, + "loss": 1.6194, + "step": 6285 + }, + { + "epoch": 0.5703139176193068, + "grad_norm": 0.09895477795437514, + "learning_rate": 0.0004109260176525936, + "loss": 1.5599, + "step": 6286 + }, + { + "epoch": 0.5704046452549446, + "grad_norm": 0.09773871416445558, + "learning_rate": 0.00041078144480843594, + "loss": 1.554, + "step": 6287 + }, + { + "epoch": 0.5704953728905825, + "grad_norm": 0.10039904693864496, + "learning_rate": 0.00041063687966831365, + "loss": 1.6185, + "step": 6288 + }, + { + "epoch": 0.5705861005262203, + "grad_norm": 0.10471471232264185, + "learning_rate": 0.00041049232224471007, + "loss": 1.568, + "step": 6289 + }, + { + "epoch": 0.570676828161858, + "grad_norm": 0.1032690276800346, + "learning_rate": 0.0004103477725501076, + "loss": 1.5994, + "step": 6290 + }, + { + "epoch": 0.5707675557974959, + "grad_norm": 0.09999487231135672, + "learning_rate": 0.00041020323059698805, + "loss": 1.6119, + "step": 6291 + }, + { + "epoch": 0.5708582834331337, + "grad_norm": 0.09718676292659155, + "learning_rate": 0.000410058696397833, + "loss": 1.5976, + "step": 6292 + }, + { + "epoch": 0.5709490110687715, + "grad_norm": 0.09554486350074776, + "learning_rate": 0.0004099141699651225, + "loss": 1.5922, + "step": 6293 + }, + { + "epoch": 0.5710397387044094, + "grad_norm": 0.10074799507374473, + "learning_rate": 0.0004097696513113368, + "loss": 1.6165, + "step": 6294 + }, + { + "epoch": 0.5711304663400472, + "grad_norm": 0.0990228886553428, + "learning_rate": 0.00040962514044895503, + "loss": 1.5877, + "step": 6295 + }, + { + "epoch": 0.571221193975685, + "grad_norm": 0.09585222373474639, + "learning_rate": 0.00040948063739045583, + "loss": 1.5471, + "step": 6296 + }, + { + "epoch": 0.5713119216113228, + "grad_norm": 0.10103087485481708, + "learning_rate": 0.0004093361421483168, + "loss": 1.5315, + "step": 6297 + }, + { + "epoch": 0.5714026492469606, + "grad_norm": 0.09848908637543864, + "learning_rate": 0.0004091916547350153, + "loss": 1.5613, + "step": 6298 + }, + { + "epoch": 0.5714933768825985, + "grad_norm": 0.09897613455551828, + "learning_rate": 
0.00040904717516302786, + "loss": 1.6099, + "step": 6299 + }, + { + "epoch": 0.5715841045182363, + "grad_norm": 0.09913696414096038, + "learning_rate": 0.00040890270344483016, + "loss": 1.5547, + "step": 6300 + }, + { + "epoch": 0.571674832153874, + "grad_norm": 0.10066355530405896, + "learning_rate": 0.00040875823959289734, + "loss": 1.6034, + "step": 6301 + }, + { + "epoch": 0.5717655597895119, + "grad_norm": 0.09918300162251084, + "learning_rate": 0.000408613783619704, + "loss": 1.5673, + "step": 6302 + }, + { + "epoch": 0.5718562874251497, + "grad_norm": 0.09962230477002874, + "learning_rate": 0.00040846933553772413, + "loss": 1.6112, + "step": 6303 + }, + { + "epoch": 0.5719470150607875, + "grad_norm": 0.09951501192743056, + "learning_rate": 0.00040832489535943053, + "loss": 1.5522, + "step": 6304 + }, + { + "epoch": 0.5720377426964254, + "grad_norm": 0.09975430233339533, + "learning_rate": 0.0004081804630972956, + "loss": 1.5825, + "step": 6305 + }, + { + "epoch": 0.5721284703320632, + "grad_norm": 0.095557984767477, + "learning_rate": 0.00040803603876379146, + "loss": 1.5463, + "step": 6306 + }, + { + "epoch": 0.5722191979677009, + "grad_norm": 0.09466121878291293, + "learning_rate": 0.00040789162237138876, + "loss": 1.5631, + "step": 6307 + }, + { + "epoch": 0.5723099256033388, + "grad_norm": 0.09750682021719614, + "learning_rate": 0.00040774721393255805, + "loss": 1.5967, + "step": 6308 + }, + { + "epoch": 0.5724006532389766, + "grad_norm": 0.09480091484697713, + "learning_rate": 0.0004076028134597689, + "loss": 1.5885, + "step": 6309 + }, + { + "epoch": 0.5724913808746144, + "grad_norm": 0.09899507539787757, + "learning_rate": 0.0004074584209654905, + "loss": 1.5994, + "step": 6310 + }, + { + "epoch": 0.5725821085102523, + "grad_norm": 0.09583507751316216, + "learning_rate": 0.0004073140364621912, + "loss": 1.5543, + "step": 6311 + }, + { + "epoch": 0.57267283614589, + "grad_norm": 0.09781049214656312, + "learning_rate": 0.00040716965996233844, + "loss": 1.6406, + "step": 6312 + }, + { + "epoch": 0.5727635637815278, + "grad_norm": 0.09419786640649808, + "learning_rate": 0.0004070252914783994, + "loss": 1.5746, + "step": 6313 + }, + { + "epoch": 0.5728542914171657, + "grad_norm": 0.09596259607743605, + "learning_rate": 0.00040688093102283997, + "loss": 1.5768, + "step": 6314 + }, + { + "epoch": 0.5729450190528035, + "grad_norm": 0.09380990213681538, + "learning_rate": 0.00040673657860812587, + "loss": 1.5292, + "step": 6315 + }, + { + "epoch": 0.5730357466884413, + "grad_norm": 0.09916076720221967, + "learning_rate": 0.0004065922342467218, + "loss": 1.6148, + "step": 6316 + }, + { + "epoch": 0.5731264743240791, + "grad_norm": 0.10030436440130529, + "learning_rate": 0.0004064478979510922, + "loss": 1.5888, + "step": 6317 + }, + { + "epoch": 0.5732172019597169, + "grad_norm": 0.09543310188423683, + "learning_rate": 0.00040630356973370054, + "loss": 1.5784, + "step": 6318 + }, + { + "epoch": 0.5733079295953547, + "grad_norm": 0.0981554858545736, + "learning_rate": 0.00040615924960700934, + "loss": 1.5892, + "step": 6319 + }, + { + "epoch": 0.5733986572309926, + "grad_norm": 0.09655317214764823, + "learning_rate": 0.00040601493758348075, + "loss": 1.604, + "step": 6320 + }, + { + "epoch": 0.5734893848666304, + "grad_norm": 0.09812732823471965, + "learning_rate": 0.0004058706336755763, + "loss": 1.5675, + "step": 6321 + }, + { + "epoch": 0.5735801125022681, + "grad_norm": 0.0991112280712678, + "learning_rate": 0.0004057263378957563, + "loss": 1.6402, + "step": 6322 + }, + { + "epoch": 
0.573670840137906, + "grad_norm": 0.10694508693142793, + "learning_rate": 0.000405582050256481, + "loss": 1.5692, + "step": 6323 + }, + { + "epoch": 0.5737615677735438, + "grad_norm": 0.10302998211235441, + "learning_rate": 0.0004054377707702094, + "loss": 1.5787, + "step": 6324 + }, + { + "epoch": 0.5738522954091816, + "grad_norm": 0.09835463844116904, + "learning_rate": 0.00040529349944940055, + "loss": 1.5592, + "step": 6325 + }, + { + "epoch": 0.5739430230448195, + "grad_norm": 0.09921486495855203, + "learning_rate": 0.00040514923630651173, + "loss": 1.6069, + "step": 6326 + }, + { + "epoch": 0.5740337506804573, + "grad_norm": 0.09569893089666771, + "learning_rate": 0.00040500498135400044, + "loss": 1.5216, + "step": 6327 + }, + { + "epoch": 0.574124478316095, + "grad_norm": 0.10114250114101733, + "learning_rate": 0.0004048607346043231, + "loss": 1.5306, + "step": 6328 + }, + { + "epoch": 0.5742152059517329, + "grad_norm": 0.10106162485904191, + "learning_rate": 0.0004047164960699352, + "loss": 1.5366, + "step": 6329 + }, + { + "epoch": 0.5743059335873707, + "grad_norm": 0.1006659019716765, + "learning_rate": 0.000404572265763292, + "loss": 1.5978, + "step": 6330 + }, + { + "epoch": 0.5743966612230085, + "grad_norm": 0.09477212066909321, + "learning_rate": 0.0004044280436968477, + "loss": 1.5646, + "step": 6331 + }, + { + "epoch": 0.5744873888586464, + "grad_norm": 0.09856554018689098, + "learning_rate": 0.0004042838298830558, + "loss": 1.5563, + "step": 6332 + }, + { + "epoch": 0.5745781164942841, + "grad_norm": 0.1061970970744038, + "learning_rate": 0.00040413962433436933, + "loss": 1.6081, + "step": 6333 + }, + { + "epoch": 0.574668844129922, + "grad_norm": 0.10043934915626188, + "learning_rate": 0.0004039954270632405, + "loss": 1.6431, + "step": 6334 + }, + { + "epoch": 0.5747595717655598, + "grad_norm": 0.09831923588337388, + "learning_rate": 0.00040385123808212074, + "loss": 1.6222, + "step": 6335 + }, + { + "epoch": 0.5748502994011976, + "grad_norm": 0.09934605144550818, + "learning_rate": 0.0004037070574034607, + "loss": 1.582, + "step": 6336 + }, + { + "epoch": 0.5749410270368355, + "grad_norm": 0.09866050309615534, + "learning_rate": 0.0004035628850397104, + "loss": 1.563, + "step": 6337 + }, + { + "epoch": 0.5750317546724732, + "grad_norm": 0.09676828624819331, + "learning_rate": 0.0004034187210033193, + "loss": 1.6097, + "step": 6338 + }, + { + "epoch": 0.575122482308111, + "grad_norm": 0.0991536391336659, + "learning_rate": 0.0004032745653067358, + "loss": 1.5888, + "step": 6339 + }, + { + "epoch": 0.5752132099437489, + "grad_norm": 0.09910751704556163, + "learning_rate": 0.00040313041796240766, + "loss": 1.5941, + "step": 6340 + }, + { + "epoch": 0.5753039375793867, + "grad_norm": 0.09936226343952394, + "learning_rate": 0.0004029862789827824, + "loss": 1.5774, + "step": 6341 + }, + { + "epoch": 0.5753946652150245, + "grad_norm": 0.09803350171897258, + "learning_rate": 0.0004028421483803063, + "loss": 1.5488, + "step": 6342 + }, + { + "epoch": 0.5754853928506624, + "grad_norm": 0.0991356555746917, + "learning_rate": 0.0004026980261674249, + "loss": 1.5956, + "step": 6343 + }, + { + "epoch": 0.5755761204863001, + "grad_norm": 0.09902402457644352, + "learning_rate": 0.0004025539123565833, + "loss": 1.5662, + "step": 6344 + }, + { + "epoch": 0.5756668481219379, + "grad_norm": 0.09933992885110292, + "learning_rate": 0.00040240980696022586, + "loss": 1.6052, + "step": 6345 + }, + { + "epoch": 0.5757575757575758, + "grad_norm": 0.10113603184238298, + "learning_rate": 
0.0004022657099907958, + "loss": 1.5613, + "step": 6346 + }, + { + "epoch": 0.5758483033932136, + "grad_norm": 0.10073562053554157, + "learning_rate": 0.0004021216214607361, + "loss": 1.5704, + "step": 6347 + }, + { + "epoch": 0.5759390310288514, + "grad_norm": 0.09622482903387716, + "learning_rate": 0.0004019775413824888, + "loss": 1.5693, + "step": 6348 + }, + { + "epoch": 0.5760297586644892, + "grad_norm": 0.09440285942758869, + "learning_rate": 0.0004018334697684952, + "loss": 1.575, + "step": 6349 + }, + { + "epoch": 0.576120486300127, + "grad_norm": 0.09874971358443592, + "learning_rate": 0.0004016894066311962, + "loss": 1.5769, + "step": 6350 + }, + { + "epoch": 0.5762112139357648, + "grad_norm": 0.10088948930448204, + "learning_rate": 0.0004015453519830312, + "loss": 1.5982, + "step": 6351 + }, + { + "epoch": 0.5763019415714027, + "grad_norm": 0.09811326797875274, + "learning_rate": 0.0004014013058364398, + "loss": 1.5768, + "step": 6352 + }, + { + "epoch": 0.5763926692070405, + "grad_norm": 0.10004115515341762, + "learning_rate": 0.00040125726820385997, + "loss": 1.5948, + "step": 6353 + }, + { + "epoch": 0.5764833968426782, + "grad_norm": 0.09560513619692852, + "learning_rate": 0.00040111323909772966, + "loss": 1.5665, + "step": 6354 + }, + { + "epoch": 0.5765741244783161, + "grad_norm": 0.0971808129346433, + "learning_rate": 0.0004009692185304856, + "loss": 1.6019, + "step": 6355 + }, + { + "epoch": 0.5766648521139539, + "grad_norm": 0.10045427746358923, + "learning_rate": 0.0004008252065145642, + "loss": 1.6171, + "step": 6356 + }, + { + "epoch": 0.5767555797495917, + "grad_norm": 0.09647533951485782, + "learning_rate": 0.0004006812030624009, + "loss": 1.5943, + "step": 6357 + }, + { + "epoch": 0.5768463073852296, + "grad_norm": 0.09765988681201469, + "learning_rate": 0.0004005372081864303, + "loss": 1.5864, + "step": 6358 + }, + { + "epoch": 0.5769370350208674, + "grad_norm": 0.0993705251017021, + "learning_rate": 0.0004003932218990864, + "loss": 1.5835, + "step": 6359 + }, + { + "epoch": 0.5770277626565051, + "grad_norm": 0.09963918963775129, + "learning_rate": 0.00040024924421280263, + "loss": 1.6183, + "step": 6360 + }, + { + "epoch": 0.577118490292143, + "grad_norm": 0.09699159009616615, + "learning_rate": 0.00040010527514001114, + "loss": 1.5853, + "step": 6361 + }, + { + "epoch": 0.5772092179277808, + "grad_norm": 0.0917958760981152, + "learning_rate": 0.00039996131469314394, + "loss": 1.5871, + "step": 6362 + }, + { + "epoch": 0.5772999455634186, + "grad_norm": 0.09557355156122507, + "learning_rate": 0.0003998173628846318, + "loss": 1.5848, + "step": 6363 + }, + { + "epoch": 0.5773906731990565, + "grad_norm": 0.09704949301939905, + "learning_rate": 0.00039967341972690543, + "loss": 1.572, + "step": 6364 + }, + { + "epoch": 0.5774814008346942, + "grad_norm": 0.09654134435994724, + "learning_rate": 0.0003995294852323939, + "loss": 1.5956, + "step": 6365 + }, + { + "epoch": 0.577572128470332, + "grad_norm": 0.09806284386996758, + "learning_rate": 0.0003993855594135261, + "loss": 1.569, + "step": 6366 + }, + { + "epoch": 0.5776628561059699, + "grad_norm": 0.09681886571566511, + "learning_rate": 0.0003992416422827302, + "loss": 1.5829, + "step": 6367 + }, + { + "epoch": 0.5777535837416077, + "grad_norm": 0.10318432686078975, + "learning_rate": 0.00039909773385243325, + "loss": 1.5977, + "step": 6368 + }, + { + "epoch": 0.5778443113772455, + "grad_norm": 0.09756546362109984, + "learning_rate": 0.00039895383413506187, + "loss": 1.5515, + "step": 6369 + }, + { + "epoch": 
0.5779350390128833, + "grad_norm": 0.10168417710329891, + "learning_rate": 0.0003988099431430419, + "loss": 1.5647, + "step": 6370 + }, + { + "epoch": 0.5780257666485211, + "grad_norm": 0.09983908171239808, + "learning_rate": 0.00039866606088879796, + "loss": 1.611, + "step": 6371 + }, + { + "epoch": 0.5781164942841589, + "grad_norm": 0.09797912168636547, + "learning_rate": 0.00039852218738475474, + "loss": 1.5418, + "step": 6372 + }, + { + "epoch": 0.5782072219197968, + "grad_norm": 0.10139963415762196, + "learning_rate": 0.0003983783226433357, + "loss": 1.5708, + "step": 6373 + }, + { + "epoch": 0.5782979495554346, + "grad_norm": 0.09883935682990556, + "learning_rate": 0.0003982344666769635, + "loss": 1.6228, + "step": 6374 + }, + { + "epoch": 0.5783886771910725, + "grad_norm": 0.0994684891655964, + "learning_rate": 0.00039809061949806, + "loss": 1.5746, + "step": 6375 + }, + { + "epoch": 0.5784794048267102, + "grad_norm": 0.0986001575315924, + "learning_rate": 0.0003979467811190465, + "loss": 1.5894, + "step": 6376 + }, + { + "epoch": 0.578570132462348, + "grad_norm": 0.09866056085802002, + "learning_rate": 0.00039780295155234364, + "loss": 1.6172, + "step": 6377 + }, + { + "epoch": 0.5786608600979859, + "grad_norm": 0.09516772560062742, + "learning_rate": 0.00039765913081037086, + "loss": 1.5628, + "step": 6378 + }, + { + "epoch": 0.5787515877336237, + "grad_norm": 0.09542013587814482, + "learning_rate": 0.0003975153189055471, + "loss": 1.5803, + "step": 6379 + }, + { + "epoch": 0.5788423153692615, + "grad_norm": 0.09551258643766347, + "learning_rate": 0.00039737151585029075, + "loss": 1.5437, + "step": 6380 + }, + { + "epoch": 0.5789330430048993, + "grad_norm": 0.10045995789304613, + "learning_rate": 0.00039722772165701924, + "loss": 1.6378, + "step": 6381 + }, + { + "epoch": 0.5790237706405371, + "grad_norm": 0.09404775458259125, + "learning_rate": 0.00039708393633814906, + "loss": 1.5374, + "step": 6382 + }, + { + "epoch": 0.5791144982761749, + "grad_norm": 0.09843387218107949, + "learning_rate": 0.0003969401599060961, + "loss": 1.5873, + "step": 6383 + }, + { + "epoch": 0.5792052259118128, + "grad_norm": 0.09729062643595672, + "learning_rate": 0.0003967963923732756, + "loss": 1.6033, + "step": 6384 + }, + { + "epoch": 0.5792959535474506, + "grad_norm": 0.09980631952886339, + "learning_rate": 0.0003966526337521018, + "loss": 1.5963, + "step": 6385 + }, + { + "epoch": 0.5793866811830883, + "grad_norm": 0.09612037117806943, + "learning_rate": 0.00039650888405498824, + "loss": 1.5929, + "step": 6386 + }, + { + "epoch": 0.5794774088187262, + "grad_norm": 0.10247263516649902, + "learning_rate": 0.00039636514329434777, + "loss": 1.5587, + "step": 6387 + }, + { + "epoch": 0.579568136454364, + "grad_norm": 0.10115827455087609, + "learning_rate": 0.0003962214114825925, + "loss": 1.579, + "step": 6388 + }, + { + "epoch": 0.5796588640900018, + "grad_norm": 0.09497567087627584, + "learning_rate": 0.00039607768863213373, + "loss": 1.5893, + "step": 6389 + }, + { + "epoch": 0.5797495917256397, + "grad_norm": 0.09809741994319429, + "learning_rate": 0.0003959339747553818, + "loss": 1.5734, + "step": 6390 + }, + { + "epoch": 0.5798403193612774, + "grad_norm": 0.10021048276082406, + "learning_rate": 0.0003957902698647465, + "loss": 1.5916, + "step": 6391 + }, + { + "epoch": 0.5799310469969152, + "grad_norm": 0.09927807546584447, + "learning_rate": 0.00039564657397263677, + "loss": 1.601, + "step": 6392 + }, + { + "epoch": 0.5800217746325531, + "grad_norm": 0.09802786523874618, + "learning_rate": 
0.0003955028870914607, + "loss": 1.5496, + "step": 6393 + }, + { + "epoch": 0.5801125022681909, + "grad_norm": 0.09703859051522952, + "learning_rate": 0.00039535920923362567, + "loss": 1.5514, + "step": 6394 + }, + { + "epoch": 0.5802032299038287, + "grad_norm": 0.09459490583430956, + "learning_rate": 0.0003952155404115384, + "loss": 1.6145, + "step": 6395 + }, + { + "epoch": 0.5802939575394666, + "grad_norm": 0.09904072719246358, + "learning_rate": 0.0003950718806376048, + "loss": 1.5635, + "step": 6396 + }, + { + "epoch": 0.5803846851751043, + "grad_norm": 0.09977221846998108, + "learning_rate": 0.0003949282299242296, + "loss": 1.5979, + "step": 6397 + }, + { + "epoch": 0.5804754128107421, + "grad_norm": 0.09628535783255458, + "learning_rate": 0.0003947845882838173, + "loss": 1.5889, + "step": 6398 + }, + { + "epoch": 0.58056614044638, + "grad_norm": 0.09441235131073974, + "learning_rate": 0.0003946409557287714, + "loss": 1.583, + "step": 6399 + }, + { + "epoch": 0.5806568680820178, + "grad_norm": 0.09677288657751092, + "learning_rate": 0.00039449733227149445, + "loss": 1.5502, + "step": 6400 + }, + { + "epoch": 0.5807475957176556, + "grad_norm": 0.0959846737802323, + "learning_rate": 0.0003943537179243883, + "loss": 1.5818, + "step": 6401 + }, + { + "epoch": 0.5808383233532934, + "grad_norm": 0.10007093043094434, + "learning_rate": 0.0003942101126998541, + "loss": 1.5472, + "step": 6402 + }, + { + "epoch": 0.5809290509889312, + "grad_norm": 0.09719258453063156, + "learning_rate": 0.00039406651661029246, + "loss": 1.539, + "step": 6403 + }, + { + "epoch": 0.581019778624569, + "grad_norm": 0.09888911337937763, + "learning_rate": 0.00039392292966810265, + "loss": 1.5989, + "step": 6404 + }, + { + "epoch": 0.5811105062602069, + "grad_norm": 0.09934528996412427, + "learning_rate": 0.00039377935188568354, + "loss": 1.562, + "step": 6405 + }, + { + "epoch": 0.5812012338958447, + "grad_norm": 0.09998881062189459, + "learning_rate": 0.00039363578327543316, + "loss": 1.5797, + "step": 6406 + }, + { + "epoch": 0.5812919615314824, + "grad_norm": 0.09606278078179464, + "learning_rate": 0.00039349222384974837, + "loss": 1.5848, + "step": 6407 + }, + { + "epoch": 0.5813826891671203, + "grad_norm": 0.09421312584987891, + "learning_rate": 0.0003933486736210258, + "loss": 1.5637, + "step": 6408 + }, + { + "epoch": 0.5814734168027581, + "grad_norm": 0.09618159065154613, + "learning_rate": 0.0003932051326016611, + "loss": 1.5905, + "step": 6409 + }, + { + "epoch": 0.5815641444383959, + "grad_norm": 0.09979783108219502, + "learning_rate": 0.0003930616008040487, + "loss": 1.6012, + "step": 6410 + }, + { + "epoch": 0.5816548720740338, + "grad_norm": 0.09611960879191216, + "learning_rate": 0.000392918078240583, + "loss": 1.6195, + "step": 6411 + }, + { + "epoch": 0.5817455997096715, + "grad_norm": 0.10222824468834296, + "learning_rate": 0.00039277456492365693, + "loss": 1.5976, + "step": 6412 + }, + { + "epoch": 0.5818363273453094, + "grad_norm": 0.09835527728296968, + "learning_rate": 0.00039263106086566315, + "loss": 1.568, + "step": 6413 + }, + { + "epoch": 0.5819270549809472, + "grad_norm": 0.09609381592875582, + "learning_rate": 0.00039248756607899294, + "loss": 1.5514, + "step": 6414 + }, + { + "epoch": 0.582017782616585, + "grad_norm": 0.09627383057683186, + "learning_rate": 0.00039234408057603725, + "loss": 1.5889, + "step": 6415 + }, + { + "epoch": 0.5821085102522229, + "grad_norm": 0.09685326555142347, + "learning_rate": 0.0003922006043691862, + "loss": 1.5878, + "step": 6416 + }, + { + "epoch": 
0.5821992378878607, + "grad_norm": 0.09798205378741744, + "learning_rate": 0.0003920571374708288, + "loss": 1.5462, + "step": 6417 + }, + { + "epoch": 0.5822899655234984, + "grad_norm": 0.09433886299323029, + "learning_rate": 0.00039191367989335323, + "loss": 1.5938, + "step": 6418 + }, + { + "epoch": 0.5823806931591363, + "grad_norm": 0.09737032454388829, + "learning_rate": 0.0003917702316491476, + "loss": 1.5381, + "step": 6419 + }, + { + "epoch": 0.5824714207947741, + "grad_norm": 0.10196302821247698, + "learning_rate": 0.0003916267927505984, + "loss": 1.5919, + "step": 6420 + }, + { + "epoch": 0.5825621484304119, + "grad_norm": 0.09898152855749244, + "learning_rate": 0.0003914833632100916, + "loss": 1.5808, + "step": 6421 + }, + { + "epoch": 0.5826528760660498, + "grad_norm": 0.09840735852107076, + "learning_rate": 0.00039133994304001233, + "loss": 1.6168, + "step": 6422 + }, + { + "epoch": 0.5827436037016875, + "grad_norm": 0.09895900567080484, + "learning_rate": 0.00039119653225274515, + "loss": 1.5865, + "step": 6423 + }, + { + "epoch": 0.5828343313373253, + "grad_norm": 0.09496643526688926, + "learning_rate": 0.0003910531308606733, + "loss": 1.5265, + "step": 6424 + }, + { + "epoch": 0.5829250589729632, + "grad_norm": 0.09763951369025296, + "learning_rate": 0.00039090973887617964, + "loss": 1.6012, + "step": 6425 + }, + { + "epoch": 0.583015786608601, + "grad_norm": 0.09917678555489798, + "learning_rate": 0.000390766356311646, + "loss": 1.5884, + "step": 6426 + }, + { + "epoch": 0.5831065142442388, + "grad_norm": 0.10453630547685759, + "learning_rate": 0.0003906229831794538, + "loss": 1.5931, + "step": 6427 + }, + { + "epoch": 0.5831972418798766, + "grad_norm": 0.10076409109289552, + "learning_rate": 0.00039047961949198316, + "loss": 1.5727, + "step": 6428 + }, + { + "epoch": 0.5832879695155144, + "grad_norm": 0.10506909238021173, + "learning_rate": 0.00039033626526161337, + "loss": 1.5887, + "step": 6429 + }, + { + "epoch": 0.5833786971511522, + "grad_norm": 0.10370191761183888, + "learning_rate": 0.00039019292050072326, + "loss": 1.5743, + "step": 6430 + }, + { + "epoch": 0.5834694247867901, + "grad_norm": 0.09951377693509474, + "learning_rate": 0.0003900495852216908, + "loss": 1.5661, + "step": 6431 + }, + { + "epoch": 0.5835601524224279, + "grad_norm": 0.0991452923017671, + "learning_rate": 0.00038990625943689274, + "loss": 1.5572, + "step": 6432 + }, + { + "epoch": 0.5836508800580656, + "grad_norm": 0.09547224035440868, + "learning_rate": 0.00038976294315870526, + "loss": 1.5249, + "step": 6433 + }, + { + "epoch": 0.5837416076937035, + "grad_norm": 0.09820521144043363, + "learning_rate": 0.00038961963639950396, + "loss": 1.5569, + "step": 6434 + }, + { + "epoch": 0.5838323353293413, + "grad_norm": 0.10268783052432139, + "learning_rate": 0.00038947633917166345, + "loss": 1.5529, + "step": 6435 + }, + { + "epoch": 0.5839230629649791, + "grad_norm": 0.09661925543006804, + "learning_rate": 0.00038933305148755734, + "loss": 1.5793, + "step": 6436 + }, + { + "epoch": 0.584013790600617, + "grad_norm": 0.09528185274875108, + "learning_rate": 0.0003891897733595585, + "loss": 1.5898, + "step": 6437 + }, + { + "epoch": 0.5841045182362548, + "grad_norm": 0.09737660338991584, + "learning_rate": 0.0003890465048000391, + "loss": 1.5494, + "step": 6438 + }, + { + "epoch": 0.5841952458718925, + "grad_norm": 0.10302017471290949, + "learning_rate": 0.00038890324582137027, + "loss": 1.5456, + "step": 6439 + }, + { + "epoch": 0.5842859735075304, + "grad_norm": 0.10044927095127608, + 
"learning_rate": 0.00038875999643592257, + "loss": 1.5649, + "step": 6440 + }, + { + "epoch": 0.5843767011431682, + "grad_norm": 0.09871725275991854, + "learning_rate": 0.0003886167566560655, + "loss": 1.601, + "step": 6441 + }, + { + "epoch": 0.584467428778806, + "grad_norm": 0.0966820182040383, + "learning_rate": 0.00038847352649416805, + "loss": 1.5998, + "step": 6442 + }, + { + "epoch": 0.5845581564144439, + "grad_norm": 0.09631986679074986, + "learning_rate": 0.000388330305962598, + "loss": 1.5513, + "step": 6443 + }, + { + "epoch": 0.5846488840500816, + "grad_norm": 0.09434968017148854, + "learning_rate": 0.0003881870950737224, + "loss": 1.5599, + "step": 6444 + }, + { + "epoch": 0.5847396116857194, + "grad_norm": 0.10153060219446448, + "learning_rate": 0.00038804389383990777, + "loss": 1.6274, + "step": 6445 + }, + { + "epoch": 0.5848303393213573, + "grad_norm": 0.11934948311034084, + "learning_rate": 0.00038790070227351934, + "loss": 1.5492, + "step": 6446 + }, + { + "epoch": 0.5849210669569951, + "grad_norm": 0.10111382425903104, + "learning_rate": 0.00038775752038692167, + "loss": 1.5815, + "step": 6447 + }, + { + "epoch": 0.5850117945926329, + "grad_norm": 0.0962569181230766, + "learning_rate": 0.00038761434819247886, + "loss": 1.6018, + "step": 6448 + }, + { + "epoch": 0.5851025222282707, + "grad_norm": 0.10182534758970327, + "learning_rate": 0.00038747118570255345, + "loss": 1.5944, + "step": 6449 + }, + { + "epoch": 0.5851932498639085, + "grad_norm": 0.10149594824868746, + "learning_rate": 0.0003873280329295078, + "loss": 1.5575, + "step": 6450 + }, + { + "epoch": 0.5852839774995464, + "grad_norm": 0.09786906994378707, + "learning_rate": 0.0003871848898857031, + "loss": 1.5854, + "step": 6451 + }, + { + "epoch": 0.5853747051351842, + "grad_norm": 0.09448030355575575, + "learning_rate": 0.0003870417565834999, + "loss": 1.5396, + "step": 6452 + }, + { + "epoch": 0.585465432770822, + "grad_norm": 0.09650251721918757, + "learning_rate": 0.0003868986330352576, + "loss": 1.5953, + "step": 6453 + }, + { + "epoch": 0.5855561604064599, + "grad_norm": 0.09602825777279098, + "learning_rate": 0.000386755519253335, + "loss": 1.5844, + "step": 6454 + }, + { + "epoch": 0.5856468880420976, + "grad_norm": 0.09681424033700498, + "learning_rate": 0.0003866124152500901, + "loss": 1.5399, + "step": 6455 + }, + { + "epoch": 0.5857376156777354, + "grad_norm": 0.09826235308886978, + "learning_rate": 0.0003864693210378798, + "loss": 1.6109, + "step": 6456 + }, + { + "epoch": 0.5858283433133733, + "grad_norm": 0.09774664852806515, + "learning_rate": 0.0003863262366290602, + "loss": 1.5308, + "step": 6457 + }, + { + "epoch": 0.5859190709490111, + "grad_norm": 0.09785920996149589, + "learning_rate": 0.0003861831620359869, + "loss": 1.537, + "step": 6458 + }, + { + "epoch": 0.5860097985846489, + "grad_norm": 0.0994193995838299, + "learning_rate": 0.00038604009727101447, + "loss": 1.6233, + "step": 6459 + }, + { + "epoch": 0.5861005262202867, + "grad_norm": 0.10256848646637834, + "learning_rate": 0.0003858970423464963, + "loss": 1.6178, + "step": 6460 + }, + { + "epoch": 0.5861912538559245, + "grad_norm": 0.09878532603802534, + "learning_rate": 0.0003857539972747854, + "loss": 1.4921, + "step": 6461 + }, + { + "epoch": 0.5862819814915623, + "grad_norm": 0.10059372131616758, + "learning_rate": 0.0003856109620682337, + "loss": 1.5345, + "step": 6462 + }, + { + "epoch": 0.5863727091272002, + "grad_norm": 0.09791918579574511, + "learning_rate": 0.00038546793673919225, + "loss": 1.6021, + "step": 6463 + }, + { 
+ "epoch": 0.586463436762838, + "grad_norm": 0.10447243414559246, + "learning_rate": 0.0003853249213000113, + "loss": 1.6057, + "step": 6464 + }, + { + "epoch": 0.5865541643984757, + "grad_norm": 0.09586590280640875, + "learning_rate": 0.00038518191576304017, + "loss": 1.5755, + "step": 6465 + }, + { + "epoch": 0.5866448920341136, + "grad_norm": 0.09845967476952984, + "learning_rate": 0.0003850389201406277, + "loss": 1.5947, + "step": 6466 + }, + { + "epoch": 0.5867356196697514, + "grad_norm": 0.10183270396438354, + "learning_rate": 0.0003848959344451215, + "loss": 1.6234, + "step": 6467 + }, + { + "epoch": 0.5868263473053892, + "grad_norm": 0.09805511401641001, + "learning_rate": 0.0003847529586888683, + "loss": 1.5825, + "step": 6468 + }, + { + "epoch": 0.5869170749410271, + "grad_norm": 0.0950516312537717, + "learning_rate": 0.000384609992884214, + "loss": 1.5911, + "step": 6469 + }, + { + "epoch": 0.5870078025766648, + "grad_norm": 0.09629526177486245, + "learning_rate": 0.00038446703704350394, + "loss": 1.5295, + "step": 6470 + }, + { + "epoch": 0.5870985302123026, + "grad_norm": 0.09866593549855354, + "learning_rate": 0.0003843240911790822, + "loss": 1.568, + "step": 6471 + }, + { + "epoch": 0.5871892578479405, + "grad_norm": 0.09918058054378859, + "learning_rate": 0.0003841811553032921, + "loss": 1.6105, + "step": 6472 + }, + { + "epoch": 0.5872799854835783, + "grad_norm": 0.1031042752152229, + "learning_rate": 0.0003840382294284764, + "loss": 1.6594, + "step": 6473 + }, + { + "epoch": 0.5873707131192161, + "grad_norm": 0.10213678481600408, + "learning_rate": 0.0003838953135669768, + "loss": 1.5614, + "step": 6474 + }, + { + "epoch": 0.587461440754854, + "grad_norm": 0.10011226149471576, + "learning_rate": 0.00038375240773113386, + "loss": 1.582, + "step": 6475 + }, + { + "epoch": 0.5875521683904917, + "grad_norm": 0.10046655307226375, + "learning_rate": 0.0003836095119332876, + "loss": 1.5796, + "step": 6476 + }, + { + "epoch": 0.5876428960261295, + "grad_norm": 0.097502428858912, + "learning_rate": 0.0003834666261857774, + "loss": 1.5531, + "step": 6477 + }, + { + "epoch": 0.5877336236617674, + "grad_norm": 0.0984880279537328, + "learning_rate": 0.000383323750500941, + "loss": 1.6013, + "step": 6478 + }, + { + "epoch": 0.5878243512974052, + "grad_norm": 0.10072135121181604, + "learning_rate": 0.0003831808848911159, + "loss": 1.5498, + "step": 6479 + }, + { + "epoch": 0.587915078933043, + "grad_norm": 0.09684531100956696, + "learning_rate": 0.0003830380293686386, + "loss": 1.5735, + "step": 6480 + }, + { + "epoch": 0.5880058065686808, + "grad_norm": 0.09869413670245673, + "learning_rate": 0.00038289518394584484, + "loss": 1.5675, + "step": 6481 + }, + { + "epoch": 0.5880965342043186, + "grad_norm": 0.10380688508532059, + "learning_rate": 0.00038275234863506915, + "loss": 1.6292, + "step": 6482 + }, + { + "epoch": 0.5881872618399564, + "grad_norm": 0.09736322400055542, + "learning_rate": 0.00038260952344864533, + "loss": 1.5882, + "step": 6483 + }, + { + "epoch": 0.5882779894755943, + "grad_norm": 0.09656882207688791, + "learning_rate": 0.0003824667083989067, + "loss": 1.5757, + "step": 6484 + }, + { + "epoch": 0.5883687171112321, + "grad_norm": 0.10093278618996325, + "learning_rate": 0.00038232390349818496, + "loss": 1.5846, + "step": 6485 + }, + { + "epoch": 0.5884594447468698, + "grad_norm": 0.10257928060465614, + "learning_rate": 0.0003821811087588115, + "loss": 1.5713, + "step": 6486 + }, + { + "epoch": 0.5885501723825077, + "grad_norm": 0.10439983180619743, + "learning_rate": 
0.0003820383241931168, + "loss": 1.5582, + "step": 6487 + }, + { + "epoch": 0.5886409000181455, + "grad_norm": 0.10086767600376166, + "learning_rate": 0.0003818955498134299, + "loss": 1.596, + "step": 6488 + }, + { + "epoch": 0.5887316276537834, + "grad_norm": 0.10041184256721426, + "learning_rate": 0.0003817527856320798, + "loss": 1.5669, + "step": 6489 + }, + { + "epoch": 0.5888223552894212, + "grad_norm": 0.10019424241246537, + "learning_rate": 0.0003816100316613942, + "loss": 1.544, + "step": 6490 + }, + { + "epoch": 0.588913082925059, + "grad_norm": 0.10334790307639363, + "learning_rate": 0.00038146728791369985, + "loss": 1.5982, + "step": 6491 + }, + { + "epoch": 0.5890038105606968, + "grad_norm": 0.09919425182955029, + "learning_rate": 0.00038132455440132254, + "loss": 1.5847, + "step": 6492 + }, + { + "epoch": 0.5890945381963346, + "grad_norm": 0.09814769455302161, + "learning_rate": 0.0003811818311365874, + "loss": 1.5402, + "step": 6493 + }, + { + "epoch": 0.5891852658319724, + "grad_norm": 0.09508208246562602, + "learning_rate": 0.00038103911813181884, + "loss": 1.5625, + "step": 6494 + }, + { + "epoch": 0.5892759934676103, + "grad_norm": 0.10396619610987465, + "learning_rate": 0.00038089641539933985, + "loss": 1.5626, + "step": 6495 + }, + { + "epoch": 0.5893667211032481, + "grad_norm": 0.1010625503949126, + "learning_rate": 0.00038075372295147283, + "loss": 1.6116, + "step": 6496 + }, + { + "epoch": 0.5894574487388858, + "grad_norm": 0.10000631686697804, + "learning_rate": 0.0003806110408005395, + "loss": 1.5883, + "step": 6497 + }, + { + "epoch": 0.5895481763745237, + "grad_norm": 0.10020890744803851, + "learning_rate": 0.0003804683689588605, + "loss": 1.5631, + "step": 6498 + }, + { + "epoch": 0.5896389040101615, + "grad_norm": 0.1002441611318757, + "learning_rate": 0.00038032570743875535, + "loss": 1.5425, + "step": 6499 + }, + { + "epoch": 0.5897296316457993, + "grad_norm": 0.10456528749281929, + "learning_rate": 0.000380183056252543, + "loss": 1.5728, + "step": 6500 + }, + { + "epoch": 0.5898203592814372, + "grad_norm": 0.09936219484031975, + "learning_rate": 0.00038004041541254155, + "loss": 1.5837, + "step": 6501 + }, + { + "epoch": 0.589911086917075, + "grad_norm": 0.09778194139222923, + "learning_rate": 0.00037989778493106776, + "loss": 1.5727, + "step": 6502 + }, + { + "epoch": 0.5900018145527127, + "grad_norm": 0.09964006594486416, + "learning_rate": 0.0003797551648204379, + "loss": 1.557, + "step": 6503 + }, + { + "epoch": 0.5900925421883506, + "grad_norm": 0.09610773980136375, + "learning_rate": 0.00037961255509296723, + "loss": 1.5745, + "step": 6504 + }, + { + "epoch": 0.5901832698239884, + "grad_norm": 0.09820449118096511, + "learning_rate": 0.00037946995576097027, + "loss": 1.5557, + "step": 6505 + }, + { + "epoch": 0.5902739974596262, + "grad_norm": 0.09987714085455655, + "learning_rate": 0.0003793273668367605, + "loss": 1.5738, + "step": 6506 + }, + { + "epoch": 0.590364725095264, + "grad_norm": 0.1008654126112923, + "learning_rate": 0.0003791847883326503, + "loss": 1.6398, + "step": 6507 + }, + { + "epoch": 0.5904554527309018, + "grad_norm": 0.0956554024650181, + "learning_rate": 0.00037904222026095136, + "loss": 1.5464, + "step": 6508 + }, + { + "epoch": 0.5905461803665396, + "grad_norm": 0.09342264633191051, + "learning_rate": 0.00037889966263397474, + "loss": 1.5981, + "step": 6509 + }, + { + "epoch": 0.5906369080021775, + "grad_norm": 0.09549473599339013, + "learning_rate": 0.0003787571154640299, + "loss": 1.5521, + "step": 6510 + }, + { + "epoch": 
0.5907276356378153, + "grad_norm": 0.09841830261166072, + "learning_rate": 0.00037861457876342596, + "loss": 1.5844, + "step": 6511 + }, + { + "epoch": 0.590818363273453, + "grad_norm": 0.10036223964133732, + "learning_rate": 0.00037847205254447114, + "loss": 1.6012, + "step": 6512 + }, + { + "epoch": 0.5909090909090909, + "grad_norm": 0.09622030586209444, + "learning_rate": 0.00037832953681947256, + "loss": 1.5716, + "step": 6513 + }, + { + "epoch": 0.5909998185447287, + "grad_norm": 0.09858174350740559, + "learning_rate": 0.00037818703160073634, + "loss": 1.586, + "step": 6514 + }, + { + "epoch": 0.5910905461803665, + "grad_norm": 0.10011178583932319, + "learning_rate": 0.0003780445369005679, + "loss": 1.593, + "step": 6515 + }, + { + "epoch": 0.5911812738160044, + "grad_norm": 0.10009525635136832, + "learning_rate": 0.00037790205273127177, + "loss": 1.558, + "step": 6516 + }, + { + "epoch": 0.5912720014516422, + "grad_norm": 0.10013960494394956, + "learning_rate": 0.00037775957910515123, + "loss": 1.5312, + "step": 6517 + }, + { + "epoch": 0.5913627290872799, + "grad_norm": 0.09690908361662627, + "learning_rate": 0.0003776171160345091, + "loss": 1.5955, + "step": 6518 + }, + { + "epoch": 0.5914534567229178, + "grad_norm": 0.09624259530583816, + "learning_rate": 0.00037747466353164693, + "loss": 1.5851, + "step": 6519 + }, + { + "epoch": 0.5915441843585556, + "grad_norm": 0.1010203351623091, + "learning_rate": 0.00037733222160886583, + "loss": 1.5772, + "step": 6520 + }, + { + "epoch": 0.5916349119941934, + "grad_norm": 0.10187458668999612, + "learning_rate": 0.0003771897902784654, + "loss": 1.5503, + "step": 6521 + }, + { + "epoch": 0.5917256396298313, + "grad_norm": 0.09583762041002658, + "learning_rate": 0.00037704736955274467, + "loss": 1.5558, + "step": 6522 + }, + { + "epoch": 0.591816367265469, + "grad_norm": 0.0977640414988729, + "learning_rate": 0.00037690495944400174, + "loss": 1.5936, + "step": 6523 + }, + { + "epoch": 0.5919070949011068, + "grad_norm": 0.09942199610366168, + "learning_rate": 0.0003767625599645336, + "loss": 1.6088, + "step": 6524 + }, + { + "epoch": 0.5919978225367447, + "grad_norm": 0.09732513428670106, + "learning_rate": 0.00037662017112663666, + "loss": 1.5842, + "step": 6525 + }, + { + "epoch": 0.5920885501723825, + "grad_norm": 0.10027970818480145, + "learning_rate": 0.0003764777929426062, + "loss": 1.5642, + "step": 6526 + }, + { + "epoch": 0.5921792778080204, + "grad_norm": 0.09949612261401111, + "learning_rate": 0.00037633542542473623, + "loss": 1.56, + "step": 6527 + }, + { + "epoch": 0.5922700054436582, + "grad_norm": 0.1008755343798805, + "learning_rate": 0.0003761930685853207, + "loss": 1.5961, + "step": 6528 + }, + { + "epoch": 0.5923607330792959, + "grad_norm": 0.09460417044005727, + "learning_rate": 0.00037605072243665196, + "loss": 1.5526, + "step": 6529 + }, + { + "epoch": 0.5924514607149338, + "grad_norm": 0.09897041614307582, + "learning_rate": 0.00037590838699102163, + "loss": 1.5573, + "step": 6530 + }, + { + "epoch": 0.5925421883505716, + "grad_norm": 0.09718536124371131, + "learning_rate": 0.0003757660622607203, + "loss": 1.6009, + "step": 6531 + }, + { + "epoch": 0.5926329159862094, + "grad_norm": 0.09400831383144723, + "learning_rate": 0.0003756237482580378, + "loss": 1.591, + "step": 6532 + }, + { + "epoch": 0.5927236436218473, + "grad_norm": 0.09370221070413995, + "learning_rate": 0.0003754814449952631, + "loss": 1.5547, + "step": 6533 + }, + { + "epoch": 0.592814371257485, + "grad_norm": 0.09566299568796442, + "learning_rate": 
0.0003753391524846839, + "loss": 1.4783, + "step": 6534 + }, + { + "epoch": 0.5929050988931228, + "grad_norm": 0.0981737608506663, + "learning_rate": 0.00037519687073858703, + "loss": 1.5919, + "step": 6535 + }, + { + "epoch": 0.5929958265287607, + "grad_norm": 0.1009236788090539, + "learning_rate": 0.000375054599769259, + "loss": 1.5494, + "step": 6536 + }, + { + "epoch": 0.5930865541643985, + "grad_norm": 0.09875332656519621, + "learning_rate": 0.00037491233958898473, + "loss": 1.5961, + "step": 6537 + }, + { + "epoch": 0.5931772818000363, + "grad_norm": 0.09670938549976908, + "learning_rate": 0.00037477009021004827, + "loss": 1.6159, + "step": 6538 + }, + { + "epoch": 0.5932680094356741, + "grad_norm": 0.09814544258932602, + "learning_rate": 0.000374627851644733, + "loss": 1.5979, + "step": 6539 + }, + { + "epoch": 0.5933587370713119, + "grad_norm": 0.09674073186511299, + "learning_rate": 0.0003744856239053213, + "loss": 1.5825, + "step": 6540 + }, + { + "epoch": 0.5934494647069497, + "grad_norm": 0.098278456998982, + "learning_rate": 0.0003743434070040944, + "loss": 1.5707, + "step": 6541 + }, + { + "epoch": 0.5935401923425876, + "grad_norm": 0.10050297763448666, + "learning_rate": 0.0003742012009533328, + "loss": 1.5765, + "step": 6542 + }, + { + "epoch": 0.5936309199782254, + "grad_norm": 0.12877493306235596, + "learning_rate": 0.00037405900576531595, + "loss": 1.5762, + "step": 6543 + }, + { + "epoch": 0.5937216476138631, + "grad_norm": 0.10323006992624516, + "learning_rate": 0.00037391682145232263, + "loss": 1.6, + "step": 6544 + }, + { + "epoch": 0.593812375249501, + "grad_norm": 0.09396250719716079, + "learning_rate": 0.0003737746480266304, + "loss": 1.5718, + "step": 6545 + }, + { + "epoch": 0.5939031028851388, + "grad_norm": 0.09815045645274019, + "learning_rate": 0.00037363248550051586, + "loss": 1.578, + "step": 6546 + }, + { + "epoch": 0.5939938305207766, + "grad_norm": 0.09532114318862243, + "learning_rate": 0.0003734903338862549, + "loss": 1.5772, + "step": 6547 + }, + { + "epoch": 0.5940845581564145, + "grad_norm": 0.09772033389643392, + "learning_rate": 0.00037334819319612226, + "loss": 1.5832, + "step": 6548 + }, + { + "epoch": 0.5941752857920523, + "grad_norm": 0.09915133825521831, + "learning_rate": 0.0003732060634423917, + "loss": 1.6058, + "step": 6549 + }, + { + "epoch": 0.59426601342769, + "grad_norm": 0.09898673368965981, + "learning_rate": 0.00037306394463733616, + "loss": 1.5574, + "step": 6550 + }, + { + "epoch": 0.5943567410633279, + "grad_norm": 0.09796797756719591, + "learning_rate": 0.0003729218367932278, + "loss": 1.5799, + "step": 6551 + }, + { + "epoch": 0.5944474686989657, + "grad_norm": 0.09524891953454721, + "learning_rate": 0.0003727797399223377, + "loss": 1.5674, + "step": 6552 + }, + { + "epoch": 0.5945381963346035, + "grad_norm": 0.09827812827331583, + "learning_rate": 0.0003726376540369357, + "loss": 1.5921, + "step": 6553 + }, + { + "epoch": 0.5946289239702414, + "grad_norm": 0.10079641292653208, + "learning_rate": 0.000372495579149291, + "loss": 1.5718, + "step": 6554 + }, + { + "epoch": 0.5947196516058791, + "grad_norm": 0.0969204261222264, + "learning_rate": 0.000372353515271672, + "loss": 1.5649, + "step": 6555 + }, + { + "epoch": 0.5948103792415169, + "grad_norm": 0.10357498900818331, + "learning_rate": 0.0003722114624163456, + "loss": 1.5758, + "step": 6556 + }, + { + "epoch": 0.5949011068771548, + "grad_norm": 0.09860732825184824, + "learning_rate": 0.0003720694205955784, + "loss": 1.5858, + "step": 6557 + }, + { + "epoch": 
0.5949918345127926, + "grad_norm": 0.10004453487510093, + "learning_rate": 0.00037192738982163534, + "loss": 1.616, + "step": 6558 + }, + { + "epoch": 0.5950825621484304, + "grad_norm": 0.09995012219439069, + "learning_rate": 0.0003717853701067813, + "loss": 1.6168, + "step": 6559 + }, + { + "epoch": 0.5951732897840682, + "grad_norm": 0.09763178236259035, + "learning_rate": 0.00037164336146327924, + "loss": 1.562, + "step": 6560 + }, + { + "epoch": 0.595264017419706, + "grad_norm": 0.09993938490186433, + "learning_rate": 0.00037150136390339195, + "loss": 1.5999, + "step": 6561 + }, + { + "epoch": 0.5953547450553438, + "grad_norm": 0.09631338206058532, + "learning_rate": 0.00037135937743938094, + "loss": 1.5928, + "step": 6562 + }, + { + "epoch": 0.5954454726909817, + "grad_norm": 0.09807416144321036, + "learning_rate": 0.00037121740208350654, + "loss": 1.615, + "step": 6563 + }, + { + "epoch": 0.5955362003266195, + "grad_norm": 0.10033650740390797, + "learning_rate": 0.0003710754378480284, + "loss": 1.5631, + "step": 6564 + }, + { + "epoch": 0.5956269279622574, + "grad_norm": 0.09890909728934863, + "learning_rate": 0.0003709334847452053, + "loss": 1.6055, + "step": 6565 + }, + { + "epoch": 0.5957176555978951, + "grad_norm": 0.09917340966881204, + "learning_rate": 0.00037079154278729464, + "loss": 1.6176, + "step": 6566 + }, + { + "epoch": 0.5958083832335329, + "grad_norm": 0.09817940073534019, + "learning_rate": 0.0003706496119865534, + "loss": 1.5906, + "step": 6567 + }, + { + "epoch": 0.5958991108691708, + "grad_norm": 0.09622107658298824, + "learning_rate": 0.0003705076923552372, + "loss": 1.6302, + "step": 6568 + }, + { + "epoch": 0.5959898385048086, + "grad_norm": 0.09786882832132415, + "learning_rate": 0.00037036578390560107, + "loss": 1.6035, + "step": 6569 + }, + { + "epoch": 0.5960805661404464, + "grad_norm": 0.10025475519577731, + "learning_rate": 0.00037022388664989834, + "loss": 1.5463, + "step": 6570 + }, + { + "epoch": 0.5961712937760842, + "grad_norm": 0.0956318551652879, + "learning_rate": 0.0003700820006003822, + "loss": 1.5528, + "step": 6571 + }, + { + "epoch": 0.596262021411722, + "grad_norm": 0.09590994051376509, + "learning_rate": 0.00036994012576930456, + "loss": 1.5551, + "step": 6572 + }, + { + "epoch": 0.5963527490473598, + "grad_norm": 0.09541135314858931, + "learning_rate": 0.00036979826216891615, + "loss": 1.5476, + "step": 6573 + }, + { + "epoch": 0.5964434766829977, + "grad_norm": 0.09636810687385215, + "learning_rate": 0.0003696564098114669, + "loss": 1.607, + "step": 6574 + }, + { + "epoch": 0.5965342043186355, + "grad_norm": 0.09449960359519818, + "learning_rate": 0.0003695145687092059, + "loss": 1.5403, + "step": 6575 + }, + { + "epoch": 0.5966249319542732, + "grad_norm": 0.09801934600787876, + "learning_rate": 0.0003693727388743813, + "loss": 1.6002, + "step": 6576 + }, + { + "epoch": 0.5967156595899111, + "grad_norm": 0.09786411973429344, + "learning_rate": 0.0003692309203192398, + "loss": 1.6003, + "step": 6577 + }, + { + "epoch": 0.5968063872255489, + "grad_norm": 0.09506992481658763, + "learning_rate": 0.0003690891130560276, + "loss": 1.5659, + "step": 6578 + }, + { + "epoch": 0.5968971148611867, + "grad_norm": 0.09836692967177493, + "learning_rate": 0.00036894731709698995, + "loss": 1.5786, + "step": 6579 + }, + { + "epoch": 0.5969878424968246, + "grad_norm": 0.09884145013104861, + "learning_rate": 0.00036880553245437057, + "loss": 1.6104, + "step": 6580 + }, + { + "epoch": 0.5970785701324623, + "grad_norm": 0.09875167729735498, + "learning_rate": 
0.0003686637591404129, + "loss": 1.5515, + "step": 6581 + }, + { + "epoch": 0.5971692977681001, + "grad_norm": 0.10145741714055505, + "learning_rate": 0.00036852199716735885, + "loss": 1.6005, + "step": 6582 + }, + { + "epoch": 0.597260025403738, + "grad_norm": 0.09757278701786805, + "learning_rate": 0.00036838024654744985, + "loss": 1.5848, + "step": 6583 + }, + { + "epoch": 0.5973507530393758, + "grad_norm": 0.09759836806149684, + "learning_rate": 0.00036823850729292594, + "loss": 1.5836, + "step": 6584 + }, + { + "epoch": 0.5974414806750136, + "grad_norm": 0.09965069075907178, + "learning_rate": 0.0003680967794160264, + "loss": 1.5502, + "step": 6585 + }, + { + "epoch": 0.5975322083106515, + "grad_norm": 0.09820706814566071, + "learning_rate": 0.00036795506292898933, + "loss": 1.5149, + "step": 6586 + }, + { + "epoch": 0.5976229359462892, + "grad_norm": 0.10344396644195272, + "learning_rate": 0.0003678133578440521, + "loss": 1.6042, + "step": 6587 + }, + { + "epoch": 0.597713663581927, + "grad_norm": 0.09369483628621288, + "learning_rate": 0.0003676716641734509, + "loss": 1.5367, + "step": 6588 + }, + { + "epoch": 0.5978043912175649, + "grad_norm": 0.09884305799426123, + "learning_rate": 0.0003675299819294208, + "loss": 1.5843, + "step": 6589 + }, + { + "epoch": 0.5978951188532027, + "grad_norm": 0.09751132511732052, + "learning_rate": 0.00036738831112419646, + "loss": 1.5531, + "step": 6590 + }, + { + "epoch": 0.5979858464888405, + "grad_norm": 0.10025011415620463, + "learning_rate": 0.00036724665177001106, + "loss": 1.5508, + "step": 6591 + }, + { + "epoch": 0.5980765741244783, + "grad_norm": 0.09720116359777793, + "learning_rate": 0.00036710500387909675, + "loss": 1.6256, + "step": 6592 + }, + { + "epoch": 0.5981673017601161, + "grad_norm": 0.09321672756415952, + "learning_rate": 0.00036696336746368496, + "loss": 1.551, + "step": 6593 + }, + { + "epoch": 0.5982580293957539, + "grad_norm": 0.09899142995908759, + "learning_rate": 0.0003668217425360061, + "loss": 1.5831, + "step": 6594 + }, + { + "epoch": 0.5983487570313918, + "grad_norm": 0.09774799167970055, + "learning_rate": 0.0003666801291082893, + "loss": 1.5995, + "step": 6595 + }, + { + "epoch": 0.5984394846670296, + "grad_norm": 0.09782339225659939, + "learning_rate": 0.00036653852719276293, + "loss": 1.5978, + "step": 6596 + }, + { + "epoch": 0.5985302123026673, + "grad_norm": 0.09668833348050113, + "learning_rate": 0.00036639693680165443, + "loss": 1.5785, + "step": 6597 + }, + { + "epoch": 0.5986209399383052, + "grad_norm": 0.0959590669850797, + "learning_rate": 0.00036625535794719024, + "loss": 1.5614, + "step": 6598 + }, + { + "epoch": 0.598711667573943, + "grad_norm": 0.09516021692167755, + "learning_rate": 0.0003661137906415955, + "loss": 1.6054, + "step": 6599 + }, + { + "epoch": 0.5988023952095808, + "grad_norm": 0.09823561705700866, + "learning_rate": 0.0003659722348970947, + "loss": 1.5809, + "step": 6600 + }, + { + "epoch": 0.5988931228452187, + "grad_norm": 0.0999107182738511, + "learning_rate": 0.00036583069072591124, + "loss": 1.603, + "step": 6601 + }, + { + "epoch": 0.5989838504808565, + "grad_norm": 0.09897088527459723, + "learning_rate": 0.00036568915814026736, + "loss": 1.5913, + "step": 6602 + }, + { + "epoch": 0.5990745781164943, + "grad_norm": 0.09819717959265607, + "learning_rate": 0.0003655476371523845, + "loss": 1.5876, + "step": 6603 + }, + { + "epoch": 0.5991653057521321, + "grad_norm": 0.1027405586834814, + "learning_rate": 0.00036540612777448303, + "loss": 1.5909, + "step": 6604 + }, + { + "epoch": 
0.5992560333877699, + "grad_norm": 0.09902475573328715, + "learning_rate": 0.000365264630018782, + "loss": 1.5978, + "step": 6605 + }, + { + "epoch": 0.5993467610234078, + "grad_norm": 0.09816451794383498, + "learning_rate": 0.0003651231438975002, + "loss": 1.5889, + "step": 6606 + }, + { + "epoch": 0.5994374886590456, + "grad_norm": 0.0969139506269526, + "learning_rate": 0.00036498166942285483, + "loss": 1.5649, + "step": 6607 + }, + { + "epoch": 0.5995282162946833, + "grad_norm": 0.09801180014483309, + "learning_rate": 0.0003648402066070623, + "loss": 1.5679, + "step": 6608 + }, + { + "epoch": 0.5996189439303212, + "grad_norm": 0.0959600138214165, + "learning_rate": 0.00036469875546233765, + "loss": 1.5748, + "step": 6609 + }, + { + "epoch": 0.599709671565959, + "grad_norm": 0.09455444778585005, + "learning_rate": 0.0003645573160008955, + "loss": 1.5688, + "step": 6610 + }, + { + "epoch": 0.5998003992015968, + "grad_norm": 0.10059419397543155, + "learning_rate": 0.0003644158882349492, + "loss": 1.5605, + "step": 6611 + }, + { + "epoch": 0.5998911268372347, + "grad_norm": 0.09847730264943465, + "learning_rate": 0.00036427447217671075, + "loss": 1.6161, + "step": 6612 + }, + { + "epoch": 0.5999818544728724, + "grad_norm": 0.09718536473945842, + "learning_rate": 0.0003641330678383916, + "loss": 1.5703, + "step": 6613 + }, + { + "epoch": 0.6000725821085102, + "grad_norm": 0.10046901079734798, + "learning_rate": 0.0003639916752322021, + "loss": 1.5679, + "step": 6614 + }, + { + "epoch": 0.6001633097441481, + "grad_norm": 0.09595042753848368, + "learning_rate": 0.00036385029437035153, + "loss": 1.5779, + "step": 6615 + }, + { + "epoch": 0.6002540373797859, + "grad_norm": 0.10232471160970068, + "learning_rate": 0.00036370892526504825, + "loss": 1.5412, + "step": 6616 + }, + { + "epoch": 0.6003447650154237, + "grad_norm": 0.09847874341276028, + "learning_rate": 0.00036356756792849923, + "loss": 1.5558, + "step": 6617 + }, + { + "epoch": 0.6004354926510616, + "grad_norm": 0.09880001703057825, + "learning_rate": 0.0003634262223729109, + "loss": 1.5489, + "step": 6618 + }, + { + "epoch": 0.6005262202866993, + "grad_norm": 0.10201689281359766, + "learning_rate": 0.00036328488861048827, + "loss": 1.5856, + "step": 6619 + }, + { + "epoch": 0.6006169479223371, + "grad_norm": 0.09747608077737369, + "learning_rate": 0.0003631435666534357, + "loss": 1.5316, + "step": 6620 + }, + { + "epoch": 0.600707675557975, + "grad_norm": 0.09543162452695995, + "learning_rate": 0.00036300225651395625, + "loss": 1.5175, + "step": 6621 + }, + { + "epoch": 0.6007984031936128, + "grad_norm": 0.09665398091631075, + "learning_rate": 0.0003628609582042521, + "loss": 1.5508, + "step": 6622 + }, + { + "epoch": 0.6008891308292506, + "grad_norm": 0.09913029416294662, + "learning_rate": 0.0003627196717365247, + "loss": 1.5662, + "step": 6623 + }, + { + "epoch": 0.6009798584648884, + "grad_norm": 0.0950086970886493, + "learning_rate": 0.0003625783971229737, + "loss": 1.5563, + "step": 6624 + }, + { + "epoch": 0.6010705861005262, + "grad_norm": 0.09351183889504076, + "learning_rate": 0.00036243713437579837, + "loss": 1.5853, + "step": 6625 + }, + { + "epoch": 0.601161313736164, + "grad_norm": 0.1001302559713075, + "learning_rate": 0.0003622958835071969, + "loss": 1.5588, + "step": 6626 + }, + { + "epoch": 0.6012520413718019, + "grad_norm": 0.1000805243262128, + "learning_rate": 0.000362154644529366, + "loss": 1.5708, + "step": 6627 + }, + { + "epoch": 0.6013427690074397, + "grad_norm": 0.09896619467963567, + "learning_rate": 
0.00036201341745450185, + "loss": 1.5949, + "step": 6628 + }, + { + "epoch": 0.6014334966430774, + "grad_norm": 0.09626888176086983, + "learning_rate": 0.00036187220229479946, + "loss": 1.5752, + "step": 6629 + }, + { + "epoch": 0.6015242242787153, + "grad_norm": 0.09974461419222586, + "learning_rate": 0.0003617309990624529, + "loss": 1.6222, + "step": 6630 + }, + { + "epoch": 0.6016149519143531, + "grad_norm": 0.09571676448250643, + "learning_rate": 0.00036158980776965487, + "loss": 1.5881, + "step": 6631 + }, + { + "epoch": 0.6017056795499909, + "grad_norm": 0.09660668060321402, + "learning_rate": 0.00036144862842859725, + "loss": 1.5961, + "step": 6632 + }, + { + "epoch": 0.6017964071856288, + "grad_norm": 0.09899084243810531, + "learning_rate": 0.0003613074610514712, + "loss": 1.5857, + "step": 6633 + }, + { + "epoch": 0.6018871348212665, + "grad_norm": 0.0993765579520921, + "learning_rate": 0.00036116630565046615, + "loss": 1.5787, + "step": 6634 + }, + { + "epoch": 0.6019778624569043, + "grad_norm": 0.0983958195634984, + "learning_rate": 0.00036102516223777113, + "loss": 1.5763, + "step": 6635 + }, + { + "epoch": 0.6020685900925422, + "grad_norm": 0.10261770104560994, + "learning_rate": 0.0003608840308255737, + "loss": 1.579, + "step": 6636 + }, + { + "epoch": 0.60215931772818, + "grad_norm": 0.09520537414114945, + "learning_rate": 0.0003607429114260609, + "loss": 1.5605, + "step": 6637 + }, + { + "epoch": 0.6022500453638178, + "grad_norm": 0.10165567093962477, + "learning_rate": 0.00036060180405141816, + "loss": 1.5787, + "step": 6638 + }, + { + "epoch": 0.6023407729994557, + "grad_norm": 0.09481538472095397, + "learning_rate": 0.0003604607087138302, + "loss": 1.5587, + "step": 6639 + }, + { + "epoch": 0.6024315006350934, + "grad_norm": 0.1031017701958424, + "learning_rate": 0.0003603196254254806, + "loss": 1.5603, + "step": 6640 + }, + { + "epoch": 0.6025222282707313, + "grad_norm": 0.10145956702372114, + "learning_rate": 0.00036017855419855196, + "loss": 1.6221, + "step": 6641 + }, + { + "epoch": 0.6026129559063691, + "grad_norm": 0.09940562968902555, + "learning_rate": 0.0003600374950452257, + "loss": 1.5764, + "step": 6642 + }, + { + "epoch": 0.6027036835420069, + "grad_norm": 0.09864711135769012, + "learning_rate": 0.0003598964479776825, + "loss": 1.5754, + "step": 6643 + }, + { + "epoch": 0.6027944111776448, + "grad_norm": 0.100535040672324, + "learning_rate": 0.0003597554130081014, + "loss": 1.5631, + "step": 6644 + }, + { + "epoch": 0.6028851388132825, + "grad_norm": 0.09847821883403039, + "learning_rate": 0.0003596143901486613, + "loss": 1.5717, + "step": 6645 + }, + { + "epoch": 0.6029758664489203, + "grad_norm": 0.0971183708141074, + "learning_rate": 0.00035947337941153915, + "loss": 1.6081, + "step": 6646 + }, + { + "epoch": 0.6030665940845582, + "grad_norm": 0.09867310686250964, + "learning_rate": 0.0003593323808089116, + "loss": 1.5837, + "step": 6647 + }, + { + "epoch": 0.603157321720196, + "grad_norm": 0.09974557615701184, + "learning_rate": 0.0003591913943529536, + "loss": 1.5296, + "step": 6648 + }, + { + "epoch": 0.6032480493558338, + "grad_norm": 0.09569846249575945, + "learning_rate": 0.0003590504200558393, + "loss": 1.6093, + "step": 6649 + }, + { + "epoch": 0.6033387769914716, + "grad_norm": 0.09609035816669659, + "learning_rate": 0.0003589094579297423, + "loss": 1.5226, + "step": 6650 + }, + { + "epoch": 0.6034295046271094, + "grad_norm": 0.09845881385432893, + "learning_rate": 0.0003587685079868342, + "loss": 1.559, + "step": 6651 + }, + { + "epoch": 
0.6035202322627472, + "grad_norm": 0.09845653547296719, + "learning_rate": 0.00035862757023928616, + "loss": 1.6027, + "step": 6652 + }, + { + "epoch": 0.6036109598983851, + "grad_norm": 0.09489637668399041, + "learning_rate": 0.00035848664469926835, + "loss": 1.6184, + "step": 6653 + }, + { + "epoch": 0.6037016875340229, + "grad_norm": 0.0977692218021984, + "learning_rate": 0.0003583457313789497, + "loss": 1.6279, + "step": 6654 + }, + { + "epoch": 0.6037924151696606, + "grad_norm": 0.1008081531894733, + "learning_rate": 0.000358204830290498, + "loss": 1.5319, + "step": 6655 + }, + { + "epoch": 0.6038831428052985, + "grad_norm": 0.10195914257536415, + "learning_rate": 0.0003580639414460801, + "loss": 1.5806, + "step": 6656 + }, + { + "epoch": 0.6039738704409363, + "grad_norm": 0.09698176419983082, + "learning_rate": 0.0003579230648578619, + "loss": 1.567, + "step": 6657 + }, + { + "epoch": 0.6040645980765741, + "grad_norm": 0.09892021227325014, + "learning_rate": 0.0003577822005380078, + "loss": 1.5916, + "step": 6658 + }, + { + "epoch": 0.604155325712212, + "grad_norm": 0.0976748558019307, + "learning_rate": 0.0003576413484986817, + "loss": 1.585, + "step": 6659 + }, + { + "epoch": 0.6042460533478498, + "grad_norm": 0.09442750600844149, + "learning_rate": 0.000357500508752046, + "loss": 1.5908, + "step": 6660 + }, + { + "epoch": 0.6043367809834875, + "grad_norm": 0.09552765174830512, + "learning_rate": 0.00035735968131026246, + "loss": 1.5563, + "step": 6661 + }, + { + "epoch": 0.6044275086191254, + "grad_norm": 0.09497702944182035, + "learning_rate": 0.00035721886618549157, + "loss": 1.5348, + "step": 6662 + }, + { + "epoch": 0.6045182362547632, + "grad_norm": 0.10033778178676236, + "learning_rate": 0.00035707806338989255, + "loss": 1.5743, + "step": 6663 + }, + { + "epoch": 0.604608963890401, + "grad_norm": 0.10023104516549407, + "learning_rate": 0.0003569372729356239, + "loss": 1.5531, + "step": 6664 + }, + { + "epoch": 0.6046996915260389, + "grad_norm": 0.10416113986491762, + "learning_rate": 0.00035679649483484285, + "loss": 1.5824, + "step": 6665 + }, + { + "epoch": 0.6047904191616766, + "grad_norm": 0.09347879053613528, + "learning_rate": 0.00035665572909970546, + "loss": 1.5946, + "step": 6666 + }, + { + "epoch": 0.6048811467973144, + "grad_norm": 0.09636898790356968, + "learning_rate": 0.0003565149757423669, + "loss": 1.5953, + "step": 6667 + }, + { + "epoch": 0.6049718744329523, + "grad_norm": 0.1011745938890222, + "learning_rate": 0.0003563742347749814, + "loss": 1.5975, + "step": 6668 + }, + { + "epoch": 0.6050626020685901, + "grad_norm": 0.09799913471704133, + "learning_rate": 0.00035623350620970205, + "loss": 1.591, + "step": 6669 + }, + { + "epoch": 0.6051533297042279, + "grad_norm": 0.09755198823654161, + "learning_rate": 0.0003560927900586805, + "loss": 1.5724, + "step": 6670 + }, + { + "epoch": 0.6052440573398657, + "grad_norm": 0.09838721309013954, + "learning_rate": 0.00035595208633406775, + "loss": 1.5891, + "step": 6671 + }, + { + "epoch": 0.6053347849755035, + "grad_norm": 0.09663257921406884, + "learning_rate": 0.00035581139504801366, + "loss": 1.5744, + "step": 6672 + }, + { + "epoch": 0.6054255126111413, + "grad_norm": 0.09927032192130811, + "learning_rate": 0.0003556707162126668, + "loss": 1.5917, + "step": 6673 + }, + { + "epoch": 0.6055162402467792, + "grad_norm": 0.09429681065918537, + "learning_rate": 0.0003555300498401748, + "loss": 1.5914, + "step": 6674 + }, + { + "epoch": 0.605606967882417, + "grad_norm": 0.09712393484549071, + "learning_rate": 
0.0003553893959426842, + "loss": 1.5246, + "step": 6675 + }, + { + "epoch": 0.6056976955180547, + "grad_norm": 0.09673303883489177, + "learning_rate": 0.00035524875453234086, + "loss": 1.4996, + "step": 6676 + }, + { + "epoch": 0.6057884231536926, + "grad_norm": 0.10031057354628689, + "learning_rate": 0.0003551081256212887, + "loss": 1.6126, + "step": 6677 + }, + { + "epoch": 0.6058791507893304, + "grad_norm": 0.09541099817365166, + "learning_rate": 0.0003549675092216713, + "loss": 1.5711, + "step": 6678 + }, + { + "epoch": 0.6059698784249683, + "grad_norm": 0.09765663489895553, + "learning_rate": 0.0003548269053456309, + "loss": 1.5625, + "step": 6679 + }, + { + "epoch": 0.6060606060606061, + "grad_norm": 0.0950841102049741, + "learning_rate": 0.00035468631400530857, + "loss": 1.5628, + "step": 6680 + }, + { + "epoch": 0.6061513336962439, + "grad_norm": 0.09771166338385535, + "learning_rate": 0.00035454573521284444, + "loss": 1.5485, + "step": 6681 + }, + { + "epoch": 0.6062420613318817, + "grad_norm": 0.09895664108818744, + "learning_rate": 0.0003544051689803776, + "loss": 1.5567, + "step": 6682 + }, + { + "epoch": 0.6063327889675195, + "grad_norm": 0.09907526896710604, + "learning_rate": 0.0003542646153200457, + "loss": 1.5657, + "step": 6683 + }, + { + "epoch": 0.6064235166031573, + "grad_norm": 0.09848219369745047, + "learning_rate": 0.00035412407424398584, + "loss": 1.5927, + "step": 6684 + }, + { + "epoch": 0.6065142442387952, + "grad_norm": 0.09798473993551222, + "learning_rate": 0.00035398354576433367, + "loss": 1.5664, + "step": 6685 + }, + { + "epoch": 0.606604971874433, + "grad_norm": 0.09718872443731098, + "learning_rate": 0.0003538430298932239, + "loss": 1.5432, + "step": 6686 + }, + { + "epoch": 0.6066956995100707, + "grad_norm": 0.09897267727913434, + "learning_rate": 0.00035370252664278993, + "loss": 1.5391, + "step": 6687 + }, + { + "epoch": 0.6067864271457086, + "grad_norm": 0.10228752940683172, + "learning_rate": 0.00035356203602516443, + "loss": 1.6101, + "step": 6688 + }, + { + "epoch": 0.6068771547813464, + "grad_norm": 0.10136975137218322, + "learning_rate": 0.00035342155805247877, + "loss": 1.5832, + "step": 6689 + }, + { + "epoch": 0.6069678824169842, + "grad_norm": 0.09617872944032069, + "learning_rate": 0.00035328109273686305, + "loss": 1.5676, + "step": 6690 + }, + { + "epoch": 0.6070586100526221, + "grad_norm": 0.10301925680628432, + "learning_rate": 0.0003531406400904465, + "loss": 1.5789, + "step": 6691 + }, + { + "epoch": 0.6071493376882598, + "grad_norm": 0.09846456387461644, + "learning_rate": 0.00035300020012535744, + "loss": 1.5867, + "step": 6692 + }, + { + "epoch": 0.6072400653238976, + "grad_norm": 0.09772407705154719, + "learning_rate": 0.0003528597728537227, + "loss": 1.5671, + "step": 6693 + }, + { + "epoch": 0.6073307929595355, + "grad_norm": 0.10202858816884512, + "learning_rate": 0.00035271935828766845, + "loss": 1.577, + "step": 6694 + }, + { + "epoch": 0.6074215205951733, + "grad_norm": 0.09602025062840837, + "learning_rate": 0.0003525789564393192, + "loss": 1.5412, + "step": 6695 + }, + { + "epoch": 0.6075122482308111, + "grad_norm": 0.09667004558326316, + "learning_rate": 0.00035243856732079885, + "loss": 1.562, + "step": 6696 + }, + { + "epoch": 0.607602975866449, + "grad_norm": 0.09673751511871627, + "learning_rate": 0.0003522981909442299, + "loss": 1.5772, + "step": 6697 + }, + { + "epoch": 0.6076937035020867, + "grad_norm": 0.10091392634386445, + "learning_rate": 0.000352157827321734, + "loss": 1.5797, + "step": 6698 + }, + { + "epoch": 
0.6077844311377245, + "grad_norm": 0.10069642405331568, + "learning_rate": 0.0003520174764654313, + "loss": 1.6057, + "step": 6699 + }, + { + "epoch": 0.6078751587733624, + "grad_norm": 0.09841790413337513, + "learning_rate": 0.0003518771383874414, + "loss": 1.5934, + "step": 6700 + }, + { + "epoch": 0.6079658864090002, + "grad_norm": 0.09966298741972265, + "learning_rate": 0.00035173681309988257, + "loss": 1.5306, + "step": 6701 + }, + { + "epoch": 0.608056614044638, + "grad_norm": 0.10060093202489515, + "learning_rate": 0.0003515965006148717, + "loss": 1.6094, + "step": 6702 + }, + { + "epoch": 0.6081473416802758, + "grad_norm": 0.09682102903397083, + "learning_rate": 0.0003514562009445248, + "loss": 1.5615, + "step": 6703 + }, + { + "epoch": 0.6082380693159136, + "grad_norm": 0.09485143216628818, + "learning_rate": 0.000351315914100957, + "loss": 1.5647, + "step": 6704 + }, + { + "epoch": 0.6083287969515514, + "grad_norm": 0.10243455644484092, + "learning_rate": 0.0003511756400962818, + "loss": 1.6041, + "step": 6705 + }, + { + "epoch": 0.6084195245871893, + "grad_norm": 0.10073598221861653, + "learning_rate": 0.00035103537894261197, + "loss": 1.5548, + "step": 6706 + }, + { + "epoch": 0.6085102522228271, + "grad_norm": 0.10261567721741688, + "learning_rate": 0.0003508951306520592, + "loss": 1.5867, + "step": 6707 + }, + { + "epoch": 0.6086009798584648, + "grad_norm": 0.09867186292382119, + "learning_rate": 0.00035075489523673397, + "loss": 1.5703, + "step": 6708 + }, + { + "epoch": 0.6086917074941027, + "grad_norm": 0.09673607249106851, + "learning_rate": 0.0003506146727087454, + "loss": 1.5985, + "step": 6709 + }, + { + "epoch": 0.6087824351297405, + "grad_norm": 0.09948433539077099, + "learning_rate": 0.00035047446308020193, + "loss": 1.566, + "step": 6710 + }, + { + "epoch": 0.6088731627653783, + "grad_norm": 0.1069726942540809, + "learning_rate": 0.0003503342663632106, + "loss": 1.5813, + "step": 6711 + }, + { + "epoch": 0.6089638904010162, + "grad_norm": 0.09969567719701637, + "learning_rate": 0.0003501940825698774, + "loss": 1.6099, + "step": 6712 + }, + { + "epoch": 0.609054618036654, + "grad_norm": 0.09982601000102663, + "learning_rate": 0.0003500539117123073, + "loss": 1.5449, + "step": 6713 + }, + { + "epoch": 0.6091453456722917, + "grad_norm": 0.09498283407162501, + "learning_rate": 0.0003499137538026038, + "loss": 1.6026, + "step": 6714 + }, + { + "epoch": 0.6092360733079296, + "grad_norm": 0.09876160421050818, + "learning_rate": 0.0003497736088528701, + "loss": 1.6026, + "step": 6715 + }, + { + "epoch": 0.6093268009435674, + "grad_norm": 0.09794854676403798, + "learning_rate": 0.00034963347687520726, + "loss": 1.5912, + "step": 6716 + }, + { + "epoch": 0.6094175285792053, + "grad_norm": 0.1004962052896127, + "learning_rate": 0.00034949335788171584, + "loss": 1.6119, + "step": 6717 + }, + { + "epoch": 0.6095082562148431, + "grad_norm": 0.09692673276316899, + "learning_rate": 0.0003493532518844953, + "loss": 1.6064, + "step": 6718 + }, + { + "epoch": 0.6095989838504808, + "grad_norm": 0.09922554259843798, + "learning_rate": 0.00034921315889564346, + "loss": 1.5537, + "step": 6719 + }, + { + "epoch": 0.6096897114861187, + "grad_norm": 0.09832682787401133, + "learning_rate": 0.0003490730789272577, + "loss": 1.5645, + "step": 6720 + }, + { + "epoch": 0.6097804391217565, + "grad_norm": 0.09846855460934718, + "learning_rate": 0.00034893301199143383, + "loss": 1.5443, + "step": 6721 + }, + { + "epoch": 0.6098711667573943, + "grad_norm": 0.09466713736301535, + "learning_rate": 
0.00034879295810026645, + "loss": 1.5812, + "step": 6722 + }, + { + "epoch": 0.6099618943930322, + "grad_norm": 0.09737023890009491, + "learning_rate": 0.00034865291726584956, + "loss": 1.5581, + "step": 6723 + }, + { + "epoch": 0.6100526220286699, + "grad_norm": 0.09776170182182713, + "learning_rate": 0.00034851288950027556, + "loss": 1.5532, + "step": 6724 + }, + { + "epoch": 0.6101433496643077, + "grad_norm": 0.0995560017766512, + "learning_rate": 0.00034837287481563595, + "loss": 1.5824, + "step": 6725 + }, + { + "epoch": 0.6102340772999456, + "grad_norm": 0.10284278326039399, + "learning_rate": 0.00034823287322402084, + "loss": 1.5872, + "step": 6726 + }, + { + "epoch": 0.6103248049355834, + "grad_norm": 0.10042533187807158, + "learning_rate": 0.00034809288473751956, + "loss": 1.61, + "step": 6727 + }, + { + "epoch": 0.6104155325712212, + "grad_norm": 0.09797455731839609, + "learning_rate": 0.00034795290936822016, + "loss": 1.5381, + "step": 6728 + }, + { + "epoch": 0.610506260206859, + "grad_norm": 0.09612536459446644, + "learning_rate": 0.0003478129471282093, + "loss": 1.5801, + "step": 6729 + }, + { + "epoch": 0.6105969878424968, + "grad_norm": 0.09776540746606854, + "learning_rate": 0.0003476729980295728, + "loss": 1.6311, + "step": 6730 + }, + { + "epoch": 0.6106877154781346, + "grad_norm": 0.0959394657798878, + "learning_rate": 0.0003475330620843955, + "loss": 1.5962, + "step": 6731 + }, + { + "epoch": 0.6107784431137725, + "grad_norm": 0.09866681179870755, + "learning_rate": 0.00034739313930476075, + "loss": 1.5255, + "step": 6732 + }, + { + "epoch": 0.6108691707494103, + "grad_norm": 0.10231382231629556, + "learning_rate": 0.00034725322970275096, + "loss": 1.6072, + "step": 6733 + }, + { + "epoch": 0.610959898385048, + "grad_norm": 0.10042560251918768, + "learning_rate": 0.00034711333329044724, + "loss": 1.548, + "step": 6734 + }, + { + "epoch": 0.6110506260206859, + "grad_norm": 0.09391595244467113, + "learning_rate": 0.00034697345007992985, + "loss": 1.6002, + "step": 6735 + }, + { + "epoch": 0.6111413536563237, + "grad_norm": 0.09853161812735337, + "learning_rate": 0.0003468335800832775, + "loss": 1.5544, + "step": 6736 + }, + { + "epoch": 0.6112320812919615, + "grad_norm": 0.09930016989351396, + "learning_rate": 0.00034669372331256807, + "loss": 1.602, + "step": 6737 + }, + { + "epoch": 0.6113228089275994, + "grad_norm": 0.10488443668652699, + "learning_rate": 0.00034655387977987806, + "loss": 1.6062, + "step": 6738 + }, + { + "epoch": 0.6114135365632372, + "grad_norm": 0.09782540277539074, + "learning_rate": 0.0003464140494972833, + "loss": 1.5969, + "step": 6739 + }, + { + "epoch": 0.6115042641988749, + "grad_norm": 0.09825770975236744, + "learning_rate": 0.0003462742324768581, + "loss": 1.5538, + "step": 6740 + }, + { + "epoch": 0.6115949918345128, + "grad_norm": 0.0962135143569348, + "learning_rate": 0.00034613442873067553, + "loss": 1.5549, + "step": 6741 + }, + { + "epoch": 0.6116857194701506, + "grad_norm": 0.09825172708167929, + "learning_rate": 0.0003459946382708077, + "loss": 1.5396, + "step": 6742 + }, + { + "epoch": 0.6117764471057884, + "grad_norm": 0.09897614279089223, + "learning_rate": 0.0003458548611093256, + "loss": 1.62, + "step": 6743 + }, + { + "epoch": 0.6118671747414263, + "grad_norm": 0.10103429288411697, + "learning_rate": 0.0003457150972582989, + "loss": 1.5368, + "step": 6744 + }, + { + "epoch": 0.611957902377064, + "grad_norm": 0.09689980720467119, + "learning_rate": 0.0003455753467297961, + "loss": 1.5588, + "step": 6745 + }, + { + "epoch": 
0.6120486300127018, + "grad_norm": 0.0997419464946472, + "learning_rate": 0.00034543560953588504, + "loss": 1.5322, + "step": 6746 + }, + { + "epoch": 0.6121393576483397, + "grad_norm": 0.10195356018482288, + "learning_rate": 0.000345295885688632, + "loss": 1.5993, + "step": 6747 + }, + { + "epoch": 0.6122300852839775, + "grad_norm": 0.09873111831664405, + "learning_rate": 0.00034515617520010193, + "loss": 1.5836, + "step": 6748 + }, + { + "epoch": 0.6123208129196153, + "grad_norm": 0.09872593820575262, + "learning_rate": 0.00034501647808235893, + "loss": 1.5773, + "step": 6749 + }, + { + "epoch": 0.6124115405552532, + "grad_norm": 0.10116745209038491, + "learning_rate": 0.000344876794347466, + "loss": 1.603, + "step": 6750 + }, + { + "epoch": 0.6125022681908909, + "grad_norm": 0.0963457537570076, + "learning_rate": 0.0003447371240074847, + "loss": 1.5912, + "step": 6751 + }, + { + "epoch": 0.6125929958265287, + "grad_norm": 0.0971852091803179, + "learning_rate": 0.00034459746707447554, + "loss": 1.6236, + "step": 6752 + }, + { + "epoch": 0.6126837234621666, + "grad_norm": 0.09679363931201475, + "learning_rate": 0.00034445782356049795, + "loss": 1.5544, + "step": 6753 + }, + { + "epoch": 0.6127744510978044, + "grad_norm": 0.09923138133638244, + "learning_rate": 0.0003443181934776104, + "loss": 1.567, + "step": 6754 + }, + { + "epoch": 0.6128651787334423, + "grad_norm": 0.09613709796605695, + "learning_rate": 0.0003441785768378697, + "loss": 1.5552, + "step": 6755 + }, + { + "epoch": 0.61295590636908, + "grad_norm": 0.10086782677094164, + "learning_rate": 0.0003440389736533318, + "loss": 1.5228, + "step": 6756 + }, + { + "epoch": 0.6130466340047178, + "grad_norm": 0.1017654680211484, + "learning_rate": 0.0003438993839360517, + "loss": 1.583, + "step": 6757 + }, + { + "epoch": 0.6131373616403557, + "grad_norm": 0.0943883594137125, + "learning_rate": 0.0003437598076980826, + "loss": 1.5928, + "step": 6758 + }, + { + "epoch": 0.6132280892759935, + "grad_norm": 0.09731742370235598, + "learning_rate": 0.0003436202449514772, + "loss": 1.5685, + "step": 6759 + }, + { + "epoch": 0.6133188169116313, + "grad_norm": 0.10552371420236939, + "learning_rate": 0.0003434806957082868, + "loss": 1.5712, + "step": 6760 + }, + { + "epoch": 0.6134095445472691, + "grad_norm": 0.09430670748986425, + "learning_rate": 0.000343341159980561, + "loss": 1.5195, + "step": 6761 + }, + { + "epoch": 0.6135002721829069, + "grad_norm": 0.09433019149382273, + "learning_rate": 0.0003432016377803496, + "loss": 1.5455, + "step": 6762 + }, + { + "epoch": 0.6135909998185447, + "grad_norm": 0.10014852301429876, + "learning_rate": 0.0003430621291196997, + "loss": 1.5578, + "step": 6763 + }, + { + "epoch": 0.6136817274541826, + "grad_norm": 0.0987688817527783, + "learning_rate": 0.00034292263401065816, + "loss": 1.6082, + "step": 6764 + }, + { + "epoch": 0.6137724550898204, + "grad_norm": 0.09786223281082924, + "learning_rate": 0.00034278315246527027, + "loss": 1.5404, + "step": 6765 + }, + { + "epoch": 0.6138631827254581, + "grad_norm": 0.09986877854534144, + "learning_rate": 0.00034264368449558023, + "loss": 1.5775, + "step": 6766 + }, + { + "epoch": 0.613953910361096, + "grad_norm": 0.09622723532424188, + "learning_rate": 0.0003425042301136314, + "loss": 1.5887, + "step": 6767 + }, + { + "epoch": 0.6140446379967338, + "grad_norm": 0.09580326826996746, + "learning_rate": 0.00034236478933146543, + "loss": 1.5505, + "step": 6768 + }, + { + "epoch": 0.6141353656323716, + "grad_norm": 0.0995805777643942, + "learning_rate": 
0.000342225362161123, + "loss": 1.5762, + "step": 6769 + }, + { + "epoch": 0.6142260932680095, + "grad_norm": 0.09744632984563212, + "learning_rate": 0.0003420859486146439, + "loss": 1.5731, + "step": 6770 + }, + { + "epoch": 0.6143168209036473, + "grad_norm": 0.09957099628530564, + "learning_rate": 0.0003419465487040665, + "loss": 1.6078, + "step": 6771 + }, + { + "epoch": 0.614407548539285, + "grad_norm": 0.0967884317357685, + "learning_rate": 0.0003418071624414279, + "loss": 1.5541, + "step": 6772 + }, + { + "epoch": 0.6144982761749229, + "grad_norm": 0.09861473247866331, + "learning_rate": 0.00034166778983876414, + "loss": 1.5607, + "step": 6773 + }, + { + "epoch": 0.6145890038105607, + "grad_norm": 0.10196427045829809, + "learning_rate": 0.00034152843090811004, + "loss": 1.6028, + "step": 6774 + }, + { + "epoch": 0.6146797314461985, + "grad_norm": 0.10154904423960524, + "learning_rate": 0.00034138908566149926, + "loss": 1.6145, + "step": 6775 + }, + { + "epoch": 0.6147704590818364, + "grad_norm": 0.10201221813207352, + "learning_rate": 0.00034124975411096433, + "loss": 1.5789, + "step": 6776 + }, + { + "epoch": 0.6148611867174741, + "grad_norm": 0.10102383926297298, + "learning_rate": 0.00034111043626853637, + "loss": 1.5738, + "step": 6777 + }, + { + "epoch": 0.6149519143531119, + "grad_norm": 0.1007028762963522, + "learning_rate": 0.00034097113214624574, + "loss": 1.5558, + "step": 6778 + }, + { + "epoch": 0.6150426419887498, + "grad_norm": 0.11211013843282164, + "learning_rate": 0.0003408318417561215, + "loss": 1.5738, + "step": 6779 + }, + { + "epoch": 0.6151333696243876, + "grad_norm": 0.09997864460447398, + "learning_rate": 0.000340692565110191, + "loss": 1.5956, + "step": 6780 + }, + { + "epoch": 0.6152240972600254, + "grad_norm": 0.10128649529465263, + "learning_rate": 0.00034055330222048096, + "loss": 1.5966, + "step": 6781 + }, + { + "epoch": 0.6153148248956632, + "grad_norm": 0.10257645181940524, + "learning_rate": 0.000340414053099017, + "loss": 1.5397, + "step": 6782 + }, + { + "epoch": 0.615405552531301, + "grad_norm": 0.0988411752742622, + "learning_rate": 0.0003402748177578229, + "loss": 1.5771, + "step": 6783 + }, + { + "epoch": 0.6154962801669388, + "grad_norm": 0.09719177737241758, + "learning_rate": 0.0003401355962089216, + "loss": 1.5678, + "step": 6784 + }, + { + "epoch": 0.6155870078025767, + "grad_norm": 0.09964859574862085, + "learning_rate": 0.0003399963884643354, + "loss": 1.5938, + "step": 6785 + }, + { + "epoch": 0.6156777354382145, + "grad_norm": 0.09514675647087538, + "learning_rate": 0.0003398571945360847, + "loss": 1.576, + "step": 6786 + }, + { + "epoch": 0.6157684630738522, + "grad_norm": 0.10183290213635719, + "learning_rate": 0.00033971801443618876, + "loss": 1.5465, + "step": 6787 + }, + { + "epoch": 0.6158591907094901, + "grad_norm": 0.10042897374243126, + "learning_rate": 0.00033957884817666583, + "loss": 1.5611, + "step": 6788 + }, + { + "epoch": 0.6159499183451279, + "grad_norm": 0.09671537808654966, + "learning_rate": 0.00033943969576953316, + "loss": 1.5672, + "step": 6789 + }, + { + "epoch": 0.6160406459807657, + "grad_norm": 0.09708652773959173, + "learning_rate": 0.0003393005572268063, + "loss": 1.5762, + "step": 6790 + }, + { + "epoch": 0.6161313736164036, + "grad_norm": 0.09508631918444585, + "learning_rate": 0.00033916143256050005, + "loss": 1.5989, + "step": 6791 + }, + { + "epoch": 0.6162221012520414, + "grad_norm": 0.09785525405662515, + "learning_rate": 0.0003390223217826277, + "loss": 1.5522, + "step": 6792 + }, + { + "epoch": 
0.6163128288876791, + "grad_norm": 0.0987315199193865, + "learning_rate": 0.0003388832249052017, + "loss": 1.5566, + "step": 6793 + }, + { + "epoch": 0.616403556523317, + "grad_norm": 0.09791444100131812, + "learning_rate": 0.00033874414194023293, + "loss": 1.5674, + "step": 6794 + }, + { + "epoch": 0.6164942841589548, + "grad_norm": 0.09941235770444447, + "learning_rate": 0.0003386050728997313, + "loss": 1.5567, + "step": 6795 + }, + { + "epoch": 0.6165850117945927, + "grad_norm": 0.09419523022737149, + "learning_rate": 0.0003384660177957055, + "loss": 1.5971, + "step": 6796 + }, + { + "epoch": 0.6166757394302305, + "grad_norm": 0.09989292115748598, + "learning_rate": 0.00033832697664016274, + "loss": 1.5693, + "step": 6797 + }, + { + "epoch": 0.6167664670658682, + "grad_norm": 0.09866593658892694, + "learning_rate": 0.00033818794944510944, + "loss": 1.5762, + "step": 6798 + }, + { + "epoch": 0.6168571947015061, + "grad_norm": 0.09956463270210586, + "learning_rate": 0.00033804893622255063, + "loss": 1.5724, + "step": 6799 + }, + { + "epoch": 0.6169479223371439, + "grad_norm": 0.09572966981798692, + "learning_rate": 0.0003379099369844897, + "loss": 1.5362, + "step": 6800 + }, + { + "epoch": 0.6170386499727817, + "grad_norm": 0.0978768822394835, + "learning_rate": 0.00033777095174292995, + "loss": 1.5546, + "step": 6801 + }, + { + "epoch": 0.6171293776084196, + "grad_norm": 0.09360838715813277, + "learning_rate": 0.0003376319805098723, + "loss": 1.5553, + "step": 6802 + }, + { + "epoch": 0.6172201052440573, + "grad_norm": 0.09985317817255082, + "learning_rate": 0.00033749302329731716, + "loss": 1.6014, + "step": 6803 + }, + { + "epoch": 0.6173108328796951, + "grad_norm": 0.095053305406064, + "learning_rate": 0.0003373540801172633, + "loss": 1.5934, + "step": 6804 + }, + { + "epoch": 0.617401560515333, + "grad_norm": 0.0986265115103523, + "learning_rate": 0.0003372151509817086, + "loss": 1.547, + "step": 6805 + }, + { + "epoch": 0.6174922881509708, + "grad_norm": 0.09609269746365036, + "learning_rate": 0.0003370762359026498, + "loss": 1.5922, + "step": 6806 + }, + { + "epoch": 0.6175830157866086, + "grad_norm": 0.0966600259672406, + "learning_rate": 0.00033693733489208183, + "loss": 1.5547, + "step": 6807 + }, + { + "epoch": 0.6176737434222465, + "grad_norm": 0.10073154236400825, + "learning_rate": 0.000336798447961999, + "loss": 1.5399, + "step": 6808 + }, + { + "epoch": 0.6177644710578842, + "grad_norm": 0.097262546752706, + "learning_rate": 0.0003366595751243943, + "loss": 1.5696, + "step": 6809 + }, + { + "epoch": 0.617855198693522, + "grad_norm": 0.10058146079724505, + "learning_rate": 0.0003365207163912593, + "loss": 1.5598, + "step": 6810 + }, + { + "epoch": 0.6179459263291599, + "grad_norm": 0.09951745487680584, + "learning_rate": 0.00033638187177458467, + "loss": 1.5402, + "step": 6811 + }, + { + "epoch": 0.6180366539647977, + "grad_norm": 0.09689752062509445, + "learning_rate": 0.00033624304128635954, + "loss": 1.5558, + "step": 6812 + }, + { + "epoch": 0.6181273816004355, + "grad_norm": 0.10888382939531768, + "learning_rate": 0.0003361042249385719, + "loss": 1.5321, + "step": 6813 + }, + { + "epoch": 0.6182181092360733, + "grad_norm": 0.10171021656762236, + "learning_rate": 0.0003359654227432085, + "loss": 1.5339, + "step": 6814 + }, + { + "epoch": 0.6183088368717111, + "grad_norm": 0.1012587505255921, + "learning_rate": 0.00033582663471225504, + "loss": 1.5885, + "step": 6815 + }, + { + "epoch": 0.6183995645073489, + "grad_norm": 0.10426138785539585, + "learning_rate": 
0.0003356878608576958, + "loss": 1.5627, + "step": 6816 + }, + { + "epoch": 0.6184902921429868, + "grad_norm": 0.09690682489773082, + "learning_rate": 0.0003355491011915141, + "loss": 1.5564, + "step": 6817 + }, + { + "epoch": 0.6185810197786246, + "grad_norm": 0.09717398046704345, + "learning_rate": 0.0003354103557256918, + "loss": 1.6002, + "step": 6818 + }, + { + "epoch": 0.6186717474142623, + "grad_norm": 0.09585583431681169, + "learning_rate": 0.0003352716244722095, + "loss": 1.5886, + "step": 6819 + }, + { + "epoch": 0.6187624750499002, + "grad_norm": 0.09766359603540323, + "learning_rate": 0.0003351329074430467, + "loss": 1.5747, + "step": 6820 + }, + { + "epoch": 0.618853202685538, + "grad_norm": 0.10138182751591629, + "learning_rate": 0.0003349942046501817, + "loss": 1.5665, + "step": 6821 + }, + { + "epoch": 0.6189439303211758, + "grad_norm": 0.10200158943061943, + "learning_rate": 0.0003348555161055913, + "loss": 1.5295, + "step": 6822 + }, + { + "epoch": 0.6190346579568137, + "grad_norm": 0.09983425537906226, + "learning_rate": 0.0003347168418212514, + "loss": 1.5907, + "step": 6823 + }, + { + "epoch": 0.6191253855924514, + "grad_norm": 0.09992049822260284, + "learning_rate": 0.0003345781818091367, + "loss": 1.5651, + "step": 6824 + }, + { + "epoch": 0.6192161132280892, + "grad_norm": 0.09835621870610084, + "learning_rate": 0.0003344395360812204, + "loss": 1.5468, + "step": 6825 + }, + { + "epoch": 0.6193068408637271, + "grad_norm": 0.09610785545215778, + "learning_rate": 0.00033430090464947447, + "loss": 1.5777, + "step": 6826 + }, + { + "epoch": 0.6193975684993649, + "grad_norm": 0.09903142537452543, + "learning_rate": 0.0003341622875258699, + "loss": 1.5485, + "step": 6827 + }, + { + "epoch": 0.6194882961350027, + "grad_norm": 0.10202669883914403, + "learning_rate": 0.0003340236847223763, + "loss": 1.5653, + "step": 6828 + }, + { + "epoch": 0.6195790237706406, + "grad_norm": 0.10233837428940971, + "learning_rate": 0.00033388509625096197, + "loss": 1.5711, + "step": 6829 + }, + { + "epoch": 0.6196697514062783, + "grad_norm": 0.0952026144242752, + "learning_rate": 0.000333746522123594, + "loss": 1.6093, + "step": 6830 + }, + { + "epoch": 0.6197604790419161, + "grad_norm": 0.10438891309158693, + "learning_rate": 0.00033360796235223824, + "loss": 1.5828, + "step": 6831 + }, + { + "epoch": 0.619851206677554, + "grad_norm": 0.09640523608841706, + "learning_rate": 0.00033346941694885964, + "loss": 1.5575, + "step": 6832 + }, + { + "epoch": 0.6199419343131918, + "grad_norm": 0.0968133869594926, + "learning_rate": 0.0003333308859254213, + "loss": 1.5875, + "step": 6833 + }, + { + "epoch": 0.6200326619488297, + "grad_norm": 0.09747106900138089, + "learning_rate": 0.0003331923692938856, + "loss": 1.5912, + "step": 6834 + }, + { + "epoch": 0.6201233895844674, + "grad_norm": 0.09824091483954427, + "learning_rate": 0.0003330538670662134, + "loss": 1.5525, + "step": 6835 + }, + { + "epoch": 0.6202141172201052, + "grad_norm": 0.09688340588302762, + "learning_rate": 0.00033291537925436444, + "loss": 1.553, + "step": 6836 + }, + { + "epoch": 0.6203048448557431, + "grad_norm": 0.10273257521642332, + "learning_rate": 0.00033277690587029703, + "loss": 1.5915, + "step": 6837 + }, + { + "epoch": 0.6203955724913809, + "grad_norm": 0.10689869816276446, + "learning_rate": 0.0003326384469259685, + "loss": 1.5682, + "step": 6838 + }, + { + "epoch": 0.6204863001270187, + "grad_norm": 0.10048840748193848, + "learning_rate": 0.00033250000243333446, + "loss": 1.5884, + "step": 6839 + }, + { + "epoch": 
0.6205770277626566, + "grad_norm": 0.09601861322508598, + "learning_rate": 0.0003323615724043503, + "loss": 1.5618, + "step": 6840 + }, + { + "epoch": 0.6206677553982943, + "grad_norm": 0.10256803320915381, + "learning_rate": 0.0003322231568509688, + "loss": 1.5761, + "step": 6841 + }, + { + "epoch": 0.6207584830339321, + "grad_norm": 0.09751186574658514, + "learning_rate": 0.00033208475578514265, + "loss": 1.5568, + "step": 6842 + }, + { + "epoch": 0.62084921066957, + "grad_norm": 0.09306745080909146, + "learning_rate": 0.00033194636921882246, + "loss": 1.5661, + "step": 6843 + }, + { + "epoch": 0.6209399383052078, + "grad_norm": 0.09872758705060986, + "learning_rate": 0.00033180799716395806, + "loss": 1.5889, + "step": 6844 + }, + { + "epoch": 0.6210306659408455, + "grad_norm": 0.09816983471746991, + "learning_rate": 0.00033166963963249794, + "loss": 1.5793, + "step": 6845 + }, + { + "epoch": 0.6211213935764834, + "grad_norm": 0.09839052037066015, + "learning_rate": 0.00033153129663638926, + "loss": 1.5851, + "step": 6846 + }, + { + "epoch": 0.6212121212121212, + "grad_norm": 0.09842780403679262, + "learning_rate": 0.00033139296818757776, + "loss": 1.571, + "step": 6847 + }, + { + "epoch": 0.621302848847759, + "grad_norm": 0.09918710116291625, + "learning_rate": 0.00033125465429800836, + "loss": 1.5597, + "step": 6848 + }, + { + "epoch": 0.6213935764833969, + "grad_norm": 0.0967747974910046, + "learning_rate": 0.00033111635497962453, + "loss": 1.5628, + "step": 6849 + }, + { + "epoch": 0.6214843041190347, + "grad_norm": 0.10266391424876618, + "learning_rate": 0.00033097807024436843, + "loss": 1.61, + "step": 6850 + }, + { + "epoch": 0.6215750317546724, + "grad_norm": 0.10275322665430568, + "learning_rate": 0.00033083980010418075, + "loss": 1.5938, + "step": 6851 + }, + { + "epoch": 0.6216657593903103, + "grad_norm": 0.10069179354552929, + "learning_rate": 0.00033070154457100133, + "loss": 1.5296, + "step": 6852 + }, + { + "epoch": 0.6217564870259481, + "grad_norm": 0.10027511015813302, + "learning_rate": 0.00033056330365676843, + "loss": 1.5851, + "step": 6853 + }, + { + "epoch": 0.6218472146615859, + "grad_norm": 0.09819331662475686, + "learning_rate": 0.00033042507737341916, + "loss": 1.5683, + "step": 6854 + }, + { + "epoch": 0.6219379422972238, + "grad_norm": 0.09628379855415166, + "learning_rate": 0.00033028686573288946, + "loss": 1.5597, + "step": 6855 + }, + { + "epoch": 0.6220286699328615, + "grad_norm": 0.10156879722503041, + "learning_rate": 0.000330148668747114, + "loss": 1.599, + "step": 6856 + }, + { + "epoch": 0.6221193975684993, + "grad_norm": 0.09585214013821967, + "learning_rate": 0.0003300104864280261, + "loss": 1.5935, + "step": 6857 + }, + { + "epoch": 0.6222101252041372, + "grad_norm": 0.09900504243617206, + "learning_rate": 0.00032987231878755765, + "loss": 1.596, + "step": 6858 + }, + { + "epoch": 0.622300852839775, + "grad_norm": 0.09841350775822237, + "learning_rate": 0.0003297341658376396, + "loss": 1.5607, + "step": 6859 + }, + { + "epoch": 0.6223915804754128, + "grad_norm": 0.10149456668836337, + "learning_rate": 0.0003295960275902015, + "loss": 1.5822, + "step": 6860 + }, + { + "epoch": 0.6224823081110507, + "grad_norm": 0.09695686868039145, + "learning_rate": 0.00032945790405717137, + "loss": 1.5861, + "step": 6861 + }, + { + "epoch": 0.6225730357466884, + "grad_norm": 0.10050396243238784, + "learning_rate": 0.00032931979525047627, + "loss": 1.5761, + "step": 6862 + }, + { + "epoch": 0.6226637633823262, + "grad_norm": 0.09544558160450341, + "learning_rate": 
0.0003291817011820422, + "loss": 1.544, + "step": 6863 + }, + { + "epoch": 0.6227544910179641, + "grad_norm": 0.0973863149496474, + "learning_rate": 0.0003290436218637933, + "loss": 1.565, + "step": 6864 + }, + { + "epoch": 0.6228452186536019, + "grad_norm": 0.10094472476578159, + "learning_rate": 0.00032890555730765293, + "loss": 1.5486, + "step": 6865 + }, + { + "epoch": 0.6229359462892397, + "grad_norm": 0.09854290505332694, + "learning_rate": 0.00032876750752554285, + "loss": 1.5871, + "step": 6866 + }, + { + "epoch": 0.6230266739248775, + "grad_norm": 0.09717269575156068, + "learning_rate": 0.00032862947252938394, + "loss": 1.5595, + "step": 6867 + }, + { + "epoch": 0.6231174015605153, + "grad_norm": 0.09752138745824304, + "learning_rate": 0.0003284914523310951, + "loss": 1.5606, + "step": 6868 + }, + { + "epoch": 0.6232081291961531, + "grad_norm": 0.09577633901812312, + "learning_rate": 0.0003283534469425946, + "loss": 1.5554, + "step": 6869 + }, + { + "epoch": 0.623298856831791, + "grad_norm": 0.09993438784545233, + "learning_rate": 0.00032821545637579927, + "loss": 1.5933, + "step": 6870 + }, + { + "epoch": 0.6233895844674288, + "grad_norm": 0.09424343083068665, + "learning_rate": 0.0003280774806426248, + "loss": 1.5125, + "step": 6871 + }, + { + "epoch": 0.6234803121030666, + "grad_norm": 0.09657926761523213, + "learning_rate": 0.00032793951975498506, + "loss": 1.5626, + "step": 6872 + }, + { + "epoch": 0.6235710397387044, + "grad_norm": 0.09966637640751345, + "learning_rate": 0.0003278015737247931, + "loss": 1.5858, + "step": 6873 + }, + { + "epoch": 0.6236617673743422, + "grad_norm": 0.10185151255351343, + "learning_rate": 0.0003276636425639608, + "loss": 1.5518, + "step": 6874 + }, + { + "epoch": 0.6237524950099801, + "grad_norm": 0.10249636218963543, + "learning_rate": 0.00032752572628439824, + "loss": 1.5841, + "step": 6875 + }, + { + "epoch": 0.6238432226456179, + "grad_norm": 0.10333271798930938, + "learning_rate": 0.00032738782489801464, + "loss": 1.6047, + "step": 6876 + }, + { + "epoch": 0.6239339502812556, + "grad_norm": 0.10644385523844178, + "learning_rate": 0.0003272499384167179, + "loss": 1.5883, + "step": 6877 + }, + { + "epoch": 0.6240246779168935, + "grad_norm": 0.10141498542224865, + "learning_rate": 0.00032711206685241413, + "loss": 1.6184, + "step": 6878 + }, + { + "epoch": 0.6241154055525313, + "grad_norm": 0.09575686285144881, + "learning_rate": 0.0003269742102170092, + "loss": 1.5834, + "step": 6879 + }, + { + "epoch": 0.6242061331881691, + "grad_norm": 0.09919561027385429, + "learning_rate": 0.0003268363685224066, + "loss": 1.5993, + "step": 6880 + }, + { + "epoch": 0.624296860823807, + "grad_norm": 0.09747446954224093, + "learning_rate": 0.0003266985417805092, + "loss": 1.5572, + "step": 6881 + }, + { + "epoch": 0.6243875884594448, + "grad_norm": 0.09601660945305078, + "learning_rate": 0.0003265607300032182, + "loss": 1.56, + "step": 6882 + }, + { + "epoch": 0.6244783160950825, + "grad_norm": 0.09798558330560003, + "learning_rate": 0.00032642293320243366, + "loss": 1.5749, + "step": 6883 + }, + { + "epoch": 0.6245690437307204, + "grad_norm": 0.09994580111182129, + "learning_rate": 0.00032628515139005457, + "loss": 1.6147, + "step": 6884 + }, + { + "epoch": 0.6246597713663582, + "grad_norm": 0.09822043771948623, + "learning_rate": 0.0003261473845779781, + "loss": 1.5991, + "step": 6885 + }, + { + "epoch": 0.624750499001996, + "grad_norm": 0.09757021733206966, + "learning_rate": 0.0003260096327781005, + "loss": 1.5593, + "step": 6886 + }, + { + "epoch": 
0.6248412266376339, + "grad_norm": 0.09655781782288804, + "learning_rate": 0.0003258718960023169, + "loss": 1.5775, + "step": 6887 + }, + { + "epoch": 0.6249319542732716, + "grad_norm": 0.09818873627773829, + "learning_rate": 0.0003257341742625207, + "loss": 1.5589, + "step": 6888 + }, + { + "epoch": 0.6250226819089094, + "grad_norm": 0.09559607245467668, + "learning_rate": 0.0003255964675706044, + "loss": 1.5547, + "step": 6889 + }, + { + "epoch": 0.6251134095445473, + "grad_norm": 0.09942696964567024, + "learning_rate": 0.00032545877593845876, + "loss": 1.5406, + "step": 6890 + }, + { + "epoch": 0.6252041371801851, + "grad_norm": 0.0982694710413974, + "learning_rate": 0.0003253210993779735, + "loss": 1.5411, + "step": 6891 + }, + { + "epoch": 0.6252948648158229, + "grad_norm": 0.09412544735116145, + "learning_rate": 0.00032518343790103704, + "loss": 1.612, + "step": 6892 + }, + { + "epoch": 0.6253855924514607, + "grad_norm": 0.09391878568400465, + "learning_rate": 0.00032504579151953637, + "loss": 1.591, + "step": 6893 + }, + { + "epoch": 0.6254763200870985, + "grad_norm": 0.10075946330131612, + "learning_rate": 0.0003249081602453575, + "loss": 1.5431, + "step": 6894 + }, + { + "epoch": 0.6255670477227363, + "grad_norm": 0.09504962857316156, + "learning_rate": 0.0003247705440903848, + "loss": 1.5732, + "step": 6895 + }, + { + "epoch": 0.6256577753583742, + "grad_norm": 0.10134280184288894, + "learning_rate": 0.00032463294306650156, + "loss": 1.5302, + "step": 6896 + }, + { + "epoch": 0.625748502994012, + "grad_norm": 0.10216344651332093, + "learning_rate": 0.0003244953571855894, + "loss": 1.5279, + "step": 6897 + }, + { + "epoch": 0.6258392306296497, + "grad_norm": 0.09780307370593451, + "learning_rate": 0.000324357786459529, + "loss": 1.5301, + "step": 6898 + }, + { + "epoch": 0.6259299582652876, + "grad_norm": 0.10001086715509537, + "learning_rate": 0.00032422023090019974, + "loss": 1.5756, + "step": 6899 + }, + { + "epoch": 0.6260206859009254, + "grad_norm": 0.09984765664232452, + "learning_rate": 0.0003240826905194794, + "loss": 1.5347, + "step": 6900 + }, + { + "epoch": 0.6261114135365632, + "grad_norm": 0.09585253621005005, + "learning_rate": 0.00032394516532924445, + "loss": 1.5311, + "step": 6901 + }, + { + "epoch": 0.6262021411722011, + "grad_norm": 0.0963657484012037, + "learning_rate": 0.0003238076553413706, + "loss": 1.5805, + "step": 6902 + }, + { + "epoch": 0.6262928688078389, + "grad_norm": 0.09594896717174588, + "learning_rate": 0.0003236701605677318, + "loss": 1.5724, + "step": 6903 + }, + { + "epoch": 0.6263835964434766, + "grad_norm": 0.10057248809983643, + "learning_rate": 0.0003235326810202005, + "loss": 1.5771, + "step": 6904 + }, + { + "epoch": 0.6264743240791145, + "grad_norm": 0.0986239371556352, + "learning_rate": 0.0003233952167106482, + "loss": 1.5681, + "step": 6905 + }, + { + "epoch": 0.6265650517147523, + "grad_norm": 0.09959458841495987, + "learning_rate": 0.00032325776765094506, + "loss": 1.5357, + "step": 6906 + }, + { + "epoch": 0.6266557793503901, + "grad_norm": 0.0988647625396888, + "learning_rate": 0.0003231203338529596, + "loss": 1.5525, + "step": 6907 + }, + { + "epoch": 0.626746506986028, + "grad_norm": 0.10182231902427989, + "learning_rate": 0.0003229829153285594, + "loss": 1.6155, + "step": 6908 + }, + { + "epoch": 0.6268372346216657, + "grad_norm": 0.09978243899220823, + "learning_rate": 0.0003228455120896105, + "loss": 1.5849, + "step": 6909 + }, + { + "epoch": 0.6269279622573036, + "grad_norm": 0.10015082818697107, + "learning_rate": 
0.0003227081241479779, + "loss": 1.6002, + "step": 6910 + }, + { + "epoch": 0.6270186898929414, + "grad_norm": 0.10051617076316263, + "learning_rate": 0.00032257075151552483, + "loss": 1.5723, + "step": 6911 + }, + { + "epoch": 0.6271094175285792, + "grad_norm": 0.09763277394962024, + "learning_rate": 0.0003224333942041134, + "loss": 1.5568, + "step": 6912 + }, + { + "epoch": 0.6272001451642171, + "grad_norm": 0.09722579934274984, + "learning_rate": 0.0003222960522256049, + "loss": 1.5909, + "step": 6913 + }, + { + "epoch": 0.6272908727998548, + "grad_norm": 0.1002037111639384, + "learning_rate": 0.00032215872559185815, + "loss": 1.6003, + "step": 6914 + }, + { + "epoch": 0.6273816004354926, + "grad_norm": 0.0970430403172642, + "learning_rate": 0.00032202141431473175, + "loss": 1.5955, + "step": 6915 + }, + { + "epoch": 0.6274723280711305, + "grad_norm": 0.10145230895210787, + "learning_rate": 0.00032188411840608256, + "loss": 1.5414, + "step": 6916 + }, + { + "epoch": 0.6275630557067683, + "grad_norm": 0.09488419149220582, + "learning_rate": 0.0003217468378777657, + "loss": 1.535, + "step": 6917 + }, + { + "epoch": 0.6276537833424061, + "grad_norm": 0.09576687674761497, + "learning_rate": 0.00032160957274163595, + "loss": 1.5136, + "step": 6918 + }, + { + "epoch": 0.627744510978044, + "grad_norm": 0.09670610119376899, + "learning_rate": 0.00032147232300954576, + "loss": 1.604, + "step": 6919 + }, + { + "epoch": 0.6278352386136817, + "grad_norm": 0.09827738601176107, + "learning_rate": 0.00032133508869334695, + "loss": 1.5814, + "step": 6920 + }, + { + "epoch": 0.6279259662493195, + "grad_norm": 0.09373135784065041, + "learning_rate": 0.0003211978698048894, + "loss": 1.5275, + "step": 6921 + }, + { + "epoch": 0.6280166938849574, + "grad_norm": 0.09470645068304204, + "learning_rate": 0.00032106066635602214, + "loss": 1.5899, + "step": 6922 + }, + { + "epoch": 0.6281074215205952, + "grad_norm": 0.10128407300503249, + "learning_rate": 0.0003209234783585929, + "loss": 1.5488, + "step": 6923 + }, + { + "epoch": 0.628198149156233, + "grad_norm": 0.09976056549657748, + "learning_rate": 0.0003207863058244475, + "loss": 1.5917, + "step": 6924 + }, + { + "epoch": 0.6282888767918708, + "grad_norm": 0.09640467707720607, + "learning_rate": 0.00032064914876543096, + "loss": 1.556, + "step": 6925 + }, + { + "epoch": 0.6283796044275086, + "grad_norm": 0.09566504675119032, + "learning_rate": 0.0003205120071933869, + "loss": 1.5862, + "step": 6926 + }, + { + "epoch": 0.6284703320631464, + "grad_norm": 0.10045490315943997, + "learning_rate": 0.0003203748811201576, + "loss": 1.5545, + "step": 6927 + }, + { + "epoch": 0.6285610596987843, + "grad_norm": 0.09667778853182753, + "learning_rate": 0.00032023777055758374, + "loss": 1.5729, + "step": 6928 + }, + { + "epoch": 0.6286517873344221, + "grad_norm": 0.10172600101237578, + "learning_rate": 0.00032010067551750486, + "loss": 1.5354, + "step": 6929 + }, + { + "epoch": 0.6287425149700598, + "grad_norm": 0.10001384609372396, + "learning_rate": 0.0003199635960117593, + "loss": 1.5626, + "step": 6930 + }, + { + "epoch": 0.6288332426056977, + "grad_norm": 0.0984865119514521, + "learning_rate": 0.0003198265320521836, + "loss": 1.5856, + "step": 6931 + }, + { + "epoch": 0.6289239702413355, + "grad_norm": 0.0985067807665284, + "learning_rate": 0.0003196894836506133, + "loss": 1.565, + "step": 6932 + }, + { + "epoch": 0.6290146978769733, + "grad_norm": 0.09874364333886775, + "learning_rate": 0.0003195524508188828, + "loss": 1.5506, + "step": 6933 + }, + { + "epoch": 
0.6291054255126112, + "grad_norm": 0.1025364610438463, + "learning_rate": 0.00031941543356882475, + "loss": 1.6212, + "step": 6934 + }, + { + "epoch": 0.629196153148249, + "grad_norm": 0.10458708590675819, + "learning_rate": 0.0003192784319122707, + "loss": 1.5904, + "step": 6935 + }, + { + "epoch": 0.6292868807838867, + "grad_norm": 0.10166993814428674, + "learning_rate": 0.00031914144586105066, + "loss": 1.5998, + "step": 6936 + }, + { + "epoch": 0.6293776084195246, + "grad_norm": 0.09451403825629948, + "learning_rate": 0.0003190044754269934, + "loss": 1.5859, + "step": 6937 + }, + { + "epoch": 0.6294683360551624, + "grad_norm": 0.09814416643972505, + "learning_rate": 0.0003188675206219264, + "loss": 1.5649, + "step": 6938 + }, + { + "epoch": 0.6295590636908002, + "grad_norm": 0.09723165611091326, + "learning_rate": 0.00031873058145767566, + "loss": 1.5701, + "step": 6939 + }, + { + "epoch": 0.6296497913264381, + "grad_norm": 0.10220637755447035, + "learning_rate": 0.0003185936579460658, + "loss": 1.5528, + "step": 6940 + }, + { + "epoch": 0.6297405189620758, + "grad_norm": 0.09461589268706798, + "learning_rate": 0.00031845675009892037, + "loss": 1.5948, + "step": 6941 + }, + { + "epoch": 0.6298312465977136, + "grad_norm": 0.10007210281698371, + "learning_rate": 0.0003183198579280615, + "loss": 1.5465, + "step": 6942 + }, + { + "epoch": 0.6299219742333515, + "grad_norm": 0.09995774583394333, + "learning_rate": 0.00031818298144530954, + "loss": 1.5824, + "step": 6943 + }, + { + "epoch": 0.6300127018689893, + "grad_norm": 0.09614131435511593, + "learning_rate": 0.00031804612066248396, + "loss": 1.5816, + "step": 6944 + }, + { + "epoch": 0.630103429504627, + "grad_norm": 0.09855394369616201, + "learning_rate": 0.0003179092755914028, + "loss": 1.5485, + "step": 6945 + }, + { + "epoch": 0.6301941571402649, + "grad_norm": 0.10040962762492094, + "learning_rate": 0.00031777244624388236, + "loss": 1.5865, + "step": 6946 + }, + { + "epoch": 0.6302848847759027, + "grad_norm": 0.09818078654911612, + "learning_rate": 0.0003176356326317381, + "loss": 1.5281, + "step": 6947 + }, + { + "epoch": 0.6303756124115406, + "grad_norm": 0.09803639531250867, + "learning_rate": 0.0003174988347667837, + "loss": 1.5554, + "step": 6948 + }, + { + "epoch": 0.6304663400471784, + "grad_norm": 0.09837737024581264, + "learning_rate": 0.0003173620526608321, + "loss": 1.5534, + "step": 6949 + }, + { + "epoch": 0.6305570676828162, + "grad_norm": 0.09843813425350793, + "learning_rate": 0.0003172252863256939, + "loss": 1.5497, + "step": 6950 + }, + { + "epoch": 0.630647795318454, + "grad_norm": 0.1013963776474145, + "learning_rate": 0.00031708853577317933, + "loss": 1.5914, + "step": 6951 + }, + { + "epoch": 0.6307385229540918, + "grad_norm": 0.0985850624872131, + "learning_rate": 0.0003169518010150967, + "loss": 1.5487, + "step": 6952 + }, + { + "epoch": 0.6308292505897296, + "grad_norm": 0.10035277493279447, + "learning_rate": 0.00031681508206325286, + "loss": 1.5913, + "step": 6953 + }, + { + "epoch": 0.6309199782253675, + "grad_norm": 0.10047339016549237, + "learning_rate": 0.0003166783789294537, + "loss": 1.5807, + "step": 6954 + }, + { + "epoch": 0.6310107058610053, + "grad_norm": 0.1003911156512484, + "learning_rate": 0.0003165416916255037, + "loss": 1.5399, + "step": 6955 + }, + { + "epoch": 0.631101433496643, + "grad_norm": 0.09680165305629657, + "learning_rate": 0.0003164050201632054, + "loss": 1.5494, + "step": 6956 + }, + { + "epoch": 0.6311921611322809, + "grad_norm": 0.1013435926250711, + "learning_rate": 
0.0003162683645543609, + "loss": 1.609, + "step": 6957 + }, + { + "epoch": 0.6312828887679187, + "grad_norm": 0.09436854269532088, + "learning_rate": 0.00031613172481077015, + "loss": 1.6066, + "step": 6958 + }, + { + "epoch": 0.6313736164035565, + "grad_norm": 0.0937320591828463, + "learning_rate": 0.0003159951009442321, + "loss": 1.511, + "step": 6959 + }, + { + "epoch": 0.6314643440391944, + "grad_norm": 0.09831599502637961, + "learning_rate": 0.00031585849296654413, + "loss": 1.5486, + "step": 6960 + }, + { + "epoch": 0.6315550716748322, + "grad_norm": 0.09947784707089591, + "learning_rate": 0.0003157219008895024, + "loss": 1.6131, + "step": 6961 + }, + { + "epoch": 0.6316457993104699, + "grad_norm": 0.09843212144611714, + "learning_rate": 0.00031558532472490187, + "loss": 1.5577, + "step": 6962 + }, + { + "epoch": 0.6317365269461078, + "grad_norm": 0.09842709298339619, + "learning_rate": 0.00031544876448453554, + "loss": 1.625, + "step": 6963 + }, + { + "epoch": 0.6318272545817456, + "grad_norm": 0.0983376665615962, + "learning_rate": 0.0003153122201801956, + "loss": 1.5423, + "step": 6964 + }, + { + "epoch": 0.6319179822173834, + "grad_norm": 0.09535219725004673, + "learning_rate": 0.0003151756918236727, + "loss": 1.5705, + "step": 6965 + }, + { + "epoch": 0.6320087098530213, + "grad_norm": 0.10048804678932256, + "learning_rate": 0.000315039179426756, + "loss": 1.5592, + "step": 6966 + }, + { + "epoch": 0.632099437488659, + "grad_norm": 0.10178349291772495, + "learning_rate": 0.0003149026830012336, + "loss": 1.5764, + "step": 6967 + }, + { + "epoch": 0.6321901651242968, + "grad_norm": 0.10204367658346729, + "learning_rate": 0.00031476620255889166, + "loss": 1.6013, + "step": 6968 + }, + { + "epoch": 0.6322808927599347, + "grad_norm": 0.0949779101991465, + "learning_rate": 0.0003146297381115155, + "loss": 1.5409, + "step": 6969 + }, + { + "epoch": 0.6323716203955725, + "grad_norm": 0.10026105673615321, + "learning_rate": 0.00031449328967088873, + "loss": 1.5876, + "step": 6970 + }, + { + "epoch": 0.6324623480312103, + "grad_norm": 0.0980355401184222, + "learning_rate": 0.0003143568572487937, + "loss": 1.5824, + "step": 6971 + }, + { + "epoch": 0.6325530756668482, + "grad_norm": 0.09838480688707524, + "learning_rate": 0.0003142204408570114, + "loss": 1.6257, + "step": 6972 + }, + { + "epoch": 0.6326438033024859, + "grad_norm": 0.09929656950744362, + "learning_rate": 0.0003140840405073214, + "loss": 1.5642, + "step": 6973 + }, + { + "epoch": 0.6327345309381237, + "grad_norm": 0.09882557994543832, + "learning_rate": 0.0003139476562115021, + "loss": 1.572, + "step": 6974 + }, + { + "epoch": 0.6328252585737616, + "grad_norm": 0.09890149802879006, + "learning_rate": 0.00031381128798132997, + "loss": 1.5434, + "step": 6975 + }, + { + "epoch": 0.6329159862093994, + "grad_norm": 0.10153220654353928, + "learning_rate": 0.00031367493582858054, + "loss": 1.5649, + "step": 6976 + }, + { + "epoch": 0.6330067138450372, + "grad_norm": 0.09849793934705942, + "learning_rate": 0.000313538599765028, + "loss": 1.5924, + "step": 6977 + }, + { + "epoch": 0.633097441480675, + "grad_norm": 0.0964569719193802, + "learning_rate": 0.00031340227980244473, + "loss": 1.544, + "step": 6978 + }, + { + "epoch": 0.6331881691163128, + "grad_norm": 0.10286766989798707, + "learning_rate": 0.00031326597595260195, + "loss": 1.5847, + "step": 6979 + }, + { + "epoch": 0.6332788967519506, + "grad_norm": 0.09781995234636282, + "learning_rate": 0.0003131296882272698, + "loss": 1.5648, + "step": 6980 + }, + { + "epoch": 
0.6333696243875885, + "grad_norm": 0.1028087228368034, + "learning_rate": 0.0003129934166382168, + "loss": 1.5755, + "step": 6981 + }, + { + "epoch": 0.6334603520232263, + "grad_norm": 0.09893134658532943, + "learning_rate": 0.00031285716119720965, + "loss": 1.5457, + "step": 6982 + }, + { + "epoch": 0.633551079658864, + "grad_norm": 0.09810572713050798, + "learning_rate": 0.0003127209219160142, + "loss": 1.5253, + "step": 6983 + }, + { + "epoch": 0.6336418072945019, + "grad_norm": 0.09664359341649212, + "learning_rate": 0.00031258469880639494, + "loss": 1.592, + "step": 6984 + }, + { + "epoch": 0.6337325349301397, + "grad_norm": 0.10007557606585885, + "learning_rate": 0.0003124484918801144, + "loss": 1.5801, + "step": 6985 + }, + { + "epoch": 0.6338232625657776, + "grad_norm": 0.10163312875717459, + "learning_rate": 0.00031231230114893416, + "loss": 1.589, + "step": 6986 + }, + { + "epoch": 0.6339139902014154, + "grad_norm": 0.10024491564991904, + "learning_rate": 0.0003121761266246144, + "loss": 1.5685, + "step": 6987 + }, + { + "epoch": 0.6340047178370531, + "grad_norm": 0.09730771014645334, + "learning_rate": 0.0003120399683189139, + "loss": 1.5641, + "step": 6988 + }, + { + "epoch": 0.634095445472691, + "grad_norm": 0.09653864926663658, + "learning_rate": 0.00031190382624358975, + "loss": 1.5766, + "step": 6989 + }, + { + "epoch": 0.6341861731083288, + "grad_norm": 0.09787112168498122, + "learning_rate": 0.00031176770041039793, + "loss": 1.5783, + "step": 6990 + }, + { + "epoch": 0.6342769007439666, + "grad_norm": 0.10102245994979508, + "learning_rate": 0.0003116315908310931, + "loss": 1.5811, + "step": 6991 + }, + { + "epoch": 0.6343676283796045, + "grad_norm": 0.10094506263328301, + "learning_rate": 0.000311495497517428, + "loss": 1.6029, + "step": 6992 + }, + { + "epoch": 0.6344583560152423, + "grad_norm": 0.09932488447455681, + "learning_rate": 0.0003113594204811544, + "loss": 1.5408, + "step": 6993 + }, + { + "epoch": 0.63454908365088, + "grad_norm": 0.09859366740638012, + "learning_rate": 0.0003112233597340228, + "loss": 1.6053, + "step": 6994 + }, + { + "epoch": 0.6346398112865179, + "grad_norm": 0.09605707298742916, + "learning_rate": 0.00031108731528778165, + "loss": 1.5373, + "step": 6995 + }, + { + "epoch": 0.6347305389221557, + "grad_norm": 0.10335898999319512, + "learning_rate": 0.000310951287154179, + "loss": 1.6091, + "step": 6996 + }, + { + "epoch": 0.6348212665577935, + "grad_norm": 0.09774902072462688, + "learning_rate": 0.00031081527534496036, + "loss": 1.6069, + "step": 6997 + }, + { + "epoch": 0.6349119941934314, + "grad_norm": 0.09736611772096962, + "learning_rate": 0.0003106792798718707, + "loss": 1.5511, + "step": 6998 + }, + { + "epoch": 0.6350027218290691, + "grad_norm": 0.09879227830088677, + "learning_rate": 0.0003105433007466531, + "loss": 1.5898, + "step": 6999 + }, + { + "epoch": 0.6350934494647069, + "grad_norm": 0.0967189159107507, + "learning_rate": 0.00031040733798104935, + "loss": 1.5809, + "step": 7000 + }, + { + "epoch": 0.6351841771003448, + "grad_norm": 0.09705951076437705, + "learning_rate": 0.0003102713915868001, + "loss": 1.5742, + "step": 7001 + }, + { + "epoch": 0.6352749047359826, + "grad_norm": 0.0987845804013216, + "learning_rate": 0.0003101354615756439, + "loss": 1.548, + "step": 7002 + }, + { + "epoch": 0.6353656323716204, + "grad_norm": 0.09434923542692245, + "learning_rate": 0.00030999954795931853, + "loss": 1.5448, + "step": 7003 + }, + { + "epoch": 0.6354563600072582, + "grad_norm": 0.09465148936092856, + "learning_rate": 
0.00030986365074956037, + "loss": 1.5645, + "step": 7004 + }, + { + "epoch": 0.635547087642896, + "grad_norm": 0.0965864554477857, + "learning_rate": 0.0003097277699581039, + "loss": 1.5673, + "step": 7005 + }, + { + "epoch": 0.6356378152785338, + "grad_norm": 0.09998965588717017, + "learning_rate": 0.00030959190559668267, + "loss": 1.5583, + "step": 7006 + }, + { + "epoch": 0.6357285429141717, + "grad_norm": 0.09737737329868618, + "learning_rate": 0.00030945605767702835, + "loss": 1.5638, + "step": 7007 + }, + { + "epoch": 0.6358192705498095, + "grad_norm": 0.10139643596357974, + "learning_rate": 0.0003093202262108716, + "loss": 1.5762, + "step": 7008 + }, + { + "epoch": 0.6359099981854472, + "grad_norm": 0.09514035193534602, + "learning_rate": 0.0003091844112099413, + "loss": 1.5692, + "step": 7009 + }, + { + "epoch": 0.6360007258210851, + "grad_norm": 0.09775288351738201, + "learning_rate": 0.00030904861268596525, + "loss": 1.6075, + "step": 7010 + }, + { + "epoch": 0.6360914534567229, + "grad_norm": 0.09761450373709771, + "learning_rate": 0.00030891283065066967, + "loss": 1.5576, + "step": 7011 + }, + { + "epoch": 0.6361821810923607, + "grad_norm": 0.09820831824103822, + "learning_rate": 0.0003087770651157793, + "loss": 1.6022, + "step": 7012 + }, + { + "epoch": 0.6362729087279986, + "grad_norm": 0.10094793223996563, + "learning_rate": 0.00030864131609301766, + "loss": 1.5485, + "step": 7013 + }, + { + "epoch": 0.6363636363636364, + "grad_norm": 0.10157771268125693, + "learning_rate": 0.00030850558359410646, + "loss": 1.5699, + "step": 7014 + }, + { + "epoch": 0.6364543639992741, + "grad_norm": 0.09899137019007859, + "learning_rate": 0.00030836986763076635, + "loss": 1.5439, + "step": 7015 + }, + { + "epoch": 0.636545091634912, + "grad_norm": 0.09922751718512637, + "learning_rate": 0.0003082341682147165, + "loss": 1.5464, + "step": 7016 + }, + { + "epoch": 0.6366358192705498, + "grad_norm": 0.10052197850589684, + "learning_rate": 0.00030809848535767444, + "loss": 1.5608, + "step": 7017 + }, + { + "epoch": 0.6367265469061876, + "grad_norm": 0.10047759730066995, + "learning_rate": 0.0003079628190713563, + "loss": 1.554, + "step": 7018 + }, + { + "epoch": 0.6368172745418255, + "grad_norm": 0.09905270274652629, + "learning_rate": 0.0003078271693674772, + "loss": 1.5787, + "step": 7019 + }, + { + "epoch": 0.6369080021774632, + "grad_norm": 0.10133596910284944, + "learning_rate": 0.00030769153625775036, + "loss": 1.5633, + "step": 7020 + }, + { + "epoch": 0.636998729813101, + "grad_norm": 0.098684496201882, + "learning_rate": 0.0003075559197538876, + "loss": 1.5914, + "step": 7021 + }, + { + "epoch": 0.6370894574487389, + "grad_norm": 0.10095375080490648, + "learning_rate": 0.0003074203198675997, + "loss": 1.5729, + "step": 7022 + }, + { + "epoch": 0.6371801850843767, + "grad_norm": 0.09993407287859132, + "learning_rate": 0.00030728473661059555, + "loss": 1.5373, + "step": 7023 + }, + { + "epoch": 0.6372709127200146, + "grad_norm": 0.09644157296253617, + "learning_rate": 0.00030714916999458264, + "loss": 1.5627, + "step": 7024 + }, + { + "epoch": 0.6373616403556523, + "grad_norm": 0.100690420134986, + "learning_rate": 0.00030701362003126735, + "loss": 1.6319, + "step": 7025 + }, + { + "epoch": 0.6374523679912901, + "grad_norm": 0.09634713148113397, + "learning_rate": 0.0003068780867323544, + "loss": 1.591, + "step": 7026 + }, + { + "epoch": 0.637543095626928, + "grad_norm": 0.10339732700193419, + "learning_rate": 0.00030674257010954723, + "loss": 1.587, + "step": 7027 + }, + { + "epoch": 
0.6376338232625658, + "grad_norm": 0.09999855539092099, + "learning_rate": 0.00030660707017454757, + "loss": 1.5385, + "step": 7028 + }, + { + "epoch": 0.6377245508982036, + "grad_norm": 0.10029516770613527, + "learning_rate": 0.00030647158693905584, + "loss": 1.5903, + "step": 7029 + }, + { + "epoch": 0.6378152785338415, + "grad_norm": 0.0950947034328304, + "learning_rate": 0.00030633612041477124, + "loss": 1.5511, + "step": 7030 + }, + { + "epoch": 0.6379060061694792, + "grad_norm": 0.09681306958023203, + "learning_rate": 0.0003062006706133911, + "loss": 1.5468, + "step": 7031 + }, + { + "epoch": 0.637996733805117, + "grad_norm": 0.09749624059937993, + "learning_rate": 0.0003060652375466116, + "loss": 1.5978, + "step": 7032 + }, + { + "epoch": 0.6380874614407549, + "grad_norm": 0.09610407296548683, + "learning_rate": 0.00030592982122612755, + "loss": 1.5597, + "step": 7033 + }, + { + "epoch": 0.6381781890763927, + "grad_norm": 0.09804750601767401, + "learning_rate": 0.0003057944216636317, + "loss": 1.5631, + "step": 7034 + }, + { + "epoch": 0.6382689167120305, + "grad_norm": 0.10294242232512257, + "learning_rate": 0.00030565903887081646, + "loss": 1.5853, + "step": 7035 + }, + { + "epoch": 0.6383596443476683, + "grad_norm": 0.10068717617272917, + "learning_rate": 0.00030552367285937177, + "loss": 1.6368, + "step": 7036 + }, + { + "epoch": 0.6384503719833061, + "grad_norm": 0.09718092484682748, + "learning_rate": 0.00030538832364098676, + "loss": 1.5601, + "step": 7037 + }, + { + "epoch": 0.6385410996189439, + "grad_norm": 0.09944836819282322, + "learning_rate": 0.0003052529912273485, + "loss": 1.594, + "step": 7038 + }, + { + "epoch": 0.6386318272545818, + "grad_norm": 0.10123033613976765, + "learning_rate": 0.0003051176756301431, + "loss": 1.5459, + "step": 7039 + }, + { + "epoch": 0.6387225548902196, + "grad_norm": 0.09892970982454938, + "learning_rate": 0.00030498237686105536, + "loss": 1.5255, + "step": 7040 + }, + { + "epoch": 0.6388132825258573, + "grad_norm": 0.09853588026358447, + "learning_rate": 0.000304847094931768, + "loss": 1.5537, + "step": 7041 + }, + { + "epoch": 0.6389040101614952, + "grad_norm": 0.09583983336726026, + "learning_rate": 0.0003047118298539626, + "loss": 1.5587, + "step": 7042 + }, + { + "epoch": 0.638994737797133, + "grad_norm": 0.09659269318872786, + "learning_rate": 0.0003045765816393196, + "loss": 1.5237, + "step": 7043 + }, + { + "epoch": 0.6390854654327708, + "grad_norm": 0.09841273189114126, + "learning_rate": 0.0003044413502995176, + "loss": 1.5507, + "step": 7044 + }, + { + "epoch": 0.6391761930684087, + "grad_norm": 0.09944243760035697, + "learning_rate": 0.0003043061358462339, + "loss": 1.6033, + "step": 7045 + }, + { + "epoch": 0.6392669207040464, + "grad_norm": 0.10632968513951012, + "learning_rate": 0.00030417093829114404, + "loss": 1.5672, + "step": 7046 + }, + { + "epoch": 0.6393576483396842, + "grad_norm": 0.10313790447811047, + "learning_rate": 0.00030403575764592275, + "loss": 1.532, + "step": 7047 + }, + { + "epoch": 0.6394483759753221, + "grad_norm": 0.09785945770917842, + "learning_rate": 0.00030390059392224246, + "loss": 1.6058, + "step": 7048 + }, + { + "epoch": 0.6395391036109599, + "grad_norm": 0.09781690085769136, + "learning_rate": 0.0003037654471317748, + "loss": 1.5296, + "step": 7049 + }, + { + "epoch": 0.6396298312465977, + "grad_norm": 0.10166764567864521, + "learning_rate": 0.0003036303172861897, + "loss": 1.5717, + "step": 7050 + }, + { + "epoch": 0.6397205588822356, + "grad_norm": 0.10513543199140352, + "learning_rate": 
0.0003034952043971557, + "loss": 1.5887, + "step": 7051 + }, + { + "epoch": 0.6398112865178733, + "grad_norm": 0.10072619491155341, + "learning_rate": 0.0003033601084763398, + "loss": 1.5877, + "step": 7052 + }, + { + "epoch": 0.6399020141535111, + "grad_norm": 0.10061512437190548, + "learning_rate": 0.0003032250295354075, + "loss": 1.584, + "step": 7053 + }, + { + "epoch": 0.639992741789149, + "grad_norm": 0.09382910998406889, + "learning_rate": 0.00030308996758602284, + "loss": 1.593, + "step": 7054 + }, + { + "epoch": 0.6400834694247868, + "grad_norm": 0.09758339823901827, + "learning_rate": 0.00030295492263984866, + "loss": 1.5756, + "step": 7055 + }, + { + "epoch": 0.6401741970604246, + "grad_norm": 0.10207067957858568, + "learning_rate": 0.00030281989470854577, + "loss": 1.568, + "step": 7056 + }, + { + "epoch": 0.6402649246960624, + "grad_norm": 0.09989330233585436, + "learning_rate": 0.00030268488380377404, + "loss": 1.6054, + "step": 7057 + }, + { + "epoch": 0.6403556523317002, + "grad_norm": 0.09762624051808817, + "learning_rate": 0.0003025498899371917, + "loss": 1.5526, + "step": 7058 + }, + { + "epoch": 0.640446379967338, + "grad_norm": 0.09932096396051132, + "learning_rate": 0.00030241491312045553, + "loss": 1.5686, + "step": 7059 + }, + { + "epoch": 0.6405371076029759, + "grad_norm": 0.10305893685320727, + "learning_rate": 0.00030227995336522067, + "loss": 1.5488, + "step": 7060 + }, + { + "epoch": 0.6406278352386137, + "grad_norm": 0.09863973791335082, + "learning_rate": 0.00030214501068314103, + "loss": 1.5429, + "step": 7061 + }, + { + "epoch": 0.6407185628742516, + "grad_norm": 0.09632151020970439, + "learning_rate": 0.0003020100850858689, + "loss": 1.5789, + "step": 7062 + }, + { + "epoch": 0.6408092905098893, + "grad_norm": 0.10085190626441995, + "learning_rate": 0.000301875176585055, + "loss": 1.5788, + "step": 7063 + }, + { + "epoch": 0.6409000181455271, + "grad_norm": 0.09879918612422958, + "learning_rate": 0.00030174028519234884, + "loss": 1.5635, + "step": 7064 + }, + { + "epoch": 0.640990745781165, + "grad_norm": 0.09792594001985831, + "learning_rate": 0.0003016054109193982, + "loss": 1.578, + "step": 7065 + }, + { + "epoch": 0.6410814734168028, + "grad_norm": 0.09672499999917807, + "learning_rate": 0.00030147055377784983, + "loss": 1.5614, + "step": 7066 + }, + { + "epoch": 0.6411722010524405, + "grad_norm": 0.12374794974931698, + "learning_rate": 0.00030133571377934814, + "loss": 1.6014, + "step": 7067 + }, + { + "epoch": 0.6412629286880784, + "grad_norm": 0.0950989590743159, + "learning_rate": 0.00030120089093553694, + "loss": 1.575, + "step": 7068 + }, + { + "epoch": 0.6413536563237162, + "grad_norm": 0.10163522727449384, + "learning_rate": 0.0003010660852580582, + "loss": 1.558, + "step": 7069 + }, + { + "epoch": 0.641444383959354, + "grad_norm": 0.101949188172992, + "learning_rate": 0.0003009312967585522, + "loss": 1.5168, + "step": 7070 + }, + { + "epoch": 0.6415351115949919, + "grad_norm": 0.09986726210790071, + "learning_rate": 0.0003007965254486581, + "loss": 1.5657, + "step": 7071 + }, + { + "epoch": 0.6416258392306297, + "grad_norm": 0.09627160527518688, + "learning_rate": 0.00030066177134001343, + "loss": 1.5872, + "step": 7072 + }, + { + "epoch": 0.6417165668662674, + "grad_norm": 0.0993971293052338, + "learning_rate": 0.0003005270344442539, + "loss": 1.6033, + "step": 7073 + }, + { + "epoch": 0.6418072945019053, + "grad_norm": 0.10127348835318876, + "learning_rate": 0.00030039231477301464, + "loss": 1.5668, + "step": 7074 + }, + { + "epoch": 
0.6418980221375431, + "grad_norm": 0.09781555477811107, + "learning_rate": 0.00030025761233792836, + "loss": 1.5855, + "step": 7075 + }, + { + "epoch": 0.6419887497731809, + "grad_norm": 0.09824287757978643, + "learning_rate": 0.0003001229271506268, + "loss": 1.6164, + "step": 7076 + }, + { + "epoch": 0.6420794774088188, + "grad_norm": 0.09771206180588961, + "learning_rate": 0.00029998825922273974, + "loss": 1.5702, + "step": 7077 + }, + { + "epoch": 0.6421702050444565, + "grad_norm": 0.09877021022520262, + "learning_rate": 0.00029985360856589607, + "loss": 1.5582, + "step": 7078 + }, + { + "epoch": 0.6422609326800943, + "grad_norm": 0.10121556218104506, + "learning_rate": 0.00029971897519172287, + "loss": 1.5528, + "step": 7079 + }, + { + "epoch": 0.6423516603157322, + "grad_norm": 0.09930190307996535, + "learning_rate": 0.00029958435911184555, + "loss": 1.5631, + "step": 7080 + }, + { + "epoch": 0.64244238795137, + "grad_norm": 0.09601069952246559, + "learning_rate": 0.0002994497603378883, + "loss": 1.5839, + "step": 7081 + }, + { + "epoch": 0.6425331155870078, + "grad_norm": 0.09734443331382922, + "learning_rate": 0.00029931517888147395, + "loss": 1.5513, + "step": 7082 + }, + { + "epoch": 0.6426238432226457, + "grad_norm": 0.09760654653184375, + "learning_rate": 0.0002991806147542234, + "loss": 1.5874, + "step": 7083 + }, + { + "epoch": 0.6427145708582834, + "grad_norm": 0.09801412361500608, + "learning_rate": 0.00029904606796775645, + "loss": 1.5535, + "step": 7084 + }, + { + "epoch": 0.6428052984939212, + "grad_norm": 0.09593456855638163, + "learning_rate": 0.0002989115385336911, + "loss": 1.56, + "step": 7085 + }, + { + "epoch": 0.6428960261295591, + "grad_norm": 0.09733053140507354, + "learning_rate": 0.0002987770264636441, + "loss": 1.589, + "step": 7086 + }, + { + "epoch": 0.6429867537651969, + "grad_norm": 0.09755174846886014, + "learning_rate": 0.0002986425317692305, + "loss": 1.6189, + "step": 7087 + }, + { + "epoch": 0.6430774814008346, + "grad_norm": 0.09953598419707489, + "learning_rate": 0.00029850805446206383, + "loss": 1.5884, + "step": 7088 + }, + { + "epoch": 0.6431682090364725, + "grad_norm": 0.09900596321443716, + "learning_rate": 0.0002983735945537564, + "loss": 1.5708, + "step": 7089 + }, + { + "epoch": 0.6432589366721103, + "grad_norm": 0.09980548603764994, + "learning_rate": 0.00029823915205591886, + "loss": 1.5761, + "step": 7090 + }, + { + "epoch": 0.6433496643077481, + "grad_norm": 0.09702768026606498, + "learning_rate": 0.00029810472698016036, + "loss": 1.5711, + "step": 7091 + }, + { + "epoch": 0.643440391943386, + "grad_norm": 0.10005848507714599, + "learning_rate": 0.00029797031933808825, + "loss": 1.559, + "step": 7092 + }, + { + "epoch": 0.6435311195790238, + "grad_norm": 0.10451803226106673, + "learning_rate": 0.00029783592914130896, + "loss": 1.581, + "step": 7093 + }, + { + "epoch": 0.6436218472146615, + "grad_norm": 0.0990216293131509, + "learning_rate": 0.00029770155640142704, + "loss": 1.5872, + "step": 7094 + }, + { + "epoch": 0.6437125748502994, + "grad_norm": 0.09848414389852249, + "learning_rate": 0.00029756720113004544, + "loss": 1.5384, + "step": 7095 + }, + { + "epoch": 0.6438033024859372, + "grad_norm": 0.11079955720429942, + "learning_rate": 0.00029743286333876575, + "loss": 1.5289, + "step": 7096 + }, + { + "epoch": 0.643894030121575, + "grad_norm": 0.09980795102827661, + "learning_rate": 0.00029729854303918825, + "loss": 1.5829, + "step": 7097 + }, + { + "epoch": 0.6439847577572129, + "grad_norm": 0.10051045114692811, + "learning_rate": 
0.00029716424024291155, + "loss": 1.5688, + "step": 7098 + }, + { + "epoch": 0.6440754853928506, + "grad_norm": 0.09717511638363611, + "learning_rate": 0.0002970299549615325, + "loss": 1.5586, + "step": 7099 + }, + { + "epoch": 0.6441662130284885, + "grad_norm": 0.09760549394350672, + "learning_rate": 0.00029689568720664677, + "loss": 1.5541, + "step": 7100 + }, + { + "epoch": 0.6442569406641263, + "grad_norm": 0.09849462861743723, + "learning_rate": 0.0002967614369898485, + "loss": 1.5279, + "step": 7101 + }, + { + "epoch": 0.6443476682997641, + "grad_norm": 0.09933712204624731, + "learning_rate": 0.00029662720432272995, + "loss": 1.5833, + "step": 7102 + }, + { + "epoch": 0.644438395935402, + "grad_norm": 0.09796408778316357, + "learning_rate": 0.00029649298921688227, + "loss": 1.554, + "step": 7103 + }, + { + "epoch": 0.6445291235710398, + "grad_norm": 0.09703376199283895, + "learning_rate": 0.00029635879168389497, + "loss": 1.5747, + "step": 7104 + }, + { + "epoch": 0.6446198512066775, + "grad_norm": 0.09740159419152655, + "learning_rate": 0.00029622461173535615, + "loss": 1.5604, + "step": 7105 + }, + { + "epoch": 0.6447105788423154, + "grad_norm": 0.09925507686374359, + "learning_rate": 0.000296090449382852, + "loss": 1.5673, + "step": 7106 + }, + { + "epoch": 0.6448013064779532, + "grad_norm": 0.10173558296961221, + "learning_rate": 0.0002959563046379676, + "loss": 1.5445, + "step": 7107 + }, + { + "epoch": 0.644892034113591, + "grad_norm": 0.10314238834041317, + "learning_rate": 0.00029582217751228656, + "loss": 1.5998, + "step": 7108 + }, + { + "epoch": 0.6449827617492289, + "grad_norm": 0.09675381864284836, + "learning_rate": 0.00029568806801739045, + "loss": 1.5588, + "step": 7109 + }, + { + "epoch": 0.6450734893848666, + "grad_norm": 0.10105552078089731, + "learning_rate": 0.00029555397616485977, + "loss": 1.5974, + "step": 7110 + }, + { + "epoch": 0.6451642170205044, + "grad_norm": 0.09838047669450442, + "learning_rate": 0.0002954199019662734, + "loss": 1.6299, + "step": 7111 + }, + { + "epoch": 0.6452549446561423, + "grad_norm": 0.10023563981814174, + "learning_rate": 0.00029528584543320847, + "loss": 1.5259, + "step": 7112 + }, + { + "epoch": 0.6453456722917801, + "grad_norm": 0.09760662736332291, + "learning_rate": 0.0002951518065772412, + "loss": 1.5575, + "step": 7113 + }, + { + "epoch": 0.6454363999274179, + "grad_norm": 0.09943884923684915, + "learning_rate": 0.00029501778540994553, + "loss": 1.5578, + "step": 7114 + }, + { + "epoch": 0.6455271275630557, + "grad_norm": 0.09941876640581362, + "learning_rate": 0.00029488378194289446, + "loss": 1.5864, + "step": 7115 + }, + { + "epoch": 0.6456178551986935, + "grad_norm": 0.09698143208023635, + "learning_rate": 0.00029474979618765885, + "loss": 1.5769, + "step": 7116 + }, + { + "epoch": 0.6457085828343313, + "grad_norm": 0.10117571998929259, + "learning_rate": 0.00029461582815580866, + "loss": 1.5939, + "step": 7117 + }, + { + "epoch": 0.6457993104699692, + "grad_norm": 0.10129907213228793, + "learning_rate": 0.00029448187785891206, + "loss": 1.5443, + "step": 7118 + }, + { + "epoch": 0.645890038105607, + "grad_norm": 0.10141258274762116, + "learning_rate": 0.0002943479453085355, + "loss": 1.6018, + "step": 7119 + }, + { + "epoch": 0.6459807657412447, + "grad_norm": 0.09937620183602011, + "learning_rate": 0.00029421403051624404, + "loss": 1.553, + "step": 7120 + }, + { + "epoch": 0.6460714933768826, + "grad_norm": 0.09734598943889589, + "learning_rate": 0.00029408013349360154, + "loss": 1.571, + "step": 7121 + }, + { + 
"epoch": 0.6461622210125204, + "grad_norm": 0.09757669513089606, + "learning_rate": 0.00029394625425216984, + "loss": 1.5642, + "step": 7122 + }, + { + "epoch": 0.6462529486481582, + "grad_norm": 0.10255151712384589, + "learning_rate": 0.00029381239280350946, + "loss": 1.6129, + "step": 7123 + }, + { + "epoch": 0.6463436762837961, + "grad_norm": 0.09942509712065183, + "learning_rate": 0.00029367854915917936, + "loss": 1.5953, + "step": 7124 + }, + { + "epoch": 0.6464344039194339, + "grad_norm": 0.09803128068252087, + "learning_rate": 0.000293544723330737, + "loss": 1.5478, + "step": 7125 + }, + { + "epoch": 0.6465251315550716, + "grad_norm": 0.09815762116207029, + "learning_rate": 0.00029341091532973814, + "loss": 1.5667, + "step": 7126 + }, + { + "epoch": 0.6466158591907095, + "grad_norm": 0.09617201473149686, + "learning_rate": 0.00029327712516773706, + "loss": 1.5163, + "step": 7127 + }, + { + "epoch": 0.6467065868263473, + "grad_norm": 0.09823967586194819, + "learning_rate": 0.0002931433528562868, + "loss": 1.5524, + "step": 7128 + }, + { + "epoch": 0.6467973144619851, + "grad_norm": 0.09734494748407366, + "learning_rate": 0.00029300959840693853, + "loss": 1.6183, + "step": 7129 + }, + { + "epoch": 0.646888042097623, + "grad_norm": 0.09975215446193766, + "learning_rate": 0.000292875861831242, + "loss": 1.57, + "step": 7130 + }, + { + "epoch": 0.6469787697332607, + "grad_norm": 0.09989218391342468, + "learning_rate": 0.0002927421431407452, + "loss": 1.5287, + "step": 7131 + }, + { + "epoch": 0.6470694973688985, + "grad_norm": 0.1026304459031267, + "learning_rate": 0.000292608442346995, + "loss": 1.5811, + "step": 7132 + }, + { + "epoch": 0.6471602250045364, + "grad_norm": 0.09824827827253467, + "learning_rate": 0.0002924747594615364, + "loss": 1.5767, + "step": 7133 + }, + { + "epoch": 0.6472509526401742, + "grad_norm": 0.10677394464712155, + "learning_rate": 0.00029234109449591283, + "loss": 1.5811, + "step": 7134 + }, + { + "epoch": 0.647341680275812, + "grad_norm": 0.09987629815445859, + "learning_rate": 0.00029220744746166625, + "loss": 1.5757, + "step": 7135 + }, + { + "epoch": 0.6474324079114498, + "grad_norm": 0.1043690367737497, + "learning_rate": 0.0002920738183703373, + "loss": 1.5681, + "step": 7136 + }, + { + "epoch": 0.6475231355470876, + "grad_norm": 0.10019818087014538, + "learning_rate": 0.0002919402072334648, + "loss": 1.5616, + "step": 7137 + }, + { + "epoch": 0.6476138631827255, + "grad_norm": 0.10175186149981176, + "learning_rate": 0.000291806614062586, + "loss": 1.5893, + "step": 7138 + }, + { + "epoch": 0.6477045908183633, + "grad_norm": 0.10066265945227383, + "learning_rate": 0.00029167303886923693, + "loss": 1.5535, + "step": 7139 + }, + { + "epoch": 0.6477953184540011, + "grad_norm": 0.10070195572272468, + "learning_rate": 0.0002915394816649516, + "loss": 1.5669, + "step": 7140 + }, + { + "epoch": 0.647886046089639, + "grad_norm": 0.09843873127818453, + "learning_rate": 0.0002914059424612628, + "loss": 1.5301, + "step": 7141 + }, + { + "epoch": 0.6479767737252767, + "grad_norm": 0.09724777959685646, + "learning_rate": 0.0002912724212697014, + "loss": 1.5598, + "step": 7142 + }, + { + "epoch": 0.6480675013609145, + "grad_norm": 0.10073278404438307, + "learning_rate": 0.00029113891810179715, + "loss": 1.5455, + "step": 7143 + }, + { + "epoch": 0.6481582289965524, + "grad_norm": 0.10190769566208942, + "learning_rate": 0.0002910054329690784, + "loss": 1.5472, + "step": 7144 + }, + { + "epoch": 0.6482489566321902, + "grad_norm": 0.10203149308025905, + 
"learning_rate": 0.000290871965883071, + "loss": 1.5411, + "step": 7145 + }, + { + "epoch": 0.648339684267828, + "grad_norm": 0.09993761985552799, + "learning_rate": 0.00029073851685530044, + "loss": 1.5417, + "step": 7146 + }, + { + "epoch": 0.6484304119034658, + "grad_norm": 0.09492396191097183, + "learning_rate": 0.0002906050858972898, + "loss": 1.5834, + "step": 7147 + }, + { + "epoch": 0.6485211395391036, + "grad_norm": 0.09975723555235262, + "learning_rate": 0.00029047167302056066, + "loss": 1.5935, + "step": 7148 + }, + { + "epoch": 0.6486118671747414, + "grad_norm": 0.09975241672153747, + "learning_rate": 0.0002903382782366336, + "loss": 1.5522, + "step": 7149 + }, + { + "epoch": 0.6487025948103793, + "grad_norm": 0.10403614265888494, + "learning_rate": 0.0002902049015570271, + "loss": 1.5696, + "step": 7150 + }, + { + "epoch": 0.6487933224460171, + "grad_norm": 0.1004639449483825, + "learning_rate": 0.0002900715429932581, + "loss": 1.5465, + "step": 7151 + }, + { + "epoch": 0.6488840500816548, + "grad_norm": 0.10068633300767857, + "learning_rate": 0.0002899382025568423, + "loss": 1.5401, + "step": 7152 + }, + { + "epoch": 0.6489747777172927, + "grad_norm": 0.11071066114028752, + "learning_rate": 0.00028980488025929385, + "loss": 1.5752, + "step": 7153 + }, + { + "epoch": 0.6490655053529305, + "grad_norm": 0.09771012527937178, + "learning_rate": 0.00028967157611212505, + "loss": 1.5732, + "step": 7154 + }, + { + "epoch": 0.6491562329885683, + "grad_norm": 0.10089777025593559, + "learning_rate": 0.0002895382901268463, + "loss": 1.5952, + "step": 7155 + }, + { + "epoch": 0.6492469606242062, + "grad_norm": 0.09999767185911845, + "learning_rate": 0.0002894050223149676, + "loss": 1.5869, + "step": 7156 + }, + { + "epoch": 0.649337688259844, + "grad_norm": 0.10104402541329693, + "learning_rate": 0.00028927177268799607, + "loss": 1.5968, + "step": 7157 + }, + { + "epoch": 0.6494284158954817, + "grad_norm": 0.10060757447461119, + "learning_rate": 0.00028913854125743794, + "loss": 1.547, + "step": 7158 + }, + { + "epoch": 0.6495191435311196, + "grad_norm": 0.10068918746889752, + "learning_rate": 0.00028900532803479784, + "loss": 1.5644, + "step": 7159 + }, + { + "epoch": 0.6496098711667574, + "grad_norm": 0.09318215546032074, + "learning_rate": 0.00028887213303157894, + "loss": 1.58, + "step": 7160 + }, + { + "epoch": 0.6497005988023952, + "grad_norm": 0.09980988840300817, + "learning_rate": 0.0002887389562592825, + "loss": 1.5342, + "step": 7161 + }, + { + "epoch": 0.6497913264380331, + "grad_norm": 0.09841471346978739, + "learning_rate": 0.0002886057977294081, + "loss": 1.5612, + "step": 7162 + }, + { + "epoch": 0.6498820540736708, + "grad_norm": 0.10119852764358914, + "learning_rate": 0.00028847265745345443, + "loss": 1.5907, + "step": 7163 + }, + { + "epoch": 0.6499727817093086, + "grad_norm": 0.10343368455613251, + "learning_rate": 0.00028833953544291796, + "loss": 1.5907, + "step": 7164 + }, + { + "epoch": 0.6500635093449465, + "grad_norm": 0.09672643111287813, + "learning_rate": 0.00028820643170929364, + "loss": 1.5673, + "step": 7165 + }, + { + "epoch": 0.6501542369805843, + "grad_norm": 0.09607160200659098, + "learning_rate": 0.0002880733462640751, + "loss": 1.5655, + "step": 7166 + }, + { + "epoch": 0.650244964616222, + "grad_norm": 0.09943175165958609, + "learning_rate": 0.0002879402791187545, + "loss": 1.5899, + "step": 7167 + }, + { + "epoch": 0.6503356922518599, + "grad_norm": 0.10106978015052133, + "learning_rate": 0.00028780723028482214, + "loss": 1.5954, + "step": 7168 + }, 
+ { + "epoch": 0.6504264198874977, + "grad_norm": 0.09641857783097331, + "learning_rate": 0.0002876741997737665, + "loss": 1.5776, + "step": 7169 + }, + { + "epoch": 0.6505171475231355, + "grad_norm": 0.09823984744075635, + "learning_rate": 0.00028754118759707523, + "loss": 1.5637, + "step": 7170 + }, + { + "epoch": 0.6506078751587734, + "grad_norm": 0.09973806304296332, + "learning_rate": 0.00028740819376623375, + "loss": 1.5791, + "step": 7171 + }, + { + "epoch": 0.6506986027944112, + "grad_norm": 0.10497686586806704, + "learning_rate": 0.0002872752182927257, + "loss": 1.6115, + "step": 7172 + }, + { + "epoch": 0.6507893304300489, + "grad_norm": 0.10183565997846829, + "learning_rate": 0.00028714226118803425, + "loss": 1.5783, + "step": 7173 + }, + { + "epoch": 0.6508800580656868, + "grad_norm": 0.10339706784786408, + "learning_rate": 0.00028700932246363974, + "loss": 1.5859, + "step": 7174 + }, + { + "epoch": 0.6509707857013246, + "grad_norm": 0.09676195758545668, + "learning_rate": 0.0002868764021310217, + "loss": 1.5447, + "step": 7175 + }, + { + "epoch": 0.6510615133369625, + "grad_norm": 0.10320293673955643, + "learning_rate": 0.00028674350020165766, + "loss": 1.5159, + "step": 7176 + }, + { + "epoch": 0.6511522409726003, + "grad_norm": 0.10600204885813633, + "learning_rate": 0.000286610616687024, + "loss": 1.5667, + "step": 7177 + }, + { + "epoch": 0.651242968608238, + "grad_norm": 0.11024141552347248, + "learning_rate": 0.00028647775159859504, + "loss": 1.5552, + "step": 7178 + }, + { + "epoch": 0.6513336962438759, + "grad_norm": 0.10006916285657115, + "learning_rate": 0.00028634490494784345, + "loss": 1.5782, + "step": 7179 + }, + { + "epoch": 0.6514244238795137, + "grad_norm": 0.09814227590425784, + "learning_rate": 0.000286212076746241, + "loss": 1.5169, + "step": 7180 + }, + { + "epoch": 0.6515151515151515, + "grad_norm": 0.09981669415997453, + "learning_rate": 0.0002860792670052572, + "loss": 1.6227, + "step": 7181 + }, + { + "epoch": 0.6516058791507894, + "grad_norm": 0.0970851708529887, + "learning_rate": 0.0002859464757363601, + "loss": 1.5197, + "step": 7182 + }, + { + "epoch": 0.6516966067864272, + "grad_norm": 0.09612044855476007, + "learning_rate": 0.0002858137029510164, + "loss": 1.5445, + "step": 7183 + }, + { + "epoch": 0.6517873344220649, + "grad_norm": 0.09597123909921136, + "learning_rate": 0.00028568094866069114, + "loss": 1.5707, + "step": 7184 + }, + { + "epoch": 0.6518780620577028, + "grad_norm": 0.09643237115399286, + "learning_rate": 0.0002855482128768476, + "loss": 1.5747, + "step": 7185 + }, + { + "epoch": 0.6519687896933406, + "grad_norm": 0.09842886666598277, + "learning_rate": 0.00028541549561094726, + "loss": 1.5926, + "step": 7186 + }, + { + "epoch": 0.6520595173289784, + "grad_norm": 0.09601980969354884, + "learning_rate": 0.0002852827968744507, + "loss": 1.569, + "step": 7187 + }, + { + "epoch": 0.6521502449646163, + "grad_norm": 0.09832716771845024, + "learning_rate": 0.0002851501166788163, + "loss": 1.5811, + "step": 7188 + }, + { + "epoch": 0.652240972600254, + "grad_norm": 0.09826511979521625, + "learning_rate": 0.00028501745503550075, + "loss": 1.5986, + "step": 7189 + }, + { + "epoch": 0.6523317002358918, + "grad_norm": 0.09763553658320655, + "learning_rate": 0.00028488481195595967, + "loss": 1.5656, + "step": 7190 + }, + { + "epoch": 0.6524224278715297, + "grad_norm": 0.09502581346334218, + "learning_rate": 0.000284752187451647, + "loss": 1.5891, + "step": 7191 + }, + { + "epoch": 0.6525131555071675, + "grad_norm": 0.10211262563584148, + 
"learning_rate": 0.0002846195815340146, + "loss": 1.5583, + "step": 7192 + }, + { + "epoch": 0.6526038831428053, + "grad_norm": 0.10028442877166237, + "learning_rate": 0.00028448699421451294, + "loss": 1.503, + "step": 7193 + }, + { + "epoch": 0.6526946107784432, + "grad_norm": 0.09681622976065622, + "learning_rate": 0.0002843544255045912, + "loss": 1.5911, + "step": 7194 + }, + { + "epoch": 0.6527853384140809, + "grad_norm": 0.09309041381155905, + "learning_rate": 0.0002842218754156968, + "loss": 1.5472, + "step": 7195 + }, + { + "epoch": 0.6528760660497187, + "grad_norm": 0.09477146311362772, + "learning_rate": 0.00028408934395927486, + "loss": 1.5553, + "step": 7196 + }, + { + "epoch": 0.6529667936853566, + "grad_norm": 0.10116458043773863, + "learning_rate": 0.0002839568311467702, + "loss": 1.5919, + "step": 7197 + }, + { + "epoch": 0.6530575213209944, + "grad_norm": 0.09684823692937622, + "learning_rate": 0.00028382433698962475, + "loss": 1.5443, + "step": 7198 + }, + { + "epoch": 0.6531482489566321, + "grad_norm": 0.10088681858629119, + "learning_rate": 0.0002836918614992799, + "loss": 1.5695, + "step": 7199 + }, + { + "epoch": 0.65323897659227, + "grad_norm": 0.10273429207825134, + "learning_rate": 0.00028355940468717446, + "loss": 1.5372, + "step": 7200 + }, + { + "epoch": 0.6533297042279078, + "grad_norm": 0.10132612133960697, + "learning_rate": 0.0002834269665647465, + "loss": 1.5716, + "step": 7201 + }, + { + "epoch": 0.6534204318635456, + "grad_norm": 0.10202416719942013, + "learning_rate": 0.000283294547143432, + "loss": 1.5784, + "step": 7202 + }, + { + "epoch": 0.6535111594991835, + "grad_norm": 0.10375211941435959, + "learning_rate": 0.000283162146434665, + "loss": 1.5478, + "step": 7203 + }, + { + "epoch": 0.6536018871348213, + "grad_norm": 0.10042087283364581, + "learning_rate": 0.0002830297644498789, + "loss": 1.5518, + "step": 7204 + }, + { + "epoch": 0.653692614770459, + "grad_norm": 0.09910961776039298, + "learning_rate": 0.00028289740120050435, + "loss": 1.5291, + "step": 7205 + }, + { + "epoch": 0.6537833424060969, + "grad_norm": 0.10222056823512313, + "learning_rate": 0.0002827650566979713, + "loss": 1.5523, + "step": 7206 + }, + { + "epoch": 0.6538740700417347, + "grad_norm": 0.10045224240481863, + "learning_rate": 0.00028263273095370766, + "loss": 1.5649, + "step": 7207 + }, + { + "epoch": 0.6539647976773725, + "grad_norm": 0.09592093881452073, + "learning_rate": 0.0002825004239791398, + "loss": 1.5685, + "step": 7208 + }, + { + "epoch": 0.6540555253130104, + "grad_norm": 0.09804091532324018, + "learning_rate": 0.00028236813578569244, + "loss": 1.5624, + "step": 7209 + }, + { + "epoch": 0.6541462529486481, + "grad_norm": 0.10158816642258353, + "learning_rate": 0.0002822358663847884, + "loss": 1.6003, + "step": 7210 + }, + { + "epoch": 0.6542369805842859, + "grad_norm": 0.09915211814760817, + "learning_rate": 0.0002821036157878496, + "loss": 1.5989, + "step": 7211 + }, + { + "epoch": 0.6543277082199238, + "grad_norm": 0.09684132505960817, + "learning_rate": 0.0002819713840062956, + "loss": 1.606, + "step": 7212 + }, + { + "epoch": 0.6544184358555616, + "grad_norm": 0.10056252348507982, + "learning_rate": 0.00028183917105154444, + "loss": 1.5757, + "step": 7213 + }, + { + "epoch": 0.6545091634911994, + "grad_norm": 0.09811075809782743, + "learning_rate": 0.0002817069769350134, + "loss": 1.5751, + "step": 7214 + }, + { + "epoch": 0.6545998911268373, + "grad_norm": 0.09688557875047242, + "learning_rate": 0.0002815748016681171, + "loss": 1.5263, + "step": 7215 + }, + 
{ + "epoch": 0.654690618762475, + "grad_norm": 0.09581924797866272, + "learning_rate": 0.0002814426452622686, + "loss": 1.5532, + "step": 7216 + }, + { + "epoch": 0.6547813463981129, + "grad_norm": 0.09916220782624346, + "learning_rate": 0.0002813105077288801, + "loss": 1.6077, + "step": 7217 + }, + { + "epoch": 0.6548720740337507, + "grad_norm": 0.09529830310382517, + "learning_rate": 0.0002811783890793615, + "loss": 1.5842, + "step": 7218 + }, + { + "epoch": 0.6549628016693885, + "grad_norm": 0.09653243471569703, + "learning_rate": 0.0002810462893251213, + "loss": 1.5401, + "step": 7219 + }, + { + "epoch": 0.6550535293050264, + "grad_norm": 0.10097300788910556, + "learning_rate": 0.000280914208477566, + "loss": 1.5876, + "step": 7220 + }, + { + "epoch": 0.6551442569406641, + "grad_norm": 0.10082535097107732, + "learning_rate": 0.0002807821465481011, + "loss": 1.5612, + "step": 7221 + }, + { + "epoch": 0.6552349845763019, + "grad_norm": 0.09756770244782959, + "learning_rate": 0.0002806501035481305, + "loss": 1.5983, + "step": 7222 + }, + { + "epoch": 0.6553257122119398, + "grad_norm": 0.09728291051886113, + "learning_rate": 0.00028051807948905537, + "loss": 1.6172, + "step": 7223 + }, + { + "epoch": 0.6554164398475776, + "grad_norm": 0.09573615408336232, + "learning_rate": 0.0002803860743822768, + "loss": 1.5385, + "step": 7224 + }, + { + "epoch": 0.6555071674832154, + "grad_norm": 0.0994341083488423, + "learning_rate": 0.000280254088239193, + "loss": 1.5441, + "step": 7225 + }, + { + "epoch": 0.6555978951188532, + "grad_norm": 0.09905238885038423, + "learning_rate": 0.0002801221210712008, + "loss": 1.5448, + "step": 7226 + }, + { + "epoch": 0.655688622754491, + "grad_norm": 0.10019514282097879, + "learning_rate": 0.0002799901728896962, + "loss": 1.5902, + "step": 7227 + }, + { + "epoch": 0.6557793503901288, + "grad_norm": 0.09665297295188854, + "learning_rate": 0.0002798582437060725, + "loss": 1.6557, + "step": 7228 + }, + { + "epoch": 0.6558700780257667, + "grad_norm": 0.10347849811361531, + "learning_rate": 0.0002797263335317217, + "loss": 1.5481, + "step": 7229 + }, + { + "epoch": 0.6559608056614045, + "grad_norm": 0.09904433631070035, + "learning_rate": 0.0002795944423780346, + "loss": 1.5746, + "step": 7230 + }, + { + "epoch": 0.6560515332970422, + "grad_norm": 0.09802650097228395, + "learning_rate": 0.0002794625702563999, + "loss": 1.6226, + "step": 7231 + }, + { + "epoch": 0.6561422609326801, + "grad_norm": 0.09923854324448825, + "learning_rate": 0.0002793307171782048, + "loss": 1.4947, + "step": 7232 + }, + { + "epoch": 0.6562329885683179, + "grad_norm": 0.0985468976407164, + "learning_rate": 0.00027919888315483467, + "loss": 1.5984, + "step": 7233 + }, + { + "epoch": 0.6563237162039557, + "grad_norm": 0.09855802994465457, + "learning_rate": 0.00027906706819767367, + "loss": 1.5895, + "step": 7234 + }, + { + "epoch": 0.6564144438395936, + "grad_norm": 0.09798551615073418, + "learning_rate": 0.0002789352723181039, + "loss": 1.5963, + "step": 7235 + }, + { + "epoch": 0.6565051714752314, + "grad_norm": 0.0966478910950511, + "learning_rate": 0.0002788034955275058, + "loss": 1.5603, + "step": 7236 + }, + { + "epoch": 0.6565958991108691, + "grad_norm": 0.10030700222842828, + "learning_rate": 0.0002786717378372584, + "loss": 1.5584, + "step": 7237 + }, + { + "epoch": 0.656686626746507, + "grad_norm": 0.09803301036439405, + "learning_rate": 0.0002785399992587393, + "loss": 1.5914, + "step": 7238 + }, + { + "epoch": 0.6567773543821448, + "grad_norm": 0.09755825390837496, + 
"learning_rate": 0.00027840827980332386, + "loss": 1.5848, + "step": 7239 + }, + { + "epoch": 0.6568680820177826, + "grad_norm": 0.10007072112339999, + "learning_rate": 0.00027827657948238595, + "loss": 1.5348, + "step": 7240 + }, + { + "epoch": 0.6569588096534205, + "grad_norm": 0.0981289219878389, + "learning_rate": 0.00027814489830729826, + "loss": 1.5473, + "step": 7241 + }, + { + "epoch": 0.6570495372890582, + "grad_norm": 0.09776276194603531, + "learning_rate": 0.0002780132362894313, + "loss": 1.5647, + "step": 7242 + }, + { + "epoch": 0.657140264924696, + "grad_norm": 0.10229043792935515, + "learning_rate": 0.0002778815934401539, + "loss": 1.5474, + "step": 7243 + }, + { + "epoch": 0.6572309925603339, + "grad_norm": 0.09858982577933799, + "learning_rate": 0.00027774996977083366, + "loss": 1.5921, + "step": 7244 + }, + { + "epoch": 0.6573217201959717, + "grad_norm": 0.10126170564839662, + "learning_rate": 0.00027761836529283644, + "loss": 1.6092, + "step": 7245 + }, + { + "epoch": 0.6574124478316095, + "grad_norm": 0.10094820639337085, + "learning_rate": 0.00027748678001752623, + "loss": 1.538, + "step": 7246 + }, + { + "epoch": 0.6575031754672473, + "grad_norm": 0.09779609572884494, + "learning_rate": 0.0002773552139562651, + "loss": 1.5592, + "step": 7247 + }, + { + "epoch": 0.6575939031028851, + "grad_norm": 0.09933331160134148, + "learning_rate": 0.0002772236671204143, + "loss": 1.5547, + "step": 7248 + }, + { + "epoch": 0.6576846307385229, + "grad_norm": 0.09879663429513391, + "learning_rate": 0.0002770921395213327, + "loss": 1.5654, + "step": 7249 + }, + { + "epoch": 0.6577753583741608, + "grad_norm": 0.09930067648341807, + "learning_rate": 0.0002769606311703774, + "loss": 1.5465, + "step": 7250 + }, + { + "epoch": 0.6578660860097986, + "grad_norm": 0.0992070549392741, + "learning_rate": 0.00027682914207890477, + "loss": 1.5339, + "step": 7251 + }, + { + "epoch": 0.6579568136454363, + "grad_norm": 0.09908380857649027, + "learning_rate": 0.0002766976722582684, + "loss": 1.5692, + "step": 7252 + }, + { + "epoch": 0.6580475412810742, + "grad_norm": 0.10285300843257268, + "learning_rate": 0.0002765662217198211, + "loss": 1.6041, + "step": 7253 + }, + { + "epoch": 0.658138268916712, + "grad_norm": 0.09769618360003754, + "learning_rate": 0.0002764347904749133, + "loss": 1.5506, + "step": 7254 + }, + { + "epoch": 0.6582289965523499, + "grad_norm": 0.09953069917436003, + "learning_rate": 0.0002763033785348945, + "loss": 1.5566, + "step": 7255 + }, + { + "epoch": 0.6583197241879877, + "grad_norm": 0.10158827528541893, + "learning_rate": 0.00027617198591111194, + "loss": 1.5982, + "step": 7256 + }, + { + "epoch": 0.6584104518236255, + "grad_norm": 0.0986435587471192, + "learning_rate": 0.0002760406126149112, + "loss": 1.5598, + "step": 7257 + }, + { + "epoch": 0.6585011794592633, + "grad_norm": 0.09837476926240661, + "learning_rate": 0.0002759092586576367, + "loss": 1.5312, + "step": 7258 + }, + { + "epoch": 0.6585919070949011, + "grad_norm": 0.09838065091816506, + "learning_rate": 0.0002757779240506308, + "loss": 1.5782, + "step": 7259 + }, + { + "epoch": 0.6586826347305389, + "grad_norm": 0.09922208476694172, + "learning_rate": 0.00027564660880523403, + "loss": 1.5515, + "step": 7260 + }, + { + "epoch": 0.6587733623661768, + "grad_norm": 0.10387565376163073, + "learning_rate": 0.00027551531293278564, + "loss": 1.5318, + "step": 7261 + }, + { + "epoch": 0.6588640900018146, + "grad_norm": 0.09899662023365552, + "learning_rate": 0.0002753840364446232, + "loss": 1.5503, + "step": 7262 + }, 
+ { + "epoch": 0.6589548176374523, + "grad_norm": 0.09927490073280812, + "learning_rate": 0.00027525277935208235, + "loss": 1.534, + "step": 7263 + }, + { + "epoch": 0.6590455452730902, + "grad_norm": 0.09586370830243912, + "learning_rate": 0.00027512154166649695, + "loss": 1.5607, + "step": 7264 + }, + { + "epoch": 0.659136272908728, + "grad_norm": 0.10041463739483275, + "learning_rate": 0.00027499032339919975, + "loss": 1.5163, + "step": 7265 + }, + { + "epoch": 0.6592270005443658, + "grad_norm": 0.10701272877568349, + "learning_rate": 0.0002748591245615213, + "loss": 1.5539, + "step": 7266 + }, + { + "epoch": 0.6593177281800037, + "grad_norm": 0.09705840404678485, + "learning_rate": 0.0002747279451647905, + "loss": 1.5983, + "step": 7267 + }, + { + "epoch": 0.6594084558156414, + "grad_norm": 0.09933141199076027, + "learning_rate": 0.0002745967852203347, + "loss": 1.5341, + "step": 7268 + }, + { + "epoch": 0.6594991834512792, + "grad_norm": 0.10042069372741648, + "learning_rate": 0.00027446564473948, + "loss": 1.5851, + "step": 7269 + }, + { + "epoch": 0.6595899110869171, + "grad_norm": 0.10102143906745073, + "learning_rate": 0.00027433452373355007, + "loss": 1.5243, + "step": 7270 + }, + { + "epoch": 0.6596806387225549, + "grad_norm": 0.10353443287263435, + "learning_rate": 0.0002742034222138671, + "loss": 1.5691, + "step": 7271 + }, + { + "epoch": 0.6597713663581927, + "grad_norm": 0.10023400172392027, + "learning_rate": 0.00027407234019175214, + "loss": 1.5781, + "step": 7272 + }, + { + "epoch": 0.6598620939938306, + "grad_norm": 0.1008724488356988, + "learning_rate": 0.0002739412776785238, + "loss": 1.5388, + "step": 7273 + }, + { + "epoch": 0.6599528216294683, + "grad_norm": 0.09940090221649286, + "learning_rate": 0.00027381023468549937, + "loss": 1.551, + "step": 7274 + }, + { + "epoch": 0.6600435492651061, + "grad_norm": 0.10703616235568747, + "learning_rate": 0.00027367921122399465, + "loss": 1.5718, + "step": 7275 + }, + { + "epoch": 0.660134276900744, + "grad_norm": 0.10199808477130845, + "learning_rate": 0.0002735482073053233, + "loss": 1.5161, + "step": 7276 + }, + { + "epoch": 0.6602250045363818, + "grad_norm": 0.10147891201388817, + "learning_rate": 0.00027341722294079763, + "loss": 1.5428, + "step": 7277 + }, + { + "epoch": 0.6603157321720196, + "grad_norm": 0.09948989107293056, + "learning_rate": 0.00027328625814172807, + "loss": 1.5471, + "step": 7278 + }, + { + "epoch": 0.6604064598076574, + "grad_norm": 0.10170532530122324, + "learning_rate": 0.00027315531291942374, + "loss": 1.5413, + "step": 7279 + }, + { + "epoch": 0.6604971874432952, + "grad_norm": 0.09366078627257059, + "learning_rate": 0.0002730243872851915, + "loss": 1.5791, + "step": 7280 + }, + { + "epoch": 0.660587915078933, + "grad_norm": 0.09554174797870091, + "learning_rate": 0.00027289348125033675, + "loss": 1.5767, + "step": 7281 + }, + { + "epoch": 0.6606786427145709, + "grad_norm": 0.09490300776856633, + "learning_rate": 0.0002727625948261635, + "loss": 1.5476, + "step": 7282 + }, + { + "epoch": 0.6607693703502087, + "grad_norm": 0.09915814099409975, + "learning_rate": 0.00027263172802397353, + "loss": 1.5802, + "step": 7283 + }, + { + "epoch": 0.6608600979858464, + "grad_norm": 0.10198755864213467, + "learning_rate": 0.00027250088085506734, + "loss": 1.5633, + "step": 7284 + }, + { + "epoch": 0.6609508256214843, + "grad_norm": 0.10016239151157441, + "learning_rate": 0.0002723700533307438, + "loss": 1.5791, + "step": 7285 + }, + { + "epoch": 0.6610415532571221, + "grad_norm": 0.10245057898937464, + 
"learning_rate": 0.00027223924546229976, + "loss": 1.5288, + "step": 7286 + }, + { + "epoch": 0.6611322808927599, + "grad_norm": 0.10057052738429888, + "learning_rate": 0.0002721084572610304, + "loss": 1.5965, + "step": 7287 + }, + { + "epoch": 0.6612230085283978, + "grad_norm": 0.10042298752831565, + "learning_rate": 0.00027197768873822917, + "loss": 1.5735, + "step": 7288 + }, + { + "epoch": 0.6613137361640355, + "grad_norm": 0.10134573550729022, + "learning_rate": 0.00027184693990518825, + "loss": 1.5551, + "step": 7289 + }, + { + "epoch": 0.6614044637996733, + "grad_norm": 0.10070004082508494, + "learning_rate": 0.0002717162107731978, + "loss": 1.5753, + "step": 7290 + }, + { + "epoch": 0.6614951914353112, + "grad_norm": 0.10166812055641339, + "learning_rate": 0.0002715855013535458, + "loss": 1.6114, + "step": 7291 + }, + { + "epoch": 0.661585919070949, + "grad_norm": 0.10187800966846614, + "learning_rate": 0.00027145481165751975, + "loss": 1.5622, + "step": 7292 + }, + { + "epoch": 0.6616766467065869, + "grad_norm": 0.09910529062139334, + "learning_rate": 0.0002713241416964044, + "loss": 1.5428, + "step": 7293 + }, + { + "epoch": 0.6617673743422247, + "grad_norm": 0.1000516585043077, + "learning_rate": 0.0002711934914814829, + "loss": 1.5301, + "step": 7294 + }, + { + "epoch": 0.6618581019778624, + "grad_norm": 0.09855188688563697, + "learning_rate": 0.00027106286102403736, + "loss": 1.5678, + "step": 7295 + }, + { + "epoch": 0.6619488296135003, + "grad_norm": 0.09655552429752073, + "learning_rate": 0.00027093225033534754, + "loss": 1.5499, + "step": 7296 + }, + { + "epoch": 0.6620395572491381, + "grad_norm": 0.09877680726548671, + "learning_rate": 0.0002708016594266917, + "loss": 1.5797, + "step": 7297 + }, + { + "epoch": 0.6621302848847759, + "grad_norm": 0.09971878700613307, + "learning_rate": 0.00027067108830934605, + "loss": 1.5803, + "step": 7298 + }, + { + "epoch": 0.6622210125204138, + "grad_norm": 0.10100354484055865, + "learning_rate": 0.00027054053699458573, + "loss": 1.6068, + "step": 7299 + }, + { + "epoch": 0.6623117401560515, + "grad_norm": 0.09783594066008659, + "learning_rate": 0.00027041000549368413, + "loss": 1.6035, + "step": 7300 + }, + { + "epoch": 0.6624024677916893, + "grad_norm": 0.10304846137142645, + "learning_rate": 0.0002702794938179122, + "loss": 1.5989, + "step": 7301 + }, + { + "epoch": 0.6624931954273272, + "grad_norm": 0.10233968543697666, + "learning_rate": 0.0002701490019785399, + "loss": 1.5336, + "step": 7302 + }, + { + "epoch": 0.662583923062965, + "grad_norm": 0.09929052022417714, + "learning_rate": 0.0002700185299868353, + "loss": 1.5532, + "step": 7303 + }, + { + "epoch": 0.6626746506986028, + "grad_norm": 0.09845788351594721, + "learning_rate": 0.00026988807785406426, + "loss": 1.5844, + "step": 7304 + }, + { + "epoch": 0.6627653783342407, + "grad_norm": 0.09924945381246918, + "learning_rate": 0.00026975764559149186, + "loss": 1.5529, + "step": 7305 + }, + { + "epoch": 0.6628561059698784, + "grad_norm": 0.10027703725528046, + "learning_rate": 0.0002696272332103806, + "loss": 1.574, + "step": 7306 + }, + { + "epoch": 0.6629468336055162, + "grad_norm": 0.10128916568624885, + "learning_rate": 0.0002694968407219917, + "loss": 1.5296, + "step": 7307 + }, + { + "epoch": 0.6630375612411541, + "grad_norm": 0.09735289401907439, + "learning_rate": 0.00026936646813758436, + "loss": 1.5704, + "step": 7308 + }, + { + "epoch": 0.6631282888767919, + "grad_norm": 0.10015619144828244, + "learning_rate": 0.0002692361154684168, + "loss": 1.5954, + "step": 7309 
+ }, + { + "epoch": 0.6632190165124296, + "grad_norm": 0.09658485632304949, + "learning_rate": 0.00026910578272574463, + "loss": 1.5825, + "step": 7310 + }, + { + "epoch": 0.6633097441480675, + "grad_norm": 0.10329783307021076, + "learning_rate": 0.000268975469920822, + "loss": 1.6028, + "step": 7311 + }, + { + "epoch": 0.6634004717837053, + "grad_norm": 0.1004351590851112, + "learning_rate": 0.0002688451770649018, + "loss": 1.5703, + "step": 7312 + }, + { + "epoch": 0.6634911994193431, + "grad_norm": 0.0991178574917548, + "learning_rate": 0.00026871490416923463, + "loss": 1.5617, + "step": 7313 + }, + { + "epoch": 0.663581927054981, + "grad_norm": 0.10256000842882523, + "learning_rate": 0.0002685846512450693, + "loss": 1.5472, + "step": 7314 + }, + { + "epoch": 0.6636726546906188, + "grad_norm": 0.09920553371101862, + "learning_rate": 0.00026845441830365354, + "loss": 1.5369, + "step": 7315 + }, + { + "epoch": 0.6637633823262565, + "grad_norm": 0.10348261256512643, + "learning_rate": 0.000268324205356233, + "loss": 1.555, + "step": 7316 + }, + { + "epoch": 0.6638541099618944, + "grad_norm": 0.09976679768979117, + "learning_rate": 0.0002681940124140515, + "loss": 1.5521, + "step": 7317 + }, + { + "epoch": 0.6639448375975322, + "grad_norm": 0.10296207122555054, + "learning_rate": 0.000268063839488351, + "loss": 1.6103, + "step": 7318 + }, + { + "epoch": 0.66403556523317, + "grad_norm": 0.10244199052414861, + "learning_rate": 0.0002679336865903724, + "loss": 1.5949, + "step": 7319 + }, + { + "epoch": 0.6641262928688079, + "grad_norm": 0.10164240778614546, + "learning_rate": 0.00026780355373135406, + "loss": 1.5608, + "step": 7320 + }, + { + "epoch": 0.6642170205044456, + "grad_norm": 0.09707163714083054, + "learning_rate": 0.000267673440922533, + "loss": 1.5437, + "step": 7321 + }, + { + "epoch": 0.6643077481400834, + "grad_norm": 0.09844697598084431, + "learning_rate": 0.0002675433481751445, + "loss": 1.5637, + "step": 7322 + }, + { + "epoch": 0.6643984757757213, + "grad_norm": 0.09953191280027697, + "learning_rate": 0.00026741327550042226, + "loss": 1.5165, + "step": 7323 + }, + { + "epoch": 0.6644892034113591, + "grad_norm": 0.09739157609695084, + "learning_rate": 0.00026728322290959806, + "loss": 1.5153, + "step": 7324 + }, + { + "epoch": 0.6645799310469969, + "grad_norm": 0.09994571501221575, + "learning_rate": 0.0002671531904139016, + "loss": 1.5533, + "step": 7325 + }, + { + "epoch": 0.6646706586826348, + "grad_norm": 0.09685123786085409, + "learning_rate": 0.0002670231780245617, + "loss": 1.545, + "step": 7326 + }, + { + "epoch": 0.6647613863182725, + "grad_norm": 0.10127883572319095, + "learning_rate": 0.0002668931857528047, + "loss": 1.5482, + "step": 7327 + }, + { + "epoch": 0.6648521139539103, + "grad_norm": 0.10044349747396032, + "learning_rate": 0.00026676321360985533, + "loss": 1.5556, + "step": 7328 + }, + { + "epoch": 0.6649428415895482, + "grad_norm": 0.09701976630898236, + "learning_rate": 0.000266633261606937, + "loss": 1.5379, + "step": 7329 + }, + { + "epoch": 0.665033569225186, + "grad_norm": 0.09897319230152501, + "learning_rate": 0.0002665033297552707, + "loss": 1.5437, + "step": 7330 + }, + { + "epoch": 0.6651242968608239, + "grad_norm": 0.1032505106463897, + "learning_rate": 0.00026637341806607653, + "loss": 1.5925, + "step": 7331 + }, + { + "epoch": 0.6652150244964616, + "grad_norm": 0.09922365229872289, + "learning_rate": 0.00026624352655057185, + "loss": 1.5587, + "step": 7332 + }, + { + "epoch": 0.6653057521320994, + "grad_norm": 0.09799820443982693, + 
"learning_rate": 0.00026611365521997344, + "loss": 1.567, + "step": 7333 + }, + { + "epoch": 0.6653964797677373, + "grad_norm": 0.09596881984176649, + "learning_rate": 0.00026598380408549525, + "loss": 1.5551, + "step": 7334 + }, + { + "epoch": 0.6654872074033751, + "grad_norm": 0.0997570556105706, + "learning_rate": 0.00026585397315834994, + "loss": 1.5318, + "step": 7335 + }, + { + "epoch": 0.6655779350390129, + "grad_norm": 0.09888930462518315, + "learning_rate": 0.00026572416244974875, + "loss": 1.5322, + "step": 7336 + }, + { + "epoch": 0.6656686626746507, + "grad_norm": 0.10043664869732324, + "learning_rate": 0.00026559437197090066, + "loss": 1.6229, + "step": 7337 + }, + { + "epoch": 0.6657593903102885, + "grad_norm": 0.09639881087034738, + "learning_rate": 0.0002654646017330129, + "loss": 1.584, + "step": 7338 + }, + { + "epoch": 0.6658501179459263, + "grad_norm": 0.10183918489118006, + "learning_rate": 0.00026533485174729134, + "loss": 1.5371, + "step": 7339 + }, + { + "epoch": 0.6659408455815642, + "grad_norm": 0.09922685525514856, + "learning_rate": 0.0002652051220249401, + "loss": 1.5663, + "step": 7340 + }, + { + "epoch": 0.666031573217202, + "grad_norm": 0.09803453099536441, + "learning_rate": 0.00026507541257716116, + "loss": 1.5378, + "step": 7341 + }, + { + "epoch": 0.6661223008528397, + "grad_norm": 0.09557868878828032, + "learning_rate": 0.00026494572341515487, + "loss": 1.5552, + "step": 7342 + }, + { + "epoch": 0.6662130284884776, + "grad_norm": 0.09990971764991817, + "learning_rate": 0.00026481605455012014, + "loss": 1.598, + "step": 7343 + }, + { + "epoch": 0.6663037561241154, + "grad_norm": 0.1006305497879032, + "learning_rate": 0.00026468640599325375, + "loss": 1.5211, + "step": 7344 + }, + { + "epoch": 0.6663944837597532, + "grad_norm": 0.09949398401084526, + "learning_rate": 0.0002645567777557507, + "loss": 1.5405, + "step": 7345 + }, + { + "epoch": 0.6664852113953911, + "grad_norm": 0.09987797195778367, + "learning_rate": 0.00026442716984880453, + "loss": 1.6038, + "step": 7346 + }, + { + "epoch": 0.6665759390310289, + "grad_norm": 0.10065319128599605, + "learning_rate": 0.0002642975822836072, + "loss": 1.5598, + "step": 7347 + }, + { + "epoch": 0.6666666666666666, + "grad_norm": 0.10054695758086599, + "learning_rate": 0.0002641680150713485, + "loss": 1.5478, + "step": 7348 + }, + { + "epoch": 0.6667573943023045, + "grad_norm": 0.1000620706280821, + "learning_rate": 0.0002640384682232161, + "loss": 1.5536, + "step": 7349 + }, + { + "epoch": 0.6668481219379423, + "grad_norm": 0.0965143842046694, + "learning_rate": 0.00026390894175039705, + "loss": 1.5454, + "step": 7350 + }, + { + "epoch": 0.6669388495735801, + "grad_norm": 0.09950058649861945, + "learning_rate": 0.00026377943566407557, + "loss": 1.5773, + "step": 7351 + }, + { + "epoch": 0.667029577209218, + "grad_norm": 0.10089331997614763, + "learning_rate": 0.0002636499499754345, + "loss": 1.5833, + "step": 7352 + }, + { + "epoch": 0.6671203048448557, + "grad_norm": 0.09809091681611426, + "learning_rate": 0.00026352048469565525, + "loss": 1.5506, + "step": 7353 + }, + { + "epoch": 0.6672110324804935, + "grad_norm": 0.10065312961786468, + "learning_rate": 0.00026339103983591687, + "loss": 1.6118, + "step": 7354 + }, + { + "epoch": 0.6673017601161314, + "grad_norm": 0.1017529351172969, + "learning_rate": 0.00026326161540739723, + "loss": 1.543, + "step": 7355 + }, + { + "epoch": 0.6673924877517692, + "grad_norm": 0.100106181298158, + "learning_rate": 0.0002631322114212718, + "loss": 1.5381, + "step": 7356 + }, 
+ { + "epoch": 0.667483215387407, + "grad_norm": 0.09968101564084159, + "learning_rate": 0.00026300282788871497, + "loss": 1.6128, + "step": 7357 + }, + { + "epoch": 0.6675739430230448, + "grad_norm": 0.09721479476721788, + "learning_rate": 0.00026287346482089895, + "loss": 1.5806, + "step": 7358 + }, + { + "epoch": 0.6676646706586826, + "grad_norm": 0.09656325997779051, + "learning_rate": 0.00026274412222899405, + "loss": 1.592, + "step": 7359 + }, + { + "epoch": 0.6677553982943204, + "grad_norm": 0.09820397451723803, + "learning_rate": 0.0002626148001241693, + "loss": 1.589, + "step": 7360 + }, + { + "epoch": 0.6678461259299583, + "grad_norm": 0.10009765867250152, + "learning_rate": 0.00026248549851759127, + "loss": 1.5952, + "step": 7361 + }, + { + "epoch": 0.6679368535655961, + "grad_norm": 0.10023842544641154, + "learning_rate": 0.00026235621742042546, + "loss": 1.5733, + "step": 7362 + }, + { + "epoch": 0.6680275812012338, + "grad_norm": 0.10131412803957023, + "learning_rate": 0.00026222695684383556, + "loss": 1.5659, + "step": 7363 + }, + { + "epoch": 0.6681183088368717, + "grad_norm": 0.10063666665277418, + "learning_rate": 0.000262097716798983, + "loss": 1.5648, + "step": 7364 + }, + { + "epoch": 0.6682090364725095, + "grad_norm": 0.09910173818239228, + "learning_rate": 0.0002619684972970277, + "loss": 1.5909, + "step": 7365 + }, + { + "epoch": 0.6682997641081473, + "grad_norm": 0.10091152729512687, + "learning_rate": 0.0002618392983491274, + "loss": 1.6074, + "step": 7366 + }, + { + "epoch": 0.6683904917437852, + "grad_norm": 0.09797259577113265, + "learning_rate": 0.0002617101199664391, + "loss": 1.5733, + "step": 7367 + }, + { + "epoch": 0.668481219379423, + "grad_norm": 0.0994585950302081, + "learning_rate": 0.00026158096216011696, + "loss": 1.5444, + "step": 7368 + }, + { + "epoch": 0.6685719470150608, + "grad_norm": 0.10070538292855172, + "learning_rate": 0.0002614518249413136, + "loss": 1.539, + "step": 7369 + }, + { + "epoch": 0.6686626746506986, + "grad_norm": 0.09830722463831447, + "learning_rate": 0.0002613227083211806, + "loss": 1.5252, + "step": 7370 + }, + { + "epoch": 0.6687534022863364, + "grad_norm": 0.10239447220511057, + "learning_rate": 0.00026119361231086694, + "loss": 1.5679, + "step": 7371 + }, + { + "epoch": 0.6688441299219743, + "grad_norm": 0.10047039692538405, + "learning_rate": 0.00026106453692151987, + "loss": 1.5967, + "step": 7372 + }, + { + "epoch": 0.6689348575576121, + "grad_norm": 0.10086531441037647, + "learning_rate": 0.00026093548216428556, + "loss": 1.5467, + "step": 7373 + }, + { + "epoch": 0.6690255851932498, + "grad_norm": 0.09675739342957486, + "learning_rate": 0.0002608064480503074, + "loss": 1.5264, + "step": 7374 + }, + { + "epoch": 0.6691163128288877, + "grad_norm": 0.10218775460562522, + "learning_rate": 0.0002606774345907278, + "loss": 1.5445, + "step": 7375 + }, + { + "epoch": 0.6692070404645255, + "grad_norm": 0.09817322197675082, + "learning_rate": 0.0002605484417966868, + "loss": 1.5807, + "step": 7376 + }, + { + "epoch": 0.6692977681001633, + "grad_norm": 0.0995315051448005, + "learning_rate": 0.00026041946967932317, + "loss": 1.5847, + "step": 7377 + }, + { + "epoch": 0.6693884957358012, + "grad_norm": 0.09921382369884582, + "learning_rate": 0.0002602905182497738, + "loss": 1.5712, + "step": 7378 + }, + { + "epoch": 0.669479223371439, + "grad_norm": 0.09825557367280979, + "learning_rate": 0.00026016158751917336, + "loss": 1.5524, + "step": 7379 + }, + { + "epoch": 0.6695699510070767, + "grad_norm": 0.10284323892321877, + 
"learning_rate": 0.0002600326774986553, + "loss": 1.5428, + "step": 7380 + }, + { + "epoch": 0.6696606786427146, + "grad_norm": 0.10366892098643657, + "learning_rate": 0.000259903788199351, + "loss": 1.571, + "step": 7381 + }, + { + "epoch": 0.6697514062783524, + "grad_norm": 0.09849384586332041, + "learning_rate": 0.00025977491963238977, + "loss": 1.5951, + "step": 7382 + }, + { + "epoch": 0.6698421339139902, + "grad_norm": 0.09601556440040444, + "learning_rate": 0.00025964607180889974, + "loss": 1.5525, + "step": 7383 + }, + { + "epoch": 0.6699328615496281, + "grad_norm": 0.10093361199226451, + "learning_rate": 0.0002595172447400069, + "loss": 1.575, + "step": 7384 + }, + { + "epoch": 0.6700235891852658, + "grad_norm": 0.0986453329904023, + "learning_rate": 0.0002593884384368352, + "loss": 1.5457, + "step": 7385 + }, + { + "epoch": 0.6701143168209036, + "grad_norm": 0.10313425187338598, + "learning_rate": 0.00025925965291050736, + "loss": 1.5246, + "step": 7386 + }, + { + "epoch": 0.6702050444565415, + "grad_norm": 0.10161099622814593, + "learning_rate": 0.00025913088817214406, + "loss": 1.5603, + "step": 7387 + }, + { + "epoch": 0.6702957720921793, + "grad_norm": 0.09678542229676954, + "learning_rate": 0.0002590021442328642, + "loss": 1.5976, + "step": 7388 + }, + { + "epoch": 0.670386499727817, + "grad_norm": 0.10170765187983184, + "learning_rate": 0.0002588734211037844, + "loss": 1.5918, + "step": 7389 + }, + { + "epoch": 0.6704772273634549, + "grad_norm": 0.09805717915429686, + "learning_rate": 0.00025874471879602047, + "loss": 1.5302, + "step": 7390 + }, + { + "epoch": 0.6705679549990927, + "grad_norm": 0.09768163822250661, + "learning_rate": 0.00025861603732068564, + "loss": 1.5334, + "step": 7391 + }, + { + "epoch": 0.6706586826347305, + "grad_norm": 0.09763703620137236, + "learning_rate": 0.00025848737668889135, + "loss": 1.5579, + "step": 7392 + }, + { + "epoch": 0.6707494102703684, + "grad_norm": 0.10060493415901937, + "learning_rate": 0.00025835873691174764, + "loss": 1.5599, + "step": 7393 + }, + { + "epoch": 0.6708401379060062, + "grad_norm": 0.09962243327409553, + "learning_rate": 0.00025823011800036287, + "loss": 1.5934, + "step": 7394 + }, + { + "epoch": 0.6709308655416439, + "grad_norm": 0.10345824408625257, + "learning_rate": 0.000258101519965843, + "loss": 1.5861, + "step": 7395 + }, + { + "epoch": 0.6710215931772818, + "grad_norm": 0.09836978534952277, + "learning_rate": 0.0002579729428192924, + "loss": 1.569, + "step": 7396 + }, + { + "epoch": 0.6711123208129196, + "grad_norm": 0.10142323522916931, + "learning_rate": 0.00025784438657181397, + "loss": 1.599, + "step": 7397 + }, + { + "epoch": 0.6712030484485574, + "grad_norm": 0.10125738910960537, + "learning_rate": 0.0002577158512345085, + "loss": 1.5809, + "step": 7398 + }, + { + "epoch": 0.6712937760841953, + "grad_norm": 0.0969386027263741, + "learning_rate": 0.0002575873368184748, + "loss": 1.5509, + "step": 7399 + }, + { + "epoch": 0.671384503719833, + "grad_norm": 0.09598881951472246, + "learning_rate": 0.0002574588433348103, + "loss": 1.5864, + "step": 7400 + }, + { + "epoch": 0.6714752313554708, + "grad_norm": 0.10062217479483834, + "learning_rate": 0.0002573303707946105, + "loss": 1.5722, + "step": 7401 + }, + { + "epoch": 0.6715659589911087, + "grad_norm": 0.10262842129125448, + "learning_rate": 0.00025720191920896907, + "loss": 1.59, + "step": 7402 + }, + { + "epoch": 0.6716566866267465, + "grad_norm": 0.0988934471327944, + "learning_rate": 0.0002570734885889775, + "loss": 1.555, + "step": 7403 + }, + { + 
"epoch": 0.6717474142623843, + "grad_norm": 0.10255646276126137, + "learning_rate": 0.00025694507894572616, + "loss": 1.5859, + "step": 7404 + }, + { + "epoch": 0.6718381418980222, + "grad_norm": 0.10167466518678216, + "learning_rate": 0.0002568166902903031, + "loss": 1.5856, + "step": 7405 + }, + { + "epoch": 0.6719288695336599, + "grad_norm": 0.1005337332073474, + "learning_rate": 0.0002566883226337945, + "loss": 1.5729, + "step": 7406 + }, + { + "epoch": 0.6720195971692978, + "grad_norm": 0.09921088456043123, + "learning_rate": 0.0002565599759872852, + "loss": 1.5944, + "step": 7407 + }, + { + "epoch": 0.6721103248049356, + "grad_norm": 0.0968822489362569, + "learning_rate": 0.0002564316503618578, + "loss": 1.5791, + "step": 7408 + }, + { + "epoch": 0.6722010524405734, + "grad_norm": 0.0976703305223404, + "learning_rate": 0.0002563033457685934, + "loss": 1.5579, + "step": 7409 + }, + { + "epoch": 0.6722917800762113, + "grad_norm": 0.10050647548099977, + "learning_rate": 0.00025617506221857077, + "loss": 1.5794, + "step": 7410 + }, + { + "epoch": 0.672382507711849, + "grad_norm": 0.09963049318168282, + "learning_rate": 0.0002560467997228677, + "loss": 1.5726, + "step": 7411 + }, + { + "epoch": 0.6724732353474868, + "grad_norm": 0.09791291189661352, + "learning_rate": 0.00025591855829255937, + "loss": 1.5613, + "step": 7412 + }, + { + "epoch": 0.6725639629831247, + "grad_norm": 0.09516154469142746, + "learning_rate": 0.0002557903379387194, + "loss": 1.5666, + "step": 7413 + }, + { + "epoch": 0.6726546906187625, + "grad_norm": 0.09553291741491365, + "learning_rate": 0.00025566213867241993, + "loss": 1.5352, + "step": 7414 + }, + { + "epoch": 0.6727454182544003, + "grad_norm": 0.10224735192532, + "learning_rate": 0.00025553396050473077, + "loss": 1.547, + "step": 7415 + }, + { + "epoch": 0.6728361458900382, + "grad_norm": 0.10080199799806464, + "learning_rate": 0.0002554058034467199, + "loss": 1.6151, + "step": 7416 + }, + { + "epoch": 0.6729268735256759, + "grad_norm": 0.0957808237102496, + "learning_rate": 0.000255277667509454, + "loss": 1.5199, + "step": 7417 + }, + { + "epoch": 0.6730176011613137, + "grad_norm": 0.09948292136521004, + "learning_rate": 0.00025514955270399784, + "loss": 1.593, + "step": 7418 + }, + { + "epoch": 0.6731083287969516, + "grad_norm": 0.09573251361529111, + "learning_rate": 0.00025502145904141385, + "loss": 1.5554, + "step": 7419 + }, + { + "epoch": 0.6731990564325894, + "grad_norm": 0.09796081477061037, + "learning_rate": 0.00025489338653276273, + "loss": 1.5811, + "step": 7420 + }, + { + "epoch": 0.6732897840682271, + "grad_norm": 0.10086603853073442, + "learning_rate": 0.00025476533518910407, + "loss": 1.5685, + "step": 7421 + }, + { + "epoch": 0.673380511703865, + "grad_norm": 0.1015526085166892, + "learning_rate": 0.00025463730502149476, + "loss": 1.5778, + "step": 7422 + }, + { + "epoch": 0.6734712393395028, + "grad_norm": 0.09361501720810815, + "learning_rate": 0.00025450929604099026, + "loss": 1.5269, + "step": 7423 + }, + { + "epoch": 0.6735619669751406, + "grad_norm": 0.10049460615833954, + "learning_rate": 0.0002543813082586441, + "loss": 1.5536, + "step": 7424 + }, + { + "epoch": 0.6736526946107785, + "grad_norm": 0.09781082834042416, + "learning_rate": 0.00025425334168550847, + "loss": 1.5925, + "step": 7425 + }, + { + "epoch": 0.6737434222464163, + "grad_norm": 0.09857363826081293, + "learning_rate": 0.000254125396332633, + "loss": 1.6122, + "step": 7426 + }, + { + "epoch": 0.673834149882054, + "grad_norm": 0.102002948183872, + "learning_rate": 
0.0002539974722110655, + "loss": 1.5073, + "step": 7427 + }, + { + "epoch": 0.6739248775176919, + "grad_norm": 0.10039068635202886, + "learning_rate": 0.0002538695693318528, + "loss": 1.5505, + "step": 7428 + }, + { + "epoch": 0.6740156051533297, + "grad_norm": 0.10117662584072581, + "learning_rate": 0.0002537416877060391, + "loss": 1.5715, + "step": 7429 + }, + { + "epoch": 0.6741063327889675, + "grad_norm": 0.09914573987121729, + "learning_rate": 0.0002536138273446667, + "loss": 1.5507, + "step": 7430 + }, + { + "epoch": 0.6741970604246054, + "grad_norm": 0.09904197376624108, + "learning_rate": 0.0002534859882587769, + "loss": 1.5787, + "step": 7431 + }, + { + "epoch": 0.6742877880602431, + "grad_norm": 0.10408360170236038, + "learning_rate": 0.000253358170459408, + "loss": 1.5628, + "step": 7432 + }, + { + "epoch": 0.6743785156958809, + "grad_norm": 0.10324728517029795, + "learning_rate": 0.00025323037395759776, + "loss": 1.5699, + "step": 7433 + }, + { + "epoch": 0.6744692433315188, + "grad_norm": 0.10129478956799493, + "learning_rate": 0.0002531025987643809, + "loss": 1.5457, + "step": 7434 + }, + { + "epoch": 0.6745599709671566, + "grad_norm": 0.1021939225592333, + "learning_rate": 0.00025297484489079117, + "loss": 1.5892, + "step": 7435 + }, + { + "epoch": 0.6746506986027944, + "grad_norm": 0.10455319519209155, + "learning_rate": 0.00025284711234785996, + "loss": 1.546, + "step": 7436 + }, + { + "epoch": 0.6747414262384323, + "grad_norm": 0.10039993480777645, + "learning_rate": 0.0002527194011466169, + "loss": 1.5733, + "step": 7437 + }, + { + "epoch": 0.67483215387407, + "grad_norm": 0.09920605070603286, + "learning_rate": 0.0002525917112980902, + "loss": 1.5712, + "step": 7438 + }, + { + "epoch": 0.6749228815097078, + "grad_norm": 0.09877167241977833, + "learning_rate": 0.0002524640428133054, + "loss": 1.5818, + "step": 7439 + }, + { + "epoch": 0.6750136091453457, + "grad_norm": 0.09565176699355217, + "learning_rate": 0.0002523363957032871, + "loss": 1.5371, + "step": 7440 + }, + { + "epoch": 0.6751043367809835, + "grad_norm": 0.09474124429056589, + "learning_rate": 0.0002522087699790577, + "loss": 1.5497, + "step": 7441 + }, + { + "epoch": 0.6751950644166212, + "grad_norm": 0.09999443464747942, + "learning_rate": 0.0002520811656516375, + "loss": 1.5585, + "step": 7442 + }, + { + "epoch": 0.6752857920522591, + "grad_norm": 0.09623042678782721, + "learning_rate": 0.00025195358273204515, + "loss": 1.5556, + "step": 7443 + }, + { + "epoch": 0.6753765196878969, + "grad_norm": 0.09763093423271811, + "learning_rate": 0.0002518260212312974, + "loss": 1.5477, + "step": 7444 + }, + { + "epoch": 0.6754672473235348, + "grad_norm": 0.09788100523892958, + "learning_rate": 0.0002516984811604094, + "loss": 1.5342, + "step": 7445 + }, + { + "epoch": 0.6755579749591726, + "grad_norm": 0.10182027143294105, + "learning_rate": 0.0002515709625303942, + "loss": 1.5695, + "step": 7446 + }, + { + "epoch": 0.6756487025948104, + "grad_norm": 0.09770028785600551, + "learning_rate": 0.0002514434653522626, + "loss": 1.535, + "step": 7447 + }, + { + "epoch": 0.6757394302304482, + "grad_norm": 0.09808676014465147, + "learning_rate": 0.00025131598963702473, + "loss": 1.5757, + "step": 7448 + }, + { + "epoch": 0.675830157866086, + "grad_norm": 0.09827822167575423, + "learning_rate": 0.00025118853539568786, + "loss": 1.5586, + "step": 7449 + }, + { + "epoch": 0.6759208855017238, + "grad_norm": 0.0989918074017844, + "learning_rate": 0.00025106110263925746, + "loss": 1.5606, + "step": 7450 + }, + { + "epoch": 
0.6760116131373617, + "grad_norm": 0.10139401668086061, + "learning_rate": 0.0002509336913787377, + "loss": 1.5782, + "step": 7451 + }, + { + "epoch": 0.6761023407729995, + "grad_norm": 0.10050396218288717, + "learning_rate": 0.00025080630162513043, + "loss": 1.5769, + "step": 7452 + }, + { + "epoch": 0.6761930684086372, + "grad_norm": 0.10207497544210971, + "learning_rate": 0.00025067893338943573, + "loss": 1.5752, + "step": 7453 + }, + { + "epoch": 0.6762837960442751, + "grad_norm": 0.10044796252326389, + "learning_rate": 0.00025055158668265163, + "loss": 1.5407, + "step": 7454 + }, + { + "epoch": 0.6763745236799129, + "grad_norm": 0.1004866709201312, + "learning_rate": 0.0002504242615157748, + "loss": 1.5603, + "step": 7455 + }, + { + "epoch": 0.6764652513155507, + "grad_norm": 0.10156004530747871, + "learning_rate": 0.0002502969578998, + "loss": 1.5758, + "step": 7456 + }, + { + "epoch": 0.6765559789511886, + "grad_norm": 0.09748314425821807, + "learning_rate": 0.00025016967584571946, + "loss": 1.5286, + "step": 7457 + }, + { + "epoch": 0.6766467065868264, + "grad_norm": 0.09712327005725745, + "learning_rate": 0.0002500424153645245, + "loss": 1.5406, + "step": 7458 + }, + { + "epoch": 0.6767374342224641, + "grad_norm": 0.100527283412458, + "learning_rate": 0.0002499151764672037, + "loss": 1.5497, + "step": 7459 + }, + { + "epoch": 0.676828161858102, + "grad_norm": 0.0983494871144962, + "learning_rate": 0.00024978795916474417, + "loss": 1.5547, + "step": 7460 + }, + { + "epoch": 0.6769188894937398, + "grad_norm": 0.09898118990494346, + "learning_rate": 0.00024966076346813145, + "loss": 1.5595, + "step": 7461 + }, + { + "epoch": 0.6770096171293776, + "grad_norm": 0.10192870143599168, + "learning_rate": 0.00024953358938834864, + "loss": 1.556, + "step": 7462 + }, + { + "epoch": 0.6771003447650155, + "grad_norm": 0.09729002075123462, + "learning_rate": 0.0002494064369363771, + "loss": 1.5806, + "step": 7463 + }, + { + "epoch": 0.6771910724006532, + "grad_norm": 0.1002087857981089, + "learning_rate": 0.0002492793061231967, + "loss": 1.5394, + "step": 7464 + }, + { + "epoch": 0.677281800036291, + "grad_norm": 0.10091167240686112, + "learning_rate": 0.0002491521969597854, + "loss": 1.5602, + "step": 7465 + }, + { + "epoch": 0.6773725276719289, + "grad_norm": 0.09812396443508495, + "learning_rate": 0.00024902510945711877, + "loss": 1.5608, + "step": 7466 + }, + { + "epoch": 0.6774632553075667, + "grad_norm": 0.10361188846335175, + "learning_rate": 0.00024889804362617077, + "loss": 1.59, + "step": 7467 + }, + { + "epoch": 0.6775539829432045, + "grad_norm": 0.10040832162695779, + "learning_rate": 0.00024877099947791394, + "loss": 1.5753, + "step": 7468 + }, + { + "epoch": 0.6776447105788423, + "grad_norm": 0.09661266353334975, + "learning_rate": 0.00024864397702331826, + "loss": 1.5382, + "step": 7469 + }, + { + "epoch": 0.6777354382144801, + "grad_norm": 0.09625254896750507, + "learning_rate": 0.0002485169762733521, + "loss": 1.5169, + "step": 7470 + }, + { + "epoch": 0.6778261658501179, + "grad_norm": 0.09594301907644806, + "learning_rate": 0.00024838999723898205, + "loss": 1.5836, + "step": 7471 + }, + { + "epoch": 0.6779168934857558, + "grad_norm": 0.10130384442726845, + "learning_rate": 0.00024826303993117304, + "loss": 1.5465, + "step": 7472 + }, + { + "epoch": 0.6780076211213936, + "grad_norm": 0.09862594165335273, + "learning_rate": 0.00024813610436088765, + "loss": 1.5866, + "step": 7473 + }, + { + "epoch": 0.6780983487570313, + "grad_norm": 0.10320289380686459, + "learning_rate": 
0.00024800919053908656, + "loss": 1.6086, + "step": 7474 + }, + { + "epoch": 0.6781890763926692, + "grad_norm": 0.10061167761568304, + "learning_rate": 0.00024788229847672917, + "loss": 1.5653, + "step": 7475 + }, + { + "epoch": 0.678279804028307, + "grad_norm": 0.09568377924299626, + "learning_rate": 0.00024775542818477247, + "loss": 1.5805, + "step": 7476 + }, + { + "epoch": 0.6783705316639448, + "grad_norm": 0.09668457633745382, + "learning_rate": 0.0002476285796741716, + "loss": 1.5689, + "step": 7477 + }, + { + "epoch": 0.6784612592995827, + "grad_norm": 0.09990274650458975, + "learning_rate": 0.00024750175295587995, + "loss": 1.5455, + "step": 7478 + }, + { + "epoch": 0.6785519869352205, + "grad_norm": 0.09434329167377395, + "learning_rate": 0.0002473749480408494, + "loss": 1.5381, + "step": 7479 + }, + { + "epoch": 0.6786427145708582, + "grad_norm": 0.10002125222832138, + "learning_rate": 0.00024724816494002934, + "loss": 1.5274, + "step": 7480 + }, + { + "epoch": 0.6787334422064961, + "grad_norm": 0.10016642509649525, + "learning_rate": 0.0002471214036643673, + "loss": 1.5798, + "step": 7481 + }, + { + "epoch": 0.6788241698421339, + "grad_norm": 0.09839083922071049, + "learning_rate": 0.0002469946642248095, + "loss": 1.5894, + "step": 7482 + }, + { + "epoch": 0.6789148974777718, + "grad_norm": 0.10479206669318955, + "learning_rate": 0.00024686794663229974, + "loss": 1.6254, + "step": 7483 + }, + { + "epoch": 0.6790056251134096, + "grad_norm": 0.09663680821387241, + "learning_rate": 0.0002467412508977799, + "loss": 1.5179, + "step": 7484 + }, + { + "epoch": 0.6790963527490473, + "grad_norm": 0.09706025273132161, + "learning_rate": 0.0002466145770321905, + "loss": 1.5894, + "step": 7485 + }, + { + "epoch": 0.6791870803846852, + "grad_norm": 0.10021048108457682, + "learning_rate": 0.00024648792504646964, + "loss": 1.5613, + "step": 7486 + }, + { + "epoch": 0.679277808020323, + "grad_norm": 0.09982252274329087, + "learning_rate": 0.00024636129495155397, + "loss": 1.5791, + "step": 7487 + }, + { + "epoch": 0.6793685356559608, + "grad_norm": 0.09806033058822529, + "learning_rate": 0.0002462346867583776, + "loss": 1.5955, + "step": 7488 + }, + { + "epoch": 0.6794592632915987, + "grad_norm": 0.09900368576743643, + "learning_rate": 0.0002461081004778737, + "loss": 1.5837, + "step": 7489 + }, + { + "epoch": 0.6795499909272364, + "grad_norm": 0.09617093757737025, + "learning_rate": 0.00024598153612097275, + "loss": 1.5489, + "step": 7490 + }, + { + "epoch": 0.6796407185628742, + "grad_norm": 0.10341070669355373, + "learning_rate": 0.0002458549936986034, + "loss": 1.58, + "step": 7491 + }, + { + "epoch": 0.6797314461985121, + "grad_norm": 0.09797397936354969, + "learning_rate": 0.00024572847322169297, + "loss": 1.5752, + "step": 7492 + }, + { + "epoch": 0.6798221738341499, + "grad_norm": 0.09767977130717581, + "learning_rate": 0.0002456019747011663, + "loss": 1.5564, + "step": 7493 + }, + { + "epoch": 0.6799129014697877, + "grad_norm": 0.0993373865175673, + "learning_rate": 0.0002454754981479465, + "loss": 1.5598, + "step": 7494 + }, + { + "epoch": 0.6800036291054256, + "grad_norm": 0.09760100008124564, + "learning_rate": 0.0002453490435729549, + "loss": 1.5902, + "step": 7495 + }, + { + "epoch": 0.6800943567410633, + "grad_norm": 0.09639985754384724, + "learning_rate": 0.00024522261098711113, + "loss": 1.5421, + "step": 7496 + }, + { + "epoch": 0.6801850843767011, + "grad_norm": 0.1121888399168511, + "learning_rate": 0.00024509620040133245, + "loss": 1.5741, + "step": 7497 + }, + { + "epoch": 
0.680275812012339, + "grad_norm": 0.10157745132283988, + "learning_rate": 0.0002449698118265342, + "loss": 1.4973, + "step": 7498 + }, + { + "epoch": 0.6803665396479768, + "grad_norm": 0.10293380917096669, + "learning_rate": 0.00024484344527363054, + "loss": 1.5378, + "step": 7499 + }, + { + "epoch": 0.6804572672836146, + "grad_norm": 0.09735757154777643, + "learning_rate": 0.000244717100753533, + "loss": 1.5762, + "step": 7500 + }, + { + "epoch": 0.6805479949192524, + "grad_norm": 0.09998351381782598, + "learning_rate": 0.0002445907782771512, + "loss": 1.5819, + "step": 7501 + }, + { + "epoch": 0.6806387225548902, + "grad_norm": 0.10347943152101506, + "learning_rate": 0.00024446447785539334, + "loss": 1.6249, + "step": 7502 + }, + { + "epoch": 0.680729450190528, + "grad_norm": 0.10093018412861991, + "learning_rate": 0.0002443381994991657, + "loss": 1.5486, + "step": 7503 + }, + { + "epoch": 0.6808201778261659, + "grad_norm": 0.0977206503475741, + "learning_rate": 0.00024421194321937216, + "loss": 1.5415, + "step": 7504 + }, + { + "epoch": 0.6809109054618037, + "grad_norm": 0.10201975662747116, + "learning_rate": 0.0002440857090269149, + "loss": 1.5491, + "step": 7505 + }, + { + "epoch": 0.6810016330974414, + "grad_norm": 0.09739611580085993, + "learning_rate": 0.00024395949693269454, + "loss": 1.5738, + "step": 7506 + }, + { + "epoch": 0.6810923607330793, + "grad_norm": 0.09712871344678436, + "learning_rate": 0.00024383330694760936, + "loss": 1.5886, + "step": 7507 + }, + { + "epoch": 0.6811830883687171, + "grad_norm": 0.09989269363352431, + "learning_rate": 0.00024370713908255566, + "loss": 1.5488, + "step": 7508 + }, + { + "epoch": 0.6812738160043549, + "grad_norm": 0.10055387453613428, + "learning_rate": 0.0002435809933484285, + "loss": 1.5392, + "step": 7509 + }, + { + "epoch": 0.6813645436399928, + "grad_norm": 0.10134833909200403, + "learning_rate": 0.00024345486975612014, + "loss": 1.5624, + "step": 7510 + }, + { + "epoch": 0.6814552712756305, + "grad_norm": 0.10163559737029426, + "learning_rate": 0.0002433287683165218, + "loss": 1.5518, + "step": 7511 + }, + { + "epoch": 0.6815459989112683, + "grad_norm": 0.10052923180711158, + "learning_rate": 0.00024320268904052194, + "loss": 1.562, + "step": 7512 + }, + { + "epoch": 0.6816367265469062, + "grad_norm": 0.10046640361233376, + "learning_rate": 0.00024307663193900793, + "loss": 1.5717, + "step": 7513 + }, + { + "epoch": 0.681727454182544, + "grad_norm": 0.10467604008415014, + "learning_rate": 0.0002429505970228646, + "loss": 1.5418, + "step": 7514 + }, + { + "epoch": 0.6818181818181818, + "grad_norm": 0.10130803389207908, + "learning_rate": 0.00024282458430297495, + "loss": 1.5737, + "step": 7515 + }, + { + "epoch": 0.6819089094538197, + "grad_norm": 0.10054108051221315, + "learning_rate": 0.0002426985937902205, + "loss": 1.5922, + "step": 7516 + }, + { + "epoch": 0.6819996370894574, + "grad_norm": 0.1040181955869008, + "learning_rate": 0.00024257262549548026, + "loss": 1.5663, + "step": 7517 + }, + { + "epoch": 0.6820903647250952, + "grad_norm": 0.09941538681739502, + "learning_rate": 0.00024244667942963173, + "loss": 1.5317, + "step": 7518 + }, + { + "epoch": 0.6821810923607331, + "grad_norm": 0.10019532380339899, + "learning_rate": 0.00024232075560355054, + "loss": 1.5056, + "step": 7519 + }, + { + "epoch": 0.6822718199963709, + "grad_norm": 0.09770516394370701, + "learning_rate": 0.00024219485402811008, + "loss": 1.5699, + "step": 7520 + }, + { + "epoch": 0.6823625476320088, + "grad_norm": 0.10073015310607013, + "learning_rate": 
0.00024206897471418205, + "loss": 1.5381, + "step": 7521 + }, + { + "epoch": 0.6824532752676465, + "grad_norm": 0.10336745969144118, + "learning_rate": 0.00024194311767263582, + "loss": 1.6061, + "step": 7522 + }, + { + "epoch": 0.6825440029032843, + "grad_norm": 0.09936845306106165, + "learning_rate": 0.00024181728291433957, + "loss": 1.5746, + "step": 7523 + }, + { + "epoch": 0.6826347305389222, + "grad_norm": 0.09871787338369044, + "learning_rate": 0.00024169147045015904, + "loss": 1.5103, + "step": 7524 + }, + { + "epoch": 0.68272545817456, + "grad_norm": 0.09783374727236688, + "learning_rate": 0.00024156568029095772, + "loss": 1.5199, + "step": 7525 + }, + { + "epoch": 0.6828161858101978, + "grad_norm": 0.09815346994309991, + "learning_rate": 0.00024143991244759838, + "loss": 1.6036, + "step": 7526 + }, + { + "epoch": 0.6829069134458357, + "grad_norm": 0.10058093231379919, + "learning_rate": 0.0002413141669309407, + "loss": 1.5787, + "step": 7527 + }, + { + "epoch": 0.6829976410814734, + "grad_norm": 0.10052406633791462, + "learning_rate": 0.00024118844375184268, + "loss": 1.589, + "step": 7528 + }, + { + "epoch": 0.6830883687171112, + "grad_norm": 0.09745812704314513, + "learning_rate": 0.00024106274292116082, + "loss": 1.568, + "step": 7529 + }, + { + "epoch": 0.6831790963527491, + "grad_norm": 0.10015372807886894, + "learning_rate": 0.0002409370644497493, + "loss": 1.5366, + "step": 7530 + }, + { + "epoch": 0.6832698239883869, + "grad_norm": 0.10075995023723032, + "learning_rate": 0.00024081140834846044, + "loss": 1.5414, + "step": 7531 + }, + { + "epoch": 0.6833605516240246, + "grad_norm": 0.10008591947767993, + "learning_rate": 0.0002406857746281445, + "loss": 1.5629, + "step": 7532 + }, + { + "epoch": 0.6834512792596625, + "grad_norm": 0.09869074623957357, + "learning_rate": 0.00024056016329965008, + "loss": 1.5276, + "step": 7533 + }, + { + "epoch": 0.6835420068953003, + "grad_norm": 0.09886102018628586, + "learning_rate": 0.00024043457437382404, + "loss": 1.548, + "step": 7534 + }, + { + "epoch": 0.6836327345309381, + "grad_norm": 0.10433337440179136, + "learning_rate": 0.00024030900786151056, + "loss": 1.5822, + "step": 7535 + }, + { + "epoch": 0.683723462166576, + "grad_norm": 0.10215835279638381, + "learning_rate": 0.00024018346377355272, + "loss": 1.5786, + "step": 7536 + }, + { + "epoch": 0.6838141898022138, + "grad_norm": 0.1049742973493452, + "learning_rate": 0.00024005794212079109, + "loss": 1.5629, + "step": 7537 + }, + { + "epoch": 0.6839049174378515, + "grad_norm": 0.09970711893561311, + "learning_rate": 0.00023993244291406425, + "loss": 1.5877, + "step": 7538 + }, + { + "epoch": 0.6839956450734894, + "grad_norm": 0.10179670871177482, + "learning_rate": 0.0002398069661642095, + "loss": 1.5856, + "step": 7539 + }, + { + "epoch": 0.6840863727091272, + "grad_norm": 0.10146769593834236, + "learning_rate": 0.00023968151188206156, + "loss": 1.5705, + "step": 7540 + }, + { + "epoch": 0.684177100344765, + "grad_norm": 0.09819038688870324, + "learning_rate": 0.00023955608007845326, + "loss": 1.5248, + "step": 7541 + }, + { + "epoch": 0.6842678279804029, + "grad_norm": 0.09650491050068911, + "learning_rate": 0.00023943067076421582, + "loss": 1.5819, + "step": 7542 + }, + { + "epoch": 0.6843585556160406, + "grad_norm": 0.09926253421386258, + "learning_rate": 0.00023930528395017854, + "loss": 1.6125, + "step": 7543 + }, + { + "epoch": 0.6844492832516784, + "grad_norm": 0.0983529381205183, + "learning_rate": 0.0002391799196471684, + "loss": 1.5246, + "step": 7544 + }, + { + 
"epoch": 0.6845400108873163, + "grad_norm": 0.10097979035443219, + "learning_rate": 0.0002390545778660105, + "loss": 1.5786, + "step": 7545 + }, + { + "epoch": 0.6846307385229541, + "grad_norm": 0.10275029257865956, + "learning_rate": 0.0002389292586175284, + "loss": 1.5914, + "step": 7546 + }, + { + "epoch": 0.6847214661585919, + "grad_norm": 0.10493545290878971, + "learning_rate": 0.00023880396191254332, + "loss": 1.5514, + "step": 7547 + }, + { + "epoch": 0.6848121937942298, + "grad_norm": 0.09738401906954386, + "learning_rate": 0.00023867868776187445, + "loss": 1.5279, + "step": 7548 + }, + { + "epoch": 0.6849029214298675, + "grad_norm": 0.10343745004826461, + "learning_rate": 0.0002385534361763394, + "loss": 1.5768, + "step": 7549 + }, + { + "epoch": 0.6849936490655053, + "grad_norm": 0.10106658387502322, + "learning_rate": 0.00023842820716675383, + "loss": 1.6006, + "step": 7550 + }, + { + "epoch": 0.6850843767011432, + "grad_norm": 0.10110551039489044, + "learning_rate": 0.0002383030007439312, + "loss": 1.5465, + "step": 7551 + }, + { + "epoch": 0.685175104336781, + "grad_norm": 0.09751306462862891, + "learning_rate": 0.0002381778169186828, + "loss": 1.5528, + "step": 7552 + }, + { + "epoch": 0.6852658319724187, + "grad_norm": 0.09761075675157838, + "learning_rate": 0.0002380526557018186, + "loss": 1.574, + "step": 7553 + }, + { + "epoch": 0.6853565596080566, + "grad_norm": 0.10020889185662098, + "learning_rate": 0.0002379275171041463, + "loss": 1.5504, + "step": 7554 + }, + { + "epoch": 0.6854472872436944, + "grad_norm": 0.09384877883303236, + "learning_rate": 0.00023780240113647127, + "loss": 1.5294, + "step": 7555 + }, + { + "epoch": 0.6855380148793322, + "grad_norm": 0.09888199781252281, + "learning_rate": 0.00023767730780959758, + "loss": 1.5713, + "step": 7556 + }, + { + "epoch": 0.6856287425149701, + "grad_norm": 0.09709268375574533, + "learning_rate": 0.0002375522371343272, + "loss": 1.5677, + "step": 7557 + }, + { + "epoch": 0.6857194701506079, + "grad_norm": 0.09992307852083865, + "learning_rate": 0.00023742718912145983, + "loss": 1.5499, + "step": 7558 + }, + { + "epoch": 0.6858101977862457, + "grad_norm": 0.09731229330283524, + "learning_rate": 0.0002373021637817932, + "loss": 1.5648, + "step": 7559 + }, + { + "epoch": 0.6859009254218835, + "grad_norm": 0.10264899980841269, + "learning_rate": 0.00023717716112612357, + "loss": 1.5419, + "step": 7560 + }, + { + "epoch": 0.6859916530575213, + "grad_norm": 0.09829973036301672, + "learning_rate": 0.0002370521811652449, + "loss": 1.5348, + "step": 7561 + }, + { + "epoch": 0.6860823806931592, + "grad_norm": 0.10374320115553028, + "learning_rate": 0.0002369272239099489, + "loss": 1.5584, + "step": 7562 + }, + { + "epoch": 0.686173108328797, + "grad_norm": 0.0987053259552629, + "learning_rate": 0.0002368022893710261, + "loss": 1.5769, + "step": 7563 + }, + { + "epoch": 0.6862638359644347, + "grad_norm": 0.09894262344343499, + "learning_rate": 0.0002366773775592642, + "loss": 1.5396, + "step": 7564 + }, + { + "epoch": 0.6863545636000726, + "grad_norm": 0.09773740729648643, + "learning_rate": 0.00023655248848544974, + "loss": 1.5254, + "step": 7565 + }, + { + "epoch": 0.6864452912357104, + "grad_norm": 0.09574517830249381, + "learning_rate": 0.00023642762216036657, + "loss": 1.5149, + "step": 7566 + }, + { + "epoch": 0.6865360188713482, + "grad_norm": 0.1010075469724532, + "learning_rate": 0.0002363027785947972, + "loss": 1.5496, + "step": 7567 + }, + { + "epoch": 0.6866267465069861, + "grad_norm": 0.10257007085201476, + 
"learning_rate": 0.00023617795779952184, + "loss": 1.5309, + "step": 7568 + }, + { + "epoch": 0.6867174741426239, + "grad_norm": 0.10296239923020728, + "learning_rate": 0.0002360531597853185, + "loss": 1.5681, + "step": 7569 + }, + { + "epoch": 0.6868082017782616, + "grad_norm": 0.10457124740944679, + "learning_rate": 0.0002359283845629639, + "loss": 1.5977, + "step": 7570 + }, + { + "epoch": 0.6868989294138995, + "grad_norm": 0.10135303257938902, + "learning_rate": 0.0002358036321432322, + "loss": 1.5593, + "step": 7571 + }, + { + "epoch": 0.6869896570495373, + "grad_norm": 0.09785237362777371, + "learning_rate": 0.0002356789025368956, + "loss": 1.5715, + "step": 7572 + }, + { + "epoch": 0.6870803846851751, + "grad_norm": 0.1010647862647718, + "learning_rate": 0.0002355541957547247, + "loss": 1.5672, + "step": 7573 + }, + { + "epoch": 0.687171112320813, + "grad_norm": 0.09807761571647787, + "learning_rate": 0.00023542951180748823, + "loss": 1.5553, + "step": 7574 + }, + { + "epoch": 0.6872618399564507, + "grad_norm": 0.10280475769241737, + "learning_rate": 0.00023530485070595236, + "loss": 1.5221, + "step": 7575 + }, + { + "epoch": 0.6873525675920885, + "grad_norm": 0.10163453053926025, + "learning_rate": 0.0002351802124608815, + "loss": 1.5668, + "step": 7576 + }, + { + "epoch": 0.6874432952277264, + "grad_norm": 0.09739015900940193, + "learning_rate": 0.00023505559708303847, + "loss": 1.5262, + "step": 7577 + }, + { + "epoch": 0.6875340228633642, + "grad_norm": 0.0983992567310536, + "learning_rate": 0.00023493100458318368, + "loss": 1.5607, + "step": 7578 + }, + { + "epoch": 0.687624750499002, + "grad_norm": 0.09999330213126033, + "learning_rate": 0.00023480643497207556, + "loss": 1.5841, + "step": 7579 + }, + { + "epoch": 0.6877154781346398, + "grad_norm": 0.09849926622479697, + "learning_rate": 0.00023468188826047083, + "loss": 1.5261, + "step": 7580 + }, + { + "epoch": 0.6878062057702776, + "grad_norm": 0.09919830779214868, + "learning_rate": 0.00023455736445912435, + "loss": 1.6018, + "step": 7581 + }, + { + "epoch": 0.6878969334059154, + "grad_norm": 0.10407814948167589, + "learning_rate": 0.00023443286357878858, + "loss": 1.5901, + "step": 7582 + }, + { + "epoch": 0.6879876610415533, + "grad_norm": 0.09796799017803738, + "learning_rate": 0.000234308385630214, + "loss": 1.5885, + "step": 7583 + }, + { + "epoch": 0.6880783886771911, + "grad_norm": 0.09911685949550846, + "learning_rate": 0.00023418393062414966, + "loss": 1.5423, + "step": 7584 + }, + { + "epoch": 0.6881691163128288, + "grad_norm": 0.0972731624461986, + "learning_rate": 0.00023405949857134206, + "loss": 1.5346, + "step": 7585 + }, + { + "epoch": 0.6882598439484667, + "grad_norm": 0.09914387042620088, + "learning_rate": 0.00023393508948253583, + "loss": 1.5641, + "step": 7586 + }, + { + "epoch": 0.6883505715841045, + "grad_norm": 0.09841479398521451, + "learning_rate": 0.00023381070336847377, + "loss": 1.5819, + "step": 7587 + }, + { + "epoch": 0.6884412992197423, + "grad_norm": 0.10377946746731576, + "learning_rate": 0.00023368634023989692, + "loss": 1.5519, + "step": 7588 + }, + { + "epoch": 0.6885320268553802, + "grad_norm": 0.1071276079798488, + "learning_rate": 0.00023356200010754387, + "loss": 1.5763, + "step": 7589 + }, + { + "epoch": 0.688622754491018, + "grad_norm": 0.10028080356417403, + "learning_rate": 0.00023343768298215114, + "loss": 1.5484, + "step": 7590 + }, + { + "epoch": 0.6887134821266557, + "grad_norm": 0.09864649774088603, + "learning_rate": 0.00023331338887445398, + "loss": 1.5772, + "step": 7591 
+ }, + { + "epoch": 0.6888042097622936, + "grad_norm": 0.10249873411960064, + "learning_rate": 0.00023318911779518503, + "loss": 1.5494, + "step": 7592 + }, + { + "epoch": 0.6888949373979314, + "grad_norm": 0.10307332702795244, + "learning_rate": 0.00023306486975507485, + "loss": 1.5181, + "step": 7593 + }, + { + "epoch": 0.6889856650335692, + "grad_norm": 0.10359460280143819, + "learning_rate": 0.0002329406447648527, + "loss": 1.5647, + "step": 7594 + }, + { + "epoch": 0.6890763926692071, + "grad_norm": 0.09935276621658991, + "learning_rate": 0.00023281644283524506, + "loss": 1.5389, + "step": 7595 + }, + { + "epoch": 0.6891671203048448, + "grad_norm": 0.09613240870280121, + "learning_rate": 0.00023269226397697697, + "loss": 1.5187, + "step": 7596 + }, + { + "epoch": 0.6892578479404827, + "grad_norm": 0.09994511744473993, + "learning_rate": 0.00023256810820077145, + "loss": 1.545, + "step": 7597 + }, + { + "epoch": 0.6893485755761205, + "grad_norm": 0.0974769422926387, + "learning_rate": 0.0002324439755173492, + "loss": 1.5197, + "step": 7598 + }, + { + "epoch": 0.6894393032117583, + "grad_norm": 0.09649098877235258, + "learning_rate": 0.00023231986593742917, + "loss": 1.5151, + "step": 7599 + }, + { + "epoch": 0.6895300308473962, + "grad_norm": 0.09996289462626345, + "learning_rate": 0.00023219577947172787, + "loss": 1.5375, + "step": 7600 + }, + { + "epoch": 0.689620758483034, + "grad_norm": 0.10717029591544447, + "learning_rate": 0.00023207171613096072, + "loss": 1.5744, + "step": 7601 + }, + { + "epoch": 0.6897114861186717, + "grad_norm": 0.10387877257763355, + "learning_rate": 0.00023194767592584037, + "loss": 1.5602, + "step": 7602 + }, + { + "epoch": 0.6898022137543096, + "grad_norm": 0.10165021821144841, + "learning_rate": 0.00023182365886707736, + "loss": 1.5484, + "step": 7603 + }, + { + "epoch": 0.6898929413899474, + "grad_norm": 0.09979553704697813, + "learning_rate": 0.00023169966496538124, + "loss": 1.5672, + "step": 7604 + }, + { + "epoch": 0.6899836690255852, + "grad_norm": 0.10277712642398952, + "learning_rate": 0.0002315756942314587, + "loss": 1.5754, + "step": 7605 + }, + { + "epoch": 0.690074396661223, + "grad_norm": 0.09973989157159902, + "learning_rate": 0.00023145174667601426, + "loss": 1.5075, + "step": 7606 + }, + { + "epoch": 0.6901651242968608, + "grad_norm": 0.09892414153699208, + "learning_rate": 0.00023132782230975125, + "loss": 1.5412, + "step": 7607 + }, + { + "epoch": 0.6902558519324986, + "grad_norm": 0.09752296779713333, + "learning_rate": 0.0002312039211433704, + "loss": 1.5608, + "step": 7608 + }, + { + "epoch": 0.6903465795681365, + "grad_norm": 0.09918577678275067, + "learning_rate": 0.00023108004318757043, + "loss": 1.5861, + "step": 7609 + }, + { + "epoch": 0.6904373072037743, + "grad_norm": 0.10048993889858154, + "learning_rate": 0.0002309561884530485, + "loss": 1.5469, + "step": 7610 + }, + { + "epoch": 0.690528034839412, + "grad_norm": 0.1053724406798454, + "learning_rate": 0.0002308323569504992, + "loss": 1.5707, + "step": 7611 + }, + { + "epoch": 0.6906187624750499, + "grad_norm": 0.10137453393720026, + "learning_rate": 0.0002307085486906157, + "loss": 1.5669, + "step": 7612 + }, + { + "epoch": 0.6907094901106877, + "grad_norm": 0.10177185158970842, + "learning_rate": 0.00023058476368408853, + "loss": 1.5236, + "step": 7613 + }, + { + "epoch": 0.6908002177463255, + "grad_norm": 0.10103296296822191, + "learning_rate": 0.00023046100194160695, + "loss": 1.5778, + "step": 7614 + }, + { + "epoch": 0.6908909453819634, + "grad_norm": 
0.09995678477366099, + "learning_rate": 0.00023033726347385753, + "loss": 1.5423, + "step": 7615 + }, + { + "epoch": 0.6909816730176012, + "grad_norm": 0.09786122983935389, + "learning_rate": 0.00023021354829152503, + "loss": 1.5381, + "step": 7616 + }, + { + "epoch": 0.6910724006532389, + "grad_norm": 0.09854887548596478, + "learning_rate": 0.00023008985640529252, + "loss": 1.5237, + "step": 7617 + }, + { + "epoch": 0.6911631282888768, + "grad_norm": 0.10408453620698403, + "learning_rate": 0.00022996618782584072, + "loss": 1.5681, + "step": 7618 + }, + { + "epoch": 0.6912538559245146, + "grad_norm": 0.0959756003227103, + "learning_rate": 0.0002298425425638482, + "loss": 1.5256, + "step": 7619 + }, + { + "epoch": 0.6913445835601524, + "grad_norm": 0.10178649828601312, + "learning_rate": 0.00022971892062999196, + "loss": 1.5864, + "step": 7620 + }, + { + "epoch": 0.6914353111957903, + "grad_norm": 0.10012092511482082, + "learning_rate": 0.00022959532203494694, + "loss": 1.5345, + "step": 7621 + }, + { + "epoch": 0.691526038831428, + "grad_norm": 0.10106361488435624, + "learning_rate": 0.0002294717467893857, + "loss": 1.5696, + "step": 7622 + }, + { + "epoch": 0.6916167664670658, + "grad_norm": 0.09929896077845411, + "learning_rate": 0.0002293481949039788, + "loss": 1.5585, + "step": 7623 + }, + { + "epoch": 0.6917074941027037, + "grad_norm": 0.098234356885211, + "learning_rate": 0.00022922466638939537, + "loss": 1.4931, + "step": 7624 + }, + { + "epoch": 0.6917982217383415, + "grad_norm": 0.09734790760625944, + "learning_rate": 0.00022910116125630193, + "loss": 1.5768, + "step": 7625 + }, + { + "epoch": 0.6918889493739793, + "grad_norm": 0.09992615176781523, + "learning_rate": 0.00022897767951536292, + "loss": 1.5687, + "step": 7626 + }, + { + "epoch": 0.6919796770096172, + "grad_norm": 0.10144148087111153, + "learning_rate": 0.00022885422117724125, + "loss": 1.5878, + "step": 7627 + }, + { + "epoch": 0.6920704046452549, + "grad_norm": 0.09905275401499143, + "learning_rate": 0.00022873078625259768, + "loss": 1.498, + "step": 7628 + }, + { + "epoch": 0.6921611322808927, + "grad_norm": 0.09752243157572567, + "learning_rate": 0.00022860737475209077, + "loss": 1.5762, + "step": 7629 + }, + { + "epoch": 0.6922518599165306, + "grad_norm": 0.10295659765655367, + "learning_rate": 0.0002284839866863768, + "loss": 1.5767, + "step": 7630 + }, + { + "epoch": 0.6923425875521684, + "grad_norm": 0.0980780551790331, + "learning_rate": 0.00022836062206611085, + "loss": 1.5429, + "step": 7631 + }, + { + "epoch": 0.6924333151878062, + "grad_norm": 0.09985982439315215, + "learning_rate": 0.00022823728090194518, + "loss": 1.5613, + "step": 7632 + }, + { + "epoch": 0.692524042823444, + "grad_norm": 0.09951352033498134, + "learning_rate": 0.00022811396320453016, + "loss": 1.5286, + "step": 7633 + }, + { + "epoch": 0.6926147704590818, + "grad_norm": 0.10464877520161904, + "learning_rate": 0.00022799066898451448, + "loss": 1.5405, + "step": 7634 + }, + { + "epoch": 0.6927054980947196, + "grad_norm": 0.10326081042640073, + "learning_rate": 0.00022786739825254488, + "loss": 1.5304, + "step": 7635 + }, + { + "epoch": 0.6927962257303575, + "grad_norm": 0.09865689801759911, + "learning_rate": 0.0002277441510192655, + "loss": 1.5586, + "step": 7636 + }, + { + "epoch": 0.6928869533659953, + "grad_norm": 0.10087182526566367, + "learning_rate": 0.00022762092729531857, + "loss": 1.5344, + "step": 7637 + }, + { + "epoch": 0.6929776810016332, + "grad_norm": 0.09942225385575877, + "learning_rate": 0.00022749772709134498, + 
"loss": 1.5563, + "step": 7638 + }, + { + "epoch": 0.6930684086372709, + "grad_norm": 0.10399116266380219, + "learning_rate": 0.00022737455041798278, + "loss": 1.5428, + "step": 7639 + }, + { + "epoch": 0.6931591362729087, + "grad_norm": 0.09978403462822766, + "learning_rate": 0.0002272513972858682, + "loss": 1.5116, + "step": 7640 + }, + { + "epoch": 0.6932498639085466, + "grad_norm": 0.10066701342359226, + "learning_rate": 0.00022712826770563576, + "loss": 1.5506, + "step": 7641 + }, + { + "epoch": 0.6933405915441844, + "grad_norm": 0.1044019292354193, + "learning_rate": 0.00022700516168791752, + "loss": 1.5749, + "step": 7642 + }, + { + "epoch": 0.6934313191798221, + "grad_norm": 0.10230469578469312, + "learning_rate": 0.00022688207924334393, + "loss": 1.5497, + "step": 7643 + }, + { + "epoch": 0.69352204681546, + "grad_norm": 0.0993483879105438, + "learning_rate": 0.00022675902038254286, + "loss": 1.5466, + "step": 7644 + }, + { + "epoch": 0.6936127744510978, + "grad_norm": 0.10305178502691323, + "learning_rate": 0.00022663598511614086, + "loss": 1.5907, + "step": 7645 + }, + { + "epoch": 0.6937035020867356, + "grad_norm": 0.10431646434680623, + "learning_rate": 0.00022651297345476184, + "loss": 1.5667, + "step": 7646 + }, + { + "epoch": 0.6937942297223735, + "grad_norm": 0.10040196130518374, + "learning_rate": 0.00022638998540902772, + "loss": 1.5861, + "step": 7647 + }, + { + "epoch": 0.6938849573580113, + "grad_norm": 0.09700961480226397, + "learning_rate": 0.00022626702098955883, + "loss": 1.5433, + "step": 7648 + }, + { + "epoch": 0.693975684993649, + "grad_norm": 0.10218180740279607, + "learning_rate": 0.00022614408020697302, + "loss": 1.5432, + "step": 7649 + }, + { + "epoch": 0.6940664126292869, + "grad_norm": 0.10164300228005738, + "learning_rate": 0.00022602116307188601, + "loss": 1.5327, + "step": 7650 + }, + { + "epoch": 0.6941571402649247, + "grad_norm": 0.0981361593442586, + "learning_rate": 0.00022589826959491205, + "loss": 1.5318, + "step": 7651 + }, + { + "epoch": 0.6942478679005625, + "grad_norm": 0.1021708472265895, + "learning_rate": 0.00022577539978666306, + "loss": 1.561, + "step": 7652 + }, + { + "epoch": 0.6943385955362004, + "grad_norm": 0.09942683158131589, + "learning_rate": 0.00022565255365774868, + "loss": 1.573, + "step": 7653 + }, + { + "epoch": 0.6944293231718381, + "grad_norm": 0.10784797079907288, + "learning_rate": 0.00022552973121877656, + "loss": 1.579, + "step": 7654 + }, + { + "epoch": 0.6945200508074759, + "grad_norm": 0.1025626341312386, + "learning_rate": 0.00022540693248035282, + "loss": 1.587, + "step": 7655 + }, + { + "epoch": 0.6946107784431138, + "grad_norm": 0.09883162916680209, + "learning_rate": 0.0002252841574530809, + "loss": 1.5458, + "step": 7656 + }, + { + "epoch": 0.6947015060787516, + "grad_norm": 0.10012627150143698, + "learning_rate": 0.00022516140614756226, + "loss": 1.5586, + "step": 7657 + }, + { + "epoch": 0.6947922337143894, + "grad_norm": 0.10392362907524354, + "learning_rate": 0.00022503867857439676, + "loss": 1.6018, + "step": 7658 + }, + { + "epoch": 0.6948829613500273, + "grad_norm": 0.10001705595728141, + "learning_rate": 0.00022491597474418203, + "loss": 1.5439, + "step": 7659 + }, + { + "epoch": 0.694973688985665, + "grad_norm": 0.10122673868789482, + "learning_rate": 0.00022479329466751347, + "loss": 1.5168, + "step": 7660 + }, + { + "epoch": 0.6950644166213028, + "grad_norm": 0.10287242858934577, + "learning_rate": 0.00022467063835498424, + "loss": 1.5784, + "step": 7661 + }, + { + "epoch": 0.6951551442569407, + 
"grad_norm": 0.10100242817774087, + "learning_rate": 0.0002245480058171862, + "loss": 1.5684, + "step": 7662 + }, + { + "epoch": 0.6952458718925785, + "grad_norm": 0.10251707322119687, + "learning_rate": 0.00022442539706470844, + "loss": 1.5607, + "step": 7663 + }, + { + "epoch": 0.6953365995282162, + "grad_norm": 0.10034631998470867, + "learning_rate": 0.00022430281210813807, + "loss": 1.5726, + "step": 7664 + }, + { + "epoch": 0.6954273271638541, + "grad_norm": 0.09933011389402244, + "learning_rate": 0.0002241802509580605, + "loss": 1.5634, + "step": 7665 + }, + { + "epoch": 0.6955180547994919, + "grad_norm": 0.1020671770635684, + "learning_rate": 0.0002240577136250591, + "loss": 1.5644, + "step": 7666 + }, + { + "epoch": 0.6956087824351297, + "grad_norm": 0.0990184049445387, + "learning_rate": 0.0002239352001197148, + "loss": 1.6092, + "step": 7667 + }, + { + "epoch": 0.6956995100707676, + "grad_norm": 0.10543153644759315, + "learning_rate": 0.00022381271045260649, + "loss": 1.5551, + "step": 7668 + }, + { + "epoch": 0.6957902377064054, + "grad_norm": 0.09857360344198399, + "learning_rate": 0.00022369024463431147, + "loss": 1.5153, + "step": 7669 + }, + { + "epoch": 0.6958809653420431, + "grad_norm": 0.10108195635096619, + "learning_rate": 0.00022356780267540461, + "loss": 1.5654, + "step": 7670 + }, + { + "epoch": 0.695971692977681, + "grad_norm": 0.10163294135754079, + "learning_rate": 0.00022344538458645853, + "loss": 1.5358, + "step": 7671 + }, + { + "epoch": 0.6960624206133188, + "grad_norm": 0.0991319072158296, + "learning_rate": 0.00022332299037804444, + "loss": 1.5808, + "step": 7672 + }, + { + "epoch": 0.6961531482489566, + "grad_norm": 0.09807416556853803, + "learning_rate": 0.0002232006200607307, + "loss": 1.5682, + "step": 7673 + }, + { + "epoch": 0.6962438758845945, + "grad_norm": 0.10249163770441445, + "learning_rate": 0.0002230782736450842, + "loss": 1.5907, + "step": 7674 + }, + { + "epoch": 0.6963346035202322, + "grad_norm": 0.10201309716782901, + "learning_rate": 0.00022295595114166984, + "loss": 1.5704, + "step": 7675 + }, + { + "epoch": 0.6964253311558701, + "grad_norm": 0.10153544468171624, + "learning_rate": 0.00022283365256104987, + "loss": 1.5742, + "step": 7676 + }, + { + "epoch": 0.6965160587915079, + "grad_norm": 0.10361128106991627, + "learning_rate": 0.00022271137791378488, + "loss": 1.5189, + "step": 7677 + }, + { + "epoch": 0.6966067864271457, + "grad_norm": 0.10272535535112759, + "learning_rate": 0.00022258912721043306, + "loss": 1.5406, + "step": 7678 + }, + { + "epoch": 0.6966975140627836, + "grad_norm": 0.09724461157442303, + "learning_rate": 0.0002224669004615512, + "loss": 1.5673, + "step": 7679 + }, + { + "epoch": 0.6967882416984214, + "grad_norm": 0.10120122262617381, + "learning_rate": 0.0002223446976776934, + "loss": 1.5217, + "step": 7680 + }, + { + "epoch": 0.6968789693340591, + "grad_norm": 0.10204607934614855, + "learning_rate": 0.00022222251886941147, + "loss": 1.5649, + "step": 7681 + }, + { + "epoch": 0.696969696969697, + "grad_norm": 0.1003883464100539, + "learning_rate": 0.00022210036404725638, + "loss": 1.5524, + "step": 7682 + }, + { + "epoch": 0.6970604246053348, + "grad_norm": 0.10347402415434732, + "learning_rate": 0.0002219782332217758, + "loss": 1.5703, + "step": 7683 + }, + { + "epoch": 0.6971511522409726, + "grad_norm": 0.10054443221885588, + "learning_rate": 0.0002218561264035156, + "loss": 1.5649, + "step": 7684 + }, + { + "epoch": 0.6972418798766105, + "grad_norm": 0.10033894819091292, + "learning_rate": 
0.00022173404360302003, + "loss": 1.5865, + "step": 7685 + }, + { + "epoch": 0.6973326075122482, + "grad_norm": 0.10476250474610196, + "learning_rate": 0.0002216119848308308, + "loss": 1.5386, + "step": 7686 + }, + { + "epoch": 0.697423335147886, + "grad_norm": 0.09859064909713444, + "learning_rate": 0.00022148995009748756, + "loss": 1.5224, + "step": 7687 + }, + { + "epoch": 0.6975140627835239, + "grad_norm": 0.10469187996162119, + "learning_rate": 0.00022136793941352834, + "loss": 1.5593, + "step": 7688 + }, + { + "epoch": 0.6976047904191617, + "grad_norm": 0.10165371847726766, + "learning_rate": 0.00022124595278948846, + "loss": 1.5566, + "step": 7689 + }, + { + "epoch": 0.6976955180547995, + "grad_norm": 0.1043290725304547, + "learning_rate": 0.00022112399023590178, + "loss": 1.5844, + "step": 7690 + }, + { + "epoch": 0.6977862456904373, + "grad_norm": 0.10107344931724077, + "learning_rate": 0.00022100205176329947, + "loss": 1.5535, + "step": 7691 + }, + { + "epoch": 0.6978769733260751, + "grad_norm": 0.10031553159356257, + "learning_rate": 0.0002208801373822113, + "loss": 1.5555, + "step": 7692 + }, + { + "epoch": 0.6979677009617129, + "grad_norm": 0.09727258154645399, + "learning_rate": 0.00022075824710316444, + "loss": 1.5455, + "step": 7693 + }, + { + "epoch": 0.6980584285973508, + "grad_norm": 0.0994089247267075, + "learning_rate": 0.00022063638093668386, + "loss": 1.5676, + "step": 7694 + }, + { + "epoch": 0.6981491562329886, + "grad_norm": 0.09819798918090139, + "learning_rate": 0.00022051453889329316, + "loss": 1.5935, + "step": 7695 + }, + { + "epoch": 0.6982398838686263, + "grad_norm": 0.10568704096431246, + "learning_rate": 0.00022039272098351326, + "loss": 1.5557, + "step": 7696 + }, + { + "epoch": 0.6983306115042642, + "grad_norm": 0.1039378258148512, + "learning_rate": 0.0002202709272178629, + "loss": 1.5567, + "step": 7697 + }, + { + "epoch": 0.698421339139902, + "grad_norm": 0.09990798649512951, + "learning_rate": 0.00022014915760685917, + "loss": 1.5721, + "step": 7698 + }, + { + "epoch": 0.6985120667755398, + "grad_norm": 0.09901669774806315, + "learning_rate": 0.00022002741216101712, + "loss": 1.5346, + "step": 7699 + }, + { + "epoch": 0.6986027944111777, + "grad_norm": 0.1001672875473531, + "learning_rate": 0.00021990569089084934, + "loss": 1.5848, + "step": 7700 + }, + { + "epoch": 0.6986935220468155, + "grad_norm": 0.09985367849883436, + "learning_rate": 0.00021978399380686626, + "loss": 1.5354, + "step": 7701 + }, + { + "epoch": 0.6987842496824532, + "grad_norm": 0.10050410493966142, + "learning_rate": 0.00021966232091957672, + "loss": 1.5409, + "step": 7702 + }, + { + "epoch": 0.6988749773180911, + "grad_norm": 0.10112693099846083, + "learning_rate": 0.00021954067223948714, + "loss": 1.5593, + "step": 7703 + }, + { + "epoch": 0.6989657049537289, + "grad_norm": 0.10066720624588586, + "learning_rate": 0.00021941904777710163, + "loss": 1.5766, + "step": 7704 + }, + { + "epoch": 0.6990564325893667, + "grad_norm": 0.10067351254873928, + "learning_rate": 0.00021929744754292275, + "loss": 1.5753, + "step": 7705 + }, + { + "epoch": 0.6991471602250046, + "grad_norm": 0.09962041170051243, + "learning_rate": 0.00021917587154745077, + "loss": 1.5689, + "step": 7706 + }, + { + "epoch": 0.6992378878606423, + "grad_norm": 0.10014981139987515, + "learning_rate": 0.00021905431980118374, + "loss": 1.5748, + "step": 7707 + }, + { + "epoch": 0.6993286154962801, + "grad_norm": 0.10083397320692247, + "learning_rate": 0.00021893279231461737, + "loss": 1.5391, + "step": 7708 + }, + { + 
"epoch": 0.699419343131918, + "grad_norm": 0.10165801970772585, + "learning_rate": 0.00021881128909824598, + "loss": 1.5364, + "step": 7709 + }, + { + "epoch": 0.6995100707675558, + "grad_norm": 0.10483738015947536, + "learning_rate": 0.00021868981016256124, + "loss": 1.6008, + "step": 7710 + }, + { + "epoch": 0.6996007984031936, + "grad_norm": 0.10593348263553154, + "learning_rate": 0.0002185683555180527, + "loss": 1.5737, + "step": 7711 + }, + { + "epoch": 0.6996915260388314, + "grad_norm": 0.1015806092255661, + "learning_rate": 0.00021844692517520808, + "loss": 1.5501, + "step": 7712 + }, + { + "epoch": 0.6997822536744692, + "grad_norm": 0.10060390399464077, + "learning_rate": 0.00021832551914451322, + "loss": 1.5419, + "step": 7713 + }, + { + "epoch": 0.6998729813101071, + "grad_norm": 0.09792759085734974, + "learning_rate": 0.00021820413743645124, + "loss": 1.5368, + "step": 7714 + }, + { + "epoch": 0.6999637089457449, + "grad_norm": 0.10245631960898895, + "learning_rate": 0.00021808278006150335, + "loss": 1.5529, + "step": 7715 + }, + { + "epoch": 0.7000544365813827, + "grad_norm": 0.1016292309301396, + "learning_rate": 0.00021796144703014914, + "loss": 1.5455, + "step": 7716 + }, + { + "epoch": 0.7001451642170206, + "grad_norm": 0.10055672619257178, + "learning_rate": 0.00021784013835286553, + "loss": 1.5733, + "step": 7717 + }, + { + "epoch": 0.7002358918526583, + "grad_norm": 0.09842131683256461, + "learning_rate": 0.00021771885404012743, + "loss": 1.5587, + "step": 7718 + }, + { + "epoch": 0.7003266194882961, + "grad_norm": 0.10091385403433055, + "learning_rate": 0.00021759759410240804, + "loss": 1.581, + "step": 7719 + }, + { + "epoch": 0.700417347123934, + "grad_norm": 0.09934119934572147, + "learning_rate": 0.00021747635855017783, + "loss": 1.5229, + "step": 7720 + }, + { + "epoch": 0.7005080747595718, + "grad_norm": 0.10166886827008702, + "learning_rate": 0.00021735514739390589, + "loss": 1.5806, + "step": 7721 + }, + { + "epoch": 0.7005988023952096, + "grad_norm": 0.10338026394677347, + "learning_rate": 0.0002172339606440584, + "loss": 1.5409, + "step": 7722 + }, + { + "epoch": 0.7006895300308474, + "grad_norm": 0.0988206234733405, + "learning_rate": 0.00021711279831110038, + "loss": 1.603, + "step": 7723 + }, + { + "epoch": 0.7007802576664852, + "grad_norm": 0.09921868878730851, + "learning_rate": 0.00021699166040549384, + "loss": 1.5648, + "step": 7724 + }, + { + "epoch": 0.700870985302123, + "grad_norm": 0.1042488084601685, + "learning_rate": 0.00021687054693769896, + "loss": 1.5828, + "step": 7725 + }, + { + "epoch": 0.7009617129377609, + "grad_norm": 0.10044834985381006, + "learning_rate": 0.00021674945791817424, + "loss": 1.5409, + "step": 7726 + }, + { + "epoch": 0.7010524405733987, + "grad_norm": 0.1002052771956267, + "learning_rate": 0.0002166283933573756, + "loss": 1.5999, + "step": 7727 + }, + { + "epoch": 0.7011431682090364, + "grad_norm": 0.098334934642481, + "learning_rate": 0.00021650735326575682, + "loss": 1.5675, + "step": 7728 + }, + { + "epoch": 0.7012338958446743, + "grad_norm": 0.10243571092691542, + "learning_rate": 0.0002163863376537698, + "loss": 1.5131, + "step": 7729 + }, + { + "epoch": 0.7013246234803121, + "grad_norm": 0.10189253267938961, + "learning_rate": 0.00021626534653186453, + "loss": 1.5518, + "step": 7730 + }, + { + "epoch": 0.7014153511159499, + "grad_norm": 0.10408969207327365, + "learning_rate": 0.0002161443799104884, + "loss": 1.5499, + "step": 7731 + }, + { + "epoch": 0.7015060787515878, + "grad_norm": 0.10335574162515804, + 
"learning_rate": 0.00021602343780008677, + "loss": 1.5986, + "step": 7732 + }, + { + "epoch": 0.7015968063872255, + "grad_norm": 0.10338229826209051, + "learning_rate": 0.0002159025202111033, + "loss": 1.5697, + "step": 7733 + }, + { + "epoch": 0.7016875340228633, + "grad_norm": 0.09953577139848772, + "learning_rate": 0.00021578162715397904, + "loss": 1.5781, + "step": 7734 + }, + { + "epoch": 0.7017782616585012, + "grad_norm": 0.10065824234869766, + "learning_rate": 0.00021566075863915297, + "loss": 1.5756, + "step": 7735 + }, + { + "epoch": 0.701868989294139, + "grad_norm": 0.09934540012950989, + "learning_rate": 0.00021553991467706236, + "loss": 1.5531, + "step": 7736 + }, + { + "epoch": 0.7019597169297768, + "grad_norm": 0.10032132912613764, + "learning_rate": 0.00021541909527814218, + "loss": 1.5951, + "step": 7737 + }, + { + "epoch": 0.7020504445654147, + "grad_norm": 0.09792798955801144, + "learning_rate": 0.0002152983004528251, + "loss": 1.5864, + "step": 7738 + }, + { + "epoch": 0.7021411722010524, + "grad_norm": 0.09747401965624579, + "learning_rate": 0.00021517753021154152, + "loss": 1.4919, + "step": 7739 + }, + { + "epoch": 0.7022318998366902, + "grad_norm": 0.09603161744921218, + "learning_rate": 0.00021505678456472038, + "loss": 1.5636, + "step": 7740 + }, + { + "epoch": 0.7023226274723281, + "grad_norm": 0.10330878328428039, + "learning_rate": 0.00021493606352278788, + "loss": 1.5239, + "step": 7741 + }, + { + "epoch": 0.7024133551079659, + "grad_norm": 0.10342029022436956, + "learning_rate": 0.00021481536709616817, + "loss": 1.5579, + "step": 7742 + }, + { + "epoch": 0.7025040827436037, + "grad_norm": 0.1017749889805895, + "learning_rate": 0.0002146946952952835, + "loss": 1.575, + "step": 7743 + }, + { + "epoch": 0.7025948103792415, + "grad_norm": 0.09622763488620832, + "learning_rate": 0.00021457404813055416, + "loss": 1.5492, + "step": 7744 + }, + { + "epoch": 0.7026855380148793, + "grad_norm": 0.09921720472539064, + "learning_rate": 0.00021445342561239778, + "loss": 1.5722, + "step": 7745 + }, + { + "epoch": 0.7027762656505171, + "grad_norm": 0.0966279642271893, + "learning_rate": 0.00021433282775123002, + "loss": 1.5369, + "step": 7746 + }, + { + "epoch": 0.702866993286155, + "grad_norm": 0.10035734903326511, + "learning_rate": 0.0002142122545574649, + "loss": 1.5066, + "step": 7747 + }, + { + "epoch": 0.7029577209217928, + "grad_norm": 0.09975429545546209, + "learning_rate": 0.00021409170604151369, + "loss": 1.5164, + "step": 7748 + }, + { + "epoch": 0.7030484485574305, + "grad_norm": 0.0993645354518222, + "learning_rate": 0.00021397118221378565, + "loss": 1.6242, + "step": 7749 + }, + { + "epoch": 0.7031391761930684, + "grad_norm": 0.10277729721625783, + "learning_rate": 0.0002138506830846883, + "loss": 1.5693, + "step": 7750 + }, + { + "epoch": 0.7032299038287062, + "grad_norm": 0.09717209327455181, + "learning_rate": 0.00021373020866462651, + "loss": 1.561, + "step": 7751 + }, + { + "epoch": 0.7033206314643441, + "grad_norm": 0.10051985984856804, + "learning_rate": 0.00021360975896400343, + "loss": 1.5398, + "step": 7752 + }, + { + "epoch": 0.7034113590999819, + "grad_norm": 0.09943253105926836, + "learning_rate": 0.00021348933399321996, + "loss": 1.5362, + "step": 7753 + }, + { + "epoch": 0.7035020867356196, + "grad_norm": 0.10190608249270824, + "learning_rate": 0.0002133689337626748, + "loss": 1.5678, + "step": 7754 + }, + { + "epoch": 0.7035928143712575, + "grad_norm": 0.10512161198614776, + "learning_rate": 0.00021324855828276423, + "loss": 1.5495, + "step": 
7755 + }, + { + "epoch": 0.7036835420068953, + "grad_norm": 0.10197930084243041, + "learning_rate": 0.00021312820756388314, + "loss": 1.5781, + "step": 7756 + }, + { + "epoch": 0.7037742696425331, + "grad_norm": 0.10295804176020476, + "learning_rate": 0.00021300788161642358, + "loss": 1.5645, + "step": 7757 + }, + { + "epoch": 0.703864997278171, + "grad_norm": 0.10099931292395986, + "learning_rate": 0.00021288758045077572, + "loss": 1.5676, + "step": 7758 + }, + { + "epoch": 0.7039557249138088, + "grad_norm": 0.09866488532357073, + "learning_rate": 0.00021276730407732725, + "loss": 1.5756, + "step": 7759 + }, + { + "epoch": 0.7040464525494465, + "grad_norm": 0.09621082522452168, + "learning_rate": 0.00021264705250646483, + "loss": 1.5816, + "step": 7760 + }, + { + "epoch": 0.7041371801850844, + "grad_norm": 0.10243786411405294, + "learning_rate": 0.00021252682574857167, + "loss": 1.5559, + "step": 7761 + }, + { + "epoch": 0.7042279078207222, + "grad_norm": 0.10194320897681307, + "learning_rate": 0.00021240662381402943, + "loss": 1.5537, + "step": 7762 + }, + { + "epoch": 0.70431863545636, + "grad_norm": 0.10003301615799441, + "learning_rate": 0.00021228644671321767, + "loss": 1.5661, + "step": 7763 + }, + { + "epoch": 0.7044093630919979, + "grad_norm": 0.10277437817199508, + "learning_rate": 0.00021216629445651368, + "loss": 1.5651, + "step": 7764 + }, + { + "epoch": 0.7045000907276356, + "grad_norm": 0.09980539651908485, + "learning_rate": 0.0002120461670542924, + "loss": 1.5969, + "step": 7765 + }, + { + "epoch": 0.7045908183632734, + "grad_norm": 0.09936634307228517, + "learning_rate": 0.00021192606451692726, + "loss": 1.5742, + "step": 7766 + }, + { + "epoch": 0.7046815459989113, + "grad_norm": 0.09979040721493117, + "learning_rate": 0.0002118059868547887, + "loss": 1.5327, + "step": 7767 + }, + { + "epoch": 0.7047722736345491, + "grad_norm": 0.10250628526460955, + "learning_rate": 0.00021168593407824583, + "loss": 1.5855, + "step": 7768 + }, + { + "epoch": 0.7048630012701869, + "grad_norm": 0.10112636123866561, + "learning_rate": 0.00021156590619766485, + "loss": 1.5715, + "step": 7769 + }, + { + "epoch": 0.7049537289058248, + "grad_norm": 0.10125549104788811, + "learning_rate": 0.00021144590322341055, + "loss": 1.5937, + "step": 7770 + }, + { + "epoch": 0.7050444565414625, + "grad_norm": 0.10123392542957585, + "learning_rate": 0.00021132592516584508, + "loss": 1.5671, + "step": 7771 + }, + { + "epoch": 0.7051351841771003, + "grad_norm": 0.09913071676940642, + "learning_rate": 0.00021120597203532832, + "loss": 1.5542, + "step": 7772 + }, + { + "epoch": 0.7052259118127382, + "grad_norm": 0.10412488276273799, + "learning_rate": 0.00021108604384221862, + "loss": 1.5455, + "step": 7773 + }, + { + "epoch": 0.705316639448376, + "grad_norm": 0.10485036917039171, + "learning_rate": 0.0002109661405968716, + "loss": 1.5426, + "step": 7774 + }, + { + "epoch": 0.7054073670840137, + "grad_norm": 0.10751933505620032, + "learning_rate": 0.00021084626230964077, + "loss": 1.557, + "step": 7775 + }, + { + "epoch": 0.7054980947196516, + "grad_norm": 0.10445690356924643, + "learning_rate": 0.00021072640899087781, + "loss": 1.5424, + "step": 7776 + }, + { + "epoch": 0.7055888223552894, + "grad_norm": 0.10138303066870417, + "learning_rate": 0.00021060658065093224, + "loss": 1.5536, + "step": 7777 + }, + { + "epoch": 0.7056795499909272, + "grad_norm": 0.10010889072940045, + "learning_rate": 0.0002104867773001511, + "loss": 1.5286, + "step": 7778 + }, + { + "epoch": 0.7057702776265651, + "grad_norm": 
0.09928602941446694, + "learning_rate": 0.00021036699894887918, + "loss": 1.5493, + "step": 7779 + }, + { + "epoch": 0.7058610052622029, + "grad_norm": 0.10139759804023565, + "learning_rate": 0.00021024724560745985, + "loss": 1.5614, + "step": 7780 + }, + { + "epoch": 0.7059517328978406, + "grad_norm": 0.0987863347194022, + "learning_rate": 0.00021012751728623346, + "loss": 1.553, + "step": 7781 + }, + { + "epoch": 0.7060424605334785, + "grad_norm": 0.09975345771936704, + "learning_rate": 0.0002100078139955386, + "loss": 1.5704, + "step": 7782 + }, + { + "epoch": 0.7061331881691163, + "grad_norm": 0.09716378626487966, + "learning_rate": 0.0002098881357457117, + "loss": 1.5451, + "step": 7783 + }, + { + "epoch": 0.7062239158047541, + "grad_norm": 0.09979540834815889, + "learning_rate": 0.00020976848254708718, + "loss": 1.5736, + "step": 7784 + }, + { + "epoch": 0.706314643440392, + "grad_norm": 0.10889563049311156, + "learning_rate": 0.00020964885440999704, + "loss": 1.5349, + "step": 7785 + }, + { + "epoch": 0.7064053710760297, + "grad_norm": 0.1021939617945023, + "learning_rate": 0.00020952925134477092, + "loss": 1.5201, + "step": 7786 + }, + { + "epoch": 0.7064960987116675, + "grad_norm": 0.10317067197363511, + "learning_rate": 0.0002094096733617369, + "loss": 1.5528, + "step": 7787 + }, + { + "epoch": 0.7065868263473054, + "grad_norm": 0.09891578582020613, + "learning_rate": 0.00020929012047122043, + "loss": 1.5789, + "step": 7788 + }, + { + "epoch": 0.7066775539829432, + "grad_norm": 0.09937111039347797, + "learning_rate": 0.00020917059268354472, + "loss": 1.6113, + "step": 7789 + }, + { + "epoch": 0.7067682816185811, + "grad_norm": 0.09942906079569859, + "learning_rate": 0.00020905109000903115, + "loss": 1.5769, + "step": 7790 + }, + { + "epoch": 0.7068590092542189, + "grad_norm": 0.10298122561140921, + "learning_rate": 0.00020893161245799907, + "loss": 1.5425, + "step": 7791 + }, + { + "epoch": 0.7069497368898566, + "grad_norm": 0.10373445219838934, + "learning_rate": 0.0002088121600407652, + "loss": 1.5378, + "step": 7792 + }, + { + "epoch": 0.7070404645254945, + "grad_norm": 0.10056209651473402, + "learning_rate": 0.000208692732767644, + "loss": 1.5493, + "step": 7793 + }, + { + "epoch": 0.7071311921611323, + "grad_norm": 0.10187652881545284, + "learning_rate": 0.0002085733306489484, + "loss": 1.5296, + "step": 7794 + }, + { + "epoch": 0.7072219197967701, + "grad_norm": 0.10430314060572546, + "learning_rate": 0.0002084539536949887, + "loss": 1.6095, + "step": 7795 + }, + { + "epoch": 0.707312647432408, + "grad_norm": 0.09978692295792813, + "learning_rate": 0.0002083346019160729, + "loss": 1.5499, + "step": 7796 + }, + { + "epoch": 0.7074033750680457, + "grad_norm": 0.0985651170858351, + "learning_rate": 0.00020821527532250734, + "loss": 1.5503, + "step": 7797 + }, + { + "epoch": 0.7074941027036835, + "grad_norm": 0.09916840520031142, + "learning_rate": 0.0002080959739245956, + "loss": 1.5585, + "step": 7798 + }, + { + "epoch": 0.7075848303393214, + "grad_norm": 0.09743433472503306, + "learning_rate": 0.00020797669773263972, + "loss": 1.5638, + "step": 7799 + }, + { + "epoch": 0.7076755579749592, + "grad_norm": 0.09988901955381062, + "learning_rate": 0.00020785744675693886, + "loss": 1.5398, + "step": 7800 + }, + { + "epoch": 0.707766285610597, + "grad_norm": 0.10043777455333655, + "learning_rate": 0.00020773822100779072, + "loss": 1.5835, + "step": 7801 + }, + { + "epoch": 0.7078570132462348, + "grad_norm": 0.09975844761603744, + "learning_rate": 0.00020761902049549024, + "loss": 
1.5854, + "step": 7802 + }, + { + "epoch": 0.7079477408818726, + "grad_norm": 0.09897014791015098, + "learning_rate": 0.00020749984523033027, + "loss": 1.5359, + "step": 7803 + }, + { + "epoch": 0.7080384685175104, + "grad_norm": 0.10076928181249803, + "learning_rate": 0.00020738069522260194, + "loss": 1.5624, + "step": 7804 + }, + { + "epoch": 0.7081291961531483, + "grad_norm": 0.09905817840940577, + "learning_rate": 0.0002072615704825937, + "loss": 1.5542, + "step": 7805 + }, + { + "epoch": 0.7082199237887861, + "grad_norm": 0.10009822350475457, + "learning_rate": 0.00020714247102059186, + "loss": 1.5693, + "step": 7806 + }, + { + "epoch": 0.7083106514244238, + "grad_norm": 0.10241337501556665, + "learning_rate": 0.00020702339684688077, + "loss": 1.5597, + "step": 7807 + }, + { + "epoch": 0.7084013790600617, + "grad_norm": 0.10357737939912759, + "learning_rate": 0.00020690434797174273, + "loss": 1.5709, + "step": 7808 + }, + { + "epoch": 0.7084921066956995, + "grad_norm": 0.10445627929323674, + "learning_rate": 0.00020678532440545743, + "loss": 1.5869, + "step": 7809 + }, + { + "epoch": 0.7085828343313373, + "grad_norm": 0.09814242830321414, + "learning_rate": 0.00020666632615830245, + "loss": 1.5992, + "step": 7810 + }, + { + "epoch": 0.7086735619669752, + "grad_norm": 0.10120762466579804, + "learning_rate": 0.00020654735324055357, + "loss": 1.5423, + "step": 7811 + }, + { + "epoch": 0.708764289602613, + "grad_norm": 0.09669984682116914, + "learning_rate": 0.000206428405662484, + "loss": 1.5124, + "step": 7812 + }, + { + "epoch": 0.7088550172382507, + "grad_norm": 0.10425901612124484, + "learning_rate": 0.00020630948343436468, + "loss": 1.577, + "step": 7813 + }, + { + "epoch": 0.7089457448738886, + "grad_norm": 0.10027785084915745, + "learning_rate": 0.00020619058656646478, + "loss": 1.5433, + "step": 7814 + }, + { + "epoch": 0.7090364725095264, + "grad_norm": 0.09925859075921988, + "learning_rate": 0.0002060717150690512, + "loss": 1.5675, + "step": 7815 + }, + { + "epoch": 0.7091272001451642, + "grad_norm": 0.10157605676815687, + "learning_rate": 0.00020595286895238834, + "loss": 1.5463, + "step": 7816 + }, + { + "epoch": 0.7092179277808021, + "grad_norm": 0.09773071455951797, + "learning_rate": 0.0002058340482267384, + "loss": 1.501, + "step": 7817 + }, + { + "epoch": 0.7093086554164398, + "grad_norm": 0.09950039986856245, + "learning_rate": 0.00020571525290236193, + "loss": 1.5114, + "step": 7818 + }, + { + "epoch": 0.7093993830520776, + "grad_norm": 0.10403692698487388, + "learning_rate": 0.00020559648298951676, + "loss": 1.5551, + "step": 7819 + }, + { + "epoch": 0.7094901106877155, + "grad_norm": 0.09930458369165054, + "learning_rate": 0.00020547773849845846, + "loss": 1.5587, + "step": 7820 + }, + { + "epoch": 0.7095808383233533, + "grad_norm": 0.10347187121899794, + "learning_rate": 0.0002053590194394409, + "loss": 1.5707, + "step": 7821 + }, + { + "epoch": 0.7096715659589911, + "grad_norm": 0.1009758650942857, + "learning_rate": 0.00020524032582271558, + "loss": 1.562, + "step": 7822 + }, + { + "epoch": 0.709762293594629, + "grad_norm": 0.10071239844646637, + "learning_rate": 0.00020512165765853159, + "loss": 1.5523, + "step": 7823 + }, + { + "epoch": 0.7098530212302667, + "grad_norm": 0.09882582371944451, + "learning_rate": 0.00020500301495713575, + "loss": 1.5544, + "step": 7824 + }, + { + "epoch": 0.7099437488659045, + "grad_norm": 0.1009513864659506, + "learning_rate": 0.00020488439772877321, + "loss": 1.5889, + "step": 7825 + }, + { + "epoch": 0.7100344765015424, + 
"grad_norm": 0.10511086359707603, + "learning_rate": 0.00020476580598368638, + "loss": 1.5474, + "step": 7826 + }, + { + "epoch": 0.7101252041371802, + "grad_norm": 0.10024512443497026, + "learning_rate": 0.00020464723973211558, + "loss": 1.577, + "step": 7827 + }, + { + "epoch": 0.710215931772818, + "grad_norm": 0.10184403177804141, + "learning_rate": 0.00020452869898429933, + "loss": 1.591, + "step": 7828 + }, + { + "epoch": 0.7103066594084558, + "grad_norm": 0.10015893199620998, + "learning_rate": 0.0002044101837504733, + "loss": 1.598, + "step": 7829 + }, + { + "epoch": 0.7103973870440936, + "grad_norm": 0.10089271823607707, + "learning_rate": 0.0002042916940408715, + "loss": 1.5949, + "step": 7830 + }, + { + "epoch": 0.7104881146797315, + "grad_norm": 0.10103641673114389, + "learning_rate": 0.00020417322986572561, + "loss": 1.5562, + "step": 7831 + }, + { + "epoch": 0.7105788423153693, + "grad_norm": 0.1028392974832259, + "learning_rate": 0.00020405479123526498, + "loss": 1.5332, + "step": 7832 + }, + { + "epoch": 0.710669569951007, + "grad_norm": 0.1035036082649936, + "learning_rate": 0.00020393637815971656, + "loss": 1.5924, + "step": 7833 + }, + { + "epoch": 0.7107602975866449, + "grad_norm": 0.09983963484386106, + "learning_rate": 0.00020381799064930566, + "loss": 1.5512, + "step": 7834 + }, + { + "epoch": 0.7108510252222827, + "grad_norm": 0.10345694677218706, + "learning_rate": 0.00020369962871425495, + "loss": 1.5588, + "step": 7835 + }, + { + "epoch": 0.7109417528579205, + "grad_norm": 0.1000694028146874, + "learning_rate": 0.00020358129236478496, + "loss": 1.5629, + "step": 7836 + }, + { + "epoch": 0.7110324804935584, + "grad_norm": 0.09978670992258158, + "learning_rate": 0.0002034629816111137, + "loss": 1.5427, + "step": 7837 + }, + { + "epoch": 0.7111232081291962, + "grad_norm": 0.10244127325821803, + "learning_rate": 0.00020334469646345806, + "loss": 1.5315, + "step": 7838 + }, + { + "epoch": 0.7112139357648339, + "grad_norm": 0.09743756819936376, + "learning_rate": 0.00020322643693203158, + "loss": 1.571, + "step": 7839 + }, + { + "epoch": 0.7113046634004718, + "grad_norm": 0.1005421341759573, + "learning_rate": 0.00020310820302704585, + "loss": 1.5497, + "step": 7840 + }, + { + "epoch": 0.7113953910361096, + "grad_norm": 0.10371595370001989, + "learning_rate": 0.00020298999475871072, + "loss": 1.5813, + "step": 7841 + }, + { + "epoch": 0.7114861186717474, + "grad_norm": 0.10102317399582365, + "learning_rate": 0.00020287181213723333, + "loss": 1.5831, + "step": 7842 + }, + { + "epoch": 0.7115768463073853, + "grad_norm": 0.10359113722897546, + "learning_rate": 0.00020275365517281857, + "loss": 1.5134, + "step": 7843 + }, + { + "epoch": 0.711667573943023, + "grad_norm": 0.10047159355948138, + "learning_rate": 0.00020263552387566962, + "loss": 1.5675, + "step": 7844 + }, + { + "epoch": 0.7117583015786608, + "grad_norm": 0.10470397465858608, + "learning_rate": 0.0002025174182559869, + "loss": 1.559, + "step": 7845 + }, + { + "epoch": 0.7118490292142987, + "grad_norm": 0.10228486792422571, + "learning_rate": 0.00020239933832396913, + "loss": 1.5075, + "step": 7846 + }, + { + "epoch": 0.7119397568499365, + "grad_norm": 0.10577263026519175, + "learning_rate": 0.00020228128408981218, + "loss": 1.5437, + "step": 7847 + }, + { + "epoch": 0.7120304844855743, + "grad_norm": 0.10506462617388648, + "learning_rate": 0.0002021632555637104, + "loss": 1.5846, + "step": 7848 + }, + { + "epoch": 0.7121212121212122, + "grad_norm": 0.09971980801121623, + "learning_rate": 0.0002020452527558554, 
+ "loss": 1.5409, + "step": 7849 + }, + { + "epoch": 0.7122119397568499, + "grad_norm": 0.10065472866093057, + "learning_rate": 0.00020192727567643653, + "loss": 1.5318, + "step": 7850 + }, + { + "epoch": 0.7123026673924877, + "grad_norm": 0.10682166903920788, + "learning_rate": 0.00020180932433564152, + "loss": 1.61, + "step": 7851 + }, + { + "epoch": 0.7123933950281256, + "grad_norm": 0.10444507667091979, + "learning_rate": 0.00020169139874365533, + "loss": 1.5497, + "step": 7852 + }, + { + "epoch": 0.7124841226637634, + "grad_norm": 0.10583610811503913, + "learning_rate": 0.00020157349891066062, + "loss": 1.5611, + "step": 7853 + }, + { + "epoch": 0.7125748502994012, + "grad_norm": 0.10156264077486916, + "learning_rate": 0.00020145562484683827, + "loss": 1.5661, + "step": 7854 + }, + { + "epoch": 0.712665577935039, + "grad_norm": 0.10052379825719199, + "learning_rate": 0.0002013377765623669, + "loss": 1.5487, + "step": 7855 + }, + { + "epoch": 0.7127563055706768, + "grad_norm": 0.10067550805730498, + "learning_rate": 0.00020121995406742254, + "loss": 1.5574, + "step": 7856 + }, + { + "epoch": 0.7128470332063146, + "grad_norm": 0.1005999590443153, + "learning_rate": 0.00020110215737217902, + "loss": 1.5356, + "step": 7857 + }, + { + "epoch": 0.7129377608419525, + "grad_norm": 0.10208255393481108, + "learning_rate": 0.00020098438648680846, + "loss": 1.5396, + "step": 7858 + }, + { + "epoch": 0.7130284884775903, + "grad_norm": 0.10318113454771237, + "learning_rate": 0.00020086664142148015, + "loss": 1.5433, + "step": 7859 + }, + { + "epoch": 0.713119216113228, + "grad_norm": 0.10185398774378841, + "learning_rate": 0.00020074892218636126, + "loss": 1.5429, + "step": 7860 + }, + { + "epoch": 0.7132099437488659, + "grad_norm": 0.10276297612442124, + "learning_rate": 0.00020063122879161705, + "loss": 1.5595, + "step": 7861 + }, + { + "epoch": 0.7133006713845037, + "grad_norm": 0.10678657472028319, + "learning_rate": 0.00020051356124741054, + "loss": 1.5957, + "step": 7862 + }, + { + "epoch": 0.7133913990201415, + "grad_norm": 0.10497300882313938, + "learning_rate": 0.00020039591956390218, + "loss": 1.5993, + "step": 7863 + }, + { + "epoch": 0.7134821266557794, + "grad_norm": 0.10093118929016107, + "learning_rate": 0.00020027830375125011, + "loss": 1.4976, + "step": 7864 + }, + { + "epoch": 0.7135728542914171, + "grad_norm": 0.10002863490169435, + "learning_rate": 0.00020016071381961088, + "loss": 1.523, + "step": 7865 + }, + { + "epoch": 0.713663581927055, + "grad_norm": 0.10168256166526973, + "learning_rate": 0.00020004314977913824, + "loss": 1.5496, + "step": 7866 + }, + { + "epoch": 0.7137543095626928, + "grad_norm": 0.09729915585130325, + "learning_rate": 0.00019992561163998358, + "loss": 1.5502, + "step": 7867 + }, + { + "epoch": 0.7138450371983306, + "grad_norm": 0.09869673033733382, + "learning_rate": 0.00019980809941229661, + "loss": 1.5524, + "step": 7868 + }, + { + "epoch": 0.7139357648339685, + "grad_norm": 0.10001526455562981, + "learning_rate": 0.0001996906131062247, + "loss": 1.5412, + "step": 7869 + }, + { + "epoch": 0.7140264924696063, + "grad_norm": 0.10181001273121568, + "learning_rate": 0.00019957315273191257, + "loss": 1.5988, + "step": 7870 + }, + { + "epoch": 0.714117220105244, + "grad_norm": 0.10394694792651611, + "learning_rate": 0.00019945571829950282, + "loss": 1.5413, + "step": 7871 + }, + { + "epoch": 0.7142079477408819, + "grad_norm": 0.10570805695840885, + "learning_rate": 0.0001993383098191363, + "loss": 1.6308, + "step": 7872 + }, + { + "epoch": 
0.7142986753765197, + "grad_norm": 0.10085950183876968, + "learning_rate": 0.00019922092730095098, + "loss": 1.5712, + "step": 7873 + }, + { + "epoch": 0.7143894030121575, + "grad_norm": 0.09994113639962267, + "learning_rate": 0.0001991035707550828, + "loss": 1.6117, + "step": 7874 + }, + { + "epoch": 0.7144801306477954, + "grad_norm": 0.10196394911971271, + "learning_rate": 0.00019898624019166577, + "loss": 1.5639, + "step": 7875 + }, + { + "epoch": 0.7145708582834331, + "grad_norm": 0.10759850191688951, + "learning_rate": 0.00019886893562083108, + "loss": 1.5831, + "step": 7876 + }, + { + "epoch": 0.7146615859190709, + "grad_norm": 0.10398231463209685, + "learning_rate": 0.00019875165705270838, + "loss": 1.5956, + "step": 7877 + }, + { + "epoch": 0.7147523135547088, + "grad_norm": 0.09832089061243249, + "learning_rate": 0.00019863440449742432, + "loss": 1.5883, + "step": 7878 + }, + { + "epoch": 0.7148430411903466, + "grad_norm": 0.09713314718124658, + "learning_rate": 0.00019851717796510405, + "loss": 1.5815, + "step": 7879 + }, + { + "epoch": 0.7149337688259844, + "grad_norm": 0.10321127085678866, + "learning_rate": 0.00019839997746586986, + "loss": 1.6036, + "step": 7880 + }, + { + "epoch": 0.7150244964616222, + "grad_norm": 0.10192989127231694, + "learning_rate": 0.00019828280300984187, + "loss": 1.5603, + "step": 7881 + }, + { + "epoch": 0.71511522409726, + "grad_norm": 0.1077140344917289, + "learning_rate": 0.00019816565460713853, + "loss": 1.5072, + "step": 7882 + }, + { + "epoch": 0.7152059517328978, + "grad_norm": 0.10100740034401566, + "learning_rate": 0.00019804853226787535, + "loss": 1.559, + "step": 7883 + }, + { + "epoch": 0.7152966793685357, + "grad_norm": 0.09883697974677416, + "learning_rate": 0.00019793143600216578, + "loss": 1.5273, + "step": 7884 + }, + { + "epoch": 0.7153874070041735, + "grad_norm": 0.10737637051947445, + "learning_rate": 0.0001978143658201212, + "loss": 1.5521, + "step": 7885 + }, + { + "epoch": 0.7154781346398112, + "grad_norm": 0.10554740536126654, + "learning_rate": 0.0001976973217318509, + "loss": 1.4958, + "step": 7886 + }, + { + "epoch": 0.7155688622754491, + "grad_norm": 0.09643757746857011, + "learning_rate": 0.0001975803037474614, + "loss": 1.5245, + "step": 7887 + }, + { + "epoch": 0.7156595899110869, + "grad_norm": 0.10319685760023059, + "learning_rate": 0.00019746331187705702, + "loss": 1.5478, + "step": 7888 + }, + { + "epoch": 0.7157503175467247, + "grad_norm": 0.10078274997137648, + "learning_rate": 0.00019734634613074043, + "loss": 1.4896, + "step": 7889 + }, + { + "epoch": 0.7158410451823626, + "grad_norm": 0.09739977727166052, + "learning_rate": 0.0001972294065186115, + "loss": 1.5735, + "step": 7890 + }, + { + "epoch": 0.7159317728180004, + "grad_norm": 0.1031660873455106, + "learning_rate": 0.0001971124930507678, + "loss": 1.5594, + "step": 7891 + }, + { + "epoch": 0.7160225004536381, + "grad_norm": 0.10301079553033635, + "learning_rate": 0.00019699560573730485, + "loss": 1.5647, + "step": 7892 + }, + { + "epoch": 0.716113228089276, + "grad_norm": 0.09960201905347157, + "learning_rate": 0.0001968787445883163, + "loss": 1.5437, + "step": 7893 + }, + { + "epoch": 0.7162039557249138, + "grad_norm": 0.0982876516248305, + "learning_rate": 0.00019676190961389272, + "loss": 1.5267, + "step": 7894 + }, + { + "epoch": 0.7162946833605516, + "grad_norm": 0.09948954615543179, + "learning_rate": 0.00019664510082412285, + "loss": 1.5582, + "step": 7895 + }, + { + "epoch": 0.7163854109961895, + "grad_norm": 0.10544019313098553, + 
"learning_rate": 0.0001965283182290934, + "loss": 1.5573, + "step": 7896 + }, + { + "epoch": 0.7164761386318272, + "grad_norm": 0.1017055529755723, + "learning_rate": 0.00019641156183888836, + "loss": 1.5746, + "step": 7897 + }, + { + "epoch": 0.716566866267465, + "grad_norm": 0.10033989679012734, + "learning_rate": 0.00019629483166358952, + "loss": 1.5516, + "step": 7898 + }, + { + "epoch": 0.7166575939031029, + "grad_norm": 0.09848162224728539, + "learning_rate": 0.00019617812771327675, + "loss": 1.5168, + "step": 7899 + }, + { + "epoch": 0.7167483215387407, + "grad_norm": 0.09935712151792805, + "learning_rate": 0.00019606144999802762, + "loss": 1.5499, + "step": 7900 + }, + { + "epoch": 0.7168390491743785, + "grad_norm": 0.1007482129605427, + "learning_rate": 0.00019594479852791686, + "loss": 1.5756, + "step": 7901 + }, + { + "epoch": 0.7169297768100164, + "grad_norm": 0.1003047914922827, + "learning_rate": 0.00019582817331301777, + "loss": 1.5528, + "step": 7902 + }, + { + "epoch": 0.7170205044456541, + "grad_norm": 0.09958862137282365, + "learning_rate": 0.00019571157436340075, + "loss": 1.5613, + "step": 7903 + }, + { + "epoch": 0.717111232081292, + "grad_norm": 0.1047791971442858, + "learning_rate": 0.00019559500168913414, + "loss": 1.57, + "step": 7904 + }, + { + "epoch": 0.7172019597169298, + "grad_norm": 0.11067002758940213, + "learning_rate": 0.0001954784553002838, + "loss": 1.5279, + "step": 7905 + }, + { + "epoch": 0.7172926873525676, + "grad_norm": 0.1006923648524339, + "learning_rate": 0.00019536193520691388, + "loss": 1.542, + "step": 7906 + }, + { + "epoch": 0.7173834149882055, + "grad_norm": 0.10306162084132958, + "learning_rate": 0.00019524544141908562, + "loss": 1.5786, + "step": 7907 + }, + { + "epoch": 0.7174741426238432, + "grad_norm": 0.1053808788300511, + "learning_rate": 0.00019512897394685836, + "loss": 1.5869, + "step": 7908 + }, + { + "epoch": 0.717564870259481, + "grad_norm": 0.10121219774590372, + "learning_rate": 0.00019501253280028934, + "loss": 1.5402, + "step": 7909 + }, + { + "epoch": 0.7176555978951189, + "grad_norm": 0.10232266417448688, + "learning_rate": 0.00019489611798943307, + "loss": 1.5804, + "step": 7910 + }, + { + "epoch": 0.7177463255307567, + "grad_norm": 0.10319409721016222, + "learning_rate": 0.0001947797295243418, + "loss": 1.5178, + "step": 7911 + }, + { + "epoch": 0.7178370531663945, + "grad_norm": 0.09802473818791059, + "learning_rate": 0.00019466336741506607, + "loss": 1.5076, + "step": 7912 + }, + { + "epoch": 0.7179277808020323, + "grad_norm": 0.09844455795316165, + "learning_rate": 0.0001945470316716535, + "loss": 1.5488, + "step": 7913 + }, + { + "epoch": 0.7180185084376701, + "grad_norm": 0.10080460772660225, + "learning_rate": 0.0001944307223041498, + "loss": 1.5707, + "step": 7914 + }, + { + "epoch": 0.7181092360733079, + "grad_norm": 0.1001921203766273, + "learning_rate": 0.00019431443932259795, + "loss": 1.5452, + "step": 7915 + }, + { + "epoch": 0.7181999637089458, + "grad_norm": 0.1041963298911596, + "learning_rate": 0.00019419818273703967, + "loss": 1.5671, + "step": 7916 + }, + { + "epoch": 0.7182906913445836, + "grad_norm": 0.10010203627124259, + "learning_rate": 0.0001940819525575133, + "loss": 1.5644, + "step": 7917 + }, + { + "epoch": 0.7183814189802213, + "grad_norm": 0.09807300804409753, + "learning_rate": 0.00019396574879405526, + "loss": 1.5684, + "step": 7918 + }, + { + "epoch": 0.7184721466158592, + "grad_norm": 0.09795902504290767, + "learning_rate": 0.00019384957145670014, + "loss": 1.5901, + "step": 7919 + }, 
+ { + "epoch": 0.718562874251497, + "grad_norm": 0.10265835890774694, + "learning_rate": 0.00019373342055547965, + "loss": 1.5673, + "step": 7920 + }, + { + "epoch": 0.7186536018871348, + "grad_norm": 0.09961293789273658, + "learning_rate": 0.00019361729610042322, + "loss": 1.4996, + "step": 7921 + }, + { + "epoch": 0.7187443295227727, + "grad_norm": 0.10161066687041903, + "learning_rate": 0.00019350119810155865, + "loss": 1.5466, + "step": 7922 + }, + { + "epoch": 0.7188350571584105, + "grad_norm": 0.10173567080903372, + "learning_rate": 0.00019338512656891056, + "loss": 1.5958, + "step": 7923 + }, + { + "epoch": 0.7189257847940482, + "grad_norm": 0.10153240086586982, + "learning_rate": 0.00019326908151250216, + "loss": 1.5869, + "step": 7924 + }, + { + "epoch": 0.7190165124296861, + "grad_norm": 0.10196559606707417, + "learning_rate": 0.00019315306294235364, + "loss": 1.5749, + "step": 7925 + }, + { + "epoch": 0.7191072400653239, + "grad_norm": 0.10092431600696837, + "learning_rate": 0.00019303707086848354, + "loss": 1.5398, + "step": 7926 + }, + { + "epoch": 0.7191979677009617, + "grad_norm": 0.09946311011393699, + "learning_rate": 0.00019292110530090757, + "loss": 1.5562, + "step": 7927 + }, + { + "epoch": 0.7192886953365996, + "grad_norm": 0.10227495663991323, + "learning_rate": 0.0001928051662496393, + "loss": 1.5454, + "step": 7928 + }, + { + "epoch": 0.7193794229722373, + "grad_norm": 0.10084467318340347, + "learning_rate": 0.0001926892537246903, + "loss": 1.5749, + "step": 7929 + }, + { + "epoch": 0.7194701506078751, + "grad_norm": 0.09876527543151695, + "learning_rate": 0.0001925733677360696, + "loss": 1.5472, + "step": 7930 + }, + { + "epoch": 0.719560878243513, + "grad_norm": 0.10084950079052771, + "learning_rate": 0.00019245750829378374, + "loss": 1.572, + "step": 7931 + }, + { + "epoch": 0.7196516058791508, + "grad_norm": 0.10371604189912063, + "learning_rate": 0.0001923416754078373, + "loss": 1.5994, + "step": 7932 + }, + { + "epoch": 0.7197423335147886, + "grad_norm": 0.10229676440444814, + "learning_rate": 0.00019222586908823275, + "loss": 1.5503, + "step": 7933 + }, + { + "epoch": 0.7198330611504264, + "grad_norm": 0.10044685758794072, + "learning_rate": 0.00019211008934496977, + "loss": 1.5535, + "step": 7934 + }, + { + "epoch": 0.7199237887860642, + "grad_norm": 0.102677545516238, + "learning_rate": 0.00019199433618804574, + "loss": 1.6177, + "step": 7935 + }, + { + "epoch": 0.720014516421702, + "grad_norm": 0.09826163813642444, + "learning_rate": 0.0001918786096274564, + "loss": 1.5398, + "step": 7936 + }, + { + "epoch": 0.7201052440573399, + "grad_norm": 0.09556889940552563, + "learning_rate": 0.0001917629096731945, + "loss": 1.5559, + "step": 7937 + }, + { + "epoch": 0.7201959716929777, + "grad_norm": 0.10109156393818265, + "learning_rate": 0.0001916472363352506, + "loss": 1.5471, + "step": 7938 + }, + { + "epoch": 0.7202866993286154, + "grad_norm": 0.10142569297144045, + "learning_rate": 0.00019153158962361327, + "loss": 1.5682, + "step": 7939 + }, + { + "epoch": 0.7203774269642533, + "grad_norm": 0.10053123603971352, + "learning_rate": 0.0001914159695482689, + "loss": 1.561, + "step": 7940 + }, + { + "epoch": 0.7204681545998911, + "grad_norm": 0.10098861105825373, + "learning_rate": 0.00019130037611920098, + "loss": 1.551, + "step": 7941 + }, + { + "epoch": 0.720558882235529, + "grad_norm": 0.09902207190110678, + "learning_rate": 0.00019118480934639088, + "loss": 1.5268, + "step": 7942 + }, + { + "epoch": 0.7206496098711668, + "grad_norm": 0.10218794523401187, + 
"learning_rate": 0.00019106926923981822, + "loss": 1.5606, + "step": 7943 + }, + { + "epoch": 0.7207403375068046, + "grad_norm": 0.10174206388226849, + "learning_rate": 0.00019095375580945967, + "loss": 1.5128, + "step": 7944 + }, + { + "epoch": 0.7208310651424424, + "grad_norm": 0.10157490500440208, + "learning_rate": 0.00019083826906528972, + "loss": 1.5482, + "step": 7945 + }, + { + "epoch": 0.7209217927780802, + "grad_norm": 0.10082745721454373, + "learning_rate": 0.0001907228090172808, + "loss": 1.5866, + "step": 7946 + }, + { + "epoch": 0.721012520413718, + "grad_norm": 0.09948259469206949, + "learning_rate": 0.00019060737567540303, + "loss": 1.5899, + "step": 7947 + }, + { + "epoch": 0.7211032480493559, + "grad_norm": 0.09919370660865179, + "learning_rate": 0.00019049196904962395, + "loss": 1.5865, + "step": 7948 + }, + { + "epoch": 0.7211939756849937, + "grad_norm": 0.10107114072180515, + "learning_rate": 0.00019037658914990884, + "loss": 1.5369, + "step": 7949 + }, + { + "epoch": 0.7212847033206314, + "grad_norm": 0.10093171531144396, + "learning_rate": 0.00019026123598622107, + "loss": 1.5781, + "step": 7950 + }, + { + "epoch": 0.7213754309562693, + "grad_norm": 0.10170722777004435, + "learning_rate": 0.00019014590956852124, + "loss": 1.5635, + "step": 7951 + }, + { + "epoch": 0.7214661585919071, + "grad_norm": 0.10018301106815994, + "learning_rate": 0.00019003060990676758, + "loss": 1.5773, + "step": 7952 + }, + { + "epoch": 0.7215568862275449, + "grad_norm": 0.1068500043425099, + "learning_rate": 0.00018991533701091657, + "loss": 1.5896, + "step": 7953 + }, + { + "epoch": 0.7216476138631828, + "grad_norm": 0.1011373842367507, + "learning_rate": 0.00018980009089092183, + "loss": 1.5651, + "step": 7954 + }, + { + "epoch": 0.7217383414988205, + "grad_norm": 0.10533542154188204, + "learning_rate": 0.0001896848715567351, + "loss": 1.5707, + "step": 7955 + }, + { + "epoch": 0.7218290691344583, + "grad_norm": 0.10096908969299352, + "learning_rate": 0.00018956967901830523, + "loss": 1.5663, + "step": 7956 + }, + { + "epoch": 0.7219197967700962, + "grad_norm": 0.10086712772316038, + "learning_rate": 0.0001894545132855795, + "loss": 1.5904, + "step": 7957 + }, + { + "epoch": 0.722010524405734, + "grad_norm": 0.09640958237335268, + "learning_rate": 0.00018933937436850236, + "loss": 1.5422, + "step": 7958 + }, + { + "epoch": 0.7221012520413718, + "grad_norm": 0.09689072507387311, + "learning_rate": 0.00018922426227701582, + "loss": 1.5468, + "step": 7959 + }, + { + "epoch": 0.7221919796770097, + "grad_norm": 0.10457467225701148, + "learning_rate": 0.0001891091770210603, + "loss": 1.5551, + "step": 7960 + }, + { + "epoch": 0.7222827073126474, + "grad_norm": 0.10106642790974836, + "learning_rate": 0.00018899411861057308, + "loss": 1.5575, + "step": 7961 + }, + { + "epoch": 0.7223734349482852, + "grad_norm": 0.09699999800931715, + "learning_rate": 0.0001888790870554894, + "loss": 1.5372, + "step": 7962 + }, + { + "epoch": 0.7224641625839231, + "grad_norm": 0.1054561474571875, + "learning_rate": 0.00018876408236574238, + "loss": 1.5801, + "step": 7963 + }, + { + "epoch": 0.7225548902195609, + "grad_norm": 0.10206077098907614, + "learning_rate": 0.00018864910455126293, + "loss": 1.5355, + "step": 7964 + }, + { + "epoch": 0.7226456178551987, + "grad_norm": 0.0993912561621099, + "learning_rate": 0.00018853415362197917, + "loss": 1.5655, + "step": 7965 + }, + { + "epoch": 0.7227363454908365, + "grad_norm": 0.10026911564572633, + "learning_rate": 0.000188419229587817, + "loss": 1.5609, + "step": 
7966 + }, + { + "epoch": 0.7228270731264743, + "grad_norm": 0.09983921393470034, + "learning_rate": 0.00018830433245870044, + "loss": 1.5849, + "step": 7967 + }, + { + "epoch": 0.7229178007621121, + "grad_norm": 0.09896009755243593, + "learning_rate": 0.00018818946224455075, + "loss": 1.5336, + "step": 7968 + }, + { + "epoch": 0.72300852839775, + "grad_norm": 0.10427306381907132, + "learning_rate": 0.00018807461895528673, + "loss": 1.5758, + "step": 7969 + }, + { + "epoch": 0.7230992560333878, + "grad_norm": 0.10120920940011516, + "learning_rate": 0.00018795980260082534, + "loss": 1.5786, + "step": 7970 + }, + { + "epoch": 0.7231899836690255, + "grad_norm": 0.10054371055300979, + "learning_rate": 0.00018784501319108116, + "loss": 1.5618, + "step": 7971 + }, + { + "epoch": 0.7232807113046634, + "grad_norm": 0.10034551160450596, + "learning_rate": 0.0001877302507359661, + "loss": 1.5884, + "step": 7972 + }, + { + "epoch": 0.7233714389403012, + "grad_norm": 0.1024266290629453, + "learning_rate": 0.00018761551524538973, + "loss": 1.5039, + "step": 7973 + }, + { + "epoch": 0.723462166575939, + "grad_norm": 0.1035756145034394, + "learning_rate": 0.00018750080672925984, + "loss": 1.5917, + "step": 7974 + }, + { + "epoch": 0.7235528942115769, + "grad_norm": 0.10084532871847689, + "learning_rate": 0.0001873861251974814, + "loss": 1.6006, + "step": 7975 + }, + { + "epoch": 0.7236436218472146, + "grad_norm": 0.1009501631241762, + "learning_rate": 0.00018727147065995687, + "loss": 1.574, + "step": 7976 + }, + { + "epoch": 0.7237343494828524, + "grad_norm": 0.10241594239772667, + "learning_rate": 0.00018715684312658697, + "loss": 1.5992, + "step": 7977 + }, + { + "epoch": 0.7238250771184903, + "grad_norm": 0.10059984570170828, + "learning_rate": 0.00018704224260726994, + "loss": 1.5716, + "step": 7978 + }, + { + "epoch": 0.7239158047541281, + "grad_norm": 0.10054761917198977, + "learning_rate": 0.00018692766911190123, + "loss": 1.5397, + "step": 7979 + }, + { + "epoch": 0.724006532389766, + "grad_norm": 0.10168352998709061, + "learning_rate": 0.00018681312265037459, + "loss": 1.5715, + "step": 7980 + }, + { + "epoch": 0.7240972600254038, + "grad_norm": 0.10163528687514793, + "learning_rate": 0.00018669860323258097, + "loss": 1.5486, + "step": 7981 + }, + { + "epoch": 0.7241879876610415, + "grad_norm": 0.09946582715603011, + "learning_rate": 0.00018658411086840915, + "loss": 1.5434, + "step": 7982 + }, + { + "epoch": 0.7242787152966794, + "grad_norm": 0.1024196506339684, + "learning_rate": 0.00018646964556774537, + "loss": 1.5124, + "step": 7983 + }, + { + "epoch": 0.7243694429323172, + "grad_norm": 0.10231372406279719, + "learning_rate": 0.00018635520734047405, + "loss": 1.5514, + "step": 7984 + }, + { + "epoch": 0.724460170567955, + "grad_norm": 0.09943269076060278, + "learning_rate": 0.0001862407961964767, + "loss": 1.5538, + "step": 7985 + }, + { + "epoch": 0.7245508982035929, + "grad_norm": 0.10165787903967446, + "learning_rate": 0.0001861264121456328, + "loss": 1.5246, + "step": 7986 + }, + { + "epoch": 0.7246416258392306, + "grad_norm": 0.1004662117944376, + "learning_rate": 0.00018601205519781962, + "loss": 1.5754, + "step": 7987 + }, + { + "epoch": 0.7247323534748684, + "grad_norm": 0.10157048516152317, + "learning_rate": 0.00018589772536291177, + "loss": 1.5518, + "step": 7988 + }, + { + "epoch": 0.7248230811105063, + "grad_norm": 0.09890372407468732, + "learning_rate": 0.00018578342265078147, + "loss": 1.6125, + "step": 7989 + }, + { + "epoch": 0.7249138087461441, + "grad_norm": 
0.10430601194413824, + "learning_rate": 0.0001856691470712991, + "loss": 1.5773, + "step": 7990 + }, + { + "epoch": 0.7250045363817819, + "grad_norm": 0.10093641291361934, + "learning_rate": 0.00018555489863433222, + "loss": 1.5369, + "step": 7991 + }, + { + "epoch": 0.7250952640174197, + "grad_norm": 0.09676675399307459, + "learning_rate": 0.00018544067734974618, + "loss": 1.5685, + "step": 7992 + }, + { + "epoch": 0.7251859916530575, + "grad_norm": 0.10157783329482085, + "learning_rate": 0.00018532648322740363, + "loss": 1.5653, + "step": 7993 + }, + { + "epoch": 0.7252767192886953, + "grad_norm": 0.10103630261126141, + "learning_rate": 0.000185212316277166, + "loss": 1.5468, + "step": 7994 + }, + { + "epoch": 0.7253674469243332, + "grad_norm": 0.10008393762640704, + "learning_rate": 0.00018509817650889127, + "loss": 1.5023, + "step": 7995 + }, + { + "epoch": 0.725458174559971, + "grad_norm": 0.09680155113849867, + "learning_rate": 0.0001849840639324352, + "loss": 1.5144, + "step": 7996 + }, + { + "epoch": 0.7255489021956087, + "grad_norm": 0.10250518883031345, + "learning_rate": 0.00018486997855765187, + "loss": 1.5934, + "step": 7997 + }, + { + "epoch": 0.7256396298312466, + "grad_norm": 0.10160663580410903, + "learning_rate": 0.0001847559203943923, + "loss": 1.5271, + "step": 7998 + }, + { + "epoch": 0.7257303574668844, + "grad_norm": 0.10149691861182572, + "learning_rate": 0.0001846418894525053, + "loss": 1.5794, + "step": 7999 + }, + { + "epoch": 0.7258210851025222, + "grad_norm": 0.10057195314025645, + "learning_rate": 0.00018452788574183782, + "loss": 1.5442, + "step": 8000 + }, + { + "epoch": 0.7259118127381601, + "grad_norm": 0.10104563238166284, + "learning_rate": 0.00018441390927223373, + "loss": 1.5536, + "step": 8001 + }, + { + "epoch": 0.7260025403737979, + "grad_norm": 0.10560568678036722, + "learning_rate": 0.00018429996005353522, + "loss": 1.5779, + "step": 8002 + }, + { + "epoch": 0.7260932680094356, + "grad_norm": 0.10035425817387263, + "learning_rate": 0.00018418603809558154, + "loss": 1.5442, + "step": 8003 + }, + { + "epoch": 0.7261839956450735, + "grad_norm": 0.10277763852017345, + "learning_rate": 0.00018407214340821016, + "loss": 1.5396, + "step": 8004 + }, + { + "epoch": 0.7262747232807113, + "grad_norm": 0.10297057948764345, + "learning_rate": 0.0001839582760012558, + "loss": 1.5575, + "step": 8005 + }, + { + "epoch": 0.7263654509163491, + "grad_norm": 0.10201564739132171, + "learning_rate": 0.00018384443588455069, + "loss": 1.553, + "step": 8006 + }, + { + "epoch": 0.726456178551987, + "grad_norm": 0.10494613181005777, + "learning_rate": 0.00018373062306792533, + "loss": 1.6036, + "step": 8007 + }, + { + "epoch": 0.7265469061876247, + "grad_norm": 0.10021050771579465, + "learning_rate": 0.00018361683756120724, + "loss": 1.5804, + "step": 8008 + }, + { + "epoch": 0.7266376338232625, + "grad_norm": 0.09831371165710469, + "learning_rate": 0.00018350307937422172, + "loss": 1.5037, + "step": 8009 + }, + { + "epoch": 0.7267283614589004, + "grad_norm": 0.10615684797284562, + "learning_rate": 0.00018338934851679195, + "loss": 1.5656, + "step": 8010 + }, + { + "epoch": 0.7268190890945382, + "grad_norm": 0.09932021347661202, + "learning_rate": 0.00018327564499873871, + "loss": 1.5746, + "step": 8011 + }, + { + "epoch": 0.726909816730176, + "grad_norm": 0.1027845524811969, + "learning_rate": 0.0001831619688298803, + "loss": 1.5245, + "step": 8012 + }, + { + "epoch": 0.7270005443658138, + "grad_norm": 0.100777641403616, + "learning_rate": 0.00018304832002003236, + "loss": 
1.5452, + "step": 8013 + }, + { + "epoch": 0.7270912720014516, + "grad_norm": 0.10011831021342334, + "learning_rate": 0.00018293469857900884, + "loss": 1.5759, + "step": 8014 + }, + { + "epoch": 0.7271819996370894, + "grad_norm": 0.10420979076722002, + "learning_rate": 0.00018282110451662087, + "loss": 1.5688, + "step": 8015 + }, + { + "epoch": 0.7272727272727273, + "grad_norm": 0.10027401200864229, + "learning_rate": 0.00018270753784267708, + "loss": 1.5205, + "step": 8016 + }, + { + "epoch": 0.7273634549083651, + "grad_norm": 0.10314304882002896, + "learning_rate": 0.00018259399856698416, + "loss": 1.5187, + "step": 8017 + }, + { + "epoch": 0.727454182544003, + "grad_norm": 0.09925886492993435, + "learning_rate": 0.00018248048669934646, + "loss": 1.5941, + "step": 8018 + }, + { + "epoch": 0.7275449101796407, + "grad_norm": 0.10515858168784549, + "learning_rate": 0.00018236700224956548, + "loss": 1.5632, + "step": 8019 + }, + { + "epoch": 0.7276356378152785, + "grad_norm": 0.09785468278929854, + "learning_rate": 0.00018225354522744054, + "loss": 1.5964, + "step": 8020 + }, + { + "epoch": 0.7277263654509164, + "grad_norm": 0.10294636319580319, + "learning_rate": 0.00018214011564276895, + "loss": 1.5605, + "step": 8021 + }, + { + "epoch": 0.7278170930865542, + "grad_norm": 0.10551091739840458, + "learning_rate": 0.00018202671350534523, + "loss": 1.4802, + "step": 8022 + }, + { + "epoch": 0.727907820722192, + "grad_norm": 0.10075898923853346, + "learning_rate": 0.00018191333882496146, + "loss": 1.5672, + "step": 8023 + }, + { + "epoch": 0.7279985483578298, + "grad_norm": 0.10846796713023137, + "learning_rate": 0.0001817999916114078, + "loss": 1.5742, + "step": 8024 + }, + { + "epoch": 0.7280892759934676, + "grad_norm": 0.10509130616535726, + "learning_rate": 0.0001816866718744719, + "loss": 1.5034, + "step": 8025 + }, + { + "epoch": 0.7281800036291054, + "grad_norm": 0.09922613566115451, + "learning_rate": 0.00018157337962393882, + "loss": 1.5149, + "step": 8026 + }, + { + "epoch": 0.7282707312647433, + "grad_norm": 0.10290480329624703, + "learning_rate": 0.00018146011486959118, + "loss": 1.56, + "step": 8027 + }, + { + "epoch": 0.7283614589003811, + "grad_norm": 0.1025606528677002, + "learning_rate": 0.0001813468776212097, + "loss": 1.5439, + "step": 8028 + }, + { + "epoch": 0.7284521865360188, + "grad_norm": 0.10931202963686758, + "learning_rate": 0.00018123366788857232, + "loss": 1.6048, + "step": 8029 + }, + { + "epoch": 0.7285429141716567, + "grad_norm": 0.09984501554295516, + "learning_rate": 0.00018112048568145455, + "loss": 1.5438, + "step": 8030 + }, + { + "epoch": 0.7286336418072945, + "grad_norm": 0.09905458505958836, + "learning_rate": 0.00018100733100963, + "loss": 1.5908, + "step": 8031 + }, + { + "epoch": 0.7287243694429323, + "grad_norm": 0.09939207717271244, + "learning_rate": 0.00018089420388286927, + "loss": 1.5365, + "step": 8032 + }, + { + "epoch": 0.7288150970785702, + "grad_norm": 0.10145920960512642, + "learning_rate": 0.00018078110431094123, + "loss": 1.5575, + "step": 8033 + }, + { + "epoch": 0.728905824714208, + "grad_norm": 0.10239789389346388, + "learning_rate": 0.00018066803230361173, + "loss": 1.5631, + "step": 8034 + }, + { + "epoch": 0.7289965523498457, + "grad_norm": 0.09930456207981164, + "learning_rate": 0.00018055498787064483, + "loss": 1.569, + "step": 8035 + }, + { + "epoch": 0.7290872799854836, + "grad_norm": 0.0988633426767511, + "learning_rate": 0.00018044197102180183, + "loss": 1.5279, + "step": 8036 + }, + { + "epoch": 0.7291780076211214, + 
"grad_norm": 0.1040802514508737, + "learning_rate": 0.00018032898176684154, + "loss": 1.5239, + "step": 8037 + }, + { + "epoch": 0.7292687352567592, + "grad_norm": 0.10896715985882967, + "learning_rate": 0.000180216020115521, + "loss": 1.5634, + "step": 8038 + }, + { + "epoch": 0.7293594628923971, + "grad_norm": 0.1034962216289245, + "learning_rate": 0.00018010308607759422, + "loss": 1.5711, + "step": 8039 + }, + { + "epoch": 0.7294501905280348, + "grad_norm": 0.10474816408580422, + "learning_rate": 0.00017999017966281294, + "loss": 1.5631, + "step": 8040 + }, + { + "epoch": 0.7295409181636726, + "grad_norm": 0.10519519891048275, + "learning_rate": 0.00017987730088092686, + "loss": 1.5742, + "step": 8041 + }, + { + "epoch": 0.7296316457993105, + "grad_norm": 0.10249533030561925, + "learning_rate": 0.00017976444974168317, + "loss": 1.5893, + "step": 8042 + }, + { + "epoch": 0.7297223734349483, + "grad_norm": 0.10044377111744456, + "learning_rate": 0.0001796516262548264, + "loss": 1.5298, + "step": 8043 + }, + { + "epoch": 0.7298131010705861, + "grad_norm": 0.10081600497946425, + "learning_rate": 0.00017953883043009876, + "loss": 1.5585, + "step": 8044 + }, + { + "epoch": 0.729903828706224, + "grad_norm": 0.1010135727295831, + "learning_rate": 0.00017942606227724057, + "loss": 1.5757, + "step": 8045 + }, + { + "epoch": 0.7299945563418617, + "grad_norm": 0.09927939810876045, + "learning_rate": 0.0001793133218059891, + "loss": 1.5668, + "step": 8046 + }, + { + "epoch": 0.7300852839774995, + "grad_norm": 0.10709265030847004, + "learning_rate": 0.00017920060902607937, + "loss": 1.5475, + "step": 8047 + }, + { + "epoch": 0.7301760116131374, + "grad_norm": 0.10383795074910461, + "learning_rate": 0.00017908792394724437, + "loss": 1.5395, + "step": 8048 + }, + { + "epoch": 0.7302667392487752, + "grad_norm": 0.10132671409707247, + "learning_rate": 0.0001789752665792146, + "loss": 1.5263, + "step": 8049 + }, + { + "epoch": 0.7303574668844129, + "grad_norm": 0.10114123706871898, + "learning_rate": 0.00017886263693171788, + "loss": 1.5494, + "step": 8050 + }, + { + "epoch": 0.7304481945200508, + "grad_norm": 0.09778337260700735, + "learning_rate": 0.0001787500350144796, + "loss": 1.5475, + "step": 8051 + }, + { + "epoch": 0.7305389221556886, + "grad_norm": 0.10499480678146354, + "learning_rate": 0.00017863746083722338, + "loss": 1.6073, + "step": 8052 + }, + { + "epoch": 0.7306296497913264, + "grad_norm": 0.10157294400782932, + "learning_rate": 0.00017852491440966973, + "loss": 1.5822, + "step": 8053 + }, + { + "epoch": 0.7307203774269643, + "grad_norm": 0.09971419853877397, + "learning_rate": 0.0001784123957415369, + "loss": 1.6075, + "step": 8054 + }, + { + "epoch": 0.730811105062602, + "grad_norm": 0.10016615221930146, + "learning_rate": 0.0001782999048425411, + "loss": 1.5747, + "step": 8055 + }, + { + "epoch": 0.7309018326982398, + "grad_norm": 0.10056465695179075, + "learning_rate": 0.00017818744172239616, + "loss": 1.5605, + "step": 8056 + }, + { + "epoch": 0.7309925603338777, + "grad_norm": 0.10277957601689978, + "learning_rate": 0.00017807500639081288, + "loss": 1.4825, + "step": 8057 + }, + { + "epoch": 0.7310832879695155, + "grad_norm": 0.1015218524000263, + "learning_rate": 0.00017796259885750033, + "loss": 1.5643, + "step": 8058 + }, + { + "epoch": 0.7311740156051534, + "grad_norm": 0.10029424351847648, + "learning_rate": 0.00017785021913216493, + "loss": 1.5991, + "step": 8059 + }, + { + "epoch": 0.7312647432407912, + "grad_norm": 0.10481809497532212, + "learning_rate": 
0.00017773786722451052, + "loss": 1.5936, + "step": 8060 + }, + { + "epoch": 0.7313554708764289, + "grad_norm": 0.10127512446469528, + "learning_rate": 0.00017762554314423862, + "loss": 1.5553, + "step": 8061 + }, + { + "epoch": 0.7314461985120668, + "grad_norm": 0.10159771040533991, + "learning_rate": 0.00017751324690104875, + "loss": 1.5603, + "step": 8062 + }, + { + "epoch": 0.7315369261477046, + "grad_norm": 0.09979104948394407, + "learning_rate": 0.00017740097850463737, + "loss": 1.5516, + "step": 8063 + }, + { + "epoch": 0.7316276537833424, + "grad_norm": 0.09861842296953902, + "learning_rate": 0.0001772887379646991, + "loss": 1.5551, + "step": 8064 + }, + { + "epoch": 0.7317183814189803, + "grad_norm": 0.09808073250069951, + "learning_rate": 0.000177176525290926, + "loss": 1.5302, + "step": 8065 + }, + { + "epoch": 0.731809109054618, + "grad_norm": 0.09807650556947883, + "learning_rate": 0.0001770643404930075, + "loss": 1.5478, + "step": 8066 + }, + { + "epoch": 0.7318998366902558, + "grad_norm": 0.10176496898881682, + "learning_rate": 0.00017695218358063065, + "loss": 1.5374, + "step": 8067 + }, + { + "epoch": 0.7319905643258937, + "grad_norm": 0.1028719708751399, + "learning_rate": 0.00017684005456348052, + "loss": 1.5794, + "step": 8068 + }, + { + "epoch": 0.7320812919615315, + "grad_norm": 0.10517355738046104, + "learning_rate": 0.00017672795345123927, + "loss": 1.5478, + "step": 8069 + }, + { + "epoch": 0.7321720195971693, + "grad_norm": 0.0996500239333792, + "learning_rate": 0.00017661588025358693, + "loss": 1.5715, + "step": 8070 + }, + { + "epoch": 0.7322627472328072, + "grad_norm": 0.10143184930246364, + "learning_rate": 0.00017650383498020068, + "loss": 1.5539, + "step": 8071 + }, + { + "epoch": 0.7323534748684449, + "grad_norm": 0.10204229255321016, + "learning_rate": 0.00017639181764075623, + "loss": 1.5482, + "step": 8072 + }, + { + "epoch": 0.7324442025040827, + "grad_norm": 0.10151668378432002, + "learning_rate": 0.00017627982824492606, + "loss": 1.5429, + "step": 8073 + }, + { + "epoch": 0.7325349301397206, + "grad_norm": 0.09969004702626942, + "learning_rate": 0.00017616786680238022, + "loss": 1.5367, + "step": 8074 + }, + { + "epoch": 0.7326256577753584, + "grad_norm": 0.10256952145910325, + "learning_rate": 0.00017605593332278702, + "loss": 1.6051, + "step": 8075 + }, + { + "epoch": 0.7327163854109962, + "grad_norm": 0.10317422903801443, + "learning_rate": 0.00017594402781581176, + "loss": 1.5556, + "step": 8076 + }, + { + "epoch": 0.732807113046634, + "grad_norm": 0.09862099449517243, + "learning_rate": 0.0001758321502911172, + "loss": 1.4853, + "step": 8077 + }, + { + "epoch": 0.7328978406822718, + "grad_norm": 0.09778063127010957, + "learning_rate": 0.00017572030075836448, + "loss": 1.5466, + "step": 8078 + }, + { + "epoch": 0.7329885683179096, + "grad_norm": 0.10242112493979075, + "learning_rate": 0.0001756084792272114, + "loss": 1.5456, + "step": 8079 + }, + { + "epoch": 0.7330792959535475, + "grad_norm": 0.1022113510008635, + "learning_rate": 0.0001754966857073141, + "loss": 1.5637, + "step": 8080 + }, + { + "epoch": 0.7331700235891853, + "grad_norm": 0.10471252282844046, + "learning_rate": 0.00017538492020832564, + "loss": 1.5672, + "step": 8081 + }, + { + "epoch": 0.733260751224823, + "grad_norm": 0.09972888223935392, + "learning_rate": 0.00017527318273989734, + "loss": 1.5193, + "step": 8082 + }, + { + "epoch": 0.7333514788604609, + "grad_norm": 0.09857617765630448, + "learning_rate": 0.0001751614733116776, + "loss": 1.543, + "step": 8083 + }, + { + 
"epoch": 0.7334422064960987, + "grad_norm": 0.09709941373042738, + "learning_rate": 0.00017504979193331234, + "loss": 1.5548, + "step": 8084 + }, + { + "epoch": 0.7335329341317365, + "grad_norm": 0.09776487732437648, + "learning_rate": 0.00017493813861444558, + "loss": 1.5288, + "step": 8085 + }, + { + "epoch": 0.7336236617673744, + "grad_norm": 0.10484468978742452, + "learning_rate": 0.00017482651336471843, + "loss": 1.5679, + "step": 8086 + }, + { + "epoch": 0.7337143894030121, + "grad_norm": 0.0994668898819283, + "learning_rate": 0.00017471491619376966, + "loss": 1.57, + "step": 8087 + }, + { + "epoch": 0.7338051170386499, + "grad_norm": 0.09999459147292292, + "learning_rate": 0.0001746033471112358, + "loss": 1.5196, + "step": 8088 + }, + { + "epoch": 0.7338958446742878, + "grad_norm": 0.10186051153152857, + "learning_rate": 0.00017449180612675108, + "loss": 1.4964, + "step": 8089 + }, + { + "epoch": 0.7339865723099256, + "grad_norm": 0.1052518862140555, + "learning_rate": 0.00017438029324994686, + "loss": 1.5316, + "step": 8090 + }, + { + "epoch": 0.7340772999455634, + "grad_norm": 0.0996128850811878, + "learning_rate": 0.00017426880849045212, + "loss": 1.5577, + "step": 8091 + }, + { + "epoch": 0.7341680275812013, + "grad_norm": 0.10358764427877729, + "learning_rate": 0.00017415735185789393, + "loss": 1.5864, + "step": 8092 + }, + { + "epoch": 0.734258755216839, + "grad_norm": 0.10246435464620626, + "learning_rate": 0.00017404592336189644, + "loss": 1.5888, + "step": 8093 + }, + { + "epoch": 0.7343494828524768, + "grad_norm": 0.09998962219384448, + "learning_rate": 0.0001739345230120814, + "loss": 1.5455, + "step": 8094 + }, + { + "epoch": 0.7344402104881147, + "grad_norm": 0.10185431263966303, + "learning_rate": 0.00017382315081806833, + "loss": 1.5646, + "step": 8095 + }, + { + "epoch": 0.7345309381237525, + "grad_norm": 0.10062355149989997, + "learning_rate": 0.00017371180678947434, + "loss": 1.5487, + "step": 8096 + }, + { + "epoch": 0.7346216657593904, + "grad_norm": 0.10109432872578086, + "learning_rate": 0.000173600490935914, + "loss": 1.5409, + "step": 8097 + }, + { + "epoch": 0.7347123933950281, + "grad_norm": 0.10036148440526044, + "learning_rate": 0.00017348920326699924, + "loss": 1.5425, + "step": 8098 + }, + { + "epoch": 0.7348031210306659, + "grad_norm": 0.09957208075117752, + "learning_rate": 0.00017337794379234002, + "loss": 1.601, + "step": 8099 + }, + { + "epoch": 0.7348938486663038, + "grad_norm": 0.09967917549040599, + "learning_rate": 0.00017326671252154347, + "loss": 1.526, + "step": 8100 + }, + { + "epoch": 0.7349845763019416, + "grad_norm": 0.0996577245555461, + "learning_rate": 0.0001731555094642143, + "loss": 1.5701, + "step": 8101 + }, + { + "epoch": 0.7350753039375794, + "grad_norm": 0.09963418714926262, + "learning_rate": 0.0001730443346299551, + "loss": 1.5468, + "step": 8102 + }, + { + "epoch": 0.7351660315732172, + "grad_norm": 0.09849719535816852, + "learning_rate": 0.00017293318802836594, + "loss": 1.5369, + "step": 8103 + }, + { + "epoch": 0.735256759208855, + "grad_norm": 0.10526507382412033, + "learning_rate": 0.00017282206966904418, + "loss": 1.5458, + "step": 8104 + }, + { + "epoch": 0.7353474868444928, + "grad_norm": 0.10241584613732702, + "learning_rate": 0.00017271097956158478, + "loss": 1.5951, + "step": 8105 + }, + { + "epoch": 0.7354382144801307, + "grad_norm": 0.10282340722594475, + "learning_rate": 0.00017259991771558064, + "loss": 1.5464, + "step": 8106 + }, + { + "epoch": 0.7355289421157685, + "grad_norm": 0.09710781564242364, + 
"learning_rate": 0.00017248888414062193, + "loss": 1.575, + "step": 8107 + }, + { + "epoch": 0.7356196697514062, + "grad_norm": 0.0982436499416924, + "learning_rate": 0.00017237787884629615, + "loss": 1.5743, + "step": 8108 + }, + { + "epoch": 0.7357103973870441, + "grad_norm": 0.09944596063806846, + "learning_rate": 0.00017226690184218897, + "loss": 1.5562, + "step": 8109 + }, + { + "epoch": 0.7358011250226819, + "grad_norm": 0.10286798852924843, + "learning_rate": 0.00017215595313788303, + "loss": 1.5239, + "step": 8110 + }, + { + "epoch": 0.7358918526583197, + "grad_norm": 0.10232217490393564, + "learning_rate": 0.0001720450327429589, + "loss": 1.5611, + "step": 8111 + }, + { + "epoch": 0.7359825802939576, + "grad_norm": 0.10508577691428495, + "learning_rate": 0.0001719341406669944, + "loss": 1.5267, + "step": 8112 + }, + { + "epoch": 0.7360733079295954, + "grad_norm": 0.1001238318883231, + "learning_rate": 0.00017182327691956544, + "loss": 1.5172, + "step": 8113 + }, + { + "epoch": 0.7361640355652331, + "grad_norm": 0.10421724833503862, + "learning_rate": 0.00017171244151024484, + "loss": 1.5536, + "step": 8114 + }, + { + "epoch": 0.736254763200871, + "grad_norm": 0.10206700003850627, + "learning_rate": 0.00017160163444860315, + "loss": 1.5271, + "step": 8115 + }, + { + "epoch": 0.7363454908365088, + "grad_norm": 0.10146446384133088, + "learning_rate": 0.00017149085574420887, + "loss": 1.548, + "step": 8116 + }, + { + "epoch": 0.7364362184721466, + "grad_norm": 0.10002349416709366, + "learning_rate": 0.00017138010540662763, + "loss": 1.5319, + "step": 8117 + }, + { + "epoch": 0.7365269461077845, + "grad_norm": 0.10609104372748056, + "learning_rate": 0.00017126938344542254, + "loss": 1.5474, + "step": 8118 + }, + { + "epoch": 0.7366176737434222, + "grad_norm": 0.10120831422764452, + "learning_rate": 0.00017115868987015466, + "loss": 1.5447, + "step": 8119 + }, + { + "epoch": 0.73670840137906, + "grad_norm": 0.1058494486326085, + "learning_rate": 0.0001710480246903825, + "loss": 1.5778, + "step": 8120 + }, + { + "epoch": 0.7367991290146979, + "grad_norm": 0.10765241154953036, + "learning_rate": 0.00017093738791566193, + "loss": 1.5319, + "step": 8121 + }, + { + "epoch": 0.7368898566503357, + "grad_norm": 0.10354925914937664, + "learning_rate": 0.00017082677955554626, + "loss": 1.584, + "step": 8122 + }, + { + "epoch": 0.7369805842859735, + "grad_norm": 0.10452625326574412, + "learning_rate": 0.0001707161996195868, + "loss": 1.571, + "step": 8123 + }, + { + "epoch": 0.7370713119216113, + "grad_norm": 0.10348308068445145, + "learning_rate": 0.0001706056481173321, + "loss": 1.5811, + "step": 8124 + }, + { + "epoch": 0.7371620395572491, + "grad_norm": 0.10999281570183014, + "learning_rate": 0.00017049512505832798, + "loss": 1.6226, + "step": 8125 + }, + { + "epoch": 0.7372527671928869, + "grad_norm": 0.10372670048293837, + "learning_rate": 0.00017038463045211845, + "loss": 1.5432, + "step": 8126 + }, + { + "epoch": 0.7373434948285248, + "grad_norm": 0.10170408271565425, + "learning_rate": 0.00017027416430824482, + "loss": 1.5532, + "step": 8127 + }, + { + "epoch": 0.7374342224641626, + "grad_norm": 0.09985783759209867, + "learning_rate": 0.0001701637266362457, + "loss": 1.5534, + "step": 8128 + }, + { + "epoch": 0.7375249500998003, + "grad_norm": 0.10254749211889437, + "learning_rate": 0.00017005331744565722, + "loss": 1.5172, + "step": 8129 + }, + { + "epoch": 0.7376156777354382, + "grad_norm": 0.10207666385482078, + "learning_rate": 0.00016994293674601352, + "loss": 1.5435, + "step": 8130 
+ }, + { + "epoch": 0.737706405371076, + "grad_norm": 0.0980188210791035, + "learning_rate": 0.00016983258454684598, + "loss": 1.5402, + "step": 8131 + }, + { + "epoch": 0.7377971330067138, + "grad_norm": 0.10109492298617889, + "learning_rate": 0.00016972226085768316, + "loss": 1.5424, + "step": 8132 + }, + { + "epoch": 0.7378878606423517, + "grad_norm": 0.10005482779916965, + "learning_rate": 0.00016961196568805186, + "loss": 1.5444, + "step": 8133 + }, + { + "epoch": 0.7379785882779895, + "grad_norm": 0.0975454802004766, + "learning_rate": 0.00016950169904747614, + "loss": 1.5416, + "step": 8134 + }, + { + "epoch": 0.7380693159136273, + "grad_norm": 0.09748586421797228, + "learning_rate": 0.0001693914609454773, + "loss": 1.5034, + "step": 8135 + }, + { + "epoch": 0.7381600435492651, + "grad_norm": 0.10316774585471972, + "learning_rate": 0.00016928125139157468, + "loss": 1.5393, + "step": 8136 + }, + { + "epoch": 0.7382507711849029, + "grad_norm": 0.09778530019796318, + "learning_rate": 0.00016917107039528473, + "loss": 1.587, + "step": 8137 + }, + { + "epoch": 0.7383414988205408, + "grad_norm": 0.10066231545763815, + "learning_rate": 0.0001690609179661216, + "loss": 1.5244, + "step": 8138 + }, + { + "epoch": 0.7384322264561786, + "grad_norm": 0.1053720007771457, + "learning_rate": 0.00016895079411359686, + "loss": 1.5902, + "step": 8139 + }, + { + "epoch": 0.7385229540918163, + "grad_norm": 0.10146705056736632, + "learning_rate": 0.00016884069884721996, + "loss": 1.5546, + "step": 8140 + }, + { + "epoch": 0.7386136817274542, + "grad_norm": 0.10458572151531052, + "learning_rate": 0.00016873063217649737, + "loss": 1.5361, + "step": 8141 + }, + { + "epoch": 0.738704409363092, + "grad_norm": 0.10449900118790884, + "learning_rate": 0.00016862059411093357, + "loss": 1.5566, + "step": 8142 + }, + { + "epoch": 0.7387951369987298, + "grad_norm": 0.10302503059503026, + "learning_rate": 0.00016851058466003038, + "loss": 1.5445, + "step": 8143 + }, + { + "epoch": 0.7388858646343677, + "grad_norm": 0.10290830860099705, + "learning_rate": 0.00016840060383328714, + "loss": 1.5927, + "step": 8144 + }, + { + "epoch": 0.7389765922700055, + "grad_norm": 0.10532481166264025, + "learning_rate": 0.0001682906516402004, + "loss": 1.5475, + "step": 8145 + }, + { + "epoch": 0.7390673199056432, + "grad_norm": 0.10382342738765518, + "learning_rate": 0.00016818072809026503, + "loss": 1.5517, + "step": 8146 + }, + { + "epoch": 0.7391580475412811, + "grad_norm": 0.09877783830748349, + "learning_rate": 0.0001680708331929726, + "loss": 1.5138, + "step": 8147 + }, + { + "epoch": 0.7392487751769189, + "grad_norm": 0.10332235571374263, + "learning_rate": 0.00016796096695781271, + "loss": 1.5422, + "step": 8148 + }, + { + "epoch": 0.7393395028125567, + "grad_norm": 0.10185229349329507, + "learning_rate": 0.00016785112939427188, + "loss": 1.5696, + "step": 8149 + }, + { + "epoch": 0.7394302304481946, + "grad_norm": 0.10269914056194963, + "learning_rate": 0.0001677413205118354, + "loss": 1.5418, + "step": 8150 + }, + { + "epoch": 0.7395209580838323, + "grad_norm": 0.10333539318820534, + "learning_rate": 0.00016763154031998478, + "loss": 1.5802, + "step": 8151 + }, + { + "epoch": 0.7396116857194701, + "grad_norm": 0.09958169341387736, + "learning_rate": 0.00016752178882819947, + "loss": 1.55, + "step": 8152 + }, + { + "epoch": 0.739702413355108, + "grad_norm": 0.10516255019040137, + "learning_rate": 0.00016741206604595687, + "loss": 1.5718, + "step": 8153 + }, + { + "epoch": 0.7397931409907458, + "grad_norm": 
0.10248756647526648, + "learning_rate": 0.00016730237198273146, + "loss": 1.5171, + "step": 8154 + }, + { + "epoch": 0.7398838686263836, + "grad_norm": 0.10381261987617235, + "learning_rate": 0.000167192706647995, + "loss": 1.5117, + "step": 8155 + }, + { + "epoch": 0.7399745962620214, + "grad_norm": 0.10679661332987389, + "learning_rate": 0.0001670830700512176, + "loss": 1.5321, + "step": 8156 + }, + { + "epoch": 0.7400653238976592, + "grad_norm": 0.10268626679909508, + "learning_rate": 0.00016697346220186598, + "loss": 1.553, + "step": 8157 + }, + { + "epoch": 0.740156051533297, + "grad_norm": 0.10136314351379351, + "learning_rate": 0.00016686388310940513, + "loss": 1.5768, + "step": 8158 + }, + { + "epoch": 0.7402467791689349, + "grad_norm": 0.10150785586855109, + "learning_rate": 0.00016675433278329693, + "loss": 1.562, + "step": 8159 + }, + { + "epoch": 0.7403375068045727, + "grad_norm": 0.1029242386962871, + "learning_rate": 0.0001666448112330014, + "loss": 1.5621, + "step": 8160 + }, + { + "epoch": 0.7404282344402104, + "grad_norm": 0.10429104468977175, + "learning_rate": 0.00016653531846797553, + "loss": 1.5741, + "step": 8161 + }, + { + "epoch": 0.7405189620758483, + "grad_norm": 0.1019008918708323, + "learning_rate": 0.0001664258544976739, + "loss": 1.5802, + "step": 8162 + }, + { + "epoch": 0.7406096897114861, + "grad_norm": 0.10435453140131956, + "learning_rate": 0.00016631641933154902, + "loss": 1.5246, + "step": 8163 + }, + { + "epoch": 0.7407004173471239, + "grad_norm": 0.10102030553368518, + "learning_rate": 0.00016620701297905056, + "loss": 1.5577, + "step": 8164 + }, + { + "epoch": 0.7407911449827618, + "grad_norm": 0.10023831529295185, + "learning_rate": 0.00016609763544962551, + "loss": 1.5672, + "step": 8165 + }, + { + "epoch": 0.7408818726183996, + "grad_norm": 0.10186879175962894, + "learning_rate": 0.00016598828675271892, + "loss": 1.5393, + "step": 8166 + }, + { + "epoch": 0.7409726002540373, + "grad_norm": 0.10123825934267575, + "learning_rate": 0.0001658789668977731, + "loss": 1.5849, + "step": 8167 + }, + { + "epoch": 0.7410633278896752, + "grad_norm": 0.10215688390653817, + "learning_rate": 0.00016576967589422776, + "loss": 1.5438, + "step": 8168 + }, + { + "epoch": 0.741154055525313, + "grad_norm": 0.10032532754872224, + "learning_rate": 0.00016566041375152003, + "loss": 1.5073, + "step": 8169 + }, + { + "epoch": 0.7412447831609508, + "grad_norm": 0.10449979116411243, + "learning_rate": 0.00016555118047908497, + "loss": 1.5329, + "step": 8170 + }, + { + "epoch": 0.7413355107965887, + "grad_norm": 0.10393238655953599, + "learning_rate": 0.00016544197608635476, + "loss": 1.5662, + "step": 8171 + }, + { + "epoch": 0.7414262384322264, + "grad_norm": 0.09948553196424124, + "learning_rate": 0.00016533280058275907, + "loss": 1.5586, + "step": 8172 + }, + { + "epoch": 0.7415169660678643, + "grad_norm": 0.10332739009200069, + "learning_rate": 0.00016522365397772542, + "loss": 1.5631, + "step": 8173 + }, + { + "epoch": 0.7416076937035021, + "grad_norm": 0.1022845708726406, + "learning_rate": 0.0001651145362806787, + "loss": 1.5278, + "step": 8174 + }, + { + "epoch": 0.7416984213391399, + "grad_norm": 0.09936513056825665, + "learning_rate": 0.00016500544750104118, + "loss": 1.5574, + "step": 8175 + }, + { + "epoch": 0.7417891489747778, + "grad_norm": 0.1037215675460877, + "learning_rate": 0.00016489638764823246, + "loss": 1.5672, + "step": 8176 + }, + { + "epoch": 0.7418798766104155, + "grad_norm": 0.10364332746643612, + "learning_rate": 0.00016478735673167017, + "loss": 
1.5461, + "step": 8177 + }, + { + "epoch": 0.7419706042460533, + "grad_norm": 0.10459574697612511, + "learning_rate": 0.0001646783547607691, + "loss": 1.5328, + "step": 8178 + }, + { + "epoch": 0.7420613318816912, + "grad_norm": 0.1017539888443917, + "learning_rate": 0.00016456938174494128, + "loss": 1.5737, + "step": 8179 + }, + { + "epoch": 0.742152059517329, + "grad_norm": 0.10065310912847529, + "learning_rate": 0.00016446043769359681, + "loss": 1.5803, + "step": 8180 + }, + { + "epoch": 0.7422427871529668, + "grad_norm": 0.1031458419085925, + "learning_rate": 0.00016435152261614323, + "loss": 1.545, + "step": 8181 + }, + { + "epoch": 0.7423335147886047, + "grad_norm": 0.10395443407434368, + "learning_rate": 0.00016424263652198507, + "loss": 1.5837, + "step": 8182 + }, + { + "epoch": 0.7424242424242424, + "grad_norm": 0.1046826046634893, + "learning_rate": 0.0001641337794205246, + "loss": 1.6163, + "step": 8183 + }, + { + "epoch": 0.7425149700598802, + "grad_norm": 0.10710106616910843, + "learning_rate": 0.0001640249513211619, + "loss": 1.561, + "step": 8184 + }, + { + "epoch": 0.7426056976955181, + "grad_norm": 0.09787143050564302, + "learning_rate": 0.0001639161522332942, + "loss": 1.528, + "step": 8185 + }, + { + "epoch": 0.7426964253311559, + "grad_norm": 0.10726839913300092, + "learning_rate": 0.00016380738216631614, + "loss": 1.5493, + "step": 8186 + }, + { + "epoch": 0.7427871529667937, + "grad_norm": 0.10167169794288691, + "learning_rate": 0.0001636986411296203, + "loss": 1.5312, + "step": 8187 + }, + { + "epoch": 0.7428778806024315, + "grad_norm": 0.10149721262785852, + "learning_rate": 0.00016358992913259623, + "loss": 1.561, + "step": 8188 + }, + { + "epoch": 0.7429686082380693, + "grad_norm": 0.10252096656428304, + "learning_rate": 0.00016348124618463156, + "loss": 1.5372, + "step": 8189 + }, + { + "epoch": 0.7430593358737071, + "grad_norm": 0.10059987861071219, + "learning_rate": 0.00016337259229511064, + "loss": 1.5515, + "step": 8190 + }, + { + "epoch": 0.743150063509345, + "grad_norm": 0.10313896765063885, + "learning_rate": 0.0001632639674734162, + "loss": 1.5476, + "step": 8191 + }, + { + "epoch": 0.7432407911449828, + "grad_norm": 0.10481809049436192, + "learning_rate": 0.0001631553717289278, + "loss": 1.5656, + "step": 8192 + }, + { + "epoch": 0.7433315187806205, + "grad_norm": 0.10309670122665116, + "learning_rate": 0.00016304680507102254, + "loss": 1.5515, + "step": 8193 + }, + { + "epoch": 0.7434222464162584, + "grad_norm": 0.09959957449400075, + "learning_rate": 0.00016293826750907547, + "loss": 1.5513, + "step": 8194 + }, + { + "epoch": 0.7435129740518962, + "grad_norm": 0.09857708420388665, + "learning_rate": 0.00016282975905245866, + "loss": 1.5262, + "step": 8195 + }, + { + "epoch": 0.743603701687534, + "grad_norm": 0.102010268169718, + "learning_rate": 0.00016272127971054173, + "loss": 1.5574, + "step": 8196 + }, + { + "epoch": 0.7436944293231719, + "grad_norm": 0.09896740820147938, + "learning_rate": 0.00016261282949269195, + "loss": 1.5296, + "step": 8197 + }, + { + "epoch": 0.7437851569588096, + "grad_norm": 0.10048877759038057, + "learning_rate": 0.0001625044084082743, + "loss": 1.5358, + "step": 8198 + }, + { + "epoch": 0.7438758845944474, + "grad_norm": 0.10231616817575338, + "learning_rate": 0.0001623960164666507, + "loss": 1.5351, + "step": 8199 + }, + { + "epoch": 0.7439666122300853, + "grad_norm": 0.10438772336391708, + "learning_rate": 0.00016228765367718063, + "loss": 1.5641, + "step": 8200 + }, + { + "epoch": 0.7440573398657231, + "grad_norm": 
0.10059687416204761, + "learning_rate": 0.00016217932004922164, + "loss": 1.5804, + "step": 8201 + }, + { + "epoch": 0.7441480675013609, + "grad_norm": 0.10152791172023826, + "learning_rate": 0.00016207101559212816, + "loss": 1.5278, + "step": 8202 + }, + { + "epoch": 0.7442387951369988, + "grad_norm": 0.10220553622702515, + "learning_rate": 0.00016196274031525209, + "loss": 1.5735, + "step": 8203 + }, + { + "epoch": 0.7443295227726365, + "grad_norm": 0.10078749428018281, + "learning_rate": 0.0001618544942279433, + "loss": 1.5548, + "step": 8204 + }, + { + "epoch": 0.7444202504082743, + "grad_norm": 0.09779873225741993, + "learning_rate": 0.00016174627733954883, + "loss": 1.5704, + "step": 8205 + }, + { + "epoch": 0.7445109780439122, + "grad_norm": 0.10477411438552196, + "learning_rate": 0.00016163808965941322, + "loss": 1.5596, + "step": 8206 + }, + { + "epoch": 0.74460170567955, + "grad_norm": 0.09839058052176818, + "learning_rate": 0.00016152993119687825, + "loss": 1.5261, + "step": 8207 + }, + { + "epoch": 0.7446924333151878, + "grad_norm": 0.10111372747250102, + "learning_rate": 0.0001614218019612838, + "loss": 1.5799, + "step": 8208 + }, + { + "epoch": 0.7447831609508256, + "grad_norm": 0.102204656685255, + "learning_rate": 0.00016131370196196664, + "loss": 1.5659, + "step": 8209 + }, + { + "epoch": 0.7448738885864634, + "grad_norm": 0.1012575224058899, + "learning_rate": 0.00016120563120826105, + "loss": 1.5725, + "step": 8210 + }, + { + "epoch": 0.7449646162221013, + "grad_norm": 0.10296515617827845, + "learning_rate": 0.00016109758970949916, + "loss": 1.5756, + "step": 8211 + }, + { + "epoch": 0.7450553438577391, + "grad_norm": 0.10593082646685076, + "learning_rate": 0.00016098957747501052, + "loss": 1.6162, + "step": 8212 + }, + { + "epoch": 0.7451460714933769, + "grad_norm": 0.1029970811898401, + "learning_rate": 0.00016088159451412164, + "loss": 1.5446, + "step": 8213 + }, + { + "epoch": 0.7452367991290147, + "grad_norm": 0.10114626806344615, + "learning_rate": 0.00016077364083615727, + "loss": 1.5283, + "step": 8214 + }, + { + "epoch": 0.7453275267646525, + "grad_norm": 0.10120977373163942, + "learning_rate": 0.00016066571645043893, + "loss": 1.5371, + "step": 8215 + }, + { + "epoch": 0.7454182544002903, + "grad_norm": 0.1026290021420474, + "learning_rate": 0.00016055782136628605, + "loss": 1.5389, + "step": 8216 + }, + { + "epoch": 0.7455089820359282, + "grad_norm": 0.10293503387000275, + "learning_rate": 0.00016044995559301513, + "loss": 1.5989, + "step": 8217 + }, + { + "epoch": 0.745599709671566, + "grad_norm": 0.10396423405650261, + "learning_rate": 0.00016034211913994073, + "loss": 1.605, + "step": 8218 + }, + { + "epoch": 0.7456904373072037, + "grad_norm": 0.10113867248922125, + "learning_rate": 0.00016023431201637428, + "loss": 1.5431, + "step": 8219 + }, + { + "epoch": 0.7457811649428416, + "grad_norm": 0.10854518268293219, + "learning_rate": 0.00016012653423162505, + "loss": 1.5542, + "step": 8220 + }, + { + "epoch": 0.7458718925784794, + "grad_norm": 0.1015739513018599, + "learning_rate": 0.00016001878579499984, + "loss": 1.5521, + "step": 8221 + }, + { + "epoch": 0.7459626202141172, + "grad_norm": 0.10178429312754979, + "learning_rate": 0.00015991106671580264, + "loss": 1.5626, + "step": 8222 + }, + { + "epoch": 0.7460533478497551, + "grad_norm": 0.1014407623694052, + "learning_rate": 0.0001598033770033347, + "loss": 1.569, + "step": 8223 + }, + { + "epoch": 0.7461440754853929, + "grad_norm": 0.10241504785414551, + "learning_rate": 0.00015969571666689554, + "loss": 
1.5519, + "step": 8224 + }, + { + "epoch": 0.7462348031210306, + "grad_norm": 0.09850808365698646, + "learning_rate": 0.00015958808571578132, + "loss": 1.5672, + "step": 8225 + }, + { + "epoch": 0.7463255307566685, + "grad_norm": 0.10265866891675604, + "learning_rate": 0.00015948048415928613, + "loss": 1.5217, + "step": 8226 + }, + { + "epoch": 0.7464162583923063, + "grad_norm": 0.09867480458594365, + "learning_rate": 0.00015937291200670094, + "loss": 1.5856, + "step": 8227 + }, + { + "epoch": 0.7465069860279441, + "grad_norm": 0.10314652429905886, + "learning_rate": 0.0001592653692673154, + "loss": 1.5463, + "step": 8228 + }, + { + "epoch": 0.746597713663582, + "grad_norm": 0.09649211294113508, + "learning_rate": 0.00015915785595041544, + "loss": 1.5036, + "step": 8229 + }, + { + "epoch": 0.7466884412992197, + "grad_norm": 0.1021876825472895, + "learning_rate": 0.0001590503720652846, + "loss": 1.5791, + "step": 8230 + }, + { + "epoch": 0.7467791689348575, + "grad_norm": 0.10677844019782937, + "learning_rate": 0.0001589429176212046, + "loss": 1.5869, + "step": 8231 + }, + { + "epoch": 0.7468698965704954, + "grad_norm": 0.09843594333146889, + "learning_rate": 0.00015883549262745396, + "loss": 1.54, + "step": 8232 + }, + { + "epoch": 0.7469606242061332, + "grad_norm": 0.10268151170526128, + "learning_rate": 0.00015872809709330864, + "loss": 1.5744, + "step": 8233 + }, + { + "epoch": 0.747051351841771, + "grad_norm": 0.10120440816374128, + "learning_rate": 0.0001586207310280426, + "loss": 1.5294, + "step": 8234 + }, + { + "epoch": 0.7471420794774088, + "grad_norm": 0.10581195532743032, + "learning_rate": 0.00015851339444092655, + "loss": 1.5323, + "step": 8235 + }, + { + "epoch": 0.7472328071130466, + "grad_norm": 0.09933704374585552, + "learning_rate": 0.0001584060873412294, + "loss": 1.5701, + "step": 8236 + }, + { + "epoch": 0.7473235347486844, + "grad_norm": 0.1008930474637728, + "learning_rate": 0.00015829880973821682, + "loss": 1.5241, + "step": 8237 + }, + { + "epoch": 0.7474142623843223, + "grad_norm": 0.10395509913645674, + "learning_rate": 0.0001581915616411525, + "loss": 1.5608, + "step": 8238 + }, + { + "epoch": 0.7475049900199601, + "grad_norm": 0.09998224243677346, + "learning_rate": 0.0001580843430592972, + "loss": 1.5608, + "step": 8239 + }, + { + "epoch": 0.7475957176555978, + "grad_norm": 0.10412749120608572, + "learning_rate": 0.00015797715400190916, + "loss": 1.5595, + "step": 8240 + }, + { + "epoch": 0.7476864452912357, + "grad_norm": 0.0977953202897959, + "learning_rate": 0.0001578699944782444, + "loss": 1.5384, + "step": 8241 + }, + { + "epoch": 0.7477771729268735, + "grad_norm": 0.10297844168275799, + "learning_rate": 0.00015776286449755605, + "loss": 1.5509, + "step": 8242 + }, + { + "epoch": 0.7478679005625113, + "grad_norm": 0.10178480865592653, + "learning_rate": 0.00015765576406909465, + "loss": 1.5338, + "step": 8243 + }, + { + "epoch": 0.7479586281981492, + "grad_norm": 0.1018945508982676, + "learning_rate": 0.00015754869320210846, + "loss": 1.5519, + "step": 8244 + }, + { + "epoch": 0.748049355833787, + "grad_norm": 0.1043970546745673, + "learning_rate": 0.00015744165190584324, + "loss": 1.5326, + "step": 8245 + }, + { + "epoch": 0.7481400834694247, + "grad_norm": 0.10143656167826719, + "learning_rate": 0.00015733464018954192, + "loss": 1.5553, + "step": 8246 + }, + { + "epoch": 0.7482308111050626, + "grad_norm": 0.10338909253944756, + "learning_rate": 0.00015722765806244472, + "loss": 1.556, + "step": 8247 + }, + { + "epoch": 0.7483215387407004, + "grad_norm": 
0.1015542810314651, + "learning_rate": 0.00015712070553378993, + "loss": 1.5849, + "step": 8248 + }, + { + "epoch": 0.7484122663763383, + "grad_norm": 0.10265121464976285, + "learning_rate": 0.00015701378261281273, + "loss": 1.535, + "step": 8249 + }, + { + "epoch": 0.7485029940119761, + "grad_norm": 0.103565230042245, + "learning_rate": 0.00015690688930874582, + "loss": 1.535, + "step": 8250 + }, + { + "epoch": 0.7485937216476138, + "grad_norm": 0.10071759464487226, + "learning_rate": 0.00015680002563081959, + "loss": 1.562, + "step": 8251 + }, + { + "epoch": 0.7486844492832517, + "grad_norm": 0.10605342053295341, + "learning_rate": 0.00015669319158826184, + "loss": 1.5471, + "step": 8252 + }, + { + "epoch": 0.7487751769188895, + "grad_norm": 0.10024966395801378, + "learning_rate": 0.00015658638719029765, + "loss": 1.5105, + "step": 8253 + }, + { + "epoch": 0.7488659045545273, + "grad_norm": 0.10491331932818856, + "learning_rate": 0.00015647961244614933, + "loss": 1.4928, + "step": 8254 + }, + { + "epoch": 0.7489566321901652, + "grad_norm": 0.10351720245675504, + "learning_rate": 0.0001563728673650373, + "loss": 1.5027, + "step": 8255 + }, + { + "epoch": 0.749047359825803, + "grad_norm": 0.10101845368605013, + "learning_rate": 0.00015626615195617883, + "loss": 1.5723, + "step": 8256 + }, + { + "epoch": 0.7491380874614407, + "grad_norm": 0.10423248884860976, + "learning_rate": 0.00015615946622878863, + "loss": 1.5297, + "step": 8257 + }, + { + "epoch": 0.7492288150970786, + "grad_norm": 0.10346404906634439, + "learning_rate": 0.00015605281019207917, + "loss": 1.5307, + "step": 8258 + }, + { + "epoch": 0.7493195427327164, + "grad_norm": 0.10207290489367632, + "learning_rate": 0.00015594618385526043, + "loss": 1.5445, + "step": 8259 + }, + { + "epoch": 0.7494102703683542, + "grad_norm": 0.10296224697655032, + "learning_rate": 0.00015583958722753945, + "loss": 1.5857, + "step": 8260 + }, + { + "epoch": 0.7495009980039921, + "grad_norm": 0.10459475164692567, + "learning_rate": 0.00015573302031812063, + "loss": 1.5817, + "step": 8261 + }, + { + "epoch": 0.7495917256396298, + "grad_norm": 0.09925015813091502, + "learning_rate": 0.00015562648313620643, + "loss": 1.5515, + "step": 8262 + }, + { + "epoch": 0.7496824532752676, + "grad_norm": 0.10072954565075912, + "learning_rate": 0.00015551997569099614, + "loss": 1.5142, + "step": 8263 + }, + { + "epoch": 0.7497731809109055, + "grad_norm": 0.09838392489425672, + "learning_rate": 0.00015541349799168657, + "loss": 1.5994, + "step": 8264 + }, + { + "epoch": 0.7498639085465433, + "grad_norm": 0.10837392482988008, + "learning_rate": 0.00015530705004747241, + "loss": 1.5821, + "step": 8265 + }, + { + "epoch": 0.7499546361821811, + "grad_norm": 0.10456271886236408, + "learning_rate": 0.00015520063186754514, + "loss": 1.538, + "step": 8266 + }, + { + "epoch": 0.7500453638178189, + "grad_norm": 0.10425485227658363, + "learning_rate": 0.00015509424346109424, + "loss": 1.583, + "step": 8267 + }, + { + "epoch": 0.7501360914534567, + "grad_norm": 0.10370821730520682, + "learning_rate": 0.00015498788483730609, + "loss": 1.5446, + "step": 8268 + }, + { + "epoch": 0.7502268190890945, + "grad_norm": 0.10428672752788928, + "learning_rate": 0.00015488155600536514, + "loss": 1.5487, + "step": 8269 + }, + { + "epoch": 0.7503175467247324, + "grad_norm": 0.10036960660411484, + "learning_rate": 0.00015477525697445266, + "loss": 1.5383, + "step": 8270 + }, + { + "epoch": 0.7504082743603702, + "grad_norm": 0.1044617688026611, + "learning_rate": 0.00015466898775374742, + 
"loss": 1.5172, + "step": 8271 + }, + { + "epoch": 0.7504990019960079, + "grad_norm": 0.10182116172270814, + "learning_rate": 0.00015456274835242624, + "loss": 1.5403, + "step": 8272 + }, + { + "epoch": 0.7505897296316458, + "grad_norm": 0.1008797916396218, + "learning_rate": 0.0001544565387796626, + "loss": 1.5352, + "step": 8273 + }, + { + "epoch": 0.7506804572672836, + "grad_norm": 0.1006259531992304, + "learning_rate": 0.00015435035904462756, + "loss": 1.5353, + "step": 8274 + }, + { + "epoch": 0.7507711849029214, + "grad_norm": 0.10319517568494611, + "learning_rate": 0.00015424420915649006, + "loss": 1.5085, + "step": 8275 + }, + { + "epoch": 0.7508619125385593, + "grad_norm": 0.10517607417152477, + "learning_rate": 0.00015413808912441613, + "loss": 1.5117, + "step": 8276 + }, + { + "epoch": 0.750952640174197, + "grad_norm": 0.09992203434819386, + "learning_rate": 0.00015403199895756926, + "loss": 1.5434, + "step": 8277 + }, + { + "epoch": 0.7510433678098348, + "grad_norm": 0.10537024056886984, + "learning_rate": 0.00015392593866511006, + "loss": 1.5514, + "step": 8278 + }, + { + "epoch": 0.7511340954454727, + "grad_norm": 0.10659932197146568, + "learning_rate": 0.00015381990825619725, + "loss": 1.6013, + "step": 8279 + }, + { + "epoch": 0.7512248230811105, + "grad_norm": 0.10380276521987547, + "learning_rate": 0.00015371390773998632, + "loss": 1.5518, + "step": 8280 + }, + { + "epoch": 0.7513155507167483, + "grad_norm": 0.10063077267420381, + "learning_rate": 0.00015360793712563037, + "loss": 1.5558, + "step": 8281 + }, + { + "epoch": 0.7514062783523862, + "grad_norm": 0.098216530861606, + "learning_rate": 0.00015350199642228014, + "loss": 1.5542, + "step": 8282 + }, + { + "epoch": 0.7514970059880239, + "grad_norm": 0.10847316034479101, + "learning_rate": 0.0001533960856390837, + "loss": 1.5576, + "step": 8283 + }, + { + "epoch": 0.7515877336236617, + "grad_norm": 0.10813801903609498, + "learning_rate": 0.00015329020478518636, + "loss": 1.5409, + "step": 8284 + }, + { + "epoch": 0.7516784612592996, + "grad_norm": 0.10336193672481679, + "learning_rate": 0.00015318435386973078, + "loss": 1.5192, + "step": 8285 + }, + { + "epoch": 0.7517691888949374, + "grad_norm": 0.10427132959369158, + "learning_rate": 0.00015307853290185753, + "loss": 1.5431, + "step": 8286 + }, + { + "epoch": 0.7518599165305753, + "grad_norm": 0.1016255692380064, + "learning_rate": 0.0001529727418907041, + "loss": 1.5296, + "step": 8287 + }, + { + "epoch": 0.751950644166213, + "grad_norm": 0.10073389935734352, + "learning_rate": 0.00015286698084540534, + "loss": 1.5589, + "step": 8288 + }, + { + "epoch": 0.7520413718018508, + "grad_norm": 0.09944691189868643, + "learning_rate": 0.00015276124977509404, + "loss": 1.5334, + "step": 8289 + }, + { + "epoch": 0.7521320994374887, + "grad_norm": 0.099857346052867, + "learning_rate": 0.00015265554868890008, + "loss": 1.5398, + "step": 8290 + }, + { + "epoch": 0.7522228270731265, + "grad_norm": 0.09955255816002519, + "learning_rate": 0.00015254987759595056, + "loss": 1.5482, + "step": 8291 + }, + { + "epoch": 0.7523135547087643, + "grad_norm": 0.09938048086741624, + "learning_rate": 0.00015244423650537047, + "loss": 1.5797, + "step": 8292 + }, + { + "epoch": 0.7524042823444022, + "grad_norm": 0.10047572362098192, + "learning_rate": 0.0001523386254262818, + "loss": 1.5521, + "step": 8293 + }, + { + "epoch": 0.7524950099800399, + "grad_norm": 0.10171970257418944, + "learning_rate": 0.000152233044367804, + "loss": 1.5588, + "step": 8294 + }, + { + "epoch": 0.7525857376156777, + 
"grad_norm": 0.10023524066985146, + "learning_rate": 0.00015212749333905396, + "loss": 1.4651, + "step": 8295 + }, + { + "epoch": 0.7526764652513156, + "grad_norm": 0.10379881669945137, + "learning_rate": 0.00015202197234914634, + "loss": 1.5747, + "step": 8296 + }, + { + "epoch": 0.7527671928869534, + "grad_norm": 0.1040257106973594, + "learning_rate": 0.0001519164814071925, + "loss": 1.5425, + "step": 8297 + }, + { + "epoch": 0.7528579205225912, + "grad_norm": 0.10861737427800128, + "learning_rate": 0.00015181102052230178, + "loss": 1.5274, + "step": 8298 + }, + { + "epoch": 0.752948648158229, + "grad_norm": 0.10669048335032227, + "learning_rate": 0.0001517055897035809, + "loss": 1.5359, + "step": 8299 + }, + { + "epoch": 0.7530393757938668, + "grad_norm": 0.10414271991639382, + "learning_rate": 0.00015160018896013373, + "loss": 1.5575, + "step": 8300 + }, + { + "epoch": 0.7531301034295046, + "grad_norm": 0.10455960586020838, + "learning_rate": 0.0001514948183010614, + "loss": 1.5591, + "step": 8301 + }, + { + "epoch": 0.7532208310651425, + "grad_norm": 0.10423465925202652, + "learning_rate": 0.00015138947773546302, + "loss": 1.5212, + "step": 8302 + }, + { + "epoch": 0.7533115587007803, + "grad_norm": 0.1047878345146071, + "learning_rate": 0.00015128416727243466, + "loss": 1.5529, + "step": 8303 + }, + { + "epoch": 0.753402286336418, + "grad_norm": 0.10732408166238745, + "learning_rate": 0.00015117888692106968, + "loss": 1.5613, + "step": 8304 + }, + { + "epoch": 0.7534930139720559, + "grad_norm": 0.10049996895552421, + "learning_rate": 0.0001510736366904592, + "loss": 1.5335, + "step": 8305 + }, + { + "epoch": 0.7535837416076937, + "grad_norm": 0.1034778266435024, + "learning_rate": 0.0001509684165896918, + "loss": 1.5341, + "step": 8306 + }, + { + "epoch": 0.7536744692433315, + "grad_norm": 0.10505312365676879, + "learning_rate": 0.00015086322662785306, + "loss": 1.5197, + "step": 8307 + }, + { + "epoch": 0.7537651968789694, + "grad_norm": 0.10042910829734857, + "learning_rate": 0.000150758066814026, + "loss": 1.5743, + "step": 8308 + }, + { + "epoch": 0.7538559245146071, + "grad_norm": 0.10250263655728213, + "learning_rate": 0.0001506529371572915, + "loss": 1.5523, + "step": 8309 + }, + { + "epoch": 0.7539466521502449, + "grad_norm": 0.10190034703286166, + "learning_rate": 0.00015054783766672736, + "loss": 1.5656, + "step": 8310 + }, + { + "epoch": 0.7540373797858828, + "grad_norm": 0.09766091317176717, + "learning_rate": 0.0001504427683514088, + "loss": 1.5285, + "step": 8311 + }, + { + "epoch": 0.7541281074215206, + "grad_norm": 0.10363776394995168, + "learning_rate": 0.0001503377292204089, + "loss": 1.5448, + "step": 8312 + }, + { + "epoch": 0.7542188350571584, + "grad_norm": 0.10385295843331212, + "learning_rate": 0.0001502327202827974, + "loss": 1.5748, + "step": 8313 + }, + { + "epoch": 0.7543095626927963, + "grad_norm": 0.10189821214347701, + "learning_rate": 0.00015012774154764226, + "loss": 1.5589, + "step": 8314 + }, + { + "epoch": 0.754400290328434, + "grad_norm": 0.10238105017392328, + "learning_rate": 0.00015002279302400801, + "loss": 1.5199, + "step": 8315 + }, + { + "epoch": 0.7544910179640718, + "grad_norm": 0.10506956368069828, + "learning_rate": 0.0001499178747209573, + "loss": 1.5704, + "step": 8316 + }, + { + "epoch": 0.7545817455997097, + "grad_norm": 0.10569644427465613, + "learning_rate": 0.00014981298664754978, + "loss": 1.5827, + "step": 8317 + }, + { + "epoch": 0.7546724732353475, + "grad_norm": 0.10195908421149395, + "learning_rate": 0.00014970812881284223, 
+ "loss": 1.5708, + "step": 8318 + }, + { + "epoch": 0.7547632008709853, + "grad_norm": 0.10129894203659351, + "learning_rate": 0.0001496033012258896, + "loss": 1.5241, + "step": 8319 + }, + { + "epoch": 0.7548539285066231, + "grad_norm": 0.09931383155117132, + "learning_rate": 0.00014949850389574354, + "loss": 1.5153, + "step": 8320 + }, + { + "epoch": 0.7549446561422609, + "grad_norm": 0.10293695829866197, + "learning_rate": 0.0001493937368314532, + "loss": 1.6039, + "step": 8321 + }, + { + "epoch": 0.7550353837778987, + "grad_norm": 0.10198281148354191, + "learning_rate": 0.0001492890000420653, + "loss": 1.5939, + "step": 8322 + }, + { + "epoch": 0.7551261114135366, + "grad_norm": 0.09954046571689172, + "learning_rate": 0.0001491842935366241, + "loss": 1.5298, + "step": 8323 + }, + { + "epoch": 0.7552168390491744, + "grad_norm": 0.10179514434717943, + "learning_rate": 0.0001490796173241709, + "loss": 1.5376, + "step": 8324 + }, + { + "epoch": 0.7553075666848122, + "grad_norm": 0.10360722220886558, + "learning_rate": 0.00014897497141374427, + "loss": 1.5729, + "step": 8325 + }, + { + "epoch": 0.75539829432045, + "grad_norm": 0.10294291414618839, + "learning_rate": 0.0001488703558143808, + "loss": 1.561, + "step": 8326 + }, + { + "epoch": 0.7554890219560878, + "grad_norm": 0.09917364927259356, + "learning_rate": 0.0001487657705351138, + "loss": 1.6021, + "step": 8327 + }, + { + "epoch": 0.7555797495917257, + "grad_norm": 0.0995098829161765, + "learning_rate": 0.00014866121558497415, + "loss": 1.5667, + "step": 8328 + }, + { + "epoch": 0.7556704772273635, + "grad_norm": 0.09796551604789465, + "learning_rate": 0.0001485566909729903, + "loss": 1.544, + "step": 8329 + }, + { + "epoch": 0.7557612048630012, + "grad_norm": 0.10144380662451112, + "learning_rate": 0.00014845219670818816, + "loss": 1.5851, + "step": 8330 + }, + { + "epoch": 0.7558519324986391, + "grad_norm": 0.09934316850464404, + "learning_rate": 0.00014834773279959063, + "loss": 1.5826, + "step": 8331 + }, + { + "epoch": 0.7559426601342769, + "grad_norm": 0.09971143671329985, + "learning_rate": 0.00014824329925621806, + "loss": 1.5314, + "step": 8332 + }, + { + "epoch": 0.7560333877699147, + "grad_norm": 0.09969332562387502, + "learning_rate": 0.0001481388960870886, + "loss": 1.5627, + "step": 8333 + }, + { + "epoch": 0.7561241154055526, + "grad_norm": 0.10453816691965408, + "learning_rate": 0.0001480345233012173, + "loss": 1.5654, + "step": 8334 + }, + { + "epoch": 0.7562148430411904, + "grad_norm": 0.10269027813379906, + "learning_rate": 0.00014793018090761666, + "loss": 1.5332, + "step": 8335 + }, + { + "epoch": 0.7563055706768281, + "grad_norm": 0.1001524079283487, + "learning_rate": 0.00014782586891529676, + "loss": 1.5144, + "step": 8336 + }, + { + "epoch": 0.756396298312466, + "grad_norm": 0.10284052796942679, + "learning_rate": 0.00014772158733326518, + "loss": 1.5355, + "step": 8337 + }, + { + "epoch": 0.7564870259481038, + "grad_norm": 0.0992249900057859, + "learning_rate": 0.00014761733617052643, + "loss": 1.536, + "step": 8338 + }, + { + "epoch": 0.7565777535837416, + "grad_norm": 0.1045472457493161, + "learning_rate": 0.00014751311543608248, + "loss": 1.5358, + "step": 8339 + }, + { + "epoch": 0.7566684812193795, + "grad_norm": 0.10430592669739046, + "learning_rate": 0.00014740892513893312, + "loss": 1.5205, + "step": 8340 + }, + { + "epoch": 0.7567592088550172, + "grad_norm": 0.10946715173505663, + "learning_rate": 0.00014730476528807503, + "loss": 1.5411, + "step": 8341 + }, + { + "epoch": 0.756849936490655, + 
"grad_norm": 0.10421467190956354, + "learning_rate": 0.0001472006358925023, + "loss": 1.5261, + "step": 8342 + }, + { + "epoch": 0.7569406641262929, + "grad_norm": 0.103310048706371, + "learning_rate": 0.0001470965369612068, + "loss": 1.5871, + "step": 8343 + }, + { + "epoch": 0.7570313917619307, + "grad_norm": 0.10712980367945939, + "learning_rate": 0.0001469924685031772, + "loss": 1.5408, + "step": 8344 + }, + { + "epoch": 0.7571221193975685, + "grad_norm": 0.10379917056672637, + "learning_rate": 0.00014688843052740013, + "loss": 1.5519, + "step": 8345 + }, + { + "epoch": 0.7572128470332063, + "grad_norm": 0.1024265884189632, + "learning_rate": 0.00014678442304285895, + "loss": 1.575, + "step": 8346 + }, + { + "epoch": 0.7573035746688441, + "grad_norm": 0.10439140012312648, + "learning_rate": 0.00014668044605853508, + "loss": 1.5548, + "step": 8347 + }, + { + "epoch": 0.7573943023044819, + "grad_norm": 0.10518342004624044, + "learning_rate": 0.00014657649958340675, + "loss": 1.5496, + "step": 8348 + }, + { + "epoch": 0.7574850299401198, + "grad_norm": 0.10326195240021491, + "learning_rate": 0.00014647258362644966, + "loss": 1.5651, + "step": 8349 + }, + { + "epoch": 0.7575757575757576, + "grad_norm": 0.10046505859916456, + "learning_rate": 0.00014636869819663716, + "loss": 1.5599, + "step": 8350 + }, + { + "epoch": 0.7576664852113953, + "grad_norm": 0.10092137080277071, + "learning_rate": 0.00014626484330293978, + "loss": 1.5603, + "step": 8351 + }, + { + "epoch": 0.7577572128470332, + "grad_norm": 0.11720096986386491, + "learning_rate": 0.0001461610189543251, + "loss": 1.5575, + "step": 8352 + }, + { + "epoch": 0.757847940482671, + "grad_norm": 0.103717280253321, + "learning_rate": 0.00014605722515975857, + "loss": 1.5276, + "step": 8353 + }, + { + "epoch": 0.7579386681183088, + "grad_norm": 0.10221632622256822, + "learning_rate": 0.00014595346192820298, + "loss": 1.575, + "step": 8354 + }, + { + "epoch": 0.7580293957539467, + "grad_norm": 0.10648991328855872, + "learning_rate": 0.00014584972926861818, + "loss": 1.5663, + "step": 8355 + }, + { + "epoch": 0.7581201233895845, + "grad_norm": 0.10442815939218826, + "learning_rate": 0.0001457460271899612, + "loss": 1.5223, + "step": 8356 + }, + { + "epoch": 0.7582108510252222, + "grad_norm": 0.10287767301743354, + "learning_rate": 0.00014564235570118723, + "loss": 1.5373, + "step": 8357 + }, + { + "epoch": 0.7583015786608601, + "grad_norm": 0.10482103659198223, + "learning_rate": 0.00014553871481124808, + "loss": 1.5525, + "step": 8358 + }, + { + "epoch": 0.7583923062964979, + "grad_norm": 0.10455610966533369, + "learning_rate": 0.0001454351045290928, + "loss": 1.555, + "step": 8359 + }, + { + "epoch": 0.7584830339321357, + "grad_norm": 0.10216494498997646, + "learning_rate": 0.00014533152486366878, + "loss": 1.5314, + "step": 8360 + }, + { + "epoch": 0.7585737615677736, + "grad_norm": 0.10469517146899279, + "learning_rate": 0.00014522797582391996, + "loss": 1.5455, + "step": 8361 + }, + { + "epoch": 0.7586644892034113, + "grad_norm": 0.10231597015566385, + "learning_rate": 0.0001451244574187876, + "loss": 1.5666, + "step": 8362 + }, + { + "epoch": 0.7587552168390492, + "grad_norm": 0.10065484741618615, + "learning_rate": 0.00014502096965721055, + "loss": 1.5068, + "step": 8363 + }, + { + "epoch": 0.758845944474687, + "grad_norm": 0.10119018324445318, + "learning_rate": 0.00014491751254812525, + "loss": 1.5689, + "step": 8364 + }, + { + "epoch": 0.7589366721103248, + "grad_norm": 0.0990331315460522, + "learning_rate": 0.00014481408610046503, 
+ "loss": 1.5227, + "step": 8365 + }, + { + "epoch": 0.7590273997459627, + "grad_norm": 0.10036839906682142, + "learning_rate": 0.0001447106903231607, + "loss": 1.6192, + "step": 8366 + }, + { + "epoch": 0.7591181273816004, + "grad_norm": 0.10341343862212794, + "learning_rate": 0.00014460732522514065, + "loss": 1.5742, + "step": 8367 + }, + { + "epoch": 0.7592088550172382, + "grad_norm": 0.10556519761036535, + "learning_rate": 0.00014450399081533056, + "loss": 1.5489, + "step": 8368 + }, + { + "epoch": 0.7592995826528761, + "grad_norm": 0.1052564498097075, + "learning_rate": 0.00014440068710265313, + "loss": 1.5071, + "step": 8369 + }, + { + "epoch": 0.7593903102885139, + "grad_norm": 0.10449469933485363, + "learning_rate": 0.00014429741409602891, + "loss": 1.5785, + "step": 8370 + }, + { + "epoch": 0.7594810379241517, + "grad_norm": 0.10253132414998976, + "learning_rate": 0.00014419417180437544, + "loss": 1.5954, + "step": 8371 + }, + { + "epoch": 0.7595717655597896, + "grad_norm": 0.10383397309759401, + "learning_rate": 0.00014409096023660766, + "loss": 1.56, + "step": 8372 + }, + { + "epoch": 0.7596624931954273, + "grad_norm": 0.10291980015558765, + "learning_rate": 0.00014398777940163775, + "loss": 1.5137, + "step": 8373 + }, + { + "epoch": 0.7597532208310651, + "grad_norm": 0.10420778521543396, + "learning_rate": 0.0001438846293083757, + "loss": 1.5337, + "step": 8374 + }, + { + "epoch": 0.759843948466703, + "grad_norm": 0.09764285208127357, + "learning_rate": 0.0001437815099657283, + "loss": 1.5027, + "step": 8375 + }, + { + "epoch": 0.7599346761023408, + "grad_norm": 0.10110438320998093, + "learning_rate": 0.00014367842138259996, + "loss": 1.502, + "step": 8376 + }, + { + "epoch": 0.7600254037379786, + "grad_norm": 0.09946312149679921, + "learning_rate": 0.00014357536356789265, + "loss": 1.5566, + "step": 8377 + }, + { + "epoch": 0.7601161313736164, + "grad_norm": 0.09673313691061357, + "learning_rate": 0.0001434723365305052, + "loss": 1.5329, + "step": 8378 + }, + { + "epoch": 0.7602068590092542, + "grad_norm": 0.10127457230574884, + "learning_rate": 0.00014336934027933386, + "loss": 1.5692, + "step": 8379 + }, + { + "epoch": 0.760297586644892, + "grad_norm": 0.1047617819350656, + "learning_rate": 0.00014326637482327275, + "loss": 1.5706, + "step": 8380 + }, + { + "epoch": 0.7603883142805299, + "grad_norm": 0.10226363802943429, + "learning_rate": 0.00014316344017121268, + "loss": 1.5778, + "step": 8381 + }, + { + "epoch": 0.7604790419161677, + "grad_norm": 0.1034163308517672, + "learning_rate": 0.00014306053633204207, + "loss": 1.5456, + "step": 8382 + }, + { + "epoch": 0.7605697695518054, + "grad_norm": 0.10242081422854411, + "learning_rate": 0.0001429576633146467, + "loss": 1.5243, + "step": 8383 + }, + { + "epoch": 0.7606604971874433, + "grad_norm": 0.10578129906075143, + "learning_rate": 0.00014285482112790993, + "loss": 1.6067, + "step": 8384 + }, + { + "epoch": 0.7607512248230811, + "grad_norm": 0.10134269923150833, + "learning_rate": 0.00014275200978071194, + "loss": 1.5572, + "step": 8385 + }, + { + "epoch": 0.7608419524587189, + "grad_norm": 0.10386135259815302, + "learning_rate": 0.00014264922928193047, + "loss": 1.5471, + "step": 8386 + }, + { + "epoch": 0.7609326800943568, + "grad_norm": 0.10545384214369712, + "learning_rate": 0.00014254647964044082, + "loss": 1.5622, + "step": 8387 + }, + { + "epoch": 0.7610234077299945, + "grad_norm": 0.1050263236293055, + "learning_rate": 0.00014244376086511534, + "loss": 1.5582, + "step": 8388 + }, + { + "epoch": 0.7611141353656323, 
+ "grad_norm": 0.1007886725044581, + "learning_rate": 0.00014234107296482367, + "loss": 1.5555, + "step": 8389 + }, + { + "epoch": 0.7612048630012702, + "grad_norm": 0.1028235364343989, + "learning_rate": 0.00014223841594843324, + "loss": 1.5434, + "step": 8390 + }, + { + "epoch": 0.761295590636908, + "grad_norm": 0.10175624526834526, + "learning_rate": 0.00014213578982480812, + "loss": 1.5713, + "step": 8391 + }, + { + "epoch": 0.7613863182725458, + "grad_norm": 0.10477325091688566, + "learning_rate": 0.00014203319460281044, + "loss": 1.572, + "step": 8392 + }, + { + "epoch": 0.7614770459081837, + "grad_norm": 0.10534497520025926, + "learning_rate": 0.00014193063029129904, + "loss": 1.5701, + "step": 8393 + }, + { + "epoch": 0.7615677735438214, + "grad_norm": 0.10335980737373293, + "learning_rate": 0.00014182809689913069, + "loss": 1.5372, + "step": 8394 + }, + { + "epoch": 0.7616585011794592, + "grad_norm": 0.10114949013634543, + "learning_rate": 0.00014172559443515885, + "loss": 1.5403, + "step": 8395 + }, + { + "epoch": 0.7617492288150971, + "grad_norm": 0.10513989760325121, + "learning_rate": 0.0001416231229082347, + "loss": 1.5642, + "step": 8396 + }, + { + "epoch": 0.7618399564507349, + "grad_norm": 0.10150536263564736, + "learning_rate": 0.00014152068232720677, + "loss": 1.5515, + "step": 8397 + }, + { + "epoch": 0.7619306840863727, + "grad_norm": 0.10096105038327438, + "learning_rate": 0.00014141827270092066, + "loss": 1.5608, + "step": 8398 + }, + { + "epoch": 0.7620214117220105, + "grad_norm": 0.10154770809155592, + "learning_rate": 0.00014131589403821966, + "loss": 1.5443, + "step": 8399 + }, + { + "epoch": 0.7621121393576483, + "grad_norm": 0.09976915042989014, + "learning_rate": 0.00014121354634794391, + "loss": 1.5005, + "step": 8400 + }, + { + "epoch": 0.7622028669932862, + "grad_norm": 0.10529278378616821, + "learning_rate": 0.00014111122963893154, + "loss": 1.591, + "step": 8401 + }, + { + "epoch": 0.762293594628924, + "grad_norm": 0.0977746654005389, + "learning_rate": 0.00014100894392001735, + "loss": 1.535, + "step": 8402 + }, + { + "epoch": 0.7623843222645618, + "grad_norm": 0.1009309966505941, + "learning_rate": 0.00014090668920003357, + "loss": 1.5144, + "step": 8403 + }, + { + "epoch": 0.7624750499001997, + "grad_norm": 0.10218164043159114, + "learning_rate": 0.0001408044654878103, + "loss": 1.5371, + "step": 8404 + }, + { + "epoch": 0.7625657775358374, + "grad_norm": 0.09991849058482358, + "learning_rate": 0.00014070227279217436, + "loss": 1.571, + "step": 8405 + }, + { + "epoch": 0.7626565051714752, + "grad_norm": 0.10612617608112163, + "learning_rate": 0.00014060011112194997, + "loss": 1.5345, + "step": 8406 + }, + { + "epoch": 0.7627472328071131, + "grad_norm": 0.10659509128838417, + "learning_rate": 0.00014049798048595898, + "loss": 1.5555, + "step": 8407 + }, + { + "epoch": 0.7628379604427509, + "grad_norm": 0.1040071048663989, + "learning_rate": 0.00014039588089302047, + "loss": 1.54, + "step": 8408 + }, + { + "epoch": 0.7629286880783887, + "grad_norm": 0.10024455594465304, + "learning_rate": 0.00014029381235195065, + "loss": 1.5349, + "step": 8409 + }, + { + "epoch": 0.7630194157140265, + "grad_norm": 0.10450759377005414, + "learning_rate": 0.00014019177487156298, + "loss": 1.5483, + "step": 8410 + }, + { + "epoch": 0.7631101433496643, + "grad_norm": 0.10548912062845424, + "learning_rate": 0.00014008976846066878, + "loss": 1.5688, + "step": 8411 + }, + { + "epoch": 0.7632008709853021, + "grad_norm": 0.10630233042861158, + "learning_rate": 
0.00013998779312807607, + "loss": 1.5463, + "step": 8412 + }, + { + "epoch": 0.76329159862094, + "grad_norm": 0.10184701295701788, + "learning_rate": 0.00013988584888259038, + "loss": 1.5224, + "step": 8413 + }, + { + "epoch": 0.7633823262565778, + "grad_norm": 0.10163521703672049, + "learning_rate": 0.0001397839357330147, + "loss": 1.5496, + "step": 8414 + }, + { + "epoch": 0.7634730538922155, + "grad_norm": 0.10334217688554458, + "learning_rate": 0.0001396820536881494, + "loss": 1.5506, + "step": 8415 + }, + { + "epoch": 0.7635637815278534, + "grad_norm": 0.10541521121606887, + "learning_rate": 0.0001395802027567919, + "loss": 1.5812, + "step": 8416 + }, + { + "epoch": 0.7636545091634912, + "grad_norm": 0.10274335920547523, + "learning_rate": 0.0001394783829477368, + "loss": 1.557, + "step": 8417 + }, + { + "epoch": 0.763745236799129, + "grad_norm": 0.09890733669817085, + "learning_rate": 0.00013937659426977667, + "loss": 1.5409, + "step": 8418 + }, + { + "epoch": 0.7638359644347669, + "grad_norm": 0.10070258735002077, + "learning_rate": 0.00013927483673170077, + "loss": 1.5506, + "step": 8419 + }, + { + "epoch": 0.7639266920704046, + "grad_norm": 0.10172168817167627, + "learning_rate": 0.00013917311034229574, + "loss": 1.5255, + "step": 8420 + }, + { + "epoch": 0.7640174197060424, + "grad_norm": 0.09985011763573333, + "learning_rate": 0.00013907141511034594, + "loss": 1.5734, + "step": 8421 + }, + { + "epoch": 0.7641081473416803, + "grad_norm": 0.10081233464285114, + "learning_rate": 0.0001389697510446325, + "loss": 1.5357, + "step": 8422 + }, + { + "epoch": 0.7641988749773181, + "grad_norm": 0.10706864142386398, + "learning_rate": 0.00013886811815393442, + "loss": 1.5788, + "step": 8423 + }, + { + "epoch": 0.7642896026129559, + "grad_norm": 0.10356620451674022, + "learning_rate": 0.00013876651644702736, + "loss": 1.5374, + "step": 8424 + }, + { + "epoch": 0.7643803302485938, + "grad_norm": 0.10126554878868411, + "learning_rate": 0.00013866494593268509, + "loss": 1.5305, + "step": 8425 + }, + { + "epoch": 0.7644710578842315, + "grad_norm": 0.10532832897793035, + "learning_rate": 0.00013856340661967792, + "loss": 1.538, + "step": 8426 + }, + { + "epoch": 0.7645617855198693, + "grad_norm": 0.10245648388907158, + "learning_rate": 0.00013846189851677372, + "loss": 1.5627, + "step": 8427 + }, + { + "epoch": 0.7646525131555072, + "grad_norm": 0.1054588333275946, + "learning_rate": 0.00013836042163273805, + "loss": 1.5246, + "step": 8428 + }, + { + "epoch": 0.764743240791145, + "grad_norm": 0.10303111936706195, + "learning_rate": 0.00013825897597633324, + "loss": 1.4953, + "step": 8429 + }, + { + "epoch": 0.7648339684267828, + "grad_norm": 0.10647788053890615, + "learning_rate": 0.00013815756155631902, + "loss": 1.533, + "step": 8430 + }, + { + "epoch": 0.7649246960624206, + "grad_norm": 0.10239652981714696, + "learning_rate": 0.00013805617838145274, + "loss": 1.5194, + "step": 8431 + }, + { + "epoch": 0.7650154236980584, + "grad_norm": 0.10487981038786649, + "learning_rate": 0.0001379548264604889, + "loss": 1.5126, + "step": 8432 + }, + { + "epoch": 0.7651061513336962, + "grad_norm": 0.10266777638867136, + "learning_rate": 0.0001378535058021792, + "loss": 1.5463, + "step": 8433 + }, + { + "epoch": 0.7651968789693341, + "grad_norm": 0.1009763864555316, + "learning_rate": 0.00013775221641527247, + "loss": 1.5579, + "step": 8434 + }, + { + "epoch": 0.7652876066049719, + "grad_norm": 0.10042827355785926, + "learning_rate": 0.00013765095830851537, + "loss": 1.5162, + "step": 8435 + }, + { + 
"epoch": 0.7653783342406096, + "grad_norm": 0.10342730434328623, + "learning_rate": 0.00013754973149065146, + "loss": 1.5534, + "step": 8436 + }, + { + "epoch": 0.7654690618762475, + "grad_norm": 0.10118332087104671, + "learning_rate": 0.00013744853597042134, + "loss": 1.5654, + "step": 8437 + }, + { + "epoch": 0.7655597895118853, + "grad_norm": 0.10218001545449941, + "learning_rate": 0.00013734737175656386, + "loss": 1.5339, + "step": 8438 + }, + { + "epoch": 0.7656505171475232, + "grad_norm": 0.11143065439090298, + "learning_rate": 0.0001372462388578143, + "loss": 1.5289, + "step": 8439 + }, + { + "epoch": 0.765741244783161, + "grad_norm": 0.10415585897749542, + "learning_rate": 0.0001371451372829055, + "loss": 1.534, + "step": 8440 + }, + { + "epoch": 0.7658319724187987, + "grad_norm": 0.10340095407580234, + "learning_rate": 0.0001370440670405674, + "loss": 1.539, + "step": 8441 + }, + { + "epoch": 0.7659227000544366, + "grad_norm": 0.10156234011275228, + "learning_rate": 0.00013694302813952776, + "loss": 1.5484, + "step": 8442 + }, + { + "epoch": 0.7660134276900744, + "grad_norm": 0.10312603163787237, + "learning_rate": 0.00013684202058851115, + "loss": 1.5405, + "step": 8443 + }, + { + "epoch": 0.7661041553257122, + "grad_norm": 0.10461382507209918, + "learning_rate": 0.0001367410443962394, + "loss": 1.5384, + "step": 8444 + }, + { + "epoch": 0.7661948829613501, + "grad_norm": 0.10395013822204523, + "learning_rate": 0.00013664009957143204, + "loss": 1.5564, + "step": 8445 + }, + { + "epoch": 0.7662856105969879, + "grad_norm": 0.10146925697019887, + "learning_rate": 0.00013653918612280575, + "loss": 1.5659, + "step": 8446 + }, + { + "epoch": 0.7663763382326256, + "grad_norm": 0.10654980313537728, + "learning_rate": 0.0001364383040590742, + "loss": 1.5539, + "step": 8447 + }, + { + "epoch": 0.7664670658682635, + "grad_norm": 0.10734908685270418, + "learning_rate": 0.00013633745338894883, + "loss": 1.5235, + "step": 8448 + }, + { + "epoch": 0.7665577935039013, + "grad_norm": 0.10592409268161743, + "learning_rate": 0.00013623663412113795, + "loss": 1.5418, + "step": 8449 + }, + { + "epoch": 0.7666485211395391, + "grad_norm": 0.10442130669129385, + "learning_rate": 0.00013613584626434732, + "loss": 1.5317, + "step": 8450 + }, + { + "epoch": 0.766739248775177, + "grad_norm": 0.10481299518181375, + "learning_rate": 0.00013603508982727985, + "loss": 1.5548, + "step": 8451 + }, + { + "epoch": 0.7668299764108147, + "grad_norm": 0.10165518781488596, + "learning_rate": 0.00013593436481863615, + "loss": 1.5981, + "step": 8452 + }, + { + "epoch": 0.7669207040464525, + "grad_norm": 0.10478352246703986, + "learning_rate": 0.00013583367124711356, + "loss": 1.5242, + "step": 8453 + }, + { + "epoch": 0.7670114316820904, + "grad_norm": 0.10655850606025474, + "learning_rate": 0.00013573300912140713, + "loss": 1.5237, + "step": 8454 + }, + { + "epoch": 0.7671021593177282, + "grad_norm": 0.10601985755690056, + "learning_rate": 0.0001356323784502092, + "loss": 1.5518, + "step": 8455 + }, + { + "epoch": 0.767192886953366, + "grad_norm": 0.10272846352447289, + "learning_rate": 0.0001355317792422091, + "loss": 1.5181, + "step": 8456 + }, + { + "epoch": 0.7672836145890038, + "grad_norm": 0.103946239446315, + "learning_rate": 0.00013543121150609344, + "loss": 1.5721, + "step": 8457 + }, + { + "epoch": 0.7673743422246416, + "grad_norm": 0.10459517998281939, + "learning_rate": 0.00013533067525054655, + "loss": 1.5099, + "step": 8458 + }, + { + "epoch": 0.7674650698602794, + "grad_norm": 0.10778814203223656, + 
"learning_rate": 0.00013523017048424957, + "loss": 1.5618, + "step": 8459 + }, + { + "epoch": 0.7675557974959173, + "grad_norm": 0.10616923040886095, + "learning_rate": 0.00013512969721588098, + "loss": 1.5637, + "step": 8460 + }, + { + "epoch": 0.7676465251315551, + "grad_norm": 0.10280207670803043, + "learning_rate": 0.00013502925545411687, + "loss": 1.5285, + "step": 8461 + }, + { + "epoch": 0.7677372527671928, + "grad_norm": 0.10352292887081437, + "learning_rate": 0.00013492884520763043, + "loss": 1.6143, + "step": 8462 + }, + { + "epoch": 0.7678279804028307, + "grad_norm": 0.10425508072958806, + "learning_rate": 0.00013482846648509207, + "loss": 1.5489, + "step": 8463 + }, + { + "epoch": 0.7679187080384685, + "grad_norm": 0.09991816301729804, + "learning_rate": 0.0001347281192951692, + "loss": 1.5353, + "step": 8464 + }, + { + "epoch": 0.7680094356741063, + "grad_norm": 0.09792072522322037, + "learning_rate": 0.00013462780364652732, + "loss": 1.495, + "step": 8465 + }, + { + "epoch": 0.7681001633097442, + "grad_norm": 0.09733050622456878, + "learning_rate": 0.00013452751954782838, + "loss": 1.5545, + "step": 8466 + }, + { + "epoch": 0.768190890945382, + "grad_norm": 0.10814295067520185, + "learning_rate": 0.0001344272670077319, + "loss": 1.537, + "step": 8467 + }, + { + "epoch": 0.7682816185810197, + "grad_norm": 0.10051843459672734, + "learning_rate": 0.00013432704603489487, + "loss": 1.5529, + "step": 8468 + }, + { + "epoch": 0.7683723462166576, + "grad_norm": 0.09916274793246772, + "learning_rate": 0.00013422685663797118, + "loss": 1.5552, + "step": 8469 + }, + { + "epoch": 0.7684630738522954, + "grad_norm": 0.1017356238605737, + "learning_rate": 0.0001341266988256125, + "loss": 1.5919, + "step": 8470 + }, + { + "epoch": 0.7685538014879332, + "grad_norm": 0.1030909706576391, + "learning_rate": 0.00013402657260646716, + "loss": 1.5616, + "step": 8471 + }, + { + "epoch": 0.7686445291235711, + "grad_norm": 0.1018925373497487, + "learning_rate": 0.00013392647798918134, + "loss": 1.5458, + "step": 8472 + }, + { + "epoch": 0.7687352567592088, + "grad_norm": 0.10383511694025355, + "learning_rate": 0.0001338264149823981, + "loss": 1.5582, + "step": 8473 + }, + { + "epoch": 0.7688259843948466, + "grad_norm": 0.1020232327092391, + "learning_rate": 0.00013372638359475782, + "loss": 1.5605, + "step": 8474 + }, + { + "epoch": 0.7689167120304845, + "grad_norm": 0.09972657624825423, + "learning_rate": 0.00013362638383489832, + "loss": 1.5777, + "step": 8475 + }, + { + "epoch": 0.7690074396661223, + "grad_norm": 0.10388564691562599, + "learning_rate": 0.0001335264157114545, + "loss": 1.5286, + "step": 8476 + }, + { + "epoch": 0.7690981673017602, + "grad_norm": 0.10457409838204358, + "learning_rate": 0.00013342647923305883, + "loss": 1.5925, + "step": 8477 + }, + { + "epoch": 0.769188894937398, + "grad_norm": 0.10626737294638465, + "learning_rate": 0.00013332657440834063, + "loss": 1.5448, + "step": 8478 + }, + { + "epoch": 0.7692796225730357, + "grad_norm": 0.14725924328782336, + "learning_rate": 0.00013322670124592685, + "loss": 1.5145, + "step": 8479 + }, + { + "epoch": 0.7693703502086736, + "grad_norm": 0.10597600564583677, + "learning_rate": 0.00013312685975444145, + "loss": 1.5391, + "step": 8480 + }, + { + "epoch": 0.7694610778443114, + "grad_norm": 0.09950976943860422, + "learning_rate": 0.00013302704994250564, + "loss": 1.5095, + "step": 8481 + }, + { + "epoch": 0.7695518054799492, + "grad_norm": 0.10328178053032425, + "learning_rate": 0.0001329272718187383, + "loss": 1.5323, + "step": 
8482 + }, + { + "epoch": 0.7696425331155871, + "grad_norm": 0.101690801519586, + "learning_rate": 0.0001328275253917552, + "loss": 1.5502, + "step": 8483 + }, + { + "epoch": 0.7697332607512248, + "grad_norm": 0.10109220000304432, + "learning_rate": 0.0001327278106701692, + "loss": 1.4992, + "step": 8484 + }, + { + "epoch": 0.7698239883868626, + "grad_norm": 0.10128429128800773, + "learning_rate": 0.00013262812766259093, + "loss": 1.5333, + "step": 8485 + }, + { + "epoch": 0.7699147160225005, + "grad_norm": 0.10692133843974151, + "learning_rate": 0.00013252847637762805, + "loss": 1.5672, + "step": 8486 + }, + { + "epoch": 0.7700054436581383, + "grad_norm": 0.10141901447546986, + "learning_rate": 0.00013242885682388544, + "loss": 1.5254, + "step": 8487 + }, + { + "epoch": 0.770096171293776, + "grad_norm": 0.10138938315353518, + "learning_rate": 0.00013232926900996506, + "loss": 1.581, + "step": 8488 + }, + { + "epoch": 0.7701868989294139, + "grad_norm": 0.10071377459292781, + "learning_rate": 0.00013222971294446668, + "loss": 1.5483, + "step": 8489 + }, + { + "epoch": 0.7702776265650517, + "grad_norm": 0.1019418088944037, + "learning_rate": 0.00013213018863598674, + "loss": 1.5566, + "step": 8490 + }, + { + "epoch": 0.7703683542006895, + "grad_norm": 0.10095675455229373, + "learning_rate": 0.00013203069609311907, + "loss": 1.5481, + "step": 8491 + }, + { + "epoch": 0.7704590818363274, + "grad_norm": 0.10139813723116775, + "learning_rate": 0.00013193123532445505, + "loss": 1.5169, + "step": 8492 + }, + { + "epoch": 0.7705498094719652, + "grad_norm": 0.10320730631320511, + "learning_rate": 0.00013183180633858328, + "loss": 1.6054, + "step": 8493 + }, + { + "epoch": 0.7706405371076029, + "grad_norm": 0.10888978582361024, + "learning_rate": 0.00013173240914408923, + "loss": 1.5468, + "step": 8494 + }, + { + "epoch": 0.7707312647432408, + "grad_norm": 0.09859806066314247, + "learning_rate": 0.00013163304374955588, + "loss": 1.5106, + "step": 8495 + }, + { + "epoch": 0.7708219923788786, + "grad_norm": 0.10257613734349431, + "learning_rate": 0.00013153371016356354, + "loss": 1.5137, + "step": 8496 + }, + { + "epoch": 0.7709127200145164, + "grad_norm": 0.10061601247202319, + "learning_rate": 0.00013143440839468966, + "loss": 1.556, + "step": 8497 + }, + { + "epoch": 0.7710034476501543, + "grad_norm": 0.10117478565579027, + "learning_rate": 0.00013133513845150886, + "loss": 1.5469, + "step": 8498 + }, + { + "epoch": 0.771094175285792, + "grad_norm": 0.10372952021020664, + "learning_rate": 0.00013123590034259326, + "loss": 1.5377, + "step": 8499 + }, + { + "epoch": 0.7711849029214298, + "grad_norm": 0.10222447117818392, + "learning_rate": 0.00013113669407651185, + "loss": 1.5751, + "step": 8500 + }, + { + "epoch": 0.7712756305570677, + "grad_norm": 0.10247011253163157, + "learning_rate": 0.00013103751966183147, + "loss": 1.5562, + "step": 8501 + }, + { + "epoch": 0.7713663581927055, + "grad_norm": 0.10049849880412712, + "learning_rate": 0.00013093837710711549, + "loss": 1.5357, + "step": 8502 + }, + { + "epoch": 0.7714570858283433, + "grad_norm": 0.10458598746763034, + "learning_rate": 0.0001308392664209252, + "loss": 1.5245, + "step": 8503 + }, + { + "epoch": 0.7715478134639812, + "grad_norm": 0.10932821564878248, + "learning_rate": 0.00013074018761181867, + "loss": 1.5336, + "step": 8504 + }, + { + "epoch": 0.7716385410996189, + "grad_norm": 0.10199857714580046, + "learning_rate": 0.0001306411406883512, + "loss": 1.5453, + "step": 8505 + }, + { + "epoch": 0.7717292687352567, + "grad_norm": 
0.10410368925983014, + "learning_rate": 0.0001305421256590758, + "loss": 1.6106, + "step": 8506 + }, + { + "epoch": 0.7718199963708946, + "grad_norm": 0.10419275421311919, + "learning_rate": 0.0001304431425325423, + "loss": 1.513, + "step": 8507 + }, + { + "epoch": 0.7719107240065324, + "grad_norm": 0.10751737090476397, + "learning_rate": 0.0001303441913172978, + "loss": 1.545, + "step": 8508 + }, + { + "epoch": 0.7720014516421702, + "grad_norm": 0.10433672895013561, + "learning_rate": 0.00013024527202188678, + "loss": 1.5657, + "step": 8509 + }, + { + "epoch": 0.772092179277808, + "grad_norm": 0.10445653071490447, + "learning_rate": 0.0001301463846548513, + "loss": 1.5605, + "step": 8510 + }, + { + "epoch": 0.7721829069134458, + "grad_norm": 0.09958963815369701, + "learning_rate": 0.0001300475292247299, + "loss": 1.5695, + "step": 8511 + }, + { + "epoch": 0.7722736345490836, + "grad_norm": 0.10709081408977837, + "learning_rate": 0.0001299487057400588, + "loss": 1.5647, + "step": 8512 + }, + { + "epoch": 0.7723643621847215, + "grad_norm": 0.09984603486166473, + "learning_rate": 0.00012984991420937159, + "loss": 1.5128, + "step": 8513 + }, + { + "epoch": 0.7724550898203593, + "grad_norm": 0.10434935708668247, + "learning_rate": 0.00012975115464119885, + "loss": 1.5661, + "step": 8514 + }, + { + "epoch": 0.772545817455997, + "grad_norm": 0.09995083292419124, + "learning_rate": 0.00012965242704406822, + "loss": 1.5311, + "step": 8515 + }, + { + "epoch": 0.7726365450916349, + "grad_norm": 0.10237277716697425, + "learning_rate": 0.00012955373142650535, + "loss": 1.5412, + "step": 8516 + }, + { + "epoch": 0.7727272727272727, + "grad_norm": 0.10105138424237393, + "learning_rate": 0.00012945506779703242, + "loss": 1.5312, + "step": 8517 + }, + { + "epoch": 0.7728180003629106, + "grad_norm": 0.10394057116882137, + "learning_rate": 0.00012935643616416898, + "loss": 1.5631, + "step": 8518 + }, + { + "epoch": 0.7729087279985484, + "grad_norm": 0.1038297733726562, + "learning_rate": 0.0001292578365364317, + "loss": 1.537, + "step": 8519 + }, + { + "epoch": 0.7729994556341862, + "grad_norm": 0.10196889800527237, + "learning_rate": 0.00012915926892233509, + "loss": 1.5546, + "step": 8520 + }, + { + "epoch": 0.773090183269824, + "grad_norm": 0.10146825434952589, + "learning_rate": 0.00012906073333039027, + "loss": 1.5388, + "step": 8521 + }, + { + "epoch": 0.7731809109054618, + "grad_norm": 0.10169268902418853, + "learning_rate": 0.0001289622297691056, + "loss": 1.5381, + "step": 8522 + }, + { + "epoch": 0.7732716385410996, + "grad_norm": 0.09958433008469576, + "learning_rate": 0.0001288637582469871, + "loss": 1.5777, + "step": 8523 + }, + { + "epoch": 0.7733623661767375, + "grad_norm": 0.10301152548487938, + "learning_rate": 0.00012876531877253794, + "loss": 1.5984, + "step": 8524 + }, + { + "epoch": 0.7734530938123753, + "grad_norm": 0.10091427806289575, + "learning_rate": 0.000128666911354258, + "loss": 1.5656, + "step": 8525 + }, + { + "epoch": 0.773543821448013, + "grad_norm": 0.10599331497589858, + "learning_rate": 0.00012856853600064523, + "loss": 1.5639, + "step": 8526 + }, + { + "epoch": 0.7736345490836509, + "grad_norm": 0.103058234431142, + "learning_rate": 0.0001284701927201941, + "loss": 1.5588, + "step": 8527 + }, + { + "epoch": 0.7737252767192887, + "grad_norm": 0.09763845261460848, + "learning_rate": 0.0001283718815213964, + "loss": 1.5309, + "step": 8528 + }, + { + "epoch": 0.7738160043549265, + "grad_norm": 0.10562745101500465, + "learning_rate": 0.00012827360241274166, + "loss": 1.5651, 
+ "step": 8529 + }, + { + "epoch": 0.7739067319905644, + "grad_norm": 0.10484141612679203, + "learning_rate": 0.0001281753554027162, + "loss": 1.5723, + "step": 8530 + }, + { + "epoch": 0.7739974596262021, + "grad_norm": 0.10132260325923088, + "learning_rate": 0.0001280771404998034, + "loss": 1.5224, + "step": 8531 + }, + { + "epoch": 0.7740881872618399, + "grad_norm": 0.10123481000949966, + "learning_rate": 0.0001279789577124843, + "loss": 1.5755, + "step": 8532 + }, + { + "epoch": 0.7741789148974778, + "grad_norm": 0.10750036453126408, + "learning_rate": 0.00012788080704923726, + "loss": 1.5541, + "step": 8533 + }, + { + "epoch": 0.7742696425331156, + "grad_norm": 0.10675683699113944, + "learning_rate": 0.0001277826885185373, + "loss": 1.5192, + "step": 8534 + }, + { + "epoch": 0.7743603701687534, + "grad_norm": 0.10448432547168143, + "learning_rate": 0.00012768460212885695, + "loss": 1.5458, + "step": 8535 + }, + { + "epoch": 0.7744510978043913, + "grad_norm": 0.10406407884744256, + "learning_rate": 0.0001275865478886662, + "loss": 1.572, + "step": 8536 + }, + { + "epoch": 0.774541825440029, + "grad_norm": 0.10268392863010072, + "learning_rate": 0.00012748852580643193, + "loss": 1.4886, + "step": 8537 + }, + { + "epoch": 0.7746325530756668, + "grad_norm": 0.09933413638158409, + "learning_rate": 0.00012739053589061827, + "loss": 1.5176, + "step": 8538 + }, + { + "epoch": 0.7747232807113047, + "grad_norm": 0.10388013309771028, + "learning_rate": 0.0001272925781496867, + "loss": 1.5451, + "step": 8539 + }, + { + "epoch": 0.7748140083469425, + "grad_norm": 0.10176606567195833, + "learning_rate": 0.00012719465259209607, + "loss": 1.5397, + "step": 8540 + }, + { + "epoch": 0.7749047359825803, + "grad_norm": 0.10797080863503548, + "learning_rate": 0.00012709675922630215, + "loss": 1.5682, + "step": 8541 + }, + { + "epoch": 0.7749954636182181, + "grad_norm": 0.1016020425977718, + "learning_rate": 0.00012699889806075789, + "loss": 1.5732, + "step": 8542 + }, + { + "epoch": 0.7750861912538559, + "grad_norm": 0.10990535558550754, + "learning_rate": 0.00012690106910391393, + "loss": 1.5985, + "step": 8543 + }, + { + "epoch": 0.7751769188894937, + "grad_norm": 0.10448354952676205, + "learning_rate": 0.00012680327236421756, + "loss": 1.514, + "step": 8544 + }, + { + "epoch": 0.7752676465251316, + "grad_norm": 0.0985552521434194, + "learning_rate": 0.0001267055078501136, + "loss": 1.5345, + "step": 8545 + }, + { + "epoch": 0.7753583741607694, + "grad_norm": 0.10631812934300897, + "learning_rate": 0.0001266077755700441, + "loss": 1.5383, + "step": 8546 + }, + { + "epoch": 0.7754491017964071, + "grad_norm": 0.09959113540406996, + "learning_rate": 0.0001265100755324481, + "loss": 1.5763, + "step": 8547 + }, + { + "epoch": 0.775539829432045, + "grad_norm": 0.10522570533766991, + "learning_rate": 0.0001264124077457623, + "loss": 1.5988, + "step": 8548 + }, + { + "epoch": 0.7756305570676828, + "grad_norm": 0.1023440071990922, + "learning_rate": 0.00012631477221841997, + "loss": 1.5301, + "step": 8549 + }, + { + "epoch": 0.7757212847033206, + "grad_norm": 0.1042804134947227, + "learning_rate": 0.00012621716895885228, + "loss": 1.5651, + "step": 8550 + }, + { + "epoch": 0.7758120123389585, + "grad_norm": 0.10284219281157139, + "learning_rate": 0.0001261195979754871, + "loss": 1.5606, + "step": 8551 + }, + { + "epoch": 0.7759027399745962, + "grad_norm": 0.1018837203420659, + "learning_rate": 0.0001260220592767497, + "loss": 1.5585, + "step": 8552 + }, + { + "epoch": 0.775993467610234, + "grad_norm": 
0.10324296421120853, + "learning_rate": 0.00012592455287106265, + "loss": 1.5093, + "step": 8553 + }, + { + "epoch": 0.7760841952458719, + "grad_norm": 0.10778822445822688, + "learning_rate": 0.00012582707876684552, + "loss": 1.5519, + "step": 8554 + }, + { + "epoch": 0.7761749228815097, + "grad_norm": 0.10514459610789523, + "learning_rate": 0.00012572963697251538, + "loss": 1.5448, + "step": 8555 + }, + { + "epoch": 0.7762656505171476, + "grad_norm": 0.10225019960242299, + "learning_rate": 0.00012563222749648612, + "loss": 1.5444, + "step": 8556 + }, + { + "epoch": 0.7763563781527854, + "grad_norm": 0.09952889807801361, + "learning_rate": 0.00012553485034716931, + "loss": 1.5581, + "step": 8557 + }, + { + "epoch": 0.7764471057884231, + "grad_norm": 0.10517013050942378, + "learning_rate": 0.0001254375055329734, + "loss": 1.5827, + "step": 8558 + }, + { + "epoch": 0.776537833424061, + "grad_norm": 0.10392713294857708, + "learning_rate": 0.00012534019306230393, + "loss": 1.515, + "step": 8559 + }, + { + "epoch": 0.7766285610596988, + "grad_norm": 0.10121779752713055, + "learning_rate": 0.00012524291294356416, + "loss": 1.5383, + "step": 8560 + }, + { + "epoch": 0.7767192886953366, + "grad_norm": 0.10066170714439755, + "learning_rate": 0.0001251456651851541, + "loss": 1.5677, + "step": 8561 + }, + { + "epoch": 0.7768100163309745, + "grad_norm": 0.10180236132747794, + "learning_rate": 0.00012504844979547097, + "loss": 1.5213, + "step": 8562 + }, + { + "epoch": 0.7769007439666122, + "grad_norm": 0.10201598346888616, + "learning_rate": 0.00012495126678290942, + "loss": 1.515, + "step": 8563 + }, + { + "epoch": 0.77699147160225, + "grad_norm": 0.10575303841722163, + "learning_rate": 0.00012485411615586139, + "loss": 1.5159, + "step": 8564 + }, + { + "epoch": 0.7770821992378879, + "grad_norm": 0.10538545794968195, + "learning_rate": 0.00012475699792271577, + "loss": 1.5306, + "step": 8565 + }, + { + "epoch": 0.7771729268735257, + "grad_norm": 0.10417395003802774, + "learning_rate": 0.0001246599120918585, + "loss": 1.5687, + "step": 8566 + }, + { + "epoch": 0.7772636545091635, + "grad_norm": 0.10650080446878307, + "learning_rate": 0.00012456285867167333, + "loss": 1.5205, + "step": 8567 + }, + { + "epoch": 0.7773543821448013, + "grad_norm": 0.10045707231254748, + "learning_rate": 0.00012446583767054066, + "loss": 1.4965, + "step": 8568 + }, + { + "epoch": 0.7774451097804391, + "grad_norm": 0.10673250298876273, + "learning_rate": 0.00012436884909683803, + "loss": 1.5491, + "step": 8569 + }, + { + "epoch": 0.7775358374160769, + "grad_norm": 0.10676857153983536, + "learning_rate": 0.0001242718929589407, + "loss": 1.5209, + "step": 8570 + }, + { + "epoch": 0.7776265650517148, + "grad_norm": 0.10575488977396011, + "learning_rate": 0.00012417496926522094, + "loss": 1.5777, + "step": 8571 + }, + { + "epoch": 0.7777172926873526, + "grad_norm": 0.10267470250232068, + "learning_rate": 0.00012407807802404796, + "loss": 1.5522, + "step": 8572 + }, + { + "epoch": 0.7778080203229903, + "grad_norm": 0.10045272078957526, + "learning_rate": 0.0001239812192437882, + "loss": 1.5197, + "step": 8573 + }, + { + "epoch": 0.7778987479586282, + "grad_norm": 0.10250229194000993, + "learning_rate": 0.00012388439293280573, + "loss": 1.5389, + "step": 8574 + }, + { + "epoch": 0.777989475594266, + "grad_norm": 0.10508288515268169, + "learning_rate": 0.00012378759909946135, + "loss": 1.5889, + "step": 8575 + }, + { + "epoch": 0.7780802032299038, + "grad_norm": 0.10222455342600643, + "learning_rate": 0.0001236908377521131, + 
"loss": 1.5783, + "step": 8576 + }, + { + "epoch": 0.7781709308655417, + "grad_norm": 0.1059471996083733, + "learning_rate": 0.0001235941088991166, + "loss": 1.5534, + "step": 8577 + }, + { + "epoch": 0.7782616585011795, + "grad_norm": 0.10396311151114077, + "learning_rate": 0.00012349741254882412, + "loss": 1.5368, + "step": 8578 + }, + { + "epoch": 0.7783523861368172, + "grad_norm": 0.10202632572560082, + "learning_rate": 0.00012340074870958572, + "loss": 1.57, + "step": 8579 + }, + { + "epoch": 0.7784431137724551, + "grad_norm": 0.1028232942268626, + "learning_rate": 0.00012330411738974807, + "loss": 1.5393, + "step": 8580 + }, + { + "epoch": 0.7785338414080929, + "grad_norm": 0.102667921883515, + "learning_rate": 0.0001232075185976555, + "loss": 1.5215, + "step": 8581 + }, + { + "epoch": 0.7786245690437307, + "grad_norm": 0.10480395039421847, + "learning_rate": 0.00012311095234164926, + "loss": 1.5392, + "step": 8582 + }, + { + "epoch": 0.7787152966793686, + "grad_norm": 0.103827485086863, + "learning_rate": 0.0001230144186300677, + "loss": 1.5554, + "step": 8583 + }, + { + "epoch": 0.7788060243150063, + "grad_norm": 0.10518568815135276, + "learning_rate": 0.00012291791747124676, + "loss": 1.5582, + "step": 8584 + }, + { + "epoch": 0.7788967519506441, + "grad_norm": 0.10641620831768235, + "learning_rate": 0.0001228214488735192, + "loss": 1.5472, + "step": 8585 + }, + { + "epoch": 0.778987479586282, + "grad_norm": 0.10447516031548743, + "learning_rate": 0.00012272501284521503, + "loss": 1.5609, + "step": 8586 + }, + { + "epoch": 0.7790782072219198, + "grad_norm": 0.10037722467777468, + "learning_rate": 0.00012262860939466158, + "loss": 1.5574, + "step": 8587 + }, + { + "epoch": 0.7791689348575576, + "grad_norm": 0.10545685388080851, + "learning_rate": 0.00012253223853018346, + "loss": 1.5546, + "step": 8588 + }, + { + "epoch": 0.7792596624931954, + "grad_norm": 0.10450558687966614, + "learning_rate": 0.0001224359002601022, + "loss": 1.5662, + "step": 8589 + }, + { + "epoch": 0.7793503901288332, + "grad_norm": 0.10257670144272715, + "learning_rate": 0.00012233959459273647, + "loss": 1.5894, + "step": 8590 + }, + { + "epoch": 0.779441117764471, + "grad_norm": 0.10115982012142444, + "learning_rate": 0.0001222433215364025, + "loss": 1.5707, + "step": 8591 + }, + { + "epoch": 0.7795318454001089, + "grad_norm": 0.10131395652171374, + "learning_rate": 0.00012214708109941342, + "loss": 1.5168, + "step": 8592 + }, + { + "epoch": 0.7796225730357467, + "grad_norm": 0.10204602172453604, + "learning_rate": 0.0001220508732900793, + "loss": 1.56, + "step": 8593 + }, + { + "epoch": 0.7797133006713846, + "grad_norm": 0.10281258557316444, + "learning_rate": 0.00012195469811670823, + "loss": 1.5749, + "step": 8594 + }, + { + "epoch": 0.7798040283070223, + "grad_norm": 0.10463777336120605, + "learning_rate": 0.00012185855558760473, + "loss": 1.515, + "step": 8595 + }, + { + "epoch": 0.7798947559426601, + "grad_norm": 0.10190930173493533, + "learning_rate": 0.0001217624457110707, + "loss": 1.5441, + "step": 8596 + }, + { + "epoch": 0.779985483578298, + "grad_norm": 0.1007636299178208, + "learning_rate": 0.00012166636849540502, + "loss": 1.5307, + "step": 8597 + }, + { + "epoch": 0.7800762112139358, + "grad_norm": 0.0974164795253021, + "learning_rate": 0.00012157032394890438, + "loss": 1.5543, + "step": 8598 + }, + { + "epoch": 0.7801669388495736, + "grad_norm": 0.10481003936861744, + "learning_rate": 0.00012147431207986204, + "loss": 1.5726, + "step": 8599 + }, + { + "epoch": 0.7802576664852114, + 
"grad_norm": 0.10127490044846542, + "learning_rate": 0.00012137833289656847, + "loss": 1.5474, + "step": 8600 + }, + { + "epoch": 0.7803483941208492, + "grad_norm": 0.1028165142012889, + "learning_rate": 0.0001212823864073117, + "loss": 1.5765, + "step": 8601 + }, + { + "epoch": 0.780439121756487, + "grad_norm": 0.10070635437319858, + "learning_rate": 0.0001211864726203768, + "loss": 1.5398, + "step": 8602 + }, + { + "epoch": 0.7805298493921249, + "grad_norm": 0.10382980577488023, + "learning_rate": 0.00012109059154404568, + "loss": 1.5492, + "step": 8603 + }, + { + "epoch": 0.7806205770277627, + "grad_norm": 0.10311565015090923, + "learning_rate": 0.00012099474318659798, + "loss": 1.5116, + "step": 8604 + }, + { + "epoch": 0.7807113046634004, + "grad_norm": 0.10191408832638921, + "learning_rate": 0.00012089892755631005, + "loss": 1.5707, + "step": 8605 + }, + { + "epoch": 0.7808020322990383, + "grad_norm": 0.10743640291751573, + "learning_rate": 0.00012080314466145542, + "loss": 1.5793, + "step": 8606 + }, + { + "epoch": 0.7808927599346761, + "grad_norm": 0.10271819769855188, + "learning_rate": 0.00012070739451030532, + "loss": 1.5638, + "step": 8607 + }, + { + "epoch": 0.7809834875703139, + "grad_norm": 0.10811720860875468, + "learning_rate": 0.00012061167711112758, + "loss": 1.5287, + "step": 8608 + }, + { + "epoch": 0.7810742152059518, + "grad_norm": 0.1041114193158808, + "learning_rate": 0.00012051599247218737, + "loss": 1.577, + "step": 8609 + }, + { + "epoch": 0.7811649428415895, + "grad_norm": 0.10339552151362351, + "learning_rate": 0.00012042034060174706, + "loss": 1.5334, + "step": 8610 + }, + { + "epoch": 0.7812556704772273, + "grad_norm": 0.10259709524769633, + "learning_rate": 0.00012032472150806645, + "loss": 1.5414, + "step": 8611 + }, + { + "epoch": 0.7813463981128652, + "grad_norm": 0.10285045409111958, + "learning_rate": 0.00012022913519940209, + "loss": 1.5543, + "step": 8612 + }, + { + "epoch": 0.781437125748503, + "grad_norm": 0.10187999488918366, + "learning_rate": 0.00012013358168400773, + "loss": 1.5281, + "step": 8613 + }, + { + "epoch": 0.7815278533841408, + "grad_norm": 0.09998595970697437, + "learning_rate": 0.00012003806097013475, + "loss": 1.5317, + "step": 8614 + }, + { + "epoch": 0.7816185810197787, + "grad_norm": 0.10150036713451373, + "learning_rate": 0.00011994257306603118, + "loss": 1.5234, + "step": 8615 + }, + { + "epoch": 0.7817093086554164, + "grad_norm": 0.1036832008166466, + "learning_rate": 0.0001198471179799423, + "loss": 1.5806, + "step": 8616 + }, + { + "epoch": 0.7818000362910542, + "grad_norm": 0.09963599835938775, + "learning_rate": 0.00011975169572011085, + "loss": 1.5647, + "step": 8617 + }, + { + "epoch": 0.7818907639266921, + "grad_norm": 0.10563904464786454, + "learning_rate": 0.00011965630629477665, + "loss": 1.5185, + "step": 8618 + }, + { + "epoch": 0.7819814915623299, + "grad_norm": 0.1068297860904783, + "learning_rate": 0.0001195609497121764, + "loss": 1.5343, + "step": 8619 + }, + { + "epoch": 0.7820722191979677, + "grad_norm": 0.10179514968526916, + "learning_rate": 0.00011946562598054412, + "loss": 1.5003, + "step": 8620 + }, + { + "epoch": 0.7821629468336055, + "grad_norm": 0.107687575729774, + "learning_rate": 0.00011937033510811124, + "loss": 1.5429, + "step": 8621 + }, + { + "epoch": 0.7822536744692433, + "grad_norm": 0.11039652064737931, + "learning_rate": 0.00011927507710310603, + "loss": 1.5597, + "step": 8622 + }, + { + "epoch": 0.7823444021048811, + "grad_norm": 0.10618457522642111, + "learning_rate": 
0.00011917985197375392, + "loss": 1.5619, + "step": 8623 + }, + { + "epoch": 0.782435129740519, + "grad_norm": 0.10721339909000636, + "learning_rate": 0.00011908465972827781, + "loss": 1.6103, + "step": 8624 + }, + { + "epoch": 0.7825258573761568, + "grad_norm": 0.10096820218089628, + "learning_rate": 0.00011898950037489737, + "loss": 1.551, + "step": 8625 + }, + { + "epoch": 0.7826165850117945, + "grad_norm": 0.10389297079027443, + "learning_rate": 0.00011889437392182984, + "loss": 1.5114, + "step": 8626 + }, + { + "epoch": 0.7827073126474324, + "grad_norm": 0.09928274396671957, + "learning_rate": 0.00011879928037728916, + "loss": 1.523, + "step": 8627 + }, + { + "epoch": 0.7827980402830702, + "grad_norm": 0.10054804032480406, + "learning_rate": 0.00011870421974948693, + "loss": 1.5039, + "step": 8628 + }, + { + "epoch": 0.782888767918708, + "grad_norm": 0.10118704148792812, + "learning_rate": 0.00011860919204663157, + "loss": 1.5627, + "step": 8629 + }, + { + "epoch": 0.7829794955543459, + "grad_norm": 0.10080603177388855, + "learning_rate": 0.00011851419727692847, + "loss": 1.5369, + "step": 8630 + }, + { + "epoch": 0.7830702231899836, + "grad_norm": 0.1063036529151998, + "learning_rate": 0.00011841923544858086, + "loss": 1.5491, + "step": 8631 + }, + { + "epoch": 0.7831609508256215, + "grad_norm": 0.10308493838477809, + "learning_rate": 0.00011832430656978838, + "loss": 1.5333, + "step": 8632 + }, + { + "epoch": 0.7832516784612593, + "grad_norm": 0.09991092043655059, + "learning_rate": 0.00011822941064874837, + "loss": 1.5208, + "step": 8633 + }, + { + "epoch": 0.7833424060968971, + "grad_norm": 0.10096369637247915, + "learning_rate": 0.00011813454769365489, + "loss": 1.5254, + "step": 8634 + }, + { + "epoch": 0.783433133732535, + "grad_norm": 0.10134912571043123, + "learning_rate": 0.00011803971771269967, + "loss": 1.5291, + "step": 8635 + }, + { + "epoch": 0.7835238613681728, + "grad_norm": 0.10368580959079707, + "learning_rate": 0.00011794492071407109, + "loss": 1.5542, + "step": 8636 + }, + { + "epoch": 0.7836145890038105, + "grad_norm": 0.09927807496358437, + "learning_rate": 0.00011785015670595478, + "loss": 1.5498, + "step": 8637 + }, + { + "epoch": 0.7837053166394484, + "grad_norm": 0.09986991409426636, + "learning_rate": 0.00011775542569653392, + "loss": 1.51, + "step": 8638 + }, + { + "epoch": 0.7837960442750862, + "grad_norm": 0.10161231041680995, + "learning_rate": 0.0001176607276939884, + "loss": 1.49, + "step": 8639 + }, + { + "epoch": 0.783886771910724, + "grad_norm": 0.1014856763643614, + "learning_rate": 0.00011756606270649517, + "loss": 1.5357, + "step": 8640 + }, + { + "epoch": 0.7839774995463619, + "grad_norm": 0.1035634210976885, + "learning_rate": 0.00011747143074222887, + "loss": 1.5525, + "step": 8641 + }, + { + "epoch": 0.7840682271819996, + "grad_norm": 0.10086143630037342, + "learning_rate": 0.00011737683180936104, + "loss": 1.5229, + "step": 8642 + }, + { + "epoch": 0.7841589548176374, + "grad_norm": 0.10352301065022666, + "learning_rate": 0.00011728226591606017, + "loss": 1.6267, + "step": 8643 + }, + { + "epoch": 0.7842496824532753, + "grad_norm": 0.10119991665370744, + "learning_rate": 0.00011718773307049186, + "loss": 1.5268, + "step": 8644 + }, + { + "epoch": 0.7843404100889131, + "grad_norm": 0.10313642036009224, + "learning_rate": 0.00011709323328081938, + "loss": 1.548, + "step": 8645 + }, + { + "epoch": 0.7844311377245509, + "grad_norm": 0.10282129573398664, + "learning_rate": 0.00011699876655520269, + "loss": 1.6157, + "step": 8646 + }, + { + 
"epoch": 0.7845218653601888, + "grad_norm": 0.10538807478325235, + "learning_rate": 0.00011690433290179874, + "loss": 1.6005, + "step": 8647 + }, + { + "epoch": 0.7846125929958265, + "grad_norm": 0.10346252754302211, + "learning_rate": 0.00011680993232876219, + "loss": 1.5575, + "step": 8648 + }, + { + "epoch": 0.7847033206314643, + "grad_norm": 0.10481818393714624, + "learning_rate": 0.00011671556484424456, + "loss": 1.5521, + "step": 8649 + }, + { + "epoch": 0.7847940482671022, + "grad_norm": 0.10651214528751204, + "learning_rate": 0.00011662123045639439, + "loss": 1.5564, + "step": 8650 + }, + { + "epoch": 0.78488477590274, + "grad_norm": 0.10512843224995137, + "learning_rate": 0.00011652692917335733, + "loss": 1.5673, + "step": 8651 + }, + { + "epoch": 0.7849755035383778, + "grad_norm": 0.1058369950984314, + "learning_rate": 0.0001164326610032766, + "loss": 1.5428, + "step": 8652 + }, + { + "epoch": 0.7850662311740156, + "grad_norm": 0.0999879298357879, + "learning_rate": 0.00011633842595429211, + "loss": 1.5341, + "step": 8653 + }, + { + "epoch": 0.7851569588096534, + "grad_norm": 0.10185206974486372, + "learning_rate": 0.000116244224034541, + "loss": 1.5525, + "step": 8654 + }, + { + "epoch": 0.7852476864452912, + "grad_norm": 0.09988710629807372, + "learning_rate": 0.00011615005525215777, + "loss": 1.577, + "step": 8655 + }, + { + "epoch": 0.7853384140809291, + "grad_norm": 0.10897312015809815, + "learning_rate": 0.00011605591961527378, + "loss": 1.5713, + "step": 8656 + }, + { + "epoch": 0.7854291417165669, + "grad_norm": 0.10360160279775975, + "learning_rate": 0.00011596181713201781, + "loss": 1.572, + "step": 8657 + }, + { + "epoch": 0.7855198693522046, + "grad_norm": 0.1068387813974317, + "learning_rate": 0.0001158677478105154, + "loss": 1.5699, + "step": 8658 + }, + { + "epoch": 0.7856105969878425, + "grad_norm": 0.10269930615835253, + "learning_rate": 0.00011577371165888973, + "loss": 1.5432, + "step": 8659 + }, + { + "epoch": 0.7857013246234803, + "grad_norm": 0.10388653061946547, + "learning_rate": 0.00011567970868526068, + "loss": 1.5001, + "step": 8660 + }, + { + "epoch": 0.7857920522591181, + "grad_norm": 0.10341830036193023, + "learning_rate": 0.00011558573889774526, + "loss": 1.5485, + "step": 8661 + }, + { + "epoch": 0.785882779894756, + "grad_norm": 0.09904818201339449, + "learning_rate": 0.00011549180230445811, + "loss": 1.5395, + "step": 8662 + }, + { + "epoch": 0.7859735075303937, + "grad_norm": 0.10520221909674533, + "learning_rate": 0.00011539789891351049, + "loss": 1.5878, + "step": 8663 + }, + { + "epoch": 0.7860642351660315, + "grad_norm": 0.10616326020216786, + "learning_rate": 0.00011530402873301088, + "loss": 1.5471, + "step": 8664 + }, + { + "epoch": 0.7861549628016694, + "grad_norm": 0.10419305918247546, + "learning_rate": 0.00011521019177106507, + "loss": 1.5512, + "step": 8665 + }, + { + "epoch": 0.7862456904373072, + "grad_norm": 0.10452482835006323, + "learning_rate": 0.00011511638803577601, + "loss": 1.57, + "step": 8666 + }, + { + "epoch": 0.786336418072945, + "grad_norm": 0.10265491681295662, + "learning_rate": 0.00011502261753524363, + "loss": 1.5753, + "step": 8667 + }, + { + "epoch": 0.7864271457085829, + "grad_norm": 0.11217835368152806, + "learning_rate": 0.00011492888027756481, + "loss": 1.5486, + "step": 8668 + }, + { + "epoch": 0.7865178733442206, + "grad_norm": 0.09970072951033662, + "learning_rate": 0.00011483517627083406, + "loss": 1.5466, + "step": 8669 + }, + { + "epoch": 0.7866086009798585, + "grad_norm": 0.10167979359928278, + 
"learning_rate": 0.00011474150552314261, + "loss": 1.5241, + "step": 8670 + }, + { + "epoch": 0.7866993286154963, + "grad_norm": 0.1051373947959984, + "learning_rate": 0.00011464786804257866, + "loss": 1.5229, + "step": 8671 + }, + { + "epoch": 0.7867900562511341, + "grad_norm": 0.0994431587555734, + "learning_rate": 0.00011455426383722834, + "loss": 1.5447, + "step": 8672 + }, + { + "epoch": 0.786880783886772, + "grad_norm": 0.09998153193514245, + "learning_rate": 0.0001144606929151742, + "loss": 1.5492, + "step": 8673 + }, + { + "epoch": 0.7869715115224097, + "grad_norm": 0.10658000869788331, + "learning_rate": 0.00011436715528449587, + "loss": 1.5137, + "step": 8674 + }, + { + "epoch": 0.7870622391580475, + "grad_norm": 0.10182910777539435, + "learning_rate": 0.00011427365095327069, + "loss": 1.5552, + "step": 8675 + }, + { + "epoch": 0.7871529667936854, + "grad_norm": 0.10714284659719144, + "learning_rate": 0.00011418017992957263, + "loss": 1.5799, + "step": 8676 + }, + { + "epoch": 0.7872436944293232, + "grad_norm": 0.10462343460678142, + "learning_rate": 0.00011408674222147286, + "loss": 1.5516, + "step": 8677 + }, + { + "epoch": 0.787334422064961, + "grad_norm": 0.10107321414049336, + "learning_rate": 0.00011399333783703964, + "loss": 1.5587, + "step": 8678 + }, + { + "epoch": 0.7874251497005988, + "grad_norm": 0.09955330763727693, + "learning_rate": 0.00011389996678433855, + "loss": 1.5311, + "step": 8679 + }, + { + "epoch": 0.7875158773362366, + "grad_norm": 0.10078902364516651, + "learning_rate": 0.00011380662907143241, + "loss": 1.5337, + "step": 8680 + }, + { + "epoch": 0.7876066049718744, + "grad_norm": 0.10155245492086228, + "learning_rate": 0.00011371332470638063, + "loss": 1.5592, + "step": 8681 + }, + { + "epoch": 0.7876973326075123, + "grad_norm": 0.10194652358474973, + "learning_rate": 0.00011362005369724033, + "loss": 1.5883, + "step": 8682 + }, + { + "epoch": 0.7877880602431501, + "grad_norm": 0.10481320517065776, + "learning_rate": 0.00011352681605206533, + "loss": 1.582, + "step": 8683 + }, + { + "epoch": 0.7878787878787878, + "grad_norm": 0.10122669507586424, + "learning_rate": 0.0001134336117789066, + "loss": 1.5506, + "step": 8684 + }, + { + "epoch": 0.7879695155144257, + "grad_norm": 0.10332779046636449, + "learning_rate": 0.00011334044088581264, + "loss": 1.5142, + "step": 8685 + }, + { + "epoch": 0.7880602431500635, + "grad_norm": 0.10010309279280072, + "learning_rate": 0.00011324730338082856, + "loss": 1.519, + "step": 8686 + }, + { + "epoch": 0.7881509707857013, + "grad_norm": 0.1064987423447954, + "learning_rate": 0.00011315419927199666, + "loss": 1.5629, + "step": 8687 + }, + { + "epoch": 0.7882416984213392, + "grad_norm": 0.10165665183043815, + "learning_rate": 0.00011306112856735673, + "loss": 1.557, + "step": 8688 + }, + { + "epoch": 0.788332426056977, + "grad_norm": 0.10270813271860162, + "learning_rate": 0.00011296809127494545, + "loss": 1.5437, + "step": 8689 + }, + { + "epoch": 0.7884231536926147, + "grad_norm": 0.10461720296084417, + "learning_rate": 0.00011287508740279657, + "loss": 1.5541, + "step": 8690 + }, + { + "epoch": 0.7885138813282526, + "grad_norm": 0.09989304508736785, + "learning_rate": 0.0001127821169589408, + "loss": 1.5417, + "step": 8691 + }, + { + "epoch": 0.7886046089638904, + "grad_norm": 0.10486983515588491, + "learning_rate": 0.00011268917995140648, + "loss": 1.5819, + "step": 8692 + }, + { + "epoch": 0.7886953365995282, + "grad_norm": 0.10266383645912457, + "learning_rate": 0.00011259627638821851, + "loss": 1.5675, + "step": 
8693 + }, + { + "epoch": 0.7887860642351661, + "grad_norm": 0.10550797536119395, + "learning_rate": 0.00011250340627739908, + "loss": 1.5365, + "step": 8694 + }, + { + "epoch": 0.7888767918708038, + "grad_norm": 0.1052971061171649, + "learning_rate": 0.00011241056962696755, + "loss": 1.5331, + "step": 8695 + }, + { + "epoch": 0.7889675195064416, + "grad_norm": 0.1050634412347302, + "learning_rate": 0.00011231776644494068, + "loss": 1.517, + "step": 8696 + }, + { + "epoch": 0.7890582471420795, + "grad_norm": 0.103822743093322, + "learning_rate": 0.00011222499673933179, + "loss": 1.5491, + "step": 8697 + }, + { + "epoch": 0.7891489747777173, + "grad_norm": 0.10637645237360892, + "learning_rate": 0.0001121322605181514, + "loss": 1.6231, + "step": 8698 + }, + { + "epoch": 0.7892397024133551, + "grad_norm": 0.10708357386549454, + "learning_rate": 0.00011203955778940767, + "loss": 1.5377, + "step": 8699 + }, + { + "epoch": 0.789330430048993, + "grad_norm": 0.10298055388409544, + "learning_rate": 0.0001119468885611053, + "loss": 1.5666, + "step": 8700 + }, + { + "epoch": 0.7894211576846307, + "grad_norm": 0.1059717193824519, + "learning_rate": 0.00011185425284124611, + "loss": 1.5063, + "step": 8701 + }, + { + "epoch": 0.7895118853202685, + "grad_norm": 0.10361887875772165, + "learning_rate": 0.00011176165063782956, + "loss": 1.5299, + "step": 8702 + }, + { + "epoch": 0.7896026129559064, + "grad_norm": 0.1011401528651601, + "learning_rate": 0.00011166908195885156, + "loss": 1.5389, + "step": 8703 + }, + { + "epoch": 0.7896933405915442, + "grad_norm": 0.10587779167257466, + "learning_rate": 0.00011157654681230573, + "loss": 1.5871, + "step": 8704 + }, + { + "epoch": 0.789784068227182, + "grad_norm": 0.10630784188624724, + "learning_rate": 0.00011148404520618216, + "loss": 1.5598, + "step": 8705 + }, + { + "epoch": 0.7898747958628198, + "grad_norm": 0.10127776964260492, + "learning_rate": 0.0001113915771484687, + "loss": 1.4826, + "step": 8706 + }, + { + "epoch": 0.7899655234984576, + "grad_norm": 0.10538890347482541, + "learning_rate": 0.00011129914264714985, + "loss": 1.5308, + "step": 8707 + }, + { + "epoch": 0.7900562511340955, + "grad_norm": 0.10467464311559455, + "learning_rate": 0.00011120674171020717, + "loss": 1.5044, + "step": 8708 + }, + { + "epoch": 0.7901469787697333, + "grad_norm": 0.10725102683985428, + "learning_rate": 0.00011111437434561978, + "loss": 1.5516, + "step": 8709 + }, + { + "epoch": 0.790237706405371, + "grad_norm": 0.10237226720757314, + "learning_rate": 0.00011102204056136333, + "loss": 1.519, + "step": 8710 + }, + { + "epoch": 0.7903284340410089, + "grad_norm": 0.10423565744373688, + "learning_rate": 0.00011092974036541114, + "loss": 1.5095, + "step": 8711 + }, + { + "epoch": 0.7904191616766467, + "grad_norm": 0.10664178614620913, + "learning_rate": 0.00011083747376573312, + "loss": 1.568, + "step": 8712 + }, + { + "epoch": 0.7905098893122845, + "grad_norm": 0.10303226174574395, + "learning_rate": 0.00011074524077029668, + "loss": 1.5401, + "step": 8713 + }, + { + "epoch": 0.7906006169479224, + "grad_norm": 0.10245822040389599, + "learning_rate": 0.00011065304138706606, + "loss": 1.5308, + "step": 8714 + }, + { + "epoch": 0.7906913445835602, + "grad_norm": 0.10355617386051597, + "learning_rate": 0.00011056087562400264, + "loss": 1.5895, + "step": 8715 + }, + { + "epoch": 0.7907820722191979, + "grad_norm": 0.10493527932789062, + "learning_rate": 0.00011046874348906505, + "loss": 1.5458, + "step": 8716 + }, + { + "epoch": 0.7908727998548358, + "grad_norm": 
0.10338109319260481, + "learning_rate": 0.00011037664499020894, + "loss": 1.5052, + "step": 8717 + }, + { + "epoch": 0.7909635274904736, + "grad_norm": 0.09953038710752884, + "learning_rate": 0.00011028458013538678, + "loss": 1.5307, + "step": 8718 + }, + { + "epoch": 0.7910542551261114, + "grad_norm": 0.10279534390247512, + "learning_rate": 0.00011019254893254859, + "loss": 1.5471, + "step": 8719 + }, + { + "epoch": 0.7911449827617493, + "grad_norm": 0.1051724186233149, + "learning_rate": 0.00011010055138964142, + "loss": 1.5053, + "step": 8720 + }, + { + "epoch": 0.791235710397387, + "grad_norm": 0.1025451662011193, + "learning_rate": 0.00011000858751460907, + "loss": 1.5392, + "step": 8721 + }, + { + "epoch": 0.7913264380330248, + "grad_norm": 0.10524751615123183, + "learning_rate": 0.00010991665731539251, + "loss": 1.4954, + "step": 8722 + }, + { + "epoch": 0.7914171656686627, + "grad_norm": 0.10776370635037856, + "learning_rate": 0.00010982476079993025, + "loss": 1.5459, + "step": 8723 + }, + { + "epoch": 0.7915078933043005, + "grad_norm": 0.10391570581232176, + "learning_rate": 0.00010973289797615738, + "loss": 1.5397, + "step": 8724 + }, + { + "epoch": 0.7915986209399383, + "grad_norm": 0.10219564954740593, + "learning_rate": 0.00010964106885200614, + "loss": 1.5163, + "step": 8725 + }, + { + "epoch": 0.7916893485755762, + "grad_norm": 0.10543425377886985, + "learning_rate": 0.00010954927343540616, + "loss": 1.5375, + "step": 8726 + }, + { + "epoch": 0.7917800762112139, + "grad_norm": 0.10329321382776493, + "learning_rate": 0.00010945751173428409, + "loss": 1.5632, + "step": 8727 + }, + { + "epoch": 0.7918708038468517, + "grad_norm": 0.10140910852245945, + "learning_rate": 0.00010936578375656348, + "loss": 1.5471, + "step": 8728 + }, + { + "epoch": 0.7919615314824896, + "grad_norm": 0.10904751612687194, + "learning_rate": 0.00010927408951016482, + "loss": 1.597, + "step": 8729 + }, + { + "epoch": 0.7920522591181274, + "grad_norm": 0.10579082557952973, + "learning_rate": 0.0001091824290030063, + "loss": 1.5737, + "step": 8730 + }, + { + "epoch": 0.7921429867537652, + "grad_norm": 0.10348402067220198, + "learning_rate": 0.00010909080224300261, + "loss": 1.58, + "step": 8731 + }, + { + "epoch": 0.792233714389403, + "grad_norm": 0.10878506691812359, + "learning_rate": 0.00010899920923806567, + "loss": 1.4995, + "step": 8732 + }, + { + "epoch": 0.7923244420250408, + "grad_norm": 0.10170544184442012, + "learning_rate": 0.0001089076499961047, + "loss": 1.5404, + "step": 8733 + }, + { + "epoch": 0.7924151696606786, + "grad_norm": 0.10543339520686622, + "learning_rate": 0.0001088161245250257, + "loss": 1.5788, + "step": 8734 + }, + { + "epoch": 0.7925058972963165, + "grad_norm": 0.10106781674789503, + "learning_rate": 0.00010872463283273215, + "loss": 1.5475, + "step": 8735 + }, + { + "epoch": 0.7925966249319543, + "grad_norm": 0.10291356766291106, + "learning_rate": 0.0001086331749271241, + "loss": 1.5462, + "step": 8736 + }, + { + "epoch": 0.792687352567592, + "grad_norm": 0.10333316191814484, + "learning_rate": 0.00010854175081609918, + "loss": 1.5538, + "step": 8737 + }, + { + "epoch": 0.7927780802032299, + "grad_norm": 0.10007016262604042, + "learning_rate": 0.00010845036050755174, + "loss": 1.5335, + "step": 8738 + }, + { + "epoch": 0.7928688078388677, + "grad_norm": 0.10193903900709227, + "learning_rate": 0.00010835900400937332, + "loss": 1.5822, + "step": 8739 + }, + { + "epoch": 0.7929595354745055, + "grad_norm": 0.10479965045972563, + "learning_rate": 0.0001082676813294527, + 
"loss": 1.526, + "step": 8740 + }, + { + "epoch": 0.7930502631101434, + "grad_norm": 0.10160392006161689, + "learning_rate": 0.00010817639247567556, + "loss": 1.581, + "step": 8741 + }, + { + "epoch": 0.7931409907457811, + "grad_norm": 0.10557965518814046, + "learning_rate": 0.00010808513745592458, + "loss": 1.6034, + "step": 8742 + }, + { + "epoch": 0.7932317183814189, + "grad_norm": 0.10320167291496068, + "learning_rate": 0.00010799391627807969, + "loss": 1.5786, + "step": 8743 + }, + { + "epoch": 0.7933224460170568, + "grad_norm": 0.1005694789654496, + "learning_rate": 0.00010790272895001801, + "loss": 1.5475, + "step": 8744 + }, + { + "epoch": 0.7934131736526946, + "grad_norm": 0.10382437107479199, + "learning_rate": 0.00010781157547961356, + "loss": 1.5738, + "step": 8745 + }, + { + "epoch": 0.7935039012883325, + "grad_norm": 0.102251557712984, + "learning_rate": 0.00010772045587473716, + "loss": 1.5462, + "step": 8746 + }, + { + "epoch": 0.7935946289239703, + "grad_norm": 0.10044973596248057, + "learning_rate": 0.00010762937014325742, + "loss": 1.5381, + "step": 8747 + }, + { + "epoch": 0.793685356559608, + "grad_norm": 0.10455840179455655, + "learning_rate": 0.00010753831829303934, + "loss": 1.5783, + "step": 8748 + }, + { + "epoch": 0.7937760841952459, + "grad_norm": 0.10434965236041839, + "learning_rate": 0.00010744730033194505, + "loss": 1.5551, + "step": 8749 + }, + { + "epoch": 0.7938668118308837, + "grad_norm": 0.10562792528626934, + "learning_rate": 0.00010735631626783455, + "loss": 1.5539, + "step": 8750 + }, + { + "epoch": 0.7939575394665215, + "grad_norm": 0.10512222253058232, + "learning_rate": 0.00010726536610856397, + "loss": 1.5311, + "step": 8751 + }, + { + "epoch": 0.7940482671021594, + "grad_norm": 0.10011793029581702, + "learning_rate": 0.00010717444986198676, + "loss": 1.5012, + "step": 8752 + }, + { + "epoch": 0.7941389947377971, + "grad_norm": 0.10070736133699938, + "learning_rate": 0.00010708356753595382, + "loss": 1.5559, + "step": 8753 + }, + { + "epoch": 0.7942297223734349, + "grad_norm": 0.10331392682881622, + "learning_rate": 0.00010699271913831282, + "loss": 1.5538, + "step": 8754 + }, + { + "epoch": 0.7943204500090728, + "grad_norm": 0.10274926185561556, + "learning_rate": 0.00010690190467690836, + "loss": 1.6051, + "step": 8755 + }, + { + "epoch": 0.7944111776447106, + "grad_norm": 0.09985520192296768, + "learning_rate": 0.00010681112415958228, + "loss": 1.5912, + "step": 8756 + }, + { + "epoch": 0.7945019052803484, + "grad_norm": 0.1025671101577293, + "learning_rate": 0.00010672037759417357, + "loss": 1.5641, + "step": 8757 + }, + { + "epoch": 0.7945926329159863, + "grad_norm": 0.10379701848731648, + "learning_rate": 0.00010662966498851834, + "loss": 1.4997, + "step": 8758 + }, + { + "epoch": 0.794683360551624, + "grad_norm": 0.10432591634643139, + "learning_rate": 0.0001065389863504494, + "loss": 1.5565, + "step": 8759 + }, + { + "epoch": 0.7947740881872618, + "grad_norm": 0.10136684881794106, + "learning_rate": 0.0001064483416877971, + "loss": 1.5199, + "step": 8760 + }, + { + "epoch": 0.7948648158228997, + "grad_norm": 0.09973953638955625, + "learning_rate": 0.00010635773100838853, + "loss": 1.5212, + "step": 8761 + }, + { + "epoch": 0.7949555434585375, + "grad_norm": 0.10419987525658174, + "learning_rate": 0.00010626715432004774, + "loss": 1.5842, + "step": 8762 + }, + { + "epoch": 0.7950462710941752, + "grad_norm": 0.10292318526451112, + "learning_rate": 0.00010617661163059633, + "loss": 1.5658, + "step": 8763 + }, + { + "epoch": 
0.7951369987298131, + "grad_norm": 0.10448546671197938, + "learning_rate": 0.0001060861029478526, + "loss": 1.514, + "step": 8764 + }, + { + "epoch": 0.7952277263654509, + "grad_norm": 0.10593755918378013, + "learning_rate": 0.00010599562827963172, + "loss": 1.5584, + "step": 8765 + }, + { + "epoch": 0.7953184540010887, + "grad_norm": 0.10518835055339253, + "learning_rate": 0.00010590518763374645, + "loss": 1.5702, + "step": 8766 + }, + { + "epoch": 0.7954091816367266, + "grad_norm": 0.10511324777736905, + "learning_rate": 0.00010581478101800645, + "loss": 1.5133, + "step": 8767 + }, + { + "epoch": 0.7954999092723644, + "grad_norm": 0.10052703380907674, + "learning_rate": 0.00010572440844021824, + "loss": 1.5794, + "step": 8768 + }, + { + "epoch": 0.7955906369080021, + "grad_norm": 0.10410855068571004, + "learning_rate": 0.0001056340699081853, + "loss": 1.4961, + "step": 8769 + }, + { + "epoch": 0.79568136454364, + "grad_norm": 0.10492066008152587, + "learning_rate": 0.00010554376542970867, + "loss": 1.5244, + "step": 8770 + }, + { + "epoch": 0.7957720921792778, + "grad_norm": 0.10153163321108198, + "learning_rate": 0.00010545349501258605, + "loss": 1.5337, + "step": 8771 + }, + { + "epoch": 0.7958628198149156, + "grad_norm": 0.10323450221661593, + "learning_rate": 0.00010536325866461216, + "loss": 1.5392, + "step": 8772 + }, + { + "epoch": 0.7959535474505535, + "grad_norm": 0.10341731679571928, + "learning_rate": 0.00010527305639357904, + "loss": 1.5075, + "step": 8773 + }, + { + "epoch": 0.7960442750861912, + "grad_norm": 0.10134410339909869, + "learning_rate": 0.00010518288820727578, + "loss": 1.5346, + "step": 8774 + }, + { + "epoch": 0.796135002721829, + "grad_norm": 0.10576771104365566, + "learning_rate": 0.00010509275411348835, + "loss": 1.5279, + "step": 8775 + }, + { + "epoch": 0.7962257303574669, + "grad_norm": 0.10509760656209524, + "learning_rate": 0.00010500265411999966, + "loss": 1.5453, + "step": 8776 + }, + { + "epoch": 0.7963164579931047, + "grad_norm": 0.10428706491487807, + "learning_rate": 0.0001049125882345901, + "loss": 1.5675, + "step": 8777 + }, + { + "epoch": 0.7964071856287425, + "grad_norm": 0.10621469206915643, + "learning_rate": 0.00010482255646503686, + "loss": 1.582, + "step": 8778 + }, + { + "epoch": 0.7964979132643804, + "grad_norm": 0.10683011688970133, + "learning_rate": 0.00010473255881911392, + "loss": 1.511, + "step": 8779 + }, + { + "epoch": 0.7965886409000181, + "grad_norm": 0.100654658085348, + "learning_rate": 0.00010464259530459291, + "loss": 1.5405, + "step": 8780 + }, + { + "epoch": 0.7966793685356559, + "grad_norm": 0.10665876808607985, + "learning_rate": 0.00010455266592924195, + "loss": 1.5915, + "step": 8781 + }, + { + "epoch": 0.7967700961712938, + "grad_norm": 0.1062260339070185, + "learning_rate": 0.00010446277070082672, + "loss": 1.5573, + "step": 8782 + }, + { + "epoch": 0.7968608238069316, + "grad_norm": 0.10221119483264517, + "learning_rate": 0.00010437290962710938, + "loss": 1.5015, + "step": 8783 + }, + { + "epoch": 0.7969515514425695, + "grad_norm": 0.10569020291459404, + "learning_rate": 0.00010428308271584974, + "loss": 1.5393, + "step": 8784 + }, + { + "epoch": 0.7970422790782072, + "grad_norm": 0.10106004209060404, + "learning_rate": 0.00010419328997480426, + "loss": 1.512, + "step": 8785 + }, + { + "epoch": 0.797133006713845, + "grad_norm": 0.10494709519760537, + "learning_rate": 0.00010410353141172635, + "loss": 1.5333, + "step": 8786 + }, + { + "epoch": 0.7972237343494829, + "grad_norm": 0.1050066876512301, + 
"learning_rate": 0.00010401380703436702, + "loss": 1.5567, + "step": 8787 + }, + { + "epoch": 0.7973144619851207, + "grad_norm": 0.10608392562020419, + "learning_rate": 0.00010392411685047365, + "loss": 1.5399, + "step": 8788 + }, + { + "epoch": 0.7974051896207585, + "grad_norm": 0.10426819915243132, + "learning_rate": 0.0001038344608677913, + "loss": 1.5523, + "step": 8789 + }, + { + "epoch": 0.7974959172563963, + "grad_norm": 0.09871698859462835, + "learning_rate": 0.00010374483909406151, + "loss": 1.5287, + "step": 8790 + }, + { + "epoch": 0.7975866448920341, + "grad_norm": 0.10335026681534398, + "learning_rate": 0.00010365525153702344, + "loss": 1.5023, + "step": 8791 + }, + { + "epoch": 0.7976773725276719, + "grad_norm": 0.10570824871439785, + "learning_rate": 0.00010356569820441281, + "loss": 1.538, + "step": 8792 + }, + { + "epoch": 0.7977681001633098, + "grad_norm": 0.10200280708414253, + "learning_rate": 0.0001034761791039624, + "loss": 1.5502, + "step": 8793 + }, + { + "epoch": 0.7978588277989476, + "grad_norm": 0.10256177658000781, + "learning_rate": 0.00010338669424340247, + "loss": 1.595, + "step": 8794 + }, + { + "epoch": 0.7979495554345853, + "grad_norm": 0.10422554257629213, + "learning_rate": 0.00010329724363046, + "loss": 1.5261, + "step": 8795 + }, + { + "epoch": 0.7980402830702232, + "grad_norm": 0.10471499228202673, + "learning_rate": 0.00010320782727285888, + "loss": 1.5293, + "step": 8796 + }, + { + "epoch": 0.798131010705861, + "grad_norm": 0.10793315297563856, + "learning_rate": 0.00010311844517832037, + "loss": 1.5703, + "step": 8797 + }, + { + "epoch": 0.7982217383414988, + "grad_norm": 0.10311304126685623, + "learning_rate": 0.00010302909735456268, + "loss": 1.5669, + "step": 8798 + }, + { + "epoch": 0.7983124659771367, + "grad_norm": 0.10538431563337874, + "learning_rate": 0.00010293978380930096, + "loss": 1.5789, + "step": 8799 + }, + { + "epoch": 0.7984031936127745, + "grad_norm": 0.101059161953996, + "learning_rate": 0.00010285050455024736, + "loss": 1.5357, + "step": 8800 + }, + { + "epoch": 0.7984939212484122, + "grad_norm": 0.10283919597603747, + "learning_rate": 0.00010276125958511129, + "loss": 1.5781, + "step": 8801 + }, + { + "epoch": 0.7985846488840501, + "grad_norm": 0.10601623547045019, + "learning_rate": 0.00010267204892159904, + "loss": 1.5694, + "step": 8802 + }, + { + "epoch": 0.7986753765196879, + "grad_norm": 0.10178917493431608, + "learning_rate": 0.00010258287256741378, + "loss": 1.5249, + "step": 8803 + }, + { + "epoch": 0.7987661041553257, + "grad_norm": 0.1056275627508756, + "learning_rate": 0.00010249373053025601, + "loss": 1.5883, + "step": 8804 + }, + { + "epoch": 0.7988568317909636, + "grad_norm": 0.10163782899372788, + "learning_rate": 0.00010240462281782337, + "loss": 1.5415, + "step": 8805 + }, + { + "epoch": 0.7989475594266013, + "grad_norm": 0.10324878607184818, + "learning_rate": 0.00010231554943781019, + "loss": 1.5362, + "step": 8806 + }, + { + "epoch": 0.7990382870622391, + "grad_norm": 0.10282792031874242, + "learning_rate": 0.0001022265103979077, + "loss": 1.5203, + "step": 8807 + }, + { + "epoch": 0.799129014697877, + "grad_norm": 0.102738983816617, + "learning_rate": 0.00010213750570580488, + "loss": 1.5359, + "step": 8808 + }, + { + "epoch": 0.7992197423335148, + "grad_norm": 0.10349481048295697, + "learning_rate": 0.00010204853536918701, + "loss": 1.5239, + "step": 8809 + }, + { + "epoch": 0.7993104699691526, + "grad_norm": 0.10455987445126057, + "learning_rate": 0.00010195959939573668, + "loss": 1.5603, + "step": 8810 
+ }, + { + "epoch": 0.7994011976047904, + "grad_norm": 0.10340790331253859, + "learning_rate": 0.00010187069779313374, + "loss": 1.5365, + "step": 8811 + }, + { + "epoch": 0.7994919252404282, + "grad_norm": 0.10570632946528931, + "learning_rate": 0.00010178183056905454, + "loss": 1.5373, + "step": 8812 + }, + { + "epoch": 0.799582652876066, + "grad_norm": 0.10477366823566765, + "learning_rate": 0.00010169299773117313, + "loss": 1.531, + "step": 8813 + }, + { + "epoch": 0.7996733805117039, + "grad_norm": 0.10014446033087382, + "learning_rate": 0.00010160419928715997, + "loss": 1.5762, + "step": 8814 + }, + { + "epoch": 0.7997641081473417, + "grad_norm": 0.10233838718379423, + "learning_rate": 0.00010151543524468298, + "loss": 1.6068, + "step": 8815 + }, + { + "epoch": 0.7998548357829794, + "grad_norm": 0.10600628198335905, + "learning_rate": 0.00010142670561140694, + "loss": 1.5171, + "step": 8816 + }, + { + "epoch": 0.7999455634186173, + "grad_norm": 0.10159862669701634, + "learning_rate": 0.00010133801039499346, + "loss": 1.5237, + "step": 8817 + }, + { + "epoch": 0.8000362910542551, + "grad_norm": 0.10431581124506353, + "learning_rate": 0.00010124934960310172, + "loss": 1.526, + "step": 8818 + }, + { + "epoch": 0.8001270186898929, + "grad_norm": 0.1041493331019048, + "learning_rate": 0.00010116072324338738, + "loss": 1.5342, + "step": 8819 + }, + { + "epoch": 0.8002177463255308, + "grad_norm": 0.10262096803533698, + "learning_rate": 0.00010107213132350307, + "loss": 1.5634, + "step": 8820 + }, + { + "epoch": 0.8003084739611686, + "grad_norm": 0.10319138563297625, + "learning_rate": 0.0001009835738510993, + "loss": 1.4829, + "step": 8821 + }, + { + "epoch": 0.8003992015968064, + "grad_norm": 0.1016870997410134, + "learning_rate": 0.00010089505083382278, + "loss": 1.5045, + "step": 8822 + }, + { + "epoch": 0.8004899292324442, + "grad_norm": 0.10830333684677608, + "learning_rate": 0.0001008065622793174, + "loss": 1.5732, + "step": 8823 + }, + { + "epoch": 0.800580656868082, + "grad_norm": 0.10121178142831389, + "learning_rate": 0.0001007181081952241, + "loss": 1.5539, + "step": 8824 + }, + { + "epoch": 0.8006713845037199, + "grad_norm": 0.10161385804337614, + "learning_rate": 0.00010062968858918109, + "loss": 1.529, + "step": 8825 + }, + { + "epoch": 0.8007621121393577, + "grad_norm": 0.10644437722229379, + "learning_rate": 0.00010054130346882334, + "loss": 1.5989, + "step": 8826 + }, + { + "epoch": 0.8008528397749954, + "grad_norm": 0.10682409880675751, + "learning_rate": 0.00010045295284178268, + "loss": 1.5737, + "step": 8827 + }, + { + "epoch": 0.8009435674106333, + "grad_norm": 0.10410310585574721, + "learning_rate": 0.00010036463671568868, + "loss": 1.5225, + "step": 8828 + }, + { + "epoch": 0.8010342950462711, + "grad_norm": 0.10208726518727354, + "learning_rate": 0.00010027635509816718, + "loss": 1.5131, + "step": 8829 + }, + { + "epoch": 0.8011250226819089, + "grad_norm": 0.10283841423718691, + "learning_rate": 0.00010018810799684124, + "loss": 1.5379, + "step": 8830 + }, + { + "epoch": 0.8012157503175468, + "grad_norm": 0.10869372347839681, + "learning_rate": 0.00010009989541933118, + "loss": 1.5827, + "step": 8831 + }, + { + "epoch": 0.8013064779531845, + "grad_norm": 0.10909762980537391, + "learning_rate": 0.00010001171737325416, + "loss": 1.5817, + "step": 8832 + }, + { + "epoch": 0.8013972055888223, + "grad_norm": 0.10246052200766316, + "learning_rate": 9.992357386622425e-05, + "loss": 1.5486, + "step": 8833 + }, + { + "epoch": 0.8014879332244602, + "grad_norm": 
0.10728190397729816, + "learning_rate": 9.983546490585255e-05, + "loss": 1.5336, + "step": 8834 + }, + { + "epoch": 0.801578660860098, + "grad_norm": 0.10485601503901172, + "learning_rate": 9.974739049974745e-05, + "loss": 1.5304, + "step": 8835 + }, + { + "epoch": 0.8016693884957358, + "grad_norm": 0.10520455598867315, + "learning_rate": 9.965935065551428e-05, + "loss": 1.5357, + "step": 8836 + }, + { + "epoch": 0.8017601161313737, + "grad_norm": 0.10494097070760917, + "learning_rate": 9.957134538075512e-05, + "loss": 1.5062, + "step": 8837 + }, + { + "epoch": 0.8018508437670114, + "grad_norm": 0.10381079122115781, + "learning_rate": 9.94833746830694e-05, + "loss": 1.5367, + "step": 8838 + }, + { + "epoch": 0.8019415714026492, + "grad_norm": 0.10695150206488395, + "learning_rate": 9.939543857005329e-05, + "loss": 1.549, + "step": 8839 + }, + { + "epoch": 0.8020322990382871, + "grad_norm": 0.1056421344949963, + "learning_rate": 9.930753704930001e-05, + "loss": 1.4984, + "step": 8840 + }, + { + "epoch": 0.8021230266739249, + "grad_norm": 0.10971373664504812, + "learning_rate": 9.921967012840005e-05, + "loss": 1.5498, + "step": 8841 + }, + { + "epoch": 0.8022137543095627, + "grad_norm": 0.10395378446947043, + "learning_rate": 9.913183781494067e-05, + "loss": 1.5774, + "step": 8842 + }, + { + "epoch": 0.8023044819452005, + "grad_norm": 0.10259480329746307, + "learning_rate": 9.904404011650603e-05, + "loss": 1.57, + "step": 8843 + }, + { + "epoch": 0.8023952095808383, + "grad_norm": 0.10444621118084053, + "learning_rate": 9.895627704067766e-05, + "loss": 1.5503, + "step": 8844 + }, + { + "epoch": 0.8024859372164761, + "grad_norm": 0.10383863696789654, + "learning_rate": 9.886854859503397e-05, + "loss": 1.525, + "step": 8845 + }, + { + "epoch": 0.802576664852114, + "grad_norm": 0.10564395238255754, + "learning_rate": 9.878085478715021e-05, + "loss": 1.5276, + "step": 8846 + }, + { + "epoch": 0.8026673924877518, + "grad_norm": 0.10544726547687713, + "learning_rate": 9.869319562459866e-05, + "loss": 1.5563, + "step": 8847 + }, + { + "epoch": 0.8027581201233895, + "grad_norm": 0.10494648299210833, + "learning_rate": 9.8605571114949e-05, + "loss": 1.5403, + "step": 8848 + }, + { + "epoch": 0.8028488477590274, + "grad_norm": 0.10643344635082694, + "learning_rate": 9.851798126576739e-05, + "loss": 1.5496, + "step": 8849 + }, + { + "epoch": 0.8029395753946652, + "grad_norm": 0.0991116965031542, + "learning_rate": 9.843042608461711e-05, + "loss": 1.5661, + "step": 8850 + }, + { + "epoch": 0.803030303030303, + "grad_norm": 0.10710574515961853, + "learning_rate": 9.834290557905872e-05, + "loss": 1.5503, + "step": 8851 + }, + { + "epoch": 0.8031210306659409, + "grad_norm": 0.1028944305649683, + "learning_rate": 9.825541975664981e-05, + "loss": 1.5647, + "step": 8852 + }, + { + "epoch": 0.8032117583015786, + "grad_norm": 0.10353899656712404, + "learning_rate": 9.816796862494459e-05, + "loss": 1.5285, + "step": 8853 + }, + { + "epoch": 0.8033024859372164, + "grad_norm": 0.10626042736715856, + "learning_rate": 9.808055219149436e-05, + "loss": 1.5766, + "step": 8854 + }, + { + "epoch": 0.8033932135728543, + "grad_norm": 0.10725950254779151, + "learning_rate": 9.799317046384787e-05, + "loss": 1.543, + "step": 8855 + }, + { + "epoch": 0.8034839412084921, + "grad_norm": 0.10472752867916953, + "learning_rate": 9.790582344955035e-05, + "loss": 1.5789, + "step": 8856 + }, + { + "epoch": 0.8035746688441299, + "grad_norm": 0.10507042128688085, + "learning_rate": 9.781851115614404e-05, + "loss": 1.5511, + "step": 8857 
+ }, + { + "epoch": 0.8036653964797678, + "grad_norm": 0.10394857800657553, + "learning_rate": 9.773123359116875e-05, + "loss": 1.5502, + "step": 8858 + }, + { + "epoch": 0.8037561241154055, + "grad_norm": 0.10522299531132949, + "learning_rate": 9.764399076216057e-05, + "loss": 1.5427, + "step": 8859 + }, + { + "epoch": 0.8038468517510434, + "grad_norm": 0.10093804446847893, + "learning_rate": 9.755678267665325e-05, + "loss": 1.5439, + "step": 8860 + }, + { + "epoch": 0.8039375793866812, + "grad_norm": 0.10766458176383903, + "learning_rate": 9.74696093421769e-05, + "loss": 1.5458, + "step": 8861 + }, + { + "epoch": 0.804028307022319, + "grad_norm": 0.1042576679163997, + "learning_rate": 9.738247076625928e-05, + "loss": 1.5632, + "step": 8862 + }, + { + "epoch": 0.8041190346579569, + "grad_norm": 0.10460608996110886, + "learning_rate": 9.729536695642461e-05, + "loss": 1.5353, + "step": 8863 + }, + { + "epoch": 0.8042097622935946, + "grad_norm": 0.10417249454104076, + "learning_rate": 9.720829792019426e-05, + "loss": 1.582, + "step": 8864 + }, + { + "epoch": 0.8043004899292324, + "grad_norm": 0.10704590826184207, + "learning_rate": 9.712126366508683e-05, + "loss": 1.5759, + "step": 8865 + }, + { + "epoch": 0.8043912175648703, + "grad_norm": 0.10771589245663739, + "learning_rate": 9.703426419861755e-05, + "loss": 1.5626, + "step": 8866 + }, + { + "epoch": 0.8044819452005081, + "grad_norm": 0.1064487490692461, + "learning_rate": 9.694729952829911e-05, + "loss": 1.5152, + "step": 8867 + }, + { + "epoch": 0.8045726728361459, + "grad_norm": 0.10310361405737413, + "learning_rate": 9.686036966164063e-05, + "loss": 1.5175, + "step": 8868 + }, + { + "epoch": 0.8046634004717838, + "grad_norm": 0.10718326989026054, + "learning_rate": 9.677347460614871e-05, + "loss": 1.5751, + "step": 8869 + }, + { + "epoch": 0.8047541281074215, + "grad_norm": 0.10733340072750838, + "learning_rate": 9.668661436932674e-05, + "loss": 1.5614, + "step": 8870 + }, + { + "epoch": 0.8048448557430593, + "grad_norm": 0.10573614332451889, + "learning_rate": 9.659978895867494e-05, + "loss": 1.5365, + "step": 8871 + }, + { + "epoch": 0.8049355833786972, + "grad_norm": 0.10308534769254732, + "learning_rate": 9.651299838169092e-05, + "loss": 1.5191, + "step": 8872 + }, + { + "epoch": 0.805026311014335, + "grad_norm": 0.10472480638143776, + "learning_rate": 9.642624264586896e-05, + "loss": 1.5499, + "step": 8873 + }, + { + "epoch": 0.8051170386499727, + "grad_norm": 0.1038401290704068, + "learning_rate": 9.63395217587003e-05, + "loss": 1.5658, + "step": 8874 + }, + { + "epoch": 0.8052077662856106, + "grad_norm": 0.1046064171357177, + "learning_rate": 9.625283572767347e-05, + "loss": 1.5116, + "step": 8875 + }, + { + "epoch": 0.8052984939212484, + "grad_norm": 0.1081179797097731, + "learning_rate": 9.616618456027387e-05, + "loss": 1.5758, + "step": 8876 + }, + { + "epoch": 0.8053892215568862, + "grad_norm": 0.10262363869217635, + "learning_rate": 9.607956826398379e-05, + "loss": 1.5182, + "step": 8877 + }, + { + "epoch": 0.8054799491925241, + "grad_norm": 0.10131515866620674, + "learning_rate": 9.599298684628233e-05, + "loss": 1.5251, + "step": 8878 + }, + { + "epoch": 0.8055706768281619, + "grad_norm": 0.10201154538558402, + "learning_rate": 9.590644031464618e-05, + "loss": 1.5207, + "step": 8879 + }, + { + "epoch": 0.8056614044637996, + "grad_norm": 0.10311414841676904, + "learning_rate": 9.581992867654843e-05, + "loss": 1.5507, + "step": 8880 + }, + { + "epoch": 0.8057521320994375, + "grad_norm": 0.10257883505159358, + 
"learning_rate": 9.573345193945931e-05, + "loss": 1.4834, + "step": 8881 + }, + { + "epoch": 0.8058428597350753, + "grad_norm": 0.10060095627362485, + "learning_rate": 9.56470101108462e-05, + "loss": 1.5653, + "step": 8882 + }, + { + "epoch": 0.8059335873707131, + "grad_norm": 0.10559380617432457, + "learning_rate": 9.556060319817344e-05, + "loss": 1.5135, + "step": 8883 + }, + { + "epoch": 0.806024315006351, + "grad_norm": 0.10419612100297963, + "learning_rate": 9.547423120890224e-05, + "loss": 1.5341, + "step": 8884 + }, + { + "epoch": 0.8061150426419887, + "grad_norm": 0.10560046119001376, + "learning_rate": 9.538789415049071e-05, + "loss": 1.5135, + "step": 8885 + }, + { + "epoch": 0.8062057702776265, + "grad_norm": 0.1064282416356257, + "learning_rate": 9.530159203039423e-05, + "loss": 1.5134, + "step": 8886 + }, + { + "epoch": 0.8062964979132644, + "grad_norm": 0.10618369049398892, + "learning_rate": 9.521532485606493e-05, + "loss": 1.5328, + "step": 8887 + }, + { + "epoch": 0.8063872255489022, + "grad_norm": 0.1006267017458691, + "learning_rate": 9.512909263495184e-05, + "loss": 1.5065, + "step": 8888 + }, + { + "epoch": 0.80647795318454, + "grad_norm": 0.10954717065978067, + "learning_rate": 9.504289537450145e-05, + "loss": 1.5705, + "step": 8889 + }, + { + "epoch": 0.8065686808201779, + "grad_norm": 0.10577304607888355, + "learning_rate": 9.495673308215657e-05, + "loss": 1.5388, + "step": 8890 + }, + { + "epoch": 0.8066594084558156, + "grad_norm": 0.10538242435051198, + "learning_rate": 9.487060576535761e-05, + "loss": 1.5305, + "step": 8891 + }, + { + "epoch": 0.8067501360914534, + "grad_norm": 0.10505978105437626, + "learning_rate": 9.478451343154143e-05, + "loss": 1.5019, + "step": 8892 + }, + { + "epoch": 0.8068408637270913, + "grad_norm": 0.10821232298947701, + "learning_rate": 9.469845608814237e-05, + "loss": 1.5406, + "step": 8893 + }, + { + "epoch": 0.8069315913627291, + "grad_norm": 0.1057503584698374, + "learning_rate": 9.461243374259137e-05, + "loss": 1.539, + "step": 8894 + }, + { + "epoch": 0.8070223189983668, + "grad_norm": 0.10186267169447655, + "learning_rate": 9.452644640231634e-05, + "loss": 1.5389, + "step": 8895 + }, + { + "epoch": 0.8071130466340047, + "grad_norm": 0.10701601715012254, + "learning_rate": 9.444049407474253e-05, + "loss": 1.5858, + "step": 8896 + }, + { + "epoch": 0.8072037742696425, + "grad_norm": 0.10276128324841571, + "learning_rate": 9.435457676729187e-05, + "loss": 1.54, + "step": 8897 + }, + { + "epoch": 0.8072945019052804, + "grad_norm": 0.10340520465177307, + "learning_rate": 9.426869448738295e-05, + "loss": 1.5989, + "step": 8898 + }, + { + "epoch": 0.8073852295409182, + "grad_norm": 0.10851407321369667, + "learning_rate": 9.418284724243237e-05, + "loss": 1.5621, + "step": 8899 + }, + { + "epoch": 0.807475957176556, + "grad_norm": 0.10593774672292065, + "learning_rate": 9.409703503985279e-05, + "loss": 1.5514, + "step": 8900 + }, + { + "epoch": 0.8075666848121938, + "grad_norm": 0.10328119655833728, + "learning_rate": 9.4011257887054e-05, + "loss": 1.5883, + "step": 8901 + }, + { + "epoch": 0.8076574124478316, + "grad_norm": 0.10301118552559266, + "learning_rate": 9.392551579144282e-05, + "loss": 1.5621, + "step": 8902 + }, + { + "epoch": 0.8077481400834694, + "grad_norm": 0.10498822716591255, + "learning_rate": 9.38398087604233e-05, + "loss": 1.6033, + "step": 8903 + }, + { + "epoch": 0.8078388677191073, + "grad_norm": 0.10509422189373388, + "learning_rate": 9.375413680139616e-05, + "loss": 1.5337, + "step": 8904 + }, + { + "epoch": 
0.8079295953547451, + "grad_norm": 0.10200510836339167, + "learning_rate": 9.366849992175896e-05, + "loss": 1.5652, + "step": 8905 + }, + { + "epoch": 0.8080203229903828, + "grad_norm": 0.10681303089215434, + "learning_rate": 9.358289812890691e-05, + "loss": 1.5417, + "step": 8906 + }, + { + "epoch": 0.8081110506260207, + "grad_norm": 0.10763854643400435, + "learning_rate": 9.349733143023154e-05, + "loss": 1.5279, + "step": 8907 + }, + { + "epoch": 0.8082017782616585, + "grad_norm": 0.1059862125249326, + "learning_rate": 9.341179983312131e-05, + "loss": 1.578, + "step": 8908 + }, + { + "epoch": 0.8082925058972963, + "grad_norm": 0.10636429927528425, + "learning_rate": 9.332630334496229e-05, + "loss": 1.5436, + "step": 8909 + }, + { + "epoch": 0.8083832335329342, + "grad_norm": 0.10278119473035469, + "learning_rate": 9.324084197313686e-05, + "loss": 1.5263, + "step": 8910 + }, + { + "epoch": 0.808473961168572, + "grad_norm": 0.10510342415223012, + "learning_rate": 9.315541572502473e-05, + "loss": 1.5135, + "step": 8911 + }, + { + "epoch": 0.8085646888042097, + "grad_norm": 0.10530558045222897, + "learning_rate": 9.307002460800223e-05, + "loss": 1.5338, + "step": 8912 + }, + { + "epoch": 0.8086554164398476, + "grad_norm": 0.10119270877787039, + "learning_rate": 9.298466862944316e-05, + "loss": 1.5069, + "step": 8913 + }, + { + "epoch": 0.8087461440754854, + "grad_norm": 0.10512833306813406, + "learning_rate": 9.289934779671799e-05, + "loss": 1.5667, + "step": 8914 + }, + { + "epoch": 0.8088368717111232, + "grad_norm": 0.10603267188662042, + "learning_rate": 9.281406211719406e-05, + "loss": 1.5418, + "step": 8915 + }, + { + "epoch": 0.8089275993467611, + "grad_norm": 0.10501671750863438, + "learning_rate": 9.272881159823599e-05, + "loss": 1.5137, + "step": 8916 + }, + { + "epoch": 0.8090183269823988, + "grad_norm": 0.10755986033020343, + "learning_rate": 9.264359624720509e-05, + "loss": 1.5184, + "step": 8917 + }, + { + "epoch": 0.8091090546180366, + "grad_norm": 0.10805765647854663, + "learning_rate": 9.255841607145949e-05, + "loss": 1.5444, + "step": 8918 + }, + { + "epoch": 0.8091997822536745, + "grad_norm": 0.10504662538199545, + "learning_rate": 9.24732710783549e-05, + "loss": 1.5313, + "step": 8919 + }, + { + "epoch": 0.8092905098893123, + "grad_norm": 0.10525558449224066, + "learning_rate": 9.238816127524341e-05, + "loss": 1.5594, + "step": 8920 + }, + { + "epoch": 0.8093812375249501, + "grad_norm": 0.10509821234542374, + "learning_rate": 9.230308666947413e-05, + "loss": 1.5616, + "step": 8921 + }, + { + "epoch": 0.809471965160588, + "grad_norm": 0.1030869784814921, + "learning_rate": 9.221804726839339e-05, + "loss": 1.5575, + "step": 8922 + }, + { + "epoch": 0.8095626927962257, + "grad_norm": 0.10536391390091743, + "learning_rate": 9.213304307934456e-05, + "loss": 1.5344, + "step": 8923 + }, + { + "epoch": 0.8096534204318635, + "grad_norm": 0.1065193877389415, + "learning_rate": 9.204807410966754e-05, + "loss": 1.5225, + "step": 8924 + }, + { + "epoch": 0.8097441480675014, + "grad_norm": 0.10194528685256889, + "learning_rate": 9.196314036669929e-05, + "loss": 1.5392, + "step": 8925 + }, + { + "epoch": 0.8098348757031392, + "grad_norm": 0.10253832846895959, + "learning_rate": 9.187824185777415e-05, + "loss": 1.5472, + "step": 8926 + }, + { + "epoch": 0.809925603338777, + "grad_norm": 0.10462094210926025, + "learning_rate": 9.179337859022297e-05, + "loss": 1.5833, + "step": 8927 + }, + { + "epoch": 0.8100163309744148, + "grad_norm": 0.10364072888082776, + "learning_rate": 
9.170855057137362e-05, + "loss": 1.5288, + "step": 8928 + }, + { + "epoch": 0.8101070586100526, + "grad_norm": 0.10735603488695238, + "learning_rate": 9.162375780855109e-05, + "loss": 1.5631, + "step": 8929 + }, + { + "epoch": 0.8101977862456904, + "grad_norm": 0.10230311680156805, + "learning_rate": 9.153900030907735e-05, + "loss": 1.5424, + "step": 8930 + }, + { + "epoch": 0.8102885138813283, + "grad_norm": 0.10478824998909035, + "learning_rate": 9.145427808027118e-05, + "loss": 1.5464, + "step": 8931 + }, + { + "epoch": 0.810379241516966, + "grad_norm": 0.1067821842233867, + "learning_rate": 9.136959112944815e-05, + "loss": 1.5185, + "step": 8932 + }, + { + "epoch": 0.8104699691526038, + "grad_norm": 0.1015264628494603, + "learning_rate": 9.12849394639213e-05, + "loss": 1.56, + "step": 8933 + }, + { + "epoch": 0.8105606967882417, + "grad_norm": 0.10582685096261987, + "learning_rate": 9.120032309100018e-05, + "loss": 1.5469, + "step": 8934 + }, + { + "epoch": 0.8106514244238795, + "grad_norm": 0.10647862032212924, + "learning_rate": 9.111574201799128e-05, + "loss": 1.5433, + "step": 8935 + }, + { + "epoch": 0.8107421520595173, + "grad_norm": 0.1025910089257415, + "learning_rate": 9.103119625219841e-05, + "loss": 1.526, + "step": 8936 + }, + { + "epoch": 0.8108328796951552, + "grad_norm": 0.1054811227128268, + "learning_rate": 9.094668580092192e-05, + "loss": 1.5186, + "step": 8937 + }, + { + "epoch": 0.8109236073307929, + "grad_norm": 0.10504807699026944, + "learning_rate": 9.08622106714595e-05, + "loss": 1.5466, + "step": 8938 + }, + { + "epoch": 0.8110143349664308, + "grad_norm": 0.1086746590264572, + "learning_rate": 9.07777708711054e-05, + "loss": 1.5664, + "step": 8939 + }, + { + "epoch": 0.8111050626020686, + "grad_norm": 0.10133945720465863, + "learning_rate": 9.06933664071512e-05, + "loss": 1.5399, + "step": 8940 + }, + { + "epoch": 0.8111957902377064, + "grad_norm": 0.10889042738609885, + "learning_rate": 9.060899728688515e-05, + "loss": 1.5851, + "step": 8941 + }, + { + "epoch": 0.8112865178733443, + "grad_norm": 0.1055193935693028, + "learning_rate": 9.052466351759242e-05, + "loss": 1.5133, + "step": 8942 + }, + { + "epoch": 0.811377245508982, + "grad_norm": 0.11371161951687661, + "learning_rate": 9.044036510655546e-05, + "loss": 1.5315, + "step": 8943 + }, + { + "epoch": 0.8114679731446198, + "grad_norm": 0.10820900807636014, + "learning_rate": 9.035610206105316e-05, + "loss": 1.5282, + "step": 8944 + }, + { + "epoch": 0.8115587007802577, + "grad_norm": 0.10372077807101195, + "learning_rate": 9.027187438836198e-05, + "loss": 1.5081, + "step": 8945 + }, + { + "epoch": 0.8116494284158955, + "grad_norm": 0.10811463946940233, + "learning_rate": 9.018768209575472e-05, + "loss": 1.5452, + "step": 8946 + }, + { + "epoch": 0.8117401560515333, + "grad_norm": 0.10490425740079107, + "learning_rate": 9.010352519050163e-05, + "loss": 1.5528, + "step": 8947 + }, + { + "epoch": 0.8118308836871712, + "grad_norm": 0.10617463889763558, + "learning_rate": 9.001940367986955e-05, + "loss": 1.5563, + "step": 8948 + }, + { + "epoch": 0.8119216113228089, + "grad_norm": 0.10474882839865891, + "learning_rate": 8.993531757112227e-05, + "loss": 1.5368, + "step": 8949 + }, + { + "epoch": 0.8120123389584467, + "grad_norm": 0.10655073741487096, + "learning_rate": 8.985126687152084e-05, + "loss": 1.5393, + "step": 8950 + }, + { + "epoch": 0.8121030665940846, + "grad_norm": 0.10563962582625307, + "learning_rate": 8.976725158832305e-05, + "loss": 1.5512, + "step": 8951 + }, + { + "epoch": 0.8121937942297224, 
+ "grad_norm": 0.10670455133045945, + "learning_rate": 8.968327172878332e-05, + "loss": 1.5773, + "step": 8952 + }, + { + "epoch": 0.8122845218653602, + "grad_norm": 0.10266311304334201, + "learning_rate": 8.959932730015363e-05, + "loss": 1.5557, + "step": 8953 + }, + { + "epoch": 0.812375249500998, + "grad_norm": 0.10694899140042248, + "learning_rate": 8.951541830968263e-05, + "loss": 1.5832, + "step": 8954 + }, + { + "epoch": 0.8124659771366358, + "grad_norm": 0.10339699854802016, + "learning_rate": 8.943154476461573e-05, + "loss": 1.5991, + "step": 8955 + }, + { + "epoch": 0.8125567047722736, + "grad_norm": 0.10252572784325523, + "learning_rate": 8.934770667219533e-05, + "loss": 1.5311, + "step": 8956 + }, + { + "epoch": 0.8126474324079115, + "grad_norm": 0.10325035000227971, + "learning_rate": 8.926390403966111e-05, + "loss": 1.5768, + "step": 8957 + }, + { + "epoch": 0.8127381600435493, + "grad_norm": 0.10506609493619362, + "learning_rate": 8.91801368742493e-05, + "loss": 1.5426, + "step": 8958 + }, + { + "epoch": 0.812828887679187, + "grad_norm": 0.10754978698300999, + "learning_rate": 8.909640518319312e-05, + "loss": 1.59, + "step": 8959 + }, + { + "epoch": 0.8129196153148249, + "grad_norm": 0.10564355491183243, + "learning_rate": 8.901270897372288e-05, + "loss": 1.5172, + "step": 8960 + }, + { + "epoch": 0.8130103429504627, + "grad_norm": 0.10671139299553559, + "learning_rate": 8.892904825306597e-05, + "loss": 1.5403, + "step": 8961 + }, + { + "epoch": 0.8131010705861005, + "grad_norm": 0.10435197790323676, + "learning_rate": 8.884542302844628e-05, + "loss": 1.5253, + "step": 8962 + }, + { + "epoch": 0.8131917982217384, + "grad_norm": 0.10700739788833633, + "learning_rate": 8.876183330708482e-05, + "loss": 1.4858, + "step": 8963 + }, + { + "epoch": 0.8132825258573761, + "grad_norm": 0.10516668149870813, + "learning_rate": 8.86782790961998e-05, + "loss": 1.5181, + "step": 8964 + }, + { + "epoch": 0.8133732534930139, + "grad_norm": 0.10450063410766111, + "learning_rate": 8.859476040300596e-05, + "loss": 1.5882, + "step": 8965 + }, + { + "epoch": 0.8134639811286518, + "grad_norm": 0.10601208534567023, + "learning_rate": 8.851127723471508e-05, + "loss": 1.5227, + "step": 8966 + }, + { + "epoch": 0.8135547087642896, + "grad_norm": 0.1057837668699104, + "learning_rate": 8.842782959853617e-05, + "loss": 1.5233, + "step": 8967 + }, + { + "epoch": 0.8136454363999274, + "grad_norm": 0.10335548679073532, + "learning_rate": 8.834441750167477e-05, + "loss": 1.5661, + "step": 8968 + }, + { + "epoch": 0.8137361640355653, + "grad_norm": 0.10467000788720421, + "learning_rate": 8.826104095133363e-05, + "loss": 1.572, + "step": 8969 + }, + { + "epoch": 0.813826891671203, + "grad_norm": 0.10355289603008826, + "learning_rate": 8.81776999547122e-05, + "loss": 1.5419, + "step": 8970 + }, + { + "epoch": 0.8139176193068408, + "grad_norm": 0.1064172769247637, + "learning_rate": 8.809439451900725e-05, + "loss": 1.5131, + "step": 8971 + }, + { + "epoch": 0.8140083469424787, + "grad_norm": 0.10697758500703446, + "learning_rate": 8.801112465141198e-05, + "loss": 1.6007, + "step": 8972 + }, + { + "epoch": 0.8140990745781165, + "grad_norm": 0.10803650345692745, + "learning_rate": 8.792789035911669e-05, + "loss": 1.5176, + "step": 8973 + }, + { + "epoch": 0.8141898022137543, + "grad_norm": 0.10212215805698885, + "learning_rate": 8.784469164930897e-05, + "loss": 1.5313, + "step": 8974 + }, + { + "epoch": 0.8142805298493921, + "grad_norm": 0.10286435653063186, + "learning_rate": 8.776152852917285e-05, + "loss": 
1.5314, + "step": 8975 + }, + { + "epoch": 0.8143712574850299, + "grad_norm": 0.10344342366936933, + "learning_rate": 8.767840100588926e-05, + "loss": 1.5362, + "step": 8976 + }, + { + "epoch": 0.8144619851206678, + "grad_norm": 0.11213449078178363, + "learning_rate": 8.759530908663671e-05, + "loss": 1.5382, + "step": 8977 + }, + { + "epoch": 0.8145527127563056, + "grad_norm": 0.1025885921659639, + "learning_rate": 8.751225277859004e-05, + "loss": 1.5062, + "step": 8978 + }, + { + "epoch": 0.8146434403919434, + "grad_norm": 0.10224240743458042, + "learning_rate": 8.742923208892117e-05, + "loss": 1.5311, + "step": 8979 + }, + { + "epoch": 0.8147341680275813, + "grad_norm": 0.10183260918836357, + "learning_rate": 8.734624702479877e-05, + "loss": 1.5052, + "step": 8980 + }, + { + "epoch": 0.814824895663219, + "grad_norm": 0.10462366181934807, + "learning_rate": 8.726329759338886e-05, + "loss": 1.5258, + "step": 8981 + }, + { + "epoch": 0.8149156232988568, + "grad_norm": 0.10358765972242577, + "learning_rate": 8.718038380185406e-05, + "loss": 1.5559, + "step": 8982 + }, + { + "epoch": 0.8150063509344947, + "grad_norm": 0.10807596392235022, + "learning_rate": 8.70975056573537e-05, + "loss": 1.51, + "step": 8983 + }, + { + "epoch": 0.8150970785701325, + "grad_norm": 0.10717069215549858, + "learning_rate": 8.701466316704482e-05, + "loss": 1.5156, + "step": 8984 + }, + { + "epoch": 0.8151878062057702, + "grad_norm": 0.1076232582271438, + "learning_rate": 8.693185633808064e-05, + "loss": 1.5878, + "step": 8985 + }, + { + "epoch": 0.8152785338414081, + "grad_norm": 0.10515123503234369, + "learning_rate": 8.684908517761147e-05, + "loss": 1.5003, + "step": 8986 + }, + { + "epoch": 0.8153692614770459, + "grad_norm": 0.1040746389868325, + "learning_rate": 8.676634969278474e-05, + "loss": 1.5333, + "step": 8987 + }, + { + "epoch": 0.8154599891126837, + "grad_norm": 0.10520659743526801, + "learning_rate": 8.668364989074468e-05, + "loss": 1.5608, + "step": 8988 + }, + { + "epoch": 0.8155507167483216, + "grad_norm": 0.10704753112126028, + "learning_rate": 8.660098577863235e-05, + "loss": 1.5346, + "step": 8989 + }, + { + "epoch": 0.8156414443839594, + "grad_norm": 0.1078867624983181, + "learning_rate": 8.65183573635857e-05, + "loss": 1.5358, + "step": 8990 + }, + { + "epoch": 0.8157321720195971, + "grad_norm": 0.1095395916423944, + "learning_rate": 8.643576465273983e-05, + "loss": 1.5592, + "step": 8991 + }, + { + "epoch": 0.815822899655235, + "grad_norm": 0.1017889907697321, + "learning_rate": 8.635320765322674e-05, + "loss": 1.5502, + "step": 8992 + }, + { + "epoch": 0.8159136272908728, + "grad_norm": 0.10244848988684968, + "learning_rate": 8.627068637217505e-05, + "loss": 1.4954, + "step": 8993 + }, + { + "epoch": 0.8160043549265106, + "grad_norm": 0.10178917426743489, + "learning_rate": 8.618820081671064e-05, + "loss": 1.4936, + "step": 8994 + }, + { + "epoch": 0.8160950825621485, + "grad_norm": 0.10701825499993656, + "learning_rate": 8.610575099395613e-05, + "loss": 1.5645, + "step": 8995 + }, + { + "epoch": 0.8161858101977862, + "grad_norm": 0.10200753627973003, + "learning_rate": 8.602333691103087e-05, + "loss": 1.4916, + "step": 8996 + }, + { + "epoch": 0.816276537833424, + "grad_norm": 0.10764869307255338, + "learning_rate": 8.594095857505163e-05, + "loss": 1.5604, + "step": 8997 + }, + { + "epoch": 0.8163672654690619, + "grad_norm": 0.10095775836929602, + "learning_rate": 8.58586159931316e-05, + "loss": 1.5023, + "step": 8998 + }, + { + "epoch": 0.8164579931046997, + "grad_norm": 
0.10139549352319147, + "learning_rate": 8.577630917238106e-05, + "loss": 1.5213, + "step": 8999 + }, + { + "epoch": 0.8165487207403375, + "grad_norm": 0.10257612635692981, + "learning_rate": 8.569403811990722e-05, + "loss": 1.547, + "step": 9000 + }, + { + "epoch": 0.8166394483759754, + "grad_norm": 0.10588703207932829, + "learning_rate": 8.56118028428144e-05, + "loss": 1.5391, + "step": 9001 + }, + { + "epoch": 0.8167301760116131, + "grad_norm": 0.10266044392567039, + "learning_rate": 8.552960334820347e-05, + "loss": 1.5681, + "step": 9002 + }, + { + "epoch": 0.8168209036472509, + "grad_norm": 0.10518482095743104, + "learning_rate": 8.544743964317225e-05, + "loss": 1.5199, + "step": 9003 + }, + { + "epoch": 0.8169116312828888, + "grad_norm": 0.10760022227511731, + "learning_rate": 8.536531173481588e-05, + "loss": 1.5285, + "step": 9004 + }, + { + "epoch": 0.8170023589185266, + "grad_norm": 0.10708937200295068, + "learning_rate": 8.528321963022589e-05, + "loss": 1.5265, + "step": 9005 + }, + { + "epoch": 0.8170930865541643, + "grad_norm": 0.10382249136299156, + "learning_rate": 8.520116333649091e-05, + "loss": 1.5452, + "step": 9006 + }, + { + "epoch": 0.8171838141898022, + "grad_norm": 0.10335938251643578, + "learning_rate": 8.51191428606966e-05, + "loss": 1.5936, + "step": 9007 + }, + { + "epoch": 0.81727454182544, + "grad_norm": 0.1024184024071563, + "learning_rate": 8.503715820992558e-05, + "loss": 1.5299, + "step": 9008 + }, + { + "epoch": 0.8173652694610778, + "grad_norm": 0.10470774225959771, + "learning_rate": 8.495520939125712e-05, + "loss": 1.5456, + "step": 9009 + }, + { + "epoch": 0.8174559970967157, + "grad_norm": 0.10180010477794581, + "learning_rate": 8.487329641176739e-05, + "loss": 1.5348, + "step": 9010 + }, + { + "epoch": 0.8175467247323535, + "grad_norm": 0.10455961248467756, + "learning_rate": 8.479141927852979e-05, + "loss": 1.5111, + "step": 9011 + }, + { + "epoch": 0.8176374523679912, + "grad_norm": 0.09707635846182432, + "learning_rate": 8.470957799861433e-05, + "loss": 1.5254, + "step": 9012 + }, + { + "epoch": 0.8177281800036291, + "grad_norm": 0.10214711081201101, + "learning_rate": 8.462777257908793e-05, + "loss": 1.4909, + "step": 9013 + }, + { + "epoch": 0.8178189076392669, + "grad_norm": 0.10574938503088657, + "learning_rate": 8.454600302701465e-05, + "loss": 1.5617, + "step": 9014 + }, + { + "epoch": 0.8179096352749048, + "grad_norm": 0.10481556907964625, + "learning_rate": 8.446426934945517e-05, + "loss": 1.5768, + "step": 9015 + }, + { + "epoch": 0.8180003629105426, + "grad_norm": 0.10228230955391002, + "learning_rate": 8.43825715534674e-05, + "loss": 1.5514, + "step": 9016 + }, + { + "epoch": 0.8180910905461803, + "grad_norm": 0.10506599619842613, + "learning_rate": 8.43009096461057e-05, + "loss": 1.5839, + "step": 9017 + }, + { + "epoch": 0.8181818181818182, + "grad_norm": 0.10140644251462026, + "learning_rate": 8.421928363442183e-05, + "loss": 1.5365, + "step": 9018 + }, + { + "epoch": 0.818272545817456, + "grad_norm": 0.10670681987549, + "learning_rate": 8.413769352546408e-05, + "loss": 1.584, + "step": 9019 + }, + { + "epoch": 0.8183632734530938, + "grad_norm": 0.10448203328979107, + "learning_rate": 8.40561393262777e-05, + "loss": 1.5463, + "step": 9020 + }, + { + "epoch": 0.8184540010887317, + "grad_norm": 0.10254304523870246, + "learning_rate": 8.397462104390507e-05, + "loss": 1.5095, + "step": 9021 + }, + { + "epoch": 0.8185447287243695, + "grad_norm": 0.10470997836508952, + "learning_rate": 8.389313868538517e-05, + "loss": 1.5159, + "step": 9022 
+ }, + { + "epoch": 0.8186354563600072, + "grad_norm": 0.10534014373345668, + "learning_rate": 8.381169225775415e-05, + "loss": 1.5464, + "step": 9023 + }, + { + "epoch": 0.8187261839956451, + "grad_norm": 0.1055564625077261, + "learning_rate": 8.373028176804475e-05, + "loss": 1.5141, + "step": 9024 + }, + { + "epoch": 0.8188169116312829, + "grad_norm": 0.10459781329989366, + "learning_rate": 8.3648907223287e-05, + "loss": 1.5397, + "step": 9025 + }, + { + "epoch": 0.8189076392669207, + "grad_norm": 0.10457206765771453, + "learning_rate": 8.356756863050741e-05, + "loss": 1.5355, + "step": 9026 + }, + { + "epoch": 0.8189983669025586, + "grad_norm": 0.10640408411717432, + "learning_rate": 8.34862659967296e-05, + "loss": 1.5292, + "step": 9027 + }, + { + "epoch": 0.8190890945381963, + "grad_norm": 0.10504881273985339, + "learning_rate": 8.340499932897416e-05, + "loss": 1.558, + "step": 9028 + }, + { + "epoch": 0.8191798221738341, + "grad_norm": 0.10334532848763518, + "learning_rate": 8.33237686342585e-05, + "loss": 1.5025, + "step": 9029 + }, + { + "epoch": 0.819270549809472, + "grad_norm": 0.11487326401231694, + "learning_rate": 8.324257391959666e-05, + "loss": 1.5775, + "step": 9030 + }, + { + "epoch": 0.8193612774451098, + "grad_norm": 0.10593692855207927, + "learning_rate": 8.316141519199999e-05, + "loss": 1.5461, + "step": 9031 + }, + { + "epoch": 0.8194520050807476, + "grad_norm": 0.10188703106501303, + "learning_rate": 8.30802924584767e-05, + "loss": 1.5551, + "step": 9032 + }, + { + "epoch": 0.8195427327163854, + "grad_norm": 0.1056286116903141, + "learning_rate": 8.299920572603158e-05, + "loss": 1.5273, + "step": 9033 + }, + { + "epoch": 0.8196334603520232, + "grad_norm": 0.10806208890862896, + "learning_rate": 8.291815500166639e-05, + "loss": 1.5063, + "step": 9034 + }, + { + "epoch": 0.819724187987661, + "grad_norm": 0.10404072286162837, + "learning_rate": 8.28371402923801e-05, + "loss": 1.5485, + "step": 9035 + }, + { + "epoch": 0.8198149156232989, + "grad_norm": 0.105938411804023, + "learning_rate": 8.275616160516824e-05, + "loss": 1.5441, + "step": 9036 + }, + { + "epoch": 0.8199056432589367, + "grad_norm": 0.10361006037518723, + "learning_rate": 8.267521894702318e-05, + "loss": 1.537, + "step": 9037 + }, + { + "epoch": 0.8199963708945744, + "grad_norm": 0.10425628537706905, + "learning_rate": 8.259431232493453e-05, + "loss": 1.5637, + "step": 9038 + }, + { + "epoch": 0.8200870985302123, + "grad_norm": 0.10516023970357628, + "learning_rate": 8.25134417458886e-05, + "loss": 1.5526, + "step": 9039 + }, + { + "epoch": 0.8201778261658501, + "grad_norm": 0.10480705804012454, + "learning_rate": 8.243260721686852e-05, + "loss": 1.5333, + "step": 9040 + }, + { + "epoch": 0.8202685538014879, + "grad_norm": 0.1056117221126037, + "learning_rate": 8.23518087448542e-05, + "loss": 1.5324, + "step": 9041 + }, + { + "epoch": 0.8203592814371258, + "grad_norm": 0.10139420667065632, + "learning_rate": 8.227104633682291e-05, + "loss": 1.5096, + "step": 9042 + }, + { + "epoch": 0.8204500090727636, + "grad_norm": 0.10925571302644549, + "learning_rate": 8.21903199997483e-05, + "loss": 1.5565, + "step": 9043 + }, + { + "epoch": 0.8205407367084013, + "grad_norm": 0.10625619867763737, + "learning_rate": 8.210962974060104e-05, + "loss": 1.5035, + "step": 9044 + }, + { + "epoch": 0.8206314643440392, + "grad_norm": 0.1043857788683113, + "learning_rate": 8.202897556634897e-05, + "loss": 1.5107, + "step": 9045 + }, + { + "epoch": 0.820722191979677, + "grad_norm": 0.1024942288736869, + "learning_rate": 
8.194835748395635e-05, + "loss": 1.483, + "step": 9046 + }, + { + "epoch": 0.8208129196153148, + "grad_norm": 0.10693409759779499, + "learning_rate": 8.186777550038477e-05, + "loss": 1.5237, + "step": 9047 + }, + { + "epoch": 0.8209036472509527, + "grad_norm": 0.10350099840496998, + "learning_rate": 8.178722962259227e-05, + "loss": 1.5397, + "step": 9048 + }, + { + "epoch": 0.8209943748865904, + "grad_norm": 0.10846602532487747, + "learning_rate": 8.17067198575343e-05, + "loss": 1.5707, + "step": 9049 + }, + { + "epoch": 0.8210851025222282, + "grad_norm": 0.10306111291275859, + "learning_rate": 8.162624621216269e-05, + "loss": 1.5149, + "step": 9050 + }, + { + "epoch": 0.8211758301578661, + "grad_norm": 0.10442643106108405, + "learning_rate": 8.154580869342626e-05, + "loss": 1.5467, + "step": 9051 + }, + { + "epoch": 0.8212665577935039, + "grad_norm": 0.10428585945963213, + "learning_rate": 8.146540730827106e-05, + "loss": 1.6021, + "step": 9052 + }, + { + "epoch": 0.8213572854291418, + "grad_norm": 0.10529221668752008, + "learning_rate": 8.138504206363961e-05, + "loss": 1.5464, + "step": 9053 + }, + { + "epoch": 0.8214480130647795, + "grad_norm": 0.10092324564384059, + "learning_rate": 8.130471296647124e-05, + "loss": 1.5172, + "step": 9054 + }, + { + "epoch": 0.8215387407004173, + "grad_norm": 0.10056923918285897, + "learning_rate": 8.122442002370284e-05, + "loss": 1.5306, + "step": 9055 + }, + { + "epoch": 0.8216294683360552, + "grad_norm": 0.1067388573082958, + "learning_rate": 8.114416324226748e-05, + "loss": 1.5659, + "step": 9056 + }, + { + "epoch": 0.821720195971693, + "grad_norm": 0.10353154728754378, + "learning_rate": 8.106394262909539e-05, + "loss": 1.5466, + "step": 9057 + }, + { + "epoch": 0.8218109236073308, + "grad_norm": 0.10942266468562008, + "learning_rate": 8.098375819111348e-05, + "loss": 1.5583, + "step": 9058 + }, + { + "epoch": 0.8219016512429687, + "grad_norm": 0.10516701311504421, + "learning_rate": 8.090360993524593e-05, + "loss": 1.5588, + "step": 9059 + }, + { + "epoch": 0.8219923788786064, + "grad_norm": 0.10483255497672567, + "learning_rate": 8.082349786841342e-05, + "loss": 1.5782, + "step": 9060 + }, + { + "epoch": 0.8220831065142442, + "grad_norm": 0.10360539925910683, + "learning_rate": 8.074342199753338e-05, + "loss": 1.519, + "step": 9061 + }, + { + "epoch": 0.8221738341498821, + "grad_norm": 0.10778483872202146, + "learning_rate": 8.066338232952092e-05, + "loss": 1.5159, + "step": 9062 + }, + { + "epoch": 0.8222645617855199, + "grad_norm": 0.10519179766564263, + "learning_rate": 8.058337887128719e-05, + "loss": 1.5668, + "step": 9063 + }, + { + "epoch": 0.8223552894211577, + "grad_norm": 0.10256687995504682, + "learning_rate": 8.05034116297404e-05, + "loss": 1.5256, + "step": 9064 + }, + { + "epoch": 0.8224460170567955, + "grad_norm": 0.10445477630418605, + "learning_rate": 8.042348061178595e-05, + "loss": 1.564, + "step": 9065 + }, + { + "epoch": 0.8225367446924333, + "grad_norm": 0.10332354399001922, + "learning_rate": 8.034358582432578e-05, + "loss": 1.5559, + "step": 9066 + }, + { + "epoch": 0.8226274723280711, + "grad_norm": 0.10015332464580026, + "learning_rate": 8.026372727425884e-05, + "loss": 1.5236, + "step": 9067 + }, + { + "epoch": 0.822718199963709, + "grad_norm": 0.10765699809131812, + "learning_rate": 8.018390496848077e-05, + "loss": 1.5362, + "step": 9068 + }, + { + "epoch": 0.8228089275993468, + "grad_norm": 0.10685801975807316, + "learning_rate": 8.010411891388441e-05, + "loss": 1.5363, + "step": 9069 + }, + { + "epoch": 
0.8228996552349845, + "grad_norm": 0.1074003600293774, + "learning_rate": 8.002436911735939e-05, + "loss": 1.4982, + "step": 9070 + }, + { + "epoch": 0.8229903828706224, + "grad_norm": 0.10889370318895421, + "learning_rate": 7.994465558579184e-05, + "loss": 1.4924, + "step": 9071 + }, + { + "epoch": 0.8230811105062602, + "grad_norm": 0.10277718771578301, + "learning_rate": 7.986497832606532e-05, + "loss": 1.5989, + "step": 9072 + }, + { + "epoch": 0.823171838141898, + "grad_norm": 0.10622902026194034, + "learning_rate": 7.978533734505988e-05, + "loss": 1.5319, + "step": 9073 + }, + { + "epoch": 0.8232625657775359, + "grad_norm": 0.10389022033758337, + "learning_rate": 7.970573264965237e-05, + "loss": 1.5395, + "step": 9074 + }, + { + "epoch": 0.8233532934131736, + "grad_norm": 0.10729912913568647, + "learning_rate": 7.962616424671687e-05, + "loss": 1.5892, + "step": 9075 + }, + { + "epoch": 0.8234440210488114, + "grad_norm": 0.10468719588652496, + "learning_rate": 7.954663214312407e-05, + "loss": 1.5537, + "step": 9076 + }, + { + "epoch": 0.8235347486844493, + "grad_norm": 0.11177305738201054, + "learning_rate": 7.946713634574144e-05, + "loss": 1.5274, + "step": 9077 + }, + { + "epoch": 0.8236254763200871, + "grad_norm": 0.1088587873051807, + "learning_rate": 7.938767686143355e-05, + "loss": 1.5783, + "step": 9078 + }, + { + "epoch": 0.8237162039557249, + "grad_norm": 0.10214323399888592, + "learning_rate": 7.930825369706185e-05, + "loss": 1.5827, + "step": 9079 + }, + { + "epoch": 0.8238069315913628, + "grad_norm": 0.09994033355929009, + "learning_rate": 7.92288668594845e-05, + "loss": 1.5369, + "step": 9080 + }, + { + "epoch": 0.8238976592270005, + "grad_norm": 0.105796624317359, + "learning_rate": 7.914951635555635e-05, + "loss": 1.5513, + "step": 9081 + }, + { + "epoch": 0.8239883868626383, + "grad_norm": 0.1015992397412309, + "learning_rate": 7.907020219212963e-05, + "loss": 1.5165, + "step": 9082 + }, + { + "epoch": 0.8240791144982762, + "grad_norm": 0.10320097301152814, + "learning_rate": 7.899092437605298e-05, + "loss": 1.4973, + "step": 9083 + }, + { + "epoch": 0.824169842133914, + "grad_norm": 0.10345698078370927, + "learning_rate": 7.891168291417194e-05, + "loss": 1.5549, + "step": 9084 + }, + { + "epoch": 0.8242605697695518, + "grad_norm": 0.11028301652346019, + "learning_rate": 7.883247781332914e-05, + "loss": 1.5567, + "step": 9085 + }, + { + "epoch": 0.8243512974051896, + "grad_norm": 0.10514770280148959, + "learning_rate": 7.87533090803641e-05, + "loss": 1.5221, + "step": 9086 + }, + { + "epoch": 0.8244420250408274, + "grad_norm": 0.10584223340286582, + "learning_rate": 7.867417672211291e-05, + "loss": 1.5797, + "step": 9087 + }, + { + "epoch": 0.8245327526764652, + "grad_norm": 0.10482470757492918, + "learning_rate": 7.85950807454085e-05, + "loss": 1.5373, + "step": 9088 + }, + { + "epoch": 0.8246234803121031, + "grad_norm": 0.10452959609168153, + "learning_rate": 7.851602115708112e-05, + "loss": 1.5632, + "step": 9089 + }, + { + "epoch": 0.8247142079477409, + "grad_norm": 0.10536877797683386, + "learning_rate": 7.843699796395742e-05, + "loss": 1.5632, + "step": 9090 + }, + { + "epoch": 0.8248049355833788, + "grad_norm": 0.10353750133874032, + "learning_rate": 7.835801117286096e-05, + "loss": 1.5467, + "step": 9091 + }, + { + "epoch": 0.8248956632190165, + "grad_norm": 0.10436917174353438, + "learning_rate": 7.827906079061248e-05, + "loss": 1.5599, + "step": 9092 + }, + { + "epoch": 0.8249863908546543, + "grad_norm": 0.10615705249788301, + "learning_rate": 
7.820014682402915e-05, + "loss": 1.5506, + "step": 9093 + }, + { + "epoch": 0.8250771184902922, + "grad_norm": 0.10583578096199887, + "learning_rate": 7.812126927992547e-05, + "loss": 1.505, + "step": 9094 + }, + { + "epoch": 0.82516784612593, + "grad_norm": 0.11041642601927279, + "learning_rate": 7.804242816511221e-05, + "loss": 1.5393, + "step": 9095 + }, + { + "epoch": 0.8252585737615677, + "grad_norm": 0.1049424459348867, + "learning_rate": 7.796362348639757e-05, + "loss": 1.4997, + "step": 9096 + }, + { + "epoch": 0.8253493013972056, + "grad_norm": 0.10393235393329535, + "learning_rate": 7.788485525058631e-05, + "loss": 1.4873, + "step": 9097 + }, + { + "epoch": 0.8254400290328434, + "grad_norm": 0.1045116670574485, + "learning_rate": 7.780612346447979e-05, + "loss": 1.5683, + "step": 9098 + }, + { + "epoch": 0.8255307566684812, + "grad_norm": 0.10600108923474305, + "learning_rate": 7.772742813487693e-05, + "loss": 1.5777, + "step": 9099 + }, + { + "epoch": 0.8256214843041191, + "grad_norm": 0.10666823902625108, + "learning_rate": 7.764876926857278e-05, + "loss": 1.5079, + "step": 9100 + }, + { + "epoch": 0.8257122119397569, + "grad_norm": 0.10467725515062921, + "learning_rate": 7.757014687235975e-05, + "loss": 1.5232, + "step": 9101 + }, + { + "epoch": 0.8258029395753946, + "grad_norm": 0.1103041341092532, + "learning_rate": 7.749156095302662e-05, + "loss": 1.56, + "step": 9102 + }, + { + "epoch": 0.8258936672110325, + "grad_norm": 0.10632183358629284, + "learning_rate": 7.741301151735968e-05, + "loss": 1.5373, + "step": 9103 + }, + { + "epoch": 0.8259843948466703, + "grad_norm": 0.1052526889100943, + "learning_rate": 7.733449857214142e-05, + "loss": 1.5404, + "step": 9104 + }, + { + "epoch": 0.8260751224823081, + "grad_norm": 0.1071853597715068, + "learning_rate": 7.72560221241514e-05, + "loss": 1.54, + "step": 9105 + }, + { + "epoch": 0.826165850117946, + "grad_norm": 0.10565405651573043, + "learning_rate": 7.71775821801663e-05, + "loss": 1.5144, + "step": 9106 + }, + { + "epoch": 0.8262565777535837, + "grad_norm": 0.10247634807468965, + "learning_rate": 7.709917874695932e-05, + "loss": 1.5576, + "step": 9107 + }, + { + "epoch": 0.8263473053892215, + "grad_norm": 0.10437260977477258, + "learning_rate": 7.702081183130044e-05, + "loss": 1.5114, + "step": 9108 + }, + { + "epoch": 0.8264380330248594, + "grad_norm": 0.10494583961603846, + "learning_rate": 7.694248143995674e-05, + "loss": 1.5544, + "step": 9109 + }, + { + "epoch": 0.8265287606604972, + "grad_norm": 0.10551041945961806, + "learning_rate": 7.686418757969233e-05, + "loss": 1.5384, + "step": 9110 + }, + { + "epoch": 0.826619488296135, + "grad_norm": 0.107490967086495, + "learning_rate": 7.678593025726765e-05, + "loss": 1.5404, + "step": 9111 + }, + { + "epoch": 0.8267102159317729, + "grad_norm": 0.10606319345607014, + "learning_rate": 7.670770947944017e-05, + "loss": 1.5688, + "step": 9112 + }, + { + "epoch": 0.8268009435674106, + "grad_norm": 0.10113828610684888, + "learning_rate": 7.662952525296446e-05, + "loss": 1.5132, + "step": 9113 + }, + { + "epoch": 0.8268916712030484, + "grad_norm": 0.10467184640485358, + "learning_rate": 7.655137758459162e-05, + "loss": 1.5619, + "step": 9114 + }, + { + "epoch": 0.8269823988386863, + "grad_norm": 0.10742283266108461, + "learning_rate": 7.647326648106967e-05, + "loss": 1.5672, + "step": 9115 + }, + { + "epoch": 0.8270731264743241, + "grad_norm": 0.10552382890985637, + "learning_rate": 7.639519194914357e-05, + "loss": 1.5569, + "step": 9116 + }, + { + "epoch": 0.8271638541099618, + 
"grad_norm": 0.10295965224226541, + "learning_rate": 7.631715399555517e-05, + "loss": 1.6127, + "step": 9117 + }, + { + "epoch": 0.8272545817455997, + "grad_norm": 0.1053093546858141, + "learning_rate": 7.623915262704295e-05, + "loss": 1.5641, + "step": 9118 + }, + { + "epoch": 0.8273453093812375, + "grad_norm": 0.10453215963049856, + "learning_rate": 7.616118785034226e-05, + "loss": 1.542, + "step": 9119 + }, + { + "epoch": 0.8274360370168753, + "grad_norm": 0.10107655648192344, + "learning_rate": 7.60832596721856e-05, + "loss": 1.5532, + "step": 9120 + }, + { + "epoch": 0.8275267646525132, + "grad_norm": 0.1011406559670393, + "learning_rate": 7.600536809930198e-05, + "loss": 1.5104, + "step": 9121 + }, + { + "epoch": 0.827617492288151, + "grad_norm": 0.10359940738666765, + "learning_rate": 7.592751313841711e-05, + "loss": 1.5574, + "step": 9122 + }, + { + "epoch": 0.8277082199237887, + "grad_norm": 0.11076589766943074, + "learning_rate": 7.584969479625414e-05, + "loss": 1.5077, + "step": 9123 + }, + { + "epoch": 0.8277989475594266, + "grad_norm": 0.10446377575193647, + "learning_rate": 7.577191307953246e-05, + "loss": 1.5033, + "step": 9124 + }, + { + "epoch": 0.8278896751950644, + "grad_norm": 0.10614193648608003, + "learning_rate": 7.56941679949687e-05, + "loss": 1.5698, + "step": 9125 + }, + { + "epoch": 0.8279804028307022, + "grad_norm": 0.10441110637829584, + "learning_rate": 7.561645954927593e-05, + "loss": 1.5776, + "step": 9126 + }, + { + "epoch": 0.8280711304663401, + "grad_norm": 0.10242584199519936, + "learning_rate": 7.553878774916456e-05, + "loss": 1.5275, + "step": 9127 + }, + { + "epoch": 0.8281618581019778, + "grad_norm": 0.1042590146549892, + "learning_rate": 7.54611526013414e-05, + "loss": 1.5762, + "step": 9128 + }, + { + "epoch": 0.8282525857376157, + "grad_norm": 0.10334540682665379, + "learning_rate": 7.538355411251018e-05, + "loss": 1.5198, + "step": 9129 + }, + { + "epoch": 0.8283433133732535, + "grad_norm": 0.10331099351373782, + "learning_rate": 7.530599228937179e-05, + "loss": 1.5607, + "step": 9130 + }, + { + "epoch": 0.8284340410088913, + "grad_norm": 0.10275817659442986, + "learning_rate": 7.522846713862342e-05, + "loss": 1.5456, + "step": 9131 + }, + { + "epoch": 0.8285247686445292, + "grad_norm": 0.10407886411733015, + "learning_rate": 7.515097866695952e-05, + "loss": 1.5303, + "step": 9132 + }, + { + "epoch": 0.828615496280167, + "grad_norm": 0.10499335025449973, + "learning_rate": 7.507352688107138e-05, + "loss": 1.518, + "step": 9133 + }, + { + "epoch": 0.8287062239158047, + "grad_norm": 0.10587984860824243, + "learning_rate": 7.499611178764676e-05, + "loss": 1.564, + "step": 9134 + }, + { + "epoch": 0.8287969515514426, + "grad_norm": 0.1056037435656382, + "learning_rate": 7.491873339337057e-05, + "loss": 1.5515, + "step": 9135 + }, + { + "epoch": 0.8288876791870804, + "grad_norm": 0.10545056674315718, + "learning_rate": 7.484139170492427e-05, + "loss": 1.5289, + "step": 9136 + }, + { + "epoch": 0.8289784068227182, + "grad_norm": 0.10588923976818161, + "learning_rate": 7.47640867289866e-05, + "loss": 1.5042, + "step": 9137 + }, + { + "epoch": 0.8290691344583561, + "grad_norm": 0.10694679164865495, + "learning_rate": 7.468681847223269e-05, + "loss": 1.5584, + "step": 9138 + }, + { + "epoch": 0.8291598620939938, + "grad_norm": 0.10036615809382428, + "learning_rate": 7.460958694133446e-05, + "loss": 1.502, + "step": 9139 + }, + { + "epoch": 0.8292505897296316, + "grad_norm": 0.1069849350465067, + "learning_rate": 7.453239214296132e-05, + "loss": 1.5837, + 
"step": 9140 + }, + { + "epoch": 0.8293413173652695, + "grad_norm": 0.10335173050677857, + "learning_rate": 7.44552340837788e-05, + "loss": 1.5366, + "step": 9141 + }, + { + "epoch": 0.8294320450009073, + "grad_norm": 0.10209736323108456, + "learning_rate": 7.437811277044948e-05, + "loss": 1.4977, + "step": 9142 + }, + { + "epoch": 0.8295227726365451, + "grad_norm": 0.10532003861816497, + "learning_rate": 7.430102820963292e-05, + "loss": 1.539, + "step": 9143 + }, + { + "epoch": 0.829613500272183, + "grad_norm": 0.10322405055672117, + "learning_rate": 7.422398040798533e-05, + "loss": 1.5236, + "step": 9144 + }, + { + "epoch": 0.8297042279078207, + "grad_norm": 0.10442764390661616, + "learning_rate": 7.414696937215982e-05, + "loss": 1.5403, + "step": 9145 + }, + { + "epoch": 0.8297949555434585, + "grad_norm": 0.1047240307743209, + "learning_rate": 7.406999510880608e-05, + "loss": 1.5593, + "step": 9146 + }, + { + "epoch": 0.8298856831790964, + "grad_norm": 0.10648642573359524, + "learning_rate": 7.39930576245711e-05, + "loss": 1.5372, + "step": 9147 + }, + { + "epoch": 0.8299764108147342, + "grad_norm": 0.10368386949483223, + "learning_rate": 7.391615692609849e-05, + "loss": 1.5032, + "step": 9148 + }, + { + "epoch": 0.8300671384503719, + "grad_norm": 0.10439864235326177, + "learning_rate": 7.38392930200284e-05, + "loss": 1.5391, + "step": 9149 + }, + { + "epoch": 0.8301578660860098, + "grad_norm": 0.10479212397268399, + "learning_rate": 7.376246591299829e-05, + "loss": 1.5474, + "step": 9150 + }, + { + "epoch": 0.8302485937216476, + "grad_norm": 0.10552835197244323, + "learning_rate": 7.368567561164203e-05, + "loss": 1.5324, + "step": 9151 + }, + { + "epoch": 0.8303393213572854, + "grad_norm": 0.10647986492608388, + "learning_rate": 7.360892212259041e-05, + "loss": 1.5363, + "step": 9152 + }, + { + "epoch": 0.8304300489929233, + "grad_norm": 0.10473733001945931, + "learning_rate": 7.353220545247136e-05, + "loss": 1.5485, + "step": 9153 + }, + { + "epoch": 0.830520776628561, + "grad_norm": 0.10389300780679146, + "learning_rate": 7.345552560790919e-05, + "loss": 1.5294, + "step": 9154 + }, + { + "epoch": 0.8306115042641988, + "grad_norm": 0.10593571995374243, + "learning_rate": 7.337888259552516e-05, + "loss": 1.564, + "step": 9155 + }, + { + "epoch": 0.8307022318998367, + "grad_norm": 0.1053839435348316, + "learning_rate": 7.330227642193749e-05, + "loss": 1.5467, + "step": 9156 + }, + { + "epoch": 0.8307929595354745, + "grad_norm": 0.10289825843984486, + "learning_rate": 7.322570709376125e-05, + "loss": 1.5432, + "step": 9157 + }, + { + "epoch": 0.8308836871711123, + "grad_norm": 0.10661588745537914, + "learning_rate": 7.314917461760812e-05, + "loss": 1.5435, + "step": 9158 + }, + { + "epoch": 0.8309744148067502, + "grad_norm": 0.1063670778532219, + "learning_rate": 7.307267900008651e-05, + "loss": 1.4983, + "step": 9159 + }, + { + "epoch": 0.8310651424423879, + "grad_norm": 0.10235679976843835, + "learning_rate": 7.299622024780216e-05, + "loss": 1.5711, + "step": 9160 + }, + { + "epoch": 0.8311558700780257, + "grad_norm": 0.106711298413604, + "learning_rate": 7.291979836735713e-05, + "loss": 1.5377, + "step": 9161 + }, + { + "epoch": 0.8312465977136636, + "grad_norm": 0.10518200249696448, + "learning_rate": 7.284341336535027e-05, + "loss": 1.5211, + "step": 9162 + }, + { + "epoch": 0.8313373253493014, + "grad_norm": 0.1037336855347695, + "learning_rate": 7.276706524837767e-05, + "loss": 1.5311, + "step": 9163 + }, + { + "epoch": 0.8314280529849392, + "grad_norm": 0.10604065891041216, + 
"learning_rate": 7.26907540230321e-05, + "loss": 1.5848, + "step": 9164 + }, + { + "epoch": 0.831518780620577, + "grad_norm": 0.10467337424765713, + "learning_rate": 7.261447969590285e-05, + "loss": 1.5287, + "step": 9165 + }, + { + "epoch": 0.8316095082562148, + "grad_norm": 0.1049297948782022, + "learning_rate": 7.253824227357614e-05, + "loss": 1.5234, + "step": 9166 + }, + { + "epoch": 0.8317002358918527, + "grad_norm": 0.10754839498288606, + "learning_rate": 7.246204176263538e-05, + "loss": 1.5478, + "step": 9167 + }, + { + "epoch": 0.8317909635274905, + "grad_norm": 0.10384074493055075, + "learning_rate": 7.23858781696603e-05, + "loss": 1.5594, + "step": 9168 + }, + { + "epoch": 0.8318816911631283, + "grad_norm": 0.10255629519890354, + "learning_rate": 7.230975150122754e-05, + "loss": 1.5335, + "step": 9169 + }, + { + "epoch": 0.8319724187987662, + "grad_norm": 0.10708265316140474, + "learning_rate": 7.223366176391077e-05, + "loss": 1.5443, + "step": 9170 + }, + { + "epoch": 0.8320631464344039, + "grad_norm": 0.1013890206261426, + "learning_rate": 7.215760896428047e-05, + "loss": 1.527, + "step": 9171 + }, + { + "epoch": 0.8321538740700417, + "grad_norm": 0.10633486595125483, + "learning_rate": 7.208159310890372e-05, + "loss": 1.5271, + "step": 9172 + }, + { + "epoch": 0.8322446017056796, + "grad_norm": 0.10573474862788454, + "learning_rate": 7.200561420434437e-05, + "loss": 1.5133, + "step": 9173 + }, + { + "epoch": 0.8323353293413174, + "grad_norm": 0.10626494483706307, + "learning_rate": 7.19296722571634e-05, + "loss": 1.5567, + "step": 9174 + }, + { + "epoch": 0.8324260569769552, + "grad_norm": 0.10135988723043506, + "learning_rate": 7.185376727391829e-05, + "loss": 1.511, + "step": 9175 + }, + { + "epoch": 0.832516784612593, + "grad_norm": 0.1052248197799529, + "learning_rate": 7.17778992611634e-05, + "loss": 1.5071, + "step": 9176 + }, + { + "epoch": 0.8326075122482308, + "grad_norm": 0.10006914344201313, + "learning_rate": 7.170206822545016e-05, + "loss": 1.5462, + "step": 9177 + }, + { + "epoch": 0.8326982398838686, + "grad_norm": 0.10736204975526373, + "learning_rate": 7.162627417332629e-05, + "loss": 1.5116, + "step": 9178 + }, + { + "epoch": 0.8327889675195065, + "grad_norm": 0.10187732174644615, + "learning_rate": 7.155051711133692e-05, + "loss": 1.5298, + "step": 9179 + }, + { + "epoch": 0.8328796951551443, + "grad_norm": 0.10600129707544143, + "learning_rate": 7.147479704602345e-05, + "loss": 1.5364, + "step": 9180 + }, + { + "epoch": 0.832970422790782, + "grad_norm": 0.10568254818052303, + "learning_rate": 7.139911398392446e-05, + "loss": 1.5264, + "step": 9181 + }, + { + "epoch": 0.8330611504264199, + "grad_norm": 0.10442287836796693, + "learning_rate": 7.132346793157524e-05, + "loss": 1.524, + "step": 9182 + }, + { + "epoch": 0.8331518780620577, + "grad_norm": 0.10868484288236745, + "learning_rate": 7.124785889550755e-05, + "loss": 1.5314, + "step": 9183 + }, + { + "epoch": 0.8332426056976955, + "grad_norm": 0.10758514620786053, + "learning_rate": 7.117228688225058e-05, + "loss": 1.5535, + "step": 9184 + }, + { + "epoch": 0.8333333333333334, + "grad_norm": 0.1017550662291369, + "learning_rate": 7.109675189832986e-05, + "loss": 1.5513, + "step": 9185 + }, + { + "epoch": 0.8334240609689711, + "grad_norm": 0.10470474552283979, + "learning_rate": 7.102125395026771e-05, + "loss": 1.5519, + "step": 9186 + }, + { + "epoch": 0.8335147886046089, + "grad_norm": 0.10807638153139448, + "learning_rate": 7.094579304458349e-05, + "loss": 1.5705, + "step": 9187 + }, + { + "epoch": 
0.8336055162402468, + "grad_norm": 0.10269474031760713, + "learning_rate": 7.087036918779338e-05, + "loss": 1.5199, + "step": 9188 + }, + { + "epoch": 0.8336962438758846, + "grad_norm": 0.09998526078681808, + "learning_rate": 7.079498238641014e-05, + "loss": 1.5061, + "step": 9189 + }, + { + "epoch": 0.8337869715115224, + "grad_norm": 0.10466591372060492, + "learning_rate": 7.071963264694331e-05, + "loss": 1.5186, + "step": 9190 + }, + { + "epoch": 0.8338776991471603, + "grad_norm": 0.10841897722972715, + "learning_rate": 7.06443199758996e-05, + "loss": 1.5309, + "step": 9191 + }, + { + "epoch": 0.833968426782798, + "grad_norm": 0.10290125492767475, + "learning_rate": 7.056904437978218e-05, + "loss": 1.5533, + "step": 9192 + }, + { + "epoch": 0.8340591544184358, + "grad_norm": 0.1019899699122331, + "learning_rate": 7.04938058650909e-05, + "loss": 1.5333, + "step": 9193 + }, + { + "epoch": 0.8341498820540737, + "grad_norm": 0.099728362599304, + "learning_rate": 7.041860443832276e-05, + "loss": 1.4996, + "step": 9194 + }, + { + "epoch": 0.8342406096897115, + "grad_norm": 0.10339247100835733, + "learning_rate": 7.03434401059716e-05, + "loss": 1.5137, + "step": 9195 + }, + { + "epoch": 0.8343313373253493, + "grad_norm": 0.10404028852602347, + "learning_rate": 7.026831287452773e-05, + "loss": 1.5099, + "step": 9196 + }, + { + "epoch": 0.8344220649609871, + "grad_norm": 0.10542808778499409, + "learning_rate": 7.019322275047823e-05, + "loss": 1.5587, + "step": 9197 + }, + { + "epoch": 0.8345127925966249, + "grad_norm": 0.10582968422900796, + "learning_rate": 7.011816974030743e-05, + "loss": 1.5624, + "step": 9198 + }, + { + "epoch": 0.8346035202322627, + "grad_norm": 0.10683357940662608, + "learning_rate": 7.004315385049603e-05, + "loss": 1.5528, + "step": 9199 + }, + { + "epoch": 0.8346942478679006, + "grad_norm": 0.10626783934492566, + "learning_rate": 6.996817508752151e-05, + "loss": 1.5742, + "step": 9200 + }, + { + "epoch": 0.8347849755035384, + "grad_norm": 0.10423772533090003, + "learning_rate": 6.989323345785864e-05, + "loss": 1.527, + "step": 9201 + }, + { + "epoch": 0.8348757031391761, + "grad_norm": 0.10079613735739457, + "learning_rate": 6.98183289679783e-05, + "loss": 1.4963, + "step": 9202 + }, + { + "epoch": 0.834966430774814, + "grad_norm": 0.10420720832575323, + "learning_rate": 6.974346162434874e-05, + "loss": 1.4993, + "step": 9203 + }, + { + "epoch": 0.8350571584104518, + "grad_norm": 0.10319389237133657, + "learning_rate": 6.96686314334346e-05, + "loss": 1.5282, + "step": 9204 + }, + { + "epoch": 0.8351478860460897, + "grad_norm": 0.10552348836418202, + "learning_rate": 6.959383840169765e-05, + "loss": 1.5122, + "step": 9205 + }, + { + "epoch": 0.8352386136817275, + "grad_norm": 0.10330649548896471, + "learning_rate": 6.951908253559625e-05, + "loss": 1.5313, + "step": 9206 + }, + { + "epoch": 0.8353293413173652, + "grad_norm": 0.1050936714876425, + "learning_rate": 6.944436384158531e-05, + "loss": 1.5218, + "step": 9207 + }, + { + "epoch": 0.8354200689530031, + "grad_norm": 0.10573741022996555, + "learning_rate": 6.936968232611718e-05, + "loss": 1.577, + "step": 9208 + }, + { + "epoch": 0.8355107965886409, + "grad_norm": 0.10475655253898601, + "learning_rate": 6.929503799564036e-05, + "loss": 1.5566, + "step": 9209 + }, + { + "epoch": 0.8356015242242787, + "grad_norm": 0.10201846440490332, + "learning_rate": 6.922043085660046e-05, + "loss": 1.5437, + "step": 9210 + }, + { + "epoch": 0.8356922518599166, + "grad_norm": 0.10443164724464257, + "learning_rate": 
6.914586091543995e-05, + "loss": 1.486, + "step": 9211 + }, + { + "epoch": 0.8357829794955544, + "grad_norm": 0.10427102518064163, + "learning_rate": 6.907132817859785e-05, + "loss": 1.5119, + "step": 9212 + }, + { + "epoch": 0.8358737071311921, + "grad_norm": 0.10364711383859393, + "learning_rate": 6.899683265251011e-05, + "loss": 1.4777, + "step": 9213 + }, + { + "epoch": 0.83596443476683, + "grad_norm": 0.11285849022902178, + "learning_rate": 6.892237434360927e-05, + "loss": 1.5508, + "step": 9214 + }, + { + "epoch": 0.8360551624024678, + "grad_norm": 0.10630831357647835, + "learning_rate": 6.884795325832499e-05, + "loss": 1.5585, + "step": 9215 + }, + { + "epoch": 0.8361458900381056, + "grad_norm": 0.10346209145620552, + "learning_rate": 6.877356940308355e-05, + "loss": 1.5143, + "step": 9216 + }, + { + "epoch": 0.8362366176737435, + "grad_norm": 0.10343027426419864, + "learning_rate": 6.869922278430774e-05, + "loss": 1.5222, + "step": 9217 + }, + { + "epoch": 0.8363273453093812, + "grad_norm": 0.10551105338564941, + "learning_rate": 6.862491340841775e-05, + "loss": 1.537, + "step": 9218 + }, + { + "epoch": 0.836418072945019, + "grad_norm": 0.107043989287781, + "learning_rate": 6.855064128183014e-05, + "loss": 1.5454, + "step": 9219 + }, + { + "epoch": 0.8365088005806569, + "grad_norm": 0.1052721139199419, + "learning_rate": 6.847640641095815e-05, + "loss": 1.5049, + "step": 9220 + }, + { + "epoch": 0.8365995282162947, + "grad_norm": 0.10183514068941091, + "learning_rate": 6.840220880221209e-05, + "loss": 1.558, + "step": 9221 + }, + { + "epoch": 0.8366902558519325, + "grad_norm": 0.10431842471729166, + "learning_rate": 6.832804846199897e-05, + "loss": 1.5198, + "step": 9222 + }, + { + "epoch": 0.8367809834875704, + "grad_norm": 0.10781627347710172, + "learning_rate": 6.825392539672237e-05, + "loss": 1.5116, + "step": 9223 + }, + { + "epoch": 0.8368717111232081, + "grad_norm": 0.1060355322961128, + "learning_rate": 6.817983961278307e-05, + "loss": 1.5673, + "step": 9224 + }, + { + "epoch": 0.8369624387588459, + "grad_norm": 0.10510570408683056, + "learning_rate": 6.810579111657816e-05, + "loss": 1.539, + "step": 9225 + }, + { + "epoch": 0.8370531663944838, + "grad_norm": 0.10385356234744918, + "learning_rate": 6.803177991450193e-05, + "loss": 1.5826, + "step": 9226 + }, + { + "epoch": 0.8371438940301216, + "grad_norm": 0.10636754396049941, + "learning_rate": 6.795780601294504e-05, + "loss": 1.5695, + "step": 9227 + }, + { + "epoch": 0.8372346216657593, + "grad_norm": 0.10679189736134981, + "learning_rate": 6.788386941829544e-05, + "loss": 1.5313, + "step": 9228 + }, + { + "epoch": 0.8373253493013972, + "grad_norm": 0.10697485019098575, + "learning_rate": 6.780997013693735e-05, + "loss": 1.5463, + "step": 9229 + }, + { + "epoch": 0.837416076937035, + "grad_norm": 0.10408757598297261, + "learning_rate": 6.773610817525195e-05, + "loss": 1.5702, + "step": 9230 + }, + { + "epoch": 0.8375068045726728, + "grad_norm": 0.10224685487501321, + "learning_rate": 6.76622835396174e-05, + "loss": 1.543, + "step": 9231 + }, + { + "epoch": 0.8375975322083107, + "grad_norm": 0.10577941662457405, + "learning_rate": 6.758849623640844e-05, + "loss": 1.5657, + "step": 9232 + }, + { + "epoch": 0.8376882598439485, + "grad_norm": 0.10501074079455304, + "learning_rate": 6.751474627199638e-05, + "loss": 1.5271, + "step": 9233 + }, + { + "epoch": 0.8377789874795862, + "grad_norm": 0.10495656831242448, + "learning_rate": 6.744103365274978e-05, + "loss": 1.5846, + "step": 9234 + }, + { + "epoch": 0.8378697151152241, 
+ "grad_norm": 0.10740126481156147, + "learning_rate": 6.736735838503377e-05, + "loss": 1.5387, + "step": 9235 + }, + { + "epoch": 0.8379604427508619, + "grad_norm": 0.10103069845806167, + "learning_rate": 6.729372047521015e-05, + "loss": 1.5051, + "step": 9236 + }, + { + "epoch": 0.8380511703864997, + "grad_norm": 0.10590067337590742, + "learning_rate": 6.722011992963738e-05, + "loss": 1.5411, + "step": 9237 + }, + { + "epoch": 0.8381418980221376, + "grad_norm": 0.10365275351468763, + "learning_rate": 6.714655675467119e-05, + "loss": 1.5284, + "step": 9238 + }, + { + "epoch": 0.8382326256577753, + "grad_norm": 0.0991009386711073, + "learning_rate": 6.707303095666361e-05, + "loss": 1.5274, + "step": 9239 + }, + { + "epoch": 0.8383233532934131, + "grad_norm": 0.1048564550154153, + "learning_rate": 6.69995425419635e-05, + "loss": 1.5838, + "step": 9240 + }, + { + "epoch": 0.838414080929051, + "grad_norm": 0.10665444525894388, + "learning_rate": 6.69260915169167e-05, + "loss": 1.5378, + "step": 9241 + }, + { + "epoch": 0.8385048085646888, + "grad_norm": 0.1058428735399182, + "learning_rate": 6.685267788786587e-05, + "loss": 1.5487, + "step": 9242 + }, + { + "epoch": 0.8385955362003267, + "grad_norm": 0.1050722892102678, + "learning_rate": 6.677930166115015e-05, + "loss": 1.5476, + "step": 9243 + }, + { + "epoch": 0.8386862638359645, + "grad_norm": 0.10185147609870027, + "learning_rate": 6.670596284310542e-05, + "loss": 1.5254, + "step": 9244 + }, + { + "epoch": 0.8387769914716022, + "grad_norm": 0.10686884449877276, + "learning_rate": 6.66326614400648e-05, + "loss": 1.5648, + "step": 9245 + }, + { + "epoch": 0.8388677191072401, + "grad_norm": 0.10559144510686637, + "learning_rate": 6.65593974583577e-05, + "loss": 1.5213, + "step": 9246 + }, + { + "epoch": 0.8389584467428779, + "grad_norm": 0.1054871080450452, + "learning_rate": 6.648617090431048e-05, + "loss": 1.5595, + "step": 9247 + }, + { + "epoch": 0.8390491743785157, + "grad_norm": 0.10582105921840863, + "learning_rate": 6.64129817842462e-05, + "loss": 1.5528, + "step": 9248 + }, + { + "epoch": 0.8391399020141536, + "grad_norm": 0.10494823032786056, + "learning_rate": 6.633983010448502e-05, + "loss": 1.5712, + "step": 9249 + }, + { + "epoch": 0.8392306296497913, + "grad_norm": 0.10336066570882234, + "learning_rate": 6.626671587134342e-05, + "loss": 1.527, + "step": 9250 + }, + { + "epoch": 0.8393213572854291, + "grad_norm": 0.10635576531730585, + "learning_rate": 6.619363909113469e-05, + "loss": 1.5305, + "step": 9251 + }, + { + "epoch": 0.839412084921067, + "grad_norm": 0.10401568054798378, + "learning_rate": 6.612059977016932e-05, + "loss": 1.5568, + "step": 9252 + }, + { + "epoch": 0.8395028125567048, + "grad_norm": 0.1028532260970578, + "learning_rate": 6.604759791475407e-05, + "loss": 1.4969, + "step": 9253 + }, + { + "epoch": 0.8395935401923426, + "grad_norm": 0.10595846146333718, + "learning_rate": 6.59746335311926e-05, + "loss": 1.5392, + "step": 9254 + }, + { + "epoch": 0.8396842678279804, + "grad_norm": 0.1030807693598303, + "learning_rate": 6.59017066257856e-05, + "loss": 1.5632, + "step": 9255 + }, + { + "epoch": 0.8397749954636182, + "grad_norm": 0.10460492417030604, + "learning_rate": 6.582881720483009e-05, + "loss": 1.5109, + "step": 9256 + }, + { + "epoch": 0.839865723099256, + "grad_norm": 0.10408295682831914, + "learning_rate": 6.57559652746203e-05, + "loss": 1.4742, + "step": 9257 + }, + { + "epoch": 0.8399564507348939, + "grad_norm": 0.10565214638238597, + "learning_rate": 6.568315084144682e-05, + "loss": 1.5383, + 
"step": 9258 + }, + { + "epoch": 0.8400471783705317, + "grad_norm": 0.10586071204859113, + "learning_rate": 6.561037391159741e-05, + "loss": 1.5555, + "step": 9259 + }, + { + "epoch": 0.8401379060061694, + "grad_norm": 0.10612908838173417, + "learning_rate": 6.553763449135624e-05, + "loss": 1.5211, + "step": 9260 + }, + { + "epoch": 0.8402286336418073, + "grad_norm": 0.10477902276663008, + "learning_rate": 6.54649325870042e-05, + "loss": 1.4979, + "step": 9261 + }, + { + "epoch": 0.8403193612774451, + "grad_norm": 0.10952209891754165, + "learning_rate": 6.539226820481941e-05, + "loss": 1.5352, + "step": 9262 + }, + { + "epoch": 0.8404100889130829, + "grad_norm": 0.10268999476497298, + "learning_rate": 6.531964135107638e-05, + "loss": 1.5745, + "step": 9263 + }, + { + "epoch": 0.8405008165487208, + "grad_norm": 0.10431425057255983, + "learning_rate": 6.524705203204617e-05, + "loss": 1.5719, + "step": 9264 + }, + { + "epoch": 0.8405915441843586, + "grad_norm": 0.10378484373727273, + "learning_rate": 6.517450025399719e-05, + "loss": 1.5112, + "step": 9265 + }, + { + "epoch": 0.8406822718199963, + "grad_norm": 0.10091035512017224, + "learning_rate": 6.510198602319423e-05, + "loss": 1.5862, + "step": 9266 + }, + { + "epoch": 0.8407729994556342, + "grad_norm": 0.10553570166277433, + "learning_rate": 6.50295093458989e-05, + "loss": 1.5723, + "step": 9267 + }, + { + "epoch": 0.840863727091272, + "grad_norm": 0.1077756216753336, + "learning_rate": 6.495707022836945e-05, + "loss": 1.5346, + "step": 9268 + }, + { + "epoch": 0.8409544547269098, + "grad_norm": 0.10225079123318102, + "learning_rate": 6.488466867686121e-05, + "loss": 1.5338, + "step": 9269 + }, + { + "epoch": 0.8410451823625477, + "grad_norm": 0.1069235315765124, + "learning_rate": 6.481230469762595e-05, + "loss": 1.5798, + "step": 9270 + }, + { + "epoch": 0.8411359099981854, + "grad_norm": 0.1059397710754474, + "learning_rate": 6.47399782969122e-05, + "loss": 1.5556, + "step": 9271 + }, + { + "epoch": 0.8412266376338232, + "grad_norm": 0.1153817430945914, + "learning_rate": 6.466768948096547e-05, + "loss": 1.5981, + "step": 9272 + }, + { + "epoch": 0.8413173652694611, + "grad_norm": 0.1020031849119449, + "learning_rate": 6.459543825602804e-05, + "loss": 1.529, + "step": 9273 + }, + { + "epoch": 0.8414080929050989, + "grad_norm": 0.10730323176084308, + "learning_rate": 6.452322462833871e-05, + "loss": 1.5648, + "step": 9274 + }, + { + "epoch": 0.8414988205407367, + "grad_norm": 0.10724094420533246, + "learning_rate": 6.445104860413298e-05, + "loss": 1.5898, + "step": 9275 + }, + { + "epoch": 0.8415895481763745, + "grad_norm": 0.10530991146076538, + "learning_rate": 6.437891018964353e-05, + "loss": 1.5521, + "step": 9276 + }, + { + "epoch": 0.8416802758120123, + "grad_norm": 0.10538080619861061, + "learning_rate": 6.430680939109934e-05, + "loss": 1.5068, + "step": 9277 + }, + { + "epoch": 0.8417710034476501, + "grad_norm": 0.10614459974214444, + "learning_rate": 6.42347462147263e-05, + "loss": 1.556, + "step": 9278 + }, + { + "epoch": 0.841861731083288, + "grad_norm": 0.10329685901821338, + "learning_rate": 6.416272066674728e-05, + "loss": 1.5312, + "step": 9279 + }, + { + "epoch": 0.8419524587189258, + "grad_norm": 0.10537851749881691, + "learning_rate": 6.409073275338145e-05, + "loss": 1.5497, + "step": 9280 + }, + { + "epoch": 0.8420431863545637, + "grad_norm": 0.10410839606391897, + "learning_rate": 6.40187824808452e-05, + "loss": 1.5174, + "step": 9281 + }, + { + "epoch": 0.8421339139902014, + "grad_norm": 0.10567421348717024, + 
"learning_rate": 6.39468698553512e-05, + "loss": 1.5821, + "step": 9282 + }, + { + "epoch": 0.8422246416258392, + "grad_norm": 0.1045182193678967, + "learning_rate": 6.387499488310939e-05, + "loss": 1.5432, + "step": 9283 + }, + { + "epoch": 0.8423153692614771, + "grad_norm": 0.10705667955774097, + "learning_rate": 6.380315757032601e-05, + "loss": 1.569, + "step": 9284 + }, + { + "epoch": 0.8424060968971149, + "grad_norm": 0.10550988042354807, + "learning_rate": 6.373135792320417e-05, + "loss": 1.551, + "step": 9285 + }, + { + "epoch": 0.8424968245327527, + "grad_norm": 0.1041727977896966, + "learning_rate": 6.365959594794401e-05, + "loss": 1.577, + "step": 9286 + }, + { + "epoch": 0.8425875521683905, + "grad_norm": 0.10407966050670207, + "learning_rate": 6.358787165074193e-05, + "loss": 1.5427, + "step": 9287 + }, + { + "epoch": 0.8426782798040283, + "grad_norm": 0.10370335236347435, + "learning_rate": 6.351618503779144e-05, + "loss": 1.4816, + "step": 9288 + }, + { + "epoch": 0.8427690074396661, + "grad_norm": 0.106472041308769, + "learning_rate": 6.344453611528283e-05, + "loss": 1.5419, + "step": 9289 + }, + { + "epoch": 0.842859735075304, + "grad_norm": 0.0996363934135323, + "learning_rate": 6.337292488940289e-05, + "loss": 1.5428, + "step": 9290 + }, + { + "epoch": 0.8429504627109418, + "grad_norm": 0.10487768204504644, + "learning_rate": 6.330135136633519e-05, + "loss": 1.5505, + "step": 9291 + }, + { + "epoch": 0.8430411903465795, + "grad_norm": 0.11147427562041956, + "learning_rate": 6.32298155522601e-05, + "loss": 1.5042, + "step": 9292 + }, + { + "epoch": 0.8431319179822174, + "grad_norm": 0.10883779352120472, + "learning_rate": 6.315831745335487e-05, + "loss": 1.5888, + "step": 9293 + }, + { + "epoch": 0.8432226456178552, + "grad_norm": 0.10370028528238973, + "learning_rate": 6.308685707579331e-05, + "loss": 1.4881, + "step": 9294 + }, + { + "epoch": 0.843313373253493, + "grad_norm": 0.10498672364527227, + "learning_rate": 6.301543442574587e-05, + "loss": 1.5364, + "step": 9295 + }, + { + "epoch": 0.8434041008891309, + "grad_norm": 0.10659259672904486, + "learning_rate": 6.294404950938026e-05, + "loss": 1.5154, + "step": 9296 + }, + { + "epoch": 0.8434948285247686, + "grad_norm": 0.10435557189721943, + "learning_rate": 6.287270233286036e-05, + "loss": 1.5564, + "step": 9297 + }, + { + "epoch": 0.8435855561604064, + "grad_norm": 0.10807272462308917, + "learning_rate": 6.2801392902347e-05, + "loss": 1.54, + "step": 9298 + }, + { + "epoch": 0.8436762837960443, + "grad_norm": 0.10977641279215827, + "learning_rate": 6.273012122399784e-05, + "loss": 1.5694, + "step": 9299 + }, + { + "epoch": 0.8437670114316821, + "grad_norm": 0.10647045336490317, + "learning_rate": 6.265888730396718e-05, + "loss": 1.5188, + "step": 9300 + }, + { + "epoch": 0.8438577390673199, + "grad_norm": 0.10549952129790209, + "learning_rate": 6.258769114840595e-05, + "loss": 1.5394, + "step": 9301 + }, + { + "epoch": 0.8439484667029578, + "grad_norm": 0.10456055208103611, + "learning_rate": 6.25165327634622e-05, + "loss": 1.5617, + "step": 9302 + }, + { + "epoch": 0.8440391943385955, + "grad_norm": 0.10585910378183011, + "learning_rate": 6.244541215528022e-05, + "loss": 1.5405, + "step": 9303 + }, + { + "epoch": 0.8441299219742333, + "grad_norm": 0.10392941661004705, + "learning_rate": 6.237432933000142e-05, + "loss": 1.4998, + "step": 9304 + }, + { + "epoch": 0.8442206496098712, + "grad_norm": 0.1057789820028149, + "learning_rate": 6.230328429376375e-05, + "loss": 1.5324, + "step": 9305 + }, + { + "epoch": 
0.844311377245509, + "grad_norm": 0.10418766749869636, + "learning_rate": 6.223227705270207e-05, + "loss": 1.53, + "step": 9306 + }, + { + "epoch": 0.8444021048811468, + "grad_norm": 0.10473750199659045, + "learning_rate": 6.21613076129478e-05, + "loss": 1.543, + "step": 9307 + }, + { + "epoch": 0.8444928325167846, + "grad_norm": 0.10488237042189891, + "learning_rate": 6.2090375980629e-05, + "loss": 1.5205, + "step": 9308 + }, + { + "epoch": 0.8445835601524224, + "grad_norm": 0.10662559112940811, + "learning_rate": 6.201948216187093e-05, + "loss": 1.5536, + "step": 9309 + }, + { + "epoch": 0.8446742877880602, + "grad_norm": 0.10460731248829572, + "learning_rate": 6.194862616279511e-05, + "loss": 1.5577, + "step": 9310 + }, + { + "epoch": 0.8447650154236981, + "grad_norm": 0.11232703566386405, + "learning_rate": 6.187780798951986e-05, + "loss": 1.5108, + "step": 9311 + }, + { + "epoch": 0.8448557430593359, + "grad_norm": 0.10470379435418249, + "learning_rate": 6.180702764816048e-05, + "loss": 1.5356, + "step": 9312 + }, + { + "epoch": 0.8449464706949736, + "grad_norm": 0.10449677362664027, + "learning_rate": 6.173628514482898e-05, + "loss": 1.5359, + "step": 9313 + }, + { + "epoch": 0.8450371983306115, + "grad_norm": 0.10004557396539578, + "learning_rate": 6.16655804856338e-05, + "loss": 1.5128, + "step": 9314 + }, + { + "epoch": 0.8451279259662493, + "grad_norm": 0.10415601272894363, + "learning_rate": 6.159491367668025e-05, + "loss": 1.5473, + "step": 9315 + }, + { + "epoch": 0.8452186536018871, + "grad_norm": 0.10898981176252859, + "learning_rate": 6.152428472407068e-05, + "loss": 1.53, + "step": 9316 + }, + { + "epoch": 0.845309381237525, + "grad_norm": 0.10502662926904953, + "learning_rate": 6.145369363390369e-05, + "loss": 1.5517, + "step": 9317 + }, + { + "epoch": 0.8454001088731627, + "grad_norm": 0.10716280119833416, + "learning_rate": 6.138314041227478e-05, + "loss": 1.5385, + "step": 9318 + }, + { + "epoch": 0.8454908365088006, + "grad_norm": 0.10690536146409528, + "learning_rate": 6.131262506527635e-05, + "loss": 1.5552, + "step": 9319 + }, + { + "epoch": 0.8455815641444384, + "grad_norm": 0.10280962979108721, + "learning_rate": 6.124214759899754e-05, + "loss": 1.5293, + "step": 9320 + }, + { + "epoch": 0.8456722917800762, + "grad_norm": 0.10310265982073376, + "learning_rate": 6.117170801952392e-05, + "loss": 1.5234, + "step": 9321 + }, + { + "epoch": 0.8457630194157141, + "grad_norm": 0.10353407546824045, + "learning_rate": 6.110130633293792e-05, + "loss": 1.584, + "step": 9322 + }, + { + "epoch": 0.8458537470513519, + "grad_norm": 0.10419904686890383, + "learning_rate": 6.103094254531888e-05, + "loss": 1.5426, + "step": 9323 + }, + { + "epoch": 0.8459444746869896, + "grad_norm": 0.1082058311332834, + "learning_rate": 6.0960616662742665e-05, + "loss": 1.5348, + "step": 9324 + }, + { + "epoch": 0.8460352023226275, + "grad_norm": 0.10342472116308093, + "learning_rate": 6.089032869128175e-05, + "loss": 1.5456, + "step": 9325 + }, + { + "epoch": 0.8461259299582653, + "grad_norm": 0.10516400447058837, + "learning_rate": 6.0820078637005724e-05, + "loss": 1.5477, + "step": 9326 + }, + { + "epoch": 0.8462166575939031, + "grad_norm": 0.10416697157071213, + "learning_rate": 6.074986650598074e-05, + "loss": 1.5302, + "step": 9327 + }, + { + "epoch": 0.846307385229541, + "grad_norm": 0.10625492525694487, + "learning_rate": 6.067969230426951e-05, + "loss": 1.5625, + "step": 9328 + }, + { + "epoch": 0.8463981128651787, + "grad_norm": 0.10289662894785064, + "learning_rate": 
6.06095560379315e-05, + "loss": 1.5196, + "step": 9329 + }, + { + "epoch": 0.8464888405008165, + "grad_norm": 0.10643268352966181, + "learning_rate": 6.053945771302316e-05, + "loss": 1.534, + "step": 9330 + }, + { + "epoch": 0.8465795681364544, + "grad_norm": 0.10768073222599084, + "learning_rate": 6.0469397335597475e-05, + "loss": 1.5104, + "step": 9331 + }, + { + "epoch": 0.8466702957720922, + "grad_norm": 0.1047382575726203, + "learning_rate": 6.039937491170394e-05, + "loss": 1.5062, + "step": 9332 + }, + { + "epoch": 0.84676102340773, + "grad_norm": 0.10238364014202667, + "learning_rate": 6.032939044738933e-05, + "loss": 1.539, + "step": 9333 + }, + { + "epoch": 0.8468517510433679, + "grad_norm": 0.10539880582842442, + "learning_rate": 6.025944394869654e-05, + "loss": 1.5339, + "step": 9334 + }, + { + "epoch": 0.8469424786790056, + "grad_norm": 0.10369792754161572, + "learning_rate": 6.0189535421665665e-05, + "loss": 1.5249, + "step": 9335 + }, + { + "epoch": 0.8470332063146434, + "grad_norm": 0.10561491180328925, + "learning_rate": 6.011966487233311e-05, + "loss": 1.5402, + "step": 9336 + }, + { + "epoch": 0.8471239339502813, + "grad_norm": 0.10785136319568468, + "learning_rate": 6.0049832306732446e-05, + "loss": 1.5623, + "step": 9337 + }, + { + "epoch": 0.8472146615859191, + "grad_norm": 0.10689955087024533, + "learning_rate": 5.998003773089361e-05, + "loss": 1.522, + "step": 9338 + }, + { + "epoch": 0.8473053892215568, + "grad_norm": 0.10532392010471965, + "learning_rate": 5.9910281150843296e-05, + "loss": 1.5284, + "step": 9339 + }, + { + "epoch": 0.8473961168571947, + "grad_norm": 0.10542404943709198, + "learning_rate": 5.9840562572605115e-05, + "loss": 1.555, + "step": 9340 + }, + { + "epoch": 0.8474868444928325, + "grad_norm": 0.10569495067781927, + "learning_rate": 5.977088200219927e-05, + "loss": 1.5008, + "step": 9341 + }, + { + "epoch": 0.8475775721284703, + "grad_norm": 0.10532320572176429, + "learning_rate": 5.970123944564249e-05, + "loss": 1.5101, + "step": 9342 + }, + { + "epoch": 0.8476682997641082, + "grad_norm": 0.1039752904219723, + "learning_rate": 5.963163490894863e-05, + "loss": 1.5238, + "step": 9343 + }, + { + "epoch": 0.847759027399746, + "grad_norm": 0.10907437288314666, + "learning_rate": 5.956206839812805e-05, + "loss": 1.5608, + "step": 9344 + }, + { + "epoch": 0.8478497550353837, + "grad_norm": 0.10803432602365107, + "learning_rate": 5.949253991918785e-05, + "loss": 1.5228, + "step": 9345 + }, + { + "epoch": 0.8479404826710216, + "grad_norm": 0.10677005710302108, + "learning_rate": 5.94230494781316e-05, + "loss": 1.5562, + "step": 9346 + }, + { + "epoch": 0.8480312103066594, + "grad_norm": 0.10580426325192954, + "learning_rate": 5.935359708096005e-05, + "loss": 1.5469, + "step": 9347 + }, + { + "epoch": 0.8481219379422972, + "grad_norm": 0.10462879594240616, + "learning_rate": 5.928418273367037e-05, + "loss": 1.5743, + "step": 9348 + }, + { + "epoch": 0.8482126655779351, + "grad_norm": 0.10664771358651627, + "learning_rate": 5.921480644225635e-05, + "loss": 1.5047, + "step": 9349 + }, + { + "epoch": 0.8483033932135728, + "grad_norm": 0.10834816189198726, + "learning_rate": 5.9145468212708774e-05, + "loss": 1.5078, + "step": 9350 + }, + { + "epoch": 0.8483941208492106, + "grad_norm": 0.10337613409701203, + "learning_rate": 5.907616805101507e-05, + "loss": 1.5411, + "step": 9351 + }, + { + "epoch": 0.8484848484848485, + "grad_norm": 0.10461859752633272, + "learning_rate": 5.900690596315927e-05, + "loss": 1.5583, + "step": 9352 + }, + { + "epoch": 
0.8485755761204863, + "grad_norm": 0.10691464857366463, + "learning_rate": 5.8937681955122e-05, + "loss": 1.5467, + "step": 9353 + }, + { + "epoch": 0.8486663037561241, + "grad_norm": 0.10528752204642097, + "learning_rate": 5.886849603288102e-05, + "loss": 1.5051, + "step": 9354 + }, + { + "epoch": 0.848757031391762, + "grad_norm": 0.10298516782840249, + "learning_rate": 5.879934820241045e-05, + "loss": 1.4857, + "step": 9355 + }, + { + "epoch": 0.8488477590273997, + "grad_norm": 0.10516435300144664, + "learning_rate": 5.873023846968106e-05, + "loss": 1.5543, + "step": 9356 + }, + { + "epoch": 0.8489384866630375, + "grad_norm": 0.1107862285134558, + "learning_rate": 5.86611668406607e-05, + "loss": 1.5181, + "step": 9357 + }, + { + "epoch": 0.8490292142986754, + "grad_norm": 0.1091460377216078, + "learning_rate": 5.859213332131358e-05, + "loss": 1.5396, + "step": 9358 + }, + { + "epoch": 0.8491199419343132, + "grad_norm": 0.11299244523049862, + "learning_rate": 5.852313791760083e-05, + "loss": 1.5237, + "step": 9359 + }, + { + "epoch": 0.8492106695699511, + "grad_norm": 0.10419107330119746, + "learning_rate": 5.845418063548014e-05, + "loss": 1.5613, + "step": 9360 + }, + { + "epoch": 0.8493013972055888, + "grad_norm": 0.10597342641854875, + "learning_rate": 5.838526148090612e-05, + "loss": 1.5569, + "step": 9361 + }, + { + "epoch": 0.8493921248412266, + "grad_norm": 0.10533898184245213, + "learning_rate": 5.831638045982984e-05, + "loss": 1.5579, + "step": 9362 + }, + { + "epoch": 0.8494828524768645, + "grad_norm": 0.10415835831908235, + "learning_rate": 5.824753757819917e-05, + "loss": 1.5155, + "step": 9363 + }, + { + "epoch": 0.8495735801125023, + "grad_norm": 0.10060525602947194, + "learning_rate": 5.817873284195879e-05, + "loss": 1.5254, + "step": 9364 + }, + { + "epoch": 0.8496643077481401, + "grad_norm": 0.10668920016246054, + "learning_rate": 5.810996625704984e-05, + "loss": 1.5119, + "step": 9365 + }, + { + "epoch": 0.849755035383778, + "grad_norm": 0.10853870189456478, + "learning_rate": 5.80412378294104e-05, + "loss": 1.5596, + "step": 9366 + }, + { + "epoch": 0.8498457630194157, + "grad_norm": 0.10761979127305972, + "learning_rate": 5.797254756497538e-05, + "loss": 1.5506, + "step": 9367 + }, + { + "epoch": 0.8499364906550535, + "grad_norm": 0.10530736814492693, + "learning_rate": 5.790389546967601e-05, + "loss": 1.5348, + "step": 9368 + }, + { + "epoch": 0.8500272182906914, + "grad_norm": 0.10974150792339896, + "learning_rate": 5.783528154944029e-05, + "loss": 1.5443, + "step": 9369 + }, + { + "epoch": 0.8501179459263292, + "grad_norm": 0.1030743102095007, + "learning_rate": 5.776670581019328e-05, + "loss": 1.5133, + "step": 9370 + }, + { + "epoch": 0.8502086735619669, + "grad_norm": 0.10636445560144875, + "learning_rate": 5.7698168257856455e-05, + "loss": 1.5377, + "step": 9371 + }, + { + "epoch": 0.8502994011976048, + "grad_norm": 0.10713424027152199, + "learning_rate": 5.762966889834792e-05, + "loss": 1.5362, + "step": 9372 + }, + { + "epoch": 0.8503901288332426, + "grad_norm": 0.10374630985831557, + "learning_rate": 5.75612077375825e-05, + "loss": 1.4918, + "step": 9373 + }, + { + "epoch": 0.8504808564688804, + "grad_norm": 0.1112804823969929, + "learning_rate": 5.749278478147224e-05, + "loss": 1.5165, + "step": 9374 + }, + { + "epoch": 0.8505715841045183, + "grad_norm": 0.10769352059451814, + "learning_rate": 5.7424400035925193e-05, + "loss": 1.5181, + "step": 9375 + }, + { + "epoch": 0.850662311740156, + "grad_norm": 0.10398924492258105, + "learning_rate": 
5.7356053506846304e-05, + "loss": 1.5147, + "step": 9376 + }, + { + "epoch": 0.8507530393757938, + "grad_norm": 0.10278336334872026, + "learning_rate": 5.7287745200137575e-05, + "loss": 1.5664, + "step": 9377 + }, + { + "epoch": 0.8508437670114317, + "grad_norm": 0.1038506922691918, + "learning_rate": 5.7219475121697284e-05, + "loss": 1.52, + "step": 9378 + }, + { + "epoch": 0.8509344946470695, + "grad_norm": 0.1075619845717592, + "learning_rate": 5.7151243277420495e-05, + "loss": 1.5137, + "step": 9379 + }, + { + "epoch": 0.8510252222827073, + "grad_norm": 0.10497920864025202, + "learning_rate": 5.708304967319922e-05, + "loss": 1.4751, + "step": 9380 + }, + { + "epoch": 0.8511159499183452, + "grad_norm": 0.10345725661909837, + "learning_rate": 5.701489431492174e-05, + "loss": 1.5387, + "step": 9381 + }, + { + "epoch": 0.8512066775539829, + "grad_norm": 0.10687949241326669, + "learning_rate": 5.694677720847358e-05, + "loss": 1.5641, + "step": 9382 + }, + { + "epoch": 0.8512974051896207, + "grad_norm": 0.1051250898402402, + "learning_rate": 5.687869835973636e-05, + "loss": 1.5428, + "step": 9383 + }, + { + "epoch": 0.8513881328252586, + "grad_norm": 0.10343779834288191, + "learning_rate": 5.6810657774588995e-05, + "loss": 1.5839, + "step": 9384 + }, + { + "epoch": 0.8514788604608964, + "grad_norm": 0.10936873560207389, + "learning_rate": 5.674265545890661e-05, + "loss": 1.5223, + "step": 9385 + }, + { + "epoch": 0.8515695880965342, + "grad_norm": 0.10448964325662484, + "learning_rate": 5.6674691418561185e-05, + "loss": 1.5647, + "step": 9386 + }, + { + "epoch": 0.851660315732172, + "grad_norm": 0.10397355158596493, + "learning_rate": 5.660676565942158e-05, + "loss": 1.5239, + "step": 9387 + }, + { + "epoch": 0.8517510433678098, + "grad_norm": 0.10087885536201616, + "learning_rate": 5.653887818735309e-05, + "loss": 1.5912, + "step": 9388 + }, + { + "epoch": 0.8518417710034476, + "grad_norm": 0.10369616240495944, + "learning_rate": 5.6471029008217766e-05, + "loss": 1.5368, + "step": 9389 + }, + { + "epoch": 0.8519324986390855, + "grad_norm": 0.10507432900127936, + "learning_rate": 5.6403218127874414e-05, + "loss": 1.5528, + "step": 9390 + }, + { + "epoch": 0.8520232262747233, + "grad_norm": 0.10587798340842297, + "learning_rate": 5.633544555217873e-05, + "loss": 1.5651, + "step": 9391 + }, + { + "epoch": 0.852113953910361, + "grad_norm": 0.10563394026727642, + "learning_rate": 5.626771128698266e-05, + "loss": 1.4968, + "step": 9392 + }, + { + "epoch": 0.8522046815459989, + "grad_norm": 0.10499876142515442, + "learning_rate": 5.6200015338135016e-05, + "loss": 1.5609, + "step": 9393 + }, + { + "epoch": 0.8522954091816367, + "grad_norm": 0.10786990530768033, + "learning_rate": 5.613235771148156e-05, + "loss": 1.5462, + "step": 9394 + }, + { + "epoch": 0.8523861368172745, + "grad_norm": 0.10493432907534361, + "learning_rate": 5.6064738412864414e-05, + "loss": 1.4861, + "step": 9395 + }, + { + "epoch": 0.8524768644529124, + "grad_norm": 0.10676848938520665, + "learning_rate": 5.599715744812245e-05, + "loss": 1.5512, + "step": 9396 + }, + { + "epoch": 0.8525675920885502, + "grad_norm": 0.1034801067903075, + "learning_rate": 5.592961482309139e-05, + "loss": 1.5513, + "step": 9397 + }, + { + "epoch": 0.852658319724188, + "grad_norm": 0.10674951583487308, + "learning_rate": 5.5862110543603684e-05, + "loss": 1.485, + "step": 9398 + }, + { + "epoch": 0.8527490473598258, + "grad_norm": 0.10445808150845026, + "learning_rate": 5.579464461548811e-05, + "loss": 1.5443, + "step": 9399 + }, + { + "epoch": 
0.8528397749954636, + "grad_norm": 0.10233646295836148, + "learning_rate": 5.5727217044570396e-05, + "loss": 1.4954, + "step": 9400 + }, + { + "epoch": 0.8529305026311015, + "grad_norm": 0.10736053020817211, + "learning_rate": 5.565982783667306e-05, + "loss": 1.531, + "step": 9401 + }, + { + "epoch": 0.8530212302667393, + "grad_norm": 0.10396663011909482, + "learning_rate": 5.559247699761505e-05, + "loss": 1.5445, + "step": 9402 + }, + { + "epoch": 0.853111957902377, + "grad_norm": 0.10471499815653343, + "learning_rate": 5.55251645332121e-05, + "loss": 1.5532, + "step": 9403 + }, + { + "epoch": 0.8532026855380149, + "grad_norm": 0.10688083355550976, + "learning_rate": 5.545789044927668e-05, + "loss": 1.4965, + "step": 9404 + }, + { + "epoch": 0.8532934131736527, + "grad_norm": 0.10524337717754095, + "learning_rate": 5.539065475161803e-05, + "loss": 1.5172, + "step": 9405 + }, + { + "epoch": 0.8533841408092905, + "grad_norm": 0.10887776349460104, + "learning_rate": 5.532345744604183e-05, + "loss": 1.5781, + "step": 9406 + }, + { + "epoch": 0.8534748684449284, + "grad_norm": 0.10493273448169527, + "learning_rate": 5.525629853835057e-05, + "loss": 1.5479, + "step": 9407 + }, + { + "epoch": 0.8535655960805661, + "grad_norm": 0.10514156629669318, + "learning_rate": 5.518917803434359e-05, + "loss": 1.526, + "step": 9408 + }, + { + "epoch": 0.8536563237162039, + "grad_norm": 0.10503289255472541, + "learning_rate": 5.512209593981665e-05, + "loss": 1.5485, + "step": 9409 + }, + { + "epoch": 0.8537470513518418, + "grad_norm": 0.10418210773501722, + "learning_rate": 5.505505226056212e-05, + "loss": 1.5232, + "step": 9410 + }, + { + "epoch": 0.8538377789874796, + "grad_norm": 0.1082530461861129, + "learning_rate": 5.498804700236959e-05, + "loss": 1.5614, + "step": 9411 + }, + { + "epoch": 0.8539285066231174, + "grad_norm": 0.11370227828705913, + "learning_rate": 5.492108017102465e-05, + "loss": 1.5449, + "step": 9412 + }, + { + "epoch": 0.8540192342587553, + "grad_norm": 0.10767910718582756, + "learning_rate": 5.4854151772310127e-05, + "loss": 1.5203, + "step": 9413 + }, + { + "epoch": 0.854109961894393, + "grad_norm": 0.11175141582588623, + "learning_rate": 5.478726181200511e-05, + "loss": 1.5275, + "step": 9414 + }, + { + "epoch": 0.8542006895300308, + "grad_norm": 0.10268140175800364, + "learning_rate": 5.472041029588576e-05, + "loss": 1.5042, + "step": 9415 + }, + { + "epoch": 0.8542914171656687, + "grad_norm": 0.10172223612675865, + "learning_rate": 5.4653597229724625e-05, + "loss": 1.5803, + "step": 9416 + }, + { + "epoch": 0.8543821448013065, + "grad_norm": 0.1158952551361984, + "learning_rate": 5.458682261929088e-05, + "loss": 1.5034, + "step": 9417 + }, + { + "epoch": 0.8544728724369443, + "grad_norm": 0.10454874072636791, + "learning_rate": 5.4520086470350784e-05, + "loss": 1.5331, + "step": 9418 + }, + { + "epoch": 0.8545636000725821, + "grad_norm": 0.10444994322495833, + "learning_rate": 5.4453388788666844e-05, + "loss": 1.5727, + "step": 9419 + }, + { + "epoch": 0.8546543277082199, + "grad_norm": 0.10814238694313545, + "learning_rate": 5.438672957999835e-05, + "loss": 1.611, + "step": 9420 + }, + { + "epoch": 0.8547450553438577, + "grad_norm": 0.10460121331616389, + "learning_rate": 5.432010885010147e-05, + "loss": 1.5398, + "step": 9421 + }, + { + "epoch": 0.8548357829794956, + "grad_norm": 0.10598381340691478, + "learning_rate": 5.425352660472893e-05, + "loss": 1.5412, + "step": 9422 + }, + { + "epoch": 0.8549265106151334, + "grad_norm": 0.10267127821989606, + "learning_rate": 
5.4186982849630085e-05, + "loss": 1.4903, + "step": 9423 + }, + { + "epoch": 0.8550172382507711, + "grad_norm": 0.10148782101922921, + "learning_rate": 5.4120477590550885e-05, + "loss": 1.5301, + "step": 9424 + }, + { + "epoch": 0.855107965886409, + "grad_norm": 0.10919913411037267, + "learning_rate": 5.405401083323425e-05, + "loss": 1.5646, + "step": 9425 + }, + { + "epoch": 0.8551986935220468, + "grad_norm": 0.10554340775906876, + "learning_rate": 5.398758258341951e-05, + "loss": 1.5062, + "step": 9426 + }, + { + "epoch": 0.8552894211576846, + "grad_norm": 0.10755158115283873, + "learning_rate": 5.392119284684266e-05, + "loss": 1.5302, + "step": 9427 + }, + { + "epoch": 0.8553801487933225, + "grad_norm": 0.10530327022321197, + "learning_rate": 5.385484162923654e-05, + "loss": 1.4966, + "step": 9428 + }, + { + "epoch": 0.8554708764289602, + "grad_norm": 0.10180755034387406, + "learning_rate": 5.378852893633068e-05, + "loss": 1.5148, + "step": 9429 + }, + { + "epoch": 0.855561604064598, + "grad_norm": 0.1067432180877517, + "learning_rate": 5.372225477385112e-05, + "loss": 1.5579, + "step": 9430 + }, + { + "epoch": 0.8556523317002359, + "grad_norm": 0.10334156072557514, + "learning_rate": 5.3656019147520554e-05, + "loss": 1.5234, + "step": 9431 + }, + { + "epoch": 0.8557430593358737, + "grad_norm": 0.10403952645295207, + "learning_rate": 5.358982206305862e-05, + "loss": 1.5098, + "step": 9432 + }, + { + "epoch": 0.8558337869715115, + "grad_norm": 0.10800568064635624, + "learning_rate": 5.352366352618132e-05, + "loss": 1.5106, + "step": 9433 + }, + { + "epoch": 0.8559245146071494, + "grad_norm": 0.10653727232934561, + "learning_rate": 5.3457543542601404e-05, + "loss": 1.5371, + "step": 9434 + }, + { + "epoch": 0.8560152422427871, + "grad_norm": 0.10467949165318088, + "learning_rate": 5.3391462118028533e-05, + "loss": 1.5302, + "step": 9435 + }, + { + "epoch": 0.856105969878425, + "grad_norm": 0.10329244239812452, + "learning_rate": 5.332541925816858e-05, + "loss": 1.5376, + "step": 9436 + }, + { + "epoch": 0.8561966975140628, + "grad_norm": 0.10286143659908585, + "learning_rate": 5.3259414968724665e-05, + "loss": 1.5242, + "step": 9437 + }, + { + "epoch": 0.8562874251497006, + "grad_norm": 0.10510428585092045, + "learning_rate": 5.3193449255395984e-05, + "loss": 1.5458, + "step": 9438 + }, + { + "epoch": 0.8563781527853385, + "grad_norm": 0.1034746563565824, + "learning_rate": 5.312752212387889e-05, + "loss": 1.5046, + "step": 9439 + }, + { + "epoch": 0.8564688804209762, + "grad_norm": 0.10387018659684366, + "learning_rate": 5.3061633579866156e-05, + "loss": 1.5149, + "step": 9440 + }, + { + "epoch": 0.856559608056614, + "grad_norm": 0.10670391828081778, + "learning_rate": 5.299578362904711e-05, + "loss": 1.5355, + "step": 9441 + }, + { + "epoch": 0.8566503356922519, + "grad_norm": 0.10630335158493123, + "learning_rate": 5.292997227710811e-05, + "loss": 1.5197, + "step": 9442 + }, + { + "epoch": 0.8567410633278897, + "grad_norm": 0.10810732156830735, + "learning_rate": 5.2864199529731815e-05, + "loss": 1.5315, + "step": 9443 + }, + { + "epoch": 0.8568317909635275, + "grad_norm": 0.10995031382821091, + "learning_rate": 5.27984653925978e-05, + "loss": 1.5835, + "step": 9444 + }, + { + "epoch": 0.8569225185991654, + "grad_norm": 0.10562941735450997, + "learning_rate": 5.273276987138231e-05, + "loss": 1.5341, + "step": 9445 + }, + { + "epoch": 0.8570132462348031, + "grad_norm": 0.10352901573937753, + "learning_rate": 5.2667112971758077e-05, + "loss": 1.4928, + "step": 9446 + }, + { + "epoch": 
0.8571039738704409, + "grad_norm": 0.1044113448006792, + "learning_rate": 5.260149469939446e-05, + "loss": 1.558, + "step": 9447 + }, + { + "epoch": 0.8571947015060788, + "grad_norm": 0.10360621435254255, + "learning_rate": 5.2535915059957804e-05, + "loss": 1.5505, + "step": 9448 + }, + { + "epoch": 0.8572854291417166, + "grad_norm": 0.10156827939068878, + "learning_rate": 5.247037405911081e-05, + "loss": 1.5395, + "step": 9449 + }, + { + "epoch": 0.8573761567773543, + "grad_norm": 0.106453907508119, + "learning_rate": 5.240487170251301e-05, + "loss": 1.5473, + "step": 9450 + }, + { + "epoch": 0.8574668844129922, + "grad_norm": 0.10741124633207594, + "learning_rate": 5.2339407995820324e-05, + "loss": 1.5208, + "step": 9451 + }, + { + "epoch": 0.85755761204863, + "grad_norm": 0.10562034940363918, + "learning_rate": 5.2273982944685884e-05, + "loss": 1.5628, + "step": 9452 + }, + { + "epoch": 0.8576483396842678, + "grad_norm": 0.11023076888482906, + "learning_rate": 5.220859655475907e-05, + "loss": 1.5614, + "step": 9453 + }, + { + "epoch": 0.8577390673199057, + "grad_norm": 0.10527892134756894, + "learning_rate": 5.2143248831685805e-05, + "loss": 1.5563, + "step": 9454 + }, + { + "epoch": 0.8578297949555435, + "grad_norm": 0.10792752086170847, + "learning_rate": 5.207793978110908e-05, + "loss": 1.5373, + "step": 9455 + }, + { + "epoch": 0.8579205225911812, + "grad_norm": 0.10465628036886157, + "learning_rate": 5.201266940866833e-05, + "loss": 1.5194, + "step": 9456 + }, + { + "epoch": 0.8580112502268191, + "grad_norm": 0.10697550133922355, + "learning_rate": 5.1947437719999436e-05, + "loss": 1.4968, + "step": 9457 + }, + { + "epoch": 0.8581019778624569, + "grad_norm": 0.10155064729088939, + "learning_rate": 5.188224472073549e-05, + "loss": 1.5072, + "step": 9458 + }, + { + "epoch": 0.8581927054980947, + "grad_norm": 0.10894307902277403, + "learning_rate": 5.181709041650562e-05, + "loss": 1.5393, + "step": 9459 + }, + { + "epoch": 0.8582834331337326, + "grad_norm": 0.1050900492527884, + "learning_rate": 5.1751974812936196e-05, + "loss": 1.5147, + "step": 9460 + }, + { + "epoch": 0.8583741607693703, + "grad_norm": 0.10990877871088256, + "learning_rate": 5.168689791564968e-05, + "loss": 1.5287, + "step": 9461 + }, + { + "epoch": 0.8584648884050081, + "grad_norm": 0.10844883480087865, + "learning_rate": 5.162185973026568e-05, + "loss": 1.5368, + "step": 9462 + }, + { + "epoch": 0.858555616040646, + "grad_norm": 0.10679511272603515, + "learning_rate": 5.1556860262400194e-05, + "loss": 1.538, + "step": 9463 + }, + { + "epoch": 0.8586463436762838, + "grad_norm": 0.10391839878586963, + "learning_rate": 5.149189951766581e-05, + "loss": 1.5776, + "step": 9464 + }, + { + "epoch": 0.8587370713119216, + "grad_norm": 0.10898533613110381, + "learning_rate": 5.1426977501672134e-05, + "loss": 1.6056, + "step": 9465 + }, + { + "epoch": 0.8588277989475595, + "grad_norm": 0.1050013586944666, + "learning_rate": 5.136209422002508e-05, + "loss": 1.5215, + "step": 9466 + }, + { + "epoch": 0.8589185265831972, + "grad_norm": 0.1050760454726116, + "learning_rate": 5.129724967832716e-05, + "loss": 1.5346, + "step": 9467 + }, + { + "epoch": 0.859009254218835, + "grad_norm": 0.10216374760882144, + "learning_rate": 5.12324438821779e-05, + "loss": 1.5146, + "step": 9468 + }, + { + "epoch": 0.8590999818544729, + "grad_norm": 0.10821075515563446, + "learning_rate": 5.116767683717333e-05, + "loss": 1.5137, + "step": 9469 + }, + { + "epoch": 0.8591907094901107, + "grad_norm": 0.10409155884382852, + "learning_rate": 
5.110294854890601e-05, + "loss": 1.5369, + "step": 9470 + }, + { + "epoch": 0.8592814371257484, + "grad_norm": 0.10407033973770323, + "learning_rate": 5.103825902296516e-05, + "loss": 1.5367, + "step": 9471 + }, + { + "epoch": 0.8593721647613863, + "grad_norm": 0.10824034493971901, + "learning_rate": 5.097360826493691e-05, + "loss": 1.5446, + "step": 9472 + }, + { + "epoch": 0.8594628923970241, + "grad_norm": 0.10425819616460263, + "learning_rate": 5.0908996280403744e-05, + "loss": 1.5256, + "step": 9473 + }, + { + "epoch": 0.859553620032662, + "grad_norm": 0.10946958008812878, + "learning_rate": 5.084442307494486e-05, + "loss": 1.5333, + "step": 9474 + }, + { + "epoch": 0.8596443476682998, + "grad_norm": 0.10697645219203553, + "learning_rate": 5.077988865413624e-05, + "loss": 1.5182, + "step": 9475 + }, + { + "epoch": 0.8597350753039376, + "grad_norm": 0.10474057256051444, + "learning_rate": 5.071539302355055e-05, + "loss": 1.5143, + "step": 9476 + }, + { + "epoch": 0.8598258029395754, + "grad_norm": 0.10866690589717848, + "learning_rate": 5.065093618875688e-05, + "loss": 1.5642, + "step": 9477 + }, + { + "epoch": 0.8599165305752132, + "grad_norm": 0.10704480403165088, + "learning_rate": 5.0586518155321004e-05, + "loss": 1.542, + "step": 9478 + }, + { + "epoch": 0.860007258210851, + "grad_norm": 0.1111403910131448, + "learning_rate": 5.0522138928805636e-05, + "loss": 1.5198, + "step": 9479 + }, + { + "epoch": 0.8600979858464889, + "grad_norm": 0.10408556584730269, + "learning_rate": 5.045779851476984e-05, + "loss": 1.5406, + "step": 9480 + }, + { + "epoch": 0.8601887134821267, + "grad_norm": 0.11157494297230462, + "learning_rate": 5.039349691876921e-05, + "loss": 1.5281, + "step": 9481 + }, + { + "epoch": 0.8602794411177644, + "grad_norm": 0.10301615102635253, + "learning_rate": 5.0329234146356484e-05, + "loss": 1.5514, + "step": 9482 + }, + { + "epoch": 0.8603701687534023, + "grad_norm": 0.10540070963991259, + "learning_rate": 5.026501020308072e-05, + "loss": 1.5195, + "step": 9483 + }, + { + "epoch": 0.8604608963890401, + "grad_norm": 0.10779409829797419, + "learning_rate": 5.020082509448759e-05, + "loss": 1.5238, + "step": 9484 + }, + { + "epoch": 0.8605516240246779, + "grad_norm": 0.1063725699948753, + "learning_rate": 5.013667882611944e-05, + "loss": 1.5517, + "step": 9485 + }, + { + "epoch": 0.8606423516603158, + "grad_norm": 0.1056800679374919, + "learning_rate": 5.007257140351546e-05, + "loss": 1.5288, + "step": 9486 + }, + { + "epoch": 0.8607330792959536, + "grad_norm": 0.11040496659383814, + "learning_rate": 5.00085028322112e-05, + "loss": 1.5427, + "step": 9487 + }, + { + "epoch": 0.8608238069315913, + "grad_norm": 0.10505564699704777, + "learning_rate": 4.994447311773903e-05, + "loss": 1.5278, + "step": 9488 + }, + { + "epoch": 0.8609145345672292, + "grad_norm": 0.10493963842029574, + "learning_rate": 4.988048226562797e-05, + "loss": 1.5355, + "step": 9489 + }, + { + "epoch": 0.861005262202867, + "grad_norm": 0.10600250633551056, + "learning_rate": 4.981653028140354e-05, + "loss": 1.5406, + "step": 9490 + }, + { + "epoch": 0.8610959898385048, + "grad_norm": 0.10563090859743077, + "learning_rate": 4.975261717058821e-05, + "loss": 1.5266, + "step": 9491 + }, + { + "epoch": 0.8611867174741427, + "grad_norm": 0.11044918497939199, + "learning_rate": 4.968874293870057e-05, + "loss": 1.539, + "step": 9492 + }, + { + "epoch": 0.8612774451097804, + "grad_norm": 0.10883412699631155, + "learning_rate": 4.962490759125649e-05, + "loss": 1.5724, + "step": 9493 + }, + { + "epoch": 
0.8613681727454182, + "grad_norm": 0.10409579278045733, + "learning_rate": 4.956111113376799e-05, + "loss": 1.5435, + "step": 9494 + }, + { + "epoch": 0.8614589003810561, + "grad_norm": 0.10655142926617285, + "learning_rate": 4.9497353571743895e-05, + "loss": 1.5061, + "step": 9495 + }, + { + "epoch": 0.8615496280166939, + "grad_norm": 0.10370793926204909, + "learning_rate": 4.943363491068975e-05, + "loss": 1.5109, + "step": 9496 + }, + { + "epoch": 0.8616403556523317, + "grad_norm": 0.10491753145761286, + "learning_rate": 4.936995515610765e-05, + "loss": 1.5103, + "step": 9497 + }, + { + "epoch": 0.8617310832879695, + "grad_norm": 0.10598179575721742, + "learning_rate": 4.930631431349625e-05, + "loss": 1.5123, + "step": 9498 + }, + { + "epoch": 0.8618218109236073, + "grad_norm": 0.10257331065569794, + "learning_rate": 4.924271238835104e-05, + "loss": 1.5267, + "step": 9499 + }, + { + "epoch": 0.8619125385592451, + "grad_norm": 0.10864281324314479, + "learning_rate": 4.9179149386164136e-05, + "loss": 1.5433, + "step": 9500 + }, + { + "epoch": 0.862003266194883, + "grad_norm": 0.10449534610588557, + "learning_rate": 4.911562531242414e-05, + "loss": 1.5861, + "step": 9501 + }, + { + "epoch": 0.8620939938305208, + "grad_norm": 0.1037641424239496, + "learning_rate": 4.9052140172616275e-05, + "loss": 1.5501, + "step": 9502 + }, + { + "epoch": 0.8621847214661585, + "grad_norm": 0.10602207868904619, + "learning_rate": 4.898869397222261e-05, + "loss": 1.5389, + "step": 9503 + }, + { + "epoch": 0.8622754491017964, + "grad_norm": 0.10716752761696714, + "learning_rate": 4.89252867167217e-05, + "loss": 1.5138, + "step": 9504 + }, + { + "epoch": 0.8623661767374342, + "grad_norm": 0.10476895666646585, + "learning_rate": 4.8861918411588715e-05, + "loss": 1.5835, + "step": 9505 + }, + { + "epoch": 0.862456904373072, + "grad_norm": 0.1089008908239614, + "learning_rate": 4.879858906229556e-05, + "loss": 1.5594, + "step": 9506 + }, + { + "epoch": 0.8625476320087099, + "grad_norm": 0.106747633744684, + "learning_rate": 4.8735298674310815e-05, + "loss": 1.5663, + "step": 9507 + }, + { + "epoch": 0.8626383596443477, + "grad_norm": 0.10441566539336583, + "learning_rate": 4.8672047253099536e-05, + "loss": 1.5374, + "step": 9508 + }, + { + "epoch": 0.8627290872799854, + "grad_norm": 0.10697212102937767, + "learning_rate": 4.860883480412337e-05, + "loss": 1.5558, + "step": 9509 + }, + { + "epoch": 0.8628198149156233, + "grad_norm": 0.10417313742258648, + "learning_rate": 4.8545661332841e-05, + "loss": 1.5344, + "step": 9510 + }, + { + "epoch": 0.8629105425512611, + "grad_norm": 0.1082312546193667, + "learning_rate": 4.848252684470727e-05, + "loss": 1.6153, + "step": 9511 + }, + { + "epoch": 0.863001270186899, + "grad_norm": 0.10493275597659826, + "learning_rate": 4.841943134517379e-05, + "loss": 1.5466, + "step": 9512 + }, + { + "epoch": 0.8630919978225368, + "grad_norm": 0.10550975553612452, + "learning_rate": 4.835637483968913e-05, + "loss": 1.5604, + "step": 9513 + }, + { + "epoch": 0.8631827254581745, + "grad_norm": 0.10819177211089853, + "learning_rate": 4.829335733369794e-05, + "loss": 1.5264, + "step": 9514 + }, + { + "epoch": 0.8632734530938124, + "grad_norm": 0.10547865891158262, + "learning_rate": 4.823037883264198e-05, + "loss": 1.5226, + "step": 9515 + }, + { + "epoch": 0.8633641807294502, + "grad_norm": 0.10372551378772009, + "learning_rate": 4.8167439341959326e-05, + "loss": 1.5144, + "step": 9516 + }, + { + "epoch": 0.863454908365088, + "grad_norm": 0.10183500949909258, + "learning_rate": 
4.810453886708499e-05, + "loss": 1.5265, + "step": 9517 + }, + { + "epoch": 0.8635456360007259, + "grad_norm": 0.10592032870658545, + "learning_rate": 4.804167741345028e-05, + "loss": 1.5399, + "step": 9518 + }, + { + "epoch": 0.8636363636363636, + "grad_norm": 0.10638286439556614, + "learning_rate": 4.79788549864833e-05, + "loss": 1.5091, + "step": 9519 + }, + { + "epoch": 0.8637270912720014, + "grad_norm": 0.10677525981523242, + "learning_rate": 4.7916071591608875e-05, + "loss": 1.5677, + "step": 9520 + }, + { + "epoch": 0.8638178189076393, + "grad_norm": 0.1086871807650894, + "learning_rate": 4.785332723424818e-05, + "loss": 1.5283, + "step": 9521 + }, + { + "epoch": 0.8639085465432771, + "grad_norm": 0.10616356392820414, + "learning_rate": 4.779062191981936e-05, + "loss": 1.4892, + "step": 9522 + }, + { + "epoch": 0.8639992741789149, + "grad_norm": 0.10761429339217755, + "learning_rate": 4.772795565373705e-05, + "loss": 1.5571, + "step": 9523 + }, + { + "epoch": 0.8640900018145528, + "grad_norm": 0.10610032743715722, + "learning_rate": 4.766532844141241e-05, + "loss": 1.5266, + "step": 9524 + }, + { + "epoch": 0.8641807294501905, + "grad_norm": 0.10452443955381348, + "learning_rate": 4.760274028825329e-05, + "loss": 1.5268, + "step": 9525 + }, + { + "epoch": 0.8642714570858283, + "grad_norm": 0.10492148509477142, + "learning_rate": 4.75401911996643e-05, + "loss": 1.526, + "step": 9526 + }, + { + "epoch": 0.8643621847214662, + "grad_norm": 0.1051275925793052, + "learning_rate": 4.747768118104645e-05, + "loss": 1.5261, + "step": 9527 + }, + { + "epoch": 0.864452912357104, + "grad_norm": 0.10282966214336842, + "learning_rate": 4.741521023779755e-05, + "loss": 1.5342, + "step": 9528 + }, + { + "epoch": 0.8645436399927418, + "grad_norm": 0.11184826653703325, + "learning_rate": 4.7352778375311766e-05, + "loss": 1.5516, + "step": 9529 + }, + { + "epoch": 0.8646343676283796, + "grad_norm": 0.10371705743131998, + "learning_rate": 4.729038559898047e-05, + "loss": 1.5566, + "step": 9530 + }, + { + "epoch": 0.8647250952640174, + "grad_norm": 0.10494514202578753, + "learning_rate": 4.7228031914191114e-05, + "loss": 1.5731, + "step": 9531 + }, + { + "epoch": 0.8648158228996552, + "grad_norm": 0.1048486596323347, + "learning_rate": 4.716571732632779e-05, + "loss": 1.5671, + "step": 9532 + }, + { + "epoch": 0.8649065505352931, + "grad_norm": 0.10888613282262598, + "learning_rate": 4.7103441840771645e-05, + "loss": 1.5258, + "step": 9533 + }, + { + "epoch": 0.8649972781709309, + "grad_norm": 0.108105738662617, + "learning_rate": 4.7041205462900083e-05, + "loss": 1.5861, + "step": 9534 + }, + { + "epoch": 0.8650880058065686, + "grad_norm": 0.10041241833971185, + "learning_rate": 4.697900819808704e-05, + "loss": 1.5194, + "step": 9535 + }, + { + "epoch": 0.8651787334422065, + "grad_norm": 0.10535888779761872, + "learning_rate": 4.691685005170354e-05, + "loss": 1.5585, + "step": 9536 + }, + { + "epoch": 0.8652694610778443, + "grad_norm": 0.1067450484563665, + "learning_rate": 4.685473102911669e-05, + "loss": 1.5539, + "step": 9537 + }, + { + "epoch": 0.8653601887134821, + "grad_norm": 0.1091442312236847, + "learning_rate": 4.67926511356907e-05, + "loss": 1.5418, + "step": 9538 + }, + { + "epoch": 0.86545091634912, + "grad_norm": 0.10633060563682845, + "learning_rate": 4.6730610376785995e-05, + "loss": 1.5225, + "step": 9539 + }, + { + "epoch": 0.8655416439847577, + "grad_norm": 0.10624570288375065, + "learning_rate": 4.666860875775997e-05, + "loss": 1.4786, + "step": 9540 + }, + { + "epoch": 
0.8656323716203955, + "grad_norm": 0.10774410553934442, + "learning_rate": 4.660664628396638e-05, + "loss": 1.5425, + "step": 9541 + }, + { + "epoch": 0.8657230992560334, + "grad_norm": 0.10357552207137331, + "learning_rate": 4.654472296075568e-05, + "loss": 1.5484, + "step": 9542 + }, + { + "epoch": 0.8658138268916712, + "grad_norm": 0.10586488829611182, + "learning_rate": 4.648283879347503e-05, + "loss": 1.5871, + "step": 9543 + }, + { + "epoch": 0.865904554527309, + "grad_norm": 0.10405202259371554, + "learning_rate": 4.6420993787468055e-05, + "loss": 1.5604, + "step": 9544 + }, + { + "epoch": 0.8659952821629469, + "grad_norm": 0.10503075011299934, + "learning_rate": 4.635918794807509e-05, + "loss": 1.5579, + "step": 9545 + }, + { + "epoch": 0.8660860097985846, + "grad_norm": 0.1064363527159357, + "learning_rate": 4.629742128063308e-05, + "loss": 1.5127, + "step": 9546 + }, + { + "epoch": 0.8661767374342224, + "grad_norm": 0.10429370736129968, + "learning_rate": 4.6235693790475706e-05, + "loss": 1.5483, + "step": 9547 + }, + { + "epoch": 0.8662674650698603, + "grad_norm": 0.10832863432124779, + "learning_rate": 4.6174005482933046e-05, + "loss": 1.4862, + "step": 9548 + }, + { + "epoch": 0.8663581927054981, + "grad_norm": 0.10597799397751671, + "learning_rate": 4.611235636333183e-05, + "loss": 1.5252, + "step": 9549 + }, + { + "epoch": 0.866448920341136, + "grad_norm": 0.10661827184481047, + "learning_rate": 4.605074643699558e-05, + "loss": 1.516, + "step": 9550 + }, + { + "epoch": 0.8665396479767737, + "grad_norm": 0.10487966868707597, + "learning_rate": 4.5989175709244324e-05, + "loss": 1.5871, + "step": 9551 + }, + { + "epoch": 0.8666303756124115, + "grad_norm": 0.10675112685017811, + "learning_rate": 4.592764418539458e-05, + "loss": 1.5582, + "step": 9552 + }, + { + "epoch": 0.8667211032480494, + "grad_norm": 0.10811553839444461, + "learning_rate": 4.586615187075965e-05, + "loss": 1.5395, + "step": 9553 + }, + { + "epoch": 0.8668118308836872, + "grad_norm": 0.10436631390160231, + "learning_rate": 4.580469877064952e-05, + "loss": 1.4937, + "step": 9554 + }, + { + "epoch": 0.866902558519325, + "grad_norm": 0.10361925381994598, + "learning_rate": 4.5743284890370604e-05, + "loss": 1.5348, + "step": 9555 + }, + { + "epoch": 0.8669932861549628, + "grad_norm": 0.10522177645718221, + "learning_rate": 4.568191023522594e-05, + "loss": 1.5725, + "step": 9556 + }, + { + "epoch": 0.8670840137906006, + "grad_norm": 0.10667588796112037, + "learning_rate": 4.562057481051535e-05, + "loss": 1.5376, + "step": 9557 + }, + { + "epoch": 0.8671747414262384, + "grad_norm": 0.10209885245705495, + "learning_rate": 4.555927862153508e-05, + "loss": 1.5425, + "step": 9558 + }, + { + "epoch": 0.8672654690618763, + "grad_norm": 0.10437169485574353, + "learning_rate": 4.549802167357797e-05, + "loss": 1.5693, + "step": 9559 + }, + { + "epoch": 0.8673561966975141, + "grad_norm": 0.1061264093974504, + "learning_rate": 4.543680397193373e-05, + "loss": 1.5056, + "step": 9560 + }, + { + "epoch": 0.8674469243331518, + "grad_norm": 0.10527436072539369, + "learning_rate": 4.537562552188851e-05, + "loss": 1.5369, + "step": 9561 + }, + { + "epoch": 0.8675376519687897, + "grad_norm": 0.1061100811835175, + "learning_rate": 4.531448632872503e-05, + "loss": 1.4939, + "step": 9562 + }, + { + "epoch": 0.8676283796044275, + "grad_norm": 0.10624500721042213, + "learning_rate": 4.5253386397722583e-05, + "loss": 1.5373, + "step": 9563 + }, + { + "epoch": 0.8677191072400653, + "grad_norm": 0.1055191181093467, + "learning_rate": 
4.5192325734157383e-05, + "loss": 1.4913, + "step": 9564 + }, + { + "epoch": 0.8678098348757032, + "grad_norm": 0.10482867336515674, + "learning_rate": 4.5131304343301813e-05, + "loss": 1.553, + "step": 9565 + }, + { + "epoch": 0.867900562511341, + "grad_norm": 0.10522530125295715, + "learning_rate": 4.507032223042512e-05, + "loss": 1.4956, + "step": 9566 + }, + { + "epoch": 0.8679912901469787, + "grad_norm": 0.10512532036814536, + "learning_rate": 4.5009379400793194e-05, + "loss": 1.5404, + "step": 9567 + }, + { + "epoch": 0.8680820177826166, + "grad_norm": 0.10453834356426261, + "learning_rate": 4.494847585966838e-05, + "loss": 1.5436, + "step": 9568 + }, + { + "epoch": 0.8681727454182544, + "grad_norm": 0.10176585470125828, + "learning_rate": 4.488761161230981e-05, + "loss": 1.5424, + "step": 9569 + }, + { + "epoch": 0.8682634730538922, + "grad_norm": 0.10485284041037554, + "learning_rate": 4.482678666397294e-05, + "loss": 1.5561, + "step": 9570 + }, + { + "epoch": 0.8683542006895301, + "grad_norm": 0.1021272767634867, + "learning_rate": 4.476600101991024e-05, + "loss": 1.4975, + "step": 9571 + }, + { + "epoch": 0.8684449283251678, + "grad_norm": 0.10667131466183767, + "learning_rate": 4.4705254685370456e-05, + "loss": 1.5358, + "step": 9572 + }, + { + "epoch": 0.8685356559608056, + "grad_norm": 0.10744800843707829, + "learning_rate": 4.464454766559894e-05, + "loss": 1.5112, + "step": 9573 + }, + { + "epoch": 0.8686263835964435, + "grad_norm": 0.10431053770946865, + "learning_rate": 4.4583879965837905e-05, + "loss": 1.5118, + "step": 9574 + }, + { + "epoch": 0.8687171112320813, + "grad_norm": 0.10684569683721828, + "learning_rate": 4.452325159132597e-05, + "loss": 1.5157, + "step": 9575 + }, + { + "epoch": 0.8688078388677191, + "grad_norm": 0.10773045162401801, + "learning_rate": 4.4462662547298304e-05, + "loss": 1.5085, + "step": 9576 + }, + { + "epoch": 0.868898566503357, + "grad_norm": 0.1061236764280564, + "learning_rate": 4.440211283898687e-05, + "loss": 1.5858, + "step": 9577 + }, + { + "epoch": 0.8689892941389947, + "grad_norm": 0.10529585557809001, + "learning_rate": 4.434160247162022e-05, + "loss": 1.5186, + "step": 9578 + }, + { + "epoch": 0.8690800217746325, + "grad_norm": 0.10635846109233794, + "learning_rate": 4.4281131450423393e-05, + "loss": 1.5408, + "step": 9579 + }, + { + "epoch": 0.8691707494102704, + "grad_norm": 0.11509517655326917, + "learning_rate": 4.422069978061793e-05, + "loss": 1.5432, + "step": 9580 + }, + { + "epoch": 0.8692614770459082, + "grad_norm": 0.10646960360112054, + "learning_rate": 4.416030746742228e-05, + "loss": 1.5616, + "step": 9581 + }, + { + "epoch": 0.869352204681546, + "grad_norm": 0.11138244695323472, + "learning_rate": 4.4099954516051254e-05, + "loss": 1.5782, + "step": 9582 + }, + { + "epoch": 0.8694429323171838, + "grad_norm": 0.10852395842101377, + "learning_rate": 4.4039640931716307e-05, + "loss": 1.5713, + "step": 9583 + }, + { + "epoch": 0.8695336599528216, + "grad_norm": 0.10516414477539306, + "learning_rate": 4.3979366719625546e-05, + "loss": 1.5461, + "step": 9584 + }, + { + "epoch": 0.8696243875884594, + "grad_norm": 0.10867808273529386, + "learning_rate": 4.391913188498375e-05, + "loss": 1.4944, + "step": 9585 + }, + { + "epoch": 0.8697151152240973, + "grad_norm": 0.10589420027649715, + "learning_rate": 4.3858936432992156e-05, + "loss": 1.5222, + "step": 9586 + }, + { + "epoch": 0.8698058428597351, + "grad_norm": 0.10667523629252398, + "learning_rate": 4.379878036884849e-05, + "loss": 1.478, + "step": 9587 + }, + { + "epoch": 
0.869896570495373, + "grad_norm": 0.10843135771701101, + "learning_rate": 4.373866369774754e-05, + "loss": 1.4776, + "step": 9588 + }, + { + "epoch": 0.8699872981310107, + "grad_norm": 0.10815375014619229, + "learning_rate": 4.3678586424880204e-05, + "loss": 1.5002, + "step": 9589 + }, + { + "epoch": 0.8700780257666485, + "grad_norm": 0.10454905755348237, + "learning_rate": 4.361854855543407e-05, + "loss": 1.5741, + "step": 9590 + }, + { + "epoch": 0.8701687534022864, + "grad_norm": 0.10577954296803553, + "learning_rate": 4.355855009459364e-05, + "loss": 1.5159, + "step": 9591 + }, + { + "epoch": 0.8702594810379242, + "grad_norm": 0.10563881268016946, + "learning_rate": 4.349859104753956e-05, + "loss": 1.5735, + "step": 9592 + }, + { + "epoch": 0.8703502086735619, + "grad_norm": 0.1086914678887319, + "learning_rate": 4.343867141944946e-05, + "loss": 1.5244, + "step": 9593 + }, + { + "epoch": 0.8704409363091998, + "grad_norm": 0.10531592189913745, + "learning_rate": 4.337879121549748e-05, + "loss": 1.5429, + "step": 9594 + }, + { + "epoch": 0.8705316639448376, + "grad_norm": 0.10631306763080661, + "learning_rate": 4.3318950440854154e-05, + "loss": 1.5513, + "step": 9595 + }, + { + "epoch": 0.8706223915804754, + "grad_norm": 0.10914058504776046, + "learning_rate": 4.325914910068673e-05, + "loss": 1.4985, + "step": 9596 + }, + { + "epoch": 0.8707131192161133, + "grad_norm": 0.1091503799953333, + "learning_rate": 4.319938720015903e-05, + "loss": 1.5692, + "step": 9597 + }, + { + "epoch": 0.870803846851751, + "grad_norm": 0.10489630945682185, + "learning_rate": 4.313966474443171e-05, + "loss": 1.5628, + "step": 9598 + }, + { + "epoch": 0.8708945744873888, + "grad_norm": 0.10558697068094386, + "learning_rate": 4.307998173866151e-05, + "loss": 1.5366, + "step": 9599 + }, + { + "epoch": 0.8709853021230267, + "grad_norm": 0.10795653275605319, + "learning_rate": 4.302033818800227e-05, + "loss": 1.555, + "step": 9600 + }, + { + "epoch": 0.8710760297586645, + "grad_norm": 0.11467302041321968, + "learning_rate": 4.296073409760426e-05, + "loss": 1.5361, + "step": 9601 + }, + { + "epoch": 0.8711667573943023, + "grad_norm": 0.10739671432684891, + "learning_rate": 4.290116947261424e-05, + "loss": 1.5712, + "step": 9602 + }, + { + "epoch": 0.8712574850299402, + "grad_norm": 0.1037237118113812, + "learning_rate": 4.28416443181755e-05, + "loss": 1.4844, + "step": 9603 + }, + { + "epoch": 0.8713482126655779, + "grad_norm": 0.1079153666865909, + "learning_rate": 4.2782158639428245e-05, + "loss": 1.5256, + "step": 9604 + }, + { + "epoch": 0.8714389403012157, + "grad_norm": 0.1116180537823969, + "learning_rate": 4.2722712441508995e-05, + "loss": 1.5391, + "step": 9605 + }, + { + "epoch": 0.8715296679368536, + "grad_norm": 0.1061184547247339, + "learning_rate": 4.26633057295509e-05, + "loss": 1.5588, + "step": 9606 + }, + { + "epoch": 0.8716203955724914, + "grad_norm": 0.10439871721351332, + "learning_rate": 4.260393850868355e-05, + "loss": 1.528, + "step": 9607 + }, + { + "epoch": 0.8717111232081292, + "grad_norm": 0.10756042003872683, + "learning_rate": 4.254461078403377e-05, + "loss": 1.4922, + "step": 9608 + }, + { + "epoch": 0.871801850843767, + "grad_norm": 0.10706411239614544, + "learning_rate": 4.2485322560724235e-05, + "loss": 1.5738, + "step": 9609 + }, + { + "epoch": 0.8718925784794048, + "grad_norm": 0.10303102570796735, + "learning_rate": 4.2426073843874425e-05, + "loss": 1.5416, + "step": 9610 + }, + { + "epoch": 0.8719833061150426, + "grad_norm": 0.1028480092949492, + "learning_rate": 
4.2366864638600674e-05, + "loss": 1.5101, + "step": 9611 + }, + { + "epoch": 0.8720740337506805, + "grad_norm": 0.10447912978949048, + "learning_rate": 4.230769495001563e-05, + "loss": 1.5512, + "step": 9612 + }, + { + "epoch": 0.8721647613863183, + "grad_norm": 0.10368986714988411, + "learning_rate": 4.224856478322847e-05, + "loss": 1.5388, + "step": 9613 + }, + { + "epoch": 0.872255489021956, + "grad_norm": 0.10834596323575078, + "learning_rate": 4.218947414334534e-05, + "loss": 1.579, + "step": 9614 + }, + { + "epoch": 0.8723462166575939, + "grad_norm": 0.10447655359924088, + "learning_rate": 4.2130423035468436e-05, + "loss": 1.5292, + "step": 9615 + }, + { + "epoch": 0.8724369442932317, + "grad_norm": 0.10642810614405247, + "learning_rate": 4.207141146469712e-05, + "loss": 1.5286, + "step": 9616 + }, + { + "epoch": 0.8725276719288695, + "grad_norm": 0.10691394514528403, + "learning_rate": 4.201243943612681e-05, + "loss": 1.5637, + "step": 9617 + }, + { + "epoch": 0.8726183995645074, + "grad_norm": 0.1051775089522831, + "learning_rate": 4.195350695484995e-05, + "loss": 1.5935, + "step": 9618 + }, + { + "epoch": 0.8727091272001452, + "grad_norm": 0.10530682570799037, + "learning_rate": 4.1894614025955294e-05, + "loss": 1.5821, + "step": 9619 + }, + { + "epoch": 0.8727998548357829, + "grad_norm": 0.10799486307228541, + "learning_rate": 4.18357606545281e-05, + "loss": 1.5095, + "step": 9620 + }, + { + "epoch": 0.8728905824714208, + "grad_norm": 0.10572925420067958, + "learning_rate": 4.1776946845650646e-05, + "loss": 1.5507, + "step": 9621 + }, + { + "epoch": 0.8729813101070586, + "grad_norm": 0.11058694675327309, + "learning_rate": 4.171817260440131e-05, + "loss": 1.6258, + "step": 9622 + }, + { + "epoch": 0.8730720377426964, + "grad_norm": 0.105903513607186, + "learning_rate": 4.165943793585525e-05, + "loss": 1.5233, + "step": 9623 + }, + { + "epoch": 0.8731627653783343, + "grad_norm": 0.1016416227793586, + "learning_rate": 4.16007428450843e-05, + "loss": 1.5546, + "step": 9624 + }, + { + "epoch": 0.873253493013972, + "grad_norm": 0.10671604441081861, + "learning_rate": 4.154208733715681e-05, + "loss": 1.5438, + "step": 9625 + }, + { + "epoch": 0.8733442206496099, + "grad_norm": 0.10540877000203097, + "learning_rate": 4.148347141713771e-05, + "loss": 1.5472, + "step": 9626 + }, + { + "epoch": 0.8734349482852477, + "grad_norm": 0.10640903417543032, + "learning_rate": 4.1424895090088286e-05, + "loss": 1.534, + "step": 9627 + }, + { + "epoch": 0.8735256759208855, + "grad_norm": 0.10959273219671273, + "learning_rate": 4.136635836106684e-05, + "loss": 1.54, + "step": 9628 + }, + { + "epoch": 0.8736164035565234, + "grad_norm": 0.10601106279518059, + "learning_rate": 4.1307861235127975e-05, + "loss": 1.5399, + "step": 9629 + }, + { + "epoch": 0.8737071311921611, + "grad_norm": 0.10721468429414839, + "learning_rate": 4.124940371732283e-05, + "loss": 1.5607, + "step": 9630 + }, + { + "epoch": 0.8737978588277989, + "grad_norm": 0.10345479749287402, + "learning_rate": 4.119098581269926e-05, + "loss": 1.554, + "step": 9631 + }, + { + "epoch": 0.8738885864634368, + "grad_norm": 0.10734545191279198, + "learning_rate": 4.1132607526301766e-05, + "loss": 1.5641, + "step": 9632 + }, + { + "epoch": 0.8739793140990746, + "grad_norm": 0.10475514321383946, + "learning_rate": 4.107426886317123e-05, + "loss": 1.4984, + "step": 9633 + }, + { + "epoch": 0.8740700417347124, + "grad_norm": 0.10396033616503782, + "learning_rate": 4.1015969828345154e-05, + "loss": 1.5057, + "step": 9634 + }, + { + "epoch": 
0.8741607693703503, + "grad_norm": 0.10589149096643236, + "learning_rate": 4.0957710426857805e-05, + "loss": 1.5712, + "step": 9635 + }, + { + "epoch": 0.874251497005988, + "grad_norm": 0.10301337328773903, + "learning_rate": 4.0899490663739825e-05, + "loss": 1.5425, + "step": 9636 + }, + { + "epoch": 0.8743422246416258, + "grad_norm": 0.10687011356190218, + "learning_rate": 4.084131054401841e-05, + "loss": 1.513, + "step": 9637 + }, + { + "epoch": 0.8744329522772637, + "grad_norm": 0.10394162303527898, + "learning_rate": 4.078317007271753e-05, + "loss": 1.5168, + "step": 9638 + }, + { + "epoch": 0.8745236799129015, + "grad_norm": 0.10522486115510217, + "learning_rate": 4.072506925485764e-05, + "loss": 1.5021, + "step": 9639 + }, + { + "epoch": 0.8746144075485393, + "grad_norm": 0.10800433050142587, + "learning_rate": 4.0667008095455704e-05, + "loss": 1.5026, + "step": 9640 + }, + { + "epoch": 0.8747051351841771, + "grad_norm": 0.1063520566337005, + "learning_rate": 4.060898659952522e-05, + "loss": 1.5331, + "step": 9641 + }, + { + "epoch": 0.8747958628198149, + "grad_norm": 0.1077861351685226, + "learning_rate": 4.055100477207657e-05, + "loss": 1.5694, + "step": 9642 + }, + { + "epoch": 0.8748865904554527, + "grad_norm": 0.10680468527835976, + "learning_rate": 4.049306261811636e-05, + "loss": 1.5145, + "step": 9643 + }, + { + "epoch": 0.8749773180910906, + "grad_norm": 0.10470777220734388, + "learning_rate": 4.043516014264786e-05, + "loss": 1.5067, + "step": 9644 + }, + { + "epoch": 0.8750680457267284, + "grad_norm": 0.10705027929993534, + "learning_rate": 4.037729735067108e-05, + "loss": 1.5049, + "step": 9645 + }, + { + "epoch": 0.8751587733623661, + "grad_norm": 0.10910900807425987, + "learning_rate": 4.0319474247182355e-05, + "loss": 1.5536, + "step": 9646 + }, + { + "epoch": 0.875249500998004, + "grad_norm": 0.10907239971134569, + "learning_rate": 4.026169083717479e-05, + "loss": 1.4947, + "step": 9647 + }, + { + "epoch": 0.8753402286336418, + "grad_norm": 0.10734425226801494, + "learning_rate": 4.020394712563796e-05, + "loss": 1.5136, + "step": 9648 + }, + { + "epoch": 0.8754309562692796, + "grad_norm": 0.109094776284834, + "learning_rate": 4.0146243117558154e-05, + "loss": 1.5778, + "step": 9649 + }, + { + "epoch": 0.8755216839049175, + "grad_norm": 0.10738927368887974, + "learning_rate": 4.0088578817918e-05, + "loss": 1.5507, + "step": 9650 + }, + { + "epoch": 0.8756124115405552, + "grad_norm": 0.10257176852260805, + "learning_rate": 4.0030954231696726e-05, + "loss": 1.5402, + "step": 9651 + }, + { + "epoch": 0.875703139176193, + "grad_norm": 0.10692248541490587, + "learning_rate": 3.997336936387053e-05, + "loss": 1.5762, + "step": 9652 + }, + { + "epoch": 0.8757938668118309, + "grad_norm": 0.10380647918165191, + "learning_rate": 3.991582421941159e-05, + "loss": 1.5252, + "step": 9653 + }, + { + "epoch": 0.8758845944474687, + "grad_norm": 0.10397940725921721, + "learning_rate": 3.9858318803288995e-05, + "loss": 1.5349, + "step": 9654 + }, + { + "epoch": 0.8759753220831065, + "grad_norm": 0.10737247588217846, + "learning_rate": 3.980085312046844e-05, + "loss": 1.5558, + "step": 9655 + }, + { + "epoch": 0.8760660497187444, + "grad_norm": 0.10765114811875238, + "learning_rate": 3.9743427175912064e-05, + "loss": 1.5667, + "step": 9656 + }, + { + "epoch": 0.8761567773543821, + "grad_norm": 0.10674573411820507, + "learning_rate": 3.968604097457862e-05, + "loss": 1.5435, + "step": 9657 + }, + { + "epoch": 0.8762475049900199, + "grad_norm": 0.10770487720889606, + "learning_rate": 
3.9628694521423256e-05, + "loss": 1.5145, + "step": 9658 + }, + { + "epoch": 0.8763382326256578, + "grad_norm": 0.10586309054745457, + "learning_rate": 3.9571387821398074e-05, + "loss": 1.5354, + "step": 9659 + }, + { + "epoch": 0.8764289602612956, + "grad_norm": 0.10740812093078933, + "learning_rate": 3.951412087945144e-05, + "loss": 1.549, + "step": 9660 + }, + { + "epoch": 0.8765196878969334, + "grad_norm": 0.10613859060462365, + "learning_rate": 3.945689370052824e-05, + "loss": 1.5576, + "step": 9661 + }, + { + "epoch": 0.8766104155325712, + "grad_norm": 0.10450088852279349, + "learning_rate": 3.9399706289570124e-05, + "loss": 1.5781, + "step": 9662 + }, + { + "epoch": 0.876701143168209, + "grad_norm": 0.111461417017101, + "learning_rate": 3.934255865151537e-05, + "loss": 1.546, + "step": 9663 + }, + { + "epoch": 0.8767918708038469, + "grad_norm": 0.10793053414690901, + "learning_rate": 3.928545079129853e-05, + "loss": 1.4906, + "step": 9664 + }, + { + "epoch": 0.8768825984394847, + "grad_norm": 0.10701957265847227, + "learning_rate": 3.9228382713850877e-05, + "loss": 1.5276, + "step": 9665 + }, + { + "epoch": 0.8769733260751225, + "grad_norm": 0.11136698626078013, + "learning_rate": 3.917135442410036e-05, + "loss": 1.5885, + "step": 9666 + }, + { + "epoch": 0.8770640537107603, + "grad_norm": 0.10925716241073617, + "learning_rate": 3.9114365926971265e-05, + "loss": 1.5507, + "step": 9667 + }, + { + "epoch": 0.8771547813463981, + "grad_norm": 0.10675100937065476, + "learning_rate": 3.905741722738454e-05, + "loss": 1.5384, + "step": 9668 + }, + { + "epoch": 0.8772455089820359, + "grad_norm": 0.11034919922078452, + "learning_rate": 3.9000508330257925e-05, + "loss": 1.522, + "step": 9669 + }, + { + "epoch": 0.8773362366176738, + "grad_norm": 0.10432485324065899, + "learning_rate": 3.8943639240505205e-05, + "loss": 1.5725, + "step": 9670 + }, + { + "epoch": 0.8774269642533116, + "grad_norm": 0.10274026158163062, + "learning_rate": 3.888680996303717e-05, + "loss": 1.4918, + "step": 9671 + }, + { + "epoch": 0.8775176918889493, + "grad_norm": 0.10440424393698845, + "learning_rate": 3.883002050276119e-05, + "loss": 1.5136, + "step": 9672 + }, + { + "epoch": 0.8776084195245872, + "grad_norm": 0.10591939178861198, + "learning_rate": 3.8773270864580876e-05, + "loss": 1.5469, + "step": 9673 + }, + { + "epoch": 0.877699147160225, + "grad_norm": 0.10512308783742819, + "learning_rate": 3.871656105339666e-05, + "loss": 1.5444, + "step": 9674 + }, + { + "epoch": 0.8777898747958628, + "grad_norm": 0.10538126345123881, + "learning_rate": 3.8659891074105226e-05, + "loss": 1.5183, + "step": 9675 + }, + { + "epoch": 0.8778806024315007, + "grad_norm": 0.10813717758144378, + "learning_rate": 3.8603260931600324e-05, + "loss": 1.5264, + "step": 9676 + }, + { + "epoch": 0.8779713300671385, + "grad_norm": 0.10111246478456526, + "learning_rate": 3.854667063077172e-05, + "loss": 1.5483, + "step": 9677 + }, + { + "epoch": 0.8780620577027762, + "grad_norm": 0.10575010889376225, + "learning_rate": 3.849012017650616e-05, + "loss": 1.5734, + "step": 9678 + }, + { + "epoch": 0.8781527853384141, + "grad_norm": 0.10394819162839156, + "learning_rate": 3.84336095736868e-05, + "loss": 1.4727, + "step": 9679 + }, + { + "epoch": 0.8782435129740519, + "grad_norm": 0.10722844357772984, + "learning_rate": 3.8377138827193294e-05, + "loss": 1.5875, + "step": 9680 + }, + { + "epoch": 0.8783342406096897, + "grad_norm": 0.10715246058286104, + "learning_rate": 3.832070794190179e-05, + "loss": 1.5227, + "step": 9681 + }, + { + "epoch": 
0.8784249682453276, + "grad_norm": 0.10579221295872784, + "learning_rate": 3.8264316922685284e-05, + "loss": 1.5593, + "step": 9682 + }, + { + "epoch": 0.8785156958809653, + "grad_norm": 0.10903717260337238, + "learning_rate": 3.820796577441305e-05, + "loss": 1.5426, + "step": 9683 + }, + { + "epoch": 0.8786064235166031, + "grad_norm": 0.10853499335021104, + "learning_rate": 3.815165450195107e-05, + "loss": 1.5362, + "step": 9684 + }, + { + "epoch": 0.878697151152241, + "grad_norm": 0.10767959947373901, + "learning_rate": 3.809538311016158e-05, + "loss": 1.5933, + "step": 9685 + }, + { + "epoch": 0.8787878787878788, + "grad_norm": 0.1056350219642995, + "learning_rate": 3.8039151603904086e-05, + "loss": 1.5507, + "step": 9686 + }, + { + "epoch": 0.8788786064235166, + "grad_norm": 0.10355909965029528, + "learning_rate": 3.798295998803386e-05, + "loss": 1.5169, + "step": 9687 + }, + { + "epoch": 0.8789693340591545, + "grad_norm": 0.10968859983375469, + "learning_rate": 3.7926808267403076e-05, + "loss": 1.5597, + "step": 9688 + }, + { + "epoch": 0.8790600616947922, + "grad_norm": 0.10655139162432932, + "learning_rate": 3.7870696446860585e-05, + "loss": 1.5499, + "step": 9689 + }, + { + "epoch": 0.87915078933043, + "grad_norm": 0.10530607700643407, + "learning_rate": 3.7814624531251564e-05, + "loss": 1.5648, + "step": 9690 + }, + { + "epoch": 0.8792415169660679, + "grad_norm": 0.11355675263345327, + "learning_rate": 3.775859252541775e-05, + "loss": 1.5313, + "step": 9691 + }, + { + "epoch": 0.8793322446017057, + "grad_norm": 0.10356693852532571, + "learning_rate": 3.7702600434197654e-05, + "loss": 1.5546, + "step": 9692 + }, + { + "epoch": 0.8794229722373434, + "grad_norm": 0.10805776721465593, + "learning_rate": 3.7646648262426085e-05, + "loss": 1.6054, + "step": 9693 + }, + { + "epoch": 0.8795136998729813, + "grad_norm": 0.11230954909621696, + "learning_rate": 3.759073601493468e-05, + "loss": 1.591, + "step": 9694 + }, + { + "epoch": 0.8796044275086191, + "grad_norm": 0.10665103839700743, + "learning_rate": 3.7534863696551294e-05, + "loss": 1.5186, + "step": 9695 + }, + { + "epoch": 0.8796951551442569, + "grad_norm": 0.10838159356262636, + "learning_rate": 3.7479031312100677e-05, + "loss": 1.5545, + "step": 9696 + }, + { + "epoch": 0.8797858827798948, + "grad_norm": 0.10418239920007978, + "learning_rate": 3.742323886640386e-05, + "loss": 1.5389, + "step": 9697 + }, + { + "epoch": 0.8798766104155326, + "grad_norm": 0.10693739676663938, + "learning_rate": 3.7367486364278444e-05, + "loss": 1.5117, + "step": 9698 + }, + { + "epoch": 0.8799673380511703, + "grad_norm": 0.10645595700872904, + "learning_rate": 3.7311773810538855e-05, + "loss": 1.5491, + "step": 9699 + }, + { + "epoch": 0.8800580656868082, + "grad_norm": 0.10754038686784786, + "learning_rate": 3.7256101209995785e-05, + "loss": 1.5588, + "step": 9700 + }, + { + "epoch": 0.880148793322446, + "grad_norm": 0.10606090057581423, + "learning_rate": 3.720046856745651e-05, + "loss": 1.5067, + "step": 9701 + }, + { + "epoch": 0.8802395209580839, + "grad_norm": 0.10960663282513748, + "learning_rate": 3.7144875887724975e-05, + "loss": 1.5133, + "step": 9702 + }, + { + "epoch": 0.8803302485937217, + "grad_norm": 0.10562105325510632, + "learning_rate": 3.708932317560171e-05, + "loss": 1.5188, + "step": 9703 + }, + { + "epoch": 0.8804209762293594, + "grad_norm": 0.10986530859989933, + "learning_rate": 3.703381043588361e-05, + "loss": 1.5176, + "step": 9704 + }, + { + "epoch": 0.8805117038649973, + "grad_norm": 0.10517665739142537, + "learning_rate": 
3.697833767336417e-05, + "loss": 1.4733, + "step": 9705 + }, + { + "epoch": 0.8806024315006351, + "grad_norm": 0.10658875088017893, + "learning_rate": 3.6922904892833567e-05, + "loss": 1.5397, + "step": 9706 + }, + { + "epoch": 0.8806931591362729, + "grad_norm": 0.10227985791192308, + "learning_rate": 3.686751209907835e-05, + "loss": 1.573, + "step": 9707 + }, + { + "epoch": 0.8807838867719108, + "grad_norm": 0.10293923337022905, + "learning_rate": 3.681215929688164e-05, + "loss": 1.5505, + "step": 9708 + }, + { + "epoch": 0.8808746144075486, + "grad_norm": 0.1105082199314787, + "learning_rate": 3.675684649102329e-05, + "loss": 1.5407, + "step": 9709 + }, + { + "epoch": 0.8809653420431863, + "grad_norm": 0.10422622936056174, + "learning_rate": 3.670157368627958e-05, + "loss": 1.5638, + "step": 9710 + }, + { + "epoch": 0.8810560696788242, + "grad_norm": 0.11057466743136848, + "learning_rate": 3.6646340887423244e-05, + "loss": 1.5199, + "step": 9711 + }, + { + "epoch": 0.881146797314462, + "grad_norm": 0.10890364335933524, + "learning_rate": 3.659114809922365e-05, + "loss": 1.5252, + "step": 9712 + }, + { + "epoch": 0.8812375249500998, + "grad_norm": 0.11051618314593076, + "learning_rate": 3.6535995326446745e-05, + "loss": 1.5221, + "step": 9713 + }, + { + "epoch": 0.8813282525857377, + "grad_norm": 0.10647372778386618, + "learning_rate": 3.648088257385496e-05, + "loss": 1.5623, + "step": 9714 + }, + { + "epoch": 0.8814189802213754, + "grad_norm": 0.10132532556838329, + "learning_rate": 3.642580984620719e-05, + "loss": 1.5294, + "step": 9715 + }, + { + "epoch": 0.8815097078570132, + "grad_norm": 0.1093207681400508, + "learning_rate": 3.637077714825904e-05, + "loss": 1.5308, + "step": 9716 + }, + { + "epoch": 0.8816004354926511, + "grad_norm": 0.11085050054733649, + "learning_rate": 3.631578448476275e-05, + "loss": 1.5187, + "step": 9717 + }, + { + "epoch": 0.8816911631282889, + "grad_norm": 0.10885013539621942, + "learning_rate": 3.626083186046675e-05, + "loss": 1.5668, + "step": 9718 + }, + { + "epoch": 0.8817818907639267, + "grad_norm": 0.10539347475308973, + "learning_rate": 3.620591928011624e-05, + "loss": 1.5367, + "step": 9719 + }, + { + "epoch": 0.8818726183995645, + "grad_norm": 0.10699521571627132, + "learning_rate": 3.615104674845299e-05, + "loss": 1.5513, + "step": 9720 + }, + { + "epoch": 0.8819633460352023, + "grad_norm": 0.1121779581143969, + "learning_rate": 3.6096214270215253e-05, + "loss": 1.5536, + "step": 9721 + }, + { + "epoch": 0.8820540736708401, + "grad_norm": 0.10513020986495022, + "learning_rate": 3.6041421850137703e-05, + "loss": 1.5105, + "step": 9722 + }, + { + "epoch": 0.882144801306478, + "grad_norm": 0.10692303525210684, + "learning_rate": 3.598666949295182e-05, + "loss": 1.5304, + "step": 9723 + }, + { + "epoch": 0.8822355289421158, + "grad_norm": 0.10778693110247879, + "learning_rate": 3.593195720338538e-05, + "loss": 1.5435, + "step": 9724 + }, + { + "epoch": 0.8823262565777535, + "grad_norm": 0.10654179521558814, + "learning_rate": 3.5877284986162885e-05, + "loss": 1.528, + "step": 9725 + }, + { + "epoch": 0.8824169842133914, + "grad_norm": 0.11251625171587795, + "learning_rate": 3.582265284600511e-05, + "loss": 1.5175, + "step": 9726 + }, + { + "epoch": 0.8825077118490292, + "grad_norm": 0.10780861617891156, + "learning_rate": 3.576806078762984e-05, + "loss": 1.5863, + "step": 9727 + }, + { + "epoch": 0.882598439484667, + "grad_norm": 0.10564021798136718, + "learning_rate": 3.571350881575086e-05, + "loss": 1.5572, + "step": 9728 + }, + { + "epoch": 
0.8826891671203049, + "grad_norm": 0.10716304769943032, + "learning_rate": 3.565899693507879e-05, + "loss": 1.5049, + "step": 9729 + }, + { + "epoch": 0.8827798947559427, + "grad_norm": 0.10525199344831031, + "learning_rate": 3.5604525150320866e-05, + "loss": 1.5461, + "step": 9730 + }, + { + "epoch": 0.8828706223915804, + "grad_norm": 0.10821549684592086, + "learning_rate": 3.55500934661806e-05, + "loss": 1.5312, + "step": 9731 + }, + { + "epoch": 0.8829613500272183, + "grad_norm": 0.10622622710113779, + "learning_rate": 3.549570188735812e-05, + "loss": 1.4764, + "step": 9732 + }, + { + "epoch": 0.8830520776628561, + "grad_norm": 0.1075782924178233, + "learning_rate": 3.544135041855029e-05, + "loss": 1.5423, + "step": 9733 + }, + { + "epoch": 0.8831428052984939, + "grad_norm": 0.10466829243838242, + "learning_rate": 3.53870390644504e-05, + "loss": 1.5316, + "step": 9734 + }, + { + "epoch": 0.8832335329341318, + "grad_norm": 0.1053774298789146, + "learning_rate": 3.533276782974815e-05, + "loss": 1.4939, + "step": 9735 + }, + { + "epoch": 0.8833242605697695, + "grad_norm": 0.1090080331797448, + "learning_rate": 3.527853671912978e-05, + "loss": 1.5435, + "step": 9736 + }, + { + "epoch": 0.8834149882054073, + "grad_norm": 0.10461842716196254, + "learning_rate": 3.522434573727839e-05, + "loss": 1.5505, + "step": 9737 + }, + { + "epoch": 0.8835057158410452, + "grad_norm": 0.1130940156808728, + "learning_rate": 3.5170194888873184e-05, + "loss": 1.5341, + "step": 9738 + }, + { + "epoch": 0.883596443476683, + "grad_norm": 0.10627896119819535, + "learning_rate": 3.511608417859014e-05, + "loss": 1.5887, + "step": 9739 + }, + { + "epoch": 0.8836871711123209, + "grad_norm": 0.10607356580770558, + "learning_rate": 3.506201361110167e-05, + "loss": 1.5844, + "step": 9740 + }, + { + "epoch": 0.8837778987479586, + "grad_norm": 0.1062172739572453, + "learning_rate": 3.500798319107701e-05, + "loss": 1.5603, + "step": 9741 + }, + { + "epoch": 0.8838686263835964, + "grad_norm": 0.1055620167355383, + "learning_rate": 3.495399292318146e-05, + "loss": 1.5493, + "step": 9742 + }, + { + "epoch": 0.8839593540192343, + "grad_norm": 0.10373049778658984, + "learning_rate": 3.490004281207715e-05, + "loss": 1.5149, + "step": 9743 + }, + { + "epoch": 0.8840500816548721, + "grad_norm": 0.11045816548001329, + "learning_rate": 3.484613286242272e-05, + "loss": 1.4868, + "step": 9744 + }, + { + "epoch": 0.8841408092905099, + "grad_norm": 0.10933753809503825, + "learning_rate": 3.479226307887329e-05, + "loss": 1.5311, + "step": 9745 + }, + { + "epoch": 0.8842315369261478, + "grad_norm": 0.10794738954292693, + "learning_rate": 3.4738433466080465e-05, + "loss": 1.5256, + "step": 9746 + }, + { + "epoch": 0.8843222645617855, + "grad_norm": 0.1055745301950988, + "learning_rate": 3.468464402869248e-05, + "loss": 1.5342, + "step": 9747 + }, + { + "epoch": 0.8844129921974233, + "grad_norm": 0.1084762515319756, + "learning_rate": 3.463089477135406e-05, + "loss": 1.5086, + "step": 9748 + }, + { + "epoch": 0.8845037198330612, + "grad_norm": 0.10778467342322372, + "learning_rate": 3.457718569870644e-05, + "loss": 1.519, + "step": 9749 + }, + { + "epoch": 0.884594447468699, + "grad_norm": 0.10531189541165449, + "learning_rate": 3.452351681538751e-05, + "loss": 1.5137, + "step": 9750 + }, + { + "epoch": 0.8846851751043368, + "grad_norm": 0.10780246068383122, + "learning_rate": 3.4469888126031524e-05, + "loss": 1.5591, + "step": 9751 + }, + { + "epoch": 0.8847759027399746, + "grad_norm": 0.10176848685926615, + "learning_rate": 
3.4416299635269264e-05, + "loss": 1.518, + "step": 9752 + }, + { + "epoch": 0.8848666303756124, + "grad_norm": 0.10917875554454402, + "learning_rate": 3.4362751347728094e-05, + "loss": 1.5202, + "step": 9753 + }, + { + "epoch": 0.8849573580112502, + "grad_norm": 0.10381570134453254, + "learning_rate": 3.430924326803209e-05, + "loss": 1.5072, + "step": 9754 + }, + { + "epoch": 0.8850480856468881, + "grad_norm": 0.10749572312202382, + "learning_rate": 3.425577540080144e-05, + "loss": 1.5171, + "step": 9755 + }, + { + "epoch": 0.8851388132825259, + "grad_norm": 0.10410401777997194, + "learning_rate": 3.420234775065323e-05, + "loss": 1.5806, + "step": 9756 + }, + { + "epoch": 0.8852295409181636, + "grad_norm": 0.10688800695938844, + "learning_rate": 3.414896032220105e-05, + "loss": 1.5361, + "step": 9757 + }, + { + "epoch": 0.8853202685538015, + "grad_norm": 0.10479668656669335, + "learning_rate": 3.409561312005477e-05, + "loss": 1.5313, + "step": 9758 + }, + { + "epoch": 0.8854109961894393, + "grad_norm": 0.10309753175008658, + "learning_rate": 3.4042306148820925e-05, + "loss": 1.5169, + "step": 9759 + }, + { + "epoch": 0.8855017238250771, + "grad_norm": 0.105644106878326, + "learning_rate": 3.398903941310266e-05, + "loss": 1.5279, + "step": 9760 + }, + { + "epoch": 0.885592451460715, + "grad_norm": 0.10846352276878755, + "learning_rate": 3.393581291749953e-05, + "loss": 1.5208, + "step": 9761 + }, + { + "epoch": 0.8856831790963527, + "grad_norm": 0.10683980536759023, + "learning_rate": 3.388262666660768e-05, + "loss": 1.5152, + "step": 9762 + }, + { + "epoch": 0.8857739067319905, + "grad_norm": 0.10712800014045229, + "learning_rate": 3.382948066501951e-05, + "loss": 1.5198, + "step": 9763 + }, + { + "epoch": 0.8858646343676284, + "grad_norm": 0.10586003439250655, + "learning_rate": 3.3776374917324606e-05, + "loss": 1.5262, + "step": 9764 + }, + { + "epoch": 0.8859553620032662, + "grad_norm": 0.11369763602353022, + "learning_rate": 3.3723309428108416e-05, + "loss": 1.5557, + "step": 9765 + }, + { + "epoch": 0.886046089638904, + "grad_norm": 0.10705182479710183, + "learning_rate": 3.367028420195306e-05, + "loss": 1.5516, + "step": 9766 + }, + { + "epoch": 0.8861368172745419, + "grad_norm": 0.11138516659915111, + "learning_rate": 3.361729924343754e-05, + "loss": 1.5586, + "step": 9767 + }, + { + "epoch": 0.8862275449101796, + "grad_norm": 0.10657065911747535, + "learning_rate": 3.356435455713691e-05, + "loss": 1.5662, + "step": 9768 + }, + { + "epoch": 0.8863182725458174, + "grad_norm": 0.1027190637021106, + "learning_rate": 3.351145014762297e-05, + "loss": 1.511, + "step": 9769 + }, + { + "epoch": 0.8864090001814553, + "grad_norm": 0.10665357298684663, + "learning_rate": 3.345858601946411e-05, + "loss": 1.5639, + "step": 9770 + }, + { + "epoch": 0.8864997278170931, + "grad_norm": 0.10774302352730439, + "learning_rate": 3.340576217722508e-05, + "loss": 1.5199, + "step": 9771 + }, + { + "epoch": 0.8865904554527309, + "grad_norm": 0.10848309071546998, + "learning_rate": 3.3352978625467265e-05, + "loss": 1.5507, + "step": 9772 + }, + { + "epoch": 0.8866811830883687, + "grad_norm": 0.10499325832726598, + "learning_rate": 3.330023536874849e-05, + "loss": 1.5249, + "step": 9773 + }, + { + "epoch": 0.8867719107240065, + "grad_norm": 0.11901268886447879, + "learning_rate": 3.324753241162326e-05, + "loss": 1.5554, + "step": 9774 + }, + { + "epoch": 0.8868626383596443, + "grad_norm": 0.10467669290217795, + "learning_rate": 3.319486975864239e-05, + "loss": 1.539, + "step": 9775 + }, + { + "epoch": 
0.8869533659952822, + "grad_norm": 0.10892748598144944, + "learning_rate": 3.3142247414353235e-05, + "loss": 1.5465, + "step": 9776 + }, + { + "epoch": 0.88704409363092, + "grad_norm": 0.10603324668984504, + "learning_rate": 3.3089665383299885e-05, + "loss": 1.5618, + "step": 9777 + }, + { + "epoch": 0.8871348212665577, + "grad_norm": 0.10882355004934431, + "learning_rate": 3.303712367002276e-05, + "loss": 1.5372, + "step": 9778 + }, + { + "epoch": 0.8872255489021956, + "grad_norm": 0.10498930332686728, + "learning_rate": 3.298462227905874e-05, + "loss": 1.5655, + "step": 9779 + }, + { + "epoch": 0.8873162765378334, + "grad_norm": 0.10414030275649191, + "learning_rate": 3.293216121494142e-05, + "loss": 1.5382, + "step": 9780 + }, + { + "epoch": 0.8874070041734713, + "grad_norm": 0.10389816726952937, + "learning_rate": 3.2879740482200935e-05, + "loss": 1.5401, + "step": 9781 + }, + { + "epoch": 0.8874977318091091, + "grad_norm": 0.10683660092589144, + "learning_rate": 3.2827360085363636e-05, + "loss": 1.5571, + "step": 9782 + }, + { + "epoch": 0.8875884594447468, + "grad_norm": 0.10638839105884235, + "learning_rate": 3.277502002895261e-05, + "loss": 1.5376, + "step": 9783 + }, + { + "epoch": 0.8876791870803847, + "grad_norm": 0.10649738000828651, + "learning_rate": 3.272272031748758e-05, + "loss": 1.602, + "step": 9784 + }, + { + "epoch": 0.8877699147160225, + "grad_norm": 0.10677828512806133, + "learning_rate": 3.2670460955484426e-05, + "loss": 1.5439, + "step": 9785 + }, + { + "epoch": 0.8878606423516603, + "grad_norm": 0.1076219513111383, + "learning_rate": 3.261824194745583e-05, + "loss": 1.5519, + "step": 9786 + }, + { + "epoch": 0.8879513699872982, + "grad_norm": 0.105136115197493, + "learning_rate": 3.256606329791095e-05, + "loss": 1.5347, + "step": 9787 + }, + { + "epoch": 0.888042097622936, + "grad_norm": 0.10235789250362032, + "learning_rate": 3.2513925011355414e-05, + "loss": 1.5651, + "step": 9788 + }, + { + "epoch": 0.8881328252585737, + "grad_norm": 0.10594173101178983, + "learning_rate": 3.24618270922914e-05, + "loss": 1.5014, + "step": 9789 + }, + { + "epoch": 0.8882235528942116, + "grad_norm": 0.10960329338817476, + "learning_rate": 3.2409769545217406e-05, + "loss": 1.5712, + "step": 9790 + }, + { + "epoch": 0.8883142805298494, + "grad_norm": 0.10808549144121518, + "learning_rate": 3.235775237462885e-05, + "loss": 1.5604, + "step": 9791 + }, + { + "epoch": 0.8884050081654872, + "grad_norm": 0.1061196188188463, + "learning_rate": 3.2305775585017294e-05, + "loss": 1.5492, + "step": 9792 + }, + { + "epoch": 0.8884957358011251, + "grad_norm": 0.10542912756493826, + "learning_rate": 3.225383918087083e-05, + "loss": 1.515, + "step": 9793 + }, + { + "epoch": 0.8885864634367628, + "grad_norm": 0.11045654683719573, + "learning_rate": 3.220194316667435e-05, + "loss": 1.5954, + "step": 9794 + }, + { + "epoch": 0.8886771910724006, + "grad_norm": 0.10951340121830908, + "learning_rate": 3.215008754690907e-05, + "loss": 1.54, + "step": 9795 + }, + { + "epoch": 0.8887679187080385, + "grad_norm": 0.10707448424521787, + "learning_rate": 3.209827232605267e-05, + "loss": 1.5538, + "step": 9796 + }, + { + "epoch": 0.8888586463436763, + "grad_norm": 0.10505504716469372, + "learning_rate": 3.2046497508579355e-05, + "loss": 1.5373, + "step": 9797 + }, + { + "epoch": 0.8889493739793141, + "grad_norm": 0.11188411804968038, + "learning_rate": 3.199476309895999e-05, + "loss": 1.5239, + "step": 9798 + }, + { + "epoch": 0.889040101614952, + "grad_norm": 0.1054677366680371, + "learning_rate": 
3.1943069101661834e-05, + "loss": 1.4972, + "step": 9799 + }, + { + "epoch": 0.8891308292505897, + "grad_norm": 0.1053559135330168, + "learning_rate": 3.18914155211486e-05, + "loss": 1.498, + "step": 9800 + }, + { + "epoch": 0.8892215568862275, + "grad_norm": 0.10469709494089291, + "learning_rate": 3.183980236188066e-05, + "loss": 1.5127, + "step": 9801 + }, + { + "epoch": 0.8893122845218654, + "grad_norm": 0.10769140498068536, + "learning_rate": 3.1788229628314737e-05, + "loss": 1.5213, + "step": 9802 + }, + { + "epoch": 0.8894030121575032, + "grad_norm": 0.1095234178409645, + "learning_rate": 3.1736697324904304e-05, + "loss": 1.5782, + "step": 9803 + }, + { + "epoch": 0.889493739793141, + "grad_norm": 0.10598704768774911, + "learning_rate": 3.168520545609893e-05, + "loss": 1.5751, + "step": 9804 + }, + { + "epoch": 0.8895844674287788, + "grad_norm": 0.11093819321533128, + "learning_rate": 3.163375402634516e-05, + "loss": 1.5712, + "step": 9805 + }, + { + "epoch": 0.8896751950644166, + "grad_norm": 0.10862183042762445, + "learning_rate": 3.1582343040085836e-05, + "loss": 1.5267, + "step": 9806 + }, + { + "epoch": 0.8897659227000544, + "grad_norm": 0.10486168552602342, + "learning_rate": 3.153097250176007e-05, + "loss": 1.5511, + "step": 9807 + }, + { + "epoch": 0.8898566503356923, + "grad_norm": 0.10745144123505407, + "learning_rate": 3.147964241580398e-05, + "loss": 1.5564, + "step": 9808 + }, + { + "epoch": 0.8899473779713301, + "grad_norm": 0.10551813496892144, + "learning_rate": 3.142835278664985e-05, + "loss": 1.52, + "step": 9809 + }, + { + "epoch": 0.8900381056069678, + "grad_norm": 0.10469406448286735, + "learning_rate": 3.137710361872642e-05, + "loss": 1.5106, + "step": 9810 + }, + { + "epoch": 0.8901288332426057, + "grad_norm": 0.10320234395094798, + "learning_rate": 3.132589491645915e-05, + "loss": 1.5377, + "step": 9811 + }, + { + "epoch": 0.8902195608782435, + "grad_norm": 0.10470756233191438, + "learning_rate": 3.127472668427001e-05, + "loss": 1.5273, + "step": 9812 + }, + { + "epoch": 0.8903102885138813, + "grad_norm": 0.10466577760317856, + "learning_rate": 3.122359892657728e-05, + "loss": 1.5348, + "step": 9813 + }, + { + "epoch": 0.8904010161495192, + "grad_norm": 0.10654790194984011, + "learning_rate": 3.117251164779583e-05, + "loss": 1.5725, + "step": 9814 + }, + { + "epoch": 0.8904917437851569, + "grad_norm": 0.10372208666889346, + "learning_rate": 3.112146485233719e-05, + "loss": 1.5544, + "step": 9815 + }, + { + "epoch": 0.8905824714207947, + "grad_norm": 0.1045306839560289, + "learning_rate": 3.10704585446091e-05, + "loss": 1.5733, + "step": 9816 + }, + { + "epoch": 0.8906731990564326, + "grad_norm": 0.10534069741295896, + "learning_rate": 3.1019492729016e-05, + "loss": 1.472, + "step": 9817 + }, + { + "epoch": 0.8907639266920704, + "grad_norm": 0.10503382075622385, + "learning_rate": 3.096856740995885e-05, + "loss": 1.5197, + "step": 9818 + }, + { + "epoch": 0.8908546543277083, + "grad_norm": 0.10840843751934627, + "learning_rate": 3.091768259183503e-05, + "loss": 1.5556, + "step": 9819 + }, + { + "epoch": 0.890945381963346, + "grad_norm": 0.10611128743992577, + "learning_rate": 3.086683827903852e-05, + "loss": 1.5198, + "step": 9820 + }, + { + "epoch": 0.8910361095989838, + "grad_norm": 0.10807481046511969, + "learning_rate": 3.081603447595954e-05, + "loss": 1.565, + "step": 9821 + }, + { + "epoch": 0.8911268372346217, + "grad_norm": 0.10557242540573884, + "learning_rate": 3.076527118698524e-05, + "loss": 1.555, + "step": 9822 + }, + { + "epoch": 
0.8912175648702595, + "grad_norm": 0.10725706919894021, + "learning_rate": 3.071454841649896e-05, + "loss": 1.5464, + "step": 9823 + }, + { + "epoch": 0.8913082925058973, + "grad_norm": 0.105141889715143, + "learning_rate": 3.0663866168880504e-05, + "loss": 1.5215, + "step": 9824 + }, + { + "epoch": 0.8913990201415352, + "grad_norm": 0.10414797427842273, + "learning_rate": 3.061322444850639e-05, + "loss": 1.5198, + "step": 9825 + }, + { + "epoch": 0.8914897477771729, + "grad_norm": 0.1108507458182355, + "learning_rate": 3.056262325974951e-05, + "loss": 1.5492, + "step": 9826 + }, + { + "epoch": 0.8915804754128107, + "grad_norm": 0.10802109762509353, + "learning_rate": 3.051206260697931e-05, + "loss": 1.5227, + "step": 9827 + }, + { + "epoch": 0.8916712030484486, + "grad_norm": 0.11135716281988159, + "learning_rate": 3.0461542494561733e-05, + "loss": 1.5327, + "step": 9828 + }, + { + "epoch": 0.8917619306840864, + "grad_norm": 0.11125130014004196, + "learning_rate": 3.041106292685919e-05, + "loss": 1.5291, + "step": 9829 + }, + { + "epoch": 0.8918526583197242, + "grad_norm": 0.10671525457285422, + "learning_rate": 3.0360623908230524e-05, + "loss": 1.5887, + "step": 9830 + }, + { + "epoch": 0.891943385955362, + "grad_norm": 0.10884499451552086, + "learning_rate": 3.0310225443031193e-05, + "loss": 1.5021, + "step": 9831 + }, + { + "epoch": 0.8920341135909998, + "grad_norm": 0.10560101413970219, + "learning_rate": 3.025986753561316e-05, + "loss": 1.5438, + "step": 9832 + }, + { + "epoch": 0.8921248412266376, + "grad_norm": 0.11042815459701161, + "learning_rate": 3.020955019032473e-05, + "loss": 1.5424, + "step": 9833 + }, + { + "epoch": 0.8922155688622755, + "grad_norm": 0.10370770055386447, + "learning_rate": 3.0159273411510866e-05, + "loss": 1.532, + "step": 9834 + }, + { + "epoch": 0.8923062964979133, + "grad_norm": 0.10809736509894186, + "learning_rate": 3.01090372035131e-05, + "loss": 1.5848, + "step": 9835 + }, + { + "epoch": 0.892397024133551, + "grad_norm": 0.10404612991237758, + "learning_rate": 3.0058841570669183e-05, + "loss": 1.5057, + "step": 9836 + }, + { + "epoch": 0.8924877517691889, + "grad_norm": 0.10644134472554692, + "learning_rate": 3.0008686517313532e-05, + "loss": 1.4987, + "step": 9837 + }, + { + "epoch": 0.8925784794048267, + "grad_norm": 0.10559311167086183, + "learning_rate": 2.9958572047777132e-05, + "loss": 1.5565, + "step": 9838 + }, + { + "epoch": 0.8926692070404645, + "grad_norm": 0.10859170006099465, + "learning_rate": 2.9908498166387298e-05, + "loss": 1.5732, + "step": 9839 + }, + { + "epoch": 0.8927599346761024, + "grad_norm": 0.10586151140603851, + "learning_rate": 2.9858464877467905e-05, + "loss": 1.5387, + "step": 9840 + }, + { + "epoch": 0.8928506623117402, + "grad_norm": 0.10495202471001001, + "learning_rate": 2.980847218533922e-05, + "loss": 1.5007, + "step": 9841 + }, + { + "epoch": 0.8929413899473779, + "grad_norm": 0.10585814678754499, + "learning_rate": 2.9758520094318455e-05, + "loss": 1.5513, + "step": 9842 + }, + { + "epoch": 0.8930321175830158, + "grad_norm": 0.10542349607693163, + "learning_rate": 2.9708608608718768e-05, + "loss": 1.5488, + "step": 9843 + }, + { + "epoch": 0.8931228452186536, + "grad_norm": 0.10701743345297884, + "learning_rate": 2.9658737732849937e-05, + "loss": 1.5349, + "step": 9844 + }, + { + "epoch": 0.8932135728542914, + "grad_norm": 0.1041341547173952, + "learning_rate": 2.960890747101852e-05, + "loss": 1.5111, + "step": 9845 + }, + { + "epoch": 0.8933043004899293, + "grad_norm": 0.10908877955852349, + "learning_rate": 
2.9559117827527292e-05, + "loss": 1.51, + "step": 9846 + }, + { + "epoch": 0.893395028125567, + "grad_norm": 0.10870550236038867, + "learning_rate": 2.9509368806675485e-05, + "loss": 1.5435, + "step": 9847 + }, + { + "epoch": 0.8934857557612048, + "grad_norm": 0.10961927573156252, + "learning_rate": 2.9459660412759103e-05, + "loss": 1.5651, + "step": 9848 + }, + { + "epoch": 0.8935764833968427, + "grad_norm": 0.10874803215132899, + "learning_rate": 2.940999265007033e-05, + "loss": 1.5367, + "step": 9849 + }, + { + "epoch": 0.8936672110324805, + "grad_norm": 0.11000333324886383, + "learning_rate": 2.9360365522898113e-05, + "loss": 1.5951, + "step": 9850 + }, + { + "epoch": 0.8937579386681183, + "grad_norm": 0.10855315931093852, + "learning_rate": 2.931077903552759e-05, + "loss": 1.5418, + "step": 9851 + }, + { + "epoch": 0.8938486663037561, + "grad_norm": 0.10960568405854193, + "learning_rate": 2.9261233192240775e-05, + "loss": 1.5453, + "step": 9852 + }, + { + "epoch": 0.8939393939393939, + "grad_norm": 0.10841398832302969, + "learning_rate": 2.921172799731586e-05, + "loss": 1.5759, + "step": 9853 + }, + { + "epoch": 0.8940301215750317, + "grad_norm": 0.11162854569265027, + "learning_rate": 2.9162263455027538e-05, + "loss": 1.5543, + "step": 9854 + }, + { + "epoch": 0.8941208492106696, + "grad_norm": 0.1020473344345464, + "learning_rate": 2.911283956964722e-05, + "loss": 1.5686, + "step": 9855 + }, + { + "epoch": 0.8942115768463074, + "grad_norm": 0.10675842894426944, + "learning_rate": 2.9063456345442662e-05, + "loss": 1.5484, + "step": 9856 + }, + { + "epoch": 0.8943023044819453, + "grad_norm": 0.10621893474849227, + "learning_rate": 2.90141137866779e-05, + "loss": 1.4786, + "step": 9857 + }, + { + "epoch": 0.894393032117583, + "grad_norm": 0.10631284197563375, + "learning_rate": 2.8964811897613907e-05, + "loss": 1.5191, + "step": 9858 + }, + { + "epoch": 0.8944837597532208, + "grad_norm": 0.11213141992319527, + "learning_rate": 2.891555068250784e-05, + "loss": 1.5251, + "step": 9859 + }, + { + "epoch": 0.8945744873888587, + "grad_norm": 0.10919177441554997, + "learning_rate": 2.886633014561346e-05, + "loss": 1.5233, + "step": 9860 + }, + { + "epoch": 0.8946652150244965, + "grad_norm": 0.10646455047126349, + "learning_rate": 2.8817150291180815e-05, + "loss": 1.5254, + "step": 9861 + }, + { + "epoch": 0.8947559426601343, + "grad_norm": 0.10959259846698521, + "learning_rate": 2.876801112345673e-05, + "loss": 1.555, + "step": 9862 + }, + { + "epoch": 0.8948466702957721, + "grad_norm": 0.10481285492465228, + "learning_rate": 2.871891264668436e-05, + "loss": 1.5211, + "step": 9863 + }, + { + "epoch": 0.8949373979314099, + "grad_norm": 0.10425670434075494, + "learning_rate": 2.8669854865103262e-05, + "loss": 1.5157, + "step": 9864 + }, + { + "epoch": 0.8950281255670477, + "grad_norm": 0.10842158444204333, + "learning_rate": 2.862083778294966e-05, + "loss": 1.5467, + "step": 9865 + }, + { + "epoch": 0.8951188532026856, + "grad_norm": 0.10694391229680605, + "learning_rate": 2.8571861404456277e-05, + "loss": 1.5316, + "step": 9866 + }, + { + "epoch": 0.8952095808383234, + "grad_norm": 0.10737804051346181, + "learning_rate": 2.8522925733852177e-05, + "loss": 1.5565, + "step": 9867 + }, + { + "epoch": 0.8953003084739611, + "grad_norm": 0.10241379538625484, + "learning_rate": 2.8474030775362814e-05, + "loss": 1.5199, + "step": 9868 + }, + { + "epoch": 0.895391036109599, + "grad_norm": 0.10578833850308929, + "learning_rate": 2.8425176533210474e-05, + "loss": 1.5576, + "step": 9869 + }, + { + "epoch": 
0.8954817637452368, + "grad_norm": 0.1068631119545361, + "learning_rate": 2.8376363011613615e-05, + "loss": 1.5153, + "step": 9870 + }, + { + "epoch": 0.8955724913808746, + "grad_norm": 0.10724908679863228, + "learning_rate": 2.8327590214787256e-05, + "loss": 1.5075, + "step": 9871 + }, + { + "epoch": 0.8956632190165125, + "grad_norm": 0.1053067677220825, + "learning_rate": 2.827885814694303e-05, + "loss": 1.5462, + "step": 9872 + }, + { + "epoch": 0.8957539466521502, + "grad_norm": 0.10518698224871074, + "learning_rate": 2.823016681228896e-05, + "loss": 1.5109, + "step": 9873 + }, + { + "epoch": 0.895844674287788, + "grad_norm": 0.10359999993774952, + "learning_rate": 2.8181516215029568e-05, + "loss": 1.4703, + "step": 9874 + }, + { + "epoch": 0.8959354019234259, + "grad_norm": 0.10920904085080396, + "learning_rate": 2.8132906359365664e-05, + "loss": 1.5532, + "step": 9875 + }, + { + "epoch": 0.8960261295590637, + "grad_norm": 0.1143729230606985, + "learning_rate": 2.808433724949494e-05, + "loss": 1.525, + "step": 9876 + }, + { + "epoch": 0.8961168571947015, + "grad_norm": 0.10691638206168218, + "learning_rate": 2.803580888961127e-05, + "loss": 1.5191, + "step": 9877 + }, + { + "epoch": 0.8962075848303394, + "grad_norm": 0.10839996886653372, + "learning_rate": 2.7987321283904966e-05, + "loss": 1.5737, + "step": 9878 + }, + { + "epoch": 0.8962983124659771, + "grad_norm": 0.10614990594856141, + "learning_rate": 2.7938874436563122e-05, + "loss": 1.5011, + "step": 9879 + }, + { + "epoch": 0.8963890401016149, + "grad_norm": 0.10355670520319236, + "learning_rate": 2.789046835176895e-05, + "loss": 1.5367, + "step": 9880 + }, + { + "epoch": 0.8964797677372528, + "grad_norm": 0.10577953529566092, + "learning_rate": 2.7842103033702493e-05, + "loss": 1.5278, + "step": 9881 + }, + { + "epoch": 0.8965704953728906, + "grad_norm": 0.1059182219659353, + "learning_rate": 2.7793778486539968e-05, + "loss": 1.5723, + "step": 9882 + }, + { + "epoch": 0.8966612230085284, + "grad_norm": 0.10471647041665842, + "learning_rate": 2.774549471445431e-05, + "loss": 1.5119, + "step": 9883 + }, + { + "epoch": 0.8967519506441662, + "grad_norm": 0.10822455590874791, + "learning_rate": 2.7697251721614858e-05, + "loss": 1.5095, + "step": 9884 + }, + { + "epoch": 0.896842678279804, + "grad_norm": 0.10823790750567439, + "learning_rate": 2.7649049512187162e-05, + "loss": 1.4885, + "step": 9885 + }, + { + "epoch": 0.8969334059154418, + "grad_norm": 0.10567596432503669, + "learning_rate": 2.7600888090333786e-05, + "loss": 1.5231, + "step": 9886 + }, + { + "epoch": 0.8970241335510797, + "grad_norm": 0.10598204105151443, + "learning_rate": 2.7552767460213347e-05, + "loss": 1.5032, + "step": 9887 + }, + { + "epoch": 0.8971148611867175, + "grad_norm": 0.11313138007767104, + "learning_rate": 2.7504687625981016e-05, + "loss": 1.5157, + "step": 9888 + }, + { + "epoch": 0.8972055888223552, + "grad_norm": 0.10495990892672664, + "learning_rate": 2.7456648591788536e-05, + "loss": 1.5113, + "step": 9889 + }, + { + "epoch": 0.8972963164579931, + "grad_norm": 0.10770931720436748, + "learning_rate": 2.7408650361784137e-05, + "loss": 1.539, + "step": 9890 + }, + { + "epoch": 0.8973870440936309, + "grad_norm": 0.1081678121589741, + "learning_rate": 2.736069294011245e-05, + "loss": 1.5431, + "step": 9891 + }, + { + "epoch": 0.8974777717292687, + "grad_norm": 0.10492358964772469, + "learning_rate": 2.7312776330914503e-05, + "loss": 1.5261, + "step": 9892 + }, + { + "epoch": 0.8975684993649066, + "grad_norm": 0.10697204553152159, + "learning_rate": 
2.7264900538328087e-05, + "loss": 1.5439, + "step": 9893 + }, + { + "epoch": 0.8976592270005443, + "grad_norm": 0.10529143351495791, + "learning_rate": 2.7217065566487177e-05, + "loss": 1.5251, + "step": 9894 + }, + { + "epoch": 0.8977499546361822, + "grad_norm": 0.10351174338414165, + "learning_rate": 2.7169271419522247e-05, + "loss": 1.5343, + "step": 9895 + }, + { + "epoch": 0.89784068227182, + "grad_norm": 0.10490252381583769, + "learning_rate": 2.7121518101560382e-05, + "loss": 1.4913, + "step": 9896 + }, + { + "epoch": 0.8979314099074578, + "grad_norm": 0.11024691202592758, + "learning_rate": 2.707380561672529e-05, + "loss": 1.5955, + "step": 9897 + }, + { + "epoch": 0.8980221375430957, + "grad_norm": 0.10808161719155596, + "learning_rate": 2.7026133969136723e-05, + "loss": 1.5138, + "step": 9898 + }, + { + "epoch": 0.8981128651787335, + "grad_norm": 0.10485986726111204, + "learning_rate": 2.6978503162911118e-05, + "loss": 1.5831, + "step": 9899 + }, + { + "epoch": 0.8982035928143712, + "grad_norm": 0.1080580062287693, + "learning_rate": 2.6930913202161566e-05, + "loss": 1.5605, + "step": 9900 + }, + { + "epoch": 0.8982943204500091, + "grad_norm": 0.10627590626573902, + "learning_rate": 2.68833640909974e-05, + "loss": 1.5257, + "step": 9901 + }, + { + "epoch": 0.8983850480856469, + "grad_norm": 0.10666404460692025, + "learning_rate": 2.683585583352438e-05, + "loss": 1.55, + "step": 9902 + }, + { + "epoch": 0.8984757757212847, + "grad_norm": 0.10540913815635387, + "learning_rate": 2.6788388433844956e-05, + "loss": 1.5641, + "step": 9903 + }, + { + "epoch": 0.8985665033569226, + "grad_norm": 0.10686059527263848, + "learning_rate": 2.6740961896058015e-05, + "loss": 1.5477, + "step": 9904 + }, + { + "epoch": 0.8986572309925603, + "grad_norm": 0.10547003621419712, + "learning_rate": 2.669357622425872e-05, + "loss": 1.5309, + "step": 9905 + }, + { + "epoch": 0.8987479586281981, + "grad_norm": 0.10373901722131203, + "learning_rate": 2.6646231422538913e-05, + "loss": 1.5185, + "step": 9906 + }, + { + "epoch": 0.898838686263836, + "grad_norm": 0.10688045747468383, + "learning_rate": 2.659892749498677e-05, + "loss": 1.526, + "step": 9907 + }, + { + "epoch": 0.8989294138994738, + "grad_norm": 0.10642882464616829, + "learning_rate": 2.655166444568702e-05, + "loss": 1.5385, + "step": 9908 + }, + { + "epoch": 0.8990201415351116, + "grad_norm": 0.10963545735399302, + "learning_rate": 2.6504442278720787e-05, + "loss": 1.5275, + "step": 9909 + }, + { + "epoch": 0.8991108691707494, + "grad_norm": 0.10570692785410775, + "learning_rate": 2.6457260998165755e-05, + "loss": 1.5308, + "step": 9910 + }, + { + "epoch": 0.8992015968063872, + "grad_norm": 0.10657344734213775, + "learning_rate": 2.6410120608096e-05, + "loss": 1.5109, + "step": 9911 + }, + { + "epoch": 0.899292324442025, + "grad_norm": 0.11029205972175433, + "learning_rate": 2.6363021112582153e-05, + "loss": 1.5444, + "step": 9912 + }, + { + "epoch": 0.8993830520776629, + "grad_norm": 0.10818208069863187, + "learning_rate": 2.6315962515691293e-05, + "loss": 1.5197, + "step": 9913 + }, + { + "epoch": 0.8994737797133007, + "grad_norm": 0.10457498359126317, + "learning_rate": 2.6268944821486897e-05, + "loss": 1.5628, + "step": 9914 + }, + { + "epoch": 0.8995645073489384, + "grad_norm": 0.10744373630514199, + "learning_rate": 2.622196803402882e-05, + "loss": 1.5706, + "step": 9915 + }, + { + "epoch": 0.8996552349845763, + "grad_norm": 0.10267209119210054, + "learning_rate": 2.6175032157373714e-05, + "loss": 1.5178, + "step": 9916 + }, + { + "epoch": 
0.8997459626202141, + "grad_norm": 0.11338671735686691, + "learning_rate": 2.6128137195574442e-05, + "loss": 1.5431, + "step": 9917 + }, + { + "epoch": 0.8998366902558519, + "grad_norm": 0.10751921207543559, + "learning_rate": 2.608128315268038e-05, + "loss": 1.5284, + "step": 9918 + }, + { + "epoch": 0.8999274178914898, + "grad_norm": 0.10424038616028836, + "learning_rate": 2.6034470032737177e-05, + "loss": 1.5238, + "step": 9919 + }, + { + "epoch": 0.9000181455271276, + "grad_norm": 0.10977812730771337, + "learning_rate": 2.5987697839787496e-05, + "loss": 1.5645, + "step": 9920 + }, + { + "epoch": 0.9001088731627653, + "grad_norm": 0.10521424345984617, + "learning_rate": 2.5940966577869985e-05, + "loss": 1.4879, + "step": 9921 + }, + { + "epoch": 0.9001996007984032, + "grad_norm": 0.10522730670588618, + "learning_rate": 2.589427625101981e-05, + "loss": 1.5142, + "step": 9922 + }, + { + "epoch": 0.900290328434041, + "grad_norm": 0.10704982215316154, + "learning_rate": 2.5847626863268804e-05, + "loss": 1.5265, + "step": 9923 + }, + { + "epoch": 0.9003810560696788, + "grad_norm": 0.10954746894806278, + "learning_rate": 2.5801018418645128e-05, + "loss": 1.5502, + "step": 9924 + }, + { + "epoch": 0.9004717837053167, + "grad_norm": 0.1071056151269994, + "learning_rate": 2.575445092117329e-05, + "loss": 1.5132, + "step": 9925 + }, + { + "epoch": 0.9005625113409544, + "grad_norm": 0.10653699853060593, + "learning_rate": 2.5707924374874625e-05, + "loss": 1.5525, + "step": 9926 + }, + { + "epoch": 0.9006532389765922, + "grad_norm": 0.10398526369976133, + "learning_rate": 2.5661438783766476e-05, + "loss": 1.5572, + "step": 9927 + }, + { + "epoch": 0.9007439666122301, + "grad_norm": 0.10721037459590703, + "learning_rate": 2.5614994151863136e-05, + "loss": 1.5553, + "step": 9928 + }, + { + "epoch": 0.9008346942478679, + "grad_norm": 0.10837258600006258, + "learning_rate": 2.5568590483174838e-05, + "loss": 1.545, + "step": 9929 + }, + { + "epoch": 0.9009254218835057, + "grad_norm": 0.10998823901186502, + "learning_rate": 2.552222778170876e-05, + "loss": 1.526, + "step": 9930 + }, + { + "epoch": 0.9010161495191435, + "grad_norm": 0.11311457017364897, + "learning_rate": 2.5475906051468267e-05, + "loss": 1.5299, + "step": 9931 + }, + { + "epoch": 0.9011068771547813, + "grad_norm": 0.10568013131341165, + "learning_rate": 2.5429625296453152e-05, + "loss": 1.4789, + "step": 9932 + }, + { + "epoch": 0.9011976047904192, + "grad_norm": 0.10487058011330479, + "learning_rate": 2.5383385520659884e-05, + "loss": 1.4958, + "step": 9933 + }, + { + "epoch": 0.901288332426057, + "grad_norm": 0.10133471152479827, + "learning_rate": 2.5337186728081275e-05, + "loss": 1.4929, + "step": 9934 + }, + { + "epoch": 0.9013790600616948, + "grad_norm": 0.1091344933526716, + "learning_rate": 2.5291028922706405e-05, + "loss": 1.4899, + "step": 9935 + }, + { + "epoch": 0.9014697876973327, + "grad_norm": 0.10886336048995991, + "learning_rate": 2.52449121085212e-05, + "loss": 1.5455, + "step": 9936 + }, + { + "epoch": 0.9015605153329704, + "grad_norm": 0.10590057881143107, + "learning_rate": 2.5198836289507864e-05, + "loss": 1.5563, + "step": 9937 + }, + { + "epoch": 0.9016512429686082, + "grad_norm": 0.10966468144762247, + "learning_rate": 2.5152801469644994e-05, + "loss": 1.512, + "step": 9938 + }, + { + "epoch": 0.9017419706042461, + "grad_norm": 0.10867081123500127, + "learning_rate": 2.510680765290768e-05, + "loss": 1.5018, + "step": 9939 + }, + { + "epoch": 0.9018326982398839, + "grad_norm": 0.10679873883079236, + 
"learning_rate": 2.5060854843267533e-05, + "loss": 1.5559, + "step": 9940 + }, + { + "epoch": 0.9019234258755217, + "grad_norm": 0.10733665544634254, + "learning_rate": 2.5014943044692597e-05, + "loss": 1.6014, + "step": 9941 + }, + { + "epoch": 0.9020141535111595, + "grad_norm": 0.10423247273707976, + "learning_rate": 2.4969072261147198e-05, + "loss": 1.5601, + "step": 9942 + }, + { + "epoch": 0.9021048811467973, + "grad_norm": 0.10996461056239673, + "learning_rate": 2.4923242496592503e-05, + "loss": 1.4939, + "step": 9943 + }, + { + "epoch": 0.9021956087824351, + "grad_norm": 0.1058247555842, + "learning_rate": 2.4877453754985845e-05, + "loss": 1.5214, + "step": 9944 + }, + { + "epoch": 0.902286336418073, + "grad_norm": 0.10750280676524397, + "learning_rate": 2.483170604028112e-05, + "loss": 1.4835, + "step": 9945 + }, + { + "epoch": 0.9023770640537108, + "grad_norm": 0.10669369050038108, + "learning_rate": 2.4785999356428557e-05, + "loss": 1.5023, + "step": 9946 + }, + { + "epoch": 0.9024677916893485, + "grad_norm": 0.10748528663278052, + "learning_rate": 2.4740333707374996e-05, + "loss": 1.5216, + "step": 9947 + }, + { + "epoch": 0.9025585193249864, + "grad_norm": 0.10832409794226569, + "learning_rate": 2.469470909706373e-05, + "loss": 1.5509, + "step": 9948 + }, + { + "epoch": 0.9026492469606242, + "grad_norm": 0.10211064132794985, + "learning_rate": 2.464912552943427e-05, + "loss": 1.5435, + "step": 9949 + }, + { + "epoch": 0.902739974596262, + "grad_norm": 0.10289327374928363, + "learning_rate": 2.4603583008422915e-05, + "loss": 1.533, + "step": 9950 + }, + { + "epoch": 0.9028307022318999, + "grad_norm": 0.10757369659649473, + "learning_rate": 2.4558081537962296e-05, + "loss": 1.511, + "step": 9951 + }, + { + "epoch": 0.9029214298675377, + "grad_norm": 0.10844221052138847, + "learning_rate": 2.451262112198138e-05, + "loss": 1.5141, + "step": 9952 + }, + { + "epoch": 0.9030121575031754, + "grad_norm": 0.1040257383537056, + "learning_rate": 2.44672017644057e-05, + "loss": 1.5427, + "step": 9953 + }, + { + "epoch": 0.9031028851388133, + "grad_norm": 0.10928527964487529, + "learning_rate": 2.442182346915722e-05, + "loss": 1.5289, + "step": 9954 + }, + { + "epoch": 0.9031936127744511, + "grad_norm": 0.10831304577309428, + "learning_rate": 2.4376486240154428e-05, + "loss": 1.5778, + "step": 9955 + }, + { + "epoch": 0.9032843404100889, + "grad_norm": 0.10439558068197992, + "learning_rate": 2.433119008131207e-05, + "loss": 1.4903, + "step": 9956 + }, + { + "epoch": 0.9033750680457268, + "grad_norm": 0.10864283523147551, + "learning_rate": 2.4285934996541635e-05, + "loss": 1.5527, + "step": 9957 + }, + { + "epoch": 0.9034657956813645, + "grad_norm": 0.10419039065293112, + "learning_rate": 2.4240720989750777e-05, + "loss": 1.5523, + "step": 9958 + }, + { + "epoch": 0.9035565233170023, + "grad_norm": 0.1058987731874347, + "learning_rate": 2.4195548064843808e-05, + "loss": 1.568, + "step": 9959 + }, + { + "epoch": 0.9036472509526402, + "grad_norm": 0.10825019971923558, + "learning_rate": 2.415041622572134e-05, + "loss": 1.5102, + "step": 9960 + }, + { + "epoch": 0.903737978588278, + "grad_norm": 0.10622652331382992, + "learning_rate": 2.410532547628064e-05, + "loss": 1.5271, + "step": 9961 + }, + { + "epoch": 0.9038287062239158, + "grad_norm": 0.10568419428405962, + "learning_rate": 2.40602758204152e-05, + "loss": 1.5357, + "step": 9962 + }, + { + "epoch": 0.9039194338595536, + "grad_norm": 0.10680339135763711, + "learning_rate": 2.401526726201503e-05, + "loss": 1.5463, + "step": 9963 + }, + { 
+ "epoch": 0.9040101614951914, + "grad_norm": 0.12319193988140292, + "learning_rate": 2.3970299804966734e-05, + "loss": 1.5049, + "step": 9964 + }, + { + "epoch": 0.9041008891308292, + "grad_norm": 0.10958518917226966, + "learning_rate": 2.3925373453153265e-05, + "loss": 1.563, + "step": 9965 + }, + { + "epoch": 0.9041916167664671, + "grad_norm": 0.1045325902825053, + "learning_rate": 2.3880488210453853e-05, + "loss": 1.4877, + "step": 9966 + }, + { + "epoch": 0.9042823444021049, + "grad_norm": 0.10839945323142502, + "learning_rate": 2.3835644080744455e-05, + "loss": 1.5236, + "step": 9967 + }, + { + "epoch": 0.9043730720377426, + "grad_norm": 0.10529448669536041, + "learning_rate": 2.379084106789747e-05, + "loss": 1.5103, + "step": 9968 + }, + { + "epoch": 0.9044637996733805, + "grad_norm": 0.10436345070926395, + "learning_rate": 2.374607917578153e-05, + "loss": 1.5041, + "step": 9969 + }, + { + "epoch": 0.9045545273090183, + "grad_norm": 0.11030796637576656, + "learning_rate": 2.3701358408261764e-05, + "loss": 1.5663, + "step": 9970 + }, + { + "epoch": 0.9046452549446562, + "grad_norm": 0.10180081043367431, + "learning_rate": 2.3656678769199967e-05, + "loss": 1.5015, + "step": 9971 + }, + { + "epoch": 0.904735982580294, + "grad_norm": 0.1059981848269607, + "learning_rate": 2.361204026245417e-05, + "loss": 1.5369, + "step": 9972 + }, + { + "epoch": 0.9048267102159318, + "grad_norm": 0.10777484670045663, + "learning_rate": 2.3567442891878845e-05, + "loss": 1.5364, + "step": 9973 + }, + { + "epoch": 0.9049174378515696, + "grad_norm": 0.11169570388563446, + "learning_rate": 2.3522886661325073e-05, + "loss": 1.5183, + "step": 9974 + }, + { + "epoch": 0.9050081654872074, + "grad_norm": 0.10193614826754437, + "learning_rate": 2.347837157464028e-05, + "loss": 1.5639, + "step": 9975 + }, + { + "epoch": 0.9050988931228452, + "grad_norm": 0.10575460176213146, + "learning_rate": 2.343389763566839e-05, + "loss": 1.5458, + "step": 9976 + }, + { + "epoch": 0.9051896207584831, + "grad_norm": 0.10807190483822951, + "learning_rate": 2.3389464848249553e-05, + "loss": 1.5139, + "step": 9977 + }, + { + "epoch": 0.9052803483941209, + "grad_norm": 0.10498663717714693, + "learning_rate": 2.334507321622076e-05, + "loss": 1.5411, + "step": 9978 + }, + { + "epoch": 0.9053710760297586, + "grad_norm": 0.10701547997958147, + "learning_rate": 2.330072274341516e-05, + "loss": 1.534, + "step": 9979 + }, + { + "epoch": 0.9054618036653965, + "grad_norm": 0.10395275715963449, + "learning_rate": 2.3256413433662304e-05, + "loss": 1.5398, + "step": 9980 + }, + { + "epoch": 0.9055525313010343, + "grad_norm": 0.10477737924729992, + "learning_rate": 2.3212145290788468e-05, + "loss": 1.5075, + "step": 9981 + }, + { + "epoch": 0.9056432589366721, + "grad_norm": 0.10847858416240232, + "learning_rate": 2.3167918318616198e-05, + "loss": 1.5298, + "step": 9982 + }, + { + "epoch": 0.90573398657231, + "grad_norm": 0.10457343746672132, + "learning_rate": 2.312373252096439e-05, + "loss": 1.5441, + "step": 9983 + }, + { + "epoch": 0.9058247142079477, + "grad_norm": 0.10448412881430844, + "learning_rate": 2.30795879016486e-05, + "loss": 1.4911, + "step": 9984 + }, + { + "epoch": 0.9059154418435855, + "grad_norm": 0.10540887412241311, + "learning_rate": 2.3035484464480728e-05, + "loss": 1.4922, + "step": 9985 + }, + { + "epoch": 0.9060061694792234, + "grad_norm": 0.10702390639626168, + "learning_rate": 2.2991422213269054e-05, + "loss": 1.5229, + "step": 9986 + }, + { + "epoch": 0.9060968971148612, + "grad_norm": 0.10400743407316206, + 
"learning_rate": 2.2947401151818315e-05, + "loss": 1.5128, + "step": 9987 + }, + { + "epoch": 0.906187624750499, + "grad_norm": 0.10664567254641463, + "learning_rate": 2.290342128392986e-05, + "loss": 1.5397, + "step": 9988 + }, + { + "epoch": 0.9062783523861369, + "grad_norm": 0.10582855841842366, + "learning_rate": 2.2859482613401207e-05, + "loss": 1.5428, + "step": 9989 + }, + { + "epoch": 0.9063690800217746, + "grad_norm": 0.10309630837421939, + "learning_rate": 2.2815585144026597e-05, + "loss": 1.5112, + "step": 9990 + }, + { + "epoch": 0.9064598076574124, + "grad_norm": 0.11069973227607648, + "learning_rate": 2.2771728879596608e-05, + "loss": 1.5058, + "step": 9991 + }, + { + "epoch": 0.9065505352930503, + "grad_norm": 0.10551722742366366, + "learning_rate": 2.2727913823898104e-05, + "loss": 1.5549, + "step": 9992 + }, + { + "epoch": 0.9066412629286881, + "grad_norm": 0.10760429997970279, + "learning_rate": 2.268413998071456e-05, + "loss": 1.5511, + "step": 9993 + }, + { + "epoch": 0.9067319905643259, + "grad_norm": 0.10593793437320607, + "learning_rate": 2.2640407353825944e-05, + "loss": 1.5322, + "step": 9994 + }, + { + "epoch": 0.9068227181999637, + "grad_norm": 0.11993355678533625, + "learning_rate": 2.2596715947008518e-05, + "loss": 1.534, + "step": 9995 + }, + { + "epoch": 0.9069134458356015, + "grad_norm": 0.11226966313906073, + "learning_rate": 2.255306576403493e-05, + "loss": 1.5767, + "step": 9996 + }, + { + "epoch": 0.9070041734712393, + "grad_norm": 0.10601206588576052, + "learning_rate": 2.25094568086745e-05, + "loss": 1.4941, + "step": 9997 + }, + { + "epoch": 0.9070949011068772, + "grad_norm": 0.10458547923679462, + "learning_rate": 2.2465889084692935e-05, + "loss": 1.5124, + "step": 9998 + }, + { + "epoch": 0.907185628742515, + "grad_norm": 0.10751303337763107, + "learning_rate": 2.2422362595852232e-05, + "loss": 1.5244, + "step": 9999 + }, + { + "epoch": 0.9072763563781527, + "grad_norm": 0.10440215927050259, + "learning_rate": 2.237887734591082e-05, + "loss": 1.5309, + "step": 10000 + }, + { + "epoch": 0.9073670840137906, + "grad_norm": 0.10388676632769903, + "learning_rate": 2.2335433338623813e-05, + "loss": 1.5376, + "step": 10001 + }, + { + "epoch": 0.9074578116494284, + "grad_norm": 0.10598004584411813, + "learning_rate": 2.229203057774254e-05, + "loss": 1.5626, + "step": 10002 + }, + { + "epoch": 0.9075485392850662, + "grad_norm": 0.10465538621797746, + "learning_rate": 2.2248669067014727e-05, + "loss": 1.5376, + "step": 10003 + }, + { + "epoch": 0.9076392669207041, + "grad_norm": 0.10707676684724944, + "learning_rate": 2.2205348810184876e-05, + "loss": 1.5551, + "step": 10004 + }, + { + "epoch": 0.9077299945563418, + "grad_norm": 0.10725922951777367, + "learning_rate": 2.2162069810993502e-05, + "loss": 1.5011, + "step": 10005 + }, + { + "epoch": 0.9078207221919796, + "grad_norm": 0.1055039346930624, + "learning_rate": 2.2118832073177887e-05, + "loss": 1.5244, + "step": 10006 + }, + { + "epoch": 0.9079114498276175, + "grad_norm": 0.10720059455011931, + "learning_rate": 2.207563560047149e-05, + "loss": 1.5128, + "step": 10007 + }, + { + "epoch": 0.9080021774632553, + "grad_norm": 0.10599973379819515, + "learning_rate": 2.2032480396604436e-05, + "loss": 1.51, + "step": 10008 + }, + { + "epoch": 0.9080929050988932, + "grad_norm": 0.10444722160361078, + "learning_rate": 2.198936646530314e-05, + "loss": 1.4937, + "step": 10009 + }, + { + "epoch": 0.908183632734531, + "grad_norm": 0.1030377396723194, + "learning_rate": 2.194629381029051e-05, + "loss": 1.5609, + 
"step": 10010 + }, + { + "epoch": 0.9082743603701687, + "grad_norm": 0.10671706689537759, + "learning_rate": 2.190326243528584e-05, + "loss": 1.5268, + "step": 10011 + }, + { + "epoch": 0.9083650880058066, + "grad_norm": 0.110981269276906, + "learning_rate": 2.186027234400495e-05, + "loss": 1.5412, + "step": 10012 + }, + { + "epoch": 0.9084558156414444, + "grad_norm": 0.10815722859382365, + "learning_rate": 2.1817323540159973e-05, + "loss": 1.5266, + "step": 10013 + }, + { + "epoch": 0.9085465432770822, + "grad_norm": 0.10645056611099041, + "learning_rate": 2.177441602745961e-05, + "loss": 1.4826, + "step": 10014 + }, + { + "epoch": 0.9086372709127201, + "grad_norm": 0.1032821133964603, + "learning_rate": 2.1731549809608898e-05, + "loss": 1.5667, + "step": 10015 + }, + { + "epoch": 0.9087279985483578, + "grad_norm": 0.10498001348423787, + "learning_rate": 2.168872489030943e-05, + "loss": 1.5275, + "step": 10016 + }, + { + "epoch": 0.9088187261839956, + "grad_norm": 0.10365461677243257, + "learning_rate": 2.164594127325892e-05, + "loss": 1.5537, + "step": 10017 + }, + { + "epoch": 0.9089094538196335, + "grad_norm": 0.1112849630619739, + "learning_rate": 2.1603198962152014e-05, + "loss": 1.5465, + "step": 10018 + }, + { + "epoch": 0.9090001814552713, + "grad_norm": 0.10944196698348962, + "learning_rate": 2.1560497960679327e-05, + "loss": 1.5405, + "step": 10019 + }, + { + "epoch": 0.9090909090909091, + "grad_norm": 0.10855126514238646, + "learning_rate": 2.1517838272528124e-05, + "loss": 1.565, + "step": 10020 + }, + { + "epoch": 0.909181636726547, + "grad_norm": 0.10654851771881588, + "learning_rate": 2.1475219901382124e-05, + "loss": 1.531, + "step": 10021 + }, + { + "epoch": 0.9092723643621847, + "grad_norm": 0.10570399069853993, + "learning_rate": 2.1432642850921445e-05, + "loss": 1.5104, + "step": 10022 + }, + { + "epoch": 0.9093630919978225, + "grad_norm": 0.10480710445713039, + "learning_rate": 2.139010712482259e-05, + "loss": 1.5217, + "step": 10023 + }, + { + "epoch": 0.9094538196334604, + "grad_norm": 0.10559569146801895, + "learning_rate": 2.1347612726758502e-05, + "loss": 1.5568, + "step": 10024 + }, + { + "epoch": 0.9095445472690982, + "grad_norm": 0.1069442029340883, + "learning_rate": 2.1305159660398645e-05, + "loss": 1.5458, + "step": 10025 + }, + { + "epoch": 0.909635274904736, + "grad_norm": 0.10214186344791261, + "learning_rate": 2.12627479294088e-05, + "loss": 1.4969, + "step": 10026 + }, + { + "epoch": 0.9097260025403738, + "grad_norm": 0.10660897566918634, + "learning_rate": 2.122037753745115e-05, + "loss": 1.5428, + "step": 10027 + }, + { + "epoch": 0.9098167301760116, + "grad_norm": 0.11053098614184008, + "learning_rate": 2.117804848818444e-05, + "loss": 1.5271, + "step": 10028 + }, + { + "epoch": 0.9099074578116494, + "grad_norm": 0.10833885284562744, + "learning_rate": 2.1135760785263912e-05, + "loss": 1.5198, + "step": 10029 + }, + { + "epoch": 0.9099981854472873, + "grad_norm": 0.10485768390724551, + "learning_rate": 2.109351443234103e-05, + "loss": 1.5191, + "step": 10030 + }, + { + "epoch": 0.910088913082925, + "grad_norm": 0.10772418583488444, + "learning_rate": 2.1051309433063715e-05, + "loss": 1.5489, + "step": 10031 + }, + { + "epoch": 0.9101796407185628, + "grad_norm": 0.10686265270095097, + "learning_rate": 2.1009145791076434e-05, + "loss": 1.512, + "step": 10032 + }, + { + "epoch": 0.9102703683542007, + "grad_norm": 0.10736346734766562, + "learning_rate": 2.0967023510020056e-05, + "loss": 1.5538, + "step": 10033 + }, + { + "epoch": 0.9103610959898385, + 
"grad_norm": 0.10939256477750393, + "learning_rate": 2.092494259353167e-05, + "loss": 1.5629, + "step": 10034 + }, + { + "epoch": 0.9104518236254763, + "grad_norm": 0.10891941784501602, + "learning_rate": 2.0882903045245205e-05, + "loss": 1.5418, + "step": 10035 + }, + { + "epoch": 0.9105425512611142, + "grad_norm": 0.10495829103910674, + "learning_rate": 2.0840904868790645e-05, + "loss": 1.5188, + "step": 10036 + }, + { + "epoch": 0.9106332788967519, + "grad_norm": 0.10673276656646802, + "learning_rate": 2.079894806779459e-05, + "loss": 1.4958, + "step": 10037 + }, + { + "epoch": 0.9107240065323897, + "grad_norm": 0.10532273758045646, + "learning_rate": 2.0757032645879914e-05, + "loss": 1.5166, + "step": 10038 + }, + { + "epoch": 0.9108147341680276, + "grad_norm": 0.1076983434690284, + "learning_rate": 2.071515860666623e-05, + "loss": 1.4917, + "step": 10039 + }, + { + "epoch": 0.9109054618036654, + "grad_norm": 0.10618303806076912, + "learning_rate": 2.0673325953769194e-05, + "loss": 1.5242, + "step": 10040 + }, + { + "epoch": 0.9109961894393032, + "grad_norm": 0.10818101057655666, + "learning_rate": 2.0631534690801034e-05, + "loss": 1.5614, + "step": 10041 + }, + { + "epoch": 0.911086917074941, + "grad_norm": 0.10554716673956613, + "learning_rate": 2.0589784821370638e-05, + "loss": 1.5258, + "step": 10042 + }, + { + "epoch": 0.9111776447105788, + "grad_norm": 0.11100535734034117, + "learning_rate": 2.05480763490829e-05, + "loss": 1.553, + "step": 10043 + }, + { + "epoch": 0.9112683723462166, + "grad_norm": 0.10961985897863895, + "learning_rate": 2.0506409277539383e-05, + "loss": 1.5461, + "step": 10044 + }, + { + "epoch": 0.9113590999818545, + "grad_norm": 0.10739505847805932, + "learning_rate": 2.0464783610338156e-05, + "loss": 1.489, + "step": 10045 + }, + { + "epoch": 0.9114498276174923, + "grad_norm": 0.1045964346622498, + "learning_rate": 2.0423199351073563e-05, + "loss": 1.5176, + "step": 10046 + }, + { + "epoch": 0.9115405552531302, + "grad_norm": 0.10607675766452802, + "learning_rate": 2.0381656503336344e-05, + "loss": 1.5381, + "step": 10047 + }, + { + "epoch": 0.9116312828887679, + "grad_norm": 0.10792952134599629, + "learning_rate": 2.0340155070713796e-05, + "loss": 1.5481, + "step": 10048 + }, + { + "epoch": 0.9117220105244057, + "grad_norm": 0.11043631474245033, + "learning_rate": 2.029869505678955e-05, + "loss": 1.5521, + "step": 10049 + }, + { + "epoch": 0.9118127381600436, + "grad_norm": 0.1059245174416806, + "learning_rate": 2.0257276465143692e-05, + "loss": 1.5341, + "step": 10050 + }, + { + "epoch": 0.9119034657956814, + "grad_norm": 0.10724101321126002, + "learning_rate": 2.021589929935269e-05, + "loss": 1.5348, + "step": 10051 + }, + { + "epoch": 0.9119941934313192, + "grad_norm": 0.10814259649716138, + "learning_rate": 2.0174563562989468e-05, + "loss": 1.5606, + "step": 10052 + }, + { + "epoch": 0.912084921066957, + "grad_norm": 0.10510677734607025, + "learning_rate": 2.01332692596235e-05, + "loss": 1.5604, + "step": 10053 + }, + { + "epoch": 0.9121756487025948, + "grad_norm": 0.10557612343036402, + "learning_rate": 2.0092016392820434e-05, + "loss": 1.5209, + "step": 10054 + }, + { + "epoch": 0.9122663763382326, + "grad_norm": 0.10379405263325545, + "learning_rate": 2.005080496614242e-05, + "loss": 1.5338, + "step": 10055 + }, + { + "epoch": 0.9123571039738705, + "grad_norm": 0.10779354641969752, + "learning_rate": 2.0009634983148173e-05, + "loss": 1.5139, + "step": 10056 + }, + { + "epoch": 0.9124478316095083, + "grad_norm": 0.10682415598705665, + "learning_rate": 
1.996850644739273e-05, + "loss": 1.5081, + "step": 10057 + }, + { + "epoch": 0.912538559245146, + "grad_norm": 0.10749546745603708, + "learning_rate": 1.9927419362427424e-05, + "loss": 1.5311, + "step": 10058 + }, + { + "epoch": 0.9126292868807839, + "grad_norm": 0.11132853676707452, + "learning_rate": 1.988637373180019e-05, + "loss": 1.5621, + "step": 10059 + }, + { + "epoch": 0.9127200145164217, + "grad_norm": 0.10390254558298204, + "learning_rate": 1.984536955905547e-05, + "loss": 1.5073, + "step": 10060 + }, + { + "epoch": 0.9128107421520595, + "grad_norm": 0.11029724367506431, + "learning_rate": 1.9804406847733714e-05, + "loss": 1.5467, + "step": 10061 + }, + { + "epoch": 0.9129014697876974, + "grad_norm": 0.1129437756203454, + "learning_rate": 1.9763485601372312e-05, + "loss": 1.5276, + "step": 10062 + }, + { + "epoch": 0.9129921974233352, + "grad_norm": 0.10982773085967719, + "learning_rate": 1.9722605823504713e-05, + "loss": 1.5609, + "step": 10063 + }, + { + "epoch": 0.9130829250589729, + "grad_norm": 0.10792368384670308, + "learning_rate": 1.9681767517660877e-05, + "loss": 1.5568, + "step": 10064 + }, + { + "epoch": 0.9131736526946108, + "grad_norm": 0.10943035187752506, + "learning_rate": 1.9640970687367144e-05, + "loss": 1.5339, + "step": 10065 + }, + { + "epoch": 0.9132643803302486, + "grad_norm": 0.10659187421019053, + "learning_rate": 1.960021533614642e-05, + "loss": 1.5225, + "step": 10066 + }, + { + "epoch": 0.9133551079658864, + "grad_norm": 0.10776292522946226, + "learning_rate": 1.955950146751789e-05, + "loss": 1.5669, + "step": 10067 + }, + { + "epoch": 0.9134458356015243, + "grad_norm": 0.10681920648295144, + "learning_rate": 1.951882908499719e-05, + "loss": 1.5418, + "step": 10068 + }, + { + "epoch": 0.913536563237162, + "grad_norm": 0.10823529362933765, + "learning_rate": 1.947819819209651e-05, + "loss": 1.5669, + "step": 10069 + }, + { + "epoch": 0.9136272908727998, + "grad_norm": 0.1107159874092024, + "learning_rate": 1.94376087923242e-05, + "loss": 1.5573, + "step": 10070 + }, + { + "epoch": 0.9137180185084377, + "grad_norm": 0.10863454675330753, + "learning_rate": 1.9397060889185137e-05, + "loss": 1.5366, + "step": 10071 + }, + { + "epoch": 0.9138087461440755, + "grad_norm": 0.10635610358849114, + "learning_rate": 1.935655448618079e-05, + "loss": 1.5314, + "step": 10072 + }, + { + "epoch": 0.9138994737797133, + "grad_norm": 0.10742641926986056, + "learning_rate": 1.931608958680875e-05, + "loss": 1.5091, + "step": 10073 + }, + { + "epoch": 0.9139902014153511, + "grad_norm": 0.11034510445426918, + "learning_rate": 1.9275666194563223e-05, + "loss": 1.5702, + "step": 10074 + }, + { + "epoch": 0.9140809290509889, + "grad_norm": 0.1063794995697921, + "learning_rate": 1.9235284312934752e-05, + "loss": 1.5575, + "step": 10075 + }, + { + "epoch": 0.9141716566866267, + "grad_norm": 0.10530644614927351, + "learning_rate": 1.9194943945410382e-05, + "loss": 1.51, + "step": 10076 + }, + { + "epoch": 0.9142623843222646, + "grad_norm": 0.10452511069669404, + "learning_rate": 1.915464509547349e-05, + "loss": 1.5148, + "step": 10077 + }, + { + "epoch": 0.9143531119579024, + "grad_norm": 0.10831542476075182, + "learning_rate": 1.911438776660379e-05, + "loss": 1.509, + "step": 10078 + }, + { + "epoch": 0.9144438395935401, + "grad_norm": 0.1067090092879113, + "learning_rate": 1.907417196227762e-05, + "loss": 1.5512, + "step": 10079 + }, + { + "epoch": 0.914534567229178, + "grad_norm": 0.10962766856531209, + "learning_rate": 1.9033997685967585e-05, + "loss": 1.5574, + "step": 10080 + 
}, + { + "epoch": 0.9146252948648158, + "grad_norm": 0.10670161802000772, + "learning_rate": 1.899386494114269e-05, + "loss": 1.5277, + "step": 10081 + }, + { + "epoch": 0.9147160225004536, + "grad_norm": 0.10631421131302782, + "learning_rate": 1.8953773731268552e-05, + "loss": 1.5321, + "step": 10082 + }, + { + "epoch": 0.9148067501360915, + "grad_norm": 0.10270504483258533, + "learning_rate": 1.891372405980685e-05, + "loss": 1.5299, + "step": 10083 + }, + { + "epoch": 0.9148974777717293, + "grad_norm": 0.10968914137772483, + "learning_rate": 1.8873715930216027e-05, + "loss": 1.4877, + "step": 10084 + }, + { + "epoch": 0.9149882054073671, + "grad_norm": 0.10883947610943666, + "learning_rate": 1.8833749345950668e-05, + "loss": 1.5414, + "step": 10085 + }, + { + "epoch": 0.9150789330430049, + "grad_norm": 0.10508254140813979, + "learning_rate": 1.8793824310462114e-05, + "loss": 1.5258, + "step": 10086 + }, + { + "epoch": 0.9151696606786427, + "grad_norm": 0.10845049881367484, + "learning_rate": 1.8753940827197668e-05, + "loss": 1.5186, + "step": 10087 + }, + { + "epoch": 0.9152603883142806, + "grad_norm": 0.10569257003817313, + "learning_rate": 1.871409889960135e-05, + "loss": 1.5712, + "step": 10088 + }, + { + "epoch": 0.9153511159499184, + "grad_norm": 0.1050691798827049, + "learning_rate": 1.8674298531113574e-05, + "loss": 1.5259, + "step": 10089 + }, + { + "epoch": 0.9154418435855561, + "grad_norm": 0.10888859171637069, + "learning_rate": 1.863453972517104e-05, + "loss": 1.5493, + "step": 10090 + }, + { + "epoch": 0.915532571221194, + "grad_norm": 0.10567124313624275, + "learning_rate": 1.8594822485206886e-05, + "loss": 1.4963, + "step": 10091 + }, + { + "epoch": 0.9156232988568318, + "grad_norm": 0.10386199238403485, + "learning_rate": 1.8555146814650814e-05, + "loss": 1.5832, + "step": 10092 + }, + { + "epoch": 0.9157140264924696, + "grad_norm": 0.10328333365685086, + "learning_rate": 1.8515512716928807e-05, + "loss": 1.4845, + "step": 10093 + }, + { + "epoch": 0.9158047541281075, + "grad_norm": 0.11096725216053645, + "learning_rate": 1.847592019546318e-05, + "loss": 1.5278, + "step": 10094 + }, + { + "epoch": 0.9158954817637452, + "grad_norm": 0.10774066224361316, + "learning_rate": 1.8436369253672812e-05, + "loss": 1.5059, + "step": 10095 + }, + { + "epoch": 0.915986209399383, + "grad_norm": 0.10653116374216798, + "learning_rate": 1.8396859894972974e-05, + "loss": 1.5677, + "step": 10096 + }, + { + "epoch": 0.9160769370350209, + "grad_norm": 0.10269952125013691, + "learning_rate": 1.8357392122775318e-05, + "loss": 1.5484, + "step": 10097 + }, + { + "epoch": 0.9161676646706587, + "grad_norm": 0.10322946759211539, + "learning_rate": 1.8317965940487736e-05, + "loss": 1.5254, + "step": 10098 + }, + { + "epoch": 0.9162583923062965, + "grad_norm": 0.10693343614492137, + "learning_rate": 1.827858135151478e-05, + "loss": 1.5274, + "step": 10099 + }, + { + "epoch": 0.9163491199419344, + "grad_norm": 0.10664290064231832, + "learning_rate": 1.8239238359257394e-05, + "loss": 1.566, + "step": 10100 + }, + { + "epoch": 0.9164398475775721, + "grad_norm": 0.10474004067216312, + "learning_rate": 1.8199936967112753e-05, + "loss": 1.5095, + "step": 10101 + }, + { + "epoch": 0.9165305752132099, + "grad_norm": 0.10570309643251545, + "learning_rate": 1.816067717847453e-05, + "loss": 1.539, + "step": 10102 + }, + { + "epoch": 0.9166213028488478, + "grad_norm": 0.11294918906166049, + "learning_rate": 1.8121458996732954e-05, + "loss": 1.5627, + "step": 10103 + }, + { + "epoch": 0.9167120304844856, + 
"grad_norm": 0.10435438652405717, + "learning_rate": 1.808228242527432e-05, + "loss": 1.5231, + "step": 10104 + }, + { + "epoch": 0.9168027581201234, + "grad_norm": 0.1057664351288003, + "learning_rate": 1.8043147467481646e-05, + "loss": 1.5543, + "step": 10105 + }, + { + "epoch": 0.9168934857557612, + "grad_norm": 0.10937504260975031, + "learning_rate": 1.800405412673417e-05, + "loss": 1.5428, + "step": 10106 + }, + { + "epoch": 0.916984213391399, + "grad_norm": 0.10804772274959937, + "learning_rate": 1.796500240640775e-05, + "loss": 1.545, + "step": 10107 + }, + { + "epoch": 0.9170749410270368, + "grad_norm": 0.10477357283040034, + "learning_rate": 1.7925992309874406e-05, + "loss": 1.5318, + "step": 10108 + }, + { + "epoch": 0.9171656686626747, + "grad_norm": 0.10640922437626593, + "learning_rate": 1.7887023840502614e-05, + "loss": 1.4776, + "step": 10109 + }, + { + "epoch": 0.9172563962983125, + "grad_norm": 0.10871984641613333, + "learning_rate": 1.784809700165746e-05, + "loss": 1.5161, + "step": 10110 + }, + { + "epoch": 0.9173471239339502, + "grad_norm": 0.10754329841715805, + "learning_rate": 1.780921179670014e-05, + "loss": 1.541, + "step": 10111 + }, + { + "epoch": 0.9174378515695881, + "grad_norm": 0.10605617814026085, + "learning_rate": 1.777036822898842e-05, + "loss": 1.5539, + "step": 10112 + }, + { + "epoch": 0.9175285792052259, + "grad_norm": 0.11231598456292038, + "learning_rate": 1.7731566301876557e-05, + "loss": 1.5378, + "step": 10113 + }, + { + "epoch": 0.9176193068408637, + "grad_norm": 0.10676294734676546, + "learning_rate": 1.7692806018714923e-05, + "loss": 1.5524, + "step": 10114 + }, + { + "epoch": 0.9177100344765016, + "grad_norm": 0.10650813795821365, + "learning_rate": 1.7654087382850624e-05, + "loss": 1.5109, + "step": 10115 + }, + { + "epoch": 0.9178007621121393, + "grad_norm": 0.10481152643513675, + "learning_rate": 1.761541039762693e-05, + "loss": 1.5008, + "step": 10116 + }, + { + "epoch": 0.9178914897477771, + "grad_norm": 0.10522540061499766, + "learning_rate": 1.757677506638372e-05, + "loss": 1.5146, + "step": 10117 + }, + { + "epoch": 0.917982217383415, + "grad_norm": 0.10631887697842014, + "learning_rate": 1.753818139245705e-05, + "loss": 1.5128, + "step": 10118 + }, + { + "epoch": 0.9180729450190528, + "grad_norm": 0.1145279754207472, + "learning_rate": 1.7499629379179483e-05, + "loss": 1.5606, + "step": 10119 + }, + { + "epoch": 0.9181636726546906, + "grad_norm": 0.10748112928060168, + "learning_rate": 1.7461119029880124e-05, + "loss": 1.5241, + "step": 10120 + }, + { + "epoch": 0.9182544002903285, + "grad_norm": 0.10406504199853772, + "learning_rate": 1.7422650347884205e-05, + "loss": 1.5356, + "step": 10121 + }, + { + "epoch": 0.9183451279259662, + "grad_norm": 0.10987816549599387, + "learning_rate": 1.7384223336513517e-05, + "loss": 1.4962, + "step": 10122 + }, + { + "epoch": 0.9184358555616041, + "grad_norm": 0.10637661320060177, + "learning_rate": 1.734583799908629e-05, + "loss": 1.5213, + "step": 10123 + }, + { + "epoch": 0.9185265831972419, + "grad_norm": 0.10554179656316709, + "learning_rate": 1.730749433891715e-05, + "loss": 1.588, + "step": 10124 + }, + { + "epoch": 0.9186173108328797, + "grad_norm": 0.10704402753071007, + "learning_rate": 1.7269192359316955e-05, + "loss": 1.4951, + "step": 10125 + }, + { + "epoch": 0.9187080384685176, + "grad_norm": 0.10559769958014895, + "learning_rate": 1.7230932063593162e-05, + "loss": 1.4958, + "step": 10126 + }, + { + "epoch": 0.9187987661041553, + "grad_norm": 0.10420536647247493, + "learning_rate": 
1.719271345504958e-05, + "loss": 1.5491, + "step": 10127 + }, + { + "epoch": 0.9188894937397931, + "grad_norm": 0.10715629721076016, + "learning_rate": 1.7154536536986286e-05, + "loss": 1.5584, + "step": 10128 + }, + { + "epoch": 0.918980221375431, + "grad_norm": 0.11074333690850752, + "learning_rate": 1.7116401312699926e-05, + "loss": 1.5728, + "step": 10129 + }, + { + "epoch": 0.9190709490110688, + "grad_norm": 0.10392789846291556, + "learning_rate": 1.7078307785483472e-05, + "loss": 1.5089, + "step": 10130 + }, + { + "epoch": 0.9191616766467066, + "grad_norm": 0.10413183370294556, + "learning_rate": 1.7040255958626404e-05, + "loss": 1.5261, + "step": 10131 + }, + { + "epoch": 0.9192524042823444, + "grad_norm": 0.10719252611997865, + "learning_rate": 1.7002245835414377e-05, + "loss": 1.553, + "step": 10132 + }, + { + "epoch": 0.9193431319179822, + "grad_norm": 0.1068884098476291, + "learning_rate": 1.6964277419129537e-05, + "loss": 1.5025, + "step": 10133 + }, + { + "epoch": 0.91943385955362, + "grad_norm": 0.10765996765613238, + "learning_rate": 1.6926350713050652e-05, + "loss": 1.5721, + "step": 10134 + }, + { + "epoch": 0.9195245871892579, + "grad_norm": 0.10621866819494256, + "learning_rate": 1.6888465720452496e-05, + "loss": 1.5249, + "step": 10135 + }, + { + "epoch": 0.9196153148248957, + "grad_norm": 0.10631046142549355, + "learning_rate": 1.6850622444606555e-05, + "loss": 1.494, + "step": 10136 + }, + { + "epoch": 0.9197060424605334, + "grad_norm": 0.10948159253188974, + "learning_rate": 1.68128208887805e-05, + "loss": 1.5645, + "step": 10137 + }, + { + "epoch": 0.9197967700961713, + "grad_norm": 0.11000147246009406, + "learning_rate": 1.6775061056238717e-05, + "loss": 1.5137, + "step": 10138 + }, + { + "epoch": 0.9198874977318091, + "grad_norm": 0.10675806466195731, + "learning_rate": 1.6737342950241484e-05, + "loss": 1.5137, + "step": 10139 + }, + { + "epoch": 0.9199782253674469, + "grad_norm": 0.10840689390960041, + "learning_rate": 1.6699666574046035e-05, + "loss": 1.547, + "step": 10140 + }, + { + "epoch": 0.9200689530030848, + "grad_norm": 0.10542321713959982, + "learning_rate": 1.6662031930905596e-05, + "loss": 1.5342, + "step": 10141 + }, + { + "epoch": 0.9201596806387226, + "grad_norm": 0.11035375783252929, + "learning_rate": 1.6624439024069847e-05, + "loss": 1.5481, + "step": 10142 + }, + { + "epoch": 0.9202504082743603, + "grad_norm": 0.10735163555433656, + "learning_rate": 1.658688785678514e-05, + "loss": 1.5652, + "step": 10143 + }, + { + "epoch": 0.9203411359099982, + "grad_norm": 0.10501798889363843, + "learning_rate": 1.654937843229387e-05, + "loss": 1.5385, + "step": 10144 + }, + { + "epoch": 0.920431863545636, + "grad_norm": 0.10592956034551773, + "learning_rate": 1.6511910753834957e-05, + "loss": 1.5188, + "step": 10145 + }, + { + "epoch": 0.9205225911812738, + "grad_norm": 0.10901788447472024, + "learning_rate": 1.6474484824643867e-05, + "loss": 1.5252, + "step": 10146 + }, + { + "epoch": 0.9206133188169117, + "grad_norm": 0.10227441945404188, + "learning_rate": 1.6437100647952285e-05, + "loss": 1.4643, + "step": 10147 + }, + { + "epoch": 0.9207040464525494, + "grad_norm": 0.10840461881135421, + "learning_rate": 1.639975822698836e-05, + "loss": 1.546, + "step": 10148 + }, + { + "epoch": 0.9207947740881872, + "grad_norm": 0.10758522049748541, + "learning_rate": 1.636245756497651e-05, + "loss": 1.5617, + "step": 10149 + }, + { + "epoch": 0.9208855017238251, + "grad_norm": 0.11526135114461962, + "learning_rate": 1.6325198665137818e-05, + "loss": 1.5325, + "step": 
10150 + }, + { + "epoch": 0.9209762293594629, + "grad_norm": 0.10404530069488821, + "learning_rate": 1.62879815306895e-05, + "loss": 1.526, + "step": 10151 + }, + { + "epoch": 0.9210669569951007, + "grad_norm": 0.10707990153897336, + "learning_rate": 1.62508061648452e-05, + "loss": 1.5256, + "step": 10152 + }, + { + "epoch": 0.9211576846307385, + "grad_norm": 0.11215059355395957, + "learning_rate": 1.6213672570815073e-05, + "loss": 1.6166, + "step": 10153 + }, + { + "epoch": 0.9212484122663763, + "grad_norm": 0.1102004264148504, + "learning_rate": 1.617658075180567e-05, + "loss": 1.5649, + "step": 10154 + }, + { + "epoch": 0.9213391399020141, + "grad_norm": 0.10363550609463704, + "learning_rate": 1.613953071101987e-05, + "loss": 1.5141, + "step": 10155 + }, + { + "epoch": 0.921429867537652, + "grad_norm": 0.10625699212948002, + "learning_rate": 1.610252245165683e-05, + "loss": 1.4938, + "step": 10156 + }, + { + "epoch": 0.9215205951732898, + "grad_norm": 0.10760000563429546, + "learning_rate": 1.606555597691234e-05, + "loss": 1.5352, + "step": 10157 + }, + { + "epoch": 0.9216113228089275, + "grad_norm": 0.10787796771067304, + "learning_rate": 1.602863128997839e-05, + "loss": 1.5367, + "step": 10158 + }, + { + "epoch": 0.9217020504445654, + "grad_norm": 0.1066420256005593, + "learning_rate": 1.5991748394043436e-05, + "loss": 1.5139, + "step": 10159 + }, + { + "epoch": 0.9217927780802032, + "grad_norm": 0.10471939391793764, + "learning_rate": 1.595490729229243e-05, + "loss": 1.5648, + "step": 10160 + }, + { + "epoch": 0.9218835057158411, + "grad_norm": 0.10450618522335788, + "learning_rate": 1.5918107987906437e-05, + "loss": 1.5425, + "step": 10161 + }, + { + "epoch": 0.9219742333514789, + "grad_norm": 0.10820919643350975, + "learning_rate": 1.5881350484063195e-05, + "loss": 1.5668, + "step": 10162 + }, + { + "epoch": 0.9220649609871167, + "grad_norm": 0.10997600200758835, + "learning_rate": 1.5844634783936664e-05, + "loss": 1.5445, + "step": 10163 + }, + { + "epoch": 0.9221556886227545, + "grad_norm": 0.10267654229192842, + "learning_rate": 1.580796089069736e-05, + "loss": 1.5119, + "step": 10164 + }, + { + "epoch": 0.9222464162583923, + "grad_norm": 0.10758181922251386, + "learning_rate": 1.577132880751192e-05, + "loss": 1.5737, + "step": 10165 + }, + { + "epoch": 0.9223371438940301, + "grad_norm": 0.10934257480193534, + "learning_rate": 1.5734738537543593e-05, + "loss": 1.5334, + "step": 10166 + }, + { + "epoch": 0.922427871529668, + "grad_norm": 0.10940455483778255, + "learning_rate": 1.569819008395207e-05, + "loss": 1.5463, + "step": 10167 + }, + { + "epoch": 0.9225185991653058, + "grad_norm": 0.10648764422283265, + "learning_rate": 1.566168344989316e-05, + "loss": 1.5238, + "step": 10168 + }, + { + "epoch": 0.9226093268009435, + "grad_norm": 0.10560222279238664, + "learning_rate": 1.562521863851918e-05, + "loss": 1.5163, + "step": 10169 + }, + { + "epoch": 0.9227000544365814, + "grad_norm": 0.11074490362540458, + "learning_rate": 1.5588795652978992e-05, + "loss": 1.5474, + "step": 10170 + }, + { + "epoch": 0.9227907820722192, + "grad_norm": 0.10421489777497053, + "learning_rate": 1.5552414496417755e-05, + "loss": 1.5298, + "step": 10171 + }, + { + "epoch": 0.922881509707857, + "grad_norm": 0.10487376316249102, + "learning_rate": 1.5516075171976952e-05, + "loss": 1.4871, + "step": 10172 + }, + { + "epoch": 0.9229722373434949, + "grad_norm": 0.10712636707746397, + "learning_rate": 1.5479777682794405e-05, + "loss": 1.4889, + "step": 10173 + }, + { + "epoch": 0.9230629649791326, + 
"grad_norm": 0.10617575676776143, + "learning_rate": 1.54435220320045e-05, + "loss": 1.5417, + "step": 10174 + }, + { + "epoch": 0.9231536926147704, + "grad_norm": 0.10932695968916958, + "learning_rate": 1.5407308222737836e-05, + "loss": 1.5698, + "step": 10175 + }, + { + "epoch": 0.9232444202504083, + "grad_norm": 0.10726268476305868, + "learning_rate": 1.537113625812153e-05, + "loss": 1.5507, + "step": 10176 + }, + { + "epoch": 0.9233351478860461, + "grad_norm": 0.10959356352950046, + "learning_rate": 1.533500614127903e-05, + "loss": 1.5617, + "step": 10177 + }, + { + "epoch": 0.9234258755216839, + "grad_norm": 0.10638066045725823, + "learning_rate": 1.529891787533022e-05, + "loss": 1.5169, + "step": 10178 + }, + { + "epoch": 0.9235166031573218, + "grad_norm": 0.11019597014207388, + "learning_rate": 1.5262871463391336e-05, + "loss": 1.5471, + "step": 10179 + }, + { + "epoch": 0.9236073307929595, + "grad_norm": 0.10475433039641642, + "learning_rate": 1.5226866908574833e-05, + "loss": 1.5109, + "step": 10180 + }, + { + "epoch": 0.9236980584285973, + "grad_norm": 0.10729204794839112, + "learning_rate": 1.5190904213989887e-05, + "loss": 1.5654, + "step": 10181 + }, + { + "epoch": 0.9237887860642352, + "grad_norm": 0.1092762491520433, + "learning_rate": 1.5154983382741849e-05, + "loss": 1.5366, + "step": 10182 + }, + { + "epoch": 0.923879513699873, + "grad_norm": 0.10610486255855353, + "learning_rate": 1.511910441793235e-05, + "loss": 1.5482, + "step": 10183 + }, + { + "epoch": 0.9239702413355108, + "grad_norm": 0.10672267791595416, + "learning_rate": 1.5083267322659688e-05, + "loss": 1.5285, + "step": 10184 + }, + { + "epoch": 0.9240609689711486, + "grad_norm": 0.10496891958227171, + "learning_rate": 1.5047472100018389e-05, + "loss": 1.568, + "step": 10185 + }, + { + "epoch": 0.9241516966067864, + "grad_norm": 0.10706940923863244, + "learning_rate": 1.5011718753099368e-05, + "loss": 1.5309, + "step": 10186 + }, + { + "epoch": 0.9242424242424242, + "grad_norm": 0.10569875728142165, + "learning_rate": 1.4976007284989879e-05, + "loss": 1.5124, + "step": 10187 + }, + { + "epoch": 0.9243331518780621, + "grad_norm": 0.1063165744513232, + "learning_rate": 1.4940337698773677e-05, + "loss": 1.4917, + "step": 10188 + }, + { + "epoch": 0.9244238795136999, + "grad_norm": 0.10615553859375287, + "learning_rate": 1.4904709997530796e-05, + "loss": 1.5511, + "step": 10189 + }, + { + "epoch": 0.9245146071493376, + "grad_norm": 0.1115295151507142, + "learning_rate": 1.4869124184337668e-05, + "loss": 1.5056, + "step": 10190 + }, + { + "epoch": 0.9246053347849755, + "grad_norm": 0.1052525670241665, + "learning_rate": 1.4833580262267221e-05, + "loss": 1.5692, + "step": 10191 + }, + { + "epoch": 0.9246960624206133, + "grad_norm": 0.10683347023856848, + "learning_rate": 1.4798078234388556e-05, + "loss": 1.5204, + "step": 10192 + }, + { + "epoch": 0.9247867900562511, + "grad_norm": 0.10701951128005269, + "learning_rate": 1.4762618103767388e-05, + "loss": 1.5444, + "step": 10193 + }, + { + "epoch": 0.924877517691889, + "grad_norm": 0.11135043796995939, + "learning_rate": 1.4727199873465602e-05, + "loss": 1.5282, + "step": 10194 + }, + { + "epoch": 0.9249682453275268, + "grad_norm": 0.10900239348324064, + "learning_rate": 1.4691823546541694e-05, + "loss": 1.5009, + "step": 10195 + }, + { + "epoch": 0.9250589729631645, + "grad_norm": 0.1093995352761024, + "learning_rate": 1.4656489126050387e-05, + "loss": 1.5122, + "step": 10196 + }, + { + "epoch": 0.9251497005988024, + "grad_norm": 0.1008411250350193, + 
"learning_rate": 1.4621196615042687e-05, + "loss": 1.5393, + "step": 10197 + }, + { + "epoch": 0.9252404282344402, + "grad_norm": 0.10288980459966808, + "learning_rate": 1.4585946016566265e-05, + "loss": 1.5374, + "step": 10198 + }, + { + "epoch": 0.925331155870078, + "grad_norm": 0.10656586286627147, + "learning_rate": 1.4550737333664966e-05, + "loss": 1.5001, + "step": 10199 + }, + { + "epoch": 0.9254218835057159, + "grad_norm": 0.1063636743550202, + "learning_rate": 1.4515570569378966e-05, + "loss": 1.5285, + "step": 10200 + }, + { + "epoch": 0.9255126111413536, + "grad_norm": 0.11224968918442041, + "learning_rate": 1.4480445726745007e-05, + "loss": 1.5423, + "step": 10201 + }, + { + "epoch": 0.9256033387769915, + "grad_norm": 0.10947733978510049, + "learning_rate": 1.4445362808796159e-05, + "loss": 1.5273, + "step": 10202 + }, + { + "epoch": 0.9256940664126293, + "grad_norm": 0.10868491479784716, + "learning_rate": 1.4410321818561779e-05, + "loss": 1.5249, + "step": 10203 + }, + { + "epoch": 0.9257847940482671, + "grad_norm": 0.10666261386532967, + "learning_rate": 1.4375322759067666e-05, + "loss": 1.5733, + "step": 10204 + }, + { + "epoch": 0.925875521683905, + "grad_norm": 0.10650210104793317, + "learning_rate": 1.4340365633336072e-05, + "loss": 1.5157, + "step": 10205 + }, + { + "epoch": 0.9259662493195427, + "grad_norm": 0.1040283675407322, + "learning_rate": 1.4305450444385414e-05, + "loss": 1.5246, + "step": 10206 + }, + { + "epoch": 0.9260569769551805, + "grad_norm": 0.10936708590039504, + "learning_rate": 1.4270577195230727e-05, + "loss": 1.5631, + "step": 10207 + }, + { + "epoch": 0.9261477045908184, + "grad_norm": 0.10402772767295133, + "learning_rate": 1.4235745888883212e-05, + "loss": 1.5477, + "step": 10208 + }, + { + "epoch": 0.9262384322264562, + "grad_norm": 0.10630484167180328, + "learning_rate": 1.420095652835074e-05, + "loss": 1.5308, + "step": 10209 + }, + { + "epoch": 0.926329159862094, + "grad_norm": 0.10923427494737452, + "learning_rate": 1.4166209116637241e-05, + "loss": 1.5241, + "step": 10210 + }, + { + "epoch": 0.9264198874977319, + "grad_norm": 0.1100527595110928, + "learning_rate": 1.413150365674315e-05, + "loss": 1.5334, + "step": 10211 + }, + { + "epoch": 0.9265106151333696, + "grad_norm": 0.10489191343148366, + "learning_rate": 1.4096840151665403e-05, + "loss": 1.5736, + "step": 10212 + }, + { + "epoch": 0.9266013427690074, + "grad_norm": 0.10675531604840531, + "learning_rate": 1.4062218604397047e-05, + "loss": 1.5208, + "step": 10213 + }, + { + "epoch": 0.9266920704046453, + "grad_norm": 0.10675205024397648, + "learning_rate": 1.4027639017927752e-05, + "loss": 1.4952, + "step": 10214 + }, + { + "epoch": 0.9267827980402831, + "grad_norm": 0.10599006666278642, + "learning_rate": 1.3993101395243457e-05, + "loss": 1.5351, + "step": 10215 + }, + { + "epoch": 0.9268735256759209, + "grad_norm": 0.1064145622228451, + "learning_rate": 1.3958605739326502e-05, + "loss": 1.578, + "step": 10216 + }, + { + "epoch": 0.9269642533115587, + "grad_norm": 0.10606944358076559, + "learning_rate": 1.392415205315556e-05, + "loss": 1.5534, + "step": 10217 + }, + { + "epoch": 0.9270549809471965, + "grad_norm": 0.10858766813488953, + "learning_rate": 1.3889740339705748e-05, + "loss": 1.5285, + "step": 10218 + }, + { + "epoch": 0.9271457085828343, + "grad_norm": 0.10798829588732509, + "learning_rate": 1.3855370601948525e-05, + "loss": 1.4962, + "step": 10219 + }, + { + "epoch": 0.9272364362184722, + "grad_norm": 0.11136581820614266, + "learning_rate": 1.3821042842851627e-05, + 
"loss": 1.5639, + "step": 10220 + }, + { + "epoch": 0.92732716385411, + "grad_norm": 0.1071467484662977, + "learning_rate": 1.3786757065379408e-05, + "loss": 1.4833, + "step": 10221 + }, + { + "epoch": 0.9274178914897477, + "grad_norm": 0.10648903293210597, + "learning_rate": 1.3752513272492328e-05, + "loss": 1.5413, + "step": 10222 + }, + { + "epoch": 0.9275086191253856, + "grad_norm": 0.108634849512497, + "learning_rate": 1.3718311467147416e-05, + "loss": 1.5283, + "step": 10223 + }, + { + "epoch": 0.9275993467610234, + "grad_norm": 0.1089031013470127, + "learning_rate": 1.3684151652297916e-05, + "loss": 1.5443, + "step": 10224 + }, + { + "epoch": 0.9276900743966612, + "grad_norm": 0.10410142756175167, + "learning_rate": 1.3650033830893693e-05, + "loss": 1.5021, + "step": 10225 + }, + { + "epoch": 0.9277808020322991, + "grad_norm": 0.10326023993553067, + "learning_rate": 1.3615958005880724e-05, + "loss": 1.529, + "step": 10226 + }, + { + "epoch": 0.9278715296679368, + "grad_norm": 0.10839553365146849, + "learning_rate": 1.3581924180201377e-05, + "loss": 1.5389, + "step": 10227 + }, + { + "epoch": 0.9279622573035746, + "grad_norm": 0.10557136648564582, + "learning_rate": 1.3547932356794635e-05, + "loss": 1.5336, + "step": 10228 + }, + { + "epoch": 0.9280529849392125, + "grad_norm": 0.11020413669275428, + "learning_rate": 1.3513982538595648e-05, + "loss": 1.5235, + "step": 10229 + }, + { + "epoch": 0.9281437125748503, + "grad_norm": 0.10603752911989828, + "learning_rate": 1.3480074728535906e-05, + "loss": 1.5464, + "step": 10230 + }, + { + "epoch": 0.9282344402104881, + "grad_norm": 0.10813248448658488, + "learning_rate": 1.3446208929543403e-05, + "loss": 1.5394, + "step": 10231 + }, + { + "epoch": 0.928325167846126, + "grad_norm": 0.10531879378320622, + "learning_rate": 1.3412385144542572e-05, + "loss": 1.5237, + "step": 10232 + }, + { + "epoch": 0.9284158954817637, + "grad_norm": 0.1068407473179843, + "learning_rate": 1.3378603376453969e-05, + "loss": 1.5579, + "step": 10233 + }, + { + "epoch": 0.9285066231174015, + "grad_norm": 0.10626482134141278, + "learning_rate": 1.3344863628194593e-05, + "loss": 1.4887, + "step": 10234 + }, + { + "epoch": 0.9285973507530394, + "grad_norm": 0.1081560469595028, + "learning_rate": 1.3311165902678057e-05, + "loss": 1.5353, + "step": 10235 + }, + { + "epoch": 0.9286880783886772, + "grad_norm": 0.10904719948402955, + "learning_rate": 1.3277510202814091e-05, + "loss": 1.5526, + "step": 10236 + }, + { + "epoch": 0.928778806024315, + "grad_norm": 0.11361163968718521, + "learning_rate": 1.3243896531508815e-05, + "loss": 1.5439, + "step": 10237 + }, + { + "epoch": 0.9288695336599528, + "grad_norm": 0.10965092049183628, + "learning_rate": 1.3210324891664794e-05, + "loss": 1.5895, + "step": 10238 + }, + { + "epoch": 0.9289602612955906, + "grad_norm": 0.10699877717660752, + "learning_rate": 1.3176795286180986e-05, + "loss": 1.4723, + "step": 10239 + }, + { + "epoch": 0.9290509889312285, + "grad_norm": 0.10536053280039404, + "learning_rate": 1.3143307717952691e-05, + "loss": 1.533, + "step": 10240 + }, + { + "epoch": 0.9291417165668663, + "grad_norm": 0.10670049717539348, + "learning_rate": 1.3109862189871425e-05, + "loss": 1.476, + "step": 10241 + }, + { + "epoch": 0.9292324442025041, + "grad_norm": 0.11168786898484748, + "learning_rate": 1.3076458704825434e-05, + "loss": 1.5512, + "step": 10242 + }, + { + "epoch": 0.929323171838142, + "grad_norm": 0.10357563617315425, + "learning_rate": 1.3043097265698966e-05, + "loss": 1.4977, + "step": 10243 + }, + { + 
"epoch": 0.9294138994737797, + "grad_norm": 0.1141737241875297, + "learning_rate": 1.3009777875372775e-05, + "loss": 1.4941, + "step": 10244 + }, + { + "epoch": 0.9295046271094175, + "grad_norm": 0.1085951770545893, + "learning_rate": 1.2976500536724111e-05, + "loss": 1.5181, + "step": 10245 + }, + { + "epoch": 0.9295953547450554, + "grad_norm": 0.1115847460550308, + "learning_rate": 1.2943265252626402e-05, + "loss": 1.5273, + "step": 10246 + }, + { + "epoch": 0.9296860823806932, + "grad_norm": 0.10595267972222308, + "learning_rate": 1.291007202594946e-05, + "loss": 1.5482, + "step": 10247 + }, + { + "epoch": 0.929776810016331, + "grad_norm": 0.10912036857156883, + "learning_rate": 1.2876920859559548e-05, + "loss": 1.5248, + "step": 10248 + }, + { + "epoch": 0.9298675376519688, + "grad_norm": 0.10387116375154444, + "learning_rate": 1.2843811756319434e-05, + "loss": 1.5512, + "step": 10249 + }, + { + "epoch": 0.9299582652876066, + "grad_norm": 0.10583370501996807, + "learning_rate": 1.281074471908794e-05, + "loss": 1.571, + "step": 10250 + }, + { + "epoch": 0.9300489929232444, + "grad_norm": 0.10734802096692725, + "learning_rate": 1.2777719750720395e-05, + "loss": 1.5002, + "step": 10251 + }, + { + "epoch": 0.9301397205588823, + "grad_norm": 0.10955047846715654, + "learning_rate": 1.2744736854068629e-05, + "loss": 1.5111, + "step": 10252 + }, + { + "epoch": 0.93023044819452, + "grad_norm": 0.11043031541125657, + "learning_rate": 1.2711796031980583e-05, + "loss": 1.5824, + "step": 10253 + }, + { + "epoch": 0.9303211758301578, + "grad_norm": 0.11266704047833595, + "learning_rate": 1.2678897287300762e-05, + "loss": 1.5498, + "step": 10254 + }, + { + "epoch": 0.9304119034657957, + "grad_norm": 0.10441160708396244, + "learning_rate": 1.2646040622870003e-05, + "loss": 1.5369, + "step": 10255 + }, + { + "epoch": 0.9305026311014335, + "grad_norm": 0.10587361164746624, + "learning_rate": 1.2613226041525539e-05, + "loss": 1.5178, + "step": 10256 + }, + { + "epoch": 0.9305933587370713, + "grad_norm": 0.10982787436757996, + "learning_rate": 1.2580453546100768e-05, + "loss": 1.5755, + "step": 10257 + }, + { + "epoch": 0.9306840863727092, + "grad_norm": 0.11213924930027348, + "learning_rate": 1.2547723139425648e-05, + "loss": 1.5689, + "step": 10258 + }, + { + "epoch": 0.9307748140083469, + "grad_norm": 0.10471621868497893, + "learning_rate": 1.2515034824326533e-05, + "loss": 1.5023, + "step": 10259 + }, + { + "epoch": 0.9308655416439847, + "grad_norm": 0.10900132784123762, + "learning_rate": 1.248238860362605e-05, + "loss": 1.5232, + "step": 10260 + }, + { + "epoch": 0.9309562692796226, + "grad_norm": 0.10837989501483451, + "learning_rate": 1.2449784480143056e-05, + "loss": 1.5369, + "step": 10261 + }, + { + "epoch": 0.9310469969152604, + "grad_norm": 0.10630530420488057, + "learning_rate": 1.2417222456693023e-05, + "loss": 1.5122, + "step": 10262 + }, + { + "epoch": 0.9311377245508982, + "grad_norm": 0.10551826139227395, + "learning_rate": 1.238470253608781e-05, + "loss": 1.5268, + "step": 10263 + }, + { + "epoch": 0.931228452186536, + "grad_norm": 0.11138127922667401, + "learning_rate": 1.2352224721135341e-05, + "loss": 1.5407, + "step": 10264 + }, + { + "epoch": 0.9313191798221738, + "grad_norm": 0.10565760847423847, + "learning_rate": 1.2319789014640092e-05, + "loss": 1.53, + "step": 10265 + }, + { + "epoch": 0.9314099074578116, + "grad_norm": 0.10745544857173239, + "learning_rate": 1.2287395419402992e-05, + "loss": 1.5087, + "step": 10266 + }, + { + "epoch": 0.9315006350934495, + "grad_norm": 
0.10859857534926991, + "learning_rate": 1.2255043938221244e-05, + "loss": 1.5265, + "step": 10267 + }, + { + "epoch": 0.9315913627290873, + "grad_norm": 0.10729989833512925, + "learning_rate": 1.2222734573888227e-05, + "loss": 1.5216, + "step": 10268 + }, + { + "epoch": 0.931682090364725, + "grad_norm": 0.11093073385490364, + "learning_rate": 1.2190467329193987e-05, + "loss": 1.5389, + "step": 10269 + }, + { + "epoch": 0.9317728180003629, + "grad_norm": 0.10922576509534065, + "learning_rate": 1.2158242206924797e-05, + "loss": 1.5092, + "step": 10270 + }, + { + "epoch": 0.9318635456360007, + "grad_norm": 0.11114284319051566, + "learning_rate": 1.2126059209863316e-05, + "loss": 1.5238, + "step": 10271 + }, + { + "epoch": 0.9319542732716385, + "grad_norm": 0.10549573529740548, + "learning_rate": 1.209391834078849e-05, + "loss": 1.5455, + "step": 10272 + }, + { + "epoch": 0.9320450009072764, + "grad_norm": 0.10863643823688017, + "learning_rate": 1.2061819602475766e-05, + "loss": 1.5166, + "step": 10273 + }, + { + "epoch": 0.9321357285429142, + "grad_norm": 0.10621634057184047, + "learning_rate": 1.202976299769687e-05, + "loss": 1.5563, + "step": 10274 + }, + { + "epoch": 0.9322264561785519, + "grad_norm": 0.1071131717170603, + "learning_rate": 1.1997748529219755e-05, + "loss": 1.542, + "step": 10275 + }, + { + "epoch": 0.9323171838141898, + "grad_norm": 0.10783354214819255, + "learning_rate": 1.1965776199809042e-05, + "loss": 1.5233, + "step": 10276 + }, + { + "epoch": 0.9324079114498276, + "grad_norm": 0.10562110029770885, + "learning_rate": 1.1933846012225525e-05, + "loss": 1.49, + "step": 10277 + }, + { + "epoch": 0.9324986390854655, + "grad_norm": 0.10943744637323469, + "learning_rate": 1.1901957969226274e-05, + "loss": 1.5702, + "step": 10278 + }, + { + "epoch": 0.9325893667211033, + "grad_norm": 0.10692939831934686, + "learning_rate": 1.1870112073564865e-05, + "loss": 1.563, + "step": 10279 + }, + { + "epoch": 0.932680094356741, + "grad_norm": 0.112033469998503, + "learning_rate": 1.1838308327991266e-05, + "loss": 1.5065, + "step": 10280 + }, + { + "epoch": 0.9327708219923789, + "grad_norm": 0.10684500312070863, + "learning_rate": 1.1806546735251722e-05, + "loss": 1.5461, + "step": 10281 + }, + { + "epoch": 0.9328615496280167, + "grad_norm": 0.10694095702142208, + "learning_rate": 1.1774827298088763e-05, + "loss": 1.5804, + "step": 10282 + }, + { + "epoch": 0.9329522772636545, + "grad_norm": 0.10403530426511334, + "learning_rate": 1.1743150019241422e-05, + "loss": 1.549, + "step": 10283 + }, + { + "epoch": 0.9330430048992924, + "grad_norm": 0.10483866849892419, + "learning_rate": 1.171151490144512e-05, + "loss": 1.4833, + "step": 10284 + }, + { + "epoch": 0.9331337325349301, + "grad_norm": 0.10572807254547624, + "learning_rate": 1.1679921947431338e-05, + "loss": 1.5725, + "step": 10285 + }, + { + "epoch": 0.9332244601705679, + "grad_norm": 0.10804937957689106, + "learning_rate": 1.1648371159928339e-05, + "loss": 1.4871, + "step": 10286 + }, + { + "epoch": 0.9333151878062058, + "grad_norm": 0.11259532612177904, + "learning_rate": 1.16168625416605e-05, + "loss": 1.4801, + "step": 10287 + }, + { + "epoch": 0.9334059154418436, + "grad_norm": 0.10665356888914888, + "learning_rate": 1.1585396095348478e-05, + "loss": 1.5477, + "step": 10288 + }, + { + "epoch": 0.9334966430774814, + "grad_norm": 0.10915505581193748, + "learning_rate": 1.1553971823709542e-05, + "loss": 1.5252, + "step": 10289 + }, + { + "epoch": 0.9335873707131193, + "grad_norm": 0.11020970558341862, + "learning_rate": 
1.1522589729457188e-05, + "loss": 1.5523, + "step": 10290 + }, + { + "epoch": 0.933678098348757, + "grad_norm": 0.10830650889159958, + "learning_rate": 1.1491249815301142e-05, + "loss": 1.5241, + "step": 10291 + }, + { + "epoch": 0.9337688259843948, + "grad_norm": 0.10668451909896107, + "learning_rate": 1.1459952083947622e-05, + "loss": 1.5376, + "step": 10292 + }, + { + "epoch": 0.9338595536200327, + "grad_norm": 0.10443810332788793, + "learning_rate": 1.1428696538099303e-05, + "loss": 1.5517, + "step": 10293 + }, + { + "epoch": 0.9339502812556705, + "grad_norm": 0.10630588098858341, + "learning_rate": 1.1397483180455025e-05, + "loss": 1.5069, + "step": 10294 + }, + { + "epoch": 0.9340410088913083, + "grad_norm": 0.10508393489027126, + "learning_rate": 1.1366312013710078e-05, + "loss": 1.5711, + "step": 10295 + }, + { + "epoch": 0.9341317365269461, + "grad_norm": 0.11095737253514835, + "learning_rate": 1.133518304055614e-05, + "loss": 1.5589, + "step": 10296 + }, + { + "epoch": 0.9342224641625839, + "grad_norm": 0.10687585917232452, + "learning_rate": 1.1304096263681173e-05, + "loss": 1.5689, + "step": 10297 + }, + { + "epoch": 0.9343131917982217, + "grad_norm": 0.1099540400648764, + "learning_rate": 1.1273051685769475e-05, + "loss": 1.4742, + "step": 10298 + }, + { + "epoch": 0.9344039194338596, + "grad_norm": 0.1095363736160611, + "learning_rate": 1.1242049309501845e-05, + "loss": 1.5508, + "step": 10299 + }, + { + "epoch": 0.9344946470694974, + "grad_norm": 0.10843997462405279, + "learning_rate": 1.1211089137555308e-05, + "loss": 1.5189, + "step": 10300 + }, + { + "epoch": 0.9345853747051351, + "grad_norm": 0.10745555635409476, + "learning_rate": 1.118017117260317e-05, + "loss": 1.5304, + "step": 10301 + }, + { + "epoch": 0.934676102340773, + "grad_norm": 0.10588699064473589, + "learning_rate": 1.1149295417315352e-05, + "loss": 1.5157, + "step": 10302 + }, + { + "epoch": 0.9347668299764108, + "grad_norm": 0.1087277300743989, + "learning_rate": 1.1118461874357944e-05, + "loss": 1.5142, + "step": 10303 + }, + { + "epoch": 0.9348575576120486, + "grad_norm": 0.10681618149599882, + "learning_rate": 1.1087670546393424e-05, + "loss": 1.488, + "step": 10304 + }, + { + "epoch": 0.9349482852476865, + "grad_norm": 0.109070309365971, + "learning_rate": 1.1056921436080503e-05, + "loss": 1.5307, + "step": 10305 + }, + { + "epoch": 0.9350390128833242, + "grad_norm": 0.10764802038487271, + "learning_rate": 1.1026214546074609e-05, + "loss": 1.5309, + "step": 10306 + }, + { + "epoch": 0.935129740518962, + "grad_norm": 0.1061062887395856, + "learning_rate": 1.0995549879027066e-05, + "loss": 1.5569, + "step": 10307 + }, + { + "epoch": 0.9352204681545999, + "grad_norm": 0.10442238361364325, + "learning_rate": 1.096492743758587e-05, + "loss": 1.5043, + "step": 10308 + }, + { + "epoch": 0.9353111957902377, + "grad_norm": 0.1102223085100724, + "learning_rate": 1.093434722439529e-05, + "loss": 1.554, + "step": 10309 + }, + { + "epoch": 0.9354019234258755, + "grad_norm": 0.10569245924762638, + "learning_rate": 1.0903809242095942e-05, + "loss": 1.4935, + "step": 10310 + }, + { + "epoch": 0.9354926510615134, + "grad_norm": 0.10759180706845572, + "learning_rate": 1.087331349332471e-05, + "loss": 1.5144, + "step": 10311 + }, + { + "epoch": 0.9355833786971511, + "grad_norm": 0.10651706649371696, + "learning_rate": 1.0842859980714937e-05, + "loss": 1.541, + "step": 10312 + }, + { + "epoch": 0.9356741063327889, + "grad_norm": 0.10218493755512013, + "learning_rate": 1.081244870689635e-05, + "loss": 1.5395, + "step": 
10313 + }, + { + "epoch": 0.9357648339684268, + "grad_norm": 0.10383510526517806, + "learning_rate": 1.0782079674494905e-05, + "loss": 1.4809, + "step": 10314 + }, + { + "epoch": 0.9358555616040646, + "grad_norm": 0.10617336735659114, + "learning_rate": 1.0751752886132892e-05, + "loss": 1.5293, + "step": 10315 + }, + { + "epoch": 0.9359462892397025, + "grad_norm": 0.10732600663301067, + "learning_rate": 1.0721468344429219e-05, + "loss": 1.4996, + "step": 10316 + }, + { + "epoch": 0.9360370168753402, + "grad_norm": 0.10607865236541142, + "learning_rate": 1.0691226051998847e-05, + "loss": 1.5428, + "step": 10317 + }, + { + "epoch": 0.936127744510978, + "grad_norm": 0.10729849372584795, + "learning_rate": 1.0661026011453245e-05, + "loss": 1.5321, + "step": 10318 + }, + { + "epoch": 0.9362184721466159, + "grad_norm": 0.10477009679340707, + "learning_rate": 1.0630868225400103e-05, + "loss": 1.4911, + "step": 10319 + }, + { + "epoch": 0.9363091997822537, + "grad_norm": 0.11148786010872007, + "learning_rate": 1.0600752696443671e-05, + "loss": 1.5128, + "step": 10320 + }, + { + "epoch": 0.9363999274178915, + "grad_norm": 0.10659198735497676, + "learning_rate": 1.0570679427184371e-05, + "loss": 1.5707, + "step": 10321 + }, + { + "epoch": 0.9364906550535294, + "grad_norm": 0.14113033358214083, + "learning_rate": 1.0540648420218957e-05, + "loss": 1.5967, + "step": 10322 + }, + { + "epoch": 0.9365813826891671, + "grad_norm": 0.10664635682113603, + "learning_rate": 1.0510659678140799e-05, + "loss": 1.5519, + "step": 10323 + }, + { + "epoch": 0.9366721103248049, + "grad_norm": 0.10613812007756449, + "learning_rate": 1.0480713203539271e-05, + "loss": 1.5295, + "step": 10324 + }, + { + "epoch": 0.9367628379604428, + "grad_norm": 0.1080540882787836, + "learning_rate": 1.0450808999000306e-05, + "loss": 1.5349, + "step": 10325 + }, + { + "epoch": 0.9368535655960806, + "grad_norm": 0.10931257235290244, + "learning_rate": 1.0420947067106113e-05, + "loss": 1.5247, + "step": 10326 + }, + { + "epoch": 0.9369442932317184, + "grad_norm": 0.1064121861996391, + "learning_rate": 1.0391127410435353e-05, + "loss": 1.5094, + "step": 10327 + }, + { + "epoch": 0.9370350208673562, + "grad_norm": 0.10776203667711325, + "learning_rate": 1.0361350031562911e-05, + "loss": 1.524, + "step": 10328 + }, + { + "epoch": 0.937125748502994, + "grad_norm": 0.11126883237560282, + "learning_rate": 1.0331614933059952e-05, + "loss": 1.5328, + "step": 10329 + }, + { + "epoch": 0.9372164761386318, + "grad_norm": 0.10575040580024804, + "learning_rate": 1.030192211749431e-05, + "loss": 1.4959, + "step": 10330 + }, + { + "epoch": 0.9373072037742697, + "grad_norm": 0.11026824627005319, + "learning_rate": 1.0272271587429882e-05, + "loss": 1.5253, + "step": 10331 + }, + { + "epoch": 0.9373979314099075, + "grad_norm": 0.10730237245488201, + "learning_rate": 1.0242663345426895e-05, + "loss": 1.527, + "step": 10332 + }, + { + "epoch": 0.9374886590455452, + "grad_norm": 0.10855110977708488, + "learning_rate": 1.0213097394042137e-05, + "loss": 1.5174, + "step": 10333 + }, + { + "epoch": 0.9375793866811831, + "grad_norm": 0.10861003796512034, + "learning_rate": 1.0183573735828622e-05, + "loss": 1.545, + "step": 10334 + }, + { + "epoch": 0.9376701143168209, + "grad_norm": 0.10436797471316511, + "learning_rate": 1.0154092373335755e-05, + "loss": 1.5317, + "step": 10335 + }, + { + "epoch": 0.9377608419524587, + "grad_norm": 0.10910030375917125, + "learning_rate": 1.012465330910911e-05, + "loss": 1.5518, + "step": 10336 + }, + { + "epoch": 
0.9378515695880966, + "grad_norm": 0.1083601033493991, + "learning_rate": 1.0095256545690933e-05, + "loss": 1.534, + "step": 10337 + }, + { + "epoch": 0.9379422972237343, + "grad_norm": 0.10645165901079864, + "learning_rate": 1.0065902085619583e-05, + "loss": 1.5341, + "step": 10338 + }, + { + "epoch": 0.9380330248593721, + "grad_norm": 0.10402039135947422, + "learning_rate": 1.0036589931429695e-05, + "loss": 1.5155, + "step": 10339 + }, + { + "epoch": 0.93812375249501, + "grad_norm": 0.10856189948952027, + "learning_rate": 1.0007320085652527e-05, + "loss": 1.5654, + "step": 10340 + }, + { + "epoch": 0.9382144801306478, + "grad_norm": 0.10929884668364279, + "learning_rate": 9.978092550815498e-06, + "loss": 1.5792, + "step": 10341 + }, + { + "epoch": 0.9383052077662856, + "grad_norm": 0.1100136927540084, + "learning_rate": 9.94890732944237e-06, + "loss": 1.5146, + "step": 10342 + }, + { + "epoch": 0.9383959354019235, + "grad_norm": 0.11304085723847158, + "learning_rate": 9.919764424053346e-06, + "loss": 1.5833, + "step": 10343 + }, + { + "epoch": 0.9384866630375612, + "grad_norm": 0.10619511598879665, + "learning_rate": 9.890663837164916e-06, + "loss": 1.5009, + "step": 10344 + }, + { + "epoch": 0.938577390673199, + "grad_norm": 0.1077685503298053, + "learning_rate": 9.861605571289844e-06, + "loss": 1.5819, + "step": 10345 + }, + { + "epoch": 0.9386681183088369, + "grad_norm": 0.1061749061474299, + "learning_rate": 9.832589628937404e-06, + "loss": 1.5248, + "step": 10346 + }, + { + "epoch": 0.9387588459444747, + "grad_norm": 0.10375547049607234, + "learning_rate": 9.803616012613092e-06, + "loss": 1.5439, + "step": 10347 + }, + { + "epoch": 0.9388495735801125, + "grad_norm": 0.1046771112025509, + "learning_rate": 9.774684724818738e-06, + "loss": 1.5005, + "step": 10348 + }, + { + "epoch": 0.9389403012157503, + "grad_norm": 0.10367235699742593, + "learning_rate": 9.745795768052678e-06, + "loss": 1.4977, + "step": 10349 + }, + { + "epoch": 0.9390310288513881, + "grad_norm": 0.10695216065321629, + "learning_rate": 9.716949144809362e-06, + "loss": 1.5406, + "step": 10350 + }, + { + "epoch": 0.9391217564870259, + "grad_norm": 0.10288670069691327, + "learning_rate": 9.688144857579795e-06, + "loss": 1.4735, + "step": 10351 + }, + { + "epoch": 0.9392124841226638, + "grad_norm": 0.10756757588106686, + "learning_rate": 9.659382908851155e-06, + "loss": 1.5334, + "step": 10352 + }, + { + "epoch": 0.9393032117583016, + "grad_norm": 0.10568548139718387, + "learning_rate": 9.630663301107068e-06, + "loss": 1.5043, + "step": 10353 + }, + { + "epoch": 0.9393939393939394, + "grad_norm": 0.10564153280154127, + "learning_rate": 9.601986036827492e-06, + "loss": 1.517, + "step": 10354 + }, + { + "epoch": 0.9394846670295772, + "grad_norm": 0.10706935146212694, + "learning_rate": 9.57335111848867e-06, + "loss": 1.5572, + "step": 10355 + }, + { + "epoch": 0.939575394665215, + "grad_norm": 0.10467868156673166, + "learning_rate": 9.544758548563293e-06, + "loss": 1.5651, + "step": 10356 + }, + { + "epoch": 0.9396661223008529, + "grad_norm": 0.10524110275678358, + "learning_rate": 9.516208329520215e-06, + "loss": 1.5687, + "step": 10357 + }, + { + "epoch": 0.9397568499364907, + "grad_norm": 0.10761223772111843, + "learning_rate": 9.487700463824966e-06, + "loss": 1.5236, + "step": 10358 + }, + { + "epoch": 0.9398475775721284, + "grad_norm": 0.10643781111585386, + "learning_rate": 9.459234953939022e-06, + "loss": 1.479, + "step": 10359 + }, + { + "epoch": 0.9399383052077663, + "grad_norm": 0.10850409378258341, + 
"learning_rate": 9.430811802320415e-06, + "loss": 1.5625, + "step": 10360 + }, + { + "epoch": 0.9400290328434041, + "grad_norm": 0.10809587112518598, + "learning_rate": 9.402431011423518e-06, + "loss": 1.5214, + "step": 10361 + }, + { + "epoch": 0.9401197604790419, + "grad_norm": 0.10456171205694036, + "learning_rate": 9.374092583699034e-06, + "loss": 1.5252, + "step": 10362 + }, + { + "epoch": 0.9402104881146798, + "grad_norm": 0.10868447700336505, + "learning_rate": 9.345796521593953e-06, + "loss": 1.5659, + "step": 10363 + }, + { + "epoch": 0.9403012157503176, + "grad_norm": 0.10748649097850266, + "learning_rate": 9.31754282755165e-06, + "loss": 1.4813, + "step": 10364 + }, + { + "epoch": 0.9403919433859553, + "grad_norm": 0.1097776595144269, + "learning_rate": 9.289331504011845e-06, + "loss": 1.537, + "step": 10365 + }, + { + "epoch": 0.9404826710215932, + "grad_norm": 0.10544376579386248, + "learning_rate": 9.261162553410585e-06, + "loss": 1.5526, + "step": 10366 + }, + { + "epoch": 0.940573398657231, + "grad_norm": 0.1081161690564556, + "learning_rate": 9.23303597818026e-06, + "loss": 1.5593, + "step": 10367 + }, + { + "epoch": 0.9406641262928688, + "grad_norm": 0.10823790086332429, + "learning_rate": 9.204951780749648e-06, + "loss": 1.5414, + "step": 10368 + }, + { + "epoch": 0.9407548539285067, + "grad_norm": 0.10695692408769134, + "learning_rate": 9.176909963543811e-06, + "loss": 1.5217, + "step": 10369 + }, + { + "epoch": 0.9408455815641444, + "grad_norm": 0.10650761886012332, + "learning_rate": 9.148910528984033e-06, + "loss": 1.478, + "step": 10370 + }, + { + "epoch": 0.9409363091997822, + "grad_norm": 0.1102958722945656, + "learning_rate": 9.120953479488214e-06, + "loss": 1.5366, + "step": 10371 + }, + { + "epoch": 0.9410270368354201, + "grad_norm": 0.10928087069580852, + "learning_rate": 9.093038817470477e-06, + "loss": 1.5011, + "step": 10372 + }, + { + "epoch": 0.9411177644710579, + "grad_norm": 0.10402877371491998, + "learning_rate": 9.065166545341119e-06, + "loss": 1.5101, + "step": 10373 + }, + { + "epoch": 0.9412084921066957, + "grad_norm": 0.10748656124444152, + "learning_rate": 9.03733666550699e-06, + "loss": 1.5124, + "step": 10374 + }, + { + "epoch": 0.9412992197423335, + "grad_norm": 0.10835189485520393, + "learning_rate": 9.00954918037128e-06, + "loss": 1.5667, + "step": 10375 + }, + { + "epoch": 0.9413899473779713, + "grad_norm": 0.11144487277216392, + "learning_rate": 8.981804092333234e-06, + "loss": 1.5124, + "step": 10376 + }, + { + "epoch": 0.9414806750136091, + "grad_norm": 0.1105103313948828, + "learning_rate": 8.954101403788884e-06, + "loss": 1.5149, + "step": 10377 + }, + { + "epoch": 0.941571402649247, + "grad_norm": 0.1139626431241257, + "learning_rate": 8.926441117130201e-06, + "loss": 1.5342, + "step": 10378 + }, + { + "epoch": 0.9416621302848848, + "grad_norm": 0.10809507443428742, + "learning_rate": 8.898823234745723e-06, + "loss": 1.5517, + "step": 10379 + }, + { + "epoch": 0.9417528579205225, + "grad_norm": 0.10520597819123882, + "learning_rate": 8.871247759020206e-06, + "loss": 1.4978, + "step": 10380 + }, + { + "epoch": 0.9418435855561604, + "grad_norm": 0.10597369655694784, + "learning_rate": 8.84371469233486e-06, + "loss": 1.4793, + "step": 10381 + }, + { + "epoch": 0.9419343131917982, + "grad_norm": 0.10874926449698062, + "learning_rate": 8.816224037067112e-06, + "loss": 1.5289, + "step": 10382 + }, + { + "epoch": 0.942025040827436, + "grad_norm": 0.10786534623126753, + "learning_rate": 8.788775795590842e-06, + "loss": 1.534, + "step": 10383 
+ }, + { + "epoch": 0.9421157684630739, + "grad_norm": 0.10498649366652271, + "learning_rate": 8.761369970276155e-06, + "loss": 1.5607, + "step": 10384 + }, + { + "epoch": 0.9422064960987117, + "grad_norm": 0.10530070515327794, + "learning_rate": 8.734006563489604e-06, + "loss": 1.5383, + "step": 10385 + }, + { + "epoch": 0.9422972237343494, + "grad_norm": 0.10817848138175896, + "learning_rate": 8.706685577593965e-06, + "loss": 1.5178, + "step": 10386 + }, + { + "epoch": 0.9423879513699873, + "grad_norm": 0.10796400829566662, + "learning_rate": 8.679407014948405e-06, + "loss": 1.5029, + "step": 10387 + }, + { + "epoch": 0.9424786790056251, + "grad_norm": 0.10689251171773119, + "learning_rate": 8.652170877908538e-06, + "loss": 1.5835, + "step": 10388 + }, + { + "epoch": 0.9425694066412629, + "grad_norm": 0.1051299050777441, + "learning_rate": 8.624977168826097e-06, + "loss": 1.503, + "step": 10389 + }, + { + "epoch": 0.9426601342769008, + "grad_norm": 0.10951370171817407, + "learning_rate": 8.59782589004926e-06, + "loss": 1.5352, + "step": 10390 + }, + { + "epoch": 0.9427508619125385, + "grad_norm": 0.11151270228623504, + "learning_rate": 8.570717043922648e-06, + "loss": 1.5506, + "step": 10391 + }, + { + "epoch": 0.9428415895481764, + "grad_norm": 0.11229163515962198, + "learning_rate": 8.543650632787058e-06, + "loss": 1.5642, + "step": 10392 + }, + { + "epoch": 0.9429323171838142, + "grad_norm": 0.10654781809914037, + "learning_rate": 8.51662665897962e-06, + "loss": 1.5093, + "step": 10393 + }, + { + "epoch": 0.943023044819452, + "grad_norm": 0.11105686080847892, + "learning_rate": 8.48964512483391e-06, + "loss": 1.5729, + "step": 10394 + }, + { + "epoch": 0.9431137724550899, + "grad_norm": 0.10985345239506279, + "learning_rate": 8.46270603267979e-06, + "loss": 1.5818, + "step": 10395 + }, + { + "epoch": 0.9432045000907276, + "grad_norm": 0.10300626486808503, + "learning_rate": 8.43580938484345e-06, + "loss": 1.5505, + "step": 10396 + }, + { + "epoch": 0.9432952277263654, + "grad_norm": 0.10511896939775811, + "learning_rate": 8.40895518364737e-06, + "loss": 1.5518, + "step": 10397 + }, + { + "epoch": 0.9433859553620033, + "grad_norm": 0.11029817758717332, + "learning_rate": 8.382143431410583e-06, + "loss": 1.5304, + "step": 10398 + }, + { + "epoch": 0.9434766829976411, + "grad_norm": 0.10833071987617615, + "learning_rate": 8.355374130448068e-06, + "loss": 1.5382, + "step": 10399 + }, + { + "epoch": 0.9435674106332789, + "grad_norm": 0.10743902167467156, + "learning_rate": 8.328647283071478e-06, + "loss": 1.5967, + "step": 10400 + }, + { + "epoch": 0.9436581382689168, + "grad_norm": 0.10716804833906528, + "learning_rate": 8.301962891588688e-06, + "loss": 1.5507, + "step": 10401 + }, + { + "epoch": 0.9437488659045545, + "grad_norm": 0.10951044620113645, + "learning_rate": 8.275320958303855e-06, + "loss": 1.4925, + "step": 10402 + }, + { + "epoch": 0.9438395935401923, + "grad_norm": 0.10390797748142694, + "learning_rate": 8.248721485517529e-06, + "loss": 1.4597, + "step": 10403 + }, + { + "epoch": 0.9439303211758302, + "grad_norm": 0.10577575481794879, + "learning_rate": 8.222164475526538e-06, + "loss": 1.5102, + "step": 10404 + }, + { + "epoch": 0.944021048811468, + "grad_norm": 0.10535564258781242, + "learning_rate": 8.195649930624215e-06, + "loss": 1.5404, + "step": 10405 + }, + { + "epoch": 0.9441117764471058, + "grad_norm": 0.10757743707654797, + "learning_rate": 8.169177853099951e-06, + "loss": 1.5335, + "step": 10406 + }, + { + "epoch": 0.9442025040827436, + "grad_norm": 
0.10470602428106705, + "learning_rate": 8.142748245239639e-06, + "loss": 1.53, + "step": 10407 + }, + { + "epoch": 0.9442932317183814, + "grad_norm": 0.10741303309263879, + "learning_rate": 8.116361109325566e-06, + "loss": 1.5187, + "step": 10408 + }, + { + "epoch": 0.9443839593540192, + "grad_norm": 0.10861658690855594, + "learning_rate": 8.09001644763624e-06, + "loss": 1.5479, + "step": 10409 + }, + { + "epoch": 0.9444746869896571, + "grad_norm": 0.10605688240997789, + "learning_rate": 8.063714262446398e-06, + "loss": 1.5263, + "step": 10410 + }, + { + "epoch": 0.9445654146252949, + "grad_norm": 0.10725170716994852, + "learning_rate": 8.037454556027334e-06, + "loss": 1.5406, + "step": 10411 + }, + { + "epoch": 0.9446561422609326, + "grad_norm": 0.10439252434973308, + "learning_rate": 8.011237330646626e-06, + "loss": 1.5305, + "step": 10412 + }, + { + "epoch": 0.9447468698965705, + "grad_norm": 0.10652548380896862, + "learning_rate": 7.985062588568126e-06, + "loss": 1.4867, + "step": 10413 + }, + { + "epoch": 0.9448375975322083, + "grad_norm": 0.1074239445885956, + "learning_rate": 7.958930332051918e-06, + "loss": 1.5442, + "step": 10414 + }, + { + "epoch": 0.9449283251678461, + "grad_norm": 0.10996628051502477, + "learning_rate": 7.932840563354582e-06, + "loss": 1.5214, + "step": 10415 + }, + { + "epoch": 0.945019052803484, + "grad_norm": 0.10552637948628713, + "learning_rate": 7.906793284729042e-06, + "loss": 1.527, + "step": 10416 + }, + { + "epoch": 0.9451097804391217, + "grad_norm": 0.10340941838251576, + "learning_rate": 7.880788498424384e-06, + "loss": 1.5189, + "step": 10417 + }, + { + "epoch": 0.9452005080747595, + "grad_norm": 0.10650009598467783, + "learning_rate": 7.854826206686206e-06, + "loss": 1.5274, + "step": 10418 + }, + { + "epoch": 0.9452912357103974, + "grad_norm": 0.10830842368872957, + "learning_rate": 7.828906411756321e-06, + "loss": 1.5552, + "step": 10419 + }, + { + "epoch": 0.9453819633460352, + "grad_norm": 0.10745327194334142, + "learning_rate": 7.803029115872883e-06, + "loss": 1.4858, + "step": 10420 + }, + { + "epoch": 0.945472690981673, + "grad_norm": 0.11326470173720454, + "learning_rate": 7.777194321270441e-06, + "loss": 1.5375, + "step": 10421 + }, + { + "epoch": 0.9455634186173109, + "grad_norm": 0.10934410655308327, + "learning_rate": 7.751402030179822e-06, + "loss": 1.5141, + "step": 10422 + }, + { + "epoch": 0.9456541462529486, + "grad_norm": 0.10853424649074227, + "learning_rate": 7.725652244828185e-06, + "loss": 1.4951, + "step": 10423 + }, + { + "epoch": 0.9457448738885864, + "grad_norm": 0.10466056198476065, + "learning_rate": 7.699944967438976e-06, + "loss": 1.5112, + "step": 10424 + }, + { + "epoch": 0.9458356015242243, + "grad_norm": 0.11039418903562964, + "learning_rate": 7.674280200232142e-06, + "loss": 1.5628, + "step": 10425 + }, + { + "epoch": 0.9459263291598621, + "grad_norm": 0.11223551598191749, + "learning_rate": 7.648657945423743e-06, + "loss": 1.5119, + "step": 10426 + }, + { + "epoch": 0.9460170567954999, + "grad_norm": 0.10585622663885298, + "learning_rate": 7.623078205226286e-06, + "loss": 1.4794, + "step": 10427 + }, + { + "epoch": 0.9461077844311377, + "grad_norm": 0.10750697288228374, + "learning_rate": 7.597540981848616e-06, + "loss": 1.5673, + "step": 10428 + }, + { + "epoch": 0.9461985120667755, + "grad_norm": 0.10700318713480732, + "learning_rate": 7.572046277495859e-06, + "loss": 1.5433, + "step": 10429 + }, + { + "epoch": 0.9462892397024134, + "grad_norm": 0.10395203595384712, + "learning_rate": 7.546594094369475e-06, + 
"loss": 1.5195, + "step": 10430 + }, + { + "epoch": 0.9463799673380512, + "grad_norm": 0.10985307394667047, + "learning_rate": 7.5211844346672605e-06, + "loss": 1.5403, + "step": 10431 + }, + { + "epoch": 0.946470694973689, + "grad_norm": 0.1099732327204903, + "learning_rate": 7.4958173005833515e-06, + "loss": 1.5271, + "step": 10432 + }, + { + "epoch": 0.9465614226093269, + "grad_norm": 0.10679563492371301, + "learning_rate": 7.470492694308273e-06, + "loss": 1.5296, + "step": 10433 + }, + { + "epoch": 0.9466521502449646, + "grad_norm": 0.11010019871562747, + "learning_rate": 7.445210618028664e-06, + "loss": 1.525, + "step": 10434 + }, + { + "epoch": 0.9467428778806024, + "grad_norm": 0.10479461258628642, + "learning_rate": 7.419971073927667e-06, + "loss": 1.5454, + "step": 10435 + }, + { + "epoch": 0.9468336055162403, + "grad_norm": 0.10812215491899786, + "learning_rate": 7.394774064184872e-06, + "loss": 1.5536, + "step": 10436 + }, + { + "epoch": 0.9469243331518781, + "grad_norm": 0.11294772155030873, + "learning_rate": 7.369619590975929e-06, + "loss": 1.5507, + "step": 10437 + }, + { + "epoch": 0.9470150607875158, + "grad_norm": 0.10590669094394595, + "learning_rate": 7.344507656472876e-06, + "loss": 1.5064, + "step": 10438 + }, + { + "epoch": 0.9471057884231537, + "grad_norm": 0.1127536873486555, + "learning_rate": 7.319438262844258e-06, + "loss": 1.4974, + "step": 10439 + }, + { + "epoch": 0.9471965160587915, + "grad_norm": 0.1064177062455429, + "learning_rate": 7.294411412254731e-06, + "loss": 1.5881, + "step": 10440 + }, + { + "epoch": 0.9472872436944293, + "grad_norm": 0.10928975539408871, + "learning_rate": 7.269427106865401e-06, + "loss": 1.4985, + "step": 10441 + }, + { + "epoch": 0.9473779713300672, + "grad_norm": 0.10771943583290812, + "learning_rate": 7.244485348833651e-06, + "loss": 1.5328, + "step": 10442 + }, + { + "epoch": 0.947468698965705, + "grad_norm": 0.10584753973915806, + "learning_rate": 7.219586140313206e-06, + "loss": 1.5285, + "step": 10443 + }, + { + "epoch": 0.9475594266013427, + "grad_norm": 0.11123840866965963, + "learning_rate": 7.194729483454121e-06, + "loss": 1.5525, + "step": 10444 + }, + { + "epoch": 0.9476501542369806, + "grad_norm": 0.10854757284201295, + "learning_rate": 7.169915380402847e-06, + "loss": 1.5569, + "step": 10445 + }, + { + "epoch": 0.9477408818726184, + "grad_norm": 0.10543120934196845, + "learning_rate": 7.145143833302003e-06, + "loss": 1.5613, + "step": 10446 + }, + { + "epoch": 0.9478316095082562, + "grad_norm": 0.11125665647958083, + "learning_rate": 7.1204148442906014e-06, + "loss": 1.5535, + "step": 10447 + }, + { + "epoch": 0.9479223371438941, + "grad_norm": 0.10618055037226178, + "learning_rate": 7.095728415503988e-06, + "loss": 1.5226, + "step": 10448 + }, + { + "epoch": 0.9480130647795318, + "grad_norm": 0.11075274789592322, + "learning_rate": 7.0710845490739025e-06, + "loss": 1.5046, + "step": 10449 + }, + { + "epoch": 0.9481037924151696, + "grad_norm": 0.11415030463366337, + "learning_rate": 7.04648324712831e-06, + "loss": 1.5165, + "step": 10450 + }, + { + "epoch": 0.9481945200508075, + "grad_norm": 0.10738928982440661, + "learning_rate": 7.021924511791511e-06, + "loss": 1.5059, + "step": 10451 + }, + { + "epoch": 0.9482852476864453, + "grad_norm": 0.10508348874856943, + "learning_rate": 6.997408345184253e-06, + "loss": 1.534, + "step": 10452 + }, + { + "epoch": 0.9483759753220831, + "grad_norm": 0.10608256846857539, + "learning_rate": 6.972934749423454e-06, + "loss": 1.5333, + "step": 10453 + }, + { + "epoch": 
0.948466702957721, + "grad_norm": 0.10544037227406931, + "learning_rate": 6.948503726622312e-06, + "loss": 1.549, + "step": 10454 + }, + { + "epoch": 0.9485574305933587, + "grad_norm": 0.1078491470769284, + "learning_rate": 6.924115278890642e-06, + "loss": 1.4955, + "step": 10455 + }, + { + "epoch": 0.9486481582289965, + "grad_norm": 0.10728158165158107, + "learning_rate": 6.899769408334256e-06, + "loss": 1.5678, + "step": 10456 + }, + { + "epoch": 0.9487388858646344, + "grad_norm": 0.11071092973367594, + "learning_rate": 6.8754661170554174e-06, + "loss": 1.5386, + "step": 10457 + }, + { + "epoch": 0.9488296135002722, + "grad_norm": 0.10765384616109198, + "learning_rate": 6.851205407152783e-06, + "loss": 1.5371, + "step": 10458 + }, + { + "epoch": 0.94892034113591, + "grad_norm": 0.10545109785790226, + "learning_rate": 6.82698728072123e-06, + "loss": 1.5182, + "step": 10459 + }, + { + "epoch": 0.9490110687715478, + "grad_norm": 0.10695663493130993, + "learning_rate": 6.802811739852033e-06, + "loss": 1.5643, + "step": 10460 + }, + { + "epoch": 0.9491017964071856, + "grad_norm": 0.10406315626430666, + "learning_rate": 6.778678786632741e-06, + "loss": 1.5369, + "step": 10461 + }, + { + "epoch": 0.9491925240428234, + "grad_norm": 0.10762913500819822, + "learning_rate": 6.754588423147245e-06, + "loss": 1.582, + "step": 10462 + }, + { + "epoch": 0.9492832516784613, + "grad_norm": 0.11043631008321333, + "learning_rate": 6.7305406514757136e-06, + "loss": 1.5521, + "step": 10463 + }, + { + "epoch": 0.9493739793140991, + "grad_norm": 0.10584759154700454, + "learning_rate": 6.706535473694653e-06, + "loss": 1.5343, + "step": 10464 + }, + { + "epoch": 0.9494647069497368, + "grad_norm": 0.10779814424950511, + "learning_rate": 6.682572891876959e-06, + "loss": 1.5124, + "step": 10465 + }, + { + "epoch": 0.9495554345853747, + "grad_norm": 0.10712404118203449, + "learning_rate": 6.658652908091867e-06, + "loss": 1.5498, + "step": 10466 + }, + { + "epoch": 0.9496461622210125, + "grad_norm": 0.10471164062368962, + "learning_rate": 6.634775524404779e-06, + "loss": 1.5291, + "step": 10467 + }, + { + "epoch": 0.9497368898566504, + "grad_norm": 0.10600468391393111, + "learning_rate": 6.610940742877547e-06, + "loss": 1.5303, + "step": 10468 + }, + { + "epoch": 0.9498276174922882, + "grad_norm": 0.1115092720938401, + "learning_rate": 6.587148565568301e-06, + "loss": 1.5363, + "step": 10469 + }, + { + "epoch": 0.949918345127926, + "grad_norm": 0.10562214797845389, + "learning_rate": 6.563398994531511e-06, + "loss": 1.5193, + "step": 10470 + }, + { + "epoch": 0.9500090727635638, + "grad_norm": 0.10718547870439192, + "learning_rate": 6.539692031817923e-06, + "loss": 1.5179, + "step": 10471 + }, + { + "epoch": 0.9500998003992016, + "grad_norm": 0.10534326779996941, + "learning_rate": 6.516027679474623e-06, + "loss": 1.5292, + "step": 10472 + }, + { + "epoch": 0.9501905280348394, + "grad_norm": 0.10409803356647769, + "learning_rate": 6.492405939545087e-06, + "loss": 1.4984, + "step": 10473 + }, + { + "epoch": 0.9502812556704773, + "grad_norm": 0.10654194953229182, + "learning_rate": 6.468826814069073e-06, + "loss": 1.5385, + "step": 10474 + }, + { + "epoch": 0.950371983306115, + "grad_norm": 0.1033775843455268, + "learning_rate": 6.445290305082563e-06, + "loss": 1.5302, + "step": 10475 + }, + { + "epoch": 0.9504627109417528, + "grad_norm": 0.10879906522669011, + "learning_rate": 6.421796414617987e-06, + "loss": 1.5409, + "step": 10476 + }, + { + "epoch": 0.9505534385773907, + "grad_norm": 0.11195168919041615, + 
"learning_rate": 6.398345144704054e-06, + "loss": 1.5436, + "step": 10477 + }, + { + "epoch": 0.9506441662130285, + "grad_norm": 0.11021091089263363, + "learning_rate": 6.374936497365758e-06, + "loss": 1.5328, + "step": 10478 + }, + { + "epoch": 0.9507348938486663, + "grad_norm": 0.10759440307740271, + "learning_rate": 6.351570474624424e-06, + "loss": 1.5488, + "step": 10479 + }, + { + "epoch": 0.9508256214843042, + "grad_norm": 0.10580442765814153, + "learning_rate": 6.328247078497773e-06, + "loss": 1.5489, + "step": 10480 + }, + { + "epoch": 0.9509163491199419, + "grad_norm": 0.10851397811270405, + "learning_rate": 6.304966310999749e-06, + "loss": 1.5051, + "step": 10481 + }, + { + "epoch": 0.9510070767555797, + "grad_norm": 0.10660403412549181, + "learning_rate": 6.281728174140577e-06, + "loss": 1.5203, + "step": 10482 + }, + { + "epoch": 0.9510978043912176, + "grad_norm": 0.10560042377306185, + "learning_rate": 6.258532669927042e-06, + "loss": 1.5085, + "step": 10483 + }, + { + "epoch": 0.9511885320268554, + "grad_norm": 0.11059734653972482, + "learning_rate": 6.235379800361929e-06, + "loss": 1.5568, + "step": 10484 + }, + { + "epoch": 0.9512792596624932, + "grad_norm": 0.10833872647323507, + "learning_rate": 6.212269567444528e-06, + "loss": 1.5355, + "step": 10485 + }, + { + "epoch": 0.951369987298131, + "grad_norm": 0.10205045226939016, + "learning_rate": 6.1892019731704665e-06, + "loss": 1.5577, + "step": 10486 + }, + { + "epoch": 0.9514607149337688, + "grad_norm": 0.10644286338904949, + "learning_rate": 6.166177019531594e-06, + "loss": 1.5253, + "step": 10487 + }, + { + "epoch": 0.9515514425694066, + "grad_norm": 0.10799226252260508, + "learning_rate": 6.143194708516042e-06, + "loss": 1.5421, + "step": 10488 + }, + { + "epoch": 0.9516421702050445, + "grad_norm": 0.10680655057539896, + "learning_rate": 6.120255042108447e-06, + "loss": 1.5332, + "step": 10489 + }, + { + "epoch": 0.9517328978406823, + "grad_norm": 0.10306264453616225, + "learning_rate": 6.097358022289667e-06, + "loss": 1.5494, + "step": 10490 + }, + { + "epoch": 0.95182362547632, + "grad_norm": 0.10830750546941086, + "learning_rate": 6.074503651036789e-06, + "loss": 1.5728, + "step": 10491 + }, + { + "epoch": 0.9519143531119579, + "grad_norm": 0.10820225221323479, + "learning_rate": 6.051691930323344e-06, + "loss": 1.5488, + "step": 10492 + }, + { + "epoch": 0.9520050807475957, + "grad_norm": 0.1058673574863951, + "learning_rate": 6.02892286211909e-06, + "loss": 1.532, + "step": 10493 + }, + { + "epoch": 0.9520958083832335, + "grad_norm": 0.10726251355167021, + "learning_rate": 6.006196448390122e-06, + "loss": 1.5608, + "step": 10494 + }, + { + "epoch": 0.9521865360188714, + "grad_norm": 0.10793252782013293, + "learning_rate": 5.983512691098925e-06, + "loss": 1.5056, + "step": 10495 + }, + { + "epoch": 0.9522772636545092, + "grad_norm": 0.102686429131904, + "learning_rate": 5.960871592204154e-06, + "loss": 1.5174, + "step": 10496 + }, + { + "epoch": 0.9523679912901469, + "grad_norm": 0.10463372148625039, + "learning_rate": 5.938273153661023e-06, + "loss": 1.5276, + "step": 10497 + }, + { + "epoch": 0.9524587189257848, + "grad_norm": 0.10627003130229559, + "learning_rate": 5.915717377420804e-06, + "loss": 1.5122, + "step": 10498 + }, + { + "epoch": 0.9525494465614226, + "grad_norm": 0.11001015453890617, + "learning_rate": 5.893204265431162e-06, + "loss": 1.5192, + "step": 10499 + }, + { + "epoch": 0.9526401741970604, + "grad_norm": 0.10897364226619204, + "learning_rate": 5.870733819636209e-06, + "loss": 1.4899, + 
"step": 10500 + }, + { + "epoch": 0.9527309018326983, + "grad_norm": 0.10922237909703177, + "learning_rate": 5.8483060419762256e-06, + "loss": 1.5373, + "step": 10501 + }, + { + "epoch": 0.952821629468336, + "grad_norm": 0.10702149290950322, + "learning_rate": 5.825920934387774e-06, + "loss": 1.5203, + "step": 10502 + }, + { + "epoch": 0.9529123571039738, + "grad_norm": 0.10746317021942887, + "learning_rate": 5.803578498803974e-06, + "loss": 1.5386, + "step": 10503 + }, + { + "epoch": 0.9530030847396117, + "grad_norm": 0.10913161388408792, + "learning_rate": 5.781278737153894e-06, + "loss": 1.5626, + "step": 10504 + }, + { + "epoch": 0.9530938123752495, + "grad_norm": 0.12220292546561431, + "learning_rate": 5.759021651363327e-06, + "loss": 1.5335, + "step": 10505 + }, + { + "epoch": 0.9531845400108874, + "grad_norm": 0.11104238744096163, + "learning_rate": 5.73680724335407e-06, + "loss": 1.5567, + "step": 10506 + }, + { + "epoch": 0.9532752676465251, + "grad_norm": 0.10624684707537957, + "learning_rate": 5.714635515044364e-06, + "loss": 1.5037, + "step": 10507 + }, + { + "epoch": 0.9533659952821629, + "grad_norm": 0.10720151497784818, + "learning_rate": 5.69250646834868e-06, + "loss": 1.5507, + "step": 10508 + }, + { + "epoch": 0.9534567229178008, + "grad_norm": 0.10984257779295771, + "learning_rate": 5.6704201051779315e-06, + "loss": 1.5412, + "step": 10509 + }, + { + "epoch": 0.9535474505534386, + "grad_norm": 0.10826267765001234, + "learning_rate": 5.648376427439317e-06, + "loss": 1.553, + "step": 10510 + }, + { + "epoch": 0.9536381781890764, + "grad_norm": 0.10617133214452398, + "learning_rate": 5.626375437036202e-06, + "loss": 1.519, + "step": 10511 + }, + { + "epoch": 0.9537289058247143, + "grad_norm": 0.10630722363899169, + "learning_rate": 5.6044171358684e-06, + "loss": 1.5165, + "step": 10512 + }, + { + "epoch": 0.953819633460352, + "grad_norm": 0.11050827387613328, + "learning_rate": 5.582501525832117e-06, + "loss": 1.5729, + "step": 10513 + }, + { + "epoch": 0.9539103610959898, + "grad_norm": 0.10989758367508806, + "learning_rate": 5.560628608819673e-06, + "loss": 1.5036, + "step": 10514 + }, + { + "epoch": 0.9540010887316277, + "grad_norm": 0.10407092766063168, + "learning_rate": 5.538798386719834e-06, + "loss": 1.5264, + "step": 10515 + }, + { + "epoch": 0.9540918163672655, + "grad_norm": 0.1093903478146508, + "learning_rate": 5.517010861417648e-06, + "loss": 1.5826, + "step": 10516 + }, + { + "epoch": 0.9541825440029033, + "grad_norm": 0.10830597919241398, + "learning_rate": 5.495266034794388e-06, + "loss": 1.5507, + "step": 10517 + }, + { + "epoch": 0.9542732716385411, + "grad_norm": 0.10542445828157043, + "learning_rate": 5.473563908727885e-06, + "loss": 1.5212, + "step": 10518 + }, + { + "epoch": 0.9543639992741789, + "grad_norm": 0.10899129945063407, + "learning_rate": 5.451904485091919e-06, + "loss": 1.5508, + "step": 10519 + }, + { + "epoch": 0.9544547269098167, + "grad_norm": 0.10502483425447483, + "learning_rate": 5.430287765756936e-06, + "loss": 1.5535, + "step": 10520 + }, + { + "epoch": 0.9545454545454546, + "grad_norm": 0.11219204188286487, + "learning_rate": 5.4087137525895005e-06, + "loss": 1.5298, + "step": 10521 + }, + { + "epoch": 0.9546361821810924, + "grad_norm": 0.11074926627428024, + "learning_rate": 5.38718244745251e-06, + "loss": 1.5261, + "step": 10522 + }, + { + "epoch": 0.9547269098167301, + "grad_norm": 0.10752197816455647, + "learning_rate": 5.3656938522052e-06, + "loss": 1.5424, + "step": 10523 + }, + { + "epoch": 0.954817637452368, + "grad_norm": 
0.11183471713903692, + "learning_rate": 5.344247968703197e-06, + "loss": 1.556, + "step": 10524 + }, + { + "epoch": 0.9549083650880058, + "grad_norm": 0.10719588648744262, + "learning_rate": 5.322844798798243e-06, + "loss": 1.5802, + "step": 10525 + }, + { + "epoch": 0.9549990927236436, + "grad_norm": 0.10914577797896355, + "learning_rate": 5.301484344338525e-06, + "loss": 1.5263, + "step": 10526 + }, + { + "epoch": 0.9550898203592815, + "grad_norm": 0.1036067251971993, + "learning_rate": 5.280166607168568e-06, + "loss": 1.5569, + "step": 10527 + }, + { + "epoch": 0.9551805479949192, + "grad_norm": 0.10658466513244724, + "learning_rate": 5.258891589129122e-06, + "loss": 1.5478, + "step": 10528 + }, + { + "epoch": 0.955271275630557, + "grad_norm": 0.10419564592311924, + "learning_rate": 5.237659292057329e-06, + "loss": 1.5253, + "step": 10529 + }, + { + "epoch": 0.9553620032661949, + "grad_norm": 0.10960045966843802, + "learning_rate": 5.216469717786554e-06, + "loss": 1.5139, + "step": 10530 + }, + { + "epoch": 0.9554527309018327, + "grad_norm": 0.10863964316247501, + "learning_rate": 5.1953228681466125e-06, + "loss": 1.5743, + "step": 10531 + }, + { + "epoch": 0.9555434585374705, + "grad_norm": 0.10553646412693753, + "learning_rate": 5.174218744963377e-06, + "loss": 1.5187, + "step": 10532 + }, + { + "epoch": 0.9556341861731084, + "grad_norm": 0.10474639321542716, + "learning_rate": 5.153157350059334e-06, + "loss": 1.5043, + "step": 10533 + }, + { + "epoch": 0.9557249138087461, + "grad_norm": 0.10993847562081827, + "learning_rate": 5.132138685253085e-06, + "loss": 1.5284, + "step": 10534 + }, + { + "epoch": 0.9558156414443839, + "grad_norm": 0.10478331200763388, + "learning_rate": 5.111162752359566e-06, + "loss": 1.5122, + "step": 10535 + }, + { + "epoch": 0.9559063690800218, + "grad_norm": 0.10249078366224676, + "learning_rate": 5.090229553190051e-06, + "loss": 1.5433, + "step": 10536 + }, + { + "epoch": 0.9559970967156596, + "grad_norm": 0.10608822535064792, + "learning_rate": 5.069339089552261e-06, + "loss": 1.5645, + "step": 10537 + }, + { + "epoch": 0.9560878243512974, + "grad_norm": 0.10740061457739399, + "learning_rate": 5.048491363249919e-06, + "loss": 1.5142, + "step": 10538 + }, + { + "epoch": 0.9561785519869352, + "grad_norm": 0.10772413594443417, + "learning_rate": 5.027686376083307e-06, + "loss": 1.5149, + "step": 10539 + }, + { + "epoch": 0.956269279622573, + "grad_norm": 0.10619786187073342, + "learning_rate": 5.006924129848933e-06, + "loss": 1.5432, + "step": 10540 + }, + { + "epoch": 0.9563600072582108, + "grad_norm": 0.10828978405355515, + "learning_rate": 4.986204626339585e-06, + "loss": 1.5417, + "step": 10541 + }, + { + "epoch": 0.9564507348938487, + "grad_norm": 0.1027742278521159, + "learning_rate": 4.965527867344444e-06, + "loss": 1.4698, + "step": 10542 + }, + { + "epoch": 0.9565414625294865, + "grad_norm": 0.10602018812980403, + "learning_rate": 4.9448938546489134e-06, + "loss": 1.5818, + "step": 10543 + }, + { + "epoch": 0.9566321901651244, + "grad_norm": 0.10548961243295354, + "learning_rate": 4.924302590034846e-06, + "loss": 1.5206, + "step": 10544 + }, + { + "epoch": 0.9567229178007621, + "grad_norm": 0.1101836223237028, + "learning_rate": 4.9037540752801535e-06, + "loss": 1.5072, + "step": 10545 + }, + { + "epoch": 0.9568136454363999, + "grad_norm": 0.10771297576552968, + "learning_rate": 4.88324831215925e-06, + "loss": 1.535, + "step": 10546 + }, + { + "epoch": 0.9569043730720378, + "grad_norm": 0.1073495664857183, + "learning_rate": 4.862785302442829e-06, + 
"loss": 1.5211, + "step": 10547 + }, + { + "epoch": 0.9569951007076756, + "grad_norm": 0.10923793643440491, + "learning_rate": 4.842365047897923e-06, + "loss": 1.5347, + "step": 10548 + }, + { + "epoch": 0.9570858283433133, + "grad_norm": 0.10816148773380574, + "learning_rate": 4.821987550287732e-06, + "loss": 1.5185, + "step": 10549 + }, + { + "epoch": 0.9571765559789512, + "grad_norm": 0.10510174539836821, + "learning_rate": 4.8016528113719035e-06, + "loss": 1.5094, + "step": 10550 + }, + { + "epoch": 0.957267283614589, + "grad_norm": 0.10816249744948228, + "learning_rate": 4.781360832906312e-06, + "loss": 1.4999, + "step": 10551 + }, + { + "epoch": 0.9573580112502268, + "grad_norm": 0.10939480984531674, + "learning_rate": 4.7611116166432765e-06, + "loss": 1.5442, + "step": 10552 + }, + { + "epoch": 0.9574487388858647, + "grad_norm": 0.10521731985693773, + "learning_rate": 4.7409051643311775e-06, + "loss": 1.5416, + "step": 10553 + }, + { + "epoch": 0.9575394665215025, + "grad_norm": 0.11118977164129672, + "learning_rate": 4.720741477714952e-06, + "loss": 1.5267, + "step": 10554 + }, + { + "epoch": 0.9576301941571402, + "grad_norm": 0.10885160488921698, + "learning_rate": 4.700620558535707e-06, + "loss": 1.5627, + "step": 10555 + }, + { + "epoch": 0.9577209217927781, + "grad_norm": 0.10610464524891529, + "learning_rate": 4.6805424085308305e-06, + "loss": 1.6047, + "step": 10556 + }, + { + "epoch": 0.9578116494284159, + "grad_norm": 0.10743070126174675, + "learning_rate": 4.660507029434102e-06, + "loss": 1.5046, + "step": 10557 + }, + { + "epoch": 0.9579023770640537, + "grad_norm": 0.10755367145844563, + "learning_rate": 4.640514422975639e-06, + "loss": 1.5144, + "step": 10558 + }, + { + "epoch": 0.9579931046996916, + "grad_norm": 0.10730459160349612, + "learning_rate": 4.6205645908817816e-06, + "loss": 1.5097, + "step": 10559 + }, + { + "epoch": 0.9580838323353293, + "grad_norm": 0.10808423035134053, + "learning_rate": 4.600657534875097e-06, + "loss": 1.5013, + "step": 10560 + }, + { + "epoch": 0.9581745599709671, + "grad_norm": 0.10811014324610256, + "learning_rate": 4.580793256674765e-06, + "loss": 1.5305, + "step": 10561 + }, + { + "epoch": 0.958265287606605, + "grad_norm": 0.1073434970384068, + "learning_rate": 4.560971757995913e-06, + "loss": 1.5441, + "step": 10562 + }, + { + "epoch": 0.9583560152422428, + "grad_norm": 0.10440253104612639, + "learning_rate": 4.541193040550118e-06, + "loss": 1.5163, + "step": 10563 + }, + { + "epoch": 0.9584467428778806, + "grad_norm": 0.1300642719650978, + "learning_rate": 4.521457106045346e-06, + "loss": 1.5256, + "step": 10564 + }, + { + "epoch": 0.9585374705135185, + "grad_norm": 0.11115280647567984, + "learning_rate": 4.501763956185845e-06, + "loss": 1.5469, + "step": 10565 + }, + { + "epoch": 0.9586281981491562, + "grad_norm": 0.10680718449675292, + "learning_rate": 4.482113592671977e-06, + "loss": 1.5089, + "step": 10566 + }, + { + "epoch": 0.958718925784794, + "grad_norm": 0.11519174916354125, + "learning_rate": 4.462506017200662e-06, + "loss": 1.5043, + "step": 10567 + }, + { + "epoch": 0.9588096534204319, + "grad_norm": 0.10655156191362859, + "learning_rate": 4.442941231464992e-06, + "loss": 1.5017, + "step": 10568 + }, + { + "epoch": 0.9589003810560697, + "grad_norm": 0.10747416109337878, + "learning_rate": 4.423419237154391e-06, + "loss": 1.5855, + "step": 10569 + }, + { + "epoch": 0.9589911086917075, + "grad_norm": 0.1055327006515119, + "learning_rate": 4.403940035954568e-06, + "loss": 1.5314, + "step": 10570 + }, + { + "epoch": 
0.9590818363273453, + "grad_norm": 0.10814646225263753, + "learning_rate": 4.384503629547565e-06, + "loss": 1.5172, + "step": 10571 + }, + { + "epoch": 0.9591725639629831, + "grad_norm": 0.10283926753564275, + "learning_rate": 4.365110019611707e-06, + "loss": 1.5137, + "step": 10572 + }, + { + "epoch": 0.9592632915986209, + "grad_norm": 0.10901703291669962, + "learning_rate": 4.345759207821653e-06, + "loss": 1.5643, + "step": 10573 + }, + { + "epoch": 0.9593540192342588, + "grad_norm": 0.10503015494102703, + "learning_rate": 4.326451195848347e-06, + "loss": 1.4891, + "step": 10574 + }, + { + "epoch": 0.9594447468698966, + "grad_norm": 0.1082445801251729, + "learning_rate": 4.3071859853590635e-06, + "loss": 1.5299, + "step": 10575 + }, + { + "epoch": 0.9595354745055343, + "grad_norm": 0.10766630559235378, + "learning_rate": 4.287963578017307e-06, + "loss": 1.5382, + "step": 10576 + }, + { + "epoch": 0.9596262021411722, + "grad_norm": 0.11070361176241274, + "learning_rate": 4.268783975482915e-06, + "loss": 1.5203, + "step": 10577 + }, + { + "epoch": 0.95971692977681, + "grad_norm": 0.10740451319477878, + "learning_rate": 4.24964717941212e-06, + "loss": 1.5647, + "step": 10578 + }, + { + "epoch": 0.9598076574124478, + "grad_norm": 0.10815186589169541, + "learning_rate": 4.230553191457376e-06, + "loss": 1.5389, + "step": 10579 + }, + { + "epoch": 0.9598983850480857, + "grad_norm": 0.1104806526488358, + "learning_rate": 4.211502013267421e-06, + "loss": 1.5613, + "step": 10580 + }, + { + "epoch": 0.9599891126837234, + "grad_norm": 0.10917365495828099, + "learning_rate": 4.192493646487383e-06, + "loss": 1.5251, + "step": 10581 + }, + { + "epoch": 0.9600798403193613, + "grad_norm": 0.11012346440682225, + "learning_rate": 4.173528092758505e-06, + "loss": 1.5501, + "step": 10582 + }, + { + "epoch": 0.9601705679549991, + "grad_norm": 0.10381513968239085, + "learning_rate": 4.154605353718643e-06, + "loss": 1.5438, + "step": 10583 + }, + { + "epoch": 0.9602612955906369, + "grad_norm": 0.10419497728782097, + "learning_rate": 4.135725431001602e-06, + "loss": 1.5434, + "step": 10584 + }, + { + "epoch": 0.9603520232262748, + "grad_norm": 0.10811756472921097, + "learning_rate": 4.1168883262378e-06, + "loss": 1.5359, + "step": 10585 + }, + { + "epoch": 0.9604427508619126, + "grad_norm": 0.10273082480265322, + "learning_rate": 4.0980940410537685e-06, + "loss": 1.5097, + "step": 10586 + }, + { + "epoch": 0.9605334784975503, + "grad_norm": 0.10514361105407256, + "learning_rate": 4.079342577072431e-06, + "loss": 1.5453, + "step": 10587 + }, + { + "epoch": 0.9606242061331882, + "grad_norm": 0.10383050604496961, + "learning_rate": 4.0606339359129386e-06, + "loss": 1.5066, + "step": 10588 + }, + { + "epoch": 0.960714933768826, + "grad_norm": 0.11034919863239893, + "learning_rate": 4.041968119190775e-06, + "loss": 1.5686, + "step": 10589 + }, + { + "epoch": 0.9608056614044638, + "grad_norm": 0.10592321723535479, + "learning_rate": 4.023345128517764e-06, + "loss": 1.5149, + "step": 10590 + }, + { + "epoch": 0.9608963890401017, + "grad_norm": 0.10768671006054292, + "learning_rate": 4.004764965502006e-06, + "loss": 1.5517, + "step": 10591 + }, + { + "epoch": 0.9609871166757394, + "grad_norm": 0.11145025807097005, + "learning_rate": 3.98622763174794e-06, + "loss": 1.6148, + "step": 10592 + }, + { + "epoch": 0.9610778443113772, + "grad_norm": 0.10449917245659777, + "learning_rate": 3.967733128856177e-06, + "loss": 1.5412, + "step": 10593 + }, + { + "epoch": 0.9611685719470151, + "grad_norm": 0.10883284835748815, + 
"learning_rate": 3.9492814584237705e-06, + "loss": 1.4989, + "step": 10594 + }, + { + "epoch": 0.9612592995826529, + "grad_norm": 0.10924330650736312, + "learning_rate": 3.930872622044002e-06, + "loss": 1.5015, + "step": 10595 + }, + { + "epoch": 0.9613500272182907, + "grad_norm": 0.1062906247785787, + "learning_rate": 3.912506621306544e-06, + "loss": 1.5828, + "step": 10596 + }, + { + "epoch": 0.9614407548539285, + "grad_norm": 0.10732221569584977, + "learning_rate": 3.8941834577972405e-06, + "loss": 1.5464, + "step": 10597 + }, + { + "epoch": 0.9615314824895663, + "grad_norm": 0.10608818280167943, + "learning_rate": 3.875903133098268e-06, + "loss": 1.5155, + "step": 10598 + }, + { + "epoch": 0.9616222101252041, + "grad_norm": 0.10769414530511123, + "learning_rate": 3.857665648788256e-06, + "loss": 1.5286, + "step": 10599 + }, + { + "epoch": 0.961712937760842, + "grad_norm": 0.10861531155705925, + "learning_rate": 3.8394710064418305e-06, + "loss": 1.5175, + "step": 10600 + }, + { + "epoch": 0.9618036653964798, + "grad_norm": 0.10605314318128824, + "learning_rate": 3.821319207630292e-06, + "loss": 1.5354, + "step": 10601 + }, + { + "epoch": 0.9618943930321175, + "grad_norm": 0.10887993677596136, + "learning_rate": 3.803210253920997e-06, + "loss": 1.4987, + "step": 10602 + }, + { + "epoch": 0.9619851206677554, + "grad_norm": 0.10707434865358967, + "learning_rate": 3.785144146877584e-06, + "loss": 1.5112, + "step": 10603 + }, + { + "epoch": 0.9620758483033932, + "grad_norm": 0.11347677576976853, + "learning_rate": 3.767120888060083e-06, + "loss": 1.5644, + "step": 10604 + }, + { + "epoch": 0.962166575939031, + "grad_norm": 0.10583850352342791, + "learning_rate": 3.7491404790248594e-06, + "loss": 1.4978, + "step": 10605 + }, + { + "epoch": 0.9622573035746689, + "grad_norm": 0.10567923537687562, + "learning_rate": 3.731202921324506e-06, + "loss": 1.5106, + "step": 10606 + }, + { + "epoch": 0.9623480312103067, + "grad_norm": 0.11185418465735665, + "learning_rate": 3.7133082165078935e-06, + "loss": 1.5209, + "step": 10607 + }, + { + "epoch": 0.9624387588459444, + "grad_norm": 0.10375626423434775, + "learning_rate": 3.6954563661202867e-06, + "loss": 1.542, + "step": 10608 + }, + { + "epoch": 0.9625294864815823, + "grad_norm": 0.10366628750323964, + "learning_rate": 3.6776473717031743e-06, + "loss": 1.5091, + "step": 10609 + }, + { + "epoch": 0.9626202141172201, + "grad_norm": 0.10878338761368096, + "learning_rate": 3.659881234794382e-06, + "loss": 1.5215, + "step": 10610 + }, + { + "epoch": 0.9627109417528579, + "grad_norm": 0.10779430986644163, + "learning_rate": 3.6421579569279606e-06, + "loss": 1.5612, + "step": 10611 + }, + { + "epoch": 0.9628016693884958, + "grad_norm": 0.10727346359961687, + "learning_rate": 3.624477539634352e-06, + "loss": 1.5017, + "step": 10612 + }, + { + "epoch": 0.9628923970241335, + "grad_norm": 0.10699358474244158, + "learning_rate": 3.60683998444028e-06, + "loss": 1.5313, + "step": 10613 + }, + { + "epoch": 0.9629831246597713, + "grad_norm": 0.10830318119817214, + "learning_rate": 3.5892452928686927e-06, + "loss": 1.5854, + "step": 10614 + }, + { + "epoch": 0.9630738522954092, + "grad_norm": 0.10733200167293123, + "learning_rate": 3.5716934664389857e-06, + "loss": 1.5085, + "step": 10615 + }, + { + "epoch": 0.963164579931047, + "grad_norm": 0.10280543929004143, + "learning_rate": 3.5541845066667243e-06, + "loss": 1.5572, + "step": 10616 + }, + { + "epoch": 0.9632553075666848, + "grad_norm": 0.10616236966332152, + "learning_rate": 3.536718415063811e-06, + "loss": 
1.5202, + "step": 10617 + }, + { + "epoch": 0.9633460352023226, + "grad_norm": 0.10361297841128428, + "learning_rate": 3.5192951931383723e-06, + "loss": 1.562, + "step": 10618 + }, + { + "epoch": 0.9634367628379604, + "grad_norm": 0.10596095211622908, + "learning_rate": 3.5019148423950377e-06, + "loss": 1.4836, + "step": 10619 + }, + { + "epoch": 0.9635274904735982, + "grad_norm": 0.10320207494290849, + "learning_rate": 3.484577364334496e-06, + "loss": 1.5242, + "step": 10620 + }, + { + "epoch": 0.9636182181092361, + "grad_norm": 0.11036602725908679, + "learning_rate": 3.467282760453827e-06, + "loss": 1.4951, + "step": 10621 + }, + { + "epoch": 0.9637089457448739, + "grad_norm": 0.10809096384429955, + "learning_rate": 3.450031032246559e-06, + "loss": 1.5384, + "step": 10622 + }, + { + "epoch": 0.9637996733805118, + "grad_norm": 0.10793775610223098, + "learning_rate": 3.4328221812022777e-06, + "loss": 1.5564, + "step": 10623 + }, + { + "epoch": 0.9638904010161495, + "grad_norm": 0.11015344094531891, + "learning_rate": 3.4156562088069053e-06, + "loss": 1.5108, + "step": 10624 + }, + { + "epoch": 0.9639811286517873, + "grad_norm": 0.10621807472552677, + "learning_rate": 3.3985331165429236e-06, + "loss": 1.5554, + "step": 10625 + }, + { + "epoch": 0.9640718562874252, + "grad_norm": 0.10827653502481278, + "learning_rate": 3.38145290588876e-06, + "loss": 1.5325, + "step": 10626 + }, + { + "epoch": 0.964162583923063, + "grad_norm": 0.10846827794201166, + "learning_rate": 3.364415578319291e-06, + "loss": 1.5702, + "step": 10627 + }, + { + "epoch": 0.9642533115587008, + "grad_norm": 0.10321384077050687, + "learning_rate": 3.3474211353057836e-06, + "loss": 1.5326, + "step": 10628 + }, + { + "epoch": 0.9643440391943386, + "grad_norm": 0.11086542662662532, + "learning_rate": 3.3304695783156202e-06, + "loss": 1.5307, + "step": 10629 + }, + { + "epoch": 0.9644347668299764, + "grad_norm": 0.10942465225554093, + "learning_rate": 3.3135609088126294e-06, + "loss": 1.538, + "step": 10630 + }, + { + "epoch": 0.9645254944656142, + "grad_norm": 0.11092836482062854, + "learning_rate": 3.2966951282568105e-06, + "loss": 1.5401, + "step": 10631 + }, + { + "epoch": 0.9646162221012521, + "grad_norm": 0.10836910030862872, + "learning_rate": 3.279872238104664e-06, + "loss": 1.5292, + "step": 10632 + }, + { + "epoch": 0.9647069497368899, + "grad_norm": 0.10565179922757462, + "learning_rate": 3.263092239808696e-06, + "loss": 1.5032, + "step": 10633 + }, + { + "epoch": 0.9647976773725276, + "grad_norm": 0.1041117743993044, + "learning_rate": 3.2463551348179687e-06, + "loss": 1.5433, + "step": 10634 + }, + { + "epoch": 0.9648884050081655, + "grad_norm": 0.1135193443100669, + "learning_rate": 3.22966092457766e-06, + "loss": 1.5302, + "step": 10635 + }, + { + "epoch": 0.9649791326438033, + "grad_norm": 0.1042549042429001, + "learning_rate": 3.2130096105293383e-06, + "loss": 1.5404, + "step": 10636 + }, + { + "epoch": 0.9650698602794411, + "grad_norm": 0.10845682967150443, + "learning_rate": 3.196401194110854e-06, + "loss": 1.5128, + "step": 10637 + }, + { + "epoch": 0.965160587915079, + "grad_norm": 0.10876057375769871, + "learning_rate": 3.1798356767563376e-06, + "loss": 1.5051, + "step": 10638 + }, + { + "epoch": 0.9652513155507167, + "grad_norm": 0.112212749190329, + "learning_rate": 3.163313059896311e-06, + "loss": 1.5172, + "step": 10639 + }, + { + "epoch": 0.9653420431863545, + "grad_norm": 0.10851285551843594, + "learning_rate": 3.146833344957356e-06, + "loss": 1.5255, + "step": 10640 + }, + { + "epoch": 
0.9654327708219924, + "grad_norm": 0.10935240911430208, + "learning_rate": 3.1303965333626116e-06, + "loss": 1.5671, + "step": 10641 + }, + { + "epoch": 0.9655234984576302, + "grad_norm": 0.10874208835036091, + "learning_rate": 3.1140026265313317e-06, + "loss": 1.5252, + "step": 10642 + }, + { + "epoch": 0.965614226093268, + "grad_norm": 0.10994028190418832, + "learning_rate": 3.097651625879161e-06, + "loss": 1.5488, + "step": 10643 + }, + { + "epoch": 0.9657049537289059, + "grad_norm": 0.1055058018937974, + "learning_rate": 3.081343532817971e-06, + "loss": 1.5533, + "step": 10644 + }, + { + "epoch": 0.9657956813645436, + "grad_norm": 0.1062533148188745, + "learning_rate": 3.0650783487560785e-06, + "loss": 1.5495, + "step": 10645 + }, + { + "epoch": 0.9658864090001814, + "grad_norm": 0.10621842859070248, + "learning_rate": 3.0488560750978613e-06, + "loss": 1.5562, + "step": 10646 + }, + { + "epoch": 0.9659771366358193, + "grad_norm": 0.1075078973847449, + "learning_rate": 3.032676713244198e-06, + "loss": 1.5072, + "step": 10647 + }, + { + "epoch": 0.9660678642714571, + "grad_norm": 0.10701579512293326, + "learning_rate": 3.016540264592138e-06, + "loss": 1.4844, + "step": 10648 + }, + { + "epoch": 0.9661585919070949, + "grad_norm": 0.10775325173561218, + "learning_rate": 3.000446730535067e-06, + "loss": 1.4626, + "step": 10649 + }, + { + "epoch": 0.9662493195427327, + "grad_norm": 0.10486718046599307, + "learning_rate": 2.9843961124626507e-06, + "loss": 1.5002, + "step": 10650 + }, + { + "epoch": 0.9663400471783705, + "grad_norm": 0.1063063382343594, + "learning_rate": 2.9683884117608916e-06, + "loss": 1.5354, + "step": 10651 + }, + { + "epoch": 0.9664307748140083, + "grad_norm": 0.10612403195844325, + "learning_rate": 2.9524236298120733e-06, + "loss": 1.5546, + "step": 10652 + }, + { + "epoch": 0.9665215024496462, + "grad_norm": 0.10864611414846552, + "learning_rate": 2.9365017679947037e-06, + "loss": 1.5414, + "step": 10653 + }, + { + "epoch": 0.966612230085284, + "grad_norm": 0.10768876364309128, + "learning_rate": 2.9206228276836834e-06, + "loss": 1.5344, + "step": 10654 + }, + { + "epoch": 0.9667029577209217, + "grad_norm": 0.10835631388818132, + "learning_rate": 2.9047868102501372e-06, + "loss": 1.4976, + "step": 10655 + }, + { + "epoch": 0.9667936853565596, + "grad_norm": 0.10772564211060716, + "learning_rate": 2.888993717061528e-06, + "loss": 1.5162, + "step": 10656 + }, + { + "epoch": 0.9668844129921974, + "grad_norm": 0.10885829630445816, + "learning_rate": 2.8732435494815414e-06, + "loss": 1.5758, + "step": 10657 + }, + { + "epoch": 0.9669751406278352, + "grad_norm": 0.10785230153511782, + "learning_rate": 2.8575363088702566e-06, + "loss": 1.5563, + "step": 10658 + }, + { + "epoch": 0.9670658682634731, + "grad_norm": 0.11028735670560892, + "learning_rate": 2.8418719965840334e-06, + "loss": 1.5499, + "step": 10659 + }, + { + "epoch": 0.9671565958991108, + "grad_norm": 0.10942532387792707, + "learning_rate": 2.826250613975345e-06, + "loss": 1.5444, + "step": 10660 + }, + { + "epoch": 0.9672473235347487, + "grad_norm": 0.10570740328477995, + "learning_rate": 2.8106721623932796e-06, + "loss": 1.5511, + "step": 10661 + }, + { + "epoch": 0.9673380511703865, + "grad_norm": 0.11207199750792664, + "learning_rate": 2.795136643182927e-06, + "loss": 1.5103, + "step": 10662 + }, + { + "epoch": 0.9674287788060243, + "grad_norm": 0.11076242900827739, + "learning_rate": 2.7796440576857707e-06, + "loss": 1.522, + "step": 10663 + }, + { + "epoch": 0.9675195064416622, + "grad_norm": 
0.10936122498848, + "learning_rate": 2.764194407239684e-06, + "loss": 1.5744, + "step": 10664 + }, + { + "epoch": 0.9676102340773, + "grad_norm": 0.10846483193318461, + "learning_rate": 2.7487876931786557e-06, + "loss": 1.5511, + "step": 10665 + }, + { + "epoch": 0.9677009617129377, + "grad_norm": 0.10533661376815756, + "learning_rate": 2.733423916833122e-06, + "loss": 1.5152, + "step": 10666 + }, + { + "epoch": 0.9677916893485756, + "grad_norm": 0.1057779989999592, + "learning_rate": 2.718103079529688e-06, + "loss": 1.5361, + "step": 10667 + }, + { + "epoch": 0.9678824169842134, + "grad_norm": 0.10388334079097718, + "learning_rate": 2.702825182591351e-06, + "loss": 1.5088, + "step": 10668 + }, + { + "epoch": 0.9679731446198512, + "grad_norm": 0.10707462156160791, + "learning_rate": 2.6875902273373886e-06, + "loss": 1.4939, + "step": 10669 + }, + { + "epoch": 0.9680638722554891, + "grad_norm": 0.10768196385106109, + "learning_rate": 2.672398215083305e-06, + "loss": 1.5605, + "step": 10670 + }, + { + "epoch": 0.9681545998911268, + "grad_norm": 0.10465322641428602, + "learning_rate": 2.6572491471408834e-06, + "loss": 1.5441, + "step": 10671 + }, + { + "epoch": 0.9682453275267646, + "grad_norm": 0.10357402355068315, + "learning_rate": 2.6421430248183552e-06, + "loss": 1.5542, + "step": 10672 + }, + { + "epoch": 0.9683360551624025, + "grad_norm": 0.10678356944575176, + "learning_rate": 2.6270798494200664e-06, + "loss": 1.5593, + "step": 10673 + }, + { + "epoch": 0.9684267827980403, + "grad_norm": 0.10890943997216135, + "learning_rate": 2.612059622246754e-06, + "loss": 1.5276, + "step": 10674 + }, + { + "epoch": 0.9685175104336781, + "grad_norm": 0.10714319009760058, + "learning_rate": 2.5970823445953252e-06, + "loss": 1.548, + "step": 10675 + }, + { + "epoch": 0.968608238069316, + "grad_norm": 0.10855736190043593, + "learning_rate": 2.5821480177591895e-06, + "loss": 1.5254, + "step": 10676 + }, + { + "epoch": 0.9686989657049537, + "grad_norm": 0.10600272067708909, + "learning_rate": 2.5672566430279266e-06, + "loss": 1.5626, + "step": 10677 + }, + { + "epoch": 0.9687896933405915, + "grad_norm": 0.10539551146180272, + "learning_rate": 2.552408221687286e-06, + "loss": 1.552, + "step": 10678 + }, + { + "epoch": 0.9688804209762294, + "grad_norm": 0.1078147393074172, + "learning_rate": 2.5376027550195745e-06, + "loss": 1.5857, + "step": 10679 + }, + { + "epoch": 0.9689711486118672, + "grad_norm": 0.10830843910534595, + "learning_rate": 2.5228402443031595e-06, + "loss": 1.5274, + "step": 10680 + }, + { + "epoch": 0.969061876247505, + "grad_norm": 0.10951848393099478, + "learning_rate": 2.508120690812854e-06, + "loss": 1.5021, + "step": 10681 + }, + { + "epoch": 0.9691526038831428, + "grad_norm": 0.10348554966374862, + "learning_rate": 2.493444095819586e-06, + "loss": 1.5371, + "step": 10682 + }, + { + "epoch": 0.9692433315187806, + "grad_norm": 0.10978822579750941, + "learning_rate": 2.4788104605907302e-06, + "loss": 1.5411, + "step": 10683 + }, + { + "epoch": 0.9693340591544184, + "grad_norm": 0.11313219218100892, + "learning_rate": 2.4642197863899986e-06, + "loss": 1.5557, + "step": 10684 + }, + { + "epoch": 0.9694247867900563, + "grad_norm": 0.106748170645471, + "learning_rate": 2.4496720744771605e-06, + "loss": 1.5581, + "step": 10685 + }, + { + "epoch": 0.9695155144256941, + "grad_norm": 0.10817915752289185, + "learning_rate": 2.4351673261084895e-06, + "loss": 1.532, + "step": 10686 + }, + { + "epoch": 0.9696062420613318, + "grad_norm": 0.1023887912769014, + "learning_rate": 
2.420705542536483e-06, + "loss": 1.5712, + "step": 10687 + }, + { + "epoch": 0.9696969696969697, + "grad_norm": 0.10530785071251517, + "learning_rate": 2.4062867250098097e-06, + "loss": 1.538, + "step": 10688 + }, + { + "epoch": 0.9697876973326075, + "grad_norm": 0.10661273402375912, + "learning_rate": 2.391910874773695e-06, + "loss": 1.5102, + "step": 10689 + }, + { + "epoch": 0.9698784249682453, + "grad_norm": 0.1109913602651678, + "learning_rate": 2.3775779930693686e-06, + "loss": 1.5313, + "step": 10690 + }, + { + "epoch": 0.9699691526038832, + "grad_norm": 0.10583703423589225, + "learning_rate": 2.3632880811345624e-06, + "loss": 1.5319, + "step": 10691 + }, + { + "epoch": 0.9700598802395209, + "grad_norm": 0.10894279513128056, + "learning_rate": 2.349041140203123e-06, + "loss": 1.5078, + "step": 10692 + }, + { + "epoch": 0.9701506078751587, + "grad_norm": 0.10620797936994045, + "learning_rate": 2.334837171505344e-06, + "loss": 1.5041, + "step": 10693 + }, + { + "epoch": 0.9702413355107966, + "grad_norm": 0.10659569451720603, + "learning_rate": 2.320676176267744e-06, + "loss": 1.5391, + "step": 10694 + }, + { + "epoch": 0.9703320631464344, + "grad_norm": 0.10670268241728775, + "learning_rate": 2.306558155713068e-06, + "loss": 1.5394, + "step": 10695 + }, + { + "epoch": 0.9704227907820722, + "grad_norm": 0.10606915353962289, + "learning_rate": 2.292483111060506e-06, + "loss": 1.5232, + "step": 10696 + }, + { + "epoch": 0.97051351841771, + "grad_norm": 0.10863041340533432, + "learning_rate": 2.278451043525365e-06, + "loss": 1.5276, + "step": 10697 + }, + { + "epoch": 0.9706042460533478, + "grad_norm": 0.10733039997709795, + "learning_rate": 2.264461954319341e-06, + "loss": 1.5428, + "step": 10698 + }, + { + "epoch": 0.9706949736889857, + "grad_norm": 0.1041166831230596, + "learning_rate": 2.2505158446503025e-06, + "loss": 1.554, + "step": 10699 + }, + { + "epoch": 0.9707857013246235, + "grad_norm": 0.10478047014607593, + "learning_rate": 2.2366127157226745e-06, + "loss": 1.5092, + "step": 10700 + }, + { + "epoch": 0.9708764289602613, + "grad_norm": 0.10628821572538873, + "learning_rate": 2.2227525687368856e-06, + "loss": 1.4989, + "step": 10701 + }, + { + "epoch": 0.9709671565958992, + "grad_norm": 0.10968094597219737, + "learning_rate": 2.208935404889756e-06, + "loss": 1.5087, + "step": 10702 + }, + { + "epoch": 0.9710578842315369, + "grad_norm": 0.10747512774303812, + "learning_rate": 2.1951612253744424e-06, + "loss": 1.544, + "step": 10703 + }, + { + "epoch": 0.9711486118671747, + "grad_norm": 0.11026553705858767, + "learning_rate": 2.181430031380327e-06, + "loss": 1.5668, + "step": 10704 + }, + { + "epoch": 0.9712393395028126, + "grad_norm": 0.10746195324154138, + "learning_rate": 2.1677418240930725e-06, + "loss": 1.5401, + "step": 10705 + }, + { + "epoch": 0.9713300671384504, + "grad_norm": 0.10814945888726815, + "learning_rate": 2.1540966046946774e-06, + "loss": 1.5132, + "step": 10706 + }, + { + "epoch": 0.9714207947740882, + "grad_norm": 0.10854240156222093, + "learning_rate": 2.140494374363422e-06, + "loss": 1.5342, + "step": 10707 + }, + { + "epoch": 0.971511522409726, + "grad_norm": 0.10665973569576927, + "learning_rate": 2.126935134273866e-06, + "loss": 1.5121, + "step": 10708 + }, + { + "epoch": 0.9716022500453638, + "grad_norm": 0.10725663817508373, + "learning_rate": 2.1134188855968518e-06, + "loss": 1.5226, + "step": 10709 + }, + { + "epoch": 0.9716929776810016, + "grad_norm": 0.10957362008150838, + "learning_rate": 2.0999456294995002e-06, + "loss": 1.5325, + "step": 
10710 + }, + { + "epoch": 0.9717837053166395, + "grad_norm": 0.10754851312829537, + "learning_rate": 2.0865153671452143e-06, + "loss": 1.5173, + "step": 10711 + }, + { + "epoch": 0.9718744329522773, + "grad_norm": 0.10689337401451733, + "learning_rate": 2.0731280996936773e-06, + "loss": 1.5259, + "step": 10712 + }, + { + "epoch": 0.971965160587915, + "grad_norm": 0.10933650062012061, + "learning_rate": 2.059783828300965e-06, + "loss": 1.5301, + "step": 10713 + }, + { + "epoch": 0.9720558882235529, + "grad_norm": 0.10625477525590737, + "learning_rate": 2.0464825541193222e-06, + "loss": 1.5363, + "step": 10714 + }, + { + "epoch": 0.9721466158591907, + "grad_norm": 0.11228948160354818, + "learning_rate": 2.0332242782972743e-06, + "loss": 1.5837, + "step": 10715 + }, + { + "epoch": 0.9722373434948285, + "grad_norm": 0.1091586553880559, + "learning_rate": 2.0200090019796835e-06, + "loss": 1.5515, + "step": 10716 + }, + { + "epoch": 0.9723280711304664, + "grad_norm": 0.10733953187065239, + "learning_rate": 2.0068367263076924e-06, + "loss": 1.514, + "step": 10717 + }, + { + "epoch": 0.9724187987661042, + "grad_norm": 0.1106378362701991, + "learning_rate": 1.9937074524188358e-06, + "loss": 1.542, + "step": 10718 + }, + { + "epoch": 0.9725095264017419, + "grad_norm": 0.10749853922981059, + "learning_rate": 1.9806211814466513e-06, + "loss": 1.5218, + "step": 10719 + }, + { + "epoch": 0.9726002540373798, + "grad_norm": 0.10864161240887496, + "learning_rate": 1.96757791452129e-06, + "loss": 1.5385, + "step": 10720 + }, + { + "epoch": 0.9726909816730176, + "grad_norm": 0.10683690340958663, + "learning_rate": 1.954577652768963e-06, + "loss": 1.5057, + "step": 10721 + }, + { + "epoch": 0.9727817093086554, + "grad_norm": 0.10625964582533746, + "learning_rate": 1.941620397312216e-06, + "loss": 1.5391, + "step": 10722 + }, + { + "epoch": 0.9728724369442933, + "grad_norm": 0.10399026014535427, + "learning_rate": 1.9287061492699875e-06, + "loss": 1.4937, + "step": 10723 + }, + { + "epoch": 0.972963164579931, + "grad_norm": 0.10506318490520193, + "learning_rate": 1.9158349097573858e-06, + "loss": 1.5383, + "step": 10724 + }, + { + "epoch": 0.9730538922155688, + "grad_norm": 0.10600967184397425, + "learning_rate": 1.9030066798858548e-06, + "loss": 1.5242, + "step": 10725 + }, + { + "epoch": 0.9731446198512067, + "grad_norm": 0.10891630276203303, + "learning_rate": 1.890221460763064e-06, + "loss": 1.5522, + "step": 10726 + }, + { + "epoch": 0.9732353474868445, + "grad_norm": 0.10848294419563967, + "learning_rate": 1.8774792534931306e-06, + "loss": 1.5508, + "step": 10727 + }, + { + "epoch": 0.9733260751224823, + "grad_norm": 0.11059306091583011, + "learning_rate": 1.8647800591762855e-06, + "loss": 1.5368, + "step": 10728 + }, + { + "epoch": 0.9734168027581201, + "grad_norm": 0.10537744755763326, + "learning_rate": 1.85212387890904e-06, + "loss": 1.5678, + "step": 10729 + }, + { + "epoch": 0.9735075303937579, + "grad_norm": 0.10586762192369281, + "learning_rate": 1.8395107137843537e-06, + "loss": 1.5636, + "step": 10730 + }, + { + "epoch": 0.9735982580293957, + "grad_norm": 0.10749045075061547, + "learning_rate": 1.8269405648912995e-06, + "loss": 1.5161, + "step": 10731 + }, + { + "epoch": 0.9736889856650336, + "grad_norm": 0.10313328846227339, + "learning_rate": 1.8144134333153983e-06, + "loss": 1.5304, + "step": 10732 + }, + { + "epoch": 0.9737797133006714, + "grad_norm": 0.10804983991729669, + "learning_rate": 1.8019293201383403e-06, + "loss": 1.5209, + "step": 10733 + }, + { + "epoch": 0.9738704409363091, + 
"grad_norm": 0.10544080274236563, + "learning_rate": 1.7894882264380408e-06, + "loss": 1.5184, + "step": 10734 + }, + { + "epoch": 0.973961168571947, + "grad_norm": 0.10733955571095712, + "learning_rate": 1.7770901532889183e-06, + "loss": 1.5687, + "step": 10735 + }, + { + "epoch": 0.9740518962075848, + "grad_norm": 0.1086288436716644, + "learning_rate": 1.76473510176145e-06, + "loss": 1.5615, + "step": 10736 + }, + { + "epoch": 0.9741426238432227, + "grad_norm": 0.10453699678505038, + "learning_rate": 1.75242307292256e-06, + "loss": 1.4838, + "step": 10737 + }, + { + "epoch": 0.9742333514788605, + "grad_norm": 0.11272052175873108, + "learning_rate": 1.7401540678353977e-06, + "loss": 1.5668, + "step": 10738 + }, + { + "epoch": 0.9743240791144983, + "grad_norm": 0.10427696891133559, + "learning_rate": 1.7279280875593383e-06, + "loss": 1.5247, + "step": 10739 + }, + { + "epoch": 0.9744148067501361, + "grad_norm": 0.10873199426503954, + "learning_rate": 1.7157451331500928e-06, + "loss": 1.5393, + "step": 10740 + }, + { + "epoch": 0.9745055343857739, + "grad_norm": 0.10333671854192548, + "learning_rate": 1.703605205659764e-06, + "loss": 1.5149, + "step": 10741 + }, + { + "epoch": 0.9745962620214117, + "grad_norm": 0.10525441269568508, + "learning_rate": 1.691508306136569e-06, + "loss": 1.5233, + "step": 10742 + }, + { + "epoch": 0.9746869896570496, + "grad_norm": 0.10698562605990956, + "learning_rate": 1.6794544356250052e-06, + "loss": 1.5416, + "step": 10743 + }, + { + "epoch": 0.9747777172926874, + "grad_norm": 0.10545827716900498, + "learning_rate": 1.6674435951660182e-06, + "loss": 1.5493, + "step": 10744 + }, + { + "epoch": 0.9748684449283251, + "grad_norm": 0.10553597825706416, + "learning_rate": 1.6554757857967783e-06, + "loss": 1.5107, + "step": 10745 + }, + { + "epoch": 0.974959172563963, + "grad_norm": 0.10845650807699726, + "learning_rate": 1.6435510085505701e-06, + "loss": 1.5391, + "step": 10746 + }, + { + "epoch": 0.9750499001996008, + "grad_norm": 0.10500202636989231, + "learning_rate": 1.631669264457236e-06, + "loss": 1.5406, + "step": 10747 + }, + { + "epoch": 0.9751406278352386, + "grad_norm": 0.10446584497004134, + "learning_rate": 1.6198305545426784e-06, + "loss": 1.5893, + "step": 10748 + }, + { + "epoch": 0.9752313554708765, + "grad_norm": 0.1077123753655288, + "learning_rate": 1.6080348798292454e-06, + "loss": 1.5431, + "step": 10749 + }, + { + "epoch": 0.9753220831065142, + "grad_norm": 0.1089406341079349, + "learning_rate": 1.596282241335456e-06, + "loss": 1.5282, + "step": 10750 + }, + { + "epoch": 0.975412810742152, + "grad_norm": 0.10519754116796512, + "learning_rate": 1.5845726400761096e-06, + "loss": 1.5228, + "step": 10751 + }, + { + "epoch": 0.9755035383777899, + "grad_norm": 0.10665268184310377, + "learning_rate": 1.5729060770624525e-06, + "loss": 1.5081, + "step": 10752 + }, + { + "epoch": 0.9755942660134277, + "grad_norm": 0.11542447018886143, + "learning_rate": 1.561282553301735e-06, + "loss": 1.5555, + "step": 10753 + }, + { + "epoch": 0.9756849936490655, + "grad_norm": 0.10386406669892555, + "learning_rate": 1.5497020697977648e-06, + "loss": 1.5675, + "step": 10754 + }, + { + "epoch": 0.9757757212847034, + "grad_norm": 0.10426334844864782, + "learning_rate": 1.5381646275504645e-06, + "loss": 1.5384, + "step": 10755 + }, + { + "epoch": 0.9758664489203411, + "grad_norm": 0.10885310967612957, + "learning_rate": 1.5266702275560928e-06, + "loss": 1.5395, + "step": 10756 + }, + { + "epoch": 0.9759571765559789, + "grad_norm": 0.10588004777316261, + 
"learning_rate": 1.5152188708073e-06, + "loss": 1.5338, + "step": 10757 + }, + { + "epoch": 0.9760479041916168, + "grad_norm": 0.10744405511203685, + "learning_rate": 1.5038105582927952e-06, + "loss": 1.5229, + "step": 10758 + }, + { + "epoch": 0.9761386318272546, + "grad_norm": 0.10799048685551656, + "learning_rate": 1.4924452909976793e-06, + "loss": 1.5748, + "step": 10759 + }, + { + "epoch": 0.9762293594628924, + "grad_norm": 0.10625343537124646, + "learning_rate": 1.4811230699033895e-06, + "loss": 1.5446, + "step": 10760 + }, + { + "epoch": 0.9763200870985302, + "grad_norm": 0.10747854755531293, + "learning_rate": 1.4698438959876436e-06, + "loss": 1.5277, + "step": 10761 + }, + { + "epoch": 0.976410814734168, + "grad_norm": 0.10620953376752136, + "learning_rate": 1.4586077702243294e-06, + "loss": 1.5357, + "step": 10762 + }, + { + "epoch": 0.9765015423698058, + "grad_norm": 0.10486482367719115, + "learning_rate": 1.4474146935836706e-06, + "loss": 1.4804, + "step": 10763 + }, + { + "epoch": 0.9765922700054437, + "grad_norm": 0.10348713764284524, + "learning_rate": 1.4362646670322831e-06, + "loss": 1.5994, + "step": 10764 + }, + { + "epoch": 0.9766829976410815, + "grad_norm": 0.10957128180969493, + "learning_rate": 1.4251576915328967e-06, + "loss": 1.5819, + "step": 10765 + }, + { + "epoch": 0.9767737252767192, + "grad_norm": 0.1050525220343452, + "learning_rate": 1.4140937680446331e-06, + "loss": 1.5646, + "step": 10766 + }, + { + "epoch": 0.9768644529123571, + "grad_norm": 0.1051584795289685, + "learning_rate": 1.4030728975228946e-06, + "loss": 1.4821, + "step": 10767 + }, + { + "epoch": 0.9769551805479949, + "grad_norm": 0.11335302024919358, + "learning_rate": 1.3920950809192534e-06, + "loss": 1.6009, + "step": 10768 + }, + { + "epoch": 0.9770459081836327, + "grad_norm": 0.10645960027522158, + "learning_rate": 1.3811603191816735e-06, + "loss": 1.5081, + "step": 10769 + }, + { + "epoch": 0.9771366358192706, + "grad_norm": 0.11081923701775569, + "learning_rate": 1.3702686132543996e-06, + "loss": 1.532, + "step": 10770 + }, + { + "epoch": 0.9772273634549083, + "grad_norm": 0.11017344145038176, + "learning_rate": 1.359419964077957e-06, + "loss": 1.5862, + "step": 10771 + }, + { + "epoch": 0.9773180910905461, + "grad_norm": 0.10894774341074699, + "learning_rate": 1.3486143725890965e-06, + "loss": 1.5258, + "step": 10772 + }, + { + "epoch": 0.977408818726184, + "grad_norm": 0.10363708091105873, + "learning_rate": 1.3378518397208494e-06, + "loss": 1.5358, + "step": 10773 + }, + { + "epoch": 0.9774995463618218, + "grad_norm": 0.10659517092292764, + "learning_rate": 1.3271323664025836e-06, + "loss": 1.5331, + "step": 10774 + }, + { + "epoch": 0.9775902739974597, + "grad_norm": 0.1032335075930618, + "learning_rate": 1.3164559535599474e-06, + "loss": 1.5412, + "step": 10775 + }, + { + "epoch": 0.9776810016330975, + "grad_norm": 0.10950574938970094, + "learning_rate": 1.3058226021148146e-06, + "loss": 1.5304, + "step": 10776 + }, + { + "epoch": 0.9777717292687352, + "grad_norm": 0.11145973571055508, + "learning_rate": 1.2952323129854504e-06, + "loss": 1.5313, + "step": 10777 + }, + { + "epoch": 0.9778624569043731, + "grad_norm": 0.10549067740162063, + "learning_rate": 1.2846850870862347e-06, + "loss": 1.502, + "step": 10778 + }, + { + "epoch": 0.9779531845400109, + "grad_norm": 0.10869379178658714, + "learning_rate": 1.2741809253279946e-06, + "loss": 1.4984, + "step": 10779 + }, + { + "epoch": 0.9780439121756487, + "grad_norm": 0.10919273579853575, + "learning_rate": 1.2637198286177265e-06, + 
"loss": 1.5398, + "step": 10780 + }, + { + "epoch": 0.9781346398112866, + "grad_norm": 0.10831366496368197, + "learning_rate": 1.2533017978587635e-06, + "loss": 1.5028, + "step": 10781 + }, + { + "epoch": 0.9782253674469243, + "grad_norm": 0.10453275795020919, + "learning_rate": 1.2429268339506639e-06, + "loss": 1.5014, + "step": 10782 + }, + { + "epoch": 0.9783160950825621, + "grad_norm": 0.10671989960539162, + "learning_rate": 1.2325949377893775e-06, + "loss": 1.5232, + "step": 10783 + }, + { + "epoch": 0.9784068227182, + "grad_norm": 0.11125319711156105, + "learning_rate": 1.222306110267024e-06, + "loss": 1.5578, + "step": 10784 + }, + { + "epoch": 0.9784975503538378, + "grad_norm": 0.10872370756915921, + "learning_rate": 1.2120603522720043e-06, + "loss": 1.5371, + "step": 10785 + }, + { + "epoch": 0.9785882779894756, + "grad_norm": 0.10622091551182021, + "learning_rate": 1.2018576646891655e-06, + "loss": 1.561, + "step": 10786 + }, + { + "epoch": 0.9786790056251135, + "grad_norm": 0.10600938024686511, + "learning_rate": 1.191698048399359e-06, + "loss": 1.5759, + "step": 10787 + }, + { + "epoch": 0.9787697332607512, + "grad_norm": 0.10875023510256707, + "learning_rate": 1.1815815042799938e-06, + "loss": 1.529, + "step": 10788 + }, + { + "epoch": 0.978860460896389, + "grad_norm": 0.10776059063004906, + "learning_rate": 1.1715080332045936e-06, + "loss": 1.5443, + "step": 10789 + }, + { + "epoch": 0.9789511885320269, + "grad_norm": 0.10415105740347562, + "learning_rate": 1.1614776360429624e-06, + "loss": 1.5053, + "step": 10790 + }, + { + "epoch": 0.9790419161676647, + "grad_norm": 0.10723279757226106, + "learning_rate": 1.151490313661241e-06, + "loss": 1.5427, + "step": 10791 + }, + { + "epoch": 0.9791326438033024, + "grad_norm": 0.11074383802067828, + "learning_rate": 1.1415460669218503e-06, + "loss": 1.5581, + "step": 10792 + }, + { + "epoch": 0.9792233714389403, + "grad_norm": 0.10749492276257801, + "learning_rate": 1.1316448966835479e-06, + "loss": 1.5228, + "step": 10793 + }, + { + "epoch": 0.9793140990745781, + "grad_norm": 0.11037437434728238, + "learning_rate": 1.1217868038011503e-06, + "loss": 1.542, + "step": 10794 + }, + { + "epoch": 0.9794048267102159, + "grad_norm": 0.10605210148606935, + "learning_rate": 1.1119717891260872e-06, + "loss": 1.5144, + "step": 10795 + }, + { + "epoch": 0.9794955543458538, + "grad_norm": 0.10543916790477277, + "learning_rate": 1.1021998535057365e-06, + "loss": 1.5408, + "step": 10796 + }, + { + "epoch": 0.9795862819814916, + "grad_norm": 0.1075387661886633, + "learning_rate": 1.0924709977839231e-06, + "loss": 1.5331, + "step": 10797 + }, + { + "epoch": 0.9796770096171293, + "grad_norm": 0.10725175618184316, + "learning_rate": 1.0827852228008085e-06, + "loss": 1.5025, + "step": 10798 + }, + { + "epoch": 0.9797677372527672, + "grad_norm": 0.10968228491551749, + "learning_rate": 1.0731425293927232e-06, + "loss": 1.5117, + "step": 10799 + }, + { + "epoch": 0.979858464888405, + "grad_norm": 0.10463563355438592, + "learning_rate": 1.0635429183922795e-06, + "loss": 1.5483, + "step": 10800 + }, + { + "epoch": 0.9799491925240428, + "grad_norm": 0.10446300666135412, + "learning_rate": 1.0539863906284808e-06, + "loss": 1.5003, + "step": 10801 + }, + { + "epoch": 0.9800399201596807, + "grad_norm": 0.10704563298902788, + "learning_rate": 1.0444729469265001e-06, + "loss": 1.5258, + "step": 10802 + }, + { + "epoch": 0.9801306477953184, + "grad_norm": 0.10653841391239102, + "learning_rate": 1.0350025881077918e-06, + "loss": 1.5089, + "step": 10803 + }, + { + 
"epoch": 0.9802213754309562, + "grad_norm": 0.10737617509559907, + "learning_rate": 1.025575314990146e-06, + "loss": 1.5109, + "step": 10804 + }, + { + "epoch": 0.9803121030665941, + "grad_norm": 0.10704919226913062, + "learning_rate": 1.0161911283876335e-06, + "loss": 1.52, + "step": 10805 + }, + { + "epoch": 0.9804028307022319, + "grad_norm": 0.10491209754208822, + "learning_rate": 1.0068500291105509e-06, + "loss": 1.518, + "step": 10806 + }, + { + "epoch": 0.9804935583378697, + "grad_norm": 0.10869534565154539, + "learning_rate": 9.975520179655306e-07, + "loss": 1.543, + "step": 10807 + }, + { + "epoch": 0.9805842859735076, + "grad_norm": 0.10696477197564552, + "learning_rate": 9.882970957554304e-07, + "loss": 1.5443, + "step": 10808 + }, + { + "epoch": 0.9806750136091453, + "grad_norm": 0.11055334021019797, + "learning_rate": 9.79085263279389e-07, + "loss": 1.5277, + "step": 10809 + }, + { + "epoch": 0.9807657412447831, + "grad_norm": 0.1030703532849161, + "learning_rate": 9.699165213329364e-07, + "loss": 1.5263, + "step": 10810 + }, + { + "epoch": 0.980856468880421, + "grad_norm": 0.106289533202629, + "learning_rate": 9.607908707077173e-07, + "loss": 1.4619, + "step": 10811 + }, + { + "epoch": 0.9809471965160588, + "grad_norm": 0.10895460454006932, + "learning_rate": 9.517083121917681e-07, + "loss": 1.5675, + "step": 10812 + }, + { + "epoch": 0.9810379241516967, + "grad_norm": 0.11352604243833381, + "learning_rate": 9.4266884656935e-07, + "loss": 1.5497, + "step": 10813 + }, + { + "epoch": 0.9811286517873344, + "grad_norm": 0.10481795409596655, + "learning_rate": 9.336724746210056e-07, + "loss": 1.529, + "step": 10814 + }, + { + "epoch": 0.9812193794229722, + "grad_norm": 0.10978491771561406, + "learning_rate": 9.247191971236135e-07, + "loss": 1.5413, + "step": 10815 + }, + { + "epoch": 0.9813101070586101, + "grad_norm": 0.10479042403888726, + "learning_rate": 9.158090148502774e-07, + "loss": 1.544, + "step": 10816 + }, + { + "epoch": 0.9814008346942479, + "grad_norm": 0.10691482973436614, + "learning_rate": 9.069419285703817e-07, + "loss": 1.5325, + "step": 10817 + }, + { + "epoch": 0.9814915623298857, + "grad_norm": 0.10501682088294614, + "learning_rate": 8.981179390496474e-07, + "loss": 1.5375, + "step": 10818 + }, + { + "epoch": 0.9815822899655235, + "grad_norm": 0.10892085790234646, + "learning_rate": 8.893370470499651e-07, + "loss": 1.5146, + "step": 10819 + }, + { + "epoch": 0.9816730176011613, + "grad_norm": 0.10468986394358092, + "learning_rate": 8.805992533295615e-07, + "loss": 1.5346, + "step": 10820 + }, + { + "epoch": 0.9817637452367991, + "grad_norm": 0.10681096826719587, + "learning_rate": 8.719045586429996e-07, + "loss": 1.5399, + "step": 10821 + }, + { + "epoch": 0.981854472872437, + "grad_norm": 0.10614757731310975, + "learning_rate": 8.63252963741068e-07, + "loss": 1.5351, + "step": 10822 + }, + { + "epoch": 0.9819452005080748, + "grad_norm": 0.108011417114314, + "learning_rate": 8.5464446937078e-07, + "loss": 1.5389, + "step": 10823 + }, + { + "epoch": 0.9820359281437125, + "grad_norm": 0.10546825047384562, + "learning_rate": 8.460790762754856e-07, + "loss": 1.5016, + "step": 10824 + }, + { + "epoch": 0.9821266557793504, + "grad_norm": 0.10752841067799472, + "learning_rate": 8.375567851948707e-07, + "loss": 1.5406, + "step": 10825 + }, + { + "epoch": 0.9822173834149882, + "grad_norm": 0.10767496287568631, + "learning_rate": 8.290775968647912e-07, + "loss": 1.5256, + "step": 10826 + }, + { + "epoch": 0.982308111050626, + "grad_norm": 0.10868571752511784, + 
"learning_rate": 8.206415120174393e-07, + "loss": 1.564, + "step": 10827 + }, + { + "epoch": 0.9823988386862639, + "grad_norm": 0.10320265264795127, + "learning_rate": 8.122485313812323e-07, + "loss": 1.5044, + "step": 10828 + }, + { + "epoch": 0.9824895663219017, + "grad_norm": 0.10695208971978695, + "learning_rate": 8.038986556809236e-07, + "loss": 1.4919, + "step": 10829 + }, + { + "epoch": 0.9825802939575394, + "grad_norm": 0.10837871543559341, + "learning_rate": 7.955918856376033e-07, + "loss": 1.535, + "step": 10830 + }, + { + "epoch": 0.9826710215931773, + "grad_norm": 0.10611859965245748, + "learning_rate": 7.873282219684197e-07, + "loss": 1.522, + "step": 10831 + }, + { + "epoch": 0.9827617492288151, + "grad_norm": 0.10661466927756691, + "learning_rate": 7.791076653870799e-07, + "loss": 1.5571, + "step": 10832 + }, + { + "epoch": 0.9828524768644529, + "grad_norm": 0.10867483457281217, + "learning_rate": 7.709302166033494e-07, + "loss": 1.528, + "step": 10833 + }, + { + "epoch": 0.9829432045000908, + "grad_norm": 0.10471113109854993, + "learning_rate": 7.627958763233855e-07, + "loss": 1.568, + "step": 10834 + }, + { + "epoch": 0.9830339321357285, + "grad_norm": 0.11268642590234257, + "learning_rate": 7.547046452495709e-07, + "loss": 1.5658, + "step": 10835 + }, + { + "epoch": 0.9831246597713663, + "grad_norm": 0.1109049256033483, + "learning_rate": 7.466565240806244e-07, + "loss": 1.5495, + "step": 10836 + }, + { + "epoch": 0.9832153874070042, + "grad_norm": 0.10886174167772701, + "learning_rate": 7.386515135114347e-07, + "loss": 1.5117, + "step": 10837 + }, + { + "epoch": 0.983306115042642, + "grad_norm": 0.10895852378212242, + "learning_rate": 7.306896142332819e-07, + "loss": 1.5674, + "step": 10838 + }, + { + "epoch": 0.9833968426782798, + "grad_norm": 0.1069418493843736, + "learning_rate": 7.227708269336164e-07, + "loss": 1.5236, + "step": 10839 + }, + { + "epoch": 0.9834875703139176, + "grad_norm": 0.10618427356237264, + "learning_rate": 7.148951522963353e-07, + "loss": 1.5556, + "step": 10840 + }, + { + "epoch": 0.9835782979495554, + "grad_norm": 0.10699898542504363, + "learning_rate": 7.070625910014506e-07, + "loss": 1.5041, + "step": 10841 + }, + { + "epoch": 0.9836690255851932, + "grad_norm": 0.11015183820662444, + "learning_rate": 6.992731437252542e-07, + "loss": 1.5257, + "step": 10842 + }, + { + "epoch": 0.9837597532208311, + "grad_norm": 0.10578659195753497, + "learning_rate": 6.915268111404305e-07, + "loss": 1.5006, + "step": 10843 + }, + { + "epoch": 0.9838504808564689, + "grad_norm": 0.10924282869395811, + "learning_rate": 6.838235939158887e-07, + "loss": 1.5247, + "step": 10844 + }, + { + "epoch": 0.9839412084921066, + "grad_norm": 0.10789680370609231, + "learning_rate": 6.761634927167081e-07, + "loss": 1.5385, + "step": 10845 + }, + { + "epoch": 0.9840319361277445, + "grad_norm": 0.1070828347024363, + "learning_rate": 6.685465082044707e-07, + "loss": 1.5604, + "step": 10846 + }, + { + "epoch": 0.9841226637633823, + "grad_norm": 0.10474394889106246, + "learning_rate": 6.609726410367611e-07, + "loss": 1.5469, + "step": 10847 + }, + { + "epoch": 0.9842133913990201, + "grad_norm": 0.10825576507737204, + "learning_rate": 6.534418918677232e-07, + "loss": 1.5295, + "step": 10848 + }, + { + "epoch": 0.984304119034658, + "grad_norm": 0.10861927413757981, + "learning_rate": 6.459542613475588e-07, + "loss": 1.5923, + "step": 10849 + }, + { + "epoch": 0.9843948466702958, + "grad_norm": 0.11335029106944314, + "learning_rate": 6.385097501228066e-07, + "loss": 1.5796, + "step": 
10850 + }, + { + "epoch": 0.9844855743059336, + "grad_norm": 0.10960313458327416, + "learning_rate": 6.311083588363963e-07, + "loss": 1.5478, + "step": 10851 + }, + { + "epoch": 0.9845763019415714, + "grad_norm": 0.10540864644673252, + "learning_rate": 6.237500881273173e-07, + "loss": 1.5369, + "step": 10852 + }, + { + "epoch": 0.9846670295772092, + "grad_norm": 0.10916716264426707, + "learning_rate": 6.164349386310609e-07, + "loss": 1.537, + "step": 10853 + }, + { + "epoch": 0.9847577572128471, + "grad_norm": 0.10515233615151486, + "learning_rate": 6.091629109792329e-07, + "loss": 1.5403, + "step": 10854 + }, + { + "epoch": 0.9848484848484849, + "grad_norm": 0.10811561863297839, + "learning_rate": 6.019340057997757e-07, + "loss": 1.5564, + "step": 10855 + }, + { + "epoch": 0.9849392124841226, + "grad_norm": 0.1085548961030262, + "learning_rate": 5.947482237169677e-07, + "loss": 1.5788, + "step": 10856 + }, + { + "epoch": 0.9850299401197605, + "grad_norm": 0.10557461011341099, + "learning_rate": 5.876055653512013e-07, + "loss": 1.5262, + "step": 10857 + }, + { + "epoch": 0.9851206677553983, + "grad_norm": 0.10774370393597814, + "learning_rate": 5.805060313193167e-07, + "loss": 1.5603, + "step": 10858 + }, + { + "epoch": 0.9852113953910361, + "grad_norm": 0.10692870236375021, + "learning_rate": 5.734496222343788e-07, + "loss": 1.5146, + "step": 10859 + }, + { + "epoch": 0.985302123026674, + "grad_norm": 0.10755347627753012, + "learning_rate": 5.664363387056226e-07, + "loss": 1.5316, + "step": 10860 + }, + { + "epoch": 0.9853928506623117, + "grad_norm": 0.10920998662905469, + "learning_rate": 5.594661813387303e-07, + "loss": 1.5161, + "step": 10861 + }, + { + "epoch": 0.9854835782979495, + "grad_norm": 0.10373639879275545, + "learning_rate": 5.525391507355537e-07, + "loss": 1.5175, + "step": 10862 + }, + { + "epoch": 0.9855743059335874, + "grad_norm": 0.1052587900423189, + "learning_rate": 5.4565524749417e-07, + "loss": 1.5119, + "step": 10863 + }, + { + "epoch": 0.9856650335692252, + "grad_norm": 0.10603204263535024, + "learning_rate": 5.388144722091592e-07, + "loss": 1.5189, + "step": 10864 + }, + { + "epoch": 0.985755761204863, + "grad_norm": 0.11120917415646027, + "learning_rate": 5.320168254710489e-07, + "loss": 1.5219, + "step": 10865 + }, + { + "epoch": 0.9858464888405009, + "grad_norm": 0.10809196032884832, + "learning_rate": 5.25262307866925e-07, + "loss": 1.5873, + "step": 10866 + }, + { + "epoch": 0.9859372164761386, + "grad_norm": 0.10533885176898876, + "learning_rate": 5.185509199800431e-07, + "loss": 1.5574, + "step": 10867 + }, + { + "epoch": 0.9860279441117764, + "grad_norm": 0.10641632289148552, + "learning_rate": 5.118826623898842e-07, + "loss": 1.5132, + "step": 10868 + }, + { + "epoch": 0.9861186717474143, + "grad_norm": 0.10662807134183468, + "learning_rate": 5.052575356722656e-07, + "loss": 1.5279, + "step": 10869 + }, + { + "epoch": 0.9862093993830521, + "grad_norm": 0.10595254005034621, + "learning_rate": 4.986755403992849e-07, + "loss": 1.5633, + "step": 10870 + }, + { + "epoch": 0.9863001270186899, + "grad_norm": 0.10710683423532015, + "learning_rate": 4.92136677139321e-07, + "loss": 1.5863, + "step": 10871 + }, + { + "epoch": 0.9863908546543277, + "grad_norm": 0.1052998685312957, + "learning_rate": 4.856409464569222e-07, + "loss": 1.5326, + "step": 10872 + }, + { + "epoch": 0.9864815822899655, + "grad_norm": 0.1060021569116092, + "learning_rate": 4.791883489130843e-07, + "loss": 1.5038, + "step": 10873 + }, + { + "epoch": 0.9865723099256033, + "grad_norm": 
0.1071386391022503, + "learning_rate": 4.727788850649173e-07, + "loss": 1.5078, + "step": 10874 + }, + { + "epoch": 0.9866630375612412, + "grad_norm": 0.10952975656599419, + "learning_rate": 4.6641255546597814e-07, + "loss": 1.5831, + "step": 10875 + }, + { + "epoch": 0.986753765196879, + "grad_norm": 0.11000333541475846, + "learning_rate": 4.6008936066588293e-07, + "loss": 1.4976, + "step": 10876 + }, + { + "epoch": 0.9868444928325167, + "grad_norm": 0.10662189050291475, + "learning_rate": 4.5380930121075023e-07, + "loss": 1.5872, + "step": 10877 + }, + { + "epoch": 0.9869352204681546, + "grad_norm": 0.10485216547198811, + "learning_rate": 4.475723776427576e-07, + "loss": 1.5478, + "step": 10878 + }, + { + "epoch": 0.9870259481037924, + "grad_norm": 0.10585107683948328, + "learning_rate": 4.4137859050052963e-07, + "loss": 1.5185, + "step": 10879 + }, + { + "epoch": 0.9871166757394302, + "grad_norm": 0.10901915704362074, + "learning_rate": 4.352279403188608e-07, + "loss": 1.5261, + "step": 10880 + }, + { + "epoch": 0.9872074033750681, + "grad_norm": 0.10995125052414806, + "learning_rate": 4.2912042762893735e-07, + "loss": 1.5585, + "step": 10881 + }, + { + "epoch": 0.9872981310107058, + "grad_norm": 0.11088991077484606, + "learning_rate": 4.230560529580596e-07, + "loss": 1.546, + "step": 10882 + }, + { + "epoch": 0.9873888586463436, + "grad_norm": 0.10679759868904867, + "learning_rate": 4.1703481682997537e-07, + "loss": 1.5669, + "step": 10883 + }, + { + "epoch": 0.9874795862819815, + "grad_norm": 0.10901731510606394, + "learning_rate": 4.11056719764491e-07, + "loss": 1.5331, + "step": 10884 + }, + { + "epoch": 0.9875703139176193, + "grad_norm": 0.10464122297577007, + "learning_rate": 4.051217622779713e-07, + "loss": 1.5298, + "step": 10885 + }, + { + "epoch": 0.9876610415532571, + "grad_norm": 0.10781893979024518, + "learning_rate": 3.992299448827286e-07, + "loss": 1.5311, + "step": 10886 + }, + { + "epoch": 0.987751769188895, + "grad_norm": 0.10603831654336514, + "learning_rate": 3.933812680876891e-07, + "loss": 1.4965, + "step": 10887 + }, + { + "epoch": 0.9878424968245327, + "grad_norm": 0.11135636617215473, + "learning_rate": 3.8757573239778246e-07, + "loss": 1.5461, + "step": 10888 + }, + { + "epoch": 0.9879332244601706, + "grad_norm": 0.10937046257363407, + "learning_rate": 3.818133383143296e-07, + "loss": 1.5018, + "step": 10889 + }, + { + "epoch": 0.9880239520958084, + "grad_norm": 0.10565451118206182, + "learning_rate": 3.760940863349882e-07, + "loss": 1.5097, + "step": 10890 + }, + { + "epoch": 0.9881146797314462, + "grad_norm": 0.11014284516007665, + "learning_rate": 3.7041797695352984e-07, + "loss": 1.5386, + "step": 10891 + }, + { + "epoch": 0.9882054073670841, + "grad_norm": 0.11162344818542698, + "learning_rate": 3.647850106600625e-07, + "loss": 1.5238, + "step": 10892 + }, + { + "epoch": 0.9882961350027218, + "grad_norm": 0.10749127320484442, + "learning_rate": 3.591951879411415e-07, + "loss": 1.5114, + "step": 10893 + }, + { + "epoch": 0.9883868626383596, + "grad_norm": 0.10696896063784474, + "learning_rate": 3.536485092792696e-07, + "loss": 1.5421, + "step": 10894 + }, + { + "epoch": 0.9884775902739975, + "grad_norm": 0.10717167441128823, + "learning_rate": 3.4814497515356367e-07, + "loss": 1.5291, + "step": 10895 + }, + { + "epoch": 0.9885683179096353, + "grad_norm": 0.10546916903341645, + "learning_rate": 3.4268458603908814e-07, + "loss": 1.5403, + "step": 10896 + }, + { + "epoch": 0.9886590455452731, + "grad_norm": 0.10644738022367375, + "learning_rate": 
3.372673424075212e-07, + "loss": 1.5438, + "step": 10897 + }, + { + "epoch": 0.988749773180911, + "grad_norm": 0.10398502605680454, + "learning_rate": 3.3189324472654437e-07, + "loss": 1.5057, + "step": 10898 + }, + { + "epoch": 0.9888405008165487, + "grad_norm": 0.1081507248572752, + "learning_rate": 3.265622934602308e-07, + "loss": 1.5022, + "step": 10899 + }, + { + "epoch": 0.9889312284521865, + "grad_norm": 0.10690217782515155, + "learning_rate": 3.21274489068879e-07, + "loss": 1.5328, + "step": 10900 + }, + { + "epoch": 0.9890219560878244, + "grad_norm": 0.10814690311223492, + "learning_rate": 3.160298320091792e-07, + "loss": 1.5215, + "step": 10901 + }, + { + "epoch": 0.9891126837234622, + "grad_norm": 0.107369790090747, + "learning_rate": 3.108283227338804e-07, + "loss": 1.4827, + "step": 10902 + }, + { + "epoch": 0.9892034113591, + "grad_norm": 0.11417988618398295, + "learning_rate": 3.0566996169223426e-07, + "loss": 1.5414, + "step": 10903 + }, + { + "epoch": 0.9892941389947378, + "grad_norm": 0.10606934582734039, + "learning_rate": 3.0055474932960683e-07, + "loss": 1.553, + "step": 10904 + }, + { + "epoch": 0.9893848666303756, + "grad_norm": 0.10443849578791245, + "learning_rate": 2.9548268608775574e-07, + "loss": 1.5247, + "step": 10905 + }, + { + "epoch": 0.9894755942660134, + "grad_norm": 0.10785929558266309, + "learning_rate": 2.904537724046641e-07, + "loss": 1.5556, + "step": 10906 + }, + { + "epoch": 0.9895663219016513, + "grad_norm": 0.11290903250278447, + "learning_rate": 2.854680087144845e-07, + "loss": 1.5347, + "step": 10907 + }, + { + "epoch": 0.9896570495372891, + "grad_norm": 0.10338825178676805, + "learning_rate": 2.8052539544781706e-07, + "loss": 1.5159, + "step": 10908 + }, + { + "epoch": 0.9897477771729268, + "grad_norm": 0.10982061509567378, + "learning_rate": 2.7562593303137596e-07, + "loss": 1.5213, + "step": 10909 + }, + { + "epoch": 0.9898385048085647, + "grad_norm": 0.10274491103779286, + "learning_rate": 2.7076962188832266e-07, + "loss": 1.5327, + "step": 10910 + }, + { + "epoch": 0.9899292324442025, + "grad_norm": 0.10939369416143568, + "learning_rate": 2.659564624379884e-07, + "loss": 1.55, + "step": 10911 + }, + { + "epoch": 0.9900199600798403, + "grad_norm": 0.10829351648368932, + "learning_rate": 2.6118645509592974e-07, + "loss": 1.5385, + "step": 10912 + }, + { + "epoch": 0.9901106877154782, + "grad_norm": 0.1077966868741053, + "learning_rate": 2.5645960027409487e-07, + "loss": 1.5058, + "step": 10913 + }, + { + "epoch": 0.9902014153511159, + "grad_norm": 0.10618520566391887, + "learning_rate": 2.517758983806573e-07, + "loss": 1.5223, + "step": 10914 + }, + { + "epoch": 0.9902921429867537, + "grad_norm": 0.1060539070485695, + "learning_rate": 2.4713534981996023e-07, + "loss": 1.5459, + "step": 10915 + }, + { + "epoch": 0.9903828706223916, + "grad_norm": 0.1030018286075463, + "learning_rate": 2.425379549928497e-07, + "loss": 1.5455, + "step": 10916 + }, + { + "epoch": 0.9904735982580294, + "grad_norm": 0.10695297585246481, + "learning_rate": 2.3798371429623044e-07, + "loss": 1.5463, + "step": 10917 + }, + { + "epoch": 0.9905643258936672, + "grad_norm": 0.10736859721803318, + "learning_rate": 2.3347262812334347e-07, + "loss": 1.4996, + "step": 10918 + }, + { + "epoch": 0.990655053529305, + "grad_norm": 0.10824170770782002, + "learning_rate": 2.2900469686376602e-07, + "loss": 1.5159, + "step": 10919 + }, + { + "epoch": 0.9907457811649428, + "grad_norm": 0.10815977032589122, + "learning_rate": 2.245799209033006e-07, + "loss": 1.5222, + "step": 10920 + 
}, + { + "epoch": 0.9908365088005806, + "grad_norm": 0.1042451507236852, + "learning_rate": 2.2019830062397495e-07, + "loss": 1.5503, + "step": 10921 + }, + { + "epoch": 0.9909272364362185, + "grad_norm": 0.107296677595319, + "learning_rate": 2.1585983640420859e-07, + "loss": 1.5319, + "step": 10922 + }, + { + "epoch": 0.9910179640718563, + "grad_norm": 0.11027190815413564, + "learning_rate": 2.1156452861864627e-07, + "loss": 1.5717, + "step": 10923 + }, + { + "epoch": 0.991108691707494, + "grad_norm": 0.10891817831986318, + "learning_rate": 2.0731237763810252e-07, + "loss": 1.5188, + "step": 10924 + }, + { + "epoch": 0.9911994193431319, + "grad_norm": 0.112402588837065, + "learning_rate": 2.0310338382983905e-07, + "loss": 1.5178, + "step": 10925 + }, + { + "epoch": 0.9912901469787697, + "grad_norm": 0.10739777444231387, + "learning_rate": 1.9893754755723193e-07, + "loss": 1.5037, + "step": 10926 + }, + { + "epoch": 0.9913808746144076, + "grad_norm": 0.10566510955295033, + "learning_rate": 1.9481486918004886e-07, + "loss": 1.5403, + "step": 10927 + }, + { + "epoch": 0.9914716022500454, + "grad_norm": 0.10974282946821483, + "learning_rate": 1.907353490542274e-07, + "loss": 1.5184, + "step": 10928 + }, + { + "epoch": 0.9915623298856832, + "grad_norm": 0.10580198186880518, + "learning_rate": 1.866989875320968e-07, + "loss": 1.5164, + "step": 10929 + }, + { + "epoch": 0.991653057521321, + "grad_norm": 0.10916885608516505, + "learning_rate": 1.8270578496215606e-07, + "loss": 1.5187, + "step": 10930 + }, + { + "epoch": 0.9917437851569588, + "grad_norm": 0.10938721185522156, + "learning_rate": 1.7875574168929599e-07, + "loss": 1.5255, + "step": 10931 + }, + { + "epoch": 0.9918345127925966, + "grad_norm": 0.10782462661785978, + "learning_rate": 1.7484885805446604e-07, + "loss": 1.507, + "step": 10932 + }, + { + "epoch": 0.9919252404282345, + "grad_norm": 0.10939223924139001, + "learning_rate": 1.7098513439517404e-07, + "loss": 1.5524, + "step": 10933 + }, + { + "epoch": 0.9920159680638723, + "grad_norm": 0.10768724370261828, + "learning_rate": 1.67164571044931e-07, + "loss": 1.4736, + "step": 10934 + }, + { + "epoch": 0.99210669569951, + "grad_norm": 0.10631950070710074, + "learning_rate": 1.6338716833369515e-07, + "loss": 1.5347, + "step": 10935 + }, + { + "epoch": 0.9921974233351479, + "grad_norm": 0.10999289824875214, + "learning_rate": 1.5965292658765007e-07, + "loss": 1.538, + "step": 10936 + }, + { + "epoch": 0.9922881509707857, + "grad_norm": 0.1082345064551086, + "learning_rate": 1.5596184612926e-07, + "loss": 1.5792, + "step": 10937 + }, + { + "epoch": 0.9923788786064235, + "grad_norm": 0.1095881697630519, + "learning_rate": 1.5231392727727e-07, + "loss": 1.5142, + "step": 10938 + }, + { + "epoch": 0.9924696062420614, + "grad_norm": 0.10354791727421656, + "learning_rate": 1.487091703465948e-07, + "loss": 1.5267, + "step": 10939 + }, + { + "epoch": 0.9925603338776992, + "grad_norm": 0.10550166088578691, + "learning_rate": 1.4514757564854098e-07, + "loss": 1.5059, + "step": 10940 + }, + { + "epoch": 0.9926510615133369, + "grad_norm": 0.11004056752471034, + "learning_rate": 1.416291434906958e-07, + "loss": 1.5594, + "step": 10941 + }, + { + "epoch": 0.9927417891489748, + "grad_norm": 0.10516967500945484, + "learning_rate": 1.3815387417681625e-07, + "loss": 1.4954, + "step": 10942 + }, + { + "epoch": 0.9928325167846126, + "grad_norm": 0.11007984098302437, + "learning_rate": 1.3472176800705116e-07, + "loss": 1.5094, + "step": 10943 + }, + { + "epoch": 0.9929232444202504, + "grad_norm": 
0.10507749346929442, + "learning_rate": 1.3133282527771905e-07, + "loss": 1.5102, + "step": 10944 + }, + { + "epoch": 0.9930139720558883, + "grad_norm": 0.10732999656499384, + "learning_rate": 1.2798704628147474e-07, + "loss": 1.5521, + "step": 10945 + }, + { + "epoch": 0.993104699691526, + "grad_norm": 0.10378571206896792, + "learning_rate": 1.2468443130725372e-07, + "loss": 1.5138, + "step": 10946 + }, + { + "epoch": 0.9931954273271638, + "grad_norm": 0.10885852812540842, + "learning_rate": 1.2142498064016127e-07, + "loss": 1.524, + "step": 10947 + }, + { + "epoch": 0.9932861549628017, + "grad_norm": 0.10719605173264596, + "learning_rate": 1.1820869456169448e-07, + "loss": 1.5499, + "step": 10948 + }, + { + "epoch": 0.9933768825984395, + "grad_norm": 0.10930001192660761, + "learning_rate": 1.1503557334963111e-07, + "loss": 1.5324, + "step": 10949 + }, + { + "epoch": 0.9934676102340773, + "grad_norm": 0.10675957690616208, + "learning_rate": 1.1190561727786319e-07, + "loss": 1.5057, + "step": 10950 + }, + { + "epoch": 0.9935583378697151, + "grad_norm": 0.1065617105540684, + "learning_rate": 1.0881882661673004e-07, + "loss": 1.497, + "step": 10951 + }, + { + "epoch": 0.9936490655053529, + "grad_norm": 0.10786649681366288, + "learning_rate": 1.0577520163279619e-07, + "loss": 1.5689, + "step": 10952 + }, + { + "epoch": 0.9937397931409907, + "grad_norm": 0.10988877880646876, + "learning_rate": 1.0277474258885144e-07, + "loss": 1.5249, + "step": 10953 + }, + { + "epoch": 0.9938305207766286, + "grad_norm": 0.1100770920162175, + "learning_rate": 9.981744974396634e-08, + "loss": 1.5617, + "step": 10954 + }, + { + "epoch": 0.9939212484122664, + "grad_norm": 0.10543795129768783, + "learning_rate": 9.690332335354767e-08, + "loss": 1.5261, + "step": 10955 + }, + { + "epoch": 0.9940119760479041, + "grad_norm": 0.10775142761267478, + "learning_rate": 9.4032363669172e-08, + "loss": 1.543, + "step": 10956 + }, + { + "epoch": 0.994102703683542, + "grad_norm": 0.10547046855914899, + "learning_rate": 9.120457093875211e-08, + "loss": 1.5466, + "step": 10957 + }, + { + "epoch": 0.9941934313191798, + "grad_norm": 0.10400625009538711, + "learning_rate": 8.841994540659259e-08, + "loss": 1.5316, + "step": 10958 + }, + { + "epoch": 0.9942841589548176, + "grad_norm": 0.10945534933573217, + "learning_rate": 8.56784873129457e-08, + "loss": 1.5443, + "step": 10959 + }, + { + "epoch": 0.9943748865904555, + "grad_norm": 0.10700571495523285, + "learning_rate": 8.298019689473301e-08, + "loss": 1.5578, + "step": 10960 + }, + { + "epoch": 0.9944656142260933, + "grad_norm": 0.1051649082457291, + "learning_rate": 8.032507438482384e-08, + "loss": 1.5137, + "step": 10961 + }, + { + "epoch": 0.994556341861731, + "grad_norm": 0.10575585881418419, + "learning_rate": 7.77131200124792e-08, + "loss": 1.522, + "step": 10962 + }, + { + "epoch": 0.9946470694973689, + "grad_norm": 0.10805589186321, + "learning_rate": 7.514433400335197e-08, + "loss": 1.5482, + "step": 10963 + }, + { + "epoch": 0.9947377971330067, + "grad_norm": 0.1076426338940676, + "learning_rate": 7.261871657915364e-08, + "loss": 1.513, + "step": 10964 + }, + { + "epoch": 0.9948285247686446, + "grad_norm": 0.10744702738720105, + "learning_rate": 7.013626795804306e-08, + "loss": 1.537, + "step": 10965 + }, + { + "epoch": 0.9949192524042824, + "grad_norm": 0.1053256360454041, + "learning_rate": 6.769698835429327e-08, + "loss": 1.4951, + "step": 10966 + }, + { + "epoch": 0.9950099800399201, + "grad_norm": 0.10582274445096973, + "learning_rate": 6.530087797862461e-08, + 
"loss": 1.5225, + "step": 10967 + }, + { + "epoch": 0.995100707675558, + "grad_norm": 0.10661027098816771, + "learning_rate": 6.294793703792712e-08, + "loss": 1.5524, + "step": 10968 + }, + { + "epoch": 0.9951914353111958, + "grad_norm": 0.10507223797271507, + "learning_rate": 6.063816573537162e-08, + "loss": 1.4946, + "step": 10969 + }, + { + "epoch": 0.9952821629468336, + "grad_norm": 0.10848477828715411, + "learning_rate": 5.837156427035417e-08, + "loss": 1.5117, + "step": 10970 + }, + { + "epoch": 0.9953728905824715, + "grad_norm": 0.10625360975044851, + "learning_rate": 5.6148132838662604e-08, + "loss": 1.5129, + "step": 10971 + }, + { + "epoch": 0.9954636182181092, + "grad_norm": 0.10809568564672016, + "learning_rate": 5.3967871632254475e-08, + "loss": 1.5853, + "step": 10972 + }, + { + "epoch": 0.995554345853747, + "grad_norm": 0.10522182722517907, + "learning_rate": 5.18307808393681e-08, + "loss": 1.5287, + "step": 10973 + }, + { + "epoch": 0.9956450734893849, + "grad_norm": 0.1078655639798502, + "learning_rate": 4.973686064463356e-08, + "loss": 1.5641, + "step": 10974 + }, + { + "epoch": 0.9957358011250227, + "grad_norm": 0.10921271921123821, + "learning_rate": 4.768611122873967e-08, + "loss": 1.519, + "step": 10975 + }, + { + "epoch": 0.9958265287606605, + "grad_norm": 0.11018930503285072, + "learning_rate": 4.567853276887801e-08, + "loss": 1.4921, + "step": 10976 + }, + { + "epoch": 0.9959172563962984, + "grad_norm": 0.10874485045298618, + "learning_rate": 4.3714125438409914e-08, + "loss": 1.5476, + "step": 10977 + }, + { + "epoch": 0.9960079840319361, + "grad_norm": 0.10603490326435407, + "learning_rate": 4.1792889406810916e-08, + "loss": 1.4813, + "step": 10978 + }, + { + "epoch": 0.9960987116675739, + "grad_norm": 0.10939602607476896, + "learning_rate": 3.991482484017039e-08, + "loss": 1.5465, + "step": 10979 + }, + { + "epoch": 0.9961894393032118, + "grad_norm": 0.10455033438910825, + "learning_rate": 3.807993190052539e-08, + "loss": 1.5153, + "step": 10980 + }, + { + "epoch": 0.9962801669388496, + "grad_norm": 0.11493557809770617, + "learning_rate": 3.628821074636024e-08, + "loss": 1.545, + "step": 10981 + }, + { + "epoch": 0.9963708945744874, + "grad_norm": 0.10466906700528275, + "learning_rate": 3.453966153244004e-08, + "loss": 1.5458, + "step": 10982 + }, + { + "epoch": 0.9964616222101252, + "grad_norm": 0.10497933729717861, + "learning_rate": 3.283428440964409e-08, + "loss": 1.4993, + "step": 10983 + }, + { + "epoch": 0.996552349845763, + "grad_norm": 0.11050961458487575, + "learning_rate": 3.11720795253545e-08, + "loss": 1.5101, + "step": 10984 + }, + { + "epoch": 0.9966430774814008, + "grad_norm": 0.11064173110090912, + "learning_rate": 2.955304702301209e-08, + "loss": 1.5627, + "step": 10985 + }, + { + "epoch": 0.9967338051170387, + "grad_norm": 0.10996748498896061, + "learning_rate": 2.7977187042449446e-08, + "loss": 1.5116, + "step": 10986 + }, + { + "epoch": 0.9968245327526765, + "grad_norm": 0.10649688042403611, + "learning_rate": 2.6444499719779914e-08, + "loss": 1.5517, + "step": 10987 + }, + { + "epoch": 0.9969152603883142, + "grad_norm": 0.10974905499971169, + "learning_rate": 2.495498518728656e-08, + "loss": 1.5733, + "step": 10988 + }, + { + "epoch": 0.9970059880239521, + "grad_norm": 0.11234720536016339, + "learning_rate": 2.350864357358873e-08, + "loss": 1.5149, + "step": 10989 + }, + { + "epoch": 0.9970967156595899, + "grad_norm": 0.10529101004990121, + "learning_rate": 2.2105475003642018e-08, + "loss": 1.5592, + "step": 10990 + }, + { + "epoch": 
0.9971874432952277, + "grad_norm": 0.1054989771705613, + "learning_rate": 2.074547959862727e-08, + "loss": 1.5398, + "step": 10991 + }, + { + "epoch": 0.9972781709308656, + "grad_norm": 0.10608243828289261, + "learning_rate": 1.942865747583955e-08, + "loss": 1.4871, + "step": 10992 + }, + { + "epoch": 0.9973688985665033, + "grad_norm": 0.10739188700005974, + "learning_rate": 1.815500874913223e-08, + "loss": 1.4937, + "step": 10993 + }, + { + "epoch": 0.9974596262021411, + "grad_norm": 0.10881722983099748, + "learning_rate": 1.6924533528417386e-08, + "loss": 1.5264, + "step": 10994 + }, + { + "epoch": 0.997550353837779, + "grad_norm": 0.10898173307181508, + "learning_rate": 1.573723191994336e-08, + "loss": 1.5096, + "step": 10995 + }, + { + "epoch": 0.9976410814734168, + "grad_norm": 0.1063837294669092, + "learning_rate": 1.4593104026294768e-08, + "loss": 1.5625, + "step": 10996 + }, + { + "epoch": 0.9977318091090546, + "grad_norm": 0.1072438747462128, + "learning_rate": 1.3492149946170428e-08, + "loss": 1.5734, + "step": 10997 + }, + { + "epoch": 0.9978225367446925, + "grad_norm": 0.10548761031263292, + "learning_rate": 1.2434369774716458e-08, + "loss": 1.4927, + "step": 10998 + }, + { + "epoch": 0.9979132643803302, + "grad_norm": 0.10671141495488551, + "learning_rate": 1.14197636032487e-08, + "loss": 1.5628, + "step": 10999 + }, + { + "epoch": 0.998003992015968, + "grad_norm": 0.1061769615248565, + "learning_rate": 1.0448331519363752e-08, + "loss": 1.5806, + "step": 11000 + }, + { + "epoch": 0.9980947196516059, + "grad_norm": 0.10838800718727203, + "learning_rate": 9.520073606938962e-09, + "loss": 1.5282, + "step": 11001 + }, + { + "epoch": 0.9981854472872437, + "grad_norm": 0.1099612363729963, + "learning_rate": 8.634989946132433e-09, + "loss": 1.5737, + "step": 11002 + }, + { + "epoch": 0.9982761749228816, + "grad_norm": 0.10954206291125039, + "learning_rate": 7.79308061343853e-09, + "loss": 1.5503, + "step": 11003 + }, + { + "epoch": 0.9983669025585193, + "grad_norm": 0.11031360809364686, + "learning_rate": 6.994345681465841e-09, + "loss": 1.5538, + "step": 11004 + }, + { + "epoch": 0.9984576301941571, + "grad_norm": 0.10496185034411781, + "learning_rate": 6.238785219270238e-09, + "loss": 1.5176, + "step": 11005 + }, + { + "epoch": 0.998548357829795, + "grad_norm": 0.1117645718411626, + "learning_rate": 5.526399292021811e-09, + "loss": 1.5068, + "step": 11006 + }, + { + "epoch": 0.9986390854654328, + "grad_norm": 0.10977993082059045, + "learning_rate": 4.857187961226917e-09, + "loss": 1.5219, + "step": 11007 + }, + { + "epoch": 0.9987298131010706, + "grad_norm": 0.10701214351886533, + "learning_rate": 4.231151284728174e-09, + "loss": 1.5548, + "step": 11008 + }, + { + "epoch": 0.9988205407367085, + "grad_norm": 0.1094651035713291, + "learning_rate": 3.648289316593445e-09, + "loss": 1.5121, + "step": 11009 + }, + { + "epoch": 0.9989112683723462, + "grad_norm": 0.10666299952831598, + "learning_rate": 3.108602107060321e-09, + "loss": 1.4739, + "step": 11010 + }, + { + "epoch": 0.999001996007984, + "grad_norm": 0.10866262454393928, + "learning_rate": 2.6120897028691915e-09, + "loss": 1.537, + "step": 11011 + }, + { + "epoch": 0.9990927236436219, + "grad_norm": 0.10856857255579785, + "learning_rate": 2.1587521467636427e-09, + "loss": 1.5575, + "step": 11012 + }, + { + "epoch": 0.9991834512792597, + "grad_norm": 0.10649343801981355, + "learning_rate": 1.748589477934548e-09, + "loss": 1.5166, + "step": 11013 + }, + { + "epoch": 0.9992741789148974, + "grad_norm": 0.10412661196122928, + 
"learning_rate": 1.3816017317980211e-09, + "loss": 1.5453, + "step": 11014 + }, + { + "epoch": 0.9993649065505353, + "grad_norm": 0.10667796374505434, + "learning_rate": 1.0577889401064411e-09, + "loss": 1.524, + "step": 11015 + }, + { + "epoch": 0.9994556341861731, + "grad_norm": 0.10807272919495248, + "learning_rate": 7.771511307264056e-10, + "loss": 1.5507, + "step": 11016 + }, + { + "epoch": 0.9995463618218109, + "grad_norm": 0.10561990428832477, + "learning_rate": 5.39688327971799e-10, + "loss": 1.4973, + "step": 11017 + }, + { + "epoch": 0.9996370894574488, + "grad_norm": 0.10836214524710186, + "learning_rate": 3.4540055227072487e-10, + "loss": 1.5042, + "step": 11018 + }, + { + "epoch": 0.9997278170930866, + "grad_norm": 0.1117194823036587, + "learning_rate": 1.9428782044306204e-10, + "loss": 1.4679, + "step": 11019 + }, + { + "epoch": 0.9998185447287243, + "grad_norm": 0.10932535147792898, + "learning_rate": 8.635014553393105e-11, + "loss": 1.5059, + "step": 11020 + }, + { + "epoch": 0.9999092723643622, + "grad_norm": 0.10368998806928163, + "learning_rate": 2.158753686920534e-11, + "loss": 1.5308, + "step": 11021 + }, + { + "epoch": 1.0, + "grad_norm": 0.10665142780779378, + "learning_rate": 0.0, + "loss": 1.5038, + "step": 11022 + }, + { + "epoch": 1.0, + "step": 11022, + "total_flos": 3.1027688657649664e+16, + "train_loss": 1.6355401830109944, + "train_runtime": 139867.2018, + "train_samples_per_second": 80.693, + "train_steps_per_second": 0.079 + } + ], + "logging_steps": 1.0, + "max_steps": 11022, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": false, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 3.1027688657649664e+16, + "train_batch_size": 4, + "trial_name": null, + "trial_params": null +}