diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,100856 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 1.0, + "eval_steps": 500, + "global_step": 14402, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 6.943480072212192e-05, + "grad_norm": 25.743069053425643, + "learning_rate": 2.3094688221709007e-08, + "loss": 3.1826, + "step": 1 + }, + { + "epoch": 0.00013886960144424384, + "grad_norm": 24.166175221666883, + "learning_rate": 4.6189376443418014e-08, + "loss": 3.1349, + "step": 2 + }, + { + "epoch": 0.00020830440216636578, + "grad_norm": 22.440605356482912, + "learning_rate": 6.928406466512703e-08, + "loss": 2.5422, + "step": 3 + }, + { + "epoch": 0.0002777392028884877, + "grad_norm": 30.98926697054666, + "learning_rate": 9.237875288683603e-08, + "loss": 3.2239, + "step": 4 + }, + { + "epoch": 0.00034717400361060965, + "grad_norm": 29.170489475620762, + "learning_rate": 1.1547344110854505e-07, + "loss": 3.3828, + "step": 5 + }, + { + "epoch": 0.00041660880433273156, + "grad_norm": 36.73531608789288, + "learning_rate": 1.3856812933025406e-07, + "loss": 3.4907, + "step": 6 + }, + { + "epoch": 0.0004860436050548535, + "grad_norm": 32.23771588791372, + "learning_rate": 1.6166281755196307e-07, + "loss": 3.6359, + "step": 7 + }, + { + "epoch": 0.0005554784057769754, + "grad_norm": 32.510747196956984, + "learning_rate": 1.8475750577367206e-07, + "loss": 3.5005, + "step": 8 + }, + { + "epoch": 0.0006249132064990974, + "grad_norm": 34.70408112594647, + "learning_rate": 2.0785219399538107e-07, + "loss": 3.8527, + "step": 9 + }, + { + "epoch": 0.0006943480072212193, + "grad_norm": 28.577339992701063, + "learning_rate": 2.309468822170901e-07, + "loss": 3.6636, + "step": 10 + }, + { + "epoch": 0.0007637828079433412, + "grad_norm": 26.801568183338322, + "learning_rate": 2.540415704387991e-07, + "loss": 2.9264, + "step": 11 + }, + { + "epoch": 0.0008332176086654631, + "grad_norm": 26.62357260357466, + "learning_rate": 2.771362586605081e-07, + "loss": 3.1676, + "step": 12 + }, + { + "epoch": 0.000902652409387585, + "grad_norm": 32.93000361263025, + "learning_rate": 3.0023094688221713e-07, + "loss": 3.3359, + "step": 13 + }, + { + "epoch": 0.000972087210109707, + "grad_norm": 35.826058883271884, + "learning_rate": 3.2332563510392614e-07, + "loss": 3.7214, + "step": 14 + }, + { + "epoch": 0.001041522010831829, + "grad_norm": 23.840167416609845, + "learning_rate": 3.464203233256351e-07, + "loss": 2.682, + "step": 15 + }, + { + "epoch": 0.0011109568115539507, + "grad_norm": 29.234444647064542, + "learning_rate": 3.695150115473441e-07, + "loss": 3.3909, + "step": 16 + }, + { + "epoch": 0.0011803916122760728, + "grad_norm": 28.281741162159857, + "learning_rate": 3.926096997690532e-07, + "loss": 3.0161, + "step": 17 + }, + { + "epoch": 0.0012498264129981948, + "grad_norm": 25.458988024367404, + "learning_rate": 4.1570438799076213e-07, + "loss": 2.5692, + "step": 18 + }, + { + "epoch": 0.0013192612137203166, + "grad_norm": 27.49871744760481, + "learning_rate": 4.3879907621247114e-07, + "loss": 3.1641, + "step": 19 + }, + { + "epoch": 0.0013886960144424386, + "grad_norm": 26.594539253723877, + "learning_rate": 4.618937644341802e-07, + "loss": 2.8514, + "step": 20 + }, + { + "epoch": 0.0014581308151645604, + "grad_norm": 24.857691441667104, + "learning_rate": 4.849884526558892e-07, + "loss": 2.754, + "step": 21 + }, + { + 
"epoch": 0.0015275656158866824, + "grad_norm": 37.539728438369586, + "learning_rate": 5.080831408775982e-07, + "loss": 4.1185, + "step": 22 + }, + { + "epoch": 0.0015970004166088044, + "grad_norm": 28.29778430699994, + "learning_rate": 5.311778290993072e-07, + "loss": 3.48, + "step": 23 + }, + { + "epoch": 0.0016664352173309262, + "grad_norm": 24.33092393333535, + "learning_rate": 5.542725173210162e-07, + "loss": 2.6638, + "step": 24 + }, + { + "epoch": 0.0017358700180530482, + "grad_norm": 25.256558995989188, + "learning_rate": 5.773672055427252e-07, + "loss": 2.582, + "step": 25 + }, + { + "epoch": 0.00180530481877517, + "grad_norm": 25.760249406840487, + "learning_rate": 6.004618937644343e-07, + "loss": 2.0152, + "step": 26 + }, + { + "epoch": 0.001874739619497292, + "grad_norm": 23.489047916347662, + "learning_rate": 6.235565819861433e-07, + "loss": 2.1558, + "step": 27 + }, + { + "epoch": 0.001944174420219414, + "grad_norm": 31.235978398880896, + "learning_rate": 6.466512702078523e-07, + "loss": 3.5227, + "step": 28 + }, + { + "epoch": 0.002013609220941536, + "grad_norm": 23.888543064125773, + "learning_rate": 6.697459584295613e-07, + "loss": 2.648, + "step": 29 + }, + { + "epoch": 0.002083044021663658, + "grad_norm": 36.33417629402573, + "learning_rate": 6.928406466512702e-07, + "loss": 4.6123, + "step": 30 + }, + { + "epoch": 0.00215247882238578, + "grad_norm": 28.010977611629272, + "learning_rate": 7.159353348729793e-07, + "loss": 2.9833, + "step": 31 + }, + { + "epoch": 0.0022219136231079015, + "grad_norm": 24.297214528180493, + "learning_rate": 7.390300230946882e-07, + "loss": 2.6014, + "step": 32 + }, + { + "epoch": 0.0022913484238300235, + "grad_norm": 25.52625918335633, + "learning_rate": 7.621247113163972e-07, + "loss": 2.4657, + "step": 33 + }, + { + "epoch": 0.0023607832245521455, + "grad_norm": 26.47100750061038, + "learning_rate": 7.852193995381063e-07, + "loss": 2.7159, + "step": 34 + }, + { + "epoch": 0.0024302180252742675, + "grad_norm": 29.18671888965165, + "learning_rate": 8.083140877598153e-07, + "loss": 3.403, + "step": 35 + }, + { + "epoch": 0.0024996528259963896, + "grad_norm": 23.36608661775619, + "learning_rate": 8.314087759815243e-07, + "loss": 2.6611, + "step": 36 + }, + { + "epoch": 0.002569087626718511, + "grad_norm": 25.488314032085164, + "learning_rate": 8.545034642032334e-07, + "loss": 2.7589, + "step": 37 + }, + { + "epoch": 0.002638522427440633, + "grad_norm": 22.56654378362558, + "learning_rate": 8.775981524249423e-07, + "loss": 3.0017, + "step": 38 + }, + { + "epoch": 0.002707957228162755, + "grad_norm": 27.823323684714733, + "learning_rate": 9.006928406466514e-07, + "loss": 3.1453, + "step": 39 + }, + { + "epoch": 0.002777392028884877, + "grad_norm": 22.146653263912004, + "learning_rate": 9.237875288683604e-07, + "loss": 2.6011, + "step": 40 + }, + { + "epoch": 0.002846826829606999, + "grad_norm": 24.048448823267766, + "learning_rate": 9.468822170900693e-07, + "loss": 2.637, + "step": 41 + }, + { + "epoch": 0.002916261630329121, + "grad_norm": 24.07555341942036, + "learning_rate": 9.699769053117784e-07, + "loss": 3.051, + "step": 42 + }, + { + "epoch": 0.002985696431051243, + "grad_norm": 19.982728293116626, + "learning_rate": 9.930715935334874e-07, + "loss": 2.0515, + "step": 43 + }, + { + "epoch": 0.003055131231773365, + "grad_norm": 22.50681926212181, + "learning_rate": 1.0161662817551965e-06, + "loss": 2.7645, + "step": 44 + }, + { + "epoch": 0.003124566032495487, + "grad_norm": 17.11870138045247, + "learning_rate": 1.0392609699769055e-06, + 
"loss": 1.7083, + "step": 45 + }, + { + "epoch": 0.003194000833217609, + "grad_norm": 24.713318785274932, + "learning_rate": 1.0623556581986145e-06, + "loss": 2.381, + "step": 46 + }, + { + "epoch": 0.0032634356339397304, + "grad_norm": 17.501124546350336, + "learning_rate": 1.0854503464203233e-06, + "loss": 1.9371, + "step": 47 + }, + { + "epoch": 0.0033328704346618524, + "grad_norm": 16.206932101084885, + "learning_rate": 1.1085450346420325e-06, + "loss": 1.9531, + "step": 48 + }, + { + "epoch": 0.0034023052353839745, + "grad_norm": 14.423982498032824, + "learning_rate": 1.1316397228637415e-06, + "loss": 1.3662, + "step": 49 + }, + { + "epoch": 0.0034717400361060965, + "grad_norm": 15.341809929206708, + "learning_rate": 1.1547344110854503e-06, + "loss": 1.4146, + "step": 50 + }, + { + "epoch": 0.0035411748368282185, + "grad_norm": 19.41155990733793, + "learning_rate": 1.1778290993071595e-06, + "loss": 1.4623, + "step": 51 + }, + { + "epoch": 0.00361060963755034, + "grad_norm": 16.373297361743013, + "learning_rate": 1.2009237875288685e-06, + "loss": 1.186, + "step": 52 + }, + { + "epoch": 0.003680044438272462, + "grad_norm": 16.275983136764896, + "learning_rate": 1.2240184757505773e-06, + "loss": 1.5246, + "step": 53 + }, + { + "epoch": 0.003749479238994584, + "grad_norm": 20.327434114179265, + "learning_rate": 1.2471131639722866e-06, + "loss": 2.194, + "step": 54 + }, + { + "epoch": 0.003818914039716706, + "grad_norm": 17.578348797589847, + "learning_rate": 1.2702078521939956e-06, + "loss": 1.8619, + "step": 55 + }, + { + "epoch": 0.003888348840438828, + "grad_norm": 9.54970331285728, + "learning_rate": 1.2933025404157046e-06, + "loss": 0.9079, + "step": 56 + }, + { + "epoch": 0.00395778364116095, + "grad_norm": 12.279321817700035, + "learning_rate": 1.3163972286374136e-06, + "loss": 0.8782, + "step": 57 + }, + { + "epoch": 0.004027218441883072, + "grad_norm": 12.137562385951293, + "learning_rate": 1.3394919168591226e-06, + "loss": 1.5448, + "step": 58 + }, + { + "epoch": 0.004096653242605193, + "grad_norm": 12.568245754461351, + "learning_rate": 1.3625866050808314e-06, + "loss": 1.9133, + "step": 59 + }, + { + "epoch": 0.004166088043327316, + "grad_norm": 12.048296413369126, + "learning_rate": 1.3856812933025404e-06, + "loss": 1.5706, + "step": 60 + }, + { + "epoch": 0.004235522844049437, + "grad_norm": 12.750101727107443, + "learning_rate": 1.4087759815242496e-06, + "loss": 1.3996, + "step": 61 + }, + { + "epoch": 0.00430495764477156, + "grad_norm": 8.718744059951307, + "learning_rate": 1.4318706697459586e-06, + "loss": 1.0854, + "step": 62 + }, + { + "epoch": 0.004374392445493681, + "grad_norm": 11.089197498297837, + "learning_rate": 1.4549653579676676e-06, + "loss": 1.3503, + "step": 63 + }, + { + "epoch": 0.004443827246215803, + "grad_norm": 10.842418149670134, + "learning_rate": 1.4780600461893764e-06, + "loss": 1.1687, + "step": 64 + }, + { + "epoch": 0.004513262046937925, + "grad_norm": 8.753486708154485, + "learning_rate": 1.5011547344110855e-06, + "loss": 1.1534, + "step": 65 + }, + { + "epoch": 0.004582696847660047, + "grad_norm": 9.47527019885148, + "learning_rate": 1.5242494226327945e-06, + "loss": 0.8791, + "step": 66 + }, + { + "epoch": 0.0046521316483821695, + "grad_norm": 8.67619044254941, + "learning_rate": 1.5473441108545037e-06, + "loss": 1.1002, + "step": 67 + }, + { + "epoch": 0.004721566449104291, + "grad_norm": 8.999343453644995, + "learning_rate": 1.5704387990762127e-06, + "loss": 1.0985, + "step": 68 + }, + { + "epoch": 0.004791001249826413, + "grad_norm": 
10.493490047629653, + "learning_rate": 1.5935334872979217e-06, + "loss": 1.1387, + "step": 69 + }, + { + "epoch": 0.004860436050548535, + "grad_norm": 10.356976905673072, + "learning_rate": 1.6166281755196305e-06, + "loss": 1.2244, + "step": 70 + }, + { + "epoch": 0.004929870851270657, + "grad_norm": 9.000559087830059, + "learning_rate": 1.6397228637413395e-06, + "loss": 1.5649, + "step": 71 + }, + { + "epoch": 0.004999305651992779, + "grad_norm": 7.338640699431165, + "learning_rate": 1.6628175519630485e-06, + "loss": 0.6841, + "step": 72 + }, + { + "epoch": 0.005068740452714901, + "grad_norm": 8.905079207582279, + "learning_rate": 1.6859122401847578e-06, + "loss": 0.9226, + "step": 73 + }, + { + "epoch": 0.005138175253437022, + "grad_norm": 11.250404321011482, + "learning_rate": 1.7090069284064668e-06, + "loss": 0.8143, + "step": 74 + }, + { + "epoch": 0.005207610054159145, + "grad_norm": 10.356677078236354, + "learning_rate": 1.7321016166281756e-06, + "loss": 1.0377, + "step": 75 + }, + { + "epoch": 0.005277044854881266, + "grad_norm": 10.138981484769202, + "learning_rate": 1.7551963048498846e-06, + "loss": 0.9871, + "step": 76 + }, + { + "epoch": 0.005346479655603389, + "grad_norm": 9.680459763079574, + "learning_rate": 1.7782909930715936e-06, + "loss": 1.0687, + "step": 77 + }, + { + "epoch": 0.00541591445632551, + "grad_norm": 9.502236507628096, + "learning_rate": 1.8013856812933028e-06, + "loss": 0.576, + "step": 78 + }, + { + "epoch": 0.005485349257047632, + "grad_norm": 8.543038322969451, + "learning_rate": 1.8244803695150118e-06, + "loss": 0.9264, + "step": 79 + }, + { + "epoch": 0.005554784057769754, + "grad_norm": 13.08679735538333, + "learning_rate": 1.8475750577367208e-06, + "loss": 0.9738, + "step": 80 + }, + { + "epoch": 0.005624218858491876, + "grad_norm": 6.243650078421165, + "learning_rate": 1.8706697459584296e-06, + "loss": 0.4676, + "step": 81 + }, + { + "epoch": 0.005693653659213998, + "grad_norm": 7.518004315662655, + "learning_rate": 1.8937644341801386e-06, + "loss": 0.5602, + "step": 82 + }, + { + "epoch": 0.00576308845993612, + "grad_norm": 10.026994351790133, + "learning_rate": 1.9168591224018476e-06, + "loss": 0.953, + "step": 83 + }, + { + "epoch": 0.005832523260658242, + "grad_norm": 7.471787475858601, + "learning_rate": 1.939953810623557e-06, + "loss": 0.6896, + "step": 84 + }, + { + "epoch": 0.005901958061380364, + "grad_norm": 8.112109679718, + "learning_rate": 1.9630484988452657e-06, + "loss": 1.2848, + "step": 85 + }, + { + "epoch": 0.005971392862102486, + "grad_norm": 8.901612937354118, + "learning_rate": 1.986143187066975e-06, + "loss": 0.8456, + "step": 86 + }, + { + "epoch": 0.006040827662824608, + "grad_norm": 6.632958150924269, + "learning_rate": 2.0092378752886837e-06, + "loss": 0.4002, + "step": 87 + }, + { + "epoch": 0.00611026246354673, + "grad_norm": 9.513902698937503, + "learning_rate": 2.032332563510393e-06, + "loss": 0.953, + "step": 88 + }, + { + "epoch": 0.006179697264268851, + "grad_norm": 8.977874587093943, + "learning_rate": 2.0554272517321017e-06, + "loss": 0.8093, + "step": 89 + }, + { + "epoch": 0.006249132064990974, + "grad_norm": 7.877280918683887, + "learning_rate": 2.078521939953811e-06, + "loss": 0.9446, + "step": 90 + }, + { + "epoch": 0.006318566865713095, + "grad_norm": 8.682888539294106, + "learning_rate": 2.1016166281755197e-06, + "loss": 0.825, + "step": 91 + }, + { + "epoch": 0.006388001666435218, + "grad_norm": 10.503719122240408, + "learning_rate": 2.124711316397229e-06, + "loss": 0.8066, + "step": 92 + }, + { + "epoch": 
0.006457436467157339, + "grad_norm": 7.049284652644519, + "learning_rate": 2.1478060046189377e-06, + "loss": 0.7004, + "step": 93 + }, + { + "epoch": 0.006526871267879461, + "grad_norm": 8.004087065974892, + "learning_rate": 2.1709006928406465e-06, + "loss": 0.7037, + "step": 94 + }, + { + "epoch": 0.006596306068601583, + "grad_norm": 9.634970345519243, + "learning_rate": 2.1939953810623558e-06, + "loss": 0.8039, + "step": 95 + }, + { + "epoch": 0.006665740869323705, + "grad_norm": 7.392604521492039, + "learning_rate": 2.217090069284065e-06, + "loss": 0.6265, + "step": 96 + }, + { + "epoch": 0.006735175670045827, + "grad_norm": 8.341040957707106, + "learning_rate": 2.2401847575057738e-06, + "loss": 0.9059, + "step": 97 + }, + { + "epoch": 0.006804610470767949, + "grad_norm": 8.51503350737925, + "learning_rate": 2.263279445727483e-06, + "loss": 0.6902, + "step": 98 + }, + { + "epoch": 0.0068740452714900705, + "grad_norm": 8.552520045378948, + "learning_rate": 2.286374133949192e-06, + "loss": 0.9884, + "step": 99 + }, + { + "epoch": 0.006943480072212193, + "grad_norm": 6.329427847739734, + "learning_rate": 2.3094688221709006e-06, + "loss": 0.8089, + "step": 100 + }, + { + "epoch": 0.0070129148729343145, + "grad_norm": 8.998937337841024, + "learning_rate": 2.33256351039261e-06, + "loss": 0.7129, + "step": 101 + }, + { + "epoch": 0.007082349673656437, + "grad_norm": 7.786626046748243, + "learning_rate": 2.355658198614319e-06, + "loss": 0.7022, + "step": 102 + }, + { + "epoch": 0.007151784474378559, + "grad_norm": 6.622691089398595, + "learning_rate": 2.378752886836028e-06, + "loss": 0.5738, + "step": 103 + }, + { + "epoch": 0.00722121927510068, + "grad_norm": 8.000336760770878, + "learning_rate": 2.401847575057737e-06, + "loss": 0.8833, + "step": 104 + }, + { + "epoch": 0.007290654075822803, + "grad_norm": 8.085605236794848, + "learning_rate": 2.424942263279446e-06, + "loss": 0.6113, + "step": 105 + }, + { + "epoch": 0.007360088876544924, + "grad_norm": 7.052447042160323, + "learning_rate": 2.4480369515011547e-06, + "loss": 0.5477, + "step": 106 + }, + { + "epoch": 0.007429523677267047, + "grad_norm": 7.347218311552021, + "learning_rate": 2.471131639722864e-06, + "loss": 0.7281, + "step": 107 + }, + { + "epoch": 0.007498958477989168, + "grad_norm": 8.165754998974737, + "learning_rate": 2.494226327944573e-06, + "loss": 0.76, + "step": 108 + }, + { + "epoch": 0.00756839327871129, + "grad_norm": 9.20995143835554, + "learning_rate": 2.517321016166282e-06, + "loss": 0.8136, + "step": 109 + }, + { + "epoch": 0.007637828079433412, + "grad_norm": 8.04080671703488, + "learning_rate": 2.540415704387991e-06, + "loss": 0.5286, + "step": 110 + }, + { + "epoch": 0.007707262880155534, + "grad_norm": 7.9324158285274375, + "learning_rate": 2.5635103926097e-06, + "loss": 0.521, + "step": 111 + }, + { + "epoch": 0.007776697680877656, + "grad_norm": 9.96336583788214, + "learning_rate": 2.586605080831409e-06, + "loss": 1.0405, + "step": 112 + }, + { + "epoch": 0.007846132481599778, + "grad_norm": 6.353347273278931, + "learning_rate": 2.609699769053118e-06, + "loss": 0.445, + "step": 113 + }, + { + "epoch": 0.0079155672823219, + "grad_norm": 8.588038325108936, + "learning_rate": 2.632794457274827e-06, + "loss": 0.9429, + "step": 114 + }, + { + "epoch": 0.007985002083044021, + "grad_norm": 8.058255637313328, + "learning_rate": 2.6558891454965355e-06, + "loss": 0.5134, + "step": 115 + }, + { + "epoch": 0.008054436883766143, + "grad_norm": 7.483094127899339, + "learning_rate": 2.678983833718245e-06, + "loss": 0.5747, 
+ "step": 116 + }, + { + "epoch": 0.008123871684488266, + "grad_norm": 9.349145562118684, + "learning_rate": 2.7020785219399544e-06, + "loss": 0.9157, + "step": 117 + }, + { + "epoch": 0.008193306485210387, + "grad_norm": 8.163396168943548, + "learning_rate": 2.725173210161663e-06, + "loss": 0.6872, + "step": 118 + }, + { + "epoch": 0.008262741285932509, + "grad_norm": 8.23598924390474, + "learning_rate": 2.748267898383372e-06, + "loss": 0.8795, + "step": 119 + }, + { + "epoch": 0.008332176086654632, + "grad_norm": 7.404893421679968, + "learning_rate": 2.771362586605081e-06, + "loss": 0.9829, + "step": 120 + }, + { + "epoch": 0.008401610887376754, + "grad_norm": 6.7105027768007925, + "learning_rate": 2.79445727482679e-06, + "loss": 0.5319, + "step": 121 + }, + { + "epoch": 0.008471045688098875, + "grad_norm": 8.298025773152233, + "learning_rate": 2.8175519630484993e-06, + "loss": 0.7877, + "step": 122 + }, + { + "epoch": 0.008540480488820997, + "grad_norm": 6.80474408690692, + "learning_rate": 2.840646651270208e-06, + "loss": 0.837, + "step": 123 + }, + { + "epoch": 0.00860991528954312, + "grad_norm": 7.907476042663422, + "learning_rate": 2.8637413394919173e-06, + "loss": 0.7648, + "step": 124 + }, + { + "epoch": 0.00867935009026524, + "grad_norm": 8.430192083729516, + "learning_rate": 2.886836027713626e-06, + "loss": 0.5931, + "step": 125 + }, + { + "epoch": 0.008748784890987363, + "grad_norm": 6.334908481261137, + "learning_rate": 2.9099307159353353e-06, + "loss": 0.423, + "step": 126 + }, + { + "epoch": 0.008818219691709485, + "grad_norm": 6.664750534802436, + "learning_rate": 2.9330254041570437e-06, + "loss": 0.7361, + "step": 127 + }, + { + "epoch": 0.008887654492431606, + "grad_norm": 6.316517517639449, + "learning_rate": 2.956120092378753e-06, + "loss": 0.6497, + "step": 128 + }, + { + "epoch": 0.008957089293153728, + "grad_norm": 9.000839744781612, + "learning_rate": 2.979214780600462e-06, + "loss": 0.6092, + "step": 129 + }, + { + "epoch": 0.00902652409387585, + "grad_norm": 9.454946006631703, + "learning_rate": 3.002309468822171e-06, + "loss": 0.9259, + "step": 130 + }, + { + "epoch": 0.009095958894597973, + "grad_norm": 6.471047428954251, + "learning_rate": 3.02540415704388e-06, + "loss": 0.3845, + "step": 131 + }, + { + "epoch": 0.009165393695320094, + "grad_norm": 9.146262598600908, + "learning_rate": 3.048498845265589e-06, + "loss": 0.9956, + "step": 132 + }, + { + "epoch": 0.009234828496042216, + "grad_norm": 4.973471332963633, + "learning_rate": 3.071593533487298e-06, + "loss": 0.3136, + "step": 133 + }, + { + "epoch": 0.009304263296764339, + "grad_norm": 5.037309599011863, + "learning_rate": 3.0946882217090074e-06, + "loss": 0.2538, + "step": 134 + }, + { + "epoch": 0.00937369809748646, + "grad_norm": 8.846441039525033, + "learning_rate": 3.117782909930716e-06, + "loss": 0.8734, + "step": 135 + }, + { + "epoch": 0.009443132898208582, + "grad_norm": 8.741796894358815, + "learning_rate": 3.1408775981524254e-06, + "loss": 0.7593, + "step": 136 + }, + { + "epoch": 0.009512567698930705, + "grad_norm": 8.388844859539322, + "learning_rate": 3.163972286374134e-06, + "loss": 0.5611, + "step": 137 + }, + { + "epoch": 0.009582002499652825, + "grad_norm": 8.440278770747923, + "learning_rate": 3.1870669745958434e-06, + "loss": 0.5961, + "step": 138 + }, + { + "epoch": 0.009651437300374948, + "grad_norm": 9.155870099159435, + "learning_rate": 3.2101616628175526e-06, + "loss": 0.6409, + "step": 139 + }, + { + "epoch": 0.00972087210109707, + "grad_norm": 7.165109260560067, + 
"learning_rate": 3.233256351039261e-06, + "loss": 0.5397, + "step": 140 + }, + { + "epoch": 0.009790306901819193, + "grad_norm": 7.666755660133118, + "learning_rate": 3.2563510392609702e-06, + "loss": 0.6154, + "step": 141 + }, + { + "epoch": 0.009859741702541313, + "grad_norm": 9.113626284488772, + "learning_rate": 3.279445727482679e-06, + "loss": 0.9999, + "step": 142 + }, + { + "epoch": 0.009929176503263436, + "grad_norm": 7.561342490361833, + "learning_rate": 3.3025404157043883e-06, + "loss": 0.5588, + "step": 143 + }, + { + "epoch": 0.009998611303985558, + "grad_norm": 7.466591873255865, + "learning_rate": 3.325635103926097e-06, + "loss": 0.5744, + "step": 144 + }, + { + "epoch": 0.010068046104707679, + "grad_norm": 11.61710081432426, + "learning_rate": 3.3487297921478063e-06, + "loss": 0.7473, + "step": 145 + }, + { + "epoch": 0.010137480905429801, + "grad_norm": 7.8725926074307, + "learning_rate": 3.3718244803695155e-06, + "loss": 0.5119, + "step": 146 + }, + { + "epoch": 0.010206915706151924, + "grad_norm": 7.972460125637176, + "learning_rate": 3.3949191685912243e-06, + "loss": 0.4512, + "step": 147 + }, + { + "epoch": 0.010276350506874045, + "grad_norm": 8.724357984745915, + "learning_rate": 3.4180138568129335e-06, + "loss": 0.6453, + "step": 148 + }, + { + "epoch": 0.010345785307596167, + "grad_norm": 7.961249184519657, + "learning_rate": 3.441108545034642e-06, + "loss": 0.5433, + "step": 149 + }, + { + "epoch": 0.01041522010831829, + "grad_norm": 6.215403593926059, + "learning_rate": 3.464203233256351e-06, + "loss": 0.7723, + "step": 150 + }, + { + "epoch": 0.010484654909040412, + "grad_norm": 8.758460490427854, + "learning_rate": 3.4872979214780608e-06, + "loss": 0.9119, + "step": 151 + }, + { + "epoch": 0.010554089709762533, + "grad_norm": 10.176733999827276, + "learning_rate": 3.510392609699769e-06, + "loss": 0.8499, + "step": 152 + }, + { + "epoch": 0.010623524510484655, + "grad_norm": 6.876447113139471, + "learning_rate": 3.5334872979214784e-06, + "loss": 0.4014, + "step": 153 + }, + { + "epoch": 0.010692959311206778, + "grad_norm": 7.746090493668413, + "learning_rate": 3.556581986143187e-06, + "loss": 0.7376, + "step": 154 + }, + { + "epoch": 0.010762394111928898, + "grad_norm": 9.048510962138742, + "learning_rate": 3.5796766743648964e-06, + "loss": 0.6867, + "step": 155 + }, + { + "epoch": 0.01083182891265102, + "grad_norm": 7.736614251182222, + "learning_rate": 3.6027713625866056e-06, + "loss": 0.6344, + "step": 156 + }, + { + "epoch": 0.010901263713373143, + "grad_norm": 7.090764783531742, + "learning_rate": 3.6258660508083144e-06, + "loss": 0.7005, + "step": 157 + }, + { + "epoch": 0.010970698514095264, + "grad_norm": 7.706643936289208, + "learning_rate": 3.6489607390300236e-06, + "loss": 0.4559, + "step": 158 + }, + { + "epoch": 0.011040133314817386, + "grad_norm": 7.776544216058004, + "learning_rate": 3.6720554272517324e-06, + "loss": 0.5341, + "step": 159 + }, + { + "epoch": 0.011109568115539509, + "grad_norm": 8.019923216313877, + "learning_rate": 3.6951501154734416e-06, + "loss": 0.4778, + "step": 160 + }, + { + "epoch": 0.011179002916261631, + "grad_norm": 9.161321307245682, + "learning_rate": 3.71824480369515e-06, + "loss": 1.0299, + "step": 161 + }, + { + "epoch": 0.011248437716983752, + "grad_norm": 8.433578559862811, + "learning_rate": 3.7413394919168592e-06, + "loss": 0.6039, + "step": 162 + }, + { + "epoch": 0.011317872517705874, + "grad_norm": 7.695087311710237, + "learning_rate": 3.7644341801385685e-06, + "loss": 0.7305, + "step": 163 + }, + { + "epoch": 
0.011387307318427997, + "grad_norm": 7.655838073611998, + "learning_rate": 3.7875288683602773e-06, + "loss": 0.6541, + "step": 164 + }, + { + "epoch": 0.011456742119150118, + "grad_norm": 10.467888289091613, + "learning_rate": 3.8106235565819865e-06, + "loss": 0.5579, + "step": 165 + }, + { + "epoch": 0.01152617691987224, + "grad_norm": 8.032530258362842, + "learning_rate": 3.833718244803695e-06, + "loss": 0.5161, + "step": 166 + }, + { + "epoch": 0.011595611720594362, + "grad_norm": 6.628730962263039, + "learning_rate": 3.8568129330254045e-06, + "loss": 0.5773, + "step": 167 + }, + { + "epoch": 0.011665046521316483, + "grad_norm": 8.296067825933568, + "learning_rate": 3.879907621247114e-06, + "loss": 0.6489, + "step": 168 + }, + { + "epoch": 0.011734481322038606, + "grad_norm": 7.901972751907289, + "learning_rate": 3.903002309468822e-06, + "loss": 0.597, + "step": 169 + }, + { + "epoch": 0.011803916122760728, + "grad_norm": 6.6234571772396595, + "learning_rate": 3.926096997690531e-06, + "loss": 0.4176, + "step": 170 + }, + { + "epoch": 0.011873350923482849, + "grad_norm": 4.951919107747259, + "learning_rate": 3.9491916859122405e-06, + "loss": 0.4214, + "step": 171 + }, + { + "epoch": 0.011942785724204971, + "grad_norm": 8.268020533640861, + "learning_rate": 3.97228637413395e-06, + "loss": 0.6889, + "step": 172 + }, + { + "epoch": 0.012012220524927094, + "grad_norm": 9.395095407345556, + "learning_rate": 3.995381062355658e-06, + "loss": 0.6425, + "step": 173 + }, + { + "epoch": 0.012081655325649216, + "grad_norm": 7.691579921237829, + "learning_rate": 4.018475750577367e-06, + "loss": 0.6374, + "step": 174 + }, + { + "epoch": 0.012151090126371337, + "grad_norm": 6.493290820973727, + "learning_rate": 4.041570438799077e-06, + "loss": 0.5608, + "step": 175 + }, + { + "epoch": 0.01222052492709346, + "grad_norm": 7.197257103887168, + "learning_rate": 4.064665127020786e-06, + "loss": 0.8264, + "step": 176 + }, + { + "epoch": 0.012289959727815582, + "grad_norm": 10.38255815605028, + "learning_rate": 4.087759815242495e-06, + "loss": 0.9234, + "step": 177 + }, + { + "epoch": 0.012359394528537702, + "grad_norm": 7.244668465776146, + "learning_rate": 4.110854503464203e-06, + "loss": 0.851, + "step": 178 + }, + { + "epoch": 0.012428829329259825, + "grad_norm": 9.357321140568963, + "learning_rate": 4.133949191685913e-06, + "loss": 0.6943, + "step": 179 + }, + { + "epoch": 0.012498264129981947, + "grad_norm": 7.631312363698161, + "learning_rate": 4.157043879907622e-06, + "loss": 0.7102, + "step": 180 + }, + { + "epoch": 0.012567698930704068, + "grad_norm": 7.586090088158953, + "learning_rate": 4.18013856812933e-06, + "loss": 0.7218, + "step": 181 + }, + { + "epoch": 0.01263713373142619, + "grad_norm": 7.414171322152922, + "learning_rate": 4.2032332563510394e-06, + "loss": 0.6047, + "step": 182 + }, + { + "epoch": 0.012706568532148313, + "grad_norm": 7.896374712841234, + "learning_rate": 4.226327944572749e-06, + "loss": 0.7097, + "step": 183 + }, + { + "epoch": 0.012776003332870435, + "grad_norm": 9.595437263954201, + "learning_rate": 4.249422632794458e-06, + "loss": 0.6796, + "step": 184 + }, + { + "epoch": 0.012845438133592556, + "grad_norm": 8.399978659382272, + "learning_rate": 4.272517321016167e-06, + "loss": 0.6807, + "step": 185 + }, + { + "epoch": 0.012914872934314679, + "grad_norm": 7.078793603147643, + "learning_rate": 4.2956120092378755e-06, + "loss": 0.4348, + "step": 186 + }, + { + "epoch": 0.012984307735036801, + "grad_norm": 9.295339273821106, + "learning_rate": 4.318706697459585e-06, + 
"loss": 0.7936, + "step": 187 + }, + { + "epoch": 0.013053742535758922, + "grad_norm": 8.695839431216239, + "learning_rate": 4.341801385681293e-06, + "loss": 0.9782, + "step": 188 + }, + { + "epoch": 0.013123177336481044, + "grad_norm": 6.366062648654598, + "learning_rate": 4.364896073903003e-06, + "loss": 0.559, + "step": 189 + }, + { + "epoch": 0.013192612137203167, + "grad_norm": 7.521188061274255, + "learning_rate": 4.3879907621247115e-06, + "loss": 0.4666, + "step": 190 + }, + { + "epoch": 0.013262046937925287, + "grad_norm": 7.0531940973021445, + "learning_rate": 4.411085450346421e-06, + "loss": 0.6315, + "step": 191 + }, + { + "epoch": 0.01333148173864741, + "grad_norm": 9.519300206969021, + "learning_rate": 4.43418013856813e-06, + "loss": 1.2702, + "step": 192 + }, + { + "epoch": 0.013400916539369532, + "grad_norm": 7.036508094027822, + "learning_rate": 4.457274826789838e-06, + "loss": 0.647, + "step": 193 + }, + { + "epoch": 0.013470351340091655, + "grad_norm": 7.526637565291745, + "learning_rate": 4.4803695150115476e-06, + "loss": 0.5896, + "step": 194 + }, + { + "epoch": 0.013539786140813775, + "grad_norm": 8.06785613897329, + "learning_rate": 4.503464203233257e-06, + "loss": 0.672, + "step": 195 + }, + { + "epoch": 0.013609220941535898, + "grad_norm": 8.16332388619368, + "learning_rate": 4.526558891454966e-06, + "loss": 0.4016, + "step": 196 + }, + { + "epoch": 0.01367865574225802, + "grad_norm": 7.5665928541979905, + "learning_rate": 4.549653579676675e-06, + "loss": 0.6695, + "step": 197 + }, + { + "epoch": 0.013748090542980141, + "grad_norm": 7.885979985250841, + "learning_rate": 4.572748267898384e-06, + "loss": 0.6387, + "step": 198 + }, + { + "epoch": 0.013817525343702263, + "grad_norm": 7.996267726925191, + "learning_rate": 4.595842956120093e-06, + "loss": 0.6419, + "step": 199 + }, + { + "epoch": 0.013886960144424386, + "grad_norm": 7.886509937063648, + "learning_rate": 4.618937644341801e-06, + "loss": 0.7893, + "step": 200 + }, + { + "epoch": 0.013956394945146507, + "grad_norm": 10.137581562013832, + "learning_rate": 4.6420323325635104e-06, + "loss": 0.8724, + "step": 201 + }, + { + "epoch": 0.014025829745868629, + "grad_norm": 8.866437627812644, + "learning_rate": 4.66512702078522e-06, + "loss": 0.9136, + "step": 202 + }, + { + "epoch": 0.014095264546590752, + "grad_norm": 7.240950884521287, + "learning_rate": 4.688221709006929e-06, + "loss": 0.5637, + "step": 203 + }, + { + "epoch": 0.014164699347312874, + "grad_norm": 6.705868275639191, + "learning_rate": 4.711316397228638e-06, + "loss": 0.3435, + "step": 204 + }, + { + "epoch": 0.014234134148034995, + "grad_norm": 7.7813978578362, + "learning_rate": 4.7344110854503465e-06, + "loss": 0.87, + "step": 205 + }, + { + "epoch": 0.014303568948757117, + "grad_norm": 6.34798697499396, + "learning_rate": 4.757505773672056e-06, + "loss": 0.5861, + "step": 206 + }, + { + "epoch": 0.01437300374947924, + "grad_norm": 5.762108611077589, + "learning_rate": 4.780600461893765e-06, + "loss": 0.3837, + "step": 207 + }, + { + "epoch": 0.01444243855020136, + "grad_norm": 6.776809589591002, + "learning_rate": 4.803695150115474e-06, + "loss": 0.5251, + "step": 208 + }, + { + "epoch": 0.014511873350923483, + "grad_norm": 7.252967084559288, + "learning_rate": 4.826789838337183e-06, + "loss": 0.4171, + "step": 209 + }, + { + "epoch": 0.014581308151645605, + "grad_norm": 6.267014708319897, + "learning_rate": 4.849884526558892e-06, + "loss": 0.4742, + "step": 210 + }, + { + "epoch": 0.014650742952367726, + "grad_norm": 7.906051619813322, + 
"learning_rate": 4.872979214780601e-06, + "loss": 0.7827, + "step": 211 + }, + { + "epoch": 0.014720177753089848, + "grad_norm": 8.613978998958547, + "learning_rate": 4.896073903002309e-06, + "loss": 0.6629, + "step": 212 + }, + { + "epoch": 0.01478961255381197, + "grad_norm": 6.203115949991411, + "learning_rate": 4.9191685912240186e-06, + "loss": 0.4997, + "step": 213 + }, + { + "epoch": 0.014859047354534093, + "grad_norm": 5.755868901799211, + "learning_rate": 4.942263279445728e-06, + "loss": 0.3926, + "step": 214 + }, + { + "epoch": 0.014928482155256214, + "grad_norm": 7.203269526065333, + "learning_rate": 4.965357967667437e-06, + "loss": 0.5858, + "step": 215 + }, + { + "epoch": 0.014997916955978336, + "grad_norm": 8.07108253770573, + "learning_rate": 4.988452655889146e-06, + "loss": 0.7231, + "step": 216 + }, + { + "epoch": 0.015067351756700459, + "grad_norm": 6.9279162382934665, + "learning_rate": 5.0115473441108554e-06, + "loss": 0.6657, + "step": 217 + }, + { + "epoch": 0.01513678655742258, + "grad_norm": 7.73020488394158, + "learning_rate": 5.034642032332564e-06, + "loss": 0.8388, + "step": 218 + }, + { + "epoch": 0.015206221358144702, + "grad_norm": 7.902878886808829, + "learning_rate": 5.057736720554273e-06, + "loss": 0.4083, + "step": 219 + }, + { + "epoch": 0.015275656158866825, + "grad_norm": 10.711205506478693, + "learning_rate": 5.080831408775982e-06, + "loss": 0.4782, + "step": 220 + }, + { + "epoch": 0.015345090959588945, + "grad_norm": 6.880081162040072, + "learning_rate": 5.1039260969976915e-06, + "loss": 0.2855, + "step": 221 + }, + { + "epoch": 0.015414525760311068, + "grad_norm": 6.380662974163436, + "learning_rate": 5.1270207852194e-06, + "loss": 0.4787, + "step": 222 + }, + { + "epoch": 0.01548396056103319, + "grad_norm": 7.439845565309826, + "learning_rate": 5.150115473441108e-06, + "loss": 0.5416, + "step": 223 + }, + { + "epoch": 0.015553395361755313, + "grad_norm": 8.091993541925458, + "learning_rate": 5.173210161662818e-06, + "loss": 0.3417, + "step": 224 + }, + { + "epoch": 0.015622830162477433, + "grad_norm": 7.49630391835408, + "learning_rate": 5.196304849884527e-06, + "loss": 0.7003, + "step": 225 + }, + { + "epoch": 0.015692264963199556, + "grad_norm": 7.939654978211845, + "learning_rate": 5.219399538106236e-06, + "loss": 0.7269, + "step": 226 + }, + { + "epoch": 0.015761699763921676, + "grad_norm": 7.345411689485614, + "learning_rate": 5.242494226327945e-06, + "loss": 0.5719, + "step": 227 + }, + { + "epoch": 0.0158311345646438, + "grad_norm": 10.482862789614648, + "learning_rate": 5.265588914549654e-06, + "loss": 0.9187, + "step": 228 + }, + { + "epoch": 0.01590056936536592, + "grad_norm": 8.171438197822155, + "learning_rate": 5.288683602771363e-06, + "loss": 0.4687, + "step": 229 + }, + { + "epoch": 0.015970004166088042, + "grad_norm": 7.223450030240171, + "learning_rate": 5.311778290993071e-06, + "loss": 0.6092, + "step": 230 + }, + { + "epoch": 0.016039438966810166, + "grad_norm": 7.068081165092103, + "learning_rate": 5.334872979214781e-06, + "loss": 0.5466, + "step": 231 + }, + { + "epoch": 0.016108873767532287, + "grad_norm": 5.874106446122856, + "learning_rate": 5.35796766743649e-06, + "loss": 0.4628, + "step": 232 + }, + { + "epoch": 0.016178308568254408, + "grad_norm": 7.108867549566205, + "learning_rate": 5.381062355658199e-06, + "loss": 0.3716, + "step": 233 + }, + { + "epoch": 0.016247743368976532, + "grad_norm": 5.632178509844155, + "learning_rate": 5.404157043879909e-06, + "loss": 0.5437, + "step": 234 + }, + { + "epoch": 
0.016317178169698653, + "grad_norm": 7.789452575297797, + "learning_rate": 5.427251732101617e-06, + "loss": 0.9047, + "step": 235 + }, + { + "epoch": 0.016386612970420773, + "grad_norm": 6.313918989909171, + "learning_rate": 5.450346420323326e-06, + "loss": 0.8541, + "step": 236 + }, + { + "epoch": 0.016456047771142897, + "grad_norm": 8.792205404881122, + "learning_rate": 5.473441108545036e-06, + "loss": 0.5568, + "step": 237 + }, + { + "epoch": 0.016525482571865018, + "grad_norm": 7.093517449702753, + "learning_rate": 5.496535796766744e-06, + "loss": 0.2775, + "step": 238 + }, + { + "epoch": 0.016594917372587142, + "grad_norm": 7.030211419043433, + "learning_rate": 5.519630484988453e-06, + "loss": 0.3933, + "step": 239 + }, + { + "epoch": 0.016664352173309263, + "grad_norm": 7.109727530662788, + "learning_rate": 5.542725173210162e-06, + "loss": 0.6273, + "step": 240 + }, + { + "epoch": 0.016733786974031384, + "grad_norm": 9.235528657829454, + "learning_rate": 5.565819861431872e-06, + "loss": 0.9034, + "step": 241 + }, + { + "epoch": 0.016803221774753508, + "grad_norm": 5.60568755918627, + "learning_rate": 5.58891454965358e-06, + "loss": 0.2712, + "step": 242 + }, + { + "epoch": 0.01687265657547563, + "grad_norm": 7.894937100207131, + "learning_rate": 5.6120092378752884e-06, + "loss": 0.4055, + "step": 243 + }, + { + "epoch": 0.01694209137619775, + "grad_norm": 7.242054568582779, + "learning_rate": 5.6351039260969985e-06, + "loss": 0.673, + "step": 244 + }, + { + "epoch": 0.017011526176919874, + "grad_norm": 7.741044483359136, + "learning_rate": 5.658198614318708e-06, + "loss": 0.5858, + "step": 245 + }, + { + "epoch": 0.017080960977641994, + "grad_norm": 6.561504313361452, + "learning_rate": 5.681293302540416e-06, + "loss": 0.5438, + "step": 246 + }, + { + "epoch": 0.017150395778364115, + "grad_norm": 7.82850665938877, + "learning_rate": 5.7043879907621245e-06, + "loss": 0.6689, + "step": 247 + }, + { + "epoch": 0.01721983057908624, + "grad_norm": 6.818172850670444, + "learning_rate": 5.7274826789838346e-06, + "loss": 0.9547, + "step": 248 + }, + { + "epoch": 0.01728926537980836, + "grad_norm": 6.798584154668491, + "learning_rate": 5.750577367205543e-06, + "loss": 0.4316, + "step": 249 + }, + { + "epoch": 0.01735870018053048, + "grad_norm": 8.379369709812018, + "learning_rate": 5.773672055427252e-06, + "loss": 0.8569, + "step": 250 + }, + { + "epoch": 0.017428134981252605, + "grad_norm": 5.911403153707157, + "learning_rate": 5.796766743648961e-06, + "loss": 0.2661, + "step": 251 + }, + { + "epoch": 0.017497569781974726, + "grad_norm": 7.432621330431931, + "learning_rate": 5.819861431870671e-06, + "loss": 0.5803, + "step": 252 + }, + { + "epoch": 0.017567004582696846, + "grad_norm": 6.981417976237189, + "learning_rate": 5.842956120092379e-06, + "loss": 0.6352, + "step": 253 + }, + { + "epoch": 0.01763643938341897, + "grad_norm": 7.268127620571004, + "learning_rate": 5.866050808314087e-06, + "loss": 0.5386, + "step": 254 + }, + { + "epoch": 0.01770587418414109, + "grad_norm": 6.779749442363237, + "learning_rate": 5.889145496535797e-06, + "loss": 0.419, + "step": 255 + }, + { + "epoch": 0.017775308984863212, + "grad_norm": 6.048719139098827, + "learning_rate": 5.912240184757506e-06, + "loss": 0.398, + "step": 256 + }, + { + "epoch": 0.017844743785585336, + "grad_norm": 8.916395897895692, + "learning_rate": 5.935334872979215e-06, + "loss": 0.733, + "step": 257 + }, + { + "epoch": 0.017914178586307457, + "grad_norm": 5.763008183819964, + "learning_rate": 5.958429561200924e-06, + "loss": 
0.5008, + "step": 258 + }, + { + "epoch": 0.017983613387029578, + "grad_norm": 5.506019659210942, + "learning_rate": 5.9815242494226335e-06, + "loss": 0.3564, + "step": 259 + }, + { + "epoch": 0.0180530481877517, + "grad_norm": 6.2034451104650365, + "learning_rate": 6.004618937644342e-06, + "loss": 0.4833, + "step": 260 + }, + { + "epoch": 0.018122482988473822, + "grad_norm": 7.430924159177619, + "learning_rate": 6.027713625866052e-06, + "loss": 0.432, + "step": 261 + }, + { + "epoch": 0.018191917789195947, + "grad_norm": 6.850038257928346, + "learning_rate": 6.05080831408776e-06, + "loss": 0.494, + "step": 262 + }, + { + "epoch": 0.018261352589918067, + "grad_norm": 6.721300418423033, + "learning_rate": 6.0739030023094695e-06, + "loss": 0.6773, + "step": 263 + }, + { + "epoch": 0.018330787390640188, + "grad_norm": 6.841477097403693, + "learning_rate": 6.096997690531178e-06, + "loss": 0.4801, + "step": 264 + }, + { + "epoch": 0.018400222191362312, + "grad_norm": 8.708883828325524, + "learning_rate": 6.120092378752888e-06, + "loss": 0.7887, + "step": 265 + }, + { + "epoch": 0.018469656992084433, + "grad_norm": 6.575860342200785, + "learning_rate": 6.143187066974596e-06, + "loss": 0.478, + "step": 266 + }, + { + "epoch": 0.018539091792806554, + "grad_norm": 7.144096209557728, + "learning_rate": 6.166281755196305e-06, + "loss": 0.6448, + "step": 267 + }, + { + "epoch": 0.018608526593528678, + "grad_norm": 8.178977160679182, + "learning_rate": 6.189376443418015e-06, + "loss": 0.7615, + "step": 268 + }, + { + "epoch": 0.0186779613942508, + "grad_norm": 8.376766153462068, + "learning_rate": 6.212471131639723e-06, + "loss": 0.7756, + "step": 269 + }, + { + "epoch": 0.01874739619497292, + "grad_norm": 8.048363295250732, + "learning_rate": 6.235565819861432e-06, + "loss": 0.45, + "step": 270 + }, + { + "epoch": 0.018816830995695043, + "grad_norm": 7.907170737872879, + "learning_rate": 6.258660508083141e-06, + "loss": 0.8838, + "step": 271 + }, + { + "epoch": 0.018886265796417164, + "grad_norm": 5.689841682331542, + "learning_rate": 6.281755196304851e-06, + "loss": 0.3793, + "step": 272 + }, + { + "epoch": 0.018955700597139285, + "grad_norm": 7.235933537441556, + "learning_rate": 6.304849884526559e-06, + "loss": 0.5064, + "step": 273 + }, + { + "epoch": 0.01902513539786141, + "grad_norm": 8.573491732383479, + "learning_rate": 6.327944572748268e-06, + "loss": 0.7878, + "step": 274 + }, + { + "epoch": 0.01909457019858353, + "grad_norm": 7.384608978851236, + "learning_rate": 6.351039260969978e-06, + "loss": 0.6261, + "step": 275 + }, + { + "epoch": 0.01916400499930565, + "grad_norm": 6.8300163065760735, + "learning_rate": 6.374133949191687e-06, + "loss": 0.5075, + "step": 276 + }, + { + "epoch": 0.019233439800027775, + "grad_norm": 7.20469012733151, + "learning_rate": 6.397228637413395e-06, + "loss": 0.6793, + "step": 277 + }, + { + "epoch": 0.019302874600749895, + "grad_norm": 7.66091465195689, + "learning_rate": 6.420323325635105e-06, + "loss": 0.6588, + "step": 278 + }, + { + "epoch": 0.019372309401472016, + "grad_norm": 6.1393678300018335, + "learning_rate": 6.443418013856814e-06, + "loss": 0.3072, + "step": 279 + }, + { + "epoch": 0.01944174420219414, + "grad_norm": 6.955873617409415, + "learning_rate": 6.466512702078522e-06, + "loss": 0.7568, + "step": 280 + }, + { + "epoch": 0.01951117900291626, + "grad_norm": 7.3958652182056674, + "learning_rate": 6.489607390300231e-06, + "loss": 0.6083, + "step": 281 + }, + { + "epoch": 0.019580613803638385, + "grad_norm": 7.248430019318075, + "learning_rate": 
6.5127020785219405e-06, + "loss": 0.6296, + "step": 282 + }, + { + "epoch": 0.019650048604360506, + "grad_norm": 7.261412925078724, + "learning_rate": 6.53579676674365e-06, + "loss": 0.6231, + "step": 283 + }, + { + "epoch": 0.019719483405082627, + "grad_norm": 7.115747329622963, + "learning_rate": 6.558891454965358e-06, + "loss": 0.7525, + "step": 284 + }, + { + "epoch": 0.01978891820580475, + "grad_norm": 4.884646393329919, + "learning_rate": 6.581986143187068e-06, + "loss": 0.4707, + "step": 285 + }, + { + "epoch": 0.01985835300652687, + "grad_norm": 7.031418572019309, + "learning_rate": 6.6050808314087765e-06, + "loss": 0.6885, + "step": 286 + }, + { + "epoch": 0.019927787807248992, + "grad_norm": 8.727511101239376, + "learning_rate": 6.628175519630486e-06, + "loss": 0.8772, + "step": 287 + }, + { + "epoch": 0.019997222607971116, + "grad_norm": 6.825765458286112, + "learning_rate": 6.651270207852194e-06, + "loss": 0.6025, + "step": 288 + }, + { + "epoch": 0.020066657408693237, + "grad_norm": 7.803096499060419, + "learning_rate": 6.674364896073904e-06, + "loss": 0.587, + "step": 289 + }, + { + "epoch": 0.020136092209415358, + "grad_norm": 8.051493066179713, + "learning_rate": 6.6974595842956126e-06, + "loss": 0.7166, + "step": 290 + }, + { + "epoch": 0.020205527010137482, + "grad_norm": 7.528003888149653, + "learning_rate": 6.720554272517321e-06, + "loss": 0.7072, + "step": 291 + }, + { + "epoch": 0.020274961810859603, + "grad_norm": 5.872670048523211, + "learning_rate": 6.743648960739031e-06, + "loss": 0.6164, + "step": 292 + }, + { + "epoch": 0.020344396611581723, + "grad_norm": 7.247530452394163, + "learning_rate": 6.766743648960739e-06, + "loss": 0.6691, + "step": 293 + }, + { + "epoch": 0.020413831412303848, + "grad_norm": 7.148143712801787, + "learning_rate": 6.789838337182449e-06, + "loss": 0.6739, + "step": 294 + }, + { + "epoch": 0.02048326621302597, + "grad_norm": 7.654533786734221, + "learning_rate": 6.812933025404158e-06, + "loss": 0.7014, + "step": 295 + }, + { + "epoch": 0.02055270101374809, + "grad_norm": 6.616408505516904, + "learning_rate": 6.836027713625867e-06, + "loss": 0.5719, + "step": 296 + }, + { + "epoch": 0.020622135814470213, + "grad_norm": 7.042103483214846, + "learning_rate": 6.859122401847575e-06, + "loss": 0.7897, + "step": 297 + }, + { + "epoch": 0.020691570615192334, + "grad_norm": 8.395484946348608, + "learning_rate": 6.882217090069284e-06, + "loss": 0.6979, + "step": 298 + }, + { + "epoch": 0.020761005415914455, + "grad_norm": 6.4000256002916, + "learning_rate": 6.905311778290994e-06, + "loss": 0.7118, + "step": 299 + }, + { + "epoch": 0.02083044021663658, + "grad_norm": 6.564081158509931, + "learning_rate": 6.928406466512702e-06, + "loss": 0.4898, + "step": 300 + }, + { + "epoch": 0.0208998750173587, + "grad_norm": 7.506242231861854, + "learning_rate": 6.9515011547344115e-06, + "loss": 0.7916, + "step": 301 + }, + { + "epoch": 0.020969309818080824, + "grad_norm": 6.565232756783366, + "learning_rate": 6.9745958429561215e-06, + "loss": 0.5997, + "step": 302 + }, + { + "epoch": 0.021038744618802944, + "grad_norm": 5.535471339222495, + "learning_rate": 6.99769053117783e-06, + "loss": 0.5694, + "step": 303 + }, + { + "epoch": 0.021108179419525065, + "grad_norm": 6.245243607682892, + "learning_rate": 7.020785219399538e-06, + "loss": 0.3781, + "step": 304 + }, + { + "epoch": 0.02117761422024719, + "grad_norm": 7.5399123022411585, + "learning_rate": 7.0438799076212475e-06, + "loss": 0.7124, + "step": 305 + }, + { + "epoch": 0.02124704902096931, + "grad_norm": 
7.235601607230619, + "learning_rate": 7.066974595842957e-06, + "loss": 0.6458, + "step": 306 + }, + { + "epoch": 0.02131648382169143, + "grad_norm": 6.05366644187799, + "learning_rate": 7.090069284064666e-06, + "loss": 0.293, + "step": 307 + }, + { + "epoch": 0.021385918622413555, + "grad_norm": 6.8668146431598345, + "learning_rate": 7.113163972286374e-06, + "loss": 0.7551, + "step": 308 + }, + { + "epoch": 0.021455353423135676, + "grad_norm": 6.736934020837996, + "learning_rate": 7.136258660508084e-06, + "loss": 0.6981, + "step": 309 + }, + { + "epoch": 0.021524788223857796, + "grad_norm": 5.249739320162398, + "learning_rate": 7.159353348729793e-06, + "loss": 0.3149, + "step": 310 + }, + { + "epoch": 0.02159422302457992, + "grad_norm": 6.8273055750049645, + "learning_rate": 7.182448036951501e-06, + "loss": 0.2502, + "step": 311 + }, + { + "epoch": 0.02166365782530204, + "grad_norm": 8.320322755579785, + "learning_rate": 7.205542725173211e-06, + "loss": 0.8918, + "step": 312 + }, + { + "epoch": 0.021733092626024162, + "grad_norm": 8.442613851593565, + "learning_rate": 7.22863741339492e-06, + "loss": 0.7903, + "step": 313 + }, + { + "epoch": 0.021802527426746286, + "grad_norm": 8.45217220190808, + "learning_rate": 7.251732101616629e-06, + "loss": 0.8665, + "step": 314 + }, + { + "epoch": 0.021871962227468407, + "grad_norm": 7.5277451108055935, + "learning_rate": 7.274826789838337e-06, + "loss": 0.6209, + "step": 315 + }, + { + "epoch": 0.021941397028190528, + "grad_norm": 8.248503159595833, + "learning_rate": 7.297921478060047e-06, + "loss": 0.9061, + "step": 316 + }, + { + "epoch": 0.022010831828912652, + "grad_norm": 7.2776793516288105, + "learning_rate": 7.321016166281756e-06, + "loss": 0.6603, + "step": 317 + }, + { + "epoch": 0.022080266629634773, + "grad_norm": 5.965637859359356, + "learning_rate": 7.344110854503465e-06, + "loss": 0.4422, + "step": 318 + }, + { + "epoch": 0.022149701430356893, + "grad_norm": 7.5312840452806284, + "learning_rate": 7.367205542725174e-06, + "loss": 0.6134, + "step": 319 + }, + { + "epoch": 0.022219136231079017, + "grad_norm": 5.186297170080935, + "learning_rate": 7.390300230946883e-06, + "loss": 0.3595, + "step": 320 + }, + { + "epoch": 0.022288571031801138, + "grad_norm": 5.828307732663609, + "learning_rate": 7.413394919168592e-06, + "loss": 0.8684, + "step": 321 + }, + { + "epoch": 0.022358005832523262, + "grad_norm": 7.883420149475018, + "learning_rate": 7.4364896073903e-06, + "loss": 0.7388, + "step": 322 + }, + { + "epoch": 0.022427440633245383, + "grad_norm": 5.958168620052153, + "learning_rate": 7.45958429561201e-06, + "loss": 0.6228, + "step": 323 + }, + { + "epoch": 0.022496875433967504, + "grad_norm": 7.586493157542109, + "learning_rate": 7.4826789838337185e-06, + "loss": 0.6236, + "step": 324 + }, + { + "epoch": 0.022566310234689628, + "grad_norm": 5.070739679339793, + "learning_rate": 7.505773672055428e-06, + "loss": 0.2893, + "step": 325 + }, + { + "epoch": 0.02263574503541175, + "grad_norm": 7.297666983970221, + "learning_rate": 7.528868360277137e-06, + "loss": 0.6513, + "step": 326 + }, + { + "epoch": 0.02270517983613387, + "grad_norm": 6.458388113960575, + "learning_rate": 7.551963048498846e-06, + "loss": 0.5596, + "step": 327 + }, + { + "epoch": 0.022774614636855994, + "grad_norm": 5.436275519046475, + "learning_rate": 7.5750577367205545e-06, + "loss": 0.3816, + "step": 328 + }, + { + "epoch": 0.022844049437578114, + "grad_norm": 7.137994819236926, + "learning_rate": 7.598152424942264e-06, + "loss": 0.7518, + "step": 329 + }, + { + 
"epoch": 0.022913484238300235, + "grad_norm": 7.055767976438735, + "learning_rate": 7.621247113163973e-06, + "loss": 0.5626, + "step": 330 + }, + { + "epoch": 0.02298291903902236, + "grad_norm": 6.997292636102885, + "learning_rate": 7.644341801385682e-06, + "loss": 0.824, + "step": 331 + }, + { + "epoch": 0.02305235383974448, + "grad_norm": 6.212786801754549, + "learning_rate": 7.66743648960739e-06, + "loss": 0.8321, + "step": 332 + }, + { + "epoch": 0.0231217886404666, + "grad_norm": 6.9599198959217174, + "learning_rate": 7.6905311778291e-06, + "loss": 0.7084, + "step": 333 + }, + { + "epoch": 0.023191223441188725, + "grad_norm": 7.06848038393813, + "learning_rate": 7.713625866050809e-06, + "loss": 0.53, + "step": 334 + }, + { + "epoch": 0.023260658241910846, + "grad_norm": 6.266825349856001, + "learning_rate": 7.736720554272517e-06, + "loss": 0.5645, + "step": 335 + }, + { + "epoch": 0.023330093042632966, + "grad_norm": 6.155799681888197, + "learning_rate": 7.759815242494227e-06, + "loss": 0.3237, + "step": 336 + }, + { + "epoch": 0.02339952784335509, + "grad_norm": 5.917148975534441, + "learning_rate": 7.782909930715936e-06, + "loss": 0.6142, + "step": 337 + }, + { + "epoch": 0.02346896264407721, + "grad_norm": 6.109046201222556, + "learning_rate": 7.806004618937644e-06, + "loss": 0.5259, + "step": 338 + }, + { + "epoch": 0.023538397444799332, + "grad_norm": 5.117697228312204, + "learning_rate": 7.829099307159354e-06, + "loss": 0.287, + "step": 339 + }, + { + "epoch": 0.023607832245521456, + "grad_norm": 6.01514587593891, + "learning_rate": 7.852193995381063e-06, + "loss": 0.5645, + "step": 340 + }, + { + "epoch": 0.023677267046243577, + "grad_norm": 6.802472620112372, + "learning_rate": 7.875288683602773e-06, + "loss": 0.498, + "step": 341 + }, + { + "epoch": 0.023746701846965697, + "grad_norm": 8.104846417351672, + "learning_rate": 7.898383371824481e-06, + "loss": 0.806, + "step": 342 + }, + { + "epoch": 0.02381613664768782, + "grad_norm": 8.340285985024336, + "learning_rate": 7.921478060046191e-06, + "loss": 0.7905, + "step": 343 + }, + { + "epoch": 0.023885571448409942, + "grad_norm": 6.537761804549327, + "learning_rate": 7.9445727482679e-06, + "loss": 0.6177, + "step": 344 + }, + { + "epoch": 0.023955006249132067, + "grad_norm": 6.988147662879393, + "learning_rate": 7.967667436489608e-06, + "loss": 0.5674, + "step": 345 + }, + { + "epoch": 0.024024441049854187, + "grad_norm": 5.990117475928348, + "learning_rate": 7.990762124711316e-06, + "loss": 0.6938, + "step": 346 + }, + { + "epoch": 0.024093875850576308, + "grad_norm": 5.538492973429358, + "learning_rate": 8.013856812933026e-06, + "loss": 0.5507, + "step": 347 + }, + { + "epoch": 0.024163310651298432, + "grad_norm": 6.067197910485304, + "learning_rate": 8.036951501154735e-06, + "loss": 0.6371, + "step": 348 + }, + { + "epoch": 0.024232745452020553, + "grad_norm": 8.345060036141263, + "learning_rate": 8.060046189376443e-06, + "loss": 0.8673, + "step": 349 + }, + { + "epoch": 0.024302180252742674, + "grad_norm": 7.0232190023213965, + "learning_rate": 8.083140877598153e-06, + "loss": 0.6343, + "step": 350 + }, + { + "epoch": 0.024371615053464798, + "grad_norm": 7.299012686964988, + "learning_rate": 8.106235565819862e-06, + "loss": 0.4694, + "step": 351 + }, + { + "epoch": 0.02444104985418692, + "grad_norm": 5.699938535693354, + "learning_rate": 8.129330254041572e-06, + "loss": 0.4333, + "step": 352 + }, + { + "epoch": 0.02451048465490904, + "grad_norm": 6.64751936730123, + "learning_rate": 8.15242494226328e-06, + "loss": 0.6341, + 
"step": 353 + }, + { + "epoch": 0.024579919455631163, + "grad_norm": 7.358249173848904, + "learning_rate": 8.17551963048499e-06, + "loss": 0.7334, + "step": 354 + }, + { + "epoch": 0.024649354256353284, + "grad_norm": 6.947107744962895, + "learning_rate": 8.198614318706698e-06, + "loss": 0.4811, + "step": 355 + }, + { + "epoch": 0.024718789057075405, + "grad_norm": 7.9905042925361105, + "learning_rate": 8.221709006928407e-06, + "loss": 0.808, + "step": 356 + }, + { + "epoch": 0.02478822385779753, + "grad_norm": 7.074925682535921, + "learning_rate": 8.244803695150117e-06, + "loss": 0.6418, + "step": 357 + }, + { + "epoch": 0.02485765865851965, + "grad_norm": 6.493440715600506, + "learning_rate": 8.267898383371825e-06, + "loss": 0.6527, + "step": 358 + }, + { + "epoch": 0.02492709345924177, + "grad_norm": 6.252875541049399, + "learning_rate": 8.290993071593534e-06, + "loss": 0.5673, + "step": 359 + }, + { + "epoch": 0.024996528259963895, + "grad_norm": 6.680630293609687, + "learning_rate": 8.314087759815244e-06, + "loss": 0.859, + "step": 360 + }, + { + "epoch": 0.025065963060686015, + "grad_norm": 5.061991943083161, + "learning_rate": 8.337182448036952e-06, + "loss": 0.4473, + "step": 361 + }, + { + "epoch": 0.025135397861408136, + "grad_norm": 6.7000922277432124, + "learning_rate": 8.36027713625866e-06, + "loss": 0.8466, + "step": 362 + }, + { + "epoch": 0.02520483266213026, + "grad_norm": 7.104739112728029, + "learning_rate": 8.38337182448037e-06, + "loss": 0.5612, + "step": 363 + }, + { + "epoch": 0.02527426746285238, + "grad_norm": 5.660476424452743, + "learning_rate": 8.406466512702079e-06, + "loss": 0.5105, + "step": 364 + }, + { + "epoch": 0.025343702263574505, + "grad_norm": 6.204625626329607, + "learning_rate": 8.429561200923789e-06, + "loss": 0.7786, + "step": 365 + }, + { + "epoch": 0.025413137064296626, + "grad_norm": 7.189824870870765, + "learning_rate": 8.452655889145497e-06, + "loss": 0.7959, + "step": 366 + }, + { + "epoch": 0.025482571865018747, + "grad_norm": 5.4005025338163835, + "learning_rate": 8.475750577367207e-06, + "loss": 0.5015, + "step": 367 + }, + { + "epoch": 0.02555200666574087, + "grad_norm": 6.179153950657576, + "learning_rate": 8.498845265588916e-06, + "loss": 0.8045, + "step": 368 + }, + { + "epoch": 0.02562144146646299, + "grad_norm": 6.380841571553142, + "learning_rate": 8.521939953810624e-06, + "loss": 0.614, + "step": 369 + }, + { + "epoch": 0.025690876267185112, + "grad_norm": 5.98211190707568, + "learning_rate": 8.545034642032334e-06, + "loss": 0.7181, + "step": 370 + }, + { + "epoch": 0.025760311067907236, + "grad_norm": 6.716203480188986, + "learning_rate": 8.568129330254043e-06, + "loss": 0.9596, + "step": 371 + }, + { + "epoch": 0.025829745868629357, + "grad_norm": 4.688742121775829, + "learning_rate": 8.591224018475751e-06, + "loss": 0.3099, + "step": 372 + }, + { + "epoch": 0.025899180669351478, + "grad_norm": 6.838852405968659, + "learning_rate": 8.61431870669746e-06, + "loss": 0.7038, + "step": 373 + }, + { + "epoch": 0.025968615470073602, + "grad_norm": 7.882614321083732, + "learning_rate": 8.63741339491917e-06, + "loss": 0.7109, + "step": 374 + }, + { + "epoch": 0.026038050270795723, + "grad_norm": 5.892903512026724, + "learning_rate": 8.660508083140878e-06, + "loss": 0.659, + "step": 375 + }, + { + "epoch": 0.026107485071517843, + "grad_norm": 6.44911963805991, + "learning_rate": 8.683602771362586e-06, + "loss": 0.8151, + "step": 376 + }, + { + "epoch": 0.026176919872239968, + "grad_norm": 4.527131866714667, + "learning_rate": 
8.706697459584296e-06, + "loss": 0.7085, + "step": 377 + }, + { + "epoch": 0.02624635467296209, + "grad_norm": 6.9135618427276615, + "learning_rate": 8.729792147806006e-06, + "loss": 0.2928, + "step": 378 + }, + { + "epoch": 0.02631578947368421, + "grad_norm": 6.760408838578817, + "learning_rate": 8.752886836027715e-06, + "loss": 0.7458, + "step": 379 + }, + { + "epoch": 0.026385224274406333, + "grad_norm": 9.24616524555643, + "learning_rate": 8.775981524249423e-06, + "loss": 0.7244, + "step": 380 + }, + { + "epoch": 0.026454659075128454, + "grad_norm": 7.711115269193827, + "learning_rate": 8.799076212471133e-06, + "loss": 0.3853, + "step": 381 + }, + { + "epoch": 0.026524093875850575, + "grad_norm": 7.176188599274549, + "learning_rate": 8.822170900692842e-06, + "loss": 0.8815, + "step": 382 + }, + { + "epoch": 0.0265935286765727, + "grad_norm": 6.619997606508476, + "learning_rate": 8.84526558891455e-06, + "loss": 0.4085, + "step": 383 + }, + { + "epoch": 0.02666296347729482, + "grad_norm": 4.8904274112495045, + "learning_rate": 8.86836027713626e-06, + "loss": 0.4159, + "step": 384 + }, + { + "epoch": 0.026732398278016944, + "grad_norm": 5.867374157660708, + "learning_rate": 8.891454965357968e-06, + "loss": 0.788, + "step": 385 + }, + { + "epoch": 0.026801833078739064, + "grad_norm": 5.958051232927474, + "learning_rate": 8.914549653579677e-06, + "loss": 0.428, + "step": 386 + }, + { + "epoch": 0.026871267879461185, + "grad_norm": 7.0537509318282785, + "learning_rate": 8.937644341801387e-06, + "loss": 0.7907, + "step": 387 + }, + { + "epoch": 0.02694070268018331, + "grad_norm": 6.6452728123702745, + "learning_rate": 8.960739030023095e-06, + "loss": 0.6033, + "step": 388 + }, + { + "epoch": 0.02701013748090543, + "grad_norm": 7.503647623096358, + "learning_rate": 8.983833718244804e-06, + "loss": 0.7108, + "step": 389 + }, + { + "epoch": 0.02707957228162755, + "grad_norm": 4.752411000913283, + "learning_rate": 9.006928406466514e-06, + "loss": 0.3046, + "step": 390 + }, + { + "epoch": 0.027149007082349675, + "grad_norm": 5.876081179515414, + "learning_rate": 9.030023094688222e-06, + "loss": 0.6925, + "step": 391 + }, + { + "epoch": 0.027218441883071796, + "grad_norm": 6.819150079586152, + "learning_rate": 9.053117782909932e-06, + "loss": 0.8163, + "step": 392 + }, + { + "epoch": 0.027287876683793916, + "grad_norm": 6.362870296106029, + "learning_rate": 9.07621247113164e-06, + "loss": 0.4485, + "step": 393 + }, + { + "epoch": 0.02735731148451604, + "grad_norm": 6.918324256538554, + "learning_rate": 9.09930715935335e-06, + "loss": 0.9359, + "step": 394 + }, + { + "epoch": 0.02742674628523816, + "grad_norm": 6.246425940430203, + "learning_rate": 9.122401847575059e-06, + "loss": 0.3473, + "step": 395 + }, + { + "epoch": 0.027496181085960282, + "grad_norm": 6.6533307700734365, + "learning_rate": 9.145496535796767e-06, + "loss": 0.9051, + "step": 396 + }, + { + "epoch": 0.027565615886682406, + "grad_norm": 4.522431898254659, + "learning_rate": 9.168591224018476e-06, + "loss": 0.4896, + "step": 397 + }, + { + "epoch": 0.027635050687404527, + "grad_norm": 5.62359784174198, + "learning_rate": 9.191685912240186e-06, + "loss": 0.6406, + "step": 398 + }, + { + "epoch": 0.027704485488126648, + "grad_norm": 6.554786252346115, + "learning_rate": 9.214780600461894e-06, + "loss": 0.7186, + "step": 399 + }, + { + "epoch": 0.027773920288848772, + "grad_norm": 6.470728191349876, + "learning_rate": 9.237875288683602e-06, + "loss": 0.5706, + "step": 400 + }, + { + "epoch": 0.027843355089570893, + "grad_norm": 
5.696581976585349, + "learning_rate": 9.260969976905312e-06, + "loss": 0.6381, + "step": 401 + }, + { + "epoch": 0.027912789890293013, + "grad_norm": 6.827037938775935, + "learning_rate": 9.284064665127021e-06, + "loss": 0.7833, + "step": 402 + }, + { + "epoch": 0.027982224691015137, + "grad_norm": 6.560358522649497, + "learning_rate": 9.307159353348731e-06, + "loss": 0.6933, + "step": 403 + }, + { + "epoch": 0.028051659491737258, + "grad_norm": 6.647170924819252, + "learning_rate": 9.33025404157044e-06, + "loss": 0.4499, + "step": 404 + }, + { + "epoch": 0.028121094292459382, + "grad_norm": 5.130789644873473, + "learning_rate": 9.35334872979215e-06, + "loss": 0.4588, + "step": 405 + }, + { + "epoch": 0.028190529093181503, + "grad_norm": 4.860021733661967, + "learning_rate": 9.376443418013858e-06, + "loss": 0.2815, + "step": 406 + }, + { + "epoch": 0.028259963893903624, + "grad_norm": 4.90820253480832, + "learning_rate": 9.399538106235566e-06, + "loss": 0.4098, + "step": 407 + }, + { + "epoch": 0.028329398694625748, + "grad_norm": 4.941040654728571, + "learning_rate": 9.422632794457276e-06, + "loss": 0.4002, + "step": 408 + }, + { + "epoch": 0.02839883349534787, + "grad_norm": 7.675709003678412, + "learning_rate": 9.445727482678985e-06, + "loss": 0.9723, + "step": 409 + }, + { + "epoch": 0.02846826829606999, + "grad_norm": 8.592025328623098, + "learning_rate": 9.468822170900693e-06, + "loss": 0.2338, + "step": 410 + }, + { + "epoch": 0.028537703096792114, + "grad_norm": 5.993286898711553, + "learning_rate": 9.491916859122403e-06, + "loss": 0.5384, + "step": 411 + }, + { + "epoch": 0.028607137897514234, + "grad_norm": 5.8652312391691055, + "learning_rate": 9.515011547344111e-06, + "loss": 0.4496, + "step": 412 + }, + { + "epoch": 0.028676572698236355, + "grad_norm": 4.901801928340526, + "learning_rate": 9.53810623556582e-06, + "loss": 0.4712, + "step": 413 + }, + { + "epoch": 0.02874600749895848, + "grad_norm": 7.614399384579382, + "learning_rate": 9.56120092378753e-06, + "loss": 0.6751, + "step": 414 + }, + { + "epoch": 0.0288154422996806, + "grad_norm": 4.412664038311866, + "learning_rate": 9.584295612009238e-06, + "loss": 0.3466, + "step": 415 + }, + { + "epoch": 0.02888487710040272, + "grad_norm": 7.238038321499026, + "learning_rate": 9.607390300230948e-06, + "loss": 0.9787, + "step": 416 + }, + { + "epoch": 0.028954311901124845, + "grad_norm": 6.94095866789776, + "learning_rate": 9.630484988452657e-06, + "loss": 0.9159, + "step": 417 + }, + { + "epoch": 0.029023746701846966, + "grad_norm": 5.653221161555569, + "learning_rate": 9.653579676674367e-06, + "loss": 0.4889, + "step": 418 + }, + { + "epoch": 0.029093181502569086, + "grad_norm": 5.047780685821234, + "learning_rate": 9.676674364896075e-06, + "loss": 0.4796, + "step": 419 + }, + { + "epoch": 0.02916261630329121, + "grad_norm": 6.069627117715251, + "learning_rate": 9.699769053117783e-06, + "loss": 0.4571, + "step": 420 + }, + { + "epoch": 0.02923205110401333, + "grad_norm": 6.496502890553569, + "learning_rate": 9.722863741339492e-06, + "loss": 0.8334, + "step": 421 + }, + { + "epoch": 0.029301485904735452, + "grad_norm": 5.687072942120927, + "learning_rate": 9.745958429561202e-06, + "loss": 0.6352, + "step": 422 + }, + { + "epoch": 0.029370920705457576, + "grad_norm": 6.279007178298347, + "learning_rate": 9.76905311778291e-06, + "loss": 0.7161, + "step": 423 + }, + { + "epoch": 0.029440355506179697, + "grad_norm": 4.882604390084902, + "learning_rate": 9.792147806004619e-06, + "loss": 0.5925, + "step": 424 + }, + { + "epoch": 
0.029509790306901817, + "grad_norm": 5.593126759659433, + "learning_rate": 9.815242494226329e-06, + "loss": 0.5873, + "step": 425 + }, + { + "epoch": 0.02957922510762394, + "grad_norm": 6.904887221887285, + "learning_rate": 9.838337182448037e-06, + "loss": 0.5615, + "step": 426 + }, + { + "epoch": 0.029648659908346062, + "grad_norm": 7.180240302055389, + "learning_rate": 9.861431870669747e-06, + "loss": 0.5184, + "step": 427 + }, + { + "epoch": 0.029718094709068187, + "grad_norm": 7.498208464056241, + "learning_rate": 9.884526558891456e-06, + "loss": 0.7807, + "step": 428 + }, + { + "epoch": 0.029787529509790307, + "grad_norm": 4.391965369061992, + "learning_rate": 9.907621247113166e-06, + "loss": 0.4396, + "step": 429 + }, + { + "epoch": 0.029856964310512428, + "grad_norm": 7.582962848348381, + "learning_rate": 9.930715935334874e-06, + "loss": 0.7123, + "step": 430 + }, + { + "epoch": 0.029926399111234552, + "grad_norm": 5.231346626173306, + "learning_rate": 9.953810623556582e-06, + "loss": 0.4976, + "step": 431 + }, + { + "epoch": 0.029995833911956673, + "grad_norm": 5.0576953003156255, + "learning_rate": 9.976905311778292e-06, + "loss": 0.5557, + "step": 432 + }, + { + "epoch": 0.030065268712678794, + "grad_norm": 6.9069570660612145, + "learning_rate": 1e-05, + "loss": 0.7385, + "step": 433 + }, + { + "epoch": 0.030134703513400918, + "grad_norm": 7.778669181247511, + "learning_rate": 9.99999987355283e-06, + "loss": 0.6871, + "step": 434 + }, + { + "epoch": 0.03020413831412304, + "grad_norm": 5.357549502532245, + "learning_rate": 9.999999494211324e-06, + "loss": 0.8859, + "step": 435 + }, + { + "epoch": 0.03027357311484516, + "grad_norm": 6.284819787134612, + "learning_rate": 9.9999988619755e-06, + "loss": 0.5724, + "step": 436 + }, + { + "epoch": 0.030343007915567283, + "grad_norm": 6.831598996626386, + "learning_rate": 9.999997976845392e-06, + "loss": 0.6657, + "step": 437 + }, + { + "epoch": 0.030412442716289404, + "grad_norm": 6.27780986608783, + "learning_rate": 9.999996838821044e-06, + "loss": 0.7065, + "step": 438 + }, + { + "epoch": 0.030481877517011525, + "grad_norm": 5.045048202924045, + "learning_rate": 9.999995447902514e-06, + "loss": 0.5953, + "step": 439 + }, + { + "epoch": 0.03055131231773365, + "grad_norm": 5.842654195692098, + "learning_rate": 9.999993804089873e-06, + "loss": 0.6818, + "step": 440 + }, + { + "epoch": 0.03062074711845577, + "grad_norm": 6.460290689885131, + "learning_rate": 9.999991907383202e-06, + "loss": 0.6786, + "step": 441 + }, + { + "epoch": 0.03069018191917789, + "grad_norm": 2.861495331541182, + "learning_rate": 9.999989757782599e-06, + "loss": 0.1267, + "step": 442 + }, + { + "epoch": 0.030759616719900015, + "grad_norm": 5.029345903596466, + "learning_rate": 9.999987355288172e-06, + "loss": 0.4737, + "step": 443 + }, + { + "epoch": 0.030829051520622135, + "grad_norm": 6.080178030290366, + "learning_rate": 9.999984699900042e-06, + "loss": 0.5977, + "step": 444 + }, + { + "epoch": 0.030898486321344256, + "grad_norm": 6.755300927136362, + "learning_rate": 9.999981791618343e-06, + "loss": 0.8255, + "step": 445 + }, + { + "epoch": 0.03096792112206638, + "grad_norm": 6.768465028511879, + "learning_rate": 9.999978630443223e-06, + "loss": 0.5011, + "step": 446 + }, + { + "epoch": 0.0310373559227885, + "grad_norm": 6.5119485992982185, + "learning_rate": 9.999975216374843e-06, + "loss": 0.5937, + "step": 447 + }, + { + "epoch": 0.031106790723510625, + "grad_norm": 3.6145781303228954, + "learning_rate": 9.999971549413374e-06, + "loss": 0.2656, + "step": 448 
+ }, + { + "epoch": 0.031176225524232746, + "grad_norm": 5.395036276138725, + "learning_rate": 9.999967629559002e-06, + "loss": 0.6524, + "step": 449 + }, + { + "epoch": 0.031245660324954867, + "grad_norm": 6.3267192533831915, + "learning_rate": 9.999963456811926e-06, + "loss": 0.6883, + "step": 450 + }, + { + "epoch": 0.03131509512567699, + "grad_norm": 5.323953199320294, + "learning_rate": 9.999959031172353e-06, + "loss": 0.6204, + "step": 451 + }, + { + "epoch": 0.03138452992639911, + "grad_norm": 6.599704105261902, + "learning_rate": 9.999954352640512e-06, + "loss": 0.7781, + "step": 452 + }, + { + "epoch": 0.03145396472712123, + "grad_norm": 4.986954276383524, + "learning_rate": 9.999949421216638e-06, + "loss": 0.4345, + "step": 453 + }, + { + "epoch": 0.03152339952784335, + "grad_norm": 5.247116121837747, + "learning_rate": 9.999944236900981e-06, + "loss": 0.61, + "step": 454 + }, + { + "epoch": 0.031592834328565474, + "grad_norm": 6.211583434456717, + "learning_rate": 9.9999387996938e-06, + "loss": 0.6007, + "step": 455 + }, + { + "epoch": 0.0316622691292876, + "grad_norm": 5.668713988535656, + "learning_rate": 9.999933109595375e-06, + "loss": 0.4547, + "step": 456 + }, + { + "epoch": 0.03173170393000972, + "grad_norm": 6.079668473959285, + "learning_rate": 9.999927166605988e-06, + "loss": 0.6103, + "step": 457 + }, + { + "epoch": 0.03180113873073184, + "grad_norm": 6.064849792893844, + "learning_rate": 9.999920970725945e-06, + "loss": 0.7693, + "step": 458 + }, + { + "epoch": 0.03187057353145396, + "grad_norm": 5.738259981227205, + "learning_rate": 9.999914521955557e-06, + "loss": 0.5279, + "step": 459 + }, + { + "epoch": 0.031940008332176084, + "grad_norm": 4.5610230068039925, + "learning_rate": 9.99990782029515e-06, + "loss": 0.4056, + "step": 460 + }, + { + "epoch": 0.03200944313289821, + "grad_norm": 6.936309822383197, + "learning_rate": 9.999900865745061e-06, + "loss": 0.818, + "step": 461 + }, + { + "epoch": 0.03207887793362033, + "grad_norm": 6.286528207571545, + "learning_rate": 9.999893658305646e-06, + "loss": 0.6776, + "step": 462 + }, + { + "epoch": 0.03214831273434245, + "grad_norm": 6.176488771304935, + "learning_rate": 9.999886197977267e-06, + "loss": 0.5587, + "step": 463 + }, + { + "epoch": 0.032217747535064574, + "grad_norm": 5.627431279771351, + "learning_rate": 9.999878484760304e-06, + "loss": 0.5378, + "step": 464 + }, + { + "epoch": 0.032287182335786695, + "grad_norm": 5.519314095876356, + "learning_rate": 9.999870518655142e-06, + "loss": 0.3596, + "step": 465 + }, + { + "epoch": 0.032356617136508815, + "grad_norm": 5.402724973265845, + "learning_rate": 9.999862299662188e-06, + "loss": 0.3665, + "step": 466 + }, + { + "epoch": 0.03242605193723094, + "grad_norm": 6.3812711140790075, + "learning_rate": 9.999853827781857e-06, + "loss": 0.8771, + "step": 467 + }, + { + "epoch": 0.032495486737953064, + "grad_norm": 5.9338189617621415, + "learning_rate": 9.999845103014577e-06, + "loss": 0.6081, + "step": 468 + }, + { + "epoch": 0.032564921538675184, + "grad_norm": 5.883826060939156, + "learning_rate": 9.999836125360789e-06, + "loss": 0.6843, + "step": 469 + }, + { + "epoch": 0.032634356339397305, + "grad_norm": 5.411641552716915, + "learning_rate": 9.999826894820947e-06, + "loss": 0.4745, + "step": 470 + }, + { + "epoch": 0.032703791140119426, + "grad_norm": 6.692910460347541, + "learning_rate": 9.99981741139552e-06, + "loss": 0.9, + "step": 471 + }, + { + "epoch": 0.03277322594084155, + "grad_norm": 5.961849396614783, + "learning_rate": 9.999807675084984e-06, + 
"loss": 0.5387, + "step": 472 + }, + { + "epoch": 0.032842660741563674, + "grad_norm": 6.689824686070395, + "learning_rate": 9.999797685889836e-06, + "loss": 0.6502, + "step": 473 + }, + { + "epoch": 0.032912095542285795, + "grad_norm": 6.058811668661387, + "learning_rate": 9.999787443810576e-06, + "loss": 0.6155, + "step": 474 + }, + { + "epoch": 0.032981530343007916, + "grad_norm": 5.9874867549465405, + "learning_rate": 9.999776948847727e-06, + "loss": 0.6768, + "step": 475 + }, + { + "epoch": 0.033050965143730036, + "grad_norm": 5.651512684905336, + "learning_rate": 9.999766201001817e-06, + "loss": 0.4318, + "step": 476 + }, + { + "epoch": 0.03312039994445216, + "grad_norm": 5.028687361869108, + "learning_rate": 9.999755200273389e-06, + "loss": 0.5417, + "step": 477 + }, + { + "epoch": 0.033189834745174285, + "grad_norm": 5.126187229503819, + "learning_rate": 9.999743946663002e-06, + "loss": 0.3153, + "step": 478 + }, + { + "epoch": 0.033259269545896405, + "grad_norm": 5.257781775734921, + "learning_rate": 9.999732440171223e-06, + "loss": 0.5462, + "step": 479 + }, + { + "epoch": 0.033328704346618526, + "grad_norm": 5.255353219812936, + "learning_rate": 9.999720680798635e-06, + "loss": 0.5243, + "step": 480 + }, + { + "epoch": 0.03339813914734065, + "grad_norm": 5.9253259357897985, + "learning_rate": 9.999708668545831e-06, + "loss": 0.5819, + "step": 481 + }, + { + "epoch": 0.03346757394806277, + "grad_norm": 6.184543080330575, + "learning_rate": 9.999696403413423e-06, + "loss": 0.6708, + "step": 482 + }, + { + "epoch": 0.03353700874878489, + "grad_norm": 6.024552485805366, + "learning_rate": 9.999683885402026e-06, + "loss": 0.5302, + "step": 483 + }, + { + "epoch": 0.033606443549507016, + "grad_norm": 7.399753261509762, + "learning_rate": 9.999671114512277e-06, + "loss": 0.4729, + "step": 484 + }, + { + "epoch": 0.03367587835022914, + "grad_norm": 5.55269525780665, + "learning_rate": 9.999658090744818e-06, + "loss": 0.4701, + "step": 485 + }, + { + "epoch": 0.03374531315095126, + "grad_norm": 5.199497229607478, + "learning_rate": 9.999644814100313e-06, + "loss": 0.3629, + "step": 486 + }, + { + "epoch": 0.03381474795167338, + "grad_norm": 6.080533607320299, + "learning_rate": 9.999631284579433e-06, + "loss": 0.7568, + "step": 487 + }, + { + "epoch": 0.0338841827523955, + "grad_norm": 6.018959158711198, + "learning_rate": 9.999617502182854e-06, + "loss": 0.5558, + "step": 488 + }, + { + "epoch": 0.03395361755311762, + "grad_norm": 5.674459181219298, + "learning_rate": 9.999603466911284e-06, + "loss": 0.4269, + "step": 489 + }, + { + "epoch": 0.03402305235383975, + "grad_norm": 6.274947389477818, + "learning_rate": 9.999589178765426e-06, + "loss": 0.5257, + "step": 490 + }, + { + "epoch": 0.03409248715456187, + "grad_norm": 4.986999759560102, + "learning_rate": 9.999574637746006e-06, + "loss": 0.3431, + "step": 491 + }, + { + "epoch": 0.03416192195528399, + "grad_norm": 5.965796935867004, + "learning_rate": 9.999559843853756e-06, + "loss": 0.8307, + "step": 492 + }, + { + "epoch": 0.03423135675600611, + "grad_norm": 6.066166717319229, + "learning_rate": 9.999544797089428e-06, + "loss": 0.4083, + "step": 493 + }, + { + "epoch": 0.03430079155672823, + "grad_norm": 6.165297196657152, + "learning_rate": 9.999529497453782e-06, + "loss": 0.741, + "step": 494 + }, + { + "epoch": 0.03437022635745035, + "grad_norm": 7.944318113381601, + "learning_rate": 9.999513944947591e-06, + "loss": 0.8734, + "step": 495 + }, + { + "epoch": 0.03443966115817248, + "grad_norm": 7.618278080480178, + 
"learning_rate": 9.999498139571642e-06, + "loss": 0.3951, + "step": 496 + }, + { + "epoch": 0.0345090959588946, + "grad_norm": 5.624323598064029, + "learning_rate": 9.999482081326734e-06, + "loss": 0.4671, + "step": 497 + }, + { + "epoch": 0.03457853075961672, + "grad_norm": 6.163382158823257, + "learning_rate": 9.99946577021368e-06, + "loss": 0.7706, + "step": 498 + }, + { + "epoch": 0.03464796556033884, + "grad_norm": 5.860245938439233, + "learning_rate": 9.999449206233305e-06, + "loss": 0.4915, + "step": 499 + }, + { + "epoch": 0.03471740036106096, + "grad_norm": 6.531519831041931, + "learning_rate": 9.999432389386445e-06, + "loss": 0.6789, + "step": 500 + }, + { + "epoch": 0.03478683516178309, + "grad_norm": 4.352611016790576, + "learning_rate": 9.999415319673953e-06, + "loss": 0.5366, + "step": 501 + }, + { + "epoch": 0.03485626996250521, + "grad_norm": 6.771348467977927, + "learning_rate": 9.999397997096692e-06, + "loss": 0.8456, + "step": 502 + }, + { + "epoch": 0.03492570476322733, + "grad_norm": 4.854548394309112, + "learning_rate": 9.999380421655535e-06, + "loss": 0.336, + "step": 503 + }, + { + "epoch": 0.03499513956394945, + "grad_norm": 5.748395146004432, + "learning_rate": 9.999362593351376e-06, + "loss": 0.6206, + "step": 504 + }, + { + "epoch": 0.03506457436467157, + "grad_norm": 7.4122792634318895, + "learning_rate": 9.999344512185114e-06, + "loss": 0.7026, + "step": 505 + }, + { + "epoch": 0.03513400916539369, + "grad_norm": 5.46496670740742, + "learning_rate": 9.999326178157662e-06, + "loss": 0.6925, + "step": 506 + }, + { + "epoch": 0.03520344396611582, + "grad_norm": 6.658261234910614, + "learning_rate": 9.999307591269951e-06, + "loss": 0.5679, + "step": 507 + }, + { + "epoch": 0.03527287876683794, + "grad_norm": 5.145984865551088, + "learning_rate": 9.999288751522918e-06, + "loss": 0.4936, + "step": 508 + }, + { + "epoch": 0.03534231356756006, + "grad_norm": 6.230757230212693, + "learning_rate": 9.999269658917517e-06, + "loss": 0.8232, + "step": 509 + }, + { + "epoch": 0.03541174836828218, + "grad_norm": 4.704307621881029, + "learning_rate": 9.999250313454714e-06, + "loss": 0.33, + "step": 510 + }, + { + "epoch": 0.0354811831690043, + "grad_norm": 6.333297779913553, + "learning_rate": 9.999230715135488e-06, + "loss": 0.7925, + "step": 511 + }, + { + "epoch": 0.035550617969726424, + "grad_norm": 4.931566885802384, + "learning_rate": 9.99921086396083e-06, + "loss": 0.5662, + "step": 512 + }, + { + "epoch": 0.03562005277044855, + "grad_norm": 6.6737933855309075, + "learning_rate": 9.999190759931741e-06, + "loss": 0.7374, + "step": 513 + }, + { + "epoch": 0.03568948757117067, + "grad_norm": 5.588899464065289, + "learning_rate": 9.99917040304924e-06, + "loss": 0.6212, + "step": 514 + }, + { + "epoch": 0.03575892237189279, + "grad_norm": 6.2235116182363175, + "learning_rate": 9.99914979331436e-06, + "loss": 0.5285, + "step": 515 + }, + { + "epoch": 0.035828357172614914, + "grad_norm": 6.292876061387508, + "learning_rate": 9.999128930728138e-06, + "loss": 0.5399, + "step": 516 + }, + { + "epoch": 0.035897791973337034, + "grad_norm": 5.228767276828957, + "learning_rate": 9.999107815291632e-06, + "loss": 0.4125, + "step": 517 + }, + { + "epoch": 0.035967226774059155, + "grad_norm": 5.313882172315606, + "learning_rate": 9.99908644700591e-06, + "loss": 0.5639, + "step": 518 + }, + { + "epoch": 0.03603666157478128, + "grad_norm": 5.772639537012846, + "learning_rate": 9.999064825872052e-06, + "loss": 0.6747, + "step": 519 + }, + { + "epoch": 0.0361060963755034, + "grad_norm": 
6.22948282300013, + "learning_rate": 9.999042951891152e-06, + "loss": 0.5887, + "step": 520 + }, + { + "epoch": 0.036175531176225524, + "grad_norm": 6.013811875298543, + "learning_rate": 9.999020825064318e-06, + "loss": 0.6529, + "step": 521 + }, + { + "epoch": 0.036244965976947645, + "grad_norm": 5.525633323920022, + "learning_rate": 9.998998445392665e-06, + "loss": 0.6372, + "step": 522 + }, + { + "epoch": 0.036314400777669766, + "grad_norm": 5.230982960847445, + "learning_rate": 9.998975812877328e-06, + "loss": 0.5117, + "step": 523 + }, + { + "epoch": 0.03638383557839189, + "grad_norm": 5.419072106730317, + "learning_rate": 9.998952927519452e-06, + "loss": 0.5176, + "step": 524 + }, + { + "epoch": 0.036453270379114014, + "grad_norm": 6.494550656664596, + "learning_rate": 9.998929789320192e-06, + "loss": 0.7149, + "step": 525 + }, + { + "epoch": 0.036522705179836135, + "grad_norm": 6.302959404371616, + "learning_rate": 9.998906398280723e-06, + "loss": 0.5371, + "step": 526 + }, + { + "epoch": 0.036592139980558255, + "grad_norm": 5.494105037980103, + "learning_rate": 9.998882754402221e-06, + "loss": 0.3644, + "step": 527 + }, + { + "epoch": 0.036661574781280376, + "grad_norm": 6.778447536483514, + "learning_rate": 9.99885885768589e-06, + "loss": 0.779, + "step": 528 + }, + { + "epoch": 0.0367310095820025, + "grad_norm": 3.822710549909421, + "learning_rate": 9.998834708132932e-06, + "loss": 0.2716, + "step": 529 + }, + { + "epoch": 0.036800444382724624, + "grad_norm": 5.709501580025093, + "learning_rate": 9.998810305744571e-06, + "loss": 0.4167, + "step": 530 + }, + { + "epoch": 0.036869879183446745, + "grad_norm": 5.083773330678954, + "learning_rate": 9.998785650522041e-06, + "loss": 0.5782, + "step": 531 + }, + { + "epoch": 0.036939313984168866, + "grad_norm": 5.079229343723481, + "learning_rate": 9.998760742466591e-06, + "loss": 0.2723, + "step": 532 + }, + { + "epoch": 0.03700874878489099, + "grad_norm": 7.414963449830528, + "learning_rate": 9.998735581579478e-06, + "loss": 0.8686, + "step": 533 + }, + { + "epoch": 0.03707818358561311, + "grad_norm": 5.017802446829348, + "learning_rate": 9.998710167861976e-06, + "loss": 0.4596, + "step": 534 + }, + { + "epoch": 0.03714761838633523, + "grad_norm": 5.1978148470449685, + "learning_rate": 9.99868450131537e-06, + "loss": 0.6486, + "step": 535 + }, + { + "epoch": 0.037217053187057356, + "grad_norm": 6.365903071705479, + "learning_rate": 9.998658581940959e-06, + "loss": 0.6638, + "step": 536 + }, + { + "epoch": 0.037286487987779476, + "grad_norm": 6.304925677062259, + "learning_rate": 9.998632409740052e-06, + "loss": 0.6632, + "step": 537 + }, + { + "epoch": 0.0373559227885016, + "grad_norm": 4.71526936573796, + "learning_rate": 9.998605984713975e-06, + "loss": 0.5156, + "step": 538 + }, + { + "epoch": 0.03742535758922372, + "grad_norm": 5.306462620702109, + "learning_rate": 9.998579306864062e-06, + "loss": 0.4895, + "step": 539 + }, + { + "epoch": 0.03749479238994584, + "grad_norm": 6.640155197930903, + "learning_rate": 9.998552376191665e-06, + "loss": 0.76, + "step": 540 + }, + { + "epoch": 0.037564227190667966, + "grad_norm": 6.407428092082328, + "learning_rate": 9.998525192698148e-06, + "loss": 0.6922, + "step": 541 + }, + { + "epoch": 0.03763366199139009, + "grad_norm": 6.319311262754528, + "learning_rate": 9.99849775638488e-06, + "loss": 0.718, + "step": 542 + }, + { + "epoch": 0.03770309679211221, + "grad_norm": 5.646072684540412, + "learning_rate": 9.998470067253251e-06, + "loss": 0.4708, + "step": 543 + }, + { + "epoch": 
0.03777253159283433, + "grad_norm": 6.386419015831803, + "learning_rate": 9.998442125304664e-06, + "loss": 0.6598, + "step": 544 + }, + { + "epoch": 0.03784196639355645, + "grad_norm": 4.795924051829016, + "learning_rate": 9.99841393054053e-06, + "loss": 0.4261, + "step": 545 + }, + { + "epoch": 0.03791140119427857, + "grad_norm": 6.026562026792236, + "learning_rate": 9.998385482962274e-06, + "loss": 0.8194, + "step": 546 + }, + { + "epoch": 0.0379808359950007, + "grad_norm": 4.335606456591261, + "learning_rate": 9.998356782571336e-06, + "loss": 0.3986, + "step": 547 + }, + { + "epoch": 0.03805027079572282, + "grad_norm": 5.1976334497067995, + "learning_rate": 9.99832782936917e-06, + "loss": 0.5406, + "step": 548 + }, + { + "epoch": 0.03811970559644494, + "grad_norm": 6.08563476776115, + "learning_rate": 9.998298623357238e-06, + "loss": 0.7269, + "step": 549 + }, + { + "epoch": 0.03818914039716706, + "grad_norm": 5.65762912255376, + "learning_rate": 9.998269164537018e-06, + "loss": 0.3486, + "step": 550 + }, + { + "epoch": 0.03825857519788918, + "grad_norm": 6.651051283287734, + "learning_rate": 9.998239452909998e-06, + "loss": 0.6524, + "step": 551 + }, + { + "epoch": 0.0383280099986113, + "grad_norm": 6.3433191654376815, + "learning_rate": 9.998209488477683e-06, + "loss": 0.9663, + "step": 552 + }, + { + "epoch": 0.03839744479933343, + "grad_norm": 4.658874637603492, + "learning_rate": 9.998179271241587e-06, + "loss": 0.4584, + "step": 553 + }, + { + "epoch": 0.03846687960005555, + "grad_norm": 5.003904426291276, + "learning_rate": 9.998148801203242e-06, + "loss": 0.564, + "step": 554 + }, + { + "epoch": 0.03853631440077767, + "grad_norm": 5.813184614200642, + "learning_rate": 9.998118078364186e-06, + "loss": 0.5276, + "step": 555 + }, + { + "epoch": 0.03860574920149979, + "grad_norm": 6.217352664495251, + "learning_rate": 9.99808710272597e-06, + "loss": 0.7469, + "step": 556 + }, + { + "epoch": 0.03867518400222191, + "grad_norm": 6.362511327994057, + "learning_rate": 9.998055874290166e-06, + "loss": 0.693, + "step": 557 + }, + { + "epoch": 0.03874461880294403, + "grad_norm": 5.697222739415906, + "learning_rate": 9.998024393058353e-06, + "loss": 0.4795, + "step": 558 + }, + { + "epoch": 0.03881405360366616, + "grad_norm": 5.584251313085637, + "learning_rate": 9.99799265903212e-06, + "loss": 0.6306, + "step": 559 + }, + { + "epoch": 0.03888348840438828, + "grad_norm": 5.907968337914951, + "learning_rate": 9.997960672213076e-06, + "loss": 0.5679, + "step": 560 + }, + { + "epoch": 0.0389529232051104, + "grad_norm": 5.654380218521138, + "learning_rate": 9.997928432602834e-06, + "loss": 0.6453, + "step": 561 + }, + { + "epoch": 0.03902235800583252, + "grad_norm": 5.579514915789915, + "learning_rate": 9.99789594020303e-06, + "loss": 0.58, + "step": 562 + }, + { + "epoch": 0.03909179280655464, + "grad_norm": 3.797666493065679, + "learning_rate": 9.997863195015303e-06, + "loss": 0.4575, + "step": 563 + }, + { + "epoch": 0.03916122760727677, + "grad_norm": 5.883390941564609, + "learning_rate": 9.997830197041312e-06, + "loss": 0.3515, + "step": 564 + }, + { + "epoch": 0.03923066240799889, + "grad_norm": 5.94720735536341, + "learning_rate": 9.997796946282725e-06, + "loss": 0.7877, + "step": 565 + }, + { + "epoch": 0.03930009720872101, + "grad_norm": 7.15133606832761, + "learning_rate": 9.997763442741225e-06, + "loss": 0.9724, + "step": 566 + }, + { + "epoch": 0.03936953200944313, + "grad_norm": 5.575033621973364, + "learning_rate": 9.997729686418502e-06, + "loss": 0.441, + "step": 567 + }, + { + 
"epoch": 0.03943896681016525, + "grad_norm": 4.833553273250969, + "learning_rate": 9.99769567731627e-06, + "loss": 0.5724, + "step": 568 + }, + { + "epoch": 0.039508401610887374, + "grad_norm": 6.013917168462498, + "learning_rate": 9.997661415436244e-06, + "loss": 0.7292, + "step": 569 + }, + { + "epoch": 0.0395778364116095, + "grad_norm": 4.3930702335539955, + "learning_rate": 9.99762690078016e-06, + "loss": 0.3436, + "step": 570 + }, + { + "epoch": 0.03964727121233162, + "grad_norm": 5.7270457492432145, + "learning_rate": 9.997592133349761e-06, + "loss": 0.5271, + "step": 571 + }, + { + "epoch": 0.03971670601305374, + "grad_norm": 5.407928229591425, + "learning_rate": 9.997557113146808e-06, + "loss": 0.6306, + "step": 572 + }, + { + "epoch": 0.039786140813775864, + "grad_norm": 7.0475262448294025, + "learning_rate": 9.997521840173073e-06, + "loss": 0.6983, + "step": 573 + }, + { + "epoch": 0.039855575614497984, + "grad_norm": 5.745509503266648, + "learning_rate": 9.997486314430336e-06, + "loss": 0.525, + "step": 574 + }, + { + "epoch": 0.039925010415220105, + "grad_norm": 7.301695667997415, + "learning_rate": 9.997450535920397e-06, + "loss": 0.7057, + "step": 575 + }, + { + "epoch": 0.03999444521594223, + "grad_norm": 6.551702374771802, + "learning_rate": 9.997414504645066e-06, + "loss": 1.1228, + "step": 576 + }, + { + "epoch": 0.040063880016664354, + "grad_norm": 5.596269245405725, + "learning_rate": 9.997378220606162e-06, + "loss": 0.6745, + "step": 577 + }, + { + "epoch": 0.040133314817386474, + "grad_norm": 3.63566918066838, + "learning_rate": 9.997341683805525e-06, + "loss": 0.3247, + "step": 578 + }, + { + "epoch": 0.040202749618108595, + "grad_norm": 6.543727455537456, + "learning_rate": 9.997304894245e-06, + "loss": 0.6967, + "step": 579 + }, + { + "epoch": 0.040272184418830716, + "grad_norm": 4.388741000208411, + "learning_rate": 9.997267851926446e-06, + "loss": 0.3417, + "step": 580 + }, + { + "epoch": 0.040341619219552836, + "grad_norm": 7.020275206785026, + "learning_rate": 9.997230556851742e-06, + "loss": 0.8831, + "step": 581 + }, + { + "epoch": 0.040411054020274964, + "grad_norm": 6.189141402572488, + "learning_rate": 9.997193009022768e-06, + "loss": 0.7277, + "step": 582 + }, + { + "epoch": 0.040480488820997085, + "grad_norm": 5.595771903656124, + "learning_rate": 9.997155208441429e-06, + "loss": 0.3981, + "step": 583 + }, + { + "epoch": 0.040549923621719206, + "grad_norm": 5.840701643978763, + "learning_rate": 9.997117155109633e-06, + "loss": 0.6628, + "step": 584 + }, + { + "epoch": 0.040619358422441326, + "grad_norm": 6.484513350752704, + "learning_rate": 9.997078849029307e-06, + "loss": 0.6173, + "step": 585 + }, + { + "epoch": 0.04068879322316345, + "grad_norm": 4.832760592513056, + "learning_rate": 9.997040290202385e-06, + "loss": 0.4516, + "step": 586 + }, + { + "epoch": 0.040758228023885575, + "grad_norm": 4.768738192266674, + "learning_rate": 9.99700147863082e-06, + "loss": 0.4889, + "step": 587 + }, + { + "epoch": 0.040827662824607695, + "grad_norm": 6.458325425739323, + "learning_rate": 9.996962414316578e-06, + "loss": 0.522, + "step": 588 + }, + { + "epoch": 0.040897097625329816, + "grad_norm": 5.057316576087953, + "learning_rate": 9.99692309726163e-06, + "loss": 0.8534, + "step": 589 + }, + { + "epoch": 0.04096653242605194, + "grad_norm": 4.470762887557778, + "learning_rate": 9.996883527467965e-06, + "loss": 0.4179, + "step": 590 + }, + { + "epoch": 0.04103596722677406, + "grad_norm": 6.765726662496632, + "learning_rate": 9.996843704937587e-06, + "loss": 
0.5624, + "step": 591 + }, + { + "epoch": 0.04110540202749618, + "grad_norm": 4.990741793173546, + "learning_rate": 9.996803629672506e-06, + "loss": 0.5746, + "step": 592 + }, + { + "epoch": 0.041174836828218306, + "grad_norm": 5.331974763473855, + "learning_rate": 9.996763301674754e-06, + "loss": 0.7352, + "step": 593 + }, + { + "epoch": 0.04124427162894043, + "grad_norm": 4.8496698570403005, + "learning_rate": 9.996722720946368e-06, + "loss": 0.492, + "step": 594 + }, + { + "epoch": 0.04131370642966255, + "grad_norm": 5.3242084775693845, + "learning_rate": 9.9966818874894e-06, + "loss": 0.2868, + "step": 595 + }, + { + "epoch": 0.04138314123038467, + "grad_norm": 4.398737550239831, + "learning_rate": 9.996640801305917e-06, + "loss": 0.5425, + "step": 596 + }, + { + "epoch": 0.04145257603110679, + "grad_norm": 4.2244766870285355, + "learning_rate": 9.996599462397997e-06, + "loss": 0.2951, + "step": 597 + }, + { + "epoch": 0.04152201083182891, + "grad_norm": 4.3066984828806465, + "learning_rate": 9.99655787076773e-06, + "loss": 0.4191, + "step": 598 + }, + { + "epoch": 0.04159144563255104, + "grad_norm": 5.688637374956635, + "learning_rate": 9.99651602641722e-06, + "loss": 0.6695, + "step": 599 + }, + { + "epoch": 0.04166088043327316, + "grad_norm": 4.844937401261487, + "learning_rate": 9.996473929348583e-06, + "loss": 0.6308, + "step": 600 + }, + { + "epoch": 0.04173031523399528, + "grad_norm": 4.938870732358953, + "learning_rate": 9.996431579563947e-06, + "loss": 0.8417, + "step": 601 + }, + { + "epoch": 0.0417997500347174, + "grad_norm": 4.0279850075318056, + "learning_rate": 9.99638897706546e-06, + "loss": 0.2207, + "step": 602 + }, + { + "epoch": 0.04186918483543952, + "grad_norm": 6.600400974556559, + "learning_rate": 9.996346121855266e-06, + "loss": 0.7621, + "step": 603 + }, + { + "epoch": 0.04193861963616165, + "grad_norm": 6.486423792075853, + "learning_rate": 9.996303013935545e-06, + "loss": 0.2614, + "step": 604 + }, + { + "epoch": 0.04200805443688377, + "grad_norm": 4.910369192845338, + "learning_rate": 9.996259653308466e-06, + "loss": 0.5273, + "step": 605 + }, + { + "epoch": 0.04207748923760589, + "grad_norm": 5.247117900741713, + "learning_rate": 9.996216039976231e-06, + "loss": 0.6113, + "step": 606 + }, + { + "epoch": 0.04214692403832801, + "grad_norm": 5.242787371137216, + "learning_rate": 9.99617217394104e-06, + "loss": 0.7545, + "step": 607 + }, + { + "epoch": 0.04221635883905013, + "grad_norm": 7.1495405431187855, + "learning_rate": 9.996128055205117e-06, + "loss": 0.8877, + "step": 608 + }, + { + "epoch": 0.04228579363977225, + "grad_norm": 5.234100840499188, + "learning_rate": 9.996083683770688e-06, + "loss": 0.41, + "step": 609 + }, + { + "epoch": 0.04235522844049438, + "grad_norm": 5.792141595305357, + "learning_rate": 9.996039059640002e-06, + "loss": 0.6725, + "step": 610 + }, + { + "epoch": 0.0424246632412165, + "grad_norm": 5.836904810788175, + "learning_rate": 9.99599418281531e-06, + "loss": 0.8989, + "step": 611 + }, + { + "epoch": 0.04249409804193862, + "grad_norm": 5.84631547095041, + "learning_rate": 9.995949053298889e-06, + "loss": 0.8644, + "step": 612 + }, + { + "epoch": 0.04256353284266074, + "grad_norm": 5.025589687994242, + "learning_rate": 9.995903671093017e-06, + "loss": 0.3413, + "step": 613 + }, + { + "epoch": 0.04263296764338286, + "grad_norm": 5.786687074635843, + "learning_rate": 9.99585803619999e-06, + "loss": 0.7646, + "step": 614 + }, + { + "epoch": 0.04270240244410498, + "grad_norm": 6.002052351059022, + "learning_rate": 
9.995812148622117e-06, + "loss": 0.732, + "step": 615 + }, + { + "epoch": 0.04277183724482711, + "grad_norm": 5.278909763533911, + "learning_rate": 9.99576600836172e-06, + "loss": 0.4222, + "step": 616 + }, + { + "epoch": 0.04284127204554923, + "grad_norm": 6.083448501144449, + "learning_rate": 9.995719615421129e-06, + "loss": 0.5847, + "step": 617 + }, + { + "epoch": 0.04291070684627135, + "grad_norm": 5.1123005644540145, + "learning_rate": 9.995672969802694e-06, + "loss": 0.6799, + "step": 618 + }, + { + "epoch": 0.04298014164699347, + "grad_norm": 7.301556957688154, + "learning_rate": 9.995626071508774e-06, + "loss": 0.8487, + "step": 619 + }, + { + "epoch": 0.04304957644771559, + "grad_norm": 6.706804976629173, + "learning_rate": 9.99557892054174e-06, + "loss": 0.9211, + "step": 620 + }, + { + "epoch": 0.043119011248437714, + "grad_norm": 5.616566740842066, + "learning_rate": 9.995531516903976e-06, + "loss": 0.5703, + "step": 621 + }, + { + "epoch": 0.04318844604915984, + "grad_norm": 5.373063444175822, + "learning_rate": 9.995483860597881e-06, + "loss": 0.8442, + "step": 622 + }, + { + "epoch": 0.04325788084988196, + "grad_norm": 4.895218416623463, + "learning_rate": 9.995435951625866e-06, + "loss": 0.2844, + "step": 623 + }, + { + "epoch": 0.04332731565060408, + "grad_norm": 6.1419272721031675, + "learning_rate": 9.995387789990352e-06, + "loss": 0.4993, + "step": 624 + }, + { + "epoch": 0.0433967504513262, + "grad_norm": 4.435039491462992, + "learning_rate": 9.995339375693778e-06, + "loss": 0.4735, + "step": 625 + }, + { + "epoch": 0.043466185252048324, + "grad_norm": 5.436198258551347, + "learning_rate": 9.99529070873859e-06, + "loss": 0.6172, + "step": 626 + }, + { + "epoch": 0.04353562005277045, + "grad_norm": 5.275137828235285, + "learning_rate": 9.99524178912725e-06, + "loss": 0.304, + "step": 627 + }, + { + "epoch": 0.04360505485349257, + "grad_norm": 5.757922178376902, + "learning_rate": 9.995192616862232e-06, + "loss": 0.5592, + "step": 628 + }, + { + "epoch": 0.04367448965421469, + "grad_norm": 5.250581508439782, + "learning_rate": 9.995143191946026e-06, + "loss": 0.5363, + "step": 629 + }, + { + "epoch": 0.043743924454936814, + "grad_norm": 6.3283396001910885, + "learning_rate": 9.99509351438113e-06, + "loss": 0.6936, + "step": 630 + }, + { + "epoch": 0.043813359255658935, + "grad_norm": 5.086353651351679, + "learning_rate": 9.995043584170054e-06, + "loss": 0.2576, + "step": 631 + }, + { + "epoch": 0.043882794056381055, + "grad_norm": 6.0505639592194775, + "learning_rate": 9.994993401315328e-06, + "loss": 0.6608, + "step": 632 + }, + { + "epoch": 0.04395222885710318, + "grad_norm": 4.266165030814394, + "learning_rate": 9.994942965819487e-06, + "loss": 0.3557, + "step": 633 + }, + { + "epoch": 0.044021663657825304, + "grad_norm": 6.289600481450322, + "learning_rate": 9.994892277685082e-06, + "loss": 0.5873, + "step": 634 + }, + { + "epoch": 0.044091098458547424, + "grad_norm": 6.085378916307771, + "learning_rate": 9.994841336914678e-06, + "loss": 0.7853, + "step": 635 + }, + { + "epoch": 0.044160533259269545, + "grad_norm": 5.5657995175392365, + "learning_rate": 9.994790143510853e-06, + "loss": 0.5294, + "step": 636 + }, + { + "epoch": 0.044229968059991666, + "grad_norm": 5.848892549029386, + "learning_rate": 9.994738697476194e-06, + "loss": 0.6143, + "step": 637 + }, + { + "epoch": 0.04429940286071379, + "grad_norm": 6.647537434047508, + "learning_rate": 9.994686998813302e-06, + "loss": 0.7232, + "step": 638 + }, + { + "epoch": 0.044368837661435914, + "grad_norm": 
5.117303661423518, + "learning_rate": 9.994635047524794e-06, + "loss": 0.5613, + "step": 639 + }, + { + "epoch": 0.044438272462158035, + "grad_norm": 5.082880709591779, + "learning_rate": 9.994582843613299e-06, + "loss": 0.5221, + "step": 640 + }, + { + "epoch": 0.044507707262880156, + "grad_norm": 5.752944526950684, + "learning_rate": 9.994530387081454e-06, + "loss": 0.6203, + "step": 641 + }, + { + "epoch": 0.044577142063602276, + "grad_norm": 5.43113467137701, + "learning_rate": 9.994477677931912e-06, + "loss": 0.5625, + "step": 642 + }, + { + "epoch": 0.0446465768643244, + "grad_norm": 5.710043828784028, + "learning_rate": 9.994424716167344e-06, + "loss": 0.5739, + "step": 643 + }, + { + "epoch": 0.044716011665046525, + "grad_norm": 4.461680661965069, + "learning_rate": 9.994371501790423e-06, + "loss": 0.4762, + "step": 644 + }, + { + "epoch": 0.044785446465768645, + "grad_norm": 6.681123482451172, + "learning_rate": 9.994318034803845e-06, + "loss": 0.9257, + "step": 645 + }, + { + "epoch": 0.044854881266490766, + "grad_norm": 3.8356650513087773, + "learning_rate": 9.994264315210311e-06, + "loss": 0.3248, + "step": 646 + }, + { + "epoch": 0.04492431606721289, + "grad_norm": 4.905152621803148, + "learning_rate": 9.99421034301254e-06, + "loss": 0.4264, + "step": 647 + }, + { + "epoch": 0.04499375086793501, + "grad_norm": 4.706602343878299, + "learning_rate": 9.994156118213261e-06, + "loss": 0.4325, + "step": 648 + }, + { + "epoch": 0.04506318566865713, + "grad_norm": 4.8466194806818335, + "learning_rate": 9.994101640815216e-06, + "loss": 0.7114, + "step": 649 + }, + { + "epoch": 0.045132620469379256, + "grad_norm": 4.839562791247465, + "learning_rate": 9.994046910821163e-06, + "loss": 0.4455, + "step": 650 + }, + { + "epoch": 0.04520205527010138, + "grad_norm": 5.3547027734374035, + "learning_rate": 9.993991928233866e-06, + "loss": 0.5754, + "step": 651 + }, + { + "epoch": 0.0452714900708235, + "grad_norm": 5.930482608884689, + "learning_rate": 9.99393669305611e-06, + "loss": 0.6949, + "step": 652 + }, + { + "epoch": 0.04534092487154562, + "grad_norm": 5.946020152314283, + "learning_rate": 9.993881205290685e-06, + "loss": 0.9579, + "step": 653 + }, + { + "epoch": 0.04541035967226774, + "grad_norm": 4.130156922877328, + "learning_rate": 9.993825464940402e-06, + "loss": 0.425, + "step": 654 + }, + { + "epoch": 0.04547979447298986, + "grad_norm": 5.762412804957007, + "learning_rate": 9.993769472008077e-06, + "loss": 0.6353, + "step": 655 + }, + { + "epoch": 0.04554922927371199, + "grad_norm": 5.055458561677381, + "learning_rate": 9.993713226496543e-06, + "loss": 0.4816, + "step": 656 + }, + { + "epoch": 0.04561866407443411, + "grad_norm": 5.523202897008252, + "learning_rate": 9.993656728408645e-06, + "loss": 0.8008, + "step": 657 + }, + { + "epoch": 0.04568809887515623, + "grad_norm": 5.99476635847649, + "learning_rate": 9.993599977747239e-06, + "loss": 0.6256, + "step": 658 + }, + { + "epoch": 0.04575753367587835, + "grad_norm": 5.921879574170726, + "learning_rate": 9.993542974515197e-06, + "loss": 0.7431, + "step": 659 + }, + { + "epoch": 0.04582696847660047, + "grad_norm": 6.626727056619924, + "learning_rate": 9.993485718715405e-06, + "loss": 0.5809, + "step": 660 + }, + { + "epoch": 0.04589640327732259, + "grad_norm": 4.925078096480798, + "learning_rate": 9.993428210350752e-06, + "loss": 0.6023, + "step": 661 + }, + { + "epoch": 0.04596583807804472, + "grad_norm": 6.49043590894669, + "learning_rate": 9.993370449424153e-06, + "loss": 0.7246, + "step": 662 + }, + { + "epoch": 
0.04603527287876684, + "grad_norm": 5.571292684133112, + "learning_rate": 9.993312435938528e-06, + "loss": 0.7258, + "step": 663 + }, + { + "epoch": 0.04610470767948896, + "grad_norm": 5.895604140670428, + "learning_rate": 9.993254169896807e-06, + "loss": 0.7167, + "step": 664 + }, + { + "epoch": 0.04617414248021108, + "grad_norm": 4.26586213385751, + "learning_rate": 9.993195651301942e-06, + "loss": 0.3716, + "step": 665 + }, + { + "epoch": 0.0462435772809332, + "grad_norm": 4.53897670668393, + "learning_rate": 9.99313688015689e-06, + "loss": 0.4756, + "step": 666 + }, + { + "epoch": 0.04631301208165533, + "grad_norm": 3.294704919455413, + "learning_rate": 9.993077856464627e-06, + "loss": 0.2139, + "step": 667 + }, + { + "epoch": 0.04638244688237745, + "grad_norm": 5.6353877185186425, + "learning_rate": 9.993018580228135e-06, + "loss": 0.6015, + "step": 668 + }, + { + "epoch": 0.04645188168309957, + "grad_norm": 5.134723049069843, + "learning_rate": 9.992959051450413e-06, + "loss": 0.7059, + "step": 669 + }, + { + "epoch": 0.04652131648382169, + "grad_norm": 4.430640977255875, + "learning_rate": 9.992899270134473e-06, + "loss": 0.347, + "step": 670 + }, + { + "epoch": 0.04659075128454381, + "grad_norm": 5.709162874616909, + "learning_rate": 9.992839236283336e-06, + "loss": 0.7561, + "step": 671 + }, + { + "epoch": 0.04666018608526593, + "grad_norm": 4.589483738764916, + "learning_rate": 9.992778949900042e-06, + "loss": 0.3547, + "step": 672 + }, + { + "epoch": 0.04672962088598806, + "grad_norm": 6.6286852586880505, + "learning_rate": 9.992718410987636e-06, + "loss": 0.7243, + "step": 673 + }, + { + "epoch": 0.04679905568671018, + "grad_norm": 5.472732735300226, + "learning_rate": 9.992657619549184e-06, + "loss": 0.6439, + "step": 674 + }, + { + "epoch": 0.0468684904874323, + "grad_norm": 5.186320391604662, + "learning_rate": 9.99259657558776e-06, + "loss": 0.6176, + "step": 675 + }, + { + "epoch": 0.04693792528815442, + "grad_norm": 6.525902983235542, + "learning_rate": 9.99253527910645e-06, + "loss": 0.5332, + "step": 676 + }, + { + "epoch": 0.04700736008887654, + "grad_norm": 5.230737744483306, + "learning_rate": 9.992473730108354e-06, + "loss": 0.489, + "step": 677 + }, + { + "epoch": 0.047076794889598664, + "grad_norm": 4.095183710870003, + "learning_rate": 9.992411928596588e-06, + "loss": 0.3977, + "step": 678 + }, + { + "epoch": 0.04714622969032079, + "grad_norm": 6.148857555475747, + "learning_rate": 9.992349874574274e-06, + "loss": 0.5967, + "step": 679 + }, + { + "epoch": 0.04721566449104291, + "grad_norm": 6.046034821291769, + "learning_rate": 9.992287568044554e-06, + "loss": 0.9083, + "step": 680 + }, + { + "epoch": 0.04728509929176503, + "grad_norm": 5.501001092681591, + "learning_rate": 9.992225009010576e-06, + "loss": 0.6978, + "step": 681 + }, + { + "epoch": 0.047354534092487154, + "grad_norm": 4.773098237371324, + "learning_rate": 9.99216219747551e-06, + "loss": 0.4296, + "step": 682 + }, + { + "epoch": 0.047423968893209274, + "grad_norm": 4.7886233652203245, + "learning_rate": 9.992099133442525e-06, + "loss": 0.3198, + "step": 683 + }, + { + "epoch": 0.047493403693931395, + "grad_norm": 3.7095350557337192, + "learning_rate": 9.992035816914816e-06, + "loss": 0.3297, + "step": 684 + }, + { + "epoch": 0.04756283849465352, + "grad_norm": 5.671251028019554, + "learning_rate": 9.991972247895585e-06, + "loss": 0.5346, + "step": 685 + }, + { + "epoch": 0.04763227329537564, + "grad_norm": 4.691379188955125, + "learning_rate": 9.991908426388046e-06, + "loss": 0.7333, + "step": 686 
+ }, + { + "epoch": 0.047701708096097764, + "grad_norm": 4.650332561327247, + "learning_rate": 9.991844352395429e-06, + "loss": 0.4452, + "step": 687 + }, + { + "epoch": 0.047771142896819885, + "grad_norm": 5.699856464232138, + "learning_rate": 9.991780025920972e-06, + "loss": 0.5472, + "step": 688 + }, + { + "epoch": 0.047840577697542006, + "grad_norm": 6.2112708294475665, + "learning_rate": 9.991715446967928e-06, + "loss": 0.6031, + "step": 689 + }, + { + "epoch": 0.04791001249826413, + "grad_norm": 4.909061303795624, + "learning_rate": 9.991650615539567e-06, + "loss": 0.425, + "step": 690 + }, + { + "epoch": 0.047979447298986254, + "grad_norm": 6.21423437032649, + "learning_rate": 9.991585531639167e-06, + "loss": 0.7406, + "step": 691 + }, + { + "epoch": 0.048048882099708375, + "grad_norm": 4.382090372038084, + "learning_rate": 9.99152019527002e-06, + "loss": 0.3912, + "step": 692 + }, + { + "epoch": 0.048118316900430495, + "grad_norm": 5.6078304679304365, + "learning_rate": 9.991454606435427e-06, + "loss": 0.5902, + "step": 693 + }, + { + "epoch": 0.048187751701152616, + "grad_norm": 5.853339406196495, + "learning_rate": 9.99138876513871e-06, + "loss": 0.5891, + "step": 694 + }, + { + "epoch": 0.04825718650187474, + "grad_norm": 5.8455438851216135, + "learning_rate": 9.991322671383197e-06, + "loss": 0.6937, + "step": 695 + }, + { + "epoch": 0.048326621302596864, + "grad_norm": 6.649374833206646, + "learning_rate": 9.991256325172232e-06, + "loss": 0.8635, + "step": 696 + }, + { + "epoch": 0.048396056103318985, + "grad_norm": 6.012925621088737, + "learning_rate": 9.99118972650917e-06, + "loss": 1.0924, + "step": 697 + }, + { + "epoch": 0.048465490904041106, + "grad_norm": 4.977710728227318, + "learning_rate": 9.991122875397377e-06, + "loss": 0.3265, + "step": 698 + }, + { + "epoch": 0.04853492570476323, + "grad_norm": 7.086722062528404, + "learning_rate": 9.991055771840239e-06, + "loss": 0.6656, + "step": 699 + }, + { + "epoch": 0.04860436050548535, + "grad_norm": 6.182449126065321, + "learning_rate": 9.990988415841147e-06, + "loss": 0.7988, + "step": 700 + }, + { + "epoch": 0.04867379530620747, + "grad_norm": 4.305440514230685, + "learning_rate": 9.99092080740351e-06, + "loss": 0.3211, + "step": 701 + }, + { + "epoch": 0.048743230106929596, + "grad_norm": 6.50311447978165, + "learning_rate": 9.990852946530746e-06, + "loss": 0.789, + "step": 702 + }, + { + "epoch": 0.048812664907651716, + "grad_norm": 4.73488664658267, + "learning_rate": 9.990784833226285e-06, + "loss": 0.6239, + "step": 703 + }, + { + "epoch": 0.04888209970837384, + "grad_norm": 5.548278431037929, + "learning_rate": 9.990716467493577e-06, + "loss": 0.6327, + "step": 704 + }, + { + "epoch": 0.04895153450909596, + "grad_norm": 5.744145323311782, + "learning_rate": 9.990647849336076e-06, + "loss": 0.7761, + "step": 705 + }, + { + "epoch": 0.04902096930981808, + "grad_norm": 4.502047106110494, + "learning_rate": 9.990578978757257e-06, + "loss": 0.5615, + "step": 706 + }, + { + "epoch": 0.049090404110540206, + "grad_norm": 4.924566567066488, + "learning_rate": 9.990509855760598e-06, + "loss": 0.4088, + "step": 707 + }, + { + "epoch": 0.04915983891126233, + "grad_norm": 4.124872574793904, + "learning_rate": 9.990440480349599e-06, + "loss": 0.3375, + "step": 708 + }, + { + "epoch": 0.04922927371198445, + "grad_norm": 6.252079440192097, + "learning_rate": 9.990370852527767e-06, + "loss": 0.6336, + "step": 709 + }, + { + "epoch": 0.04929870851270657, + "grad_norm": 5.117180459969774, + "learning_rate": 9.990300972298624e-06, + 
"loss": 0.5806, + "step": 710 + }, + { + "epoch": 0.04936814331342869, + "grad_norm": 5.049624789535316, + "learning_rate": 9.990230839665704e-06, + "loss": 0.541, + "step": 711 + }, + { + "epoch": 0.04943757811415081, + "grad_norm": 5.229084678687425, + "learning_rate": 9.990160454632554e-06, + "loss": 0.5403, + "step": 712 + }, + { + "epoch": 0.04950701291487294, + "grad_norm": 5.490394347351558, + "learning_rate": 9.990089817202737e-06, + "loss": 0.5434, + "step": 713 + }, + { + "epoch": 0.04957644771559506, + "grad_norm": 4.8100647031152315, + "learning_rate": 9.990018927379823e-06, + "loss": 0.4469, + "step": 714 + }, + { + "epoch": 0.04964588251631718, + "grad_norm": 5.473783736933605, + "learning_rate": 9.9899477851674e-06, + "loss": 0.618, + "step": 715 + }, + { + "epoch": 0.0497153173170393, + "grad_norm": 5.249865227981726, + "learning_rate": 9.989876390569062e-06, + "loss": 0.5261, + "step": 716 + }, + { + "epoch": 0.04978475211776142, + "grad_norm": 4.708877748911251, + "learning_rate": 9.989804743588423e-06, + "loss": 0.624, + "step": 717 + }, + { + "epoch": 0.04985418691848354, + "grad_norm": 6.115422646427735, + "learning_rate": 9.989732844229107e-06, + "loss": 0.7497, + "step": 718 + }, + { + "epoch": 0.04992362171920567, + "grad_norm": 4.260508105711181, + "learning_rate": 9.989660692494751e-06, + "loss": 0.4474, + "step": 719 + }, + { + "epoch": 0.04999305651992779, + "grad_norm": 7.51725860076484, + "learning_rate": 9.989588288389003e-06, + "loss": 0.6335, + "step": 720 + }, + { + "epoch": 0.05006249132064991, + "grad_norm": 5.783938157840532, + "learning_rate": 9.989515631915524e-06, + "loss": 0.7792, + "step": 721 + }, + { + "epoch": 0.05013192612137203, + "grad_norm": 4.364848401501063, + "learning_rate": 9.989442723077991e-06, + "loss": 0.5082, + "step": 722 + }, + { + "epoch": 0.05020136092209415, + "grad_norm": 3.9802923239712875, + "learning_rate": 9.989369561880091e-06, + "loss": 0.2611, + "step": 723 + }, + { + "epoch": 0.05027079572281627, + "grad_norm": 5.755484380666307, + "learning_rate": 9.989296148325525e-06, + "loss": 0.5454, + "step": 724 + }, + { + "epoch": 0.0503402305235384, + "grad_norm": 3.5439999881225317, + "learning_rate": 9.989222482418006e-06, + "loss": 0.3573, + "step": 725 + }, + { + "epoch": 0.05040966532426052, + "grad_norm": 6.618609773149128, + "learning_rate": 9.989148564161258e-06, + "loss": 0.911, + "step": 726 + }, + { + "epoch": 0.05047910012498264, + "grad_norm": 5.069257498600404, + "learning_rate": 9.989074393559022e-06, + "loss": 0.613, + "step": 727 + }, + { + "epoch": 0.05054853492570476, + "grad_norm": 5.864054610037569, + "learning_rate": 9.98899997061505e-06, + "loss": 0.8184, + "step": 728 + }, + { + "epoch": 0.05061796972642688, + "grad_norm": 4.517963581897608, + "learning_rate": 9.988925295333103e-06, + "loss": 0.5548, + "step": 729 + }, + { + "epoch": 0.05068740452714901, + "grad_norm": 4.618753160369594, + "learning_rate": 9.98885036771696e-06, + "loss": 0.5531, + "step": 730 + }, + { + "epoch": 0.05075683932787113, + "grad_norm": 4.376843038128889, + "learning_rate": 9.988775187770413e-06, + "loss": 0.4608, + "step": 731 + }, + { + "epoch": 0.05082627412859325, + "grad_norm": 4.866481954747598, + "learning_rate": 9.988699755497258e-06, + "loss": 0.3514, + "step": 732 + }, + { + "epoch": 0.05089570892931537, + "grad_norm": 4.1040280996416865, + "learning_rate": 9.988624070901318e-06, + "loss": 0.6799, + "step": 733 + }, + { + "epoch": 0.05096514373003749, + "grad_norm": 4.682159071004771, + "learning_rate": 
9.988548133986415e-06, + "loss": 0.5379, + "step": 734 + }, + { + "epoch": 0.051034578530759614, + "grad_norm": 4.966606443743591, + "learning_rate": 9.988471944756395e-06, + "loss": 0.4636, + "step": 735 + }, + { + "epoch": 0.05110401333148174, + "grad_norm": 4.711458416668277, + "learning_rate": 9.988395503215107e-06, + "loss": 0.3936, + "step": 736 + }, + { + "epoch": 0.05117344813220386, + "grad_norm": 4.709532737592508, + "learning_rate": 9.98831880936642e-06, + "loss": 0.4481, + "step": 737 + }, + { + "epoch": 0.05124288293292598, + "grad_norm": 5.057946309627939, + "learning_rate": 9.988241863214212e-06, + "loss": 0.4989, + "step": 738 + }, + { + "epoch": 0.051312317733648104, + "grad_norm": 6.029719457481402, + "learning_rate": 9.988164664762374e-06, + "loss": 0.5903, + "step": 739 + }, + { + "epoch": 0.051381752534370224, + "grad_norm": 4.348655708656128, + "learning_rate": 9.988087214014815e-06, + "loss": 0.5438, + "step": 740 + }, + { + "epoch": 0.051451187335092345, + "grad_norm": 6.263846152222153, + "learning_rate": 9.988009510975446e-06, + "loss": 0.608, + "step": 741 + }, + { + "epoch": 0.05152062213581447, + "grad_norm": 5.341494767010231, + "learning_rate": 9.9879315556482e-06, + "loss": 0.6357, + "step": 742 + }, + { + "epoch": 0.051590056936536594, + "grad_norm": 4.882202875476889, + "learning_rate": 9.987853348037022e-06, + "loss": 0.3735, + "step": 743 + }, + { + "epoch": 0.051659491737258714, + "grad_norm": 4.492912656255205, + "learning_rate": 9.987774888145867e-06, + "loss": 0.4875, + "step": 744 + }, + { + "epoch": 0.051728926537980835, + "grad_norm": 3.773069207931385, + "learning_rate": 9.987696175978698e-06, + "loss": 0.3732, + "step": 745 + }, + { + "epoch": 0.051798361338702956, + "grad_norm": 4.712267786494523, + "learning_rate": 9.987617211539505e-06, + "loss": 0.4418, + "step": 746 + }, + { + "epoch": 0.051867796139425076, + "grad_norm": 5.502889018503653, + "learning_rate": 9.987537994832275e-06, + "loss": 0.5774, + "step": 747 + }, + { + "epoch": 0.051937230940147204, + "grad_norm": 4.20299459003689, + "learning_rate": 9.987458525861016e-06, + "loss": 0.4716, + "step": 748 + }, + { + "epoch": 0.052006665740869325, + "grad_norm": 6.591786713702156, + "learning_rate": 9.98737880462975e-06, + "loss": 0.6916, + "step": 749 + }, + { + "epoch": 0.052076100541591445, + "grad_norm": 5.425698032187315, + "learning_rate": 9.987298831142507e-06, + "loss": 0.6178, + "step": 750 + }, + { + "epoch": 0.052145535342313566, + "grad_norm": 5.878407309301575, + "learning_rate": 9.987218605403332e-06, + "loss": 0.5453, + "step": 751 + }, + { + "epoch": 0.05221497014303569, + "grad_norm": 5.08698484079951, + "learning_rate": 9.987138127416285e-06, + "loss": 0.5782, + "step": 752 + }, + { + "epoch": 0.052284404943757815, + "grad_norm": 5.879980805190245, + "learning_rate": 9.987057397185433e-06, + "loss": 0.9409, + "step": 753 + }, + { + "epoch": 0.052353839744479935, + "grad_norm": 7.0619768626850155, + "learning_rate": 9.986976414714863e-06, + "loss": 0.6808, + "step": 754 + }, + { + "epoch": 0.052423274545202056, + "grad_norm": 4.113634903601791, + "learning_rate": 9.98689518000867e-06, + "loss": 0.5029, + "step": 755 + }, + { + "epoch": 0.05249270934592418, + "grad_norm": 6.365846519761652, + "learning_rate": 9.986813693070958e-06, + "loss": 0.8543, + "step": 756 + }, + { + "epoch": 0.0525621441466463, + "grad_norm": 5.337016275372264, + "learning_rate": 9.986731953905854e-06, + "loss": 0.998, + "step": 757 + }, + { + "epoch": 0.05263157894736842, + "grad_norm": 
4.482510077691802, + "learning_rate": 9.986649962517491e-06, + "loss": 0.4713, + "step": 758 + }, + { + "epoch": 0.052701013748090546, + "grad_norm": 5.99016367767901, + "learning_rate": 9.986567718910017e-06, + "loss": 0.8063, + "step": 759 + }, + { + "epoch": 0.052770448548812667, + "grad_norm": 4.790256336151655, + "learning_rate": 9.98648522308759e-06, + "loss": 0.4555, + "step": 760 + }, + { + "epoch": 0.05283988334953479, + "grad_norm": 4.081396326845866, + "learning_rate": 9.986402475054382e-06, + "loss": 0.3434, + "step": 761 + }, + { + "epoch": 0.05290931815025691, + "grad_norm": 4.731669740723197, + "learning_rate": 9.98631947481458e-06, + "loss": 0.7651, + "step": 762 + }, + { + "epoch": 0.05297875295097903, + "grad_norm": 4.94347253864801, + "learning_rate": 9.986236222372379e-06, + "loss": 0.4651, + "step": 763 + }, + { + "epoch": 0.05304818775170115, + "grad_norm": 5.960552828652827, + "learning_rate": 9.986152717731994e-06, + "loss": 0.7553, + "step": 764 + }, + { + "epoch": 0.05311762255242328, + "grad_norm": 4.693546729412604, + "learning_rate": 9.986068960897648e-06, + "loss": 0.5428, + "step": 765 + }, + { + "epoch": 0.0531870573531454, + "grad_norm": 4.82511924849523, + "learning_rate": 9.985984951873575e-06, + "loss": 0.4141, + "step": 766 + }, + { + "epoch": 0.05325649215386752, + "grad_norm": 4.8448179849610105, + "learning_rate": 9.985900690664025e-06, + "loss": 0.5061, + "step": 767 + }, + { + "epoch": 0.05332592695458964, + "grad_norm": 4.430891864955989, + "learning_rate": 9.98581617727326e-06, + "loss": 0.346, + "step": 768 + }, + { + "epoch": 0.05339536175531176, + "grad_norm": 5.11006363764043, + "learning_rate": 9.985731411705554e-06, + "loss": 0.7828, + "step": 769 + }, + { + "epoch": 0.05346479655603389, + "grad_norm": 4.718222616321539, + "learning_rate": 9.985646393965196e-06, + "loss": 0.4868, + "step": 770 + }, + { + "epoch": 0.05353423135675601, + "grad_norm": 5.691807304395084, + "learning_rate": 9.985561124056484e-06, + "loss": 0.7973, + "step": 771 + }, + { + "epoch": 0.05360366615747813, + "grad_norm": 4.392238674725026, + "learning_rate": 9.985475601983733e-06, + "loss": 0.3112, + "step": 772 + }, + { + "epoch": 0.05367310095820025, + "grad_norm": 5.799717920285053, + "learning_rate": 9.985389827751266e-06, + "loss": 0.6339, + "step": 773 + }, + { + "epoch": 0.05374253575892237, + "grad_norm": 6.3274326905982035, + "learning_rate": 9.985303801363423e-06, + "loss": 0.8679, + "step": 774 + }, + { + "epoch": 0.05381197055964449, + "grad_norm": 4.955774779022077, + "learning_rate": 9.985217522824558e-06, + "loss": 0.5535, + "step": 775 + }, + { + "epoch": 0.05388140536036662, + "grad_norm": 5.371640328381147, + "learning_rate": 9.98513099213903e-06, + "loss": 0.511, + "step": 776 + }, + { + "epoch": 0.05395084016108874, + "grad_norm": 4.93937416310012, + "learning_rate": 9.985044209311217e-06, + "loss": 0.4582, + "step": 777 + }, + { + "epoch": 0.05402027496181086, + "grad_norm": 5.832216246395709, + "learning_rate": 9.984957174345509e-06, + "loss": 0.7087, + "step": 778 + }, + { + "epoch": 0.05408970976253298, + "grad_norm": 4.859658162849972, + "learning_rate": 9.984869887246307e-06, + "loss": 0.5068, + "step": 779 + }, + { + "epoch": 0.0541591445632551, + "grad_norm": 5.465886543799718, + "learning_rate": 9.984782348018027e-06, + "loss": 0.6644, + "step": 780 + }, + { + "epoch": 0.05422857936397722, + "grad_norm": 4.912474934027746, + "learning_rate": 9.984694556665098e-06, + "loss": 0.5801, + "step": 781 + }, + { + "epoch": 0.05429801416469935, + 
"grad_norm": 4.8307962171364585, + "learning_rate": 9.984606513191959e-06, + "loss": 0.4253, + "step": 782 + }, + { + "epoch": 0.05436744896542147, + "grad_norm": 3.78195117183482, + "learning_rate": 9.984518217603062e-06, + "loss": 0.4946, + "step": 783 + }, + { + "epoch": 0.05443688376614359, + "grad_norm": 6.1860148087415485, + "learning_rate": 9.984429669902874e-06, + "loss": 0.753, + "step": 784 + }, + { + "epoch": 0.05450631856686571, + "grad_norm": 5.452248410668079, + "learning_rate": 9.984340870095874e-06, + "loss": 0.8678, + "step": 785 + }, + { + "epoch": 0.05457575336758783, + "grad_norm": 5.377308015234931, + "learning_rate": 9.984251818186552e-06, + "loss": 0.6031, + "step": 786 + }, + { + "epoch": 0.054645188168309954, + "grad_norm": 5.170839150857725, + "learning_rate": 9.984162514179414e-06, + "loss": 0.4633, + "step": 787 + }, + { + "epoch": 0.05471462296903208, + "grad_norm": 5.2700664222346285, + "learning_rate": 9.984072958078977e-06, + "loss": 0.6024, + "step": 788 + }, + { + "epoch": 0.0547840577697542, + "grad_norm": 5.277357561535729, + "learning_rate": 9.983983149889768e-06, + "loss": 0.516, + "step": 789 + }, + { + "epoch": 0.05485349257047632, + "grad_norm": 4.943264751845346, + "learning_rate": 9.983893089616332e-06, + "loss": 0.3824, + "step": 790 + }, + { + "epoch": 0.05492292737119844, + "grad_norm": 4.460423425633663, + "learning_rate": 9.983802777263223e-06, + "loss": 0.4777, + "step": 791 + }, + { + "epoch": 0.054992362171920564, + "grad_norm": 5.333019121676857, + "learning_rate": 9.983712212835009e-06, + "loss": 0.6833, + "step": 792 + }, + { + "epoch": 0.05506179697264269, + "grad_norm": 4.715970705130396, + "learning_rate": 9.98362139633627e-06, + "loss": 0.4223, + "step": 793 + }, + { + "epoch": 0.05513123177336481, + "grad_norm": 6.100854587077938, + "learning_rate": 9.9835303277716e-06, + "loss": 0.8033, + "step": 794 + }, + { + "epoch": 0.05520066657408693, + "grad_norm": 5.214988274486931, + "learning_rate": 9.983439007145606e-06, + "loss": 0.512, + "step": 795 + }, + { + "epoch": 0.055270101374809054, + "grad_norm": 5.454061849419084, + "learning_rate": 9.983347434462906e-06, + "loss": 0.8152, + "step": 796 + }, + { + "epoch": 0.055339536175531175, + "grad_norm": 4.889922018041808, + "learning_rate": 9.98325560972813e-06, + "loss": 0.5321, + "step": 797 + }, + { + "epoch": 0.055408970976253295, + "grad_norm": 5.953590913340075, + "learning_rate": 9.983163532945927e-06, + "loss": 0.4376, + "step": 798 + }, + { + "epoch": 0.05547840577697542, + "grad_norm": 5.366688377270275, + "learning_rate": 9.98307120412095e-06, + "loss": 0.7865, + "step": 799 + }, + { + "epoch": 0.055547840577697544, + "grad_norm": 4.87553821918744, + "learning_rate": 9.98297862325787e-06, + "loss": 0.6314, + "step": 800 + }, + { + "epoch": 0.055617275378419664, + "grad_norm": 4.9894506888616315, + "learning_rate": 9.982885790361372e-06, + "loss": 0.6921, + "step": 801 + }, + { + "epoch": 0.055686710179141785, + "grad_norm": 4.753296030077526, + "learning_rate": 9.982792705436147e-06, + "loss": 0.4623, + "step": 802 + }, + { + "epoch": 0.055756144979863906, + "grad_norm": 4.68018376892769, + "learning_rate": 9.982699368486907e-06, + "loss": 0.313, + "step": 803 + }, + { + "epoch": 0.05582557978058603, + "grad_norm": 4.894882821522259, + "learning_rate": 9.982605779518369e-06, + "loss": 0.3951, + "step": 804 + }, + { + "epoch": 0.055895014581308154, + "grad_norm": 7.351167666092212, + "learning_rate": 9.98251193853527e-06, + "loss": 0.6325, + "step": 805 + }, + { + "epoch": 
0.055964449382030275, + "grad_norm": 3.850195782085043, + "learning_rate": 9.982417845542357e-06, + "loss": 0.4287, + "step": 806 + }, + { + "epoch": 0.056033884182752396, + "grad_norm": 4.818134158620349, + "learning_rate": 9.982323500544385e-06, + "loss": 0.4672, + "step": 807 + }, + { + "epoch": 0.056103318983474516, + "grad_norm": 6.533269444355503, + "learning_rate": 9.982228903546129e-06, + "loss": 0.9613, + "step": 808 + }, + { + "epoch": 0.05617275378419664, + "grad_norm": 5.0910109719802135, + "learning_rate": 9.982134054552373e-06, + "loss": 0.3717, + "step": 809 + }, + { + "epoch": 0.056242188584918765, + "grad_norm": 3.6367939709812354, + "learning_rate": 9.982038953567914e-06, + "loss": 0.4374, + "step": 810 + }, + { + "epoch": 0.056311623385640885, + "grad_norm": 4.19897794140048, + "learning_rate": 9.981943600597562e-06, + "loss": 0.4336, + "step": 811 + }, + { + "epoch": 0.056381058186363006, + "grad_norm": 5.771272428000438, + "learning_rate": 9.981847995646142e-06, + "loss": 0.6326, + "step": 812 + }, + { + "epoch": 0.05645049298708513, + "grad_norm": 5.559806523706431, + "learning_rate": 9.981752138718484e-06, + "loss": 0.616, + "step": 813 + }, + { + "epoch": 0.05651992778780725, + "grad_norm": 4.6925742039467675, + "learning_rate": 9.981656029819444e-06, + "loss": 0.5655, + "step": 814 + }, + { + "epoch": 0.05658936258852937, + "grad_norm": 5.543281182192348, + "learning_rate": 9.981559668953876e-06, + "loss": 0.8608, + "step": 815 + }, + { + "epoch": 0.056658797389251496, + "grad_norm": 5.447741188044963, + "learning_rate": 9.98146305612666e-06, + "loss": 0.524, + "step": 816 + }, + { + "epoch": 0.05672823218997362, + "grad_norm": 4.758266373977329, + "learning_rate": 9.981366191342677e-06, + "loss": 0.5684, + "step": 817 + }, + { + "epoch": 0.05679766699069574, + "grad_norm": 4.362557076905672, + "learning_rate": 9.98126907460683e-06, + "loss": 0.479, + "step": 818 + }, + { + "epoch": 0.05686710179141786, + "grad_norm": 4.414720533516325, + "learning_rate": 9.981171705924029e-06, + "loss": 0.312, + "step": 819 + }, + { + "epoch": 0.05693653659213998, + "grad_norm": 4.599478915544888, + "learning_rate": 9.9810740852992e-06, + "loss": 0.6729, + "step": 820 + }, + { + "epoch": 0.0570059713928621, + "grad_norm": 4.51681507491556, + "learning_rate": 9.98097621273728e-06, + "loss": 0.5332, + "step": 821 + }, + { + "epoch": 0.05707540619358423, + "grad_norm": 4.458634891794412, + "learning_rate": 9.98087808824322e-06, + "loss": 0.3974, + "step": 822 + }, + { + "epoch": 0.05714484099430635, + "grad_norm": 5.679804026500894, + "learning_rate": 9.980779711821983e-06, + "loss": 0.5288, + "step": 823 + }, + { + "epoch": 0.05721427579502847, + "grad_norm": 4.5083719310795685, + "learning_rate": 9.980681083478543e-06, + "loss": 0.4033, + "step": 824 + }, + { + "epoch": 0.05728371059575059, + "grad_norm": 6.986410721750218, + "learning_rate": 9.980582203217893e-06, + "loss": 0.7277, + "step": 825 + }, + { + "epoch": 0.05735314539647271, + "grad_norm": 4.915844057565651, + "learning_rate": 9.980483071045028e-06, + "loss": 0.7306, + "step": 826 + }, + { + "epoch": 0.05742258019719483, + "grad_norm": 6.065689651606107, + "learning_rate": 9.980383686964966e-06, + "loss": 0.8851, + "step": 827 + }, + { + "epoch": 0.05749201499791696, + "grad_norm": 5.903824276691955, + "learning_rate": 9.980284050982731e-06, + "loss": 0.7023, + "step": 828 + }, + { + "epoch": 0.05756144979863908, + "grad_norm": 5.782728474541114, + "learning_rate": 9.980184163103368e-06, + "loss": 0.636, + "step": 829 + 
}, + { + "epoch": 0.0576308845993612, + "grad_norm": 5.023634802144964, + "learning_rate": 9.980084023331921e-06, + "loss": 0.5108, + "step": 830 + }, + { + "epoch": 0.05770031940008332, + "grad_norm": 4.44219042772744, + "learning_rate": 9.979983631673463e-06, + "loss": 0.3715, + "step": 831 + }, + { + "epoch": 0.05776975420080544, + "grad_norm": 4.087772742765401, + "learning_rate": 9.979882988133068e-06, + "loss": 0.2826, + "step": 832 + }, + { + "epoch": 0.05783918900152757, + "grad_norm": 5.251898639356191, + "learning_rate": 9.979782092715825e-06, + "loss": 0.4231, + "step": 833 + }, + { + "epoch": 0.05790862380224969, + "grad_norm": 4.9695246622775, + "learning_rate": 9.97968094542684e-06, + "loss": 0.5981, + "step": 834 + }, + { + "epoch": 0.05797805860297181, + "grad_norm": 5.834848117543546, + "learning_rate": 9.979579546271225e-06, + "loss": 0.7907, + "step": 835 + }, + { + "epoch": 0.05804749340369393, + "grad_norm": 5.454160042170218, + "learning_rate": 9.979477895254113e-06, + "loss": 0.821, + "step": 836 + }, + { + "epoch": 0.05811692820441605, + "grad_norm": 4.46656251099197, + "learning_rate": 9.979375992380645e-06, + "loss": 0.3572, + "step": 837 + }, + { + "epoch": 0.05818636300513817, + "grad_norm": 4.993120846382734, + "learning_rate": 9.979273837655973e-06, + "loss": 0.6124, + "step": 838 + }, + { + "epoch": 0.0582557978058603, + "grad_norm": 3.911172763154037, + "learning_rate": 9.979171431085264e-06, + "loss": 0.2543, + "step": 839 + }, + { + "epoch": 0.05832523260658242, + "grad_norm": 5.432654817736759, + "learning_rate": 9.979068772673698e-06, + "loss": 0.5199, + "step": 840 + }, + { + "epoch": 0.05839466740730454, + "grad_norm": 4.9650495752362565, + "learning_rate": 9.978965862426471e-06, + "loss": 0.5773, + "step": 841 + }, + { + "epoch": 0.05846410220802666, + "grad_norm": 3.7169989933920498, + "learning_rate": 9.978862700348782e-06, + "loss": 0.2395, + "step": 842 + }, + { + "epoch": 0.05853353700874878, + "grad_norm": 6.561747574338523, + "learning_rate": 9.978759286445851e-06, + "loss": 0.7589, + "step": 843 + }, + { + "epoch": 0.058602971809470904, + "grad_norm": 5.024869506221608, + "learning_rate": 9.97865562072291e-06, + "loss": 0.2439, + "step": 844 + }, + { + "epoch": 0.05867240661019303, + "grad_norm": 5.204125246126191, + "learning_rate": 9.978551703185202e-06, + "loss": 0.762, + "step": 845 + }, + { + "epoch": 0.05874184141091515, + "grad_norm": 4.768023281407063, + "learning_rate": 9.978447533837981e-06, + "loss": 0.4073, + "step": 846 + }, + { + "epoch": 0.05881127621163727, + "grad_norm": 5.311523694767885, + "learning_rate": 9.978343112686519e-06, + "loss": 0.4754, + "step": 847 + }, + { + "epoch": 0.058880711012359394, + "grad_norm": 3.6270137334563155, + "learning_rate": 9.978238439736093e-06, + "loss": 0.3285, + "step": 848 + }, + { + "epoch": 0.058950145813081514, + "grad_norm": 5.811385562564773, + "learning_rate": 9.978133514992004e-06, + "loss": 0.9314, + "step": 849 + }, + { + "epoch": 0.059019580613803635, + "grad_norm": 5.220765594471936, + "learning_rate": 9.978028338459553e-06, + "loss": 0.5203, + "step": 850 + }, + { + "epoch": 0.05908901541452576, + "grad_norm": 4.284268000302469, + "learning_rate": 9.977922910144061e-06, + "loss": 0.3385, + "step": 851 + }, + { + "epoch": 0.05915845021524788, + "grad_norm": 3.778962136553783, + "learning_rate": 9.977817230050861e-06, + "loss": 0.2803, + "step": 852 + }, + { + "epoch": 0.059227885015970004, + "grad_norm": 5.990180129947594, + "learning_rate": 9.977711298185299e-06, + "loss": 
0.7038, + "step": 853 + }, + { + "epoch": 0.059297319816692125, + "grad_norm": 5.2643320168657315, + "learning_rate": 9.977605114552734e-06, + "loss": 0.4497, + "step": 854 + }, + { + "epoch": 0.059366754617414245, + "grad_norm": 5.566776662591551, + "learning_rate": 9.977498679158532e-06, + "loss": 0.4686, + "step": 855 + }, + { + "epoch": 0.05943618941813637, + "grad_norm": 5.081580337441843, + "learning_rate": 9.977391992008082e-06, + "loss": 0.5855, + "step": 856 + }, + { + "epoch": 0.059505624218858494, + "grad_norm": 6.290713035797393, + "learning_rate": 9.977285053106773e-06, + "loss": 0.9367, + "step": 857 + }, + { + "epoch": 0.059575059019580615, + "grad_norm": 4.852832831147473, + "learning_rate": 9.977177862460021e-06, + "loss": 0.5509, + "step": 858 + }, + { + "epoch": 0.059644493820302735, + "grad_norm": 5.8750319175016745, + "learning_rate": 9.977070420073245e-06, + "loss": 0.7002, + "step": 859 + }, + { + "epoch": 0.059713928621024856, + "grad_norm": 5.366944502014877, + "learning_rate": 9.976962725951878e-06, + "loss": 0.683, + "step": 860 + }, + { + "epoch": 0.05978336342174698, + "grad_norm": 6.393163947069777, + "learning_rate": 9.97685478010137e-06, + "loss": 0.912, + "step": 861 + }, + { + "epoch": 0.059852798222469104, + "grad_norm": 4.401148216867979, + "learning_rate": 9.976746582527177e-06, + "loss": 0.5825, + "step": 862 + }, + { + "epoch": 0.059922233023191225, + "grad_norm": 4.654659423810781, + "learning_rate": 9.976638133234773e-06, + "loss": 0.6291, + "step": 863 + }, + { + "epoch": 0.059991667823913346, + "grad_norm": 5.470211469742042, + "learning_rate": 9.976529432229645e-06, + "loss": 0.8138, + "step": 864 + }, + { + "epoch": 0.060061102624635467, + "grad_norm": 4.825491035778757, + "learning_rate": 9.97642047951729e-06, + "loss": 0.3348, + "step": 865 + }, + { + "epoch": 0.06013053742535759, + "grad_norm": 3.6568859412625394, + "learning_rate": 9.97631127510322e-06, + "loss": 0.4965, + "step": 866 + }, + { + "epoch": 0.06019997222607971, + "grad_norm": 5.407199196874604, + "learning_rate": 9.976201818992952e-06, + "loss": 0.7079, + "step": 867 + }, + { + "epoch": 0.060269407026801836, + "grad_norm": 4.101236946933585, + "learning_rate": 9.97609211119203e-06, + "loss": 0.4677, + "step": 868 + }, + { + "epoch": 0.060338841827523956, + "grad_norm": 4.238608913628256, + "learning_rate": 9.975982151705998e-06, + "loss": 0.4338, + "step": 869 + }, + { + "epoch": 0.06040827662824608, + "grad_norm": 3.586802823860022, + "learning_rate": 9.975871940540422e-06, + "loss": 0.3797, + "step": 870 + }, + { + "epoch": 0.0604777114289682, + "grad_norm": 5.015545499131453, + "learning_rate": 9.975761477700873e-06, + "loss": 0.7338, + "step": 871 + }, + { + "epoch": 0.06054714622969032, + "grad_norm": 4.348134558601847, + "learning_rate": 9.975650763192938e-06, + "loss": 0.4368, + "step": 872 + }, + { + "epoch": 0.060616581030412446, + "grad_norm": 5.17596403133597, + "learning_rate": 9.975539797022218e-06, + "loss": 0.4639, + "step": 873 + }, + { + "epoch": 0.06068601583113457, + "grad_norm": 6.9953767658247665, + "learning_rate": 9.975428579194327e-06, + "loss": 0.7597, + "step": 874 + }, + { + "epoch": 0.06075545063185669, + "grad_norm": 5.340506592070401, + "learning_rate": 9.975317109714886e-06, + "loss": 0.6475, + "step": 875 + }, + { + "epoch": 0.06082488543257881, + "grad_norm": 5.29047765463008, + "learning_rate": 9.975205388589537e-06, + "loss": 0.5791, + "step": 876 + }, + { + "epoch": 0.06089432023330093, + "grad_norm": 5.6974687377834625, + "learning_rate": 
9.975093415823928e-06, + "loss": 0.6237, + "step": 877 + }, + { + "epoch": 0.06096375503402305, + "grad_norm": 5.548071497529065, + "learning_rate": 9.974981191423725e-06, + "loss": 0.468, + "step": 878 + }, + { + "epoch": 0.06103318983474518, + "grad_norm": 3.106728793995161, + "learning_rate": 9.974868715394604e-06, + "loss": 0.2516, + "step": 879 + }, + { + "epoch": 0.0611026246354673, + "grad_norm": 4.0955982347025905, + "learning_rate": 9.97475598774225e-06, + "loss": 0.5117, + "step": 880 + }, + { + "epoch": 0.06117205943618942, + "grad_norm": 5.513738555852737, + "learning_rate": 9.97464300847237e-06, + "loss": 0.7659, + "step": 881 + }, + { + "epoch": 0.06124149423691154, + "grad_norm": 4.742117963553443, + "learning_rate": 9.974529777590674e-06, + "loss": 0.5422, + "step": 882 + }, + { + "epoch": 0.06131092903763366, + "grad_norm": 3.159027201489317, + "learning_rate": 9.974416295102892e-06, + "loss": 0.1298, + "step": 883 + }, + { + "epoch": 0.06138036383835578, + "grad_norm": 4.869362082844647, + "learning_rate": 9.974302561014762e-06, + "loss": 0.6021, + "step": 884 + }, + { + "epoch": 0.06144979863907791, + "grad_norm": 3.059106279202786, + "learning_rate": 9.974188575332036e-06, + "loss": 0.2314, + "step": 885 + }, + { + "epoch": 0.06151923343980003, + "grad_norm": 4.763636407962317, + "learning_rate": 9.974074338060482e-06, + "loss": 0.5019, + "step": 886 + }, + { + "epoch": 0.06158866824052215, + "grad_norm": 4.987420676154112, + "learning_rate": 9.973959849205876e-06, + "loss": 0.448, + "step": 887 + }, + { + "epoch": 0.06165810304124427, + "grad_norm": 5.604948309749669, + "learning_rate": 9.973845108774012e-06, + "loss": 0.2817, + "step": 888 + }, + { + "epoch": 0.06172753784196639, + "grad_norm": 4.761072252438883, + "learning_rate": 9.973730116770688e-06, + "loss": 0.4862, + "step": 889 + }, + { + "epoch": 0.06179697264268851, + "grad_norm": 5.300757562179495, + "learning_rate": 9.973614873201722e-06, + "loss": 0.5307, + "step": 890 + }, + { + "epoch": 0.06186640744341064, + "grad_norm": 5.713485720989508, + "learning_rate": 9.973499378072947e-06, + "loss": 0.5329, + "step": 891 + }, + { + "epoch": 0.06193584224413276, + "grad_norm": 6.090220816575116, + "learning_rate": 9.973383631390199e-06, + "loss": 0.7065, + "step": 892 + }, + { + "epoch": 0.06200527704485488, + "grad_norm": 2.959788149743685, + "learning_rate": 9.973267633159335e-06, + "loss": 0.3194, + "step": 893 + }, + { + "epoch": 0.062074711845577, + "grad_norm": 5.114553588918628, + "learning_rate": 9.973151383386222e-06, + "loss": 0.6698, + "step": 894 + }, + { + "epoch": 0.06214414664629912, + "grad_norm": 4.152304745216256, + "learning_rate": 9.97303488207674e-06, + "loss": 0.4091, + "step": 895 + }, + { + "epoch": 0.06221358144702125, + "grad_norm": 3.9643779803590977, + "learning_rate": 9.972918129236779e-06, + "loss": 0.3985, + "step": 896 + }, + { + "epoch": 0.06228301624774337, + "grad_norm": 4.546765992530377, + "learning_rate": 9.972801124872248e-06, + "loss": 0.5113, + "step": 897 + }, + { + "epoch": 0.06235245104846549, + "grad_norm": 4.2010299821223125, + "learning_rate": 9.972683868989063e-06, + "loss": 0.322, + "step": 898 + }, + { + "epoch": 0.06242188584918761, + "grad_norm": 4.9671207282442555, + "learning_rate": 9.972566361593155e-06, + "loss": 0.4954, + "step": 899 + }, + { + "epoch": 0.06249132064990973, + "grad_norm": 5.127850447152676, + "learning_rate": 9.972448602690467e-06, + "loss": 0.5614, + "step": 900 + }, + { + "epoch": 0.06256075545063186, + "grad_norm": 5.924564244264348, + 
"learning_rate": 9.972330592286956e-06, + "loss": 0.8143, + "step": 901 + }, + { + "epoch": 0.06263019025135398, + "grad_norm": 4.5368959262939015, + "learning_rate": 9.97221233038859e-06, + "loss": 0.4361, + "step": 902 + }, + { + "epoch": 0.0626996250520761, + "grad_norm": 4.916949533125268, + "learning_rate": 9.97209381700135e-06, + "loss": 0.5876, + "step": 903 + }, + { + "epoch": 0.06276905985279822, + "grad_norm": 5.1309711609392865, + "learning_rate": 9.971975052131231e-06, + "loss": 0.7273, + "step": 904 + }, + { + "epoch": 0.06283849465352034, + "grad_norm": 5.080705813535925, + "learning_rate": 9.97185603578424e-06, + "loss": 0.6639, + "step": 905 + }, + { + "epoch": 0.06290792945424246, + "grad_norm": 3.9934500093197767, + "learning_rate": 9.971736767966397e-06, + "loss": 0.4592, + "step": 906 + }, + { + "epoch": 0.06297736425496459, + "grad_norm": 6.43875890376898, + "learning_rate": 9.971617248683736e-06, + "loss": 0.6645, + "step": 907 + }, + { + "epoch": 0.0630467990556867, + "grad_norm": 4.067354097287484, + "learning_rate": 9.971497477942297e-06, + "loss": 0.5227, + "step": 908 + }, + { + "epoch": 0.06311623385640883, + "grad_norm": 4.861073689208835, + "learning_rate": 9.971377455748144e-06, + "loss": 0.4918, + "step": 909 + }, + { + "epoch": 0.06318566865713095, + "grad_norm": 5.665398584991399, + "learning_rate": 9.971257182107343e-06, + "loss": 0.5597, + "step": 910 + }, + { + "epoch": 0.06325510345785308, + "grad_norm": 5.2107106156001235, + "learning_rate": 9.97113665702598e-06, + "loss": 0.7108, + "step": 911 + }, + { + "epoch": 0.0633245382585752, + "grad_norm": 5.74240152950369, + "learning_rate": 9.97101588051015e-06, + "loss": 0.7618, + "step": 912 + }, + { + "epoch": 0.06339397305929732, + "grad_norm": 4.99449985710756, + "learning_rate": 9.970894852565963e-06, + "loss": 0.6625, + "step": 913 + }, + { + "epoch": 0.06346340786001944, + "grad_norm": 4.8980195397179855, + "learning_rate": 9.970773573199537e-06, + "loss": 0.7337, + "step": 914 + }, + { + "epoch": 0.06353284266074156, + "grad_norm": 4.134093868592578, + "learning_rate": 9.97065204241701e-06, + "loss": 0.5792, + "step": 915 + }, + { + "epoch": 0.06360227746146369, + "grad_norm": 5.694789794956021, + "learning_rate": 9.970530260224526e-06, + "loss": 0.7233, + "step": 916 + }, + { + "epoch": 0.0636717122621858, + "grad_norm": 5.498143501458203, + "learning_rate": 9.970408226628248e-06, + "loss": 0.5447, + "step": 917 + }, + { + "epoch": 0.06374114706290793, + "grad_norm": 5.410190167568352, + "learning_rate": 9.970285941634346e-06, + "loss": 0.7237, + "step": 918 + }, + { + "epoch": 0.06381058186363005, + "grad_norm": 5.539925312538419, + "learning_rate": 9.970163405249004e-06, + "loss": 0.6118, + "step": 919 + }, + { + "epoch": 0.06388001666435217, + "grad_norm": 5.876427794128868, + "learning_rate": 9.97004061747842e-06, + "loss": 0.5688, + "step": 920 + }, + { + "epoch": 0.06394945146507429, + "grad_norm": 4.2317853639985366, + "learning_rate": 9.969917578328808e-06, + "loss": 0.3539, + "step": 921 + }, + { + "epoch": 0.06401888626579642, + "grad_norm": 4.5892420963011515, + "learning_rate": 9.969794287806389e-06, + "loss": 0.5841, + "step": 922 + }, + { + "epoch": 0.06408832106651854, + "grad_norm": 3.3146340769070424, + "learning_rate": 9.969670745917396e-06, + "loss": 0.2819, + "step": 923 + }, + { + "epoch": 0.06415775586724067, + "grad_norm": 5.081862364209531, + "learning_rate": 9.969546952668084e-06, + "loss": 0.6818, + "step": 924 + }, + { + "epoch": 0.06422719066796279, + "grad_norm": 
4.631114001452725, + "learning_rate": 9.969422908064707e-06, + "loss": 0.523, + "step": 925 + }, + { + "epoch": 0.0642966254686849, + "grad_norm": 4.901189625878395, + "learning_rate": 9.969298612113544e-06, + "loss": 0.6004, + "step": 926 + }, + { + "epoch": 0.06436606026940703, + "grad_norm": 5.299401241730944, + "learning_rate": 9.96917406482088e-06, + "loss": 0.7762, + "step": 927 + }, + { + "epoch": 0.06443549507012915, + "grad_norm": 5.542163696763225, + "learning_rate": 9.969049266193015e-06, + "loss": 0.4865, + "step": 928 + }, + { + "epoch": 0.06450492987085127, + "grad_norm": 4.465404114208305, + "learning_rate": 9.96892421623626e-06, + "loss": 0.4133, + "step": 929 + }, + { + "epoch": 0.06457436467157339, + "grad_norm": 6.35869173710654, + "learning_rate": 9.968798914956943e-06, + "loss": 0.892, + "step": 930 + }, + { + "epoch": 0.06464379947229551, + "grad_norm": 6.9370680148147335, + "learning_rate": 9.968673362361399e-06, + "loss": 0.6944, + "step": 931 + }, + { + "epoch": 0.06471323427301763, + "grad_norm": 5.661517076298483, + "learning_rate": 9.968547558455978e-06, + "loss": 0.5798, + "step": 932 + }, + { + "epoch": 0.06478266907373975, + "grad_norm": 4.9951964780944005, + "learning_rate": 9.968421503247043e-06, + "loss": 0.7221, + "step": 933 + }, + { + "epoch": 0.06485210387446189, + "grad_norm": 4.0016157505836665, + "learning_rate": 9.968295196740972e-06, + "loss": 0.3895, + "step": 934 + }, + { + "epoch": 0.064921538675184, + "grad_norm": 9.559840478550852, + "learning_rate": 9.968168638944152e-06, + "loss": 0.2989, + "step": 935 + }, + { + "epoch": 0.06499097347590613, + "grad_norm": 4.935931500016278, + "learning_rate": 9.968041829862982e-06, + "loss": 0.5947, + "step": 936 + }, + { + "epoch": 0.06506040827662825, + "grad_norm": 5.587065543745426, + "learning_rate": 9.96791476950388e-06, + "loss": 0.5525, + "step": 937 + }, + { + "epoch": 0.06512984307735037, + "grad_norm": 3.8792570713408026, + "learning_rate": 9.967787457873271e-06, + "loss": 0.4604, + "step": 938 + }, + { + "epoch": 0.06519927787807249, + "grad_norm": 5.445166715053491, + "learning_rate": 9.967659894977593e-06, + "loss": 0.6125, + "step": 939 + }, + { + "epoch": 0.06526871267879461, + "grad_norm": 5.6259443769761805, + "learning_rate": 9.967532080823296e-06, + "loss": 0.3986, + "step": 940 + }, + { + "epoch": 0.06533814747951673, + "grad_norm": 5.069460638270786, + "learning_rate": 9.967404015416852e-06, + "loss": 0.4489, + "step": 941 + }, + { + "epoch": 0.06540758228023885, + "grad_norm": 3.889412231608481, + "learning_rate": 9.967275698764731e-06, + "loss": 0.2919, + "step": 942 + }, + { + "epoch": 0.06547701708096097, + "grad_norm": 6.258305167921691, + "learning_rate": 9.967147130873428e-06, + "loss": 0.679, + "step": 943 + }, + { + "epoch": 0.0655464518816831, + "grad_norm": 4.935509271523038, + "learning_rate": 9.967018311749442e-06, + "loss": 0.3051, + "step": 944 + }, + { + "epoch": 0.06561588668240523, + "grad_norm": 4.599972173731018, + "learning_rate": 9.966889241399292e-06, + "loss": 0.5479, + "step": 945 + }, + { + "epoch": 0.06568532148312735, + "grad_norm": 5.257587420403792, + "learning_rate": 9.966759919829504e-06, + "loss": 0.4262, + "step": 946 + }, + { + "epoch": 0.06575475628384947, + "grad_norm": 3.300766154660848, + "learning_rate": 9.966630347046618e-06, + "loss": 0.2644, + "step": 947 + }, + { + "epoch": 0.06582419108457159, + "grad_norm": 4.686274300288148, + "learning_rate": 9.96650052305719e-06, + "loss": 0.448, + "step": 948 + }, + { + "epoch": 0.06589362588529371, + 
"grad_norm": 5.923122079322992, + "learning_rate": 9.966370447867786e-06, + "loss": 0.9193, + "step": 949 + }, + { + "epoch": 0.06596306068601583, + "grad_norm": 4.929882151993082, + "learning_rate": 9.966240121484986e-06, + "loss": 0.3997, + "step": 950 + }, + { + "epoch": 0.06603249548673795, + "grad_norm": 5.271498456006508, + "learning_rate": 9.966109543915379e-06, + "loss": 0.4331, + "step": 951 + }, + { + "epoch": 0.06610193028746007, + "grad_norm": 4.840715873537683, + "learning_rate": 9.96597871516557e-06, + "loss": 0.5001, + "step": 952 + }, + { + "epoch": 0.0661713650881822, + "grad_norm": 5.10183667515691, + "learning_rate": 9.965847635242178e-06, + "loss": 0.507, + "step": 953 + }, + { + "epoch": 0.06624079988890431, + "grad_norm": 5.414356583087017, + "learning_rate": 9.96571630415183e-06, + "loss": 0.5994, + "step": 954 + }, + { + "epoch": 0.06631023468962643, + "grad_norm": 4.8547297969582015, + "learning_rate": 9.965584721901174e-06, + "loss": 0.4566, + "step": 955 + }, + { + "epoch": 0.06637966949034857, + "grad_norm": 5.432584003569413, + "learning_rate": 9.965452888496858e-06, + "loss": 0.6977, + "step": 956 + }, + { + "epoch": 0.06644910429107069, + "grad_norm": 5.769937502432863, + "learning_rate": 9.965320803945556e-06, + "loss": 0.6047, + "step": 957 + }, + { + "epoch": 0.06651853909179281, + "grad_norm": 3.5877495567883657, + "learning_rate": 9.965188468253946e-06, + "loss": 0.403, + "step": 958 + }, + { + "epoch": 0.06658797389251493, + "grad_norm": 4.3328059547176725, + "learning_rate": 9.965055881428722e-06, + "loss": 0.475, + "step": 959 + }, + { + "epoch": 0.06665740869323705, + "grad_norm": 5.070560454634842, + "learning_rate": 9.964923043476588e-06, + "loss": 0.9678, + "step": 960 + }, + { + "epoch": 0.06672684349395917, + "grad_norm": 4.199185495674789, + "learning_rate": 9.964789954404266e-06, + "loss": 0.4211, + "step": 961 + }, + { + "epoch": 0.0667962782946813, + "grad_norm": 5.000263599913347, + "learning_rate": 9.964656614218487e-06, + "loss": 0.6436, + "step": 962 + }, + { + "epoch": 0.06686571309540341, + "grad_norm": 5.248422134006128, + "learning_rate": 9.964523022925994e-06, + "loss": 0.3279, + "step": 963 + }, + { + "epoch": 0.06693514789612554, + "grad_norm": 4.63320215509228, + "learning_rate": 9.964389180533542e-06, + "loss": 0.559, + "step": 964 + }, + { + "epoch": 0.06700458269684766, + "grad_norm": 4.808317605138757, + "learning_rate": 9.964255087047905e-06, + "loss": 0.3749, + "step": 965 + }, + { + "epoch": 0.06707401749756978, + "grad_norm": 4.748395187083906, + "learning_rate": 9.964120742475862e-06, + "loss": 0.4182, + "step": 966 + }, + { + "epoch": 0.0671434522982919, + "grad_norm": 4.618444970402005, + "learning_rate": 9.963986146824212e-06, + "loss": 0.5677, + "step": 967 + }, + { + "epoch": 0.06721288709901403, + "grad_norm": 4.3391769180757915, + "learning_rate": 9.963851300099755e-06, + "loss": 0.4017, + "step": 968 + }, + { + "epoch": 0.06728232189973615, + "grad_norm": 4.5085605044239125, + "learning_rate": 9.96371620230932e-06, + "loss": 0.4572, + "step": 969 + }, + { + "epoch": 0.06735175670045827, + "grad_norm": 4.498879617709213, + "learning_rate": 9.963580853459736e-06, + "loss": 0.403, + "step": 970 + }, + { + "epoch": 0.0674211915011804, + "grad_norm": 6.085207824059735, + "learning_rate": 9.963445253557849e-06, + "loss": 0.3908, + "step": 971 + }, + { + "epoch": 0.06749062630190251, + "grad_norm": 6.119907990219186, + "learning_rate": 9.963309402610516e-06, + "loss": 0.9212, + "step": 972 + }, + { + "epoch": 
0.06756006110262464, + "grad_norm": 5.649927276214212, + "learning_rate": 9.963173300624611e-06, + "loss": 0.6599, + "step": 973 + }, + { + "epoch": 0.06762949590334676, + "grad_norm": 5.90755139735476, + "learning_rate": 9.963036947607015e-06, + "loss": 0.9232, + "step": 974 + }, + { + "epoch": 0.06769893070406888, + "grad_norm": 4.7525889773397205, + "learning_rate": 9.962900343564628e-06, + "loss": 0.5801, + "step": 975 + }, + { + "epoch": 0.067768365504791, + "grad_norm": 4.805003349380725, + "learning_rate": 9.962763488504358e-06, + "loss": 0.5882, + "step": 976 + }, + { + "epoch": 0.06783780030551312, + "grad_norm": 4.258515303354054, + "learning_rate": 9.962626382433126e-06, + "loss": 0.4745, + "step": 977 + }, + { + "epoch": 0.06790723510623524, + "grad_norm": 5.428223264656458, + "learning_rate": 9.962489025357866e-06, + "loss": 0.5739, + "step": 978 + }, + { + "epoch": 0.06797666990695737, + "grad_norm": 4.483352979682293, + "learning_rate": 9.962351417285527e-06, + "loss": 0.5748, + "step": 979 + }, + { + "epoch": 0.0680461047076795, + "grad_norm": 5.206350310861984, + "learning_rate": 9.962213558223069e-06, + "loss": 0.6655, + "step": 980 + }, + { + "epoch": 0.06811553950840162, + "grad_norm": 4.289404062911407, + "learning_rate": 9.962075448177464e-06, + "loss": 0.4287, + "step": 981 + }, + { + "epoch": 0.06818497430912374, + "grad_norm": 4.524158306888315, + "learning_rate": 9.961937087155697e-06, + "loss": 0.3906, + "step": 982 + }, + { + "epoch": 0.06825440910984586, + "grad_norm": 4.946946002356679, + "learning_rate": 9.96179847516477e-06, + "loss": 0.7494, + "step": 983 + }, + { + "epoch": 0.06832384391056798, + "grad_norm": 4.4642887682414045, + "learning_rate": 9.961659612211687e-06, + "loss": 0.3617, + "step": 984 + }, + { + "epoch": 0.0683932787112901, + "grad_norm": 5.500827820213797, + "learning_rate": 9.961520498303478e-06, + "loss": 0.8504, + "step": 985 + }, + { + "epoch": 0.06846271351201222, + "grad_norm": 5.136642431754584, + "learning_rate": 9.961381133447175e-06, + "loss": 0.5318, + "step": 986 + }, + { + "epoch": 0.06853214831273434, + "grad_norm": 5.52539288210066, + "learning_rate": 9.96124151764983e-06, + "loss": 0.6852, + "step": 987 + }, + { + "epoch": 0.06860158311345646, + "grad_norm": 4.428777704571156, + "learning_rate": 9.9611016509185e-06, + "loss": 0.3122, + "step": 988 + }, + { + "epoch": 0.06867101791417858, + "grad_norm": 5.2708835637065246, + "learning_rate": 9.960961533260266e-06, + "loss": 0.6886, + "step": 989 + }, + { + "epoch": 0.0687404527149007, + "grad_norm": 3.9931575066170146, + "learning_rate": 9.96082116468221e-06, + "loss": 0.4194, + "step": 990 + }, + { + "epoch": 0.06880988751562284, + "grad_norm": 6.832481155647511, + "learning_rate": 9.960680545191433e-06, + "loss": 0.9276, + "step": 991 + }, + { + "epoch": 0.06887932231634496, + "grad_norm": 5.7151866707559895, + "learning_rate": 9.960539674795047e-06, + "loss": 0.5466, + "step": 992 + }, + { + "epoch": 0.06894875711706708, + "grad_norm": 5.2636203243830995, + "learning_rate": 9.960398553500178e-06, + "loss": 0.6021, + "step": 993 + }, + { + "epoch": 0.0690181919177892, + "grad_norm": 3.9032444533172472, + "learning_rate": 9.960257181313963e-06, + "loss": 0.4737, + "step": 994 + }, + { + "epoch": 0.06908762671851132, + "grad_norm": 4.64332598624562, + "learning_rate": 9.960115558243553e-06, + "loss": 0.4973, + "step": 995 + }, + { + "epoch": 0.06915706151923344, + "grad_norm": 4.938738787736154, + "learning_rate": 9.95997368429611e-06, + "loss": 0.6196, + "step": 996 + }, + 
{ + "epoch": 0.06922649631995556, + "grad_norm": 3.041256838796851, + "learning_rate": 9.959831559478811e-06, + "loss": 0.122, + "step": 997 + }, + { + "epoch": 0.06929593112067768, + "grad_norm": 4.056889867993404, + "learning_rate": 9.959689183798843e-06, + "loss": 0.405, + "step": 998 + }, + { + "epoch": 0.0693653659213998, + "grad_norm": 5.030799742114715, + "learning_rate": 9.95954655726341e-06, + "loss": 0.4943, + "step": 999 + }, + { + "epoch": 0.06943480072212192, + "grad_norm": 5.411608639754731, + "learning_rate": 9.959403679879724e-06, + "loss": 0.6233, + "step": 1000 + }, + { + "epoch": 0.06950423552284404, + "grad_norm": 3.6928322172334913, + "learning_rate": 9.959260551655012e-06, + "loss": 0.328, + "step": 1001 + }, + { + "epoch": 0.06957367032356618, + "grad_norm": 4.59437761930444, + "learning_rate": 9.959117172596513e-06, + "loss": 0.3028, + "step": 1002 + }, + { + "epoch": 0.0696431051242883, + "grad_norm": 5.292544934425304, + "learning_rate": 9.958973542711479e-06, + "loss": 0.6095, + "step": 1003 + }, + { + "epoch": 0.06971253992501042, + "grad_norm": 4.085850586704651, + "learning_rate": 9.958829662007175e-06, + "loss": 0.512, + "step": 1004 + }, + { + "epoch": 0.06978197472573254, + "grad_norm": 5.21134476478826, + "learning_rate": 9.958685530490877e-06, + "loss": 0.4792, + "step": 1005 + }, + { + "epoch": 0.06985140952645466, + "grad_norm": 4.760194137837527, + "learning_rate": 9.958541148169876e-06, + "loss": 0.4877, + "step": 1006 + }, + { + "epoch": 0.06992084432717678, + "grad_norm": 5.168300952803078, + "learning_rate": 9.958396515051475e-06, + "loss": 0.5909, + "step": 1007 + }, + { + "epoch": 0.0699902791278989, + "grad_norm": 4.984849652097988, + "learning_rate": 9.95825163114299e-06, + "loss": 0.4613, + "step": 1008 + }, + { + "epoch": 0.07005971392862102, + "grad_norm": 4.76983048599734, + "learning_rate": 9.958106496451747e-06, + "loss": 0.663, + "step": 1009 + }, + { + "epoch": 0.07012914872934314, + "grad_norm": 5.510563683412331, + "learning_rate": 9.957961110985087e-06, + "loss": 0.7078, + "step": 1010 + }, + { + "epoch": 0.07019858353006526, + "grad_norm": 5.450244464940457, + "learning_rate": 9.957815474750367e-06, + "loss": 0.5241, + "step": 1011 + }, + { + "epoch": 0.07026801833078739, + "grad_norm": 7.293180876619849, + "learning_rate": 9.957669587754949e-06, + "loss": 1.0173, + "step": 1012 + }, + { + "epoch": 0.0703374531315095, + "grad_norm": 4.776561652084263, + "learning_rate": 9.957523450006213e-06, + "loss": 0.7421, + "step": 1013 + }, + { + "epoch": 0.07040688793223164, + "grad_norm": 4.386340269433218, + "learning_rate": 9.95737706151155e-06, + "loss": 0.4798, + "step": 1014 + }, + { + "epoch": 0.07047632273295376, + "grad_norm": 5.420171081285744, + "learning_rate": 9.957230422278366e-06, + "loss": 0.5972, + "step": 1015 + }, + { + "epoch": 0.07054575753367588, + "grad_norm": 4.709384652308194, + "learning_rate": 9.957083532314078e-06, + "loss": 0.4396, + "step": 1016 + }, + { + "epoch": 0.070615192334398, + "grad_norm": 4.594456997365588, + "learning_rate": 9.956936391626113e-06, + "loss": 0.4249, + "step": 1017 + }, + { + "epoch": 0.07068462713512012, + "grad_norm": 5.417561171225137, + "learning_rate": 9.956789000221913e-06, + "loss": 0.5429, + "step": 1018 + }, + { + "epoch": 0.07075406193584224, + "grad_norm": 5.458146186881246, + "learning_rate": 9.956641358108936e-06, + "loss": 0.6841, + "step": 1019 + }, + { + "epoch": 0.07082349673656436, + "grad_norm": 6.2015737535658095, + "learning_rate": 9.956493465294647e-06, + "loss": 
0.5538, + "step": 1020 + }, + { + "epoch": 0.07089293153728649, + "grad_norm": 4.2977119537080215, + "learning_rate": 9.956345321786529e-06, + "loss": 0.5605, + "step": 1021 + }, + { + "epoch": 0.0709623663380086, + "grad_norm": 4.307382266167953, + "learning_rate": 9.956196927592071e-06, + "loss": 0.3595, + "step": 1022 + }, + { + "epoch": 0.07103180113873073, + "grad_norm": 5.911114503193186, + "learning_rate": 9.956048282718782e-06, + "loss": 0.7524, + "step": 1023 + }, + { + "epoch": 0.07110123593945285, + "grad_norm": 4.29391786236575, + "learning_rate": 9.955899387174179e-06, + "loss": 0.4595, + "step": 1024 + }, + { + "epoch": 0.07117067074017498, + "grad_norm": 4.7298040688777485, + "learning_rate": 9.955750240965792e-06, + "loss": 0.4059, + "step": 1025 + }, + { + "epoch": 0.0712401055408971, + "grad_norm": 6.669446193839085, + "learning_rate": 9.955600844101166e-06, + "loss": 0.9742, + "step": 1026 + }, + { + "epoch": 0.07130954034161922, + "grad_norm": 3.9502386263885723, + "learning_rate": 9.955451196587857e-06, + "loss": 0.3498, + "step": 1027 + }, + { + "epoch": 0.07137897514234134, + "grad_norm": 4.074939068376195, + "learning_rate": 9.955301298433435e-06, + "loss": 0.4876, + "step": 1028 + }, + { + "epoch": 0.07144840994306347, + "grad_norm": 5.170854111381556, + "learning_rate": 9.955151149645479e-06, + "loss": 0.5523, + "step": 1029 + }, + { + "epoch": 0.07151784474378559, + "grad_norm": 4.187257809490731, + "learning_rate": 9.955000750231586e-06, + "loss": 0.589, + "step": 1030 + }, + { + "epoch": 0.0715872795445077, + "grad_norm": 5.16127155546752, + "learning_rate": 9.954850100199361e-06, + "loss": 0.5855, + "step": 1031 + }, + { + "epoch": 0.07165671434522983, + "grad_norm": 4.964547894200507, + "learning_rate": 9.954699199556426e-06, + "loss": 0.6431, + "step": 1032 + }, + { + "epoch": 0.07172614914595195, + "grad_norm": 4.2512883460282245, + "learning_rate": 9.954548048310411e-06, + "loss": 0.6329, + "step": 1033 + }, + { + "epoch": 0.07179558394667407, + "grad_norm": 3.8370726730536355, + "learning_rate": 9.954396646468962e-06, + "loss": 0.3079, + "step": 1034 + }, + { + "epoch": 0.07186501874739619, + "grad_norm": 4.975439229505246, + "learning_rate": 9.95424499403974e-06, + "loss": 0.5805, + "step": 1035 + }, + { + "epoch": 0.07193445354811831, + "grad_norm": 3.961435572419675, + "learning_rate": 9.954093091030408e-06, + "loss": 0.3708, + "step": 1036 + }, + { + "epoch": 0.07200388834884044, + "grad_norm": 5.925678724743737, + "learning_rate": 9.953940937448657e-06, + "loss": 0.6303, + "step": 1037 + }, + { + "epoch": 0.07207332314956257, + "grad_norm": 4.686833690892003, + "learning_rate": 9.953788533302176e-06, + "loss": 0.703, + "step": 1038 + }, + { + "epoch": 0.07214275795028469, + "grad_norm": 7.3699263513723885, + "learning_rate": 9.953635878598678e-06, + "loss": 0.717, + "step": 1039 + }, + { + "epoch": 0.0722121927510068, + "grad_norm": 4.452765672993177, + "learning_rate": 9.953482973345883e-06, + "loss": 0.5205, + "step": 1040 + }, + { + "epoch": 0.07228162755172893, + "grad_norm": 4.960614394841153, + "learning_rate": 9.953329817551524e-06, + "loss": 0.4376, + "step": 1041 + }, + { + "epoch": 0.07235106235245105, + "grad_norm": 4.589168201737519, + "learning_rate": 9.953176411223348e-06, + "loss": 0.5071, + "step": 1042 + }, + { + "epoch": 0.07242049715317317, + "grad_norm": 4.555086111883653, + "learning_rate": 9.953022754369115e-06, + "loss": 0.614, + "step": 1043 + }, + { + "epoch": 0.07248993195389529, + "grad_norm": 3.154823390915928, + 
"learning_rate": 9.952868846996594e-06, + "loss": 0.356, + "step": 1044 + }, + { + "epoch": 0.07255936675461741, + "grad_norm": 4.718248397747654, + "learning_rate": 9.952714689113572e-06, + "loss": 0.6445, + "step": 1045 + }, + { + "epoch": 0.07262880155533953, + "grad_norm": 4.581650676814528, + "learning_rate": 9.952560280727846e-06, + "loss": 0.5306, + "step": 1046 + }, + { + "epoch": 0.07269823635606165, + "grad_norm": 3.6644459171202963, + "learning_rate": 9.952405621847226e-06, + "loss": 0.2656, + "step": 1047 + }, + { + "epoch": 0.07276767115678379, + "grad_norm": 4.046147335314258, + "learning_rate": 9.95225071247953e-06, + "loss": 0.5103, + "step": 1048 + }, + { + "epoch": 0.07283710595750591, + "grad_norm": 5.355061234498256, + "learning_rate": 9.9520955526326e-06, + "loss": 0.7253, + "step": 1049 + }, + { + "epoch": 0.07290654075822803, + "grad_norm": 4.333199765343125, + "learning_rate": 9.951940142314281e-06, + "loss": 0.4539, + "step": 1050 + }, + { + "epoch": 0.07297597555895015, + "grad_norm": 4.070974835280359, + "learning_rate": 9.951784481532431e-06, + "loss": 0.2147, + "step": 1051 + }, + { + "epoch": 0.07304541035967227, + "grad_norm": 4.756207890337684, + "learning_rate": 9.951628570294927e-06, + "loss": 0.513, + "step": 1052 + }, + { + "epoch": 0.07311484516039439, + "grad_norm": 5.073521508665398, + "learning_rate": 9.951472408609652e-06, + "loss": 0.6551, + "step": 1053 + }, + { + "epoch": 0.07318427996111651, + "grad_norm": 4.719688942934508, + "learning_rate": 9.951315996484504e-06, + "loss": 0.5327, + "step": 1054 + }, + { + "epoch": 0.07325371476183863, + "grad_norm": 5.243207290798345, + "learning_rate": 9.9511593339274e-06, + "loss": 0.6048, + "step": 1055 + }, + { + "epoch": 0.07332314956256075, + "grad_norm": 3.944595594133608, + "learning_rate": 9.951002420946256e-06, + "loss": 0.3478, + "step": 1056 + }, + { + "epoch": 0.07339258436328287, + "grad_norm": 4.9035345496008125, + "learning_rate": 9.950845257549014e-06, + "loss": 0.5161, + "step": 1057 + }, + { + "epoch": 0.073462019164005, + "grad_norm": 6.162208538319855, + "learning_rate": 9.950687843743619e-06, + "loss": 0.4798, + "step": 1058 + }, + { + "epoch": 0.07353145396472713, + "grad_norm": 5.163469182764663, + "learning_rate": 9.950530179538036e-06, + "loss": 0.4314, + "step": 1059 + }, + { + "epoch": 0.07360088876544925, + "grad_norm": 4.908100864939862, + "learning_rate": 9.95037226494024e-06, + "loss": 0.4189, + "step": 1060 + }, + { + "epoch": 0.07367032356617137, + "grad_norm": 6.257497350136744, + "learning_rate": 9.950214099958212e-06, + "loss": 0.7988, + "step": 1061 + }, + { + "epoch": 0.07373975836689349, + "grad_norm": 4.509034251468079, + "learning_rate": 9.950055684599961e-06, + "loss": 0.4832, + "step": 1062 + }, + { + "epoch": 0.07380919316761561, + "grad_norm": 5.589379940824678, + "learning_rate": 9.949897018873493e-06, + "loss": 0.5365, + "step": 1063 + }, + { + "epoch": 0.07387862796833773, + "grad_norm": 5.1175159247520785, + "learning_rate": 9.949738102786836e-06, + "loss": 0.6402, + "step": 1064 + }, + { + "epoch": 0.07394806276905985, + "grad_norm": 5.40432800157665, + "learning_rate": 9.949578936348025e-06, + "loss": 0.7799, + "step": 1065 + }, + { + "epoch": 0.07401749756978197, + "grad_norm": 4.683268087825423, + "learning_rate": 9.949419519565113e-06, + "loss": 0.5868, + "step": 1066 + }, + { + "epoch": 0.0740869323705041, + "grad_norm": 4.636203716811295, + "learning_rate": 9.949259852446163e-06, + "loss": 0.4692, + "step": 1067 + }, + { + "epoch": 0.07415636717122621, 
+ "grad_norm": 5.595424409010696, + "learning_rate": 9.949099934999252e-06, + "loss": 0.7494, + "step": 1068 + }, + { + "epoch": 0.07422580197194834, + "grad_norm": 4.2697931512413865, + "learning_rate": 9.948939767232463e-06, + "loss": 0.5135, + "step": 1069 + }, + { + "epoch": 0.07429523677267046, + "grad_norm": 5.485031794407256, + "learning_rate": 9.948779349153902e-06, + "loss": 0.5376, + "step": 1070 + }, + { + "epoch": 0.07436467157339259, + "grad_norm": 4.133166228380315, + "learning_rate": 9.948618680771682e-06, + "loss": 0.5123, + "step": 1071 + }, + { + "epoch": 0.07443410637411471, + "grad_norm": 5.639850381119093, + "learning_rate": 9.948457762093929e-06, + "loss": 0.7541, + "step": 1072 + }, + { + "epoch": 0.07450354117483683, + "grad_norm": 4.5853703821480725, + "learning_rate": 9.948296593128779e-06, + "loss": 0.6902, + "step": 1073 + }, + { + "epoch": 0.07457297597555895, + "grad_norm": 5.284811095457621, + "learning_rate": 9.94813517388439e-06, + "loss": 0.6339, + "step": 1074 + }, + { + "epoch": 0.07464241077628107, + "grad_norm": 6.459569378219492, + "learning_rate": 9.947973504368923e-06, + "loss": 0.7999, + "step": 1075 + }, + { + "epoch": 0.0747118455770032, + "grad_norm": 4.195725082843013, + "learning_rate": 9.947811584590553e-06, + "loss": 0.3348, + "step": 1076 + }, + { + "epoch": 0.07478128037772531, + "grad_norm": 5.218944847957534, + "learning_rate": 9.947649414557472e-06, + "loss": 0.7348, + "step": 1077 + }, + { + "epoch": 0.07485071517844744, + "grad_norm": 5.523518245898167, + "learning_rate": 9.947486994277884e-06, + "loss": 0.9064, + "step": 1078 + }, + { + "epoch": 0.07492014997916956, + "grad_norm": 4.306449068834788, + "learning_rate": 9.947324323760002e-06, + "loss": 0.3249, + "step": 1079 + }, + { + "epoch": 0.07498958477989168, + "grad_norm": 7.200146486890911, + "learning_rate": 9.947161403012053e-06, + "loss": 0.6001, + "step": 1080 + }, + { + "epoch": 0.0750590195806138, + "grad_norm": 4.376901264600628, + "learning_rate": 9.94699823204228e-06, + "loss": 0.6751, + "step": 1081 + }, + { + "epoch": 0.07512845438133593, + "grad_norm": 5.522702194373978, + "learning_rate": 9.946834810858932e-06, + "loss": 0.8449, + "step": 1082 + }, + { + "epoch": 0.07519788918205805, + "grad_norm": 5.180661175604792, + "learning_rate": 9.946671139470278e-06, + "loss": 0.4985, + "step": 1083 + }, + { + "epoch": 0.07526732398278017, + "grad_norm": 5.190318398189906, + "learning_rate": 9.946507217884597e-06, + "loss": 0.6379, + "step": 1084 + }, + { + "epoch": 0.0753367587835023, + "grad_norm": 3.7854202097064378, + "learning_rate": 9.946343046110176e-06, + "loss": 0.4363, + "step": 1085 + }, + { + "epoch": 0.07540619358422442, + "grad_norm": 5.111445069774006, + "learning_rate": 9.946178624155322e-06, + "loss": 0.6724, + "step": 1086 + }, + { + "epoch": 0.07547562838494654, + "grad_norm": 3.3378423785766316, + "learning_rate": 9.946013952028351e-06, + "loss": 0.2684, + "step": 1087 + }, + { + "epoch": 0.07554506318566866, + "grad_norm": 3.755569890286403, + "learning_rate": 9.945849029737588e-06, + "loss": 0.3951, + "step": 1088 + }, + { + "epoch": 0.07561449798639078, + "grad_norm": 4.570432242900106, + "learning_rate": 9.94568385729138e-06, + "loss": 0.4258, + "step": 1089 + }, + { + "epoch": 0.0756839327871129, + "grad_norm": 4.479712289815535, + "learning_rate": 9.94551843469808e-06, + "loss": 0.5424, + "step": 1090 + }, + { + "epoch": 0.07575336758783502, + "grad_norm": 5.011187815002273, + "learning_rate": 9.945352761966052e-06, + "loss": 0.6591, + "step": 1091 
+ }, + { + "epoch": 0.07582280238855714, + "grad_norm": 3.569937393157766, + "learning_rate": 9.945186839103678e-06, + "loss": 0.2467, + "step": 1092 + }, + { + "epoch": 0.07589223718927926, + "grad_norm": 4.958672460824238, + "learning_rate": 9.94502066611935e-06, + "loss": 0.3021, + "step": 1093 + }, + { + "epoch": 0.0759616719900014, + "grad_norm": 4.634282130003978, + "learning_rate": 9.94485424302147e-06, + "loss": 0.5594, + "step": 1094 + }, + { + "epoch": 0.07603110679072352, + "grad_norm": 2.6541089030073906, + "learning_rate": 9.94468756981846e-06, + "loss": 0.1268, + "step": 1095 + }, + { + "epoch": 0.07610054159144564, + "grad_norm": 4.818636885587487, + "learning_rate": 9.944520646518747e-06, + "loss": 0.5112, + "step": 1096 + }, + { + "epoch": 0.07616997639216776, + "grad_norm": 5.535042864904982, + "learning_rate": 9.944353473130775e-06, + "loss": 0.6944, + "step": 1097 + }, + { + "epoch": 0.07623941119288988, + "grad_norm": 4.683414080245664, + "learning_rate": 9.944186049663e-06, + "loss": 0.4795, + "step": 1098 + }, + { + "epoch": 0.076308845993612, + "grad_norm": 5.978393538903464, + "learning_rate": 9.94401837612389e-06, + "loss": 0.9775, + "step": 1099 + }, + { + "epoch": 0.07637828079433412, + "grad_norm": 3.59374866009973, + "learning_rate": 9.943850452521924e-06, + "loss": 0.3919, + "step": 1100 + }, + { + "epoch": 0.07644771559505624, + "grad_norm": 4.147217453959496, + "learning_rate": 9.943682278865596e-06, + "loss": 0.4297, + "step": 1101 + }, + { + "epoch": 0.07651715039577836, + "grad_norm": 6.053280441092893, + "learning_rate": 9.943513855163413e-06, + "loss": 0.734, + "step": 1102 + }, + { + "epoch": 0.07658658519650048, + "grad_norm": 4.760608243644429, + "learning_rate": 9.943345181423893e-06, + "loss": 0.5406, + "step": 1103 + }, + { + "epoch": 0.0766560199972226, + "grad_norm": 4.4103870343030405, + "learning_rate": 9.943176257655567e-06, + "loss": 0.4588, + "step": 1104 + }, + { + "epoch": 0.07672545479794474, + "grad_norm": 3.9809512648571355, + "learning_rate": 9.94300708386698e-06, + "loss": 0.3698, + "step": 1105 + }, + { + "epoch": 0.07679488959866686, + "grad_norm": 4.961850212307463, + "learning_rate": 9.94283766006669e-06, + "loss": 0.4751, + "step": 1106 + }, + { + "epoch": 0.07686432439938898, + "grad_norm": 5.0068795355998414, + "learning_rate": 9.942667986263261e-06, + "loss": 0.3645, + "step": 1107 + }, + { + "epoch": 0.0769337592001111, + "grad_norm": 4.303559660098102, + "learning_rate": 9.94249806246528e-06, + "loss": 0.4047, + "step": 1108 + }, + { + "epoch": 0.07700319400083322, + "grad_norm": 6.892718668179105, + "learning_rate": 9.942327888681339e-06, + "loss": 0.8255, + "step": 1109 + }, + { + "epoch": 0.07707262880155534, + "grad_norm": 4.148080933294095, + "learning_rate": 9.942157464920047e-06, + "loss": 0.4765, + "step": 1110 + }, + { + "epoch": 0.07714206360227746, + "grad_norm": 7.266802126398682, + "learning_rate": 9.941986791190022e-06, + "loss": 0.6487, + "step": 1111 + }, + { + "epoch": 0.07721149840299958, + "grad_norm": 5.4413906685860205, + "learning_rate": 9.941815867499897e-06, + "loss": 0.5206, + "step": 1112 + }, + { + "epoch": 0.0772809332037217, + "grad_norm": 5.564253666958973, + "learning_rate": 9.94164469385832e-06, + "loss": 1.0296, + "step": 1113 + }, + { + "epoch": 0.07735036800444382, + "grad_norm": 4.911221303387646, + "learning_rate": 9.941473270273943e-06, + "loss": 0.6206, + "step": 1114 + }, + { + "epoch": 0.07741980280516594, + "grad_norm": 3.530029882723889, + "learning_rate": 9.941301596755442e-06, + 
"loss": 0.2404, + "step": 1115 + }, + { + "epoch": 0.07748923760588806, + "grad_norm": 4.582876379929587, + "learning_rate": 9.941129673311497e-06, + "loss": 0.4813, + "step": 1116 + }, + { + "epoch": 0.0775586724066102, + "grad_norm": 4.692535545190693, + "learning_rate": 9.940957499950803e-06, + "loss": 0.7174, + "step": 1117 + }, + { + "epoch": 0.07762810720733232, + "grad_norm": 5.2011626352892515, + "learning_rate": 9.940785076682071e-06, + "loss": 0.4203, + "step": 1118 + }, + { + "epoch": 0.07769754200805444, + "grad_norm": 4.954208635513522, + "learning_rate": 9.940612403514021e-06, + "loss": 0.8683, + "step": 1119 + }, + { + "epoch": 0.07776697680877656, + "grad_norm": 4.16477203321742, + "learning_rate": 9.940439480455386e-06, + "loss": 0.4857, + "step": 1120 + }, + { + "epoch": 0.07783641160949868, + "grad_norm": 3.840970906816745, + "learning_rate": 9.940266307514912e-06, + "loss": 0.5077, + "step": 1121 + }, + { + "epoch": 0.0779058464102208, + "grad_norm": 4.133212698035837, + "learning_rate": 9.94009288470136e-06, + "loss": 0.4221, + "step": 1122 + }, + { + "epoch": 0.07797528121094292, + "grad_norm": 5.725692012663836, + "learning_rate": 9.939919212023499e-06, + "loss": 0.8626, + "step": 1123 + }, + { + "epoch": 0.07804471601166504, + "grad_norm": 4.820829613993883, + "learning_rate": 9.939745289490115e-06, + "loss": 0.6214, + "step": 1124 + }, + { + "epoch": 0.07811415081238716, + "grad_norm": 4.128199766906215, + "learning_rate": 9.939571117110002e-06, + "loss": 0.3708, + "step": 1125 + }, + { + "epoch": 0.07818358561310929, + "grad_norm": 4.221331125263477, + "learning_rate": 9.939396694891973e-06, + "loss": 0.5674, + "step": 1126 + }, + { + "epoch": 0.0782530204138314, + "grad_norm": 4.875702544809185, + "learning_rate": 9.939222022844848e-06, + "loss": 0.5837, + "step": 1127 + }, + { + "epoch": 0.07832245521455354, + "grad_norm": 4.298183264117277, + "learning_rate": 9.939047100977462e-06, + "loss": 0.4794, + "step": 1128 + }, + { + "epoch": 0.07839189001527566, + "grad_norm": 5.807473231071572, + "learning_rate": 9.938871929298662e-06, + "loss": 0.8932, + "step": 1129 + }, + { + "epoch": 0.07846132481599778, + "grad_norm": 4.073882014124974, + "learning_rate": 9.938696507817308e-06, + "loss": 0.2716, + "step": 1130 + }, + { + "epoch": 0.0785307596167199, + "grad_norm": 5.124283166435162, + "learning_rate": 9.938520836542276e-06, + "loss": 0.5822, + "step": 1131 + }, + { + "epoch": 0.07860019441744202, + "grad_norm": 4.47390840162405, + "learning_rate": 9.938344915482447e-06, + "loss": 0.4653, + "step": 1132 + }, + { + "epoch": 0.07866962921816414, + "grad_norm": 4.0013654048100795, + "learning_rate": 9.93816874464672e-06, + "loss": 0.5078, + "step": 1133 + }, + { + "epoch": 0.07873906401888627, + "grad_norm": 9.37207396481838, + "learning_rate": 9.937992324044006e-06, + "loss": 0.5636, + "step": 1134 + }, + { + "epoch": 0.07880849881960839, + "grad_norm": 4.91779721369415, + "learning_rate": 9.937815653683228e-06, + "loss": 0.4476, + "step": 1135 + }, + { + "epoch": 0.0788779336203305, + "grad_norm": 4.73370765935559, + "learning_rate": 9.937638733573321e-06, + "loss": 0.4221, + "step": 1136 + }, + { + "epoch": 0.07894736842105263, + "grad_norm": 5.259205142365437, + "learning_rate": 9.937461563723235e-06, + "loss": 0.4458, + "step": 1137 + }, + { + "epoch": 0.07901680322177475, + "grad_norm": 5.020464343693775, + "learning_rate": 9.93728414414193e-06, + "loss": 0.6043, + "step": 1138 + }, + { + "epoch": 0.07908623802249687, + "grad_norm": 3.643359009351639, + 
"learning_rate": 9.937106474838381e-06, + "loss": 0.3207, + "step": 1139 + }, + { + "epoch": 0.079155672823219, + "grad_norm": 5.091720282289669, + "learning_rate": 9.936928555821571e-06, + "loss": 0.8982, + "step": 1140 + }, + { + "epoch": 0.07922510762394112, + "grad_norm": 6.745025353426768, + "learning_rate": 9.936750387100502e-06, + "loss": 0.6157, + "step": 1141 + }, + { + "epoch": 0.07929454242466324, + "grad_norm": 5.502258543117327, + "learning_rate": 9.936571968684185e-06, + "loss": 0.6599, + "step": 1142 + }, + { + "epoch": 0.07936397722538537, + "grad_norm": 3.696056151196825, + "learning_rate": 9.936393300581643e-06, + "loss": 0.467, + "step": 1143 + }, + { + "epoch": 0.07943341202610749, + "grad_norm": 4.523466954561496, + "learning_rate": 9.936214382801917e-06, + "loss": 0.3725, + "step": 1144 + }, + { + "epoch": 0.0795028468268296, + "grad_norm": 6.360211010176474, + "learning_rate": 9.93603521535405e-06, + "loss": 0.8839, + "step": 1145 + }, + { + "epoch": 0.07957228162755173, + "grad_norm": 4.115518547206297, + "learning_rate": 9.935855798247107e-06, + "loss": 0.6318, + "step": 1146 + }, + { + "epoch": 0.07964171642827385, + "grad_norm": 5.309615110085079, + "learning_rate": 9.935676131490166e-06, + "loss": 0.7017, + "step": 1147 + }, + { + "epoch": 0.07971115122899597, + "grad_norm": 5.060261671222787, + "learning_rate": 9.935496215092308e-06, + "loss": 0.6353, + "step": 1148 + }, + { + "epoch": 0.07978058602971809, + "grad_norm": 5.54925450882981, + "learning_rate": 9.935316049062639e-06, + "loss": 0.6805, + "step": 1149 + }, + { + "epoch": 0.07985002083044021, + "grad_norm": 3.1926776436690436, + "learning_rate": 9.935135633410267e-06, + "loss": 0.288, + "step": 1150 + }, + { + "epoch": 0.07991945563116234, + "grad_norm": 4.360632216589036, + "learning_rate": 9.934954968144322e-06, + "loss": 0.6064, + "step": 1151 + }, + { + "epoch": 0.07998889043188447, + "grad_norm": 4.716966721412772, + "learning_rate": 9.934774053273936e-06, + "loss": 0.1244, + "step": 1152 + }, + { + "epoch": 0.08005832523260659, + "grad_norm": 4.342857660779656, + "learning_rate": 9.934592888808263e-06, + "loss": 0.7548, + "step": 1153 + }, + { + "epoch": 0.08012776003332871, + "grad_norm": 4.507632700787169, + "learning_rate": 9.934411474756467e-06, + "loss": 0.7116, + "step": 1154 + }, + { + "epoch": 0.08019719483405083, + "grad_norm": 4.346453634404164, + "learning_rate": 9.93422981112772e-06, + "loss": 0.456, + "step": 1155 + }, + { + "epoch": 0.08026662963477295, + "grad_norm": 4.49186851032777, + "learning_rate": 9.934047897931213e-06, + "loss": 0.5934, + "step": 1156 + }, + { + "epoch": 0.08033606443549507, + "grad_norm": 5.759450283585912, + "learning_rate": 9.933865735176149e-06, + "loss": 0.7882, + "step": 1157 + }, + { + "epoch": 0.08040549923621719, + "grad_norm": 2.8159045510153993, + "learning_rate": 9.933683322871737e-06, + "loss": 0.2394, + "step": 1158 + }, + { + "epoch": 0.08047493403693931, + "grad_norm": 4.852447298704516, + "learning_rate": 9.933500661027206e-06, + "loss": 0.5212, + "step": 1159 + }, + { + "epoch": 0.08054436883766143, + "grad_norm": 4.696042661976838, + "learning_rate": 9.933317749651794e-06, + "loss": 0.5441, + "step": 1160 + }, + { + "epoch": 0.08061380363838355, + "grad_norm": 5.711331618036688, + "learning_rate": 9.933134588754754e-06, + "loss": 0.6229, + "step": 1161 + }, + { + "epoch": 0.08068323843910567, + "grad_norm": 4.654595481876398, + "learning_rate": 9.932951178345347e-06, + "loss": 0.5526, + "step": 1162 + }, + { + "epoch": 
0.08075267323982781, + "grad_norm": 4.226448844502341, + "learning_rate": 9.932767518432852e-06, + "loss": 0.3289, + "step": 1163 + }, + { + "epoch": 0.08082210804054993, + "grad_norm": 3.9605988151296385, + "learning_rate": 9.932583609026558e-06, + "loss": 0.3378, + "step": 1164 + }, + { + "epoch": 0.08089154284127205, + "grad_norm": 4.402353041515778, + "learning_rate": 9.932399450135765e-06, + "loss": 0.4658, + "step": 1165 + }, + { + "epoch": 0.08096097764199417, + "grad_norm": 5.8049205495542875, + "learning_rate": 9.932215041769792e-06, + "loss": 0.696, + "step": 1166 + }, + { + "epoch": 0.08103041244271629, + "grad_norm": 5.431009987685382, + "learning_rate": 9.932030383937963e-06, + "loss": 0.7269, + "step": 1167 + }, + { + "epoch": 0.08109984724343841, + "grad_norm": 4.635621321944196, + "learning_rate": 9.931845476649616e-06, + "loss": 0.3618, + "step": 1168 + }, + { + "epoch": 0.08116928204416053, + "grad_norm": 3.8109762867769903, + "learning_rate": 9.931660319914108e-06, + "loss": 0.3818, + "step": 1169 + }, + { + "epoch": 0.08123871684488265, + "grad_norm": 4.435645286892863, + "learning_rate": 9.931474913740799e-06, + "loss": 0.3451, + "step": 1170 + }, + { + "epoch": 0.08130815164560477, + "grad_norm": 3.7349159513651795, + "learning_rate": 9.93128925813907e-06, + "loss": 0.2724, + "step": 1171 + }, + { + "epoch": 0.0813775864463269, + "grad_norm": 4.017916522379333, + "learning_rate": 9.93110335311831e-06, + "loss": 0.2894, + "step": 1172 + }, + { + "epoch": 0.08144702124704901, + "grad_norm": 4.853429578781107, + "learning_rate": 9.930917198687924e-06, + "loss": 0.4605, + "step": 1173 + }, + { + "epoch": 0.08151645604777115, + "grad_norm": 5.338552505513365, + "learning_rate": 9.930730794857325e-06, + "loss": 0.9494, + "step": 1174 + }, + { + "epoch": 0.08158589084849327, + "grad_norm": 5.15412834138613, + "learning_rate": 9.930544141635943e-06, + "loss": 0.491, + "step": 1175 + }, + { + "epoch": 0.08165532564921539, + "grad_norm": 3.7818909823758045, + "learning_rate": 9.930357239033214e-06, + "loss": 0.4604, + "step": 1176 + }, + { + "epoch": 0.08172476044993751, + "grad_norm": 4.758105948067564, + "learning_rate": 9.9301700870586e-06, + "loss": 0.6408, + "step": 1177 + }, + { + "epoch": 0.08179419525065963, + "grad_norm": 3.8783143730941156, + "learning_rate": 9.929982685721557e-06, + "loss": 0.2288, + "step": 1178 + }, + { + "epoch": 0.08186363005138175, + "grad_norm": 5.901015286597707, + "learning_rate": 9.929795035031572e-06, + "loss": 0.7598, + "step": 1179 + }, + { + "epoch": 0.08193306485210387, + "grad_norm": 5.00760882401945, + "learning_rate": 9.929607134998132e-06, + "loss": 0.5429, + "step": 1180 + }, + { + "epoch": 0.082002499652826, + "grad_norm": 4.79910973031596, + "learning_rate": 9.929418985630739e-06, + "loss": 0.4473, + "step": 1181 + }, + { + "epoch": 0.08207193445354811, + "grad_norm": 4.055070365184233, + "learning_rate": 9.929230586938914e-06, + "loss": 0.3835, + "step": 1182 + }, + { + "epoch": 0.08214136925427024, + "grad_norm": 5.303129382891074, + "learning_rate": 9.929041938932183e-06, + "loss": 0.7743, + "step": 1183 + }, + { + "epoch": 0.08221080405499236, + "grad_norm": 4.151930400806376, + "learning_rate": 9.928853041620088e-06, + "loss": 0.3956, + "step": 1184 + }, + { + "epoch": 0.08228023885571449, + "grad_norm": 5.0833415244397955, + "learning_rate": 9.928663895012186e-06, + "loss": 0.4021, + "step": 1185 + }, + { + "epoch": 0.08234967365643661, + "grad_norm": 4.533648861832096, + "learning_rate": 9.928474499118037e-06, + "loss": 
0.4614, + "step": 1186 + }, + { + "epoch": 0.08241910845715873, + "grad_norm": 4.863303884673229, + "learning_rate": 9.928284853947227e-06, + "loss": 0.7539, + "step": 1187 + }, + { + "epoch": 0.08248854325788085, + "grad_norm": 4.235326812692163, + "learning_rate": 9.928094959509347e-06, + "loss": 0.5374, + "step": 1188 + }, + { + "epoch": 0.08255797805860297, + "grad_norm": 5.28669080191623, + "learning_rate": 9.927904815813997e-06, + "loss": 0.6986, + "step": 1189 + }, + { + "epoch": 0.0826274128593251, + "grad_norm": 4.561071694807699, + "learning_rate": 9.927714422870802e-06, + "loss": 0.4995, + "step": 1190 + }, + { + "epoch": 0.08269684766004722, + "grad_norm": 4.234473595771569, + "learning_rate": 9.927523780689385e-06, + "loss": 0.3524, + "step": 1191 + }, + { + "epoch": 0.08276628246076934, + "grad_norm": 4.3233534021613265, + "learning_rate": 9.927332889279391e-06, + "loss": 0.6914, + "step": 1192 + }, + { + "epoch": 0.08283571726149146, + "grad_norm": 5.27325821916644, + "learning_rate": 9.927141748650478e-06, + "loss": 0.463, + "step": 1193 + }, + { + "epoch": 0.08290515206221358, + "grad_norm": 5.806077967321281, + "learning_rate": 9.926950358812308e-06, + "loss": 0.7533, + "step": 1194 + }, + { + "epoch": 0.0829745868629357, + "grad_norm": 5.159543358673594, + "learning_rate": 9.926758719774563e-06, + "loss": 0.7184, + "step": 1195 + }, + { + "epoch": 0.08304402166365782, + "grad_norm": 4.639061796688853, + "learning_rate": 9.92656683154694e-06, + "loss": 0.7285, + "step": 1196 + }, + { + "epoch": 0.08311345646437995, + "grad_norm": 4.32519579020212, + "learning_rate": 9.926374694139139e-06, + "loss": 0.4814, + "step": 1197 + }, + { + "epoch": 0.08318289126510207, + "grad_norm": 10.13789421276774, + "learning_rate": 9.926182307560881e-06, + "loss": 0.6947, + "step": 1198 + }, + { + "epoch": 0.0832523260658242, + "grad_norm": 5.295061374579348, + "learning_rate": 9.925989671821898e-06, + "loss": 0.6968, + "step": 1199 + }, + { + "epoch": 0.08332176086654632, + "grad_norm": 3.862797799062758, + "learning_rate": 9.92579678693193e-06, + "loss": 0.4694, + "step": 1200 + }, + { + "epoch": 0.08339119566726844, + "grad_norm": 4.7552534411439, + "learning_rate": 9.925603652900734e-06, + "loss": 0.3565, + "step": 1201 + }, + { + "epoch": 0.08346063046799056, + "grad_norm": 4.323811257444635, + "learning_rate": 9.92541026973808e-06, + "loss": 0.5622, + "step": 1202 + }, + { + "epoch": 0.08353006526871268, + "grad_norm": 6.211969349580078, + "learning_rate": 9.925216637453747e-06, + "loss": 0.888, + "step": 1203 + }, + { + "epoch": 0.0835995000694348, + "grad_norm": 6.270521654628784, + "learning_rate": 9.925022756057532e-06, + "loss": 0.7173, + "step": 1204 + }, + { + "epoch": 0.08366893487015692, + "grad_norm": 6.2410427118124, + "learning_rate": 9.924828625559238e-06, + "loss": 0.6177, + "step": 1205 + }, + { + "epoch": 0.08373836967087904, + "grad_norm": 5.295255252914425, + "learning_rate": 9.924634245968682e-06, + "loss": 0.563, + "step": 1206 + }, + { + "epoch": 0.08380780447160116, + "grad_norm": 5.005831086986683, + "learning_rate": 9.924439617295702e-06, + "loss": 0.4043, + "step": 1207 + }, + { + "epoch": 0.0838772392723233, + "grad_norm": 4.531180198877733, + "learning_rate": 9.924244739550137e-06, + "loss": 0.7454, + "step": 1208 + }, + { + "epoch": 0.08394667407304542, + "grad_norm": 5.572767558579299, + "learning_rate": 9.924049612741846e-06, + "loss": 0.5127, + "step": 1209 + }, + { + "epoch": 0.08401610887376754, + "grad_norm": 4.966498900014144, + "learning_rate": 
9.923854236880698e-06, + "loss": 0.665, + "step": 1210 + }, + { + "epoch": 0.08408554367448966, + "grad_norm": 4.80091328092973, + "learning_rate": 9.923658611976573e-06, + "loss": 0.286, + "step": 1211 + }, + { + "epoch": 0.08415497847521178, + "grad_norm": 4.860568416232989, + "learning_rate": 9.923462738039368e-06, + "loss": 0.735, + "step": 1212 + }, + { + "epoch": 0.0842244132759339, + "grad_norm": 4.324215783686278, + "learning_rate": 9.923266615078988e-06, + "loss": 0.3958, + "step": 1213 + }, + { + "epoch": 0.08429384807665602, + "grad_norm": 4.912697561852342, + "learning_rate": 9.923070243105355e-06, + "loss": 0.4456, + "step": 1214 + }, + { + "epoch": 0.08436328287737814, + "grad_norm": 4.757053456316674, + "learning_rate": 9.922873622128399e-06, + "loss": 0.6899, + "step": 1215 + }, + { + "epoch": 0.08443271767810026, + "grad_norm": 5.518080019572764, + "learning_rate": 9.922676752158064e-06, + "loss": 0.7354, + "step": 1216 + }, + { + "epoch": 0.08450215247882238, + "grad_norm": 4.254881855936205, + "learning_rate": 9.922479633204312e-06, + "loss": 0.5505, + "step": 1217 + }, + { + "epoch": 0.0845715872795445, + "grad_norm": 4.579494108975009, + "learning_rate": 9.92228226527711e-06, + "loss": 0.4096, + "step": 1218 + }, + { + "epoch": 0.08464102208026662, + "grad_norm": 4.142602362561205, + "learning_rate": 9.92208464838644e-06, + "loss": 0.4461, + "step": 1219 + }, + { + "epoch": 0.08471045688098876, + "grad_norm": 4.15269701171399, + "learning_rate": 9.921886782542299e-06, + "loss": 0.4628, + "step": 1220 + }, + { + "epoch": 0.08477989168171088, + "grad_norm": 5.198638042777074, + "learning_rate": 9.921688667754694e-06, + "loss": 0.5251, + "step": 1221 + }, + { + "epoch": 0.084849326482433, + "grad_norm": 4.690806376901492, + "learning_rate": 9.921490304033646e-06, + "loss": 0.6629, + "step": 1222 + }, + { + "epoch": 0.08491876128315512, + "grad_norm": 4.604497296462114, + "learning_rate": 9.921291691389186e-06, + "loss": 0.4228, + "step": 1223 + }, + { + "epoch": 0.08498819608387724, + "grad_norm": 3.564469973159241, + "learning_rate": 9.92109282983136e-06, + "loss": 0.3682, + "step": 1224 + }, + { + "epoch": 0.08505763088459936, + "grad_norm": 4.320918600116759, + "learning_rate": 9.92089371937023e-06, + "loss": 0.3814, + "step": 1225 + }, + { + "epoch": 0.08512706568532148, + "grad_norm": 5.6883056591861045, + "learning_rate": 9.920694360015864e-06, + "loss": 0.5792, + "step": 1226 + }, + { + "epoch": 0.0851965004860436, + "grad_norm": 5.157495893261966, + "learning_rate": 9.920494751778344e-06, + "loss": 0.5797, + "step": 1227 + }, + { + "epoch": 0.08526593528676572, + "grad_norm": 3.9554364424016972, + "learning_rate": 9.920294894667767e-06, + "loss": 0.5692, + "step": 1228 + }, + { + "epoch": 0.08533537008748784, + "grad_norm": 4.85975284832723, + "learning_rate": 9.920094788694243e-06, + "loss": 0.6154, + "step": 1229 + }, + { + "epoch": 0.08540480488820996, + "grad_norm": 11.290487963439533, + "learning_rate": 9.919894433867892e-06, + "loss": 0.3245, + "step": 1230 + }, + { + "epoch": 0.0854742396889321, + "grad_norm": 5.025840201185536, + "learning_rate": 9.919693830198847e-06, + "loss": 0.4431, + "step": 1231 + }, + { + "epoch": 0.08554367448965422, + "grad_norm": 4.649628926064188, + "learning_rate": 9.919492977697256e-06, + "loss": 0.4839, + "step": 1232 + }, + { + "epoch": 0.08561310929037634, + "grad_norm": 3.909887107218836, + "learning_rate": 9.919291876373276e-06, + "loss": 0.3956, + "step": 1233 + }, + { + "epoch": 0.08568254409109846, + "grad_norm": 
5.579771580702045, + "learning_rate": 9.91909052623708e-06, + "loss": 0.8172, + "step": 1234 + }, + { + "epoch": 0.08575197889182058, + "grad_norm": 4.25543454312069, + "learning_rate": 9.918888927298851e-06, + "loss": 0.5459, + "step": 1235 + }, + { + "epoch": 0.0858214136925427, + "grad_norm": 3.578453570913025, + "learning_rate": 9.918687079568788e-06, + "loss": 0.4142, + "step": 1236 + }, + { + "epoch": 0.08589084849326482, + "grad_norm": 5.0251449494192375, + "learning_rate": 9.918484983057095e-06, + "loss": 0.6628, + "step": 1237 + }, + { + "epoch": 0.08596028329398694, + "grad_norm": 4.000402175452813, + "learning_rate": 9.918282637774e-06, + "loss": 0.3298, + "step": 1238 + }, + { + "epoch": 0.08602971809470907, + "grad_norm": 5.565048302466547, + "learning_rate": 9.918080043729734e-06, + "loss": 0.5422, + "step": 1239 + }, + { + "epoch": 0.08609915289543119, + "grad_norm": 2.866303794001422, + "learning_rate": 9.917877200934544e-06, + "loss": 0.1285, + "step": 1240 + }, + { + "epoch": 0.0861685876961533, + "grad_norm": 3.2177502069660804, + "learning_rate": 9.917674109398692e-06, + "loss": 0.3212, + "step": 1241 + }, + { + "epoch": 0.08623802249687543, + "grad_norm": 4.29930280407711, + "learning_rate": 9.917470769132446e-06, + "loss": 0.396, + "step": 1242 + }, + { + "epoch": 0.08630745729759756, + "grad_norm": 3.834965187966803, + "learning_rate": 9.917267180146094e-06, + "loss": 0.3565, + "step": 1243 + }, + { + "epoch": 0.08637689209831968, + "grad_norm": 4.444698731519701, + "learning_rate": 9.917063342449931e-06, + "loss": 0.3961, + "step": 1244 + }, + { + "epoch": 0.0864463268990418, + "grad_norm": 5.439025184387969, + "learning_rate": 9.91685925605427e-06, + "loss": 0.7406, + "step": 1245 + }, + { + "epoch": 0.08651576169976392, + "grad_norm": 3.989220358625172, + "learning_rate": 9.91665492096943e-06, + "loss": 0.4845, + "step": 1246 + }, + { + "epoch": 0.08658519650048604, + "grad_norm": 3.943214901159205, + "learning_rate": 9.91645033720575e-06, + "loss": 0.4204, + "step": 1247 + }, + { + "epoch": 0.08665463130120817, + "grad_norm": 4.669685128962917, + "learning_rate": 9.916245504773572e-06, + "loss": 0.5363, + "step": 1248 + }, + { + "epoch": 0.08672406610193029, + "grad_norm": 3.7840896969144455, + "learning_rate": 9.916040423683262e-06, + "loss": 0.3577, + "step": 1249 + }, + { + "epoch": 0.0867935009026524, + "grad_norm": 4.9497348902264235, + "learning_rate": 9.91583509394519e-06, + "loss": 0.5895, + "step": 1250 + }, + { + "epoch": 0.08686293570337453, + "grad_norm": 4.606669931296728, + "learning_rate": 9.91562951556974e-06, + "loss": 0.3642, + "step": 1251 + }, + { + "epoch": 0.08693237050409665, + "grad_norm": 4.669192355729503, + "learning_rate": 9.915423688567314e-06, + "loss": 0.6443, + "step": 1252 + }, + { + "epoch": 0.08700180530481877, + "grad_norm": 5.237051267799775, + "learning_rate": 9.915217612948317e-06, + "loss": 0.4671, + "step": 1253 + }, + { + "epoch": 0.0870712401055409, + "grad_norm": 4.155042572238659, + "learning_rate": 9.915011288723178e-06, + "loss": 0.39, + "step": 1254 + }, + { + "epoch": 0.08714067490626302, + "grad_norm": 6.347686450090276, + "learning_rate": 9.914804715902328e-06, + "loss": 0.5465, + "step": 1255 + }, + { + "epoch": 0.08721010970698514, + "grad_norm": 4.959711362763308, + "learning_rate": 9.914597894496218e-06, + "loss": 0.8028, + "step": 1256 + }, + { + "epoch": 0.08727954450770727, + "grad_norm": 4.018123637413376, + "learning_rate": 9.914390824515307e-06, + "loss": 0.3598, + "step": 1257 + }, + { + "epoch": 
0.08734897930842939, + "grad_norm": 4.2754949293590965, + "learning_rate": 9.91418350597007e-06, + "loss": 0.527, + "step": 1258 + }, + { + "epoch": 0.08741841410915151, + "grad_norm": 3.653540912737491, + "learning_rate": 9.91397593887099e-06, + "loss": 0.3612, + "step": 1259 + }, + { + "epoch": 0.08748784890987363, + "grad_norm": 5.655544159304154, + "learning_rate": 9.91376812322857e-06, + "loss": 0.6405, + "step": 1260 + }, + { + "epoch": 0.08755728371059575, + "grad_norm": 4.323575821022961, + "learning_rate": 9.913560059053318e-06, + "loss": 0.5584, + "step": 1261 + }, + { + "epoch": 0.08762671851131787, + "grad_norm": 4.641284609826128, + "learning_rate": 9.913351746355758e-06, + "loss": 0.4738, + "step": 1262 + }, + { + "epoch": 0.08769615331203999, + "grad_norm": 5.671778146857839, + "learning_rate": 9.913143185146426e-06, + "loss": 0.7733, + "step": 1263 + }, + { + "epoch": 0.08776558811276211, + "grad_norm": 4.17883334991228, + "learning_rate": 9.91293437543587e-06, + "loss": 0.5649, + "step": 1264 + }, + { + "epoch": 0.08783502291348423, + "grad_norm": 4.772101358865596, + "learning_rate": 9.912725317234655e-06, + "loss": 0.6759, + "step": 1265 + }, + { + "epoch": 0.08790445771420637, + "grad_norm": 3.574186501565113, + "learning_rate": 9.912516010553352e-06, + "loss": 0.4596, + "step": 1266 + }, + { + "epoch": 0.08797389251492849, + "grad_norm": 3.0729789014084137, + "learning_rate": 9.912306455402548e-06, + "loss": 0.1449, + "step": 1267 + }, + { + "epoch": 0.08804332731565061, + "grad_norm": 4.594586914437889, + "learning_rate": 9.912096651792842e-06, + "loss": 0.4047, + "step": 1268 + }, + { + "epoch": 0.08811276211637273, + "grad_norm": 4.148315010228122, + "learning_rate": 9.911886599734845e-06, + "loss": 0.5129, + "step": 1269 + }, + { + "epoch": 0.08818219691709485, + "grad_norm": 4.118703464794777, + "learning_rate": 9.911676299239184e-06, + "loss": 0.326, + "step": 1270 + }, + { + "epoch": 0.08825163171781697, + "grad_norm": 4.939462868697017, + "learning_rate": 9.911465750316492e-06, + "loss": 0.6523, + "step": 1271 + }, + { + "epoch": 0.08832106651853909, + "grad_norm": 4.493972667108219, + "learning_rate": 9.91125495297742e-06, + "loss": 0.3893, + "step": 1272 + }, + { + "epoch": 0.08839050131926121, + "grad_norm": 5.53983544924788, + "learning_rate": 9.91104390723263e-06, + "loss": 0.6655, + "step": 1273 + }, + { + "epoch": 0.08845993611998333, + "grad_norm": 5.434130743391432, + "learning_rate": 9.910832613092797e-06, + "loss": 0.6191, + "step": 1274 + }, + { + "epoch": 0.08852937092070545, + "grad_norm": 4.343286838992886, + "learning_rate": 9.910621070568607e-06, + "loss": 0.337, + "step": 1275 + }, + { + "epoch": 0.08859880572142757, + "grad_norm": 4.020449921602951, + "learning_rate": 9.91040927967076e-06, + "loss": 0.4356, + "step": 1276 + }, + { + "epoch": 0.08866824052214971, + "grad_norm": 4.336008748857587, + "learning_rate": 9.910197240409969e-06, + "loss": 0.5097, + "step": 1277 + }, + { + "epoch": 0.08873767532287183, + "grad_norm": 5.388843466595745, + "learning_rate": 9.909984952796959e-06, + "loss": 0.8222, + "step": 1278 + }, + { + "epoch": 0.08880711012359395, + "grad_norm": 4.074947137892525, + "learning_rate": 9.909772416842463e-06, + "loss": 0.4551, + "step": 1279 + }, + { + "epoch": 0.08887654492431607, + "grad_norm": 4.69379941576267, + "learning_rate": 9.909559632557236e-06, + "loss": 0.3801, + "step": 1280 + }, + { + "epoch": 0.08894597972503819, + "grad_norm": 4.753870459916141, + "learning_rate": 9.909346599952037e-06, + "loss": 0.7037, 
+ "step": 1281 + }, + { + "epoch": 0.08901541452576031, + "grad_norm": 4.650182234514726, + "learning_rate": 9.909133319037643e-06, + "loss": 0.5504, + "step": 1282 + }, + { + "epoch": 0.08908484932648243, + "grad_norm": 4.184575958201373, + "learning_rate": 9.90891978982484e-06, + "loss": 0.5578, + "step": 1283 + }, + { + "epoch": 0.08915428412720455, + "grad_norm": 4.795798844662197, + "learning_rate": 9.908706012324431e-06, + "loss": 0.7351, + "step": 1284 + }, + { + "epoch": 0.08922371892792667, + "grad_norm": 4.285945627437185, + "learning_rate": 9.908491986547224e-06, + "loss": 0.5288, + "step": 1285 + }, + { + "epoch": 0.0892931537286488, + "grad_norm": 5.304631654318846, + "learning_rate": 9.908277712504049e-06, + "loss": 0.6029, + "step": 1286 + }, + { + "epoch": 0.08936258852937091, + "grad_norm": 3.6115709814775716, + "learning_rate": 9.908063190205739e-06, + "loss": 0.4159, + "step": 1287 + }, + { + "epoch": 0.08943202333009305, + "grad_norm": 6.516016620520026, + "learning_rate": 9.907848419663148e-06, + "loss": 0.5863, + "step": 1288 + }, + { + "epoch": 0.08950145813081517, + "grad_norm": 4.480860582873414, + "learning_rate": 9.907633400887138e-06, + "loss": 0.5386, + "step": 1289 + }, + { + "epoch": 0.08957089293153729, + "grad_norm": 4.394487387306154, + "learning_rate": 9.907418133888582e-06, + "loss": 0.5631, + "step": 1290 + }, + { + "epoch": 0.08964032773225941, + "grad_norm": 5.105504320990119, + "learning_rate": 9.907202618678371e-06, + "loss": 0.5023, + "step": 1291 + }, + { + "epoch": 0.08970976253298153, + "grad_norm": 4.9202791224563205, + "learning_rate": 9.906986855267405e-06, + "loss": 0.7332, + "step": 1292 + }, + { + "epoch": 0.08977919733370365, + "grad_norm": 4.536169532580502, + "learning_rate": 9.906770843666595e-06, + "loss": 0.4669, + "step": 1293 + }, + { + "epoch": 0.08984863213442577, + "grad_norm": 3.5264698946332196, + "learning_rate": 9.90655458388687e-06, + "loss": 0.4169, + "step": 1294 + }, + { + "epoch": 0.0899180669351479, + "grad_norm": 5.059279789467382, + "learning_rate": 9.906338075939166e-06, + "loss": 0.5981, + "step": 1295 + }, + { + "epoch": 0.08998750173587002, + "grad_norm": 4.496010782071867, + "learning_rate": 9.906121319834432e-06, + "loss": 0.6819, + "step": 1296 + }, + { + "epoch": 0.09005693653659214, + "grad_norm": 4.399011002827967, + "learning_rate": 9.905904315583636e-06, + "loss": 0.6472, + "step": 1297 + }, + { + "epoch": 0.09012637133731426, + "grad_norm": 4.527553330493005, + "learning_rate": 9.90568706319775e-06, + "loss": 0.4522, + "step": 1298 + }, + { + "epoch": 0.09019580613803638, + "grad_norm": 4.879407890269408, + "learning_rate": 9.905469562687763e-06, + "loss": 0.7609, + "step": 1299 + }, + { + "epoch": 0.09026524093875851, + "grad_norm": 3.866646781536308, + "learning_rate": 9.905251814064677e-06, + "loss": 0.372, + "step": 1300 + }, + { + "epoch": 0.09033467573948063, + "grad_norm": 3.8932277886797735, + "learning_rate": 9.905033817339504e-06, + "loss": 0.5858, + "step": 1301 + }, + { + "epoch": 0.09040411054020275, + "grad_norm": 5.20472257612294, + "learning_rate": 9.904815572523272e-06, + "loss": 0.3693, + "step": 1302 + }, + { + "epoch": 0.09047354534092487, + "grad_norm": 3.6715186269153683, + "learning_rate": 9.904597079627018e-06, + "loss": 0.5368, + "step": 1303 + }, + { + "epoch": 0.090542980141647, + "grad_norm": 3.115208846097869, + "learning_rate": 9.904378338661795e-06, + "loss": 0.1586, + "step": 1304 + }, + { + "epoch": 0.09061241494236912, + "grad_norm": 4.457582419352622, + "learning_rate": 
9.904159349638665e-06, + "loss": 0.4126, + "step": 1305 + }, + { + "epoch": 0.09068184974309124, + "grad_norm": 4.636940100048691, + "learning_rate": 9.903940112568701e-06, + "loss": 0.7778, + "step": 1306 + }, + { + "epoch": 0.09075128454381336, + "grad_norm": 5.386520201347107, + "learning_rate": 9.903720627463e-06, + "loss": 0.8934, + "step": 1307 + }, + { + "epoch": 0.09082071934453548, + "grad_norm": 4.660555687696774, + "learning_rate": 9.903500894332655e-06, + "loss": 0.5957, + "step": 1308 + }, + { + "epoch": 0.0908901541452576, + "grad_norm": 4.751308921371486, + "learning_rate": 9.903280913188786e-06, + "loss": 0.479, + "step": 1309 + }, + { + "epoch": 0.09095958894597972, + "grad_norm": 5.343201001230028, + "learning_rate": 9.903060684042516e-06, + "loss": 0.8311, + "step": 1310 + }, + { + "epoch": 0.09102902374670185, + "grad_norm": 5.4340435221876024, + "learning_rate": 9.902840206904984e-06, + "loss": 0.8404, + "step": 1311 + }, + { + "epoch": 0.09109845854742397, + "grad_norm": 3.4760932067568318, + "learning_rate": 9.902619481787344e-06, + "loss": 0.2382, + "step": 1312 + }, + { + "epoch": 0.0911678933481461, + "grad_norm": 4.283967096608612, + "learning_rate": 9.902398508700757e-06, + "loss": 0.4666, + "step": 1313 + }, + { + "epoch": 0.09123732814886822, + "grad_norm": 4.4100162091719675, + "learning_rate": 9.9021772876564e-06, + "loss": 0.6359, + "step": 1314 + }, + { + "epoch": 0.09130676294959034, + "grad_norm": 4.335130693770736, + "learning_rate": 9.901955818665464e-06, + "loss": 0.6145, + "step": 1315 + }, + { + "epoch": 0.09137619775031246, + "grad_norm": 4.093762037805684, + "learning_rate": 9.901734101739152e-06, + "loss": 0.519, + "step": 1316 + }, + { + "epoch": 0.09144563255103458, + "grad_norm": 4.725196910642091, + "learning_rate": 9.901512136888672e-06, + "loss": 0.5798, + "step": 1317 + }, + { + "epoch": 0.0915150673517567, + "grad_norm": 4.5440338595334975, + "learning_rate": 9.901289924125257e-06, + "loss": 0.5259, + "step": 1318 + }, + { + "epoch": 0.09158450215247882, + "grad_norm": 4.658909332328787, + "learning_rate": 9.901067463460142e-06, + "loss": 0.5461, + "step": 1319 + }, + { + "epoch": 0.09165393695320094, + "grad_norm": 4.158419941967739, + "learning_rate": 9.900844754904581e-06, + "loss": 0.4688, + "step": 1320 + }, + { + "epoch": 0.09172337175392306, + "grad_norm": 4.88658882490353, + "learning_rate": 9.90062179846984e-06, + "loss": 0.7088, + "step": 1321 + }, + { + "epoch": 0.09179280655464518, + "grad_norm": 2.9143450946453284, + "learning_rate": 9.90039859416719e-06, + "loss": 0.2995, + "step": 1322 + }, + { + "epoch": 0.09186224135536732, + "grad_norm": 3.943203609196513, + "learning_rate": 9.900175142007929e-06, + "loss": 0.4954, + "step": 1323 + }, + { + "epoch": 0.09193167615608944, + "grad_norm": 4.47572314076152, + "learning_rate": 9.89995144200335e-06, + "loss": 0.7639, + "step": 1324 + }, + { + "epoch": 0.09200111095681156, + "grad_norm": 4.969523424797767, + "learning_rate": 9.899727494164774e-06, + "loss": 0.7059, + "step": 1325 + }, + { + "epoch": 0.09207054575753368, + "grad_norm": 4.120087734712923, + "learning_rate": 9.899503298503525e-06, + "loss": 0.3763, + "step": 1326 + }, + { + "epoch": 0.0921399805582558, + "grad_norm": 5.067988309702797, + "learning_rate": 9.899278855030944e-06, + "loss": 0.585, + "step": 1327 + }, + { + "epoch": 0.09220941535897792, + "grad_norm": 4.039966234840161, + "learning_rate": 9.899054163758383e-06, + "loss": 0.4453, + "step": 1328 + }, + { + "epoch": 0.09227885015970004, + "grad_norm": 
4.624625701293212, + "learning_rate": 9.898829224697206e-06, + "loss": 0.5645, + "step": 1329 + }, + { + "epoch": 0.09234828496042216, + "grad_norm": 5.177213800487478, + "learning_rate": 9.89860403785879e-06, + "loss": 0.605, + "step": 1330 + }, + { + "epoch": 0.09241771976114428, + "grad_norm": 4.658634603397049, + "learning_rate": 9.898378603254524e-06, + "loss": 0.6117, + "step": 1331 + }, + { + "epoch": 0.0924871545618664, + "grad_norm": 4.666195866979545, + "learning_rate": 9.898152920895811e-06, + "loss": 0.412, + "step": 1332 + }, + { + "epoch": 0.09255658936258852, + "grad_norm": 4.376930942206212, + "learning_rate": 9.897926990794066e-06, + "loss": 0.4629, + "step": 1333 + }, + { + "epoch": 0.09262602416331066, + "grad_norm": 4.855977029944248, + "learning_rate": 9.897700812960717e-06, + "loss": 0.5679, + "step": 1334 + }, + { + "epoch": 0.09269545896403278, + "grad_norm": 6.169285730188878, + "learning_rate": 9.897474387407202e-06, + "loss": 0.7218, + "step": 1335 + }, + { + "epoch": 0.0927648937647549, + "grad_norm": 5.178209339476342, + "learning_rate": 9.897247714144975e-06, + "loss": 0.5485, + "step": 1336 + }, + { + "epoch": 0.09283432856547702, + "grad_norm": 4.985863476100043, + "learning_rate": 9.897020793185501e-06, + "loss": 0.5302, + "step": 1337 + }, + { + "epoch": 0.09290376336619914, + "grad_norm": 3.7767209090906046, + "learning_rate": 9.896793624540256e-06, + "loss": 0.2786, + "step": 1338 + }, + { + "epoch": 0.09297319816692126, + "grad_norm": 5.220556557392699, + "learning_rate": 9.896566208220729e-06, + "loss": 0.5117, + "step": 1339 + }, + { + "epoch": 0.09304263296764338, + "grad_norm": 3.741201155047686, + "learning_rate": 9.896338544238426e-06, + "loss": 0.3969, + "step": 1340 + }, + { + "epoch": 0.0931120677683655, + "grad_norm": 4.447324193413887, + "learning_rate": 9.896110632604858e-06, + "loss": 0.3914, + "step": 1341 + }, + { + "epoch": 0.09318150256908762, + "grad_norm": 5.00842964667195, + "learning_rate": 9.895882473331557e-06, + "loss": 0.7516, + "step": 1342 + }, + { + "epoch": 0.09325093736980974, + "grad_norm": 4.3887508796747765, + "learning_rate": 9.89565406643006e-06, + "loss": 0.5764, + "step": 1343 + }, + { + "epoch": 0.09332037217053187, + "grad_norm": 4.471200980128816, + "learning_rate": 9.89542541191192e-06, + "loss": 0.5304, + "step": 1344 + }, + { + "epoch": 0.09338980697125399, + "grad_norm": 4.497355139791726, + "learning_rate": 9.8951965097887e-06, + "loss": 0.687, + "step": 1345 + }, + { + "epoch": 0.09345924177197612, + "grad_norm": 4.617979234372066, + "learning_rate": 9.894967360071982e-06, + "loss": 0.481, + "step": 1346 + }, + { + "epoch": 0.09352867657269824, + "grad_norm": 5.369970066031287, + "learning_rate": 9.894737962773354e-06, + "loss": 0.6619, + "step": 1347 + }, + { + "epoch": 0.09359811137342036, + "grad_norm": 4.226961802603493, + "learning_rate": 9.894508317904418e-06, + "loss": 0.5354, + "step": 1348 + }, + { + "epoch": 0.09366754617414248, + "grad_norm": 4.946606876667253, + "learning_rate": 9.89427842547679e-06, + "loss": 0.7071, + "step": 1349 + }, + { + "epoch": 0.0937369809748646, + "grad_norm": 5.5561916504720275, + "learning_rate": 9.894048285502098e-06, + "loss": 0.6732, + "step": 1350 + }, + { + "epoch": 0.09380641577558672, + "grad_norm": 6.913186391522856, + "learning_rate": 9.89381789799198e-06, + "loss": 0.7755, + "step": 1351 + }, + { + "epoch": 0.09387585057630884, + "grad_norm": 3.7737207055580773, + "learning_rate": 9.893587262958093e-06, + "loss": 0.379, + "step": 1352 + }, + { + "epoch": 
0.09394528537703097, + "grad_norm": 4.624893208379848, + "learning_rate": 9.893356380412097e-06, + "loss": 0.5548, + "step": 1353 + }, + { + "epoch": 0.09401472017775309, + "grad_norm": 5.159897332703197, + "learning_rate": 9.893125250365674e-06, + "loss": 0.6081, + "step": 1354 + }, + { + "epoch": 0.0940841549784752, + "grad_norm": 5.173391268751992, + "learning_rate": 9.892893872830514e-06, + "loss": 0.5876, + "step": 1355 + }, + { + "epoch": 0.09415358977919733, + "grad_norm": 3.649056165755258, + "learning_rate": 9.892662247818317e-06, + "loss": 0.4209, + "step": 1356 + }, + { + "epoch": 0.09422302457991946, + "grad_norm": 4.9779515279684645, + "learning_rate": 9.8924303753408e-06, + "loss": 0.8236, + "step": 1357 + }, + { + "epoch": 0.09429245938064158, + "grad_norm": 5.157859649265562, + "learning_rate": 9.892198255409691e-06, + "loss": 0.4799, + "step": 1358 + }, + { + "epoch": 0.0943618941813637, + "grad_norm": 3.8127157576257775, + "learning_rate": 9.89196588803673e-06, + "loss": 0.421, + "step": 1359 + }, + { + "epoch": 0.09443132898208582, + "grad_norm": 3.2938190476061187, + "learning_rate": 9.89173327323367e-06, + "loss": 0.4079, + "step": 1360 + }, + { + "epoch": 0.09450076378280794, + "grad_norm": 4.099457305916928, + "learning_rate": 9.891500411012275e-06, + "loss": 0.293, + "step": 1361 + }, + { + "epoch": 0.09457019858353007, + "grad_norm": 4.723280762128105, + "learning_rate": 9.891267301384327e-06, + "loss": 0.692, + "step": 1362 + }, + { + "epoch": 0.09463963338425219, + "grad_norm": 4.431120674952316, + "learning_rate": 9.891033944361613e-06, + "loss": 0.4724, + "step": 1363 + }, + { + "epoch": 0.09470906818497431, + "grad_norm": 5.292143597371503, + "learning_rate": 9.890800339955935e-06, + "loss": 0.7446, + "step": 1364 + }, + { + "epoch": 0.09477850298569643, + "grad_norm": 4.322797194365144, + "learning_rate": 9.890566488179111e-06, + "loss": 0.2078, + "step": 1365 + }, + { + "epoch": 0.09484793778641855, + "grad_norm": 3.675068748583708, + "learning_rate": 9.89033238904297e-06, + "loss": 0.4216, + "step": 1366 + }, + { + "epoch": 0.09491737258714067, + "grad_norm": 4.107590015210328, + "learning_rate": 9.890098042559348e-06, + "loss": 0.4276, + "step": 1367 + }, + { + "epoch": 0.09498680738786279, + "grad_norm": 4.635964410226979, + "learning_rate": 9.889863448740102e-06, + "loss": 0.475, + "step": 1368 + }, + { + "epoch": 0.09505624218858492, + "grad_norm": 3.7456902203348283, + "learning_rate": 9.889628607597095e-06, + "loss": 0.3446, + "step": 1369 + }, + { + "epoch": 0.09512567698930705, + "grad_norm": 4.552400080916885, + "learning_rate": 9.889393519142207e-06, + "loss": 0.6254, + "step": 1370 + }, + { + "epoch": 0.09519511179002917, + "grad_norm": 4.038329545370405, + "learning_rate": 9.889158183387326e-06, + "loss": 0.5735, + "step": 1371 + }, + { + "epoch": 0.09526454659075129, + "grad_norm": 5.414999401779139, + "learning_rate": 9.888922600344359e-06, + "loss": 0.6119, + "step": 1372 + }, + { + "epoch": 0.09533398139147341, + "grad_norm": 4.7755519402171585, + "learning_rate": 9.888686770025217e-06, + "loss": 0.4937, + "step": 1373 + }, + { + "epoch": 0.09540341619219553, + "grad_norm": 4.206937521522721, + "learning_rate": 9.888450692441832e-06, + "loss": 0.3964, + "step": 1374 + }, + { + "epoch": 0.09547285099291765, + "grad_norm": 4.089789260062656, + "learning_rate": 9.88821436760614e-06, + "loss": 0.6167, + "step": 1375 + }, + { + "epoch": 0.09554228579363977, + "grad_norm": 4.620450270970714, + "learning_rate": 9.8879777955301e-06, + "loss": 
0.5489, + "step": 1376 + }, + { + "epoch": 0.09561172059436189, + "grad_norm": 5.325258261901468, + "learning_rate": 9.88774097622567e-06, + "loss": 0.5987, + "step": 1377 + }, + { + "epoch": 0.09568115539508401, + "grad_norm": 3.705544240782535, + "learning_rate": 9.887503909704835e-06, + "loss": 0.3993, + "step": 1378 + }, + { + "epoch": 0.09575059019580613, + "grad_norm": 4.283317732522032, + "learning_rate": 9.887266595979583e-06, + "loss": 0.5186, + "step": 1379 + }, + { + "epoch": 0.09582002499652827, + "grad_norm": 5.645905596427402, + "learning_rate": 9.887029035061915e-06, + "loss": 0.8449, + "step": 1380 + }, + { + "epoch": 0.09588945979725039, + "grad_norm": 5.10412900196531, + "learning_rate": 9.88679122696385e-06, + "loss": 0.7481, + "step": 1381 + }, + { + "epoch": 0.09595889459797251, + "grad_norm": 3.948881952467173, + "learning_rate": 9.886553171697415e-06, + "loss": 0.4464, + "step": 1382 + }, + { + "epoch": 0.09602832939869463, + "grad_norm": 3.9647976545727763, + "learning_rate": 9.886314869274649e-06, + "loss": 0.3703, + "step": 1383 + }, + { + "epoch": 0.09609776419941675, + "grad_norm": 4.904702878550367, + "learning_rate": 9.886076319707607e-06, + "loss": 0.5627, + "step": 1384 + }, + { + "epoch": 0.09616719900013887, + "grad_norm": 6.458986495164821, + "learning_rate": 9.885837523008353e-06, + "loss": 0.7612, + "step": 1385 + }, + { + "epoch": 0.09623663380086099, + "grad_norm": 4.737654809639494, + "learning_rate": 9.885598479188966e-06, + "loss": 0.4233, + "step": 1386 + }, + { + "epoch": 0.09630606860158311, + "grad_norm": 4.793801421196134, + "learning_rate": 9.885359188261536e-06, + "loss": 0.523, + "step": 1387 + }, + { + "epoch": 0.09637550340230523, + "grad_norm": 4.773298687718501, + "learning_rate": 9.885119650238167e-06, + "loss": 0.4104, + "step": 1388 + }, + { + "epoch": 0.09644493820302735, + "grad_norm": 4.770502384270093, + "learning_rate": 9.884879865130973e-06, + "loss": 0.6155, + "step": 1389 + }, + { + "epoch": 0.09651437300374947, + "grad_norm": 4.365994878344466, + "learning_rate": 9.884639832952085e-06, + "loss": 0.5617, + "step": 1390 + }, + { + "epoch": 0.09658380780447161, + "grad_norm": 5.233463610741223, + "learning_rate": 9.884399553713639e-06, + "loss": 0.6585, + "step": 1391 + }, + { + "epoch": 0.09665324260519373, + "grad_norm": 4.2868594230134285, + "learning_rate": 9.884159027427793e-06, + "loss": 0.5836, + "step": 1392 + }, + { + "epoch": 0.09672267740591585, + "grad_norm": 5.06131148639139, + "learning_rate": 9.883918254106709e-06, + "loss": 0.5357, + "step": 1393 + }, + { + "epoch": 0.09679211220663797, + "grad_norm": 4.168288755435476, + "learning_rate": 9.883677233762567e-06, + "loss": 0.4407, + "step": 1394 + }, + { + "epoch": 0.09686154700736009, + "grad_norm": 4.661055665697704, + "learning_rate": 9.883435966407555e-06, + "loss": 0.557, + "step": 1395 + }, + { + "epoch": 0.09693098180808221, + "grad_norm": 5.102820698152733, + "learning_rate": 9.88319445205388e-06, + "loss": 0.7647, + "step": 1396 + }, + { + "epoch": 0.09700041660880433, + "grad_norm": 4.311361601834448, + "learning_rate": 9.882952690713754e-06, + "loss": 0.646, + "step": 1397 + }, + { + "epoch": 0.09706985140952645, + "grad_norm": 5.199056766179924, + "learning_rate": 9.882710682399407e-06, + "loss": 0.3538, + "step": 1398 + }, + { + "epoch": 0.09713928621024857, + "grad_norm": 5.567304990121936, + "learning_rate": 9.882468427123079e-06, + "loss": 0.673, + "step": 1399 + }, + { + "epoch": 0.0972087210109707, + "grad_norm": 4.5797556508897665, + 
"learning_rate": 9.882225924897022e-06, + "loss": 0.5623, + "step": 1400 + }, + { + "epoch": 0.09727815581169282, + "grad_norm": 4.653949428324637, + "learning_rate": 9.881983175733503e-06, + "loss": 0.4494, + "step": 1401 + }, + { + "epoch": 0.09734759061241494, + "grad_norm": 5.009193093287199, + "learning_rate": 9.881740179644799e-06, + "loss": 0.5073, + "step": 1402 + }, + { + "epoch": 0.09741702541313707, + "grad_norm": 5.101340255142069, + "learning_rate": 9.8814969366432e-06, + "loss": 0.6643, + "step": 1403 + }, + { + "epoch": 0.09748646021385919, + "grad_norm": 5.522477413238814, + "learning_rate": 9.88125344674101e-06, + "loss": 0.7866, + "step": 1404 + }, + { + "epoch": 0.09755589501458131, + "grad_norm": 6.1313161603201785, + "learning_rate": 9.881009709950547e-06, + "loss": 0.9623, + "step": 1405 + }, + { + "epoch": 0.09762532981530343, + "grad_norm": 5.378298622525633, + "learning_rate": 9.880765726284133e-06, + "loss": 0.661, + "step": 1406 + }, + { + "epoch": 0.09769476461602555, + "grad_norm": 4.430002476911952, + "learning_rate": 9.880521495754112e-06, + "loss": 0.4735, + "step": 1407 + }, + { + "epoch": 0.09776419941674767, + "grad_norm": 4.837863791741688, + "learning_rate": 9.880277018372837e-06, + "loss": 0.4609, + "step": 1408 + }, + { + "epoch": 0.0978336342174698, + "grad_norm": 5.2930098943589945, + "learning_rate": 9.880032294152673e-06, + "loss": 0.6534, + "step": 1409 + }, + { + "epoch": 0.09790306901819192, + "grad_norm": 3.9025667504026536, + "learning_rate": 9.879787323105996e-06, + "loss": 0.3865, + "step": 1410 + }, + { + "epoch": 0.09797250381891404, + "grad_norm": 4.257170339438434, + "learning_rate": 9.8795421052452e-06, + "loss": 0.3296, + "step": 1411 + }, + { + "epoch": 0.09804193861963616, + "grad_norm": 5.111117920768885, + "learning_rate": 9.879296640582686e-06, + "loss": 0.6103, + "step": 1412 + }, + { + "epoch": 0.09811137342035828, + "grad_norm": 5.301777133455815, + "learning_rate": 9.879050929130867e-06, + "loss": 0.6157, + "step": 1413 + }, + { + "epoch": 0.09818080822108041, + "grad_norm": 4.960311362655914, + "learning_rate": 9.878804970902175e-06, + "loss": 0.5044, + "step": 1414 + }, + { + "epoch": 0.09825024302180253, + "grad_norm": 4.569294337625017, + "learning_rate": 9.878558765909048e-06, + "loss": 0.3892, + "step": 1415 + }, + { + "epoch": 0.09831967782252465, + "grad_norm": 5.7908651558011135, + "learning_rate": 9.878312314163938e-06, + "loss": 0.7408, + "step": 1416 + }, + { + "epoch": 0.09838911262324677, + "grad_norm": 4.66316644897711, + "learning_rate": 9.878065615679313e-06, + "loss": 0.669, + "step": 1417 + }, + { + "epoch": 0.0984585474239689, + "grad_norm": 4.575384290322658, + "learning_rate": 9.877818670467647e-06, + "loss": 0.4646, + "step": 1418 + }, + { + "epoch": 0.09852798222469102, + "grad_norm": 3.795580118089873, + "learning_rate": 9.877571478541434e-06, + "loss": 0.4155, + "step": 1419 + }, + { + "epoch": 0.09859741702541314, + "grad_norm": 4.828266354344776, + "learning_rate": 9.877324039913173e-06, + "loss": 0.616, + "step": 1420 + }, + { + "epoch": 0.09866685182613526, + "grad_norm": 5.198681724913463, + "learning_rate": 9.877076354595385e-06, + "loss": 0.6337, + "step": 1421 + }, + { + "epoch": 0.09873628662685738, + "grad_norm": 3.349218705869498, + "learning_rate": 9.87682842260059e-06, + "loss": 0.3862, + "step": 1422 + }, + { + "epoch": 0.0988057214275795, + "grad_norm": 5.360275399041558, + "learning_rate": 9.876580243941333e-06, + "loss": 0.7028, + "step": 1423 + }, + { + "epoch": 0.09887515622830162, 
+ "grad_norm": 3.295308978859412, + "learning_rate": 9.876331818630168e-06, + "loss": 0.5054, + "step": 1424 + }, + { + "epoch": 0.09894459102902374, + "grad_norm": 5.354463514674654, + "learning_rate": 9.876083146679654e-06, + "loss": 0.8394, + "step": 1425 + }, + { + "epoch": 0.09901402582974587, + "grad_norm": 3.9354718132436743, + "learning_rate": 9.875834228102374e-06, + "loss": 0.6806, + "step": 1426 + }, + { + "epoch": 0.099083460630468, + "grad_norm": 2.9775044809519655, + "learning_rate": 9.875585062910916e-06, + "loss": 0.3313, + "step": 1427 + }, + { + "epoch": 0.09915289543119012, + "grad_norm": 5.914369367608152, + "learning_rate": 9.875335651117882e-06, + "loss": 0.4899, + "step": 1428 + }, + { + "epoch": 0.09922233023191224, + "grad_norm": 4.651429212607517, + "learning_rate": 9.875085992735886e-06, + "loss": 0.689, + "step": 1429 + }, + { + "epoch": 0.09929176503263436, + "grad_norm": 4.071094062518788, + "learning_rate": 9.87483608777756e-06, + "loss": 0.2934, + "step": 1430 + }, + { + "epoch": 0.09936119983335648, + "grad_norm": 4.072330746610035, + "learning_rate": 9.87458593625554e-06, + "loss": 0.581, + "step": 1431 + }, + { + "epoch": 0.0994306346340786, + "grad_norm": 4.434073271049279, + "learning_rate": 9.874335538182478e-06, + "loss": 0.5349, + "step": 1432 + }, + { + "epoch": 0.09950006943480072, + "grad_norm": 4.837339591680378, + "learning_rate": 9.87408489357104e-06, + "loss": 0.5878, + "step": 1433 + }, + { + "epoch": 0.09956950423552284, + "grad_norm": 5.0955652705818695, + "learning_rate": 9.873834002433907e-06, + "loss": 0.5706, + "step": 1434 + }, + { + "epoch": 0.09963893903624496, + "grad_norm": 4.472690709419307, + "learning_rate": 9.873582864783762e-06, + "loss": 0.6272, + "step": 1435 + }, + { + "epoch": 0.09970837383696708, + "grad_norm": 5.469142792153725, + "learning_rate": 9.873331480633312e-06, + "loss": 0.7154, + "step": 1436 + }, + { + "epoch": 0.09977780863768922, + "grad_norm": 4.6354564244692416, + "learning_rate": 9.87307984999527e-06, + "loss": 0.4549, + "step": 1437 + }, + { + "epoch": 0.09984724343841134, + "grad_norm": 4.074484653508504, + "learning_rate": 9.872827972882362e-06, + "loss": 0.6131, + "step": 1438 + }, + { + "epoch": 0.09991667823913346, + "grad_norm": 3.465181877612099, + "learning_rate": 9.872575849307332e-06, + "loss": 0.3152, + "step": 1439 + }, + { + "epoch": 0.09998611303985558, + "grad_norm": 5.1034914967571, + "learning_rate": 9.872323479282926e-06, + "loss": 0.6223, + "step": 1440 + }, + { + "epoch": 0.1000555478405777, + "grad_norm": 3.6859642808034816, + "learning_rate": 9.872070862821914e-06, + "loss": 0.248, + "step": 1441 + }, + { + "epoch": 0.10012498264129982, + "grad_norm": 4.213779463270209, + "learning_rate": 9.87181799993707e-06, + "loss": 0.5969, + "step": 1442 + }, + { + "epoch": 0.10019441744202194, + "grad_norm": 4.839763005401552, + "learning_rate": 9.871564890641184e-06, + "loss": 0.4299, + "step": 1443 + }, + { + "epoch": 0.10026385224274406, + "grad_norm": 6.4194272334589275, + "learning_rate": 9.87131153494706e-06, + "loss": 1.0267, + "step": 1444 + }, + { + "epoch": 0.10033328704346618, + "grad_norm": 5.405520777809143, + "learning_rate": 9.871057932867509e-06, + "loss": 0.657, + "step": 1445 + }, + { + "epoch": 0.1004027218441883, + "grad_norm": 3.510282552520804, + "learning_rate": 9.87080408441536e-06, + "loss": 0.4098, + "step": 1446 + }, + { + "epoch": 0.10047215664491042, + "grad_norm": 5.018641436959001, + "learning_rate": 9.870549989603453e-06, + "loss": 0.7437, + "step": 1447 + }, + 
{ + "epoch": 0.10054159144563254, + "grad_norm": 4.175765925711543, + "learning_rate": 9.870295648444639e-06, + "loss": 0.4091, + "step": 1448 + }, + { + "epoch": 0.10061102624635468, + "grad_norm": 4.507699369394679, + "learning_rate": 9.870041060951782e-06, + "loss": 0.3783, + "step": 1449 + }, + { + "epoch": 0.1006804610470768, + "grad_norm": 4.645013840426378, + "learning_rate": 9.869786227137758e-06, + "loss": 0.6014, + "step": 1450 + }, + { + "epoch": 0.10074989584779892, + "grad_norm": 5.24303950064517, + "learning_rate": 9.869531147015459e-06, + "loss": 0.6051, + "step": 1451 + }, + { + "epoch": 0.10081933064852104, + "grad_norm": 5.433163687740451, + "learning_rate": 9.869275820597783e-06, + "loss": 0.7789, + "step": 1452 + }, + { + "epoch": 0.10088876544924316, + "grad_norm": 4.843308164396283, + "learning_rate": 9.869020247897648e-06, + "loss": 0.6026, + "step": 1453 + }, + { + "epoch": 0.10095820024996528, + "grad_norm": 4.576815581219095, + "learning_rate": 9.868764428927977e-06, + "loss": 0.4794, + "step": 1454 + }, + { + "epoch": 0.1010276350506874, + "grad_norm": 3.6229477668361936, + "learning_rate": 9.86850836370171e-06, + "loss": 0.2725, + "step": 1455 + }, + { + "epoch": 0.10109706985140952, + "grad_norm": 4.058156320114755, + "learning_rate": 9.8682520522318e-06, + "loss": 0.2962, + "step": 1456 + }, + { + "epoch": 0.10116650465213164, + "grad_norm": 3.805415086298999, + "learning_rate": 9.86799549453121e-06, + "loss": 0.4468, + "step": 1457 + }, + { + "epoch": 0.10123593945285377, + "grad_norm": 4.682132470477474, + "learning_rate": 9.867738690612916e-06, + "loss": 0.4124, + "step": 1458 + }, + { + "epoch": 0.10130537425357589, + "grad_norm": 4.417322092609537, + "learning_rate": 9.867481640489907e-06, + "loss": 0.7896, + "step": 1459 + }, + { + "epoch": 0.10137480905429802, + "grad_norm": 5.370521293032936, + "learning_rate": 9.867224344175185e-06, + "loss": 0.8068, + "step": 1460 + }, + { + "epoch": 0.10144424385502014, + "grad_norm": 3.9229433283414443, + "learning_rate": 9.866966801681764e-06, + "loss": 0.2458, + "step": 1461 + }, + { + "epoch": 0.10151367865574226, + "grad_norm": 4.9491396772186365, + "learning_rate": 9.866709013022669e-06, + "loss": 0.6703, + "step": 1462 + }, + { + "epoch": 0.10158311345646438, + "grad_norm": 5.513867226692305, + "learning_rate": 9.866450978210938e-06, + "loss": 0.6185, + "step": 1463 + }, + { + "epoch": 0.1016525482571865, + "grad_norm": 6.46850910579035, + "learning_rate": 9.866192697259625e-06, + "loss": 0.61, + "step": 1464 + }, + { + "epoch": 0.10172198305790862, + "grad_norm": 3.7946975651332737, + "learning_rate": 9.86593417018179e-06, + "loss": 0.5239, + "step": 1465 + }, + { + "epoch": 0.10179141785863074, + "grad_norm": 4.216608452908571, + "learning_rate": 9.865675396990513e-06, + "loss": 0.49, + "step": 1466 + }, + { + "epoch": 0.10186085265935287, + "grad_norm": 4.225718064339089, + "learning_rate": 9.86541637769888e-06, + "loss": 0.3515, + "step": 1467 + }, + { + "epoch": 0.10193028746007499, + "grad_norm": 5.022422290702395, + "learning_rate": 9.865157112319992e-06, + "loss": 0.7022, + "step": 1468 + }, + { + "epoch": 0.10199972226079711, + "grad_norm": 4.4170400213203385, + "learning_rate": 9.864897600866961e-06, + "loss": 0.5751, + "step": 1469 + }, + { + "epoch": 0.10206915706151923, + "grad_norm": 4.274323488898478, + "learning_rate": 9.864637843352916e-06, + "loss": 0.4817, + "step": 1470 + }, + { + "epoch": 0.10213859186224135, + "grad_norm": 4.408698994386973, + "learning_rate": 9.864377839790992e-06, + 
"loss": 0.594, + "step": 1471 + }, + { + "epoch": 0.10220802666296348, + "grad_norm": 4.244252469750399, + "learning_rate": 9.864117590194342e-06, + "loss": 0.4003, + "step": 1472 + }, + { + "epoch": 0.1022774614636856, + "grad_norm": 6.39113269552439, + "learning_rate": 9.863857094576129e-06, + "loss": 0.8399, + "step": 1473 + }, + { + "epoch": 0.10234689626440772, + "grad_norm": 3.9532686514251383, + "learning_rate": 9.863596352949528e-06, + "loss": 0.3364, + "step": 1474 + }, + { + "epoch": 0.10241633106512985, + "grad_norm": 5.12506003554793, + "learning_rate": 9.863335365327726e-06, + "loss": 0.6353, + "step": 1475 + }, + { + "epoch": 0.10248576586585197, + "grad_norm": 5.4996458875676515, + "learning_rate": 9.863074131723926e-06, + "loss": 0.6631, + "step": 1476 + }, + { + "epoch": 0.10255520066657409, + "grad_norm": 4.204707386388905, + "learning_rate": 9.86281265215134e-06, + "loss": 0.5784, + "step": 1477 + }, + { + "epoch": 0.10262463546729621, + "grad_norm": 3.1744307981162536, + "learning_rate": 9.86255092662319e-06, + "loss": 0.3978, + "step": 1478 + }, + { + "epoch": 0.10269407026801833, + "grad_norm": 5.584314613196682, + "learning_rate": 9.862288955152716e-06, + "loss": 0.5764, + "step": 1479 + }, + { + "epoch": 0.10276350506874045, + "grad_norm": 3.7756388145140103, + "learning_rate": 9.862026737753172e-06, + "loss": 0.4831, + "step": 1480 + }, + { + "epoch": 0.10283293986946257, + "grad_norm": 3.9668559875166354, + "learning_rate": 9.861764274437815e-06, + "loss": 0.2832, + "step": 1481 + }, + { + "epoch": 0.10290237467018469, + "grad_norm": 5.383119243692636, + "learning_rate": 9.861501565219924e-06, + "loss": 0.8845, + "step": 1482 + }, + { + "epoch": 0.10297180947090682, + "grad_norm": 4.715036109096074, + "learning_rate": 9.861238610112784e-06, + "loss": 0.5594, + "step": 1483 + }, + { + "epoch": 0.10304124427162895, + "grad_norm": 3.6892227513070686, + "learning_rate": 9.860975409129696e-06, + "loss": 0.384, + "step": 1484 + }, + { + "epoch": 0.10311067907235107, + "grad_norm": 4.335314531370543, + "learning_rate": 9.860711962283972e-06, + "loss": 0.6645, + "step": 1485 + }, + { + "epoch": 0.10318011387307319, + "grad_norm": 4.291383789553024, + "learning_rate": 9.860448269588938e-06, + "loss": 0.483, + "step": 1486 + }, + { + "epoch": 0.10324954867379531, + "grad_norm": 5.582990820965034, + "learning_rate": 9.86018433105793e-06, + "loss": 0.6673, + "step": 1487 + }, + { + "epoch": 0.10331898347451743, + "grad_norm": 4.981697082701538, + "learning_rate": 9.859920146704299e-06, + "loss": 0.6948, + "step": 1488 + }, + { + "epoch": 0.10338841827523955, + "grad_norm": 3.987023159443007, + "learning_rate": 9.859655716541405e-06, + "loss": 0.3902, + "step": 1489 + }, + { + "epoch": 0.10345785307596167, + "grad_norm": 5.194073800640568, + "learning_rate": 9.859391040582625e-06, + "loss": 0.8335, + "step": 1490 + }, + { + "epoch": 0.10352728787668379, + "grad_norm": 3.872956527428385, + "learning_rate": 9.859126118841345e-06, + "loss": 0.2913, + "step": 1491 + }, + { + "epoch": 0.10359672267740591, + "grad_norm": 2.404685406215228, + "learning_rate": 9.858860951330964e-06, + "loss": 0.1651, + "step": 1492 + }, + { + "epoch": 0.10366615747812803, + "grad_norm": 4.387267208777114, + "learning_rate": 9.858595538064895e-06, + "loss": 0.656, + "step": 1493 + }, + { + "epoch": 0.10373559227885015, + "grad_norm": 3.972165891023337, + "learning_rate": 9.85832987905656e-06, + "loss": 0.5746, + "step": 1494 + }, + { + "epoch": 0.10380502707957229, + "grad_norm": 4.893645957387192, + 
"learning_rate": 9.858063974319399e-06, + "loss": 0.5839, + "step": 1495 + }, + { + "epoch": 0.10387446188029441, + "grad_norm": 4.703251458920722, + "learning_rate": 9.857797823866858e-06, + "loss": 0.3784, + "step": 1496 + }, + { + "epoch": 0.10394389668101653, + "grad_norm": 4.921201400380296, + "learning_rate": 9.857531427712402e-06, + "loss": 0.5057, + "step": 1497 + }, + { + "epoch": 0.10401333148173865, + "grad_norm": 4.907933484566761, + "learning_rate": 9.8572647858695e-06, + "loss": 0.6931, + "step": 1498 + }, + { + "epoch": 0.10408276628246077, + "grad_norm": 4.575797357221193, + "learning_rate": 9.856997898351644e-06, + "loss": 0.5452, + "step": 1499 + }, + { + "epoch": 0.10415220108318289, + "grad_norm": 4.890072875984711, + "learning_rate": 9.85673076517233e-06, + "loss": 0.4678, + "step": 1500 + }, + { + "epoch": 0.10422163588390501, + "grad_norm": 5.854166444131935, + "learning_rate": 9.856463386345068e-06, + "loss": 0.4742, + "step": 1501 + }, + { + "epoch": 0.10429107068462713, + "grad_norm": 4.66304438296173, + "learning_rate": 9.856195761883385e-06, + "loss": 0.4911, + "step": 1502 + }, + { + "epoch": 0.10436050548534925, + "grad_norm": 3.8920834612027053, + "learning_rate": 9.855927891800814e-06, + "loss": 0.4804, + "step": 1503 + }, + { + "epoch": 0.10442994028607137, + "grad_norm": 4.111242119672071, + "learning_rate": 9.855659776110908e-06, + "loss": 0.5079, + "step": 1504 + }, + { + "epoch": 0.1044993750867935, + "grad_norm": 4.374730703835033, + "learning_rate": 9.855391414827223e-06, + "loss": 0.3222, + "step": 1505 + }, + { + "epoch": 0.10456880988751563, + "grad_norm": 5.764078149491738, + "learning_rate": 9.855122807963334e-06, + "loss": 0.837, + "step": 1506 + }, + { + "epoch": 0.10463824468823775, + "grad_norm": 4.5850401913252945, + "learning_rate": 9.854853955532829e-06, + "loss": 0.5047, + "step": 1507 + }, + { + "epoch": 0.10470767948895987, + "grad_norm": 4.498032863862511, + "learning_rate": 9.854584857549303e-06, + "loss": 0.5032, + "step": 1508 + }, + { + "epoch": 0.10477711428968199, + "grad_norm": 4.202398976515563, + "learning_rate": 9.85431551402637e-06, + "loss": 0.4368, + "step": 1509 + }, + { + "epoch": 0.10484654909040411, + "grad_norm": 4.952828072459177, + "learning_rate": 9.854045924977648e-06, + "loss": 0.63, + "step": 1510 + }, + { + "epoch": 0.10491598389112623, + "grad_norm": 4.425120166048361, + "learning_rate": 9.85377609041678e-06, + "loss": 0.6482, + "step": 1511 + }, + { + "epoch": 0.10498541869184835, + "grad_norm": 4.529078257756628, + "learning_rate": 9.853506010357408e-06, + "loss": 0.6276, + "step": 1512 + }, + { + "epoch": 0.10505485349257047, + "grad_norm": 4.637433011654157, + "learning_rate": 9.853235684813193e-06, + "loss": 0.6854, + "step": 1513 + }, + { + "epoch": 0.1051242882932926, + "grad_norm": 4.661170672388385, + "learning_rate": 9.85296511379781e-06, + "loss": 0.6506, + "step": 1514 + }, + { + "epoch": 0.10519372309401472, + "grad_norm": 6.071298315668853, + "learning_rate": 9.852694297324943e-06, + "loss": 0.9429, + "step": 1515 + }, + { + "epoch": 0.10526315789473684, + "grad_norm": 3.94590979840419, + "learning_rate": 9.85242323540829e-06, + "loss": 0.4408, + "step": 1516 + }, + { + "epoch": 0.10533259269545897, + "grad_norm": 4.332637500242891, + "learning_rate": 9.85215192806156e-06, + "loss": 0.296, + "step": 1517 + }, + { + "epoch": 0.10540202749618109, + "grad_norm": 4.480137032902055, + "learning_rate": 9.851880375298476e-06, + "loss": 0.4728, + "step": 1518 + }, + { + "epoch": 0.10547146229690321, + 
"grad_norm": 4.731982617974487, + "learning_rate": 9.851608577132772e-06, + "loss": 0.5655, + "step": 1519 + }, + { + "epoch": 0.10554089709762533, + "grad_norm": 6.520421620226633, + "learning_rate": 9.851336533578197e-06, + "loss": 0.582, + "step": 1520 + }, + { + "epoch": 0.10561033189834745, + "grad_norm": 5.8580211585506525, + "learning_rate": 9.851064244648512e-06, + "loss": 0.6654, + "step": 1521 + }, + { + "epoch": 0.10567976669906957, + "grad_norm": 4.118247584013102, + "learning_rate": 9.850791710357484e-06, + "loss": 0.4173, + "step": 1522 + }, + { + "epoch": 0.1057492014997917, + "grad_norm": 3.846588091553713, + "learning_rate": 9.8505189307189e-06, + "loss": 0.4547, + "step": 1523 + }, + { + "epoch": 0.10581863630051382, + "grad_norm": 3.780827342457809, + "learning_rate": 9.85024590574656e-06, + "loss": 0.3835, + "step": 1524 + }, + { + "epoch": 0.10588807110123594, + "grad_norm": 4.142471460006247, + "learning_rate": 9.849972635454269e-06, + "loss": 0.3718, + "step": 1525 + }, + { + "epoch": 0.10595750590195806, + "grad_norm": 5.30455870557488, + "learning_rate": 9.849699119855849e-06, + "loss": 0.644, + "step": 1526 + }, + { + "epoch": 0.10602694070268018, + "grad_norm": 5.125261613370212, + "learning_rate": 9.849425358965137e-06, + "loss": 0.5728, + "step": 1527 + }, + { + "epoch": 0.1060963755034023, + "grad_norm": 4.036633810686275, + "learning_rate": 9.849151352795978e-06, + "loss": 0.521, + "step": 1528 + }, + { + "epoch": 0.10616581030412443, + "grad_norm": 4.023083043024948, + "learning_rate": 9.848877101362229e-06, + "loss": 0.4578, + "step": 1529 + }, + { + "epoch": 0.10623524510484655, + "grad_norm": 6.097939891037617, + "learning_rate": 9.848602604677764e-06, + "loss": 0.683, + "step": 1530 + }, + { + "epoch": 0.10630467990556867, + "grad_norm": 5.428831392012675, + "learning_rate": 9.848327862756466e-06, + "loss": 0.5905, + "step": 1531 + }, + { + "epoch": 0.1063741147062908, + "grad_norm": 4.038482225978325, + "learning_rate": 9.848052875612232e-06, + "loss": 0.3259, + "step": 1532 + }, + { + "epoch": 0.10644354950701292, + "grad_norm": 4.774959460651911, + "learning_rate": 9.84777764325897e-06, + "loss": 0.5392, + "step": 1533 + }, + { + "epoch": 0.10651298430773504, + "grad_norm": 4.649973700708671, + "learning_rate": 9.847502165710596e-06, + "loss": 0.5627, + "step": 1534 + }, + { + "epoch": 0.10658241910845716, + "grad_norm": 5.712074584432798, + "learning_rate": 9.84722644298105e-06, + "loss": 0.4408, + "step": 1535 + }, + { + "epoch": 0.10665185390917928, + "grad_norm": 6.35996853697112, + "learning_rate": 9.846950475084279e-06, + "loss": 0.9047, + "step": 1536 + }, + { + "epoch": 0.1067212887099014, + "grad_norm": 5.33043706443409, + "learning_rate": 9.846674262034234e-06, + "loss": 0.66, + "step": 1537 + }, + { + "epoch": 0.10679072351062352, + "grad_norm": 4.937533137775621, + "learning_rate": 9.84639780384489e-06, + "loss": 0.4078, + "step": 1538 + }, + { + "epoch": 0.10686015831134564, + "grad_norm": 5.910042035005478, + "learning_rate": 9.846121100530229e-06, + "loss": 0.5399, + "step": 1539 + }, + { + "epoch": 0.10692959311206778, + "grad_norm": 4.818395582616907, + "learning_rate": 9.845844152104248e-06, + "loss": 0.5133, + "step": 1540 + }, + { + "epoch": 0.1069990279127899, + "grad_norm": 5.285148066265314, + "learning_rate": 9.84556695858095e-06, + "loss": 0.8738, + "step": 1541 + }, + { + "epoch": 0.10706846271351202, + "grad_norm": 5.475571325031809, + "learning_rate": 9.845289519974362e-06, + "loss": 0.5095, + "step": 1542 + }, + { + 
"epoch": 0.10713789751423414, + "grad_norm": 6.184994607743289, + "learning_rate": 9.84501183629851e-06, + "loss": 0.4073, + "step": 1543 + }, + { + "epoch": 0.10720733231495626, + "grad_norm": 3.8931914758363138, + "learning_rate": 9.844733907567444e-06, + "loss": 0.3666, + "step": 1544 + }, + { + "epoch": 0.10727676711567838, + "grad_norm": 4.297116611492556, + "learning_rate": 9.844455733795217e-06, + "loss": 0.4009, + "step": 1545 + }, + { + "epoch": 0.1073462019164005, + "grad_norm": 4.192064205735997, + "learning_rate": 9.844177314995902e-06, + "loss": 0.402, + "step": 1546 + }, + { + "epoch": 0.10741563671712262, + "grad_norm": 3.7897208059550316, + "learning_rate": 9.84389865118358e-06, + "loss": 0.2806, + "step": 1547 + }, + { + "epoch": 0.10748507151784474, + "grad_norm": 4.3203932120933315, + "learning_rate": 9.843619742372345e-06, + "loss": 0.5323, + "step": 1548 + }, + { + "epoch": 0.10755450631856686, + "grad_norm": 3.9939091673572746, + "learning_rate": 9.843340588576306e-06, + "loss": 0.5407, + "step": 1549 + }, + { + "epoch": 0.10762394111928898, + "grad_norm": 5.150457779299713, + "learning_rate": 9.84306118980958e-06, + "loss": 0.7399, + "step": 1550 + }, + { + "epoch": 0.1076933759200111, + "grad_norm": 4.911652375244797, + "learning_rate": 9.842781546086298e-06, + "loss": 0.6613, + "step": 1551 + }, + { + "epoch": 0.10776281072073324, + "grad_norm": 4.881668184883155, + "learning_rate": 9.842501657420606e-06, + "loss": 0.7577, + "step": 1552 + }, + { + "epoch": 0.10783224552145536, + "grad_norm": 5.202801671014035, + "learning_rate": 9.84222152382666e-06, + "loss": 0.5879, + "step": 1553 + }, + { + "epoch": 0.10790168032217748, + "grad_norm": 2.8899270835761808, + "learning_rate": 9.841941145318628e-06, + "loss": 0.2977, + "step": 1554 + }, + { + "epoch": 0.1079711151228996, + "grad_norm": 4.569706152644165, + "learning_rate": 9.841660521910692e-06, + "loss": 0.4627, + "step": 1555 + }, + { + "epoch": 0.10804054992362172, + "grad_norm": 4.687682745828268, + "learning_rate": 9.841379653617048e-06, + "loss": 0.4952, + "step": 1556 + }, + { + "epoch": 0.10810998472434384, + "grad_norm": 4.252852647572385, + "learning_rate": 9.841098540451897e-06, + "loss": 0.4663, + "step": 1557 + }, + { + "epoch": 0.10817941952506596, + "grad_norm": 4.719937487685151, + "learning_rate": 9.840817182429459e-06, + "loss": 0.6428, + "step": 1558 + }, + { + "epoch": 0.10824885432578808, + "grad_norm": 4.459412855425336, + "learning_rate": 9.840535579563969e-06, + "loss": 0.5252, + "step": 1559 + }, + { + "epoch": 0.1083182891265102, + "grad_norm": 5.054094951706585, + "learning_rate": 9.840253731869664e-06, + "loss": 0.597, + "step": 1560 + }, + { + "epoch": 0.10838772392723232, + "grad_norm": 3.1400965121412687, + "learning_rate": 9.839971639360804e-06, + "loss": 0.2184, + "step": 1561 + }, + { + "epoch": 0.10845715872795444, + "grad_norm": 3.969700058327271, + "learning_rate": 9.839689302051654e-06, + "loss": 0.4614, + "step": 1562 + }, + { + "epoch": 0.10852659352867658, + "grad_norm": 4.0392657255352855, + "learning_rate": 9.839406719956496e-06, + "loss": 0.3555, + "step": 1563 + }, + { + "epoch": 0.1085960283293987, + "grad_norm": 3.495674736955305, + "learning_rate": 9.839123893089622e-06, + "loss": 0.2343, + "step": 1564 + }, + { + "epoch": 0.10866546313012082, + "grad_norm": 4.667452749454958, + "learning_rate": 9.838840821465338e-06, + "loss": 0.643, + "step": 1565 + }, + { + "epoch": 0.10873489793084294, + "grad_norm": 4.490900119758754, + "learning_rate": 9.83855750509796e-06, + 
"loss": 0.5119, + "step": 1566 + }, + { + "epoch": 0.10880433273156506, + "grad_norm": 3.314578752852845, + "learning_rate": 9.83827394400182e-06, + "loss": 0.3871, + "step": 1567 + }, + { + "epoch": 0.10887376753228718, + "grad_norm": 4.26856821223684, + "learning_rate": 9.837990138191258e-06, + "loss": 0.4262, + "step": 1568 + }, + { + "epoch": 0.1089432023330093, + "grad_norm": 3.9969153602034533, + "learning_rate": 9.837706087680628e-06, + "loss": 0.4782, + "step": 1569 + }, + { + "epoch": 0.10901263713373142, + "grad_norm": 3.6387732467998637, + "learning_rate": 9.8374217924843e-06, + "loss": 0.3585, + "step": 1570 + }, + { + "epoch": 0.10908207193445354, + "grad_norm": 3.358258342037373, + "learning_rate": 9.83713725261665e-06, + "loss": 0.2208, + "step": 1571 + }, + { + "epoch": 0.10915150673517567, + "grad_norm": 3.8320195121228413, + "learning_rate": 9.836852468092075e-06, + "loss": 0.4853, + "step": 1572 + }, + { + "epoch": 0.10922094153589779, + "grad_norm": 4.593973320215978, + "learning_rate": 9.836567438924972e-06, + "loss": 0.4529, + "step": 1573 + }, + { + "epoch": 0.10929037633661991, + "grad_norm": 3.217217836563474, + "learning_rate": 9.836282165129763e-06, + "loss": 0.377, + "step": 1574 + }, + { + "epoch": 0.10935981113734204, + "grad_norm": 4.5976554124817905, + "learning_rate": 9.835996646720873e-06, + "loss": 0.5341, + "step": 1575 + }, + { + "epoch": 0.10942924593806416, + "grad_norm": 3.7926232650431015, + "learning_rate": 9.835710883712746e-06, + "loss": 0.4095, + "step": 1576 + }, + { + "epoch": 0.10949868073878628, + "grad_norm": 4.174914908042183, + "learning_rate": 9.835424876119833e-06, + "loss": 0.4987, + "step": 1577 + }, + { + "epoch": 0.1095681155395084, + "grad_norm": 2.8208369748292643, + "learning_rate": 9.835138623956603e-06, + "loss": 0.3247, + "step": 1578 + }, + { + "epoch": 0.10963755034023052, + "grad_norm": 3.5268928991424877, + "learning_rate": 9.834852127237532e-06, + "loss": 0.4655, + "step": 1579 + }, + { + "epoch": 0.10970698514095265, + "grad_norm": 5.3490427375905885, + "learning_rate": 9.83456538597711e-06, + "loss": 0.8395, + "step": 1580 + }, + { + "epoch": 0.10977641994167477, + "grad_norm": 4.528018993315913, + "learning_rate": 9.834278400189845e-06, + "loss": 0.5525, + "step": 1581 + }, + { + "epoch": 0.10984585474239689, + "grad_norm": 4.932849394101731, + "learning_rate": 9.833991169890247e-06, + "loss": 0.5518, + "step": 1582 + }, + { + "epoch": 0.10991528954311901, + "grad_norm": 4.714738264369017, + "learning_rate": 9.833703695092845e-06, + "loss": 0.5196, + "step": 1583 + }, + { + "epoch": 0.10998472434384113, + "grad_norm": 4.408660557303999, + "learning_rate": 9.83341597581218e-06, + "loss": 0.4204, + "step": 1584 + }, + { + "epoch": 0.11005415914456325, + "grad_norm": 3.743320813706743, + "learning_rate": 9.833128012062805e-06, + "loss": 0.5559, + "step": 1585 + }, + { + "epoch": 0.11012359394528538, + "grad_norm": 3.5811830990667435, + "learning_rate": 9.832839803859285e-06, + "loss": 0.4234, + "step": 1586 + }, + { + "epoch": 0.1101930287460075, + "grad_norm": 5.104278667578373, + "learning_rate": 9.832551351216195e-06, + "loss": 0.6434, + "step": 1587 + }, + { + "epoch": 0.11026246354672962, + "grad_norm": 6.72400788502477, + "learning_rate": 9.832262654148126e-06, + "loss": 0.8587, + "step": 1588 + }, + { + "epoch": 0.11033189834745175, + "grad_norm": 4.161496348110367, + "learning_rate": 9.83197371266968e-06, + "loss": 0.5506, + "step": 1589 + }, + { + "epoch": 0.11040133314817387, + "grad_norm": 3.776560693539345, + 
"learning_rate": 9.831684526795471e-06, + "loss": 0.4692, + "step": 1590 + }, + { + "epoch": 0.11047076794889599, + "grad_norm": 4.498195818207967, + "learning_rate": 9.831395096540128e-06, + "loss": 0.5355, + "step": 1591 + }, + { + "epoch": 0.11054020274961811, + "grad_norm": 3.7454207291080124, + "learning_rate": 9.831105421918287e-06, + "loss": 0.4557, + "step": 1592 + }, + { + "epoch": 0.11060963755034023, + "grad_norm": 4.390260306066933, + "learning_rate": 9.8308155029446e-06, + "loss": 0.4584, + "step": 1593 + }, + { + "epoch": 0.11067907235106235, + "grad_norm": 3.0579293731284625, + "learning_rate": 9.830525339633732e-06, + "loss": 0.2558, + "step": 1594 + }, + { + "epoch": 0.11074850715178447, + "grad_norm": 4.870705655984856, + "learning_rate": 9.830234932000358e-06, + "loss": 0.6957, + "step": 1595 + }, + { + "epoch": 0.11081794195250659, + "grad_norm": 3.823578666763392, + "learning_rate": 9.829944280059166e-06, + "loss": 0.5417, + "step": 1596 + }, + { + "epoch": 0.11088737675322871, + "grad_norm": 3.966919167976425, + "learning_rate": 9.82965338382486e-06, + "loss": 0.3847, + "step": 1597 + }, + { + "epoch": 0.11095681155395085, + "grad_norm": 4.842282801747006, + "learning_rate": 9.829362243312151e-06, + "loss": 0.4381, + "step": 1598 + }, + { + "epoch": 0.11102624635467297, + "grad_norm": 3.58162065625632, + "learning_rate": 9.829070858535761e-06, + "loss": 0.3113, + "step": 1599 + }, + { + "epoch": 0.11109568115539509, + "grad_norm": 4.492226767601615, + "learning_rate": 9.828779229510434e-06, + "loss": 0.5637, + "step": 1600 + }, + { + "epoch": 0.11116511595611721, + "grad_norm": 4.387054802836475, + "learning_rate": 9.82848735625092e-06, + "loss": 0.4992, + "step": 1601 + }, + { + "epoch": 0.11123455075683933, + "grad_norm": 5.118782618547968, + "learning_rate": 9.828195238771976e-06, + "loss": 0.5997, + "step": 1602 + }, + { + "epoch": 0.11130398555756145, + "grad_norm": 5.251109347835807, + "learning_rate": 9.82790287708838e-06, + "loss": 0.683, + "step": 1603 + }, + { + "epoch": 0.11137342035828357, + "grad_norm": 3.5240909002017995, + "learning_rate": 9.827610271214921e-06, + "loss": 0.351, + "step": 1604 + }, + { + "epoch": 0.11144285515900569, + "grad_norm": 4.875392596363631, + "learning_rate": 9.827317421166397e-06, + "loss": 0.502, + "step": 1605 + }, + { + "epoch": 0.11151228995972781, + "grad_norm": 5.8870866083624795, + "learning_rate": 9.827024326957621e-06, + "loss": 0.5, + "step": 1606 + }, + { + "epoch": 0.11158172476044993, + "grad_norm": 4.090982102166375, + "learning_rate": 9.826730988603416e-06, + "loss": 0.5652, + "step": 1607 + }, + { + "epoch": 0.11165115956117205, + "grad_norm": 5.348751817782694, + "learning_rate": 9.82643740611862e-06, + "loss": 0.9469, + "step": 1608 + }, + { + "epoch": 0.11172059436189419, + "grad_norm": 3.050607589608507, + "learning_rate": 9.82614357951808e-06, + "loss": 0.3071, + "step": 1609 + }, + { + "epoch": 0.11179002916261631, + "grad_norm": 5.4928244049893555, + "learning_rate": 9.825849508816661e-06, + "loss": 0.7792, + "step": 1610 + }, + { + "epoch": 0.11185946396333843, + "grad_norm": 4.209022350640694, + "learning_rate": 9.825555194029232e-06, + "loss": 0.5517, + "step": 1611 + }, + { + "epoch": 0.11192889876406055, + "grad_norm": 4.092041768635105, + "learning_rate": 9.825260635170684e-06, + "loss": 0.5325, + "step": 1612 + }, + { + "epoch": 0.11199833356478267, + "grad_norm": 4.9619086965080434, + "learning_rate": 9.824965832255913e-06, + "loss": 0.743, + "step": 1613 + }, + { + "epoch": 0.11206776836550479, 
+ "grad_norm": 4.421598831619101, + "learning_rate": 9.82467078529983e-06, + "loss": 0.3902, + "step": 1614 + }, + { + "epoch": 0.11213720316622691, + "grad_norm": 3.9773891500369074, + "learning_rate": 9.824375494317358e-06, + "loss": 0.4186, + "step": 1615 + }, + { + "epoch": 0.11220663796694903, + "grad_norm": 3.723363296604242, + "learning_rate": 9.824079959323431e-06, + "loss": 0.3506, + "step": 1616 + }, + { + "epoch": 0.11227607276767115, + "grad_norm": 4.030108433675491, + "learning_rate": 9.823784180333e-06, + "loss": 0.4958, + "step": 1617 + }, + { + "epoch": 0.11234550756839327, + "grad_norm": 4.9857469711080515, + "learning_rate": 9.823488157361023e-06, + "loss": 0.5275, + "step": 1618 + }, + { + "epoch": 0.1124149423691154, + "grad_norm": 5.174116321636986, + "learning_rate": 9.823191890422474e-06, + "loss": 0.6698, + "step": 1619 + }, + { + "epoch": 0.11248437716983753, + "grad_norm": 3.2327252267581024, + "learning_rate": 9.822895379532336e-06, + "loss": 0.3652, + "step": 1620 + }, + { + "epoch": 0.11255381197055965, + "grad_norm": 3.132738327208875, + "learning_rate": 9.822598624705609e-06, + "loss": 0.2778, + "step": 1621 + }, + { + "epoch": 0.11262324677128177, + "grad_norm": 3.5805252444323457, + "learning_rate": 9.822301625957299e-06, + "loss": 0.2494, + "step": 1622 + }, + { + "epoch": 0.11269268157200389, + "grad_norm": 2.768350140469748, + "learning_rate": 9.82200438330243e-06, + "loss": 0.2753, + "step": 1623 + }, + { + "epoch": 0.11276211637272601, + "grad_norm": 3.9852206986210277, + "learning_rate": 9.821706896756035e-06, + "loss": 0.4955, + "step": 1624 + }, + { + "epoch": 0.11283155117344813, + "grad_norm": 5.609812176569715, + "learning_rate": 9.821409166333163e-06, + "loss": 0.6614, + "step": 1625 + }, + { + "epoch": 0.11290098597417025, + "grad_norm": 4.3938758193827265, + "learning_rate": 9.82111119204887e-06, + "loss": 0.4627, + "step": 1626 + }, + { + "epoch": 0.11297042077489237, + "grad_norm": 5.5237164783105195, + "learning_rate": 9.820812973918229e-06, + "loss": 0.6492, + "step": 1627 + }, + { + "epoch": 0.1130398555756145, + "grad_norm": 4.5435492063896215, + "learning_rate": 9.820514511956322e-06, + "loss": 0.5198, + "step": 1628 + }, + { + "epoch": 0.11310929037633662, + "grad_norm": 4.994011109844973, + "learning_rate": 9.820215806178247e-06, + "loss": 0.6478, + "step": 1629 + }, + { + "epoch": 0.11317872517705874, + "grad_norm": 2.462600576237636, + "learning_rate": 9.81991685659911e-06, + "loss": 0.3581, + "step": 1630 + }, + { + "epoch": 0.11324815997778086, + "grad_norm": 4.173065566970126, + "learning_rate": 9.819617663234033e-06, + "loss": 0.5546, + "step": 1631 + }, + { + "epoch": 0.11331759477850299, + "grad_norm": 4.713183084165724, + "learning_rate": 9.819318226098147e-06, + "loss": 0.755, + "step": 1632 + }, + { + "epoch": 0.11338702957922511, + "grad_norm": 4.484860173800317, + "learning_rate": 9.8190185452066e-06, + "loss": 0.5616, + "step": 1633 + }, + { + "epoch": 0.11345646437994723, + "grad_norm": 5.0019413446242815, + "learning_rate": 9.818718620574547e-06, + "loss": 0.4998, + "step": 1634 + }, + { + "epoch": 0.11352589918066935, + "grad_norm": 4.243186151910063, + "learning_rate": 9.81841845221716e-06, + "loss": 0.4372, + "step": 1635 + }, + { + "epoch": 0.11359533398139147, + "grad_norm": 5.811550641860544, + "learning_rate": 9.81811804014962e-06, + "loss": 0.799, + "step": 1636 + }, + { + "epoch": 0.1136647687821136, + "grad_norm": 4.018389265290418, + "learning_rate": 9.817817384387123e-06, + "loss": 0.5037, + "step": 1637 + 
}, + { + "epoch": 0.11373420358283572, + "grad_norm": 4.737743263934409, + "learning_rate": 9.817516484944871e-06, + "loss": 0.7064, + "step": 1638 + }, + { + "epoch": 0.11380363838355784, + "grad_norm": 4.318047725515127, + "learning_rate": 9.817215341838089e-06, + "loss": 0.5675, + "step": 1639 + }, + { + "epoch": 0.11387307318427996, + "grad_norm": 4.18738630195801, + "learning_rate": 9.816913955082006e-06, + "loss": 0.6433, + "step": 1640 + }, + { + "epoch": 0.11394250798500208, + "grad_norm": 4.359658266159575, + "learning_rate": 9.816612324691865e-06, + "loss": 0.4907, + "step": 1641 + }, + { + "epoch": 0.1140119427857242, + "grad_norm": 2.930211588564268, + "learning_rate": 9.816310450682923e-06, + "loss": 0.3517, + "step": 1642 + }, + { + "epoch": 0.11408137758644633, + "grad_norm": 4.153235784652996, + "learning_rate": 9.816008333070449e-06, + "loss": 0.5227, + "step": 1643 + }, + { + "epoch": 0.11415081238716845, + "grad_norm": 5.435588917114712, + "learning_rate": 9.815705971869722e-06, + "loss": 0.6447, + "step": 1644 + }, + { + "epoch": 0.11422024718789058, + "grad_norm": 5.193230487598492, + "learning_rate": 9.815403367096038e-06, + "loss": 0.7624, + "step": 1645 + }, + { + "epoch": 0.1142896819886127, + "grad_norm": 3.510205620724451, + "learning_rate": 9.8151005187647e-06, + "loss": 0.4098, + "step": 1646 + }, + { + "epoch": 0.11435911678933482, + "grad_norm": 5.170449628229908, + "learning_rate": 9.814797426891026e-06, + "loss": 0.558, + "step": 1647 + }, + { + "epoch": 0.11442855159005694, + "grad_norm": 4.271438949125593, + "learning_rate": 9.814494091490347e-06, + "loss": 0.4687, + "step": 1648 + }, + { + "epoch": 0.11449798639077906, + "grad_norm": 4.5198704923220285, + "learning_rate": 9.814190512578003e-06, + "loss": 0.5943, + "step": 1649 + }, + { + "epoch": 0.11456742119150118, + "grad_norm": 4.780131795922946, + "learning_rate": 9.813886690169353e-06, + "loss": 0.6866, + "step": 1650 + }, + { + "epoch": 0.1146368559922233, + "grad_norm": 4.082262998535368, + "learning_rate": 9.81358262427976e-06, + "loss": 0.5692, + "step": 1651 + }, + { + "epoch": 0.11470629079294542, + "grad_norm": 4.184962174339737, + "learning_rate": 9.813278314924606e-06, + "loss": 0.4397, + "step": 1652 + }, + { + "epoch": 0.11477572559366754, + "grad_norm": 3.863851143741417, + "learning_rate": 9.812973762119282e-06, + "loss": 0.2746, + "step": 1653 + }, + { + "epoch": 0.11484516039438966, + "grad_norm": 4.636606359121546, + "learning_rate": 9.812668965879189e-06, + "loss": 0.5567, + "step": 1654 + }, + { + "epoch": 0.1149145951951118, + "grad_norm": 3.9603695271694153, + "learning_rate": 9.812363926219747e-06, + "loss": 0.5271, + "step": 1655 + }, + { + "epoch": 0.11498402999583392, + "grad_norm": 4.384958577879751, + "learning_rate": 9.812058643156383e-06, + "loss": 0.5356, + "step": 1656 + }, + { + "epoch": 0.11505346479655604, + "grad_norm": 4.941713028545739, + "learning_rate": 9.811753116704538e-06, + "loss": 0.677, + "step": 1657 + }, + { + "epoch": 0.11512289959727816, + "grad_norm": 3.7051703814617416, + "learning_rate": 9.811447346879666e-06, + "loss": 0.3111, + "step": 1658 + }, + { + "epoch": 0.11519233439800028, + "grad_norm": 4.507083872550089, + "learning_rate": 9.811141333697229e-06, + "loss": 0.3968, + "step": 1659 + }, + { + "epoch": 0.1152617691987224, + "grad_norm": 3.8764165037350873, + "learning_rate": 9.81083507717271e-06, + "loss": 0.5018, + "step": 1660 + }, + { + "epoch": 0.11533120399944452, + "grad_norm": 4.690159655133787, + "learning_rate": 
9.810528577321596e-06, + "loss": 0.5007, + "step": 1661 + }, + { + "epoch": 0.11540063880016664, + "grad_norm": 4.75494499096628, + "learning_rate": 9.81022183415939e-06, + "loss": 0.4398, + "step": 1662 + }, + { + "epoch": 0.11547007360088876, + "grad_norm": 4.152409365538443, + "learning_rate": 9.809914847701608e-06, + "loss": 0.6402, + "step": 1663 + }, + { + "epoch": 0.11553950840161088, + "grad_norm": 4.836386581933431, + "learning_rate": 9.809607617963774e-06, + "loss": 0.6558, + "step": 1664 + }, + { + "epoch": 0.115608943202333, + "grad_norm": 4.879353539847682, + "learning_rate": 9.809300144961428e-06, + "loss": 0.5634, + "step": 1665 + }, + { + "epoch": 0.11567837800305514, + "grad_norm": 3.8245068287320474, + "learning_rate": 9.808992428710128e-06, + "loss": 0.3772, + "step": 1666 + }, + { + "epoch": 0.11574781280377726, + "grad_norm": 4.933973318157725, + "learning_rate": 9.808684469225427e-06, + "loss": 0.5334, + "step": 1667 + }, + { + "epoch": 0.11581724760449938, + "grad_norm": 3.582216719386233, + "learning_rate": 9.80837626652291e-06, + "loss": 0.4699, + "step": 1668 + }, + { + "epoch": 0.1158866824052215, + "grad_norm": 5.042437378079707, + "learning_rate": 9.808067820618162e-06, + "loss": 0.8377, + "step": 1669 + }, + { + "epoch": 0.11595611720594362, + "grad_norm": 3.096714235307455, + "learning_rate": 9.807759131526784e-06, + "loss": 0.3174, + "step": 1670 + }, + { + "epoch": 0.11602555200666574, + "grad_norm": 4.083449398549343, + "learning_rate": 9.807450199264388e-06, + "loss": 0.4748, + "step": 1671 + }, + { + "epoch": 0.11609498680738786, + "grad_norm": 5.066613552249841, + "learning_rate": 9.807141023846602e-06, + "loss": 0.5623, + "step": 1672 + }, + { + "epoch": 0.11616442160810998, + "grad_norm": 2.829723536089631, + "learning_rate": 9.806831605289064e-06, + "loss": 0.298, + "step": 1673 + }, + { + "epoch": 0.1162338564088321, + "grad_norm": 3.7123490835591877, + "learning_rate": 9.806521943607421e-06, + "loss": 0.4025, + "step": 1674 + }, + { + "epoch": 0.11630329120955422, + "grad_norm": 5.29058880790818, + "learning_rate": 9.806212038817337e-06, + "loss": 0.9478, + "step": 1675 + }, + { + "epoch": 0.11637272601027634, + "grad_norm": 4.362487781678488, + "learning_rate": 9.805901890934489e-06, + "loss": 0.4006, + "step": 1676 + }, + { + "epoch": 0.11644216081099847, + "grad_norm": 4.251015453973613, + "learning_rate": 9.80559149997456e-06, + "loss": 0.5686, + "step": 1677 + }, + { + "epoch": 0.1165115956117206, + "grad_norm": 4.249796639268927, + "learning_rate": 9.80528086595325e-06, + "loss": 0.6019, + "step": 1678 + }, + { + "epoch": 0.11658103041244272, + "grad_norm": 5.358643557038087, + "learning_rate": 9.804969988886272e-06, + "loss": 0.5723, + "step": 1679 + }, + { + "epoch": 0.11665046521316484, + "grad_norm": 3.3736961957766143, + "learning_rate": 9.804658868789349e-06, + "loss": 0.3636, + "step": 1680 + }, + { + "epoch": 0.11671990001388696, + "grad_norm": 4.01816723815723, + "learning_rate": 9.80434750567822e-06, + "loss": 0.4961, + "step": 1681 + }, + { + "epoch": 0.11678933481460908, + "grad_norm": 4.302250390946548, + "learning_rate": 9.804035899568626e-06, + "loss": 0.4491, + "step": 1682 + }, + { + "epoch": 0.1168587696153312, + "grad_norm": 4.781545779769371, + "learning_rate": 9.803724050476335e-06, + "loss": 0.8137, + "step": 1683 + }, + { + "epoch": 0.11692820441605332, + "grad_norm": 4.094319295725907, + "learning_rate": 9.803411958417118e-06, + "loss": 0.5435, + "step": 1684 + }, + { + "epoch": 0.11699763921677545, + "grad_norm": 
4.313402821208586, + "learning_rate": 9.803099623406757e-06, + "loss": 0.5524, + "step": 1685 + }, + { + "epoch": 0.11706707401749757, + "grad_norm": 5.282461204485641, + "learning_rate": 9.802787045461056e-06, + "loss": 0.645, + "step": 1686 + }, + { + "epoch": 0.11713650881821969, + "grad_norm": 3.672488360157587, + "learning_rate": 9.802474224595818e-06, + "loss": 0.3966, + "step": 1687 + }, + { + "epoch": 0.11720594361894181, + "grad_norm": 5.0527815413364845, + "learning_rate": 9.802161160826868e-06, + "loss": 0.8493, + "step": 1688 + }, + { + "epoch": 0.11727537841966394, + "grad_norm": 4.0520212681991365, + "learning_rate": 9.801847854170042e-06, + "loss": 0.3701, + "step": 1689 + }, + { + "epoch": 0.11734481322038606, + "grad_norm": 4.116258360688445, + "learning_rate": 9.801534304641184e-06, + "loss": 0.5303, + "step": 1690 + }, + { + "epoch": 0.11741424802110818, + "grad_norm": 3.4463356608170654, + "learning_rate": 9.801220512256155e-06, + "loss": 0.3537, + "step": 1691 + }, + { + "epoch": 0.1174836828218303, + "grad_norm": 4.944817681385152, + "learning_rate": 9.800906477030825e-06, + "loss": 0.6223, + "step": 1692 + }, + { + "epoch": 0.11755311762255242, + "grad_norm": 5.04652776842049, + "learning_rate": 9.800592198981077e-06, + "loss": 0.2809, + "step": 1693 + }, + { + "epoch": 0.11762255242327455, + "grad_norm": 3.208818525830077, + "learning_rate": 9.80027767812281e-06, + "loss": 0.4328, + "step": 1694 + }, + { + "epoch": 0.11769198722399667, + "grad_norm": 5.067224331419792, + "learning_rate": 9.799962914471927e-06, + "loss": 0.8128, + "step": 1695 + }, + { + "epoch": 0.11776142202471879, + "grad_norm": 3.3737912896805367, + "learning_rate": 9.799647908044355e-06, + "loss": 0.3542, + "step": 1696 + }, + { + "epoch": 0.11783085682544091, + "grad_norm": 4.270624352759668, + "learning_rate": 9.79933265885602e-06, + "loss": 0.4514, + "step": 1697 + }, + { + "epoch": 0.11790029162616303, + "grad_norm": 5.09791387047133, + "learning_rate": 9.79901716692287e-06, + "loss": 0.7495, + "step": 1698 + }, + { + "epoch": 0.11796972642688515, + "grad_norm": 4.066840800561533, + "learning_rate": 9.798701432260865e-06, + "loss": 0.3086, + "step": 1699 + }, + { + "epoch": 0.11803916122760727, + "grad_norm": 3.8076512506229694, + "learning_rate": 9.798385454885968e-06, + "loss": 0.468, + "step": 1700 + }, + { + "epoch": 0.1181085960283294, + "grad_norm": 3.7979216614517215, + "learning_rate": 9.798069234814167e-06, + "loss": 0.3929, + "step": 1701 + }, + { + "epoch": 0.11817803082905153, + "grad_norm": 2.574675289203647, + "learning_rate": 9.797752772061453e-06, + "loss": 0.2818, + "step": 1702 + }, + { + "epoch": 0.11824746562977365, + "grad_norm": 3.7641269228487366, + "learning_rate": 9.797436066643831e-06, + "loss": 0.379, + "step": 1703 + }, + { + "epoch": 0.11831690043049577, + "grad_norm": 5.46268523545497, + "learning_rate": 9.797119118577323e-06, + "loss": 0.8578, + "step": 1704 + }, + { + "epoch": 0.11838633523121789, + "grad_norm": 4.641459046859287, + "learning_rate": 9.79680192787796e-06, + "loss": 0.7158, + "step": 1705 + }, + { + "epoch": 0.11845577003194001, + "grad_norm": 4.289730359366163, + "learning_rate": 9.79648449456178e-06, + "loss": 0.6539, + "step": 1706 + }, + { + "epoch": 0.11852520483266213, + "grad_norm": 5.343407314120756, + "learning_rate": 9.796166818644843e-06, + "loss": 0.7804, + "step": 1707 + }, + { + "epoch": 0.11859463963338425, + "grad_norm": 4.778188601029679, + "learning_rate": 9.795848900143217e-06, + "loss": 0.4941, + "step": 1708 + }, + { + 
"epoch": 0.11866407443410637, + "grad_norm": 4.340651735285887, + "learning_rate": 9.795530739072979e-06, + "loss": 0.5169, + "step": 1709 + }, + { + "epoch": 0.11873350923482849, + "grad_norm": 4.799767648773151, + "learning_rate": 9.795212335450224e-06, + "loss": 0.598, + "step": 1710 + }, + { + "epoch": 0.11880294403555061, + "grad_norm": 4.447824518809235, + "learning_rate": 9.794893689291054e-06, + "loss": 0.4449, + "step": 1711 + }, + { + "epoch": 0.11887237883627275, + "grad_norm": 4.846488817884635, + "learning_rate": 9.794574800611588e-06, + "loss": 0.4941, + "step": 1712 + }, + { + "epoch": 0.11894181363699487, + "grad_norm": 3.5224235747276644, + "learning_rate": 9.794255669427952e-06, + "loss": 0.3266, + "step": 1713 + }, + { + "epoch": 0.11901124843771699, + "grad_norm": 4.827867599440925, + "learning_rate": 9.793936295756292e-06, + "loss": 0.5808, + "step": 1714 + }, + { + "epoch": 0.11908068323843911, + "grad_norm": 5.040162625644233, + "learning_rate": 9.793616679612756e-06, + "loss": 0.4275, + "step": 1715 + }, + { + "epoch": 0.11915011803916123, + "grad_norm": 4.876503092833679, + "learning_rate": 9.793296821013515e-06, + "loss": 0.8744, + "step": 1716 + }, + { + "epoch": 0.11921955283988335, + "grad_norm": 5.165187735288729, + "learning_rate": 9.792976719974744e-06, + "loss": 0.7894, + "step": 1717 + }, + { + "epoch": 0.11928898764060547, + "grad_norm": 3.5660370719822523, + "learning_rate": 9.792656376512635e-06, + "loss": 0.4372, + "step": 1718 + }, + { + "epoch": 0.11935842244132759, + "grad_norm": 4.716438597867215, + "learning_rate": 9.792335790643387e-06, + "loss": 0.4876, + "step": 1719 + }, + { + "epoch": 0.11942785724204971, + "grad_norm": 3.6117060114192534, + "learning_rate": 9.792014962383221e-06, + "loss": 0.4416, + "step": 1720 + }, + { + "epoch": 0.11949729204277183, + "grad_norm": 4.308363500310813, + "learning_rate": 9.79169389174836e-06, + "loss": 0.5443, + "step": 1721 + }, + { + "epoch": 0.11956672684349395, + "grad_norm": 4.461284097438357, + "learning_rate": 9.791372578755044e-06, + "loss": 0.3977, + "step": 1722 + }, + { + "epoch": 0.11963616164421609, + "grad_norm": 3.8188951864622127, + "learning_rate": 9.791051023419522e-06, + "loss": 0.4713, + "step": 1723 + }, + { + "epoch": 0.11970559644493821, + "grad_norm": 4.870742333437552, + "learning_rate": 9.790729225758064e-06, + "loss": 0.535, + "step": 1724 + }, + { + "epoch": 0.11977503124566033, + "grad_norm": 3.6518919420761073, + "learning_rate": 9.79040718578694e-06, + "loss": 0.2369, + "step": 1725 + }, + { + "epoch": 0.11984446604638245, + "grad_norm": 4.302248853074158, + "learning_rate": 9.790084903522444e-06, + "loss": 0.4053, + "step": 1726 + }, + { + "epoch": 0.11991390084710457, + "grad_norm": 3.6731511625912674, + "learning_rate": 9.789762378980874e-06, + "loss": 0.4124, + "step": 1727 + }, + { + "epoch": 0.11998333564782669, + "grad_norm": 4.702931511904636, + "learning_rate": 9.789439612178541e-06, + "loss": 0.5971, + "step": 1728 + }, + { + "epoch": 0.12005277044854881, + "grad_norm": 4.457762639450602, + "learning_rate": 9.789116603131773e-06, + "loss": 0.3287, + "step": 1729 + }, + { + "epoch": 0.12012220524927093, + "grad_norm": 3.875600980576659, + "learning_rate": 9.788793351856906e-06, + "loss": 0.3351, + "step": 1730 + }, + { + "epoch": 0.12019164004999305, + "grad_norm": 5.06322354064501, + "learning_rate": 9.788469858370292e-06, + "loss": 0.5239, + "step": 1731 + }, + { + "epoch": 0.12026107485071517, + "grad_norm": 5.732136164714776, + "learning_rate": 
9.788146122688289e-06, + "loss": 0.5588, + "step": 1732 + }, + { + "epoch": 0.1203305096514373, + "grad_norm": 3.7812660863601515, + "learning_rate": 9.787822144827272e-06, + "loss": 0.4961, + "step": 1733 + }, + { + "epoch": 0.12039994445215942, + "grad_norm": 4.509362506992422, + "learning_rate": 9.78749792480363e-06, + "loss": 0.5193, + "step": 1734 + }, + { + "epoch": 0.12046937925288155, + "grad_norm": 4.7182803821630275, + "learning_rate": 9.78717346263376e-06, + "loss": 0.5364, + "step": 1735 + }, + { + "epoch": 0.12053881405360367, + "grad_norm": 5.197037297552331, + "learning_rate": 9.786848758334075e-06, + "loss": 0.8427, + "step": 1736 + }, + { + "epoch": 0.12060824885432579, + "grad_norm": 5.3935377930770265, + "learning_rate": 9.786523811920995e-06, + "loss": 0.6223, + "step": 1737 + }, + { + "epoch": 0.12067768365504791, + "grad_norm": 4.019467386284341, + "learning_rate": 9.786198623410958e-06, + "loss": 0.6432, + "step": 1738 + }, + { + "epoch": 0.12074711845577003, + "grad_norm": 3.8245309085058263, + "learning_rate": 9.785873192820411e-06, + "loss": 0.3583, + "step": 1739 + }, + { + "epoch": 0.12081655325649215, + "grad_norm": 4.697094246983288, + "learning_rate": 9.785547520165813e-06, + "loss": 0.5494, + "step": 1740 + }, + { + "epoch": 0.12088598805721427, + "grad_norm": 5.71910329169793, + "learning_rate": 9.785221605463636e-06, + "loss": 0.714, + "step": 1741 + }, + { + "epoch": 0.1209554228579364, + "grad_norm": 4.623625250642741, + "learning_rate": 9.784895448730366e-06, + "loss": 0.6718, + "step": 1742 + }, + { + "epoch": 0.12102485765865852, + "grad_norm": 3.6374407594337455, + "learning_rate": 9.784569049982497e-06, + "loss": 0.3915, + "step": 1743 + }, + { + "epoch": 0.12109429245938064, + "grad_norm": 5.221373600544769, + "learning_rate": 9.78424240923654e-06, + "loss": 0.5445, + "step": 1744 + }, + { + "epoch": 0.12116372726010276, + "grad_norm": 5.024479846322383, + "learning_rate": 9.783915526509016e-06, + "loss": 0.5985, + "step": 1745 + }, + { + "epoch": 0.12123316206082489, + "grad_norm": 4.64495320212532, + "learning_rate": 9.78358840181646e-06, + "loss": 0.4452, + "step": 1746 + }, + { + "epoch": 0.12130259686154701, + "grad_norm": 5.8191971805827185, + "learning_rate": 9.783261035175413e-06, + "loss": 0.685, + "step": 1747 + }, + { + "epoch": 0.12137203166226913, + "grad_norm": 2.8720120588204, + "learning_rate": 9.782933426602436e-06, + "loss": 0.2025, + "step": 1748 + }, + { + "epoch": 0.12144146646299125, + "grad_norm": 3.265447671056633, + "learning_rate": 9.7826055761141e-06, + "loss": 0.3473, + "step": 1749 + }, + { + "epoch": 0.12151090126371338, + "grad_norm": 5.093259054752176, + "learning_rate": 9.782277483726984e-06, + "loss": 0.8673, + "step": 1750 + }, + { + "epoch": 0.1215803360644355, + "grad_norm": 5.420568448158613, + "learning_rate": 9.781949149457686e-06, + "loss": 0.6675, + "step": 1751 + }, + { + "epoch": 0.12164977086515762, + "grad_norm": 3.3841345040308566, + "learning_rate": 9.78162057332281e-06, + "loss": 0.4042, + "step": 1752 + }, + { + "epoch": 0.12171920566587974, + "grad_norm": 3.0575130032376565, + "learning_rate": 9.781291755338975e-06, + "loss": 0.2801, + "step": 1753 + }, + { + "epoch": 0.12178864046660186, + "grad_norm": 3.8175123389163264, + "learning_rate": 9.780962695522815e-06, + "loss": 0.3632, + "step": 1754 + }, + { + "epoch": 0.12185807526732398, + "grad_norm": 5.414279726866016, + "learning_rate": 9.780633393890972e-06, + "loss": 0.6495, + "step": 1755 + }, + { + "epoch": 0.1219275100680461, + "grad_norm": 
4.355123148881332, + "learning_rate": 9.780303850460102e-06, + "loss": 0.447, + "step": 1756 + }, + { + "epoch": 0.12199694486876822, + "grad_norm": 4.569654746824713, + "learning_rate": 9.779974065246872e-06, + "loss": 0.3025, + "step": 1757 + }, + { + "epoch": 0.12206637966949035, + "grad_norm": 4.862548918327962, + "learning_rate": 9.779644038267962e-06, + "loss": 0.6075, + "step": 1758 + }, + { + "epoch": 0.12213581447021248, + "grad_norm": 2.546465191252702, + "learning_rate": 9.779313769540066e-06, + "loss": 0.3151, + "step": 1759 + }, + { + "epoch": 0.1222052492709346, + "grad_norm": 3.9465978019141494, + "learning_rate": 9.778983259079886e-06, + "loss": 0.461, + "step": 1760 + }, + { + "epoch": 0.12227468407165672, + "grad_norm": 4.489098934043291, + "learning_rate": 9.778652506904142e-06, + "loss": 0.8292, + "step": 1761 + }, + { + "epoch": 0.12234411887237884, + "grad_norm": 4.0408485298165875, + "learning_rate": 9.778321513029564e-06, + "loss": 0.4274, + "step": 1762 + }, + { + "epoch": 0.12241355367310096, + "grad_norm": 4.273000573877079, + "learning_rate": 9.777990277472887e-06, + "loss": 0.3045, + "step": 1763 + }, + { + "epoch": 0.12248298847382308, + "grad_norm": 2.751467069584683, + "learning_rate": 9.777658800250868e-06, + "loss": 0.3264, + "step": 1764 + }, + { + "epoch": 0.1225524232745452, + "grad_norm": 4.155573166386027, + "learning_rate": 9.777327081380277e-06, + "loss": 0.4696, + "step": 1765 + }, + { + "epoch": 0.12262185807526732, + "grad_norm": 4.263280283126045, + "learning_rate": 9.776995120877886e-06, + "loss": 0.6178, + "step": 1766 + }, + { + "epoch": 0.12269129287598944, + "grad_norm": 4.067418782200545, + "learning_rate": 9.776662918760487e-06, + "loss": 0.4424, + "step": 1767 + }, + { + "epoch": 0.12276072767671156, + "grad_norm": 5.116189955115654, + "learning_rate": 9.776330475044883e-06, + "loss": 0.8439, + "step": 1768 + }, + { + "epoch": 0.1228301624774337, + "grad_norm": 4.352649030954681, + "learning_rate": 9.775997789747888e-06, + "loss": 0.6252, + "step": 1769 + }, + { + "epoch": 0.12289959727815582, + "grad_norm": 4.826425145081024, + "learning_rate": 9.775664862886331e-06, + "loss": 0.6233, + "step": 1770 + }, + { + "epoch": 0.12296903207887794, + "grad_norm": 4.184276031581763, + "learning_rate": 9.775331694477047e-06, + "loss": 0.566, + "step": 1771 + }, + { + "epoch": 0.12303846687960006, + "grad_norm": 4.0849420870839905, + "learning_rate": 9.774998284536892e-06, + "loss": 0.4232, + "step": 1772 + }, + { + "epoch": 0.12310790168032218, + "grad_norm": 3.7174346496372945, + "learning_rate": 9.774664633082724e-06, + "loss": 0.4009, + "step": 1773 + }, + { + "epoch": 0.1231773364810443, + "grad_norm": 4.186400117553004, + "learning_rate": 9.774330740131424e-06, + "loss": 0.4533, + "step": 1774 + }, + { + "epoch": 0.12324677128176642, + "grad_norm": 4.057531497930357, + "learning_rate": 9.773996605699876e-06, + "loss": 0.4657, + "step": 1775 + }, + { + "epoch": 0.12331620608248854, + "grad_norm": 4.868401522510038, + "learning_rate": 9.773662229804984e-06, + "loss": 0.5264, + "step": 1776 + }, + { + "epoch": 0.12338564088321066, + "grad_norm": 4.606773982600506, + "learning_rate": 9.773327612463656e-06, + "loss": 0.5314, + "step": 1777 + }, + { + "epoch": 0.12345507568393278, + "grad_norm": 3.7509095111252355, + "learning_rate": 9.772992753692821e-06, + "loss": 0.3741, + "step": 1778 + }, + { + "epoch": 0.1235245104846549, + "grad_norm": 4.4145984250899675, + "learning_rate": 9.772657653509412e-06, + "loss": 0.7466, + "step": 1779 + }, + { + 
"epoch": 0.12359394528537702, + "grad_norm": 4.283551128463392, + "learning_rate": 9.77232231193038e-06, + "loss": 0.4786, + "step": 1780 + }, + { + "epoch": 0.12366338008609916, + "grad_norm": 4.5007352726891225, + "learning_rate": 9.771986728972684e-06, + "loss": 0.3799, + "step": 1781 + }, + { + "epoch": 0.12373281488682128, + "grad_norm": 5.258005770112338, + "learning_rate": 9.771650904653302e-06, + "loss": 0.7578, + "step": 1782 + }, + { + "epoch": 0.1238022496875434, + "grad_norm": 3.1455804420815454, + "learning_rate": 9.771314838989215e-06, + "loss": 0.3213, + "step": 1783 + }, + { + "epoch": 0.12387168448826552, + "grad_norm": 4.204647422245746, + "learning_rate": 9.770978531997423e-06, + "loss": 0.5212, + "step": 1784 + }, + { + "epoch": 0.12394111928898764, + "grad_norm": 5.062125799064982, + "learning_rate": 9.770641983694935e-06, + "loss": 0.8633, + "step": 1785 + }, + { + "epoch": 0.12401055408970976, + "grad_norm": 5.350570800154277, + "learning_rate": 9.770305194098774e-06, + "loss": 0.5724, + "step": 1786 + }, + { + "epoch": 0.12407998889043188, + "grad_norm": 5.038263677308377, + "learning_rate": 9.769968163225975e-06, + "loss": 0.7449, + "step": 1787 + }, + { + "epoch": 0.124149423691154, + "grad_norm": 3.522527657097216, + "learning_rate": 9.769630891093583e-06, + "loss": 0.4395, + "step": 1788 + }, + { + "epoch": 0.12421885849187612, + "grad_norm": 4.836971375159003, + "learning_rate": 9.769293377718658e-06, + "loss": 0.8029, + "step": 1789 + }, + { + "epoch": 0.12428829329259825, + "grad_norm": 2.6164357002141467, + "learning_rate": 9.768955623118273e-06, + "loss": 0.3406, + "step": 1790 + }, + { + "epoch": 0.12435772809332037, + "grad_norm": 4.253442700254873, + "learning_rate": 9.768617627309505e-06, + "loss": 0.672, + "step": 1791 + }, + { + "epoch": 0.1244271628940425, + "grad_norm": 3.9975407517883763, + "learning_rate": 9.768279390309457e-06, + "loss": 0.4902, + "step": 1792 + }, + { + "epoch": 0.12449659769476462, + "grad_norm": 3.7615750603518534, + "learning_rate": 9.76794091213523e-06, + "loss": 0.4263, + "step": 1793 + }, + { + "epoch": 0.12456603249548674, + "grad_norm": 3.4481392328629727, + "learning_rate": 9.767602192803948e-06, + "loss": 0.3783, + "step": 1794 + }, + { + "epoch": 0.12463546729620886, + "grad_norm": 3.6165197489946315, + "learning_rate": 9.767263232332744e-06, + "loss": 0.3354, + "step": 1795 + }, + { + "epoch": 0.12470490209693098, + "grad_norm": 5.258884734657244, + "learning_rate": 9.766924030738758e-06, + "loss": 0.7447, + "step": 1796 + }, + { + "epoch": 0.1247743368976531, + "grad_norm": 7.004388794378471, + "learning_rate": 9.76658458803915e-06, + "loss": 0.6935, + "step": 1797 + }, + { + "epoch": 0.12484377169837522, + "grad_norm": 4.324621150544358, + "learning_rate": 9.766244904251087e-06, + "loss": 0.5644, + "step": 1798 + }, + { + "epoch": 0.12491320649909735, + "grad_norm": 4.43271847031387, + "learning_rate": 9.765904979391749e-06, + "loss": 0.4704, + "step": 1799 + }, + { + "epoch": 0.12498264129981947, + "grad_norm": 4.556371616407555, + "learning_rate": 9.765564813478332e-06, + "loss": 0.4905, + "step": 1800 + }, + { + "epoch": 0.1250520761005416, + "grad_norm": 3.172510936296308, + "learning_rate": 9.765224406528037e-06, + "loss": 0.2841, + "step": 1801 + }, + { + "epoch": 0.12512151090126372, + "grad_norm": 4.892158968318668, + "learning_rate": 9.764883758558086e-06, + "loss": 0.4777, + "step": 1802 + }, + { + "epoch": 0.12519094570198583, + "grad_norm": 3.5949272436689097, + "learning_rate": 9.764542869585705e-06, + 
"loss": 0.3131, + "step": 1803 + }, + { + "epoch": 0.12526038050270796, + "grad_norm": 4.962337124079528, + "learning_rate": 9.764201739628136e-06, + "loss": 0.5364, + "step": 1804 + }, + { + "epoch": 0.12532981530343007, + "grad_norm": 5.73503737180503, + "learning_rate": 9.763860368702638e-06, + "loss": 0.6831, + "step": 1805 + }, + { + "epoch": 0.1253992501041522, + "grad_norm": 2.978771160888224, + "learning_rate": 9.76351875682647e-06, + "loss": 0.3032, + "step": 1806 + }, + { + "epoch": 0.1254686849048743, + "grad_norm": 3.9247400718152354, + "learning_rate": 9.763176904016914e-06, + "loss": 0.5178, + "step": 1807 + }, + { + "epoch": 0.12553811970559645, + "grad_norm": 4.341900589246379, + "learning_rate": 9.762834810291259e-06, + "loss": 0.3188, + "step": 1808 + }, + { + "epoch": 0.12560755450631858, + "grad_norm": 5.784727291764583, + "learning_rate": 9.762492475666811e-06, + "loss": 0.846, + "step": 1809 + }, + { + "epoch": 0.1256769893070407, + "grad_norm": 3.310205368605784, + "learning_rate": 9.762149900160881e-06, + "loss": 0.3303, + "step": 1810 + }, + { + "epoch": 0.12574642410776282, + "grad_norm": 4.342776512321875, + "learning_rate": 9.761807083790799e-06, + "loss": 0.4553, + "step": 1811 + }, + { + "epoch": 0.12581585890848493, + "grad_norm": 5.163083634892525, + "learning_rate": 9.761464026573903e-06, + "loss": 0.742, + "step": 1812 + }, + { + "epoch": 0.12588529370920706, + "grad_norm": 3.7469870277442197, + "learning_rate": 9.761120728527544e-06, + "loss": 0.4785, + "step": 1813 + }, + { + "epoch": 0.12595472850992917, + "grad_norm": 5.094743515886799, + "learning_rate": 9.760777189669087e-06, + "loss": 0.6243, + "step": 1814 + }, + { + "epoch": 0.1260241633106513, + "grad_norm": 5.3348630084182895, + "learning_rate": 9.760433410015906e-06, + "loss": 0.757, + "step": 1815 + }, + { + "epoch": 0.1260935981113734, + "grad_norm": 4.218047348561074, + "learning_rate": 9.76008938958539e-06, + "loss": 0.4869, + "step": 1816 + }, + { + "epoch": 0.12616303291209555, + "grad_norm": 3.434367644257062, + "learning_rate": 9.75974512839494e-06, + "loss": 0.3012, + "step": 1817 + }, + { + "epoch": 0.12623246771281765, + "grad_norm": 4.290369411886893, + "learning_rate": 9.759400626461966e-06, + "loss": 0.5693, + "step": 1818 + }, + { + "epoch": 0.1263019025135398, + "grad_norm": 4.177294307507477, + "learning_rate": 9.759055883803896e-06, + "loss": 0.5852, + "step": 1819 + }, + { + "epoch": 0.1263713373142619, + "grad_norm": 4.12931338919358, + "learning_rate": 9.758710900438162e-06, + "loss": 0.4204, + "step": 1820 + }, + { + "epoch": 0.12644077211498403, + "grad_norm": 5.25275519473393, + "learning_rate": 9.758365676382218e-06, + "loss": 0.5688, + "step": 1821 + }, + { + "epoch": 0.12651020691570616, + "grad_norm": 4.761388335309499, + "learning_rate": 9.75802021165352e-06, + "loss": 0.6799, + "step": 1822 + }, + { + "epoch": 0.12657964171642827, + "grad_norm": 4.01693446713266, + "learning_rate": 9.757674506269545e-06, + "loss": 0.3772, + "step": 1823 + }, + { + "epoch": 0.1266490765171504, + "grad_norm": 4.754351366456632, + "learning_rate": 9.757328560247777e-06, + "loss": 0.5844, + "step": 1824 + }, + { + "epoch": 0.1267185113178725, + "grad_norm": 4.2931784997254505, + "learning_rate": 9.756982373605715e-06, + "loss": 0.6067, + "step": 1825 + }, + { + "epoch": 0.12678794611859465, + "grad_norm": 3.762348714479913, + "learning_rate": 9.756635946360867e-06, + "loss": 0.5811, + "step": 1826 + }, + { + "epoch": 0.12685738091931675, + "grad_norm": 4.360583412209665, + 
"learning_rate": 9.756289278530754e-06, + "loss": 0.5136, + "step": 1827 + }, + { + "epoch": 0.1269268157200389, + "grad_norm": 5.868709425117602, + "learning_rate": 9.75594237013291e-06, + "loss": 0.7776, + "step": 1828 + }, + { + "epoch": 0.126996250520761, + "grad_norm": 3.936409560835182, + "learning_rate": 9.755595221184887e-06, + "loss": 0.4506, + "step": 1829 + }, + { + "epoch": 0.12706568532148313, + "grad_norm": 4.707009052812682, + "learning_rate": 9.755247831704236e-06, + "loss": 0.7754, + "step": 1830 + }, + { + "epoch": 0.12713512012220524, + "grad_norm": 3.8013013531831272, + "learning_rate": 9.754900201708532e-06, + "loss": 0.3204, + "step": 1831 + }, + { + "epoch": 0.12720455492292737, + "grad_norm": 3.617958678969524, + "learning_rate": 9.754552331215354e-06, + "loss": 0.4046, + "step": 1832 + }, + { + "epoch": 0.1272739897236495, + "grad_norm": 4.630773013586941, + "learning_rate": 9.754204220242303e-06, + "loss": 0.5782, + "step": 1833 + }, + { + "epoch": 0.1273434245243716, + "grad_norm": 4.569606148302303, + "learning_rate": 9.753855868806978e-06, + "loss": 0.8256, + "step": 1834 + }, + { + "epoch": 0.12741285932509375, + "grad_norm": 3.843743315787575, + "learning_rate": 9.753507276927005e-06, + "loss": 0.3633, + "step": 1835 + }, + { + "epoch": 0.12748229412581585, + "grad_norm": 4.787536272790188, + "learning_rate": 9.753158444620013e-06, + "loss": 0.6483, + "step": 1836 + }, + { + "epoch": 0.127551728926538, + "grad_norm": 4.7951874554992076, + "learning_rate": 9.752809371903645e-06, + "loss": 0.4415, + "step": 1837 + }, + { + "epoch": 0.1276211637272601, + "grad_norm": 4.61157766036463, + "learning_rate": 9.752460058795558e-06, + "loss": 0.5129, + "step": 1838 + }, + { + "epoch": 0.12769059852798223, + "grad_norm": 3.5377934082312588, + "learning_rate": 9.752110505313419e-06, + "loss": 0.3025, + "step": 1839 + }, + { + "epoch": 0.12776003332870434, + "grad_norm": 5.0961048792962265, + "learning_rate": 9.751760711474907e-06, + "loss": 0.6219, + "step": 1840 + }, + { + "epoch": 0.12782946812942647, + "grad_norm": 3.330631129108507, + "learning_rate": 9.751410677297714e-06, + "loss": 0.2671, + "step": 1841 + }, + { + "epoch": 0.12789890293014858, + "grad_norm": 5.846752266653663, + "learning_rate": 9.751060402799548e-06, + "loss": 0.5587, + "step": 1842 + }, + { + "epoch": 0.1279683377308707, + "grad_norm": 5.190908305769495, + "learning_rate": 9.750709887998123e-06, + "loss": 0.4654, + "step": 1843 + }, + { + "epoch": 0.12803777253159285, + "grad_norm": 4.15082659834225, + "learning_rate": 9.750359132911165e-06, + "loss": 0.49, + "step": 1844 + }, + { + "epoch": 0.12810720733231495, + "grad_norm": 5.725666465139423, + "learning_rate": 9.75000813755642e-06, + "loss": 0.4922, + "step": 1845 + }, + { + "epoch": 0.1281766421330371, + "grad_norm": 5.884857658741604, + "learning_rate": 9.749656901951637e-06, + "loss": 0.6666, + "step": 1846 + }, + { + "epoch": 0.1282460769337592, + "grad_norm": 5.1766292626022885, + "learning_rate": 9.749305426114583e-06, + "loss": 0.4867, + "step": 1847 + }, + { + "epoch": 0.12831551173448133, + "grad_norm": 4.466225087796118, + "learning_rate": 9.748953710063033e-06, + "loss": 0.5987, + "step": 1848 + }, + { + "epoch": 0.12838494653520344, + "grad_norm": 4.115171277959829, + "learning_rate": 9.748601753814782e-06, + "loss": 0.4159, + "step": 1849 + }, + { + "epoch": 0.12845438133592557, + "grad_norm": 5.458812641659435, + "learning_rate": 9.748249557387624e-06, + "loss": 0.687, + "step": 1850 + }, + { + "epoch": 0.12852381613664768, + 
"grad_norm": 6.452974341830799, + "learning_rate": 9.747897120799379e-06, + "loss": 0.5378, + "step": 1851 + }, + { + "epoch": 0.1285932509373698, + "grad_norm": 2.873414426968416, + "learning_rate": 9.74754444406787e-06, + "loss": 0.2825, + "step": 1852 + }, + { + "epoch": 0.12866268573809192, + "grad_norm": 6.296518029614031, + "learning_rate": 9.747191527210932e-06, + "loss": 1.0101, + "step": 1853 + }, + { + "epoch": 0.12873212053881405, + "grad_norm": 4.89988189496897, + "learning_rate": 9.746838370246421e-06, + "loss": 0.6278, + "step": 1854 + }, + { + "epoch": 0.1288015553395362, + "grad_norm": 4.199057286242706, + "learning_rate": 9.746484973192196e-06, + "loss": 0.5515, + "step": 1855 + }, + { + "epoch": 0.1288709901402583, + "grad_norm": 5.0511284510622865, + "learning_rate": 9.746131336066134e-06, + "loss": 0.5065, + "step": 1856 + }, + { + "epoch": 0.12894042494098043, + "grad_norm": 5.2781893494311625, + "learning_rate": 9.745777458886118e-06, + "loss": 0.5325, + "step": 1857 + }, + { + "epoch": 0.12900985974170254, + "grad_norm": 3.4295011204261514, + "learning_rate": 9.745423341670048e-06, + "loss": 0.4434, + "step": 1858 + }, + { + "epoch": 0.12907929454242467, + "grad_norm": 3.048474282969756, + "learning_rate": 9.745068984435835e-06, + "loss": 0.2637, + "step": 1859 + }, + { + "epoch": 0.12914872934314678, + "grad_norm": 2.6657393938115, + "learning_rate": 9.744714387201403e-06, + "loss": 0.2568, + "step": 1860 + }, + { + "epoch": 0.1292181641438689, + "grad_norm": 3.8898655774511472, + "learning_rate": 9.744359549984687e-06, + "loss": 0.4741, + "step": 1861 + }, + { + "epoch": 0.12928759894459102, + "grad_norm": 3.9785119962155897, + "learning_rate": 9.744004472803631e-06, + "loss": 0.5367, + "step": 1862 + }, + { + "epoch": 0.12935703374531315, + "grad_norm": 4.634057145852632, + "learning_rate": 9.7436491556762e-06, + "loss": 0.6075, + "step": 1863 + }, + { + "epoch": 0.12942646854603526, + "grad_norm": 3.437199570037133, + "learning_rate": 9.74329359862036e-06, + "loss": 0.3462, + "step": 1864 + }, + { + "epoch": 0.1294959033467574, + "grad_norm": 4.0292865726279095, + "learning_rate": 9.7429378016541e-06, + "loss": 0.5191, + "step": 1865 + }, + { + "epoch": 0.1295653381474795, + "grad_norm": 5.342536280165616, + "learning_rate": 9.742581764795411e-06, + "loss": 0.5465, + "step": 1866 + }, + { + "epoch": 0.12963477294820164, + "grad_norm": 4.56825366376222, + "learning_rate": 9.742225488062302e-06, + "loss": 0.6381, + "step": 1867 + }, + { + "epoch": 0.12970420774892377, + "grad_norm": 3.8985818298212855, + "learning_rate": 9.741868971472797e-06, + "loss": 0.4097, + "step": 1868 + }, + { + "epoch": 0.12977364254964588, + "grad_norm": 4.464710102008608, + "learning_rate": 9.741512215044922e-06, + "loss": 0.5609, + "step": 1869 + }, + { + "epoch": 0.129843077350368, + "grad_norm": 4.350304441014219, + "learning_rate": 9.741155218796728e-06, + "loss": 0.5476, + "step": 1870 + }, + { + "epoch": 0.12991251215109012, + "grad_norm": 3.596483734332975, + "learning_rate": 9.740797982746264e-06, + "loss": 0.3873, + "step": 1871 + }, + { + "epoch": 0.12998194695181225, + "grad_norm": 3.7896218631946983, + "learning_rate": 9.740440506911606e-06, + "loss": 0.373, + "step": 1872 + }, + { + "epoch": 0.13005138175253436, + "grad_norm": 3.3792507105550134, + "learning_rate": 9.74008279131083e-06, + "loss": 0.2868, + "step": 1873 + }, + { + "epoch": 0.1301208165532565, + "grad_norm": 3.9974235776743607, + "learning_rate": 9.73972483596203e-06, + "loss": 0.5502, + "step": 1874 + }, + { 
+ "epoch": 0.1301902513539786, + "grad_norm": 3.0903653975396135, + "learning_rate": 9.73936664088331e-06, + "loss": 0.1898, + "step": 1875 + }, + { + "epoch": 0.13025968615470074, + "grad_norm": 3.5899348209446544, + "learning_rate": 9.73900820609279e-06, + "loss": 0.3208, + "step": 1876 + }, + { + "epoch": 0.13032912095542284, + "grad_norm": 4.788605177701336, + "learning_rate": 9.738649531608597e-06, + "loss": 0.7412, + "step": 1877 + }, + { + "epoch": 0.13039855575614498, + "grad_norm": 4.741830076454195, + "learning_rate": 9.738290617448874e-06, + "loss": 0.4594, + "step": 1878 + }, + { + "epoch": 0.13046799055686711, + "grad_norm": 5.789563584742074, + "learning_rate": 9.737931463631771e-06, + "loss": 0.6426, + "step": 1879 + }, + { + "epoch": 0.13053742535758922, + "grad_norm": 8.694320870163759, + "learning_rate": 9.737572070175458e-06, + "loss": 0.3904, + "step": 1880 + }, + { + "epoch": 0.13060686015831136, + "grad_norm": 3.6509275315791023, + "learning_rate": 9.73721243709811e-06, + "loss": 0.3424, + "step": 1881 + }, + { + "epoch": 0.13067629495903346, + "grad_norm": 4.495602856398995, + "learning_rate": 9.736852564417916e-06, + "loss": 0.6616, + "step": 1882 + }, + { + "epoch": 0.1307457297597556, + "grad_norm": 5.707796752110541, + "learning_rate": 9.736492452153081e-06, + "loss": 0.784, + "step": 1883 + }, + { + "epoch": 0.1308151645604777, + "grad_norm": 5.191613763869277, + "learning_rate": 9.736132100321817e-06, + "loss": 0.7692, + "step": 1884 + }, + { + "epoch": 0.13088459936119984, + "grad_norm": 3.112531467470029, + "learning_rate": 9.735771508942352e-06, + "loss": 0.2483, + "step": 1885 + }, + { + "epoch": 0.13095403416192195, + "grad_norm": 4.02630979495226, + "learning_rate": 9.73541067803292e-06, + "loss": 0.5709, + "step": 1886 + }, + { + "epoch": 0.13102346896264408, + "grad_norm": 4.101849453728972, + "learning_rate": 9.735049607611776e-06, + "loss": 0.3765, + "step": 1887 + }, + { + "epoch": 0.1310929037633662, + "grad_norm": 4.921690609383153, + "learning_rate": 9.73468829769718e-06, + "loss": 0.6169, + "step": 1888 + }, + { + "epoch": 0.13116233856408832, + "grad_norm": 4.87811784102188, + "learning_rate": 9.73432674830741e-06, + "loss": 0.7865, + "step": 1889 + }, + { + "epoch": 0.13123177336481046, + "grad_norm": 3.9286496306094993, + "learning_rate": 9.733964959460749e-06, + "loss": 0.5547, + "step": 1890 + }, + { + "epoch": 0.13130120816553256, + "grad_norm": 5.0123117686077645, + "learning_rate": 9.733602931175496e-06, + "loss": 0.7967, + "step": 1891 + }, + { + "epoch": 0.1313706429662547, + "grad_norm": 4.534716974227372, + "learning_rate": 9.733240663469965e-06, + "loss": 0.5185, + "step": 1892 + }, + { + "epoch": 0.1314400777669768, + "grad_norm": 4.380307948647215, + "learning_rate": 9.732878156362478e-06, + "loss": 0.6317, + "step": 1893 + }, + { + "epoch": 0.13150951256769894, + "grad_norm": 4.567734919423023, + "learning_rate": 9.732515409871366e-06, + "loss": 0.7239, + "step": 1894 + }, + { + "epoch": 0.13157894736842105, + "grad_norm": 3.7090731747763743, + "learning_rate": 9.732152424014981e-06, + "loss": 0.3383, + "step": 1895 + }, + { + "epoch": 0.13164838216914318, + "grad_norm": 5.134006435308687, + "learning_rate": 9.731789198811683e-06, + "loss": 0.6073, + "step": 1896 + }, + { + "epoch": 0.1317178169698653, + "grad_norm": 5.397993275656045, + "learning_rate": 9.73142573427984e-06, + "loss": 0.955, + "step": 1897 + }, + { + "epoch": 0.13178725177058742, + "grad_norm": 3.724827370248617, + "learning_rate": 9.73106203043784e-06, + "loss": 
0.3382, + "step": 1898 + }, + { + "epoch": 0.13185668657130953, + "grad_norm": 4.364961604860191, + "learning_rate": 9.730698087304073e-06, + "loss": 0.8464, + "step": 1899 + }, + { + "epoch": 0.13192612137203166, + "grad_norm": 4.146868527795593, + "learning_rate": 9.730333904896952e-06, + "loss": 0.4927, + "step": 1900 + }, + { + "epoch": 0.1319955561727538, + "grad_norm": 3.391759332252334, + "learning_rate": 9.729969483234894e-06, + "loss": 0.2921, + "step": 1901 + }, + { + "epoch": 0.1320649909734759, + "grad_norm": 5.060121522366206, + "learning_rate": 9.72960482233633e-06, + "loss": 0.6825, + "step": 1902 + }, + { + "epoch": 0.13213442577419804, + "grad_norm": 4.4889983555212005, + "learning_rate": 9.729239922219708e-06, + "loss": 0.522, + "step": 1903 + }, + { + "epoch": 0.13220386057492015, + "grad_norm": 3.7224531263254885, + "learning_rate": 9.728874782903481e-06, + "loss": 0.5061, + "step": 1904 + }, + { + "epoch": 0.13227329537564228, + "grad_norm": 4.988375686835955, + "learning_rate": 9.728509404406121e-06, + "loss": 0.9253, + "step": 1905 + }, + { + "epoch": 0.1323427301763644, + "grad_norm": 4.068864570832338, + "learning_rate": 9.728143786746104e-06, + "loss": 0.5936, + "step": 1906 + }, + { + "epoch": 0.13241216497708652, + "grad_norm": 4.320068356195618, + "learning_rate": 9.727777929941924e-06, + "loss": 0.5795, + "step": 1907 + }, + { + "epoch": 0.13248159977780863, + "grad_norm": 4.221897186521596, + "learning_rate": 9.727411834012087e-06, + "loss": 0.4937, + "step": 1908 + }, + { + "epoch": 0.13255103457853076, + "grad_norm": 5.686093395727419, + "learning_rate": 9.72704549897511e-06, + "loss": 0.6904, + "step": 1909 + }, + { + "epoch": 0.13262046937925287, + "grad_norm": 5.013842655693153, + "learning_rate": 9.72667892484952e-06, + "loss": 0.5298, + "step": 1910 + }, + { + "epoch": 0.132689904179975, + "grad_norm": 5.3998698912562535, + "learning_rate": 9.726312111653857e-06, + "loss": 0.837, + "step": 1911 + }, + { + "epoch": 0.13275933898069714, + "grad_norm": 4.56287932313851, + "learning_rate": 9.725945059406677e-06, + "loss": 0.5392, + "step": 1912 + }, + { + "epoch": 0.13282877378141925, + "grad_norm": 3.9830599159927362, + "learning_rate": 9.725577768126544e-06, + "loss": 0.3005, + "step": 1913 + }, + { + "epoch": 0.13289820858214138, + "grad_norm": 4.828242305496214, + "learning_rate": 9.725210237832034e-06, + "loss": 0.6916, + "step": 1914 + }, + { + "epoch": 0.1329676433828635, + "grad_norm": 4.324068302429372, + "learning_rate": 9.724842468541737e-06, + "loss": 0.6229, + "step": 1915 + }, + { + "epoch": 0.13303707818358562, + "grad_norm": 4.202831357224146, + "learning_rate": 9.724474460274255e-06, + "loss": 0.4579, + "step": 1916 + }, + { + "epoch": 0.13310651298430773, + "grad_norm": 4.306499401557309, + "learning_rate": 9.7241062130482e-06, + "loss": 0.5066, + "step": 1917 + }, + { + "epoch": 0.13317594778502986, + "grad_norm": 38.021407239355845, + "learning_rate": 9.7237377268822e-06, + "loss": 0.3939, + "step": 1918 + }, + { + "epoch": 0.13324538258575197, + "grad_norm": 4.526755364466834, + "learning_rate": 9.723369001794891e-06, + "loss": 0.4766, + "step": 1919 + }, + { + "epoch": 0.1333148173864741, + "grad_norm": 2.6962370702294547, + "learning_rate": 9.723000037804922e-06, + "loss": 0.276, + "step": 1920 + }, + { + "epoch": 0.1333842521871962, + "grad_norm": 4.040355251708409, + "learning_rate": 9.722630834930955e-06, + "loss": 0.3421, + "step": 1921 + }, + { + "epoch": 0.13345368698791835, + "grad_norm": 4.900738668115288, + "learning_rate": 
9.722261393191664e-06, + "loss": 0.8399, + "step": 1922 + }, + { + "epoch": 0.13352312178864045, + "grad_norm": 5.011107757373327, + "learning_rate": 9.721891712605737e-06, + "loss": 0.6696, + "step": 1923 + }, + { + "epoch": 0.1335925565893626, + "grad_norm": 4.401838554184152, + "learning_rate": 9.721521793191868e-06, + "loss": 0.4223, + "step": 1924 + }, + { + "epoch": 0.13366199139008472, + "grad_norm": 4.249245121301882, + "learning_rate": 9.721151634968773e-06, + "loss": 0.5972, + "step": 1925 + }, + { + "epoch": 0.13373142619080683, + "grad_norm": 4.82963563937356, + "learning_rate": 9.720781237955168e-06, + "loss": 0.8872, + "step": 1926 + }, + { + "epoch": 0.13380086099152896, + "grad_norm": 4.406076926274472, + "learning_rate": 9.72041060216979e-06, + "loss": 0.5614, + "step": 1927 + }, + { + "epoch": 0.13387029579225107, + "grad_norm": 3.7875716576048513, + "learning_rate": 9.720039727631387e-06, + "loss": 0.4767, + "step": 1928 + }, + { + "epoch": 0.1339397305929732, + "grad_norm": 4.927149061673529, + "learning_rate": 9.719668614358713e-06, + "loss": 0.7639, + "step": 1929 + }, + { + "epoch": 0.1340091653936953, + "grad_norm": 4.192310511719908, + "learning_rate": 9.719297262370545e-06, + "loss": 0.4583, + "step": 1930 + }, + { + "epoch": 0.13407860019441745, + "grad_norm": 3.3052351400874542, + "learning_rate": 9.718925671685658e-06, + "loss": 0.2644, + "step": 1931 + }, + { + "epoch": 0.13414803499513955, + "grad_norm": 5.242148073229841, + "learning_rate": 9.718553842322851e-06, + "loss": 0.5569, + "step": 1932 + }, + { + "epoch": 0.1342174697958617, + "grad_norm": 4.770046058495332, + "learning_rate": 9.718181774300931e-06, + "loss": 0.581, + "step": 1933 + }, + { + "epoch": 0.1342869045965838, + "grad_norm": 3.887634159569658, + "learning_rate": 9.717809467638716e-06, + "loss": 0.5577, + "step": 1934 + }, + { + "epoch": 0.13435633939730593, + "grad_norm": 3.7053507283037117, + "learning_rate": 9.717436922355035e-06, + "loss": 0.4089, + "step": 1935 + }, + { + "epoch": 0.13442577419802806, + "grad_norm": 3.9579773677299155, + "learning_rate": 9.717064138468735e-06, + "loss": 0.4999, + "step": 1936 + }, + { + "epoch": 0.13449520899875017, + "grad_norm": 3.9169762105431714, + "learning_rate": 9.716691115998667e-06, + "loss": 0.5036, + "step": 1937 + }, + { + "epoch": 0.1345646437994723, + "grad_norm": 3.8036819067690493, + "learning_rate": 9.7163178549637e-06, + "loss": 0.593, + "step": 1938 + }, + { + "epoch": 0.1346340786001944, + "grad_norm": 4.388330531145816, + "learning_rate": 9.715944355382714e-06, + "loss": 0.5321, + "step": 1939 + }, + { + "epoch": 0.13470351340091655, + "grad_norm": 2.699600558969193, + "learning_rate": 9.715570617274596e-06, + "loss": 0.2605, + "step": 1940 + }, + { + "epoch": 0.13477294820163865, + "grad_norm": 4.983772464641617, + "learning_rate": 9.715196640658256e-06, + "loss": 0.7274, + "step": 1941 + }, + { + "epoch": 0.1348423830023608, + "grad_norm": 4.712923103359737, + "learning_rate": 9.714822425552604e-06, + "loss": 0.6865, + "step": 1942 + }, + { + "epoch": 0.1349118178030829, + "grad_norm": 4.527389000413363, + "learning_rate": 9.714447971976568e-06, + "loss": 0.6267, + "step": 1943 + }, + { + "epoch": 0.13498125260380503, + "grad_norm": 3.3735229378086427, + "learning_rate": 9.714073279949089e-06, + "loss": 0.3693, + "step": 1944 + }, + { + "epoch": 0.13505068740452714, + "grad_norm": 4.547484488255602, + "learning_rate": 9.713698349489117e-06, + "loss": 0.2491, + "step": 1945 + }, + { + "epoch": 0.13512012220524927, + "grad_norm": 
4.689896667038806, + "learning_rate": 9.713323180615617e-06, + "loss": 0.6146, + "step": 1946 + }, + { + "epoch": 0.1351895570059714, + "grad_norm": 3.801806427659175, + "learning_rate": 9.712947773347564e-06, + "loss": 0.3315, + "step": 1947 + }, + { + "epoch": 0.1352589918066935, + "grad_norm": 3.442317975792722, + "learning_rate": 9.712572127703945e-06, + "loss": 0.3588, + "step": 1948 + }, + { + "epoch": 0.13532842660741565, + "grad_norm": 4.232493005297604, + "learning_rate": 9.712196243703762e-06, + "loss": 0.6084, + "step": 1949 + }, + { + "epoch": 0.13539786140813775, + "grad_norm": 4.422228233628285, + "learning_rate": 9.711820121366025e-06, + "loss": 0.4592, + "step": 1950 + }, + { + "epoch": 0.1354672962088599, + "grad_norm": 4.100366819007629, + "learning_rate": 9.711443760709757e-06, + "loss": 0.4624, + "step": 1951 + }, + { + "epoch": 0.135536731009582, + "grad_norm": 3.854244024748049, + "learning_rate": 9.711067161753995e-06, + "loss": 0.4878, + "step": 1952 + }, + { + "epoch": 0.13560616581030413, + "grad_norm": 3.7245586980659318, + "learning_rate": 9.710690324517788e-06, + "loss": 0.4548, + "step": 1953 + }, + { + "epoch": 0.13567560061102624, + "grad_norm": 4.781606643938577, + "learning_rate": 9.710313249020193e-06, + "loss": 0.5182, + "step": 1954 + }, + { + "epoch": 0.13574503541174837, + "grad_norm": 4.755514193717609, + "learning_rate": 9.709935935280286e-06, + "loss": 0.6849, + "step": 1955 + }, + { + "epoch": 0.13581447021247048, + "grad_norm": 4.548151837119778, + "learning_rate": 9.709558383317148e-06, + "loss": 0.7407, + "step": 1956 + }, + { + "epoch": 0.1358839050131926, + "grad_norm": 3.7754027807517723, + "learning_rate": 9.709180593149877e-06, + "loss": 0.3542, + "step": 1957 + }, + { + "epoch": 0.13595333981391475, + "grad_norm": 10.771841496731694, + "learning_rate": 9.70880256479758e-06, + "loss": 0.5798, + "step": 1958 + }, + { + "epoch": 0.13602277461463685, + "grad_norm": 4.277629931100356, + "learning_rate": 9.708424298279377e-06, + "loss": 0.4424, + "step": 1959 + }, + { + "epoch": 0.136092209415359, + "grad_norm": 3.1059775129904885, + "learning_rate": 9.708045793614402e-06, + "loss": 0.3527, + "step": 1960 + }, + { + "epoch": 0.1361616442160811, + "grad_norm": 3.959252139166287, + "learning_rate": 9.707667050821796e-06, + "loss": 0.4787, + "step": 1961 + }, + { + "epoch": 0.13623107901680323, + "grad_norm": 4.627029172037978, + "learning_rate": 9.70728806992072e-06, + "loss": 0.6027, + "step": 1962 + }, + { + "epoch": 0.13630051381752534, + "grad_norm": 6.462599389150344, + "learning_rate": 9.70690885093034e-06, + "loss": 0.8626, + "step": 1963 + }, + { + "epoch": 0.13636994861824747, + "grad_norm": 5.128555094824931, + "learning_rate": 9.706529393869835e-06, + "loss": 0.8223, + "step": 1964 + }, + { + "epoch": 0.13643938341896958, + "grad_norm": 4.522585983654513, + "learning_rate": 9.706149698758401e-06, + "loss": 0.6136, + "step": 1965 + }, + { + "epoch": 0.1365088182196917, + "grad_norm": 3.582141328723532, + "learning_rate": 9.705769765615239e-06, + "loss": 0.3099, + "step": 1966 + }, + { + "epoch": 0.13657825302041382, + "grad_norm": 4.216729129685716, + "learning_rate": 9.705389594459566e-06, + "loss": 0.4292, + "step": 1967 + }, + { + "epoch": 0.13664768782113595, + "grad_norm": 4.2008671713960055, + "learning_rate": 9.705009185310615e-06, + "loss": 0.4613, + "step": 1968 + }, + { + "epoch": 0.13671712262185806, + "grad_norm": 4.3301690302084195, + "learning_rate": 9.704628538187621e-06, + "loss": 0.6703, + "step": 1969 + }, + { + 
"epoch": 0.1367865574225802, + "grad_norm": 4.090942632933998, + "learning_rate": 9.704247653109843e-06, + "loss": 0.421, + "step": 1970 + }, + { + "epoch": 0.13685599222330233, + "grad_norm": 3.715539963891412, + "learning_rate": 9.703866530096538e-06, + "loss": 0.3299, + "step": 1971 + }, + { + "epoch": 0.13692542702402444, + "grad_norm": 4.646310454673358, + "learning_rate": 9.703485169166988e-06, + "loss": 0.6391, + "step": 1972 + }, + { + "epoch": 0.13699486182474657, + "grad_norm": 12.191677580913243, + "learning_rate": 9.70310357034048e-06, + "loss": 0.4313, + "step": 1973 + }, + { + "epoch": 0.13706429662546868, + "grad_norm": 4.414715314025958, + "learning_rate": 9.702721733636315e-06, + "loss": 0.7022, + "step": 1974 + }, + { + "epoch": 0.1371337314261908, + "grad_norm": 4.552738634733077, + "learning_rate": 9.702339659073807e-06, + "loss": 0.8359, + "step": 1975 + }, + { + "epoch": 0.13720316622691292, + "grad_norm": 6.309611887311083, + "learning_rate": 9.70195734667228e-06, + "loss": 0.7154, + "step": 1976 + }, + { + "epoch": 0.13727260102763506, + "grad_norm": 5.880815314177445, + "learning_rate": 9.701574796451071e-06, + "loss": 0.598, + "step": 1977 + }, + { + "epoch": 0.13734203582835716, + "grad_norm": 3.780988900300168, + "learning_rate": 9.701192008429529e-06, + "loss": 0.4191, + "step": 1978 + }, + { + "epoch": 0.1374114706290793, + "grad_norm": 3.9939162496094687, + "learning_rate": 9.700808982627015e-06, + "loss": 0.5581, + "step": 1979 + }, + { + "epoch": 0.1374809054298014, + "grad_norm": 3.905331085573648, + "learning_rate": 9.700425719062902e-06, + "loss": 0.4617, + "step": 1980 + }, + { + "epoch": 0.13755034023052354, + "grad_norm": 4.13987907064628, + "learning_rate": 9.700042217756575e-06, + "loss": 0.4853, + "step": 1981 + }, + { + "epoch": 0.13761977503124567, + "grad_norm": 4.359510947687961, + "learning_rate": 9.699658478727431e-06, + "loss": 0.5342, + "step": 1982 + }, + { + "epoch": 0.13768920983196778, + "grad_norm": 4.740046279301326, + "learning_rate": 9.69927450199488e-06, + "loss": 0.4983, + "step": 1983 + }, + { + "epoch": 0.13775864463268991, + "grad_norm": 4.811088123538485, + "learning_rate": 9.69889028757834e-06, + "loss": 0.5284, + "step": 1984 + }, + { + "epoch": 0.13782807943341202, + "grad_norm": 4.431822772844443, + "learning_rate": 9.698505835497249e-06, + "loss": 0.608, + "step": 1985 + }, + { + "epoch": 0.13789751423413416, + "grad_norm": 4.542012058952836, + "learning_rate": 9.698121145771048e-06, + "loss": 0.5729, + "step": 1986 + }, + { + "epoch": 0.13796694903485626, + "grad_norm": 4.201730581065927, + "learning_rate": 9.697736218419196e-06, + "loss": 0.5455, + "step": 1987 + }, + { + "epoch": 0.1380363838355784, + "grad_norm": 4.452249724962726, + "learning_rate": 9.697351053461163e-06, + "loss": 0.5571, + "step": 1988 + }, + { + "epoch": 0.1381058186363005, + "grad_norm": 3.910507386314688, + "learning_rate": 9.696965650916427e-06, + "loss": 0.3761, + "step": 1989 + }, + { + "epoch": 0.13817525343702264, + "grad_norm": 5.152119323324873, + "learning_rate": 9.696580010804486e-06, + "loss": 0.6357, + "step": 1990 + }, + { + "epoch": 0.13824468823774475, + "grad_norm": 4.530585079973892, + "learning_rate": 9.69619413314484e-06, + "loss": 0.5589, + "step": 1991 + }, + { + "epoch": 0.13831412303846688, + "grad_norm": 4.95239284919159, + "learning_rate": 9.695808017957012e-06, + "loss": 0.5127, + "step": 1992 + }, + { + "epoch": 0.13838355783918901, + "grad_norm": 5.747623942083142, + "learning_rate": 9.695421665260525e-06, + "loss": 
0.7764, + "step": 1993 + }, + { + "epoch": 0.13845299263991112, + "grad_norm": 4.825390983709328, + "learning_rate": 9.695035075074924e-06, + "loss": 0.6451, + "step": 1994 + }, + { + "epoch": 0.13852242744063326, + "grad_norm": 4.187502716995179, + "learning_rate": 9.694648247419762e-06, + "loss": 0.381, + "step": 1995 + }, + { + "epoch": 0.13859186224135536, + "grad_norm": 4.222532622492172, + "learning_rate": 9.694261182314605e-06, + "loss": 0.6246, + "step": 1996 + }, + { + "epoch": 0.1386612970420775, + "grad_norm": 3.8711348324921375, + "learning_rate": 9.69387387977903e-06, + "loss": 0.455, + "step": 1997 + }, + { + "epoch": 0.1387307318427996, + "grad_norm": 5.504042372192557, + "learning_rate": 9.693486339832623e-06, + "loss": 0.6149, + "step": 1998 + }, + { + "epoch": 0.13880016664352174, + "grad_norm": 4.073917769821312, + "learning_rate": 9.693098562494988e-06, + "loss": 0.3421, + "step": 1999 + }, + { + "epoch": 0.13886960144424385, + "grad_norm": 3.9402395095896137, + "learning_rate": 9.69271054778574e-06, + "loss": 0.6305, + "step": 2000 + }, + { + "epoch": 0.13893903624496598, + "grad_norm": 4.6535730345142055, + "learning_rate": 9.692322295724503e-06, + "loss": 0.4873, + "step": 2001 + }, + { + "epoch": 0.1390084710456881, + "grad_norm": 4.155673564835397, + "learning_rate": 9.691933806330913e-06, + "loss": 0.6034, + "step": 2002 + }, + { + "epoch": 0.13907790584641022, + "grad_norm": 3.4917887981696367, + "learning_rate": 9.69154507962462e-06, + "loss": 0.3722, + "step": 2003 + }, + { + "epoch": 0.13914734064713236, + "grad_norm": 3.1936535776092247, + "learning_rate": 9.691156115625287e-06, + "loss": 0.4081, + "step": 2004 + }, + { + "epoch": 0.13921677544785446, + "grad_norm": 5.370418533866856, + "learning_rate": 9.690766914352586e-06, + "loss": 0.625, + "step": 2005 + }, + { + "epoch": 0.1392862102485766, + "grad_norm": 3.72225268254193, + "learning_rate": 9.690377475826201e-06, + "loss": 0.3724, + "step": 2006 + }, + { + "epoch": 0.1393556450492987, + "grad_norm": 4.275777785811119, + "learning_rate": 9.68998780006583e-06, + "loss": 0.4789, + "step": 2007 + }, + { + "epoch": 0.13942507985002084, + "grad_norm": 5.11604047161193, + "learning_rate": 9.689597887091186e-06, + "loss": 0.3912, + "step": 2008 + }, + { + "epoch": 0.13949451465074295, + "grad_norm": 3.493829860584996, + "learning_rate": 9.689207736921986e-06, + "loss": 0.4383, + "step": 2009 + }, + { + "epoch": 0.13956394945146508, + "grad_norm": 3.814691586632843, + "learning_rate": 9.688817349577966e-06, + "loss": 0.5006, + "step": 2010 + }, + { + "epoch": 0.1396333842521872, + "grad_norm": 5.296556849163979, + "learning_rate": 9.688426725078868e-06, + "loss": 0.956, + "step": 2011 + }, + { + "epoch": 0.13970281905290932, + "grad_norm": 3.9901503441249515, + "learning_rate": 9.688035863444453e-06, + "loss": 0.6011, + "step": 2012 + }, + { + "epoch": 0.13977225385363143, + "grad_norm": 3.865976012220175, + "learning_rate": 9.687644764694488e-06, + "loss": 0.4383, + "step": 2013 + }, + { + "epoch": 0.13984168865435356, + "grad_norm": 4.998681857518684, + "learning_rate": 9.687253428848757e-06, + "loss": 0.4956, + "step": 2014 + }, + { + "epoch": 0.1399111234550757, + "grad_norm": 3.0371246949613733, + "learning_rate": 9.68686185592705e-06, + "loss": 0.2869, + "step": 2015 + }, + { + "epoch": 0.1399805582557978, + "grad_norm": 4.71332296829214, + "learning_rate": 9.686470045949175e-06, + "loss": 0.4438, + "step": 2016 + }, + { + "epoch": 0.14004999305651994, + "grad_norm": 4.703425836493707, + "learning_rate": 
9.686077998934948e-06, + "loss": 0.5444, + "step": 2017 + }, + { + "epoch": 0.14011942785724205, + "grad_norm": 4.785108338916558, + "learning_rate": 9.685685714904198e-06, + "loss": 0.5929, + "step": 2018 + }, + { + "epoch": 0.14018886265796418, + "grad_norm": 4.844972255457499, + "learning_rate": 9.685293193876766e-06, + "loss": 0.6697, + "step": 2019 + }, + { + "epoch": 0.1402582974586863, + "grad_norm": 4.3003844144559045, + "learning_rate": 9.684900435872507e-06, + "loss": 0.5971, + "step": 2020 + }, + { + "epoch": 0.14032773225940842, + "grad_norm": 3.5184866007677904, + "learning_rate": 9.684507440911286e-06, + "loss": 0.4103, + "step": 2021 + }, + { + "epoch": 0.14039716706013053, + "grad_norm": 4.439364471675243, + "learning_rate": 9.684114209012979e-06, + "loss": 0.4947, + "step": 2022 + }, + { + "epoch": 0.14046660186085266, + "grad_norm": 5.981733828806437, + "learning_rate": 9.683720740197474e-06, + "loss": 0.6097, + "step": 2023 + }, + { + "epoch": 0.14053603666157477, + "grad_norm": 4.799351121715541, + "learning_rate": 9.683327034484674e-06, + "loss": 0.4424, + "step": 2024 + }, + { + "epoch": 0.1406054714622969, + "grad_norm": 4.494609203450447, + "learning_rate": 9.682933091894495e-06, + "loss": 0.5255, + "step": 2025 + }, + { + "epoch": 0.140674906263019, + "grad_norm": 3.162269364031781, + "learning_rate": 9.682538912446857e-06, + "loss": 0.1532, + "step": 2026 + }, + { + "epoch": 0.14074434106374115, + "grad_norm": 3.3210971369734326, + "learning_rate": 9.6821444961617e-06, + "loss": 0.4505, + "step": 2027 + }, + { + "epoch": 0.14081377586446328, + "grad_norm": 3.690271639091779, + "learning_rate": 9.681749843058973e-06, + "loss": 0.3989, + "step": 2028 + }, + { + "epoch": 0.1408832106651854, + "grad_norm": 4.545355226660664, + "learning_rate": 9.681354953158636e-06, + "loss": 0.6355, + "step": 2029 + }, + { + "epoch": 0.14095264546590752, + "grad_norm": 4.253528297667697, + "learning_rate": 9.680959826480664e-06, + "loss": 0.7317, + "step": 2030 + }, + { + "epoch": 0.14102208026662963, + "grad_norm": 4.041190115096981, + "learning_rate": 9.680564463045041e-06, + "loss": 0.2772, + "step": 2031 + }, + { + "epoch": 0.14109151506735176, + "grad_norm": 3.6680877541888206, + "learning_rate": 9.680168862871762e-06, + "loss": 0.3295, + "step": 2032 + }, + { + "epoch": 0.14116094986807387, + "grad_norm": 4.086416824923184, + "learning_rate": 9.67977302598084e-06, + "loss": 0.6584, + "step": 2033 + }, + { + "epoch": 0.141230384668796, + "grad_norm": 4.183636055332061, + "learning_rate": 9.679376952392293e-06, + "loss": 0.4658, + "step": 2034 + }, + { + "epoch": 0.1412998194695181, + "grad_norm": 4.68064726128267, + "learning_rate": 9.678980642126155e-06, + "loss": 0.4912, + "step": 2035 + }, + { + "epoch": 0.14136925427024025, + "grad_norm": 4.362028975453703, + "learning_rate": 9.678584095202468e-06, + "loss": 0.4873, + "step": 2036 + }, + { + "epoch": 0.14143868907096235, + "grad_norm": 3.3070382344840104, + "learning_rate": 9.678187311641295e-06, + "loss": 0.2924, + "step": 2037 + }, + { + "epoch": 0.1415081238716845, + "grad_norm": 3.5990291695230945, + "learning_rate": 9.6777902914627e-06, + "loss": 0.5233, + "step": 2038 + }, + { + "epoch": 0.14157755867240662, + "grad_norm": 3.300326116228172, + "learning_rate": 9.677393034686766e-06, + "loss": 0.3191, + "step": 2039 + }, + { + "epoch": 0.14164699347312873, + "grad_norm": 7.058213958288835, + "learning_rate": 9.676995541333585e-06, + "loss": 0.7709, + "step": 2040 + }, + { + "epoch": 0.14171642827385086, + "grad_norm": 
3.7259821793919197, + "learning_rate": 9.676597811423264e-06, + "loss": 0.4154, + "step": 2041 + }, + { + "epoch": 0.14178586307457297, + "grad_norm": 4.556570792476836, + "learning_rate": 9.676199844975914e-06, + "loss": 0.482, + "step": 2042 + }, + { + "epoch": 0.1418552978752951, + "grad_norm": 2.9546063447011495, + "learning_rate": 9.675801642011669e-06, + "loss": 0.2433, + "step": 2043 + }, + { + "epoch": 0.1419247326760172, + "grad_norm": 4.142795416249976, + "learning_rate": 9.675403202550667e-06, + "loss": 0.384, + "step": 2044 + }, + { + "epoch": 0.14199416747673935, + "grad_norm": 4.235361392547599, + "learning_rate": 9.675004526613062e-06, + "loss": 0.5672, + "step": 2045 + }, + { + "epoch": 0.14206360227746145, + "grad_norm": 4.262726126556873, + "learning_rate": 9.674605614219019e-06, + "loss": 0.6718, + "step": 2046 + }, + { + "epoch": 0.1421330370781836, + "grad_norm": 7.903242075261548, + "learning_rate": 9.674206465388713e-06, + "loss": 0.9077, + "step": 2047 + }, + { + "epoch": 0.1422024718789057, + "grad_norm": 4.2459558452820785, + "learning_rate": 9.673807080142334e-06, + "loss": 0.4331, + "step": 2048 + }, + { + "epoch": 0.14227190667962783, + "grad_norm": 4.239551182077637, + "learning_rate": 9.67340745850008e-06, + "loss": 0.579, + "step": 2049 + }, + { + "epoch": 0.14234134148034996, + "grad_norm": 8.038026349729106, + "learning_rate": 9.673007600482168e-06, + "loss": 0.5906, + "step": 2050 + }, + { + "epoch": 0.14241077628107207, + "grad_norm": 4.002059659042826, + "learning_rate": 9.672607506108817e-06, + "loss": 0.5827, + "step": 2051 + }, + { + "epoch": 0.1424802110817942, + "grad_norm": 3.6532412751703083, + "learning_rate": 9.672207175400265e-06, + "loss": 0.3607, + "step": 2052 + }, + { + "epoch": 0.1425496458825163, + "grad_norm": 5.39727124970411, + "learning_rate": 9.671806608376763e-06, + "loss": 0.8619, + "step": 2053 + }, + { + "epoch": 0.14261908068323845, + "grad_norm": 3.7304817664753545, + "learning_rate": 9.671405805058569e-06, + "loss": 0.4787, + "step": 2054 + }, + { + "epoch": 0.14268851548396055, + "grad_norm": 5.571774798894162, + "learning_rate": 9.671004765465955e-06, + "loss": 0.518, + "step": 2055 + }, + { + "epoch": 0.1427579502846827, + "grad_norm": 5.013494184550903, + "learning_rate": 9.670603489619206e-06, + "loss": 0.6222, + "step": 2056 + }, + { + "epoch": 0.1428273850854048, + "grad_norm": 3.7019157226706985, + "learning_rate": 9.670201977538616e-06, + "loss": 0.4918, + "step": 2057 + }, + { + "epoch": 0.14289681988612693, + "grad_norm": 4.621294263187852, + "learning_rate": 9.669800229244497e-06, + "loss": 0.6814, + "step": 2058 + }, + { + "epoch": 0.14296625468684904, + "grad_norm": 3.2022282426342716, + "learning_rate": 9.669398244757164e-06, + "loss": 0.3462, + "step": 2059 + }, + { + "epoch": 0.14303568948757117, + "grad_norm": 4.504873090753799, + "learning_rate": 9.668996024096955e-06, + "loss": 0.4152, + "step": 2060 + }, + { + "epoch": 0.1431051242882933, + "grad_norm": 4.714806660737154, + "learning_rate": 9.668593567284208e-06, + "loss": 0.804, + "step": 2061 + }, + { + "epoch": 0.1431745590890154, + "grad_norm": 3.9891302249264045, + "learning_rate": 9.668190874339281e-06, + "loss": 0.4326, + "step": 2062 + }, + { + "epoch": 0.14324399388973755, + "grad_norm": 3.683347926804802, + "learning_rate": 9.667787945282544e-06, + "loss": 0.4933, + "step": 2063 + }, + { + "epoch": 0.14331342869045965, + "grad_norm": 4.1253775336132374, + "learning_rate": 9.667384780134373e-06, + "loss": 0.4991, + "step": 2064 + }, + { + 
"epoch": 0.1433828634911818, + "grad_norm": 4.45048989616051, + "learning_rate": 9.666981378915161e-06, + "loss": 0.5493, + "step": 2065 + }, + { + "epoch": 0.1434522982919039, + "grad_norm": 3.0058904010812255, + "learning_rate": 9.666577741645313e-06, + "loss": 0.3457, + "step": 2066 + }, + { + "epoch": 0.14352173309262603, + "grad_norm": 4.49495986414482, + "learning_rate": 9.666173868345243e-06, + "loss": 0.6551, + "step": 2067 + }, + { + "epoch": 0.14359116789334814, + "grad_norm": 3.7112993964502934, + "learning_rate": 9.665769759035379e-06, + "loss": 0.2404, + "step": 2068 + }, + { + "epoch": 0.14366060269407027, + "grad_norm": 6.237392857371296, + "learning_rate": 9.66536541373616e-06, + "loss": 0.6418, + "step": 2069 + }, + { + "epoch": 0.14373003749479238, + "grad_norm": 4.61966544142573, + "learning_rate": 9.664960832468038e-06, + "loss": 0.5599, + "step": 2070 + }, + { + "epoch": 0.1437994722955145, + "grad_norm": 3.9083094106465857, + "learning_rate": 9.664556015251476e-06, + "loss": 0.2786, + "step": 2071 + }, + { + "epoch": 0.14386890709623662, + "grad_norm": 5.103914771144059, + "learning_rate": 9.664150962106948e-06, + "loss": 0.3883, + "step": 2072 + }, + { + "epoch": 0.14393834189695875, + "grad_norm": 4.899108425182691, + "learning_rate": 9.663745673054943e-06, + "loss": 0.672, + "step": 2073 + }, + { + "epoch": 0.1440077766976809, + "grad_norm": 4.55170172296448, + "learning_rate": 9.663340148115961e-06, + "loss": 0.6498, + "step": 2074 + }, + { + "epoch": 0.144077211498403, + "grad_norm": 3.8242001460057105, + "learning_rate": 9.66293438731051e-06, + "loss": 0.2487, + "step": 2075 + }, + { + "epoch": 0.14414664629912513, + "grad_norm": 3.9355486909118977, + "learning_rate": 9.662528390659111e-06, + "loss": 0.415, + "step": 2076 + }, + { + "epoch": 0.14421608109984724, + "grad_norm": 4.624334921565409, + "learning_rate": 9.662122158182306e-06, + "loss": 0.5791, + "step": 2077 + }, + { + "epoch": 0.14428551590056937, + "grad_norm": 4.222531085801669, + "learning_rate": 9.661715689900636e-06, + "loss": 0.5788, + "step": 2078 + }, + { + "epoch": 0.14435495070129148, + "grad_norm": 4.9143621463126035, + "learning_rate": 9.661308985834661e-06, + "loss": 0.7304, + "step": 2079 + }, + { + "epoch": 0.1444243855020136, + "grad_norm": 5.022547185570577, + "learning_rate": 9.660902046004954e-06, + "loss": 0.5861, + "step": 2080 + }, + { + "epoch": 0.14449382030273572, + "grad_norm": 4.659990300546865, + "learning_rate": 9.660494870432094e-06, + "loss": 0.6541, + "step": 2081 + }, + { + "epoch": 0.14456325510345786, + "grad_norm": 4.30139623958207, + "learning_rate": 9.660087459136678e-06, + "loss": 0.4429, + "step": 2082 + }, + { + "epoch": 0.14463268990417996, + "grad_norm": 4.016953235363166, + "learning_rate": 9.65967981213931e-06, + "loss": 0.4234, + "step": 2083 + }, + { + "epoch": 0.1447021247049021, + "grad_norm": 3.3875678485971736, + "learning_rate": 9.659271929460612e-06, + "loss": 0.4571, + "step": 2084 + }, + { + "epoch": 0.14477155950562423, + "grad_norm": 3.0449316946739238, + "learning_rate": 9.658863811121212e-06, + "loss": 0.2103, + "step": 2085 + }, + { + "epoch": 0.14484099430634634, + "grad_norm": 3.8184834853338927, + "learning_rate": 9.658455457141751e-06, + "loss": 0.3131, + "step": 2086 + }, + { + "epoch": 0.14491042910706847, + "grad_norm": 4.873143529200353, + "learning_rate": 9.658046867542885e-06, + "loss": 0.5708, + "step": 2087 + }, + { + "epoch": 0.14497986390779058, + "grad_norm": 3.558694303086474, + "learning_rate": 9.65763804234528e-06, + 
"loss": 0.303, + "step": 2088 + }, + { + "epoch": 0.14504929870851271, + "grad_norm": 6.1269836643224105, + "learning_rate": 9.657228981569612e-06, + "loss": 0.8868, + "step": 2089 + }, + { + "epoch": 0.14511873350923482, + "grad_norm": 4.571678745494512, + "learning_rate": 9.656819685236574e-06, + "loss": 0.5841, + "step": 2090 + }, + { + "epoch": 0.14518816830995696, + "grad_norm": 4.1197659416405905, + "learning_rate": 9.656410153366864e-06, + "loss": 0.4262, + "step": 2091 + }, + { + "epoch": 0.14525760311067906, + "grad_norm": 3.928091041737535, + "learning_rate": 9.656000385981198e-06, + "loss": 0.335, + "step": 2092 + }, + { + "epoch": 0.1453270379114012, + "grad_norm": 5.2877342602906285, + "learning_rate": 9.655590383100302e-06, + "loss": 0.7814, + "step": 2093 + }, + { + "epoch": 0.1453964727121233, + "grad_norm": 3.1919939465047484, + "learning_rate": 9.655180144744912e-06, + "loss": 0.2093, + "step": 2094 + }, + { + "epoch": 0.14546590751284544, + "grad_norm": 4.982244355002265, + "learning_rate": 9.654769670935778e-06, + "loss": 0.7627, + "step": 2095 + }, + { + "epoch": 0.14553534231356757, + "grad_norm": 4.441459683627491, + "learning_rate": 9.654358961693661e-06, + "loss": 0.547, + "step": 2096 + }, + { + "epoch": 0.14560477711428968, + "grad_norm": 4.507316410103269, + "learning_rate": 9.653948017039335e-06, + "loss": 0.5081, + "step": 2097 + }, + { + "epoch": 0.14567421191501181, + "grad_norm": 4.559209379079217, + "learning_rate": 9.653536836993584e-06, + "loss": 0.4905, + "step": 2098 + }, + { + "epoch": 0.14574364671573392, + "grad_norm": 3.319787945490247, + "learning_rate": 9.653125421577204e-06, + "loss": 0.4427, + "step": 2099 + }, + { + "epoch": 0.14581308151645606, + "grad_norm": 3.640337076457778, + "learning_rate": 9.65271377081101e-06, + "loss": 0.2775, + "step": 2100 + }, + { + "epoch": 0.14588251631717816, + "grad_norm": 4.125116930941902, + "learning_rate": 9.652301884715815e-06, + "loss": 0.4386, + "step": 2101 + }, + { + "epoch": 0.1459519511179003, + "grad_norm": 4.771023132367938, + "learning_rate": 9.651889763312456e-06, + "loss": 0.5129, + "step": 2102 + }, + { + "epoch": 0.1460213859186224, + "grad_norm": 2.963634209392736, + "learning_rate": 9.651477406621776e-06, + "loss": 0.4573, + "step": 2103 + }, + { + "epoch": 0.14609082071934454, + "grad_norm": 4.768417154334674, + "learning_rate": 9.651064814664633e-06, + "loss": 0.7566, + "step": 2104 + }, + { + "epoch": 0.14616025552006665, + "grad_norm": 2.9122861978174406, + "learning_rate": 9.650651987461896e-06, + "loss": 0.1471, + "step": 2105 + }, + { + "epoch": 0.14622969032078878, + "grad_norm": 5.859515755877611, + "learning_rate": 9.650238925034441e-06, + "loss": 0.5179, + "step": 2106 + }, + { + "epoch": 0.14629912512151091, + "grad_norm": 3.964267190676561, + "learning_rate": 9.649825627403166e-06, + "loss": 0.4821, + "step": 2107 + }, + { + "epoch": 0.14636855992223302, + "grad_norm": 4.54913469478474, + "learning_rate": 9.649412094588971e-06, + "loss": 0.4727, + "step": 2108 + }, + { + "epoch": 0.14643799472295516, + "grad_norm": 4.860896334877277, + "learning_rate": 9.648998326612774e-06, + "loss": 0.6699, + "step": 2109 + }, + { + "epoch": 0.14650742952367726, + "grad_norm": 3.5387954041003713, + "learning_rate": 9.648584323495502e-06, + "loss": 0.3415, + "step": 2110 + }, + { + "epoch": 0.1465768643243994, + "grad_norm": 2.9956984849515123, + "learning_rate": 9.648170085258094e-06, + "loss": 0.2599, + "step": 2111 + }, + { + "epoch": 0.1466462991251215, + "grad_norm": 5.281627270863168, + 
"learning_rate": 9.647755611921505e-06, + "loss": 0.6811, + "step": 2112 + }, + { + "epoch": 0.14671573392584364, + "grad_norm": 3.313124716405168, + "learning_rate": 9.647340903506697e-06, + "loss": 0.4289, + "step": 2113 + }, + { + "epoch": 0.14678516872656575, + "grad_norm": 5.621777066423199, + "learning_rate": 9.646925960034643e-06, + "loss": 0.7604, + "step": 2114 + }, + { + "epoch": 0.14685460352728788, + "grad_norm": 2.7928116904997076, + "learning_rate": 9.646510781526332e-06, + "loss": 0.2051, + "step": 2115 + }, + { + "epoch": 0.14692403832801, + "grad_norm": 3.8886256796163754, + "learning_rate": 9.646095368002765e-06, + "loss": 0.45, + "step": 2116 + }, + { + "epoch": 0.14699347312873212, + "grad_norm": 2.5670306658311794, + "learning_rate": 9.64567971948495e-06, + "loss": 0.2052, + "step": 2117 + }, + { + "epoch": 0.14706290792945426, + "grad_norm": 7.5435510105966905, + "learning_rate": 9.645263835993914e-06, + "loss": 0.3888, + "step": 2118 + }, + { + "epoch": 0.14713234273017636, + "grad_norm": 4.542963662982822, + "learning_rate": 9.644847717550686e-06, + "loss": 0.7395, + "step": 2119 + }, + { + "epoch": 0.1472017775308985, + "grad_norm": 3.4930400126079584, + "learning_rate": 9.64443136417632e-06, + "loss": 0.4938, + "step": 2120 + }, + { + "epoch": 0.1472712123316206, + "grad_norm": 3.6627284340003055, + "learning_rate": 9.644014775891871e-06, + "loss": 0.3185, + "step": 2121 + }, + { + "epoch": 0.14734064713234274, + "grad_norm": 4.796525961331801, + "learning_rate": 9.643597952718409e-06, + "loss": 0.8325, + "step": 2122 + }, + { + "epoch": 0.14741008193306485, + "grad_norm": 5.5445109667379775, + "learning_rate": 9.643180894677015e-06, + "loss": 0.7546, + "step": 2123 + }, + { + "epoch": 0.14747951673378698, + "grad_norm": 2.7253182669581175, + "learning_rate": 9.642763601788788e-06, + "loss": 0.1883, + "step": 2124 + }, + { + "epoch": 0.1475489515345091, + "grad_norm": 5.122835293498146, + "learning_rate": 9.64234607407483e-06, + "loss": 0.7419, + "step": 2125 + }, + { + "epoch": 0.14761838633523122, + "grad_norm": 4.271000985188102, + "learning_rate": 9.641928311556263e-06, + "loss": 0.6409, + "step": 2126 + }, + { + "epoch": 0.14768782113595333, + "grad_norm": 4.915586052957864, + "learning_rate": 9.641510314254214e-06, + "loss": 0.7303, + "step": 2127 + }, + { + "epoch": 0.14775725593667546, + "grad_norm": 5.174385425179848, + "learning_rate": 9.641092082189824e-06, + "loss": 0.5006, + "step": 2128 + }, + { + "epoch": 0.14782669073739757, + "grad_norm": 2.409503078200908, + "learning_rate": 9.640673615384248e-06, + "loss": 0.1535, + "step": 2129 + }, + { + "epoch": 0.1478961255381197, + "grad_norm": 4.201531251957234, + "learning_rate": 9.640254913858653e-06, + "loss": 0.5363, + "step": 2130 + }, + { + "epoch": 0.14796556033884184, + "grad_norm": 4.754898519537631, + "learning_rate": 9.639835977634216e-06, + "loss": 0.6777, + "step": 2131 + }, + { + "epoch": 0.14803499513956395, + "grad_norm": 4.458480151582614, + "learning_rate": 9.639416806732124e-06, + "loss": 0.6779, + "step": 2132 + }, + { + "epoch": 0.14810442994028608, + "grad_norm": 3.495242742808231, + "learning_rate": 9.638997401173581e-06, + "loss": 0.3626, + "step": 2133 + }, + { + "epoch": 0.1481738647410082, + "grad_norm": 4.003974587317677, + "learning_rate": 9.638577760979799e-06, + "loss": 0.3901, + "step": 2134 + }, + { + "epoch": 0.14824329954173032, + "grad_norm": 4.009722704641441, + "learning_rate": 9.638157886172001e-06, + "loss": 0.6799, + "step": 2135 + }, + { + "epoch": 
0.14831273434245243, + "grad_norm": 4.8117589946830694, + "learning_rate": 9.637737776771428e-06, + "loss": 0.8886, + "step": 2136 + }, + { + "epoch": 0.14838216914317456, + "grad_norm": 3.8628286078524794, + "learning_rate": 9.637317432799324e-06, + "loss": 0.627, + "step": 2137 + }, + { + "epoch": 0.14845160394389667, + "grad_norm": 4.806849215836901, + "learning_rate": 9.636896854276954e-06, + "loss": 0.7726, + "step": 2138 + }, + { + "epoch": 0.1485210387446188, + "grad_norm": 4.35950051490445, + "learning_rate": 9.636476041225586e-06, + "loss": 0.6371, + "step": 2139 + }, + { + "epoch": 0.1485904735453409, + "grad_norm": 3.2712297665561283, + "learning_rate": 9.636054993666507e-06, + "loss": 0.441, + "step": 2140 + }, + { + "epoch": 0.14865990834606305, + "grad_norm": 4.355060290067269, + "learning_rate": 9.635633711621014e-06, + "loss": 0.653, + "step": 2141 + }, + { + "epoch": 0.14872934314678518, + "grad_norm": 3.8338356810863337, + "learning_rate": 9.63521219511041e-06, + "loss": 0.4056, + "step": 2142 + }, + { + "epoch": 0.1487987779475073, + "grad_norm": 3.671166130351197, + "learning_rate": 9.634790444156021e-06, + "loss": 0.5412, + "step": 2143 + }, + { + "epoch": 0.14886821274822942, + "grad_norm": 5.13140177444575, + "learning_rate": 9.634368458779175e-06, + "loss": 0.6732, + "step": 2144 + }, + { + "epoch": 0.14893764754895153, + "grad_norm": 3.140632701933939, + "learning_rate": 9.633946239001217e-06, + "loss": 0.1796, + "step": 2145 + }, + { + "epoch": 0.14900708234967366, + "grad_norm": 4.080931149673151, + "learning_rate": 9.633523784843502e-06, + "loss": 0.4658, + "step": 2146 + }, + { + "epoch": 0.14907651715039577, + "grad_norm": 4.0423618324455095, + "learning_rate": 9.633101096327396e-06, + "loss": 0.6895, + "step": 2147 + }, + { + "epoch": 0.1491459519511179, + "grad_norm": 4.199203475655609, + "learning_rate": 9.63267817347428e-06, + "loss": 0.5258, + "step": 2148 + }, + { + "epoch": 0.14921538675184, + "grad_norm": 4.135762111929506, + "learning_rate": 9.632255016305545e-06, + "loss": 0.5535, + "step": 2149 + }, + { + "epoch": 0.14928482155256215, + "grad_norm": 3.55350284521541, + "learning_rate": 9.631831624842591e-06, + "loss": 0.3804, + "step": 2150 + }, + { + "epoch": 0.14935425635328425, + "grad_norm": 3.560772374066174, + "learning_rate": 9.631407999106837e-06, + "loss": 0.4919, + "step": 2151 + }, + { + "epoch": 0.1494236911540064, + "grad_norm": 5.428231971785874, + "learning_rate": 9.630984139119706e-06, + "loss": 0.7099, + "step": 2152 + }, + { + "epoch": 0.14949312595472852, + "grad_norm": 4.283387069547894, + "learning_rate": 9.630560044902637e-06, + "loss": 0.5947, + "step": 2153 + }, + { + "epoch": 0.14956256075545063, + "grad_norm": 3.7978832595563463, + "learning_rate": 9.630135716477083e-06, + "loss": 0.328, + "step": 2154 + }, + { + "epoch": 0.14963199555617276, + "grad_norm": 3.8698691820464473, + "learning_rate": 9.629711153864501e-06, + "loss": 0.5876, + "step": 2155 + }, + { + "epoch": 0.14970143035689487, + "grad_norm": 3.7171291787596044, + "learning_rate": 9.62928635708637e-06, + "loss": 0.3968, + "step": 2156 + }, + { + "epoch": 0.149770865157617, + "grad_norm": 4.010113480677708, + "learning_rate": 9.628861326164172e-06, + "loss": 0.4367, + "step": 2157 + }, + { + "epoch": 0.1498402999583391, + "grad_norm": 5.170529051538263, + "learning_rate": 9.628436061119407e-06, + "loss": 0.7093, + "step": 2158 + }, + { + "epoch": 0.14990973475906125, + "grad_norm": 5.42154246122125, + "learning_rate": 9.628010561973583e-06, + "loss": 0.8771, + 
"step": 2159 + }, + { + "epoch": 0.14997916955978335, + "grad_norm": 4.560966698215717, + "learning_rate": 9.62758482874822e-06, + "loss": 0.5701, + "step": 2160 + }, + { + "epoch": 0.1500486043605055, + "grad_norm": 4.1083092882864545, + "learning_rate": 9.627158861464856e-06, + "loss": 0.4977, + "step": 2161 + }, + { + "epoch": 0.1501180391612276, + "grad_norm": 4.162130983711981, + "learning_rate": 9.62673266014503e-06, + "loss": 0.4592, + "step": 2162 + }, + { + "epoch": 0.15018747396194973, + "grad_norm": 4.150643483025613, + "learning_rate": 9.626306224810304e-06, + "loss": 0.5144, + "step": 2163 + }, + { + "epoch": 0.15025690876267186, + "grad_norm": 3.52445630944535, + "learning_rate": 9.625879555482243e-06, + "loss": 0.2949, + "step": 2164 + }, + { + "epoch": 0.15032634356339397, + "grad_norm": 4.34367618299048, + "learning_rate": 9.625452652182428e-06, + "loss": 0.5263, + "step": 2165 + }, + { + "epoch": 0.1503957783641161, + "grad_norm": 5.7225432767828535, + "learning_rate": 9.625025514932454e-06, + "loss": 0.8177, + "step": 2166 + }, + { + "epoch": 0.1504652131648382, + "grad_norm": 4.411352366839603, + "learning_rate": 9.624598143753921e-06, + "loss": 0.6157, + "step": 2167 + }, + { + "epoch": 0.15053464796556035, + "grad_norm": 4.672624854610096, + "learning_rate": 9.624170538668448e-06, + "loss": 0.6514, + "step": 2168 + }, + { + "epoch": 0.15060408276628245, + "grad_norm": 5.338776565245194, + "learning_rate": 9.62374269969766e-06, + "loss": 0.5284, + "step": 2169 + }, + { + "epoch": 0.1506735175670046, + "grad_norm": 4.144303880694495, + "learning_rate": 9.623314626863199e-06, + "loss": 0.669, + "step": 2170 + }, + { + "epoch": 0.1507429523677267, + "grad_norm": 6.997013068353923, + "learning_rate": 9.622886320186717e-06, + "loss": 0.7822, + "step": 2171 + }, + { + "epoch": 0.15081238716844883, + "grad_norm": 3.4879408606678557, + "learning_rate": 9.622457779689876e-06, + "loss": 0.3417, + "step": 2172 + }, + { + "epoch": 0.15088182196917094, + "grad_norm": 6.638254983399789, + "learning_rate": 9.622029005394351e-06, + "loss": 0.6465, + "step": 2173 + }, + { + "epoch": 0.15095125676989307, + "grad_norm": 4.599317854997834, + "learning_rate": 9.621599997321829e-06, + "loss": 0.6965, + "step": 2174 + }, + { + "epoch": 0.15102069157061518, + "grad_norm": 4.042166253060516, + "learning_rate": 9.621170755494007e-06, + "loss": 0.53, + "step": 2175 + }, + { + "epoch": 0.1510901263713373, + "grad_norm": 4.5173353425869776, + "learning_rate": 9.6207412799326e-06, + "loss": 0.6743, + "step": 2176 + }, + { + "epoch": 0.15115956117205945, + "grad_norm": 4.099354829361525, + "learning_rate": 9.620311570659326e-06, + "loss": 0.4366, + "step": 2177 + }, + { + "epoch": 0.15122899597278155, + "grad_norm": 3.7265663868190066, + "learning_rate": 9.619881627695921e-06, + "loss": 0.3429, + "step": 2178 + }, + { + "epoch": 0.1512984307735037, + "grad_norm": 3.5766231935228876, + "learning_rate": 9.619451451064132e-06, + "loss": 0.4632, + "step": 2179 + }, + { + "epoch": 0.1513678655742258, + "grad_norm": 4.191523906066392, + "learning_rate": 9.619021040785717e-06, + "loss": 0.2871, + "step": 2180 + }, + { + "epoch": 0.15143730037494793, + "grad_norm": 5.094071493747692, + "learning_rate": 9.618590396882443e-06, + "loss": 0.7728, + "step": 2181 + }, + { + "epoch": 0.15150673517567004, + "grad_norm": 4.585236149998841, + "learning_rate": 9.618159519376094e-06, + "loss": 0.6826, + "step": 2182 + }, + { + "epoch": 0.15157616997639217, + "grad_norm": 3.561424487335961, + "learning_rate": 
9.61772840828846e-06, + "loss": 0.3847, + "step": 2183 + }, + { + "epoch": 0.15164560477711428, + "grad_norm": 4.479997799178344, + "learning_rate": 9.617297063641352e-06, + "loss": 0.554, + "step": 2184 + }, + { + "epoch": 0.1517150395778364, + "grad_norm": 4.894157220215697, + "learning_rate": 9.61686548545658e-06, + "loss": 0.6575, + "step": 2185 + }, + { + "epoch": 0.15178447437855852, + "grad_norm": 3.1848964285422348, + "learning_rate": 9.616433673755977e-06, + "loss": 0.4898, + "step": 2186 + }, + { + "epoch": 0.15185390917928066, + "grad_norm": 4.301072185107848, + "learning_rate": 9.616001628561383e-06, + "loss": 0.5825, + "step": 2187 + }, + { + "epoch": 0.1519233439800028, + "grad_norm": 4.001166271475615, + "learning_rate": 9.61556934989465e-06, + "loss": 0.6727, + "step": 2188 + }, + { + "epoch": 0.1519927787807249, + "grad_norm": 4.029510516138517, + "learning_rate": 9.615136837777642e-06, + "loss": 0.5502, + "step": 2189 + }, + { + "epoch": 0.15206221358144703, + "grad_norm": 4.045130807084963, + "learning_rate": 9.614704092232236e-06, + "loss": 0.4201, + "step": 2190 + }, + { + "epoch": 0.15213164838216914, + "grad_norm": 4.799132886434701, + "learning_rate": 9.614271113280317e-06, + "loss": 0.6717, + "step": 2191 + }, + { + "epoch": 0.15220108318289127, + "grad_norm": 4.417803179709248, + "learning_rate": 9.61383790094379e-06, + "loss": 0.5045, + "step": 2192 + }, + { + "epoch": 0.15227051798361338, + "grad_norm": 3.0958310419921995, + "learning_rate": 9.613404455244559e-06, + "loss": 0.2164, + "step": 2193 + }, + { + "epoch": 0.15233995278433551, + "grad_norm": 4.987880669311369, + "learning_rate": 9.612970776204553e-06, + "loss": 0.7546, + "step": 2194 + }, + { + "epoch": 0.15240938758505762, + "grad_norm": 3.1490928292903564, + "learning_rate": 9.612536863845704e-06, + "loss": 0.3057, + "step": 2195 + }, + { + "epoch": 0.15247882238577976, + "grad_norm": 4.859826456677941, + "learning_rate": 9.612102718189962e-06, + "loss": 0.6992, + "step": 2196 + }, + { + "epoch": 0.15254825718650186, + "grad_norm": 3.759370550433794, + "learning_rate": 9.611668339259281e-06, + "loss": 0.4494, + "step": 2197 + }, + { + "epoch": 0.152617691987224, + "grad_norm": 4.696407139683973, + "learning_rate": 9.611233727075635e-06, + "loss": 0.59, + "step": 2198 + }, + { + "epoch": 0.15268712678794613, + "grad_norm": 4.251295120929709, + "learning_rate": 9.610798881661005e-06, + "loss": 0.554, + "step": 2199 + }, + { + "epoch": 0.15275656158866824, + "grad_norm": 5.010389993922083, + "learning_rate": 9.610363803037384e-06, + "loss": 0.2998, + "step": 2200 + }, + { + "epoch": 0.15282599638939037, + "grad_norm": 4.599848443036542, + "learning_rate": 9.60992849122678e-06, + "loss": 0.446, + "step": 2201 + }, + { + "epoch": 0.15289543119011248, + "grad_norm": 4.504389324676448, + "learning_rate": 9.60949294625121e-06, + "loss": 0.4754, + "step": 2202 + }, + { + "epoch": 0.15296486599083461, + "grad_norm": 3.983256582971974, + "learning_rate": 9.609057168132701e-06, + "loss": 0.5499, + "step": 2203 + }, + { + "epoch": 0.15303430079155672, + "grad_norm": 5.459218118067474, + "learning_rate": 9.608621156893298e-06, + "loss": 0.6484, + "step": 2204 + }, + { + "epoch": 0.15310373559227886, + "grad_norm": 4.489706173966664, + "learning_rate": 9.60818491255505e-06, + "loss": 0.5891, + "step": 2205 + }, + { + "epoch": 0.15317317039300096, + "grad_norm": 4.859178105499087, + "learning_rate": 9.607748435140025e-06, + "loss": 0.6211, + "step": 2206 + }, + { + "epoch": 0.1532426051937231, + "grad_norm": 
5.534765668202554, + "learning_rate": 9.607311724670297e-06, + "loss": 0.701, + "step": 2207 + }, + { + "epoch": 0.1533120399944452, + "grad_norm": 4.723858289525773, + "learning_rate": 9.606874781167957e-06, + "loss": 0.5543, + "step": 2208 + }, + { + "epoch": 0.15338147479516734, + "grad_norm": 4.39705125249002, + "learning_rate": 9.606437604655103e-06, + "loss": 0.5024, + "step": 2209 + }, + { + "epoch": 0.15345090959588947, + "grad_norm": 4.221118384057801, + "learning_rate": 9.606000195153846e-06, + "loss": 0.5979, + "step": 2210 + }, + { + "epoch": 0.15352034439661158, + "grad_norm": 4.093665115879449, + "learning_rate": 9.605562552686312e-06, + "loss": 0.4684, + "step": 2211 + }, + { + "epoch": 0.15358977919733371, + "grad_norm": 5.669613904245084, + "learning_rate": 9.605124677274637e-06, + "loss": 0.6409, + "step": 2212 + }, + { + "epoch": 0.15365921399805582, + "grad_norm": 3.7993841474952244, + "learning_rate": 9.604686568940966e-06, + "loss": 0.3656, + "step": 2213 + }, + { + "epoch": 0.15372864879877796, + "grad_norm": 3.2532120597641616, + "learning_rate": 9.604248227707457e-06, + "loss": 0.2739, + "step": 2214 + }, + { + "epoch": 0.15379808359950006, + "grad_norm": 4.865153529138012, + "learning_rate": 9.603809653596286e-06, + "loss": 0.7091, + "step": 2215 + }, + { + "epoch": 0.1538675184002222, + "grad_norm": 4.698515233693294, + "learning_rate": 9.60337084662963e-06, + "loss": 0.7606, + "step": 2216 + }, + { + "epoch": 0.1539369532009443, + "grad_norm": 4.328576777087992, + "learning_rate": 9.602931806829688e-06, + "loss": 0.585, + "step": 2217 + }, + { + "epoch": 0.15400638800166644, + "grad_norm": 4.772455909519958, + "learning_rate": 9.602492534218662e-06, + "loss": 0.6278, + "step": 2218 + }, + { + "epoch": 0.15407582280238855, + "grad_norm": 3.090391041170871, + "learning_rate": 9.602053028818772e-06, + "loss": 0.2159, + "step": 2219 + }, + { + "epoch": 0.15414525760311068, + "grad_norm": 3.8649101984275194, + "learning_rate": 9.601613290652246e-06, + "loss": 0.5631, + "step": 2220 + }, + { + "epoch": 0.15421469240383281, + "grad_norm": 8.536454886869532, + "learning_rate": 9.601173319741327e-06, + "loss": 0.6514, + "step": 2221 + }, + { + "epoch": 0.15428412720455492, + "grad_norm": 4.58777928140159, + "learning_rate": 9.60073311610827e-06, + "loss": 0.6224, + "step": 2222 + }, + { + "epoch": 0.15435356200527706, + "grad_norm": 5.352664238632613, + "learning_rate": 9.600292679775338e-06, + "loss": 0.7644, + "step": 2223 + }, + { + "epoch": 0.15442299680599916, + "grad_norm": 4.90504377939357, + "learning_rate": 9.599852010764808e-06, + "loss": 0.6574, + "step": 2224 + }, + { + "epoch": 0.1544924316067213, + "grad_norm": 4.18308356178959, + "learning_rate": 9.599411109098967e-06, + "loss": 0.4716, + "step": 2225 + }, + { + "epoch": 0.1545618664074434, + "grad_norm": 4.6087451639829045, + "learning_rate": 9.598969974800116e-06, + "loss": 0.5011, + "step": 2226 + }, + { + "epoch": 0.15463130120816554, + "grad_norm": 4.833187987561192, + "learning_rate": 9.598528607890569e-06, + "loss": 0.749, + "step": 2227 + }, + { + "epoch": 0.15470073600888765, + "grad_norm": 3.6491229759270984, + "learning_rate": 9.59808700839265e-06, + "loss": 0.4544, + "step": 2228 + }, + { + "epoch": 0.15477017080960978, + "grad_norm": 4.974073027682453, + "learning_rate": 9.597645176328691e-06, + "loss": 0.5836, + "step": 2229 + }, + { + "epoch": 0.1548396056103319, + "grad_norm": 2.9360282418807655, + "learning_rate": 9.597203111721042e-06, + "loss": 0.213, + "step": 2230 + }, + { + "epoch": 
0.15490904041105402, + "grad_norm": 5.232706077129518, + "learning_rate": 9.596760814592061e-06, + "loss": 0.7803, + "step": 2231 + }, + { + "epoch": 0.15497847521177613, + "grad_norm": 4.371667142036753, + "learning_rate": 9.59631828496412e-06, + "loss": 0.6815, + "step": 2232 + }, + { + "epoch": 0.15504791001249826, + "grad_norm": 4.854097833053744, + "learning_rate": 9.595875522859604e-06, + "loss": 0.7341, + "step": 2233 + }, + { + "epoch": 0.1551173448132204, + "grad_norm": 5.009857229455537, + "learning_rate": 9.5954325283009e-06, + "loss": 0.456, + "step": 2234 + }, + { + "epoch": 0.1551867796139425, + "grad_norm": 4.086937530145662, + "learning_rate": 9.594989301310423e-06, + "loss": 0.4557, + "step": 2235 + }, + { + "epoch": 0.15525621441466464, + "grad_norm": 4.628705263050036, + "learning_rate": 9.594545841910586e-06, + "loss": 0.6023, + "step": 2236 + }, + { + "epoch": 0.15532564921538675, + "grad_norm": 4.3001149926052875, + "learning_rate": 9.594102150123816e-06, + "loss": 0.5026, + "step": 2237 + }, + { + "epoch": 0.15539508401610888, + "grad_norm": 4.127495344339525, + "learning_rate": 9.593658225972562e-06, + "loss": 0.599, + "step": 2238 + }, + { + "epoch": 0.155464518816831, + "grad_norm": 5.060900644131906, + "learning_rate": 9.593214069479272e-06, + "loss": 0.5588, + "step": 2239 + }, + { + "epoch": 0.15553395361755312, + "grad_norm": 5.4176957997946715, + "learning_rate": 9.592769680666409e-06, + "loss": 0.6048, + "step": 2240 + }, + { + "epoch": 0.15560338841827523, + "grad_norm": 4.793367140954558, + "learning_rate": 9.592325059556455e-06, + "loss": 0.623, + "step": 2241 + }, + { + "epoch": 0.15567282321899736, + "grad_norm": 4.3226670598415184, + "learning_rate": 9.591880206171896e-06, + "loss": 0.657, + "step": 2242 + }, + { + "epoch": 0.15574225801971947, + "grad_norm": 3.261919850240303, + "learning_rate": 9.591435120535231e-06, + "loss": 0.1683, + "step": 2243 + }, + { + "epoch": 0.1558116928204416, + "grad_norm": 2.3377488856361692, + "learning_rate": 9.590989802668975e-06, + "loss": 0.2288, + "step": 2244 + }, + { + "epoch": 0.15588112762116374, + "grad_norm": 4.095308226757974, + "learning_rate": 9.59054425259565e-06, + "loss": 0.4485, + "step": 2245 + }, + { + "epoch": 0.15595056242188585, + "grad_norm": 2.840755720801834, + "learning_rate": 9.59009847033779e-06, + "loss": 0.2508, + "step": 2246 + }, + { + "epoch": 0.15601999722260798, + "grad_norm": 5.410580650052158, + "learning_rate": 9.589652455917943e-06, + "loss": 0.5681, + "step": 2247 + }, + { + "epoch": 0.1560894320233301, + "grad_norm": 3.7555928328296013, + "learning_rate": 9.58920620935867e-06, + "loss": 0.5221, + "step": 2248 + }, + { + "epoch": 0.15615886682405222, + "grad_norm": 4.371211165196956, + "learning_rate": 9.588759730682538e-06, + "loss": 0.695, + "step": 2249 + }, + { + "epoch": 0.15622830162477433, + "grad_norm": 3.6418905183885792, + "learning_rate": 9.588313019912132e-06, + "loss": 0.5032, + "step": 2250 + }, + { + "epoch": 0.15629773642549646, + "grad_norm": 4.747795675500118, + "learning_rate": 9.587866077070046e-06, + "loss": 0.7966, + "step": 2251 + }, + { + "epoch": 0.15636717122621857, + "grad_norm": 5.318563700813509, + "learning_rate": 9.587418902178885e-06, + "loss": 0.4884, + "step": 2252 + }, + { + "epoch": 0.1564366060269407, + "grad_norm": 4.468146392052785, + "learning_rate": 9.586971495261268e-06, + "loss": 0.6217, + "step": 2253 + }, + { + "epoch": 0.1565060408276628, + "grad_norm": 4.499484728799713, + "learning_rate": 9.586523856339822e-06, + "loss": 0.6122, + 
"step": 2254 + }, + { + "epoch": 0.15657547562838495, + "grad_norm": 4.502491887358075, + "learning_rate": 9.58607598543719e-06, + "loss": 0.5106, + "step": 2255 + }, + { + "epoch": 0.15664491042910708, + "grad_norm": 4.498915940861701, + "learning_rate": 9.585627882576023e-06, + "loss": 0.553, + "step": 2256 + }, + { + "epoch": 0.1567143452298292, + "grad_norm": 3.350750672659533, + "learning_rate": 9.585179547778987e-06, + "loss": 0.2702, + "step": 2257 + }, + { + "epoch": 0.15678378003055132, + "grad_norm": 3.931956938849642, + "learning_rate": 9.584730981068759e-06, + "loss": 0.4332, + "step": 2258 + }, + { + "epoch": 0.15685321483127343, + "grad_norm": 2.8802322444599913, + "learning_rate": 9.584282182468025e-06, + "loss": 0.1681, + "step": 2259 + }, + { + "epoch": 0.15692264963199556, + "grad_norm": 4.728179444858324, + "learning_rate": 9.583833151999488e-06, + "loss": 0.5674, + "step": 2260 + }, + { + "epoch": 0.15699208443271767, + "grad_norm": 4.505436592965721, + "learning_rate": 9.583383889685854e-06, + "loss": 0.5343, + "step": 2261 + }, + { + "epoch": 0.1570615192334398, + "grad_norm": 4.50883175774027, + "learning_rate": 9.58293439554985e-06, + "loss": 0.5165, + "step": 2262 + }, + { + "epoch": 0.1571309540341619, + "grad_norm": 3.347169107573412, + "learning_rate": 9.582484669614212e-06, + "loss": 0.4138, + "step": 2263 + }, + { + "epoch": 0.15720038883488405, + "grad_norm": 14.352319034529115, + "learning_rate": 9.582034711901684e-06, + "loss": 0.5213, + "step": 2264 + }, + { + "epoch": 0.15726982363560615, + "grad_norm": 4.016815832051416, + "learning_rate": 9.581584522435025e-06, + "loss": 0.3601, + "step": 2265 + }, + { + "epoch": 0.1573392584363283, + "grad_norm": 4.760764139599011, + "learning_rate": 9.581134101237004e-06, + "loss": 0.7454, + "step": 2266 + }, + { + "epoch": 0.15740869323705042, + "grad_norm": 4.032330445073442, + "learning_rate": 9.580683448330405e-06, + "loss": 0.3894, + "step": 2267 + }, + { + "epoch": 0.15747812803777253, + "grad_norm": 5.274412689107398, + "learning_rate": 9.58023256373802e-06, + "loss": 0.6202, + "step": 2268 + }, + { + "epoch": 0.15754756283849466, + "grad_norm": 4.023481654255643, + "learning_rate": 9.579781447482657e-06, + "loss": 0.4831, + "step": 2269 + }, + { + "epoch": 0.15761699763921677, + "grad_norm": 4.11254151064439, + "learning_rate": 9.57933009958713e-06, + "loss": 0.5913, + "step": 2270 + }, + { + "epoch": 0.1576864324399389, + "grad_norm": 4.192353973138778, + "learning_rate": 9.578878520074268e-06, + "loss": 0.5148, + "step": 2271 + }, + { + "epoch": 0.157755867240661, + "grad_norm": 4.462575874594395, + "learning_rate": 9.57842670896691e-06, + "loss": 0.587, + "step": 2272 + }, + { + "epoch": 0.15782530204138315, + "grad_norm": 4.357870644536954, + "learning_rate": 9.577974666287913e-06, + "loss": 0.5564, + "step": 2273 + }, + { + "epoch": 0.15789473684210525, + "grad_norm": 4.8292456288199945, + "learning_rate": 9.577522392060134e-06, + "loss": 0.5367, + "step": 2274 + }, + { + "epoch": 0.1579641716428274, + "grad_norm": 4.401807915568853, + "learning_rate": 9.577069886306455e-06, + "loss": 0.6625, + "step": 2275 + }, + { + "epoch": 0.1580336064435495, + "grad_norm": 2.9934394086346443, + "learning_rate": 9.57661714904976e-06, + "loss": 0.3232, + "step": 2276 + }, + { + "epoch": 0.15810304124427163, + "grad_norm": 4.82092681484257, + "learning_rate": 9.57616418031295e-06, + "loss": 0.7151, + "step": 2277 + }, + { + "epoch": 0.15817247604499374, + "grad_norm": 7.375034875308307, + "learning_rate": 
9.575710980118931e-06, + "loss": 0.5792, + "step": 2278 + }, + { + "epoch": 0.15824191084571587, + "grad_norm": 5.080238516115881, + "learning_rate": 9.57525754849063e-06, + "loss": 0.7074, + "step": 2279 + }, + { + "epoch": 0.158311345646438, + "grad_norm": 4.848069078435341, + "learning_rate": 9.574803885450979e-06, + "loss": 0.7688, + "step": 2280 + }, + { + "epoch": 0.1583807804471601, + "grad_norm": 4.3755574281481495, + "learning_rate": 9.574349991022924e-06, + "loss": 0.3341, + "step": 2281 + }, + { + "epoch": 0.15845021524788225, + "grad_norm": 2.9668206570989697, + "learning_rate": 9.573895865229424e-06, + "loss": 0.3524, + "step": 2282 + }, + { + "epoch": 0.15851965004860435, + "grad_norm": 3.5247035725124625, + "learning_rate": 9.573441508093445e-06, + "loss": 0.5685, + "step": 2283 + }, + { + "epoch": 0.1585890848493265, + "grad_norm": 3.604211417509853, + "learning_rate": 9.572986919637972e-06, + "loss": 0.5509, + "step": 2284 + }, + { + "epoch": 0.1586585196500486, + "grad_norm": 3.4357426443151473, + "learning_rate": 9.572532099885992e-06, + "loss": 0.2446, + "step": 2285 + }, + { + "epoch": 0.15872795445077073, + "grad_norm": 4.267981730472809, + "learning_rate": 9.572077048860516e-06, + "loss": 0.5516, + "step": 2286 + }, + { + "epoch": 0.15879738925149284, + "grad_norm": 4.705510938624109, + "learning_rate": 9.571621766584554e-06, + "loss": 0.5019, + "step": 2287 + }, + { + "epoch": 0.15886682405221497, + "grad_norm": 4.077655349108888, + "learning_rate": 9.571166253081137e-06, + "loss": 0.4756, + "step": 2288 + }, + { + "epoch": 0.15893625885293708, + "grad_norm": 4.622230707108691, + "learning_rate": 9.570710508373304e-06, + "loss": 0.6148, + "step": 2289 + }, + { + "epoch": 0.1590056936536592, + "grad_norm": 3.1458093793763147, + "learning_rate": 9.570254532484106e-06, + "loss": 0.2311, + "step": 2290 + }, + { + "epoch": 0.15907512845438135, + "grad_norm": 3.7905227092981617, + "learning_rate": 9.569798325436603e-06, + "loss": 0.5566, + "step": 2291 + }, + { + "epoch": 0.15914456325510346, + "grad_norm": 4.410040109309413, + "learning_rate": 9.569341887253875e-06, + "loss": 0.5267, + "step": 2292 + }, + { + "epoch": 0.1592139980558256, + "grad_norm": 4.052569758457331, + "learning_rate": 9.568885217959002e-06, + "loss": 0.3068, + "step": 2293 + }, + { + "epoch": 0.1592834328565477, + "grad_norm": 4.754984411285499, + "learning_rate": 9.568428317575086e-06, + "loss": 0.548, + "step": 2294 + }, + { + "epoch": 0.15935286765726983, + "grad_norm": 4.581153026015169, + "learning_rate": 9.567971186125236e-06, + "loss": 0.7509, + "step": 2295 + }, + { + "epoch": 0.15942230245799194, + "grad_norm": 4.615198944833523, + "learning_rate": 9.567513823632573e-06, + "loss": 0.2786, + "step": 2296 + }, + { + "epoch": 0.15949173725871407, + "grad_norm": 4.113398900341135, + "learning_rate": 9.567056230120227e-06, + "loss": 0.465, + "step": 2297 + }, + { + "epoch": 0.15956117205943618, + "grad_norm": 3.6507246939887468, + "learning_rate": 9.566598405611344e-06, + "loss": 0.5308, + "step": 2298 + }, + { + "epoch": 0.15963060686015831, + "grad_norm": 4.113783316427625, + "learning_rate": 9.566140350129084e-06, + "loss": 0.4454, + "step": 2299 + }, + { + "epoch": 0.15970004166088042, + "grad_norm": 4.174566137807122, + "learning_rate": 9.565682063696611e-06, + "loss": 0.4666, + "step": 2300 + }, + { + "epoch": 0.15976947646160256, + "grad_norm": 3.289926752244745, + "learning_rate": 9.565223546337106e-06, + "loss": 0.32, + "step": 2301 + }, + { + "epoch": 0.1598389112623247, + "grad_norm": 
3.481615028270102, + "learning_rate": 9.56476479807376e-06, + "loss": 0.463, + "step": 2302 + }, + { + "epoch": 0.1599083460630468, + "grad_norm": 3.2731810528337966, + "learning_rate": 9.564305818929776e-06, + "loss": 0.3808, + "step": 2303 + }, + { + "epoch": 0.15997778086376893, + "grad_norm": 3.617734992367984, + "learning_rate": 9.563846608928366e-06, + "loss": 0.3764, + "step": 2304 + }, + { + "epoch": 0.16004721566449104, + "grad_norm": 3.9754828886928397, + "learning_rate": 9.563387168092762e-06, + "loss": 0.298, + "step": 2305 + }, + { + "epoch": 0.16011665046521317, + "grad_norm": 4.30938466323922, + "learning_rate": 9.562927496446197e-06, + "loss": 0.6277, + "step": 2306 + }, + { + "epoch": 0.16018608526593528, + "grad_norm": 3.544408251240936, + "learning_rate": 9.562467594011924e-06, + "loss": 0.3815, + "step": 2307 + }, + { + "epoch": 0.16025552006665741, + "grad_norm": 6.458064427857517, + "learning_rate": 9.562007460813201e-06, + "loss": 0.8546, + "step": 2308 + }, + { + "epoch": 0.16032495486737952, + "grad_norm": 4.2848439665283165, + "learning_rate": 9.561547096873305e-06, + "loss": 0.5042, + "step": 2309 + }, + { + "epoch": 0.16039438966810166, + "grad_norm": 4.672207480846155, + "learning_rate": 9.561086502215517e-06, + "loss": 0.6461, + "step": 2310 + }, + { + "epoch": 0.16046382446882376, + "grad_norm": 3.849411890887942, + "learning_rate": 9.560625676863135e-06, + "loss": 0.4879, + "step": 2311 + }, + { + "epoch": 0.1605332592695459, + "grad_norm": 4.409426410111942, + "learning_rate": 9.560164620839468e-06, + "loss": 0.49, + "step": 2312 + }, + { + "epoch": 0.16060269407026803, + "grad_norm": 4.271218248722517, + "learning_rate": 9.559703334167834e-06, + "loss": 0.2877, + "step": 2313 + }, + { + "epoch": 0.16067212887099014, + "grad_norm": 5.275863020656294, + "learning_rate": 9.559241816871565e-06, + "loss": 0.6568, + "step": 2314 + }, + { + "epoch": 0.16074156367171227, + "grad_norm": 4.718131238979712, + "learning_rate": 9.558780068974006e-06, + "loss": 0.7289, + "step": 2315 + }, + { + "epoch": 0.16081099847243438, + "grad_norm": 4.208251730114253, + "learning_rate": 9.558318090498507e-06, + "loss": 0.4908, + "step": 2316 + }, + { + "epoch": 0.16088043327315651, + "grad_norm": 3.5384967955225304, + "learning_rate": 9.557855881468437e-06, + "loss": 0.4776, + "step": 2317 + }, + { + "epoch": 0.16094986807387862, + "grad_norm": 3.38593928192316, + "learning_rate": 9.557393441907175e-06, + "loss": 0.4885, + "step": 2318 + }, + { + "epoch": 0.16101930287460076, + "grad_norm": 4.467286604842763, + "learning_rate": 9.55693077183811e-06, + "loss": 0.6762, + "step": 2319 + }, + { + "epoch": 0.16108873767532286, + "grad_norm": 3.923019104578957, + "learning_rate": 9.556467871284643e-06, + "loss": 0.2991, + "step": 2320 + }, + { + "epoch": 0.161158172476045, + "grad_norm": 3.8786260218939463, + "learning_rate": 9.556004740270188e-06, + "loss": 0.3058, + "step": 2321 + }, + { + "epoch": 0.1612276072767671, + "grad_norm": 3.4827282875526056, + "learning_rate": 9.555541378818168e-06, + "loss": 0.2017, + "step": 2322 + }, + { + "epoch": 0.16129704207748924, + "grad_norm": 3.893391384235062, + "learning_rate": 9.55507778695202e-06, + "loss": 0.4303, + "step": 2323 + }, + { + "epoch": 0.16136647687821135, + "grad_norm": 5.119219559535707, + "learning_rate": 9.554613964695189e-06, + "loss": 0.6286, + "step": 2324 + }, + { + "epoch": 0.16143591167893348, + "grad_norm": 4.024923499510395, + "learning_rate": 9.55414991207114e-06, + "loss": 0.7174, + "step": 2325 + }, + { + "epoch": 
0.16150534647965561, + "grad_norm": 5.2836129756977295, + "learning_rate": 9.553685629103344e-06, + "loss": 0.4814, + "step": 2326 + }, + { + "epoch": 0.16157478128037772, + "grad_norm": 3.2169923076170575, + "learning_rate": 9.553221115815278e-06, + "loss": 0.2713, + "step": 2327 + }, + { + "epoch": 0.16164421608109986, + "grad_norm": 2.655304326401075, + "learning_rate": 9.552756372230442e-06, + "loss": 0.2459, + "step": 2328 + }, + { + "epoch": 0.16171365088182196, + "grad_norm": 4.579757588695799, + "learning_rate": 9.552291398372342e-06, + "loss": 0.3437, + "step": 2329 + }, + { + "epoch": 0.1617830856825441, + "grad_norm": 4.225758348469034, + "learning_rate": 9.551826194264493e-06, + "loss": 0.4447, + "step": 2330 + }, + { + "epoch": 0.1618525204832662, + "grad_norm": 3.5043011752638007, + "learning_rate": 9.551360759930424e-06, + "loss": 0.3426, + "step": 2331 + }, + { + "epoch": 0.16192195528398834, + "grad_norm": 2.9642269184920425, + "learning_rate": 9.550895095393679e-06, + "loss": 0.2659, + "step": 2332 + }, + { + "epoch": 0.16199139008471045, + "grad_norm": 4.60677120752096, + "learning_rate": 9.550429200677811e-06, + "loss": 0.6668, + "step": 2333 + }, + { + "epoch": 0.16206082488543258, + "grad_norm": 3.425591902262055, + "learning_rate": 9.549963075806383e-06, + "loss": 0.3882, + "step": 2334 + }, + { + "epoch": 0.1621302596861547, + "grad_norm": 4.1161195158159245, + "learning_rate": 9.549496720802971e-06, + "loss": 0.5028, + "step": 2335 + }, + { + "epoch": 0.16219969448687682, + "grad_norm": 3.5550127838083743, + "learning_rate": 9.549030135691161e-06, + "loss": 0.4077, + "step": 2336 + }, + { + "epoch": 0.16226912928759896, + "grad_norm": 4.53908049728783, + "learning_rate": 9.548563320494557e-06, + "loss": 0.6878, + "step": 2337 + }, + { + "epoch": 0.16233856408832106, + "grad_norm": 3.709617337055653, + "learning_rate": 9.548096275236766e-06, + "loss": 0.6337, + "step": 2338 + }, + { + "epoch": 0.1624079988890432, + "grad_norm": 4.3936284301072135, + "learning_rate": 9.547628999941413e-06, + "loss": 0.5906, + "step": 2339 + }, + { + "epoch": 0.1624774336897653, + "grad_norm": 4.046642486700777, + "learning_rate": 9.54716149463213e-06, + "loss": 0.618, + "step": 2340 + }, + { + "epoch": 0.16254686849048744, + "grad_norm": 3.4111402312648753, + "learning_rate": 9.546693759332564e-06, + "loss": 0.3449, + "step": 2341 + }, + { + "epoch": 0.16261630329120955, + "grad_norm": 2.786881927263152, + "learning_rate": 9.546225794066373e-06, + "loss": 0.2162, + "step": 2342 + }, + { + "epoch": 0.16268573809193168, + "grad_norm": 3.730035142246005, + "learning_rate": 9.545757598857227e-06, + "loss": 0.4596, + "step": 2343 + }, + { + "epoch": 0.1627551728926538, + "grad_norm": 3.098781151664003, + "learning_rate": 9.545289173728805e-06, + "loss": 0.472, + "step": 2344 + }, + { + "epoch": 0.16282460769337592, + "grad_norm": 3.9739797971503217, + "learning_rate": 9.544820518704798e-06, + "loss": 0.5474, + "step": 2345 + }, + { + "epoch": 0.16289404249409803, + "grad_norm": 4.808088174047386, + "learning_rate": 9.544351633808914e-06, + "loss": 0.283, + "step": 2346 + }, + { + "epoch": 0.16296347729482016, + "grad_norm": 4.018741365451562, + "learning_rate": 9.543882519064866e-06, + "loss": 0.401, + "step": 2347 + }, + { + "epoch": 0.1630329120955423, + "grad_norm": 4.679918028713415, + "learning_rate": 9.543413174496381e-06, + "loss": 0.6496, + "step": 2348 + }, + { + "epoch": 0.1631023468962644, + "grad_norm": 4.798289975448442, + "learning_rate": 9.542943600127201e-06, + "loss": 
0.6941, + "step": 2349 + }, + { + "epoch": 0.16317178169698654, + "grad_norm": 4.462042661379931, + "learning_rate": 9.542473795981074e-06, + "loss": 0.6275, + "step": 2350 + }, + { + "epoch": 0.16324121649770865, + "grad_norm": 3.4013343660971582, + "learning_rate": 9.54200376208176e-06, + "loss": 0.4502, + "step": 2351 + }, + { + "epoch": 0.16331065129843078, + "grad_norm": 4.275023703120487, + "learning_rate": 9.541533498453037e-06, + "loss": 0.5999, + "step": 2352 + }, + { + "epoch": 0.1633800860991529, + "grad_norm": 4.728477156008202, + "learning_rate": 9.54106300511869e-06, + "loss": 0.8082, + "step": 2353 + }, + { + "epoch": 0.16344952089987502, + "grad_norm": 3.1344819277455946, + "learning_rate": 9.540592282102512e-06, + "loss": 0.2498, + "step": 2354 + }, + { + "epoch": 0.16351895570059713, + "grad_norm": 4.3756885617936625, + "learning_rate": 9.540121329428316e-06, + "loss": 0.6281, + "step": 2355 + }, + { + "epoch": 0.16358839050131926, + "grad_norm": 2.864864701435672, + "learning_rate": 9.53965014711992e-06, + "loss": 0.2832, + "step": 2356 + }, + { + "epoch": 0.16365782530204137, + "grad_norm": 3.5960456133555043, + "learning_rate": 9.539178735201157e-06, + "loss": 0.4716, + "step": 2357 + }, + { + "epoch": 0.1637272601027635, + "grad_norm": 4.0279930454859745, + "learning_rate": 9.53870709369587e-06, + "loss": 0.5604, + "step": 2358 + }, + { + "epoch": 0.16379669490348564, + "grad_norm": 3.6287455510808275, + "learning_rate": 9.538235222627914e-06, + "loss": 0.4891, + "step": 2359 + }, + { + "epoch": 0.16386612970420775, + "grad_norm": 3.950405636673243, + "learning_rate": 9.537763122021158e-06, + "loss": 0.4139, + "step": 2360 + }, + { + "epoch": 0.16393556450492988, + "grad_norm": 3.9847880187786355, + "learning_rate": 9.537290791899475e-06, + "loss": 0.3308, + "step": 2361 + }, + { + "epoch": 0.164004999305652, + "grad_norm": 3.512379434738461, + "learning_rate": 9.53681823228676e-06, + "loss": 0.4483, + "step": 2362 + }, + { + "epoch": 0.16407443410637412, + "grad_norm": 2.8985876093539957, + "learning_rate": 9.536345443206912e-06, + "loss": 0.2381, + "step": 2363 + }, + { + "epoch": 0.16414386890709623, + "grad_norm": 2.8708160775213196, + "learning_rate": 9.535872424683842e-06, + "loss": 0.2271, + "step": 2364 + }, + { + "epoch": 0.16421330370781836, + "grad_norm": 8.298354364388473, + "learning_rate": 9.53539917674148e-06, + "loss": 0.4437, + "step": 2365 + }, + { + "epoch": 0.16428273850854047, + "grad_norm": 4.536971426062279, + "learning_rate": 9.53492569940376e-06, + "loss": 0.5058, + "step": 2366 + }, + { + "epoch": 0.1643521733092626, + "grad_norm": 4.99397177022692, + "learning_rate": 9.53445199269463e-06, + "loss": 0.5501, + "step": 2367 + }, + { + "epoch": 0.1644216081099847, + "grad_norm": 4.706911104442606, + "learning_rate": 9.53397805663805e-06, + "loss": 0.4867, + "step": 2368 + }, + { + "epoch": 0.16449104291070685, + "grad_norm": 5.052019938687275, + "learning_rate": 9.533503891257989e-06, + "loss": 0.8182, + "step": 2369 + }, + { + "epoch": 0.16456047771142898, + "grad_norm": 4.918998269727205, + "learning_rate": 9.53302949657843e-06, + "loss": 0.4797, + "step": 2370 + }, + { + "epoch": 0.1646299125121511, + "grad_norm": 4.000032134556176, + "learning_rate": 9.53255487262337e-06, + "loss": 0.3573, + "step": 2371 + }, + { + "epoch": 0.16469934731287322, + "grad_norm": 3.2125303798885234, + "learning_rate": 9.532080019416814e-06, + "loss": 0.2463, + "step": 2372 + }, + { + "epoch": 0.16476878211359533, + "grad_norm": 4.740107079059526, + 
"learning_rate": 9.531604936982777e-06, + "loss": 0.6076, + "step": 2373 + }, + { + "epoch": 0.16483821691431746, + "grad_norm": 4.7335859724997045, + "learning_rate": 9.531129625345292e-06, + "loss": 0.6571, + "step": 2374 + }, + { + "epoch": 0.16490765171503957, + "grad_norm": 3.7862317288224836, + "learning_rate": 9.530654084528397e-06, + "loss": 0.6106, + "step": 2375 + }, + { + "epoch": 0.1649770865157617, + "grad_norm": 5.038323788202095, + "learning_rate": 9.530178314556146e-06, + "loss": 0.696, + "step": 2376 + }, + { + "epoch": 0.1650465213164838, + "grad_norm": 4.4971577390890545, + "learning_rate": 9.529702315452603e-06, + "loss": 0.6284, + "step": 2377 + }, + { + "epoch": 0.16511595611720595, + "grad_norm": 3.421754441211268, + "learning_rate": 9.52922608724184e-06, + "loss": 0.3289, + "step": 2378 + }, + { + "epoch": 0.16518539091792805, + "grad_norm": 4.311718973436578, + "learning_rate": 9.528749629947947e-06, + "loss": 0.7679, + "step": 2379 + }, + { + "epoch": 0.1652548257186502, + "grad_norm": 4.398435652130076, + "learning_rate": 9.528272943595023e-06, + "loss": 0.6875, + "step": 2380 + }, + { + "epoch": 0.1653242605193723, + "grad_norm": 4.0594377040309135, + "learning_rate": 9.52779602820718e-06, + "loss": 0.4043, + "step": 2381 + }, + { + "epoch": 0.16539369532009443, + "grad_norm": 3.9290419341809315, + "learning_rate": 9.527318883808535e-06, + "loss": 0.6643, + "step": 2382 + }, + { + "epoch": 0.16546313012081657, + "grad_norm": 4.76096238194875, + "learning_rate": 9.526841510423223e-06, + "loss": 0.7886, + "step": 2383 + }, + { + "epoch": 0.16553256492153867, + "grad_norm": 3.1263155832019445, + "learning_rate": 9.526363908075391e-06, + "loss": 0.3825, + "step": 2384 + }, + { + "epoch": 0.1656019997222608, + "grad_norm": 4.468461768655371, + "learning_rate": 9.525886076789195e-06, + "loss": 0.593, + "step": 2385 + }, + { + "epoch": 0.1656714345229829, + "grad_norm": 5.311994081990736, + "learning_rate": 9.525408016588802e-06, + "loss": 0.6052, + "step": 2386 + }, + { + "epoch": 0.16574086932370505, + "grad_norm": 3.8546551951799155, + "learning_rate": 9.524929727498393e-06, + "loss": 0.3102, + "step": 2387 + }, + { + "epoch": 0.16581030412442715, + "grad_norm": 4.552532562299914, + "learning_rate": 9.524451209542157e-06, + "loss": 0.8007, + "step": 2388 + }, + { + "epoch": 0.1658797389251493, + "grad_norm": 3.838363034448072, + "learning_rate": 9.5239724627443e-06, + "loss": 0.4641, + "step": 2389 + }, + { + "epoch": 0.1659491737258714, + "grad_norm": 3.618034399924716, + "learning_rate": 9.523493487129035e-06, + "loss": 0.5616, + "step": 2390 + }, + { + "epoch": 0.16601860852659353, + "grad_norm": 3.3708671723703425, + "learning_rate": 9.523014282720587e-06, + "loss": 0.2932, + "step": 2391 + }, + { + "epoch": 0.16608804332731564, + "grad_norm": 3.7174677994420113, + "learning_rate": 9.522534849543198e-06, + "loss": 0.492, + "step": 2392 + }, + { + "epoch": 0.16615747812803777, + "grad_norm": 4.176829289780711, + "learning_rate": 9.52205518762111e-06, + "loss": 0.5082, + "step": 2393 + }, + { + "epoch": 0.1662269129287599, + "grad_norm": 4.615197202309693, + "learning_rate": 9.52157529697859e-06, + "loss": 0.6968, + "step": 2394 + }, + { + "epoch": 0.166296347729482, + "grad_norm": 3.37278870722557, + "learning_rate": 9.521095177639905e-06, + "loss": 0.4376, + "step": 2395 + }, + { + "epoch": 0.16636578253020415, + "grad_norm": 2.8535398286940605, + "learning_rate": 9.520614829629344e-06, + "loss": 0.3118, + "step": 2396 + }, + { + "epoch": 0.16643521733092626, + 
"grad_norm": 4.043381898561645, + "learning_rate": 9.520134252971201e-06, + "loss": 0.4571, + "step": 2397 + }, + { + "epoch": 0.1665046521316484, + "grad_norm": 3.694026701209633, + "learning_rate": 9.519653447689782e-06, + "loss": 0.4708, + "step": 2398 + }, + { + "epoch": 0.1665740869323705, + "grad_norm": 4.10414508332498, + "learning_rate": 9.519172413809405e-06, + "loss": 0.6155, + "step": 2399 + }, + { + "epoch": 0.16664352173309263, + "grad_norm": 4.552246253317947, + "learning_rate": 9.518691151354403e-06, + "loss": 0.4564, + "step": 2400 + }, + { + "epoch": 0.16671295653381474, + "grad_norm": 4.528597680302501, + "learning_rate": 9.518209660349114e-06, + "loss": 0.4594, + "step": 2401 + }, + { + "epoch": 0.16678239133453687, + "grad_norm": 3.9139058579759443, + "learning_rate": 9.517727940817893e-06, + "loss": 0.6658, + "step": 2402 + }, + { + "epoch": 0.16685182613525898, + "grad_norm": 4.468573084623158, + "learning_rate": 9.517245992785107e-06, + "loss": 0.5455, + "step": 2403 + }, + { + "epoch": 0.16692126093598111, + "grad_norm": 3.7708704107736986, + "learning_rate": 9.516763816275128e-06, + "loss": 0.5429, + "step": 2404 + }, + { + "epoch": 0.16699069573670325, + "grad_norm": 4.131363708080557, + "learning_rate": 9.516281411312347e-06, + "loss": 0.5911, + "step": 2405 + }, + { + "epoch": 0.16706013053742536, + "grad_norm": 3.4773982007532065, + "learning_rate": 9.515798777921163e-06, + "loss": 0.3994, + "step": 2406 + }, + { + "epoch": 0.1671295653381475, + "grad_norm": 4.3617090557855, + "learning_rate": 9.515315916125987e-06, + "loss": 0.516, + "step": 2407 + }, + { + "epoch": 0.1671990001388696, + "grad_norm": 3.747226463171343, + "learning_rate": 9.51483282595124e-06, + "loss": 0.497, + "step": 2408 + }, + { + "epoch": 0.16726843493959173, + "grad_norm": 3.7462229647053786, + "learning_rate": 9.514349507421359e-06, + "loss": 0.4571, + "step": 2409 + }, + { + "epoch": 0.16733786974031384, + "grad_norm": 3.6885696168511917, + "learning_rate": 9.513865960560788e-06, + "loss": 0.5938, + "step": 2410 + }, + { + "epoch": 0.16740730454103597, + "grad_norm": 4.6336556338151045, + "learning_rate": 9.513382185393984e-06, + "loss": 0.7066, + "step": 2411 + }, + { + "epoch": 0.16747673934175808, + "grad_norm": 3.5338751812132054, + "learning_rate": 9.512898181945418e-06, + "loss": 0.3413, + "step": 2412 + }, + { + "epoch": 0.16754617414248021, + "grad_norm": 4.066274858770111, + "learning_rate": 9.512413950239566e-06, + "loss": 0.3728, + "step": 2413 + }, + { + "epoch": 0.16761560894320232, + "grad_norm": 4.5401311505861655, + "learning_rate": 9.511929490300924e-06, + "loss": 0.5686, + "step": 2414 + }, + { + "epoch": 0.16768504374392446, + "grad_norm": 4.494541541384937, + "learning_rate": 9.511444802153995e-06, + "loss": 0.8168, + "step": 2415 + }, + { + "epoch": 0.1677544785446466, + "grad_norm": 3.9445172571290996, + "learning_rate": 9.510959885823289e-06, + "loss": 0.5884, + "step": 2416 + }, + { + "epoch": 0.1678239133453687, + "grad_norm": 3.8153756099880005, + "learning_rate": 9.510474741333341e-06, + "loss": 0.3322, + "step": 2417 + }, + { + "epoch": 0.16789334814609083, + "grad_norm": 4.269161473790331, + "learning_rate": 9.509989368708684e-06, + "loss": 0.7182, + "step": 2418 + }, + { + "epoch": 0.16796278294681294, + "grad_norm": 3.3826141088984127, + "learning_rate": 9.509503767973866e-06, + "loss": 0.3696, + "step": 2419 + }, + { + "epoch": 0.16803221774753507, + "grad_norm": 4.15557468886651, + "learning_rate": 9.509017939153452e-06, + "loss": 0.3244, + "step": 2420 
+ }, + { + "epoch": 0.16810165254825718, + "grad_norm": 3.6718491976743928, + "learning_rate": 9.50853188227201e-06, + "loss": 0.5639, + "step": 2421 + }, + { + "epoch": 0.16817108734897931, + "grad_norm": 4.6583416673393065, + "learning_rate": 9.50804559735413e-06, + "loss": 0.6644, + "step": 2422 + }, + { + "epoch": 0.16824052214970142, + "grad_norm": 3.79596881146758, + "learning_rate": 9.507559084424404e-06, + "loss": 0.5245, + "step": 2423 + }, + { + "epoch": 0.16830995695042356, + "grad_norm": 4.156108840786895, + "learning_rate": 9.50707234350744e-06, + "loss": 0.3648, + "step": 2424 + }, + { + "epoch": 0.16837939175114566, + "grad_norm": 3.689455522010027, + "learning_rate": 9.506585374627857e-06, + "loss": 0.3116, + "step": 2425 + }, + { + "epoch": 0.1684488265518678, + "grad_norm": 3.755881942223915, + "learning_rate": 9.506098177810286e-06, + "loss": 0.3292, + "step": 2426 + }, + { + "epoch": 0.1685182613525899, + "grad_norm": 2.8024359395239467, + "learning_rate": 9.505610753079367e-06, + "loss": 0.2049, + "step": 2427 + }, + { + "epoch": 0.16858769615331204, + "grad_norm": 4.49714945130422, + "learning_rate": 9.505123100459755e-06, + "loss": 0.7371, + "step": 2428 + }, + { + "epoch": 0.16865713095403417, + "grad_norm": 5.312037461605566, + "learning_rate": 9.504635219976114e-06, + "loss": 0.7069, + "step": 2429 + }, + { + "epoch": 0.16872656575475628, + "grad_norm": 4.205291103104701, + "learning_rate": 9.504147111653123e-06, + "loss": 0.6004, + "step": 2430 + }, + { + "epoch": 0.16879600055547841, + "grad_norm": 5.245704109499341, + "learning_rate": 9.503658775515466e-06, + "loss": 0.6133, + "step": 2431 + }, + { + "epoch": 0.16886543535620052, + "grad_norm": 4.978854456845057, + "learning_rate": 9.503170211587845e-06, + "loss": 0.5544, + "step": 2432 + }, + { + "epoch": 0.16893487015692266, + "grad_norm": 3.6776073538204703, + "learning_rate": 9.50268141989497e-06, + "loss": 0.3496, + "step": 2433 + }, + { + "epoch": 0.16900430495764476, + "grad_norm": 3.0046077463745084, + "learning_rate": 9.502192400461565e-06, + "loss": 0.2767, + "step": 2434 + }, + { + "epoch": 0.1690737397583669, + "grad_norm": 4.282518730750572, + "learning_rate": 9.501703153312361e-06, + "loss": 0.5636, + "step": 2435 + }, + { + "epoch": 0.169143174559089, + "grad_norm": 4.661711579009234, + "learning_rate": 9.501213678472107e-06, + "loss": 0.5752, + "step": 2436 + }, + { + "epoch": 0.16921260935981114, + "grad_norm": 4.7422146279079795, + "learning_rate": 9.500723975965558e-06, + "loss": 0.6866, + "step": 2437 + }, + { + "epoch": 0.16928204416053325, + "grad_norm": 4.3380421612000974, + "learning_rate": 9.500234045817485e-06, + "loss": 0.5878, + "step": 2438 + }, + { + "epoch": 0.16935147896125538, + "grad_norm": 4.60943920474025, + "learning_rate": 9.499743888052663e-06, + "loss": 0.6599, + "step": 2439 + }, + { + "epoch": 0.16942091376197752, + "grad_norm": 3.867257208741439, + "learning_rate": 9.49925350269589e-06, + "loss": 0.6893, + "step": 2440 + }, + { + "epoch": 0.16949034856269962, + "grad_norm": 2.4610368979572685, + "learning_rate": 9.498762889771964e-06, + "loss": 0.1735, + "step": 2441 + }, + { + "epoch": 0.16955978336342176, + "grad_norm": 4.408139180335056, + "learning_rate": 9.498272049305703e-06, + "loss": 0.3538, + "step": 2442 + }, + { + "epoch": 0.16962921816414386, + "grad_norm": 3.799621706654066, + "learning_rate": 9.497780981321931e-06, + "loss": 0.4171, + "step": 2443 + }, + { + "epoch": 0.169698652964866, + "grad_norm": 4.464408553967309, + "learning_rate": 
9.497289685845487e-06, + "loss": 0.5159, + "step": 2444 + }, + { + "epoch": 0.1697680877655881, + "grad_norm": 4.876331346663356, + "learning_rate": 9.49679816290122e-06, + "loss": 0.4691, + "step": 2445 + }, + { + "epoch": 0.16983752256631024, + "grad_norm": 3.189575232540383, + "learning_rate": 9.496306412513989e-06, + "loss": 0.3184, + "step": 2446 + }, + { + "epoch": 0.16990695736703235, + "grad_norm": 4.890500059741749, + "learning_rate": 9.49581443470867e-06, + "loss": 0.4038, + "step": 2447 + }, + { + "epoch": 0.16997639216775448, + "grad_norm": 3.7978752822500517, + "learning_rate": 9.495322229510142e-06, + "loss": 0.4084, + "step": 2448 + }, + { + "epoch": 0.1700458269684766, + "grad_norm": 4.3587212771452375, + "learning_rate": 9.494829796943304e-06, + "loss": 0.6418, + "step": 2449 + }, + { + "epoch": 0.17011526176919872, + "grad_norm": 5.007592297039119, + "learning_rate": 9.49433713703306e-06, + "loss": 0.7855, + "step": 2450 + }, + { + "epoch": 0.17018469656992086, + "grad_norm": 4.04901020610556, + "learning_rate": 9.493844249804328e-06, + "loss": 0.5663, + "step": 2451 + }, + { + "epoch": 0.17025413137064296, + "grad_norm": 4.310272379992436, + "learning_rate": 9.493351135282043e-06, + "loss": 0.5723, + "step": 2452 + }, + { + "epoch": 0.1703235661713651, + "grad_norm": 3.8364400491606734, + "learning_rate": 9.492857793491138e-06, + "loss": 0.2925, + "step": 2453 + }, + { + "epoch": 0.1703930009720872, + "grad_norm": 3.63145789351474, + "learning_rate": 9.492364224456574e-06, + "loss": 0.3312, + "step": 2454 + }, + { + "epoch": 0.17046243577280934, + "grad_norm": 4.77388187844432, + "learning_rate": 9.491870428203306e-06, + "loss": 0.7356, + "step": 2455 + }, + { + "epoch": 0.17053187057353145, + "grad_norm": 3.277398461369125, + "learning_rate": 9.491376404756318e-06, + "loss": 0.2545, + "step": 2456 + }, + { + "epoch": 0.17060130537425358, + "grad_norm": 3.2382704299524687, + "learning_rate": 9.490882154140594e-06, + "loss": 0.4156, + "step": 2457 + }, + { + "epoch": 0.1706707401749757, + "grad_norm": 3.9234597465667274, + "learning_rate": 9.490387676381132e-06, + "loss": 0.4654, + "step": 2458 + }, + { + "epoch": 0.17074017497569782, + "grad_norm": 3.8773634209709313, + "learning_rate": 9.489892971502941e-06, + "loss": 0.3416, + "step": 2459 + }, + { + "epoch": 0.17080960977641993, + "grad_norm": 4.126082767822525, + "learning_rate": 9.489398039531045e-06, + "loss": 0.6697, + "step": 2460 + }, + { + "epoch": 0.17087904457714206, + "grad_norm": 4.509680058161002, + "learning_rate": 9.488902880490476e-06, + "loss": 0.3857, + "step": 2461 + }, + { + "epoch": 0.1709484793778642, + "grad_norm": 3.2936954499385185, + "learning_rate": 9.48840749440628e-06, + "loss": 0.4885, + "step": 2462 + }, + { + "epoch": 0.1710179141785863, + "grad_norm": 4.446826991135186, + "learning_rate": 9.487911881303512e-06, + "loss": 0.4549, + "step": 2463 + }, + { + "epoch": 0.17108734897930844, + "grad_norm": 5.616094797782135, + "learning_rate": 9.487416041207239e-06, + "loss": 0.679, + "step": 2464 + }, + { + "epoch": 0.17115678378003055, + "grad_norm": 5.334464652359958, + "learning_rate": 9.486919974142539e-06, + "loss": 0.8263, + "step": 2465 + }, + { + "epoch": 0.17122621858075268, + "grad_norm": 4.012462243737707, + "learning_rate": 9.486423680134506e-06, + "loss": 0.5833, + "step": 2466 + }, + { + "epoch": 0.1712956533814748, + "grad_norm": 4.2629563988376376, + "learning_rate": 9.485927159208239e-06, + "loss": 0.5212, + "step": 2467 + }, + { + "epoch": 0.17136508818219692, + "grad_norm": 
4.307555934858232, + "learning_rate": 9.485430411388852e-06, + "loss": 0.712, + "step": 2468 + }, + { + "epoch": 0.17143452298291903, + "grad_norm": 4.572750886075486, + "learning_rate": 9.484933436701471e-06, + "loss": 0.7104, + "step": 2469 + }, + { + "epoch": 0.17150395778364116, + "grad_norm": 4.613429790628599, + "learning_rate": 9.484436235171232e-06, + "loss": 0.5473, + "step": 2470 + }, + { + "epoch": 0.17157339258436327, + "grad_norm": 4.332762239260598, + "learning_rate": 9.483938806823283e-06, + "loss": 0.3874, + "step": 2471 + }, + { + "epoch": 0.1716428273850854, + "grad_norm": 4.965580954508247, + "learning_rate": 9.483441151682781e-06, + "loss": 0.6581, + "step": 2472 + }, + { + "epoch": 0.17171226218580754, + "grad_norm": 4.37400242255445, + "learning_rate": 9.482943269774903e-06, + "loss": 0.5639, + "step": 2473 + }, + { + "epoch": 0.17178169698652965, + "grad_norm": 4.329458222806838, + "learning_rate": 9.482445161124824e-06, + "loss": 0.774, + "step": 2474 + }, + { + "epoch": 0.17185113178725178, + "grad_norm": 4.182391102070757, + "learning_rate": 9.481946825757742e-06, + "loss": 0.4937, + "step": 2475 + }, + { + "epoch": 0.1719205665879739, + "grad_norm": 4.558755052925949, + "learning_rate": 9.48144826369886e-06, + "loss": 0.6855, + "step": 2476 + }, + { + "epoch": 0.17199000138869602, + "grad_norm": 4.116640408597874, + "learning_rate": 9.480949474973398e-06, + "loss": 0.5586, + "step": 2477 + }, + { + "epoch": 0.17205943618941813, + "grad_norm": 6.134955614054857, + "learning_rate": 9.48045045960658e-06, + "loss": 0.6961, + "step": 2478 + }, + { + "epoch": 0.17212887099014026, + "grad_norm": 5.5211758815305485, + "learning_rate": 9.479951217623649e-06, + "loss": 0.9109, + "step": 2479 + }, + { + "epoch": 0.17219830579086237, + "grad_norm": 4.403960580482242, + "learning_rate": 9.479451749049855e-06, + "loss": 0.626, + "step": 2480 + }, + { + "epoch": 0.1722677405915845, + "grad_norm": 4.3508048321288495, + "learning_rate": 9.47895205391046e-06, + "loss": 0.5596, + "step": 2481 + }, + { + "epoch": 0.1723371753923066, + "grad_norm": 4.237715170632942, + "learning_rate": 9.478452132230738e-06, + "loss": 0.3406, + "step": 2482 + }, + { + "epoch": 0.17240661019302875, + "grad_norm": 3.7245643613229786, + "learning_rate": 9.477951984035975e-06, + "loss": 0.4676, + "step": 2483 + }, + { + "epoch": 0.17247604499375085, + "grad_norm": 4.791425834786883, + "learning_rate": 9.477451609351467e-06, + "loss": 0.8774, + "step": 2484 + }, + { + "epoch": 0.172545479794473, + "grad_norm": 3.7346370031303193, + "learning_rate": 9.476951008202523e-06, + "loss": 0.4457, + "step": 2485 + }, + { + "epoch": 0.17261491459519512, + "grad_norm": 4.216735665638584, + "learning_rate": 9.476450180614465e-06, + "loss": 0.5258, + "step": 2486 + }, + { + "epoch": 0.17268434939591723, + "grad_norm": 4.705078524019482, + "learning_rate": 9.475949126612622e-06, + "loss": 0.6157, + "step": 2487 + }, + { + "epoch": 0.17275378419663937, + "grad_norm": 3.126093149768665, + "learning_rate": 9.475447846222336e-06, + "loss": 0.3631, + "step": 2488 + }, + { + "epoch": 0.17282321899736147, + "grad_norm": 4.768709168842983, + "learning_rate": 9.474946339468963e-06, + "loss": 0.2688, + "step": 2489 + }, + { + "epoch": 0.1728926537980836, + "grad_norm": 4.248480545995738, + "learning_rate": 9.474444606377869e-06, + "loss": 0.4894, + "step": 2490 + }, + { + "epoch": 0.1729620885988057, + "grad_norm": 3.7852532303774353, + "learning_rate": 9.473942646974428e-06, + "loss": 0.5209, + "step": 2491 + }, + { + "epoch": 
0.17303152339952785, + "grad_norm": 3.274787675820715, + "learning_rate": 9.473440461284032e-06, + "loss": 0.302, + "step": 2492 + }, + { + "epoch": 0.17310095820024995, + "grad_norm": 4.10101652755142, + "learning_rate": 9.472938049332077e-06, + "loss": 0.5499, + "step": 2493 + }, + { + "epoch": 0.1731703930009721, + "grad_norm": 5.698311623012093, + "learning_rate": 9.472435411143979e-06, + "loss": 0.7219, + "step": 2494 + }, + { + "epoch": 0.1732398278016942, + "grad_norm": 3.022957291163817, + "learning_rate": 9.471932546745157e-06, + "loss": 0.3453, + "step": 2495 + }, + { + "epoch": 0.17330926260241633, + "grad_norm": 5.570720864179199, + "learning_rate": 9.47142945616105e-06, + "loss": 0.5649, + "step": 2496 + }, + { + "epoch": 0.17337869740313847, + "grad_norm": 3.690666704461808, + "learning_rate": 9.470926139417099e-06, + "loss": 0.5439, + "step": 2497 + }, + { + "epoch": 0.17344813220386057, + "grad_norm": 4.8021056071034165, + "learning_rate": 9.470422596538762e-06, + "loss": 0.5627, + "step": 2498 + }, + { + "epoch": 0.1735175670045827, + "grad_norm": 3.799113231612468, + "learning_rate": 9.469918827551511e-06, + "loss": 0.4677, + "step": 2499 + }, + { + "epoch": 0.1735870018053048, + "grad_norm": 4.167959430426168, + "learning_rate": 9.469414832480824e-06, + "loss": 0.7912, + "step": 2500 + }, + { + "epoch": 0.17365643660602695, + "grad_norm": 3.539133572913732, + "learning_rate": 9.468910611352192e-06, + "loss": 0.4062, + "step": 2501 + }, + { + "epoch": 0.17372587140674906, + "grad_norm": 3.621334290097632, + "learning_rate": 9.468406164191115e-06, + "loss": 0.5838, + "step": 2502 + }, + { + "epoch": 0.1737953062074712, + "grad_norm": 4.572593454779693, + "learning_rate": 9.467901491023113e-06, + "loss": 0.7584, + "step": 2503 + }, + { + "epoch": 0.1738647410081933, + "grad_norm": 5.2394872572667275, + "learning_rate": 9.46739659187371e-06, + "loss": 0.7588, + "step": 2504 + }, + { + "epoch": 0.17393417580891543, + "grad_norm": 3.9627653974160157, + "learning_rate": 9.466891466768442e-06, + "loss": 0.5346, + "step": 2505 + }, + { + "epoch": 0.17400361060963754, + "grad_norm": 4.825983525269473, + "learning_rate": 9.466386115732857e-06, + "loss": 0.7055, + "step": 2506 + }, + { + "epoch": 0.17407304541035967, + "grad_norm": 3.7413348095963697, + "learning_rate": 9.465880538792519e-06, + "loss": 0.3747, + "step": 2507 + }, + { + "epoch": 0.1741424802110818, + "grad_norm": 3.3402827620686835, + "learning_rate": 9.465374735972994e-06, + "loss": 0.5207, + "step": 2508 + }, + { + "epoch": 0.17421191501180391, + "grad_norm": 3.8828334658948593, + "learning_rate": 9.46486870729987e-06, + "loss": 0.4463, + "step": 2509 + }, + { + "epoch": 0.17428134981252605, + "grad_norm": 4.1078424063160375, + "learning_rate": 9.464362452798738e-06, + "loss": 0.5495, + "step": 2510 + }, + { + "epoch": 0.17435078461324816, + "grad_norm": 4.4492975938808375, + "learning_rate": 9.463855972495204e-06, + "loss": 0.5307, + "step": 2511 + }, + { + "epoch": 0.1744202194139703, + "grad_norm": 3.317158215582718, + "learning_rate": 9.463349266414888e-06, + "loss": 0.2606, + "step": 2512 + }, + { + "epoch": 0.1744896542146924, + "grad_norm": 4.140430321705507, + "learning_rate": 9.462842334583415e-06, + "loss": 0.2702, + "step": 2513 + }, + { + "epoch": 0.17455908901541453, + "grad_norm": 3.553630993493244, + "learning_rate": 9.462335177026428e-06, + "loss": 0.4736, + "step": 2514 + }, + { + "epoch": 0.17462852381613664, + "grad_norm": 4.754250410093507, + "learning_rate": 9.461827793769576e-06, + "loss": 
0.4211, + "step": 2515 + }, + { + "epoch": 0.17469795861685877, + "grad_norm": 3.5810850361491946, + "learning_rate": 9.461320184838525e-06, + "loss": 0.3673, + "step": 2516 + }, + { + "epoch": 0.17476739341758088, + "grad_norm": 3.1074727212077518, + "learning_rate": 9.460812350258946e-06, + "loss": 0.2814, + "step": 2517 + }, + { + "epoch": 0.17483682821830301, + "grad_norm": 5.625353929190155, + "learning_rate": 9.460304290056527e-06, + "loss": 0.5769, + "step": 2518 + }, + { + "epoch": 0.17490626301902515, + "grad_norm": 4.39464902168474, + "learning_rate": 9.459796004256963e-06, + "loss": 0.6023, + "step": 2519 + }, + { + "epoch": 0.17497569781974726, + "grad_norm": 3.8670630639759933, + "learning_rate": 9.459287492885965e-06, + "loss": 0.395, + "step": 2520 + }, + { + "epoch": 0.1750451326204694, + "grad_norm": 4.975056511938133, + "learning_rate": 9.458778755969251e-06, + "loss": 0.5925, + "step": 2521 + }, + { + "epoch": 0.1751145674211915, + "grad_norm": 3.6703256448193344, + "learning_rate": 9.458269793532553e-06, + "loss": 0.3675, + "step": 2522 + }, + { + "epoch": 0.17518400222191363, + "grad_norm": 4.046397273775209, + "learning_rate": 9.457760605601613e-06, + "loss": 0.583, + "step": 2523 + }, + { + "epoch": 0.17525343702263574, + "grad_norm": 4.414716799176455, + "learning_rate": 9.457251192202185e-06, + "loss": 0.5963, + "step": 2524 + }, + { + "epoch": 0.17532287182335787, + "grad_norm": 4.914761629735875, + "learning_rate": 9.456741553360039e-06, + "loss": 0.6278, + "step": 2525 + }, + { + "epoch": 0.17539230662407998, + "grad_norm": 3.9052324029947716, + "learning_rate": 9.456231689100945e-06, + "loss": 0.5533, + "step": 2526 + }, + { + "epoch": 0.17546174142480211, + "grad_norm": 5.117364200082593, + "learning_rate": 9.455721599450696e-06, + "loss": 0.7875, + "step": 2527 + }, + { + "epoch": 0.17553117622552422, + "grad_norm": 5.289025429004056, + "learning_rate": 9.45521128443509e-06, + "loss": 0.6239, + "step": 2528 + }, + { + "epoch": 0.17560061102624636, + "grad_norm": 5.204444854119443, + "learning_rate": 9.454700744079941e-06, + "loss": 0.5305, + "step": 2529 + }, + { + "epoch": 0.17567004582696846, + "grad_norm": 2.6537194441694814, + "learning_rate": 9.454189978411068e-06, + "loss": 0.2434, + "step": 2530 + }, + { + "epoch": 0.1757394806276906, + "grad_norm": 4.3928848938700975, + "learning_rate": 9.453678987454305e-06, + "loss": 0.7166, + "step": 2531 + }, + { + "epoch": 0.17580891542841273, + "grad_norm": 3.1494994019101097, + "learning_rate": 9.453167771235499e-06, + "loss": 0.3704, + "step": 2532 + }, + { + "epoch": 0.17587835022913484, + "grad_norm": 4.165953584897143, + "learning_rate": 9.452656329780506e-06, + "loss": 0.3486, + "step": 2533 + }, + { + "epoch": 0.17594778502985697, + "grad_norm": 3.0588125029061946, + "learning_rate": 9.452144663115195e-06, + "loss": 0.3192, + "step": 2534 + }, + { + "epoch": 0.17601721983057908, + "grad_norm": 4.829807215733037, + "learning_rate": 9.451632771265444e-06, + "loss": 0.6823, + "step": 2535 + }, + { + "epoch": 0.17608665463130121, + "grad_norm": 5.503397799908635, + "learning_rate": 9.451120654257144e-06, + "loss": 0.8862, + "step": 2536 + }, + { + "epoch": 0.17615608943202332, + "grad_norm": 4.419571922372808, + "learning_rate": 9.4506083121162e-06, + "loss": 0.3456, + "step": 2537 + }, + { + "epoch": 0.17622552423274546, + "grad_norm": 3.435600822609814, + "learning_rate": 9.450095744868523e-06, + "loss": 0.342, + "step": 2538 + }, + { + "epoch": 0.17629495903346756, + "grad_norm": 3.9251691797279262, + 
"learning_rate": 9.44958295254004e-06, + "loss": 0.4531, + "step": 2539 + }, + { + "epoch": 0.1763643938341897, + "grad_norm": 3.5249949602663575, + "learning_rate": 9.449069935156684e-06, + "loss": 0.2855, + "step": 2540 + }, + { + "epoch": 0.1764338286349118, + "grad_norm": 3.8553430441706396, + "learning_rate": 9.448556692744406e-06, + "loss": 0.472, + "step": 2541 + }, + { + "epoch": 0.17650326343563394, + "grad_norm": 4.695961105175266, + "learning_rate": 9.448043225329166e-06, + "loss": 0.7451, + "step": 2542 + }, + { + "epoch": 0.17657269823635607, + "grad_norm": 3.1447620209924754, + "learning_rate": 9.447529532936932e-06, + "loss": 0.2998, + "step": 2543 + }, + { + "epoch": 0.17664213303707818, + "grad_norm": 5.034252418815933, + "learning_rate": 9.447015615593687e-06, + "loss": 0.5559, + "step": 2544 + }, + { + "epoch": 0.17671156783780032, + "grad_norm": 4.652850602310597, + "learning_rate": 9.446501473325422e-06, + "loss": 0.6781, + "step": 2545 + }, + { + "epoch": 0.17678100263852242, + "grad_norm": 3.791145253430967, + "learning_rate": 9.445987106158148e-06, + "loss": 0.532, + "step": 2546 + }, + { + "epoch": 0.17685043743924456, + "grad_norm": 3.645616754843282, + "learning_rate": 9.445472514117874e-06, + "loss": 0.5799, + "step": 2547 + }, + { + "epoch": 0.17691987223996666, + "grad_norm": 4.469863345440935, + "learning_rate": 9.444957697230633e-06, + "loss": 0.4935, + "step": 2548 + }, + { + "epoch": 0.1769893070406888, + "grad_norm": 3.6638120238355993, + "learning_rate": 9.44444265552246e-06, + "loss": 0.3045, + "step": 2549 + }, + { + "epoch": 0.1770587418414109, + "grad_norm": 6.4355196428719506, + "learning_rate": 9.443927389019407e-06, + "loss": 0.5495, + "step": 2550 + }, + { + "epoch": 0.17712817664213304, + "grad_norm": 4.828843155329521, + "learning_rate": 9.443411897747534e-06, + "loss": 0.7995, + "step": 2551 + }, + { + "epoch": 0.17719761144285515, + "grad_norm": 3.3413988283878817, + "learning_rate": 9.442896181732918e-06, + "loss": 0.2981, + "step": 2552 + }, + { + "epoch": 0.17726704624357728, + "grad_norm": 4.828586393264978, + "learning_rate": 9.44238024100164e-06, + "loss": 0.5184, + "step": 2553 + }, + { + "epoch": 0.17733648104429942, + "grad_norm": 4.374276013777648, + "learning_rate": 9.441864075579794e-06, + "loss": 0.6367, + "step": 2554 + }, + { + "epoch": 0.17740591584502152, + "grad_norm": 4.602794787741462, + "learning_rate": 9.44134768549349e-06, + "loss": 0.6283, + "step": 2555 + }, + { + "epoch": 0.17747535064574366, + "grad_norm": 3.825073669974407, + "learning_rate": 9.440831070768848e-06, + "loss": 0.518, + "step": 2556 + }, + { + "epoch": 0.17754478544646576, + "grad_norm": 3.512590050106786, + "learning_rate": 9.440314231431993e-06, + "loss": 0.3142, + "step": 2557 + }, + { + "epoch": 0.1776142202471879, + "grad_norm": 4.9653758107614525, + "learning_rate": 9.43979716750907e-06, + "loss": 0.4623, + "step": 2558 + }, + { + "epoch": 0.17768365504791, + "grad_norm": 4.221497483096389, + "learning_rate": 9.439279879026231e-06, + "loss": 0.5783, + "step": 2559 + }, + { + "epoch": 0.17775308984863214, + "grad_norm": 3.580331688334701, + "learning_rate": 9.438762366009638e-06, + "loss": 0.2322, + "step": 2560 + }, + { + "epoch": 0.17782252464935425, + "grad_norm": 6.075770423491884, + "learning_rate": 9.438244628485466e-06, + "loss": 0.7625, + "step": 2561 + }, + { + "epoch": 0.17789195945007638, + "grad_norm": 5.8891010305906715, + "learning_rate": 9.437726666479906e-06, + "loss": 0.8774, + "step": 2562 + }, + { + "epoch": 0.1779613942507985, 
+ "grad_norm": 4.437590072464308, + "learning_rate": 9.437208480019152e-06, + "loss": 0.3963, + "step": 2563 + }, + { + "epoch": 0.17803082905152062, + "grad_norm": 4.195769191252464, + "learning_rate": 9.436690069129413e-06, + "loss": 0.3887, + "step": 2564 + }, + { + "epoch": 0.17810026385224276, + "grad_norm": 4.623620089034488, + "learning_rate": 9.436171433836911e-06, + "loss": 0.5898, + "step": 2565 + }, + { + "epoch": 0.17816969865296486, + "grad_norm": 3.1361681145364133, + "learning_rate": 9.435652574167877e-06, + "loss": 0.2388, + "step": 2566 + }, + { + "epoch": 0.178239133453687, + "grad_norm": 3.945517694619383, + "learning_rate": 9.435133490148557e-06, + "loss": 0.4957, + "step": 2567 + }, + { + "epoch": 0.1783085682544091, + "grad_norm": 4.789306683601022, + "learning_rate": 9.434614181805203e-06, + "loss": 0.5541, + "step": 2568 + }, + { + "epoch": 0.17837800305513124, + "grad_norm": 4.211993493003448, + "learning_rate": 9.434094649164081e-06, + "loss": 0.5206, + "step": 2569 + }, + { + "epoch": 0.17844743785585335, + "grad_norm": 3.895569156452278, + "learning_rate": 9.43357489225147e-06, + "loss": 0.5212, + "step": 2570 + }, + { + "epoch": 0.17851687265657548, + "grad_norm": 3.8098220015554523, + "learning_rate": 9.433054911093658e-06, + "loss": 0.28, + "step": 2571 + }, + { + "epoch": 0.1785863074572976, + "grad_norm": 3.094355393548992, + "learning_rate": 9.432534705716945e-06, + "loss": 0.2913, + "step": 2572 + }, + { + "epoch": 0.17865574225801972, + "grad_norm": 4.075841825753508, + "learning_rate": 9.43201427614764e-06, + "loss": 0.4941, + "step": 2573 + }, + { + "epoch": 0.17872517705874183, + "grad_norm": 4.06290239638234, + "learning_rate": 9.431493622412072e-06, + "loss": 0.4982, + "step": 2574 + }, + { + "epoch": 0.17879461185946396, + "grad_norm": 3.1330324957417583, + "learning_rate": 9.430972744536568e-06, + "loss": 0.4332, + "step": 2575 + }, + { + "epoch": 0.1788640466601861, + "grad_norm": 4.604136494096769, + "learning_rate": 9.430451642547478e-06, + "loss": 0.5489, + "step": 2576 + }, + { + "epoch": 0.1789334814609082, + "grad_norm": 4.149784820732293, + "learning_rate": 9.429930316471158e-06, + "loss": 0.6032, + "step": 2577 + }, + { + "epoch": 0.17900291626163034, + "grad_norm": 3.383524892839391, + "learning_rate": 9.429408766333974e-06, + "loss": 0.5634, + "step": 2578 + }, + { + "epoch": 0.17907235106235245, + "grad_norm": 4.9137357438599025, + "learning_rate": 9.428886992162307e-06, + "loss": 0.7109, + "step": 2579 + }, + { + "epoch": 0.17914178586307458, + "grad_norm": 5.114699088246984, + "learning_rate": 9.428364993982548e-06, + "loss": 1.0056, + "step": 2580 + }, + { + "epoch": 0.1792112206637967, + "grad_norm": 3.678788204054091, + "learning_rate": 9.427842771821099e-06, + "loss": 0.3424, + "step": 2581 + }, + { + "epoch": 0.17928065546451882, + "grad_norm": 4.201820132548689, + "learning_rate": 9.427320325704372e-06, + "loss": 0.8085, + "step": 2582 + }, + { + "epoch": 0.17935009026524093, + "grad_norm": 2.4798176732912096, + "learning_rate": 9.426797655658793e-06, + "loss": 0.1866, + "step": 2583 + }, + { + "epoch": 0.17941952506596306, + "grad_norm": 4.660639319594791, + "learning_rate": 9.426274761710799e-06, + "loss": 0.8595, + "step": 2584 + }, + { + "epoch": 0.17948895986668517, + "grad_norm": 4.765098857506229, + "learning_rate": 9.425751643886835e-06, + "loss": 0.4, + "step": 2585 + }, + { + "epoch": 0.1795583946674073, + "grad_norm": 3.771592890418262, + "learning_rate": 9.42522830221336e-06, + "loss": 0.2954, + "step": 2586 + }, + 
{ + "epoch": 0.1796278294681294, + "grad_norm": 3.8275180226428698, + "learning_rate": 9.424704736716847e-06, + "loss": 0.6602, + "step": 2587 + }, + { + "epoch": 0.17969726426885155, + "grad_norm": 4.30850383427892, + "learning_rate": 9.424180947423774e-06, + "loss": 0.6607, + "step": 2588 + }, + { + "epoch": 0.17976669906957368, + "grad_norm": 4.79549595289997, + "learning_rate": 9.423656934360637e-06, + "loss": 0.672, + "step": 2589 + }, + { + "epoch": 0.1798361338702958, + "grad_norm": 3.3059563850323648, + "learning_rate": 9.423132697553937e-06, + "loss": 0.3413, + "step": 2590 + }, + { + "epoch": 0.17990556867101792, + "grad_norm": 4.016048624923509, + "learning_rate": 9.422608237030189e-06, + "loss": 0.4841, + "step": 2591 + }, + { + "epoch": 0.17997500347174003, + "grad_norm": 3.5180965141476728, + "learning_rate": 9.422083552815922e-06, + "loss": 0.3386, + "step": 2592 + }, + { + "epoch": 0.18004443827246217, + "grad_norm": 4.25964721047945, + "learning_rate": 9.421558644937672e-06, + "loss": 0.3766, + "step": 2593 + }, + { + "epoch": 0.18011387307318427, + "grad_norm": 3.785716005792397, + "learning_rate": 9.42103351342199e-06, + "loss": 0.4496, + "step": 2594 + }, + { + "epoch": 0.1801833078739064, + "grad_norm": 4.182574179469882, + "learning_rate": 9.420508158295434e-06, + "loss": 0.5564, + "step": 2595 + }, + { + "epoch": 0.1802527426746285, + "grad_norm": 2.996819485798898, + "learning_rate": 9.419982579584578e-06, + "loss": 0.2496, + "step": 2596 + }, + { + "epoch": 0.18032217747535065, + "grad_norm": 3.5301387664346193, + "learning_rate": 9.419456777316006e-06, + "loss": 0.3734, + "step": 2597 + }, + { + "epoch": 0.18039161227607275, + "grad_norm": 3.821625108864334, + "learning_rate": 9.41893075151631e-06, + "loss": 0.4334, + "step": 2598 + }, + { + "epoch": 0.1804610470767949, + "grad_norm": 2.8667496492111826, + "learning_rate": 9.418404502212098e-06, + "loss": 0.2425, + "step": 2599 + }, + { + "epoch": 0.18053048187751702, + "grad_norm": 4.644936523177233, + "learning_rate": 9.417878029429985e-06, + "loss": 0.4482, + "step": 2600 + }, + { + "epoch": 0.18059991667823913, + "grad_norm": 4.578899850787322, + "learning_rate": 9.417351333196602e-06, + "loss": 0.4448, + "step": 2601 + }, + { + "epoch": 0.18066935147896127, + "grad_norm": 3.9513336860389603, + "learning_rate": 9.416824413538585e-06, + "loss": 0.4418, + "step": 2602 + }, + { + "epoch": 0.18073878627968337, + "grad_norm": 3.9088965785236907, + "learning_rate": 9.416297270482589e-06, + "loss": 0.2561, + "step": 2603 + }, + { + "epoch": 0.1808082210804055, + "grad_norm": 4.97371150714962, + "learning_rate": 9.415769904055274e-06, + "loss": 0.7128, + "step": 2604 + }, + { + "epoch": 0.1808776558811276, + "grad_norm": 4.7474894520208215, + "learning_rate": 9.415242314283312e-06, + "loss": 0.6806, + "step": 2605 + }, + { + "epoch": 0.18094709068184975, + "grad_norm": 4.621623965915265, + "learning_rate": 9.414714501193393e-06, + "loss": 0.7374, + "step": 2606 + }, + { + "epoch": 0.18101652548257186, + "grad_norm": 3.4151302464723146, + "learning_rate": 9.414186464812208e-06, + "loss": 0.5039, + "step": 2607 + }, + { + "epoch": 0.181085960283294, + "grad_norm": 3.867066680550426, + "learning_rate": 9.413658205166467e-06, + "loss": 0.4512, + "step": 2608 + }, + { + "epoch": 0.1811553950840161, + "grad_norm": 3.7529630769956643, + "learning_rate": 9.41312972228289e-06, + "loss": 0.482, + "step": 2609 + }, + { + "epoch": 0.18122482988473823, + "grad_norm": 3.752713128661412, + "learning_rate": 9.412601016188204e-06, + 
"loss": 0.525, + "step": 2610 + }, + { + "epoch": 0.18129426468546037, + "grad_norm": 4.270473284220056, + "learning_rate": 9.412072086909152e-06, + "loss": 0.3234, + "step": 2611 + }, + { + "epoch": 0.18136369948618247, + "grad_norm": 4.835264370030915, + "learning_rate": 9.411542934472487e-06, + "loss": 0.7415, + "step": 2612 + }, + { + "epoch": 0.1814331342869046, + "grad_norm": 3.986420311435972, + "learning_rate": 9.41101355890497e-06, + "loss": 0.3977, + "step": 2613 + }, + { + "epoch": 0.18150256908762671, + "grad_norm": 6.500336539729755, + "learning_rate": 9.41048396023338e-06, + "loss": 0.5897, + "step": 2614 + }, + { + "epoch": 0.18157200388834885, + "grad_norm": 4.478610637460873, + "learning_rate": 9.409954138484502e-06, + "loss": 0.59, + "step": 2615 + }, + { + "epoch": 0.18164143868907096, + "grad_norm": 4.059114296021768, + "learning_rate": 9.409424093685136e-06, + "loss": 0.6371, + "step": 2616 + }, + { + "epoch": 0.1817108734897931, + "grad_norm": 4.712199768856567, + "learning_rate": 9.408893825862087e-06, + "loss": 0.5524, + "step": 2617 + }, + { + "epoch": 0.1817803082905152, + "grad_norm": 4.829087755183378, + "learning_rate": 9.408363335042176e-06, + "loss": 0.5665, + "step": 2618 + }, + { + "epoch": 0.18184974309123733, + "grad_norm": 4.23453680908939, + "learning_rate": 9.407832621252238e-06, + "loss": 0.6399, + "step": 2619 + }, + { + "epoch": 0.18191917789195944, + "grad_norm": 4.948459135831155, + "learning_rate": 9.407301684519114e-06, + "loss": 0.4331, + "step": 2620 + }, + { + "epoch": 0.18198861269268157, + "grad_norm": 3.4723506064251817, + "learning_rate": 9.406770524869657e-06, + "loss": 0.4795, + "step": 2621 + }, + { + "epoch": 0.1820580474934037, + "grad_norm": 5.035648561846344, + "learning_rate": 9.406239142330734e-06, + "loss": 0.4367, + "step": 2622 + }, + { + "epoch": 0.18212748229412581, + "grad_norm": 3.863234836385909, + "learning_rate": 9.405707536929221e-06, + "loss": 0.4996, + "step": 2623 + }, + { + "epoch": 0.18219691709484795, + "grad_norm": 4.065753722644651, + "learning_rate": 9.405175708692005e-06, + "loss": 0.493, + "step": 2624 + }, + { + "epoch": 0.18226635189557006, + "grad_norm": 3.8633272158548846, + "learning_rate": 9.404643657645988e-06, + "loss": 0.4596, + "step": 2625 + }, + { + "epoch": 0.1823357866962922, + "grad_norm": 4.2708432403884675, + "learning_rate": 9.40411138381808e-06, + "loss": 0.3629, + "step": 2626 + }, + { + "epoch": 0.1824052214970143, + "grad_norm": 4.455077326166294, + "learning_rate": 9.403578887235198e-06, + "loss": 0.6898, + "step": 2627 + }, + { + "epoch": 0.18247465629773643, + "grad_norm": 2.871854933908505, + "learning_rate": 9.403046167924282e-06, + "loss": 0.4401, + "step": 2628 + }, + { + "epoch": 0.18254409109845854, + "grad_norm": 3.6874690350425587, + "learning_rate": 9.402513225912273e-06, + "loss": 0.5697, + "step": 2629 + }, + { + "epoch": 0.18261352589918067, + "grad_norm": 4.189768546522508, + "learning_rate": 9.401980061226126e-06, + "loss": 0.4704, + "step": 2630 + }, + { + "epoch": 0.18268296069990278, + "grad_norm": 4.209606084958585, + "learning_rate": 9.40144667389281e-06, + "loss": 0.4992, + "step": 2631 + }, + { + "epoch": 0.18275239550062491, + "grad_norm": 4.368302939734846, + "learning_rate": 9.400913063939299e-06, + "loss": 0.6804, + "step": 2632 + }, + { + "epoch": 0.18282183030134702, + "grad_norm": 3.755068095445998, + "learning_rate": 9.400379231392587e-06, + "loss": 0.4957, + "step": 2633 + }, + { + "epoch": 0.18289126510206916, + "grad_norm": 3.6279847827056257, + 
"learning_rate": 9.399845176279673e-06, + "loss": 0.5565, + "step": 2634 + }, + { + "epoch": 0.1829606999027913, + "grad_norm": 4.830164038300566, + "learning_rate": 9.39931089862757e-06, + "loss": 0.6452, + "step": 2635 + }, + { + "epoch": 0.1830301347035134, + "grad_norm": 3.842295137568818, + "learning_rate": 9.398776398463296e-06, + "loss": 0.3495, + "step": 2636 + }, + { + "epoch": 0.18309956950423553, + "grad_norm": 3.8495804633275235, + "learning_rate": 9.398241675813893e-06, + "loss": 0.6371, + "step": 2637 + }, + { + "epoch": 0.18316900430495764, + "grad_norm": 4.103967734243168, + "learning_rate": 9.397706730706402e-06, + "loss": 0.5425, + "step": 2638 + }, + { + "epoch": 0.18323843910567977, + "grad_norm": 4.6841532757175415, + "learning_rate": 9.39717156316788e-06, + "loss": 0.5694, + "step": 2639 + }, + { + "epoch": 0.18330787390640188, + "grad_norm": 5.649685095767387, + "learning_rate": 9.396636173225398e-06, + "loss": 0.8654, + "step": 2640 + }, + { + "epoch": 0.18337730870712401, + "grad_norm": 4.061522963793848, + "learning_rate": 9.39610056090603e-06, + "loss": 0.5021, + "step": 2641 + }, + { + "epoch": 0.18344674350784612, + "grad_norm": 3.464055033847907, + "learning_rate": 9.395564726236872e-06, + "loss": 0.3587, + "step": 2642 + }, + { + "epoch": 0.18351617830856826, + "grad_norm": 5.045865020505513, + "learning_rate": 9.395028669245025e-06, + "loss": 0.9348, + "step": 2643 + }, + { + "epoch": 0.18358561310929036, + "grad_norm": 4.455677300812232, + "learning_rate": 9.394492389957602e-06, + "loss": 0.7207, + "step": 2644 + }, + { + "epoch": 0.1836550479100125, + "grad_norm": 3.4874376683372055, + "learning_rate": 9.393955888401725e-06, + "loss": 0.3084, + "step": 2645 + }, + { + "epoch": 0.18372448271073463, + "grad_norm": 3.5953519400125398, + "learning_rate": 9.39341916460453e-06, + "loss": 0.4434, + "step": 2646 + }, + { + "epoch": 0.18379391751145674, + "grad_norm": 4.362552652175736, + "learning_rate": 9.392882218593166e-06, + "loss": 0.7288, + "step": 2647 + }, + { + "epoch": 0.18386335231217887, + "grad_norm": 4.2848285050376695, + "learning_rate": 9.392345050394792e-06, + "loss": 0.5396, + "step": 2648 + }, + { + "epoch": 0.18393278711290098, + "grad_norm": 3.746814563243955, + "learning_rate": 9.391807660036575e-06, + "loss": 0.3151, + "step": 2649 + }, + { + "epoch": 0.18400222191362312, + "grad_norm": 3.4428445199161866, + "learning_rate": 9.391270047545695e-06, + "loss": 0.3143, + "step": 2650 + }, + { + "epoch": 0.18407165671434522, + "grad_norm": 3.615423917648399, + "learning_rate": 9.390732212949345e-06, + "loss": 0.3328, + "step": 2651 + }, + { + "epoch": 0.18414109151506736, + "grad_norm": 3.8539542676126404, + "learning_rate": 9.39019415627473e-06, + "loss": 0.4763, + "step": 2652 + }, + { + "epoch": 0.18421052631578946, + "grad_norm": 3.589699469516653, + "learning_rate": 9.389655877549062e-06, + "loss": 0.5355, + "step": 2653 + }, + { + "epoch": 0.1842799611165116, + "grad_norm": 4.7557986634837395, + "learning_rate": 9.389117376799565e-06, + "loss": 0.6126, + "step": 2654 + }, + { + "epoch": 0.1843493959172337, + "grad_norm": 4.252885564188266, + "learning_rate": 9.38857865405348e-06, + "loss": 0.6426, + "step": 2655 + }, + { + "epoch": 0.18441883071795584, + "grad_norm": 4.850771133809857, + "learning_rate": 9.38803970933805e-06, + "loss": 0.6627, + "step": 2656 + }, + { + "epoch": 0.18448826551867797, + "grad_norm": 4.745332883283432, + "learning_rate": 9.38750054268054e-06, + "loss": 0.7279, + "step": 2657 + }, + { + "epoch": 
0.18455770031940008, + "grad_norm": 4.998169672481638, + "learning_rate": 9.386961154108216e-06, + "loss": 0.8132, + "step": 2658 + }, + { + "epoch": 0.18462713512012222, + "grad_norm": 4.216403963800552, + "learning_rate": 9.386421543648361e-06, + "loss": 0.429, + "step": 2659 + }, + { + "epoch": 0.18469656992084432, + "grad_norm": 3.285685174883587, + "learning_rate": 9.385881711328268e-06, + "loss": 0.2602, + "step": 2660 + }, + { + "epoch": 0.18476600472156646, + "grad_norm": 4.2732567440425875, + "learning_rate": 9.38534165717524e-06, + "loss": 0.4432, + "step": 2661 + }, + { + "epoch": 0.18483543952228856, + "grad_norm": 4.515064034558809, + "learning_rate": 9.384801381216595e-06, + "loss": 0.3799, + "step": 2662 + }, + { + "epoch": 0.1849048743230107, + "grad_norm": 3.999345716429541, + "learning_rate": 9.384260883479657e-06, + "loss": 0.5192, + "step": 2663 + }, + { + "epoch": 0.1849743091237328, + "grad_norm": 3.093273078533135, + "learning_rate": 9.383720163991763e-06, + "loss": 0.4324, + "step": 2664 + }, + { + "epoch": 0.18504374392445494, + "grad_norm": 3.253106532777847, + "learning_rate": 9.383179222780266e-06, + "loss": 0.3638, + "step": 2665 + }, + { + "epoch": 0.18511317872517705, + "grad_norm": 4.4860566801124175, + "learning_rate": 9.382638059872523e-06, + "loss": 0.3877, + "step": 2666 + }, + { + "epoch": 0.18518261352589918, + "grad_norm": 4.272186455965108, + "learning_rate": 9.382096675295906e-06, + "loss": 0.5467, + "step": 2667 + }, + { + "epoch": 0.18525204832662132, + "grad_norm": 4.63151004344994, + "learning_rate": 9.381555069077799e-06, + "loss": 0.5893, + "step": 2668 + }, + { + "epoch": 0.18532148312734342, + "grad_norm": 3.971234968677336, + "learning_rate": 9.381013241245593e-06, + "loss": 0.5765, + "step": 2669 + }, + { + "epoch": 0.18539091792806556, + "grad_norm": 4.62659117515569, + "learning_rate": 9.380471191826696e-06, + "loss": 0.8164, + "step": 2670 + }, + { + "epoch": 0.18546035272878766, + "grad_norm": 4.661653313611526, + "learning_rate": 9.379928920848523e-06, + "loss": 0.452, + "step": 2671 + }, + { + "epoch": 0.1855297875295098, + "grad_norm": 3.9862730516995115, + "learning_rate": 9.379386428338499e-06, + "loss": 0.3762, + "step": 2672 + }, + { + "epoch": 0.1855992223302319, + "grad_norm": 3.3959861824044153, + "learning_rate": 9.378843714324069e-06, + "loss": 0.3968, + "step": 2673 + }, + { + "epoch": 0.18566865713095404, + "grad_norm": 4.130213865502688, + "learning_rate": 9.378300778832676e-06, + "loss": 0.5228, + "step": 2674 + }, + { + "epoch": 0.18573809193167615, + "grad_norm": 4.000332228479085, + "learning_rate": 9.377757621891786e-06, + "loss": 0.6585, + "step": 2675 + }, + { + "epoch": 0.18580752673239828, + "grad_norm": 4.360601849195899, + "learning_rate": 9.377214243528868e-06, + "loss": 0.7967, + "step": 2676 + }, + { + "epoch": 0.1858769615331204, + "grad_norm": 3.7506733547067315, + "learning_rate": 9.376670643771405e-06, + "loss": 0.5865, + "step": 2677 + }, + { + "epoch": 0.18594639633384252, + "grad_norm": 4.293988253689358, + "learning_rate": 9.376126822646895e-06, + "loss": 0.6054, + "step": 2678 + }, + { + "epoch": 0.18601583113456466, + "grad_norm": 4.565326671518356, + "learning_rate": 9.375582780182843e-06, + "loss": 0.581, + "step": 2679 + }, + { + "epoch": 0.18608526593528676, + "grad_norm": 3.667405717599369, + "learning_rate": 9.375038516406765e-06, + "loss": 0.4897, + "step": 2680 + }, + { + "epoch": 0.1861547007360089, + "grad_norm": 4.674126851451416, + "learning_rate": 9.374494031346189e-06, + "loss": 
0.5175, + "step": 2681 + }, + { + "epoch": 0.186224135536731, + "grad_norm": 4.693795148754813, + "learning_rate": 9.373949325028655e-06, + "loss": 0.6797, + "step": 2682 + }, + { + "epoch": 0.18629357033745314, + "grad_norm": 4.517987558032381, + "learning_rate": 9.373404397481715e-06, + "loss": 0.6698, + "step": 2683 + }, + { + "epoch": 0.18636300513817525, + "grad_norm": 3.306546482658048, + "learning_rate": 9.372859248732928e-06, + "loss": 0.3668, + "step": 2684 + }, + { + "epoch": 0.18643243993889738, + "grad_norm": 3.4324960784679415, + "learning_rate": 9.372313878809868e-06, + "loss": 0.4192, + "step": 2685 + }, + { + "epoch": 0.1865018747396195, + "grad_norm": 3.893627845899475, + "learning_rate": 9.371768287740121e-06, + "loss": 0.4881, + "step": 2686 + }, + { + "epoch": 0.18657130954034162, + "grad_norm": 4.637353267076004, + "learning_rate": 9.371222475551281e-06, + "loss": 0.6366, + "step": 2687 + }, + { + "epoch": 0.18664074434106373, + "grad_norm": 5.496973660559162, + "learning_rate": 9.370676442270953e-06, + "loss": 0.4352, + "step": 2688 + }, + { + "epoch": 0.18671017914178586, + "grad_norm": 4.704562350998911, + "learning_rate": 9.370130187926759e-06, + "loss": 0.5153, + "step": 2689 + }, + { + "epoch": 0.18677961394250797, + "grad_norm": 4.099705667440132, + "learning_rate": 9.369583712546322e-06, + "loss": 0.5575, + "step": 2690 + }, + { + "epoch": 0.1868490487432301, + "grad_norm": 3.9142226362209414, + "learning_rate": 9.369037016157286e-06, + "loss": 0.6525, + "step": 2691 + }, + { + "epoch": 0.18691848354395224, + "grad_norm": 5.092744404946171, + "learning_rate": 9.368490098787302e-06, + "loss": 0.7738, + "step": 2692 + }, + { + "epoch": 0.18698791834467435, + "grad_norm": 4.366350397356477, + "learning_rate": 9.367942960464034e-06, + "loss": 0.6119, + "step": 2693 + }, + { + "epoch": 0.18705735314539648, + "grad_norm": 3.7796989173002964, + "learning_rate": 9.36739560121515e-06, + "loss": 0.3767, + "step": 2694 + }, + { + "epoch": 0.1871267879461186, + "grad_norm": 4.465157904727939, + "learning_rate": 9.366848021068341e-06, + "loss": 0.4584, + "step": 2695 + }, + { + "epoch": 0.18719622274684072, + "grad_norm": 3.8234237737463452, + "learning_rate": 9.366300220051301e-06, + "loss": 0.5005, + "step": 2696 + }, + { + "epoch": 0.18726565754756283, + "grad_norm": 3.750841668592917, + "learning_rate": 9.365752198191735e-06, + "loss": 0.4426, + "step": 2697 + }, + { + "epoch": 0.18733509234828497, + "grad_norm": 3.553119612657192, + "learning_rate": 9.365203955517366e-06, + "loss": 0.4838, + "step": 2698 + }, + { + "epoch": 0.18740452714900707, + "grad_norm": 4.569877706251379, + "learning_rate": 9.364655492055917e-06, + "loss": 0.5901, + "step": 2699 + }, + { + "epoch": 0.1874739619497292, + "grad_norm": 3.264565579017233, + "learning_rate": 9.364106807835134e-06, + "loss": 0.3152, + "step": 2700 + }, + { + "epoch": 0.1875433967504513, + "grad_norm": 3.911946645297428, + "learning_rate": 9.363557902882768e-06, + "loss": 0.4852, + "step": 2701 + }, + { + "epoch": 0.18761283155117345, + "grad_norm": 4.057238251950517, + "learning_rate": 9.363008777226579e-06, + "loss": 0.598, + "step": 2702 + }, + { + "epoch": 0.18768226635189558, + "grad_norm": 4.942989454162959, + "learning_rate": 9.362459430894342e-06, + "loss": 0.9231, + "step": 2703 + }, + { + "epoch": 0.1877517011526177, + "grad_norm": 3.4189552785359854, + "learning_rate": 9.361909863913845e-06, + "loss": 0.5374, + "step": 2704 + }, + { + "epoch": 0.18782113595333982, + "grad_norm": 3.300158387504638, + 
"learning_rate": 9.361360076312885e-06, + "loss": 0.379, + "step": 2705 + }, + { + "epoch": 0.18789057075406193, + "grad_norm": 4.269859189069414, + "learning_rate": 9.360810068119263e-06, + "loss": 0.6118, + "step": 2706 + }, + { + "epoch": 0.18796000555478407, + "grad_norm": 4.414861276635057, + "learning_rate": 9.360259839360806e-06, + "loss": 0.5451, + "step": 2707 + }, + { + "epoch": 0.18802944035550617, + "grad_norm": 4.971972510749617, + "learning_rate": 9.35970939006534e-06, + "loss": 0.5853, + "step": 2708 + }, + { + "epoch": 0.1880988751562283, + "grad_norm": 3.2139045384928284, + "learning_rate": 9.359158720260704e-06, + "loss": 0.2629, + "step": 2709 + }, + { + "epoch": 0.1881683099569504, + "grad_norm": 4.2835668957995585, + "learning_rate": 9.358607829974755e-06, + "loss": 0.598, + "step": 2710 + }, + { + "epoch": 0.18823774475767255, + "grad_norm": 4.576594729965632, + "learning_rate": 9.358056719235353e-06, + "loss": 0.6368, + "step": 2711 + }, + { + "epoch": 0.18830717955839466, + "grad_norm": 3.649299382433351, + "learning_rate": 9.357505388070374e-06, + "loss": 0.6356, + "step": 2712 + }, + { + "epoch": 0.1883766143591168, + "grad_norm": 4.190225111288776, + "learning_rate": 9.356953836507702e-06, + "loss": 0.5943, + "step": 2713 + }, + { + "epoch": 0.18844604915983892, + "grad_norm": 3.799433608007023, + "learning_rate": 9.356402064575237e-06, + "loss": 0.4415, + "step": 2714 + }, + { + "epoch": 0.18851548396056103, + "grad_norm": 3.2704213302503367, + "learning_rate": 9.355850072300885e-06, + "loss": 0.4086, + "step": 2715 + }, + { + "epoch": 0.18858491876128317, + "grad_norm": 4.809849727129228, + "learning_rate": 9.355297859712565e-06, + "loss": 0.517, + "step": 2716 + }, + { + "epoch": 0.18865435356200527, + "grad_norm": 3.51281557191898, + "learning_rate": 9.354745426838208e-06, + "loss": 0.3684, + "step": 2717 + }, + { + "epoch": 0.1887237883627274, + "grad_norm": 3.9953390577791876, + "learning_rate": 9.354192773705755e-06, + "loss": 0.4533, + "step": 2718 + }, + { + "epoch": 0.18879322316344951, + "grad_norm": 3.4078418732593767, + "learning_rate": 9.353639900343156e-06, + "loss": 0.2911, + "step": 2719 + }, + { + "epoch": 0.18886265796417165, + "grad_norm": 3.6734345955257854, + "learning_rate": 9.35308680677838e-06, + "loss": 0.4506, + "step": 2720 + }, + { + "epoch": 0.18893209276489376, + "grad_norm": 3.4191989686856177, + "learning_rate": 9.352533493039399e-06, + "loss": 0.3259, + "step": 2721 + }, + { + "epoch": 0.1890015275656159, + "grad_norm": 3.6567529470743425, + "learning_rate": 9.351979959154198e-06, + "loss": 0.491, + "step": 2722 + }, + { + "epoch": 0.189070962366338, + "grad_norm": 4.624552391559659, + "learning_rate": 9.351426205150778e-06, + "loss": 0.7231, + "step": 2723 + }, + { + "epoch": 0.18914039716706013, + "grad_norm": 5.913573668756369, + "learning_rate": 9.350872231057143e-06, + "loss": 0.6042, + "step": 2724 + }, + { + "epoch": 0.18920983196778227, + "grad_norm": 2.6099992463084014, + "learning_rate": 9.35031803690131e-06, + "loss": 0.3139, + "step": 2725 + }, + { + "epoch": 0.18927926676850437, + "grad_norm": 4.50995402186975, + "learning_rate": 9.349763622711318e-06, + "loss": 0.6777, + "step": 2726 + }, + { + "epoch": 0.1893487015692265, + "grad_norm": 3.2216086594793483, + "learning_rate": 9.349208988515204e-06, + "loss": 0.3374, + "step": 2727 + }, + { + "epoch": 0.18941813636994861, + "grad_norm": 4.107081382539988, + "learning_rate": 9.34865413434102e-06, + "loss": 0.6015, + "step": 2728 + }, + { + "epoch": 
0.18948757117067075, + "grad_norm": 3.61101862039446, + "learning_rate": 9.34809906021683e-06, + "loss": 0.337, + "step": 2729 + }, + { + "epoch": 0.18955700597139286, + "grad_norm": 4.354232240522092, + "learning_rate": 9.34754376617071e-06, + "loss": 0.3765, + "step": 2730 + }, + { + "epoch": 0.189626440772115, + "grad_norm": 4.351544470282709, + "learning_rate": 9.346988252230746e-06, + "loss": 0.4047, + "step": 2731 + }, + { + "epoch": 0.1896958755728371, + "grad_norm": 3.7045585897261866, + "learning_rate": 9.346432518425035e-06, + "loss": 0.4443, + "step": 2732 + }, + { + "epoch": 0.18976531037355923, + "grad_norm": 3.5778556173733738, + "learning_rate": 9.345876564781687e-06, + "loss": 0.4219, + "step": 2733 + }, + { + "epoch": 0.18983474517428134, + "grad_norm": 3.6630542223987717, + "learning_rate": 9.345320391328818e-06, + "loss": 0.3879, + "step": 2734 + }, + { + "epoch": 0.18990417997500347, + "grad_norm": 4.429881979430322, + "learning_rate": 9.344763998094561e-06, + "loss": 0.4893, + "step": 2735 + }, + { + "epoch": 0.18997361477572558, + "grad_norm": 4.055937117026432, + "learning_rate": 9.344207385107058e-06, + "loss": 0.5636, + "step": 2736 + }, + { + "epoch": 0.19004304957644771, + "grad_norm": 3.7728912695566645, + "learning_rate": 9.343650552394461e-06, + "loss": 0.3829, + "step": 2737 + }, + { + "epoch": 0.19011248437716985, + "grad_norm": 3.253544783639077, + "learning_rate": 9.343093499984934e-06, + "loss": 0.3914, + "step": 2738 + }, + { + "epoch": 0.19018191917789196, + "grad_norm": 2.821709914668401, + "learning_rate": 9.342536227906653e-06, + "loss": 0.1828, + "step": 2739 + }, + { + "epoch": 0.1902513539786141, + "grad_norm": 3.812753702751062, + "learning_rate": 9.341978736187805e-06, + "loss": 0.4588, + "step": 2740 + }, + { + "epoch": 0.1903207887793362, + "grad_norm": 3.9297013449581657, + "learning_rate": 9.341421024856582e-06, + "loss": 0.5823, + "step": 2741 + }, + { + "epoch": 0.19039022358005833, + "grad_norm": 3.4883306366601654, + "learning_rate": 9.3408630939412e-06, + "loss": 0.5591, + "step": 2742 + }, + { + "epoch": 0.19045965838078044, + "grad_norm": 3.511011886453169, + "learning_rate": 9.340304943469871e-06, + "loss": 0.3234, + "step": 2743 + }, + { + "epoch": 0.19052909318150257, + "grad_norm": 4.479663853988132, + "learning_rate": 9.339746573470832e-06, + "loss": 0.7253, + "step": 2744 + }, + { + "epoch": 0.19059852798222468, + "grad_norm": 4.487626675708971, + "learning_rate": 9.339187983972321e-06, + "loss": 0.5299, + "step": 2745 + }, + { + "epoch": 0.19066796278294681, + "grad_norm": 3.7987376491743134, + "learning_rate": 9.338629175002592e-06, + "loss": 0.3924, + "step": 2746 + }, + { + "epoch": 0.19073739758366892, + "grad_norm": 5.6539131576751345, + "learning_rate": 9.33807014658991e-06, + "loss": 0.7562, + "step": 2747 + }, + { + "epoch": 0.19080683238439106, + "grad_norm": 3.290502924696365, + "learning_rate": 9.337510898762548e-06, + "loss": 0.2409, + "step": 2748 + }, + { + "epoch": 0.1908762671851132, + "grad_norm": 3.562466193535862, + "learning_rate": 9.336951431548794e-06, + "loss": 0.3545, + "step": 2749 + }, + { + "epoch": 0.1909457019858353, + "grad_norm": 3.460301785349842, + "learning_rate": 9.336391744976942e-06, + "loss": 0.3328, + "step": 2750 + }, + { + "epoch": 0.19101513678655743, + "grad_norm": 5.162659018358059, + "learning_rate": 9.335831839075303e-06, + "loss": 0.7052, + "step": 2751 + }, + { + "epoch": 0.19108457158727954, + "grad_norm": 4.929602906230562, + "learning_rate": 9.335271713872198e-06, + "loss": 
0.669, + "step": 2752 + }, + { + "epoch": 0.19115400638800167, + "grad_norm": 4.430727543953501, + "learning_rate": 9.334711369395953e-06, + "loss": 0.6739, + "step": 2753 + }, + { + "epoch": 0.19122344118872378, + "grad_norm": 3.061095759664949, + "learning_rate": 9.334150805674913e-06, + "loss": 0.3962, + "step": 2754 + }, + { + "epoch": 0.19129287598944592, + "grad_norm": 5.1623950974380675, + "learning_rate": 9.333590022737428e-06, + "loss": 0.5439, + "step": 2755 + }, + { + "epoch": 0.19136231079016802, + "grad_norm": 4.463649858992726, + "learning_rate": 9.333029020611865e-06, + "loss": 0.4312, + "step": 2756 + }, + { + "epoch": 0.19143174559089016, + "grad_norm": 4.8136711203666875, + "learning_rate": 9.332467799326599e-06, + "loss": 0.8508, + "step": 2757 + }, + { + "epoch": 0.19150118039161226, + "grad_norm": 4.544231933795228, + "learning_rate": 9.331906358910012e-06, + "loss": 0.7869, + "step": 2758 + }, + { + "epoch": 0.1915706151923344, + "grad_norm": 4.003804009963702, + "learning_rate": 9.331344699390504e-06, + "loss": 0.3863, + "step": 2759 + }, + { + "epoch": 0.19164004999305653, + "grad_norm": 3.6890801937053013, + "learning_rate": 9.330782820796482e-06, + "loss": 0.4428, + "step": 2760 + }, + { + "epoch": 0.19170948479377864, + "grad_norm": 3.791804860372565, + "learning_rate": 9.330220723156365e-06, + "loss": 0.3654, + "step": 2761 + }, + { + "epoch": 0.19177891959450077, + "grad_norm": 4.019905593335045, + "learning_rate": 9.329658406498586e-06, + "loss": 0.3392, + "step": 2762 + }, + { + "epoch": 0.19184835439522288, + "grad_norm": 3.4847390614949942, + "learning_rate": 9.329095870851581e-06, + "loss": 0.454, + "step": 2763 + }, + { + "epoch": 0.19191778919594502, + "grad_norm": 3.8882326450944285, + "learning_rate": 9.328533116243807e-06, + "loss": 0.5431, + "step": 2764 + }, + { + "epoch": 0.19198722399666712, + "grad_norm": 3.578616256686992, + "learning_rate": 9.327970142703726e-06, + "loss": 0.3556, + "step": 2765 + }, + { + "epoch": 0.19205665879738926, + "grad_norm": 4.471841768198546, + "learning_rate": 9.327406950259814e-06, + "loss": 0.5637, + "step": 2766 + }, + { + "epoch": 0.19212609359811136, + "grad_norm": 4.314400921717331, + "learning_rate": 9.326843538940553e-06, + "loss": 0.5535, + "step": 2767 + }, + { + "epoch": 0.1921955283988335, + "grad_norm": 4.252126876824009, + "learning_rate": 9.326279908774444e-06, + "loss": 0.564, + "step": 2768 + }, + { + "epoch": 0.1922649631995556, + "grad_norm": 3.9393460822026127, + "learning_rate": 9.32571605978999e-06, + "loss": 0.7253, + "step": 2769 + }, + { + "epoch": 0.19233439800027774, + "grad_norm": 3.4707706948114545, + "learning_rate": 9.325151992015715e-06, + "loss": 0.2873, + "step": 2770 + }, + { + "epoch": 0.19240383280099987, + "grad_norm": 4.311575867253222, + "learning_rate": 9.324587705480145e-06, + "loss": 0.5869, + "step": 2771 + }, + { + "epoch": 0.19247326760172198, + "grad_norm": 4.989532099298601, + "learning_rate": 9.324023200211825e-06, + "loss": 0.4649, + "step": 2772 + }, + { + "epoch": 0.19254270240244412, + "grad_norm": 3.6178106807347805, + "learning_rate": 9.323458476239302e-06, + "loss": 0.3059, + "step": 2773 + }, + { + "epoch": 0.19261213720316622, + "grad_norm": 4.909673010947121, + "learning_rate": 9.322893533591144e-06, + "loss": 0.5213, + "step": 2774 + }, + { + "epoch": 0.19268157200388836, + "grad_norm": 3.4305574362653704, + "learning_rate": 9.32232837229592e-06, + "loss": 0.3366, + "step": 2775 + }, + { + "epoch": 0.19275100680461046, + "grad_norm": 4.390128546793546, + 
"learning_rate": 9.32176299238222e-06, + "loss": 0.6442, + "step": 2776 + }, + { + "epoch": 0.1928204416053326, + "grad_norm": 3.0614079651599946, + "learning_rate": 9.321197393878637e-06, + "loss": 0.1788, + "step": 2777 + }, + { + "epoch": 0.1928898764060547, + "grad_norm": 4.285743652857991, + "learning_rate": 9.320631576813779e-06, + "loss": 0.485, + "step": 2778 + }, + { + "epoch": 0.19295931120677684, + "grad_norm": 3.8545005090056277, + "learning_rate": 9.320065541216265e-06, + "loss": 0.3717, + "step": 2779 + }, + { + "epoch": 0.19302874600749895, + "grad_norm": 5.4509651800760635, + "learning_rate": 9.319499287114726e-06, + "loss": 0.5684, + "step": 2780 + }, + { + "epoch": 0.19309818080822108, + "grad_norm": 3.2767933437028454, + "learning_rate": 9.318932814537801e-06, + "loss": 0.4264, + "step": 2781 + }, + { + "epoch": 0.19316761560894322, + "grad_norm": 3.173529415205206, + "learning_rate": 9.31836612351414e-06, + "loss": 0.2306, + "step": 2782 + }, + { + "epoch": 0.19323705040966532, + "grad_norm": 4.374773340902683, + "learning_rate": 9.317799214072408e-06, + "loss": 0.4269, + "step": 2783 + }, + { + "epoch": 0.19330648521038746, + "grad_norm": 3.3408269185084256, + "learning_rate": 9.317232086241277e-06, + "loss": 0.2131, + "step": 2784 + }, + { + "epoch": 0.19337592001110956, + "grad_norm": 5.302543156896799, + "learning_rate": 9.316664740049433e-06, + "loss": 0.5825, + "step": 2785 + }, + { + "epoch": 0.1934453548118317, + "grad_norm": 3.168899584533589, + "learning_rate": 9.316097175525571e-06, + "loss": 0.4335, + "step": 2786 + }, + { + "epoch": 0.1935147896125538, + "grad_norm": 3.9844736151978393, + "learning_rate": 9.315529392698396e-06, + "loss": 0.6811, + "step": 2787 + }, + { + "epoch": 0.19358422441327594, + "grad_norm": 4.663782641145318, + "learning_rate": 9.314961391596631e-06, + "loss": 0.5962, + "step": 2788 + }, + { + "epoch": 0.19365365921399805, + "grad_norm": 5.314800989511133, + "learning_rate": 9.314393172249e-06, + "loss": 0.7669, + "step": 2789 + }, + { + "epoch": 0.19372309401472018, + "grad_norm": 5.251449302011684, + "learning_rate": 9.313824734684243e-06, + "loss": 0.7401, + "step": 2790 + }, + { + "epoch": 0.1937925288154423, + "grad_norm": 3.515634392366791, + "learning_rate": 9.313256078931115e-06, + "loss": 0.3952, + "step": 2791 + }, + { + "epoch": 0.19386196361616442, + "grad_norm": 4.161358828944821, + "learning_rate": 9.312687205018373e-06, + "loss": 0.4989, + "step": 2792 + }, + { + "epoch": 0.19393139841688653, + "grad_norm": 4.544955284737086, + "learning_rate": 9.312118112974792e-06, + "loss": 0.4924, + "step": 2793 + }, + { + "epoch": 0.19400083321760866, + "grad_norm": 4.4341974414983, + "learning_rate": 9.31154880282916e-06, + "loss": 0.6021, + "step": 2794 + }, + { + "epoch": 0.1940702680183308, + "grad_norm": 3.671305050461036, + "learning_rate": 9.310979274610266e-06, + "loss": 0.3005, + "step": 2795 + }, + { + "epoch": 0.1941397028190529, + "grad_norm": 4.483897711088626, + "learning_rate": 9.310409528346917e-06, + "loss": 0.6386, + "step": 2796 + }, + { + "epoch": 0.19420913761977504, + "grad_norm": 4.731574327185386, + "learning_rate": 9.309839564067932e-06, + "loss": 0.6518, + "step": 2797 + }, + { + "epoch": 0.19427857242049715, + "grad_norm": 4.300848137354891, + "learning_rate": 9.309269381802142e-06, + "loss": 0.5616, + "step": 2798 + }, + { + "epoch": 0.19434800722121928, + "grad_norm": 2.9043070145178507, + "learning_rate": 9.30869898157838e-06, + "loss": 0.2264, + "step": 2799 + }, + { + "epoch": 0.1944174420219414, + 
"grad_norm": 4.5630022657246485, + "learning_rate": 9.3081283634255e-06, + "loss": 0.5008, + "step": 2800 + }, + { + "epoch": 0.19448687682266352, + "grad_norm": 3.8082555706484094, + "learning_rate": 9.307557527372362e-06, + "loss": 0.4062, + "step": 2801 + }, + { + "epoch": 0.19455631162338563, + "grad_norm": 4.221238460729788, + "learning_rate": 9.306986473447839e-06, + "loss": 0.3888, + "step": 2802 + }, + { + "epoch": 0.19462574642410777, + "grad_norm": 2.3523711258331708, + "learning_rate": 9.306415201680814e-06, + "loss": 0.2021, + "step": 2803 + }, + { + "epoch": 0.19469518122482987, + "grad_norm": 3.6494852125817734, + "learning_rate": 9.30584371210018e-06, + "loss": 0.5035, + "step": 2804 + }, + { + "epoch": 0.194764616025552, + "grad_norm": 5.314266633581966, + "learning_rate": 9.305272004734844e-06, + "loss": 0.6419, + "step": 2805 + }, + { + "epoch": 0.19483405082627414, + "grad_norm": 2.90490224754564, + "learning_rate": 9.304700079613724e-06, + "loss": 0.317, + "step": 2806 + }, + { + "epoch": 0.19490348562699625, + "grad_norm": 3.129894216355529, + "learning_rate": 9.304127936765742e-06, + "loss": 0.2155, + "step": 2807 + }, + { + "epoch": 0.19497292042771838, + "grad_norm": 4.419536740165044, + "learning_rate": 9.30355557621984e-06, + "loss": 0.3615, + "step": 2808 + }, + { + "epoch": 0.1950423552284405, + "grad_norm": 5.153098610474333, + "learning_rate": 9.302982998004969e-06, + "loss": 0.7054, + "step": 2809 + }, + { + "epoch": 0.19511179002916262, + "grad_norm": 3.6023131061547438, + "learning_rate": 9.302410202150085e-06, + "loss": 0.2973, + "step": 2810 + }, + { + "epoch": 0.19518122482988473, + "grad_norm": 4.036006808990755, + "learning_rate": 9.30183718868416e-06, + "loss": 0.4837, + "step": 2811 + }, + { + "epoch": 0.19525065963060687, + "grad_norm": 4.018437137749635, + "learning_rate": 9.30126395763618e-06, + "loss": 0.4471, + "step": 2812 + }, + { + "epoch": 0.19532009443132897, + "grad_norm": 3.773208313710731, + "learning_rate": 9.300690509035137e-06, + "loss": 0.4447, + "step": 2813 + }, + { + "epoch": 0.1953895292320511, + "grad_norm": 4.380705912628938, + "learning_rate": 9.300116842910033e-06, + "loss": 0.4603, + "step": 2814 + }, + { + "epoch": 0.1954589640327732, + "grad_norm": 3.7126717256336, + "learning_rate": 9.299542959289886e-06, + "loss": 0.5517, + "step": 2815 + }, + { + "epoch": 0.19552839883349535, + "grad_norm": 4.478601610383789, + "learning_rate": 9.29896885820372e-06, + "loss": 0.4721, + "step": 2816 + }, + { + "epoch": 0.19559783363421748, + "grad_norm": 4.070336578517675, + "learning_rate": 9.298394539680575e-06, + "loss": 0.4748, + "step": 2817 + }, + { + "epoch": 0.1956672684349396, + "grad_norm": 3.8628663005563113, + "learning_rate": 9.297820003749499e-06, + "loss": 0.4625, + "step": 2818 + }, + { + "epoch": 0.19573670323566172, + "grad_norm": 5.244829616893765, + "learning_rate": 9.297245250439548e-06, + "loss": 0.8626, + "step": 2819 + }, + { + "epoch": 0.19580613803638383, + "grad_norm": 3.323327387207132, + "learning_rate": 9.296670279779797e-06, + "loss": 0.5417, + "step": 2820 + }, + { + "epoch": 0.19587557283710597, + "grad_norm": 3.8631300637906065, + "learning_rate": 9.296095091799324e-06, + "loss": 0.5871, + "step": 2821 + }, + { + "epoch": 0.19594500763782807, + "grad_norm": 3.4227619997658096, + "learning_rate": 9.295519686527224e-06, + "loss": 0.4033, + "step": 2822 + }, + { + "epoch": 0.1960144424385502, + "grad_norm": 4.651250344923784, + "learning_rate": 9.294944063992597e-06, + "loss": 0.6142, + "step": 2823 + }, + 
{ + "epoch": 0.19608387723927231, + "grad_norm": 2.965613086222905, + "learning_rate": 9.294368224224562e-06, + "loss": 0.389, + "step": 2824 + }, + { + "epoch": 0.19615331203999445, + "grad_norm": 4.593639847556587, + "learning_rate": 9.29379216725224e-06, + "loss": 0.5812, + "step": 2825 + }, + { + "epoch": 0.19622274684071656, + "grad_norm": 5.101824592425328, + "learning_rate": 9.293215893104767e-06, + "loss": 0.7988, + "step": 2826 + }, + { + "epoch": 0.1962921816414387, + "grad_norm": 3.9791459098321065, + "learning_rate": 9.292639401811295e-06, + "loss": 0.457, + "step": 2827 + }, + { + "epoch": 0.19636161644216082, + "grad_norm": 3.894962360423816, + "learning_rate": 9.292062693400979e-06, + "loss": 0.2939, + "step": 2828 + }, + { + "epoch": 0.19643105124288293, + "grad_norm": 3.8883014281747132, + "learning_rate": 9.291485767902988e-06, + "loss": 0.592, + "step": 2829 + }, + { + "epoch": 0.19650048604360507, + "grad_norm": 4.358102151108144, + "learning_rate": 9.290908625346505e-06, + "loss": 0.638, + "step": 2830 + }, + { + "epoch": 0.19656992084432717, + "grad_norm": 3.263443770950024, + "learning_rate": 9.290331265760717e-06, + "loss": 0.3062, + "step": 2831 + }, + { + "epoch": 0.1966393556450493, + "grad_norm": 3.722123562750182, + "learning_rate": 9.289753689174829e-06, + "loss": 0.3017, + "step": 2832 + }, + { + "epoch": 0.19670879044577141, + "grad_norm": 4.170869769936464, + "learning_rate": 9.289175895618053e-06, + "loss": 0.5381, + "step": 2833 + }, + { + "epoch": 0.19677822524649355, + "grad_norm": 4.056698633598476, + "learning_rate": 9.288597885119614e-06, + "loss": 0.4596, + "step": 2834 + }, + { + "epoch": 0.19684766004721566, + "grad_norm": 3.543849811122119, + "learning_rate": 9.288019657708748e-06, + "loss": 0.5461, + "step": 2835 + }, + { + "epoch": 0.1969170948479378, + "grad_norm": 2.6729923932583275, + "learning_rate": 9.287441213414699e-06, + "loss": 0.1371, + "step": 2836 + }, + { + "epoch": 0.1969865296486599, + "grad_norm": 2.998740615867098, + "learning_rate": 9.286862552266724e-06, + "loss": 0.1333, + "step": 2837 + }, + { + "epoch": 0.19705596444938203, + "grad_norm": 3.931708523755941, + "learning_rate": 9.286283674294094e-06, + "loss": 0.5478, + "step": 2838 + }, + { + "epoch": 0.19712539925010414, + "grad_norm": 3.5670240798862833, + "learning_rate": 9.285704579526086e-06, + "loss": 0.2976, + "step": 2839 + }, + { + "epoch": 0.19719483405082627, + "grad_norm": 2.5788393953324453, + "learning_rate": 9.285125267991989e-06, + "loss": 0.2778, + "step": 2840 + }, + { + "epoch": 0.1972642688515484, + "grad_norm": 3.186000849080737, + "learning_rate": 9.284545739721105e-06, + "loss": 0.3043, + "step": 2841 + }, + { + "epoch": 0.19733370365227051, + "grad_norm": 4.570872250529393, + "learning_rate": 9.283965994742746e-06, + "loss": 0.5433, + "step": 2842 + }, + { + "epoch": 0.19740313845299265, + "grad_norm": 4.073703574615141, + "learning_rate": 9.283386033086235e-06, + "loss": 0.4412, + "step": 2843 + }, + { + "epoch": 0.19747257325371476, + "grad_norm": 4.3701470762613095, + "learning_rate": 9.282805854780906e-06, + "loss": 0.3853, + "step": 2844 + }, + { + "epoch": 0.1975420080544369, + "grad_norm": 4.822016343162825, + "learning_rate": 9.2822254598561e-06, + "loss": 0.4714, + "step": 2845 + }, + { + "epoch": 0.197611442855159, + "grad_norm": 4.480888160800615, + "learning_rate": 9.281644848341178e-06, + "loss": 0.4886, + "step": 2846 + }, + { + "epoch": 0.19768087765588113, + "grad_norm": 4.967095534526796, + "learning_rate": 9.281064020265504e-06, + 
"loss": 0.6235, + "step": 2847 + }, + { + "epoch": 0.19775031245660324, + "grad_norm": 3.5552872352014426, + "learning_rate": 9.280482975658457e-06, + "loss": 0.5288, + "step": 2848 + }, + { + "epoch": 0.19781974725732537, + "grad_norm": 3.071121353381513, + "learning_rate": 9.279901714549427e-06, + "loss": 0.3055, + "step": 2849 + }, + { + "epoch": 0.19788918205804748, + "grad_norm": 5.192346239082116, + "learning_rate": 9.279320236967808e-06, + "loss": 0.8085, + "step": 2850 + }, + { + "epoch": 0.19795861685876961, + "grad_norm": 4.338361991320224, + "learning_rate": 9.278738542943014e-06, + "loss": 0.7138, + "step": 2851 + }, + { + "epoch": 0.19802805165949175, + "grad_norm": 5.426002678737996, + "learning_rate": 9.278156632504467e-06, + "loss": 0.6292, + "step": 2852 + }, + { + "epoch": 0.19809748646021386, + "grad_norm": 4.900933285310241, + "learning_rate": 9.2775745056816e-06, + "loss": 0.5777, + "step": 2853 + }, + { + "epoch": 0.198166921260936, + "grad_norm": 4.341100405697076, + "learning_rate": 9.276992162503851e-06, + "loss": 0.7086, + "step": 2854 + }, + { + "epoch": 0.1982363560616581, + "grad_norm": 4.712246236287635, + "learning_rate": 9.276409603000683e-06, + "loss": 0.7005, + "step": 2855 + }, + { + "epoch": 0.19830579086238023, + "grad_norm": 4.55468193033358, + "learning_rate": 9.275826827201553e-06, + "loss": 0.6057, + "step": 2856 + }, + { + "epoch": 0.19837522566310234, + "grad_norm": 3.868273286292925, + "learning_rate": 9.27524383513594e-06, + "loss": 0.4427, + "step": 2857 + }, + { + "epoch": 0.19844466046382447, + "grad_norm": 4.427906210640304, + "learning_rate": 9.274660626833334e-06, + "loss": 0.6805, + "step": 2858 + }, + { + "epoch": 0.19851409526454658, + "grad_norm": 4.440817530845498, + "learning_rate": 9.27407720232323e-06, + "loss": 0.5951, + "step": 2859 + }, + { + "epoch": 0.19858353006526872, + "grad_norm": 4.1146360643677165, + "learning_rate": 9.273493561635136e-06, + "loss": 0.4785, + "step": 2860 + }, + { + "epoch": 0.19865296486599082, + "grad_norm": 3.2975626630161465, + "learning_rate": 9.272909704798575e-06, + "loss": 0.3489, + "step": 2861 + }, + { + "epoch": 0.19872239966671296, + "grad_norm": 2.966735840236814, + "learning_rate": 9.272325631843076e-06, + "loss": 0.3953, + "step": 2862 + }, + { + "epoch": 0.1987918344674351, + "grad_norm": 5.006951664533371, + "learning_rate": 9.271741342798181e-06, + "loss": 0.6096, + "step": 2863 + }, + { + "epoch": 0.1988612692681572, + "grad_norm": 4.093955024075473, + "learning_rate": 9.271156837693442e-06, + "loss": 0.541, + "step": 2864 + }, + { + "epoch": 0.19893070406887933, + "grad_norm": 3.8209976522536717, + "learning_rate": 9.270572116558426e-06, + "loss": 0.4163, + "step": 2865 + }, + { + "epoch": 0.19900013886960144, + "grad_norm": 3.7051121713552573, + "learning_rate": 9.269987179422702e-06, + "loss": 0.5604, + "step": 2866 + }, + { + "epoch": 0.19906957367032357, + "grad_norm": 3.867872448330714, + "learning_rate": 9.269402026315859e-06, + "loss": 0.3634, + "step": 2867 + }, + { + "epoch": 0.19913900847104568, + "grad_norm": 4.560730970668807, + "learning_rate": 9.268816657267493e-06, + "loss": 0.6268, + "step": 2868 + }, + { + "epoch": 0.19920844327176782, + "grad_norm": 4.252405331903572, + "learning_rate": 9.26823107230721e-06, + "loss": 0.5711, + "step": 2869 + }, + { + "epoch": 0.19927787807248992, + "grad_norm": 4.10706863231667, + "learning_rate": 9.26764527146463e-06, + "loss": 0.3839, + "step": 2870 + }, + { + "epoch": 0.19934731287321206, + "grad_norm": 16.72678210183898, + 
"learning_rate": 9.26705925476938e-06, + "loss": 0.6435, + "step": 2871 + }, + { + "epoch": 0.19941674767393416, + "grad_norm": 3.2039398430779125, + "learning_rate": 9.266473022251103e-06, + "loss": 0.2513, + "step": 2872 + }, + { + "epoch": 0.1994861824746563, + "grad_norm": 4.798987822597973, + "learning_rate": 9.265886573939448e-06, + "loss": 0.6732, + "step": 2873 + }, + { + "epoch": 0.19955561727537843, + "grad_norm": 3.9599936457517213, + "learning_rate": 9.265299909864076e-06, + "loss": 0.5171, + "step": 2874 + }, + { + "epoch": 0.19962505207610054, + "grad_norm": 4.647332580827222, + "learning_rate": 9.264713030054664e-06, + "loss": 0.5541, + "step": 2875 + }, + { + "epoch": 0.19969448687682267, + "grad_norm": 4.043905424604964, + "learning_rate": 9.26412593454089e-06, + "loss": 0.7019, + "step": 2876 + }, + { + "epoch": 0.19976392167754478, + "grad_norm": 2.7922678307577247, + "learning_rate": 9.263538623352453e-06, + "loss": 0.3012, + "step": 2877 + }, + { + "epoch": 0.19983335647826692, + "grad_norm": 3.3536319431468264, + "learning_rate": 9.262951096519056e-06, + "loss": 0.2987, + "step": 2878 + }, + { + "epoch": 0.19990279127898902, + "grad_norm": 2.8726638702647964, + "learning_rate": 9.262363354070416e-06, + "loss": 0.3041, + "step": 2879 + }, + { + "epoch": 0.19997222607971116, + "grad_norm": 5.233352087729568, + "learning_rate": 9.261775396036262e-06, + "loss": 0.6618, + "step": 2880 + }, + { + "epoch": 0.20004166088043326, + "grad_norm": 4.6701881707817305, + "learning_rate": 9.26118722244633e-06, + "loss": 0.558, + "step": 2881 + }, + { + "epoch": 0.2001110956811554, + "grad_norm": 3.015451627342665, + "learning_rate": 9.260598833330372e-06, + "loss": 0.2588, + "step": 2882 + }, + { + "epoch": 0.2001805304818775, + "grad_norm": 3.995105038264002, + "learning_rate": 9.260010228718144e-06, + "loss": 0.5419, + "step": 2883 + }, + { + "epoch": 0.20024996528259964, + "grad_norm": 4.266776646372009, + "learning_rate": 9.259421408639421e-06, + "loss": 0.7082, + "step": 2884 + }, + { + "epoch": 0.20031940008332177, + "grad_norm": 5.044144064454226, + "learning_rate": 9.25883237312398e-06, + "loss": 0.5746, + "step": 2885 + }, + { + "epoch": 0.20038883488404388, + "grad_norm": 4.2600481731678865, + "learning_rate": 9.25824312220162e-06, + "loss": 0.7338, + "step": 2886 + }, + { + "epoch": 0.20045826968476602, + "grad_norm": 3.679001086251324, + "learning_rate": 9.25765365590214e-06, + "loss": 0.4332, + "step": 2887 + }, + { + "epoch": 0.20052770448548812, + "grad_norm": 4.523112344384331, + "learning_rate": 9.257063974255357e-06, + "loss": 0.4191, + "step": 2888 + }, + { + "epoch": 0.20059713928621026, + "grad_norm": 4.589641026024377, + "learning_rate": 9.256474077291095e-06, + "loss": 0.4619, + "step": 2889 + }, + { + "epoch": 0.20066657408693236, + "grad_norm": 4.753899821995365, + "learning_rate": 9.25588396503919e-06, + "loss": 0.6826, + "step": 2890 + }, + { + "epoch": 0.2007360088876545, + "grad_norm": 4.3996051461422105, + "learning_rate": 9.25529363752949e-06, + "loss": 0.6415, + "step": 2891 + }, + { + "epoch": 0.2008054436883766, + "grad_norm": 3.171732823770511, + "learning_rate": 9.254703094791853e-06, + "loss": 0.326, + "step": 2892 + }, + { + "epoch": 0.20087487848909874, + "grad_norm": 3.5869900559025067, + "learning_rate": 9.254112336856148e-06, + "loss": 0.4881, + "step": 2893 + }, + { + "epoch": 0.20094431328982085, + "grad_norm": 3.889098136905742, + "learning_rate": 9.253521363752254e-06, + "loss": 0.483, + "step": 2894 + }, + { + "epoch": 
0.20101374809054298, + "grad_norm": 4.2068532276195185, + "learning_rate": 9.252930175510064e-06, + "loss": 0.6299, + "step": 2895 + }, + { + "epoch": 0.2010831828912651, + "grad_norm": 3.3606570952293953, + "learning_rate": 9.252338772159478e-06, + "loss": 0.3605, + "step": 2896 + }, + { + "epoch": 0.20115261769198722, + "grad_norm": 3.06567646335898, + "learning_rate": 9.251747153730409e-06, + "loss": 0.2652, + "step": 2897 + }, + { + "epoch": 0.20122205249270936, + "grad_norm": 4.680752764109054, + "learning_rate": 9.251155320252779e-06, + "loss": 0.6425, + "step": 2898 + }, + { + "epoch": 0.20129148729343146, + "grad_norm": 3.6014270678149205, + "learning_rate": 9.250563271756526e-06, + "loss": 0.4729, + "step": 2899 + }, + { + "epoch": 0.2013609220941536, + "grad_norm": 4.313883512704269, + "learning_rate": 9.24997100827159e-06, + "loss": 0.6282, + "step": 2900 + }, + { + "epoch": 0.2014303568948757, + "grad_norm": 3.6981417396891216, + "learning_rate": 9.249378529827931e-06, + "loss": 0.5673, + "step": 2901 + }, + { + "epoch": 0.20149979169559784, + "grad_norm": 3.5908118208917, + "learning_rate": 9.248785836455512e-06, + "loss": 0.3073, + "step": 2902 + }, + { + "epoch": 0.20156922649631995, + "grad_norm": 4.159934861635394, + "learning_rate": 9.248192928184315e-06, + "loss": 0.5134, + "step": 2903 + }, + { + "epoch": 0.20163866129704208, + "grad_norm": 4.209681562138876, + "learning_rate": 9.247599805044328e-06, + "loss": 0.4735, + "step": 2904 + }, + { + "epoch": 0.2017080960977642, + "grad_norm": 4.1803551890617605, + "learning_rate": 9.247006467065547e-06, + "loss": 0.6779, + "step": 2905 + }, + { + "epoch": 0.20177753089848632, + "grad_norm": 4.783665529861281, + "learning_rate": 9.246412914277986e-06, + "loss": 0.7115, + "step": 2906 + }, + { + "epoch": 0.20184696569920843, + "grad_norm": 4.223882427887777, + "learning_rate": 9.245819146711665e-06, + "loss": 0.5924, + "step": 2907 + }, + { + "epoch": 0.20191640049993057, + "grad_norm": 4.009003404468766, + "learning_rate": 9.245225164396616e-06, + "loss": 0.4349, + "step": 2908 + }, + { + "epoch": 0.2019858353006527, + "grad_norm": 7.910085825559944, + "learning_rate": 9.244630967362881e-06, + "loss": 0.9379, + "step": 2909 + }, + { + "epoch": 0.2020552701013748, + "grad_norm": 3.2015646197293606, + "learning_rate": 9.244036555640517e-06, + "loss": 0.4615, + "step": 2910 + }, + { + "epoch": 0.20212470490209694, + "grad_norm": 3.918230465847634, + "learning_rate": 9.243441929259585e-06, + "loss": 0.6131, + "step": 2911 + }, + { + "epoch": 0.20219413970281905, + "grad_norm": 4.217653758541043, + "learning_rate": 9.242847088250161e-06, + "loss": 0.5875, + "step": 2912 + }, + { + "epoch": 0.20226357450354118, + "grad_norm": 5.31502105223746, + "learning_rate": 9.242252032642334e-06, + "loss": 0.6769, + "step": 2913 + }, + { + "epoch": 0.2023330093042633, + "grad_norm": 4.741691150932073, + "learning_rate": 9.241656762466197e-06, + "loss": 0.5952, + "step": 2914 + }, + { + "epoch": 0.20240244410498542, + "grad_norm": 3.963954480353428, + "learning_rate": 9.241061277751863e-06, + "loss": 0.5107, + "step": 2915 + }, + { + "epoch": 0.20247187890570753, + "grad_norm": 3.944650357171821, + "learning_rate": 9.240465578529449e-06, + "loss": 0.5586, + "step": 2916 + }, + { + "epoch": 0.20254131370642967, + "grad_norm": 4.278504472374262, + "learning_rate": 9.239869664829084e-06, + "loss": 0.7028, + "step": 2917 + }, + { + "epoch": 0.20261074850715177, + "grad_norm": 4.947280904543761, + "learning_rate": 9.23927353668091e-06, + "loss": 
0.8043, + "step": 2918 + }, + { + "epoch": 0.2026801833078739, + "grad_norm": 4.269798383634381, + "learning_rate": 9.238677194115075e-06, + "loss": 0.516, + "step": 2919 + }, + { + "epoch": 0.20274961810859604, + "grad_norm": 3.7163544619123883, + "learning_rate": 9.238080637161747e-06, + "loss": 0.4065, + "step": 2920 + }, + { + "epoch": 0.20281905290931815, + "grad_norm": 3.297777424087066, + "learning_rate": 9.237483865851093e-06, + "loss": 0.4312, + "step": 2921 + }, + { + "epoch": 0.20288848771004028, + "grad_norm": 3.725025719387034, + "learning_rate": 9.236886880213303e-06, + "loss": 0.4778, + "step": 2922 + }, + { + "epoch": 0.2029579225107624, + "grad_norm": 3.7811309441109793, + "learning_rate": 9.236289680278567e-06, + "loss": 0.6261, + "step": 2923 + }, + { + "epoch": 0.20302735731148452, + "grad_norm": 3.7628040791675716, + "learning_rate": 9.235692266077094e-06, + "loss": 0.3686, + "step": 2924 + }, + { + "epoch": 0.20309679211220663, + "grad_norm": 2.7595855424126223, + "learning_rate": 9.235094637639099e-06, + "loss": 0.2416, + "step": 2925 + }, + { + "epoch": 0.20316622691292877, + "grad_norm": 3.581946861493472, + "learning_rate": 9.234496794994809e-06, + "loss": 0.4834, + "step": 2926 + }, + { + "epoch": 0.20323566171365087, + "grad_norm": 3.5601245772492685, + "learning_rate": 9.233898738174461e-06, + "loss": 0.4425, + "step": 2927 + }, + { + "epoch": 0.203305096514373, + "grad_norm": 3.6285639308142783, + "learning_rate": 9.233300467208309e-06, + "loss": 0.3101, + "step": 2928 + }, + { + "epoch": 0.20337453131509511, + "grad_norm": 4.1234962126204495, + "learning_rate": 9.232701982126608e-06, + "loss": 0.5094, + "step": 2929 + }, + { + "epoch": 0.20344396611581725, + "grad_norm": 4.325504527190692, + "learning_rate": 9.23210328295963e-06, + "loss": 0.5721, + "step": 2930 + }, + { + "epoch": 0.20351340091653938, + "grad_norm": 5.669834090129811, + "learning_rate": 9.231504369737658e-06, + "loss": 0.563, + "step": 2931 + }, + { + "epoch": 0.2035828357172615, + "grad_norm": 4.328193778458343, + "learning_rate": 9.230905242490981e-06, + "loss": 0.7028, + "step": 2932 + }, + { + "epoch": 0.20365227051798362, + "grad_norm": 5.317017198970767, + "learning_rate": 9.230305901249907e-06, + "loss": 0.5494, + "step": 2933 + }, + { + "epoch": 0.20372170531870573, + "grad_norm": 4.432966571197587, + "learning_rate": 9.229706346044749e-06, + "loss": 0.6649, + "step": 2934 + }, + { + "epoch": 0.20379114011942787, + "grad_norm": 4.728486635701924, + "learning_rate": 9.229106576905827e-06, + "loss": 0.5592, + "step": 2935 + }, + { + "epoch": 0.20386057492014997, + "grad_norm": 4.555859436240858, + "learning_rate": 9.22850659386348e-06, + "loss": 0.5771, + "step": 2936 + }, + { + "epoch": 0.2039300097208721, + "grad_norm": 3.775546039879905, + "learning_rate": 9.227906396948055e-06, + "loss": 0.4177, + "step": 2937 + }, + { + "epoch": 0.20399944452159421, + "grad_norm": 5.554831271820678, + "learning_rate": 9.227305986189909e-06, + "loss": 0.6755, + "step": 2938 + }, + { + "epoch": 0.20406887932231635, + "grad_norm": 4.106718235307273, + "learning_rate": 9.22670536161941e-06, + "loss": 0.4106, + "step": 2939 + }, + { + "epoch": 0.20413831412303846, + "grad_norm": 4.431333948988815, + "learning_rate": 9.226104523266935e-06, + "loss": 0.7343, + "step": 2940 + }, + { + "epoch": 0.2042077489237606, + "grad_norm": 3.5587412526022506, + "learning_rate": 9.225503471162878e-06, + "loss": 0.435, + "step": 2941 + }, + { + "epoch": 0.2042771837244827, + "grad_norm": 3.1837939612170736, + 
"learning_rate": 9.224902205337635e-06, + "loss": 0.3048, + "step": 2942 + }, + { + "epoch": 0.20434661852520483, + "grad_norm": 3.292866601586631, + "learning_rate": 9.22430072582162e-06, + "loss": 0.339, + "step": 2943 + }, + { + "epoch": 0.20441605332592697, + "grad_norm": 2.679715320239534, + "learning_rate": 9.223699032645253e-06, + "loss": 0.2536, + "step": 2944 + }, + { + "epoch": 0.20448548812664907, + "grad_norm": 3.958460500456027, + "learning_rate": 9.223097125838969e-06, + "loss": 0.2439, + "step": 2945 + }, + { + "epoch": 0.2045549229273712, + "grad_norm": 3.399172119980921, + "learning_rate": 9.222495005433211e-06, + "loss": 0.3214, + "step": 2946 + }, + { + "epoch": 0.20462435772809331, + "grad_norm": 5.016057539963686, + "learning_rate": 9.221892671458437e-06, + "loss": 0.7045, + "step": 2947 + }, + { + "epoch": 0.20469379252881545, + "grad_norm": 4.262393861374148, + "learning_rate": 9.221290123945107e-06, + "loss": 0.4399, + "step": 2948 + }, + { + "epoch": 0.20476322732953756, + "grad_norm": 4.422303838711451, + "learning_rate": 9.220687362923696e-06, + "loss": 0.598, + "step": 2949 + }, + { + "epoch": 0.2048326621302597, + "grad_norm": 3.975302639975611, + "learning_rate": 9.220084388424698e-06, + "loss": 0.4963, + "step": 2950 + }, + { + "epoch": 0.2049020969309818, + "grad_norm": 4.372213926050992, + "learning_rate": 9.219481200478606e-06, + "loss": 0.4883, + "step": 2951 + }, + { + "epoch": 0.20497153173170393, + "grad_norm": 4.816523240614509, + "learning_rate": 9.218877799115929e-06, + "loss": 0.5668, + "step": 2952 + }, + { + "epoch": 0.20504096653242604, + "grad_norm": 4.5302116964549395, + "learning_rate": 9.218274184367187e-06, + "loss": 0.5612, + "step": 2953 + }, + { + "epoch": 0.20511040133314817, + "grad_norm": 3.026730972312457, + "learning_rate": 9.217670356262911e-06, + "loss": 0.4173, + "step": 2954 + }, + { + "epoch": 0.2051798361338703, + "grad_norm": 2.703287260040338, + "learning_rate": 9.217066314833641e-06, + "loss": 0.2322, + "step": 2955 + }, + { + "epoch": 0.20524927093459241, + "grad_norm": 4.015242042178952, + "learning_rate": 9.216462060109927e-06, + "loss": 0.5551, + "step": 2956 + }, + { + "epoch": 0.20531870573531455, + "grad_norm": 5.154877499724889, + "learning_rate": 9.215857592122334e-06, + "loss": 0.414, + "step": 2957 + }, + { + "epoch": 0.20538814053603666, + "grad_norm": 3.998525625006187, + "learning_rate": 9.215252910901436e-06, + "loss": 0.3237, + "step": 2958 + }, + { + "epoch": 0.2054575753367588, + "grad_norm": 4.098215810781089, + "learning_rate": 9.214648016477814e-06, + "loss": 0.4172, + "step": 2959 + }, + { + "epoch": 0.2055270101374809, + "grad_norm": 2.698607671722973, + "learning_rate": 9.214042908882065e-06, + "loss": 0.131, + "step": 2960 + }, + { + "epoch": 0.20559644493820303, + "grad_norm": 3.641773156612639, + "learning_rate": 9.213437588144793e-06, + "loss": 0.44, + "step": 2961 + }, + { + "epoch": 0.20566587973892514, + "grad_norm": 4.620562233300561, + "learning_rate": 9.212832054296618e-06, + "loss": 0.4445, + "step": 2962 + }, + { + "epoch": 0.20573531453964727, + "grad_norm": 3.7285650618439576, + "learning_rate": 9.212226307368164e-06, + "loss": 0.2866, + "step": 2963 + }, + { + "epoch": 0.20580474934036938, + "grad_norm": 1.5409635407663693, + "learning_rate": 9.211620347390068e-06, + "loss": 0.1435, + "step": 2964 + }, + { + "epoch": 0.20587418414109152, + "grad_norm": 4.431454260697495, + "learning_rate": 9.211014174392983e-06, + "loss": 0.5389, + "step": 2965 + }, + { + "epoch": 0.20594361894181365, 
+ "grad_norm": 4.088549974359027, + "learning_rate": 9.210407788407564e-06, + "loss": 0.5235, + "step": 2966 + }, + { + "epoch": 0.20601305374253576, + "grad_norm": 3.6081366578461123, + "learning_rate": 9.209801189464486e-06, + "loss": 0.3829, + "step": 2967 + }, + { + "epoch": 0.2060824885432579, + "grad_norm": 5.040971164870507, + "learning_rate": 9.209194377594427e-06, + "loss": 0.5671, + "step": 2968 + }, + { + "epoch": 0.20615192334398, + "grad_norm": 5.876744985494463, + "learning_rate": 9.208587352828077e-06, + "loss": 0.3555, + "step": 2969 + }, + { + "epoch": 0.20622135814470213, + "grad_norm": 2.9949444063169133, + "learning_rate": 9.207980115196145e-06, + "loss": 0.2446, + "step": 2970 + }, + { + "epoch": 0.20629079294542424, + "grad_norm": 3.3200413999387512, + "learning_rate": 9.207372664729337e-06, + "loss": 0.3733, + "step": 2971 + }, + { + "epoch": 0.20636022774614637, + "grad_norm": 2.1710359423043917, + "learning_rate": 9.206765001458381e-06, + "loss": 0.2332, + "step": 2972 + }, + { + "epoch": 0.20642966254686848, + "grad_norm": 3.6244245685484553, + "learning_rate": 9.206157125414013e-06, + "loss": 0.6052, + "step": 2973 + }, + { + "epoch": 0.20649909734759062, + "grad_norm": 3.5936789816901316, + "learning_rate": 9.205549036626976e-06, + "loss": 0.4288, + "step": 2974 + }, + { + "epoch": 0.20656853214831272, + "grad_norm": 3.34349110424724, + "learning_rate": 9.204940735128028e-06, + "loss": 0.375, + "step": 2975 + }, + { + "epoch": 0.20663796694903486, + "grad_norm": 4.007977871492753, + "learning_rate": 9.204332220947936e-06, + "loss": 0.4116, + "step": 2976 + }, + { + "epoch": 0.206707401749757, + "grad_norm": 5.015516193720728, + "learning_rate": 9.203723494117479e-06, + "loss": 0.8886, + "step": 2977 + }, + { + "epoch": 0.2067768365504791, + "grad_norm": 3.992597594019516, + "learning_rate": 9.203114554667442e-06, + "loss": 0.3354, + "step": 2978 + }, + { + "epoch": 0.20684627135120123, + "grad_norm": 3.3863435882181934, + "learning_rate": 9.202505402628628e-06, + "loss": 0.4754, + "step": 2979 + }, + { + "epoch": 0.20691570615192334, + "grad_norm": 4.859684467840073, + "learning_rate": 9.201896038031847e-06, + "loss": 0.6911, + "step": 2980 + }, + { + "epoch": 0.20698514095264547, + "grad_norm": 3.8974634220770175, + "learning_rate": 9.201286460907918e-06, + "loss": 0.5883, + "step": 2981 + }, + { + "epoch": 0.20705457575336758, + "grad_norm": 4.202218565871006, + "learning_rate": 9.200676671287674e-06, + "loss": 0.5944, + "step": 2982 + }, + { + "epoch": 0.20712401055408972, + "grad_norm": 4.404358746071678, + "learning_rate": 9.200066669201958e-06, + "loss": 0.6628, + "step": 2983 + }, + { + "epoch": 0.20719344535481182, + "grad_norm": 4.257972988606241, + "learning_rate": 9.199456454681623e-06, + "loss": 0.5393, + "step": 2984 + }, + { + "epoch": 0.20726288015553396, + "grad_norm": 3.845464549637663, + "learning_rate": 9.19884602775753e-06, + "loss": 0.4122, + "step": 2985 + }, + { + "epoch": 0.20733231495625606, + "grad_norm": 4.379398247964277, + "learning_rate": 9.198235388460558e-06, + "loss": 0.6378, + "step": 2986 + }, + { + "epoch": 0.2074017497569782, + "grad_norm": 3.235536582148886, + "learning_rate": 9.197624536821591e-06, + "loss": 0.2484, + "step": 2987 + }, + { + "epoch": 0.2074711845577003, + "grad_norm": 4.0465118409422205, + "learning_rate": 9.197013472871526e-06, + "loss": 0.3766, + "step": 2988 + }, + { + "epoch": 0.20754061935842244, + "grad_norm": 4.190572616191252, + "learning_rate": 9.196402196641268e-06, + "loss": 0.3946, + "step": 
2989 + }, + { + "epoch": 0.20761005415914457, + "grad_norm": 2.4794056426187585, + "learning_rate": 9.195790708161733e-06, + "loss": 0.2265, + "step": 2990 + }, + { + "epoch": 0.20767948895986668, + "grad_norm": 4.432359829642248, + "learning_rate": 9.195179007463855e-06, + "loss": 0.69, + "step": 2991 + }, + { + "epoch": 0.20774892376058882, + "grad_norm": 3.8711637191924084, + "learning_rate": 9.194567094578569e-06, + "loss": 0.3126, + "step": 2992 + }, + { + "epoch": 0.20781835856131092, + "grad_norm": 4.510346441785111, + "learning_rate": 9.193954969536825e-06, + "loss": 0.6388, + "step": 2993 + }, + { + "epoch": 0.20788779336203306, + "grad_norm": 4.696288262956791, + "learning_rate": 9.193342632369584e-06, + "loss": 0.6456, + "step": 2994 + }, + { + "epoch": 0.20795722816275516, + "grad_norm": 3.9114869935332957, + "learning_rate": 9.19273008310782e-06, + "loss": 0.3599, + "step": 2995 + }, + { + "epoch": 0.2080266629634773, + "grad_norm": 3.991378834077295, + "learning_rate": 9.19211732178251e-06, + "loss": 0.4249, + "step": 2996 + }, + { + "epoch": 0.2080960977641994, + "grad_norm": 3.935206347909007, + "learning_rate": 9.19150434842465e-06, + "loss": 0.4334, + "step": 2997 + }, + { + "epoch": 0.20816553256492154, + "grad_norm": 4.179782509145216, + "learning_rate": 9.190891163065246e-06, + "loss": 0.5651, + "step": 2998 + }, + { + "epoch": 0.20823496736564365, + "grad_norm": 4.461541853580165, + "learning_rate": 9.190277765735307e-06, + "loss": 0.6242, + "step": 2999 + }, + { + "epoch": 0.20830440216636578, + "grad_norm": 3.787464471347851, + "learning_rate": 9.18966415646586e-06, + "loss": 0.4826, + "step": 3000 + }, + { + "epoch": 0.20837383696708792, + "grad_norm": 4.996412982875818, + "learning_rate": 9.189050335287941e-06, + "loss": 0.7873, + "step": 3001 + }, + { + "epoch": 0.20844327176781002, + "grad_norm": 3.810718241960916, + "learning_rate": 9.188436302232598e-06, + "loss": 0.4396, + "step": 3002 + }, + { + "epoch": 0.20851270656853216, + "grad_norm": 2.3939946214001777, + "learning_rate": 9.187822057330883e-06, + "loss": 0.0925, + "step": 3003 + }, + { + "epoch": 0.20858214136925426, + "grad_norm": 3.49864888648758, + "learning_rate": 9.18720760061387e-06, + "loss": 0.3737, + "step": 3004 + }, + { + "epoch": 0.2086515761699764, + "grad_norm": 3.376209598562402, + "learning_rate": 9.186592932112634e-06, + "loss": 0.422, + "step": 3005 + }, + { + "epoch": 0.2087210109706985, + "grad_norm": 3.718660065251022, + "learning_rate": 9.185978051858265e-06, + "loss": 0.4118, + "step": 3006 + }, + { + "epoch": 0.20879044577142064, + "grad_norm": 3.945956979543894, + "learning_rate": 9.185362959881862e-06, + "loss": 0.5775, + "step": 3007 + }, + { + "epoch": 0.20885988057214275, + "grad_norm": 4.461495896998751, + "learning_rate": 9.18474765621454e-06, + "loss": 0.4672, + "step": 3008 + }, + { + "epoch": 0.20892931537286488, + "grad_norm": 3.595031843733969, + "learning_rate": 9.184132140887414e-06, + "loss": 0.404, + "step": 3009 + }, + { + "epoch": 0.208998750173587, + "grad_norm": 3.8326058447841085, + "learning_rate": 9.183516413931618e-06, + "loss": 0.4135, + "step": 3010 + }, + { + "epoch": 0.20906818497430912, + "grad_norm": 7.490574766918556, + "learning_rate": 9.182900475378297e-06, + "loss": 0.4537, + "step": 3011 + }, + { + "epoch": 0.20913761977503126, + "grad_norm": 4.996906096220394, + "learning_rate": 9.182284325258603e-06, + "loss": 0.5429, + "step": 3012 + }, + { + "epoch": 0.20920705457575337, + "grad_norm": 4.987454899082449, + "learning_rate": 
9.1816679636037e-06, + "loss": 0.8081, + "step": 3013 + }, + { + "epoch": 0.2092764893764755, + "grad_norm": 3.253770813941013, + "learning_rate": 9.181051390444765e-06, + "loss": 0.2422, + "step": 3014 + }, + { + "epoch": 0.2093459241771976, + "grad_norm": 5.243902509584505, + "learning_rate": 9.18043460581298e-06, + "loss": 0.6212, + "step": 3015 + }, + { + "epoch": 0.20941535897791974, + "grad_norm": 3.4425282557283547, + "learning_rate": 9.179817609739545e-06, + "loss": 0.431, + "step": 3016 + }, + { + "epoch": 0.20948479377864185, + "grad_norm": 4.4298823483233924, + "learning_rate": 9.179200402255664e-06, + "loss": 0.5552, + "step": 3017 + }, + { + "epoch": 0.20955422857936398, + "grad_norm": 4.905425205483724, + "learning_rate": 9.178582983392556e-06, + "loss": 0.7432, + "step": 3018 + }, + { + "epoch": 0.2096236633800861, + "grad_norm": 4.692332724087887, + "learning_rate": 9.177965353181448e-06, + "loss": 0.6348, + "step": 3019 + }, + { + "epoch": 0.20969309818080822, + "grad_norm": 3.8306559690106794, + "learning_rate": 9.17734751165358e-06, + "loss": 0.3818, + "step": 3020 + }, + { + "epoch": 0.20976253298153033, + "grad_norm": 4.629743094497733, + "learning_rate": 9.176729458840203e-06, + "loss": 0.4196, + "step": 3021 + }, + { + "epoch": 0.20983196778225247, + "grad_norm": 4.200183941718307, + "learning_rate": 9.176111194772577e-06, + "loss": 0.7091, + "step": 3022 + }, + { + "epoch": 0.2099014025829746, + "grad_norm": 3.629942279609985, + "learning_rate": 9.175492719481971e-06, + "loss": 0.5527, + "step": 3023 + }, + { + "epoch": 0.2099708373836967, + "grad_norm": 4.2507857358365975, + "learning_rate": 9.174874032999668e-06, + "loss": 0.4814, + "step": 3024 + }, + { + "epoch": 0.21004027218441884, + "grad_norm": 5.183896950097042, + "learning_rate": 9.174255135356963e-06, + "loss": 0.7816, + "step": 3025 + }, + { + "epoch": 0.21010970698514095, + "grad_norm": 4.695165086044882, + "learning_rate": 9.173636026585154e-06, + "loss": 0.8463, + "step": 3026 + }, + { + "epoch": 0.21017914178586308, + "grad_norm": 4.080712897436878, + "learning_rate": 9.173016706715559e-06, + "loss": 0.4484, + "step": 3027 + }, + { + "epoch": 0.2102485765865852, + "grad_norm": 3.8730389445143376, + "learning_rate": 9.1723971757795e-06, + "loss": 0.4386, + "step": 3028 + }, + { + "epoch": 0.21031801138730732, + "grad_norm": 5.03105743399599, + "learning_rate": 9.171777433808313e-06, + "loss": 0.8397, + "step": 3029 + }, + { + "epoch": 0.21038744618802943, + "grad_norm": 5.116355636263747, + "learning_rate": 9.171157480833345e-06, + "loss": 0.6534, + "step": 3030 + }, + { + "epoch": 0.21045688098875157, + "grad_norm": 2.6440019186833057, + "learning_rate": 9.170537316885953e-06, + "loss": 0.4631, + "step": 3031 + }, + { + "epoch": 0.21052631578947367, + "grad_norm": 4.02988983940807, + "learning_rate": 9.1699169419975e-06, + "loss": 0.4753, + "step": 3032 + }, + { + "epoch": 0.2105957505901958, + "grad_norm": 3.8867657627818892, + "learning_rate": 9.169296356199368e-06, + "loss": 0.6021, + "step": 3033 + }, + { + "epoch": 0.21066518539091794, + "grad_norm": 4.0898968108518, + "learning_rate": 9.168675559522944e-06, + "loss": 0.4906, + "step": 3034 + }, + { + "epoch": 0.21073462019164005, + "grad_norm": 3.7192952105480126, + "learning_rate": 9.168054551999627e-06, + "loss": 0.5023, + "step": 3035 + }, + { + "epoch": 0.21080405499236218, + "grad_norm": 4.965968860732031, + "learning_rate": 9.167433333660828e-06, + "loss": 0.9098, + "step": 3036 + }, + { + "epoch": 0.2108734897930843, + "grad_norm": 
3.881113663567508, + "learning_rate": 9.166811904537967e-06, + "loss": 0.5022, + "step": 3037 + }, + { + "epoch": 0.21094292459380642, + "grad_norm": 4.284229542078125, + "learning_rate": 9.166190264662473e-06, + "loss": 0.4457, + "step": 3038 + }, + { + "epoch": 0.21101235939452853, + "grad_norm": 3.9295783925446393, + "learning_rate": 9.165568414065792e-06, + "loss": 0.4882, + "step": 3039 + }, + { + "epoch": 0.21108179419525067, + "grad_norm": 4.216564450240247, + "learning_rate": 9.164946352779373e-06, + "loss": 0.6646, + "step": 3040 + }, + { + "epoch": 0.21115122899597277, + "grad_norm": 4.130812573234921, + "learning_rate": 9.16432408083468e-06, + "loss": 0.4314, + "step": 3041 + }, + { + "epoch": 0.2112206637966949, + "grad_norm": 3.8034464243990493, + "learning_rate": 9.163701598263188e-06, + "loss": 0.3907, + "step": 3042 + }, + { + "epoch": 0.21129009859741701, + "grad_norm": 4.298404027631847, + "learning_rate": 9.16307890509638e-06, + "loss": 0.5337, + "step": 3043 + }, + { + "epoch": 0.21135953339813915, + "grad_norm": 3.7488555212177417, + "learning_rate": 9.162456001365753e-06, + "loss": 0.5129, + "step": 3044 + }, + { + "epoch": 0.21142896819886126, + "grad_norm": 3.5679628953252567, + "learning_rate": 9.161832887102813e-06, + "loss": 0.7084, + "step": 3045 + }, + { + "epoch": 0.2114984029995834, + "grad_norm": 3.591279135741178, + "learning_rate": 9.161209562339073e-06, + "loss": 0.3695, + "step": 3046 + }, + { + "epoch": 0.21156783780030552, + "grad_norm": 4.072850313228122, + "learning_rate": 9.160586027106062e-06, + "loss": 0.5386, + "step": 3047 + }, + { + "epoch": 0.21163727260102763, + "grad_norm": 3.42282009376979, + "learning_rate": 9.159962281435318e-06, + "loss": 0.3228, + "step": 3048 + }, + { + "epoch": 0.21170670740174977, + "grad_norm": 4.462195846448209, + "learning_rate": 9.159338325358389e-06, + "loss": 0.5386, + "step": 3049 + }, + { + "epoch": 0.21177614220247187, + "grad_norm": 4.440935303686969, + "learning_rate": 9.158714158906834e-06, + "loss": 0.6398, + "step": 3050 + }, + { + "epoch": 0.211845577003194, + "grad_norm": 3.676460675767309, + "learning_rate": 9.158089782112223e-06, + "loss": 0.4207, + "step": 3051 + }, + { + "epoch": 0.21191501180391611, + "grad_norm": 4.759515778061281, + "learning_rate": 9.157465195006137e-06, + "loss": 0.6801, + "step": 3052 + }, + { + "epoch": 0.21198444660463825, + "grad_norm": 2.9339254048991856, + "learning_rate": 9.156840397620164e-06, + "loss": 0.1921, + "step": 3053 + }, + { + "epoch": 0.21205388140536036, + "grad_norm": 4.113335174822009, + "learning_rate": 9.15621538998591e-06, + "loss": 0.5049, + "step": 3054 + }, + { + "epoch": 0.2121233162060825, + "grad_norm": 3.421880643667943, + "learning_rate": 9.155590172134983e-06, + "loss": 0.3204, + "step": 3055 + }, + { + "epoch": 0.2121927510068046, + "grad_norm": 4.970850030457273, + "learning_rate": 9.154964744099006e-06, + "loss": 0.6536, + "step": 3056 + }, + { + "epoch": 0.21226218580752673, + "grad_norm": 4.107373472600804, + "learning_rate": 9.154339105909615e-06, + "loss": 0.6691, + "step": 3057 + }, + { + "epoch": 0.21233162060824887, + "grad_norm": 3.1683352666123916, + "learning_rate": 9.153713257598453e-06, + "loss": 0.4187, + "step": 3058 + }, + { + "epoch": 0.21240105540897097, + "grad_norm": 3.2769558879244687, + "learning_rate": 9.153087199197175e-06, + "loss": 0.5633, + "step": 3059 + }, + { + "epoch": 0.2124704902096931, + "grad_norm": 3.9345365387921034, + "learning_rate": 9.152460930737448e-06, + "loss": 0.2962, + "step": 3060 + }, + { + 
"epoch": 0.21253992501041521, + "grad_norm": 3.703587921502508, + "learning_rate": 9.151834452250942e-06, + "loss": 0.3698, + "step": 3061 + }, + { + "epoch": 0.21260935981113735, + "grad_norm": 3.732968760068661, + "learning_rate": 9.151207763769352e-06, + "loss": 0.3339, + "step": 3062 + }, + { + "epoch": 0.21267879461185946, + "grad_norm": 4.56938222565007, + "learning_rate": 9.150580865324368e-06, + "loss": 0.4469, + "step": 3063 + }, + { + "epoch": 0.2127482294125816, + "grad_norm": 5.158801847966025, + "learning_rate": 9.1499537569477e-06, + "loss": 0.8767, + "step": 3064 + }, + { + "epoch": 0.2128176642133037, + "grad_norm": 4.1474335913631855, + "learning_rate": 9.149326438671067e-06, + "loss": 0.4557, + "step": 3065 + }, + { + "epoch": 0.21288709901402583, + "grad_norm": 4.340886894224082, + "learning_rate": 9.148698910526199e-06, + "loss": 0.8676, + "step": 3066 + }, + { + "epoch": 0.21295653381474794, + "grad_norm": 3.68725084153512, + "learning_rate": 9.148071172544835e-06, + "loss": 0.5116, + "step": 3067 + }, + { + "epoch": 0.21302596861547007, + "grad_norm": 3.8678573821943125, + "learning_rate": 9.147443224758725e-06, + "loss": 0.311, + "step": 3068 + }, + { + "epoch": 0.2130954034161922, + "grad_norm": 2.927458804768907, + "learning_rate": 9.146815067199628e-06, + "loss": 0.4113, + "step": 3069 + }, + { + "epoch": 0.21316483821691432, + "grad_norm": 3.3600972357085057, + "learning_rate": 9.14618669989932e-06, + "loss": 0.3246, + "step": 3070 + }, + { + "epoch": 0.21323427301763645, + "grad_norm": 5.328649073457586, + "learning_rate": 9.145558122889579e-06, + "loss": 0.4158, + "step": 3071 + }, + { + "epoch": 0.21330370781835856, + "grad_norm": 3.54143183878089, + "learning_rate": 9.144929336202199e-06, + "loss": 0.4415, + "step": 3072 + }, + { + "epoch": 0.2133731426190807, + "grad_norm": 3.839025124560028, + "learning_rate": 9.144300339868983e-06, + "loss": 0.5091, + "step": 3073 + }, + { + "epoch": 0.2134425774198028, + "grad_norm": 3.3880694412616466, + "learning_rate": 9.143671133921746e-06, + "loss": 0.3608, + "step": 3074 + }, + { + "epoch": 0.21351201222052493, + "grad_norm": 4.5241255698422425, + "learning_rate": 9.143041718392313e-06, + "loss": 0.5161, + "step": 3075 + }, + { + "epoch": 0.21358144702124704, + "grad_norm": 5.692720021469452, + "learning_rate": 9.142412093312516e-06, + "loss": 0.658, + "step": 3076 + }, + { + "epoch": 0.21365088182196917, + "grad_norm": 5.132239433808138, + "learning_rate": 9.141782258714205e-06, + "loss": 0.5712, + "step": 3077 + }, + { + "epoch": 0.21372031662269128, + "grad_norm": 4.335707092185283, + "learning_rate": 9.141152214629231e-06, + "loss": 0.6008, + "step": 3078 + }, + { + "epoch": 0.21378975142341342, + "grad_norm": 3.1903193374390875, + "learning_rate": 9.140521961089468e-06, + "loss": 0.3771, + "step": 3079 + }, + { + "epoch": 0.21385918622413555, + "grad_norm": 4.3278411634058145, + "learning_rate": 9.139891498126787e-06, + "loss": 0.5265, + "step": 3080 + }, + { + "epoch": 0.21392862102485766, + "grad_norm": 4.087571064325811, + "learning_rate": 9.139260825773079e-06, + "loss": 0.4189, + "step": 3081 + }, + { + "epoch": 0.2139980558255798, + "grad_norm": 4.4525788973645035, + "learning_rate": 9.138629944060243e-06, + "loss": 0.5436, + "step": 3082 + }, + { + "epoch": 0.2140674906263019, + "grad_norm": 3.447727968723044, + "learning_rate": 9.137998853020187e-06, + "loss": 0.3647, + "step": 3083 + }, + { + "epoch": 0.21413692542702403, + "grad_norm": 5.936355902588573, + "learning_rate": 9.13736755268483e-06, + 
"loss": 0.6887, + "step": 3084 + }, + { + "epoch": 0.21420636022774614, + "grad_norm": 3.8405917051082423, + "learning_rate": 9.136736043086107e-06, + "loss": 0.3461, + "step": 3085 + }, + { + "epoch": 0.21427579502846827, + "grad_norm": 3.319732761861475, + "learning_rate": 9.136104324255955e-06, + "loss": 0.4738, + "step": 3086 + }, + { + "epoch": 0.21434522982919038, + "grad_norm": 4.85617079122543, + "learning_rate": 9.135472396226325e-06, + "loss": 0.6516, + "step": 3087 + }, + { + "epoch": 0.21441466462991252, + "grad_norm": 4.511568375550234, + "learning_rate": 9.134840259029183e-06, + "loss": 0.426, + "step": 3088 + }, + { + "epoch": 0.21448409943063462, + "grad_norm": 3.18494414858035, + "learning_rate": 9.134207912696497e-06, + "loss": 0.4717, + "step": 3089 + }, + { + "epoch": 0.21455353423135676, + "grad_norm": 4.008745512177627, + "learning_rate": 9.133575357260254e-06, + "loss": 0.4918, + "step": 3090 + }, + { + "epoch": 0.21462296903207886, + "grad_norm": 5.293775798695443, + "learning_rate": 9.132942592752447e-06, + "loss": 0.6368, + "step": 3091 + }, + { + "epoch": 0.214692403832801, + "grad_norm": 4.942373543541338, + "learning_rate": 9.132309619205081e-06, + "loss": 0.5674, + "step": 3092 + }, + { + "epoch": 0.21476183863352313, + "grad_norm": 5.116525949864272, + "learning_rate": 9.13167643665017e-06, + "loss": 0.79, + "step": 3093 + }, + { + "epoch": 0.21483127343424524, + "grad_norm": 3.03424408274422, + "learning_rate": 9.131043045119741e-06, + "loss": 0.4294, + "step": 3094 + }, + { + "epoch": 0.21490070823496737, + "grad_norm": 3.7602860195400267, + "learning_rate": 9.130409444645829e-06, + "loss": 0.4811, + "step": 3095 + }, + { + "epoch": 0.21497014303568948, + "grad_norm": 3.2834263800866617, + "learning_rate": 9.12977563526048e-06, + "loss": 0.3168, + "step": 3096 + }, + { + "epoch": 0.21503957783641162, + "grad_norm": 4.3501943240532155, + "learning_rate": 9.129141616995754e-06, + "loss": 0.5386, + "step": 3097 + }, + { + "epoch": 0.21510901263713372, + "grad_norm": 4.596567151953131, + "learning_rate": 9.128507389883716e-06, + "loss": 0.6332, + "step": 3098 + }, + { + "epoch": 0.21517844743785586, + "grad_norm": 5.802297573056446, + "learning_rate": 9.127872953956447e-06, + "loss": 0.9454, + "step": 3099 + }, + { + "epoch": 0.21524788223857796, + "grad_norm": 4.453147883072212, + "learning_rate": 9.127238309246036e-06, + "loss": 0.5962, + "step": 3100 + }, + { + "epoch": 0.2153173170393001, + "grad_norm": 2.921091746462143, + "learning_rate": 9.12660345578458e-06, + "loss": 0.4112, + "step": 3101 + }, + { + "epoch": 0.2153867518400222, + "grad_norm": 3.4713130535078305, + "learning_rate": 9.12596839360419e-06, + "loss": 0.3862, + "step": 3102 + }, + { + "epoch": 0.21545618664074434, + "grad_norm": 3.73070738233506, + "learning_rate": 9.125333122736987e-06, + "loss": 0.4481, + "step": 3103 + }, + { + "epoch": 0.21552562144146648, + "grad_norm": 4.043128920618464, + "learning_rate": 9.124697643215105e-06, + "loss": 0.6725, + "step": 3104 + }, + { + "epoch": 0.21559505624218858, + "grad_norm": 3.937878555945242, + "learning_rate": 9.124061955070685e-06, + "loss": 0.5313, + "step": 3105 + }, + { + "epoch": 0.21566449104291072, + "grad_norm": 4.535071609588837, + "learning_rate": 9.123426058335877e-06, + "loss": 0.8052, + "step": 3106 + }, + { + "epoch": 0.21573392584363282, + "grad_norm": 4.535369551654566, + "learning_rate": 9.122789953042843e-06, + "loss": 0.613, + "step": 3107 + }, + { + "epoch": 0.21580336064435496, + "grad_norm": 5.094929868553323, + 
"learning_rate": 9.122153639223758e-06, + "loss": 0.651, + "step": 3108 + }, + { + "epoch": 0.21587279544507706, + "grad_norm": 2.8565849666850784, + "learning_rate": 9.121517116910808e-06, + "loss": 0.2055, + "step": 3109 + }, + { + "epoch": 0.2159422302457992, + "grad_norm": 4.212783117165023, + "learning_rate": 9.120880386136186e-06, + "loss": 0.7382, + "step": 3110 + }, + { + "epoch": 0.2160116650465213, + "grad_norm": 4.077992027163121, + "learning_rate": 9.120243446932099e-06, + "loss": 0.3683, + "step": 3111 + }, + { + "epoch": 0.21608109984724344, + "grad_norm": 3.6622676370870297, + "learning_rate": 9.119606299330757e-06, + "loss": 0.3335, + "step": 3112 + }, + { + "epoch": 0.21615053464796555, + "grad_norm": 4.509152574725696, + "learning_rate": 9.118968943364393e-06, + "loss": 0.4701, + "step": 3113 + }, + { + "epoch": 0.21621996944868768, + "grad_norm": 3.822939585896521, + "learning_rate": 9.11833137906524e-06, + "loss": 0.4954, + "step": 3114 + }, + { + "epoch": 0.21628940424940982, + "grad_norm": 4.630548226738602, + "learning_rate": 9.117693606465544e-06, + "loss": 0.6423, + "step": 3115 + }, + { + "epoch": 0.21635883905013192, + "grad_norm": 3.615145934213717, + "learning_rate": 9.117055625597567e-06, + "loss": 0.3944, + "step": 3116 + }, + { + "epoch": 0.21642827385085406, + "grad_norm": 4.231738578002882, + "learning_rate": 9.116417436493574e-06, + "loss": 0.4536, + "step": 3117 + }, + { + "epoch": 0.21649770865157617, + "grad_norm": 4.484160279718635, + "learning_rate": 9.115779039185847e-06, + "loss": 0.5683, + "step": 3118 + }, + { + "epoch": 0.2165671434522983, + "grad_norm": 3.3916200399309084, + "learning_rate": 9.115140433706673e-06, + "loss": 0.4321, + "step": 3119 + }, + { + "epoch": 0.2166365782530204, + "grad_norm": 5.573636579364532, + "learning_rate": 9.114501620088352e-06, + "loss": 0.9509, + "step": 3120 + }, + { + "epoch": 0.21670601305374254, + "grad_norm": 4.056580604098502, + "learning_rate": 9.113862598363195e-06, + "loss": 0.4642, + "step": 3121 + }, + { + "epoch": 0.21677544785446465, + "grad_norm": 4.07908743687115, + "learning_rate": 9.113223368563521e-06, + "loss": 0.6822, + "step": 3122 + }, + { + "epoch": 0.21684488265518678, + "grad_norm": 2.770760683959282, + "learning_rate": 9.112583930721666e-06, + "loss": 0.3059, + "step": 3123 + }, + { + "epoch": 0.2169143174559089, + "grad_norm": 3.3734619221312214, + "learning_rate": 9.111944284869969e-06, + "loss": 0.2395, + "step": 3124 + }, + { + "epoch": 0.21698375225663102, + "grad_norm": 4.299023993253073, + "learning_rate": 9.111304431040782e-06, + "loss": 0.5411, + "step": 3125 + }, + { + "epoch": 0.21705318705735316, + "grad_norm": 4.213144221304634, + "learning_rate": 9.11066436926647e-06, + "loss": 0.7102, + "step": 3126 + }, + { + "epoch": 0.21712262185807527, + "grad_norm": 3.725235422971617, + "learning_rate": 9.110024099579407e-06, + "loss": 0.3095, + "step": 3127 + }, + { + "epoch": 0.2171920566587974, + "grad_norm": 3.0490560910198083, + "learning_rate": 9.109383622011973e-06, + "loss": 0.4276, + "step": 3128 + }, + { + "epoch": 0.2172614914595195, + "grad_norm": 4.351720996469051, + "learning_rate": 9.108742936596567e-06, + "loss": 0.7454, + "step": 3129 + }, + { + "epoch": 0.21733092626024164, + "grad_norm": 3.693640954962, + "learning_rate": 9.108102043365594e-06, + "loss": 0.5545, + "step": 3130 + }, + { + "epoch": 0.21740036106096375, + "grad_norm": 3.932869010382426, + "learning_rate": 9.107460942351467e-06, + "loss": 0.4209, + "step": 3131 + }, + { + "epoch": 0.21746979586168588, 
+ "grad_norm": 4.529151509314926, + "learning_rate": 9.106819633586612e-06, + "loss": 0.5477, + "step": 3132 + }, + { + "epoch": 0.217539230662408, + "grad_norm": 7.065562075850721, + "learning_rate": 9.106178117103468e-06, + "loss": 0.5704, + "step": 3133 + }, + { + "epoch": 0.21760866546313012, + "grad_norm": 3.445804052243519, + "learning_rate": 9.105536392934483e-06, + "loss": 0.5002, + "step": 3134 + }, + { + "epoch": 0.21767810026385223, + "grad_norm": 4.298564030661732, + "learning_rate": 9.10489446111211e-06, + "loss": 0.4868, + "step": 3135 + }, + { + "epoch": 0.21774753506457437, + "grad_norm": 4.187801444503477, + "learning_rate": 9.104252321668823e-06, + "loss": 0.5251, + "step": 3136 + }, + { + "epoch": 0.2178169698652965, + "grad_norm": 3.6736762743810614, + "learning_rate": 9.103609974637097e-06, + "loss": 0.4618, + "step": 3137 + }, + { + "epoch": 0.2178864046660186, + "grad_norm": 4.253603379700095, + "learning_rate": 9.102967420049421e-06, + "loss": 0.5018, + "step": 3138 + }, + { + "epoch": 0.21795583946674074, + "grad_norm": 3.9327163022409075, + "learning_rate": 9.102324657938297e-06, + "loss": 0.3432, + "step": 3139 + }, + { + "epoch": 0.21802527426746285, + "grad_norm": 3.597650195560748, + "learning_rate": 9.101681688336233e-06, + "loss": 0.4101, + "step": 3140 + }, + { + "epoch": 0.21809470906818498, + "grad_norm": 4.571305699698438, + "learning_rate": 9.10103851127575e-06, + "loss": 0.6379, + "step": 3141 + }, + { + "epoch": 0.2181641438689071, + "grad_norm": 5.020259576371573, + "learning_rate": 9.10039512678938e-06, + "loss": 0.7497, + "step": 3142 + }, + { + "epoch": 0.21823357866962922, + "grad_norm": 4.440256458876575, + "learning_rate": 9.099751534909666e-06, + "loss": 0.4613, + "step": 3143 + }, + { + "epoch": 0.21830301347035133, + "grad_norm": 3.3436968701459557, + "learning_rate": 9.099107735669157e-06, + "loss": 0.371, + "step": 3144 + }, + { + "epoch": 0.21837244827107347, + "grad_norm": 4.269541620196962, + "learning_rate": 9.098463729100418e-06, + "loss": 0.6622, + "step": 3145 + }, + { + "epoch": 0.21844188307179557, + "grad_norm": 3.8250525133325137, + "learning_rate": 9.097819515236022e-06, + "loss": 0.5138, + "step": 3146 + }, + { + "epoch": 0.2185113178725177, + "grad_norm": 4.198308017536499, + "learning_rate": 9.09717509410855e-06, + "loss": 0.4571, + "step": 3147 + }, + { + "epoch": 0.21858075267323981, + "grad_norm": 4.306533146874031, + "learning_rate": 9.096530465750599e-06, + "loss": 0.7002, + "step": 3148 + }, + { + "epoch": 0.21865018747396195, + "grad_norm": 4.10067975244862, + "learning_rate": 9.095885630194774e-06, + "loss": 0.3935, + "step": 3149 + }, + { + "epoch": 0.21871962227468408, + "grad_norm": 4.29636125111369, + "learning_rate": 9.095240587473687e-06, + "loss": 0.6706, + "step": 3150 + }, + { + "epoch": 0.2187890570754062, + "grad_norm": 3.8332562629814184, + "learning_rate": 9.094595337619964e-06, + "loss": 0.5684, + "step": 3151 + }, + { + "epoch": 0.21885849187612832, + "grad_norm": 3.8241695664611233, + "learning_rate": 9.093949880666244e-06, + "loss": 0.418, + "step": 3152 + }, + { + "epoch": 0.21892792667685043, + "grad_norm": 4.31222951366557, + "learning_rate": 9.09330421664517e-06, + "loss": 0.601, + "step": 3153 + }, + { + "epoch": 0.21899736147757257, + "grad_norm": 4.041826293521384, + "learning_rate": 9.092658345589404e-06, + "loss": 0.4314, + "step": 3154 + }, + { + "epoch": 0.21906679627829467, + "grad_norm": 2.6831636813476956, + "learning_rate": 9.092012267531607e-06, + "loss": 0.1996, + "step": 3155 + }, + 
{ + "epoch": 0.2191362310790168, + "grad_norm": 4.690498104972807, + "learning_rate": 9.091365982504461e-06, + "loss": 0.8055, + "step": 3156 + }, + { + "epoch": 0.21920566587973891, + "grad_norm": 3.6573197529870876, + "learning_rate": 9.090719490540654e-06, + "loss": 0.4624, + "step": 3157 + }, + { + "epoch": 0.21927510068046105, + "grad_norm": 4.648329256333156, + "learning_rate": 9.090072791672881e-06, + "loss": 0.4473, + "step": 3158 + }, + { + "epoch": 0.21934453548118316, + "grad_norm": 4.431520922017104, + "learning_rate": 9.089425885933858e-06, + "loss": 0.8378, + "step": 3159 + }, + { + "epoch": 0.2194139702819053, + "grad_norm": 3.642096154107174, + "learning_rate": 9.088778773356299e-06, + "loss": 0.5909, + "step": 3160 + }, + { + "epoch": 0.21948340508262743, + "grad_norm": 3.7035281519428684, + "learning_rate": 9.088131453972938e-06, + "loss": 0.3521, + "step": 3161 + }, + { + "epoch": 0.21955283988334953, + "grad_norm": 3.958560969195321, + "learning_rate": 9.087483927816513e-06, + "loss": 0.4117, + "step": 3162 + }, + { + "epoch": 0.21962227468407167, + "grad_norm": 3.4362757123754744, + "learning_rate": 9.086836194919777e-06, + "loss": 0.5871, + "step": 3163 + }, + { + "epoch": 0.21969170948479377, + "grad_norm": 5.062514970008441, + "learning_rate": 9.08618825531549e-06, + "loss": 0.5547, + "step": 3164 + }, + { + "epoch": 0.2197611442855159, + "grad_norm": 3.7799760996796348, + "learning_rate": 9.085540109036426e-06, + "loss": 0.5471, + "step": 3165 + }, + { + "epoch": 0.21983057908623801, + "grad_norm": 3.4072184801236856, + "learning_rate": 9.084891756115365e-06, + "loss": 0.3145, + "step": 3166 + }, + { + "epoch": 0.21990001388696015, + "grad_norm": 4.313631465829302, + "learning_rate": 9.084243196585101e-06, + "loss": 0.4822, + "step": 3167 + }, + { + "epoch": 0.21996944868768226, + "grad_norm": 3.3087178110709976, + "learning_rate": 9.08359443047844e-06, + "loss": 0.2751, + "step": 3168 + }, + { + "epoch": 0.2200388834884044, + "grad_norm": 4.733265203565386, + "learning_rate": 9.082945457828191e-06, + "loss": 0.7757, + "step": 3169 + }, + { + "epoch": 0.2201083182891265, + "grad_norm": 3.576480283422527, + "learning_rate": 9.082296278667184e-06, + "loss": 0.4656, + "step": 3170 + }, + { + "epoch": 0.22017775308984863, + "grad_norm": 3.1839757828048962, + "learning_rate": 9.081646893028248e-06, + "loss": 0.2634, + "step": 3171 + }, + { + "epoch": 0.22024718789057077, + "grad_norm": 5.321503059839413, + "learning_rate": 9.080997300944232e-06, + "loss": 0.8959, + "step": 3172 + }, + { + "epoch": 0.22031662269129287, + "grad_norm": 3.3603177372770303, + "learning_rate": 9.08034750244799e-06, + "loss": 0.3754, + "step": 3173 + }, + { + "epoch": 0.220386057492015, + "grad_norm": 4.16192163000631, + "learning_rate": 9.079697497572389e-06, + "loss": 0.467, + "step": 3174 + }, + { + "epoch": 0.22045549229273712, + "grad_norm": 4.213405818720391, + "learning_rate": 9.079047286350304e-06, + "loss": 0.6342, + "step": 3175 + }, + { + "epoch": 0.22052492709345925, + "grad_norm": 3.9366326505046887, + "learning_rate": 9.078396868814625e-06, + "loss": 0.4861, + "step": 3176 + }, + { + "epoch": 0.22059436189418136, + "grad_norm": 3.5387391629559555, + "learning_rate": 9.077746244998246e-06, + "loss": 0.3605, + "step": 3177 + }, + { + "epoch": 0.2206637966949035, + "grad_norm": 4.952069669021886, + "learning_rate": 9.077095414934076e-06, + "loss": 0.7291, + "step": 3178 + }, + { + "epoch": 0.2207332314956256, + "grad_norm": 4.989549346045639, + "learning_rate": 
9.076444378655034e-06, + "loss": 0.9141, + "step": 3179 + }, + { + "epoch": 0.22080266629634773, + "grad_norm": 5.1225030074973015, + "learning_rate": 9.07579313619405e-06, + "loss": 0.7268, + "step": 3180 + }, + { + "epoch": 0.22087210109706984, + "grad_norm": 2.7799762186087116, + "learning_rate": 9.075141687584056e-06, + "loss": 0.2322, + "step": 3181 + }, + { + "epoch": 0.22094153589779197, + "grad_norm": 3.296339146768401, + "learning_rate": 9.074490032858013e-06, + "loss": 0.3359, + "step": 3182 + }, + { + "epoch": 0.2210109706985141, + "grad_norm": 2.410597587949289, + "learning_rate": 9.07383817204887e-06, + "loss": 0.3118, + "step": 3183 + }, + { + "epoch": 0.22108040549923622, + "grad_norm": 4.430216342465204, + "learning_rate": 9.073186105189605e-06, + "loss": 0.3594, + "step": 3184 + }, + { + "epoch": 0.22114984029995835, + "grad_norm": 3.810111461541002, + "learning_rate": 9.072533832313194e-06, + "loss": 0.351, + "step": 3185 + }, + { + "epoch": 0.22121927510068046, + "grad_norm": 5.231565937218624, + "learning_rate": 9.071881353452632e-06, + "loss": 0.8923, + "step": 3186 + }, + { + "epoch": 0.2212887099014026, + "grad_norm": 4.3664163065531465, + "learning_rate": 9.071228668640918e-06, + "loss": 0.5511, + "step": 3187 + }, + { + "epoch": 0.2213581447021247, + "grad_norm": 4.395651104645602, + "learning_rate": 9.070575777911063e-06, + "loss": 0.6657, + "step": 3188 + }, + { + "epoch": 0.22142757950284683, + "grad_norm": 3.86060901837731, + "learning_rate": 9.069922681296094e-06, + "loss": 0.2892, + "step": 3189 + }, + { + "epoch": 0.22149701430356894, + "grad_norm": 4.846235252109851, + "learning_rate": 9.06926937882904e-06, + "loss": 0.5847, + "step": 3190 + }, + { + "epoch": 0.22156644910429107, + "grad_norm": 5.186671314569035, + "learning_rate": 9.068615870542946e-06, + "loss": 0.5685, + "step": 3191 + }, + { + "epoch": 0.22163588390501318, + "grad_norm": 4.398646902131686, + "learning_rate": 9.067962156470865e-06, + "loss": 0.633, + "step": 3192 + }, + { + "epoch": 0.22170531870573532, + "grad_norm": 4.210034692604905, + "learning_rate": 9.06730823664586e-06, + "loss": 0.2974, + "step": 3193 + }, + { + "epoch": 0.22177475350645742, + "grad_norm": 4.185427526192808, + "learning_rate": 9.066654111101009e-06, + "loss": 0.5827, + "step": 3194 + }, + { + "epoch": 0.22184418830717956, + "grad_norm": 4.920770646954655, + "learning_rate": 9.065999779869394e-06, + "loss": 0.6647, + "step": 3195 + }, + { + "epoch": 0.2219136231079017, + "grad_norm": 5.472157451721229, + "learning_rate": 9.06534524298411e-06, + "loss": 0.4983, + "step": 3196 + }, + { + "epoch": 0.2219830579086238, + "grad_norm": 4.207769175036573, + "learning_rate": 9.064690500478266e-06, + "loss": 0.3872, + "step": 3197 + }, + { + "epoch": 0.22205249270934593, + "grad_norm": 4.248045562387065, + "learning_rate": 9.064035552384975e-06, + "loss": 0.6654, + "step": 3198 + }, + { + "epoch": 0.22212192751006804, + "grad_norm": 3.0786019399998996, + "learning_rate": 9.063380398737364e-06, + "loss": 0.3686, + "step": 3199 + }, + { + "epoch": 0.22219136231079017, + "grad_norm": 3.4721083765118705, + "learning_rate": 9.06272503956857e-06, + "loss": 0.3282, + "step": 3200 + }, + { + "epoch": 0.22226079711151228, + "grad_norm": 3.0602153438617465, + "learning_rate": 9.062069474911742e-06, + "loss": 0.3121, + "step": 3201 + }, + { + "epoch": 0.22233023191223442, + "grad_norm": 3.418533436434518, + "learning_rate": 9.061413704800037e-06, + "loss": 0.484, + "step": 3202 + }, + { + "epoch": 0.22239966671295652, + "grad_norm": 
3.7860165484407484, + "learning_rate": 9.06075772926662e-06, + "loss": 0.53, + "step": 3203 + }, + { + "epoch": 0.22246910151367866, + "grad_norm": 4.405256710961151, + "learning_rate": 9.060101548344675e-06, + "loss": 0.3916, + "step": 3204 + }, + { + "epoch": 0.22253853631440076, + "grad_norm": 4.031819357590662, + "learning_rate": 9.059445162067386e-06, + "loss": 0.447, + "step": 3205 + }, + { + "epoch": 0.2226079711151229, + "grad_norm": 5.452168709658814, + "learning_rate": 9.058788570467956e-06, + "loss": 0.7057, + "step": 3206 + }, + { + "epoch": 0.22267740591584503, + "grad_norm": 3.190825358038926, + "learning_rate": 9.058131773579592e-06, + "loss": 0.3113, + "step": 3207 + }, + { + "epoch": 0.22274684071656714, + "grad_norm": 6.781812916756434, + "learning_rate": 9.057474771435516e-06, + "loss": 0.6399, + "step": 3208 + }, + { + "epoch": 0.22281627551728928, + "grad_norm": 4.026958928228671, + "learning_rate": 9.056817564068956e-06, + "loss": 0.5334, + "step": 3209 + }, + { + "epoch": 0.22288571031801138, + "grad_norm": 3.755392518967289, + "learning_rate": 9.056160151513154e-06, + "loss": 0.4576, + "step": 3210 + }, + { + "epoch": 0.22295514511873352, + "grad_norm": 4.258764120726826, + "learning_rate": 9.055502533801361e-06, + "loss": 0.6035, + "step": 3211 + }, + { + "epoch": 0.22302457991945562, + "grad_norm": 4.790069014847633, + "learning_rate": 9.05484471096684e-06, + "loss": 0.8171, + "step": 3212 + }, + { + "epoch": 0.22309401472017776, + "grad_norm": 3.4455477515667283, + "learning_rate": 9.054186683042863e-06, + "loss": 0.3367, + "step": 3213 + }, + { + "epoch": 0.22316344952089986, + "grad_norm": 5.080014953759017, + "learning_rate": 9.05352845006271e-06, + "loss": 0.813, + "step": 3214 + }, + { + "epoch": 0.223232884321622, + "grad_norm": 3.125153708248819, + "learning_rate": 9.052870012059676e-06, + "loss": 0.3587, + "step": 3215 + }, + { + "epoch": 0.2233023191223441, + "grad_norm": 4.201543242961882, + "learning_rate": 9.05221136906706e-06, + "loss": 0.4502, + "step": 3216 + }, + { + "epoch": 0.22337175392306624, + "grad_norm": 4.223503254330185, + "learning_rate": 9.051552521118181e-06, + "loss": 0.4295, + "step": 3217 + }, + { + "epoch": 0.22344118872378838, + "grad_norm": 3.911988669682742, + "learning_rate": 9.05089346824636e-06, + "loss": 0.4821, + "step": 3218 + }, + { + "epoch": 0.22351062352451048, + "grad_norm": 4.11918215030611, + "learning_rate": 9.05023421048493e-06, + "loss": 0.514, + "step": 3219 + }, + { + "epoch": 0.22358005832523262, + "grad_norm": 3.6457240653186638, + "learning_rate": 9.049574747867239e-06, + "loss": 0.3099, + "step": 3220 + }, + { + "epoch": 0.22364949312595472, + "grad_norm": 4.362450941916721, + "learning_rate": 9.048915080426637e-06, + "loss": 0.4088, + "step": 3221 + }, + { + "epoch": 0.22371892792667686, + "grad_norm": 3.934809977691779, + "learning_rate": 9.048255208196492e-06, + "loss": 0.4095, + "step": 3222 + }, + { + "epoch": 0.22378836272739897, + "grad_norm": 4.851740634140416, + "learning_rate": 9.04759513121018e-06, + "loss": 0.6844, + "step": 3223 + }, + { + "epoch": 0.2238577975281211, + "grad_norm": 3.558937352019959, + "learning_rate": 9.046934849501087e-06, + "loss": 0.4246, + "step": 3224 + }, + { + "epoch": 0.2239272323288432, + "grad_norm": 3.666594750735752, + "learning_rate": 9.04627436310261e-06, + "loss": 0.6376, + "step": 3225 + }, + { + "epoch": 0.22399666712956534, + "grad_norm": 4.244613573097513, + "learning_rate": 9.045613672048152e-06, + "loss": 0.4875, + "step": 3226 + }, + { + "epoch": 
0.22406610193028745, + "grad_norm": 4.0717711785397706, + "learning_rate": 9.044952776371134e-06, + "loss": 0.4137, + "step": 3227 + }, + { + "epoch": 0.22413553673100958, + "grad_norm": 4.080885430378061, + "learning_rate": 9.044291676104981e-06, + "loss": 0.4361, + "step": 3228 + }, + { + "epoch": 0.22420497153173172, + "grad_norm": 3.7138611550110108, + "learning_rate": 9.043630371283131e-06, + "loss": 0.4302, + "step": 3229 + }, + { + "epoch": 0.22427440633245382, + "grad_norm": 4.146470582755441, + "learning_rate": 9.042968861939034e-06, + "loss": 0.536, + "step": 3230 + }, + { + "epoch": 0.22434384113317596, + "grad_norm": 3.4519970328055987, + "learning_rate": 9.042307148106146e-06, + "loss": 0.3918, + "step": 3231 + }, + { + "epoch": 0.22441327593389807, + "grad_norm": 4.480878934569296, + "learning_rate": 9.041645229817936e-06, + "loss": 0.6384, + "step": 3232 + }, + { + "epoch": 0.2244827107346202, + "grad_norm": 3.7571363646842455, + "learning_rate": 9.040983107107884e-06, + "loss": 0.5155, + "step": 3233 + }, + { + "epoch": 0.2245521455353423, + "grad_norm": 4.437106872771973, + "learning_rate": 9.04032078000948e-06, + "loss": 0.5441, + "step": 3234 + }, + { + "epoch": 0.22462158033606444, + "grad_norm": 3.4468186919304937, + "learning_rate": 9.039658248556222e-06, + "loss": 0.2585, + "step": 3235 + }, + { + "epoch": 0.22469101513678655, + "grad_norm": 3.086416253982117, + "learning_rate": 9.03899551278162e-06, + "loss": 0.2822, + "step": 3236 + }, + { + "epoch": 0.22476044993750868, + "grad_norm": 5.550854900193479, + "learning_rate": 9.038332572719197e-06, + "loss": 0.8222, + "step": 3237 + }, + { + "epoch": 0.2248298847382308, + "grad_norm": 4.370674228309681, + "learning_rate": 9.037669428402482e-06, + "loss": 0.5625, + "step": 3238 + }, + { + "epoch": 0.22489931953895292, + "grad_norm": 4.248561478430586, + "learning_rate": 9.037006079865017e-06, + "loss": 0.487, + "step": 3239 + }, + { + "epoch": 0.22496875433967506, + "grad_norm": 4.1471973778933, + "learning_rate": 9.036342527140352e-06, + "loss": 0.556, + "step": 3240 + }, + { + "epoch": 0.22503818914039717, + "grad_norm": 5.027461615377546, + "learning_rate": 9.035678770262049e-06, + "loss": 0.6528, + "step": 3241 + }, + { + "epoch": 0.2251076239411193, + "grad_norm": 4.55207968453122, + "learning_rate": 9.035014809263681e-06, + "loss": 0.5913, + "step": 3242 + }, + { + "epoch": 0.2251770587418414, + "grad_norm": 4.170993253745282, + "learning_rate": 9.034350644178829e-06, + "loss": 0.4206, + "step": 3243 + }, + { + "epoch": 0.22524649354256354, + "grad_norm": 5.388173689516993, + "learning_rate": 9.033686275041088e-06, + "loss": 0.4943, + "step": 3244 + }, + { + "epoch": 0.22531592834328565, + "grad_norm": 3.5239911456343282, + "learning_rate": 9.033021701884057e-06, + "loss": 0.5444, + "step": 3245 + }, + { + "epoch": 0.22538536314400778, + "grad_norm": 4.871107000153552, + "learning_rate": 9.032356924741354e-06, + "loss": 0.6076, + "step": 3246 + }, + { + "epoch": 0.2254547979447299, + "grad_norm": 5.69230000963202, + "learning_rate": 9.031691943646602e-06, + "loss": 0.8392, + "step": 3247 + }, + { + "epoch": 0.22552423274545202, + "grad_norm": 5.381434365494375, + "learning_rate": 9.03102675863343e-06, + "loss": 0.642, + "step": 3248 + }, + { + "epoch": 0.22559366754617413, + "grad_norm": 4.120124775108753, + "learning_rate": 9.030361369735488e-06, + "loss": 0.5776, + "step": 3249 + }, + { + "epoch": 0.22566310234689627, + "grad_norm": 3.8530163948326495, + "learning_rate": 9.029695776986428e-06, + "loss": 0.4595, 
+ "step": 3250 + }, + { + "epoch": 0.22573253714761837, + "grad_norm": 3.4757791229564754, + "learning_rate": 9.029029980419917e-06, + "loss": 0.4112, + "step": 3251 + }, + { + "epoch": 0.2258019719483405, + "grad_norm": 4.12374353009636, + "learning_rate": 9.028363980069628e-06, + "loss": 0.4492, + "step": 3252 + }, + { + "epoch": 0.22587140674906264, + "grad_norm": 3.721077081011987, + "learning_rate": 9.027697775969246e-06, + "loss": 0.5169, + "step": 3253 + }, + { + "epoch": 0.22594084154978475, + "grad_norm": 3.6983501167657207, + "learning_rate": 9.02703136815247e-06, + "loss": 0.5111, + "step": 3254 + }, + { + "epoch": 0.22601027635050688, + "grad_norm": 5.066074521070494, + "learning_rate": 9.026364756653004e-06, + "loss": 0.763, + "step": 3255 + }, + { + "epoch": 0.226079711151229, + "grad_norm": 3.1084805793498775, + "learning_rate": 9.025697941504564e-06, + "loss": 0.4527, + "step": 3256 + }, + { + "epoch": 0.22614914595195112, + "grad_norm": 4.020763489521019, + "learning_rate": 9.025030922740879e-06, + "loss": 0.4474, + "step": 3257 + }, + { + "epoch": 0.22621858075267323, + "grad_norm": 3.1682521011323046, + "learning_rate": 9.024363700395683e-06, + "loss": 0.3735, + "step": 3258 + }, + { + "epoch": 0.22628801555339537, + "grad_norm": 3.532028945786179, + "learning_rate": 9.023696274502725e-06, + "loss": 0.4093, + "step": 3259 + }, + { + "epoch": 0.22635745035411747, + "grad_norm": 3.6075348518352155, + "learning_rate": 9.02302864509576e-06, + "loss": 0.4937, + "step": 3260 + }, + { + "epoch": 0.2264268851548396, + "grad_norm": 2.9671341943831693, + "learning_rate": 9.022360812208564e-06, + "loss": 0.2126, + "step": 3261 + }, + { + "epoch": 0.22649631995556171, + "grad_norm": 4.166635242307017, + "learning_rate": 9.021692775874906e-06, + "loss": 0.6409, + "step": 3262 + }, + { + "epoch": 0.22656575475628385, + "grad_norm": 4.194861020198873, + "learning_rate": 9.02102453612858e-06, + "loss": 0.516, + "step": 3263 + }, + { + "epoch": 0.22663518955700598, + "grad_norm": 4.169700348540964, + "learning_rate": 9.020356093003381e-06, + "loss": 0.4806, + "step": 3264 + }, + { + "epoch": 0.2267046243577281, + "grad_norm": 3.5001225912535925, + "learning_rate": 9.019687446533122e-06, + "loss": 0.3067, + "step": 3265 + }, + { + "epoch": 0.22677405915845023, + "grad_norm": 4.0189903287019355, + "learning_rate": 9.01901859675162e-06, + "loss": 0.4979, + "step": 3266 + }, + { + "epoch": 0.22684349395917233, + "grad_norm": 3.4185651570264945, + "learning_rate": 9.018349543692706e-06, + "loss": 0.457, + "step": 3267 + }, + { + "epoch": 0.22691292875989447, + "grad_norm": 3.2986901878932637, + "learning_rate": 9.017680287390217e-06, + "loss": 0.4494, + "step": 3268 + }, + { + "epoch": 0.22698236356061657, + "grad_norm": 3.0910952370393887, + "learning_rate": 9.017010827878008e-06, + "loss": 0.3756, + "step": 3269 + }, + { + "epoch": 0.2270517983613387, + "grad_norm": 4.087155541552879, + "learning_rate": 9.016341165189935e-06, + "loss": 0.6461, + "step": 3270 + }, + { + "epoch": 0.22712123316206081, + "grad_norm": 3.473777532901928, + "learning_rate": 9.015671299359871e-06, + "loss": 0.3388, + "step": 3271 + }, + { + "epoch": 0.22719066796278295, + "grad_norm": 3.43156124132588, + "learning_rate": 9.015001230421698e-06, + "loss": 0.4307, + "step": 3272 + }, + { + "epoch": 0.22726010276350506, + "grad_norm": 3.845123770791104, + "learning_rate": 9.014330958409306e-06, + "loss": 0.5157, + "step": 3273 + }, + { + "epoch": 0.2273295375642272, + "grad_norm": 3.8474545722598683, + "learning_rate": 
9.013660483356594e-06, + "loss": 0.4224, + "step": 3274 + }, + { + "epoch": 0.22739897236494933, + "grad_norm": 4.209494611295047, + "learning_rate": 9.012989805297479e-06, + "loss": 0.4056, + "step": 3275 + }, + { + "epoch": 0.22746840716567143, + "grad_norm": 4.41777787432151, + "learning_rate": 9.01231892426588e-06, + "loss": 0.3616, + "step": 3276 + }, + { + "epoch": 0.22753784196639357, + "grad_norm": 3.7129817690982936, + "learning_rate": 9.011647840295729e-06, + "loss": 0.5377, + "step": 3277 + }, + { + "epoch": 0.22760727676711567, + "grad_norm": 3.3732750482280642, + "learning_rate": 9.010976553420972e-06, + "loss": 0.3097, + "step": 3278 + }, + { + "epoch": 0.2276767115678378, + "grad_norm": 3.3035935992133534, + "learning_rate": 9.010305063675557e-06, + "loss": 0.3631, + "step": 3279 + }, + { + "epoch": 0.22774614636855992, + "grad_norm": 3.5745572365909277, + "learning_rate": 9.009633371093452e-06, + "loss": 0.328, + "step": 3280 + }, + { + "epoch": 0.22781558116928205, + "grad_norm": 2.959695718181537, + "learning_rate": 9.008961475708628e-06, + "loss": 0.4074, + "step": 3281 + }, + { + "epoch": 0.22788501597000416, + "grad_norm": 4.657067745681218, + "learning_rate": 9.008289377555068e-06, + "loss": 0.6203, + "step": 3282 + }, + { + "epoch": 0.2279544507707263, + "grad_norm": 4.206290552074754, + "learning_rate": 9.007617076666768e-06, + "loss": 0.6147, + "step": 3283 + }, + { + "epoch": 0.2280238855714484, + "grad_norm": 4.150764225909826, + "learning_rate": 9.006944573077729e-06, + "loss": 0.5385, + "step": 3284 + }, + { + "epoch": 0.22809332037217053, + "grad_norm": 3.5266526916199794, + "learning_rate": 9.006271866821969e-06, + "loss": 0.2318, + "step": 3285 + }, + { + "epoch": 0.22816275517289267, + "grad_norm": 4.338667428327251, + "learning_rate": 9.005598957933513e-06, + "loss": 0.5261, + "step": 3286 + }, + { + "epoch": 0.22823218997361477, + "grad_norm": 4.523497214010638, + "learning_rate": 9.004925846446393e-06, + "loss": 0.4, + "step": 3287 + }, + { + "epoch": 0.2283016247743369, + "grad_norm": 3.4181914217814127, + "learning_rate": 9.004252532394654e-06, + "loss": 0.3159, + "step": 3288 + }, + { + "epoch": 0.22837105957505902, + "grad_norm": 3.3918555790964926, + "learning_rate": 9.003579015812356e-06, + "loss": 0.338, + "step": 3289 + }, + { + "epoch": 0.22844049437578115, + "grad_norm": 4.093306528153707, + "learning_rate": 9.002905296733561e-06, + "loss": 0.3947, + "step": 3290 + }, + { + "epoch": 0.22850992917650326, + "grad_norm": 4.879188339408868, + "learning_rate": 9.002231375192346e-06, + "loss": 0.5572, + "step": 3291 + }, + { + "epoch": 0.2285793639772254, + "grad_norm": 4.647888178501052, + "learning_rate": 9.001557251222794e-06, + "loss": 0.6671, + "step": 3292 + }, + { + "epoch": 0.2286487987779475, + "grad_norm": 4.3152061567601105, + "learning_rate": 9.000882924859007e-06, + "loss": 0.3607, + "step": 3293 + }, + { + "epoch": 0.22871823357866963, + "grad_norm": 5.509443840881015, + "learning_rate": 9.000208396135088e-06, + "loss": 0.532, + "step": 3294 + }, + { + "epoch": 0.22878766837939174, + "grad_norm": 3.030300980135691, + "learning_rate": 8.999533665085155e-06, + "loss": 0.3756, + "step": 3295 + }, + { + "epoch": 0.22885710318011387, + "grad_norm": 3.8930106321551032, + "learning_rate": 8.998858731743335e-06, + "loss": 0.3733, + "step": 3296 + }, + { + "epoch": 0.22892653798083598, + "grad_norm": 4.533993772398375, + "learning_rate": 8.998183596143765e-06, + "loss": 0.7729, + "step": 3297 + }, + { + "epoch": 0.22899597278155812, + 
"grad_norm": 3.140762447363481, + "learning_rate": 8.997508258320593e-06, + "loss": 0.3236, + "step": 3298 + }, + { + "epoch": 0.22906540758228025, + "grad_norm": 5.233020907170975, + "learning_rate": 8.996832718307979e-06, + "loss": 0.3306, + "step": 3299 + }, + { + "epoch": 0.22913484238300236, + "grad_norm": 4.264765678673607, + "learning_rate": 8.996156976140088e-06, + "loss": 0.4723, + "step": 3300 + }, + { + "epoch": 0.2292042771837245, + "grad_norm": 3.2513966117048323, + "learning_rate": 8.995481031851097e-06, + "loss": 0.33, + "step": 3301 + }, + { + "epoch": 0.2292737119844466, + "grad_norm": 2.7407812579739916, + "learning_rate": 8.994804885475197e-06, + "loss": 0.2695, + "step": 3302 + }, + { + "epoch": 0.22934314678516873, + "grad_norm": 3.788670678873121, + "learning_rate": 8.994128537046587e-06, + "loss": 0.4258, + "step": 3303 + }, + { + "epoch": 0.22941258158589084, + "grad_norm": 4.835697299970811, + "learning_rate": 8.993451986599474e-06, + "loss": 0.5633, + "step": 3304 + }, + { + "epoch": 0.22948201638661297, + "grad_norm": 4.317329183551526, + "learning_rate": 8.99277523416808e-06, + "loss": 0.4643, + "step": 3305 + }, + { + "epoch": 0.22955145118733508, + "grad_norm": 3.8886125378452236, + "learning_rate": 8.992098279786634e-06, + "loss": 0.4422, + "step": 3306 + }, + { + "epoch": 0.22962088598805722, + "grad_norm": 4.398256003664236, + "learning_rate": 8.991421123489373e-06, + "loss": 0.5, + "step": 3307 + }, + { + "epoch": 0.22969032078877932, + "grad_norm": 4.9720455301084, + "learning_rate": 8.990743765310547e-06, + "loss": 0.5397, + "step": 3308 + }, + { + "epoch": 0.22975975558950146, + "grad_norm": 4.433124513989263, + "learning_rate": 8.990066205284417e-06, + "loss": 0.6294, + "step": 3309 + }, + { + "epoch": 0.2298291903902236, + "grad_norm": 4.664645622465553, + "learning_rate": 8.989388443445256e-06, + "loss": 0.6, + "step": 3310 + }, + { + "epoch": 0.2298986251909457, + "grad_norm": 3.8341873829765674, + "learning_rate": 8.98871047982734e-06, + "loss": 0.5526, + "step": 3311 + }, + { + "epoch": 0.22996805999166783, + "grad_norm": 4.255273053935012, + "learning_rate": 8.988032314464963e-06, + "loss": 0.6691, + "step": 3312 + }, + { + "epoch": 0.23003749479238994, + "grad_norm": 4.1662027455166255, + "learning_rate": 8.987353947392422e-06, + "loss": 0.5802, + "step": 3313 + }, + { + "epoch": 0.23010692959311208, + "grad_norm": 5.45198074243529, + "learning_rate": 8.986675378644032e-06, + "loss": 0.4117, + "step": 3314 + }, + { + "epoch": 0.23017636439383418, + "grad_norm": 2.2447038933412173, + "learning_rate": 8.985996608254113e-06, + "loss": 0.1665, + "step": 3315 + }, + { + "epoch": 0.23024579919455632, + "grad_norm": 4.192959016608387, + "learning_rate": 8.985317636256994e-06, + "loss": 0.4812, + "step": 3316 + }, + { + "epoch": 0.23031523399527842, + "grad_norm": 4.3753098058211775, + "learning_rate": 8.98463846268702e-06, + "loss": 0.6928, + "step": 3317 + }, + { + "epoch": 0.23038466879600056, + "grad_norm": 3.5556490756304346, + "learning_rate": 8.983959087578542e-06, + "loss": 0.4773, + "step": 3318 + }, + { + "epoch": 0.23045410359672266, + "grad_norm": 3.418455644042121, + "learning_rate": 8.98327951096592e-06, + "loss": 0.4675, + "step": 3319 + }, + { + "epoch": 0.2305235383974448, + "grad_norm": 5.472887332941176, + "learning_rate": 8.982599732883528e-06, + "loss": 0.8142, + "step": 3320 + }, + { + "epoch": 0.23059297319816693, + "grad_norm": 2.8833440781651283, + "learning_rate": 8.981919753365748e-06, + "loss": 0.2592, + "step": 3321 + }, + { 
+ "epoch": 0.23066240799888904, + "grad_norm": 3.3418810146106117, + "learning_rate": 8.981239572446975e-06, + "loss": 0.308, + "step": 3322 + }, + { + "epoch": 0.23073184279961118, + "grad_norm": 3.0048785547972487, + "learning_rate": 8.980559190161607e-06, + "loss": 0.2899, + "step": 3323 + }, + { + "epoch": 0.23080127760033328, + "grad_norm": 5.295175041185349, + "learning_rate": 8.979878606544061e-06, + "loss": 0.5676, + "step": 3324 + }, + { + "epoch": 0.23087071240105542, + "grad_norm": 2.3756071796125333, + "learning_rate": 8.979197821628758e-06, + "loss": 0.1594, + "step": 3325 + }, + { + "epoch": 0.23094014720177752, + "grad_norm": 4.123512813892296, + "learning_rate": 8.97851683545013e-06, + "loss": 0.5433, + "step": 3326 + }, + { + "epoch": 0.23100958200249966, + "grad_norm": 2.315339577945573, + "learning_rate": 8.977835648042626e-06, + "loss": 0.2494, + "step": 3327 + }, + { + "epoch": 0.23107901680322177, + "grad_norm": 4.115183519525345, + "learning_rate": 8.977154259440695e-06, + "loss": 0.479, + "step": 3328 + }, + { + "epoch": 0.2311484516039439, + "grad_norm": 3.7364876316647617, + "learning_rate": 8.976472669678801e-06, + "loss": 0.2769, + "step": 3329 + }, + { + "epoch": 0.231217886404666, + "grad_norm": 3.650959086572034, + "learning_rate": 8.97579087879142e-06, + "loss": 0.5645, + "step": 3330 + }, + { + "epoch": 0.23128732120538814, + "grad_norm": 4.4933665091326915, + "learning_rate": 8.975108886813035e-06, + "loss": 0.9186, + "step": 3331 + }, + { + "epoch": 0.23135675600611028, + "grad_norm": 3.74900299916282, + "learning_rate": 8.97442669377814e-06, + "loss": 0.653, + "step": 3332 + }, + { + "epoch": 0.23142619080683238, + "grad_norm": 3.718195321957163, + "learning_rate": 8.97374429972124e-06, + "loss": 0.3182, + "step": 3333 + }, + { + "epoch": 0.23149562560755452, + "grad_norm": 3.798842222962264, + "learning_rate": 8.97306170467685e-06, + "loss": 0.4301, + "step": 3334 + }, + { + "epoch": 0.23156506040827662, + "grad_norm": 3.2507870132983534, + "learning_rate": 8.972378908679496e-06, + "loss": 0.268, + "step": 3335 + }, + { + "epoch": 0.23163449520899876, + "grad_norm": 3.5743744707341465, + "learning_rate": 8.971695911763712e-06, + "loss": 0.3383, + "step": 3336 + }, + { + "epoch": 0.23170393000972087, + "grad_norm": 3.98713149391989, + "learning_rate": 8.971012713964043e-06, + "loss": 0.4376, + "step": 3337 + }, + { + "epoch": 0.231773364810443, + "grad_norm": 4.803120078072777, + "learning_rate": 8.970329315315042e-06, + "loss": 0.6666, + "step": 3338 + }, + { + "epoch": 0.2318427996111651, + "grad_norm": 3.408056810018496, + "learning_rate": 8.969645715851279e-06, + "loss": 0.5395, + "step": 3339 + }, + { + "epoch": 0.23191223441188724, + "grad_norm": 4.152958202627536, + "learning_rate": 8.968961915607327e-06, + "loss": 0.3961, + "step": 3340 + }, + { + "epoch": 0.23198166921260935, + "grad_norm": 2.749034230274466, + "learning_rate": 8.968277914617773e-06, + "loss": 0.3492, + "step": 3341 + }, + { + "epoch": 0.23205110401333148, + "grad_norm": 4.200202739207985, + "learning_rate": 8.967593712917213e-06, + "loss": 0.4991, + "step": 3342 + }, + { + "epoch": 0.23212053881405362, + "grad_norm": 4.467870838964765, + "learning_rate": 8.966909310540251e-06, + "loss": 0.7526, + "step": 3343 + }, + { + "epoch": 0.23218997361477572, + "grad_norm": 2.8735861593028713, + "learning_rate": 8.966224707521508e-06, + "loss": 0.4459, + "step": 3344 + }, + { + "epoch": 0.23225940841549786, + "grad_norm": 8.423817411811424, + "learning_rate": 8.965539903895603e-06, + 
"loss": 0.5026, + "step": 3345 + }, + { + "epoch": 0.23232884321621997, + "grad_norm": 4.716740028027578, + "learning_rate": 8.96485489969718e-06, + "loss": 0.6625, + "step": 3346 + }, + { + "epoch": 0.2323982780169421, + "grad_norm": 4.9332017759714795, + "learning_rate": 8.964169694960882e-06, + "loss": 0.6211, + "step": 3347 + }, + { + "epoch": 0.2324677128176642, + "grad_norm": 2.6922439074272724, + "learning_rate": 8.963484289721366e-06, + "loss": 0.4367, + "step": 3348 + }, + { + "epoch": 0.23253714761838634, + "grad_norm": 3.0381392436537102, + "learning_rate": 8.9627986840133e-06, + "loss": 0.3502, + "step": 3349 + }, + { + "epoch": 0.23260658241910845, + "grad_norm": 5.328153369560276, + "learning_rate": 8.96211287787136e-06, + "loss": 0.736, + "step": 3350 + }, + { + "epoch": 0.23267601721983058, + "grad_norm": 4.235209959488185, + "learning_rate": 8.961426871330236e-06, + "loss": 0.5585, + "step": 3351 + }, + { + "epoch": 0.2327454520205527, + "grad_norm": 4.197855303305832, + "learning_rate": 8.960740664424622e-06, + "loss": 0.6997, + "step": 3352 + }, + { + "epoch": 0.23281488682127482, + "grad_norm": 4.291113952435272, + "learning_rate": 8.960054257189228e-06, + "loss": 0.7383, + "step": 3353 + }, + { + "epoch": 0.23288432162199693, + "grad_norm": 3.8358199007491205, + "learning_rate": 8.95936764965877e-06, + "loss": 0.2604, + "step": 3354 + }, + { + "epoch": 0.23295375642271907, + "grad_norm": 3.7635641552551484, + "learning_rate": 8.958680841867977e-06, + "loss": 0.304, + "step": 3355 + }, + { + "epoch": 0.2330231912234412, + "grad_norm": 4.27048773599421, + "learning_rate": 8.957993833851587e-06, + "loss": 0.542, + "step": 3356 + }, + { + "epoch": 0.2330926260241633, + "grad_norm": 3.9157452887181488, + "learning_rate": 8.957306625644347e-06, + "loss": 0.4503, + "step": 3357 + }, + { + "epoch": 0.23316206082488544, + "grad_norm": 3.4182495579614045, + "learning_rate": 8.956619217281016e-06, + "loss": 0.2536, + "step": 3358 + }, + { + "epoch": 0.23323149562560755, + "grad_norm": 4.185413196427375, + "learning_rate": 8.955931608796362e-06, + "loss": 0.6125, + "step": 3359 + }, + { + "epoch": 0.23330093042632968, + "grad_norm": 3.896797424380583, + "learning_rate": 8.955243800225164e-06, + "loss": 0.293, + "step": 3360 + }, + { + "epoch": 0.2333703652270518, + "grad_norm": 3.798300902174269, + "learning_rate": 8.95455579160221e-06, + "loss": 0.6208, + "step": 3361 + }, + { + "epoch": 0.23343980002777392, + "grad_norm": 3.4009955175680773, + "learning_rate": 8.9538675829623e-06, + "loss": 0.3601, + "step": 3362 + }, + { + "epoch": 0.23350923482849603, + "grad_norm": 4.543279021141069, + "learning_rate": 8.953179174340241e-06, + "loss": 0.7166, + "step": 3363 + }, + { + "epoch": 0.23357866962921817, + "grad_norm": 3.839788504616365, + "learning_rate": 8.952490565770853e-06, + "loss": 0.4993, + "step": 3364 + }, + { + "epoch": 0.23364810442994027, + "grad_norm": 5.475177320637296, + "learning_rate": 8.951801757288962e-06, + "loss": 0.8026, + "step": 3365 + }, + { + "epoch": 0.2337175392306624, + "grad_norm": 4.835838292295004, + "learning_rate": 8.951112748929414e-06, + "loss": 0.4774, + "step": 3366 + }, + { + "epoch": 0.23378697403138454, + "grad_norm": 4.378367624530631, + "learning_rate": 8.950423540727051e-06, + "loss": 0.5953, + "step": 3367 + }, + { + "epoch": 0.23385640883210665, + "grad_norm": 4.858037252958956, + "learning_rate": 8.949734132716737e-06, + "loss": 0.4573, + "step": 3368 + }, + { + "epoch": 0.23392584363282878, + "grad_norm": 4.137376308407242, + 
"learning_rate": 8.949044524933342e-06, + "loss": 0.6468, + "step": 3369 + }, + { + "epoch": 0.2339952784335509, + "grad_norm": 3.936538430847476, + "learning_rate": 8.94835471741174e-06, + "loss": 0.5321, + "step": 3370 + }, + { + "epoch": 0.23406471323427303, + "grad_norm": 4.094439928988578, + "learning_rate": 8.947664710186825e-06, + "loss": 0.609, + "step": 3371 + }, + { + "epoch": 0.23413414803499513, + "grad_norm": 3.310639094320513, + "learning_rate": 8.946974503293497e-06, + "loss": 0.37, + "step": 3372 + }, + { + "epoch": 0.23420358283571727, + "grad_norm": 4.932059504385731, + "learning_rate": 8.946284096766664e-06, + "loss": 0.9065, + "step": 3373 + }, + { + "epoch": 0.23427301763643937, + "grad_norm": 4.670274968641153, + "learning_rate": 8.945593490641248e-06, + "loss": 0.4096, + "step": 3374 + }, + { + "epoch": 0.2343424524371615, + "grad_norm": 4.3246680480367, + "learning_rate": 8.944902684952178e-06, + "loss": 0.6245, + "step": 3375 + }, + { + "epoch": 0.23441188723788361, + "grad_norm": 4.647584938884255, + "learning_rate": 8.944211679734394e-06, + "loss": 0.5449, + "step": 3376 + }, + { + "epoch": 0.23448132203860575, + "grad_norm": 3.6210826047017073, + "learning_rate": 8.943520475022846e-06, + "loss": 0.3787, + "step": 3377 + }, + { + "epoch": 0.23455075683932788, + "grad_norm": 3.8531015668948907, + "learning_rate": 8.942829070852495e-06, + "loss": 0.4568, + "step": 3378 + }, + { + "epoch": 0.23462019164005, + "grad_norm": 5.032536681824429, + "learning_rate": 8.942137467258312e-06, + "loss": 0.8581, + "step": 3379 + }, + { + "epoch": 0.23468962644077213, + "grad_norm": 4.877122835383702, + "learning_rate": 8.941445664275274e-06, + "loss": 0.8188, + "step": 3380 + }, + { + "epoch": 0.23475906124149423, + "grad_norm": 4.355878772229833, + "learning_rate": 8.940753661938377e-06, + "loss": 0.4724, + "step": 3381 + }, + { + "epoch": 0.23482849604221637, + "grad_norm": 3.476655711401579, + "learning_rate": 8.940061460282619e-06, + "loss": 0.4733, + "step": 3382 + }, + { + "epoch": 0.23489793084293847, + "grad_norm": 4.220378142969899, + "learning_rate": 8.939369059343008e-06, + "loss": 0.4638, + "step": 3383 + }, + { + "epoch": 0.2349673656436606, + "grad_norm": 4.086210202368822, + "learning_rate": 8.93867645915457e-06, + "loss": 0.5657, + "step": 3384 + }, + { + "epoch": 0.23503680044438272, + "grad_norm": 4.625643542404187, + "learning_rate": 8.937983659752335e-06, + "loss": 0.7502, + "step": 3385 + }, + { + "epoch": 0.23510623524510485, + "grad_norm": 3.6129740585457695, + "learning_rate": 8.937290661171341e-06, + "loss": 0.4391, + "step": 3386 + }, + { + "epoch": 0.23517567004582696, + "grad_norm": 4.954692914858558, + "learning_rate": 8.936597463446639e-06, + "loss": 0.842, + "step": 3387 + }, + { + "epoch": 0.2352451048465491, + "grad_norm": 3.231737859588972, + "learning_rate": 8.935904066613295e-06, + "loss": 0.3204, + "step": 3388 + }, + { + "epoch": 0.23531453964727123, + "grad_norm": 3.229392515818551, + "learning_rate": 8.935210470706374e-06, + "loss": 0.2561, + "step": 3389 + }, + { + "epoch": 0.23538397444799333, + "grad_norm": 3.756300216832138, + "learning_rate": 8.934516675760962e-06, + "loss": 0.6203, + "step": 3390 + }, + { + "epoch": 0.23545340924871547, + "grad_norm": 3.953787631441059, + "learning_rate": 8.93382268181215e-06, + "loss": 0.6087, + "step": 3391 + }, + { + "epoch": 0.23552284404943757, + "grad_norm": 3.448353761025936, + "learning_rate": 8.933128488895036e-06, + "loss": 0.3747, + "step": 3392 + }, + { + "epoch": 0.2355922788501597, + 
"grad_norm": 3.2720514376138494, + "learning_rate": 8.932434097044735e-06, + "loss": 0.369, + "step": 3393 + }, + { + "epoch": 0.23566171365088182, + "grad_norm": 3.649427377454302, + "learning_rate": 8.931739506296366e-06, + "loss": 0.4324, + "step": 3394 + }, + { + "epoch": 0.23573114845160395, + "grad_norm": 3.9630549818490164, + "learning_rate": 8.931044716685063e-06, + "loss": 0.4788, + "step": 3395 + }, + { + "epoch": 0.23580058325232606, + "grad_norm": 3.937715497607778, + "learning_rate": 8.930349728245965e-06, + "loss": 0.429, + "step": 3396 + }, + { + "epoch": 0.2358700180530482, + "grad_norm": 4.31892630707009, + "learning_rate": 8.929654541014227e-06, + "loss": 0.4829, + "step": 3397 + }, + { + "epoch": 0.2359394528537703, + "grad_norm": 3.708245271854308, + "learning_rate": 8.928959155025008e-06, + "loss": 0.1864, + "step": 3398 + }, + { + "epoch": 0.23600888765449243, + "grad_norm": 4.174177054413822, + "learning_rate": 8.928263570313483e-06, + "loss": 0.6108, + "step": 3399 + }, + { + "epoch": 0.23607832245521454, + "grad_norm": 4.85156222682328, + "learning_rate": 8.92756778691483e-06, + "loss": 0.6724, + "step": 3400 + }, + { + "epoch": 0.23614775725593667, + "grad_norm": 2.9856238664889143, + "learning_rate": 8.926871804864242e-06, + "loss": 0.3353, + "step": 3401 + }, + { + "epoch": 0.2362171920566588, + "grad_norm": 3.7501304671640083, + "learning_rate": 8.926175624196921e-06, + "loss": 0.3548, + "step": 3402 + }, + { + "epoch": 0.23628662685738092, + "grad_norm": 3.1277728620823306, + "learning_rate": 8.925479244948081e-06, + "loss": 0.3819, + "step": 3403 + }, + { + "epoch": 0.23635606165810305, + "grad_norm": 3.738017512608187, + "learning_rate": 8.924782667152943e-06, + "loss": 0.4004, + "step": 3404 + }, + { + "epoch": 0.23642549645882516, + "grad_norm": 3.770749830226631, + "learning_rate": 8.92408589084674e-06, + "loss": 0.4789, + "step": 3405 + }, + { + "epoch": 0.2364949312595473, + "grad_norm": 3.9300022084069752, + "learning_rate": 8.92338891606471e-06, + "loss": 0.4008, + "step": 3406 + }, + { + "epoch": 0.2365643660602694, + "grad_norm": 3.1195924790060263, + "learning_rate": 8.922691742842112e-06, + "loss": 0.2789, + "step": 3407 + }, + { + "epoch": 0.23663380086099153, + "grad_norm": 4.93983735552413, + "learning_rate": 8.921994371214202e-06, + "loss": 0.6781, + "step": 3408 + }, + { + "epoch": 0.23670323566171364, + "grad_norm": 3.621970808135141, + "learning_rate": 8.921296801216255e-06, + "loss": 0.4139, + "step": 3409 + }, + { + "epoch": 0.23677267046243577, + "grad_norm": 3.8903205996193853, + "learning_rate": 8.920599032883553e-06, + "loss": 0.5832, + "step": 3410 + }, + { + "epoch": 0.23684210526315788, + "grad_norm": 3.252887920455228, + "learning_rate": 8.919901066251389e-06, + "loss": 0.4231, + "step": 3411 + }, + { + "epoch": 0.23691154006388002, + "grad_norm": 2.96533854518334, + "learning_rate": 8.919202901355064e-06, + "loss": 0.3032, + "step": 3412 + }, + { + "epoch": 0.23698097486460215, + "grad_norm": 4.348096951179054, + "learning_rate": 8.91850453822989e-06, + "loss": 0.5009, + "step": 3413 + }, + { + "epoch": 0.23705040966532426, + "grad_norm": 3.0525439725841887, + "learning_rate": 8.917805976911194e-06, + "loss": 0.2848, + "step": 3414 + }, + { + "epoch": 0.2371198444660464, + "grad_norm": 4.687705473686893, + "learning_rate": 8.917107217434302e-06, + "loss": 0.7724, + "step": 3415 + }, + { + "epoch": 0.2371892792667685, + "grad_norm": 3.871926767307289, + "learning_rate": 8.916408259834562e-06, + "loss": 0.5542, + "step": 3416 + }, 
+ { + "epoch": 0.23725871406749063, + "grad_norm": 3.7909469279179953, + "learning_rate": 8.91570910414732e-06, + "loss": 0.5268, + "step": 3417 + }, + { + "epoch": 0.23732814886821274, + "grad_norm": 3.8419992792217426, + "learning_rate": 8.915009750407947e-06, + "loss": 0.4926, + "step": 3418 + }, + { + "epoch": 0.23739758366893488, + "grad_norm": 4.135368143245829, + "learning_rate": 8.914310198651807e-06, + "loss": 0.477, + "step": 3419 + }, + { + "epoch": 0.23746701846965698, + "grad_norm": 4.452521067169484, + "learning_rate": 8.91361044891429e-06, + "loss": 0.5635, + "step": 3420 + }, + { + "epoch": 0.23753645327037912, + "grad_norm": 3.7541742409623815, + "learning_rate": 8.912910501230782e-06, + "loss": 0.4751, + "step": 3421 + }, + { + "epoch": 0.23760588807110122, + "grad_norm": 3.8371193885839014, + "learning_rate": 8.91221035563669e-06, + "loss": 0.5039, + "step": 3422 + }, + { + "epoch": 0.23767532287182336, + "grad_norm": 5.077340597136206, + "learning_rate": 8.911510012167425e-06, + "loss": 0.6132, + "step": 3423 + }, + { + "epoch": 0.2377447576725455, + "grad_norm": 3.141818035172513, + "learning_rate": 8.91080947085841e-06, + "loss": 0.3086, + "step": 3424 + }, + { + "epoch": 0.2378141924732676, + "grad_norm": 5.615593639178157, + "learning_rate": 8.910108731745078e-06, + "loss": 0.7889, + "step": 3425 + }, + { + "epoch": 0.23788362727398973, + "grad_norm": 4.70487180473826, + "learning_rate": 8.90940779486287e-06, + "loss": 0.6064, + "step": 3426 + }, + { + "epoch": 0.23795306207471184, + "grad_norm": 3.602691751691279, + "learning_rate": 8.908706660247239e-06, + "loss": 0.4251, + "step": 3427 + }, + { + "epoch": 0.23802249687543398, + "grad_norm": 4.228402901905052, + "learning_rate": 8.90800532793365e-06, + "loss": 0.5611, + "step": 3428 + }, + { + "epoch": 0.23809193167615608, + "grad_norm": 4.405210442623311, + "learning_rate": 8.907303797957571e-06, + "loss": 0.5285, + "step": 3429 + }, + { + "epoch": 0.23816136647687822, + "grad_norm": 3.505146206411121, + "learning_rate": 8.906602070354488e-06, + "loss": 0.3053, + "step": 3430 + }, + { + "epoch": 0.23823080127760032, + "grad_norm": 4.601475378960544, + "learning_rate": 8.905900145159894e-06, + "loss": 0.6146, + "step": 3431 + }, + { + "epoch": 0.23830023607832246, + "grad_norm": 4.418610404683968, + "learning_rate": 8.905198022409292e-06, + "loss": 0.5927, + "step": 3432 + }, + { + "epoch": 0.23836967087904457, + "grad_norm": 3.9570299858963445, + "learning_rate": 8.904495702138189e-06, + "loss": 0.4316, + "step": 3433 + }, + { + "epoch": 0.2384391056797667, + "grad_norm": 4.3308963416843245, + "learning_rate": 8.903793184382115e-06, + "loss": 0.623, + "step": 3434 + }, + { + "epoch": 0.23850854048048883, + "grad_norm": 4.8135802585804095, + "learning_rate": 8.903090469176598e-06, + "loss": 0.6551, + "step": 3435 + }, + { + "epoch": 0.23857797528121094, + "grad_norm": 4.253986366506154, + "learning_rate": 8.902387556557182e-06, + "loss": 0.535, + "step": 3436 + }, + { + "epoch": 0.23864741008193308, + "grad_norm": 4.492071024323041, + "learning_rate": 8.90168444655942e-06, + "loss": 0.4903, + "step": 3437 + }, + { + "epoch": 0.23871684488265518, + "grad_norm": 4.059494433365717, + "learning_rate": 8.900981139218872e-06, + "loss": 0.689, + "step": 3438 + }, + { + "epoch": 0.23878627968337732, + "grad_norm": 3.417187640754658, + "learning_rate": 8.900277634571113e-06, + "loss": 0.4187, + "step": 3439 + }, + { + "epoch": 0.23885571448409942, + "grad_norm": 4.655441852707797, + "learning_rate": 8.899573932651723e-06, 
+ "loss": 0.4867, + "step": 3440 + }, + { + "epoch": 0.23892514928482156, + "grad_norm": 4.566931576308445, + "learning_rate": 8.898870033496299e-06, + "loss": 0.7239, + "step": 3441 + }, + { + "epoch": 0.23899458408554367, + "grad_norm": 4.561754223053981, + "learning_rate": 8.898165937140439e-06, + "loss": 0.7939, + "step": 3442 + }, + { + "epoch": 0.2390640188862658, + "grad_norm": 3.7492515932581174, + "learning_rate": 8.897461643619758e-06, + "loss": 0.3455, + "step": 3443 + }, + { + "epoch": 0.2391334536869879, + "grad_norm": 3.9796283953703826, + "learning_rate": 8.896757152969878e-06, + "loss": 0.5, + "step": 3444 + }, + { + "epoch": 0.23920288848771004, + "grad_norm": 4.256454843044218, + "learning_rate": 8.896052465226427e-06, + "loss": 0.5779, + "step": 3445 + }, + { + "epoch": 0.23927232328843218, + "grad_norm": 3.5169316922912373, + "learning_rate": 8.895347580425056e-06, + "loss": 0.5087, + "step": 3446 + }, + { + "epoch": 0.23934175808915428, + "grad_norm": 4.651881746849845, + "learning_rate": 8.894642498601409e-06, + "loss": 0.5431, + "step": 3447 + }, + { + "epoch": 0.23941119288987642, + "grad_norm": 4.529313098759388, + "learning_rate": 8.893937219791151e-06, + "loss": 0.482, + "step": 3448 + }, + { + "epoch": 0.23948062769059852, + "grad_norm": 5.094807145100306, + "learning_rate": 8.893231744029956e-06, + "loss": 0.5853, + "step": 3449 + }, + { + "epoch": 0.23955006249132066, + "grad_norm": 3.8647826896018502, + "learning_rate": 8.892526071353504e-06, + "loss": 0.6818, + "step": 3450 + }, + { + "epoch": 0.23961949729204277, + "grad_norm": 4.028957408905017, + "learning_rate": 8.89182020179749e-06, + "loss": 0.499, + "step": 3451 + }, + { + "epoch": 0.2396889320927649, + "grad_norm": 4.135513025509712, + "learning_rate": 8.891114135397614e-06, + "loss": 0.4164, + "step": 3452 + }, + { + "epoch": 0.239758366893487, + "grad_norm": 3.831839688098623, + "learning_rate": 8.890407872189587e-06, + "loss": 0.3281, + "step": 3453 + }, + { + "epoch": 0.23982780169420914, + "grad_norm": 5.540201262194585, + "learning_rate": 8.889701412209131e-06, + "loss": 0.7008, + "step": 3454 + }, + { + "epoch": 0.23989723649493125, + "grad_norm": 4.466899840394317, + "learning_rate": 8.88899475549198e-06, + "loss": 0.5492, + "step": 3455 + }, + { + "epoch": 0.23996667129565338, + "grad_norm": 3.8026019918869927, + "learning_rate": 8.888287902073875e-06, + "loss": 0.5855, + "step": 3456 + }, + { + "epoch": 0.2400361060963755, + "grad_norm": 4.562941758582137, + "learning_rate": 8.887580851990568e-06, + "loss": 0.7271, + "step": 3457 + }, + { + "epoch": 0.24010554089709762, + "grad_norm": 3.74035408592314, + "learning_rate": 8.886873605277822e-06, + "loss": 0.4254, + "step": 3458 + }, + { + "epoch": 0.24017497569781976, + "grad_norm": 4.428535303961577, + "learning_rate": 8.886166161971404e-06, + "loss": 0.6783, + "step": 3459 + }, + { + "epoch": 0.24024441049854187, + "grad_norm": 4.413689275496993, + "learning_rate": 8.885458522107102e-06, + "loss": 0.6705, + "step": 3460 + }, + { + "epoch": 0.240313845299264, + "grad_norm": 3.0964466053211765, + "learning_rate": 8.884750685720703e-06, + "loss": 0.3354, + "step": 3461 + }, + { + "epoch": 0.2403832800999861, + "grad_norm": 4.743670843049077, + "learning_rate": 8.884042652848011e-06, + "loss": 0.6877, + "step": 3462 + }, + { + "epoch": 0.24045271490070824, + "grad_norm": 3.5843044436040934, + "learning_rate": 8.883334423524837e-06, + "loss": 0.3225, + "step": 3463 + }, + { + "epoch": 0.24052214970143035, + "grad_norm": 3.992543955167913, + 
"learning_rate": 8.882625997787e-06, + "loss": 0.4635, + "step": 3464 + }, + { + "epoch": 0.24059158450215248, + "grad_norm": 4.101986971823068, + "learning_rate": 8.881917375670336e-06, + "loss": 0.4948, + "step": 3465 + }, + { + "epoch": 0.2406610193028746, + "grad_norm": 4.0560980395492425, + "learning_rate": 8.881208557210683e-06, + "loss": 0.339, + "step": 3466 + }, + { + "epoch": 0.24073045410359672, + "grad_norm": 4.285602605052266, + "learning_rate": 8.880499542443892e-06, + "loss": 0.3424, + "step": 3467 + }, + { + "epoch": 0.24079988890431883, + "grad_norm": 4.03833138974363, + "learning_rate": 8.879790331405827e-06, + "loss": 0.6373, + "step": 3468 + }, + { + "epoch": 0.24086932370504097, + "grad_norm": 4.343961404758944, + "learning_rate": 8.879080924132357e-06, + "loss": 0.6724, + "step": 3469 + }, + { + "epoch": 0.2409387585057631, + "grad_norm": 2.76921893582247, + "learning_rate": 8.878371320659362e-06, + "loss": 0.2177, + "step": 3470 + }, + { + "epoch": 0.2410081933064852, + "grad_norm": 3.5264093015772207, + "learning_rate": 8.877661521022735e-06, + "loss": 0.3222, + "step": 3471 + }, + { + "epoch": 0.24107762810720734, + "grad_norm": 3.231164985060828, + "learning_rate": 8.876951525258377e-06, + "loss": 0.4275, + "step": 3472 + }, + { + "epoch": 0.24114706290792945, + "grad_norm": 3.629759068444725, + "learning_rate": 8.876241333402197e-06, + "loss": 0.3833, + "step": 3473 + }, + { + "epoch": 0.24121649770865158, + "grad_norm": 3.5329451781239816, + "learning_rate": 8.875530945490118e-06, + "loss": 0.4025, + "step": 3474 + }, + { + "epoch": 0.2412859325093737, + "grad_norm": 3.8651259222070564, + "learning_rate": 8.874820361558068e-06, + "loss": 0.4571, + "step": 3475 + }, + { + "epoch": 0.24135536731009583, + "grad_norm": 4.584271283635883, + "learning_rate": 8.874109581641987e-06, + "loss": 0.6936, + "step": 3476 + }, + { + "epoch": 0.24142480211081793, + "grad_norm": 4.75277884644892, + "learning_rate": 8.87339860577783e-06, + "loss": 0.6095, + "step": 3477 + }, + { + "epoch": 0.24149423691154007, + "grad_norm": 3.2282265627761513, + "learning_rate": 8.872687434001554e-06, + "loss": 0.3934, + "step": 3478 + }, + { + "epoch": 0.24156367171226217, + "grad_norm": 3.9686150297828546, + "learning_rate": 8.871976066349131e-06, + "loss": 0.6361, + "step": 3479 + }, + { + "epoch": 0.2416331065129843, + "grad_norm": 3.6858671517398953, + "learning_rate": 8.871264502856538e-06, + "loss": 0.4687, + "step": 3480 + }, + { + "epoch": 0.24170254131370644, + "grad_norm": 3.1962732865078567, + "learning_rate": 8.870552743559766e-06, + "loss": 0.3877, + "step": 3481 + }, + { + "epoch": 0.24177197611442855, + "grad_norm": 5.115025850842917, + "learning_rate": 8.86984078849482e-06, + "loss": 0.5889, + "step": 3482 + }, + { + "epoch": 0.24184141091515068, + "grad_norm": 2.7936324764430127, + "learning_rate": 8.869128637697702e-06, + "loss": 0.3243, + "step": 3483 + }, + { + "epoch": 0.2419108457158728, + "grad_norm": 4.525926732330099, + "learning_rate": 8.868416291204437e-06, + "loss": 0.5303, + "step": 3484 + }, + { + "epoch": 0.24198028051659493, + "grad_norm": 5.115483855169774, + "learning_rate": 8.867703749051054e-06, + "loss": 0.3235, + "step": 3485 + }, + { + "epoch": 0.24204971531731703, + "grad_norm": 3.749797035021384, + "learning_rate": 8.86699101127359e-06, + "loss": 0.6413, + "step": 3486 + }, + { + "epoch": 0.24211915011803917, + "grad_norm": 2.3784005825318384, + "learning_rate": 8.8662780779081e-06, + "loss": 0.1788, + "step": 3487 + }, + { + "epoch": 
0.24218858491876127, + "grad_norm": 4.448423098383032, + "learning_rate": 8.865564948990637e-06, + "loss": 0.4768, + "step": 3488 + }, + { + "epoch": 0.2422580197194834, + "grad_norm": 4.008890554114843, + "learning_rate": 8.864851624557275e-06, + "loss": 0.6424, + "step": 3489 + }, + { + "epoch": 0.24232745452020552, + "grad_norm": 3.717288120750269, + "learning_rate": 8.86413810464409e-06, + "loss": 0.5609, + "step": 3490 + }, + { + "epoch": 0.24239688932092765, + "grad_norm": 5.368235057613119, + "learning_rate": 8.863424389287172e-06, + "loss": 0.6841, + "step": 3491 + }, + { + "epoch": 0.24246632412164978, + "grad_norm": 4.556455129869119, + "learning_rate": 8.86271047852262e-06, + "loss": 0.828, + "step": 3492 + }, + { + "epoch": 0.2425357589223719, + "grad_norm": 4.923971417499381, + "learning_rate": 8.861996372386544e-06, + "loss": 0.507, + "step": 3493 + }, + { + "epoch": 0.24260519372309403, + "grad_norm": 5.507383727474723, + "learning_rate": 8.861282070915062e-06, + "loss": 0.9531, + "step": 3494 + }, + { + "epoch": 0.24267462852381613, + "grad_norm": 4.001556448279561, + "learning_rate": 8.860567574144301e-06, + "loss": 0.62, + "step": 3495 + }, + { + "epoch": 0.24274406332453827, + "grad_norm": 5.171409257461122, + "learning_rate": 8.8598528821104e-06, + "loss": 0.6338, + "step": 3496 + }, + { + "epoch": 0.24281349812526037, + "grad_norm": 3.7008047484375424, + "learning_rate": 8.85913799484951e-06, + "loss": 0.5443, + "step": 3497 + }, + { + "epoch": 0.2428829329259825, + "grad_norm": 3.561741785124683, + "learning_rate": 8.858422912397785e-06, + "loss": 0.4276, + "step": 3498 + }, + { + "epoch": 0.24295236772670462, + "grad_norm": 4.36708606561379, + "learning_rate": 8.857707634791398e-06, + "loss": 0.6536, + "step": 3499 + }, + { + "epoch": 0.24302180252742675, + "grad_norm": 3.4563224610533343, + "learning_rate": 8.856992162066522e-06, + "loss": 0.4809, + "step": 3500 + }, + { + "epoch": 0.24309123732814886, + "grad_norm": 4.422443847250424, + "learning_rate": 8.856276494259345e-06, + "loss": 0.6138, + "step": 3501 + }, + { + "epoch": 0.243160672128871, + "grad_norm": 5.3523206109867765, + "learning_rate": 8.85556063140607e-06, + "loss": 0.6802, + "step": 3502 + }, + { + "epoch": 0.2432301069295931, + "grad_norm": 4.042651987928296, + "learning_rate": 8.8548445735429e-06, + "loss": 0.6427, + "step": 3503 + }, + { + "epoch": 0.24329954173031523, + "grad_norm": 3.472452935412435, + "learning_rate": 8.854128320706054e-06, + "loss": 0.3488, + "step": 3504 + }, + { + "epoch": 0.24336897653103737, + "grad_norm": 4.257518770969773, + "learning_rate": 8.853411872931758e-06, + "loss": 0.4204, + "step": 3505 + }, + { + "epoch": 0.24343841133175947, + "grad_norm": 4.236966141025333, + "learning_rate": 8.85269523025625e-06, + "loss": 0.6052, + "step": 3506 + }, + { + "epoch": 0.2435078461324816, + "grad_norm": 3.6469696271218295, + "learning_rate": 8.851978392715776e-06, + "loss": 0.4649, + "step": 3507 + }, + { + "epoch": 0.24357728093320372, + "grad_norm": 4.946733980228326, + "learning_rate": 8.851261360346596e-06, + "loss": 0.4172, + "step": 3508 + }, + { + "epoch": 0.24364671573392585, + "grad_norm": 11.474036104532104, + "learning_rate": 8.850544133184973e-06, + "loss": 0.5279, + "step": 3509 + }, + { + "epoch": 0.24371615053464796, + "grad_norm": 4.828537435557371, + "learning_rate": 8.849826711267185e-06, + "loss": 0.4246, + "step": 3510 + }, + { + "epoch": 0.2437855853353701, + "grad_norm": 4.196021493476455, + "learning_rate": 8.849109094629517e-06, + "loss": 0.4524, + 
"step": 3511 + }, + { + "epoch": 0.2438550201360922, + "grad_norm": 4.702989804911505, + "learning_rate": 8.848391283308269e-06, + "loss": 0.4107, + "step": 3512 + }, + { + "epoch": 0.24392445493681433, + "grad_norm": 5.169319101376446, + "learning_rate": 8.847673277339744e-06, + "loss": 0.3251, + "step": 3513 + }, + { + "epoch": 0.24399388973753644, + "grad_norm": 4.5793182066594005, + "learning_rate": 8.846955076760257e-06, + "loss": 0.5392, + "step": 3514 + }, + { + "epoch": 0.24406332453825857, + "grad_norm": 4.57674884002615, + "learning_rate": 8.846236681606137e-06, + "loss": 0.5417, + "step": 3515 + }, + { + "epoch": 0.2441327593389807, + "grad_norm": 4.017780847209307, + "learning_rate": 8.845518091913718e-06, + "loss": 0.5056, + "step": 3516 + }, + { + "epoch": 0.24420219413970282, + "grad_norm": 3.100508784587696, + "learning_rate": 8.844799307719346e-06, + "loss": 0.2766, + "step": 3517 + }, + { + "epoch": 0.24427162894042495, + "grad_norm": 3.100303861920758, + "learning_rate": 8.844080329059373e-06, + "loss": 0.3772, + "step": 3518 + }, + { + "epoch": 0.24434106374114706, + "grad_norm": 4.14780103820431, + "learning_rate": 8.84336115597017e-06, + "loss": 0.5875, + "step": 3519 + }, + { + "epoch": 0.2444104985418692, + "grad_norm": 5.198249679127238, + "learning_rate": 8.842641788488106e-06, + "loss": 0.544, + "step": 3520 + }, + { + "epoch": 0.2444799333425913, + "grad_norm": 4.1218422602524765, + "learning_rate": 8.84192222664957e-06, + "loss": 0.4322, + "step": 3521 + }, + { + "epoch": 0.24454936814331343, + "grad_norm": 4.611596621501159, + "learning_rate": 8.841202470490956e-06, + "loss": 0.3419, + "step": 3522 + }, + { + "epoch": 0.24461880294403554, + "grad_norm": 4.294724805593306, + "learning_rate": 8.840482520048665e-06, + "loss": 0.6214, + "step": 3523 + }, + { + "epoch": 0.24468823774475768, + "grad_norm": 4.397296886629712, + "learning_rate": 8.839762375359116e-06, + "loss": 0.5415, + "step": 3524 + }, + { + "epoch": 0.24475767254547978, + "grad_norm": 4.119329509408553, + "learning_rate": 8.83904203645873e-06, + "loss": 0.6755, + "step": 3525 + }, + { + "epoch": 0.24482710734620192, + "grad_norm": 2.690446440938124, + "learning_rate": 8.838321503383944e-06, + "loss": 0.1978, + "step": 3526 + }, + { + "epoch": 0.24489654214692405, + "grad_norm": 4.663776912336376, + "learning_rate": 8.837600776171197e-06, + "loss": 0.6693, + "step": 3527 + }, + { + "epoch": 0.24496597694764616, + "grad_norm": 3.603460056303718, + "learning_rate": 8.836879854856947e-06, + "loss": 0.4874, + "step": 3528 + }, + { + "epoch": 0.2450354117483683, + "grad_norm": 3.9241289718369714, + "learning_rate": 8.836158739477656e-06, + "loss": 0.3979, + "step": 3529 + }, + { + "epoch": 0.2451048465490904, + "grad_norm": 3.86377067525096, + "learning_rate": 8.835437430069796e-06, + "loss": 0.4273, + "step": 3530 + }, + { + "epoch": 0.24517428134981253, + "grad_norm": 3.69691209136962, + "learning_rate": 8.83471592666985e-06, + "loss": 0.4975, + "step": 3531 + }, + { + "epoch": 0.24524371615053464, + "grad_norm": 4.470364631154342, + "learning_rate": 8.833994229314313e-06, + "loss": 0.5501, + "step": 3532 + }, + { + "epoch": 0.24531315095125678, + "grad_norm": 4.146740815152786, + "learning_rate": 8.833272338039686e-06, + "loss": 0.4969, + "step": 3533 + }, + { + "epoch": 0.24538258575197888, + "grad_norm": 3.5208109814199, + "learning_rate": 8.832550252882482e-06, + "loss": 0.3429, + "step": 3534 + }, + { + "epoch": 0.24545202055270102, + "grad_norm": 3.7044745887001223, + "learning_rate": 
8.831827973879224e-06, + "loss": 0.4798, + "step": 3535 + }, + { + "epoch": 0.24552145535342312, + "grad_norm": 4.324288618207141, + "learning_rate": 8.83110550106644e-06, + "loss": 0.3887, + "step": 3536 + }, + { + "epoch": 0.24559089015414526, + "grad_norm": 5.115329305867889, + "learning_rate": 8.830382834480678e-06, + "loss": 0.7651, + "step": 3537 + }, + { + "epoch": 0.2456603249548674, + "grad_norm": 4.4921222253130475, + "learning_rate": 8.829659974158483e-06, + "loss": 0.4086, + "step": 3538 + }, + { + "epoch": 0.2457297597555895, + "grad_norm": 3.568785533279242, + "learning_rate": 8.828936920136424e-06, + "loss": 0.5179, + "step": 3539 + }, + { + "epoch": 0.24579919455631163, + "grad_norm": 4.524499382115759, + "learning_rate": 8.828213672451066e-06, + "loss": 0.5334, + "step": 3540 + }, + { + "epoch": 0.24586862935703374, + "grad_norm": 3.5895766528792445, + "learning_rate": 8.827490231138993e-06, + "loss": 0.4367, + "step": 3541 + }, + { + "epoch": 0.24593806415775588, + "grad_norm": 3.366393593268394, + "learning_rate": 8.826766596236797e-06, + "loss": 0.3875, + "step": 3542 + }, + { + "epoch": 0.24600749895847798, + "grad_norm": 3.21275650851757, + "learning_rate": 8.826042767781075e-06, + "loss": 0.2262, + "step": 3543 + }, + { + "epoch": 0.24607693375920012, + "grad_norm": 4.645932879839337, + "learning_rate": 8.82531874580844e-06, + "loss": 0.4215, + "step": 3544 + }, + { + "epoch": 0.24614636855992222, + "grad_norm": 3.210595562013852, + "learning_rate": 8.82459453035551e-06, + "loss": 0.3727, + "step": 3545 + }, + { + "epoch": 0.24621580336064436, + "grad_norm": 4.294531871288308, + "learning_rate": 8.823870121458919e-06, + "loss": 0.4362, + "step": 3546 + }, + { + "epoch": 0.24628523816136647, + "grad_norm": 5.672633157989422, + "learning_rate": 8.823145519155303e-06, + "loss": 0.9266, + "step": 3547 + }, + { + "epoch": 0.2463546729620886, + "grad_norm": 4.065258773350984, + "learning_rate": 8.822420723481312e-06, + "loss": 0.4598, + "step": 3548 + }, + { + "epoch": 0.24642410776281073, + "grad_norm": 3.3069900874278564, + "learning_rate": 8.821695734473607e-06, + "loss": 0.4232, + "step": 3549 + }, + { + "epoch": 0.24649354256353284, + "grad_norm": 4.030398967772835, + "learning_rate": 8.820970552168854e-06, + "loss": 0.435, + "step": 3550 + }, + { + "epoch": 0.24656297736425498, + "grad_norm": 3.3095629617353906, + "learning_rate": 8.820245176603737e-06, + "loss": 0.3608, + "step": 3551 + }, + { + "epoch": 0.24663241216497708, + "grad_norm": 5.006761832837181, + "learning_rate": 8.819519607814942e-06, + "loss": 0.4751, + "step": 3552 + }, + { + "epoch": 0.24670184696569922, + "grad_norm": 3.918058284618927, + "learning_rate": 8.818793845839166e-06, + "loss": 0.4008, + "step": 3553 + }, + { + "epoch": 0.24677128176642132, + "grad_norm": 4.884306130574844, + "learning_rate": 8.81806789071312e-06, + "loss": 0.3733, + "step": 3554 + }, + { + "epoch": 0.24684071656714346, + "grad_norm": 3.579634229650744, + "learning_rate": 8.817341742473518e-06, + "loss": 0.4559, + "step": 3555 + }, + { + "epoch": 0.24691015136786557, + "grad_norm": 4.7129044348561315, + "learning_rate": 8.816615401157091e-06, + "loss": 0.8162, + "step": 3556 + }, + { + "epoch": 0.2469795861685877, + "grad_norm": 4.6627084496188935, + "learning_rate": 8.815888866800577e-06, + "loss": 0.3568, + "step": 3557 + }, + { + "epoch": 0.2470490209693098, + "grad_norm": 3.5072675528545196, + "learning_rate": 8.815162139440723e-06, + "loss": 0.4519, + "step": 3558 + }, + { + "epoch": 0.24711845577003194, + 
"grad_norm": 4.301887818243652, + "learning_rate": 8.814435219114283e-06, + "loss": 0.4241, + "step": 3559 + }, + { + "epoch": 0.24718789057075405, + "grad_norm": 4.132318148586803, + "learning_rate": 8.813708105858028e-06, + "loss": 0.4976, + "step": 3560 + }, + { + "epoch": 0.24725732537147618, + "grad_norm": 3.962017022498468, + "learning_rate": 8.81298079970873e-06, + "loss": 0.4165, + "step": 3561 + }, + { + "epoch": 0.24732676017219832, + "grad_norm": 3.5190888435760024, + "learning_rate": 8.81225330070318e-06, + "loss": 0.3577, + "step": 3562 + }, + { + "epoch": 0.24739619497292042, + "grad_norm": 3.2543413803605206, + "learning_rate": 8.81152560887817e-06, + "loss": 0.2487, + "step": 3563 + }, + { + "epoch": 0.24746562977364256, + "grad_norm": 3.230279552489996, + "learning_rate": 8.810797724270508e-06, + "loss": 0.3464, + "step": 3564 + }, + { + "epoch": 0.24753506457436467, + "grad_norm": 3.9236631580115295, + "learning_rate": 8.81006964691701e-06, + "loss": 0.504, + "step": 3565 + }, + { + "epoch": 0.2476044993750868, + "grad_norm": 3.023801008147578, + "learning_rate": 8.809341376854502e-06, + "loss": 0.3255, + "step": 3566 + }, + { + "epoch": 0.2476739341758089, + "grad_norm": 3.9637064407598244, + "learning_rate": 8.808612914119817e-06, + "loss": 0.4151, + "step": 3567 + }, + { + "epoch": 0.24774336897653104, + "grad_norm": 2.906557934543004, + "learning_rate": 8.807884258749799e-06, + "loss": 0.3187, + "step": 3568 + }, + { + "epoch": 0.24781280377725315, + "grad_norm": 4.099038071958367, + "learning_rate": 8.807155410781306e-06, + "loss": 0.5943, + "step": 3569 + }, + { + "epoch": 0.24788223857797528, + "grad_norm": 3.39973451392086, + "learning_rate": 8.806426370251198e-06, + "loss": 0.4191, + "step": 3570 + }, + { + "epoch": 0.2479516733786974, + "grad_norm": 4.495282181749335, + "learning_rate": 8.805697137196354e-06, + "loss": 0.6804, + "step": 3571 + }, + { + "epoch": 0.24802110817941952, + "grad_norm": 5.832171965523165, + "learning_rate": 8.804967711653654e-06, + "loss": 0.6672, + "step": 3572 + }, + { + "epoch": 0.24809054298014166, + "grad_norm": 4.71914727258289, + "learning_rate": 8.80423809365999e-06, + "loss": 0.4282, + "step": 3573 + }, + { + "epoch": 0.24815997778086377, + "grad_norm": 2.6071709743470697, + "learning_rate": 8.803508283252273e-06, + "loss": 0.234, + "step": 3574 + }, + { + "epoch": 0.2482294125815859, + "grad_norm": 3.8902674295237625, + "learning_rate": 8.802778280467408e-06, + "loss": 0.552, + "step": 3575 + }, + { + "epoch": 0.248298847382308, + "grad_norm": 4.594274108845213, + "learning_rate": 8.802048085342323e-06, + "loss": 0.499, + "step": 3576 + }, + { + "epoch": 0.24836828218303014, + "grad_norm": 4.561424017198423, + "learning_rate": 8.801317697913945e-06, + "loss": 0.5721, + "step": 3577 + }, + { + "epoch": 0.24843771698375225, + "grad_norm": 4.684597155688811, + "learning_rate": 8.80058711821922e-06, + "loss": 0.4497, + "step": 3578 + }, + { + "epoch": 0.24850715178447438, + "grad_norm": 4.287908890262907, + "learning_rate": 8.799856346295102e-06, + "loss": 0.3877, + "step": 3579 + }, + { + "epoch": 0.2485765865851965, + "grad_norm": 5.083252578486347, + "learning_rate": 8.799125382178549e-06, + "loss": 0.5935, + "step": 3580 + }, + { + "epoch": 0.24864602138591863, + "grad_norm": 4.73830918312085, + "learning_rate": 8.798394225906532e-06, + "loss": 0.3947, + "step": 3581 + }, + { + "epoch": 0.24871545618664073, + "grad_norm": 5.015842549006712, + "learning_rate": 8.797662877516033e-06, + "loss": 0.8407, + "step": 3582 + }, + { + 
"epoch": 0.24878489098736287, + "grad_norm": 3.7213335249810404, + "learning_rate": 8.796931337044044e-06, + "loss": 0.4483, + "step": 3583 + }, + { + "epoch": 0.248854325788085, + "grad_norm": 3.151143230861929, + "learning_rate": 8.796199604527565e-06, + "loss": 0.3466, + "step": 3584 + }, + { + "epoch": 0.2489237605888071, + "grad_norm": 4.06314198418967, + "learning_rate": 8.795467680003604e-06, + "loss": 0.4826, + "step": 3585 + }, + { + "epoch": 0.24899319538952924, + "grad_norm": 3.6776099803374422, + "learning_rate": 8.794735563509183e-06, + "loss": 0.4429, + "step": 3586 + }, + { + "epoch": 0.24906263019025135, + "grad_norm": 4.917035515170583, + "learning_rate": 8.794003255081332e-06, + "loss": 0.7712, + "step": 3587 + }, + { + "epoch": 0.24913206499097348, + "grad_norm": 2.4468320380180373, + "learning_rate": 8.793270754757089e-06, + "loss": 0.0983, + "step": 3588 + }, + { + "epoch": 0.2492014997916956, + "grad_norm": 4.398217835711658, + "learning_rate": 8.792538062573503e-06, + "loss": 0.4024, + "step": 3589 + }, + { + "epoch": 0.24927093459241773, + "grad_norm": 4.822895929890573, + "learning_rate": 8.791805178567633e-06, + "loss": 0.7639, + "step": 3590 + }, + { + "epoch": 0.24934036939313983, + "grad_norm": 4.625758633902606, + "learning_rate": 8.79107210277655e-06, + "loss": 0.7146, + "step": 3591 + }, + { + "epoch": 0.24940980419386197, + "grad_norm": 3.2865196102545786, + "learning_rate": 8.79033883523733e-06, + "loss": 0.3829, + "step": 3592 + }, + { + "epoch": 0.24947923899458407, + "grad_norm": 3.3042104829087036, + "learning_rate": 8.789605375987058e-06, + "loss": 0.2158, + "step": 3593 + }, + { + "epoch": 0.2495486737953062, + "grad_norm": 3.6189334451907618, + "learning_rate": 8.788871725062836e-06, + "loss": 0.3905, + "step": 3594 + }, + { + "epoch": 0.24961810859602834, + "grad_norm": 5.909954686312657, + "learning_rate": 8.78813788250177e-06, + "loss": 0.8676, + "step": 3595 + }, + { + "epoch": 0.24968754339675045, + "grad_norm": 3.7474712826596486, + "learning_rate": 8.787403848340976e-06, + "loss": 0.5168, + "step": 3596 + }, + { + "epoch": 0.24975697819747258, + "grad_norm": 2.892563194478331, + "learning_rate": 8.78666962261758e-06, + "loss": 0.3759, + "step": 3597 + }, + { + "epoch": 0.2498264129981947, + "grad_norm": 3.540121142107857, + "learning_rate": 8.78593520536872e-06, + "loss": 0.6119, + "step": 3598 + }, + { + "epoch": 0.24989584779891683, + "grad_norm": 4.144610718526077, + "learning_rate": 8.785200596631543e-06, + "loss": 0.4343, + "step": 3599 + }, + { + "epoch": 0.24996528259963893, + "grad_norm": 4.000802200521305, + "learning_rate": 8.784465796443201e-06, + "loss": 0.5409, + "step": 3600 + }, + { + "epoch": 0.25003471740036104, + "grad_norm": 3.5132985526432554, + "learning_rate": 8.783730804840861e-06, + "loss": 0.3216, + "step": 3601 + }, + { + "epoch": 0.2501041522010832, + "grad_norm": 4.502903541809305, + "learning_rate": 8.782995621861702e-06, + "loss": 0.664, + "step": 3602 + }, + { + "epoch": 0.2501735870018053, + "grad_norm": 3.102606855236283, + "learning_rate": 8.782260247542902e-06, + "loss": 0.3258, + "step": 3603 + }, + { + "epoch": 0.25024302180252744, + "grad_norm": 4.597347976442723, + "learning_rate": 8.78152468192166e-06, + "loss": 0.5729, + "step": 3604 + }, + { + "epoch": 0.2503124566032495, + "grad_norm": 5.805953369370509, + "learning_rate": 8.780788925035178e-06, + "loss": 0.6389, + "step": 3605 + }, + { + "epoch": 0.25038189140397166, + "grad_norm": 2.92902719939337, + "learning_rate": 8.78005297692067e-06, + "loss": 
0.2813, + "step": 3606 + }, + { + "epoch": 0.2504513262046938, + "grad_norm": 3.937389409005245, + "learning_rate": 8.77931683761536e-06, + "loss": 0.6427, + "step": 3607 + }, + { + "epoch": 0.2505207610054159, + "grad_norm": 4.649954172796734, + "learning_rate": 8.778580507156482e-06, + "loss": 0.4159, + "step": 3608 + }, + { + "epoch": 0.25059019580613806, + "grad_norm": 3.838746173915422, + "learning_rate": 8.777843985581278e-06, + "loss": 0.3741, + "step": 3609 + }, + { + "epoch": 0.25065963060686014, + "grad_norm": 3.549208890333452, + "learning_rate": 8.777107272926999e-06, + "loss": 0.4535, + "step": 3610 + }, + { + "epoch": 0.2507290654075823, + "grad_norm": 3.859925793358068, + "learning_rate": 8.776370369230909e-06, + "loss": 0.4632, + "step": 3611 + }, + { + "epoch": 0.2507985002083044, + "grad_norm": 4.610844773680847, + "learning_rate": 8.775633274530278e-06, + "loss": 0.622, + "step": 3612 + }, + { + "epoch": 0.25086793500902654, + "grad_norm": 4.139742428804724, + "learning_rate": 8.774895988862388e-06, + "loss": 0.4407, + "step": 3613 + }, + { + "epoch": 0.2509373698097486, + "grad_norm": 3.943339084228374, + "learning_rate": 8.774158512264532e-06, + "loss": 0.5416, + "step": 3614 + }, + { + "epoch": 0.25100680461047076, + "grad_norm": 3.230383698727962, + "learning_rate": 8.773420844774008e-06, + "loss": 0.3282, + "step": 3615 + }, + { + "epoch": 0.2510762394111929, + "grad_norm": 4.323247617295156, + "learning_rate": 8.772682986428127e-06, + "loss": 0.6489, + "step": 3616 + }, + { + "epoch": 0.251145674211915, + "grad_norm": 2.932844242013293, + "learning_rate": 8.771944937264211e-06, + "loss": 0.2246, + "step": 3617 + }, + { + "epoch": 0.25121510901263716, + "grad_norm": 3.36799511813649, + "learning_rate": 8.771206697319589e-06, + "loss": 0.525, + "step": 3618 + }, + { + "epoch": 0.25128454381335924, + "grad_norm": 4.053378883153455, + "learning_rate": 8.770468266631597e-06, + "loss": 0.4624, + "step": 3619 + }, + { + "epoch": 0.2513539786140814, + "grad_norm": 3.7672796058655966, + "learning_rate": 8.76972964523759e-06, + "loss": 0.5482, + "step": 3620 + }, + { + "epoch": 0.2514234134148035, + "grad_norm": 4.479030793614153, + "learning_rate": 8.768990833174922e-06, + "loss": 0.8307, + "step": 3621 + }, + { + "epoch": 0.25149284821552564, + "grad_norm": 4.474930832897893, + "learning_rate": 8.768251830480961e-06, + "loss": 0.5424, + "step": 3622 + }, + { + "epoch": 0.2515622830162477, + "grad_norm": 4.492412094694208, + "learning_rate": 8.767512637193087e-06, + "loss": 0.4893, + "step": 3623 + }, + { + "epoch": 0.25163171781696986, + "grad_norm": 3.184243977779598, + "learning_rate": 8.766773253348688e-06, + "loss": 0.3811, + "step": 3624 + }, + { + "epoch": 0.251701152617692, + "grad_norm": 4.637354344562509, + "learning_rate": 8.76603367898516e-06, + "loss": 0.5443, + "step": 3625 + }, + { + "epoch": 0.2517705874184141, + "grad_norm": 4.079590444048545, + "learning_rate": 8.76529391413991e-06, + "loss": 0.5648, + "step": 3626 + }, + { + "epoch": 0.2518400222191362, + "grad_norm": 3.0484044303861886, + "learning_rate": 8.764553958850355e-06, + "loss": 0.3502, + "step": 3627 + }, + { + "epoch": 0.25190945701985834, + "grad_norm": 3.0354564220285494, + "learning_rate": 8.76381381315392e-06, + "loss": 0.4384, + "step": 3628 + }, + { + "epoch": 0.2519788918205805, + "grad_norm": 4.366812456871676, + "learning_rate": 8.763073477088042e-06, + "loss": 0.5984, + "step": 3629 + }, + { + "epoch": 0.2520483266213026, + "grad_norm": 4.226622389450282, + "learning_rate": 
8.762332950690164e-06, + "loss": 0.4704, + "step": 3630 + }, + { + "epoch": 0.25211776142202474, + "grad_norm": 4.040621303424189, + "learning_rate": 8.761592233997745e-06, + "loss": 0.4489, + "step": 3631 + }, + { + "epoch": 0.2521871962227468, + "grad_norm": 4.203514471193115, + "learning_rate": 8.760851327048246e-06, + "loss": 0.5196, + "step": 3632 + }, + { + "epoch": 0.25225663102346896, + "grad_norm": 4.2215117460974, + "learning_rate": 8.760110229879141e-06, + "loss": 0.8041, + "step": 3633 + }, + { + "epoch": 0.2523260658241911, + "grad_norm": 3.946850381200541, + "learning_rate": 8.75936894252792e-06, + "loss": 0.4112, + "step": 3634 + }, + { + "epoch": 0.2523955006249132, + "grad_norm": 3.7897335125099327, + "learning_rate": 8.758627465032067e-06, + "loss": 0.4262, + "step": 3635 + }, + { + "epoch": 0.2524649354256353, + "grad_norm": 2.8736133562246757, + "learning_rate": 8.757885797429094e-06, + "loss": 0.2942, + "step": 3636 + }, + { + "epoch": 0.25253437022635744, + "grad_norm": 3.785259565516119, + "learning_rate": 8.757143939756508e-06, + "loss": 0.4697, + "step": 3637 + }, + { + "epoch": 0.2526038050270796, + "grad_norm": 4.08405023953646, + "learning_rate": 8.756401892051833e-06, + "loss": 0.5857, + "step": 3638 + }, + { + "epoch": 0.2526732398278017, + "grad_norm": 4.0307755030621735, + "learning_rate": 8.755659654352599e-06, + "loss": 0.4941, + "step": 3639 + }, + { + "epoch": 0.2527426746285238, + "grad_norm": 3.9516246259651386, + "learning_rate": 8.754917226696354e-06, + "loss": 0.4915, + "step": 3640 + }, + { + "epoch": 0.2528121094292459, + "grad_norm": 4.884640595768288, + "learning_rate": 8.754174609120641e-06, + "loss": 0.4803, + "step": 3641 + }, + { + "epoch": 0.25288154422996806, + "grad_norm": 4.212373541346412, + "learning_rate": 8.753431801663026e-06, + "loss": 0.571, + "step": 3642 + }, + { + "epoch": 0.2529509790306902, + "grad_norm": 4.202355914645096, + "learning_rate": 8.752688804361076e-06, + "loss": 0.5444, + "step": 3643 + }, + { + "epoch": 0.2530204138314123, + "grad_norm": 3.0879241793157015, + "learning_rate": 8.751945617252375e-06, + "loss": 0.428, + "step": 3644 + }, + { + "epoch": 0.2530898486321344, + "grad_norm": 4.728177083807009, + "learning_rate": 8.751202240374509e-06, + "loss": 0.6275, + "step": 3645 + }, + { + "epoch": 0.25315928343285654, + "grad_norm": 4.118359739641388, + "learning_rate": 8.750458673765079e-06, + "loss": 0.2562, + "step": 3646 + }, + { + "epoch": 0.2532287182335787, + "grad_norm": 3.295194792987422, + "learning_rate": 8.74971491746169e-06, + "loss": 0.418, + "step": 3647 + }, + { + "epoch": 0.2532981530343008, + "grad_norm": 3.6907412271185565, + "learning_rate": 8.748970971501969e-06, + "loss": 0.2962, + "step": 3648 + }, + { + "epoch": 0.2533675878350229, + "grad_norm": 4.381140820125764, + "learning_rate": 8.748226835923536e-06, + "loss": 0.4977, + "step": 3649 + }, + { + "epoch": 0.253437022635745, + "grad_norm": 3.9172406466866456, + "learning_rate": 8.747482510764031e-06, + "loss": 0.5541, + "step": 3650 + }, + { + "epoch": 0.25350645743646716, + "grad_norm": 3.405115914085888, + "learning_rate": 8.746737996061101e-06, + "loss": 0.4783, + "step": 3651 + }, + { + "epoch": 0.2535758922371893, + "grad_norm": 3.7074751347279156, + "learning_rate": 8.745993291852403e-06, + "loss": 0.4945, + "step": 3652 + }, + { + "epoch": 0.25364532703791143, + "grad_norm": 4.250690387344984, + "learning_rate": 8.745248398175603e-06, + "loss": 0.4796, + "step": 3653 + }, + { + "epoch": 0.2537147618386335, + "grad_norm": 
3.7414967435958997, + "learning_rate": 8.744503315068378e-06, + "loss": 0.368, + "step": 3654 + }, + { + "epoch": 0.25378419663935564, + "grad_norm": 4.075277359632381, + "learning_rate": 8.743758042568411e-06, + "loss": 0.3835, + "step": 3655 + }, + { + "epoch": 0.2538536314400778, + "grad_norm": 3.8935116917625407, + "learning_rate": 8.743012580713399e-06, + "loss": 0.4072, + "step": 3656 + }, + { + "epoch": 0.2539230662407999, + "grad_norm": 5.225153267260068, + "learning_rate": 8.742266929541047e-06, + "loss": 0.4731, + "step": 3657 + }, + { + "epoch": 0.253992501041522, + "grad_norm": 4.466770558968311, + "learning_rate": 8.741521089089067e-06, + "loss": 0.6497, + "step": 3658 + }, + { + "epoch": 0.2540619358422441, + "grad_norm": 3.7053352967803677, + "learning_rate": 8.740775059395186e-06, + "loss": 0.5237, + "step": 3659 + }, + { + "epoch": 0.25413137064296626, + "grad_norm": 3.57390461731123, + "learning_rate": 8.740028840497135e-06, + "loss": 0.4618, + "step": 3660 + }, + { + "epoch": 0.2542008054436884, + "grad_norm": 3.724980180152874, + "learning_rate": 8.739282432432656e-06, + "loss": 0.5086, + "step": 3661 + }, + { + "epoch": 0.2542702402444105, + "grad_norm": 4.675003657936353, + "learning_rate": 8.738535835239503e-06, + "loss": 0.7822, + "step": 3662 + }, + { + "epoch": 0.2543396750451326, + "grad_norm": 4.16525664946798, + "learning_rate": 8.737789048955441e-06, + "loss": 0.5575, + "step": 3663 + }, + { + "epoch": 0.25440910984585474, + "grad_norm": 3.482066131114158, + "learning_rate": 8.737042073618235e-06, + "loss": 0.3992, + "step": 3664 + }, + { + "epoch": 0.2544785446465769, + "grad_norm": 3.3182608724070213, + "learning_rate": 8.73629490926567e-06, + "loss": 0.4392, + "step": 3665 + }, + { + "epoch": 0.254547979447299, + "grad_norm": 4.1851825580433895, + "learning_rate": 8.735547555935538e-06, + "loss": 0.4706, + "step": 3666 + }, + { + "epoch": 0.2546174142480211, + "grad_norm": 3.5759274157686423, + "learning_rate": 8.734800013665636e-06, + "loss": 0.3779, + "step": 3667 + }, + { + "epoch": 0.2546868490487432, + "grad_norm": 4.036421183320925, + "learning_rate": 8.734052282493777e-06, + "loss": 0.4628, + "step": 3668 + }, + { + "epoch": 0.25475628384946536, + "grad_norm": 3.057088943827132, + "learning_rate": 8.73330436245778e-06, + "loss": 0.3298, + "step": 3669 + }, + { + "epoch": 0.2548257186501875, + "grad_norm": 3.9774513532894225, + "learning_rate": 8.73255625359547e-06, + "loss": 0.4872, + "step": 3670 + }, + { + "epoch": 0.2548951534509096, + "grad_norm": 3.9196377120040458, + "learning_rate": 8.731807955944688e-06, + "loss": 0.4498, + "step": 3671 + }, + { + "epoch": 0.2549645882516317, + "grad_norm": 2.9761825268456295, + "learning_rate": 8.731059469543285e-06, + "loss": 0.1846, + "step": 3672 + }, + { + "epoch": 0.25503402305235384, + "grad_norm": 3.692458981632782, + "learning_rate": 8.730310794429115e-06, + "loss": 0.556, + "step": 3673 + }, + { + "epoch": 0.255103457853076, + "grad_norm": 24.86622286813129, + "learning_rate": 8.729561930640048e-06, + "loss": 0.2963, + "step": 3674 + }, + { + "epoch": 0.2551728926537981, + "grad_norm": 3.851109028921879, + "learning_rate": 8.728812878213954e-06, + "loss": 0.3692, + "step": 3675 + }, + { + "epoch": 0.2552423274545202, + "grad_norm": 4.162831173573109, + "learning_rate": 8.728063637188728e-06, + "loss": 0.6184, + "step": 3676 + }, + { + "epoch": 0.2553117622552423, + "grad_norm": 3.6093935617377224, + "learning_rate": 8.727314207602261e-06, + "loss": 0.3999, + "step": 3677 + }, + { + "epoch": 
0.25538119705596446, + "grad_norm": 4.285372398943112, + "learning_rate": 8.72656458949246e-06, + "loss": 0.4057, + "step": 3678 + }, + { + "epoch": 0.2554506318566866, + "grad_norm": 3.3743181014105144, + "learning_rate": 8.725814782897237e-06, + "loss": 0.4756, + "step": 3679 + }, + { + "epoch": 0.2555200666574087, + "grad_norm": 4.471286202051784, + "learning_rate": 8.72506478785452e-06, + "loss": 0.5942, + "step": 3680 + }, + { + "epoch": 0.2555895014581308, + "grad_norm": 5.726229220612755, + "learning_rate": 8.72431460440224e-06, + "loss": 0.7285, + "step": 3681 + }, + { + "epoch": 0.25565893625885294, + "grad_norm": 3.8798350765523564, + "learning_rate": 8.723564232578341e-06, + "loss": 0.6202, + "step": 3682 + }, + { + "epoch": 0.2557283710595751, + "grad_norm": 6.404690843120875, + "learning_rate": 8.722813672420777e-06, + "loss": 0.4851, + "step": 3683 + }, + { + "epoch": 0.25579780586029716, + "grad_norm": 4.297676062369375, + "learning_rate": 8.722062923967512e-06, + "loss": 0.6379, + "step": 3684 + }, + { + "epoch": 0.2558672406610193, + "grad_norm": 3.8458737779939236, + "learning_rate": 8.721311987256515e-06, + "loss": 0.52, + "step": 3685 + }, + { + "epoch": 0.2559366754617414, + "grad_norm": 4.669469687906569, + "learning_rate": 8.72056086232577e-06, + "loss": 0.7007, + "step": 3686 + }, + { + "epoch": 0.25600611026246356, + "grad_norm": 3.6919988310888714, + "learning_rate": 8.719809549213263e-06, + "loss": 0.4704, + "step": 3687 + }, + { + "epoch": 0.2560755450631857, + "grad_norm": 3.3944859547390185, + "learning_rate": 8.719058047957e-06, + "loss": 0.5059, + "step": 3688 + }, + { + "epoch": 0.2561449798639078, + "grad_norm": 4.471839233972908, + "learning_rate": 8.718306358594992e-06, + "loss": 0.7689, + "step": 3689 + }, + { + "epoch": 0.2562144146646299, + "grad_norm": 4.0410926864541254, + "learning_rate": 8.717554481165253e-06, + "loss": 0.1858, + "step": 3690 + }, + { + "epoch": 0.25628384946535204, + "grad_norm": 3.9511930998372895, + "learning_rate": 8.716802415705817e-06, + "loss": 0.5426, + "step": 3691 + }, + { + "epoch": 0.2563532842660742, + "grad_norm": 4.538200525076151, + "learning_rate": 8.716050162254719e-06, + "loss": 0.8154, + "step": 3692 + }, + { + "epoch": 0.25642271906679626, + "grad_norm": 4.022554026893457, + "learning_rate": 8.715297720850008e-06, + "loss": 0.602, + "step": 3693 + }, + { + "epoch": 0.2564921538675184, + "grad_norm": 3.4645986290554394, + "learning_rate": 8.714545091529744e-06, + "loss": 0.5585, + "step": 3694 + }, + { + "epoch": 0.2565615886682405, + "grad_norm": 4.195554527251548, + "learning_rate": 8.713792274331992e-06, + "loss": 0.4479, + "step": 3695 + }, + { + "epoch": 0.25663102346896266, + "grad_norm": 4.464151820857455, + "learning_rate": 8.71303926929483e-06, + "loss": 0.7188, + "step": 3696 + }, + { + "epoch": 0.25670045826968474, + "grad_norm": 3.3854129015094743, + "learning_rate": 8.712286076456343e-06, + "loss": 0.5404, + "step": 3697 + }, + { + "epoch": 0.2567698930704069, + "grad_norm": 3.698147523172282, + "learning_rate": 8.711532695854626e-06, + "loss": 0.4118, + "step": 3698 + }, + { + "epoch": 0.256839327871129, + "grad_norm": 6.089872241714095, + "learning_rate": 8.710779127527787e-06, + "loss": 0.761, + "step": 3699 + }, + { + "epoch": 0.25690876267185114, + "grad_norm": 3.0248302688147124, + "learning_rate": 8.710025371513938e-06, + "loss": 0.2765, + "step": 3700 + }, + { + "epoch": 0.2569781974725733, + "grad_norm": 4.7582300074726325, + "learning_rate": 8.709271427851202e-06, + "loss": 0.8491, + 
"step": 3701 + }, + { + "epoch": 0.25704763227329536, + "grad_norm": 3.927282465707881, + "learning_rate": 8.708517296577717e-06, + "loss": 0.6237, + "step": 3702 + }, + { + "epoch": 0.2571170670740175, + "grad_norm": 3.9276460844862453, + "learning_rate": 8.707762977731623e-06, + "loss": 0.5451, + "step": 3703 + }, + { + "epoch": 0.2571865018747396, + "grad_norm": 3.7129210863975564, + "learning_rate": 8.707008471351072e-06, + "loss": 0.4854, + "step": 3704 + }, + { + "epoch": 0.25725593667546176, + "grad_norm": 4.378542857487529, + "learning_rate": 8.706253777474229e-06, + "loss": 0.5442, + "step": 3705 + }, + { + "epoch": 0.25732537147618384, + "grad_norm": 2.7478404921210866, + "learning_rate": 8.705498896139262e-06, + "loss": 0.3281, + "step": 3706 + }, + { + "epoch": 0.257394806276906, + "grad_norm": 3.764600016738008, + "learning_rate": 8.704743827384354e-06, + "loss": 0.4428, + "step": 3707 + }, + { + "epoch": 0.2574642410776281, + "grad_norm": 3.5789090920691433, + "learning_rate": 8.703988571247697e-06, + "loss": 0.5114, + "step": 3708 + }, + { + "epoch": 0.25753367587835024, + "grad_norm": 3.4778798247844627, + "learning_rate": 8.70323312776749e-06, + "loss": 0.5107, + "step": 3709 + }, + { + "epoch": 0.2576031106790724, + "grad_norm": 4.705778284769779, + "learning_rate": 8.70247749698194e-06, + "loss": 0.8659, + "step": 3710 + }, + { + "epoch": 0.25767254547979446, + "grad_norm": 2.9687088745549928, + "learning_rate": 8.70172167892927e-06, + "loss": 0.3563, + "step": 3711 + }, + { + "epoch": 0.2577419802805166, + "grad_norm": 4.061336723632215, + "learning_rate": 8.700965673647703e-06, + "loss": 0.3727, + "step": 3712 + }, + { + "epoch": 0.2578114150812387, + "grad_norm": 5.665995477290148, + "learning_rate": 8.700209481175482e-06, + "loss": 0.7981, + "step": 3713 + }, + { + "epoch": 0.25788084988196086, + "grad_norm": 4.2363723457374585, + "learning_rate": 8.699453101550851e-06, + "loss": 0.5755, + "step": 3714 + }, + { + "epoch": 0.25795028468268294, + "grad_norm": 4.403971077616883, + "learning_rate": 8.69869653481207e-06, + "loss": 0.7409, + "step": 3715 + }, + { + "epoch": 0.2580197194834051, + "grad_norm": 3.5425542790285633, + "learning_rate": 8.697939780997403e-06, + "loss": 0.3264, + "step": 3716 + }, + { + "epoch": 0.2580891542841272, + "grad_norm": 3.5325835683909896, + "learning_rate": 8.697182840145129e-06, + "loss": 0.5285, + "step": 3717 + }, + { + "epoch": 0.25815858908484934, + "grad_norm": 3.669265364198408, + "learning_rate": 8.696425712293528e-06, + "loss": 0.5511, + "step": 3718 + }, + { + "epoch": 0.2582280238855714, + "grad_norm": 3.6563355371365134, + "learning_rate": 8.695668397480896e-06, + "loss": 0.4521, + "step": 3719 + }, + { + "epoch": 0.25829745868629356, + "grad_norm": 3.2591664195204846, + "learning_rate": 8.69491089574554e-06, + "loss": 0.4233, + "step": 3720 + }, + { + "epoch": 0.2583668934870157, + "grad_norm": 3.2601829432722345, + "learning_rate": 8.694153207125773e-06, + "loss": 0.3865, + "step": 3721 + }, + { + "epoch": 0.2584363282877378, + "grad_norm": 4.1589999012102945, + "learning_rate": 8.693395331659915e-06, + "loss": 0.6507, + "step": 3722 + }, + { + "epoch": 0.25850576308845996, + "grad_norm": 3.8521434799643637, + "learning_rate": 8.692637269386301e-06, + "loss": 0.4159, + "step": 3723 + }, + { + "epoch": 0.25857519788918204, + "grad_norm": 4.223491911413969, + "learning_rate": 8.691879020343275e-06, + "loss": 0.4368, + "step": 3724 + }, + { + "epoch": 0.2586446326899042, + "grad_norm": 3.8970764252375183, + "learning_rate": 
8.691120584569182e-06, + "loss": 0.3934, + "step": 3725 + }, + { + "epoch": 0.2587140674906263, + "grad_norm": 4.544287573826822, + "learning_rate": 8.690361962102389e-06, + "loss": 0.5748, + "step": 3726 + }, + { + "epoch": 0.25878350229134844, + "grad_norm": 3.4477499550117856, + "learning_rate": 8.689603152981262e-06, + "loss": 0.5012, + "step": 3727 + }, + { + "epoch": 0.2588529370920705, + "grad_norm": 4.679099911045072, + "learning_rate": 8.688844157244185e-06, + "loss": 0.6416, + "step": 3728 + }, + { + "epoch": 0.25892237189279266, + "grad_norm": 2.8864586149355462, + "learning_rate": 8.688084974929542e-06, + "loss": 0.3298, + "step": 3729 + }, + { + "epoch": 0.2589918066935148, + "grad_norm": 3.823790535025746, + "learning_rate": 8.687325606075735e-06, + "loss": 0.4523, + "step": 3730 + }, + { + "epoch": 0.2590612414942369, + "grad_norm": 4.238353655221532, + "learning_rate": 8.686566050721174e-06, + "loss": 0.619, + "step": 3731 + }, + { + "epoch": 0.259130676294959, + "grad_norm": 4.356311785181821, + "learning_rate": 8.685806308904272e-06, + "loss": 0.6209, + "step": 3732 + }, + { + "epoch": 0.25920011109568114, + "grad_norm": 4.558367248298183, + "learning_rate": 8.685046380663457e-06, + "loss": 0.6248, + "step": 3733 + }, + { + "epoch": 0.2592695458964033, + "grad_norm": 3.7073873196327547, + "learning_rate": 8.684286266037168e-06, + "loss": 0.3233, + "step": 3734 + }, + { + "epoch": 0.2593389806971254, + "grad_norm": 3.3628312008263204, + "learning_rate": 8.683525965063845e-06, + "loss": 0.3987, + "step": 3735 + }, + { + "epoch": 0.25940841549784754, + "grad_norm": 3.679609783276532, + "learning_rate": 8.68276547778195e-06, + "loss": 0.298, + "step": 3736 + }, + { + "epoch": 0.2594778502985696, + "grad_norm": 3.9455659140466603, + "learning_rate": 8.682004804229945e-06, + "loss": 0.303, + "step": 3737 + }, + { + "epoch": 0.25954728509929176, + "grad_norm": 3.103492199022302, + "learning_rate": 8.6812439444463e-06, + "loss": 0.1908, + "step": 3738 + }, + { + "epoch": 0.2596167199000139, + "grad_norm": 3.341647308622433, + "learning_rate": 8.680482898469504e-06, + "loss": 0.2076, + "step": 3739 + }, + { + "epoch": 0.259686154700736, + "grad_norm": 3.3935584487925445, + "learning_rate": 8.679721666338048e-06, + "loss": 0.4279, + "step": 3740 + }, + { + "epoch": 0.2597555895014581, + "grad_norm": 4.834601659346408, + "learning_rate": 8.678960248090433e-06, + "loss": 0.6755, + "step": 3741 + }, + { + "epoch": 0.25982502430218024, + "grad_norm": 3.672041143407877, + "learning_rate": 8.678198643765174e-06, + "loss": 0.4076, + "step": 3742 + }, + { + "epoch": 0.2598944591029024, + "grad_norm": 4.688737112950332, + "learning_rate": 8.677436853400786e-06, + "loss": 0.5583, + "step": 3743 + }, + { + "epoch": 0.2599638939036245, + "grad_norm": 3.1196529498052517, + "learning_rate": 8.676674877035806e-06, + "loss": 0.3651, + "step": 3744 + }, + { + "epoch": 0.26003332870434664, + "grad_norm": 3.535350411870906, + "learning_rate": 8.675912714708771e-06, + "loss": 0.4869, + "step": 3745 + }, + { + "epoch": 0.2601027635050687, + "grad_norm": 4.41781643681792, + "learning_rate": 8.67515036645823e-06, + "loss": 0.6239, + "step": 3746 + }, + { + "epoch": 0.26017219830579086, + "grad_norm": 3.9610319128571203, + "learning_rate": 8.674387832322744e-06, + "loss": 0.5729, + "step": 3747 + }, + { + "epoch": 0.260241633106513, + "grad_norm": 3.820885237594903, + "learning_rate": 8.673625112340876e-06, + "loss": 0.4452, + "step": 3748 + }, + { + "epoch": 0.2603110679072351, + "grad_norm": 
4.001776286416796, + "learning_rate": 8.672862206551209e-06, + "loss": 0.5465, + "step": 3749 + }, + { + "epoch": 0.2603805027079572, + "grad_norm": 3.352673362378982, + "learning_rate": 8.672099114992327e-06, + "loss": 0.5004, + "step": 3750 + }, + { + "epoch": 0.26044993750867934, + "grad_norm": 4.064581245372834, + "learning_rate": 8.671335837702827e-06, + "loss": 0.7259, + "step": 3751 + }, + { + "epoch": 0.2605193723094015, + "grad_norm": 4.37582457625417, + "learning_rate": 8.670572374721316e-06, + "loss": 0.6717, + "step": 3752 + }, + { + "epoch": 0.2605888071101236, + "grad_norm": 3.2761967136565553, + "learning_rate": 8.669808726086407e-06, + "loss": 0.2703, + "step": 3753 + }, + { + "epoch": 0.2606582419108457, + "grad_norm": 3.4886537153901784, + "learning_rate": 8.669044891836723e-06, + "loss": 0.2042, + "step": 3754 + }, + { + "epoch": 0.2607276767115678, + "grad_norm": 4.673566482489093, + "learning_rate": 8.668280872010902e-06, + "loss": 0.6267, + "step": 3755 + }, + { + "epoch": 0.26079711151228996, + "grad_norm": 3.6465630390547363, + "learning_rate": 8.667516666647586e-06, + "loss": 0.3612, + "step": 3756 + }, + { + "epoch": 0.2608665463130121, + "grad_norm": 3.2626920658503207, + "learning_rate": 8.666752275785427e-06, + "loss": 0.4037, + "step": 3757 + }, + { + "epoch": 0.26093598111373423, + "grad_norm": 4.606720147918789, + "learning_rate": 8.665987699463087e-06, + "loss": 0.4988, + "step": 3758 + }, + { + "epoch": 0.2610054159144563, + "grad_norm": 3.25884793127794, + "learning_rate": 8.665222937719237e-06, + "loss": 0.37, + "step": 3759 + }, + { + "epoch": 0.26107485071517844, + "grad_norm": 5.113012219061018, + "learning_rate": 8.664457990592559e-06, + "loss": 0.547, + "step": 3760 + }, + { + "epoch": 0.2611442855159006, + "grad_norm": 3.1486922647506543, + "learning_rate": 8.663692858121741e-06, + "loss": 0.298, + "step": 3761 + }, + { + "epoch": 0.2612137203166227, + "grad_norm": 3.836045705085945, + "learning_rate": 8.662927540345485e-06, + "loss": 0.3853, + "step": 3762 + }, + { + "epoch": 0.2612831551173448, + "grad_norm": 3.4533157945695168, + "learning_rate": 8.662162037302498e-06, + "loss": 0.2393, + "step": 3763 + }, + { + "epoch": 0.2613525899180669, + "grad_norm": 4.700753035554289, + "learning_rate": 8.661396349031502e-06, + "loss": 0.582, + "step": 3764 + }, + { + "epoch": 0.26142202471878906, + "grad_norm": 4.927499997392843, + "learning_rate": 8.660630475571217e-06, + "loss": 0.4884, + "step": 3765 + }, + { + "epoch": 0.2614914595195112, + "grad_norm": 2.6440646874593394, + "learning_rate": 8.65986441696039e-06, + "loss": 0.2278, + "step": 3766 + }, + { + "epoch": 0.26156089432023333, + "grad_norm": 2.927078536762051, + "learning_rate": 8.65909817323776e-06, + "loss": 0.324, + "step": 3767 + }, + { + "epoch": 0.2616303291209554, + "grad_norm": 3.402025023092969, + "learning_rate": 8.658331744442083e-06, + "loss": 0.422, + "step": 3768 + }, + { + "epoch": 0.26169976392167754, + "grad_norm": 3.9085888250836014, + "learning_rate": 8.657565130612129e-06, + "loss": 0.4297, + "step": 3769 + }, + { + "epoch": 0.2617691987223997, + "grad_norm": 4.704031019647921, + "learning_rate": 8.656798331786667e-06, + "loss": 0.7359, + "step": 3770 + }, + { + "epoch": 0.2618386335231218, + "grad_norm": 3.1802009589598046, + "learning_rate": 8.656031348004485e-06, + "loss": 0.2815, + "step": 3771 + }, + { + "epoch": 0.2619080683238439, + "grad_norm": 2.998361920106762, + "learning_rate": 8.655264179304374e-06, + "loss": 0.4179, + "step": 3772 + }, + { + "epoch": 
0.261977503124566, + "grad_norm": 4.423547920011755, + "learning_rate": 8.654496825725138e-06, + "loss": 0.5624, + "step": 3773 + }, + { + "epoch": 0.26204693792528816, + "grad_norm": 3.57355921739013, + "learning_rate": 8.653729287305587e-06, + "loss": 0.3846, + "step": 3774 + }, + { + "epoch": 0.2621163727260103, + "grad_norm": 4.350798646433192, + "learning_rate": 8.652961564084544e-06, + "loss": 0.5279, + "step": 3775 + }, + { + "epoch": 0.2621858075267324, + "grad_norm": 3.9354246477590733, + "learning_rate": 8.652193656100837e-06, + "loss": 0.6322, + "step": 3776 + }, + { + "epoch": 0.2622552423274545, + "grad_norm": 4.3015505002426755, + "learning_rate": 8.651425563393307e-06, + "loss": 0.7676, + "step": 3777 + }, + { + "epoch": 0.26232467712817664, + "grad_norm": 3.947754858390564, + "learning_rate": 8.650657286000808e-06, + "loss": 0.6426, + "step": 3778 + }, + { + "epoch": 0.2623941119288988, + "grad_norm": 3.514071081103875, + "learning_rate": 8.649888823962191e-06, + "loss": 0.4179, + "step": 3779 + }, + { + "epoch": 0.2624635467296209, + "grad_norm": 3.801685335216067, + "learning_rate": 8.649120177316328e-06, + "loss": 0.3758, + "step": 3780 + }, + { + "epoch": 0.262532981530343, + "grad_norm": 6.882026619986459, + "learning_rate": 8.648351346102095e-06, + "loss": 0.7294, + "step": 3781 + }, + { + "epoch": 0.2626024163310651, + "grad_norm": 3.1114883394645783, + "learning_rate": 8.647582330358381e-06, + "loss": 0.2768, + "step": 3782 + }, + { + "epoch": 0.26267185113178726, + "grad_norm": 3.5751433738350284, + "learning_rate": 8.64681313012408e-06, + "loss": 0.3243, + "step": 3783 + }, + { + "epoch": 0.2627412859325094, + "grad_norm": 6.539711888238014, + "learning_rate": 8.646043745438097e-06, + "loss": 0.9965, + "step": 3784 + }, + { + "epoch": 0.2628107207332315, + "grad_norm": 2.8124836461727023, + "learning_rate": 8.645274176339346e-06, + "loss": 0.5004, + "step": 3785 + }, + { + "epoch": 0.2628801555339536, + "grad_norm": 2.788695676727389, + "learning_rate": 8.644504422866755e-06, + "loss": 0.311, + "step": 3786 + }, + { + "epoch": 0.26294959033467574, + "grad_norm": 4.296597862178043, + "learning_rate": 8.643734485059251e-06, + "loss": 0.767, + "step": 3787 + }, + { + "epoch": 0.2630190251353979, + "grad_norm": 4.746730540125942, + "learning_rate": 8.642964362955781e-06, + "loss": 0.5542, + "step": 3788 + }, + { + "epoch": 0.26308845993611996, + "grad_norm": 2.914023897910806, + "learning_rate": 8.642194056595296e-06, + "loss": 0.317, + "step": 3789 + }, + { + "epoch": 0.2631578947368421, + "grad_norm": 4.54338720894241, + "learning_rate": 8.64142356601676e-06, + "loss": 0.762, + "step": 3790 + }, + { + "epoch": 0.2632273295375642, + "grad_norm": 3.584984396457684, + "learning_rate": 8.640652891259137e-06, + "loss": 0.5233, + "step": 3791 + }, + { + "epoch": 0.26329676433828636, + "grad_norm": 3.408992675298201, + "learning_rate": 8.639882032361411e-06, + "loss": 0.4627, + "step": 3792 + }, + { + "epoch": 0.2633661991390085, + "grad_norm": 3.8739063791524706, + "learning_rate": 8.63911098936257e-06, + "loss": 0.4694, + "step": 3793 + }, + { + "epoch": 0.2634356339397306, + "grad_norm": 3.5742839483160447, + "learning_rate": 8.638339762301616e-06, + "loss": 0.3901, + "step": 3794 + }, + { + "epoch": 0.2635050687404527, + "grad_norm": 3.3205130963177987, + "learning_rate": 8.63756835121755e-06, + "loss": 0.4456, + "step": 3795 + }, + { + "epoch": 0.26357450354117484, + "grad_norm": 3.6653432946672933, + "learning_rate": 8.636796756149396e-06, + "loss": 0.4762, + "step": 
3796 + }, + { + "epoch": 0.263643938341897, + "grad_norm": 3.32571864693562, + "learning_rate": 8.636024977136177e-06, + "loss": 0.3565, + "step": 3797 + }, + { + "epoch": 0.26371337314261906, + "grad_norm": 4.290371587467403, + "learning_rate": 8.635253014216928e-06, + "loss": 0.5047, + "step": 3798 + }, + { + "epoch": 0.2637828079433412, + "grad_norm": 3.374953103251966, + "learning_rate": 8.634480867430695e-06, + "loss": 0.4623, + "step": 3799 + }, + { + "epoch": 0.2638522427440633, + "grad_norm": 3.9390408204088514, + "learning_rate": 8.633708536816534e-06, + "loss": 0.5433, + "step": 3800 + }, + { + "epoch": 0.26392167754478546, + "grad_norm": 4.712009496590449, + "learning_rate": 8.632936022413505e-06, + "loss": 0.4005, + "step": 3801 + }, + { + "epoch": 0.2639911123455076, + "grad_norm": 3.688528639183929, + "learning_rate": 8.632163324260684e-06, + "loss": 0.6237, + "step": 3802 + }, + { + "epoch": 0.2640605471462297, + "grad_norm": 4.170705360892748, + "learning_rate": 8.631390442397152e-06, + "loss": 0.6521, + "step": 3803 + }, + { + "epoch": 0.2641299819469518, + "grad_norm": 4.08723631012054, + "learning_rate": 8.630617376862e-06, + "loss": 0.5216, + "step": 3804 + }, + { + "epoch": 0.26419941674767394, + "grad_norm": 3.4493718510062004, + "learning_rate": 8.629844127694332e-06, + "loss": 0.2866, + "step": 3805 + }, + { + "epoch": 0.2642688515483961, + "grad_norm": 3.302268233080713, + "learning_rate": 8.629070694933253e-06, + "loss": 0.4769, + "step": 3806 + }, + { + "epoch": 0.26433828634911816, + "grad_norm": 3.776633995435359, + "learning_rate": 8.628297078617885e-06, + "loss": 0.5345, + "step": 3807 + }, + { + "epoch": 0.2644077211498403, + "grad_norm": 3.8120901718808957, + "learning_rate": 8.627523278787357e-06, + "loss": 0.4971, + "step": 3808 + }, + { + "epoch": 0.2644771559505624, + "grad_norm": 3.4268250674306353, + "learning_rate": 8.626749295480805e-06, + "loss": 0.4652, + "step": 3809 + }, + { + "epoch": 0.26454659075128456, + "grad_norm": 3.970691406011485, + "learning_rate": 8.62597512873738e-06, + "loss": 0.475, + "step": 3810 + }, + { + "epoch": 0.26461602555200664, + "grad_norm": 4.718484252031654, + "learning_rate": 8.625200778596234e-06, + "loss": 0.6127, + "step": 3811 + }, + { + "epoch": 0.2646854603527288, + "grad_norm": 3.9584262285431713, + "learning_rate": 8.624426245096535e-06, + "loss": 0.5658, + "step": 3812 + }, + { + "epoch": 0.2647548951534509, + "grad_norm": 3.2644966313755113, + "learning_rate": 8.623651528277459e-06, + "loss": 0.4205, + "step": 3813 + }, + { + "epoch": 0.26482432995417304, + "grad_norm": 4.556259436948974, + "learning_rate": 8.62287662817819e-06, + "loss": 0.6465, + "step": 3814 + }, + { + "epoch": 0.2648937647548952, + "grad_norm": 2.8780325686080412, + "learning_rate": 8.622101544837917e-06, + "loss": 0.335, + "step": 3815 + }, + { + "epoch": 0.26496319955561726, + "grad_norm": 4.100471378746066, + "learning_rate": 8.621326278295849e-06, + "loss": 0.5263, + "step": 3816 + }, + { + "epoch": 0.2650326343563394, + "grad_norm": 2.224855121211458, + "learning_rate": 8.620550828591197e-06, + "loss": 0.2391, + "step": 3817 + }, + { + "epoch": 0.2651020691570615, + "grad_norm": 3.6960904514928727, + "learning_rate": 8.61977519576318e-06, + "loss": 0.3606, + "step": 3818 + }, + { + "epoch": 0.26517150395778366, + "grad_norm": 3.9057121774521533, + "learning_rate": 8.618999379851028e-06, + "loss": 0.575, + "step": 3819 + }, + { + "epoch": 0.26524093875850574, + "grad_norm": 3.8153213463117206, + "learning_rate": 
8.618223380893984e-06, + "loss": 0.415, + "step": 3820 + }, + { + "epoch": 0.2653103735592279, + "grad_norm": 5.335332175421417, + "learning_rate": 8.617447198931295e-06, + "loss": 0.6197, + "step": 3821 + }, + { + "epoch": 0.26537980835995, + "grad_norm": 4.481521131599379, + "learning_rate": 8.61667083400222e-06, + "loss": 0.6084, + "step": 3822 + }, + { + "epoch": 0.26544924316067214, + "grad_norm": 5.224329864438417, + "learning_rate": 8.615894286146027e-06, + "loss": 0.6111, + "step": 3823 + }, + { + "epoch": 0.2655186779613943, + "grad_norm": 3.800069033108678, + "learning_rate": 8.615117555401994e-06, + "loss": 0.4478, + "step": 3824 + }, + { + "epoch": 0.26558811276211636, + "grad_norm": 5.239495041191937, + "learning_rate": 8.614340641809405e-06, + "loss": 0.6503, + "step": 3825 + }, + { + "epoch": 0.2656575475628385, + "grad_norm": 4.039016755031907, + "learning_rate": 8.613563545407554e-06, + "loss": 0.6781, + "step": 3826 + }, + { + "epoch": 0.2657269823635606, + "grad_norm": 4.231362451096744, + "learning_rate": 8.612786266235752e-06, + "loss": 0.4616, + "step": 3827 + }, + { + "epoch": 0.26579641716428276, + "grad_norm": 4.371438656833525, + "learning_rate": 8.612008804333307e-06, + "loss": 0.4893, + "step": 3828 + }, + { + "epoch": 0.26586585196500484, + "grad_norm": 3.213769006806774, + "learning_rate": 8.611231159739544e-06, + "loss": 0.2833, + "step": 3829 + }, + { + "epoch": 0.265935286765727, + "grad_norm": 4.112808008409419, + "learning_rate": 8.610453332493793e-06, + "loss": 0.5225, + "step": 3830 + }, + { + "epoch": 0.2660047215664491, + "grad_norm": 4.308154137605331, + "learning_rate": 8.6096753226354e-06, + "loss": 0.837, + "step": 3831 + }, + { + "epoch": 0.26607415636717124, + "grad_norm": 2.4229927303720684, + "learning_rate": 8.608897130203714e-06, + "loss": 0.2418, + "step": 3832 + }, + { + "epoch": 0.2661435911678933, + "grad_norm": 3.6925050085731472, + "learning_rate": 8.608118755238095e-06, + "loss": 0.5549, + "step": 3833 + }, + { + "epoch": 0.26621302596861546, + "grad_norm": 4.193244552810381, + "learning_rate": 8.607340197777911e-06, + "loss": 0.5605, + "step": 3834 + }, + { + "epoch": 0.2662824607693376, + "grad_norm": 2.994637132759814, + "learning_rate": 8.606561457862543e-06, + "loss": 0.1686, + "step": 3835 + }, + { + "epoch": 0.2663518955700597, + "grad_norm": 4.25942134858396, + "learning_rate": 8.605782535531377e-06, + "loss": 0.5894, + "step": 3836 + }, + { + "epoch": 0.26642133037078186, + "grad_norm": 3.292129482183229, + "learning_rate": 8.605003430823811e-06, + "loss": 0.3642, + "step": 3837 + }, + { + "epoch": 0.26649076517150394, + "grad_norm": 5.713508352119714, + "learning_rate": 8.60422414377925e-06, + "loss": 0.7843, + "step": 3838 + }, + { + "epoch": 0.2665601999722261, + "grad_norm": 3.861083019718728, + "learning_rate": 8.603444674437111e-06, + "loss": 0.7057, + "step": 3839 + }, + { + "epoch": 0.2666296347729482, + "grad_norm": 4.103552462258168, + "learning_rate": 8.602665022836819e-06, + "loss": 0.315, + "step": 3840 + }, + { + "epoch": 0.26669906957367034, + "grad_norm": 5.847778639386878, + "learning_rate": 8.601885189017805e-06, + "loss": 0.8071, + "step": 3841 + }, + { + "epoch": 0.2667685043743924, + "grad_norm": 3.577010243957329, + "learning_rate": 8.601105173019515e-06, + "loss": 0.3759, + "step": 3842 + }, + { + "epoch": 0.26683793917511456, + "grad_norm": 2.63615955770399, + "learning_rate": 8.6003249748814e-06, + "loss": 0.1789, + "step": 3843 + }, + { + "epoch": 0.2669073739758367, + "grad_norm": 
3.9319838727918874, + "learning_rate": 8.599544594642921e-06, + "loss": 0.6455, + "step": 3844 + }, + { + "epoch": 0.2669768087765588, + "grad_norm": 4.437684699802833, + "learning_rate": 8.598764032343551e-06, + "loss": 0.6937, + "step": 3845 + }, + { + "epoch": 0.2670462435772809, + "grad_norm": 3.421097589180136, + "learning_rate": 8.597983288022766e-06, + "loss": 0.4942, + "step": 3846 + }, + { + "epoch": 0.26711567837800304, + "grad_norm": 3.046556726355791, + "learning_rate": 8.59720236172006e-06, + "loss": 0.3484, + "step": 3847 + }, + { + "epoch": 0.2671851131787252, + "grad_norm": 3.550665537742162, + "learning_rate": 8.59642125347493e-06, + "loss": 0.6028, + "step": 3848 + }, + { + "epoch": 0.2672545479794473, + "grad_norm": 4.26848441595344, + "learning_rate": 8.59563996332688e-06, + "loss": 0.6712, + "step": 3849 + }, + { + "epoch": 0.26732398278016944, + "grad_norm": 3.518828961044441, + "learning_rate": 8.59485849131543e-06, + "loss": 0.4858, + "step": 3850 + }, + { + "epoch": 0.2673934175808915, + "grad_norm": 5.818887364146886, + "learning_rate": 8.594076837480105e-06, + "loss": 0.3209, + "step": 3851 + }, + { + "epoch": 0.26746285238161366, + "grad_norm": 2.8048280036065534, + "learning_rate": 8.593295001860443e-06, + "loss": 0.4392, + "step": 3852 + }, + { + "epoch": 0.2675322871823358, + "grad_norm": 3.089201713948864, + "learning_rate": 8.592512984495984e-06, + "loss": 0.4257, + "step": 3853 + }, + { + "epoch": 0.2676017219830579, + "grad_norm": 3.663746925968469, + "learning_rate": 8.591730785426282e-06, + "loss": 0.462, + "step": 3854 + }, + { + "epoch": 0.26767115678378, + "grad_norm": 3.569923215691886, + "learning_rate": 8.590948404690904e-06, + "loss": 0.3531, + "step": 3855 + }, + { + "epoch": 0.26774059158450214, + "grad_norm": 3.9023213865239192, + "learning_rate": 8.59016584232942e-06, + "loss": 0.5818, + "step": 3856 + }, + { + "epoch": 0.2678100263852243, + "grad_norm": 3.2719269519988448, + "learning_rate": 8.589383098381408e-06, + "loss": 0.2685, + "step": 3857 + }, + { + "epoch": 0.2678794611859464, + "grad_norm": 4.158223664845444, + "learning_rate": 8.58860017288646e-06, + "loss": 0.7703, + "step": 3858 + }, + { + "epoch": 0.26794889598666854, + "grad_norm": 2.829621025541304, + "learning_rate": 8.587817065884177e-06, + "loss": 0.32, + "step": 3859 + }, + { + "epoch": 0.2680183307873906, + "grad_norm": 4.087408740164263, + "learning_rate": 8.587033777414167e-06, + "loss": 0.3155, + "step": 3860 + }, + { + "epoch": 0.26808776558811276, + "grad_norm": 5.557694376384905, + "learning_rate": 8.586250307516046e-06, + "loss": 0.9102, + "step": 3861 + }, + { + "epoch": 0.2681572003888349, + "grad_norm": 4.0097911468739165, + "learning_rate": 8.585466656229445e-06, + "loss": 0.576, + "step": 3862 + }, + { + "epoch": 0.26822663518955703, + "grad_norm": 3.8949080584365805, + "learning_rate": 8.584682823593996e-06, + "loss": 0.5071, + "step": 3863 + }, + { + "epoch": 0.2682960699902791, + "grad_norm": 3.81584766261813, + "learning_rate": 8.583898809649345e-06, + "loss": 0.471, + "step": 3864 + }, + { + "epoch": 0.26836550479100124, + "grad_norm": 4.392704051084909, + "learning_rate": 8.58311461443515e-06, + "loss": 0.5164, + "step": 3865 + }, + { + "epoch": 0.2684349395917234, + "grad_norm": 3.0818068184538605, + "learning_rate": 8.582330237991072e-06, + "loss": 0.3706, + "step": 3866 + }, + { + "epoch": 0.2685043743924455, + "grad_norm": 3.1203525685382076, + "learning_rate": 8.581545680356783e-06, + "loss": 0.215, + "step": 3867 + }, + { + "epoch": 
0.2685738091931676, + "grad_norm": 5.03731914711839, + "learning_rate": 8.580760941571968e-06, + "loss": 0.786, + "step": 3868 + }, + { + "epoch": 0.2686432439938897, + "grad_norm": 3.6466514811477366, + "learning_rate": 8.579976021676315e-06, + "loss": 0.3877, + "step": 3869 + }, + { + "epoch": 0.26871267879461186, + "grad_norm": 3.8792744694470773, + "learning_rate": 8.579190920709527e-06, + "loss": 0.473, + "step": 3870 + }, + { + "epoch": 0.268782113595334, + "grad_norm": 3.509184200615901, + "learning_rate": 8.57840563871131e-06, + "loss": 0.3894, + "step": 3871 + }, + { + "epoch": 0.26885154839605613, + "grad_norm": 3.2675731402737838, + "learning_rate": 8.577620175721388e-06, + "loss": 0.4483, + "step": 3872 + }, + { + "epoch": 0.2689209831967782, + "grad_norm": 6.009858957663147, + "learning_rate": 8.576834531779484e-06, + "loss": 0.6879, + "step": 3873 + }, + { + "epoch": 0.26899041799750034, + "grad_norm": 3.582344873853896, + "learning_rate": 8.576048706925338e-06, + "loss": 0.2515, + "step": 3874 + }, + { + "epoch": 0.2690598527982225, + "grad_norm": 3.4651956502576295, + "learning_rate": 8.575262701198694e-06, + "loss": 0.3092, + "step": 3875 + }, + { + "epoch": 0.2691292875989446, + "grad_norm": 5.552752785621959, + "learning_rate": 8.574476514639309e-06, + "loss": 0.9008, + "step": 3876 + }, + { + "epoch": 0.2691987223996667, + "grad_norm": 3.796075710995265, + "learning_rate": 8.573690147286947e-06, + "loss": 0.4454, + "step": 3877 + }, + { + "epoch": 0.2692681572003888, + "grad_norm": 4.513370285562939, + "learning_rate": 8.57290359918138e-06, + "loss": 0.5987, + "step": 3878 + }, + { + "epoch": 0.26933759200111096, + "grad_norm": 2.825597766741668, + "learning_rate": 8.572116870362392e-06, + "loss": 0.2787, + "step": 3879 + }, + { + "epoch": 0.2694070268018331, + "grad_norm": 5.247271576577546, + "learning_rate": 8.571329960869776e-06, + "loss": 0.4598, + "step": 3880 + }, + { + "epoch": 0.26947646160255523, + "grad_norm": 4.003190775525243, + "learning_rate": 8.570542870743333e-06, + "loss": 0.3947, + "step": 3881 + }, + { + "epoch": 0.2695458964032773, + "grad_norm": 4.009250111141734, + "learning_rate": 8.569755600022869e-06, + "loss": 0.3646, + "step": 3882 + }, + { + "epoch": 0.26961533120399944, + "grad_norm": 3.2573358114813353, + "learning_rate": 8.568968148748208e-06, + "loss": 0.3189, + "step": 3883 + }, + { + "epoch": 0.2696847660047216, + "grad_norm": 3.6530009168342485, + "learning_rate": 8.568180516959175e-06, + "loss": 0.5447, + "step": 3884 + }, + { + "epoch": 0.2697542008054437, + "grad_norm": 3.0762048773545834, + "learning_rate": 8.567392704695612e-06, + "loss": 0.245, + "step": 3885 + }, + { + "epoch": 0.2698236356061658, + "grad_norm": 3.504238387281911, + "learning_rate": 8.56660471199736e-06, + "loss": 0.4517, + "step": 3886 + }, + { + "epoch": 0.2698930704068879, + "grad_norm": 4.826330345087875, + "learning_rate": 8.56581653890428e-06, + "loss": 0.6409, + "step": 3887 + }, + { + "epoch": 0.26996250520761006, + "grad_norm": 4.098379604927787, + "learning_rate": 8.56502818545623e-06, + "loss": 0.5758, + "step": 3888 + }, + { + "epoch": 0.2700319400083322, + "grad_norm": 4.813088830645753, + "learning_rate": 8.564239651693092e-06, + "loss": 0.3833, + "step": 3889 + }, + { + "epoch": 0.2701013748090543, + "grad_norm": 3.952636111842192, + "learning_rate": 8.563450937654746e-06, + "loss": 0.3664, + "step": 3890 + }, + { + "epoch": 0.2701708096097764, + "grad_norm": 4.279041753253571, + "learning_rate": 8.562662043381083e-06, + "loss": 0.4426, + "step": 
3891 + }, + { + "epoch": 0.27024024441049854, + "grad_norm": 4.359928729625919, + "learning_rate": 8.561872968912005e-06, + "loss": 0.5343, + "step": 3892 + }, + { + "epoch": 0.2703096792112207, + "grad_norm": 4.2040795690099335, + "learning_rate": 8.561083714287423e-06, + "loss": 0.5281, + "step": 3893 + }, + { + "epoch": 0.2703791140119428, + "grad_norm": 3.340529601914484, + "learning_rate": 8.560294279547256e-06, + "loss": 0.2504, + "step": 3894 + }, + { + "epoch": 0.2704485488126649, + "grad_norm": 4.169057448270038, + "learning_rate": 8.559504664731432e-06, + "loss": 0.4156, + "step": 3895 + }, + { + "epoch": 0.270517983613387, + "grad_norm": 3.6196791851967562, + "learning_rate": 8.558714869879892e-06, + "loss": 0.3248, + "step": 3896 + }, + { + "epoch": 0.27058741841410916, + "grad_norm": 2.8569923021341577, + "learning_rate": 8.55792489503258e-06, + "loss": 0.3679, + "step": 3897 + }, + { + "epoch": 0.2706568532148313, + "grad_norm": 3.1882548681120553, + "learning_rate": 8.557134740229453e-06, + "loss": 0.3203, + "step": 3898 + }, + { + "epoch": 0.2707262880155534, + "grad_norm": 4.082105513197329, + "learning_rate": 8.556344405510478e-06, + "loss": 0.5906, + "step": 3899 + }, + { + "epoch": 0.2707957228162755, + "grad_norm": 3.5623634899337175, + "learning_rate": 8.555553890915625e-06, + "loss": 0.516, + "step": 3900 + }, + { + "epoch": 0.27086515761699764, + "grad_norm": 4.02893261001425, + "learning_rate": 8.55476319648488e-06, + "loss": 0.5333, + "step": 3901 + }, + { + "epoch": 0.2709345924177198, + "grad_norm": 5.080311954976574, + "learning_rate": 8.553972322258235e-06, + "loss": 0.6897, + "step": 3902 + }, + { + "epoch": 0.27100402721844186, + "grad_norm": 4.51289904146512, + "learning_rate": 8.553181268275691e-06, + "loss": 0.7505, + "step": 3903 + }, + { + "epoch": 0.271073462019164, + "grad_norm": 3.566562049272631, + "learning_rate": 8.55239003457726e-06, + "loss": 0.4698, + "step": 3904 + }, + { + "epoch": 0.2711428968198861, + "grad_norm": 2.4311598191921244, + "learning_rate": 8.55159862120296e-06, + "loss": 0.2783, + "step": 3905 + }, + { + "epoch": 0.27121233162060826, + "grad_norm": 4.35402246560189, + "learning_rate": 8.55080702819282e-06, + "loss": 0.5469, + "step": 3906 + }, + { + "epoch": 0.2712817664213304, + "grad_norm": 2.131067693260364, + "learning_rate": 8.550015255586881e-06, + "loss": 0.2056, + "step": 3907 + }, + { + "epoch": 0.2713512012220525, + "grad_norm": 3.3862729869829495, + "learning_rate": 8.549223303425187e-06, + "loss": 0.4187, + "step": 3908 + }, + { + "epoch": 0.2714206360227746, + "grad_norm": 4.079833994777261, + "learning_rate": 8.548431171747793e-06, + "loss": 0.455, + "step": 3909 + }, + { + "epoch": 0.27149007082349674, + "grad_norm": 3.5826218475095297, + "learning_rate": 8.547638860594765e-06, + "loss": 0.3455, + "step": 3910 + }, + { + "epoch": 0.2715595056242189, + "grad_norm": 3.0111521465501183, + "learning_rate": 8.54684637000618e-06, + "loss": 0.2479, + "step": 3911 + }, + { + "epoch": 0.27162894042494096, + "grad_norm": 3.007210227602982, + "learning_rate": 8.546053700022118e-06, + "loss": 0.457, + "step": 3912 + }, + { + "epoch": 0.2716983752256631, + "grad_norm": 3.1536558948414273, + "learning_rate": 8.545260850682673e-06, + "loss": 0.3014, + "step": 3913 + }, + { + "epoch": 0.2717678100263852, + "grad_norm": 3.9974074625601457, + "learning_rate": 8.544467822027946e-06, + "loss": 0.5332, + "step": 3914 + }, + { + "epoch": 0.27183724482710736, + "grad_norm": 4.546530981497959, + "learning_rate": 8.543674614098046e-06, + 
"loss": 0.6505, + "step": 3915 + }, + { + "epoch": 0.2719066796278295, + "grad_norm": 3.7122579297131852, + "learning_rate": 8.542881226933095e-06, + "loss": 0.4536, + "step": 3916 + }, + { + "epoch": 0.2719761144285516, + "grad_norm": 4.141981177387173, + "learning_rate": 8.54208766057322e-06, + "loss": 0.5269, + "step": 3917 + }, + { + "epoch": 0.2720455492292737, + "grad_norm": 3.5738918931496393, + "learning_rate": 8.541293915058561e-06, + "loss": 0.5078, + "step": 3918 + }, + { + "epoch": 0.27211498402999584, + "grad_norm": 3.873632814578405, + "learning_rate": 8.540499990429262e-06, + "loss": 0.3717, + "step": 3919 + }, + { + "epoch": 0.272184418830718, + "grad_norm": 4.4500103806219835, + "learning_rate": 8.53970588672548e-06, + "loss": 0.7431, + "step": 3920 + }, + { + "epoch": 0.27225385363144006, + "grad_norm": 3.2402467822452876, + "learning_rate": 8.538911603987379e-06, + "loss": 0.1798, + "step": 3921 + }, + { + "epoch": 0.2723232884321622, + "grad_norm": 4.4644854094805675, + "learning_rate": 8.538117142255133e-06, + "loss": 0.5751, + "step": 3922 + }, + { + "epoch": 0.2723927232328843, + "grad_norm": 3.8541168607766143, + "learning_rate": 8.537322501568927e-06, + "loss": 0.5901, + "step": 3923 + }, + { + "epoch": 0.27246215803360646, + "grad_norm": 3.2966949927377187, + "learning_rate": 8.536527681968951e-06, + "loss": 0.3975, + "step": 3924 + }, + { + "epoch": 0.27253159283432854, + "grad_norm": 4.459296143148606, + "learning_rate": 8.535732683495408e-06, + "loss": 0.6631, + "step": 3925 + }, + { + "epoch": 0.2726010276350507, + "grad_norm": 2.7370043017821066, + "learning_rate": 8.534937506188505e-06, + "loss": 0.2215, + "step": 3926 + }, + { + "epoch": 0.2726704624357728, + "grad_norm": 3.6387638061871885, + "learning_rate": 8.534142150088465e-06, + "loss": 0.4871, + "step": 3927 + }, + { + "epoch": 0.27273989723649494, + "grad_norm": 3.3401665506071017, + "learning_rate": 8.533346615235511e-06, + "loss": 0.3502, + "step": 3928 + }, + { + "epoch": 0.2728093320372171, + "grad_norm": 3.902736803443321, + "learning_rate": 8.532550901669888e-06, + "loss": 0.4743, + "step": 3929 + }, + { + "epoch": 0.27287876683793916, + "grad_norm": 5.121392973387632, + "learning_rate": 8.531755009431834e-06, + "loss": 0.6266, + "step": 3930 + }, + { + "epoch": 0.2729482016386613, + "grad_norm": 4.6875362862297765, + "learning_rate": 8.530958938561608e-06, + "loss": 0.602, + "step": 3931 + }, + { + "epoch": 0.2730176364393834, + "grad_norm": 3.5698927408132506, + "learning_rate": 8.530162689099478e-06, + "loss": 0.3078, + "step": 3932 + }, + { + "epoch": 0.27308707124010556, + "grad_norm": 3.842575041333963, + "learning_rate": 8.529366261085709e-06, + "loss": 0.3684, + "step": 3933 + }, + { + "epoch": 0.27315650604082764, + "grad_norm": 3.3087525843160663, + "learning_rate": 8.528569654560592e-06, + "loss": 0.4151, + "step": 3934 + }, + { + "epoch": 0.2732259408415498, + "grad_norm": 3.492061981393543, + "learning_rate": 8.527772869564412e-06, + "loss": 0.4676, + "step": 3935 + }, + { + "epoch": 0.2732953756422719, + "grad_norm": 3.7733239444621827, + "learning_rate": 8.526975906137476e-06, + "loss": 0.4144, + "step": 3936 + }, + { + "epoch": 0.27336481044299404, + "grad_norm": 4.19321264976016, + "learning_rate": 8.526178764320086e-06, + "loss": 0.6234, + "step": 3937 + }, + { + "epoch": 0.2734342452437161, + "grad_norm": 2.610959778680088, + "learning_rate": 8.525381444152566e-06, + "loss": 0.2507, + "step": 3938 + }, + { + "epoch": 0.27350368004443826, + "grad_norm": 4.553971601585641, + 
"learning_rate": 8.524583945675243e-06, + "loss": 0.2621, + "step": 3939 + }, + { + "epoch": 0.2735731148451604, + "grad_norm": 4.1710120958577255, + "learning_rate": 8.523786268928449e-06, + "loss": 0.6324, + "step": 3940 + }, + { + "epoch": 0.2736425496458825, + "grad_norm": 4.37450159491833, + "learning_rate": 8.522988413952535e-06, + "loss": 0.3864, + "step": 3941 + }, + { + "epoch": 0.27371198444660466, + "grad_norm": 3.8745029075193185, + "learning_rate": 8.522190380787853e-06, + "loss": 0.5334, + "step": 3942 + }, + { + "epoch": 0.27378141924732674, + "grad_norm": 4.221566270834122, + "learning_rate": 8.521392169474766e-06, + "loss": 0.4507, + "step": 3943 + }, + { + "epoch": 0.2738508540480489, + "grad_norm": 4.096753753463659, + "learning_rate": 8.520593780053653e-06, + "loss": 0.595, + "step": 3944 + }, + { + "epoch": 0.273920288848771, + "grad_norm": 3.8841435059680607, + "learning_rate": 8.519795212564886e-06, + "loss": 0.5323, + "step": 3945 + }, + { + "epoch": 0.27398972364949314, + "grad_norm": 4.497939338855549, + "learning_rate": 8.51899646704886e-06, + "loss": 0.6577, + "step": 3946 + }, + { + "epoch": 0.2740591584502152, + "grad_norm": 3.2301426627009513, + "learning_rate": 8.518197543545975e-06, + "loss": 0.2191, + "step": 3947 + }, + { + "epoch": 0.27412859325093736, + "grad_norm": 3.43826617069453, + "learning_rate": 8.51739844209664e-06, + "loss": 0.3505, + "step": 3948 + }, + { + "epoch": 0.2741980280516595, + "grad_norm": 2.8496398595137156, + "learning_rate": 8.516599162741272e-06, + "loss": 0.3319, + "step": 3949 + }, + { + "epoch": 0.2742674628523816, + "grad_norm": 3.9102946648353747, + "learning_rate": 8.515799705520298e-06, + "loss": 0.3348, + "step": 3950 + }, + { + "epoch": 0.27433689765310376, + "grad_norm": 4.780337176054412, + "learning_rate": 8.515000070474152e-06, + "loss": 0.7771, + "step": 3951 + }, + { + "epoch": 0.27440633245382584, + "grad_norm": 4.0990320522954455, + "learning_rate": 8.514200257643281e-06, + "loss": 0.5314, + "step": 3952 + }, + { + "epoch": 0.274475767254548, + "grad_norm": 4.193847029044622, + "learning_rate": 8.513400267068137e-06, + "loss": 0.3235, + "step": 3953 + }, + { + "epoch": 0.2745452020552701, + "grad_norm": 2.9117500427935106, + "learning_rate": 8.512600098789184e-06, + "loss": 0.3009, + "step": 3954 + }, + { + "epoch": 0.27461463685599224, + "grad_norm": 4.360164559496565, + "learning_rate": 8.511799752846893e-06, + "loss": 0.5964, + "step": 3955 + }, + { + "epoch": 0.2746840716567143, + "grad_norm": 4.941288939914137, + "learning_rate": 8.510999229281742e-06, + "loss": 0.7531, + "step": 3956 + }, + { + "epoch": 0.27475350645743646, + "grad_norm": 4.5160464079108635, + "learning_rate": 8.510198528134224e-06, + "loss": 0.5213, + "step": 3957 + }, + { + "epoch": 0.2748229412581586, + "grad_norm": 4.866185133989373, + "learning_rate": 8.509397649444837e-06, + "loss": 0.5629, + "step": 3958 + }, + { + "epoch": 0.2748923760588807, + "grad_norm": 3.659014231939476, + "learning_rate": 8.508596593254088e-06, + "loss": 0.3265, + "step": 3959 + }, + { + "epoch": 0.2749618108596028, + "grad_norm": 5.648875352908416, + "learning_rate": 8.507795359602493e-06, + "loss": 0.5491, + "step": 3960 + }, + { + "epoch": 0.27503124566032494, + "grad_norm": 4.328783987942329, + "learning_rate": 8.50699394853058e-06, + "loss": 0.4401, + "step": 3961 + }, + { + "epoch": 0.2751006804610471, + "grad_norm": 5.173724432264053, + "learning_rate": 8.50619236007888e-06, + "loss": 0.6956, + "step": 3962 + }, + { + "epoch": 0.2751701152617692, + 
"grad_norm": 5.5432746400195585, + "learning_rate": 8.505390594287939e-06, + "loss": 0.4899, + "step": 3963 + }, + { + "epoch": 0.27523955006249134, + "grad_norm": 36.555204148244876, + "learning_rate": 8.504588651198307e-06, + "loss": 0.6905, + "step": 3964 + }, + { + "epoch": 0.2753089848632134, + "grad_norm": 4.510116141813476, + "learning_rate": 8.503786530850545e-06, + "loss": 0.3931, + "step": 3965 + }, + { + "epoch": 0.27537841966393556, + "grad_norm": 5.804165046003108, + "learning_rate": 8.50298423328523e-06, + "loss": 0.6384, + "step": 3966 + }, + { + "epoch": 0.2754478544646577, + "grad_norm": 3.5509848381216984, + "learning_rate": 8.502181758542934e-06, + "loss": 0.5456, + "step": 3967 + }, + { + "epoch": 0.27551728926537983, + "grad_norm": 3.2958677391363818, + "learning_rate": 8.501379106664247e-06, + "loss": 0.2616, + "step": 3968 + }, + { + "epoch": 0.2755867240661019, + "grad_norm": 4.333922625407603, + "learning_rate": 8.500576277689768e-06, + "loss": 0.5866, + "step": 3969 + }, + { + "epoch": 0.27565615886682404, + "grad_norm": 3.4166556645241015, + "learning_rate": 8.4997732716601e-06, + "loss": 0.1816, + "step": 3970 + }, + { + "epoch": 0.2757255936675462, + "grad_norm": 3.1016646177239564, + "learning_rate": 8.498970088615861e-06, + "loss": 0.2755, + "step": 3971 + }, + { + "epoch": 0.2757950284682683, + "grad_norm": 3.128761465521594, + "learning_rate": 8.498166728597675e-06, + "loss": 0.2171, + "step": 3972 + }, + { + "epoch": 0.27586446326899045, + "grad_norm": 3.4957978795719913, + "learning_rate": 8.497363191646174e-06, + "loss": 0.4962, + "step": 3973 + }, + { + "epoch": 0.2759338980697125, + "grad_norm": 3.7163187490375336, + "learning_rate": 8.496559477801999e-06, + "loss": 0.3683, + "step": 3974 + }, + { + "epoch": 0.27600333287043466, + "grad_norm": 4.556605352713522, + "learning_rate": 8.495755587105805e-06, + "loss": 0.7999, + "step": 3975 + }, + { + "epoch": 0.2760727676711568, + "grad_norm": 3.5344759807280424, + "learning_rate": 8.494951519598247e-06, + "loss": 0.4172, + "step": 3976 + }, + { + "epoch": 0.27614220247187893, + "grad_norm": 5.475560162721737, + "learning_rate": 8.494147275319997e-06, + "loss": 0.6752, + "step": 3977 + }, + { + "epoch": 0.276211637272601, + "grad_norm": 4.064245414278551, + "learning_rate": 8.493342854311732e-06, + "loss": 0.5148, + "step": 3978 + }, + { + "epoch": 0.27628107207332314, + "grad_norm": 2.8269240789283794, + "learning_rate": 8.492538256614138e-06, + "loss": 0.2589, + "step": 3979 + }, + { + "epoch": 0.2763505068740453, + "grad_norm": 4.478320409797524, + "learning_rate": 8.49173348226791e-06, + "loss": 0.5475, + "step": 3980 + }, + { + "epoch": 0.2764199416747674, + "grad_norm": 4.383672258718609, + "learning_rate": 8.490928531313754e-06, + "loss": 0.5026, + "step": 3981 + }, + { + "epoch": 0.2764893764754895, + "grad_norm": 3.1383803432342288, + "learning_rate": 8.490123403792385e-06, + "loss": 0.3206, + "step": 3982 + }, + { + "epoch": 0.2765588112762116, + "grad_norm": 2.686453896282508, + "learning_rate": 8.48931809974452e-06, + "loss": 0.211, + "step": 3983 + }, + { + "epoch": 0.27662824607693376, + "grad_norm": 4.1028639941544505, + "learning_rate": 8.488512619210898e-06, + "loss": 0.5724, + "step": 3984 + }, + { + "epoch": 0.2766976808776559, + "grad_norm": 3.5597639153926828, + "learning_rate": 8.487706962232252e-06, + "loss": 0.409, + "step": 3985 + }, + { + "epoch": 0.27676711567837803, + "grad_norm": 3.8829886712348727, + "learning_rate": 8.486901128849337e-06, + "loss": 0.627, + "step": 3986 + }, 
+ { + "epoch": 0.2768365504791001, + "grad_norm": 3.638811596020324, + "learning_rate": 8.486095119102907e-06, + "loss": 0.3147, + "step": 3987 + }, + { + "epoch": 0.27690598527982224, + "grad_norm": 4.594077141814216, + "learning_rate": 8.48528893303373e-06, + "loss": 0.4465, + "step": 3988 + }, + { + "epoch": 0.2769754200805444, + "grad_norm": 4.525517072607048, + "learning_rate": 8.484482570682584e-06, + "loss": 0.7978, + "step": 3989 + }, + { + "epoch": 0.2770448548812665, + "grad_norm": 4.158662107570065, + "learning_rate": 8.483676032090252e-06, + "loss": 0.5915, + "step": 3990 + }, + { + "epoch": 0.2771142896819886, + "grad_norm": 4.117731456261733, + "learning_rate": 8.482869317297529e-06, + "loss": 0.5039, + "step": 3991 + }, + { + "epoch": 0.2771837244827107, + "grad_norm": 3.518711943853984, + "learning_rate": 8.482062426345217e-06, + "loss": 0.2799, + "step": 3992 + }, + { + "epoch": 0.27725315928343286, + "grad_norm": 3.475791772361931, + "learning_rate": 8.481255359274127e-06, + "loss": 0.3491, + "step": 3993 + }, + { + "epoch": 0.277322594084155, + "grad_norm": 4.2651457753923205, + "learning_rate": 8.48044811612508e-06, + "loss": 0.4764, + "step": 3994 + }, + { + "epoch": 0.2773920288848771, + "grad_norm": 3.8506198038834603, + "learning_rate": 8.479640696938906e-06, + "loss": 0.3734, + "step": 3995 + }, + { + "epoch": 0.2774614636855992, + "grad_norm": 5.688336871992002, + "learning_rate": 8.478833101756444e-06, + "loss": 0.5026, + "step": 3996 + }, + { + "epoch": 0.27753089848632134, + "grad_norm": 3.8932229558853, + "learning_rate": 8.478025330618541e-06, + "loss": 0.5945, + "step": 3997 + }, + { + "epoch": 0.2776003332870435, + "grad_norm": 3.1092978864306047, + "learning_rate": 8.47721738356605e-06, + "loss": 0.3724, + "step": 3998 + }, + { + "epoch": 0.2776697680877656, + "grad_norm": 3.17830692841414, + "learning_rate": 8.476409260639838e-06, + "loss": 0.4245, + "step": 3999 + }, + { + "epoch": 0.2777392028884877, + "grad_norm": 3.689409196680556, + "learning_rate": 8.475600961880781e-06, + "loss": 0.5328, + "step": 4000 + }, + { + "epoch": 0.2778086376892098, + "grad_norm": 3.217836477647881, + "learning_rate": 8.474792487329761e-06, + "loss": 0.3163, + "step": 4001 + }, + { + "epoch": 0.27787807248993196, + "grad_norm": 3.4252580676287905, + "learning_rate": 8.473983837027668e-06, + "loss": 0.3379, + "step": 4002 + }, + { + "epoch": 0.2779475072906541, + "grad_norm": 4.147392996962823, + "learning_rate": 8.473175011015405e-06, + "loss": 0.7043, + "step": 4003 + }, + { + "epoch": 0.2780169420913762, + "grad_norm": 5.565553678850914, + "learning_rate": 8.472366009333877e-06, + "loss": 0.8151, + "step": 4004 + }, + { + "epoch": 0.2780863768920983, + "grad_norm": 3.2255866601641023, + "learning_rate": 8.471556832024008e-06, + "loss": 0.3363, + "step": 4005 + }, + { + "epoch": 0.27815581169282044, + "grad_norm": 3.929571808171114, + "learning_rate": 8.470747479126721e-06, + "loss": 0.6045, + "step": 4006 + }, + { + "epoch": 0.2782252464935426, + "grad_norm": 4.042282621854157, + "learning_rate": 8.469937950682956e-06, + "loss": 0.5686, + "step": 4007 + }, + { + "epoch": 0.2782946812942647, + "grad_norm": 4.016366702539411, + "learning_rate": 8.469128246733655e-06, + "loss": 0.238, + "step": 4008 + }, + { + "epoch": 0.2783641160949868, + "grad_norm": 4.417358218370244, + "learning_rate": 8.468318367319772e-06, + "loss": 0.4056, + "step": 4009 + }, + { + "epoch": 0.2784335508957089, + "grad_norm": 3.9453179882589793, + "learning_rate": 8.46750831248227e-06, + "loss": 
0.583, + "step": 4010 + }, + { + "epoch": 0.27850298569643106, + "grad_norm": 5.159194775574404, + "learning_rate": 8.466698082262125e-06, + "loss": 0.7985, + "step": 4011 + }, + { + "epoch": 0.2785724204971532, + "grad_norm": 3.724096746460141, + "learning_rate": 8.465887676700311e-06, + "loss": 0.4227, + "step": 4012 + }, + { + "epoch": 0.2786418552978753, + "grad_norm": 2.5674478051109877, + "learning_rate": 8.46507709583782e-06, + "loss": 0.1777, + "step": 4013 + }, + { + "epoch": 0.2787112900985974, + "grad_norm": 4.350340933384753, + "learning_rate": 8.464266339715652e-06, + "loss": 0.4746, + "step": 4014 + }, + { + "epoch": 0.27878072489931954, + "grad_norm": 2.4926137147585155, + "learning_rate": 8.463455408374812e-06, + "loss": 0.2077, + "step": 4015 + }, + { + "epoch": 0.2788501597000417, + "grad_norm": 4.068468642851547, + "learning_rate": 8.462644301856318e-06, + "loss": 0.4392, + "step": 4016 + }, + { + "epoch": 0.27891959450076376, + "grad_norm": 3.786098716381982, + "learning_rate": 8.461833020201191e-06, + "loss": 0.2454, + "step": 4017 + }, + { + "epoch": 0.2789890293014859, + "grad_norm": 3.584446980298048, + "learning_rate": 8.461021563450469e-06, + "loss": 0.3159, + "step": 4018 + }, + { + "epoch": 0.279058464102208, + "grad_norm": 3.4579509412790883, + "learning_rate": 8.460209931645191e-06, + "loss": 0.3715, + "step": 4019 + }, + { + "epoch": 0.27912789890293016, + "grad_norm": 4.6930096329954205, + "learning_rate": 8.45939812482641e-06, + "loss": 0.5753, + "step": 4020 + }, + { + "epoch": 0.2791973337036523, + "grad_norm": 3.794703968397586, + "learning_rate": 8.458586143035188e-06, + "loss": 0.3902, + "step": 4021 + }, + { + "epoch": 0.2792667685043744, + "grad_norm": 3.7278509551810712, + "learning_rate": 8.457773986312592e-06, + "loss": 0.3939, + "step": 4022 + }, + { + "epoch": 0.2793362033050965, + "grad_norm": 3.7231569350685474, + "learning_rate": 8.4569616546997e-06, + "loss": 0.4265, + "step": 4023 + }, + { + "epoch": 0.27940563810581864, + "grad_norm": 3.800563153929813, + "learning_rate": 8.4561491482376e-06, + "loss": 0.3028, + "step": 4024 + }, + { + "epoch": 0.2794750729065408, + "grad_norm": 7.606980929230774, + "learning_rate": 8.455336466967387e-06, + "loss": 0.4427, + "step": 4025 + }, + { + "epoch": 0.27954450770726286, + "grad_norm": 3.656863151236229, + "learning_rate": 8.454523610930165e-06, + "loss": 0.4883, + "step": 4026 + }, + { + "epoch": 0.279613942507985, + "grad_norm": 3.8517080664818066, + "learning_rate": 8.45371058016705e-06, + "loss": 0.399, + "step": 4027 + }, + { + "epoch": 0.2796833773087071, + "grad_norm": 4.129533158578845, + "learning_rate": 8.45289737471916e-06, + "loss": 0.4375, + "step": 4028 + }, + { + "epoch": 0.27975281210942926, + "grad_norm": 3.5278347102911156, + "learning_rate": 8.452083994627629e-06, + "loss": 0.316, + "step": 4029 + }, + { + "epoch": 0.2798222469101514, + "grad_norm": 3.5101761845345, + "learning_rate": 8.451270439933595e-06, + "loss": 0.4084, + "step": 4030 + }, + { + "epoch": 0.2798916817108735, + "grad_norm": 3.8681780688047662, + "learning_rate": 8.450456710678205e-06, + "loss": 0.4593, + "step": 4031 + }, + { + "epoch": 0.2799611165115956, + "grad_norm": 5.160114530704963, + "learning_rate": 8.449642806902623e-06, + "loss": 0.4941, + "step": 4032 + }, + { + "epoch": 0.28003055131231774, + "grad_norm": 2.939221119622317, + "learning_rate": 8.44882872864801e-06, + "loss": 0.376, + "step": 4033 + }, + { + "epoch": 0.2800999861130399, + "grad_norm": 4.070923858188423, + "learning_rate": 
8.44801447595554e-06, + "loss": 0.3259, + "step": 4034 + }, + { + "epoch": 0.28016942091376196, + "grad_norm": 4.677821186775436, + "learning_rate": 8.447200048866404e-06, + "loss": 0.5048, + "step": 4035 + }, + { + "epoch": 0.2802388557144841, + "grad_norm": 4.006686975978456, + "learning_rate": 8.446385447421788e-06, + "loss": 0.4987, + "step": 4036 + }, + { + "epoch": 0.2803082905152062, + "grad_norm": 3.348060900891605, + "learning_rate": 8.445570671662897e-06, + "loss": 0.4799, + "step": 4037 + }, + { + "epoch": 0.28037772531592836, + "grad_norm": 4.516830064846909, + "learning_rate": 8.44475572163094e-06, + "loss": 0.7503, + "step": 4038 + }, + { + "epoch": 0.28044716011665044, + "grad_norm": 5.02513680369139, + "learning_rate": 8.443940597367135e-06, + "loss": 0.7388, + "step": 4039 + }, + { + "epoch": 0.2805165949173726, + "grad_norm": 3.4592416976367395, + "learning_rate": 8.443125298912713e-06, + "loss": 0.3575, + "step": 4040 + }, + { + "epoch": 0.2805860297180947, + "grad_norm": 3.4929084233964676, + "learning_rate": 8.442309826308909e-06, + "loss": 0.4165, + "step": 4041 + }, + { + "epoch": 0.28065546451881684, + "grad_norm": 3.301489716982089, + "learning_rate": 8.441494179596969e-06, + "loss": 0.401, + "step": 4042 + }, + { + "epoch": 0.280724899319539, + "grad_norm": 3.9330085755571256, + "learning_rate": 8.440678358818149e-06, + "loss": 0.4365, + "step": 4043 + }, + { + "epoch": 0.28079433412026106, + "grad_norm": 4.107595698802399, + "learning_rate": 8.43986236401371e-06, + "loss": 0.4836, + "step": 4044 + }, + { + "epoch": 0.2808637689209832, + "grad_norm": 4.022721885102071, + "learning_rate": 8.439046195224925e-06, + "loss": 0.4467, + "step": 4045 + }, + { + "epoch": 0.2809332037217053, + "grad_norm": 4.01047608546661, + "learning_rate": 8.438229852493077e-06, + "loss": 0.514, + "step": 4046 + }, + { + "epoch": 0.28100263852242746, + "grad_norm": 3.3408439635940472, + "learning_rate": 8.43741333585945e-06, + "loss": 0.3194, + "step": 4047 + }, + { + "epoch": 0.28107207332314954, + "grad_norm": 4.533620842178191, + "learning_rate": 8.436596645365348e-06, + "loss": 0.4275, + "step": 4048 + }, + { + "epoch": 0.2811415081238717, + "grad_norm": 3.6033551587674872, + "learning_rate": 8.435779781052076e-06, + "loss": 0.543, + "step": 4049 + }, + { + "epoch": 0.2812109429245938, + "grad_norm": 4.482075488511583, + "learning_rate": 8.434962742960952e-06, + "loss": 0.7, + "step": 4050 + }, + { + "epoch": 0.28128037772531594, + "grad_norm": 3.8171253109635694, + "learning_rate": 8.434145531133298e-06, + "loss": 0.4778, + "step": 4051 + }, + { + "epoch": 0.281349812526038, + "grad_norm": 4.695338771151494, + "learning_rate": 8.433328145610452e-06, + "loss": 0.6964, + "step": 4052 + }, + { + "epoch": 0.28141924732676016, + "grad_norm": 2.7371675297922575, + "learning_rate": 8.43251058643375e-06, + "loss": 0.2559, + "step": 4053 + }, + { + "epoch": 0.2814886821274823, + "grad_norm": 4.527182311600126, + "learning_rate": 8.431692853644547e-06, + "loss": 0.6303, + "step": 4054 + }, + { + "epoch": 0.2815581169282044, + "grad_norm": 4.098375824420949, + "learning_rate": 8.430874947284204e-06, + "loss": 0.4423, + "step": 4055 + }, + { + "epoch": 0.28162755172892656, + "grad_norm": 3.4640817191640783, + "learning_rate": 8.430056867394087e-06, + "loss": 0.3723, + "step": 4056 + }, + { + "epoch": 0.28169698652964864, + "grad_norm": 2.719024683888056, + "learning_rate": 8.429238614015575e-06, + "loss": 0.2484, + "step": 4057 + }, + { + "epoch": 0.2817664213303708, + "grad_norm": 
4.010671736206263, + "learning_rate": 8.428420187190057e-06, + "loss": 0.5737, + "step": 4058 + }, + { + "epoch": 0.2818358561310929, + "grad_norm": 4.307792403034726, + "learning_rate": 8.427601586958923e-06, + "loss": 0.7896, + "step": 4059 + }, + { + "epoch": 0.28190529093181504, + "grad_norm": 6.6604089718524, + "learning_rate": 8.42678281336358e-06, + "loss": 0.4925, + "step": 4060 + }, + { + "epoch": 0.2819747257325371, + "grad_norm": 4.95119217689486, + "learning_rate": 8.42596386644544e-06, + "loss": 0.4747, + "step": 4061 + }, + { + "epoch": 0.28204416053325926, + "grad_norm": 4.386565032445933, + "learning_rate": 8.425144746245925e-06, + "loss": 0.7824, + "step": 4062 + }, + { + "epoch": 0.2821135953339814, + "grad_norm": 4.022928500527897, + "learning_rate": 8.424325452806463e-06, + "loss": 0.4656, + "step": 4063 + }, + { + "epoch": 0.2821830301347035, + "grad_norm": 3.730351474349969, + "learning_rate": 8.423505986168497e-06, + "loss": 0.5111, + "step": 4064 + }, + { + "epoch": 0.28225246493542566, + "grad_norm": 3.855507767179331, + "learning_rate": 8.42268634637347e-06, + "loss": 0.3539, + "step": 4065 + }, + { + "epoch": 0.28232189973614774, + "grad_norm": 3.236919774150023, + "learning_rate": 8.421866533462841e-06, + "loss": 0.2889, + "step": 4066 + }, + { + "epoch": 0.2823913345368699, + "grad_norm": 3.413153721224843, + "learning_rate": 8.421046547478074e-06, + "loss": 0.465, + "step": 4067 + }, + { + "epoch": 0.282460769337592, + "grad_norm": 4.702650215184505, + "learning_rate": 8.420226388460647e-06, + "loss": 0.6201, + "step": 4068 + }, + { + "epoch": 0.28253020413831414, + "grad_norm": 4.891329942037103, + "learning_rate": 8.419406056452035e-06, + "loss": 0.5424, + "step": 4069 + }, + { + "epoch": 0.2825996389390362, + "grad_norm": 3.760425391817513, + "learning_rate": 8.418585551493736e-06, + "loss": 0.5935, + "step": 4070 + }, + { + "epoch": 0.28266907373975836, + "grad_norm": 4.930744264288468, + "learning_rate": 8.41776487362725e-06, + "loss": 1.0282, + "step": 4071 + }, + { + "epoch": 0.2827385085404805, + "grad_norm": 2.9914054410193662, + "learning_rate": 8.416944022894082e-06, + "loss": 0.3457, + "step": 4072 + }, + { + "epoch": 0.28280794334120263, + "grad_norm": 3.226931845016531, + "learning_rate": 8.416122999335754e-06, + "loss": 0.4147, + "step": 4073 + }, + { + "epoch": 0.2828773781419247, + "grad_norm": 3.7500736773139987, + "learning_rate": 8.415301802993787e-06, + "loss": 0.5911, + "step": 4074 + }, + { + "epoch": 0.28294681294264684, + "grad_norm": 3.4392896032730147, + "learning_rate": 8.414480433909721e-06, + "loss": 0.4706, + "step": 4075 + }, + { + "epoch": 0.283016247743369, + "grad_norm": 3.9289255040722453, + "learning_rate": 8.413658892125099e-06, + "loss": 0.51, + "step": 4076 + }, + { + "epoch": 0.2830856825440911, + "grad_norm": 3.9453723048772043, + "learning_rate": 8.412837177681474e-06, + "loss": 0.5765, + "step": 4077 + }, + { + "epoch": 0.28315511734481325, + "grad_norm": 3.8218365106942445, + "learning_rate": 8.412015290620405e-06, + "loss": 0.3521, + "step": 4078 + }, + { + "epoch": 0.2832245521455353, + "grad_norm": 4.773772181346369, + "learning_rate": 8.411193230983463e-06, + "loss": 0.5365, + "step": 4079 + }, + { + "epoch": 0.28329398694625746, + "grad_norm": 3.623292174646872, + "learning_rate": 8.41037099881223e-06, + "loss": 0.3438, + "step": 4080 + }, + { + "epoch": 0.2833634217469796, + "grad_norm": 3.575940874712963, + "learning_rate": 8.409548594148289e-06, + "loss": 0.3277, + "step": 4081 + }, + { + "epoch": 
0.28343285654770173, + "grad_norm": 3.4379510268769806, + "learning_rate": 8.408726017033237e-06, + "loss": 0.5344, + "step": 4082 + }, + { + "epoch": 0.2835022913484238, + "grad_norm": 3.6130402586921346, + "learning_rate": 8.407903267508681e-06, + "loss": 0.2961, + "step": 4083 + }, + { + "epoch": 0.28357172614914594, + "grad_norm": 4.297132562603742, + "learning_rate": 8.407080345616236e-06, + "loss": 0.5173, + "step": 4084 + }, + { + "epoch": 0.2836411609498681, + "grad_norm": 3.4587881960059406, + "learning_rate": 8.406257251397522e-06, + "loss": 0.4273, + "step": 4085 + }, + { + "epoch": 0.2837105957505902, + "grad_norm": 4.004248957218643, + "learning_rate": 8.405433984894169e-06, + "loss": 0.5476, + "step": 4086 + }, + { + "epoch": 0.2837800305513123, + "grad_norm": 3.2863216151252006, + "learning_rate": 8.40461054614782e-06, + "loss": 0.3802, + "step": 4087 + }, + { + "epoch": 0.2838494653520344, + "grad_norm": 3.0987867162244966, + "learning_rate": 8.403786935200121e-06, + "loss": 0.4116, + "step": 4088 + }, + { + "epoch": 0.28391890015275656, + "grad_norm": 4.509268572891679, + "learning_rate": 8.40296315209273e-06, + "loss": 0.6288, + "step": 4089 + }, + { + "epoch": 0.2839883349534787, + "grad_norm": 4.253878629277526, + "learning_rate": 8.402139196867317e-06, + "loss": 0.5307, + "step": 4090 + }, + { + "epoch": 0.28405776975420083, + "grad_norm": 3.8227184164159977, + "learning_rate": 8.40131506956555e-06, + "loss": 0.4914, + "step": 4091 + }, + { + "epoch": 0.2841272045549229, + "grad_norm": 4.528993929907842, + "learning_rate": 8.400490770229115e-06, + "loss": 0.5349, + "step": 4092 + }, + { + "epoch": 0.28419663935564504, + "grad_norm": 4.99315911486536, + "learning_rate": 8.399666298899706e-06, + "loss": 0.6564, + "step": 4093 + }, + { + "epoch": 0.2842660741563672, + "grad_norm": 4.7847903010010855, + "learning_rate": 8.398841655619024e-06, + "loss": 0.8837, + "step": 4094 + }, + { + "epoch": 0.2843355089570893, + "grad_norm": 4.487162351864438, + "learning_rate": 8.398016840428776e-06, + "loss": 0.5113, + "step": 4095 + }, + { + "epoch": 0.2844049437578114, + "grad_norm": 3.58871113688031, + "learning_rate": 8.39719185337068e-06, + "loss": 0.5797, + "step": 4096 + }, + { + "epoch": 0.2844743785585335, + "grad_norm": 3.9166396000144355, + "learning_rate": 8.396366694486466e-06, + "loss": 0.4529, + "step": 4097 + }, + { + "epoch": 0.28454381335925566, + "grad_norm": 3.42310090575979, + "learning_rate": 8.395541363817868e-06, + "loss": 0.4789, + "step": 4098 + }, + { + "epoch": 0.2846132481599778, + "grad_norm": 4.081190130535716, + "learning_rate": 8.394715861406628e-06, + "loss": 0.624, + "step": 4099 + }, + { + "epoch": 0.28468268296069993, + "grad_norm": 3.951274934045624, + "learning_rate": 8.393890187294504e-06, + "loss": 0.4355, + "step": 4100 + }, + { + "epoch": 0.284752117761422, + "grad_norm": 3.998624590371367, + "learning_rate": 8.393064341523253e-06, + "loss": 0.6357, + "step": 4101 + }, + { + "epoch": 0.28482155256214414, + "grad_norm": 2.9383962896418834, + "learning_rate": 8.392238324134646e-06, + "loss": 0.3114, + "step": 4102 + }, + { + "epoch": 0.2848909873628663, + "grad_norm": 3.6078407916489503, + "learning_rate": 8.391412135170467e-06, + "loss": 0.5298, + "step": 4103 + }, + { + "epoch": 0.2849604221635884, + "grad_norm": 3.4590017908215427, + "learning_rate": 8.390585774672494e-06, + "loss": 0.5292, + "step": 4104 + }, + { + "epoch": 0.2850298569643105, + "grad_norm": 3.3611177969892645, + "learning_rate": 8.389759242682534e-06, + "loss": 0.3649, + 
"step": 4105 + }, + { + "epoch": 0.2850992917650326, + "grad_norm": 5.057511271296704, + "learning_rate": 8.388932539242387e-06, + "loss": 0.5748, + "step": 4106 + }, + { + "epoch": 0.28516872656575476, + "grad_norm": 3.410356217128276, + "learning_rate": 8.388105664393866e-06, + "loss": 0.5103, + "step": 4107 + }, + { + "epoch": 0.2852381613664769, + "grad_norm": 1.63942889339211, + "learning_rate": 8.387278618178794e-06, + "loss": 0.1453, + "step": 4108 + }, + { + "epoch": 0.285307596167199, + "grad_norm": 3.5302353437661846, + "learning_rate": 8.386451400639002e-06, + "loss": 0.5076, + "step": 4109 + }, + { + "epoch": 0.2853770309679211, + "grad_norm": 3.6999585792976784, + "learning_rate": 8.385624011816332e-06, + "loss": 0.5428, + "step": 4110 + }, + { + "epoch": 0.28544646576864324, + "grad_norm": 3.3792429300962734, + "learning_rate": 8.384796451752627e-06, + "loss": 0.278, + "step": 4111 + }, + { + "epoch": 0.2855159005693654, + "grad_norm": 3.8187010986034737, + "learning_rate": 8.383968720489752e-06, + "loss": 0.4468, + "step": 4112 + }, + { + "epoch": 0.2855853353700875, + "grad_norm": 4.9680420258857545, + "learning_rate": 8.383140818069566e-06, + "loss": 0.2834, + "step": 4113 + }, + { + "epoch": 0.2856547701708096, + "grad_norm": 3.8259416083641393, + "learning_rate": 8.382312744533947e-06, + "loss": 0.5547, + "step": 4114 + }, + { + "epoch": 0.2857242049715317, + "grad_norm": 4.6858429745538945, + "learning_rate": 8.381484499924775e-06, + "loss": 0.5408, + "step": 4115 + }, + { + "epoch": 0.28579363977225386, + "grad_norm": 2.685338585231715, + "learning_rate": 8.380656084283945e-06, + "loss": 0.3507, + "step": 4116 + }, + { + "epoch": 0.285863074572976, + "grad_norm": 3.2446294873849277, + "learning_rate": 8.379827497653354e-06, + "loss": 0.4532, + "step": 4117 + }, + { + "epoch": 0.2859325093736981, + "grad_norm": 3.4901822940792955, + "learning_rate": 8.378998740074916e-06, + "loss": 0.385, + "step": 4118 + }, + { + "epoch": 0.2860019441744202, + "grad_norm": 2.8884029656115198, + "learning_rate": 8.378169811590542e-06, + "loss": 0.2963, + "step": 4119 + }, + { + "epoch": 0.28607137897514234, + "grad_norm": 3.4122874840941764, + "learning_rate": 8.377340712242161e-06, + "loss": 0.3827, + "step": 4120 + }, + { + "epoch": 0.2861408137758645, + "grad_norm": 3.509702570015625, + "learning_rate": 8.376511442071711e-06, + "loss": 0.3106, + "step": 4121 + }, + { + "epoch": 0.2862102485765866, + "grad_norm": 3.8734074011445365, + "learning_rate": 8.375682001121132e-06, + "loss": 0.5135, + "step": 4122 + }, + { + "epoch": 0.2862796833773087, + "grad_norm": 3.033185463260702, + "learning_rate": 8.374852389432378e-06, + "loss": 0.3503, + "step": 4123 + }, + { + "epoch": 0.2863491181780308, + "grad_norm": 3.7384426901948555, + "learning_rate": 8.374022607047409e-06, + "loss": 0.4274, + "step": 4124 + }, + { + "epoch": 0.28641855297875296, + "grad_norm": 2.8769429499840355, + "learning_rate": 8.373192654008194e-06, + "loss": 0.1879, + "step": 4125 + }, + { + "epoch": 0.2864879877794751, + "grad_norm": 4.174490762887198, + "learning_rate": 8.372362530356711e-06, + "loss": 0.4198, + "step": 4126 + }, + { + "epoch": 0.2865574225801972, + "grad_norm": 3.2563087393739174, + "learning_rate": 8.371532236134947e-06, + "loss": 0.5054, + "step": 4127 + }, + { + "epoch": 0.2866268573809193, + "grad_norm": 3.7420856353625744, + "learning_rate": 8.3707017713849e-06, + "loss": 0.243, + "step": 4128 + }, + { + "epoch": 0.28669629218164144, + "grad_norm": 2.9865918320588025, + "learning_rate": 
8.36987113614857e-06, + "loss": 0.2106, + "step": 4129 + }, + { + "epoch": 0.2867657269823636, + "grad_norm": 4.617791887214233, + "learning_rate": 8.36904033046797e-06, + "loss": 0.5665, + "step": 4130 + }, + { + "epoch": 0.28683516178308566, + "grad_norm": 3.5611140232804797, + "learning_rate": 8.368209354385124e-06, + "loss": 0.3485, + "step": 4131 + }, + { + "epoch": 0.2869045965838078, + "grad_norm": 4.28164494757258, + "learning_rate": 8.36737820794206e-06, + "loss": 0.4814, + "step": 4132 + }, + { + "epoch": 0.2869740313845299, + "grad_norm": 3.640662571704023, + "learning_rate": 8.366546891180817e-06, + "loss": 0.5143, + "step": 4133 + }, + { + "epoch": 0.28704346618525206, + "grad_norm": 3.7269083350101777, + "learning_rate": 8.365715404143442e-06, + "loss": 0.4231, + "step": 4134 + }, + { + "epoch": 0.2871129009859742, + "grad_norm": 4.859879121718179, + "learning_rate": 8.36488374687199e-06, + "loss": 0.8108, + "step": 4135 + }, + { + "epoch": 0.2871823357866963, + "grad_norm": 3.854094664920411, + "learning_rate": 8.364051919408524e-06, + "loss": 0.3596, + "step": 4136 + }, + { + "epoch": 0.2872517705874184, + "grad_norm": 4.492058715186599, + "learning_rate": 8.363219921795121e-06, + "loss": 0.5469, + "step": 4137 + }, + { + "epoch": 0.28732120538814054, + "grad_norm": 4.0033171953003945, + "learning_rate": 8.36238775407386e-06, + "loss": 0.6207, + "step": 4138 + }, + { + "epoch": 0.2873906401888627, + "grad_norm": 3.2335879049724516, + "learning_rate": 8.361555416286829e-06, + "loss": 0.4234, + "step": 4139 + }, + { + "epoch": 0.28746007498958476, + "grad_norm": 3.8102254306126495, + "learning_rate": 8.36072290847613e-06, + "loss": 0.6541, + "step": 4140 + }, + { + "epoch": 0.2875295097903069, + "grad_norm": 3.715870180617638, + "learning_rate": 8.35989023068387e-06, + "loss": 0.2695, + "step": 4141 + }, + { + "epoch": 0.287598944591029, + "grad_norm": 4.643962878105529, + "learning_rate": 8.359057382952162e-06, + "loss": 0.6681, + "step": 4142 + }, + { + "epoch": 0.28766837939175116, + "grad_norm": 3.863724060574862, + "learning_rate": 8.358224365323135e-06, + "loss": 0.5237, + "step": 4143 + }, + { + "epoch": 0.28773781419247324, + "grad_norm": 2.321551725792302, + "learning_rate": 8.35739117783892e-06, + "loss": 0.1565, + "step": 4144 + }, + { + "epoch": 0.2878072489931954, + "grad_norm": 4.356858765544578, + "learning_rate": 8.356557820541658e-06, + "loss": 0.6937, + "step": 4145 + }, + { + "epoch": 0.2878766837939175, + "grad_norm": 4.281247251295548, + "learning_rate": 8.355724293473498e-06, + "loss": 0.6678, + "step": 4146 + }, + { + "epoch": 0.28794611859463964, + "grad_norm": 3.753505011902372, + "learning_rate": 8.354890596676601e-06, + "loss": 0.5815, + "step": 4147 + }, + { + "epoch": 0.2880155533953618, + "grad_norm": 5.487036051490371, + "learning_rate": 8.354056730193136e-06, + "loss": 0.5379, + "step": 4148 + }, + { + "epoch": 0.28808498819608386, + "grad_norm": 4.166525356439661, + "learning_rate": 8.353222694065273e-06, + "loss": 0.6189, + "step": 4149 + }, + { + "epoch": 0.288154422996806, + "grad_norm": 3.691179444867459, + "learning_rate": 8.352388488335203e-06, + "loss": 0.2824, + "step": 4150 + }, + { + "epoch": 0.2882238577975281, + "grad_norm": 3.8384098543521077, + "learning_rate": 8.351554113045118e-06, + "loss": 0.4719, + "step": 4151 + }, + { + "epoch": 0.28829329259825026, + "grad_norm": 3.380193810299827, + "learning_rate": 8.350719568237216e-06, + "loss": 0.491, + "step": 4152 + }, + { + "epoch": 0.28836272739897234, + "grad_norm": 
2.07452743196578, + "learning_rate": 8.349884853953712e-06, + "loss": 0.1928, + "step": 4153 + }, + { + "epoch": 0.2884321621996945, + "grad_norm": 4.000157877126909, + "learning_rate": 8.349049970236822e-06, + "loss": 0.5222, + "step": 4154 + }, + { + "epoch": 0.2885015970004166, + "grad_norm": 4.797546814876716, + "learning_rate": 8.348214917128773e-06, + "loss": 0.5449, + "step": 4155 + }, + { + "epoch": 0.28857103180113874, + "grad_norm": 4.106034294061708, + "learning_rate": 8.347379694671803e-06, + "loss": 0.396, + "step": 4156 + }, + { + "epoch": 0.2886404666018609, + "grad_norm": 4.209357182159019, + "learning_rate": 8.346544302908158e-06, + "loss": 0.5141, + "step": 4157 + }, + { + "epoch": 0.28870990140258296, + "grad_norm": 3.414874014017322, + "learning_rate": 8.345708741880086e-06, + "loss": 0.3919, + "step": 4158 + }, + { + "epoch": 0.2887793362033051, + "grad_norm": 4.338356040043959, + "learning_rate": 8.344873011629852e-06, + "loss": 0.5006, + "step": 4159 + }, + { + "epoch": 0.2888487710040272, + "grad_norm": 4.588598281808183, + "learning_rate": 8.344037112199727e-06, + "loss": 0.706, + "step": 4160 + }, + { + "epoch": 0.28891820580474936, + "grad_norm": 4.541332212418766, + "learning_rate": 8.34320104363199e-06, + "loss": 0.5058, + "step": 4161 + }, + { + "epoch": 0.28898764060547144, + "grad_norm": 4.90621851037414, + "learning_rate": 8.342364805968925e-06, + "loss": 0.6953, + "step": 4162 + }, + { + "epoch": 0.2890570754061936, + "grad_norm": 4.0128998344196285, + "learning_rate": 8.341528399252832e-06, + "loss": 0.54, + "step": 4163 + }, + { + "epoch": 0.2891265102069157, + "grad_norm": 4.290941781276502, + "learning_rate": 8.340691823526013e-06, + "loss": 0.7322, + "step": 4164 + }, + { + "epoch": 0.28919594500763784, + "grad_norm": 3.216600272784175, + "learning_rate": 8.339855078830781e-06, + "loss": 0.4447, + "step": 4165 + }, + { + "epoch": 0.2892653798083599, + "grad_norm": 3.829790720469176, + "learning_rate": 8.33901816520946e-06, + "loss": 0.4243, + "step": 4166 + }, + { + "epoch": 0.28933481460908206, + "grad_norm": 3.213559204009377, + "learning_rate": 8.338181082704377e-06, + "loss": 0.3345, + "step": 4167 + }, + { + "epoch": 0.2894042494098042, + "grad_norm": 3.521831945790899, + "learning_rate": 8.337343831357874e-06, + "loss": 0.4695, + "step": 4168 + }, + { + "epoch": 0.2894736842105263, + "grad_norm": 4.575263814331052, + "learning_rate": 8.336506411212295e-06, + "loss": 0.4924, + "step": 4169 + }, + { + "epoch": 0.28954311901124846, + "grad_norm": 3.7462252735678603, + "learning_rate": 8.335668822309997e-06, + "loss": 0.481, + "step": 4170 + }, + { + "epoch": 0.28961255381197054, + "grad_norm": 3.2283012195956124, + "learning_rate": 8.334831064693348e-06, + "loss": 0.5578, + "step": 4171 + }, + { + "epoch": 0.2896819886126927, + "grad_norm": 2.959677690055825, + "learning_rate": 8.333993138404712e-06, + "loss": 0.3459, + "step": 4172 + }, + { + "epoch": 0.2897514234134148, + "grad_norm": 3.88611831804658, + "learning_rate": 8.333155043486479e-06, + "loss": 0.3662, + "step": 4173 + }, + { + "epoch": 0.28982085821413694, + "grad_norm": 4.063428224272474, + "learning_rate": 8.332316779981035e-06, + "loss": 0.7501, + "step": 4174 + }, + { + "epoch": 0.289890293014859, + "grad_norm": 3.866401215203011, + "learning_rate": 8.33147834793078e-06, + "loss": 0.447, + "step": 4175 + }, + { + "epoch": 0.28995972781558116, + "grad_norm": 3.5902627745509177, + "learning_rate": 8.33063974737812e-06, + "loss": 0.4886, + "step": 4176 + }, + { + "epoch": 
0.2900291626163033, + "grad_norm": 3.129036605595824, + "learning_rate": 8.329800978365468e-06, + "loss": 0.5396, + "step": 4177 + }, + { + "epoch": 0.29009859741702543, + "grad_norm": 3.550193816314146, + "learning_rate": 8.328962040935253e-06, + "loss": 0.3312, + "step": 4178 + }, + { + "epoch": 0.29016803221774756, + "grad_norm": 3.283254816387682, + "learning_rate": 8.328122935129902e-06, + "loss": 0.3887, + "step": 4179 + }, + { + "epoch": 0.29023746701846964, + "grad_norm": 3.5394317275438953, + "learning_rate": 8.327283660991861e-06, + "loss": 0.4123, + "step": 4180 + }, + { + "epoch": 0.2903069018191918, + "grad_norm": 3.0358270804421403, + "learning_rate": 8.326444218563578e-06, + "loss": 0.2669, + "step": 4181 + }, + { + "epoch": 0.2903763366199139, + "grad_norm": 3.6605537521434233, + "learning_rate": 8.325604607887509e-06, + "loss": 0.3005, + "step": 4182 + }, + { + "epoch": 0.29044577142063605, + "grad_norm": 4.828173302742655, + "learning_rate": 8.324764829006125e-06, + "loss": 0.848, + "step": 4183 + }, + { + "epoch": 0.2905152062213581, + "grad_norm": 4.931233995571509, + "learning_rate": 8.323924881961895e-06, + "loss": 0.7007, + "step": 4184 + }, + { + "epoch": 0.29058464102208026, + "grad_norm": 3.8660106780032115, + "learning_rate": 8.323084766797307e-06, + "loss": 0.5526, + "step": 4185 + }, + { + "epoch": 0.2906540758228024, + "grad_norm": 4.193697788739468, + "learning_rate": 8.322244483554852e-06, + "loss": 0.845, + "step": 4186 + }, + { + "epoch": 0.29072351062352453, + "grad_norm": 3.5803052432590787, + "learning_rate": 8.321404032277031e-06, + "loss": 0.3493, + "step": 4187 + }, + { + "epoch": 0.2907929454242466, + "grad_norm": 3.858407318636596, + "learning_rate": 8.32056341300635e-06, + "loss": 0.5402, + "step": 4188 + }, + { + "epoch": 0.29086238022496874, + "grad_norm": 4.383166192621695, + "learning_rate": 8.319722625785332e-06, + "loss": 0.6952, + "step": 4189 + }, + { + "epoch": 0.2909318150256909, + "grad_norm": 3.453880230865078, + "learning_rate": 8.318881670656499e-06, + "loss": 0.3548, + "step": 4190 + }, + { + "epoch": 0.291001249826413, + "grad_norm": 5.265988078628725, + "learning_rate": 8.318040547662386e-06, + "loss": 0.6534, + "step": 4191 + }, + { + "epoch": 0.29107068462713515, + "grad_norm": 4.6395359570415025, + "learning_rate": 8.317199256845536e-06, + "loss": 0.5682, + "step": 4192 + }, + { + "epoch": 0.2911401194278572, + "grad_norm": 4.39344953712397, + "learning_rate": 8.316357798248504e-06, + "loss": 0.2198, + "step": 4193 + }, + { + "epoch": 0.29120955422857936, + "grad_norm": 4.656650297559044, + "learning_rate": 8.315516171913845e-06, + "loss": 0.6717, + "step": 4194 + }, + { + "epoch": 0.2912789890293015, + "grad_norm": 4.548866008408077, + "learning_rate": 8.314674377884131e-06, + "loss": 0.6236, + "step": 4195 + }, + { + "epoch": 0.29134842383002363, + "grad_norm": 4.396075149918093, + "learning_rate": 8.313832416201937e-06, + "loss": 0.5899, + "step": 4196 + }, + { + "epoch": 0.2914178586307457, + "grad_norm": 4.086374431699724, + "learning_rate": 8.31299028690985e-06, + "loss": 0.4253, + "step": 4197 + }, + { + "epoch": 0.29148729343146784, + "grad_norm": 3.2217776417070767, + "learning_rate": 8.312147990050464e-06, + "loss": 0.2348, + "step": 4198 + }, + { + "epoch": 0.29155672823219, + "grad_norm": 5.312734753181491, + "learning_rate": 8.311305525666377e-06, + "loss": 1.0386, + "step": 4199 + }, + { + "epoch": 0.2916261630329121, + "grad_norm": 3.28999700181382, + "learning_rate": 8.310462893800207e-06, + "loss": 0.4045, + 
"step": 4200 + }, + { + "epoch": 0.2916955978336342, + "grad_norm": 2.940133710946343, + "learning_rate": 8.309620094494568e-06, + "loss": 0.2961, + "step": 4201 + }, + { + "epoch": 0.2917650326343563, + "grad_norm": 4.200256456724963, + "learning_rate": 8.30877712779209e-06, + "loss": 0.62, + "step": 4202 + }, + { + "epoch": 0.29183446743507846, + "grad_norm": 3.996763767766416, + "learning_rate": 8.30793399373541e-06, + "loss": 0.3832, + "step": 4203 + }, + { + "epoch": 0.2919039022358006, + "grad_norm": 2.696016977270572, + "learning_rate": 8.307090692367172e-06, + "loss": 0.3309, + "step": 4204 + }, + { + "epoch": 0.29197333703652273, + "grad_norm": 2.8540688523409674, + "learning_rate": 8.306247223730028e-06, + "loss": 0.3036, + "step": 4205 + }, + { + "epoch": 0.2920427718372448, + "grad_norm": 4.293694836120316, + "learning_rate": 8.30540358786664e-06, + "loss": 0.3466, + "step": 4206 + }, + { + "epoch": 0.29211220663796694, + "grad_norm": 4.22118610771197, + "learning_rate": 8.30455978481968e-06, + "loss": 0.4281, + "step": 4207 + }, + { + "epoch": 0.2921816414386891, + "grad_norm": 3.818633440563336, + "learning_rate": 8.303715814631825e-06, + "loss": 0.3839, + "step": 4208 + }, + { + "epoch": 0.2922510762394112, + "grad_norm": 3.9241789444038084, + "learning_rate": 8.302871677345763e-06, + "loss": 0.4339, + "step": 4209 + }, + { + "epoch": 0.2923205110401333, + "grad_norm": 4.1984709809832665, + "learning_rate": 8.302027373004189e-06, + "loss": 0.4579, + "step": 4210 + }, + { + "epoch": 0.2923899458408554, + "grad_norm": 5.0609571834216664, + "learning_rate": 8.301182901649806e-06, + "loss": 0.9839, + "step": 4211 + }, + { + "epoch": 0.29245938064157756, + "grad_norm": 3.326461378639018, + "learning_rate": 8.30033826332533e-06, + "loss": 0.2321, + "step": 4212 + }, + { + "epoch": 0.2925288154422997, + "grad_norm": 3.751149001931698, + "learning_rate": 8.299493458073475e-06, + "loss": 0.4685, + "step": 4213 + }, + { + "epoch": 0.29259825024302183, + "grad_norm": 4.277311702710431, + "learning_rate": 8.298648485936978e-06, + "loss": 0.6342, + "step": 4214 + }, + { + "epoch": 0.2926676850437439, + "grad_norm": 5.030596122069059, + "learning_rate": 8.29780334695857e-06, + "loss": 0.6863, + "step": 4215 + }, + { + "epoch": 0.29273711984446604, + "grad_norm": 5.348911723394327, + "learning_rate": 8.296958041181004e-06, + "loss": 0.4601, + "step": 4216 + }, + { + "epoch": 0.2928065546451882, + "grad_norm": 4.314339788885729, + "learning_rate": 8.29611256864703e-06, + "loss": 0.4571, + "step": 4217 + }, + { + "epoch": 0.2928759894459103, + "grad_norm": 3.4090964308541576, + "learning_rate": 8.29526692939941e-06, + "loss": 0.2755, + "step": 4218 + }, + { + "epoch": 0.2929454242466324, + "grad_norm": 4.276628955857692, + "learning_rate": 8.294421123480921e-06, + "loss": 0.6232, + "step": 4219 + }, + { + "epoch": 0.2930148590473545, + "grad_norm": 3.6513031781668888, + "learning_rate": 8.293575150934337e-06, + "loss": 0.4808, + "step": 4220 + }, + { + "epoch": 0.29308429384807666, + "grad_norm": 3.550774567699921, + "learning_rate": 8.292729011802449e-06, + "loss": 0.2929, + "step": 4221 + }, + { + "epoch": 0.2931537286487988, + "grad_norm": 2.8749819415608338, + "learning_rate": 8.291882706128054e-06, + "loss": 0.183, + "step": 4222 + }, + { + "epoch": 0.2932231634495209, + "grad_norm": 4.175315650986765, + "learning_rate": 8.291036233953957e-06, + "loss": 0.698, + "step": 4223 + }, + { + "epoch": 0.293292598250243, + "grad_norm": 4.845579174633432, + "learning_rate": 8.29018959532297e-06, 
+ "loss": 0.4577, + "step": 4224 + }, + { + "epoch": 0.29336203305096514, + "grad_norm": 4.204962210770397, + "learning_rate": 8.289342790277918e-06, + "loss": 0.595, + "step": 4225 + }, + { + "epoch": 0.2934314678516873, + "grad_norm": 4.7096260595511845, + "learning_rate": 8.288495818861631e-06, + "loss": 0.609, + "step": 4226 + }, + { + "epoch": 0.2935009026524094, + "grad_norm": 3.615911736413674, + "learning_rate": 8.287648681116945e-06, + "loss": 0.5239, + "step": 4227 + }, + { + "epoch": 0.2935703374531315, + "grad_norm": 3.94130606686846, + "learning_rate": 8.286801377086708e-06, + "loss": 0.5995, + "step": 4228 + }, + { + "epoch": 0.2936397722538536, + "grad_norm": 3.555702495229184, + "learning_rate": 8.285953906813778e-06, + "loss": 0.4366, + "step": 4229 + }, + { + "epoch": 0.29370920705457576, + "grad_norm": 4.204786601128588, + "learning_rate": 8.285106270341017e-06, + "loss": 0.441, + "step": 4230 + }, + { + "epoch": 0.2937786418552979, + "grad_norm": 3.8905135890219213, + "learning_rate": 8.284258467711297e-06, + "loss": 0.4538, + "step": 4231 + }, + { + "epoch": 0.29384807665602, + "grad_norm": 3.6774063273554147, + "learning_rate": 8.283410498967502e-06, + "loss": 0.3105, + "step": 4232 + }, + { + "epoch": 0.2939175114567421, + "grad_norm": 5.488546702178218, + "learning_rate": 8.282562364152519e-06, + "loss": 0.9099, + "step": 4233 + }, + { + "epoch": 0.29398694625746424, + "grad_norm": 2.871320157189596, + "learning_rate": 8.281714063309246e-06, + "loss": 0.3757, + "step": 4234 + }, + { + "epoch": 0.2940563810581864, + "grad_norm": 3.3211984939217523, + "learning_rate": 8.28086559648059e-06, + "loss": 0.2074, + "step": 4235 + }, + { + "epoch": 0.2941258158589085, + "grad_norm": 3.603076061294814, + "learning_rate": 8.280016963709463e-06, + "loss": 0.4063, + "step": 4236 + }, + { + "epoch": 0.2941952506596306, + "grad_norm": 3.3592424320612837, + "learning_rate": 8.27916816503879e-06, + "loss": 0.386, + "step": 4237 + }, + { + "epoch": 0.2942646854603527, + "grad_norm": 5.16203435031679, + "learning_rate": 8.278319200511501e-06, + "loss": 0.4563, + "step": 4238 + }, + { + "epoch": 0.29433412026107486, + "grad_norm": 4.038750884560467, + "learning_rate": 8.277470070170538e-06, + "loss": 0.481, + "step": 4239 + }, + { + "epoch": 0.294403555061797, + "grad_norm": 3.3213110674815947, + "learning_rate": 8.276620774058847e-06, + "loss": 0.3534, + "step": 4240 + }, + { + "epoch": 0.2944729898625191, + "grad_norm": 4.172828393178461, + "learning_rate": 8.275771312219387e-06, + "loss": 0.6278, + "step": 4241 + }, + { + "epoch": 0.2945424246632412, + "grad_norm": 3.606952062268344, + "learning_rate": 8.274921684695119e-06, + "loss": 0.2889, + "step": 4242 + }, + { + "epoch": 0.29461185946396334, + "grad_norm": 4.538497812827897, + "learning_rate": 8.27407189152902e-06, + "loss": 0.6587, + "step": 4243 + }, + { + "epoch": 0.2946812942646855, + "grad_norm": 3.610867670562362, + "learning_rate": 8.273221932764065e-06, + "loss": 0.4255, + "step": 4244 + }, + { + "epoch": 0.29475072906540756, + "grad_norm": 3.7750422561544634, + "learning_rate": 8.27237180844325e-06, + "loss": 0.3736, + "step": 4245 + }, + { + "epoch": 0.2948201638661297, + "grad_norm": 3.9269836477596214, + "learning_rate": 8.271521518609576e-06, + "loss": 0.5608, + "step": 4246 + }, + { + "epoch": 0.2948895986668518, + "grad_norm": 4.025751950641143, + "learning_rate": 8.270671063306042e-06, + "loss": 0.3022, + "step": 4247 + }, + { + "epoch": 0.29495903346757396, + "grad_norm": 3.796040347308218, + "learning_rate": 
8.269820442575667e-06, + "loss": 0.4126, + "step": 4248 + }, + { + "epoch": 0.2950284682682961, + "grad_norm": 4.81411674855092, + "learning_rate": 8.268969656461475e-06, + "loss": 0.6856, + "step": 4249 + }, + { + "epoch": 0.2950979030690182, + "grad_norm": 3.268076174439191, + "learning_rate": 8.268118705006495e-06, + "loss": 0.3138, + "step": 4250 + }, + { + "epoch": 0.2951673378697403, + "grad_norm": 3.201372526533332, + "learning_rate": 8.267267588253771e-06, + "loss": 0.2161, + "step": 4251 + }, + { + "epoch": 0.29523677267046244, + "grad_norm": 3.86100659409227, + "learning_rate": 8.26641630624635e-06, + "loss": 0.3946, + "step": 4252 + }, + { + "epoch": 0.2953062074711846, + "grad_norm": 4.009248968458765, + "learning_rate": 8.265564859027287e-06, + "loss": 0.3914, + "step": 4253 + }, + { + "epoch": 0.29537564227190666, + "grad_norm": 4.818749270345499, + "learning_rate": 8.264713246639648e-06, + "loss": 0.5471, + "step": 4254 + }, + { + "epoch": 0.2954450770726288, + "grad_norm": 3.500759855445182, + "learning_rate": 8.263861469126508e-06, + "loss": 0.322, + "step": 4255 + }, + { + "epoch": 0.2955145118733509, + "grad_norm": 3.71540834259118, + "learning_rate": 8.263009526530949e-06, + "loss": 0.4293, + "step": 4256 + }, + { + "epoch": 0.29558394667407306, + "grad_norm": 4.496607737120699, + "learning_rate": 8.262157418896059e-06, + "loss": 0.5934, + "step": 4257 + }, + { + "epoch": 0.29565338147479514, + "grad_norm": 4.899336327526058, + "learning_rate": 8.26130514626494e-06, + "loss": 0.7202, + "step": 4258 + }, + { + "epoch": 0.2957228162755173, + "grad_norm": 3.291350312788581, + "learning_rate": 8.260452708680695e-06, + "loss": 0.458, + "step": 4259 + }, + { + "epoch": 0.2957922510762394, + "grad_norm": 4.099156702875364, + "learning_rate": 8.259600106186444e-06, + "loss": 0.5491, + "step": 4260 + }, + { + "epoch": 0.29586168587696154, + "grad_norm": 4.462024193148102, + "learning_rate": 8.258747338825306e-06, + "loss": 0.7147, + "step": 4261 + }, + { + "epoch": 0.2959311206776837, + "grad_norm": 3.3575359608426254, + "learning_rate": 8.257894406640416e-06, + "loss": 0.412, + "step": 4262 + }, + { + "epoch": 0.29600055547840576, + "grad_norm": 3.630188523235875, + "learning_rate": 8.257041309674913e-06, + "loss": 0.6407, + "step": 4263 + }, + { + "epoch": 0.2960699902791279, + "grad_norm": 3.790671663036885, + "learning_rate": 8.256188047971948e-06, + "loss": 0.5687, + "step": 4264 + }, + { + "epoch": 0.29613942507985, + "grad_norm": 4.133396061038882, + "learning_rate": 8.255334621574673e-06, + "loss": 0.6824, + "step": 4265 + }, + { + "epoch": 0.29620885988057216, + "grad_norm": 3.8646728399417576, + "learning_rate": 8.254481030526258e-06, + "loss": 0.4167, + "step": 4266 + }, + { + "epoch": 0.29627829468129424, + "grad_norm": 5.225141120871583, + "learning_rate": 8.253627274869875e-06, + "loss": 0.5329, + "step": 4267 + }, + { + "epoch": 0.2963477294820164, + "grad_norm": 2.7938501843439356, + "learning_rate": 8.252773354648707e-06, + "loss": 0.2769, + "step": 4268 + }, + { + "epoch": 0.2964171642827385, + "grad_norm": 3.6474052755829103, + "learning_rate": 8.251919269905944e-06, + "loss": 0.4024, + "step": 4269 + }, + { + "epoch": 0.29648659908346064, + "grad_norm": 3.1463226898160834, + "learning_rate": 8.25106502068478e-06, + "loss": 0.3972, + "step": 4270 + }, + { + "epoch": 0.2965560338841828, + "grad_norm": 3.4947158724567124, + "learning_rate": 8.25021060702843e-06, + "loss": 0.6515, + "step": 4271 + }, + { + "epoch": 0.29662546868490486, + "grad_norm": 
3.2985506696609166, + "learning_rate": 8.249356028980105e-06, + "loss": 0.3299, + "step": 4272 + }, + { + "epoch": 0.296694903485627, + "grad_norm": 3.4846761551504004, + "learning_rate": 8.248501286583029e-06, + "loss": 0.3759, + "step": 4273 + }, + { + "epoch": 0.2967643382863491, + "grad_norm": 3.540839269402786, + "learning_rate": 8.247646379880433e-06, + "loss": 0.4681, + "step": 4274 + }, + { + "epoch": 0.29683377308707126, + "grad_norm": 3.1937217035626473, + "learning_rate": 8.246791308915558e-06, + "loss": 0.246, + "step": 4275 + }, + { + "epoch": 0.29690320788779334, + "grad_norm": 4.303737318051593, + "learning_rate": 8.245936073731654e-06, + "loss": 0.7185, + "step": 4276 + }, + { + "epoch": 0.2969726426885155, + "grad_norm": 2.8997555707519087, + "learning_rate": 8.245080674371975e-06, + "loss": 0.2243, + "step": 4277 + }, + { + "epoch": 0.2970420774892376, + "grad_norm": 3.4736187592007743, + "learning_rate": 8.244225110879787e-06, + "loss": 0.4692, + "step": 4278 + }, + { + "epoch": 0.29711151228995974, + "grad_norm": 2.7724146308399726, + "learning_rate": 8.243369383298365e-06, + "loss": 0.3082, + "step": 4279 + }, + { + "epoch": 0.2971809470906818, + "grad_norm": 3.7652902144440166, + "learning_rate": 8.24251349167099e-06, + "loss": 0.3723, + "step": 4280 + }, + { + "epoch": 0.29725038189140396, + "grad_norm": 3.222825020753176, + "learning_rate": 8.24165743604095e-06, + "loss": 0.3276, + "step": 4281 + }, + { + "epoch": 0.2973198166921261, + "grad_norm": 4.759606560201149, + "learning_rate": 8.240801216451548e-06, + "loss": 0.6407, + "step": 4282 + }, + { + "epoch": 0.29738925149284823, + "grad_norm": 4.654161386667701, + "learning_rate": 8.239944832946085e-06, + "loss": 0.7273, + "step": 4283 + }, + { + "epoch": 0.29745868629357036, + "grad_norm": 3.2456572284308627, + "learning_rate": 8.23908828556788e-06, + "loss": 0.3964, + "step": 4284 + }, + { + "epoch": 0.29752812109429244, + "grad_norm": 4.475221443050549, + "learning_rate": 8.238231574360256e-06, + "loss": 0.4329, + "step": 4285 + }, + { + "epoch": 0.2975975558950146, + "grad_norm": 3.6377426839872715, + "learning_rate": 8.23737469936654e-06, + "loss": 0.4988, + "step": 4286 + }, + { + "epoch": 0.2976669906957367, + "grad_norm": 4.138112859934414, + "learning_rate": 8.236517660630077e-06, + "loss": 0.5797, + "step": 4287 + }, + { + "epoch": 0.29773642549645885, + "grad_norm": 3.804048089992206, + "learning_rate": 8.235660458194213e-06, + "loss": 0.5082, + "step": 4288 + }, + { + "epoch": 0.2978058602971809, + "grad_norm": 3.667172197218969, + "learning_rate": 8.234803092102305e-06, + "loss": 0.5346, + "step": 4289 + }, + { + "epoch": 0.29787529509790306, + "grad_norm": 3.749333566257004, + "learning_rate": 8.233945562397716e-06, + "loss": 0.3514, + "step": 4290 + }, + { + "epoch": 0.2979447298986252, + "grad_norm": 4.46242032534881, + "learning_rate": 8.23308786912382e-06, + "loss": 0.6833, + "step": 4291 + }, + { + "epoch": 0.29801416469934733, + "grad_norm": 4.368434279930634, + "learning_rate": 8.232230012323998e-06, + "loss": 0.5334, + "step": 4292 + }, + { + "epoch": 0.2980835995000694, + "grad_norm": 4.0289765023981925, + "learning_rate": 8.23137199204164e-06, + "loss": 0.5327, + "step": 4293 + }, + { + "epoch": 0.29815303430079154, + "grad_norm": 6.840398878298236, + "learning_rate": 8.230513808320144e-06, + "loss": 0.8362, + "step": 4294 + }, + { + "epoch": 0.2982224691015137, + "grad_norm": 3.051279309231908, + "learning_rate": 8.229655461202913e-06, + "loss": 0.4083, + "step": 4295 + }, + { + "epoch": 
0.2982919039022358, + "grad_norm": 4.559531897290236, + "learning_rate": 8.228796950733364e-06, + "loss": 0.6167, + "step": 4296 + }, + { + "epoch": 0.29836133870295795, + "grad_norm": 3.018156734934253, + "learning_rate": 8.22793827695492e-06, + "loss": 0.2414, + "step": 4297 + }, + { + "epoch": 0.29843077350368, + "grad_norm": 2.991476571051541, + "learning_rate": 8.22707943991101e-06, + "loss": 0.2886, + "step": 4298 + }, + { + "epoch": 0.29850020830440216, + "grad_norm": 4.276136082720696, + "learning_rate": 8.226220439645074e-06, + "loss": 0.5661, + "step": 4299 + }, + { + "epoch": 0.2985696431051243, + "grad_norm": 4.1525673361261415, + "learning_rate": 8.225361276200559e-06, + "loss": 0.5241, + "step": 4300 + }, + { + "epoch": 0.29863907790584643, + "grad_norm": 4.226823036820217, + "learning_rate": 8.22450194962092e-06, + "loss": 0.7547, + "step": 4301 + }, + { + "epoch": 0.2987085127065685, + "grad_norm": 4.325073579184161, + "learning_rate": 8.22364245994962e-06, + "loss": 0.6188, + "step": 4302 + }, + { + "epoch": 0.29877794750729064, + "grad_norm": 2.8589516055818227, + "learning_rate": 8.222782807230135e-06, + "loss": 0.3185, + "step": 4303 + }, + { + "epoch": 0.2988473823080128, + "grad_norm": 4.27184594695777, + "learning_rate": 8.22192299150594e-06, + "loss": 0.5827, + "step": 4304 + }, + { + "epoch": 0.2989168171087349, + "grad_norm": 3.955433825000662, + "learning_rate": 8.221063012820527e-06, + "loss": 0.4045, + "step": 4305 + }, + { + "epoch": 0.29898625190945705, + "grad_norm": 3.298826754643124, + "learning_rate": 8.22020287121739e-06, + "loss": 0.3766, + "step": 4306 + }, + { + "epoch": 0.2990556867101791, + "grad_norm": 3.3878754313463446, + "learning_rate": 8.219342566740036e-06, + "loss": 0.3628, + "step": 4307 + }, + { + "epoch": 0.29912512151090126, + "grad_norm": 2.740792065932492, + "learning_rate": 8.21848209943198e-06, + "loss": 0.3518, + "step": 4308 + }, + { + "epoch": 0.2991945563116234, + "grad_norm": 4.578593118588602, + "learning_rate": 8.217621469336741e-06, + "loss": 0.7125, + "step": 4309 + }, + { + "epoch": 0.29926399111234553, + "grad_norm": 3.9051337858976773, + "learning_rate": 8.216760676497849e-06, + "loss": 0.3083, + "step": 4310 + }, + { + "epoch": 0.2993334259130676, + "grad_norm": 2.535543703457087, + "learning_rate": 8.21589972095884e-06, + "loss": 0.2933, + "step": 4311 + }, + { + "epoch": 0.29940286071378974, + "grad_norm": 4.2050038967646515, + "learning_rate": 8.215038602763264e-06, + "loss": 0.7416, + "step": 4312 + }, + { + "epoch": 0.2994722955145119, + "grad_norm": 4.5563772719799776, + "learning_rate": 8.214177321954672e-06, + "loss": 0.7102, + "step": 4313 + }, + { + "epoch": 0.299541730315234, + "grad_norm": 4.057365451691864, + "learning_rate": 8.21331587857663e-06, + "loss": 0.5877, + "step": 4314 + }, + { + "epoch": 0.2996111651159561, + "grad_norm": 4.434078074434157, + "learning_rate": 8.212454272672705e-06, + "loss": 0.4333, + "step": 4315 + }, + { + "epoch": 0.2996805999166782, + "grad_norm": 4.384880336635134, + "learning_rate": 8.211592504286479e-06, + "loss": 0.6168, + "step": 4316 + }, + { + "epoch": 0.29975003471740036, + "grad_norm": 4.150694608944207, + "learning_rate": 8.210730573461539e-06, + "loss": 0.5901, + "step": 4317 + }, + { + "epoch": 0.2998194695181225, + "grad_norm": 3.10361656018691, + "learning_rate": 8.209868480241479e-06, + "loss": 0.2815, + "step": 4318 + }, + { + "epoch": 0.29988890431884463, + "grad_norm": 3.7841265385971696, + "learning_rate": 8.209006224669904e-06, + "loss": 0.473, + "step": 
4319 + }, + { + "epoch": 0.2999583391195667, + "grad_norm": 3.171945248596576, + "learning_rate": 8.208143806790425e-06, + "loss": 0.4406, + "step": 4320 + }, + { + "epoch": 0.30002777392028884, + "grad_norm": 3.6923611028444654, + "learning_rate": 8.207281226646662e-06, + "loss": 0.423, + "step": 4321 + }, + { + "epoch": 0.300097208721011, + "grad_norm": 3.184410139087182, + "learning_rate": 8.206418484282245e-06, + "loss": 0.4724, + "step": 4322 + }, + { + "epoch": 0.3001666435217331, + "grad_norm": 4.316900703355333, + "learning_rate": 8.20555557974081e-06, + "loss": 0.4758, + "step": 4323 + }, + { + "epoch": 0.3002360783224552, + "grad_norm": 5.695773996229779, + "learning_rate": 8.204692513065999e-06, + "loss": 0.6536, + "step": 4324 + }, + { + "epoch": 0.3003055131231773, + "grad_norm": 3.5423146186576067, + "learning_rate": 8.203829284301468e-06, + "loss": 0.4574, + "step": 4325 + }, + { + "epoch": 0.30037494792389946, + "grad_norm": 4.455268562250085, + "learning_rate": 8.202965893490877e-06, + "loss": 0.6349, + "step": 4326 + }, + { + "epoch": 0.3004443827246216, + "grad_norm": 3.9058474034541066, + "learning_rate": 8.202102340677896e-06, + "loss": 0.6202, + "step": 4327 + }, + { + "epoch": 0.30051381752534373, + "grad_norm": 5.310037267389661, + "learning_rate": 8.2012386259062e-06, + "loss": 0.6712, + "step": 4328 + }, + { + "epoch": 0.3005832523260658, + "grad_norm": 3.5520677768569446, + "learning_rate": 8.200374749219479e-06, + "loss": 0.402, + "step": 4329 + }, + { + "epoch": 0.30065268712678794, + "grad_norm": 3.8463060959578526, + "learning_rate": 8.199510710661424e-06, + "loss": 0.5665, + "step": 4330 + }, + { + "epoch": 0.3007221219275101, + "grad_norm": 3.3525048423676402, + "learning_rate": 8.198646510275738e-06, + "loss": 0.325, + "step": 4331 + }, + { + "epoch": 0.3007915567282322, + "grad_norm": 5.075693294061668, + "learning_rate": 8.19778214810613e-06, + "loss": 0.7931, + "step": 4332 + }, + { + "epoch": 0.3008609915289543, + "grad_norm": 3.240218155371128, + "learning_rate": 8.196917624196322e-06, + "loss": 0.2891, + "step": 4333 + }, + { + "epoch": 0.3009304263296764, + "grad_norm": 4.2114369093971, + "learning_rate": 8.196052938590036e-06, + "loss": 0.6269, + "step": 4334 + }, + { + "epoch": 0.30099986113039856, + "grad_norm": 3.5805273775393993, + "learning_rate": 8.19518809133101e-06, + "loss": 0.4408, + "step": 4335 + }, + { + "epoch": 0.3010692959311207, + "grad_norm": 2.928402808157151, + "learning_rate": 8.194323082462984e-06, + "loss": 0.5091, + "step": 4336 + }, + { + "epoch": 0.3011387307318428, + "grad_norm": 4.177516793762414, + "learning_rate": 8.193457912029713e-06, + "loss": 0.5763, + "step": 4337 + }, + { + "epoch": 0.3012081655325649, + "grad_norm": 4.0466022234030685, + "learning_rate": 8.192592580074954e-06, + "loss": 0.5488, + "step": 4338 + }, + { + "epoch": 0.30127760033328704, + "grad_norm": 3.2846480886267555, + "learning_rate": 8.191727086642475e-06, + "loss": 0.4344, + "step": 4339 + }, + { + "epoch": 0.3013470351340092, + "grad_norm": 3.9710449387973212, + "learning_rate": 8.190861431776051e-06, + "loss": 0.4172, + "step": 4340 + }, + { + "epoch": 0.3014164699347313, + "grad_norm": 4.970611595831778, + "learning_rate": 8.189995615519467e-06, + "loss": 0.7167, + "step": 4341 + }, + { + "epoch": 0.3014859047354534, + "grad_norm": 3.9192354518779866, + "learning_rate": 8.189129637916515e-06, + "loss": 0.4873, + "step": 4342 + }, + { + "epoch": 0.3015553395361755, + "grad_norm": 4.241166958266135, + "learning_rate": 8.188263499010993e-06, 
+ "loss": 0.4216, + "step": 4343 + }, + { + "epoch": 0.30162477433689766, + "grad_norm": 4.231452814182144, + "learning_rate": 8.187397198846714e-06, + "loss": 0.5637, + "step": 4344 + }, + { + "epoch": 0.3016942091376198, + "grad_norm": 3.42862198225882, + "learning_rate": 8.186530737467488e-06, + "loss": 0.3464, + "step": 4345 + }, + { + "epoch": 0.3017636439383419, + "grad_norm": 3.3195594374595845, + "learning_rate": 8.185664114917146e-06, + "loss": 0.4057, + "step": 4346 + }, + { + "epoch": 0.301833078739064, + "grad_norm": 5.168701984619272, + "learning_rate": 8.184797331239515e-06, + "loss": 0.5304, + "step": 4347 + }, + { + "epoch": 0.30190251353978614, + "grad_norm": 3.5979356319784097, + "learning_rate": 8.183930386478442e-06, + "loss": 0.3188, + "step": 4348 + }, + { + "epoch": 0.3019719483405083, + "grad_norm": 4.535023099458533, + "learning_rate": 8.18306328067777e-06, + "loss": 0.7025, + "step": 4349 + }, + { + "epoch": 0.30204138314123036, + "grad_norm": 4.083349874596811, + "learning_rate": 8.18219601388136e-06, + "loss": 0.4164, + "step": 4350 + }, + { + "epoch": 0.3021108179419525, + "grad_norm": 3.3088679511209684, + "learning_rate": 8.181328586133078e-06, + "loss": 0.1994, + "step": 4351 + }, + { + "epoch": 0.3021802527426746, + "grad_norm": 3.45560539032056, + "learning_rate": 8.180460997476796e-06, + "loss": 0.346, + "step": 4352 + }, + { + "epoch": 0.30224968754339676, + "grad_norm": 3.8546845112624477, + "learning_rate": 8.179593247956394e-06, + "loss": 0.4427, + "step": 4353 + }, + { + "epoch": 0.3023191223441189, + "grad_norm": 2.5079896357619123, + "learning_rate": 8.178725337615764e-06, + "loss": 0.178, + "step": 4354 + }, + { + "epoch": 0.302388557144841, + "grad_norm": 2.6722945743559783, + "learning_rate": 8.177857266498804e-06, + "loss": 0.222, + "step": 4355 + }, + { + "epoch": 0.3024579919455631, + "grad_norm": 4.088999251563087, + "learning_rate": 8.176989034649419e-06, + "loss": 0.4666, + "step": 4356 + }, + { + "epoch": 0.30252742674628524, + "grad_norm": 4.582462299929816, + "learning_rate": 8.176120642111523e-06, + "loss": 0.5749, + "step": 4357 + }, + { + "epoch": 0.3025968615470074, + "grad_norm": 4.506313844374007, + "learning_rate": 8.17525208892904e-06, + "loss": 0.6591, + "step": 4358 + }, + { + "epoch": 0.30266629634772946, + "grad_norm": 4.668941509840434, + "learning_rate": 8.1743833751459e-06, + "loss": 0.8776, + "step": 4359 + }, + { + "epoch": 0.3027357311484516, + "grad_norm": 2.853682001796087, + "learning_rate": 8.173514500806039e-06, + "loss": 0.3936, + "step": 4360 + }, + { + "epoch": 0.3028051659491737, + "grad_norm": 3.2192634537315388, + "learning_rate": 8.172645465953405e-06, + "loss": 0.3946, + "step": 4361 + }, + { + "epoch": 0.30287460074989586, + "grad_norm": 2.8210479254113032, + "learning_rate": 8.171776270631958e-06, + "loss": 0.4015, + "step": 4362 + }, + { + "epoch": 0.302944035550618, + "grad_norm": 3.8432029232622846, + "learning_rate": 8.170906914885651e-06, + "loss": 0.4156, + "step": 4363 + }, + { + "epoch": 0.3030134703513401, + "grad_norm": 3.272333744479109, + "learning_rate": 8.170037398758464e-06, + "loss": 0.4669, + "step": 4364 + }, + { + "epoch": 0.3030829051520622, + "grad_norm": 3.055422708425608, + "learning_rate": 8.16916772229437e-06, + "loss": 0.3, + "step": 4365 + }, + { + "epoch": 0.30315233995278434, + "grad_norm": 4.141494613464518, + "learning_rate": 8.16829788553736e-06, + "loss": 0.5087, + "step": 4366 + }, + { + "epoch": 0.3032217747535065, + "grad_norm": 4.179485824864375, + "learning_rate": 
8.167427888531429e-06, + "loss": 0.5049, + "step": 4367 + }, + { + "epoch": 0.30329120955422856, + "grad_norm": 3.544204503499928, + "learning_rate": 8.166557731320577e-06, + "loss": 0.5535, + "step": 4368 + }, + { + "epoch": 0.3033606443549507, + "grad_norm": 5.7066224761601125, + "learning_rate": 8.165687413948821e-06, + "loss": 0.6478, + "step": 4369 + }, + { + "epoch": 0.3034300791556728, + "grad_norm": 3.7911779992224637, + "learning_rate": 8.164816936460176e-06, + "loss": 0.5755, + "step": 4370 + }, + { + "epoch": 0.30349951395639496, + "grad_norm": 3.5305719064361503, + "learning_rate": 8.163946298898673e-06, + "loss": 0.4962, + "step": 4371 + }, + { + "epoch": 0.30356894875711704, + "grad_norm": 2.6845894481267187, + "learning_rate": 8.163075501308345e-06, + "loss": 0.4123, + "step": 4372 + }, + { + "epoch": 0.3036383835578392, + "grad_norm": 3.76521304095693, + "learning_rate": 8.162204543733238e-06, + "loss": 0.4075, + "step": 4373 + }, + { + "epoch": 0.3037078183585613, + "grad_norm": 4.606683924172447, + "learning_rate": 8.161333426217404e-06, + "loss": 0.7127, + "step": 4374 + }, + { + "epoch": 0.30377725315928344, + "grad_norm": 3.3956887826231203, + "learning_rate": 8.160462148804902e-06, + "loss": 0.4297, + "step": 4375 + }, + { + "epoch": 0.3038466879600056, + "grad_norm": 3.0227703429094697, + "learning_rate": 8.159590711539802e-06, + "loss": 0.3099, + "step": 4376 + }, + { + "epoch": 0.30391612276072766, + "grad_norm": 3.6803607657322917, + "learning_rate": 8.158719114466176e-06, + "loss": 0.3429, + "step": 4377 + }, + { + "epoch": 0.3039855575614498, + "grad_norm": 3.8038208520649572, + "learning_rate": 8.157847357628115e-06, + "loss": 0.4823, + "step": 4378 + }, + { + "epoch": 0.3040549923621719, + "grad_norm": 3.235762413282526, + "learning_rate": 8.156975441069705e-06, + "loss": 0.3557, + "step": 4379 + }, + { + "epoch": 0.30412442716289406, + "grad_norm": 3.7474106346540084, + "learning_rate": 8.156103364835052e-06, + "loss": 0.337, + "step": 4380 + }, + { + "epoch": 0.30419386196361614, + "grad_norm": 3.70037265898967, + "learning_rate": 8.155231128968262e-06, + "loss": 0.4728, + "step": 4381 + }, + { + "epoch": 0.3042632967643383, + "grad_norm": 3.614290543234798, + "learning_rate": 8.15435873351345e-06, + "loss": 0.4484, + "step": 4382 + }, + { + "epoch": 0.3043327315650604, + "grad_norm": 4.517794589641213, + "learning_rate": 8.153486178514744e-06, + "loss": 0.5234, + "step": 4383 + }, + { + "epoch": 0.30440216636578254, + "grad_norm": 3.6952257771516246, + "learning_rate": 8.152613464016278e-06, + "loss": 0.5045, + "step": 4384 + }, + { + "epoch": 0.3044716011665047, + "grad_norm": 3.382801037918305, + "learning_rate": 8.151740590062187e-06, + "loss": 0.3345, + "step": 4385 + }, + { + "epoch": 0.30454103596722676, + "grad_norm": 4.118293481703411, + "learning_rate": 8.150867556696623e-06, + "loss": 0.3631, + "step": 4386 + }, + { + "epoch": 0.3046104707679489, + "grad_norm": 5.414131044206265, + "learning_rate": 8.149994363963745e-06, + "loss": 0.486, + "step": 4387 + }, + { + "epoch": 0.30467990556867103, + "grad_norm": 4.4491637788055085, + "learning_rate": 8.149121011907715e-06, + "loss": 0.5188, + "step": 4388 + }, + { + "epoch": 0.30474934036939316, + "grad_norm": 2.8998483139193536, + "learning_rate": 8.14824750057271e-06, + "loss": 0.3033, + "step": 4389 + }, + { + "epoch": 0.30481877517011524, + "grad_norm": 4.5882046790101585, + "learning_rate": 8.147373830002906e-06, + "loss": 0.475, + "step": 4390 + }, + { + "epoch": 0.3048882099708374, + 
"grad_norm": 3.837718364234041, + "learning_rate": 8.1465000002425e-06, + "loss": 0.5431, + "step": 4391 + }, + { + "epoch": 0.3049576447715595, + "grad_norm": 4.3215187227720735, + "learning_rate": 8.14562601133568e-06, + "loss": 0.4061, + "step": 4392 + }, + { + "epoch": 0.30502707957228165, + "grad_norm": 4.082962065203325, + "learning_rate": 8.144751863326656e-06, + "loss": 0.375, + "step": 4393 + }, + { + "epoch": 0.3050965143730037, + "grad_norm": 3.9799476281434676, + "learning_rate": 8.143877556259644e-06, + "loss": 0.3558, + "step": 4394 + }, + { + "epoch": 0.30516594917372586, + "grad_norm": 4.333491636663673, + "learning_rate": 8.143003090178861e-06, + "loss": 0.6461, + "step": 4395 + }, + { + "epoch": 0.305235383974448, + "grad_norm": 4.31820612823554, + "learning_rate": 8.142128465128539e-06, + "loss": 0.5984, + "step": 4396 + }, + { + "epoch": 0.30530481877517013, + "grad_norm": 3.9168145078000594, + "learning_rate": 8.141253681152913e-06, + "loss": 0.3759, + "step": 4397 + }, + { + "epoch": 0.30537425357589226, + "grad_norm": 3.470542531161544, + "learning_rate": 8.140378738296233e-06, + "loss": 0.4397, + "step": 4398 + }, + { + "epoch": 0.30544368837661434, + "grad_norm": 5.082617596426652, + "learning_rate": 8.139503636602748e-06, + "loss": 0.4643, + "step": 4399 + }, + { + "epoch": 0.3055131231773365, + "grad_norm": 2.75953376479929, + "learning_rate": 8.138628376116722e-06, + "loss": 0.2343, + "step": 4400 + }, + { + "epoch": 0.3055825579780586, + "grad_norm": 4.262094642901102, + "learning_rate": 8.137752956882425e-06, + "loss": 0.6372, + "step": 4401 + }, + { + "epoch": 0.30565199277878075, + "grad_norm": 3.854959305590892, + "learning_rate": 8.136877378944133e-06, + "loss": 0.534, + "step": 4402 + }, + { + "epoch": 0.3057214275795028, + "grad_norm": 3.8173774445742996, + "learning_rate": 8.136001642346134e-06, + "loss": 0.4997, + "step": 4403 + }, + { + "epoch": 0.30579086238022496, + "grad_norm": 4.256830494731312, + "learning_rate": 8.13512574713272e-06, + "loss": 0.4724, + "step": 4404 + }, + { + "epoch": 0.3058602971809471, + "grad_norm": 4.219029971034703, + "learning_rate": 8.134249693348194e-06, + "loss": 0.4348, + "step": 4405 + }, + { + "epoch": 0.30592973198166923, + "grad_norm": 3.03421053916804, + "learning_rate": 8.133373481036863e-06, + "loss": 0.3317, + "step": 4406 + }, + { + "epoch": 0.3059991667823913, + "grad_norm": 2.4838249242367634, + "learning_rate": 8.13249711024305e-06, + "loss": 0.2369, + "step": 4407 + }, + { + "epoch": 0.30606860158311344, + "grad_norm": 4.476396723391165, + "learning_rate": 8.131620581011078e-06, + "loss": 0.7447, + "step": 4408 + }, + { + "epoch": 0.3061380363838356, + "grad_norm": 4.272066139148233, + "learning_rate": 8.130743893385278e-06, + "loss": 0.5377, + "step": 4409 + }, + { + "epoch": 0.3062074711845577, + "grad_norm": 3.582838350403838, + "learning_rate": 8.129867047409996e-06, + "loss": 0.4204, + "step": 4410 + }, + { + "epoch": 0.30627690598527985, + "grad_norm": 4.530648710603671, + "learning_rate": 8.128990043129578e-06, + "loss": 0.3536, + "step": 4411 + }, + { + "epoch": 0.3063463407860019, + "grad_norm": 4.3601479853972105, + "learning_rate": 8.128112880588388e-06, + "loss": 0.6034, + "step": 4412 + }, + { + "epoch": 0.30641577558672406, + "grad_norm": 4.241356024877396, + "learning_rate": 8.127235559830787e-06, + "loss": 0.5378, + "step": 4413 + }, + { + "epoch": 0.3064852103874462, + "grad_norm": 4.41454313466964, + "learning_rate": 8.12635808090115e-06, + "loss": 0.5095, + "step": 4414 + }, + { + 
"epoch": 0.30655464518816833, + "grad_norm": 3.1461283435761143, + "learning_rate": 8.125480443843856e-06, + "loss": 0.2991, + "step": 4415 + }, + { + "epoch": 0.3066240799888904, + "grad_norm": 3.619036081279081, + "learning_rate": 8.124602648703302e-06, + "loss": 0.4409, + "step": 4416 + }, + { + "epoch": 0.30669351478961254, + "grad_norm": 3.8710041152815564, + "learning_rate": 8.12372469552388e-06, + "loss": 0.3634, + "step": 4417 + }, + { + "epoch": 0.3067629495903347, + "grad_norm": 10.470330163352108, + "learning_rate": 8.122846584349997e-06, + "loss": 0.4854, + "step": 4418 + }, + { + "epoch": 0.3068323843910568, + "grad_norm": 5.409820845049516, + "learning_rate": 8.12196831522607e-06, + "loss": 0.566, + "step": 4419 + }, + { + "epoch": 0.30690181919177895, + "grad_norm": 3.7227677108899293, + "learning_rate": 8.121089888196517e-06, + "loss": 0.5143, + "step": 4420 + }, + { + "epoch": 0.306971253992501, + "grad_norm": 2.316100758041508, + "learning_rate": 8.120211303305767e-06, + "loss": 0.2651, + "step": 4421 + }, + { + "epoch": 0.30704068879322316, + "grad_norm": 3.0144149025319704, + "learning_rate": 8.119332560598263e-06, + "loss": 0.3067, + "step": 4422 + }, + { + "epoch": 0.3071101235939453, + "grad_norm": 4.561779940617743, + "learning_rate": 8.118453660118446e-06, + "loss": 0.5702, + "step": 4423 + }, + { + "epoch": 0.30717955839466743, + "grad_norm": 3.4037221623850034, + "learning_rate": 8.117574601910773e-06, + "loss": 0.3958, + "step": 4424 + }, + { + "epoch": 0.3072489931953895, + "grad_norm": 5.497384671760635, + "learning_rate": 8.116695386019703e-06, + "loss": 0.4654, + "step": 4425 + }, + { + "epoch": 0.30731842799611164, + "grad_norm": 2.5014560796022423, + "learning_rate": 8.115816012489709e-06, + "loss": 0.1476, + "step": 4426 + }, + { + "epoch": 0.3073878627968338, + "grad_norm": 3.628303282130237, + "learning_rate": 8.114936481365266e-06, + "loss": 0.3464, + "step": 4427 + }, + { + "epoch": 0.3074572975975559, + "grad_norm": 4.20300144097651, + "learning_rate": 8.114056792690861e-06, + "loss": 0.5402, + "step": 4428 + }, + { + "epoch": 0.307526732398278, + "grad_norm": 4.154491168365554, + "learning_rate": 8.113176946510985e-06, + "loss": 0.6373, + "step": 4429 + }, + { + "epoch": 0.3075961671990001, + "grad_norm": 4.590013238096061, + "learning_rate": 8.112296942870145e-06, + "loss": 0.4922, + "step": 4430 + }, + { + "epoch": 0.30766560199972226, + "grad_norm": 2.839191476875827, + "learning_rate": 8.111416781812844e-06, + "loss": 0.2131, + "step": 4431 + }, + { + "epoch": 0.3077350368004444, + "grad_norm": 3.916844429240888, + "learning_rate": 8.110536463383607e-06, + "loss": 0.4405, + "step": 4432 + }, + { + "epoch": 0.30780447160116653, + "grad_norm": 3.0358787420557767, + "learning_rate": 8.109655987626952e-06, + "loss": 0.213, + "step": 4433 + }, + { + "epoch": 0.3078739064018886, + "grad_norm": 3.2277461176625004, + "learning_rate": 8.108775354587419e-06, + "loss": 0.2478, + "step": 4434 + }, + { + "epoch": 0.30794334120261074, + "grad_norm": 3.8289680055955713, + "learning_rate": 8.107894564309544e-06, + "loss": 0.8028, + "step": 4435 + }, + { + "epoch": 0.3080127760033329, + "grad_norm": 3.1632155161137456, + "learning_rate": 8.107013616837879e-06, + "loss": 0.4484, + "step": 4436 + }, + { + "epoch": 0.308082210804055, + "grad_norm": 5.217563547397045, + "learning_rate": 8.106132512216982e-06, + "loss": 0.6494, + "step": 4437 + }, + { + "epoch": 0.3081516456047771, + "grad_norm": 3.618649134408479, + "learning_rate": 8.105251250491416e-06, + "loss": 
0.4117, + "step": 4438 + }, + { + "epoch": 0.3082210804054992, + "grad_norm": 4.378405825404889, + "learning_rate": 8.104369831705757e-06, + "loss": 0.649, + "step": 4439 + }, + { + "epoch": 0.30829051520622136, + "grad_norm": 2.656974482680879, + "learning_rate": 8.103488255904584e-06, + "loss": 0.1773, + "step": 4440 + }, + { + "epoch": 0.3083599500069435, + "grad_norm": 3.2092800781291064, + "learning_rate": 8.102606523132487e-06, + "loss": 0.3493, + "step": 4441 + }, + { + "epoch": 0.30842938480766563, + "grad_norm": 3.4566222925875874, + "learning_rate": 8.101724633434065e-06, + "loss": 0.6609, + "step": 4442 + }, + { + "epoch": 0.3084988196083877, + "grad_norm": 2.7678484656225226, + "learning_rate": 8.100842586853918e-06, + "loss": 0.3951, + "step": 4443 + }, + { + "epoch": 0.30856825440910984, + "grad_norm": 3.017962517665393, + "learning_rate": 8.099960383436663e-06, + "loss": 0.4165, + "step": 4444 + }, + { + "epoch": 0.308637689209832, + "grad_norm": 3.7899308794899644, + "learning_rate": 8.099078023226919e-06, + "loss": 0.416, + "step": 4445 + }, + { + "epoch": 0.3087071240105541, + "grad_norm": 4.036789949575463, + "learning_rate": 8.098195506269315e-06, + "loss": 0.6015, + "step": 4446 + }, + { + "epoch": 0.3087765588112762, + "grad_norm": 3.813199671432458, + "learning_rate": 8.097312832608488e-06, + "loss": 0.3948, + "step": 4447 + }, + { + "epoch": 0.3088459936119983, + "grad_norm": 2.7095700296841194, + "learning_rate": 8.096430002289086e-06, + "loss": 0.1121, + "step": 4448 + }, + { + "epoch": 0.30891542841272046, + "grad_norm": 4.094781410560263, + "learning_rate": 8.095547015355756e-06, + "loss": 0.5487, + "step": 4449 + }, + { + "epoch": 0.3089848632134426, + "grad_norm": 3.3463116924288405, + "learning_rate": 8.094663871853162e-06, + "loss": 0.5388, + "step": 4450 + }, + { + "epoch": 0.3090542980141647, + "grad_norm": 4.412867885211758, + "learning_rate": 8.093780571825968e-06, + "loss": 0.675, + "step": 4451 + }, + { + "epoch": 0.3091237328148868, + "grad_norm": 3.52994549400418, + "learning_rate": 8.092897115318857e-06, + "loss": 0.4467, + "step": 4452 + }, + { + "epoch": 0.30919316761560894, + "grad_norm": 4.995930469254473, + "learning_rate": 8.092013502376509e-06, + "loss": 0.7392, + "step": 4453 + }, + { + "epoch": 0.3092626024163311, + "grad_norm": 5.2865955633498345, + "learning_rate": 8.091129733043614e-06, + "loss": 0.7246, + "step": 4454 + }, + { + "epoch": 0.3093320372170532, + "grad_norm": 4.551841109687148, + "learning_rate": 8.090245807364879e-06, + "loss": 0.4455, + "step": 4455 + }, + { + "epoch": 0.3094014720177753, + "grad_norm": 4.032816901609972, + "learning_rate": 8.089361725385005e-06, + "loss": 0.649, + "step": 4456 + }, + { + "epoch": 0.3094709068184974, + "grad_norm": 3.6388332366288583, + "learning_rate": 8.088477487148712e-06, + "loss": 0.3065, + "step": 4457 + }, + { + "epoch": 0.30954034161921956, + "grad_norm": 4.44412661526409, + "learning_rate": 8.087593092700723e-06, + "loss": 0.6061, + "step": 4458 + }, + { + "epoch": 0.3096097764199417, + "grad_norm": 2.271353875797235, + "learning_rate": 8.086708542085769e-06, + "loss": 0.1833, + "step": 4459 + }, + { + "epoch": 0.3096792112206638, + "grad_norm": 5.52872821102468, + "learning_rate": 8.085823835348588e-06, + "loss": 0.6503, + "step": 4460 + }, + { + "epoch": 0.3097486460213859, + "grad_norm": 4.005711313915286, + "learning_rate": 8.084938972533931e-06, + "loss": 0.5572, + "step": 4461 + }, + { + "epoch": 0.30981808082210804, + "grad_norm": 5.070902280390715, + "learning_rate": 
8.084053953686551e-06, + "loss": 0.5378, + "step": 4462 + }, + { + "epoch": 0.3098875156228302, + "grad_norm": 3.8672260068133943, + "learning_rate": 8.083168778851213e-06, + "loss": 0.5637, + "step": 4463 + }, + { + "epoch": 0.30995695042355226, + "grad_norm": 5.090046857239955, + "learning_rate": 8.082283448072685e-06, + "loss": 0.5059, + "step": 4464 + }, + { + "epoch": 0.3100263852242744, + "grad_norm": 2.24476687530852, + "learning_rate": 8.08139796139575e-06, + "loss": 0.2829, + "step": 4465 + }, + { + "epoch": 0.3100958200249965, + "grad_norm": 3.288728483761353, + "learning_rate": 8.080512318865192e-06, + "loss": 0.6081, + "step": 4466 + }, + { + "epoch": 0.31016525482571866, + "grad_norm": 3.651776211041886, + "learning_rate": 8.079626520525808e-06, + "loss": 0.4627, + "step": 4467 + }, + { + "epoch": 0.3102346896264408, + "grad_norm": 3.444922896922857, + "learning_rate": 8.078740566422398e-06, + "loss": 0.5138, + "step": 4468 + }, + { + "epoch": 0.3103041244271629, + "grad_norm": 3.9692262858277667, + "learning_rate": 8.077854456599775e-06, + "loss": 0.3492, + "step": 4469 + }, + { + "epoch": 0.310373559227885, + "grad_norm": 3.4782587058965344, + "learning_rate": 8.076968191102754e-06, + "loss": 0.4013, + "step": 4470 + }, + { + "epoch": 0.31044299402860714, + "grad_norm": 3.854553662645897, + "learning_rate": 8.076081769976166e-06, + "loss": 0.4763, + "step": 4471 + }, + { + "epoch": 0.3105124288293293, + "grad_norm": 4.063362219286672, + "learning_rate": 8.075195193264843e-06, + "loss": 0.4785, + "step": 4472 + }, + { + "epoch": 0.31058186363005136, + "grad_norm": 4.5851089788534605, + "learning_rate": 8.074308461013626e-06, + "loss": 0.4994, + "step": 4473 + }, + { + "epoch": 0.3106512984307735, + "grad_norm": 4.376715667161838, + "learning_rate": 8.073421573267366e-06, + "loss": 0.57, + "step": 4474 + }, + { + "epoch": 0.3107207332314956, + "grad_norm": 3.4490719851174343, + "learning_rate": 8.072534530070922e-06, + "loss": 0.3916, + "step": 4475 + }, + { + "epoch": 0.31079016803221776, + "grad_norm": 4.509297679238457, + "learning_rate": 8.071647331469157e-06, + "loss": 0.3915, + "step": 4476 + }, + { + "epoch": 0.3108596028329399, + "grad_norm": 2.8812626526753298, + "learning_rate": 8.070759977506945e-06, + "loss": 0.2958, + "step": 4477 + }, + { + "epoch": 0.310929037633662, + "grad_norm": 4.039628430094393, + "learning_rate": 8.06987246822917e-06, + "loss": 0.3646, + "step": 4478 + }, + { + "epoch": 0.3109984724343841, + "grad_norm": 2.977710598727337, + "learning_rate": 8.068984803680717e-06, + "loss": 0.193, + "step": 4479 + }, + { + "epoch": 0.31106790723510624, + "grad_norm": 3.672612287269218, + "learning_rate": 8.068096983906488e-06, + "loss": 0.5416, + "step": 4480 + }, + { + "epoch": 0.3111373420358284, + "grad_norm": 3.432180510890564, + "learning_rate": 8.067209008951382e-06, + "loss": 0.5045, + "step": 4481 + }, + { + "epoch": 0.31120677683655046, + "grad_norm": 3.146804182232353, + "learning_rate": 8.06632087886032e-06, + "loss": 0.2903, + "step": 4482 + }, + { + "epoch": 0.3112762116372726, + "grad_norm": 3.20689131971911, + "learning_rate": 8.065432593678213e-06, + "loss": 0.4862, + "step": 4483 + }, + { + "epoch": 0.3113456464379947, + "grad_norm": 4.2783633822559235, + "learning_rate": 8.064544153449996e-06, + "loss": 0.5001, + "step": 4484 + }, + { + "epoch": 0.31141508123871686, + "grad_norm": 3.6604235538562166, + "learning_rate": 8.063655558220605e-06, + "loss": 0.4118, + "step": 4485 + }, + { + "epoch": 0.31148451603943894, + "grad_norm": 
3.797398685293877, + "learning_rate": 8.06276680803498e-06, + "loss": 0.4201, + "step": 4486 + }, + { + "epoch": 0.3115539508401611, + "grad_norm": 4.250788222197798, + "learning_rate": 8.061877902938077e-06, + "loss": 0.5273, + "step": 4487 + }, + { + "epoch": 0.3116233856408832, + "grad_norm": 3.3677202341977908, + "learning_rate": 8.060988842974853e-06, + "loss": 0.4621, + "step": 4488 + }, + { + "epoch": 0.31169282044160534, + "grad_norm": 3.6594708925218193, + "learning_rate": 8.06009962819028e-06, + "loss": 0.4634, + "step": 4489 + }, + { + "epoch": 0.3117622552423275, + "grad_norm": 3.384579206101019, + "learning_rate": 8.059210258629328e-06, + "loss": 0.5538, + "step": 4490 + }, + { + "epoch": 0.31183169004304956, + "grad_norm": 4.3945471726361, + "learning_rate": 8.058320734336984e-06, + "loss": 0.7264, + "step": 4491 + }, + { + "epoch": 0.3119011248437717, + "grad_norm": 4.594607014140406, + "learning_rate": 8.057431055358238e-06, + "loss": 0.6429, + "step": 4492 + }, + { + "epoch": 0.31197055964449383, + "grad_norm": 3.4497983979041433, + "learning_rate": 8.056541221738088e-06, + "loss": 0.4277, + "step": 4493 + }, + { + "epoch": 0.31203999444521596, + "grad_norm": 4.362289135572665, + "learning_rate": 8.055651233521543e-06, + "loss": 0.5336, + "step": 4494 + }, + { + "epoch": 0.31210942924593804, + "grad_norm": 4.509164446041097, + "learning_rate": 8.054761090753615e-06, + "loss": 0.6592, + "step": 4495 + }, + { + "epoch": 0.3121788640466602, + "grad_norm": 3.1237688586523187, + "learning_rate": 8.053870793479329e-06, + "loss": 0.3181, + "step": 4496 + }, + { + "epoch": 0.3122482988473823, + "grad_norm": 3.6116413598140897, + "learning_rate": 8.052980341743712e-06, + "loss": 0.482, + "step": 4497 + }, + { + "epoch": 0.31231773364810445, + "grad_norm": 4.313299276718832, + "learning_rate": 8.052089735591806e-06, + "loss": 0.4442, + "step": 4498 + }, + { + "epoch": 0.3123871684488265, + "grad_norm": 10.795401691382107, + "learning_rate": 8.051198975068655e-06, + "loss": 0.5234, + "step": 4499 + }, + { + "epoch": 0.31245660324954866, + "grad_norm": 3.8421364182217945, + "learning_rate": 8.050308060219311e-06, + "loss": 0.7851, + "step": 4500 + }, + { + "epoch": 0.3125260380502708, + "grad_norm": 4.173483350252587, + "learning_rate": 8.049416991088838e-06, + "loss": 0.5298, + "step": 4501 + }, + { + "epoch": 0.31259547285099293, + "grad_norm": 3.9334632226309623, + "learning_rate": 8.048525767722304e-06, + "loss": 0.659, + "step": 4502 + }, + { + "epoch": 0.31266490765171506, + "grad_norm": 3.3854651646308405, + "learning_rate": 8.047634390164787e-06, + "loss": 0.4844, + "step": 4503 + }, + { + "epoch": 0.31273434245243714, + "grad_norm": 3.168982435519991, + "learning_rate": 8.04674285846137e-06, + "loss": 0.358, + "step": 4504 + }, + { + "epoch": 0.3128037772531593, + "grad_norm": 4.045872058279486, + "learning_rate": 8.045851172657146e-06, + "loss": 0.7503, + "step": 4505 + }, + { + "epoch": 0.3128732120538814, + "grad_norm": 4.824018499992043, + "learning_rate": 8.044959332797217e-06, + "loss": 0.5927, + "step": 4506 + }, + { + "epoch": 0.31294264685460355, + "grad_norm": 3.4837328009379145, + "learning_rate": 8.044067338926693e-06, + "loss": 0.5333, + "step": 4507 + }, + { + "epoch": 0.3130120816553256, + "grad_norm": 4.12526070940582, + "learning_rate": 8.043175191090687e-06, + "loss": 0.5585, + "step": 4508 + }, + { + "epoch": 0.31308151645604776, + "grad_norm": 3.922028171082318, + "learning_rate": 8.042282889334322e-06, + "loss": 0.6927, + "step": 4509 + }, + { + "epoch": 
0.3131509512567699, + "grad_norm": 3.228177792195968, + "learning_rate": 8.041390433702729e-06, + "loss": 0.3174, + "step": 4510 + }, + { + "epoch": 0.31322038605749203, + "grad_norm": 3.421018627605359, + "learning_rate": 8.040497824241051e-06, + "loss": 0.3538, + "step": 4511 + }, + { + "epoch": 0.31328982085821416, + "grad_norm": 3.697949005925923, + "learning_rate": 8.039605060994435e-06, + "loss": 0.4627, + "step": 4512 + }, + { + "epoch": 0.31335925565893624, + "grad_norm": 2.7125115776848157, + "learning_rate": 8.038712144008035e-06, + "loss": 0.2444, + "step": 4513 + }, + { + "epoch": 0.3134286904596584, + "grad_norm": 5.499710724462778, + "learning_rate": 8.037819073327012e-06, + "loss": 0.4499, + "step": 4514 + }, + { + "epoch": 0.3134981252603805, + "grad_norm": 4.5252127951966825, + "learning_rate": 8.036925848996537e-06, + "loss": 0.463, + "step": 4515 + }, + { + "epoch": 0.31356756006110265, + "grad_norm": 3.6768390267608715, + "learning_rate": 8.03603247106179e-06, + "loss": 0.437, + "step": 4516 + }, + { + "epoch": 0.3136369948618247, + "grad_norm": 3.734811904427, + "learning_rate": 8.035138939567955e-06, + "loss": 0.5122, + "step": 4517 + }, + { + "epoch": 0.31370642966254686, + "grad_norm": 3.010988053258721, + "learning_rate": 8.03424525456023e-06, + "loss": 0.3149, + "step": 4518 + }, + { + "epoch": 0.313775864463269, + "grad_norm": 3.9902219394831295, + "learning_rate": 8.03335141608381e-06, + "loss": 0.5176, + "step": 4519 + }, + { + "epoch": 0.31384529926399113, + "grad_norm": 3.501876809299136, + "learning_rate": 8.032457424183909e-06, + "loss": 0.3673, + "step": 4520 + }, + { + "epoch": 0.3139147340647132, + "grad_norm": 3.034950270406639, + "learning_rate": 8.031563278905744e-06, + "loss": 0.3731, + "step": 4521 + }, + { + "epoch": 0.31398416886543534, + "grad_norm": 3.785104588218375, + "learning_rate": 8.030668980294539e-06, + "loss": 0.3874, + "step": 4522 + }, + { + "epoch": 0.3140536036661575, + "grad_norm": 4.376463199172921, + "learning_rate": 8.029774528395523e-06, + "loss": 0.7204, + "step": 4523 + }, + { + "epoch": 0.3141230384668796, + "grad_norm": 5.318149157963461, + "learning_rate": 8.028879923253943e-06, + "loss": 0.6724, + "step": 4524 + }, + { + "epoch": 0.31419247326760175, + "grad_norm": 2.7053782739354886, + "learning_rate": 8.027985164915044e-06, + "loss": 0.2198, + "step": 4525 + }, + { + "epoch": 0.3142619080683238, + "grad_norm": 3.889788717869216, + "learning_rate": 8.02709025342408e-06, + "loss": 0.4828, + "step": 4526 + }, + { + "epoch": 0.31433134286904596, + "grad_norm": 3.603170873120601, + "learning_rate": 8.026195188826318e-06, + "loss": 0.4517, + "step": 4527 + }, + { + "epoch": 0.3144007776697681, + "grad_norm": 4.352725565702803, + "learning_rate": 8.025299971167026e-06, + "loss": 0.5557, + "step": 4528 + }, + { + "epoch": 0.31447021247049023, + "grad_norm": 6.346075373124257, + "learning_rate": 8.024404600491485e-06, + "loss": 1.0009, + "step": 4529 + }, + { + "epoch": 0.3145396472712123, + "grad_norm": 2.7681282881066087, + "learning_rate": 8.023509076844984e-06, + "loss": 0.2242, + "step": 4530 + }, + { + "epoch": 0.31460908207193444, + "grad_norm": 4.342935971872531, + "learning_rate": 8.022613400272813e-06, + "loss": 0.5686, + "step": 4531 + }, + { + "epoch": 0.3146785168726566, + "grad_norm": 3.7165907157696405, + "learning_rate": 8.021717570820278e-06, + "loss": 0.5998, + "step": 4532 + }, + { + "epoch": 0.3147479516733787, + "grad_norm": 3.5314554016819524, + "learning_rate": 8.020821588532685e-06, + "loss": 0.3413, + 
"step": 4533 + }, + { + "epoch": 0.31481738647410085, + "grad_norm": 4.270056155307777, + "learning_rate": 8.019925453455357e-06, + "loss": 0.4416, + "step": 4534 + }, + { + "epoch": 0.3148868212748229, + "grad_norm": 3.282529859459503, + "learning_rate": 8.019029165633617e-06, + "loss": 0.3327, + "step": 4535 + }, + { + "epoch": 0.31495625607554506, + "grad_norm": 4.855975144877896, + "learning_rate": 8.018132725112796e-06, + "loss": 0.5824, + "step": 4536 + }, + { + "epoch": 0.3150256908762672, + "grad_norm": 3.218366639815275, + "learning_rate": 8.017236131938238e-06, + "loss": 0.3953, + "step": 4537 + }, + { + "epoch": 0.31509512567698933, + "grad_norm": 4.418789432625289, + "learning_rate": 8.01633938615529e-06, + "loss": 0.7477, + "step": 4538 + }, + { + "epoch": 0.3151645604777114, + "grad_norm": 3.558490107985622, + "learning_rate": 8.015442487809311e-06, + "loss": 0.3256, + "step": 4539 + }, + { + "epoch": 0.31523399527843354, + "grad_norm": 3.3864253570776777, + "learning_rate": 8.014545436945662e-06, + "loss": 0.4903, + "step": 4540 + }, + { + "epoch": 0.3153034300791557, + "grad_norm": 4.368700704511398, + "learning_rate": 8.013648233609715e-06, + "loss": 0.6201, + "step": 4541 + }, + { + "epoch": 0.3153728648798778, + "grad_norm": 3.711882282295784, + "learning_rate": 8.012750877846853e-06, + "loss": 0.5054, + "step": 4542 + }, + { + "epoch": 0.3154422996805999, + "grad_norm": 3.6020104268932513, + "learning_rate": 8.011853369702458e-06, + "loss": 0.4528, + "step": 4543 + }, + { + "epoch": 0.315511734481322, + "grad_norm": 3.5006019312694976, + "learning_rate": 8.010955709221929e-06, + "loss": 0.4258, + "step": 4544 + }, + { + "epoch": 0.31558116928204416, + "grad_norm": 4.429251798225652, + "learning_rate": 8.010057896450668e-06, + "loss": 0.6561, + "step": 4545 + }, + { + "epoch": 0.3156506040827663, + "grad_norm": 4.20194708811198, + "learning_rate": 8.009159931434082e-06, + "loss": 0.7396, + "step": 4546 + }, + { + "epoch": 0.31572003888348843, + "grad_norm": 4.685831937197169, + "learning_rate": 8.008261814217596e-06, + "loss": 0.6368, + "step": 4547 + }, + { + "epoch": 0.3157894736842105, + "grad_norm": 3.738448314997094, + "learning_rate": 8.007363544846628e-06, + "loss": 0.3896, + "step": 4548 + }, + { + "epoch": 0.31585890848493264, + "grad_norm": 4.906875865520298, + "learning_rate": 8.006465123366616e-06, + "loss": 0.6939, + "step": 4549 + }, + { + "epoch": 0.3159283432856548, + "grad_norm": 4.105792076079457, + "learning_rate": 8.005566549823e-06, + "loss": 0.4661, + "step": 4550 + }, + { + "epoch": 0.3159977780863769, + "grad_norm": 3.73387453417385, + "learning_rate": 8.00466782426123e-06, + "loss": 0.5312, + "step": 4551 + }, + { + "epoch": 0.316067212887099, + "grad_norm": 3.8873780417617807, + "learning_rate": 8.00376894672676e-06, + "loss": 0.5587, + "step": 4552 + }, + { + "epoch": 0.3161366476878211, + "grad_norm": 2.1738424812859884, + "learning_rate": 8.002869917265056e-06, + "loss": 0.2238, + "step": 4553 + }, + { + "epoch": 0.31620608248854326, + "grad_norm": 3.279184022679103, + "learning_rate": 8.001970735921591e-06, + "loss": 0.4829, + "step": 4554 + }, + { + "epoch": 0.3162755172892654, + "grad_norm": 4.519534670663577, + "learning_rate": 8.001071402741843e-06, + "loss": 0.4506, + "step": 4555 + }, + { + "epoch": 0.3163449520899875, + "grad_norm": 5.004565282506696, + "learning_rate": 8.000171917771297e-06, + "loss": 0.544, + "step": 4556 + }, + { + "epoch": 0.3164143868907096, + "grad_norm": 4.211814064690637, + "learning_rate": 
7.999272281055452e-06, + "loss": 0.5274, + "step": 4557 + }, + { + "epoch": 0.31648382169143174, + "grad_norm": 3.85219753702993, + "learning_rate": 7.99837249263981e-06, + "loss": 0.4346, + "step": 4558 + }, + { + "epoch": 0.3165532564921539, + "grad_norm": 3.7765355623234136, + "learning_rate": 7.997472552569877e-06, + "loss": 0.4536, + "step": 4559 + }, + { + "epoch": 0.316622691292876, + "grad_norm": 2.97148807572818, + "learning_rate": 7.996572460891176e-06, + "loss": 0.3746, + "step": 4560 + }, + { + "epoch": 0.3166921260935981, + "grad_norm": 3.696867565621778, + "learning_rate": 7.995672217649231e-06, + "loss": 0.6967, + "step": 4561 + }, + { + "epoch": 0.3167615608943202, + "grad_norm": 4.64768828842926, + "learning_rate": 7.994771822889575e-06, + "loss": 0.4421, + "step": 4562 + }, + { + "epoch": 0.31683099569504236, + "grad_norm": 3.646561863846733, + "learning_rate": 7.99387127665775e-06, + "loss": 0.6111, + "step": 4563 + }, + { + "epoch": 0.3169004304957645, + "grad_norm": 4.225993571261041, + "learning_rate": 7.992970578999303e-06, + "loss": 0.5535, + "step": 4564 + }, + { + "epoch": 0.3169698652964866, + "grad_norm": 4.045295345049359, + "learning_rate": 7.99206972995979e-06, + "loss": 0.5822, + "step": 4565 + }, + { + "epoch": 0.3170393000972087, + "grad_norm": 3.9962793551379185, + "learning_rate": 7.991168729584775e-06, + "loss": 0.4423, + "step": 4566 + }, + { + "epoch": 0.31710873489793084, + "grad_norm": 4.475517415464021, + "learning_rate": 7.990267577919833e-06, + "loss": 0.4236, + "step": 4567 + }, + { + "epoch": 0.317178169698653, + "grad_norm": 3.67563569380176, + "learning_rate": 7.98936627501054e-06, + "loss": 0.3612, + "step": 4568 + }, + { + "epoch": 0.3172476044993751, + "grad_norm": 3.592914863446774, + "learning_rate": 7.988464820902482e-06, + "loss": 0.4735, + "step": 4569 + }, + { + "epoch": 0.3173170393000972, + "grad_norm": 2.59805260898133, + "learning_rate": 7.987563215641256e-06, + "loss": 0.204, + "step": 4570 + }, + { + "epoch": 0.3173864741008193, + "grad_norm": 4.154543370907658, + "learning_rate": 7.986661459272463e-06, + "loss": 0.5733, + "step": 4571 + }, + { + "epoch": 0.31745590890154146, + "grad_norm": 3.5861481067565735, + "learning_rate": 7.985759551841711e-06, + "loss": 0.4414, + "step": 4572 + }, + { + "epoch": 0.3175253437022636, + "grad_norm": 3.3827084904939437, + "learning_rate": 7.984857493394623e-06, + "loss": 0.4369, + "step": 4573 + }, + { + "epoch": 0.3175947785029857, + "grad_norm": 3.2560966029796687, + "learning_rate": 7.98395528397682e-06, + "loss": 0.4787, + "step": 4574 + }, + { + "epoch": 0.3176642133037078, + "grad_norm": 3.991344325912637, + "learning_rate": 7.983052923633933e-06, + "loss": 0.5226, + "step": 4575 + }, + { + "epoch": 0.31773364810442994, + "grad_norm": 4.704753002267821, + "learning_rate": 7.982150412411607e-06, + "loss": 0.7063, + "step": 4576 + }, + { + "epoch": 0.3178030829051521, + "grad_norm": 4.499960285252185, + "learning_rate": 7.981247750355486e-06, + "loss": 0.5527, + "step": 4577 + }, + { + "epoch": 0.31787251770587416, + "grad_norm": 2.684181899305464, + "learning_rate": 7.980344937511229e-06, + "loss": 0.3847, + "step": 4578 + }, + { + "epoch": 0.3179419525065963, + "grad_norm": 2.0736181021625506, + "learning_rate": 7.979441973924496e-06, + "loss": 0.2376, + "step": 4579 + }, + { + "epoch": 0.3180113873073184, + "grad_norm": 3.545523347003672, + "learning_rate": 7.97853885964096e-06, + "loss": 0.4363, + "step": 4580 + }, + { + "epoch": 0.31808082210804056, + "grad_norm": 
3.395274430805249, + "learning_rate": 7.977635594706298e-06, + "loss": 0.466, + "step": 4581 + }, + { + "epoch": 0.3181502569087627, + "grad_norm": 4.3213454506717195, + "learning_rate": 7.9767321791662e-06, + "loss": 0.5375, + "step": 4582 + }, + { + "epoch": 0.3182196917094848, + "grad_norm": 3.8104639067360657, + "learning_rate": 7.975828613066354e-06, + "loss": 0.4245, + "step": 4583 + }, + { + "epoch": 0.3182891265102069, + "grad_norm": 3.9666997296154505, + "learning_rate": 7.974924896452466e-06, + "loss": 0.4862, + "step": 4584 + }, + { + "epoch": 0.31835856131092904, + "grad_norm": 5.364981480732536, + "learning_rate": 7.974021029370242e-06, + "loss": 0.7239, + "step": 4585 + }, + { + "epoch": 0.3184279961116512, + "grad_norm": 3.08931924921442, + "learning_rate": 7.973117011865398e-06, + "loss": 0.3552, + "step": 4586 + }, + { + "epoch": 0.31849743091237326, + "grad_norm": 3.10929712553979, + "learning_rate": 7.972212843983662e-06, + "loss": 0.3238, + "step": 4587 + }, + { + "epoch": 0.3185668657130954, + "grad_norm": 2.50848245218919, + "learning_rate": 7.971308525770763e-06, + "loss": 0.1952, + "step": 4588 + }, + { + "epoch": 0.3186363005138175, + "grad_norm": 4.6272823142786885, + "learning_rate": 7.970404057272441e-06, + "loss": 0.5662, + "step": 4589 + }, + { + "epoch": 0.31870573531453966, + "grad_norm": 4.317798080943889, + "learning_rate": 7.969499438534445e-06, + "loss": 0.5852, + "step": 4590 + }, + { + "epoch": 0.3187751701152618, + "grad_norm": 3.758440421974862, + "learning_rate": 7.968594669602525e-06, + "loss": 0.5574, + "step": 4591 + }, + { + "epoch": 0.3188446049159839, + "grad_norm": 5.321842531780306, + "learning_rate": 7.967689750522447e-06, + "loss": 0.6716, + "step": 4592 + }, + { + "epoch": 0.318914039716706, + "grad_norm": 4.271673454246642, + "learning_rate": 7.966784681339978e-06, + "loss": 0.5639, + "step": 4593 + }, + { + "epoch": 0.31898347451742814, + "grad_norm": 4.166481008501559, + "learning_rate": 7.965879462100898e-06, + "loss": 0.6935, + "step": 4594 + }, + { + "epoch": 0.3190529093181503, + "grad_norm": 3.6344461696172634, + "learning_rate": 7.964974092850992e-06, + "loss": 0.3892, + "step": 4595 + }, + { + "epoch": 0.31912234411887236, + "grad_norm": 3.6726102318285117, + "learning_rate": 7.964068573636051e-06, + "loss": 0.4744, + "step": 4596 + }, + { + "epoch": 0.3191917789195945, + "grad_norm": 4.173658715763125, + "learning_rate": 7.963162904501874e-06, + "loss": 0.5806, + "step": 4597 + }, + { + "epoch": 0.31926121372031663, + "grad_norm": 3.1698397010838977, + "learning_rate": 7.962257085494273e-06, + "loss": 0.3242, + "step": 4598 + }, + { + "epoch": 0.31933064852103876, + "grad_norm": 3.1721050466740297, + "learning_rate": 7.96135111665906e-06, + "loss": 0.2268, + "step": 4599 + }, + { + "epoch": 0.31940008332176084, + "grad_norm": 3.6555288223855484, + "learning_rate": 7.960444998042057e-06, + "loss": 0.1804, + "step": 4600 + }, + { + "epoch": 0.319469518122483, + "grad_norm": 5.5226825700954025, + "learning_rate": 7.959538729689095e-06, + "loss": 0.8646, + "step": 4601 + }, + { + "epoch": 0.3195389529232051, + "grad_norm": 2.9690630029429204, + "learning_rate": 7.958632311646015e-06, + "loss": 0.28, + "step": 4602 + }, + { + "epoch": 0.31960838772392725, + "grad_norm": 3.665297740589691, + "learning_rate": 7.957725743958662e-06, + "loss": 0.4565, + "step": 4603 + }, + { + "epoch": 0.3196778225246494, + "grad_norm": 3.806323776510999, + "learning_rate": 7.956819026672885e-06, + "loss": 0.5064, + "step": 4604 + }, + { + "epoch": 
0.31974725732537146, + "grad_norm": 3.321615353592721, + "learning_rate": 7.955912159834549e-06, + "loss": 0.4375, + "step": 4605 + }, + { + "epoch": 0.3198166921260936, + "grad_norm": 3.1440187834910986, + "learning_rate": 7.95500514348952e-06, + "loss": 0.3389, + "step": 4606 + }, + { + "epoch": 0.31988612692681573, + "grad_norm": 3.550764641168834, + "learning_rate": 7.954097977683675e-06, + "loss": 0.3334, + "step": 4607 + }, + { + "epoch": 0.31995556172753786, + "grad_norm": 5.012512197261403, + "learning_rate": 7.953190662462897e-06, + "loss": 0.5701, + "step": 4608 + }, + { + "epoch": 0.32002499652825994, + "grad_norm": 2.7699909374771035, + "learning_rate": 7.952283197873077e-06, + "loss": 0.2799, + "step": 4609 + }, + { + "epoch": 0.3200944313289821, + "grad_norm": 4.770496178497335, + "learning_rate": 7.951375583960114e-06, + "loss": 0.6681, + "step": 4610 + }, + { + "epoch": 0.3201638661297042, + "grad_norm": 4.361290994687142, + "learning_rate": 7.950467820769914e-06, + "loss": 0.5187, + "step": 4611 + }, + { + "epoch": 0.32023330093042635, + "grad_norm": 4.435622470136309, + "learning_rate": 7.949559908348389e-06, + "loss": 0.6855, + "step": 4612 + }, + { + "epoch": 0.3203027357311484, + "grad_norm": 4.5653730498547755, + "learning_rate": 7.948651846741461e-06, + "loss": 0.5833, + "step": 4613 + }, + { + "epoch": 0.32037217053187056, + "grad_norm": 4.503956814882699, + "learning_rate": 7.947743635995062e-06, + "loss": 0.8082, + "step": 4614 + }, + { + "epoch": 0.3204416053325927, + "grad_norm": 3.1523464676738393, + "learning_rate": 7.946835276155123e-06, + "loss": 0.4796, + "step": 4615 + }, + { + "epoch": 0.32051104013331483, + "grad_norm": 3.9845614660329645, + "learning_rate": 7.945926767267591e-06, + "loss": 0.4445, + "step": 4616 + }, + { + "epoch": 0.32058047493403696, + "grad_norm": 4.213881643869688, + "learning_rate": 7.945018109378417e-06, + "loss": 0.7245, + "step": 4617 + }, + { + "epoch": 0.32064990973475904, + "grad_norm": 3.2213069592082957, + "learning_rate": 7.944109302533559e-06, + "loss": 0.2825, + "step": 4618 + }, + { + "epoch": 0.3207193445354812, + "grad_norm": 6.864401109934016, + "learning_rate": 7.943200346778986e-06, + "loss": 0.6383, + "step": 4619 + }, + { + "epoch": 0.3207887793362033, + "grad_norm": 3.805601480082595, + "learning_rate": 7.942291242160667e-06, + "loss": 0.434, + "step": 4620 + }, + { + "epoch": 0.32085821413692545, + "grad_norm": 4.174043821867604, + "learning_rate": 7.941381988724588e-06, + "loss": 0.5853, + "step": 4621 + }, + { + "epoch": 0.3209276489376475, + "grad_norm": 3.913797952737136, + "learning_rate": 7.940472586516736e-06, + "loss": 0.5452, + "step": 4622 + }, + { + "epoch": 0.32099708373836966, + "grad_norm": 4.592740016667488, + "learning_rate": 7.939563035583109e-06, + "loss": 0.7351, + "step": 4623 + }, + { + "epoch": 0.3210665185390918, + "grad_norm": 3.854015029196456, + "learning_rate": 7.938653335969708e-06, + "loss": 0.6549, + "step": 4624 + }, + { + "epoch": 0.32113595333981393, + "grad_norm": 4.83387939563592, + "learning_rate": 7.937743487722546e-06, + "loss": 0.4381, + "step": 4625 + }, + { + "epoch": 0.32120538814053606, + "grad_norm": 3.7113110721342855, + "learning_rate": 7.936833490887645e-06, + "loss": 0.507, + "step": 4626 + }, + { + "epoch": 0.32127482294125814, + "grad_norm": 3.864979484085414, + "learning_rate": 7.93592334551103e-06, + "loss": 0.4661, + "step": 4627 + }, + { + "epoch": 0.3213442577419803, + "grad_norm": 4.7423085173980875, + "learning_rate": 7.935013051638732e-06, + "loss": 
0.5067, + "step": 4628 + }, + { + "epoch": 0.3214136925427024, + "grad_norm": 4.047918617692219, + "learning_rate": 7.934102609316796e-06, + "loss": 0.6548, + "step": 4629 + }, + { + "epoch": 0.32148312734342455, + "grad_norm": 4.9278533171062495, + "learning_rate": 7.93319201859127e-06, + "loss": 0.3736, + "step": 4630 + }, + { + "epoch": 0.3215525621441466, + "grad_norm": 4.657116767590137, + "learning_rate": 7.932281279508211e-06, + "loss": 0.5713, + "step": 4631 + }, + { + "epoch": 0.32162199694486876, + "grad_norm": 4.508238235123358, + "learning_rate": 7.931370392113684e-06, + "loss": 0.6774, + "step": 4632 + }, + { + "epoch": 0.3216914317455909, + "grad_norm": 3.792245474840228, + "learning_rate": 7.930459356453757e-06, + "loss": 0.5425, + "step": 4633 + }, + { + "epoch": 0.32176086654631303, + "grad_norm": 4.444086239333384, + "learning_rate": 7.929548172574515e-06, + "loss": 0.5196, + "step": 4634 + }, + { + "epoch": 0.3218303013470351, + "grad_norm": 2.8759854741317192, + "learning_rate": 7.92863684052204e-06, + "loss": 0.2465, + "step": 4635 + }, + { + "epoch": 0.32189973614775724, + "grad_norm": 3.777057895864684, + "learning_rate": 7.927725360342426e-06, + "loss": 0.4267, + "step": 4636 + }, + { + "epoch": 0.3219691709484794, + "grad_norm": 3.596245613784351, + "learning_rate": 7.926813732081778e-06, + "loss": 0.5298, + "step": 4637 + }, + { + "epoch": 0.3220386057492015, + "grad_norm": 2.5888536577246923, + "learning_rate": 7.925901955786203e-06, + "loss": 0.2207, + "step": 4638 + }, + { + "epoch": 0.32210804054992365, + "grad_norm": 3.430925414938377, + "learning_rate": 7.924990031501818e-06, + "loss": 0.4014, + "step": 4639 + }, + { + "epoch": 0.3221774753506457, + "grad_norm": 3.6567542735206833, + "learning_rate": 7.924077959274748e-06, + "loss": 0.3783, + "step": 4640 + }, + { + "epoch": 0.32224691015136786, + "grad_norm": 4.284605663095535, + "learning_rate": 7.923165739151123e-06, + "loss": 0.5944, + "step": 4641 + }, + { + "epoch": 0.32231634495209, + "grad_norm": 4.158889384928871, + "learning_rate": 7.922253371177081e-06, + "loss": 0.3697, + "step": 4642 + }, + { + "epoch": 0.32238577975281213, + "grad_norm": 6.163553069002673, + "learning_rate": 7.92134085539877e-06, + "loss": 0.8389, + "step": 4643 + }, + { + "epoch": 0.3224552145535342, + "grad_norm": 4.416300654922108, + "learning_rate": 7.920428191862349e-06, + "loss": 0.5493, + "step": 4644 + }, + { + "epoch": 0.32252464935425634, + "grad_norm": 4.884184625388962, + "learning_rate": 7.91951538061397e-06, + "loss": 0.7031, + "step": 4645 + }, + { + "epoch": 0.3225940841549785, + "grad_norm": 4.898345432005847, + "learning_rate": 7.918602421699808e-06, + "loss": 0.6119, + "step": 4646 + }, + { + "epoch": 0.3226635189557006, + "grad_norm": 4.394891147265864, + "learning_rate": 7.917689315166039e-06, + "loss": 0.6013, + "step": 4647 + }, + { + "epoch": 0.3227329537564227, + "grad_norm": 4.692922459589077, + "learning_rate": 7.916776061058846e-06, + "loss": 0.7299, + "step": 4648 + }, + { + "epoch": 0.3228023885571448, + "grad_norm": 3.8570564366702866, + "learning_rate": 7.915862659424419e-06, + "loss": 0.4548, + "step": 4649 + }, + { + "epoch": 0.32287182335786696, + "grad_norm": 6.502251882988549, + "learning_rate": 7.914949110308959e-06, + "loss": 0.7557, + "step": 4650 + }, + { + "epoch": 0.3229412581585891, + "grad_norm": 3.8412091538633804, + "learning_rate": 7.914035413758671e-06, + "loss": 0.3041, + "step": 4651 + }, + { + "epoch": 0.32301069295931123, + "grad_norm": 5.0526817022030555, + "learning_rate": 
7.91312156981977e-06, + "loss": 0.664, + "step": 4652 + }, + { + "epoch": 0.3230801277600333, + "grad_norm": 4.2386209293253865, + "learning_rate": 7.912207578538478e-06, + "loss": 0.5079, + "step": 4653 + }, + { + "epoch": 0.32314956256075544, + "grad_norm": 4.059204685867363, + "learning_rate": 7.91129343996102e-06, + "loss": 0.4375, + "step": 4654 + }, + { + "epoch": 0.3232189973614776, + "grad_norm": 4.165574699128413, + "learning_rate": 7.910379154133634e-06, + "loss": 0.6977, + "step": 4655 + }, + { + "epoch": 0.3232884321621997, + "grad_norm": 3.743241076099596, + "learning_rate": 7.909464721102564e-06, + "loss": 0.5233, + "step": 4656 + }, + { + "epoch": 0.3233578669629218, + "grad_norm": 4.3367259118941535, + "learning_rate": 7.90855014091406e-06, + "loss": 0.5999, + "step": 4657 + }, + { + "epoch": 0.3234273017636439, + "grad_norm": 3.6506698632173835, + "learning_rate": 7.907635413614383e-06, + "loss": 0.4829, + "step": 4658 + }, + { + "epoch": 0.32349673656436606, + "grad_norm": 2.508147130562036, + "learning_rate": 7.906720539249795e-06, + "loss": 0.2944, + "step": 4659 + }, + { + "epoch": 0.3235661713650882, + "grad_norm": 3.8889760772615203, + "learning_rate": 7.905805517866572e-06, + "loss": 0.6976, + "step": 4660 + }, + { + "epoch": 0.32363560616581033, + "grad_norm": 4.054815641364154, + "learning_rate": 7.904890349510994e-06, + "loss": 0.63, + "step": 4661 + }, + { + "epoch": 0.3237050409665324, + "grad_norm": 3.8996557833561702, + "learning_rate": 7.90397503422935e-06, + "loss": 0.5693, + "step": 4662 + }, + { + "epoch": 0.32377447576725454, + "grad_norm": 3.8678112981275903, + "learning_rate": 7.903059572067933e-06, + "loss": 0.5863, + "step": 4663 + }, + { + "epoch": 0.3238439105679767, + "grad_norm": 4.09382195206162, + "learning_rate": 7.90214396307305e-06, + "loss": 0.4455, + "step": 4664 + }, + { + "epoch": 0.3239133453686988, + "grad_norm": 3.190047969818176, + "learning_rate": 7.90122820729101e-06, + "loss": 0.365, + "step": 4665 + }, + { + "epoch": 0.3239827801694209, + "grad_norm": 3.914207039534262, + "learning_rate": 7.900312304768127e-06, + "loss": 0.4955, + "step": 4666 + }, + { + "epoch": 0.324052214970143, + "grad_norm": 4.258849357837336, + "learning_rate": 7.899396255550733e-06, + "loss": 0.5264, + "step": 4667 + }, + { + "epoch": 0.32412164977086516, + "grad_norm": 3.734791926017694, + "learning_rate": 7.898480059685154e-06, + "loss": 0.4164, + "step": 4668 + }, + { + "epoch": 0.3241910845715873, + "grad_norm": 3.938144601132886, + "learning_rate": 7.897563717217736e-06, + "loss": 0.5074, + "step": 4669 + }, + { + "epoch": 0.3242605193723094, + "grad_norm": 4.458406724746656, + "learning_rate": 7.896647228194823e-06, + "loss": 0.7585, + "step": 4670 + }, + { + "epoch": 0.3243299541730315, + "grad_norm": 3.672883063241422, + "learning_rate": 7.895730592662772e-06, + "loss": 0.3493, + "step": 4671 + }, + { + "epoch": 0.32439938897375364, + "grad_norm": 4.024615462551951, + "learning_rate": 7.894813810667942e-06, + "loss": 0.4803, + "step": 4672 + }, + { + "epoch": 0.3244688237744758, + "grad_norm": 4.5435348849871495, + "learning_rate": 7.893896882256707e-06, + "loss": 0.775, + "step": 4673 + }, + { + "epoch": 0.3245382585751979, + "grad_norm": 3.321601545819084, + "learning_rate": 7.892979807475443e-06, + "loss": 0.3376, + "step": 4674 + }, + { + "epoch": 0.32460769337592, + "grad_norm": 3.218001652286279, + "learning_rate": 7.892062586370533e-06, + "loss": 0.3474, + "step": 4675 + }, + { + "epoch": 0.3246771281766421, + "grad_norm": 3.505112846495203, 
+ "learning_rate": 7.891145218988369e-06, + "loss": 0.445, + "step": 4676 + }, + { + "epoch": 0.32474656297736426, + "grad_norm": 3.449205586774331, + "learning_rate": 7.890227705375352e-06, + "loss": 0.5055, + "step": 4677 + }, + { + "epoch": 0.3248159977780864, + "grad_norm": 3.394558783722113, + "learning_rate": 7.889310045577889e-06, + "loss": 0.2543, + "step": 4678 + }, + { + "epoch": 0.3248854325788085, + "grad_norm": 4.194647851846029, + "learning_rate": 7.888392239642392e-06, + "loss": 0.5433, + "step": 4679 + }, + { + "epoch": 0.3249548673795306, + "grad_norm": 3.190438123651363, + "learning_rate": 7.887474287615285e-06, + "loss": 0.3345, + "step": 4680 + }, + { + "epoch": 0.32502430218025274, + "grad_norm": 4.548012915209548, + "learning_rate": 7.886556189542995e-06, + "loss": 0.4888, + "step": 4681 + }, + { + "epoch": 0.3250937369809749, + "grad_norm": 4.340228071042583, + "learning_rate": 7.88563794547196e-06, + "loss": 0.3026, + "step": 4682 + }, + { + "epoch": 0.325163171781697, + "grad_norm": 4.777459926891092, + "learning_rate": 7.884719555448623e-06, + "loss": 0.5877, + "step": 4683 + }, + { + "epoch": 0.3252326065824191, + "grad_norm": 4.557339607602754, + "learning_rate": 7.883801019519436e-06, + "loss": 0.5472, + "step": 4684 + }, + { + "epoch": 0.3253020413831412, + "grad_norm": 4.030100785730653, + "learning_rate": 7.882882337730855e-06, + "loss": 0.4973, + "step": 4685 + }, + { + "epoch": 0.32537147618386336, + "grad_norm": 5.210024784273937, + "learning_rate": 7.881963510129349e-06, + "loss": 0.6391, + "step": 4686 + }, + { + "epoch": 0.3254409109845855, + "grad_norm": 3.7215092746754004, + "learning_rate": 7.881044536761389e-06, + "loss": 0.5831, + "step": 4687 + }, + { + "epoch": 0.3255103457853076, + "grad_norm": 4.517778416764318, + "learning_rate": 7.880125417673457e-06, + "loss": 0.6917, + "step": 4688 + }, + { + "epoch": 0.3255797805860297, + "grad_norm": 3.8345256405396246, + "learning_rate": 7.87920615291204e-06, + "loss": 0.3102, + "step": 4689 + }, + { + "epoch": 0.32564921538675184, + "grad_norm": 3.6555974450580218, + "learning_rate": 7.878286742523634e-06, + "loss": 0.5172, + "step": 4690 + }, + { + "epoch": 0.325718650187474, + "grad_norm": 4.658409032547017, + "learning_rate": 7.877367186554741e-06, + "loss": 0.674, + "step": 4691 + }, + { + "epoch": 0.32578808498819606, + "grad_norm": 1.9841531113872088, + "learning_rate": 7.876447485051872e-06, + "loss": 0.183, + "step": 4692 + }, + { + "epoch": 0.3258575197889182, + "grad_norm": 3.8532623541618416, + "learning_rate": 7.875527638061544e-06, + "loss": 0.5343, + "step": 4693 + }, + { + "epoch": 0.3259269545896403, + "grad_norm": 3.2099282592893004, + "learning_rate": 7.874607645630283e-06, + "loss": 0.296, + "step": 4694 + }, + { + "epoch": 0.32599638939036246, + "grad_norm": 4.038551422799916, + "learning_rate": 7.873687507804619e-06, + "loss": 0.6481, + "step": 4695 + }, + { + "epoch": 0.3260658241910846, + "grad_norm": 4.193105456308426, + "learning_rate": 7.872767224631093e-06, + "loss": 0.4533, + "step": 4696 + }, + { + "epoch": 0.3261352589918067, + "grad_norm": 5.690814272373825, + "learning_rate": 7.871846796156251e-06, + "loss": 0.7202, + "step": 4697 + }, + { + "epoch": 0.3262046937925288, + "grad_norm": 3.2513754020271644, + "learning_rate": 7.87092622242665e-06, + "loss": 0.186, + "step": 4698 + }, + { + "epoch": 0.32627412859325094, + "grad_norm": 2.3770210229974658, + "learning_rate": 7.870005503488847e-06, + "loss": 0.1447, + "step": 4699 + }, + { + "epoch": 0.3263435633939731, + 
"grad_norm": 4.136432066972335, + "learning_rate": 7.869084639389412e-06, + "loss": 0.5305, + "step": 4700 + }, + { + "epoch": 0.32641299819469516, + "grad_norm": 3.610216002697001, + "learning_rate": 7.868163630174926e-06, + "loss": 0.6002, + "step": 4701 + }, + { + "epoch": 0.3264824329954173, + "grad_norm": 3.3223454580462897, + "learning_rate": 7.867242475891968e-06, + "loss": 0.5736, + "step": 4702 + }, + { + "epoch": 0.32655186779613943, + "grad_norm": 3.7511849357144924, + "learning_rate": 7.866321176587129e-06, + "loss": 0.424, + "step": 4703 + }, + { + "epoch": 0.32662130259686156, + "grad_norm": 5.260149073115975, + "learning_rate": 7.865399732307008e-06, + "loss": 0.7874, + "step": 4704 + }, + { + "epoch": 0.32669073739758364, + "grad_norm": 3.3025411040532116, + "learning_rate": 7.864478143098212e-06, + "loss": 0.3391, + "step": 4705 + }, + { + "epoch": 0.3267601721983058, + "grad_norm": 4.20691907899176, + "learning_rate": 7.863556409007355e-06, + "loss": 0.7692, + "step": 4706 + }, + { + "epoch": 0.3268296069990279, + "grad_norm": 4.748310725321278, + "learning_rate": 7.862634530081053e-06, + "loss": 0.8968, + "step": 4707 + }, + { + "epoch": 0.32689904179975005, + "grad_norm": 3.588014528880619, + "learning_rate": 7.861712506365934e-06, + "loss": 0.4014, + "step": 4708 + }, + { + "epoch": 0.3269684766004722, + "grad_norm": 3.70231044481188, + "learning_rate": 7.860790337908638e-06, + "loss": 0.4725, + "step": 4709 + }, + { + "epoch": 0.32703791140119426, + "grad_norm": 4.813706646372495, + "learning_rate": 7.859868024755802e-06, + "loss": 0.605, + "step": 4710 + }, + { + "epoch": 0.3271073462019164, + "grad_norm": 3.59113578779413, + "learning_rate": 7.858945566954076e-06, + "loss": 0.4283, + "step": 4711 + }, + { + "epoch": 0.32717678100263853, + "grad_norm": 4.1891251646625145, + "learning_rate": 7.858022964550122e-06, + "loss": 0.7248, + "step": 4712 + }, + { + "epoch": 0.32724621580336066, + "grad_norm": 3.466639358787095, + "learning_rate": 7.857100217590599e-06, + "loss": 0.3072, + "step": 4713 + }, + { + "epoch": 0.32731565060408274, + "grad_norm": 3.635187172134428, + "learning_rate": 7.856177326122179e-06, + "loss": 0.4059, + "step": 4714 + }, + { + "epoch": 0.3273850854048049, + "grad_norm": 4.835500747168967, + "learning_rate": 7.855254290191543e-06, + "loss": 0.7096, + "step": 4715 + }, + { + "epoch": 0.327454520205527, + "grad_norm": 3.172193259516325, + "learning_rate": 7.854331109845376e-06, + "loss": 0.3931, + "step": 4716 + }, + { + "epoch": 0.32752395500624915, + "grad_norm": 3.7820031310325692, + "learning_rate": 7.853407785130371e-06, + "loss": 0.5338, + "step": 4717 + }, + { + "epoch": 0.3275933898069713, + "grad_norm": 3.343050140866542, + "learning_rate": 7.852484316093227e-06, + "loss": 0.4621, + "step": 4718 + }, + { + "epoch": 0.32766282460769336, + "grad_norm": 3.9915269148585972, + "learning_rate": 7.851560702780658e-06, + "loss": 0.4657, + "step": 4719 + }, + { + "epoch": 0.3277322594084155, + "grad_norm": 4.2046483794872005, + "learning_rate": 7.850636945239373e-06, + "loss": 0.7303, + "step": 4720 + }, + { + "epoch": 0.32780169420913763, + "grad_norm": 4.004710359783434, + "learning_rate": 7.849713043516097e-06, + "loss": 0.6645, + "step": 4721 + }, + { + "epoch": 0.32787112900985976, + "grad_norm": 4.337612345542736, + "learning_rate": 7.848788997657561e-06, + "loss": 0.5615, + "step": 4722 + }, + { + "epoch": 0.32794056381058184, + "grad_norm": 3.9725592507520737, + "learning_rate": 7.8478648077105e-06, + "loss": 0.4277, + "step": 4723 + }, + 
{ + "epoch": 0.328009998611304, + "grad_norm": 3.8417761165988633, + "learning_rate": 7.84694047372166e-06, + "loss": 0.4871, + "step": 4724 + }, + { + "epoch": 0.3280794334120261, + "grad_norm": 5.023924331115942, + "learning_rate": 7.846015995737792e-06, + "loss": 0.7762, + "step": 4725 + }, + { + "epoch": 0.32814886821274825, + "grad_norm": 3.8734636495564474, + "learning_rate": 7.845091373805656e-06, + "loss": 0.3322, + "step": 4726 + }, + { + "epoch": 0.3282183030134703, + "grad_norm": 3.9789398545757715, + "learning_rate": 7.844166607972018e-06, + "loss": 0.5292, + "step": 4727 + }, + { + "epoch": 0.32828773781419246, + "grad_norm": 4.649774119117001, + "learning_rate": 7.843241698283652e-06, + "loss": 0.6158, + "step": 4728 + }, + { + "epoch": 0.3283571726149146, + "grad_norm": 4.185950464704938, + "learning_rate": 7.842316644787336e-06, + "loss": 0.4567, + "step": 4729 + }, + { + "epoch": 0.32842660741563673, + "grad_norm": 3.6639796818769503, + "learning_rate": 7.841391447529862e-06, + "loss": 0.5612, + "step": 4730 + }, + { + "epoch": 0.32849604221635886, + "grad_norm": 3.2403976095474807, + "learning_rate": 7.840466106558025e-06, + "loss": 0.3463, + "step": 4731 + }, + { + "epoch": 0.32856547701708094, + "grad_norm": 4.194396170098813, + "learning_rate": 7.839540621918625e-06, + "loss": 0.506, + "step": 4732 + }, + { + "epoch": 0.3286349118178031, + "grad_norm": 3.824496801777473, + "learning_rate": 7.838614993658474e-06, + "loss": 0.4243, + "step": 4733 + }, + { + "epoch": 0.3287043466185252, + "grad_norm": 4.014966399424347, + "learning_rate": 7.837689221824388e-06, + "loss": 0.6806, + "step": 4734 + }, + { + "epoch": 0.32877378141924735, + "grad_norm": 4.2485326973969615, + "learning_rate": 7.836763306463191e-06, + "loss": 0.5998, + "step": 4735 + }, + { + "epoch": 0.3288432162199694, + "grad_norm": 4.00131314388345, + "learning_rate": 7.835837247621718e-06, + "loss": 0.395, + "step": 4736 + }, + { + "epoch": 0.32891265102069156, + "grad_norm": 4.039448615559206, + "learning_rate": 7.834911045346805e-06, + "loss": 0.2687, + "step": 4737 + }, + { + "epoch": 0.3289820858214137, + "grad_norm": 3.3593159033491093, + "learning_rate": 7.8339846996853e-06, + "loss": 0.4399, + "step": 4738 + }, + { + "epoch": 0.32905152062213583, + "grad_norm": 3.742343244644939, + "learning_rate": 7.833058210684055e-06, + "loss": 0.2418, + "step": 4739 + }, + { + "epoch": 0.32912095542285796, + "grad_norm": 3.4910570011367748, + "learning_rate": 7.832131578389932e-06, + "loss": 0.4679, + "step": 4740 + }, + { + "epoch": 0.32919039022358004, + "grad_norm": 3.7221313590050853, + "learning_rate": 7.8312048028498e-06, + "loss": 0.4242, + "step": 4741 + }, + { + "epoch": 0.3292598250243022, + "grad_norm": 3.437734492948652, + "learning_rate": 7.830277884110531e-06, + "loss": 0.3457, + "step": 4742 + }, + { + "epoch": 0.3293292598250243, + "grad_norm": 3.187803973487701, + "learning_rate": 7.829350822219008e-06, + "loss": 0.5265, + "step": 4743 + }, + { + "epoch": 0.32939869462574645, + "grad_norm": 4.5191883912061765, + "learning_rate": 7.828423617222125e-06, + "loss": 0.4803, + "step": 4744 + }, + { + "epoch": 0.3294681294264685, + "grad_norm": 3.0449015249764235, + "learning_rate": 7.827496269166774e-06, + "loss": 0.2957, + "step": 4745 + }, + { + "epoch": 0.32953756422719066, + "grad_norm": 4.796420550732095, + "learning_rate": 7.826568778099862e-06, + "loss": 0.4296, + "step": 4746 + }, + { + "epoch": 0.3296069990279128, + "grad_norm": 3.906738951938081, + "learning_rate": 7.825641144068299e-06, + 
"loss": 0.4259, + "step": 4747 + }, + { + "epoch": 0.32967643382863493, + "grad_norm": 3.8564593328589925, + "learning_rate": 7.824713367119006e-06, + "loss": 0.3805, + "step": 4748 + }, + { + "epoch": 0.329745868629357, + "grad_norm": 2.8244329478739427, + "learning_rate": 7.823785447298906e-06, + "loss": 0.2499, + "step": 4749 + }, + { + "epoch": 0.32981530343007914, + "grad_norm": 2.44415300355179, + "learning_rate": 7.822857384654934e-06, + "loss": 0.197, + "step": 4750 + }, + { + "epoch": 0.3298847382308013, + "grad_norm": 4.004220519866786, + "learning_rate": 7.82192917923403e-06, + "loss": 0.5909, + "step": 4751 + }, + { + "epoch": 0.3299541730315234, + "grad_norm": 4.008199758870167, + "learning_rate": 7.821000831083139e-06, + "loss": 0.5831, + "step": 4752 + }, + { + "epoch": 0.33002360783224555, + "grad_norm": 2.6884398092751107, + "learning_rate": 7.82007234024922e-06, + "loss": 0.3029, + "step": 4753 + }, + { + "epoch": 0.3300930426329676, + "grad_norm": 2.9098692925158036, + "learning_rate": 7.819143706779233e-06, + "loss": 0.2855, + "step": 4754 + }, + { + "epoch": 0.33016247743368976, + "grad_norm": 3.649353618292213, + "learning_rate": 7.818214930720149e-06, + "loss": 0.3424, + "step": 4755 + }, + { + "epoch": 0.3302319122344119, + "grad_norm": 4.2660052893192795, + "learning_rate": 7.817286012118941e-06, + "loss": 0.4426, + "step": 4756 + }, + { + "epoch": 0.33030134703513403, + "grad_norm": 4.269290924481466, + "learning_rate": 7.816356951022594e-06, + "loss": 0.55, + "step": 4757 + }, + { + "epoch": 0.3303707818358561, + "grad_norm": 3.588995613667248, + "learning_rate": 7.8154277474781e-06, + "loss": 0.3982, + "step": 4758 + }, + { + "epoch": 0.33044021663657824, + "grad_norm": 3.8191343968506777, + "learning_rate": 7.814498401532456e-06, + "loss": 0.5013, + "step": 4759 + }, + { + "epoch": 0.3305096514373004, + "grad_norm": 3.7814610227534966, + "learning_rate": 7.81356891323267e-06, + "loss": 0.4199, + "step": 4760 + }, + { + "epoch": 0.3305790862380225, + "grad_norm": 2.542225823851452, + "learning_rate": 7.812639282625748e-06, + "loss": 0.2038, + "step": 4761 + }, + { + "epoch": 0.3306485210387446, + "grad_norm": 3.791506059479617, + "learning_rate": 7.811709509758719e-06, + "loss": 0.4281, + "step": 4762 + }, + { + "epoch": 0.3307179558394667, + "grad_norm": 3.4062761318242503, + "learning_rate": 7.810779594678599e-06, + "loss": 0.3439, + "step": 4763 + }, + { + "epoch": 0.33078739064018886, + "grad_norm": 3.5833761857715327, + "learning_rate": 7.809849537432432e-06, + "loss": 0.2861, + "step": 4764 + }, + { + "epoch": 0.330856825440911, + "grad_norm": 3.6414608191704176, + "learning_rate": 7.808919338067254e-06, + "loss": 0.4716, + "step": 4765 + }, + { + "epoch": 0.33092626024163313, + "grad_norm": 2.397444582857908, + "learning_rate": 7.807988996630112e-06, + "loss": 0.1842, + "step": 4766 + }, + { + "epoch": 0.3309956950423552, + "grad_norm": 3.8983879458180914, + "learning_rate": 7.807058513168066e-06, + "loss": 0.2066, + "step": 4767 + }, + { + "epoch": 0.33106512984307734, + "grad_norm": 4.768107241605324, + "learning_rate": 7.806127887728176e-06, + "loss": 0.407, + "step": 4768 + }, + { + "epoch": 0.3311345646437995, + "grad_norm": 3.9123896238914835, + "learning_rate": 7.805197120357515e-06, + "loss": 0.567, + "step": 4769 + }, + { + "epoch": 0.3312039994445216, + "grad_norm": 4.553800062624672, + "learning_rate": 7.804266211103155e-06, + "loss": 0.5677, + "step": 4770 + }, + { + "epoch": 0.3312734342452437, + "grad_norm": 4.992154002882232, + 
"learning_rate": 7.803335160012187e-06, + "loss": 0.7013, + "step": 4771 + }, + { + "epoch": 0.3313428690459658, + "grad_norm": 3.0707185097279734, + "learning_rate": 7.802403967131696e-06, + "loss": 0.3535, + "step": 4772 + }, + { + "epoch": 0.33141230384668796, + "grad_norm": 3.8771915613240666, + "learning_rate": 7.801472632508783e-06, + "loss": 0.5341, + "step": 4773 + }, + { + "epoch": 0.3314817386474101, + "grad_norm": 3.6241602583643573, + "learning_rate": 7.800541156190556e-06, + "loss": 0.4209, + "step": 4774 + }, + { + "epoch": 0.33155117344813223, + "grad_norm": 3.325742162634742, + "learning_rate": 7.799609538224124e-06, + "loss": 0.2486, + "step": 4775 + }, + { + "epoch": 0.3316206082488543, + "grad_norm": 3.5883195483550456, + "learning_rate": 7.798677778656613e-06, + "loss": 0.4583, + "step": 4776 + }, + { + "epoch": 0.33169004304957644, + "grad_norm": 3.620266839793677, + "learning_rate": 7.797745877535144e-06, + "loss": 0.4232, + "step": 4777 + }, + { + "epoch": 0.3317594778502986, + "grad_norm": 5.59170449305756, + "learning_rate": 7.796813834906856e-06, + "loss": 0.5315, + "step": 4778 + }, + { + "epoch": 0.3318289126510207, + "grad_norm": 2.922557851954977, + "learning_rate": 7.795881650818889e-06, + "loss": 0.2239, + "step": 4779 + }, + { + "epoch": 0.3318983474517428, + "grad_norm": 4.9684873010448785, + "learning_rate": 7.79494932531839e-06, + "loss": 0.3773, + "step": 4780 + }, + { + "epoch": 0.3319677822524649, + "grad_norm": 3.2627132718022143, + "learning_rate": 7.794016858452519e-06, + "loss": 0.3741, + "step": 4781 + }, + { + "epoch": 0.33203721705318706, + "grad_norm": 2.707711150782449, + "learning_rate": 7.793084250268435e-06, + "loss": 0.2963, + "step": 4782 + }, + { + "epoch": 0.3321066518539092, + "grad_norm": 3.4103403754598727, + "learning_rate": 7.792151500813311e-06, + "loss": 0.4594, + "step": 4783 + }, + { + "epoch": 0.3321760866546313, + "grad_norm": 4.4959981043701704, + "learning_rate": 7.791218610134324e-06, + "loss": 0.3072, + "step": 4784 + }, + { + "epoch": 0.3322455214553534, + "grad_norm": 3.078185755865892, + "learning_rate": 7.79028557827866e-06, + "loss": 0.3279, + "step": 4785 + }, + { + "epoch": 0.33231495625607554, + "grad_norm": 4.011355266928748, + "learning_rate": 7.789352405293506e-06, + "loss": 0.4368, + "step": 4786 + }, + { + "epoch": 0.3323843910567977, + "grad_norm": 4.371563747043994, + "learning_rate": 7.788419091226065e-06, + "loss": 0.5169, + "step": 4787 + }, + { + "epoch": 0.3324538258575198, + "grad_norm": 3.139082235794632, + "learning_rate": 7.78748563612354e-06, + "loss": 0.3709, + "step": 4788 + }, + { + "epoch": 0.3325232606582419, + "grad_norm": 3.335298063370998, + "learning_rate": 7.786552040033147e-06, + "loss": 0.4315, + "step": 4789 + }, + { + "epoch": 0.332592695458964, + "grad_norm": 3.8218843478523303, + "learning_rate": 7.785618303002106e-06, + "loss": 0.4739, + "step": 4790 + }, + { + "epoch": 0.33266213025968616, + "grad_norm": 3.1891259571647748, + "learning_rate": 7.784684425077641e-06, + "loss": 0.3058, + "step": 4791 + }, + { + "epoch": 0.3327315650604083, + "grad_norm": 3.7348887067457808, + "learning_rate": 7.783750406306992e-06, + "loss": 0.4707, + "step": 4792 + }, + { + "epoch": 0.3328009998611304, + "grad_norm": 4.192637608330812, + "learning_rate": 7.782816246737394e-06, + "loss": 0.5349, + "step": 4793 + }, + { + "epoch": 0.3328704346618525, + "grad_norm": 5.0784938113673785, + "learning_rate": 7.781881946416103e-06, + "loss": 0.6123, + "step": 4794 + }, + { + "epoch": 0.33293986946257464, 
+ "grad_norm": 3.2915472326496795, + "learning_rate": 7.780947505390369e-06, + "loss": 0.4767, + "step": 4795 + }, + { + "epoch": 0.3330093042632968, + "grad_norm": 5.1741757841899085, + "learning_rate": 7.780012923707457e-06, + "loss": 0.6191, + "step": 4796 + }, + { + "epoch": 0.3330787390640189, + "grad_norm": 3.5247167295447985, + "learning_rate": 7.779078201414638e-06, + "loss": 0.6964, + "step": 4797 + }, + { + "epoch": 0.333148173864741, + "grad_norm": 2.914965141077988, + "learning_rate": 7.778143338559186e-06, + "loss": 0.2555, + "step": 4798 + }, + { + "epoch": 0.3332176086654631, + "grad_norm": 2.4532326864591054, + "learning_rate": 7.77720833518839e-06, + "loss": 0.1711, + "step": 4799 + }, + { + "epoch": 0.33328704346618526, + "grad_norm": 4.270107820155478, + "learning_rate": 7.776273191349537e-06, + "loss": 0.4523, + "step": 4800 + }, + { + "epoch": 0.3333564782669074, + "grad_norm": 3.8697806017873653, + "learning_rate": 7.77533790708993e-06, + "loss": 0.6405, + "step": 4801 + }, + { + "epoch": 0.3334259130676295, + "grad_norm": 4.150335586804698, + "learning_rate": 7.774402482456872e-06, + "loss": 0.5049, + "step": 4802 + }, + { + "epoch": 0.3334953478683516, + "grad_norm": 3.8577780744431234, + "learning_rate": 7.773466917497674e-06, + "loss": 0.4201, + "step": 4803 + }, + { + "epoch": 0.33356478266907374, + "grad_norm": 3.824038058402649, + "learning_rate": 7.772531212259657e-06, + "loss": 0.3057, + "step": 4804 + }, + { + "epoch": 0.3336342174697959, + "grad_norm": 4.295519952819993, + "learning_rate": 7.77159536679015e-06, + "loss": 0.7575, + "step": 4805 + }, + { + "epoch": 0.33370365227051796, + "grad_norm": 4.015597405122909, + "learning_rate": 7.770659381136487e-06, + "loss": 0.3624, + "step": 4806 + }, + { + "epoch": 0.3337730870712401, + "grad_norm": 4.987754175448214, + "learning_rate": 7.769723255346006e-06, + "loss": 0.712, + "step": 4807 + }, + { + "epoch": 0.33384252187196223, + "grad_norm": 3.212356078967072, + "learning_rate": 7.768786989466058e-06, + "loss": 0.2764, + "step": 4808 + }, + { + "epoch": 0.33391195667268436, + "grad_norm": 3.858440909073672, + "learning_rate": 7.767850583543997e-06, + "loss": 0.4117, + "step": 4809 + }, + { + "epoch": 0.3339813914734065, + "grad_norm": 4.076307036761403, + "learning_rate": 7.766914037627184e-06, + "loss": 0.4454, + "step": 4810 + }, + { + "epoch": 0.3340508262741286, + "grad_norm": 3.6527313254892415, + "learning_rate": 7.76597735176299e-06, + "loss": 0.3244, + "step": 4811 + }, + { + "epoch": 0.3341202610748507, + "grad_norm": 3.8134843308834148, + "learning_rate": 7.765040525998794e-06, + "loss": 0.4097, + "step": 4812 + }, + { + "epoch": 0.33418969587557285, + "grad_norm": 3.0115712133756074, + "learning_rate": 7.764103560381976e-06, + "loss": 0.3068, + "step": 4813 + }, + { + "epoch": 0.334259130676295, + "grad_norm": 2.7229885429355005, + "learning_rate": 7.763166454959926e-06, + "loss": 0.2451, + "step": 4814 + }, + { + "epoch": 0.33432856547701706, + "grad_norm": 3.8717244842302927, + "learning_rate": 7.762229209780045e-06, + "loss": 0.2722, + "step": 4815 + }, + { + "epoch": 0.3343980002777392, + "grad_norm": 4.080329307689948, + "learning_rate": 7.761291824889736e-06, + "loss": 0.5056, + "step": 4816 + }, + { + "epoch": 0.33446743507846133, + "grad_norm": 3.511412978592732, + "learning_rate": 7.760354300336411e-06, + "loss": 0.2584, + "step": 4817 + }, + { + "epoch": 0.33453686987918346, + "grad_norm": 3.945974759117404, + "learning_rate": 7.759416636167488e-06, + "loss": 0.5245, + "step": 4818 + }, 
+ { + "epoch": 0.33460630467990554, + "grad_norm": 3.4154578367973722, + "learning_rate": 7.758478832430396e-06, + "loss": 0.202, + "step": 4819 + }, + { + "epoch": 0.3346757394806277, + "grad_norm": 3.7451621818757252, + "learning_rate": 7.757540889172565e-06, + "loss": 0.395, + "step": 4820 + }, + { + "epoch": 0.3347451742813498, + "grad_norm": 3.919663514284129, + "learning_rate": 7.756602806441436e-06, + "loss": 0.5694, + "step": 4821 + }, + { + "epoch": 0.33481460908207195, + "grad_norm": 3.3997221928671637, + "learning_rate": 7.755664584284456e-06, + "loss": 0.5812, + "step": 4822 + }, + { + "epoch": 0.3348840438827941, + "grad_norm": 4.7651089585699875, + "learning_rate": 7.75472622274908e-06, + "loss": 0.6252, + "step": 4823 + }, + { + "epoch": 0.33495347868351616, + "grad_norm": 3.671551309405416, + "learning_rate": 7.75378772188277e-06, + "loss": 0.4007, + "step": 4824 + }, + { + "epoch": 0.3350229134842383, + "grad_norm": 3.780443719607119, + "learning_rate": 7.752849081732993e-06, + "loss": 0.495, + "step": 4825 + }, + { + "epoch": 0.33509234828496043, + "grad_norm": 4.809225419934876, + "learning_rate": 7.751910302347223e-06, + "loss": 0.5839, + "step": 4826 + }, + { + "epoch": 0.33516178308568256, + "grad_norm": 3.3365582124055027, + "learning_rate": 7.750971383772946e-06, + "loss": 0.4372, + "step": 4827 + }, + { + "epoch": 0.33523121788640464, + "grad_norm": 4.333372309869496, + "learning_rate": 7.750032326057647e-06, + "loss": 0.4893, + "step": 4828 + }, + { + "epoch": 0.3353006526871268, + "grad_norm": 2.926101144649098, + "learning_rate": 7.749093129248826e-06, + "loss": 0.4039, + "step": 4829 + }, + { + "epoch": 0.3353700874878489, + "grad_norm": 3.298689571862438, + "learning_rate": 7.748153793393986e-06, + "loss": 0.3748, + "step": 4830 + }, + { + "epoch": 0.33543952228857105, + "grad_norm": 3.3854606003846386, + "learning_rate": 7.747214318540635e-06, + "loss": 0.378, + "step": 4831 + }, + { + "epoch": 0.3355089570892932, + "grad_norm": 3.244868505274605, + "learning_rate": 7.746274704736294e-06, + "loss": 0.4066, + "step": 4832 + }, + { + "epoch": 0.33557839189001526, + "grad_norm": 3.9848895295749402, + "learning_rate": 7.745334952028486e-06, + "loss": 0.3815, + "step": 4833 + }, + { + "epoch": 0.3356478266907374, + "grad_norm": 3.2972008927057708, + "learning_rate": 7.744395060464742e-06, + "loss": 0.3845, + "step": 4834 + }, + { + "epoch": 0.33571726149145953, + "grad_norm": 4.46255539624817, + "learning_rate": 7.743455030092602e-06, + "loss": 0.8389, + "step": 4835 + }, + { + "epoch": 0.33578669629218166, + "grad_norm": 4.231880794881921, + "learning_rate": 7.74251486095961e-06, + "loss": 0.6917, + "step": 4836 + }, + { + "epoch": 0.33585613109290374, + "grad_norm": 4.304026469753395, + "learning_rate": 7.74157455311332e-06, + "loss": 0.6669, + "step": 4837 + }, + { + "epoch": 0.3359255658936259, + "grad_norm": 4.387056685524457, + "learning_rate": 7.740634106601292e-06, + "loss": 0.6693, + "step": 4838 + }, + { + "epoch": 0.335995000694348, + "grad_norm": 3.964334597191936, + "learning_rate": 7.739693521471093e-06, + "loss": 0.7126, + "step": 4839 + }, + { + "epoch": 0.33606443549507015, + "grad_norm": 3.4139242945942487, + "learning_rate": 7.738752797770294e-06, + "loss": 0.3177, + "step": 4840 + }, + { + "epoch": 0.3361338702957922, + "grad_norm": 4.1495741046687655, + "learning_rate": 7.737811935546478e-06, + "loss": 0.4242, + "step": 4841 + }, + { + "epoch": 0.33620330509651436, + "grad_norm": 3.027435921769314, + "learning_rate": 7.736870934847233e-06, + 
"loss": 0.2829, + "step": 4842 + }, + { + "epoch": 0.3362727398972365, + "grad_norm": 4.339870815562016, + "learning_rate": 7.735929795720151e-06, + "loss": 0.547, + "step": 4843 + }, + { + "epoch": 0.33634217469795863, + "grad_norm": 3.4442966852245105, + "learning_rate": 7.734988518212838e-06, + "loss": 0.3925, + "step": 4844 + }, + { + "epoch": 0.33641160949868076, + "grad_norm": 3.564799783814901, + "learning_rate": 7.7340471023729e-06, + "loss": 0.4119, + "step": 4845 + }, + { + "epoch": 0.33648104429940284, + "grad_norm": 2.6823870065591033, + "learning_rate": 7.733105548247954e-06, + "loss": 0.2629, + "step": 4846 + }, + { + "epoch": 0.336550479100125, + "grad_norm": 2.8283188997926776, + "learning_rate": 7.732163855885621e-06, + "loss": 0.3227, + "step": 4847 + }, + { + "epoch": 0.3366199139008471, + "grad_norm": 6.76110201181669, + "learning_rate": 7.731222025333534e-06, + "loss": 0.362, + "step": 4848 + }, + { + "epoch": 0.33668934870156925, + "grad_norm": 4.464878096866039, + "learning_rate": 7.730280056639326e-06, + "loss": 0.6647, + "step": 4849 + }, + { + "epoch": 0.3367587835022913, + "grad_norm": 4.255593172137241, + "learning_rate": 7.729337949850641e-06, + "loss": 0.3596, + "step": 4850 + }, + { + "epoch": 0.33682821830301346, + "grad_norm": 4.405526751743151, + "learning_rate": 7.728395705015133e-06, + "loss": 0.7481, + "step": 4851 + }, + { + "epoch": 0.3368976531037356, + "grad_norm": 4.3179207544808955, + "learning_rate": 7.727453322180455e-06, + "loss": 0.4988, + "step": 4852 + }, + { + "epoch": 0.33696708790445773, + "grad_norm": 2.4158164033993463, + "learning_rate": 7.726510801394279e-06, + "loss": 0.1601, + "step": 4853 + }, + { + "epoch": 0.3370365227051798, + "grad_norm": 2.8664915975517427, + "learning_rate": 7.725568142704267e-06, + "loss": 0.3752, + "step": 4854 + }, + { + "epoch": 0.33710595750590194, + "grad_norm": 4.171164121618114, + "learning_rate": 7.724625346158105e-06, + "loss": 0.4222, + "step": 4855 + }, + { + "epoch": 0.3371753923066241, + "grad_norm": 4.223995004071627, + "learning_rate": 7.723682411803476e-06, + "loss": 0.6088, + "step": 4856 + }, + { + "epoch": 0.3372448271073462, + "grad_norm": 3.205020896793061, + "learning_rate": 7.722739339688072e-06, + "loss": 0.323, + "step": 4857 + }, + { + "epoch": 0.33731426190806835, + "grad_norm": 5.280527854329665, + "learning_rate": 7.721796129859594e-06, + "loss": 0.9124, + "step": 4858 + }, + { + "epoch": 0.3373836967087904, + "grad_norm": 3.694301264452014, + "learning_rate": 7.720852782365747e-06, + "loss": 0.3079, + "step": 4859 + }, + { + "epoch": 0.33745313150951256, + "grad_norm": 5.397706369740218, + "learning_rate": 7.719909297254244e-06, + "loss": 0.5943, + "step": 4860 + }, + { + "epoch": 0.3375225663102347, + "grad_norm": 3.546554630183329, + "learning_rate": 7.718965674572808e-06, + "loss": 0.2817, + "step": 4861 + }, + { + "epoch": 0.33759200111095683, + "grad_norm": 3.9138020027590827, + "learning_rate": 7.718021914369167e-06, + "loss": 0.578, + "step": 4862 + }, + { + "epoch": 0.3376614359116789, + "grad_norm": 3.6946986227486502, + "learning_rate": 7.71707801669105e-06, + "loss": 0.4071, + "step": 4863 + }, + { + "epoch": 0.33773087071240104, + "grad_norm": 4.188903698589989, + "learning_rate": 7.716133981586202e-06, + "loss": 0.6166, + "step": 4864 + }, + { + "epoch": 0.3378003055131232, + "grad_norm": 3.056632682117543, + "learning_rate": 7.715189809102372e-06, + "loss": 0.4631, + "step": 4865 + }, + { + "epoch": 0.3378697403138453, + "grad_norm": 4.77857611494246, + 
"learning_rate": 7.714245499287312e-06, + "loss": 0.583, + "step": 4866 + }, + { + "epoch": 0.33793917511456745, + "grad_norm": 4.429316357593643, + "learning_rate": 7.713301052188787e-06, + "loss": 0.4545, + "step": 4867 + }, + { + "epoch": 0.3380086099152895, + "grad_norm": 3.1279844753637747, + "learning_rate": 7.712356467854565e-06, + "loss": 0.3131, + "step": 4868 + }, + { + "epoch": 0.33807804471601166, + "grad_norm": 4.283684134010977, + "learning_rate": 7.711411746332423e-06, + "loss": 0.4626, + "step": 4869 + }, + { + "epoch": 0.3381474795167338, + "grad_norm": 4.719123702265057, + "learning_rate": 7.710466887670141e-06, + "loss": 0.7056, + "step": 4870 + }, + { + "epoch": 0.33821691431745593, + "grad_norm": 4.072975065691144, + "learning_rate": 7.709521891915513e-06, + "loss": 0.3944, + "step": 4871 + }, + { + "epoch": 0.338286349118178, + "grad_norm": 3.6388609438877166, + "learning_rate": 7.708576759116333e-06, + "loss": 0.4477, + "step": 4872 + }, + { + "epoch": 0.33835578391890014, + "grad_norm": 4.384558956802952, + "learning_rate": 7.707631489320407e-06, + "loss": 0.6101, + "step": 4873 + }, + { + "epoch": 0.3384252187196223, + "grad_norm": 3.2073475885225586, + "learning_rate": 7.706686082575541e-06, + "loss": 0.3848, + "step": 4874 + }, + { + "epoch": 0.3384946535203444, + "grad_norm": 4.954920602307715, + "learning_rate": 7.705740538929559e-06, + "loss": 0.6687, + "step": 4875 + }, + { + "epoch": 0.3385640883210665, + "grad_norm": 26.01129372291356, + "learning_rate": 7.70479485843028e-06, + "loss": 0.4851, + "step": 4876 + }, + { + "epoch": 0.3386335231217886, + "grad_norm": 3.6228150558132475, + "learning_rate": 7.703849041125538e-06, + "loss": 0.4666, + "step": 4877 + }, + { + "epoch": 0.33870295792251076, + "grad_norm": 4.280092026607003, + "learning_rate": 7.702903087063171e-06, + "loss": 0.7155, + "step": 4878 + }, + { + "epoch": 0.3387723927232329, + "grad_norm": 4.969199188899165, + "learning_rate": 7.701956996291025e-06, + "loss": 0.7843, + "step": 4879 + }, + { + "epoch": 0.33884182752395503, + "grad_norm": 3.9546061084672504, + "learning_rate": 7.701010768856951e-06, + "loss": 0.3964, + "step": 4880 + }, + { + "epoch": 0.3389112623246771, + "grad_norm": 3.0491401168525485, + "learning_rate": 7.700064404808812e-06, + "loss": 0.3486, + "step": 4881 + }, + { + "epoch": 0.33898069712539924, + "grad_norm": 3.2153289763417376, + "learning_rate": 7.699117904194466e-06, + "loss": 0.2309, + "step": 4882 + }, + { + "epoch": 0.3390501319261214, + "grad_norm": 3.9754318015786776, + "learning_rate": 7.698171267061793e-06, + "loss": 0.5292, + "step": 4883 + }, + { + "epoch": 0.3391195667268435, + "grad_norm": 4.320458148405599, + "learning_rate": 7.69722449345867e-06, + "loss": 0.5635, + "step": 4884 + }, + { + "epoch": 0.3391890015275656, + "grad_norm": 4.1237640758285234, + "learning_rate": 7.696277583432985e-06, + "loss": 0.7333, + "step": 4885 + }, + { + "epoch": 0.3392584363282877, + "grad_norm": 4.557901532739388, + "learning_rate": 7.695330537032629e-06, + "loss": 0.6619, + "step": 4886 + }, + { + "epoch": 0.33932787112900986, + "grad_norm": 3.0344752806553577, + "learning_rate": 7.694383354305505e-06, + "loss": 0.2528, + "step": 4887 + }, + { + "epoch": 0.339397305929732, + "grad_norm": 3.364559343118975, + "learning_rate": 7.693436035299522e-06, + "loss": 0.2991, + "step": 4888 + }, + { + "epoch": 0.33946674073045413, + "grad_norm": 2.778081617168606, + "learning_rate": 7.692488580062589e-06, + "loss": 0.2362, + "step": 4889 + }, + { + "epoch": 0.3395361755311762, 
+ "grad_norm": 4.1799237534769995, + "learning_rate": 7.691540988642635e-06, + "loss": 0.4254, + "step": 4890 + }, + { + "epoch": 0.33960561033189834, + "grad_norm": 3.5868929205412288, + "learning_rate": 7.690593261087579e-06, + "loss": 0.3424, + "step": 4891 + }, + { + "epoch": 0.3396750451326205, + "grad_norm": 4.652041887723486, + "learning_rate": 7.689645397445362e-06, + "loss": 0.6067, + "step": 4892 + }, + { + "epoch": 0.3397444799333426, + "grad_norm": 4.096387104685421, + "learning_rate": 7.688697397763924e-06, + "loss": 0.5055, + "step": 4893 + }, + { + "epoch": 0.3398139147340647, + "grad_norm": 2.8088127192920456, + "learning_rate": 7.687749262091215e-06, + "loss": 0.2793, + "step": 4894 + }, + { + "epoch": 0.3398833495347868, + "grad_norm": 4.007960699932023, + "learning_rate": 7.686800990475189e-06, + "loss": 0.6868, + "step": 4895 + }, + { + "epoch": 0.33995278433550896, + "grad_norm": 3.373245232530958, + "learning_rate": 7.685852582963808e-06, + "loss": 0.255, + "step": 4896 + }, + { + "epoch": 0.3400222191362311, + "grad_norm": 4.2758864730429345, + "learning_rate": 7.684904039605046e-06, + "loss": 0.4856, + "step": 4897 + }, + { + "epoch": 0.3400916539369532, + "grad_norm": 3.686511076928021, + "learning_rate": 7.683955360446873e-06, + "loss": 0.4835, + "step": 4898 + }, + { + "epoch": 0.3401610887376753, + "grad_norm": 5.084192417096578, + "learning_rate": 7.683006545537274e-06, + "loss": 0.6442, + "step": 4899 + }, + { + "epoch": 0.34023052353839744, + "grad_norm": 4.834501091308603, + "learning_rate": 7.682057594924244e-06, + "loss": 0.7456, + "step": 4900 + }, + { + "epoch": 0.3402999583391196, + "grad_norm": 3.2085964702561247, + "learning_rate": 7.681108508655772e-06, + "loss": 0.2815, + "step": 4901 + }, + { + "epoch": 0.3403693931398417, + "grad_norm": 5.140670939533525, + "learning_rate": 7.680159286779866e-06, + "loss": 0.9264, + "step": 4902 + }, + { + "epoch": 0.3404388279405638, + "grad_norm": 3.8884503924099363, + "learning_rate": 7.67920992934454e-06, + "loss": 0.5157, + "step": 4903 + }, + { + "epoch": 0.3405082627412859, + "grad_norm": 3.666289792784984, + "learning_rate": 7.678260436397805e-06, + "loss": 0.4411, + "step": 4904 + }, + { + "epoch": 0.34057769754200806, + "grad_norm": 3.6330060998524103, + "learning_rate": 7.677310807987685e-06, + "loss": 0.4306, + "step": 4905 + }, + { + "epoch": 0.3406471323427302, + "grad_norm": 2.1483564135560993, + "learning_rate": 7.676361044162217e-06, + "loss": 0.141, + "step": 4906 + }, + { + "epoch": 0.3407165671434523, + "grad_norm": 4.252595432998503, + "learning_rate": 7.675411144969436e-06, + "loss": 0.591, + "step": 4907 + }, + { + "epoch": 0.3407860019441744, + "grad_norm": 4.761579563032916, + "learning_rate": 7.674461110457385e-06, + "loss": 0.6819, + "step": 4908 + }, + { + "epoch": 0.34085543674489654, + "grad_norm": 3.286738879183506, + "learning_rate": 7.673510940674118e-06, + "loss": 0.3453, + "step": 4909 + }, + { + "epoch": 0.3409248715456187, + "grad_norm": 4.109525049249137, + "learning_rate": 7.672560635667694e-06, + "loss": 0.4241, + "step": 4910 + }, + { + "epoch": 0.34099430634634076, + "grad_norm": 3.7447246825401264, + "learning_rate": 7.671610195486177e-06, + "loss": 0.3232, + "step": 4911 + }, + { + "epoch": 0.3410637411470629, + "grad_norm": 4.146407333780971, + "learning_rate": 7.670659620177638e-06, + "loss": 0.5391, + "step": 4912 + }, + { + "epoch": 0.34113317594778503, + "grad_norm": 3.648471926251393, + "learning_rate": 7.66970890979016e-06, + "loss": 0.2495, + "step": 4913 + }, + 
{ + "epoch": 0.34120261074850716, + "grad_norm": 4.485083472890363, + "learning_rate": 7.668758064371824e-06, + "loss": 0.4666, + "step": 4914 + }, + { + "epoch": 0.3412720455492293, + "grad_norm": 4.238565405560386, + "learning_rate": 7.667807083970727e-06, + "loss": 0.4392, + "step": 4915 + }, + { + "epoch": 0.3413414803499514, + "grad_norm": 4.671663469781545, + "learning_rate": 7.666855968634964e-06, + "loss": 0.5653, + "step": 4916 + }, + { + "epoch": 0.3414109151506735, + "grad_norm": 4.11868155642245, + "learning_rate": 7.665904718412646e-06, + "loss": 0.579, + "step": 4917 + }, + { + "epoch": 0.34148034995139565, + "grad_norm": 4.192327929046221, + "learning_rate": 7.664953333351882e-06, + "loss": 0.2979, + "step": 4918 + }, + { + "epoch": 0.3415497847521178, + "grad_norm": 3.5876036510924116, + "learning_rate": 7.664001813500794e-06, + "loss": 0.5838, + "step": 4919 + }, + { + "epoch": 0.34161921955283986, + "grad_norm": 3.9145969710148454, + "learning_rate": 7.66305015890751e-06, + "loss": 0.613, + "step": 4920 + }, + { + "epoch": 0.341688654353562, + "grad_norm": 4.9276723599203756, + "learning_rate": 7.662098369620164e-06, + "loss": 0.5713, + "step": 4921 + }, + { + "epoch": 0.34175808915428413, + "grad_norm": 3.968461559555649, + "learning_rate": 7.661146445686893e-06, + "loss": 0.6693, + "step": 4922 + }, + { + "epoch": 0.34182752395500626, + "grad_norm": 3.9380937338729782, + "learning_rate": 7.660194387155846e-06, + "loss": 0.4176, + "step": 4923 + }, + { + "epoch": 0.3418969587557284, + "grad_norm": 2.9929718911102867, + "learning_rate": 7.659242194075178e-06, + "loss": 0.2875, + "step": 4924 + }, + { + "epoch": 0.3419663935564505, + "grad_norm": 6.360276240506255, + "learning_rate": 7.658289866493047e-06, + "loss": 0.604, + "step": 4925 + }, + { + "epoch": 0.3420358283571726, + "grad_norm": 3.9855764884951603, + "learning_rate": 7.657337404457626e-06, + "loss": 0.5393, + "step": 4926 + }, + { + "epoch": 0.34210526315789475, + "grad_norm": 4.775029293113738, + "learning_rate": 7.656384808017083e-06, + "loss": 0.7523, + "step": 4927 + }, + { + "epoch": 0.3421746979586169, + "grad_norm": 3.2276019636249687, + "learning_rate": 7.655432077219602e-06, + "loss": 0.438, + "step": 4928 + }, + { + "epoch": 0.34224413275933896, + "grad_norm": 2.929945487154513, + "learning_rate": 7.654479212113373e-06, + "loss": 0.2248, + "step": 4929 + }, + { + "epoch": 0.3423135675600611, + "grad_norm": 3.0992219229547033, + "learning_rate": 7.653526212746589e-06, + "loss": 0.227, + "step": 4930 + }, + { + "epoch": 0.34238300236078323, + "grad_norm": 3.380113063638498, + "learning_rate": 7.652573079167453e-06, + "loss": 0.466, + "step": 4931 + }, + { + "epoch": 0.34245243716150536, + "grad_norm": 3.4433183463704102, + "learning_rate": 7.65161981142417e-06, + "loss": 0.4475, + "step": 4932 + }, + { + "epoch": 0.34252187196222744, + "grad_norm": 3.166585173445886, + "learning_rate": 7.650666409564958e-06, + "loss": 0.2795, + "step": 4933 + }, + { + "epoch": 0.3425913067629496, + "grad_norm": 3.271855559538518, + "learning_rate": 7.649712873638039e-06, + "loss": 0.3896, + "step": 4934 + }, + { + "epoch": 0.3426607415636717, + "grad_norm": 4.860060135648331, + "learning_rate": 7.64875920369164e-06, + "loss": 0.6546, + "step": 4935 + }, + { + "epoch": 0.34273017636439385, + "grad_norm": 3.0421181789493708, + "learning_rate": 7.647805399774e-06, + "loss": 0.3974, + "step": 4936 + }, + { + "epoch": 0.342799611165116, + "grad_norm": 4.166106379730964, + "learning_rate": 7.646851461933357e-06, + "loss": 
0.6803, + "step": 4937 + }, + { + "epoch": 0.34286904596583806, + "grad_norm": 4.351358842219458, + "learning_rate": 7.645897390217964e-06, + "loss": 0.3591, + "step": 4938 + }, + { + "epoch": 0.3429384807665602, + "grad_norm": 3.7871030000973946, + "learning_rate": 7.644943184676074e-06, + "loss": 0.4997, + "step": 4939 + }, + { + "epoch": 0.34300791556728233, + "grad_norm": 4.31251538164162, + "learning_rate": 7.643988845355951e-06, + "loss": 0.6772, + "step": 4940 + }, + { + "epoch": 0.34307735036800446, + "grad_norm": 2.657027498019386, + "learning_rate": 7.643034372305864e-06, + "loss": 0.266, + "step": 4941 + }, + { + "epoch": 0.34314678516872654, + "grad_norm": 3.3323060956473114, + "learning_rate": 7.642079765574089e-06, + "loss": 0.3742, + "step": 4942 + }, + { + "epoch": 0.3432162199694487, + "grad_norm": 3.377416305664132, + "learning_rate": 7.641125025208909e-06, + "loss": 0.4202, + "step": 4943 + }, + { + "epoch": 0.3432856547701708, + "grad_norm": 2.924600904250069, + "learning_rate": 7.640170151258614e-06, + "loss": 0.2127, + "step": 4944 + }, + { + "epoch": 0.34335508957089295, + "grad_norm": 4.4982830758182155, + "learning_rate": 7.6392151437715e-06, + "loss": 0.8252, + "step": 4945 + }, + { + "epoch": 0.3434245243716151, + "grad_norm": 3.5556512517355694, + "learning_rate": 7.638260002795871e-06, + "loss": 0.383, + "step": 4946 + }, + { + "epoch": 0.34349395917233716, + "grad_norm": 4.193044201234624, + "learning_rate": 7.637304728380036e-06, + "loss": 0.5593, + "step": 4947 + }, + { + "epoch": 0.3435633939730593, + "grad_norm": 4.326857867834154, + "learning_rate": 7.636349320572313e-06, + "loss": 0.7883, + "step": 4948 + }, + { + "epoch": 0.34363282877378143, + "grad_norm": 4.667291806371995, + "learning_rate": 7.635393779421025e-06, + "loss": 0.7982, + "step": 4949 + }, + { + "epoch": 0.34370226357450356, + "grad_norm": 3.645378722455744, + "learning_rate": 7.634438104974499e-06, + "loss": 0.4164, + "step": 4950 + }, + { + "epoch": 0.34377169837522564, + "grad_norm": 4.403122593060269, + "learning_rate": 7.633482297281078e-06, + "loss": 0.48, + "step": 4951 + }, + { + "epoch": 0.3438411331759478, + "grad_norm": 3.4079372096549374, + "learning_rate": 7.6325263563891e-06, + "loss": 0.2931, + "step": 4952 + }, + { + "epoch": 0.3439105679766699, + "grad_norm": 4.853677766783322, + "learning_rate": 7.631570282346918e-06, + "loss": 0.7206, + "step": 4953 + }, + { + "epoch": 0.34398000277739205, + "grad_norm": 3.4232188042669653, + "learning_rate": 7.63061407520289e-06, + "loss": 0.454, + "step": 4954 + }, + { + "epoch": 0.3440494375781141, + "grad_norm": 3.2896641669605104, + "learning_rate": 7.629657735005381e-06, + "loss": 0.2678, + "step": 4955 + }, + { + "epoch": 0.34411887237883626, + "grad_norm": 4.090866257333853, + "learning_rate": 7.628701261802756e-06, + "loss": 0.4528, + "step": 4956 + }, + { + "epoch": 0.3441883071795584, + "grad_norm": 3.860838640162453, + "learning_rate": 7.627744655643396e-06, + "loss": 0.527, + "step": 4957 + }, + { + "epoch": 0.34425774198028053, + "grad_norm": 4.2395130551207565, + "learning_rate": 7.626787916575686e-06, + "loss": 0.5153, + "step": 4958 + }, + { + "epoch": 0.34432717678100266, + "grad_norm": 2.832207217493252, + "learning_rate": 7.625831044648014e-06, + "loss": 0.3489, + "step": 4959 + }, + { + "epoch": 0.34439661158172474, + "grad_norm": 5.1999940724480735, + "learning_rate": 7.624874039908781e-06, + "loss": 0.578, + "step": 4960 + }, + { + "epoch": 0.3444660463824469, + "grad_norm": 4.435956837479547, + "learning_rate": 
7.623916902406388e-06, + "loss": 0.7175, + "step": 4961 + }, + { + "epoch": 0.344535481183169, + "grad_norm": 4.744054261416868, + "learning_rate": 7.622959632189248e-06, + "loss": 0.7576, + "step": 4962 + }, + { + "epoch": 0.34460491598389115, + "grad_norm": 3.9423970598658853, + "learning_rate": 7.6220022293057784e-06, + "loss": 0.4735, + "step": 4963 + }, + { + "epoch": 0.3446743507846132, + "grad_norm": 4.504747855627425, + "learning_rate": 7.6210446938044016e-06, + "loss": 0.7768, + "step": 4964 + }, + { + "epoch": 0.34474378558533536, + "grad_norm": 4.186346682172334, + "learning_rate": 7.620087025733552e-06, + "loss": 0.5777, + "step": 4965 + }, + { + "epoch": 0.3448132203860575, + "grad_norm": 3.3698277111604957, + "learning_rate": 7.619129225141664e-06, + "loss": 0.4934, + "step": 4966 + }, + { + "epoch": 0.34488265518677963, + "grad_norm": 3.4966782911616705, + "learning_rate": 7.618171292077183e-06, + "loss": 0.3665, + "step": 4967 + }, + { + "epoch": 0.3449520899875017, + "grad_norm": 4.033482080154214, + "learning_rate": 7.617213226588561e-06, + "loss": 0.7563, + "step": 4968 + }, + { + "epoch": 0.34502152478822384, + "grad_norm": 3.298472226325857, + "learning_rate": 7.6162550287242565e-06, + "loss": 0.4609, + "step": 4969 + }, + { + "epoch": 0.345090959588946, + "grad_norm": 2.6373381605452764, + "learning_rate": 7.615296698532732e-06, + "loss": 0.298, + "step": 4970 + }, + { + "epoch": 0.3451603943896681, + "grad_norm": 3.358468531528095, + "learning_rate": 7.614338236062462e-06, + "loss": 0.4216, + "step": 4971 + }, + { + "epoch": 0.34522982919039025, + "grad_norm": 3.5707197631501284, + "learning_rate": 7.613379641361921e-06, + "loss": 0.3622, + "step": 4972 + }, + { + "epoch": 0.3452992639911123, + "grad_norm": 4.567646691117784, + "learning_rate": 7.612420914479595e-06, + "loss": 0.5992, + "step": 4973 + }, + { + "epoch": 0.34536869879183446, + "grad_norm": 4.270655759432465, + "learning_rate": 7.611462055463974e-06, + "loss": 0.5079, + "step": 4974 + }, + { + "epoch": 0.3454381335925566, + "grad_norm": 3.284226924917533, + "learning_rate": 7.610503064363559e-06, + "loss": 0.3587, + "step": 4975 + }, + { + "epoch": 0.34550756839327873, + "grad_norm": 4.000699191645003, + "learning_rate": 7.609543941226852e-06, + "loss": 0.529, + "step": 4976 + }, + { + "epoch": 0.3455770031940008, + "grad_norm": 2.919561897917775, + "learning_rate": 7.608584686102367e-06, + "loss": 0.1985, + "step": 4977 + }, + { + "epoch": 0.34564643799472294, + "grad_norm": 3.376862122032198, + "learning_rate": 7.607625299038619e-06, + "loss": 0.462, + "step": 4978 + }, + { + "epoch": 0.3457158727954451, + "grad_norm": 4.0853830925223145, + "learning_rate": 7.606665780084134e-06, + "loss": 0.3814, + "step": 4979 + }, + { + "epoch": 0.3457853075961672, + "grad_norm": 3.2076017186018198, + "learning_rate": 7.6057061292874436e-06, + "loss": 0.3978, + "step": 4980 + }, + { + "epoch": 0.34585474239688935, + "grad_norm": 2.702879077998113, + "learning_rate": 7.604746346697087e-06, + "loss": 0.3163, + "step": 4981 + }, + { + "epoch": 0.3459241771976114, + "grad_norm": 3.486118826670658, + "learning_rate": 7.603786432361606e-06, + "loss": 0.5266, + "step": 4982 + }, + { + "epoch": 0.34599361199833356, + "grad_norm": 3.155462987189284, + "learning_rate": 7.6028263863295535e-06, + "loss": 0.2953, + "step": 4983 + }, + { + "epoch": 0.3460630467990557, + "grad_norm": 5.789916715940687, + "learning_rate": 7.601866208649489e-06, + "loss": 1.0424, + "step": 4984 + }, + { + "epoch": 0.34613248159977783, + 
"grad_norm": 2.326342116146704, + "learning_rate": 7.600905899369976e-06, + "loss": 0.1081, + "step": 4985 + }, + { + "epoch": 0.3462019164004999, + "grad_norm": 3.308308103644391, + "learning_rate": 7.599945458539584e-06, + "loss": 0.4041, + "step": 4986 + }, + { + "epoch": 0.34627135120122204, + "grad_norm": 9.524943640380164, + "learning_rate": 7.598984886206895e-06, + "loss": 0.4442, + "step": 4987 + }, + { + "epoch": 0.3463407860019442, + "grad_norm": 3.2353082029263613, + "learning_rate": 7.598024182420491e-06, + "loss": 0.454, + "step": 4988 + }, + { + "epoch": 0.3464102208026663, + "grad_norm": 4.20705455551874, + "learning_rate": 7.597063347228961e-06, + "loss": 0.7184, + "step": 4989 + }, + { + "epoch": 0.3464796556033884, + "grad_norm": 4.266489095068036, + "learning_rate": 7.5961023806809075e-06, + "loss": 0.7042, + "step": 4990 + }, + { + "epoch": 0.3465490904041105, + "grad_norm": 3.9716891467774142, + "learning_rate": 7.595141282824934e-06, + "loss": 0.3931, + "step": 4991 + }, + { + "epoch": 0.34661852520483266, + "grad_norm": 3.8347544817365398, + "learning_rate": 7.59418005370965e-06, + "loss": 0.3965, + "step": 4992 + }, + { + "epoch": 0.3466879600055548, + "grad_norm": 3.516202644540482, + "learning_rate": 7.593218693383675e-06, + "loss": 0.378, + "step": 4993 + }, + { + "epoch": 0.34675739480627693, + "grad_norm": 4.101569105223676, + "learning_rate": 7.592257201895633e-06, + "loss": 0.4947, + "step": 4994 + }, + { + "epoch": 0.346826829606999, + "grad_norm": 3.203913055585752, + "learning_rate": 7.591295579294153e-06, + "loss": 0.2467, + "step": 4995 + }, + { + "epoch": 0.34689626440772114, + "grad_norm": 3.185677118317719, + "learning_rate": 7.590333825627878e-06, + "loss": 0.4108, + "step": 4996 + }, + { + "epoch": 0.3469656992084433, + "grad_norm": 3.1309434348068663, + "learning_rate": 7.589371940945449e-06, + "loss": 0.2656, + "step": 4997 + }, + { + "epoch": 0.3470351340091654, + "grad_norm": 3.9095524796559755, + "learning_rate": 7.588409925295515e-06, + "loss": 0.488, + "step": 4998 + }, + { + "epoch": 0.3471045688098875, + "grad_norm": 3.6282229472152787, + "learning_rate": 7.587447778726737e-06, + "loss": 0.3676, + "step": 4999 + }, + { + "epoch": 0.3471740036106096, + "grad_norm": 3.087622169003717, + "learning_rate": 7.586485501287779e-06, + "loss": 0.2439, + "step": 5000 + }, + { + "epoch": 0.34724343841133176, + "grad_norm": 4.809241947674745, + "learning_rate": 7.58552309302731e-06, + "loss": 0.4697, + "step": 5001 + }, + { + "epoch": 0.3473128732120539, + "grad_norm": 3.6147559490717263, + "learning_rate": 7.584560553994009e-06, + "loss": 0.6059, + "step": 5002 + }, + { + "epoch": 0.34738230801277603, + "grad_norm": 3.3760245492471146, + "learning_rate": 7.583597884236561e-06, + "loss": 0.3014, + "step": 5003 + }, + { + "epoch": 0.3474517428134981, + "grad_norm": 3.679397960901014, + "learning_rate": 7.582635083803656e-06, + "loss": 0.4794, + "step": 5004 + }, + { + "epoch": 0.34752117761422024, + "grad_norm": 4.145391730854627, + "learning_rate": 7.5816721527439875e-06, + "loss": 0.5993, + "step": 5005 + }, + { + "epoch": 0.3475906124149424, + "grad_norm": 6.34470076593736, + "learning_rate": 7.5807090911062655e-06, + "loss": 0.9443, + "step": 5006 + }, + { + "epoch": 0.3476600472156645, + "grad_norm": 3.2650439849410966, + "learning_rate": 7.579745898939197e-06, + "loss": 0.3352, + "step": 5007 + }, + { + "epoch": 0.3477294820163866, + "grad_norm": 3.1197404095547934, + "learning_rate": 7.578782576291501e-06, + "loss": 0.3294, + "step": 5008 + }, + { 
+ "epoch": 0.3477989168171087, + "grad_norm": 4.5173950421168385, + "learning_rate": 7.577819123211898e-06, + "loss": 0.5264, + "step": 5009 + }, + { + "epoch": 0.34786835161783086, + "grad_norm": 3.9040792945982354, + "learning_rate": 7.5768555397491236e-06, + "loss": 0.4207, + "step": 5010 + }, + { + "epoch": 0.347937786418553, + "grad_norm": 3.470578091050898, + "learning_rate": 7.575891825951911e-06, + "loss": 0.4245, + "step": 5011 + }, + { + "epoch": 0.3480072212192751, + "grad_norm": 2.7550899871253955, + "learning_rate": 7.574927981869005e-06, + "loss": 0.2478, + "step": 5012 + }, + { + "epoch": 0.3480766560199972, + "grad_norm": 3.8536817438992768, + "learning_rate": 7.5739640075491546e-06, + "loss": 0.6882, + "step": 5013 + }, + { + "epoch": 0.34814609082071934, + "grad_norm": 3.7317183606845536, + "learning_rate": 7.572999903041117e-06, + "loss": 0.4475, + "step": 5014 + }, + { + "epoch": 0.3482155256214415, + "grad_norm": 3.3051209417986795, + "learning_rate": 7.572035668393657e-06, + "loss": 0.3982, + "step": 5015 + }, + { + "epoch": 0.3482849604221636, + "grad_norm": 3.3081172837977055, + "learning_rate": 7.5710713036555425e-06, + "loss": 0.5134, + "step": 5016 + }, + { + "epoch": 0.3483543952228857, + "grad_norm": 3.9990490961232554, + "learning_rate": 7.570106808875552e-06, + "loss": 0.5308, + "step": 5017 + }, + { + "epoch": 0.34842383002360783, + "grad_norm": 3.7941985634494055, + "learning_rate": 7.5691421841024645e-06, + "loss": 0.5223, + "step": 5018 + }, + { + "epoch": 0.34849326482432996, + "grad_norm": 3.4464714701542642, + "learning_rate": 7.5681774293850756e-06, + "loss": 0.471, + "step": 5019 + }, + { + "epoch": 0.3485626996250521, + "grad_norm": 3.6754481072251006, + "learning_rate": 7.567212544772178e-06, + "loss": 0.3892, + "step": 5020 + }, + { + "epoch": 0.3486321344257742, + "grad_norm": 3.9245735878475654, + "learning_rate": 7.566247530312574e-06, + "loss": 0.4625, + "step": 5021 + }, + { + "epoch": 0.3487015692264963, + "grad_norm": 3.691761014546558, + "learning_rate": 7.565282386055075e-06, + "loss": 0.5374, + "step": 5022 + }, + { + "epoch": 0.34877100402721845, + "grad_norm": 3.844108036938108, + "learning_rate": 7.5643171120484935e-06, + "loss": 0.5432, + "step": 5023 + }, + { + "epoch": 0.3488404388279406, + "grad_norm": 3.5590915042686344, + "learning_rate": 7.563351708341657e-06, + "loss": 0.6736, + "step": 5024 + }, + { + "epoch": 0.34890987362866266, + "grad_norm": 3.6418640736255856, + "learning_rate": 7.562386174983389e-06, + "loss": 0.4483, + "step": 5025 + }, + { + "epoch": 0.3489793084293848, + "grad_norm": 3.810707503487819, + "learning_rate": 7.56142051202253e-06, + "loss": 0.6407, + "step": 5026 + }, + { + "epoch": 0.34904874323010693, + "grad_norm": 5.125669805091938, + "learning_rate": 7.560454719507918e-06, + "loss": 0.6929, + "step": 5027 + }, + { + "epoch": 0.34911817803082906, + "grad_norm": 3.028233717407423, + "learning_rate": 7.559488797488404e-06, + "loss": 0.2888, + "step": 5028 + }, + { + "epoch": 0.3491876128315512, + "grad_norm": 3.728650296358148, + "learning_rate": 7.558522746012845e-06, + "loss": 0.4351, + "step": 5029 + }, + { + "epoch": 0.3492570476322733, + "grad_norm": 3.227338174062697, + "learning_rate": 7.5575565651300985e-06, + "loss": 0.5075, + "step": 5030 + }, + { + "epoch": 0.3493264824329954, + "grad_norm": 3.657684218138805, + "learning_rate": 7.556590254889036e-06, + "loss": 0.4699, + "step": 5031 + }, + { + "epoch": 0.34939591723371755, + "grad_norm": 3.678752571902783, + "learning_rate": 
7.555623815338531e-06, + "loss": 0.449, + "step": 5032 + }, + { + "epoch": 0.3494653520344397, + "grad_norm": 3.9535238145747975, + "learning_rate": 7.554657246527466e-06, + "loss": 0.3857, + "step": 5033 + }, + { + "epoch": 0.34953478683516176, + "grad_norm": 2.433561894281368, + "learning_rate": 7.553690548504726e-06, + "loss": 0.242, + "step": 5034 + }, + { + "epoch": 0.3496042216358839, + "grad_norm": 3.589812847724724, + "learning_rate": 7.552723721319211e-06, + "loss": 0.3105, + "step": 5035 + }, + { + "epoch": 0.34967365643660603, + "grad_norm": 3.7792969099608404, + "learning_rate": 7.5517567650198155e-06, + "loss": 0.3774, + "step": 5036 + }, + { + "epoch": 0.34974309123732816, + "grad_norm": 3.4980878278701226, + "learning_rate": 7.550789679655453e-06, + "loss": 0.379, + "step": 5037 + }, + { + "epoch": 0.3498125260380503, + "grad_norm": 3.5502703084386806, + "learning_rate": 7.549822465275034e-06, + "loss": 0.3263, + "step": 5038 + }, + { + "epoch": 0.3498819608387724, + "grad_norm": 4.333475887675362, + "learning_rate": 7.54885512192748e-06, + "loss": 0.5408, + "step": 5039 + }, + { + "epoch": 0.3499513956394945, + "grad_norm": 3.442654049145105, + "learning_rate": 7.547887649661718e-06, + "loss": 0.1839, + "step": 5040 + }, + { + "epoch": 0.35002083044021665, + "grad_norm": 3.1848949796924795, + "learning_rate": 7.546920048526681e-06, + "loss": 0.3377, + "step": 5041 + }, + { + "epoch": 0.3500902652409388, + "grad_norm": 6.289694110036782, + "learning_rate": 7.545952318571312e-06, + "loss": 0.6819, + "step": 5042 + }, + { + "epoch": 0.35015970004166086, + "grad_norm": 3.0939061190239823, + "learning_rate": 7.544984459844553e-06, + "loss": 0.3019, + "step": 5043 + }, + { + "epoch": 0.350229134842383, + "grad_norm": 3.7466443840417316, + "learning_rate": 7.5440164723953625e-06, + "loss": 0.4437, + "step": 5044 + }, + { + "epoch": 0.35029856964310513, + "grad_norm": 5.217260454900235, + "learning_rate": 7.5430483562726964e-06, + "loss": 0.8, + "step": 5045 + }, + { + "epoch": 0.35036800444382726, + "grad_norm": 4.098974076584689, + "learning_rate": 7.542080111525523e-06, + "loss": 0.4995, + "step": 5046 + }, + { + "epoch": 0.35043743924454934, + "grad_norm": 4.532379127037582, + "learning_rate": 7.541111738202813e-06, + "loss": 0.3306, + "step": 5047 + }, + { + "epoch": 0.3505068740452715, + "grad_norm": 3.1755955248933696, + "learning_rate": 7.540143236353548e-06, + "loss": 0.2972, + "step": 5048 + }, + { + "epoch": 0.3505763088459936, + "grad_norm": 3.5783267316309493, + "learning_rate": 7.539174606026711e-06, + "loss": 0.4031, + "step": 5049 + }, + { + "epoch": 0.35064574364671575, + "grad_norm": 2.679469539207239, + "learning_rate": 7.538205847271295e-06, + "loss": 0.1788, + "step": 5050 + }, + { + "epoch": 0.3507151784474379, + "grad_norm": 5.226601990932275, + "learning_rate": 7.5372369601363e-06, + "loss": 0.8277, + "step": 5051 + }, + { + "epoch": 0.35078461324815996, + "grad_norm": 3.3555124295917755, + "learning_rate": 7.536267944670732e-06, + "loss": 0.4122, + "step": 5052 + }, + { + "epoch": 0.3508540480488821, + "grad_norm": 3.96976715454651, + "learning_rate": 7.535298800923601e-06, + "loss": 0.3753, + "step": 5053 + }, + { + "epoch": 0.35092348284960423, + "grad_norm": 3.558249892869726, + "learning_rate": 7.534329528943925e-06, + "loss": 0.3054, + "step": 5054 + }, + { + "epoch": 0.35099291765032636, + "grad_norm": 2.9080808699755023, + "learning_rate": 7.533360128780729e-06, + "loss": 0.3005, + "step": 5055 + }, + { + "epoch": 0.35106235245104844, + "grad_norm": 
2.6411945128578, + "learning_rate": 7.532390600483046e-06, + "loss": 0.1862, + "step": 5056 + }, + { + "epoch": 0.3511317872517706, + "grad_norm": 3.528977537067989, + "learning_rate": 7.531420944099909e-06, + "loss": 0.4028, + "step": 5057 + }, + { + "epoch": 0.3512012220524927, + "grad_norm": 6.73408172114143, + "learning_rate": 7.530451159680369e-06, + "loss": 0.5786, + "step": 5058 + }, + { + "epoch": 0.35127065685321485, + "grad_norm": 3.3743823596867846, + "learning_rate": 7.529481247273469e-06, + "loss": 0.3801, + "step": 5059 + }, + { + "epoch": 0.3513400916539369, + "grad_norm": 3.363252840984427, + "learning_rate": 7.528511206928272e-06, + "loss": 0.3785, + "step": 5060 + }, + { + "epoch": 0.35140952645465906, + "grad_norm": 5.028617899263473, + "learning_rate": 7.527541038693839e-06, + "loss": 0.7898, + "step": 5061 + }, + { + "epoch": 0.3514789612553812, + "grad_norm": 3.8429369821287334, + "learning_rate": 7.52657074261924e-06, + "loss": 0.3131, + "step": 5062 + }, + { + "epoch": 0.35154839605610333, + "grad_norm": 4.255881097598719, + "learning_rate": 7.525600318753552e-06, + "loss": 0.3449, + "step": 5063 + }, + { + "epoch": 0.35161783085682546, + "grad_norm": 5.109844173204817, + "learning_rate": 7.524629767145858e-06, + "loss": 0.6087, + "step": 5064 + }, + { + "epoch": 0.35168726565754754, + "grad_norm": 3.332935811236292, + "learning_rate": 7.523659087845247e-06, + "loss": 0.3957, + "step": 5065 + }, + { + "epoch": 0.3517567004582697, + "grad_norm": 3.849874430905618, + "learning_rate": 7.522688280900814e-06, + "loss": 0.3064, + "step": 5066 + }, + { + "epoch": 0.3518261352589918, + "grad_norm": 3.9627379927505966, + "learning_rate": 7.521717346361663e-06, + "loss": 0.5379, + "step": 5067 + }, + { + "epoch": 0.35189557005971395, + "grad_norm": 4.115828743175035, + "learning_rate": 7.520746284276903e-06, + "loss": 0.6845, + "step": 5068 + }, + { + "epoch": 0.351965004860436, + "grad_norm": 3.4220087497853826, + "learning_rate": 7.51977509469565e-06, + "loss": 0.4625, + "step": 5069 + }, + { + "epoch": 0.35203443966115816, + "grad_norm": 4.2939019877036655, + "learning_rate": 7.518803777667019e-06, + "loss": 0.3672, + "step": 5070 + }, + { + "epoch": 0.3521038744618803, + "grad_norm": 3.707428777977297, + "learning_rate": 7.517832333240148e-06, + "loss": 0.4247, + "step": 5071 + }, + { + "epoch": 0.35217330926260243, + "grad_norm": 4.580632139232148, + "learning_rate": 7.516860761464164e-06, + "loss": 0.5729, + "step": 5072 + }, + { + "epoch": 0.35224274406332456, + "grad_norm": 4.026010416083216, + "learning_rate": 7.515889062388212e-06, + "loss": 0.3215, + "step": 5073 + }, + { + "epoch": 0.35231217886404664, + "grad_norm": 3.9329882551314803, + "learning_rate": 7.514917236061439e-06, + "loss": 0.4484, + "step": 5074 + }, + { + "epoch": 0.3523816136647688, + "grad_norm": 3.9763464010319183, + "learning_rate": 7.513945282532995e-06, + "loss": 0.5524, + "step": 5075 + }, + { + "epoch": 0.3524510484654909, + "grad_norm": 4.432501531128317, + "learning_rate": 7.512973201852045e-06, + "loss": 0.2751, + "step": 5076 + }, + { + "epoch": 0.35252048326621305, + "grad_norm": 2.858450525586634, + "learning_rate": 7.512000994067754e-06, + "loss": 0.4788, + "step": 5077 + }, + { + "epoch": 0.3525899180669351, + "grad_norm": 3.5100143463642364, + "learning_rate": 7.511028659229296e-06, + "loss": 0.6, + "step": 5078 + }, + { + "epoch": 0.35265935286765726, + "grad_norm": 3.5667520858997, + "learning_rate": 7.510056197385849e-06, + "loss": 0.452, + "step": 5079 + }, + { + "epoch": 
0.3527287876683794, + "grad_norm": 4.20181025896451, + "learning_rate": 7.5090836085866e-06, + "loss": 0.5358, + "step": 5080 + }, + { + "epoch": 0.35279822246910153, + "grad_norm": 3.670930227059418, + "learning_rate": 7.508110892880743e-06, + "loss": 0.4909, + "step": 5081 + }, + { + "epoch": 0.3528676572698236, + "grad_norm": 3.7585188611870564, + "learning_rate": 7.507138050317471e-06, + "loss": 0.5334, + "step": 5082 + }, + { + "epoch": 0.35293709207054574, + "grad_norm": 3.6898727863433916, + "learning_rate": 7.506165080945997e-06, + "loss": 0.4755, + "step": 5083 + }, + { + "epoch": 0.3530065268712679, + "grad_norm": 4.801593865012166, + "learning_rate": 7.505191984815528e-06, + "loss": 0.5129, + "step": 5084 + }, + { + "epoch": 0.35307596167199, + "grad_norm": 4.345113521359982, + "learning_rate": 7.504218761975284e-06, + "loss": 0.6738, + "step": 5085 + }, + { + "epoch": 0.35314539647271215, + "grad_norm": 3.05013337366833, + "learning_rate": 7.503245412474487e-06, + "loss": 0.4401, + "step": 5086 + }, + { + "epoch": 0.3532148312734342, + "grad_norm": 3.1992875001686207, + "learning_rate": 7.502271936362371e-06, + "loss": 0.4152, + "step": 5087 + }, + { + "epoch": 0.35328426607415636, + "grad_norm": 3.6865908335844577, + "learning_rate": 7.501298333688171e-06, + "loss": 0.4075, + "step": 5088 + }, + { + "epoch": 0.3533537008748785, + "grad_norm": 3.030499961151148, + "learning_rate": 7.5003246045011325e-06, + "loss": 0.2025, + "step": 5089 + }, + { + "epoch": 0.35342313567560063, + "grad_norm": 2.4092893967900553, + "learning_rate": 7.499350748850504e-06, + "loss": 0.1898, + "step": 5090 + }, + { + "epoch": 0.3534925704763227, + "grad_norm": 4.050137950372166, + "learning_rate": 7.498376766785544e-06, + "loss": 0.31, + "step": 5091 + }, + { + "epoch": 0.35356200527704484, + "grad_norm": 3.7560851289282993, + "learning_rate": 7.497402658355514e-06, + "loss": 0.4685, + "step": 5092 + }, + { + "epoch": 0.353631440077767, + "grad_norm": 4.5650354346721285, + "learning_rate": 7.496428423609683e-06, + "loss": 0.7466, + "step": 5093 + }, + { + "epoch": 0.3537008748784891, + "grad_norm": 4.708974663696075, + "learning_rate": 7.495454062597327e-06, + "loss": 0.6004, + "step": 5094 + }, + { + "epoch": 0.35377030967921125, + "grad_norm": 2.740239467897368, + "learning_rate": 7.49447957536773e-06, + "loss": 0.267, + "step": 5095 + }, + { + "epoch": 0.3538397444799333, + "grad_norm": 3.8750891205585645, + "learning_rate": 7.493504961970176e-06, + "loss": 0.4118, + "step": 5096 + }, + { + "epoch": 0.35390917928065546, + "grad_norm": 3.220071268920196, + "learning_rate": 7.492530222453965e-06, + "loss": 0.4078, + "step": 5097 + }, + { + "epoch": 0.3539786140813776, + "grad_norm": 3.797214305271728, + "learning_rate": 7.491555356868394e-06, + "loss": 0.477, + "step": 5098 + }, + { + "epoch": 0.35404804888209973, + "grad_norm": 3.526919580558146, + "learning_rate": 7.490580365262774e-06, + "loss": 0.3859, + "step": 5099 + }, + { + "epoch": 0.3541174836828218, + "grad_norm": 4.36592115231672, + "learning_rate": 7.489605247686418e-06, + "loss": 0.6633, + "step": 5100 + }, + { + "epoch": 0.35418691848354394, + "grad_norm": 4.306410318277185, + "learning_rate": 7.488630004188644e-06, + "loss": 0.5159, + "step": 5101 + }, + { + "epoch": 0.3542563532842661, + "grad_norm": 4.2094183032909775, + "learning_rate": 7.487654634818779e-06, + "loss": 0.6105, + "step": 5102 + }, + { + "epoch": 0.3543257880849882, + "grad_norm": 3.152924946354207, + "learning_rate": 7.486679139626162e-06, + "loss": 0.4262, + 
"step": 5103 + }, + { + "epoch": 0.3543952228857103, + "grad_norm": 4.873150838666274, + "learning_rate": 7.485703518660125e-06, + "loss": 0.7435, + "step": 5104 + }, + { + "epoch": 0.3544646576864324, + "grad_norm": 3.4442813140711044, + "learning_rate": 7.484727771970018e-06, + "loss": 0.3982, + "step": 5105 + }, + { + "epoch": 0.35453409248715456, + "grad_norm": 3.6019492370140065, + "learning_rate": 7.48375189960519e-06, + "loss": 0.3958, + "step": 5106 + }, + { + "epoch": 0.3546035272878767, + "grad_norm": 3.82644381910013, + "learning_rate": 7.482775901615004e-06, + "loss": 0.4856, + "step": 5107 + }, + { + "epoch": 0.35467296208859883, + "grad_norm": 3.477387646264915, + "learning_rate": 7.48179977804882e-06, + "loss": 0.4396, + "step": 5108 + }, + { + "epoch": 0.3547423968893209, + "grad_norm": 3.51182238002862, + "learning_rate": 7.480823528956014e-06, + "loss": 0.414, + "step": 5109 + }, + { + "epoch": 0.35481183169004304, + "grad_norm": 4.452272184424566, + "learning_rate": 7.4798471543859596e-06, + "loss": 0.681, + "step": 5110 + }, + { + "epoch": 0.3548812664907652, + "grad_norm": 3.946796502272044, + "learning_rate": 7.478870654388042e-06, + "loss": 0.5638, + "step": 5111 + }, + { + "epoch": 0.3549507012914873, + "grad_norm": 3.05091184749042, + "learning_rate": 7.477894029011652e-06, + "loss": 0.3458, + "step": 5112 + }, + { + "epoch": 0.3550201360922094, + "grad_norm": 3.830585148424721, + "learning_rate": 7.476917278306187e-06, + "loss": 0.3243, + "step": 5113 + }, + { + "epoch": 0.3550895708929315, + "grad_norm": 2.44485822516337, + "learning_rate": 7.475940402321047e-06, + "loss": 0.2004, + "step": 5114 + }, + { + "epoch": 0.35515900569365366, + "grad_norm": 3.3391141212712507, + "learning_rate": 7.474963401105645e-06, + "loss": 0.3286, + "step": 5115 + }, + { + "epoch": 0.3552284404943758, + "grad_norm": 3.442540217833721, + "learning_rate": 7.473986274709394e-06, + "loss": 0.4583, + "step": 5116 + }, + { + "epoch": 0.3552978752950979, + "grad_norm": 4.63407256960876, + "learning_rate": 7.473009023181716e-06, + "loss": 0.816, + "step": 5117 + }, + { + "epoch": 0.35536731009582, + "grad_norm": 3.822461600354311, + "learning_rate": 7.472031646572041e-06, + "loss": 0.4705, + "step": 5118 + }, + { + "epoch": 0.35543674489654214, + "grad_norm": 6.352055086655258, + "learning_rate": 7.471054144929803e-06, + "loss": 0.7226, + "step": 5119 + }, + { + "epoch": 0.3555061796972643, + "grad_norm": 3.6755774766232947, + "learning_rate": 7.470076518304442e-06, + "loss": 0.4753, + "step": 5120 + }, + { + "epoch": 0.3555756144979864, + "grad_norm": 3.425734697930106, + "learning_rate": 7.469098766745405e-06, + "loss": 0.336, + "step": 5121 + }, + { + "epoch": 0.3556450492987085, + "grad_norm": 4.181907890672625, + "learning_rate": 7.4681208903021475e-06, + "loss": 0.5783, + "step": 5122 + }, + { + "epoch": 0.35571448409943063, + "grad_norm": 3.6531353706307828, + "learning_rate": 7.467142889024128e-06, + "loss": 0.4971, + "step": 5123 + }, + { + "epoch": 0.35578391890015276, + "grad_norm": 4.7086077798523585, + "learning_rate": 7.466164762960813e-06, + "loss": 0.7279, + "step": 5124 + }, + { + "epoch": 0.3558533537008749, + "grad_norm": 3.575866276327494, + "learning_rate": 7.465186512161673e-06, + "loss": 0.3507, + "step": 5125 + }, + { + "epoch": 0.355922788501597, + "grad_norm": 3.8811157899343454, + "learning_rate": 7.464208136676192e-06, + "loss": 0.3347, + "step": 5126 + }, + { + "epoch": 0.3559922233023191, + "grad_norm": 3.373350801542921, + "learning_rate": 
7.463229636553849e-06, + "loss": 0.3407, + "step": 5127 + }, + { + "epoch": 0.35606165810304125, + "grad_norm": 4.580205091734243, + "learning_rate": 7.46225101184414e-06, + "loss": 0.5791, + "step": 5128 + }, + { + "epoch": 0.3561310929037634, + "grad_norm": 3.679391786235105, + "learning_rate": 7.4612722625965615e-06, + "loss": 0.4474, + "step": 5129 + }, + { + "epoch": 0.3562005277044855, + "grad_norm": 2.927897980835335, + "learning_rate": 7.460293388860616e-06, + "loss": 0.3082, + "step": 5130 + }, + { + "epoch": 0.3562699625052076, + "grad_norm": 3.3387537979858486, + "learning_rate": 7.459314390685815e-06, + "loss": 0.4871, + "step": 5131 + }, + { + "epoch": 0.35633939730592973, + "grad_norm": 3.4735151813449425, + "learning_rate": 7.458335268121676e-06, + "loss": 0.3503, + "step": 5132 + }, + { + "epoch": 0.35640883210665186, + "grad_norm": 3.2819009685628955, + "learning_rate": 7.45735602121772e-06, + "loss": 0.3309, + "step": 5133 + }, + { + "epoch": 0.356478266907374, + "grad_norm": 3.6017808184563407, + "learning_rate": 7.456376650023476e-06, + "loss": 0.4008, + "step": 5134 + }, + { + "epoch": 0.3565477017080961, + "grad_norm": 3.583515084334034, + "learning_rate": 7.4553971545884815e-06, + "loss": 0.4022, + "step": 5135 + }, + { + "epoch": 0.3566171365088182, + "grad_norm": 3.817768644533773, + "learning_rate": 7.454417534962279e-06, + "loss": 0.4465, + "step": 5136 + }, + { + "epoch": 0.35668657130954035, + "grad_norm": 3.4986597516410245, + "learning_rate": 7.453437791194414e-06, + "loss": 0.2979, + "step": 5137 + }, + { + "epoch": 0.3567560061102625, + "grad_norm": 3.076940286097457, + "learning_rate": 7.452457923334441e-06, + "loss": 0.2533, + "step": 5138 + }, + { + "epoch": 0.35682544091098456, + "grad_norm": 5.651024902883033, + "learning_rate": 7.451477931431922e-06, + "loss": 0.6201, + "step": 5139 + }, + { + "epoch": 0.3568948757117067, + "grad_norm": 3.0586800679149917, + "learning_rate": 7.450497815536424e-06, + "loss": 0.425, + "step": 5140 + }, + { + "epoch": 0.35696431051242883, + "grad_norm": 3.752184734875135, + "learning_rate": 7.449517575697517e-06, + "loss": 0.4455, + "step": 5141 + }, + { + "epoch": 0.35703374531315096, + "grad_norm": 3.837818998558299, + "learning_rate": 7.448537211964786e-06, + "loss": 0.4091, + "step": 5142 + }, + { + "epoch": 0.3571031801138731, + "grad_norm": 3.8101585730952463, + "learning_rate": 7.44755672438781e-06, + "loss": 0.4156, + "step": 5143 + }, + { + "epoch": 0.3571726149145952, + "grad_norm": 3.0879600080824745, + "learning_rate": 7.446576113016188e-06, + "loss": 0.235, + "step": 5144 + }, + { + "epoch": 0.3572420497153173, + "grad_norm": 4.241898492276191, + "learning_rate": 7.4455953778995125e-06, + "loss": 0.3403, + "step": 5145 + }, + { + "epoch": 0.35731148451603945, + "grad_norm": 4.927193058056566, + "learning_rate": 7.444614519087391e-06, + "loss": 0.2474, + "step": 5146 + }, + { + "epoch": 0.3573809193167616, + "grad_norm": 3.2232857521381137, + "learning_rate": 7.4436335366294334e-06, + "loss": 0.3312, + "step": 5147 + }, + { + "epoch": 0.35745035411748366, + "grad_norm": 3.3998370588161744, + "learning_rate": 7.442652430575257e-06, + "loss": 0.3784, + "step": 5148 + }, + { + "epoch": 0.3575197889182058, + "grad_norm": 3.92296589317403, + "learning_rate": 7.441671200974483e-06, + "loss": 0.3995, + "step": 5149 + }, + { + "epoch": 0.35758922371892793, + "grad_norm": 3.7896101974754783, + "learning_rate": 7.440689847876743e-06, + "loss": 0.5331, + "step": 5150 + }, + { + "epoch": 0.35765865851965006, + 
"grad_norm": 2.894088286185405, + "learning_rate": 7.439708371331673e-06, + "loss": 0.4172, + "step": 5151 + }, + { + "epoch": 0.3577280933203722, + "grad_norm": 3.7119832001087274, + "learning_rate": 7.438726771388915e-06, + "loss": 0.3261, + "step": 5152 + }, + { + "epoch": 0.3577975281210943, + "grad_norm": 3.7623974683311845, + "learning_rate": 7.437745048098116e-06, + "loss": 0.3755, + "step": 5153 + }, + { + "epoch": 0.3578669629218164, + "grad_norm": 4.261158375084097, + "learning_rate": 7.436763201508931e-06, + "loss": 0.5336, + "step": 5154 + }, + { + "epoch": 0.35793639772253855, + "grad_norm": 2.354268980583923, + "learning_rate": 7.43578123167102e-06, + "loss": 0.1809, + "step": 5155 + }, + { + "epoch": 0.3580058325232607, + "grad_norm": 2.6639528134118375, + "learning_rate": 7.434799138634051e-06, + "loss": 0.2497, + "step": 5156 + }, + { + "epoch": 0.35807526732398276, + "grad_norm": 2.9806344497623813, + "learning_rate": 7.433816922447697e-06, + "loss": 0.188, + "step": 5157 + }, + { + "epoch": 0.3581447021247049, + "grad_norm": 4.251981101823567, + "learning_rate": 7.432834583161637e-06, + "loss": 0.5382, + "step": 5158 + }, + { + "epoch": 0.35821413692542703, + "grad_norm": 3.8351636666552893, + "learning_rate": 7.4318521208255555e-06, + "loss": 0.5478, + "step": 5159 + }, + { + "epoch": 0.35828357172614916, + "grad_norm": 4.359071933954251, + "learning_rate": 7.430869535489147e-06, + "loss": 0.6244, + "step": 5160 + }, + { + "epoch": 0.35835300652687124, + "grad_norm": 4.231283947676127, + "learning_rate": 7.429886827202108e-06, + "loss": 0.6036, + "step": 5161 + }, + { + "epoch": 0.3584224413275934, + "grad_norm": 3.691277181649569, + "learning_rate": 7.428903996014143e-06, + "loss": 0.3578, + "step": 5162 + }, + { + "epoch": 0.3584918761283155, + "grad_norm": 4.061539534427449, + "learning_rate": 7.427921041974961e-06, + "loss": 0.5851, + "step": 5163 + }, + { + "epoch": 0.35856131092903765, + "grad_norm": 4.283109853697811, + "learning_rate": 7.426937965134282e-06, + "loss": 0.4067, + "step": 5164 + }, + { + "epoch": 0.3586307457297598, + "grad_norm": 4.03655546508861, + "learning_rate": 7.4259547655418265e-06, + "loss": 0.3808, + "step": 5165 + }, + { + "epoch": 0.35870018053048186, + "grad_norm": 4.346470175112034, + "learning_rate": 7.424971443247323e-06, + "loss": 0.4962, + "step": 5166 + }, + { + "epoch": 0.358769615331204, + "grad_norm": 3.431830100018635, + "learning_rate": 7.423987998300508e-06, + "loss": 0.3864, + "step": 5167 + }, + { + "epoch": 0.35883905013192613, + "grad_norm": 3.6227313882576118, + "learning_rate": 7.423004430751124e-06, + "loss": 0.4748, + "step": 5168 + }, + { + "epoch": 0.35890848493264826, + "grad_norm": 3.6309923782791307, + "learning_rate": 7.4220207406489176e-06, + "loss": 0.421, + "step": 5169 + }, + { + "epoch": 0.35897791973337034, + "grad_norm": 4.557882501971525, + "learning_rate": 7.421036928043643e-06, + "loss": 0.5369, + "step": 5170 + }, + { + "epoch": 0.3590473545340925, + "grad_norm": 4.170451569158213, + "learning_rate": 7.4200529929850596e-06, + "loss": 0.4801, + "step": 5171 + }, + { + "epoch": 0.3591167893348146, + "grad_norm": 4.8303186161802145, + "learning_rate": 7.419068935522935e-06, + "loss": 0.5724, + "step": 5172 + }, + { + "epoch": 0.35918622413553675, + "grad_norm": 3.7128687380257452, + "learning_rate": 7.418084755707039e-06, + "loss": 0.3838, + "step": 5173 + }, + { + "epoch": 0.3592556589362588, + "grad_norm": 3.764641466905656, + "learning_rate": 7.417100453587155e-06, + "loss": 0.3514, + "step": 5174 + 
}, + { + "epoch": 0.35932509373698096, + "grad_norm": 2.6466446309278227, + "learning_rate": 7.416116029213063e-06, + "loss": 0.2088, + "step": 5175 + }, + { + "epoch": 0.3593945285377031, + "grad_norm": 3.74943102285954, + "learning_rate": 7.415131482634558e-06, + "loss": 0.4627, + "step": 5176 + }, + { + "epoch": 0.35946396333842523, + "grad_norm": 3.971065791394491, + "learning_rate": 7.414146813901434e-06, + "loss": 0.404, + "step": 5177 + }, + { + "epoch": 0.35953339813914736, + "grad_norm": 4.7189982526175225, + "learning_rate": 7.413162023063497e-06, + "loss": 0.585, + "step": 5178 + }, + { + "epoch": 0.35960283293986944, + "grad_norm": 4.464242563749744, + "learning_rate": 7.412177110170555e-06, + "loss": 0.5609, + "step": 5179 + }, + { + "epoch": 0.3596722677405916, + "grad_norm": 4.351045089001082, + "learning_rate": 7.411192075272424e-06, + "loss": 0.5843, + "step": 5180 + }, + { + "epoch": 0.3597417025413137, + "grad_norm": 3.7251552536797847, + "learning_rate": 7.410206918418927e-06, + "loss": 0.4888, + "step": 5181 + }, + { + "epoch": 0.35981113734203585, + "grad_norm": 3.013973264601099, + "learning_rate": 7.409221639659892e-06, + "loss": 0.4285, + "step": 5182 + }, + { + "epoch": 0.3598805721427579, + "grad_norm": 4.599783170174827, + "learning_rate": 7.4082362390451514e-06, + "loss": 0.7125, + "step": 5183 + }, + { + "epoch": 0.35995000694348006, + "grad_norm": 5.373362477637537, + "learning_rate": 7.407250716624548e-06, + "loss": 0.6862, + "step": 5184 + }, + { + "epoch": 0.3600194417442022, + "grad_norm": 4.1047433762547225, + "learning_rate": 7.406265072447927e-06, + "loss": 0.5084, + "step": 5185 + }, + { + "epoch": 0.36008887654492433, + "grad_norm": 3.9460208601691815, + "learning_rate": 7.4052793065651406e-06, + "loss": 0.6067, + "step": 5186 + }, + { + "epoch": 0.36015831134564646, + "grad_norm": 3.811237232718791, + "learning_rate": 7.404293419026049e-06, + "loss": 0.4171, + "step": 5187 + }, + { + "epoch": 0.36022774614636854, + "grad_norm": 2.906746769387508, + "learning_rate": 7.403307409880518e-06, + "loss": 0.4269, + "step": 5188 + }, + { + "epoch": 0.3602971809470907, + "grad_norm": 4.9345261219228, + "learning_rate": 7.402321279178416e-06, + "loss": 0.5333, + "step": 5189 + }, + { + "epoch": 0.3603666157478128, + "grad_norm": 4.195846774321302, + "learning_rate": 7.401335026969624e-06, + "loss": 0.5604, + "step": 5190 + }, + { + "epoch": 0.36043605054853495, + "grad_norm": 3.0865426179268205, + "learning_rate": 7.400348653304022e-06, + "loss": 0.4003, + "step": 5191 + }, + { + "epoch": 0.360505485349257, + "grad_norm": 3.9512045337731916, + "learning_rate": 7.3993621582315026e-06, + "loss": 0.5755, + "step": 5192 + }, + { + "epoch": 0.36057492014997916, + "grad_norm": 4.95496577552601, + "learning_rate": 7.398375541801958e-06, + "loss": 0.6059, + "step": 5193 + }, + { + "epoch": 0.3606443549507013, + "grad_norm": 4.49146802581522, + "learning_rate": 7.397388804065295e-06, + "loss": 0.4936, + "step": 5194 + }, + { + "epoch": 0.36071378975142343, + "grad_norm": 4.262702809089655, + "learning_rate": 7.396401945071418e-06, + "loss": 0.4605, + "step": 5195 + }, + { + "epoch": 0.3607832245521455, + "grad_norm": 2.4272195273363124, + "learning_rate": 7.3954149648702425e-06, + "loss": 0.1331, + "step": 5196 + }, + { + "epoch": 0.36085265935286764, + "grad_norm": 3.875595990027703, + "learning_rate": 7.39442786351169e-06, + "loss": 0.3984, + "step": 5197 + }, + { + "epoch": 0.3609220941535898, + "grad_norm": 3.551575066844826, + "learning_rate": 
7.393440641045684e-06, + "loss": 0.5079, + "step": 5198 + }, + { + "epoch": 0.3609915289543119, + "grad_norm": 3.866348054455549, + "learning_rate": 7.392453297522159e-06, + "loss": 0.4711, + "step": 5199 + }, + { + "epoch": 0.36106096375503405, + "grad_norm": 3.3568438154277995, + "learning_rate": 7.391465832991054e-06, + "loss": 0.34, + "step": 5200 + }, + { + "epoch": 0.3611303985557561, + "grad_norm": 3.671695421773663, + "learning_rate": 7.390478247502313e-06, + "loss": 0.3849, + "step": 5201 + }, + { + "epoch": 0.36119983335647826, + "grad_norm": 3.8577267975490765, + "learning_rate": 7.389490541105886e-06, + "loss": 0.5757, + "step": 5202 + }, + { + "epoch": 0.3612692681572004, + "grad_norm": 3.8988334185466287, + "learning_rate": 7.388502713851733e-06, + "loss": 0.4949, + "step": 5203 + }, + { + "epoch": 0.36133870295792253, + "grad_norm": 2.636997471803004, + "learning_rate": 7.387514765789815e-06, + "loss": 0.2375, + "step": 5204 + }, + { + "epoch": 0.3614081377586446, + "grad_norm": 3.869454872048885, + "learning_rate": 7.386526696970101e-06, + "loss": 0.4365, + "step": 5205 + }, + { + "epoch": 0.36147757255936674, + "grad_norm": 4.00359029345579, + "learning_rate": 7.385538507442568e-06, + "loss": 0.4625, + "step": 5206 + }, + { + "epoch": 0.3615470073600889, + "grad_norm": 4.804503710614609, + "learning_rate": 7.3845501972571965e-06, + "loss": 0.8323, + "step": 5207 + }, + { + "epoch": 0.361616442160811, + "grad_norm": 4.409764481915239, + "learning_rate": 7.3835617664639745e-06, + "loss": 0.3822, + "step": 5208 + }, + { + "epoch": 0.36168587696153315, + "grad_norm": 3.09672297085788, + "learning_rate": 7.382573215112896e-06, + "loss": 0.2411, + "step": 5209 + }, + { + "epoch": 0.3617553117622552, + "grad_norm": 3.934619100792288, + "learning_rate": 7.38158454325396e-06, + "loss": 0.5692, + "step": 5210 + }, + { + "epoch": 0.36182474656297736, + "grad_norm": 4.4710745413826825, + "learning_rate": 7.380595750937173e-06, + "loss": 0.7858, + "step": 5211 + }, + { + "epoch": 0.3618941813636995, + "grad_norm": 4.044929621972786, + "learning_rate": 7.379606838212545e-06, + "loss": 0.3569, + "step": 5212 + }, + { + "epoch": 0.36196361616442163, + "grad_norm": 4.043225176635758, + "learning_rate": 7.378617805130099e-06, + "loss": 0.5504, + "step": 5213 + }, + { + "epoch": 0.3620330509651437, + "grad_norm": 3.884783666682311, + "learning_rate": 7.377628651739853e-06, + "loss": 0.2867, + "step": 5214 + }, + { + "epoch": 0.36210248576586584, + "grad_norm": 4.181176039200803, + "learning_rate": 7.376639378091841e-06, + "loss": 0.4164, + "step": 5215 + }, + { + "epoch": 0.362171920566588, + "grad_norm": 3.641751353475488, + "learning_rate": 7.3756499842361e-06, + "loss": 0.5453, + "step": 5216 + }, + { + "epoch": 0.3622413553673101, + "grad_norm": 4.0412756899372875, + "learning_rate": 7.3746604702226696e-06, + "loss": 0.4528, + "step": 5217 + }, + { + "epoch": 0.3623107901680322, + "grad_norm": 2.758086316360966, + "learning_rate": 7.3736708361016e-06, + "loss": 0.2696, + "step": 5218 + }, + { + "epoch": 0.3623802249687543, + "grad_norm": 3.7266699836166013, + "learning_rate": 7.372681081922947e-06, + "loss": 0.5316, + "step": 5219 + }, + { + "epoch": 0.36244965976947646, + "grad_norm": 4.357410868657281, + "learning_rate": 7.371691207736769e-06, + "loss": 0.476, + "step": 5220 + }, + { + "epoch": 0.3625190945701986, + "grad_norm": 3.48191688827045, + "learning_rate": 7.370701213593131e-06, + "loss": 0.3644, + "step": 5221 + }, + { + "epoch": 0.36258852937092073, + "grad_norm": 
4.3173703480943955, + "learning_rate": 7.369711099542111e-06, + "loss": 0.5097, + "step": 5222 + }, + { + "epoch": 0.3626579641716428, + "grad_norm": 4.529308761032398, + "learning_rate": 7.368720865633784e-06, + "loss": 0.7696, + "step": 5223 + }, + { + "epoch": 0.36272739897236494, + "grad_norm": 4.746701413286964, + "learning_rate": 7.367730511918236e-06, + "loss": 0.67, + "step": 5224 + }, + { + "epoch": 0.3627968337730871, + "grad_norm": 3.364463320412421, + "learning_rate": 7.366740038445558e-06, + "loss": 0.2047, + "step": 5225 + }, + { + "epoch": 0.3628662685738092, + "grad_norm": 3.446472059365696, + "learning_rate": 7.365749445265848e-06, + "loss": 0.4153, + "step": 5226 + }, + { + "epoch": 0.3629357033745313, + "grad_norm": 3.670606176534461, + "learning_rate": 7.364758732429207e-06, + "loss": 0.4569, + "step": 5227 + }, + { + "epoch": 0.36300513817525343, + "grad_norm": 3.106071035396148, + "learning_rate": 7.363767899985745e-06, + "loss": 0.363, + "step": 5228 + }, + { + "epoch": 0.36307457297597556, + "grad_norm": 3.0759031907010272, + "learning_rate": 7.362776947985579e-06, + "loss": 0.2045, + "step": 5229 + }, + { + "epoch": 0.3631440077766977, + "grad_norm": 2.7909201927851286, + "learning_rate": 7.361785876478827e-06, + "loss": 0.3142, + "step": 5230 + }, + { + "epoch": 0.3632134425774198, + "grad_norm": 3.935041565733543, + "learning_rate": 7.360794685515619e-06, + "loss": 0.5807, + "step": 5231 + }, + { + "epoch": 0.3632828773781419, + "grad_norm": 4.476043050234251, + "learning_rate": 7.359803375146086e-06, + "loss": 0.5882, + "step": 5232 + }, + { + "epoch": 0.36335231217886405, + "grad_norm": 4.3698002466110815, + "learning_rate": 7.35881194542037e-06, + "loss": 0.5222, + "step": 5233 + }, + { + "epoch": 0.3634217469795862, + "grad_norm": 4.014297716606658, + "learning_rate": 7.357820396388613e-06, + "loss": 0.5179, + "step": 5234 + }, + { + "epoch": 0.3634911817803083, + "grad_norm": 4.902661988077576, + "learning_rate": 7.356828728100971e-06, + "loss": 0.5505, + "step": 5235 + }, + { + "epoch": 0.3635606165810304, + "grad_norm": 4.732035972737129, + "learning_rate": 7.355836940607598e-06, + "loss": 0.7167, + "step": 5236 + }, + { + "epoch": 0.36363005138175253, + "grad_norm": 3.8664416142644855, + "learning_rate": 7.354845033958657e-06, + "loss": 0.5711, + "step": 5237 + }, + { + "epoch": 0.36369948618247466, + "grad_norm": 3.1375484152522084, + "learning_rate": 7.35385300820432e-06, + "loss": 0.3431, + "step": 5238 + }, + { + "epoch": 0.3637689209831968, + "grad_norm": 3.870866541431729, + "learning_rate": 7.352860863394762e-06, + "loss": 0.627, + "step": 5239 + }, + { + "epoch": 0.3638383557839189, + "grad_norm": 3.8516168247849816, + "learning_rate": 7.351868599580163e-06, + "loss": 0.3579, + "step": 5240 + }, + { + "epoch": 0.363907790584641, + "grad_norm": 3.607571547776457, + "learning_rate": 7.350876216810711e-06, + "loss": 0.451, + "step": 5241 + }, + { + "epoch": 0.36397722538536315, + "grad_norm": 4.559476636915713, + "learning_rate": 7.349883715136601e-06, + "loss": 0.5555, + "step": 5242 + }, + { + "epoch": 0.3640466601860853, + "grad_norm": 4.121016620171231, + "learning_rate": 7.348891094608031e-06, + "loss": 0.6644, + "step": 5243 + }, + { + "epoch": 0.3641160949868074, + "grad_norm": 3.957438445150098, + "learning_rate": 7.347898355275208e-06, + "loss": 0.4973, + "step": 5244 + }, + { + "epoch": 0.3641855297875295, + "grad_norm": 4.144260264535891, + "learning_rate": 7.346905497188344e-06, + "loss": 0.4929, + "step": 5245 + }, + { + "epoch": 
0.36425496458825163, + "grad_norm": 2.842187093455992, + "learning_rate": 7.345912520397655e-06, + "loss": 0.2226, + "step": 5246 + }, + { + "epoch": 0.36432439938897376, + "grad_norm": 3.758283283670636, + "learning_rate": 7.344919424953365e-06, + "loss": 0.4126, + "step": 5247 + }, + { + "epoch": 0.3643938341896959, + "grad_norm": 3.947415827646632, + "learning_rate": 7.343926210905703e-06, + "loss": 0.6244, + "step": 5248 + }, + { + "epoch": 0.364463268990418, + "grad_norm": 4.62861053883523, + "learning_rate": 7.342932878304907e-06, + "loss": 0.3936, + "step": 5249 + }, + { + "epoch": 0.3645327037911401, + "grad_norm": 3.8111531731136408, + "learning_rate": 7.341939427201216e-06, + "loss": 0.5124, + "step": 5250 + }, + { + "epoch": 0.36460213859186225, + "grad_norm": 3.0192700686366267, + "learning_rate": 7.34094585764488e-06, + "loss": 0.4593, + "step": 5251 + }, + { + "epoch": 0.3646715733925844, + "grad_norm": 3.5492621108719176, + "learning_rate": 7.3399521696861505e-06, + "loss": 0.3993, + "step": 5252 + }, + { + "epoch": 0.36474100819330646, + "grad_norm": 3.5282137789283623, + "learning_rate": 7.338958363375289e-06, + "loss": 0.3456, + "step": 5253 + }, + { + "epoch": 0.3648104429940286, + "grad_norm": 4.795810296475828, + "learning_rate": 7.337964438762561e-06, + "loss": 0.6379, + "step": 5254 + }, + { + "epoch": 0.36487987779475073, + "grad_norm": 3.895704498200813, + "learning_rate": 7.336970395898235e-06, + "loss": 0.5505, + "step": 5255 + }, + { + "epoch": 0.36494931259547286, + "grad_norm": 3.7751176813765284, + "learning_rate": 7.3359762348325924e-06, + "loss": 0.4449, + "step": 5256 + }, + { + "epoch": 0.365018747396195, + "grad_norm": 3.0246909841024086, + "learning_rate": 7.334981955615914e-06, + "loss": 0.2935, + "step": 5257 + }, + { + "epoch": 0.3650881821969171, + "grad_norm": 4.258508689344232, + "learning_rate": 7.333987558298492e-06, + "loss": 0.536, + "step": 5258 + }, + { + "epoch": 0.3651576169976392, + "grad_norm": 3.4538149863428282, + "learning_rate": 7.33299304293062e-06, + "loss": 0.4596, + "step": 5259 + }, + { + "epoch": 0.36522705179836135, + "grad_norm": 3.8057677283709945, + "learning_rate": 7.331998409562598e-06, + "loss": 0.5072, + "step": 5260 + }, + { + "epoch": 0.3652964865990835, + "grad_norm": 3.391090788268474, + "learning_rate": 7.331003658244738e-06, + "loss": 0.3337, + "step": 5261 + }, + { + "epoch": 0.36536592139980556, + "grad_norm": 3.7674882006690207, + "learning_rate": 7.3300087890273496e-06, + "loss": 0.5325, + "step": 5262 + }, + { + "epoch": 0.3654353562005277, + "grad_norm": 4.780023535577388, + "learning_rate": 7.329013801960752e-06, + "loss": 0.7462, + "step": 5263 + }, + { + "epoch": 0.36550479100124983, + "grad_norm": 3.2825605342517714, + "learning_rate": 7.328018697095274e-06, + "loss": 0.3117, + "step": 5264 + }, + { + "epoch": 0.36557422580197196, + "grad_norm": 4.230946027704936, + "learning_rate": 7.327023474481243e-06, + "loss": 0.4621, + "step": 5265 + }, + { + "epoch": 0.36564366060269404, + "grad_norm": 2.9427390772138278, + "learning_rate": 7.326028134168998e-06, + "loss": 0.2859, + "step": 5266 + }, + { + "epoch": 0.3657130954034162, + "grad_norm": 3.6615293487189127, + "learning_rate": 7.325032676208882e-06, + "loss": 0.7188, + "step": 5267 + }, + { + "epoch": 0.3657825302041383, + "grad_norm": 4.537087291950671, + "learning_rate": 7.324037100651245e-06, + "loss": 0.6963, + "step": 5268 + }, + { + "epoch": 0.36585196500486045, + "grad_norm": 4.155695048588974, + "learning_rate": 7.32304140754644e-06, + "loss": 
0.6195, + "step": 5269 + }, + { + "epoch": 0.3659213998055826, + "grad_norm": 4.023666585952381, + "learning_rate": 7.3220455969448315e-06, + "loss": 0.2538, + "step": 5270 + }, + { + "epoch": 0.36599083460630466, + "grad_norm": 3.8526617838127613, + "learning_rate": 7.321049668896783e-06, + "loss": 0.5782, + "step": 5271 + }, + { + "epoch": 0.3660602694070268, + "grad_norm": 2.9281759798026505, + "learning_rate": 7.320053623452668e-06, + "loss": 0.361, + "step": 5272 + }, + { + "epoch": 0.36612970420774893, + "grad_norm": 3.5749795127803807, + "learning_rate": 7.319057460662866e-06, + "loss": 0.2888, + "step": 5273 + }, + { + "epoch": 0.36619913900847106, + "grad_norm": 5.041645832718997, + "learning_rate": 7.318061180577763e-06, + "loss": 0.5884, + "step": 5274 + }, + { + "epoch": 0.36626857380919314, + "grad_norm": 3.408888256001804, + "learning_rate": 7.317064783247748e-06, + "loss": 0.2853, + "step": 5275 + }, + { + "epoch": 0.3663380086099153, + "grad_norm": 4.4212115541538815, + "learning_rate": 7.3160682687232186e-06, + "loss": 0.4673, + "step": 5276 + }, + { + "epoch": 0.3664074434106374, + "grad_norm": 4.353473922522457, + "learning_rate": 7.315071637054576e-06, + "loss": 0.5888, + "step": 5277 + }, + { + "epoch": 0.36647687821135955, + "grad_norm": 5.260715173062866, + "learning_rate": 7.314074888292231e-06, + "loss": 0.8462, + "step": 5278 + }, + { + "epoch": 0.3665463130120817, + "grad_norm": 2.8620908715304836, + "learning_rate": 7.313078022486596e-06, + "loss": 0.2493, + "step": 5279 + }, + { + "epoch": 0.36661574781280376, + "grad_norm": 3.759821781758709, + "learning_rate": 7.3120810396880925e-06, + "loss": 0.5773, + "step": 5280 + }, + { + "epoch": 0.3666851826135259, + "grad_norm": 3.573085953884359, + "learning_rate": 7.311083939947146e-06, + "loss": 0.4299, + "step": 5281 + }, + { + "epoch": 0.36675461741424803, + "grad_norm": 3.0366561594339925, + "learning_rate": 7.310086723314187e-06, + "loss": 0.3328, + "step": 5282 + }, + { + "epoch": 0.36682405221497016, + "grad_norm": 4.588819409680726, + "learning_rate": 7.309089389839658e-06, + "loss": 0.6397, + "step": 5283 + }, + { + "epoch": 0.36689348701569224, + "grad_norm": 3.316497831756882, + "learning_rate": 7.308091939574e-06, + "loss": 0.3728, + "step": 5284 + }, + { + "epoch": 0.3669629218164144, + "grad_norm": 4.253174149251256, + "learning_rate": 7.307094372567665e-06, + "loss": 0.5961, + "step": 5285 + }, + { + "epoch": 0.3670323566171365, + "grad_norm": 5.918741004012838, + "learning_rate": 7.306096688871105e-06, + "loss": 0.4941, + "step": 5286 + }, + { + "epoch": 0.36710179141785865, + "grad_norm": 2.6927485888703426, + "learning_rate": 7.305098888534785e-06, + "loss": 0.2656, + "step": 5287 + }, + { + "epoch": 0.3671712262185807, + "grad_norm": 3.4814665984798094, + "learning_rate": 7.304100971609171e-06, + "loss": 0.3898, + "step": 5288 + }, + { + "epoch": 0.36724066101930286, + "grad_norm": 6.339991452732976, + "learning_rate": 7.3031029381447375e-06, + "loss": 0.4089, + "step": 5289 + }, + { + "epoch": 0.367310095820025, + "grad_norm": 3.9694920096492936, + "learning_rate": 7.302104788191964e-06, + "loss": 0.6052, + "step": 5290 + }, + { + "epoch": 0.36737953062074713, + "grad_norm": 4.767434801543367, + "learning_rate": 7.3011065218013335e-06, + "loss": 0.7248, + "step": 5291 + }, + { + "epoch": 0.36744896542146926, + "grad_norm": 3.148374933859908, + "learning_rate": 7.300108139023341e-06, + "loss": 0.1739, + "step": 5292 + }, + { + "epoch": 0.36751840022219134, + "grad_norm": 3.7982139615036394, + 
"learning_rate": 7.299109639908482e-06, + "loss": 0.4263, + "step": 5293 + }, + { + "epoch": 0.3675878350229135, + "grad_norm": 4.081060341811651, + "learning_rate": 7.298111024507257e-06, + "loss": 0.6496, + "step": 5294 + }, + { + "epoch": 0.3676572698236356, + "grad_norm": 4.000938839097152, + "learning_rate": 7.297112292870177e-06, + "loss": 0.4879, + "step": 5295 + }, + { + "epoch": 0.36772670462435775, + "grad_norm": 2.483007804669054, + "learning_rate": 7.296113445047758e-06, + "loss": 0.2168, + "step": 5296 + }, + { + "epoch": 0.3677961394250798, + "grad_norm": 4.982133218500219, + "learning_rate": 7.295114481090519e-06, + "loss": 0.8941, + "step": 5297 + }, + { + "epoch": 0.36786557422580196, + "grad_norm": 3.505457970585262, + "learning_rate": 7.294115401048985e-06, + "loss": 0.5145, + "step": 5298 + }, + { + "epoch": 0.3679350090265241, + "grad_norm": 4.122571104457174, + "learning_rate": 7.293116204973691e-06, + "loss": 0.4536, + "step": 5299 + }, + { + "epoch": 0.36800444382724623, + "grad_norm": 6.8168397526491535, + "learning_rate": 7.292116892915175e-06, + "loss": 0.442, + "step": 5300 + }, + { + "epoch": 0.36807387862796836, + "grad_norm": 4.371468614081541, + "learning_rate": 7.29111746492398e-06, + "loss": 0.7694, + "step": 5301 + }, + { + "epoch": 0.36814331342869044, + "grad_norm": 3.5083510698018165, + "learning_rate": 7.290117921050657e-06, + "loss": 0.3829, + "step": 5302 + }, + { + "epoch": 0.3682127482294126, + "grad_norm": 3.2086290955614385, + "learning_rate": 7.289118261345759e-06, + "loss": 0.4295, + "step": 5303 + }, + { + "epoch": 0.3682821830301347, + "grad_norm": 4.1133573463170405, + "learning_rate": 7.288118485859851e-06, + "loss": 0.2946, + "step": 5304 + }, + { + "epoch": 0.36835161783085685, + "grad_norm": 4.831428665242624, + "learning_rate": 7.287118594643497e-06, + "loss": 0.4511, + "step": 5305 + }, + { + "epoch": 0.3684210526315789, + "grad_norm": 3.9774320312836013, + "learning_rate": 7.286118587747275e-06, + "loss": 0.3544, + "step": 5306 + }, + { + "epoch": 0.36849048743230106, + "grad_norm": 3.949426377053619, + "learning_rate": 7.285118465221761e-06, + "loss": 0.4384, + "step": 5307 + }, + { + "epoch": 0.3685599222330232, + "grad_norm": 2.5959500956444965, + "learning_rate": 7.284118227117541e-06, + "loss": 0.2793, + "step": 5308 + }, + { + "epoch": 0.36862935703374533, + "grad_norm": 3.5261209281636376, + "learning_rate": 7.283117873485206e-06, + "loss": 0.3596, + "step": 5309 + }, + { + "epoch": 0.3686987918344674, + "grad_norm": 4.22786861052139, + "learning_rate": 7.282117404375352e-06, + "loss": 0.4395, + "step": 5310 + }, + { + "epoch": 0.36876822663518954, + "grad_norm": 4.4298154480332546, + "learning_rate": 7.281116819838582e-06, + "loss": 0.5589, + "step": 5311 + }, + { + "epoch": 0.3688376614359117, + "grad_norm": 3.3456104838391303, + "learning_rate": 7.280116119925504e-06, + "loss": 0.3398, + "step": 5312 + }, + { + "epoch": 0.3689070962366338, + "grad_norm": 2.6710461073306893, + "learning_rate": 7.2791153046867344e-06, + "loss": 0.14, + "step": 5313 + }, + { + "epoch": 0.36897653103735595, + "grad_norm": 4.170125909785202, + "learning_rate": 7.27811437417289e-06, + "loss": 0.4526, + "step": 5314 + }, + { + "epoch": 0.369045965838078, + "grad_norm": 4.355060077280559, + "learning_rate": 7.2771133284345984e-06, + "loss": 0.4518, + "step": 5315 + }, + { + "epoch": 0.36911540063880016, + "grad_norm": 3.504673707567617, + "learning_rate": 7.2761121675224926e-06, + "loss": 0.4897, + "step": 5316 + }, + { + "epoch": 
0.3691848354395223, + "grad_norm": 4.272493027386857, + "learning_rate": 7.275110891487208e-06, + "loss": 0.3938, + "step": 5317 + }, + { + "epoch": 0.36925427024024443, + "grad_norm": 3.885785869257253, + "learning_rate": 7.274109500379389e-06, + "loss": 0.2657, + "step": 5318 + }, + { + "epoch": 0.3693237050409665, + "grad_norm": 3.818801198535173, + "learning_rate": 7.2731079942496865e-06, + "loss": 0.2898, + "step": 5319 + }, + { + "epoch": 0.36939313984168864, + "grad_norm": 4.51990971306465, + "learning_rate": 7.272106373148753e-06, + "loss": 0.609, + "step": 5320 + }, + { + "epoch": 0.3694625746424108, + "grad_norm": 3.212455743026587, + "learning_rate": 7.271104637127248e-06, + "loss": 0.2053, + "step": 5321 + }, + { + "epoch": 0.3695320094431329, + "grad_norm": 3.6747098250465444, + "learning_rate": 7.270102786235842e-06, + "loss": 0.5695, + "step": 5322 + }, + { + "epoch": 0.369601444243855, + "grad_norm": 4.8643152895152095, + "learning_rate": 7.269100820525207e-06, + "loss": 0.6991, + "step": 5323 + }, + { + "epoch": 0.3696708790445771, + "grad_norm": 3.3344190435829195, + "learning_rate": 7.268098740046019e-06, + "loss": 0.2979, + "step": 5324 + }, + { + "epoch": 0.36974031384529926, + "grad_norm": 3.9345070348116917, + "learning_rate": 7.267096544848964e-06, + "loss": 0.4614, + "step": 5325 + }, + { + "epoch": 0.3698097486460214, + "grad_norm": 4.397042030236148, + "learning_rate": 7.26609423498473e-06, + "loss": 0.2951, + "step": 5326 + }, + { + "epoch": 0.36987918344674353, + "grad_norm": 3.619514368007099, + "learning_rate": 7.265091810504015e-06, + "loss": 0.4345, + "step": 5327 + }, + { + "epoch": 0.3699486182474656, + "grad_norm": 3.8687171577500457, + "learning_rate": 7.264089271457517e-06, + "loss": 0.4041, + "step": 5328 + }, + { + "epoch": 0.37001805304818775, + "grad_norm": 3.5983570895417527, + "learning_rate": 7.263086617895949e-06, + "loss": 0.2985, + "step": 5329 + }, + { + "epoch": 0.3700874878489099, + "grad_norm": 3.5343622140015527, + "learning_rate": 7.2620838498700185e-06, + "loss": 0.3636, + "step": 5330 + }, + { + "epoch": 0.370156922649632, + "grad_norm": 3.0571042132074706, + "learning_rate": 7.261080967430447e-06, + "loss": 0.4412, + "step": 5331 + }, + { + "epoch": 0.3702263574503541, + "grad_norm": 4.100132079238135, + "learning_rate": 7.2600779706279585e-06, + "loss": 0.7203, + "step": 5332 + }, + { + "epoch": 0.37029579225107623, + "grad_norm": 3.2362760911462245, + "learning_rate": 7.259074859513284e-06, + "loss": 0.401, + "step": 5333 + }, + { + "epoch": 0.37036522705179836, + "grad_norm": 3.421911420071634, + "learning_rate": 7.2580716341371594e-06, + "loss": 0.4264, + "step": 5334 + }, + { + "epoch": 0.3704346618525205, + "grad_norm": 5.187443005402242, + "learning_rate": 7.257068294550328e-06, + "loss": 0.5395, + "step": 5335 + }, + { + "epoch": 0.37050409665324263, + "grad_norm": 5.114298487349007, + "learning_rate": 7.256064840803535e-06, + "loss": 0.4778, + "step": 5336 + }, + { + "epoch": 0.3705735314539647, + "grad_norm": 4.08902035128331, + "learning_rate": 7.255061272947535e-06, + "loss": 0.4919, + "step": 5337 + }, + { + "epoch": 0.37064296625468685, + "grad_norm": 4.266926616486269, + "learning_rate": 7.254057591033088e-06, + "loss": 0.4322, + "step": 5338 + }, + { + "epoch": 0.370712401055409, + "grad_norm": 3.5956776393265417, + "learning_rate": 7.253053795110959e-06, + "loss": 0.3342, + "step": 5339 + }, + { + "epoch": 0.3707818358561311, + "grad_norm": 4.127598444649088, + "learning_rate": 7.252049885231919e-06, + "loss": 
0.5547, + "step": 5340 + }, + { + "epoch": 0.3708512706568532, + "grad_norm": 4.0647867038519925, + "learning_rate": 7.251045861446741e-06, + "loss": 0.5805, + "step": 5341 + }, + { + "epoch": 0.37092070545757533, + "grad_norm": 3.2870579504611563, + "learning_rate": 7.250041723806215e-06, + "loss": 0.3859, + "step": 5342 + }, + { + "epoch": 0.37099014025829746, + "grad_norm": 4.002343069734852, + "learning_rate": 7.249037472361122e-06, + "loss": 0.5498, + "step": 5343 + }, + { + "epoch": 0.3710595750590196, + "grad_norm": 5.989080017540409, + "learning_rate": 7.24803310716226e-06, + "loss": 0.4263, + "step": 5344 + }, + { + "epoch": 0.3711290098597417, + "grad_norm": 3.336577746460094, + "learning_rate": 7.247028628260427e-06, + "loss": 0.3076, + "step": 5345 + }, + { + "epoch": 0.3711984446604638, + "grad_norm": 3.782180950500412, + "learning_rate": 7.246024035706427e-06, + "loss": 0.6037, + "step": 5346 + }, + { + "epoch": 0.37126787946118595, + "grad_norm": 3.523336924712128, + "learning_rate": 7.245019329551075e-06, + "loss": 0.5285, + "step": 5347 + }, + { + "epoch": 0.3713373142619081, + "grad_norm": 2.909187582974306, + "learning_rate": 7.244014509845185e-06, + "loss": 0.3129, + "step": 5348 + }, + { + "epoch": 0.3714067490626302, + "grad_norm": 3.9257869886414127, + "learning_rate": 7.24300957663958e-06, + "loss": 0.4827, + "step": 5349 + }, + { + "epoch": 0.3714761838633523, + "grad_norm": 4.759376921279855, + "learning_rate": 7.242004529985088e-06, + "loss": 0.7958, + "step": 5350 + }, + { + "epoch": 0.37154561866407443, + "grad_norm": 3.0704874727802753, + "learning_rate": 7.2409993699325455e-06, + "loss": 0.3401, + "step": 5351 + }, + { + "epoch": 0.37161505346479656, + "grad_norm": 3.4863473330233274, + "learning_rate": 7.239994096532791e-06, + "loss": 0.3516, + "step": 5352 + }, + { + "epoch": 0.3716844882655187, + "grad_norm": 3.859641586497657, + "learning_rate": 7.238988709836667e-06, + "loss": 0.5821, + "step": 5353 + }, + { + "epoch": 0.3717539230662408, + "grad_norm": 3.750376212473087, + "learning_rate": 7.23798320989503e-06, + "loss": 0.5067, + "step": 5354 + }, + { + "epoch": 0.3718233578669629, + "grad_norm": 2.6504672904390043, + "learning_rate": 7.236977596758734e-06, + "loss": 0.2996, + "step": 5355 + }, + { + "epoch": 0.37189279266768505, + "grad_norm": 4.798207729253736, + "learning_rate": 7.235971870478643e-06, + "loss": 0.5289, + "step": 5356 + }, + { + "epoch": 0.3719622274684072, + "grad_norm": 3.3210465521389483, + "learning_rate": 7.234966031105624e-06, + "loss": 0.3565, + "step": 5357 + }, + { + "epoch": 0.3720316622691293, + "grad_norm": 5.1063542531716575, + "learning_rate": 7.233960078690554e-06, + "loss": 0.6655, + "step": 5358 + }, + { + "epoch": 0.3721010970698514, + "grad_norm": 3.8372303135424457, + "learning_rate": 7.232954013284309e-06, + "loss": 0.5025, + "step": 5359 + }, + { + "epoch": 0.37217053187057353, + "grad_norm": 3.87013071111723, + "learning_rate": 7.231947834937778e-06, + "loss": 0.5926, + "step": 5360 + }, + { + "epoch": 0.37223996667129566, + "grad_norm": 4.338086203255544, + "learning_rate": 7.230941543701852e-06, + "loss": 0.2548, + "step": 5361 + }, + { + "epoch": 0.3723094014720178, + "grad_norm": 4.280938990520116, + "learning_rate": 7.229935139627425e-06, + "loss": 0.3738, + "step": 5362 + }, + { + "epoch": 0.3723788362727399, + "grad_norm": 3.779119701968508, + "learning_rate": 7.228928622765403e-06, + "loss": 0.5819, + "step": 5363 + }, + { + "epoch": 0.372448271073462, + "grad_norm": 3.6944573076621623, + 
"learning_rate": 7.227921993166693e-06, + "loss": 0.4794, + "step": 5364 + }, + { + "epoch": 0.37251770587418415, + "grad_norm": 3.5566956861035712, + "learning_rate": 7.2269152508822105e-06, + "loss": 0.3298, + "step": 5365 + }, + { + "epoch": 0.3725871406749063, + "grad_norm": 4.043473612760919, + "learning_rate": 7.225908395962874e-06, + "loss": 0.636, + "step": 5366 + }, + { + "epoch": 0.37265657547562836, + "grad_norm": 3.005836441072363, + "learning_rate": 7.22490142845961e-06, + "loss": 0.3953, + "step": 5367 + }, + { + "epoch": 0.3727260102763505, + "grad_norm": 3.815252297419189, + "learning_rate": 7.223894348423348e-06, + "loss": 0.3678, + "step": 5368 + }, + { + "epoch": 0.37279544507707263, + "grad_norm": 9.738091458011734, + "learning_rate": 7.2228871559050285e-06, + "loss": 0.5951, + "step": 5369 + }, + { + "epoch": 0.37286487987779476, + "grad_norm": 3.375737299053214, + "learning_rate": 7.221879850955591e-06, + "loss": 0.4138, + "step": 5370 + }, + { + "epoch": 0.3729343146785169, + "grad_norm": 4.526199130311125, + "learning_rate": 7.220872433625985e-06, + "loss": 0.5608, + "step": 5371 + }, + { + "epoch": 0.373003749479239, + "grad_norm": 3.4127951358586457, + "learning_rate": 7.219864903967165e-06, + "loss": 0.3672, + "step": 5372 + }, + { + "epoch": 0.3730731842799611, + "grad_norm": 3.9487718542003947, + "learning_rate": 7.218857262030088e-06, + "loss": 0.5505, + "step": 5373 + }, + { + "epoch": 0.37314261908068325, + "grad_norm": 4.7702697981777025, + "learning_rate": 7.217849507865724e-06, + "loss": 0.8822, + "step": 5374 + }, + { + "epoch": 0.3732120538814054, + "grad_norm": 4.007945215007396, + "learning_rate": 7.21684164152504e-06, + "loss": 0.6258, + "step": 5375 + }, + { + "epoch": 0.37328148868212746, + "grad_norm": 2.2098324262295783, + "learning_rate": 7.215833663059014e-06, + "loss": 0.1477, + "step": 5376 + }, + { + "epoch": 0.3733509234828496, + "grad_norm": 3.786374816914992, + "learning_rate": 7.21482557251863e-06, + "loss": 0.6749, + "step": 5377 + }, + { + "epoch": 0.37342035828357173, + "grad_norm": 3.0939635771475724, + "learning_rate": 7.213817369954874e-06, + "loss": 0.3183, + "step": 5378 + }, + { + "epoch": 0.37348979308429386, + "grad_norm": 3.344641634130665, + "learning_rate": 7.212809055418741e-06, + "loss": 0.5366, + "step": 5379 + }, + { + "epoch": 0.37355922788501594, + "grad_norm": 4.892370232721007, + "learning_rate": 7.211800628961229e-06, + "loss": 0.6668, + "step": 5380 + }, + { + "epoch": 0.3736286626857381, + "grad_norm": 3.0016041194240533, + "learning_rate": 7.210792090633346e-06, + "loss": 0.4618, + "step": 5381 + }, + { + "epoch": 0.3736980974864602, + "grad_norm": 2.750773129489912, + "learning_rate": 7.209783440486099e-06, + "loss": 0.2897, + "step": 5382 + }, + { + "epoch": 0.37376753228718235, + "grad_norm": 3.6787301172973454, + "learning_rate": 7.208774678570507e-06, + "loss": 0.4531, + "step": 5383 + }, + { + "epoch": 0.3738369670879045, + "grad_norm": 36.762015953860036, + "learning_rate": 7.20776580493759e-06, + "loss": 0.3385, + "step": 5384 + }, + { + "epoch": 0.37390640188862656, + "grad_norm": 4.241586539949382, + "learning_rate": 7.206756819638379e-06, + "loss": 0.5358, + "step": 5385 + }, + { + "epoch": 0.3739758366893487, + "grad_norm": 3.8746411551401603, + "learning_rate": 7.205747722723903e-06, + "loss": 0.6325, + "step": 5386 + }, + { + "epoch": 0.37404527149007083, + "grad_norm": 3.577317496582448, + "learning_rate": 7.204738514245205e-06, + "loss": 0.3492, + "step": 5387 + }, + { + "epoch": 
0.37411470629079296, + "grad_norm": 4.033481043176407, + "learning_rate": 7.203729194253326e-06, + "loss": 0.8111, + "step": 5388 + }, + { + "epoch": 0.37418414109151504, + "grad_norm": 2.464725638679876, + "learning_rate": 7.202719762799317e-06, + "loss": 0.2851, + "step": 5389 + }, + { + "epoch": 0.3742535758922372, + "grad_norm": 3.8253699518459614, + "learning_rate": 7.201710219934238e-06, + "loss": 0.5091, + "step": 5390 + }, + { + "epoch": 0.3743230106929593, + "grad_norm": 4.028008669758356, + "learning_rate": 7.200700565709146e-06, + "loss": 0.5012, + "step": 5391 + }, + { + "epoch": 0.37439244549368145, + "grad_norm": 3.5943057061521633, + "learning_rate": 7.19969080017511e-06, + "loss": 0.4352, + "step": 5392 + }, + { + "epoch": 0.3744618802944036, + "grad_norm": 4.7692909311279195, + "learning_rate": 7.1986809233832025e-06, + "loss": 0.7374, + "step": 5393 + }, + { + "epoch": 0.37453131509512566, + "grad_norm": 4.040817038949215, + "learning_rate": 7.197670935384501e-06, + "loss": 0.5026, + "step": 5394 + }, + { + "epoch": 0.3746007498958478, + "grad_norm": 4.471130086693506, + "learning_rate": 7.196660836230091e-06, + "loss": 0.3923, + "step": 5395 + }, + { + "epoch": 0.37467018469656993, + "grad_norm": 4.589928742971696, + "learning_rate": 7.195650625971061e-06, + "loss": 0.5248, + "step": 5396 + }, + { + "epoch": 0.37473961949729206, + "grad_norm": 3.773999316956789, + "learning_rate": 7.194640304658509e-06, + "loss": 0.3507, + "step": 5397 + }, + { + "epoch": 0.37480905429801414, + "grad_norm": 4.510569538960003, + "learning_rate": 7.193629872343532e-06, + "loss": 0.4856, + "step": 5398 + }, + { + "epoch": 0.3748784890987363, + "grad_norm": 4.080387751371503, + "learning_rate": 7.192619329077238e-06, + "loss": 0.5011, + "step": 5399 + }, + { + "epoch": 0.3749479238994584, + "grad_norm": 3.6624930053487477, + "learning_rate": 7.191608674910741e-06, + "loss": 0.6582, + "step": 5400 + }, + { + "epoch": 0.37501735870018055, + "grad_norm": 48.04198102537829, + "learning_rate": 7.1905979098951564e-06, + "loss": 0.4876, + "step": 5401 + }, + { + "epoch": 0.3750867935009026, + "grad_norm": 4.6597997271489335, + "learning_rate": 7.189587034081607e-06, + "loss": 0.2864, + "step": 5402 + }, + { + "epoch": 0.37515622830162476, + "grad_norm": 4.146369748773861, + "learning_rate": 7.188576047521225e-06, + "loss": 0.4768, + "step": 5403 + }, + { + "epoch": 0.3752256631023469, + "grad_norm": 5.027219434348952, + "learning_rate": 7.187564950265142e-06, + "loss": 0.4588, + "step": 5404 + }, + { + "epoch": 0.37529509790306903, + "grad_norm": 2.904670121686568, + "learning_rate": 7.1865537423644995e-06, + "loss": 0.3735, + "step": 5405 + }, + { + "epoch": 0.37536453270379116, + "grad_norm": 3.3301729604548296, + "learning_rate": 7.185542423870444e-06, + "loss": 0.2556, + "step": 5406 + }, + { + "epoch": 0.37543396750451324, + "grad_norm": 3.14772773265129, + "learning_rate": 7.184530994834125e-06, + "loss": 0.3296, + "step": 5407 + }, + { + "epoch": 0.3755034023052354, + "grad_norm": 3.3737242326943875, + "learning_rate": 7.183519455306699e-06, + "loss": 0.3524, + "step": 5408 + }, + { + "epoch": 0.3755728371059575, + "grad_norm": 3.1365187846686298, + "learning_rate": 7.182507805339331e-06, + "loss": 0.2187, + "step": 5409 + }, + { + "epoch": 0.37564227190667965, + "grad_norm": 4.533940165369903, + "learning_rate": 7.181496044983189e-06, + "loss": 0.697, + "step": 5410 + }, + { + "epoch": 0.3757117067074017, + "grad_norm": 4.985615856094159, + "learning_rate": 7.1804841742894435e-06, + "loss": 
0.2645, + "step": 5411 + }, + { + "epoch": 0.37578114150812386, + "grad_norm": 2.856238086371595, + "learning_rate": 7.179472193309276e-06, + "loss": 0.2854, + "step": 5412 + }, + { + "epoch": 0.375850576308846, + "grad_norm": 3.9332383109636284, + "learning_rate": 7.178460102093873e-06, + "loss": 0.5405, + "step": 5413 + }, + { + "epoch": 0.37592001110956813, + "grad_norm": 3.3940188691844666, + "learning_rate": 7.177447900694421e-06, + "loss": 0.326, + "step": 5414 + }, + { + "epoch": 0.3759894459102902, + "grad_norm": 4.015251333125671, + "learning_rate": 7.176435589162119e-06, + "loss": 0.4991, + "step": 5415 + }, + { + "epoch": 0.37605888071101234, + "grad_norm": 3.134419853083636, + "learning_rate": 7.175423167548168e-06, + "loss": 0.3621, + "step": 5416 + }, + { + "epoch": 0.3761283155117345, + "grad_norm": 2.997903168259594, + "learning_rate": 7.174410635903776e-06, + "loss": 0.3495, + "step": 5417 + }, + { + "epoch": 0.3761977503124566, + "grad_norm": 4.222126231618553, + "learning_rate": 7.173397994280153e-06, + "loss": 0.6791, + "step": 5418 + }, + { + "epoch": 0.37626718511317875, + "grad_norm": 4.953936237039759, + "learning_rate": 7.172385242728519e-06, + "loss": 0.7217, + "step": 5419 + }, + { + "epoch": 0.3763366199139008, + "grad_norm": 4.25229243122479, + "learning_rate": 7.171372381300098e-06, + "loss": 0.5345, + "step": 5420 + }, + { + "epoch": 0.37640605471462296, + "grad_norm": 3.6334790829745294, + "learning_rate": 7.1703594100461175e-06, + "loss": 0.3446, + "step": 5421 + }, + { + "epoch": 0.3764754895153451, + "grad_norm": 4.820496968330673, + "learning_rate": 7.169346329017816e-06, + "loss": 0.6859, + "step": 5422 + }, + { + "epoch": 0.37654492431606723, + "grad_norm": 3.2716435536380457, + "learning_rate": 7.168333138266431e-06, + "loss": 0.2991, + "step": 5423 + }, + { + "epoch": 0.3766143591167893, + "grad_norm": 5.7735154924825425, + "learning_rate": 7.167319837843209e-06, + "loss": 0.3938, + "step": 5424 + }, + { + "epoch": 0.37668379391751144, + "grad_norm": 4.281871000185905, + "learning_rate": 7.1663064277994035e-06, + "loss": 0.3823, + "step": 5425 + }, + { + "epoch": 0.3767532287182336, + "grad_norm": 3.9901090798417784, + "learning_rate": 7.16529290818627e-06, + "loss": 0.48, + "step": 5426 + }, + { + "epoch": 0.3768226635189557, + "grad_norm": 2.8722216748194054, + "learning_rate": 7.164279279055071e-06, + "loss": 0.292, + "step": 5427 + }, + { + "epoch": 0.37689209831967785, + "grad_norm": 4.298053844479467, + "learning_rate": 7.163265540457074e-06, + "loss": 0.6027, + "step": 5428 + }, + { + "epoch": 0.3769615331203999, + "grad_norm": 3.6531964034251043, + "learning_rate": 7.162251692443556e-06, + "loss": 0.3515, + "step": 5429 + }, + { + "epoch": 0.37703096792112206, + "grad_norm": 3.601747961125408, + "learning_rate": 7.161237735065791e-06, + "loss": 0.3833, + "step": 5430 + }, + { + "epoch": 0.3771004027218442, + "grad_norm": 3.620878583847293, + "learning_rate": 7.16022366837507e-06, + "loss": 0.2218, + "step": 5431 + }, + { + "epoch": 0.37716983752256633, + "grad_norm": 3.7121192672800842, + "learning_rate": 7.15920949242268e-06, + "loss": 0.3214, + "step": 5432 + }, + { + "epoch": 0.3772392723232884, + "grad_norm": 3.3064207946155495, + "learning_rate": 7.158195207259916e-06, + "loss": 0.4449, + "step": 5433 + }, + { + "epoch": 0.37730870712401055, + "grad_norm": 3.6858261543834967, + "learning_rate": 7.157180812938081e-06, + "loss": 0.6709, + "step": 5434 + }, + { + "epoch": 0.3773781419247327, + "grad_norm": 5.675707380817739, + 
"learning_rate": 7.156166309508482e-06, + "loss": 1.1867, + "step": 5435 + }, + { + "epoch": 0.3774475767254548, + "grad_norm": 3.89372978194419, + "learning_rate": 7.155151697022431e-06, + "loss": 0.3408, + "step": 5436 + }, + { + "epoch": 0.3775170115261769, + "grad_norm": 3.1947594731323297, + "learning_rate": 7.154136975531244e-06, + "loss": 0.2899, + "step": 5437 + }, + { + "epoch": 0.37758644632689903, + "grad_norm": 3.2902161588855328, + "learning_rate": 7.153122145086248e-06, + "loss": 0.3016, + "step": 5438 + }, + { + "epoch": 0.37765588112762116, + "grad_norm": 4.811639373918064, + "learning_rate": 7.15210720573877e-06, + "loss": 0.5154, + "step": 5439 + }, + { + "epoch": 0.3777253159283433, + "grad_norm": 3.8598214055407736, + "learning_rate": 7.151092157540145e-06, + "loss": 0.6604, + "step": 5440 + }, + { + "epoch": 0.37779475072906543, + "grad_norm": 3.123640282521276, + "learning_rate": 7.150077000541713e-06, + "loss": 0.4385, + "step": 5441 + }, + { + "epoch": 0.3778641855297875, + "grad_norm": 3.0631929095056063, + "learning_rate": 7.149061734794819e-06, + "loss": 0.323, + "step": 5442 + }, + { + "epoch": 0.37793362033050965, + "grad_norm": 3.3844866492038723, + "learning_rate": 7.148046360350814e-06, + "loss": 0.4488, + "step": 5443 + }, + { + "epoch": 0.3780030551312318, + "grad_norm": 5.284883612692944, + "learning_rate": 7.147030877261054e-06, + "loss": 0.5551, + "step": 5444 + }, + { + "epoch": 0.3780724899319539, + "grad_norm": 2.8763180907193617, + "learning_rate": 7.146015285576904e-06, + "loss": 0.202, + "step": 5445 + }, + { + "epoch": 0.378141924732676, + "grad_norm": 4.854673765515048, + "learning_rate": 7.144999585349728e-06, + "loss": 0.6433, + "step": 5446 + }, + { + "epoch": 0.37821135953339813, + "grad_norm": 2.6366804395372103, + "learning_rate": 7.143983776630899e-06, + "loss": 0.2114, + "step": 5447 + }, + { + "epoch": 0.37828079433412026, + "grad_norm": 4.2391565104810125, + "learning_rate": 7.142967859471799e-06, + "loss": 0.573, + "step": 5448 + }, + { + "epoch": 0.3783502291348424, + "grad_norm": 3.5483377237472067, + "learning_rate": 7.1419518339238085e-06, + "loss": 0.3062, + "step": 5449 + }, + { + "epoch": 0.37841966393556453, + "grad_norm": 3.5619749492052675, + "learning_rate": 7.140935700038316e-06, + "loss": 0.4807, + "step": 5450 + }, + { + "epoch": 0.3784890987362866, + "grad_norm": 5.327809889321563, + "learning_rate": 7.139919457866722e-06, + "loss": 0.6001, + "step": 5451 + }, + { + "epoch": 0.37855853353700875, + "grad_norm": 5.0798829698977075, + "learning_rate": 7.1389031074604224e-06, + "loss": 0.5704, + "step": 5452 + }, + { + "epoch": 0.3786279683377309, + "grad_norm": 3.0281918073975733, + "learning_rate": 7.1378866488708224e-06, + "loss": 0.2863, + "step": 5453 + }, + { + "epoch": 0.378697403138453, + "grad_norm": 4.0301619454856334, + "learning_rate": 7.136870082149335e-06, + "loss": 0.6036, + "step": 5454 + }, + { + "epoch": 0.3787668379391751, + "grad_norm": 3.8643286923765743, + "learning_rate": 7.135853407347378e-06, + "loss": 0.6244, + "step": 5455 + }, + { + "epoch": 0.37883627273989723, + "grad_norm": 5.1446638069854655, + "learning_rate": 7.134836624516372e-06, + "loss": 0.6473, + "step": 5456 + }, + { + "epoch": 0.37890570754061936, + "grad_norm": 3.1444419308767335, + "learning_rate": 7.133819733707746e-06, + "loss": 0.3929, + "step": 5457 + }, + { + "epoch": 0.3789751423413415, + "grad_norm": 4.856174748892138, + "learning_rate": 7.132802734972932e-06, + "loss": 0.5114, + "step": 5458 + }, + { + "epoch": 
0.3790445771420636, + "grad_norm": 3.93955621427381, + "learning_rate": 7.131785628363368e-06, + "loss": 0.4291, + "step": 5459 + }, + { + "epoch": 0.3791140119427857, + "grad_norm": 3.9711469027849926, + "learning_rate": 7.1307684139305e-06, + "loss": 0.4916, + "step": 5460 + }, + { + "epoch": 0.37918344674350785, + "grad_norm": 4.051329616446848, + "learning_rate": 7.129751091725778e-06, + "loss": 0.4507, + "step": 5461 + }, + { + "epoch": 0.37925288154423, + "grad_norm": 3.6734105585592363, + "learning_rate": 7.1287336618006546e-06, + "loss": 0.3443, + "step": 5462 + }, + { + "epoch": 0.3793223163449521, + "grad_norm": 3.7985443047913594, + "learning_rate": 7.127716124206591e-06, + "loss": 0.5239, + "step": 5463 + }, + { + "epoch": 0.3793917511456742, + "grad_norm": 5.660547669819418, + "learning_rate": 7.126698478995054e-06, + "loss": 0.5432, + "step": 5464 + }, + { + "epoch": 0.37946118594639633, + "grad_norm": 3.282673283643919, + "learning_rate": 7.125680726217515e-06, + "loss": 0.3606, + "step": 5465 + }, + { + "epoch": 0.37953062074711846, + "grad_norm": 4.718901484530516, + "learning_rate": 7.124662865925449e-06, + "loss": 0.7748, + "step": 5466 + }, + { + "epoch": 0.3796000555478406, + "grad_norm": 3.371075923614832, + "learning_rate": 7.12364489817034e-06, + "loss": 0.4036, + "step": 5467 + }, + { + "epoch": 0.3796694903485627, + "grad_norm": 3.5911201724504793, + "learning_rate": 7.122626823003677e-06, + "loss": 0.5538, + "step": 5468 + }, + { + "epoch": 0.3797389251492848, + "grad_norm": 3.427194827738846, + "learning_rate": 7.121608640476948e-06, + "loss": 0.4817, + "step": 5469 + }, + { + "epoch": 0.37980835995000695, + "grad_norm": 3.291335486451126, + "learning_rate": 7.120590350641656e-06, + "loss": 0.3871, + "step": 5470 + }, + { + "epoch": 0.3798777947507291, + "grad_norm": 4.329793331690985, + "learning_rate": 7.119571953549305e-06, + "loss": 0.3769, + "step": 5471 + }, + { + "epoch": 0.37994722955145116, + "grad_norm": 5.010495033096167, + "learning_rate": 7.118553449251402e-06, + "loss": 0.4888, + "step": 5472 + }, + { + "epoch": 0.3800166643521733, + "grad_norm": 3.7227629233246335, + "learning_rate": 7.117534837799461e-06, + "loss": 0.2788, + "step": 5473 + }, + { + "epoch": 0.38008609915289543, + "grad_norm": 2.62153569879019, + "learning_rate": 7.116516119245007e-06, + "loss": 0.2344, + "step": 5474 + }, + { + "epoch": 0.38015553395361756, + "grad_norm": 4.28439689435706, + "learning_rate": 7.115497293639561e-06, + "loss": 0.7034, + "step": 5475 + }, + { + "epoch": 0.3802249687543397, + "grad_norm": 4.000524668621709, + "learning_rate": 7.1144783610346555e-06, + "loss": 0.6261, + "step": 5476 + }, + { + "epoch": 0.3802944035550618, + "grad_norm": 3.7112984820800374, + "learning_rate": 7.113459321481828e-06, + "loss": 0.4558, + "step": 5477 + }, + { + "epoch": 0.3803638383557839, + "grad_norm": 3.222052983332126, + "learning_rate": 7.11244017503262e-06, + "loss": 0.5072, + "step": 5478 + }, + { + "epoch": 0.38043327315650605, + "grad_norm": 4.168095180536021, + "learning_rate": 7.111420921738579e-06, + "loss": 0.4549, + "step": 5479 + }, + { + "epoch": 0.3805027079572282, + "grad_norm": 5.098181002899938, + "learning_rate": 7.1104015616512555e-06, + "loss": 0.9791, + "step": 5480 + }, + { + "epoch": 0.38057214275795026, + "grad_norm": 4.786685032851522, + "learning_rate": 7.1093820948222095e-06, + "loss": 0.8421, + "step": 5481 + }, + { + "epoch": 0.3806415775586724, + "grad_norm": 3.493777368207477, + "learning_rate": 7.108362521303003e-06, + "loss": 0.1085, + 
"step": 5482 + }, + { + "epoch": 0.38071101235939453, + "grad_norm": 4.243236676366345, + "learning_rate": 7.107342841145207e-06, + "loss": 0.4051, + "step": 5483 + }, + { + "epoch": 0.38078044716011666, + "grad_norm": 2.8855855180416308, + "learning_rate": 7.106323054400395e-06, + "loss": 0.3682, + "step": 5484 + }, + { + "epoch": 0.3808498819608388, + "grad_norm": 4.209320509335512, + "learning_rate": 7.105303161120146e-06, + "loss": 0.4553, + "step": 5485 + }, + { + "epoch": 0.3809193167615609, + "grad_norm": 3.842062861389581, + "learning_rate": 7.104283161356046e-06, + "loss": 0.5366, + "step": 5486 + }, + { + "epoch": 0.380988751562283, + "grad_norm": 3.8359673042540803, + "learning_rate": 7.103263055159684e-06, + "loss": 0.3823, + "step": 5487 + }, + { + "epoch": 0.38105818636300515, + "grad_norm": 4.4044078559972615, + "learning_rate": 7.102242842582658e-06, + "loss": 0.5356, + "step": 5488 + }, + { + "epoch": 0.3811276211637273, + "grad_norm": 3.268262353247625, + "learning_rate": 7.101222523676565e-06, + "loss": 0.2907, + "step": 5489 + }, + { + "epoch": 0.38119705596444936, + "grad_norm": 4.055831248213965, + "learning_rate": 7.100202098493017e-06, + "loss": 0.5756, + "step": 5490 + }, + { + "epoch": 0.3812664907651715, + "grad_norm": 2.825264798556411, + "learning_rate": 7.099181567083622e-06, + "loss": 0.3259, + "step": 5491 + }, + { + "epoch": 0.38133592556589363, + "grad_norm": 3.6947763603750765, + "learning_rate": 7.0981609294999994e-06, + "loss": 0.4522, + "step": 5492 + }, + { + "epoch": 0.38140536036661576, + "grad_norm": 2.8581579799599797, + "learning_rate": 7.097140185793771e-06, + "loss": 0.2625, + "step": 5493 + }, + { + "epoch": 0.38147479516733784, + "grad_norm": 3.982201048535751, + "learning_rate": 7.096119336016567e-06, + "loss": 0.4934, + "step": 5494 + }, + { + "epoch": 0.38154422996806, + "grad_norm": 4.771532021786225, + "learning_rate": 7.0950983802200165e-06, + "loss": 0.4685, + "step": 5495 + }, + { + "epoch": 0.3816136647687821, + "grad_norm": 3.632382522157554, + "learning_rate": 7.094077318455762e-06, + "loss": 0.4472, + "step": 5496 + }, + { + "epoch": 0.38168309956950425, + "grad_norm": 3.749120895626868, + "learning_rate": 7.093056150775446e-06, + "loss": 0.5335, + "step": 5497 + }, + { + "epoch": 0.3817525343702264, + "grad_norm": 3.7952118509261643, + "learning_rate": 7.092034877230717e-06, + "loss": 0.2069, + "step": 5498 + }, + { + "epoch": 0.38182196917094846, + "grad_norm": 4.5287487518908645, + "learning_rate": 7.091013497873232e-06, + "loss": 0.4937, + "step": 5499 + }, + { + "epoch": 0.3818914039716706, + "grad_norm": 5.142826672831739, + "learning_rate": 7.0899920127546514e-06, + "loss": 0.3951, + "step": 5500 + }, + { + "epoch": 0.38196083877239273, + "grad_norm": 3.433837542335874, + "learning_rate": 7.088970421926638e-06, + "loss": 0.3134, + "step": 5501 + }, + { + "epoch": 0.38203027357311486, + "grad_norm": 3.535113941660185, + "learning_rate": 7.087948725440867e-06, + "loss": 0.3935, + "step": 5502 + }, + { + "epoch": 0.38209970837383694, + "grad_norm": 3.8778537412139378, + "learning_rate": 7.086926923349011e-06, + "loss": 0.5613, + "step": 5503 + }, + { + "epoch": 0.3821691431745591, + "grad_norm": 3.100672672292877, + "learning_rate": 7.085905015702752e-06, + "loss": 0.3661, + "step": 5504 + }, + { + "epoch": 0.3822385779752812, + "grad_norm": 3.4649361142514876, + "learning_rate": 7.084883002553777e-06, + "loss": 0.5329, + "step": 5505 + }, + { + "epoch": 0.38230801277600335, + "grad_norm": 3.9367063808967306, + 
"learning_rate": 7.083860883953782e-06, + "loss": 0.3842, + "step": 5506 + }, + { + "epoch": 0.3823774475767255, + "grad_norm": 4.248300396352316, + "learning_rate": 7.082838659954459e-06, + "loss": 0.5383, + "step": 5507 + }, + { + "epoch": 0.38244688237744756, + "grad_norm": 4.619421953652103, + "learning_rate": 7.0818163306075135e-06, + "loss": 0.8405, + "step": 5508 + }, + { + "epoch": 0.3825163171781697, + "grad_norm": 3.7866627141924836, + "learning_rate": 7.080793895964655e-06, + "loss": 0.3803, + "step": 5509 + }, + { + "epoch": 0.38258575197889183, + "grad_norm": 3.5134780945763544, + "learning_rate": 7.079771356077595e-06, + "loss": 0.5887, + "step": 5510 + }, + { + "epoch": 0.38265518677961396, + "grad_norm": 3.6462447497931056, + "learning_rate": 7.078748710998053e-06, + "loss": 0.3331, + "step": 5511 + }, + { + "epoch": 0.38272462158033604, + "grad_norm": 3.5840849815821696, + "learning_rate": 7.0777259607777534e-06, + "loss": 0.3431, + "step": 5512 + }, + { + "epoch": 0.3827940563810582, + "grad_norm": 3.1535235820731193, + "learning_rate": 7.076703105468428e-06, + "loss": 0.3658, + "step": 5513 + }, + { + "epoch": 0.3828634911817803, + "grad_norm": 3.3595035133197473, + "learning_rate": 7.075680145121806e-06, + "loss": 0.3113, + "step": 5514 + }, + { + "epoch": 0.38293292598250245, + "grad_norm": 3.7859810344569085, + "learning_rate": 7.074657079789633e-06, + "loss": 0.4559, + "step": 5515 + }, + { + "epoch": 0.3830023607832245, + "grad_norm": 5.642204639597304, + "learning_rate": 7.073633909523653e-06, + "loss": 0.5079, + "step": 5516 + }, + { + "epoch": 0.38307179558394666, + "grad_norm": 4.35707192959465, + "learning_rate": 7.072610634375615e-06, + "loss": 0.6242, + "step": 5517 + }, + { + "epoch": 0.3831412303846688, + "grad_norm": 4.5397980050325, + "learning_rate": 7.071587254397278e-06, + "loss": 0.6895, + "step": 5518 + }, + { + "epoch": 0.38321066518539093, + "grad_norm": 3.5104830199609816, + "learning_rate": 7.070563769640401e-06, + "loss": 0.4902, + "step": 5519 + }, + { + "epoch": 0.38328009998611307, + "grad_norm": 4.792848552316435, + "learning_rate": 7.0695401801567505e-06, + "loss": 0.4915, + "step": 5520 + }, + { + "epoch": 0.38334953478683514, + "grad_norm": 4.953515852254025, + "learning_rate": 7.0685164859981e-06, + "loss": 0.6449, + "step": 5521 + }, + { + "epoch": 0.3834189695875573, + "grad_norm": 5.08388751143556, + "learning_rate": 7.067492687216226e-06, + "loss": 0.664, + "step": 5522 + }, + { + "epoch": 0.3834884043882794, + "grad_norm": 3.4777921883020797, + "learning_rate": 7.066468783862913e-06, + "loss": 0.2175, + "step": 5523 + }, + { + "epoch": 0.38355783918900155, + "grad_norm": 3.746619376685117, + "learning_rate": 7.065444775989947e-06, + "loss": 0.5288, + "step": 5524 + }, + { + "epoch": 0.3836272739897236, + "grad_norm": 4.590216361801467, + "learning_rate": 7.064420663649121e-06, + "loss": 0.4268, + "step": 5525 + }, + { + "epoch": 0.38369670879044576, + "grad_norm": 4.499353396360326, + "learning_rate": 7.063396446892235e-06, + "loss": 0.5877, + "step": 5526 + }, + { + "epoch": 0.3837661435911679, + "grad_norm": 4.775062380016619, + "learning_rate": 7.06237212577109e-06, + "loss": 0.6094, + "step": 5527 + }, + { + "epoch": 0.38383557839189003, + "grad_norm": 4.560545407395623, + "learning_rate": 7.0613477003374955e-06, + "loss": 0.5627, + "step": 5528 + }, + { + "epoch": 0.3839050131926121, + "grad_norm": 4.692329623314918, + "learning_rate": 7.06032317064327e-06, + "loss": 0.7517, + "step": 5529 + }, + { + "epoch": 
0.38397444799333424, + "grad_norm": 5.156727926138188, + "learning_rate": 7.0592985367402264e-06, + "loss": 0.8359, + "step": 5530 + }, + { + "epoch": 0.3840438827940564, + "grad_norm": 3.5543404613190934, + "learning_rate": 7.058273798680195e-06, + "loss": 0.544, + "step": 5531 + }, + { + "epoch": 0.3841133175947785, + "grad_norm": 4.497729168733534, + "learning_rate": 7.057248956515005e-06, + "loss": 0.6553, + "step": 5532 + }, + { + "epoch": 0.38418275239550065, + "grad_norm": 4.585107535495594, + "learning_rate": 7.0562240102964894e-06, + "loss": 0.4542, + "step": 5533 + }, + { + "epoch": 0.3842521871962227, + "grad_norm": 4.276323784272024, + "learning_rate": 7.05519896007649e-06, + "loss": 0.5539, + "step": 5534 + }, + { + "epoch": 0.38432162199694486, + "grad_norm": 3.729650412082024, + "learning_rate": 7.0541738059068545e-06, + "loss": 0.4812, + "step": 5535 + }, + { + "epoch": 0.384391056797667, + "grad_norm": 3.413983618224626, + "learning_rate": 7.053148547839431e-06, + "loss": 0.5296, + "step": 5536 + }, + { + "epoch": 0.38446049159838913, + "grad_norm": 3.517262575650505, + "learning_rate": 7.052123185926077e-06, + "loss": 0.5011, + "step": 5537 + }, + { + "epoch": 0.3845299263991112, + "grad_norm": 6.451468643657486, + "learning_rate": 7.051097720218653e-06, + "loss": 1.024, + "step": 5538 + }, + { + "epoch": 0.38459936119983335, + "grad_norm": 4.692449896647971, + "learning_rate": 7.050072150769031e-06, + "loss": 0.6824, + "step": 5539 + }, + { + "epoch": 0.3846687960005555, + "grad_norm": 3.248528350180921, + "learning_rate": 7.049046477629077e-06, + "loss": 0.3442, + "step": 5540 + }, + { + "epoch": 0.3847382308012776, + "grad_norm": 5.052686408667282, + "learning_rate": 7.04802070085067e-06, + "loss": 0.75, + "step": 5541 + }, + { + "epoch": 0.38480766560199975, + "grad_norm": 3.719787411831542, + "learning_rate": 7.046994820485696e-06, + "loss": 0.4893, + "step": 5542 + }, + { + "epoch": 0.38487710040272183, + "grad_norm": 4.245469196283293, + "learning_rate": 7.045968836586039e-06, + "loss": 0.5536, + "step": 5543 + }, + { + "epoch": 0.38494653520344396, + "grad_norm": 2.5570080486129316, + "learning_rate": 7.044942749203593e-06, + "loss": 0.2408, + "step": 5544 + }, + { + "epoch": 0.3850159700041661, + "grad_norm": 3.2690842814637073, + "learning_rate": 7.043916558390258e-06, + "loss": 0.4979, + "step": 5545 + }, + { + "epoch": 0.38508540480488823, + "grad_norm": 3.453828813289731, + "learning_rate": 7.042890264197935e-06, + "loss": 0.441, + "step": 5546 + }, + { + "epoch": 0.3851548396056103, + "grad_norm": 4.055282436906723, + "learning_rate": 7.041863866678535e-06, + "loss": 0.5536, + "step": 5547 + }, + { + "epoch": 0.38522427440633245, + "grad_norm": 2.7667159071586314, + "learning_rate": 7.04083736588397e-06, + "loss": 0.2381, + "step": 5548 + }, + { + "epoch": 0.3852937092070546, + "grad_norm": 3.2417702348858817, + "learning_rate": 7.039810761866162e-06, + "loss": 0.3266, + "step": 5549 + }, + { + "epoch": 0.3853631440077767, + "grad_norm": 3.714801108540632, + "learning_rate": 7.038784054677033e-06, + "loss": 0.6171, + "step": 5550 + }, + { + "epoch": 0.3854325788084988, + "grad_norm": 2.4978252546068855, + "learning_rate": 7.037757244368514e-06, + "loss": 0.1695, + "step": 5551 + }, + { + "epoch": 0.38550201360922093, + "grad_norm": 3.7866144629789855, + "learning_rate": 7.036730330992538e-06, + "loss": 0.4029, + "step": 5552 + }, + { + "epoch": 0.38557144840994306, + "grad_norm": 4.732409396782647, + "learning_rate": 7.035703314601048e-06, + "loss": 0.5831, 
+ "step": 5553 + }, + { + "epoch": 0.3856408832106652, + "grad_norm": 3.6181020923228013, + "learning_rate": 7.034676195245988e-06, + "loss": 0.4055, + "step": 5554 + }, + { + "epoch": 0.38571031801138733, + "grad_norm": 4.055514673968562, + "learning_rate": 7.033648972979308e-06, + "loss": 0.6137, + "step": 5555 + }, + { + "epoch": 0.3857797528121094, + "grad_norm": 4.5921542454519315, + "learning_rate": 7.032621647852964e-06, + "loss": 0.4806, + "step": 5556 + }, + { + "epoch": 0.38584918761283155, + "grad_norm": 4.198207802605822, + "learning_rate": 7.031594219918916e-06, + "loss": 0.3423, + "step": 5557 + }, + { + "epoch": 0.3859186224135537, + "grad_norm": 4.624994704116025, + "learning_rate": 7.030566689229133e-06, + "loss": 0.4937, + "step": 5558 + }, + { + "epoch": 0.3859880572142758, + "grad_norm": 2.9322745459068833, + "learning_rate": 7.0295390558355846e-06, + "loss": 0.3031, + "step": 5559 + }, + { + "epoch": 0.3860574920149979, + "grad_norm": 3.8228415611583246, + "learning_rate": 7.028511319790247e-06, + "loss": 0.5731, + "step": 5560 + }, + { + "epoch": 0.38612692681572003, + "grad_norm": 3.190057175191812, + "learning_rate": 7.027483481145102e-06, + "loss": 0.3455, + "step": 5561 + }, + { + "epoch": 0.38619636161644216, + "grad_norm": 4.1901651611560355, + "learning_rate": 7.026455539952136e-06, + "loss": 0.5482, + "step": 5562 + }, + { + "epoch": 0.3862657964171643, + "grad_norm": 5.919078359061228, + "learning_rate": 7.025427496263344e-06, + "loss": 0.6511, + "step": 5563 + }, + { + "epoch": 0.38633523121788643, + "grad_norm": 3.7727643451859714, + "learning_rate": 7.02439935013072e-06, + "loss": 0.4331, + "step": 5564 + }, + { + "epoch": 0.3864046660186085, + "grad_norm": 4.679877728127092, + "learning_rate": 7.023371101606267e-06, + "loss": 0.6575, + "step": 5565 + }, + { + "epoch": 0.38647410081933065, + "grad_norm": 3.3039380722154883, + "learning_rate": 7.022342750741993e-06, + "loss": 0.4654, + "step": 5566 + }, + { + "epoch": 0.3865435356200528, + "grad_norm": 6.645272198616555, + "learning_rate": 7.021314297589913e-06, + "loss": 0.4788, + "step": 5567 + }, + { + "epoch": 0.3866129704207749, + "grad_norm": 4.822798338025373, + "learning_rate": 7.0202857422020424e-06, + "loss": 0.5537, + "step": 5568 + }, + { + "epoch": 0.386682405221497, + "grad_norm": 3.2169703775892917, + "learning_rate": 7.019257084630404e-06, + "loss": 0.3326, + "step": 5569 + }, + { + "epoch": 0.38675184002221913, + "grad_norm": 3.1645760359552937, + "learning_rate": 7.018228324927027e-06, + "loss": 0.3592, + "step": 5570 + }, + { + "epoch": 0.38682127482294126, + "grad_norm": 4.018737878555018, + "learning_rate": 7.017199463143946e-06, + "loss": 0.4987, + "step": 5571 + }, + { + "epoch": 0.3868907096236634, + "grad_norm": 2.8093752159617997, + "learning_rate": 7.0161704993332e-06, + "loss": 0.4107, + "step": 5572 + }, + { + "epoch": 0.3869601444243855, + "grad_norm": 4.174055932966737, + "learning_rate": 7.015141433546829e-06, + "loss": 0.471, + "step": 5573 + }, + { + "epoch": 0.3870295792251076, + "grad_norm": 3.8015600366779454, + "learning_rate": 7.014112265836887e-06, + "loss": 0.4382, + "step": 5574 + }, + { + "epoch": 0.38709901402582975, + "grad_norm": 3.8721827049204744, + "learning_rate": 7.013082996255425e-06, + "loss": 0.3839, + "step": 5575 + }, + { + "epoch": 0.3871684488265519, + "grad_norm": 7.348275241277975, + "learning_rate": 7.012053624854503e-06, + "loss": 0.4103, + "step": 5576 + }, + { + "epoch": 0.387237883627274, + "grad_norm": 3.71490420962562, + "learning_rate": 
7.011024151686185e-06, + "loss": 0.4849, + "step": 5577 + }, + { + "epoch": 0.3873073184279961, + "grad_norm": 3.717905137044848, + "learning_rate": 7.009994576802541e-06, + "loss": 0.3523, + "step": 5578 + }, + { + "epoch": 0.38737675322871823, + "grad_norm": 3.507506038575984, + "learning_rate": 7.008964900255646e-06, + "loss": 0.3487, + "step": 5579 + }, + { + "epoch": 0.38744618802944036, + "grad_norm": 3.340294639954744, + "learning_rate": 7.00793512209758e-06, + "loss": 0.2975, + "step": 5580 + }, + { + "epoch": 0.3875156228301625, + "grad_norm": 3.9263552243048125, + "learning_rate": 7.006905242380426e-06, + "loss": 0.5627, + "step": 5581 + }, + { + "epoch": 0.3875850576308846, + "grad_norm": 3.439278262923715, + "learning_rate": 7.005875261156276e-06, + "loss": 0.3546, + "step": 5582 + }, + { + "epoch": 0.3876544924316067, + "grad_norm": 4.274673593606483, + "learning_rate": 7.004845178477227e-06, + "loss": 0.6202, + "step": 5583 + }, + { + "epoch": 0.38772392723232885, + "grad_norm": 3.489918092315722, + "learning_rate": 7.003814994395378e-06, + "loss": 0.3566, + "step": 5584 + }, + { + "epoch": 0.387793362033051, + "grad_norm": 2.708956815756573, + "learning_rate": 7.002784708962832e-06, + "loss": 0.2368, + "step": 5585 + }, + { + "epoch": 0.38786279683377306, + "grad_norm": 4.315090205916436, + "learning_rate": 7.001754322231702e-06, + "loss": 0.358, + "step": 5586 + }, + { + "epoch": 0.3879322316344952, + "grad_norm": 5.724749626709329, + "learning_rate": 7.0007238342541035e-06, + "loss": 0.9594, + "step": 5587 + }, + { + "epoch": 0.38800166643521733, + "grad_norm": 7.165319967867814, + "learning_rate": 6.99969324508216e-06, + "loss": 0.6098, + "step": 5588 + }, + { + "epoch": 0.38807110123593946, + "grad_norm": 4.6523906368739825, + "learning_rate": 6.998662554767991e-06, + "loss": 0.764, + "step": 5589 + }, + { + "epoch": 0.3881405360366616, + "grad_norm": 3.4303971874574115, + "learning_rate": 6.9976317633637344e-06, + "loss": 0.3919, + "step": 5590 + }, + { + "epoch": 0.3882099708373837, + "grad_norm": 3.6018037527804823, + "learning_rate": 6.996600870921522e-06, + "loss": 0.3691, + "step": 5591 + }, + { + "epoch": 0.3882794056381058, + "grad_norm": 19.431526586127553, + "learning_rate": 6.995569877493498e-06, + "loss": 0.3072, + "step": 5592 + }, + { + "epoch": 0.38834884043882795, + "grad_norm": 3.86914378857059, + "learning_rate": 6.994538783131808e-06, + "loss": 0.5025, + "step": 5593 + }, + { + "epoch": 0.3884182752395501, + "grad_norm": 3.5008499951980943, + "learning_rate": 6.993507587888602e-06, + "loss": 0.324, + "step": 5594 + }, + { + "epoch": 0.38848771004027216, + "grad_norm": 3.68821762968492, + "learning_rate": 6.992476291816039e-06, + "loss": 0.5203, + "step": 5595 + }, + { + "epoch": 0.3885571448409943, + "grad_norm": 4.420635092630538, + "learning_rate": 6.991444894966279e-06, + "loss": 0.5721, + "step": 5596 + }, + { + "epoch": 0.38862657964171643, + "grad_norm": 4.009226130677225, + "learning_rate": 6.990413397391491e-06, + "loss": 0.5236, + "step": 5597 + }, + { + "epoch": 0.38869601444243856, + "grad_norm": 3.7039231455641466, + "learning_rate": 6.989381799143844e-06, + "loss": 0.4255, + "step": 5598 + }, + { + "epoch": 0.3887654492431607, + "grad_norm": 2.554638186231122, + "learning_rate": 6.9883501002755175e-06, + "loss": 0.1971, + "step": 5599 + }, + { + "epoch": 0.3888348840438828, + "grad_norm": 3.8927945369170023, + "learning_rate": 6.987318300838695e-06, + "loss": 0.6669, + "step": 5600 + }, + { + "epoch": 0.3889043188446049, + "grad_norm": 
3.1765062018794157, + "learning_rate": 6.986286400885558e-06, + "loss": 0.4318, + "step": 5601 + }, + { + "epoch": 0.38897375364532705, + "grad_norm": 4.712948994945242, + "learning_rate": 6.985254400468305e-06, + "loss": 0.6575, + "step": 5602 + }, + { + "epoch": 0.3890431884460492, + "grad_norm": 4.662775590173519, + "learning_rate": 6.984222299639129e-06, + "loss": 0.6473, + "step": 5603 + }, + { + "epoch": 0.38911262324677126, + "grad_norm": 4.340494894506122, + "learning_rate": 6.983190098450235e-06, + "loss": 0.4358, + "step": 5604 + }, + { + "epoch": 0.3891820580474934, + "grad_norm": 2.5312331926975187, + "learning_rate": 6.98215779695383e-06, + "loss": 0.2963, + "step": 5605 + }, + { + "epoch": 0.38925149284821553, + "grad_norm": 4.308918574518346, + "learning_rate": 6.981125395202128e-06, + "loss": 0.6722, + "step": 5606 + }, + { + "epoch": 0.38932092764893766, + "grad_norm": 4.860550664419271, + "learning_rate": 6.980092893247344e-06, + "loss": 0.823, + "step": 5607 + }, + { + "epoch": 0.38939036244965974, + "grad_norm": 4.371834572942315, + "learning_rate": 6.979060291141702e-06, + "loss": 0.6467, + "step": 5608 + }, + { + "epoch": 0.3894597972503819, + "grad_norm": 3.5341300384562904, + "learning_rate": 6.9780275889374305e-06, + "loss": 0.5019, + "step": 5609 + }, + { + "epoch": 0.389529232051104, + "grad_norm": 4.056427588271549, + "learning_rate": 6.976994786686761e-06, + "loss": 0.4976, + "step": 5610 + }, + { + "epoch": 0.38959866685182615, + "grad_norm": 3.776327125606446, + "learning_rate": 6.975961884441933e-06, + "loss": 0.4237, + "step": 5611 + }, + { + "epoch": 0.3896681016525483, + "grad_norm": 2.8983650089052917, + "learning_rate": 6.974928882255189e-06, + "loss": 0.4039, + "step": 5612 + }, + { + "epoch": 0.38973753645327036, + "grad_norm": 4.204035839604379, + "learning_rate": 6.973895780178776e-06, + "loss": 0.4017, + "step": 5613 + }, + { + "epoch": 0.3898069712539925, + "grad_norm": 2.880651194525737, + "learning_rate": 6.9728625782649495e-06, + "loss": 0.2168, + "step": 5614 + }, + { + "epoch": 0.38987640605471463, + "grad_norm": 4.44286454813566, + "learning_rate": 6.971829276565964e-06, + "loss": 0.5611, + "step": 5615 + }, + { + "epoch": 0.38994584085543676, + "grad_norm": 4.001242223190559, + "learning_rate": 6.970795875134088e-06, + "loss": 0.5831, + "step": 5616 + }, + { + "epoch": 0.39001527565615884, + "grad_norm": 2.382641154340544, + "learning_rate": 6.969762374021585e-06, + "loss": 0.1705, + "step": 5617 + }, + { + "epoch": 0.390084710456881, + "grad_norm": 4.263278390666253, + "learning_rate": 6.96872877328073e-06, + "loss": 0.4376, + "step": 5618 + }, + { + "epoch": 0.3901541452576031, + "grad_norm": 3.289177508651344, + "learning_rate": 6.967695072963802e-06, + "loss": 0.3664, + "step": 5619 + }, + { + "epoch": 0.39022358005832525, + "grad_norm": 3.4325002230462744, + "learning_rate": 6.966661273123083e-06, + "loss": 0.6771, + "step": 5620 + }, + { + "epoch": 0.3902930148590473, + "grad_norm": 3.8667911670375363, + "learning_rate": 6.965627373810863e-06, + "loss": 0.483, + "step": 5621 + }, + { + "epoch": 0.39036244965976946, + "grad_norm": 3.718252455372809, + "learning_rate": 6.964593375079435e-06, + "loss": 0.2595, + "step": 5622 + }, + { + "epoch": 0.3904318844604916, + "grad_norm": 3.929784413371683, + "learning_rate": 6.963559276981095e-06, + "loss": 0.5785, + "step": 5623 + }, + { + "epoch": 0.39050131926121373, + "grad_norm": 3.5372707404233057, + "learning_rate": 6.962525079568151e-06, + "loss": 0.459, + "step": 5624 + }, + { + "epoch": 
0.39057075406193587, + "grad_norm": 5.114464347641802, + "learning_rate": 6.961490782892908e-06, + "loss": 0.686, + "step": 5625 + }, + { + "epoch": 0.39064018886265794, + "grad_norm": 3.92918219508859, + "learning_rate": 6.960456387007682e-06, + "loss": 0.4886, + "step": 5626 + }, + { + "epoch": 0.3907096236633801, + "grad_norm": 4.884722372586597, + "learning_rate": 6.95942189196479e-06, + "loss": 0.5602, + "step": 5627 + }, + { + "epoch": 0.3907790584641022, + "grad_norm": 4.150849891568653, + "learning_rate": 6.9583872978165534e-06, + "loss": 0.7654, + "step": 5628 + }, + { + "epoch": 0.39084849326482435, + "grad_norm": 4.8439816219451535, + "learning_rate": 6.957352604615307e-06, + "loss": 0.7586, + "step": 5629 + }, + { + "epoch": 0.3909179280655464, + "grad_norm": 4.348139707625982, + "learning_rate": 6.956317812413378e-06, + "loss": 0.3261, + "step": 5630 + }, + { + "epoch": 0.39098736286626856, + "grad_norm": 4.469305718657237, + "learning_rate": 6.955282921263109e-06, + "loss": 0.5415, + "step": 5631 + }, + { + "epoch": 0.3910567976669907, + "grad_norm": 3.849947284197216, + "learning_rate": 6.9542479312168436e-06, + "loss": 0.4641, + "step": 5632 + }, + { + "epoch": 0.39112623246771283, + "grad_norm": 3.4145016591109085, + "learning_rate": 6.953212842326927e-06, + "loss": 0.3429, + "step": 5633 + }, + { + "epoch": 0.39119566726843497, + "grad_norm": 3.2896478984667, + "learning_rate": 6.952177654645717e-06, + "loss": 0.3205, + "step": 5634 + }, + { + "epoch": 0.39126510206915704, + "grad_norm": 3.974683380064343, + "learning_rate": 6.95114236822557e-06, + "loss": 0.4984, + "step": 5635 + }, + { + "epoch": 0.3913345368698792, + "grad_norm": 5.161437814693137, + "learning_rate": 6.950106983118848e-06, + "loss": 0.5833, + "step": 5636 + }, + { + "epoch": 0.3914039716706013, + "grad_norm": 3.073619093326718, + "learning_rate": 6.949071499377922e-06, + "loss": 0.2787, + "step": 5637 + }, + { + "epoch": 0.39147340647132345, + "grad_norm": 3.0741303349864437, + "learning_rate": 6.9480359170551655e-06, + "loss": 0.3635, + "step": 5638 + }, + { + "epoch": 0.3915428412720455, + "grad_norm": 5.1197127438759535, + "learning_rate": 6.947000236202958e-06, + "loss": 0.417, + "step": 5639 + }, + { + "epoch": 0.39161227607276766, + "grad_norm": 4.851343354653931, + "learning_rate": 6.9459644568736805e-06, + "loss": 0.4673, + "step": 5640 + }, + { + "epoch": 0.3916817108734898, + "grad_norm": 4.34458764056399, + "learning_rate": 6.944928579119723e-06, + "loss": 0.6188, + "step": 5641 + }, + { + "epoch": 0.39175114567421193, + "grad_norm": 1.892028433648046, + "learning_rate": 6.943892602993478e-06, + "loss": 0.0983, + "step": 5642 + }, + { + "epoch": 0.391820580474934, + "grad_norm": 4.16143930583073, + "learning_rate": 6.942856528547346e-06, + "loss": 0.3165, + "step": 5643 + }, + { + "epoch": 0.39189001527565615, + "grad_norm": 3.368090511867243, + "learning_rate": 6.941820355833728e-06, + "loss": 0.3535, + "step": 5644 + }, + { + "epoch": 0.3919594500763783, + "grad_norm": 3.2248980383005175, + "learning_rate": 6.940784084905037e-06, + "loss": 0.4341, + "step": 5645 + }, + { + "epoch": 0.3920288848771004, + "grad_norm": 4.304220058262542, + "learning_rate": 6.939747715813679e-06, + "loss": 0.6458, + "step": 5646 + }, + { + "epoch": 0.39209831967782255, + "grad_norm": 3.6405002356229637, + "learning_rate": 6.938711248612078e-06, + "loss": 0.2951, + "step": 5647 + }, + { + "epoch": 0.39216775447854463, + "grad_norm": 3.1226667823843623, + "learning_rate": 6.9376746833526565e-06, + "loss": 
0.1876, + "step": 5648 + }, + { + "epoch": 0.39223718927926676, + "grad_norm": 3.7314782416779204, + "learning_rate": 6.936638020087842e-06, + "loss": 0.5664, + "step": 5649 + }, + { + "epoch": 0.3923066240799889, + "grad_norm": 3.748428245801171, + "learning_rate": 6.935601258870067e-06, + "loss": 0.3878, + "step": 5650 + }, + { + "epoch": 0.39237605888071103, + "grad_norm": 3.651862971980098, + "learning_rate": 6.934564399751772e-06, + "loss": 0.57, + "step": 5651 + }, + { + "epoch": 0.3924454936814331, + "grad_norm": 4.286191481096184, + "learning_rate": 6.933527442785397e-06, + "loss": 0.2867, + "step": 5652 + }, + { + "epoch": 0.39251492848215525, + "grad_norm": 4.31259829474449, + "learning_rate": 6.932490388023393e-06, + "loss": 0.5817, + "step": 5653 + }, + { + "epoch": 0.3925843632828774, + "grad_norm": 4.059726610183235, + "learning_rate": 6.931453235518212e-06, + "loss": 0.4275, + "step": 5654 + }, + { + "epoch": 0.3926537980835995, + "grad_norm": 4.147220750858426, + "learning_rate": 6.930415985322312e-06, + "loss": 0.8832, + "step": 5655 + }, + { + "epoch": 0.39272323288432165, + "grad_norm": 3.6406309757150543, + "learning_rate": 6.929378637488156e-06, + "loss": 0.466, + "step": 5656 + }, + { + "epoch": 0.39279266768504373, + "grad_norm": 2.528006067096268, + "learning_rate": 6.928341192068211e-06, + "loss": 0.2144, + "step": 5657 + }, + { + "epoch": 0.39286210248576586, + "grad_norm": 3.5934751826163924, + "learning_rate": 6.9273036491149506e-06, + "loss": 0.56, + "step": 5658 + }, + { + "epoch": 0.392931537286488, + "grad_norm": 4.103672026055777, + "learning_rate": 6.926266008680853e-06, + "loss": 0.4996, + "step": 5659 + }, + { + "epoch": 0.39300097208721013, + "grad_norm": 3.7559624270861898, + "learning_rate": 6.9252282708183995e-06, + "loss": 0.4962, + "step": 5660 + }, + { + "epoch": 0.3930704068879322, + "grad_norm": 3.941428510568149, + "learning_rate": 6.92419043558008e-06, + "loss": 0.5785, + "step": 5661 + }, + { + "epoch": 0.39313984168865435, + "grad_norm": 4.499825759436411, + "learning_rate": 6.923152503018384e-06, + "loss": 0.7415, + "step": 5662 + }, + { + "epoch": 0.3932092764893765, + "grad_norm": 3.122930677618771, + "learning_rate": 6.922114473185812e-06, + "loss": 0.4815, + "step": 5663 + }, + { + "epoch": 0.3932787112900986, + "grad_norm": 3.2400275808225887, + "learning_rate": 6.9210763461348655e-06, + "loss": 0.4084, + "step": 5664 + }, + { + "epoch": 0.3933481460908207, + "grad_norm": 3.5740210898751714, + "learning_rate": 6.920038121918049e-06, + "loss": 0.3103, + "step": 5665 + }, + { + "epoch": 0.39341758089154283, + "grad_norm": 3.242407917280528, + "learning_rate": 6.9189998005878776e-06, + "loss": 0.2125, + "step": 5666 + }, + { + "epoch": 0.39348701569226496, + "grad_norm": 3.3168840406506113, + "learning_rate": 6.9179613821968695e-06, + "loss": 0.3734, + "step": 5667 + }, + { + "epoch": 0.3935564504929871, + "grad_norm": 3.822325628234687, + "learning_rate": 6.916922866797544e-06, + "loss": 0.4396, + "step": 5668 + }, + { + "epoch": 0.39362588529370923, + "grad_norm": 2.979766358154854, + "learning_rate": 6.915884254442429e-06, + "loss": 0.2746, + "step": 5669 + }, + { + "epoch": 0.3936953200944313, + "grad_norm": 3.7979847729435434, + "learning_rate": 6.914845545184056e-06, + "loss": 0.3301, + "step": 5670 + }, + { + "epoch": 0.39376475489515345, + "grad_norm": 4.384429971176278, + "learning_rate": 6.913806739074962e-06, + "loss": 0.4338, + "step": 5671 + }, + { + "epoch": 0.3938341896958756, + "grad_norm": 5.4920167477982265, + 
"learning_rate": 6.9127678361676896e-06, + "loss": 0.5428, + "step": 5672 + }, + { + "epoch": 0.3939036244965977, + "grad_norm": 4.32585380348086, + "learning_rate": 6.911728836514784e-06, + "loss": 0.665, + "step": 5673 + }, + { + "epoch": 0.3939730592973198, + "grad_norm": 3.8688090807236257, + "learning_rate": 6.910689740168798e-06, + "loss": 0.3709, + "step": 5674 + }, + { + "epoch": 0.39404249409804193, + "grad_norm": 3.916852042272188, + "learning_rate": 6.909650547182286e-06, + "loss": 0.3611, + "step": 5675 + }, + { + "epoch": 0.39411192889876406, + "grad_norm": 3.250147561970421, + "learning_rate": 6.90861125760781e-06, + "loss": 0.2622, + "step": 5676 + }, + { + "epoch": 0.3941813636994862, + "grad_norm": 3.0542572483972177, + "learning_rate": 6.9075718714979375e-06, + "loss": 0.3407, + "step": 5677 + }, + { + "epoch": 0.3942507985002083, + "grad_norm": 2.744717701354929, + "learning_rate": 6.906532388905237e-06, + "loss": 0.1949, + "step": 5678 + }, + { + "epoch": 0.3943202333009304, + "grad_norm": 3.7925690102687164, + "learning_rate": 6.905492809882286e-06, + "loss": 0.5389, + "step": 5679 + }, + { + "epoch": 0.39438966810165255, + "grad_norm": 3.4525734113339515, + "learning_rate": 6.9044531344816655e-06, + "loss": 0.4237, + "step": 5680 + }, + { + "epoch": 0.3944591029023747, + "grad_norm": 3.073162612849862, + "learning_rate": 6.903413362755961e-06, + "loss": 0.257, + "step": 5681 + }, + { + "epoch": 0.3945285377030968, + "grad_norm": 4.2202133215026985, + "learning_rate": 6.902373494757761e-06, + "loss": 0.4427, + "step": 5682 + }, + { + "epoch": 0.3945979725038189, + "grad_norm": 4.706320370791534, + "learning_rate": 6.901333530539662e-06, + "loss": 0.7231, + "step": 5683 + }, + { + "epoch": 0.39466740730454103, + "grad_norm": 3.7565743947962935, + "learning_rate": 6.900293470154267e-06, + "loss": 0.494, + "step": 5684 + }, + { + "epoch": 0.39473684210526316, + "grad_norm": 3.7580221314488385, + "learning_rate": 6.899253313654176e-06, + "loss": 0.4698, + "step": 5685 + }, + { + "epoch": 0.3948062769059853, + "grad_norm": 4.283663052222066, + "learning_rate": 6.898213061092003e-06, + "loss": 0.4602, + "step": 5686 + }, + { + "epoch": 0.3948757117067074, + "grad_norm": 3.128887990541601, + "learning_rate": 6.897172712520361e-06, + "loss": 0.3616, + "step": 5687 + }, + { + "epoch": 0.3949451465074295, + "grad_norm": 2.3506992899499646, + "learning_rate": 6.896132267991871e-06, + "loss": 0.2614, + "step": 5688 + }, + { + "epoch": 0.39501458130815165, + "grad_norm": 3.2703354348261233, + "learning_rate": 6.895091727559153e-06, + "loss": 0.4132, + "step": 5689 + }, + { + "epoch": 0.3950840161088738, + "grad_norm": 3.0196151914221456, + "learning_rate": 6.894051091274845e-06, + "loss": 0.4032, + "step": 5690 + }, + { + "epoch": 0.3951534509095959, + "grad_norm": 3.699039380100635, + "learning_rate": 6.893010359191572e-06, + "loss": 0.5582, + "step": 5691 + }, + { + "epoch": 0.395222885710318, + "grad_norm": 4.283148583781468, + "learning_rate": 6.891969531361979e-06, + "loss": 0.5212, + "step": 5692 + }, + { + "epoch": 0.39529232051104013, + "grad_norm": 3.403303670303957, + "learning_rate": 6.890928607838708e-06, + "loss": 0.3134, + "step": 5693 + }, + { + "epoch": 0.39536175531176226, + "grad_norm": 3.0504556519539663, + "learning_rate": 6.889887588674407e-06, + "loss": 0.2064, + "step": 5694 + }, + { + "epoch": 0.3954311901124844, + "grad_norm": 3.5711469171689862, + "learning_rate": 6.888846473921731e-06, + "loss": 0.5361, + "step": 5695 + }, + { + "epoch": 
0.3955006249132065, + "grad_norm": 2.7723326193925906, + "learning_rate": 6.887805263633337e-06, + "loss": 0.2763, + "step": 5696 + }, + { + "epoch": 0.3955700597139286, + "grad_norm": 3.4297016123590174, + "learning_rate": 6.886763957861889e-06, + "loss": 0.4801, + "step": 5697 + }, + { + "epoch": 0.39563949451465075, + "grad_norm": 3.7451022007737786, + "learning_rate": 6.885722556660055e-06, + "loss": 0.4784, + "step": 5698 + }, + { + "epoch": 0.3957089293153729, + "grad_norm": 5.7728960260090405, + "learning_rate": 6.884681060080508e-06, + "loss": 0.5532, + "step": 5699 + }, + { + "epoch": 0.39577836411609496, + "grad_norm": 3.6806774963214837, + "learning_rate": 6.883639468175926e-06, + "loss": 0.2997, + "step": 5700 + }, + { + "epoch": 0.3958477989168171, + "grad_norm": 4.2502706128469505, + "learning_rate": 6.882597780998991e-06, + "loss": 0.5236, + "step": 5701 + }, + { + "epoch": 0.39591723371753923, + "grad_norm": 4.807145252930331, + "learning_rate": 6.8815559986023905e-06, + "loss": 0.6043, + "step": 5702 + }, + { + "epoch": 0.39598666851826136, + "grad_norm": 2.2817857365450567, + "learning_rate": 6.880514121038817e-06, + "loss": 0.2168, + "step": 5703 + }, + { + "epoch": 0.3960561033189835, + "grad_norm": 2.627355321915552, + "learning_rate": 6.8794721483609675e-06, + "loss": 0.1961, + "step": 5704 + }, + { + "epoch": 0.3961255381197056, + "grad_norm": 3.481442034571039, + "learning_rate": 6.878430080621542e-06, + "loss": 0.3637, + "step": 5705 + }, + { + "epoch": 0.3961949729204277, + "grad_norm": 4.109572521635142, + "learning_rate": 6.877387917873251e-06, + "loss": 0.545, + "step": 5706 + }, + { + "epoch": 0.39626440772114985, + "grad_norm": 4.529743450045566, + "learning_rate": 6.8763456601688016e-06, + "loss": 0.7499, + "step": 5707 + }, + { + "epoch": 0.396333842521872, + "grad_norm": 3.7313108160221815, + "learning_rate": 6.875303307560912e-06, + "loss": 0.4675, + "step": 5708 + }, + { + "epoch": 0.39640327732259406, + "grad_norm": 3.380360971431975, + "learning_rate": 6.874260860102304e-06, + "loss": 0.4114, + "step": 5709 + }, + { + "epoch": 0.3964727121233162, + "grad_norm": 4.414549822904587, + "learning_rate": 6.873218317845702e-06, + "loss": 0.5324, + "step": 5710 + }, + { + "epoch": 0.39654214692403833, + "grad_norm": 3.8894110901931835, + "learning_rate": 6.872175680843838e-06, + "loss": 0.7147, + "step": 5711 + }, + { + "epoch": 0.39661158172476046, + "grad_norm": 2.909511194718064, + "learning_rate": 6.871132949149446e-06, + "loss": 0.3169, + "step": 5712 + }, + { + "epoch": 0.3966810165254826, + "grad_norm": 3.804742124671051, + "learning_rate": 6.8700901228152676e-06, + "loss": 0.451, + "step": 5713 + }, + { + "epoch": 0.3967504513262047, + "grad_norm": 2.962890617307222, + "learning_rate": 6.869047201894044e-06, + "loss": 0.299, + "step": 5714 + }, + { + "epoch": 0.3968198861269268, + "grad_norm": 2.9025558713792647, + "learning_rate": 6.868004186438531e-06, + "loss": 0.3069, + "step": 5715 + }, + { + "epoch": 0.39688932092764895, + "grad_norm": 3.9509765110919703, + "learning_rate": 6.866961076501479e-06, + "loss": 0.4061, + "step": 5716 + }, + { + "epoch": 0.3969587557283711, + "grad_norm": 3.249056966087549, + "learning_rate": 6.8659178721356475e-06, + "loss": 0.5183, + "step": 5717 + }, + { + "epoch": 0.39702819052909316, + "grad_norm": 4.238189500010239, + "learning_rate": 6.864874573393803e-06, + "loss": 0.5372, + "step": 5718 + }, + { + "epoch": 0.3970976253298153, + "grad_norm": 3.5986005768163736, + "learning_rate": 6.863831180328713e-06, + 
"loss": 0.5113, + "step": 5719 + }, + { + "epoch": 0.39716706013053743, + "grad_norm": 4.118884875098035, + "learning_rate": 6.862787692993149e-06, + "loss": 0.6818, + "step": 5720 + }, + { + "epoch": 0.39723649493125956, + "grad_norm": 3.7038356343748293, + "learning_rate": 6.861744111439892e-06, + "loss": 0.4052, + "step": 5721 + }, + { + "epoch": 0.39730592973198164, + "grad_norm": 3.0483853210755716, + "learning_rate": 6.8607004357217265e-06, + "loss": 0.477, + "step": 5722 + }, + { + "epoch": 0.3973753645327038, + "grad_norm": 3.346963108688846, + "learning_rate": 6.8596566658914365e-06, + "loss": 0.3977, + "step": 5723 + }, + { + "epoch": 0.3974447993334259, + "grad_norm": 4.5038583792909055, + "learning_rate": 6.858612802001817e-06, + "loss": 0.6457, + "step": 5724 + }, + { + "epoch": 0.39751423413414805, + "grad_norm": 3.5704439308258538, + "learning_rate": 6.857568844105665e-06, + "loss": 0.3142, + "step": 5725 + }, + { + "epoch": 0.3975836689348702, + "grad_norm": 3.352613496352378, + "learning_rate": 6.856524792255783e-06, + "loss": 0.2664, + "step": 5726 + }, + { + "epoch": 0.39765310373559226, + "grad_norm": 4.935145846798327, + "learning_rate": 6.855480646504978e-06, + "loss": 0.8577, + "step": 5727 + }, + { + "epoch": 0.3977225385363144, + "grad_norm": 4.235744910050579, + "learning_rate": 6.854436406906062e-06, + "loss": 0.7081, + "step": 5728 + }, + { + "epoch": 0.39779197333703653, + "grad_norm": 3.088193450647446, + "learning_rate": 6.853392073511851e-06, + "loss": 0.2973, + "step": 5729 + }, + { + "epoch": 0.39786140813775867, + "grad_norm": 4.656690591011675, + "learning_rate": 6.852347646375165e-06, + "loss": 0.8398, + "step": 5730 + }, + { + "epoch": 0.39793084293848074, + "grad_norm": 4.014157080101815, + "learning_rate": 6.851303125548832e-06, + "loss": 0.3277, + "step": 5731 + }, + { + "epoch": 0.3980002777392029, + "grad_norm": 3.563924655667442, + "learning_rate": 6.850258511085681e-06, + "loss": 0.5612, + "step": 5732 + }, + { + "epoch": 0.398069712539925, + "grad_norm": 3.8358159251406607, + "learning_rate": 6.849213803038549e-06, + "loss": 0.5555, + "step": 5733 + }, + { + "epoch": 0.39813914734064715, + "grad_norm": 3.664411173842289, + "learning_rate": 6.848169001460276e-06, + "loss": 0.4186, + "step": 5734 + }, + { + "epoch": 0.3982085821413692, + "grad_norm": 3.3591401129858527, + "learning_rate": 6.847124106403705e-06, + "loss": 0.4952, + "step": 5735 + }, + { + "epoch": 0.39827801694209136, + "grad_norm": 4.627110099346733, + "learning_rate": 6.846079117921688e-06, + "loss": 0.5048, + "step": 5736 + }, + { + "epoch": 0.3983474517428135, + "grad_norm": 2.5849897856317483, + "learning_rate": 6.8450340360670764e-06, + "loss": 0.2822, + "step": 5737 + }, + { + "epoch": 0.39841688654353563, + "grad_norm": 2.4415686443865092, + "learning_rate": 6.843988860892733e-06, + "loss": 0.2027, + "step": 5738 + }, + { + "epoch": 0.39848632134425777, + "grad_norm": 4.293609472217636, + "learning_rate": 6.8429435924515184e-06, + "loss": 0.7884, + "step": 5739 + }, + { + "epoch": 0.39855575614497984, + "grad_norm": 3.840276490537884, + "learning_rate": 6.841898230796302e-06, + "loss": 0.3357, + "step": 5740 + }, + { + "epoch": 0.398625190945702, + "grad_norm": 6.584551300194193, + "learning_rate": 6.840852775979958e-06, + "loss": 0.5519, + "step": 5741 + }, + { + "epoch": 0.3986946257464241, + "grad_norm": 4.155648557139466, + "learning_rate": 6.839807228055364e-06, + "loss": 0.6205, + "step": 5742 + }, + { + "epoch": 0.39876406054714625, + "grad_norm": 
3.9743670204833865, + "learning_rate": 6.838761587075402e-06, + "loss": 0.5917, + "step": 5743 + }, + { + "epoch": 0.3988334953478683, + "grad_norm": 3.2416449999328663, + "learning_rate": 6.837715853092959e-06, + "loss": 0.519, + "step": 5744 + }, + { + "epoch": 0.39890293014859046, + "grad_norm": 3.556474667587179, + "learning_rate": 6.83667002616093e-06, + "loss": 0.4837, + "step": 5745 + }, + { + "epoch": 0.3989723649493126, + "grad_norm": 4.604360051831115, + "learning_rate": 6.835624106332206e-06, + "loss": 0.5177, + "step": 5746 + }, + { + "epoch": 0.39904179975003473, + "grad_norm": 2.7360673463044645, + "learning_rate": 6.834578093659694e-06, + "loss": 0.334, + "step": 5747 + }, + { + "epoch": 0.39911123455075687, + "grad_norm": 4.14502600402918, + "learning_rate": 6.833531988196298e-06, + "loss": 0.634, + "step": 5748 + }, + { + "epoch": 0.39918066935147895, + "grad_norm": 3.5076968645883717, + "learning_rate": 6.8324857899949285e-06, + "loss": 0.4232, + "step": 5749 + }, + { + "epoch": 0.3992501041522011, + "grad_norm": 4.138182094059944, + "learning_rate": 6.831439499108501e-06, + "loss": 0.3635, + "step": 5750 + }, + { + "epoch": 0.3993195389529232, + "grad_norm": 3.012203454688716, + "learning_rate": 6.830393115589936e-06, + "loss": 0.3278, + "step": 5751 + }, + { + "epoch": 0.39938897375364535, + "grad_norm": 3.756862358094155, + "learning_rate": 6.8293466394921595e-06, + "loss": 0.3345, + "step": 5752 + }, + { + "epoch": 0.39945840855436743, + "grad_norm": 3.407170541963846, + "learning_rate": 6.828300070868098e-06, + "loss": 0.407, + "step": 5753 + }, + { + "epoch": 0.39952784335508956, + "grad_norm": 11.645297484858022, + "learning_rate": 6.827253409770689e-06, + "loss": 0.2956, + "step": 5754 + }, + { + "epoch": 0.3995972781558117, + "grad_norm": 3.4056288681042672, + "learning_rate": 6.82620665625287e-06, + "loss": 0.3145, + "step": 5755 + }, + { + "epoch": 0.39966671295653383, + "grad_norm": 3.5116101156797828, + "learning_rate": 6.825159810367585e-06, + "loss": 0.5053, + "step": 5756 + }, + { + "epoch": 0.3997361477572559, + "grad_norm": 4.5644216140444565, + "learning_rate": 6.8241128721677825e-06, + "loss": 0.4335, + "step": 5757 + }, + { + "epoch": 0.39980558255797805, + "grad_norm": 4.104930749672699, + "learning_rate": 6.823065841706415e-06, + "loss": 0.6777, + "step": 5758 + }, + { + "epoch": 0.3998750173587002, + "grad_norm": 2.7627837793095114, + "learning_rate": 6.822018719036439e-06, + "loss": 0.2426, + "step": 5759 + }, + { + "epoch": 0.3999444521594223, + "grad_norm": 3.805381598568102, + "learning_rate": 6.820971504210818e-06, + "loss": 0.4598, + "step": 5760 + }, + { + "epoch": 0.40001388696014445, + "grad_norm": 4.175901637400691, + "learning_rate": 6.819924197282521e-06, + "loss": 0.4677, + "step": 5761 + }, + { + "epoch": 0.40008332176086653, + "grad_norm": 4.489454257235447, + "learning_rate": 6.818876798304516e-06, + "loss": 0.5574, + "step": 5762 + }, + { + "epoch": 0.40015275656158866, + "grad_norm": 3.858224049626737, + "learning_rate": 6.817829307329781e-06, + "loss": 0.482, + "step": 5763 + }, + { + "epoch": 0.4002221913623108, + "grad_norm": 4.165831120904523, + "learning_rate": 6.816781724411296e-06, + "loss": 0.5596, + "step": 5764 + }, + { + "epoch": 0.40029162616303293, + "grad_norm": 4.035193447051069, + "learning_rate": 6.815734049602048e-06, + "loss": 0.6447, + "step": 5765 + }, + { + "epoch": 0.400361060963755, + "grad_norm": 3.2983159184469812, + "learning_rate": 6.814686282955026e-06, + "loss": 0.3218, + "step": 5766 + }, + { + 
"epoch": 0.40043049576447715, + "grad_norm": 2.23306171781236, + "learning_rate": 6.813638424523226e-06, + "loss": 0.1493, + "step": 5767 + }, + { + "epoch": 0.4004999305651993, + "grad_norm": 3.65837406842894, + "learning_rate": 6.812590474359646e-06, + "loss": 0.3744, + "step": 5768 + }, + { + "epoch": 0.4005693653659214, + "grad_norm": 2.6245185124045602, + "learning_rate": 6.81154243251729e-06, + "loss": 0.2114, + "step": 5769 + }, + { + "epoch": 0.40063880016664355, + "grad_norm": 3.177351778945432, + "learning_rate": 6.8104942990491694e-06, + "loss": 0.4593, + "step": 5770 + }, + { + "epoch": 0.40070823496736563, + "grad_norm": 4.050022592112915, + "learning_rate": 6.809446074008295e-06, + "loss": 0.4178, + "step": 5771 + }, + { + "epoch": 0.40077766976808776, + "grad_norm": 2.617423169864227, + "learning_rate": 6.808397757447687e-06, + "loss": 0.3036, + "step": 5772 + }, + { + "epoch": 0.4008471045688099, + "grad_norm": 3.095521385865647, + "learning_rate": 6.807349349420363e-06, + "loss": 0.3924, + "step": 5773 + }, + { + "epoch": 0.40091653936953203, + "grad_norm": 3.7010854328070404, + "learning_rate": 6.806300849979359e-06, + "loss": 0.2906, + "step": 5774 + }, + { + "epoch": 0.4009859741702541, + "grad_norm": 4.00599377103177, + "learning_rate": 6.8052522591776995e-06, + "loss": 0.4504, + "step": 5775 + }, + { + "epoch": 0.40105540897097625, + "grad_norm": 4.104953870184914, + "learning_rate": 6.804203577068422e-06, + "loss": 0.5452, + "step": 5776 + }, + { + "epoch": 0.4011248437716984, + "grad_norm": 3.7746641240075545, + "learning_rate": 6.803154803704572e-06, + "loss": 0.4431, + "step": 5777 + }, + { + "epoch": 0.4011942785724205, + "grad_norm": 4.219222140235012, + "learning_rate": 6.802105939139192e-06, + "loss": 0.4746, + "step": 5778 + }, + { + "epoch": 0.4012637133731426, + "grad_norm": 4.294727074809847, + "learning_rate": 6.801056983425331e-06, + "loss": 0.4823, + "step": 5779 + }, + { + "epoch": 0.40133314817386473, + "grad_norm": 3.416377662392865, + "learning_rate": 6.800007936616048e-06, + "loss": 0.3535, + "step": 5780 + }, + { + "epoch": 0.40140258297458686, + "grad_norm": 3.1025720405610833, + "learning_rate": 6.798958798764399e-06, + "loss": 0.3409, + "step": 5781 + }, + { + "epoch": 0.401472017775309, + "grad_norm": 3.386928657199894, + "learning_rate": 6.797909569923449e-06, + "loss": 0.4774, + "step": 5782 + }, + { + "epoch": 0.40154145257603113, + "grad_norm": 3.7299661140802507, + "learning_rate": 6.7968602501462696e-06, + "loss": 0.2652, + "step": 5783 + }, + { + "epoch": 0.4016108873767532, + "grad_norm": 3.43756468272369, + "learning_rate": 6.795810839485931e-06, + "loss": 0.4033, + "step": 5784 + }, + { + "epoch": 0.40168032217747535, + "grad_norm": 4.07862473644909, + "learning_rate": 6.7947613379955114e-06, + "loss": 0.4492, + "step": 5785 + }, + { + "epoch": 0.4017497569781975, + "grad_norm": 3.0305183260104065, + "learning_rate": 6.7937117457280955e-06, + "loss": 0.3218, + "step": 5786 + }, + { + "epoch": 0.4018191917789196, + "grad_norm": 3.7100359060060866, + "learning_rate": 6.792662062736768e-06, + "loss": 0.4045, + "step": 5787 + }, + { + "epoch": 0.4018886265796417, + "grad_norm": 3.089562047783084, + "learning_rate": 6.791612289074623e-06, + "loss": 0.2837, + "step": 5788 + }, + { + "epoch": 0.40195806138036383, + "grad_norm": 2.5363350142854926, + "learning_rate": 6.790562424794754e-06, + "loss": 0.1855, + "step": 5789 + }, + { + "epoch": 0.40202749618108596, + "grad_norm": 4.793365727999766, + "learning_rate": 6.789512469950265e-06, + 
"loss": 0.5399, + "step": 5790 + }, + { + "epoch": 0.4020969309818081, + "grad_norm": 3.205774524442511, + "learning_rate": 6.788462424594262e-06, + "loss": 0.3727, + "step": 5791 + }, + { + "epoch": 0.4021663657825302, + "grad_norm": 4.38948906733427, + "learning_rate": 6.78741228877985e-06, + "loss": 0.545, + "step": 5792 + }, + { + "epoch": 0.4022358005832523, + "grad_norm": 3.4987646306342093, + "learning_rate": 6.78636206256015e-06, + "loss": 0.3759, + "step": 5793 + }, + { + "epoch": 0.40230523538397445, + "grad_norm": 4.258628120400825, + "learning_rate": 6.785311745988276e-06, + "loss": 0.5523, + "step": 5794 + }, + { + "epoch": 0.4023746701846966, + "grad_norm": 5.674581198108077, + "learning_rate": 6.784261339117355e-06, + "loss": 0.5127, + "step": 5795 + }, + { + "epoch": 0.4024441049854187, + "grad_norm": 4.434488510400074, + "learning_rate": 6.783210842000515e-06, + "loss": 0.6336, + "step": 5796 + }, + { + "epoch": 0.4025135397861408, + "grad_norm": 5.663311344543477, + "learning_rate": 6.7821602546908884e-06, + "loss": 0.7357, + "step": 5797 + }, + { + "epoch": 0.40258297458686293, + "grad_norm": 3.287741894596281, + "learning_rate": 6.7811095772416116e-06, + "loss": 0.3827, + "step": 5798 + }, + { + "epoch": 0.40265240938758506, + "grad_norm": 4.051059167590617, + "learning_rate": 6.780058809705827e-06, + "loss": 0.5029, + "step": 5799 + }, + { + "epoch": 0.4027218441883072, + "grad_norm": 4.724989369449416, + "learning_rate": 6.7790079521366856e-06, + "loss": 0.5271, + "step": 5800 + }, + { + "epoch": 0.4027912789890293, + "grad_norm": 3.4425312288034995, + "learning_rate": 6.777957004587332e-06, + "loss": 0.4991, + "step": 5801 + }, + { + "epoch": 0.4028607137897514, + "grad_norm": 4.834440885483448, + "learning_rate": 6.776905967110927e-06, + "loss": 0.4887, + "step": 5802 + }, + { + "epoch": 0.40293014859047355, + "grad_norm": 3.7880383589498443, + "learning_rate": 6.775854839760627e-06, + "loss": 0.4994, + "step": 5803 + }, + { + "epoch": 0.4029995833911957, + "grad_norm": 3.961305044181077, + "learning_rate": 6.774803622589601e-06, + "loss": 0.489, + "step": 5804 + }, + { + "epoch": 0.4030690181919178, + "grad_norm": 4.279406877610342, + "learning_rate": 6.773752315651014e-06, + "loss": 0.6022, + "step": 5805 + }, + { + "epoch": 0.4031384529926399, + "grad_norm": 4.321755319561774, + "learning_rate": 6.772700918998043e-06, + "loss": 0.6169, + "step": 5806 + }, + { + "epoch": 0.40320788779336203, + "grad_norm": 3.286839295697232, + "learning_rate": 6.771649432683866e-06, + "loss": 0.3646, + "step": 5807 + }, + { + "epoch": 0.40327732259408416, + "grad_norm": 4.034684149937206, + "learning_rate": 6.770597856761664e-06, + "loss": 0.347, + "step": 5808 + }, + { + "epoch": 0.4033467573948063, + "grad_norm": 3.169383136423241, + "learning_rate": 6.7695461912846265e-06, + "loss": 0.2739, + "step": 5809 + }, + { + "epoch": 0.4034161921955284, + "grad_norm": 4.05296309115515, + "learning_rate": 6.768494436305946e-06, + "loss": 0.4014, + "step": 5810 + }, + { + "epoch": 0.4034856269962505, + "grad_norm": 4.280778602000879, + "learning_rate": 6.767442591878817e-06, + "loss": 0.4736, + "step": 5811 + }, + { + "epoch": 0.40355506179697265, + "grad_norm": 2.8342990434506956, + "learning_rate": 6.766390658056442e-06, + "loss": 0.2555, + "step": 5812 + }, + { + "epoch": 0.4036244965976948, + "grad_norm": 3.655331189146411, + "learning_rate": 6.765338634892027e-06, + "loss": 0.3159, + "step": 5813 + }, + { + "epoch": 0.40369393139841686, + "grad_norm": 2.689161924230765, + 
"learning_rate": 6.764286522438779e-06, + "loss": 0.3332, + "step": 5814 + }, + { + "epoch": 0.403763366199139, + "grad_norm": 3.8326487770047533, + "learning_rate": 6.763234320749917e-06, + "loss": 0.3385, + "step": 5815 + }, + { + "epoch": 0.40383280099986113, + "grad_norm": 3.0041302486605157, + "learning_rate": 6.76218202987866e-06, + "loss": 0.3453, + "step": 5816 + }, + { + "epoch": 0.40390223580058326, + "grad_norm": 3.309314467306892, + "learning_rate": 6.7611296498782264e-06, + "loss": 0.3419, + "step": 5817 + }, + { + "epoch": 0.4039716706013054, + "grad_norm": 3.6594349458454065, + "learning_rate": 6.76007718080185e-06, + "loss": 0.3718, + "step": 5818 + }, + { + "epoch": 0.4040411054020275, + "grad_norm": 4.845944680218625, + "learning_rate": 6.759024622702761e-06, + "loss": 0.5115, + "step": 5819 + }, + { + "epoch": 0.4041105402027496, + "grad_norm": 4.622515238858883, + "learning_rate": 6.757971975634199e-06, + "loss": 0.6267, + "step": 5820 + }, + { + "epoch": 0.40417997500347175, + "grad_norm": 5.081849847481002, + "learning_rate": 6.7569192396493995e-06, + "loss": 0.5335, + "step": 5821 + }, + { + "epoch": 0.4042494098041939, + "grad_norm": 3.385219280763738, + "learning_rate": 6.755866414801617e-06, + "loss": 0.3789, + "step": 5822 + }, + { + "epoch": 0.40431884460491596, + "grad_norm": 4.214311919192354, + "learning_rate": 6.754813501144096e-06, + "loss": 0.6231, + "step": 5823 + }, + { + "epoch": 0.4043882794056381, + "grad_norm": 3.484737866613268, + "learning_rate": 6.753760498730095e-06, + "loss": 0.2889, + "step": 5824 + }, + { + "epoch": 0.40445771420636023, + "grad_norm": 4.11068589343891, + "learning_rate": 6.7527074076128716e-06, + "loss": 0.6504, + "step": 5825 + }, + { + "epoch": 0.40452714900708237, + "grad_norm": 6.988661685370977, + "learning_rate": 6.751654227845692e-06, + "loss": 0.9208, + "step": 5826 + }, + { + "epoch": 0.40459658380780444, + "grad_norm": 3.671144525136897, + "learning_rate": 6.750600959481822e-06, + "loss": 0.4628, + "step": 5827 + }, + { + "epoch": 0.4046660186085266, + "grad_norm": 3.259757705149907, + "learning_rate": 6.749547602574537e-06, + "loss": 0.339, + "step": 5828 + }, + { + "epoch": 0.4047354534092487, + "grad_norm": 4.357809595916123, + "learning_rate": 6.748494157177114e-06, + "loss": 0.584, + "step": 5829 + }, + { + "epoch": 0.40480488820997085, + "grad_norm": 4.380261056317368, + "learning_rate": 6.747440623342835e-06, + "loss": 0.5702, + "step": 5830 + }, + { + "epoch": 0.404874323010693, + "grad_norm": 4.215734879340497, + "learning_rate": 6.746387001124987e-06, + "loss": 0.6637, + "step": 5831 + }, + { + "epoch": 0.40494375781141506, + "grad_norm": 4.567374733643246, + "learning_rate": 6.745333290576863e-06, + "loss": 0.4943, + "step": 5832 + }, + { + "epoch": 0.4050131926121372, + "grad_norm": 2.897764971602156, + "learning_rate": 6.744279491751753e-06, + "loss": 0.3262, + "step": 5833 + }, + { + "epoch": 0.40508262741285933, + "grad_norm": 3.411687608336122, + "learning_rate": 6.743225604702961e-06, + "loss": 0.4164, + "step": 5834 + }, + { + "epoch": 0.40515206221358147, + "grad_norm": 4.958622337848191, + "learning_rate": 6.74217162948379e-06, + "loss": 0.6992, + "step": 5835 + }, + { + "epoch": 0.40522149701430354, + "grad_norm": 4.084297381718783, + "learning_rate": 6.741117566147551e-06, + "loss": 0.6105, + "step": 5836 + }, + { + "epoch": 0.4052909318150257, + "grad_norm": 4.605534157165234, + "learning_rate": 6.740063414747553e-06, + "loss": 0.5499, + "step": 5837 + }, + { + "epoch": 0.4053603666157478, + 
"grad_norm": 3.2796469338575327, + "learning_rate": 6.739009175337119e-06, + "loss": 0.2975, + "step": 5838 + }, + { + "epoch": 0.40542980141646995, + "grad_norm": 4.318903704469736, + "learning_rate": 6.7379548479695655e-06, + "loss": 0.5882, + "step": 5839 + }, + { + "epoch": 0.4054992362171921, + "grad_norm": 2.6683786096547664, + "learning_rate": 6.736900432698226e-06, + "loss": 0.244, + "step": 5840 + }, + { + "epoch": 0.40556867101791416, + "grad_norm": 2.8937504455784286, + "learning_rate": 6.735845929576425e-06, + "loss": 0.2444, + "step": 5841 + }, + { + "epoch": 0.4056381058186363, + "grad_norm": 4.6382827542910725, + "learning_rate": 6.734791338657505e-06, + "loss": 0.3108, + "step": 5842 + }, + { + "epoch": 0.40570754061935843, + "grad_norm": 3.43594571415395, + "learning_rate": 6.733736659994799e-06, + "loss": 0.2983, + "step": 5843 + }, + { + "epoch": 0.40577697542008057, + "grad_norm": 3.05742724271604, + "learning_rate": 6.732681893641655e-06, + "loss": 0.2937, + "step": 5844 + }, + { + "epoch": 0.40584641022080264, + "grad_norm": 2.7629751818574095, + "learning_rate": 6.731627039651423e-06, + "loss": 0.2259, + "step": 5845 + }, + { + "epoch": 0.4059158450215248, + "grad_norm": 5.038036689219401, + "learning_rate": 6.7305720980774525e-06, + "loss": 0.2739, + "step": 5846 + }, + { + "epoch": 0.4059852798222469, + "grad_norm": 3.788951390224794, + "learning_rate": 6.729517068973106e-06, + "loss": 0.4617, + "step": 5847 + }, + { + "epoch": 0.40605471462296905, + "grad_norm": 3.525201091172801, + "learning_rate": 6.728461952391742e-06, + "loss": 0.394, + "step": 5848 + }, + { + "epoch": 0.4061241494236911, + "grad_norm": 3.8455628464808966, + "learning_rate": 6.727406748386728e-06, + "loss": 0.5284, + "step": 5849 + }, + { + "epoch": 0.40619358422441326, + "grad_norm": 4.295510608276293, + "learning_rate": 6.726351457011437e-06, + "loss": 0.6841, + "step": 5850 + }, + { + "epoch": 0.4062630190251354, + "grad_norm": 4.647480687292294, + "learning_rate": 6.725296078319241e-06, + "loss": 0.5685, + "step": 5851 + }, + { + "epoch": 0.40633245382585753, + "grad_norm": 3.9092716055630294, + "learning_rate": 6.724240612363521e-06, + "loss": 0.4853, + "step": 5852 + }, + { + "epoch": 0.40640188862657967, + "grad_norm": 2.997315850629966, + "learning_rate": 6.723185059197663e-06, + "loss": 0.3067, + "step": 5853 + }, + { + "epoch": 0.40647132342730175, + "grad_norm": 3.0311459090407276, + "learning_rate": 6.7221294188750555e-06, + "loss": 0.32, + "step": 5854 + }, + { + "epoch": 0.4065407582280239, + "grad_norm": 4.281366380606873, + "learning_rate": 6.72107369144909e-06, + "loss": 0.4652, + "step": 5855 + }, + { + "epoch": 0.406610193028746, + "grad_norm": 5.490409653600115, + "learning_rate": 6.720017876973165e-06, + "loss": 0.2866, + "step": 5856 + }, + { + "epoch": 0.40667962782946815, + "grad_norm": 3.3538252204615118, + "learning_rate": 6.718961975500682e-06, + "loss": 0.2365, + "step": 5857 + }, + { + "epoch": 0.40674906263019023, + "grad_norm": 4.367921246647332, + "learning_rate": 6.7179059870850475e-06, + "loss": 0.6269, + "step": 5858 + }, + { + "epoch": 0.40681849743091236, + "grad_norm": 4.617943948107425, + "learning_rate": 6.716849911779672e-06, + "loss": 0.7698, + "step": 5859 + }, + { + "epoch": 0.4068879322316345, + "grad_norm": 4.250952030354651, + "learning_rate": 6.71579374963797e-06, + "loss": 0.5047, + "step": 5860 + }, + { + "epoch": 0.40695736703235663, + "grad_norm": 4.180005023947055, + "learning_rate": 6.714737500713363e-06, + "loss": 0.6449, + "step": 5861 + 
}, + { + "epoch": 0.40702680183307877, + "grad_norm": 2.078575843162596, + "learning_rate": 6.713681165059271e-06, + "loss": 0.1761, + "step": 5862 + }, + { + "epoch": 0.40709623663380085, + "grad_norm": 4.0723398700108095, + "learning_rate": 6.712624742729128e-06, + "loss": 0.5921, + "step": 5863 + }, + { + "epoch": 0.407165671434523, + "grad_norm": 2.9012724208563734, + "learning_rate": 6.711568233776362e-06, + "loss": 0.3326, + "step": 5864 + }, + { + "epoch": 0.4072351062352451, + "grad_norm": 5.925823425340588, + "learning_rate": 6.710511638254412e-06, + "loss": 0.7255, + "step": 5865 + }, + { + "epoch": 0.40730454103596725, + "grad_norm": 3.9631755882419233, + "learning_rate": 6.709454956216719e-06, + "loss": 0.5727, + "step": 5866 + }, + { + "epoch": 0.40737397583668933, + "grad_norm": 3.900004258123668, + "learning_rate": 6.708398187716728e-06, + "loss": 0.4965, + "step": 5867 + }, + { + "epoch": 0.40744341063741146, + "grad_norm": 4.313163358290437, + "learning_rate": 6.7073413328078904e-06, + "loss": 0.4028, + "step": 5868 + }, + { + "epoch": 0.4075128454381336, + "grad_norm": 3.1658095815284657, + "learning_rate": 6.706284391543659e-06, + "loss": 0.2757, + "step": 5869 + }, + { + "epoch": 0.40758228023885573, + "grad_norm": 4.438429410335899, + "learning_rate": 6.705227363977495e-06, + "loss": 0.54, + "step": 5870 + }, + { + "epoch": 0.4076517150395778, + "grad_norm": 4.573335138664659, + "learning_rate": 6.70417025016286e-06, + "loss": 0.5805, + "step": 5871 + }, + { + "epoch": 0.40772114984029995, + "grad_norm": 6.075891757690346, + "learning_rate": 6.703113050153222e-06, + "loss": 0.4729, + "step": 5872 + }, + { + "epoch": 0.4077905846410221, + "grad_norm": 7.154696697144242, + "learning_rate": 6.702055764002055e-06, + "loss": 0.5836, + "step": 5873 + }, + { + "epoch": 0.4078600194417442, + "grad_norm": 3.62723589599387, + "learning_rate": 6.700998391762832e-06, + "loss": 0.3056, + "step": 5874 + }, + { + "epoch": 0.40792945424246635, + "grad_norm": 4.995113186134501, + "learning_rate": 6.6999409334890355e-06, + "loss": 0.7423, + "step": 5875 + }, + { + "epoch": 0.40799888904318843, + "grad_norm": 3.192289291959845, + "learning_rate": 6.698883389234149e-06, + "loss": 0.3186, + "step": 5876 + }, + { + "epoch": 0.40806832384391056, + "grad_norm": 4.027991287662262, + "learning_rate": 6.697825759051665e-06, + "loss": 0.5152, + "step": 5877 + }, + { + "epoch": 0.4081377586446327, + "grad_norm": 3.8166397144513824, + "learning_rate": 6.696768042995073e-06, + "loss": 0.435, + "step": 5878 + }, + { + "epoch": 0.40820719344535483, + "grad_norm": 3.8838489575156774, + "learning_rate": 6.695710241117874e-06, + "loss": 0.4021, + "step": 5879 + }, + { + "epoch": 0.4082766282460769, + "grad_norm": 3.921780602818124, + "learning_rate": 6.694652353473572e-06, + "loss": 0.4075, + "step": 5880 + }, + { + "epoch": 0.40834606304679905, + "grad_norm": 5.618549817364158, + "learning_rate": 6.69359438011567e-06, + "loss": 0.6882, + "step": 5881 + }, + { + "epoch": 0.4084154978475212, + "grad_norm": 4.279394435667104, + "learning_rate": 6.6925363210976805e-06, + "loss": 0.715, + "step": 5882 + }, + { + "epoch": 0.4084849326482433, + "grad_norm": 3.352895743852748, + "learning_rate": 6.691478176473119e-06, + "loss": 0.2688, + "step": 5883 + }, + { + "epoch": 0.4085543674489654, + "grad_norm": 4.787252541492366, + "learning_rate": 6.6904199462955066e-06, + "loss": 0.4539, + "step": 5884 + }, + { + "epoch": 0.40862380224968753, + "grad_norm": 4.2170197641690415, + "learning_rate": 
6.689361630618365e-06, + "loss": 0.5739, + "step": 5885 + }, + { + "epoch": 0.40869323705040966, + "grad_norm": 3.0514195585222166, + "learning_rate": 6.6883032294952235e-06, + "loss": 0.3535, + "step": 5886 + }, + { + "epoch": 0.4087626718511318, + "grad_norm": 3.174092047219007, + "learning_rate": 6.6872447429796165e-06, + "loss": 0.3313, + "step": 5887 + }, + { + "epoch": 0.40883210665185393, + "grad_norm": 3.9586476320541824, + "learning_rate": 6.686186171125078e-06, + "loss": 0.2986, + "step": 5888 + }, + { + "epoch": 0.408901541452576, + "grad_norm": 4.707054949895881, + "learning_rate": 6.685127513985153e-06, + "loss": 0.8038, + "step": 5889 + }, + { + "epoch": 0.40897097625329815, + "grad_norm": 5.222779371017647, + "learning_rate": 6.6840687716133836e-06, + "loss": 0.4819, + "step": 5890 + }, + { + "epoch": 0.4090404110540203, + "grad_norm": 5.914431568774466, + "learning_rate": 6.683009944063322e-06, + "loss": 0.975, + "step": 5891 + }, + { + "epoch": 0.4091098458547424, + "grad_norm": 3.6476355790144117, + "learning_rate": 6.681951031388522e-06, + "loss": 0.4605, + "step": 5892 + }, + { + "epoch": 0.4091792806554645, + "grad_norm": 4.1983743182352296, + "learning_rate": 6.680892033642542e-06, + "loss": 0.4921, + "step": 5893 + }, + { + "epoch": 0.40924871545618663, + "grad_norm": 3.763099160781569, + "learning_rate": 6.6798329508789435e-06, + "loss": 0.5579, + "step": 5894 + }, + { + "epoch": 0.40931815025690876, + "grad_norm": 3.8992747205643394, + "learning_rate": 6.678773783151298e-06, + "loss": 0.266, + "step": 5895 + }, + { + "epoch": 0.4093875850576309, + "grad_norm": 4.806555818325423, + "learning_rate": 6.677714530513172e-06, + "loss": 0.739, + "step": 5896 + }, + { + "epoch": 0.40945701985835303, + "grad_norm": 4.052946994876943, + "learning_rate": 6.676655193018145e-06, + "loss": 0.5116, + "step": 5897 + }, + { + "epoch": 0.4095264546590751, + "grad_norm": 4.395198902434428, + "learning_rate": 6.675595770719794e-06, + "loss": 0.5286, + "step": 5898 + }, + { + "epoch": 0.40959588945979725, + "grad_norm": 4.162047009414529, + "learning_rate": 6.674536263671705e-06, + "loss": 0.7285, + "step": 5899 + }, + { + "epoch": 0.4096653242605194, + "grad_norm": 3.237450897307577, + "learning_rate": 6.673476671927469e-06, + "loss": 0.3734, + "step": 5900 + }, + { + "epoch": 0.4097347590612415, + "grad_norm": 3.155129636765505, + "learning_rate": 6.672416995540674e-06, + "loss": 0.3517, + "step": 5901 + }, + { + "epoch": 0.4098041938619636, + "grad_norm": 4.660231469046866, + "learning_rate": 6.671357234564922e-06, + "loss": 0.6399, + "step": 5902 + }, + { + "epoch": 0.40987362866268573, + "grad_norm": 3.076170155508563, + "learning_rate": 6.67029738905381e-06, + "loss": 0.4378, + "step": 5903 + }, + { + "epoch": 0.40994306346340786, + "grad_norm": 4.208727675373513, + "learning_rate": 6.669237459060949e-06, + "loss": 0.5492, + "step": 5904 + }, + { + "epoch": 0.41001249826413, + "grad_norm": 3.803122969220418, + "learning_rate": 6.668177444639943e-06, + "loss": 0.5074, + "step": 5905 + }, + { + "epoch": 0.4100819330648521, + "grad_norm": 4.055629583275958, + "learning_rate": 6.667117345844412e-06, + "loss": 0.5432, + "step": 5906 + }, + { + "epoch": 0.4101513678655742, + "grad_norm": 2.9202481356540666, + "learning_rate": 6.666057162727971e-06, + "loss": 0.3016, + "step": 5907 + }, + { + "epoch": 0.41022080266629635, + "grad_norm": 3.759433899415458, + "learning_rate": 6.664996895344244e-06, + "loss": 0.4002, + "step": 5908 + }, + { + "epoch": 0.4102902374670185, + "grad_norm": 
4.450768686666573, + "learning_rate": 6.663936543746859e-06, + "loss": 0.4668, + "step": 5909 + }, + { + "epoch": 0.4103596722677406, + "grad_norm": 3.231435287481911, + "learning_rate": 6.6628761079894455e-06, + "loss": 0.4188, + "step": 5910 + }, + { + "epoch": 0.4104291070684627, + "grad_norm": 3.679610150834794, + "learning_rate": 6.66181558812564e-06, + "loss": 0.4736, + "step": 5911 + }, + { + "epoch": 0.41049854186918483, + "grad_norm": 1.9954281910558032, + "learning_rate": 6.660754984209083e-06, + "loss": 0.1997, + "step": 5912 + }, + { + "epoch": 0.41056797666990696, + "grad_norm": 5.289002001172379, + "learning_rate": 6.659694296293419e-06, + "loss": 0.7583, + "step": 5913 + }, + { + "epoch": 0.4106374114706291, + "grad_norm": 4.273182986741465, + "learning_rate": 6.658633524432293e-06, + "loss": 0.5964, + "step": 5914 + }, + { + "epoch": 0.4107068462713512, + "grad_norm": 3.8681286637149257, + "learning_rate": 6.657572668679362e-06, + "loss": 0.6502, + "step": 5915 + }, + { + "epoch": 0.4107762810720733, + "grad_norm": 3.4056447014614992, + "learning_rate": 6.656511729088283e-06, + "loss": 0.3841, + "step": 5916 + }, + { + "epoch": 0.41084571587279545, + "grad_norm": 3.2910703824932046, + "learning_rate": 6.655450705712712e-06, + "loss": 0.2944, + "step": 5917 + }, + { + "epoch": 0.4109151506735176, + "grad_norm": 5.194844303241585, + "learning_rate": 6.654389598606319e-06, + "loss": 0.5354, + "step": 5918 + }, + { + "epoch": 0.4109845854742397, + "grad_norm": 4.982133942194317, + "learning_rate": 6.653328407822772e-06, + "loss": 0.5852, + "step": 5919 + }, + { + "epoch": 0.4110540202749618, + "grad_norm": 4.331500222866751, + "learning_rate": 6.652267133415745e-06, + "loss": 0.4463, + "step": 5920 + }, + { + "epoch": 0.41112345507568393, + "grad_norm": 3.5967563353571497, + "learning_rate": 6.651205775438915e-06, + "loss": 0.5262, + "step": 5921 + }, + { + "epoch": 0.41119288987640606, + "grad_norm": 3.518169563269043, + "learning_rate": 6.650144333945967e-06, + "loss": 0.4601, + "step": 5922 + }, + { + "epoch": 0.4112623246771282, + "grad_norm": 4.184806884149068, + "learning_rate": 6.6490828089905854e-06, + "loss": 0.3972, + "step": 5923 + }, + { + "epoch": 0.4113317594778503, + "grad_norm": 4.829607267750261, + "learning_rate": 6.648021200626461e-06, + "loss": 0.5794, + "step": 5924 + }, + { + "epoch": 0.4114011942785724, + "grad_norm": 3.8021323096553172, + "learning_rate": 6.646959508907289e-06, + "loss": 0.381, + "step": 5925 + }, + { + "epoch": 0.41147062907929455, + "grad_norm": 3.2063745931897927, + "learning_rate": 6.645897733886769e-06, + "loss": 0.3945, + "step": 5926 + }, + { + "epoch": 0.4115400638800167, + "grad_norm": 4.092857132261174, + "learning_rate": 6.644835875618605e-06, + "loss": 0.3987, + "step": 5927 + }, + { + "epoch": 0.41160949868073876, + "grad_norm": 3.7730263416638468, + "learning_rate": 6.643773934156502e-06, + "loss": 0.3898, + "step": 5928 + }, + { + "epoch": 0.4116789334814609, + "grad_norm": 3.9907095817209117, + "learning_rate": 6.6427119095541745e-06, + "loss": 0.5007, + "step": 5929 + }, + { + "epoch": 0.41174836828218303, + "grad_norm": 4.058541369300089, + "learning_rate": 6.641649801865336e-06, + "loss": 0.5124, + "step": 5930 + }, + { + "epoch": 0.41181780308290517, + "grad_norm": 3.6012011650320703, + "learning_rate": 6.64058761114371e-06, + "loss": 0.5843, + "step": 5931 + }, + { + "epoch": 0.4118872378836273, + "grad_norm": 3.5267397701417655, + "learning_rate": 6.63952533744302e-06, + "loss": 0.3361, + "step": 5932 + }, + { + 
"epoch": 0.4119566726843494, + "grad_norm": 3.2973009106072024, + "learning_rate": 6.638462980816991e-06, + "loss": 0.451, + "step": 5933 + }, + { + "epoch": 0.4120261074850715, + "grad_norm": 3.6014745606413405, + "learning_rate": 6.63740054131936e-06, + "loss": 0.4682, + "step": 5934 + }, + { + "epoch": 0.41209554228579365, + "grad_norm": 5.1985078424696365, + "learning_rate": 6.636338019003862e-06, + "loss": 0.6571, + "step": 5935 + }, + { + "epoch": 0.4121649770865158, + "grad_norm": 3.5711542056633547, + "learning_rate": 6.635275413924238e-06, + "loss": 0.2541, + "step": 5936 + }, + { + "epoch": 0.41223441188723786, + "grad_norm": 3.7969106283580007, + "learning_rate": 6.634212726134235e-06, + "loss": 0.5522, + "step": 5937 + }, + { + "epoch": 0.41230384668796, + "grad_norm": 3.7764966457648703, + "learning_rate": 6.633149955687602e-06, + "loss": 0.3602, + "step": 5938 + }, + { + "epoch": 0.41237328148868213, + "grad_norm": 3.5961277066134283, + "learning_rate": 6.632087102638091e-06, + "loss": 0.4246, + "step": 5939 + }, + { + "epoch": 0.41244271628940427, + "grad_norm": 4.283934639118943, + "learning_rate": 6.631024167039461e-06, + "loss": 0.4702, + "step": 5940 + }, + { + "epoch": 0.41251215109012634, + "grad_norm": 4.507665720805658, + "learning_rate": 6.629961148945475e-06, + "loss": 0.6833, + "step": 5941 + }, + { + "epoch": 0.4125815858908485, + "grad_norm": 3.336149228634475, + "learning_rate": 6.6288980484098994e-06, + "loss": 0.2978, + "step": 5942 + }, + { + "epoch": 0.4126510206915706, + "grad_norm": 3.6698902634904136, + "learning_rate": 6.627834865486502e-06, + "loss": 0.4876, + "step": 5943 + }, + { + "epoch": 0.41272045549229275, + "grad_norm": 4.214610520727473, + "learning_rate": 6.626771600229059e-06, + "loss": 0.7225, + "step": 5944 + }, + { + "epoch": 0.4127898902930149, + "grad_norm": 5.163373777729271, + "learning_rate": 6.6257082526913515e-06, + "loss": 0.6017, + "step": 5945 + }, + { + "epoch": 0.41285932509373696, + "grad_norm": 2.590636025921402, + "learning_rate": 6.624644822927159e-06, + "loss": 0.1924, + "step": 5946 + }, + { + "epoch": 0.4129287598944591, + "grad_norm": 4.752575031449287, + "learning_rate": 6.623581310990269e-06, + "loss": 0.7692, + "step": 5947 + }, + { + "epoch": 0.41299819469518123, + "grad_norm": 4.148498231048011, + "learning_rate": 6.6225177169344756e-06, + "loss": 0.4368, + "step": 5948 + }, + { + "epoch": 0.41306762949590337, + "grad_norm": 3.3461980276016012, + "learning_rate": 6.621454040813569e-06, + "loss": 0.319, + "step": 5949 + }, + { + "epoch": 0.41313706429662544, + "grad_norm": 3.4121787640450085, + "learning_rate": 6.620390282681355e-06, + "loss": 0.2289, + "step": 5950 + }, + { + "epoch": 0.4132064990973476, + "grad_norm": 3.076598224519381, + "learning_rate": 6.6193264425916325e-06, + "loss": 0.4939, + "step": 5951 + }, + { + "epoch": 0.4132759338980697, + "grad_norm": 3.24952883063537, + "learning_rate": 6.618262520598211e-06, + "loss": 0.4662, + "step": 5952 + }, + { + "epoch": 0.41334536869879185, + "grad_norm": 3.273221906035571, + "learning_rate": 6.617198516754901e-06, + "loss": 0.1984, + "step": 5953 + }, + { + "epoch": 0.413414803499514, + "grad_norm": 3.9943603386298974, + "learning_rate": 6.616134431115523e-06, + "loss": 0.4798, + "step": 5954 + }, + { + "epoch": 0.41348423830023606, + "grad_norm": 3.432414428143647, + "learning_rate": 6.615070263733891e-06, + "loss": 0.2774, + "step": 5955 + }, + { + "epoch": 0.4135536731009582, + "grad_norm": 3.8023889458774183, + "learning_rate": 6.6140060146638355e-06, 
+ "loss": 0.4927, + "step": 5956 + }, + { + "epoch": 0.41362310790168033, + "grad_norm": 4.107753646599888, + "learning_rate": 6.612941683959181e-06, + "loss": 0.2787, + "step": 5957 + }, + { + "epoch": 0.41369254270240247, + "grad_norm": 2.862977909829752, + "learning_rate": 6.611877271673761e-06, + "loss": 0.3095, + "step": 5958 + }, + { + "epoch": 0.41376197750312455, + "grad_norm": 4.08107664800648, + "learning_rate": 6.6108127778614126e-06, + "loss": 0.3139, + "step": 5959 + }, + { + "epoch": 0.4138314123038467, + "grad_norm": 3.283768566514904, + "learning_rate": 6.609748202575976e-06, + "loss": 0.2975, + "step": 5960 + }, + { + "epoch": 0.4139008471045688, + "grad_norm": 4.548084471201058, + "learning_rate": 6.6086835458712994e-06, + "loss": 0.5423, + "step": 5961 + }, + { + "epoch": 0.41397028190529095, + "grad_norm": 4.756708378913505, + "learning_rate": 6.607618807801227e-06, + "loss": 0.483, + "step": 5962 + }, + { + "epoch": 0.41403971670601303, + "grad_norm": 4.271132643086925, + "learning_rate": 6.606553988419615e-06, + "loss": 0.6305, + "step": 5963 + }, + { + "epoch": 0.41410915150673516, + "grad_norm": 3.7117500649357322, + "learning_rate": 6.60548908778032e-06, + "loss": 0.3805, + "step": 5964 + }, + { + "epoch": 0.4141785863074573, + "grad_norm": 5.839294345960325, + "learning_rate": 6.604424105937205e-06, + "loss": 0.647, + "step": 5965 + }, + { + "epoch": 0.41424802110817943, + "grad_norm": 5.026592636058434, + "learning_rate": 6.603359042944133e-06, + "loss": 0.7131, + "step": 5966 + }, + { + "epoch": 0.41431745590890157, + "grad_norm": 4.099199914534691, + "learning_rate": 6.6022938988549755e-06, + "loss": 0.5236, + "step": 5967 + }, + { + "epoch": 0.41438689070962365, + "grad_norm": 3.2668865112559056, + "learning_rate": 6.6012286737236056e-06, + "loss": 0.2629, + "step": 5968 + }, + { + "epoch": 0.4144563255103458, + "grad_norm": 5.500072938008738, + "learning_rate": 6.6001633676039e-06, + "loss": 0.724, + "step": 5969 + }, + { + "epoch": 0.4145257603110679, + "grad_norm": 3.564160387367135, + "learning_rate": 6.599097980549744e-06, + "loss": 0.4112, + "step": 5970 + }, + { + "epoch": 0.41459519511179005, + "grad_norm": 5.935853775523354, + "learning_rate": 6.59803251261502e-06, + "loss": 0.4225, + "step": 5971 + }, + { + "epoch": 0.41466462991251213, + "grad_norm": 3.4353260115452873, + "learning_rate": 6.596966963853621e-06, + "loss": 0.3019, + "step": 5972 + }, + { + "epoch": 0.41473406471323426, + "grad_norm": 4.752866054882168, + "learning_rate": 6.59590133431944e-06, + "loss": 0.7035, + "step": 5973 + }, + { + "epoch": 0.4148034995139564, + "grad_norm": 3.8998604835877315, + "learning_rate": 6.594835624066375e-06, + "loss": 0.6103, + "step": 5974 + }, + { + "epoch": 0.41487293431467853, + "grad_norm": 4.039922923711053, + "learning_rate": 6.5937698331483295e-06, + "loss": 0.4017, + "step": 5975 + }, + { + "epoch": 0.4149423691154006, + "grad_norm": 3.791477448342948, + "learning_rate": 6.592703961619207e-06, + "loss": 0.3924, + "step": 5976 + }, + { + "epoch": 0.41501180391612275, + "grad_norm": 5.979169761290956, + "learning_rate": 6.591638009532923e-06, + "loss": 0.5755, + "step": 5977 + }, + { + "epoch": 0.4150812387168449, + "grad_norm": 3.999373012322334, + "learning_rate": 6.590571976943387e-06, + "loss": 0.5232, + "step": 5978 + }, + { + "epoch": 0.415150673517567, + "grad_norm": 5.013172341464592, + "learning_rate": 6.589505863904523e-06, + "loss": 0.7576, + "step": 5979 + }, + { + "epoch": 0.41522010831828915, + "grad_norm": 2.707524023017276, + 
"learning_rate": 6.58843967047025e-06, + "loss": 0.2317, + "step": 5980 + }, + { + "epoch": 0.41528954311901123, + "grad_norm": 4.43508441736149, + "learning_rate": 6.587373396694496e-06, + "loss": 0.6286, + "step": 5981 + }, + { + "epoch": 0.41535897791973336, + "grad_norm": 4.7917072832940715, + "learning_rate": 6.586307042631192e-06, + "loss": 0.4474, + "step": 5982 + }, + { + "epoch": 0.4154284127204555, + "grad_norm": 7.629401313170102, + "learning_rate": 6.585240608334272e-06, + "loss": 0.32, + "step": 5983 + }, + { + "epoch": 0.41549784752117763, + "grad_norm": 4.045749950822491, + "learning_rate": 6.584174093857676e-06, + "loss": 0.6604, + "step": 5984 + }, + { + "epoch": 0.4155672823218997, + "grad_norm": 4.100293467743907, + "learning_rate": 6.5831074992553465e-06, + "loss": 0.4401, + "step": 5985 + }, + { + "epoch": 0.41563671712262185, + "grad_norm": 3.5283461686205664, + "learning_rate": 6.582040824581233e-06, + "loss": 0.4108, + "step": 5986 + }, + { + "epoch": 0.415706151923344, + "grad_norm": 4.061756782528265, + "learning_rate": 6.580974069889283e-06, + "loss": 0.3636, + "step": 5987 + }, + { + "epoch": 0.4157755867240661, + "grad_norm": 3.667780322548429, + "learning_rate": 6.579907235233454e-06, + "loss": 0.615, + "step": 5988 + }, + { + "epoch": 0.41584502152478825, + "grad_norm": 4.50694731920286, + "learning_rate": 6.578840320667705e-06, + "loss": 0.8167, + "step": 5989 + }, + { + "epoch": 0.41591445632551033, + "grad_norm": 4.018970295523801, + "learning_rate": 6.5777733262459995e-06, + "loss": 0.4205, + "step": 5990 + }, + { + "epoch": 0.41598389112623246, + "grad_norm": 4.256155107757088, + "learning_rate": 6.576706252022304e-06, + "loss": 0.4638, + "step": 5991 + }, + { + "epoch": 0.4160533259269546, + "grad_norm": 3.9799483208314923, + "learning_rate": 6.57563909805059e-06, + "loss": 0.3579, + "step": 5992 + }, + { + "epoch": 0.41612276072767673, + "grad_norm": 5.250308741263798, + "learning_rate": 6.574571864384835e-06, + "loss": 0.4732, + "step": 5993 + }, + { + "epoch": 0.4161921955283988, + "grad_norm": 3.5431734289452934, + "learning_rate": 6.573504551079015e-06, + "loss": 0.4609, + "step": 5994 + }, + { + "epoch": 0.41626163032912095, + "grad_norm": 4.086440270062286, + "learning_rate": 6.5724371581871175e-06, + "loss": 0.6302, + "step": 5995 + }, + { + "epoch": 0.4163310651298431, + "grad_norm": 4.630688252009449, + "learning_rate": 6.571369685763126e-06, + "loss": 0.6693, + "step": 5996 + }, + { + "epoch": 0.4164004999305652, + "grad_norm": 4.355906978789595, + "learning_rate": 6.570302133861034e-06, + "loss": 0.5666, + "step": 5997 + }, + { + "epoch": 0.4164699347312873, + "grad_norm": 4.106175428632236, + "learning_rate": 6.569234502534838e-06, + "loss": 0.5607, + "step": 5998 + }, + { + "epoch": 0.41653936953200943, + "grad_norm": 4.417857746977174, + "learning_rate": 6.568166791838536e-06, + "loss": 0.7039, + "step": 5999 + }, + { + "epoch": 0.41660880433273156, + "grad_norm": 4.135282633525598, + "learning_rate": 6.567099001826133e-06, + "loss": 0.2914, + "step": 6000 + }, + { + "epoch": 0.4166782391334537, + "grad_norm": 3.678670965085961, + "learning_rate": 6.566031132551634e-06, + "loss": 0.3352, + "step": 6001 + }, + { + "epoch": 0.41674767393417583, + "grad_norm": 4.819477573572015, + "learning_rate": 6.564963184069053e-06, + "loss": 0.6571, + "step": 6002 + }, + { + "epoch": 0.4168171087348979, + "grad_norm": 4.308681583605821, + "learning_rate": 6.563895156432405e-06, + "loss": 0.5728, + "step": 6003 + }, + { + "epoch": 0.41688654353562005, + 
"grad_norm": 2.8802495366572396, + "learning_rate": 6.56282704969571e-06, + "loss": 0.2767, + "step": 6004 + }, + { + "epoch": 0.4169559783363422, + "grad_norm": 4.055709462249376, + "learning_rate": 6.561758863912991e-06, + "loss": 0.4474, + "step": 6005 + }, + { + "epoch": 0.4170254131370643, + "grad_norm": 4.740713992840043, + "learning_rate": 6.560690599138277e-06, + "loss": 0.9646, + "step": 6006 + }, + { + "epoch": 0.4170948479377864, + "grad_norm": 3.869120748204385, + "learning_rate": 6.559622255425598e-06, + "loss": 0.43, + "step": 6007 + }, + { + "epoch": 0.41716428273850853, + "grad_norm": 4.631126825076075, + "learning_rate": 6.55855383282899e-06, + "loss": 0.7757, + "step": 6008 + }, + { + "epoch": 0.41723371753923066, + "grad_norm": 3.5143602200022994, + "learning_rate": 6.557485331402494e-06, + "loss": 0.4444, + "step": 6009 + }, + { + "epoch": 0.4173031523399528, + "grad_norm": 3.2979481071024743, + "learning_rate": 6.5564167512001496e-06, + "loss": 0.2892, + "step": 6010 + }, + { + "epoch": 0.41737258714067493, + "grad_norm": 7.261492177131983, + "learning_rate": 6.555348092276008e-06, + "loss": 0.5031, + "step": 6011 + }, + { + "epoch": 0.417442021941397, + "grad_norm": 3.974404138460476, + "learning_rate": 6.554279354684121e-06, + "loss": 0.4757, + "step": 6012 + }, + { + "epoch": 0.41751145674211915, + "grad_norm": 3.983571118866898, + "learning_rate": 6.553210538478542e-06, + "loss": 0.4088, + "step": 6013 + }, + { + "epoch": 0.4175808915428413, + "grad_norm": 3.5801342877607665, + "learning_rate": 6.552141643713334e-06, + "loss": 0.3163, + "step": 6014 + }, + { + "epoch": 0.4176503263435634, + "grad_norm": 3.4938435226373725, + "learning_rate": 6.551072670442554e-06, + "loss": 0.5077, + "step": 6015 + }, + { + "epoch": 0.4177197611442855, + "grad_norm": 3.479033959102143, + "learning_rate": 6.550003618720277e-06, + "loss": 0.4043, + "step": 6016 + }, + { + "epoch": 0.41778919594500763, + "grad_norm": 4.204367460846488, + "learning_rate": 6.548934488600569e-06, + "loss": 0.5408, + "step": 6017 + }, + { + "epoch": 0.41785863074572976, + "grad_norm": 4.360862455777884, + "learning_rate": 6.547865280137507e-06, + "loss": 0.5516, + "step": 6018 + }, + { + "epoch": 0.4179280655464519, + "grad_norm": 2.8065093647682264, + "learning_rate": 6.546795993385172e-06, + "loss": 0.1874, + "step": 6019 + }, + { + "epoch": 0.417997500347174, + "grad_norm": 3.699325362917268, + "learning_rate": 6.545726628397646e-06, + "loss": 0.531, + "step": 6020 + }, + { + "epoch": 0.4180669351478961, + "grad_norm": 3.9946305771491617, + "learning_rate": 6.544657185229014e-06, + "loss": 0.6079, + "step": 6021 + }, + { + "epoch": 0.41813636994861825, + "grad_norm": 3.9575612081875935, + "learning_rate": 6.543587663933373e-06, + "loss": 0.5549, + "step": 6022 + }, + { + "epoch": 0.4182058047493404, + "grad_norm": 2.7961794737288055, + "learning_rate": 6.5425180645648125e-06, + "loss": 0.2726, + "step": 6023 + }, + { + "epoch": 0.4182752395500625, + "grad_norm": 4.570486094680746, + "learning_rate": 6.541448387177434e-06, + "loss": 0.678, + "step": 6024 + }, + { + "epoch": 0.4183446743507846, + "grad_norm": 3.899465307463808, + "learning_rate": 6.540378631825339e-06, + "loss": 0.513, + "step": 6025 + }, + { + "epoch": 0.41841410915150673, + "grad_norm": 2.9027411671370875, + "learning_rate": 6.539308798562638e-06, + "loss": 0.2125, + "step": 6026 + }, + { + "epoch": 0.41848354395222886, + "grad_norm": 3.618225188840961, + "learning_rate": 6.538238887443439e-06, + "loss": 0.3345, + "step": 6027 + }, + { 
+ "epoch": 0.418552978752951, + "grad_norm": 3.600633353446839, + "learning_rate": 6.537168898521857e-06, + "loss": 0.3684, + "step": 6028 + }, + { + "epoch": 0.4186224135536731, + "grad_norm": 3.7134990189042694, + "learning_rate": 6.536098831852013e-06, + "loss": 0.5795, + "step": 6029 + }, + { + "epoch": 0.4186918483543952, + "grad_norm": 4.089622555002528, + "learning_rate": 6.535028687488028e-06, + "loss": 0.4647, + "step": 6030 + }, + { + "epoch": 0.41876128315511735, + "grad_norm": 3.5980417513099034, + "learning_rate": 6.5339584654840285e-06, + "loss": 0.422, + "step": 6031 + }, + { + "epoch": 0.4188307179558395, + "grad_norm": 4.5279715614629, + "learning_rate": 6.5328881658941465e-06, + "loss": 0.5197, + "step": 6032 + }, + { + "epoch": 0.41890015275656156, + "grad_norm": 3.679356878002001, + "learning_rate": 6.531817788772514e-06, + "loss": 0.342, + "step": 6033 + }, + { + "epoch": 0.4189695875572837, + "grad_norm": 4.512231380692237, + "learning_rate": 6.5307473341732716e-06, + "loss": 0.5692, + "step": 6034 + }, + { + "epoch": 0.41903902235800583, + "grad_norm": 4.115720243011601, + "learning_rate": 6.529676802150562e-06, + "loss": 0.6132, + "step": 6035 + }, + { + "epoch": 0.41910845715872797, + "grad_norm": 3.2013356523126517, + "learning_rate": 6.52860619275853e-06, + "loss": 0.3609, + "step": 6036 + }, + { + "epoch": 0.4191778919594501, + "grad_norm": 5.542840427215459, + "learning_rate": 6.5275355060513256e-06, + "loss": 0.3734, + "step": 6037 + }, + { + "epoch": 0.4192473267601722, + "grad_norm": 4.268606719949882, + "learning_rate": 6.526464742083106e-06, + "loss": 0.7196, + "step": 6038 + }, + { + "epoch": 0.4193167615608943, + "grad_norm": 3.201347655982047, + "learning_rate": 6.525393900908027e-06, + "loss": 0.3387, + "step": 6039 + }, + { + "epoch": 0.41938619636161645, + "grad_norm": 2.5681047723346166, + "learning_rate": 6.524322982580249e-06, + "loss": 0.3211, + "step": 6040 + }, + { + "epoch": 0.4194556311623386, + "grad_norm": 5.038397113646505, + "learning_rate": 6.52325198715394e-06, + "loss": 0.4734, + "step": 6041 + }, + { + "epoch": 0.41952506596306066, + "grad_norm": 3.8208049712749186, + "learning_rate": 6.522180914683269e-06, + "loss": 0.2741, + "step": 6042 + }, + { + "epoch": 0.4195945007637828, + "grad_norm": 2.9651733411660572, + "learning_rate": 6.5211097652224095e-06, + "loss": 0.1394, + "step": 6043 + }, + { + "epoch": 0.41966393556450493, + "grad_norm": 2.7721057672388594, + "learning_rate": 6.52003853882554e-06, + "loss": 0.376, + "step": 6044 + }, + { + "epoch": 0.41973337036522707, + "grad_norm": 3.3019578493470996, + "learning_rate": 6.5189672355468415e-06, + "loss": 0.4997, + "step": 6045 + }, + { + "epoch": 0.4198028051659492, + "grad_norm": 4.714418992647794, + "learning_rate": 6.517895855440498e-06, + "loss": 0.5589, + "step": 6046 + }, + { + "epoch": 0.4198722399666713, + "grad_norm": 4.078158973383872, + "learning_rate": 6.516824398560701e-06, + "loss": 0.3377, + "step": 6047 + }, + { + "epoch": 0.4199416747673934, + "grad_norm": 6.592595413578484, + "learning_rate": 6.515752864961642e-06, + "loss": 0.8498, + "step": 6048 + }, + { + "epoch": 0.42001110956811555, + "grad_norm": 4.520407060254008, + "learning_rate": 6.514681254697517e-06, + "loss": 0.2933, + "step": 6049 + }, + { + "epoch": 0.4200805443688377, + "grad_norm": 4.5882533122976685, + "learning_rate": 6.513609567822529e-06, + "loss": 0.6601, + "step": 6050 + }, + { + "epoch": 0.42014997916955976, + "grad_norm": 1.8845564112630744, + "learning_rate": 6.512537804390883e-06, + 
"loss": 0.0911, + "step": 6051 + }, + { + "epoch": 0.4202194139702819, + "grad_norm": 4.111633155370928, + "learning_rate": 6.511465964456785e-06, + "loss": 0.6183, + "step": 6052 + }, + { + "epoch": 0.42028884877100403, + "grad_norm": 4.327947265337648, + "learning_rate": 6.510394048074449e-06, + "loss": 0.4192, + "step": 6053 + }, + { + "epoch": 0.42035828357172617, + "grad_norm": 3.3528841845329613, + "learning_rate": 6.509322055298092e-06, + "loss": 0.3139, + "step": 6054 + }, + { + "epoch": 0.42042771837244824, + "grad_norm": 3.202619124518965, + "learning_rate": 6.508249986181931e-06, + "loss": 0.3789, + "step": 6055 + }, + { + "epoch": 0.4204971531731704, + "grad_norm": 3.8857586249414524, + "learning_rate": 6.5071778407801955e-06, + "loss": 0.4773, + "step": 6056 + }, + { + "epoch": 0.4205665879738925, + "grad_norm": 4.261108104256129, + "learning_rate": 6.506105619147108e-06, + "loss": 0.5262, + "step": 6057 + }, + { + "epoch": 0.42063602277461465, + "grad_norm": 5.682191542890119, + "learning_rate": 6.5050333213369044e-06, + "loss": 0.6677, + "step": 6058 + }, + { + "epoch": 0.4207054575753368, + "grad_norm": 3.777291835713108, + "learning_rate": 6.503960947403818e-06, + "loss": 0.4188, + "step": 6059 + }, + { + "epoch": 0.42077489237605886, + "grad_norm": 3.8156496824070323, + "learning_rate": 6.502888497402087e-06, + "loss": 0.6032, + "step": 6060 + }, + { + "epoch": 0.420844327176781, + "grad_norm": 3.1060753127707854, + "learning_rate": 6.501815971385959e-06, + "loss": 0.2598, + "step": 6061 + }, + { + "epoch": 0.42091376197750313, + "grad_norm": 4.019809038108703, + "learning_rate": 6.500743369409676e-06, + "loss": 0.6035, + "step": 6062 + }, + { + "epoch": 0.42098319677822527, + "grad_norm": 4.271632667044726, + "learning_rate": 6.4996706915274935e-06, + "loss": 0.5402, + "step": 6063 + }, + { + "epoch": 0.42105263157894735, + "grad_norm": 4.2791206431556175, + "learning_rate": 6.498597937793665e-06, + "loss": 0.4962, + "step": 6064 + }, + { + "epoch": 0.4211220663796695, + "grad_norm": 3.863856827027266, + "learning_rate": 6.497525108262446e-06, + "loss": 0.426, + "step": 6065 + }, + { + "epoch": 0.4211915011803916, + "grad_norm": 3.3488976404895543, + "learning_rate": 6.496452202988104e-06, + "loss": 0.3959, + "step": 6066 + }, + { + "epoch": 0.42126093598111375, + "grad_norm": 3.501628951042371, + "learning_rate": 6.495379222024901e-06, + "loss": 0.5029, + "step": 6067 + }, + { + "epoch": 0.4213303707818359, + "grad_norm": 3.609672961257711, + "learning_rate": 6.494306165427111e-06, + "loss": 0.3534, + "step": 6068 + }, + { + "epoch": 0.42139980558255796, + "grad_norm": 4.653207558954737, + "learning_rate": 6.493233033249004e-06, + "loss": 0.3919, + "step": 6069 + }, + { + "epoch": 0.4214692403832801, + "grad_norm": 3.8126400644483818, + "learning_rate": 6.492159825544861e-06, + "loss": 0.4411, + "step": 6070 + }, + { + "epoch": 0.42153867518400223, + "grad_norm": 3.427054617712613, + "learning_rate": 6.4910865423689626e-06, + "loss": 0.5088, + "step": 6071 + }, + { + "epoch": 0.42160810998472437, + "grad_norm": 5.068917800583965, + "learning_rate": 6.490013183775594e-06, + "loss": 0.6713, + "step": 6072 + }, + { + "epoch": 0.42167754478544645, + "grad_norm": 2.866985555881974, + "learning_rate": 6.488939749819045e-06, + "loss": 0.3681, + "step": 6073 + }, + { + "epoch": 0.4217469795861686, + "grad_norm": 4.196937653836931, + "learning_rate": 6.487866240553608e-06, + "loss": 0.6518, + "step": 6074 + }, + { + "epoch": 0.4218164143868907, + "grad_norm": 3.9649150266569557, 
+ "learning_rate": 6.4867926560335805e-06, + "loss": 0.6451, + "step": 6075 + }, + { + "epoch": 0.42188584918761285, + "grad_norm": 3.7453206291006342, + "learning_rate": 6.485718996313262e-06, + "loss": 0.5116, + "step": 6076 + }, + { + "epoch": 0.42195528398833493, + "grad_norm": 3.823194618011116, + "learning_rate": 6.484645261446959e-06, + "loss": 0.5023, + "step": 6077 + }, + { + "epoch": 0.42202471878905706, + "grad_norm": 3.009890919251052, + "learning_rate": 6.483571451488977e-06, + "loss": 0.2257, + "step": 6078 + }, + { + "epoch": 0.4220941535897792, + "grad_norm": 3.2477430597453893, + "learning_rate": 6.4824975664936305e-06, + "loss": 0.3676, + "step": 6079 + }, + { + "epoch": 0.42216358839050133, + "grad_norm": 4.068978198132351, + "learning_rate": 6.4814236065152355e-06, + "loss": 0.4056, + "step": 6080 + }, + { + "epoch": 0.42223302319122347, + "grad_norm": 3.7073421207993302, + "learning_rate": 6.480349571608111e-06, + "loss": 0.3759, + "step": 6081 + }, + { + "epoch": 0.42230245799194555, + "grad_norm": 3.441880660842226, + "learning_rate": 6.479275461826579e-06, + "loss": 0.4573, + "step": 6082 + }, + { + "epoch": 0.4223718927926677, + "grad_norm": 2.769291940772123, + "learning_rate": 6.478201277224969e-06, + "loss": 0.367, + "step": 6083 + }, + { + "epoch": 0.4224413275933898, + "grad_norm": 2.6160906696091972, + "learning_rate": 6.477127017857611e-06, + "loss": 0.1689, + "step": 6084 + }, + { + "epoch": 0.42251076239411195, + "grad_norm": 3.895257025321492, + "learning_rate": 6.4760526837788384e-06, + "loss": 0.3702, + "step": 6085 + }, + { + "epoch": 0.42258019719483403, + "grad_norm": 4.112193591869085, + "learning_rate": 6.474978275042994e-06, + "loss": 0.4545, + "step": 6086 + }, + { + "epoch": 0.42264963199555616, + "grad_norm": 2.1516462197723025, + "learning_rate": 6.4739037917044145e-06, + "loss": 0.2185, + "step": 6087 + }, + { + "epoch": 0.4227190667962783, + "grad_norm": 2.939015898205002, + "learning_rate": 6.472829233817452e-06, + "loss": 0.1933, + "step": 6088 + }, + { + "epoch": 0.42278850159700043, + "grad_norm": 3.624853366272666, + "learning_rate": 6.471754601436452e-06, + "loss": 0.4424, + "step": 6089 + }, + { + "epoch": 0.4228579363977225, + "grad_norm": 5.082966205152242, + "learning_rate": 6.47067989461577e-06, + "loss": 0.5578, + "step": 6090 + }, + { + "epoch": 0.42292737119844465, + "grad_norm": 4.224631542291856, + "learning_rate": 6.4696051134097624e-06, + "loss": 0.7495, + "step": 6091 + }, + { + "epoch": 0.4229968059991668, + "grad_norm": 3.43386118208664, + "learning_rate": 6.46853025787279e-06, + "loss": 0.3722, + "step": 6092 + }, + { + "epoch": 0.4230662407998889, + "grad_norm": 1.9495776387695833, + "learning_rate": 6.467455328059222e-06, + "loss": 0.1817, + "step": 6093 + }, + { + "epoch": 0.42313567560061105, + "grad_norm": 4.181882839237819, + "learning_rate": 6.466380324023423e-06, + "loss": 0.396, + "step": 6094 + }, + { + "epoch": 0.42320511040133313, + "grad_norm": 3.1779226403844603, + "learning_rate": 6.465305245819765e-06, + "loss": 0.6808, + "step": 6095 + }, + { + "epoch": 0.42327454520205526, + "grad_norm": 4.439537409913754, + "learning_rate": 6.464230093502627e-06, + "loss": 0.5226, + "step": 6096 + }, + { + "epoch": 0.4233439800027774, + "grad_norm": 3.4228970498265068, + "learning_rate": 6.4631548671263876e-06, + "loss": 0.5751, + "step": 6097 + }, + { + "epoch": 0.42341341480349953, + "grad_norm": 3.804722097950861, + "learning_rate": 6.4620795667454315e-06, + "loss": 0.615, + "step": 6098 + }, + { + "epoch": 
0.4234828496042216, + "grad_norm": 3.053084757824642, + "learning_rate": 6.4610041924141445e-06, + "loss": 0.3096, + "step": 6099 + }, + { + "epoch": 0.42355228440494375, + "grad_norm": 3.2253592180049804, + "learning_rate": 6.459928744186919e-06, + "loss": 0.441, + "step": 6100 + }, + { + "epoch": 0.4236217192056659, + "grad_norm": 3.2217589147523253, + "learning_rate": 6.4588532221181484e-06, + "loss": 0.3218, + "step": 6101 + }, + { + "epoch": 0.423691154006388, + "grad_norm": 4.160929195466393, + "learning_rate": 6.457777626262234e-06, + "loss": 0.4574, + "step": 6102 + }, + { + "epoch": 0.42376058880711015, + "grad_norm": 3.363528054234572, + "learning_rate": 6.456701956673577e-06, + "loss": 0.3042, + "step": 6103 + }, + { + "epoch": 0.42383002360783223, + "grad_norm": 3.4362607872843194, + "learning_rate": 6.455626213406583e-06, + "loss": 0.2377, + "step": 6104 + }, + { + "epoch": 0.42389945840855436, + "grad_norm": 4.450401785729174, + "learning_rate": 6.454550396515661e-06, + "loss": 0.5959, + "step": 6105 + }, + { + "epoch": 0.4239688932092765, + "grad_norm": 4.395041436749567, + "learning_rate": 6.453474506055228e-06, + "loss": 0.4338, + "step": 6106 + }, + { + "epoch": 0.42403832800999863, + "grad_norm": 3.4028518171125057, + "learning_rate": 6.4523985420796984e-06, + "loss": 0.3306, + "step": 6107 + }, + { + "epoch": 0.4241077628107207, + "grad_norm": 5.809384522899484, + "learning_rate": 6.451322504643493e-06, + "loss": 0.5098, + "step": 6108 + }, + { + "epoch": 0.42417719761144285, + "grad_norm": 2.8713062600082533, + "learning_rate": 6.45024639380104e-06, + "loss": 0.3193, + "step": 6109 + }, + { + "epoch": 0.424246632412165, + "grad_norm": 2.7163570111851105, + "learning_rate": 6.4491702096067634e-06, + "loss": 0.2602, + "step": 6110 + }, + { + "epoch": 0.4243160672128871, + "grad_norm": 4.115612960663711, + "learning_rate": 6.448093952115098e-06, + "loss": 0.3214, + "step": 6111 + }, + { + "epoch": 0.4243855020136092, + "grad_norm": 3.865730155726266, + "learning_rate": 6.447017621380479e-06, + "loss": 0.3075, + "step": 6112 + }, + { + "epoch": 0.42445493681433133, + "grad_norm": 3.7544772635090333, + "learning_rate": 6.4459412174573464e-06, + "loss": 0.4696, + "step": 6113 + }, + { + "epoch": 0.42452437161505346, + "grad_norm": 3.7601954440388607, + "learning_rate": 6.444864740400143e-06, + "loss": 0.4165, + "step": 6114 + }, + { + "epoch": 0.4245938064157756, + "grad_norm": 3.511231598413274, + "learning_rate": 6.443788190263316e-06, + "loss": 0.498, + "step": 6115 + }, + { + "epoch": 0.42466324121649773, + "grad_norm": 3.9591369495328705, + "learning_rate": 6.4427115671013165e-06, + "loss": 0.6344, + "step": 6116 + }, + { + "epoch": 0.4247326760172198, + "grad_norm": 3.8396890711328138, + "learning_rate": 6.441634870968597e-06, + "loss": 0.4813, + "step": 6117 + }, + { + "epoch": 0.42480211081794195, + "grad_norm": 4.641531558147842, + "learning_rate": 6.440558101919619e-06, + "loss": 0.5964, + "step": 6118 + }, + { + "epoch": 0.4248715456186641, + "grad_norm": 3.562442047518287, + "learning_rate": 6.4394812600088415e-06, + "loss": 0.3657, + "step": 6119 + }, + { + "epoch": 0.4249409804193862, + "grad_norm": 3.540489536958872, + "learning_rate": 6.4384043452907305e-06, + "loss": 0.5034, + "step": 6120 + }, + { + "epoch": 0.4250104152201083, + "grad_norm": 3.7734022411787715, + "learning_rate": 6.4373273578197545e-06, + "loss": 0.6397, + "step": 6121 + }, + { + "epoch": 0.42507985002083043, + "grad_norm": 3.2766099514332834, + "learning_rate": 6.43625029765039e-06, + 
"loss": 0.368, + "step": 6122 + }, + { + "epoch": 0.42514928482155256, + "grad_norm": 3.203916049624322, + "learning_rate": 6.435173164837109e-06, + "loss": 0.5466, + "step": 6123 + }, + { + "epoch": 0.4252187196222747, + "grad_norm": 3.3270231272103334, + "learning_rate": 6.434095959434392e-06, + "loss": 0.3635, + "step": 6124 + }, + { + "epoch": 0.42528815442299683, + "grad_norm": 4.024554458056902, + "learning_rate": 6.433018681496728e-06, + "loss": 0.3972, + "step": 6125 + }, + { + "epoch": 0.4253575892237189, + "grad_norm": 4.436967559525287, + "learning_rate": 6.4319413310785965e-06, + "loss": 0.5474, + "step": 6126 + }, + { + "epoch": 0.42542702402444105, + "grad_norm": 3.441141697285122, + "learning_rate": 6.430863908234495e-06, + "loss": 0.4243, + "step": 6127 + }, + { + "epoch": 0.4254964588251632, + "grad_norm": 2.9312991765415015, + "learning_rate": 6.429786413018915e-06, + "loss": 0.2505, + "step": 6128 + }, + { + "epoch": 0.4255658936258853, + "grad_norm": 3.39208262346195, + "learning_rate": 6.428708845486358e-06, + "loss": 0.4172, + "step": 6129 + }, + { + "epoch": 0.4256353284266074, + "grad_norm": 3.6616072857145037, + "learning_rate": 6.4276312056913226e-06, + "loss": 0.4142, + "step": 6130 + }, + { + "epoch": 0.42570476322732953, + "grad_norm": 4.053630312940221, + "learning_rate": 6.426553493688315e-06, + "loss": 0.3978, + "step": 6131 + }, + { + "epoch": 0.42577419802805166, + "grad_norm": 4.025600586076838, + "learning_rate": 6.42547570953185e-06, + "loss": 0.4863, + "step": 6132 + }, + { + "epoch": 0.4258436328287738, + "grad_norm": 4.595075601590586, + "learning_rate": 6.424397853276433e-06, + "loss": 0.4768, + "step": 6133 + }, + { + "epoch": 0.4259130676294959, + "grad_norm": 3.647108490783121, + "learning_rate": 6.423319924976586e-06, + "loss": 0.489, + "step": 6134 + }, + { + "epoch": 0.425982502430218, + "grad_norm": 4.5010524737115185, + "learning_rate": 6.422241924686827e-06, + "loss": 0.604, + "step": 6135 + }, + { + "epoch": 0.42605193723094015, + "grad_norm": 3.9702804083668957, + "learning_rate": 6.421163852461682e-06, + "loss": 0.6158, + "step": 6136 + }, + { + "epoch": 0.4261213720316623, + "grad_norm": 3.738192682727602, + "learning_rate": 6.420085708355676e-06, + "loss": 0.4821, + "step": 6137 + }, + { + "epoch": 0.4261908068323844, + "grad_norm": 2.7251659153171075, + "learning_rate": 6.419007492423344e-06, + "loss": 0.2899, + "step": 6138 + }, + { + "epoch": 0.4262602416331065, + "grad_norm": 3.942330946782234, + "learning_rate": 6.417929204719218e-06, + "loss": 0.5805, + "step": 6139 + }, + { + "epoch": 0.42632967643382863, + "grad_norm": 3.5241678269643626, + "learning_rate": 6.416850845297835e-06, + "loss": 0.4847, + "step": 6140 + }, + { + "epoch": 0.42639911123455077, + "grad_norm": 3.872661473144205, + "learning_rate": 6.415772414213741e-06, + "loss": 0.3761, + "step": 6141 + }, + { + "epoch": 0.4264685460352729, + "grad_norm": 4.423697086361109, + "learning_rate": 6.414693911521481e-06, + "loss": 0.5974, + "step": 6142 + }, + { + "epoch": 0.426537980835995, + "grad_norm": 4.151335729475707, + "learning_rate": 6.413615337275604e-06, + "loss": 0.4959, + "step": 6143 + }, + { + "epoch": 0.4266074156367171, + "grad_norm": 3.471652507019552, + "learning_rate": 6.412536691530662e-06, + "loss": 0.383, + "step": 6144 + }, + { + "epoch": 0.42667685043743925, + "grad_norm": 3.374826871939644, + "learning_rate": 6.411457974341213e-06, + "loss": 0.1825, + "step": 6145 + }, + { + "epoch": 0.4267462852381614, + "grad_norm": 5.995279899736149, + 
"learning_rate": 6.410379185761816e-06, + "loss": 0.4838, + "step": 6146 + }, + { + "epoch": 0.42681572003888346, + "grad_norm": 3.34778237916351, + "learning_rate": 6.409300325847036e-06, + "loss": 0.4385, + "step": 6147 + }, + { + "epoch": 0.4268851548396056, + "grad_norm": 3.7724298815392494, + "learning_rate": 6.408221394651441e-06, + "loss": 0.4342, + "step": 6148 + }, + { + "epoch": 0.42695458964032773, + "grad_norm": 4.057265289047125, + "learning_rate": 6.407142392229601e-06, + "loss": 0.4915, + "step": 6149 + }, + { + "epoch": 0.42702402444104987, + "grad_norm": 3.4926721763659225, + "learning_rate": 6.406063318636091e-06, + "loss": 0.2714, + "step": 6150 + }, + { + "epoch": 0.427093459241772, + "grad_norm": 3.1131552205320245, + "learning_rate": 6.40498417392549e-06, + "loss": 0.3838, + "step": 6151 + }, + { + "epoch": 0.4271628940424941, + "grad_norm": 3.361258014981579, + "learning_rate": 6.403904958152378e-06, + "loss": 0.1894, + "step": 6152 + }, + { + "epoch": 0.4272323288432162, + "grad_norm": 3.218553734409216, + "learning_rate": 6.402825671371342e-06, + "loss": 0.4672, + "step": 6153 + }, + { + "epoch": 0.42730176364393835, + "grad_norm": 4.273292563547726, + "learning_rate": 6.401746313636973e-06, + "loss": 0.331, + "step": 6154 + }, + { + "epoch": 0.4273711984446605, + "grad_norm": 4.316483317142835, + "learning_rate": 6.400666885003861e-06, + "loss": 0.6188, + "step": 6155 + }, + { + "epoch": 0.42744063324538256, + "grad_norm": 3.314495097353974, + "learning_rate": 6.399587385526601e-06, + "loss": 0.4089, + "step": 6156 + }, + { + "epoch": 0.4275100680461047, + "grad_norm": 3.044933522252987, + "learning_rate": 6.398507815259796e-06, + "loss": 0.2628, + "step": 6157 + }, + { + "epoch": 0.42757950284682683, + "grad_norm": 3.628670345301784, + "learning_rate": 6.397428174258048e-06, + "loss": 0.4727, + "step": 6158 + }, + { + "epoch": 0.42764893764754897, + "grad_norm": 4.498950609684083, + "learning_rate": 6.3963484625759645e-06, + "loss": 0.4911, + "step": 6159 + }, + { + "epoch": 0.4277183724482711, + "grad_norm": 8.158931495231998, + "learning_rate": 6.3952686802681565e-06, + "loss": 0.4679, + "step": 6160 + }, + { + "epoch": 0.4277878072489932, + "grad_norm": 3.521487697685304, + "learning_rate": 6.394188827389236e-06, + "loss": 0.6341, + "step": 6161 + }, + { + "epoch": 0.4278572420497153, + "grad_norm": 4.238024758148727, + "learning_rate": 6.3931089039938225e-06, + "loss": 0.8481, + "step": 6162 + }, + { + "epoch": 0.42792667685043745, + "grad_norm": 2.9021670290139414, + "learning_rate": 6.3920289101365375e-06, + "loss": 0.3003, + "step": 6163 + }, + { + "epoch": 0.4279961116511596, + "grad_norm": 2.0578058299711364, + "learning_rate": 6.390948845872007e-06, + "loss": 0.2322, + "step": 6164 + }, + { + "epoch": 0.42806554645188166, + "grad_norm": 3.4358771304708915, + "learning_rate": 6.389868711254855e-06, + "loss": 0.523, + "step": 6165 + }, + { + "epoch": 0.4281349812526038, + "grad_norm": 3.802293289421713, + "learning_rate": 6.388788506339718e-06, + "loss": 0.58, + "step": 6166 + }, + { + "epoch": 0.42820441605332593, + "grad_norm": 3.9697112986486953, + "learning_rate": 6.387708231181229e-06, + "loss": 0.5044, + "step": 6167 + }, + { + "epoch": 0.42827385085404807, + "grad_norm": 3.8060208513613256, + "learning_rate": 6.3866278858340295e-06, + "loss": 0.5174, + "step": 6168 + }, + { + "epoch": 0.42834328565477015, + "grad_norm": 4.411844931449198, + "learning_rate": 6.385547470352758e-06, + "loss": 0.5321, + "step": 6169 + }, + { + "epoch": 
0.4284127204554923, + "grad_norm": 4.516817406719567, + "learning_rate": 6.384466984792066e-06, + "loss": 0.6663, + "step": 6170 + }, + { + "epoch": 0.4284821552562144, + "grad_norm": 4.313660558214254, + "learning_rate": 6.3833864292065985e-06, + "loss": 0.5489, + "step": 6171 + }, + { + "epoch": 0.42855159005693655, + "grad_norm": 4.453285933251541, + "learning_rate": 6.3823058036510124e-06, + "loss": 0.6331, + "step": 6172 + }, + { + "epoch": 0.4286210248576587, + "grad_norm": 2.6972715093604607, + "learning_rate": 6.3812251081799615e-06, + "loss": 0.4027, + "step": 6173 + }, + { + "epoch": 0.42869045965838076, + "grad_norm": 2.6017593600327458, + "learning_rate": 6.380144342848109e-06, + "loss": 0.1743, + "step": 6174 + }, + { + "epoch": 0.4287598944591029, + "grad_norm": 4.3261312890547785, + "learning_rate": 6.379063507710117e-06, + "loss": 0.5511, + "step": 6175 + }, + { + "epoch": 0.42882932925982503, + "grad_norm": 4.05977718061093, + "learning_rate": 6.377982602820651e-06, + "loss": 0.5268, + "step": 6176 + }, + { + "epoch": 0.42889876406054717, + "grad_norm": 3.206955776170202, + "learning_rate": 6.376901628234389e-06, + "loss": 0.432, + "step": 6177 + }, + { + "epoch": 0.42896819886126925, + "grad_norm": 2.4015639265780817, + "learning_rate": 6.375820584005996e-06, + "loss": 0.168, + "step": 6178 + }, + { + "epoch": 0.4290376336619914, + "grad_norm": 3.6136295905364353, + "learning_rate": 6.374739470190157e-06, + "loss": 0.3063, + "step": 6179 + }, + { + "epoch": 0.4291070684627135, + "grad_norm": 4.166383954490124, + "learning_rate": 6.373658286841552e-06, + "loss": 0.5429, + "step": 6180 + }, + { + "epoch": 0.42917650326343565, + "grad_norm": 3.2563161863746743, + "learning_rate": 6.372577034014863e-06, + "loss": 0.4107, + "step": 6181 + }, + { + "epoch": 0.42924593806415773, + "grad_norm": 3.6736664605662046, + "learning_rate": 6.371495711764783e-06, + "loss": 0.5004, + "step": 6182 + }, + { + "epoch": 0.42931537286487986, + "grad_norm": 3.2907558128364474, + "learning_rate": 6.370414320146e-06, + "loss": 0.3884, + "step": 6183 + }, + { + "epoch": 0.429384807665602, + "grad_norm": 5.364322915852689, + "learning_rate": 6.3693328592132135e-06, + "loss": 0.6337, + "step": 6184 + }, + { + "epoch": 0.42945424246632413, + "grad_norm": 4.457239379309641, + "learning_rate": 6.368251329021118e-06, + "loss": 0.6233, + "step": 6185 + }, + { + "epoch": 0.42952367726704627, + "grad_norm": 3.7179634433302873, + "learning_rate": 6.3671697296244195e-06, + "loss": 0.6133, + "step": 6186 + }, + { + "epoch": 0.42959311206776835, + "grad_norm": 3.2921191653155533, + "learning_rate": 6.366088061077823e-06, + "loss": 0.4374, + "step": 6187 + }, + { + "epoch": 0.4296625468684905, + "grad_norm": 3.2560945071811114, + "learning_rate": 6.365006323436039e-06, + "loss": 0.4529, + "step": 6188 + }, + { + "epoch": 0.4297319816692126, + "grad_norm": 2.7333213474512426, + "learning_rate": 6.363924516753779e-06, + "loss": 0.3384, + "step": 6189 + }, + { + "epoch": 0.42980141646993475, + "grad_norm": 3.598629968181966, + "learning_rate": 6.3628426410857615e-06, + "loss": 0.5157, + "step": 6190 + }, + { + "epoch": 0.42987085127065683, + "grad_norm": 3.426960860786126, + "learning_rate": 6.361760696486704e-06, + "loss": 0.3785, + "step": 6191 + }, + { + "epoch": 0.42994028607137896, + "grad_norm": 2.7514027894688455, + "learning_rate": 6.360678683011332e-06, + "loss": 0.2937, + "step": 6192 + }, + { + "epoch": 0.4300097208721011, + "grad_norm": 3.3941879927613594, + "learning_rate": 6.359596600714373e-06, + 
"loss": 0.323, + "step": 6193 + }, + { + "epoch": 0.43007915567282323, + "grad_norm": 3.2927727007323058, + "learning_rate": 6.358514449650554e-06, + "loss": 0.339, + "step": 6194 + }, + { + "epoch": 0.43014859047354537, + "grad_norm": 4.527619586004049, + "learning_rate": 6.3574322298746126e-06, + "loss": 0.6325, + "step": 6195 + }, + { + "epoch": 0.43021802527426745, + "grad_norm": 3.3550187720318934, + "learning_rate": 6.356349941441286e-06, + "loss": 0.3778, + "step": 6196 + }, + { + "epoch": 0.4302874600749896, + "grad_norm": 3.657734296225048, + "learning_rate": 6.355267584405314e-06, + "loss": 0.4068, + "step": 6197 + }, + { + "epoch": 0.4303568948757117, + "grad_norm": 4.569850246635278, + "learning_rate": 6.35418515882144e-06, + "loss": 0.5493, + "step": 6198 + }, + { + "epoch": 0.43042632967643385, + "grad_norm": 4.198647490754046, + "learning_rate": 6.353102664744413e-06, + "loss": 0.4748, + "step": 6199 + }, + { + "epoch": 0.43049576447715593, + "grad_norm": 3.158135947762238, + "learning_rate": 6.352020102228985e-06, + "loss": 0.4843, + "step": 6200 + }, + { + "epoch": 0.43056519927787806, + "grad_norm": 4.387471013540285, + "learning_rate": 6.35093747132991e-06, + "loss": 0.7032, + "step": 6201 + }, + { + "epoch": 0.4306346340786002, + "grad_norm": 3.2150422721582994, + "learning_rate": 6.349854772101946e-06, + "loss": 0.251, + "step": 6202 + }, + { + "epoch": 0.43070406887932233, + "grad_norm": 4.196692955772125, + "learning_rate": 6.348772004599856e-06, + "loss": 0.6914, + "step": 6203 + }, + { + "epoch": 0.4307735036800444, + "grad_norm": 3.820219439777216, + "learning_rate": 6.3476891688784036e-06, + "loss": 0.5693, + "step": 6204 + }, + { + "epoch": 0.43084293848076655, + "grad_norm": 5.856131972127276, + "learning_rate": 6.346606264992359e-06, + "loss": 0.6002, + "step": 6205 + }, + { + "epoch": 0.4309123732814887, + "grad_norm": 3.8306785474543825, + "learning_rate": 6.345523292996492e-06, + "loss": 0.3749, + "step": 6206 + }, + { + "epoch": 0.4309818080822108, + "grad_norm": 4.531593760807623, + "learning_rate": 6.344440252945581e-06, + "loss": 0.6542, + "step": 6207 + }, + { + "epoch": 0.43105124288293295, + "grad_norm": 3.0504734588578475, + "learning_rate": 6.3433571448943995e-06, + "loss": 0.3293, + "step": 6208 + }, + { + "epoch": 0.43112067768365503, + "grad_norm": 3.5630384644858895, + "learning_rate": 6.342273968897739e-06, + "loss": 0.2986, + "step": 6209 + }, + { + "epoch": 0.43119011248437716, + "grad_norm": 4.162995106672686, + "learning_rate": 6.341190725010376e-06, + "loss": 0.4781, + "step": 6210 + }, + { + "epoch": 0.4312595472850993, + "grad_norm": 3.528974969779685, + "learning_rate": 6.340107413287105e-06, + "loss": 0.4632, + "step": 6211 + }, + { + "epoch": 0.43132898208582143, + "grad_norm": 3.830091631203563, + "learning_rate": 6.3390240337827195e-06, + "loss": 0.508, + "step": 6212 + }, + { + "epoch": 0.4313984168865435, + "grad_norm": 2.965138919729062, + "learning_rate": 6.337940586552012e-06, + "loss": 0.4046, + "step": 6213 + }, + { + "epoch": 0.43146785168726565, + "grad_norm": 3.6198752805128795, + "learning_rate": 6.336857071649783e-06, + "loss": 0.4246, + "step": 6214 + }, + { + "epoch": 0.4315372864879878, + "grad_norm": 3.276994350501958, + "learning_rate": 6.335773489130836e-06, + "loss": 0.4339, + "step": 6215 + }, + { + "epoch": 0.4316067212887099, + "grad_norm": 3.8300853748447836, + "learning_rate": 6.334689839049979e-06, + "loss": 0.3931, + "step": 6216 + }, + { + "epoch": 0.43167615608943205, + "grad_norm": 3.126244155762975, + 
"learning_rate": 6.333606121462018e-06, + "loss": 0.3783, + "step": 6217 + }, + { + "epoch": 0.43174559089015413, + "grad_norm": 3.4044486176369273, + "learning_rate": 6.3325223364217695e-06, + "loss": 0.3964, + "step": 6218 + }, + { + "epoch": 0.43181502569087626, + "grad_norm": 3.842516779526409, + "learning_rate": 6.331438483984049e-06, + "loss": 0.4021, + "step": 6219 + }, + { + "epoch": 0.4318844604915984, + "grad_norm": 3.857044452304051, + "learning_rate": 6.330354564203678e-06, + "loss": 0.6023, + "step": 6220 + }, + { + "epoch": 0.43195389529232053, + "grad_norm": 3.203766751879517, + "learning_rate": 6.329270577135477e-06, + "loss": 0.4275, + "step": 6221 + }, + { + "epoch": 0.4320233300930426, + "grad_norm": 3.854780059053013, + "learning_rate": 6.328186522834276e-06, + "loss": 0.3416, + "step": 6222 + }, + { + "epoch": 0.43209276489376475, + "grad_norm": 4.31001493319814, + "learning_rate": 6.327102401354902e-06, + "loss": 0.3666, + "step": 6223 + }, + { + "epoch": 0.4321621996944869, + "grad_norm": 3.993518923811869, + "learning_rate": 6.3260182127521895e-06, + "loss": 0.4039, + "step": 6224 + }, + { + "epoch": 0.432231634495209, + "grad_norm": 5.170902919387802, + "learning_rate": 6.3249339570809785e-06, + "loss": 0.8354, + "step": 6225 + }, + { + "epoch": 0.4323010692959311, + "grad_norm": 3.7512461289643557, + "learning_rate": 6.323849634396105e-06, + "loss": 0.4974, + "step": 6226 + }, + { + "epoch": 0.43237050409665323, + "grad_norm": 5.063321421563801, + "learning_rate": 6.322765244752417e-06, + "loss": 0.5905, + "step": 6227 + }, + { + "epoch": 0.43243993889737536, + "grad_norm": 2.9651251060680037, + "learning_rate": 6.3216807882047585e-06, + "loss": 0.4106, + "step": 6228 + }, + { + "epoch": 0.4325093736980975, + "grad_norm": 3.864325585647238, + "learning_rate": 6.320596264807982e-06, + "loss": 0.4079, + "step": 6229 + }, + { + "epoch": 0.43257880849881963, + "grad_norm": 4.174936246643424, + "learning_rate": 6.319511674616941e-06, + "loss": 0.6877, + "step": 6230 + }, + { + "epoch": 0.4326482432995417, + "grad_norm": 4.871606079294851, + "learning_rate": 6.318427017686492e-06, + "loss": 0.529, + "step": 6231 + }, + { + "epoch": 0.43271767810026385, + "grad_norm": 4.022767134671024, + "learning_rate": 6.317342294071497e-06, + "loss": 0.6402, + "step": 6232 + }, + { + "epoch": 0.432787112900986, + "grad_norm": 4.015514653397711, + "learning_rate": 6.316257503826818e-06, + "loss": 0.538, + "step": 6233 + }, + { + "epoch": 0.4328565477017081, + "grad_norm": 4.211121252807417, + "learning_rate": 6.315172647007325e-06, + "loss": 0.5353, + "step": 6234 + }, + { + "epoch": 0.4329259825024302, + "grad_norm": 4.155505020982948, + "learning_rate": 6.314087723667888e-06, + "loss": 0.4662, + "step": 6235 + }, + { + "epoch": 0.43299541730315233, + "grad_norm": 3.309727161091552, + "learning_rate": 6.313002733863381e-06, + "loss": 0.3797, + "step": 6236 + }, + { + "epoch": 0.43306485210387446, + "grad_norm": 3.273735534154873, + "learning_rate": 6.311917677648679e-06, + "loss": 0.3887, + "step": 6237 + }, + { + "epoch": 0.4331342869045966, + "grad_norm": 4.952631455792074, + "learning_rate": 6.3108325550786684e-06, + "loss": 0.5046, + "step": 6238 + }, + { + "epoch": 0.4332037217053187, + "grad_norm": 5.177352625785571, + "learning_rate": 6.3097473662082295e-06, + "loss": 0.6613, + "step": 6239 + }, + { + "epoch": 0.4332731565060408, + "grad_norm": 2.8794287861606738, + "learning_rate": 6.308662111092251e-06, + "loss": 0.212, + "step": 6240 + }, + { + "epoch": 0.43334259130676295, 
+ "grad_norm": 3.149121066036248, + "learning_rate": 6.307576789785625e-06, + "loss": 0.5001, + "step": 6241 + }, + { + "epoch": 0.4334120261074851, + "grad_norm": 3.9306306089303598, + "learning_rate": 6.306491402343242e-06, + "loss": 0.4868, + "step": 6242 + }, + { + "epoch": 0.4334814609082072, + "grad_norm": 4.537332670125194, + "learning_rate": 6.305405948820004e-06, + "loss": 0.5738, + "step": 6243 + }, + { + "epoch": 0.4335508957089293, + "grad_norm": 3.5635309940963964, + "learning_rate": 6.304320429270809e-06, + "loss": 0.4863, + "step": 6244 + }, + { + "epoch": 0.43362033050965143, + "grad_norm": 5.213153857910934, + "learning_rate": 6.303234843750564e-06, + "loss": 0.679, + "step": 6245 + }, + { + "epoch": 0.43368976531037357, + "grad_norm": 3.595521705175229, + "learning_rate": 6.3021491923141754e-06, + "loss": 0.5134, + "step": 6246 + }, + { + "epoch": 0.4337592001110957, + "grad_norm": 3.656660661179552, + "learning_rate": 6.301063475016551e-06, + "loss": 0.278, + "step": 6247 + }, + { + "epoch": 0.4338286349118178, + "grad_norm": 4.630509349384864, + "learning_rate": 6.299977691912613e-06, + "loss": 0.6721, + "step": 6248 + }, + { + "epoch": 0.4338980697125399, + "grad_norm": 3.682352105783668, + "learning_rate": 6.2988918430572706e-06, + "loss": 0.4432, + "step": 6249 + }, + { + "epoch": 0.43396750451326205, + "grad_norm": 3.666991064415426, + "learning_rate": 6.29780592850545e-06, + "loss": 0.1883, + "step": 6250 + }, + { + "epoch": 0.4340369393139842, + "grad_norm": 4.613861818537799, + "learning_rate": 6.296719948312074e-06, + "loss": 0.4703, + "step": 6251 + }, + { + "epoch": 0.4341063741147063, + "grad_norm": 4.381498376832106, + "learning_rate": 6.295633902532071e-06, + "loss": 0.609, + "step": 6252 + }, + { + "epoch": 0.4341758089154284, + "grad_norm": 3.8712790668841013, + "learning_rate": 6.29454779122037e-06, + "loss": 0.5603, + "step": 6253 + }, + { + "epoch": 0.43424524371615053, + "grad_norm": 3.7259353022541917, + "learning_rate": 6.2934616144319096e-06, + "loss": 0.4209, + "step": 6254 + }, + { + "epoch": 0.43431467851687267, + "grad_norm": 3.7251965620997183, + "learning_rate": 6.292375372221622e-06, + "loss": 0.3717, + "step": 6255 + }, + { + "epoch": 0.4343841133175948, + "grad_norm": 3.540529929298755, + "learning_rate": 6.29128906464445e-06, + "loss": 0.48, + "step": 6256 + }, + { + "epoch": 0.4344535481183169, + "grad_norm": 3.3442460579804005, + "learning_rate": 6.290202691755338e-06, + "loss": 0.4438, + "step": 6257 + }, + { + "epoch": 0.434522982919039, + "grad_norm": 5.512456991686067, + "learning_rate": 6.289116253609235e-06, + "loss": 0.6124, + "step": 6258 + }, + { + "epoch": 0.43459241771976115, + "grad_norm": 3.9172157549382742, + "learning_rate": 6.288029750261091e-06, + "loss": 0.3657, + "step": 6259 + }, + { + "epoch": 0.4346618525204833, + "grad_norm": 4.407281520310914, + "learning_rate": 6.28694318176586e-06, + "loss": 0.5637, + "step": 6260 + }, + { + "epoch": 0.43473128732120536, + "grad_norm": 5.406711611027732, + "learning_rate": 6.285856548178498e-06, + "loss": 0.6946, + "step": 6261 + }, + { + "epoch": 0.4348007221219275, + "grad_norm": 4.374091927254284, + "learning_rate": 6.284769849553967e-06, + "loss": 0.5585, + "step": 6262 + }, + { + "epoch": 0.43487015692264963, + "grad_norm": 4.018837631950021, + "learning_rate": 6.283683085947231e-06, + "loss": 0.5537, + "step": 6263 + }, + { + "epoch": 0.43493959172337177, + "grad_norm": 3.091788051910182, + "learning_rate": 6.282596257413258e-06, + "loss": 0.4993, + "step": 6264 + }, + { 
+ "epoch": 0.4350090265240939, + "grad_norm": 4.355272553388547, + "learning_rate": 6.2815093640070156e-06, + "loss": 0.4774, + "step": 6265 + }, + { + "epoch": 0.435078461324816, + "grad_norm": 3.1155100456252254, + "learning_rate": 6.280422405783482e-06, + "loss": 0.2782, + "step": 6266 + }, + { + "epoch": 0.4351478961255381, + "grad_norm": 5.624575123183313, + "learning_rate": 6.2793353827976314e-06, + "loss": 0.5197, + "step": 6267 + }, + { + "epoch": 0.43521733092626025, + "grad_norm": 3.1778482473440715, + "learning_rate": 6.278248295104443e-06, + "loss": 0.4633, + "step": 6268 + }, + { + "epoch": 0.4352867657269824, + "grad_norm": 4.762522964150873, + "learning_rate": 6.277161142758903e-06, + "loss": 0.7185, + "step": 6269 + }, + { + "epoch": 0.43535620052770446, + "grad_norm": 3.193375504660091, + "learning_rate": 6.2760739258159995e-06, + "loss": 0.2972, + "step": 6270 + }, + { + "epoch": 0.4354256353284266, + "grad_norm": 3.908466918004387, + "learning_rate": 6.274986644330719e-06, + "loss": 0.514, + "step": 6271 + }, + { + "epoch": 0.43549507012914873, + "grad_norm": 3.7058670048466946, + "learning_rate": 6.273899298358057e-06, + "loss": 0.4089, + "step": 6272 + }, + { + "epoch": 0.43556450492987087, + "grad_norm": 4.342551125251282, + "learning_rate": 6.272811887953009e-06, + "loss": 0.5115, + "step": 6273 + }, + { + "epoch": 0.435633939730593, + "grad_norm": 3.5946400576630113, + "learning_rate": 6.271724413170579e-06, + "loss": 0.4494, + "step": 6274 + }, + { + "epoch": 0.4357033745313151, + "grad_norm": 3.6199582975699824, + "learning_rate": 6.270636874065766e-06, + "loss": 0.5463, + "step": 6275 + }, + { + "epoch": 0.4357728093320372, + "grad_norm": 4.911996207750501, + "learning_rate": 6.269549270693576e-06, + "loss": 0.5419, + "step": 6276 + }, + { + "epoch": 0.43584224413275935, + "grad_norm": 2.768496308600077, + "learning_rate": 6.268461603109023e-06, + "loss": 0.342, + "step": 6277 + }, + { + "epoch": 0.4359116789334815, + "grad_norm": 3.5732622311759186, + "learning_rate": 6.267373871367115e-06, + "loss": 0.4356, + "step": 6278 + }, + { + "epoch": 0.43598111373420356, + "grad_norm": 4.430813893048166, + "learning_rate": 6.266286075522872e-06, + "loss": 0.4786, + "step": 6279 + }, + { + "epoch": 0.4360505485349257, + "grad_norm": 3.9345620310893143, + "learning_rate": 6.265198215631313e-06, + "loss": 0.5277, + "step": 6280 + }, + { + "epoch": 0.43611998333564783, + "grad_norm": 3.521946262248316, + "learning_rate": 6.264110291747458e-06, + "loss": 0.4195, + "step": 6281 + }, + { + "epoch": 0.43618941813636997, + "grad_norm": 3.1177796546296315, + "learning_rate": 6.263022303926336e-06, + "loss": 0.3275, + "step": 6282 + }, + { + "epoch": 0.43625885293709205, + "grad_norm": 3.0610177893834667, + "learning_rate": 6.261934252222975e-06, + "loss": 0.2638, + "step": 6283 + }, + { + "epoch": 0.4363282877378142, + "grad_norm": 4.368808448241052, + "learning_rate": 6.260846136692408e-06, + "loss": 0.4377, + "step": 6284 + }, + { + "epoch": 0.4363977225385363, + "grad_norm": 3.685414709462015, + "learning_rate": 6.259757957389669e-06, + "loss": 0.4031, + "step": 6285 + }, + { + "epoch": 0.43646715733925845, + "grad_norm": 5.406926886525559, + "learning_rate": 6.258669714369799e-06, + "loss": 0.6408, + "step": 6286 + }, + { + "epoch": 0.4365365921399806, + "grad_norm": 3.43114825479396, + "learning_rate": 6.257581407687838e-06, + "loss": 0.4048, + "step": 6287 + }, + { + "epoch": 0.43660602694070266, + "grad_norm": 3.3213039720473776, + "learning_rate": 6.256493037398834e-06, + 
"loss": 0.3204, + "step": 6288 + }, + { + "epoch": 0.4366754617414248, + "grad_norm": 4.0424244421176025, + "learning_rate": 6.255404603557833e-06, + "loss": 0.5476, + "step": 6289 + }, + { + "epoch": 0.43674489654214693, + "grad_norm": 9.350532773026421, + "learning_rate": 6.254316106219887e-06, + "loss": 0.5336, + "step": 6290 + }, + { + "epoch": 0.43681433134286907, + "grad_norm": 3.500586088293062, + "learning_rate": 6.253227545440054e-06, + "loss": 0.4152, + "step": 6291 + }, + { + "epoch": 0.43688376614359115, + "grad_norm": 3.4747199313887505, + "learning_rate": 6.252138921273387e-06, + "loss": 0.3851, + "step": 6292 + }, + { + "epoch": 0.4369532009443133, + "grad_norm": 3.9514436963721535, + "learning_rate": 6.251050233774953e-06, + "loss": 0.7342, + "step": 6293 + }, + { + "epoch": 0.4370226357450354, + "grad_norm": 3.7221958352180886, + "learning_rate": 6.249961482999812e-06, + "loss": 0.4237, + "step": 6294 + }, + { + "epoch": 0.43709207054575755, + "grad_norm": 3.1242007263475964, + "learning_rate": 6.248872669003034e-06, + "loss": 0.3739, + "step": 6295 + }, + { + "epoch": 0.43716150534647963, + "grad_norm": 4.213094697758579, + "learning_rate": 6.247783791839691e-06, + "loss": 0.6812, + "step": 6296 + }, + { + "epoch": 0.43723094014720176, + "grad_norm": 3.666798381574115, + "learning_rate": 6.246694851564854e-06, + "loss": 0.354, + "step": 6297 + }, + { + "epoch": 0.4373003749479239, + "grad_norm": 3.6823700115746933, + "learning_rate": 6.245605848233602e-06, + "loss": 0.3702, + "step": 6298 + }, + { + "epoch": 0.43736980974864603, + "grad_norm": 3.0808060315751757, + "learning_rate": 6.244516781901018e-06, + "loss": 0.3097, + "step": 6299 + }, + { + "epoch": 0.43743924454936817, + "grad_norm": 3.2591826788743683, + "learning_rate": 6.2434276526221814e-06, + "loss": 0.3494, + "step": 6300 + }, + { + "epoch": 0.43750867935009025, + "grad_norm": 4.251471875757505, + "learning_rate": 6.242338460452181e-06, + "loss": 0.7552, + "step": 6301 + }, + { + "epoch": 0.4375781141508124, + "grad_norm": 3.6652863451656787, + "learning_rate": 6.241249205446107e-06, + "loss": 0.4499, + "step": 6302 + }, + { + "epoch": 0.4376475489515345, + "grad_norm": 4.3385412988438725, + "learning_rate": 6.240159887659054e-06, + "loss": 0.6957, + "step": 6303 + }, + { + "epoch": 0.43771698375225665, + "grad_norm": 3.6317915155946343, + "learning_rate": 6.2390705071461165e-06, + "loss": 0.3951, + "step": 6304 + }, + { + "epoch": 0.43778641855297873, + "grad_norm": 3.86560993896574, + "learning_rate": 6.237981063962395e-06, + "loss": 0.4704, + "step": 6305 + }, + { + "epoch": 0.43785585335370086, + "grad_norm": 3.023131027607578, + "learning_rate": 6.236891558162993e-06, + "loss": 0.3241, + "step": 6306 + }, + { + "epoch": 0.437925288154423, + "grad_norm": 3.6746180644952826, + "learning_rate": 6.235801989803015e-06, + "loss": 0.4107, + "step": 6307 + }, + { + "epoch": 0.43799472295514513, + "grad_norm": 4.491708145064552, + "learning_rate": 6.234712358937569e-06, + "loss": 0.6872, + "step": 6308 + }, + { + "epoch": 0.43806415775586727, + "grad_norm": 3.857875349762248, + "learning_rate": 6.2336226656217724e-06, + "loss": 0.5015, + "step": 6309 + }, + { + "epoch": 0.43813359255658935, + "grad_norm": 3.2917308362868787, + "learning_rate": 6.232532909910735e-06, + "loss": 0.4069, + "step": 6310 + }, + { + "epoch": 0.4382030273573115, + "grad_norm": 2.31715582346272, + "learning_rate": 6.231443091859578e-06, + "loss": 0.1419, + "step": 6311 + }, + { + "epoch": 0.4382724621580336, + "grad_norm": 
4.364501155687156, + "learning_rate": 6.2303532115234235e-06, + "loss": 0.671, + "step": 6312 + }, + { + "epoch": 0.43834189695875575, + "grad_norm": 3.2326852329743465, + "learning_rate": 6.229263268957395e-06, + "loss": 0.4033, + "step": 6313 + }, + { + "epoch": 0.43841133175947783, + "grad_norm": 3.15497422172834, + "learning_rate": 6.228173264216622e-06, + "loss": 0.3396, + "step": 6314 + }, + { + "epoch": 0.43848076656019996, + "grad_norm": 4.325934785550777, + "learning_rate": 6.227083197356235e-06, + "loss": 0.6066, + "step": 6315 + }, + { + "epoch": 0.4385502013609221, + "grad_norm": 3.227545766795584, + "learning_rate": 6.2259930684313685e-06, + "loss": 0.2638, + "step": 6316 + }, + { + "epoch": 0.43861963616164423, + "grad_norm": 2.7997864323913526, + "learning_rate": 6.224902877497159e-06, + "loss": 0.3507, + "step": 6317 + }, + { + "epoch": 0.4386890709623663, + "grad_norm": 4.036725519723322, + "learning_rate": 6.2238126246087495e-06, + "loss": 0.6026, + "step": 6318 + }, + { + "epoch": 0.43875850576308845, + "grad_norm": 3.1885803647718665, + "learning_rate": 6.222722309821281e-06, + "loss": 0.265, + "step": 6319 + }, + { + "epoch": 0.4388279405638106, + "grad_norm": 3.4711771196752, + "learning_rate": 6.221631933189903e-06, + "loss": 0.3755, + "step": 6320 + }, + { + "epoch": 0.4388973753645327, + "grad_norm": 3.4957236006543977, + "learning_rate": 6.220541494769763e-06, + "loss": 0.4657, + "step": 6321 + }, + { + "epoch": 0.43896681016525485, + "grad_norm": 4.027063362632856, + "learning_rate": 6.219450994616015e-06, + "loss": 0.4228, + "step": 6322 + }, + { + "epoch": 0.43903624496597693, + "grad_norm": 3.762994585218503, + "learning_rate": 6.218360432783816e-06, + "loss": 0.5389, + "step": 6323 + }, + { + "epoch": 0.43910567976669906, + "grad_norm": 5.590656972490438, + "learning_rate": 6.217269809328326e-06, + "loss": 0.87, + "step": 6324 + }, + { + "epoch": 0.4391751145674212, + "grad_norm": 2.648865681721893, + "learning_rate": 6.2161791243047065e-06, + "loss": 0.2843, + "step": 6325 + }, + { + "epoch": 0.43924454936814333, + "grad_norm": 4.014279071194499, + "learning_rate": 6.215088377768121e-06, + "loss": 0.6164, + "step": 6326 + }, + { + "epoch": 0.4393139841688654, + "grad_norm": 3.9078202668686175, + "learning_rate": 6.213997569773742e-06, + "loss": 0.6364, + "step": 6327 + }, + { + "epoch": 0.43938341896958755, + "grad_norm": 3.698323126595387, + "learning_rate": 6.2129067003767395e-06, + "loss": 0.655, + "step": 6328 + }, + { + "epoch": 0.4394528537703097, + "grad_norm": 4.439157217765335, + "learning_rate": 6.2118157696322875e-06, + "loss": 0.5008, + "step": 6329 + }, + { + "epoch": 0.4395222885710318, + "grad_norm": 3.984877774849983, + "learning_rate": 6.210724777595567e-06, + "loss": 0.4757, + "step": 6330 + }, + { + "epoch": 0.43959172337175395, + "grad_norm": 3.1734151789047895, + "learning_rate": 6.209633724321756e-06, + "loss": 0.3155, + "step": 6331 + }, + { + "epoch": 0.43966115817247603, + "grad_norm": 3.1802912935557552, + "learning_rate": 6.208542609866041e-06, + "loss": 0.4019, + "step": 6332 + }, + { + "epoch": 0.43973059297319816, + "grad_norm": 3.8702403510380625, + "learning_rate": 6.207451434283607e-06, + "loss": 0.4299, + "step": 6333 + }, + { + "epoch": 0.4398000277739203, + "grad_norm": 4.900151106451885, + "learning_rate": 6.206360197629646e-06, + "loss": 0.6643, + "step": 6334 + }, + { + "epoch": 0.43986946257464243, + "grad_norm": 3.7047540569029005, + "learning_rate": 6.2052688999593525e-06, + "loss": 0.5053, + "step": 6335 + }, + { + 
"epoch": 0.4399388973753645, + "grad_norm": 3.9766997133529656, + "learning_rate": 6.204177541327922e-06, + "loss": 0.5562, + "step": 6336 + }, + { + "epoch": 0.44000833217608665, + "grad_norm": 5.037806722750332, + "learning_rate": 6.203086121790553e-06, + "loss": 0.6293, + "step": 6337 + }, + { + "epoch": 0.4400777669768088, + "grad_norm": 3.8981757847394674, + "learning_rate": 6.201994641402449e-06, + "loss": 0.4743, + "step": 6338 + }, + { + "epoch": 0.4401472017775309, + "grad_norm": 2.9708228590882637, + "learning_rate": 6.200903100218817e-06, + "loss": 0.2783, + "step": 6339 + }, + { + "epoch": 0.440216636578253, + "grad_norm": 3.3625020209834364, + "learning_rate": 6.199811498294864e-06, + "loss": 0.4026, + "step": 6340 + }, + { + "epoch": 0.44028607137897513, + "grad_norm": 3.2066176367403627, + "learning_rate": 6.198719835685805e-06, + "loss": 0.2333, + "step": 6341 + }, + { + "epoch": 0.44035550617969726, + "grad_norm": 4.689883205711582, + "learning_rate": 6.1976281124468515e-06, + "loss": 0.5064, + "step": 6342 + }, + { + "epoch": 0.4404249409804194, + "grad_norm": 4.086958159371678, + "learning_rate": 6.1965363286332236e-06, + "loss": 0.6102, + "step": 6343 + }, + { + "epoch": 0.44049437578114153, + "grad_norm": 3.5330187007705955, + "learning_rate": 6.195444484300143e-06, + "loss": 0.3546, + "step": 6344 + }, + { + "epoch": 0.4405638105818636, + "grad_norm": 3.363498297895666, + "learning_rate": 6.194352579502832e-06, + "loss": 0.5109, + "step": 6345 + }, + { + "epoch": 0.44063324538258575, + "grad_norm": 3.3883380947062847, + "learning_rate": 6.19326061429652e-06, + "loss": 0.3446, + "step": 6346 + }, + { + "epoch": 0.4407026801833079, + "grad_norm": 4.076007675652173, + "learning_rate": 6.192168588736436e-06, + "loss": 0.5026, + "step": 6347 + }, + { + "epoch": 0.44077211498403, + "grad_norm": 3.2341208733171998, + "learning_rate": 6.1910765028778145e-06, + "loss": 0.3634, + "step": 6348 + }, + { + "epoch": 0.4408415497847521, + "grad_norm": 2.382854252484818, + "learning_rate": 6.18998435677589e-06, + "loss": 0.1706, + "step": 6349 + }, + { + "epoch": 0.44091098458547423, + "grad_norm": 5.975773241650764, + "learning_rate": 6.188892150485904e-06, + "loss": 0.4608, + "step": 6350 + }, + { + "epoch": 0.44098041938619637, + "grad_norm": 3.6089222822727667, + "learning_rate": 6.187799884063098e-06, + "loss": 0.4476, + "step": 6351 + }, + { + "epoch": 0.4410498541869185, + "grad_norm": 3.432907937474817, + "learning_rate": 6.186707557562718e-06, + "loss": 0.4611, + "step": 6352 + }, + { + "epoch": 0.4411192889876406, + "grad_norm": 3.003570337475251, + "learning_rate": 6.185615171040013e-06, + "loss": 0.3365, + "step": 6353 + }, + { + "epoch": 0.4411887237883627, + "grad_norm": 3.9255578306433025, + "learning_rate": 6.184522724550235e-06, + "loss": 0.4916, + "step": 6354 + }, + { + "epoch": 0.44125815858908485, + "grad_norm": 3.54023018165851, + "learning_rate": 6.183430218148636e-06, + "loss": 0.3805, + "step": 6355 + }, + { + "epoch": 0.441327593389807, + "grad_norm": 3.075331779595415, + "learning_rate": 6.182337651890477e-06, + "loss": 0.4181, + "step": 6356 + }, + { + "epoch": 0.4413970281905291, + "grad_norm": 3.410112647264346, + "learning_rate": 6.181245025831017e-06, + "loss": 0.517, + "step": 6357 + }, + { + "epoch": 0.4414664629912512, + "grad_norm": 3.0456582535383947, + "learning_rate": 6.18015234002552e-06, + "loss": 0.3997, + "step": 6358 + }, + { + "epoch": 0.44153589779197333, + "grad_norm": 3.0990238339398872, + "learning_rate": 6.179059594529255e-06, + 
"loss": 0.3283, + "step": 6359 + }, + { + "epoch": 0.44160533259269547, + "grad_norm": 3.997714601804692, + "learning_rate": 6.177966789397487e-06, + "loss": 0.3722, + "step": 6360 + }, + { + "epoch": 0.4416747673934176, + "grad_norm": 3.9771153178117773, + "learning_rate": 6.1768739246854936e-06, + "loss": 0.583, + "step": 6361 + }, + { + "epoch": 0.4417442021941397, + "grad_norm": 3.829096924210249, + "learning_rate": 6.175781000448549e-06, + "loss": 0.4084, + "step": 6362 + }, + { + "epoch": 0.4418136369948618, + "grad_norm": 3.88167808298752, + "learning_rate": 6.17468801674193e-06, + "loss": 0.4129, + "step": 6363 + }, + { + "epoch": 0.44188307179558395, + "grad_norm": 4.051326177626964, + "learning_rate": 6.173594973620923e-06, + "loss": 0.4728, + "step": 6364 + }, + { + "epoch": 0.4419525065963061, + "grad_norm": 4.687033519131718, + "learning_rate": 6.172501871140808e-06, + "loss": 0.6342, + "step": 6365 + }, + { + "epoch": 0.4420219413970282, + "grad_norm": 4.518954459965344, + "learning_rate": 6.171408709356876e-06, + "loss": 0.6057, + "step": 6366 + }, + { + "epoch": 0.4420913761977503, + "grad_norm": 3.7419127766649143, + "learning_rate": 6.170315488324417e-06, + "loss": 0.3315, + "step": 6367 + }, + { + "epoch": 0.44216081099847243, + "grad_norm": 3.3656677522079748, + "learning_rate": 6.169222208098725e-06, + "loss": 0.4091, + "step": 6368 + }, + { + "epoch": 0.44223024579919457, + "grad_norm": 3.7711204979932065, + "learning_rate": 6.168128868735096e-06, + "loss": 0.3937, + "step": 6369 + }, + { + "epoch": 0.4422996805999167, + "grad_norm": 3.5046429865618083, + "learning_rate": 6.167035470288832e-06, + "loss": 0.4633, + "step": 6370 + }, + { + "epoch": 0.4423691154006388, + "grad_norm": 4.002858070146452, + "learning_rate": 6.1659420128152345e-06, + "loss": 0.3966, + "step": 6371 + }, + { + "epoch": 0.4424385502013609, + "grad_norm": 2.673015930655621, + "learning_rate": 6.1648484963696075e-06, + "loss": 0.3356, + "step": 6372 + }, + { + "epoch": 0.44250798500208305, + "grad_norm": 4.350660933679766, + "learning_rate": 6.163754921007264e-06, + "loss": 0.5224, + "step": 6373 + }, + { + "epoch": 0.4425774198028052, + "grad_norm": 2.4947880868381023, + "learning_rate": 6.162661286783512e-06, + "loss": 0.1623, + "step": 6374 + }, + { + "epoch": 0.44264685460352726, + "grad_norm": 4.6702841150731444, + "learning_rate": 6.161567593753668e-06, + "loss": 0.4899, + "step": 6375 + }, + { + "epoch": 0.4427162894042494, + "grad_norm": 4.350387679809751, + "learning_rate": 6.16047384197305e-06, + "loss": 0.5456, + "step": 6376 + }, + { + "epoch": 0.44278572420497153, + "grad_norm": 3.629506047659448, + "learning_rate": 6.159380031496978e-06, + "loss": 0.5402, + "step": 6377 + }, + { + "epoch": 0.44285515900569367, + "grad_norm": 5.27740101340111, + "learning_rate": 6.158286162380776e-06, + "loss": 0.5268, + "step": 6378 + }, + { + "epoch": 0.4429245938064158, + "grad_norm": 3.7441763676539765, + "learning_rate": 6.15719223467977e-06, + "loss": 0.5073, + "step": 6379 + }, + { + "epoch": 0.4429940286071379, + "grad_norm": 4.610113321999732, + "learning_rate": 6.1560982484492915e-06, + "loss": 0.3578, + "step": 6380 + }, + { + "epoch": 0.44306346340786, + "grad_norm": 4.000932635852753, + "learning_rate": 6.15500420374467e-06, + "loss": 0.4669, + "step": 6381 + }, + { + "epoch": 0.44313289820858215, + "grad_norm": 4.3067229951273855, + "learning_rate": 6.153910100621243e-06, + "loss": 0.6057, + "step": 6382 + }, + { + "epoch": 0.4432023330093043, + "grad_norm": 2.87851573004484, + 
"learning_rate": 6.15281593913435e-06, + "loss": 0.2247, + "step": 6383 + }, + { + "epoch": 0.44327176781002636, + "grad_norm": 3.087343219556703, + "learning_rate": 6.151721719339332e-06, + "loss": 0.2943, + "step": 6384 + }, + { + "epoch": 0.4433412026107485, + "grad_norm": 3.8609677856199043, + "learning_rate": 6.15062744129153e-06, + "loss": 0.4063, + "step": 6385 + }, + { + "epoch": 0.44341063741147063, + "grad_norm": 4.7439968169337225, + "learning_rate": 6.1495331050462966e-06, + "loss": 0.6875, + "step": 6386 + }, + { + "epoch": 0.44348007221219277, + "grad_norm": 3.1960805493483213, + "learning_rate": 6.148438710658979e-06, + "loss": 0.2667, + "step": 6387 + }, + { + "epoch": 0.44354950701291485, + "grad_norm": 3.550867256927177, + "learning_rate": 6.14734425818493e-06, + "loss": 0.2779, + "step": 6388 + }, + { + "epoch": 0.443618941813637, + "grad_norm": 3.716546086324014, + "learning_rate": 6.146249747679507e-06, + "loss": 0.7447, + "step": 6389 + }, + { + "epoch": 0.4436883766143591, + "grad_norm": 4.456961776346747, + "learning_rate": 6.145155179198069e-06, + "loss": 0.5507, + "step": 6390 + }, + { + "epoch": 0.44375781141508125, + "grad_norm": 4.229950912810634, + "learning_rate": 6.144060552795978e-06, + "loss": 0.7541, + "step": 6391 + }, + { + "epoch": 0.4438272462158034, + "grad_norm": 3.0815503832105957, + "learning_rate": 6.142965868528598e-06, + "loss": 0.1598, + "step": 6392 + }, + { + "epoch": 0.44389668101652546, + "grad_norm": 3.22997200764313, + "learning_rate": 6.1418711264513e-06, + "loss": 0.4398, + "step": 6393 + }, + { + "epoch": 0.4439661158172476, + "grad_norm": 3.589137294057625, + "learning_rate": 6.140776326619451e-06, + "loss": 0.5427, + "step": 6394 + }, + { + "epoch": 0.44403555061796973, + "grad_norm": 3.5423850584550665, + "learning_rate": 6.1396814690884254e-06, + "loss": 0.3325, + "step": 6395 + }, + { + "epoch": 0.44410498541869187, + "grad_norm": 3.8327896687178287, + "learning_rate": 6.138586553913604e-06, + "loss": 0.3546, + "step": 6396 + }, + { + "epoch": 0.44417442021941395, + "grad_norm": 3.62283166119116, + "learning_rate": 6.1374915811503585e-06, + "loss": 0.3159, + "step": 6397 + }, + { + "epoch": 0.4442438550201361, + "grad_norm": 5.1428058433119945, + "learning_rate": 6.136396550854079e-06, + "loss": 0.8069, + "step": 6398 + }, + { + "epoch": 0.4443132898208582, + "grad_norm": 4.1747025944983, + "learning_rate": 6.135301463080147e-06, + "loss": 0.455, + "step": 6399 + }, + { + "epoch": 0.44438272462158035, + "grad_norm": 4.245236770873, + "learning_rate": 6.134206317883953e-06, + "loss": 0.6182, + "step": 6400 + }, + { + "epoch": 0.4444521594223025, + "grad_norm": 3.5473758719600834, + "learning_rate": 6.133111115320884e-06, + "loss": 0.3919, + "step": 6401 + }, + { + "epoch": 0.44452159422302456, + "grad_norm": 3.94802805498725, + "learning_rate": 6.132015855446339e-06, + "loss": 0.4976, + "step": 6402 + }, + { + "epoch": 0.4445910290237467, + "grad_norm": 3.9741885838752844, + "learning_rate": 6.130920538315712e-06, + "loss": 0.5679, + "step": 6403 + }, + { + "epoch": 0.44466046382446883, + "grad_norm": 3.008557601102426, + "learning_rate": 6.129825163984406e-06, + "loss": 0.4498, + "step": 6404 + }, + { + "epoch": 0.44472989862519097, + "grad_norm": 4.710685062964972, + "learning_rate": 6.12872973250782e-06, + "loss": 0.5374, + "step": 6405 + }, + { + "epoch": 0.44479933342591305, + "grad_norm": 3.337555939249492, + "learning_rate": 6.127634243941361e-06, + "loss": 0.4082, + "step": 6406 + }, + { + "epoch": 0.4448687682266352, + 
"grad_norm": 4.878930678027899, + "learning_rate": 6.12653869834044e-06, + "loss": 0.692, + "step": 6407 + }, + { + "epoch": 0.4449382030273573, + "grad_norm": 3.847734244466304, + "learning_rate": 6.125443095760464e-06, + "loss": 0.4965, + "step": 6408 + }, + { + "epoch": 0.44500763782807945, + "grad_norm": 3.0639586629229654, + "learning_rate": 6.124347436256851e-06, + "loss": 0.3744, + "step": 6409 + }, + { + "epoch": 0.44507707262880153, + "grad_norm": 2.99496622389321, + "learning_rate": 6.123251719885016e-06, + "loss": 0.3575, + "step": 6410 + }, + { + "epoch": 0.44514650742952366, + "grad_norm": 2.5733126593609366, + "learning_rate": 6.122155946700381e-06, + "loss": 0.2548, + "step": 6411 + }, + { + "epoch": 0.4452159422302458, + "grad_norm": 3.2347342789073847, + "learning_rate": 6.121060116758369e-06, + "loss": 0.4612, + "step": 6412 + }, + { + "epoch": 0.44528537703096793, + "grad_norm": 2.49083332535861, + "learning_rate": 6.119964230114403e-06, + "loss": 0.3196, + "step": 6413 + }, + { + "epoch": 0.44535481183169007, + "grad_norm": 3.515091511047168, + "learning_rate": 6.1188682868239135e-06, + "loss": 0.6336, + "step": 6414 + }, + { + "epoch": 0.44542424663241215, + "grad_norm": 3.2685478058427773, + "learning_rate": 6.117772286942334e-06, + "loss": 0.3536, + "step": 6415 + }, + { + "epoch": 0.4454936814331343, + "grad_norm": 4.767681147435549, + "learning_rate": 6.116676230525096e-06, + "loss": 0.5015, + "step": 6416 + }, + { + "epoch": 0.4455631162338564, + "grad_norm": 3.200942202515901, + "learning_rate": 6.115580117627636e-06, + "loss": 0.436, + "step": 6417 + }, + { + "epoch": 0.44563255103457855, + "grad_norm": 3.420496222217815, + "learning_rate": 6.114483948305399e-06, + "loss": 0.5002, + "step": 6418 + }, + { + "epoch": 0.44570198583530063, + "grad_norm": 3.119375724754164, + "learning_rate": 6.1133877226138235e-06, + "loss": 0.452, + "step": 6419 + }, + { + "epoch": 0.44577142063602276, + "grad_norm": 2.6876541812729395, + "learning_rate": 6.112291440608357e-06, + "loss": 0.2981, + "step": 6420 + }, + { + "epoch": 0.4458408554367449, + "grad_norm": 3.915636930526553, + "learning_rate": 6.111195102344448e-06, + "loss": 0.4211, + "step": 6421 + }, + { + "epoch": 0.44591029023746703, + "grad_norm": 2.749146971685769, + "learning_rate": 6.110098707877549e-06, + "loss": 0.2795, + "step": 6422 + }, + { + "epoch": 0.44597972503818917, + "grad_norm": 4.0801914307425005, + "learning_rate": 6.1090022572631125e-06, + "loss": 0.2346, + "step": 6423 + }, + { + "epoch": 0.44604915983891125, + "grad_norm": 3.868554075643408, + "learning_rate": 6.107905750556597e-06, + "loss": 0.5105, + "step": 6424 + }, + { + "epoch": 0.4461185946396334, + "grad_norm": 3.9398585562685526, + "learning_rate": 6.106809187813464e-06, + "loss": 0.369, + "step": 6425 + }, + { + "epoch": 0.4461880294403555, + "grad_norm": 2.933579552805726, + "learning_rate": 6.105712569089171e-06, + "loss": 0.2349, + "step": 6426 + }, + { + "epoch": 0.44625746424107765, + "grad_norm": 4.449118146734304, + "learning_rate": 6.104615894439191e-06, + "loss": 0.5248, + "step": 6427 + }, + { + "epoch": 0.44632689904179973, + "grad_norm": 2.7846045603929617, + "learning_rate": 6.103519163918987e-06, + "loss": 0.2892, + "step": 6428 + }, + { + "epoch": 0.44639633384252186, + "grad_norm": 4.263506635889358, + "learning_rate": 6.102422377584033e-06, + "loss": 0.5561, + "step": 6429 + }, + { + "epoch": 0.446465768643244, + "grad_norm": 3.9825871560879884, + "learning_rate": 6.101325535489804e-06, + "loss": 0.5442, + "step": 6430 + 
}, + { + "epoch": 0.44653520344396613, + "grad_norm": 4.072769761096678, + "learning_rate": 6.100228637691774e-06, + "loss": 0.3631, + "step": 6431 + }, + { + "epoch": 0.4466046382446882, + "grad_norm": 3.810580430723449, + "learning_rate": 6.099131684245425e-06, + "loss": 0.5508, + "step": 6432 + }, + { + "epoch": 0.44667407304541035, + "grad_norm": 1.7417939742780761, + "learning_rate": 6.098034675206238e-06, + "loss": 0.1271, + "step": 6433 + }, + { + "epoch": 0.4467435078461325, + "grad_norm": 2.27168586854458, + "learning_rate": 6.096937610629701e-06, + "loss": 0.1419, + "step": 6434 + }, + { + "epoch": 0.4468129426468546, + "grad_norm": 4.316612777870374, + "learning_rate": 6.0958404905712994e-06, + "loss": 0.608, + "step": 6435 + }, + { + "epoch": 0.44688237744757675, + "grad_norm": 4.5478349034822845, + "learning_rate": 6.094743315086528e-06, + "loss": 0.4858, + "step": 6436 + }, + { + "epoch": 0.44695181224829883, + "grad_norm": 4.06929033117874, + "learning_rate": 6.093646084230878e-06, + "loss": 0.6528, + "step": 6437 + }, + { + "epoch": 0.44702124704902096, + "grad_norm": 3.248152342239782, + "learning_rate": 6.092548798059845e-06, + "loss": 0.3749, + "step": 6438 + }, + { + "epoch": 0.4470906818497431, + "grad_norm": 3.6207689253100357, + "learning_rate": 6.091451456628931e-06, + "loss": 0.4234, + "step": 6439 + }, + { + "epoch": 0.44716011665046523, + "grad_norm": 3.855759779381679, + "learning_rate": 6.090354059993637e-06, + "loss": 0.4233, + "step": 6440 + }, + { + "epoch": 0.4472295514511873, + "grad_norm": 4.024103861891615, + "learning_rate": 6.089256608209471e-06, + "loss": 0.5426, + "step": 6441 + }, + { + "epoch": 0.44729898625190945, + "grad_norm": 4.092167397978343, + "learning_rate": 6.0881591013319355e-06, + "loss": 0.4418, + "step": 6442 + }, + { + "epoch": 0.4473684210526316, + "grad_norm": 2.9814363639974117, + "learning_rate": 6.087061539416545e-06, + "loss": 0.3078, + "step": 6443 + }, + { + "epoch": 0.4474378558533537, + "grad_norm": 3.273519534715452, + "learning_rate": 6.085963922518812e-06, + "loss": 0.3414, + "step": 6444 + }, + { + "epoch": 0.4475072906540758, + "grad_norm": 3.837182545658379, + "learning_rate": 6.084866250694252e-06, + "loss": 0.4782, + "step": 6445 + }, + { + "epoch": 0.44757672545479793, + "grad_norm": 3.079212197627249, + "learning_rate": 6.083768523998385e-06, + "loss": 0.3565, + "step": 6446 + }, + { + "epoch": 0.44764616025552006, + "grad_norm": 4.4872532259516, + "learning_rate": 6.0826707424867316e-06, + "loss": 0.5113, + "step": 6447 + }, + { + "epoch": 0.4477155950562422, + "grad_norm": 4.563572380269752, + "learning_rate": 6.0815729062148195e-06, + "loss": 0.5979, + "step": 6448 + }, + { + "epoch": 0.44778502985696433, + "grad_norm": 3.274635695211956, + "learning_rate": 6.080475015238172e-06, + "loss": 0.5138, + "step": 6449 + }, + { + "epoch": 0.4478544646576864, + "grad_norm": 6.66466284073973, + "learning_rate": 6.079377069612321e-06, + "loss": 0.627, + "step": 6450 + }, + { + "epoch": 0.44792389945840855, + "grad_norm": 3.9203767325609675, + "learning_rate": 6.0782790693928e-06, + "loss": 0.6612, + "step": 6451 + }, + { + "epoch": 0.4479933342591307, + "grad_norm": 4.0129877039263535, + "learning_rate": 6.0771810146351425e-06, + "loss": 0.3677, + "step": 6452 + }, + { + "epoch": 0.4480627690598528, + "grad_norm": 4.066409085454307, + "learning_rate": 6.076082905394889e-06, + "loss": 0.5723, + "step": 6453 + }, + { + "epoch": 0.4481322038605749, + "grad_norm": 4.8410763683185, + "learning_rate": 6.07498474172758e-06, + 
"loss": 0.4041, + "step": 6454 + }, + { + "epoch": 0.44820163866129703, + "grad_norm": 3.761606450929358, + "learning_rate": 6.073886523688759e-06, + "loss": 0.3268, + "step": 6455 + }, + { + "epoch": 0.44827107346201917, + "grad_norm": 3.8963442184339656, + "learning_rate": 6.072788251333972e-06, + "loss": 0.4744, + "step": 6456 + }, + { + "epoch": 0.4483405082627413, + "grad_norm": 2.3374548369309753, + "learning_rate": 6.071689924718771e-06, + "loss": 0.2232, + "step": 6457 + }, + { + "epoch": 0.44840994306346343, + "grad_norm": 2.697756012898419, + "learning_rate": 6.070591543898704e-06, + "loss": 0.2619, + "step": 6458 + }, + { + "epoch": 0.4484793778641855, + "grad_norm": 3.7144267313974746, + "learning_rate": 6.06949310892933e-06, + "loss": 0.3687, + "step": 6459 + }, + { + "epoch": 0.44854881266490765, + "grad_norm": 4.884429454142245, + "learning_rate": 6.068394619866203e-06, + "loss": 0.492, + "step": 6460 + }, + { + "epoch": 0.4486182474656298, + "grad_norm": 4.355845168427054, + "learning_rate": 6.067296076764887e-06, + "loss": 0.569, + "step": 6461 + }, + { + "epoch": 0.4486876822663519, + "grad_norm": 3.569563726022667, + "learning_rate": 6.066197479680942e-06, + "loss": 0.4869, + "step": 6462 + }, + { + "epoch": 0.448757117067074, + "grad_norm": 3.644916133422157, + "learning_rate": 6.0650988286699355e-06, + "loss": 0.4771, + "step": 6463 + }, + { + "epoch": 0.44882655186779613, + "grad_norm": 5.587910492731703, + "learning_rate": 6.064000123787436e-06, + "loss": 0.6544, + "step": 6464 + }, + { + "epoch": 0.44889598666851827, + "grad_norm": 3.8562091816907516, + "learning_rate": 6.062901365089012e-06, + "loss": 0.3854, + "step": 6465 + }, + { + "epoch": 0.4489654214692404, + "grad_norm": 3.696616018881992, + "learning_rate": 6.061802552630242e-06, + "loss": 0.3737, + "step": 6466 + }, + { + "epoch": 0.4490348562699625, + "grad_norm": 3.7386340102730693, + "learning_rate": 6.0607036864667e-06, + "loss": 0.4022, + "step": 6467 + }, + { + "epoch": 0.4491042910706846, + "grad_norm": 3.4263693065715293, + "learning_rate": 6.059604766653966e-06, + "loss": 0.3583, + "step": 6468 + }, + { + "epoch": 0.44917372587140675, + "grad_norm": 3.3489562660500956, + "learning_rate": 6.05850579324762e-06, + "loss": 0.5038, + "step": 6469 + }, + { + "epoch": 0.4492431606721289, + "grad_norm": 3.5459014461282408, + "learning_rate": 6.057406766303251e-06, + "loss": 0.5291, + "step": 6470 + }, + { + "epoch": 0.449312595472851, + "grad_norm": 2.410279609674972, + "learning_rate": 6.056307685876443e-06, + "loss": 0.2024, + "step": 6471 + }, + { + "epoch": 0.4493820302735731, + "grad_norm": 4.366032966181556, + "learning_rate": 6.0552085520227875e-06, + "loss": 0.5726, + "step": 6472 + }, + { + "epoch": 0.44945146507429523, + "grad_norm": 4.947445276836761, + "learning_rate": 6.054109364797879e-06, + "loss": 0.7236, + "step": 6473 + }, + { + "epoch": 0.44952089987501737, + "grad_norm": 3.4253802678475003, + "learning_rate": 6.05301012425731e-06, + "loss": 0.4136, + "step": 6474 + }, + { + "epoch": 0.4495903346757395, + "grad_norm": 4.091537607190693, + "learning_rate": 6.051910830456682e-06, + "loss": 0.539, + "step": 6475 + }, + { + "epoch": 0.4496597694764616, + "grad_norm": 5.540231509868604, + "learning_rate": 6.050811483451593e-06, + "loss": 0.7037, + "step": 6476 + }, + { + "epoch": 0.4497292042771837, + "grad_norm": 3.4410372298804117, + "learning_rate": 6.04971208329765e-06, + "loss": 0.449, + "step": 6477 + }, + { + "epoch": 0.44979863907790585, + "grad_norm": 3.296300143342138, + 
"learning_rate": 6.0486126300504555e-06, + "loss": 0.512, + "step": 6478 + }, + { + "epoch": 0.449868073878628, + "grad_norm": 4.098748643240273, + "learning_rate": 6.047513123765622e-06, + "loss": 0.6582, + "step": 6479 + }, + { + "epoch": 0.4499375086793501, + "grad_norm": 5.096787399807512, + "learning_rate": 6.04641356449876e-06, + "loss": 0.4871, + "step": 6480 + }, + { + "epoch": 0.4500069434800722, + "grad_norm": 4.330007712410489, + "learning_rate": 6.045313952305483e-06, + "loss": 0.3909, + "step": 6481 + }, + { + "epoch": 0.45007637828079433, + "grad_norm": 2.9042193196347115, + "learning_rate": 6.04421428724141e-06, + "loss": 0.4092, + "step": 6482 + }, + { + "epoch": 0.45014581308151647, + "grad_norm": 4.132846516220287, + "learning_rate": 6.04311456936216e-06, + "loss": 0.5942, + "step": 6483 + }, + { + "epoch": 0.4502152478822386, + "grad_norm": 4.462373672900015, + "learning_rate": 6.0420147987233556e-06, + "loss": 0.5726, + "step": 6484 + }, + { + "epoch": 0.4502846826829607, + "grad_norm": 2.8726902520630895, + "learning_rate": 6.040914975380621e-06, + "loss": 0.3279, + "step": 6485 + }, + { + "epoch": 0.4503541174836828, + "grad_norm": 4.56609613262533, + "learning_rate": 6.039815099389587e-06, + "loss": 0.5645, + "step": 6486 + }, + { + "epoch": 0.45042355228440495, + "grad_norm": 3.518723474580144, + "learning_rate": 6.03871517080588e-06, + "loss": 0.3518, + "step": 6487 + }, + { + "epoch": 0.4504929870851271, + "grad_norm": 3.9695216053766282, + "learning_rate": 6.037615189685134e-06, + "loss": 0.3725, + "step": 6488 + }, + { + "epoch": 0.45056242188584916, + "grad_norm": 3.317152102645948, + "learning_rate": 6.036515156082987e-06, + "loss": 0.3224, + "step": 6489 + }, + { + "epoch": 0.4506318566865713, + "grad_norm": 3.4119277128209355, + "learning_rate": 6.035415070055077e-06, + "loss": 0.3289, + "step": 6490 + }, + { + "epoch": 0.45070129148729343, + "grad_norm": 4.382366282335321, + "learning_rate": 6.0343149316570435e-06, + "loss": 0.5411, + "step": 6491 + }, + { + "epoch": 0.45077072628801557, + "grad_norm": 3.438983864807977, + "learning_rate": 6.033214740944532e-06, + "loss": 0.4609, + "step": 6492 + }, + { + "epoch": 0.4508401610887377, + "grad_norm": 3.921548772247009, + "learning_rate": 6.032114497973188e-06, + "loss": 0.5714, + "step": 6493 + }, + { + "epoch": 0.4509095958894598, + "grad_norm": 3.799233028095503, + "learning_rate": 6.03101420279866e-06, + "loss": 0.6858, + "step": 6494 + }, + { + "epoch": 0.4509790306901819, + "grad_norm": 4.723661022129299, + "learning_rate": 6.0299138554766005e-06, + "loss": 0.7396, + "step": 6495 + }, + { + "epoch": 0.45104846549090405, + "grad_norm": 3.546524755539137, + "learning_rate": 6.028813456062665e-06, + "loss": 0.5713, + "step": 6496 + }, + { + "epoch": 0.4511179002916262, + "grad_norm": 3.943418532059535, + "learning_rate": 6.027713004612506e-06, + "loss": 0.5071, + "step": 6497 + }, + { + "epoch": 0.45118733509234826, + "grad_norm": 4.2650291057796945, + "learning_rate": 6.026612501181788e-06, + "loss": 0.6585, + "step": 6498 + }, + { + "epoch": 0.4512567698930704, + "grad_norm": 2.7630657741099425, + "learning_rate": 6.025511945826171e-06, + "loss": 0.2768, + "step": 6499 + }, + { + "epoch": 0.45132620469379253, + "grad_norm": 2.6287336200063374, + "learning_rate": 6.024411338601321e-06, + "loss": 0.2516, + "step": 6500 + }, + { + "epoch": 0.45139563949451467, + "grad_norm": 3.449307383703831, + "learning_rate": 6.023310679562904e-06, + "loss": 0.3791, + "step": 6501 + }, + { + "epoch": 0.45146507429523675, 
+ "grad_norm": 3.781952534402416, + "learning_rate": 6.022209968766591e-06, + "loss": 0.3955, + "step": 6502 + }, + { + "epoch": 0.4515345090959589, + "grad_norm": 3.4150039885564665, + "learning_rate": 6.021109206268054e-06, + "loss": 0.5224, + "step": 6503 + }, + { + "epoch": 0.451603943896681, + "grad_norm": 2.5463190703288245, + "learning_rate": 6.020008392122969e-06, + "loss": 0.1787, + "step": 6504 + }, + { + "epoch": 0.45167337869740315, + "grad_norm": 3.821476738345288, + "learning_rate": 6.018907526387014e-06, + "loss": 0.3652, + "step": 6505 + }, + { + "epoch": 0.4517428134981253, + "grad_norm": 3.4833280093390493, + "learning_rate": 6.017806609115869e-06, + "loss": 0.3346, + "step": 6506 + }, + { + "epoch": 0.45181224829884736, + "grad_norm": 3.1699036982286204, + "learning_rate": 6.016705640365219e-06, + "loss": 0.3922, + "step": 6507 + }, + { + "epoch": 0.4518816830995695, + "grad_norm": 4.1190980363933605, + "learning_rate": 6.015604620190746e-06, + "loss": 0.6074, + "step": 6508 + }, + { + "epoch": 0.45195111790029163, + "grad_norm": 3.169103594163511, + "learning_rate": 6.0145035486481415e-06, + "loss": 0.3396, + "step": 6509 + }, + { + "epoch": 0.45202055270101377, + "grad_norm": 4.036900529603892, + "learning_rate": 6.013402425793095e-06, + "loss": 0.4181, + "step": 6510 + }, + { + "epoch": 0.45208998750173585, + "grad_norm": 3.68484214207789, + "learning_rate": 6.012301251681302e-06, + "loss": 0.5354, + "step": 6511 + }, + { + "epoch": 0.452159422302458, + "grad_norm": 3.511488133133981, + "learning_rate": 6.011200026368457e-06, + "loss": 0.3993, + "step": 6512 + }, + { + "epoch": 0.4522288571031801, + "grad_norm": 4.155115814296944, + "learning_rate": 6.010098749910257e-06, + "loss": 0.5291, + "step": 6513 + }, + { + "epoch": 0.45229829190390225, + "grad_norm": 3.2489209591183132, + "learning_rate": 6.008997422362406e-06, + "loss": 0.4399, + "step": 6514 + }, + { + "epoch": 0.4523677267046244, + "grad_norm": 3.408908623445261, + "learning_rate": 6.007896043780608e-06, + "loss": 0.3544, + "step": 6515 + }, + { + "epoch": 0.45243716150534646, + "grad_norm": 3.2671560823917365, + "learning_rate": 6.006794614220568e-06, + "loss": 0.3208, + "step": 6516 + }, + { + "epoch": 0.4525065963060686, + "grad_norm": 4.477703750694288, + "learning_rate": 6.005693133737996e-06, + "loss": 0.3418, + "step": 6517 + }, + { + "epoch": 0.45257603110679073, + "grad_norm": 2.9776327947096437, + "learning_rate": 6.004591602388602e-06, + "loss": 0.3322, + "step": 6518 + }, + { + "epoch": 0.45264546590751287, + "grad_norm": 3.085286799322782, + "learning_rate": 6.0034900202281034e-06, + "loss": 0.3832, + "step": 6519 + }, + { + "epoch": 0.45271490070823495, + "grad_norm": 3.5592489441454225, + "learning_rate": 6.0023883873122145e-06, + "loss": 0.2628, + "step": 6520 + }, + { + "epoch": 0.4527843355089571, + "grad_norm": 4.593385658713493, + "learning_rate": 6.001286703696655e-06, + "loss": 0.6076, + "step": 6521 + }, + { + "epoch": 0.4528537703096792, + "grad_norm": 3.956838336465241, + "learning_rate": 6.0001849694371475e-06, + "loss": 0.5129, + "step": 6522 + }, + { + "epoch": 0.45292320511040135, + "grad_norm": 4.738356196009445, + "learning_rate": 5.9990831845894145e-06, + "loss": 0.4617, + "step": 6523 + }, + { + "epoch": 0.45299263991112343, + "grad_norm": 3.34740383763524, + "learning_rate": 5.997981349209186e-06, + "loss": 0.3613, + "step": 6524 + }, + { + "epoch": 0.45306207471184556, + "grad_norm": 3.0664907865648865, + "learning_rate": 5.996879463352191e-06, + "loss": 0.2668, + "step": 
6525 + }, + { + "epoch": 0.4531315095125677, + "grad_norm": 4.0369270552444405, + "learning_rate": 5.995777527074158e-06, + "loss": 0.5807, + "step": 6526 + }, + { + "epoch": 0.45320094431328983, + "grad_norm": 4.5037001228108045, + "learning_rate": 5.9946755404308265e-06, + "loss": 0.6769, + "step": 6527 + }, + { + "epoch": 0.45327037911401197, + "grad_norm": 4.175441549008766, + "learning_rate": 5.993573503477932e-06, + "loss": 0.496, + "step": 6528 + }, + { + "epoch": 0.45333981391473405, + "grad_norm": 3.7016753651379415, + "learning_rate": 5.992471416271213e-06, + "loss": 0.6437, + "step": 6529 + }, + { + "epoch": 0.4534092487154562, + "grad_norm": 4.732485345547643, + "learning_rate": 5.9913692788664136e-06, + "loss": 0.668, + "step": 6530 + }, + { + "epoch": 0.4534786835161783, + "grad_norm": 4.19583377204447, + "learning_rate": 5.9902670913192774e-06, + "loss": 0.4971, + "step": 6531 + }, + { + "epoch": 0.45354811831690045, + "grad_norm": 4.805159208359288, + "learning_rate": 5.989164853685552e-06, + "loss": 0.7558, + "step": 6532 + }, + { + "epoch": 0.45361755311762253, + "grad_norm": 3.985902015990341, + "learning_rate": 5.988062566020987e-06, + "loss": 0.7363, + "step": 6533 + }, + { + "epoch": 0.45368698791834466, + "grad_norm": 3.809640709870053, + "learning_rate": 5.9869602283813365e-06, + "loss": 0.4559, + "step": 6534 + }, + { + "epoch": 0.4537564227190668, + "grad_norm": 4.527125327110629, + "learning_rate": 5.985857840822355e-06, + "loss": 0.6012, + "step": 6535 + }, + { + "epoch": 0.45382585751978893, + "grad_norm": 3.9570781769808736, + "learning_rate": 5.984755403399799e-06, + "loss": 0.5308, + "step": 6536 + }, + { + "epoch": 0.45389529232051107, + "grad_norm": 4.306501037389282, + "learning_rate": 5.983652916169429e-06, + "loss": 0.4533, + "step": 6537 + }, + { + "epoch": 0.45396472712123315, + "grad_norm": 3.83909339676864, + "learning_rate": 5.982550379187008e-06, + "loss": 0.5921, + "step": 6538 + }, + { + "epoch": 0.4540341619219553, + "grad_norm": 4.157353401461698, + "learning_rate": 5.9814477925083e-06, + "loss": 0.3536, + "step": 6539 + }, + { + "epoch": 0.4541035967226774, + "grad_norm": 4.4099439795831685, + "learning_rate": 5.980345156189072e-06, + "loss": 0.4417, + "step": 6540 + }, + { + "epoch": 0.45417303152339955, + "grad_norm": 3.247225112953667, + "learning_rate": 5.979242470285098e-06, + "loss": 0.3139, + "step": 6541 + }, + { + "epoch": 0.45424246632412163, + "grad_norm": 3.7456758799817043, + "learning_rate": 5.978139734852146e-06, + "loss": 0.5087, + "step": 6542 + }, + { + "epoch": 0.45431190112484376, + "grad_norm": 3.3363107932578955, + "learning_rate": 5.977036949945993e-06, + "loss": 0.3107, + "step": 6543 + }, + { + "epoch": 0.4543813359255659, + "grad_norm": 4.03767880613357, + "learning_rate": 5.9759341156224185e-06, + "loss": 0.2792, + "step": 6544 + }, + { + "epoch": 0.45445077072628803, + "grad_norm": 4.033706516517885, + "learning_rate": 5.974831231937199e-06, + "loss": 0.5703, + "step": 6545 + }, + { + "epoch": 0.4545202055270101, + "grad_norm": 4.333455404575776, + "learning_rate": 5.97372829894612e-06, + "loss": 0.5762, + "step": 6546 + }, + { + "epoch": 0.45458964032773225, + "grad_norm": 4.30149083955346, + "learning_rate": 5.972625316704965e-06, + "loss": 0.5668, + "step": 6547 + }, + { + "epoch": 0.4546590751284544, + "grad_norm": 2.446215639209645, + "learning_rate": 5.971522285269522e-06, + "loss": 0.2339, + "step": 6548 + }, + { + "epoch": 0.4547285099291765, + "grad_norm": 3.013153979220055, + "learning_rate": 
5.970419204695581e-06, + "loss": 0.2908, + "step": 6549 + }, + { + "epoch": 0.45479794472989865, + "grad_norm": 3.7870980705655897, + "learning_rate": 5.9693160750389344e-06, + "loss": 0.632, + "step": 6550 + }, + { + "epoch": 0.45486737953062073, + "grad_norm": 2.848945364913972, + "learning_rate": 5.968212896355379e-06, + "loss": 0.3146, + "step": 6551 + }, + { + "epoch": 0.45493681433134286, + "grad_norm": 6.099280860262838, + "learning_rate": 5.9671096687007105e-06, + "loss": 0.5378, + "step": 6552 + }, + { + "epoch": 0.455006249132065, + "grad_norm": 4.45201272408518, + "learning_rate": 5.966006392130729e-06, + "loss": 0.376, + "step": 6553 + }, + { + "epoch": 0.45507568393278713, + "grad_norm": 4.14188576035099, + "learning_rate": 5.9649030667012376e-06, + "loss": 0.5746, + "step": 6554 + }, + { + "epoch": 0.4551451187335092, + "grad_norm": 3.9794506830324523, + "learning_rate": 5.96379969246804e-06, + "loss": 0.3662, + "step": 6555 + }, + { + "epoch": 0.45521455353423135, + "grad_norm": 4.409586180191458, + "learning_rate": 5.962696269486944e-06, + "loss": 0.5659, + "step": 6556 + }, + { + "epoch": 0.4552839883349535, + "grad_norm": 3.8602612967368306, + "learning_rate": 5.961592797813762e-06, + "loss": 0.4451, + "step": 6557 + }, + { + "epoch": 0.4553534231356756, + "grad_norm": 3.1593327881630273, + "learning_rate": 5.960489277504303e-06, + "loss": 0.3196, + "step": 6558 + }, + { + "epoch": 0.4554228579363977, + "grad_norm": 4.346761784878238, + "learning_rate": 5.959385708614384e-06, + "loss": 0.5645, + "step": 6559 + }, + { + "epoch": 0.45549229273711983, + "grad_norm": 3.727137767881669, + "learning_rate": 5.9582820911998216e-06, + "loss": 0.4348, + "step": 6560 + }, + { + "epoch": 0.45556172753784197, + "grad_norm": 3.489938655162405, + "learning_rate": 5.957178425316434e-06, + "loss": 0.3333, + "step": 6561 + }, + { + "epoch": 0.4556311623385641, + "grad_norm": 3.3614363516798464, + "learning_rate": 5.956074711020047e-06, + "loss": 0.3874, + "step": 6562 + }, + { + "epoch": 0.45570059713928623, + "grad_norm": 3.5719187450217653, + "learning_rate": 5.9549709483664805e-06, + "loss": 0.3903, + "step": 6563 + }, + { + "epoch": 0.4557700319400083, + "grad_norm": 4.8100724622581525, + "learning_rate": 5.953867137411566e-06, + "loss": 0.4796, + "step": 6564 + }, + { + "epoch": 0.45583946674073045, + "grad_norm": 3.91707932192862, + "learning_rate": 5.95276327821113e-06, + "loss": 0.3591, + "step": 6565 + }, + { + "epoch": 0.4559089015414526, + "grad_norm": 4.609547147627957, + "learning_rate": 5.951659370821007e-06, + "loss": 0.7442, + "step": 6566 + }, + { + "epoch": 0.4559783363421747, + "grad_norm": 4.727461112063819, + "learning_rate": 5.950555415297028e-06, + "loss": 0.6209, + "step": 6567 + }, + { + "epoch": 0.4560477711428968, + "grad_norm": 3.5596629380724654, + "learning_rate": 5.949451411695033e-06, + "loss": 0.3818, + "step": 6568 + }, + { + "epoch": 0.45611720594361893, + "grad_norm": 3.7511576048959334, + "learning_rate": 5.9483473600708595e-06, + "loss": 0.4017, + "step": 6569 + }, + { + "epoch": 0.45618664074434107, + "grad_norm": 3.555544630357605, + "learning_rate": 5.94724326048035e-06, + "loss": 0.3939, + "step": 6570 + }, + { + "epoch": 0.4562560755450632, + "grad_norm": 4.569068145967377, + "learning_rate": 5.946139112979348e-06, + "loss": 0.6096, + "step": 6571 + }, + { + "epoch": 0.45632551034578533, + "grad_norm": 4.6059989735404665, + "learning_rate": 5.9450349176237e-06, + "loss": 0.5843, + "step": 6572 + }, + { + "epoch": 0.4563949451465074, + "grad_norm": 
4.383031629464643, + "learning_rate": 5.943930674469255e-06, + "loss": 0.7014, + "step": 6573 + }, + { + "epoch": 0.45646437994722955, + "grad_norm": 3.8334233196955627, + "learning_rate": 5.942826383571865e-06, + "loss": 0.359, + "step": 6574 + }, + { + "epoch": 0.4565338147479517, + "grad_norm": 4.466379465098667, + "learning_rate": 5.941722044987384e-06, + "loss": 0.6505, + "step": 6575 + }, + { + "epoch": 0.4566032495486738, + "grad_norm": 3.7776982502947374, + "learning_rate": 5.940617658771667e-06, + "loss": 0.5938, + "step": 6576 + }, + { + "epoch": 0.4566726843493959, + "grad_norm": 3.1530693263154332, + "learning_rate": 5.939513224980573e-06, + "loss": 0.2954, + "step": 6577 + }, + { + "epoch": 0.45674211915011803, + "grad_norm": 3.922452185488341, + "learning_rate": 5.938408743669965e-06, + "loss": 0.6157, + "step": 6578 + }, + { + "epoch": 0.45681155395084017, + "grad_norm": 3.365699573063296, + "learning_rate": 5.937304214895702e-06, + "loss": 0.492, + "step": 6579 + }, + { + "epoch": 0.4568809887515623, + "grad_norm": 4.9646814560035954, + "learning_rate": 5.936199638713655e-06, + "loss": 0.5575, + "step": 6580 + }, + { + "epoch": 0.4569504235522844, + "grad_norm": 3.7922676630089254, + "learning_rate": 5.935095015179686e-06, + "loss": 0.5089, + "step": 6581 + }, + { + "epoch": 0.4570198583530065, + "grad_norm": 3.540303227263665, + "learning_rate": 5.933990344349671e-06, + "loss": 0.6771, + "step": 6582 + }, + { + "epoch": 0.45708929315372865, + "grad_norm": 3.8816923951348197, + "learning_rate": 5.932885626279483e-06, + "loss": 0.4535, + "step": 6583 + }, + { + "epoch": 0.4571587279544508, + "grad_norm": 2.663319807428733, + "learning_rate": 5.931780861024995e-06, + "loss": 0.3249, + "step": 6584 + }, + { + "epoch": 0.4572281627551729, + "grad_norm": 4.029833467360081, + "learning_rate": 5.930676048642083e-06, + "loss": 0.5332, + "step": 6585 + }, + { + "epoch": 0.457297597555895, + "grad_norm": 3.217052442293713, + "learning_rate": 5.929571189186634e-06, + "loss": 0.5392, + "step": 6586 + }, + { + "epoch": 0.45736703235661713, + "grad_norm": 3.542865908716853, + "learning_rate": 5.928466282714523e-06, + "loss": 0.3384, + "step": 6587 + }, + { + "epoch": 0.45743646715733927, + "grad_norm": 4.324082415072232, + "learning_rate": 5.927361329281638e-06, + "loss": 0.4848, + "step": 6588 + }, + { + "epoch": 0.4575059019580614, + "grad_norm": 3.989650807065453, + "learning_rate": 5.926256328943867e-06, + "loss": 0.3371, + "step": 6589 + }, + { + "epoch": 0.4575753367587835, + "grad_norm": 4.686273767322977, + "learning_rate": 5.925151281757099e-06, + "loss": 0.6548, + "step": 6590 + }, + { + "epoch": 0.4576447715595056, + "grad_norm": 7.052148073713641, + "learning_rate": 5.924046187777226e-06, + "loss": 0.4253, + "step": 6591 + }, + { + "epoch": 0.45771420636022775, + "grad_norm": 4.923526941618985, + "learning_rate": 5.922941047060142e-06, + "loss": 0.6459, + "step": 6592 + }, + { + "epoch": 0.4577836411609499, + "grad_norm": 4.407521206810167, + "learning_rate": 5.921835859661746e-06, + "loss": 0.6013, + "step": 6593 + }, + { + "epoch": 0.45785307596167196, + "grad_norm": 4.113004088596926, + "learning_rate": 5.920730625637934e-06, + "loss": 0.5125, + "step": 6594 + }, + { + "epoch": 0.4579225107623941, + "grad_norm": 2.7146217597963176, + "learning_rate": 5.919625345044607e-06, + "loss": 0.2922, + "step": 6595 + }, + { + "epoch": 0.45799194556311623, + "grad_norm": 3.1315540708269416, + "learning_rate": 5.918520017937674e-06, + "loss": 0.2947, + "step": 6596 + }, + { + "epoch": 
0.45806138036383837, + "grad_norm": 4.573486605305715, + "learning_rate": 5.9174146443730355e-06, + "loss": 0.5861, + "step": 6597 + }, + { + "epoch": 0.4581308151645605, + "grad_norm": 3.1044043197639914, + "learning_rate": 5.916309224406604e-06, + "loss": 0.4049, + "step": 6598 + }, + { + "epoch": 0.4582002499652826, + "grad_norm": 3.0094861406810622, + "learning_rate": 5.9152037580942875e-06, + "loss": 0.2895, + "step": 6599 + }, + { + "epoch": 0.4582696847660047, + "grad_norm": 4.028224506489351, + "learning_rate": 5.914098245492002e-06, + "loss": 0.4288, + "step": 6600 + }, + { + "epoch": 0.45833911956672685, + "grad_norm": 5.119873227376004, + "learning_rate": 5.912992686655659e-06, + "loss": 0.7436, + "step": 6601 + }, + { + "epoch": 0.458408554367449, + "grad_norm": 4.066444758992238, + "learning_rate": 5.911887081641182e-06, + "loss": 0.5358, + "step": 6602 + }, + { + "epoch": 0.45847798916817106, + "grad_norm": 3.208447167405556, + "learning_rate": 5.910781430504488e-06, + "loss": 0.2359, + "step": 6603 + }, + { + "epoch": 0.4585474239688932, + "grad_norm": 4.28542725376236, + "learning_rate": 5.9096757333014975e-06, + "loss": 0.4515, + "step": 6604 + }, + { + "epoch": 0.45861685876961533, + "grad_norm": 4.655296430066386, + "learning_rate": 5.90856999008814e-06, + "loss": 0.578, + "step": 6605 + }, + { + "epoch": 0.45868629357033747, + "grad_norm": 4.127636141596106, + "learning_rate": 5.90746420092034e-06, + "loss": 0.4842, + "step": 6606 + }, + { + "epoch": 0.4587557283710596, + "grad_norm": 2.7212380661110913, + "learning_rate": 5.906358365854027e-06, + "loss": 0.2188, + "step": 6607 + }, + { + "epoch": 0.4588251631717817, + "grad_norm": 4.225683300705746, + "learning_rate": 5.905252484945133e-06, + "loss": 0.4258, + "step": 6608 + }, + { + "epoch": 0.4588945979725038, + "grad_norm": 4.179077956454522, + "learning_rate": 5.904146558249595e-06, + "loss": 0.4166, + "step": 6609 + }, + { + "epoch": 0.45896403277322595, + "grad_norm": 4.205205141165408, + "learning_rate": 5.903040585823344e-06, + "loss": 0.3628, + "step": 6610 + }, + { + "epoch": 0.4590334675739481, + "grad_norm": 3.8088258038483924, + "learning_rate": 5.901934567722324e-06, + "loss": 0.5675, + "step": 6611 + }, + { + "epoch": 0.45910290237467016, + "grad_norm": 3.2565375886806764, + "learning_rate": 5.9008285040024746e-06, + "loss": 0.3089, + "step": 6612 + }, + { + "epoch": 0.4591723371753923, + "grad_norm": 3.995322723745124, + "learning_rate": 5.899722394719738e-06, + "loss": 0.4555, + "step": 6613 + }, + { + "epoch": 0.45924177197611443, + "grad_norm": 2.794780689661263, + "learning_rate": 5.898616239930061e-06, + "loss": 0.2991, + "step": 6614 + }, + { + "epoch": 0.45931120677683657, + "grad_norm": 3.475949973943329, + "learning_rate": 5.897510039689391e-06, + "loss": 0.5611, + "step": 6615 + }, + { + "epoch": 0.45938064157755865, + "grad_norm": 5.972298618868445, + "learning_rate": 5.896403794053679e-06, + "loss": 0.8622, + "step": 6616 + }, + { + "epoch": 0.4594500763782808, + "grad_norm": 3.0693407187702637, + "learning_rate": 5.8952975030788775e-06, + "loss": 0.4123, + "step": 6617 + }, + { + "epoch": 0.4595195111790029, + "grad_norm": 4.31122841974351, + "learning_rate": 5.894191166820941e-06, + "loss": 0.5879, + "step": 6618 + }, + { + "epoch": 0.45958894597972505, + "grad_norm": 4.738422659036301, + "learning_rate": 5.893084785335829e-06, + "loss": 0.3926, + "step": 6619 + }, + { + "epoch": 0.4596583807804472, + "grad_norm": 4.1612585082901346, + "learning_rate": 5.891978358679497e-06, + "loss": 
0.5911, + "step": 6620 + }, + { + "epoch": 0.45972781558116926, + "grad_norm": 3.7838889007136944, + "learning_rate": 5.89087188690791e-06, + "loss": 0.5357, + "step": 6621 + }, + { + "epoch": 0.4597972503818914, + "grad_norm": 3.5449771796564318, + "learning_rate": 5.889765370077031e-06, + "loss": 0.3802, + "step": 6622 + }, + { + "epoch": 0.45986668518261353, + "grad_norm": 4.432373441933868, + "learning_rate": 5.888658808242825e-06, + "loss": 0.6374, + "step": 6623 + }, + { + "epoch": 0.45993611998333567, + "grad_norm": 4.409254883126733, + "learning_rate": 5.887552201461263e-06, + "loss": 0.5281, + "step": 6624 + }, + { + "epoch": 0.46000555478405775, + "grad_norm": 2.826211397557647, + "learning_rate": 5.886445549788317e-06, + "loss": 0.3517, + "step": 6625 + }, + { + "epoch": 0.4600749895847799, + "grad_norm": 3.729620339797882, + "learning_rate": 5.885338853279955e-06, + "loss": 0.5066, + "step": 6626 + }, + { + "epoch": 0.460144424385502, + "grad_norm": 3.047776291328894, + "learning_rate": 5.884232111992156e-06, + "loss": 0.3146, + "step": 6627 + }, + { + "epoch": 0.46021385918622415, + "grad_norm": 5.75168144838073, + "learning_rate": 5.8831253259809005e-06, + "loss": 0.8869, + "step": 6628 + }, + { + "epoch": 0.4602832939869463, + "grad_norm": 3.561639654290226, + "learning_rate": 5.882018495302162e-06, + "loss": 0.5262, + "step": 6629 + }, + { + "epoch": 0.46035272878766836, + "grad_norm": 2.1712020449143203, + "learning_rate": 5.880911620011928e-06, + "loss": 0.1913, + "step": 6630 + }, + { + "epoch": 0.4604221635883905, + "grad_norm": 2.736258385069976, + "learning_rate": 5.879804700166181e-06, + "loss": 0.329, + "step": 6631 + }, + { + "epoch": 0.46049159838911263, + "grad_norm": 2.8498743726928732, + "learning_rate": 5.878697735820906e-06, + "loss": 0.245, + "step": 6632 + }, + { + "epoch": 0.46056103318983477, + "grad_norm": 3.4948686960130906, + "learning_rate": 5.877590727032094e-06, + "loss": 0.4492, + "step": 6633 + }, + { + "epoch": 0.46063046799055685, + "grad_norm": 4.090872153049156, + "learning_rate": 5.876483673855737e-06, + "loss": 0.6287, + "step": 6634 + }, + { + "epoch": 0.460699902791279, + "grad_norm": 3.4405019259144027, + "learning_rate": 5.875376576347828e-06, + "loss": 0.5276, + "step": 6635 + }, + { + "epoch": 0.4607693375920011, + "grad_norm": 3.5094811197045437, + "learning_rate": 5.8742694345643625e-06, + "loss": 0.631, + "step": 6636 + }, + { + "epoch": 0.46083877239272325, + "grad_norm": 3.766766950755339, + "learning_rate": 5.873162248561338e-06, + "loss": 0.4954, + "step": 6637 + }, + { + "epoch": 0.46090820719344533, + "grad_norm": 3.3403051802824923, + "learning_rate": 5.872055018394756e-06, + "loss": 0.4203, + "step": 6638 + }, + { + "epoch": 0.46097764199416746, + "grad_norm": 3.6911561933127945, + "learning_rate": 5.870947744120616e-06, + "loss": 0.3949, + "step": 6639 + }, + { + "epoch": 0.4610470767948896, + "grad_norm": 3.985577438347011, + "learning_rate": 5.869840425794925e-06, + "loss": 0.3917, + "step": 6640 + }, + { + "epoch": 0.46111651159561173, + "grad_norm": 3.860375539750852, + "learning_rate": 5.868733063473691e-06, + "loss": 0.4095, + "step": 6641 + }, + { + "epoch": 0.46118594639633387, + "grad_norm": 3.6974773248108974, + "learning_rate": 5.86762565721292e-06, + "loss": 0.4307, + "step": 6642 + }, + { + "epoch": 0.46125538119705595, + "grad_norm": 3.2431074952069388, + "learning_rate": 5.866518207068626e-06, + "loss": 0.3737, + "step": 6643 + }, + { + "epoch": 0.4613248159977781, + "grad_norm": 2.7567039492981036, + 
"learning_rate": 5.865410713096821e-06, + "loss": 0.2014, + "step": 6644 + }, + { + "epoch": 0.4613942507985002, + "grad_norm": 3.8317909858615806, + "learning_rate": 5.864303175353522e-06, + "loss": 0.5063, + "step": 6645 + }, + { + "epoch": 0.46146368559922235, + "grad_norm": 3.5735269688620876, + "learning_rate": 5.863195593894746e-06, + "loss": 0.4775, + "step": 6646 + }, + { + "epoch": 0.46153312039994443, + "grad_norm": 3.8841543513824406, + "learning_rate": 5.862087968776514e-06, + "loss": 0.3998, + "step": 6647 + }, + { + "epoch": 0.46160255520066656, + "grad_norm": 2.7758799757518022, + "learning_rate": 5.860980300054848e-06, + "loss": 0.2009, + "step": 6648 + }, + { + "epoch": 0.4616719900013887, + "grad_norm": 2.832289046012341, + "learning_rate": 5.859872587785772e-06, + "loss": 0.2976, + "step": 6649 + }, + { + "epoch": 0.46174142480211083, + "grad_norm": 3.1672537257080458, + "learning_rate": 5.8587648320253145e-06, + "loss": 0.4843, + "step": 6650 + }, + { + "epoch": 0.4618108596028329, + "grad_norm": 3.4557574481962745, + "learning_rate": 5.857657032829503e-06, + "loss": 0.3487, + "step": 6651 + }, + { + "epoch": 0.46188029440355505, + "grad_norm": 3.4546219135484737, + "learning_rate": 5.85654919025437e-06, + "loss": 0.3744, + "step": 6652 + }, + { + "epoch": 0.4619497292042772, + "grad_norm": 4.428602052283512, + "learning_rate": 5.855441304355947e-06, + "loss": 0.5319, + "step": 6653 + }, + { + "epoch": 0.4620191640049993, + "grad_norm": 4.015501160374193, + "learning_rate": 5.854333375190272e-06, + "loss": 0.5409, + "step": 6654 + }, + { + "epoch": 0.46208859880572145, + "grad_norm": 2.297262350223988, + "learning_rate": 5.853225402813381e-06, + "loss": 0.1324, + "step": 6655 + }, + { + "epoch": 0.46215803360644353, + "grad_norm": 4.041103395628692, + "learning_rate": 5.852117387281313e-06, + "loss": 0.4155, + "step": 6656 + }, + { + "epoch": 0.46222746840716566, + "grad_norm": 4.726834220060106, + "learning_rate": 5.851009328650114e-06, + "loss": 0.7386, + "step": 6657 + }, + { + "epoch": 0.4622969032078878, + "grad_norm": 3.6000841593577366, + "learning_rate": 5.849901226975826e-06, + "loss": 0.3482, + "step": 6658 + }, + { + "epoch": 0.46236633800860993, + "grad_norm": 4.207718458711362, + "learning_rate": 5.848793082314496e-06, + "loss": 0.4331, + "step": 6659 + }, + { + "epoch": 0.462435772809332, + "grad_norm": 4.143866872790647, + "learning_rate": 5.8476848947221705e-06, + "loss": 0.4656, + "step": 6660 + }, + { + "epoch": 0.46250520761005415, + "grad_norm": 3.7324131604448945, + "learning_rate": 5.846576664254904e-06, + "loss": 0.4365, + "step": 6661 + }, + { + "epoch": 0.4625746424107763, + "grad_norm": 3.9620947477267396, + "learning_rate": 5.8454683909687474e-06, + "loss": 0.4761, + "step": 6662 + }, + { + "epoch": 0.4626440772114984, + "grad_norm": 3.477942988100187, + "learning_rate": 5.844360074919756e-06, + "loss": 0.3271, + "step": 6663 + }, + { + "epoch": 0.46271351201222055, + "grad_norm": 3.9479927716406507, + "learning_rate": 5.8432517161639875e-06, + "loss": 0.4262, + "step": 6664 + }, + { + "epoch": 0.46278294681294263, + "grad_norm": 4.062232175232798, + "learning_rate": 5.842143314757501e-06, + "loss": 0.4303, + "step": 6665 + }, + { + "epoch": 0.46285238161366477, + "grad_norm": 3.4832001985872107, + "learning_rate": 5.841034870756359e-06, + "loss": 0.3871, + "step": 6666 + }, + { + "epoch": 0.4629218164143869, + "grad_norm": 3.3878096761522176, + "learning_rate": 5.839926384216625e-06, + "loss": 0.4959, + "step": 6667 + }, + { + "epoch": 
0.46299125121510903, + "grad_norm": 3.492659507729984, + "learning_rate": 5.838817855194365e-06, + "loss": 0.4153, + "step": 6668 + }, + { + "epoch": 0.4630606860158311, + "grad_norm": 3.9940103368909803, + "learning_rate": 5.837709283745647e-06, + "loss": 0.6313, + "step": 6669 + }, + { + "epoch": 0.46313012081655325, + "grad_norm": 2.6808185278652563, + "learning_rate": 5.836600669926542e-06, + "loss": 0.2365, + "step": 6670 + }, + { + "epoch": 0.4631995556172754, + "grad_norm": 2.7508931180559597, + "learning_rate": 5.835492013793122e-06, + "loss": 0.2514, + "step": 6671 + }, + { + "epoch": 0.4632689904179975, + "grad_norm": 3.685683231221985, + "learning_rate": 5.834383315401458e-06, + "loss": 0.4754, + "step": 6672 + }, + { + "epoch": 0.4633384252187196, + "grad_norm": 3.417746281882585, + "learning_rate": 5.833274574807635e-06, + "loss": 0.4691, + "step": 6673 + }, + { + "epoch": 0.46340786001944173, + "grad_norm": 4.33086735426182, + "learning_rate": 5.832165792067723e-06, + "loss": 0.6191, + "step": 6674 + }, + { + "epoch": 0.46347729482016387, + "grad_norm": 3.2378244575517536, + "learning_rate": 5.831056967237808e-06, + "loss": 0.4363, + "step": 6675 + }, + { + "epoch": 0.463546729620886, + "grad_norm": 3.695386462880349, + "learning_rate": 5.829948100373973e-06, + "loss": 0.3119, + "step": 6676 + }, + { + "epoch": 0.46361616442160813, + "grad_norm": 3.4320593305452602, + "learning_rate": 5.828839191532301e-06, + "loss": 0.422, + "step": 6677 + }, + { + "epoch": 0.4636855992223302, + "grad_norm": 3.3667652941611483, + "learning_rate": 5.827730240768882e-06, + "loss": 0.4156, + "step": 6678 + }, + { + "epoch": 0.46375503402305235, + "grad_norm": 3.4692293085957506, + "learning_rate": 5.826621248139802e-06, + "loss": 0.321, + "step": 6679 + }, + { + "epoch": 0.4638244688237745, + "grad_norm": 3.3936836447616847, + "learning_rate": 5.8255122137011575e-06, + "loss": 0.352, + "step": 6680 + }, + { + "epoch": 0.4638939036244966, + "grad_norm": 4.925315055984609, + "learning_rate": 5.824403137509037e-06, + "loss": 0.5505, + "step": 6681 + }, + { + "epoch": 0.4639633384252187, + "grad_norm": 4.537521705974809, + "learning_rate": 5.82329401961954e-06, + "loss": 0.565, + "step": 6682 + }, + { + "epoch": 0.46403277322594083, + "grad_norm": 4.119392642260565, + "learning_rate": 5.822184860088762e-06, + "loss": 0.6114, + "step": 6683 + }, + { + "epoch": 0.46410220802666297, + "grad_norm": 3.9129910628138074, + "learning_rate": 5.821075658972806e-06, + "loss": 0.5601, + "step": 6684 + }, + { + "epoch": 0.4641716428273851, + "grad_norm": 4.607533160977391, + "learning_rate": 5.81996641632777e-06, + "loss": 0.612, + "step": 6685 + }, + { + "epoch": 0.46424107762810723, + "grad_norm": 3.749344479185912, + "learning_rate": 5.818857132209762e-06, + "loss": 0.3412, + "step": 6686 + }, + { + "epoch": 0.4643105124288293, + "grad_norm": 3.570205156664813, + "learning_rate": 5.817747806674886e-06, + "loss": 0.2557, + "step": 6687 + }, + { + "epoch": 0.46437994722955145, + "grad_norm": 4.72649116886234, + "learning_rate": 5.8166384397792505e-06, + "loss": 0.9041, + "step": 6688 + }, + { + "epoch": 0.4644493820302736, + "grad_norm": 4.679178087118482, + "learning_rate": 5.815529031578968e-06, + "loss": 0.5121, + "step": 6689 + }, + { + "epoch": 0.4645188168309957, + "grad_norm": 4.70697121397895, + "learning_rate": 5.814419582130151e-06, + "loss": 0.6076, + "step": 6690 + }, + { + "epoch": 0.4645882516317178, + "grad_norm": 3.6296495806519657, + "learning_rate": 5.813310091488912e-06, + "loss": 0.5333, + 
"step": 6691 + }, + { + "epoch": 0.46465768643243993, + "grad_norm": 4.4130051528915475, + "learning_rate": 5.812200559711369e-06, + "loss": 0.5203, + "step": 6692 + }, + { + "epoch": 0.46472712123316207, + "grad_norm": 3.7941963268806247, + "learning_rate": 5.81109098685364e-06, + "loss": 0.4889, + "step": 6693 + }, + { + "epoch": 0.4647965560338842, + "grad_norm": 3.6223899791016367, + "learning_rate": 5.8099813729718475e-06, + "loss": 0.5496, + "step": 6694 + }, + { + "epoch": 0.4648659908346063, + "grad_norm": 3.1419047673968565, + "learning_rate": 5.808871718122113e-06, + "loss": 0.3057, + "step": 6695 + }, + { + "epoch": 0.4649354256353284, + "grad_norm": 3.8295425330406547, + "learning_rate": 5.807762022360564e-06, + "loss": 0.3902, + "step": 6696 + }, + { + "epoch": 0.46500486043605055, + "grad_norm": 4.182559159749013, + "learning_rate": 5.806652285743324e-06, + "loss": 0.4894, + "step": 6697 + }, + { + "epoch": 0.4650742952367727, + "grad_norm": 4.076081553611543, + "learning_rate": 5.805542508326525e-06, + "loss": 0.6503, + "step": 6698 + }, + { + "epoch": 0.4651437300374948, + "grad_norm": 2.9983328607869764, + "learning_rate": 5.804432690166298e-06, + "loss": 0.3046, + "step": 6699 + }, + { + "epoch": 0.4652131648382169, + "grad_norm": 5.488522566022272, + "learning_rate": 5.803322831318775e-06, + "loss": 0.7442, + "step": 6700 + }, + { + "epoch": 0.46528259963893903, + "grad_norm": 5.179497085190627, + "learning_rate": 5.802212931840092e-06, + "loss": 0.7672, + "step": 6701 + }, + { + "epoch": 0.46535203443966117, + "grad_norm": 2.7521223726909962, + "learning_rate": 5.8011029917863885e-06, + "loss": 0.2702, + "step": 6702 + }, + { + "epoch": 0.4654214692403833, + "grad_norm": 4.165229888218057, + "learning_rate": 5.7999930112138e-06, + "loss": 0.692, + "step": 6703 + }, + { + "epoch": 0.4654909040411054, + "grad_norm": 3.8098471065517643, + "learning_rate": 5.798882990178471e-06, + "loss": 0.3947, + "step": 6704 + }, + { + "epoch": 0.4655603388418275, + "grad_norm": 3.64607339157889, + "learning_rate": 5.797772928736543e-06, + "loss": 0.4162, + "step": 6705 + }, + { + "epoch": 0.46562977364254965, + "grad_norm": 4.178100418160067, + "learning_rate": 5.796662826944166e-06, + "loss": 0.7043, + "step": 6706 + }, + { + "epoch": 0.4656992084432718, + "grad_norm": 3.6683335120860754, + "learning_rate": 5.795552684857483e-06, + "loss": 0.4398, + "step": 6707 + }, + { + "epoch": 0.46576864324399386, + "grad_norm": 3.78707414379052, + "learning_rate": 5.794442502532646e-06, + "loss": 0.3695, + "step": 6708 + }, + { + "epoch": 0.465838078044716, + "grad_norm": 2.788736482323854, + "learning_rate": 5.793332280025805e-06, + "loss": 0.3224, + "step": 6709 + }, + { + "epoch": 0.46590751284543813, + "grad_norm": 3.8239043489708493, + "learning_rate": 5.792222017393116e-06, + "loss": 0.5014, + "step": 6710 + }, + { + "epoch": 0.46597694764616027, + "grad_norm": 3.811727094077433, + "learning_rate": 5.791111714690733e-06, + "loss": 0.5067, + "step": 6711 + }, + { + "epoch": 0.4660463824468824, + "grad_norm": 3.69668910315829, + "learning_rate": 5.790001371974816e-06, + "loss": 0.5259, + "step": 6712 + }, + { + "epoch": 0.4661158172476045, + "grad_norm": 3.069166811558484, + "learning_rate": 5.7888909893015225e-06, + "loss": 0.3392, + "step": 6713 + }, + { + "epoch": 0.4661852520483266, + "grad_norm": 2.5221099442599595, + "learning_rate": 5.787780566727016e-06, + "loss": 0.208, + "step": 6714 + }, + { + "epoch": 0.46625468684904875, + "grad_norm": 3.4365418730793795, + "learning_rate": 
5.786670104307459e-06, + "loss": 0.2855, + "step": 6715 + }, + { + "epoch": 0.4663241216497709, + "grad_norm": 3.3902813035115065, + "learning_rate": 5.785559602099019e-06, + "loss": 0.3721, + "step": 6716 + }, + { + "epoch": 0.46639355645049296, + "grad_norm": 3.0301535449471046, + "learning_rate": 5.784449060157863e-06, + "loss": 0.2714, + "step": 6717 + }, + { + "epoch": 0.4664629912512151, + "grad_norm": 4.9984355552320485, + "learning_rate": 5.7833384785401624e-06, + "loss": 0.5896, + "step": 6718 + }, + { + "epoch": 0.46653242605193723, + "grad_norm": 4.142349935239867, + "learning_rate": 5.782227857302086e-06, + "loss": 0.4898, + "step": 6719 + }, + { + "epoch": 0.46660186085265937, + "grad_norm": 4.047557623426405, + "learning_rate": 5.78111719649981e-06, + "loss": 0.5157, + "step": 6720 + }, + { + "epoch": 0.4666712956533815, + "grad_norm": 3.616305102156895, + "learning_rate": 5.7800064961895105e-06, + "loss": 0.5131, + "step": 6721 + }, + { + "epoch": 0.4667407304541036, + "grad_norm": 3.752536341097849, + "learning_rate": 5.778895756427366e-06, + "loss": 0.4905, + "step": 6722 + }, + { + "epoch": 0.4668101652548257, + "grad_norm": 3.8501381635620637, + "learning_rate": 5.777784977269555e-06, + "loss": 0.517, + "step": 6723 + }, + { + "epoch": 0.46687960005554785, + "grad_norm": 4.882629815374773, + "learning_rate": 5.776674158772258e-06, + "loss": 0.5395, + "step": 6724 + }, + { + "epoch": 0.46694903485627, + "grad_norm": 3.4776902371729954, + "learning_rate": 5.775563300991664e-06, + "loss": 0.3463, + "step": 6725 + }, + { + "epoch": 0.46701846965699206, + "grad_norm": 3.202491584594151, + "learning_rate": 5.774452403983955e-06, + "loss": 0.4431, + "step": 6726 + }, + { + "epoch": 0.4670879044577142, + "grad_norm": 4.622260807498098, + "learning_rate": 5.773341467805319e-06, + "loss": 0.7356, + "step": 6727 + }, + { + "epoch": 0.46715733925843633, + "grad_norm": 3.9173617656489603, + "learning_rate": 5.772230492511948e-06, + "loss": 0.6255, + "step": 6728 + }, + { + "epoch": 0.46722677405915847, + "grad_norm": 4.476440996838808, + "learning_rate": 5.771119478160031e-06, + "loss": 0.6822, + "step": 6729 + }, + { + "epoch": 0.46729620885988055, + "grad_norm": 3.7597108114218156, + "learning_rate": 5.7700084248057645e-06, + "loss": 0.4639, + "step": 6730 + }, + { + "epoch": 0.4673656436606027, + "grad_norm": 4.106728518521276, + "learning_rate": 5.768897332505342e-06, + "loss": 0.4522, + "step": 6731 + }, + { + "epoch": 0.4674350784613248, + "grad_norm": 4.394082044207434, + "learning_rate": 5.767786201314964e-06, + "loss": 0.4963, + "step": 6732 + }, + { + "epoch": 0.46750451326204695, + "grad_norm": 3.4248351268552844, + "learning_rate": 5.766675031290826e-06, + "loss": 0.3746, + "step": 6733 + }, + { + "epoch": 0.4675739480627691, + "grad_norm": 3.6680265088772726, + "learning_rate": 5.765563822489134e-06, + "loss": 0.4904, + "step": 6734 + }, + { + "epoch": 0.46764338286349116, + "grad_norm": 3.824219600192621, + "learning_rate": 5.764452574966092e-06, + "loss": 0.6171, + "step": 6735 + }, + { + "epoch": 0.4677128176642133, + "grad_norm": 3.6950687864063645, + "learning_rate": 5.763341288777902e-06, + "loss": 0.3641, + "step": 6736 + }, + { + "epoch": 0.46778225246493543, + "grad_norm": 5.007832218064642, + "learning_rate": 5.7622299639807735e-06, + "loss": 0.2299, + "step": 6737 + }, + { + "epoch": 0.46785168726565757, + "grad_norm": 4.248901369538338, + "learning_rate": 5.761118600630915e-06, + "loss": 0.4186, + "step": 6738 + }, + { + "epoch": 0.46792112206637965, + 
"grad_norm": 3.6113643843421, + "learning_rate": 5.7600071987845405e-06, + "loss": 0.5514, + "step": 6739 + }, + { + "epoch": 0.4679905568671018, + "grad_norm": 4.005009135098748, + "learning_rate": 5.7588957584978615e-06, + "loss": 0.3797, + "step": 6740 + }, + { + "epoch": 0.4680599916678239, + "grad_norm": 3.3860626758666412, + "learning_rate": 5.757784279827094e-06, + "loss": 0.4078, + "step": 6741 + }, + { + "epoch": 0.46812942646854605, + "grad_norm": 2.8505917984201203, + "learning_rate": 5.756672762828454e-06, + "loss": 0.3937, + "step": 6742 + }, + { + "epoch": 0.46819886126926813, + "grad_norm": 3.4658380243430456, + "learning_rate": 5.7555612075581625e-06, + "loss": 0.4083, + "step": 6743 + }, + { + "epoch": 0.46826829606999026, + "grad_norm": 3.424347254084609, + "learning_rate": 5.75444961407244e-06, + "loss": 0.4948, + "step": 6744 + }, + { + "epoch": 0.4683377308707124, + "grad_norm": 3.2542726910236643, + "learning_rate": 5.753337982427511e-06, + "loss": 0.318, + "step": 6745 + }, + { + "epoch": 0.46840716567143453, + "grad_norm": 3.5009869627425534, + "learning_rate": 5.7522263126795986e-06, + "loss": 0.4804, + "step": 6746 + }, + { + "epoch": 0.46847660047215667, + "grad_norm": 5.005366740932165, + "learning_rate": 5.75111460488493e-06, + "loss": 0.5303, + "step": 6747 + }, + { + "epoch": 0.46854603527287875, + "grad_norm": 3.4413564729407455, + "learning_rate": 5.750002859099735e-06, + "loss": 0.3209, + "step": 6748 + }, + { + "epoch": 0.4686154700736009, + "grad_norm": 3.769728505265845, + "learning_rate": 5.748891075380243e-06, + "loss": 0.4922, + "step": 6749 + }, + { + "epoch": 0.468684904874323, + "grad_norm": 4.897230734198281, + "learning_rate": 5.747779253782688e-06, + "loss": 0.4281, + "step": 6750 + }, + { + "epoch": 0.46875433967504515, + "grad_norm": 4.0411639440870015, + "learning_rate": 5.746667394363308e-06, + "loss": 0.5228, + "step": 6751 + }, + { + "epoch": 0.46882377447576723, + "grad_norm": 4.407968313966592, + "learning_rate": 5.745555497178332e-06, + "loss": 0.8793, + "step": 6752 + }, + { + "epoch": 0.46889320927648936, + "grad_norm": 4.110558483720758, + "learning_rate": 5.744443562284003e-06, + "loss": 0.5292, + "step": 6753 + }, + { + "epoch": 0.4689626440772115, + "grad_norm": 4.751684836318537, + "learning_rate": 5.743331589736563e-06, + "loss": 0.7217, + "step": 6754 + }, + { + "epoch": 0.46903207887793363, + "grad_norm": 2.948645157842378, + "learning_rate": 5.742219579592252e-06, + "loss": 0.4631, + "step": 6755 + }, + { + "epoch": 0.46910151367865577, + "grad_norm": 4.1488207129295125, + "learning_rate": 5.741107531907313e-06, + "loss": 0.545, + "step": 6756 + }, + { + "epoch": 0.46917094847937785, + "grad_norm": 2.254468199220277, + "learning_rate": 5.739995446737995e-06, + "loss": 0.1934, + "step": 6757 + }, + { + "epoch": 0.4692403832801, + "grad_norm": 3.0475527326654333, + "learning_rate": 5.7388833241405425e-06, + "loss": 0.3873, + "step": 6758 + }, + { + "epoch": 0.4693098180808221, + "grad_norm": 2.948671689881155, + "learning_rate": 5.737771164171211e-06, + "loss": 0.2264, + "step": 6759 + }, + { + "epoch": 0.46937925288154425, + "grad_norm": 3.507086333045732, + "learning_rate": 5.736658966886246e-06, + "loss": 0.3489, + "step": 6760 + }, + { + "epoch": 0.46944868768226633, + "grad_norm": 4.008319428035398, + "learning_rate": 5.735546732341906e-06, + "loss": 0.6018, + "step": 6761 + }, + { + "epoch": 0.46951812248298846, + "grad_norm": 4.046157183997936, + "learning_rate": 5.734434460594443e-06, + "loss": 0.4384, + "step": 6762 + 
}, + { + "epoch": 0.4695875572837106, + "grad_norm": 5.363941253718464, + "learning_rate": 5.733322151700118e-06, + "loss": 0.4157, + "step": 6763 + }, + { + "epoch": 0.46965699208443273, + "grad_norm": 2.636461927496305, + "learning_rate": 5.732209805715186e-06, + "loss": 0.2345, + "step": 6764 + }, + { + "epoch": 0.4697264268851548, + "grad_norm": 4.17068679258831, + "learning_rate": 5.7310974226959115e-06, + "loss": 0.5451, + "step": 6765 + }, + { + "epoch": 0.46979586168587695, + "grad_norm": 3.6115089350808605, + "learning_rate": 5.729985002698557e-06, + "loss": 0.41, + "step": 6766 + }, + { + "epoch": 0.4698652964865991, + "grad_norm": 3.6762860746970683, + "learning_rate": 5.728872545779387e-06, + "loss": 0.4221, + "step": 6767 + }, + { + "epoch": 0.4699347312873212, + "grad_norm": 2.9521455638614755, + "learning_rate": 5.727760051994669e-06, + "loss": 0.3671, + "step": 6768 + }, + { + "epoch": 0.47000416608804335, + "grad_norm": 3.6669555104162908, + "learning_rate": 5.726647521400671e-06, + "loss": 0.3742, + "step": 6769 + }, + { + "epoch": 0.47007360088876543, + "grad_norm": 3.678585168560078, + "learning_rate": 5.725534954053663e-06, + "loss": 0.5591, + "step": 6770 + }, + { + "epoch": 0.47014303568948757, + "grad_norm": 3.696206814664136, + "learning_rate": 5.7244223500099185e-06, + "loss": 0.4748, + "step": 6771 + }, + { + "epoch": 0.4702124704902097, + "grad_norm": 3.287582383772018, + "learning_rate": 5.72330970932571e-06, + "loss": 0.406, + "step": 6772 + }, + { + "epoch": 0.47028190529093183, + "grad_norm": 5.912194450755426, + "learning_rate": 5.722197032057317e-06, + "loss": 0.6034, + "step": 6773 + }, + { + "epoch": 0.4703513400916539, + "grad_norm": 3.9379744916679433, + "learning_rate": 5.7210843182610134e-06, + "loss": 0.5249, + "step": 6774 + }, + { + "epoch": 0.47042077489237605, + "grad_norm": 3.236000134104256, + "learning_rate": 5.719971567993081e-06, + "loss": 0.2431, + "step": 6775 + }, + { + "epoch": 0.4704902096930982, + "grad_norm": 4.312539027789056, + "learning_rate": 5.718858781309803e-06, + "loss": 0.4646, + "step": 6776 + }, + { + "epoch": 0.4705596444938203, + "grad_norm": 3.2141708309090546, + "learning_rate": 5.7177459582674595e-06, + "loss": 0.3563, + "step": 6777 + }, + { + "epoch": 0.47062907929454245, + "grad_norm": 4.147081450269217, + "learning_rate": 5.716633098922339e-06, + "loss": 0.5529, + "step": 6778 + }, + { + "epoch": 0.47069851409526453, + "grad_norm": 1.9551920820933877, + "learning_rate": 5.715520203330727e-06, + "loss": 0.1531, + "step": 6779 + }, + { + "epoch": 0.47076794889598667, + "grad_norm": 4.009777669391415, + "learning_rate": 5.714407271548913e-06, + "loss": 0.5255, + "step": 6780 + }, + { + "epoch": 0.4708373836967088, + "grad_norm": 3.035458823122768, + "learning_rate": 5.713294303633185e-06, + "loss": 0.2727, + "step": 6781 + }, + { + "epoch": 0.47090681849743093, + "grad_norm": 4.3646039928532305, + "learning_rate": 5.712181299639842e-06, + "loss": 0.6682, + "step": 6782 + }, + { + "epoch": 0.470976253298153, + "grad_norm": 3.8693201715519807, + "learning_rate": 5.711068259625173e-06, + "loss": 0.5833, + "step": 6783 + }, + { + "epoch": 0.47104568809887515, + "grad_norm": 3.3023294017727522, + "learning_rate": 5.709955183645477e-06, + "loss": 0.3254, + "step": 6784 + }, + { + "epoch": 0.4711151228995973, + "grad_norm": 4.282266506793972, + "learning_rate": 5.70884207175705e-06, + "loss": 0.4258, + "step": 6785 + }, + { + "epoch": 0.4711845577003194, + "grad_norm": 3.6253810184418565, + "learning_rate": 
5.7077289240161935e-06, + "loss": 0.3366, + "step": 6786 + }, + { + "epoch": 0.4712539925010415, + "grad_norm": 3.635647098225617, + "learning_rate": 5.7066157404792085e-06, + "loss": 0.3975, + "step": 6787 + }, + { + "epoch": 0.47132342730176363, + "grad_norm": 4.3401213376896814, + "learning_rate": 5.705502521202399e-06, + "loss": 0.6986, + "step": 6788 + }, + { + "epoch": 0.47139286210248577, + "grad_norm": 3.757513211767109, + "learning_rate": 5.704389266242072e-06, + "loss": 0.4602, + "step": 6789 + }, + { + "epoch": 0.4714622969032079, + "grad_norm": 3.8871209182798205, + "learning_rate": 5.703275975654531e-06, + "loss": 0.583, + "step": 6790 + }, + { + "epoch": 0.47153173170393003, + "grad_norm": 4.058279494308542, + "learning_rate": 5.702162649496088e-06, + "loss": 0.6407, + "step": 6791 + }, + { + "epoch": 0.4716011665046521, + "grad_norm": 3.8852291641208136, + "learning_rate": 5.701049287823052e-06, + "loss": 0.2628, + "step": 6792 + }, + { + "epoch": 0.47167060130537425, + "grad_norm": 4.190713855430629, + "learning_rate": 5.699935890691737e-06, + "loss": 0.6037, + "step": 6793 + }, + { + "epoch": 0.4717400361060964, + "grad_norm": 5.154799364062615, + "learning_rate": 5.6988224581584565e-06, + "loss": 0.631, + "step": 6794 + }, + { + "epoch": 0.4718094709068185, + "grad_norm": 4.245105837555174, + "learning_rate": 5.697708990279525e-06, + "loss": 0.6525, + "step": 6795 + }, + { + "epoch": 0.4718789057075406, + "grad_norm": 3.7951904360769957, + "learning_rate": 5.696595487111266e-06, + "loss": 0.6016, + "step": 6796 + }, + { + "epoch": 0.47194834050826273, + "grad_norm": 3.6192700235332254, + "learning_rate": 5.6954819487099924e-06, + "loss": 0.5459, + "step": 6797 + }, + { + "epoch": 0.47201777530898487, + "grad_norm": 3.392157843751449, + "learning_rate": 5.69436837513203e-06, + "loss": 0.3827, + "step": 6798 + }, + { + "epoch": 0.472087210109707, + "grad_norm": 3.044128786177679, + "learning_rate": 5.693254766433701e-06, + "loss": 0.3309, + "step": 6799 + }, + { + "epoch": 0.4721566449104291, + "grad_norm": 4.477221368726688, + "learning_rate": 5.692141122671331e-06, + "loss": 0.4217, + "step": 6800 + }, + { + "epoch": 0.4722260797111512, + "grad_norm": 3.4798803931166598, + "learning_rate": 5.691027443901245e-06, + "loss": 0.3819, + "step": 6801 + }, + { + "epoch": 0.47229551451187335, + "grad_norm": 4.5430920123767, + "learning_rate": 5.689913730179773e-06, + "loss": 0.7103, + "step": 6802 + }, + { + "epoch": 0.4723649493125955, + "grad_norm": 4.530463191157134, + "learning_rate": 5.688799981563246e-06, + "loss": 0.3467, + "step": 6803 + }, + { + "epoch": 0.4724343841133176, + "grad_norm": 4.089373455616298, + "learning_rate": 5.687686198107993e-06, + "loss": 0.5019, + "step": 6804 + }, + { + "epoch": 0.4725038189140397, + "grad_norm": 5.819093724568025, + "learning_rate": 5.686572379870353e-06, + "loss": 0.5005, + "step": 6805 + }, + { + "epoch": 0.47257325371476183, + "grad_norm": 3.6884016709420235, + "learning_rate": 5.685458526906659e-06, + "loss": 0.5948, + "step": 6806 + }, + { + "epoch": 0.47264268851548397, + "grad_norm": 4.31751103056666, + "learning_rate": 5.6843446392732475e-06, + "loss": 0.5589, + "step": 6807 + }, + { + "epoch": 0.4727121233162061, + "grad_norm": 2.2621620334272534, + "learning_rate": 5.6832307170264575e-06, + "loss": 0.2189, + "step": 6808 + }, + { + "epoch": 0.4727815581169282, + "grad_norm": 3.673482004790837, + "learning_rate": 5.6821167602226325e-06, + "loss": 0.3734, + "step": 6809 + }, + { + "epoch": 0.4728509929176503, + "grad_norm": 
2.9728019538039954, + "learning_rate": 5.681002768918114e-06, + "loss": 0.2858, + "step": 6810 + }, + { + "epoch": 0.47292042771837245, + "grad_norm": 3.2473891024229116, + "learning_rate": 5.679888743169243e-06, + "loss": 0.4056, + "step": 6811 + }, + { + "epoch": 0.4729898625190946, + "grad_norm": 4.000645328095766, + "learning_rate": 5.678774683032372e-06, + "loss": 0.7438, + "step": 6812 + }, + { + "epoch": 0.4730592973198167, + "grad_norm": 4.006431534255663, + "learning_rate": 5.677660588563843e-06, + "loss": 0.5656, + "step": 6813 + }, + { + "epoch": 0.4731287321205388, + "grad_norm": 5.849799306472783, + "learning_rate": 5.676546459820011e-06, + "loss": 0.5468, + "step": 6814 + }, + { + "epoch": 0.47319816692126093, + "grad_norm": 4.527294094890077, + "learning_rate": 5.675432296857223e-06, + "loss": 0.4095, + "step": 6815 + }, + { + "epoch": 0.47326760172198307, + "grad_norm": 3.7941751354602267, + "learning_rate": 5.6743180997318345e-06, + "loss": 0.5521, + "step": 6816 + }, + { + "epoch": 0.4733370365227052, + "grad_norm": 3.982629729252017, + "learning_rate": 5.673203868500198e-06, + "loss": 0.6308, + "step": 6817 + }, + { + "epoch": 0.4734064713234273, + "grad_norm": 3.9194971217227548, + "learning_rate": 5.672089603218673e-06, + "loss": 0.4781, + "step": 6818 + }, + { + "epoch": 0.4734759061241494, + "grad_norm": 2.8979209591414845, + "learning_rate": 5.670975303943617e-06, + "loss": 0.4298, + "step": 6819 + }, + { + "epoch": 0.47354534092487155, + "grad_norm": 3.6549389968316777, + "learning_rate": 5.669860970731388e-06, + "loss": 0.4107, + "step": 6820 + }, + { + "epoch": 0.4736147757255937, + "grad_norm": 4.495714563876899, + "learning_rate": 5.668746603638349e-06, + "loss": 0.5784, + "step": 6821 + }, + { + "epoch": 0.47368421052631576, + "grad_norm": 3.9551237790749463, + "learning_rate": 5.667632202720864e-06, + "loss": 0.4847, + "step": 6822 + }, + { + "epoch": 0.4737536453270379, + "grad_norm": 4.229460570977114, + "learning_rate": 5.666517768035299e-06, + "loss": 0.4761, + "step": 6823 + }, + { + "epoch": 0.47382308012776003, + "grad_norm": 4.086441366644287, + "learning_rate": 5.665403299638017e-06, + "loss": 0.45, + "step": 6824 + }, + { + "epoch": 0.47389251492848217, + "grad_norm": 3.230781295872515, + "learning_rate": 5.664288797585391e-06, + "loss": 0.3526, + "step": 6825 + }, + { + "epoch": 0.4739619497292043, + "grad_norm": 3.413271420975861, + "learning_rate": 5.663174261933789e-06, + "loss": 0.449, + "step": 6826 + }, + { + "epoch": 0.4740313845299264, + "grad_norm": 3.2315376008947574, + "learning_rate": 5.662059692739582e-06, + "loss": 0.4001, + "step": 6827 + }, + { + "epoch": 0.4741008193306485, + "grad_norm": 3.4688587117706517, + "learning_rate": 5.660945090059147e-06, + "loss": 0.3994, + "step": 6828 + }, + { + "epoch": 0.47417025413137065, + "grad_norm": 3.9483486991960137, + "learning_rate": 5.6598304539488555e-06, + "loss": 0.5032, + "step": 6829 + }, + { + "epoch": 0.4742396889320928, + "grad_norm": 4.879392566075788, + "learning_rate": 5.6587157844650865e-06, + "loss": 0.6989, + "step": 6830 + }, + { + "epoch": 0.47430912373281486, + "grad_norm": 4.231585181179906, + "learning_rate": 5.657601081664219e-06, + "loss": 0.545, + "step": 6831 + }, + { + "epoch": 0.474378558533537, + "grad_norm": 4.044458555523051, + "learning_rate": 5.656486345602633e-06, + "loss": 0.6012, + "step": 6832 + }, + { + "epoch": 0.47444799333425913, + "grad_norm": 3.5959176189119795, + "learning_rate": 5.65537157633671e-06, + "loss": 0.5022, + "step": 6833 + }, + { + 
"epoch": 0.47451742813498127, + "grad_norm": 2.687586781158489, + "learning_rate": 5.654256773922835e-06, + "loss": 0.2664, + "step": 6834 + }, + { + "epoch": 0.4745868629357034, + "grad_norm": 3.63289143660525, + "learning_rate": 5.653141938417393e-06, + "loss": 0.4087, + "step": 6835 + }, + { + "epoch": 0.4746562977364255, + "grad_norm": 2.8785834644162605, + "learning_rate": 5.652027069876769e-06, + "loss": 0.2605, + "step": 6836 + }, + { + "epoch": 0.4747257325371476, + "grad_norm": 2.941691162408179, + "learning_rate": 5.650912168357355e-06, + "loss": 0.2483, + "step": 6837 + }, + { + "epoch": 0.47479516733786975, + "grad_norm": 3.648821669470954, + "learning_rate": 5.649797233915539e-06, + "loss": 0.5812, + "step": 6838 + }, + { + "epoch": 0.4748646021385919, + "grad_norm": 4.041615463098413, + "learning_rate": 5.648682266607715e-06, + "loss": 0.5791, + "step": 6839 + }, + { + "epoch": 0.47493403693931396, + "grad_norm": 3.9356304671476674, + "learning_rate": 5.647567266490276e-06, + "loss": 0.5223, + "step": 6840 + }, + { + "epoch": 0.4750034717400361, + "grad_norm": 3.6974502860589493, + "learning_rate": 5.646452233619619e-06, + "loss": 0.3477, + "step": 6841 + }, + { + "epoch": 0.47507290654075823, + "grad_norm": 4.16395822953417, + "learning_rate": 5.6453371680521375e-06, + "loss": 0.5913, + "step": 6842 + }, + { + "epoch": 0.47514234134148037, + "grad_norm": 3.7122191065307835, + "learning_rate": 5.644222069844232e-06, + "loss": 0.31, + "step": 6843 + }, + { + "epoch": 0.47521177614220245, + "grad_norm": 3.807580927204713, + "learning_rate": 5.643106939052305e-06, + "loss": 0.4784, + "step": 6844 + }, + { + "epoch": 0.4752812109429246, + "grad_norm": 2.511359419128291, + "learning_rate": 5.641991775732756e-06, + "loss": 0.217, + "step": 6845 + }, + { + "epoch": 0.4753506457436467, + "grad_norm": 4.493740716968294, + "learning_rate": 5.640876579941988e-06, + "loss": 0.578, + "step": 6846 + }, + { + "epoch": 0.47542008054436885, + "grad_norm": 3.450773154357236, + "learning_rate": 5.63976135173641e-06, + "loss": 0.3787, + "step": 6847 + }, + { + "epoch": 0.475489515345091, + "grad_norm": 2.063527958346874, + "learning_rate": 5.638646091172426e-06, + "loss": 0.165, + "step": 6848 + }, + { + "epoch": 0.47555895014581306, + "grad_norm": 3.4050633134737542, + "learning_rate": 5.637530798306445e-06, + "loss": 0.3151, + "step": 6849 + }, + { + "epoch": 0.4756283849465352, + "grad_norm": 3.025376782644657, + "learning_rate": 5.6364154731948765e-06, + "loss": 0.2757, + "step": 6850 + }, + { + "epoch": 0.47569781974725733, + "grad_norm": 3.607501158083564, + "learning_rate": 5.635300115894135e-06, + "loss": 0.613, + "step": 6851 + }, + { + "epoch": 0.47576725454797947, + "grad_norm": 4.109430756620664, + "learning_rate": 5.634184726460632e-06, + "loss": 0.6593, + "step": 6852 + }, + { + "epoch": 0.47583668934870155, + "grad_norm": 4.186070690159678, + "learning_rate": 5.633069304950784e-06, + "loss": 0.6286, + "step": 6853 + }, + { + "epoch": 0.4759061241494237, + "grad_norm": 3.6984068720825234, + "learning_rate": 5.631953851421007e-06, + "loss": 0.2947, + "step": 6854 + }, + { + "epoch": 0.4759755589501458, + "grad_norm": 4.498567239354485, + "learning_rate": 5.6308383659277184e-06, + "loss": 0.7054, + "step": 6855 + }, + { + "epoch": 0.47604499375086795, + "grad_norm": 4.037436376071036, + "learning_rate": 5.629722848527338e-06, + "loss": 0.4119, + "step": 6856 + }, + { + "epoch": 0.47611442855159003, + "grad_norm": 3.0672514631053494, + "learning_rate": 5.628607299276292e-06, + "loss": 
0.1917, + "step": 6857 + }, + { + "epoch": 0.47618386335231216, + "grad_norm": 3.76752336053928, + "learning_rate": 5.627491718230997e-06, + "loss": 0.5562, + "step": 6858 + }, + { + "epoch": 0.4762532981530343, + "grad_norm": 4.140813254773085, + "learning_rate": 5.6263761054478814e-06, + "loss": 0.6648, + "step": 6859 + }, + { + "epoch": 0.47632273295375643, + "grad_norm": 3.4528271235043815, + "learning_rate": 5.6252604609833725e-06, + "loss": 0.5038, + "step": 6860 + }, + { + "epoch": 0.47639216775447857, + "grad_norm": 5.6185496371337145, + "learning_rate": 5.624144784893896e-06, + "loss": 0.6925, + "step": 6861 + }, + { + "epoch": 0.47646160255520065, + "grad_norm": 5.18851194358585, + "learning_rate": 5.623029077235883e-06, + "loss": 0.6733, + "step": 6862 + }, + { + "epoch": 0.4765310373559228, + "grad_norm": 3.6075112143165744, + "learning_rate": 5.621913338065764e-06, + "loss": 0.4313, + "step": 6863 + }, + { + "epoch": 0.4766004721566449, + "grad_norm": 3.638654786300206, + "learning_rate": 5.620797567439973e-06, + "loss": 0.5565, + "step": 6864 + }, + { + "epoch": 0.47666990695736705, + "grad_norm": 3.8049830169194077, + "learning_rate": 5.6196817654149415e-06, + "loss": 0.3336, + "step": 6865 + }, + { + "epoch": 0.47673934175808913, + "grad_norm": 4.11430271998524, + "learning_rate": 5.618565932047109e-06, + "loss": 0.5104, + "step": 6866 + }, + { + "epoch": 0.47680877655881126, + "grad_norm": 3.145703709202706, + "learning_rate": 5.6174500673929126e-06, + "loss": 0.2909, + "step": 6867 + }, + { + "epoch": 0.4768782113595334, + "grad_norm": 3.9987729475673666, + "learning_rate": 5.6163341715087884e-06, + "loss": 0.6205, + "step": 6868 + }, + { + "epoch": 0.47694764616025553, + "grad_norm": 4.392068622771916, + "learning_rate": 5.6152182444511795e-06, + "loss": 0.6173, + "step": 6869 + }, + { + "epoch": 0.47701708096097767, + "grad_norm": 3.0889245040899054, + "learning_rate": 5.6141022862765285e-06, + "loss": 0.204, + "step": 6870 + }, + { + "epoch": 0.47708651576169975, + "grad_norm": 2.878083209137268, + "learning_rate": 5.6129862970412785e-06, + "loss": 0.4188, + "step": 6871 + }, + { + "epoch": 0.4771559505624219, + "grad_norm": 2.7472741013184274, + "learning_rate": 5.611870276801875e-06, + "loss": 0.3325, + "step": 6872 + }, + { + "epoch": 0.477225385363144, + "grad_norm": 3.653764927358615, + "learning_rate": 5.610754225614767e-06, + "loss": 0.48, + "step": 6873 + }, + { + "epoch": 0.47729482016386615, + "grad_norm": 3.916384246052207, + "learning_rate": 5.609638143536399e-06, + "loss": 0.502, + "step": 6874 + }, + { + "epoch": 0.47736425496458823, + "grad_norm": 3.54344228155308, + "learning_rate": 5.608522030623224e-06, + "loss": 0.3945, + "step": 6875 + }, + { + "epoch": 0.47743368976531037, + "grad_norm": 4.152623121685118, + "learning_rate": 5.607405886931695e-06, + "loss": 0.422, + "step": 6876 + }, + { + "epoch": 0.4775031245660325, + "grad_norm": 3.1658735291811633, + "learning_rate": 5.606289712518262e-06, + "loss": 0.3394, + "step": 6877 + }, + { + "epoch": 0.47757255936675463, + "grad_norm": 3.627879497229283, + "learning_rate": 5.605173507439383e-06, + "loss": 0.5644, + "step": 6878 + }, + { + "epoch": 0.4776419941674767, + "grad_norm": 3.430171247443991, + "learning_rate": 5.604057271751512e-06, + "loss": 0.4958, + "step": 6879 + }, + { + "epoch": 0.47771142896819885, + "grad_norm": 3.6680342882311185, + "learning_rate": 5.602941005511108e-06, + "loss": 0.376, + "step": 6880 + }, + { + "epoch": 0.477780863768921, + "grad_norm": 3.4917121726224667, + 
"learning_rate": 5.601824708774628e-06, + "loss": 0.3612, + "step": 6881 + }, + { + "epoch": 0.4778502985696431, + "grad_norm": 2.878605142881803, + "learning_rate": 5.6007083815985374e-06, + "loss": 0.2539, + "step": 6882 + }, + { + "epoch": 0.47791973337036525, + "grad_norm": 4.05497237255503, + "learning_rate": 5.599592024039297e-06, + "loss": 0.6409, + "step": 6883 + }, + { + "epoch": 0.47798916817108733, + "grad_norm": 3.9288315105684544, + "learning_rate": 5.598475636153369e-06, + "loss": 0.5477, + "step": 6884 + }, + { + "epoch": 0.47805860297180947, + "grad_norm": 3.595512501740939, + "learning_rate": 5.597359217997222e-06, + "loss": 0.4696, + "step": 6885 + }, + { + "epoch": 0.4781280377725316, + "grad_norm": 4.35193461591855, + "learning_rate": 5.5962427696273206e-06, + "loss": 0.5946, + "step": 6886 + }, + { + "epoch": 0.47819747257325373, + "grad_norm": 4.466316643509731, + "learning_rate": 5.595126291100134e-06, + "loss": 0.5, + "step": 6887 + }, + { + "epoch": 0.4782669073739758, + "grad_norm": 3.2935441115985187, + "learning_rate": 5.594009782472134e-06, + "loss": 0.3136, + "step": 6888 + }, + { + "epoch": 0.47833634217469795, + "grad_norm": 3.442847498412359, + "learning_rate": 5.592893243799793e-06, + "loss": 0.2528, + "step": 6889 + }, + { + "epoch": 0.4784057769754201, + "grad_norm": 4.074573609257723, + "learning_rate": 5.59177667513958e-06, + "loss": 0.4578, + "step": 6890 + }, + { + "epoch": 0.4784752117761422, + "grad_norm": 3.3762103261527923, + "learning_rate": 5.590660076547974e-06, + "loss": 0.3324, + "step": 6891 + }, + { + "epoch": 0.47854464657686435, + "grad_norm": 2.445749368959596, + "learning_rate": 5.589543448081448e-06, + "loss": 0.1821, + "step": 6892 + }, + { + "epoch": 0.47861408137758643, + "grad_norm": 3.748354979387605, + "learning_rate": 5.588426789796483e-06, + "loss": 0.4633, + "step": 6893 + }, + { + "epoch": 0.47868351617830857, + "grad_norm": 4.7330246567560055, + "learning_rate": 5.587310101749557e-06, + "loss": 0.357, + "step": 6894 + }, + { + "epoch": 0.4787529509790307, + "grad_norm": 3.8172098401676298, + "learning_rate": 5.586193383997149e-06, + "loss": 0.5969, + "step": 6895 + }, + { + "epoch": 0.47882238577975283, + "grad_norm": 3.1169255852256432, + "learning_rate": 5.585076636595744e-06, + "loss": 0.3476, + "step": 6896 + }, + { + "epoch": 0.4788918205804749, + "grad_norm": 2.9050568090816697, + "learning_rate": 5.583959859601824e-06, + "loss": 0.3122, + "step": 6897 + }, + { + "epoch": 0.47896125538119705, + "grad_norm": 4.254835872587314, + "learning_rate": 5.582843053071877e-06, + "loss": 0.5182, + "step": 6898 + }, + { + "epoch": 0.4790306901819192, + "grad_norm": 5.191033855448743, + "learning_rate": 5.5817262170623865e-06, + "loss": 0.6578, + "step": 6899 + }, + { + "epoch": 0.4791001249826413, + "grad_norm": 4.192838803153526, + "learning_rate": 5.580609351629842e-06, + "loss": 0.5865, + "step": 6900 + }, + { + "epoch": 0.4791695597833634, + "grad_norm": 3.2599993604117774, + "learning_rate": 5.5794924568307355e-06, + "loss": 0.31, + "step": 6901 + }, + { + "epoch": 0.47923899458408553, + "grad_norm": 3.8075429259870304, + "learning_rate": 5.578375532721554e-06, + "loss": 0.4033, + "step": 6902 + }, + { + "epoch": 0.47930842938480767, + "grad_norm": 4.398786312918532, + "learning_rate": 5.577258579358794e-06, + "loss": 0.5978, + "step": 6903 + }, + { + "epoch": 0.4793778641855298, + "grad_norm": 3.496795971192161, + "learning_rate": 5.5761415967989464e-06, + "loss": 0.4616, + "step": 6904 + }, + { + "epoch": 
0.47944729898625194, + "grad_norm": 3.9163787725176835, + "learning_rate": 5.57502458509851e-06, + "loss": 0.512, + "step": 6905 + }, + { + "epoch": 0.479516733786974, + "grad_norm": 5.538483485289079, + "learning_rate": 5.573907544313981e-06, + "loss": 0.7099, + "step": 6906 + }, + { + "epoch": 0.47958616858769615, + "grad_norm": 3.3088933948981696, + "learning_rate": 5.572790474501857e-06, + "loss": 0.3226, + "step": 6907 + }, + { + "epoch": 0.4796556033884183, + "grad_norm": 3.554231417179007, + "learning_rate": 5.5716733757186406e-06, + "loss": 0.5112, + "step": 6908 + }, + { + "epoch": 0.4797250381891404, + "grad_norm": 3.7105077137238043, + "learning_rate": 5.57055624802083e-06, + "loss": 0.3253, + "step": 6909 + }, + { + "epoch": 0.4797944729898625, + "grad_norm": 3.5395388443250146, + "learning_rate": 5.569439091464931e-06, + "loss": 0.3743, + "step": 6910 + }, + { + "epoch": 0.47986390779058463, + "grad_norm": 3.0983258613078632, + "learning_rate": 5.5683219061074455e-06, + "loss": 0.3293, + "step": 6911 + }, + { + "epoch": 0.47993334259130677, + "grad_norm": 3.7405967197390018, + "learning_rate": 5.5672046920048835e-06, + "loss": 0.499, + "step": 6912 + }, + { + "epoch": 0.4800027773920289, + "grad_norm": 3.876461888308058, + "learning_rate": 5.566087449213748e-06, + "loss": 0.4959, + "step": 6913 + }, + { + "epoch": 0.480072212192751, + "grad_norm": 2.640293489427926, + "learning_rate": 5.564970177790552e-06, + "loss": 0.1982, + "step": 6914 + }, + { + "epoch": 0.4801416469934731, + "grad_norm": 3.822609780965002, + "learning_rate": 5.5638528777918024e-06, + "loss": 0.423, + "step": 6915 + }, + { + "epoch": 0.48021108179419525, + "grad_norm": 3.0085274032615064, + "learning_rate": 5.562735549274012e-06, + "loss": 0.3137, + "step": 6916 + }, + { + "epoch": 0.4802805165949174, + "grad_norm": 4.830303009526612, + "learning_rate": 5.561618192293695e-06, + "loss": 0.8478, + "step": 6917 + }, + { + "epoch": 0.4803499513956395, + "grad_norm": 4.388400865237029, + "learning_rate": 5.5605008069073655e-06, + "loss": 0.6717, + "step": 6918 + }, + { + "epoch": 0.4804193861963616, + "grad_norm": 3.541469917585858, + "learning_rate": 5.559383393171538e-06, + "loss": 0.463, + "step": 6919 + }, + { + "epoch": 0.48048882099708373, + "grad_norm": 3.4132899966746404, + "learning_rate": 5.5582659511427315e-06, + "loss": 0.4368, + "step": 6920 + }, + { + "epoch": 0.48055825579780587, + "grad_norm": 2.9292842685575953, + "learning_rate": 5.557148480877467e-06, + "loss": 0.3308, + "step": 6921 + }, + { + "epoch": 0.480627690598528, + "grad_norm": 2.819719721077523, + "learning_rate": 5.556030982432261e-06, + "loss": 0.3713, + "step": 6922 + }, + { + "epoch": 0.4806971253992501, + "grad_norm": 3.092139446288678, + "learning_rate": 5.5549134558636375e-06, + "loss": 0.4073, + "step": 6923 + }, + { + "epoch": 0.4807665601999722, + "grad_norm": 3.276199820665668, + "learning_rate": 5.553795901228121e-06, + "loss": 0.3676, + "step": 6924 + }, + { + "epoch": 0.48083599500069435, + "grad_norm": 3.2317018216467934, + "learning_rate": 5.552678318582232e-06, + "loss": 0.4514, + "step": 6925 + }, + { + "epoch": 0.4809054298014165, + "grad_norm": 3.210481088970257, + "learning_rate": 5.551560707982501e-06, + "loss": 0.3899, + "step": 6926 + }, + { + "epoch": 0.4809748646021386, + "grad_norm": 3.5502074859850983, + "learning_rate": 5.550443069485452e-06, + "loss": 0.529, + "step": 6927 + }, + { + "epoch": 0.4810442994028607, + "grad_norm": 3.5994570695687513, + "learning_rate": 5.549325403147616e-06, + "loss": 
0.43, + "step": 6928 + }, + { + "epoch": 0.48111373420358283, + "grad_norm": 3.6930306118730245, + "learning_rate": 5.5482077090255215e-06, + "loss": 0.4677, + "step": 6929 + }, + { + "epoch": 0.48118316900430497, + "grad_norm": 3.1986492975802547, + "learning_rate": 5.5470899871757034e-06, + "loss": 0.3978, + "step": 6930 + }, + { + "epoch": 0.4812526038050271, + "grad_norm": 3.6768283482726956, + "learning_rate": 5.545972237654692e-06, + "loss": 0.3953, + "step": 6931 + }, + { + "epoch": 0.4813220386057492, + "grad_norm": 3.0719332372228862, + "learning_rate": 5.5448544605190224e-06, + "loss": 0.4218, + "step": 6932 + }, + { + "epoch": 0.4813914734064713, + "grad_norm": 4.882881498690462, + "learning_rate": 5.54373665582523e-06, + "loss": 0.7839, + "step": 6933 + }, + { + "epoch": 0.48146090820719345, + "grad_norm": 3.639410359653315, + "learning_rate": 5.5426188236298565e-06, + "loss": 0.3294, + "step": 6934 + }, + { + "epoch": 0.4815303430079156, + "grad_norm": 3.626441459686348, + "learning_rate": 5.541500963989433e-06, + "loss": 0.3336, + "step": 6935 + }, + { + "epoch": 0.48159977780863766, + "grad_norm": 3.953041244999979, + "learning_rate": 5.540383076960505e-06, + "loss": 0.4123, + "step": 6936 + }, + { + "epoch": 0.4816692126093598, + "grad_norm": 4.526534768297793, + "learning_rate": 5.539265162599612e-06, + "loss": 0.5172, + "step": 6937 + }, + { + "epoch": 0.48173864741008193, + "grad_norm": 4.899549171352108, + "learning_rate": 5.538147220963297e-06, + "loss": 0.6023, + "step": 6938 + }, + { + "epoch": 0.48180808221080407, + "grad_norm": 3.766134267984721, + "learning_rate": 5.537029252108106e-06, + "loss": 0.359, + "step": 6939 + }, + { + "epoch": 0.4818775170115262, + "grad_norm": 2.7574986754881907, + "learning_rate": 5.53591125609058e-06, + "loss": 0.2612, + "step": 6940 + }, + { + "epoch": 0.4819469518122483, + "grad_norm": 3.968142391790459, + "learning_rate": 5.534793232967273e-06, + "loss": 0.5711, + "step": 6941 + }, + { + "epoch": 0.4820163866129704, + "grad_norm": 3.8524787646436485, + "learning_rate": 5.533675182794728e-06, + "loss": 0.6372, + "step": 6942 + }, + { + "epoch": 0.48208582141369255, + "grad_norm": 8.165968067738195, + "learning_rate": 5.5325571056294945e-06, + "loss": 0.5467, + "step": 6943 + }, + { + "epoch": 0.4821552562144147, + "grad_norm": 4.701594522723309, + "learning_rate": 5.531439001528128e-06, + "loss": 0.3691, + "step": 6944 + }, + { + "epoch": 0.48222469101513676, + "grad_norm": 3.6774611846374565, + "learning_rate": 5.5303208705471756e-06, + "loss": 0.5579, + "step": 6945 + }, + { + "epoch": 0.4822941258158589, + "grad_norm": 4.402197250710555, + "learning_rate": 5.529202712743194e-06, + "loss": 0.5021, + "step": 6946 + }, + { + "epoch": 0.48236356061658103, + "grad_norm": 3.2362805677283575, + "learning_rate": 5.52808452817274e-06, + "loss": 0.3996, + "step": 6947 + }, + { + "epoch": 0.48243299541730317, + "grad_norm": 4.881821744103387, + "learning_rate": 5.526966316892367e-06, + "loss": 0.5232, + "step": 6948 + }, + { + "epoch": 0.48250243021802525, + "grad_norm": 4.898920162475988, + "learning_rate": 5.525848078958634e-06, + "loss": 0.3947, + "step": 6949 + }, + { + "epoch": 0.4825718650187474, + "grad_norm": 2.557136591505448, + "learning_rate": 5.5247298144280995e-06, + "loss": 0.261, + "step": 6950 + }, + { + "epoch": 0.4826412998194695, + "grad_norm": 3.456637004090075, + "learning_rate": 5.523611523357326e-06, + "loss": 0.4461, + "step": 6951 + }, + { + "epoch": 0.48271073462019165, + "grad_norm": 5.136043732008318, + 
"learning_rate": 5.522493205802873e-06, + "loss": 0.8836, + "step": 6952 + }, + { + "epoch": 0.4827801694209138, + "grad_norm": 4.149510195418904, + "learning_rate": 5.5213748618213055e-06, + "loss": 0.5754, + "step": 6953 + }, + { + "epoch": 0.48284960422163586, + "grad_norm": 5.0614104046568515, + "learning_rate": 5.520256491469187e-06, + "loss": 0.5501, + "step": 6954 + }, + { + "epoch": 0.482919039022358, + "grad_norm": 2.6423079631453086, + "learning_rate": 5.519138094803085e-06, + "loss": 0.2684, + "step": 6955 + }, + { + "epoch": 0.48298847382308013, + "grad_norm": 3.0468470414439945, + "learning_rate": 5.518019671879564e-06, + "loss": 0.471, + "step": 6956 + }, + { + "epoch": 0.48305790862380227, + "grad_norm": 5.515769984123761, + "learning_rate": 5.516901222755195e-06, + "loss": 0.5988, + "step": 6957 + }, + { + "epoch": 0.48312734342452435, + "grad_norm": 3.2011807681663793, + "learning_rate": 5.515782747486549e-06, + "loss": 0.2984, + "step": 6958 + }, + { + "epoch": 0.4831967782252465, + "grad_norm": 2.2121177030803167, + "learning_rate": 5.5146642461301915e-06, + "loss": 0.2455, + "step": 6959 + }, + { + "epoch": 0.4832662130259686, + "grad_norm": 4.208613049488054, + "learning_rate": 5.513545718742702e-06, + "loss": 0.5201, + "step": 6960 + }, + { + "epoch": 0.48333564782669075, + "grad_norm": 3.055382881476904, + "learning_rate": 5.512427165380649e-06, + "loss": 0.2731, + "step": 6961 + }, + { + "epoch": 0.4834050826274129, + "grad_norm": 4.185969415502752, + "learning_rate": 5.511308586100611e-06, + "loss": 0.5186, + "step": 6962 + }, + { + "epoch": 0.48347451742813496, + "grad_norm": 3.27396345127389, + "learning_rate": 5.510189980959163e-06, + "loss": 0.4021, + "step": 6963 + }, + { + "epoch": 0.4835439522288571, + "grad_norm": 3.1102008975673914, + "learning_rate": 5.509071350012883e-06, + "loss": 0.3534, + "step": 6964 + }, + { + "epoch": 0.48361338702957923, + "grad_norm": 4.328759916473042, + "learning_rate": 5.5079526933183496e-06, + "loss": 0.465, + "step": 6965 + }, + { + "epoch": 0.48368282183030137, + "grad_norm": 3.2923007372167263, + "learning_rate": 5.506834010932145e-06, + "loss": 0.3065, + "step": 6966 + }, + { + "epoch": 0.48375225663102345, + "grad_norm": 5.536271458988812, + "learning_rate": 5.50571530291085e-06, + "loss": 0.731, + "step": 6967 + }, + { + "epoch": 0.4838216914317456, + "grad_norm": 3.5332932561127572, + "learning_rate": 5.504596569311044e-06, + "loss": 0.2962, + "step": 6968 + }, + { + "epoch": 0.4838911262324677, + "grad_norm": 4.360482451786159, + "learning_rate": 5.503477810189318e-06, + "loss": 0.5836, + "step": 6969 + }, + { + "epoch": 0.48396056103318985, + "grad_norm": 4.375020346718677, + "learning_rate": 5.502359025602253e-06, + "loss": 0.578, + "step": 6970 + }, + { + "epoch": 0.48402999583391193, + "grad_norm": 4.625206674837379, + "learning_rate": 5.501240215606437e-06, + "loss": 0.6806, + "step": 6971 + }, + { + "epoch": 0.48409943063463406, + "grad_norm": 2.6111694213058114, + "learning_rate": 5.500121380258456e-06, + "loss": 0.2446, + "step": 6972 + }, + { + "epoch": 0.4841688654353562, + "grad_norm": 4.931910887739966, + "learning_rate": 5.499002519614906e-06, + "loss": 0.6414, + "step": 6973 + }, + { + "epoch": 0.48423830023607833, + "grad_norm": 4.453377758831862, + "learning_rate": 5.497883633732369e-06, + "loss": 0.4173, + "step": 6974 + }, + { + "epoch": 0.48430773503680047, + "grad_norm": 3.4753371181469683, + "learning_rate": 5.4967647226674435e-06, + "loss": 0.4151, + "step": 6975 + }, + { + "epoch": 
0.48437716983752255, + "grad_norm": 4.983755723896898, + "learning_rate": 5.49564578647672e-06, + "loss": 0.5652, + "step": 6976 + }, + { + "epoch": 0.4844466046382447, + "grad_norm": 3.2378244701895116, + "learning_rate": 5.494526825216793e-06, + "loss": 0.4206, + "step": 6977 + }, + { + "epoch": 0.4845160394389668, + "grad_norm": 2.254154446435162, + "learning_rate": 5.493407838944259e-06, + "loss": 0.1622, + "step": 6978 + }, + { + "epoch": 0.48458547423968895, + "grad_norm": 3.2083847517799122, + "learning_rate": 5.492288827715715e-06, + "loss": 0.2658, + "step": 6979 + }, + { + "epoch": 0.48465490904041103, + "grad_norm": 3.198020158176356, + "learning_rate": 5.491169791587759e-06, + "loss": 0.498, + "step": 6980 + }, + { + "epoch": 0.48472434384113317, + "grad_norm": 3.92769984715268, + "learning_rate": 5.490050730616991e-06, + "loss": 0.5434, + "step": 6981 + }, + { + "epoch": 0.4847937786418553, + "grad_norm": 3.6286286735741005, + "learning_rate": 5.4889316448600104e-06, + "loss": 0.3701, + "step": 6982 + }, + { + "epoch": 0.48486321344257743, + "grad_norm": 5.12574482288133, + "learning_rate": 5.487812534373423e-06, + "loss": 0.5249, + "step": 6983 + }, + { + "epoch": 0.48493264824329957, + "grad_norm": 4.939031066254939, + "learning_rate": 5.486693399213827e-06, + "loss": 0.5723, + "step": 6984 + }, + { + "epoch": 0.48500208304402165, + "grad_norm": 3.508050947216148, + "learning_rate": 5.485574239437831e-06, + "loss": 0.356, + "step": 6985 + }, + { + "epoch": 0.4850715178447438, + "grad_norm": 3.2832477202887205, + "learning_rate": 5.484455055102039e-06, + "loss": 0.3478, + "step": 6986 + }, + { + "epoch": 0.4851409526454659, + "grad_norm": 4.419111943834761, + "learning_rate": 5.483335846263059e-06, + "loss": 0.5915, + "step": 6987 + }, + { + "epoch": 0.48521038744618805, + "grad_norm": 3.022562895674062, + "learning_rate": 5.482216612977497e-06, + "loss": 0.4529, + "step": 6988 + }, + { + "epoch": 0.48527982224691013, + "grad_norm": 4.44636186440409, + "learning_rate": 5.481097355301968e-06, + "loss": 0.3865, + "step": 6989 + }, + { + "epoch": 0.48534925704763227, + "grad_norm": 3.2376941666003205, + "learning_rate": 5.4799780732930755e-06, + "loss": 0.3118, + "step": 6990 + }, + { + "epoch": 0.4854186918483544, + "grad_norm": 15.034008485773793, + "learning_rate": 5.478858767007436e-06, + "loss": 0.5178, + "step": 6991 + }, + { + "epoch": 0.48548812664907653, + "grad_norm": 3.2225523327747587, + "learning_rate": 5.477739436501663e-06, + "loss": 0.4585, + "step": 6992 + }, + { + "epoch": 0.4855575614497986, + "grad_norm": 4.02782040763264, + "learning_rate": 5.47662008183237e-06, + "loss": 0.5577, + "step": 6993 + }, + { + "epoch": 0.48562699625052075, + "grad_norm": 3.52370522902014, + "learning_rate": 5.475500703056173e-06, + "loss": 0.4555, + "step": 6994 + }, + { + "epoch": 0.4856964310512429, + "grad_norm": 3.3619368402007113, + "learning_rate": 5.474381300229686e-06, + "loss": 0.4282, + "step": 6995 + }, + { + "epoch": 0.485765865851965, + "grad_norm": 3.7271603759356418, + "learning_rate": 5.473261873409532e-06, + "loss": 0.5351, + "step": 6996 + }, + { + "epoch": 0.48583530065268715, + "grad_norm": 4.789424603339742, + "learning_rate": 5.472142422652327e-06, + "loss": 0.6059, + "step": 6997 + }, + { + "epoch": 0.48590473545340923, + "grad_norm": 2.6327874954895716, + "learning_rate": 5.471022948014692e-06, + "loss": 0.2564, + "step": 6998 + }, + { + "epoch": 0.48597417025413137, + "grad_norm": 3.8982884075469695, + "learning_rate": 5.46990344955325e-06, + "loss": 
0.4103, + "step": 6999 + }, + { + "epoch": 0.4860436050548535, + "grad_norm": 3.530450155813866, + "learning_rate": 5.4687839273246236e-06, + "loss": 0.5684, + "step": 7000 + }, + { + "epoch": 0.48611303985557563, + "grad_norm": 4.467241391084448, + "learning_rate": 5.467664381385437e-06, + "loss": 0.5692, + "step": 7001 + }, + { + "epoch": 0.4861824746562977, + "grad_norm": 3.6942966954454532, + "learning_rate": 5.466544811792314e-06, + "loss": 0.4233, + "step": 7002 + }, + { + "epoch": 0.48625190945701985, + "grad_norm": 3.957394547192311, + "learning_rate": 5.465425218601883e-06, + "loss": 0.1946, + "step": 7003 + }, + { + "epoch": 0.486321344257742, + "grad_norm": 3.441764463297761, + "learning_rate": 5.46430560187077e-06, + "loss": 0.5798, + "step": 7004 + }, + { + "epoch": 0.4863907790584641, + "grad_norm": 3.2716238644687867, + "learning_rate": 5.463185961655607e-06, + "loss": 0.36, + "step": 7005 + }, + { + "epoch": 0.4864602138591862, + "grad_norm": 4.549098908901638, + "learning_rate": 5.46206629801302e-06, + "loss": 0.7096, + "step": 7006 + }, + { + "epoch": 0.48652964865990833, + "grad_norm": 3.9242384803116432, + "learning_rate": 5.4609466109996445e-06, + "loss": 0.4479, + "step": 7007 + }, + { + "epoch": 0.48659908346063047, + "grad_norm": 3.482857185747351, + "learning_rate": 5.459826900672109e-06, + "loss": 0.1778, + "step": 7008 + }, + { + "epoch": 0.4866685182613526, + "grad_norm": 3.267806434218714, + "learning_rate": 5.4587071670870515e-06, + "loss": 0.3487, + "step": 7009 + }, + { + "epoch": 0.48673795306207474, + "grad_norm": 4.008501552147815, + "learning_rate": 5.457587410301104e-06, + "loss": 0.5819, + "step": 7010 + }, + { + "epoch": 0.4868073878627968, + "grad_norm": 3.721885054933105, + "learning_rate": 5.4564676303709e-06, + "loss": 0.3322, + "step": 7011 + }, + { + "epoch": 0.48687682266351895, + "grad_norm": 4.037027769775607, + "learning_rate": 5.455347827353085e-06, + "loss": 0.5758, + "step": 7012 + }, + { + "epoch": 0.4869462574642411, + "grad_norm": 11.805811688208767, + "learning_rate": 5.454228001304288e-06, + "loss": 0.413, + "step": 7013 + }, + { + "epoch": 0.4870156922649632, + "grad_norm": 4.482429578626988, + "learning_rate": 5.453108152281154e-06, + "loss": 0.6947, + "step": 7014 + }, + { + "epoch": 0.4870851270656853, + "grad_norm": 2.997358411330258, + "learning_rate": 5.451988280340322e-06, + "loss": 0.4322, + "step": 7015 + }, + { + "epoch": 0.48715456186640743, + "grad_norm": 3.810653214437037, + "learning_rate": 5.450868385538436e-06, + "loss": 0.3427, + "step": 7016 + }, + { + "epoch": 0.48722399666712957, + "grad_norm": 4.146119243655524, + "learning_rate": 5.4497484679321355e-06, + "loss": 0.5509, + "step": 7017 + }, + { + "epoch": 0.4872934314678517, + "grad_norm": 3.3805430736576456, + "learning_rate": 5.4486285275780674e-06, + "loss": 0.3999, + "step": 7018 + }, + { + "epoch": 0.48736286626857384, + "grad_norm": 3.1038843143646258, + "learning_rate": 5.447508564532875e-06, + "loss": 0.4051, + "step": 7019 + }, + { + "epoch": 0.4874323010692959, + "grad_norm": 3.544914678857529, + "learning_rate": 5.446388578853205e-06, + "loss": 0.4009, + "step": 7020 + }, + { + "epoch": 0.48750173587001805, + "grad_norm": 2.9970421466413257, + "learning_rate": 5.4452685705957084e-06, + "loss": 0.1605, + "step": 7021 + }, + { + "epoch": 0.4875711706707402, + "grad_norm": 3.7623921304847836, + "learning_rate": 5.44414853981703e-06, + "loss": 0.4198, + "step": 7022 + }, + { + "epoch": 0.4876406054714623, + "grad_norm": 3.6739995613831193, + 
"learning_rate": 5.443028486573821e-06, + "loss": 0.3413, + "step": 7023 + }, + { + "epoch": 0.4877100402721844, + "grad_norm": 3.042382801032805, + "learning_rate": 5.441908410922734e-06, + "loss": 0.2823, + "step": 7024 + }, + { + "epoch": 0.48777947507290653, + "grad_norm": 3.689622993727875, + "learning_rate": 5.440788312920418e-06, + "loss": 0.6014, + "step": 7025 + }, + { + "epoch": 0.48784890987362867, + "grad_norm": 4.233184047969592, + "learning_rate": 5.4396681926235285e-06, + "loss": 0.6397, + "step": 7026 + }, + { + "epoch": 0.4879183446743508, + "grad_norm": 3.0752474160202716, + "learning_rate": 5.4385480500887175e-06, + "loss": 0.4699, + "step": 7027 + }, + { + "epoch": 0.4879877794750729, + "grad_norm": 3.5066890091344343, + "learning_rate": 5.437427885372645e-06, + "loss": 0.5761, + "step": 7028 + }, + { + "epoch": 0.488057214275795, + "grad_norm": 3.772400078707538, + "learning_rate": 5.436307698531963e-06, + "loss": 0.4267, + "step": 7029 + }, + { + "epoch": 0.48812664907651715, + "grad_norm": 4.399794687980241, + "learning_rate": 5.435187489623332e-06, + "loss": 0.532, + "step": 7030 + }, + { + "epoch": 0.4881960838772393, + "grad_norm": 4.479420280086334, + "learning_rate": 5.434067258703411e-06, + "loss": 0.606, + "step": 7031 + }, + { + "epoch": 0.4882655186779614, + "grad_norm": 3.025849777195786, + "learning_rate": 5.432947005828859e-06, + "loss": 0.3548, + "step": 7032 + }, + { + "epoch": 0.4883349534786835, + "grad_norm": 4.850528342891305, + "learning_rate": 5.431826731056337e-06, + "loss": 0.641, + "step": 7033 + }, + { + "epoch": 0.48840438827940563, + "grad_norm": 3.2368770609065187, + "learning_rate": 5.430706434442508e-06, + "loss": 0.271, + "step": 7034 + }, + { + "epoch": 0.48847382308012777, + "grad_norm": 3.86410741783333, + "learning_rate": 5.429586116044034e-06, + "loss": 0.3402, + "step": 7035 + }, + { + "epoch": 0.4885432578808499, + "grad_norm": 4.846426014110778, + "learning_rate": 5.4284657759175805e-06, + "loss": 0.8363, + "step": 7036 + }, + { + "epoch": 0.488612692681572, + "grad_norm": 4.2225166411818, + "learning_rate": 5.427345414119813e-06, + "loss": 0.6361, + "step": 7037 + }, + { + "epoch": 0.4886821274822941, + "grad_norm": 3.1896809850729566, + "learning_rate": 5.4262250307074e-06, + "loss": 0.357, + "step": 7038 + }, + { + "epoch": 0.48875156228301625, + "grad_norm": 2.828416229092935, + "learning_rate": 5.425104625737004e-06, + "loss": 0.2406, + "step": 7039 + }, + { + "epoch": 0.4888209970837384, + "grad_norm": 4.359043070900398, + "learning_rate": 5.4239841992653e-06, + "loss": 0.5679, + "step": 7040 + }, + { + "epoch": 0.4888904318844605, + "grad_norm": 3.829085878821352, + "learning_rate": 5.4228637513489535e-06, + "loss": 0.4839, + "step": 7041 + }, + { + "epoch": 0.4889598666851826, + "grad_norm": 3.8646972208938988, + "learning_rate": 5.421743282044637e-06, + "loss": 0.4884, + "step": 7042 + }, + { + "epoch": 0.48902930148590473, + "grad_norm": 4.510938864107892, + "learning_rate": 5.420622791409022e-06, + "loss": 0.4178, + "step": 7043 + }, + { + "epoch": 0.48909873628662687, + "grad_norm": 3.827014090785199, + "learning_rate": 5.419502279498784e-06, + "loss": 0.397, + "step": 7044 + }, + { + "epoch": 0.489168171087349, + "grad_norm": 3.1468740688734402, + "learning_rate": 5.418381746370593e-06, + "loss": 0.4166, + "step": 7045 + }, + { + "epoch": 0.4892376058880711, + "grad_norm": 3.355730729530613, + "learning_rate": 5.417261192081128e-06, + "loss": 0.5955, + "step": 7046 + }, + { + "epoch": 0.4893070406887932, + 
"grad_norm": 4.179213487328587, + "learning_rate": 5.4161406166870645e-06, + "loss": 0.587, + "step": 7047 + }, + { + "epoch": 0.48937647548951535, + "grad_norm": 5.122705864014512, + "learning_rate": 5.415020020245079e-06, + "loss": 0.3448, + "step": 7048 + }, + { + "epoch": 0.4894459102902375, + "grad_norm": 4.599408780890138, + "learning_rate": 5.41389940281185e-06, + "loss": 0.5739, + "step": 7049 + }, + { + "epoch": 0.48951534509095956, + "grad_norm": 3.823790023557841, + "learning_rate": 5.412778764444058e-06, + "loss": 0.4686, + "step": 7050 + }, + { + "epoch": 0.4895847798916817, + "grad_norm": 4.289588729872633, + "learning_rate": 5.411658105198384e-06, + "loss": 0.5063, + "step": 7051 + }, + { + "epoch": 0.48965421469240383, + "grad_norm": 4.817521982435509, + "learning_rate": 5.410537425131507e-06, + "loss": 0.668, + "step": 7052 + }, + { + "epoch": 0.48972364949312597, + "grad_norm": 4.310958725884966, + "learning_rate": 5.409416724300113e-06, + "loss": 0.6766, + "step": 7053 + }, + { + "epoch": 0.4897930842938481, + "grad_norm": 4.1585112975107466, + "learning_rate": 5.408296002760885e-06, + "loss": 0.3002, + "step": 7054 + }, + { + "epoch": 0.4898625190945702, + "grad_norm": 4.4878297730743295, + "learning_rate": 5.407175260570506e-06, + "loss": 0.549, + "step": 7055 + }, + { + "epoch": 0.4899319538952923, + "grad_norm": 3.714329462055869, + "learning_rate": 5.406054497785662e-06, + "loss": 0.525, + "step": 7056 + }, + { + "epoch": 0.49000138869601445, + "grad_norm": 3.667374424833374, + "learning_rate": 5.404933714463043e-06, + "loss": 0.3344, + "step": 7057 + }, + { + "epoch": 0.4900708234967366, + "grad_norm": 3.936808443607699, + "learning_rate": 5.403812910659335e-06, + "loss": 0.4177, + "step": 7058 + }, + { + "epoch": 0.49014025829745866, + "grad_norm": 4.622057090956318, + "learning_rate": 5.402692086431225e-06, + "loss": 0.777, + "step": 7059 + }, + { + "epoch": 0.4902096930981808, + "grad_norm": 3.7568078116979398, + "learning_rate": 5.401571241835407e-06, + "loss": 0.4949, + "step": 7060 + }, + { + "epoch": 0.49027912789890293, + "grad_norm": 3.503833511660692, + "learning_rate": 5.400450376928569e-06, + "loss": 0.404, + "step": 7061 + }, + { + "epoch": 0.49034856269962507, + "grad_norm": 3.8503778219293006, + "learning_rate": 5.399329491767405e-06, + "loss": 0.5039, + "step": 7062 + }, + { + "epoch": 0.49041799750034715, + "grad_norm": 3.889654729978222, + "learning_rate": 5.398208586408605e-06, + "loss": 0.2657, + "step": 7063 + }, + { + "epoch": 0.4904874323010693, + "grad_norm": 4.136377780270438, + "learning_rate": 5.397087660908867e-06, + "loss": 0.2129, + "step": 7064 + }, + { + "epoch": 0.4905568671017914, + "grad_norm": 3.196631418474297, + "learning_rate": 5.395966715324883e-06, + "loss": 0.2706, + "step": 7065 + }, + { + "epoch": 0.49062630190251355, + "grad_norm": 3.843932926563781, + "learning_rate": 5.394845749713352e-06, + "loss": 0.6307, + "step": 7066 + }, + { + "epoch": 0.4906957367032357, + "grad_norm": 3.0776744817711017, + "learning_rate": 5.393724764130971e-06, + "loss": 0.5121, + "step": 7067 + }, + { + "epoch": 0.49076517150395776, + "grad_norm": 2.7512322894592893, + "learning_rate": 5.392603758634435e-06, + "loss": 0.1882, + "step": 7068 + }, + { + "epoch": 0.4908346063046799, + "grad_norm": 4.403634363135146, + "learning_rate": 5.391482733280446e-06, + "loss": 0.3319, + "step": 7069 + }, + { + "epoch": 0.49090404110540203, + "grad_norm": 3.0689038484707956, + "learning_rate": 5.390361688125702e-06, + "loss": 0.3281, + "step": 7070 + }, + 
{ + "epoch": 0.49097347590612417, + "grad_norm": 3.626154411725304, + "learning_rate": 5.389240623226907e-06, + "loss": 0.3713, + "step": 7071 + }, + { + "epoch": 0.49104291070684625, + "grad_norm": 3.377471853637553, + "learning_rate": 5.388119538640761e-06, + "loss": 0.4627, + "step": 7072 + }, + { + "epoch": 0.4911123455075684, + "grad_norm": 2.0119859117946515, + "learning_rate": 5.386998434423969e-06, + "loss": 0.1082, + "step": 7073 + }, + { + "epoch": 0.4911817803082905, + "grad_norm": 3.4463036968993155, + "learning_rate": 5.385877310633233e-06, + "loss": 0.2909, + "step": 7074 + }, + { + "epoch": 0.49125121510901265, + "grad_norm": 4.524175940638843, + "learning_rate": 5.384756167325259e-06, + "loss": 0.7222, + "step": 7075 + }, + { + "epoch": 0.4913206499097348, + "grad_norm": 5.814402119204693, + "learning_rate": 5.383635004556754e-06, + "loss": 0.8433, + "step": 7076 + }, + { + "epoch": 0.49139008471045686, + "grad_norm": 2.670521714896633, + "learning_rate": 5.382513822384424e-06, + "loss": 0.2156, + "step": 7077 + }, + { + "epoch": 0.491459519511179, + "grad_norm": 3.6336452436456557, + "learning_rate": 5.381392620864979e-06, + "loss": 0.4604, + "step": 7078 + }, + { + "epoch": 0.49152895431190113, + "grad_norm": 3.3957624242968216, + "learning_rate": 5.380271400055126e-06, + "loss": 0.5248, + "step": 7079 + }, + { + "epoch": 0.49159838911262327, + "grad_norm": 4.21256576472506, + "learning_rate": 5.379150160011576e-06, + "loss": 0.5441, + "step": 7080 + }, + { + "epoch": 0.49166782391334535, + "grad_norm": 4.19586458485088, + "learning_rate": 5.378028900791039e-06, + "loss": 0.4626, + "step": 7081 + }, + { + "epoch": 0.4917372587140675, + "grad_norm": 4.064079229713481, + "learning_rate": 5.376907622450229e-06, + "loss": 0.6011, + "step": 7082 + }, + { + "epoch": 0.4918066935147896, + "grad_norm": 4.355155217395831, + "learning_rate": 5.37578632504586e-06, + "loss": 0.5049, + "step": 7083 + }, + { + "epoch": 0.49187612831551175, + "grad_norm": 3.170112863918857, + "learning_rate": 5.37466500863464e-06, + "loss": 0.2759, + "step": 7084 + }, + { + "epoch": 0.49194556311623383, + "grad_norm": 3.6284582460962804, + "learning_rate": 5.373543673273289e-06, + "loss": 0.5731, + "step": 7085 + }, + { + "epoch": 0.49201499791695597, + "grad_norm": 3.360994326288128, + "learning_rate": 5.3724223190185235e-06, + "loss": 0.3512, + "step": 7086 + }, + { + "epoch": 0.4920844327176781, + "grad_norm": 3.1923036025832463, + "learning_rate": 5.371300945927057e-06, + "loss": 0.4018, + "step": 7087 + }, + { + "epoch": 0.49215386751840023, + "grad_norm": 4.543802119796461, + "learning_rate": 5.370179554055608e-06, + "loss": 0.3276, + "step": 7088 + }, + { + "epoch": 0.49222330231912237, + "grad_norm": 3.5772570153764645, + "learning_rate": 5.369058143460899e-06, + "loss": 0.4548, + "step": 7089 + }, + { + "epoch": 0.49229273711984445, + "grad_norm": 3.836663798380964, + "learning_rate": 5.367936714199643e-06, + "loss": 0.5101, + "step": 7090 + }, + { + "epoch": 0.4923621719205666, + "grad_norm": 3.4147183342396357, + "learning_rate": 5.366815266328567e-06, + "loss": 0.359, + "step": 7091 + }, + { + "epoch": 0.4924316067212887, + "grad_norm": 3.718788778802708, + "learning_rate": 5.365693799904389e-06, + "loss": 0.5331, + "step": 7092 + }, + { + "epoch": 0.49250104152201085, + "grad_norm": 3.82234695435734, + "learning_rate": 5.364572314983833e-06, + "loss": 0.2334, + "step": 7093 + }, + { + "epoch": 0.49257047632273293, + "grad_norm": 3.6650577667817386, + "learning_rate": 5.363450811623622e-06, 
+ "loss": 0.5805, + "step": 7094 + }, + { + "epoch": 0.49263991112345507, + "grad_norm": 3.2618651506345344, + "learning_rate": 5.362329289880479e-06, + "loss": 0.4341, + "step": 7095 + }, + { + "epoch": 0.4927093459241772, + "grad_norm": 3.7045229958678574, + "learning_rate": 5.361207749811131e-06, + "loss": 0.4515, + "step": 7096 + }, + { + "epoch": 0.49277878072489933, + "grad_norm": 3.793494209936031, + "learning_rate": 5.360086191472303e-06, + "loss": 0.3455, + "step": 7097 + }, + { + "epoch": 0.49284821552562147, + "grad_norm": 4.543797999111898, + "learning_rate": 5.358964614920724e-06, + "loss": 0.5888, + "step": 7098 + }, + { + "epoch": 0.49291765032634355, + "grad_norm": 3.526633109201806, + "learning_rate": 5.3578430202131215e-06, + "loss": 0.4215, + "step": 7099 + }, + { + "epoch": 0.4929870851270657, + "grad_norm": 3.4893156897797626, + "learning_rate": 5.356721407406223e-06, + "loss": 0.4875, + "step": 7100 + }, + { + "epoch": 0.4930565199277878, + "grad_norm": 3.5379761901320665, + "learning_rate": 5.355599776556759e-06, + "loss": 0.4249, + "step": 7101 + }, + { + "epoch": 0.49312595472850995, + "grad_norm": 4.2541936107763725, + "learning_rate": 5.354478127721462e-06, + "loss": 0.3551, + "step": 7102 + }, + { + "epoch": 0.49319538952923203, + "grad_norm": 3.8310424033390107, + "learning_rate": 5.353356460957062e-06, + "loss": 0.3181, + "step": 7103 + }, + { + "epoch": 0.49326482432995417, + "grad_norm": 3.7087785114890433, + "learning_rate": 5.352234776320291e-06, + "loss": 0.5541, + "step": 7104 + }, + { + "epoch": 0.4933342591306763, + "grad_norm": 4.4491572652855975, + "learning_rate": 5.351113073867885e-06, + "loss": 0.4496, + "step": 7105 + }, + { + "epoch": 0.49340369393139843, + "grad_norm": 3.084191534893803, + "learning_rate": 5.349991353656575e-06, + "loss": 0.2512, + "step": 7106 + }, + { + "epoch": 0.4934731287321205, + "grad_norm": 3.35212575027703, + "learning_rate": 5.3488696157431005e-06, + "loss": 0.4875, + "step": 7107 + }, + { + "epoch": 0.49354256353284265, + "grad_norm": 4.689501310777951, + "learning_rate": 5.347747860184195e-06, + "loss": 0.5149, + "step": 7108 + }, + { + "epoch": 0.4936119983335648, + "grad_norm": 2.603225165651721, + "learning_rate": 5.346626087036596e-06, + "loss": 0.272, + "step": 7109 + }, + { + "epoch": 0.4936814331342869, + "grad_norm": 3.101240869300462, + "learning_rate": 5.345504296357042e-06, + "loss": 0.3345, + "step": 7110 + }, + { + "epoch": 0.49375086793500905, + "grad_norm": 3.1128146752482295, + "learning_rate": 5.344382488202271e-06, + "loss": 0.4328, + "step": 7111 + }, + { + "epoch": 0.49382030273573113, + "grad_norm": 3.3555256220325975, + "learning_rate": 5.343260662629025e-06, + "loss": 0.3468, + "step": 7112 + }, + { + "epoch": 0.49388973753645327, + "grad_norm": 3.8189806418256564, + "learning_rate": 5.3421388196940405e-06, + "loss": 0.534, + "step": 7113 + }, + { + "epoch": 0.4939591723371754, + "grad_norm": 3.120467300028105, + "learning_rate": 5.341016959454064e-06, + "loss": 0.422, + "step": 7114 + }, + { + "epoch": 0.49402860713789754, + "grad_norm": 6.044010655463055, + "learning_rate": 5.339895081965835e-06, + "loss": 0.7997, + "step": 7115 + }, + { + "epoch": 0.4940980419386196, + "grad_norm": 4.056277184607958, + "learning_rate": 5.338773187286097e-06, + "loss": 0.4908, + "step": 7116 + }, + { + "epoch": 0.49416747673934175, + "grad_norm": 3.5275031696452324, + "learning_rate": 5.337651275471595e-06, + "loss": 0.26, + "step": 7117 + }, + { + "epoch": 0.4942369115400639, + "grad_norm": 
2.2069894139170403, + "learning_rate": 5.336529346579073e-06, + "loss": 0.1885, + "step": 7118 + }, + { + "epoch": 0.494306346340786, + "grad_norm": 4.580578305307517, + "learning_rate": 5.335407400665278e-06, + "loss": 0.5559, + "step": 7119 + }, + { + "epoch": 0.4943757811415081, + "grad_norm": 4.291592562011333, + "learning_rate": 5.334285437786955e-06, + "loss": 0.5441, + "step": 7120 + }, + { + "epoch": 0.49444521594223023, + "grad_norm": 3.396321083622744, + "learning_rate": 5.333163458000854e-06, + "loss": 0.4067, + "step": 7121 + }, + { + "epoch": 0.49451465074295237, + "grad_norm": 3.610393560243008, + "learning_rate": 5.332041461363721e-06, + "loss": 0.5126, + "step": 7122 + }, + { + "epoch": 0.4945840855436745, + "grad_norm": 4.240141452919585, + "learning_rate": 5.330919447932309e-06, + "loss": 0.544, + "step": 7123 + }, + { + "epoch": 0.49465352034439664, + "grad_norm": 2.9656862054456883, + "learning_rate": 5.329797417763365e-06, + "loss": 0.3652, + "step": 7124 + }, + { + "epoch": 0.4947229551451187, + "grad_norm": 2.9987462203281803, + "learning_rate": 5.32867537091364e-06, + "loss": 0.2055, + "step": 7125 + }, + { + "epoch": 0.49479238994584085, + "grad_norm": 3.255309700091123, + "learning_rate": 5.327553307439887e-06, + "loss": 0.3632, + "step": 7126 + }, + { + "epoch": 0.494861824746563, + "grad_norm": 3.8199439769958374, + "learning_rate": 5.326431227398857e-06, + "loss": 0.5408, + "step": 7127 + }, + { + "epoch": 0.4949312595472851, + "grad_norm": 5.576465449820448, + "learning_rate": 5.325309130847307e-06, + "loss": 0.3894, + "step": 7128 + }, + { + "epoch": 0.4950006943480072, + "grad_norm": 3.799649889176957, + "learning_rate": 5.3241870178419885e-06, + "loss": 0.4203, + "step": 7129 + }, + { + "epoch": 0.49507012914872933, + "grad_norm": 4.3278162787920085, + "learning_rate": 5.323064888439657e-06, + "loss": 0.6124, + "step": 7130 + }, + { + "epoch": 0.49513956394945147, + "grad_norm": 4.242917064519879, + "learning_rate": 5.321942742697069e-06, + "loss": 0.5528, + "step": 7131 + }, + { + "epoch": 0.4952089987501736, + "grad_norm": 4.507205001983563, + "learning_rate": 5.320820580670982e-06, + "loss": 0.7479, + "step": 7132 + }, + { + "epoch": 0.49527843355089574, + "grad_norm": 3.8092447295342855, + "learning_rate": 5.319698402418154e-06, + "loss": 0.4543, + "step": 7133 + }, + { + "epoch": 0.4953478683516178, + "grad_norm": 3.7839617013593854, + "learning_rate": 5.318576207995342e-06, + "loss": 0.5772, + "step": 7134 + }, + { + "epoch": 0.49541730315233995, + "grad_norm": 4.512963681277308, + "learning_rate": 5.3174539974593055e-06, + "loss": 0.6533, + "step": 7135 + }, + { + "epoch": 0.4954867379530621, + "grad_norm": 3.75318013876013, + "learning_rate": 5.316331770866803e-06, + "loss": 0.3972, + "step": 7136 + }, + { + "epoch": 0.4955561727537842, + "grad_norm": 3.1119616258028784, + "learning_rate": 5.3152095282746e-06, + "loss": 0.2738, + "step": 7137 + }, + { + "epoch": 0.4956256075545063, + "grad_norm": 3.460554031361484, + "learning_rate": 5.3140872697394555e-06, + "loss": 0.4003, + "step": 7138 + }, + { + "epoch": 0.49569504235522843, + "grad_norm": 4.974105861641772, + "learning_rate": 5.312964995318132e-06, + "loss": 0.7448, + "step": 7139 + }, + { + "epoch": 0.49576447715595057, + "grad_norm": 5.708659071362712, + "learning_rate": 5.311842705067392e-06, + "loss": 0.2668, + "step": 7140 + }, + { + "epoch": 0.4958339119566727, + "grad_norm": 3.8930872092586766, + "learning_rate": 5.310720399044002e-06, + "loss": 0.6422, + "step": 7141 + }, + { + 
"epoch": 0.4959033467573948, + "grad_norm": 4.108552338881843, + "learning_rate": 5.3095980773047265e-06, + "loss": 0.4942, + "step": 7142 + }, + { + "epoch": 0.4959727815581169, + "grad_norm": 3.763301652610291, + "learning_rate": 5.308475739906329e-06, + "loss": 0.4892, + "step": 7143 + }, + { + "epoch": 0.49604221635883905, + "grad_norm": 3.4021805320908554, + "learning_rate": 5.307353386905579e-06, + "loss": 0.455, + "step": 7144 + }, + { + "epoch": 0.4961116511595612, + "grad_norm": 2.520922171228968, + "learning_rate": 5.306231018359242e-06, + "loss": 0.2056, + "step": 7145 + }, + { + "epoch": 0.4961810859602833, + "grad_norm": 3.8460211549509062, + "learning_rate": 5.305108634324087e-06, + "loss": 0.4752, + "step": 7146 + }, + { + "epoch": 0.4962505207610054, + "grad_norm": 2.8812511183880996, + "learning_rate": 5.303986234856882e-06, + "loss": 0.262, + "step": 7147 + }, + { + "epoch": 0.49631995556172753, + "grad_norm": 3.190455172760793, + "learning_rate": 5.302863820014396e-06, + "loss": 0.3286, + "step": 7148 + }, + { + "epoch": 0.49638939036244967, + "grad_norm": 4.792910080450556, + "learning_rate": 5.301741389853403e-06, + "loss": 0.6328, + "step": 7149 + }, + { + "epoch": 0.4964588251631718, + "grad_norm": 4.752682503127999, + "learning_rate": 5.300618944430671e-06, + "loss": 0.86, + "step": 7150 + }, + { + "epoch": 0.4965282599638939, + "grad_norm": 7.409280634904407, + "learning_rate": 5.2994964838029736e-06, + "loss": 0.3251, + "step": 7151 + }, + { + "epoch": 0.496597694764616, + "grad_norm": 3.8859552868736054, + "learning_rate": 5.298374008027082e-06, + "loss": 0.4129, + "step": 7152 + }, + { + "epoch": 0.49666712956533815, + "grad_norm": 3.321691512534654, + "learning_rate": 5.29725151715977e-06, + "loss": 0.1707, + "step": 7153 + }, + { + "epoch": 0.4967365643660603, + "grad_norm": 4.804087252609198, + "learning_rate": 5.296129011257814e-06, + "loss": 0.6742, + "step": 7154 + }, + { + "epoch": 0.49680599916678236, + "grad_norm": 4.369310117087779, + "learning_rate": 5.295006490377987e-06, + "loss": 0.6106, + "step": 7155 + }, + { + "epoch": 0.4968754339675045, + "grad_norm": 4.554791532773279, + "learning_rate": 5.293883954577066e-06, + "loss": 0.5304, + "step": 7156 + }, + { + "epoch": 0.49694486876822663, + "grad_norm": 4.020452555715454, + "learning_rate": 5.292761403911828e-06, + "loss": 0.4763, + "step": 7157 + }, + { + "epoch": 0.49701430356894877, + "grad_norm": 4.341033573823346, + "learning_rate": 5.2916388384390485e-06, + "loss": 0.4591, + "step": 7158 + }, + { + "epoch": 0.4970837383696709, + "grad_norm": 3.55171904675619, + "learning_rate": 5.290516258215505e-06, + "loss": 0.4488, + "step": 7159 + }, + { + "epoch": 0.497153173170393, + "grad_norm": 5.6200575089314055, + "learning_rate": 5.289393663297981e-06, + "loss": 0.6136, + "step": 7160 + }, + { + "epoch": 0.4972226079711151, + "grad_norm": 3.4192579088732624, + "learning_rate": 5.28827105374325e-06, + "loss": 0.3172, + "step": 7161 + }, + { + "epoch": 0.49729204277183725, + "grad_norm": 3.4421862406591974, + "learning_rate": 5.287148429608098e-06, + "loss": 0.4058, + "step": 7162 + }, + { + "epoch": 0.4973614775725594, + "grad_norm": 3.494628996611738, + "learning_rate": 5.286025790949302e-06, + "loss": 0.3734, + "step": 7163 + }, + { + "epoch": 0.49743091237328146, + "grad_norm": 3.8188799078379594, + "learning_rate": 5.2849031378236445e-06, + "loss": 0.5037, + "step": 7164 + }, + { + "epoch": 0.4975003471740036, + "grad_norm": 3.5239823969836026, + "learning_rate": 5.2837804702879105e-06, + 
"loss": 0.3853, + "step": 7165 + }, + { + "epoch": 0.49756978197472573, + "grad_norm": 4.128526274174642, + "learning_rate": 5.2826577883988805e-06, + "loss": 0.5364, + "step": 7166 + }, + { + "epoch": 0.49763921677544787, + "grad_norm": 4.125547750480884, + "learning_rate": 5.281535092213339e-06, + "loss": 0.3785, + "step": 7167 + }, + { + "epoch": 0.49770865157617, + "grad_norm": 3.924061951826711, + "learning_rate": 5.28041238178807e-06, + "loss": 0.5469, + "step": 7168 + }, + { + "epoch": 0.4977780863768921, + "grad_norm": 4.769729360190772, + "learning_rate": 5.279289657179862e-06, + "loss": 0.7011, + "step": 7169 + }, + { + "epoch": 0.4978475211776142, + "grad_norm": 4.728584667652929, + "learning_rate": 5.278166918445499e-06, + "loss": 0.5105, + "step": 7170 + }, + { + "epoch": 0.49791695597833635, + "grad_norm": 3.3970712513989882, + "learning_rate": 5.277044165641767e-06, + "loss": 0.3371, + "step": 7171 + }, + { + "epoch": 0.4979863907790585, + "grad_norm": 4.008725301555456, + "learning_rate": 5.2759213988254544e-06, + "loss": 0.5655, + "step": 7172 + }, + { + "epoch": 0.49805582557978056, + "grad_norm": 6.875714919516217, + "learning_rate": 5.274798618053352e-06, + "loss": 0.9303, + "step": 7173 + }, + { + "epoch": 0.4981252603805027, + "grad_norm": 3.8747047293998595, + "learning_rate": 5.273675823382245e-06, + "loss": 0.4319, + "step": 7174 + }, + { + "epoch": 0.49819469518122483, + "grad_norm": 2.9699327859739992, + "learning_rate": 5.272553014868924e-06, + "loss": 0.3657, + "step": 7175 + }, + { + "epoch": 0.49826412998194697, + "grad_norm": 2.371481669411635, + "learning_rate": 5.271430192570181e-06, + "loss": 0.246, + "step": 7176 + }, + { + "epoch": 0.49833356478266905, + "grad_norm": 4.078410693745689, + "learning_rate": 5.270307356542805e-06, + "loss": 0.4913, + "step": 7177 + }, + { + "epoch": 0.4984029995833912, + "grad_norm": 4.309350950486881, + "learning_rate": 5.269184506843589e-06, + "loss": 0.6337, + "step": 7178 + }, + { + "epoch": 0.4984724343841133, + "grad_norm": 4.594534756780209, + "learning_rate": 5.268061643529326e-06, + "loss": 0.5938, + "step": 7179 + }, + { + "epoch": 0.49854186918483545, + "grad_norm": 3.998845901976261, + "learning_rate": 5.266938766656807e-06, + "loss": 0.4451, + "step": 7180 + }, + { + "epoch": 0.4986113039855576, + "grad_norm": 3.2176590880594107, + "learning_rate": 5.265815876282829e-06, + "loss": 0.3404, + "step": 7181 + }, + { + "epoch": 0.49868073878627966, + "grad_norm": 3.6939987142670985, + "learning_rate": 5.264692972464183e-06, + "loss": 0.5014, + "step": 7182 + }, + { + "epoch": 0.4987501735870018, + "grad_norm": 4.129135520229409, + "learning_rate": 5.263570055257668e-06, + "loss": 0.6594, + "step": 7183 + }, + { + "epoch": 0.49881960838772393, + "grad_norm": 2.9283959666625576, + "learning_rate": 5.262447124720076e-06, + "loss": 0.2899, + "step": 7184 + }, + { + "epoch": 0.49888904318844607, + "grad_norm": 4.875773599050903, + "learning_rate": 5.2613241809082065e-06, + "loss": 0.5409, + "step": 7185 + }, + { + "epoch": 0.49895847798916815, + "grad_norm": 3.793237469910889, + "learning_rate": 5.260201223878855e-06, + "loss": 0.5597, + "step": 7186 + }, + { + "epoch": 0.4990279127898903, + "grad_norm": 2.80112940842597, + "learning_rate": 5.25907825368882e-06, + "loss": 0.1872, + "step": 7187 + }, + { + "epoch": 0.4990973475906124, + "grad_norm": 3.5622148434070793, + "learning_rate": 5.2579552703949e-06, + "loss": 0.3903, + "step": 7188 + }, + { + "epoch": 0.49916678239133455, + "grad_norm": 3.6302453123686633, + 
"learning_rate": 5.256832274053896e-06, + "loss": 0.5845, + "step": 7189 + }, + { + "epoch": 0.4992362171920567, + "grad_norm": 3.8159433823095594, + "learning_rate": 5.255709264722605e-06, + "loss": 0.4908, + "step": 7190 + }, + { + "epoch": 0.49930565199277877, + "grad_norm": 3.935442865805352, + "learning_rate": 5.2545862424578276e-06, + "loss": 0.6225, + "step": 7191 + }, + { + "epoch": 0.4993750867935009, + "grad_norm": 3.251944791967001, + "learning_rate": 5.2534632073163675e-06, + "loss": 0.3343, + "step": 7192 + }, + { + "epoch": 0.49944452159422303, + "grad_norm": 4.172297095128883, + "learning_rate": 5.252340159355024e-06, + "loss": 0.5531, + "step": 7193 + }, + { + "epoch": 0.49951395639494517, + "grad_norm": 4.600623832685623, + "learning_rate": 5.251217098630602e-06, + "loss": 0.6608, + "step": 7194 + }, + { + "epoch": 0.49958339119566725, + "grad_norm": 3.2144871832961814, + "learning_rate": 5.250094025199903e-06, + "loss": 0.3431, + "step": 7195 + }, + { + "epoch": 0.4996528259963894, + "grad_norm": 3.654094390946151, + "learning_rate": 5.248970939119732e-06, + "loss": 0.5365, + "step": 7196 + }, + { + "epoch": 0.4997222607971115, + "grad_norm": 2.3868768317250066, + "learning_rate": 5.24784784044689e-06, + "loss": 0.2038, + "step": 7197 + }, + { + "epoch": 0.49979169559783365, + "grad_norm": 2.9928772026534056, + "learning_rate": 5.246724729238188e-06, + "loss": 0.1364, + "step": 7198 + }, + { + "epoch": 0.49986113039855573, + "grad_norm": 1.9501663736045818, + "learning_rate": 5.2456016055504274e-06, + "loss": 0.2002, + "step": 7199 + }, + { + "epoch": 0.49993056519927787, + "grad_norm": 4.660104696791479, + "learning_rate": 5.244478469440414e-06, + "loss": 0.7036, + "step": 7200 + }, + { + "epoch": 0.5, + "grad_norm": 3.746836592493186, + "learning_rate": 5.243355320964958e-06, + "loss": 0.4439, + "step": 7201 + }, + { + "epoch": 0.5000694348007221, + "grad_norm": 3.6720621338020467, + "learning_rate": 5.2422321601808646e-06, + "loss": 0.519, + "step": 7202 + }, + { + "epoch": 0.5001388696014443, + "grad_norm": 3.619176276475976, + "learning_rate": 5.241108987144943e-06, + "loss": 0.4642, + "step": 7203 + }, + { + "epoch": 0.5002083044021663, + "grad_norm": 3.477907487767762, + "learning_rate": 5.2399858019140005e-06, + "loss": 0.4744, + "step": 7204 + }, + { + "epoch": 0.5002777392028885, + "grad_norm": 3.4226791179143947, + "learning_rate": 5.238862604544849e-06, + "loss": 0.4601, + "step": 7205 + }, + { + "epoch": 0.5003471740036106, + "grad_norm": 4.212163114215492, + "learning_rate": 5.237739395094296e-06, + "loss": 0.5548, + "step": 7206 + }, + { + "epoch": 0.5004166088043327, + "grad_norm": 3.5845944353631563, + "learning_rate": 5.236616173619155e-06, + "loss": 0.2511, + "step": 7207 + }, + { + "epoch": 0.5004860436050549, + "grad_norm": 4.317463510068775, + "learning_rate": 5.235492940176235e-06, + "loss": 0.702, + "step": 7208 + }, + { + "epoch": 0.500555478405777, + "grad_norm": 3.5267036628826256, + "learning_rate": 5.234369694822348e-06, + "loss": 0.5953, + "step": 7209 + }, + { + "epoch": 0.500624913206499, + "grad_norm": 4.603833257260333, + "learning_rate": 5.233246437614308e-06, + "loss": 0.6984, + "step": 7210 + }, + { + "epoch": 0.5006943480072212, + "grad_norm": 3.119370751023977, + "learning_rate": 5.232123168608926e-06, + "loss": 0.2596, + "step": 7211 + }, + { + "epoch": 0.5007637828079433, + "grad_norm": 3.594355511152304, + "learning_rate": 5.230999887863018e-06, + "loss": 0.3693, + "step": 7212 + }, + { + "epoch": 0.5008332176086655, + 
"grad_norm": 4.278470400684505, + "learning_rate": 5.229876595433395e-06, + "loss": 0.5303, + "step": 7213 + }, + { + "epoch": 0.5009026524093876, + "grad_norm": 2.9327968403421445, + "learning_rate": 5.228753291376875e-06, + "loss": 0.3906, + "step": 7214 + }, + { + "epoch": 0.5009720872101097, + "grad_norm": 3.0158133264807714, + "learning_rate": 5.227629975750274e-06, + "loss": 0.3153, + "step": 7215 + }, + { + "epoch": 0.5010415220108319, + "grad_norm": 3.940323533922246, + "learning_rate": 5.226506648610404e-06, + "loss": 0.2882, + "step": 7216 + }, + { + "epoch": 0.5011109568115539, + "grad_norm": 3.527280735717722, + "learning_rate": 5.225383310014086e-06, + "loss": 0.4358, + "step": 7217 + }, + { + "epoch": 0.5011803916122761, + "grad_norm": 3.543320125475908, + "learning_rate": 5.224259960018135e-06, + "loss": 0.6069, + "step": 7218 + }, + { + "epoch": 0.5012498264129982, + "grad_norm": 3.725611661775222, + "learning_rate": 5.223136598679368e-06, + "loss": 0.4378, + "step": 7219 + }, + { + "epoch": 0.5013192612137203, + "grad_norm": 3.804259297246827, + "learning_rate": 5.2220132260546045e-06, + "loss": 0.5409, + "step": 7220 + }, + { + "epoch": 0.5013886960144425, + "grad_norm": 4.327521733094509, + "learning_rate": 5.2208898422006634e-06, + "loss": 0.4174, + "step": 7221 + }, + { + "epoch": 0.5014581308151645, + "grad_norm": 3.6486906051113133, + "learning_rate": 5.219766447174364e-06, + "loss": 0.3962, + "step": 7222 + }, + { + "epoch": 0.5015275656158866, + "grad_norm": 2.0114211985789847, + "learning_rate": 5.218643041032526e-06, + "loss": 0.1319, + "step": 7223 + }, + { + "epoch": 0.5015970004166088, + "grad_norm": 3.083387601193856, + "learning_rate": 5.217519623831971e-06, + "loss": 0.2501, + "step": 7224 + }, + { + "epoch": 0.5016664352173309, + "grad_norm": 2.98307443287388, + "learning_rate": 5.2163961956295196e-06, + "loss": 0.3029, + "step": 7225 + }, + { + "epoch": 0.5017358700180531, + "grad_norm": 3.767359754295796, + "learning_rate": 5.215272756481994e-06, + "loss": 0.397, + "step": 7226 + }, + { + "epoch": 0.5018053048187752, + "grad_norm": 3.7242593811855165, + "learning_rate": 5.214149306446215e-06, + "loss": 0.4136, + "step": 7227 + }, + { + "epoch": 0.5018747396194972, + "grad_norm": 3.5941509184210534, + "learning_rate": 5.2130258455790075e-06, + "loss": 0.5528, + "step": 7228 + }, + { + "epoch": 0.5019441744202194, + "grad_norm": 4.336523150469935, + "learning_rate": 5.211902373937192e-06, + "loss": 0.6431, + "step": 7229 + }, + { + "epoch": 0.5020136092209415, + "grad_norm": 4.142841145177535, + "learning_rate": 5.210778891577595e-06, + "loss": 0.5995, + "step": 7230 + }, + { + "epoch": 0.5020830440216637, + "grad_norm": 4.208289591673076, + "learning_rate": 5.209655398557041e-06, + "loss": 0.3495, + "step": 7231 + }, + { + "epoch": 0.5021524788223858, + "grad_norm": 4.3535542251548796, + "learning_rate": 5.208531894932354e-06, + "loss": 0.5908, + "step": 7232 + }, + { + "epoch": 0.5022219136231079, + "grad_norm": 3.3485461579942655, + "learning_rate": 5.207408380760358e-06, + "loss": 0.3773, + "step": 7233 + }, + { + "epoch": 0.50229134842383, + "grad_norm": 4.239984321277322, + "learning_rate": 5.206284856097883e-06, + "loss": 0.533, + "step": 7234 + }, + { + "epoch": 0.5023607832245521, + "grad_norm": 3.4166096039427867, + "learning_rate": 5.205161321001753e-06, + "loss": 0.5408, + "step": 7235 + }, + { + "epoch": 0.5024302180252743, + "grad_norm": 3.954747818849586, + "learning_rate": 5.204037775528794e-06, + "loss": 0.5229, + "step": 7236 + }, + { + 
"epoch": 0.5024996528259964, + "grad_norm": 3.9410199089888955, + "learning_rate": 5.202914219735836e-06, + "loss": 0.5692, + "step": 7237 + }, + { + "epoch": 0.5025690876267185, + "grad_norm": 3.972022698930772, + "learning_rate": 5.201790653679707e-06, + "loss": 0.4713, + "step": 7238 + }, + { + "epoch": 0.5026385224274407, + "grad_norm": 3.967305803979107, + "learning_rate": 5.200667077417234e-06, + "loss": 0.6258, + "step": 7239 + }, + { + "epoch": 0.5027079572281627, + "grad_norm": 5.519282879703435, + "learning_rate": 5.199543491005249e-06, + "loss": 0.7682, + "step": 7240 + }, + { + "epoch": 0.5027773920288848, + "grad_norm": 3.2535404542959037, + "learning_rate": 5.198419894500579e-06, + "loss": 0.3684, + "step": 7241 + }, + { + "epoch": 0.502846826829607, + "grad_norm": 3.3846768529165505, + "learning_rate": 5.197296287960055e-06, + "loss": 0.3128, + "step": 7242 + }, + { + "epoch": 0.5029162616303291, + "grad_norm": 4.429698524310986, + "learning_rate": 5.196172671440508e-06, + "loss": 0.502, + "step": 7243 + }, + { + "epoch": 0.5029856964310513, + "grad_norm": 2.7409830824998163, + "learning_rate": 5.19504904499877e-06, + "loss": 0.3191, + "step": 7244 + }, + { + "epoch": 0.5030551312317734, + "grad_norm": 4.208283805060563, + "learning_rate": 5.193925408691671e-06, + "loss": 0.7864, + "step": 7245 + }, + { + "epoch": 0.5031245660324954, + "grad_norm": 4.575571167096378, + "learning_rate": 5.192801762576045e-06, + "loss": 0.4382, + "step": 7246 + }, + { + "epoch": 0.5031940008332176, + "grad_norm": 4.7332623190695475, + "learning_rate": 5.191678106708724e-06, + "loss": 0.6272, + "step": 7247 + }, + { + "epoch": 0.5032634356339397, + "grad_norm": 4.878381439411061, + "learning_rate": 5.190554441146541e-06, + "loss": 0.5952, + "step": 7248 + }, + { + "epoch": 0.5033328704346619, + "grad_norm": 3.5523760281111154, + "learning_rate": 5.189430765946331e-06, + "loss": 0.5221, + "step": 7249 + }, + { + "epoch": 0.503402305235384, + "grad_norm": 2.836863366208607, + "learning_rate": 5.188307081164926e-06, + "loss": 0.3598, + "step": 7250 + }, + { + "epoch": 0.5034717400361061, + "grad_norm": 3.7142630293813452, + "learning_rate": 5.187183386859162e-06, + "loss": 0.329, + "step": 7251 + }, + { + "epoch": 0.5035411748368283, + "grad_norm": 2.6228456310122588, + "learning_rate": 5.186059683085874e-06, + "loss": 0.1799, + "step": 7252 + }, + { + "epoch": 0.5036106096375503, + "grad_norm": 3.7124971982354755, + "learning_rate": 5.1849359699018974e-06, + "loss": 0.3587, + "step": 7253 + }, + { + "epoch": 0.5036800444382724, + "grad_norm": 3.7997365969125294, + "learning_rate": 5.18381224736407e-06, + "loss": 0.598, + "step": 7254 + }, + { + "epoch": 0.5037494792389946, + "grad_norm": 3.2179499113229424, + "learning_rate": 5.182688515529227e-06, + "loss": 0.156, + "step": 7255 + }, + { + "epoch": 0.5038189140397167, + "grad_norm": 4.255157356847298, + "learning_rate": 5.181564774454205e-06, + "loss": 0.4395, + "step": 7256 + }, + { + "epoch": 0.5038883488404389, + "grad_norm": 2.768835391935664, + "learning_rate": 5.180441024195843e-06, + "loss": 0.1541, + "step": 7257 + }, + { + "epoch": 0.503957783641161, + "grad_norm": 4.988180931525735, + "learning_rate": 5.179317264810977e-06, + "loss": 0.7126, + "step": 7258 + }, + { + "epoch": 0.504027218441883, + "grad_norm": 3.634251859391279, + "learning_rate": 5.178193496356446e-06, + "loss": 0.4069, + "step": 7259 + }, + { + "epoch": 0.5040966532426052, + "grad_norm": 3.4521715364683736, + "learning_rate": 5.177069718889093e-06, + "loss": 0.5582, + 
"step": 7260 + }, + { + "epoch": 0.5041660880433273, + "grad_norm": 3.6005713469618175, + "learning_rate": 5.17594593246575e-06, + "loss": 0.4963, + "step": 7261 + }, + { + "epoch": 0.5042355228440495, + "grad_norm": 4.2469378512974005, + "learning_rate": 5.174822137143263e-06, + "loss": 0.4855, + "step": 7262 + }, + { + "epoch": 0.5043049576447716, + "grad_norm": 3.8001143595842377, + "learning_rate": 5.173698332978469e-06, + "loss": 0.3286, + "step": 7263 + }, + { + "epoch": 0.5043743924454936, + "grad_norm": 4.392261018707386, + "learning_rate": 5.1725745200282114e-06, + "loss": 0.564, + "step": 7264 + }, + { + "epoch": 0.5044438272462158, + "grad_norm": 3.7688785137422776, + "learning_rate": 5.171450698349329e-06, + "loss": 0.4837, + "step": 7265 + }, + { + "epoch": 0.5045132620469379, + "grad_norm": 4.346456270743364, + "learning_rate": 5.170326867998665e-06, + "loss": 0.5479, + "step": 7266 + }, + { + "epoch": 0.50458269684766, + "grad_norm": 2.2108039164863627, + "learning_rate": 5.16920302903306e-06, + "loss": 0.1792, + "step": 7267 + }, + { + "epoch": 0.5046521316483822, + "grad_norm": 3.140518888563056, + "learning_rate": 5.168079181509357e-06, + "loss": 0.3722, + "step": 7268 + }, + { + "epoch": 0.5047215664491043, + "grad_norm": 3.938158284869595, + "learning_rate": 5.166955325484398e-06, + "loss": 0.292, + "step": 7269 + }, + { + "epoch": 0.5047910012498265, + "grad_norm": 4.015870184255071, + "learning_rate": 5.165831461015031e-06, + "loss": 0.5239, + "step": 7270 + }, + { + "epoch": 0.5048604360505485, + "grad_norm": 2.780512823474123, + "learning_rate": 5.1647075881580935e-06, + "loss": 0.2994, + "step": 7271 + }, + { + "epoch": 0.5049298708512706, + "grad_norm": 2.833534239456496, + "learning_rate": 5.163583706970433e-06, + "loss": 0.2391, + "step": 7272 + }, + { + "epoch": 0.5049993056519928, + "grad_norm": 4.251828879688886, + "learning_rate": 5.162459817508896e-06, + "loss": 0.5862, + "step": 7273 + }, + { + "epoch": 0.5050687404527149, + "grad_norm": 3.613754087450469, + "learning_rate": 5.161335919830322e-06, + "loss": 0.4699, + "step": 7274 + }, + { + "epoch": 0.5051381752534371, + "grad_norm": 3.090862377531071, + "learning_rate": 5.160212013991561e-06, + "loss": 0.1662, + "step": 7275 + }, + { + "epoch": 0.5052076100541592, + "grad_norm": 3.139315243955183, + "learning_rate": 5.15908810004946e-06, + "loss": 0.3352, + "step": 7276 + }, + { + "epoch": 0.5052770448548812, + "grad_norm": 3.6782138058658727, + "learning_rate": 5.157964178060859e-06, + "loss": 0.215, + "step": 7277 + }, + { + "epoch": 0.5053464796556034, + "grad_norm": 4.07282405471761, + "learning_rate": 5.156840248082611e-06, + "loss": 0.7034, + "step": 7278 + }, + { + "epoch": 0.5054159144563255, + "grad_norm": 3.504236977706975, + "learning_rate": 5.15571631017156e-06, + "loss": 0.37, + "step": 7279 + }, + { + "epoch": 0.5054853492570476, + "grad_norm": 3.586813778622438, + "learning_rate": 5.154592364384554e-06, + "loss": 0.3322, + "step": 7280 + }, + { + "epoch": 0.5055547840577698, + "grad_norm": 2.3498077722842603, + "learning_rate": 5.153468410778441e-06, + "loss": 0.1749, + "step": 7281 + }, + { + "epoch": 0.5056242188584918, + "grad_norm": 5.903836002185436, + "learning_rate": 5.152344449410069e-06, + "loss": 0.765, + "step": 7282 + }, + { + "epoch": 0.505693653659214, + "grad_norm": 3.7105459873810984, + "learning_rate": 5.1512204803362895e-06, + "loss": 0.3497, + "step": 7283 + }, + { + "epoch": 0.5057630884599361, + "grad_norm": 4.523459835859084, + "learning_rate": 5.150096503613947e-06, + 
"loss": 0.7992, + "step": 7284 + }, + { + "epoch": 0.5058325232606582, + "grad_norm": 5.172702330291772, + "learning_rate": 5.1489725192998955e-06, + "loss": 0.6305, + "step": 7285 + }, + { + "epoch": 0.5059019580613804, + "grad_norm": 4.0215923493998496, + "learning_rate": 5.14784852745098e-06, + "loss": 0.3395, + "step": 7286 + }, + { + "epoch": 0.5059713928621025, + "grad_norm": 4.326317382938955, + "learning_rate": 5.146724528124055e-06, + "loss": 0.4767, + "step": 7287 + }, + { + "epoch": 0.5060408276628247, + "grad_norm": 3.785356305000666, + "learning_rate": 5.1456005213759674e-06, + "loss": 0.4784, + "step": 7288 + }, + { + "epoch": 0.5061102624635467, + "grad_norm": 3.213885380427425, + "learning_rate": 5.144476507263573e-06, + "loss": 0.4045, + "step": 7289 + }, + { + "epoch": 0.5061796972642688, + "grad_norm": 4.232182679619171, + "learning_rate": 5.143352485843719e-06, + "loss": 0.2863, + "step": 7290 + }, + { + "epoch": 0.506249132064991, + "grad_norm": 3.4622279350739253, + "learning_rate": 5.142228457173257e-06, + "loss": 0.5058, + "step": 7291 + }, + { + "epoch": 0.5063185668657131, + "grad_norm": 4.010560856621945, + "learning_rate": 5.141104421309043e-06, + "loss": 0.6036, + "step": 7292 + }, + { + "epoch": 0.5063880016664353, + "grad_norm": 4.454136884777085, + "learning_rate": 5.139980378307926e-06, + "loss": 0.5393, + "step": 7293 + }, + { + "epoch": 0.5064574364671574, + "grad_norm": 4.022361922257905, + "learning_rate": 5.138856328226759e-06, + "loss": 0.5726, + "step": 7294 + }, + { + "epoch": 0.5065268712678794, + "grad_norm": 6.854155959548227, + "learning_rate": 5.137732271122398e-06, + "loss": 0.4179, + "step": 7295 + }, + { + "epoch": 0.5065963060686016, + "grad_norm": 4.435266811393554, + "learning_rate": 5.1366082070516935e-06, + "loss": 0.4173, + "step": 7296 + }, + { + "epoch": 0.5066657408693237, + "grad_norm": 3.188885397761379, + "learning_rate": 5.135484136071499e-06, + "loss": 0.2022, + "step": 7297 + }, + { + "epoch": 0.5067351756700458, + "grad_norm": 3.9331959380564467, + "learning_rate": 5.134360058238672e-06, + "loss": 0.4341, + "step": 7298 + }, + { + "epoch": 0.506804610470768, + "grad_norm": 4.152511559922815, + "learning_rate": 5.1332359736100655e-06, + "loss": 0.4742, + "step": 7299 + }, + { + "epoch": 0.50687404527149, + "grad_norm": 4.397799624821824, + "learning_rate": 5.132111882242533e-06, + "loss": 0.6547, + "step": 7300 + }, + { + "epoch": 0.5069434800722122, + "grad_norm": 9.070957585781645, + "learning_rate": 5.130987784192932e-06, + "loss": 0.3703, + "step": 7301 + }, + { + "epoch": 0.5070129148729343, + "grad_norm": 4.2665091829176065, + "learning_rate": 5.129863679518116e-06, + "loss": 0.3536, + "step": 7302 + }, + { + "epoch": 0.5070823496736564, + "grad_norm": 4.678782200414299, + "learning_rate": 5.1287395682749444e-06, + "loss": 0.6875, + "step": 7303 + }, + { + "epoch": 0.5071517844743786, + "grad_norm": 3.6715605865168413, + "learning_rate": 5.127615450520268e-06, + "loss": 0.4513, + "step": 7304 + }, + { + "epoch": 0.5072212192751007, + "grad_norm": 3.7407373989866155, + "learning_rate": 5.126491326310949e-06, + "loss": 0.4072, + "step": 7305 + }, + { + "epoch": 0.5072906540758229, + "grad_norm": 2.8431098758028597, + "learning_rate": 5.125367195703841e-06, + "loss": 0.3618, + "step": 7306 + }, + { + "epoch": 0.5073600888765449, + "grad_norm": 5.255642422136762, + "learning_rate": 5.124243058755801e-06, + "loss": 0.5029, + "step": 7307 + }, + { + "epoch": 0.507429523677267, + "grad_norm": 5.0063359438651185, + 
"learning_rate": 5.123118915523689e-06, + "loss": 0.5127, + "step": 7308 + }, + { + "epoch": 0.5074989584779892, + "grad_norm": 3.7628705433561844, + "learning_rate": 5.121994766064361e-06, + "loss": 0.5409, + "step": 7309 + }, + { + "epoch": 0.5075683932787113, + "grad_norm": 4.046950010828195, + "learning_rate": 5.120870610434676e-06, + "loss": 0.6884, + "step": 7310 + }, + { + "epoch": 0.5076378280794334, + "grad_norm": 3.2299242185746277, + "learning_rate": 5.1197464486914925e-06, + "loss": 0.4736, + "step": 7311 + }, + { + "epoch": 0.5077072628801556, + "grad_norm": 3.3990792276475648, + "learning_rate": 5.11862228089167e-06, + "loss": 0.442, + "step": 7312 + }, + { + "epoch": 0.5077766976808776, + "grad_norm": 4.03911631318669, + "learning_rate": 5.117498107092064e-06, + "loss": 0.5153, + "step": 7313 + }, + { + "epoch": 0.5078461324815998, + "grad_norm": 4.619454303163214, + "learning_rate": 5.116373927349538e-06, + "loss": 0.5535, + "step": 7314 + }, + { + "epoch": 0.5079155672823219, + "grad_norm": 4.215895479172811, + "learning_rate": 5.115249741720953e-06, + "loss": 0.4981, + "step": 7315 + }, + { + "epoch": 0.507985002083044, + "grad_norm": 5.1518174910545245, + "learning_rate": 5.1141255502631626e-06, + "loss": 0.5276, + "step": 7316 + }, + { + "epoch": 0.5080544368837662, + "grad_norm": 3.639036208755218, + "learning_rate": 5.113001353033034e-06, + "loss": 0.4965, + "step": 7317 + }, + { + "epoch": 0.5081238716844882, + "grad_norm": 4.997077914934165, + "learning_rate": 5.111877150087423e-06, + "loss": 0.5622, + "step": 7318 + }, + { + "epoch": 0.5081933064852104, + "grad_norm": 3.3227340752957364, + "learning_rate": 5.110752941483192e-06, + "loss": 0.4569, + "step": 7319 + }, + { + "epoch": 0.5082627412859325, + "grad_norm": 3.333572433367519, + "learning_rate": 5.109628727277204e-06, + "loss": 0.3045, + "step": 7320 + }, + { + "epoch": 0.5083321760866546, + "grad_norm": 3.869795359563918, + "learning_rate": 5.10850450752632e-06, + "loss": 0.3898, + "step": 7321 + }, + { + "epoch": 0.5084016108873768, + "grad_norm": 3.8734084260986616, + "learning_rate": 5.107380282287397e-06, + "loss": 0.5122, + "step": 7322 + }, + { + "epoch": 0.5084710456880989, + "grad_norm": 2.6485488193135343, + "learning_rate": 5.106256051617303e-06, + "loss": 0.3834, + "step": 7323 + }, + { + "epoch": 0.508540480488821, + "grad_norm": 4.331602587561121, + "learning_rate": 5.105131815572899e-06, + "loss": 0.6433, + "step": 7324 + }, + { + "epoch": 0.5086099152895431, + "grad_norm": 3.7808476035374707, + "learning_rate": 5.104007574211046e-06, + "loss": 0.2841, + "step": 7325 + }, + { + "epoch": 0.5086793500902652, + "grad_norm": 4.351890799148692, + "learning_rate": 5.102883327588608e-06, + "loss": 0.4539, + "step": 7326 + }, + { + "epoch": 0.5087487848909874, + "grad_norm": 3.7487331575200065, + "learning_rate": 5.101759075762446e-06, + "loss": 0.237, + "step": 7327 + }, + { + "epoch": 0.5088182196917095, + "grad_norm": 3.239051486277959, + "learning_rate": 5.100634818789427e-06, + "loss": 0.2157, + "step": 7328 + }, + { + "epoch": 0.5088876544924316, + "grad_norm": 3.9147030633068347, + "learning_rate": 5.099510556726411e-06, + "loss": 0.7202, + "step": 7329 + }, + { + "epoch": 0.5089570892931538, + "grad_norm": 3.515192339882417, + "learning_rate": 5.098386289630266e-06, + "loss": 0.4358, + "step": 7330 + }, + { + "epoch": 0.5090265240938758, + "grad_norm": 3.1993609765652256, + "learning_rate": 5.097262017557853e-06, + "loss": 0.2289, + "step": 7331 + }, + { + "epoch": 0.509095958894598, + 
"grad_norm": 3.3784722802459024, + "learning_rate": 5.096137740566036e-06, + "loss": 0.2266, + "step": 7332 + }, + { + "epoch": 0.5091653936953201, + "grad_norm": 3.6923315489913824, + "learning_rate": 5.095013458711682e-06, + "loss": 0.4823, + "step": 7333 + }, + { + "epoch": 0.5092348284960422, + "grad_norm": 4.338774770405565, + "learning_rate": 5.093889172051655e-06, + "loss": 0.3231, + "step": 7334 + }, + { + "epoch": 0.5093042632967644, + "grad_norm": 3.4343717968562437, + "learning_rate": 5.092764880642819e-06, + "loss": 0.4444, + "step": 7335 + }, + { + "epoch": 0.5093736980974864, + "grad_norm": 2.209798312569309, + "learning_rate": 5.091640584542039e-06, + "loss": 0.2428, + "step": 7336 + }, + { + "epoch": 0.5094431328982085, + "grad_norm": 3.583212744776147, + "learning_rate": 5.090516283806185e-06, + "loss": 0.4566, + "step": 7337 + }, + { + "epoch": 0.5095125676989307, + "grad_norm": 3.648913615352976, + "learning_rate": 5.089391978492118e-06, + "loss": 0.3551, + "step": 7338 + }, + { + "epoch": 0.5095820024996528, + "grad_norm": 4.457411859698555, + "learning_rate": 5.0882676686567056e-06, + "loss": 0.5513, + "step": 7339 + }, + { + "epoch": 0.509651437300375, + "grad_norm": 4.651116125006129, + "learning_rate": 5.087143354356816e-06, + "loss": 0.5883, + "step": 7340 + }, + { + "epoch": 0.5097208721010971, + "grad_norm": 3.9702899954139377, + "learning_rate": 5.086019035649313e-06, + "loss": 0.571, + "step": 7341 + }, + { + "epoch": 0.5097903069018191, + "grad_norm": 4.366411952162467, + "learning_rate": 5.084894712591065e-06, + "loss": 0.6354, + "step": 7342 + }, + { + "epoch": 0.5098597417025413, + "grad_norm": 4.039745825946049, + "learning_rate": 5.083770385238938e-06, + "loss": 0.6537, + "step": 7343 + }, + { + "epoch": 0.5099291765032634, + "grad_norm": 2.7223353738746323, + "learning_rate": 5.0826460536498e-06, + "loss": 0.235, + "step": 7344 + }, + { + "epoch": 0.5099986113039856, + "grad_norm": 3.892021564779901, + "learning_rate": 5.081521717880517e-06, + "loss": 0.3636, + "step": 7345 + }, + { + "epoch": 0.5100680461047077, + "grad_norm": 3.3003064540332017, + "learning_rate": 5.080397377987959e-06, + "loss": 0.509, + "step": 7346 + }, + { + "epoch": 0.5101374809054298, + "grad_norm": 4.099239393762142, + "learning_rate": 5.079273034028993e-06, + "loss": 0.3808, + "step": 7347 + }, + { + "epoch": 0.510206915706152, + "grad_norm": 5.045927676357197, + "learning_rate": 5.078148686060485e-06, + "loss": 0.6752, + "step": 7348 + }, + { + "epoch": 0.510276350506874, + "grad_norm": 3.9211210312711975, + "learning_rate": 5.077024334139307e-06, + "loss": 0.5902, + "step": 7349 + }, + { + "epoch": 0.5103457853075962, + "grad_norm": 2.5422465456688115, + "learning_rate": 5.075899978322324e-06, + "loss": 0.2332, + "step": 7350 + }, + { + "epoch": 0.5104152201083183, + "grad_norm": 3.7084603078155274, + "learning_rate": 5.074775618666407e-06, + "loss": 0.3367, + "step": 7351 + }, + { + "epoch": 0.5104846549090404, + "grad_norm": 3.1723032394773703, + "learning_rate": 5.073651255228422e-06, + "loss": 0.1995, + "step": 7352 + }, + { + "epoch": 0.5105540897097626, + "grad_norm": 4.87057584060196, + "learning_rate": 5.072526888065241e-06, + "loss": 0.3142, + "step": 7353 + }, + { + "epoch": 0.5106235245104846, + "grad_norm": 5.192692942379798, + "learning_rate": 5.071402517233732e-06, + "loss": 0.6636, + "step": 7354 + }, + { + "epoch": 0.5106929593112067, + "grad_norm": 4.121587746243678, + "learning_rate": 5.070278142790765e-06, + "loss": 0.6396, + "step": 7355 + }, + { + 
"epoch": 0.5107623941119289, + "grad_norm": 4.72840724797986, + "learning_rate": 5.069153764793208e-06, + "loss": 0.5234, + "step": 7356 + }, + { + "epoch": 0.510831828912651, + "grad_norm": 4.898512609125218, + "learning_rate": 5.068029383297934e-06, + "loss": 0.5852, + "step": 7357 + }, + { + "epoch": 0.5109012637133732, + "grad_norm": 4.567178052927413, + "learning_rate": 5.066904998361808e-06, + "loss": 0.3001, + "step": 7358 + }, + { + "epoch": 0.5109706985140953, + "grad_norm": 3.367337365241316, + "learning_rate": 5.065780610041703e-06, + "loss": 0.2904, + "step": 7359 + }, + { + "epoch": 0.5110401333148173, + "grad_norm": 3.912805513220785, + "learning_rate": 5.064656218394491e-06, + "loss": 0.5044, + "step": 7360 + }, + { + "epoch": 0.5111095681155395, + "grad_norm": 3.650919678174253, + "learning_rate": 5.0635318234770394e-06, + "loss": 0.637, + "step": 7361 + }, + { + "epoch": 0.5111790029162616, + "grad_norm": 2.868649650562126, + "learning_rate": 5.062407425346221e-06, + "loss": 0.2507, + "step": 7362 + }, + { + "epoch": 0.5112484377169838, + "grad_norm": 4.067788908101779, + "learning_rate": 5.061283024058905e-06, + "loss": 0.4835, + "step": 7363 + }, + { + "epoch": 0.5113178725177059, + "grad_norm": 4.138576359209665, + "learning_rate": 5.060158619671963e-06, + "loss": 0.3445, + "step": 7364 + }, + { + "epoch": 0.511387307318428, + "grad_norm": 4.09788121012581, + "learning_rate": 5.059034212242266e-06, + "loss": 0.4249, + "step": 7365 + }, + { + "epoch": 0.5114567421191502, + "grad_norm": 3.6792889648849867, + "learning_rate": 5.057909801826686e-06, + "loss": 0.4177, + "step": 7366 + }, + { + "epoch": 0.5115261769198722, + "grad_norm": 3.9235997644277627, + "learning_rate": 5.056785388482094e-06, + "loss": 0.4392, + "step": 7367 + }, + { + "epoch": 0.5115956117205943, + "grad_norm": 3.6891302097659935, + "learning_rate": 5.055660972265359e-06, + "loss": 0.5416, + "step": 7368 + }, + { + "epoch": 0.5116650465213165, + "grad_norm": 3.7346039332093857, + "learning_rate": 5.054536553233357e-06, + "loss": 0.5202, + "step": 7369 + }, + { + "epoch": 0.5117344813220386, + "grad_norm": 4.190990611199103, + "learning_rate": 5.053412131442957e-06, + "loss": 0.6197, + "step": 7370 + }, + { + "epoch": 0.5118039161227608, + "grad_norm": 4.6552316316834546, + "learning_rate": 5.052287706951033e-06, + "loss": 0.6024, + "step": 7371 + }, + { + "epoch": 0.5118733509234829, + "grad_norm": 2.668848292089595, + "learning_rate": 5.051163279814456e-06, + "loss": 0.2663, + "step": 7372 + }, + { + "epoch": 0.5119427857242049, + "grad_norm": 4.849814525151152, + "learning_rate": 5.050038850090097e-06, + "loss": 0.6434, + "step": 7373 + }, + { + "epoch": 0.5120122205249271, + "grad_norm": 5.532234751676624, + "learning_rate": 5.048914417834831e-06, + "loss": 0.5771, + "step": 7374 + }, + { + "epoch": 0.5120816553256492, + "grad_norm": 5.894770271013601, + "learning_rate": 5.047789983105527e-06, + "loss": 0.5901, + "step": 7375 + }, + { + "epoch": 0.5121510901263714, + "grad_norm": 3.8916712185232156, + "learning_rate": 5.046665545959062e-06, + "loss": 0.5251, + "step": 7376 + }, + { + "epoch": 0.5122205249270935, + "grad_norm": 3.643642701607656, + "learning_rate": 5.045541106452304e-06, + "loss": 0.4344, + "step": 7377 + }, + { + "epoch": 0.5122899597278155, + "grad_norm": 3.927717597354102, + "learning_rate": 5.0444166646421296e-06, + "loss": 0.415, + "step": 7378 + }, + { + "epoch": 0.5123593945285377, + "grad_norm": 4.5091329258044786, + "learning_rate": 5.0432922205854105e-06, + "loss": 0.6988, 
+ "step": 7379 + }, + { + "epoch": 0.5124288293292598, + "grad_norm": 3.8226146647026296, + "learning_rate": 5.042167774339021e-06, + "loss": 0.3855, + "step": 7380 + }, + { + "epoch": 0.5124982641299819, + "grad_norm": 4.27580246810149, + "learning_rate": 5.041043325959832e-06, + "loss": 0.6088, + "step": 7381 + }, + { + "epoch": 0.5125676989307041, + "grad_norm": 5.026632666519692, + "learning_rate": 5.039918875504719e-06, + "loss": 0.7263, + "step": 7382 + }, + { + "epoch": 0.5126371337314262, + "grad_norm": 3.929315988947903, + "learning_rate": 5.038794423030554e-06, + "loss": 0.4225, + "step": 7383 + }, + { + "epoch": 0.5127065685321484, + "grad_norm": 3.3843586499910154, + "learning_rate": 5.03766996859421e-06, + "loss": 0.2893, + "step": 7384 + }, + { + "epoch": 0.5127760033328704, + "grad_norm": 4.21622162747944, + "learning_rate": 5.036545512252562e-06, + "loss": 0.4854, + "step": 7385 + }, + { + "epoch": 0.5128454381335925, + "grad_norm": 3.1419041804649, + "learning_rate": 5.0354210540624835e-06, + "loss": 0.3465, + "step": 7386 + }, + { + "epoch": 0.5129148729343147, + "grad_norm": 3.726252964432927, + "learning_rate": 5.034296594080849e-06, + "loss": 0.5058, + "step": 7387 + }, + { + "epoch": 0.5129843077350368, + "grad_norm": 4.551023190621925, + "learning_rate": 5.03317213236453e-06, + "loss": 0.6602, + "step": 7388 + }, + { + "epoch": 0.513053742535759, + "grad_norm": 3.9417216256808816, + "learning_rate": 5.032047668970403e-06, + "loss": 0.3407, + "step": 7389 + }, + { + "epoch": 0.513123177336481, + "grad_norm": 3.614828451274167, + "learning_rate": 5.030923203955342e-06, + "loss": 0.4358, + "step": 7390 + }, + { + "epoch": 0.5131926121372031, + "grad_norm": 3.439788279758886, + "learning_rate": 5.0297987373762195e-06, + "loss": 0.2892, + "step": 7391 + }, + { + "epoch": 0.5132620469379253, + "grad_norm": 3.410438631041953, + "learning_rate": 5.02867426928991e-06, + "loss": 0.2713, + "step": 7392 + }, + { + "epoch": 0.5133314817386474, + "grad_norm": 3.1960815787517345, + "learning_rate": 5.027549799753289e-06, + "loss": 0.3394, + "step": 7393 + }, + { + "epoch": 0.5134009165393695, + "grad_norm": 4.049103593529985, + "learning_rate": 5.02642532882323e-06, + "loss": 0.5214, + "step": 7394 + }, + { + "epoch": 0.5134703513400917, + "grad_norm": 3.503746421951426, + "learning_rate": 5.025300856556609e-06, + "loss": 0.3548, + "step": 7395 + }, + { + "epoch": 0.5135397861408137, + "grad_norm": 3.5221906436998074, + "learning_rate": 5.024176383010299e-06, + "loss": 0.3894, + "step": 7396 + }, + { + "epoch": 0.5136092209415359, + "grad_norm": 4.367991582801302, + "learning_rate": 5.023051908241173e-06, + "loss": 0.4638, + "step": 7397 + }, + { + "epoch": 0.513678655742258, + "grad_norm": 3.663850545934597, + "learning_rate": 5.021927432306108e-06, + "loss": 0.3295, + "step": 7398 + }, + { + "epoch": 0.5137480905429801, + "grad_norm": 3.936735311414971, + "learning_rate": 5.020802955261981e-06, + "loss": 0.424, + "step": 7399 + }, + { + "epoch": 0.5138175253437023, + "grad_norm": 4.893341683518537, + "learning_rate": 5.019678477165661e-06, + "loss": 0.6211, + "step": 7400 + }, + { + "epoch": 0.5138869601444244, + "grad_norm": 3.8650943334381473, + "learning_rate": 5.018553998074027e-06, + "loss": 0.6089, + "step": 7401 + }, + { + "epoch": 0.5139563949451466, + "grad_norm": 5.110811447140414, + "learning_rate": 5.0174295180439535e-06, + "loss": 0.6568, + "step": 7402 + }, + { + "epoch": 0.5140258297458686, + "grad_norm": 3.4259390688196474, + "learning_rate": 
5.016305037132314e-06, + "loss": 0.2707, + "step": 7403 + }, + { + "epoch": 0.5140952645465907, + "grad_norm": 3.6219259196108067, + "learning_rate": 5.015180555395983e-06, + "loss": 0.5879, + "step": 7404 + }, + { + "epoch": 0.5141646993473129, + "grad_norm": 4.4679404760022265, + "learning_rate": 5.01405607289184e-06, + "loss": 0.4615, + "step": 7405 + }, + { + "epoch": 0.514234134148035, + "grad_norm": 4.862082444895862, + "learning_rate": 5.012931589676754e-06, + "loss": 0.2659, + "step": 7406 + }, + { + "epoch": 0.5143035689487571, + "grad_norm": 4.829242437920496, + "learning_rate": 5.0118071058076026e-06, + "loss": 0.5855, + "step": 7407 + }, + { + "epoch": 0.5143730037494793, + "grad_norm": 3.9486886250318687, + "learning_rate": 5.010682621341262e-06, + "loss": 0.3947, + "step": 7408 + }, + { + "epoch": 0.5144424385502013, + "grad_norm": 3.103357133665367, + "learning_rate": 5.009558136334607e-06, + "loss": 0.3305, + "step": 7409 + }, + { + "epoch": 0.5145118733509235, + "grad_norm": 3.650317343219308, + "learning_rate": 5.008433650844512e-06, + "loss": 0.4124, + "step": 7410 + }, + { + "epoch": 0.5145813081516456, + "grad_norm": 3.8131190801382377, + "learning_rate": 5.007309164927853e-06, + "loss": 0.5105, + "step": 7411 + }, + { + "epoch": 0.5146507429523677, + "grad_norm": 3.532756246143908, + "learning_rate": 5.006184678641504e-06, + "loss": 0.4849, + "step": 7412 + }, + { + "epoch": 0.5147201777530899, + "grad_norm": 4.222547184845305, + "learning_rate": 5.005060192042339e-06, + "loss": 0.4929, + "step": 7413 + }, + { + "epoch": 0.514789612553812, + "grad_norm": 3.919410398359665, + "learning_rate": 5.003935705187237e-06, + "loss": 0.5032, + "step": 7414 + }, + { + "epoch": 0.5148590473545341, + "grad_norm": 3.1342244339931993, + "learning_rate": 5.0028112181330736e-06, + "loss": 0.347, + "step": 7415 + }, + { + "epoch": 0.5149284821552562, + "grad_norm": 3.5028107039265497, + "learning_rate": 5.001686730936719e-06, + "loss": 0.4159, + "step": 7416 + }, + { + "epoch": 0.5149979169559783, + "grad_norm": 3.1337309621149907, + "learning_rate": 5.0005622436550525e-06, + "loss": 0.4034, + "step": 7417 + }, + { + "epoch": 0.5150673517567005, + "grad_norm": 3.9667189110597776, + "learning_rate": 4.999437756344948e-06, + "loss": 0.4024, + "step": 7418 + }, + { + "epoch": 0.5151367865574226, + "grad_norm": 3.396356699479342, + "learning_rate": 4.998313269063282e-06, + "loss": 0.5023, + "step": 7419 + }, + { + "epoch": 0.5152062213581448, + "grad_norm": 2.0701880851423495, + "learning_rate": 4.997188781866929e-06, + "loss": 0.1875, + "step": 7420 + }, + { + "epoch": 0.5152756561588668, + "grad_norm": 2.9196388401222144, + "learning_rate": 4.9960642948127635e-06, + "loss": 0.2814, + "step": 7421 + }, + { + "epoch": 0.5153450909595889, + "grad_norm": 4.0452288604675894, + "learning_rate": 4.994939807957661e-06, + "loss": 0.5527, + "step": 7422 + }, + { + "epoch": 0.5154145257603111, + "grad_norm": 3.813480093911458, + "learning_rate": 4.993815321358499e-06, + "loss": 0.4133, + "step": 7423 + }, + { + "epoch": 0.5154839605610332, + "grad_norm": 4.631406447952932, + "learning_rate": 4.992690835072149e-06, + "loss": 0.4669, + "step": 7424 + }, + { + "epoch": 0.5155533953617553, + "grad_norm": 3.601168122937942, + "learning_rate": 4.991566349155489e-06, + "loss": 0.4162, + "step": 7425 + }, + { + "epoch": 0.5156228301624775, + "grad_norm": 4.761889546969619, + "learning_rate": 4.990441863665393e-06, + "loss": 0.4695, + "step": 7426 + }, + { + "epoch": 0.5156922649631995, + "grad_norm": 
3.6994665527818253, + "learning_rate": 4.989317378658739e-06, + "loss": 0.4137, + "step": 7427 + }, + { + "epoch": 0.5157616997639217, + "grad_norm": 4.779711746647431, + "learning_rate": 4.988192894192398e-06, + "loss": 0.322, + "step": 7428 + }, + { + "epoch": 0.5158311345646438, + "grad_norm": 3.739015475062009, + "learning_rate": 4.987068410323248e-06, + "loss": 0.4807, + "step": 7429 + }, + { + "epoch": 0.5159005693653659, + "grad_norm": 3.3389499584328077, + "learning_rate": 4.985943927108163e-06, + "loss": 0.4506, + "step": 7430 + }, + { + "epoch": 0.5159700041660881, + "grad_norm": 4.732029473768147, + "learning_rate": 4.984819444604018e-06, + "loss": 0.6887, + "step": 7431 + }, + { + "epoch": 0.5160394389668101, + "grad_norm": 3.9530305200250453, + "learning_rate": 4.983694962867687e-06, + "loss": 0.4963, + "step": 7432 + }, + { + "epoch": 0.5161088737675323, + "grad_norm": 4.139718479421074, + "learning_rate": 4.982570481956047e-06, + "loss": 0.6287, + "step": 7433 + }, + { + "epoch": 0.5161783085682544, + "grad_norm": 3.526725979520689, + "learning_rate": 4.9814460019259745e-06, + "loss": 0.411, + "step": 7434 + }, + { + "epoch": 0.5162477433689765, + "grad_norm": 3.133034095546358, + "learning_rate": 4.9803215228343405e-06, + "loss": 0.433, + "step": 7435 + }, + { + "epoch": 0.5163171781696987, + "grad_norm": 3.6174425989802415, + "learning_rate": 4.97919704473802e-06, + "loss": 0.4467, + "step": 7436 + }, + { + "epoch": 0.5163866129704208, + "grad_norm": 4.031198835084751, + "learning_rate": 4.978072567693892e-06, + "loss": 0.3667, + "step": 7437 + }, + { + "epoch": 0.5164560477711428, + "grad_norm": 3.3930918882064818, + "learning_rate": 4.9769480917588295e-06, + "loss": 0.3938, + "step": 7438 + }, + { + "epoch": 0.516525482571865, + "grad_norm": 4.076396016414378, + "learning_rate": 4.975823616989704e-06, + "loss": 0.4294, + "step": 7439 + }, + { + "epoch": 0.5165949173725871, + "grad_norm": 4.615457457825244, + "learning_rate": 4.974699143443393e-06, + "loss": 0.6725, + "step": 7440 + }, + { + "epoch": 0.5166643521733093, + "grad_norm": 3.882061475766921, + "learning_rate": 4.973574671176772e-06, + "loss": 0.4237, + "step": 7441 + }, + { + "epoch": 0.5167337869740314, + "grad_norm": 3.1425675779098405, + "learning_rate": 4.972450200246712e-06, + "loss": 0.3549, + "step": 7442 + }, + { + "epoch": 0.5168032217747535, + "grad_norm": 4.475548458954799, + "learning_rate": 4.971325730710091e-06, + "loss": 0.3639, + "step": 7443 + }, + { + "epoch": 0.5168726565754757, + "grad_norm": 4.987274369695312, + "learning_rate": 4.970201262623781e-06, + "loss": 0.5691, + "step": 7444 + }, + { + "epoch": 0.5169420913761977, + "grad_norm": 3.7843322770991743, + "learning_rate": 4.96907679604466e-06, + "loss": 0.4522, + "step": 7445 + }, + { + "epoch": 0.5170115261769199, + "grad_norm": 5.852420954155457, + "learning_rate": 4.9679523310295976e-06, + "loss": 0.52, + "step": 7446 + }, + { + "epoch": 0.517080960977642, + "grad_norm": 3.795972864003024, + "learning_rate": 4.966827867635471e-06, + "loss": 0.4762, + "step": 7447 + }, + { + "epoch": 0.5171503957783641, + "grad_norm": 3.599465245664327, + "learning_rate": 4.965703405919154e-06, + "loss": 0.3878, + "step": 7448 + }, + { + "epoch": 0.5172198305790863, + "grad_norm": 4.076955225766076, + "learning_rate": 4.964578945937517e-06, + "loss": 0.3919, + "step": 7449 + }, + { + "epoch": 0.5172892653798083, + "grad_norm": 4.967015797190261, + "learning_rate": 4.963454487747439e-06, + "loss": 0.5377, + "step": 7450 + }, + { + "epoch": 
0.5173587001805304, + "grad_norm": 4.488148372757971, + "learning_rate": 4.962330031405791e-06, + "loss": 0.5394, + "step": 7451 + }, + { + "epoch": 0.5174281349812526, + "grad_norm": 3.7525627444133955, + "learning_rate": 4.961205576969449e-06, + "loss": 0.4171, + "step": 7452 + }, + { + "epoch": 0.5174975697819747, + "grad_norm": 3.137059576876212, + "learning_rate": 4.960081124495283e-06, + "loss": 0.3239, + "step": 7453 + }, + { + "epoch": 0.5175670045826969, + "grad_norm": 3.508016311545466, + "learning_rate": 4.958956674040169e-06, + "loss": 0.5235, + "step": 7454 + }, + { + "epoch": 0.517636439383419, + "grad_norm": 3.8043459093943346, + "learning_rate": 4.957832225660982e-06, + "loss": 0.3742, + "step": 7455 + }, + { + "epoch": 0.517705874184141, + "grad_norm": 3.6020784514952737, + "learning_rate": 4.95670777941459e-06, + "loss": 0.4566, + "step": 7456 + }, + { + "epoch": 0.5177753089848632, + "grad_norm": 4.047470226598575, + "learning_rate": 4.955583335357871e-06, + "loss": 0.5954, + "step": 7457 + }, + { + "epoch": 0.5178447437855853, + "grad_norm": 3.803362490498877, + "learning_rate": 4.954458893547696e-06, + "loss": 0.4045, + "step": 7458 + }, + { + "epoch": 0.5179141785863075, + "grad_norm": 4.258490219661581, + "learning_rate": 4.953334454040941e-06, + "loss": 0.497, + "step": 7459 + }, + { + "epoch": 0.5179836133870296, + "grad_norm": 3.1665407656401183, + "learning_rate": 4.952210016894475e-06, + "loss": 0.351, + "step": 7460 + }, + { + "epoch": 0.5180530481877517, + "grad_norm": 7.124607720989545, + "learning_rate": 4.951085582165171e-06, + "loss": 0.504, + "step": 7461 + }, + { + "epoch": 0.5181224829884739, + "grad_norm": 3.7920177226079876, + "learning_rate": 4.9499611499099044e-06, + "loss": 0.4604, + "step": 7462 + }, + { + "epoch": 0.5181919177891959, + "grad_norm": 4.3638013575300665, + "learning_rate": 4.948836720185547e-06, + "loss": 0.5517, + "step": 7463 + }, + { + "epoch": 0.518261352589918, + "grad_norm": 3.8830018687896573, + "learning_rate": 4.947712293048968e-06, + "loss": 0.4623, + "step": 7464 + }, + { + "epoch": 0.5183307873906402, + "grad_norm": 4.021336543863978, + "learning_rate": 4.9465878685570426e-06, + "loss": 0.5641, + "step": 7465 + }, + { + "epoch": 0.5184002221913623, + "grad_norm": 3.6199174824948535, + "learning_rate": 4.9454634467666445e-06, + "loss": 0.4274, + "step": 7466 + }, + { + "epoch": 0.5184696569920845, + "grad_norm": 3.043527862789783, + "learning_rate": 4.944339027734642e-06, + "loss": 0.2518, + "step": 7467 + }, + { + "epoch": 0.5185390917928066, + "grad_norm": 4.590053773232761, + "learning_rate": 4.943214611517907e-06, + "loss": 0.6955, + "step": 7468 + }, + { + "epoch": 0.5186085265935286, + "grad_norm": 2.9521522873524786, + "learning_rate": 4.9420901981733155e-06, + "loss": 0.1785, + "step": 7469 + }, + { + "epoch": 0.5186779613942508, + "grad_norm": 3.690117446195559, + "learning_rate": 4.940965787757736e-06, + "loss": 0.3715, + "step": 7470 + }, + { + "epoch": 0.5187473961949729, + "grad_norm": 3.67230083518739, + "learning_rate": 4.939841380328038e-06, + "loss": 0.4515, + "step": 7471 + }, + { + "epoch": 0.5188168309956951, + "grad_norm": 4.435123467769752, + "learning_rate": 4.938716975941096e-06, + "loss": 0.7382, + "step": 7472 + }, + { + "epoch": 0.5188862657964172, + "grad_norm": 5.936918097764168, + "learning_rate": 4.937592574653782e-06, + "loss": 0.5296, + "step": 7473 + }, + { + "epoch": 0.5189557005971392, + "grad_norm": 2.619471109127567, + "learning_rate": 4.936468176522962e-06, + "loss": 0.2328, + 
"step": 7474 + }, + { + "epoch": 0.5190251353978614, + "grad_norm": 3.5656751582580815, + "learning_rate": 4.93534378160551e-06, + "loss": 0.6072, + "step": 7475 + }, + { + "epoch": 0.5190945701985835, + "grad_norm": 3.848373382293671, + "learning_rate": 4.9342193899582975e-06, + "loss": 0.4654, + "step": 7476 + }, + { + "epoch": 0.5191640049993057, + "grad_norm": 3.129756283013653, + "learning_rate": 4.933095001638195e-06, + "loss": 0.3167, + "step": 7477 + }, + { + "epoch": 0.5192334398000278, + "grad_norm": 4.480671072898298, + "learning_rate": 4.931970616702069e-06, + "loss": 0.505, + "step": 7478 + }, + { + "epoch": 0.5193028746007499, + "grad_norm": 5.173018447424363, + "learning_rate": 4.930846235206793e-06, + "loss": 0.7989, + "step": 7479 + }, + { + "epoch": 0.519372309401472, + "grad_norm": 3.570656650148614, + "learning_rate": 4.929721857209238e-06, + "loss": 0.4606, + "step": 7480 + }, + { + "epoch": 0.5194417442021941, + "grad_norm": 4.372306302253888, + "learning_rate": 4.928597482766269e-06, + "loss": 0.3936, + "step": 7481 + }, + { + "epoch": 0.5195111790029162, + "grad_norm": 3.6346792488532222, + "learning_rate": 4.92747311193476e-06, + "loss": 0.3943, + "step": 7482 + }, + { + "epoch": 0.5195806138036384, + "grad_norm": 3.8986357997135075, + "learning_rate": 4.926348744771579e-06, + "loss": 0.3916, + "step": 7483 + }, + { + "epoch": 0.5196500486043605, + "grad_norm": 4.321729184597143, + "learning_rate": 4.9252243813335956e-06, + "loss": 0.3067, + "step": 7484 + }, + { + "epoch": 0.5197194834050827, + "grad_norm": 3.309782373956979, + "learning_rate": 4.924100021677677e-06, + "loss": 0.2252, + "step": 7485 + }, + { + "epoch": 0.5197889182058048, + "grad_norm": 4.7120736422940315, + "learning_rate": 4.922975665860694e-06, + "loss": 0.5368, + "step": 7486 + }, + { + "epoch": 0.5198583530065268, + "grad_norm": 3.629825826108819, + "learning_rate": 4.921851313939515e-06, + "loss": 0.335, + "step": 7487 + }, + { + "epoch": 0.519927787807249, + "grad_norm": 2.697189788751329, + "learning_rate": 4.9207269659710085e-06, + "loss": 0.3812, + "step": 7488 + }, + { + "epoch": 0.5199972226079711, + "grad_norm": 4.065926088847972, + "learning_rate": 4.919602622012042e-06, + "loss": 0.3214, + "step": 7489 + }, + { + "epoch": 0.5200666574086933, + "grad_norm": 2.8749544035743337, + "learning_rate": 4.9184782821194835e-06, + "loss": 0.304, + "step": 7490 + }, + { + "epoch": 0.5201360922094154, + "grad_norm": 4.989590159116698, + "learning_rate": 4.9173539463502025e-06, + "loss": 0.8215, + "step": 7491 + }, + { + "epoch": 0.5202055270101374, + "grad_norm": 2.9707239020785052, + "learning_rate": 4.916229614761065e-06, + "loss": 0.2269, + "step": 7492 + }, + { + "epoch": 0.5202749618108596, + "grad_norm": 5.967053870860305, + "learning_rate": 4.9151052874089365e-06, + "loss": 0.5471, + "step": 7493 + }, + { + "epoch": 0.5203443966115817, + "grad_norm": 4.346946189581933, + "learning_rate": 4.913980964350688e-06, + "loss": 0.4469, + "step": 7494 + }, + { + "epoch": 0.5204138314123038, + "grad_norm": 5.292851305489597, + "learning_rate": 4.912856645643186e-06, + "loss": 0.7722, + "step": 7495 + }, + { + "epoch": 0.520483266213026, + "grad_norm": 3.499532189367827, + "learning_rate": 4.911732331343295e-06, + "loss": 0.3472, + "step": 7496 + }, + { + "epoch": 0.5205527010137481, + "grad_norm": 4.1228641227892435, + "learning_rate": 4.910608021507883e-06, + "loss": 0.5266, + "step": 7497 + }, + { + "epoch": 0.5206221358144703, + "grad_norm": 3.294174014222333, + "learning_rate": 
4.909483716193817e-06, + "loss": 0.3738, + "step": 7498 + }, + { + "epoch": 0.5206915706151923, + "grad_norm": 3.425465900396751, + "learning_rate": 4.908359415457962e-06, + "loss": 0.3754, + "step": 7499 + }, + { + "epoch": 0.5207610054159144, + "grad_norm": 3.852591340405903, + "learning_rate": 4.907235119357183e-06, + "loss": 0.4138, + "step": 7500 + }, + { + "epoch": 0.5208304402166366, + "grad_norm": 4.701215699862018, + "learning_rate": 4.906110827948347e-06, + "loss": 0.7044, + "step": 7501 + }, + { + "epoch": 0.5208998750173587, + "grad_norm": 3.046167050709071, + "learning_rate": 4.9049865412883205e-06, + "loss": 0.2782, + "step": 7502 + }, + { + "epoch": 0.5209693098180809, + "grad_norm": 3.700247199629093, + "learning_rate": 4.903862259433965e-06, + "loss": 0.4541, + "step": 7503 + }, + { + "epoch": 0.521038744618803, + "grad_norm": 3.488366701342752, + "learning_rate": 4.902737982442148e-06, + "loss": 0.6177, + "step": 7504 + }, + { + "epoch": 0.521108179419525, + "grad_norm": 4.21476650065884, + "learning_rate": 4.9016137103697356e-06, + "loss": 0.8293, + "step": 7505 + }, + { + "epoch": 0.5211776142202472, + "grad_norm": 3.57792653700439, + "learning_rate": 4.90048944327359e-06, + "loss": 0.3288, + "step": 7506 + }, + { + "epoch": 0.5212470490209693, + "grad_norm": 3.973096155203936, + "learning_rate": 4.899365181210574e-06, + "loss": 0.4767, + "step": 7507 + }, + { + "epoch": 0.5213164838216914, + "grad_norm": 4.678196918418609, + "learning_rate": 4.898240924237554e-06, + "loss": 0.5879, + "step": 7508 + }, + { + "epoch": 0.5213859186224136, + "grad_norm": 3.637778059472762, + "learning_rate": 4.897116672411395e-06, + "loss": 0.637, + "step": 7509 + }, + { + "epoch": 0.5214553534231356, + "grad_norm": 3.3309840268946096, + "learning_rate": 4.8959924257889554e-06, + "loss": 0.211, + "step": 7510 + }, + { + "epoch": 0.5215247882238578, + "grad_norm": 3.71746568758711, + "learning_rate": 4.894868184427102e-06, + "loss": 0.3039, + "step": 7511 + }, + { + "epoch": 0.5215942230245799, + "grad_norm": 3.5051974282577003, + "learning_rate": 4.893743948382696e-06, + "loss": 0.5485, + "step": 7512 + }, + { + "epoch": 0.521663657825302, + "grad_norm": 3.8216843200357498, + "learning_rate": 4.8926197177126035e-06, + "loss": 0.3697, + "step": 7513 + }, + { + "epoch": 0.5217330926260242, + "grad_norm": 3.4712902380385993, + "learning_rate": 4.891495492473683e-06, + "loss": 0.2865, + "step": 7514 + }, + { + "epoch": 0.5218025274267463, + "grad_norm": 4.220174423767041, + "learning_rate": 4.890371272722797e-06, + "loss": 0.5742, + "step": 7515 + }, + { + "epoch": 0.5218719622274685, + "grad_norm": 3.179004269920638, + "learning_rate": 4.8892470585168095e-06, + "loss": 0.4484, + "step": 7516 + }, + { + "epoch": 0.5219413970281905, + "grad_norm": 3.5736651577557743, + "learning_rate": 4.888122849912579e-06, + "loss": 0.3727, + "step": 7517 + }, + { + "epoch": 0.5220108318289126, + "grad_norm": 2.9745945323294856, + "learning_rate": 4.886998646966968e-06, + "loss": 0.2614, + "step": 7518 + }, + { + "epoch": 0.5220802666296348, + "grad_norm": 3.4109334530506255, + "learning_rate": 4.885874449736837e-06, + "loss": 0.3683, + "step": 7519 + }, + { + "epoch": 0.5221497014303569, + "grad_norm": 3.8946956171958402, + "learning_rate": 4.88475025827905e-06, + "loss": 0.4094, + "step": 7520 + }, + { + "epoch": 0.522219136231079, + "grad_norm": 3.963858319031122, + "learning_rate": 4.883626072650463e-06, + "loss": 0.4972, + "step": 7521 + }, + { + "epoch": 0.5222885710318012, + "grad_norm": 
5.976793283812102, + "learning_rate": 4.882501892907936e-06, + "loss": 0.6885, + "step": 7522 + }, + { + "epoch": 0.5223580058325232, + "grad_norm": 3.7249661146290753, + "learning_rate": 4.8813777191083335e-06, + "loss": 0.5074, + "step": 7523 + }, + { + "epoch": 0.5224274406332454, + "grad_norm": 4.1624662574957325, + "learning_rate": 4.880253551308508e-06, + "loss": 0.3992, + "step": 7524 + }, + { + "epoch": 0.5224968754339675, + "grad_norm": 3.1828458714283197, + "learning_rate": 4.879129389565325e-06, + "loss": 0.1703, + "step": 7525 + }, + { + "epoch": 0.5225663102346896, + "grad_norm": 4.1379185093262745, + "learning_rate": 4.878005233935639e-06, + "loss": 0.3613, + "step": 7526 + }, + { + "epoch": 0.5226357450354118, + "grad_norm": 2.654738806001754, + "learning_rate": 4.876881084476312e-06, + "loss": 0.276, + "step": 7527 + }, + { + "epoch": 0.5227051798361338, + "grad_norm": 3.759988743770085, + "learning_rate": 4.8757569412442e-06, + "loss": 0.4422, + "step": 7528 + }, + { + "epoch": 0.522774614636856, + "grad_norm": 4.68881995870071, + "learning_rate": 4.874632804296161e-06, + "loss": 0.5652, + "step": 7529 + }, + { + "epoch": 0.5228440494375781, + "grad_norm": 3.656255301243486, + "learning_rate": 4.873508673689053e-06, + "loss": 0.4475, + "step": 7530 + }, + { + "epoch": 0.5229134842383002, + "grad_norm": 3.0593216618017802, + "learning_rate": 4.872384549479733e-06, + "loss": 0.2277, + "step": 7531 + }, + { + "epoch": 0.5229829190390224, + "grad_norm": 2.915545148413807, + "learning_rate": 4.871260431725058e-06, + "loss": 0.3215, + "step": 7532 + }, + { + "epoch": 0.5230523538397445, + "grad_norm": 3.7709763705217973, + "learning_rate": 4.870136320481884e-06, + "loss": 0.5412, + "step": 7533 + }, + { + "epoch": 0.5231217886404667, + "grad_norm": 3.5937118194443407, + "learning_rate": 4.869012215807069e-06, + "loss": 0.3742, + "step": 7534 + }, + { + "epoch": 0.5231912234411887, + "grad_norm": 4.355929676260524, + "learning_rate": 4.867888117757468e-06, + "loss": 0.5215, + "step": 7535 + }, + { + "epoch": 0.5232606582419108, + "grad_norm": 4.128021594091155, + "learning_rate": 4.866764026389935e-06, + "loss": 0.6572, + "step": 7536 + }, + { + "epoch": 0.523330093042633, + "grad_norm": 3.3808461552816205, + "learning_rate": 4.865639941761329e-06, + "loss": 0.4563, + "step": 7537 + }, + { + "epoch": 0.5233995278433551, + "grad_norm": 4.173449901292679, + "learning_rate": 4.8645158639285025e-06, + "loss": 0.4999, + "step": 7538 + }, + { + "epoch": 0.5234689626440772, + "grad_norm": 4.599080494001665, + "learning_rate": 4.863391792948308e-06, + "loss": 0.6843, + "step": 7539 + }, + { + "epoch": 0.5235383974447994, + "grad_norm": 3.9274742877234643, + "learning_rate": 4.862267728877604e-06, + "loss": 0.5224, + "step": 7540 + }, + { + "epoch": 0.5236078322455214, + "grad_norm": 5.140539743114913, + "learning_rate": 4.861143671773243e-06, + "loss": 0.6705, + "step": 7541 + }, + { + "epoch": 0.5236772670462436, + "grad_norm": 4.033299633213148, + "learning_rate": 4.860019621692076e-06, + "loss": 0.5198, + "step": 7542 + }, + { + "epoch": 0.5237467018469657, + "grad_norm": 3.3158099968009216, + "learning_rate": 4.858895578690958e-06, + "loss": 0.3143, + "step": 7543 + }, + { + "epoch": 0.5238161366476878, + "grad_norm": 3.3562223728878218, + "learning_rate": 4.857771542826743e-06, + "loss": 0.3163, + "step": 7544 + }, + { + "epoch": 0.52388557144841, + "grad_norm": 3.6998718390310827, + "learning_rate": 4.856647514156284e-06, + "loss": 0.3125, + "step": 7545 + }, + { + "epoch": 
0.523955006249132, + "grad_norm": 3.4450661664028988, + "learning_rate": 4.855523492736429e-06, + "loss": 0.5172, + "step": 7546 + }, + { + "epoch": 0.5240244410498542, + "grad_norm": 3.4132250383845832, + "learning_rate": 4.8543994786240325e-06, + "loss": 0.4554, + "step": 7547 + }, + { + "epoch": 0.5240938758505763, + "grad_norm": 4.657955207740734, + "learning_rate": 4.8532754718759475e-06, + "loss": 0.6324, + "step": 7548 + }, + { + "epoch": 0.5241633106512984, + "grad_norm": 4.274213761246854, + "learning_rate": 4.852151472549021e-06, + "loss": 0.5593, + "step": 7549 + }, + { + "epoch": 0.5242327454520206, + "grad_norm": 3.8313621344046433, + "learning_rate": 4.851027480700107e-06, + "loss": 0.4469, + "step": 7550 + }, + { + "epoch": 0.5243021802527427, + "grad_norm": 3.4638806955300536, + "learning_rate": 4.849903496386052e-06, + "loss": 0.4327, + "step": 7551 + }, + { + "epoch": 0.5243716150534647, + "grad_norm": 3.634738002569708, + "learning_rate": 4.848779519663712e-06, + "loss": 0.4888, + "step": 7552 + }, + { + "epoch": 0.5244410498541869, + "grad_norm": 3.777626123264553, + "learning_rate": 4.8476555505899314e-06, + "loss": 0.2734, + "step": 7553 + }, + { + "epoch": 0.524510484654909, + "grad_norm": 2.909240926714825, + "learning_rate": 4.8465315892215595e-06, + "loss": 0.3226, + "step": 7554 + }, + { + "epoch": 0.5245799194556312, + "grad_norm": 4.260706526750719, + "learning_rate": 4.845407635615448e-06, + "loss": 0.4559, + "step": 7555 + }, + { + "epoch": 0.5246493542563533, + "grad_norm": 3.526066805594896, + "learning_rate": 4.844283689828441e-06, + "loss": 0.2152, + "step": 7556 + }, + { + "epoch": 0.5247187890570754, + "grad_norm": 3.281158577202449, + "learning_rate": 4.843159751917391e-06, + "loss": 0.3405, + "step": 7557 + }, + { + "epoch": 0.5247882238577976, + "grad_norm": 3.703555121517168, + "learning_rate": 4.842035821939141e-06, + "loss": 0.5067, + "step": 7558 + }, + { + "epoch": 0.5248576586585196, + "grad_norm": 2.2924320746164124, + "learning_rate": 4.8409118999505435e-06, + "loss": 0.2614, + "step": 7559 + }, + { + "epoch": 0.5249270934592418, + "grad_norm": 3.277476625894575, + "learning_rate": 4.83978798600844e-06, + "loss": 0.3365, + "step": 7560 + }, + { + "epoch": 0.5249965282599639, + "grad_norm": 4.826216468081945, + "learning_rate": 4.8386640801696786e-06, + "loss": 0.458, + "step": 7561 + }, + { + "epoch": 0.525065963060686, + "grad_norm": 3.3247688162565274, + "learning_rate": 4.8375401824911064e-06, + "loss": 0.245, + "step": 7562 + }, + { + "epoch": 0.5251353978614082, + "grad_norm": 4.1948600140664585, + "learning_rate": 4.836416293029569e-06, + "loss": 0.4996, + "step": 7563 + }, + { + "epoch": 0.5252048326621302, + "grad_norm": 4.355933448161882, + "learning_rate": 4.835292411841907e-06, + "loss": 0.5548, + "step": 7564 + }, + { + "epoch": 0.5252742674628523, + "grad_norm": 4.6402188376246905, + "learning_rate": 4.83416853898497e-06, + "loss": 0.7553, + "step": 7565 + }, + { + "epoch": 0.5253437022635745, + "grad_norm": 2.9360575364270263, + "learning_rate": 4.833044674515603e-06, + "loss": 0.3392, + "step": 7566 + }, + { + "epoch": 0.5254131370642966, + "grad_norm": 2.948554253163831, + "learning_rate": 4.831920818490645e-06, + "loss": 0.3724, + "step": 7567 + }, + { + "epoch": 0.5254825718650188, + "grad_norm": 4.466094060872265, + "learning_rate": 4.830796970966942e-06, + "loss": 0.3714, + "step": 7568 + }, + { + "epoch": 0.5255520066657409, + "grad_norm": 4.7105727525794014, + "learning_rate": 4.829673132001337e-06, + "loss": 0.4727, + 
"step": 7569 + }, + { + "epoch": 0.525621441466463, + "grad_norm": 4.557010346927838, + "learning_rate": 4.828549301650673e-06, + "loss": 0.6636, + "step": 7570 + }, + { + "epoch": 0.5256908762671851, + "grad_norm": 4.24206942199176, + "learning_rate": 4.82742547997179e-06, + "loss": 0.4691, + "step": 7571 + }, + { + "epoch": 0.5257603110679072, + "grad_norm": 3.560349152013464, + "learning_rate": 4.826301667021532e-06, + "loss": 0.4934, + "step": 7572 + }, + { + "epoch": 0.5258297458686294, + "grad_norm": 2.848386125812199, + "learning_rate": 4.8251778628567395e-06, + "loss": 0.2237, + "step": 7573 + }, + { + "epoch": 0.5258991806693515, + "grad_norm": 3.041182188505059, + "learning_rate": 4.824054067534251e-06, + "loss": 0.2207, + "step": 7574 + }, + { + "epoch": 0.5259686154700736, + "grad_norm": 4.026251639351946, + "learning_rate": 4.822930281110909e-06, + "loss": 0.391, + "step": 7575 + }, + { + "epoch": 0.5260380502707958, + "grad_norm": 4.02501985722207, + "learning_rate": 4.821806503643554e-06, + "loss": 0.503, + "step": 7576 + }, + { + "epoch": 0.5261074850715178, + "grad_norm": 4.426648874782034, + "learning_rate": 4.820682735189025e-06, + "loss": 0.5981, + "step": 7577 + }, + { + "epoch": 0.5261769198722399, + "grad_norm": 3.7160441872205165, + "learning_rate": 4.819558975804159e-06, + "loss": 0.3262, + "step": 7578 + }, + { + "epoch": 0.5262463546729621, + "grad_norm": 2.970748354883217, + "learning_rate": 4.818435225545796e-06, + "loss": 0.3478, + "step": 7579 + }, + { + "epoch": 0.5263157894736842, + "grad_norm": 3.9234364557102133, + "learning_rate": 4.817311484470776e-06, + "loss": 0.3742, + "step": 7580 + }, + { + "epoch": 0.5263852242744064, + "grad_norm": 3.209848973909349, + "learning_rate": 4.816187752635931e-06, + "loss": 0.32, + "step": 7581 + }, + { + "epoch": 0.5264546590751285, + "grad_norm": 15.826325501719342, + "learning_rate": 4.815064030098103e-06, + "loss": 0.2911, + "step": 7582 + }, + { + "epoch": 0.5265240938758505, + "grad_norm": 2.92216938282492, + "learning_rate": 4.813940316914127e-06, + "loss": 0.316, + "step": 7583 + }, + { + "epoch": 0.5265935286765727, + "grad_norm": 3.3176866862206076, + "learning_rate": 4.81281661314084e-06, + "loss": 0.4231, + "step": 7584 + }, + { + "epoch": 0.5266629634772948, + "grad_norm": 2.8538681242744928, + "learning_rate": 4.811692918835075e-06, + "loss": 0.2186, + "step": 7585 + }, + { + "epoch": 0.526732398278017, + "grad_norm": 2.5661296163764065, + "learning_rate": 4.810569234053671e-06, + "loss": 0.2495, + "step": 7586 + }, + { + "epoch": 0.5268018330787391, + "grad_norm": 4.428315134144549, + "learning_rate": 4.809445558853459e-06, + "loss": 0.4703, + "step": 7587 + }, + { + "epoch": 0.5268712678794611, + "grad_norm": 2.953286961936975, + "learning_rate": 4.8083218932912776e-06, + "loss": 0.3682, + "step": 7588 + }, + { + "epoch": 0.5269407026801833, + "grad_norm": 3.8433508467282036, + "learning_rate": 4.807198237423957e-06, + "loss": 0.5439, + "step": 7589 + }, + { + "epoch": 0.5270101374809054, + "grad_norm": 4.538520863946223, + "learning_rate": 4.80607459130833e-06, + "loss": 0.4865, + "step": 7590 + }, + { + "epoch": 0.5270795722816276, + "grad_norm": 3.178012338165663, + "learning_rate": 4.804950955001232e-06, + "loss": 0.2528, + "step": 7591 + }, + { + "epoch": 0.5271490070823497, + "grad_norm": 4.176085519729535, + "learning_rate": 4.803827328559494e-06, + "loss": 0.3465, + "step": 7592 + }, + { + "epoch": 0.5272184418830718, + "grad_norm": 3.376475931668156, + "learning_rate": 4.802703712039946e-06, + 
"loss": 0.5196, + "step": 7593 + }, + { + "epoch": 0.527287876683794, + "grad_norm": 3.8417306555503066, + "learning_rate": 4.801580105499422e-06, + "loss": 0.3458, + "step": 7594 + }, + { + "epoch": 0.527357311484516, + "grad_norm": 3.8340198618244594, + "learning_rate": 4.800456508994752e-06, + "loss": 0.4164, + "step": 7595 + }, + { + "epoch": 0.5274267462852381, + "grad_norm": 4.601995105973886, + "learning_rate": 4.799332922582767e-06, + "loss": 0.6385, + "step": 7596 + }, + { + "epoch": 0.5274961810859603, + "grad_norm": 4.629812581629832, + "learning_rate": 4.7982093463202935e-06, + "loss": 0.4247, + "step": 7597 + }, + { + "epoch": 0.5275656158866824, + "grad_norm": 4.193230211695929, + "learning_rate": 4.797085780264166e-06, + "loss": 0.3899, + "step": 7598 + }, + { + "epoch": 0.5276350506874046, + "grad_norm": 3.6981027121400376, + "learning_rate": 4.7959622244712075e-06, + "loss": 0.4659, + "step": 7599 + }, + { + "epoch": 0.5277044854881267, + "grad_norm": 5.287201246304581, + "learning_rate": 4.794838678998249e-06, + "loss": 0.9563, + "step": 7600 + }, + { + "epoch": 0.5277739202888487, + "grad_norm": 4.908797808238697, + "learning_rate": 4.793715143902119e-06, + "loss": 0.6463, + "step": 7601 + }, + { + "epoch": 0.5278433550895709, + "grad_norm": 3.0148535977925417, + "learning_rate": 4.792591619239644e-06, + "loss": 0.3651, + "step": 7602 + }, + { + "epoch": 0.527912789890293, + "grad_norm": 3.4765397181752475, + "learning_rate": 4.791468105067648e-06, + "loss": 0.5398, + "step": 7603 + }, + { + "epoch": 0.5279822246910152, + "grad_norm": 3.6370504942297273, + "learning_rate": 4.79034460144296e-06, + "loss": 0.4001, + "step": 7604 + }, + { + "epoch": 0.5280516594917373, + "grad_norm": 3.587560445602243, + "learning_rate": 4.789221108422407e-06, + "loss": 0.537, + "step": 7605 + }, + { + "epoch": 0.5281210942924593, + "grad_norm": 2.5616184767807617, + "learning_rate": 4.78809762606281e-06, + "loss": 0.2123, + "step": 7606 + }, + { + "epoch": 0.5281905290931815, + "grad_norm": 4.546211942773034, + "learning_rate": 4.786974154420994e-06, + "loss": 0.5091, + "step": 7607 + }, + { + "epoch": 0.5282599638939036, + "grad_norm": 3.4442057259691006, + "learning_rate": 4.785850693553786e-06, + "loss": 0.4746, + "step": 7608 + }, + { + "epoch": 0.5283293986946257, + "grad_norm": 4.490318720086395, + "learning_rate": 4.784727243518009e-06, + "loss": 0.4585, + "step": 7609 + }, + { + "epoch": 0.5283988334953479, + "grad_norm": 4.633471245654575, + "learning_rate": 4.783603804370481e-06, + "loss": 0.6022, + "step": 7610 + }, + { + "epoch": 0.52846826829607, + "grad_norm": 4.316620574197479, + "learning_rate": 4.78248037616803e-06, + "loss": 0.6819, + "step": 7611 + }, + { + "epoch": 0.5285377030967922, + "grad_norm": 4.17672287241586, + "learning_rate": 4.781356958967474e-06, + "loss": 0.7453, + "step": 7612 + }, + { + "epoch": 0.5286071378975142, + "grad_norm": 4.364221692216325, + "learning_rate": 4.780233552825638e-06, + "loss": 0.6576, + "step": 7613 + }, + { + "epoch": 0.5286765726982363, + "grad_norm": 3.184957002896794, + "learning_rate": 4.779110157799337e-06, + "loss": 0.4321, + "step": 7614 + }, + { + "epoch": 0.5287460074989585, + "grad_norm": 5.264073496133862, + "learning_rate": 4.777986773945396e-06, + "loss": 0.7561, + "step": 7615 + }, + { + "epoch": 0.5288154422996806, + "grad_norm": 6.599535217025938, + "learning_rate": 4.776863401320635e-06, + "loss": 0.8908, + "step": 7616 + }, + { + "epoch": 0.5288848771004028, + "grad_norm": 3.5496142758851956, + "learning_rate": 
4.775740039981867e-06, + "loss": 0.4606, + "step": 7617 + }, + { + "epoch": 0.5289543119011249, + "grad_norm": 3.5300314840032723, + "learning_rate": 4.774616689985916e-06, + "loss": 0.382, + "step": 7618 + }, + { + "epoch": 0.5290237467018469, + "grad_norm": 3.6451272995713273, + "learning_rate": 4.773493351389596e-06, + "loss": 0.5546, + "step": 7619 + }, + { + "epoch": 0.5290931815025691, + "grad_norm": 4.482994235308365, + "learning_rate": 4.772370024249728e-06, + "loss": 0.7301, + "step": 7620 + }, + { + "epoch": 0.5291626163032912, + "grad_norm": 3.3104289437322407, + "learning_rate": 4.7712467086231255e-06, + "loss": 0.4218, + "step": 7621 + }, + { + "epoch": 0.5292320511040133, + "grad_norm": 4.323829170807633, + "learning_rate": 4.770123404566605e-06, + "loss": 0.6127, + "step": 7622 + }, + { + "epoch": 0.5293014859047355, + "grad_norm": 2.729079520417261, + "learning_rate": 4.769000112136985e-06, + "loss": 0.147, + "step": 7623 + }, + { + "epoch": 0.5293709207054575, + "grad_norm": 3.6893090363571086, + "learning_rate": 4.767876831391075e-06, + "loss": 0.3154, + "step": 7624 + }, + { + "epoch": 0.5294403555061797, + "grad_norm": 3.971447487848771, + "learning_rate": 4.7667535623856945e-06, + "loss": 0.3691, + "step": 7625 + }, + { + "epoch": 0.5295097903069018, + "grad_norm": 4.281544982294868, + "learning_rate": 4.765630305177652e-06, + "loss": 0.6053, + "step": 7626 + }, + { + "epoch": 0.5295792251076239, + "grad_norm": 3.81483841632714, + "learning_rate": 4.764507059823767e-06, + "loss": 0.4556, + "step": 7627 + }, + { + "epoch": 0.5296486599083461, + "grad_norm": 3.0557442397083245, + "learning_rate": 4.763383826380848e-06, + "loss": 0.3772, + "step": 7628 + }, + { + "epoch": 0.5297180947090682, + "grad_norm": 3.8671314498525673, + "learning_rate": 4.762260604905704e-06, + "loss": 0.5598, + "step": 7629 + }, + { + "epoch": 0.5297875295097904, + "grad_norm": 2.3940355617309, + "learning_rate": 4.761137395455153e-06, + "loss": 0.1093, + "step": 7630 + }, + { + "epoch": 0.5298569643105124, + "grad_norm": 4.148900277553757, + "learning_rate": 4.760014198086001e-06, + "loss": 0.4845, + "step": 7631 + }, + { + "epoch": 0.5299263991112345, + "grad_norm": 4.487238401018542, + "learning_rate": 4.758891012855059e-06, + "loss": 0.5781, + "step": 7632 + }, + { + "epoch": 0.5299958339119567, + "grad_norm": 3.6223210433942787, + "learning_rate": 4.757767839819137e-06, + "loss": 0.2489, + "step": 7633 + }, + { + "epoch": 0.5300652687126788, + "grad_norm": 3.279842795484512, + "learning_rate": 4.756644679035045e-06, + "loss": 0.4866, + "step": 7634 + }, + { + "epoch": 0.5301347035134009, + "grad_norm": 3.482369081542808, + "learning_rate": 4.755521530559587e-06, + "loss": 0.4453, + "step": 7635 + }, + { + "epoch": 0.530204138314123, + "grad_norm": 3.7935855730939383, + "learning_rate": 4.754398394449573e-06, + "loss": 0.6607, + "step": 7636 + }, + { + "epoch": 0.5302735731148451, + "grad_norm": 2.7895429220366323, + "learning_rate": 4.753275270761814e-06, + "loss": 0.2803, + "step": 7637 + }, + { + "epoch": 0.5303430079155673, + "grad_norm": 4.264072129495534, + "learning_rate": 4.752152159553111e-06, + "loss": 0.3608, + "step": 7638 + }, + { + "epoch": 0.5304124427162894, + "grad_norm": 3.4616402154084134, + "learning_rate": 4.7510290608802695e-06, + "loss": 0.4841, + "step": 7639 + }, + { + "epoch": 0.5304818775170115, + "grad_norm": 3.3924460325273893, + "learning_rate": 4.749905974800098e-06, + "loss": 0.3477, + "step": 7640 + }, + { + "epoch": 0.5305513123177337, + "grad_norm": 
3.8219978207962946, + "learning_rate": 4.7487829013694e-06, + "loss": 0.4219, + "step": 7641 + }, + { + "epoch": 0.5306207471184557, + "grad_norm": 3.4408200776477345, + "learning_rate": 4.747659840644977e-06, + "loss": 0.2711, + "step": 7642 + }, + { + "epoch": 0.5306901819191779, + "grad_norm": 4.012072261374291, + "learning_rate": 4.746536792683635e-06, + "loss": 0.6345, + "step": 7643 + }, + { + "epoch": 0.5307596167199, + "grad_norm": 4.129265164399618, + "learning_rate": 4.745413757542173e-06, + "loss": 0.223, + "step": 7644 + }, + { + "epoch": 0.5308290515206221, + "grad_norm": 3.7250093689686774, + "learning_rate": 4.744290735277398e-06, + "loss": 0.1672, + "step": 7645 + }, + { + "epoch": 0.5308984863213443, + "grad_norm": 3.4691721261954522, + "learning_rate": 4.7431677259461065e-06, + "loss": 0.3186, + "step": 7646 + }, + { + "epoch": 0.5309679211220664, + "grad_norm": 2.9930076097453617, + "learning_rate": 4.7420447296051005e-06, + "loss": 0.3577, + "step": 7647 + }, + { + "epoch": 0.5310373559227886, + "grad_norm": 3.0937243809183848, + "learning_rate": 4.740921746311183e-06, + "loss": 0.2283, + "step": 7648 + }, + { + "epoch": 0.5311067907235106, + "grad_norm": 2.6757122504765234, + "learning_rate": 4.739798776121147e-06, + "loss": 0.3811, + "step": 7649 + }, + { + "epoch": 0.5311762255242327, + "grad_norm": 4.6182376935736995, + "learning_rate": 4.738675819091795e-06, + "loss": 0.3601, + "step": 7650 + }, + { + "epoch": 0.5312456603249549, + "grad_norm": 3.8754991444887446, + "learning_rate": 4.7375528752799245e-06, + "loss": 0.4221, + "step": 7651 + }, + { + "epoch": 0.531315095125677, + "grad_norm": 3.482607727905127, + "learning_rate": 4.736429944742335e-06, + "loss": 0.3851, + "step": 7652 + }, + { + "epoch": 0.5313845299263991, + "grad_norm": 3.5451539100047142, + "learning_rate": 4.735307027535819e-06, + "loss": 0.4364, + "step": 7653 + }, + { + "epoch": 0.5314539647271213, + "grad_norm": 3.0033916656801196, + "learning_rate": 4.734184123717172e-06, + "loss": 0.2232, + "step": 7654 + }, + { + "epoch": 0.5315233995278433, + "grad_norm": 3.8129059864485484, + "learning_rate": 4.733061233343195e-06, + "loss": 0.4561, + "step": 7655 + }, + { + "epoch": 0.5315928343285655, + "grad_norm": 4.463133125395323, + "learning_rate": 4.731938356470675e-06, + "loss": 0.5236, + "step": 7656 + }, + { + "epoch": 0.5316622691292876, + "grad_norm": 3.5203712411289576, + "learning_rate": 4.730815493156412e-06, + "loss": 0.3622, + "step": 7657 + }, + { + "epoch": 0.5317317039300097, + "grad_norm": 3.809319442148734, + "learning_rate": 4.729692643457196e-06, + "loss": 0.3419, + "step": 7658 + }, + { + "epoch": 0.5318011387307319, + "grad_norm": 3.1573776806496032, + "learning_rate": 4.728569807429821e-06, + "loss": 0.3349, + "step": 7659 + }, + { + "epoch": 0.531870573531454, + "grad_norm": 2.6179847221489996, + "learning_rate": 4.727446985131078e-06, + "loss": 0.1373, + "step": 7660 + }, + { + "epoch": 0.5319400083321761, + "grad_norm": 3.1789125892452685, + "learning_rate": 4.726324176617757e-06, + "loss": 0.3723, + "step": 7661 + }, + { + "epoch": 0.5320094431328982, + "grad_norm": 5.235128873493515, + "learning_rate": 4.72520138194665e-06, + "loss": 0.5147, + "step": 7662 + }, + { + "epoch": 0.5320788779336203, + "grad_norm": 4.3924821990763485, + "learning_rate": 4.724078601174546e-06, + "loss": 0.5038, + "step": 7663 + }, + { + "epoch": 0.5321483127343425, + "grad_norm": 3.3363166469608854, + "learning_rate": 4.722955834358234e-06, + "loss": 0.327, + "step": 7664 + }, + { + "epoch": 
0.5322177475350646, + "grad_norm": 3.4566621635125263, + "learning_rate": 4.721833081554502e-06, + "loss": 0.5327, + "step": 7665 + }, + { + "epoch": 0.5322871823357866, + "grad_norm": 3.4473483043462965, + "learning_rate": 4.72071034282014e-06, + "loss": 0.2266, + "step": 7666 + }, + { + "epoch": 0.5323566171365088, + "grad_norm": 3.781971859630506, + "learning_rate": 4.719587618211931e-06, + "loss": 0.6296, + "step": 7667 + }, + { + "epoch": 0.5324260519372309, + "grad_norm": 3.6206056143832575, + "learning_rate": 4.718464907786662e-06, + "loss": 0.2911, + "step": 7668 + }, + { + "epoch": 0.5324954867379531, + "grad_norm": 3.7255293777568648, + "learning_rate": 4.717342211601121e-06, + "loss": 0.3603, + "step": 7669 + }, + { + "epoch": 0.5325649215386752, + "grad_norm": 4.319241156800124, + "learning_rate": 4.716219529712092e-06, + "loss": 0.4791, + "step": 7670 + }, + { + "epoch": 0.5326343563393973, + "grad_norm": 3.866847180484654, + "learning_rate": 4.715096862176356e-06, + "loss": 0.3733, + "step": 7671 + }, + { + "epoch": 0.5327037911401195, + "grad_norm": 4.7365965469779585, + "learning_rate": 4.713974209050699e-06, + "loss": 0.4856, + "step": 7672 + }, + { + "epoch": 0.5327732259408415, + "grad_norm": 2.869318549935514, + "learning_rate": 4.712851570391905e-06, + "loss": 0.1557, + "step": 7673 + }, + { + "epoch": 0.5328426607415637, + "grad_norm": 3.3944422745507588, + "learning_rate": 4.711728946256751e-06, + "loss": 0.3371, + "step": 7674 + }, + { + "epoch": 0.5329120955422858, + "grad_norm": 3.2953735733597553, + "learning_rate": 4.71060633670202e-06, + "loss": 0.2186, + "step": 7675 + }, + { + "epoch": 0.5329815303430079, + "grad_norm": 4.044717213004769, + "learning_rate": 4.7094837417844946e-06, + "loss": 0.6493, + "step": 7676 + }, + { + "epoch": 0.5330509651437301, + "grad_norm": 4.385837450981106, + "learning_rate": 4.708361161560954e-06, + "loss": 0.609, + "step": 7677 + }, + { + "epoch": 0.5331203999444522, + "grad_norm": 3.381586300161956, + "learning_rate": 4.707238596088173e-06, + "loss": 0.3276, + "step": 7678 + }, + { + "epoch": 0.5331898347451742, + "grad_norm": 3.327144351858488, + "learning_rate": 4.706116045422935e-06, + "loss": 0.4117, + "step": 7679 + }, + { + "epoch": 0.5332592695458964, + "grad_norm": 4.5291930560703735, + "learning_rate": 4.704993509622014e-06, + "loss": 0.3867, + "step": 7680 + }, + { + "epoch": 0.5333287043466185, + "grad_norm": 4.488588201939917, + "learning_rate": 4.703870988742187e-06, + "loss": 0.4146, + "step": 7681 + }, + { + "epoch": 0.5333981391473407, + "grad_norm": 3.849611191232792, + "learning_rate": 4.702748482840231e-06, + "loss": 0.3757, + "step": 7682 + }, + { + "epoch": 0.5334675739480628, + "grad_norm": 2.903008397490526, + "learning_rate": 4.701625991972919e-06, + "loss": 0.2998, + "step": 7683 + }, + { + "epoch": 0.5335370087487848, + "grad_norm": 2.5369271531814204, + "learning_rate": 4.700503516197029e-06, + "loss": 0.1545, + "step": 7684 + }, + { + "epoch": 0.533606443549507, + "grad_norm": 3.4001771146595092, + "learning_rate": 4.69938105556933e-06, + "loss": 0.212, + "step": 7685 + }, + { + "epoch": 0.5336758783502291, + "grad_norm": 3.30503283862454, + "learning_rate": 4.6982586101465985e-06, + "loss": 0.3887, + "step": 7686 + }, + { + "epoch": 0.5337453131509513, + "grad_norm": 2.230267232281318, + "learning_rate": 4.697136179985605e-06, + "loss": 0.2099, + "step": 7687 + }, + { + "epoch": 0.5338147479516734, + "grad_norm": 4.1876891419692575, + "learning_rate": 4.69601376514312e-06, + "loss": 0.5612, + 
"step": 7688 + }, + { + "epoch": 0.5338841827523955, + "grad_norm": 5.073850885955729, + "learning_rate": 4.694891365675915e-06, + "loss": 0.5868, + "step": 7689 + }, + { + "epoch": 0.5339536175531177, + "grad_norm": 4.44110986471076, + "learning_rate": 4.693768981640758e-06, + "loss": 0.5934, + "step": 7690 + }, + { + "epoch": 0.5340230523538397, + "grad_norm": 3.250650351202611, + "learning_rate": 4.692646613094422e-06, + "loss": 0.3002, + "step": 7691 + }, + { + "epoch": 0.5340924871545618, + "grad_norm": 3.9513613706719224, + "learning_rate": 4.691524260093672e-06, + "loss": 0.5048, + "step": 7692 + }, + { + "epoch": 0.534161921955284, + "grad_norm": 4.705778722531619, + "learning_rate": 4.690401922695274e-06, + "loss": 0.3941, + "step": 7693 + }, + { + "epoch": 0.5342313567560061, + "grad_norm": 3.8176042294210735, + "learning_rate": 4.689279600955998e-06, + "loss": 0.393, + "step": 7694 + }, + { + "epoch": 0.5343007915567283, + "grad_norm": 4.641355428099003, + "learning_rate": 4.688157294932608e-06, + "loss": 0.5573, + "step": 7695 + }, + { + "epoch": 0.5343702263574504, + "grad_norm": 3.4015256019912607, + "learning_rate": 4.6870350046818705e-06, + "loss": 0.4906, + "step": 7696 + }, + { + "epoch": 0.5344396611581724, + "grad_norm": 3.7177063890413615, + "learning_rate": 4.685912730260545e-06, + "loss": 0.6134, + "step": 7697 + }, + { + "epoch": 0.5345090959588946, + "grad_norm": 3.5773231876750087, + "learning_rate": 4.684790471725402e-06, + "loss": 0.346, + "step": 7698 + }, + { + "epoch": 0.5345785307596167, + "grad_norm": 3.0379603806792947, + "learning_rate": 4.683668229133198e-06, + "loss": 0.4674, + "step": 7699 + }, + { + "epoch": 0.5346479655603389, + "grad_norm": 4.032505367124036, + "learning_rate": 4.682546002540697e-06, + "loss": 0.5441, + "step": 7700 + }, + { + "epoch": 0.534717400361061, + "grad_norm": 3.527588768043789, + "learning_rate": 4.68142379200466e-06, + "loss": 0.5461, + "step": 7701 + }, + { + "epoch": 0.534786835161783, + "grad_norm": 3.5795773442064904, + "learning_rate": 4.680301597581849e-06, + "loss": 0.365, + "step": 7702 + }, + { + "epoch": 0.5348562699625052, + "grad_norm": 3.4524466502656987, + "learning_rate": 4.679179419329019e-06, + "loss": 0.4785, + "step": 7703 + }, + { + "epoch": 0.5349257047632273, + "grad_norm": 3.5518359435491127, + "learning_rate": 4.678057257302931e-06, + "loss": 0.4363, + "step": 7704 + }, + { + "epoch": 0.5349951395639495, + "grad_norm": 5.136777772980805, + "learning_rate": 4.6769351115603445e-06, + "loss": 0.4172, + "step": 7705 + }, + { + "epoch": 0.5350645743646716, + "grad_norm": 3.9765180566040943, + "learning_rate": 4.675812982158013e-06, + "loss": 0.5559, + "step": 7706 + }, + { + "epoch": 0.5351340091653937, + "grad_norm": 26.38968430614968, + "learning_rate": 4.674690869152694e-06, + "loss": 0.3006, + "step": 7707 + }, + { + "epoch": 0.5352034439661159, + "grad_norm": 2.838714000868335, + "learning_rate": 4.673568772601143e-06, + "loss": 0.2575, + "step": 7708 + }, + { + "epoch": 0.5352728787668379, + "grad_norm": 3.4687534111998404, + "learning_rate": 4.672446692560116e-06, + "loss": 0.4209, + "step": 7709 + }, + { + "epoch": 0.53534231356756, + "grad_norm": 3.7265551307033853, + "learning_rate": 4.671324629086362e-06, + "loss": 0.2799, + "step": 7710 + }, + { + "epoch": 0.5354117483682822, + "grad_norm": 3.305952283692755, + "learning_rate": 4.670202582236637e-06, + "loss": 0.3792, + "step": 7711 + }, + { + "epoch": 0.5354811831690043, + "grad_norm": 4.291653013827585, + "learning_rate": 
4.669080552067694e-06, + "loss": 0.5356, + "step": 7712 + }, + { + "epoch": 0.5355506179697265, + "grad_norm": 3.9962102549505794, + "learning_rate": 4.6679585386362795e-06, + "loss": 0.5538, + "step": 7713 + }, + { + "epoch": 0.5356200527704486, + "grad_norm": 3.017980999955834, + "learning_rate": 4.666836541999148e-06, + "loss": 0.3825, + "step": 7714 + }, + { + "epoch": 0.5356894875711706, + "grad_norm": 5.094235537153492, + "learning_rate": 4.665714562213045e-06, + "loss": 0.7998, + "step": 7715 + }, + { + "epoch": 0.5357589223718928, + "grad_norm": 3.5668450671897385, + "learning_rate": 4.664592599334725e-06, + "loss": 0.4263, + "step": 7716 + }, + { + "epoch": 0.5358283571726149, + "grad_norm": 3.3347559110882745, + "learning_rate": 4.6634706534209284e-06, + "loss": 0.4169, + "step": 7717 + }, + { + "epoch": 0.5358977919733371, + "grad_norm": 3.5257045728119025, + "learning_rate": 4.662348724528407e-06, + "loss": 0.4106, + "step": 7718 + }, + { + "epoch": 0.5359672267740592, + "grad_norm": 4.4474454759232405, + "learning_rate": 4.661226812713903e-06, + "loss": 0.6376, + "step": 7719 + }, + { + "epoch": 0.5360366615747812, + "grad_norm": 3.2134895603726723, + "learning_rate": 4.660104918034167e-06, + "loss": 0.2785, + "step": 7720 + }, + { + "epoch": 0.5361060963755034, + "grad_norm": 3.523472452190626, + "learning_rate": 4.658983040545938e-06, + "loss": 0.2902, + "step": 7721 + }, + { + "epoch": 0.5361755311762255, + "grad_norm": 3.410977745571911, + "learning_rate": 4.6578611803059594e-06, + "loss": 0.3642, + "step": 7722 + }, + { + "epoch": 0.5362449659769476, + "grad_norm": 2.5124714232768786, + "learning_rate": 4.656739337370978e-06, + "loss": 0.2007, + "step": 7723 + }, + { + "epoch": 0.5363144007776698, + "grad_norm": 3.745830488325483, + "learning_rate": 4.655617511797731e-06, + "loss": 0.4836, + "step": 7724 + }, + { + "epoch": 0.5363838355783919, + "grad_norm": 4.295268882219355, + "learning_rate": 4.654495703642959e-06, + "loss": 0.6661, + "step": 7725 + }, + { + "epoch": 0.5364532703791141, + "grad_norm": 4.076473026278984, + "learning_rate": 4.653373912963404e-06, + "loss": 0.4766, + "step": 7726 + }, + { + "epoch": 0.5365227051798361, + "grad_norm": 4.340279129528889, + "learning_rate": 4.652252139815807e-06, + "loss": 0.4945, + "step": 7727 + }, + { + "epoch": 0.5365921399805582, + "grad_norm": 3.5886032831450483, + "learning_rate": 4.651130384256901e-06, + "loss": 0.3673, + "step": 7728 + }, + { + "epoch": 0.5366615747812804, + "grad_norm": 3.453286487693722, + "learning_rate": 4.650008646343425e-06, + "loss": 0.3705, + "step": 7729 + }, + { + "epoch": 0.5367310095820025, + "grad_norm": 3.7908337695937786, + "learning_rate": 4.6488869261321175e-06, + "loss": 0.4063, + "step": 7730 + }, + { + "epoch": 0.5368004443827247, + "grad_norm": 2.955141196344212, + "learning_rate": 4.647765223679711e-06, + "loss": 0.3931, + "step": 7731 + }, + { + "epoch": 0.5368698791834468, + "grad_norm": 3.258880556476825, + "learning_rate": 4.64664353904294e-06, + "loss": 0.3063, + "step": 7732 + }, + { + "epoch": 0.5369393139841688, + "grad_norm": 3.6619120641441336, + "learning_rate": 4.6455218722785395e-06, + "loss": 0.4918, + "step": 7733 + }, + { + "epoch": 0.537008748784891, + "grad_norm": 5.004413325584721, + "learning_rate": 4.644400223443243e-06, + "loss": 0.6756, + "step": 7734 + }, + { + "epoch": 0.5370781835856131, + "grad_norm": 3.5466729757842073, + "learning_rate": 4.643278592593778e-06, + "loss": 0.4134, + "step": 7735 + }, + { + "epoch": 0.5371476183863352, + "grad_norm": 
4.159772931134172, + "learning_rate": 4.642156979786879e-06, + "loss": 0.505, + "step": 7736 + }, + { + "epoch": 0.5372170531870574, + "grad_norm": 3.0506058871701685, + "learning_rate": 4.641035385079277e-06, + "loss": 0.3657, + "step": 7737 + }, + { + "epoch": 0.5372864879877794, + "grad_norm": 4.004025663918991, + "learning_rate": 4.6399138085276985e-06, + "loss": 0.3494, + "step": 7738 + }, + { + "epoch": 0.5373559227885016, + "grad_norm": 4.100908851495084, + "learning_rate": 4.6387922501888706e-06, + "loss": 0.6365, + "step": 7739 + }, + { + "epoch": 0.5374253575892237, + "grad_norm": 4.2170936361681495, + "learning_rate": 4.637670710119523e-06, + "loss": 0.6643, + "step": 7740 + }, + { + "epoch": 0.5374947923899458, + "grad_norm": 3.028322461942456, + "learning_rate": 4.636549188376381e-06, + "loss": 0.416, + "step": 7741 + }, + { + "epoch": 0.537564227190668, + "grad_norm": 4.591543547514798, + "learning_rate": 4.635427685016169e-06, + "loss": 0.5728, + "step": 7742 + }, + { + "epoch": 0.5376336619913901, + "grad_norm": 3.696756224258373, + "learning_rate": 4.6343062000956125e-06, + "loss": 0.4103, + "step": 7743 + }, + { + "epoch": 0.5377030967921123, + "grad_norm": 2.980852100919446, + "learning_rate": 4.633184733671433e-06, + "loss": 0.2286, + "step": 7744 + }, + { + "epoch": 0.5377725315928343, + "grad_norm": 3.0010691153029545, + "learning_rate": 4.632063285800358e-06, + "loss": 0.2447, + "step": 7745 + }, + { + "epoch": 0.5378419663935564, + "grad_norm": 4.291870237707775, + "learning_rate": 4.630941856539103e-06, + "loss": 0.4616, + "step": 7746 + }, + { + "epoch": 0.5379114011942786, + "grad_norm": 3.1002540774051073, + "learning_rate": 4.629820445944392e-06, + "loss": 0.2843, + "step": 7747 + }, + { + "epoch": 0.5379808359950007, + "grad_norm": 3.9640776032418827, + "learning_rate": 4.628699054072945e-06, + "loss": 0.5418, + "step": 7748 + }, + { + "epoch": 0.5380502707957228, + "grad_norm": 4.119176862089531, + "learning_rate": 4.627577680981478e-06, + "loss": 0.6691, + "step": 7749 + }, + { + "epoch": 0.538119705596445, + "grad_norm": 3.4080735134052036, + "learning_rate": 4.626456326726712e-06, + "loss": 0.2981, + "step": 7750 + }, + { + "epoch": 0.538189140397167, + "grad_norm": 2.3168713393825735, + "learning_rate": 4.625334991365361e-06, + "loss": 0.1928, + "step": 7751 + }, + { + "epoch": 0.5382585751978892, + "grad_norm": 3.056506382606844, + "learning_rate": 4.624213674954144e-06, + "loss": 0.2702, + "step": 7752 + }, + { + "epoch": 0.5383280099986113, + "grad_norm": 2.8041572825177146, + "learning_rate": 4.623092377549772e-06, + "loss": 0.2992, + "step": 7753 + }, + { + "epoch": 0.5383974447993334, + "grad_norm": 4.057377334564871, + "learning_rate": 4.621971099208961e-06, + "loss": 0.6518, + "step": 7754 + }, + { + "epoch": 0.5384668796000556, + "grad_norm": 3.4762119089042693, + "learning_rate": 4.620849839988426e-06, + "loss": 0.3528, + "step": 7755 + }, + { + "epoch": 0.5385363144007776, + "grad_norm": 4.943733974828772, + "learning_rate": 4.6197285999448755e-06, + "loss": 0.6247, + "step": 7756 + }, + { + "epoch": 0.5386057492014998, + "grad_norm": 4.372872721685097, + "learning_rate": 4.618607379135023e-06, + "loss": 0.4667, + "step": 7757 + }, + { + "epoch": 0.5386751840022219, + "grad_norm": 4.231452392392652, + "learning_rate": 4.617486177615576e-06, + "loss": 0.5437, + "step": 7758 + }, + { + "epoch": 0.538744618802944, + "grad_norm": 5.030879270436095, + "learning_rate": 4.616364995443248e-06, + "loss": 0.327, + "step": 7759 + }, + { + "epoch": 
0.5388140536036662, + "grad_norm": 4.232128076290483, + "learning_rate": 4.615243832674743e-06, + "loss": 0.4787, + "step": 7760 + }, + { + "epoch": 0.5388834884043883, + "grad_norm": 3.8312330615921155, + "learning_rate": 4.614122689366769e-06, + "loss": 0.5087, + "step": 7761 + }, + { + "epoch": 0.5389529232051105, + "grad_norm": 4.372523599526809, + "learning_rate": 4.613001565576034e-06, + "loss": 0.7023, + "step": 7762 + }, + { + "epoch": 0.5390223580058325, + "grad_norm": 2.4090663749630292, + "learning_rate": 4.611880461359241e-06, + "loss": 0.2228, + "step": 7763 + }, + { + "epoch": 0.5390917928065546, + "grad_norm": 3.1187785104429095, + "learning_rate": 4.610759376773095e-06, + "loss": 0.3343, + "step": 7764 + }, + { + "epoch": 0.5391612276072768, + "grad_norm": 3.9413439002826975, + "learning_rate": 4.6096383118742986e-06, + "loss": 0.5463, + "step": 7765 + }, + { + "epoch": 0.5392306624079989, + "grad_norm": 3.348581764306762, + "learning_rate": 4.608517266719556e-06, + "loss": 0.3457, + "step": 7766 + }, + { + "epoch": 0.539300097208721, + "grad_norm": 5.236299105298392, + "learning_rate": 4.607396241365567e-06, + "loss": 0.6456, + "step": 7767 + }, + { + "epoch": 0.5393695320094432, + "grad_norm": 4.852551301895995, + "learning_rate": 4.606275235869031e-06, + "loss": 0.5471, + "step": 7768 + }, + { + "epoch": 0.5394389668101652, + "grad_norm": 3.63275820650788, + "learning_rate": 4.605154250286648e-06, + "loss": 0.4642, + "step": 7769 + }, + { + "epoch": 0.5395084016108874, + "grad_norm": 4.466145826048087, + "learning_rate": 4.604033284675118e-06, + "loss": 0.5292, + "step": 7770 + }, + { + "epoch": 0.5395778364116095, + "grad_norm": 3.8139071239048175, + "learning_rate": 4.602912339091134e-06, + "loss": 0.4908, + "step": 7771 + }, + { + "epoch": 0.5396472712123316, + "grad_norm": 4.4039152789258855, + "learning_rate": 4.601791413591396e-06, + "loss": 0.3488, + "step": 7772 + }, + { + "epoch": 0.5397167060130538, + "grad_norm": 3.0673191210392856, + "learning_rate": 4.600670508232599e-06, + "loss": 0.3073, + "step": 7773 + }, + { + "epoch": 0.5397861408137758, + "grad_norm": 3.359753617284215, + "learning_rate": 4.599549623071433e-06, + "loss": 0.31, + "step": 7774 + }, + { + "epoch": 0.539855575614498, + "grad_norm": 4.390795619418485, + "learning_rate": 4.598428758164594e-06, + "loss": 0.605, + "step": 7775 + }, + { + "epoch": 0.5399250104152201, + "grad_norm": 3.511963318432062, + "learning_rate": 4.597307913568775e-06, + "loss": 0.3442, + "step": 7776 + }, + { + "epoch": 0.5399944452159422, + "grad_norm": 4.0537972734714876, + "learning_rate": 4.5961870893406675e-06, + "loss": 0.5945, + "step": 7777 + }, + { + "epoch": 0.5400638800166644, + "grad_norm": 3.5042964140299055, + "learning_rate": 4.595066285536958e-06, + "loss": 0.4823, + "step": 7778 + }, + { + "epoch": 0.5401333148173865, + "grad_norm": 3.804070367279913, + "learning_rate": 4.593945502214339e-06, + "loss": 0.4052, + "step": 7779 + }, + { + "epoch": 0.5402027496181085, + "grad_norm": 2.877467191129682, + "learning_rate": 4.592824739429497e-06, + "loss": 0.2685, + "step": 7780 + }, + { + "epoch": 0.5402721844188307, + "grad_norm": 4.161668333031738, + "learning_rate": 4.591703997239118e-06, + "loss": 0.4799, + "step": 7781 + }, + { + "epoch": 0.5403416192195528, + "grad_norm": 3.993144969802978, + "learning_rate": 4.590583275699888e-06, + "loss": 0.5943, + "step": 7782 + }, + { + "epoch": 0.540411054020275, + "grad_norm": 3.3958560165694087, + "learning_rate": 4.589462574868493e-06, + "loss": 0.4757, + 
"step": 7783 + }, + { + "epoch": 0.5404804888209971, + "grad_norm": 2.999212949651576, + "learning_rate": 4.588341894801619e-06, + "loss": 0.2654, + "step": 7784 + }, + { + "epoch": 0.5405499236217192, + "grad_norm": 3.8120488966796438, + "learning_rate": 4.587221235555942e-06, + "loss": 0.6412, + "step": 7785 + }, + { + "epoch": 0.5406193584224414, + "grad_norm": 3.992937347172324, + "learning_rate": 4.586100597188151e-06, + "loss": 0.4797, + "step": 7786 + }, + { + "epoch": 0.5406887932231634, + "grad_norm": 4.580935596231863, + "learning_rate": 4.584979979754924e-06, + "loss": 0.5467, + "step": 7787 + }, + { + "epoch": 0.5407582280238856, + "grad_norm": 3.4949970486162525, + "learning_rate": 4.583859383312937e-06, + "loss": 0.4276, + "step": 7788 + }, + { + "epoch": 0.5408276628246077, + "grad_norm": 3.5389538263131177, + "learning_rate": 4.582738807918873e-06, + "loss": 0.3806, + "step": 7789 + }, + { + "epoch": 0.5408970976253298, + "grad_norm": 4.785179912872509, + "learning_rate": 4.581618253629408e-06, + "loss": 0.5611, + "step": 7790 + }, + { + "epoch": 0.540966532426052, + "grad_norm": 2.647674476657264, + "learning_rate": 4.580497720501219e-06, + "loss": 0.2475, + "step": 7791 + }, + { + "epoch": 0.541035967226774, + "grad_norm": 4.569117088219584, + "learning_rate": 4.57937720859098e-06, + "loss": 0.5439, + "step": 7792 + }, + { + "epoch": 0.5411054020274961, + "grad_norm": 3.95336936303858, + "learning_rate": 4.5782567179553645e-06, + "loss": 0.4036, + "step": 7793 + }, + { + "epoch": 0.5411748368282183, + "grad_norm": 4.391317913258363, + "learning_rate": 4.5771362486510464e-06, + "loss": 0.5631, + "step": 7794 + }, + { + "epoch": 0.5412442716289404, + "grad_norm": 4.082782999119234, + "learning_rate": 4.5760158007347024e-06, + "loss": 0.435, + "step": 7795 + }, + { + "epoch": 0.5413137064296626, + "grad_norm": 3.4416321255622924, + "learning_rate": 4.5748953742629965e-06, + "loss": 0.3792, + "step": 7796 + }, + { + "epoch": 0.5413831412303847, + "grad_norm": 3.9509235767153603, + "learning_rate": 4.573774969292601e-06, + "loss": 0.4394, + "step": 7797 + }, + { + "epoch": 0.5414525760311067, + "grad_norm": 4.059817996063551, + "learning_rate": 4.572654585880188e-06, + "loss": 0.5338, + "step": 7798 + }, + { + "epoch": 0.5415220108318289, + "grad_norm": 2.851290477009252, + "learning_rate": 4.571534224082421e-06, + "loss": 0.207, + "step": 7799 + }, + { + "epoch": 0.541591445632551, + "grad_norm": 3.9023556018040484, + "learning_rate": 4.570413883955967e-06, + "loss": 0.4765, + "step": 7800 + }, + { + "epoch": 0.5416608804332732, + "grad_norm": 2.877499804618252, + "learning_rate": 4.569293565557494e-06, + "loss": 0.3124, + "step": 7801 + }, + { + "epoch": 0.5417303152339953, + "grad_norm": 2.6779613446884825, + "learning_rate": 4.568173268943666e-06, + "loss": 0.2407, + "step": 7802 + }, + { + "epoch": 0.5417997500347174, + "grad_norm": 3.944465727418278, + "learning_rate": 4.567052994171142e-06, + "loss": 0.4367, + "step": 7803 + }, + { + "epoch": 0.5418691848354396, + "grad_norm": 3.8489436943239403, + "learning_rate": 4.56593274129659e-06, + "loss": 0.3924, + "step": 7804 + }, + { + "epoch": 0.5419386196361616, + "grad_norm": 5.346934484377518, + "learning_rate": 4.56481251037667e-06, + "loss": 0.7692, + "step": 7805 + }, + { + "epoch": 0.5420080544368837, + "grad_norm": 3.741945537500703, + "learning_rate": 4.563692301468038e-06, + "loss": 0.4884, + "step": 7806 + }, + { + "epoch": 0.5420774892376059, + "grad_norm": 3.343131613175298, + "learning_rate": 
4.562572114627356e-06, + "loss": 0.2434, + "step": 7807 + }, + { + "epoch": 0.542146924038328, + "grad_norm": 4.323468480977452, + "learning_rate": 4.561451949911283e-06, + "loss": 0.614, + "step": 7808 + }, + { + "epoch": 0.5422163588390502, + "grad_norm": 4.175738385090236, + "learning_rate": 4.560331807376475e-06, + "loss": 0.31, + "step": 7809 + }, + { + "epoch": 0.5422857936397723, + "grad_norm": 4.091246703256656, + "learning_rate": 4.559211687079584e-06, + "loss": 0.4352, + "step": 7810 + }, + { + "epoch": 0.5423552284404943, + "grad_norm": 4.560568703125746, + "learning_rate": 4.558091589077268e-06, + "loss": 0.3646, + "step": 7811 + }, + { + "epoch": 0.5424246632412165, + "grad_norm": 3.578650024481626, + "learning_rate": 4.556971513426181e-06, + "loss": 0.298, + "step": 7812 + }, + { + "epoch": 0.5424940980419386, + "grad_norm": 4.787485870006128, + "learning_rate": 4.555851460182971e-06, + "loss": 0.6568, + "step": 7813 + }, + { + "epoch": 0.5425635328426608, + "grad_norm": 3.309158398136236, + "learning_rate": 4.554731429404293e-06, + "loss": 0.4776, + "step": 7814 + }, + { + "epoch": 0.5426329676433829, + "grad_norm": 4.460722245154039, + "learning_rate": 4.553611421146794e-06, + "loss": 0.5775, + "step": 7815 + }, + { + "epoch": 0.542702402444105, + "grad_norm": 3.9798967845419244, + "learning_rate": 4.552491435467126e-06, + "loss": 0.4332, + "step": 7816 + }, + { + "epoch": 0.5427718372448271, + "grad_norm": 6.24690165336592, + "learning_rate": 4.551371472421933e-06, + "loss": 0.5835, + "step": 7817 + }, + { + "epoch": 0.5428412720455492, + "grad_norm": 3.821024194378232, + "learning_rate": 4.550251532067865e-06, + "loss": 0.3577, + "step": 7818 + }, + { + "epoch": 0.5429107068462713, + "grad_norm": 3.2908906248024246, + "learning_rate": 4.549131614461564e-06, + "loss": 0.3094, + "step": 7819 + }, + { + "epoch": 0.5429801416469935, + "grad_norm": 4.509235617357524, + "learning_rate": 4.548011719659679e-06, + "loss": 0.3551, + "step": 7820 + }, + { + "epoch": 0.5430495764477156, + "grad_norm": 3.8538095685754064, + "learning_rate": 4.5468918477188474e-06, + "loss": 0.347, + "step": 7821 + }, + { + "epoch": 0.5431190112484378, + "grad_norm": 3.5227639195684954, + "learning_rate": 4.545771998695712e-06, + "loss": 0.4202, + "step": 7822 + }, + { + "epoch": 0.5431884460491598, + "grad_norm": 3.6155845215261824, + "learning_rate": 4.5446521726469185e-06, + "loss": 0.4865, + "step": 7823 + }, + { + "epoch": 0.5432578808498819, + "grad_norm": 3.6685302920354173, + "learning_rate": 4.5435323696291005e-06, + "loss": 0.4639, + "step": 7824 + }, + { + "epoch": 0.5433273156506041, + "grad_norm": 3.658261813317143, + "learning_rate": 4.542412589698898e-06, + "loss": 0.5128, + "step": 7825 + }, + { + "epoch": 0.5433967504513262, + "grad_norm": 4.296781719352591, + "learning_rate": 4.541292832912949e-06, + "loss": 0.4389, + "step": 7826 + }, + { + "epoch": 0.5434661852520484, + "grad_norm": 4.627301915242828, + "learning_rate": 4.540173099327892e-06, + "loss": 0.4105, + "step": 7827 + }, + { + "epoch": 0.5435356200527705, + "grad_norm": 4.397616599142978, + "learning_rate": 4.539053389000357e-06, + "loss": 0.8035, + "step": 7828 + }, + { + "epoch": 0.5436050548534925, + "grad_norm": 3.7429011610219924, + "learning_rate": 4.53793370198698e-06, + "loss": 0.4082, + "step": 7829 + }, + { + "epoch": 0.5436744896542147, + "grad_norm": 3.7622088692591458, + "learning_rate": 4.536814038344396e-06, + "loss": 0.5232, + "step": 7830 + }, + { + "epoch": 0.5437439244549368, + "grad_norm": 
2.565176565544614, + "learning_rate": 4.535694398129232e-06, + "loss": 0.1621, + "step": 7831 + }, + { + "epoch": 0.543813359255659, + "grad_norm": 4.699676542639202, + "learning_rate": 4.534574781398119e-06, + "loss": 0.7175, + "step": 7832 + }, + { + "epoch": 0.5438827940563811, + "grad_norm": 3.857790470826273, + "learning_rate": 4.533455188207688e-06, + "loss": 0.4953, + "step": 7833 + }, + { + "epoch": 0.5439522288571031, + "grad_norm": 4.108780476502911, + "learning_rate": 4.5323356186145665e-06, + "loss": 0.4502, + "step": 7834 + }, + { + "epoch": 0.5440216636578253, + "grad_norm": 3.902921243273005, + "learning_rate": 4.531216072675378e-06, + "loss": 0.5658, + "step": 7835 + }, + { + "epoch": 0.5440910984585474, + "grad_norm": 4.072623988526568, + "learning_rate": 4.53009655044675e-06, + "loss": 0.4544, + "step": 7836 + }, + { + "epoch": 0.5441605332592695, + "grad_norm": 4.973514606390276, + "learning_rate": 4.5289770519853094e-06, + "loss": 0.628, + "step": 7837 + }, + { + "epoch": 0.5442299680599917, + "grad_norm": 3.2988403859413453, + "learning_rate": 4.527857577347675e-06, + "loss": 0.4276, + "step": 7838 + }, + { + "epoch": 0.5442994028607138, + "grad_norm": 3.531401392772517, + "learning_rate": 4.526738126590469e-06, + "loss": 0.4412, + "step": 7839 + }, + { + "epoch": 0.544368837661436, + "grad_norm": 4.188836507060861, + "learning_rate": 4.525618699770314e-06, + "loss": 0.5406, + "step": 7840 + }, + { + "epoch": 0.544438272462158, + "grad_norm": 2.9878048815041467, + "learning_rate": 4.524499296943831e-06, + "loss": 0.3095, + "step": 7841 + }, + { + "epoch": 0.5445077072628801, + "grad_norm": 3.1216665902706184, + "learning_rate": 4.523379918167632e-06, + "loss": 0.2296, + "step": 7842 + }, + { + "epoch": 0.5445771420636023, + "grad_norm": 3.823838070691442, + "learning_rate": 4.522260563498338e-06, + "loss": 0.4619, + "step": 7843 + }, + { + "epoch": 0.5446465768643244, + "grad_norm": 3.6633734361941426, + "learning_rate": 4.521141232992566e-06, + "loss": 0.4079, + "step": 7844 + }, + { + "epoch": 0.5447160116650466, + "grad_norm": 3.237974871365177, + "learning_rate": 4.520021926706926e-06, + "loss": 0.3856, + "step": 7845 + }, + { + "epoch": 0.5447854464657687, + "grad_norm": 4.531569033342239, + "learning_rate": 4.518902644698035e-06, + "loss": 0.7278, + "step": 7846 + }, + { + "epoch": 0.5448548812664907, + "grad_norm": 3.3474516602436326, + "learning_rate": 4.517783387022503e-06, + "loss": 0.2808, + "step": 7847 + }, + { + "epoch": 0.5449243160672129, + "grad_norm": 3.841947251692044, + "learning_rate": 4.516664153736943e-06, + "loss": 0.4473, + "step": 7848 + }, + { + "epoch": 0.544993750867935, + "grad_norm": 5.024935846692401, + "learning_rate": 4.515544944897962e-06, + "loss": 0.7742, + "step": 7849 + }, + { + "epoch": 0.5450631856686571, + "grad_norm": 2.451779319537501, + "learning_rate": 4.5144257605621705e-06, + "loss": 0.169, + "step": 7850 + }, + { + "epoch": 0.5451326204693793, + "grad_norm": 3.2673103920403133, + "learning_rate": 4.5133066007861735e-06, + "loss": 0.3717, + "step": 7851 + }, + { + "epoch": 0.5452020552701013, + "grad_norm": 2.865751525649564, + "learning_rate": 4.512187465626579e-06, + "loss": 0.2934, + "step": 7852 + }, + { + "epoch": 0.5452714900708235, + "grad_norm": 2.436368662225953, + "learning_rate": 4.511068355139991e-06, + "loss": 0.1529, + "step": 7853 + }, + { + "epoch": 0.5453409248715456, + "grad_norm": 4.112247132082978, + "learning_rate": 4.50994926938301e-06, + "loss": 0.4986, + "step": 7854 + }, + { + "epoch": 
0.5454103596722677, + "grad_norm": 2.93869574583043, + "learning_rate": 4.5088302084122436e-06, + "loss": 0.3858, + "step": 7855 + }, + { + "epoch": 0.5454797944729899, + "grad_norm": 3.3623534927603385, + "learning_rate": 4.507711172284286e-06, + "loss": 0.4445, + "step": 7856 + }, + { + "epoch": 0.545549229273712, + "grad_norm": 4.335016490223838, + "learning_rate": 4.506592161055742e-06, + "loss": 0.4385, + "step": 7857 + }, + { + "epoch": 0.5456186640744342, + "grad_norm": 3.496126976370751, + "learning_rate": 4.505473174783207e-06, + "loss": 0.5206, + "step": 7858 + }, + { + "epoch": 0.5456880988751562, + "grad_norm": 3.993937390005536, + "learning_rate": 4.504354213523282e-06, + "loss": 0.2683, + "step": 7859 + }, + { + "epoch": 0.5457575336758783, + "grad_norm": 3.9507671069411776, + "learning_rate": 4.503235277332558e-06, + "loss": 0.3484, + "step": 7860 + }, + { + "epoch": 0.5458269684766005, + "grad_norm": 3.2029148736822406, + "learning_rate": 4.502116366267631e-06, + "loss": 0.3091, + "step": 7861 + }, + { + "epoch": 0.5458964032773226, + "grad_norm": 4.583294644459198, + "learning_rate": 4.500997480385097e-06, + "loss": 0.6528, + "step": 7862 + }, + { + "epoch": 0.5459658380780447, + "grad_norm": 3.1356845502109882, + "learning_rate": 4.499878619741545e-06, + "loss": 0.237, + "step": 7863 + }, + { + "epoch": 0.5460352728787669, + "grad_norm": 3.5063987998937542, + "learning_rate": 4.498759784393564e-06, + "loss": 0.241, + "step": 7864 + }, + { + "epoch": 0.5461047076794889, + "grad_norm": 3.4683401464472445, + "learning_rate": 4.497640974397747e-06, + "loss": 0.3361, + "step": 7865 + }, + { + "epoch": 0.5461741424802111, + "grad_norm": 3.557318186266412, + "learning_rate": 4.496522189810683e-06, + "loss": 0.454, + "step": 7866 + }, + { + "epoch": 0.5462435772809332, + "grad_norm": 3.8530556362666597, + "learning_rate": 4.495403430688956e-06, + "loss": 0.5746, + "step": 7867 + }, + { + "epoch": 0.5463130120816553, + "grad_norm": 5.0421371621566955, + "learning_rate": 4.494284697089152e-06, + "loss": 0.9102, + "step": 7868 + }, + { + "epoch": 0.5463824468823775, + "grad_norm": 4.0578908552130795, + "learning_rate": 4.493165989067857e-06, + "loss": 0.4863, + "step": 7869 + }, + { + "epoch": 0.5464518816830995, + "grad_norm": 3.4959425216219624, + "learning_rate": 4.492047306681651e-06, + "loss": 0.4289, + "step": 7870 + }, + { + "epoch": 0.5465213164838217, + "grad_norm": 3.7829928586282735, + "learning_rate": 4.490928649987118e-06, + "loss": 0.4026, + "step": 7871 + }, + { + "epoch": 0.5465907512845438, + "grad_norm": 4.177019234742986, + "learning_rate": 4.489810019040839e-06, + "loss": 0.6519, + "step": 7872 + }, + { + "epoch": 0.5466601860852659, + "grad_norm": 5.38908905030776, + "learning_rate": 4.4886914138993914e-06, + "loss": 0.818, + "step": 7873 + }, + { + "epoch": 0.5467296208859881, + "grad_norm": 2.7556952788455993, + "learning_rate": 4.4875728346193526e-06, + "loss": 0.4404, + "step": 7874 + }, + { + "epoch": 0.5467990556867102, + "grad_norm": 4.9663409906876, + "learning_rate": 4.4864542812573e-06, + "loss": 0.5904, + "step": 7875 + }, + { + "epoch": 0.5468684904874322, + "grad_norm": 4.14203694200325, + "learning_rate": 4.4853357538698085e-06, + "loss": 0.4948, + "step": 7876 + }, + { + "epoch": 0.5469379252881544, + "grad_norm": 5.130982168646041, + "learning_rate": 4.484217252513454e-06, + "loss": 0.9528, + "step": 7877 + }, + { + "epoch": 0.5470073600888765, + "grad_norm": 5.258988293292949, + "learning_rate": 4.483098777244806e-06, + "loss": 0.6053, + "step": 
7878 + }, + { + "epoch": 0.5470767948895987, + "grad_norm": 4.486548574093626, + "learning_rate": 4.4819803281204365e-06, + "loss": 0.5033, + "step": 7879 + }, + { + "epoch": 0.5471462296903208, + "grad_norm": 4.433264916177619, + "learning_rate": 4.480861905196917e-06, + "loss": 0.5708, + "step": 7880 + }, + { + "epoch": 0.5472156644910429, + "grad_norm": 6.16111650136586, + "learning_rate": 4.479743508530814e-06, + "loss": 0.58, + "step": 7881 + }, + { + "epoch": 0.547285099291765, + "grad_norm": 3.4379582286320725, + "learning_rate": 4.478625138178696e-06, + "loss": 0.4042, + "step": 7882 + }, + { + "epoch": 0.5473545340924871, + "grad_norm": 4.298553254120449, + "learning_rate": 4.477506794197127e-06, + "loss": 0.4323, + "step": 7883 + }, + { + "epoch": 0.5474239688932093, + "grad_norm": 3.4021884041417505, + "learning_rate": 4.476388476642676e-06, + "loss": 0.3333, + "step": 7884 + }, + { + "epoch": 0.5474934036939314, + "grad_norm": 3.615758129567429, + "learning_rate": 4.475270185571902e-06, + "loss": 0.4725, + "step": 7885 + }, + { + "epoch": 0.5475628384946535, + "grad_norm": 3.706068996605706, + "learning_rate": 4.474151921041367e-06, + "loss": 0.3636, + "step": 7886 + }, + { + "epoch": 0.5476322732953757, + "grad_norm": 3.594665121381344, + "learning_rate": 4.4730336831076355e-06, + "loss": 0.4392, + "step": 7887 + }, + { + "epoch": 0.5477017080960978, + "grad_norm": 4.651491386235978, + "learning_rate": 4.471915471827262e-06, + "loss": 0.5858, + "step": 7888 + }, + { + "epoch": 0.5477711428968199, + "grad_norm": 3.869906140473688, + "learning_rate": 4.4707972872568065e-06, + "loss": 0.439, + "step": 7889 + }, + { + "epoch": 0.547840577697542, + "grad_norm": 4.14370410379573, + "learning_rate": 4.469679129452824e-06, + "loss": 0.3423, + "step": 7890 + }, + { + "epoch": 0.5479100124982641, + "grad_norm": 3.593538761836902, + "learning_rate": 4.4685609984718745e-06, + "loss": 0.285, + "step": 7891 + }, + { + "epoch": 0.5479794472989863, + "grad_norm": 3.8856291822531186, + "learning_rate": 4.467442894370506e-06, + "loss": 0.2935, + "step": 7892 + }, + { + "epoch": 0.5480488820997084, + "grad_norm": 4.006517617151857, + "learning_rate": 4.466324817205274e-06, + "loss": 0.5322, + "step": 7893 + }, + { + "epoch": 0.5481183169004304, + "grad_norm": 3.806870167661226, + "learning_rate": 4.465206767032729e-06, + "loss": 0.4075, + "step": 7894 + }, + { + "epoch": 0.5481877517011526, + "grad_norm": 4.125140631790992, + "learning_rate": 4.464088743909421e-06, + "loss": 0.4935, + "step": 7895 + }, + { + "epoch": 0.5482571865018747, + "grad_norm": 4.290221451239905, + "learning_rate": 4.462970747891896e-06, + "loss": 0.6986, + "step": 7896 + }, + { + "epoch": 0.5483266213025969, + "grad_norm": 4.3913599570855375, + "learning_rate": 4.461852779036703e-06, + "loss": 0.5693, + "step": 7897 + }, + { + "epoch": 0.548396056103319, + "grad_norm": 3.7434284310815085, + "learning_rate": 4.46073483740039e-06, + "loss": 0.5228, + "step": 7898 + }, + { + "epoch": 0.5484654909040411, + "grad_norm": 3.2362813759181686, + "learning_rate": 4.459616923039497e-06, + "loss": 0.2786, + "step": 7899 + }, + { + "epoch": 0.5485349257047633, + "grad_norm": 4.258901384452667, + "learning_rate": 4.458499036010568e-06, + "loss": 0.4919, + "step": 7900 + }, + { + "epoch": 0.5486043605054853, + "grad_norm": 3.968102879658112, + "learning_rate": 4.457381176370146e-06, + "loss": 0.3607, + "step": 7901 + }, + { + "epoch": 0.5486737953062075, + "grad_norm": 4.148065141826787, + "learning_rate": 4.456263344174771e-06, + 
"loss": 0.4299, + "step": 7902 + }, + { + "epoch": 0.5487432301069296, + "grad_norm": 3.4507670866569047, + "learning_rate": 4.455145539480978e-06, + "loss": 0.2755, + "step": 7903 + }, + { + "epoch": 0.5488126649076517, + "grad_norm": 3.1032518910106757, + "learning_rate": 4.45402776234531e-06, + "loss": 0.2928, + "step": 7904 + }, + { + "epoch": 0.5488820997083739, + "grad_norm": 4.1950812626960925, + "learning_rate": 4.452910012824299e-06, + "loss": 0.5158, + "step": 7905 + }, + { + "epoch": 0.548951534509096, + "grad_norm": 3.9567106677171178, + "learning_rate": 4.451792290974479e-06, + "loss": 0.5323, + "step": 7906 + }, + { + "epoch": 0.549020969309818, + "grad_norm": 4.305575843093653, + "learning_rate": 4.4506745968523855e-06, + "loss": 0.367, + "step": 7907 + }, + { + "epoch": 0.5490904041105402, + "grad_norm": 5.131052091892561, + "learning_rate": 4.449556930514549e-06, + "loss": 0.3009, + "step": 7908 + }, + { + "epoch": 0.5491598389112623, + "grad_norm": 4.096069285898798, + "learning_rate": 4.448439292017502e-06, + "loss": 0.5735, + "step": 7909 + }, + { + "epoch": 0.5492292737119845, + "grad_norm": 4.430343650529863, + "learning_rate": 4.4473216814177685e-06, + "loss": 0.4035, + "step": 7910 + }, + { + "epoch": 0.5492987085127066, + "grad_norm": 3.0240317406961656, + "learning_rate": 4.446204098771881e-06, + "loss": 0.2514, + "step": 7911 + }, + { + "epoch": 0.5493681433134286, + "grad_norm": 3.8256352346244302, + "learning_rate": 4.445086544136364e-06, + "loss": 0.5403, + "step": 7912 + }, + { + "epoch": 0.5494375781141508, + "grad_norm": 5.794244850334644, + "learning_rate": 4.443969017567741e-06, + "loss": 0.7466, + "step": 7913 + }, + { + "epoch": 0.5495070129148729, + "grad_norm": 5.474179751459487, + "learning_rate": 4.4428515191225354e-06, + "loss": 0.7446, + "step": 7914 + }, + { + "epoch": 0.5495764477155951, + "grad_norm": 3.9035724592946592, + "learning_rate": 4.4417340488572685e-06, + "loss": 0.4756, + "step": 7915 + }, + { + "epoch": 0.5496458825163172, + "grad_norm": 2.415844427581755, + "learning_rate": 4.440616606828464e-06, + "loss": 0.2236, + "step": 7916 + }, + { + "epoch": 0.5497153173170393, + "grad_norm": 3.5292265895432378, + "learning_rate": 4.439499193092637e-06, + "loss": 0.4957, + "step": 7917 + }, + { + "epoch": 0.5497847521177615, + "grad_norm": 2.855162428387412, + "learning_rate": 4.438381807706306e-06, + "loss": 0.3292, + "step": 7918 + }, + { + "epoch": 0.5498541869184835, + "grad_norm": 3.7998163312577633, + "learning_rate": 4.4372644507259905e-06, + "loss": 0.3757, + "step": 7919 + }, + { + "epoch": 0.5499236217192056, + "grad_norm": 3.482455272048503, + "learning_rate": 4.4361471222082e-06, + "loss": 0.5242, + "step": 7920 + }, + { + "epoch": 0.5499930565199278, + "grad_norm": 4.41984722393816, + "learning_rate": 4.43502982220945e-06, + "loss": 0.4838, + "step": 7921 + }, + { + "epoch": 0.5500624913206499, + "grad_norm": 5.535387869736434, + "learning_rate": 4.4339125507862516e-06, + "loss": 0.5463, + "step": 7922 + }, + { + "epoch": 0.5501319261213721, + "grad_norm": 3.817464795803458, + "learning_rate": 4.432795307995118e-06, + "loss": 0.368, + "step": 7923 + }, + { + "epoch": 0.5502013609220942, + "grad_norm": 3.676068914334841, + "learning_rate": 4.431678093892555e-06, + "loss": 0.3689, + "step": 7924 + }, + { + "epoch": 0.5502707957228162, + "grad_norm": 3.019774095957323, + "learning_rate": 4.43056090853507e-06, + "loss": 0.3959, + "step": 7925 + }, + { + "epoch": 0.5503402305235384, + "grad_norm": 3.181964786539587, + 
"learning_rate": 4.42944375197917e-06, + "loss": 0.3957, + "step": 7926 + }, + { + "epoch": 0.5504096653242605, + "grad_norm": 3.6707778627188437, + "learning_rate": 4.428326624281362e-06, + "loss": 0.3619, + "step": 7927 + }, + { + "epoch": 0.5504791001249827, + "grad_norm": 3.5896921107083184, + "learning_rate": 4.427209525498144e-06, + "loss": 0.3856, + "step": 7928 + }, + { + "epoch": 0.5505485349257048, + "grad_norm": 4.395115483168356, + "learning_rate": 4.42609245568602e-06, + "loss": 0.6444, + "step": 7929 + }, + { + "epoch": 0.5506179697264268, + "grad_norm": 5.509488388574817, + "learning_rate": 4.424975414901492e-06, + "loss": 0.8689, + "step": 7930 + }, + { + "epoch": 0.550687404527149, + "grad_norm": 3.149073901880616, + "learning_rate": 4.423858403201055e-06, + "loss": 0.3253, + "step": 7931 + }, + { + "epoch": 0.5507568393278711, + "grad_norm": 3.6854112896197857, + "learning_rate": 4.422741420641208e-06, + "loss": 0.3073, + "step": 7932 + }, + { + "epoch": 0.5508262741285932, + "grad_norm": 4.840887949330318, + "learning_rate": 4.421624467278447e-06, + "loss": 0.5498, + "step": 7933 + }, + { + "epoch": 0.5508957089293154, + "grad_norm": 2.5902444660353283, + "learning_rate": 4.420507543169268e-06, + "loss": 0.2945, + "step": 7934 + }, + { + "epoch": 0.5509651437300375, + "grad_norm": 3.927002084019557, + "learning_rate": 4.419390648370159e-06, + "loss": 0.6209, + "step": 7935 + }, + { + "epoch": 0.5510345785307597, + "grad_norm": 3.7969213503396917, + "learning_rate": 4.4182737829376135e-06, + "loss": 0.3833, + "step": 7936 + }, + { + "epoch": 0.5511040133314817, + "grad_norm": 5.087144198340094, + "learning_rate": 4.417156946928125e-06, + "loss": 0.6636, + "step": 7937 + }, + { + "epoch": 0.5511734481322038, + "grad_norm": 4.1873270410429555, + "learning_rate": 4.416040140398177e-06, + "loss": 0.416, + "step": 7938 + }, + { + "epoch": 0.551242882932926, + "grad_norm": 4.023183231306474, + "learning_rate": 4.414923363404257e-06, + "loss": 0.5194, + "step": 7939 + }, + { + "epoch": 0.5513123177336481, + "grad_norm": 2.7471062583481105, + "learning_rate": 4.413806616002852e-06, + "loss": 0.3136, + "step": 7940 + }, + { + "epoch": 0.5513817525343703, + "grad_norm": 4.369241212270322, + "learning_rate": 4.412689898250446e-06, + "loss": 0.3908, + "step": 7941 + }, + { + "epoch": 0.5514511873350924, + "grad_norm": 3.700229070054268, + "learning_rate": 4.4115732102035186e-06, + "loss": 0.415, + "step": 7942 + }, + { + "epoch": 0.5515206221358144, + "grad_norm": 4.458270099068796, + "learning_rate": 4.410456551918553e-06, + "loss": 0.5552, + "step": 7943 + }, + { + "epoch": 0.5515900569365366, + "grad_norm": 2.4179399159846326, + "learning_rate": 4.40933992345203e-06, + "loss": 0.2123, + "step": 7944 + }, + { + "epoch": 0.5516594917372587, + "grad_norm": 4.634787773946215, + "learning_rate": 4.408223324860422e-06, + "loss": 0.7405, + "step": 7945 + }, + { + "epoch": 0.5517289265379809, + "grad_norm": 3.185264285833028, + "learning_rate": 4.407106756200209e-06, + "loss": 0.3842, + "step": 7946 + }, + { + "epoch": 0.551798361338703, + "grad_norm": 2.4773409308235594, + "learning_rate": 4.405990217527866e-06, + "loss": 0.2298, + "step": 7947 + }, + { + "epoch": 0.551867796139425, + "grad_norm": 4.418729088339476, + "learning_rate": 4.404873708899867e-06, + "loss": 0.5424, + "step": 7948 + }, + { + "epoch": 0.5519372309401472, + "grad_norm": 3.7655194930842253, + "learning_rate": 4.403757230372681e-06, + "loss": 0.4721, + "step": 7949 + }, + { + "epoch": 0.5520066657408693, + 
"grad_norm": 3.5031912701625787, + "learning_rate": 4.40264078200278e-06, + "loss": 0.3636, + "step": 7950 + }, + { + "epoch": 0.5520761005415914, + "grad_norm": 3.3385631527255244, + "learning_rate": 4.401524363846631e-06, + "loss": 0.3839, + "step": 7951 + }, + { + "epoch": 0.5521455353423136, + "grad_norm": 4.13453418697, + "learning_rate": 4.400407975960705e-06, + "loss": 0.4541, + "step": 7952 + }, + { + "epoch": 0.5522149701430357, + "grad_norm": 4.875365416167943, + "learning_rate": 4.399291618401464e-06, + "loss": 0.7122, + "step": 7953 + }, + { + "epoch": 0.5522844049437579, + "grad_norm": 3.716012812244266, + "learning_rate": 4.3981752912253725e-06, + "loss": 0.3943, + "step": 7954 + }, + { + "epoch": 0.5523538397444799, + "grad_norm": 3.5602949624444227, + "learning_rate": 4.397058994488895e-06, + "loss": 0.3792, + "step": 7955 + }, + { + "epoch": 0.552423274545202, + "grad_norm": 3.9412152639985045, + "learning_rate": 4.3959427282484895e-06, + "loss": 0.5076, + "step": 7956 + }, + { + "epoch": 0.5524927093459242, + "grad_norm": 3.1688328648719755, + "learning_rate": 4.394826492560618e-06, + "loss": 0.3416, + "step": 7957 + }, + { + "epoch": 0.5525621441466463, + "grad_norm": 4.013513182224877, + "learning_rate": 4.393710287481737e-06, + "loss": 0.3626, + "step": 7958 + }, + { + "epoch": 0.5526315789473685, + "grad_norm": 3.8322144817976946, + "learning_rate": 4.392594113068306e-06, + "loss": 0.5341, + "step": 7959 + }, + { + "epoch": 0.5527010137480906, + "grad_norm": 4.080909115889743, + "learning_rate": 4.391477969376777e-06, + "loss": 0.4921, + "step": 7960 + }, + { + "epoch": 0.5527704485488126, + "grad_norm": 4.154841665087655, + "learning_rate": 4.390361856463601e-06, + "loss": 0.347, + "step": 7961 + }, + { + "epoch": 0.5528398833495348, + "grad_norm": 3.7389184847166272, + "learning_rate": 4.389245774385235e-06, + "loss": 0.3459, + "step": 7962 + }, + { + "epoch": 0.5529093181502569, + "grad_norm": 4.498990352242344, + "learning_rate": 4.388129723198126e-06, + "loss": 0.578, + "step": 7963 + }, + { + "epoch": 0.552978752950979, + "grad_norm": 5.6533854135584365, + "learning_rate": 4.387013702958722e-06, + "loss": 0.3347, + "step": 7964 + }, + { + "epoch": 0.5530481877517012, + "grad_norm": 4.461771946469103, + "learning_rate": 4.385897713723472e-06, + "loss": 0.4243, + "step": 7965 + }, + { + "epoch": 0.5531176225524232, + "grad_norm": 3.8490168128715125, + "learning_rate": 4.384781755548823e-06, + "loss": 0.2819, + "step": 7966 + }, + { + "epoch": 0.5531870573531454, + "grad_norm": 4.128367880781366, + "learning_rate": 4.383665828491214e-06, + "loss": 0.3998, + "step": 7967 + }, + { + "epoch": 0.5532564921538675, + "grad_norm": 2.525011343418954, + "learning_rate": 4.382549932607089e-06, + "loss": 0.1469, + "step": 7968 + }, + { + "epoch": 0.5533259269545896, + "grad_norm": 2.8130810609876242, + "learning_rate": 4.381434067952893e-06, + "loss": 0.2523, + "step": 7969 + }, + { + "epoch": 0.5533953617553118, + "grad_norm": 4.468741439854196, + "learning_rate": 4.38031823458506e-06, + "loss": 0.7451, + "step": 7970 + }, + { + "epoch": 0.5534647965560339, + "grad_norm": 2.87534946199836, + "learning_rate": 4.379202432560029e-06, + "loss": 0.2741, + "step": 7971 + }, + { + "epoch": 0.5535342313567561, + "grad_norm": 3.2750310603315587, + "learning_rate": 4.378086661934237e-06, + "loss": 0.3077, + "step": 7972 + }, + { + "epoch": 0.5536036661574781, + "grad_norm": 4.270109746229505, + "learning_rate": 4.376970922764119e-06, + "loss": 0.4675, + "step": 7973 + }, + { + 
"epoch": 0.5536731009582002, + "grad_norm": 3.4226562554158124, + "learning_rate": 4.375855215106105e-06, + "loss": 0.3705, + "step": 7974 + }, + { + "epoch": 0.5537425357589224, + "grad_norm": 4.00250215696577, + "learning_rate": 4.374739539016629e-06, + "loss": 0.502, + "step": 7975 + }, + { + "epoch": 0.5538119705596445, + "grad_norm": 4.991099343483806, + "learning_rate": 4.3736238945521185e-06, + "loss": 0.4291, + "step": 7976 + }, + { + "epoch": 0.5538814053603666, + "grad_norm": 2.8092616961154255, + "learning_rate": 4.372508281769004e-06, + "loss": 0.2455, + "step": 7977 + }, + { + "epoch": 0.5539508401610888, + "grad_norm": 3.1087895334313167, + "learning_rate": 4.3713927007237104e-06, + "loss": 0.1976, + "step": 7978 + }, + { + "epoch": 0.5540202749618108, + "grad_norm": 4.780987592212305, + "learning_rate": 4.3702771514726615e-06, + "loss": 0.6574, + "step": 7979 + }, + { + "epoch": 0.554089709762533, + "grad_norm": 4.441327826193259, + "learning_rate": 4.369161634072284e-06, + "loss": 0.757, + "step": 7980 + }, + { + "epoch": 0.5541591445632551, + "grad_norm": 3.3393032626235737, + "learning_rate": 4.368046148578995e-06, + "loss": 0.364, + "step": 7981 + }, + { + "epoch": 0.5542285793639772, + "grad_norm": 3.546433096390069, + "learning_rate": 4.366930695049217e-06, + "loss": 0.3617, + "step": 7982 + }, + { + "epoch": 0.5542980141646994, + "grad_norm": 3.836359145991592, + "learning_rate": 4.365815273539368e-06, + "loss": 0.511, + "step": 7983 + }, + { + "epoch": 0.5543674489654214, + "grad_norm": 3.3392862422919194, + "learning_rate": 4.364699884105866e-06, + "loss": 0.3546, + "step": 7984 + }, + { + "epoch": 0.5544368837661436, + "grad_norm": 2.745442412705354, + "learning_rate": 4.363584526805124e-06, + "loss": 0.3373, + "step": 7985 + }, + { + "epoch": 0.5545063185668657, + "grad_norm": 3.325892497304828, + "learning_rate": 4.362469201693556e-06, + "loss": 0.2191, + "step": 7986 + }, + { + "epoch": 0.5545757533675878, + "grad_norm": 2.6735411206784683, + "learning_rate": 4.361353908827577e-06, + "loss": 0.2911, + "step": 7987 + }, + { + "epoch": 0.55464518816831, + "grad_norm": 3.977619051697231, + "learning_rate": 4.360238648263591e-06, + "loss": 0.4674, + "step": 7988 + }, + { + "epoch": 0.5547146229690321, + "grad_norm": 3.7267064974992037, + "learning_rate": 4.359123420058013e-06, + "loss": 0.4593, + "step": 7989 + }, + { + "epoch": 0.5547840577697541, + "grad_norm": 4.999904390531836, + "learning_rate": 4.358008224267245e-06, + "loss": 0.5662, + "step": 7990 + }, + { + "epoch": 0.5548534925704763, + "grad_norm": 4.479396806272263, + "learning_rate": 4.3568930609476964e-06, + "loss": 0.5029, + "step": 7991 + }, + { + "epoch": 0.5549229273711984, + "grad_norm": 2.6431742753672096, + "learning_rate": 4.355777930155769e-06, + "loss": 0.1754, + "step": 7992 + }, + { + "epoch": 0.5549923621719206, + "grad_norm": 3.6006094853421353, + "learning_rate": 4.354662831947863e-06, + "loss": 0.3914, + "step": 7993 + }, + { + "epoch": 0.5550617969726427, + "grad_norm": 4.026048817642806, + "learning_rate": 4.353547766380384e-06, + "loss": 0.3545, + "step": 7994 + }, + { + "epoch": 0.5551312317733648, + "grad_norm": 3.9100487941225617, + "learning_rate": 4.352432733509725e-06, + "loss": 0.5687, + "step": 7995 + }, + { + "epoch": 0.555200666574087, + "grad_norm": 3.872365307883858, + "learning_rate": 4.3513177333922855e-06, + "loss": 0.4288, + "step": 7996 + }, + { + "epoch": 0.555270101374809, + "grad_norm": 4.934567948767992, + "learning_rate": 4.3502027660844606e-06, + "loss": 
0.6546, + "step": 7997 + }, + { + "epoch": 0.5553395361755312, + "grad_norm": 4.308362687182634, + "learning_rate": 4.3490878316426464e-06, + "loss": 0.5472, + "step": 7998 + }, + { + "epoch": 0.5554089709762533, + "grad_norm": 3.933495045189264, + "learning_rate": 4.347972930123232e-06, + "loss": 0.3982, + "step": 7999 + }, + { + "epoch": 0.5554784057769754, + "grad_norm": 4.539997064667931, + "learning_rate": 4.346858061582608e-06, + "loss": 0.4613, + "step": 8000 + }, + { + "epoch": 0.5555478405776976, + "grad_norm": 3.159671633508985, + "learning_rate": 4.345743226077166e-06, + "loss": 0.355, + "step": 8001 + }, + { + "epoch": 0.5556172753784197, + "grad_norm": 3.9236063272338515, + "learning_rate": 4.344628423663292e-06, + "loss": 0.4882, + "step": 8002 + }, + { + "epoch": 0.5556867101791418, + "grad_norm": 3.9446194270922357, + "learning_rate": 4.343513654397368e-06, + "loss": 0.2749, + "step": 8003 + }, + { + "epoch": 0.5557561449798639, + "grad_norm": 3.63565619146985, + "learning_rate": 4.3423989183357815e-06, + "loss": 0.3144, + "step": 8004 + }, + { + "epoch": 0.555825579780586, + "grad_norm": 3.7745700007046206, + "learning_rate": 4.341284215534915e-06, + "loss": 0.5633, + "step": 8005 + }, + { + "epoch": 0.5558950145813082, + "grad_norm": 3.8122967868522304, + "learning_rate": 4.340169546051146e-06, + "loss": 0.3741, + "step": 8006 + }, + { + "epoch": 0.5559644493820303, + "grad_norm": 3.6017088553048207, + "learning_rate": 4.339054909940854e-06, + "loss": 0.4157, + "step": 8007 + }, + { + "epoch": 0.5560338841827523, + "grad_norm": 4.171330297258857, + "learning_rate": 4.337940307260418e-06, + "loss": 0.6017, + "step": 8008 + }, + { + "epoch": 0.5561033189834745, + "grad_norm": 2.546143847923642, + "learning_rate": 4.336825738066213e-06, + "loss": 0.2734, + "step": 8009 + }, + { + "epoch": 0.5561727537841966, + "grad_norm": 5.051997531763219, + "learning_rate": 4.3357112024146095e-06, + "loss": 0.7547, + "step": 8010 + }, + { + "epoch": 0.5562421885849188, + "grad_norm": 3.1660691836084416, + "learning_rate": 4.334596700361984e-06, + "loss": 0.231, + "step": 8011 + }, + { + "epoch": 0.5563116233856409, + "grad_norm": 3.570132483906301, + "learning_rate": 4.333482231964704e-06, + "loss": 0.4546, + "step": 8012 + }, + { + "epoch": 0.556381058186363, + "grad_norm": 4.673398747368056, + "learning_rate": 4.3323677972791365e-06, + "loss": 0.5107, + "step": 8013 + }, + { + "epoch": 0.5564504929870852, + "grad_norm": 3.209563382113947, + "learning_rate": 4.331253396361652e-06, + "loss": 0.257, + "step": 8014 + }, + { + "epoch": 0.5565199277878072, + "grad_norm": 2.551323817402932, + "learning_rate": 4.330139029268613e-06, + "loss": 0.2315, + "step": 8015 + }, + { + "epoch": 0.5565893625885294, + "grad_norm": 3.9244732258550434, + "learning_rate": 4.3290246960563855e-06, + "loss": 0.359, + "step": 8016 + }, + { + "epoch": 0.5566587973892515, + "grad_norm": 3.8565560404401933, + "learning_rate": 4.3279103967813275e-06, + "loss": 0.3186, + "step": 8017 + }, + { + "epoch": 0.5567282321899736, + "grad_norm": 3.8660123835589433, + "learning_rate": 4.326796131499802e-06, + "loss": 0.5323, + "step": 8018 + }, + { + "epoch": 0.5567976669906958, + "grad_norm": 3.821889060541428, + "learning_rate": 4.325681900268168e-06, + "loss": 0.4549, + "step": 8019 + }, + { + "epoch": 0.5568671017914179, + "grad_norm": 2.651366966879796, + "learning_rate": 4.324567703142779e-06, + "loss": 0.1881, + "step": 8020 + }, + { + "epoch": 0.5569365365921399, + "grad_norm": 4.25735707991398, + "learning_rate": 
4.323453540179991e-06, + "loss": 0.4554, + "step": 8021 + }, + { + "epoch": 0.5570059713928621, + "grad_norm": 4.6344108468425755, + "learning_rate": 4.322339411436157e-06, + "loss": 0.5277, + "step": 8022 + }, + { + "epoch": 0.5570754061935842, + "grad_norm": 3.338843726031172, + "learning_rate": 4.32122531696763e-06, + "loss": 0.459, + "step": 8023 + }, + { + "epoch": 0.5571448409943064, + "grad_norm": 4.0649598247326475, + "learning_rate": 4.320111256830758e-06, + "loss": 0.337, + "step": 8024 + }, + { + "epoch": 0.5572142757950285, + "grad_norm": 4.8411121173564435, + "learning_rate": 4.318997231081888e-06, + "loss": 0.5425, + "step": 8025 + }, + { + "epoch": 0.5572837105957505, + "grad_norm": 3.7479473571589232, + "learning_rate": 4.3178832397773675e-06, + "loss": 0.409, + "step": 8026 + }, + { + "epoch": 0.5573531453964727, + "grad_norm": 3.5850288105319428, + "learning_rate": 4.316769282973543e-06, + "loss": 0.419, + "step": 8027 + }, + { + "epoch": 0.5574225801971948, + "grad_norm": 3.6079457080658313, + "learning_rate": 4.315655360726755e-06, + "loss": 0.4074, + "step": 8028 + }, + { + "epoch": 0.557492014997917, + "grad_norm": 3.6309558812798164, + "learning_rate": 4.314541473093342e-06, + "loss": 0.3185, + "step": 8029 + }, + { + "epoch": 0.5575614497986391, + "grad_norm": 3.8587085769601885, + "learning_rate": 4.3134276201296484e-06, + "loss": 0.6381, + "step": 8030 + }, + { + "epoch": 0.5576308845993612, + "grad_norm": 3.6111334703464117, + "learning_rate": 4.312313801892008e-06, + "loss": 0.4805, + "step": 8031 + }, + { + "epoch": 0.5577003194000834, + "grad_norm": 3.791047079299569, + "learning_rate": 4.311200018436755e-06, + "loss": 0.3241, + "step": 8032 + }, + { + "epoch": 0.5577697542008054, + "grad_norm": 3.4350727058912027, + "learning_rate": 4.310086269820228e-06, + "loss": 0.4461, + "step": 8033 + }, + { + "epoch": 0.5578391890015275, + "grad_norm": 4.000253154079633, + "learning_rate": 4.308972556098758e-06, + "loss": 0.4685, + "step": 8034 + }, + { + "epoch": 0.5579086238022497, + "grad_norm": 3.033555607443416, + "learning_rate": 4.307858877328671e-06, + "loss": 0.4714, + "step": 8035 + }, + { + "epoch": 0.5579780586029718, + "grad_norm": 2.8589462096367972, + "learning_rate": 4.3067452335663e-06, + "loss": 0.2226, + "step": 8036 + }, + { + "epoch": 0.558047493403694, + "grad_norm": 3.785152531452116, + "learning_rate": 4.305631624867971e-06, + "loss": 0.3644, + "step": 8037 + }, + { + "epoch": 0.558116928204416, + "grad_norm": 4.041312525851003, + "learning_rate": 4.304518051290009e-06, + "loss": 0.579, + "step": 8038 + }, + { + "epoch": 0.5581863630051381, + "grad_norm": 3.456910964301835, + "learning_rate": 4.303404512888736e-06, + "loss": 0.406, + "step": 8039 + }, + { + "epoch": 0.5582557978058603, + "grad_norm": 4.003596298550473, + "learning_rate": 4.3022910097204746e-06, + "loss": 0.5526, + "step": 8040 + }, + { + "epoch": 0.5583252326065824, + "grad_norm": 3.440118120043311, + "learning_rate": 4.301177541841546e-06, + "loss": 0.483, + "step": 8041 + }, + { + "epoch": 0.5583946674073046, + "grad_norm": 2.6851504514986217, + "learning_rate": 4.300064109308265e-06, + "loss": 0.3009, + "step": 8042 + }, + { + "epoch": 0.5584641022080267, + "grad_norm": 4.312399885079339, + "learning_rate": 4.298950712176949e-06, + "loss": 0.4678, + "step": 8043 + }, + { + "epoch": 0.5585335370087487, + "grad_norm": 4.759977007532914, + "learning_rate": 4.297837350503915e-06, + "loss": 0.4394, + "step": 8044 + }, + { + "epoch": 0.5586029718094709, + "grad_norm": 
4.037399607168306, + "learning_rate": 4.296724024345471e-06, + "loss": 0.5861, + "step": 8045 + }, + { + "epoch": 0.558672406610193, + "grad_norm": 4.221280452644258, + "learning_rate": 4.295610733757929e-06, + "loss": 0.684, + "step": 8046 + }, + { + "epoch": 0.5587418414109151, + "grad_norm": 4.75674460599765, + "learning_rate": 4.2944974787976015e-06, + "loss": 0.4667, + "step": 8047 + }, + { + "epoch": 0.5588112762116373, + "grad_norm": 3.8070836015104073, + "learning_rate": 4.293384259520793e-06, + "loss": 0.4039, + "step": 8048 + }, + { + "epoch": 0.5588807110123594, + "grad_norm": 4.655021697529806, + "learning_rate": 4.292271075983807e-06, + "loss": 0.5336, + "step": 8049 + }, + { + "epoch": 0.5589501458130816, + "grad_norm": 3.822841050726024, + "learning_rate": 4.2911579282429514e-06, + "loss": 0.4689, + "step": 8050 + }, + { + "epoch": 0.5590195806138036, + "grad_norm": 5.005227463254897, + "learning_rate": 4.290044816354526e-06, + "loss": 0.7394, + "step": 8051 + }, + { + "epoch": 0.5590890154145257, + "grad_norm": 3.5699912229478286, + "learning_rate": 4.288931740374828e-06, + "loss": 0.2747, + "step": 8052 + }, + { + "epoch": 0.5591584502152479, + "grad_norm": 4.759385720995197, + "learning_rate": 4.28781870036016e-06, + "loss": 0.4766, + "step": 8053 + }, + { + "epoch": 0.55922788501597, + "grad_norm": 3.651996214781099, + "learning_rate": 4.286705696366814e-06, + "loss": 0.3897, + "step": 8054 + }, + { + "epoch": 0.5592973198166922, + "grad_norm": 4.0622158308199126, + "learning_rate": 4.28559272845109e-06, + "loss": 0.6018, + "step": 8055 + }, + { + "epoch": 0.5593667546174143, + "grad_norm": 5.5779982527649725, + "learning_rate": 4.284479796669276e-06, + "loss": 0.6662, + "step": 8056 + }, + { + "epoch": 0.5594361894181363, + "grad_norm": 2.9891069608549317, + "learning_rate": 4.283366901077662e-06, + "loss": 0.1936, + "step": 8057 + }, + { + "epoch": 0.5595056242188585, + "grad_norm": 3.467276993891775, + "learning_rate": 4.28225404173254e-06, + "loss": 0.3669, + "step": 8058 + }, + { + "epoch": 0.5595750590195806, + "grad_norm": 2.9775066106144528, + "learning_rate": 4.281141218690199e-06, + "loss": 0.3, + "step": 8059 + }, + { + "epoch": 0.5596444938203028, + "grad_norm": 4.222788936038443, + "learning_rate": 4.28002843200692e-06, + "loss": 0.4675, + "step": 8060 + }, + { + "epoch": 0.5597139286210249, + "grad_norm": 2.747053951464099, + "learning_rate": 4.278915681738987e-06, + "loss": 0.2577, + "step": 8061 + }, + { + "epoch": 0.559783363421747, + "grad_norm": 3.7261632998639644, + "learning_rate": 4.277802967942686e-06, + "loss": 0.2379, + "step": 8062 + }, + { + "epoch": 0.5598527982224691, + "grad_norm": 3.731873232279424, + "learning_rate": 4.276690290674292e-06, + "loss": 0.386, + "step": 8063 + }, + { + "epoch": 0.5599222330231912, + "grad_norm": 4.5001940344366, + "learning_rate": 4.275577649990083e-06, + "loss": 0.6856, + "step": 8064 + }, + { + "epoch": 0.5599916678239133, + "grad_norm": 3.0482818554032476, + "learning_rate": 4.274465045946338e-06, + "loss": 0.2661, + "step": 8065 + }, + { + "epoch": 0.5600611026246355, + "grad_norm": 3.784758115346529, + "learning_rate": 4.273352478599332e-06, + "loss": 0.3525, + "step": 8066 + }, + { + "epoch": 0.5601305374253576, + "grad_norm": 4.759387961089601, + "learning_rate": 4.272239948005332e-06, + "loss": 0.3067, + "step": 8067 + }, + { + "epoch": 0.5601999722260798, + "grad_norm": 4.621699454443586, + "learning_rate": 4.271127454220613e-06, + "loss": 0.5957, + "step": 8068 + }, + { + "epoch": 0.5602694070268018, 
+ "grad_norm": 3.9103628684669545, + "learning_rate": 4.2700149973014445e-06, + "loss": 0.4948, + "step": 8069 + }, + { + "epoch": 0.5603388418275239, + "grad_norm": 5.138704526475872, + "learning_rate": 4.26890257730409e-06, + "loss": 0.6486, + "step": 8070 + }, + { + "epoch": 0.5604082766282461, + "grad_norm": 2.957036252937091, + "learning_rate": 4.267790194284815e-06, + "loss": 0.3464, + "step": 8071 + }, + { + "epoch": 0.5604777114289682, + "grad_norm": 5.764796852455868, + "learning_rate": 4.266677848299884e-06, + "loss": 0.7154, + "step": 8072 + }, + { + "epoch": 0.5605471462296904, + "grad_norm": 4.573464369438991, + "learning_rate": 4.265565539405559e-06, + "loss": 0.4377, + "step": 8073 + }, + { + "epoch": 0.5606165810304125, + "grad_norm": 4.342873209204397, + "learning_rate": 4.264453267658096e-06, + "loss": 0.8664, + "step": 8074 + }, + { + "epoch": 0.5606860158311345, + "grad_norm": 3.689379978303678, + "learning_rate": 4.263341033113754e-06, + "loss": 0.5167, + "step": 8075 + }, + { + "epoch": 0.5607554506318567, + "grad_norm": 3.728726126436983, + "learning_rate": 4.262228835828792e-06, + "loss": 0.4355, + "step": 8076 + }, + { + "epoch": 0.5608248854325788, + "grad_norm": 3.5475220770009157, + "learning_rate": 4.261116675859458e-06, + "loss": 0.3629, + "step": 8077 + }, + { + "epoch": 0.5608943202333009, + "grad_norm": 3.8060252779515347, + "learning_rate": 4.260004553262006e-06, + "loss": 0.5349, + "step": 8078 + }, + { + "epoch": 0.5609637550340231, + "grad_norm": 2.539140125577981, + "learning_rate": 4.258892468092688e-06, + "loss": 0.2316, + "step": 8079 + }, + { + "epoch": 0.5610331898347451, + "grad_norm": 6.392793929203389, + "learning_rate": 4.257780420407751e-06, + "loss": 1.0245, + "step": 8080 + }, + { + "epoch": 0.5611026246354673, + "grad_norm": 4.560841650768598, + "learning_rate": 4.256668410263438e-06, + "loss": 0.5007, + "step": 8081 + }, + { + "epoch": 0.5611720594361894, + "grad_norm": 4.846623830495395, + "learning_rate": 4.255556437715998e-06, + "loss": 0.4971, + "step": 8082 + }, + { + "epoch": 0.5612414942369115, + "grad_norm": 9.84865015745314, + "learning_rate": 4.254444502821669e-06, + "loss": 0.3536, + "step": 8083 + }, + { + "epoch": 0.5613109290376337, + "grad_norm": 4.034626626972785, + "learning_rate": 4.253332605636695e-06, + "loss": 0.5029, + "step": 8084 + }, + { + "epoch": 0.5613803638383558, + "grad_norm": 8.492252176999823, + "learning_rate": 4.2522207462173124e-06, + "loss": 0.5979, + "step": 8085 + }, + { + "epoch": 0.561449798639078, + "grad_norm": 4.39176848080515, + "learning_rate": 4.2511089246197575e-06, + "loss": 0.6506, + "step": 8086 + }, + { + "epoch": 0.5615192334398, + "grad_norm": 4.582675213777285, + "learning_rate": 4.2499971409002675e-06, + "loss": 0.629, + "step": 8087 + }, + { + "epoch": 0.5615886682405221, + "grad_norm": 3.734472841424299, + "learning_rate": 4.248885395115072e-06, + "loss": 0.3284, + "step": 8088 + }, + { + "epoch": 0.5616581030412443, + "grad_norm": 3.6625870446563185, + "learning_rate": 4.247773687320403e-06, + "loss": 0.344, + "step": 8089 + }, + { + "epoch": 0.5617275378419664, + "grad_norm": 3.2084158119287145, + "learning_rate": 4.246662017572489e-06, + "loss": 0.2704, + "step": 8090 + }, + { + "epoch": 0.5617969726426885, + "grad_norm": 3.455062142003421, + "learning_rate": 4.245550385927561e-06, + "loss": 0.3266, + "step": 8091 + }, + { + "epoch": 0.5618664074434107, + "grad_norm": 4.928357066696252, + "learning_rate": 4.244438792441838e-06, + "loss": 0.5406, + "step": 8092 + }, + { + 
"epoch": 0.5619358422441327, + "grad_norm": 3.5877828963060976, + "learning_rate": 4.243327237171546e-06, + "loss": 0.3744, + "step": 8093 + }, + { + "epoch": 0.5620052770448549, + "grad_norm": 2.5498584729273333, + "learning_rate": 4.242215720172908e-06, + "loss": 0.2633, + "step": 8094 + }, + { + "epoch": 0.562074711845577, + "grad_norm": 4.4099002854716165, + "learning_rate": 4.241104241502141e-06, + "loss": 0.4558, + "step": 8095 + }, + { + "epoch": 0.5621441466462991, + "grad_norm": 3.656778387369396, + "learning_rate": 4.23999280121546e-06, + "loss": 0.3621, + "step": 8096 + }, + { + "epoch": 0.5622135814470213, + "grad_norm": 4.191310171800451, + "learning_rate": 4.238881399369085e-06, + "loss": 0.2192, + "step": 8097 + }, + { + "epoch": 0.5622830162477434, + "grad_norm": 2.9876215132200725, + "learning_rate": 4.237770036019229e-06, + "loss": 0.3389, + "step": 8098 + }, + { + "epoch": 0.5623524510484655, + "grad_norm": 4.058141304487534, + "learning_rate": 4.2366587112221e-06, + "loss": 0.5927, + "step": 8099 + }, + { + "epoch": 0.5624218858491876, + "grad_norm": 3.151403944126138, + "learning_rate": 4.235547425033909e-06, + "loss": 0.4508, + "step": 8100 + }, + { + "epoch": 0.5624913206499097, + "grad_norm": 3.3437092798908945, + "learning_rate": 4.2344361775108675e-06, + "loss": 0.3172, + "step": 8101 + }, + { + "epoch": 0.5625607554506319, + "grad_norm": 3.701951222376618, + "learning_rate": 4.2333249687091755e-06, + "loss": 0.4957, + "step": 8102 + }, + { + "epoch": 0.562630190251354, + "grad_norm": 3.231759933245245, + "learning_rate": 4.232213798685039e-06, + "loss": 0.3692, + "step": 8103 + }, + { + "epoch": 0.562699625052076, + "grad_norm": 3.3931137239978866, + "learning_rate": 4.23110266749466e-06, + "loss": 0.3564, + "step": 8104 + }, + { + "epoch": 0.5627690598527982, + "grad_norm": 3.2259419099428177, + "learning_rate": 4.229991575194239e-06, + "loss": 0.2396, + "step": 8105 + }, + { + "epoch": 0.5628384946535203, + "grad_norm": 2.647385183411293, + "learning_rate": 4.228880521839971e-06, + "loss": 0.4323, + "step": 8106 + }, + { + "epoch": 0.5629079294542425, + "grad_norm": 4.167743942983958, + "learning_rate": 4.227769507488053e-06, + "loss": 0.5169, + "step": 8107 + }, + { + "epoch": 0.5629773642549646, + "grad_norm": 4.180563368683919, + "learning_rate": 4.226658532194682e-06, + "loss": 0.4443, + "step": 8108 + }, + { + "epoch": 0.5630467990556867, + "grad_norm": 3.690142800221043, + "learning_rate": 4.225547596016047e-06, + "loss": 0.5928, + "step": 8109 + }, + { + "epoch": 0.5631162338564089, + "grad_norm": 4.903539476942145, + "learning_rate": 4.2244366990083365e-06, + "loss": 0.4863, + "step": 8110 + }, + { + "epoch": 0.5631856686571309, + "grad_norm": 4.76017789991848, + "learning_rate": 4.223325841227741e-06, + "loss": 0.5019, + "step": 8111 + }, + { + "epoch": 0.5632551034578531, + "grad_norm": 3.0784452277795746, + "learning_rate": 4.222215022730447e-06, + "loss": 0.2069, + "step": 8112 + }, + { + "epoch": 0.5633245382585752, + "grad_norm": 4.429703459975234, + "learning_rate": 4.221104243572635e-06, + "loss": 0.4352, + "step": 8113 + }, + { + "epoch": 0.5633939730592973, + "grad_norm": 3.7123668615264998, + "learning_rate": 4.21999350381049e-06, + "loss": 0.5006, + "step": 8114 + }, + { + "epoch": 0.5634634078600195, + "grad_norm": 3.5757844972252277, + "learning_rate": 4.21888280350019e-06, + "loss": 0.4769, + "step": 8115 + }, + { + "epoch": 0.5635328426607416, + "grad_norm": 2.6086103075963103, + "learning_rate": 4.217772142697916e-06, + "loss": 0.1998, 
+ "step": 8116 + }, + { + "epoch": 0.5636022774614637, + "grad_norm": 3.9128340499299585, + "learning_rate": 4.216661521459839e-06, + "loss": 0.3663, + "step": 8117 + }, + { + "epoch": 0.5636717122621858, + "grad_norm": 3.3748034214434766, + "learning_rate": 4.215550939842138e-06, + "loss": 0.3027, + "step": 8118 + }, + { + "epoch": 0.5637411470629079, + "grad_norm": 3.025397680232139, + "learning_rate": 4.214440397900983e-06, + "loss": 0.318, + "step": 8119 + }, + { + "epoch": 0.5638105818636301, + "grad_norm": 3.4050101831654214, + "learning_rate": 4.213329895692542e-06, + "loss": 0.3515, + "step": 8120 + }, + { + "epoch": 0.5638800166643522, + "grad_norm": 4.36811476668919, + "learning_rate": 4.212219433272986e-06, + "loss": 0.5281, + "step": 8121 + }, + { + "epoch": 0.5639494514650742, + "grad_norm": 2.7638261172320795, + "learning_rate": 4.211109010698478e-06, + "loss": 0.2208, + "step": 8122 + }, + { + "epoch": 0.5640188862657964, + "grad_norm": 3.769782909893306, + "learning_rate": 4.209998628025186e-06, + "loss": 0.3619, + "step": 8123 + }, + { + "epoch": 0.5640883210665185, + "grad_norm": 4.071857298714483, + "learning_rate": 4.208888285309268e-06, + "loss": 0.5466, + "step": 8124 + }, + { + "epoch": 0.5641577558672407, + "grad_norm": 3.182954396456208, + "learning_rate": 4.207777982606885e-06, + "loss": 0.295, + "step": 8125 + }, + { + "epoch": 0.5642271906679628, + "grad_norm": 4.602885597909247, + "learning_rate": 4.2066677199741965e-06, + "loss": 0.577, + "step": 8126 + }, + { + "epoch": 0.5642966254686849, + "grad_norm": 4.068994059212368, + "learning_rate": 4.205557497467356e-06, + "loss": 0.4953, + "step": 8127 + }, + { + "epoch": 0.564366060269407, + "grad_norm": 5.018801198612864, + "learning_rate": 4.204447315142519e-06, + "loss": 0.7809, + "step": 8128 + }, + { + "epoch": 0.5644354950701291, + "grad_norm": 4.362779075125485, + "learning_rate": 4.203337173055835e-06, + "loss": 0.4866, + "step": 8129 + }, + { + "epoch": 0.5645049298708513, + "grad_norm": 3.6279325163334257, + "learning_rate": 4.2022270712634576e-06, + "loss": 0.4996, + "step": 8130 + }, + { + "epoch": 0.5645743646715734, + "grad_norm": 3.460649103174236, + "learning_rate": 4.201117009821531e-06, + "loss": 0.4026, + "step": 8131 + }, + { + "epoch": 0.5646437994722955, + "grad_norm": 3.6763728338597885, + "learning_rate": 4.200006988786201e-06, + "loss": 0.5306, + "step": 8132 + }, + { + "epoch": 0.5647132342730177, + "grad_norm": 2.315921030171904, + "learning_rate": 4.198897008213614e-06, + "loss": 0.189, + "step": 8133 + }, + { + "epoch": 0.5647826690737398, + "grad_norm": 4.327994743204927, + "learning_rate": 4.19778706815991e-06, + "loss": 0.7816, + "step": 8134 + }, + { + "epoch": 0.5648521038744618, + "grad_norm": 2.526784547402994, + "learning_rate": 4.196677168681226e-06, + "loss": 0.1978, + "step": 8135 + }, + { + "epoch": 0.564921538675184, + "grad_norm": 2.610381848846158, + "learning_rate": 4.195567309833703e-06, + "loss": 0.3588, + "step": 8136 + }, + { + "epoch": 0.5649909734759061, + "grad_norm": 3.7727991895008213, + "learning_rate": 4.194457491673477e-06, + "loss": 0.4962, + "step": 8137 + }, + { + "epoch": 0.5650604082766283, + "grad_norm": 4.059890289448165, + "learning_rate": 4.1933477142566775e-06, + "loss": 0.5952, + "step": 8138 + }, + { + "epoch": 0.5651298430773504, + "grad_norm": 4.232470839620361, + "learning_rate": 4.192237977639437e-06, + "loss": 0.3745, + "step": 8139 + }, + { + "epoch": 0.5651992778780724, + "grad_norm": 3.1302027887411916, + "learning_rate": 
4.191128281877887e-06, + "loss": 0.453, + "step": 8140 + }, + { + "epoch": 0.5652687126787946, + "grad_norm": 3.518813226779574, + "learning_rate": 4.190018627028154e-06, + "loss": 0.3857, + "step": 8141 + }, + { + "epoch": 0.5653381474795167, + "grad_norm": 3.58982894310111, + "learning_rate": 4.188909013146361e-06, + "loss": 0.4728, + "step": 8142 + }, + { + "epoch": 0.5654075822802389, + "grad_norm": 4.510067031770324, + "learning_rate": 4.187799440288632e-06, + "loss": 0.7264, + "step": 8143 + }, + { + "epoch": 0.565477017080961, + "grad_norm": 4.110441271929254, + "learning_rate": 4.186689908511091e-06, + "loss": 0.412, + "step": 8144 + }, + { + "epoch": 0.5655464518816831, + "grad_norm": 3.638411301016271, + "learning_rate": 4.185580417869851e-06, + "loss": 0.3549, + "step": 8145 + }, + { + "epoch": 0.5656158866824053, + "grad_norm": 5.9781689104685425, + "learning_rate": 4.184470968421033e-06, + "loss": 0.2968, + "step": 8146 + }, + { + "epoch": 0.5656853214831273, + "grad_norm": 3.477545016170227, + "learning_rate": 4.183361560220749e-06, + "loss": 0.3887, + "step": 8147 + }, + { + "epoch": 0.5657547562838494, + "grad_norm": 3.675754948258682, + "learning_rate": 4.182252193325116e-06, + "loss": 0.3711, + "step": 8148 + }, + { + "epoch": 0.5658241910845716, + "grad_norm": 3.7872778159432876, + "learning_rate": 4.18114286779024e-06, + "loss": 0.3829, + "step": 8149 + }, + { + "epoch": 0.5658936258852937, + "grad_norm": 3.349732192396922, + "learning_rate": 4.180033583672231e-06, + "loss": 0.3943, + "step": 8150 + }, + { + "epoch": 0.5659630606860159, + "grad_norm": 3.9521781762702917, + "learning_rate": 4.178924341027197e-06, + "loss": 0.4907, + "step": 8151 + }, + { + "epoch": 0.566032495486738, + "grad_norm": 5.12389887956287, + "learning_rate": 4.177815139911239e-06, + "loss": 0.682, + "step": 8152 + }, + { + "epoch": 0.56610193028746, + "grad_norm": 3.792130725067246, + "learning_rate": 4.176705980380462e-06, + "loss": 0.3857, + "step": 8153 + }, + { + "epoch": 0.5661713650881822, + "grad_norm": 4.0475833522663, + "learning_rate": 4.175596862490963e-06, + "loss": 0.5314, + "step": 8154 + }, + { + "epoch": 0.5662407998889043, + "grad_norm": 4.077816106461868, + "learning_rate": 4.174487786298845e-06, + "loss": 0.3961, + "step": 8155 + }, + { + "epoch": 0.5663102346896265, + "grad_norm": 2.799849899290536, + "learning_rate": 4.173378751860199e-06, + "loss": 0.4125, + "step": 8156 + }, + { + "epoch": 0.5663796694903486, + "grad_norm": 2.4496052269283792, + "learning_rate": 4.172269759231119e-06, + "loss": 0.2269, + "step": 8157 + }, + { + "epoch": 0.5664491042910706, + "grad_norm": 4.368187198904308, + "learning_rate": 4.171160808467699e-06, + "loss": 0.3796, + "step": 8158 + }, + { + "epoch": 0.5665185390917928, + "grad_norm": 3.659180489482833, + "learning_rate": 4.170051899626029e-06, + "loss": 0.293, + "step": 8159 + }, + { + "epoch": 0.5665879738925149, + "grad_norm": 3.493171828669165, + "learning_rate": 4.168943032762193e-06, + "loss": 0.3841, + "step": 8160 + }, + { + "epoch": 0.566657408693237, + "grad_norm": 4.391921531075759, + "learning_rate": 4.167834207932278e-06, + "loss": 0.6614, + "step": 8161 + }, + { + "epoch": 0.5667268434939592, + "grad_norm": 4.374730041533376, + "learning_rate": 4.1667254251923685e-06, + "loss": 0.5358, + "step": 8162 + }, + { + "epoch": 0.5667962782946813, + "grad_norm": 3.965302754317366, + "learning_rate": 4.165616684598543e-06, + "loss": 0.4544, + "step": 8163 + }, + { + "epoch": 0.5668657130954035, + "grad_norm": 4.1689496812477564, + 
"learning_rate": 4.164507986206881e-06, + "loss": 0.4955, + "step": 8164 + }, + { + "epoch": 0.5669351478961255, + "grad_norm": 4.04818930650821, + "learning_rate": 4.1633993300734595e-06, + "loss": 0.6027, + "step": 8165 + }, + { + "epoch": 0.5670045826968476, + "grad_norm": 4.426784094949415, + "learning_rate": 4.162290716254355e-06, + "loss": 0.5542, + "step": 8166 + }, + { + "epoch": 0.5670740174975698, + "grad_norm": 4.467901552743312, + "learning_rate": 4.1611821448056364e-06, + "loss": 0.5287, + "step": 8167 + }, + { + "epoch": 0.5671434522982919, + "grad_norm": 3.5207290458031424, + "learning_rate": 4.160073615783375e-06, + "loss": 0.399, + "step": 8168 + }, + { + "epoch": 0.5672128870990141, + "grad_norm": 6.4365259725437625, + "learning_rate": 4.158965129243643e-06, + "loss": 0.3712, + "step": 8169 + }, + { + "epoch": 0.5672823218997362, + "grad_norm": 2.9158785655865356, + "learning_rate": 4.1578566852425e-06, + "loss": 0.3681, + "step": 8170 + }, + { + "epoch": 0.5673517567004582, + "grad_norm": 4.835608873758036, + "learning_rate": 4.156748283836013e-06, + "loss": 0.6262, + "step": 8171 + }, + { + "epoch": 0.5674211915011804, + "grad_norm": 3.98491407674, + "learning_rate": 4.155639925080245e-06, + "loss": 0.3814, + "step": 8172 + }, + { + "epoch": 0.5674906263019025, + "grad_norm": 4.147869131977386, + "learning_rate": 4.154531609031255e-06, + "loss": 0.2885, + "step": 8173 + }, + { + "epoch": 0.5675600611026246, + "grad_norm": 3.8741148789186943, + "learning_rate": 4.153423335745097e-06, + "loss": 0.3922, + "step": 8174 + }, + { + "epoch": 0.5676294959033468, + "grad_norm": 4.183791892786589, + "learning_rate": 4.15231510527783e-06, + "loss": 0.5087, + "step": 8175 + }, + { + "epoch": 0.5676989307040688, + "grad_norm": 4.202114691470736, + "learning_rate": 4.151206917685508e-06, + "loss": 0.6302, + "step": 8176 + }, + { + "epoch": 0.567768365504791, + "grad_norm": 3.0739438071394924, + "learning_rate": 4.150098773024175e-06, + "loss": 0.2406, + "step": 8177 + }, + { + "epoch": 0.5678378003055131, + "grad_norm": 5.166071564980779, + "learning_rate": 4.148990671349887e-06, + "loss": 0.5116, + "step": 8178 + }, + { + "epoch": 0.5679072351062352, + "grad_norm": 3.0959499547210756, + "learning_rate": 4.147882612718687e-06, + "loss": 0.4007, + "step": 8179 + }, + { + "epoch": 0.5679766699069574, + "grad_norm": 3.2367790976698916, + "learning_rate": 4.146774597186622e-06, + "loss": 0.4305, + "step": 8180 + }, + { + "epoch": 0.5680461047076795, + "grad_norm": 3.886395889701452, + "learning_rate": 4.14566662480973e-06, + "loss": 0.5084, + "step": 8181 + }, + { + "epoch": 0.5681155395084017, + "grad_norm": 3.868854214754719, + "learning_rate": 4.144558695644054e-06, + "loss": 0.5114, + "step": 8182 + }, + { + "epoch": 0.5681849743091237, + "grad_norm": 3.5779261248020635, + "learning_rate": 4.143450809745631e-06, + "loss": 0.3348, + "step": 8183 + }, + { + "epoch": 0.5682544091098458, + "grad_norm": 4.118830080027121, + "learning_rate": 4.142342967170498e-06, + "loss": 0.5417, + "step": 8184 + }, + { + "epoch": 0.568323843910568, + "grad_norm": 4.436253411587737, + "learning_rate": 4.141235167974687e-06, + "loss": 0.5689, + "step": 8185 + }, + { + "epoch": 0.5683932787112901, + "grad_norm": 3.329951400517597, + "learning_rate": 4.1401274122142285e-06, + "loss": 0.2797, + "step": 8186 + }, + { + "epoch": 0.5684627135120123, + "grad_norm": 4.648025047439982, + "learning_rate": 4.139019699945154e-06, + "loss": 0.5371, + "step": 8187 + }, + { + "epoch": 0.5685321483127344, + "grad_norm": 
3.6962183467864995, + "learning_rate": 4.137912031223487e-06, + "loss": 0.5129, + "step": 8188 + }, + { + "epoch": 0.5686015831134564, + "grad_norm": 3.6309138034480872, + "learning_rate": 4.136804406105256e-06, + "loss": 0.5365, + "step": 8189 + }, + { + "epoch": 0.5686710179141786, + "grad_norm": 3.710523533558838, + "learning_rate": 4.135696824646479e-06, + "loss": 0.4167, + "step": 8190 + }, + { + "epoch": 0.5687404527149007, + "grad_norm": 4.871071732358399, + "learning_rate": 4.134589286903181e-06, + "loss": 0.6011, + "step": 8191 + }, + { + "epoch": 0.5688098875156228, + "grad_norm": 4.371993090295723, + "learning_rate": 4.133481792931376e-06, + "loss": 0.7042, + "step": 8192 + }, + { + "epoch": 0.568879322316345, + "grad_norm": 4.752809887192429, + "learning_rate": 4.132374342787081e-06, + "loss": 0.6542, + "step": 8193 + }, + { + "epoch": 0.568948757117067, + "grad_norm": 4.10511814127952, + "learning_rate": 4.131266936526312e-06, + "loss": 0.2812, + "step": 8194 + }, + { + "epoch": 0.5690181919177892, + "grad_norm": 4.256562734406948, + "learning_rate": 4.130159574205077e-06, + "loss": 0.4694, + "step": 8195 + }, + { + "epoch": 0.5690876267185113, + "grad_norm": 2.757829124983594, + "learning_rate": 4.1290522558793855e-06, + "loss": 0.2625, + "step": 8196 + }, + { + "epoch": 0.5691570615192334, + "grad_norm": 4.976259576743357, + "learning_rate": 4.127944981605245e-06, + "loss": 0.5731, + "step": 8197 + }, + { + "epoch": 0.5692264963199556, + "grad_norm": 3.067717985681503, + "learning_rate": 4.126837751438663e-06, + "loss": 0.1835, + "step": 8198 + }, + { + "epoch": 0.5692959311206777, + "grad_norm": 4.603090658263815, + "learning_rate": 4.125730565435639e-06, + "loss": 0.61, + "step": 8199 + }, + { + "epoch": 0.5693653659213999, + "grad_norm": 5.141820677497581, + "learning_rate": 4.124623423652172e-06, + "loss": 0.6183, + "step": 8200 + }, + { + "epoch": 0.5694348007221219, + "grad_norm": 5.334562339426687, + "learning_rate": 4.123516326144264e-06, + "loss": 0.527, + "step": 8201 + }, + { + "epoch": 0.569504235522844, + "grad_norm": 4.938105013990115, + "learning_rate": 4.1224092729679065e-06, + "loss": 0.3932, + "step": 8202 + }, + { + "epoch": 0.5695736703235662, + "grad_norm": 4.886546812371977, + "learning_rate": 4.121302264179095e-06, + "loss": 0.5181, + "step": 8203 + }, + { + "epoch": 0.5696431051242883, + "grad_norm": 3.8297936736361935, + "learning_rate": 4.120195299833821e-06, + "loss": 0.5723, + "step": 8204 + }, + { + "epoch": 0.5697125399250104, + "grad_norm": 4.881437063272225, + "learning_rate": 4.1190883799880745e-06, + "loss": 0.6236, + "step": 8205 + }, + { + "epoch": 0.5697819747257326, + "grad_norm": 6.653667897876564, + "learning_rate": 4.117981504697839e-06, + "loss": 0.8079, + "step": 8206 + }, + { + "epoch": 0.5698514095264546, + "grad_norm": 3.86727325025805, + "learning_rate": 4.116874674019101e-06, + "loss": 0.3782, + "step": 8207 + }, + { + "epoch": 0.5699208443271768, + "grad_norm": 3.1006918668145316, + "learning_rate": 4.1157678880078436e-06, + "loss": 0.4621, + "step": 8208 + }, + { + "epoch": 0.5699902791278989, + "grad_norm": 3.8350282099357487, + "learning_rate": 4.114661146720047e-06, + "loss": 0.5566, + "step": 8209 + }, + { + "epoch": 0.570059713928621, + "grad_norm": 3.7037370882502816, + "learning_rate": 4.113554450211685e-06, + "loss": 0.383, + "step": 8210 + }, + { + "epoch": 0.5701291487293432, + "grad_norm": 4.151300314907731, + "learning_rate": 4.112447798538737e-06, + "loss": 0.2447, + "step": 8211 + }, + { + "epoch": 
0.5701985835300653, + "grad_norm": 3.510790263352245, + "learning_rate": 4.111341191757176e-06, + "loss": 0.33, + "step": 8212 + }, + { + "epoch": 0.5702680183307874, + "grad_norm": 3.8732529086503704, + "learning_rate": 4.110234629922971e-06, + "loss": 0.2227, + "step": 8213 + }, + { + "epoch": 0.5703374531315095, + "grad_norm": 4.174888923701188, + "learning_rate": 4.109128113092092e-06, + "loss": 0.7096, + "step": 8214 + }, + { + "epoch": 0.5704068879322316, + "grad_norm": 3.6537448416740186, + "learning_rate": 4.108021641320504e-06, + "loss": 0.3924, + "step": 8215 + }, + { + "epoch": 0.5704763227329538, + "grad_norm": 3.731109541635737, + "learning_rate": 4.106915214664174e-06, + "loss": 0.4461, + "step": 8216 + }, + { + "epoch": 0.5705457575336759, + "grad_norm": 3.374042079141516, + "learning_rate": 4.10580883317906e-06, + "loss": 0.3449, + "step": 8217 + }, + { + "epoch": 0.570615192334398, + "grad_norm": 4.190076660298445, + "learning_rate": 4.104702496921123e-06, + "loss": 0.3763, + "step": 8218 + }, + { + "epoch": 0.5706846271351201, + "grad_norm": 3.576769621090016, + "learning_rate": 4.103596205946323e-06, + "loss": 0.4509, + "step": 8219 + }, + { + "epoch": 0.5707540619358422, + "grad_norm": 3.012672379462364, + "learning_rate": 4.1024899603106105e-06, + "loss": 0.3729, + "step": 8220 + }, + { + "epoch": 0.5708234967365644, + "grad_norm": 4.032490793076996, + "learning_rate": 4.101383760069941e-06, + "loss": 0.5286, + "step": 8221 + }, + { + "epoch": 0.5708929315372865, + "grad_norm": 4.497212335074443, + "learning_rate": 4.100277605280263e-06, + "loss": 0.5735, + "step": 8222 + }, + { + "epoch": 0.5709623663380086, + "grad_norm": 4.339599970119813, + "learning_rate": 4.099171495997528e-06, + "loss": 0.3435, + "step": 8223 + }, + { + "epoch": 0.5710318011387308, + "grad_norm": 4.227598600975215, + "learning_rate": 4.0980654322776775e-06, + "loss": 0.4741, + "step": 8224 + }, + { + "epoch": 0.5711012359394528, + "grad_norm": 4.323423024859913, + "learning_rate": 4.096959414176656e-06, + "loss": 0.7926, + "step": 8225 + }, + { + "epoch": 0.571170670740175, + "grad_norm": 3.5309036089927983, + "learning_rate": 4.0958534417504085e-06, + "loss": 0.3851, + "step": 8226 + }, + { + "epoch": 0.5712401055408971, + "grad_norm": 4.016654047361848, + "learning_rate": 4.094747515054869e-06, + "loss": 0.671, + "step": 8227 + }, + { + "epoch": 0.5713095403416192, + "grad_norm": 5.486159824766313, + "learning_rate": 4.093641634145974e-06, + "loss": 0.5476, + "step": 8228 + }, + { + "epoch": 0.5713789751423414, + "grad_norm": 3.107279755666281, + "learning_rate": 4.092535799079661e-06, + "loss": 0.4037, + "step": 8229 + }, + { + "epoch": 0.5714484099430635, + "grad_norm": 4.0484560224919655, + "learning_rate": 4.091430009911862e-06, + "loss": 0.3758, + "step": 8230 + }, + { + "epoch": 0.5715178447437855, + "grad_norm": 5.049242392125939, + "learning_rate": 4.090324266698504e-06, + "loss": 0.4354, + "step": 8231 + }, + { + "epoch": 0.5715872795445077, + "grad_norm": 4.945383010019044, + "learning_rate": 4.089218569495514e-06, + "loss": 0.4905, + "step": 8232 + }, + { + "epoch": 0.5716567143452298, + "grad_norm": 3.90100571268338, + "learning_rate": 4.088112918358819e-06, + "loss": 0.2238, + "step": 8233 + }, + { + "epoch": 0.571726149145952, + "grad_norm": 4.149724421606885, + "learning_rate": 4.087007313344342e-06, + "loss": 0.3803, + "step": 8234 + }, + { + "epoch": 0.5717955839466741, + "grad_norm": 3.676290486778727, + "learning_rate": 4.085901754508e-06, + "loss": 0.3579, + "step": 8235 + 
}, + { + "epoch": 0.5718650187473961, + "grad_norm": 3.6656625537944003, + "learning_rate": 4.084796241905713e-06, + "loss": 0.5401, + "step": 8236 + }, + { + "epoch": 0.5719344535481183, + "grad_norm": 3.2580458218522432, + "learning_rate": 4.083690775593399e-06, + "loss": 0.2925, + "step": 8237 + }, + { + "epoch": 0.5720038883488404, + "grad_norm": 3.9363137629387666, + "learning_rate": 4.082585355626965e-06, + "loss": 0.5388, + "step": 8238 + }, + { + "epoch": 0.5720733231495626, + "grad_norm": 4.000743846331227, + "learning_rate": 4.0814799820623275e-06, + "loss": 0.7363, + "step": 8239 + }, + { + "epoch": 0.5721427579502847, + "grad_norm": 3.8465567973638835, + "learning_rate": 4.080374654955393e-06, + "loss": 0.3689, + "step": 8240 + }, + { + "epoch": 0.5722121927510068, + "grad_norm": 4.64989596743859, + "learning_rate": 4.0792693743620695e-06, + "loss": 0.6305, + "step": 8241 + }, + { + "epoch": 0.572281627551729, + "grad_norm": 4.194235239910773, + "learning_rate": 4.078164140338256e-06, + "loss": 0.7083, + "step": 8242 + }, + { + "epoch": 0.572351062352451, + "grad_norm": 3.3921206302533835, + "learning_rate": 4.077058952939859e-06, + "loss": 0.3234, + "step": 8243 + }, + { + "epoch": 0.5724204971531732, + "grad_norm": 3.3887926702927524, + "learning_rate": 4.075953812222777e-06, + "loss": 0.2621, + "step": 8244 + }, + { + "epoch": 0.5724899319538953, + "grad_norm": 3.596906679910927, + "learning_rate": 4.074848718242903e-06, + "loss": 0.4348, + "step": 8245 + }, + { + "epoch": 0.5725593667546174, + "grad_norm": 4.848947362706893, + "learning_rate": 4.073743671056135e-06, + "loss": 0.5158, + "step": 8246 + }, + { + "epoch": 0.5726288015553396, + "grad_norm": 2.829421388768847, + "learning_rate": 4.072638670718363e-06, + "loss": 0.317, + "step": 8247 + }, + { + "epoch": 0.5726982363560617, + "grad_norm": 3.758014021769186, + "learning_rate": 4.07153371728548e-06, + "loss": 0.3952, + "step": 8248 + }, + { + "epoch": 0.5727676711567837, + "grad_norm": 4.1082237048614285, + "learning_rate": 4.070428810813369e-06, + "loss": 0.6192, + "step": 8249 + }, + { + "epoch": 0.5728371059575059, + "grad_norm": 2.948796228396795, + "learning_rate": 4.069323951357917e-06, + "loss": 0.1813, + "step": 8250 + }, + { + "epoch": 0.572906540758228, + "grad_norm": 2.9276160374647873, + "learning_rate": 4.068219138975008e-06, + "loss": 0.3392, + "step": 8251 + }, + { + "epoch": 0.5729759755589502, + "grad_norm": 4.598374429548369, + "learning_rate": 4.067114373720518e-06, + "loss": 0.5748, + "step": 8252 + }, + { + "epoch": 0.5730454103596723, + "grad_norm": 3.207341570442264, + "learning_rate": 4.0660096556503295e-06, + "loss": 0.3085, + "step": 8253 + }, + { + "epoch": 0.5731148451603943, + "grad_norm": 3.4050989042323794, + "learning_rate": 4.064904984820313e-06, + "loss": 0.3733, + "step": 8254 + }, + { + "epoch": 0.5731842799611165, + "grad_norm": 3.359431054972266, + "learning_rate": 4.063800361286349e-06, + "loss": 0.3887, + "step": 8255 + }, + { + "epoch": 0.5732537147618386, + "grad_norm": 4.063161873144555, + "learning_rate": 4.062695785104301e-06, + "loss": 0.504, + "step": 8256 + }, + { + "epoch": 0.5733231495625608, + "grad_norm": 4.1074015338232375, + "learning_rate": 4.061591256330038e-06, + "loss": 0.6453, + "step": 8257 + }, + { + "epoch": 0.5733925843632829, + "grad_norm": 3.5124201203747085, + "learning_rate": 4.060486775019429e-06, + "loss": 0.41, + "step": 8258 + }, + { + "epoch": 0.573462019164005, + "grad_norm": 4.2381340516835, + "learning_rate": 4.059382341228334e-06, + "loss": 
0.4797, + "step": 8259 + }, + { + "epoch": 0.5735314539647272, + "grad_norm": 3.037417725016238, + "learning_rate": 4.058277955012617e-06, + "loss": 0.385, + "step": 8260 + }, + { + "epoch": 0.5736008887654492, + "grad_norm": 3.9205697561469934, + "learning_rate": 4.057173616428135e-06, + "loss": 0.5236, + "step": 8261 + }, + { + "epoch": 0.5736703235661713, + "grad_norm": 2.872065804430074, + "learning_rate": 4.056069325530746e-06, + "loss": 0.3457, + "step": 8262 + }, + { + "epoch": 0.5737397583668935, + "grad_norm": 3.4163167632435223, + "learning_rate": 4.054965082376302e-06, + "loss": 0.425, + "step": 8263 + }, + { + "epoch": 0.5738091931676156, + "grad_norm": 10.761068023792847, + "learning_rate": 4.053860887020653e-06, + "loss": 0.5428, + "step": 8264 + }, + { + "epoch": 0.5738786279683378, + "grad_norm": 5.886325752572919, + "learning_rate": 4.052756739519652e-06, + "loss": 0.4421, + "step": 8265 + }, + { + "epoch": 0.5739480627690599, + "grad_norm": 4.085505623340654, + "learning_rate": 4.051652639929143e-06, + "loss": 0.5554, + "step": 8266 + }, + { + "epoch": 0.5740174975697819, + "grad_norm": 4.136658766300606, + "learning_rate": 4.050548588304968e-06, + "loss": 0.3957, + "step": 8267 + }, + { + "epoch": 0.5740869323705041, + "grad_norm": 3.906387490028484, + "learning_rate": 4.049444584702972e-06, + "loss": 0.3925, + "step": 8268 + }, + { + "epoch": 0.5741563671712262, + "grad_norm": 4.196516660486522, + "learning_rate": 4.048340629178995e-06, + "loss": 0.52, + "step": 8269 + }, + { + "epoch": 0.5742258019719484, + "grad_norm": 2.8985905758339476, + "learning_rate": 4.047236721788871e-06, + "loss": 0.3754, + "step": 8270 + }, + { + "epoch": 0.5742952367726705, + "grad_norm": 6.115894965211508, + "learning_rate": 4.046132862588435e-06, + "loss": 0.4109, + "step": 8271 + }, + { + "epoch": 0.5743646715733925, + "grad_norm": 3.607390801562125, + "learning_rate": 4.0450290516335195e-06, + "loss": 0.4645, + "step": 8272 + }, + { + "epoch": 0.5744341063741147, + "grad_norm": 4.569251003926501, + "learning_rate": 4.043925288979956e-06, + "loss": 0.6653, + "step": 8273 + }, + { + "epoch": 0.5745035411748368, + "grad_norm": 3.487761169646035, + "learning_rate": 4.042821574683567e-06, + "loss": 0.4078, + "step": 8274 + }, + { + "epoch": 0.5745729759755589, + "grad_norm": 4.280770182506097, + "learning_rate": 4.04171790880018e-06, + "loss": 0.3805, + "step": 8275 + }, + { + "epoch": 0.5746424107762811, + "grad_norm": 3.883991240322071, + "learning_rate": 4.040614291385619e-06, + "loss": 0.3446, + "step": 8276 + }, + { + "epoch": 0.5747118455770032, + "grad_norm": 2.9467998866383995, + "learning_rate": 4.039510722495698e-06, + "loss": 0.3023, + "step": 8277 + }, + { + "epoch": 0.5747812803777254, + "grad_norm": 3.236039428818039, + "learning_rate": 4.038407202186239e-06, + "loss": 0.3014, + "step": 8278 + }, + { + "epoch": 0.5748507151784474, + "grad_norm": 3.14964037069236, + "learning_rate": 4.037303730513056e-06, + "loss": 0.281, + "step": 8279 + }, + { + "epoch": 0.5749201499791695, + "grad_norm": 3.8918177628176442, + "learning_rate": 4.036200307531962e-06, + "loss": 0.5459, + "step": 8280 + }, + { + "epoch": 0.5749895847798917, + "grad_norm": 2.9748550421965843, + "learning_rate": 4.035096933298764e-06, + "loss": 0.2267, + "step": 8281 + }, + { + "epoch": 0.5750590195806138, + "grad_norm": 4.169971190126039, + "learning_rate": 4.033993607869273e-06, + "loss": 0.3896, + "step": 8282 + }, + { + "epoch": 0.575128454381336, + "grad_norm": 2.344897522663576, + "learning_rate": 
4.032890331299292e-06, + "loss": 0.2403, + "step": 8283 + }, + { + "epoch": 0.575197889182058, + "grad_norm": 3.4844068283426006, + "learning_rate": 4.031787103644623e-06, + "loss": 0.3134, + "step": 8284 + }, + { + "epoch": 0.5752673239827801, + "grad_norm": 3.7517548270607093, + "learning_rate": 4.030683924961066e-06, + "loss": 0.4406, + "step": 8285 + }, + { + "epoch": 0.5753367587835023, + "grad_norm": 3.2762846799811007, + "learning_rate": 4.02958079530442e-06, + "loss": 0.3804, + "step": 8286 + }, + { + "epoch": 0.5754061935842244, + "grad_norm": 3.3957480830337214, + "learning_rate": 4.02847771473048e-06, + "loss": 0.444, + "step": 8287 + }, + { + "epoch": 0.5754756283849465, + "grad_norm": 3.437316311920712, + "learning_rate": 4.027374683295036e-06, + "loss": 0.4216, + "step": 8288 + }, + { + "epoch": 0.5755450631856687, + "grad_norm": 6.355746239357887, + "learning_rate": 4.026271701053881e-06, + "loss": 0.7195, + "step": 8289 + }, + { + "epoch": 0.5756144979863907, + "grad_norm": 5.734150169101989, + "learning_rate": 4.025168768062801e-06, + "loss": 0.8573, + "step": 8290 + }, + { + "epoch": 0.5756839327871129, + "grad_norm": 3.7545094896807294, + "learning_rate": 4.024065884377583e-06, + "loss": 0.541, + "step": 8291 + }, + { + "epoch": 0.575753367587835, + "grad_norm": 5.287401368554198, + "learning_rate": 4.0229630500540075e-06, + "loss": 0.4149, + "step": 8292 + }, + { + "epoch": 0.5758228023885571, + "grad_norm": 2.623034989930876, + "learning_rate": 4.021860265147854e-06, + "loss": 0.3252, + "step": 8293 + }, + { + "epoch": 0.5758922371892793, + "grad_norm": 3.303578251172555, + "learning_rate": 4.020757529714904e-06, + "loss": 0.2271, + "step": 8294 + }, + { + "epoch": 0.5759616719900014, + "grad_norm": 4.450790331947789, + "learning_rate": 4.019654843810929e-06, + "loss": 0.5132, + "step": 8295 + }, + { + "epoch": 0.5760311067907236, + "grad_norm": 5.229785866138677, + "learning_rate": 4.018552207491702e-06, + "loss": 0.7396, + "step": 8296 + }, + { + "epoch": 0.5761005415914456, + "grad_norm": 4.4838989114748165, + "learning_rate": 4.017449620812992e-06, + "loss": 0.4912, + "step": 8297 + }, + { + "epoch": 0.5761699763921677, + "grad_norm": 3.8068326485199884, + "learning_rate": 4.016347083830572e-06, + "loss": 0.5299, + "step": 8298 + }, + { + "epoch": 0.5762394111928899, + "grad_norm": 3.1571259414289674, + "learning_rate": 4.015244596600202e-06, + "loss": 0.3301, + "step": 8299 + }, + { + "epoch": 0.576308845993612, + "grad_norm": 5.927453176704675, + "learning_rate": 4.014142159177645e-06, + "loss": 0.6132, + "step": 8300 + }, + { + "epoch": 0.5763782807943342, + "grad_norm": 3.993876816356581, + "learning_rate": 4.013039771618664e-06, + "loss": 0.5652, + "step": 8301 + }, + { + "epoch": 0.5764477155950563, + "grad_norm": 4.538385168103553, + "learning_rate": 4.011937433979014e-06, + "loss": 0.5237, + "step": 8302 + }, + { + "epoch": 0.5765171503957783, + "grad_norm": 4.813324538972775, + "learning_rate": 4.010835146314449e-06, + "loss": 0.4832, + "step": 8303 + }, + { + "epoch": 0.5765865851965005, + "grad_norm": 3.97776369235501, + "learning_rate": 4.009732908680724e-06, + "loss": 0.557, + "step": 8304 + }, + { + "epoch": 0.5766560199972226, + "grad_norm": 3.6619011202153247, + "learning_rate": 4.00863072113359e-06, + "loss": 0.4126, + "step": 8305 + }, + { + "epoch": 0.5767254547979447, + "grad_norm": 4.47160206177307, + "learning_rate": 4.007528583728788e-06, + "loss": 0.4051, + "step": 8306 + }, + { + "epoch": 0.5767948895986669, + "grad_norm": 
4.8179334562298735, + "learning_rate": 4.00642649652207e-06, + "loss": 0.4593, + "step": 8307 + }, + { + "epoch": 0.576864324399389, + "grad_norm": 3.1833440261712727, + "learning_rate": 4.005324459569176e-06, + "loss": 0.2362, + "step": 8308 + }, + { + "epoch": 0.5769337592001111, + "grad_norm": 4.141055657701225, + "learning_rate": 4.004222472925843e-06, + "loss": 0.2366, + "step": 8309 + }, + { + "epoch": 0.5770031940008332, + "grad_norm": 3.0514158939574108, + "learning_rate": 4.003120536647811e-06, + "loss": 0.4718, + "step": 8310 + }, + { + "epoch": 0.5770726288015553, + "grad_norm": 3.4230497066950556, + "learning_rate": 4.002018650790815e-06, + "loss": 0.2625, + "step": 8311 + }, + { + "epoch": 0.5771420636022775, + "grad_norm": 4.803066722102279, + "learning_rate": 4.000916815410587e-06, + "loss": 0.7524, + "step": 8312 + }, + { + "epoch": 0.5772114984029996, + "grad_norm": 3.4898718891381, + "learning_rate": 3.999815030562855e-06, + "loss": 0.5221, + "step": 8313 + }, + { + "epoch": 0.5772809332037218, + "grad_norm": 3.6619907669021785, + "learning_rate": 3.998713296303347e-06, + "loss": 0.4321, + "step": 8314 + }, + { + "epoch": 0.5773503680044438, + "grad_norm": 3.729229086518605, + "learning_rate": 3.997611612687786e-06, + "loss": 0.4449, + "step": 8315 + }, + { + "epoch": 0.5774198028051659, + "grad_norm": 2.8968451595450944, + "learning_rate": 3.996509979771899e-06, + "loss": 0.2336, + "step": 8316 + }, + { + "epoch": 0.5774892376058881, + "grad_norm": 3.542188430787019, + "learning_rate": 3.995408397611399e-06, + "loss": 0.4427, + "step": 8317 + }, + { + "epoch": 0.5775586724066102, + "grad_norm": 10.303692356523682, + "learning_rate": 3.994306866262005e-06, + "loss": 0.6655, + "step": 8318 + }, + { + "epoch": 0.5776281072073323, + "grad_norm": 2.678691479838158, + "learning_rate": 3.993205385779434e-06, + "loss": 0.2019, + "step": 8319 + }, + { + "epoch": 0.5776975420080545, + "grad_norm": 3.0774692979372746, + "learning_rate": 3.992103956219393e-06, + "loss": 0.3375, + "step": 8320 + }, + { + "epoch": 0.5777669768087765, + "grad_norm": 4.43823016801703, + "learning_rate": 3.991002577637595e-06, + "loss": 0.6069, + "step": 8321 + }, + { + "epoch": 0.5778364116094987, + "grad_norm": 4.0164346791106125, + "learning_rate": 3.989901250089743e-06, + "loss": 0.5368, + "step": 8322 + }, + { + "epoch": 0.5779058464102208, + "grad_norm": 3.925388077439334, + "learning_rate": 3.988799973631546e-06, + "loss": 0.4042, + "step": 8323 + }, + { + "epoch": 0.5779752812109429, + "grad_norm": 2.2756170192592777, + "learning_rate": 3.9876987483187005e-06, + "loss": 0.1972, + "step": 8324 + }, + { + "epoch": 0.5780447160116651, + "grad_norm": 3.8199855051037748, + "learning_rate": 3.9865975742069055e-06, + "loss": 0.3483, + "step": 8325 + }, + { + "epoch": 0.5781141508123872, + "grad_norm": 3.367689396144764, + "learning_rate": 3.98549645135186e-06, + "loss": 0.3895, + "step": 8326 + }, + { + "epoch": 0.5781835856131093, + "grad_norm": 3.6054070777508493, + "learning_rate": 3.984395379809256e-06, + "loss": 0.4893, + "step": 8327 + }, + { + "epoch": 0.5782530204138314, + "grad_norm": 5.23973456659734, + "learning_rate": 3.983294359634783e-06, + "loss": 0.6332, + "step": 8328 + }, + { + "epoch": 0.5783224552145535, + "grad_norm": 3.564335850316491, + "learning_rate": 3.98219339088413e-06, + "loss": 0.3913, + "step": 8329 + }, + { + "epoch": 0.5783918900152757, + "grad_norm": 3.2602457904661364, + "learning_rate": 3.981092473612987e-06, + "loss": 0.3611, + "step": 8330 + }, + { + "epoch": 
0.5784613248159978, + "grad_norm": 6.553837489491782, + "learning_rate": 3.979991607877032e-06, + "loss": 0.9449, + "step": 8331 + }, + { + "epoch": 0.5785307596167198, + "grad_norm": 4.596923151282438, + "learning_rate": 3.978890793731947e-06, + "loss": 0.4192, + "step": 8332 + }, + { + "epoch": 0.578600194417442, + "grad_norm": 3.7962532720516124, + "learning_rate": 3.977790031233411e-06, + "loss": 0.5468, + "step": 8333 + }, + { + "epoch": 0.5786696292181641, + "grad_norm": 5.051442999233338, + "learning_rate": 3.976689320437098e-06, + "loss": 0.5829, + "step": 8334 + }, + { + "epoch": 0.5787390640188863, + "grad_norm": 3.125875125123957, + "learning_rate": 3.9755886613986806e-06, + "loss": 0.3667, + "step": 8335 + }, + { + "epoch": 0.5788084988196084, + "grad_norm": 4.08034835110671, + "learning_rate": 3.97448805417383e-06, + "loss": 0.5218, + "step": 8336 + }, + { + "epoch": 0.5788779336203305, + "grad_norm": 5.200081542687821, + "learning_rate": 3.9733874988182145e-06, + "loss": 0.7339, + "step": 8337 + }, + { + "epoch": 0.5789473684210527, + "grad_norm": 4.351381081037609, + "learning_rate": 3.972286995387495e-06, + "loss": 0.6607, + "step": 8338 + }, + { + "epoch": 0.5790168032217747, + "grad_norm": 2.748537906854643, + "learning_rate": 3.971186543937337e-06, + "loss": 0.1615, + "step": 8339 + }, + { + "epoch": 0.5790862380224969, + "grad_norm": 3.2936182164727166, + "learning_rate": 3.9700861445234e-06, + "loss": 0.3194, + "step": 8340 + }, + { + "epoch": 0.579155672823219, + "grad_norm": 4.5301561613284855, + "learning_rate": 3.968985797201343e-06, + "loss": 0.5029, + "step": 8341 + }, + { + "epoch": 0.5792251076239411, + "grad_norm": 4.713040705120686, + "learning_rate": 3.9678855020268135e-06, + "loss": 0.2712, + "step": 8342 + }, + { + "epoch": 0.5792945424246633, + "grad_norm": 4.391819122280112, + "learning_rate": 3.966785259055469e-06, + "loss": 0.5121, + "step": 8343 + }, + { + "epoch": 0.5793639772253854, + "grad_norm": 4.020215422494957, + "learning_rate": 3.965685068342958e-06, + "loss": 0.4846, + "step": 8344 + }, + { + "epoch": 0.5794334120261074, + "grad_norm": 3.8145602613541683, + "learning_rate": 3.9645849299449245e-06, + "loss": 0.4079, + "step": 8345 + }, + { + "epoch": 0.5795028468268296, + "grad_norm": 5.172869668220643, + "learning_rate": 3.963484843917014e-06, + "loss": 0.7602, + "step": 8346 + }, + { + "epoch": 0.5795722816275517, + "grad_norm": 4.389851392994651, + "learning_rate": 3.962384810314867e-06, + "loss": 0.6465, + "step": 8347 + }, + { + "epoch": 0.5796417164282739, + "grad_norm": 4.421636816440425, + "learning_rate": 3.9612848291941234e-06, + "loss": 0.4587, + "step": 8348 + }, + { + "epoch": 0.579711151228996, + "grad_norm": 3.7393576479878536, + "learning_rate": 3.960184900610416e-06, + "loss": 0.5233, + "step": 8349 + }, + { + "epoch": 0.579780586029718, + "grad_norm": 4.315731618211668, + "learning_rate": 3.95908502461938e-06, + "loss": 0.5783, + "step": 8350 + }, + { + "epoch": 0.5798500208304402, + "grad_norm": 4.116040934602692, + "learning_rate": 3.957985201276647e-06, + "loss": 0.4575, + "step": 8351 + }, + { + "epoch": 0.5799194556311623, + "grad_norm": 4.828259289999425, + "learning_rate": 3.956885430637841e-06, + "loss": 0.4806, + "step": 8352 + }, + { + "epoch": 0.5799888904318845, + "grad_norm": 3.9071177219333877, + "learning_rate": 3.955785712758591e-06, + "loss": 0.5295, + "step": 8353 + }, + { + "epoch": 0.5800583252326066, + "grad_norm": 3.6242505495354087, + "learning_rate": 3.954686047694517e-06, + "loss": 0.4358, + "step": 
8354 + }, + { + "epoch": 0.5801277600333287, + "grad_norm": 3.8431766203035234, + "learning_rate": 3.9535864355012425e-06, + "loss": 0.5996, + "step": 8355 + }, + { + "epoch": 0.5801971948340509, + "grad_norm": 5.256780190710877, + "learning_rate": 3.95248687623438e-06, + "loss": 0.663, + "step": 8356 + }, + { + "epoch": 0.5802666296347729, + "grad_norm": 4.930484442314402, + "learning_rate": 3.951387369949545e-06, + "loss": 0.5178, + "step": 8357 + }, + { + "epoch": 0.5803360644354951, + "grad_norm": 2.889109550262033, + "learning_rate": 3.950287916702354e-06, + "loss": 0.2985, + "step": 8358 + }, + { + "epoch": 0.5804054992362172, + "grad_norm": 4.232154749773528, + "learning_rate": 3.949188516548409e-06, + "loss": 0.3386, + "step": 8359 + }, + { + "epoch": 0.5804749340369393, + "grad_norm": 3.2420368922988847, + "learning_rate": 3.9480891695433195e-06, + "loss": 0.317, + "step": 8360 + }, + { + "epoch": 0.5805443688376615, + "grad_norm": 4.291089469216785, + "learning_rate": 3.94698987574269e-06, + "loss": 0.3614, + "step": 8361 + }, + { + "epoch": 0.5806138036383836, + "grad_norm": 3.520656868587251, + "learning_rate": 3.945890635202123e-06, + "loss": 0.3863, + "step": 8362 + }, + { + "epoch": 0.5806832384391056, + "grad_norm": 3.3824923807433773, + "learning_rate": 3.944791447977213e-06, + "loss": 0.2196, + "step": 8363 + }, + { + "epoch": 0.5807526732398278, + "grad_norm": 4.406458554674027, + "learning_rate": 3.9436923141235574e-06, + "loss": 0.5993, + "step": 8364 + }, + { + "epoch": 0.5808221080405499, + "grad_norm": 2.8893878532168, + "learning_rate": 3.94259323369675e-06, + "loss": 0.3231, + "step": 8365 + }, + { + "epoch": 0.5808915428412721, + "grad_norm": 3.401012218895298, + "learning_rate": 3.941494206752381e-06, + "loss": 0.3107, + "step": 8366 + }, + { + "epoch": 0.5809609776419942, + "grad_norm": 3.6532482587450565, + "learning_rate": 3.940395233346036e-06, + "loss": 0.397, + "step": 8367 + }, + { + "epoch": 0.5810304124427162, + "grad_norm": 4.010710991936554, + "learning_rate": 3.939296313533301e-06, + "loss": 0.5426, + "step": 8368 + }, + { + "epoch": 0.5810998472434384, + "grad_norm": 2.777008502644784, + "learning_rate": 3.938197447369759e-06, + "loss": 0.2758, + "step": 8369 + }, + { + "epoch": 0.5811692820441605, + "grad_norm": 4.163724887190737, + "learning_rate": 3.937098634910989e-06, + "loss": 0.5743, + "step": 8370 + }, + { + "epoch": 0.5812387168448827, + "grad_norm": 3.3126008796877575, + "learning_rate": 3.9359998762125655e-06, + "loss": 0.3639, + "step": 8371 + }, + { + "epoch": 0.5813081516456048, + "grad_norm": 3.3345488838460353, + "learning_rate": 3.934901171330065e-06, + "loss": 0.4139, + "step": 8372 + }, + { + "epoch": 0.5813775864463269, + "grad_norm": 4.64065549538711, + "learning_rate": 3.93380252031906e-06, + "loss": 0.4962, + "step": 8373 + }, + { + "epoch": 0.5814470212470491, + "grad_norm": 4.018089025717462, + "learning_rate": 3.932703923235115e-06, + "loss": 0.5307, + "step": 8374 + }, + { + "epoch": 0.5815164560477711, + "grad_norm": 4.165649964919355, + "learning_rate": 3.9316053801337975e-06, + "loss": 0.4472, + "step": 8375 + }, + { + "epoch": 0.5815858908484932, + "grad_norm": 3.341719043285829, + "learning_rate": 3.930506891070673e-06, + "loss": 0.3559, + "step": 8376 + }, + { + "epoch": 0.5816553256492154, + "grad_norm": 3.935040521240501, + "learning_rate": 3.929408456101298e-06, + "loss": 0.3765, + "step": 8377 + }, + { + "epoch": 0.5817247604499375, + "grad_norm": 5.183353394951048, + "learning_rate": 3.9283100752812306e-06, + 
"loss": 0.7425, + "step": 8378 + }, + { + "epoch": 0.5817941952506597, + "grad_norm": 3.8219290875100342, + "learning_rate": 3.927211748666029e-06, + "loss": 0.5006, + "step": 8379 + }, + { + "epoch": 0.5818636300513818, + "grad_norm": 4.245770193129816, + "learning_rate": 3.926113476311244e-06, + "loss": 0.6895, + "step": 8380 + }, + { + "epoch": 0.5819330648521038, + "grad_norm": 3.8657273928962828, + "learning_rate": 3.925015258272422e-06, + "loss": 0.568, + "step": 8381 + }, + { + "epoch": 0.582002499652826, + "grad_norm": 2.510081813948898, + "learning_rate": 3.923917094605113e-06, + "loss": 0.2302, + "step": 8382 + }, + { + "epoch": 0.5820719344535481, + "grad_norm": 3.9545930185537284, + "learning_rate": 3.92281898536486e-06, + "loss": 0.5249, + "step": 8383 + }, + { + "epoch": 0.5821413692542703, + "grad_norm": 4.138970728580292, + "learning_rate": 3.921720930607203e-06, + "loss": 0.5818, + "step": 8384 + }, + { + "epoch": 0.5822108040549924, + "grad_norm": 2.657277571267311, + "learning_rate": 3.920622930387681e-06, + "loss": 0.1763, + "step": 8385 + }, + { + "epoch": 0.5822802388557144, + "grad_norm": 3.5027391466759363, + "learning_rate": 3.9195249847618285e-06, + "loss": 0.3472, + "step": 8386 + }, + { + "epoch": 0.5823496736564366, + "grad_norm": 4.819812076049637, + "learning_rate": 3.918427093785183e-06, + "loss": 0.4851, + "step": 8387 + }, + { + "epoch": 0.5824191084571587, + "grad_norm": 3.4196915628035707, + "learning_rate": 3.917329257513269e-06, + "loss": 0.2543, + "step": 8388 + }, + { + "epoch": 0.5824885432578808, + "grad_norm": 5.104625973904496, + "learning_rate": 3.916231476001616e-06, + "loss": 0.8003, + "step": 8389 + }, + { + "epoch": 0.582557978058603, + "grad_norm": 2.715210096401347, + "learning_rate": 3.915133749305748e-06, + "loss": 0.3185, + "step": 8390 + }, + { + "epoch": 0.5826274128593251, + "grad_norm": 3.275802814641016, + "learning_rate": 3.91403607748119e-06, + "loss": 0.2854, + "step": 8391 + }, + { + "epoch": 0.5826968476600473, + "grad_norm": 4.9067210119764395, + "learning_rate": 3.912938460583456e-06, + "loss": 0.3857, + "step": 8392 + }, + { + "epoch": 0.5827662824607693, + "grad_norm": 4.004462018097176, + "learning_rate": 3.911840898668065e-06, + "loss": 0.4379, + "step": 8393 + }, + { + "epoch": 0.5828357172614914, + "grad_norm": 5.375110249648641, + "learning_rate": 3.910743391790532e-06, + "loss": 0.7087, + "step": 8394 + }, + { + "epoch": 0.5829051520622136, + "grad_norm": 3.3378333116121435, + "learning_rate": 3.909645940006364e-06, + "loss": 0.404, + "step": 8395 + }, + { + "epoch": 0.5829745868629357, + "grad_norm": 4.329949587818784, + "learning_rate": 3.908548543371069e-06, + "loss": 0.5316, + "step": 8396 + }, + { + "epoch": 0.5830440216636579, + "grad_norm": 3.0871269555376424, + "learning_rate": 3.9074512019401555e-06, + "loss": 0.2353, + "step": 8397 + }, + { + "epoch": 0.58311345646438, + "grad_norm": 4.657783021310161, + "learning_rate": 3.906353915769125e-06, + "loss": 0.6441, + "step": 8398 + }, + { + "epoch": 0.583182891265102, + "grad_norm": 3.1496711144035108, + "learning_rate": 3.905256684913473e-06, + "loss": 0.3655, + "step": 8399 + }, + { + "epoch": 0.5832523260658242, + "grad_norm": 2.620243311916356, + "learning_rate": 3.9041595094287e-06, + "loss": 0.2785, + "step": 8400 + }, + { + "epoch": 0.5833217608665463, + "grad_norm": 3.6432526829370118, + "learning_rate": 3.903062389370301e-06, + "loss": 0.3487, + "step": 8401 + }, + { + "epoch": 0.5833911956672684, + "grad_norm": 4.440160697921049, + "learning_rate": 
3.901965324793764e-06, + "loss": 0.3752, + "step": 8402 + }, + { + "epoch": 0.5834606304679906, + "grad_norm": 5.492622623658774, + "learning_rate": 3.9008683157545765e-06, + "loss": 0.5273, + "step": 8403 + }, + { + "epoch": 0.5835300652687126, + "grad_norm": 4.439879393608571, + "learning_rate": 3.899771362308228e-06, + "loss": 0.6175, + "step": 8404 + }, + { + "epoch": 0.5835995000694348, + "grad_norm": 6.350907617978958, + "learning_rate": 3.8986744645101994e-06, + "loss": 0.6469, + "step": 8405 + }, + { + "epoch": 0.5836689348701569, + "grad_norm": 3.3756604517968545, + "learning_rate": 3.897577622415968e-06, + "loss": 0.3683, + "step": 8406 + }, + { + "epoch": 0.583738369670879, + "grad_norm": 3.2427457915381033, + "learning_rate": 3.896480836081014e-06, + "loss": 0.3068, + "step": 8407 + }, + { + "epoch": 0.5838078044716012, + "grad_norm": 3.96406869259864, + "learning_rate": 3.895384105560812e-06, + "loss": 0.4709, + "step": 8408 + }, + { + "epoch": 0.5838772392723233, + "grad_norm": 3.650824203760492, + "learning_rate": 3.894287430910829e-06, + "loss": 0.2619, + "step": 8409 + }, + { + "epoch": 0.5839466740730455, + "grad_norm": 3.7244571852663095, + "learning_rate": 3.893190812186538e-06, + "loss": 0.5548, + "step": 8410 + }, + { + "epoch": 0.5840161088737675, + "grad_norm": 4.751765833161116, + "learning_rate": 3.892094249443403e-06, + "loss": 0.4457, + "step": 8411 + }, + { + "epoch": 0.5840855436744896, + "grad_norm": 4.12967003817428, + "learning_rate": 3.890997742736889e-06, + "loss": 0.6108, + "step": 8412 + }, + { + "epoch": 0.5841549784752118, + "grad_norm": 3.6533504670571486, + "learning_rate": 3.889901292122452e-06, + "loss": 0.4312, + "step": 8413 + }, + { + "epoch": 0.5842244132759339, + "grad_norm": 3.3699815215688744, + "learning_rate": 3.888804897655553e-06, + "loss": 0.3667, + "step": 8414 + }, + { + "epoch": 0.5842938480766561, + "grad_norm": 3.696356017414049, + "learning_rate": 3.887708559391643e-06, + "loss": 0.4391, + "step": 8415 + }, + { + "epoch": 0.5843632828773782, + "grad_norm": 3.7280304797598873, + "learning_rate": 3.886612277386178e-06, + "loss": 0.4402, + "step": 8416 + }, + { + "epoch": 0.5844327176781002, + "grad_norm": 4.858284882106675, + "learning_rate": 3.885516051694604e-06, + "loss": 0.7441, + "step": 8417 + }, + { + "epoch": 0.5845021524788224, + "grad_norm": 4.323521178526095, + "learning_rate": 3.884419882372364e-06, + "loss": 0.5589, + "step": 8418 + }, + { + "epoch": 0.5845715872795445, + "grad_norm": 4.402313337475904, + "learning_rate": 3.883323769474907e-06, + "loss": 0.6068, + "step": 8419 + }, + { + "epoch": 0.5846410220802666, + "grad_norm": 3.645314357194898, + "learning_rate": 3.882227713057668e-06, + "loss": 0.2983, + "step": 8420 + }, + { + "epoch": 0.5847104568809888, + "grad_norm": 17.097228147996045, + "learning_rate": 3.881131713176087e-06, + "loss": 0.3851, + "step": 8421 + }, + { + "epoch": 0.5847798916817109, + "grad_norm": 3.790211358874174, + "learning_rate": 3.880035769885597e-06, + "loss": 0.3715, + "step": 8422 + }, + { + "epoch": 0.584849326482433, + "grad_norm": 4.874024090775505, + "learning_rate": 3.878939883241633e-06, + "loss": 0.6526, + "step": 8423 + }, + { + "epoch": 0.5849187612831551, + "grad_norm": 3.8993941694969596, + "learning_rate": 3.87784405329962e-06, + "loss": 0.3321, + "step": 8424 + }, + { + "epoch": 0.5849881960838772, + "grad_norm": 3.1606081153851804, + "learning_rate": 3.876748280114984e-06, + "loss": 0.4448, + "step": 8425 + }, + { + "epoch": 0.5850576308845994, + "grad_norm": 
3.3479964256189514, + "learning_rate": 3.875652563743151e-06, + "loss": 0.3126, + "step": 8426 + }, + { + "epoch": 0.5851270656853215, + "grad_norm": 3.2693408452751327, + "learning_rate": 3.874556904239538e-06, + "loss": 0.2758, + "step": 8427 + }, + { + "epoch": 0.5851965004860437, + "grad_norm": 4.942275814622401, + "learning_rate": 3.873461301659562e-06, + "loss": 0.4931, + "step": 8428 + }, + { + "epoch": 0.5852659352867657, + "grad_norm": 3.7595652371294213, + "learning_rate": 3.872365756058639e-06, + "loss": 0.4454, + "step": 8429 + }, + { + "epoch": 0.5853353700874878, + "grad_norm": 4.166533526352181, + "learning_rate": 3.8712702674921815e-06, + "loss": 0.6004, + "step": 8430 + }, + { + "epoch": 0.58540480488821, + "grad_norm": 4.791549355396392, + "learning_rate": 3.870174836015596e-06, + "loss": 0.5471, + "step": 8431 + }, + { + "epoch": 0.5854742396889321, + "grad_norm": 3.252195116406542, + "learning_rate": 3.869079461684287e-06, + "loss": 0.498, + "step": 8432 + }, + { + "epoch": 0.5855436744896542, + "grad_norm": 3.5098078464936293, + "learning_rate": 3.867984144553662e-06, + "loss": 0.3072, + "step": 8433 + }, + { + "epoch": 0.5856131092903764, + "grad_norm": 3.8295261714452975, + "learning_rate": 3.866888884679117e-06, + "loss": 0.3309, + "step": 8434 + }, + { + "epoch": 0.5856825440910984, + "grad_norm": 3.9296071218312236, + "learning_rate": 3.8657936821160495e-06, + "loss": 0.5009, + "step": 8435 + }, + { + "epoch": 0.5857519788918206, + "grad_norm": 3.5504965955494425, + "learning_rate": 3.864698536919854e-06, + "loss": 0.3203, + "step": 8436 + }, + { + "epoch": 0.5858214136925427, + "grad_norm": 2.5504847698761615, + "learning_rate": 3.863603449145923e-06, + "loss": 0.2454, + "step": 8437 + }, + { + "epoch": 0.5858908484932648, + "grad_norm": 4.111766292954278, + "learning_rate": 3.862508418849642e-06, + "loss": 0.4455, + "step": 8438 + }, + { + "epoch": 0.585960283293987, + "grad_norm": 3.6683038046789465, + "learning_rate": 3.861413446086398e-06, + "loss": 0.3525, + "step": 8439 + }, + { + "epoch": 0.586029718094709, + "grad_norm": 3.608241214120669, + "learning_rate": 3.8603185309115745e-06, + "loss": 0.4096, + "step": 8440 + }, + { + "epoch": 0.5860991528954312, + "grad_norm": 2.252712947091438, + "learning_rate": 3.859223673380551e-06, + "loss": 0.2396, + "step": 8441 + }, + { + "epoch": 0.5861685876961533, + "grad_norm": 3.005488730931568, + "learning_rate": 3.858128873548701e-06, + "loss": 0.2475, + "step": 8442 + }, + { + "epoch": 0.5862380224968754, + "grad_norm": 3.8407954942218097, + "learning_rate": 3.8570341314714025e-06, + "loss": 0.4409, + "step": 8443 + }, + { + "epoch": 0.5863074572975976, + "grad_norm": 2.495524239940476, + "learning_rate": 3.855939447204024e-06, + "loss": 0.1602, + "step": 8444 + }, + { + "epoch": 0.5863768920983197, + "grad_norm": 2.7912954588430563, + "learning_rate": 3.854844820801932e-06, + "loss": 0.3244, + "step": 8445 + }, + { + "epoch": 0.5864463268990417, + "grad_norm": 4.950443307519895, + "learning_rate": 3.853750252320494e-06, + "loss": 0.6244, + "step": 8446 + }, + { + "epoch": 0.5865157616997639, + "grad_norm": 4.754334454270179, + "learning_rate": 3.852655741815071e-06, + "loss": 0.5453, + "step": 8447 + }, + { + "epoch": 0.586585196500486, + "grad_norm": 4.2891910884307265, + "learning_rate": 3.851561289341023e-06, + "loss": 0.4327, + "step": 8448 + }, + { + "epoch": 0.5866546313012082, + "grad_norm": 2.4214264081507855, + "learning_rate": 3.850466894953704e-06, + "loss": 0.2041, + "step": 8449 + }, + { + "epoch": 
0.5867240661019303, + "grad_norm": 3.0842758703989257, + "learning_rate": 3.84937255870847e-06, + "loss": 0.2756, + "step": 8450 + }, + { + "epoch": 0.5867935009026524, + "grad_norm": 2.002186032555324, + "learning_rate": 3.848278280660671e-06, + "loss": 0.1039, + "step": 8451 + }, + { + "epoch": 0.5868629357033746, + "grad_norm": 4.1827634175033195, + "learning_rate": 3.847184060865651e-06, + "loss": 0.4986, + "step": 8452 + }, + { + "epoch": 0.5869323705040966, + "grad_norm": 4.051641478625084, + "learning_rate": 3.846089899378758e-06, + "loss": 0.3005, + "step": 8453 + }, + { + "epoch": 0.5870018053048188, + "grad_norm": 4.689006398047894, + "learning_rate": 3.8449957962553305e-06, + "loss": 0.6752, + "step": 8454 + }, + { + "epoch": 0.5870712401055409, + "grad_norm": 3.95345387433119, + "learning_rate": 3.843901751550712e-06, + "loss": 0.5682, + "step": 8455 + }, + { + "epoch": 0.587140674906263, + "grad_norm": 4.860830166962689, + "learning_rate": 3.842807765320233e-06, + "loss": 0.5492, + "step": 8456 + }, + { + "epoch": 0.5872101097069852, + "grad_norm": 2.8187573834130037, + "learning_rate": 3.841713837619225e-06, + "loss": 0.4363, + "step": 8457 + }, + { + "epoch": 0.5872795445077073, + "grad_norm": 4.741577810534145, + "learning_rate": 3.840619968503025e-06, + "loss": 0.4092, + "step": 8458 + }, + { + "epoch": 0.5873489793084293, + "grad_norm": 3.873465295308872, + "learning_rate": 3.839526158026951e-06, + "loss": 0.2452, + "step": 8459 + }, + { + "epoch": 0.5874184141091515, + "grad_norm": 3.1930337554913097, + "learning_rate": 3.838432406246333e-06, + "loss": 0.3823, + "step": 8460 + }, + { + "epoch": 0.5874878489098736, + "grad_norm": 4.108479806624415, + "learning_rate": 3.837338713216489e-06, + "loss": 0.4078, + "step": 8461 + }, + { + "epoch": 0.5875572837105958, + "grad_norm": 3.813778921813502, + "learning_rate": 3.836245078992738e-06, + "loss": 0.5565, + "step": 8462 + }, + { + "epoch": 0.5876267185113179, + "grad_norm": 4.279888073831197, + "learning_rate": 3.835151503630393e-06, + "loss": 0.6473, + "step": 8463 + }, + { + "epoch": 0.58769615331204, + "grad_norm": 3.7070688370117675, + "learning_rate": 3.834057987184766e-06, + "loss": 0.3695, + "step": 8464 + }, + { + "epoch": 0.5877655881127621, + "grad_norm": 3.9476571650736454, + "learning_rate": 3.83296452971117e-06, + "loss": 0.5541, + "step": 8465 + }, + { + "epoch": 0.5878350229134842, + "grad_norm": 5.015061827729214, + "learning_rate": 3.831871131264905e-06, + "loss": 0.6399, + "step": 8466 + }, + { + "epoch": 0.5879044577142064, + "grad_norm": 4.876442935476198, + "learning_rate": 3.830777791901276e-06, + "loss": 0.3994, + "step": 8467 + }, + { + "epoch": 0.5879738925149285, + "grad_norm": 3.748252251791791, + "learning_rate": 3.8296845116755835e-06, + "loss": 0.506, + "step": 8468 + }, + { + "epoch": 0.5880433273156506, + "grad_norm": 4.075130910895806, + "learning_rate": 3.8285912906431255e-06, + "loss": 0.3991, + "step": 8469 + }, + { + "epoch": 0.5881127621163728, + "grad_norm": 3.7209239906883975, + "learning_rate": 3.827498128859193e-06, + "loss": 0.3816, + "step": 8470 + }, + { + "epoch": 0.5881821969170948, + "grad_norm": 4.733018665176041, + "learning_rate": 3.826405026379078e-06, + "loss": 0.4629, + "step": 8471 + }, + { + "epoch": 0.588251631717817, + "grad_norm": 4.195344191506253, + "learning_rate": 3.82531198325807e-06, + "loss": 0.6005, + "step": 8472 + }, + { + "epoch": 0.5883210665185391, + "grad_norm": 3.977346420240632, + "learning_rate": 3.824218999551454e-06, + "loss": 0.5097, + "step": 
8473 + }, + { + "epoch": 0.5883905013192612, + "grad_norm": 3.8309195728138135, + "learning_rate": 3.823126075314508e-06, + "loss": 0.4314, + "step": 8474 + }, + { + "epoch": 0.5884599361199834, + "grad_norm": 3.9785861384914085, + "learning_rate": 3.822033210602514e-06, + "loss": 0.4829, + "step": 8475 + }, + { + "epoch": 0.5885293709207055, + "grad_norm": 3.696269388990146, + "learning_rate": 3.820940405470749e-06, + "loss": 0.3352, + "step": 8476 + }, + { + "epoch": 0.5885988057214275, + "grad_norm": 2.9841407377205145, + "learning_rate": 3.819847659974481e-06, + "loss": 0.3525, + "step": 8477 + }, + { + "epoch": 0.5886682405221497, + "grad_norm": 4.15582494760309, + "learning_rate": 3.818754974168985e-06, + "loss": 0.5331, + "step": 8478 + }, + { + "epoch": 0.5887376753228718, + "grad_norm": 3.0474825899216573, + "learning_rate": 3.817662348109524e-06, + "loss": 0.3317, + "step": 8479 + }, + { + "epoch": 0.588807110123594, + "grad_norm": 4.288021191793315, + "learning_rate": 3.816569781851366e-06, + "loss": 0.5627, + "step": 8480 + }, + { + "epoch": 0.5888765449243161, + "grad_norm": 4.980832700153119, + "learning_rate": 3.815477275449767e-06, + "loss": 0.7722, + "step": 8481 + }, + { + "epoch": 0.5889459797250381, + "grad_norm": 3.859889456936487, + "learning_rate": 3.8143848289599885e-06, + "loss": 0.446, + "step": 8482 + }, + { + "epoch": 0.5890154145257603, + "grad_norm": 3.6758535871594593, + "learning_rate": 3.813292442437284e-06, + "loss": 0.3974, + "step": 8483 + }, + { + "epoch": 0.5890848493264824, + "grad_norm": 2.975107543897671, + "learning_rate": 3.8122001159369038e-06, + "loss": 0.3408, + "step": 8484 + }, + { + "epoch": 0.5891542841272046, + "grad_norm": 4.080592256913607, + "learning_rate": 3.811107849514098e-06, + "loss": 0.6018, + "step": 8485 + }, + { + "epoch": 0.5892237189279267, + "grad_norm": 4.191330199340877, + "learning_rate": 3.810015643224111e-06, + "loss": 0.4093, + "step": 8486 + }, + { + "epoch": 0.5892931537286488, + "grad_norm": 3.9594657119985364, + "learning_rate": 3.8089234971221884e-06, + "loss": 0.3774, + "step": 8487 + }, + { + "epoch": 0.589362588529371, + "grad_norm": 4.767078790143186, + "learning_rate": 3.807831411263566e-06, + "loss": 0.7029, + "step": 8488 + }, + { + "epoch": 0.589432023330093, + "grad_norm": 3.327620621157235, + "learning_rate": 3.806739385703481e-06, + "loss": 0.4763, + "step": 8489 + }, + { + "epoch": 0.5895014581308151, + "grad_norm": 5.143335390960936, + "learning_rate": 3.80564742049717e-06, + "loss": 0.7385, + "step": 8490 + }, + { + "epoch": 0.5895708929315373, + "grad_norm": 4.276557970754809, + "learning_rate": 3.804555515699859e-06, + "loss": 0.7023, + "step": 8491 + }, + { + "epoch": 0.5896403277322594, + "grad_norm": 6.804498934939847, + "learning_rate": 3.8034636713667777e-06, + "loss": 0.5053, + "step": 8492 + }, + { + "epoch": 0.5897097625329816, + "grad_norm": 4.7116579864499215, + "learning_rate": 3.802371887553149e-06, + "loss": 0.5227, + "step": 8493 + }, + { + "epoch": 0.5897791973337037, + "grad_norm": 3.368677250784847, + "learning_rate": 3.801280164314197e-06, + "loss": 0.4151, + "step": 8494 + }, + { + "epoch": 0.5898486321344257, + "grad_norm": 2.2769359342258837, + "learning_rate": 3.8001885017051377e-06, + "loss": 0.1154, + "step": 8495 + }, + { + "epoch": 0.5899180669351479, + "grad_norm": 3.5989338637236257, + "learning_rate": 3.7990968997811847e-06, + "loss": 0.3009, + "step": 8496 + }, + { + "epoch": 0.58998750173587, + "grad_norm": 4.838549550301688, + "learning_rate": 
3.7980053585975526e-06, + "loss": 0.4369, + "step": 8497 + }, + { + "epoch": 0.5900569365365922, + "grad_norm": 3.0852712330663654, + "learning_rate": 3.79691387820945e-06, + "loss": 0.3329, + "step": 8498 + }, + { + "epoch": 0.5901263713373143, + "grad_norm": 3.740370702252939, + "learning_rate": 3.7958224586720805e-06, + "loss": 0.4645, + "step": 8499 + }, + { + "epoch": 0.5901958061380363, + "grad_norm": 3.5197247511630367, + "learning_rate": 3.7947311000406483e-06, + "loss": 0.3749, + "step": 8500 + }, + { + "epoch": 0.5902652409387585, + "grad_norm": 3.441555325258468, + "learning_rate": 3.793639802370355e-06, + "loss": 0.4317, + "step": 8501 + }, + { + "epoch": 0.5903346757394806, + "grad_norm": 3.435896582663432, + "learning_rate": 3.7925485657163946e-06, + "loss": 0.2947, + "step": 8502 + }, + { + "epoch": 0.5904041105402027, + "grad_norm": 3.8504637330436027, + "learning_rate": 3.7914573901339602e-06, + "loss": 0.5698, + "step": 8503 + }, + { + "epoch": 0.5904735453409249, + "grad_norm": 4.944167743574787, + "learning_rate": 3.790366275678245e-06, + "loss": 0.632, + "step": 8504 + }, + { + "epoch": 0.590542980141647, + "grad_norm": 3.126626931646617, + "learning_rate": 3.7892752224044353e-06, + "loss": 0.287, + "step": 8505 + }, + { + "epoch": 0.5906124149423692, + "grad_norm": 3.1152108949424897, + "learning_rate": 3.788184230367713e-06, + "loss": 0.3999, + "step": 8506 + }, + { + "epoch": 0.5906818497430912, + "grad_norm": 3.534621815375558, + "learning_rate": 3.787093299623262e-06, + "loss": 0.3545, + "step": 8507 + }, + { + "epoch": 0.5907512845438133, + "grad_norm": 3.81013552083832, + "learning_rate": 3.7860024302262603e-06, + "loss": 0.3806, + "step": 8508 + }, + { + "epoch": 0.5908207193445355, + "grad_norm": 4.033634866429512, + "learning_rate": 3.7849116222318803e-06, + "loss": 0.4994, + "step": 8509 + }, + { + "epoch": 0.5908901541452576, + "grad_norm": 5.071912557673434, + "learning_rate": 3.7838208756952956e-06, + "loss": 0.517, + "step": 8510 + }, + { + "epoch": 0.5909595889459798, + "grad_norm": 3.6541344130061715, + "learning_rate": 3.782730190671675e-06, + "loss": 0.2126, + "step": 8511 + }, + { + "epoch": 0.5910290237467019, + "grad_norm": 4.402263908175208, + "learning_rate": 3.7816395672161852e-06, + "loss": 0.5274, + "step": 8512 + }, + { + "epoch": 0.5910984585474239, + "grad_norm": 4.79185426522873, + "learning_rate": 3.780549005383986e-06, + "loss": 0.6416, + "step": 8513 + }, + { + "epoch": 0.5911678933481461, + "grad_norm": 3.7500921710261137, + "learning_rate": 3.779458505230239e-06, + "loss": 0.5415, + "step": 8514 + }, + { + "epoch": 0.5912373281488682, + "grad_norm": 3.2405816830650713, + "learning_rate": 3.7783680668101003e-06, + "loss": 0.3399, + "step": 8515 + }, + { + "epoch": 0.5913067629495903, + "grad_norm": 4.04246279272713, + "learning_rate": 3.7772776901787206e-06, + "loss": 0.689, + "step": 8516 + }, + { + "epoch": 0.5913761977503125, + "grad_norm": 3.520377505867746, + "learning_rate": 3.7761873753912526e-06, + "loss": 0.4181, + "step": 8517 + }, + { + "epoch": 0.5914456325510346, + "grad_norm": 6.778310251607479, + "learning_rate": 3.7750971225028415e-06, + "loss": 0.411, + "step": 8518 + }, + { + "epoch": 0.5915150673517567, + "grad_norm": 4.107538884274487, + "learning_rate": 3.774006931568634e-06, + "loss": 0.4638, + "step": 8519 + }, + { + "epoch": 0.5915845021524788, + "grad_norm": 4.386435497473944, + "learning_rate": 3.7729168026437663e-06, + "loss": 0.4391, + "step": 8520 + }, + { + "epoch": 0.5916539369532009, + "grad_norm": 
3.218257816738442, + "learning_rate": 3.7718267357833794e-06, + "loss": 0.334, + "step": 8521 + }, + { + "epoch": 0.5917233717539231, + "grad_norm": 4.273498719851935, + "learning_rate": 3.770736731042605e-06, + "loss": 0.5073, + "step": 8522 + }, + { + "epoch": 0.5917928065546452, + "grad_norm": 3.2483792076016558, + "learning_rate": 3.769646788476578e-06, + "loss": 0.245, + "step": 8523 + }, + { + "epoch": 0.5918622413553674, + "grad_norm": 2.726052675213289, + "learning_rate": 3.768556908140424e-06, + "loss": 0.2128, + "step": 8524 + }, + { + "epoch": 0.5919316761560894, + "grad_norm": 4.011885320113991, + "learning_rate": 3.7674670900892656e-06, + "loss": 0.4947, + "step": 8525 + }, + { + "epoch": 0.5920011109568115, + "grad_norm": 3.79163239049805, + "learning_rate": 3.76637733437823e-06, + "loss": 0.434, + "step": 8526 + }, + { + "epoch": 0.5920705457575337, + "grad_norm": 4.100593410796163, + "learning_rate": 3.7652876410624316e-06, + "loss": 0.6677, + "step": 8527 + }, + { + "epoch": 0.5921399805582558, + "grad_norm": 3.894478103531174, + "learning_rate": 3.7641980101969865e-06, + "loss": 0.3234, + "step": 8528 + }, + { + "epoch": 0.5922094153589779, + "grad_norm": 4.279489061700669, + "learning_rate": 3.7631084418370075e-06, + "loss": 0.515, + "step": 8529 + }, + { + "epoch": 0.5922788501597, + "grad_norm": 2.749537735965704, + "learning_rate": 3.762018936037606e-06, + "loss": 0.2081, + "step": 8530 + }, + { + "epoch": 0.5923482849604221, + "grad_norm": 4.787935202757789, + "learning_rate": 3.7609294928538843e-06, + "loss": 0.6375, + "step": 8531 + }, + { + "epoch": 0.5924177197611443, + "grad_norm": 4.510729870770608, + "learning_rate": 3.7598401123409462e-06, + "loss": 0.5762, + "step": 8532 + }, + { + "epoch": 0.5924871545618664, + "grad_norm": 4.107892817168008, + "learning_rate": 3.758750794553894e-06, + "loss": 0.6634, + "step": 8533 + }, + { + "epoch": 0.5925565893625885, + "grad_norm": 4.098512660753035, + "learning_rate": 3.7576615395478206e-06, + "loss": 0.4491, + "step": 8534 + }, + { + "epoch": 0.5926260241633107, + "grad_norm": 4.49218611839039, + "learning_rate": 3.75657234737782e-06, + "loss": 0.4436, + "step": 8535 + }, + { + "epoch": 0.5926954589640328, + "grad_norm": 3.5015570819070736, + "learning_rate": 3.755483218098984e-06, + "loss": 0.3831, + "step": 8536 + }, + { + "epoch": 0.5927648937647549, + "grad_norm": 3.0685222467576274, + "learning_rate": 3.7543941517663997e-06, + "loss": 0.2457, + "step": 8537 + }, + { + "epoch": 0.592834328565477, + "grad_norm": 3.03573312759437, + "learning_rate": 3.7533051484351473e-06, + "loss": 0.4012, + "step": 8538 + }, + { + "epoch": 0.5929037633661991, + "grad_norm": 4.2552610664733885, + "learning_rate": 3.7522162081603096e-06, + "loss": 0.6107, + "step": 8539 + }, + { + "epoch": 0.5929731981669213, + "grad_norm": 4.033852957496691, + "learning_rate": 3.751127330996967e-06, + "loss": 0.5426, + "step": 8540 + }, + { + "epoch": 0.5930426329676434, + "grad_norm": 3.962614328504431, + "learning_rate": 3.7500385170001897e-06, + "loss": 0.4882, + "step": 8541 + }, + { + "epoch": 0.5931120677683656, + "grad_norm": 13.860643685321643, + "learning_rate": 3.7489497662250483e-06, + "loss": 0.4452, + "step": 8542 + }, + { + "epoch": 0.5931815025690876, + "grad_norm": 4.14015735607899, + "learning_rate": 3.7478610787266134e-06, + "loss": 0.4337, + "step": 8543 + }, + { + "epoch": 0.5932509373698097, + "grad_norm": 3.5139651110165206, + "learning_rate": 3.7467724545599486e-06, + "loss": 0.4353, + "step": 8544 + }, + { + "epoch": 
0.5933203721705319, + "grad_norm": 3.587939671686392, + "learning_rate": 3.745683893780113e-06, + "loss": 0.2525, + "step": 8545 + }, + { + "epoch": 0.593389806971254, + "grad_norm": 4.5587159138361315, + "learning_rate": 3.744595396442169e-06, + "loss": 0.4313, + "step": 8546 + }, + { + "epoch": 0.5934592417719761, + "grad_norm": 4.723224048193733, + "learning_rate": 3.7435069626011667e-06, + "loss": 0.5722, + "step": 8547 + }, + { + "epoch": 0.5935286765726983, + "grad_norm": 4.433577581060515, + "learning_rate": 3.7424185923121635e-06, + "loss": 0.4098, + "step": 8548 + }, + { + "epoch": 0.5935981113734203, + "grad_norm": 4.083520708320628, + "learning_rate": 3.741330285630202e-06, + "loss": 0.5824, + "step": 8549 + }, + { + "epoch": 0.5936675461741425, + "grad_norm": 3.636996752293765, + "learning_rate": 3.740242042610332e-06, + "loss": 0.4044, + "step": 8550 + }, + { + "epoch": 0.5937369809748646, + "grad_norm": 4.958901777944066, + "learning_rate": 3.7391538633075947e-06, + "loss": 0.4734, + "step": 8551 + }, + { + "epoch": 0.5938064157755867, + "grad_norm": 3.997282738181894, + "learning_rate": 3.7380657477770267e-06, + "loss": 0.3476, + "step": 8552 + }, + { + "epoch": 0.5938758505763089, + "grad_norm": 3.300794959287424, + "learning_rate": 3.736977696073665e-06, + "loss": 0.4325, + "step": 8553 + }, + { + "epoch": 0.593945285377031, + "grad_norm": 3.165498133305783, + "learning_rate": 3.7358897082525425e-06, + "loss": 0.2107, + "step": 8554 + }, + { + "epoch": 0.5940147201777531, + "grad_norm": 3.6111078339872043, + "learning_rate": 3.7348017843686896e-06, + "loss": 0.3428, + "step": 8555 + }, + { + "epoch": 0.5940841549784752, + "grad_norm": 4.250980776360398, + "learning_rate": 3.73371392447713e-06, + "loss": 0.4566, + "step": 8556 + }, + { + "epoch": 0.5941535897791973, + "grad_norm": 6.508266462831398, + "learning_rate": 3.7326261286328856e-06, + "loss": 0.5029, + "step": 8557 + }, + { + "epoch": 0.5942230245799195, + "grad_norm": 4.712878495269399, + "learning_rate": 3.73153839689098e-06, + "loss": 0.7578, + "step": 8558 + }, + { + "epoch": 0.5942924593806416, + "grad_norm": 3.6910113551378876, + "learning_rate": 3.7304507293064265e-06, + "loss": 0.3324, + "step": 8559 + }, + { + "epoch": 0.5943618941813636, + "grad_norm": 2.5879518247745446, + "learning_rate": 3.729363125934236e-06, + "loss": 0.2129, + "step": 8560 + }, + { + "epoch": 0.5944313289820858, + "grad_norm": 4.48830414233212, + "learning_rate": 3.728275586829422e-06, + "loss": 0.57, + "step": 8561 + }, + { + "epoch": 0.5945007637828079, + "grad_norm": 3.9150188324557775, + "learning_rate": 3.727188112046991e-06, + "loss": 0.4332, + "step": 8562 + }, + { + "epoch": 0.5945701985835301, + "grad_norm": 3.5938955720013115, + "learning_rate": 3.7261007016419448e-06, + "loss": 0.3939, + "step": 8563 + }, + { + "epoch": 0.5946396333842522, + "grad_norm": 5.252194547524712, + "learning_rate": 3.725013355669282e-06, + "loss": 0.4298, + "step": 8564 + }, + { + "epoch": 0.5947090681849743, + "grad_norm": 3.7305220747712746, + "learning_rate": 3.7239260741840035e-06, + "loss": 0.4066, + "step": 8565 + }, + { + "epoch": 0.5947785029856965, + "grad_norm": 4.869072208500784, + "learning_rate": 3.7228388572410986e-06, + "loss": 0.6933, + "step": 8566 + }, + { + "epoch": 0.5948479377864185, + "grad_norm": 3.0820242862668015, + "learning_rate": 3.7217517048955576e-06, + "loss": 0.2294, + "step": 8567 + }, + { + "epoch": 0.5949173725871407, + "grad_norm": 3.3258705131241473, + "learning_rate": 3.7206646172023707e-06, + "loss": 
0.3889, + "step": 8568 + }, + { + "epoch": 0.5949868073878628, + "grad_norm": 3.10738452228029, + "learning_rate": 3.7195775942165213e-06, + "loss": 0.2476, + "step": 8569 + }, + { + "epoch": 0.5950562421885849, + "grad_norm": 4.5507049766775065, + "learning_rate": 3.7184906359929853e-06, + "loss": 0.5887, + "step": 8570 + }, + { + "epoch": 0.5951256769893071, + "grad_norm": 4.562776831312148, + "learning_rate": 3.717403742586743e-06, + "loss": 0.5362, + "step": 8571 + }, + { + "epoch": 0.5951951117900292, + "grad_norm": 3.5971891373729483, + "learning_rate": 3.7163169140527694e-06, + "loss": 0.3734, + "step": 8572 + }, + { + "epoch": 0.5952645465907512, + "grad_norm": 3.618300196006156, + "learning_rate": 3.7152301504460348e-06, + "loss": 0.496, + "step": 8573 + }, + { + "epoch": 0.5953339813914734, + "grad_norm": 3.623724703184307, + "learning_rate": 3.714143451821503e-06, + "loss": 0.2985, + "step": 8574 + }, + { + "epoch": 0.5954034161921955, + "grad_norm": 3.4334741352813283, + "learning_rate": 3.7130568182341414e-06, + "loss": 0.3509, + "step": 8575 + }, + { + "epoch": 0.5954728509929177, + "grad_norm": 4.283910729230446, + "learning_rate": 3.711970249738911e-06, + "loss": 0.6051, + "step": 8576 + }, + { + "epoch": 0.5955422857936398, + "grad_norm": 3.9642111495467622, + "learning_rate": 3.7108837463907656e-06, + "loss": 0.5265, + "step": 8577 + }, + { + "epoch": 0.5956117205943618, + "grad_norm": 3.452648313979229, + "learning_rate": 3.7097973082446625e-06, + "loss": 0.3549, + "step": 8578 + }, + { + "epoch": 0.595681155395084, + "grad_norm": 2.7891606040482464, + "learning_rate": 3.7087109353555507e-06, + "loss": 0.2149, + "step": 8579 + }, + { + "epoch": 0.5957505901958061, + "grad_norm": 4.167676805754961, + "learning_rate": 3.7076246277783805e-06, + "loss": 0.4808, + "step": 8580 + }, + { + "epoch": 0.5958200249965283, + "grad_norm": 1.9841120185185133, + "learning_rate": 3.7065383855680925e-06, + "loss": 0.1229, + "step": 8581 + }, + { + "epoch": 0.5958894597972504, + "grad_norm": 4.108624975799672, + "learning_rate": 3.7054522087796297e-06, + "loss": 0.3976, + "step": 8582 + }, + { + "epoch": 0.5959588945979725, + "grad_norm": 2.9633584808048736, + "learning_rate": 3.704366097467931e-06, + "loss": 0.3312, + "step": 8583 + }, + { + "epoch": 0.5960283293986947, + "grad_norm": 2.74523931436068, + "learning_rate": 3.703280051687926e-06, + "loss": 0.3181, + "step": 8584 + }, + { + "epoch": 0.5960977641994167, + "grad_norm": 3.925660047105778, + "learning_rate": 3.702194071494551e-06, + "loss": 0.4, + "step": 8585 + }, + { + "epoch": 0.5961671990001388, + "grad_norm": 3.1899026899110092, + "learning_rate": 3.7011081569427294e-06, + "loss": 0.3294, + "step": 8586 + }, + { + "epoch": 0.596236633800861, + "grad_norm": 3.526590953350226, + "learning_rate": 3.70002230808739e-06, + "loss": 0.4775, + "step": 8587 + }, + { + "epoch": 0.5963060686015831, + "grad_norm": 3.1109440167875366, + "learning_rate": 3.6989365249834495e-06, + "loss": 0.3854, + "step": 8588 + }, + { + "epoch": 0.5963755034023053, + "grad_norm": 3.8144028837813106, + "learning_rate": 3.6978508076858266e-06, + "loss": 0.4626, + "step": 8589 + }, + { + "epoch": 0.5964449382030274, + "grad_norm": 3.738120378184053, + "learning_rate": 3.696765156249439e-06, + "loss": 0.3759, + "step": 8590 + }, + { + "epoch": 0.5965143730037494, + "grad_norm": 6.135987536518666, + "learning_rate": 3.695679570729192e-06, + "loss": 0.5693, + "step": 8591 + }, + { + "epoch": 0.5965838078044716, + "grad_norm": 3.768880593779302, + 
"learning_rate": 3.694594051179998e-06, + "loss": 0.3858, + "step": 8592 + }, + { + "epoch": 0.5966532426051937, + "grad_norm": 3.1042479796598417, + "learning_rate": 3.6935085976567585e-06, + "loss": 0.3175, + "step": 8593 + }, + { + "epoch": 0.5967226774059159, + "grad_norm": 3.0095981842626243, + "learning_rate": 3.6924232102143782e-06, + "loss": 0.2138, + "step": 8594 + }, + { + "epoch": 0.596792112206638, + "grad_norm": 3.6272129571237284, + "learning_rate": 3.691337888907751e-06, + "loss": 0.4748, + "step": 8595 + }, + { + "epoch": 0.59686154700736, + "grad_norm": 4.631128190662742, + "learning_rate": 3.690252633791771e-06, + "loss": 0.648, + "step": 8596 + }, + { + "epoch": 0.5969309818080822, + "grad_norm": 2.165215058226834, + "learning_rate": 3.689167444921332e-06, + "loss": 0.135, + "step": 8597 + }, + { + "epoch": 0.5970004166088043, + "grad_norm": 3.243582544786251, + "learning_rate": 3.6880823223513217e-06, + "loss": 0.383, + "step": 8598 + }, + { + "epoch": 0.5970698514095265, + "grad_norm": 2.9742764972041877, + "learning_rate": 3.6869972661366214e-06, + "loss": 0.377, + "step": 8599 + }, + { + "epoch": 0.5971392862102486, + "grad_norm": 4.191603759978091, + "learning_rate": 3.685912276332113e-06, + "loss": 0.6315, + "step": 8600 + }, + { + "epoch": 0.5972087210109707, + "grad_norm": 3.6064965100774433, + "learning_rate": 3.6848273529926764e-06, + "loss": 0.4051, + "step": 8601 + }, + { + "epoch": 0.5972781558116929, + "grad_norm": 5.332571548741159, + "learning_rate": 3.683742496173184e-06, + "loss": 0.3432, + "step": 8602 + }, + { + "epoch": 0.5973475906124149, + "grad_norm": 4.259959098088565, + "learning_rate": 3.6826577059285045e-06, + "loss": 0.3901, + "step": 8603 + }, + { + "epoch": 0.597417025413137, + "grad_norm": 5.193812928929078, + "learning_rate": 3.6815729823135094e-06, + "loss": 0.6605, + "step": 8604 + }, + { + "epoch": 0.5974864602138592, + "grad_norm": 3.977917847461278, + "learning_rate": 3.6804883253830614e-06, + "loss": 0.4712, + "step": 8605 + }, + { + "epoch": 0.5975558950145813, + "grad_norm": 4.036382772770804, + "learning_rate": 3.6794037351920194e-06, + "loss": 0.388, + "step": 8606 + }, + { + "epoch": 0.5976253298153035, + "grad_norm": 4.085163502269295, + "learning_rate": 3.6783192117952427e-06, + "loss": 0.4378, + "step": 8607 + }, + { + "epoch": 0.5976947646160256, + "grad_norm": 4.487342755027042, + "learning_rate": 3.6772347552475857e-06, + "loss": 0.664, + "step": 8608 + }, + { + "epoch": 0.5977641994167476, + "grad_norm": 4.042795182086229, + "learning_rate": 3.676150365603896e-06, + "loss": 0.5911, + "step": 8609 + }, + { + "epoch": 0.5978336342174698, + "grad_norm": 3.8372546831076444, + "learning_rate": 3.6750660429190227e-06, + "loss": 0.3467, + "step": 8610 + }, + { + "epoch": 0.5979030690181919, + "grad_norm": 3.991659105143138, + "learning_rate": 3.6739817872478113e-06, + "loss": 0.366, + "step": 8611 + }, + { + "epoch": 0.5979725038189141, + "grad_norm": 3.270534536783417, + "learning_rate": 3.672897598645101e-06, + "loss": 0.3272, + "step": 8612 + }, + { + "epoch": 0.5980419386196362, + "grad_norm": 3.206931713408161, + "learning_rate": 3.6718134771657265e-06, + "loss": 0.2669, + "step": 8613 + }, + { + "epoch": 0.5981113734203582, + "grad_norm": 4.957500645650893, + "learning_rate": 3.6707294228645242e-06, + "loss": 0.5807, + "step": 8614 + }, + { + "epoch": 0.5981808082210804, + "grad_norm": 3.2041602940830503, + "learning_rate": 3.669645435796325e-06, + "loss": 0.3509, + "step": 8615 + }, + { + "epoch": 0.5982502430218025, + 
"grad_norm": 3.995778250124758, + "learning_rate": 3.6685615160159514e-06, + "loss": 0.4554, + "step": 8616 + }, + { + "epoch": 0.5983196778225246, + "grad_norm": 2.4261517232059244, + "learning_rate": 3.6674776635782317e-06, + "loss": 0.2522, + "step": 8617 + }, + { + "epoch": 0.5983891126232468, + "grad_norm": 3.7473313929810304, + "learning_rate": 3.6663938785379826e-06, + "loss": 0.246, + "step": 8618 + }, + { + "epoch": 0.5984585474239689, + "grad_norm": 3.4107790283875157, + "learning_rate": 3.665310160950024e-06, + "loss": 0.445, + "step": 8619 + }, + { + "epoch": 0.5985279822246911, + "grad_norm": 3.848285208237695, + "learning_rate": 3.664226510869165e-06, + "loss": 0.2575, + "step": 8620 + }, + { + "epoch": 0.5985974170254131, + "grad_norm": 3.897876545256366, + "learning_rate": 3.6631429283502185e-06, + "loss": 0.4726, + "step": 8621 + }, + { + "epoch": 0.5986668518261352, + "grad_norm": 2.5919789344186235, + "learning_rate": 3.662059413447989e-06, + "loss": 0.2068, + "step": 8622 + }, + { + "epoch": 0.5987362866268574, + "grad_norm": 3.5179564507626537, + "learning_rate": 3.6609759662172826e-06, + "loss": 0.4282, + "step": 8623 + }, + { + "epoch": 0.5988057214275795, + "grad_norm": 3.500731537975049, + "learning_rate": 3.659892586712896e-06, + "loss": 0.3883, + "step": 8624 + }, + { + "epoch": 0.5988751562283017, + "grad_norm": 3.8957347071692476, + "learning_rate": 3.658809274989624e-06, + "loss": 0.566, + "step": 8625 + }, + { + "epoch": 0.5989445910290238, + "grad_norm": 4.20055416943044, + "learning_rate": 3.657726031102264e-06, + "loss": 0.6276, + "step": 8626 + }, + { + "epoch": 0.5990140258297458, + "grad_norm": 4.345415490468386, + "learning_rate": 3.656642855105601e-06, + "loss": 0.2989, + "step": 8627 + }, + { + "epoch": 0.599083460630468, + "grad_norm": 4.022248682785972, + "learning_rate": 3.6555597470544214e-06, + "loss": 0.3746, + "step": 8628 + }, + { + "epoch": 0.5991528954311901, + "grad_norm": 3.284370262405294, + "learning_rate": 3.654476707003508e-06, + "loss": 0.44, + "step": 8629 + }, + { + "epoch": 0.5992223302319122, + "grad_norm": 3.346547665855775, + "learning_rate": 3.653393735007643e-06, + "loss": 0.4198, + "step": 8630 + }, + { + "epoch": 0.5992917650326344, + "grad_norm": 4.529580981964243, + "learning_rate": 3.652310831121598e-06, + "loss": 0.5439, + "step": 8631 + }, + { + "epoch": 0.5993611998333565, + "grad_norm": 4.300155370920969, + "learning_rate": 3.651227995400145e-06, + "loss": 0.4082, + "step": 8632 + }, + { + "epoch": 0.5994306346340786, + "grad_norm": 4.391527118825359, + "learning_rate": 3.6501452278980555e-06, + "loss": 0.4562, + "step": 8633 + }, + { + "epoch": 0.5995000694348007, + "grad_norm": 5.864002266378577, + "learning_rate": 3.649062528670092e-06, + "loss": 0.9552, + "step": 8634 + }, + { + "epoch": 0.5995695042355228, + "grad_norm": 3.3583079605869837, + "learning_rate": 3.647979897771016e-06, + "loss": 0.365, + "step": 8635 + }, + { + "epoch": 0.599638939036245, + "grad_norm": 4.438735818610624, + "learning_rate": 3.646897335255588e-06, + "loss": 0.4548, + "step": 8636 + }, + { + "epoch": 0.5997083738369671, + "grad_norm": 5.273928347055552, + "learning_rate": 3.6458148411785633e-06, + "loss": 0.6335, + "step": 8637 + }, + { + "epoch": 0.5997778086376893, + "grad_norm": 4.10915053409455, + "learning_rate": 3.6447324155946883e-06, + "loss": 0.4588, + "step": 8638 + }, + { + "epoch": 0.5998472434384113, + "grad_norm": 4.354382010528509, + "learning_rate": 3.643650058558716e-06, + "loss": 0.6129, + "step": 8639 + }, + { + 
"epoch": 0.5999166782391334, + "grad_norm": 4.345784147557371, + "learning_rate": 3.64256777012539e-06, + "loss": 0.4098, + "step": 8640 + }, + { + "epoch": 0.5999861130398556, + "grad_norm": 3.707584022554522, + "learning_rate": 3.6414855503494473e-06, + "loss": 0.3188, + "step": 8641 + }, + { + "epoch": 0.6000555478405777, + "grad_norm": 4.757621519271678, + "learning_rate": 3.6404033992856293e-06, + "loss": 0.6449, + "step": 8642 + }, + { + "epoch": 0.6001249826412998, + "grad_norm": 3.880107314296162, + "learning_rate": 3.6393213169886686e-06, + "loss": 0.5856, + "step": 8643 + }, + { + "epoch": 0.600194417442022, + "grad_norm": 4.936598863948294, + "learning_rate": 3.638239303513298e-06, + "loss": 0.5337, + "step": 8644 + }, + { + "epoch": 0.600263852242744, + "grad_norm": 4.120337172898331, + "learning_rate": 3.63715735891424e-06, + "loss": 0.4638, + "step": 8645 + }, + { + "epoch": 0.6003332870434662, + "grad_norm": 2.73339366609182, + "learning_rate": 3.636075483246222e-06, + "loss": 0.1481, + "step": 8646 + }, + { + "epoch": 0.6004027218441883, + "grad_norm": 3.627615872107336, + "learning_rate": 3.6349936765639616e-06, + "loss": 0.359, + "step": 8647 + }, + { + "epoch": 0.6004721566449104, + "grad_norm": 4.320692410097562, + "learning_rate": 3.6339119389221783e-06, + "loss": 0.4608, + "step": 8648 + }, + { + "epoch": 0.6005415914456326, + "grad_norm": 3.9988797239677916, + "learning_rate": 3.6328302703755825e-06, + "loss": 0.4037, + "step": 8649 + }, + { + "epoch": 0.6006110262463547, + "grad_norm": 3.5148687749478515, + "learning_rate": 3.631748670978883e-06, + "loss": 0.3221, + "step": 8650 + }, + { + "epoch": 0.6006804610470768, + "grad_norm": 4.612059747326045, + "learning_rate": 3.63066714078679e-06, + "loss": 0.4108, + "step": 8651 + }, + { + "epoch": 0.6007498958477989, + "grad_norm": 5.027465871338378, + "learning_rate": 3.629585679854001e-06, + "loss": 0.5507, + "step": 8652 + }, + { + "epoch": 0.600819330648521, + "grad_norm": 2.38998514833837, + "learning_rate": 3.6285042882352193e-06, + "loss": 0.1307, + "step": 8653 + }, + { + "epoch": 0.6008887654492432, + "grad_norm": 4.435243456427651, + "learning_rate": 3.627422965985137e-06, + "loss": 0.2746, + "step": 8654 + }, + { + "epoch": 0.6009582002499653, + "grad_norm": 5.100130199929668, + "learning_rate": 3.6263417131584506e-06, + "loss": 0.7242, + "step": 8655 + }, + { + "epoch": 0.6010276350506875, + "grad_norm": 3.9133888363348315, + "learning_rate": 3.625260529809844e-06, + "loss": 0.4415, + "step": 8656 + }, + { + "epoch": 0.6010970698514095, + "grad_norm": 3.3109831892797015, + "learning_rate": 3.624179415994004e-06, + "loss": 0.3364, + "step": 8657 + }, + { + "epoch": 0.6011665046521316, + "grad_norm": 3.7542900437375004, + "learning_rate": 3.6230983717656142e-06, + "loss": 0.2582, + "step": 8658 + }, + { + "epoch": 0.6012359394528538, + "grad_norm": 7.929298163740489, + "learning_rate": 3.62201739717935e-06, + "loss": 0.2628, + "step": 8659 + }, + { + "epoch": 0.6013053742535759, + "grad_norm": 3.234264522405559, + "learning_rate": 3.620936492289885e-06, + "loss": 0.3657, + "step": 8660 + }, + { + "epoch": 0.601374809054298, + "grad_norm": 3.6221428714487347, + "learning_rate": 3.619855657151892e-06, + "loss": 0.6025, + "step": 8661 + }, + { + "epoch": 0.6014442438550202, + "grad_norm": 1.8947079876273223, + "learning_rate": 3.6187748918200393e-06, + "loss": 0.1474, + "step": 8662 + }, + { + "epoch": 0.6015136786557422, + "grad_norm": 4.18935700379198, + "learning_rate": 3.6176941963489897e-06, + "loss": 
0.4117, + "step": 8663 + }, + { + "epoch": 0.6015831134564644, + "grad_norm": 3.379113088835297, + "learning_rate": 3.616613570793402e-06, + "loss": 0.2874, + "step": 8664 + }, + { + "epoch": 0.6016525482571865, + "grad_norm": 4.399729440285468, + "learning_rate": 3.6155330152079364e-06, + "loss": 0.7185, + "step": 8665 + }, + { + "epoch": 0.6017219830579086, + "grad_norm": 3.6623941908405744, + "learning_rate": 3.6144525296472432e-06, + "loss": 0.3593, + "step": 8666 + }, + { + "epoch": 0.6017914178586308, + "grad_norm": 2.6767288226483736, + "learning_rate": 3.6133721141659726e-06, + "loss": 0.2048, + "step": 8667 + }, + { + "epoch": 0.6018608526593529, + "grad_norm": 3.3392668204244385, + "learning_rate": 3.612291768818772e-06, + "loss": 0.3512, + "step": 8668 + }, + { + "epoch": 0.601930287460075, + "grad_norm": 4.348289012079685, + "learning_rate": 3.611211493660285e-06, + "loss": 0.5815, + "step": 8669 + }, + { + "epoch": 0.6019997222607971, + "grad_norm": 3.4674558129043027, + "learning_rate": 3.6101312887451467e-06, + "loss": 0.4518, + "step": 8670 + }, + { + "epoch": 0.6020691570615192, + "grad_norm": 4.9541904768993685, + "learning_rate": 3.609051154127995e-06, + "loss": 0.5296, + "step": 8671 + }, + { + "epoch": 0.6021385918622414, + "grad_norm": 3.402912515886812, + "learning_rate": 3.607971089863464e-06, + "loss": 0.4476, + "step": 8672 + }, + { + "epoch": 0.6022080266629635, + "grad_norm": 5.242159546952663, + "learning_rate": 3.6068910960061787e-06, + "loss": 0.3154, + "step": 8673 + }, + { + "epoch": 0.6022774614636855, + "grad_norm": 4.719783281340203, + "learning_rate": 3.6058111726107647e-06, + "loss": 0.4873, + "step": 8674 + }, + { + "epoch": 0.6023468962644077, + "grad_norm": 4.158722427726425, + "learning_rate": 3.6047313197318456e-06, + "loss": 0.5464, + "step": 8675 + }, + { + "epoch": 0.6024163310651298, + "grad_norm": 5.3219974669796, + "learning_rate": 3.6036515374240376e-06, + "loss": 0.687, + "step": 8676 + }, + { + "epoch": 0.602485765865852, + "grad_norm": 4.008408998789982, + "learning_rate": 3.6025718257419532e-06, + "loss": 0.5832, + "step": 8677 + }, + { + "epoch": 0.6025552006665741, + "grad_norm": 3.8724585356435326, + "learning_rate": 3.6014921847402058e-06, + "loss": 0.6422, + "step": 8678 + }, + { + "epoch": 0.6026246354672962, + "grad_norm": 4.0352091085747395, + "learning_rate": 3.6004126144733996e-06, + "loss": 0.4024, + "step": 8679 + }, + { + "epoch": 0.6026940702680184, + "grad_norm": 3.991269382579248, + "learning_rate": 3.5993331149961417e-06, + "loss": 0.572, + "step": 8680 + }, + { + "epoch": 0.6027635050687404, + "grad_norm": 3.379690840860304, + "learning_rate": 3.598253686363028e-06, + "loss": 0.3761, + "step": 8681 + }, + { + "epoch": 0.6028329398694626, + "grad_norm": 3.8800209348608625, + "learning_rate": 3.597174328628658e-06, + "loss": 0.4973, + "step": 8682 + }, + { + "epoch": 0.6029023746701847, + "grad_norm": 3.8026134950243926, + "learning_rate": 3.5960950418476236e-06, + "loss": 0.2977, + "step": 8683 + }, + { + "epoch": 0.6029718094709068, + "grad_norm": 3.432440725632957, + "learning_rate": 3.5950158260745117e-06, + "loss": 0.2832, + "step": 8684 + }, + { + "epoch": 0.603041244271629, + "grad_norm": 3.0947062323770393, + "learning_rate": 3.5939366813639103e-06, + "loss": 0.3841, + "step": 8685 + }, + { + "epoch": 0.603110679072351, + "grad_norm": 10.541388090290436, + "learning_rate": 3.5928576077703993e-06, + "loss": 0.5674, + "step": 8686 + }, + { + "epoch": 0.6031801138730731, + "grad_norm": 2.572908154764333, + 
"learning_rate": 3.5917786053485603e-06, + "loss": 0.2591, + "step": 8687 + }, + { + "epoch": 0.6032495486737953, + "grad_norm": 4.687081412677106, + "learning_rate": 3.5906996741529653e-06, + "loss": 0.4936, + "step": 8688 + }, + { + "epoch": 0.6033189834745174, + "grad_norm": 4.144613169078189, + "learning_rate": 3.589620814238185e-06, + "loss": 0.5671, + "step": 8689 + }, + { + "epoch": 0.6033884182752396, + "grad_norm": 4.044968399254317, + "learning_rate": 3.5885420256587895e-06, + "loss": 0.4994, + "step": 8690 + }, + { + "epoch": 0.6034578530759617, + "grad_norm": 3.562266820497365, + "learning_rate": 3.5874633084693395e-06, + "loss": 0.2858, + "step": 8691 + }, + { + "epoch": 0.6035272878766837, + "grad_norm": 4.885451100321208, + "learning_rate": 3.586384662724398e-06, + "loss": 0.6234, + "step": 8692 + }, + { + "epoch": 0.6035967226774059, + "grad_norm": 3.7994971280086114, + "learning_rate": 3.5853060884785197e-06, + "loss": 0.3424, + "step": 8693 + }, + { + "epoch": 0.603666157478128, + "grad_norm": 3.8891386626011846, + "learning_rate": 3.5842275857862595e-06, + "loss": 0.5786, + "step": 8694 + }, + { + "epoch": 0.6037355922788502, + "grad_norm": 3.4482185141396053, + "learning_rate": 3.5831491547021664e-06, + "loss": 0.2276, + "step": 8695 + }, + { + "epoch": 0.6038050270795723, + "grad_norm": 4.178711851078422, + "learning_rate": 3.5820707952807842e-06, + "loss": 0.3249, + "step": 8696 + }, + { + "epoch": 0.6038744618802944, + "grad_norm": 2.9326195752460325, + "learning_rate": 3.580992507576659e-06, + "loss": 0.3305, + "step": 8697 + }, + { + "epoch": 0.6039438966810166, + "grad_norm": 2.95286702967796, + "learning_rate": 3.5799142916443253e-06, + "loss": 0.2302, + "step": 8698 + }, + { + "epoch": 0.6040133314817386, + "grad_norm": 2.2719617892512636, + "learning_rate": 3.5788361475383194e-06, + "loss": 0.2012, + "step": 8699 + }, + { + "epoch": 0.6040827662824607, + "grad_norm": 3.2873191825193633, + "learning_rate": 3.577758075313173e-06, + "loss": 0.4519, + "step": 8700 + }, + { + "epoch": 0.6041522010831829, + "grad_norm": 3.644117340517901, + "learning_rate": 3.5766800750234156e-06, + "loss": 0.378, + "step": 8701 + }, + { + "epoch": 0.604221635883905, + "grad_norm": 4.367153651821961, + "learning_rate": 3.5756021467235686e-06, + "loss": 0.5127, + "step": 8702 + }, + { + "epoch": 0.6042910706846272, + "grad_norm": 4.947311418662219, + "learning_rate": 3.574524290468152e-06, + "loss": 0.4625, + "step": 8703 + }, + { + "epoch": 0.6043605054853493, + "grad_norm": 4.090274042044305, + "learning_rate": 3.5734465063116847e-06, + "loss": 0.5356, + "step": 8704 + }, + { + "epoch": 0.6044299402860713, + "grad_norm": 3.65364948683183, + "learning_rate": 3.5723687943086804e-06, + "loss": 0.4798, + "step": 8705 + }, + { + "epoch": 0.6044993750867935, + "grad_norm": 4.72837814626655, + "learning_rate": 3.571291154513644e-06, + "loss": 0.4627, + "step": 8706 + }, + { + "epoch": 0.6045688098875156, + "grad_norm": 2.6334657432812123, + "learning_rate": 3.570213586981086e-06, + "loss": 0.2183, + "step": 8707 + }, + { + "epoch": 0.6046382446882378, + "grad_norm": 4.5491830992829545, + "learning_rate": 3.569136091765508e-06, + "loss": 0.6933, + "step": 8708 + }, + { + "epoch": 0.6047076794889599, + "grad_norm": 3.5993397738785045, + "learning_rate": 3.5680586689214047e-06, + "loss": 0.2839, + "step": 8709 + }, + { + "epoch": 0.604777114289682, + "grad_norm": 2.7082637840660566, + "learning_rate": 3.566981318503274e-06, + "loss": 0.27, + "step": 8710 + }, + { + "epoch": 
0.6048465490904041, + "grad_norm": 3.705569244903075, + "learning_rate": 3.5659040405656074e-06, + "loss": 0.2535, + "step": 8711 + }, + { + "epoch": 0.6049159838911262, + "grad_norm": 6.678303518008989, + "learning_rate": 3.5648268351628935e-06, + "loss": 0.7644, + "step": 8712 + }, + { + "epoch": 0.6049854186918484, + "grad_norm": 2.821744364470262, + "learning_rate": 3.5637497023496115e-06, + "loss": 0.2779, + "step": 8713 + }, + { + "epoch": 0.6050548534925705, + "grad_norm": 3.198409396898823, + "learning_rate": 3.5626726421802455e-06, + "loss": 0.3139, + "step": 8714 + }, + { + "epoch": 0.6051242882932926, + "grad_norm": 5.676992668860264, + "learning_rate": 3.5615956547092715e-06, + "loss": 0.7564, + "step": 8715 + }, + { + "epoch": 0.6051937230940148, + "grad_norm": 4.042745478267019, + "learning_rate": 3.56051873999116e-06, + "loss": 0.3531, + "step": 8716 + }, + { + "epoch": 0.6052631578947368, + "grad_norm": 3.1634752682152327, + "learning_rate": 3.5594418980803825e-06, + "loss": 0.418, + "step": 8717 + }, + { + "epoch": 0.6053325926954589, + "grad_norm": 9.234277288418367, + "learning_rate": 3.558365129031403e-06, + "loss": 0.292, + "step": 8718 + }, + { + "epoch": 0.6054020274961811, + "grad_norm": 3.7192162728405926, + "learning_rate": 3.5572884328986856e-06, + "loss": 0.5705, + "step": 8719 + }, + { + "epoch": 0.6054714622969032, + "grad_norm": 3.6212870565342894, + "learning_rate": 3.5562118097366848e-06, + "loss": 0.4759, + "step": 8720 + }, + { + "epoch": 0.6055408970976254, + "grad_norm": 5.061682244145416, + "learning_rate": 3.5551352595998583e-06, + "loss": 0.8653, + "step": 8721 + }, + { + "epoch": 0.6056103318983475, + "grad_norm": 3.7414676949175845, + "learning_rate": 3.5540587825426565e-06, + "loss": 0.5166, + "step": 8722 + }, + { + "epoch": 0.6056797666990695, + "grad_norm": 4.648427830199838, + "learning_rate": 3.552982378619523e-06, + "loss": 0.3677, + "step": 8723 + }, + { + "epoch": 0.6057492014997917, + "grad_norm": 3.208149578923469, + "learning_rate": 3.5519060478849036e-06, + "loss": 0.3744, + "step": 8724 + }, + { + "epoch": 0.6058186363005138, + "grad_norm": 3.311356909795269, + "learning_rate": 3.550829790393238e-06, + "loss": 0.3831, + "step": 8725 + }, + { + "epoch": 0.605888071101236, + "grad_norm": 2.8624753519597723, + "learning_rate": 3.5497536061989625e-06, + "loss": 0.2523, + "step": 8726 + }, + { + "epoch": 0.6059575059019581, + "grad_norm": 4.15747624473492, + "learning_rate": 3.5486774953565083e-06, + "loss": 0.4347, + "step": 8727 + }, + { + "epoch": 0.6060269407026802, + "grad_norm": 4.402562774565508, + "learning_rate": 3.5476014579203032e-06, + "loss": 0.6639, + "step": 8728 + }, + { + "epoch": 0.6060963755034023, + "grad_norm": 4.546749278528348, + "learning_rate": 3.5465254939447737e-06, + "loss": 0.3551, + "step": 8729 + }, + { + "epoch": 0.6061658103041244, + "grad_norm": 2.51016087230274, + "learning_rate": 3.5454496034843407e-06, + "loss": 0.2819, + "step": 8730 + }, + { + "epoch": 0.6062352451048465, + "grad_norm": 4.671286233097564, + "learning_rate": 3.5443737865934193e-06, + "loss": 0.6115, + "step": 8731 + }, + { + "epoch": 0.6063046799055687, + "grad_norm": 3.913294174180678, + "learning_rate": 3.5432980433264243e-06, + "loss": 0.4216, + "step": 8732 + }, + { + "epoch": 0.6063741147062908, + "grad_norm": 4.1952545591775054, + "learning_rate": 3.5422223737377677e-06, + "loss": 0.585, + "step": 8733 + }, + { + "epoch": 0.606443549507013, + "grad_norm": 3.5357635734944624, + "learning_rate": 3.5411467778818532e-06, + "loss": 
0.3554, + "step": 8734 + }, + { + "epoch": 0.606512984307735, + "grad_norm": 2.8776568312925876, + "learning_rate": 3.5400712558130824e-06, + "loss": 0.3419, + "step": 8735 + }, + { + "epoch": 0.6065824191084571, + "grad_norm": 4.035596338161299, + "learning_rate": 3.5389958075858567e-06, + "loss": 0.4722, + "step": 8736 + }, + { + "epoch": 0.6066518539091793, + "grad_norm": 3.997916688543593, + "learning_rate": 3.5379204332545715e-06, + "loss": 0.6014, + "step": 8737 + }, + { + "epoch": 0.6067212887099014, + "grad_norm": 3.3846190203961797, + "learning_rate": 3.5368451328736133e-06, + "loss": 0.4997, + "step": 8738 + }, + { + "epoch": 0.6067907235106236, + "grad_norm": 3.758923787989218, + "learning_rate": 3.535769906497374e-06, + "loss": 0.4671, + "step": 8739 + }, + { + "epoch": 0.6068601583113457, + "grad_norm": 3.063391493095799, + "learning_rate": 3.534694754180237e-06, + "loss": 0.2856, + "step": 8740 + }, + { + "epoch": 0.6069295931120677, + "grad_norm": 3.5308203911660088, + "learning_rate": 3.5336196759765797e-06, + "loss": 0.4827, + "step": 8741 + }, + { + "epoch": 0.6069990279127899, + "grad_norm": 3.6196541717798354, + "learning_rate": 3.5325446719407787e-06, + "loss": 0.2828, + "step": 8742 + }, + { + "epoch": 0.607068462713512, + "grad_norm": 3.2834500337065387, + "learning_rate": 3.5314697421272092e-06, + "loss": 0.2852, + "step": 8743 + }, + { + "epoch": 0.6071378975142341, + "grad_norm": 4.609996918158254, + "learning_rate": 3.5303948865902405e-06, + "loss": 0.6473, + "step": 8744 + }, + { + "epoch": 0.6072073323149563, + "grad_norm": 3.904154807980599, + "learning_rate": 3.5293201053842317e-06, + "loss": 0.4375, + "step": 8745 + }, + { + "epoch": 0.6072767671156784, + "grad_norm": 3.606667791509874, + "learning_rate": 3.52824539856355e-06, + "loss": 0.3763, + "step": 8746 + }, + { + "epoch": 0.6073462019164005, + "grad_norm": 3.9963688788720058, + "learning_rate": 3.527170766182551e-06, + "loss": 0.3507, + "step": 8747 + }, + { + "epoch": 0.6074156367171226, + "grad_norm": 2.8185441768025803, + "learning_rate": 3.5260962082955863e-06, + "loss": 0.3351, + "step": 8748 + }, + { + "epoch": 0.6074850715178447, + "grad_norm": 3.766094116212072, + "learning_rate": 3.5250217249570085e-06, + "loss": 0.4437, + "step": 8749 + }, + { + "epoch": 0.6075545063185669, + "grad_norm": 3.2812710033297403, + "learning_rate": 3.523947316221162e-06, + "loss": 0.2882, + "step": 8750 + }, + { + "epoch": 0.607623941119289, + "grad_norm": 3.669777838150931, + "learning_rate": 3.522872982142392e-06, + "loss": 0.316, + "step": 8751 + }, + { + "epoch": 0.6076933759200112, + "grad_norm": 3.4990757878870817, + "learning_rate": 3.5217987227750327e-06, + "loss": 0.4352, + "step": 8752 + }, + { + "epoch": 0.6077628107207332, + "grad_norm": 3.7920229325892176, + "learning_rate": 3.5207245381734223e-06, + "loss": 0.2807, + "step": 8753 + }, + { + "epoch": 0.6078322455214553, + "grad_norm": 3.1464042249534203, + "learning_rate": 3.5196504283918897e-06, + "loss": 0.2947, + "step": 8754 + }, + { + "epoch": 0.6079016803221775, + "grad_norm": 3.658905341414522, + "learning_rate": 3.518576393484766e-06, + "loss": 0.4309, + "step": 8755 + }, + { + "epoch": 0.6079711151228996, + "grad_norm": 4.820644362463891, + "learning_rate": 3.5175024335063707e-06, + "loss": 0.6304, + "step": 8756 + }, + { + "epoch": 0.6080405499236217, + "grad_norm": 2.4751253879652, + "learning_rate": 3.516428548511023e-06, + "loss": 0.1461, + "step": 8757 + }, + { + "epoch": 0.6081099847243439, + "grad_norm": 3.3926635511300534, + 
"learning_rate": 3.515354738553044e-06, + "loss": 0.5285, + "step": 8758 + }, + { + "epoch": 0.6081794195250659, + "grad_norm": 3.292710082121794, + "learning_rate": 3.51428100368674e-06, + "loss": 0.1882, + "step": 8759 + }, + { + "epoch": 0.6082488543257881, + "grad_norm": 3.593982526990828, + "learning_rate": 3.5132073439664215e-06, + "loss": 0.3269, + "step": 8760 + }, + { + "epoch": 0.6083182891265102, + "grad_norm": 4.994855225589099, + "learning_rate": 3.5121337594463923e-06, + "loss": 0.5468, + "step": 8761 + }, + { + "epoch": 0.6083877239272323, + "grad_norm": 7.484275729992098, + "learning_rate": 3.5110602501809565e-06, + "loss": 0.6978, + "step": 8762 + }, + { + "epoch": 0.6084571587279545, + "grad_norm": 11.848722941882333, + "learning_rate": 3.5099868162244077e-06, + "loss": 0.7132, + "step": 8763 + }, + { + "epoch": 0.6085265935286766, + "grad_norm": 3.767247583808914, + "learning_rate": 3.508913457631038e-06, + "loss": 0.3759, + "step": 8764 + }, + { + "epoch": 0.6085960283293987, + "grad_norm": 5.878782909083482, + "learning_rate": 3.5078401744551404e-06, + "loss": 0.7495, + "step": 8765 + }, + { + "epoch": 0.6086654631301208, + "grad_norm": 4.450280679700323, + "learning_rate": 3.5067669667509975e-06, + "loss": 0.4169, + "step": 8766 + }, + { + "epoch": 0.6087348979308429, + "grad_norm": 3.887879756049915, + "learning_rate": 3.5056938345728908e-06, + "loss": 0.3313, + "step": 8767 + }, + { + "epoch": 0.6088043327315651, + "grad_norm": 3.4498904786225433, + "learning_rate": 3.5046207779750994e-06, + "loss": 0.2941, + "step": 8768 + }, + { + "epoch": 0.6088737675322872, + "grad_norm": 4.121688356850154, + "learning_rate": 3.503547797011899e-06, + "loss": 0.5696, + "step": 8769 + }, + { + "epoch": 0.6089432023330094, + "grad_norm": 3.9662655076621314, + "learning_rate": 3.5024748917375552e-06, + "loss": 0.5685, + "step": 8770 + }, + { + "epoch": 0.6090126371337314, + "grad_norm": 4.542597359992858, + "learning_rate": 3.5014020622063365e-06, + "loss": 0.4078, + "step": 8771 + }, + { + "epoch": 0.6090820719344535, + "grad_norm": 3.323797452605993, + "learning_rate": 3.500329308472508e-06, + "loss": 0.3538, + "step": 8772 + }, + { + "epoch": 0.6091515067351757, + "grad_norm": 4.148309669616246, + "learning_rate": 3.4992566305903253e-06, + "loss": 0.4835, + "step": 8773 + }, + { + "epoch": 0.6092209415358978, + "grad_norm": 3.653952497111871, + "learning_rate": 3.4981840286140425e-06, + "loss": 0.4486, + "step": 8774 + }, + { + "epoch": 0.6092903763366199, + "grad_norm": 2.695493999153073, + "learning_rate": 3.497111502597913e-06, + "loss": 0.1049, + "step": 8775 + }, + { + "epoch": 0.6093598111373421, + "grad_norm": 4.07849329769582, + "learning_rate": 3.4960390525961853e-06, + "loss": 0.6116, + "step": 8776 + }, + { + "epoch": 0.6094292459380641, + "grad_norm": 4.275370125409202, + "learning_rate": 3.494966678663097e-06, + "loss": 0.5959, + "step": 8777 + }, + { + "epoch": 0.6094986807387863, + "grad_norm": 5.791210458057832, + "learning_rate": 3.4938943808528924e-06, + "loss": 0.3814, + "step": 8778 + }, + { + "epoch": 0.6095681155395084, + "grad_norm": 2.9804918785192314, + "learning_rate": 3.4928221592198053e-06, + "loss": 0.2367, + "step": 8779 + }, + { + "epoch": 0.6096375503402305, + "grad_norm": 5.398198364466904, + "learning_rate": 3.4917500138180695e-06, + "loss": 0.3856, + "step": 8780 + }, + { + "epoch": 0.6097069851409527, + "grad_norm": 3.2649536457690767, + "learning_rate": 3.490677944701909e-06, + "loss": 0.1792, + "step": 8781 + }, + { + "epoch": 
0.6097764199416748, + "grad_norm": 2.819860429628112, + "learning_rate": 3.489605951925552e-06, + "loss": 0.4197, + "step": 8782 + }, + { + "epoch": 0.6098458547423969, + "grad_norm": 5.247148237561463, + "learning_rate": 3.4885340355432175e-06, + "loss": 0.7434, + "step": 8783 + }, + { + "epoch": 0.609915289543119, + "grad_norm": 4.236740290689382, + "learning_rate": 3.4874621956091186e-06, + "loss": 0.3853, + "step": 8784 + }, + { + "epoch": 0.6099847243438411, + "grad_norm": 3.6618283189949636, + "learning_rate": 3.486390432177472e-06, + "loss": 0.3223, + "step": 8785 + }, + { + "epoch": 0.6100541591445633, + "grad_norm": 3.6704054038804292, + "learning_rate": 3.4853187453024827e-06, + "loss": 0.4361, + "step": 8786 + }, + { + "epoch": 0.6101235939452854, + "grad_norm": 3.6285768004194274, + "learning_rate": 3.48424713503836e-06, + "loss": 0.2842, + "step": 8787 + }, + { + "epoch": 0.6101930287460074, + "grad_norm": 4.42477390677402, + "learning_rate": 3.483175601439301e-06, + "loss": 0.4643, + "step": 8788 + }, + { + "epoch": 0.6102624635467296, + "grad_norm": 4.036486958323342, + "learning_rate": 3.482104144559503e-06, + "loss": 0.4778, + "step": 8789 + }, + { + "epoch": 0.6103318983474517, + "grad_norm": 5.144953646562805, + "learning_rate": 3.4810327644531606e-06, + "loss": 0.468, + "step": 8790 + }, + { + "epoch": 0.6104013331481739, + "grad_norm": 3.327906577397172, + "learning_rate": 3.479961461174461e-06, + "loss": 0.347, + "step": 8791 + }, + { + "epoch": 0.610470767948896, + "grad_norm": 4.536725368663805, + "learning_rate": 3.478890234777591e-06, + "loss": 0.4807, + "step": 8792 + }, + { + "epoch": 0.6105402027496181, + "grad_norm": 3.066558974834664, + "learning_rate": 3.4778190853167315e-06, + "loss": 0.35, + "step": 8793 + }, + { + "epoch": 0.6106096375503403, + "grad_norm": 3.764874721401947, + "learning_rate": 3.4767480128460616e-06, + "loss": 0.3399, + "step": 8794 + }, + { + "epoch": 0.6106790723510623, + "grad_norm": 3.8708608296847467, + "learning_rate": 3.475677017419753e-06, + "loss": 0.3217, + "step": 8795 + }, + { + "epoch": 0.6107485071517845, + "grad_norm": 3.493059073168975, + "learning_rate": 3.474606099091975e-06, + "loss": 0.4612, + "step": 8796 + }, + { + "epoch": 0.6108179419525066, + "grad_norm": 3.7398319023151787, + "learning_rate": 3.4735352579168956e-06, + "loss": 0.4019, + "step": 8797 + }, + { + "epoch": 0.6108873767532287, + "grad_norm": 2.568160475932117, + "learning_rate": 3.4724644939486753e-06, + "loss": 0.1737, + "step": 8798 + }, + { + "epoch": 0.6109568115539509, + "grad_norm": 4.234843802130741, + "learning_rate": 3.471393807241471e-06, + "loss": 0.6381, + "step": 8799 + }, + { + "epoch": 0.611026246354673, + "grad_norm": 3.55702450378208, + "learning_rate": 3.4703231978494388e-06, + "loss": 0.4172, + "step": 8800 + }, + { + "epoch": 0.611095681155395, + "grad_norm": 3.863896878506644, + "learning_rate": 3.46925266582673e-06, + "loss": 0.4431, + "step": 8801 + }, + { + "epoch": 0.6111651159561172, + "grad_norm": 3.3630169996881905, + "learning_rate": 3.4681822112274877e-06, + "loss": 0.3636, + "step": 8802 + }, + { + "epoch": 0.6112345507568393, + "grad_norm": 4.130317869284291, + "learning_rate": 3.467111834105855e-06, + "loss": 0.44, + "step": 8803 + }, + { + "epoch": 0.6113039855575615, + "grad_norm": 3.447223413116083, + "learning_rate": 3.4660415345159727e-06, + "loss": 0.3196, + "step": 8804 + }, + { + "epoch": 0.6113734203582836, + "grad_norm": 4.445051833966361, + "learning_rate": 3.4649713125119742e-06, + "loss": 0.6113, + "step": 
8805 + }, + { + "epoch": 0.6114428551590056, + "grad_norm": 3.2950981307291056, + "learning_rate": 3.4639011681479885e-06, + "loss": 0.3285, + "step": 8806 + }, + { + "epoch": 0.6115122899597278, + "grad_norm": 4.526714737940395, + "learning_rate": 3.4628311014781435e-06, + "loss": 0.6081, + "step": 8807 + }, + { + "epoch": 0.6115817247604499, + "grad_norm": 3.2820146872311375, + "learning_rate": 3.461761112556563e-06, + "loss": 0.2442, + "step": 8808 + }, + { + "epoch": 0.6116511595611721, + "grad_norm": 3.3767762409644915, + "learning_rate": 3.4606912014373633e-06, + "loss": 0.4206, + "step": 8809 + }, + { + "epoch": 0.6117205943618942, + "grad_norm": 2.828917003674686, + "learning_rate": 3.4596213681746615e-06, + "loss": 0.227, + "step": 8810 + }, + { + "epoch": 0.6117900291626163, + "grad_norm": 3.070436221140293, + "learning_rate": 3.4585516128225673e-06, + "loss": 0.3192, + "step": 8811 + }, + { + "epoch": 0.6118594639633385, + "grad_norm": 2.9965596268550847, + "learning_rate": 3.4574819354351896e-06, + "loss": 0.2736, + "step": 8812 + }, + { + "epoch": 0.6119288987640605, + "grad_norm": 4.253918162474615, + "learning_rate": 3.4564123360666292e-06, + "loss": 0.4445, + "step": 8813 + }, + { + "epoch": 0.6119983335647826, + "grad_norm": 3.4834202100898275, + "learning_rate": 3.455342814770986e-06, + "loss": 0.467, + "step": 8814 + }, + { + "epoch": 0.6120677683655048, + "grad_norm": 2.8502495553489937, + "learning_rate": 3.454273371602357e-06, + "loss": 0.2169, + "step": 8815 + }, + { + "epoch": 0.6121372031662269, + "grad_norm": 6.873085745477257, + "learning_rate": 3.4532040066148296e-06, + "loss": 0.7758, + "step": 8816 + }, + { + "epoch": 0.6122066379669491, + "grad_norm": 3.9070505450184583, + "learning_rate": 3.4521347198624945e-06, + "loss": 0.5103, + "step": 8817 + }, + { + "epoch": 0.6122760727676712, + "grad_norm": 3.5089996568781245, + "learning_rate": 3.4510655113994317e-06, + "loss": 0.3198, + "step": 8818 + }, + { + "epoch": 0.6123455075683932, + "grad_norm": 4.26302970854848, + "learning_rate": 3.4499963812797253e-06, + "loss": 0.4869, + "step": 8819 + }, + { + "epoch": 0.6124149423691154, + "grad_norm": 4.263491769328727, + "learning_rate": 3.4489273295574473e-06, + "loss": 0.4804, + "step": 8820 + }, + { + "epoch": 0.6124843771698375, + "grad_norm": 3.59272162039872, + "learning_rate": 3.4478583562866687e-06, + "loss": 0.3145, + "step": 8821 + }, + { + "epoch": 0.6125538119705597, + "grad_norm": 4.565373399713263, + "learning_rate": 3.4467894615214593e-06, + "loss": 0.6119, + "step": 8822 + }, + { + "epoch": 0.6126232467712818, + "grad_norm": 4.733431228688889, + "learning_rate": 3.44572064531588e-06, + "loss": 0.4495, + "step": 8823 + }, + { + "epoch": 0.6126926815720038, + "grad_norm": 2.499875554095829, + "learning_rate": 3.4446519077239925e-06, + "loss": 0.2889, + "step": 8824 + }, + { + "epoch": 0.612762116372726, + "grad_norm": 4.210601456668067, + "learning_rate": 3.4435832487998513e-06, + "loss": 0.6043, + "step": 8825 + }, + { + "epoch": 0.6128315511734481, + "grad_norm": 4.0874179985612615, + "learning_rate": 3.4425146685975084e-06, + "loss": 0.3603, + "step": 8826 + }, + { + "epoch": 0.6129009859741703, + "grad_norm": 7.6718983598756845, + "learning_rate": 3.4414461671710118e-06, + "loss": 0.3045, + "step": 8827 + }, + { + "epoch": 0.6129704207748924, + "grad_norm": 4.114497972604883, + "learning_rate": 3.440377744574403e-06, + "loss": 0.4455, + "step": 8828 + }, + { + "epoch": 0.6130398555756145, + "grad_norm": 7.6175148987063785, + "learning_rate": 
3.4393094008617235e-06, + "loss": 0.5355, + "step": 8829 + }, + { + "epoch": 0.6131092903763367, + "grad_norm": 3.3153797679956667, + "learning_rate": 3.4382411360870103e-06, + "loss": 0.2266, + "step": 8830 + }, + { + "epoch": 0.6131787251770587, + "grad_norm": 4.658982003162793, + "learning_rate": 3.4371729503042905e-06, + "loss": 0.456, + "step": 8831 + }, + { + "epoch": 0.6132481599777808, + "grad_norm": 2.995687420310173, + "learning_rate": 3.4361048435675946e-06, + "loss": 0.2585, + "step": 8832 + }, + { + "epoch": 0.613317594778503, + "grad_norm": 4.041921263592843, + "learning_rate": 3.4350368159309485e-06, + "loss": 0.4858, + "step": 8833 + }, + { + "epoch": 0.6133870295792251, + "grad_norm": 3.6743486765491986, + "learning_rate": 3.433968867448368e-06, + "loss": 0.4095, + "step": 8834 + }, + { + "epoch": 0.6134564643799473, + "grad_norm": 4.7755375754577365, + "learning_rate": 3.4329009981738694e-06, + "loss": 0.519, + "step": 8835 + }, + { + "epoch": 0.6135258991806694, + "grad_norm": 3.8121589577816497, + "learning_rate": 3.4318332081614653e-06, + "loss": 0.4024, + "step": 8836 + }, + { + "epoch": 0.6135953339813914, + "grad_norm": 2.4652277615141935, + "learning_rate": 3.4307654974651646e-06, + "loss": 0.1867, + "step": 8837 + }, + { + "epoch": 0.6136647687821136, + "grad_norm": 3.743097312374, + "learning_rate": 3.429697866138967e-06, + "loss": 0.1834, + "step": 8838 + }, + { + "epoch": 0.6137342035828357, + "grad_norm": 3.7070803664321565, + "learning_rate": 3.428630314236876e-06, + "loss": 0.2808, + "step": 8839 + }, + { + "epoch": 0.6138036383835579, + "grad_norm": 4.470430758771801, + "learning_rate": 3.427562841812886e-06, + "loss": 0.5572, + "step": 8840 + }, + { + "epoch": 0.61387307318428, + "grad_norm": 3.9595464403271685, + "learning_rate": 3.4264954489209863e-06, + "loss": 0.4157, + "step": 8841 + }, + { + "epoch": 0.613942507985002, + "grad_norm": 4.2733463495985955, + "learning_rate": 3.4254281356151665e-06, + "loss": 0.5208, + "step": 8842 + }, + { + "epoch": 0.6140119427857242, + "grad_norm": 4.050260943499243, + "learning_rate": 3.42436090194941e-06, + "loss": 0.5675, + "step": 8843 + }, + { + "epoch": 0.6140813775864463, + "grad_norm": 3.42306606736064, + "learning_rate": 3.4232937479776977e-06, + "loss": 0.328, + "step": 8844 + }, + { + "epoch": 0.6141508123871684, + "grad_norm": 3.086523847368888, + "learning_rate": 3.4222266737540017e-06, + "loss": 0.3963, + "step": 8845 + }, + { + "epoch": 0.6142202471878906, + "grad_norm": 3.137793094328264, + "learning_rate": 3.421159679332296e-06, + "loss": 0.2813, + "step": 8846 + }, + { + "epoch": 0.6142896819886127, + "grad_norm": 2.7669615693239464, + "learning_rate": 3.4200927647665484e-06, + "loss": 0.2516, + "step": 8847 + }, + { + "epoch": 0.6143591167893349, + "grad_norm": 3.800275682901952, + "learning_rate": 3.4190259301107187e-06, + "loss": 0.5211, + "step": 8848 + }, + { + "epoch": 0.6144285515900569, + "grad_norm": 3.858443102032203, + "learning_rate": 3.4179591754187692e-06, + "loss": 0.4899, + "step": 8849 + }, + { + "epoch": 0.614497986390779, + "grad_norm": 3.7112136230199213, + "learning_rate": 3.4168925007446535e-06, + "loss": 0.5609, + "step": 8850 + }, + { + "epoch": 0.6145674211915012, + "grad_norm": 3.5705100471043894, + "learning_rate": 3.415825906142326e-06, + "loss": 0.243, + "step": 8851 + }, + { + "epoch": 0.6146368559922233, + "grad_norm": 3.5555371947258845, + "learning_rate": 3.4147593916657294e-06, + "loss": 0.4282, + "step": 8852 + }, + { + "epoch": 0.6147062907929455, + "grad_norm": 
4.348133127226109, + "learning_rate": 3.41369295736881e-06, + "loss": 0.526, + "step": 8853 + }, + { + "epoch": 0.6147757255936676, + "grad_norm": 2.9015866024526384, + "learning_rate": 3.4126266033055042e-06, + "loss": 0.2459, + "step": 8854 + }, + { + "epoch": 0.6148451603943896, + "grad_norm": 3.443649522031746, + "learning_rate": 3.411560329529752e-06, + "loss": 0.3978, + "step": 8855 + }, + { + "epoch": 0.6149145951951118, + "grad_norm": 3.8251773224159353, + "learning_rate": 3.4104941360954785e-06, + "loss": 0.5023, + "step": 8856 + }, + { + "epoch": 0.6149840299958339, + "grad_norm": 4.687300855272303, + "learning_rate": 3.4094280230566123e-06, + "loss": 0.4949, + "step": 8857 + }, + { + "epoch": 0.615053464796556, + "grad_norm": 3.3203615763143164, + "learning_rate": 3.4083619904670796e-06, + "loss": 0.3176, + "step": 8858 + }, + { + "epoch": 0.6151228995972782, + "grad_norm": 4.278693301282499, + "learning_rate": 3.407296038380794e-06, + "loss": 0.3024, + "step": 8859 + }, + { + "epoch": 0.6151923343980003, + "grad_norm": 3.568714253800124, + "learning_rate": 3.406230166851673e-06, + "loss": 0.4886, + "step": 8860 + }, + { + "epoch": 0.6152617691987224, + "grad_norm": 2.1286816237504875, + "learning_rate": 3.4051643759336256e-06, + "loss": 0.1472, + "step": 8861 + }, + { + "epoch": 0.6153312039994445, + "grad_norm": 4.20814328590448, + "learning_rate": 3.404098665680562e-06, + "loss": 0.684, + "step": 8862 + }, + { + "epoch": 0.6154006388001666, + "grad_norm": 3.4462645805893715, + "learning_rate": 3.4030330361463803e-06, + "loss": 0.3764, + "step": 8863 + }, + { + "epoch": 0.6154700736008888, + "grad_norm": 3.4337351573514594, + "learning_rate": 3.40196748738498e-06, + "loss": 0.4078, + "step": 8864 + }, + { + "epoch": 0.6155395084016109, + "grad_norm": 4.411755843560975, + "learning_rate": 3.400902019450258e-06, + "loss": 0.4952, + "step": 8865 + }, + { + "epoch": 0.6156089432023331, + "grad_norm": 3.8201259387704023, + "learning_rate": 3.3998366323961013e-06, + "loss": 0.5878, + "step": 8866 + }, + { + "epoch": 0.6156783780030551, + "grad_norm": 3.998293258277663, + "learning_rate": 3.3987713262763957e-06, + "loss": 0.4708, + "step": 8867 + }, + { + "epoch": 0.6157478128037772, + "grad_norm": 3.6092836582110697, + "learning_rate": 3.397706101145026e-06, + "loss": 0.3317, + "step": 8868 + }, + { + "epoch": 0.6158172476044994, + "grad_norm": 2.456280766453252, + "learning_rate": 3.396640957055869e-06, + "loss": 0.1868, + "step": 8869 + }, + { + "epoch": 0.6158866824052215, + "grad_norm": 4.299979372384046, + "learning_rate": 3.395575894062797e-06, + "loss": 0.4995, + "step": 8870 + }, + { + "epoch": 0.6159561172059436, + "grad_norm": 3.5152924490110893, + "learning_rate": 3.3945109122196797e-06, + "loss": 0.3992, + "step": 8871 + }, + { + "epoch": 0.6160255520066658, + "grad_norm": 4.327403529507235, + "learning_rate": 3.3934460115803863e-06, + "loss": 0.6731, + "step": 8872 + }, + { + "epoch": 0.6160949868073878, + "grad_norm": 2.2403095590063744, + "learning_rate": 3.3923811921987748e-06, + "loss": 0.1671, + "step": 8873 + }, + { + "epoch": 0.61616442160811, + "grad_norm": 3.5622165988462764, + "learning_rate": 3.3913164541287026e-06, + "loss": 0.3192, + "step": 8874 + }, + { + "epoch": 0.6162338564088321, + "grad_norm": 3.8751735615082565, + "learning_rate": 3.390251797424024e-06, + "loss": 0.4521, + "step": 8875 + }, + { + "epoch": 0.6163032912095542, + "grad_norm": 3.932985385424576, + "learning_rate": 3.389187222138589e-06, + "loss": 0.2698, + "step": 8876 + }, + { + 
"epoch": 0.6163727260102764, + "grad_norm": 5.404326713159761, + "learning_rate": 3.3881227283262403e-06, + "loss": 0.5754, + "step": 8877 + }, + { + "epoch": 0.6164421608109985, + "grad_norm": 3.7388839658478794, + "learning_rate": 3.3870583160408203e-06, + "loss": 0.5022, + "step": 8878 + }, + { + "epoch": 0.6165115956117206, + "grad_norm": 3.0764830862051675, + "learning_rate": 3.3859939853361674e-06, + "loss": 0.1961, + "step": 8879 + }, + { + "epoch": 0.6165810304124427, + "grad_norm": 3.710339976383918, + "learning_rate": 3.3849297362661092e-06, + "loss": 0.3062, + "step": 8880 + }, + { + "epoch": 0.6166504652131648, + "grad_norm": 3.4193998625342212, + "learning_rate": 3.3838655688844785e-06, + "loss": 0.3121, + "step": 8881 + }, + { + "epoch": 0.616719900013887, + "grad_norm": 3.998249990459775, + "learning_rate": 3.3828014832450987e-06, + "loss": 0.4919, + "step": 8882 + }, + { + "epoch": 0.6167893348146091, + "grad_norm": 4.003892193030432, + "learning_rate": 3.3817374794017916e-06, + "loss": 0.586, + "step": 8883 + }, + { + "epoch": 0.6168587696153313, + "grad_norm": 2.780894147925231, + "learning_rate": 3.380673557408369e-06, + "loss": 0.1996, + "step": 8884 + }, + { + "epoch": 0.6169282044160533, + "grad_norm": 4.308226561724884, + "learning_rate": 3.3796097173186472e-06, + "loss": 0.4837, + "step": 8885 + }, + { + "epoch": 0.6169976392167754, + "grad_norm": 2.2490334509221808, + "learning_rate": 3.37854595918643e-06, + "loss": 0.2372, + "step": 8886 + }, + { + "epoch": 0.6170670740174976, + "grad_norm": 3.448596747981524, + "learning_rate": 3.377482283065526e-06, + "loss": 0.4637, + "step": 8887 + }, + { + "epoch": 0.6171365088182197, + "grad_norm": 3.580542009223081, + "learning_rate": 3.376418689009732e-06, + "loss": 0.3825, + "step": 8888 + }, + { + "epoch": 0.6172059436189418, + "grad_norm": 4.148608292931165, + "learning_rate": 3.3753551770728417e-06, + "loss": 0.5765, + "step": 8889 + }, + { + "epoch": 0.617275378419664, + "grad_norm": 3.565873083541428, + "learning_rate": 3.37429174730865e-06, + "loss": 0.2782, + "step": 8890 + }, + { + "epoch": 0.617344813220386, + "grad_norm": 5.038561254483094, + "learning_rate": 3.3732283997709414e-06, + "loss": 0.6114, + "step": 8891 + }, + { + "epoch": 0.6174142480211082, + "grad_norm": 3.4638264423546223, + "learning_rate": 3.3721651345134985e-06, + "loss": 0.3435, + "step": 8892 + }, + { + "epoch": 0.6174836828218303, + "grad_norm": 5.4677454552394495, + "learning_rate": 3.371101951590101e-06, + "loss": 0.5218, + "step": 8893 + }, + { + "epoch": 0.6175531176225524, + "grad_norm": 4.082852809269385, + "learning_rate": 3.3700388510545256e-06, + "loss": 0.4324, + "step": 8894 + }, + { + "epoch": 0.6176225524232746, + "grad_norm": 2.9785945596914787, + "learning_rate": 3.36897583296054e-06, + "loss": 0.3668, + "step": 8895 + }, + { + "epoch": 0.6176919872239967, + "grad_norm": 7.050132443067151, + "learning_rate": 3.3679128973619097e-06, + "loss": 0.7564, + "step": 8896 + }, + { + "epoch": 0.6177614220247188, + "grad_norm": 3.2394877629889463, + "learning_rate": 3.366850044312401e-06, + "loss": 0.4109, + "step": 8897 + }, + { + "epoch": 0.6178308568254409, + "grad_norm": 3.797158443456836, + "learning_rate": 3.365787273865767e-06, + "loss": 0.4654, + "step": 8898 + }, + { + "epoch": 0.617900291626163, + "grad_norm": 3.117512770790711, + "learning_rate": 3.3647245860757626e-06, + "loss": 0.2712, + "step": 8899 + }, + { + "epoch": 0.6179697264268852, + "grad_norm": 5.029807959578462, + "learning_rate": 3.3636619809961398e-06, + 
"loss": 0.3782, + "step": 8900 + }, + { + "epoch": 0.6180391612276073, + "grad_norm": 2.7476560484303634, + "learning_rate": 3.362599458680643e-06, + "loss": 0.2044, + "step": 8901 + }, + { + "epoch": 0.6181085960283293, + "grad_norm": 3.111577718647958, + "learning_rate": 3.3615370191830104e-06, + "loss": 0.3491, + "step": 8902 + }, + { + "epoch": 0.6181780308290515, + "grad_norm": 3.2224597351197217, + "learning_rate": 3.360474662556982e-06, + "loss": 0.2718, + "step": 8903 + }, + { + "epoch": 0.6182474656297736, + "grad_norm": 2.936160571109473, + "learning_rate": 3.3594123888562915e-06, + "loss": 0.2204, + "step": 8904 + }, + { + "epoch": 0.6183169004304958, + "grad_norm": 3.544824785258668, + "learning_rate": 3.3583501981346646e-06, + "loss": 0.4693, + "step": 8905 + }, + { + "epoch": 0.6183863352312179, + "grad_norm": 4.118583421756876, + "learning_rate": 3.3572880904458267e-06, + "loss": 0.4732, + "step": 8906 + }, + { + "epoch": 0.61845577003194, + "grad_norm": 5.748645519302652, + "learning_rate": 3.3562260658434984e-06, + "loss": 0.3019, + "step": 8907 + }, + { + "epoch": 0.6185252048326622, + "grad_norm": 4.25843478269813, + "learning_rate": 3.355164124381398e-06, + "loss": 0.4267, + "step": 8908 + }, + { + "epoch": 0.6185946396333842, + "grad_norm": 4.090064727931622, + "learning_rate": 3.3541022661132324e-06, + "loss": 0.525, + "step": 8909 + }, + { + "epoch": 0.6186640744341064, + "grad_norm": 4.482175342473842, + "learning_rate": 3.3530404910927124e-06, + "loss": 0.4554, + "step": 8910 + }, + { + "epoch": 0.6187335092348285, + "grad_norm": 4.808436071628028, + "learning_rate": 3.35197879937354e-06, + "loss": 0.2951, + "step": 8911 + }, + { + "epoch": 0.6188029440355506, + "grad_norm": 4.612688068329443, + "learning_rate": 3.3509171910094162e-06, + "loss": 0.6373, + "step": 8912 + }, + { + "epoch": 0.6188723788362728, + "grad_norm": 2.8273767538153622, + "learning_rate": 3.3498556660540337e-06, + "loss": 0.2209, + "step": 8913 + }, + { + "epoch": 0.6189418136369949, + "grad_norm": 4.23562953268146, + "learning_rate": 3.348794224561085e-06, + "loss": 0.2846, + "step": 8914 + }, + { + "epoch": 0.6190112484377169, + "grad_norm": 3.2248588096946573, + "learning_rate": 3.347732866584258e-06, + "loss": 0.2979, + "step": 8915 + }, + { + "epoch": 0.6190806832384391, + "grad_norm": 4.293269968820316, + "learning_rate": 3.3466715921772297e-06, + "loss": 0.5579, + "step": 8916 + }, + { + "epoch": 0.6191501180391612, + "grad_norm": 4.148686548597329, + "learning_rate": 3.345610401393683e-06, + "loss": 0.5158, + "step": 8917 + }, + { + "epoch": 0.6192195528398834, + "grad_norm": 3.254696254648424, + "learning_rate": 3.344549294287288e-06, + "loss": 0.2812, + "step": 8918 + }, + { + "epoch": 0.6192889876406055, + "grad_norm": 3.9736566934074675, + "learning_rate": 3.34348827091172e-06, + "loss": 0.6182, + "step": 8919 + }, + { + "epoch": 0.6193584224413275, + "grad_norm": 3.3794847061171485, + "learning_rate": 3.342427331320639e-06, + "loss": 0.4227, + "step": 8920 + }, + { + "epoch": 0.6194278572420497, + "grad_norm": 5.1063443574964955, + "learning_rate": 3.341366475567707e-06, + "loss": 0.5266, + "step": 8921 + }, + { + "epoch": 0.6194972920427718, + "grad_norm": 2.4202464062809286, + "learning_rate": 3.340305703706584e-06, + "loss": 0.2438, + "step": 8922 + }, + { + "epoch": 0.619566726843494, + "grad_norm": 3.2525305594531497, + "learning_rate": 3.339245015790918e-06, + "loss": 0.2282, + "step": 8923 + }, + { + "epoch": 0.6196361616442161, + "grad_norm": 4.358863905389245, + 
"learning_rate": 3.338184411874361e-06, + "loss": 0.3752, + "step": 8924 + }, + { + "epoch": 0.6197055964449382, + "grad_norm": 4.181693113430663, + "learning_rate": 3.3371238920105554e-06, + "loss": 0.4458, + "step": 8925 + }, + { + "epoch": 0.6197750312456604, + "grad_norm": 4.1391026237390625, + "learning_rate": 3.336063456253143e-06, + "loss": 0.4326, + "step": 8926 + }, + { + "epoch": 0.6198444660463824, + "grad_norm": 3.074673598875534, + "learning_rate": 3.3350031046557573e-06, + "loss": 0.2806, + "step": 8927 + }, + { + "epoch": 0.6199139008471045, + "grad_norm": 3.189230893579366, + "learning_rate": 3.3339428372720295e-06, + "loss": 0.3194, + "step": 8928 + }, + { + "epoch": 0.6199833356478267, + "grad_norm": 4.586643892909302, + "learning_rate": 3.3328826541555905e-06, + "loss": 0.4879, + "step": 8929 + }, + { + "epoch": 0.6200527704485488, + "grad_norm": 3.340480594932884, + "learning_rate": 3.3318225553600582e-06, + "loss": 0.3196, + "step": 8930 + }, + { + "epoch": 0.620122205249271, + "grad_norm": 3.3354803777115496, + "learning_rate": 3.3307625409390533e-06, + "loss": 0.3078, + "step": 8931 + }, + { + "epoch": 0.620191640049993, + "grad_norm": 6.136814589448868, + "learning_rate": 3.3297026109461896e-06, + "loss": 0.7291, + "step": 8932 + }, + { + "epoch": 0.6202610748507151, + "grad_norm": 4.177866688371145, + "learning_rate": 3.32864276543508e-06, + "loss": 0.4478, + "step": 8933 + }, + { + "epoch": 0.6203305096514373, + "grad_norm": 3.426458910265748, + "learning_rate": 3.3275830044593272e-06, + "loss": 0.3917, + "step": 8934 + }, + { + "epoch": 0.6203999444521594, + "grad_norm": 3.78701457833112, + "learning_rate": 3.3265233280725325e-06, + "loss": 0.6512, + "step": 8935 + }, + { + "epoch": 0.6204693792528816, + "grad_norm": 4.1605233376360635, + "learning_rate": 3.325463736328295e-06, + "loss": 0.5566, + "step": 8936 + }, + { + "epoch": 0.6205388140536037, + "grad_norm": 4.102994492402127, + "learning_rate": 3.324404229280208e-06, + "loss": 0.321, + "step": 8937 + }, + { + "epoch": 0.6206082488543258, + "grad_norm": 3.1760675064732777, + "learning_rate": 3.323344806981857e-06, + "loss": 0.4112, + "step": 8938 + }, + { + "epoch": 0.6206776836550479, + "grad_norm": 3.686598083025207, + "learning_rate": 3.322285469486829e-06, + "loss": 0.4862, + "step": 8939 + }, + { + "epoch": 0.62074711845577, + "grad_norm": 3.992048391250304, + "learning_rate": 3.3212262168487053e-06, + "loss": 0.4923, + "step": 8940 + }, + { + "epoch": 0.6208165532564921, + "grad_norm": 4.395638780436744, + "learning_rate": 3.320167049121057e-06, + "loss": 0.464, + "step": 8941 + }, + { + "epoch": 0.6208859880572143, + "grad_norm": 4.463266375264487, + "learning_rate": 3.319107966357459e-06, + "loss": 0.5432, + "step": 8942 + }, + { + "epoch": 0.6209554228579364, + "grad_norm": 3.835221332881704, + "learning_rate": 3.318048968611479e-06, + "loss": 0.5147, + "step": 8943 + }, + { + "epoch": 0.6210248576586586, + "grad_norm": 4.9437114297164895, + "learning_rate": 3.3169900559366797e-06, + "loss": 0.5789, + "step": 8944 + }, + { + "epoch": 0.6210942924593806, + "grad_norm": 5.446543230435596, + "learning_rate": 3.3159312283866173e-06, + "loss": 0.5016, + "step": 8945 + }, + { + "epoch": 0.6211637272601027, + "grad_norm": 3.331033647084621, + "learning_rate": 3.3148724860148486e-06, + "loss": 0.2522, + "step": 8946 + }, + { + "epoch": 0.6212331620608249, + "grad_norm": 4.446017173800562, + "learning_rate": 3.3138138288749233e-06, + "loss": 0.4903, + "step": 8947 + }, + { + "epoch": 0.621302596861547, + 
"grad_norm": 3.9759594698348404, + "learning_rate": 3.3127552570203848e-06, + "loss": 0.4596, + "step": 8948 + }, + { + "epoch": 0.6213720316622692, + "grad_norm": 3.4766982739504386, + "learning_rate": 3.3116967705047778e-06, + "loss": 0.5102, + "step": 8949 + }, + { + "epoch": 0.6214414664629913, + "grad_norm": 3.108054077664294, + "learning_rate": 3.3106383693816356e-06, + "loss": 0.2278, + "step": 8950 + }, + { + "epoch": 0.6215109012637133, + "grad_norm": 2.81017797846336, + "learning_rate": 3.3095800537044955e-06, + "loss": 0.2269, + "step": 8951 + }, + { + "epoch": 0.6215803360644355, + "grad_norm": 11.29337652881421, + "learning_rate": 3.3085218235268814e-06, + "loss": 0.4081, + "step": 8952 + }, + { + "epoch": 0.6216497708651576, + "grad_norm": 4.269991170430919, + "learning_rate": 3.3074636789023207e-06, + "loss": 0.6077, + "step": 8953 + }, + { + "epoch": 0.6217192056658798, + "grad_norm": 3.878249165013554, + "learning_rate": 3.3064056198843324e-06, + "loss": 0.4451, + "step": 8954 + }, + { + "epoch": 0.6217886404666019, + "grad_norm": 3.495601004928863, + "learning_rate": 3.30534764652643e-06, + "loss": 0.3896, + "step": 8955 + }, + { + "epoch": 0.621858075267324, + "grad_norm": 4.117749286960358, + "learning_rate": 3.3042897588821266e-06, + "loss": 0.3503, + "step": 8956 + }, + { + "epoch": 0.6219275100680461, + "grad_norm": 2.9455276888058735, + "learning_rate": 3.303231957004928e-06, + "loss": 0.3606, + "step": 8957 + }, + { + "epoch": 0.6219969448687682, + "grad_norm": 3.576283646783539, + "learning_rate": 3.3021742409483377e-06, + "loss": 0.2957, + "step": 8958 + }, + { + "epoch": 0.6220663796694903, + "grad_norm": 3.8510852096967017, + "learning_rate": 3.3011166107658533e-06, + "loss": 0.4388, + "step": 8959 + }, + { + "epoch": 0.6221358144702125, + "grad_norm": 3.5140867863313914, + "learning_rate": 3.3000590665109666e-06, + "loss": 0.396, + "step": 8960 + }, + { + "epoch": 0.6222052492709346, + "grad_norm": 5.053596372067923, + "learning_rate": 3.299001608237169e-06, + "loss": 0.5821, + "step": 8961 + }, + { + "epoch": 0.6222746840716568, + "grad_norm": 4.194408828184071, + "learning_rate": 3.297944235997947e-06, + "loss": 0.4454, + "step": 8962 + }, + { + "epoch": 0.6223441188723788, + "grad_norm": 3.794174361042864, + "learning_rate": 3.296886949846778e-06, + "loss": 0.3753, + "step": 8963 + }, + { + "epoch": 0.6224135536731009, + "grad_norm": 3.325305633369674, + "learning_rate": 3.29582974983714e-06, + "loss": 0.3717, + "step": 8964 + }, + { + "epoch": 0.6224829884738231, + "grad_norm": 4.097661349877509, + "learning_rate": 3.294772636022506e-06, + "loss": 0.31, + "step": 8965 + }, + { + "epoch": 0.6225524232745452, + "grad_norm": 3.7101726783391054, + "learning_rate": 3.2937156084563426e-06, + "loss": 0.434, + "step": 8966 + }, + { + "epoch": 0.6226218580752674, + "grad_norm": 4.678320279816437, + "learning_rate": 3.2926586671921112e-06, + "loss": 0.5411, + "step": 8967 + }, + { + "epoch": 0.6226912928759895, + "grad_norm": 2.767381312053914, + "learning_rate": 3.2916018122832734e-06, + "loss": 0.1098, + "step": 8968 + }, + { + "epoch": 0.6227607276767115, + "grad_norm": 3.5352557440866175, + "learning_rate": 3.2905450437832837e-06, + "loss": 0.3617, + "step": 8969 + }, + { + "epoch": 0.6228301624774337, + "grad_norm": 5.82755823278559, + "learning_rate": 3.2894883617455898e-06, + "loss": 0.6603, + "step": 8970 + }, + { + "epoch": 0.6228995972781558, + "grad_norm": 3.9041101833879077, + "learning_rate": 3.2884317662236383e-06, + "loss": 0.4414, + "step": 8971 + }, 
+ { + "epoch": 0.6229690320788779, + "grad_norm": 4.004251511431086, + "learning_rate": 3.287375257270874e-06, + "loss": 0.5276, + "step": 8972 + }, + { + "epoch": 0.6230384668796001, + "grad_norm": 4.841416363184572, + "learning_rate": 3.2863188349407293e-06, + "loss": 0.6096, + "step": 8973 + }, + { + "epoch": 0.6231079016803222, + "grad_norm": 3.3382927605081996, + "learning_rate": 3.285262499286638e-06, + "loss": 0.3478, + "step": 8974 + }, + { + "epoch": 0.6231773364810443, + "grad_norm": 4.318534336309797, + "learning_rate": 3.284206250362031e-06, + "loss": 0.5535, + "step": 8975 + }, + { + "epoch": 0.6232467712817664, + "grad_norm": 4.280109432267604, + "learning_rate": 3.2831500882203306e-06, + "loss": 0.3308, + "step": 8976 + }, + { + "epoch": 0.6233162060824885, + "grad_norm": 4.7821423933146034, + "learning_rate": 3.2820940129149537e-06, + "loss": 0.6301, + "step": 8977 + }, + { + "epoch": 0.6233856408832107, + "grad_norm": 3.7154703456734093, + "learning_rate": 3.2810380244993194e-06, + "loss": 0.4113, + "step": 8978 + }, + { + "epoch": 0.6234550756839328, + "grad_norm": 4.156670549543116, + "learning_rate": 3.2799821230268374e-06, + "loss": 0.5935, + "step": 8979 + }, + { + "epoch": 0.623524510484655, + "grad_norm": 4.04397332128435, + "learning_rate": 3.2789263085509117e-06, + "loss": 0.4655, + "step": 8980 + }, + { + "epoch": 0.623593945285377, + "grad_norm": 3.919783901319768, + "learning_rate": 3.2778705811249457e-06, + "loss": 0.4802, + "step": 8981 + }, + { + "epoch": 0.6236633800860991, + "grad_norm": 4.240432975680613, + "learning_rate": 3.276814940802337e-06, + "loss": 0.5088, + "step": 8982 + }, + { + "epoch": 0.6237328148868213, + "grad_norm": 4.437544004404756, + "learning_rate": 3.2757593876364802e-06, + "loss": 0.5367, + "step": 8983 + }, + { + "epoch": 0.6238022496875434, + "grad_norm": 4.421928936623407, + "learning_rate": 3.2747039216807607e-06, + "loss": 0.5654, + "step": 8984 + }, + { + "epoch": 0.6238716844882655, + "grad_norm": 4.09492559396691, + "learning_rate": 3.273648542988566e-06, + "loss": 0.5398, + "step": 8985 + }, + { + "epoch": 0.6239411192889877, + "grad_norm": 3.3251721284795224, + "learning_rate": 3.2725932516132718e-06, + "loss": 0.3802, + "step": 8986 + }, + { + "epoch": 0.6240105540897097, + "grad_norm": 4.0172754100899715, + "learning_rate": 3.27153804760826e-06, + "loss": 0.4968, + "step": 8987 + }, + { + "epoch": 0.6240799888904319, + "grad_norm": 2.9831868573379277, + "learning_rate": 3.270482931026896e-06, + "loss": 0.337, + "step": 8988 + }, + { + "epoch": 0.624149423691154, + "grad_norm": 4.4904188958483635, + "learning_rate": 3.269427901922547e-06, + "loss": 0.4517, + "step": 8989 + }, + { + "epoch": 0.6242188584918761, + "grad_norm": 4.630562921268973, + "learning_rate": 3.268372960348579e-06, + "loss": 0.5792, + "step": 8990 + }, + { + "epoch": 0.6242882932925983, + "grad_norm": 4.0097492103052605, + "learning_rate": 3.2673181063583465e-06, + "loss": 0.3977, + "step": 8991 + }, + { + "epoch": 0.6243577280933204, + "grad_norm": 3.9674126128040546, + "learning_rate": 3.2662633400052024e-06, + "loss": 0.415, + "step": 8992 + }, + { + "epoch": 0.6244271628940425, + "grad_norm": 4.571299549606146, + "learning_rate": 3.265208661342496e-06, + "loss": 0.7069, + "step": 8993 + }, + { + "epoch": 0.6244965976947646, + "grad_norm": 2.682808453270179, + "learning_rate": 3.2641540704235754e-06, + "loss": 0.1882, + "step": 8994 + }, + { + "epoch": 0.6245660324954867, + "grad_norm": 3.9389360522043355, + "learning_rate": 3.263099567301776e-06, + 
"loss": 0.3573, + "step": 8995 + }, + { + "epoch": 0.6246354672962089, + "grad_norm": 3.36854396165017, + "learning_rate": 3.262045152030434e-06, + "loss": 0.36, + "step": 8996 + }, + { + "epoch": 0.624704902096931, + "grad_norm": 3.7646545474297173, + "learning_rate": 3.260990824662884e-06, + "loss": 0.3665, + "step": 8997 + }, + { + "epoch": 0.624774336897653, + "grad_norm": 4.319426677523966, + "learning_rate": 3.259936585252449e-06, + "loss": 0.4495, + "step": 8998 + }, + { + "epoch": 0.6248437716983752, + "grad_norm": 6.785832555900826, + "learning_rate": 3.258882433852452e-06, + "loss": 0.5352, + "step": 8999 + }, + { + "epoch": 0.6249132064990973, + "grad_norm": 4.204417482643048, + "learning_rate": 3.2578283705162117e-06, + "loss": 0.4865, + "step": 9000 + }, + { + "epoch": 0.6249826412998195, + "grad_norm": 2.6676647165279617, + "learning_rate": 3.256774395297042e-06, + "loss": 0.179, + "step": 9001 + }, + { + "epoch": 0.6250520761005416, + "grad_norm": 3.5890064653683136, + "learning_rate": 3.255720508248249e-06, + "loss": 0.3673, + "step": 9002 + }, + { + "epoch": 0.6251215109012637, + "grad_norm": 4.192944739343706, + "learning_rate": 3.254666709423139e-06, + "loss": 0.3802, + "step": 9003 + }, + { + "epoch": 0.6251909457019859, + "grad_norm": 4.441264127308771, + "learning_rate": 3.253612998875014e-06, + "loss": 0.4433, + "step": 9004 + }, + { + "epoch": 0.6252603805027079, + "grad_norm": 4.494084642904855, + "learning_rate": 3.2525593766571663e-06, + "loss": 0.4904, + "step": 9005 + }, + { + "epoch": 0.6253298153034301, + "grad_norm": 4.408331407940073, + "learning_rate": 3.251505842822886e-06, + "loss": 0.4434, + "step": 9006 + }, + { + "epoch": 0.6253992501041522, + "grad_norm": 4.041272252944142, + "learning_rate": 3.250452397425464e-06, + "loss": 0.393, + "step": 9007 + }, + { + "epoch": 0.6254686849048743, + "grad_norm": 4.011531290242249, + "learning_rate": 3.2493990405181797e-06, + "loss": 0.481, + "step": 9008 + }, + { + "epoch": 0.6255381197055965, + "grad_norm": 4.2362711664151425, + "learning_rate": 3.24834577215431e-06, + "loss": 0.6241, + "step": 9009 + }, + { + "epoch": 0.6256075545063186, + "grad_norm": 4.278327500342482, + "learning_rate": 3.2472925923871297e-06, + "loss": 0.5626, + "step": 9010 + }, + { + "epoch": 0.6256769893070407, + "grad_norm": 4.227086493343128, + "learning_rate": 3.2462395012699053e-06, + "loss": 0.3774, + "step": 9011 + }, + { + "epoch": 0.6257464241077628, + "grad_norm": 4.516249183747287, + "learning_rate": 3.245186498855906e-06, + "loss": 0.6426, + "step": 9012 + }, + { + "epoch": 0.6258158589084849, + "grad_norm": 3.097392369470254, + "learning_rate": 3.244133585198384e-06, + "loss": 0.2213, + "step": 9013 + }, + { + "epoch": 0.6258852937092071, + "grad_norm": 4.331486255058671, + "learning_rate": 3.2430807603505996e-06, + "loss": 0.557, + "step": 9014 + }, + { + "epoch": 0.6259547285099292, + "grad_norm": 3.875794829016102, + "learning_rate": 3.2420280243658044e-06, + "loss": 0.3534, + "step": 9015 + }, + { + "epoch": 0.6260241633106512, + "grad_norm": 4.148788326155634, + "learning_rate": 3.24097537729724e-06, + "loss": 0.3867, + "step": 9016 + }, + { + "epoch": 0.6260935981113734, + "grad_norm": 4.837398845783604, + "learning_rate": 3.2399228191981515e-06, + "loss": 0.57, + "step": 9017 + }, + { + "epoch": 0.6261630329120955, + "grad_norm": 3.8692866917323063, + "learning_rate": 3.238870350121773e-06, + "loss": 0.59, + "step": 9018 + }, + { + "epoch": 0.6262324677128177, + "grad_norm": 3.2516628088330526, + "learning_rate": 
3.237817970121343e-06, + "loss": 0.2857, + "step": 9019 + }, + { + "epoch": 0.6263019025135398, + "grad_norm": 7.595626555877067, + "learning_rate": 3.236765679250084e-06, + "loss": 0.21, + "step": 9020 + }, + { + "epoch": 0.6263713373142619, + "grad_norm": 3.8447979977921207, + "learning_rate": 3.235713477561221e-06, + "loss": 0.4557, + "step": 9021 + }, + { + "epoch": 0.6264407721149841, + "grad_norm": 2.890888777142742, + "learning_rate": 3.234661365107976e-06, + "loss": 0.3525, + "step": 9022 + }, + { + "epoch": 0.6265102069157061, + "grad_norm": 3.815185239918401, + "learning_rate": 3.2336093419435597e-06, + "loss": 0.51, + "step": 9023 + }, + { + "epoch": 0.6265796417164283, + "grad_norm": 4.027388270659074, + "learning_rate": 3.232557408121185e-06, + "loss": 0.4839, + "step": 9024 + }, + { + "epoch": 0.6266490765171504, + "grad_norm": 3.2612492799509827, + "learning_rate": 3.2315055636940553e-06, + "loss": 0.2954, + "step": 9025 + }, + { + "epoch": 0.6267185113178725, + "grad_norm": 3.354474441164539, + "learning_rate": 3.2304538087153748e-06, + "loss": 0.3099, + "step": 9026 + }, + { + "epoch": 0.6267879461185947, + "grad_norm": 3.8525876880102587, + "learning_rate": 3.2294021432383375e-06, + "loss": 0.4962, + "step": 9027 + }, + { + "epoch": 0.6268573809193168, + "grad_norm": 3.8854912035547637, + "learning_rate": 3.2283505673161354e-06, + "loss": 0.5525, + "step": 9028 + }, + { + "epoch": 0.6269268157200388, + "grad_norm": 4.008680604361545, + "learning_rate": 3.2272990810019587e-06, + "loss": 0.4658, + "step": 9029 + }, + { + "epoch": 0.626996250520761, + "grad_norm": 3.2660675058326194, + "learning_rate": 3.2262476843489876e-06, + "loss": 0.3845, + "step": 9030 + }, + { + "epoch": 0.6270656853214831, + "grad_norm": 3.4016747154614007, + "learning_rate": 3.225196377410401e-06, + "loss": 0.3127, + "step": 9031 + }, + { + "epoch": 0.6271351201222053, + "grad_norm": 3.7010499987079646, + "learning_rate": 3.2241451602393725e-06, + "loss": 0.4047, + "step": 9032 + }, + { + "epoch": 0.6272045549229274, + "grad_norm": 4.133439477857955, + "learning_rate": 3.2230940328890746e-06, + "loss": 0.4985, + "step": 9033 + }, + { + "epoch": 0.6272739897236494, + "grad_norm": 3.633030554670607, + "learning_rate": 3.222042995412669e-06, + "loss": 0.415, + "step": 9034 + }, + { + "epoch": 0.6273434245243716, + "grad_norm": 3.8941318514447887, + "learning_rate": 3.2209920478633157e-06, + "loss": 0.4426, + "step": 9035 + }, + { + "epoch": 0.6274128593250937, + "grad_norm": 4.355914523541789, + "learning_rate": 3.2199411902941724e-06, + "loss": 0.5264, + "step": 9036 + }, + { + "epoch": 0.6274822941258159, + "grad_norm": 3.66399642635389, + "learning_rate": 3.2188904227583905e-06, + "loss": 0.439, + "step": 9037 + }, + { + "epoch": 0.627551728926538, + "grad_norm": 5.7953863089403725, + "learning_rate": 3.2178397453091137e-06, + "loss": 0.7627, + "step": 9038 + }, + { + "epoch": 0.6276211637272601, + "grad_norm": 8.762645532771407, + "learning_rate": 3.216789157999487e-06, + "loss": 0.4795, + "step": 9039 + }, + { + "epoch": 0.6276905985279823, + "grad_norm": 4.210207491433881, + "learning_rate": 3.2157386608826473e-06, + "loss": 0.5654, + "step": 9040 + }, + { + "epoch": 0.6277600333287043, + "grad_norm": 4.32036350155374, + "learning_rate": 3.2146882540117253e-06, + "loss": 0.6372, + "step": 9041 + }, + { + "epoch": 0.6278294681294264, + "grad_norm": 4.439258143489007, + "learning_rate": 3.2136379374398514e-06, + "loss": 0.5532, + "step": 9042 + }, + { + "epoch": 0.6278989029301486, + "grad_norm": 
4.320433934369404, + "learning_rate": 3.2125877112201497e-06, + "loss": 0.4743, + "step": 9043 + }, + { + "epoch": 0.6279683377308707, + "grad_norm": 3.8006062649271923, + "learning_rate": 3.2115375754057408e-06, + "loss": 0.5284, + "step": 9044 + }, + { + "epoch": 0.6280377725315929, + "grad_norm": 3.8265181132884716, + "learning_rate": 3.210487530049735e-06, + "loss": 0.3664, + "step": 9045 + }, + { + "epoch": 0.628107207332315, + "grad_norm": 3.5485285900860233, + "learning_rate": 3.2094375752052464e-06, + "loss": 0.4097, + "step": 9046 + }, + { + "epoch": 0.628176642133037, + "grad_norm": 2.8807486986276047, + "learning_rate": 3.20838771092538e-06, + "loss": 0.3151, + "step": 9047 + }, + { + "epoch": 0.6282460769337592, + "grad_norm": 4.118234514357313, + "learning_rate": 3.207337937263233e-06, + "loss": 0.4802, + "step": 9048 + }, + { + "epoch": 0.6283155117344813, + "grad_norm": 4.75502354866508, + "learning_rate": 3.2062882542719065e-06, + "loss": 0.7166, + "step": 9049 + }, + { + "epoch": 0.6283849465352035, + "grad_norm": 5.477466643250541, + "learning_rate": 3.2052386620044894e-06, + "loss": 0.56, + "step": 9050 + }, + { + "epoch": 0.6284543813359256, + "grad_norm": 3.3067271582116127, + "learning_rate": 3.2041891605140717e-06, + "loss": 0.3326, + "step": 9051 + }, + { + "epoch": 0.6285238161366477, + "grad_norm": 4.131545396951763, + "learning_rate": 3.203139749853732e-06, + "loss": 0.4406, + "step": 9052 + }, + { + "epoch": 0.6285932509373698, + "grad_norm": 4.2597589680827985, + "learning_rate": 3.202090430076551e-06, + "loss": 0.4907, + "step": 9053 + }, + { + "epoch": 0.6286626857380919, + "grad_norm": 4.001488334879843, + "learning_rate": 3.2010412012356033e-06, + "loss": 0.3803, + "step": 9054 + }, + { + "epoch": 0.628732120538814, + "grad_norm": 4.383751412856482, + "learning_rate": 3.1999920633839537e-06, + "loss": 0.5641, + "step": 9055 + }, + { + "epoch": 0.6288015553395362, + "grad_norm": 3.879851740950923, + "learning_rate": 3.1989430165746697e-06, + "loss": 0.4171, + "step": 9056 + }, + { + "epoch": 0.6288709901402583, + "grad_norm": 4.048561836292851, + "learning_rate": 3.197894060860809e-06, + "loss": 0.519, + "step": 9057 + }, + { + "epoch": 0.6289404249409805, + "grad_norm": 5.145456084574193, + "learning_rate": 3.1968451962954295e-06, + "loss": 0.593, + "step": 9058 + }, + { + "epoch": 0.6290098597417025, + "grad_norm": 3.612395344716014, + "learning_rate": 3.1957964229315786e-06, + "loss": 0.3915, + "step": 9059 + }, + { + "epoch": 0.6290792945424246, + "grad_norm": 3.908662020871548, + "learning_rate": 3.1947477408223018e-06, + "loss": 0.3766, + "step": 9060 + }, + { + "epoch": 0.6291487293431468, + "grad_norm": 2.454000261762465, + "learning_rate": 3.1936991500206427e-06, + "loss": 0.2675, + "step": 9061 + }, + { + "epoch": 0.6292181641438689, + "grad_norm": 4.741723339937936, + "learning_rate": 3.1926506505796372e-06, + "loss": 0.556, + "step": 9062 + }, + { + "epoch": 0.6292875989445911, + "grad_norm": 3.70455977897339, + "learning_rate": 3.1916022425523154e-06, + "loss": 0.2425, + "step": 9063 + }, + { + "epoch": 0.6293570337453132, + "grad_norm": 3.3076529339459824, + "learning_rate": 3.190553925991705e-06, + "loss": 0.3268, + "step": 9064 + }, + { + "epoch": 0.6294264685460352, + "grad_norm": 3.2657091345156104, + "learning_rate": 3.1895057009508322e-06, + "loss": 0.464, + "step": 9065 + }, + { + "epoch": 0.6294959033467574, + "grad_norm": 3.4743753174772327, + "learning_rate": 3.1884575674827113e-06, + "loss": 0.5878, + "step": 9066 + }, + { + "epoch": 
0.6295653381474795, + "grad_norm": 3.9985169685032935, + "learning_rate": 3.1874095256403547e-06, + "loss": 0.3798, + "step": 9067 + }, + { + "epoch": 0.6296347729482017, + "grad_norm": 5.74882975098507, + "learning_rate": 3.186361575476775e-06, + "loss": 0.6254, + "step": 9068 + }, + { + "epoch": 0.6297042077489238, + "grad_norm": 3.70218517709707, + "learning_rate": 3.185313717044976e-06, + "loss": 0.5777, + "step": 9069 + }, + { + "epoch": 0.6297736425496459, + "grad_norm": 3.8857262609012797, + "learning_rate": 3.1842659503979535e-06, + "loss": 0.497, + "step": 9070 + }, + { + "epoch": 0.629843077350368, + "grad_norm": 3.115277029655286, + "learning_rate": 3.1832182755887055e-06, + "loss": 0.3111, + "step": 9071 + }, + { + "epoch": 0.6299125121510901, + "grad_norm": 3.431121871853508, + "learning_rate": 3.182170692670222e-06, + "loss": 0.4478, + "step": 9072 + }, + { + "epoch": 0.6299819469518122, + "grad_norm": 3.513610449149736, + "learning_rate": 3.1811232016954857e-06, + "loss": 0.3327, + "step": 9073 + }, + { + "epoch": 0.6300513817525344, + "grad_norm": 2.3319133734152198, + "learning_rate": 3.18007580271748e-06, + "loss": 0.1969, + "step": 9074 + }, + { + "epoch": 0.6301208165532565, + "grad_norm": 3.4960578268544795, + "learning_rate": 3.1790284957891815e-06, + "loss": 0.5259, + "step": 9075 + }, + { + "epoch": 0.6301902513539787, + "grad_norm": 4.784284808198029, + "learning_rate": 3.1779812809635624e-06, + "loss": 0.3209, + "step": 9076 + }, + { + "epoch": 0.6302596861547007, + "grad_norm": 4.012218277341716, + "learning_rate": 3.1769341582935863e-06, + "loss": 0.3555, + "step": 9077 + }, + { + "epoch": 0.6303291209554228, + "grad_norm": 4.446047199406678, + "learning_rate": 3.175887127832219e-06, + "loss": 0.6318, + "step": 9078 + }, + { + "epoch": 0.630398555756145, + "grad_norm": 3.0163621753875995, + "learning_rate": 3.1748401896324173e-06, + "loss": 0.3288, + "step": 9079 + }, + { + "epoch": 0.6304679905568671, + "grad_norm": 2.742546354995392, + "learning_rate": 3.1737933437471314e-06, + "loss": 0.2384, + "step": 9080 + }, + { + "epoch": 0.6305374253575893, + "grad_norm": 3.6768064090314354, + "learning_rate": 3.1727465902293125e-06, + "loss": 0.4935, + "step": 9081 + }, + { + "epoch": 0.6306068601583114, + "grad_norm": 2.7434549600501414, + "learning_rate": 3.1716999291319026e-06, + "loss": 0.1034, + "step": 9082 + }, + { + "epoch": 0.6306762949590334, + "grad_norm": 3.800859727663394, + "learning_rate": 3.170653360507843e-06, + "loss": 0.3391, + "step": 9083 + }, + { + "epoch": 0.6307457297597556, + "grad_norm": 2.6726134812277538, + "learning_rate": 3.169606884410065e-06, + "loss": 0.22, + "step": 9084 + }, + { + "epoch": 0.6308151645604777, + "grad_norm": 4.231115437929923, + "learning_rate": 3.1685605008915004e-06, + "loss": 0.3776, + "step": 9085 + }, + { + "epoch": 0.6308845993611998, + "grad_norm": 3.826693861310103, + "learning_rate": 3.1675142100050744e-06, + "loss": 0.3184, + "step": 9086 + }, + { + "epoch": 0.630954034161922, + "grad_norm": 3.663681085753831, + "learning_rate": 3.166468011803704e-06, + "loss": 0.4207, + "step": 9087 + }, + { + "epoch": 0.631023468962644, + "grad_norm": 3.375147307648443, + "learning_rate": 3.1654219063403076e-06, + "loss": 0.3182, + "step": 9088 + }, + { + "epoch": 0.6310929037633662, + "grad_norm": 3.736604715097471, + "learning_rate": 3.164375893667794e-06, + "loss": 0.3128, + "step": 9089 + }, + { + "epoch": 0.6311623385640883, + "grad_norm": 3.73312822649804, + "learning_rate": 3.1633299738390726e-06, + "loss": 0.3098, + 
"step": 9090 + }, + { + "epoch": 0.6312317733648104, + "grad_norm": 2.7690171891500723, + "learning_rate": 3.162284146907042e-06, + "loss": 0.3444, + "step": 9091 + }, + { + "epoch": 0.6313012081655326, + "grad_norm": 4.18514692708291, + "learning_rate": 3.1612384129245986e-06, + "loss": 0.562, + "step": 9092 + }, + { + "epoch": 0.6313706429662547, + "grad_norm": 3.382995159227156, + "learning_rate": 3.1601927719446357e-06, + "loss": 0.367, + "step": 9093 + }, + { + "epoch": 0.6314400777669769, + "grad_norm": 3.1906500266927265, + "learning_rate": 3.1591472240200427e-06, + "loss": 0.2477, + "step": 9094 + }, + { + "epoch": 0.6315095125676989, + "grad_norm": 4.416880471462263, + "learning_rate": 3.1581017692036986e-06, + "loss": 0.5047, + "step": 9095 + }, + { + "epoch": 0.631578947368421, + "grad_norm": 2.893121131467638, + "learning_rate": 3.1570564075484824e-06, + "loss": 0.3209, + "step": 9096 + }, + { + "epoch": 0.6316483821691432, + "grad_norm": 3.8434697881373787, + "learning_rate": 3.156011139107269e-06, + "loss": 0.5578, + "step": 9097 + }, + { + "epoch": 0.6317178169698653, + "grad_norm": 4.863409223416476, + "learning_rate": 3.1549659639329256e-06, + "loss": 0.6212, + "step": 9098 + }, + { + "epoch": 0.6317872517705874, + "grad_norm": 3.3468939151848414, + "learning_rate": 3.153920882078314e-06, + "loss": 0.5158, + "step": 9099 + }, + { + "epoch": 0.6318566865713096, + "grad_norm": 3.7067201692810814, + "learning_rate": 3.152875893596296e-06, + "loss": 0.4897, + "step": 9100 + }, + { + "epoch": 0.6319261213720316, + "grad_norm": 3.1456197258398535, + "learning_rate": 3.1518309985397265e-06, + "loss": 0.3053, + "step": 9101 + }, + { + "epoch": 0.6319955561727538, + "grad_norm": 3.9198227091925153, + "learning_rate": 3.1507861969614517e-06, + "loss": 0.4363, + "step": 9102 + }, + { + "epoch": 0.6320649909734759, + "grad_norm": 5.044639932319711, + "learning_rate": 3.149741488914319e-06, + "loss": 0.5066, + "step": 9103 + }, + { + "epoch": 0.632134425774198, + "grad_norm": 3.805464765361104, + "learning_rate": 3.14869687445117e-06, + "loss": 0.3658, + "step": 9104 + }, + { + "epoch": 0.6322038605749202, + "grad_norm": 4.527840748366383, + "learning_rate": 3.1476523536248365e-06, + "loss": 0.4906, + "step": 9105 + }, + { + "epoch": 0.6322732953756423, + "grad_norm": 3.700763323443944, + "learning_rate": 3.1466079264881504e-06, + "loss": 0.4415, + "step": 9106 + }, + { + "epoch": 0.6323427301763644, + "grad_norm": 3.7476152299160486, + "learning_rate": 3.145563593093939e-06, + "loss": 0.3267, + "step": 9107 + }, + { + "epoch": 0.6324121649770865, + "grad_norm": 4.405255047935883, + "learning_rate": 3.1445193534950238e-06, + "loss": 0.4081, + "step": 9108 + }, + { + "epoch": 0.6324815997778086, + "grad_norm": 3.126115439796439, + "learning_rate": 3.1434752077442176e-06, + "loss": 0.2721, + "step": 9109 + }, + { + "epoch": 0.6325510345785308, + "grad_norm": 3.9276357009718357, + "learning_rate": 3.142431155894336e-06, + "loss": 0.3816, + "step": 9110 + }, + { + "epoch": 0.6326204693792529, + "grad_norm": 3.5783861664470953, + "learning_rate": 3.1413871979981857e-06, + "loss": 0.4196, + "step": 9111 + }, + { + "epoch": 0.632689904179975, + "grad_norm": 3.6296268359062283, + "learning_rate": 3.1403433341085656e-06, + "loss": 0.422, + "step": 9112 + }, + { + "epoch": 0.6327593389806971, + "grad_norm": 3.2728292597054423, + "learning_rate": 3.139299564278275e-06, + "loss": 0.285, + "step": 9113 + }, + { + "epoch": 0.6328287737814192, + "grad_norm": 4.198386314155632, + "learning_rate": 
3.1382558885601083e-06, + "loss": 0.537, + "step": 9114 + }, + { + "epoch": 0.6328982085821414, + "grad_norm": 3.8199848362621505, + "learning_rate": 3.1372123070068526e-06, + "loss": 0.3613, + "step": 9115 + }, + { + "epoch": 0.6329676433828635, + "grad_norm": 4.088662462855837, + "learning_rate": 3.136168819671289e-06, + "loss": 0.4417, + "step": 9116 + }, + { + "epoch": 0.6330370781835856, + "grad_norm": 2.890919771929855, + "learning_rate": 3.135125426606198e-06, + "loss": 0.2857, + "step": 9117 + }, + { + "epoch": 0.6331065129843078, + "grad_norm": 3.3794837237678936, + "learning_rate": 3.1340821278643517e-06, + "loss": 0.4171, + "step": 9118 + }, + { + "epoch": 0.6331759477850298, + "grad_norm": 4.197204846182557, + "learning_rate": 3.1330389234985227e-06, + "loss": 0.4376, + "step": 9119 + }, + { + "epoch": 0.633245382585752, + "grad_norm": 3.713106767913252, + "learning_rate": 3.131995813561471e-06, + "loss": 0.3739, + "step": 9120 + }, + { + "epoch": 0.6333148173864741, + "grad_norm": 3.7582068089545384, + "learning_rate": 3.130952798105956e-06, + "loss": 0.3419, + "step": 9121 + }, + { + "epoch": 0.6333842521871962, + "grad_norm": 3.687507563916093, + "learning_rate": 3.129909877184736e-06, + "loss": 0.3733, + "step": 9122 + }, + { + "epoch": 0.6334536869879184, + "grad_norm": 3.8300578761503767, + "learning_rate": 3.1288670508505553e-06, + "loss": 0.2964, + "step": 9123 + }, + { + "epoch": 0.6335231217886405, + "grad_norm": 4.511827133565723, + "learning_rate": 3.1278243191561634e-06, + "loss": 0.6327, + "step": 9124 + }, + { + "epoch": 0.6335925565893626, + "grad_norm": 4.4568938968699925, + "learning_rate": 3.1267816821542978e-06, + "loss": 0.4722, + "step": 9125 + }, + { + "epoch": 0.6336619913900847, + "grad_norm": 3.2337831406696464, + "learning_rate": 3.1257391398976976e-06, + "loss": 0.4047, + "step": 9126 + }, + { + "epoch": 0.6337314261908068, + "grad_norm": 3.3788951686237114, + "learning_rate": 3.1246966924390894e-06, + "loss": 0.3955, + "step": 9127 + }, + { + "epoch": 0.633800860991529, + "grad_norm": 3.605748428797732, + "learning_rate": 3.1236543398311993e-06, + "loss": 0.5181, + "step": 9128 + }, + { + "epoch": 0.6338702957922511, + "grad_norm": 5.3409707596416895, + "learning_rate": 3.1226120821267512e-06, + "loss": 0.5262, + "step": 9129 + }, + { + "epoch": 0.6339397305929731, + "grad_norm": 3.5995351170178984, + "learning_rate": 3.121569919378459e-06, + "loss": 0.3828, + "step": 9130 + }, + { + "epoch": 0.6340091653936953, + "grad_norm": 4.166939013846498, + "learning_rate": 3.1205278516390338e-06, + "loss": 0.5369, + "step": 9131 + }, + { + "epoch": 0.6340786001944174, + "grad_norm": 2.535761624502143, + "learning_rate": 3.1194858789611827e-06, + "loss": 0.1961, + "step": 9132 + }, + { + "epoch": 0.6341480349951396, + "grad_norm": 4.007329306605491, + "learning_rate": 3.1184440013976103e-06, + "loss": 0.2878, + "step": 9133 + }, + { + "epoch": 0.6342174697958617, + "grad_norm": 5.514203966393208, + "learning_rate": 3.11740221900101e-06, + "loss": 0.577, + "step": 9134 + }, + { + "epoch": 0.6342869045965838, + "grad_norm": 4.604426319202341, + "learning_rate": 3.116360531824074e-06, + "loss": 0.4144, + "step": 9135 + }, + { + "epoch": 0.634356339397306, + "grad_norm": 2.7595258394355575, + "learning_rate": 3.1153189399194935e-06, + "loss": 0.2687, + "step": 9136 + }, + { + "epoch": 0.634425774198028, + "grad_norm": 4.085625996009995, + "learning_rate": 3.114277443339947e-06, + "loss": 0.5095, + "step": 9137 + }, + { + "epoch": 0.6344952089987502, + 
"grad_norm": 3.955963767938622, + "learning_rate": 3.1132360421381123e-06, + "loss": 0.5686, + "step": 9138 + }, + { + "epoch": 0.6345646437994723, + "grad_norm": 3.3987516402706883, + "learning_rate": 3.112194736366665e-06, + "loss": 0.3147, + "step": 9139 + }, + { + "epoch": 0.6346340786001944, + "grad_norm": 3.848326083806358, + "learning_rate": 3.1111535260782723e-06, + "loss": 0.4984, + "step": 9140 + }, + { + "epoch": 0.6347035134009166, + "grad_norm": 3.4941473582728078, + "learning_rate": 3.1101124113255947e-06, + "loss": 0.2504, + "step": 9141 + }, + { + "epoch": 0.6347729482016387, + "grad_norm": 3.486891948914801, + "learning_rate": 3.1090713921612937e-06, + "loss": 0.3349, + "step": 9142 + }, + { + "epoch": 0.6348423830023607, + "grad_norm": 3.212744681813171, + "learning_rate": 3.1080304686380218e-06, + "loss": 0.3667, + "step": 9143 + }, + { + "epoch": 0.6349118178030829, + "grad_norm": 3.465894362717034, + "learning_rate": 3.10698964080843e-06, + "loss": 0.401, + "step": 9144 + }, + { + "epoch": 0.634981252603805, + "grad_norm": 4.842518361655951, + "learning_rate": 3.105948908725157e-06, + "loss": 0.5998, + "step": 9145 + }, + { + "epoch": 0.6350506874045272, + "grad_norm": 2.9034744565300685, + "learning_rate": 3.1049082724408464e-06, + "loss": 0.2476, + "step": 9146 + }, + { + "epoch": 0.6351201222052493, + "grad_norm": 3.977694882727863, + "learning_rate": 3.1038677320081322e-06, + "loss": 0.3819, + "step": 9147 + }, + { + "epoch": 0.6351895570059714, + "grad_norm": 3.237019929530829, + "learning_rate": 3.1028272874796406e-06, + "loss": 0.2966, + "step": 9148 + }, + { + "epoch": 0.6352589918066935, + "grad_norm": 3.277904586955306, + "learning_rate": 3.1017869389079985e-06, + "loss": 0.2749, + "step": 9149 + }, + { + "epoch": 0.6353284266074156, + "grad_norm": 3.026511340443682, + "learning_rate": 3.100746686345824e-06, + "loss": 0.3198, + "step": 9150 + }, + { + "epoch": 0.6353978614081378, + "grad_norm": 4.367265916985372, + "learning_rate": 3.0997065298457353e-06, + "loss": 0.6081, + "step": 9151 + }, + { + "epoch": 0.6354672962088599, + "grad_norm": 4.566468567971123, + "learning_rate": 3.098666469460339e-06, + "loss": 0.4312, + "step": 9152 + }, + { + "epoch": 0.635536731009582, + "grad_norm": 4.217257836991581, + "learning_rate": 3.09762650524224e-06, + "loss": 0.5185, + "step": 9153 + }, + { + "epoch": 0.6356061658103042, + "grad_norm": 2.3964626680034242, + "learning_rate": 3.096586637244042e-06, + "loss": 0.1905, + "step": 9154 + }, + { + "epoch": 0.6356756006110262, + "grad_norm": 3.8074629842982923, + "learning_rate": 3.095546865518336e-06, + "loss": 0.4193, + "step": 9155 + }, + { + "epoch": 0.6357450354117483, + "grad_norm": 4.827086755163089, + "learning_rate": 3.094507190117715e-06, + "loss": 0.4293, + "step": 9156 + }, + { + "epoch": 0.6358144702124705, + "grad_norm": 2.990164308735573, + "learning_rate": 3.0934676110947635e-06, + "loss": 0.2474, + "step": 9157 + }, + { + "epoch": 0.6358839050131926, + "grad_norm": 4.471027081612553, + "learning_rate": 3.0924281285020646e-06, + "loss": 0.6616, + "step": 9158 + }, + { + "epoch": 0.6359533398139148, + "grad_norm": 4.0299693091815225, + "learning_rate": 3.0913887423921917e-06, + "loss": 0.5341, + "step": 9159 + }, + { + "epoch": 0.6360227746146369, + "grad_norm": 4.348721162546524, + "learning_rate": 3.090349452817715e-06, + "loss": 0.6003, + "step": 9160 + }, + { + "epoch": 0.6360922094153589, + "grad_norm": 3.82363678430812, + "learning_rate": 3.0893102598312047e-06, + "loss": 0.5168, + "step": 9161 + }, + 
{ + "epoch": 0.6361616442160811, + "grad_norm": 3.594729607567887, + "learning_rate": 3.088271163485218e-06, + "loss": 0.5112, + "step": 9162 + }, + { + "epoch": 0.6362310790168032, + "grad_norm": 2.835408198276321, + "learning_rate": 3.0872321638323117e-06, + "loss": 0.1965, + "step": 9163 + }, + { + "epoch": 0.6363005138175254, + "grad_norm": 3.885422020306639, + "learning_rate": 3.086193260925038e-06, + "loss": 0.3341, + "step": 9164 + }, + { + "epoch": 0.6363699486182475, + "grad_norm": 3.504304409267629, + "learning_rate": 3.085154454815945e-06, + "loss": 0.3923, + "step": 9165 + }, + { + "epoch": 0.6364393834189696, + "grad_norm": 3.645453873153388, + "learning_rate": 3.084115745557573e-06, + "loss": 0.3383, + "step": 9166 + }, + { + "epoch": 0.6365088182196917, + "grad_norm": 4.409696684021182, + "learning_rate": 3.0830771332024567e-06, + "loss": 0.594, + "step": 9167 + }, + { + "epoch": 0.6365782530204138, + "grad_norm": 2.963163384210168, + "learning_rate": 3.0820386178031313e-06, + "loss": 0.3585, + "step": 9168 + }, + { + "epoch": 0.6366476878211359, + "grad_norm": 4.117616242330566, + "learning_rate": 3.0810001994121237e-06, + "loss": 0.3832, + "step": 9169 + }, + { + "epoch": 0.6367171226218581, + "grad_norm": 2.9640720695834277, + "learning_rate": 3.079961878081952e-06, + "loss": 0.2228, + "step": 9170 + }, + { + "epoch": 0.6367865574225802, + "grad_norm": 3.386964593622585, + "learning_rate": 3.078923653865137e-06, + "loss": 0.2889, + "step": 9171 + }, + { + "epoch": 0.6368559922233024, + "grad_norm": 5.3027892092155255, + "learning_rate": 3.0778855268141905e-06, + "loss": 0.6608, + "step": 9172 + }, + { + "epoch": 0.6369254270240244, + "grad_norm": 4.036928537132345, + "learning_rate": 3.076847496981617e-06, + "loss": 0.5347, + "step": 9173 + }, + { + "epoch": 0.6369948618247465, + "grad_norm": 4.362397885665151, + "learning_rate": 3.0758095644199215e-06, + "loss": 0.5091, + "step": 9174 + }, + { + "epoch": 0.6370642966254687, + "grad_norm": 4.599330372845791, + "learning_rate": 3.0747717291816013e-06, + "loss": 0.5214, + "step": 9175 + }, + { + "epoch": 0.6371337314261908, + "grad_norm": 3.7927700716358315, + "learning_rate": 3.0737339913191494e-06, + "loss": 0.4355, + "step": 9176 + }, + { + "epoch": 0.637203166226913, + "grad_norm": 4.059265231002045, + "learning_rate": 3.0726963508850507e-06, + "loss": 0.6361, + "step": 9177 + }, + { + "epoch": 0.637272601027635, + "grad_norm": 4.205293404857485, + "learning_rate": 3.0716588079317912e-06, + "loss": 0.558, + "step": 9178 + }, + { + "epoch": 0.6373420358283571, + "grad_norm": 3.48428331349899, + "learning_rate": 3.0706213625118477e-06, + "loss": 0.4593, + "step": 9179 + }, + { + "epoch": 0.6374114706290793, + "grad_norm": 3.51581763091995, + "learning_rate": 3.06958401467769e-06, + "loss": 0.3077, + "step": 9180 + }, + { + "epoch": 0.6374809054298014, + "grad_norm": 5.095879932939914, + "learning_rate": 3.06854676448179e-06, + "loss": 0.4579, + "step": 9181 + }, + { + "epoch": 0.6375503402305236, + "grad_norm": 3.8311189047380445, + "learning_rate": 3.067509611976608e-06, + "loss": 0.444, + "step": 9182 + }, + { + "epoch": 0.6376197750312457, + "grad_norm": 3.190544033935898, + "learning_rate": 3.0664725572146048e-06, + "loss": 0.4386, + "step": 9183 + }, + { + "epoch": 0.6376892098319678, + "grad_norm": 5.109490575221364, + "learning_rate": 3.06543560024823e-06, + "loss": 0.3498, + "step": 9184 + }, + { + "epoch": 0.6377586446326899, + "grad_norm": 4.639418743248024, + "learning_rate": 3.064398741129934e-06, + "loss": 
0.7355, + "step": 9185 + }, + { + "epoch": 0.637828079433412, + "grad_norm": 4.491716608473725, + "learning_rate": 3.0633619799121605e-06, + "loss": 0.4238, + "step": 9186 + }, + { + "epoch": 0.6378975142341341, + "grad_norm": 4.083749807547369, + "learning_rate": 3.0623253166473447e-06, + "loss": 0.5447, + "step": 9187 + }, + { + "epoch": 0.6379669490348563, + "grad_norm": 4.58828917576333, + "learning_rate": 3.061288751387923e-06, + "loss": 0.6826, + "step": 9188 + }, + { + "epoch": 0.6380363838355784, + "grad_norm": 2.1574993530422284, + "learning_rate": 3.0602522841863213e-06, + "loss": 0.1673, + "step": 9189 + }, + { + "epoch": 0.6381058186363006, + "grad_norm": 3.095599145691764, + "learning_rate": 3.0592159150949665e-06, + "loss": 0.3611, + "step": 9190 + }, + { + "epoch": 0.6381752534370226, + "grad_norm": 3.06301454519742, + "learning_rate": 3.0581796441662726e-06, + "loss": 0.2606, + "step": 9191 + }, + { + "epoch": 0.6382446882377447, + "grad_norm": 3.7398299833285473, + "learning_rate": 3.057143471452655e-06, + "loss": 0.5205, + "step": 9192 + }, + { + "epoch": 0.6383141230384669, + "grad_norm": 4.379182693150694, + "learning_rate": 3.056107397006521e-06, + "loss": 0.6175, + "step": 9193 + }, + { + "epoch": 0.638383557839189, + "grad_norm": 3.50329170915092, + "learning_rate": 3.0550714208802784e-06, + "loss": 0.3814, + "step": 9194 + }, + { + "epoch": 0.6384529926399112, + "grad_norm": 3.986684558682219, + "learning_rate": 3.054035543126321e-06, + "loss": 0.5117, + "step": 9195 + }, + { + "epoch": 0.6385224274406333, + "grad_norm": 4.538989947927328, + "learning_rate": 3.0529997637970427e-06, + "loss": 0.5644, + "step": 9196 + }, + { + "epoch": 0.6385918622413553, + "grad_norm": 3.85732703851041, + "learning_rate": 3.051964082944835e-06, + "loss": 0.3692, + "step": 9197 + }, + { + "epoch": 0.6386612970420775, + "grad_norm": 4.0775988933878375, + "learning_rate": 3.050928500622079e-06, + "loss": 0.3508, + "step": 9198 + }, + { + "epoch": 0.6387307318427996, + "grad_norm": 3.190283741040305, + "learning_rate": 3.049893016881153e-06, + "loss": 0.2838, + "step": 9199 + }, + { + "epoch": 0.6388001666435217, + "grad_norm": 3.581947921733446, + "learning_rate": 3.0488576317744324e-06, + "loss": 0.4977, + "step": 9200 + }, + { + "epoch": 0.6388696014442439, + "grad_norm": 3.2675839920562297, + "learning_rate": 3.0478223453542854e-06, + "loss": 0.2496, + "step": 9201 + }, + { + "epoch": 0.638939036244966, + "grad_norm": 4.085595421299331, + "learning_rate": 3.0467871576730732e-06, + "loss": 0.3809, + "step": 9202 + }, + { + "epoch": 0.6390084710456881, + "grad_norm": 4.000723923632379, + "learning_rate": 3.0457520687831577e-06, + "loss": 0.4013, + "step": 9203 + }, + { + "epoch": 0.6390779058464102, + "grad_norm": 4.265001820942161, + "learning_rate": 3.0447170787368917e-06, + "loss": 0.5814, + "step": 9204 + }, + { + "epoch": 0.6391473406471323, + "grad_norm": 5.050124054977223, + "learning_rate": 3.043682187586623e-06, + "loss": 0.7794, + "step": 9205 + }, + { + "epoch": 0.6392167754478545, + "grad_norm": 3.1133868284628488, + "learning_rate": 3.042647395384695e-06, + "loss": 0.5265, + "step": 9206 + }, + { + "epoch": 0.6392862102485766, + "grad_norm": 2.7650399210438312, + "learning_rate": 3.041612702183446e-06, + "loss": 0.212, + "step": 9207 + }, + { + "epoch": 0.6393556450492988, + "grad_norm": 3.8289477239861816, + "learning_rate": 3.0405781080352132e-06, + "loss": 0.4708, + "step": 9208 + }, + { + "epoch": 0.6394250798500208, + "grad_norm": 3.0974020931115938, + 
"learning_rate": 3.0395436129923195e-06, + "loss": 0.2894, + "step": 9209 + }, + { + "epoch": 0.6394945146507429, + "grad_norm": 4.167026765069896, + "learning_rate": 3.038509217107093e-06, + "loss": 0.6079, + "step": 9210 + }, + { + "epoch": 0.6395639494514651, + "grad_norm": 4.547077902789774, + "learning_rate": 3.037474920431852e-06, + "loss": 0.4101, + "step": 9211 + }, + { + "epoch": 0.6396333842521872, + "grad_norm": 3.9678593277322007, + "learning_rate": 3.0364407230189065e-06, + "loss": 0.3608, + "step": 9212 + }, + { + "epoch": 0.6397028190529093, + "grad_norm": 3.84118401822579, + "learning_rate": 3.0354066249205665e-06, + "loss": 0.4282, + "step": 9213 + }, + { + "epoch": 0.6397722538536315, + "grad_norm": 2.989267316218682, + "learning_rate": 3.0343726261891384e-06, + "loss": 0.2451, + "step": 9214 + }, + { + "epoch": 0.6398416886543535, + "grad_norm": 2.872453265565878, + "learning_rate": 3.0333387268769187e-06, + "loss": 0.3888, + "step": 9215 + }, + { + "epoch": 0.6399111234550757, + "grad_norm": 3.857653654404107, + "learning_rate": 3.032304927036199e-06, + "loss": 0.4433, + "step": 9216 + }, + { + "epoch": 0.6399805582557978, + "grad_norm": 4.0454267085239035, + "learning_rate": 3.0312712267192713e-06, + "loss": 0.5639, + "step": 9217 + }, + { + "epoch": 0.6400499930565199, + "grad_norm": 3.9770261557645985, + "learning_rate": 3.0302376259784156e-06, + "loss": 0.5282, + "step": 9218 + }, + { + "epoch": 0.6401194278572421, + "grad_norm": 4.087713146463761, + "learning_rate": 3.0292041248659136e-06, + "loss": 0.4148, + "step": 9219 + }, + { + "epoch": 0.6401888626579642, + "grad_norm": 4.526583811865199, + "learning_rate": 3.0281707234340365e-06, + "loss": 0.5451, + "step": 9220 + }, + { + "epoch": 0.6402582974586863, + "grad_norm": 3.153833275766328, + "learning_rate": 3.0271374217350513e-06, + "loss": 0.2717, + "step": 9221 + }, + { + "epoch": 0.6403277322594084, + "grad_norm": 3.0581615320885907, + "learning_rate": 3.0261042198212254e-06, + "loss": 0.2514, + "step": 9222 + }, + { + "epoch": 0.6403971670601305, + "grad_norm": 3.4754140996312417, + "learning_rate": 3.0250711177448124e-06, + "loss": 0.2466, + "step": 9223 + }, + { + "epoch": 0.6404666018608527, + "grad_norm": 2.824554805909999, + "learning_rate": 3.0240381155580677e-06, + "loss": 0.2196, + "step": 9224 + }, + { + "epoch": 0.6405360366615748, + "grad_norm": 2.759779438842844, + "learning_rate": 3.0230052133132394e-06, + "loss": 0.2793, + "step": 9225 + }, + { + "epoch": 0.6406054714622968, + "grad_norm": 4.148619318996465, + "learning_rate": 3.0219724110625716e-06, + "loss": 0.6206, + "step": 9226 + }, + { + "epoch": 0.640674906263019, + "grad_norm": 4.032413160928527, + "learning_rate": 3.020939708858299e-06, + "loss": 0.4216, + "step": 9227 + }, + { + "epoch": 0.6407443410637411, + "grad_norm": 3.525562155733383, + "learning_rate": 3.0199071067526568e-06, + "loss": 0.2415, + "step": 9228 + }, + { + "epoch": 0.6408137758644633, + "grad_norm": 3.546115546267027, + "learning_rate": 3.0188746047978743e-06, + "loss": 0.385, + "step": 9229 + }, + { + "epoch": 0.6408832106651854, + "grad_norm": 3.6978198108449374, + "learning_rate": 3.0178422030461714e-06, + "loss": 0.44, + "step": 9230 + }, + { + "epoch": 0.6409526454659075, + "grad_norm": 3.958845986852517, + "learning_rate": 3.0168099015497653e-06, + "loss": 0.4344, + "step": 9231 + }, + { + "epoch": 0.6410220802666297, + "grad_norm": 2.829624068401706, + "learning_rate": 3.0157777003608724e-06, + "loss": 0.3333, + "step": 9232 + }, + { + "epoch": 
0.6410915150673517, + "grad_norm": 4.110706734502829, + "learning_rate": 3.0147455995316983e-06, + "loss": 0.4729, + "step": 9233 + }, + { + "epoch": 0.6411609498680739, + "grad_norm": 4.594615858764353, + "learning_rate": 3.013713599114444e-06, + "loss": 0.6442, + "step": 9234 + }, + { + "epoch": 0.641230384668796, + "grad_norm": 4.20527620229064, + "learning_rate": 3.0126816991613074e-06, + "loss": 0.5115, + "step": 9235 + }, + { + "epoch": 0.6412998194695181, + "grad_norm": 2.8735100264985896, + "learning_rate": 3.0116498997244837e-06, + "loss": 0.1481, + "step": 9236 + }, + { + "epoch": 0.6413692542702403, + "grad_norm": 3.419586929280404, + "learning_rate": 3.010618200856158e-06, + "loss": 0.2749, + "step": 9237 + }, + { + "epoch": 0.6414386890709624, + "grad_norm": 4.403665044999153, + "learning_rate": 3.009586602608511e-06, + "loss": 0.3478, + "step": 9238 + }, + { + "epoch": 0.6415081238716845, + "grad_norm": 5.149912788284767, + "learning_rate": 3.008555105033722e-06, + "loss": 0.5213, + "step": 9239 + }, + { + "epoch": 0.6415775586724066, + "grad_norm": 2.623899093127052, + "learning_rate": 3.0075237081839637e-06, + "loss": 0.105, + "step": 9240 + }, + { + "epoch": 0.6416469934731287, + "grad_norm": 4.4069661978926895, + "learning_rate": 3.006492412111399e-06, + "loss": 0.4818, + "step": 9241 + }, + { + "epoch": 0.6417164282738509, + "grad_norm": 4.040430397226186, + "learning_rate": 3.005461216868194e-06, + "loss": 0.2917, + "step": 9242 + }, + { + "epoch": 0.641785863074573, + "grad_norm": 2.600201974593412, + "learning_rate": 3.0044301225065026e-06, + "loss": 0.2026, + "step": 9243 + }, + { + "epoch": 0.641855297875295, + "grad_norm": 3.6272162738628633, + "learning_rate": 3.0033991290784793e-06, + "loss": 0.5295, + "step": 9244 + }, + { + "epoch": 0.6419247326760172, + "grad_norm": 3.5671470684859696, + "learning_rate": 3.002368236636267e-06, + "loss": 0.1925, + "step": 9245 + }, + { + "epoch": 0.6419941674767393, + "grad_norm": 3.9941066726524665, + "learning_rate": 3.0013374452320093e-06, + "loss": 0.3482, + "step": 9246 + }, + { + "epoch": 0.6420636022774615, + "grad_norm": 3.921454590055716, + "learning_rate": 3.0003067549178432e-06, + "loss": 0.4234, + "step": 9247 + }, + { + "epoch": 0.6421330370781836, + "grad_norm": 2.6385570502835973, + "learning_rate": 2.999276165745897e-06, + "loss": 0.1885, + "step": 9248 + }, + { + "epoch": 0.6422024718789057, + "grad_norm": 3.8665810016721767, + "learning_rate": 2.998245677768299e-06, + "loss": 0.3181, + "step": 9249 + }, + { + "epoch": 0.6422719066796279, + "grad_norm": 3.715810186000461, + "learning_rate": 2.9972152910371686e-06, + "loss": 0.4123, + "step": 9250 + }, + { + "epoch": 0.6423413414803499, + "grad_norm": 4.305073678726027, + "learning_rate": 2.9961850056046243e-06, + "loss": 0.4204, + "step": 9251 + }, + { + "epoch": 0.6424107762810721, + "grad_norm": 3.644948504333781, + "learning_rate": 2.9951548215227744e-06, + "loss": 0.4583, + "step": 9252 + }, + { + "epoch": 0.6424802110817942, + "grad_norm": 3.1164302782202307, + "learning_rate": 2.994124738843723e-06, + "loss": 0.1733, + "step": 9253 + }, + { + "epoch": 0.6425496458825163, + "grad_norm": 3.2753664899275563, + "learning_rate": 2.9930947576195755e-06, + "loss": 0.3275, + "step": 9254 + }, + { + "epoch": 0.6426190806832385, + "grad_norm": 4.629362178816517, + "learning_rate": 2.9920648779024218e-06, + "loss": 0.4944, + "step": 9255 + }, + { + "epoch": 0.6426885154839606, + "grad_norm": 4.090489019122891, + "learning_rate": 2.991035099744356e-06, + "loss": 
0.4967, + "step": 9256 + }, + { + "epoch": 0.6427579502846826, + "grad_norm": 5.338297088656182, + "learning_rate": 2.9900054231974595e-06, + "loss": 0.5633, + "step": 9257 + }, + { + "epoch": 0.6428273850854048, + "grad_norm": 3.090780318212311, + "learning_rate": 2.988975848313817e-06, + "loss": 0.2025, + "step": 9258 + }, + { + "epoch": 0.6428968198861269, + "grad_norm": 2.6801072044715806, + "learning_rate": 2.9879463751454995e-06, + "loss": 0.2895, + "step": 9259 + }, + { + "epoch": 0.6429662546868491, + "grad_norm": 3.8634603642899563, + "learning_rate": 2.986917003744576e-06, + "loss": 0.767, + "step": 9260 + }, + { + "epoch": 0.6430356894875712, + "grad_norm": 4.203461797468175, + "learning_rate": 2.9858877341631147e-06, + "loss": 0.3959, + "step": 9261 + }, + { + "epoch": 0.6431051242882933, + "grad_norm": 3.242818750460865, + "learning_rate": 2.984858566453172e-06, + "loss": 0.2447, + "step": 9262 + }, + { + "epoch": 0.6431745590890154, + "grad_norm": 4.212255819282311, + "learning_rate": 2.9838295006668017e-06, + "loss": 0.5383, + "step": 9263 + }, + { + "epoch": 0.6432439938897375, + "grad_norm": 4.777169198005545, + "learning_rate": 2.982800536856053e-06, + "loss": 0.5767, + "step": 9264 + }, + { + "epoch": 0.6433134286904597, + "grad_norm": 4.440459526544951, + "learning_rate": 2.9817716750729734e-06, + "loss": 0.5095, + "step": 9265 + }, + { + "epoch": 0.6433828634911818, + "grad_norm": 4.3443924943978605, + "learning_rate": 2.9807429153695978e-06, + "loss": 0.5769, + "step": 9266 + }, + { + "epoch": 0.6434522982919039, + "grad_norm": 5.774244938021644, + "learning_rate": 2.979714257797959e-06, + "loss": 0.5874, + "step": 9267 + }, + { + "epoch": 0.6435217330926261, + "grad_norm": 4.141338818881184, + "learning_rate": 2.978685702410088e-06, + "loss": 0.3195, + "step": 9268 + }, + { + "epoch": 0.6435911678933481, + "grad_norm": 4.57017352619534, + "learning_rate": 2.977657249258008e-06, + "loss": 0.2907, + "step": 9269 + }, + { + "epoch": 0.6436606026940702, + "grad_norm": 3.3870542807694655, + "learning_rate": 2.976628898393734e-06, + "loss": 0.2796, + "step": 9270 + }, + { + "epoch": 0.6437300374947924, + "grad_norm": 4.255446700411184, + "learning_rate": 2.975600649869282e-06, + "loss": 0.5319, + "step": 9271 + }, + { + "epoch": 0.6437994722955145, + "grad_norm": 4.777211866562169, + "learning_rate": 2.9745725037366592e-06, + "loss": 0.71, + "step": 9272 + }, + { + "epoch": 0.6438689070962367, + "grad_norm": 4.063096944041227, + "learning_rate": 2.9735444600478645e-06, + "loss": 0.4951, + "step": 9273 + }, + { + "epoch": 0.6439383418969588, + "grad_norm": 3.2525037389842084, + "learning_rate": 2.972516518854899e-06, + "loss": 0.3324, + "step": 9274 + }, + { + "epoch": 0.6440077766976808, + "grad_norm": 4.056170994305566, + "learning_rate": 2.9714886802097544e-06, + "loss": 0.4801, + "step": 9275 + }, + { + "epoch": 0.644077211498403, + "grad_norm": 3.4055820485624655, + "learning_rate": 2.970460944164417e-06, + "loss": 0.3944, + "step": 9276 + }, + { + "epoch": 0.6441466462991251, + "grad_norm": 3.160960310144805, + "learning_rate": 2.969433310770868e-06, + "loss": 0.3766, + "step": 9277 + }, + { + "epoch": 0.6442160810998473, + "grad_norm": 3.645772111136888, + "learning_rate": 2.9684057800810844e-06, + "loss": 0.2753, + "step": 9278 + }, + { + "epoch": 0.6442855159005694, + "grad_norm": 4.0263276900214615, + "learning_rate": 2.967378352147039e-06, + "loss": 0.3478, + "step": 9279 + }, + { + "epoch": 0.6443549507012915, + "grad_norm": 3.752482071975173, + "learning_rate": 
2.9663510270206943e-06, + "loss": 0.467, + "step": 9280 + }, + { + "epoch": 0.6444243855020136, + "grad_norm": 3.6982982992348115, + "learning_rate": 2.9653238047540145e-06, + "loss": 0.6211, + "step": 9281 + }, + { + "epoch": 0.6444938203027357, + "grad_norm": 3.0495770811725325, + "learning_rate": 2.964296685398953e-06, + "loss": 0.2402, + "step": 9282 + }, + { + "epoch": 0.6445632551034578, + "grad_norm": 4.45616646446758, + "learning_rate": 2.9632696690074637e-06, + "loss": 0.5415, + "step": 9283 + }, + { + "epoch": 0.64463268990418, + "grad_norm": 3.6830146353708937, + "learning_rate": 2.962242755631488e-06, + "loss": 0.2941, + "step": 9284 + }, + { + "epoch": 0.6447021247049021, + "grad_norm": 2.9766987603458417, + "learning_rate": 2.9612159453229684e-06, + "loss": 0.1465, + "step": 9285 + }, + { + "epoch": 0.6447715595056243, + "grad_norm": 2.8155170167967447, + "learning_rate": 2.9601892381338403e-06, + "loss": 0.1859, + "step": 9286 + }, + { + "epoch": 0.6448409943063463, + "grad_norm": 4.614336730960642, + "learning_rate": 2.9591626341160306e-06, + "loss": 0.5458, + "step": 9287 + }, + { + "epoch": 0.6449104291070684, + "grad_norm": 4.159466938555124, + "learning_rate": 2.9581361333214666e-06, + "loss": 0.221, + "step": 9288 + }, + { + "epoch": 0.6449798639077906, + "grad_norm": 2.87577471129891, + "learning_rate": 2.9571097358020655e-06, + "loss": 0.2529, + "step": 9289 + }, + { + "epoch": 0.6450492987085127, + "grad_norm": 4.691077294551126, + "learning_rate": 2.9560834416097438e-06, + "loss": 0.4253, + "step": 9290 + }, + { + "epoch": 0.6451187335092349, + "grad_norm": 4.186629536946255, + "learning_rate": 2.9550572507964084e-06, + "loss": 0.5545, + "step": 9291 + }, + { + "epoch": 0.645188168309957, + "grad_norm": 4.088181002105651, + "learning_rate": 2.9540311634139616e-06, + "loss": 0.2277, + "step": 9292 + }, + { + "epoch": 0.645257603110679, + "grad_norm": 3.345797464352814, + "learning_rate": 2.9530051795143046e-06, + "loss": 0.3451, + "step": 9293 + }, + { + "epoch": 0.6453270379114012, + "grad_norm": 3.1945937936846662, + "learning_rate": 2.95197929914933e-06, + "loss": 0.1959, + "step": 9294 + }, + { + "epoch": 0.6453964727121233, + "grad_norm": 3.763238481239324, + "learning_rate": 2.950953522370924e-06, + "loss": 0.3678, + "step": 9295 + }, + { + "epoch": 0.6454659075128454, + "grad_norm": 4.271868434021699, + "learning_rate": 2.9499278492309702e-06, + "loss": 0.6152, + "step": 9296 + }, + { + "epoch": 0.6455353423135676, + "grad_norm": 2.435005242879208, + "learning_rate": 2.948902279781347e-06, + "loss": 0.1579, + "step": 9297 + }, + { + "epoch": 0.6456047771142897, + "grad_norm": 4.3125500864892095, + "learning_rate": 2.947876814073925e-06, + "loss": 0.4421, + "step": 9298 + }, + { + "epoch": 0.6456742119150118, + "grad_norm": 4.339643505583527, + "learning_rate": 2.9468514521605705e-06, + "loss": 0.3355, + "step": 9299 + }, + { + "epoch": 0.6457436467157339, + "grad_norm": 4.0756121044204585, + "learning_rate": 2.945826194093147e-06, + "loss": 0.3912, + "step": 9300 + }, + { + "epoch": 0.645813081516456, + "grad_norm": 3.7548965988748924, + "learning_rate": 2.9448010399235118e-06, + "loss": 0.4316, + "step": 9301 + }, + { + "epoch": 0.6458825163171782, + "grad_norm": 4.196059796538449, + "learning_rate": 2.9437759897035114e-06, + "loss": 0.4771, + "step": 9302 + }, + { + "epoch": 0.6459519511179003, + "grad_norm": 3.496743051493309, + "learning_rate": 2.9427510434849954e-06, + "loss": 0.2502, + "step": 9303 + }, + { + "epoch": 0.6460213859186225, + "grad_norm": 
3.770622589578973, + "learning_rate": 2.9417262013198055e-06, + "loss": 0.3055, + "step": 9304 + }, + { + "epoch": 0.6460908207193445, + "grad_norm": 2.4486059725104377, + "learning_rate": 2.940701463259774e-06, + "loss": 0.2529, + "step": 9305 + }, + { + "epoch": 0.6461602555200666, + "grad_norm": 3.5994856689681356, + "learning_rate": 2.939676829356732e-06, + "loss": 0.4097, + "step": 9306 + }, + { + "epoch": 0.6462296903207888, + "grad_norm": 4.566875924389935, + "learning_rate": 2.9386522996625045e-06, + "loss": 0.5534, + "step": 9307 + }, + { + "epoch": 0.6462991251215109, + "grad_norm": 3.7398873365120946, + "learning_rate": 2.9376278742289133e-06, + "loss": 0.3693, + "step": 9308 + }, + { + "epoch": 0.6463685599222331, + "grad_norm": 3.6462965957186624, + "learning_rate": 2.9366035531077676e-06, + "loss": 0.4176, + "step": 9309 + }, + { + "epoch": 0.6464379947229552, + "grad_norm": 3.301633085355053, + "learning_rate": 2.93557933635088e-06, + "loss": 0.4995, + "step": 9310 + }, + { + "epoch": 0.6465074295236772, + "grad_norm": 4.66987493322447, + "learning_rate": 2.934555224010055e-06, + "loss": 0.5465, + "step": 9311 + }, + { + "epoch": 0.6465768643243994, + "grad_norm": 3.3879447225887005, + "learning_rate": 2.9335312161370877e-06, + "loss": 0.3307, + "step": 9312 + }, + { + "epoch": 0.6466462991251215, + "grad_norm": 3.157931854386785, + "learning_rate": 2.9325073127837746e-06, + "loss": 0.2664, + "step": 9313 + }, + { + "epoch": 0.6467157339258436, + "grad_norm": 3.5718797988927484, + "learning_rate": 2.9314835140019002e-06, + "loss": 0.3344, + "step": 9314 + }, + { + "epoch": 0.6467851687265658, + "grad_norm": 4.07947656641248, + "learning_rate": 2.930459819843251e-06, + "loss": 0.3712, + "step": 9315 + }, + { + "epoch": 0.6468546035272879, + "grad_norm": 3.324083947054877, + "learning_rate": 2.929436230359601e-06, + "loss": 0.3227, + "step": 9316 + }, + { + "epoch": 0.64692403832801, + "grad_norm": 4.408991516546936, + "learning_rate": 2.928412745602724e-06, + "loss": 0.5036, + "step": 9317 + }, + { + "epoch": 0.6469934731287321, + "grad_norm": 3.1950205194458876, + "learning_rate": 2.927389365624387e-06, + "loss": 0.3897, + "step": 9318 + }, + { + "epoch": 0.6470629079294542, + "grad_norm": 3.699530482376584, + "learning_rate": 2.926366090476349e-06, + "loss": 0.36, + "step": 9319 + }, + { + "epoch": 0.6471323427301764, + "grad_norm": 3.1454626127292244, + "learning_rate": 2.9253429202103683e-06, + "loss": 0.398, + "step": 9320 + }, + { + "epoch": 0.6472017775308985, + "grad_norm": 3.613941857386303, + "learning_rate": 2.9243198548781944e-06, + "loss": 0.2997, + "step": 9321 + }, + { + "epoch": 0.6472712123316207, + "grad_norm": 5.476086439522293, + "learning_rate": 2.9232968945315754e-06, + "loss": 0.6228, + "step": 9322 + }, + { + "epoch": 0.6473406471323427, + "grad_norm": 5.174463313074087, + "learning_rate": 2.9222740392222482e-06, + "loss": 0.5212, + "step": 9323 + }, + { + "epoch": 0.6474100819330648, + "grad_norm": 3.899216200689656, + "learning_rate": 2.921251289001948e-06, + "loss": 0.3755, + "step": 9324 + }, + { + "epoch": 0.647479516733787, + "grad_norm": 3.6275258758170517, + "learning_rate": 2.920228643922405e-06, + "loss": 0.4094, + "step": 9325 + }, + { + "epoch": 0.6475489515345091, + "grad_norm": 3.6260691164197483, + "learning_rate": 2.919206104035348e-06, + "loss": 0.3234, + "step": 9326 + }, + { + "epoch": 0.6476183863352312, + "grad_norm": 3.881638948091992, + "learning_rate": 2.918183669392487e-06, + "loss": 0.3377, + "step": 9327 + }, + { + "epoch": 
0.6476878211359534, + "grad_norm": 3.9760436370029497, + "learning_rate": 2.917161340045542e-06, + "loss": 0.4103, + "step": 9328 + }, + { + "epoch": 0.6477572559366754, + "grad_norm": 4.147155756551901, + "learning_rate": 2.9161391160462217e-06, + "loss": 0.4818, + "step": 9329 + }, + { + "epoch": 0.6478266907373976, + "grad_norm": 3.884510537932424, + "learning_rate": 2.915116997446223e-06, + "loss": 0.3239, + "step": 9330 + }, + { + "epoch": 0.6478961255381197, + "grad_norm": 5.105626892689658, + "learning_rate": 2.9140949842972486e-06, + "loss": 0.456, + "step": 9331 + }, + { + "epoch": 0.6479655603388418, + "grad_norm": 2.6109505187509248, + "learning_rate": 2.913073076650991e-06, + "loss": 0.1872, + "step": 9332 + }, + { + "epoch": 0.648034995139564, + "grad_norm": 5.67933043200784, + "learning_rate": 2.9120512745591346e-06, + "loss": 0.5114, + "step": 9333 + }, + { + "epoch": 0.648104429940286, + "grad_norm": 3.66553518854289, + "learning_rate": 2.911029578073362e-06, + "loss": 0.4519, + "step": 9334 + }, + { + "epoch": 0.6481738647410082, + "grad_norm": 4.354977181623627, + "learning_rate": 2.9100079872453502e-06, + "loss": 0.5249, + "step": 9335 + }, + { + "epoch": 0.6482432995417303, + "grad_norm": 4.56304854028624, + "learning_rate": 2.9089865021267695e-06, + "loss": 0.2835, + "step": 9336 + }, + { + "epoch": 0.6483127343424524, + "grad_norm": 3.8785521605118056, + "learning_rate": 2.9079651227692845e-06, + "loss": 0.4648, + "step": 9337 + }, + { + "epoch": 0.6483821691431746, + "grad_norm": 4.039012143331419, + "learning_rate": 2.9069438492245567e-06, + "loss": 0.3608, + "step": 9338 + }, + { + "epoch": 0.6484516039438967, + "grad_norm": 3.394670340118899, + "learning_rate": 2.9059226815442386e-06, + "loss": 0.3325, + "step": 9339 + }, + { + "epoch": 0.6485210387446187, + "grad_norm": 3.898850774297787, + "learning_rate": 2.9049016197799856e-06, + "loss": 0.2918, + "step": 9340 + }, + { + "epoch": 0.6485904735453409, + "grad_norm": 5.3741973194503085, + "learning_rate": 2.903880663983436e-06, + "loss": 0.3897, + "step": 9341 + }, + { + "epoch": 0.648659908346063, + "grad_norm": 4.331797937415311, + "learning_rate": 2.9028598142062283e-06, + "loss": 0.3659, + "step": 9342 + }, + { + "epoch": 0.6487293431467852, + "grad_norm": 5.110900150942827, + "learning_rate": 2.9018390705000022e-06, + "loss": 0.7566, + "step": 9343 + }, + { + "epoch": 0.6487987779475073, + "grad_norm": 3.458513341315482, + "learning_rate": 2.900818432916378e-06, + "loss": 0.3842, + "step": 9344 + }, + { + "epoch": 0.6488682127482294, + "grad_norm": 4.582739325769734, + "learning_rate": 2.899797901506984e-06, + "loss": 0.5856, + "step": 9345 + }, + { + "epoch": 0.6489376475489516, + "grad_norm": 4.0171285482929235, + "learning_rate": 2.8987774763234354e-06, + "loss": 0.5015, + "step": 9346 + }, + { + "epoch": 0.6490070823496736, + "grad_norm": 3.3826203749936776, + "learning_rate": 2.897757157417344e-06, + "loss": 0.3244, + "step": 9347 + }, + { + "epoch": 0.6490765171503958, + "grad_norm": 3.0735300449725087, + "learning_rate": 2.896736944840317e-06, + "loss": 0.2157, + "step": 9348 + }, + { + "epoch": 0.6491459519511179, + "grad_norm": 3.3004858462260085, + "learning_rate": 2.895716838643956e-06, + "loss": 0.3785, + "step": 9349 + }, + { + "epoch": 0.64921538675184, + "grad_norm": 3.8865975745398527, + "learning_rate": 2.8946968388798557e-06, + "loss": 0.4063, + "step": 9350 + }, + { + "epoch": 0.6492848215525622, + "grad_norm": 3.94926838100648, + "learning_rate": 2.8936769455996066e-06, + "loss": 0.4054, 
+ "step": 9351 + }, + { + "epoch": 0.6493542563532843, + "grad_norm": 4.741055921822281, + "learning_rate": 2.892657158854795e-06, + "loss": 0.5535, + "step": 9352 + }, + { + "epoch": 0.6494236911540063, + "grad_norm": 3.332688508916348, + "learning_rate": 2.891637478696998e-06, + "loss": 0.3072, + "step": 9353 + }, + { + "epoch": 0.6494931259547285, + "grad_norm": 2.917056835295274, + "learning_rate": 2.8906179051777926e-06, + "loss": 0.3087, + "step": 9354 + }, + { + "epoch": 0.6495625607554506, + "grad_norm": 3.5053490858694802, + "learning_rate": 2.8895984383487475e-06, + "loss": 0.387, + "step": 9355 + }, + { + "epoch": 0.6496319955561728, + "grad_norm": 3.805396138769203, + "learning_rate": 2.888579078261422e-06, + "loss": 0.3763, + "step": 9356 + }, + { + "epoch": 0.6497014303568949, + "grad_norm": 3.955696978857148, + "learning_rate": 2.88755982496738e-06, + "loss": 0.3298, + "step": 9357 + }, + { + "epoch": 0.649770865157617, + "grad_norm": 5.121203581236098, + "learning_rate": 2.886540678518174e-06, + "loss": 0.8178, + "step": 9358 + }, + { + "epoch": 0.6498402999583391, + "grad_norm": 3.420584347673197, + "learning_rate": 2.885521638965344e-06, + "loss": 0.2548, + "step": 9359 + }, + { + "epoch": 0.6499097347590612, + "grad_norm": 4.166473809225999, + "learning_rate": 2.884502706360439e-06, + "loss": 0.4677, + "step": 9360 + }, + { + "epoch": 0.6499791695597834, + "grad_norm": 3.128657149499682, + "learning_rate": 2.883483880754996e-06, + "loss": 0.3595, + "step": 9361 + }, + { + "epoch": 0.6500486043605055, + "grad_norm": 2.308489802678098, + "learning_rate": 2.8824651622005397e-06, + "loss": 0.0821, + "step": 9362 + }, + { + "epoch": 0.6501180391612276, + "grad_norm": 4.257329271348108, + "learning_rate": 2.8814465507486e-06, + "loss": 0.4832, + "step": 9363 + }, + { + "epoch": 0.6501874739619498, + "grad_norm": 3.298318149970269, + "learning_rate": 2.880428046450697e-06, + "loss": 0.3627, + "step": 9364 + }, + { + "epoch": 0.6502569087626718, + "grad_norm": 3.051162484634395, + "learning_rate": 2.879409649358345e-06, + "loss": 0.2751, + "step": 9365 + }, + { + "epoch": 0.650326343563394, + "grad_norm": 3.86974315888777, + "learning_rate": 2.8783913595230534e-06, + "loss": 0.3285, + "step": 9366 + }, + { + "epoch": 0.6503957783641161, + "grad_norm": 5.004707192382065, + "learning_rate": 2.8773731769963258e-06, + "loss": 0.526, + "step": 9367 + }, + { + "epoch": 0.6504652131648382, + "grad_norm": 3.2439490702939455, + "learning_rate": 2.8763551018296613e-06, + "loss": 0.2709, + "step": 9368 + }, + { + "epoch": 0.6505346479655604, + "grad_norm": 3.3770258010722896, + "learning_rate": 2.875337134074553e-06, + "loss": 0.403, + "step": 9369 + }, + { + "epoch": 0.6506040827662825, + "grad_norm": 4.6734116230833385, + "learning_rate": 2.874319273782488e-06, + "loss": 0.3776, + "step": 9370 + }, + { + "epoch": 0.6506735175670045, + "grad_norm": 5.937027780975314, + "learning_rate": 2.8733015210049462e-06, + "loss": 0.743, + "step": 9371 + }, + { + "epoch": 0.6507429523677267, + "grad_norm": 3.737889984245252, + "learning_rate": 2.872283875793411e-06, + "loss": 0.4344, + "step": 9372 + }, + { + "epoch": 0.6508123871684488, + "grad_norm": 4.634185624799447, + "learning_rate": 2.871266338199348e-06, + "loss": 0.5887, + "step": 9373 + }, + { + "epoch": 0.650881821969171, + "grad_norm": 4.940839812456883, + "learning_rate": 2.870248908274223e-06, + "loss": 0.4494, + "step": 9374 + }, + { + "epoch": 0.6509512567698931, + "grad_norm": 7.047144938774491, + "learning_rate": 
2.8692315860695e-06, + "loss": 0.8221, + "step": 9375 + }, + { + "epoch": 0.6510206915706152, + "grad_norm": 3.4407492783022375, + "learning_rate": 2.868214371636632e-06, + "loss": 0.3819, + "step": 9376 + }, + { + "epoch": 0.6510901263713373, + "grad_norm": 3.0803540924676724, + "learning_rate": 2.8671972650270695e-06, + "loss": 0.3629, + "step": 9377 + }, + { + "epoch": 0.6511595611720594, + "grad_norm": 4.978125860415443, + "learning_rate": 2.8661802662922556e-06, + "loss": 0.6539, + "step": 9378 + }, + { + "epoch": 0.6512289959727816, + "grad_norm": 4.502976219657025, + "learning_rate": 2.865163375483629e-06, + "loss": 0.505, + "step": 9379 + }, + { + "epoch": 0.6512984307735037, + "grad_norm": 3.9076168663370994, + "learning_rate": 2.8641465926526235e-06, + "loss": 0.2995, + "step": 9380 + }, + { + "epoch": 0.6513678655742258, + "grad_norm": 3.752686275580926, + "learning_rate": 2.8631299178506656e-06, + "loss": 0.4979, + "step": 9381 + }, + { + "epoch": 0.651437300374948, + "grad_norm": 3.2833461122910363, + "learning_rate": 2.8621133511291792e-06, + "loss": 0.2101, + "step": 9382 + }, + { + "epoch": 0.65150673517567, + "grad_norm": 4.614988181049766, + "learning_rate": 2.8610968925395805e-06, + "loss": 0.6809, + "step": 9383 + }, + { + "epoch": 0.6515761699763921, + "grad_norm": 3.271682433424328, + "learning_rate": 2.86008054213328e-06, + "loss": 0.2899, + "step": 9384 + }, + { + "epoch": 0.6516456047771143, + "grad_norm": 4.42284071177736, + "learning_rate": 2.8590642999616823e-06, + "loss": 0.5159, + "step": 9385 + }, + { + "epoch": 0.6517150395778364, + "grad_norm": 5.7427574701124735, + "learning_rate": 2.8580481660761944e-06, + "loss": 0.7401, + "step": 9386 + }, + { + "epoch": 0.6517844743785586, + "grad_norm": 4.245649908746984, + "learning_rate": 2.8570321405282043e-06, + "loss": 0.4425, + "step": 9387 + }, + { + "epoch": 0.6518539091792807, + "grad_norm": 3.2752949081075173, + "learning_rate": 2.856016223369101e-06, + "loss": 0.3203, + "step": 9388 + }, + { + "epoch": 0.6519233439800027, + "grad_norm": 3.6892365387933337, + "learning_rate": 2.855000414650273e-06, + "loss": 0.2599, + "step": 9389 + }, + { + "epoch": 0.6519927787807249, + "grad_norm": 3.3998215643764187, + "learning_rate": 2.8539847144230992e-06, + "loss": 0.4094, + "step": 9390 + }, + { + "epoch": 0.652062213581447, + "grad_norm": 4.155136311122048, + "learning_rate": 2.852969122738946e-06, + "loss": 0.5109, + "step": 9391 + }, + { + "epoch": 0.6521316483821692, + "grad_norm": 3.1716562335203293, + "learning_rate": 2.8519536396491877e-06, + "loss": 0.313, + "step": 9392 + }, + { + "epoch": 0.6522010831828913, + "grad_norm": 4.112345617261162, + "learning_rate": 2.8509382652051843e-06, + "loss": 0.3294, + "step": 9393 + }, + { + "epoch": 0.6522705179836134, + "grad_norm": 4.065197620457973, + "learning_rate": 2.849922999458289e-06, + "loss": 0.4338, + "step": 9394 + }, + { + "epoch": 0.6523399527843355, + "grad_norm": 2.8851051699457133, + "learning_rate": 2.8489078424598565e-06, + "loss": 0.4062, + "step": 9395 + }, + { + "epoch": 0.6524093875850576, + "grad_norm": 4.1469101304846445, + "learning_rate": 2.847892794261231e-06, + "loss": 0.3137, + "step": 9396 + }, + { + "epoch": 0.6524788223857797, + "grad_norm": 6.404058408765919, + "learning_rate": 2.846877854913753e-06, + "loss": 0.689, + "step": 9397 + }, + { + "epoch": 0.6525482571865019, + "grad_norm": 4.072306161301421, + "learning_rate": 2.845863024468757e-06, + "loss": 0.4959, + "step": 9398 + }, + { + "epoch": 0.652617691987224, + "grad_norm": 
4.637139648833932, + "learning_rate": 2.844848302977572e-06, + "loss": 0.5147, + "step": 9399 + }, + { + "epoch": 0.6526871267879462, + "grad_norm": 3.8949063037172493, + "learning_rate": 2.8438336904915186e-06, + "loss": 0.355, + "step": 9400 + }, + { + "epoch": 0.6527565615886682, + "grad_norm": 3.157622893164029, + "learning_rate": 2.8428191870619214e-06, + "loss": 0.3536, + "step": 9401 + }, + { + "epoch": 0.6528259963893903, + "grad_norm": 3.217229612491774, + "learning_rate": 2.841804792740086e-06, + "loss": 0.3362, + "step": 9402 + }, + { + "epoch": 0.6528954311901125, + "grad_norm": 4.614250766195664, + "learning_rate": 2.840790507577321e-06, + "loss": 0.4752, + "step": 9403 + }, + { + "epoch": 0.6529648659908346, + "grad_norm": 4.654944607537033, + "learning_rate": 2.8397763316249326e-06, + "loss": 0.6321, + "step": 9404 + }, + { + "epoch": 0.6530343007915568, + "grad_norm": 9.08052888358467, + "learning_rate": 2.8387622649342085e-06, + "loss": 0.4345, + "step": 9405 + }, + { + "epoch": 0.6531037355922789, + "grad_norm": 3.7585305374610862, + "learning_rate": 2.837748307556446e-06, + "loss": 0.5195, + "step": 9406 + }, + { + "epoch": 0.6531731703930009, + "grad_norm": 4.222379292556262, + "learning_rate": 2.836734459542927e-06, + "loss": 0.3009, + "step": 9407 + }, + { + "epoch": 0.6532426051937231, + "grad_norm": 3.3073759503276037, + "learning_rate": 2.8357207209449313e-06, + "loss": 0.4324, + "step": 9408 + }, + { + "epoch": 0.6533120399944452, + "grad_norm": 4.3155187687353385, + "learning_rate": 2.834707091813732e-06, + "loss": 0.3764, + "step": 9409 + }, + { + "epoch": 0.6533814747951673, + "grad_norm": 4.510995642625837, + "learning_rate": 2.8336935722005978e-06, + "loss": 0.5204, + "step": 9410 + }, + { + "epoch": 0.6534509095958895, + "grad_norm": 4.415632176963448, + "learning_rate": 2.8326801621567916e-06, + "loss": 0.3294, + "step": 9411 + }, + { + "epoch": 0.6535203443966116, + "grad_norm": 5.107058864692759, + "learning_rate": 2.8316668617335707e-06, + "loss": 0.604, + "step": 9412 + }, + { + "epoch": 0.6535897791973337, + "grad_norm": 3.500325629384166, + "learning_rate": 2.8306536709821857e-06, + "loss": 0.2805, + "step": 9413 + }, + { + "epoch": 0.6536592139980558, + "grad_norm": 3.939608135289932, + "learning_rate": 2.8296405899538816e-06, + "loss": 0.3309, + "step": 9414 + }, + { + "epoch": 0.6537286487987779, + "grad_norm": 3.371634977330208, + "learning_rate": 2.8286276186999045e-06, + "loss": 0.3919, + "step": 9415 + }, + { + "epoch": 0.6537980835995001, + "grad_norm": 4.065187481058551, + "learning_rate": 2.8276147572714837e-06, + "loss": 0.4225, + "step": 9416 + }, + { + "epoch": 0.6538675184002222, + "grad_norm": 4.5360742353141745, + "learning_rate": 2.8266020057198473e-06, + "loss": 0.5915, + "step": 9417 + }, + { + "epoch": 0.6539369532009444, + "grad_norm": 3.961525376429098, + "learning_rate": 2.8255893640962272e-06, + "loss": 0.4723, + "step": 9418 + }, + { + "epoch": 0.6540063880016664, + "grad_norm": 4.028084661502884, + "learning_rate": 2.8245768324518343e-06, + "loss": 0.4204, + "step": 9419 + }, + { + "epoch": 0.6540758228023885, + "grad_norm": 4.390266134362424, + "learning_rate": 2.8235644108378814e-06, + "loss": 0.4997, + "step": 9420 + }, + { + "epoch": 0.6541452576031107, + "grad_norm": 3.196205460919116, + "learning_rate": 2.822552099305579e-06, + "loss": 0.3422, + "step": 9421 + }, + { + "epoch": 0.6542146924038328, + "grad_norm": 4.317644580182893, + "learning_rate": 2.8215398979061302e-06, + "loss": 0.6501, + "step": 9422 + }, + { + 
"epoch": 0.654284127204555, + "grad_norm": 4.746119261789729, + "learning_rate": 2.8205278066907242e-06, + "loss": 0.7082, + "step": 9423 + }, + { + "epoch": 0.6543535620052771, + "grad_norm": 4.339973239398779, + "learning_rate": 2.8195158257105577e-06, + "loss": 0.5299, + "step": 9424 + }, + { + "epoch": 0.6544229968059991, + "grad_norm": 3.9512468422616496, + "learning_rate": 2.818503955016813e-06, + "loss": 0.5035, + "step": 9425 + }, + { + "epoch": 0.6544924316067213, + "grad_norm": 3.2062485142961266, + "learning_rate": 2.8174921946606704e-06, + "loss": 0.329, + "step": 9426 + }, + { + "epoch": 0.6545618664074434, + "grad_norm": 5.023163478942884, + "learning_rate": 2.8164805446933018e-06, + "loss": 0.466, + "step": 9427 + }, + { + "epoch": 0.6546313012081655, + "grad_norm": 4.065959456524006, + "learning_rate": 2.815469005165877e-06, + "loss": 0.5718, + "step": 9428 + }, + { + "epoch": 0.6547007360088877, + "grad_norm": 4.077856629595067, + "learning_rate": 2.8144575761295585e-06, + "loss": 0.4269, + "step": 9429 + }, + { + "epoch": 0.6547701708096098, + "grad_norm": 3.8468307093769765, + "learning_rate": 2.813446257635502e-06, + "loss": 0.343, + "step": 9430 + }, + { + "epoch": 0.6548396056103319, + "grad_norm": 3.878269166011031, + "learning_rate": 2.8124350497348595e-06, + "loss": 0.4979, + "step": 9431 + }, + { + "epoch": 0.654909040411054, + "grad_norm": 3.9485188757479985, + "learning_rate": 2.8114239524787756e-06, + "loss": 0.5487, + "step": 9432 + }, + { + "epoch": 0.6549784752117761, + "grad_norm": 3.691865044168531, + "learning_rate": 2.8104129659183943e-06, + "loss": 0.2353, + "step": 9433 + }, + { + "epoch": 0.6550479100124983, + "grad_norm": 3.87956581474772, + "learning_rate": 2.809402090104845e-06, + "loss": 0.585, + "step": 9434 + }, + { + "epoch": 0.6551173448132204, + "grad_norm": 3.7261727767842414, + "learning_rate": 2.80839132508926e-06, + "loss": 0.5071, + "step": 9435 + }, + { + "epoch": 0.6551867796139426, + "grad_norm": 4.372315241923775, + "learning_rate": 2.807380670922765e-06, + "loss": 0.6274, + "step": 9436 + }, + { + "epoch": 0.6552562144146646, + "grad_norm": 3.3406027324353476, + "learning_rate": 2.8063701276564693e-06, + "loss": 0.1662, + "step": 9437 + }, + { + "epoch": 0.6553256492153867, + "grad_norm": 3.4589709672516737, + "learning_rate": 2.8053596953414926e-06, + "loss": 0.434, + "step": 9438 + }, + { + "epoch": 0.6553950840161089, + "grad_norm": 3.4633096284696436, + "learning_rate": 2.804349374028939e-06, + "loss": 0.3813, + "step": 9439 + }, + { + "epoch": 0.655464518816831, + "grad_norm": 4.795957499113184, + "learning_rate": 2.8033391637699104e-06, + "loss": 0.7162, + "step": 9440 + }, + { + "epoch": 0.6555339536175531, + "grad_norm": 3.1346465241284958, + "learning_rate": 2.8023290646155e-06, + "loss": 0.3977, + "step": 9441 + }, + { + "epoch": 0.6556033884182753, + "grad_norm": 4.0510520458471735, + "learning_rate": 2.8013190766167996e-06, + "loss": 0.5197, + "step": 9442 + }, + { + "epoch": 0.6556728232189973, + "grad_norm": 3.1486789912602884, + "learning_rate": 2.800309199824892e-06, + "loss": 0.2936, + "step": 9443 + }, + { + "epoch": 0.6557422580197195, + "grad_norm": 2.854366861333353, + "learning_rate": 2.7992994342908553e-06, + "loss": 0.3496, + "step": 9444 + }, + { + "epoch": 0.6558116928204416, + "grad_norm": 5.911400212759046, + "learning_rate": 2.798289780065764e-06, + "loss": 0.7833, + "step": 9445 + }, + { + "epoch": 0.6558811276211637, + "grad_norm": 4.92580756945385, + "learning_rate": 2.7972802372006817e-06, + "loss": 
0.6177, + "step": 9446 + }, + { + "epoch": 0.6559505624218859, + "grad_norm": 2.1508113209527675, + "learning_rate": 2.796270805746677e-06, + "loss": 0.1657, + "step": 9447 + }, + { + "epoch": 0.656019997222608, + "grad_norm": 3.391681722784454, + "learning_rate": 2.7952614857547988e-06, + "loss": 0.3564, + "step": 9448 + }, + { + "epoch": 0.6560894320233301, + "grad_norm": 4.561336486198823, + "learning_rate": 2.7942522772760972e-06, + "loss": 0.5534, + "step": 9449 + }, + { + "epoch": 0.6561588668240522, + "grad_norm": 3.5379538366712118, + "learning_rate": 2.7932431803616222e-06, + "loss": 0.4115, + "step": 9450 + }, + { + "epoch": 0.6562283016247743, + "grad_norm": 4.173720774769059, + "learning_rate": 2.792234195062412e-06, + "loss": 0.591, + "step": 9451 + }, + { + "epoch": 0.6562977364254965, + "grad_norm": 4.828417939356, + "learning_rate": 2.791225321429494e-06, + "loss": 0.6814, + "step": 9452 + }, + { + "epoch": 0.6563671712262186, + "grad_norm": 4.30530759688471, + "learning_rate": 2.790216559513902e-06, + "loss": 0.4877, + "step": 9453 + }, + { + "epoch": 0.6564366060269406, + "grad_norm": 4.233169351653628, + "learning_rate": 2.789207909366657e-06, + "loss": 0.4658, + "step": 9454 + }, + { + "epoch": 0.6565060408276628, + "grad_norm": 3.4197145306518486, + "learning_rate": 2.788199371038771e-06, + "loss": 0.4003, + "step": 9455 + }, + { + "epoch": 0.6565754756283849, + "grad_norm": 3.6696890831276456, + "learning_rate": 2.78719094458126e-06, + "loss": 0.3445, + "step": 9456 + }, + { + "epoch": 0.6566449104291071, + "grad_norm": 3.6881360678497104, + "learning_rate": 2.7861826300451265e-06, + "loss": 0.3285, + "step": 9457 + }, + { + "epoch": 0.6567143452298292, + "grad_norm": 3.8447697234504146, + "learning_rate": 2.7851744274813715e-06, + "loss": 0.6384, + "step": 9458 + }, + { + "epoch": 0.6567837800305513, + "grad_norm": 4.010011938636868, + "learning_rate": 2.7841663369409865e-06, + "loss": 0.5725, + "step": 9459 + }, + { + "epoch": 0.6568532148312735, + "grad_norm": 3.2609916663868344, + "learning_rate": 2.7831583584749617e-06, + "loss": 0.4202, + "step": 9460 + }, + { + "epoch": 0.6569226496319955, + "grad_norm": 4.986409902104478, + "learning_rate": 2.782150492134278e-06, + "loss": 0.3977, + "step": 9461 + }, + { + "epoch": 0.6569920844327177, + "grad_norm": 3.5874794718612164, + "learning_rate": 2.781142737969913e-06, + "loss": 0.5873, + "step": 9462 + }, + { + "epoch": 0.6570615192334398, + "grad_norm": 5.393404136180233, + "learning_rate": 2.780135096032838e-06, + "loss": 0.5011, + "step": 9463 + }, + { + "epoch": 0.6571309540341619, + "grad_norm": 5.613559341184744, + "learning_rate": 2.779127566374015e-06, + "loss": 0.5624, + "step": 9464 + }, + { + "epoch": 0.6572003888348841, + "grad_norm": 4.324332084541916, + "learning_rate": 2.778120149044411e-06, + "loss": 0.372, + "step": 9465 + }, + { + "epoch": 0.6572698236356062, + "grad_norm": 3.82158140877189, + "learning_rate": 2.777112844094972e-06, + "loss": 0.2801, + "step": 9466 + }, + { + "epoch": 0.6573392584363282, + "grad_norm": 4.280744272105727, + "learning_rate": 2.7761056515766514e-06, + "loss": 0.535, + "step": 9467 + }, + { + "epoch": 0.6574086932370504, + "grad_norm": 3.034637235214766, + "learning_rate": 2.775098571540393e-06, + "loss": 0.2876, + "step": 9468 + }, + { + "epoch": 0.6574781280377725, + "grad_norm": 3.8984467380337797, + "learning_rate": 2.774091604037127e-06, + "loss": 0.3841, + "step": 9469 + }, + { + "epoch": 0.6575475628384947, + "grad_norm": 3.612568467326305, + "learning_rate": 
2.7730847491177903e-06, + "loss": 0.3854, + "step": 9470 + }, + { + "epoch": 0.6576169976392168, + "grad_norm": 2.4300020398700846, + "learning_rate": 2.7720780068333074e-06, + "loss": 0.2166, + "step": 9471 + }, + { + "epoch": 0.6576864324399389, + "grad_norm": 3.3605197919468237, + "learning_rate": 2.7710713772345988e-06, + "loss": 0.4421, + "step": 9472 + }, + { + "epoch": 0.657755867240661, + "grad_norm": 4.574376599185595, + "learning_rate": 2.7700648603725767e-06, + "loss": 0.3837, + "step": 9473 + }, + { + "epoch": 0.6578253020413831, + "grad_norm": 3.5178147516700258, + "learning_rate": 2.7690584562981505e-06, + "loss": 0.3384, + "step": 9474 + }, + { + "epoch": 0.6578947368421053, + "grad_norm": 3.594964246235022, + "learning_rate": 2.7680521650622216e-06, + "loss": 0.4173, + "step": 9475 + }, + { + "epoch": 0.6579641716428274, + "grad_norm": 4.560789712788866, + "learning_rate": 2.7670459867156925e-06, + "loss": 0.4824, + "step": 9476 + }, + { + "epoch": 0.6580336064435495, + "grad_norm": 4.260460039277188, + "learning_rate": 2.766039921309448e-06, + "loss": 0.5662, + "step": 9477 + }, + { + "epoch": 0.6581030412442717, + "grad_norm": 4.351240083279505, + "learning_rate": 2.765033968894375e-06, + "loss": 0.4114, + "step": 9478 + }, + { + "epoch": 0.6581724760449937, + "grad_norm": 4.273458786860868, + "learning_rate": 2.764028129521359e-06, + "loss": 0.6259, + "step": 9479 + }, + { + "epoch": 0.6582419108457159, + "grad_norm": 3.1936253346742953, + "learning_rate": 2.7630224032412677e-06, + "loss": 0.2226, + "step": 9480 + }, + { + "epoch": 0.658311345646438, + "grad_norm": 4.1660025582388585, + "learning_rate": 2.76201679010497e-06, + "loss": 0.573, + "step": 9481 + }, + { + "epoch": 0.6583807804471601, + "grad_norm": 4.245996062120205, + "learning_rate": 2.7610112901633325e-06, + "loss": 0.296, + "step": 9482 + }, + { + "epoch": 0.6584502152478823, + "grad_norm": 4.139649590303785, + "learning_rate": 2.760005903467213e-06, + "loss": 0.5474, + "step": 9483 + }, + { + "epoch": 0.6585196500486044, + "grad_norm": 4.1500634222347506, + "learning_rate": 2.7590006300674553e-06, + "loss": 0.5, + "step": 9484 + }, + { + "epoch": 0.6585890848493264, + "grad_norm": 3.1030816063227964, + "learning_rate": 2.7579954700149124e-06, + "loss": 0.3219, + "step": 9485 + }, + { + "epoch": 0.6586585196500486, + "grad_norm": 2.388486828059138, + "learning_rate": 2.7569904233604215e-06, + "loss": 0.1244, + "step": 9486 + }, + { + "epoch": 0.6587279544507707, + "grad_norm": 3.2980890652964208, + "learning_rate": 2.7559854901548165e-06, + "loss": 0.3751, + "step": 9487 + }, + { + "epoch": 0.6587973892514929, + "grad_norm": 4.057768517231572, + "learning_rate": 2.754980670448927e-06, + "loss": 0.3973, + "step": 9488 + }, + { + "epoch": 0.658866824052215, + "grad_norm": 4.586690125122526, + "learning_rate": 2.753975964293574e-06, + "loss": 0.5271, + "step": 9489 + }, + { + "epoch": 0.658936258852937, + "grad_norm": 4.714494701496468, + "learning_rate": 2.752971371739575e-06, + "loss": 0.5462, + "step": 9490 + }, + { + "epoch": 0.6590056936536592, + "grad_norm": 2.64844438023939, + "learning_rate": 2.7519668928377417e-06, + "loss": 0.2088, + "step": 9491 + }, + { + "epoch": 0.6590751284543813, + "grad_norm": 4.122080466376102, + "learning_rate": 2.75096252763888e-06, + "loss": 0.4102, + "step": 9492 + }, + { + "epoch": 0.6591445632551035, + "grad_norm": 3.4759312255850365, + "learning_rate": 2.749958276193787e-06, + "loss": 0.3943, + "step": 9493 + }, + { + "epoch": 0.6592139980558256, + "grad_norm": 
2.1566262098524267, + "learning_rate": 2.748954138553259e-06, + "loss": 0.1623, + "step": 9494 + }, + { + "epoch": 0.6592834328565477, + "grad_norm": 2.810574917776052, + "learning_rate": 2.747950114768082e-06, + "loss": 0.1843, + "step": 9495 + }, + { + "epoch": 0.6593528676572699, + "grad_norm": 2.1790045492095036, + "learning_rate": 2.7469462048890418e-06, + "loss": 0.1607, + "step": 9496 + }, + { + "epoch": 0.6594223024579919, + "grad_norm": 4.632378980619174, + "learning_rate": 2.745942408966914e-06, + "loss": 0.4814, + "step": 9497 + }, + { + "epoch": 0.659491737258714, + "grad_norm": 4.199321776653104, + "learning_rate": 2.7449387270524654e-06, + "loss": 0.4418, + "step": 9498 + }, + { + "epoch": 0.6595611720594362, + "grad_norm": 3.4535412185618735, + "learning_rate": 2.7439351591964658e-06, + "loss": 0.2742, + "step": 9499 + }, + { + "epoch": 0.6596306068601583, + "grad_norm": 4.484573202722018, + "learning_rate": 2.742931705449673e-06, + "loss": 0.5249, + "step": 9500 + }, + { + "epoch": 0.6597000416608805, + "grad_norm": 3.330238970247279, + "learning_rate": 2.741928365862841e-06, + "loss": 0.3292, + "step": 9501 + }, + { + "epoch": 0.6597694764616026, + "grad_norm": 3.492060577913008, + "learning_rate": 2.7409251404867166e-06, + "loss": 0.4112, + "step": 9502 + }, + { + "epoch": 0.6598389112623246, + "grad_norm": 3.926728572281711, + "learning_rate": 2.7399220293720423e-06, + "loss": 0.3863, + "step": 9503 + }, + { + "epoch": 0.6599083460630468, + "grad_norm": 5.5201527395032555, + "learning_rate": 2.738919032569555e-06, + "loss": 0.7849, + "step": 9504 + }, + { + "epoch": 0.6599777808637689, + "grad_norm": 2.529980733017903, + "learning_rate": 2.7379161501299836e-06, + "loss": 0.1904, + "step": 9505 + }, + { + "epoch": 0.6600472156644911, + "grad_norm": 3.798546529842414, + "learning_rate": 2.736913382104054e-06, + "loss": 0.4624, + "step": 9506 + }, + { + "epoch": 0.6601166504652132, + "grad_norm": 3.850280368946491, + "learning_rate": 2.7359107285424826e-06, + "loss": 0.4397, + "step": 9507 + }, + { + "epoch": 0.6601860852659353, + "grad_norm": 4.016073474375954, + "learning_rate": 2.734908189495988e-06, + "loss": 0.4539, + "step": 9508 + }, + { + "epoch": 0.6602555200666574, + "grad_norm": 3.7905226266710943, + "learning_rate": 2.7339057650152723e-06, + "loss": 0.5917, + "step": 9509 + }, + { + "epoch": 0.6603249548673795, + "grad_norm": 3.1879243721701775, + "learning_rate": 2.732903455151037e-06, + "loss": 0.3622, + "step": 9510 + }, + { + "epoch": 0.6603943896681016, + "grad_norm": 3.850743682813295, + "learning_rate": 2.7319012599539836e-06, + "loss": 0.4112, + "step": 9511 + }, + { + "epoch": 0.6604638244688238, + "grad_norm": 2.6806593864397343, + "learning_rate": 2.730899179474796e-06, + "loss": 0.2308, + "step": 9512 + }, + { + "epoch": 0.6605332592695459, + "grad_norm": 3.947650848410535, + "learning_rate": 2.729897213764158e-06, + "loss": 0.3364, + "step": 9513 + }, + { + "epoch": 0.6606026940702681, + "grad_norm": 3.2865625183857228, + "learning_rate": 2.728895362872752e-06, + "loss": 0.3076, + "step": 9514 + }, + { + "epoch": 0.6606721288709901, + "grad_norm": 3.3879047214706217, + "learning_rate": 2.727893626851249e-06, + "loss": 0.2817, + "step": 9515 + }, + { + "epoch": 0.6607415636717122, + "grad_norm": 4.020696075693928, + "learning_rate": 2.726892005750315e-06, + "loss": 0.4665, + "step": 9516 + }, + { + "epoch": 0.6608109984724344, + "grad_norm": 3.6926961145755106, + "learning_rate": 2.7258904996206113e-06, + "loss": 0.2341, + "step": 9517 + }, + { + 
"epoch": 0.6608804332731565, + "grad_norm": 4.913795642498241, + "learning_rate": 2.7248891085127927e-06, + "loss": 0.5222, + "step": 9518 + }, + { + "epoch": 0.6609498680738787, + "grad_norm": 3.957949082858564, + "learning_rate": 2.7238878324775087e-06, + "loss": 0.3418, + "step": 9519 + }, + { + "epoch": 0.6610193028746008, + "grad_norm": 4.545325193675648, + "learning_rate": 2.7228866715654024e-06, + "loss": 0.6244, + "step": 9520 + }, + { + "epoch": 0.6610887376753228, + "grad_norm": 4.951857946006939, + "learning_rate": 2.721885625827112e-06, + "loss": 0.5444, + "step": 9521 + }, + { + "epoch": 0.661158172476045, + "grad_norm": 2.4536647070951934, + "learning_rate": 2.7208846953132685e-06, + "loss": 0.1821, + "step": 9522 + }, + { + "epoch": 0.6612276072767671, + "grad_norm": 3.417244302680744, + "learning_rate": 2.7198838800744974e-06, + "loss": 0.2703, + "step": 9523 + }, + { + "epoch": 0.6612970420774892, + "grad_norm": 4.459790982249659, + "learning_rate": 2.7188831801614202e-06, + "loss": 0.3464, + "step": 9524 + }, + { + "epoch": 0.6613664768782114, + "grad_norm": 4.200540792354211, + "learning_rate": 2.7178825956246503e-06, + "loss": 0.7596, + "step": 9525 + }, + { + "epoch": 0.6614359116789335, + "grad_norm": 3.2995915762971153, + "learning_rate": 2.7168821265147965e-06, + "loss": 0.2297, + "step": 9526 + }, + { + "epoch": 0.6615053464796556, + "grad_norm": 2.894082688728053, + "learning_rate": 2.7158817728824593e-06, + "loss": 0.2135, + "step": 9527 + }, + { + "epoch": 0.6615747812803777, + "grad_norm": 2.6857276171046776, + "learning_rate": 2.7148815347782392e-06, + "loss": 0.256, + "step": 9528 + }, + { + "epoch": 0.6616442160810998, + "grad_norm": 3.4579157197694914, + "learning_rate": 2.7138814122527274e-06, + "loss": 0.3779, + "step": 9529 + }, + { + "epoch": 0.661713650881822, + "grad_norm": 3.1637071704333195, + "learning_rate": 2.7128814053565027e-06, + "loss": 0.1874, + "step": 9530 + }, + { + "epoch": 0.6617830856825441, + "grad_norm": 3.310116618892332, + "learning_rate": 2.7118815141401504e-06, + "loss": 0.4021, + "step": 9531 + }, + { + "epoch": 0.6618525204832663, + "grad_norm": 4.516345195196129, + "learning_rate": 2.7108817386542417e-06, + "loss": 0.5105, + "step": 9532 + }, + { + "epoch": 0.6619219552839883, + "grad_norm": 3.473607401592932, + "learning_rate": 2.7098820789493453e-06, + "loss": 0.4443, + "step": 9533 + }, + { + "epoch": 0.6619913900847104, + "grad_norm": 3.654458863424914, + "learning_rate": 2.708882535076021e-06, + "loss": 0.4112, + "step": 9534 + }, + { + "epoch": 0.6620608248854326, + "grad_norm": 3.659929473515625, + "learning_rate": 2.7078831070848254e-06, + "loss": 0.3556, + "step": 9535 + }, + { + "epoch": 0.6621302596861547, + "grad_norm": 3.68895679562386, + "learning_rate": 2.7068837950263093e-06, + "loss": 0.3124, + "step": 9536 + }, + { + "epoch": 0.6621996944868769, + "grad_norm": 6.135996576626905, + "learning_rate": 2.7058845989510156e-06, + "loss": 1.1037, + "step": 9537 + }, + { + "epoch": 0.662269129287599, + "grad_norm": 4.050658674131581, + "learning_rate": 2.704885518909483e-06, + "loss": 0.3642, + "step": 9538 + }, + { + "epoch": 0.662338564088321, + "grad_norm": 2.7595776298732235, + "learning_rate": 2.703886554952242e-06, + "loss": 0.2577, + "step": 9539 + }, + { + "epoch": 0.6624079988890432, + "grad_norm": 4.200284803503475, + "learning_rate": 2.7028877071298243e-06, + "loss": 0.4466, + "step": 9540 + }, + { + "epoch": 0.6624774336897653, + "grad_norm": 3.4055673911198623, + "learning_rate": 2.701888975492745e-06, + 
"loss": 0.2973, + "step": 9541 + }, + { + "epoch": 0.6625468684904874, + "grad_norm": 3.981354654422262, + "learning_rate": 2.7008903600915194e-06, + "loss": 0.5716, + "step": 9542 + }, + { + "epoch": 0.6626163032912096, + "grad_norm": 4.178806245999192, + "learning_rate": 2.699891860976661e-06, + "loss": 0.611, + "step": 9543 + }, + { + "epoch": 0.6626857380919317, + "grad_norm": 3.38498200024003, + "learning_rate": 2.6988934781986686e-06, + "loss": 0.3335, + "step": 9544 + }, + { + "epoch": 0.6627551728926538, + "grad_norm": 5.794123213895749, + "learning_rate": 2.697895211808037e-06, + "loss": 0.6271, + "step": 9545 + }, + { + "epoch": 0.6628246076933759, + "grad_norm": 3.143598899451623, + "learning_rate": 2.6968970618552638e-06, + "loss": 0.2275, + "step": 9546 + }, + { + "epoch": 0.662894042494098, + "grad_norm": 5.123682207267904, + "learning_rate": 2.6958990283908305e-06, + "loss": 0.495, + "step": 9547 + }, + { + "epoch": 0.6629634772948202, + "grad_norm": 2.5973732475025515, + "learning_rate": 2.694901111465217e-06, + "loss": 0.2782, + "step": 9548 + }, + { + "epoch": 0.6630329120955423, + "grad_norm": 4.7580558291304165, + "learning_rate": 2.6939033111288964e-06, + "loss": 0.6355, + "step": 9549 + }, + { + "epoch": 0.6631023468962645, + "grad_norm": 4.511425975517061, + "learning_rate": 2.6929056274323373e-06, + "loss": 0.6073, + "step": 9550 + }, + { + "epoch": 0.6631717816969865, + "grad_norm": 6.531573305544805, + "learning_rate": 2.6919080604260004e-06, + "loss": 0.6995, + "step": 9551 + }, + { + "epoch": 0.6632412164977086, + "grad_norm": 4.131580711063387, + "learning_rate": 2.6909106101603434e-06, + "loss": 0.4971, + "step": 9552 + }, + { + "epoch": 0.6633106512984308, + "grad_norm": 3.3712046380750076, + "learning_rate": 2.689913276685814e-06, + "loss": 0.3827, + "step": 9553 + }, + { + "epoch": 0.6633800860991529, + "grad_norm": 3.6905076080797934, + "learning_rate": 2.688916060052856e-06, + "loss": 0.5169, + "step": 9554 + }, + { + "epoch": 0.663449520899875, + "grad_norm": 3.0672731121113967, + "learning_rate": 2.6879189603119104e-06, + "loss": 0.3544, + "step": 9555 + }, + { + "epoch": 0.6635189557005972, + "grad_norm": 3.639212225449984, + "learning_rate": 2.686921977513404e-06, + "loss": 0.3049, + "step": 9556 + }, + { + "epoch": 0.6635883905013192, + "grad_norm": 3.2271750112385975, + "learning_rate": 2.6859251117077694e-06, + "loss": 0.345, + "step": 9557 + }, + { + "epoch": 0.6636578253020414, + "grad_norm": 3.2387168975810687, + "learning_rate": 2.684928362945426e-06, + "loss": 0.389, + "step": 9558 + }, + { + "epoch": 0.6637272601027635, + "grad_norm": 3.709161272280732, + "learning_rate": 2.6839317312767823e-06, + "loss": 0.4305, + "step": 9559 + }, + { + "epoch": 0.6637966949034856, + "grad_norm": 5.069231118560854, + "learning_rate": 2.6829352167522525e-06, + "loss": 0.2784, + "step": 9560 + }, + { + "epoch": 0.6638661297042078, + "grad_norm": 4.43590595847331, + "learning_rate": 2.6819388194222396e-06, + "loss": 0.4425, + "step": 9561 + }, + { + "epoch": 0.6639355645049299, + "grad_norm": 3.832172992143812, + "learning_rate": 2.6809425393371342e-06, + "loss": 0.3517, + "step": 9562 + }, + { + "epoch": 0.664004999305652, + "grad_norm": 5.6195934213869645, + "learning_rate": 2.6799463765473333e-06, + "loss": 0.6691, + "step": 9563 + }, + { + "epoch": 0.6640744341063741, + "grad_norm": 3.12562356500426, + "learning_rate": 2.678950331103218e-06, + "loss": 0.4752, + "step": 9564 + }, + { + "epoch": 0.6641438689070962, + "grad_norm": 3.3942817384914927, + 
"learning_rate": 2.67795440305517e-06, + "loss": 0.2632, + "step": 9565 + }, + { + "epoch": 0.6642133037078184, + "grad_norm": 3.8551789043050384, + "learning_rate": 2.67695859245356e-06, + "loss": 0.4326, + "step": 9566 + }, + { + "epoch": 0.6642827385085405, + "grad_norm": 3.7555617970050545, + "learning_rate": 2.675962899348756e-06, + "loss": 0.3402, + "step": 9567 + }, + { + "epoch": 0.6643521733092626, + "grad_norm": 4.211523738861425, + "learning_rate": 2.674967323791119e-06, + "loss": 0.3856, + "step": 9568 + }, + { + "epoch": 0.6644216081099847, + "grad_norm": 3.4331081408378763, + "learning_rate": 2.6739718658310033e-06, + "loss": 0.2676, + "step": 9569 + }, + { + "epoch": 0.6644910429107068, + "grad_norm": 4.285693483471447, + "learning_rate": 2.672976525518759e-06, + "loss": 0.5442, + "step": 9570 + }, + { + "epoch": 0.664560477711429, + "grad_norm": 3.888445124132855, + "learning_rate": 2.671981302904727e-06, + "loss": 0.5365, + "step": 9571 + }, + { + "epoch": 0.6646299125121511, + "grad_norm": 3.8224308428310985, + "learning_rate": 2.6709861980392494e-06, + "loss": 0.6311, + "step": 9572 + }, + { + "epoch": 0.6646993473128732, + "grad_norm": 4.610168520527076, + "learning_rate": 2.6699912109726534e-06, + "loss": 0.6537, + "step": 9573 + }, + { + "epoch": 0.6647687821135954, + "grad_norm": 4.1172500613350715, + "learning_rate": 2.668996341755263e-06, + "loss": 0.2709, + "step": 9574 + }, + { + "epoch": 0.6648382169143174, + "grad_norm": 4.006935433093141, + "learning_rate": 2.6680015904374033e-06, + "loss": 0.3276, + "step": 9575 + }, + { + "epoch": 0.6649076517150396, + "grad_norm": 4.459840572863572, + "learning_rate": 2.667006957069381e-06, + "loss": 0.5642, + "step": 9576 + }, + { + "epoch": 0.6649770865157617, + "grad_norm": 3.5446073566824445, + "learning_rate": 2.6660124417015086e-06, + "loss": 0.3719, + "step": 9577 + }, + { + "epoch": 0.6650465213164838, + "grad_norm": 4.509106318707326, + "learning_rate": 2.665018044384087e-06, + "loss": 0.3725, + "step": 9578 + }, + { + "epoch": 0.665115956117206, + "grad_norm": 4.013338723972772, + "learning_rate": 2.664023765167409e-06, + "loss": 0.4884, + "step": 9579 + }, + { + "epoch": 0.665185390917928, + "grad_norm": 2.4969553263938202, + "learning_rate": 2.6630296041017655e-06, + "loss": 0.1839, + "step": 9580 + }, + { + "epoch": 0.6652548257186501, + "grad_norm": 4.54527118544182, + "learning_rate": 2.6620355612374414e-06, + "loss": 0.4285, + "step": 9581 + }, + { + "epoch": 0.6653242605193723, + "grad_norm": 2.6246937454178476, + "learning_rate": 2.661041636624712e-06, + "loss": 0.3278, + "step": 9582 + }, + { + "epoch": 0.6653936953200944, + "grad_norm": 4.13639168689652, + "learning_rate": 2.6600478303138503e-06, + "loss": 0.4605, + "step": 9583 + }, + { + "epoch": 0.6654631301208166, + "grad_norm": 6.337254886899172, + "learning_rate": 2.659054142355122e-06, + "loss": 0.8315, + "step": 9584 + }, + { + "epoch": 0.6655325649215387, + "grad_norm": 3.5660072477611666, + "learning_rate": 2.6580605727987835e-06, + "loss": 0.3029, + "step": 9585 + }, + { + "epoch": 0.6656019997222608, + "grad_norm": 3.4681466279547006, + "learning_rate": 2.657067121695095e-06, + "loss": 0.5109, + "step": 9586 + }, + { + "epoch": 0.6656714345229829, + "grad_norm": 2.6789334898486, + "learning_rate": 2.656073789094299e-06, + "loss": 0.2461, + "step": 9587 + }, + { + "epoch": 0.665740869323705, + "grad_norm": 3.164142642041622, + "learning_rate": 2.6550805750466357e-06, + "loss": 0.4248, + "step": 9588 + }, + { + "epoch": 0.6658103041244272, + 
"grad_norm": 4.881216234072196, + "learning_rate": 2.6540874796023464e-06, + "loss": 0.3402, + "step": 9589 + }, + { + "epoch": 0.6658797389251493, + "grad_norm": 3.701304767925541, + "learning_rate": 2.653094502811659e-06, + "loss": 0.3135, + "step": 9590 + }, + { + "epoch": 0.6659491737258714, + "grad_norm": 3.773606619990969, + "learning_rate": 2.652101644724792e-06, + "loss": 0.5722, + "step": 9591 + }, + { + "epoch": 0.6660186085265936, + "grad_norm": 4.553747967699819, + "learning_rate": 2.651108905391969e-06, + "loss": 0.4253, + "step": 9592 + }, + { + "epoch": 0.6660880433273156, + "grad_norm": 2.8010505599686746, + "learning_rate": 2.6501162848634023e-06, + "loss": 0.2124, + "step": 9593 + }, + { + "epoch": 0.6661574781280378, + "grad_norm": 3.617273965230249, + "learning_rate": 2.64912378318929e-06, + "loss": 0.2613, + "step": 9594 + }, + { + "epoch": 0.6662269129287599, + "grad_norm": 3.7457612366613136, + "learning_rate": 2.6481314004198387e-06, + "loss": 0.3692, + "step": 9595 + }, + { + "epoch": 0.666296347729482, + "grad_norm": 4.360504629816085, + "learning_rate": 2.6471391366052403e-06, + "loss": 0.6264, + "step": 9596 + }, + { + "epoch": 0.6663657825302042, + "grad_norm": 2.96776975161655, + "learning_rate": 2.646146991795682e-06, + "loss": 0.2847, + "step": 9597 + }, + { + "epoch": 0.6664352173309263, + "grad_norm": 4.132995594336135, + "learning_rate": 2.6451549660413445e-06, + "loss": 0.6963, + "step": 9598 + }, + { + "epoch": 0.6665046521316483, + "grad_norm": 3.5885931131855786, + "learning_rate": 2.6441630593924045e-06, + "loss": 0.3527, + "step": 9599 + }, + { + "epoch": 0.6665740869323705, + "grad_norm": 4.471230673697358, + "learning_rate": 2.643171271899031e-06, + "loss": 0.3338, + "step": 9600 + }, + { + "epoch": 0.6666435217330926, + "grad_norm": 3.0055029956156787, + "learning_rate": 2.6421796036113876e-06, + "loss": 0.3438, + "step": 9601 + }, + { + "epoch": 0.6667129565338148, + "grad_norm": 3.0158895005580506, + "learning_rate": 2.641188054579632e-06, + "loss": 0.3821, + "step": 9602 + }, + { + "epoch": 0.6667823913345369, + "grad_norm": 3.347053470068318, + "learning_rate": 2.6401966248539136e-06, + "loss": 0.4291, + "step": 9603 + }, + { + "epoch": 0.666851826135259, + "grad_norm": 4.23308599150836, + "learning_rate": 2.639205314484383e-06, + "loss": 0.4721, + "step": 9604 + }, + { + "epoch": 0.6669212609359811, + "grad_norm": 4.118729811873483, + "learning_rate": 2.6382141235211735e-06, + "loss": 0.4391, + "step": 9605 + }, + { + "epoch": 0.6669906957367032, + "grad_norm": 4.021447662441426, + "learning_rate": 2.637223052014422e-06, + "loss": 0.5309, + "step": 9606 + }, + { + "epoch": 0.6670601305374254, + "grad_norm": 4.211485549403837, + "learning_rate": 2.6362321000142554e-06, + "loss": 0.6249, + "step": 9607 + }, + { + "epoch": 0.6671295653381475, + "grad_norm": 2.848214270460804, + "learning_rate": 2.635241267570794e-06, + "loss": 0.2839, + "step": 9608 + }, + { + "epoch": 0.6671990001388696, + "grad_norm": 2.1904887106568744, + "learning_rate": 2.6342505547341534e-06, + "loss": 0.1716, + "step": 9609 + }, + { + "epoch": 0.6672684349395918, + "grad_norm": 4.321615045387327, + "learning_rate": 2.633259961554443e-06, + "loss": 0.4088, + "step": 9610 + }, + { + "epoch": 0.6673378697403138, + "grad_norm": 4.639402687449674, + "learning_rate": 2.632269488081765e-06, + "loss": 0.4933, + "step": 9611 + }, + { + "epoch": 0.6674073045410359, + "grad_norm": 4.074304870513707, + "learning_rate": 2.631279134366217e-06, + "loss": 0.4103, + "step": 9612 + }, + { 
+ "epoch": 0.6674767393417581, + "grad_norm": 3.1927742734862345, + "learning_rate": 2.6302889004578908e-06, + "loss": 0.3551, + "step": 9613 + }, + { + "epoch": 0.6675461741424802, + "grad_norm": 3.858039895475399, + "learning_rate": 2.6292987864068697e-06, + "loss": 0.4343, + "step": 9614 + }, + { + "epoch": 0.6676156089432024, + "grad_norm": 3.15800783113702, + "learning_rate": 2.6283087922632346e-06, + "loss": 0.3691, + "step": 9615 + }, + { + "epoch": 0.6676850437439245, + "grad_norm": 3.500584036853311, + "learning_rate": 2.6273189180770555e-06, + "loss": 0.3729, + "step": 9616 + }, + { + "epoch": 0.6677544785446465, + "grad_norm": 2.933519674551038, + "learning_rate": 2.6263291638983997e-06, + "loss": 0.2655, + "step": 9617 + }, + { + "epoch": 0.6678239133453687, + "grad_norm": 3.9866604323647725, + "learning_rate": 2.625339529777332e-06, + "loss": 0.4231, + "step": 9618 + }, + { + "epoch": 0.6678933481460908, + "grad_norm": 3.2193799869386175, + "learning_rate": 2.6243500157639025e-06, + "loss": 0.435, + "step": 9619 + }, + { + "epoch": 0.667962782946813, + "grad_norm": 4.07473587253958, + "learning_rate": 2.6233606219081586e-06, + "loss": 0.4929, + "step": 9620 + }, + { + "epoch": 0.6680322177475351, + "grad_norm": 3.1030786731354305, + "learning_rate": 2.6223713482601474e-06, + "loss": 0.2653, + "step": 9621 + }, + { + "epoch": 0.6681016525482572, + "grad_norm": 2.445388410051266, + "learning_rate": 2.621382194869905e-06, + "loss": 0.2165, + "step": 9622 + }, + { + "epoch": 0.6681710873489793, + "grad_norm": 3.8817264424970896, + "learning_rate": 2.620393161787455e-06, + "loss": 0.3792, + "step": 9623 + }, + { + "epoch": 0.6682405221497014, + "grad_norm": 4.348645404799426, + "learning_rate": 2.6194042490628286e-06, + "loss": 0.6629, + "step": 9624 + }, + { + "epoch": 0.6683099569504235, + "grad_norm": 3.5489593555584262, + "learning_rate": 2.618415456746043e-06, + "loss": 0.2525, + "step": 9625 + }, + { + "epoch": 0.6683793917511457, + "grad_norm": 4.8643024420139, + "learning_rate": 2.6174267848871047e-06, + "loss": 0.7433, + "step": 9626 + }, + { + "epoch": 0.6684488265518678, + "grad_norm": 5.788648089949168, + "learning_rate": 2.6164382335360263e-06, + "loss": 0.7028, + "step": 9627 + }, + { + "epoch": 0.66851826135259, + "grad_norm": 2.2121620930031134, + "learning_rate": 2.6154498027428043e-06, + "loss": 0.0881, + "step": 9628 + }, + { + "epoch": 0.668587696153312, + "grad_norm": 3.2832432535295633, + "learning_rate": 2.614461492557433e-06, + "loss": 0.2979, + "step": 9629 + }, + { + "epoch": 0.6686571309540341, + "grad_norm": 3.264318757681284, + "learning_rate": 2.6134733030299e-06, + "loss": 0.4192, + "step": 9630 + }, + { + "epoch": 0.6687265657547563, + "grad_norm": 3.3952948696430987, + "learning_rate": 2.612485234210187e-06, + "loss": 0.2337, + "step": 9631 + }, + { + "epoch": 0.6687960005554784, + "grad_norm": 2.7404422565995885, + "learning_rate": 2.611497286148267e-06, + "loss": 0.2651, + "step": 9632 + }, + { + "epoch": 0.6688654353562006, + "grad_norm": 3.7309460232114215, + "learning_rate": 2.610509458894116e-06, + "loss": 0.3854, + "step": 9633 + }, + { + "epoch": 0.6689348701569227, + "grad_norm": 4.146778787354469, + "learning_rate": 2.60952175249769e-06, + "loss": 0.4416, + "step": 9634 + }, + { + "epoch": 0.6690043049576447, + "grad_norm": 4.227578610180248, + "learning_rate": 2.608534167008947e-06, + "loss": 0.4939, + "step": 9635 + }, + { + "epoch": 0.6690737397583669, + "grad_norm": 3.6741687472670566, + "learning_rate": 2.6075467024778433e-06, + 
"loss": 0.432, + "step": 9636 + }, + { + "epoch": 0.669143174559089, + "grad_norm": 3.698801323170315, + "learning_rate": 2.606559358954317e-06, + "loss": 0.345, + "step": 9637 + }, + { + "epoch": 0.6692126093598111, + "grad_norm": 4.346247046851423, + "learning_rate": 2.6055721364883113e-06, + "loss": 0.4974, + "step": 9638 + }, + { + "epoch": 0.6692820441605333, + "grad_norm": 4.046134477381292, + "learning_rate": 2.604585035129758e-06, + "loss": 0.4396, + "step": 9639 + }, + { + "epoch": 0.6693514789612554, + "grad_norm": 3.75597114853743, + "learning_rate": 2.603598054928582e-06, + "loss": 0.4566, + "step": 9640 + }, + { + "epoch": 0.6694209137619775, + "grad_norm": 3.5317690429154056, + "learning_rate": 2.6026111959347054e-06, + "loss": 0.2216, + "step": 9641 + }, + { + "epoch": 0.6694903485626996, + "grad_norm": 4.1842138775506, + "learning_rate": 2.601624458198042e-06, + "loss": 0.477, + "step": 9642 + }, + { + "epoch": 0.6695597833634217, + "grad_norm": 3.882019313490578, + "learning_rate": 2.6006378417684995e-06, + "loss": 0.5063, + "step": 9643 + }, + { + "epoch": 0.6696292181641439, + "grad_norm": 2.213100444316833, + "learning_rate": 2.599651346695979e-06, + "loss": 0.1193, + "step": 9644 + }, + { + "epoch": 0.669698652964866, + "grad_norm": 4.156878932701719, + "learning_rate": 2.598664973030378e-06, + "loss": 0.5239, + "step": 9645 + }, + { + "epoch": 0.6697680877655882, + "grad_norm": 4.801751722639197, + "learning_rate": 2.597678720821584e-06, + "loss": 0.3587, + "step": 9646 + }, + { + "epoch": 0.6698375225663102, + "grad_norm": 3.730166942670027, + "learning_rate": 2.5966925901194847e-06, + "loss": 0.3098, + "step": 9647 + }, + { + "epoch": 0.6699069573670323, + "grad_norm": 4.364207282402331, + "learning_rate": 2.595706580973953e-06, + "loss": 0.5057, + "step": 9648 + }, + { + "epoch": 0.6699763921677545, + "grad_norm": 3.1705210217551056, + "learning_rate": 2.5947206934348603e-06, + "loss": 0.2881, + "step": 9649 + }, + { + "epoch": 0.6700458269684766, + "grad_norm": 3.037068116227046, + "learning_rate": 2.593734927552076e-06, + "loss": 0.325, + "step": 9650 + }, + { + "epoch": 0.6701152617691988, + "grad_norm": 2.2121210042965895, + "learning_rate": 2.5927492833754553e-06, + "loss": 0.2793, + "step": 9651 + }, + { + "epoch": 0.6701846965699209, + "grad_norm": 7.347952234165531, + "learning_rate": 2.591763760954849e-06, + "loss": 0.4855, + "step": 9652 + }, + { + "epoch": 0.6702541313706429, + "grad_norm": 2.7150719836913293, + "learning_rate": 2.5907783603401092e-06, + "loss": 0.2143, + "step": 9653 + }, + { + "epoch": 0.6703235661713651, + "grad_norm": 4.019797804392938, + "learning_rate": 2.5897930815810756e-06, + "loss": 0.5557, + "step": 9654 + }, + { + "epoch": 0.6703930009720872, + "grad_norm": 3.6512079219894495, + "learning_rate": 2.5888079247275768e-06, + "loss": 0.4634, + "step": 9655 + }, + { + "epoch": 0.6704624357728093, + "grad_norm": 3.0866801980047325, + "learning_rate": 2.587822889829446e-06, + "loss": 0.3596, + "step": 9656 + }, + { + "epoch": 0.6705318705735315, + "grad_norm": 4.106324050363236, + "learning_rate": 2.5868379769365043e-06, + "loss": 0.4068, + "step": 9657 + }, + { + "epoch": 0.6706013053742536, + "grad_norm": 3.697600217139591, + "learning_rate": 2.585853186098567e-06, + "loss": 0.292, + "step": 9658 + }, + { + "epoch": 0.6706707401749757, + "grad_norm": 3.0078087472050035, + "learning_rate": 2.5848685173654444e-06, + "loss": 0.3413, + "step": 9659 + }, + { + "epoch": 0.6707401749756978, + "grad_norm": 4.131844811756808, + 
"learning_rate": 2.5838839707869377e-06, + "loss": 0.491, + "step": 9660 + }, + { + "epoch": 0.6708096097764199, + "grad_norm": 3.421265875111743, + "learning_rate": 2.582899546412847e-06, + "loss": 0.2461, + "step": 9661 + }, + { + "epoch": 0.6708790445771421, + "grad_norm": 3.743779563111239, + "learning_rate": 2.5819152442929616e-06, + "loss": 0.5023, + "step": 9662 + }, + { + "epoch": 0.6709484793778642, + "grad_norm": 3.309660503402367, + "learning_rate": 2.580931064477068e-06, + "loss": 0.4283, + "step": 9663 + }, + { + "epoch": 0.6710179141785864, + "grad_norm": 4.458567942553772, + "learning_rate": 2.579947007014941e-06, + "loss": 0.4283, + "step": 9664 + }, + { + "epoch": 0.6710873489793084, + "grad_norm": 3.0541000429995977, + "learning_rate": 2.5789630719563597e-06, + "loss": 0.3452, + "step": 9665 + }, + { + "epoch": 0.6711567837800305, + "grad_norm": 3.3621359357103686, + "learning_rate": 2.577979259351082e-06, + "loss": 0.2917, + "step": 9666 + }, + { + "epoch": 0.6712262185807527, + "grad_norm": 4.879415383627998, + "learning_rate": 2.5769955692488756e-06, + "loss": 0.3457, + "step": 9667 + }, + { + "epoch": 0.6712956533814748, + "grad_norm": 3.010817151475008, + "learning_rate": 2.576012001699494e-06, + "loss": 0.256, + "step": 9668 + }, + { + "epoch": 0.6713650881821969, + "grad_norm": 3.851151968221527, + "learning_rate": 2.575028556752678e-06, + "loss": 0.4603, + "step": 9669 + }, + { + "epoch": 0.6714345229829191, + "grad_norm": 3.096567400433019, + "learning_rate": 2.5740452344581748e-06, + "loss": 0.3588, + "step": 9670 + }, + { + "epoch": 0.6715039577836411, + "grad_norm": 6.727280938943735, + "learning_rate": 2.5730620348657186e-06, + "loss": 0.422, + "step": 9671 + }, + { + "epoch": 0.6715733925843633, + "grad_norm": 3.9113635387073034, + "learning_rate": 2.5720789580250396e-06, + "loss": 0.4502, + "step": 9672 + }, + { + "epoch": 0.6716428273850854, + "grad_norm": 3.5090095411586364, + "learning_rate": 2.5710960039858585e-06, + "loss": 0.5032, + "step": 9673 + }, + { + "epoch": 0.6717122621858075, + "grad_norm": 3.4556708167806307, + "learning_rate": 2.5701131727978933e-06, + "loss": 0.3501, + "step": 9674 + }, + { + "epoch": 0.6717816969865297, + "grad_norm": 4.582318093833808, + "learning_rate": 2.569130464510854e-06, + "loss": 0.6041, + "step": 9675 + }, + { + "epoch": 0.6718511317872518, + "grad_norm": 3.163866374720068, + "learning_rate": 2.5681478791744453e-06, + "loss": 0.3645, + "step": 9676 + }, + { + "epoch": 0.6719205665879739, + "grad_norm": 4.16681102675984, + "learning_rate": 2.5671654168383653e-06, + "loss": 0.4192, + "step": 9677 + }, + { + "epoch": 0.671990001388696, + "grad_norm": 4.017742423323768, + "learning_rate": 2.566183077552303e-06, + "loss": 0.3997, + "step": 9678 + }, + { + "epoch": 0.6720594361894181, + "grad_norm": 4.039512586415069, + "learning_rate": 2.5652008613659505e-06, + "loss": 0.3319, + "step": 9679 + }, + { + "epoch": 0.6721288709901403, + "grad_norm": 3.7546754633128443, + "learning_rate": 2.564218768328982e-06, + "loss": 0.4189, + "step": 9680 + }, + { + "epoch": 0.6721983057908624, + "grad_norm": 3.6258382057881313, + "learning_rate": 2.56323679849107e-06, + "loss": 0.3748, + "step": 9681 + }, + { + "epoch": 0.6722677405915845, + "grad_norm": 3.248460218715005, + "learning_rate": 2.562254951901885e-06, + "loss": 0.4327, + "step": 9682 + }, + { + "epoch": 0.6723371753923066, + "grad_norm": 4.6279859414738365, + "learning_rate": 2.5612732286110875e-06, + "loss": 0.4936, + "step": 9683 + }, + { + "epoch": 0.6724066101930287, 
+ "grad_norm": 4.1435774187448535, + "learning_rate": 2.560291628668327e-06, + "loss": 0.4733, + "step": 9684 + }, + { + "epoch": 0.6724760449937509, + "grad_norm": 4.262201692772094, + "learning_rate": 2.559310152123257e-06, + "loss": 0.5389, + "step": 9685 + }, + { + "epoch": 0.672545479794473, + "grad_norm": 4.465493299089475, + "learning_rate": 2.558328799025518e-06, + "loss": 0.371, + "step": 9686 + }, + { + "epoch": 0.6726149145951951, + "grad_norm": 3.9016478429978343, + "learning_rate": 2.5573475694247452e-06, + "loss": 0.3965, + "step": 9687 + }, + { + "epoch": 0.6726843493959173, + "grad_norm": 3.580978263940241, + "learning_rate": 2.5563664633705687e-06, + "loss": 0.4331, + "step": 9688 + }, + { + "epoch": 0.6727537841966393, + "grad_norm": 3.639827556388489, + "learning_rate": 2.555385480912611e-06, + "loss": 0.5104, + "step": 9689 + }, + { + "epoch": 0.6728232189973615, + "grad_norm": 5.487090621809878, + "learning_rate": 2.554404622100489e-06, + "loss": 0.6626, + "step": 9690 + }, + { + "epoch": 0.6728926537980836, + "grad_norm": 4.166366805571317, + "learning_rate": 2.553423886983815e-06, + "loss": 0.3542, + "step": 9691 + }, + { + "epoch": 0.6729620885988057, + "grad_norm": 4.739370512927739, + "learning_rate": 2.552443275612191e-06, + "loss": 0.6167, + "step": 9692 + }, + { + "epoch": 0.6730315233995279, + "grad_norm": 3.9958821012261847, + "learning_rate": 2.5514627880352173e-06, + "loss": 0.3444, + "step": 9693 + }, + { + "epoch": 0.67310095820025, + "grad_norm": 4.169661328641242, + "learning_rate": 2.550482424302484e-06, + "loss": 0.2941, + "step": 9694 + }, + { + "epoch": 0.673170393000972, + "grad_norm": 4.284982421587112, + "learning_rate": 2.5495021844635792e-06, + "loss": 0.6146, + "step": 9695 + }, + { + "epoch": 0.6732398278016942, + "grad_norm": 3.816213151288866, + "learning_rate": 2.5485220685680786e-06, + "loss": 0.474, + "step": 9696 + }, + { + "epoch": 0.6733092626024163, + "grad_norm": 3.664107032515996, + "learning_rate": 2.547542076665561e-06, + "loss": 0.5443, + "step": 9697 + }, + { + "epoch": 0.6733786974031385, + "grad_norm": 3.7187744550337976, + "learning_rate": 2.546562208805587e-06, + "loss": 0.3318, + "step": 9698 + }, + { + "epoch": 0.6734481322038606, + "grad_norm": 4.812640658707071, + "learning_rate": 2.5455824650377214e-06, + "loss": 0.5023, + "step": 9699 + }, + { + "epoch": 0.6735175670045827, + "grad_norm": 4.244860495278399, + "learning_rate": 2.54460284541152e-06, + "loss": 0.6481, + "step": 9700 + }, + { + "epoch": 0.6735870018053048, + "grad_norm": 3.5020361806040787, + "learning_rate": 2.543623349976524e-06, + "loss": 0.3104, + "step": 9701 + }, + { + "epoch": 0.6736564366060269, + "grad_norm": 3.8387648081446444, + "learning_rate": 2.5426439787822816e-06, + "loss": 0.4668, + "step": 9702 + }, + { + "epoch": 0.6737258714067491, + "grad_norm": 1.8760198728090534, + "learning_rate": 2.541664731878326e-06, + "loss": 0.2283, + "step": 9703 + }, + { + "epoch": 0.6737953062074712, + "grad_norm": 4.425266195572725, + "learning_rate": 2.5406856093141864e-06, + "loss": 0.4582, + "step": 9704 + }, + { + "epoch": 0.6738647410081933, + "grad_norm": 3.4715635025041403, + "learning_rate": 2.539706611139385e-06, + "loss": 0.3162, + "step": 9705 + }, + { + "epoch": 0.6739341758089155, + "grad_norm": 4.194945771853427, + "learning_rate": 2.53872773740344e-06, + "loss": 0.295, + "step": 9706 + }, + { + "epoch": 0.6740036106096375, + "grad_norm": 5.5276811629922955, + "learning_rate": 2.5377489881558593e-06, + "loss": 0.5347, + "step": 9707 + }, + { 
+ "epoch": 0.6740730454103596, + "grad_norm": 3.241952589415352, + "learning_rate": 2.5367703634461515e-06, + "loss": 0.3898, + "step": 9708 + }, + { + "epoch": 0.6741424802110818, + "grad_norm": 4.124360047425286, + "learning_rate": 2.53579186332381e-06, + "loss": 0.4553, + "step": 9709 + }, + { + "epoch": 0.6742119150118039, + "grad_norm": 4.988125358972835, + "learning_rate": 2.5348134878383256e-06, + "loss": 0.3358, + "step": 9710 + }, + { + "epoch": 0.6742813498125261, + "grad_norm": 7.130663917767488, + "learning_rate": 2.5338352370391893e-06, + "loss": 0.7001, + "step": 9711 + }, + { + "epoch": 0.6743507846132482, + "grad_norm": 2.794576112703203, + "learning_rate": 2.5328571109758747e-06, + "loss": 0.3411, + "step": 9712 + }, + { + "epoch": 0.6744202194139702, + "grad_norm": 4.10559869220069, + "learning_rate": 2.531879109697853e-06, + "loss": 0.3211, + "step": 9713 + }, + { + "epoch": 0.6744896542146924, + "grad_norm": 3.61868125005041, + "learning_rate": 2.530901233254595e-06, + "loss": 0.3552, + "step": 9714 + }, + { + "epoch": 0.6745590890154145, + "grad_norm": 3.9379609389537102, + "learning_rate": 2.529923481695561e-06, + "loss": 0.3296, + "step": 9715 + }, + { + "epoch": 0.6746285238161367, + "grad_norm": 3.136717494851782, + "learning_rate": 2.528945855070198e-06, + "loss": 0.286, + "step": 9716 + }, + { + "epoch": 0.6746979586168588, + "grad_norm": 4.947895257725833, + "learning_rate": 2.5279683534279596e-06, + "loss": 0.5271, + "step": 9717 + }, + { + "epoch": 0.6747673934175809, + "grad_norm": 3.620309611794032, + "learning_rate": 2.5269909768182842e-06, + "loss": 0.3837, + "step": 9718 + }, + { + "epoch": 0.674836828218303, + "grad_norm": 3.3405238655066642, + "learning_rate": 2.526013725290607e-06, + "loss": 0.3687, + "step": 9719 + }, + { + "epoch": 0.6749062630190251, + "grad_norm": 4.240929915472618, + "learning_rate": 2.5250365988943566e-06, + "loss": 0.5275, + "step": 9720 + }, + { + "epoch": 0.6749756978197473, + "grad_norm": 3.2041513441473266, + "learning_rate": 2.524059597678954e-06, + "loss": 0.3013, + "step": 9721 + }, + { + "epoch": 0.6750451326204694, + "grad_norm": 4.019398798219384, + "learning_rate": 2.5230827216938148e-06, + "loss": 0.5949, + "step": 9722 + }, + { + "epoch": 0.6751145674211915, + "grad_norm": 3.7713699646517314, + "learning_rate": 2.5221059709883496e-06, + "loss": 0.4183, + "step": 9723 + }, + { + "epoch": 0.6751840022219137, + "grad_norm": 5.395947669685341, + "learning_rate": 2.52112934561196e-06, + "loss": 0.7064, + "step": 9724 + }, + { + "epoch": 0.6752534370226357, + "grad_norm": 3.6830206742384166, + "learning_rate": 2.520152845614043e-06, + "loss": 0.5228, + "step": 9725 + }, + { + "epoch": 0.6753228718233578, + "grad_norm": 3.076269817256479, + "learning_rate": 2.5191764710439892e-06, + "loss": 0.323, + "step": 9726 + }, + { + "epoch": 0.67539230662408, + "grad_norm": 4.7961961343879596, + "learning_rate": 2.5182002219511796e-06, + "loss": 0.4945, + "step": 9727 + }, + { + "epoch": 0.6754617414248021, + "grad_norm": 3.9008483363731608, + "learning_rate": 2.517224098384997e-06, + "loss": 0.5505, + "step": 9728 + }, + { + "epoch": 0.6755311762255243, + "grad_norm": 4.448066835598669, + "learning_rate": 2.516248100394812e-06, + "loss": 0.4391, + "step": 9729 + }, + { + "epoch": 0.6756006110262464, + "grad_norm": 3.890368288206, + "learning_rate": 2.5152722280299836e-06, + "loss": 0.3821, + "step": 9730 + }, + { + "epoch": 0.6756700458269684, + "grad_norm": 3.538181630039485, + "learning_rate": 2.5142964813398763e-06, + "loss": 
0.4109, + "step": 9731 + }, + { + "epoch": 0.6757394806276906, + "grad_norm": 3.670843140725206, + "learning_rate": 2.5133208603738414e-06, + "loss": 0.4277, + "step": 9732 + }, + { + "epoch": 0.6758089154284127, + "grad_norm": 3.6245782401617923, + "learning_rate": 2.5123453651812204e-06, + "loss": 0.4623, + "step": 9733 + }, + { + "epoch": 0.6758783502291349, + "grad_norm": 2.9457726935829682, + "learning_rate": 2.5113699958113575e-06, + "loss": 0.1603, + "step": 9734 + }, + { + "epoch": 0.675947785029857, + "grad_norm": 3.94014017342817, + "learning_rate": 2.510394752313584e-06, + "loss": 0.6213, + "step": 9735 + }, + { + "epoch": 0.676017219830579, + "grad_norm": 3.677023398941641, + "learning_rate": 2.509419634737227e-06, + "loss": 0.3307, + "step": 9736 + }, + { + "epoch": 0.6760866546313012, + "grad_norm": 2.336695177585533, + "learning_rate": 2.5084446431316067e-06, + "loss": 0.2083, + "step": 9737 + }, + { + "epoch": 0.6761560894320233, + "grad_norm": 7.145143989209525, + "learning_rate": 2.5074697775460365e-06, + "loss": 0.6122, + "step": 9738 + }, + { + "epoch": 0.6762255242327454, + "grad_norm": 4.451275925271477, + "learning_rate": 2.5064950380298232e-06, + "loss": 0.6391, + "step": 9739 + }, + { + "epoch": 0.6762949590334676, + "grad_norm": 4.137005283171907, + "learning_rate": 2.5055204246322728e-06, + "loss": 0.4332, + "step": 9740 + }, + { + "epoch": 0.6763643938341897, + "grad_norm": 4.700908113805155, + "learning_rate": 2.504545937402675e-06, + "loss": 0.6117, + "step": 9741 + }, + { + "epoch": 0.6764338286349119, + "grad_norm": 3.904947765494735, + "learning_rate": 2.5035715763903173e-06, + "loss": 0.3427, + "step": 9742 + }, + { + "epoch": 0.6765032634356339, + "grad_norm": 4.580013987464403, + "learning_rate": 2.5025973416444883e-06, + "loss": 0.5832, + "step": 9743 + }, + { + "epoch": 0.676572698236356, + "grad_norm": 3.269092805447098, + "learning_rate": 2.5016232332144584e-06, + "loss": 0.1876, + "step": 9744 + }, + { + "epoch": 0.6766421330370782, + "grad_norm": 3.3620760735665667, + "learning_rate": 2.500649251149496e-06, + "loss": 0.3505, + "step": 9745 + }, + { + "epoch": 0.6767115678378003, + "grad_norm": 3.502455504892584, + "learning_rate": 2.4996753954988683e-06, + "loss": 0.3747, + "step": 9746 + }, + { + "epoch": 0.6767810026385225, + "grad_norm": 2.622000938289736, + "learning_rate": 2.4987016663118295e-06, + "loss": 0.3391, + "step": 9747 + }, + { + "epoch": 0.6768504374392446, + "grad_norm": 5.026809252514797, + "learning_rate": 2.4977280636376304e-06, + "loss": 0.6436, + "step": 9748 + }, + { + "epoch": 0.6769198722399666, + "grad_norm": 4.302572484864318, + "learning_rate": 2.4967545875255137e-06, + "loss": 0.503, + "step": 9749 + }, + { + "epoch": 0.6769893070406888, + "grad_norm": 4.0992726521276674, + "learning_rate": 2.4957812380247183e-06, + "loss": 0.4639, + "step": 9750 + }, + { + "epoch": 0.6770587418414109, + "grad_norm": 3.447739014194639, + "learning_rate": 2.494808015184474e-06, + "loss": 0.4517, + "step": 9751 + }, + { + "epoch": 0.677128176642133, + "grad_norm": 4.457185391753953, + "learning_rate": 2.493834919054005e-06, + "loss": 0.3666, + "step": 9752 + }, + { + "epoch": 0.6771976114428552, + "grad_norm": 3.6152678178821627, + "learning_rate": 2.4928619496825296e-06, + "loss": 0.2474, + "step": 9753 + }, + { + "epoch": 0.6772670462435773, + "grad_norm": 3.749536840507778, + "learning_rate": 2.4918891071192603e-06, + "loss": 0.3497, + "step": 9754 + }, + { + "epoch": 0.6773364810442994, + "grad_norm": 4.070206381880925, + 
"learning_rate": 2.490916391413402e-06, + "loss": 0.4138, + "step": 9755 + }, + { + "epoch": 0.6774059158450215, + "grad_norm": 4.370123486426939, + "learning_rate": 2.489943802614151e-06, + "loss": 0.5095, + "step": 9756 + }, + { + "epoch": 0.6774753506457436, + "grad_norm": 3.8354352757635852, + "learning_rate": 2.488971340770706e-06, + "loss": 0.3033, + "step": 9757 + }, + { + "epoch": 0.6775447854464658, + "grad_norm": 3.1775994526404134, + "learning_rate": 2.4879990059322477e-06, + "loss": 0.2755, + "step": 9758 + }, + { + "epoch": 0.6776142202471879, + "grad_norm": 4.1702007371367795, + "learning_rate": 2.487026798147955e-06, + "loss": 0.4457, + "step": 9759 + }, + { + "epoch": 0.6776836550479101, + "grad_norm": 3.6304675652533693, + "learning_rate": 2.486054717467005e-06, + "loss": 0.291, + "step": 9760 + }, + { + "epoch": 0.6777530898486321, + "grad_norm": 2.583086833181414, + "learning_rate": 2.485082763938565e-06, + "loss": 0.1789, + "step": 9761 + }, + { + "epoch": 0.6778225246493542, + "grad_norm": 4.089371429553828, + "learning_rate": 2.484110937611789e-06, + "loss": 0.4765, + "step": 9762 + }, + { + "epoch": 0.6778919594500764, + "grad_norm": 3.518959333092312, + "learning_rate": 2.4831392385358367e-06, + "loss": 0.4935, + "step": 9763 + }, + { + "epoch": 0.6779613942507985, + "grad_norm": 3.7920916130237976, + "learning_rate": 2.4821676667598533e-06, + "loss": 0.272, + "step": 9764 + }, + { + "epoch": 0.6780308290515206, + "grad_norm": 5.675743168002094, + "learning_rate": 2.4811962223329806e-06, + "loss": 0.611, + "step": 9765 + }, + { + "epoch": 0.6781002638522428, + "grad_norm": 3.992265908866341, + "learning_rate": 2.4802249053043525e-06, + "loss": 0.4023, + "step": 9766 + }, + { + "epoch": 0.6781696986529648, + "grad_norm": 3.193885330440984, + "learning_rate": 2.4792537157230977e-06, + "loss": 0.2608, + "step": 9767 + }, + { + "epoch": 0.678239133453687, + "grad_norm": 3.473120120301395, + "learning_rate": 2.4782826536383377e-06, + "loss": 0.3732, + "step": 9768 + }, + { + "epoch": 0.6783085682544091, + "grad_norm": 4.249550672481268, + "learning_rate": 2.477311719099187e-06, + "loss": 0.497, + "step": 9769 + }, + { + "epoch": 0.6783780030551312, + "grad_norm": 5.083217894018776, + "learning_rate": 2.476340912154755e-06, + "loss": 0.7283, + "step": 9770 + }, + { + "epoch": 0.6784474378558534, + "grad_norm": 3.8054011520094146, + "learning_rate": 2.475370232854142e-06, + "loss": 0.4105, + "step": 9771 + }, + { + "epoch": 0.6785168726565755, + "grad_norm": 4.3101436049052975, + "learning_rate": 2.4743996812464497e-06, + "loss": 0.6002, + "step": 9772 + }, + { + "epoch": 0.6785863074572976, + "grad_norm": 4.183488453020607, + "learning_rate": 2.473429257380762e-06, + "loss": 0.5859, + "step": 9773 + }, + { + "epoch": 0.6786557422580197, + "grad_norm": 1.8007433083212114, + "learning_rate": 2.472458961306162e-06, + "loss": 0.0895, + "step": 9774 + }, + { + "epoch": 0.6787251770587418, + "grad_norm": 3.7364372105373915, + "learning_rate": 2.471488793071731e-06, + "loss": 0.3044, + "step": 9775 + }, + { + "epoch": 0.678794611859464, + "grad_norm": 3.227121480066868, + "learning_rate": 2.470518752726533e-06, + "loss": 0.3059, + "step": 9776 + }, + { + "epoch": 0.6788640466601861, + "grad_norm": 4.610378546644763, + "learning_rate": 2.469548840319633e-06, + "loss": 0.4545, + "step": 9777 + }, + { + "epoch": 0.6789334814609083, + "grad_norm": 2.6498224193838222, + "learning_rate": 2.468579055900091e-06, + "loss": 0.2713, + "step": 9778 + }, + { + "epoch": 0.6790029162616303, + 
"grad_norm": 4.08439232512555, + "learning_rate": 2.467609399516956e-06, + "loss": 0.511, + "step": 9779 + }, + { + "epoch": 0.6790723510623524, + "grad_norm": 3.9045889907063382, + "learning_rate": 2.466639871219272e-06, + "loss": 0.3825, + "step": 9780 + }, + { + "epoch": 0.6791417858630746, + "grad_norm": 4.048589629516293, + "learning_rate": 2.4656704710560762e-06, + "loss": 0.3581, + "step": 9781 + }, + { + "epoch": 0.6792112206637967, + "grad_norm": 3.906987869773705, + "learning_rate": 2.4647011990764004e-06, + "loss": 0.3823, + "step": 9782 + }, + { + "epoch": 0.6792806554645188, + "grad_norm": 3.1283142429687123, + "learning_rate": 2.4637320553292694e-06, + "loss": 0.2515, + "step": 9783 + }, + { + "epoch": 0.679350090265241, + "grad_norm": 4.673842667752665, + "learning_rate": 2.4627630398637003e-06, + "loss": 0.6072, + "step": 9784 + }, + { + "epoch": 0.679419525065963, + "grad_norm": 3.422247096702537, + "learning_rate": 2.4617941527287065e-06, + "loss": 0.3338, + "step": 9785 + }, + { + "epoch": 0.6794889598666852, + "grad_norm": 3.5404415532155498, + "learning_rate": 2.4608253939732916e-06, + "loss": 0.3626, + "step": 9786 + }, + { + "epoch": 0.6795583946674073, + "grad_norm": 4.230133652992493, + "learning_rate": 2.4598567636464555e-06, + "loss": 0.3833, + "step": 9787 + }, + { + "epoch": 0.6796278294681294, + "grad_norm": 4.2977484399390535, + "learning_rate": 2.4588882617971876e-06, + "loss": 0.5147, + "step": 9788 + }, + { + "epoch": 0.6796972642688516, + "grad_norm": 4.248234780759611, + "learning_rate": 2.457919888474478e-06, + "loss": 0.5749, + "step": 9789 + }, + { + "epoch": 0.6797666990695737, + "grad_norm": 4.120775296393542, + "learning_rate": 2.4569516437273057e-06, + "loss": 0.4236, + "step": 9790 + }, + { + "epoch": 0.6798361338702958, + "grad_norm": 3.967674741087389, + "learning_rate": 2.455983527604638e-06, + "loss": 0.5141, + "step": 9791 + }, + { + "epoch": 0.6799055686710179, + "grad_norm": 4.814784082828865, + "learning_rate": 2.4550155401554465e-06, + "loss": 0.4685, + "step": 9792 + }, + { + "epoch": 0.67997500347174, + "grad_norm": 3.4887386894046966, + "learning_rate": 2.454047681428691e-06, + "loss": 0.2946, + "step": 9793 + }, + { + "epoch": 0.6800444382724622, + "grad_norm": 3.9472123323822412, + "learning_rate": 2.4530799514733196e-06, + "loss": 0.4068, + "step": 9794 + }, + { + "epoch": 0.6801138730731843, + "grad_norm": 3.3454137791367344, + "learning_rate": 2.4521123503382833e-06, + "loss": 0.2648, + "step": 9795 + }, + { + "epoch": 0.6801833078739064, + "grad_norm": 4.723927272266753, + "learning_rate": 2.4511448780725218e-06, + "loss": 0.5581, + "step": 9796 + }, + { + "epoch": 0.6802527426746285, + "grad_norm": 3.2935673405432273, + "learning_rate": 2.4501775347249673e-06, + "loss": 0.327, + "step": 9797 + }, + { + "epoch": 0.6803221774753506, + "grad_norm": 3.851935796947837, + "learning_rate": 2.4492103203445484e-06, + "loss": 0.5686, + "step": 9798 + }, + { + "epoch": 0.6803916122760728, + "grad_norm": 5.14645595182481, + "learning_rate": 2.448243234980185e-06, + "loss": 0.6463, + "step": 9799 + }, + { + "epoch": 0.6804610470767949, + "grad_norm": 4.671569036324234, + "learning_rate": 2.4472762786807913e-06, + "loss": 0.6234, + "step": 9800 + }, + { + "epoch": 0.680530481877517, + "grad_norm": 3.3705166050616406, + "learning_rate": 2.4463094514952753e-06, + "loss": 0.3874, + "step": 9801 + }, + { + "epoch": 0.6805999166782392, + "grad_norm": 4.366585635441514, + "learning_rate": 2.4453427534725365e-06, + "loss": 0.5305, + "step": 9802 + 
}, + { + "epoch": 0.6806693514789612, + "grad_norm": 3.512261448837231, + "learning_rate": 2.4443761846614695e-06, + "loss": 0.2289, + "step": 9803 + }, + { + "epoch": 0.6807387862796834, + "grad_norm": 3.1531138672918604, + "learning_rate": 2.4434097451109663e-06, + "loss": 0.2733, + "step": 9804 + }, + { + "epoch": 0.6808082210804055, + "grad_norm": 6.024589483375098, + "learning_rate": 2.442443434869904e-06, + "loss": 0.5628, + "step": 9805 + }, + { + "epoch": 0.6808776558811276, + "grad_norm": 2.6348623262979634, + "learning_rate": 2.441477253987156e-06, + "loss": 0.2849, + "step": 9806 + }, + { + "epoch": 0.6809470906818498, + "grad_norm": 4.6013348558501885, + "learning_rate": 2.4405112025115973e-06, + "loss": 0.5005, + "step": 9807 + }, + { + "epoch": 0.6810165254825719, + "grad_norm": 3.968140317015296, + "learning_rate": 2.439545280492082e-06, + "loss": 0.563, + "step": 9808 + }, + { + "epoch": 0.6810859602832939, + "grad_norm": 4.692451579638296, + "learning_rate": 2.4385794879774714e-06, + "loss": 0.4515, + "step": 9809 + }, + { + "epoch": 0.6811553950840161, + "grad_norm": 3.381600686299743, + "learning_rate": 2.4376138250166114e-06, + "loss": 0.2589, + "step": 9810 + }, + { + "epoch": 0.6812248298847382, + "grad_norm": 3.454506973994623, + "learning_rate": 2.436648291658345e-06, + "loss": 0.2894, + "step": 9811 + }, + { + "epoch": 0.6812942646854604, + "grad_norm": 3.3844751861396825, + "learning_rate": 2.435682887951507e-06, + "loss": 0.2106, + "step": 9812 + }, + { + "epoch": 0.6813636994861825, + "grad_norm": 3.0477782983983737, + "learning_rate": 2.434717613944927e-06, + "loss": 0.2554, + "step": 9813 + }, + { + "epoch": 0.6814331342869046, + "grad_norm": 5.336881185391047, + "learning_rate": 2.433752469687427e-06, + "loss": 0.6978, + "step": 9814 + }, + { + "epoch": 0.6815025690876267, + "grad_norm": 4.726502487413906, + "learning_rate": 2.4327874552278236e-06, + "loss": 0.6078, + "step": 9815 + }, + { + "epoch": 0.6815720038883488, + "grad_norm": 6.004848463000493, + "learning_rate": 2.4318225706149257e-06, + "loss": 0.525, + "step": 9816 + }, + { + "epoch": 0.681641438689071, + "grad_norm": 3.541127098000716, + "learning_rate": 2.4308578158975338e-06, + "loss": 0.3951, + "step": 9817 + }, + { + "epoch": 0.6817108734897931, + "grad_norm": 3.926064963166198, + "learning_rate": 2.4298931911244505e-06, + "loss": 0.522, + "step": 9818 + }, + { + "epoch": 0.6817803082905152, + "grad_norm": 4.162048099655802, + "learning_rate": 2.428928696344459e-06, + "loss": 0.1981, + "step": 9819 + }, + { + "epoch": 0.6818497430912374, + "grad_norm": 4.457082748464746, + "learning_rate": 2.4279643316063432e-06, + "loss": 0.4376, + "step": 9820 + }, + { + "epoch": 0.6819191778919594, + "grad_norm": 4.883138669728883, + "learning_rate": 2.427000096958883e-06, + "loss": 0.6762, + "step": 9821 + }, + { + "epoch": 0.6819886126926815, + "grad_norm": 3.505156444799803, + "learning_rate": 2.426035992450848e-06, + "loss": 0.3529, + "step": 9822 + }, + { + "epoch": 0.6820580474934037, + "grad_norm": 2.2955569415383437, + "learning_rate": 2.425072018130996e-06, + "loss": 0.1911, + "step": 9823 + }, + { + "epoch": 0.6821274822941258, + "grad_norm": 3.9554722787225183, + "learning_rate": 2.42410817404809e-06, + "loss": 0.3052, + "step": 9824 + }, + { + "epoch": 0.682196917094848, + "grad_norm": 4.423032289292352, + "learning_rate": 2.423144460250879e-06, + "loss": 0.767, + "step": 9825 + }, + { + "epoch": 0.6822663518955701, + "grad_norm": 3.919395822501866, + "learning_rate": 2.422180876788102e-06, + 
"loss": 0.399, + "step": 9826 + }, + { + "epoch": 0.6823357866962921, + "grad_norm": 3.759330949911373, + "learning_rate": 2.4212174237085007e-06, + "loss": 0.4432, + "step": 9827 + }, + { + "epoch": 0.6824052214970143, + "grad_norm": 3.812652143984822, + "learning_rate": 2.4202541010608045e-06, + "loss": 0.4258, + "step": 9828 + }, + { + "epoch": 0.6824746562977364, + "grad_norm": 3.290250345342252, + "learning_rate": 2.419290908893736e-06, + "loss": 0.4326, + "step": 9829 + }, + { + "epoch": 0.6825440910984586, + "grad_norm": 4.388961955153422, + "learning_rate": 2.418327847256014e-06, + "loss": 0.225, + "step": 9830 + }, + { + "epoch": 0.6826135258991807, + "grad_norm": 4.777111873850835, + "learning_rate": 2.4173649161963475e-06, + "loss": 0.5539, + "step": 9831 + }, + { + "epoch": 0.6826829606999028, + "grad_norm": 3.4778579198757895, + "learning_rate": 2.416402115763441e-06, + "loss": 0.4531, + "step": 9832 + }, + { + "epoch": 0.6827523955006249, + "grad_norm": 3.321419067205405, + "learning_rate": 2.4154394460059923e-06, + "loss": 0.3097, + "step": 9833 + }, + { + "epoch": 0.682821830301347, + "grad_norm": 2.6870853837166777, + "learning_rate": 2.4144769069726913e-06, + "loss": 0.1043, + "step": 9834 + }, + { + "epoch": 0.6828912651020692, + "grad_norm": 4.485782733413446, + "learning_rate": 2.413514498712221e-06, + "loss": 0.5505, + "step": 9835 + }, + { + "epoch": 0.6829606999027913, + "grad_norm": 3.7519678654551405, + "learning_rate": 2.412552221273265e-06, + "loss": 0.4472, + "step": 9836 + }, + { + "epoch": 0.6830301347035134, + "grad_norm": 3.0660247520721917, + "learning_rate": 2.411590074704485e-06, + "loss": 0.2066, + "step": 9837 + }, + { + "epoch": 0.6830995695042356, + "grad_norm": 3.883769911046286, + "learning_rate": 2.4106280590545524e-06, + "loss": 0.4629, + "step": 9838 + }, + { + "epoch": 0.6831690043049576, + "grad_norm": 3.17446447432747, + "learning_rate": 2.4096661743721222e-06, + "loss": 0.4595, + "step": 9839 + }, + { + "epoch": 0.6832384391056797, + "grad_norm": 2.9699594447814897, + "learning_rate": 2.4087044207058464e-06, + "loss": 0.3838, + "step": 9840 + }, + { + "epoch": 0.6833078739064019, + "grad_norm": 4.467682145984409, + "learning_rate": 2.4077427981043683e-06, + "loss": 0.3624, + "step": 9841 + }, + { + "epoch": 0.683377308707124, + "grad_norm": 3.871110591826709, + "learning_rate": 2.406781306616326e-06, + "loss": 0.5333, + "step": 9842 + }, + { + "epoch": 0.6834467435078462, + "grad_norm": 4.420531011626496, + "learning_rate": 2.4058199462903507e-06, + "loss": 0.4566, + "step": 9843 + }, + { + "epoch": 0.6835161783085683, + "grad_norm": 4.329720520729106, + "learning_rate": 2.4048587171750674e-06, + "loss": 0.4845, + "step": 9844 + }, + { + "epoch": 0.6835856131092903, + "grad_norm": 2.8571727935793394, + "learning_rate": 2.4038976193190933e-06, + "loss": 0.179, + "step": 9845 + }, + { + "epoch": 0.6836550479100125, + "grad_norm": 5.2119036760788005, + "learning_rate": 2.402936652771038e-06, + "loss": 0.5882, + "step": 9846 + }, + { + "epoch": 0.6837244827107346, + "grad_norm": 4.0971113864352064, + "learning_rate": 2.4019758175795123e-06, + "loss": 0.4917, + "step": 9847 + }, + { + "epoch": 0.6837939175114568, + "grad_norm": 2.7377028159682433, + "learning_rate": 2.4010151137931075e-06, + "loss": 0.2143, + "step": 9848 + }, + { + "epoch": 0.6838633523121789, + "grad_norm": 3.355829205627902, + "learning_rate": 2.400054541460415e-06, + "loss": 0.3104, + "step": 9849 + }, + { + "epoch": 0.683932787112901, + "grad_norm": 4.575762143658527, + 
"learning_rate": 2.3990941006300267e-06, + "loss": 0.5211, + "step": 9850 + }, + { + "epoch": 0.6840022219136231, + "grad_norm": 3.6274356905656204, + "learning_rate": 2.398133791350513e-06, + "loss": 0.479, + "step": 9851 + }, + { + "epoch": 0.6840716567143452, + "grad_norm": 3.213791766494711, + "learning_rate": 2.397173613670446e-06, + "loss": 0.373, + "step": 9852 + }, + { + "epoch": 0.6841410915150673, + "grad_norm": 5.5625155185211534, + "learning_rate": 2.3962135676383946e-06, + "loss": 0.5589, + "step": 9853 + }, + { + "epoch": 0.6842105263157895, + "grad_norm": 3.5479415820895257, + "learning_rate": 2.3952536533029164e-06, + "loss": 0.3961, + "step": 9854 + }, + { + "epoch": 0.6842799611165116, + "grad_norm": 4.304659394842034, + "learning_rate": 2.394293870712557e-06, + "loss": 0.3954, + "step": 9855 + }, + { + "epoch": 0.6843493959172338, + "grad_norm": 2.9469344723330217, + "learning_rate": 2.3933342199158672e-06, + "loss": 0.2235, + "step": 9856 + }, + { + "epoch": 0.6844188307179558, + "grad_norm": 3.72194463101503, + "learning_rate": 2.3923747009613823e-06, + "loss": 0.388, + "step": 9857 + }, + { + "epoch": 0.6844882655186779, + "grad_norm": 4.458577840190922, + "learning_rate": 2.3914153138976347e-06, + "loss": 0.5241, + "step": 9858 + }, + { + "epoch": 0.6845577003194001, + "grad_norm": 4.563625355555192, + "learning_rate": 2.390456058773149e-06, + "loss": 0.4951, + "step": 9859 + }, + { + "epoch": 0.6846271351201222, + "grad_norm": 3.1891317695617456, + "learning_rate": 2.389496935636442e-06, + "loss": 0.4283, + "step": 9860 + }, + { + "epoch": 0.6846965699208444, + "grad_norm": 7.761057046491241, + "learning_rate": 2.3885379445360267e-06, + "loss": 0.5915, + "step": 9861 + }, + { + "epoch": 0.6847660047215665, + "grad_norm": 4.725632435289322, + "learning_rate": 2.3875790855204074e-06, + "loss": 0.2836, + "step": 9862 + }, + { + "epoch": 0.6848354395222885, + "grad_norm": 3.550226777742127, + "learning_rate": 2.3866203586380815e-06, + "loss": 0.3609, + "step": 9863 + }, + { + "epoch": 0.6849048743230107, + "grad_norm": 5.243164559437213, + "learning_rate": 2.3856617639375386e-06, + "loss": 0.6243, + "step": 9864 + }, + { + "epoch": 0.6849743091237328, + "grad_norm": 3.139078133660744, + "learning_rate": 2.3847033014672693e-06, + "loss": 0.2132, + "step": 9865 + }, + { + "epoch": 0.6850437439244549, + "grad_norm": 4.280098854404842, + "learning_rate": 2.3837449712757456e-06, + "loss": 0.424, + "step": 9866 + }, + { + "epoch": 0.6851131787251771, + "grad_norm": 3.9676959371392377, + "learning_rate": 2.382786773411439e-06, + "loss": 0.551, + "step": 9867 + }, + { + "epoch": 0.6851826135258992, + "grad_norm": 3.8895289132689386, + "learning_rate": 2.38182870792282e-06, + "loss": 0.3518, + "step": 9868 + }, + { + "epoch": 0.6852520483266213, + "grad_norm": 3.386574184599235, + "learning_rate": 2.3808707748583375e-06, + "loss": 0.4454, + "step": 9869 + }, + { + "epoch": 0.6853214831273434, + "grad_norm": 3.0772652248404055, + "learning_rate": 2.37991297426645e-06, + "loss": 0.3227, + "step": 9870 + }, + { + "epoch": 0.6853909179280655, + "grad_norm": 3.9581502261761505, + "learning_rate": 2.378955306195599e-06, + "loss": 0.4379, + "step": 9871 + }, + { + "epoch": 0.6854603527287877, + "grad_norm": 3.745209164161974, + "learning_rate": 2.3779977706942232e-06, + "loss": 0.3932, + "step": 9872 + }, + { + "epoch": 0.6855297875295098, + "grad_norm": 7.362292670325728, + "learning_rate": 2.3770403678107527e-06, + "loss": 0.5089, + "step": 9873 + }, + { + "epoch": 0.685599222330232, 
+ "grad_norm": 3.4551543865411674, + "learning_rate": 2.376083097593613e-06, + "loss": 0.3409, + "step": 9874 + }, + { + "epoch": 0.685668657130954, + "grad_norm": 3.6950473301357785, + "learning_rate": 2.37512596009122e-06, + "loss": 0.4172, + "step": 9875 + }, + { + "epoch": 0.6857380919316761, + "grad_norm": 4.683621242369369, + "learning_rate": 2.374168955351987e-06, + "loss": 0.5417, + "step": 9876 + }, + { + "epoch": 0.6858075267323983, + "grad_norm": 2.845316893945616, + "learning_rate": 2.373212083424316e-06, + "loss": 0.2425, + "step": 9877 + }, + { + "epoch": 0.6858769615331204, + "grad_norm": 4.146781290959154, + "learning_rate": 2.3722553443566038e-06, + "loss": 0.5117, + "step": 9878 + }, + { + "epoch": 0.6859463963338425, + "grad_norm": 3.643348728144497, + "learning_rate": 2.3712987381972464e-06, + "loss": 0.2761, + "step": 9879 + }, + { + "epoch": 0.6860158311345647, + "grad_norm": 3.9017239229434586, + "learning_rate": 2.370342264994623e-06, + "loss": 0.5139, + "step": 9880 + }, + { + "epoch": 0.6860852659352867, + "grad_norm": 4.34224936926708, + "learning_rate": 2.3693859247971096e-06, + "loss": 0.4932, + "step": 9881 + }, + { + "epoch": 0.6861547007360089, + "grad_norm": 3.4022923221584795, + "learning_rate": 2.368429717653083e-06, + "loss": 0.4389, + "step": 9882 + }, + { + "epoch": 0.686224135536731, + "grad_norm": 3.7071649879454442, + "learning_rate": 2.3674736436109018e-06, + "loss": 0.2884, + "step": 9883 + }, + { + "epoch": 0.6862935703374531, + "grad_norm": 3.190885950397369, + "learning_rate": 2.3665177027189236e-06, + "loss": 0.4317, + "step": 9884 + }, + { + "epoch": 0.6863630051381753, + "grad_norm": 3.602591788453721, + "learning_rate": 2.365561895025501e-06, + "loss": 0.4219, + "step": 9885 + }, + { + "epoch": 0.6864324399388974, + "grad_norm": 3.790112195645559, + "learning_rate": 2.3646062205789787e-06, + "loss": 0.2881, + "step": 9886 + }, + { + "epoch": 0.6865018747396195, + "grad_norm": 4.0798318215741896, + "learning_rate": 2.363650679427688e-06, + "loss": 0.5128, + "step": 9887 + }, + { + "epoch": 0.6865713095403416, + "grad_norm": 6.378104590589276, + "learning_rate": 2.3626952716199647e-06, + "loss": 0.8599, + "step": 9888 + }, + { + "epoch": 0.6866407443410637, + "grad_norm": 3.7217274911588625, + "learning_rate": 2.3617399972041306e-06, + "loss": 0.534, + "step": 9889 + }, + { + "epoch": 0.6867101791417859, + "grad_norm": 2.66682747115505, + "learning_rate": 2.360784856228501e-06, + "loss": 0.231, + "step": 9890 + }, + { + "epoch": 0.686779613942508, + "grad_norm": 4.052588606123804, + "learning_rate": 2.3598298487413877e-06, + "loss": 0.4671, + "step": 9891 + }, + { + "epoch": 0.6868490487432302, + "grad_norm": 3.634110962963185, + "learning_rate": 2.358874974791093e-06, + "loss": 0.3105, + "step": 9892 + }, + { + "epoch": 0.6869184835439522, + "grad_norm": 5.7255852888577925, + "learning_rate": 2.3579202344259134e-06, + "loss": 0.4778, + "step": 9893 + }, + { + "epoch": 0.6869879183446743, + "grad_norm": 3.674345076350443, + "learning_rate": 2.3569656276941384e-06, + "loss": 0.3107, + "step": 9894 + }, + { + "epoch": 0.6870573531453965, + "grad_norm": 2.869175328462186, + "learning_rate": 2.3560111546440512e-06, + "loss": 0.1981, + "step": 9895 + }, + { + "epoch": 0.6871267879461186, + "grad_norm": 4.084003961052353, + "learning_rate": 2.355056815323926e-06, + "loss": 0.4533, + "step": 9896 + }, + { + "epoch": 0.6871962227468407, + "grad_norm": 3.1662151278136825, + "learning_rate": 2.3541026097820385e-06, + "loss": 0.2483, + "step": 9897 + }, 
+ { + "epoch": 0.6872656575475629, + "grad_norm": 4.362849235929402, + "learning_rate": 2.3531485380666425e-06, + "loss": 0.2923, + "step": 9898 + }, + { + "epoch": 0.6873350923482849, + "grad_norm": 3.6564910269434883, + "learning_rate": 2.3521946002260003e-06, + "loss": 0.3972, + "step": 9899 + }, + { + "epoch": 0.6874045271490071, + "grad_norm": 4.220993109871395, + "learning_rate": 2.3512407963083617e-06, + "loss": 0.5648, + "step": 9900 + }, + { + "epoch": 0.6874739619497292, + "grad_norm": 4.581219059763115, + "learning_rate": 2.3502871263619616e-06, + "loss": 0.4656, + "step": 9901 + }, + { + "epoch": 0.6875433967504513, + "grad_norm": 6.187364063775447, + "learning_rate": 2.3493335904350427e-06, + "loss": 0.5475, + "step": 9902 + }, + { + "epoch": 0.6876128315511735, + "grad_norm": 4.018151480429149, + "learning_rate": 2.348380188575831e-06, + "loss": 0.5931, + "step": 9903 + }, + { + "epoch": 0.6876822663518956, + "grad_norm": 3.5922712318921097, + "learning_rate": 2.347426920832549e-06, + "loss": 0.3366, + "step": 9904 + }, + { + "epoch": 0.6877517011526177, + "grad_norm": 4.5581271579348765, + "learning_rate": 2.346473787253412e-06, + "loss": 0.4588, + "step": 9905 + }, + { + "epoch": 0.6878211359533398, + "grad_norm": 3.8155163476561356, + "learning_rate": 2.3455207878866283e-06, + "loss": 0.4549, + "step": 9906 + }, + { + "epoch": 0.6878905707540619, + "grad_norm": 4.926024292497031, + "learning_rate": 2.3445679227803984e-06, + "loss": 0.6584, + "step": 9907 + }, + { + "epoch": 0.6879600055547841, + "grad_norm": 5.476888498565792, + "learning_rate": 2.343615191982919e-06, + "loss": 0.391, + "step": 9908 + }, + { + "epoch": 0.6880294403555062, + "grad_norm": 4.086756105009742, + "learning_rate": 2.342662595542377e-06, + "loss": 0.5883, + "step": 9909 + }, + { + "epoch": 0.6880988751562283, + "grad_norm": 3.963603336463058, + "learning_rate": 2.3417101335069524e-06, + "loss": 0.4375, + "step": 9910 + }, + { + "epoch": 0.6881683099569504, + "grad_norm": 4.097531133120274, + "learning_rate": 2.3407578059248247e-06, + "loss": 0.5242, + "step": 9911 + }, + { + "epoch": 0.6882377447576725, + "grad_norm": 3.217336821741175, + "learning_rate": 2.339805612844156e-06, + "loss": 0.2713, + "step": 9912 + }, + { + "epoch": 0.6883071795583947, + "grad_norm": 5.435268303591026, + "learning_rate": 2.3388535543131075e-06, + "loss": 0.3886, + "step": 9913 + }, + { + "epoch": 0.6883766143591168, + "grad_norm": 4.814671256785052, + "learning_rate": 2.337901630379837e-06, + "loss": 0.4295, + "step": 9914 + }, + { + "epoch": 0.6884460491598389, + "grad_norm": 3.7031426441761615, + "learning_rate": 2.336949841092491e-06, + "loss": 0.374, + "step": 9915 + }, + { + "epoch": 0.6885154839605611, + "grad_norm": 3.632389814733023, + "learning_rate": 2.3359981864992054e-06, + "loss": 0.377, + "step": 9916 + }, + { + "epoch": 0.6885849187612831, + "grad_norm": 4.3169679519059505, + "learning_rate": 2.3350466666481184e-06, + "loss": 0.3984, + "step": 9917 + }, + { + "epoch": 0.6886543535620053, + "grad_norm": 4.363839981326813, + "learning_rate": 2.334095281587356e-06, + "loss": 0.5476, + "step": 9918 + }, + { + "epoch": 0.6887237883627274, + "grad_norm": 4.679343375249857, + "learning_rate": 2.3331440313650372e-06, + "loss": 0.5443, + "step": 9919 + }, + { + "epoch": 0.6887932231634495, + "grad_norm": 3.6524224076625575, + "learning_rate": 2.3321929160292756e-06, + "loss": 0.4618, + "step": 9920 + }, + { + "epoch": 0.6888626579641717, + "grad_norm": 3.527208751985019, + "learning_rate": 
2.3312419356281774e-06, + "loss": 0.3985, + "step": 9921 + }, + { + "epoch": 0.6889320927648938, + "grad_norm": 3.1291605396034528, + "learning_rate": 2.3302910902098423e-06, + "loss": 0.3014, + "step": 9922 + }, + { + "epoch": 0.6890015275656158, + "grad_norm": 4.058476030252018, + "learning_rate": 2.329340379822363e-06, + "loss": 0.4411, + "step": 9923 + }, + { + "epoch": 0.689070962366338, + "grad_norm": 4.22663055423953, + "learning_rate": 2.328389804513825e-06, + "loss": 0.4878, + "step": 9924 + }, + { + "epoch": 0.6891403971670601, + "grad_norm": 4.171863469028785, + "learning_rate": 2.327439364332307e-06, + "loss": 0.4672, + "step": 9925 + }, + { + "epoch": 0.6892098319677823, + "grad_norm": 2.8447947524819495, + "learning_rate": 2.3264890593258827e-06, + "loss": 0.2973, + "step": 9926 + }, + { + "epoch": 0.6892792667685044, + "grad_norm": 4.250807345218405, + "learning_rate": 2.3255388895426143e-06, + "loss": 0.5103, + "step": 9927 + }, + { + "epoch": 0.6893487015692265, + "grad_norm": 4.560028909629896, + "learning_rate": 2.3245888550305644e-06, + "loss": 0.3779, + "step": 9928 + }, + { + "epoch": 0.6894181363699486, + "grad_norm": 3.7738900747012445, + "learning_rate": 2.3236389558377847e-06, + "loss": 0.3704, + "step": 9929 + }, + { + "epoch": 0.6894875711706707, + "grad_norm": 2.602156630841338, + "learning_rate": 2.3226891920123146e-06, + "loss": 0.3247, + "step": 9930 + }, + { + "epoch": 0.6895570059713929, + "grad_norm": 3.821689861329341, + "learning_rate": 2.3217395636021966e-06, + "loss": 0.3454, + "step": 9931 + }, + { + "epoch": 0.689626440772115, + "grad_norm": 2.871325671516494, + "learning_rate": 2.320790070655464e-06, + "loss": 0.2711, + "step": 9932 + }, + { + "epoch": 0.6896958755728371, + "grad_norm": 2.782848497728131, + "learning_rate": 2.319840713220133e-06, + "loss": 0.2436, + "step": 9933 + }, + { + "epoch": 0.6897653103735593, + "grad_norm": 3.3090362754466724, + "learning_rate": 2.318891491344228e-06, + "loss": 0.1628, + "step": 9934 + }, + { + "epoch": 0.6898347451742813, + "grad_norm": 3.8832291473238048, + "learning_rate": 2.3179424050757576e-06, + "loss": 0.3062, + "step": 9935 + }, + { + "epoch": 0.6899041799750034, + "grad_norm": 3.2256047648077892, + "learning_rate": 2.3169934544627254e-06, + "loss": 0.2507, + "step": 9936 + }, + { + "epoch": 0.6899736147757256, + "grad_norm": 3.700991186835935, + "learning_rate": 2.3160446395531284e-06, + "loss": 0.3939, + "step": 9937 + }, + { + "epoch": 0.6900430495764477, + "grad_norm": 3.6825527103557105, + "learning_rate": 2.3150959603949565e-06, + "loss": 0.3839, + "step": 9938 + }, + { + "epoch": 0.6901124843771699, + "grad_norm": 4.705895078883613, + "learning_rate": 2.314147417036193e-06, + "loss": 0.5134, + "step": 9939 + }, + { + "epoch": 0.690181919177892, + "grad_norm": 4.1956008113146925, + "learning_rate": 2.313199009524813e-06, + "loss": 0.5509, + "step": 9940 + }, + { + "epoch": 0.690251353978614, + "grad_norm": 2.9589955686634224, + "learning_rate": 2.3122507379087874e-06, + "loss": 0.1923, + "step": 9941 + }, + { + "epoch": 0.6903207887793362, + "grad_norm": 3.6815847327890583, + "learning_rate": 2.311302602236076e-06, + "loss": 0.3182, + "step": 9942 + }, + { + "epoch": 0.6903902235800583, + "grad_norm": 4.4253312886036, + "learning_rate": 2.3103546025546404e-06, + "loss": 0.4737, + "step": 9943 + }, + { + "epoch": 0.6904596583807805, + "grad_norm": 4.119312455531526, + "learning_rate": 2.3094067389124234e-06, + "loss": 0.3395, + "step": 9944 + }, + { + "epoch": 0.6905290931815026, + 
"grad_norm": 3.3771347621407197, + "learning_rate": 2.3084590113573675e-06, + "loss": 0.3735, + "step": 9945 + }, + { + "epoch": 0.6905985279822247, + "grad_norm": 4.2040133796116885, + "learning_rate": 2.3075114199374105e-06, + "loss": 0.3394, + "step": 9946 + }, + { + "epoch": 0.6906679627829468, + "grad_norm": 2.2468502031652426, + "learning_rate": 2.3065639647004812e-06, + "loss": 0.2361, + "step": 9947 + }, + { + "epoch": 0.6907373975836689, + "grad_norm": 3.310621361342557, + "learning_rate": 2.305616645694495e-06, + "loss": 0.3389, + "step": 9948 + }, + { + "epoch": 0.6908068323843911, + "grad_norm": 3.771569545352427, + "learning_rate": 2.3046694629673715e-06, + "loss": 0.4583, + "step": 9949 + }, + { + "epoch": 0.6908762671851132, + "grad_norm": 2.4293178451401922, + "learning_rate": 2.303722416567017e-06, + "loss": 0.147, + "step": 9950 + }, + { + "epoch": 0.6909457019858353, + "grad_norm": 3.912046223810117, + "learning_rate": 2.3027755065413316e-06, + "loss": 0.4233, + "step": 9951 + }, + { + "epoch": 0.6910151367865575, + "grad_norm": 3.2334436837175886, + "learning_rate": 2.301828732938209e-06, + "loss": 0.3161, + "step": 9952 + }, + { + "epoch": 0.6910845715872795, + "grad_norm": 4.447255467970093, + "learning_rate": 2.3008820958055358e-06, + "loss": 0.5692, + "step": 9953 + }, + { + "epoch": 0.6911540063880016, + "grad_norm": 4.796928012056432, + "learning_rate": 2.2999355951911917e-06, + "loss": 0.5366, + "step": 9954 + }, + { + "epoch": 0.6912234411887238, + "grad_norm": 3.494245152203853, + "learning_rate": 2.2989892311430502e-06, + "loss": 0.2756, + "step": 9955 + }, + { + "epoch": 0.6912928759894459, + "grad_norm": 3.586523140757075, + "learning_rate": 2.298043003708977e-06, + "loss": 0.5022, + "step": 9956 + }, + { + "epoch": 0.6913623107901681, + "grad_norm": 2.943459209026951, + "learning_rate": 2.297096912936831e-06, + "loss": 0.3286, + "step": 9957 + }, + { + "epoch": 0.6914317455908902, + "grad_norm": 4.11711616031827, + "learning_rate": 2.2961509588744644e-06, + "loss": 0.4856, + "step": 9958 + }, + { + "epoch": 0.6915011803916122, + "grad_norm": 4.3386763272695354, + "learning_rate": 2.295205141569721e-06, + "loss": 0.4595, + "step": 9959 + }, + { + "epoch": 0.6915706151923344, + "grad_norm": 3.628194814896253, + "learning_rate": 2.2942594610704427e-06, + "loss": 0.3946, + "step": 9960 + }, + { + "epoch": 0.6916400499930565, + "grad_norm": 5.28469705531795, + "learning_rate": 2.293313917424461e-06, + "loss": 0.6866, + "step": 9961 + }, + { + "epoch": 0.6917094847937787, + "grad_norm": 7.377312066440847, + "learning_rate": 2.292368510679595e-06, + "loss": 0.6803, + "step": 9962 + }, + { + "epoch": 0.6917789195945008, + "grad_norm": 3.4551424772539834, + "learning_rate": 2.2914232408836678e-06, + "loss": 0.4117, + "step": 9963 + }, + { + "epoch": 0.6918483543952229, + "grad_norm": 2.8658780482426103, + "learning_rate": 2.2904781080844896e-06, + "loss": 0.2443, + "step": 9964 + }, + { + "epoch": 0.691917789195945, + "grad_norm": 3.5650724855171196, + "learning_rate": 2.2895331123298593e-06, + "loss": 0.301, + "step": 9965 + }, + { + "epoch": 0.6919872239966671, + "grad_norm": 4.208389580726364, + "learning_rate": 2.2885882536675785e-06, + "loss": 0.4016, + "step": 9966 + }, + { + "epoch": 0.6920566587973892, + "grad_norm": 2.545987157531226, + "learning_rate": 2.287643532145436e-06, + "loss": 0.2877, + "step": 9967 + }, + { + "epoch": 0.6921260935981114, + "grad_norm": 3.980263278627177, + "learning_rate": 2.2866989478112146e-06, + "loss": 0.4073, + "step": 9968 + 
}, + { + "epoch": 0.6921955283988335, + "grad_norm": 4.888326135473543, + "learning_rate": 2.2857545007126896e-06, + "loss": 0.5063, + "step": 9969 + }, + { + "epoch": 0.6922649631995557, + "grad_norm": 3.699699563637695, + "learning_rate": 2.2848101908976305e-06, + "loss": 0.491, + "step": 9970 + }, + { + "epoch": 0.6923343980002777, + "grad_norm": 5.288522628410597, + "learning_rate": 2.2838660184137977e-06, + "loss": 0.6455, + "step": 9971 + }, + { + "epoch": 0.6924038328009998, + "grad_norm": 4.5159676061653355, + "learning_rate": 2.2829219833089527e-06, + "loss": 0.4156, + "step": 9972 + }, + { + "epoch": 0.692473267601722, + "grad_norm": 3.7817293702763526, + "learning_rate": 2.2819780856308366e-06, + "loss": 0.3614, + "step": 9973 + }, + { + "epoch": 0.6925427024024441, + "grad_norm": 4.371606431665333, + "learning_rate": 2.2810343254271915e-06, + "loss": 0.5014, + "step": 9974 + }, + { + "epoch": 0.6926121372031663, + "grad_norm": 3.1271499527580047, + "learning_rate": 2.2800907027457574e-06, + "loss": 0.3256, + "step": 9975 + }, + { + "epoch": 0.6926815720038884, + "grad_norm": 4.610960416169119, + "learning_rate": 2.279147217634256e-06, + "loss": 0.5309, + "step": 9976 + }, + { + "epoch": 0.6927510068046104, + "grad_norm": 2.7230391754338634, + "learning_rate": 2.2782038701404075e-06, + "loss": 0.2263, + "step": 9977 + }, + { + "epoch": 0.6928204416053326, + "grad_norm": 4.013955946987216, + "learning_rate": 2.277260660311929e-06, + "loss": 0.4409, + "step": 9978 + }, + { + "epoch": 0.6928898764060547, + "grad_norm": 2.6577442987258335, + "learning_rate": 2.2763175881965256e-06, + "loss": 0.2713, + "step": 9979 + }, + { + "epoch": 0.6929593112067768, + "grad_norm": 3.882094715395453, + "learning_rate": 2.275374653841896e-06, + "loss": 0.4543, + "step": 9980 + }, + { + "epoch": 0.693028746007499, + "grad_norm": 4.49746276689424, + "learning_rate": 2.2744318572957335e-06, + "loss": 0.59, + "step": 9981 + }, + { + "epoch": 0.693098180808221, + "grad_norm": 2.8683359796632426, + "learning_rate": 2.2734891986057238e-06, + "loss": 0.3016, + "step": 9982 + }, + { + "epoch": 0.6931676156089432, + "grad_norm": 4.248412387745265, + "learning_rate": 2.2725466778195447e-06, + "loss": 0.4497, + "step": 9983 + }, + { + "epoch": 0.6932370504096653, + "grad_norm": 3.4125098081177767, + "learning_rate": 2.2716042949848686e-06, + "loss": 0.358, + "step": 9984 + }, + { + "epoch": 0.6933064852103874, + "grad_norm": 4.646363050802294, + "learning_rate": 2.27066205014936e-06, + "loss": 0.5883, + "step": 9985 + }, + { + "epoch": 0.6933759200111096, + "grad_norm": 3.611587277186948, + "learning_rate": 2.2697199433606765e-06, + "loss": 0.4002, + "step": 9986 + }, + { + "epoch": 0.6934453548118317, + "grad_norm": 9.103611317580159, + "learning_rate": 2.2687779746664685e-06, + "loss": 0.7162, + "step": 9987 + }, + { + "epoch": 0.6935147896125539, + "grad_norm": 3.1084333985097232, + "learning_rate": 2.267836144114378e-06, + "loss": 0.3405, + "step": 9988 + }, + { + "epoch": 0.6935842244132759, + "grad_norm": 3.704921884320625, + "learning_rate": 2.266894451752048e-06, + "loss": 0.3252, + "step": 9989 + }, + { + "epoch": 0.693653659213998, + "grad_norm": 3.2011819661170473, + "learning_rate": 2.265952897627102e-06, + "loss": 0.3137, + "step": 9990 + }, + { + "epoch": 0.6937230940147202, + "grad_norm": 2.4864967599411343, + "learning_rate": 2.265011481787162e-06, + "loss": 0.2109, + "step": 9991 + }, + { + "epoch": 0.6937925288154423, + "grad_norm": 1.7591570786156565, + "learning_rate": 
2.2640702042798486e-06, + "loss": 0.1036, + "step": 9992 + }, + { + "epoch": 0.6938619636161644, + "grad_norm": 4.319459278633239, + "learning_rate": 2.2631290651527703e-06, + "loss": 0.5381, + "step": 9993 + }, + { + "epoch": 0.6939313984168866, + "grad_norm": 2.9664689230413224, + "learning_rate": 2.262188064453523e-06, + "loss": 0.2189, + "step": 9994 + }, + { + "epoch": 0.6940008332176086, + "grad_norm": 3.8439451646627623, + "learning_rate": 2.2612472022297077e-06, + "loss": 0.4578, + "step": 9995 + }, + { + "epoch": 0.6940702680183308, + "grad_norm": 3.577245564406531, + "learning_rate": 2.260306478528909e-06, + "loss": 0.2448, + "step": 9996 + }, + { + "epoch": 0.6941397028190529, + "grad_norm": 4.829873328913069, + "learning_rate": 2.2593658933987096e-06, + "loss": 0.6455, + "step": 9997 + }, + { + "epoch": 0.694209137619775, + "grad_norm": 4.112852943719454, + "learning_rate": 2.2584254468866813e-06, + "loss": 0.5688, + "step": 9998 + }, + { + "epoch": 0.6942785724204972, + "grad_norm": 3.9224049659659572, + "learning_rate": 2.2574851390403916e-06, + "loss": 0.4956, + "step": 9999 + }, + { + "epoch": 0.6943480072212193, + "grad_norm": 7.080493875312562, + "learning_rate": 2.2565449699074004e-06, + "loss": 0.3561, + "step": 10000 + }, + { + "epoch": 0.6944174420219414, + "grad_norm": 2.149681706162477, + "learning_rate": 2.2556049395352603e-06, + "loss": 0.1182, + "step": 10001 + }, + { + "epoch": 0.6944868768226635, + "grad_norm": 2.683896947329033, + "learning_rate": 2.2546650479715164e-06, + "loss": 0.2008, + "step": 10002 + }, + { + "epoch": 0.6945563116233856, + "grad_norm": 3.2367838822909887, + "learning_rate": 2.253725295263706e-06, + "loss": 0.3089, + "step": 10003 + }, + { + "epoch": 0.6946257464241078, + "grad_norm": 4.399103027215495, + "learning_rate": 2.252785681459367e-06, + "loss": 0.405, + "step": 10004 + }, + { + "epoch": 0.6946951812248299, + "grad_norm": 3.9502071807486603, + "learning_rate": 2.2518462066060172e-06, + "loss": 0.4586, + "step": 10005 + }, + { + "epoch": 0.6947646160255521, + "grad_norm": 3.197231795733676, + "learning_rate": 2.2509068707511743e-06, + "loss": 0.2336, + "step": 10006 + }, + { + "epoch": 0.6948340508262741, + "grad_norm": 3.79934857895278, + "learning_rate": 2.2499676739423555e-06, + "loss": 0.4209, + "step": 10007 + }, + { + "epoch": 0.6949034856269962, + "grad_norm": 3.6134450302738053, + "learning_rate": 2.2490286162270553e-06, + "loss": 0.3404, + "step": 10008 + }, + { + "epoch": 0.6949729204277184, + "grad_norm": 3.5711417777188394, + "learning_rate": 2.2480896976527773e-06, + "loss": 0.4149, + "step": 10009 + }, + { + "epoch": 0.6950423552284405, + "grad_norm": 3.974676545954251, + "learning_rate": 2.247150918267008e-06, + "loss": 0.3772, + "step": 10010 + }, + { + "epoch": 0.6951117900291626, + "grad_norm": 2.9628168753507675, + "learning_rate": 2.2462122781172307e-06, + "loss": 0.2978, + "step": 10011 + }, + { + "epoch": 0.6951812248298848, + "grad_norm": 4.883904634814912, + "learning_rate": 2.24527377725092e-06, + "loss": 0.6139, + "step": 10012 + }, + { + "epoch": 0.6952506596306068, + "grad_norm": 4.692081150410176, + "learning_rate": 2.2443354157155443e-06, + "loss": 0.5453, + "step": 10013 + }, + { + "epoch": 0.695320094431329, + "grad_norm": 4.546128891364852, + "learning_rate": 2.243397193558565e-06, + "loss": 0.5357, + "step": 10014 + }, + { + "epoch": 0.6953895292320511, + "grad_norm": 3.9268931144631782, + "learning_rate": 2.2424591108274367e-06, + "loss": 0.5201, + "step": 10015 + }, + { + "epoch": 
0.6954589640327732, + "grad_norm": 5.848504035181688, + "learning_rate": 2.241521167569606e-06, + "loss": 0.7109, + "step": 10016 + }, + { + "epoch": 0.6955283988334954, + "grad_norm": 3.703538044922385, + "learning_rate": 2.2405833638325115e-06, + "loss": 0.4152, + "step": 10017 + }, + { + "epoch": 0.6955978336342175, + "grad_norm": 4.104278584820147, + "learning_rate": 2.2396456996635914e-06, + "loss": 0.3604, + "step": 10018 + }, + { + "epoch": 0.6956672684349396, + "grad_norm": 4.30717670849888, + "learning_rate": 2.2387081751102663e-06, + "loss": 0.5724, + "step": 10019 + }, + { + "epoch": 0.6957367032356617, + "grad_norm": 3.4510952098220775, + "learning_rate": 2.237770790219956e-06, + "loss": 0.424, + "step": 10020 + }, + { + "epoch": 0.6958061380363838, + "grad_norm": 3.997596727567857, + "learning_rate": 2.236833545040074e-06, + "loss": 0.5152, + "step": 10021 + }, + { + "epoch": 0.695875572837106, + "grad_norm": 3.6238806879575987, + "learning_rate": 2.235896439618028e-06, + "loss": 0.3756, + "step": 10022 + }, + { + "epoch": 0.6959450076378281, + "grad_norm": 3.5828294717131772, + "learning_rate": 2.234959474001207e-06, + "loss": 0.2976, + "step": 10023 + }, + { + "epoch": 0.6960144424385502, + "grad_norm": 3.927218849538247, + "learning_rate": 2.2340226482370097e-06, + "loss": 0.482, + "step": 10024 + }, + { + "epoch": 0.6960838772392723, + "grad_norm": 4.010839985444321, + "learning_rate": 2.233085962372819e-06, + "loss": 0.4852, + "step": 10025 + }, + { + "epoch": 0.6961533120399944, + "grad_norm": 4.045678644749858, + "learning_rate": 2.232149416456005e-06, + "loss": 0.4157, + "step": 10026 + }, + { + "epoch": 0.6962227468407166, + "grad_norm": 6.947279273016797, + "learning_rate": 2.2312130105339436e-06, + "loss": 0.5106, + "step": 10027 + }, + { + "epoch": 0.6962921816414387, + "grad_norm": 3.2125717214595615, + "learning_rate": 2.2302767446539954e-06, + "loss": 0.3465, + "step": 10028 + }, + { + "epoch": 0.6963616164421608, + "grad_norm": 3.9171374111579405, + "learning_rate": 2.2293406188635147e-06, + "loss": 0.4067, + "step": 10029 + }, + { + "epoch": 0.696431051242883, + "grad_norm": 3.089140665280616, + "learning_rate": 2.2284046332098506e-06, + "loss": 0.2562, + "step": 10030 + }, + { + "epoch": 0.696500486043605, + "grad_norm": 3.0919552022676218, + "learning_rate": 2.227468787740344e-06, + "loss": 0.3265, + "step": 10031 + }, + { + "epoch": 0.6965699208443272, + "grad_norm": 3.6911070740216667, + "learning_rate": 2.2265330825023284e-06, + "loss": 0.4252, + "step": 10032 + }, + { + "epoch": 0.6966393556450493, + "grad_norm": 3.050989252972308, + "learning_rate": 2.2255975175431316e-06, + "loss": 0.3978, + "step": 10033 + }, + { + "epoch": 0.6967087904457714, + "grad_norm": 3.7061832018046426, + "learning_rate": 2.2246620929100727e-06, + "loss": 0.4131, + "step": 10034 + }, + { + "epoch": 0.6967782252464936, + "grad_norm": 3.416150136335977, + "learning_rate": 2.2237268086504627e-06, + "loss": 0.2413, + "step": 10035 + }, + { + "epoch": 0.6968476600472157, + "grad_norm": 4.339064799852144, + "learning_rate": 2.2227916648116125e-06, + "loss": 0.435, + "step": 10036 + }, + { + "epoch": 0.6969170948479377, + "grad_norm": 5.139021932495289, + "learning_rate": 2.221856661440816e-06, + "loss": 0.4812, + "step": 10037 + }, + { + "epoch": 0.6969865296486599, + "grad_norm": 3.8684957708251866, + "learning_rate": 2.2209217985853636e-06, + "loss": 0.1889, + "step": 10038 + }, + { + "epoch": 0.697055964449382, + "grad_norm": 4.68522212726937, + "learning_rate": 
2.2199870762925457e-06, + "loss": 0.6465, + "step": 10039 + }, + { + "epoch": 0.6971253992501042, + "grad_norm": 2.8143657562480313, + "learning_rate": 2.2190524946096324e-06, + "loss": 0.2094, + "step": 10040 + }, + { + "epoch": 0.6971948340508263, + "grad_norm": 3.7923248249336146, + "learning_rate": 2.2181180535838985e-06, + "loss": 0.4773, + "step": 10041 + }, + { + "epoch": 0.6972642688515484, + "grad_norm": 4.682983807369046, + "learning_rate": 2.2171837532626056e-06, + "loss": 0.5874, + "step": 10042 + }, + { + "epoch": 0.6973337036522705, + "grad_norm": 3.37847923647, + "learning_rate": 2.2162495936930096e-06, + "loss": 0.3527, + "step": 10043 + }, + { + "epoch": 0.6974031384529926, + "grad_norm": 4.067124633327287, + "learning_rate": 2.2153155749223593e-06, + "loss": 0.5589, + "step": 10044 + }, + { + "epoch": 0.6974725732537148, + "grad_norm": 7.267528293294326, + "learning_rate": 2.2143816969978955e-06, + "loss": 0.4738, + "step": 10045 + }, + { + "epoch": 0.6975420080544369, + "grad_norm": 3.196400952828588, + "learning_rate": 2.213447959966854e-06, + "loss": 0.3386, + "step": 10046 + }, + { + "epoch": 0.697611442855159, + "grad_norm": 4.706714981899952, + "learning_rate": 2.212514363876461e-06, + "loss": 0.5165, + "step": 10047 + }, + { + "epoch": 0.6976808776558812, + "grad_norm": 3.1959109057736574, + "learning_rate": 2.2115809087739377e-06, + "loss": 0.2976, + "step": 10048 + }, + { + "epoch": 0.6977503124566032, + "grad_norm": 3.3241351387592126, + "learning_rate": 2.2106475947064942e-06, + "loss": 0.4036, + "step": 10049 + }, + { + "epoch": 0.6978197472573253, + "grad_norm": 4.0335666970866475, + "learning_rate": 2.2097144217213433e-06, + "loss": 0.5312, + "step": 10050 + }, + { + "epoch": 0.6978891820580475, + "grad_norm": 4.272895188798744, + "learning_rate": 2.2087813898656775e-06, + "loss": 0.5471, + "step": 10051 + }, + { + "epoch": 0.6979586168587696, + "grad_norm": 3.20167676167326, + "learning_rate": 2.2078484991866887e-06, + "loss": 0.3573, + "step": 10052 + }, + { + "epoch": 0.6980280516594918, + "grad_norm": 5.179061016554086, + "learning_rate": 2.2069157497315653e-06, + "loss": 0.6509, + "step": 10053 + }, + { + "epoch": 0.6980974864602139, + "grad_norm": 3.481199931831641, + "learning_rate": 2.2059831415474843e-06, + "loss": 0.4277, + "step": 10054 + }, + { + "epoch": 0.6981669212609359, + "grad_norm": 4.2869130508936495, + "learning_rate": 2.2050506746816102e-06, + "loss": 0.4134, + "step": 10055 + }, + { + "epoch": 0.6982363560616581, + "grad_norm": 4.687752346560159, + "learning_rate": 2.204118349181113e-06, + "loss": 0.5337, + "step": 10056 + }, + { + "epoch": 0.6983057908623802, + "grad_norm": 3.710086330418404, + "learning_rate": 2.2031861650931474e-06, + "loss": 0.447, + "step": 10057 + }, + { + "epoch": 0.6983752256631024, + "grad_norm": 3.6122840292723577, + "learning_rate": 2.202254122464857e-06, + "loss": 0.449, + "step": 10058 + }, + { + "epoch": 0.6984446604638245, + "grad_norm": 3.5791070010525057, + "learning_rate": 2.201322221343389e-06, + "loss": 0.3884, + "step": 10059 + }, + { + "epoch": 0.6985140952645466, + "grad_norm": 3.363298056010146, + "learning_rate": 2.2003904617758764e-06, + "loss": 0.3556, + "step": 10060 + }, + { + "epoch": 0.6985835300652687, + "grad_norm": 2.8658020079568582, + "learning_rate": 2.199458843809446e-06, + "loss": 0.3605, + "step": 10061 + }, + { + "epoch": 0.6986529648659908, + "grad_norm": 3.733506339657755, + "learning_rate": 2.198527367491218e-06, + "loss": 0.3846, + "step": 10062 + }, + { + "epoch": 
0.6987223996667129, + "grad_norm": 3.499459273869188, + "learning_rate": 2.1975960328683065e-06, + "loss": 0.5001, + "step": 10063 + }, + { + "epoch": 0.6987918344674351, + "grad_norm": 3.2245211192724277, + "learning_rate": 2.196664839987816e-06, + "loss": 0.2981, + "step": 10064 + }, + { + "epoch": 0.6988612692681572, + "grad_norm": 3.8877544224514193, + "learning_rate": 2.1957337888968454e-06, + "loss": 0.54, + "step": 10065 + }, + { + "epoch": 0.6989307040688794, + "grad_norm": 2.956624330306984, + "learning_rate": 2.1948028796424873e-06, + "loss": 0.2034, + "step": 10066 + }, + { + "epoch": 0.6990001388696014, + "grad_norm": 4.124842478150322, + "learning_rate": 2.1938721122718234e-06, + "loss": 0.5741, + "step": 10067 + }, + { + "epoch": 0.6990695736703235, + "grad_norm": 3.3447640594206094, + "learning_rate": 2.1929414868319356e-06, + "loss": 0.2739, + "step": 10068 + }, + { + "epoch": 0.6991390084710457, + "grad_norm": 3.6518967157681943, + "learning_rate": 2.1920110033698876e-06, + "loss": 0.4074, + "step": 10069 + }, + { + "epoch": 0.6992084432717678, + "grad_norm": 3.5314287576611534, + "learning_rate": 2.1910806619327473e-06, + "loss": 0.3233, + "step": 10070 + }, + { + "epoch": 0.69927787807249, + "grad_norm": 2.9737161884832304, + "learning_rate": 2.190150462567569e-06, + "loss": 0.3309, + "step": 10071 + }, + { + "epoch": 0.6993473128732121, + "grad_norm": 5.363532773615369, + "learning_rate": 2.1892204053214007e-06, + "loss": 0.5382, + "step": 10072 + }, + { + "epoch": 0.6994167476739341, + "grad_norm": 2.816608238515145, + "learning_rate": 2.188290490241284e-06, + "loss": 0.3418, + "step": 10073 + }, + { + "epoch": 0.6994861824746563, + "grad_norm": 3.2159791791376935, + "learning_rate": 2.187360717374252e-06, + "loss": 0.3258, + "step": 10074 + }, + { + "epoch": 0.6995556172753784, + "grad_norm": 4.838792065796856, + "learning_rate": 2.186431086767332e-06, + "loss": 0.449, + "step": 10075 + }, + { + "epoch": 0.6996250520761006, + "grad_norm": 3.596881700694623, + "learning_rate": 2.185501598467545e-06, + "loss": 0.2655, + "step": 10076 + }, + { + "epoch": 0.6996944868768227, + "grad_norm": 3.0302052317278707, + "learning_rate": 2.184572252521901e-06, + "loss": 0.2253, + "step": 10077 + }, + { + "epoch": 0.6997639216775448, + "grad_norm": 5.662021755560674, + "learning_rate": 2.1836430489774057e-06, + "loss": 0.7511, + "step": 10078 + }, + { + "epoch": 0.6998333564782669, + "grad_norm": 4.05518579895283, + "learning_rate": 2.182713987881061e-06, + "loss": 0.5213, + "step": 10079 + }, + { + "epoch": 0.699902791278989, + "grad_norm": 3.4533161515607746, + "learning_rate": 2.1817850692798537e-06, + "loss": 0.359, + "step": 10080 + }, + { + "epoch": 0.6999722260797111, + "grad_norm": 4.006006469657829, + "learning_rate": 2.1808562932207668e-06, + "loss": 0.4392, + "step": 10081 + }, + { + "epoch": 0.7000416608804333, + "grad_norm": 3.6394153217505987, + "learning_rate": 2.1799276597507814e-06, + "loss": 0.3765, + "step": 10082 + }, + { + "epoch": 0.7001110956811554, + "grad_norm": 5.083913145981617, + "learning_rate": 2.178999168916862e-06, + "loss": 0.4633, + "step": 10083 + }, + { + "epoch": 0.7001805304818776, + "grad_norm": 3.497667866638374, + "learning_rate": 2.178070820765971e-06, + "loss": 0.2848, + "step": 10084 + }, + { + "epoch": 0.7002499652825996, + "grad_norm": 3.763865628904409, + "learning_rate": 2.177142615345067e-06, + "loss": 0.4864, + "step": 10085 + }, + { + "epoch": 0.7003194000833217, + "grad_norm": 5.09560591032557, + "learning_rate": 
2.1762145527010962e-06, + "loss": 0.2536, + "step": 10086 + }, + { + "epoch": 0.7003888348840439, + "grad_norm": 3.8848795232393862, + "learning_rate": 2.175286632880995e-06, + "loss": 0.2773, + "step": 10087 + }, + { + "epoch": 0.700458269684766, + "grad_norm": 3.5734470866731005, + "learning_rate": 2.1743588559317013e-06, + "loss": 0.375, + "step": 10088 + }, + { + "epoch": 0.7005277044854882, + "grad_norm": 3.031106340519871, + "learning_rate": 2.1734312219001392e-06, + "loss": 0.3704, + "step": 10089 + }, + { + "epoch": 0.7005971392862103, + "grad_norm": 3.1666146648399383, + "learning_rate": 2.172503730833227e-06, + "loss": 0.3584, + "step": 10090 + }, + { + "epoch": 0.7006665740869323, + "grad_norm": 5.310853558570267, + "learning_rate": 2.1715763827778767e-06, + "loss": 0.7335, + "step": 10091 + }, + { + "epoch": 0.7007360088876545, + "grad_norm": 3.3999933984337485, + "learning_rate": 2.1706491777809923e-06, + "loss": 0.3928, + "step": 10092 + }, + { + "epoch": 0.7008054436883766, + "grad_norm": 3.9601944015225734, + "learning_rate": 2.169722115889471e-06, + "loss": 0.4921, + "step": 10093 + }, + { + "epoch": 0.7008748784890987, + "grad_norm": 3.5809445318515634, + "learning_rate": 2.1687951971502024e-06, + "loss": 0.3948, + "step": 10094 + }, + { + "epoch": 0.7009443132898209, + "grad_norm": 3.6800108860372216, + "learning_rate": 2.1678684216100693e-06, + "loss": 0.3082, + "step": 10095 + }, + { + "epoch": 0.701013748090543, + "grad_norm": 1.8879790660417708, + "learning_rate": 2.1669417893159444e-06, + "loss": 0.1529, + "step": 10096 + }, + { + "epoch": 0.7010831828912651, + "grad_norm": 4.283057485069001, + "learning_rate": 2.166015300314702e-06, + "loss": 0.4475, + "step": 10097 + }, + { + "epoch": 0.7011526176919872, + "grad_norm": 3.0596570729907016, + "learning_rate": 2.165088954653195e-06, + "loss": 0.1993, + "step": 10098 + }, + { + "epoch": 0.7012220524927093, + "grad_norm": 3.1148095735171686, + "learning_rate": 2.1641627523782827e-06, + "loss": 0.2377, + "step": 10099 + }, + { + "epoch": 0.7012914872934315, + "grad_norm": 4.536049232565741, + "learning_rate": 2.163236693536811e-06, + "loss": 0.5971, + "step": 10100 + }, + { + "epoch": 0.7013609220941536, + "grad_norm": 4.033601922306188, + "learning_rate": 2.162310778175614e-06, + "loss": 0.5141, + "step": 10101 + }, + { + "epoch": 0.7014303568948758, + "grad_norm": 3.1660411232714454, + "learning_rate": 2.161385006341528e-06, + "loss": 0.3322, + "step": 10102 + }, + { + "epoch": 0.7014997916955978, + "grad_norm": 4.437380457607839, + "learning_rate": 2.160459378081377e-06, + "loss": 0.441, + "step": 10103 + }, + { + "epoch": 0.7015692264963199, + "grad_norm": 2.5691066837552667, + "learning_rate": 2.1595338934419774e-06, + "loss": 0.2863, + "step": 10104 + }, + { + "epoch": 0.7016386612970421, + "grad_norm": 4.297599010734527, + "learning_rate": 2.158608552470139e-06, + "loss": 0.3735, + "step": 10105 + }, + { + "epoch": 0.7017080960977642, + "grad_norm": 3.4297195893863437, + "learning_rate": 2.1576833552126645e-06, + "loss": 0.265, + "step": 10106 + }, + { + "epoch": 0.7017775308984863, + "grad_norm": 3.0915197359133093, + "learning_rate": 2.1567583017163502e-06, + "loss": 0.3039, + "step": 10107 + }, + { + "epoch": 0.7018469656992085, + "grad_norm": 3.8238107892238777, + "learning_rate": 2.155833392027983e-06, + "loss": 0.3724, + "step": 10108 + }, + { + "epoch": 0.7019164004999305, + "grad_norm": 4.537413695776585, + "learning_rate": 2.1549086261943446e-06, + "loss": 0.6079, + "step": 10109 + }, + { + "epoch": 
0.7019858353006527, + "grad_norm": 3.8011039643377877, + "learning_rate": 2.153984004262207e-06, + "loss": 0.4688, + "step": 10110 + }, + { + "epoch": 0.7020552701013748, + "grad_norm": 3.0175582453140004, + "learning_rate": 2.1530595262783415e-06, + "loss": 0.2592, + "step": 10111 + }, + { + "epoch": 0.7021247049020969, + "grad_norm": 4.192938878072801, + "learning_rate": 2.1521351922895018e-06, + "loss": 0.5948, + "step": 10112 + }, + { + "epoch": 0.7021941397028191, + "grad_norm": 3.9076778633930167, + "learning_rate": 2.15121100234244e-06, + "loss": 0.2829, + "step": 10113 + }, + { + "epoch": 0.7022635745035412, + "grad_norm": 4.579189177232965, + "learning_rate": 2.1502869564839054e-06, + "loss": 0.5742, + "step": 10114 + }, + { + "epoch": 0.7023330093042633, + "grad_norm": 4.358471117786607, + "learning_rate": 2.14936305476063e-06, + "loss": 0.441, + "step": 10115 + }, + { + "epoch": 0.7024024441049854, + "grad_norm": 3.3581020972103284, + "learning_rate": 2.1484392972193436e-06, + "loss": 0.4672, + "step": 10116 + }, + { + "epoch": 0.7024718789057075, + "grad_norm": 4.295773929686663, + "learning_rate": 2.1475156839067725e-06, + "loss": 0.4964, + "step": 10117 + }, + { + "epoch": 0.7025413137064297, + "grad_norm": 2.8786730977589405, + "learning_rate": 2.146592214869632e-06, + "loss": 0.2112, + "step": 10118 + }, + { + "epoch": 0.7026107485071518, + "grad_norm": 4.401165107551852, + "learning_rate": 2.145668890154626e-06, + "loss": 0.4498, + "step": 10119 + }, + { + "epoch": 0.7026801833078739, + "grad_norm": 3.582131523080712, + "learning_rate": 2.1447457098084583e-06, + "loss": 0.3764, + "step": 10120 + }, + { + "epoch": 0.702749618108596, + "grad_norm": 3.564158365752833, + "learning_rate": 2.143822673877822e-06, + "loss": 0.2471, + "step": 10121 + }, + { + "epoch": 0.7028190529093181, + "grad_norm": 2.8385295597486095, + "learning_rate": 2.142899782409403e-06, + "loss": 0.3147, + "step": 10122 + }, + { + "epoch": 0.7028884877100403, + "grad_norm": 3.683953361570253, + "learning_rate": 2.1419770354498798e-06, + "loss": 0.4972, + "step": 10123 + }, + { + "epoch": 0.7029579225107624, + "grad_norm": 3.614069427398909, + "learning_rate": 2.141054433045924e-06, + "loss": 0.4029, + "step": 10124 + }, + { + "epoch": 0.7030273573114845, + "grad_norm": 3.542946864349542, + "learning_rate": 2.1401319752441997e-06, + "loss": 0.3783, + "step": 10125 + }, + { + "epoch": 0.7030967921122067, + "grad_norm": 8.033213625077124, + "learning_rate": 2.1392096620913643e-06, + "loss": 0.518, + "step": 10126 + }, + { + "epoch": 0.7031662269129287, + "grad_norm": 3.551196811249551, + "learning_rate": 2.138287493634067e-06, + "loss": 0.3438, + "step": 10127 + }, + { + "epoch": 0.7032356617136509, + "grad_norm": 3.4525765117952294, + "learning_rate": 2.137365469918948e-06, + "loss": 0.3609, + "step": 10128 + }, + { + "epoch": 0.703305096514373, + "grad_norm": 4.4059092285601, + "learning_rate": 2.136443590992648e-06, + "loss": 0.4914, + "step": 10129 + }, + { + "epoch": 0.7033745313150951, + "grad_norm": 3.923718719552914, + "learning_rate": 2.135521856901787e-06, + "loss": 0.4075, + "step": 10130 + }, + { + "epoch": 0.7034439661158173, + "grad_norm": 3.3042187581259883, + "learning_rate": 2.134600267692991e-06, + "loss": 0.4363, + "step": 10131 + }, + { + "epoch": 0.7035134009165394, + "grad_norm": 4.969205513647791, + "learning_rate": 2.133678823412873e-06, + "loss": 0.383, + "step": 10132 + }, + { + "epoch": 0.7035828357172615, + "grad_norm": 4.977250202750475, + "learning_rate": 2.1327575241080333e-06, 
+ "loss": 0.8724, + "step": 10133 + }, + { + "epoch": 0.7036522705179836, + "grad_norm": 3.048437017004731, + "learning_rate": 2.131836369825075e-06, + "loss": 0.2658, + "step": 10134 + }, + { + "epoch": 0.7037217053187057, + "grad_norm": 3.1852701075664, + "learning_rate": 2.1309153606105875e-06, + "loss": 0.339, + "step": 10135 + }, + { + "epoch": 0.7037911401194279, + "grad_norm": 3.7120933755632817, + "learning_rate": 2.1299944965111546e-06, + "loss": 0.3654, + "step": 10136 + }, + { + "epoch": 0.70386057492015, + "grad_norm": 4.1932691282580965, + "learning_rate": 2.129073777573352e-06, + "loss": 0.4264, + "step": 10137 + }, + { + "epoch": 0.703930009720872, + "grad_norm": 4.028992983185161, + "learning_rate": 2.12815320384375e-06, + "loss": 0.5207, + "step": 10138 + }, + { + "epoch": 0.7039994445215942, + "grad_norm": 3.9215330292588253, + "learning_rate": 2.127232775368908e-06, + "loss": 0.4332, + "step": 10139 + }, + { + "epoch": 0.7040688793223163, + "grad_norm": 2.667881894955853, + "learning_rate": 2.1263124921953824e-06, + "loss": 0.2091, + "step": 10140 + }, + { + "epoch": 0.7041383141230385, + "grad_norm": 4.288862069188275, + "learning_rate": 2.1253923543697193e-06, + "loss": 0.745, + "step": 10141 + }, + { + "epoch": 0.7042077489237606, + "grad_norm": 4.056514171117148, + "learning_rate": 2.1244723619384555e-06, + "loss": 0.3935, + "step": 10142 + }, + { + "epoch": 0.7042771837244827, + "grad_norm": 4.14125095936072, + "learning_rate": 2.1235525149481295e-06, + "loss": 0.4191, + "step": 10143 + }, + { + "epoch": 0.7043466185252049, + "grad_norm": 5.300532006437179, + "learning_rate": 2.1226328134452612e-06, + "loss": 0.7302, + "step": 10144 + }, + { + "epoch": 0.7044160533259269, + "grad_norm": 3.8349528683035974, + "learning_rate": 2.121713257476367e-06, + "loss": 0.4243, + "step": 10145 + }, + { + "epoch": 0.7044854881266491, + "grad_norm": 3.451271500956862, + "learning_rate": 2.120793847087963e-06, + "loss": 0.2921, + "step": 10146 + }, + { + "epoch": 0.7045549229273712, + "grad_norm": 4.122798255141095, + "learning_rate": 2.119874582326546e-06, + "loss": 0.3171, + "step": 10147 + }, + { + "epoch": 0.7046243577280933, + "grad_norm": 4.318609853013091, + "learning_rate": 2.1189554632386116e-06, + "loss": 0.4093, + "step": 10148 + }, + { + "epoch": 0.7046937925288155, + "grad_norm": 4.536505869543018, + "learning_rate": 2.1180364898706525e-06, + "loss": 0.5372, + "step": 10149 + }, + { + "epoch": 0.7047632273295376, + "grad_norm": 3.1117815259459114, + "learning_rate": 2.117117662269146e-06, + "loss": 0.3515, + "step": 10150 + }, + { + "epoch": 0.7048326621302596, + "grad_norm": 3.7225651864418423, + "learning_rate": 2.1161989804805657e-06, + "loss": 0.46, + "step": 10151 + }, + { + "epoch": 0.7049020969309818, + "grad_norm": 4.761140005836736, + "learning_rate": 2.115280444551378e-06, + "loss": 0.6187, + "step": 10152 + }, + { + "epoch": 0.7049715317317039, + "grad_norm": 3.822788202363711, + "learning_rate": 2.1143620545280414e-06, + "loss": 0.2576, + "step": 10153 + }, + { + "epoch": 0.7050409665324261, + "grad_norm": 3.5175776023152103, + "learning_rate": 2.113443810457006e-06, + "loss": 0.3512, + "step": 10154 + }, + { + "epoch": 0.7051104013331482, + "grad_norm": 3.581592892711257, + "learning_rate": 2.112525712384717e-06, + "loss": 0.3565, + "step": 10155 + }, + { + "epoch": 0.7051798361338703, + "grad_norm": 4.831493848511022, + "learning_rate": 2.1116077603576097e-06, + "loss": 0.3864, + "step": 10156 + }, + { + "epoch": 0.7052492709345924, + "grad_norm": 
3.6280485595893075, + "learning_rate": 2.110689954422114e-06, + "loss": 0.3548, + "step": 10157 + }, + { + "epoch": 0.7053187057353145, + "grad_norm": 3.5343620364872383, + "learning_rate": 2.10977229462465e-06, + "loss": 0.4109, + "step": 10158 + }, + { + "epoch": 0.7053881405360367, + "grad_norm": 4.035579577403022, + "learning_rate": 2.1088547810116315e-06, + "loss": 0.5433, + "step": 10159 + }, + { + "epoch": 0.7054575753367588, + "grad_norm": 4.617696581174488, + "learning_rate": 2.107937413629469e-06, + "loss": 0.693, + "step": 10160 + }, + { + "epoch": 0.7055270101374809, + "grad_norm": 4.129150431219783, + "learning_rate": 2.10702019252456e-06, + "loss": 0.5244, + "step": 10161 + }, + { + "epoch": 0.7055964449382031, + "grad_norm": 4.369682756553619, + "learning_rate": 2.106103117743294e-06, + "loss": 0.5638, + "step": 10162 + }, + { + "epoch": 0.7056658797389251, + "grad_norm": 2.0971993730631397, + "learning_rate": 2.1051861893320585e-06, + "loss": 0.0466, + "step": 10163 + }, + { + "epoch": 0.7057353145396472, + "grad_norm": 4.1585937092775165, + "learning_rate": 2.1042694073372318e-06, + "loss": 0.4475, + "step": 10164 + }, + { + "epoch": 0.7058047493403694, + "grad_norm": 4.1947423425254895, + "learning_rate": 2.1033527718051776e-06, + "loss": 0.6706, + "step": 10165 + }, + { + "epoch": 0.7058741841410915, + "grad_norm": 4.105399399382044, + "learning_rate": 2.1024362827822655e-06, + "loss": 0.4999, + "step": 10166 + }, + { + "epoch": 0.7059436189418137, + "grad_norm": 3.5046972621438783, + "learning_rate": 2.101519940314846e-06, + "loss": 0.288, + "step": 10167 + }, + { + "epoch": 0.7060130537425358, + "grad_norm": 3.8731571912563667, + "learning_rate": 2.1006037444492687e-06, + "loss": 0.3247, + "step": 10168 + }, + { + "epoch": 0.7060824885432578, + "grad_norm": 3.4874687198632968, + "learning_rate": 2.0996876952318735e-06, + "loss": 0.3886, + "step": 10169 + }, + { + "epoch": 0.70615192334398, + "grad_norm": 2.883904383180147, + "learning_rate": 2.0987717927089923e-06, + "loss": 0.1806, + "step": 10170 + }, + { + "epoch": 0.7062213581447021, + "grad_norm": 3.0748646131919237, + "learning_rate": 2.0978560369269514e-06, + "loss": 0.2195, + "step": 10171 + }, + { + "epoch": 0.7062907929454243, + "grad_norm": 4.563377073606305, + "learning_rate": 2.096940427932067e-06, + "loss": 0.4869, + "step": 10172 + }, + { + "epoch": 0.7063602277461464, + "grad_norm": 4.367218883282367, + "learning_rate": 2.0960249657706515e-06, + "loss": 0.5821, + "step": 10173 + }, + { + "epoch": 0.7064296625468685, + "grad_norm": 3.6092434657124803, + "learning_rate": 2.095109650489006e-06, + "loss": 0.3408, + "step": 10174 + }, + { + "epoch": 0.7064990973475906, + "grad_norm": 3.8476093198889325, + "learning_rate": 2.0941944821334296e-06, + "loss": 0.4668, + "step": 10175 + }, + { + "epoch": 0.7065685321483127, + "grad_norm": 3.9182401058194114, + "learning_rate": 2.0932794607502073e-06, + "loss": 0.4615, + "step": 10176 + }, + { + "epoch": 0.7066379669490348, + "grad_norm": 4.088550420666286, + "learning_rate": 2.0923645863856183e-06, + "loss": 0.3528, + "step": 10177 + }, + { + "epoch": 0.706707401749757, + "grad_norm": 3.5557788914237904, + "learning_rate": 2.0914498590859405e-06, + "loss": 0.3379, + "step": 10178 + }, + { + "epoch": 0.7067768365504791, + "grad_norm": 4.135823979388475, + "learning_rate": 2.090535278897437e-06, + "loss": 0.5393, + "step": 10179 + }, + { + "epoch": 0.7068462713512013, + "grad_norm": 3.1370120564698594, + "learning_rate": 2.0896208458663673e-06, + "loss": 0.292, + 
"step": 10180 + }, + { + "epoch": 0.7069157061519233, + "grad_norm": 2.9838560398437695, + "learning_rate": 2.0887065600389822e-06, + "loss": 0.1911, + "step": 10181 + }, + { + "epoch": 0.7069851409526454, + "grad_norm": 3.6585182101325877, + "learning_rate": 2.087792421461524e-06, + "loss": 0.4162, + "step": 10182 + }, + { + "epoch": 0.7070545757533676, + "grad_norm": 4.229777348484727, + "learning_rate": 2.086878430180231e-06, + "loss": 0.465, + "step": 10183 + }, + { + "epoch": 0.7071240105540897, + "grad_norm": 2.8780421479827756, + "learning_rate": 2.0859645862413295e-06, + "loss": 0.2349, + "step": 10184 + }, + { + "epoch": 0.7071934453548119, + "grad_norm": 4.204842581902981, + "learning_rate": 2.085050889691042e-06, + "loss": 0.57, + "step": 10185 + }, + { + "epoch": 0.707262880155534, + "grad_norm": 3.7791702911539065, + "learning_rate": 2.084137340575582e-06, + "loss": 0.3876, + "step": 10186 + }, + { + "epoch": 0.707332314956256, + "grad_norm": 3.9936188099122996, + "learning_rate": 2.083223938941156e-06, + "loss": 0.5581, + "step": 10187 + }, + { + "epoch": 0.7074017497569782, + "grad_norm": 5.230191131615469, + "learning_rate": 2.082310684833961e-06, + "loss": 0.6194, + "step": 10188 + }, + { + "epoch": 0.7074711845577003, + "grad_norm": 3.497735854878422, + "learning_rate": 2.081397578300193e-06, + "loss": 0.3726, + "step": 10189 + }, + { + "epoch": 0.7075406193584225, + "grad_norm": 4.258537619725338, + "learning_rate": 2.080484619386032e-06, + "loss": 0.5504, + "step": 10190 + }, + { + "epoch": 0.7076100541591446, + "grad_norm": 3.8274090734964106, + "learning_rate": 2.0795718081376528e-06, + "loss": 0.3236, + "step": 10191 + }, + { + "epoch": 0.7076794889598667, + "grad_norm": 4.107262496634714, + "learning_rate": 2.0786591446012288e-06, + "loss": 0.4319, + "step": 10192 + }, + { + "epoch": 0.7077489237605888, + "grad_norm": 3.757180025022487, + "learning_rate": 2.077746628822921e-06, + "loss": 0.5161, + "step": 10193 + }, + { + "epoch": 0.7078183585613109, + "grad_norm": 3.579391599389343, + "learning_rate": 2.076834260848879e-06, + "loss": 0.3334, + "step": 10194 + }, + { + "epoch": 0.707887793362033, + "grad_norm": 3.5046715833804116, + "learning_rate": 2.0759220407252535e-06, + "loss": 0.3576, + "step": 10195 + }, + { + "epoch": 0.7079572281627552, + "grad_norm": 4.061333519979376, + "learning_rate": 2.0750099684981843e-06, + "loss": 0.4029, + "step": 10196 + }, + { + "epoch": 0.7080266629634773, + "grad_norm": 3.5300832731125382, + "learning_rate": 2.0740980442137978e-06, + "loss": 0.3067, + "step": 10197 + }, + { + "epoch": 0.7080960977641995, + "grad_norm": 3.7904861626658466, + "learning_rate": 2.073186267918223e-06, + "loss": 0.4404, + "step": 10198 + }, + { + "epoch": 0.7081655325649215, + "grad_norm": 4.418968872586018, + "learning_rate": 2.072274639657574e-06, + "loss": 0.5312, + "step": 10199 + }, + { + "epoch": 0.7082349673656436, + "grad_norm": 4.347363669681173, + "learning_rate": 2.071363159477962e-06, + "loss": 0.6069, + "step": 10200 + }, + { + "epoch": 0.7083044021663658, + "grad_norm": 3.165630552170293, + "learning_rate": 2.070451827425487e-06, + "loss": 0.2555, + "step": 10201 + }, + { + "epoch": 0.7083738369670879, + "grad_norm": 2.301867843485286, + "learning_rate": 2.0695406435462435e-06, + "loss": 0.1002, + "step": 10202 + }, + { + "epoch": 0.7084432717678101, + "grad_norm": 3.4974477091403586, + "learning_rate": 2.0686296078863166e-06, + "loss": 0.344, + "step": 10203 + }, + { + "epoch": 0.7085127065685322, + "grad_norm": 3.366741202929514, + 
"learning_rate": 2.06771872049179e-06, + "loss": 0.3095, + "step": 10204 + }, + { + "epoch": 0.7085821413692542, + "grad_norm": 2.750444408087107, + "learning_rate": 2.066807981408732e-06, + "loss": 0.3316, + "step": 10205 + }, + { + "epoch": 0.7086515761699764, + "grad_norm": 5.343489581647093, + "learning_rate": 2.065897390683204e-06, + "loss": 0.4287, + "step": 10206 + }, + { + "epoch": 0.7087210109706985, + "grad_norm": 3.0973831663186746, + "learning_rate": 2.06498694836127e-06, + "loss": 0.3348, + "step": 10207 + }, + { + "epoch": 0.7087904457714206, + "grad_norm": 4.2003750317444615, + "learning_rate": 2.0640766544889727e-06, + "loss": 0.4096, + "step": 10208 + }, + { + "epoch": 0.7088598805721428, + "grad_norm": 4.422291907239041, + "learning_rate": 2.0631665091123546e-06, + "loss": 0.5262, + "step": 10209 + }, + { + "epoch": 0.7089293153728649, + "grad_norm": 3.9135457466310735, + "learning_rate": 2.0622565122774536e-06, + "loss": 0.4723, + "step": 10210 + }, + { + "epoch": 0.708998750173587, + "grad_norm": 2.94048536337275, + "learning_rate": 2.0613466640302932e-06, + "loss": 0.2456, + "step": 10211 + }, + { + "epoch": 0.7090681849743091, + "grad_norm": 3.966559899025331, + "learning_rate": 2.060436964416893e-06, + "loss": 0.4796, + "step": 10212 + }, + { + "epoch": 0.7091376197750312, + "grad_norm": 6.766207460268734, + "learning_rate": 2.0595274134832655e-06, + "loss": 0.4539, + "step": 10213 + }, + { + "epoch": 0.7092070545757534, + "grad_norm": 3.176146008308433, + "learning_rate": 2.0586180112754135e-06, + "loss": 0.3057, + "step": 10214 + }, + { + "epoch": 0.7092764893764755, + "grad_norm": 3.9709084696850527, + "learning_rate": 2.057708757839334e-06, + "loss": 0.3702, + "step": 10215 + }, + { + "epoch": 0.7093459241771977, + "grad_norm": 2.9293878659088657, + "learning_rate": 2.0567996532210166e-06, + "loss": 0.1611, + "step": 10216 + }, + { + "epoch": 0.7094153589779197, + "grad_norm": 4.053415852493665, + "learning_rate": 2.055890697466442e-06, + "loss": 0.3693, + "step": 10217 + }, + { + "epoch": 0.7094847937786418, + "grad_norm": 4.362724943874189, + "learning_rate": 2.0549818906215845e-06, + "loss": 0.502, + "step": 10218 + }, + { + "epoch": 0.709554228579364, + "grad_norm": 3.3652144744456907, + "learning_rate": 2.05407323273241e-06, + "loss": 0.3656, + "step": 10219 + }, + { + "epoch": 0.7096236633800861, + "grad_norm": 3.6154586595779827, + "learning_rate": 2.053164723844877e-06, + "loss": 0.3367, + "step": 10220 + }, + { + "epoch": 0.7096930981808082, + "grad_norm": 4.465145511071633, + "learning_rate": 2.052256364004941e-06, + "loss": 0.5165, + "step": 10221 + }, + { + "epoch": 0.7097625329815304, + "grad_norm": 3.7311401559263917, + "learning_rate": 2.05134815325854e-06, + "loss": 0.383, + "step": 10222 + }, + { + "epoch": 0.7098319677822524, + "grad_norm": 2.908552013973653, + "learning_rate": 2.0504400916516116e-06, + "loss": 0.1719, + "step": 10223 + }, + { + "epoch": 0.7099014025829746, + "grad_norm": 3.781793566243704, + "learning_rate": 2.0495321792300876e-06, + "loss": 0.499, + "step": 10224 + }, + { + "epoch": 0.7099708373836967, + "grad_norm": 4.477355037442522, + "learning_rate": 2.0486244160398887e-06, + "loss": 0.4837, + "step": 10225 + }, + { + "epoch": 0.7100402721844188, + "grad_norm": 3.968750202975932, + "learning_rate": 2.0477168021269237e-06, + "loss": 0.576, + "step": 10226 + }, + { + "epoch": 0.710109706985141, + "grad_norm": 3.237730071763434, + "learning_rate": 2.046809337537104e-06, + "loss": 0.2904, + "step": 10227 + }, + { + "epoch": 
0.710179141785863, + "grad_norm": 3.5355339282178093, + "learning_rate": 2.0459020223163263e-06, + "loss": 0.4037, + "step": 10228 + }, + { + "epoch": 0.7102485765865852, + "grad_norm": 2.88035120259962, + "learning_rate": 2.0449948565104816e-06, + "loss": 0.2002, + "step": 10229 + }, + { + "epoch": 0.7103180113873073, + "grad_norm": 2.915186761549717, + "learning_rate": 2.044087840165453e-06, + "loss": 0.2778, + "step": 10230 + }, + { + "epoch": 0.7103874461880294, + "grad_norm": 3.522298557445293, + "learning_rate": 2.043180973327116e-06, + "loss": 0.4344, + "step": 10231 + }, + { + "epoch": 0.7104568809887516, + "grad_norm": 3.8339276925061228, + "learning_rate": 2.0422742560413404e-06, + "loss": 0.4759, + "step": 10232 + }, + { + "epoch": 0.7105263157894737, + "grad_norm": 3.7996320393161365, + "learning_rate": 2.041367688353986e-06, + "loss": 0.2358, + "step": 10233 + }, + { + "epoch": 0.7105957505901958, + "grad_norm": 3.8626462939037314, + "learning_rate": 2.0404612703109057e-06, + "loss": 0.3665, + "step": 10234 + }, + { + "epoch": 0.7106651853909179, + "grad_norm": 4.274676271973296, + "learning_rate": 2.0395550019579435e-06, + "loss": 0.4061, + "step": 10235 + }, + { + "epoch": 0.71073462019164, + "grad_norm": 5.221887225666951, + "learning_rate": 2.0386488833409433e-06, + "loss": 0.4785, + "step": 10236 + }, + { + "epoch": 0.7108040549923622, + "grad_norm": 3.5509909492170117, + "learning_rate": 2.03774291450573e-06, + "loss": 0.4291, + "step": 10237 + }, + { + "epoch": 0.7108734897930843, + "grad_norm": 4.032676964383473, + "learning_rate": 2.0368370954981254e-06, + "loss": 0.4853, + "step": 10238 + }, + { + "epoch": 0.7109429245938064, + "grad_norm": 2.7217984738598564, + "learning_rate": 2.035931426363952e-06, + "loss": 0.1494, + "step": 10239 + }, + { + "epoch": 0.7110123593945286, + "grad_norm": 5.049383456701229, + "learning_rate": 2.0350259071490084e-06, + "loss": 0.5735, + "step": 10240 + }, + { + "epoch": 0.7110817941952506, + "grad_norm": 3.5940910615726005, + "learning_rate": 2.0341205378991018e-06, + "loss": 0.2347, + "step": 10241 + }, + { + "epoch": 0.7111512289959728, + "grad_norm": 3.003081098279493, + "learning_rate": 2.033215318660022e-06, + "loss": 0.1955, + "step": 10242 + }, + { + "epoch": 0.7112206637966949, + "grad_norm": 3.1885132088039008, + "learning_rate": 2.0323102494775544e-06, + "loss": 0.3374, + "step": 10243 + }, + { + "epoch": 0.711290098597417, + "grad_norm": 4.1267214201917435, + "learning_rate": 2.0314053303974764e-06, + "loss": 0.3987, + "step": 10244 + }, + { + "epoch": 0.7113595333981392, + "grad_norm": 3.9929828175615714, + "learning_rate": 2.030500561465557e-06, + "loss": 0.4196, + "step": 10245 + }, + { + "epoch": 0.7114289681988613, + "grad_norm": 3.500043143998452, + "learning_rate": 2.0295959427275597e-06, + "loss": 0.2078, + "step": 10246 + }, + { + "epoch": 0.7114984029995834, + "grad_norm": 3.0619426436234227, + "learning_rate": 2.028691474229238e-06, + "loss": 0.2922, + "step": 10247 + }, + { + "epoch": 0.7115678378003055, + "grad_norm": 4.459717625918739, + "learning_rate": 2.027787156016339e-06, + "loss": 0.6904, + "step": 10248 + }, + { + "epoch": 0.7116372726010276, + "grad_norm": 6.091099985782494, + "learning_rate": 2.026882988134601e-06, + "loss": 0.7108, + "step": 10249 + }, + { + "epoch": 0.7117067074017498, + "grad_norm": 3.8221776818251136, + "learning_rate": 2.0259789706297604e-06, + "loss": 0.4027, + "step": 10250 + }, + { + "epoch": 0.7117761422024719, + "grad_norm": 4.1754689254674275, + "learning_rate": 
2.025075103547537e-06, + "loss": 0.4369, + "step": 10251 + }, + { + "epoch": 0.711845577003194, + "grad_norm": 4.243266467977239, + "learning_rate": 2.024171386933646e-06, + "loss": 0.6651, + "step": 10252 + }, + { + "epoch": 0.7119150118039161, + "grad_norm": 3.808983578454301, + "learning_rate": 2.023267820833801e-06, + "loss": 0.5844, + "step": 10253 + }, + { + "epoch": 0.7119844466046382, + "grad_norm": 3.3413043601578205, + "learning_rate": 2.022364405293703e-06, + "loss": 0.3171, + "step": 10254 + }, + { + "epoch": 0.7120538814053604, + "grad_norm": 4.35088920397091, + "learning_rate": 2.02146114035904e-06, + "loss": 0.5152, + "step": 10255 + }, + { + "epoch": 0.7121233162060825, + "grad_norm": 4.460693919941862, + "learning_rate": 2.0205580260755043e-06, + "loss": 0.4889, + "step": 10256 + }, + { + "epoch": 0.7121927510068046, + "grad_norm": 4.279693120785385, + "learning_rate": 2.019655062488774e-06, + "loss": 0.072, + "step": 10257 + }, + { + "epoch": 0.7122621858075268, + "grad_norm": 3.5454998742295807, + "learning_rate": 2.0187522496445145e-06, + "loss": 0.2851, + "step": 10258 + }, + { + "epoch": 0.7123316206082488, + "grad_norm": 3.469199511681457, + "learning_rate": 2.017849587588394e-06, + "loss": 0.3623, + "step": 10259 + }, + { + "epoch": 0.712401055408971, + "grad_norm": 3.1131116609492233, + "learning_rate": 2.0169470763660675e-06, + "loss": 0.4296, + "step": 10260 + }, + { + "epoch": 0.7124704902096931, + "grad_norm": 3.8794660348925687, + "learning_rate": 2.0160447160231823e-06, + "loss": 0.3428, + "step": 10261 + }, + { + "epoch": 0.7125399250104152, + "grad_norm": 4.4240191962832345, + "learning_rate": 2.015142506605378e-06, + "loss": 0.3967, + "step": 10262 + }, + { + "epoch": 0.7126093598111374, + "grad_norm": 4.742019580380956, + "learning_rate": 2.0142404481582888e-06, + "loss": 0.5509, + "step": 10263 + }, + { + "epoch": 0.7126787946118595, + "grad_norm": 3.5427807075817763, + "learning_rate": 2.013338540727539e-06, + "loss": 0.3121, + "step": 10264 + }, + { + "epoch": 0.7127482294125815, + "grad_norm": 4.857330896660007, + "learning_rate": 2.012436784358746e-06, + "loss": 0.5318, + "step": 10265 + }, + { + "epoch": 0.7128176642133037, + "grad_norm": 4.410608768044099, + "learning_rate": 2.01153517909752e-06, + "loss": 0.5933, + "step": 10266 + }, + { + "epoch": 0.7128870990140258, + "grad_norm": 4.734544478533617, + "learning_rate": 2.0106337249894613e-06, + "loss": 0.5929, + "step": 10267 + }, + { + "epoch": 0.712956533814748, + "grad_norm": 4.245343027103758, + "learning_rate": 2.0097324220801696e-06, + "loss": 0.5292, + "step": 10268 + }, + { + "epoch": 0.7130259686154701, + "grad_norm": 4.633856383149074, + "learning_rate": 2.0088312704152247e-06, + "loss": 0.6787, + "step": 10269 + }, + { + "epoch": 0.7130954034161922, + "grad_norm": 6.2002036660909, + "learning_rate": 2.007930270040211e-06, + "loss": 0.5811, + "step": 10270 + }, + { + "epoch": 0.7131648382169143, + "grad_norm": 3.1013742741004777, + "learning_rate": 2.0070294210007004e-06, + "loss": 0.3194, + "step": 10271 + }, + { + "epoch": 0.7132342730176364, + "grad_norm": 5.202356837403619, + "learning_rate": 2.006128723342251e-06, + "loss": 0.423, + "step": 10272 + }, + { + "epoch": 0.7133037078183586, + "grad_norm": 3.9610527922688403, + "learning_rate": 2.0052281771104255e-06, + "loss": 0.4671, + "step": 10273 + }, + { + "epoch": 0.7133731426190807, + "grad_norm": 3.6238533350804554, + "learning_rate": 2.0043277823507697e-06, + "loss": 0.3581, + "step": 10274 + }, + { + "epoch": 
0.7134425774198028, + "grad_norm": 3.9927396517254974, + "learning_rate": 2.0034275391088245e-06, + "loss": 0.4482, + "step": 10275 + }, + { + "epoch": 0.713512012220525, + "grad_norm": 3.9096826367983772, + "learning_rate": 2.0025274474301238e-06, + "loss": 0.4933, + "step": 10276 + }, + { + "epoch": 0.713581447021247, + "grad_norm": 4.267205255289849, + "learning_rate": 2.0016275073601927e-06, + "loss": 0.4924, + "step": 10277 + }, + { + "epoch": 0.7136508818219691, + "grad_norm": 5.846700274558715, + "learning_rate": 2.0007277189445478e-06, + "loss": 0.7268, + "step": 10278 + }, + { + "epoch": 0.7137203166226913, + "grad_norm": 3.197860546306613, + "learning_rate": 1.9998280822287043e-06, + "loss": 0.2401, + "step": 10279 + }, + { + "epoch": 0.7137897514234134, + "grad_norm": 4.130063578033967, + "learning_rate": 1.9989285972581595e-06, + "loss": 0.2663, + "step": 10280 + }, + { + "epoch": 0.7138591862241356, + "grad_norm": 4.175152336158564, + "learning_rate": 1.9980292640784095e-06, + "loss": 0.3002, + "step": 10281 + }, + { + "epoch": 0.7139286210248577, + "grad_norm": 3.388710457670331, + "learning_rate": 1.997130082734945e-06, + "loss": 0.2875, + "step": 10282 + }, + { + "epoch": 0.7139980558255797, + "grad_norm": 3.5778496133832505, + "learning_rate": 1.9962310532732413e-06, + "loss": 0.5172, + "step": 10283 + }, + { + "epoch": 0.7140674906263019, + "grad_norm": 4.323732719302313, + "learning_rate": 1.9953321757387704e-06, + "loss": 0.3105, + "step": 10284 + }, + { + "epoch": 0.714136925427024, + "grad_norm": 4.052305678389807, + "learning_rate": 1.994433450177e-06, + "loss": 0.3866, + "step": 10285 + }, + { + "epoch": 0.7142063602277462, + "grad_norm": 3.749262124733676, + "learning_rate": 1.993534876633386e-06, + "loss": 0.3014, + "step": 10286 + }, + { + "epoch": 0.7142757950284683, + "grad_norm": 2.7221677046336588, + "learning_rate": 1.9926364551533727e-06, + "loss": 0.3221, + "step": 10287 + }, + { + "epoch": 0.7143452298291904, + "grad_norm": 4.280799263476717, + "learning_rate": 1.991738185782406e-06, + "loss": 0.5113, + "step": 10288 + }, + { + "epoch": 0.7144146646299125, + "grad_norm": 4.022547668120578, + "learning_rate": 1.99084006856592e-06, + "loss": 0.4923, + "step": 10289 + }, + { + "epoch": 0.7144840994306346, + "grad_norm": 3.617828295975291, + "learning_rate": 1.9899421035493336e-06, + "loss": 0.453, + "step": 10290 + }, + { + "epoch": 0.7145535342313567, + "grad_norm": 4.2778688453422, + "learning_rate": 1.989044290778072e-06, + "loss": 0.5161, + "step": 10291 + }, + { + "epoch": 0.7146229690320789, + "grad_norm": 3.9448701862028983, + "learning_rate": 1.9881466302975432e-06, + "loss": 0.5158, + "step": 10292 + }, + { + "epoch": 0.714692403832801, + "grad_norm": 3.664423732695156, + "learning_rate": 1.98724912215315e-06, + "loss": 0.4505, + "step": 10293 + }, + { + "epoch": 0.7147618386335232, + "grad_norm": 3.2678375703983082, + "learning_rate": 1.986351766390286e-06, + "loss": 0.2866, + "step": 10294 + }, + { + "epoch": 0.7148312734342452, + "grad_norm": 4.158709177797214, + "learning_rate": 1.9854545630543405e-06, + "loss": 0.4678, + "step": 10295 + }, + { + "epoch": 0.7149007082349673, + "grad_norm": 4.429980432296422, + "learning_rate": 1.9845575121906913e-06, + "loss": 0.5944, + "step": 10296 + }, + { + "epoch": 0.7149701430356895, + "grad_norm": 4.349010131972708, + "learning_rate": 1.983660613844711e-06, + "loss": 0.7745, + "step": 10297 + }, + { + "epoch": 0.7150395778364116, + "grad_norm": 3.756604965833278, + "learning_rate": 1.982763868061764e-06, 
+ "loss": 0.4949, + "step": 10298 + }, + { + "epoch": 0.7151090126371338, + "grad_norm": 2.8291109422806344, + "learning_rate": 1.981867274887204e-06, + "loss": 0.2355, + "step": 10299 + }, + { + "epoch": 0.7151784474378559, + "grad_norm": 4.247133914553282, + "learning_rate": 1.9809708343663863e-06, + "loss": 0.4082, + "step": 10300 + }, + { + "epoch": 0.7152478822385779, + "grad_norm": 3.9475217159480938, + "learning_rate": 1.9800745465446436e-06, + "loss": 0.5585, + "step": 10301 + }, + { + "epoch": 0.7153173170393001, + "grad_norm": 4.4970577170303505, + "learning_rate": 1.9791784114673146e-06, + "loss": 0.4855, + "step": 10302 + }, + { + "epoch": 0.7153867518400222, + "grad_norm": 3.2251235796382947, + "learning_rate": 1.9782824291797237e-06, + "loss": 0.3114, + "step": 10303 + }, + { + "epoch": 0.7154561866407444, + "grad_norm": 3.733810189294256, + "learning_rate": 1.977386599727188e-06, + "loss": 0.4752, + "step": 10304 + }, + { + "epoch": 0.7155256214414665, + "grad_norm": 4.505326185519829, + "learning_rate": 1.976490923155018e-06, + "loss": 0.375, + "step": 10305 + }, + { + "epoch": 0.7155950562421886, + "grad_norm": 3.2617868126369585, + "learning_rate": 1.9755953995085155e-06, + "loss": 0.4362, + "step": 10306 + }, + { + "epoch": 0.7156644910429107, + "grad_norm": 4.765167638524704, + "learning_rate": 1.9747000288329748e-06, + "loss": 0.4187, + "step": 10307 + }, + { + "epoch": 0.7157339258436328, + "grad_norm": 3.6545828698252905, + "learning_rate": 1.973804811173684e-06, + "loss": 0.3378, + "step": 10308 + }, + { + "epoch": 0.7158033606443549, + "grad_norm": 4.310249983091472, + "learning_rate": 1.972909746575921e-06, + "loss": 0.6332, + "step": 10309 + }, + { + "epoch": 0.7158727954450771, + "grad_norm": 2.0469125333809837, + "learning_rate": 1.9720148350849566e-06, + "loss": 0.1441, + "step": 10310 + }, + { + "epoch": 0.7159422302457992, + "grad_norm": 4.451796016636683, + "learning_rate": 1.9711200767460586e-06, + "loss": 0.5256, + "step": 10311 + }, + { + "epoch": 0.7160116650465214, + "grad_norm": 4.006588309250673, + "learning_rate": 1.970225471604478e-06, + "loss": 0.2637, + "step": 10312 + }, + { + "epoch": 0.7160810998472434, + "grad_norm": 3.349476034569757, + "learning_rate": 1.9693310197054627e-06, + "loss": 0.3325, + "step": 10313 + }, + { + "epoch": 0.7161505346479655, + "grad_norm": 3.8461471350715266, + "learning_rate": 1.9684367210942584e-06, + "loss": 0.4986, + "step": 10314 + }, + { + "epoch": 0.7162199694486877, + "grad_norm": 3.8739108903679154, + "learning_rate": 1.9675425758160927e-06, + "loss": 0.4683, + "step": 10315 + }, + { + "epoch": 0.7162894042494098, + "grad_norm": 3.6367773515914577, + "learning_rate": 1.9666485839161903e-06, + "loss": 0.4329, + "step": 10316 + }, + { + "epoch": 0.716358839050132, + "grad_norm": 4.378257841755187, + "learning_rate": 1.965754745439772e-06, + "loss": 0.5452, + "step": 10317 + }, + { + "epoch": 0.7164282738508541, + "grad_norm": 3.884110485774903, + "learning_rate": 1.9648610604320467e-06, + "loss": 0.3565, + "step": 10318 + }, + { + "epoch": 0.7164977086515761, + "grad_norm": 4.623689031783223, + "learning_rate": 1.9639675289382115e-06, + "loss": 0.6918, + "step": 10319 + }, + { + "epoch": 0.7165671434522983, + "grad_norm": 4.225074253888768, + "learning_rate": 1.9630741510034646e-06, + "loss": 0.5476, + "step": 10320 + }, + { + "epoch": 0.7166365782530204, + "grad_norm": 3.2039931692167536, + "learning_rate": 1.96218092667299e-06, + "loss": 0.2619, + "step": 10321 + }, + { + "epoch": 0.7167060130537425, + 
"grad_norm": 3.4436096467232367, + "learning_rate": 1.9612878559919673e-06, + "loss": 0.3397, + "step": 10322 + }, + { + "epoch": 0.7167754478544647, + "grad_norm": 3.818075751303728, + "learning_rate": 1.960394939005566e-06, + "loss": 0.3356, + "step": 10323 + }, + { + "epoch": 0.7168448826551868, + "grad_norm": 3.4247548880612206, + "learning_rate": 1.9595021757589496e-06, + "loss": 0.3518, + "step": 10324 + }, + { + "epoch": 0.716914317455909, + "grad_norm": 3.642101640670237, + "learning_rate": 1.958609566297272e-06, + "loss": 0.4255, + "step": 10325 + }, + { + "epoch": 0.716983752256631, + "grad_norm": 4.011159203124636, + "learning_rate": 1.957717110665681e-06, + "loss": 0.4696, + "step": 10326 + }, + { + "epoch": 0.7170531870573531, + "grad_norm": 3.9967840929984484, + "learning_rate": 1.956824808909317e-06, + "loss": 0.4702, + "step": 10327 + }, + { + "epoch": 0.7171226218580753, + "grad_norm": 3.3649957770874948, + "learning_rate": 1.9559326610733077e-06, + "loss": 0.4584, + "step": 10328 + }, + { + "epoch": 0.7171920566587974, + "grad_norm": 3.103648931140334, + "learning_rate": 1.9550406672027837e-06, + "loss": 0.2119, + "step": 10329 + }, + { + "epoch": 0.7172614914595196, + "grad_norm": 5.567371371952293, + "learning_rate": 1.9541488273428534e-06, + "loss": 0.6729, + "step": 10330 + }, + { + "epoch": 0.7173309262602416, + "grad_norm": 2.8639494269657684, + "learning_rate": 1.953257141538631e-06, + "loss": 0.2318, + "step": 10331 + }, + { + "epoch": 0.7174003610609637, + "grad_norm": 3.193118514037793, + "learning_rate": 1.9523656098352162e-06, + "loss": 0.4453, + "step": 10332 + }, + { + "epoch": 0.7174697958616859, + "grad_norm": 4.278928955471596, + "learning_rate": 1.9514742322776968e-06, + "loss": 0.5416, + "step": 10333 + }, + { + "epoch": 0.717539230662408, + "grad_norm": 3.017751094277211, + "learning_rate": 1.950583008911163e-06, + "loss": 0.2469, + "step": 10334 + }, + { + "epoch": 0.7176086654631301, + "grad_norm": 4.8235734492926055, + "learning_rate": 1.9496919397806898e-06, + "loss": 0.5053, + "step": 10335 + }, + { + "epoch": 0.7176781002638523, + "grad_norm": 4.492921336757589, + "learning_rate": 1.948801024931346e-06, + "loss": 0.4533, + "step": 10336 + }, + { + "epoch": 0.7177475350645743, + "grad_norm": 6.33195854033049, + "learning_rate": 1.947910264408195e-06, + "loss": 0.4194, + "step": 10337 + }, + { + "epoch": 0.7178169698652965, + "grad_norm": 4.139448565051755, + "learning_rate": 1.9470196582562877e-06, + "loss": 0.5734, + "step": 10338 + }, + { + "epoch": 0.7178864046660186, + "grad_norm": 3.6245010704987792, + "learning_rate": 1.9461292065206726e-06, + "loss": 0.4707, + "step": 10339 + }, + { + "epoch": 0.7179558394667407, + "grad_norm": 4.135308336798016, + "learning_rate": 1.945238909246386e-06, + "loss": 0.5861, + "step": 10340 + }, + { + "epoch": 0.7180252742674629, + "grad_norm": 3.8152999821739098, + "learning_rate": 1.9443487664784587e-06, + "loss": 0.5539, + "step": 10341 + }, + { + "epoch": 0.718094709068185, + "grad_norm": 2.3691593332426297, + "learning_rate": 1.9434587782619113e-06, + "loss": 0.2617, + "step": 10342 + }, + { + "epoch": 0.7181641438689071, + "grad_norm": 3.291795561724959, + "learning_rate": 1.942568944641764e-06, + "loss": 0.3892, + "step": 10343 + }, + { + "epoch": 0.7182335786696292, + "grad_norm": 4.047865186084089, + "learning_rate": 1.941679265663018e-06, + "loss": 0.6025, + "step": 10344 + }, + { + "epoch": 0.7183030134703513, + "grad_norm": 5.744633300685295, + "learning_rate": 1.940789741370672e-06, + "loss": 
0.4259, + "step": 10345 + }, + { + "epoch": 0.7183724482710735, + "grad_norm": 5.9308705829550625, + "learning_rate": 1.9399003718097224e-06, + "loss": 0.5873, + "step": 10346 + }, + { + "epoch": 0.7184418830717956, + "grad_norm": 4.377756597756969, + "learning_rate": 1.939011157025148e-06, + "loss": 0.5717, + "step": 10347 + }, + { + "epoch": 0.7185113178725177, + "grad_norm": 3.4066513824482523, + "learning_rate": 1.9381220970619237e-06, + "loss": 0.4097, + "step": 10348 + }, + { + "epoch": 0.7185807526732398, + "grad_norm": 3.1109932005999164, + "learning_rate": 1.9372331919650205e-06, + "loss": 0.3406, + "step": 10349 + }, + { + "epoch": 0.7186501874739619, + "grad_norm": 3.060398903516486, + "learning_rate": 1.936344441779397e-06, + "loss": 0.2693, + "step": 10350 + }, + { + "epoch": 0.7187196222746841, + "grad_norm": 3.465404605835533, + "learning_rate": 1.9354558465500044e-06, + "loss": 0.3913, + "step": 10351 + }, + { + "epoch": 0.7187890570754062, + "grad_norm": 4.1049677910661035, + "learning_rate": 1.934567406321788e-06, + "loss": 0.6235, + "step": 10352 + }, + { + "epoch": 0.7188584918761283, + "grad_norm": 4.881389486145153, + "learning_rate": 1.933679121139683e-06, + "loss": 0.5979, + "step": 10353 + }, + { + "epoch": 0.7189279266768505, + "grad_norm": 3.648992695445106, + "learning_rate": 1.932790991048619e-06, + "loss": 0.3266, + "step": 10354 + }, + { + "epoch": 0.7189973614775725, + "grad_norm": 4.326416716160098, + "learning_rate": 1.931903016093515e-06, + "loss": 0.7641, + "step": 10355 + }, + { + "epoch": 0.7190667962782947, + "grad_norm": 3.8862646799721725, + "learning_rate": 1.9310151963192847e-06, + "loss": 0.3119, + "step": 10356 + }, + { + "epoch": 0.7191362310790168, + "grad_norm": 4.497789632752804, + "learning_rate": 1.930127531770833e-06, + "loss": 0.6797, + "step": 10357 + }, + { + "epoch": 0.7192056658797389, + "grad_norm": 3.6669789494683913, + "learning_rate": 1.929240022493057e-06, + "loss": 0.3102, + "step": 10358 + }, + { + "epoch": 0.7192751006804611, + "grad_norm": 3.1917938954653975, + "learning_rate": 1.928352668530844e-06, + "loss": 0.3264, + "step": 10359 + }, + { + "epoch": 0.7193445354811832, + "grad_norm": 3.254455571459327, + "learning_rate": 1.9274654699290797e-06, + "loss": 0.2302, + "step": 10360 + }, + { + "epoch": 0.7194139702819053, + "grad_norm": 3.0650925235826634, + "learning_rate": 1.926578426732636e-06, + "loss": 0.4025, + "step": 10361 + }, + { + "epoch": 0.7194834050826274, + "grad_norm": 4.61136272395699, + "learning_rate": 1.9256915389863746e-06, + "loss": 0.3383, + "step": 10362 + }, + { + "epoch": 0.7195528398833495, + "grad_norm": 4.196656349664857, + "learning_rate": 1.924804806735158e-06, + "loss": 0.4692, + "step": 10363 + }, + { + "epoch": 0.7196222746840717, + "grad_norm": 3.782286021574904, + "learning_rate": 1.923918230023836e-06, + "loss": 0.4036, + "step": 10364 + }, + { + "epoch": 0.7196917094847938, + "grad_norm": 3.3212379188802954, + "learning_rate": 1.923031808897246e-06, + "loss": 0.3843, + "step": 10365 + }, + { + "epoch": 0.7197611442855159, + "grad_norm": 4.845230799994959, + "learning_rate": 1.9221455434002272e-06, + "loss": 0.6732, + "step": 10366 + }, + { + "epoch": 0.719830579086238, + "grad_norm": 3.9720150571105086, + "learning_rate": 1.9212594335776035e-06, + "loss": 0.4554, + "step": 10367 + }, + { + "epoch": 0.7199000138869601, + "grad_norm": 4.163420696442243, + "learning_rate": 1.920373479474194e-06, + "loss": 0.5003, + "step": 10368 + }, + { + "epoch": 0.7199694486876823, + "grad_norm": 
3.0662536630299932, + "learning_rate": 1.9194876811348088e-06, + "loss": 0.2985, + "step": 10369 + }, + { + "epoch": 0.7200388834884044, + "grad_norm": 2.5763261011504937, + "learning_rate": 1.9186020386042514e-06, + "loss": 0.218, + "step": 10370 + }, + { + "epoch": 0.7201083182891265, + "grad_norm": 5.625630249552127, + "learning_rate": 1.9177165519273154e-06, + "loss": 0.2906, + "step": 10371 + }, + { + "epoch": 0.7201777530898487, + "grad_norm": 3.706872863994817, + "learning_rate": 1.916831221148789e-06, + "loss": 0.4384, + "step": 10372 + }, + { + "epoch": 0.7202471878905707, + "grad_norm": 3.526486731367472, + "learning_rate": 1.9159460463134506e-06, + "loss": 0.3581, + "step": 10373 + }, + { + "epoch": 0.7203166226912929, + "grad_norm": 3.634394140501347, + "learning_rate": 1.915061027466069e-06, + "loss": 0.3842, + "step": 10374 + }, + { + "epoch": 0.720386057492015, + "grad_norm": 10.021057348217356, + "learning_rate": 1.9141761646514134e-06, + "loss": 0.3748, + "step": 10375 + }, + { + "epoch": 0.7204554922927371, + "grad_norm": 3.8866344344343107, + "learning_rate": 1.913291457914234e-06, + "loss": 0.4212, + "step": 10376 + }, + { + "epoch": 0.7205249270934593, + "grad_norm": 3.410931107051103, + "learning_rate": 1.9124069072992778e-06, + "loss": 0.3757, + "step": 10377 + }, + { + "epoch": 0.7205943618941814, + "grad_norm": 3.0257560921732836, + "learning_rate": 1.9115225128512903e-06, + "loss": 0.2282, + "step": 10378 + }, + { + "epoch": 0.7206637966949034, + "grad_norm": 3.7541917046800606, + "learning_rate": 1.9106382746149977e-06, + "loss": 0.3513, + "step": 10379 + }, + { + "epoch": 0.7207332314956256, + "grad_norm": 4.058968343059793, + "learning_rate": 1.9097541926351233e-06, + "loss": 0.4742, + "step": 10380 + }, + { + "epoch": 0.7208026662963477, + "grad_norm": 4.309107107285307, + "learning_rate": 1.908870266956386e-06, + "loss": 0.4641, + "step": 10381 + }, + { + "epoch": 0.7208721010970699, + "grad_norm": 5.021108926161804, + "learning_rate": 1.9079864976234936e-06, + "loss": 0.5119, + "step": 10382 + }, + { + "epoch": 0.720941535897792, + "grad_norm": 3.5055533002914663, + "learning_rate": 1.9071028846811451e-06, + "loss": 0.4275, + "step": 10383 + }, + { + "epoch": 0.721010970698514, + "grad_norm": 3.394838762949089, + "learning_rate": 1.9062194281740327e-06, + "loss": 0.2748, + "step": 10384 + }, + { + "epoch": 0.7210804054992362, + "grad_norm": 4.801379524664417, + "learning_rate": 1.9053361281468408e-06, + "loss": 0.4321, + "step": 10385 + }, + { + "epoch": 0.7211498402999583, + "grad_norm": 4.529608501261774, + "learning_rate": 1.9044529846442461e-06, + "loss": 0.6396, + "step": 10386 + }, + { + "epoch": 0.7212192751006805, + "grad_norm": 4.639755963630356, + "learning_rate": 1.9035699977109162e-06, + "loss": 0.2645, + "step": 10387 + }, + { + "epoch": 0.7212887099014026, + "grad_norm": 3.382497242739017, + "learning_rate": 1.9026871673915121e-06, + "loss": 0.5789, + "step": 10388 + }, + { + "epoch": 0.7213581447021247, + "grad_norm": 4.285060359994898, + "learning_rate": 1.9018044937306862e-06, + "loss": 0.4603, + "step": 10389 + }, + { + "epoch": 0.7214275795028469, + "grad_norm": 4.722807281501047, + "learning_rate": 1.900921976773083e-06, + "loss": 0.4153, + "step": 10390 + }, + { + "epoch": 0.7214970143035689, + "grad_norm": 3.7248109974500054, + "learning_rate": 1.9000396165633378e-06, + "loss": 0.3958, + "step": 10391 + }, + { + "epoch": 0.721566449104291, + "grad_norm": 4.576365644423474, + "learning_rate": 1.8991574131460826e-06, + "loss": 0.6004, + 
"step": 10392 + }, + { + "epoch": 0.7216358839050132, + "grad_norm": 4.98528050561496, + "learning_rate": 1.8982753665659382e-06, + "loss": 0.4566, + "step": 10393 + }, + { + "epoch": 0.7217053187057353, + "grad_norm": 4.037760117883098, + "learning_rate": 1.8973934768675128e-06, + "loss": 0.5679, + "step": 10394 + }, + { + "epoch": 0.7217747535064575, + "grad_norm": 2.8735158838748167, + "learning_rate": 1.8965117440954162e-06, + "loss": 0.2303, + "step": 10395 + }, + { + "epoch": 0.7218441883071796, + "grad_norm": 3.443778259938719, + "learning_rate": 1.8956301682942451e-06, + "loss": 0.1924, + "step": 10396 + }, + { + "epoch": 0.7219136231079016, + "grad_norm": 3.8886432294215147, + "learning_rate": 1.8947487495085842e-06, + "loss": 0.2988, + "step": 10397 + }, + { + "epoch": 0.7219830579086238, + "grad_norm": 3.5679895726899478, + "learning_rate": 1.8938674877830194e-06, + "loss": 0.471, + "step": 10398 + }, + { + "epoch": 0.7220524927093459, + "grad_norm": 2.844415399820664, + "learning_rate": 1.892986383162122e-06, + "loss": 0.3274, + "step": 10399 + }, + { + "epoch": 0.7221219275100681, + "grad_norm": 3.424525664420587, + "learning_rate": 1.8921054356904572e-06, + "loss": 0.4557, + "step": 10400 + }, + { + "epoch": 0.7221913623107902, + "grad_norm": 3.3451587368890903, + "learning_rate": 1.8912246454125831e-06, + "loss": 0.3493, + "step": 10401 + }, + { + "epoch": 0.7222607971115123, + "grad_norm": 4.131787150672724, + "learning_rate": 1.8903440123730487e-06, + "loss": 0.524, + "step": 10402 + }, + { + "epoch": 0.7223302319122344, + "grad_norm": 5.135661372963556, + "learning_rate": 1.8894635366163956e-06, + "loss": 0.6289, + "step": 10403 + }, + { + "epoch": 0.7223996667129565, + "grad_norm": 3.8328552848654214, + "learning_rate": 1.8885832181871567e-06, + "loss": 0.5213, + "step": 10404 + }, + { + "epoch": 0.7224691015136786, + "grad_norm": 5.912377009610363, + "learning_rate": 1.8877030571298577e-06, + "loss": 0.6889, + "step": 10405 + }, + { + "epoch": 0.7225385363144008, + "grad_norm": 4.595121205975614, + "learning_rate": 1.886823053489014e-06, + "loss": 0.4055, + "step": 10406 + }, + { + "epoch": 0.7226079711151229, + "grad_norm": 4.577352965878118, + "learning_rate": 1.8859432073091422e-06, + "loss": 0.65, + "step": 10407 + }, + { + "epoch": 0.7226774059158451, + "grad_norm": 4.45659078189218, + "learning_rate": 1.8850635186347366e-06, + "loss": 0.5359, + "step": 10408 + }, + { + "epoch": 0.7227468407165671, + "grad_norm": 4.353325853863383, + "learning_rate": 1.8841839875102918e-06, + "loss": 0.5194, + "step": 10409 + }, + { + "epoch": 0.7228162755172892, + "grad_norm": 5.458297866441244, + "learning_rate": 1.8833046139802968e-06, + "loss": 0.265, + "step": 10410 + }, + { + "epoch": 0.7228857103180114, + "grad_norm": 4.633364774875235, + "learning_rate": 1.8824253980892281e-06, + "loss": 0.585, + "step": 10411 + }, + { + "epoch": 0.7229551451187335, + "grad_norm": 3.3341461652373843, + "learning_rate": 1.8815463398815548e-06, + "loss": 0.3202, + "step": 10412 + }, + { + "epoch": 0.7230245799194557, + "grad_norm": 5.014911007763133, + "learning_rate": 1.8806674394017382e-06, + "loss": 0.5599, + "step": 10413 + }, + { + "epoch": 0.7230940147201778, + "grad_norm": 3.9145199624704516, + "learning_rate": 1.8797886966942336e-06, + "loss": 0.3198, + "step": 10414 + }, + { + "epoch": 0.7231634495208998, + "grad_norm": 3.552068864398843, + "learning_rate": 1.8789101118034858e-06, + "loss": 0.3215, + "step": 10415 + }, + { + "epoch": 0.723232884321622, + "grad_norm": 
3.0524214986926332, + "learning_rate": 1.8780316847739327e-06, + "loss": 0.2782, + "step": 10416 + }, + { + "epoch": 0.7233023191223441, + "grad_norm": 3.3194884128421127, + "learning_rate": 1.8771534156500037e-06, + "loss": 0.2133, + "step": 10417 + }, + { + "epoch": 0.7233717539230663, + "grad_norm": 6.643639390820683, + "learning_rate": 1.8762753044761218e-06, + "loss": 0.7841, + "step": 10418 + }, + { + "epoch": 0.7234411887237884, + "grad_norm": 3.491015914474742, + "learning_rate": 1.8753973512967005e-06, + "loss": 0.2749, + "step": 10419 + }, + { + "epoch": 0.7235106235245105, + "grad_norm": 3.5955664844133963, + "learning_rate": 1.874519556156143e-06, + "loss": 0.3446, + "step": 10420 + }, + { + "epoch": 0.7235800583252326, + "grad_norm": 2.9217095498533414, + "learning_rate": 1.8736419190988537e-06, + "loss": 0.1956, + "step": 10421 + }, + { + "epoch": 0.7236494931259547, + "grad_norm": 4.247071466552116, + "learning_rate": 1.8727644401692163e-06, + "loss": 0.4185, + "step": 10422 + }, + { + "epoch": 0.7237189279266768, + "grad_norm": 3.5017975927804788, + "learning_rate": 1.8718871194116133e-06, + "loss": 0.4481, + "step": 10423 + }, + { + "epoch": 0.723788362727399, + "grad_norm": 4.893756368947829, + "learning_rate": 1.8710099568704215e-06, + "loss": 0.4342, + "step": 10424 + }, + { + "epoch": 0.7238577975281211, + "grad_norm": 3.6936595140201933, + "learning_rate": 1.8701329525900076e-06, + "loss": 0.3599, + "step": 10425 + }, + { + "epoch": 0.7239272323288433, + "grad_norm": 3.8201770033232307, + "learning_rate": 1.8692561066147236e-06, + "loss": 0.2703, + "step": 10426 + }, + { + "epoch": 0.7239966671295653, + "grad_norm": 3.256825535518447, + "learning_rate": 1.8683794189889248e-06, + "loss": 0.3155, + "step": 10427 + }, + { + "epoch": 0.7240661019302874, + "grad_norm": 5.6849929945389786, + "learning_rate": 1.867502889756953e-06, + "loss": 0.6592, + "step": 10428 + }, + { + "epoch": 0.7241355367310096, + "grad_norm": 2.9809670425017054, + "learning_rate": 1.8666265189631367e-06, + "loss": 0.3314, + "step": 10429 + }, + { + "epoch": 0.7242049715317317, + "grad_norm": 4.212355767852974, + "learning_rate": 1.8657503066518072e-06, + "loss": 0.3658, + "step": 10430 + }, + { + "epoch": 0.7242744063324539, + "grad_norm": 4.323903712676294, + "learning_rate": 1.8648742528672808e-06, + "loss": 0.5472, + "step": 10431 + }, + { + "epoch": 0.724343841133176, + "grad_norm": 2.617350105935429, + "learning_rate": 1.8639983576538672e-06, + "loss": 0.1862, + "step": 10432 + }, + { + "epoch": 0.724413275933898, + "grad_norm": 3.248252473679322, + "learning_rate": 1.8631226210558678e-06, + "loss": 0.4001, + "step": 10433 + }, + { + "epoch": 0.7244827107346202, + "grad_norm": 4.128433096137738, + "learning_rate": 1.8622470431175764e-06, + "loss": 0.4915, + "step": 10434 + }, + { + "epoch": 0.7245521455353423, + "grad_norm": 3.863701648651562, + "learning_rate": 1.8613716238832774e-06, + "loss": 0.2347, + "step": 10435 + }, + { + "epoch": 0.7246215803360644, + "grad_norm": 4.1399887874890515, + "learning_rate": 1.8604963633972539e-06, + "loss": 0.5786, + "step": 10436 + }, + { + "epoch": 0.7246910151367866, + "grad_norm": 3.455954382110435, + "learning_rate": 1.8596212617037695e-06, + "loss": 0.3837, + "step": 10437 + }, + { + "epoch": 0.7247604499375087, + "grad_norm": 3.4697099484394953, + "learning_rate": 1.8587463188470867e-06, + "loss": 0.3358, + "step": 10438 + }, + { + "epoch": 0.7248298847382308, + "grad_norm": 3.762731027466558, + "learning_rate": 1.8578715348714638e-06, + "loss": 
0.2935, + "step": 10439 + }, + { + "epoch": 0.7248993195389529, + "grad_norm": 3.925431339962249, + "learning_rate": 1.8569969098211399e-06, + "loss": 0.5109, + "step": 10440 + }, + { + "epoch": 0.724968754339675, + "grad_norm": 3.931060937458799, + "learning_rate": 1.8561224437403569e-06, + "loss": 0.3199, + "step": 10441 + }, + { + "epoch": 0.7250381891403972, + "grad_norm": 3.7276782860789353, + "learning_rate": 1.8552481366733437e-06, + "loss": 0.4382, + "step": 10442 + }, + { + "epoch": 0.7251076239411193, + "grad_norm": 3.851752227364194, + "learning_rate": 1.8543739886643214e-06, + "loss": 0.4471, + "step": 10443 + }, + { + "epoch": 0.7251770587418415, + "grad_norm": 4.473183690953418, + "learning_rate": 1.8534999997575026e-06, + "loss": 0.5164, + "step": 10444 + }, + { + "epoch": 0.7252464935425635, + "grad_norm": 2.947366210469233, + "learning_rate": 1.8526261699970937e-06, + "loss": 0.3323, + "step": 10445 + }, + { + "epoch": 0.7253159283432856, + "grad_norm": 4.842107933900169, + "learning_rate": 1.8517524994272917e-06, + "loss": 0.4656, + "step": 10446 + }, + { + "epoch": 0.7253853631440078, + "grad_norm": 5.162199496891453, + "learning_rate": 1.8508789880922857e-06, + "loss": 0.402, + "step": 10447 + }, + { + "epoch": 0.7254547979447299, + "grad_norm": 3.1272347272857486, + "learning_rate": 1.8500056360362568e-06, + "loss": 0.2671, + "step": 10448 + }, + { + "epoch": 0.725524232745452, + "grad_norm": 5.482311826227409, + "learning_rate": 1.8491324433033787e-06, + "loss": 0.6265, + "step": 10449 + }, + { + "epoch": 0.7255936675461742, + "grad_norm": 3.21324538880097, + "learning_rate": 1.8482594099378154e-06, + "loss": 0.3105, + "step": 10450 + }, + { + "epoch": 0.7256631023468962, + "grad_norm": 4.208348695972901, + "learning_rate": 1.8473865359837257e-06, + "loss": 0.3104, + "step": 10451 + }, + { + "epoch": 0.7257325371476184, + "grad_norm": 3.0272283436760996, + "learning_rate": 1.846513821485255e-06, + "loss": 0.256, + "step": 10452 + }, + { + "epoch": 0.7258019719483405, + "grad_norm": 5.75661854706887, + "learning_rate": 1.845641266486551e-06, + "loss": 0.7772, + "step": 10453 + }, + { + "epoch": 0.7258714067490626, + "grad_norm": 3.300814890445595, + "learning_rate": 1.8447688710317402e-06, + "loss": 0.3599, + "step": 10454 + }, + { + "epoch": 0.7259408415497848, + "grad_norm": 4.1604795104864, + "learning_rate": 1.8438966351649478e-06, + "loss": 0.4031, + "step": 10455 + }, + { + "epoch": 0.7260102763505069, + "grad_norm": 4.11808694476157, + "learning_rate": 1.8430245589302943e-06, + "loss": 0.457, + "step": 10456 + }, + { + "epoch": 0.726079711151229, + "grad_norm": 4.210490363365273, + "learning_rate": 1.8421526423718878e-06, + "loss": 0.4652, + "step": 10457 + }, + { + "epoch": 0.7261491459519511, + "grad_norm": 3.42724931718003, + "learning_rate": 1.8412808855338237e-06, + "loss": 0.3925, + "step": 10458 + }, + { + "epoch": 0.7262185807526732, + "grad_norm": 2.657240049410536, + "learning_rate": 1.8404092884601998e-06, + "loss": 0.2128, + "step": 10459 + }, + { + "epoch": 0.7262880155533954, + "grad_norm": 4.249968656747497, + "learning_rate": 1.839537851195099e-06, + "loss": 0.562, + "step": 10460 + }, + { + "epoch": 0.7263574503541175, + "grad_norm": 3.8245258436166836, + "learning_rate": 1.838666573782597e-06, + "loss": 0.4739, + "step": 10461 + }, + { + "epoch": 0.7264268851548396, + "grad_norm": 2.59958998655832, + "learning_rate": 1.8377954562667627e-06, + "loss": 0.3104, + "step": 10462 + }, + { + "epoch": 0.7264963199555617, + "grad_norm": 
4.866218067793073, + "learning_rate": 1.8369244986916556e-06, + "loss": 0.3223, + "step": 10463 + }, + { + "epoch": 0.7265657547562838, + "grad_norm": 4.892854945361474, + "learning_rate": 1.8360537011013286e-06, + "loss": 0.4744, + "step": 10464 + }, + { + "epoch": 0.726635189557006, + "grad_norm": 5.476254607352019, + "learning_rate": 1.835183063539825e-06, + "loss": 0.5793, + "step": 10465 + }, + { + "epoch": 0.7267046243577281, + "grad_norm": 2.921998900734984, + "learning_rate": 1.8343125860511808e-06, + "loss": 0.2855, + "step": 10466 + }, + { + "epoch": 0.7267740591584502, + "grad_norm": 4.410731545322353, + "learning_rate": 1.8334422686794218e-06, + "loss": 0.4924, + "step": 10467 + }, + { + "epoch": 0.7268434939591724, + "grad_norm": 4.085849210258286, + "learning_rate": 1.8325721114685735e-06, + "loss": 0.359, + "step": 10468 + }, + { + "epoch": 0.7269129287598944, + "grad_norm": 3.5644106349719977, + "learning_rate": 1.8317021144626418e-06, + "loss": 0.3354, + "step": 10469 + }, + { + "epoch": 0.7269823635606166, + "grad_norm": 3.158365048687018, + "learning_rate": 1.8308322777056297e-06, + "loss": 0.4888, + "step": 10470 + }, + { + "epoch": 0.7270517983613387, + "grad_norm": 3.9303843434828445, + "learning_rate": 1.8299626012415389e-06, + "loss": 0.3609, + "step": 10471 + }, + { + "epoch": 0.7271212331620608, + "grad_norm": 6.599042062038926, + "learning_rate": 1.829093085114349e-06, + "loss": 0.7258, + "step": 10472 + }, + { + "epoch": 0.727190667962783, + "grad_norm": 4.184879342939031, + "learning_rate": 1.8282237293680444e-06, + "loss": 0.4301, + "step": 10473 + }, + { + "epoch": 0.7272601027635051, + "grad_norm": 4.451119699734514, + "learning_rate": 1.8273545340465948e-06, + "loss": 0.5293, + "step": 10474 + }, + { + "epoch": 0.7273295375642271, + "grad_norm": 4.398997749649687, + "learning_rate": 1.8264854991939623e-06, + "loss": 0.5675, + "step": 10475 + }, + { + "epoch": 0.7273989723649493, + "grad_norm": 4.448067733852272, + "learning_rate": 1.8256166248541024e-06, + "loss": 0.6692, + "step": 10476 + }, + { + "epoch": 0.7274684071656714, + "grad_norm": 3.708971740325353, + "learning_rate": 1.8247479110709616e-06, + "loss": 0.4476, + "step": 10477 + }, + { + "epoch": 0.7275378419663936, + "grad_norm": 2.4855686031326303, + "learning_rate": 1.8238793578884778e-06, + "loss": 0.1343, + "step": 10478 + }, + { + "epoch": 0.7276072767671157, + "grad_norm": 3.773123443437252, + "learning_rate": 1.823010965350583e-06, + "loss": 0.4621, + "step": 10479 + }, + { + "epoch": 0.7276767115678378, + "grad_norm": 3.887977735083403, + "learning_rate": 1.8221427335011983e-06, + "loss": 0.3585, + "step": 10480 + }, + { + "epoch": 0.7277461463685599, + "grad_norm": 1.9983578420190555, + "learning_rate": 1.821274662384236e-06, + "loss": 0.1022, + "step": 10481 + }, + { + "epoch": 0.727815581169282, + "grad_norm": 3.324723701368467, + "learning_rate": 1.8204067520436081e-06, + "loss": 0.2719, + "step": 10482 + }, + { + "epoch": 0.7278850159700042, + "grad_norm": 3.5660804570084084, + "learning_rate": 1.8195390025232069e-06, + "loss": 0.517, + "step": 10483 + }, + { + "epoch": 0.7279544507707263, + "grad_norm": 4.489318101460402, + "learning_rate": 1.818671413866922e-06, + "loss": 0.3725, + "step": 10484 + }, + { + "epoch": 0.7280238855714484, + "grad_norm": 3.906122543091545, + "learning_rate": 1.817803986118639e-06, + "loss": 0.3255, + "step": 10485 + }, + { + "epoch": 0.7280933203721706, + "grad_norm": 4.317719500880463, + "learning_rate": 1.8169367193222315e-06, + "loss": 0.4528, + 
"step": 10486 + }, + { + "epoch": 0.7281627551728926, + "grad_norm": 3.838412397300347, + "learning_rate": 1.8160696135215593e-06, + "loss": 0.4493, + "step": 10487 + }, + { + "epoch": 0.7282321899736148, + "grad_norm": 3.2759970803867797, + "learning_rate": 1.8152026687604845e-06, + "loss": 0.3043, + "step": 10488 + }, + { + "epoch": 0.7283016247743369, + "grad_norm": 3.4089725995355806, + "learning_rate": 1.8143358850828574e-06, + "loss": 0.37, + "step": 10489 + }, + { + "epoch": 0.728371059575059, + "grad_norm": 3.649035721723917, + "learning_rate": 1.8134692625325122e-06, + "loss": 0.4728, + "step": 10490 + }, + { + "epoch": 0.7284404943757812, + "grad_norm": 4.183406835687212, + "learning_rate": 1.812602801153288e-06, + "loss": 0.4875, + "step": 10491 + }, + { + "epoch": 0.7285099291765033, + "grad_norm": 3.1038897017837757, + "learning_rate": 1.811736500989007e-06, + "loss": 0.1829, + "step": 10492 + }, + { + "epoch": 0.7285793639772253, + "grad_norm": 3.907787400133084, + "learning_rate": 1.8108703620834867e-06, + "loss": 0.3847, + "step": 10493 + }, + { + "epoch": 0.7286487987779475, + "grad_norm": 5.073873055142729, + "learning_rate": 1.8100043844805343e-06, + "loss": 0.5125, + "step": 10494 + }, + { + "epoch": 0.7287182335786696, + "grad_norm": 4.105170910212948, + "learning_rate": 1.8091385682239503e-06, + "loss": 0.4146, + "step": 10495 + }, + { + "epoch": 0.7287876683793918, + "grad_norm": 3.3586930671701536, + "learning_rate": 1.8082729133575267e-06, + "loss": 0.2951, + "step": 10496 + }, + { + "epoch": 0.7288571031801139, + "grad_norm": 3.910943558899524, + "learning_rate": 1.807407419925048e-06, + "loss": 0.4517, + "step": 10497 + }, + { + "epoch": 0.728926537980836, + "grad_norm": 4.43391871594695, + "learning_rate": 1.8065420879702888e-06, + "loss": 0.483, + "step": 10498 + }, + { + "epoch": 0.7289959727815581, + "grad_norm": 3.722803566859759, + "learning_rate": 1.8056769175370159e-06, + "loss": 0.3976, + "step": 10499 + }, + { + "epoch": 0.7290654075822802, + "grad_norm": 3.622510456360244, + "learning_rate": 1.804811908668993e-06, + "loss": 0.3786, + "step": 10500 + }, + { + "epoch": 0.7291348423830024, + "grad_norm": 2.448157517500828, + "learning_rate": 1.8039470614099647e-06, + "loss": 0.2437, + "step": 10501 + }, + { + "epoch": 0.7292042771837245, + "grad_norm": 3.961519510373321, + "learning_rate": 1.8030823758036791e-06, + "loss": 0.4784, + "step": 10502 + }, + { + "epoch": 0.7292737119844466, + "grad_norm": 4.29226445555635, + "learning_rate": 1.8022178518938715e-06, + "loss": 0.5028, + "step": 10503 + }, + { + "epoch": 0.7293431467851688, + "grad_norm": 3.33734522452691, + "learning_rate": 1.8013534897242625e-06, + "loss": 0.2753, + "step": 10504 + }, + { + "epoch": 0.7294125815858908, + "grad_norm": 3.9729155806300933, + "learning_rate": 1.8004892893385766e-06, + "loss": 0.3048, + "step": 10505 + }, + { + "epoch": 0.7294820163866129, + "grad_norm": 3.7052280366624397, + "learning_rate": 1.7996252507805218e-06, + "loss": 0.4737, + "step": 10506 + }, + { + "epoch": 0.7295514511873351, + "grad_norm": 3.9964376681114735, + "learning_rate": 1.7987613740938003e-06, + "loss": 0.2655, + "step": 10507 + }, + { + "epoch": 0.7296208859880572, + "grad_norm": 4.267274680989353, + "learning_rate": 1.797897659322106e-06, + "loss": 0.4288, + "step": 10508 + }, + { + "epoch": 0.7296903207887794, + "grad_norm": 4.558896270599049, + "learning_rate": 1.7970341065091246e-06, + "loss": 0.536, + "step": 10509 + }, + { + "epoch": 0.7297597555895015, + "grad_norm": 3.6820911703646173, 
+ "learning_rate": 1.7961707156985324e-06, + "loss": 0.4562, + "step": 10510 + }, + { + "epoch": 0.7298291903902235, + "grad_norm": 3.5131344427311095, + "learning_rate": 1.7953074869340032e-06, + "loss": 0.3073, + "step": 10511 + }, + { + "epoch": 0.7298986251909457, + "grad_norm": 3.690547985525075, + "learning_rate": 1.7944444202591933e-06, + "loss": 0.3631, + "step": 10512 + }, + { + "epoch": 0.7299680599916678, + "grad_norm": 3.719140078552416, + "learning_rate": 1.7935815157177554e-06, + "loss": 0.3906, + "step": 10513 + }, + { + "epoch": 0.73003749479239, + "grad_norm": 3.879398143880803, + "learning_rate": 1.7927187733533396e-06, + "loss": 0.3386, + "step": 10514 + }, + { + "epoch": 0.7301069295931121, + "grad_norm": 4.218767558784246, + "learning_rate": 1.7918561932095773e-06, + "loss": 0.5616, + "step": 10515 + }, + { + "epoch": 0.7301763643938342, + "grad_norm": 4.715215060930386, + "learning_rate": 1.7909937753300966e-06, + "loss": 0.7104, + "step": 10516 + }, + { + "epoch": 0.7302457991945563, + "grad_norm": 3.7780169990909824, + "learning_rate": 1.7901315197585217e-06, + "loss": 0.4348, + "step": 10517 + }, + { + "epoch": 0.7303152339952784, + "grad_norm": 4.042296457584013, + "learning_rate": 1.7892694265384635e-06, + "loss": 0.4143, + "step": 10518 + }, + { + "epoch": 0.7303846687960005, + "grad_norm": 4.264413124108612, + "learning_rate": 1.7884074957135216e-06, + "loss": 0.4555, + "step": 10519 + }, + { + "epoch": 0.7304541035967227, + "grad_norm": 2.469162485752799, + "learning_rate": 1.7875457273272956e-06, + "loss": 0.1771, + "step": 10520 + }, + { + "epoch": 0.7305235383974448, + "grad_norm": 4.167556826555442, + "learning_rate": 1.7866841214233721e-06, + "loss": 0.6714, + "step": 10521 + }, + { + "epoch": 0.730592973198167, + "grad_norm": 3.3310053007104377, + "learning_rate": 1.7858226780453292e-06, + "loss": 0.4217, + "step": 10522 + }, + { + "epoch": 0.730662407998889, + "grad_norm": 3.4434611942084885, + "learning_rate": 1.784961397236738e-06, + "loss": 0.2951, + "step": 10523 + }, + { + "epoch": 0.7307318427996111, + "grad_norm": 3.5352861901437485, + "learning_rate": 1.7841002790411616e-06, + "loss": 0.3887, + "step": 10524 + }, + { + "epoch": 0.7308012776003333, + "grad_norm": 3.4971151295247487, + "learning_rate": 1.7832393235021545e-06, + "loss": 0.2548, + "step": 10525 + }, + { + "epoch": 0.7308707124010554, + "grad_norm": 4.238930623100809, + "learning_rate": 1.7823785306632619e-06, + "loss": 0.5031, + "step": 10526 + }, + { + "epoch": 0.7309401472017776, + "grad_norm": 3.455251116520109, + "learning_rate": 1.7815179005680221e-06, + "loss": 0.3743, + "step": 10527 + }, + { + "epoch": 0.7310095820024997, + "grad_norm": 3.8518685882978354, + "learning_rate": 1.7806574332599652e-06, + "loss": 0.4775, + "step": 10528 + }, + { + "epoch": 0.7310790168032217, + "grad_norm": 3.783315387842042, + "learning_rate": 1.7797971287826121e-06, + "loss": 0.5054, + "step": 10529 + }, + { + "epoch": 0.7311484516039439, + "grad_norm": 3.231229144150515, + "learning_rate": 1.778936987179475e-06, + "loss": 0.3327, + "step": 10530 + }, + { + "epoch": 0.731217886404666, + "grad_norm": 3.5790989952365373, + "learning_rate": 1.7780770084940612e-06, + "loss": 0.3378, + "step": 10531 + }, + { + "epoch": 0.7312873212053881, + "grad_norm": 3.6676136803723147, + "learning_rate": 1.7772171927698689e-06, + "loss": 0.3853, + "step": 10532 + }, + { + "epoch": 0.7313567560061103, + "grad_norm": 4.443682839813009, + "learning_rate": 1.7763575400503806e-06, + "loss": 0.6299, + "step": 10533 + 
}, + { + "epoch": 0.7314261908068324, + "grad_norm": 3.1081066690934853, + "learning_rate": 1.7754980503790814e-06, + "loss": 0.3724, + "step": 10534 + }, + { + "epoch": 0.7314956256075545, + "grad_norm": 3.76067526479934, + "learning_rate": 1.7746387237994427e-06, + "loss": 0.3632, + "step": 10535 + }, + { + "epoch": 0.7315650604082766, + "grad_norm": 2.9821728953082407, + "learning_rate": 1.7737795603549274e-06, + "loss": 0.2617, + "step": 10536 + }, + { + "epoch": 0.7316344952089987, + "grad_norm": 3.9188570127809252, + "learning_rate": 1.772920560088991e-06, + "loss": 0.4416, + "step": 10537 + }, + { + "epoch": 0.7317039300097209, + "grad_norm": 2.9213538657891087, + "learning_rate": 1.772061723045081e-06, + "loss": 0.356, + "step": 10538 + }, + { + "epoch": 0.731773364810443, + "grad_norm": 3.441116726955416, + "learning_rate": 1.7712030492666365e-06, + "loss": 0.3216, + "step": 10539 + }, + { + "epoch": 0.7318427996111652, + "grad_norm": 4.085639653745112, + "learning_rate": 1.770344538797088e-06, + "loss": 0.624, + "step": 10540 + }, + { + "epoch": 0.7319122344118872, + "grad_norm": 2.529913742600644, + "learning_rate": 1.7694861916798584e-06, + "loss": 0.2951, + "step": 10541 + }, + { + "epoch": 0.7319816692126093, + "grad_norm": 2.4643820132652734, + "learning_rate": 1.7686280079583595e-06, + "loss": 0.1795, + "step": 10542 + }, + { + "epoch": 0.7320511040133315, + "grad_norm": 2.338269229588886, + "learning_rate": 1.7677699876760036e-06, + "loss": 0.211, + "step": 10543 + }, + { + "epoch": 0.7321205388140536, + "grad_norm": 3.579323118881092, + "learning_rate": 1.7669121308761816e-06, + "loss": 0.3066, + "step": 10544 + }, + { + "epoch": 0.7321899736147758, + "grad_norm": 4.077552183746781, + "learning_rate": 1.7660544376022842e-06, + "loss": 0.3834, + "step": 10545 + }, + { + "epoch": 0.7322594084154979, + "grad_norm": 3.9770409146871257, + "learning_rate": 1.7651969078976978e-06, + "loss": 0.4588, + "step": 10546 + }, + { + "epoch": 0.7323288432162199, + "grad_norm": 4.503823996316076, + "learning_rate": 1.764339541805789e-06, + "loss": 0.6985, + "step": 10547 + }, + { + "epoch": 0.7323982780169421, + "grad_norm": 3.6587060572075263, + "learning_rate": 1.7634823393699236e-06, + "loss": 0.3497, + "step": 10548 + }, + { + "epoch": 0.7324677128176642, + "grad_norm": 2.7901669987809767, + "learning_rate": 1.7626253006334603e-06, + "loss": 0.24, + "step": 10549 + }, + { + "epoch": 0.7325371476183863, + "grad_norm": 3.4708876180741597, + "learning_rate": 1.7617684256397476e-06, + "loss": 0.2834, + "step": 10550 + }, + { + "epoch": 0.7326065824191085, + "grad_norm": 3.138755476637452, + "learning_rate": 1.760911714432121e-06, + "loss": 0.2244, + "step": 10551 + }, + { + "epoch": 0.7326760172198306, + "grad_norm": 3.692187376985266, + "learning_rate": 1.7600551670539158e-06, + "loss": 0.243, + "step": 10552 + }, + { + "epoch": 0.7327454520205527, + "grad_norm": 3.152687812929138, + "learning_rate": 1.7591987835484536e-06, + "loss": 0.2951, + "step": 10553 + }, + { + "epoch": 0.7328148868212748, + "grad_norm": 3.7674976979512858, + "learning_rate": 1.7583425639590502e-06, + "loss": 0.3472, + "step": 10554 + }, + { + "epoch": 0.7328843216219969, + "grad_norm": 3.4068277812819523, + "learning_rate": 1.7574865083290116e-06, + "loss": 0.3432, + "step": 10555 + }, + { + "epoch": 0.7329537564227191, + "grad_norm": 4.309598547239222, + "learning_rate": 1.756630616701636e-06, + "loss": 0.3401, + "step": 10556 + }, + { + "epoch": 0.7330231912234412, + "grad_norm": 2.6643862516545753, + 
"learning_rate": 1.7557748891202142e-06, + "loss": 0.2417, + "step": 10557 + }, + { + "epoch": 0.7330926260241634, + "grad_norm": 3.777351395000426, + "learning_rate": 1.7549193256280273e-06, + "loss": 0.4583, + "step": 10558 + }, + { + "epoch": 0.7331620608248854, + "grad_norm": 4.107006602791873, + "learning_rate": 1.754063926268349e-06, + "loss": 0.6083, + "step": 10559 + }, + { + "epoch": 0.7332314956256075, + "grad_norm": 4.003110049455225, + "learning_rate": 1.7532086910844438e-06, + "loss": 0.5299, + "step": 10560 + }, + { + "epoch": 0.7333009304263297, + "grad_norm": 5.149417127303963, + "learning_rate": 1.752353620119569e-06, + "loss": 0.5884, + "step": 10561 + }, + { + "epoch": 0.7333703652270518, + "grad_norm": 3.7482200794838496, + "learning_rate": 1.751498713416972e-06, + "loss": 0.4493, + "step": 10562 + }, + { + "epoch": 0.7334398000277739, + "grad_norm": 3.3738013442176826, + "learning_rate": 1.7506439710198953e-06, + "loss": 0.5243, + "step": 10563 + }, + { + "epoch": 0.7335092348284961, + "grad_norm": 4.387644485450916, + "learning_rate": 1.7497893929715714e-06, + "loss": 0.481, + "step": 10564 + }, + { + "epoch": 0.7335786696292181, + "grad_norm": 4.491377061087058, + "learning_rate": 1.7489349793152193e-06, + "loss": 0.4243, + "step": 10565 + }, + { + "epoch": 0.7336481044299403, + "grad_norm": 3.3066742651551126, + "learning_rate": 1.7480807300940584e-06, + "loss": 0.3714, + "step": 10566 + }, + { + "epoch": 0.7337175392306624, + "grad_norm": 3.4646704190588937, + "learning_rate": 1.7472266453512942e-06, + "loss": 0.2724, + "step": 10567 + }, + { + "epoch": 0.7337869740313845, + "grad_norm": 3.7862121221384784, + "learning_rate": 1.746372725130126e-06, + "loss": 0.5116, + "step": 10568 + }, + { + "epoch": 0.7338564088321067, + "grad_norm": 3.930365913342115, + "learning_rate": 1.7455189694737434e-06, + "loss": 0.566, + "step": 10569 + }, + { + "epoch": 0.7339258436328288, + "grad_norm": 2.8121578799514646, + "learning_rate": 1.7446653784253287e-06, + "loss": 0.3351, + "step": 10570 + }, + { + "epoch": 0.733995278433551, + "grad_norm": 4.0249084310036185, + "learning_rate": 1.7438119520280549e-06, + "loss": 0.4863, + "step": 10571 + }, + { + "epoch": 0.734064713234273, + "grad_norm": 4.732715308458906, + "learning_rate": 1.7429586903250889e-06, + "loss": 0.81, + "step": 10572 + }, + { + "epoch": 0.7341341480349951, + "grad_norm": 4.6556108558713065, + "learning_rate": 1.742105593359586e-06, + "loss": 0.5392, + "step": 10573 + }, + { + "epoch": 0.7342035828357173, + "grad_norm": 3.7063553562532614, + "learning_rate": 1.7412526611746945e-06, + "loss": 0.4437, + "step": 10574 + }, + { + "epoch": 0.7342730176364394, + "grad_norm": 5.626877407794784, + "learning_rate": 1.7403998938135587e-06, + "loss": 0.5388, + "step": 10575 + }, + { + "epoch": 0.7343424524371615, + "grad_norm": 7.55607016230924, + "learning_rate": 1.7395472913193062e-06, + "loss": 0.7203, + "step": 10576 + }, + { + "epoch": 0.7344118872378836, + "grad_norm": 3.833093680957534, + "learning_rate": 1.7386948537350612e-06, + "loss": 0.3414, + "step": 10577 + }, + { + "epoch": 0.7344813220386057, + "grad_norm": 3.9882920370627164, + "learning_rate": 1.737842581103943e-06, + "loss": 0.5102, + "step": 10578 + }, + { + "epoch": 0.7345507568393279, + "grad_norm": 3.564528985960407, + "learning_rate": 1.736990473469054e-06, + "loss": 0.1944, + "step": 10579 + }, + { + "epoch": 0.73462019164005, + "grad_norm": 5.0481737893539815, + "learning_rate": 1.7361385308734923e-06, + "loss": 0.8724, + "step": 10580 + }, + { + 
"epoch": 0.7346896264407721, + "grad_norm": 4.112235154830738, + "learning_rate": 1.7352867533603529e-06, + "loss": 0.4618, + "step": 10581 + }, + { + "epoch": 0.7347590612414943, + "grad_norm": 5.00657933892249, + "learning_rate": 1.7344351409727151e-06, + "loss": 0.5871, + "step": 10582 + }, + { + "epoch": 0.7348284960422163, + "grad_norm": 3.9659554676904727, + "learning_rate": 1.733583693753652e-06, + "loss": 0.1648, + "step": 10583 + }, + { + "epoch": 0.7348979308429385, + "grad_norm": 4.45003692163835, + "learning_rate": 1.7327324117462302e-06, + "loss": 0.3574, + "step": 10584 + }, + { + "epoch": 0.7349673656436606, + "grad_norm": 3.062951870285048, + "learning_rate": 1.7318812949935055e-06, + "loss": 0.321, + "step": 10585 + }, + { + "epoch": 0.7350368004443827, + "grad_norm": 4.090718713734945, + "learning_rate": 1.7310303435385267e-06, + "loss": 0.3697, + "step": 10586 + }, + { + "epoch": 0.7351062352451049, + "grad_norm": 4.921462382847311, + "learning_rate": 1.7301795574243345e-06, + "loss": 0.6726, + "step": 10587 + }, + { + "epoch": 0.735175670045827, + "grad_norm": 3.594562587801793, + "learning_rate": 1.7293289366939597e-06, + "loss": 0.3201, + "step": 10588 + }, + { + "epoch": 0.735245104846549, + "grad_norm": 3.606940662180231, + "learning_rate": 1.7284784813904265e-06, + "loss": 0.4745, + "step": 10589 + }, + { + "epoch": 0.7353145396472712, + "grad_norm": 4.845083882722045, + "learning_rate": 1.72762819155675e-06, + "loss": 0.5281, + "step": 10590 + }, + { + "epoch": 0.7353839744479933, + "grad_norm": 4.010441406547519, + "learning_rate": 1.7267780672359348e-06, + "loss": 0.5498, + "step": 10591 + }, + { + "epoch": 0.7354534092487155, + "grad_norm": 4.414236996827545, + "learning_rate": 1.7259281084709823e-06, + "loss": 0.468, + "step": 10592 + }, + { + "epoch": 0.7355228440494376, + "grad_norm": 3.730984742364967, + "learning_rate": 1.7250783153048834e-06, + "loss": 0.4136, + "step": 10593 + }, + { + "epoch": 0.7355922788501597, + "grad_norm": 3.059830910129794, + "learning_rate": 1.7242286877806136e-06, + "loss": 0.3391, + "step": 10594 + }, + { + "epoch": 0.7356617136508818, + "grad_norm": 4.054309637032811, + "learning_rate": 1.7233792259411519e-06, + "loss": 0.4948, + "step": 10595 + }, + { + "epoch": 0.7357311484516039, + "grad_norm": 6.324899559916543, + "learning_rate": 1.7225299298294634e-06, + "loss": 0.7274, + "step": 10596 + }, + { + "epoch": 0.7358005832523261, + "grad_norm": 4.212299812710117, + "learning_rate": 1.7216807994884982e-06, + "loss": 0.591, + "step": 10597 + }, + { + "epoch": 0.7358700180530482, + "grad_norm": 3.689736656394049, + "learning_rate": 1.7208318349612108e-06, + "loss": 0.4432, + "step": 10598 + }, + { + "epoch": 0.7359394528537703, + "grad_norm": 4.616659342138726, + "learning_rate": 1.7199830362905384e-06, + "loss": 0.6354, + "step": 10599 + }, + { + "epoch": 0.7360088876544925, + "grad_norm": 5.02419656416883, + "learning_rate": 1.7191344035194118e-06, + "loss": 0.6663, + "step": 10600 + }, + { + "epoch": 0.7360783224552145, + "grad_norm": 2.912271065716018, + "learning_rate": 1.7182859366907555e-06, + "loss": 0.2638, + "step": 10601 + }, + { + "epoch": 0.7361477572559367, + "grad_norm": 3.8427746515479337, + "learning_rate": 1.7174376358474826e-06, + "loss": 0.4737, + "step": 10602 + }, + { + "epoch": 0.7362171920566588, + "grad_norm": 4.402436089286478, + "learning_rate": 1.7165895010324995e-06, + "loss": 0.4816, + "step": 10603 + }, + { + "epoch": 0.7362866268573809, + "grad_norm": 2.003931027552049, + "learning_rate": 
1.7157415322887038e-06, + "loss": 0.0697, + "step": 10604 + }, + { + "epoch": 0.7363560616581031, + "grad_norm": 3.909509254591714, + "learning_rate": 1.7148937296589852e-06, + "loss": 0.4498, + "step": 10605 + }, + { + "epoch": 0.7364254964588252, + "grad_norm": 4.78734186696786, + "learning_rate": 1.714046093186223e-06, + "loss": 0.43, + "step": 10606 + }, + { + "epoch": 0.7364949312595472, + "grad_norm": 4.668730637176559, + "learning_rate": 1.713198622913294e-06, + "loss": 0.5842, + "step": 10607 + }, + { + "epoch": 0.7365643660602694, + "grad_norm": 4.842395930405256, + "learning_rate": 1.7123513188830582e-06, + "loss": 0.477, + "step": 10608 + }, + { + "epoch": 0.7366338008609915, + "grad_norm": 3.9693036500587016, + "learning_rate": 1.7115041811383703e-06, + "loss": 0.3928, + "step": 10609 + }, + { + "epoch": 0.7367032356617137, + "grad_norm": 3.699737742266566, + "learning_rate": 1.7106572097220836e-06, + "loss": 0.4692, + "step": 10610 + }, + { + "epoch": 0.7367726704624358, + "grad_norm": 3.571920398156959, + "learning_rate": 1.7098104046770298e-06, + "loss": 0.2492, + "step": 10611 + }, + { + "epoch": 0.7368421052631579, + "grad_norm": 5.110931579329591, + "learning_rate": 1.7089637660460434e-06, + "loss": 0.7028, + "step": 10612 + }, + { + "epoch": 0.73691154006388, + "grad_norm": 4.229279096110702, + "learning_rate": 1.7081172938719465e-06, + "loss": 0.474, + "step": 10613 + }, + { + "epoch": 0.7369809748646021, + "grad_norm": 2.954081228509903, + "learning_rate": 1.7072709881975513e-06, + "loss": 0.2551, + "step": 10614 + }, + { + "epoch": 0.7370504096653243, + "grad_norm": 4.263877901998473, + "learning_rate": 1.7064248490656642e-06, + "loss": 0.4503, + "step": 10615 + }, + { + "epoch": 0.7371198444660464, + "grad_norm": 3.40520953064715, + "learning_rate": 1.7055788765190807e-06, + "loss": 0.2357, + "step": 10616 + }, + { + "epoch": 0.7371892792667685, + "grad_norm": 2.539062774060217, + "learning_rate": 1.7047330706005898e-06, + "loss": 0.1736, + "step": 10617 + }, + { + "epoch": 0.7372587140674907, + "grad_norm": 4.692035551478609, + "learning_rate": 1.703887431352972e-06, + "loss": 0.6744, + "step": 10618 + }, + { + "epoch": 0.7373281488682127, + "grad_norm": 3.598012866552002, + "learning_rate": 1.7030419588189972e-06, + "loss": 0.4952, + "step": 10619 + }, + { + "epoch": 0.7373975836689348, + "grad_norm": 4.386760215409834, + "learning_rate": 1.7021966530414303e-06, + "loss": 0.6006, + "step": 10620 + }, + { + "epoch": 0.737467018469657, + "grad_norm": 3.994348005953582, + "learning_rate": 1.7013515140630243e-06, + "loss": 0.5086, + "step": 10621 + }, + { + "epoch": 0.7375364532703791, + "grad_norm": 3.5903462485368225, + "learning_rate": 1.700506541926526e-06, + "loss": 0.3866, + "step": 10622 + }, + { + "epoch": 0.7376058880711013, + "grad_norm": 3.9257774142925728, + "learning_rate": 1.699661736674672e-06, + "loss": 0.6985, + "step": 10623 + }, + { + "epoch": 0.7376753228718234, + "grad_norm": 2.846875914430184, + "learning_rate": 1.6988170983501938e-06, + "loss": 0.2356, + "step": 10624 + }, + { + "epoch": 0.7377447576725454, + "grad_norm": 3.788747775143197, + "learning_rate": 1.6979726269958135e-06, + "loss": 0.3149, + "step": 10625 + }, + { + "epoch": 0.7378141924732676, + "grad_norm": 4.332525506170356, + "learning_rate": 1.697128322654238e-06, + "loss": 0.5235, + "step": 10626 + }, + { + "epoch": 0.7378836272739897, + "grad_norm": 3.125292498000971, + "learning_rate": 1.6962841853681755e-06, + "loss": 0.177, + "step": 10627 + }, + { + "epoch": 
0.7379530620747119, + "grad_norm": 3.6056274709317604, + "learning_rate": 1.6954402151803223e-06, + "loss": 0.436, + "step": 10628 + }, + { + "epoch": 0.738022496875434, + "grad_norm": 3.0866583293624474, + "learning_rate": 1.6945964121333608e-06, + "loss": 0.1496, + "step": 10629 + }, + { + "epoch": 0.738091931676156, + "grad_norm": 3.014486342078335, + "learning_rate": 1.6937527762699735e-06, + "loss": 0.3299, + "step": 10630 + }, + { + "epoch": 0.7381613664768782, + "grad_norm": 3.4669886901950244, + "learning_rate": 1.69290930763283e-06, + "loss": 0.358, + "step": 10631 + }, + { + "epoch": 0.7382308012776003, + "grad_norm": 4.584519170127436, + "learning_rate": 1.692066006264591e-06, + "loss": 0.3636, + "step": 10632 + }, + { + "epoch": 0.7383002360783224, + "grad_norm": 3.9200094881594505, + "learning_rate": 1.691222872207911e-06, + "loss": 0.5111, + "step": 10633 + }, + { + "epoch": 0.7383696708790446, + "grad_norm": 3.5949064119449985, + "learning_rate": 1.6903799055054332e-06, + "loss": 0.327, + "step": 10634 + }, + { + "epoch": 0.7384391056797667, + "grad_norm": 3.994188747945362, + "learning_rate": 1.689537106199795e-06, + "loss": 0.5309, + "step": 10635 + }, + { + "epoch": 0.7385085404804889, + "grad_norm": 3.9450100318848413, + "learning_rate": 1.6886944743336241e-06, + "loss": 0.2352, + "step": 10636 + }, + { + "epoch": 0.7385779752812109, + "grad_norm": 5.011949239872855, + "learning_rate": 1.6878520099495394e-06, + "loss": 0.8115, + "step": 10637 + }, + { + "epoch": 0.738647410081933, + "grad_norm": 2.59902254334525, + "learning_rate": 1.6870097130901509e-06, + "loss": 0.1226, + "step": 10638 + }, + { + "epoch": 0.7387168448826552, + "grad_norm": 3.779459647632509, + "learning_rate": 1.6861675837980652e-06, + "loss": 0.4686, + "step": 10639 + }, + { + "epoch": 0.7387862796833773, + "grad_norm": 5.194121947771429, + "learning_rate": 1.6853256221158715e-06, + "loss": 0.6672, + "step": 10640 + }, + { + "epoch": 0.7388557144840995, + "grad_norm": 3.1394744060762427, + "learning_rate": 1.6844838280861552e-06, + "loss": 0.2295, + "step": 10641 + }, + { + "epoch": 0.7389251492848216, + "grad_norm": 3.624280284024086, + "learning_rate": 1.6836422017514969e-06, + "loss": 0.2599, + "step": 10642 + }, + { + "epoch": 0.7389945840855436, + "grad_norm": 3.33501032330394, + "learning_rate": 1.6828007431544635e-06, + "loss": 0.337, + "step": 10643 + }, + { + "epoch": 0.7390640188862658, + "grad_norm": 4.587733642153555, + "learning_rate": 1.6819594523376149e-06, + "loss": 0.3987, + "step": 10644 + }, + { + "epoch": 0.7391334536869879, + "grad_norm": 4.318189897701996, + "learning_rate": 1.6811183293435023e-06, + "loss": 0.4891, + "step": 10645 + }, + { + "epoch": 0.73920288848771, + "grad_norm": 4.372005282893266, + "learning_rate": 1.6802773742146694e-06, + "loss": 0.3233, + "step": 10646 + }, + { + "epoch": 0.7392723232884322, + "grad_norm": 3.7103020237333224, + "learning_rate": 1.6794365869936502e-06, + "loss": 0.2844, + "step": 10647 + }, + { + "epoch": 0.7393417580891543, + "grad_norm": 3.87504184454104, + "learning_rate": 1.6785959677229708e-06, + "loss": 0.5376, + "step": 10648 + }, + { + "epoch": 0.7394111928898764, + "grad_norm": 5.361744027738653, + "learning_rate": 1.6777555164451493e-06, + "loss": 0.6037, + "step": 10649 + }, + { + "epoch": 0.7394806276905985, + "grad_norm": 3.958438418182807, + "learning_rate": 1.6769152332026938e-06, + "loss": 0.4888, + "step": 10650 + }, + { + "epoch": 0.7395500624913206, + "grad_norm": 3.95243140380165, + "learning_rate": 
1.6760751180381062e-06, + "loss": 0.4886, + "step": 10651 + }, + { + "epoch": 0.7396194972920428, + "grad_norm": 2.878847248770449, + "learning_rate": 1.6752351709938758e-06, + "loss": 0.2575, + "step": 10652 + }, + { + "epoch": 0.7396889320927649, + "grad_norm": 4.183764920761019, + "learning_rate": 1.6743953921124917e-06, + "loss": 0.5084, + "step": 10653 + }, + { + "epoch": 0.7397583668934871, + "grad_norm": 4.275057056813458, + "learning_rate": 1.673555781436424e-06, + "loss": 0.6561, + "step": 10654 + }, + { + "epoch": 0.7398278016942091, + "grad_norm": 3.997526638019358, + "learning_rate": 1.6727163390081385e-06, + "loss": 0.5602, + "step": 10655 + }, + { + "epoch": 0.7398972364949312, + "grad_norm": 3.586168525124893, + "learning_rate": 1.6718770648700977e-06, + "loss": 0.3754, + "step": 10656 + }, + { + "epoch": 0.7399666712956534, + "grad_norm": 5.108306301142851, + "learning_rate": 1.6710379590647503e-06, + "loss": 0.6142, + "step": 10657 + }, + { + "epoch": 0.7400361060963755, + "grad_norm": 3.524018542146671, + "learning_rate": 1.6701990216345326e-06, + "loss": 0.3744, + "step": 10658 + }, + { + "epoch": 0.7401055408970977, + "grad_norm": 4.33551059011242, + "learning_rate": 1.6693602526218821e-06, + "loss": 0.4488, + "step": 10659 + }, + { + "epoch": 0.7401749756978198, + "grad_norm": 4.740040974930598, + "learning_rate": 1.6685216520692233e-06, + "loss": 0.4963, + "step": 10660 + }, + { + "epoch": 0.7402444104985418, + "grad_norm": 3.9379839510375927, + "learning_rate": 1.6676832200189658e-06, + "loss": 0.4384, + "step": 10661 + }, + { + "epoch": 0.740313845299264, + "grad_norm": 5.2101380785618385, + "learning_rate": 1.6668449565135219e-06, + "loss": 0.6737, + "step": 10662 + }, + { + "epoch": 0.7403832800999861, + "grad_norm": 3.105409845484737, + "learning_rate": 1.6660068615952884e-06, + "loss": 0.1342, + "step": 10663 + }, + { + "epoch": 0.7404527149007082, + "grad_norm": 5.632949540306084, + "learning_rate": 1.6651689353066552e-06, + "loss": 0.6824, + "step": 10664 + }, + { + "epoch": 0.7405221497014304, + "grad_norm": 4.067908020251523, + "learning_rate": 1.6643311776900034e-06, + "loss": 0.4641, + "step": 10665 + }, + { + "epoch": 0.7405915845021525, + "grad_norm": 3.4590098718425866, + "learning_rate": 1.6634935887877068e-06, + "loss": 0.3261, + "step": 10666 + }, + { + "epoch": 0.7406610193028746, + "grad_norm": 4.066589112270933, + "learning_rate": 1.6626561686421266e-06, + "loss": 0.2903, + "step": 10667 + }, + { + "epoch": 0.7407304541035967, + "grad_norm": 3.8238287116007115, + "learning_rate": 1.6618189172956245e-06, + "loss": 0.3464, + "step": 10668 + }, + { + "epoch": 0.7407998889043188, + "grad_norm": 3.4091300163487515, + "learning_rate": 1.6609818347905426e-06, + "loss": 0.5223, + "step": 10669 + }, + { + "epoch": 0.740869323705041, + "grad_norm": 3.4834387727021308, + "learning_rate": 1.6601449211692194e-06, + "loss": 0.2462, + "step": 10670 + }, + { + "epoch": 0.7409387585057631, + "grad_norm": 5.288438458344361, + "learning_rate": 1.6593081764739898e-06, + "loss": 0.5196, + "step": 10671 + }, + { + "epoch": 0.7410081933064853, + "grad_norm": 3.4207708687792984, + "learning_rate": 1.6584716007471696e-06, + "loss": 0.2983, + "step": 10672 + }, + { + "epoch": 0.7410776281072073, + "grad_norm": 4.385583102051702, + "learning_rate": 1.657635194031076e-06, + "loss": 0.5508, + "step": 10673 + }, + { + "epoch": 0.7411470629079294, + "grad_norm": 3.9426851805668544, + "learning_rate": 1.656798956368012e-06, + "loss": 0.707, + "step": 10674 + }, + { + "epoch": 
0.7412164977086516, + "grad_norm": 4.5392998150025115, + "learning_rate": 1.6559628878002736e-06, + "loss": 0.5385, + "step": 10675 + }, + { + "epoch": 0.7412859325093737, + "grad_norm": 3.9916476892254815, + "learning_rate": 1.6551269883701486e-06, + "loss": 0.5663, + "step": 10676 + }, + { + "epoch": 0.7413553673100958, + "grad_norm": 3.6908051614373605, + "learning_rate": 1.6542912581199155e-06, + "loss": 0.2546, + "step": 10677 + }, + { + "epoch": 0.741424802110818, + "grad_norm": 3.849045138272675, + "learning_rate": 1.6534556970918447e-06, + "loss": 0.5313, + "step": 10678 + }, + { + "epoch": 0.74149423691154, + "grad_norm": 3.828181398266914, + "learning_rate": 1.652620305328198e-06, + "loss": 0.3945, + "step": 10679 + }, + { + "epoch": 0.7415636717122622, + "grad_norm": 4.298421912022778, + "learning_rate": 1.6517850828712283e-06, + "loss": 0.4906, + "step": 10680 + }, + { + "epoch": 0.7416331065129843, + "grad_norm": 3.916682994162593, + "learning_rate": 1.6509500297631786e-06, + "loss": 0.3398, + "step": 10681 + }, + { + "epoch": 0.7417025413137064, + "grad_norm": 4.504665215498936, + "learning_rate": 1.65011514604629e-06, + "loss": 0.4661, + "step": 10682 + }, + { + "epoch": 0.7417719761144286, + "grad_norm": 3.4210735869302065, + "learning_rate": 1.6492804317627852e-06, + "loss": 0.2425, + "step": 10683 + }, + { + "epoch": 0.7418414109151507, + "grad_norm": 4.736972380110522, + "learning_rate": 1.6484458869548824e-06, + "loss": 0.5308, + "step": 10684 + }, + { + "epoch": 0.7419108457158728, + "grad_norm": 3.3866915493995235, + "learning_rate": 1.6476115116647979e-06, + "loss": 0.3424, + "step": 10685 + }, + { + "epoch": 0.7419802805165949, + "grad_norm": 4.176760613307982, + "learning_rate": 1.646777305934728e-06, + "loss": 0.5837, + "step": 10686 + }, + { + "epoch": 0.742049715317317, + "grad_norm": 3.323665279313503, + "learning_rate": 1.6459432698068655e-06, + "loss": 0.2203, + "step": 10687 + }, + { + "epoch": 0.7421191501180392, + "grad_norm": 4.367397662764991, + "learning_rate": 1.6451094033233993e-06, + "loss": 0.4499, + "step": 10688 + }, + { + "epoch": 0.7421885849187613, + "grad_norm": 3.5143825290078916, + "learning_rate": 1.644275706526504e-06, + "loss": 0.3207, + "step": 10689 + }, + { + "epoch": 0.7422580197194834, + "grad_norm": 3.561132172175201, + "learning_rate": 1.6434421794583437e-06, + "loss": 0.5917, + "step": 10690 + }, + { + "epoch": 0.7423274545202055, + "grad_norm": 3.152377054179345, + "learning_rate": 1.6426088221610815e-06, + "loss": 0.2704, + "step": 10691 + }, + { + "epoch": 0.7423968893209276, + "grad_norm": 3.369850231829523, + "learning_rate": 1.6417756346768653e-06, + "loss": 0.4817, + "step": 10692 + }, + { + "epoch": 0.7424663241216498, + "grad_norm": 3.510689412744198, + "learning_rate": 1.6409426170478376e-06, + "loss": 0.4003, + "step": 10693 + }, + { + "epoch": 0.7425357589223719, + "grad_norm": 3.2593195733276215, + "learning_rate": 1.640109769316131e-06, + "loss": 0.3343, + "step": 10694 + }, + { + "epoch": 0.742605193723094, + "grad_norm": 3.7921478869028435, + "learning_rate": 1.639277091523871e-06, + "loss": 0.3897, + "step": 10695 + }, + { + "epoch": 0.7426746285238162, + "grad_norm": 12.452257479201311, + "learning_rate": 1.6384445837131724e-06, + "loss": 0.477, + "step": 10696 + }, + { + "epoch": 0.7427440633245382, + "grad_norm": 3.49881076665471, + "learning_rate": 1.637612245926143e-06, + "loss": 0.31, + "step": 10697 + }, + { + "epoch": 0.7428134981252604, + "grad_norm": 3.139905925812879, + "learning_rate": 
1.6367800782048808e-06, + "loss": 0.3202, + "step": 10698 + }, + { + "epoch": 0.7428829329259825, + "grad_norm": 4.205274354879697, + "learning_rate": 1.6359480805914757e-06, + "loss": 0.327, + "step": 10699 + }, + { + "epoch": 0.7429523677267046, + "grad_norm": 4.986827690555049, + "learning_rate": 1.635116253128013e-06, + "loss": 0.7973, + "step": 10700 + }, + { + "epoch": 0.7430218025274268, + "grad_norm": 3.73900556902471, + "learning_rate": 1.6342845958565607e-06, + "loss": 0.4383, + "step": 10701 + }, + { + "epoch": 0.7430912373281489, + "grad_norm": 4.6272361630638335, + "learning_rate": 1.6334531088191834e-06, + "loss": 0.4296, + "step": 10702 + }, + { + "epoch": 0.7431606721288709, + "grad_norm": 3.3939648847837485, + "learning_rate": 1.6326217920579418e-06, + "loss": 0.488, + "step": 10703 + }, + { + "epoch": 0.7432301069295931, + "grad_norm": 6.25345925262801, + "learning_rate": 1.6317906456148763e-06, + "loss": 0.5421, + "step": 10704 + }, + { + "epoch": 0.7432995417303152, + "grad_norm": 4.502575771352725, + "learning_rate": 1.6309596695320301e-06, + "loss": 0.6098, + "step": 10705 + }, + { + "epoch": 0.7433689765310374, + "grad_norm": 3.0639678756382627, + "learning_rate": 1.6301288638514319e-06, + "loss": 0.2767, + "step": 10706 + }, + { + "epoch": 0.7434384113317595, + "grad_norm": 2.5940522320351818, + "learning_rate": 1.6292982286151015e-06, + "loss": 0.1974, + "step": 10707 + }, + { + "epoch": 0.7435078461324816, + "grad_norm": 3.36531594061589, + "learning_rate": 1.6284677638650536e-06, + "loss": 0.2959, + "step": 10708 + }, + { + "epoch": 0.7435772809332037, + "grad_norm": 4.1209477184486545, + "learning_rate": 1.6276374696432906e-06, + "loss": 0.2717, + "step": 10709 + }, + { + "epoch": 0.7436467157339258, + "grad_norm": 5.094487672716471, + "learning_rate": 1.6268073459918081e-06, + "loss": 0.3275, + "step": 10710 + }, + { + "epoch": 0.743716150534648, + "grad_norm": 4.617978640641326, + "learning_rate": 1.6259773929525929e-06, + "loss": 0.516, + "step": 10711 + }, + { + "epoch": 0.7437855853353701, + "grad_norm": 4.3007004207057244, + "learning_rate": 1.6251476105676233e-06, + "loss": 0.4942, + "step": 10712 + }, + { + "epoch": 0.7438550201360922, + "grad_norm": 4.507369502120782, + "learning_rate": 1.6243179988788677e-06, + "loss": 0.5102, + "step": 10713 + }, + { + "epoch": 0.7439244549368144, + "grad_norm": 3.909731558896055, + "learning_rate": 1.6234885579282906e-06, + "loss": 0.5227, + "step": 10714 + }, + { + "epoch": 0.7439938897375364, + "grad_norm": 4.402916143364794, + "learning_rate": 1.6226592877578395e-06, + "loss": 0.6036, + "step": 10715 + }, + { + "epoch": 0.7440633245382586, + "grad_norm": 3.443645077991829, + "learning_rate": 1.621830188409459e-06, + "loss": 0.3471, + "step": 10716 + }, + { + "epoch": 0.7441327593389807, + "grad_norm": 2.7998720548341387, + "learning_rate": 1.6210012599250858e-06, + "loss": 0.3916, + "step": 10717 + }, + { + "epoch": 0.7442021941397028, + "grad_norm": 3.732907849999485, + "learning_rate": 1.6201725023466476e-06, + "loss": 0.4536, + "step": 10718 + }, + { + "epoch": 0.744271628940425, + "grad_norm": 4.587264814300033, + "learning_rate": 1.6193439157160557e-06, + "loss": 0.512, + "step": 10719 + }, + { + "epoch": 0.7443410637411471, + "grad_norm": 4.359301508876848, + "learning_rate": 1.6185155000752252e-06, + "loss": 0.6733, + "step": 10720 + }, + { + "epoch": 0.7444104985418691, + "grad_norm": 5.1990413694372934, + "learning_rate": 1.6176872554660562e-06, + "loss": 0.3316, + "step": 10721 + }, + { + "epoch": 
0.7444799333425913, + "grad_norm": 3.8093599209303846, + "learning_rate": 1.6168591819304348e-06, + "loss": 0.4291, + "step": 10722 + }, + { + "epoch": 0.7445493681433134, + "grad_norm": 4.242857888634728, + "learning_rate": 1.616031279510249e-06, + "loss": 0.3842, + "step": 10723 + }, + { + "epoch": 0.7446188029440356, + "grad_norm": 4.827956027437733, + "learning_rate": 1.6152035482473721e-06, + "loss": 0.4372, + "step": 10724 + }, + { + "epoch": 0.7446882377447577, + "grad_norm": 3.37151837933274, + "learning_rate": 1.61437598818367e-06, + "loss": 0.3953, + "step": 10725 + }, + { + "epoch": 0.7447576725454798, + "grad_norm": 5.025975528285516, + "learning_rate": 1.613548599360999e-06, + "loss": 0.6674, + "step": 10726 + }, + { + "epoch": 0.7448271073462019, + "grad_norm": 4.6569875934445335, + "learning_rate": 1.612721381821208e-06, + "loss": 0.3867, + "step": 10727 + }, + { + "epoch": 0.744896542146924, + "grad_norm": 3.840588181174105, + "learning_rate": 1.6118943356061362e-06, + "loss": 0.3752, + "step": 10728 + }, + { + "epoch": 0.7449659769476462, + "grad_norm": 5.239786664718549, + "learning_rate": 1.6110674607576155e-06, + "loss": 0.7656, + "step": 10729 + }, + { + "epoch": 0.7450354117483683, + "grad_norm": 4.091971181305727, + "learning_rate": 1.6102407573174673e-06, + "loss": 0.3595, + "step": 10730 + }, + { + "epoch": 0.7451048465490904, + "grad_norm": 6.996525987195954, + "learning_rate": 1.609414225327504e-06, + "loss": 0.73, + "step": 10731 + }, + { + "epoch": 0.7451742813498126, + "grad_norm": 3.5023602291871208, + "learning_rate": 1.6085878648295362e-06, + "loss": 0.4316, + "step": 10732 + }, + { + "epoch": 0.7452437161505346, + "grad_norm": 8.369436038659725, + "learning_rate": 1.6077616758653535e-06, + "loss": 0.6986, + "step": 10733 + }, + { + "epoch": 0.7453131509512567, + "grad_norm": 3.2503108024914606, + "learning_rate": 1.6069356584767476e-06, + "loss": 0.2635, + "step": 10734 + }, + { + "epoch": 0.7453825857519789, + "grad_norm": 3.417845314701299, + "learning_rate": 1.6061098127054986e-06, + "loss": 0.3492, + "step": 10735 + }, + { + "epoch": 0.745452020552701, + "grad_norm": 3.963536802357365, + "learning_rate": 1.6052841385933722e-06, + "loss": 0.5561, + "step": 10736 + }, + { + "epoch": 0.7455214553534232, + "grad_norm": 2.899756155959895, + "learning_rate": 1.6044586361821334e-06, + "loss": 0.2992, + "step": 10737 + }, + { + "epoch": 0.7455908901541453, + "grad_norm": 4.056068132552119, + "learning_rate": 1.6036333055135345e-06, + "loss": 0.346, + "step": 10738 + }, + { + "epoch": 0.7456603249548673, + "grad_norm": 5.024307644332964, + "learning_rate": 1.6028081466293205e-06, + "loss": 0.6109, + "step": 10739 + }, + { + "epoch": 0.7457297597555895, + "grad_norm": 4.288763680268224, + "learning_rate": 1.6019831595712253e-06, + "loss": 0.4385, + "step": 10740 + }, + { + "epoch": 0.7457991945563116, + "grad_norm": 5.253143407926876, + "learning_rate": 1.6011583443809775e-06, + "loss": 0.5774, + "step": 10741 + }, + { + "epoch": 0.7458686293570338, + "grad_norm": 6.125331191291605, + "learning_rate": 1.6003337011002928e-06, + "loss": 0.7968, + "step": 10742 + }, + { + "epoch": 0.7459380641577559, + "grad_norm": 4.491532595821953, + "learning_rate": 1.5995092297708853e-06, + "loss": 0.6202, + "step": 10743 + }, + { + "epoch": 0.746007498958478, + "grad_norm": 4.227695370365082, + "learning_rate": 1.598684930434452e-06, + "loss": 0.5645, + "step": 10744 + }, + { + "epoch": 0.7460769337592001, + "grad_norm": 4.610301110265456, + "learning_rate": 
1.5978608031326843e-06, + "loss": 0.5898, + "step": 10745 + }, + { + "epoch": 0.7461463685599222, + "grad_norm": 3.3250132519725963, + "learning_rate": 1.5970368479072706e-06, + "loss": 0.2467, + "step": 10746 + }, + { + "epoch": 0.7462158033606443, + "grad_norm": 4.475805380480244, + "learning_rate": 1.5962130647998808e-06, + "loss": 0.5252, + "step": 10747 + }, + { + "epoch": 0.7462852381613665, + "grad_norm": 3.7565357315095937, + "learning_rate": 1.5953894538521808e-06, + "loss": 0.3475, + "step": 10748 + }, + { + "epoch": 0.7463546729620886, + "grad_norm": 3.9483797873444706, + "learning_rate": 1.5945660151058313e-06, + "loss": 0.4317, + "step": 10749 + }, + { + "epoch": 0.7464241077628108, + "grad_norm": 3.5784515376103876, + "learning_rate": 1.593742748602481e-06, + "loss": 0.3668, + "step": 10750 + }, + { + "epoch": 0.7464935425635328, + "grad_norm": 4.52426849579044, + "learning_rate": 1.592919654383765e-06, + "loss": 0.5252, + "step": 10751 + }, + { + "epoch": 0.7465629773642549, + "grad_norm": 4.645301720287652, + "learning_rate": 1.5920967324913189e-06, + "loss": 0.4686, + "step": 10752 + }, + { + "epoch": 0.7466324121649771, + "grad_norm": 3.630575073213047, + "learning_rate": 1.5912739829667634e-06, + "loss": 0.4839, + "step": 10753 + }, + { + "epoch": 0.7467018469656992, + "grad_norm": 5.103885647452111, + "learning_rate": 1.5904514058517129e-06, + "loss": 0.7937, + "step": 10754 + }, + { + "epoch": 0.7467712817664214, + "grad_norm": 3.5805977973637946, + "learning_rate": 1.5896290011877724e-06, + "loss": 0.3834, + "step": 10755 + }, + { + "epoch": 0.7468407165671435, + "grad_norm": 3.085330222388638, + "learning_rate": 1.5888067690165377e-06, + "loss": 0.2817, + "step": 10756 + }, + { + "epoch": 0.7469101513678655, + "grad_norm": 5.371420411151042, + "learning_rate": 1.5879847093795963e-06, + "loss": 0.6069, + "step": 10757 + }, + { + "epoch": 0.7469795861685877, + "grad_norm": 4.63678851263773, + "learning_rate": 1.5871628223185276e-06, + "loss": 0.438, + "step": 10758 + }, + { + "epoch": 0.7470490209693098, + "grad_norm": 2.451520096724459, + "learning_rate": 1.586341107874902e-06, + "loss": 0.1333, + "step": 10759 + }, + { + "epoch": 0.7471184557700319, + "grad_norm": 2.9624286283012875, + "learning_rate": 1.5855195660902795e-06, + "loss": 0.3513, + "step": 10760 + }, + { + "epoch": 0.7471878905707541, + "grad_norm": 3.9496752528056156, + "learning_rate": 1.5846981970062142e-06, + "loss": 0.4849, + "step": 10761 + }, + { + "epoch": 0.7472573253714762, + "grad_norm": 3.6247196333969063, + "learning_rate": 1.5838770006642472e-06, + "loss": 0.1976, + "step": 10762 + }, + { + "epoch": 0.7473267601721983, + "grad_norm": 3.7226403352849684, + "learning_rate": 1.5830559771059179e-06, + "loss": 0.4268, + "step": 10763 + }, + { + "epoch": 0.7473961949729204, + "grad_norm": 3.3911655258994102, + "learning_rate": 1.5822351263727525e-06, + "loss": 0.4843, + "step": 10764 + }, + { + "epoch": 0.7474656297736425, + "grad_norm": 4.272109474991247, + "learning_rate": 1.5814144485062632e-06, + "loss": 0.543, + "step": 10765 + }, + { + "epoch": 0.7475350645743647, + "grad_norm": 3.73506786183235, + "learning_rate": 1.580593943547965e-06, + "loss": 0.4154, + "step": 10766 + }, + { + "epoch": 0.7476044993750868, + "grad_norm": 2.504551809721038, + "learning_rate": 1.579773611539357e-06, + "loss": 0.2156, + "step": 10767 + }, + { + "epoch": 0.747673934175809, + "grad_norm": 4.502962883950316, + "learning_rate": 1.5789534525219264e-06, + "loss": 0.3648, + "step": 10768 + }, + { + "epoch": 
0.747743368976531, + "grad_norm": 2.8362106367606397, + "learning_rate": 1.5781334665371606e-06, + "loss": 0.2606, + "step": 10769 + }, + { + "epoch": 0.7478128037772531, + "grad_norm": 3.0329793469758384, + "learning_rate": 1.5773136536265316e-06, + "loss": 0.2942, + "step": 10770 + }, + { + "epoch": 0.7478822385779753, + "grad_norm": 3.2096632581618776, + "learning_rate": 1.5764940138315054e-06, + "loss": 0.2565, + "step": 10771 + }, + { + "epoch": 0.7479516733786974, + "grad_norm": 3.614935601039839, + "learning_rate": 1.575674547193538e-06, + "loss": 0.5519, + "step": 10772 + }, + { + "epoch": 0.7480211081794196, + "grad_norm": 4.31374214616722, + "learning_rate": 1.5748552537540767e-06, + "loss": 0.4851, + "step": 10773 + }, + { + "epoch": 0.7480905429801417, + "grad_norm": 4.198776640616115, + "learning_rate": 1.5740361335545594e-06, + "loss": 0.7891, + "step": 10774 + }, + { + "epoch": 0.7481599777808637, + "grad_norm": 4.374388127008037, + "learning_rate": 1.5732171866364215e-06, + "loss": 0.424, + "step": 10775 + }, + { + "epoch": 0.7482294125815859, + "grad_norm": 3.7762069518537484, + "learning_rate": 1.5723984130410786e-06, + "loss": 0.3127, + "step": 10776 + }, + { + "epoch": 0.748298847382308, + "grad_norm": 4.2095153584519425, + "learning_rate": 1.5715798128099434e-06, + "loss": 0.4966, + "step": 10777 + }, + { + "epoch": 0.7483682821830301, + "grad_norm": 3.4465296106155674, + "learning_rate": 1.5707613859844256e-06, + "loss": 0.4021, + "step": 10778 + }, + { + "epoch": 0.7484377169837523, + "grad_norm": 4.433322252930832, + "learning_rate": 1.5699431326059144e-06, + "loss": 0.5788, + "step": 10779 + }, + { + "epoch": 0.7485071517844744, + "grad_norm": 3.8884119864142237, + "learning_rate": 1.569125052715797e-06, + "loss": 0.5514, + "step": 10780 + }, + { + "epoch": 0.7485765865851965, + "grad_norm": 4.24004776531264, + "learning_rate": 1.5683071463554533e-06, + "loss": 0.379, + "step": 10781 + }, + { + "epoch": 0.7486460213859186, + "grad_norm": 3.778303820482812, + "learning_rate": 1.5674894135662511e-06, + "loss": 0.3796, + "step": 10782 + }, + { + "epoch": 0.7487154561866407, + "grad_norm": 3.2977740412209724, + "learning_rate": 1.5666718543895504e-06, + "loss": 0.3489, + "step": 10783 + }, + { + "epoch": 0.7487848909873629, + "grad_norm": 3.2309793608249584, + "learning_rate": 1.565854468866702e-06, + "loss": 0.2639, + "step": 10784 + }, + { + "epoch": 0.748854325788085, + "grad_norm": 3.686978238339916, + "learning_rate": 1.5650372570390486e-06, + "loss": 0.3476, + "step": 10785 + }, + { + "epoch": 0.7489237605888072, + "grad_norm": 4.615793781438089, + "learning_rate": 1.564220218947924e-06, + "loss": 0.6413, + "step": 10786 + }, + { + "epoch": 0.7489931953895292, + "grad_norm": 3.8349704554233814, + "learning_rate": 1.5634033546346528e-06, + "loss": 0.3406, + "step": 10787 + }, + { + "epoch": 0.7490626301902513, + "grad_norm": 3.668357356361762, + "learning_rate": 1.5625866641405507e-06, + "loss": 0.3792, + "step": 10788 + }, + { + "epoch": 0.7491320649909735, + "grad_norm": 3.1241498140073625, + "learning_rate": 1.5617701475069258e-06, + "loss": 0.2701, + "step": 10789 + }, + { + "epoch": 0.7492014997916956, + "grad_norm": 4.667344470913812, + "learning_rate": 1.5609538047750765e-06, + "loss": 0.5592, + "step": 10790 + }, + { + "epoch": 0.7492709345924177, + "grad_norm": 3.9011715709285157, + "learning_rate": 1.5601376359862919e-06, + "loss": 0.5367, + "step": 10791 + }, + { + "epoch": 0.7493403693931399, + "grad_norm": 4.798538971638306, + "learning_rate": 
1.5593216411818534e-06, + "loss": 0.5888, + "step": 10792 + }, + { + "epoch": 0.7494098041938619, + "grad_norm": 4.759249304416267, + "learning_rate": 1.5585058204030324e-06, + "loss": 0.4821, + "step": 10793 + }, + { + "epoch": 0.7494792389945841, + "grad_norm": 4.632998171494326, + "learning_rate": 1.5576901736910916e-06, + "loss": 0.6155, + "step": 10794 + }, + { + "epoch": 0.7495486737953062, + "grad_norm": 3.7465869284027975, + "learning_rate": 1.5568747010872881e-06, + "loss": 0.4414, + "step": 10795 + }, + { + "epoch": 0.7496181085960283, + "grad_norm": 3.544095792551927, + "learning_rate": 1.5560594026328674e-06, + "loss": 0.2607, + "step": 10796 + }, + { + "epoch": 0.7496875433967505, + "grad_norm": 3.5155628327023924, + "learning_rate": 1.5552442783690618e-06, + "loss": 0.2695, + "step": 10797 + }, + { + "epoch": 0.7497569781974726, + "grad_norm": 2.964366086313482, + "learning_rate": 1.554429328337105e-06, + "loss": 0.3318, + "step": 10798 + }, + { + "epoch": 0.7498264129981947, + "grad_norm": 3.9308731033453093, + "learning_rate": 1.5536145525782125e-06, + "loss": 0.4363, + "step": 10799 + }, + { + "epoch": 0.7498958477989168, + "grad_norm": 3.9051659731151678, + "learning_rate": 1.5527999511335973e-06, + "loss": 0.4176, + "step": 10800 + }, + { + "epoch": 0.7499652825996389, + "grad_norm": 3.729238828892474, + "learning_rate": 1.5519855240444592e-06, + "loss": 0.3564, + "step": 10801 + }, + { + "epoch": 0.7500347174003611, + "grad_norm": 5.684337033058436, + "learning_rate": 1.5511712713519917e-06, + "loss": 0.4475, + "step": 10802 + }, + { + "epoch": 0.7501041522010832, + "grad_norm": 4.963222512340322, + "learning_rate": 1.5503571930973788e-06, + "loss": 0.3704, + "step": 10803 + }, + { + "epoch": 0.7501735870018053, + "grad_norm": 3.3679724509121725, + "learning_rate": 1.5495432893217954e-06, + "loss": 0.3256, + "step": 10804 + }, + { + "epoch": 0.7502430218025274, + "grad_norm": 3.551451503744839, + "learning_rate": 1.5487295600664082e-06, + "loss": 0.2862, + "step": 10805 + }, + { + "epoch": 0.7503124566032495, + "grad_norm": 4.118530458548856, + "learning_rate": 1.5479160053723724e-06, + "loss": 0.1842, + "step": 10806 + }, + { + "epoch": 0.7503818914039717, + "grad_norm": 4.475244658256488, + "learning_rate": 1.5471026252808425e-06, + "loss": 0.3785, + "step": 10807 + }, + { + "epoch": 0.7504513262046938, + "grad_norm": 3.6188302680077125, + "learning_rate": 1.5462894198329525e-06, + "loss": 0.4149, + "step": 10808 + }, + { + "epoch": 0.7505207610054159, + "grad_norm": 2.8734628134609985, + "learning_rate": 1.5454763890698343e-06, + "loss": 0.2985, + "step": 10809 + }, + { + "epoch": 0.7505901958061381, + "grad_norm": 4.09254119199055, + "learning_rate": 1.5446635330326143e-06, + "loss": 0.4912, + "step": 10810 + }, + { + "epoch": 0.7506596306068601, + "grad_norm": 3.8011498164116686, + "learning_rate": 1.5438508517624013e-06, + "loss": 0.461, + "step": 10811 + }, + { + "epoch": 0.7507290654075823, + "grad_norm": 3.7660988716721966, + "learning_rate": 1.5430383453002994e-06, + "loss": 0.289, + "step": 10812 + }, + { + "epoch": 0.7507985002083044, + "grad_norm": 3.871700841257737, + "learning_rate": 1.542226013687408e-06, + "loss": 0.4064, + "step": 10813 + }, + { + "epoch": 0.7508679350090265, + "grad_norm": 3.262608480930866, + "learning_rate": 1.5414138569648123e-06, + "loss": 0.2609, + "step": 10814 + }, + { + "epoch": 0.7509373698097487, + "grad_norm": 4.040779300852778, + "learning_rate": 1.5406018751735897e-06, + "loss": 0.5068, + "step": 10815 + }, + { + "epoch": 
0.7510068046104708, + "grad_norm": 3.774810794282543, + "learning_rate": 1.53979006835481e-06, + "loss": 0.3397, + "step": 10816 + }, + { + "epoch": 0.7510762394111928, + "grad_norm": 3.936055734514942, + "learning_rate": 1.538978436549533e-06, + "loss": 0.4956, + "step": 10817 + }, + { + "epoch": 0.751145674211915, + "grad_norm": 3.499377305583845, + "learning_rate": 1.5381669797988102e-06, + "loss": 0.2968, + "step": 10818 + }, + { + "epoch": 0.7512151090126371, + "grad_norm": 5.9183578317186845, + "learning_rate": 1.5373556981436844e-06, + "loss": 0.4002, + "step": 10819 + }, + { + "epoch": 0.7512845438133593, + "grad_norm": 3.367113603604195, + "learning_rate": 1.5365445916251892e-06, + "loss": 0.2772, + "step": 10820 + }, + { + "epoch": 0.7513539786140814, + "grad_norm": 3.6204883671421926, + "learning_rate": 1.5357336602843493e-06, + "loss": 0.3106, + "step": 10821 + }, + { + "epoch": 0.7514234134148035, + "grad_norm": 4.1995443239462915, + "learning_rate": 1.5349229041621805e-06, + "loss": 0.5288, + "step": 10822 + }, + { + "epoch": 0.7514928482155256, + "grad_norm": 2.3651253268659755, + "learning_rate": 1.5341123232996885e-06, + "loss": 0.2201, + "step": 10823 + }, + { + "epoch": 0.7515622830162477, + "grad_norm": 3.8934704116646848, + "learning_rate": 1.5333019177378755e-06, + "loss": 0.2655, + "step": 10824 + }, + { + "epoch": 0.7516317178169699, + "grad_norm": 2.4630705927357233, + "learning_rate": 1.5324916875177298e-06, + "loss": 0.203, + "step": 10825 + }, + { + "epoch": 0.751701152617692, + "grad_norm": 3.672495719394316, + "learning_rate": 1.531681632680228e-06, + "loss": 0.3714, + "step": 10826 + }, + { + "epoch": 0.7517705874184141, + "grad_norm": 4.115347602760836, + "learning_rate": 1.5308717532663459e-06, + "loss": 0.3668, + "step": 10827 + }, + { + "epoch": 0.7518400222191363, + "grad_norm": 4.114003218512139, + "learning_rate": 1.5300620493170464e-06, + "loss": 0.5001, + "step": 10828 + }, + { + "epoch": 0.7519094570198583, + "grad_norm": 3.883999831962567, + "learning_rate": 1.529252520873279e-06, + "loss": 0.4119, + "step": 10829 + }, + { + "epoch": 0.7519788918205804, + "grad_norm": 4.099435219746042, + "learning_rate": 1.5284431679759931e-06, + "loss": 0.3746, + "step": 10830 + }, + { + "epoch": 0.7520483266213026, + "grad_norm": 4.261140404968297, + "learning_rate": 1.5276339906661237e-06, + "loss": 0.5792, + "step": 10831 + }, + { + "epoch": 0.7521177614220247, + "grad_norm": 3.989681417245947, + "learning_rate": 1.5268249889845976e-06, + "loss": 0.5014, + "step": 10832 + }, + { + "epoch": 0.7521871962227469, + "grad_norm": 4.1525628306265, + "learning_rate": 1.5260161629723335e-06, + "loss": 0.3542, + "step": 10833 + }, + { + "epoch": 0.752256631023469, + "grad_norm": 4.403864980357464, + "learning_rate": 1.5252075126702404e-06, + "loss": 0.4907, + "step": 10834 + }, + { + "epoch": 0.752326065824191, + "grad_norm": 4.667618177595061, + "learning_rate": 1.5243990381192198e-06, + "loss": 0.4436, + "step": 10835 + }, + { + "epoch": 0.7523955006249132, + "grad_norm": 3.635707599117771, + "learning_rate": 1.523590739360163e-06, + "loss": 0.4036, + "step": 10836 + }, + { + "epoch": 0.7524649354256353, + "grad_norm": 3.937004148282174, + "learning_rate": 1.522782616433952e-06, + "loss": 0.4583, + "step": 10837 + }, + { + "epoch": 0.7525343702263575, + "grad_norm": 3.404220925640154, + "learning_rate": 1.5219746693814608e-06, + "loss": 0.2288, + "step": 10838 + }, + { + "epoch": 0.7526038050270796, + "grad_norm": 3.6370156474716353, + "learning_rate": 
1.521166898243558e-06, + "loss": 0.4149, + "step": 10839 + }, + { + "epoch": 0.7526732398278017, + "grad_norm": 4.001346093756767, + "learning_rate": 1.5203593030610952e-06, + "loss": 0.3355, + "step": 10840 + }, + { + "epoch": 0.7527426746285238, + "grad_norm": 3.388106660978092, + "learning_rate": 1.51955188387492e-06, + "loss": 0.4198, + "step": 10841 + }, + { + "epoch": 0.7528121094292459, + "grad_norm": 4.365037132100963, + "learning_rate": 1.5187446407258754e-06, + "loss": 0.3926, + "step": 10842 + }, + { + "epoch": 0.7528815442299681, + "grad_norm": 4.557933514482715, + "learning_rate": 1.5179375736547841e-06, + "loss": 0.3109, + "step": 10843 + }, + { + "epoch": 0.7529509790306902, + "grad_norm": 4.04629911341105, + "learning_rate": 1.5171306827024717e-06, + "loss": 0.503, + "step": 10844 + }, + { + "epoch": 0.7530204138314123, + "grad_norm": 3.930145655538194, + "learning_rate": 1.5163239679097485e-06, + "loss": 0.4098, + "step": 10845 + }, + { + "epoch": 0.7530898486321345, + "grad_norm": 4.780393180299837, + "learning_rate": 1.5155174293174169e-06, + "loss": 0.3488, + "step": 10846 + }, + { + "epoch": 0.7531592834328565, + "grad_norm": 3.712062323488566, + "learning_rate": 1.5147110669662706e-06, + "loss": 0.4577, + "step": 10847 + }, + { + "epoch": 0.7532287182335786, + "grad_norm": 4.574773515264511, + "learning_rate": 1.5139048808970946e-06, + "loss": 0.2985, + "step": 10848 + }, + { + "epoch": 0.7532981530343008, + "grad_norm": 3.9969745331329376, + "learning_rate": 1.513098871150665e-06, + "loss": 0.3586, + "step": 10849 + }, + { + "epoch": 0.7533675878350229, + "grad_norm": 4.703913016772175, + "learning_rate": 1.5122930377677491e-06, + "loss": 0.49, + "step": 10850 + }, + { + "epoch": 0.7534370226357451, + "grad_norm": 4.17241359021055, + "learning_rate": 1.5114873807891046e-06, + "loss": 0.5335, + "step": 10851 + }, + { + "epoch": 0.7535064574364672, + "grad_norm": 3.4750077882147274, + "learning_rate": 1.510681900255479e-06, + "loss": 0.3889, + "step": 10852 + }, + { + "epoch": 0.7535758922371892, + "grad_norm": 4.337672957674126, + "learning_rate": 1.5098765962076179e-06, + "loss": 0.3779, + "step": 10853 + }, + { + "epoch": 0.7536453270379114, + "grad_norm": 4.963217297367776, + "learning_rate": 1.5090714686862474e-06, + "loss": 0.4656, + "step": 10854 + }, + { + "epoch": 0.7537147618386335, + "grad_norm": 3.3649512188678554, + "learning_rate": 1.5082665177320904e-06, + "loss": 0.2619, + "step": 10855 + }, + { + "epoch": 0.7537841966393557, + "grad_norm": 3.7016798046377044, + "learning_rate": 1.5074617433858635e-06, + "loss": 0.475, + "step": 10856 + }, + { + "epoch": 0.7538536314400778, + "grad_norm": 3.497121932521108, + "learning_rate": 1.5066571456882706e-06, + "loss": 0.5004, + "step": 10857 + }, + { + "epoch": 0.7539230662407999, + "grad_norm": 3.513544525193985, + "learning_rate": 1.5058527246800036e-06, + "loss": 0.2602, + "step": 10858 + }, + { + "epoch": 0.753992501041522, + "grad_norm": 2.7748400544612246, + "learning_rate": 1.5050484804017535e-06, + "loss": 0.1948, + "step": 10859 + }, + { + "epoch": 0.7540619358422441, + "grad_norm": 4.180236064269588, + "learning_rate": 1.5042444128941973e-06, + "loss": 0.4174, + "step": 10860 + }, + { + "epoch": 0.7541313706429662, + "grad_norm": 3.7099289523615657, + "learning_rate": 1.5034405221980008e-06, + "loss": 0.3756, + "step": 10861 + }, + { + "epoch": 0.7542008054436884, + "grad_norm": 3.6075002210497438, + "learning_rate": 1.5026368083538274e-06, + "loss": 0.3312, + "step": 10862 + }, + { + "epoch": 
0.7542702402444105, + "grad_norm": 5.8062302508348305, + "learning_rate": 1.5018332714023259e-06, + "loss": 0.5987, + "step": 10863 + }, + { + "epoch": 0.7543396750451327, + "grad_norm": 3.772019394556794, + "learning_rate": 1.5010299113841397e-06, + "loss": 0.4437, + "step": 10864 + }, + { + "epoch": 0.7544091098458547, + "grad_norm": 4.177867038414421, + "learning_rate": 1.5002267283399014e-06, + "loss": 0.2815, + "step": 10865 + }, + { + "epoch": 0.7544785446465768, + "grad_norm": 5.761422937283608, + "learning_rate": 1.4994237223102348e-06, + "loss": 0.508, + "step": 10866 + }, + { + "epoch": 0.754547979447299, + "grad_norm": 3.780957287474016, + "learning_rate": 1.498620893335755e-06, + "loss": 0.518, + "step": 10867 + }, + { + "epoch": 0.7546174142480211, + "grad_norm": 2.368172189958371, + "learning_rate": 1.4978182414570686e-06, + "loss": 0.0944, + "step": 10868 + }, + { + "epoch": 0.7546868490487433, + "grad_norm": 5.407462471123704, + "learning_rate": 1.4970157667147722e-06, + "loss": 0.6807, + "step": 10869 + }, + { + "epoch": 0.7547562838494654, + "grad_norm": 3.3425220849430484, + "learning_rate": 1.4962134691494535e-06, + "loss": 0.3829, + "step": 10870 + }, + { + "epoch": 0.7548257186501874, + "grad_norm": 4.972395060596145, + "learning_rate": 1.4954113488016953e-06, + "loss": 0.6794, + "step": 10871 + }, + { + "epoch": 0.7548951534509096, + "grad_norm": 4.161418687028618, + "learning_rate": 1.4946094057120635e-06, + "loss": 0.4752, + "step": 10872 + }, + { + "epoch": 0.7549645882516317, + "grad_norm": 4.208006096791668, + "learning_rate": 1.4938076399211204e-06, + "loss": 0.5209, + "step": 10873 + }, + { + "epoch": 0.7550340230523538, + "grad_norm": 4.628660155705154, + "learning_rate": 1.4930060514694205e-06, + "loss": 0.6538, + "step": 10874 + }, + { + "epoch": 0.755103457853076, + "grad_norm": 4.490862294122593, + "learning_rate": 1.4922046403975066e-06, + "loss": 0.3604, + "step": 10875 + }, + { + "epoch": 0.7551728926537981, + "grad_norm": 4.751099851937854, + "learning_rate": 1.4914034067459121e-06, + "loss": 0.6423, + "step": 10876 + }, + { + "epoch": 0.7552423274545202, + "grad_norm": 4.352000952317273, + "learning_rate": 1.4906023505551636e-06, + "loss": 0.4182, + "step": 10877 + }, + { + "epoch": 0.7553117622552423, + "grad_norm": 4.22780731862895, + "learning_rate": 1.4898014718657766e-06, + "loss": 0.2967, + "step": 10878 + }, + { + "epoch": 0.7553811970559644, + "grad_norm": 3.5822158384145, + "learning_rate": 1.489000770718259e-06, + "loss": 0.4534, + "step": 10879 + }, + { + "epoch": 0.7554506318566866, + "grad_norm": 4.1824914233985195, + "learning_rate": 1.4882002471531098e-06, + "loss": 0.5078, + "step": 10880 + }, + { + "epoch": 0.7555200666574087, + "grad_norm": 5.142505104374632, + "learning_rate": 1.4873999012108181e-06, + "loss": 0.5922, + "step": 10881 + }, + { + "epoch": 0.7555895014581309, + "grad_norm": 3.932518587642267, + "learning_rate": 1.486599732931865e-06, + "loss": 0.543, + "step": 10882 + }, + { + "epoch": 0.7556589362588529, + "grad_norm": 3.4152065987041818, + "learning_rate": 1.485799742356721e-06, + "loss": 0.3439, + "step": 10883 + }, + { + "epoch": 0.755728371059575, + "grad_norm": 4.046163460223747, + "learning_rate": 1.4849999295258482e-06, + "loss": 0.5141, + "step": 10884 + }, + { + "epoch": 0.7557978058602972, + "grad_norm": 4.122519009710842, + "learning_rate": 1.4842002944797045e-06, + "loss": 0.3613, + "step": 10885 + }, + { + "epoch": 0.7558672406610193, + "grad_norm": 2.644082516938727, + "learning_rate": 
1.4834008372587305e-06, + "loss": 0.2647, + "step": 10886 + }, + { + "epoch": 0.7559366754617414, + "grad_norm": 4.669648632098493, + "learning_rate": 1.4826015579033609e-06, + "loss": 0.5353, + "step": 10887 + }, + { + "epoch": 0.7560061102624636, + "grad_norm": 4.019301436356018, + "learning_rate": 1.4818024564540257e-06, + "loss": 0.4886, + "step": 10888 + }, + { + "epoch": 0.7560755450631856, + "grad_norm": 4.680988454273236, + "learning_rate": 1.481003532951143e-06, + "loss": 0.4747, + "step": 10889 + }, + { + "epoch": 0.7561449798639078, + "grad_norm": 4.164668628694312, + "learning_rate": 1.480204787435116e-06, + "loss": 0.5531, + "step": 10890 + }, + { + "epoch": 0.7562144146646299, + "grad_norm": 3.685863543818351, + "learning_rate": 1.4794062199463499e-06, + "loss": 0.3992, + "step": 10891 + }, + { + "epoch": 0.756283849465352, + "grad_norm": 3.717078368859052, + "learning_rate": 1.478607830525235e-06, + "loss": 0.3209, + "step": 10892 + }, + { + "epoch": 0.7563532842660742, + "grad_norm": 3.9986374885378204, + "learning_rate": 1.4778096192121478e-06, + "loss": 0.4633, + "step": 10893 + }, + { + "epoch": 0.7564227190667963, + "grad_norm": 3.5311867353538404, + "learning_rate": 1.477011586047466e-06, + "loss": 0.3858, + "step": 10894 + }, + { + "epoch": 0.7564921538675184, + "grad_norm": 3.485576507525538, + "learning_rate": 1.476213731071552e-06, + "loss": 0.2748, + "step": 10895 + }, + { + "epoch": 0.7565615886682405, + "grad_norm": 5.073771462552815, + "learning_rate": 1.4754160543247597e-06, + "loss": 0.8224, + "step": 10896 + }, + { + "epoch": 0.7566310234689626, + "grad_norm": 5.422545770992055, + "learning_rate": 1.4746185558474358e-06, + "loss": 0.4369, + "step": 10897 + }, + { + "epoch": 0.7567004582696848, + "grad_norm": 3.958965071722932, + "learning_rate": 1.4738212356799153e-06, + "loss": 0.3833, + "step": 10898 + }, + { + "epoch": 0.7567698930704069, + "grad_norm": 4.88867367738557, + "learning_rate": 1.4730240938625251e-06, + "loss": 0.6082, + "step": 10899 + }, + { + "epoch": 0.7568393278711291, + "grad_norm": 4.467608551966727, + "learning_rate": 1.4722271304355884e-06, + "loss": 0.3547, + "step": 10900 + }, + { + "epoch": 0.7569087626718511, + "grad_norm": 3.9899162358376965, + "learning_rate": 1.4714303454394103e-06, + "loss": 0.583, + "step": 10901 + }, + { + "epoch": 0.7569781974725732, + "grad_norm": 3.354183586815086, + "learning_rate": 1.4706337389142905e-06, + "loss": 0.2078, + "step": 10902 + }, + { + "epoch": 0.7570476322732954, + "grad_norm": 3.9184933082372346, + "learning_rate": 1.4698373109005253e-06, + "loss": 0.4165, + "step": 10903 + }, + { + "epoch": 0.7571170670740175, + "grad_norm": 3.8744722239441667, + "learning_rate": 1.4690410614383914e-06, + "loss": 0.4644, + "step": 10904 + }, + { + "epoch": 0.7571865018747396, + "grad_norm": 4.426706589796809, + "learning_rate": 1.4682449905681672e-06, + "loss": 0.4417, + "step": 10905 + }, + { + "epoch": 0.7572559366754618, + "grad_norm": 8.27906077644034, + "learning_rate": 1.4674490983301143e-06, + "loss": 0.8201, + "step": 10906 + }, + { + "epoch": 0.7573253714761838, + "grad_norm": 4.676584687506646, + "learning_rate": 1.4666533847644887e-06, + "loss": 0.5956, + "step": 10907 + }, + { + "epoch": 0.757394806276906, + "grad_norm": 3.078449690980452, + "learning_rate": 1.4658578499115372e-06, + "loss": 0.3931, + "step": 10908 + }, + { + "epoch": 0.7574642410776281, + "grad_norm": 3.2204840088878997, + "learning_rate": 1.465062493811496e-06, + "loss": 0.4068, + "step": 10909 + }, + { + "epoch": 
0.7575336758783502, + "grad_norm": 4.318542502859666, + "learning_rate": 1.464267316504594e-06, + "loss": 0.4775, + "step": 10910 + }, + { + "epoch": 0.7576031106790724, + "grad_norm": 3.256763616788462, + "learning_rate": 1.4634723180310495e-06, + "loss": 0.2183, + "step": 10911 + }, + { + "epoch": 0.7576725454797945, + "grad_norm": 3.5248678420744097, + "learning_rate": 1.4626774984310742e-06, + "loss": 0.3286, + "step": 10912 + }, + { + "epoch": 0.7577419802805166, + "grad_norm": 3.7711773436452263, + "learning_rate": 1.4618828577448663e-06, + "loss": 0.4209, + "step": 10913 + }, + { + "epoch": 0.7578114150812387, + "grad_norm": 4.72696928367763, + "learning_rate": 1.4610883960126232e-06, + "loss": 0.3767, + "step": 10914 + }, + { + "epoch": 0.7578808498819608, + "grad_norm": 4.808793829319325, + "learning_rate": 1.4602941132745225e-06, + "loss": 0.5401, + "step": 10915 + }, + { + "epoch": 0.757950284682683, + "grad_norm": 3.615677660030094, + "learning_rate": 1.459500009570739e-06, + "loss": 0.3157, + "step": 10916 + }, + { + "epoch": 0.7580197194834051, + "grad_norm": 3.7737435773844057, + "learning_rate": 1.4587060849414415e-06, + "loss": 0.3845, + "step": 10917 + }, + { + "epoch": 0.7580891542841272, + "grad_norm": 3.766402120036325, + "learning_rate": 1.4579123394267814e-06, + "loss": 0.5296, + "step": 10918 + }, + { + "epoch": 0.7581585890848493, + "grad_norm": 3.1311428156071432, + "learning_rate": 1.4571187730669057e-06, + "loss": 0.2308, + "step": 10919 + }, + { + "epoch": 0.7582280238855714, + "grad_norm": 3.835725771422906, + "learning_rate": 1.4563253859019544e-06, + "loss": 0.505, + "step": 10920 + }, + { + "epoch": 0.7582974586862936, + "grad_norm": 3.146939907260934, + "learning_rate": 1.4555321779720571e-06, + "loss": 0.3194, + "step": 10921 + }, + { + "epoch": 0.7583668934870157, + "grad_norm": 3.9043515776473408, + "learning_rate": 1.4547391493173285e-06, + "loss": 0.268, + "step": 10922 + }, + { + "epoch": 0.7584363282877378, + "grad_norm": 3.8527789628750364, + "learning_rate": 1.4539462999778831e-06, + "loss": 0.4096, + "step": 10923 + }, + { + "epoch": 0.75850576308846, + "grad_norm": 3.414074510647886, + "learning_rate": 1.4531536299938216e-06, + "loss": 0.4134, + "step": 10924 + }, + { + "epoch": 0.758575197889182, + "grad_norm": 3.1377960132999143, + "learning_rate": 1.4523611394052356e-06, + "loss": 0.3202, + "step": 10925 + }, + { + "epoch": 0.7586446326899042, + "grad_norm": 4.069910720808549, + "learning_rate": 1.4515688282522094e-06, + "loss": 0.347, + "step": 10926 + }, + { + "epoch": 0.7587140674906263, + "grad_norm": 3.8571936785460585, + "learning_rate": 1.450776696574816e-06, + "loss": 0.4382, + "step": 10927 + }, + { + "epoch": 0.7587835022913484, + "grad_norm": 4.643614880634198, + "learning_rate": 1.449984744413121e-06, + "loss": 0.6005, + "step": 10928 + }, + { + "epoch": 0.7588529370920706, + "grad_norm": 3.5938137935970578, + "learning_rate": 1.4491929718071807e-06, + "loss": 0.1946, + "step": 10929 + }, + { + "epoch": 0.7589223718927927, + "grad_norm": 4.00247687648354, + "learning_rate": 1.4484013787970418e-06, + "loss": 0.5941, + "step": 10930 + }, + { + "epoch": 0.7589918066935147, + "grad_norm": 3.8574954815912497, + "learning_rate": 1.447609965422741e-06, + "loss": 0.3641, + "step": 10931 + }, + { + "epoch": 0.7590612414942369, + "grad_norm": 5.037720016603001, + "learning_rate": 1.446818731724311e-06, + "loss": 0.7039, + "step": 10932 + }, + { + "epoch": 0.759130676294959, + "grad_norm": 4.240567145618297, + "learning_rate": 
1.446027677741766e-06, + "loss": 0.3219, + "step": 10933 + }, + { + "epoch": 0.7592001110956812, + "grad_norm": 3.4015416361454283, + "learning_rate": 1.445236803515121e-06, + "loss": 0.2688, + "step": 10934 + }, + { + "epoch": 0.7592695458964033, + "grad_norm": 5.384306442141966, + "learning_rate": 1.4444461090843776e-06, + "loss": 0.5274, + "step": 10935 + }, + { + "epoch": 0.7593389806971254, + "grad_norm": 4.4560760267245225, + "learning_rate": 1.4436555944895237e-06, + "loss": 0.3908, + "step": 10936 + }, + { + "epoch": 0.7594084154978475, + "grad_norm": 5.138149924464828, + "learning_rate": 1.4428652597705468e-06, + "loss": 0.735, + "step": 10937 + }, + { + "epoch": 0.7594778502985696, + "grad_norm": 4.339409895962606, + "learning_rate": 1.4420751049674197e-06, + "loss": 0.5518, + "step": 10938 + }, + { + "epoch": 0.7595472850992918, + "grad_norm": 3.645186962249192, + "learning_rate": 1.4412851301201081e-06, + "loss": 0.4549, + "step": 10939 + }, + { + "epoch": 0.7596167199000139, + "grad_norm": 3.33934283139182, + "learning_rate": 1.4404953352685674e-06, + "loss": 0.28, + "step": 10940 + }, + { + "epoch": 0.759686154700736, + "grad_norm": 3.1084539357089267, + "learning_rate": 1.4397057204527454e-06, + "loss": 0.307, + "step": 10941 + }, + { + "epoch": 0.7597555895014582, + "grad_norm": 3.5363094914532818, + "learning_rate": 1.4389162857125787e-06, + "loss": 0.3696, + "step": 10942 + }, + { + "epoch": 0.7598250243021802, + "grad_norm": 4.544872836449702, + "learning_rate": 1.438127031087997e-06, + "loss": 0.6233, + "step": 10943 + }, + { + "epoch": 0.7598944591029023, + "grad_norm": 4.309297769849216, + "learning_rate": 1.4373379566189187e-06, + "loss": 0.3705, + "step": 10944 + }, + { + "epoch": 0.7599638939036245, + "grad_norm": 4.94856867090616, + "learning_rate": 1.4365490623452545e-06, + "loss": 0.6321, + "step": 10945 + }, + { + "epoch": 0.7600333287043466, + "grad_norm": 4.76191566096668, + "learning_rate": 1.4357603483069093e-06, + "loss": 0.6405, + "step": 10946 + }, + { + "epoch": 0.7601027635050688, + "grad_norm": 4.019769199876363, + "learning_rate": 1.4349718145437709e-06, + "loss": 0.5488, + "step": 10947 + }, + { + "epoch": 0.7601721983057909, + "grad_norm": 2.8543690064401614, + "learning_rate": 1.4341834610957223e-06, + "loss": 0.2474, + "step": 10948 + }, + { + "epoch": 0.7602416331065129, + "grad_norm": 3.620728182205127, + "learning_rate": 1.4333952880026408e-06, + "loss": 0.3312, + "step": 10949 + }, + { + "epoch": 0.7603110679072351, + "grad_norm": 3.700413527920763, + "learning_rate": 1.4326072953043912e-06, + "loss": 0.2188, + "step": 10950 + }, + { + "epoch": 0.7603805027079572, + "grad_norm": 3.5941979097540813, + "learning_rate": 1.431819483040825e-06, + "loss": 0.5416, + "step": 10951 + }, + { + "epoch": 0.7604499375086794, + "grad_norm": 4.388212391367575, + "learning_rate": 1.4310318512517928e-06, + "loss": 0.6015, + "step": 10952 + }, + { + "epoch": 0.7605193723094015, + "grad_norm": 4.9117577966896, + "learning_rate": 1.430244399977133e-06, + "loss": 0.6144, + "step": 10953 + }, + { + "epoch": 0.7605888071101236, + "grad_norm": 4.01610718792258, + "learning_rate": 1.4294571292566688e-06, + "loss": 0.5142, + "step": 10954 + }, + { + "epoch": 0.7606582419108457, + "grad_norm": 3.8762706093362485, + "learning_rate": 1.4286700391302244e-06, + "loss": 0.6493, + "step": 10955 + }, + { + "epoch": 0.7607276767115678, + "grad_norm": 3.423684601803941, + "learning_rate": 1.4278831296376079e-06, + "loss": 0.2988, + "step": 10956 + }, + { + "epoch": 
0.76079711151229, + "grad_norm": 3.8209232494697862, + "learning_rate": 1.4270964008186206e-06, + "loss": 0.3774, + "step": 10957 + }, + { + "epoch": 0.7608665463130121, + "grad_norm": 3.3411175676014473, + "learning_rate": 1.4263098527130547e-06, + "loss": 0.391, + "step": 10958 + }, + { + "epoch": 0.7609359811137342, + "grad_norm": 3.727050047535712, + "learning_rate": 1.425523485360692e-06, + "loss": 0.3562, + "step": 10959 + }, + { + "epoch": 0.7610054159144564, + "grad_norm": 2.9857723148221487, + "learning_rate": 1.424737298801307e-06, + "loss": 0.3153, + "step": 10960 + }, + { + "epoch": 0.7610748507151784, + "grad_norm": 3.5180359478289884, + "learning_rate": 1.4239512930746635e-06, + "loss": 0.4533, + "step": 10961 + }, + { + "epoch": 0.7611442855159005, + "grad_norm": 3.7867690947130237, + "learning_rate": 1.423165468220517e-06, + "loss": 0.4351, + "step": 10962 + }, + { + "epoch": 0.7612137203166227, + "grad_norm": 3.8978401723867875, + "learning_rate": 1.4223798242786124e-06, + "loss": 0.604, + "step": 10963 + }, + { + "epoch": 0.7612831551173448, + "grad_norm": 4.558177344395122, + "learning_rate": 1.4215943612886907e-06, + "loss": 0.5644, + "step": 10964 + }, + { + "epoch": 0.761352589918067, + "grad_norm": 3.8463845879042147, + "learning_rate": 1.4208090792904743e-06, + "loss": 0.2956, + "step": 10965 + }, + { + "epoch": 0.7614220247187891, + "grad_norm": 4.608890113536634, + "learning_rate": 1.4200239783236857e-06, + "loss": 0.3729, + "step": 10966 + }, + { + "epoch": 0.7614914595195111, + "grad_norm": 4.031550935031122, + "learning_rate": 1.4192390584280347e-06, + "loss": 0.4503, + "step": 10967 + }, + { + "epoch": 0.7615608943202333, + "grad_norm": 3.464828254085752, + "learning_rate": 1.4184543196432176e-06, + "loss": 0.2297, + "step": 10968 + }, + { + "epoch": 0.7616303291209554, + "grad_norm": 2.9174019812399616, + "learning_rate": 1.4176697620089296e-06, + "loss": 0.1686, + "step": 10969 + }, + { + "epoch": 0.7616997639216776, + "grad_norm": 3.6217188116511307, + "learning_rate": 1.4168853855648513e-06, + "loss": 0.4203, + "step": 10970 + }, + { + "epoch": 0.7617691987223997, + "grad_norm": 3.5868076564151035, + "learning_rate": 1.4161011903506555e-06, + "loss": 0.2856, + "step": 10971 + }, + { + "epoch": 0.7618386335231218, + "grad_norm": 2.9579709330508863, + "learning_rate": 1.4153171764060058e-06, + "loss": 0.3851, + "step": 10972 + }, + { + "epoch": 0.761908068323844, + "grad_norm": 4.272188936418521, + "learning_rate": 1.4145333437705576e-06, + "loss": 0.384, + "step": 10973 + }, + { + "epoch": 0.761977503124566, + "grad_norm": 3.993314028996715, + "learning_rate": 1.4137496924839556e-06, + "loss": 0.3477, + "step": 10974 + }, + { + "epoch": 0.7620469379252881, + "grad_norm": 4.901878263879728, + "learning_rate": 1.4129662225858354e-06, + "loss": 0.3346, + "step": 10975 + }, + { + "epoch": 0.7621163727260103, + "grad_norm": 9.646807348821858, + "learning_rate": 1.412182934115825e-06, + "loss": 0.4908, + "step": 10976 + }, + { + "epoch": 0.7621858075267324, + "grad_norm": 4.6339626309225235, + "learning_rate": 1.4113998271135404e-06, + "loss": 0.5413, + "step": 10977 + }, + { + "epoch": 0.7622552423274546, + "grad_norm": 3.7782081073902036, + "learning_rate": 1.410616901618595e-06, + "loss": 0.2719, + "step": 10978 + }, + { + "epoch": 0.7623246771281766, + "grad_norm": 3.51054444337811, + "learning_rate": 1.4098341576705832e-06, + "loss": 0.2677, + "step": 10979 + }, + { + "epoch": 0.7623941119288987, + "grad_norm": 3.9557926383295006, + "learning_rate": 
1.4090515953090961e-06, + "loss": 0.4163, + "step": 10980 + }, + { + "epoch": 0.7624635467296209, + "grad_norm": 3.7247359613431055, + "learning_rate": 1.408269214573717e-06, + "loss": 0.4653, + "step": 10981 + }, + { + "epoch": 0.762532981530343, + "grad_norm": 3.3624363950277107, + "learning_rate": 1.4074870155040182e-06, + "loss": 0.3879, + "step": 10982 + }, + { + "epoch": 0.7626024163310652, + "grad_norm": 4.19338304593881, + "learning_rate": 1.4067049981395581e-06, + "loss": 0.5461, + "step": 10983 + }, + { + "epoch": 0.7626718511317873, + "grad_norm": 4.935492317679381, + "learning_rate": 1.405923162519895e-06, + "loss": 0.5417, + "step": 10984 + }, + { + "epoch": 0.7627412859325093, + "grad_norm": 4.6064147495218615, + "learning_rate": 1.4051415086845705e-06, + "loss": 0.5944, + "step": 10985 + }, + { + "epoch": 0.7628107207332315, + "grad_norm": 4.2376599290643595, + "learning_rate": 1.4043600366731213e-06, + "loss": 0.4908, + "step": 10986 + }, + { + "epoch": 0.7628801555339536, + "grad_norm": 4.751898471858316, + "learning_rate": 1.403578746525072e-06, + "loss": 0.5594, + "step": 10987 + }, + { + "epoch": 0.7629495903346757, + "grad_norm": 6.6423669602073465, + "learning_rate": 1.4027976382799408e-06, + "loss": 0.5553, + "step": 10988 + }, + { + "epoch": 0.7630190251353979, + "grad_norm": 4.8269199077628295, + "learning_rate": 1.4020167119772342e-06, + "loss": 0.7402, + "step": 10989 + }, + { + "epoch": 0.76308845993612, + "grad_norm": 4.152973773517244, + "learning_rate": 1.4012359676564513e-06, + "loss": 0.5146, + "step": 10990 + }, + { + "epoch": 0.7631578947368421, + "grad_norm": 3.8811602090113273, + "learning_rate": 1.4004554053570806e-06, + "loss": 0.3829, + "step": 10991 + }, + { + "epoch": 0.7632273295375642, + "grad_norm": 3.2270909585437995, + "learning_rate": 1.3996750251186025e-06, + "loss": 0.3147, + "step": 10992 + }, + { + "epoch": 0.7632967643382863, + "grad_norm": 3.709802856407118, + "learning_rate": 1.398894826980487e-06, + "loss": 0.3527, + "step": 10993 + }, + { + "epoch": 0.7633661991390085, + "grad_norm": 4.958090947198897, + "learning_rate": 1.3981148109821952e-06, + "loss": 0.4774, + "step": 10994 + }, + { + "epoch": 0.7634356339397306, + "grad_norm": 3.6044344804891026, + "learning_rate": 1.3973349771631823e-06, + "loss": 0.428, + "step": 10995 + }, + { + "epoch": 0.7635050687404528, + "grad_norm": 4.240270993438797, + "learning_rate": 1.3965553255628906e-06, + "loss": 0.5392, + "step": 10996 + }, + { + "epoch": 0.7635745035411748, + "grad_norm": 4.684313099285424, + "learning_rate": 1.3957758562207496e-06, + "loss": 0.6308, + "step": 10997 + }, + { + "epoch": 0.7636439383418969, + "grad_norm": 3.805792597752898, + "learning_rate": 1.3949965691761896e-06, + "loss": 0.4469, + "step": 10998 + }, + { + "epoch": 0.7637133731426191, + "grad_norm": 3.405494520488374, + "learning_rate": 1.3942174644686245e-06, + "loss": 0.2651, + "step": 10999 + }, + { + "epoch": 0.7637828079433412, + "grad_norm": 3.4067240180800984, + "learning_rate": 1.3934385421374575e-06, + "loss": 0.3343, + "step": 11000 + }, + { + "epoch": 0.7638522427440633, + "grad_norm": 3.3445573379671716, + "learning_rate": 1.3926598022220894e-06, + "loss": 0.3754, + "step": 11001 + }, + { + "epoch": 0.7639216775447855, + "grad_norm": 4.245815220116447, + "learning_rate": 1.3918812447619062e-06, + "loss": 0.4401, + "step": 11002 + }, + { + "epoch": 0.7639911123455075, + "grad_norm": 6.6184195002970485, + "learning_rate": 1.391102869796287e-06, + "loss": 0.5715, + "step": 11003 + }, + { + "epoch": 
0.7640605471462297, + "grad_norm": 3.911845541428749, + "learning_rate": 1.3903246773646006e-06, + "loss": 0.5033, + "step": 11004 + }, + { + "epoch": 0.7641299819469518, + "grad_norm": 3.713713104821458, + "learning_rate": 1.3895466675062074e-06, + "loss": 0.447, + "step": 11005 + }, + { + "epoch": 0.7641994167476739, + "grad_norm": 2.5811308716934405, + "learning_rate": 1.3887688402604571e-06, + "loss": 0.2323, + "step": 11006 + }, + { + "epoch": 0.7642688515483961, + "grad_norm": 4.733080297858727, + "learning_rate": 1.3879911956666952e-06, + "loss": 0.3949, + "step": 11007 + }, + { + "epoch": 0.7643382863491182, + "grad_norm": 4.176857060839163, + "learning_rate": 1.3872137337642505e-06, + "loss": 0.4814, + "step": 11008 + }, + { + "epoch": 0.7644077211498403, + "grad_norm": 3.8485496966395796, + "learning_rate": 1.3864364545924448e-06, + "loss": 0.3188, + "step": 11009 + }, + { + "epoch": 0.7644771559505624, + "grad_norm": 2.275475647298644, + "learning_rate": 1.3856593581905975e-06, + "loss": 0.1935, + "step": 11010 + }, + { + "epoch": 0.7645465907512845, + "grad_norm": 4.148206732542511, + "learning_rate": 1.384882444598008e-06, + "loss": 0.6678, + "step": 11011 + }, + { + "epoch": 0.7646160255520067, + "grad_norm": 3.11227029777124, + "learning_rate": 1.3841057138539726e-06, + "loss": 0.3046, + "step": 11012 + }, + { + "epoch": 0.7646854603527288, + "grad_norm": 5.434531004613774, + "learning_rate": 1.3833291659977798e-06, + "loss": 0.3342, + "step": 11013 + }, + { + "epoch": 0.764754895153451, + "grad_norm": 3.356218109948482, + "learning_rate": 1.3825528010687057e-06, + "loss": 0.3896, + "step": 11014 + }, + { + "epoch": 0.764824329954173, + "grad_norm": 3.528432579504851, + "learning_rate": 1.3817766191060172e-06, + "loss": 0.2837, + "step": 11015 + }, + { + "epoch": 0.7648937647548951, + "grad_norm": 5.743410417382064, + "learning_rate": 1.3810006201489729e-06, + "loss": 0.3698, + "step": 11016 + }, + { + "epoch": 0.7649631995556173, + "grad_norm": 3.8026511819010755, + "learning_rate": 1.380224804236822e-06, + "loss": 0.484, + "step": 11017 + }, + { + "epoch": 0.7650326343563394, + "grad_norm": 4.279163249706843, + "learning_rate": 1.3794491714088048e-06, + "loss": 0.5687, + "step": 11018 + }, + { + "epoch": 0.7651020691570615, + "grad_norm": 6.633459168956002, + "learning_rate": 1.3786737217041513e-06, + "loss": 0.961, + "step": 11019 + }, + { + "epoch": 0.7651715039577837, + "grad_norm": 3.2612433089063164, + "learning_rate": 1.3778984551620833e-06, + "loss": 0.3707, + "step": 11020 + }, + { + "epoch": 0.7652409387585057, + "grad_norm": 3.4735747204569027, + "learning_rate": 1.377123371821813e-06, + "loss": 0.3748, + "step": 11021 + }, + { + "epoch": 0.7653103735592279, + "grad_norm": 4.027200753699064, + "learning_rate": 1.3763484717225422e-06, + "loss": 0.4372, + "step": 11022 + }, + { + "epoch": 0.76537980835995, + "grad_norm": 3.1978618087549866, + "learning_rate": 1.3755737549034647e-06, + "loss": 0.352, + "step": 11023 + }, + { + "epoch": 0.7654492431606721, + "grad_norm": 3.6215600884610044, + "learning_rate": 1.3747992214037675e-06, + "loss": 0.4578, + "step": 11024 + }, + { + "epoch": 0.7655186779613943, + "grad_norm": 4.708609023443965, + "learning_rate": 1.3740248712626226e-06, + "loss": 0.5726, + "step": 11025 + }, + { + "epoch": 0.7655881127621164, + "grad_norm": 3.422983561562488, + "learning_rate": 1.3732507045191951e-06, + "loss": 0.2419, + "step": 11026 + }, + { + "epoch": 0.7656575475628385, + "grad_norm": 3.1951605708713138, + "learning_rate": 
1.3724767212126444e-06, + "loss": 0.2084, + "step": 11027 + }, + { + "epoch": 0.7657269823635606, + "grad_norm": 3.674804002846155, + "learning_rate": 1.3717029213821176e-06, + "loss": 0.3337, + "step": 11028 + }, + { + "epoch": 0.7657964171642827, + "grad_norm": 3.500491709275867, + "learning_rate": 1.3709293050667487e-06, + "loss": 0.5162, + "step": 11029 + }, + { + "epoch": 0.7658658519650049, + "grad_norm": 3.8004001702432078, + "learning_rate": 1.37015587230567e-06, + "loss": 0.1409, + "step": 11030 + }, + { + "epoch": 0.765935286765727, + "grad_norm": 3.100742469697651, + "learning_rate": 1.3693826231380002e-06, + "loss": 0.2833, + "step": 11031 + }, + { + "epoch": 0.766004721566449, + "grad_norm": 3.8323918660038174, + "learning_rate": 1.3686095576028492e-06, + "loss": 0.3172, + "step": 11032 + }, + { + "epoch": 0.7660741563671712, + "grad_norm": 4.170177234340749, + "learning_rate": 1.3678366757393175e-06, + "loss": 0.592, + "step": 11033 + }, + { + "epoch": 0.7661435911678933, + "grad_norm": 5.603019246987314, + "learning_rate": 1.3670639775864963e-06, + "loss": 0.7365, + "step": 11034 + }, + { + "epoch": 0.7662130259686155, + "grad_norm": 2.3899431905811177, + "learning_rate": 1.3662914631834684e-06, + "loss": 0.1378, + "step": 11035 + }, + { + "epoch": 0.7662824607693376, + "grad_norm": 4.210255953075779, + "learning_rate": 1.3655191325693061e-06, + "loss": 0.5173, + "step": 11036 + }, + { + "epoch": 0.7663518955700597, + "grad_norm": 3.7388554398164286, + "learning_rate": 1.3647469857830736e-06, + "loss": 0.2854, + "step": 11037 + }, + { + "epoch": 0.7664213303707819, + "grad_norm": 3.541205578750305, + "learning_rate": 1.3639750228638237e-06, + "loss": 0.3633, + "step": 11038 + }, + { + "epoch": 0.7664907651715039, + "grad_norm": 2.769618483680573, + "learning_rate": 1.3632032438506059e-06, + "loss": 0.2469, + "step": 11039 + }, + { + "epoch": 0.7665601999722261, + "grad_norm": 4.095283652497651, + "learning_rate": 1.3624316487824508e-06, + "loss": 0.3301, + "step": 11040 + }, + { + "epoch": 0.7666296347729482, + "grad_norm": 3.566725008994478, + "learning_rate": 1.3616602376983851e-06, + "loss": 0.29, + "step": 11041 + }, + { + "epoch": 0.7666990695736703, + "grad_norm": 3.884235960206201, + "learning_rate": 1.3608890106374312e-06, + "loss": 0.3209, + "step": 11042 + }, + { + "epoch": 0.7667685043743925, + "grad_norm": 4.252279050155112, + "learning_rate": 1.3601179676385912e-06, + "loss": 0.4251, + "step": 11043 + }, + { + "epoch": 0.7668379391751146, + "grad_norm": 3.5623754911128684, + "learning_rate": 1.3593471087408638e-06, + "loss": 0.5348, + "step": 11044 + }, + { + "epoch": 0.7669073739758366, + "grad_norm": 3.2175435326464457, + "learning_rate": 1.358576433983242e-06, + "loss": 0.285, + "step": 11045 + }, + { + "epoch": 0.7669768087765588, + "grad_norm": 2.4555681251078836, + "learning_rate": 1.3578059434047037e-06, + "loss": 0.3022, + "step": 11046 + }, + { + "epoch": 0.7670462435772809, + "grad_norm": 3.659576030922715, + "learning_rate": 1.357035637044219e-06, + "loss": 0.3639, + "step": 11047 + }, + { + "epoch": 0.7671156783780031, + "grad_norm": 3.8291064089381917, + "learning_rate": 1.3562655149407494e-06, + "loss": 0.3501, + "step": 11048 + }, + { + "epoch": 0.7671851131787252, + "grad_norm": 3.5446508387418603, + "learning_rate": 1.3554955771332473e-06, + "loss": 0.3788, + "step": 11049 + }, + { + "epoch": 0.7672545479794473, + "grad_norm": 3.460828445742911, + "learning_rate": 1.3547258236606542e-06, + "loss": 0.2884, + "step": 11050 + }, + { + "epoch": 
0.7673239827801694, + "grad_norm": 4.003112816465432, + "learning_rate": 1.3539562545619045e-06, + "loss": 0.5391, + "step": 11051 + }, + { + "epoch": 0.7673934175808915, + "grad_norm": 4.526404952861863, + "learning_rate": 1.353186869875922e-06, + "loss": 0.5574, + "step": 11052 + }, + { + "epoch": 0.7674628523816137, + "grad_norm": 3.872098341102574, + "learning_rate": 1.3524176696416203e-06, + "loss": 0.3986, + "step": 11053 + }, + { + "epoch": 0.7675322871823358, + "grad_norm": 5.311546058713373, + "learning_rate": 1.3516486538979056e-06, + "loss": 0.7617, + "step": 11054 + }, + { + "epoch": 0.7676017219830579, + "grad_norm": 6.528486560143228, + "learning_rate": 1.3508798226836722e-06, + "loss": 0.4809, + "step": 11055 + }, + { + "epoch": 0.7676711567837801, + "grad_norm": 5.197957867697868, + "learning_rate": 1.35011117603781e-06, + "loss": 0.6618, + "step": 11056 + }, + { + "epoch": 0.7677405915845021, + "grad_norm": 3.8128046904813013, + "learning_rate": 1.3493427139991954e-06, + "loss": 0.3211, + "step": 11057 + }, + { + "epoch": 0.7678100263852242, + "grad_norm": 3.6100193703705634, + "learning_rate": 1.3485744366066921e-06, + "loss": 0.3594, + "step": 11058 + }, + { + "epoch": 0.7678794611859464, + "grad_norm": 3.466134751811614, + "learning_rate": 1.347806343899164e-06, + "loss": 0.2964, + "step": 11059 + }, + { + "epoch": 0.7679488959866685, + "grad_norm": 3.8921848312473837, + "learning_rate": 1.3470384359154592e-06, + "loss": 0.2056, + "step": 11060 + }, + { + "epoch": 0.7680183307873907, + "grad_norm": 3.4611479870315858, + "learning_rate": 1.3462707126944141e-06, + "loss": 0.3876, + "step": 11061 + }, + { + "epoch": 0.7680877655881128, + "grad_norm": 4.452158673833338, + "learning_rate": 1.3455031742748632e-06, + "loss": 0.4647, + "step": 11062 + }, + { + "epoch": 0.7681572003888348, + "grad_norm": 4.448376828207619, + "learning_rate": 1.3447358206956268e-06, + "loss": 0.4656, + "step": 11063 + }, + { + "epoch": 0.768226635189557, + "grad_norm": 3.8963970791603826, + "learning_rate": 1.3439686519955158e-06, + "loss": 0.4096, + "step": 11064 + }, + { + "epoch": 0.7682960699902791, + "grad_norm": 4.054021194060909, + "learning_rate": 1.3432016682133337e-06, + "loss": 0.3679, + "step": 11065 + }, + { + "epoch": 0.7683655047910013, + "grad_norm": 4.239365275354775, + "learning_rate": 1.3424348693878731e-06, + "loss": 0.5044, + "step": 11066 + }, + { + "epoch": 0.7684349395917234, + "grad_norm": 3.2482667266524343, + "learning_rate": 1.341668255557918e-06, + "loss": 0.3654, + "step": 11067 + }, + { + "epoch": 0.7685043743924455, + "grad_norm": 3.2766740786919955, + "learning_rate": 1.340901826762243e-06, + "loss": 0.218, + "step": 11068 + }, + { + "epoch": 0.7685738091931676, + "grad_norm": 4.228232124209404, + "learning_rate": 1.3401355830396129e-06, + "loss": 0.4182, + "step": 11069 + }, + { + "epoch": 0.7686432439938897, + "grad_norm": 3.75177060764092, + "learning_rate": 1.3393695244287819e-06, + "loss": 0.3856, + "step": 11070 + }, + { + "epoch": 0.7687126787946119, + "grad_norm": 4.640480166162068, + "learning_rate": 1.3386036509685014e-06, + "loss": 0.5131, + "step": 11071 + }, + { + "epoch": 0.768782113595334, + "grad_norm": 4.601072141059501, + "learning_rate": 1.3378379626975035e-06, + "loss": 0.4706, + "step": 11072 + }, + { + "epoch": 0.7688515483960561, + "grad_norm": 3.734949047524425, + "learning_rate": 1.337072459654516e-06, + "loss": 0.3289, + "step": 11073 + }, + { + "epoch": 0.7689209831967783, + "grad_norm": 2.981265161101862, + "learning_rate": 
1.3363071418782613e-06, + "loss": 0.2877, + "step": 11074 + }, + { + "epoch": 0.7689904179975003, + "grad_norm": 3.8250752708045734, + "learning_rate": 1.3355420094074427e-06, + "loss": 0.3943, + "step": 11075 + }, + { + "epoch": 0.7690598527982224, + "grad_norm": 2.926501179191031, + "learning_rate": 1.3347770622807649e-06, + "loss": 0.2121, + "step": 11076 + }, + { + "epoch": 0.7691292875989446, + "grad_norm": 4.045149367083221, + "learning_rate": 1.334012300536915e-06, + "loss": 0.4724, + "step": 11077 + }, + { + "epoch": 0.7691987223996667, + "grad_norm": 4.478289337564379, + "learning_rate": 1.3332477242145747e-06, + "loss": 0.568, + "step": 11078 + }, + { + "epoch": 0.7692681572003889, + "grad_norm": 3.742266215722157, + "learning_rate": 1.3324833333524156e-06, + "loss": 0.4501, + "step": 11079 + }, + { + "epoch": 0.769337592001111, + "grad_norm": 4.239586890367387, + "learning_rate": 1.331719127989099e-06, + "loss": 0.5153, + "step": 11080 + }, + { + "epoch": 0.769407026801833, + "grad_norm": 3.0390562303454, + "learning_rate": 1.3309551081632781e-06, + "loss": 0.1832, + "step": 11081 + }, + { + "epoch": 0.7694764616025552, + "grad_norm": 4.395028641237015, + "learning_rate": 1.3301912739135964e-06, + "loss": 0.4599, + "step": 11082 + }, + { + "epoch": 0.7695458964032773, + "grad_norm": 3.8206407649689247, + "learning_rate": 1.329427625278687e-06, + "loss": 0.3166, + "step": 11083 + }, + { + "epoch": 0.7696153312039995, + "grad_norm": 2.7696648377365842, + "learning_rate": 1.328664162297173e-06, + "loss": 0.2558, + "step": 11084 + }, + { + "epoch": 0.7696847660047216, + "grad_norm": 2.975794473150173, + "learning_rate": 1.3279008850076747e-06, + "loss": 0.321, + "step": 11085 + }, + { + "epoch": 0.7697542008054437, + "grad_norm": 3.705988267959164, + "learning_rate": 1.3271377934487927e-06, + "loss": 0.426, + "step": 11086 + }, + { + "epoch": 0.7698236356061658, + "grad_norm": 3.004881050812452, + "learning_rate": 1.3263748876591237e-06, + "loss": 0.2529, + "step": 11087 + }, + { + "epoch": 0.7698930704068879, + "grad_norm": 6.690827262559391, + "learning_rate": 1.3256121676772577e-06, + "loss": 0.5149, + "step": 11088 + }, + { + "epoch": 0.76996250520761, + "grad_norm": 3.387881168443318, + "learning_rate": 1.3248496335417715e-06, + "loss": 0.3394, + "step": 11089 + }, + { + "epoch": 0.7700319400083322, + "grad_norm": 11.182716191377102, + "learning_rate": 1.324087285291229e-06, + "loss": 0.4477, + "step": 11090 + }, + { + "epoch": 0.7701013748090543, + "grad_norm": 4.6528302205606265, + "learning_rate": 1.3233251229641937e-06, + "loss": 0.782, + "step": 11091 + }, + { + "epoch": 0.7701708096097765, + "grad_norm": 3.234439676702835, + "learning_rate": 1.3225631465992144e-06, + "loss": 0.3717, + "step": 11092 + }, + { + "epoch": 0.7702402444104985, + "grad_norm": 3.9225629523835486, + "learning_rate": 1.3218013562348274e-06, + "loss": 0.3742, + "step": 11093 + }, + { + "epoch": 0.7703096792112206, + "grad_norm": 3.719603057221862, + "learning_rate": 1.3210397519095669e-06, + "loss": 0.3364, + "step": 11094 + }, + { + "epoch": 0.7703791140119428, + "grad_norm": 3.7118041286228487, + "learning_rate": 1.3202783336619528e-06, + "loss": 0.2746, + "step": 11095 + }, + { + "epoch": 0.7704485488126649, + "grad_norm": 4.699558722200026, + "learning_rate": 1.3195171015304964e-06, + "loss": 0.3835, + "step": 11096 + }, + { + "epoch": 0.7705179836133871, + "grad_norm": 4.531947981971148, + "learning_rate": 1.3187560555537005e-06, + "loss": 0.4333, + "step": 11097 + }, + { + "epoch": 
0.7705874184141092, + "grad_norm": 3.748795210373505, + "learning_rate": 1.3179951957700577e-06, + "loss": 0.391, + "step": 11098 + }, + { + "epoch": 0.7706568532148312, + "grad_norm": 3.34970461137328, + "learning_rate": 1.3172345222180516e-06, + "loss": 0.3171, + "step": 11099 + }, + { + "epoch": 0.7707262880155534, + "grad_norm": 4.543096327741528, + "learning_rate": 1.3164740349361555e-06, + "loss": 0.6598, + "step": 11100 + }, + { + "epoch": 0.7707957228162755, + "grad_norm": 3.999562499357687, + "learning_rate": 1.3157137339628352e-06, + "loss": 0.4614, + "step": 11101 + }, + { + "epoch": 0.7708651576169976, + "grad_norm": 3.908686521988311, + "learning_rate": 1.314953619336543e-06, + "loss": 0.4525, + "step": 11102 + }, + { + "epoch": 0.7709345924177198, + "grad_norm": 3.963515152662578, + "learning_rate": 1.3141936910957303e-06, + "loss": 0.3669, + "step": 11103 + }, + { + "epoch": 0.7710040272184419, + "grad_norm": 4.739756173807416, + "learning_rate": 1.3134339492788267e-06, + "loss": 0.5854, + "step": 11104 + }, + { + "epoch": 0.771073462019164, + "grad_norm": 4.49243072899559, + "learning_rate": 1.3126743939242643e-06, + "loss": 0.5688, + "step": 11105 + }, + { + "epoch": 0.7711428968198861, + "grad_norm": 2.708406004214198, + "learning_rate": 1.3119150250704578e-06, + "loss": 0.2601, + "step": 11106 + }, + { + "epoch": 0.7712123316206082, + "grad_norm": 5.026015686256312, + "learning_rate": 1.3111558427558163e-06, + "loss": 0.4407, + "step": 11107 + }, + { + "epoch": 0.7712817664213304, + "grad_norm": 5.774790291668631, + "learning_rate": 1.3103968470187384e-06, + "loss": 0.656, + "step": 11108 + }, + { + "epoch": 0.7713512012220525, + "grad_norm": 5.732531113390396, + "learning_rate": 1.3096380378976126e-06, + "loss": 0.5403, + "step": 11109 + }, + { + "epoch": 0.7714206360227747, + "grad_norm": 3.6236871925607743, + "learning_rate": 1.3088794154308188e-06, + "loss": 0.4528, + "step": 11110 + }, + { + "epoch": 0.7714900708234967, + "grad_norm": 3.326469555615002, + "learning_rate": 1.3081209796567273e-06, + "loss": 0.4517, + "step": 11111 + }, + { + "epoch": 0.7715595056242188, + "grad_norm": 3.8946023878743663, + "learning_rate": 1.3073627306136994e-06, + "loss": 0.2974, + "step": 11112 + }, + { + "epoch": 0.771628940424941, + "grad_norm": 4.604630629939227, + "learning_rate": 1.3066046683400847e-06, + "loss": 0.4481, + "step": 11113 + }, + { + "epoch": 0.7716983752256631, + "grad_norm": 4.3383117078592, + "learning_rate": 1.305846792874229e-06, + "loss": 0.5713, + "step": 11114 + }, + { + "epoch": 0.7717678100263852, + "grad_norm": 3.4441635510815214, + "learning_rate": 1.305089104254461e-06, + "loss": 0.3985, + "step": 11115 + }, + { + "epoch": 0.7718372448271074, + "grad_norm": 2.3300318488355924, + "learning_rate": 1.3043316025191033e-06, + "loss": 0.272, + "step": 11116 + }, + { + "epoch": 0.7719066796278294, + "grad_norm": 4.58129962100579, + "learning_rate": 1.3035742877064745e-06, + "loss": 0.3659, + "step": 11117 + }, + { + "epoch": 0.7719761144285516, + "grad_norm": 3.577670938560837, + "learning_rate": 1.3028171598548739e-06, + "loss": 0.2649, + "step": 11118 + }, + { + "epoch": 0.7720455492292737, + "grad_norm": 4.51070262258794, + "learning_rate": 1.302060219002596e-06, + "loss": 0.6092, + "step": 11119 + }, + { + "epoch": 0.7721149840299958, + "grad_norm": 3.4713305699704615, + "learning_rate": 1.3013034651879296e-06, + "loss": 0.3951, + "step": 11120 + }, + { + "epoch": 0.772184418830718, + "grad_norm": 4.506783336053114, + "learning_rate": 
1.3005468984491498e-06, + "loss": 0.6209, + "step": 11121 + }, + { + "epoch": 0.7722538536314401, + "grad_norm": 4.480311284300879, + "learning_rate": 1.2997905188245192e-06, + "loss": 0.4549, + "step": 11122 + }, + { + "epoch": 0.7723232884321622, + "grad_norm": 4.9291650661949244, + "learning_rate": 1.2990343263522981e-06, + "loss": 0.6107, + "step": 11123 + }, + { + "epoch": 0.7723927232328843, + "grad_norm": 3.925324749050907, + "learning_rate": 1.2982783210707344e-06, + "loss": 0.5315, + "step": 11124 + }, + { + "epoch": 0.7724621580336064, + "grad_norm": 4.2775014290038955, + "learning_rate": 1.2975225030180611e-06, + "loss": 0.3596, + "step": 11125 + }, + { + "epoch": 0.7725315928343286, + "grad_norm": 5.451064868441418, + "learning_rate": 1.2967668722325122e-06, + "loss": 0.4843, + "step": 11126 + }, + { + "epoch": 0.7726010276350507, + "grad_norm": 3.768753251155597, + "learning_rate": 1.296011428752304e-06, + "loss": 0.3946, + "step": 11127 + }, + { + "epoch": 0.7726704624357729, + "grad_norm": 4.2742716557016776, + "learning_rate": 1.295256172615646e-06, + "loss": 0.6154, + "step": 11128 + }, + { + "epoch": 0.7727398972364949, + "grad_norm": 4.989508233636311, + "learning_rate": 1.2945011038607391e-06, + "loss": 0.4086, + "step": 11129 + }, + { + "epoch": 0.772809332037217, + "grad_norm": 4.290905212256956, + "learning_rate": 1.2937462225257736e-06, + "loss": 0.583, + "step": 11130 + }, + { + "epoch": 0.7728787668379392, + "grad_norm": 3.342178181321543, + "learning_rate": 1.2929915286489282e-06, + "loss": 0.4671, + "step": 11131 + }, + { + "epoch": 0.7729482016386613, + "grad_norm": 4.239194978571748, + "learning_rate": 1.2922370222683794e-06, + "loss": 0.5314, + "step": 11132 + }, + { + "epoch": 0.7730176364393834, + "grad_norm": 4.373494955072838, + "learning_rate": 1.2914827034222854e-06, + "loss": 0.5807, + "step": 11133 + }, + { + "epoch": 0.7730870712401056, + "grad_norm": 3.5426233860507566, + "learning_rate": 1.2907285721487977e-06, + "loss": 0.5046, + "step": 11134 + }, + { + "epoch": 0.7731565060408276, + "grad_norm": 6.912792922237515, + "learning_rate": 1.2899746284860648e-06, + "loss": 0.3068, + "step": 11135 + }, + { + "epoch": 0.7732259408415498, + "grad_norm": 3.4953234177342196, + "learning_rate": 1.2892208724722143e-06, + "loss": 0.2164, + "step": 11136 + }, + { + "epoch": 0.7732953756422719, + "grad_norm": 4.3429618460675465, + "learning_rate": 1.2884673041453744e-06, + "loss": 0.509, + "step": 11137 + }, + { + "epoch": 0.773364810442994, + "grad_norm": 4.1691942238732205, + "learning_rate": 1.2877139235436581e-06, + "loss": 0.702, + "step": 11138 + }, + { + "epoch": 0.7734342452437162, + "grad_norm": 2.1622376248559547, + "learning_rate": 1.2869607307051712e-06, + "loss": 0.0621, + "step": 11139 + }, + { + "epoch": 0.7735036800444383, + "grad_norm": 3.370398681781712, + "learning_rate": 1.286207725668009e-06, + "loss": 0.3279, + "step": 11140 + }, + { + "epoch": 0.7735731148451604, + "grad_norm": 4.418365996213047, + "learning_rate": 1.2854549084702572e-06, + "loss": 0.5992, + "step": 11141 + }, + { + "epoch": 0.7736425496458825, + "grad_norm": 4.039835781910002, + "learning_rate": 1.2847022791499925e-06, + "loss": 0.45, + "step": 11142 + }, + { + "epoch": 0.7737119844466046, + "grad_norm": 4.598566850025614, + "learning_rate": 1.283949837745283e-06, + "loss": 0.4429, + "step": 11143 + }, + { + "epoch": 0.7737814192473268, + "grad_norm": 4.047718471545714, + "learning_rate": 1.2831975842941857e-06, + "loss": 0.5609, + "step": 11144 + }, + { + "epoch": 
0.7738508540480489, + "grad_norm": 4.87478660792395, + "learning_rate": 1.2824455188347467e-06, + "loss": 0.5243, + "step": 11145 + }, + { + "epoch": 0.773920288848771, + "grad_norm": 4.5626222464537145, + "learning_rate": 1.2816936414050101e-06, + "loss": 0.4664, + "step": 11146 + }, + { + "epoch": 0.7739897236494931, + "grad_norm": 4.449152530240054, + "learning_rate": 1.2809419520430005e-06, + "loss": 0.4394, + "step": 11147 + }, + { + "epoch": 0.7740591584502152, + "grad_norm": 3.9104753064122777, + "learning_rate": 1.2801904507867364e-06, + "loss": 0.4877, + "step": 11148 + }, + { + "epoch": 0.7741285932509374, + "grad_norm": 3.965173988542395, + "learning_rate": 1.2794391376742333e-06, + "loss": 0.3623, + "step": 11149 + }, + { + "epoch": 0.7741980280516595, + "grad_norm": 4.1383813612732, + "learning_rate": 1.278688012743487e-06, + "loss": 0.5335, + "step": 11150 + }, + { + "epoch": 0.7742674628523816, + "grad_norm": 6.293982406771598, + "learning_rate": 1.2779370760324883e-06, + "loss": 0.2873, + "step": 11151 + }, + { + "epoch": 0.7743368976531038, + "grad_norm": 3.7902786235906336, + "learning_rate": 1.2771863275792224e-06, + "loss": 0.4438, + "step": 11152 + }, + { + "epoch": 0.7744063324538258, + "grad_norm": 3.5599776231892606, + "learning_rate": 1.2764357674216604e-06, + "loss": 0.3033, + "step": 11153 + }, + { + "epoch": 0.774475767254548, + "grad_norm": 4.160360799135336, + "learning_rate": 1.2756853955977615e-06, + "loss": 0.4496, + "step": 11154 + }, + { + "epoch": 0.7745452020552701, + "grad_norm": 3.6985132761543933, + "learning_rate": 1.2749352121454823e-06, + "loss": 0.421, + "step": 11155 + }, + { + "epoch": 0.7746146368559922, + "grad_norm": 3.5901490194287913, + "learning_rate": 1.274185217102764e-06, + "loss": 0.2036, + "step": 11156 + }, + { + "epoch": 0.7746840716567144, + "grad_norm": 6.492080285726098, + "learning_rate": 1.2734354105075424e-06, + "loss": 0.5901, + "step": 11157 + }, + { + "epoch": 0.7747535064574365, + "grad_norm": 3.843998910953194, + "learning_rate": 1.2726857923977404e-06, + "loss": 0.3586, + "step": 11158 + }, + { + "epoch": 0.7748229412581585, + "grad_norm": 4.634498578618431, + "learning_rate": 1.271936362811273e-06, + "loss": 0.6976, + "step": 11159 + }, + { + "epoch": 0.7748923760588807, + "grad_norm": 4.356909338343021, + "learning_rate": 1.2711871217860461e-06, + "loss": 0.551, + "step": 11160 + }, + { + "epoch": 0.7749618108596028, + "grad_norm": 2.1160183251843185, + "learning_rate": 1.2704380693599554e-06, + "loss": 0.1749, + "step": 11161 + }, + { + "epoch": 0.775031245660325, + "grad_norm": 3.7708734675145603, + "learning_rate": 1.2696892055708865e-06, + "loss": 0.4576, + "step": 11162 + }, + { + "epoch": 0.7751006804610471, + "grad_norm": 4.189641816182364, + "learning_rate": 1.2689405304567154e-06, + "loss": 0.3033, + "step": 11163 + }, + { + "epoch": 0.7751701152617692, + "grad_norm": 3.2469457590471174, + "learning_rate": 1.2681920440553124e-06, + "loss": 0.3359, + "step": 11164 + }, + { + "epoch": 0.7752395500624913, + "grad_norm": 3.159495107922084, + "learning_rate": 1.2674437464045309e-06, + "loss": 0.3316, + "step": 11165 + }, + { + "epoch": 0.7753089848632134, + "grad_norm": 4.187543324894305, + "learning_rate": 1.2666956375422225e-06, + "loss": 0.4446, + "step": 11166 + }, + { + "epoch": 0.7753784196639356, + "grad_norm": 4.501309253207573, + "learning_rate": 1.2659477175062252e-06, + "loss": 0.5944, + "step": 11167 + }, + { + "epoch": 0.7754478544646577, + "grad_norm": 4.1310521787448975, + "learning_rate": 
1.2651999863343638e-06, + "loss": 0.5855, + "step": 11168 + }, + { + "epoch": 0.7755172892653798, + "grad_norm": 4.103732258574949, + "learning_rate": 1.2644524440644628e-06, + "loss": 0.4594, + "step": 11169 + }, + { + "epoch": 0.775586724066102, + "grad_norm": 3.1038501954351827, + "learning_rate": 1.26370509073433e-06, + "loss": 0.1778, + "step": 11170 + }, + { + "epoch": 0.775656158866824, + "grad_norm": 3.315473051285473, + "learning_rate": 1.2629579263817659e-06, + "loss": 0.3184, + "step": 11171 + }, + { + "epoch": 0.7757255936675461, + "grad_norm": 3.3907964650048306, + "learning_rate": 1.2622109510445613e-06, + "loss": 0.4255, + "step": 11172 + }, + { + "epoch": 0.7757950284682683, + "grad_norm": 3.5330528223402475, + "learning_rate": 1.261464164760497e-06, + "loss": 0.4794, + "step": 11173 + }, + { + "epoch": 0.7758644632689904, + "grad_norm": 4.482184282501224, + "learning_rate": 1.260717567567345e-06, + "loss": 0.404, + "step": 11174 + }, + { + "epoch": 0.7759338980697126, + "grad_norm": 3.7916923243713367, + "learning_rate": 1.2599711595028668e-06, + "loss": 0.3922, + "step": 11175 + }, + { + "epoch": 0.7760033328704347, + "grad_norm": 3.9800725797856944, + "learning_rate": 1.2592249406048156e-06, + "loss": 0.3623, + "step": 11176 + }, + { + "epoch": 0.7760727676711567, + "grad_norm": 4.530939846911843, + "learning_rate": 1.2584789109109325e-06, + "loss": 0.5085, + "step": 11177 + }, + { + "epoch": 0.7761422024718789, + "grad_norm": 4.525872438064664, + "learning_rate": 1.2577330704589546e-06, + "loss": 0.6661, + "step": 11178 + }, + { + "epoch": 0.776211637272601, + "grad_norm": 4.550337782203634, + "learning_rate": 1.2569874192866023e-06, + "loss": 0.4555, + "step": 11179 + }, + { + "epoch": 0.7762810720733232, + "grad_norm": 3.1989676489406493, + "learning_rate": 1.2562419574315893e-06, + "loss": 0.3755, + "step": 11180 + }, + { + "epoch": 0.7763505068740453, + "grad_norm": 3.69667624966104, + "learning_rate": 1.2554966849316248e-06, + "loss": 0.4358, + "step": 11181 + }, + { + "epoch": 0.7764199416747674, + "grad_norm": 2.812789794811207, + "learning_rate": 1.2547516018243989e-06, + "loss": 0.2851, + "step": 11182 + }, + { + "epoch": 0.7764893764754895, + "grad_norm": 3.696093825882281, + "learning_rate": 1.2540067081475982e-06, + "loss": 0.592, + "step": 11183 + }, + { + "epoch": 0.7765588112762116, + "grad_norm": 3.0240310655375127, + "learning_rate": 1.2532620039388999e-06, + "loss": 0.3427, + "step": 11184 + }, + { + "epoch": 0.7766282460769337, + "grad_norm": 4.426728765442772, + "learning_rate": 1.2525174892359703e-06, + "loss": 0.3118, + "step": 11185 + }, + { + "epoch": 0.7766976808776559, + "grad_norm": 3.1907989879479324, + "learning_rate": 1.2517731640764658e-06, + "loss": 0.2592, + "step": 11186 + }, + { + "epoch": 0.776767115678378, + "grad_norm": 3.57740323825533, + "learning_rate": 1.2510290284980325e-06, + "loss": 0.4283, + "step": 11187 + }, + { + "epoch": 0.7768365504791002, + "grad_norm": 3.299562039862095, + "learning_rate": 1.250285082538309e-06, + "loss": 0.3577, + "step": 11188 + }, + { + "epoch": 0.7769059852798222, + "grad_norm": 9.413314086670317, + "learning_rate": 1.2495413262349232e-06, + "loss": 0.3637, + "step": 11189 + }, + { + "epoch": 0.7769754200805443, + "grad_norm": 3.6647091312422937, + "learning_rate": 1.248797759625493e-06, + "loss": 0.4019, + "step": 11190 + }, + { + "epoch": 0.7770448548812665, + "grad_norm": 4.454577746079493, + "learning_rate": 1.2480543827476271e-06, + "loss": 0.3812, + "step": 11191 + }, + { + "epoch": 
0.7771142896819886, + "grad_norm": 4.204303993916557, + "learning_rate": 1.2473111956389256e-06, + "loss": 0.4174, + "step": 11192 + }, + { + "epoch": 0.7771837244827108, + "grad_norm": 3.733056030932026, + "learning_rate": 1.2465681983369765e-06, + "loss": 0.3973, + "step": 11193 + }, + { + "epoch": 0.7772531592834329, + "grad_norm": 6.573629691502059, + "learning_rate": 1.2458253908793593e-06, + "loss": 0.6169, + "step": 11194 + }, + { + "epoch": 0.7773225940841549, + "grad_norm": 4.425803197350088, + "learning_rate": 1.245082773303648e-06, + "loss": 0.5293, + "step": 11195 + }, + { + "epoch": 0.7773920288848771, + "grad_norm": 3.442383394349508, + "learning_rate": 1.2443403456474017e-06, + "loss": 0.4475, + "step": 11196 + }, + { + "epoch": 0.7774614636855992, + "grad_norm": 4.418300191948008, + "learning_rate": 1.2435981079481685e-06, + "loss": 0.4828, + "step": 11197 + }, + { + "epoch": 0.7775308984863214, + "grad_norm": 4.122954938482227, + "learning_rate": 1.242856060243493e-06, + "loss": 0.3046, + "step": 11198 + }, + { + "epoch": 0.7776003332870435, + "grad_norm": 3.458910747379604, + "learning_rate": 1.2421142025709087e-06, + "loss": 0.3116, + "step": 11199 + }, + { + "epoch": 0.7776697680877656, + "grad_norm": 5.177356225642038, + "learning_rate": 1.2413725349679323e-06, + "loss": 0.4959, + "step": 11200 + }, + { + "epoch": 0.7777392028884877, + "grad_norm": 3.9561133792476832, + "learning_rate": 1.240631057472082e-06, + "loss": 0.4218, + "step": 11201 + }, + { + "epoch": 0.7778086376892098, + "grad_norm": 4.600400626689768, + "learning_rate": 1.2398897701208584e-06, + "loss": 0.6321, + "step": 11202 + }, + { + "epoch": 0.7778780724899319, + "grad_norm": 4.684915422183934, + "learning_rate": 1.2391486729517555e-06, + "loss": 0.665, + "step": 11203 + }, + { + "epoch": 0.7779475072906541, + "grad_norm": 4.468353799148326, + "learning_rate": 1.2384077660022564e-06, + "loss": 0.7129, + "step": 11204 + }, + { + "epoch": 0.7780169420913762, + "grad_norm": 3.9434280793964187, + "learning_rate": 1.2376670493098369e-06, + "loss": 0.5862, + "step": 11205 + }, + { + "epoch": 0.7780863768920984, + "grad_norm": 4.407253756433314, + "learning_rate": 1.2369265229119604e-06, + "loss": 0.6763, + "step": 11206 + }, + { + "epoch": 0.7781558116928204, + "grad_norm": 4.348571153194414, + "learning_rate": 1.2361861868460817e-06, + "loss": 0.276, + "step": 11207 + }, + { + "epoch": 0.7782252464935425, + "grad_norm": 4.781600539493049, + "learning_rate": 1.2354460411496471e-06, + "loss": 0.4435, + "step": 11208 + }, + { + "epoch": 0.7782946812942647, + "grad_norm": 4.73954866057255, + "learning_rate": 1.2347060858600901e-06, + "loss": 0.5868, + "step": 11209 + }, + { + "epoch": 0.7783641160949868, + "grad_norm": 3.667012292074982, + "learning_rate": 1.2339663210148418e-06, + "loss": 0.3197, + "step": 11210 + }, + { + "epoch": 0.778433550895709, + "grad_norm": 3.381520937209768, + "learning_rate": 1.2332267466513136e-06, + "loss": 0.4305, + "step": 11211 + }, + { + "epoch": 0.7785029856964311, + "grad_norm": 4.3554728188783125, + "learning_rate": 1.2324873628069128e-06, + "loss": 0.3372, + "step": 11212 + }, + { + "epoch": 0.7785724204971531, + "grad_norm": 2.8944360516061134, + "learning_rate": 1.2317481695190397e-06, + "loss": 0.2599, + "step": 11213 + }, + { + "epoch": 0.7786418552978753, + "grad_norm": 2.890490663249299, + "learning_rate": 1.2310091668250812e-06, + "loss": 0.2777, + "step": 11214 + }, + { + "epoch": 0.7787112900985974, + "grad_norm": 3.3529103723832026, + "learning_rate": 
1.2302703547624117e-06, + "loss": 0.4323, + "step": 11215 + }, + { + "epoch": 0.7787807248993195, + "grad_norm": 4.315742041441327, + "learning_rate": 1.229531733368403e-06, + "loss": 0.6468, + "step": 11216 + }, + { + "epoch": 0.7788501597000417, + "grad_norm": 3.012007804277922, + "learning_rate": 1.2287933026804127e-06, + "loss": 0.332, + "step": 11217 + }, + { + "epoch": 0.7789195945007638, + "grad_norm": 3.113154367919129, + "learning_rate": 1.2280550627357895e-06, + "loss": 0.2818, + "step": 11218 + }, + { + "epoch": 0.778989029301486, + "grad_norm": 3.18966482272493, + "learning_rate": 1.2273170135718737e-06, + "loss": 0.3165, + "step": 11219 + }, + { + "epoch": 0.779058464102208, + "grad_norm": 3.8347605356456924, + "learning_rate": 1.2265791552259936e-06, + "loss": 0.5446, + "step": 11220 + }, + { + "epoch": 0.7791278989029301, + "grad_norm": 4.240622930131518, + "learning_rate": 1.2258414877354703e-06, + "loss": 0.4574, + "step": 11221 + }, + { + "epoch": 0.7791973337036523, + "grad_norm": 5.407028027413241, + "learning_rate": 1.2251040111376134e-06, + "loss": 0.6103, + "step": 11222 + }, + { + "epoch": 0.7792667685043744, + "grad_norm": 3.9794865762751694, + "learning_rate": 1.2243667254697244e-06, + "loss": 0.3972, + "step": 11223 + }, + { + "epoch": 0.7793362033050966, + "grad_norm": 3.3105021397290617, + "learning_rate": 1.2236296307690938e-06, + "loss": 0.2735, + "step": 11224 + }, + { + "epoch": 0.7794056381058186, + "grad_norm": 7.101593547162669, + "learning_rate": 1.2228927270730034e-06, + "loss": 0.3886, + "step": 11225 + }, + { + "epoch": 0.7794750729065407, + "grad_norm": 3.6320232204462144, + "learning_rate": 1.2221560144187234e-06, + "loss": 0.4237, + "step": 11226 + }, + { + "epoch": 0.7795445077072629, + "grad_norm": 3.414145462836716, + "learning_rate": 1.2214194928435187e-06, + "loss": 0.3518, + "step": 11227 + }, + { + "epoch": 0.779613942507985, + "grad_norm": 2.181605485923959, + "learning_rate": 1.2206831623846416e-06, + "loss": 0.1132, + "step": 11228 + }, + { + "epoch": 0.7796833773087071, + "grad_norm": 4.021769952031729, + "learning_rate": 1.2199470230793303e-06, + "loss": 0.428, + "step": 11229 + }, + { + "epoch": 0.7797528121094293, + "grad_norm": 4.024510015530156, + "learning_rate": 1.2192110749648233e-06, + "loss": 0.4476, + "step": 11230 + }, + { + "epoch": 0.7798222469101513, + "grad_norm": 3.8428545033013193, + "learning_rate": 1.2184753180783426e-06, + "loss": 0.6037, + "step": 11231 + }, + { + "epoch": 0.7798916817108735, + "grad_norm": 2.665232030006567, + "learning_rate": 1.2177397524570988e-06, + "loss": 0.3054, + "step": 11232 + }, + { + "epoch": 0.7799611165115956, + "grad_norm": 5.267989077484462, + "learning_rate": 1.2170043781383e-06, + "loss": 0.6951, + "step": 11233 + }, + { + "epoch": 0.7800305513123177, + "grad_norm": 5.576370981818137, + "learning_rate": 1.2162691951591387e-06, + "loss": 0.4987, + "step": 11234 + }, + { + "epoch": 0.7800999861130399, + "grad_norm": 2.5268382964252236, + "learning_rate": 1.2155342035568002e-06, + "loss": 0.2081, + "step": 11235 + }, + { + "epoch": 0.780169420913762, + "grad_norm": 3.0816699803578915, + "learning_rate": 1.214799403368459e-06, + "loss": 0.2639, + "step": 11236 + }, + { + "epoch": 0.7802388557144841, + "grad_norm": 3.6181086210979063, + "learning_rate": 1.2140647946312807e-06, + "loss": 0.3882, + "step": 11237 + }, + { + "epoch": 0.7803082905152062, + "grad_norm": 3.4696028305666453, + "learning_rate": 1.2133303773824196e-06, + "loss": 0.2354, + "step": 11238 + }, + { + "epoch": 
0.7803777253159283, + "grad_norm": 4.407893344862321, + "learning_rate": 1.2125961516590263e-06, + "loss": 0.6045, + "step": 11239 + }, + { + "epoch": 0.7804471601166505, + "grad_norm": 4.293439763404174, + "learning_rate": 1.211862117498232e-06, + "loss": 0.4276, + "step": 11240 + }, + { + "epoch": 0.7805165949173726, + "grad_norm": 4.379460585466756, + "learning_rate": 1.2111282749371645e-06, + "loss": 0.457, + "step": 11241 + }, + { + "epoch": 0.7805860297180947, + "grad_norm": 4.066907193858863, + "learning_rate": 1.2103946240129438e-06, + "loss": 0.1741, + "step": 11242 + }, + { + "epoch": 0.7806554645188168, + "grad_norm": 4.085401848180031, + "learning_rate": 1.2096611647626733e-06, + "loss": 0.3117, + "step": 11243 + }, + { + "epoch": 0.7807248993195389, + "grad_norm": 3.3251191478432895, + "learning_rate": 1.2089278972234507e-06, + "loss": 0.4288, + "step": 11244 + }, + { + "epoch": 0.7807943341202611, + "grad_norm": 3.7193105088589844, + "learning_rate": 1.2081948214323664e-06, + "loss": 0.4398, + "step": 11245 + }, + { + "epoch": 0.7808637689209832, + "grad_norm": 3.073414544856734, + "learning_rate": 1.2074619374264978e-06, + "loss": 0.2748, + "step": 11246 + }, + { + "epoch": 0.7809332037217053, + "grad_norm": 4.601508182394447, + "learning_rate": 1.2067292452429124e-06, + "loss": 0.5329, + "step": 11247 + }, + { + "epoch": 0.7810026385224275, + "grad_norm": 4.728242211643195, + "learning_rate": 1.20599674491867e-06, + "loss": 0.6275, + "step": 11248 + }, + { + "epoch": 0.7810720733231495, + "grad_norm": 4.759074461582454, + "learning_rate": 1.2052644364908184e-06, + "loss": 0.6808, + "step": 11249 + }, + { + "epoch": 0.7811415081238717, + "grad_norm": 4.164736966673322, + "learning_rate": 1.2045323199963976e-06, + "loss": 0.4705, + "step": 11250 + }, + { + "epoch": 0.7812109429245938, + "grad_norm": 4.578369622572767, + "learning_rate": 1.2038003954724376e-06, + "loss": 0.6658, + "step": 11251 + }, + { + "epoch": 0.7812803777253159, + "grad_norm": 3.4822720150675006, + "learning_rate": 1.2030686629559574e-06, + "loss": 0.3235, + "step": 11252 + }, + { + "epoch": 0.7813498125260381, + "grad_norm": 3.624964442548062, + "learning_rate": 1.202337122483968e-06, + "loss": 0.385, + "step": 11253 + }, + { + "epoch": 0.7814192473267602, + "grad_norm": 3.97712023280229, + "learning_rate": 1.20160577409347e-06, + "loss": 0.4751, + "step": 11254 + }, + { + "epoch": 0.7814886821274823, + "grad_norm": 5.879873981579039, + "learning_rate": 1.2008746178214514e-06, + "loss": 0.4977, + "step": 11255 + }, + { + "epoch": 0.7815581169282044, + "grad_norm": 2.8985257073468507, + "learning_rate": 1.2001436537048994e-06, + "loss": 0.3399, + "step": 11256 + }, + { + "epoch": 0.7816275517289265, + "grad_norm": 3.0558038455456162, + "learning_rate": 1.1994128817807799e-06, + "loss": 0.2203, + "step": 11257 + }, + { + "epoch": 0.7816969865296487, + "grad_norm": 4.807277853865202, + "learning_rate": 1.1986823020860543e-06, + "loss": 0.659, + "step": 11258 + }, + { + "epoch": 0.7817664213303708, + "grad_norm": 4.658283278105699, + "learning_rate": 1.1979519146576784e-06, + "loss": 0.5582, + "step": 11259 + }, + { + "epoch": 0.7818358561310929, + "grad_norm": 4.045257622114772, + "learning_rate": 1.1972217195325936e-06, + "loss": 0.4471, + "step": 11260 + }, + { + "epoch": 0.781905290931815, + "grad_norm": 4.115604195055608, + "learning_rate": 1.196491716747728e-06, + "loss": 0.2566, + "step": 11261 + }, + { + "epoch": 0.7819747257325371, + "grad_norm": 4.609078265124921, + "learning_rate": 
1.195761906340009e-06, + "loss": 0.4705, + "step": 11262 + }, + { + "epoch": 0.7820441605332593, + "grad_norm": 5.723452325117509, + "learning_rate": 1.1950322883463478e-06, + "loss": 0.4996, + "step": 11263 + }, + { + "epoch": 0.7821135953339814, + "grad_norm": 3.881851879338284, + "learning_rate": 1.1943028628036479e-06, + "loss": 0.3259, + "step": 11264 + }, + { + "epoch": 0.7821830301347035, + "grad_norm": 3.6404610830946975, + "learning_rate": 1.1935736297488032e-06, + "loss": 0.3962, + "step": 11265 + }, + { + "epoch": 0.7822524649354257, + "grad_norm": 3.852032070702059, + "learning_rate": 1.1928445892186968e-06, + "loss": 0.434, + "step": 11266 + }, + { + "epoch": 0.7823218997361477, + "grad_norm": 4.141008831667328, + "learning_rate": 1.1921157412502028e-06, + "loss": 0.4678, + "step": 11267 + }, + { + "epoch": 0.7823913345368699, + "grad_norm": 4.139166213952968, + "learning_rate": 1.1913870858801856e-06, + "loss": 0.27, + "step": 11268 + }, + { + "epoch": 0.782460769337592, + "grad_norm": 5.126137652570993, + "learning_rate": 1.1906586231455004e-06, + "loss": 0.598, + "step": 11269 + }, + { + "epoch": 0.7825302041383141, + "grad_norm": 4.3770950308468475, + "learning_rate": 1.1899303530829892e-06, + "loss": 0.5311, + "step": 11270 + }, + { + "epoch": 0.7825996389390363, + "grad_norm": 4.197732898960514, + "learning_rate": 1.189202275729493e-06, + "loss": 0.4435, + "step": 11271 + }, + { + "epoch": 0.7826690737397584, + "grad_norm": 3.364514654689977, + "learning_rate": 1.1884743911218316e-06, + "loss": 0.3003, + "step": 11272 + }, + { + "epoch": 0.7827385085404804, + "grad_norm": 3.4575674605396487, + "learning_rate": 1.1877466992968213e-06, + "loss": 0.3273, + "step": 11273 + }, + { + "epoch": 0.7828079433412026, + "grad_norm": 4.469433419188783, + "learning_rate": 1.1870192002912718e-06, + "loss": 0.4423, + "step": 11274 + }, + { + "epoch": 0.7828773781419247, + "grad_norm": 3.9076299214456585, + "learning_rate": 1.1862918941419737e-06, + "loss": 0.4181, + "step": 11275 + }, + { + "epoch": 0.7829468129426469, + "grad_norm": 4.389406482550695, + "learning_rate": 1.1855647808857174e-06, + "loss": 0.487, + "step": 11276 + }, + { + "epoch": 0.783016247743369, + "grad_norm": 3.6328097332679685, + "learning_rate": 1.1848378605592787e-06, + "loss": 0.3856, + "step": 11277 + }, + { + "epoch": 0.783085682544091, + "grad_norm": 4.2236849045680405, + "learning_rate": 1.1841111331994232e-06, + "loss": 0.3211, + "step": 11278 + }, + { + "epoch": 0.7831551173448132, + "grad_norm": 4.0957971864797855, + "learning_rate": 1.1833845988429087e-06, + "loss": 0.3555, + "step": 11279 + }, + { + "epoch": 0.7832245521455353, + "grad_norm": 3.3012225863481226, + "learning_rate": 1.1826582575264829e-06, + "loss": 0.2574, + "step": 11280 + }, + { + "epoch": 0.7832939869462575, + "grad_norm": 4.049365111882, + "learning_rate": 1.1819321092868824e-06, + "loss": 0.4068, + "step": 11281 + }, + { + "epoch": 0.7833634217469796, + "grad_norm": 4.329727190282177, + "learning_rate": 1.1812061541608356e-06, + "loss": 0.5488, + "step": 11282 + }, + { + "epoch": 0.7834328565477017, + "grad_norm": 3.062418882354128, + "learning_rate": 1.18048039218506e-06, + "loss": 0.2609, + "step": 11283 + }, + { + "epoch": 0.7835022913484239, + "grad_norm": 2.50708345205818, + "learning_rate": 1.1797548233962624e-06, + "loss": 0.225, + "step": 11284 + }, + { + "epoch": 0.7835717261491459, + "grad_norm": 4.166137544973621, + "learning_rate": 1.1790294478311465e-06, + "loss": 0.4001, + "step": 11285 + }, + { + "epoch": 
0.783641160949868, + "grad_norm": 3.041044855762488, + "learning_rate": 1.1783042655263955e-06, + "loss": 0.2213, + "step": 11286 + }, + { + "epoch": 0.7837105957505902, + "grad_norm": 4.8354025596662105, + "learning_rate": 1.1775792765186888e-06, + "loss": 0.455, + "step": 11287 + }, + { + "epoch": 0.7837800305513123, + "grad_norm": 5.318005937227194, + "learning_rate": 1.1768544808446985e-06, + "loss": 0.6227, + "step": 11288 + }, + { + "epoch": 0.7838494653520345, + "grad_norm": 4.203548368096632, + "learning_rate": 1.1761298785410836e-06, + "loss": 0.4818, + "step": 11289 + }, + { + "epoch": 0.7839189001527566, + "grad_norm": 3.7041946831282875, + "learning_rate": 1.17540546964449e-06, + "loss": 0.4006, + "step": 11290 + }, + { + "epoch": 0.7839883349534786, + "grad_norm": 3.525087539162239, + "learning_rate": 1.1746812541915609e-06, + "loss": 0.2785, + "step": 11291 + }, + { + "epoch": 0.7840577697542008, + "grad_norm": 3.6946546337740385, + "learning_rate": 1.1739572322189269e-06, + "loss": 0.2869, + "step": 11292 + }, + { + "epoch": 0.7841272045549229, + "grad_norm": 2.9358199681807786, + "learning_rate": 1.1732334037632038e-06, + "loss": 0.1875, + "step": 11293 + }, + { + "epoch": 0.7841966393556451, + "grad_norm": 4.1319814146350495, + "learning_rate": 1.1725097688610065e-06, + "loss": 0.5681, + "step": 11294 + }, + { + "epoch": 0.7842660741563672, + "grad_norm": 5.23825998443839, + "learning_rate": 1.1717863275489339e-06, + "loss": 0.6425, + "step": 11295 + }, + { + "epoch": 0.7843355089570893, + "grad_norm": 3.7781263827885163, + "learning_rate": 1.1710630798635765e-06, + "loss": 0.3648, + "step": 11296 + }, + { + "epoch": 0.7844049437578114, + "grad_norm": 5.083380049675135, + "learning_rate": 1.1703400258415159e-06, + "loss": 0.5372, + "step": 11297 + }, + { + "epoch": 0.7844743785585335, + "grad_norm": 4.759762034732927, + "learning_rate": 1.1696171655193238e-06, + "loss": 0.5125, + "step": 11298 + }, + { + "epoch": 0.7845438133592556, + "grad_norm": 2.4277255079905204, + "learning_rate": 1.168894498933561e-06, + "loss": 0.1963, + "step": 11299 + }, + { + "epoch": 0.7846132481599778, + "grad_norm": 3.7366891157357665, + "learning_rate": 1.1681720261207785e-06, + "loss": 0.4515, + "step": 11300 + }, + { + "epoch": 0.7846826829606999, + "grad_norm": 4.694677896883966, + "learning_rate": 1.1674497471175194e-06, + "loss": 0.4774, + "step": 11301 + }, + { + "epoch": 0.7847521177614221, + "grad_norm": 3.6119023036919145, + "learning_rate": 1.1667276619603141e-06, + "loss": 0.2631, + "step": 11302 + }, + { + "epoch": 0.7848215525621441, + "grad_norm": 3.5973027115586804, + "learning_rate": 1.1660057706856887e-06, + "loss": 0.3789, + "step": 11303 + }, + { + "epoch": 0.7848909873628662, + "grad_norm": 3.296962565143426, + "learning_rate": 1.1652840733301512e-06, + "loss": 0.2093, + "step": 11304 + }, + { + "epoch": 0.7849604221635884, + "grad_norm": 3.294077115621472, + "learning_rate": 1.164562569930205e-06, + "loss": 0.3254, + "step": 11305 + }, + { + "epoch": 0.7850298569643105, + "grad_norm": 2.866217880312782, + "learning_rate": 1.1638412605223465e-06, + "loss": 0.2866, + "step": 11306 + }, + { + "epoch": 0.7850992917650327, + "grad_norm": 3.5844885014074737, + "learning_rate": 1.1631201451430535e-06, + "loss": 0.4526, + "step": 11307 + }, + { + "epoch": 0.7851687265657548, + "grad_norm": 4.475695810219736, + "learning_rate": 1.1623992238288034e-06, + "loss": 0.4257, + "step": 11308 + }, + { + "epoch": 0.7852381613664768, + "grad_norm": 4.246499966671158, + "learning_rate": 
1.1616784966160576e-06, + "loss": 0.4862, + "step": 11309 + }, + { + "epoch": 0.785307596167199, + "grad_norm": 2.7874084214166017, + "learning_rate": 1.1609579635412704e-06, + "loss": 0.3554, + "step": 11310 + }, + { + "epoch": 0.7853770309679211, + "grad_norm": 4.248810921379555, + "learning_rate": 1.1602376246408853e-06, + "loss": 0.4654, + "step": 11311 + }, + { + "epoch": 0.7854464657686433, + "grad_norm": 3.507245997760363, + "learning_rate": 1.1595174799513358e-06, + "loss": 0.338, + "step": 11312 + }, + { + "epoch": 0.7855159005693654, + "grad_norm": 3.0467589220948055, + "learning_rate": 1.1587975295090465e-06, + "loss": 0.3589, + "step": 11313 + }, + { + "epoch": 0.7855853353700875, + "grad_norm": 3.3511849244859837, + "learning_rate": 1.1580777733504316e-06, + "loss": 0.282, + "step": 11314 + }, + { + "epoch": 0.7856547701708096, + "grad_norm": 3.8506623318471362, + "learning_rate": 1.1573582115118953e-06, + "loss": 0.5069, + "step": 11315 + }, + { + "epoch": 0.7857242049715317, + "grad_norm": 4.257211457454584, + "learning_rate": 1.156638844029831e-06, + "loss": 0.5351, + "step": 11316 + }, + { + "epoch": 0.7857936397722538, + "grad_norm": 3.688456548373987, + "learning_rate": 1.1559196709406284e-06, + "loss": 0.3937, + "step": 11317 + }, + { + "epoch": 0.785863074572976, + "grad_norm": 3.3632369695703925, + "learning_rate": 1.1552006922806569e-06, + "loss": 0.2828, + "step": 11318 + }, + { + "epoch": 0.7859325093736981, + "grad_norm": 4.239737360815985, + "learning_rate": 1.1544819080862823e-06, + "loss": 0.4121, + "step": 11319 + }, + { + "epoch": 0.7860019441744203, + "grad_norm": 2.461831572729932, + "learning_rate": 1.153763318393863e-06, + "loss": 0.2209, + "step": 11320 + }, + { + "epoch": 0.7860713789751423, + "grad_norm": 3.215012840104009, + "learning_rate": 1.153044923239744e-06, + "loss": 0.317, + "step": 11321 + }, + { + "epoch": 0.7861408137758644, + "grad_norm": 2.696100821982559, + "learning_rate": 1.152326722660257e-06, + "loss": 0.2525, + "step": 11322 + }, + { + "epoch": 0.7862102485765866, + "grad_norm": 4.336999945366079, + "learning_rate": 1.1516087166917318e-06, + "loss": 0.3804, + "step": 11323 + }, + { + "epoch": 0.7862796833773087, + "grad_norm": 4.640526743614021, + "learning_rate": 1.1508909053704837e-06, + "loss": 0.4588, + "step": 11324 + }, + { + "epoch": 0.7863491181780309, + "grad_norm": 3.610609363223274, + "learning_rate": 1.150173288732816e-06, + "loss": 0.4553, + "step": 11325 + }, + { + "epoch": 0.786418552978753, + "grad_norm": 5.645895054468631, + "learning_rate": 1.149455866815028e-06, + "loss": 0.5591, + "step": 11326 + }, + { + "epoch": 0.786487987779475, + "grad_norm": 4.992208129795625, + "learning_rate": 1.1487386396534057e-06, + "loss": 0.5609, + "step": 11327 + }, + { + "epoch": 0.7865574225801972, + "grad_norm": 5.7692978063073435, + "learning_rate": 1.1480216072842242e-06, + "loss": 0.4148, + "step": 11328 + }, + { + "epoch": 0.7866268573809193, + "grad_norm": 3.863420327676239, + "learning_rate": 1.147304769743751e-06, + "loss": 0.4899, + "step": 11329 + }, + { + "epoch": 0.7866962921816414, + "grad_norm": 3.383752960351298, + "learning_rate": 1.146588127068244e-06, + "loss": 0.3332, + "step": 11330 + }, + { + "epoch": 0.7867657269823636, + "grad_norm": 4.739458440692642, + "learning_rate": 1.1458716792939477e-06, + "loss": 0.5221, + "step": 11331 + }, + { + "epoch": 0.7868351617830857, + "grad_norm": 3.6766396762620577, + "learning_rate": 1.145155426457102e-06, + "loss": 0.3528, + "step": 11332 + }, + { + "epoch": 
0.7869045965838078, + "grad_norm": 3.3785907412123586, + "learning_rate": 1.1444393685939315e-06, + "loss": 0.3226, + "step": 11333 + }, + { + "epoch": 0.7869740313845299, + "grad_norm": 4.114867066639711, + "learning_rate": 1.1437235057406543e-06, + "loss": 0.5705, + "step": 11334 + }, + { + "epoch": 0.787043466185252, + "grad_norm": 4.188706201760283, + "learning_rate": 1.1430078379334808e-06, + "loss": 0.6575, + "step": 11335 + }, + { + "epoch": 0.7871129009859742, + "grad_norm": 2.8431987406723107, + "learning_rate": 1.1422923652086043e-06, + "loss": 0.3519, + "step": 11336 + }, + { + "epoch": 0.7871823357866963, + "grad_norm": 3.4195178006511835, + "learning_rate": 1.1415770876022152e-06, + "loss": 0.3554, + "step": 11337 + }, + { + "epoch": 0.7872517705874185, + "grad_norm": 3.5678878208596907, + "learning_rate": 1.1408620051504916e-06, + "loss": 0.3064, + "step": 11338 + }, + { + "epoch": 0.7873212053881405, + "grad_norm": 3.4653970757440975, + "learning_rate": 1.1401471178896006e-06, + "loss": 0.329, + "step": 11339 + }, + { + "epoch": 0.7873906401888626, + "grad_norm": 4.0258921715810265, + "learning_rate": 1.1394324258557005e-06, + "loss": 0.4223, + "step": 11340 + }, + { + "epoch": 0.7874600749895848, + "grad_norm": 4.6370837855410105, + "learning_rate": 1.1387179290849397e-06, + "loss": 0.5369, + "step": 11341 + }, + { + "epoch": 0.7875295097903069, + "grad_norm": 4.634735910484719, + "learning_rate": 1.1380036276134571e-06, + "loss": 0.6078, + "step": 11342 + }, + { + "epoch": 0.787598944591029, + "grad_norm": 4.795402388224027, + "learning_rate": 1.1372895214773805e-06, + "loss": 0.5842, + "step": 11343 + }, + { + "epoch": 0.7876683793917512, + "grad_norm": 3.9914281449789253, + "learning_rate": 1.1365756107128296e-06, + "loss": 0.4008, + "step": 11344 + }, + { + "epoch": 0.7877378141924732, + "grad_norm": 2.978236126373611, + "learning_rate": 1.1358618953559103e-06, + "loss": 0.4343, + "step": 11345 + }, + { + "epoch": 0.7878072489931954, + "grad_norm": 3.14465639920799, + "learning_rate": 1.1351483754427274e-06, + "loss": 0.3717, + "step": 11346 + }, + { + "epoch": 0.7878766837939175, + "grad_norm": 4.0801536124238345, + "learning_rate": 1.1344350510093642e-06, + "loss": 0.582, + "step": 11347 + }, + { + "epoch": 0.7879461185946396, + "grad_norm": 2.1096445705145035, + "learning_rate": 1.1337219220919011e-06, + "loss": 0.1528, + "step": 11348 + }, + { + "epoch": 0.7880155533953618, + "grad_norm": 4.069408588990413, + "learning_rate": 1.1330089887264106e-06, + "loss": 0.2747, + "step": 11349 + }, + { + "epoch": 0.7880849881960839, + "grad_norm": 3.135354718168621, + "learning_rate": 1.1322962509489483e-06, + "loss": 0.292, + "step": 11350 + }, + { + "epoch": 0.788154422996806, + "grad_norm": 3.818235705967781, + "learning_rate": 1.1315837087955633e-06, + "loss": 0.2741, + "step": 11351 + }, + { + "epoch": 0.7882238577975281, + "grad_norm": 3.9448328600431477, + "learning_rate": 1.1308713623022988e-06, + "loss": 0.417, + "step": 11352 + }, + { + "epoch": 0.7882932925982502, + "grad_norm": 4.2721714069369465, + "learning_rate": 1.130159211505184e-06, + "loss": 0.463, + "step": 11353 + }, + { + "epoch": 0.7883627273989724, + "grad_norm": 4.563973400391536, + "learning_rate": 1.1294472564402342e-06, + "loss": 0.404, + "step": 11354 + }, + { + "epoch": 0.7884321621996945, + "grad_norm": 5.477863687049568, + "learning_rate": 1.1287354971434639e-06, + "loss": 0.5807, + "step": 11355 + }, + { + "epoch": 0.7885015970004166, + "grad_norm": 3.9311900698928564, + "learning_rate": 
1.1280239336508713e-06, + "loss": 0.4953, + "step": 11356 + }, + { + "epoch": 0.7885710318011387, + "grad_norm": 5.308933016813782, + "learning_rate": 1.1273125659984468e-06, + "loss": 0.5735, + "step": 11357 + }, + { + "epoch": 0.7886404666018608, + "grad_norm": 14.351149690961492, + "learning_rate": 1.126601394222171e-06, + "loss": 0.4252, + "step": 11358 + }, + { + "epoch": 0.788709901402583, + "grad_norm": 5.846547295406732, + "learning_rate": 1.1258904183580127e-06, + "loss": 0.4391, + "step": 11359 + }, + { + "epoch": 0.7887793362033051, + "grad_norm": 4.9560155367976835, + "learning_rate": 1.1251796384419338e-06, + "loss": 0.7574, + "step": 11360 + }, + { + "epoch": 0.7888487710040272, + "grad_norm": 3.9002003616817498, + "learning_rate": 1.1244690545098842e-06, + "loss": 0.3342, + "step": 11361 + }, + { + "epoch": 0.7889182058047494, + "grad_norm": 3.464082850667971, + "learning_rate": 1.123758666597804e-06, + "loss": 0.3157, + "step": 11362 + }, + { + "epoch": 0.7889876406054714, + "grad_norm": 3.075223352038389, + "learning_rate": 1.1230484747416233e-06, + "loss": 0.1811, + "step": 11363 + }, + { + "epoch": 0.7890570754061936, + "grad_norm": 2.5720711961771316, + "learning_rate": 1.1223384789772656e-06, + "loss": 0.2618, + "step": 11364 + }, + { + "epoch": 0.7891265102069157, + "grad_norm": 3.402149815148726, + "learning_rate": 1.1216286793406378e-06, + "loss": 0.2784, + "step": 11365 + }, + { + "epoch": 0.7891959450076378, + "grad_norm": 5.136218681265127, + "learning_rate": 1.1209190758676436e-06, + "loss": 0.6989, + "step": 11366 + }, + { + "epoch": 0.78926537980836, + "grad_norm": 3.4413046765409376, + "learning_rate": 1.120209668594175e-06, + "loss": 0.2925, + "step": 11367 + }, + { + "epoch": 0.7893348146090821, + "grad_norm": 4.403634189919041, + "learning_rate": 1.1195004575561075e-06, + "loss": 0.4813, + "step": 11368 + }, + { + "epoch": 0.7894042494098042, + "grad_norm": 4.732150795275055, + "learning_rate": 1.1187914427893177e-06, + "loss": 0.5689, + "step": 11369 + }, + { + "epoch": 0.7894736842105263, + "grad_norm": 2.9846277896872633, + "learning_rate": 1.118082624329665e-06, + "loss": 0.3565, + "step": 11370 + }, + { + "epoch": 0.7895431190112484, + "grad_norm": 2.8662150701869322, + "learning_rate": 1.1173740022129998e-06, + "loss": 0.3794, + "step": 11371 + }, + { + "epoch": 0.7896125538119706, + "grad_norm": 4.214248067582184, + "learning_rate": 1.1166655764751648e-06, + "loss": 0.3747, + "step": 11372 + }, + { + "epoch": 0.7896819886126927, + "grad_norm": 3.7382274766875625, + "learning_rate": 1.1159573471519903e-06, + "loss": 0.3726, + "step": 11373 + }, + { + "epoch": 0.7897514234134148, + "grad_norm": 3.7965082916418997, + "learning_rate": 1.115249314279298e-06, + "loss": 0.3797, + "step": 11374 + }, + { + "epoch": 0.789820858214137, + "grad_norm": 3.3573902100911286, + "learning_rate": 1.1145414778928999e-06, + "loss": 0.3044, + "step": 11375 + }, + { + "epoch": 0.789890293014859, + "grad_norm": 3.485567226242866, + "learning_rate": 1.1138338380285969e-06, + "loss": 0.4248, + "step": 11376 + }, + { + "epoch": 0.7899597278155812, + "grad_norm": 3.709787352114896, + "learning_rate": 1.1131263947221792e-06, + "loss": 0.4199, + "step": 11377 + }, + { + "epoch": 0.7900291626163033, + "grad_norm": 2.241648028198475, + "learning_rate": 1.1124191480094331e-06, + "loss": 0.174, + "step": 11378 + }, + { + "epoch": 0.7900985974170254, + "grad_norm": 5.6160220069789775, + "learning_rate": 1.1117120979261264e-06, + "loss": 0.5943, + "step": 11379 + }, + { + "epoch": 
0.7901680322177476, + "grad_norm": 3.0222569566143624, + "learning_rate": 1.1110052445080205e-06, + "loss": 0.336, + "step": 11380 + }, + { + "epoch": 0.7902374670184696, + "grad_norm": 5.799603260903549, + "learning_rate": 1.1102985877908707e-06, + "loss": 0.6521, + "step": 11381 + }, + { + "epoch": 0.7903069018191918, + "grad_norm": 5.239861649730828, + "learning_rate": 1.109592127810416e-06, + "loss": 0.6127, + "step": 11382 + }, + { + "epoch": 0.7903763366199139, + "grad_norm": 4.202794597479939, + "learning_rate": 1.1088858646023877e-06, + "loss": 0.4555, + "step": 11383 + }, + { + "epoch": 0.790445771420636, + "grad_norm": 5.2336135889429904, + "learning_rate": 1.1081797982025106e-06, + "loss": 0.639, + "step": 11384 + }, + { + "epoch": 0.7905152062213582, + "grad_norm": 5.447836132448847, + "learning_rate": 1.107473928646497e-06, + "loss": 0.3724, + "step": 11385 + }, + { + "epoch": 0.7905846410220803, + "grad_norm": 3.770882545042695, + "learning_rate": 1.1067682559700442e-06, + "loss": 0.393, + "step": 11386 + }, + { + "epoch": 0.7906540758228023, + "grad_norm": 5.588764071768843, + "learning_rate": 1.1060627802088497e-06, + "loss": 0.5672, + "step": 11387 + }, + { + "epoch": 0.7907235106235245, + "grad_norm": 5.192479853214447, + "learning_rate": 1.1053575013985929e-06, + "loss": 0.6306, + "step": 11388 + }, + { + "epoch": 0.7907929454242466, + "grad_norm": 3.221832961800813, + "learning_rate": 1.1046524195749463e-06, + "loss": 0.2934, + "step": 11389 + }, + { + "epoch": 0.7908623802249688, + "grad_norm": 4.050030552807425, + "learning_rate": 1.103947534773573e-06, + "loss": 0.479, + "step": 11390 + }, + { + "epoch": 0.7909318150256909, + "grad_norm": 3.056292858572746, + "learning_rate": 1.1032428470301243e-06, + "loss": 0.2265, + "step": 11391 + }, + { + "epoch": 0.791001249826413, + "grad_norm": 5.587868980434448, + "learning_rate": 1.1025383563802433e-06, + "loss": 0.3016, + "step": 11392 + }, + { + "epoch": 0.7910706846271351, + "grad_norm": 3.8111147807601755, + "learning_rate": 1.101834062859562e-06, + "loss": 0.3732, + "step": 11393 + }, + { + "epoch": 0.7911401194278572, + "grad_norm": 4.111932189847655, + "learning_rate": 1.1011299665037023e-06, + "loss": 0.4646, + "step": 11394 + }, + { + "epoch": 0.7912095542285794, + "grad_norm": 4.655663501246254, + "learning_rate": 1.100426067348276e-06, + "loss": 0.4728, + "step": 11395 + }, + { + "epoch": 0.7912789890293015, + "grad_norm": 4.342257963490471, + "learning_rate": 1.0997223654288892e-06, + "loss": 0.4918, + "step": 11396 + }, + { + "epoch": 0.7913484238300236, + "grad_norm": 4.022621557854533, + "learning_rate": 1.0990188607811287e-06, + "loss": 0.4864, + "step": 11397 + }, + { + "epoch": 0.7914178586307458, + "grad_norm": 4.223223567335567, + "learning_rate": 1.0983155534405816e-06, + "loss": 0.4457, + "step": 11398 + }, + { + "epoch": 0.7914872934314678, + "grad_norm": 4.853320475448637, + "learning_rate": 1.0976124434428204e-06, + "loss": 0.5384, + "step": 11399 + }, + { + "epoch": 0.7915567282321899, + "grad_norm": 3.062076961081114, + "learning_rate": 1.0969095308234023e-06, + "loss": 0.202, + "step": 11400 + }, + { + "epoch": 0.7916261630329121, + "grad_norm": 3.951845544263266, + "learning_rate": 1.0962068156178856e-06, + "loss": 0.5427, + "step": 11401 + }, + { + "epoch": 0.7916955978336342, + "grad_norm": 4.708205552598833, + "learning_rate": 1.0955042978618109e-06, + "loss": 0.7011, + "step": 11402 + }, + { + "epoch": 0.7917650326343564, + "grad_norm": 2.093864122244701, + "learning_rate": 
1.09480197759071e-06, + "loss": 0.2048, + "step": 11403 + }, + { + "epoch": 0.7918344674350785, + "grad_norm": 11.81141771187345, + "learning_rate": 1.094099854840106e-06, + "loss": 0.3428, + "step": 11404 + }, + { + "epoch": 0.7919039022358005, + "grad_norm": 5.09072913830011, + "learning_rate": 1.0933979296455122e-06, + "loss": 0.4202, + "step": 11405 + }, + { + "epoch": 0.7919733370365227, + "grad_norm": 2.478239817528434, + "learning_rate": 1.0926962020424297e-06, + "loss": 0.2247, + "step": 11406 + }, + { + "epoch": 0.7920427718372448, + "grad_norm": 4.63469623575284, + "learning_rate": 1.0919946720663528e-06, + "loss": 0.4647, + "step": 11407 + }, + { + "epoch": 0.792112206637967, + "grad_norm": 3.532954161080818, + "learning_rate": 1.0912933397527625e-06, + "loss": 0.3081, + "step": 11408 + }, + { + "epoch": 0.7921816414386891, + "grad_norm": 3.9391903919124376, + "learning_rate": 1.090592205137131e-06, + "loss": 0.6705, + "step": 11409 + }, + { + "epoch": 0.7922510762394112, + "grad_norm": 4.012845833437784, + "learning_rate": 1.0898912682549245e-06, + "loss": 0.5821, + "step": 11410 + }, + { + "epoch": 0.7923205110401333, + "grad_norm": 3.5590552808185882, + "learning_rate": 1.0891905291415917e-06, + "loss": 0.2403, + "step": 11411 + }, + { + "epoch": 0.7923899458408554, + "grad_norm": 4.323847562142719, + "learning_rate": 1.088489987832575e-06, + "loss": 0.4675, + "step": 11412 + }, + { + "epoch": 0.7924593806415775, + "grad_norm": 4.749864319496748, + "learning_rate": 1.0877896443633118e-06, + "loss": 0.4445, + "step": 11413 + }, + { + "epoch": 0.7925288154422997, + "grad_norm": 4.231018638367464, + "learning_rate": 1.0870894987692198e-06, + "loss": 0.5556, + "step": 11414 + }, + { + "epoch": 0.7925982502430218, + "grad_norm": 3.434825712017066, + "learning_rate": 1.0863895510857119e-06, + "loss": 0.3795, + "step": 11415 + }, + { + "epoch": 0.792667685043744, + "grad_norm": 3.82233143463189, + "learning_rate": 1.0856898013481932e-06, + "loss": 0.3502, + "step": 11416 + }, + { + "epoch": 0.792737119844466, + "grad_norm": 3.3538319026804517, + "learning_rate": 1.084990249592055e-06, + "loss": 0.2732, + "step": 11417 + }, + { + "epoch": 0.7928065546451881, + "grad_norm": 4.087743133779592, + "learning_rate": 1.08429089585268e-06, + "loss": 0.3376, + "step": 11418 + }, + { + "epoch": 0.7928759894459103, + "grad_norm": 3.074319084527011, + "learning_rate": 1.0835917401654406e-06, + "loss": 0.4395, + "step": 11419 + }, + { + "epoch": 0.7929454242466324, + "grad_norm": 3.8185262962715427, + "learning_rate": 1.0828927825656987e-06, + "loss": 0.311, + "step": 11420 + }, + { + "epoch": 0.7930148590473546, + "grad_norm": 5.014484069464639, + "learning_rate": 1.0821940230888078e-06, + "loss": 0.5815, + "step": 11421 + }, + { + "epoch": 0.7930842938480767, + "grad_norm": 4.698885415166073, + "learning_rate": 1.0814954617701102e-06, + "loss": 0.5449, + "step": 11422 + }, + { + "epoch": 0.7931537286487987, + "grad_norm": 4.7217518995994485, + "learning_rate": 1.0807970986449378e-06, + "loss": 0.4832, + "step": 11423 + }, + { + "epoch": 0.7932231634495209, + "grad_norm": 3.672152802213499, + "learning_rate": 1.0800989337486129e-06, + "loss": 0.3005, + "step": 11424 + }, + { + "epoch": 0.793292598250243, + "grad_norm": 4.390104747309561, + "learning_rate": 1.0794009671164484e-06, + "loss": 0.5253, + "step": 11425 + }, + { + "epoch": 0.7933620330509652, + "grad_norm": 2.9158674915322536, + "learning_rate": 1.0787031987837455e-06, + "loss": 0.3402, + "step": 11426 + }, + { + "epoch": 
0.7934314678516873, + "grad_norm": 3.8840060934394676, + "learning_rate": 1.0780056287857988e-06, + "loss": 0.3336, + "step": 11427 + }, + { + "epoch": 0.7935009026524094, + "grad_norm": 4.038687325193779, + "learning_rate": 1.0773082571578908e-06, + "loss": 0.4804, + "step": 11428 + }, + { + "epoch": 0.7935703374531315, + "grad_norm": 4.5654430627550004, + "learning_rate": 1.0766110839352894e-06, + "loss": 0.4577, + "step": 11429 + }, + { + "epoch": 0.7936397722538536, + "grad_norm": 2.203770933775792, + "learning_rate": 1.0759141091532615e-06, + "loss": 0.1397, + "step": 11430 + }, + { + "epoch": 0.7937092070545757, + "grad_norm": 4.95148022480219, + "learning_rate": 1.0752173328470582e-06, + "loss": 0.6316, + "step": 11431 + }, + { + "epoch": 0.7937786418552979, + "grad_norm": 3.2225166551481745, + "learning_rate": 1.0745207550519187e-06, + "loss": 0.461, + "step": 11432 + }, + { + "epoch": 0.79384807665602, + "grad_norm": 4.1472609778762415, + "learning_rate": 1.0738243758030791e-06, + "loss": 0.4408, + "step": 11433 + }, + { + "epoch": 0.7939175114567422, + "grad_norm": 2.3630805842852416, + "learning_rate": 1.0731281951357597e-06, + "loss": 0.2223, + "step": 11434 + }, + { + "epoch": 0.7939869462574642, + "grad_norm": 4.739536938339032, + "learning_rate": 1.0724322130851723e-06, + "loss": 0.407, + "step": 11435 + }, + { + "epoch": 0.7940563810581863, + "grad_norm": 4.022950296477599, + "learning_rate": 1.071736429686519e-06, + "loss": 0.3868, + "step": 11436 + }, + { + "epoch": 0.7941258158589085, + "grad_norm": 3.9784636895685215, + "learning_rate": 1.0710408449749927e-06, + "loss": 0.3614, + "step": 11437 + }, + { + "epoch": 0.7941952506596306, + "grad_norm": 4.914240697310668, + "learning_rate": 1.0703454589857743e-06, + "loss": 0.5893, + "step": 11438 + }, + { + "epoch": 0.7942646854603528, + "grad_norm": 3.047037942776805, + "learning_rate": 1.0696502717540353e-06, + "loss": 0.3215, + "step": 11439 + }, + { + "epoch": 0.7943341202610749, + "grad_norm": 3.604482979723412, + "learning_rate": 1.0689552833149387e-06, + "loss": 0.274, + "step": 11440 + }, + { + "epoch": 0.7944035550617969, + "grad_norm": 4.36668924882485, + "learning_rate": 1.068260493703634e-06, + "loss": 0.4154, + "step": 11441 + }, + { + "epoch": 0.7944729898625191, + "grad_norm": 2.8960732288298026, + "learning_rate": 1.0675659029552672e-06, + "loss": 0.2865, + "step": 11442 + }, + { + "epoch": 0.7945424246632412, + "grad_norm": 4.29806870705308, + "learning_rate": 1.066871511104966e-06, + "loss": 0.4564, + "step": 11443 + }, + { + "epoch": 0.7946118594639633, + "grad_norm": 4.257517142878325, + "learning_rate": 1.0661773181878515e-06, + "loss": 0.3435, + "step": 11444 + }, + { + "epoch": 0.7946812942646855, + "grad_norm": 3.5325034300728864, + "learning_rate": 1.0654833242390383e-06, + "loss": 0.3564, + "step": 11445 + }, + { + "epoch": 0.7947507290654076, + "grad_norm": 2.9374752219689126, + "learning_rate": 1.064789529293626e-06, + "loss": 0.2635, + "step": 11446 + }, + { + "epoch": 0.7948201638661297, + "grad_norm": 3.7010546671834534, + "learning_rate": 1.0640959333867068e-06, + "loss": 0.3089, + "step": 11447 + }, + { + "epoch": 0.7948895986668518, + "grad_norm": 3.4616129455471465, + "learning_rate": 1.0634025365533617e-06, + "loss": 0.3069, + "step": 11448 + }, + { + "epoch": 0.7949590334675739, + "grad_norm": 4.411756599817269, + "learning_rate": 1.0627093388286614e-06, + "loss": 0.3523, + "step": 11449 + }, + { + "epoch": 0.7950284682682961, + "grad_norm": 7.70337993809111, + "learning_rate": 
1.0620163402476675e-06, + "loss": 0.6194, + "step": 11450 + }, + { + "epoch": 0.7950979030690182, + "grad_norm": 4.783363352291953, + "learning_rate": 1.061323540845431e-06, + "loss": 0.6487, + "step": 11451 + }, + { + "epoch": 0.7951673378697404, + "grad_norm": 4.784237460139205, + "learning_rate": 1.0606309406569925e-06, + "loss": 0.4299, + "step": 11452 + }, + { + "epoch": 0.7952367726704624, + "grad_norm": 3.9141149635222896, + "learning_rate": 1.0599385397173833e-06, + "loss": 0.3801, + "step": 11453 + }, + { + "epoch": 0.7953062074711845, + "grad_norm": 4.249914157108824, + "learning_rate": 1.0592463380616247e-06, + "loss": 0.5519, + "step": 11454 + }, + { + "epoch": 0.7953756422719067, + "grad_norm": 3.2260450781861127, + "learning_rate": 1.0585543357247258e-06, + "loss": 0.3482, + "step": 11455 + }, + { + "epoch": 0.7954450770726288, + "grad_norm": 3.8245202742903097, + "learning_rate": 1.0578625327416907e-06, + "loss": 0.4916, + "step": 11456 + }, + { + "epoch": 0.7955145118733509, + "grad_norm": 3.276290151244739, + "learning_rate": 1.0571709291475073e-06, + "loss": 0.1266, + "step": 11457 + }, + { + "epoch": 0.7955839466740731, + "grad_norm": 3.9858262208845963, + "learning_rate": 1.0564795249771548e-06, + "loss": 0.4636, + "step": 11458 + }, + { + "epoch": 0.7956533814747951, + "grad_norm": 3.2241374148736157, + "learning_rate": 1.0557883202656072e-06, + "loss": 0.4098, + "step": 11459 + }, + { + "epoch": 0.7957228162755173, + "grad_norm": 3.721026150061444, + "learning_rate": 1.0550973150478239e-06, + "loss": 0.3027, + "step": 11460 + }, + { + "epoch": 0.7957922510762394, + "grad_norm": 4.027021454197483, + "learning_rate": 1.0544065093587524e-06, + "loss": 0.5003, + "step": 11461 + }, + { + "epoch": 0.7958616858769615, + "grad_norm": 3.0545596024797854, + "learning_rate": 1.0537159032333361e-06, + "loss": 0.3103, + "step": 11462 + }, + { + "epoch": 0.7959311206776837, + "grad_norm": 3.287583059696945, + "learning_rate": 1.0530254967065052e-06, + "loss": 0.2832, + "step": 11463 + }, + { + "epoch": 0.7960005554784058, + "grad_norm": 2.809480189484644, + "learning_rate": 1.0523352898131756e-06, + "loss": 0.2993, + "step": 11464 + }, + { + "epoch": 0.796069990279128, + "grad_norm": 5.294044879190656, + "learning_rate": 1.0516452825882612e-06, + "loss": 0.5479, + "step": 11465 + }, + { + "epoch": 0.79613942507985, + "grad_norm": 3.2006299585355977, + "learning_rate": 1.0509554750666606e-06, + "loss": 0.2543, + "step": 11466 + }, + { + "epoch": 0.7962088598805721, + "grad_norm": 4.325927957459826, + "learning_rate": 1.0502658672832634e-06, + "loss": 0.4231, + "step": 11467 + }, + { + "epoch": 0.7962782946812943, + "grad_norm": 3.9752954683958612, + "learning_rate": 1.0495764592729495e-06, + "loss": 0.4619, + "step": 11468 + }, + { + "epoch": 0.7963477294820164, + "grad_norm": 3.118831831158478, + "learning_rate": 1.0488872510705878e-06, + "loss": 0.1781, + "step": 11469 + }, + { + "epoch": 0.7964171642827385, + "grad_norm": 3.448943107949009, + "learning_rate": 1.0481982427110365e-06, + "loss": 0.3753, + "step": 11470 + }, + { + "epoch": 0.7964865990834606, + "grad_norm": 3.8826892168531795, + "learning_rate": 1.0475094342291491e-06, + "loss": 0.2379, + "step": 11471 + }, + { + "epoch": 0.7965560338841827, + "grad_norm": 4.744012162350741, + "learning_rate": 1.046820825659761e-06, + "loss": 0.6411, + "step": 11472 + }, + { + "epoch": 0.7966254686849049, + "grad_norm": 6.792380990464082, + "learning_rate": 1.0461324170377008e-06, + "loss": 0.4343, + "step": 11473 + }, + { + "epoch": 
0.796694903485627, + "grad_norm": 2.177860755370552, + "learning_rate": 1.045444208397791e-06, + "loss": 0.1457, + "step": 11474 + }, + { + "epoch": 0.7967643382863491, + "grad_norm": 3.594035678903114, + "learning_rate": 1.0447561997748373e-06, + "loss": 0.3707, + "step": 11475 + }, + { + "epoch": 0.7968337730870713, + "grad_norm": 3.721335180375672, + "learning_rate": 1.044068391203638e-06, + "loss": 0.3936, + "step": 11476 + }, + { + "epoch": 0.7969032078877933, + "grad_norm": 3.8675002859292134, + "learning_rate": 1.0433807827189846e-06, + "loss": 0.4478, + "step": 11477 + }, + { + "epoch": 0.7969726426885155, + "grad_norm": 3.5134327960748286, + "learning_rate": 1.042693374355654e-06, + "loss": 0.2533, + "step": 11478 + }, + { + "epoch": 0.7970420774892376, + "grad_norm": 3.8628064983866897, + "learning_rate": 1.0420061661484138e-06, + "loss": 0.5374, + "step": 11479 + }, + { + "epoch": 0.7971115122899597, + "grad_norm": 3.78714073570003, + "learning_rate": 1.0413191581320238e-06, + "loss": 0.4324, + "step": 11480 + }, + { + "epoch": 0.7971809470906819, + "grad_norm": 4.172901718676249, + "learning_rate": 1.0406323503412307e-06, + "loss": 0.4519, + "step": 11481 + }, + { + "epoch": 0.797250381891404, + "grad_norm": 3.8530761067003554, + "learning_rate": 1.0399457428107728e-06, + "loss": 0.4426, + "step": 11482 + }, + { + "epoch": 0.7973198166921261, + "grad_norm": 3.0127014281714994, + "learning_rate": 1.0392593355753788e-06, + "loss": 0.1788, + "step": 11483 + }, + { + "epoch": 0.7973892514928482, + "grad_norm": 4.691397657438658, + "learning_rate": 1.0385731286697653e-06, + "loss": 0.4566, + "step": 11484 + }, + { + "epoch": 0.7974586862935703, + "grad_norm": 4.711840683300377, + "learning_rate": 1.03788712212864e-06, + "loss": 0.6133, + "step": 11485 + }, + { + "epoch": 0.7975281210942925, + "grad_norm": 2.3018658962822935, + "learning_rate": 1.037201315986701e-06, + "loss": 0.1805, + "step": 11486 + }, + { + "epoch": 0.7975975558950146, + "grad_norm": 4.584858143115192, + "learning_rate": 1.0365157102786338e-06, + "loss": 0.6429, + "step": 11487 + }, + { + "epoch": 0.7976669906957367, + "grad_norm": 3.9788503299160687, + "learning_rate": 1.03583030503912e-06, + "loss": 0.597, + "step": 11488 + }, + { + "epoch": 0.7977364254964588, + "grad_norm": 4.3635188043688276, + "learning_rate": 1.0351451003028223e-06, + "loss": 0.5098, + "step": 11489 + }, + { + "epoch": 0.7978058602971809, + "grad_norm": 5.0342324313252655, + "learning_rate": 1.0344600961043971e-06, + "loss": 0.3083, + "step": 11490 + }, + { + "epoch": 0.7978752950979031, + "grad_norm": 4.044708962535251, + "learning_rate": 1.0337752924784943e-06, + "loss": 0.4044, + "step": 11491 + }, + { + "epoch": 0.7979447298986252, + "grad_norm": 4.3344586660248865, + "learning_rate": 1.033090689459751e-06, + "loss": 0.4512, + "step": 11492 + }, + { + "epoch": 0.7980141646993473, + "grad_norm": 3.5979046655383016, + "learning_rate": 1.0324062870827883e-06, + "loss": 0.3249, + "step": 11493 + }, + { + "epoch": 0.7980835995000695, + "grad_norm": 3.399477252365134, + "learning_rate": 1.031722085382228e-06, + "loss": 0.2209, + "step": 11494 + }, + { + "epoch": 0.7981530343007915, + "grad_norm": 4.676724002479445, + "learning_rate": 1.0310380843926737e-06, + "loss": 0.3036, + "step": 11495 + }, + { + "epoch": 0.7982224691015137, + "grad_norm": 4.392197971796721, + "learning_rate": 1.030354284148722e-06, + "loss": 0.4234, + "step": 11496 + }, + { + "epoch": 0.7982919039022358, + "grad_norm": 5.141818290355714, + "learning_rate": 
1.0296706846849591e-06, + "loss": 0.7346, + "step": 11497 + }, + { + "epoch": 0.7983613387029579, + "grad_norm": 4.822164739445092, + "learning_rate": 1.0289872860359596e-06, + "loss": 0.3313, + "step": 11498 + }, + { + "epoch": 0.7984307735036801, + "grad_norm": 4.368802795018902, + "learning_rate": 1.02830408823629e-06, + "loss": 0.3877, + "step": 11499 + }, + { + "epoch": 0.7985002083044022, + "grad_norm": 3.3784158894313605, + "learning_rate": 1.0276210913205053e-06, + "loss": 0.3393, + "step": 11500 + }, + { + "epoch": 0.7985696431051242, + "grad_norm": 4.072258572738818, + "learning_rate": 1.0269382953231504e-06, + "loss": 0.3492, + "step": 11501 + }, + { + "epoch": 0.7986390779058464, + "grad_norm": 15.887596741049208, + "learning_rate": 1.0262557002787599e-06, + "loss": 0.5611, + "step": 11502 + }, + { + "epoch": 0.7987085127065685, + "grad_norm": 5.861608965646581, + "learning_rate": 1.025573306221862e-06, + "loss": 0.7034, + "step": 11503 + }, + { + "epoch": 0.7987779475072907, + "grad_norm": 4.4587152840849225, + "learning_rate": 1.0248911131869677e-06, + "loss": 0.6109, + "step": 11504 + }, + { + "epoch": 0.7988473823080128, + "grad_norm": 2.9232580904068626, + "learning_rate": 1.0242091212085815e-06, + "loss": 0.204, + "step": 11505 + }, + { + "epoch": 0.7989168171087349, + "grad_norm": 3.325370405536152, + "learning_rate": 1.0235273303212013e-06, + "loss": 0.3579, + "step": 11506 + }, + { + "epoch": 0.798986251909457, + "grad_norm": 2.556114828205371, + "learning_rate": 1.0228457405593061e-06, + "loss": 0.2057, + "step": 11507 + }, + { + "epoch": 0.7990556867101791, + "grad_norm": 3.768179106847722, + "learning_rate": 1.0221643519573748e-06, + "loss": 0.4156, + "step": 11508 + }, + { + "epoch": 0.7991251215109013, + "grad_norm": 4.034683191295604, + "learning_rate": 1.0214831645498697e-06, + "loss": 0.3954, + "step": 11509 + }, + { + "epoch": 0.7991945563116234, + "grad_norm": 4.328890217216435, + "learning_rate": 1.0208021783712434e-06, + "loss": 0.697, + "step": 11510 + }, + { + "epoch": 0.7992639911123455, + "grad_norm": 3.7213986630121294, + "learning_rate": 1.02012139345594e-06, + "loss": 0.3362, + "step": 11511 + }, + { + "epoch": 0.7993334259130677, + "grad_norm": 4.239830904993366, + "learning_rate": 1.019440809838394e-06, + "loss": 0.4021, + "step": 11512 + }, + { + "epoch": 0.7994028607137897, + "grad_norm": 2.9098737932753584, + "learning_rate": 1.0187604275530266e-06, + "loss": 0.1892, + "step": 11513 + }, + { + "epoch": 0.7994722955145118, + "grad_norm": 3.6436368058370414, + "learning_rate": 1.018080246634252e-06, + "loss": 0.3348, + "step": 11514 + }, + { + "epoch": 0.799541730315234, + "grad_norm": 4.380265032978523, + "learning_rate": 1.017400267116473e-06, + "loss": 0.4809, + "step": 11515 + }, + { + "epoch": 0.7996111651159561, + "grad_norm": 4.453844726811093, + "learning_rate": 1.0167204890340798e-06, + "loss": 0.4727, + "step": 11516 + }, + { + "epoch": 0.7996805999166783, + "grad_norm": 3.3128742035198178, + "learning_rate": 1.0160409124214604e-06, + "loss": 0.2246, + "step": 11517 + }, + { + "epoch": 0.7997500347174004, + "grad_norm": 3.4579487771258606, + "learning_rate": 1.0153615373129816e-06, + "loss": 0.4841, + "step": 11518 + }, + { + "epoch": 0.7998194695181224, + "grad_norm": 4.023629012495005, + "learning_rate": 1.014682363743006e-06, + "loss": 0.3353, + "step": 11519 + }, + { + "epoch": 0.7998889043188446, + "grad_norm": 3.0430128863745183, + "learning_rate": 1.014003391745888e-06, + "loss": 0.2903, + "step": 11520 + }, + { + "epoch": 
0.7999583391195667, + "grad_norm": 3.2775009879398103, + "learning_rate": 1.01332462135597e-06, + "loss": 0.3423, + "step": 11521 + }, + { + "epoch": 0.8000277739202889, + "grad_norm": 4.5654926420445845, + "learning_rate": 1.0126460526075782e-06, + "loss": 0.4997, + "step": 11522 + }, + { + "epoch": 0.800097208721011, + "grad_norm": 3.821257671084501, + "learning_rate": 1.0119676855350385e-06, + "loss": 0.342, + "step": 11523 + }, + { + "epoch": 0.8001666435217331, + "grad_norm": 3.0939520133261023, + "learning_rate": 1.011289520172662e-06, + "loss": 0.2798, + "step": 11524 + }, + { + "epoch": 0.8002360783224552, + "grad_norm": 4.775305551151815, + "learning_rate": 1.0106115565547446e-06, + "loss": 0.6606, + "step": 11525 + }, + { + "epoch": 0.8003055131231773, + "grad_norm": 3.3817870372304797, + "learning_rate": 1.0099337947155824e-06, + "loss": 0.4102, + "step": 11526 + }, + { + "epoch": 0.8003749479238994, + "grad_norm": 5.530806245459377, + "learning_rate": 1.0092562346894536e-06, + "loss": 0.5367, + "step": 11527 + }, + { + "epoch": 0.8004443827246216, + "grad_norm": 5.0106588370089575, + "learning_rate": 1.0085788765106291e-06, + "loss": 0.4985, + "step": 11528 + }, + { + "epoch": 0.8005138175253437, + "grad_norm": 3.1802391231871656, + "learning_rate": 1.0079017202133678e-06, + "loss": 0.2264, + "step": 11529 + }, + { + "epoch": 0.8005832523260659, + "grad_norm": 3.941094362183221, + "learning_rate": 1.0072247658319206e-06, + "loss": 0.441, + "step": 11530 + }, + { + "epoch": 0.8006526871267879, + "grad_norm": 3.5782637410800726, + "learning_rate": 1.0065480134005261e-06, + "loss": 0.3526, + "step": 11531 + }, + { + "epoch": 0.80072212192751, + "grad_norm": 4.660680256942138, + "learning_rate": 1.0058714629534144e-06, + "loss": 0.4544, + "step": 11532 + }, + { + "epoch": 0.8007915567282322, + "grad_norm": 4.0151450694063, + "learning_rate": 1.0051951145248047e-06, + "loss": 0.484, + "step": 11533 + }, + { + "epoch": 0.8008609915289543, + "grad_norm": 2.6722349607481624, + "learning_rate": 1.0045189681489042e-06, + "loss": 0.2137, + "step": 11534 + }, + { + "epoch": 0.8009304263296765, + "grad_norm": 2.5746218248864623, + "learning_rate": 1.0038430238599156e-06, + "loss": 0.169, + "step": 11535 + }, + { + "epoch": 0.8009998611303986, + "grad_norm": 3.912904184170569, + "learning_rate": 1.0031672816920223e-06, + "loss": 0.4779, + "step": 11536 + }, + { + "epoch": 0.8010692959311206, + "grad_norm": 3.8829982191520056, + "learning_rate": 1.0024917416794068e-06, + "loss": 0.4297, + "step": 11537 + }, + { + "epoch": 0.8011387307318428, + "grad_norm": 4.113685857081385, + "learning_rate": 1.0018164038562362e-06, + "loss": 0.401, + "step": 11538 + }, + { + "epoch": 0.8012081655325649, + "grad_norm": 3.3450687529916854, + "learning_rate": 1.0011412682566656e-06, + "loss": 0.348, + "step": 11539 + }, + { + "epoch": 0.8012776003332871, + "grad_norm": 4.914813644910723, + "learning_rate": 1.0004663349148458e-06, + "loss": 0.5854, + "step": 11540 + }, + { + "epoch": 0.8013470351340092, + "grad_norm": 3.8401678616989856, + "learning_rate": 9.99791603864913e-07, + "loss": 0.3447, + "step": 11541 + }, + { + "epoch": 0.8014164699347313, + "grad_norm": 6.41928258122755, + "learning_rate": 9.99117075140994e-07, + "loss": 0.8809, + "step": 11542 + }, + { + "epoch": 0.8014859047354534, + "grad_norm": 5.16749519424555, + "learning_rate": 9.984427487772064e-07, + "loss": 0.4863, + "step": 11543 + }, + { + "epoch": 0.8015553395361755, + "grad_norm": 3.166091891179892, + "learning_rate": 
9.977686248076562e-07, + "loss": 0.3965, + "step": 11544 + }, + { + "epoch": 0.8016247743368976, + "grad_norm": 10.046057963649357, + "learning_rate": 9.970947032664407e-07, + "loss": 0.6321, + "step": 11545 + }, + { + "epoch": 0.8016942091376198, + "grad_norm": 4.103893759892576, + "learning_rate": 9.96420984187645e-07, + "loss": 0.4439, + "step": 11546 + }, + { + "epoch": 0.8017636439383419, + "grad_norm": 3.477348990739748, + "learning_rate": 9.957474676053458e-07, + "loss": 0.4188, + "step": 11547 + }, + { + "epoch": 0.8018330787390641, + "grad_norm": 3.191902572693076, + "learning_rate": 9.950741535536073e-07, + "loss": 0.2284, + "step": 11548 + }, + { + "epoch": 0.8019025135397861, + "grad_norm": 2.91173398693346, + "learning_rate": 9.944010420664884e-07, + "loss": 0.2442, + "step": 11549 + }, + { + "epoch": 0.8019719483405082, + "grad_norm": 3.7648332715260118, + "learning_rate": 9.937281331780313e-07, + "loss": 0.267, + "step": 11550 + }, + { + "epoch": 0.8020413831412304, + "grad_norm": 3.135443969655885, + "learning_rate": 9.930554269222704e-07, + "loss": 0.3643, + "step": 11551 + }, + { + "epoch": 0.8021108179419525, + "grad_norm": 3.3438386090964114, + "learning_rate": 9.923829233332334e-07, + "loss": 0.2683, + "step": 11552 + }, + { + "epoch": 0.8021802527426747, + "grad_norm": 4.194513367440136, + "learning_rate": 9.91710622444934e-07, + "loss": 0.3672, + "step": 11553 + }, + { + "epoch": 0.8022496875433968, + "grad_norm": 7.253803188072973, + "learning_rate": 9.910385242913733e-07, + "loss": 0.5058, + "step": 11554 + }, + { + "epoch": 0.8023191223441188, + "grad_norm": 3.1316357152417953, + "learning_rate": 9.903666289065488e-07, + "loss": 0.3237, + "step": 11555 + }, + { + "epoch": 0.802388557144841, + "grad_norm": 3.615172873264802, + "learning_rate": 9.896949363244446e-07, + "loss": 0.3937, + "step": 11556 + }, + { + "epoch": 0.8024579919455631, + "grad_norm": 4.791242084283195, + "learning_rate": 9.890234465790293e-07, + "loss": 0.6589, + "step": 11557 + }, + { + "epoch": 0.8025274267462852, + "grad_norm": 3.2256680379862277, + "learning_rate": 9.883521597042712e-07, + "loss": 0.3, + "step": 11558 + }, + { + "epoch": 0.8025968615470074, + "grad_norm": 4.313426235128009, + "learning_rate": 9.876810757341215e-07, + "loss": 0.6012, + "step": 11559 + }, + { + "epoch": 0.8026662963477295, + "grad_norm": 3.8489306954683267, + "learning_rate": 9.870101947025223e-07, + "loss": 0.3325, + "step": 11560 + }, + { + "epoch": 0.8027357311484516, + "grad_norm": 4.371860127734536, + "learning_rate": 9.863395166434064e-07, + "loss": 0.4007, + "step": 11561 + }, + { + "epoch": 0.8028051659491737, + "grad_norm": 3.82734081900594, + "learning_rate": 9.856690415906966e-07, + "loss": 0.403, + "step": 11562 + }, + { + "epoch": 0.8028746007498958, + "grad_norm": 3.147724113317814, + "learning_rate": 9.849987695783037e-07, + "loss": 0.1802, + "step": 11563 + }, + { + "epoch": 0.802944035550618, + "grad_norm": 4.6390092822246425, + "learning_rate": 9.843287006401297e-07, + "loss": 0.5143, + "step": 11564 + }, + { + "epoch": 0.8030134703513401, + "grad_norm": 2.8054001100371453, + "learning_rate": 9.836588348100663e-07, + "loss": 0.1695, + "step": 11565 + }, + { + "epoch": 0.8030829051520623, + "grad_norm": 3.754858071446223, + "learning_rate": 9.829891721219925e-07, + "loss": 0.2738, + "step": 11566 + }, + { + "epoch": 0.8031523399527843, + "grad_norm": 3.9757196636384453, + "learning_rate": 9.82319712609784e-07, + "loss": 0.5841, + "step": 11567 + }, + { + "epoch": 0.8032217747535064, + 
"grad_norm": 4.225815402513942, + "learning_rate": 9.816504563072948e-07, + "loss": 0.4895, + "step": 11568 + }, + { + "epoch": 0.8032912095542286, + "grad_norm": 4.015230639118562, + "learning_rate": 9.8098140324838e-07, + "loss": 0.648, + "step": 11569 + }, + { + "epoch": 0.8033606443549507, + "grad_norm": 3.353840486006697, + "learning_rate": 9.803125534668783e-07, + "loss": 0.2915, + "step": 11570 + }, + { + "epoch": 0.8034300791556728, + "grad_norm": 6.491814703848902, + "learning_rate": 9.796439069966191e-07, + "loss": 0.5784, + "step": 11571 + }, + { + "epoch": 0.803499513956395, + "grad_norm": 4.137385459773879, + "learning_rate": 9.78975463871421e-07, + "loss": 0.5446, + "step": 11572 + }, + { + "epoch": 0.803568948757117, + "grad_norm": 3.8500082169425127, + "learning_rate": 9.783072241250947e-07, + "loss": 0.3533, + "step": 11573 + }, + { + "epoch": 0.8036383835578392, + "grad_norm": 3.4543491846657464, + "learning_rate": 9.776391877914376e-07, + "loss": 0.4153, + "step": 11574 + }, + { + "epoch": 0.8037078183585613, + "grad_norm": 5.328030650704951, + "learning_rate": 9.769713549042392e-07, + "loss": 0.5232, + "step": 11575 + }, + { + "epoch": 0.8037772531592834, + "grad_norm": 4.078535209694497, + "learning_rate": 9.763037254972773e-07, + "loss": 0.6775, + "step": 11576 + }, + { + "epoch": 0.8038466879600056, + "grad_norm": 5.0549122117999294, + "learning_rate": 9.756362996043183e-07, + "loss": 0.6467, + "step": 11577 + }, + { + "epoch": 0.8039161227607277, + "grad_norm": 3.251974425842952, + "learning_rate": 9.749690772591243e-07, + "loss": 0.4418, + "step": 11578 + }, + { + "epoch": 0.8039855575614498, + "grad_norm": 3.1682921672171322, + "learning_rate": 9.743020584954376e-07, + "loss": 0.2992, + "step": 11579 + }, + { + "epoch": 0.8040549923621719, + "grad_norm": 4.30418766235949, + "learning_rate": 9.73635243346997e-07, + "loss": 0.4772, + "step": 11580 + }, + { + "epoch": 0.804124427162894, + "grad_norm": 3.6000691224263774, + "learning_rate": 9.729686318475317e-07, + "loss": 0.3605, + "step": 11581 + }, + { + "epoch": 0.8041938619636162, + "grad_norm": 4.560802425802625, + "learning_rate": 9.72302224030755e-07, + "loss": 0.4419, + "step": 11582 + }, + { + "epoch": 0.8042632967643383, + "grad_norm": 3.786641925045908, + "learning_rate": 9.716360199303732e-07, + "loss": 0.517, + "step": 11583 + }, + { + "epoch": 0.8043327315650604, + "grad_norm": 4.234162751089818, + "learning_rate": 9.709700195800836e-07, + "loss": 0.2147, + "step": 11584 + }, + { + "epoch": 0.8044021663657825, + "grad_norm": 3.7024765671675364, + "learning_rate": 9.703042230135735e-07, + "loss": 0.4591, + "step": 11585 + }, + { + "epoch": 0.8044716011665046, + "grad_norm": 3.603791999534332, + "learning_rate": 9.696386302645127e-07, + "loss": 0.4421, + "step": 11586 + }, + { + "epoch": 0.8045410359672268, + "grad_norm": 5.369838582317884, + "learning_rate": 9.689732413665709e-07, + "loss": 0.607, + "step": 11587 + }, + { + "epoch": 0.8046104707679489, + "grad_norm": 4.3393935682961935, + "learning_rate": 9.68308056353401e-07, + "loss": 0.5234, + "step": 11588 + }, + { + "epoch": 0.804679905568671, + "grad_norm": 3.0287443440575013, + "learning_rate": 9.676430752586474e-07, + "loss": 0.2969, + "step": 11589 + }, + { + "epoch": 0.8047493403693932, + "grad_norm": 1.7074565822072896, + "learning_rate": 9.669782981159443e-07, + "loss": 0.102, + "step": 11590 + }, + { + "epoch": 0.8048187751701152, + "grad_norm": 3.9643302225915886, + "learning_rate": 9.66313724958915e-07, + "loss": 0.5384, + "step": 11591 + 
}, + { + "epoch": 0.8048882099708374, + "grad_norm": 3.6072803223760084, + "learning_rate": 9.656493558211731e-07, + "loss": 0.3078, + "step": 11592 + }, + { + "epoch": 0.8049576447715595, + "grad_norm": 3.7011261097383743, + "learning_rate": 9.649851907363216e-07, + "loss": 0.3425, + "step": 11593 + }, + { + "epoch": 0.8050270795722816, + "grad_norm": 4.8053510885454, + "learning_rate": 9.643212297379534e-07, + "loss": 0.5175, + "step": 11594 + }, + { + "epoch": 0.8050965143730038, + "grad_norm": 5.496978489531954, + "learning_rate": 9.636574728596504e-07, + "loss": 0.8954, + "step": 11595 + }, + { + "epoch": 0.8051659491737259, + "grad_norm": 3.6784338389001436, + "learning_rate": 9.629939201349852e-07, + "loss": 0.2611, + "step": 11596 + }, + { + "epoch": 0.8052353839744479, + "grad_norm": 3.6954787107595557, + "learning_rate": 9.62330571597518e-07, + "loss": 0.3939, + "step": 11597 + }, + { + "epoch": 0.8053048187751701, + "grad_norm": 3.718111944899777, + "learning_rate": 9.61667427280803e-07, + "loss": 0.3948, + "step": 11598 + }, + { + "epoch": 0.8053742535758922, + "grad_norm": 3.864399976950323, + "learning_rate": 9.61004487218381e-07, + "loss": 0.5318, + "step": 11599 + }, + { + "epoch": 0.8054436883766144, + "grad_norm": 5.548366395004784, + "learning_rate": 9.60341751443779e-07, + "loss": 0.5521, + "step": 11600 + }, + { + "epoch": 0.8055131231773365, + "grad_norm": 3.8723173766577177, + "learning_rate": 9.596792199905214e-07, + "loss": 0.3937, + "step": 11601 + }, + { + "epoch": 0.8055825579780586, + "grad_norm": 5.539681092755038, + "learning_rate": 9.590168928921168e-07, + "loss": 0.3535, + "step": 11602 + }, + { + "epoch": 0.8056519927787807, + "grad_norm": 4.624906126614333, + "learning_rate": 9.583547701820651e-07, + "loss": 0.6208, + "step": 11603 + }, + { + "epoch": 0.8057214275795028, + "grad_norm": 3.9509392130189913, + "learning_rate": 9.57692851893856e-07, + "loss": 0.1438, + "step": 11604 + }, + { + "epoch": 0.805790862380225, + "grad_norm": 5.576258915275627, + "learning_rate": 9.570311380609675e-07, + "loss": 0.932, + "step": 11605 + }, + { + "epoch": 0.8058602971809471, + "grad_norm": 3.673354287698509, + "learning_rate": 9.5636962871687e-07, + "loss": 0.464, + "step": 11606 + }, + { + "epoch": 0.8059297319816692, + "grad_norm": 3.065061282223472, + "learning_rate": 9.557083238950204e-07, + "loss": 0.2461, + "step": 11607 + }, + { + "epoch": 0.8059991667823914, + "grad_norm": 3.8798516491045025, + "learning_rate": 9.550472236288676e-07, + "loss": 0.4881, + "step": 11608 + }, + { + "epoch": 0.8060686015831134, + "grad_norm": 3.6059036895179437, + "learning_rate": 9.543863279518477e-07, + "loss": 0.4019, + "step": 11609 + }, + { + "epoch": 0.8061380363838356, + "grad_norm": 4.561426022636995, + "learning_rate": 9.537256368973923e-07, + "loss": 0.5414, + "step": 11610 + }, + { + "epoch": 0.8062074711845577, + "grad_norm": 3.2153300802351703, + "learning_rate": 9.530651504989141e-07, + "loss": 0.3647, + "step": 11611 + }, + { + "epoch": 0.8062769059852798, + "grad_norm": 3.9877563144941486, + "learning_rate": 9.524048687898196e-07, + "loss": 0.5099, + "step": 11612 + }, + { + "epoch": 0.806346340786002, + "grad_norm": 3.938540017924112, + "learning_rate": 9.517447918035094e-07, + "loss": 0.3856, + "step": 11613 + }, + { + "epoch": 0.8064157755867241, + "grad_norm": 3.7479109513828486, + "learning_rate": 9.510849195733657e-07, + "loss": 0.4977, + "step": 11614 + }, + { + "epoch": 0.8064852103874461, + "grad_norm": 2.694959370332333, + "learning_rate": 
9.504252521327634e-07, + "loss": 0.2551, + "step": 11615 + }, + { + "epoch": 0.8065546451881683, + "grad_norm": 4.302011083425289, + "learning_rate": 9.497657895150708e-07, + "loss": 0.5392, + "step": 11616 + }, + { + "epoch": 0.8066240799888904, + "grad_norm": 4.670664911306058, + "learning_rate": 9.491065317536413e-07, + "loss": 0.5469, + "step": 11617 + }, + { + "epoch": 0.8066935147896126, + "grad_norm": 7.801537519026751, + "learning_rate": 9.484474788818199e-07, + "loss": 0.376, + "step": 11618 + }, + { + "epoch": 0.8067629495903347, + "grad_norm": 4.555914483239768, + "learning_rate": 9.477886309329404e-07, + "loss": 0.3173, + "step": 11619 + }, + { + "epoch": 0.8068323843910568, + "grad_norm": 3.9229716964260084, + "learning_rate": 9.471299879403262e-07, + "loss": 0.2979, + "step": 11620 + }, + { + "epoch": 0.806901819191779, + "grad_norm": 3.507326065038648, + "learning_rate": 9.464715499372912e-07, + "loss": 0.3474, + "step": 11621 + }, + { + "epoch": 0.806971253992501, + "grad_norm": 3.3112362329966025, + "learning_rate": 9.458133169571382e-07, + "loss": 0.2483, + "step": 11622 + }, + { + "epoch": 0.8070406887932232, + "grad_norm": 2.6122711959537, + "learning_rate": 9.451552890331606e-07, + "loss": 0.2489, + "step": 11623 + }, + { + "epoch": 0.8071101235939453, + "grad_norm": 3.764484858243425, + "learning_rate": 9.444974661986394e-07, + "loss": 0.4488, + "step": 11624 + }, + { + "epoch": 0.8071795583946674, + "grad_norm": 4.1026634358773055, + "learning_rate": 9.438398484868477e-07, + "loss": 0.4714, + "step": 11625 + }, + { + "epoch": 0.8072489931953896, + "grad_norm": 3.6540160689500034, + "learning_rate": 9.431824359310465e-07, + "loss": 0.4947, + "step": 11626 + }, + { + "epoch": 0.8073184279961116, + "grad_norm": 4.289930305180008, + "learning_rate": 9.425252285644853e-07, + "loss": 0.428, + "step": 11627 + }, + { + "epoch": 0.8073878627968337, + "grad_norm": 4.535471518619672, + "learning_rate": 9.418682264204098e-07, + "loss": 0.4267, + "step": 11628 + }, + { + "epoch": 0.8074572975975559, + "grad_norm": 5.2702215849549185, + "learning_rate": 9.412114295320446e-07, + "loss": 0.3765, + "step": 11629 + }, + { + "epoch": 0.807526732398278, + "grad_norm": 3.4126044570094027, + "learning_rate": 9.40554837932614e-07, + "loss": 0.382, + "step": 11630 + }, + { + "epoch": 0.8075961671990002, + "grad_norm": 5.012263533042071, + "learning_rate": 9.398984516553272e-07, + "loss": 0.5361, + "step": 11631 + }, + { + "epoch": 0.8076656019997223, + "grad_norm": 4.153983153121576, + "learning_rate": 9.392422707333798e-07, + "loss": 0.4327, + "step": 11632 + }, + { + "epoch": 0.8077350368004443, + "grad_norm": 3.7945920474430346, + "learning_rate": 9.385862951999647e-07, + "loss": 0.4802, + "step": 11633 + }, + { + "epoch": 0.8078044716011665, + "grad_norm": 3.6051615407851236, + "learning_rate": 9.379305250882586e-07, + "loss": 0.4119, + "step": 11634 + }, + { + "epoch": 0.8078739064018886, + "grad_norm": 2.859589108470037, + "learning_rate": 9.372749604314307e-07, + "loss": 0.2675, + "step": 11635 + }, + { + "epoch": 0.8079433412026108, + "grad_norm": 4.204546198260028, + "learning_rate": 9.366196012626377e-07, + "loss": 0.4848, + "step": 11636 + }, + { + "epoch": 0.8080127760033329, + "grad_norm": 3.7144024992422415, + "learning_rate": 9.35964447615027e-07, + "loss": 0.2744, + "step": 11637 + }, + { + "epoch": 0.808082210804055, + "grad_norm": 4.732323960755227, + "learning_rate": 9.35309499521736e-07, + "loss": 0.3986, + "step": 11638 + }, + { + "epoch": 0.8081516456047771, + 
"grad_norm": 3.5317461726339543, + "learning_rate": 9.346547570158909e-07, + "loss": 0.2648, + "step": 11639 + }, + { + "epoch": 0.8082210804054992, + "grad_norm": 3.115813496848785, + "learning_rate": 9.340002201306081e-07, + "loss": 0.3997, + "step": 11640 + }, + { + "epoch": 0.8082905152062213, + "grad_norm": 3.783953906214396, + "learning_rate": 9.333458888989916e-07, + "loss": 0.3858, + "step": 11641 + }, + { + "epoch": 0.8083599500069435, + "grad_norm": 2.922295604064217, + "learning_rate": 9.32691763354141e-07, + "loss": 0.2345, + "step": 11642 + }, + { + "epoch": 0.8084293848076656, + "grad_norm": 4.876545781173021, + "learning_rate": 9.320378435291371e-07, + "loss": 0.4578, + "step": 11643 + }, + { + "epoch": 0.8084988196083878, + "grad_norm": 3.9619938530368732, + "learning_rate": 9.313841294570553e-07, + "loss": 0.2778, + "step": 11644 + }, + { + "epoch": 0.8085682544091098, + "grad_norm": 6.291322396612963, + "learning_rate": 9.307306211709621e-07, + "loss": 0.6204, + "step": 11645 + }, + { + "epoch": 0.8086376892098319, + "grad_norm": 3.5499873703287417, + "learning_rate": 9.300773187039086e-07, + "loss": 0.3361, + "step": 11646 + }, + { + "epoch": 0.8087071240105541, + "grad_norm": 3.2739319710833152, + "learning_rate": 9.294242220889371e-07, + "loss": 0.4185, + "step": 11647 + }, + { + "epoch": 0.8087765588112762, + "grad_norm": 3.9672692755125394, + "learning_rate": 9.28771331359084e-07, + "loss": 0.5162, + "step": 11648 + }, + { + "epoch": 0.8088459936119984, + "grad_norm": 4.149445842740863, + "learning_rate": 9.281186465473696e-07, + "loss": 0.361, + "step": 11649 + }, + { + "epoch": 0.8089154284127205, + "grad_norm": 3.9513397070141014, + "learning_rate": 9.274661676868068e-07, + "loss": 0.5013, + "step": 11650 + }, + { + "epoch": 0.8089848632134425, + "grad_norm": 3.12624910204514, + "learning_rate": 9.268138948103966e-07, + "loss": 0.2656, + "step": 11651 + }, + { + "epoch": 0.8090542980141647, + "grad_norm": 3.4371604692683912, + "learning_rate": 9.261618279511309e-07, + "loss": 0.2517, + "step": 11652 + }, + { + "epoch": 0.8091237328148868, + "grad_norm": 3.3888503661696228, + "learning_rate": 9.255099671419898e-07, + "loss": 0.318, + "step": 11653 + }, + { + "epoch": 0.8091931676156089, + "grad_norm": 4.31244501796927, + "learning_rate": 9.248583124159438e-07, + "loss": 0.4893, + "step": 11654 + }, + { + "epoch": 0.8092626024163311, + "grad_norm": 4.231233989460859, + "learning_rate": 9.242068638059531e-07, + "loss": 0.3424, + "step": 11655 + }, + { + "epoch": 0.8093320372170532, + "grad_norm": 4.126074615227126, + "learning_rate": 9.235556213449676e-07, + "loss": 0.3782, + "step": 11656 + }, + { + "epoch": 0.8094014720177753, + "grad_norm": 4.369641489300976, + "learning_rate": 9.229045850659252e-07, + "loss": 0.658, + "step": 11657 + }, + { + "epoch": 0.8094709068184974, + "grad_norm": 5.539542730973939, + "learning_rate": 9.222537550017546e-07, + "loss": 0.6715, + "step": 11658 + }, + { + "epoch": 0.8095403416192195, + "grad_norm": 3.4370301747427825, + "learning_rate": 9.216031311853757e-07, + "loss": 0.3314, + "step": 11659 + }, + { + "epoch": 0.8096097764199417, + "grad_norm": 3.822453272235385, + "learning_rate": 9.209527136496971e-07, + "loss": 0.4535, + "step": 11660 + }, + { + "epoch": 0.8096792112206638, + "grad_norm": 3.963960243724221, + "learning_rate": 9.203025024276119e-07, + "loss": 0.3644, + "step": 11661 + }, + { + "epoch": 0.809748646021386, + "grad_norm": 5.4443674334287415, + "learning_rate": 9.196524975520104e-07, + "loss": 0.4643, + "step": 
11662 + }, + { + "epoch": 0.809818080822108, + "grad_norm": 5.6946014127009645, + "learning_rate": 9.190026990557704e-07, + "loss": 0.558, + "step": 11663 + }, + { + "epoch": 0.8098875156228301, + "grad_norm": 4.1444481753318145, + "learning_rate": 9.183531069717527e-07, + "loss": 0.4244, + "step": 11664 + }, + { + "epoch": 0.8099569504235523, + "grad_norm": 4.59506281635092, + "learning_rate": 9.177037213328177e-07, + "loss": 0.5666, + "step": 11665 + }, + { + "epoch": 0.8100263852242744, + "grad_norm": 4.57369915159194, + "learning_rate": 9.170545421718091e-07, + "loss": 0.5455, + "step": 11666 + }, + { + "epoch": 0.8100958200249966, + "grad_norm": 3.964826243467328, + "learning_rate": 9.164055695215618e-07, + "loss": 0.3261, + "step": 11667 + }, + { + "epoch": 0.8101652548257187, + "grad_norm": 4.232016453162892, + "learning_rate": 9.157568034148995e-07, + "loss": 0.5799, + "step": 11668 + }, + { + "epoch": 0.8102346896264407, + "grad_norm": 7.29902461723007, + "learning_rate": 9.151082438846365e-07, + "loss": 0.5576, + "step": 11669 + }, + { + "epoch": 0.8103041244271629, + "grad_norm": 4.1579209502392995, + "learning_rate": 9.14459890963576e-07, + "loss": 0.4852, + "step": 11670 + }, + { + "epoch": 0.810373559227885, + "grad_norm": 6.591272821066115, + "learning_rate": 9.138117446845113e-07, + "loss": 0.8385, + "step": 11671 + }, + { + "epoch": 0.8104429940286071, + "grad_norm": 4.4263810584227015, + "learning_rate": 9.131638050802249e-07, + "loss": 0.4848, + "step": 11672 + }, + { + "epoch": 0.8105124288293293, + "grad_norm": 3.7437909334607844, + "learning_rate": 9.125160721834875e-07, + "loss": 0.2791, + "step": 11673 + }, + { + "epoch": 0.8105818636300514, + "grad_norm": 3.3582595772695902, + "learning_rate": 9.118685460270638e-07, + "loss": 0.3894, + "step": 11674 + }, + { + "epoch": 0.8106512984307735, + "grad_norm": 2.548255292175957, + "learning_rate": 9.11221226643702e-07, + "loss": 0.2183, + "step": 11675 + }, + { + "epoch": 0.8107207332314956, + "grad_norm": 3.2391437950357416, + "learning_rate": 9.105741140661428e-07, + "loss": 0.3967, + "step": 11676 + }, + { + "epoch": 0.8107901680322177, + "grad_norm": 3.846310177296689, + "learning_rate": 9.099272083271188e-07, + "loss": 0.3998, + "step": 11677 + }, + { + "epoch": 0.8108596028329399, + "grad_norm": 4.330698741297082, + "learning_rate": 9.092805094593476e-07, + "loss": 0.5589, + "step": 11678 + }, + { + "epoch": 0.810929037633662, + "grad_norm": 3.5107798499723555, + "learning_rate": 9.086340174955399e-07, + "loss": 0.2514, + "step": 11679 + }, + { + "epoch": 0.8109984724343842, + "grad_norm": 5.354326672537009, + "learning_rate": 9.079877324683939e-07, + "loss": 0.655, + "step": 11680 + }, + { + "epoch": 0.8110679072351062, + "grad_norm": 2.853083942661186, + "learning_rate": 9.073416544105978e-07, + "loss": 0.1361, + "step": 11681 + }, + { + "epoch": 0.8111373420358283, + "grad_norm": 4.161348763136716, + "learning_rate": 9.066957833548295e-07, + "loss": 0.3647, + "step": 11682 + }, + { + "epoch": 0.8112067768365505, + "grad_norm": 5.792675936872284, + "learning_rate": 9.060501193337567e-07, + "loss": 0.4758, + "step": 11683 + }, + { + "epoch": 0.8112762116372726, + "grad_norm": 5.546001126073153, + "learning_rate": 9.054046623800367e-07, + "loss": 0.6815, + "step": 11684 + }, + { + "epoch": 0.8113456464379947, + "grad_norm": 3.0948799274444045, + "learning_rate": 9.047594125263149e-07, + "loss": 0.3737, + "step": 11685 + }, + { + "epoch": 0.8114150812387169, + "grad_norm": 4.243022266742495, + "learning_rate": 
9.041143698052285e-07, + "loss": 0.5286, + "step": 11686 + }, + { + "epoch": 0.8114845160394389, + "grad_norm": 4.226221300703433, + "learning_rate": 9.034695342494004e-07, + "loss": 0.3947, + "step": 11687 + }, + { + "epoch": 0.8115539508401611, + "grad_norm": 3.742213983672117, + "learning_rate": 9.028249058914512e-07, + "loss": 0.4412, + "step": 11688 + }, + { + "epoch": 0.8116233856408832, + "grad_norm": 3.7812790224035866, + "learning_rate": 9.021804847639798e-07, + "loss": 0.3201, + "step": 11689 + }, + { + "epoch": 0.8116928204416053, + "grad_norm": 5.10757928568081, + "learning_rate": 9.015362708995817e-07, + "loss": 0.4761, + "step": 11690 + }, + { + "epoch": 0.8117622552423275, + "grad_norm": 4.657862089135266, + "learning_rate": 9.008922643308426e-07, + "loss": 0.5464, + "step": 11691 + }, + { + "epoch": 0.8118316900430496, + "grad_norm": 4.796201443415369, + "learning_rate": 9.002484650903359e-07, + "loss": 0.6323, + "step": 11692 + }, + { + "epoch": 0.8119011248437717, + "grad_norm": 5.767071331560206, + "learning_rate": 8.996048732106194e-07, + "loss": 0.7317, + "step": 11693 + }, + { + "epoch": 0.8119705596444938, + "grad_norm": 3.1347832920735503, + "learning_rate": 8.989614887242504e-07, + "loss": 0.2817, + "step": 11694 + }, + { + "epoch": 0.8120399944452159, + "grad_norm": 3.649657096754373, + "learning_rate": 8.983183116637695e-07, + "loss": 0.4242, + "step": 11695 + }, + { + "epoch": 0.8121094292459381, + "grad_norm": 2.702552672857004, + "learning_rate": 8.976753420617046e-07, + "loss": 0.2099, + "step": 11696 + }, + { + "epoch": 0.8121788640466602, + "grad_norm": 4.143593427395381, + "learning_rate": 8.970325799505803e-07, + "loss": 0.3733, + "step": 11697 + }, + { + "epoch": 0.8122482988473823, + "grad_norm": 3.6186425495596732, + "learning_rate": 8.963900253629048e-07, + "loss": 0.384, + "step": 11698 + }, + { + "epoch": 0.8123177336481044, + "grad_norm": 2.130662018489372, + "learning_rate": 8.957476783311786e-07, + "loss": 0.1195, + "step": 11699 + }, + { + "epoch": 0.8123871684488265, + "grad_norm": 4.156612297779111, + "learning_rate": 8.951055388878904e-07, + "loss": 0.4369, + "step": 11700 + }, + { + "epoch": 0.8124566032495487, + "grad_norm": 2.4985340899941497, + "learning_rate": 8.944636070655194e-07, + "loss": 0.1438, + "step": 11701 + }, + { + "epoch": 0.8125260380502708, + "grad_norm": 4.016665790870501, + "learning_rate": 8.938218828965317e-07, + "loss": 0.4047, + "step": 11702 + }, + { + "epoch": 0.8125954728509929, + "grad_norm": 3.481404079123336, + "learning_rate": 8.931803664133893e-07, + "loss": 0.3394, + "step": 11703 + }, + { + "epoch": 0.8126649076517151, + "grad_norm": 5.889106192001581, + "learning_rate": 8.92539057648536e-07, + "loss": 0.4699, + "step": 11704 + }, + { + "epoch": 0.8127343424524371, + "grad_norm": 4.599518317706325, + "learning_rate": 8.918979566344078e-07, + "loss": 0.5283, + "step": 11705 + }, + { + "epoch": 0.8128037772531593, + "grad_norm": 5.07934536581827, + "learning_rate": 8.912570634034345e-07, + "loss": 0.4626, + "step": 11706 + }, + { + "epoch": 0.8128732120538814, + "grad_norm": 3.9313910684253366, + "learning_rate": 8.906163779880272e-07, + "loss": 0.5278, + "step": 11707 + }, + { + "epoch": 0.8129426468546035, + "grad_norm": 5.135765536363077, + "learning_rate": 8.899759004205949e-07, + "loss": 0.3373, + "step": 11708 + }, + { + "epoch": 0.8130120816553257, + "grad_norm": 4.380650904258193, + "learning_rate": 8.893356307335305e-07, + "loss": 0.5902, + "step": 11709 + }, + { + "epoch": 0.8130815164560478, + 
"grad_norm": 3.7704484005734846, + "learning_rate": 8.886955689592186e-07, + "loss": 0.2092, + "step": 11710 + }, + { + "epoch": 0.8131509512567698, + "grad_norm": 4.794211265638571, + "learning_rate": 8.880557151300323e-07, + "loss": 0.5208, + "step": 11711 + }, + { + "epoch": 0.813220386057492, + "grad_norm": 5.463236989130208, + "learning_rate": 8.874160692783352e-07, + "loss": 0.5852, + "step": 11712 + }, + { + "epoch": 0.8132898208582141, + "grad_norm": 4.614266297920103, + "learning_rate": 8.86776631436479e-07, + "loss": 0.5403, + "step": 11713 + }, + { + "epoch": 0.8133592556589363, + "grad_norm": 3.2234902159625483, + "learning_rate": 8.861374016368068e-07, + "loss": 0.2709, + "step": 11714 + }, + { + "epoch": 0.8134286904596584, + "grad_norm": 3.450784819277149, + "learning_rate": 8.854983799116501e-07, + "loss": 0.2111, + "step": 11715 + }, + { + "epoch": 0.8134981252603805, + "grad_norm": 3.2234271434619175, + "learning_rate": 8.848595662933291e-07, + "loss": 0.3071, + "step": 11716 + }, + { + "epoch": 0.8135675600611026, + "grad_norm": 3.6747911633187385, + "learning_rate": 8.842209608141545e-07, + "loss": 0.3265, + "step": 11717 + }, + { + "epoch": 0.8136369948618247, + "grad_norm": 3.2690111484913045, + "learning_rate": 8.835825635064266e-07, + "loss": 0.3828, + "step": 11718 + }, + { + "epoch": 0.8137064296625469, + "grad_norm": 2.5871667795837987, + "learning_rate": 8.82944374402433e-07, + "loss": 0.2795, + "step": 11719 + }, + { + "epoch": 0.813775864463269, + "grad_norm": 4.412982197488445, + "learning_rate": 8.823063935344573e-07, + "loss": 0.3728, + "step": 11720 + }, + { + "epoch": 0.8138452992639911, + "grad_norm": 3.248760315976615, + "learning_rate": 8.816686209347631e-07, + "loss": 0.327, + "step": 11721 + }, + { + "epoch": 0.8139147340647133, + "grad_norm": 3.9131349035505814, + "learning_rate": 8.810310566356083e-07, + "loss": 0.4029, + "step": 11722 + }, + { + "epoch": 0.8139841688654353, + "grad_norm": 4.798613554271064, + "learning_rate": 8.803937006692432e-07, + "loss": 0.4879, + "step": 11723 + }, + { + "epoch": 0.8140536036661575, + "grad_norm": 3.216667739670734, + "learning_rate": 8.797565530679047e-07, + "loss": 0.306, + "step": 11724 + }, + { + "epoch": 0.8141230384668796, + "grad_norm": 3.717894063012203, + "learning_rate": 8.791196138638141e-07, + "loss": 0.3432, + "step": 11725 + }, + { + "epoch": 0.8141924732676017, + "grad_norm": 3.826254653617694, + "learning_rate": 8.784828830891922e-07, + "loss": 0.3952, + "step": 11726 + }, + { + "epoch": 0.8142619080683239, + "grad_norm": 4.591420065925006, + "learning_rate": 8.778463607762422e-07, + "loss": 0.5335, + "step": 11727 + }, + { + "epoch": 0.814331342869046, + "grad_norm": 3.617734547356876, + "learning_rate": 8.772100469571587e-07, + "loss": 0.4482, + "step": 11728 + }, + { + "epoch": 0.814400777669768, + "grad_norm": 3.6780400753749842, + "learning_rate": 8.765739416641255e-07, + "loss": 0.4905, + "step": 11729 + }, + { + "epoch": 0.8144702124704902, + "grad_norm": 4.395861752561041, + "learning_rate": 8.75938044929317e-07, + "loss": 0.4487, + "step": 11730 + }, + { + "epoch": 0.8145396472712123, + "grad_norm": 3.805288703143475, + "learning_rate": 8.75302356784895e-07, + "loss": 0.4543, + "step": 11731 + }, + { + "epoch": 0.8146090820719345, + "grad_norm": 4.329994251527006, + "learning_rate": 8.746668772630124e-07, + "loss": 0.233, + "step": 11732 + }, + { + "epoch": 0.8146785168726566, + "grad_norm": 3.2389701564617392, + "learning_rate": 8.74031606395811e-07, + "loss": 0.4294, + "step": 11733 
+ }, + { + "epoch": 0.8147479516733787, + "grad_norm": 4.588858961221016, + "learning_rate": 8.733965442154213e-07, + "loss": 0.5455, + "step": 11734 + }, + { + "epoch": 0.8148173864741008, + "grad_norm": 3.660455164788703, + "learning_rate": 8.727616907539666e-07, + "loss": 0.4666, + "step": 11735 + }, + { + "epoch": 0.8148868212748229, + "grad_norm": 3.804055281698499, + "learning_rate": 8.721270460435544e-07, + "loss": 0.427, + "step": 11736 + }, + { + "epoch": 0.8149562560755451, + "grad_norm": 2.9656015720603843, + "learning_rate": 8.714926101162841e-07, + "loss": 0.2853, + "step": 11737 + }, + { + "epoch": 0.8150256908762672, + "grad_norm": 4.790025399187547, + "learning_rate": 8.708583830042483e-07, + "loss": 0.5091, + "step": 11738 + }, + { + "epoch": 0.8150951256769893, + "grad_norm": 3.9322738743468792, + "learning_rate": 8.702243647395203e-07, + "loss": 0.4801, + "step": 11739 + }, + { + "epoch": 0.8151645604777115, + "grad_norm": 7.778572672042966, + "learning_rate": 8.695905553541723e-07, + "loss": 0.5444, + "step": 11740 + }, + { + "epoch": 0.8152339952784335, + "grad_norm": 3.4139733094545552, + "learning_rate": 8.689569548802601e-07, + "loss": 0.392, + "step": 11741 + }, + { + "epoch": 0.8153034300791556, + "grad_norm": 4.350806781618852, + "learning_rate": 8.683235633498305e-07, + "loss": 0.4755, + "step": 11742 + }, + { + "epoch": 0.8153728648798778, + "grad_norm": 3.7306487742360024, + "learning_rate": 8.676903807949194e-07, + "loss": 0.4495, + "step": 11743 + }, + { + "epoch": 0.8154422996805999, + "grad_norm": 4.9268292351051075, + "learning_rate": 8.670574072475535e-07, + "loss": 0.5075, + "step": 11744 + }, + { + "epoch": 0.8155117344813221, + "grad_norm": 3.558726384200234, + "learning_rate": 8.664246427397466e-07, + "loss": 0.3177, + "step": 11745 + }, + { + "epoch": 0.8155811692820442, + "grad_norm": 2.860706756865757, + "learning_rate": 8.657920873035042e-07, + "loss": 0.346, + "step": 11746 + }, + { + "epoch": 0.8156506040827662, + "grad_norm": 4.202468146138646, + "learning_rate": 8.651597409708196e-07, + "loss": 0.5484, + "step": 11747 + }, + { + "epoch": 0.8157200388834884, + "grad_norm": 3.633624389576488, + "learning_rate": 8.645276037736755e-07, + "loss": 0.3786, + "step": 11748 + }, + { + "epoch": 0.8157894736842105, + "grad_norm": 5.198307541591239, + "learning_rate": 8.638956757440475e-07, + "loss": 0.4677, + "step": 11749 + }, + { + "epoch": 0.8158589084849327, + "grad_norm": 4.035914901780297, + "learning_rate": 8.63263956913895e-07, + "loss": 0.4651, + "step": 11750 + }, + { + "epoch": 0.8159283432856548, + "grad_norm": 4.3773828251529325, + "learning_rate": 8.626324473151693e-07, + "loss": 0.4234, + "step": 11751 + }, + { + "epoch": 0.8159977780863769, + "grad_norm": 5.1169331903104975, + "learning_rate": 8.620011469798139e-07, + "loss": 0.6327, + "step": 11752 + }, + { + "epoch": 0.816067212887099, + "grad_norm": 4.117637788918677, + "learning_rate": 8.613700559397598e-07, + "loss": 0.2584, + "step": 11753 + }, + { + "epoch": 0.8161366476878211, + "grad_norm": 4.188071517494151, + "learning_rate": 8.607391742269217e-07, + "loss": 0.4541, + "step": 11754 + }, + { + "epoch": 0.8162060824885432, + "grad_norm": 4.28867547548891, + "learning_rate": 8.601085018732142e-07, + "loss": 0.4626, + "step": 11755 + }, + { + "epoch": 0.8162755172892654, + "grad_norm": 4.1678650775571136, + "learning_rate": 8.594780389105351e-07, + "loss": 0.4996, + "step": 11756 + }, + { + "epoch": 0.8163449520899875, + "grad_norm": 3.9255288671579684, + "learning_rate": 
8.588477853707688e-07, + "loss": 0.417, + "step": 11757 + }, + { + "epoch": 0.8164143868907097, + "grad_norm": 5.625644907410497, + "learning_rate": 8.582177412857967e-07, + "loss": 0.6356, + "step": 11758 + }, + { + "epoch": 0.8164838216914317, + "grad_norm": 3.0959458209092765, + "learning_rate": 8.575879066874848e-07, + "loss": 0.2577, + "step": 11759 + }, + { + "epoch": 0.8165532564921538, + "grad_norm": 4.258004912583366, + "learning_rate": 8.569582816076888e-07, + "loss": 0.4924, + "step": 11760 + }, + { + "epoch": 0.816622691292876, + "grad_norm": 3.6276148097787058, + "learning_rate": 8.563288660782548e-07, + "loss": 0.4298, + "step": 11761 + }, + { + "epoch": 0.8166921260935981, + "grad_norm": 5.264889757442817, + "learning_rate": 8.556996601310181e-07, + "loss": 0.5962, + "step": 11762 + }, + { + "epoch": 0.8167615608943203, + "grad_norm": 3.85259224254862, + "learning_rate": 8.550706637978029e-07, + "loss": 0.3875, + "step": 11763 + }, + { + "epoch": 0.8168309956950424, + "grad_norm": 5.183780422405194, + "learning_rate": 8.544418771104229e-07, + "loss": 0.6088, + "step": 11764 + }, + { + "epoch": 0.8169004304957644, + "grad_norm": 4.940234839356221, + "learning_rate": 8.538133001006821e-07, + "loss": 0.6547, + "step": 11765 + }, + { + "epoch": 0.8169698652964866, + "grad_norm": 2.6284836402872855, + "learning_rate": 8.531849328003716e-07, + "loss": 0.238, + "step": 11766 + }, + { + "epoch": 0.8170393000972087, + "grad_norm": 3.8350718188674677, + "learning_rate": 8.525567752412772e-07, + "loss": 0.3925, + "step": 11767 + }, + { + "epoch": 0.8171087348979308, + "grad_norm": 3.565168271351903, + "learning_rate": 8.519288274551657e-07, + "loss": 0.2826, + "step": 11768 + }, + { + "epoch": 0.817178169698653, + "grad_norm": 3.7217926658146303, + "learning_rate": 8.51301089473801e-07, + "loss": 0.3608, + "step": 11769 + }, + { + "epoch": 0.8172476044993751, + "grad_norm": 4.377627527759458, + "learning_rate": 8.506735613289341e-07, + "loss": 0.6863, + "step": 11770 + }, + { + "epoch": 0.8173170393000972, + "grad_norm": 3.254936429335114, + "learning_rate": 8.500462430523009e-07, + "loss": 0.3478, + "step": 11771 + }, + { + "epoch": 0.8173864741008193, + "grad_norm": 3.242908566457988, + "learning_rate": 8.494191346756336e-07, + "loss": 0.2212, + "step": 11772 + }, + { + "epoch": 0.8174559089015414, + "grad_norm": 4.830566281632919, + "learning_rate": 8.4879223623065e-07, + "loss": 0.5504, + "step": 11773 + }, + { + "epoch": 0.8175253437022636, + "grad_norm": 2.825502310342076, + "learning_rate": 8.481655477490574e-07, + "loss": 0.1777, + "step": 11774 + }, + { + "epoch": 0.8175947785029857, + "grad_norm": 4.203805270489655, + "learning_rate": 8.47539069262554e-07, + "loss": 0.2708, + "step": 11775 + }, + { + "epoch": 0.8176642133037079, + "grad_norm": 4.14694991191907, + "learning_rate": 8.469128008028249e-07, + "loss": 0.3793, + "step": 11776 + }, + { + "epoch": 0.8177336481044299, + "grad_norm": 3.4125189605766435, + "learning_rate": 8.462867424015458e-07, + "loss": 0.347, + "step": 11777 + }, + { + "epoch": 0.817803082905152, + "grad_norm": 3.5849173350360677, + "learning_rate": 8.456608940903854e-07, + "loss": 0.4366, + "step": 11778 + }, + { + "epoch": 0.8178725177058742, + "grad_norm": 4.126955692901241, + "learning_rate": 8.45035255900995e-07, + "loss": 0.4323, + "step": 11779 + }, + { + "epoch": 0.8179419525065963, + "grad_norm": 3.7335233058599573, + "learning_rate": 8.444098278650187e-07, + "loss": 0.5796, + "step": 11780 + }, + { + "epoch": 0.8180113873073185, + 
"grad_norm": 3.641989588834993, + "learning_rate": 8.437846100140929e-07, + "loss": 0.4593, + "step": 11781 + }, + { + "epoch": 0.8180808221080406, + "grad_norm": 4.147041786640367, + "learning_rate": 8.431596023798372e-07, + "loss": 0.5802, + "step": 11782 + }, + { + "epoch": 0.8181502569087626, + "grad_norm": 4.547038681032045, + "learning_rate": 8.425348049938642e-07, + "loss": 0.5166, + "step": 11783 + }, + { + "epoch": 0.8182196917094848, + "grad_norm": 4.1194730750005935, + "learning_rate": 8.419102178877775e-07, + "loss": 0.6038, + "step": 11784 + }, + { + "epoch": 0.8182891265102069, + "grad_norm": 3.791710422696346, + "learning_rate": 8.412858410931679e-07, + "loss": 0.4025, + "step": 11785 + }, + { + "epoch": 0.818358561310929, + "grad_norm": 3.501015018509845, + "learning_rate": 8.406616746416118e-07, + "loss": 0.4012, + "step": 11786 + }, + { + "epoch": 0.8184279961116512, + "grad_norm": 3.721183783266032, + "learning_rate": 8.40037718564683e-07, + "loss": 0.4119, + "step": 11787 + }, + { + "epoch": 0.8184974309123733, + "grad_norm": 2.271695502413639, + "learning_rate": 8.394139728939393e-07, + "loss": 0.0965, + "step": 11788 + }, + { + "epoch": 0.8185668657130954, + "grad_norm": 5.629761681595073, + "learning_rate": 8.387904376609285e-07, + "loss": 0.8634, + "step": 11789 + }, + { + "epoch": 0.8186363005138175, + "grad_norm": 3.482439444902559, + "learning_rate": 8.381671128971885e-07, + "loss": 0.245, + "step": 11790 + }, + { + "epoch": 0.8187057353145396, + "grad_norm": 3.817969049406683, + "learning_rate": 8.375439986342471e-07, + "loss": 0.3042, + "step": 11791 + }, + { + "epoch": 0.8187751701152618, + "grad_norm": 3.3459168135874386, + "learning_rate": 8.369210949036194e-07, + "loss": 0.267, + "step": 11792 + }, + { + "epoch": 0.8188446049159839, + "grad_norm": 3.404588956661382, + "learning_rate": 8.362984017368125e-07, + "loss": 0.4823, + "step": 11793 + }, + { + "epoch": 0.8189140397167061, + "grad_norm": 3.369483287613501, + "learning_rate": 8.356759191653207e-07, + "loss": 0.1967, + "step": 11794 + }, + { + "epoch": 0.8189834745174281, + "grad_norm": 3.216671663186988, + "learning_rate": 8.350536472206283e-07, + "loss": 0.2273, + "step": 11795 + }, + { + "epoch": 0.8190529093181502, + "grad_norm": 3.7274559812521004, + "learning_rate": 8.3443158593421e-07, + "loss": 0.4235, + "step": 11796 + }, + { + "epoch": 0.8191223441188724, + "grad_norm": 3.77603843400379, + "learning_rate": 8.338097353375286e-07, + "loss": 0.6066, + "step": 11797 + }, + { + "epoch": 0.8191917789195945, + "grad_norm": 4.428119762696871, + "learning_rate": 8.331880954620342e-07, + "loss": 0.4267, + "step": 11798 + }, + { + "epoch": 0.8192612137203166, + "grad_norm": 3.794264823905194, + "learning_rate": 8.325666663391741e-07, + "loss": 0.486, + "step": 11799 + }, + { + "epoch": 0.8193306485210388, + "grad_norm": 3.963416776674324, + "learning_rate": 8.319454480003736e-07, + "loss": 0.596, + "step": 11800 + }, + { + "epoch": 0.8194000833217608, + "grad_norm": 3.076051387970303, + "learning_rate": 8.31324440477057e-07, + "loss": 0.2821, + "step": 11801 + }, + { + "epoch": 0.819469518122483, + "grad_norm": 4.003870600294956, + "learning_rate": 8.307036438006344e-07, + "loss": 0.3204, + "step": 11802 + }, + { + "epoch": 0.8195389529232051, + "grad_norm": 3.999558028238334, + "learning_rate": 8.30083058002501e-07, + "loss": 0.411, + "step": 11803 + }, + { + "epoch": 0.8196083877239272, + "grad_norm": 3.6979591738081194, + "learning_rate": 8.294626831140495e-07, + "loss": 0.3266, + "step": 11804 + }, 
+ { + "epoch": 0.8196778225246494, + "grad_norm": 3.5547863631405243, + "learning_rate": 8.28842519166656e-07, + "loss": 0.3286, + "step": 11805 + }, + { + "epoch": 0.8197472573253715, + "grad_norm": 3.3767280639410266, + "learning_rate": 8.282225661916882e-07, + "loss": 0.3916, + "step": 11806 + }, + { + "epoch": 0.8198166921260936, + "grad_norm": 3.7518527532892056, + "learning_rate": 8.276028242205019e-07, + "loss": 0.0968, + "step": 11807 + }, + { + "epoch": 0.8198861269268157, + "grad_norm": 3.749327690954258, + "learning_rate": 8.269832932844434e-07, + "loss": 0.2939, + "step": 11808 + }, + { + "epoch": 0.8199555617275378, + "grad_norm": 3.8283094404721054, + "learning_rate": 8.263639734148466e-07, + "loss": 0.4071, + "step": 11809 + }, + { + "epoch": 0.82002499652826, + "grad_norm": 3.3381380630929325, + "learning_rate": 8.257448646430399e-07, + "loss": 0.4336, + "step": 11810 + }, + { + "epoch": 0.8200944313289821, + "grad_norm": 3.8973953681706615, + "learning_rate": 8.251259670003331e-07, + "loss": 0.4016, + "step": 11811 + }, + { + "epoch": 0.8201638661297042, + "grad_norm": 3.475250815912152, + "learning_rate": 8.245072805180293e-07, + "loss": 0.3295, + "step": 11812 + }, + { + "epoch": 0.8202333009304263, + "grad_norm": 3.326826779105959, + "learning_rate": 8.238888052274252e-07, + "loss": 0.3611, + "step": 11813 + }, + { + "epoch": 0.8203027357311484, + "grad_norm": 3.853830712016339, + "learning_rate": 8.232705411597979e-07, + "loss": 0.4108, + "step": 11814 + }, + { + "epoch": 0.8203721705318706, + "grad_norm": 4.435964056258887, + "learning_rate": 8.226524883464198e-07, + "loss": 0.3734, + "step": 11815 + }, + { + "epoch": 0.8204416053325927, + "grad_norm": 4.9857630993007795, + "learning_rate": 8.220346468185524e-07, + "loss": 0.5537, + "step": 11816 + }, + { + "epoch": 0.8205110401333148, + "grad_norm": 3.6638136795905876, + "learning_rate": 8.21417016607447e-07, + "loss": 0.2572, + "step": 11817 + }, + { + "epoch": 0.820580474934037, + "grad_norm": 4.189176414677505, + "learning_rate": 8.207995977443373e-07, + "loss": 0.5802, + "step": 11818 + }, + { + "epoch": 0.820649909734759, + "grad_norm": 3.7362507666752736, + "learning_rate": 8.201823902604567e-07, + "loss": 0.4085, + "step": 11819 + }, + { + "epoch": 0.8207193445354812, + "grad_norm": 3.5357866655912953, + "learning_rate": 8.195653941870202e-07, + "loss": 0.3259, + "step": 11820 + }, + { + "epoch": 0.8207887793362033, + "grad_norm": 3.5338976934981265, + "learning_rate": 8.189486095552363e-07, + "loss": 0.4002, + "step": 11821 + }, + { + "epoch": 0.8208582141369254, + "grad_norm": 4.2675884025539235, + "learning_rate": 8.183320363963005e-07, + "loss": 0.4096, + "step": 11822 + }, + { + "epoch": 0.8209276489376476, + "grad_norm": 4.883084461692116, + "learning_rate": 8.177156747413978e-07, + "loss": 0.5743, + "step": 11823 + }, + { + "epoch": 0.8209970837383697, + "grad_norm": 3.803487902807504, + "learning_rate": 8.170995246217044e-07, + "loss": 0.3872, + "step": 11824 + }, + { + "epoch": 0.8210665185390917, + "grad_norm": 3.3177195501312537, + "learning_rate": 8.164835860683834e-07, + "loss": 0.3996, + "step": 11825 + }, + { + "epoch": 0.8211359533398139, + "grad_norm": 3.9694433216938836, + "learning_rate": 8.158678591125884e-07, + "loss": 0.4356, + "step": 11826 + }, + { + "epoch": 0.821205388140536, + "grad_norm": 3.478551937667529, + "learning_rate": 8.152523437854632e-07, + "loss": 0.3031, + "step": 11827 + }, + { + "epoch": 0.8212748229412582, + "grad_norm": 3.6500682771072537, + "learning_rate": 
8.146370401181385e-07, + "loss": 0.241, + "step": 11828 + }, + { + "epoch": 0.8213442577419803, + "grad_norm": 3.6980903290299, + "learning_rate": 8.140219481417349e-07, + "loss": 0.4854, + "step": 11829 + }, + { + "epoch": 0.8214136925427024, + "grad_norm": 3.4815588583037536, + "learning_rate": 8.134070678873663e-07, + "loss": 0.4046, + "step": 11830 + }, + { + "epoch": 0.8214831273434245, + "grad_norm": 2.6631870670184865, + "learning_rate": 8.127923993861314e-07, + "loss": 0.1252, + "step": 11831 + }, + { + "epoch": 0.8215525621441466, + "grad_norm": 2.0472134750238955, + "learning_rate": 8.121779426691168e-07, + "loss": 0.112, + "step": 11832 + }, + { + "epoch": 0.8216219969448688, + "grad_norm": 3.747711253133913, + "learning_rate": 8.115636977674035e-07, + "loss": 0.436, + "step": 11833 + }, + { + "epoch": 0.8216914317455909, + "grad_norm": 3.5338476745777614, + "learning_rate": 8.109496647120596e-07, + "loss": 0.4273, + "step": 11834 + }, + { + "epoch": 0.821760866546313, + "grad_norm": 3.4009277546233685, + "learning_rate": 8.103358435341408e-07, + "loss": 0.2471, + "step": 11835 + }, + { + "epoch": 0.8218303013470352, + "grad_norm": 4.278668523702549, + "learning_rate": 8.097222342646949e-07, + "loss": 0.4842, + "step": 11836 + }, + { + "epoch": 0.8218997361477572, + "grad_norm": 3.410975248926277, + "learning_rate": 8.091088369347561e-07, + "loss": 0.4192, + "step": 11837 + }, + { + "epoch": 0.8219691709484794, + "grad_norm": 3.898112778836242, + "learning_rate": 8.084956515753501e-07, + "loss": 0.4611, + "step": 11838 + }, + { + "epoch": 0.8220386057492015, + "grad_norm": 3.691503202432564, + "learning_rate": 8.078826782174909e-07, + "loss": 0.3731, + "step": 11839 + }, + { + "epoch": 0.8221080405499236, + "grad_norm": 4.094032440388885, + "learning_rate": 8.072699168921827e-07, + "loss": 0.4603, + "step": 11840 + }, + { + "epoch": 0.8221774753506458, + "grad_norm": 3.0401355856406806, + "learning_rate": 8.066573676304162e-07, + "loss": 0.2802, + "step": 11841 + }, + { + "epoch": 0.8222469101513679, + "grad_norm": 6.4513016656549045, + "learning_rate": 8.060450304631773e-07, + "loss": 0.6185, + "step": 11842 + }, + { + "epoch": 0.8223163449520899, + "grad_norm": 3.660261474391956, + "learning_rate": 8.054329054214338e-07, + "loss": 0.4485, + "step": 11843 + }, + { + "epoch": 0.8223857797528121, + "grad_norm": 4.74406900424621, + "learning_rate": 8.04820992536146e-07, + "loss": 0.5641, + "step": 11844 + }, + { + "epoch": 0.8224552145535342, + "grad_norm": 3.0524751519418114, + "learning_rate": 8.042092918382683e-07, + "loss": 0.1614, + "step": 11845 + }, + { + "epoch": 0.8225246493542564, + "grad_norm": 4.070790026598544, + "learning_rate": 8.03597803358735e-07, + "loss": 0.3232, + "step": 11846 + }, + { + "epoch": 0.8225940841549785, + "grad_norm": 4.991697955829069, + "learning_rate": 8.029865271284753e-07, + "loss": 0.5925, + "step": 11847 + }, + { + "epoch": 0.8226635189557006, + "grad_norm": 3.9421899127522084, + "learning_rate": 8.023754631784092e-07, + "loss": 0.5229, + "step": 11848 + }, + { + "epoch": 0.8227329537564227, + "grad_norm": 5.1946706657279496, + "learning_rate": 8.017646115394418e-07, + "loss": 0.4825, + "step": 11849 + }, + { + "epoch": 0.8228023885571448, + "grad_norm": 5.384412710101801, + "learning_rate": 8.011539722424699e-07, + "loss": 0.5415, + "step": 11850 + }, + { + "epoch": 0.822871823357867, + "grad_norm": 4.687429768196469, + "learning_rate": 8.00543545318379e-07, + "loss": 0.4706, + "step": 11851 + }, + { + "epoch": 0.8229412581585891, + 
"grad_norm": 5.785711712375405, + "learning_rate": 7.999333307980434e-07, + "loss": 0.3475, + "step": 11852 + }, + { + "epoch": 0.8230106929593112, + "grad_norm": 4.817299707876041, + "learning_rate": 7.99323328712327e-07, + "loss": 0.4534, + "step": 11853 + }, + { + "epoch": 0.8230801277600334, + "grad_norm": 4.5749468654289105, + "learning_rate": 7.987135390920836e-07, + "loss": 0.2885, + "step": 11854 + }, + { + "epoch": 0.8231495625607554, + "grad_norm": 4.621172065712622, + "learning_rate": 7.981039619681552e-07, + "loss": 0.5033, + "step": 11855 + }, + { + "epoch": 0.8232189973614775, + "grad_norm": 4.291780027732288, + "learning_rate": 7.974945973713732e-07, + "loss": 0.3877, + "step": 11856 + }, + { + "epoch": 0.8232884321621997, + "grad_norm": 3.152468469304622, + "learning_rate": 7.968854453325597e-07, + "loss": 0.269, + "step": 11857 + }, + { + "epoch": 0.8233578669629218, + "grad_norm": 3.2433076866166526, + "learning_rate": 7.962765058825228e-07, + "loss": 0.3356, + "step": 11858 + }, + { + "epoch": 0.823427301763644, + "grad_norm": 3.9719669097725627, + "learning_rate": 7.956677790520645e-07, + "loss": 0.3124, + "step": 11859 + }, + { + "epoch": 0.8234967365643661, + "grad_norm": 4.39531555296524, + "learning_rate": 7.950592648719735e-07, + "loss": 0.316, + "step": 11860 + }, + { + "epoch": 0.8235661713650881, + "grad_norm": 3.6406819993029456, + "learning_rate": 7.944509633730241e-07, + "loss": 0.4637, + "step": 11861 + }, + { + "epoch": 0.8236356061658103, + "grad_norm": 4.212858426748226, + "learning_rate": 7.938428745859878e-07, + "loss": 0.3658, + "step": 11862 + }, + { + "epoch": 0.8237050409665324, + "grad_norm": 3.1715331844289465, + "learning_rate": 7.932349985416205e-07, + "loss": 0.199, + "step": 11863 + }, + { + "epoch": 0.8237744757672546, + "grad_norm": 5.138640461995357, + "learning_rate": 7.926273352706638e-07, + "loss": 0.5809, + "step": 11864 + }, + { + "epoch": 0.8238439105679767, + "grad_norm": 3.901967466217089, + "learning_rate": 7.920198848038574e-07, + "loss": 0.2423, + "step": 11865 + }, + { + "epoch": 0.8239133453686988, + "grad_norm": 3.986085144850681, + "learning_rate": 7.91412647171923e-07, + "loss": 0.4077, + "step": 11866 + }, + { + "epoch": 0.823982780169421, + "grad_norm": 5.419288766397098, + "learning_rate": 7.908056224055749e-07, + "loss": 0.6957, + "step": 11867 + }, + { + "epoch": 0.824052214970143, + "grad_norm": 4.727425625420789, + "learning_rate": 7.901988105355152e-07, + "loss": 0.4867, + "step": 11868 + }, + { + "epoch": 0.8241216497708651, + "grad_norm": 4.762372471184213, + "learning_rate": 7.89592211592436e-07, + "loss": 0.6424, + "step": 11869 + }, + { + "epoch": 0.8241910845715873, + "grad_norm": 5.53892895260762, + "learning_rate": 7.889858256070182e-07, + "loss": 0.811, + "step": 11870 + }, + { + "epoch": 0.8242605193723094, + "grad_norm": 4.008489719786029, + "learning_rate": 7.883796526099325e-07, + "loss": 0.4265, + "step": 11871 + }, + { + "epoch": 0.8243299541730316, + "grad_norm": 4.463427046956273, + "learning_rate": 7.87773692631838e-07, + "loss": 0.6022, + "step": 11872 + }, + { + "epoch": 0.8243993889737536, + "grad_norm": 4.263002089042544, + "learning_rate": 7.871679457033827e-07, + "loss": 0.3317, + "step": 11873 + }, + { + "epoch": 0.8244688237744757, + "grad_norm": 4.7440664640268935, + "learning_rate": 7.865624118552073e-07, + "loss": 0.5285, + "step": 11874 + }, + { + "epoch": 0.8245382585751979, + "grad_norm": 3.437832291635814, + "learning_rate": 7.85957091117937e-07, + "loss": 0.3027, + "step": 11875 + }, 
+ { + "epoch": 0.82460769337592, + "grad_norm": 3.6765544157904677, + "learning_rate": 7.853519835221867e-07, + "loss": 0.4509, + "step": 11876 + }, + { + "epoch": 0.8246771281766422, + "grad_norm": 3.2310627234699534, + "learning_rate": 7.847470890985665e-07, + "loss": 0.3506, + "step": 11877 + }, + { + "epoch": 0.8247465629773643, + "grad_norm": 4.3003553906755405, + "learning_rate": 7.841424078776677e-07, + "loss": 0.1406, + "step": 11878 + }, + { + "epoch": 0.8248159977780863, + "grad_norm": 2.620507495163355, + "learning_rate": 7.835379398900738e-07, + "loss": 0.2078, + "step": 11879 + }, + { + "epoch": 0.8248854325788085, + "grad_norm": 2.8452927584059515, + "learning_rate": 7.82933685166361e-07, + "loss": 0.2781, + "step": 11880 + }, + { + "epoch": 0.8249548673795306, + "grad_norm": 2.982041109309288, + "learning_rate": 7.823296437370903e-07, + "loss": 0.2129, + "step": 11881 + }, + { + "epoch": 0.8250243021802527, + "grad_norm": 3.3393221261893817, + "learning_rate": 7.817258156328139e-07, + "loss": 0.3882, + "step": 11882 + }, + { + "epoch": 0.8250937369809749, + "grad_norm": 4.081932172624764, + "learning_rate": 7.811222008840719e-07, + "loss": 0.5612, + "step": 11883 + }, + { + "epoch": 0.825163171781697, + "grad_norm": 2.35604466801223, + "learning_rate": 7.805187995213954e-07, + "loss": 0.1732, + "step": 11884 + }, + { + "epoch": 0.8252326065824191, + "grad_norm": 3.815559953798593, + "learning_rate": 7.799156115753037e-07, + "loss": 0.3573, + "step": 11885 + }, + { + "epoch": 0.8253020413831412, + "grad_norm": 3.5903812441811076, + "learning_rate": 7.793126370763043e-07, + "loss": 0.3931, + "step": 11886 + }, + { + "epoch": 0.8253714761838633, + "grad_norm": 4.065662319771238, + "learning_rate": 7.787098760548961e-07, + "loss": 0.5873, + "step": 11887 + }, + { + "epoch": 0.8254409109845855, + "grad_norm": 3.5040751204565828, + "learning_rate": 7.781073285415658e-07, + "loss": 0.3467, + "step": 11888 + }, + { + "epoch": 0.8255103457853076, + "grad_norm": 4.774674273623775, + "learning_rate": 7.77504994566789e-07, + "loss": 0.4328, + "step": 11889 + }, + { + "epoch": 0.8255797805860298, + "grad_norm": 4.489129003728178, + "learning_rate": 7.769028741610307e-07, + "loss": 0.3566, + "step": 11890 + }, + { + "epoch": 0.8256492153867518, + "grad_norm": 4.224640561041638, + "learning_rate": 7.763009673547472e-07, + "loss": 0.3797, + "step": 11891 + }, + { + "epoch": 0.8257186501874739, + "grad_norm": 4.121939762447984, + "learning_rate": 7.756992741783825e-07, + "loss": 0.3783, + "step": 11892 + }, + { + "epoch": 0.8257880849881961, + "grad_norm": 4.416317616891731, + "learning_rate": 7.750977946623661e-07, + "loss": 0.5136, + "step": 11893 + }, + { + "epoch": 0.8258575197889182, + "grad_norm": 4.1350225537671985, + "learning_rate": 7.744965288371237e-07, + "loss": 0.2733, + "step": 11894 + }, + { + "epoch": 0.8259269545896404, + "grad_norm": 3.601991375956312, + "learning_rate": 7.738954767330664e-07, + "loss": 0.3348, + "step": 11895 + }, + { + "epoch": 0.8259963893903625, + "grad_norm": 3.5039716349987025, + "learning_rate": 7.732946383805912e-07, + "loss": 0.3457, + "step": 11896 + }, + { + "epoch": 0.8260658241910845, + "grad_norm": 2.433782199734208, + "learning_rate": 7.72694013810092e-07, + "loss": 0.1587, + "step": 11897 + }, + { + "epoch": 0.8261352589918067, + "grad_norm": 3.8184836264571516, + "learning_rate": 7.720936030519461e-07, + "loss": 0.2757, + "step": 11898 + }, + { + "epoch": 0.8262046937925288, + "grad_norm": 5.5016917841091715, + "learning_rate": 
7.714934061365209e-07, + "loss": 0.6712, + "step": 11899 + }, + { + "epoch": 0.8262741285932509, + "grad_norm": 3.9243131350400398, + "learning_rate": 7.70893423094175e-07, + "loss": 0.521, + "step": 11900 + }, + { + "epoch": 0.8263435633939731, + "grad_norm": 6.1892890218332175, + "learning_rate": 7.702936539552541e-07, + "loss": 0.3458, + "step": 11901 + }, + { + "epoch": 0.8264129981946952, + "grad_norm": 3.071936997800823, + "learning_rate": 7.696940987500934e-07, + "loss": 0.3008, + "step": 11902 + }, + { + "epoch": 0.8264824329954173, + "grad_norm": 3.2645292949173297, + "learning_rate": 7.690947575090191e-07, + "loss": 0.429, + "step": 11903 + }, + { + "epoch": 0.8265518677961394, + "grad_norm": 4.089806173810195, + "learning_rate": 7.684956302623436e-07, + "loss": 0.4765, + "step": 11904 + }, + { + "epoch": 0.8266213025968615, + "grad_norm": 3.732425270761966, + "learning_rate": 7.678967170403701e-07, + "loss": 0.2622, + "step": 11905 + }, + { + "epoch": 0.8266907373975837, + "grad_norm": 3.276817016099543, + "learning_rate": 7.67298017873394e-07, + "loss": 0.3337, + "step": 11906 + }, + { + "epoch": 0.8267601721983058, + "grad_norm": 2.916021849146458, + "learning_rate": 7.666995327916932e-07, + "loss": 0.2954, + "step": 11907 + }, + { + "epoch": 0.826829606999028, + "grad_norm": 3.3933070556538345, + "learning_rate": 7.661012618255387e-07, + "loss": 0.3108, + "step": 11908 + }, + { + "epoch": 0.82689904179975, + "grad_norm": 3.013613117365627, + "learning_rate": 7.655032050051925e-07, + "loss": 0.4297, + "step": 11909 + }, + { + "epoch": 0.8269684766004721, + "grad_norm": 5.195818249474201, + "learning_rate": 7.649053623609026e-07, + "loss": 0.4789, + "step": 11910 + }, + { + "epoch": 0.8270379114011943, + "grad_norm": 4.604618119993308, + "learning_rate": 7.643077339229071e-07, + "loss": 0.6189, + "step": 11911 + }, + { + "epoch": 0.8271073462019164, + "grad_norm": 3.6200793514598284, + "learning_rate": 7.637103197214335e-07, + "loss": 0.509, + "step": 11912 + }, + { + "epoch": 0.8271767810026385, + "grad_norm": 4.29780312299174, + "learning_rate": 7.631131197866981e-07, + "loss": 0.4304, + "step": 11913 + }, + { + "epoch": 0.8272462158033607, + "grad_norm": 3.524860323583806, + "learning_rate": 7.625161341489068e-07, + "loss": 0.2911, + "step": 11914 + }, + { + "epoch": 0.8273156506040827, + "grad_norm": 3.8311439978811106, + "learning_rate": 7.619193628382548e-07, + "loss": 0.3845, + "step": 11915 + }, + { + "epoch": 0.8273850854048049, + "grad_norm": 4.880759236739138, + "learning_rate": 7.613228058849253e-07, + "loss": 0.3677, + "step": 11916 + }, + { + "epoch": 0.827454520205527, + "grad_norm": 5.989081087003398, + "learning_rate": 7.60726463319092e-07, + "loss": 0.6684, + "step": 11917 + }, + { + "epoch": 0.8275239550062491, + "grad_norm": 3.768389690851867, + "learning_rate": 7.601303351709166e-07, + "loss": 0.3685, + "step": 11918 + }, + { + "epoch": 0.8275933898069713, + "grad_norm": 4.082518659235821, + "learning_rate": 7.595344214705508e-07, + "loss": 0.6686, + "step": 11919 + }, + { + "epoch": 0.8276628246076934, + "grad_norm": 3.777879173562721, + "learning_rate": 7.58938722248137e-07, + "loss": 0.3157, + "step": 11920 + }, + { + "epoch": 0.8277322594084155, + "grad_norm": 4.589371680838984, + "learning_rate": 7.583432375338029e-07, + "loss": 0.4287, + "step": 11921 + }, + { + "epoch": 0.8278016942091376, + "grad_norm": 3.0669351793944624, + "learning_rate": 7.577479673576671e-07, + "loss": 0.2335, + "step": 11922 + }, + { + "epoch": 0.8278711290098597, + 
"grad_norm": 4.693895679343266, + "learning_rate": 7.571529117498394e-07, + "loss": 0.4043, + "step": 11923 + }, + { + "epoch": 0.8279405638105819, + "grad_norm": 4.2592931833743695, + "learning_rate": 7.56558070740418e-07, + "loss": 0.2718, + "step": 11924 + }, + { + "epoch": 0.828009998611304, + "grad_norm": 4.561344249311811, + "learning_rate": 7.559634443594849e-07, + "loss": 0.4164, + "step": 11925 + }, + { + "epoch": 0.8280794334120261, + "grad_norm": 3.1449838994737314, + "learning_rate": 7.553690326371193e-07, + "loss": 0.4223, + "step": 11926 + }, + { + "epoch": 0.8281488682127482, + "grad_norm": 5.8271672426196, + "learning_rate": 7.547748356033863e-07, + "loss": 0.5208, + "step": 11927 + }, + { + "epoch": 0.8282183030134703, + "grad_norm": 4.809114409487456, + "learning_rate": 7.54180853288336e-07, + "loss": 0.6948, + "step": 11928 + }, + { + "epoch": 0.8282877378141925, + "grad_norm": 4.085525023129078, + "learning_rate": 7.535870857220151e-07, + "loss": 0.3885, + "step": 11929 + }, + { + "epoch": 0.8283571726149146, + "grad_norm": 3.9246528759566734, + "learning_rate": 7.52993532934454e-07, + "loss": 0.4707, + "step": 11930 + }, + { + "epoch": 0.8284266074156367, + "grad_norm": 4.401499298750683, + "learning_rate": 7.524001949556742e-07, + "loss": 0.3778, + "step": 11931 + }, + { + "epoch": 0.8284960422163589, + "grad_norm": 4.051871691495512, + "learning_rate": 7.518070718156861e-07, + "loss": 0.4648, + "step": 11932 + }, + { + "epoch": 0.8285654770170809, + "grad_norm": 3.095422349533252, + "learning_rate": 7.512141635444892e-07, + "loss": 0.2205, + "step": 11933 + }, + { + "epoch": 0.8286349118178031, + "grad_norm": 3.9478884565266537, + "learning_rate": 7.506214701720704e-07, + "loss": 0.3097, + "step": 11934 + }, + { + "epoch": 0.8287043466185252, + "grad_norm": 3.6783246836342762, + "learning_rate": 7.500289917284121e-07, + "loss": 0.3038, + "step": 11935 + }, + { + "epoch": 0.8287737814192473, + "grad_norm": 3.494896439113337, + "learning_rate": 7.494367282434772e-07, + "loss": 0.3933, + "step": 11936 + }, + { + "epoch": 0.8288432162199695, + "grad_norm": 2.8360790990135065, + "learning_rate": 7.488446797472209e-07, + "loss": 0.2689, + "step": 11937 + }, + { + "epoch": 0.8289126510206916, + "grad_norm": 4.349090313301468, + "learning_rate": 7.482528462695932e-07, + "loss": 0.3252, + "step": 11938 + }, + { + "epoch": 0.8289820858214136, + "grad_norm": 3.688598116107836, + "learning_rate": 7.476612278405227e-07, + "loss": 0.4917, + "step": 11939 + }, + { + "epoch": 0.8290515206221358, + "grad_norm": 3.257715343891594, + "learning_rate": 7.470698244899366e-07, + "loss": 0.3806, + "step": 11940 + }, + { + "epoch": 0.8291209554228579, + "grad_norm": 5.129776088178313, + "learning_rate": 7.464786362477461e-07, + "loss": 0.492, + "step": 11941 + }, + { + "epoch": 0.8291903902235801, + "grad_norm": 3.4836783095926602, + "learning_rate": 7.458876631438533e-07, + "loss": 0.3956, + "step": 11942 + }, + { + "epoch": 0.8292598250243022, + "grad_norm": 3.190906658018773, + "learning_rate": 7.452969052081483e-07, + "loss": 0.3648, + "step": 11943 + }, + { + "epoch": 0.8293292598250243, + "grad_norm": 5.803172367343852, + "learning_rate": 7.447063624705114e-07, + "loss": 0.4155, + "step": 11944 + }, + { + "epoch": 0.8293986946257464, + "grad_norm": 3.4388316524600624, + "learning_rate": 7.441160349608117e-07, + "loss": 0.2793, + "step": 11945 + }, + { + "epoch": 0.8294681294264685, + "grad_norm": 2.8148218300455605, + "learning_rate": 7.43525922708907e-07, + "loss": 0.1677, + "step": 
11946 + }, + { + "epoch": 0.8295375642271907, + "grad_norm": 4.175678198955694, + "learning_rate": 7.429360257446449e-07, + "loss": 0.4921, + "step": 11947 + }, + { + "epoch": 0.8296069990279128, + "grad_norm": 4.272701937033411, + "learning_rate": 7.423463440978596e-07, + "loss": 0.4954, + "step": 11948 + }, + { + "epoch": 0.8296764338286349, + "grad_norm": 4.353684967633905, + "learning_rate": 7.417568777983814e-07, + "loss": 0.4126, + "step": 11949 + }, + { + "epoch": 0.8297458686293571, + "grad_norm": 5.548368276650892, + "learning_rate": 7.411676268760199e-07, + "loss": 0.4468, + "step": 11950 + }, + { + "epoch": 0.8298153034300791, + "grad_norm": 6.362592342629756, + "learning_rate": 7.405785913605801e-07, + "loss": 0.6409, + "step": 11951 + }, + { + "epoch": 0.8298847382308012, + "grad_norm": 4.321804119384618, + "learning_rate": 7.399897712818572e-07, + "loss": 0.3932, + "step": 11952 + }, + { + "epoch": 0.8299541730315234, + "grad_norm": 3.982081394784734, + "learning_rate": 7.394011666696304e-07, + "loss": 0.4919, + "step": 11953 + }, + { + "epoch": 0.8300236078322455, + "grad_norm": 4.108935734060896, + "learning_rate": 7.388127775536696e-07, + "loss": 0.4741, + "step": 11954 + }, + { + "epoch": 0.8300930426329677, + "grad_norm": 3.110887300167369, + "learning_rate": 7.382246039637386e-07, + "loss": 0.2945, + "step": 11955 + }, + { + "epoch": 0.8301624774336898, + "grad_norm": 4.128288538408571, + "learning_rate": 7.376366459295853e-07, + "loss": 0.3703, + "step": 11956 + }, + { + "epoch": 0.8302319122344118, + "grad_norm": 1.7492305890525575, + "learning_rate": 7.370489034809447e-07, + "loss": 0.1178, + "step": 11957 + }, + { + "epoch": 0.830301347035134, + "grad_norm": 2.9614505927711776, + "learning_rate": 7.364613766475481e-07, + "loss": 0.2383, + "step": 11958 + }, + { + "epoch": 0.8303707818358561, + "grad_norm": 3.2815376919879524, + "learning_rate": 7.358740654591107e-07, + "loss": 0.3329, + "step": 11959 + }, + { + "epoch": 0.8304402166365783, + "grad_norm": 4.762145916929719, + "learning_rate": 7.35286969945338e-07, + "loss": 0.6134, + "step": 11960 + }, + { + "epoch": 0.8305096514373004, + "grad_norm": 4.656724449283461, + "learning_rate": 7.347000901359242e-07, + "loss": 0.3983, + "step": 11961 + }, + { + "epoch": 0.8305790862380225, + "grad_norm": 3.87558184903679, + "learning_rate": 7.341134260605537e-07, + "loss": 0.4133, + "step": 11962 + }, + { + "epoch": 0.8306485210387446, + "grad_norm": 3.2858645494932803, + "learning_rate": 7.335269777488984e-07, + "loss": 0.3108, + "step": 11963 + }, + { + "epoch": 0.8307179558394667, + "grad_norm": 5.081743884747921, + "learning_rate": 7.329407452306208e-07, + "loss": 0.4964, + "step": 11964 + }, + { + "epoch": 0.8307873906401889, + "grad_norm": 4.398726677619599, + "learning_rate": 7.323547285353721e-07, + "loss": 0.3621, + "step": 11965 + }, + { + "epoch": 0.830856825440911, + "grad_norm": 3.428798148131502, + "learning_rate": 7.317689276927909e-07, + "loss": 0.2505, + "step": 11966 + }, + { + "epoch": 0.8309262602416331, + "grad_norm": 3.0020140630651446, + "learning_rate": 7.311833427325099e-07, + "loss": 0.3836, + "step": 11967 + }, + { + "epoch": 0.8309956950423553, + "grad_norm": 3.6621068917816593, + "learning_rate": 7.305979736841434e-07, + "loss": 0.3016, + "step": 11968 + }, + { + "epoch": 0.8310651298430773, + "grad_norm": 4.967144712143544, + "learning_rate": 7.300128205772994e-07, + "loss": 0.469, + "step": 11969 + }, + { + "epoch": 0.8311345646437994, + "grad_norm": 4.42088218802386, + "learning_rate": 
7.294278834415774e-07, + "loss": 0.4362, + "step": 11970 + }, + { + "epoch": 0.8312039994445216, + "grad_norm": 3.3350235969385755, + "learning_rate": 7.288431623065584e-07, + "loss": 0.3347, + "step": 11971 + }, + { + "epoch": 0.8312734342452437, + "grad_norm": 2.8695791482879205, + "learning_rate": 7.282586572018196e-07, + "loss": 0.1674, + "step": 11972 + }, + { + "epoch": 0.8313428690459659, + "grad_norm": 3.12251724080186, + "learning_rate": 7.276743681569248e-07, + "loss": 0.182, + "step": 11973 + }, + { + "epoch": 0.831412303846688, + "grad_norm": 4.155450737602543, + "learning_rate": 7.270902952014259e-07, + "loss": 0.4364, + "step": 11974 + }, + { + "epoch": 0.83148173864741, + "grad_norm": 4.346994334907897, + "learning_rate": 7.265064383648645e-07, + "loss": 0.3412, + "step": 11975 + }, + { + "epoch": 0.8315511734481322, + "grad_norm": 3.8013519798443585, + "learning_rate": 7.259227976767713e-07, + "loss": 0.3263, + "step": 11976 + }, + { + "epoch": 0.8316206082488543, + "grad_norm": 3.3698851913103627, + "learning_rate": 7.253393731666675e-07, + "loss": 0.1348, + "step": 11977 + }, + { + "epoch": 0.8316900430495765, + "grad_norm": 4.250324475309351, + "learning_rate": 7.247561648640606e-07, + "loss": 0.2879, + "step": 11978 + }, + { + "epoch": 0.8317594778502986, + "grad_norm": 4.313575659488107, + "learning_rate": 7.241731727984491e-07, + "loss": 0.4974, + "step": 11979 + }, + { + "epoch": 0.8318289126510207, + "grad_norm": 4.134644287477897, + "learning_rate": 7.235903969993185e-07, + "loss": 0.6356, + "step": 11980 + }, + { + "epoch": 0.8318983474517428, + "grad_norm": 3.200239001761598, + "learning_rate": 7.230078374961497e-07, + "loss": 0.2647, + "step": 11981 + }, + { + "epoch": 0.8319677822524649, + "grad_norm": 4.4029324686175775, + "learning_rate": 7.224254943184028e-07, + "loss": 0.4543, + "step": 11982 + }, + { + "epoch": 0.832037217053187, + "grad_norm": 4.264739819551894, + "learning_rate": 7.218433674955333e-07, + "loss": 0.6348, + "step": 11983 + }, + { + "epoch": 0.8321066518539092, + "grad_norm": 4.976450552361448, + "learning_rate": 7.212614570569865e-07, + "loss": 0.5577, + "step": 11984 + }, + { + "epoch": 0.8321760866546313, + "grad_norm": 3.0568343875206105, + "learning_rate": 7.206797630321944e-07, + "loss": 0.2992, + "step": 11985 + }, + { + "epoch": 0.8322455214553535, + "grad_norm": 7.8053516655279225, + "learning_rate": 7.200982854505751e-07, + "loss": 0.372, + "step": 11986 + }, + { + "epoch": 0.8323149562560755, + "grad_norm": 3.5378539685153485, + "learning_rate": 7.195170243415428e-07, + "loss": 0.4187, + "step": 11987 + }, + { + "epoch": 0.8323843910567976, + "grad_norm": 4.8658630496222885, + "learning_rate": 7.189359797344969e-07, + "loss": 0.5902, + "step": 11988 + }, + { + "epoch": 0.8324538258575198, + "grad_norm": 4.757984693157151, + "learning_rate": 7.183551516588221e-07, + "loss": 0.4108, + "step": 11989 + }, + { + "epoch": 0.8325232606582419, + "grad_norm": 3.613457068767353, + "learning_rate": 7.177745401439007e-07, + "loss": 0.3119, + "step": 11990 + }, + { + "epoch": 0.8325926954589641, + "grad_norm": 8.20288177400798, + "learning_rate": 7.171941452190966e-07, + "loss": 0.5144, + "step": 11991 + }, + { + "epoch": 0.8326621302596862, + "grad_norm": 3.1490152901619535, + "learning_rate": 7.166139669137668e-07, + "loss": 0.2896, + "step": 11992 + }, + { + "epoch": 0.8327315650604082, + "grad_norm": 4.608907674267553, + "learning_rate": 7.16034005257255e-07, + "loss": 0.6668, + "step": 11993 + }, + { + "epoch": 0.8328009998611304, + 
"grad_norm": 4.015424949268355, + "learning_rate": 7.15454260278896e-07, + "loss": 0.424, + "step": 11994 + }, + { + "epoch": 0.8328704346618525, + "grad_norm": 3.9733086682153957, + "learning_rate": 7.148747320080124e-07, + "loss": 0.4513, + "step": 11995 + }, + { + "epoch": 0.8329398694625746, + "grad_norm": 3.991423289105037, + "learning_rate": 7.142954204739156e-07, + "loss": 0.3679, + "step": 11996 + }, + { + "epoch": 0.8330093042632968, + "grad_norm": 5.986676936072969, + "learning_rate": 7.137163257059071e-07, + "loss": 0.5887, + "step": 11997 + }, + { + "epoch": 0.8330787390640189, + "grad_norm": 3.00824041479492, + "learning_rate": 7.131374477332754e-07, + "loss": 0.2305, + "step": 11998 + }, + { + "epoch": 0.833148173864741, + "grad_norm": 4.05830346813593, + "learning_rate": 7.125587865853023e-07, + "loss": 0.5108, + "step": 11999 + }, + { + "epoch": 0.8332176086654631, + "grad_norm": 3.545792156264135, + "learning_rate": 7.119803422912525e-07, + "loss": 0.3328, + "step": 12000 + }, + { + "epoch": 0.8332870434661852, + "grad_norm": 4.541569151605179, + "learning_rate": 7.114021148803863e-07, + "loss": 0.598, + "step": 12001 + }, + { + "epoch": 0.8333564782669074, + "grad_norm": 3.513191341475214, + "learning_rate": 7.108241043819486e-07, + "loss": 0.3435, + "step": 12002 + }, + { + "epoch": 0.8334259130676295, + "grad_norm": 4.498911108838659, + "learning_rate": 7.102463108251722e-07, + "loss": 0.4271, + "step": 12003 + }, + { + "epoch": 0.8334953478683517, + "grad_norm": 14.088428310116448, + "learning_rate": 7.096687342392844e-07, + "loss": 0.3764, + "step": 12004 + }, + { + "epoch": 0.8335647826690737, + "grad_norm": 5.390795534364898, + "learning_rate": 7.090913746534972e-07, + "loss": 0.7113, + "step": 12005 + }, + { + "epoch": 0.8336342174697958, + "grad_norm": 5.646516611894107, + "learning_rate": 7.085142320970128e-07, + "loss": 0.521, + "step": 12006 + }, + { + "epoch": 0.833703652270518, + "grad_norm": 3.6743847949003214, + "learning_rate": 7.079373065990225e-07, + "loss": 0.2251, + "step": 12007 + }, + { + "epoch": 0.8337730870712401, + "grad_norm": 4.879345695098777, + "learning_rate": 7.07360598188706e-07, + "loss": 0.5546, + "step": 12008 + }, + { + "epoch": 0.8338425218719622, + "grad_norm": 4.000258554098824, + "learning_rate": 7.067841068952336e-07, + "loss": 0.2963, + "step": 12009 + }, + { + "epoch": 0.8339119566726844, + "grad_norm": 3.888764917184493, + "learning_rate": 7.062078327477623e-07, + "loss": 0.3177, + "step": 12010 + }, + { + "epoch": 0.8339813914734064, + "grad_norm": 4.372826770993004, + "learning_rate": 7.056317757754405e-07, + "loss": 0.5725, + "step": 12011 + }, + { + "epoch": 0.8340508262741286, + "grad_norm": 3.35593725724384, + "learning_rate": 7.050559360074027e-07, + "loss": 0.356, + "step": 12012 + }, + { + "epoch": 0.8341202610748507, + "grad_norm": 3.7674666013388283, + "learning_rate": 7.044803134727779e-07, + "loss": 0.5072, + "step": 12013 + }, + { + "epoch": 0.8341896958755728, + "grad_norm": 4.083482736926625, + "learning_rate": 7.039049082006771e-07, + "loss": 0.4023, + "step": 12014 + }, + { + "epoch": 0.834259130676295, + "grad_norm": 4.3551582958513055, + "learning_rate": 7.033297202202038e-07, + "loss": 0.494, + "step": 12015 + }, + { + "epoch": 0.8343285654770171, + "grad_norm": 3.4467801773633706, + "learning_rate": 7.02754749560452e-07, + "loss": 0.4117, + "step": 12016 + }, + { + "epoch": 0.8343980002777392, + "grad_norm": 3.472441700537884, + "learning_rate": 7.021799962505038e-07, + "loss": 0.1223, + "step": 12017 + }, 
+ { + "epoch": 0.8344674350784613, + "grad_norm": 4.722562449055913, + "learning_rate": 7.016054603194255e-07, + "loss": 0.5183, + "step": 12018 + }, + { + "epoch": 0.8345368698791834, + "grad_norm": 3.5937583623762093, + "learning_rate": 7.0103114179628e-07, + "loss": 0.3315, + "step": 12019 + }, + { + "epoch": 0.8346063046799056, + "grad_norm": 3.728916454801873, + "learning_rate": 7.004570407101152e-07, + "loss": 0.3651, + "step": 12020 + }, + { + "epoch": 0.8346757394806277, + "grad_norm": 8.527737199382713, + "learning_rate": 6.998831570899678e-07, + "loss": 0.4799, + "step": 12021 + }, + { + "epoch": 0.8347451742813499, + "grad_norm": 3.3521806168014106, + "learning_rate": 6.993094909648646e-07, + "loss": 0.3113, + "step": 12022 + }, + { + "epoch": 0.834814609082072, + "grad_norm": 4.873815299303339, + "learning_rate": 6.987360423638206e-07, + "loss": 0.4844, + "step": 12023 + }, + { + "epoch": 0.834884043882794, + "grad_norm": 4.94581898302854, + "learning_rate": 6.981628113158401e-07, + "loss": 0.3704, + "step": 12024 + }, + { + "epoch": 0.8349534786835162, + "grad_norm": 3.83743323659239, + "learning_rate": 6.975897978499175e-07, + "loss": 0.4026, + "step": 12025 + }, + { + "epoch": 0.8350229134842383, + "grad_norm": 3.6895229356698414, + "learning_rate": 6.970170019950334e-07, + "loss": 0.4636, + "step": 12026 + }, + { + "epoch": 0.8350923482849604, + "grad_norm": 3.0738458732614626, + "learning_rate": 6.96444423780161e-07, + "loss": 0.2402, + "step": 12027 + }, + { + "epoch": 0.8351617830856826, + "grad_norm": 2.9012529978130512, + "learning_rate": 6.958720632342597e-07, + "loss": 0.1474, + "step": 12028 + }, + { + "epoch": 0.8352312178864046, + "grad_norm": 5.788937707862736, + "learning_rate": 6.952999203862776e-07, + "loss": 0.5261, + "step": 12029 + }, + { + "epoch": 0.8353006526871268, + "grad_norm": 4.800132340904821, + "learning_rate": 6.947279952651558e-07, + "loss": 0.7018, + "step": 12030 + }, + { + "epoch": 0.8353700874878489, + "grad_norm": 3.80114961470897, + "learning_rate": 6.941562878998209e-07, + "loss": 0.4123, + "step": 12031 + }, + { + "epoch": 0.835439522288571, + "grad_norm": 3.713213629955207, + "learning_rate": 6.935847983191868e-07, + "loss": 0.346, + "step": 12032 + }, + { + "epoch": 0.8355089570892932, + "grad_norm": 3.9080649893585906, + "learning_rate": 6.930135265521615e-07, + "loss": 0.4942, + "step": 12033 + }, + { + "epoch": 0.8355783918900153, + "grad_norm": 3.7789738502497014, + "learning_rate": 6.924424726276396e-07, + "loss": 0.5496, + "step": 12034 + }, + { + "epoch": 0.8356478266907374, + "grad_norm": 4.099219345610852, + "learning_rate": 6.91871636574501e-07, + "loss": 0.4458, + "step": 12035 + }, + { + "epoch": 0.8357172614914595, + "grad_norm": 3.7707243361809195, + "learning_rate": 6.913010184216206e-07, + "loss": 0.4939, + "step": 12036 + }, + { + "epoch": 0.8357866962921816, + "grad_norm": 4.047240036981536, + "learning_rate": 6.907306181978596e-07, + "loss": 0.518, + "step": 12037 + }, + { + "epoch": 0.8358561310929038, + "grad_norm": 4.3985812035276, + "learning_rate": 6.901604359320673e-07, + "loss": 0.5076, + "step": 12038 + }, + { + "epoch": 0.8359255658936259, + "grad_norm": 3.277824006902283, + "learning_rate": 6.895904716530838e-07, + "loss": 0.3197, + "step": 12039 + }, + { + "epoch": 0.835995000694348, + "grad_norm": 4.757673428785765, + "learning_rate": 6.890207253897363e-07, + "loss": 0.6074, + "step": 12040 + }, + { + "epoch": 0.8360644354950701, + "grad_norm": 4.280781521027781, + "learning_rate": 6.884511971708413e-07, 
+ "loss": 0.3922, + "step": 12041 + }, + { + "epoch": 0.8361338702957922, + "grad_norm": 4.174368929298065, + "learning_rate": 6.87881887025208e-07, + "loss": 0.4153, + "step": 12042 + }, + { + "epoch": 0.8362033050965144, + "grad_norm": 3.9650001451393253, + "learning_rate": 6.873127949816283e-07, + "loss": 0.56, + "step": 12043 + }, + { + "epoch": 0.8362727398972365, + "grad_norm": 3.9241558193612325, + "learning_rate": 6.867439210688859e-07, + "loss": 0.4091, + "step": 12044 + }, + { + "epoch": 0.8363421746979586, + "grad_norm": 3.3787642787508023, + "learning_rate": 6.861752653157577e-07, + "loss": 0.177, + "step": 12045 + }, + { + "epoch": 0.8364116094986808, + "grad_norm": 3.5483844566645333, + "learning_rate": 6.85606827751002e-07, + "loss": 0.4131, + "step": 12046 + }, + { + "epoch": 0.8364810442994028, + "grad_norm": 3.9525324911297957, + "learning_rate": 6.8503860840337e-07, + "loss": 0.2742, + "step": 12047 + }, + { + "epoch": 0.836550479100125, + "grad_norm": 3.010945775150134, + "learning_rate": 6.844706073016033e-07, + "loss": 0.2514, + "step": 12048 + }, + { + "epoch": 0.8366199139008471, + "grad_norm": 4.072115198668518, + "learning_rate": 6.839028244744311e-07, + "loss": 0.419, + "step": 12049 + }, + { + "epoch": 0.8366893487015692, + "grad_norm": 4.5846470554537495, + "learning_rate": 6.833352599505677e-07, + "loss": 0.6139, + "step": 12050 + }, + { + "epoch": 0.8367587835022914, + "grad_norm": 5.346941741372405, + "learning_rate": 6.827679137587234e-07, + "loss": 0.5601, + "step": 12051 + }, + { + "epoch": 0.8368282183030135, + "grad_norm": 2.637682265777028, + "learning_rate": 6.822007859275931e-07, + "loss": 0.2626, + "step": 12052 + }, + { + "epoch": 0.8368976531037355, + "grad_norm": 4.690070071955338, + "learning_rate": 6.81633876485861e-07, + "loss": 0.3318, + "step": 12053 + }, + { + "epoch": 0.8369670879044577, + "grad_norm": 3.0284677842712475, + "learning_rate": 6.810671854622003e-07, + "loss": 0.2484, + "step": 12054 + }, + { + "epoch": 0.8370365227051798, + "grad_norm": 4.358762279547056, + "learning_rate": 6.805007128852747e-07, + "loss": 0.5125, + "step": 12055 + }, + { + "epoch": 0.837105957505902, + "grad_norm": 4.171724072101502, + "learning_rate": 6.799344587837353e-07, + "loss": 0.3824, + "step": 12056 + }, + { + "epoch": 0.8371753923066241, + "grad_norm": 3.791692917117011, + "learning_rate": 6.793684231862219e-07, + "loss": 0.356, + "step": 12057 + }, + { + "epoch": 0.8372448271073462, + "grad_norm": 5.611619927942887, + "learning_rate": 6.788026061213649e-07, + "loss": 0.4324, + "step": 12058 + }, + { + "epoch": 0.8373142619080683, + "grad_norm": 3.1735309680238206, + "learning_rate": 6.782370076177824e-07, + "loss": 0.382, + "step": 12059 + }, + { + "epoch": 0.8373836967087904, + "grad_norm": 3.2478330976721796, + "learning_rate": 6.776716277040818e-07, + "loss": 0.3389, + "step": 12060 + }, + { + "epoch": 0.8374531315095126, + "grad_norm": 4.812072231550191, + "learning_rate": 6.771064664088578e-07, + "loss": 0.3887, + "step": 12061 + }, + { + "epoch": 0.8375225663102347, + "grad_norm": 5.325633168809928, + "learning_rate": 6.765415237606981e-07, + "loss": 0.4463, + "step": 12062 + }, + { + "epoch": 0.8375920011109568, + "grad_norm": 3.9349652489470284, + "learning_rate": 6.759767997881772e-07, + "loss": 0.4983, + "step": 12063 + }, + { + "epoch": 0.837661435911679, + "grad_norm": 3.058103105730613, + "learning_rate": 6.754122945198543e-07, + "loss": 0.2411, + "step": 12064 + }, + { + "epoch": 0.837730870712401, + "grad_norm": 14.521711583019288, 
+ "learning_rate": 6.748480079842851e-07, + "loss": 0.3483, + "step": 12065 + }, + { + "epoch": 0.8378003055131231, + "grad_norm": 3.2568466962367726, + "learning_rate": 6.742839402100094e-07, + "loss": 0.2923, + "step": 12066 + }, + { + "epoch": 0.8378697403138453, + "grad_norm": 3.2489512572250243, + "learning_rate": 6.737200912255576e-07, + "loss": 0.1745, + "step": 12067 + }, + { + "epoch": 0.8379391751145674, + "grad_norm": 3.739548472000005, + "learning_rate": 6.731564610594476e-07, + "loss": 0.3317, + "step": 12068 + }, + { + "epoch": 0.8380086099152896, + "grad_norm": 4.291314283431788, + "learning_rate": 6.725930497401878e-07, + "loss": 0.3978, + "step": 12069 + }, + { + "epoch": 0.8380780447160117, + "grad_norm": 5.287451332434072, + "learning_rate": 6.720298572962746e-07, + "loss": 0.5458, + "step": 12070 + }, + { + "epoch": 0.8381474795167337, + "grad_norm": 4.259716934834898, + "learning_rate": 6.714668837561938e-07, + "loss": 0.2897, + "step": 12071 + }, + { + "epoch": 0.8382169143174559, + "grad_norm": 2.350690319276187, + "learning_rate": 6.709041291484197e-07, + "loss": 0.2656, + "step": 12072 + }, + { + "epoch": 0.838286349118178, + "grad_norm": 5.432258551318331, + "learning_rate": 6.703415935014157e-07, + "loss": 0.4469, + "step": 12073 + }, + { + "epoch": 0.8383557839189002, + "grad_norm": 4.86093019079665, + "learning_rate": 6.697792768436362e-07, + "loss": 0.6218, + "step": 12074 + }, + { + "epoch": 0.8384252187196223, + "grad_norm": 4.1713014482094515, + "learning_rate": 6.692171792035196e-07, + "loss": 0.5244, + "step": 12075 + }, + { + "epoch": 0.8384946535203444, + "grad_norm": 4.289203952001535, + "learning_rate": 6.686553006094971e-07, + "loss": 0.4485, + "step": 12076 + }, + { + "epoch": 0.8385640883210665, + "grad_norm": 4.690420407153732, + "learning_rate": 6.680936410899896e-07, + "loss": 0.5174, + "step": 12077 + }, + { + "epoch": 0.8386335231217886, + "grad_norm": 5.159691522135387, + "learning_rate": 6.675322006734037e-07, + "loss": 0.3372, + "step": 12078 + }, + { + "epoch": 0.8387029579225108, + "grad_norm": 4.098626387962295, + "learning_rate": 6.669709793881346e-07, + "loss": 0.4957, + "step": 12079 + }, + { + "epoch": 0.8387723927232329, + "grad_norm": 3.835449078123772, + "learning_rate": 6.664099772625715e-07, + "loss": 0.5504, + "step": 12080 + }, + { + "epoch": 0.838841827523955, + "grad_norm": 4.308911008419336, + "learning_rate": 6.658491943250884e-07, + "loss": 0.3094, + "step": 12081 + }, + { + "epoch": 0.8389112623246772, + "grad_norm": 4.277234384550386, + "learning_rate": 6.652886306040485e-07, + "loss": 0.3482, + "step": 12082 + }, + { + "epoch": 0.8389806971253992, + "grad_norm": 4.8260125451735725, + "learning_rate": 6.647282861278042e-07, + "loss": 0.4134, + "step": 12083 + }, + { + "epoch": 0.8390501319261213, + "grad_norm": 6.014046019775549, + "learning_rate": 6.641681609246981e-07, + "loss": 0.6019, + "step": 12084 + }, + { + "epoch": 0.8391195667268435, + "grad_norm": 5.041483322642799, + "learning_rate": 6.636082550230594e-07, + "loss": 0.6887, + "step": 12085 + }, + { + "epoch": 0.8391890015275656, + "grad_norm": 4.0426056686033025, + "learning_rate": 6.630485684512084e-07, + "loss": 0.4724, + "step": 12086 + }, + { + "epoch": 0.8392584363282878, + "grad_norm": 4.038158959735304, + "learning_rate": 6.624891012374535e-07, + "loss": 0.4577, + "step": 12087 + }, + { + "epoch": 0.8393278711290099, + "grad_norm": 5.7976071143839425, + "learning_rate": 6.619298534100916e-07, + "loss": 0.4808, + "step": 12088 + }, + { + "epoch": 
0.8393973059297319, + "grad_norm": 3.7330327426738696, + "learning_rate": 6.613708249974088e-07, + "loss": 0.2623, + "step": 12089 + }, + { + "epoch": 0.8394667407304541, + "grad_norm": 3.752746122838492, + "learning_rate": 6.608120160276788e-07, + "loss": 0.3843, + "step": 12090 + }, + { + "epoch": 0.8395361755311762, + "grad_norm": 2.9563235041244447, + "learning_rate": 6.602534265291683e-07, + "loss": 0.2192, + "step": 12091 + }, + { + "epoch": 0.8396056103318984, + "grad_norm": 4.546821378767146, + "learning_rate": 6.596950565301296e-07, + "loss": 0.5758, + "step": 12092 + }, + { + "epoch": 0.8396750451326205, + "grad_norm": 4.978151533355817, + "learning_rate": 6.591369060588016e-07, + "loss": 0.6097, + "step": 12093 + }, + { + "epoch": 0.8397444799333426, + "grad_norm": 3.635551933708019, + "learning_rate": 6.585789751434179e-07, + "loss": 0.3664, + "step": 12094 + }, + { + "epoch": 0.8398139147340647, + "grad_norm": 3.6984216758539907, + "learning_rate": 6.58021263812198e-07, + "loss": 0.5292, + "step": 12095 + }, + { + "epoch": 0.8398833495347868, + "grad_norm": 4.394357191090792, + "learning_rate": 6.574637720933474e-07, + "loss": 0.57, + "step": 12096 + }, + { + "epoch": 0.8399527843355089, + "grad_norm": 4.777789026229585, + "learning_rate": 6.569065000150659e-07, + "loss": 0.6232, + "step": 12097 + }, + { + "epoch": 0.8400222191362311, + "grad_norm": 3.310080267310657, + "learning_rate": 6.563494476055399e-07, + "loss": 0.2869, + "step": 12098 + }, + { + "epoch": 0.8400916539369532, + "grad_norm": 7.084461522475243, + "learning_rate": 6.557926148929428e-07, + "loss": 0.3753, + "step": 12099 + }, + { + "epoch": 0.8401610887376754, + "grad_norm": 5.432044312904549, + "learning_rate": 6.552360019054399e-07, + "loss": 0.5885, + "step": 12100 + }, + { + "epoch": 0.8402305235383974, + "grad_norm": 3.045909829754521, + "learning_rate": 6.546796086711832e-07, + "loss": 0.1963, + "step": 12101 + }, + { + "epoch": 0.8402999583391195, + "grad_norm": 4.602216628706258, + "learning_rate": 6.541234352183151e-07, + "loss": 0.6552, + "step": 12102 + }, + { + "epoch": 0.8403693931398417, + "grad_norm": 3.9567074885591826, + "learning_rate": 6.535674815749659e-07, + "loss": 0.3036, + "step": 12103 + }, + { + "epoch": 0.8404388279405638, + "grad_norm": 3.804471313268846, + "learning_rate": 6.530117477692555e-07, + "loss": 0.3998, + "step": 12104 + }, + { + "epoch": 0.840508262741286, + "grad_norm": 3.338994418270334, + "learning_rate": 6.524562338292906e-07, + "loss": 0.3007, + "step": 12105 + }, + { + "epoch": 0.8405776975420081, + "grad_norm": 2.8705948591495902, + "learning_rate": 6.519009397831716e-07, + "loss": 0.0975, + "step": 12106 + }, + { + "epoch": 0.8406471323427301, + "grad_norm": 3.9358282297905403, + "learning_rate": 6.513458656589822e-07, + "loss": 0.4511, + "step": 12107 + }, + { + "epoch": 0.8407165671434523, + "grad_norm": 3.9324186438534334, + "learning_rate": 6.507910114847971e-07, + "loss": 0.4765, + "step": 12108 + }, + { + "epoch": 0.8407860019441744, + "grad_norm": 5.115345520558615, + "learning_rate": 6.502363772886833e-07, + "loss": 0.5629, + "step": 12109 + }, + { + "epoch": 0.8408554367448965, + "grad_norm": 2.408524077313909, + "learning_rate": 6.496819630986889e-07, + "loss": 0.215, + "step": 12110 + }, + { + "epoch": 0.8409248715456187, + "grad_norm": 3.685838082085246, + "learning_rate": 6.491277689428594e-07, + "loss": 0.3062, + "step": 12111 + }, + { + "epoch": 0.8409943063463408, + "grad_norm": 5.2676979876396945, + "learning_rate": 6.485737948492237e-07, + 
"loss": 0.5152, + "step": 12112 + }, + { + "epoch": 0.841063741147063, + "grad_norm": 2.6822704984663566, + "learning_rate": 6.48020040845802e-07, + "loss": 0.1888, + "step": 12113 + }, + { + "epoch": 0.841133175947785, + "grad_norm": 4.240257167608544, + "learning_rate": 6.474665069606017e-07, + "loss": 0.3633, + "step": 12114 + }, + { + "epoch": 0.8412026107485071, + "grad_norm": 4.451522345957834, + "learning_rate": 6.469131932216205e-07, + "loss": 0.381, + "step": 12115 + }, + { + "epoch": 0.8412720455492293, + "grad_norm": 4.7147672489306895, + "learning_rate": 6.463600996568443e-07, + "loss": 0.5011, + "step": 12116 + }, + { + "epoch": 0.8413414803499514, + "grad_norm": 3.009393104262819, + "learning_rate": 6.458072262942472e-07, + "loss": 0.198, + "step": 12117 + }, + { + "epoch": 0.8414109151506736, + "grad_norm": 4.4908408696271, + "learning_rate": 6.452545731617937e-07, + "loss": 0.6326, + "step": 12118 + }, + { + "epoch": 0.8414803499513956, + "grad_norm": 3.4053293878096182, + "learning_rate": 6.447021402874354e-07, + "loss": 0.2542, + "step": 12119 + }, + { + "epoch": 0.8415497847521177, + "grad_norm": 3.1337166419601177, + "learning_rate": 6.441499276991165e-07, + "loss": 0.4481, + "step": 12120 + }, + { + "epoch": 0.8416192195528399, + "grad_norm": 3.4176291546192443, + "learning_rate": 6.43597935424764e-07, + "loss": 0.3149, + "step": 12121 + }, + { + "epoch": 0.841688654353562, + "grad_norm": 3.4015739928887854, + "learning_rate": 6.430461634922974e-07, + "loss": 0.2852, + "step": 12122 + }, + { + "epoch": 0.8417580891542841, + "grad_norm": 1.684408820382857, + "learning_rate": 6.424946119296266e-07, + "loss": 0.133, + "step": 12123 + }, + { + "epoch": 0.8418275239550063, + "grad_norm": 4.188034154445266, + "learning_rate": 6.419432807646486e-07, + "loss": 0.5555, + "step": 12124 + }, + { + "epoch": 0.8418969587557283, + "grad_norm": 3.8707605178227267, + "learning_rate": 6.413921700252457e-07, + "loss": 0.3914, + "step": 12125 + }, + { + "epoch": 0.8419663935564505, + "grad_norm": 3.7306760827278183, + "learning_rate": 6.40841279739296e-07, + "loss": 0.3757, + "step": 12126 + }, + { + "epoch": 0.8420358283571726, + "grad_norm": 4.445559439800578, + "learning_rate": 6.402906099346628e-07, + "loss": 0.5274, + "step": 12127 + }, + { + "epoch": 0.8421052631578947, + "grad_norm": 3.1581581874723645, + "learning_rate": 6.397401606391951e-07, + "loss": 0.2841, + "step": 12128 + }, + { + "epoch": 0.8421746979586169, + "grad_norm": 3.3018705008585476, + "learning_rate": 6.391899318807371e-07, + "loss": 0.4344, + "step": 12129 + }, + { + "epoch": 0.842244132759339, + "grad_norm": 10.301867014811508, + "learning_rate": 6.386399236871177e-07, + "loss": 0.6764, + "step": 12130 + }, + { + "epoch": 0.8423135675600611, + "grad_norm": 3.310240825426331, + "learning_rate": 6.380901360861557e-07, + "loss": 0.31, + "step": 12131 + }, + { + "epoch": 0.8423830023607832, + "grad_norm": 3.7061237611320568, + "learning_rate": 6.375405691056585e-07, + "loss": 0.3972, + "step": 12132 + }, + { + "epoch": 0.8424524371615053, + "grad_norm": 4.834204590850926, + "learning_rate": 6.369912227734232e-07, + "loss": 0.6174, + "step": 12133 + }, + { + "epoch": 0.8425218719622275, + "grad_norm": 3.4793328933191403, + "learning_rate": 6.364420971172347e-07, + "loss": 0.3118, + "step": 12134 + }, + { + "epoch": 0.8425913067629496, + "grad_norm": 3.695846156112402, + "learning_rate": 6.358931921648675e-07, + "loss": 0.3756, + "step": 12135 + }, + { + "epoch": 0.8426607415636718, + "grad_norm": 4.584199756503634, 
+ "learning_rate": 6.353445079440839e-07, + "loss": 0.4908, + "step": 12136 + }, + { + "epoch": 0.8427301763643938, + "grad_norm": 4.018206814414804, + "learning_rate": 6.347960444826357e-07, + "loss": 0.3748, + "step": 12137 + }, + { + "epoch": 0.8427996111651159, + "grad_norm": 3.8202765408581594, + "learning_rate": 6.342478018082654e-07, + "loss": 0.3246, + "step": 12138 + }, + { + "epoch": 0.8428690459658381, + "grad_norm": 4.150335146622992, + "learning_rate": 6.336997799487004e-07, + "loss": 0.3827, + "step": 12139 + }, + { + "epoch": 0.8429384807665602, + "grad_norm": 3.3344818365189304, + "learning_rate": 6.331519789316587e-07, + "loss": 0.3205, + "step": 12140 + }, + { + "epoch": 0.8430079155672823, + "grad_norm": 4.195926009165619, + "learning_rate": 6.326043987848496e-07, + "loss": 0.6112, + "step": 12141 + }, + { + "epoch": 0.8430773503680045, + "grad_norm": 3.199569917804556, + "learning_rate": 6.320570395359677e-07, + "loss": 0.4363, + "step": 12142 + }, + { + "epoch": 0.8431467851687265, + "grad_norm": 1.6302477172729717, + "learning_rate": 6.315099012126979e-07, + "loss": 0.1146, + "step": 12143 + }, + { + "epoch": 0.8432162199694487, + "grad_norm": 4.333952192288873, + "learning_rate": 6.309629838427145e-07, + "loss": 0.3937, + "step": 12144 + }, + { + "epoch": 0.8432856547701708, + "grad_norm": 5.078167469995292, + "learning_rate": 6.304162874536796e-07, + "loss": 0.5036, + "step": 12145 + }, + { + "epoch": 0.8433550895708929, + "grad_norm": 3.838706986443937, + "learning_rate": 6.298698120732439e-07, + "loss": 0.4605, + "step": 12146 + }, + { + "epoch": 0.8434245243716151, + "grad_norm": 4.1165877646389495, + "learning_rate": 6.29323557729048e-07, + "loss": 0.4262, + "step": 12147 + }, + { + "epoch": 0.8434939591723372, + "grad_norm": 4.440287480076317, + "learning_rate": 6.28777524448721e-07, + "loss": 0.403, + "step": 12148 + }, + { + "epoch": 0.8435633939730594, + "grad_norm": 4.840428058227124, + "learning_rate": 6.282317122598802e-07, + "loss": 0.3905, + "step": 12149 + }, + { + "epoch": 0.8436328287737814, + "grad_norm": 4.525489943046081, + "learning_rate": 6.276861211901325e-07, + "loss": 0.4227, + "step": 12150 + }, + { + "epoch": 0.8437022635745035, + "grad_norm": 3.056840340009979, + "learning_rate": 6.271407512670724e-07, + "loss": 0.2631, + "step": 12151 + }, + { + "epoch": 0.8437716983752257, + "grad_norm": 4.112520535246105, + "learning_rate": 6.265956025182867e-07, + "loss": 0.469, + "step": 12152 + }, + { + "epoch": 0.8438411331759478, + "grad_norm": 3.348017375423266, + "learning_rate": 6.260506749713458e-07, + "loss": 0.355, + "step": 12153 + }, + { + "epoch": 0.8439105679766699, + "grad_norm": 4.5898083309624935, + "learning_rate": 6.255059686538107e-07, + "loss": 0.5462, + "step": 12154 + }, + { + "epoch": 0.843980002777392, + "grad_norm": 4.084106845146247, + "learning_rate": 6.24961483593235e-07, + "loss": 0.4341, + "step": 12155 + }, + { + "epoch": 0.8440494375781141, + "grad_norm": 2.7116054805180663, + "learning_rate": 6.244172198171583e-07, + "loss": 0.1943, + "step": 12156 + }, + { + "epoch": 0.8441188723788363, + "grad_norm": 4.046027273392892, + "learning_rate": 6.238731773531043e-07, + "loss": 0.3558, + "step": 12157 + }, + { + "epoch": 0.8441883071795584, + "grad_norm": 4.456152148524055, + "learning_rate": 6.233293562285947e-07, + "loss": 0.6335, + "step": 12158 + }, + { + "epoch": 0.8442577419802805, + "grad_norm": 4.1314444734569555, + "learning_rate": 6.227857564711349e-07, + "loss": 0.3647, + "step": 12159 + }, + { + "epoch": 
0.8443271767810027, + "grad_norm": 4.923585305556519, + "learning_rate": 6.22242378108216e-07, + "loss": 0.5856, + "step": 12160 + }, + { + "epoch": 0.8443966115817247, + "grad_norm": 3.1566700945344124, + "learning_rate": 6.216992211673251e-07, + "loss": 0.1857, + "step": 12161 + }, + { + "epoch": 0.8444660463824469, + "grad_norm": 4.453711800775669, + "learning_rate": 6.211562856759329e-07, + "loss": 0.4809, + "step": 12162 + }, + { + "epoch": 0.844535481183169, + "grad_norm": 3.564672612322839, + "learning_rate": 6.206135716615008e-07, + "loss": 0.4273, + "step": 12163 + }, + { + "epoch": 0.8446049159838911, + "grad_norm": 4.762640826053146, + "learning_rate": 6.200710791514786e-07, + "loss": 0.4764, + "step": 12164 + }, + { + "epoch": 0.8446743507846133, + "grad_norm": 3.9651077330421347, + "learning_rate": 6.195288081733053e-07, + "loss": 0.3398, + "step": 12165 + }, + { + "epoch": 0.8447437855853354, + "grad_norm": 4.306728738360438, + "learning_rate": 6.189867587544069e-07, + "loss": 0.4535, + "step": 12166 + }, + { + "epoch": 0.8448132203860574, + "grad_norm": 4.408322192206743, + "learning_rate": 6.184449309222029e-07, + "loss": 0.6838, + "step": 12167 + }, + { + "epoch": 0.8448826551867796, + "grad_norm": 3.91013212953302, + "learning_rate": 6.17903324704095e-07, + "loss": 0.2568, + "step": 12168 + }, + { + "epoch": 0.8449520899875017, + "grad_norm": 3.7460925152929634, + "learning_rate": 6.173619401274772e-07, + "loss": 0.2804, + "step": 12169 + }, + { + "epoch": 0.8450215247882239, + "grad_norm": 3.915106233738582, + "learning_rate": 6.168207772197354e-07, + "loss": 0.4011, + "step": 12170 + }, + { + "epoch": 0.845090959588946, + "grad_norm": 3.9746618336392054, + "learning_rate": 6.162798360082367e-07, + "loss": 0.2284, + "step": 12171 + }, + { + "epoch": 0.8451603943896681, + "grad_norm": 4.499429806141879, + "learning_rate": 6.157391165203447e-07, + "loss": 0.4028, + "step": 12172 + }, + { + "epoch": 0.8452298291903902, + "grad_norm": 4.564079409016079, + "learning_rate": 6.151986187834064e-07, + "loss": 0.3923, + "step": 12173 + }, + { + "epoch": 0.8452992639911123, + "grad_norm": 3.1485269575232784, + "learning_rate": 6.14658342824761e-07, + "loss": 0.2613, + "step": 12174 + }, + { + "epoch": 0.8453686987918345, + "grad_norm": 4.917737945703657, + "learning_rate": 6.14118288671734e-07, + "loss": 0.3409, + "step": 12175 + }, + { + "epoch": 0.8454381335925566, + "grad_norm": 4.312264302844288, + "learning_rate": 6.135784563516406e-07, + "loss": 0.4679, + "step": 12176 + }, + { + "epoch": 0.8455075683932787, + "grad_norm": 5.401662846096275, + "learning_rate": 6.130388458917857e-07, + "loss": 0.5964, + "step": 12177 + }, + { + "epoch": 0.8455770031940009, + "grad_norm": 3.6706491459442687, + "learning_rate": 6.124994573194614e-07, + "loss": 0.5256, + "step": 12178 + }, + { + "epoch": 0.8456464379947229, + "grad_norm": 5.132920500794997, + "learning_rate": 6.119602906619504e-07, + "loss": 0.6762, + "step": 12179 + }, + { + "epoch": 0.845715872795445, + "grad_norm": 3.9090582687781508, + "learning_rate": 6.114213459465207e-07, + "loss": 0.3513, + "step": 12180 + }, + { + "epoch": 0.8457853075961672, + "grad_norm": 4.71666456538568, + "learning_rate": 6.108826232004362e-07, + "loss": 0.5316, + "step": 12181 + }, + { + "epoch": 0.8458547423968893, + "grad_norm": 3.163182654861291, + "learning_rate": 6.103441224509404e-07, + "loss": 0.2611, + "step": 12182 + }, + { + "epoch": 0.8459241771976115, + "grad_norm": 3.498578759119988, + "learning_rate": 6.098058437252708e-07, + "loss": 
0.2379, + "step": 12183 + }, + { + "epoch": 0.8459936119983336, + "grad_norm": 4.715978149635673, + "learning_rate": 6.092677870506553e-07, + "loss": 0.5607, + "step": 12184 + }, + { + "epoch": 0.8460630467990556, + "grad_norm": 2.811158546134452, + "learning_rate": 6.087299524543061e-07, + "loss": 0.2497, + "step": 12185 + }, + { + "epoch": 0.8461324815997778, + "grad_norm": 3.967570890139553, + "learning_rate": 6.08192339963426e-07, + "loss": 0.4508, + "step": 12186 + }, + { + "epoch": 0.8462019164004999, + "grad_norm": 4.445040087713319, + "learning_rate": 6.076549496052086e-07, + "loss": 0.3412, + "step": 12187 + }, + { + "epoch": 0.8462713512012221, + "grad_norm": 4.841178762659162, + "learning_rate": 6.071177814068347e-07, + "loss": 0.555, + "step": 12188 + }, + { + "epoch": 0.8463407860019442, + "grad_norm": 3.9330242301644835, + "learning_rate": 6.065808353954706e-07, + "loss": 0.3238, + "step": 12189 + }, + { + "epoch": 0.8464102208026663, + "grad_norm": 4.060568700525655, + "learning_rate": 6.060441115982768e-07, + "loss": 0.4425, + "step": 12190 + }, + { + "epoch": 0.8464796556033884, + "grad_norm": 3.0285159488702336, + "learning_rate": 6.055076100424001e-07, + "loss": 0.4352, + "step": 12191 + }, + { + "epoch": 0.8465490904041105, + "grad_norm": 5.030166507950769, + "learning_rate": 6.049713307549759e-07, + "loss": 0.6109, + "step": 12192 + }, + { + "epoch": 0.8466185252048327, + "grad_norm": 3.7222376207847687, + "learning_rate": 6.044352737631281e-07, + "loss": 0.3208, + "step": 12193 + }, + { + "epoch": 0.8466879600055548, + "grad_norm": 3.5891667223535757, + "learning_rate": 6.038994390939706e-07, + "loss": 0.3951, + "step": 12194 + }, + { + "epoch": 0.8467573948062769, + "grad_norm": 4.92483633963376, + "learning_rate": 6.033638267746045e-07, + "loss": 0.3356, + "step": 12195 + }, + { + "epoch": 0.8468268296069991, + "grad_norm": 2.943420523708218, + "learning_rate": 6.028284368321214e-07, + "loss": 0.2848, + "step": 12196 + }, + { + "epoch": 0.8468962644077211, + "grad_norm": 4.739292868027685, + "learning_rate": 6.022932692936001e-07, + "loss": 0.5303, + "step": 12197 + }, + { + "epoch": 0.8469656992084432, + "grad_norm": 4.140369089546648, + "learning_rate": 6.01758324186108e-07, + "loss": 0.3831, + "step": 12198 + }, + { + "epoch": 0.8470351340091654, + "grad_norm": 4.6058156256297575, + "learning_rate": 6.012236015367046e-07, + "loss": 0.6212, + "step": 12199 + }, + { + "epoch": 0.8471045688098875, + "grad_norm": 3.605251457716223, + "learning_rate": 6.00689101372432e-07, + "loss": 0.213, + "step": 12200 + }, + { + "epoch": 0.8471740036106097, + "grad_norm": 4.8381112772977435, + "learning_rate": 6.001548237203275e-07, + "loss": 0.6777, + "step": 12201 + }, + { + "epoch": 0.8472434384113318, + "grad_norm": 4.121756155822516, + "learning_rate": 5.996207686074146e-07, + "loss": 0.4533, + "step": 12202 + }, + { + "epoch": 0.8473128732120538, + "grad_norm": 4.573705556978793, + "learning_rate": 5.99086936060701e-07, + "loss": 0.5476, + "step": 12203 + }, + { + "epoch": 0.847382308012776, + "grad_norm": 3.4718319727237623, + "learning_rate": 5.985533261071919e-07, + "loss": 0.3684, + "step": 12204 + }, + { + "epoch": 0.8474517428134981, + "grad_norm": 5.669360120154309, + "learning_rate": 5.98019938773875e-07, + "loss": 0.2917, + "step": 12205 + }, + { + "epoch": 0.8475211776142203, + "grad_norm": 5.523199317575706, + "learning_rate": 5.974867740877282e-07, + "loss": 0.9849, + "step": 12206 + }, + { + "epoch": 0.8475906124149424, + "grad_norm": 3.4667141399365393, + 
"learning_rate": 5.969538320757185e-07, + "loss": 0.2452, + "step": 12207 + }, + { + "epoch": 0.8476600472156645, + "grad_norm": 4.070831459619358, + "learning_rate": 5.964211127648018e-07, + "loss": 0.634, + "step": 12208 + }, + { + "epoch": 0.8477294820163866, + "grad_norm": 3.798694521408924, + "learning_rate": 5.958886161819222e-07, + "loss": 0.5122, + "step": 12209 + }, + { + "epoch": 0.8477989168171087, + "grad_norm": 4.021971530101663, + "learning_rate": 5.953563423540126e-07, + "loss": 0.5093, + "step": 12210 + }, + { + "epoch": 0.8478683516178308, + "grad_norm": 3.712045113595136, + "learning_rate": 5.948242913079955e-07, + "loss": 0.3583, + "step": 12211 + }, + { + "epoch": 0.847937786418553, + "grad_norm": 3.2154494100360065, + "learning_rate": 5.942924630707797e-07, + "loss": 0.3067, + "step": 12212 + }, + { + "epoch": 0.8480072212192751, + "grad_norm": 4.096350515960314, + "learning_rate": 5.937608576692678e-07, + "loss": 0.2565, + "step": 12213 + }, + { + "epoch": 0.8480766560199973, + "grad_norm": 3.6223500079978774, + "learning_rate": 5.932294751303442e-07, + "loss": 0.4931, + "step": 12214 + }, + { + "epoch": 0.8481460908207193, + "grad_norm": 4.028436579117262, + "learning_rate": 5.926983154808863e-07, + "loss": 0.4036, + "step": 12215 + }, + { + "epoch": 0.8482155256214414, + "grad_norm": 1.8809456631074808, + "learning_rate": 5.921673787477628e-07, + "loss": 0.0989, + "step": 12216 + }, + { + "epoch": 0.8482849604221636, + "grad_norm": 3.6153824689662235, + "learning_rate": 5.916366649578242e-07, + "loss": 0.2135, + "step": 12217 + }, + { + "epoch": 0.8483543952228857, + "grad_norm": 4.028119784124289, + "learning_rate": 5.911061741379137e-07, + "loss": 0.3683, + "step": 12218 + }, + { + "epoch": 0.8484238300236079, + "grad_norm": 4.0729897327189555, + "learning_rate": 5.905759063148653e-07, + "loss": 0.3428, + "step": 12219 + }, + { + "epoch": 0.84849326482433, + "grad_norm": 2.3999245601205366, + "learning_rate": 5.900458615154986e-07, + "loss": 0.1935, + "step": 12220 + }, + { + "epoch": 0.848562699625052, + "grad_norm": 4.071949364961603, + "learning_rate": 5.8951603976662e-07, + "loss": 0.647, + "step": 12221 + }, + { + "epoch": 0.8486321344257742, + "grad_norm": 3.6461806022547014, + "learning_rate": 5.889864410950308e-07, + "loss": 0.3233, + "step": 12222 + }, + { + "epoch": 0.8487015692264963, + "grad_norm": 3.9312077660752838, + "learning_rate": 5.884570655275151e-07, + "loss": 0.4189, + "step": 12223 + }, + { + "epoch": 0.8487710040272184, + "grad_norm": 3.752492892736189, + "learning_rate": 5.879279130908499e-07, + "loss": 0.4394, + "step": 12224 + }, + { + "epoch": 0.8488404388279406, + "grad_norm": 4.192219970280358, + "learning_rate": 5.873989838117977e-07, + "loss": 0.5021, + "step": 12225 + }, + { + "epoch": 0.8489098736286627, + "grad_norm": 4.6115288604517675, + "learning_rate": 5.86870277717112e-07, + "loss": 0.5447, + "step": 12226 + }, + { + "epoch": 0.8489793084293848, + "grad_norm": 4.565363770737357, + "learning_rate": 5.863417948335337e-07, + "loss": 0.4384, + "step": 12227 + }, + { + "epoch": 0.8490487432301069, + "grad_norm": 3.8487643089359826, + "learning_rate": 5.858135351877931e-07, + "loss": 0.3198, + "step": 12228 + }, + { + "epoch": 0.849118178030829, + "grad_norm": 3.3054897375667784, + "learning_rate": 5.852854988066087e-07, + "loss": 0.5101, + "step": 12229 + }, + { + "epoch": 0.8491876128315512, + "grad_norm": 3.6064656371773283, + "learning_rate": 5.847576857166875e-07, + "loss": 0.323, + "step": 12230 + }, + { + "epoch": 
0.8492570476322733, + "grad_norm": 4.279862648788139, + "learning_rate": 5.842300959447283e-07, + "loss": 0.5092, + "step": 12231 + }, + { + "epoch": 0.8493264824329955, + "grad_norm": 4.780175623324486, + "learning_rate": 5.83702729517412e-07, + "loss": 0.4791, + "step": 12232 + }, + { + "epoch": 0.8493959172337175, + "grad_norm": 3.239389872357473, + "learning_rate": 5.831755864614152e-07, + "loss": 0.2071, + "step": 12233 + }, + { + "epoch": 0.8494653520344396, + "grad_norm": 4.5088513698841455, + "learning_rate": 5.826486668034003e-07, + "loss": 0.5925, + "step": 12234 + }, + { + "epoch": 0.8495347868351618, + "grad_norm": 4.720196593058774, + "learning_rate": 5.821219705700155e-07, + "loss": 0.5545, + "step": 12235 + }, + { + "epoch": 0.8496042216358839, + "grad_norm": 4.2691571442845735, + "learning_rate": 5.815954977879029e-07, + "loss": 0.5947, + "step": 12236 + }, + { + "epoch": 0.849673656436606, + "grad_norm": 5.1415768518600276, + "learning_rate": 5.810692484836905e-07, + "loss": 0.5419, + "step": 12237 + }, + { + "epoch": 0.8497430912373282, + "grad_norm": 5.198268022727231, + "learning_rate": 5.805432226839952e-07, + "loss": 0.446, + "step": 12238 + }, + { + "epoch": 0.8498125260380502, + "grad_norm": 4.61904349401658, + "learning_rate": 5.800174204154224e-07, + "loss": 0.4455, + "step": 12239 + }, + { + "epoch": 0.8498819608387724, + "grad_norm": 4.1301285351195265, + "learning_rate": 5.794918417045669e-07, + "loss": 0.4342, + "step": 12240 + }, + { + "epoch": 0.8499513956394945, + "grad_norm": 3.3955045439556173, + "learning_rate": 5.789664865780126e-07, + "loss": 0.3304, + "step": 12241 + }, + { + "epoch": 0.8500208304402166, + "grad_norm": 2.987779453931659, + "learning_rate": 5.784413550623297e-07, + "loss": 0.2258, + "step": 12242 + }, + { + "epoch": 0.8500902652409388, + "grad_norm": 4.349686047304896, + "learning_rate": 5.779164471840803e-07, + "loss": 0.3548, + "step": 12243 + }, + { + "epoch": 0.8501597000416609, + "grad_norm": 3.327286912739435, + "learning_rate": 5.773917629698122e-07, + "loss": 0.3871, + "step": 12244 + }, + { + "epoch": 0.850229134842383, + "grad_norm": 3.948618879192863, + "learning_rate": 5.768673024460658e-07, + "loss": 0.5174, + "step": 12245 + }, + { + "epoch": 0.8502985696431051, + "grad_norm": 3.862347198483988, + "learning_rate": 5.763430656393654e-07, + "loss": 0.5158, + "step": 12246 + }, + { + "epoch": 0.8503680044438272, + "grad_norm": 4.317379985292529, + "learning_rate": 5.758190525762259e-07, + "loss": 0.5908, + "step": 12247 + }, + { + "epoch": 0.8504374392445494, + "grad_norm": 5.090662349349204, + "learning_rate": 5.752952632831538e-07, + "loss": 0.5552, + "step": 12248 + }, + { + "epoch": 0.8505068740452715, + "grad_norm": 3.975057580438571, + "learning_rate": 5.747716977866408e-07, + "loss": 0.3974, + "step": 12249 + }, + { + "epoch": 0.8505763088459937, + "grad_norm": 4.071979506423305, + "learning_rate": 5.742483561131662e-07, + "loss": 0.4792, + "step": 12250 + }, + { + "epoch": 0.8506457436467157, + "grad_norm": 4.314378412000532, + "learning_rate": 5.737252382892028e-07, + "loss": 0.5487, + "step": 12251 + }, + { + "epoch": 0.8507151784474378, + "grad_norm": 2.984807290709036, + "learning_rate": 5.732023443412077e-07, + "loss": 0.3208, + "step": 12252 + }, + { + "epoch": 0.85078461324816, + "grad_norm": 4.8416319603725, + "learning_rate": 5.726796742956292e-07, + "loss": 0.5465, + "step": 12253 + }, + { + "epoch": 0.8508540480488821, + "grad_norm": 4.860150716542382, + "learning_rate": 5.721572281789028e-07, + "loss": 
0.4569, + "step": 12254 + }, + { + "epoch": 0.8509234828496042, + "grad_norm": 3.361643017469316, + "learning_rate": 5.71635006017453e-07, + "loss": 0.3839, + "step": 12255 + }, + { + "epoch": 0.8509929176503264, + "grad_norm": 4.177977318689177, + "learning_rate": 5.711130078376942e-07, + "loss": 0.387, + "step": 12256 + }, + { + "epoch": 0.8510623524510484, + "grad_norm": 3.5248143272627908, + "learning_rate": 5.705912336660275e-07, + "loss": 0.43, + "step": 12257 + }, + { + "epoch": 0.8511317872517706, + "grad_norm": 6.064225970056006, + "learning_rate": 5.700696835288438e-07, + "loss": 0.5049, + "step": 12258 + }, + { + "epoch": 0.8512012220524927, + "grad_norm": 4.565825394973027, + "learning_rate": 5.695483574525229e-07, + "loss": 0.3958, + "step": 12259 + }, + { + "epoch": 0.8512706568532148, + "grad_norm": 5.298029660504252, + "learning_rate": 5.69027255463433e-07, + "loss": 0.5505, + "step": 12260 + }, + { + "epoch": 0.851340091653937, + "grad_norm": 5.633766162176873, + "learning_rate": 5.68506377587929e-07, + "loss": 0.7292, + "step": 12261 + }, + { + "epoch": 0.8514095264546591, + "grad_norm": 5.929971081247732, + "learning_rate": 5.67985723852359e-07, + "loss": 0.3402, + "step": 12262 + }, + { + "epoch": 0.8514789612553813, + "grad_norm": 3.344784161895592, + "learning_rate": 5.674652942830572e-07, + "loss": 0.4162, + "step": 12263 + }, + { + "epoch": 0.8515483960561033, + "grad_norm": 3.6895928471744974, + "learning_rate": 5.669450889063427e-07, + "loss": 0.3938, + "step": 12264 + }, + { + "epoch": 0.8516178308568254, + "grad_norm": 3.983689101468244, + "learning_rate": 5.664251077485305e-07, + "loss": 0.2958, + "step": 12265 + }, + { + "epoch": 0.8516872656575476, + "grad_norm": 3.5481353379054132, + "learning_rate": 5.659053508359203e-07, + "loss": 0.398, + "step": 12266 + }, + { + "epoch": 0.8517567004582697, + "grad_norm": 2.6459195208619652, + "learning_rate": 5.65385818194798e-07, + "loss": 0.1637, + "step": 12267 + }, + { + "epoch": 0.8518261352589918, + "grad_norm": 3.788530726971908, + "learning_rate": 5.648665098514439e-07, + "loss": 0.4592, + "step": 12268 + }, + { + "epoch": 0.851895570059714, + "grad_norm": 4.23138568261878, + "learning_rate": 5.64347425832123e-07, + "loss": 0.6579, + "step": 12269 + }, + { + "epoch": 0.851965004860436, + "grad_norm": 3.839308367714743, + "learning_rate": 5.638285661630899e-07, + "loss": 0.4547, + "step": 12270 + }, + { + "epoch": 0.8520344396611582, + "grad_norm": 4.086932317361261, + "learning_rate": 5.633099308705886e-07, + "loss": 0.4562, + "step": 12271 + }, + { + "epoch": 0.8521038744618803, + "grad_norm": 4.421305315132611, + "learning_rate": 5.627915199808498e-07, + "loss": 0.4116, + "step": 12272 + }, + { + "epoch": 0.8521733092626024, + "grad_norm": 3.8091753897441585, + "learning_rate": 5.622733335200941e-07, + "loss": 0.6617, + "step": 12273 + }, + { + "epoch": 0.8522427440633246, + "grad_norm": 3.519101651242471, + "learning_rate": 5.61755371514534e-07, + "loss": 0.4311, + "step": 12274 + }, + { + "epoch": 0.8523121788640466, + "grad_norm": 3.6878332049447606, + "learning_rate": 5.612376339903636e-07, + "loss": 0.2974, + "step": 12275 + }, + { + "epoch": 0.8523816136647688, + "grad_norm": 4.630776964236321, + "learning_rate": 5.607201209737701e-07, + "loss": 0.4619, + "step": 12276 + }, + { + "epoch": 0.8524510484654909, + "grad_norm": 3.9828606842456584, + "learning_rate": 5.602028324909315e-07, + "loss": 0.5292, + "step": 12277 + }, + { + "epoch": 0.852520483266213, + "grad_norm": 4.284576995416832, + 
"learning_rate": 5.596857685680085e-07, + "loss": 0.5218, + "step": 12278 + }, + { + "epoch": 0.8525899180669352, + "grad_norm": 15.363578501773345, + "learning_rate": 5.591689292311536e-07, + "loss": 0.6177, + "step": 12279 + }, + { + "epoch": 0.8526593528676573, + "grad_norm": 3.227742148113535, + "learning_rate": 5.586523145065103e-07, + "loss": 0.1368, + "step": 12280 + }, + { + "epoch": 0.8527287876683793, + "grad_norm": 3.3265515992038344, + "learning_rate": 5.581359244202073e-07, + "loss": 0.3771, + "step": 12281 + }, + { + "epoch": 0.8527982224691015, + "grad_norm": 4.2286324474483346, + "learning_rate": 5.576197589983629e-07, + "loss": 0.4629, + "step": 12282 + }, + { + "epoch": 0.8528676572698236, + "grad_norm": 3.598579892785284, + "learning_rate": 5.57103818267084e-07, + "loss": 0.3721, + "step": 12283 + }, + { + "epoch": 0.8529370920705458, + "grad_norm": 5.170346212615741, + "learning_rate": 5.565881022524667e-07, + "loss": 0.5754, + "step": 12284 + }, + { + "epoch": 0.8530065268712679, + "grad_norm": 3.3997444710626423, + "learning_rate": 5.560726109805947e-07, + "loss": 0.3155, + "step": 12285 + }, + { + "epoch": 0.85307596167199, + "grad_norm": 4.791155592801918, + "learning_rate": 5.555573444775419e-07, + "loss": 0.6648, + "step": 12286 + }, + { + "epoch": 0.8531453964727121, + "grad_norm": 4.327778893074458, + "learning_rate": 5.550423027693697e-07, + "loss": 0.4721, + "step": 12287 + }, + { + "epoch": 0.8532148312734342, + "grad_norm": 3.6147258471487946, + "learning_rate": 5.54527485882127e-07, + "loss": 0.3952, + "step": 12288 + }, + { + "epoch": 0.8532842660741564, + "grad_norm": 3.3165743663401894, + "learning_rate": 5.540128938418543e-07, + "loss": 0.2843, + "step": 12289 + }, + { + "epoch": 0.8533537008748785, + "grad_norm": 3.368433799948595, + "learning_rate": 5.534985266745774e-07, + "loss": 0.3802, + "step": 12290 + }, + { + "epoch": 0.8534231356756006, + "grad_norm": 3.933978091796547, + "learning_rate": 5.529843844063154e-07, + "loss": 0.5325, + "step": 12291 + }, + { + "epoch": 0.8534925704763228, + "grad_norm": 3.43378183232829, + "learning_rate": 5.524704670630704e-07, + "loss": 0.3722, + "step": 12292 + }, + { + "epoch": 0.8535620052770448, + "grad_norm": 3.738411061960536, + "learning_rate": 5.519567746708355e-07, + "loss": 0.3663, + "step": 12293 + }, + { + "epoch": 0.8536314400777669, + "grad_norm": 3.9691906990800363, + "learning_rate": 5.514433072555941e-07, + "loss": 0.4228, + "step": 12294 + }, + { + "epoch": 0.8537008748784891, + "grad_norm": 5.46245184185958, + "learning_rate": 5.509300648433174e-07, + "loss": 0.51, + "step": 12295 + }, + { + "epoch": 0.8537703096792112, + "grad_norm": 3.277630844259243, + "learning_rate": 5.504170474599618e-07, + "loss": 0.3504, + "step": 12296 + }, + { + "epoch": 0.8538397444799334, + "grad_norm": 3.4549482049710694, + "learning_rate": 5.499042551314776e-07, + "loss": 0.4014, + "step": 12297 + }, + { + "epoch": 0.8539091792806555, + "grad_norm": 4.019008467868914, + "learning_rate": 5.493916878838013e-07, + "loss": 0.5574, + "step": 12298 + }, + { + "epoch": 0.8539786140813775, + "grad_norm": 4.21839809196305, + "learning_rate": 5.488793457428566e-07, + "loss": 0.4218, + "step": 12299 + }, + { + "epoch": 0.8540480488820997, + "grad_norm": 2.4801310627169664, + "learning_rate": 5.483672287345576e-07, + "loss": 0.1504, + "step": 12300 + }, + { + "epoch": 0.8541174836828218, + "grad_norm": 3.6559023850592203, + "learning_rate": 5.478553368848072e-07, + "loss": 0.473, + "step": 12301 + }, + { + "epoch": 
0.854186918483544, + "grad_norm": 5.42300207878597, + "learning_rate": 5.473436702194956e-07, + "loss": 0.6136, + "step": 12302 + }, + { + "epoch": 0.8542563532842661, + "grad_norm": 3.3119062527166507, + "learning_rate": 5.468322287645034e-07, + "loss": 0.3005, + "step": 12303 + }, + { + "epoch": 0.8543257880849882, + "grad_norm": 3.388901200465658, + "learning_rate": 5.463210125456969e-07, + "loss": 0.1815, + "step": 12304 + }, + { + "epoch": 0.8543952228857103, + "grad_norm": 3.9925382658988697, + "learning_rate": 5.458100215889339e-07, + "loss": 0.4848, + "step": 12305 + }, + { + "epoch": 0.8544646576864324, + "grad_norm": 4.103495620680537, + "learning_rate": 5.45299255920061e-07, + "loss": 0.4879, + "step": 12306 + }, + { + "epoch": 0.8545340924871546, + "grad_norm": 6.961395361230468, + "learning_rate": 5.447887155649101e-07, + "loss": 0.7065, + "step": 12307 + }, + { + "epoch": 0.8546035272878767, + "grad_norm": 3.5705730178129156, + "learning_rate": 5.442784005493041e-07, + "loss": 0.3166, + "step": 12308 + }, + { + "epoch": 0.8546729620885988, + "grad_norm": 4.007083496805182, + "learning_rate": 5.43768310899056e-07, + "loss": 0.4543, + "step": 12309 + }, + { + "epoch": 0.854742396889321, + "grad_norm": 3.7626892869195108, + "learning_rate": 5.432584466399632e-07, + "loss": 0.4297, + "step": 12310 + }, + { + "epoch": 0.854811831690043, + "grad_norm": 5.48392820069255, + "learning_rate": 5.427488077978143e-07, + "loss": 0.4668, + "step": 12311 + }, + { + "epoch": 0.8548812664907651, + "grad_norm": 4.754016039017512, + "learning_rate": 5.422393943983878e-07, + "loss": 0.4999, + "step": 12312 + }, + { + "epoch": 0.8549507012914873, + "grad_norm": 3.5486684278149117, + "learning_rate": 5.417302064674485e-07, + "loss": 0.2995, + "step": 12313 + }, + { + "epoch": 0.8550201360922094, + "grad_norm": 4.5036436618144435, + "learning_rate": 5.412212440307501e-07, + "loss": 0.3581, + "step": 12314 + }, + { + "epoch": 0.8550895708929316, + "grad_norm": 4.509403789310186, + "learning_rate": 5.407125071140363e-07, + "loss": 0.4214, + "step": 12315 + }, + { + "epoch": 0.8551590056936537, + "grad_norm": 2.6874951927372557, + "learning_rate": 5.402039957430378e-07, + "loss": 0.282, + "step": 12316 + }, + { + "epoch": 0.8552284404943757, + "grad_norm": 4.583547984979454, + "learning_rate": 5.396957099434741e-07, + "loss": 0.5758, + "step": 12317 + }, + { + "epoch": 0.8552978752950979, + "grad_norm": 4.261571638624774, + "learning_rate": 5.391876497410548e-07, + "loss": 0.4011, + "step": 12318 + }, + { + "epoch": 0.85536731009582, + "grad_norm": 3.932238622598854, + "learning_rate": 5.386798151614764e-07, + "loss": 0.4097, + "step": 12319 + }, + { + "epoch": 0.8554367448965422, + "grad_norm": 3.895268006392734, + "learning_rate": 5.38172206230424e-07, + "loss": 0.5534, + "step": 12320 + }, + { + "epoch": 0.8555061796972643, + "grad_norm": 3.509724360885379, + "learning_rate": 5.376648229735731e-07, + "loss": 0.3486, + "step": 12321 + }, + { + "epoch": 0.8555756144979864, + "grad_norm": 5.046432003675272, + "learning_rate": 5.37157665416585e-07, + "loss": 0.6183, + "step": 12322 + }, + { + "epoch": 0.8556450492987085, + "grad_norm": 3.8420358184632035, + "learning_rate": 5.366507335851129e-07, + "loss": 0.476, + "step": 12323 + }, + { + "epoch": 0.8557144840994306, + "grad_norm": 3.2741549365089324, + "learning_rate": 5.361440275047969e-07, + "loss": 0.2644, + "step": 12324 + }, + { + "epoch": 0.8557839189001527, + "grad_norm": 4.079362184383126, + "learning_rate": 5.356375472012631e-07, + "loss": 
0.464, + "step": 12325 + }, + { + "epoch": 0.8558533537008749, + "grad_norm": 4.165102903331528, + "learning_rate": 5.351312927001312e-07, + "loss": 0.4577, + "step": 12326 + }, + { + "epoch": 0.855922788501597, + "grad_norm": 5.622002796731427, + "learning_rate": 5.346252640270072e-07, + "loss": 0.8521, + "step": 12327 + }, + { + "epoch": 0.8559922233023192, + "grad_norm": 4.623472022132399, + "learning_rate": 5.341194612074824e-07, + "loss": 0.6321, + "step": 12328 + }, + { + "epoch": 0.8560616581030412, + "grad_norm": 3.345821449163678, + "learning_rate": 5.336138842671429e-07, + "loss": 0.3314, + "step": 12329 + }, + { + "epoch": 0.8561310929037633, + "grad_norm": 4.068165998365277, + "learning_rate": 5.331085332315594e-07, + "loss": 0.3998, + "step": 12330 + }, + { + "epoch": 0.8562005277044855, + "grad_norm": 3.5183321601005453, + "learning_rate": 5.32603408126291e-07, + "loss": 0.3596, + "step": 12331 + }, + { + "epoch": 0.8562699625052076, + "grad_norm": 2.6483731422259247, + "learning_rate": 5.320985089768876e-07, + "loss": 0.2351, + "step": 12332 + }, + { + "epoch": 0.8563393973059298, + "grad_norm": 4.7137919415894345, + "learning_rate": 5.315938358088851e-07, + "loss": 0.5836, + "step": 12333 + }, + { + "epoch": 0.8564088321066519, + "grad_norm": 3.2532818523197617, + "learning_rate": 5.310893886478108e-07, + "loss": 0.3109, + "step": 12334 + }, + { + "epoch": 0.8564782669073739, + "grad_norm": 3.5252464512522494, + "learning_rate": 5.305851675191781e-07, + "loss": 0.322, + "step": 12335 + }, + { + "epoch": 0.8565477017080961, + "grad_norm": 7.649791104076804, + "learning_rate": 5.3008117244849e-07, + "loss": 0.6806, + "step": 12336 + }, + { + "epoch": 0.8566171365088182, + "grad_norm": 4.239958439249266, + "learning_rate": 5.295774034612372e-07, + "loss": 0.4313, + "step": 12337 + }, + { + "epoch": 0.8566865713095403, + "grad_norm": 3.39858900405438, + "learning_rate": 5.290738605829026e-07, + "loss": 0.2401, + "step": 12338 + }, + { + "epoch": 0.8567560061102625, + "grad_norm": 4.1558241732338725, + "learning_rate": 5.285705438389521e-07, + "loss": 0.4439, + "step": 12339 + }, + { + "epoch": 0.8568254409109846, + "grad_norm": 3.8964469967319126, + "learning_rate": 5.280674532548424e-07, + "loss": 0.3994, + "step": 12340 + }, + { + "epoch": 0.8568948757117067, + "grad_norm": 4.267217188862538, + "learning_rate": 5.275645888560233e-07, + "loss": 0.5703, + "step": 12341 + }, + { + "epoch": 0.8569643105124288, + "grad_norm": 2.9280935191690065, + "learning_rate": 5.270619506679236e-07, + "loss": 0.229, + "step": 12342 + }, + { + "epoch": 0.8570337453131509, + "grad_norm": 3.745986984152243, + "learning_rate": 5.265595387159705e-07, + "loss": 0.2808, + "step": 12343 + }, + { + "epoch": 0.8571031801138731, + "grad_norm": 3.3326795133892304, + "learning_rate": 5.260573530255736e-07, + "loss": 0.259, + "step": 12344 + }, + { + "epoch": 0.8571726149145952, + "grad_norm": 3.431093524695176, + "learning_rate": 5.255553936221331e-07, + "loss": 0.2873, + "step": 12345 + }, + { + "epoch": 0.8572420497153174, + "grad_norm": 4.132601325597009, + "learning_rate": 5.25053660531038e-07, + "loss": 0.5389, + "step": 12346 + }, + { + "epoch": 0.8573114845160394, + "grad_norm": 4.376452809498118, + "learning_rate": 5.245521537776649e-07, + "loss": 0.4308, + "step": 12347 + }, + { + "epoch": 0.8573809193167615, + "grad_norm": 4.249522349769682, + "learning_rate": 5.24050873387379e-07, + "loss": 0.4767, + "step": 12348 + }, + { + "epoch": 0.8574503541174837, + "grad_norm": 3.9774814651689336, + 
"learning_rate": 5.235498193855354e-07, + "loss": 0.4055, + "step": 12349 + }, + { + "epoch": 0.8575197889182058, + "grad_norm": 2.921955233444921, + "learning_rate": 5.230489917974768e-07, + "loss": 0.2861, + "step": 12350 + }, + { + "epoch": 0.8575892237189279, + "grad_norm": 4.44052867020784, + "learning_rate": 5.22548390648533e-07, + "loss": 0.5184, + "step": 12351 + }, + { + "epoch": 0.8576586585196501, + "grad_norm": 4.765860968933322, + "learning_rate": 5.220480159640268e-07, + "loss": 0.4761, + "step": 12352 + }, + { + "epoch": 0.8577280933203721, + "grad_norm": 3.3747488324476898, + "learning_rate": 5.215478677692642e-07, + "loss": 0.3322, + "step": 12353 + }, + { + "epoch": 0.8577975281210943, + "grad_norm": 3.8031731152570005, + "learning_rate": 5.210479460895413e-07, + "loss": 0.3972, + "step": 12354 + }, + { + "epoch": 0.8578669629218164, + "grad_norm": 3.5934029623017376, + "learning_rate": 5.205482509501463e-07, + "loss": 0.2899, + "step": 12355 + }, + { + "epoch": 0.8579363977225385, + "grad_norm": 3.177147790060027, + "learning_rate": 5.200487823763529e-07, + "loss": 0.4276, + "step": 12356 + }, + { + "epoch": 0.8580058325232607, + "grad_norm": 4.858997766589289, + "learning_rate": 5.195495403934209e-07, + "loss": 0.42, + "step": 12357 + }, + { + "epoch": 0.8580752673239828, + "grad_norm": 11.479131480367396, + "learning_rate": 5.190505250266036e-07, + "loss": 0.2795, + "step": 12358 + }, + { + "epoch": 0.858144702124705, + "grad_norm": 2.794703419707781, + "learning_rate": 5.185517363011416e-07, + "loss": 0.1971, + "step": 12359 + }, + { + "epoch": 0.858214136925427, + "grad_norm": 3.6063450472942837, + "learning_rate": 5.180531742422595e-07, + "loss": 0.4005, + "step": 12360 + }, + { + "epoch": 0.8582835717261491, + "grad_norm": 4.17125988060682, + "learning_rate": 5.175548388751772e-07, + "loss": 0.3932, + "step": 12361 + }, + { + "epoch": 0.8583530065268713, + "grad_norm": 3.7825077353753382, + "learning_rate": 5.17056730225099e-07, + "loss": 0.4931, + "step": 12362 + }, + { + "epoch": 0.8584224413275934, + "grad_norm": 3.145042493724635, + "learning_rate": 5.165588483172185e-07, + "loss": 0.3043, + "step": 12363 + }, + { + "epoch": 0.8584918761283155, + "grad_norm": 5.188653605490194, + "learning_rate": 5.160611931767185e-07, + "loss": 0.5134, + "step": 12364 + }, + { + "epoch": 0.8585613109290376, + "grad_norm": 4.10439367951557, + "learning_rate": 5.155637648287687e-07, + "loss": 0.4534, + "step": 12365 + }, + { + "epoch": 0.8586307457297597, + "grad_norm": 3.0912102861616235, + "learning_rate": 5.150665632985297e-07, + "loss": 0.243, + "step": 12366 + }, + { + "epoch": 0.8587001805304819, + "grad_norm": 3.1320756946183597, + "learning_rate": 5.14569588611149e-07, + "loss": 0.2977, + "step": 12367 + }, + { + "epoch": 0.858769615331204, + "grad_norm": 3.426097063836646, + "learning_rate": 5.140728407917628e-07, + "loss": 0.2931, + "step": 12368 + }, + { + "epoch": 0.8588390501319261, + "grad_norm": 3.020723946399208, + "learning_rate": 5.135763198654947e-07, + "loss": 0.2949, + "step": 12369 + }, + { + "epoch": 0.8589084849326483, + "grad_norm": 3.8349109421803442, + "learning_rate": 5.13080025857462e-07, + "loss": 0.3828, + "step": 12370 + }, + { + "epoch": 0.8589779197333703, + "grad_norm": 4.238156740132818, + "learning_rate": 5.125839587927623e-07, + "loss": 0.4172, + "step": 12371 + }, + { + "epoch": 0.8590473545340925, + "grad_norm": 4.0165695742907594, + "learning_rate": 5.120881186964888e-07, + "loss": 0.4794, + "step": 12372 + }, + { + "epoch": 
0.8591167893348146, + "grad_norm": 2.903841450424251, + "learning_rate": 5.115925055937205e-07, + "loss": 0.2705, + "step": 12373 + }, + { + "epoch": 0.8591862241355367, + "grad_norm": 4.469243559537574, + "learning_rate": 5.110971195095238e-07, + "loss": 0.3706, + "step": 12374 + }, + { + "epoch": 0.8592556589362589, + "grad_norm": 3.0368659349288496, + "learning_rate": 5.10601960468955e-07, + "loss": 0.4166, + "step": 12375 + }, + { + "epoch": 0.859325093736981, + "grad_norm": 4.022607807565288, + "learning_rate": 5.101070284970594e-07, + "loss": 0.2505, + "step": 12376 + }, + { + "epoch": 0.8593945285377032, + "grad_norm": 3.8295013150507273, + "learning_rate": 5.096123236188694e-07, + "loss": 0.2476, + "step": 12377 + }, + { + "epoch": 0.8594639633384252, + "grad_norm": 3.2789729830279484, + "learning_rate": 5.09117845859407e-07, + "loss": 0.3142, + "step": 12378 + }, + { + "epoch": 0.8595333981391473, + "grad_norm": 3.230404566931052, + "learning_rate": 5.086235952436819e-07, + "loss": 0.2772, + "step": 12379 + }, + { + "epoch": 0.8596028329398695, + "grad_norm": 5.186344873873336, + "learning_rate": 5.081295717966928e-07, + "loss": 0.7393, + "step": 12380 + }, + { + "epoch": 0.8596722677405916, + "grad_norm": 3.8174422166804316, + "learning_rate": 5.076357755434281e-07, + "loss": 0.395, + "step": 12381 + }, + { + "epoch": 0.8597417025413137, + "grad_norm": 4.666192286727472, + "learning_rate": 5.071422065088621e-07, + "loss": 0.4968, + "step": 12382 + }, + { + "epoch": 0.8598111373420358, + "grad_norm": 3.9651775673686482, + "learning_rate": 5.066488647179579e-07, + "loss": 0.4934, + "step": 12383 + }, + { + "epoch": 0.8598805721427579, + "grad_norm": 2.9459854804671823, + "learning_rate": 5.061557501956715e-07, + "loss": 0.213, + "step": 12384 + }, + { + "epoch": 0.8599500069434801, + "grad_norm": 1.7298313904286367, + "learning_rate": 5.056628629669413e-07, + "loss": 0.0741, + "step": 12385 + }, + { + "epoch": 0.8600194417442022, + "grad_norm": 3.9243441663304703, + "learning_rate": 5.051702030566969e-07, + "loss": 0.4282, + "step": 12386 + }, + { + "epoch": 0.8600888765449243, + "grad_norm": 3.1915614812310444, + "learning_rate": 5.046777704898582e-07, + "loss": 0.2351, + "step": 12387 + }, + { + "epoch": 0.8601583113456465, + "grad_norm": 5.8815364778123165, + "learning_rate": 5.041855652913325e-07, + "loss": 0.6513, + "step": 12388 + }, + { + "epoch": 0.8602277461463685, + "grad_norm": 4.935711368278944, + "learning_rate": 5.036935874860111e-07, + "loss": 0.5023, + "step": 12389 + }, + { + "epoch": 0.8602971809470907, + "grad_norm": 6.034346714885967, + "learning_rate": 5.032018370987812e-07, + "loss": 0.47, + "step": 12390 + }, + { + "epoch": 0.8603666157478128, + "grad_norm": 3.807652106262269, + "learning_rate": 5.027103141545148e-07, + "loss": 0.2236, + "step": 12391 + }, + { + "epoch": 0.8604360505485349, + "grad_norm": 4.1081781937123525, + "learning_rate": 5.022190186780701e-07, + "loss": 0.5549, + "step": 12392 + }, + { + "epoch": 0.8605054853492571, + "grad_norm": 3.998779442757059, + "learning_rate": 5.017279506942985e-07, + "loss": 0.4686, + "step": 12393 + }, + { + "epoch": 0.8605749201499792, + "grad_norm": 4.109943023970411, + "learning_rate": 5.012371102280366e-07, + "loss": 0.5381, + "step": 12394 + }, + { + "epoch": 0.8606443549507012, + "grad_norm": 3.7218651383309678, + "learning_rate": 5.007464973041115e-07, + "loss": 0.3322, + "step": 12395 + }, + { + "epoch": 0.8607137897514234, + "grad_norm": 4.121022869251024, + "learning_rate": 5.002561119473376e-07, + 
"loss": 0.355, + "step": 12396 + }, + { + "epoch": 0.8607832245521455, + "grad_norm": 7.288388066759092, + "learning_rate": 4.997659541825173e-07, + "loss": 0.5127, + "step": 12397 + }, + { + "epoch": 0.8608526593528677, + "grad_norm": 4.057317307326759, + "learning_rate": 4.992760240344418e-07, + "loss": 0.3623, + "step": 12398 + }, + { + "epoch": 0.8609220941535898, + "grad_norm": 4.225378725501834, + "learning_rate": 4.987863215278943e-07, + "loss": 0.3855, + "step": 12399 + }, + { + "epoch": 0.8609915289543119, + "grad_norm": 4.187455606729623, + "learning_rate": 4.982968466876398e-07, + "loss": 0.4348, + "step": 12400 + }, + { + "epoch": 0.861060963755034, + "grad_norm": 5.83181126038767, + "learning_rate": 4.978075995384362e-07, + "loss": 0.4329, + "step": 12401 + }, + { + "epoch": 0.8611303985557561, + "grad_norm": 3.160199892085061, + "learning_rate": 4.973185801050312e-07, + "loss": 0.2364, + "step": 12402 + }, + { + "epoch": 0.8611998333564783, + "grad_norm": 2.5076371229371746, + "learning_rate": 4.968297884121553e-07, + "loss": 0.1836, + "step": 12403 + }, + { + "epoch": 0.8612692681572004, + "grad_norm": 3.4632920805048877, + "learning_rate": 4.963412244845345e-07, + "loss": 0.3068, + "step": 12404 + }, + { + "epoch": 0.8613387029579225, + "grad_norm": 5.383389480348123, + "learning_rate": 4.958528883468778e-07, + "loss": 0.4755, + "step": 12405 + }, + { + "epoch": 0.8614081377586447, + "grad_norm": 2.1833797444863663, + "learning_rate": 4.953647800238859e-07, + "loss": 0.0984, + "step": 12406 + }, + { + "epoch": 0.8614775725593667, + "grad_norm": 4.71025148140748, + "learning_rate": 4.948768995402453e-07, + "loss": 0.5459, + "step": 12407 + }, + { + "epoch": 0.8615470073600888, + "grad_norm": 3.5218711238696527, + "learning_rate": 4.943892469206335e-07, + "loss": 0.3206, + "step": 12408 + }, + { + "epoch": 0.861616442160811, + "grad_norm": 3.8864440981656543, + "learning_rate": 4.939018221897151e-07, + "loss": 0.4047, + "step": 12409 + }, + { + "epoch": 0.8616858769615331, + "grad_norm": 4.17485527182572, + "learning_rate": 4.934146253721439e-07, + "loss": 0.5595, + "step": 12410 + }, + { + "epoch": 0.8617553117622553, + "grad_norm": 3.5720838167485836, + "learning_rate": 4.929276564925611e-07, + "loss": 0.36, + "step": 12411 + }, + { + "epoch": 0.8618247465629774, + "grad_norm": 4.5706519225115425, + "learning_rate": 4.924409155755966e-07, + "loss": 0.5903, + "step": 12412 + }, + { + "epoch": 0.8618941813636994, + "grad_norm": 2.469652730633405, + "learning_rate": 4.919544026458717e-07, + "loss": 0.2693, + "step": 12413 + }, + { + "epoch": 0.8619636161644216, + "grad_norm": 4.17909519642469, + "learning_rate": 4.914681177279907e-07, + "loss": 0.3521, + "step": 12414 + }, + { + "epoch": 0.8620330509651437, + "grad_norm": 4.197170879164432, + "learning_rate": 4.909820608465499e-07, + "loss": 0.5699, + "step": 12415 + }, + { + "epoch": 0.8621024857658659, + "grad_norm": 5.222314019123935, + "learning_rate": 4.904962320261364e-07, + "loss": 0.6539, + "step": 12416 + }, + { + "epoch": 0.862171920566588, + "grad_norm": 4.030675470527228, + "learning_rate": 4.900106312913189e-07, + "loss": 0.4918, + "step": 12417 + }, + { + "epoch": 0.8622413553673101, + "grad_norm": 4.8105376801118656, + "learning_rate": 4.895252586666599e-07, + "loss": 0.7096, + "step": 12418 + }, + { + "epoch": 0.8623107901680322, + "grad_norm": 3.6265574196845813, + "learning_rate": 4.890401141767104e-07, + "loss": 0.1999, + "step": 12419 + }, + { + "epoch": 0.8623802249687543, + "grad_norm": 3.479167984834636, 
+ "learning_rate": 4.885551978460079e-07, + "loss": 0.289, + "step": 12420 + }, + { + "epoch": 0.8624496597694764, + "grad_norm": 4.688369845052894, + "learning_rate": 4.880705096990767e-07, + "loss": 0.3519, + "step": 12421 + }, + { + "epoch": 0.8625190945701986, + "grad_norm": 6.750551182495961, + "learning_rate": 4.875860497604345e-07, + "loss": 0.5872, + "step": 12422 + }, + { + "epoch": 0.8625885293709207, + "grad_norm": 4.765882874911608, + "learning_rate": 4.871018180545839e-07, + "loss": 0.6733, + "step": 12423 + }, + { + "epoch": 0.8626579641716429, + "grad_norm": 2.9256252136539174, + "learning_rate": 4.866178146060163e-07, + "loss": 0.2462, + "step": 12424 + }, + { + "epoch": 0.862727398972365, + "grad_norm": 3.8824082516153564, + "learning_rate": 4.861340394392127e-07, + "loss": 0.3964, + "step": 12425 + }, + { + "epoch": 0.862796833773087, + "grad_norm": 4.060090453614927, + "learning_rate": 4.856504925786415e-07, + "loss": 0.3939, + "step": 12426 + }, + { + "epoch": 0.8628662685738092, + "grad_norm": 4.542620672818572, + "learning_rate": 4.851671740487602e-07, + "loss": 0.5503, + "step": 12427 + }, + { + "epoch": 0.8629357033745313, + "grad_norm": 3.7797245520819813, + "learning_rate": 4.846840838740147e-07, + "loss": 0.4229, + "step": 12428 + }, + { + "epoch": 0.8630051381752535, + "grad_norm": 3.364276192680082, + "learning_rate": 4.842012220788384e-07, + "loss": 0.3261, + "step": 12429 + }, + { + "epoch": 0.8630745729759756, + "grad_norm": 3.2884530187518424, + "learning_rate": 4.837185886876533e-07, + "loss": 0.2802, + "step": 12430 + }, + { + "epoch": 0.8631440077766976, + "grad_norm": 4.942494844380097, + "learning_rate": 4.832361837248734e-07, + "loss": 0.5706, + "step": 12431 + }, + { + "epoch": 0.8632134425774198, + "grad_norm": 4.178505893884873, + "learning_rate": 4.827540072148939e-07, + "loss": 0.3445, + "step": 12432 + }, + { + "epoch": 0.8632828773781419, + "grad_norm": 4.052459940084662, + "learning_rate": 4.822720591821068e-07, + "loss": 0.112, + "step": 12433 + }, + { + "epoch": 0.8633523121788641, + "grad_norm": 2.9573999684721346, + "learning_rate": 4.817903396508877e-07, + "loss": 0.2076, + "step": 12434 + }, + { + "epoch": 0.8634217469795862, + "grad_norm": 4.380866050295134, + "learning_rate": 4.81308848645598e-07, + "loss": 0.4278, + "step": 12435 + }, + { + "epoch": 0.8634911817803083, + "grad_norm": 3.683129217684303, + "learning_rate": 4.808275861905947e-07, + "loss": 0.3046, + "step": 12436 + }, + { + "epoch": 0.8635606165810304, + "grad_norm": 4.339321749435191, + "learning_rate": 4.803465523102186e-07, + "loss": 0.4309, + "step": 12437 + }, + { + "epoch": 0.8636300513817525, + "grad_norm": 4.318028991326213, + "learning_rate": 4.798657470287993e-07, + "loss": 0.5358, + "step": 12438 + }, + { + "epoch": 0.8636994861824746, + "grad_norm": 3.264117965413702, + "learning_rate": 4.793851703706559e-07, + "loss": 0.2319, + "step": 12439 + }, + { + "epoch": 0.8637689209831968, + "grad_norm": 3.0686639832495675, + "learning_rate": 4.789048223600951e-07, + "loss": 0.2197, + "step": 12440 + }, + { + "epoch": 0.8638383557839189, + "grad_norm": 3.7864772848224995, + "learning_rate": 4.784247030214123e-07, + "loss": 0.2281, + "step": 12441 + }, + { + "epoch": 0.8639077905846411, + "grad_norm": 3.6430810006259873, + "learning_rate": 4.779448123788915e-07, + "loss": 0.422, + "step": 12442 + }, + { + "epoch": 0.8639772253853631, + "grad_norm": 3.9524174067852686, + "learning_rate": 4.774651504568051e-07, + "loss": 0.3518, + "step": 12443 + }, + { + "epoch": 
0.8640466601860852, + "grad_norm": 5.260713653314153, + "learning_rate": 4.769857172794129e-07, + "loss": 0.5136, + "step": 12444 + }, + { + "epoch": 0.8641160949868074, + "grad_norm": 4.115444790185732, + "learning_rate": 4.7650651287096673e-07, + "loss": 0.5037, + "step": 12445 + }, + { + "epoch": 0.8641855297875295, + "grad_norm": 3.6819106555640495, + "learning_rate": 4.760275372557016e-07, + "loss": 0.251, + "step": 12446 + }, + { + "epoch": 0.8642549645882517, + "grad_norm": 4.060274832509004, + "learning_rate": 4.7554879045784317e-07, + "loss": 0.2655, + "step": 12447 + }, + { + "epoch": 0.8643243993889738, + "grad_norm": 5.4386910520289975, + "learning_rate": 4.750702725016093e-07, + "loss": 0.4917, + "step": 12448 + }, + { + "epoch": 0.8643938341896958, + "grad_norm": 4.450027048685496, + "learning_rate": 4.7459198341120004e-07, + "loss": 0.502, + "step": 12449 + }, + { + "epoch": 0.864463268990418, + "grad_norm": 3.7291228170358353, + "learning_rate": 4.7411392321080606e-07, + "loss": 0.5111, + "step": 12450 + }, + { + "epoch": 0.8645327037911401, + "grad_norm": 3.4805922263223525, + "learning_rate": 4.736360919246097e-07, + "loss": 0.2967, + "step": 12451 + }, + { + "epoch": 0.8646021385918622, + "grad_norm": 4.450087662709071, + "learning_rate": 4.7315848957677767e-07, + "loss": 0.5248, + "step": 12452 + }, + { + "epoch": 0.8646715733925844, + "grad_norm": 3.6429657597617817, + "learning_rate": 4.726811161914668e-07, + "loss": 0.3782, + "step": 12453 + }, + { + "epoch": 0.8647410081933065, + "grad_norm": 5.480448534070716, + "learning_rate": 4.7220397179282227e-07, + "loss": 0.4102, + "step": 12454 + }, + { + "epoch": 0.8648104429940286, + "grad_norm": 4.137790261032268, + "learning_rate": 4.7172705640497695e-07, + "loss": 0.3362, + "step": 12455 + }, + { + "epoch": 0.8648798777947507, + "grad_norm": 3.863180907123696, + "learning_rate": 4.712503700520532e-07, + "loss": 0.4368, + "step": 12456 + }, + { + "epoch": 0.8649493125954728, + "grad_norm": 4.140162498694205, + "learning_rate": 4.707739127581612e-07, + "loss": 0.5447, + "step": 12457 + }, + { + "epoch": 0.865018747396195, + "grad_norm": 3.91639652390322, + "learning_rate": 4.702976845473994e-07, + "loss": 0.3337, + "step": 12458 + }, + { + "epoch": 0.8650881821969171, + "grad_norm": 3.356567131408803, + "learning_rate": 4.698216854438553e-07, + "loss": 0.2813, + "step": 12459 + }, + { + "epoch": 0.8651576169976393, + "grad_norm": 4.006134601892631, + "learning_rate": 4.69345915471604e-07, + "loss": 0.377, + "step": 12460 + }, + { + "epoch": 0.8652270517983613, + "grad_norm": 2.9570029558467223, + "learning_rate": 4.6887037465470853e-07, + "loss": 0.2651, + "step": 12461 + }, + { + "epoch": 0.8652964865990834, + "grad_norm": 4.920846169559993, + "learning_rate": 4.683950630172229e-07, + "loss": 0.5727, + "step": 12462 + }, + { + "epoch": 0.8653659213998056, + "grad_norm": 10.789596103442541, + "learning_rate": 4.67919980583188e-07, + "loss": 0.5007, + "step": 12463 + }, + { + "epoch": 0.8654353562005277, + "grad_norm": 3.3626453613818645, + "learning_rate": 4.6744512737663063e-07, + "loss": 0.2618, + "step": 12464 + }, + { + "epoch": 0.8655047910012498, + "grad_norm": 3.7391183458011246, + "learning_rate": 4.6697050342157045e-07, + "loss": 0.4561, + "step": 12465 + }, + { + "epoch": 0.865574225801972, + "grad_norm": 3.098009759773729, + "learning_rate": 4.664961087420139e-07, + "loss": 0.2445, + "step": 12466 + }, + { + "epoch": 0.865643660602694, + "grad_norm": 3.2282784774103237, + "learning_rate": 
4.6602194336195173e-07, + "loss": 0.3593, + "step": 12467 + }, + { + "epoch": 0.8657130954034162, + "grad_norm": 3.01019199546965, + "learning_rate": 4.6554800730537084e-07, + "loss": 0.3448, + "step": 12468 + }, + { + "epoch": 0.8657825302041383, + "grad_norm": 4.356681361639288, + "learning_rate": 4.6507430059623994e-07, + "loss": 0.5009, + "step": 12469 + }, + { + "epoch": 0.8658519650048604, + "grad_norm": 5.212131992363794, + "learning_rate": 4.6460082325851974e-07, + "loss": 0.7968, + "step": 12470 + }, + { + "epoch": 0.8659213998055826, + "grad_norm": 3.6984155809395487, + "learning_rate": 4.641275753161578e-07, + "loss": 0.453, + "step": 12471 + }, + { + "epoch": 0.8659908346063047, + "grad_norm": 4.483860320415954, + "learning_rate": 4.6365455679309003e-07, + "loss": 0.6148, + "step": 12472 + }, + { + "epoch": 0.8660602694070269, + "grad_norm": 4.589579815444638, + "learning_rate": 4.631817677132422e-07, + "loss": 0.364, + "step": 12473 + }, + { + "epoch": 0.8661297042077489, + "grad_norm": 4.318332057572095, + "learning_rate": 4.627092081005263e-07, + "loss": 0.4526, + "step": 12474 + }, + { + "epoch": 0.866199139008471, + "grad_norm": 3.854282690739607, + "learning_rate": 4.6223687797884444e-07, + "loss": 0.4851, + "step": 12475 + }, + { + "epoch": 0.8662685738091932, + "grad_norm": 4.1743104714804975, + "learning_rate": 4.6176477737208515e-07, + "loss": 0.4572, + "step": 12476 + }, + { + "epoch": 0.8663380086099153, + "grad_norm": 5.014095408385739, + "learning_rate": 4.612929063041305e-07, + "loss": 0.444, + "step": 12477 + }, + { + "epoch": 0.8664074434106374, + "grad_norm": 5.770329459678691, + "learning_rate": 4.608212647988436e-07, + "loss": 0.3561, + "step": 12478 + }, + { + "epoch": 0.8664768782113595, + "grad_norm": 6.773986142408096, + "learning_rate": 4.6034985288007984e-07, + "loss": 0.4905, + "step": 12479 + }, + { + "epoch": 0.8665463130120816, + "grad_norm": 4.338688123289783, + "learning_rate": 4.5987867057168456e-07, + "loss": 0.3147, + "step": 12480 + }, + { + "epoch": 0.8666157478128038, + "grad_norm": 3.4344264612397404, + "learning_rate": 4.5940771789748926e-07, + "loss": 0.3809, + "step": 12481 + }, + { + "epoch": 0.8666851826135259, + "grad_norm": 3.6231626662898755, + "learning_rate": 4.5893699488131217e-07, + "loss": 0.2755, + "step": 12482 + }, + { + "epoch": 0.866754617414248, + "grad_norm": 2.6935962508894793, + "learning_rate": 4.584665015469636e-07, + "loss": 0.1877, + "step": 12483 + }, + { + "epoch": 0.8668240522149702, + "grad_norm": 3.2297964365348486, + "learning_rate": 4.579962379182407e-07, + "loss": 0.2753, + "step": 12484 + }, + { + "epoch": 0.8668934870156922, + "grad_norm": 4.486183041832954, + "learning_rate": 4.575262040189288e-07, + "loss": 0.334, + "step": 12485 + }, + { + "epoch": 0.8669629218164144, + "grad_norm": 4.510899188820433, + "learning_rate": 4.570563998728006e-07, + "loss": 0.5014, + "step": 12486 + }, + { + "epoch": 0.8670323566171365, + "grad_norm": 3.9863505996368884, + "learning_rate": 4.5658682550361933e-07, + "loss": 0.4206, + "step": 12487 + }, + { + "epoch": 0.8671017914178586, + "grad_norm": 3.4766609511786193, + "learning_rate": 4.5611748093513544e-07, + "loss": 0.3441, + "step": 12488 + }, + { + "epoch": 0.8671712262185808, + "grad_norm": 4.4642026444751, + "learning_rate": 4.5564836619108765e-07, + "loss": 0.5091, + "step": 12489 + }, + { + "epoch": 0.8672406610193029, + "grad_norm": 4.281646795955753, + "learning_rate": 4.5517948129520263e-07, + "loss": 0.5544, + "step": 12490 + }, + { + "epoch": 
0.867310095820025, + "grad_norm": 4.850790554228908, + "learning_rate": 4.5471082627119746e-07, + "loss": 0.4631, + "step": 12491 + }, + { + "epoch": 0.8673795306207471, + "grad_norm": 4.591486419774582, + "learning_rate": 4.5424240114277487e-07, + "loss": 0.6294, + "step": 12492 + }, + { + "epoch": 0.8674489654214692, + "grad_norm": 5.197386119862946, + "learning_rate": 4.537742059336264e-07, + "loss": 0.4583, + "step": 12493 + }, + { + "epoch": 0.8675184002221914, + "grad_norm": 3.992774245045591, + "learning_rate": 4.53306240667436e-07, + "loss": 0.4455, + "step": 12494 + }, + { + "epoch": 0.8675878350229135, + "grad_norm": 6.7074798651977625, + "learning_rate": 4.5283850536787133e-07, + "loss": 0.6659, + "step": 12495 + }, + { + "epoch": 0.8676572698236356, + "grad_norm": 4.735310948675853, + "learning_rate": 4.5237100005858794e-07, + "loss": 0.6564, + "step": 12496 + }, + { + "epoch": 0.8677267046243577, + "grad_norm": 4.561215131830352, + "learning_rate": 4.519037247632341e-07, + "loss": 0.443, + "step": 12497 + }, + { + "epoch": 0.8677961394250798, + "grad_norm": 4.564225470633781, + "learning_rate": 4.514366795054448e-07, + "loss": 0.4683, + "step": 12498 + }, + { + "epoch": 0.867865574225802, + "grad_norm": 4.496684442810846, + "learning_rate": 4.509698643088389e-07, + "loss": 0.3268, + "step": 12499 + }, + { + "epoch": 0.8679350090265241, + "grad_norm": 4.352535399613586, + "learning_rate": 4.505032791970304e-07, + "loss": 0.4339, + "step": 12500 + }, + { + "epoch": 0.8680044438272462, + "grad_norm": 3.8794268647331878, + "learning_rate": 4.5003692419361864e-07, + "loss": 0.2622, + "step": 12501 + }, + { + "epoch": 0.8680738786279684, + "grad_norm": 6.103129700542274, + "learning_rate": 4.4957079932218985e-07, + "loss": 0.4832, + "step": 12502 + }, + { + "epoch": 0.8681433134286904, + "grad_norm": 6.538270262006157, + "learning_rate": 4.491049046063212e-07, + "loss": 0.5364, + "step": 12503 + }, + { + "epoch": 0.8682127482294126, + "grad_norm": 4.443892140212067, + "learning_rate": 4.4863924006957716e-07, + "loss": 0.547, + "step": 12504 + }, + { + "epoch": 0.8682821830301347, + "grad_norm": 4.089286278982273, + "learning_rate": 4.4817380573550894e-07, + "loss": 0.3442, + "step": 12505 + }, + { + "epoch": 0.8683516178308568, + "grad_norm": 3.4936444104181885, + "learning_rate": 4.477086016276605e-07, + "loss": 0.3197, + "step": 12506 + }, + { + "epoch": 0.868421052631579, + "grad_norm": 5.0186433731745, + "learning_rate": 4.472436277695591e-07, + "loss": 0.5511, + "step": 12507 + }, + { + "epoch": 0.8684904874323011, + "grad_norm": 5.080693544770023, + "learning_rate": 4.46778884184722e-07, + "loss": 0.395, + "step": 12508 + }, + { + "epoch": 0.8685599222330231, + "grad_norm": 5.247110691810856, + "learning_rate": 4.4631437089665886e-07, + "loss": 0.5719, + "step": 12509 + }, + { + "epoch": 0.8686293570337453, + "grad_norm": 4.01006728928897, + "learning_rate": 4.4585008792886076e-07, + "loss": 0.4548, + "step": 12510 + }, + { + "epoch": 0.8686987918344674, + "grad_norm": 3.8618763527587423, + "learning_rate": 4.4538603530481117e-07, + "loss": 0.3607, + "step": 12511 + }, + { + "epoch": 0.8687682266351896, + "grad_norm": 3.4525475638776015, + "learning_rate": 4.4492221304798243e-07, + "loss": 0.1727, + "step": 12512 + }, + { + "epoch": 0.8688376614359117, + "grad_norm": 3.7831203826512136, + "learning_rate": 4.444586211818341e-07, + "loss": 0.4719, + "step": 12513 + }, + { + "epoch": 0.8689070962366338, + "grad_norm": 3.653530545564179, + "learning_rate": 4.439952597298142e-07, + 
"loss": 0.287, + "step": 12514 + }, + { + "epoch": 0.868976531037356, + "grad_norm": 2.9658327036025947, + "learning_rate": 4.435321287153582e-07, + "loss": 0.1481, + "step": 12515 + }, + { + "epoch": 0.869045965838078, + "grad_norm": 4.244686345336976, + "learning_rate": 4.430692281618909e-07, + "loss": 0.4227, + "step": 12516 + }, + { + "epoch": 0.8691154006388002, + "grad_norm": 4.38252372517127, + "learning_rate": 4.4260655809282573e-07, + "loss": 0.4785, + "step": 12517 + }, + { + "epoch": 0.8691848354395223, + "grad_norm": 5.540574411192276, + "learning_rate": 4.421441185315639e-07, + "loss": 0.6755, + "step": 12518 + }, + { + "epoch": 0.8692542702402444, + "grad_norm": 3.627995825690956, + "learning_rate": 4.416819095014946e-07, + "loss": 0.3358, + "step": 12519 + }, + { + "epoch": 0.8693237050409666, + "grad_norm": 3.855529907227254, + "learning_rate": 4.412199310259968e-07, + "loss": 0.4421, + "step": 12520 + }, + { + "epoch": 0.8693931398416886, + "grad_norm": 3.1824778165987633, + "learning_rate": 4.407581831284358e-07, + "loss": 0.2853, + "step": 12521 + }, + { + "epoch": 0.8694625746424107, + "grad_norm": 3.5800943230357443, + "learning_rate": 4.402966658321661e-07, + "loss": 0.4394, + "step": 12522 + }, + { + "epoch": 0.8695320094431329, + "grad_norm": 4.407088957199432, + "learning_rate": 4.398353791605331e-07, + "loss": 0.4707, + "step": 12523 + }, + { + "epoch": 0.869601444243855, + "grad_norm": 3.5179232612766613, + "learning_rate": 4.3937432313686576e-07, + "loss": 0.5042, + "step": 12524 + }, + { + "epoch": 0.8696708790445772, + "grad_norm": 5.998355318549039, + "learning_rate": 4.3891349778448336e-07, + "loss": 0.3789, + "step": 12525 + }, + { + "epoch": 0.8697403138452993, + "grad_norm": 4.923394931674917, + "learning_rate": 4.3845290312669597e-07, + "loss": 0.6041, + "step": 12526 + }, + { + "epoch": 0.8698097486460213, + "grad_norm": 3.4369502722219396, + "learning_rate": 4.3799253918680006e-07, + "loss": 0.3404, + "step": 12527 + }, + { + "epoch": 0.8698791834467435, + "grad_norm": 7.322313594197815, + "learning_rate": 4.37532405988077e-07, + "loss": 0.5699, + "step": 12528 + }, + { + "epoch": 0.8699486182474656, + "grad_norm": 5.319938113101198, + "learning_rate": 4.370725035538037e-07, + "loss": 0.6454, + "step": 12529 + }, + { + "epoch": 0.8700180530481878, + "grad_norm": 3.550949985934782, + "learning_rate": 4.3661283190723935e-07, + "loss": 0.2547, + "step": 12530 + }, + { + "epoch": 0.8700874878489099, + "grad_norm": 2.854765564841774, + "learning_rate": 4.3615339107163423e-07, + "loss": 0.3221, + "step": 12531 + }, + { + "epoch": 0.870156922649632, + "grad_norm": 4.307725314920983, + "learning_rate": 4.356941810702259e-07, + "loss": 0.5485, + "step": 12532 + }, + { + "epoch": 0.8702263574503541, + "grad_norm": 4.149558358556161, + "learning_rate": 4.352352019262418e-07, + "loss": 0.4346, + "step": 12533 + }, + { + "epoch": 0.8702957922510762, + "grad_norm": 4.11456254311931, + "learning_rate": 4.347764536628951e-07, + "loss": 0.5049, + "step": 12534 + }, + { + "epoch": 0.8703652270517983, + "grad_norm": 3.804645696309221, + "learning_rate": 4.3431793630339e-07, + "loss": 0.3573, + "step": 12535 + }, + { + "epoch": 0.8704346618525205, + "grad_norm": 3.5007707140653612, + "learning_rate": 4.338596498709169e-07, + "loss": 0.2918, + "step": 12536 + }, + { + "epoch": 0.8705040966532426, + "grad_norm": 3.7795506218693524, + "learning_rate": 4.3340159438865494e-07, + "loss": 0.2067, + "step": 12537 + }, + { + "epoch": 0.8705735314539648, + "grad_norm": 
5.124772632332957, + "learning_rate": 4.329437698797745e-07, + "loss": 0.523, + "step": 12538 + }, + { + "epoch": 0.8706429662546868, + "grad_norm": 3.816774844654177, + "learning_rate": 4.3248617636742985e-07, + "loss": 0.4392, + "step": 12539 + }, + { + "epoch": 0.8707124010554089, + "grad_norm": 2.265441093974705, + "learning_rate": 4.320288138747647e-07, + "loss": 0.2182, + "step": 12540 + }, + { + "epoch": 0.8707818358561311, + "grad_norm": 3.584893490289672, + "learning_rate": 4.31571682424915e-07, + "loss": 0.3536, + "step": 12541 + }, + { + "epoch": 0.8708512706568532, + "grad_norm": 3.6226738848571096, + "learning_rate": 4.3111478204099775e-07, + "loss": 0.4136, + "step": 12542 + }, + { + "epoch": 0.8709207054575754, + "grad_norm": 3.9325129333898783, + "learning_rate": 4.3065811274612623e-07, + "loss": 0.3918, + "step": 12543 + }, + { + "epoch": 0.8709901402582975, + "grad_norm": 3.967945241782063, + "learning_rate": 4.302016745633969e-07, + "loss": 0.468, + "step": 12544 + }, + { + "epoch": 0.8710595750590195, + "grad_norm": 5.232783261925022, + "learning_rate": 4.2974546751589575e-07, + "loss": 0.3449, + "step": 12545 + }, + { + "epoch": 0.8711290098597417, + "grad_norm": 3.931726964374569, + "learning_rate": 4.292894916266971e-07, + "loss": 0.424, + "step": 12546 + }, + { + "epoch": 0.8711984446604638, + "grad_norm": 4.782269357824777, + "learning_rate": 4.288337469188636e-07, + "loss": 0.4208, + "step": 12547 + }, + { + "epoch": 0.871267879461186, + "grad_norm": 2.8372192078647345, + "learning_rate": 4.283782334154468e-07, + "loss": 0.173, + "step": 12548 + }, + { + "epoch": 0.8713373142619081, + "grad_norm": 3.8865195951874396, + "learning_rate": 4.279229511394861e-07, + "loss": 0.3726, + "step": 12549 + }, + { + "epoch": 0.8714067490626302, + "grad_norm": 3.8927440489906497, + "learning_rate": 4.2746790011400863e-07, + "loss": 0.3687, + "step": 12550 + }, + { + "epoch": 0.8714761838633523, + "grad_norm": 4.110865473141025, + "learning_rate": 4.2701308036203036e-07, + "loss": 0.3274, + "step": 12551 + }, + { + "epoch": 0.8715456186640744, + "grad_norm": 4.206186029917439, + "learning_rate": 4.2655849190655576e-07, + "loss": 0.5328, + "step": 12552 + }, + { + "epoch": 0.8716150534647965, + "grad_norm": 5.259134229543114, + "learning_rate": 4.26104134770578e-07, + "loss": 0.478, + "step": 12553 + }, + { + "epoch": 0.8716844882655187, + "grad_norm": 4.504765983175128, + "learning_rate": 4.25650008977076e-07, + "loss": 0.3844, + "step": 12554 + }, + { + "epoch": 0.8717539230662408, + "grad_norm": 4.404772942720361, + "learning_rate": 4.251961145490213e-07, + "loss": 0.5807, + "step": 12555 + }, + { + "epoch": 0.871823357866963, + "grad_norm": 3.7667045113396864, + "learning_rate": 4.2474245150937176e-07, + "loss": 0.3746, + "step": 12556 + }, + { + "epoch": 0.871892792667685, + "grad_norm": 4.096397600370992, + "learning_rate": 4.242890198810695e-07, + "loss": 0.4211, + "step": 12557 + }, + { + "epoch": 0.8719622274684071, + "grad_norm": 3.3064893160057625, + "learning_rate": 4.238358196870518e-07, + "loss": 0.3423, + "step": 12558 + }, + { + "epoch": 0.8720316622691293, + "grad_norm": 3.9563497368911023, + "learning_rate": 4.233828509502408e-07, + "loss": 0.4513, + "step": 12559 + }, + { + "epoch": 0.8721010970698514, + "grad_norm": 3.7327506745668257, + "learning_rate": 4.229301136935449e-07, + "loss": 0.416, + "step": 12560 + }, + { + "epoch": 0.8721705318705736, + "grad_norm": 3.371553052611191, + "learning_rate": 4.224776079398657e-07, + "loss": 0.2004, + "step": 12561 + }, 
+ { + "epoch": 0.8722399666712957, + "grad_norm": 2.671650901547622, + "learning_rate": 4.220253337120889e-07, + "loss": 0.1814, + "step": 12562 + }, + { + "epoch": 0.8723094014720177, + "grad_norm": 3.3513482429027097, + "learning_rate": 4.215732910330905e-07, + "loss": 0.4341, + "step": 12563 + }, + { + "epoch": 0.8723788362727399, + "grad_norm": 2.5346461212512774, + "learning_rate": 4.2112147992573403e-07, + "loss": 0.2473, + "step": 12564 + }, + { + "epoch": 0.872448271073462, + "grad_norm": 3.90611545733432, + "learning_rate": 4.206699004128717e-07, + "loss": 0.4764, + "step": 12565 + }, + { + "epoch": 0.8725177058741841, + "grad_norm": 3.5711973218110162, + "learning_rate": 4.202185525173441e-07, + "loss": 0.3163, + "step": 12566 + }, + { + "epoch": 0.8725871406749063, + "grad_norm": 3.1402997016783654, + "learning_rate": 4.197674362619797e-07, + "loss": 0.326, + "step": 12567 + }, + { + "epoch": 0.8726565754756284, + "grad_norm": 4.67574544630349, + "learning_rate": 4.1931655166959575e-07, + "loss": 0.4871, + "step": 12568 + }, + { + "epoch": 0.8727260102763506, + "grad_norm": 3.0708134403279392, + "learning_rate": 4.1886589876299567e-07, + "loss": 0.2945, + "step": 12569 + }, + { + "epoch": 0.8727954450770726, + "grad_norm": 4.737306363422412, + "learning_rate": 4.184154775649768e-07, + "loss": 0.5643, + "step": 12570 + }, + { + "epoch": 0.8728648798777947, + "grad_norm": 3.6394318316785785, + "learning_rate": 4.179652880983176e-07, + "loss": 0.3624, + "step": 12571 + }, + { + "epoch": 0.8729343146785169, + "grad_norm": 3.8847758302824, + "learning_rate": 4.175153303857887e-07, + "loss": 0.4923, + "step": 12572 + }, + { + "epoch": 0.873003749479239, + "grad_norm": 3.1678836473096217, + "learning_rate": 4.170656044501503e-07, + "loss": 0.2273, + "step": 12573 + }, + { + "epoch": 0.8730731842799612, + "grad_norm": 4.035057885110074, + "learning_rate": 4.166161103141464e-07, + "loss": 0.4978, + "step": 12574 + }, + { + "epoch": 0.8731426190806832, + "grad_norm": 4.236121573758128, + "learning_rate": 4.161668480005138e-07, + "loss": 0.2953, + "step": 12575 + }, + { + "epoch": 0.8732120538814053, + "grad_norm": 4.304436140713968, + "learning_rate": 4.157178175319748e-07, + "loss": 0.4418, + "step": 12576 + }, + { + "epoch": 0.8732814886821275, + "grad_norm": 4.667126598531289, + "learning_rate": 4.152690189312414e-07, + "loss": 0.5538, + "step": 12577 + }, + { + "epoch": 0.8733509234828496, + "grad_norm": 3.679895405624628, + "learning_rate": 4.148204522210131e-07, + "loss": 0.3302, + "step": 12578 + }, + { + "epoch": 0.8734203582835717, + "grad_norm": 4.320447610936786, + "learning_rate": 4.1437211742397734e-07, + "loss": 0.2611, + "step": 12579 + }, + { + "epoch": 0.8734897930842939, + "grad_norm": 5.253916669706793, + "learning_rate": 4.139240145628115e-07, + "loss": 0.5923, + "step": 12580 + }, + { + "epoch": 0.8735592278850159, + "grad_norm": 4.124862893288386, + "learning_rate": 4.134761436601792e-07, + "loss": 0.43, + "step": 12581 + }, + { + "epoch": 0.8736286626857381, + "grad_norm": 3.6500129818949674, + "learning_rate": 4.130285047387339e-07, + "loss": 0.3821, + "step": 12582 + }, + { + "epoch": 0.8736980974864602, + "grad_norm": 4.110696272907427, + "learning_rate": 4.1258109782111476e-07, + "loss": 0.4587, + "step": 12583 + }, + { + "epoch": 0.8737675322871823, + "grad_norm": 4.424237306625331, + "learning_rate": 4.1213392292995537e-07, + "loss": 0.714, + "step": 12584 + }, + { + "epoch": 0.8738369670879045, + "grad_norm": 3.7320199626994612, + "learning_rate": 
4.1168698008786925e-07, + "loss": 0.5332, + "step": 12585 + }, + { + "epoch": 0.8739064018886266, + "grad_norm": 3.7254062421172622, + "learning_rate": 4.1124026931746275e-07, + "loss": 0.2751, + "step": 12586 + }, + { + "epoch": 0.8739758366893488, + "grad_norm": 4.172097300752744, + "learning_rate": 4.1079379064133176e-07, + "loss": 0.3854, + "step": 12587 + }, + { + "epoch": 0.8740452714900708, + "grad_norm": 3.6899557749230816, + "learning_rate": 4.103475440820587e-07, + "loss": 0.4824, + "step": 12588 + }, + { + "epoch": 0.8741147062907929, + "grad_norm": 4.499574103922014, + "learning_rate": 4.0990152966221165e-07, + "loss": 0.6648, + "step": 12589 + }, + { + "epoch": 0.8741841410915151, + "grad_norm": 3.4986139799483964, + "learning_rate": 4.0945574740435203e-07, + "loss": 0.3635, + "step": 12590 + }, + { + "epoch": 0.8742535758922372, + "grad_norm": 3.796033793640912, + "learning_rate": 4.090101973310268e-07, + "loss": 0.5684, + "step": 12591 + }, + { + "epoch": 0.8743230106929593, + "grad_norm": 3.670243120610566, + "learning_rate": 4.0856487946476895e-07, + "loss": 0.4573, + "step": 12592 + }, + { + "epoch": 0.8743924454936814, + "grad_norm": 3.9657178196162186, + "learning_rate": 4.0811979382810506e-07, + "loss": 0.4062, + "step": 12593 + }, + { + "epoch": 0.8744618802944035, + "grad_norm": 3.031784508853248, + "learning_rate": 4.0767494044354597e-07, + "loss": 0.3889, + "step": 12594 + }, + { + "epoch": 0.8745313150951257, + "grad_norm": 4.315318649879098, + "learning_rate": 4.0723031933359193e-07, + "loss": 0.2987, + "step": 12595 + }, + { + "epoch": 0.8746007498958478, + "grad_norm": 5.025409254513043, + "learning_rate": 4.0678593052073114e-07, + "loss": 0.543, + "step": 12596 + }, + { + "epoch": 0.8746701846965699, + "grad_norm": 2.618939459496006, + "learning_rate": 4.0634177402744e-07, + "loss": 0.2107, + "step": 12597 + }, + { + "epoch": 0.8747396194972921, + "grad_norm": 4.019225632921441, + "learning_rate": 4.058978498761845e-07, + "loss": 0.4605, + "step": 12598 + }, + { + "epoch": 0.8748090542980141, + "grad_norm": 3.0606394788415257, + "learning_rate": 4.054541580894167e-07, + "loss": 0.3346, + "step": 12599 + }, + { + "epoch": 0.8748784890987363, + "grad_norm": 4.00611315859264, + "learning_rate": 4.0501069868957856e-07, + "loss": 0.4352, + "step": 12600 + }, + { + "epoch": 0.8749479238994584, + "grad_norm": 2.396993521572427, + "learning_rate": 4.045674716990994e-07, + "loss": 0.1533, + "step": 12601 + }, + { + "epoch": 0.8750173587001805, + "grad_norm": 5.571146997033519, + "learning_rate": 4.041244771403985e-07, + "loss": 0.6474, + "step": 12602 + }, + { + "epoch": 0.8750867935009027, + "grad_norm": 4.73467024757072, + "learning_rate": 4.0368171503587963e-07, + "loss": 0.3417, + "step": 12603 + }, + { + "epoch": 0.8751562283016248, + "grad_norm": 3.7075292726850697, + "learning_rate": 4.032391854079393e-07, + "loss": 0.2482, + "step": 12604 + }, + { + "epoch": 0.875225663102347, + "grad_norm": 5.130587694141415, + "learning_rate": 4.0279688827895905e-07, + "loss": 0.5931, + "step": 12605 + }, + { + "epoch": 0.875295097903069, + "grad_norm": 3.140700657148172, + "learning_rate": 4.023548236713104e-07, + "loss": 0.3465, + "step": 12606 + }, + { + "epoch": 0.8753645327037911, + "grad_norm": 6.77453630533239, + "learning_rate": 4.019129916073522e-07, + "loss": 0.2749, + "step": 12607 + }, + { + "epoch": 0.8754339675045133, + "grad_norm": 3.9859463077890065, + "learning_rate": 4.0147139210943155e-07, + "loss": 0.3835, + "step": 12608 + }, + { + "epoch": 
0.8755034023052354, + "grad_norm": 3.0789113122434175, + "learning_rate": 4.0103002519988445e-07, + "loss": 0.3287, + "step": 12609 + }, + { + "epoch": 0.8755728371059575, + "grad_norm": 3.445806902792383, + "learning_rate": 4.005888909010347e-07, + "loss": 0.3654, + "step": 12610 + }, + { + "epoch": 0.8756422719066796, + "grad_norm": 5.110273170312192, + "learning_rate": 4.001479892351945e-07, + "loss": 0.5662, + "step": 12611 + }, + { + "epoch": 0.8757117067074017, + "grad_norm": 3.384073851585577, + "learning_rate": 3.997073202246626e-07, + "loss": 0.3497, + "step": 12612 + }, + { + "epoch": 0.8757811415081239, + "grad_norm": 6.105989910662532, + "learning_rate": 3.9926688389173074e-07, + "loss": 0.7357, + "step": 12613 + }, + { + "epoch": 0.875850576308846, + "grad_norm": 3.676308334668443, + "learning_rate": 3.9882668025867264e-07, + "loss": 0.4337, + "step": 12614 + }, + { + "epoch": 0.8759200111095681, + "grad_norm": 4.074124533577655, + "learning_rate": 3.983867093477539e-07, + "loss": 0.4398, + "step": 12615 + }, + { + "epoch": 0.8759894459102903, + "grad_norm": 4.239186644871816, + "learning_rate": 3.9794697118123007e-07, + "loss": 0.4549, + "step": 12616 + }, + { + "epoch": 0.8760588807110123, + "grad_norm": 5.016385487611267, + "learning_rate": 3.975074657813405e-07, + "loss": 0.3726, + "step": 12617 + }, + { + "epoch": 0.8761283155117345, + "grad_norm": 3.3678270290961336, + "learning_rate": 3.9706819317031355e-07, + "loss": 0.3761, + "step": 12618 + }, + { + "epoch": 0.8761977503124566, + "grad_norm": 3.6639048286158786, + "learning_rate": 3.966291533703698e-07, + "loss": 0.3069, + "step": 12619 + }, + { + "epoch": 0.8762671851131787, + "grad_norm": 4.195334065068262, + "learning_rate": 3.961903464037159e-07, + "loss": 0.3566, + "step": 12620 + }, + { + "epoch": 0.8763366199139009, + "grad_norm": 4.5602322676139195, + "learning_rate": 3.9575177229254247e-07, + "loss": 0.4911, + "step": 12621 + }, + { + "epoch": 0.876406054714623, + "grad_norm": 4.517805898530028, + "learning_rate": 3.953134310590351e-07, + "loss": 0.4461, + "step": 12622 + }, + { + "epoch": 0.876475489515345, + "grad_norm": 2.822823593010967, + "learning_rate": 3.948753227253643e-07, + "loss": 0.3225, + "step": 12623 + }, + { + "epoch": 0.8765449243160672, + "grad_norm": 5.112648586758141, + "learning_rate": 3.9443744731368804e-07, + "loss": 0.4956, + "step": 12624 + }, + { + "epoch": 0.8766143591167893, + "grad_norm": 4.662548127363206, + "learning_rate": 3.939998048461546e-07, + "loss": 0.3771, + "step": 12625 + }, + { + "epoch": 0.8766837939175115, + "grad_norm": 8.915333391766282, + "learning_rate": 3.9356239534489857e-07, + "loss": 0.5583, + "step": 12626 + }, + { + "epoch": 0.8767532287182336, + "grad_norm": 3.9092344329258304, + "learning_rate": 3.931252188320445e-07, + "loss": 0.2941, + "step": 12627 + }, + { + "epoch": 0.8768226635189557, + "grad_norm": 4.19105871778343, + "learning_rate": 3.9268827532970344e-07, + "loss": 0.4441, + "step": 12628 + }, + { + "epoch": 0.8768920983196778, + "grad_norm": 4.6513537056612355, + "learning_rate": 3.922515648599762e-07, + "loss": 0.6096, + "step": 12629 + }, + { + "epoch": 0.8769615331203999, + "grad_norm": 4.291674837798719, + "learning_rate": 3.918150874449511e-07, + "loss": 0.5898, + "step": 12630 + }, + { + "epoch": 0.8770309679211221, + "grad_norm": 3.0278689231914866, + "learning_rate": 3.9137884310670396e-07, + "loss": 0.3127, + "step": 12631 + }, + { + "epoch": 0.8771004027218442, + "grad_norm": 4.1605490414467985, + "learning_rate": 
3.9094283186729866e-07, + "loss": 0.3284, + "step": 12632 + }, + { + "epoch": 0.8771698375225663, + "grad_norm": 3.3417952172712493, + "learning_rate": 3.9050705374879097e-07, + "loss": 0.2936, + "step": 12633 + }, + { + "epoch": 0.8772392723232885, + "grad_norm": 4.786249632664648, + "learning_rate": 3.90071508773221e-07, + "loss": 0.3526, + "step": 12634 + }, + { + "epoch": 0.8773087071240105, + "grad_norm": 3.972142077091876, + "learning_rate": 3.896361969626161e-07, + "loss": 0.4675, + "step": 12635 + }, + { + "epoch": 0.8773781419247326, + "grad_norm": 3.6012966916971805, + "learning_rate": 3.89201118338996e-07, + "loss": 0.269, + "step": 12636 + }, + { + "epoch": 0.8774475767254548, + "grad_norm": 4.6629417124770605, + "learning_rate": 3.8876627292436577e-07, + "loss": 0.5505, + "step": 12637 + }, + { + "epoch": 0.8775170115261769, + "grad_norm": 3.6903982074673003, + "learning_rate": 3.8833166074072007e-07, + "loss": 0.3713, + "step": 12638 + }, + { + "epoch": 0.8775864463268991, + "grad_norm": 4.6796801634560135, + "learning_rate": 3.8789728181004017e-07, + "loss": 0.4503, + "step": 12639 + }, + { + "epoch": 0.8776558811276212, + "grad_norm": 2.8912570578768984, + "learning_rate": 3.874631361542963e-07, + "loss": 0.3085, + "step": 12640 + }, + { + "epoch": 0.8777253159283432, + "grad_norm": 4.166656775077804, + "learning_rate": 3.8702922379544814e-07, + "loss": 0.4316, + "step": 12641 + }, + { + "epoch": 0.8777947507290654, + "grad_norm": 3.820620341473406, + "learning_rate": 3.86595544755442e-07, + "loss": 0.3623, + "step": 12642 + }, + { + "epoch": 0.8778641855297875, + "grad_norm": 3.8015599326835776, + "learning_rate": 3.861620990562126e-07, + "loss": 0.4097, + "step": 12643 + }, + { + "epoch": 0.8779336203305097, + "grad_norm": 4.132296166827703, + "learning_rate": 3.857288867196823e-07, + "loss": 0.4902, + "step": 12644 + }, + { + "epoch": 0.8780030551312318, + "grad_norm": 3.992867200307658, + "learning_rate": 3.852959077677654e-07, + "loss": 0.4596, + "step": 12645 + }, + { + "epoch": 0.8780724899319539, + "grad_norm": 3.2711590741923997, + "learning_rate": 3.8486316222235864e-07, + "loss": 0.1807, + "step": 12646 + }, + { + "epoch": 0.878141924732676, + "grad_norm": 3.7169861304398797, + "learning_rate": 3.844306501053502e-07, + "loss": 0.2897, + "step": 12647 + }, + { + "epoch": 0.8782113595333981, + "grad_norm": 3.7936713249323404, + "learning_rate": 3.839983714386181e-07, + "loss": 0.3987, + "step": 12648 + }, + { + "epoch": 0.8782807943341202, + "grad_norm": 3.942630945232322, + "learning_rate": 3.835663262440242e-07, + "loss": 0.4574, + "step": 12649 + }, + { + "epoch": 0.8783502291348424, + "grad_norm": 4.355991869468211, + "learning_rate": 3.8313451454342064e-07, + "loss": 0.442, + "step": 12650 + }, + { + "epoch": 0.8784196639355645, + "grad_norm": 4.579467624392515, + "learning_rate": 3.827029363586504e-07, + "loss": 0.5073, + "step": 12651 + }, + { + "epoch": 0.8784890987362867, + "grad_norm": 3.2550135907896482, + "learning_rate": 3.82271591711541e-07, + "loss": 0.2771, + "step": 12652 + }, + { + "epoch": 0.8785585335370087, + "grad_norm": 2.9855263354187516, + "learning_rate": 3.818404806239079e-07, + "loss": 0.3344, + "step": 12653 + }, + { + "epoch": 0.8786279683377308, + "grad_norm": 3.04457199173619, + "learning_rate": 3.814096031175579e-07, + "loss": 0.1929, + "step": 12654 + }, + { + "epoch": 0.878697403138453, + "grad_norm": 3.4069836926640664, + "learning_rate": 3.809789592142843e-07, + "loss": 0.4072, + "step": 12655 + }, + { + "epoch": 
0.8787668379391751, + "grad_norm": 3.844267439635705, + "learning_rate": 3.805485489358679e-07, + "loss": 0.4193, + "step": 12656 + }, + { + "epoch": 0.8788362727398973, + "grad_norm": 3.9221023368185057, + "learning_rate": 3.801183723040791e-07, + "loss": 0.4036, + "step": 12657 + }, + { + "epoch": 0.8789057075406194, + "grad_norm": 4.426114254252452, + "learning_rate": 3.7968842934067496e-07, + "loss": 0.615, + "step": 12658 + }, + { + "epoch": 0.8789751423413414, + "grad_norm": 3.4735238091240794, + "learning_rate": 3.7925872006740136e-07, + "loss": 0.2957, + "step": 12659 + }, + { + "epoch": 0.8790445771420636, + "grad_norm": 3.6216257281109825, + "learning_rate": 3.788292445059938e-07, + "loss": 0.6276, + "step": 12660 + }, + { + "epoch": 0.8791140119427857, + "grad_norm": 4.6578165274889605, + "learning_rate": 3.784000026781731e-07, + "loss": 0.4337, + "step": 12661 + }, + { + "epoch": 0.8791834467435079, + "grad_norm": 9.18987774923812, + "learning_rate": 3.7797099460565036e-07, + "loss": 0.8011, + "step": 12662 + }, + { + "epoch": 0.87925288154423, + "grad_norm": 3.3000471517590375, + "learning_rate": 3.775422203101259e-07, + "loss": 0.1853, + "step": 12663 + }, + { + "epoch": 0.8793223163449521, + "grad_norm": 3.772326122213835, + "learning_rate": 3.771136798132835e-07, + "loss": 0.267, + "step": 12664 + }, + { + "epoch": 0.8793917511456742, + "grad_norm": 3.3859419195871685, + "learning_rate": 3.7668537313680087e-07, + "loss": 0.3804, + "step": 12665 + }, + { + "epoch": 0.8794611859463963, + "grad_norm": 4.685378955076384, + "learning_rate": 3.7625730030234175e-07, + "loss": 0.5829, + "step": 12666 + }, + { + "epoch": 0.8795306207471184, + "grad_norm": 4.708020617201583, + "learning_rate": 3.758294613315544e-07, + "loss": 0.5483, + "step": 12667 + }, + { + "epoch": 0.8796000555478406, + "grad_norm": 4.190510249854428, + "learning_rate": 3.7540185624608096e-07, + "loss": 0.3454, + "step": 12668 + }, + { + "epoch": 0.8796694903485627, + "grad_norm": 4.108965470514319, + "learning_rate": 3.7497448506754854e-07, + "loss": 0.3344, + "step": 12669 + }, + { + "epoch": 0.8797389251492849, + "grad_norm": 4.996142393352747, + "learning_rate": 3.7454734781757265e-07, + "loss": 0.4025, + "step": 12670 + }, + { + "epoch": 0.879808359950007, + "grad_norm": 3.841471433459928, + "learning_rate": 3.741204445177588e-07, + "loss": 0.3632, + "step": 12671 + }, + { + "epoch": 0.879877794750729, + "grad_norm": 4.999474765744073, + "learning_rate": 3.736937751896974e-07, + "loss": 0.6572, + "step": 12672 + }, + { + "epoch": 0.8799472295514512, + "grad_norm": 3.5004973759737226, + "learning_rate": 3.732673398549702e-07, + "loss": 0.3181, + "step": 12673 + }, + { + "epoch": 0.8800166643521733, + "grad_norm": 2.841794115592065, + "learning_rate": 3.7284113853514604e-07, + "loss": 0.2392, + "step": 12674 + }, + { + "epoch": 0.8800860991528955, + "grad_norm": 3.3162983773781294, + "learning_rate": 3.724151712517804e-07, + "loss": 0.1652, + "step": 12675 + }, + { + "epoch": 0.8801555339536176, + "grad_norm": 4.210404759501814, + "learning_rate": 3.719894380264183e-07, + "loss": 0.4902, + "step": 12676 + }, + { + "epoch": 0.8802249687543396, + "grad_norm": 3.859281851974725, + "learning_rate": 3.7156393888059527e-07, + "loss": 0.3392, + "step": 12677 + }, + { + "epoch": 0.8802944035550618, + "grad_norm": 4.835021900836062, + "learning_rate": 3.711386738358297e-07, + "loss": 0.5415, + "step": 12678 + }, + { + "epoch": 0.8803638383557839, + "grad_norm": 4.134633742220323, + "learning_rate": 
3.70713642913631e-07, + "loss": 0.3743, + "step": 12679 + }, + { + "epoch": 0.880433273156506, + "grad_norm": 3.576183698128719, + "learning_rate": 3.702888461355003e-07, + "loss": 0.4207, + "step": 12680 + }, + { + "epoch": 0.8805027079572282, + "grad_norm": 4.398132638846183, + "learning_rate": 3.6986428352291937e-07, + "loss": 0.6311, + "step": 12681 + }, + { + "epoch": 0.8805721427579503, + "grad_norm": 3.9058294176657196, + "learning_rate": 3.694399550973632e-07, + "loss": 0.3683, + "step": 12682 + }, + { + "epoch": 0.8806415775586725, + "grad_norm": 3.8231625104467635, + "learning_rate": 3.6901586088029516e-07, + "loss": 0.484, + "step": 12683 + }, + { + "epoch": 0.8807110123593945, + "grad_norm": 3.8898835086243873, + "learning_rate": 3.6859200089316426e-07, + "loss": 0.3479, + "step": 12684 + }, + { + "epoch": 0.8807804471601166, + "grad_norm": 3.9659673186307276, + "learning_rate": 3.6816837515740946e-07, + "loss": 0.4872, + "step": 12685 + }, + { + "epoch": 0.8808498819608388, + "grad_norm": 4.158553391528972, + "learning_rate": 3.677449836944563e-07, + "loss": 0.3587, + "step": 12686 + }, + { + "epoch": 0.8809193167615609, + "grad_norm": 3.7778478260029256, + "learning_rate": 3.6732182652572057e-07, + "loss": 0.2871, + "step": 12687 + }, + { + "epoch": 0.8809887515622831, + "grad_norm": 3.1938660342687517, + "learning_rate": 3.6689890367260507e-07, + "loss": 0.2493, + "step": 12688 + }, + { + "epoch": 0.8810581863630051, + "grad_norm": 3.7941734408743226, + "learning_rate": 3.664762151564999e-07, + "loss": 0.4458, + "step": 12689 + }, + { + "epoch": 0.8811276211637272, + "grad_norm": 4.098733049693217, + "learning_rate": 3.660537609987846e-07, + "loss": 0.4641, + "step": 12690 + }, + { + "epoch": 0.8811970559644494, + "grad_norm": 4.028860519529612, + "learning_rate": 3.656315412208261e-07, + "loss": 0.4478, + "step": 12691 + }, + { + "epoch": 0.8812664907651715, + "grad_norm": 2.918185055922554, + "learning_rate": 3.6520955584398056e-07, + "loss": 0.2254, + "step": 12692 + }, + { + "epoch": 0.8813359255658936, + "grad_norm": 3.34426464416976, + "learning_rate": 3.6478780488958974e-07, + "loss": 0.3035, + "step": 12693 + }, + { + "epoch": 0.8814053603666158, + "grad_norm": 4.429481927535252, + "learning_rate": 3.643662883789878e-07, + "loss": 0.5489, + "step": 12694 + }, + { + "epoch": 0.8814747951673378, + "grad_norm": 3.8273504833703362, + "learning_rate": 3.6394500633349425e-07, + "loss": 0.3708, + "step": 12695 + }, + { + "epoch": 0.88154422996806, + "grad_norm": 4.352768491029765, + "learning_rate": 3.6352395877441493e-07, + "loss": 0.4468, + "step": 12696 + }, + { + "epoch": 0.8816136647687821, + "grad_norm": 3.97796817359362, + "learning_rate": 3.6310314572304775e-07, + "loss": 0.5135, + "step": 12697 + }, + { + "epoch": 0.8816830995695042, + "grad_norm": 4.544509006029131, + "learning_rate": 3.626825672006773e-07, + "loss": 0.354, + "step": 12698 + }, + { + "epoch": 0.8817525343702264, + "grad_norm": 3.738559685056856, + "learning_rate": 3.622622232285733e-07, + "loss": 0.3289, + "step": 12699 + }, + { + "epoch": 0.8818219691709485, + "grad_norm": 3.623546972480734, + "learning_rate": 3.6184211382799875e-07, + "loss": 0.3173, + "step": 12700 + }, + { + "epoch": 0.8818914039716707, + "grad_norm": 2.6672287336018643, + "learning_rate": 3.6142223902020214e-07, + "loss": 0.2758, + "step": 12701 + }, + { + "epoch": 0.8819608387723927, + "grad_norm": 3.8079443357632723, + "learning_rate": 3.610025988264193e-07, + "loss": 0.4385, + "step": 12702 + }, + { + "epoch": 
0.8820302735731148, + "grad_norm": 3.580002911235336, + "learning_rate": 3.6058319326787603e-07, + "loss": 0.1842, + "step": 12703 + }, + { + "epoch": 0.882099708373837, + "grad_norm": 4.889151974094751, + "learning_rate": 3.601640223657849e-07, + "loss": 0.5459, + "step": 12704 + }, + { + "epoch": 0.8821691431745591, + "grad_norm": 3.111699489361301, + "learning_rate": 3.597450861413471e-07, + "loss": 0.2637, + "step": 12705 + }, + { + "epoch": 0.8822385779752812, + "grad_norm": 3.9026267811988604, + "learning_rate": 3.5932638461575244e-07, + "loss": 0.4011, + "step": 12706 + }, + { + "epoch": 0.8823080127760033, + "grad_norm": 3.0997107176118854, + "learning_rate": 3.589079178101773e-07, + "loss": 0.2146, + "step": 12707 + }, + { + "epoch": 0.8823774475767254, + "grad_norm": 4.415997193583695, + "learning_rate": 3.584896857457876e-07, + "loss": 0.5051, + "step": 12708 + }, + { + "epoch": 0.8824468823774476, + "grad_norm": 3.8402917681412188, + "learning_rate": 3.580716884437385e-07, + "loss": 0.3979, + "step": 12709 + }, + { + "epoch": 0.8825163171781697, + "grad_norm": 4.801482224365796, + "learning_rate": 3.5765392592517044e-07, + "loss": 0.4159, + "step": 12710 + }, + { + "epoch": 0.8825857519788918, + "grad_norm": 4.442724235905857, + "learning_rate": 3.572363982112126e-07, + "loss": 0.6103, + "step": 12711 + }, + { + "epoch": 0.882655186779614, + "grad_norm": 5.22368719083455, + "learning_rate": 3.568191053229847e-07, + "loss": 0.4527, + "step": 12712 + }, + { + "epoch": 0.882724621580336, + "grad_norm": 3.9302027455103805, + "learning_rate": 3.5640204728159266e-07, + "loss": 0.4312, + "step": 12713 + }, + { + "epoch": 0.8827940563810582, + "grad_norm": 3.972405226974514, + "learning_rate": 3.559852241081302e-07, + "loss": 0.4222, + "step": 12714 + }, + { + "epoch": 0.8828634911817803, + "grad_norm": 4.917674485548975, + "learning_rate": 3.555686358236804e-07, + "loss": 0.5951, + "step": 12715 + }, + { + "epoch": 0.8829329259825024, + "grad_norm": 4.029427733456624, + "learning_rate": 3.551522824493131e-07, + "loss": 0.357, + "step": 12716 + }, + { + "epoch": 0.8830023607832246, + "grad_norm": 2.7753817190898076, + "learning_rate": 3.547361640060876e-07, + "loss": 0.2712, + "step": 12717 + }, + { + "epoch": 0.8830717955839467, + "grad_norm": 4.633762114409212, + "learning_rate": 3.543202805150503e-07, + "loss": 0.6012, + "step": 12718 + }, + { + "epoch": 0.8831412303846687, + "grad_norm": 4.964980065719515, + "learning_rate": 3.539046319972367e-07, + "loss": 0.4519, + "step": 12719 + }, + { + "epoch": 0.8832106651853909, + "grad_norm": 4.464788278461588, + "learning_rate": 3.534892184736688e-07, + "loss": 0.3419, + "step": 12720 + }, + { + "epoch": 0.883280099986113, + "grad_norm": 3.8196769046504486, + "learning_rate": 3.530740399653587e-07, + "loss": 0.3093, + "step": 12721 + }, + { + "epoch": 0.8833495347868352, + "grad_norm": 4.199211137671611, + "learning_rate": 3.526590964933052e-07, + "loss": 0.5223, + "step": 12722 + }, + { + "epoch": 0.8834189695875573, + "grad_norm": 3.4651955952471485, + "learning_rate": 3.5224438807849594e-07, + "loss": 0.2419, + "step": 12723 + }, + { + "epoch": 0.8834884043882794, + "grad_norm": 3.988985114580155, + "learning_rate": 3.5182991474190573e-07, + "loss": 0.8336, + "step": 12724 + }, + { + "epoch": 0.8835578391890015, + "grad_norm": 3.0199584854856383, + "learning_rate": 3.514156765044985e-07, + "loss": 0.3336, + "step": 12725 + }, + { + "epoch": 0.8836272739897236, + "grad_norm": 3.4218371562388743, + "learning_rate": 3.510016733872268e-07, 
+ "loss": 0.2836, + "step": 12726 + }, + { + "epoch": 0.8836967087904458, + "grad_norm": 4.95277231711674, + "learning_rate": 3.5058790541103003e-07, + "loss": 0.5291, + "step": 12727 + }, + { + "epoch": 0.8837661435911679, + "grad_norm": 3.9885738526162293, + "learning_rate": 3.5017437259683487e-07, + "loss": 0.2873, + "step": 12728 + }, + { + "epoch": 0.88383557839189, + "grad_norm": 3.976576150825886, + "learning_rate": 3.497610749655589e-07, + "loss": 0.5622, + "step": 12729 + }, + { + "epoch": 0.8839050131926122, + "grad_norm": 4.7203247654926495, + "learning_rate": 3.4934801253810656e-07, + "loss": 0.4644, + "step": 12730 + }, + { + "epoch": 0.8839744479933342, + "grad_norm": 5.587966168831514, + "learning_rate": 3.489351853353673e-07, + "loss": 0.6989, + "step": 12731 + }, + { + "epoch": 0.8840438827940564, + "grad_norm": 4.387133364631243, + "learning_rate": 3.485225933782244e-07, + "loss": 0.3713, + "step": 12732 + }, + { + "epoch": 0.8841133175947785, + "grad_norm": 3.789527249945736, + "learning_rate": 3.48110236687545e-07, + "loss": 0.3252, + "step": 12733 + }, + { + "epoch": 0.8841827523955006, + "grad_norm": 4.1147341436474125, + "learning_rate": 3.476981152841857e-07, + "loss": 0.5674, + "step": 12734 + }, + { + "epoch": 0.8842521871962228, + "grad_norm": 3.545641475231648, + "learning_rate": 3.472862291889917e-07, + "loss": 0.3125, + "step": 12735 + }, + { + "epoch": 0.8843216219969449, + "grad_norm": 3.609643425963449, + "learning_rate": 3.4687457842279503e-07, + "loss": 0.5486, + "step": 12736 + }, + { + "epoch": 0.8843910567976669, + "grad_norm": 3.453174685742406, + "learning_rate": 3.4646316300641636e-07, + "loss": 0.3823, + "step": 12737 + }, + { + "epoch": 0.8844604915983891, + "grad_norm": 3.5744564382108304, + "learning_rate": 3.460519829606662e-07, + "loss": 0.4, + "step": 12738 + }, + { + "epoch": 0.8845299263991112, + "grad_norm": 3.981195410336905, + "learning_rate": 3.4564103830634024e-07, + "loss": 0.487, + "step": 12739 + }, + { + "epoch": 0.8845993611998334, + "grad_norm": 5.900618063011281, + "learning_rate": 3.4523032906422225e-07, + "loss": 0.435, + "step": 12740 + }, + { + "epoch": 0.8846687960005555, + "grad_norm": 4.198322082859053, + "learning_rate": 3.448198552550891e-07, + "loss": 0.5029, + "step": 12741 + }, + { + "epoch": 0.8847382308012776, + "grad_norm": 3.4530794755925327, + "learning_rate": 3.444096168996991e-07, + "loss": 0.4522, + "step": 12742 + }, + { + "epoch": 0.8848076656019997, + "grad_norm": 5.243828308957343, + "learning_rate": 3.439996140188018e-07, + "loss": 0.408, + "step": 12743 + }, + { + "epoch": 0.8848771004027218, + "grad_norm": 4.758751932550911, + "learning_rate": 3.435898466331361e-07, + "loss": 0.5295, + "step": 12744 + }, + { + "epoch": 0.884946535203444, + "grad_norm": 3.9425006777772187, + "learning_rate": 3.4318031476342715e-07, + "loss": 0.4525, + "step": 12745 + }, + { + "epoch": 0.8850159700041661, + "grad_norm": 2.9813214304713913, + "learning_rate": 3.4277101843038785e-07, + "loss": 0.2667, + "step": 12746 + }, + { + "epoch": 0.8850854048048882, + "grad_norm": 3.1216116126948, + "learning_rate": 3.423619576547205e-07, + "loss": 0.2429, + "step": 12747 + }, + { + "epoch": 0.8851548396056104, + "grad_norm": 4.994077530071743, + "learning_rate": 3.4195313245711514e-07, + "loss": 0.4746, + "step": 12748 + }, + { + "epoch": 0.8852242744063324, + "grad_norm": 3.7331412703198437, + "learning_rate": 3.4154454285824924e-07, + "loss": 0.398, + "step": 12749 + }, + { + "epoch": 0.8852937092070545, + "grad_norm": 
3.5133878355916925, + "learning_rate": 3.4113618887878894e-07, + "loss": 0.2985, + "step": 12750 + }, + { + "epoch": 0.8853631440077767, + "grad_norm": 4.408846392387566, + "learning_rate": 3.407280705393884e-07, + "loss": 0.4437, + "step": 12751 + }, + { + "epoch": 0.8854325788084988, + "grad_norm": 6.394271493485068, + "learning_rate": 3.403201878606899e-07, + "loss": 0.5735, + "step": 12752 + }, + { + "epoch": 0.885502013609221, + "grad_norm": 4.424049907089565, + "learning_rate": 3.39912540863323e-07, + "loss": 0.6709, + "step": 12753 + }, + { + "epoch": 0.8855714484099431, + "grad_norm": 4.5274157315501355, + "learning_rate": 3.395051295679064e-07, + "loss": 0.3046, + "step": 12754 + }, + { + "epoch": 0.8856408832106651, + "grad_norm": 4.853856296491952, + "learning_rate": 3.390979539950479e-07, + "loss": 0.3848, + "step": 12755 + }, + { + "epoch": 0.8857103180113873, + "grad_norm": 5.593923492862155, + "learning_rate": 3.386910141653399e-07, + "loss": 0.5193, + "step": 12756 + }, + { + "epoch": 0.8857797528121094, + "grad_norm": 4.565479140200587, + "learning_rate": 3.382843100993649e-07, + "loss": 0.4083, + "step": 12757 + }, + { + "epoch": 0.8858491876128316, + "grad_norm": 3.968843219780575, + "learning_rate": 3.3787784181769533e-07, + "loss": 0.417, + "step": 12758 + }, + { + "epoch": 0.8859186224135537, + "grad_norm": 3.893853526762602, + "learning_rate": 3.3747160934088976e-07, + "loss": 0.3735, + "step": 12759 + }, + { + "epoch": 0.8859880572142758, + "grad_norm": 3.991488499747273, + "learning_rate": 3.3706561268949276e-07, + "loss": 0.4928, + "step": 12760 + }, + { + "epoch": 0.886057492014998, + "grad_norm": 3.4161515450748974, + "learning_rate": 3.366598518840408e-07, + "loss": 0.2873, + "step": 12761 + }, + { + "epoch": 0.88612692681572, + "grad_norm": 6.454785446147054, + "learning_rate": 3.3625432694505746e-07, + "loss": 0.5785, + "step": 12762 + }, + { + "epoch": 0.8861963616164421, + "grad_norm": 4.879053916398975, + "learning_rate": 3.3584903789305233e-07, + "loss": 0.4228, + "step": 12763 + }, + { + "epoch": 0.8862657964171643, + "grad_norm": 4.351524129746283, + "learning_rate": 3.354439847485247e-07, + "loss": 0.6223, + "step": 12764 + }, + { + "epoch": 0.8863352312178864, + "grad_norm": 2.5065225290514372, + "learning_rate": 3.3503916753196254e-07, + "loss": 0.239, + "step": 12765 + }, + { + "epoch": 0.8864046660186086, + "grad_norm": 2.456231418847352, + "learning_rate": 3.346345862638406e-07, + "loss": 0.0807, + "step": 12766 + }, + { + "epoch": 0.8864741008193306, + "grad_norm": 4.592162900792178, + "learning_rate": 3.34230240964622e-07, + "loss": 0.5394, + "step": 12767 + }, + { + "epoch": 0.8865435356200527, + "grad_norm": 4.065203923117166, + "learning_rate": 3.3382613165475807e-07, + "loss": 0.3497, + "step": 12768 + }, + { + "epoch": 0.8866129704207749, + "grad_norm": 3.186644246087373, + "learning_rate": 3.3342225835468754e-07, + "loss": 0.2981, + "step": 12769 + }, + { + "epoch": 0.886682405221497, + "grad_norm": 5.420063628362271, + "learning_rate": 3.3301862108483964e-07, + "loss": 0.5912, + "step": 12770 + }, + { + "epoch": 0.8867518400222192, + "grad_norm": 4.927465996201278, + "learning_rate": 3.326152198656285e-07, + "loss": 0.4938, + "step": 12771 + }, + { + "epoch": 0.8868212748229413, + "grad_norm": 4.005408831020627, + "learning_rate": 3.322120547174573e-07, + "loss": 0.4848, + "step": 12772 + }, + { + "epoch": 0.8868907096236633, + "grad_norm": 2.627080537802142, + "learning_rate": 3.3180912566071975e-07, + "loss": 0.1652, + "step": 12773 + 
}, + { + "epoch": 0.8869601444243855, + "grad_norm": 4.164698275224688, + "learning_rate": 3.3140643271579287e-07, + "loss": 0.4219, + "step": 12774 + }, + { + "epoch": 0.8870295792251076, + "grad_norm": 3.9968159405571932, + "learning_rate": 3.3100397590304645e-07, + "loss": 0.4772, + "step": 12775 + }, + { + "epoch": 0.8870990140258297, + "grad_norm": 3.241536811905565, + "learning_rate": 3.306017552428359e-07, + "loss": 0.1911, + "step": 12776 + }, + { + "epoch": 0.8871684488265519, + "grad_norm": 3.5437850709659604, + "learning_rate": 3.3019977075550436e-07, + "loss": 0.3443, + "step": 12777 + }, + { + "epoch": 0.887237883627274, + "grad_norm": 4.063452972664163, + "learning_rate": 3.297980224613845e-07, + "loss": 0.4281, + "step": 12778 + }, + { + "epoch": 0.8873073184279962, + "grad_norm": 4.362934216573782, + "learning_rate": 3.293965103807956e-07, + "loss": 0.4766, + "step": 12779 + }, + { + "epoch": 0.8873767532287182, + "grad_norm": 3.8439030057020283, + "learning_rate": 3.2899523453404635e-07, + "loss": 0.3446, + "step": 12780 + }, + { + "epoch": 0.8874461880294403, + "grad_norm": 3.01740956779014, + "learning_rate": 3.285941949414323e-07, + "loss": 0.3692, + "step": 12781 + }, + { + "epoch": 0.8875156228301625, + "grad_norm": 3.565528847331846, + "learning_rate": 3.2819339162323826e-07, + "loss": 0.3305, + "step": 12782 + }, + { + "epoch": 0.8875850576308846, + "grad_norm": 5.072536479030013, + "learning_rate": 3.277928245997347e-07, + "loss": 0.5515, + "step": 12783 + }, + { + "epoch": 0.8876544924316068, + "grad_norm": 12.128032611159652, + "learning_rate": 3.2739249389118487e-07, + "loss": 0.3927, + "step": 12784 + }, + { + "epoch": 0.8877239272323288, + "grad_norm": 4.199790191276391, + "learning_rate": 3.2699239951783477e-07, + "loss": 0.2579, + "step": 12785 + }, + { + "epoch": 0.8877933620330509, + "grad_norm": 3.401943405285184, + "learning_rate": 3.265925414999199e-07, + "loss": 0.2318, + "step": 12786 + }, + { + "epoch": 0.8878627968337731, + "grad_norm": 4.453269277090434, + "learning_rate": 3.261929198576669e-07, + "loss": 0.4031, + "step": 12787 + }, + { + "epoch": 0.8879322316344952, + "grad_norm": 3.7857765360679965, + "learning_rate": 3.257935346112884e-07, + "loss": 0.2769, + "step": 12788 + }, + { + "epoch": 0.8880016664352174, + "grad_norm": 3.980913851082153, + "learning_rate": 3.253943857809816e-07, + "loss": 0.4005, + "step": 12789 + }, + { + "epoch": 0.8880711012359395, + "grad_norm": 3.9704615029819013, + "learning_rate": 3.2499547338693815e-07, + "loss": 0.5446, + "step": 12790 + }, + { + "epoch": 0.8881405360366615, + "grad_norm": 4.580995929496314, + "learning_rate": 3.245967974493347e-07, + "loss": 0.3834, + "step": 12791 + }, + { + "epoch": 0.8882099708373837, + "grad_norm": 3.911201017527928, + "learning_rate": 3.241983579883329e-07, + "loss": 0.433, + "step": 12792 + }, + { + "epoch": 0.8882794056381058, + "grad_norm": 4.146645668179906, + "learning_rate": 3.2380015502408767e-07, + "loss": 0.4232, + "step": 12793 + }, + { + "epoch": 0.8883488404388279, + "grad_norm": 2.217702922049972, + "learning_rate": 3.234021885767391e-07, + "loss": 0.1392, + "step": 12794 + }, + { + "epoch": 0.8884182752395501, + "grad_norm": 4.267387497850099, + "learning_rate": 3.2300445866641603e-07, + "loss": 0.5891, + "step": 12795 + }, + { + "epoch": 0.8884877100402722, + "grad_norm": 4.481287548659737, + "learning_rate": 3.2260696531323464e-07, + "loss": 0.5603, + "step": 12796 + }, + { + "epoch": 0.8885571448409944, + "grad_norm": 4.8593917066126116, + 
"learning_rate": 3.222097085373005e-07, + "loss": 0.5428, + "step": 12797 + }, + { + "epoch": 0.8886265796417164, + "grad_norm": 3.254290156850447, + "learning_rate": 3.218126883587064e-07, + "loss": 0.3581, + "step": 12798 + }, + { + "epoch": 0.8886960144424385, + "grad_norm": 3.23905754878414, + "learning_rate": 3.214159047975324e-07, + "loss": 0.3206, + "step": 12799 + }, + { + "epoch": 0.8887654492431607, + "grad_norm": 3.4720483769098336, + "learning_rate": 3.210193578738474e-07, + "loss": 0.2885, + "step": 12800 + }, + { + "epoch": 0.8888348840438828, + "grad_norm": 4.441164111727076, + "learning_rate": 3.2062304760770825e-07, + "loss": 0.4466, + "step": 12801 + }, + { + "epoch": 0.888904318844605, + "grad_norm": 3.472163700677182, + "learning_rate": 3.2022697401916215e-07, + "loss": 0.3727, + "step": 12802 + }, + { + "epoch": 0.888973753645327, + "grad_norm": 4.150602688090228, + "learning_rate": 3.198311371282381e-07, + "loss": 0.4964, + "step": 12803 + }, + { + "epoch": 0.8890431884460491, + "grad_norm": 2.876602990657226, + "learning_rate": 3.194355369549601e-07, + "loss": 0.2766, + "step": 12804 + }, + { + "epoch": 0.8891126232467713, + "grad_norm": 3.6635571070717043, + "learning_rate": 3.1904017351933715e-07, + "loss": 0.298, + "step": 12805 + }, + { + "epoch": 0.8891820580474934, + "grad_norm": 3.683589839225522, + "learning_rate": 3.186450468413638e-07, + "loss": 0.3893, + "step": 12806 + }, + { + "epoch": 0.8892514928482155, + "grad_norm": 5.032389855644175, + "learning_rate": 3.1825015694102736e-07, + "loss": 0.264, + "step": 12807 + }, + { + "epoch": 0.8893209276489377, + "grad_norm": 4.625058846296304, + "learning_rate": 3.178555038383002e-07, + "loss": 0.6085, + "step": 12808 + }, + { + "epoch": 0.8893903624496597, + "grad_norm": 3.994245846464518, + "learning_rate": 3.1746108755314307e-07, + "loss": 0.4114, + "step": 12809 + }, + { + "epoch": 0.8894597972503819, + "grad_norm": 3.2871479565223165, + "learning_rate": 3.1706690810550613e-07, + "loss": 0.3126, + "step": 12810 + }, + { + "epoch": 0.889529232051104, + "grad_norm": 2.6788534825148336, + "learning_rate": 3.166729655153256e-07, + "loss": 0.1957, + "step": 12811 + }, + { + "epoch": 0.8895986668518261, + "grad_norm": 4.054121480576292, + "learning_rate": 3.1627925980252717e-07, + "loss": 0.3945, + "step": 12812 + }, + { + "epoch": 0.8896681016525483, + "grad_norm": 2.7179163563916684, + "learning_rate": 3.1588579098702333e-07, + "loss": 0.2858, + "step": 12813 + }, + { + "epoch": 0.8897375364532704, + "grad_norm": 4.539162753457772, + "learning_rate": 3.154925590887159e-07, + "loss": 0.6347, + "step": 12814 + }, + { + "epoch": 0.8898069712539926, + "grad_norm": 3.6091242386609497, + "learning_rate": 3.150995641274934e-07, + "loss": 0.4113, + "step": 12815 + }, + { + "epoch": 0.8898764060547146, + "grad_norm": 3.70707630163381, + "learning_rate": 3.1470680612323503e-07, + "loss": 0.3912, + "step": 12816 + }, + { + "epoch": 0.8899458408554367, + "grad_norm": 3.7386078346168583, + "learning_rate": 3.1431428509580365e-07, + "loss": 0.3863, + "step": 12817 + }, + { + "epoch": 0.8900152756561589, + "grad_norm": 3.9583681600677894, + "learning_rate": 3.1392200106505287e-07, + "loss": 0.4671, + "step": 12818 + }, + { + "epoch": 0.890084710456881, + "grad_norm": 2.757319084506268, + "learning_rate": 3.1352995405082576e-07, + "loss": 0.1399, + "step": 12819 + }, + { + "epoch": 0.8901541452576031, + "grad_norm": 3.4486305106287825, + "learning_rate": 3.131381440729508e-07, + "loss": 0.2294, + "step": 12820 + }, + { + 
"epoch": 0.8902235800583252, + "grad_norm": 4.542063529139695, + "learning_rate": 3.1274657115124397e-07, + "loss": 0.4704, + "step": 12821 + }, + { + "epoch": 0.8902930148590473, + "grad_norm": 3.7980928417680637, + "learning_rate": 3.12355235305512e-07, + "loss": 0.4042, + "step": 12822 + }, + { + "epoch": 0.8903624496597695, + "grad_norm": 3.1687854317295976, + "learning_rate": 3.119641365555481e-07, + "loss": 0.2905, + "step": 12823 + }, + { + "epoch": 0.8904318844604916, + "grad_norm": 3.1578138576732737, + "learning_rate": 3.1157327492113255e-07, + "loss": 0.3377, + "step": 12824 + }, + { + "epoch": 0.8905013192612137, + "grad_norm": 4.9479643719424695, + "learning_rate": 3.1118265042203553e-07, + "loss": 0.4317, + "step": 12825 + }, + { + "epoch": 0.8905707540619359, + "grad_norm": 4.001335759330411, + "learning_rate": 3.107922630780147e-07, + "loss": 0.3943, + "step": 12826 + }, + { + "epoch": 0.8906401888626579, + "grad_norm": 3.9556990379016277, + "learning_rate": 3.104021129088153e-07, + "loss": 0.4359, + "step": 12827 + }, + { + "epoch": 0.8907096236633801, + "grad_norm": 4.159421379416483, + "learning_rate": 3.1001219993416986e-07, + "loss": 0.5689, + "step": 12828 + }, + { + "epoch": 0.8907790584641022, + "grad_norm": 3.7036053809797167, + "learning_rate": 3.0962252417380045e-07, + "loss": 0.3535, + "step": 12829 + }, + { + "epoch": 0.8908484932648243, + "grad_norm": 4.4094530755562085, + "learning_rate": 3.092330856474163e-07, + "loss": 0.4673, + "step": 12830 + }, + { + "epoch": 0.8909179280655465, + "grad_norm": 4.395263331199589, + "learning_rate": 3.088438843747149e-07, + "loss": 0.4345, + "step": 12831 + }, + { + "epoch": 0.8909873628662686, + "grad_norm": 3.863118442902577, + "learning_rate": 3.0845492037538117e-07, + "loss": 0.4144, + "step": 12832 + }, + { + "epoch": 0.8910567976669906, + "grad_norm": 4.625055215497941, + "learning_rate": 3.080661936690876e-07, + "loss": 0.5333, + "step": 12833 + }, + { + "epoch": 0.8911262324677128, + "grad_norm": 3.459298215402622, + "learning_rate": 3.0767770427549857e-07, + "loss": 0.2689, + "step": 12834 + }, + { + "epoch": 0.8911956672684349, + "grad_norm": 3.420341231284176, + "learning_rate": 3.0728945221425997e-07, + "loss": 0.3997, + "step": 12835 + }, + { + "epoch": 0.8912651020691571, + "grad_norm": 4.064553612764126, + "learning_rate": 3.069014375050117e-07, + "loss": 0.3529, + "step": 12836 + }, + { + "epoch": 0.8913345368698792, + "grad_norm": 3.507685745807081, + "learning_rate": 3.0651366016737794e-07, + "loss": 0.2878, + "step": 12837 + }, + { + "epoch": 0.8914039716706013, + "grad_norm": 3.2822661061839464, + "learning_rate": 3.06126120220972e-07, + "loss": 0.4179, + "step": 12838 + }, + { + "epoch": 0.8914734064713234, + "grad_norm": 4.270972169712055, + "learning_rate": 3.057388176853959e-07, + "loss": 0.3712, + "step": 12839 + }, + { + "epoch": 0.8915428412720455, + "grad_norm": 3.415788309261789, + "learning_rate": 3.0535175258023797e-07, + "loss": 0.3393, + "step": 12840 + }, + { + "epoch": 0.8916122760727677, + "grad_norm": 3.706596251951023, + "learning_rate": 3.0496492492507636e-07, + "loss": 0.3564, + "step": 12841 + }, + { + "epoch": 0.8916817108734898, + "grad_norm": 5.195888331262543, + "learning_rate": 3.0457833473947595e-07, + "loss": 0.4879, + "step": 12842 + }, + { + "epoch": 0.8917511456742119, + "grad_norm": 4.226074969457352, + "learning_rate": 3.041919820429906e-07, + "loss": 0.4914, + "step": 12843 + }, + { + "epoch": 0.8918205804749341, + "grad_norm": 2.7297929457870556, + "learning_rate": 
3.0380586685515965e-07, + "loss": 0.2075, + "step": 12844 + }, + { + "epoch": 0.8918900152756561, + "grad_norm": 4.111806190454208, + "learning_rate": 3.0341998919551586e-07, + "loss": 0.3406, + "step": 12845 + }, + { + "epoch": 0.8919594500763783, + "grad_norm": 4.301417007083595, + "learning_rate": 3.0303434908357355e-07, + "loss": 0.473, + "step": 12846 + }, + { + "epoch": 0.8920288848771004, + "grad_norm": 2.8294405638990114, + "learning_rate": 3.026489465388377e-07, + "loss": 0.1983, + "step": 12847 + }, + { + "epoch": 0.8920983196778225, + "grad_norm": 4.520265092031402, + "learning_rate": 3.0226378158080495e-07, + "loss": 0.5387, + "step": 12848 + }, + { + "epoch": 0.8921677544785447, + "grad_norm": 4.159576014079366, + "learning_rate": 3.0187885422895303e-07, + "loss": 0.6115, + "step": 12849 + }, + { + "epoch": 0.8922371892792668, + "grad_norm": 3.568281876383245, + "learning_rate": 3.014941645027519e-07, + "loss": 0.3014, + "step": 12850 + }, + { + "epoch": 0.8923066240799888, + "grad_norm": 3.599825103200741, + "learning_rate": 3.0110971242166e-07, + "loss": 0.3437, + "step": 12851 + }, + { + "epoch": 0.892376058880711, + "grad_norm": 5.068268914030686, + "learning_rate": 3.0072549800512216e-07, + "loss": 0.4464, + "step": 12852 + }, + { + "epoch": 0.8924454936814331, + "grad_norm": 3.8789950123010613, + "learning_rate": 3.0034152127256966e-07, + "loss": 0.4476, + "step": 12853 + }, + { + "epoch": 0.8925149284821553, + "grad_norm": 2.939982041177012, + "learning_rate": 2.999577822434258e-07, + "loss": 0.1564, + "step": 12854 + }, + { + "epoch": 0.8925843632828774, + "grad_norm": 4.99509754427649, + "learning_rate": 2.9957428093709885e-07, + "loss": 0.5681, + "step": 12855 + }, + { + "epoch": 0.8926537980835995, + "grad_norm": 4.770607016159064, + "learning_rate": 2.9919101737298615e-07, + "loss": 0.4218, + "step": 12856 + }, + { + "epoch": 0.8927232328843216, + "grad_norm": 2.845691271695139, + "learning_rate": 2.988079915704717e-07, + "loss": 0.2832, + "step": 12857 + }, + { + "epoch": 0.8927926676850437, + "grad_norm": 2.286775951779598, + "learning_rate": 2.984252035489299e-07, + "loss": 0.0966, + "step": 12858 + }, + { + "epoch": 0.8928621024857659, + "grad_norm": 4.312400199508682, + "learning_rate": 2.980426533277209e-07, + "loss": 0.3796, + "step": 12859 + }, + { + "epoch": 0.892931537286488, + "grad_norm": 4.508689968288141, + "learning_rate": 2.976603409261941e-07, + "loss": 0.4633, + "step": 12860 + }, + { + "epoch": 0.8930009720872101, + "grad_norm": 3.470159154909685, + "learning_rate": 2.9727826636368586e-07, + "loss": 0.3905, + "step": 12861 + }, + { + "epoch": 0.8930704068879323, + "grad_norm": 3.39971287568434, + "learning_rate": 2.968964296595217e-07, + "loss": 0.3133, + "step": 12862 + }, + { + "epoch": 0.8931398416886543, + "grad_norm": 3.951582877566643, + "learning_rate": 2.96514830833014e-07, + "loss": 0.4118, + "step": 12863 + }, + { + "epoch": 0.8932092764893764, + "grad_norm": 2.662907995410107, + "learning_rate": 2.961334699034635e-07, + "loss": 0.254, + "step": 12864 + }, + { + "epoch": 0.8932787112900986, + "grad_norm": 3.7258462642264556, + "learning_rate": 2.9575234689015965e-07, + "loss": 0.2729, + "step": 12865 + }, + { + "epoch": 0.8933481460908207, + "grad_norm": 3.8908226603081246, + "learning_rate": 2.953714618123793e-07, + "loss": 0.3516, + "step": 12866 + }, + { + "epoch": 0.8934175808915429, + "grad_norm": 4.214120431869059, + "learning_rate": 2.9499081468938594e-07, + "loss": 0.4436, + "step": 12867 + }, + { + "epoch": 0.893487015692265, + 
"grad_norm": 3.8448962919748646, + "learning_rate": 2.9461040554043365e-07, + "loss": 0.3761, + "step": 12868 + }, + { + "epoch": 0.893556450492987, + "grad_norm": 4.447035620151592, + "learning_rate": 2.942302343847625e-07, + "loss": 0.4246, + "step": 12869 + }, + { + "epoch": 0.8936258852937092, + "grad_norm": 3.169314913408982, + "learning_rate": 2.938503012416011e-07, + "loss": 0.2155, + "step": 12870 + }, + { + "epoch": 0.8936953200944313, + "grad_norm": 4.623362617989716, + "learning_rate": 2.9347060613016564e-07, + "loss": 0.5392, + "step": 12871 + }, + { + "epoch": 0.8937647548951535, + "grad_norm": 4.072746544466012, + "learning_rate": 2.9309114906966197e-07, + "loss": 0.5797, + "step": 12872 + }, + { + "epoch": 0.8938341896958756, + "grad_norm": 3.0597334939781846, + "learning_rate": 2.927119300792813e-07, + "loss": 0.2887, + "step": 12873 + }, + { + "epoch": 0.8939036244965977, + "grad_norm": 3.455371105441647, + "learning_rate": 2.9233294917820454e-07, + "loss": 0.3669, + "step": 12874 + }, + { + "epoch": 0.8939730592973198, + "grad_norm": 3.836268660844611, + "learning_rate": 2.9195420638560013e-07, + "loss": 0.3296, + "step": 12875 + }, + { + "epoch": 0.8940424940980419, + "grad_norm": 2.5184324375340266, + "learning_rate": 2.9157570172062386e-07, + "loss": 0.1885, + "step": 12876 + }, + { + "epoch": 0.894111928898764, + "grad_norm": 3.468973558208001, + "learning_rate": 2.9119743520242216e-07, + "loss": 0.4362, + "step": 12877 + }, + { + "epoch": 0.8941813636994862, + "grad_norm": 2.744911371048733, + "learning_rate": 2.9081940685012464e-07, + "loss": 0.1756, + "step": 12878 + }, + { + "epoch": 0.8942507985002083, + "grad_norm": 4.498927499720553, + "learning_rate": 2.9044161668285275e-07, + "loss": 0.3641, + "step": 12879 + }, + { + "epoch": 0.8943202333009305, + "grad_norm": 3.013664754266281, + "learning_rate": 2.900640647197156e-07, + "loss": 0.2386, + "step": 12880 + }, + { + "epoch": 0.8943896681016525, + "grad_norm": 4.380782575717267, + "learning_rate": 2.896867509798074e-07, + "loss": 0.5476, + "step": 12881 + }, + { + "epoch": 0.8944591029023746, + "grad_norm": 3.194992856690282, + "learning_rate": 2.8930967548221334e-07, + "loss": 0.3822, + "step": 12882 + }, + { + "epoch": 0.8945285377030968, + "grad_norm": 3.954452200291028, + "learning_rate": 2.889328382460055e-07, + "loss": 0.3742, + "step": 12883 + }, + { + "epoch": 0.8945979725038189, + "grad_norm": 4.115262132264761, + "learning_rate": 2.8855623929024357e-07, + "loss": 0.2841, + "step": 12884 + }, + { + "epoch": 0.8946674073045411, + "grad_norm": 5.052765348126075, + "learning_rate": 2.881798786339762e-07, + "loss": 0.3741, + "step": 12885 + }, + { + "epoch": 0.8947368421052632, + "grad_norm": 3.617217884568912, + "learning_rate": 2.878037562962388e-07, + "loss": 0.2994, + "step": 12886 + }, + { + "epoch": 0.8948062769059852, + "grad_norm": 3.987513437086178, + "learning_rate": 2.8742787229605494e-07, + "loss": 0.375, + "step": 12887 + }, + { + "epoch": 0.8948757117067074, + "grad_norm": 5.1224525053250085, + "learning_rate": 2.870522266524367e-07, + "loss": 0.573, + "step": 12888 + }, + { + "epoch": 0.8949451465074295, + "grad_norm": 2.6481383068233626, + "learning_rate": 2.8667681938438384e-07, + "loss": 0.2097, + "step": 12889 + }, + { + "epoch": 0.8950145813081516, + "grad_norm": 4.302001559100502, + "learning_rate": 2.8630165051088397e-07, + "loss": 0.3005, + "step": 12890 + }, + { + "epoch": 0.8950840161088738, + "grad_norm": 3.5844565984767454, + "learning_rate": 2.8592672005091307e-07, + "loss": 
0.4488, + "step": 12891 + }, + { + "epoch": 0.8951534509095959, + "grad_norm": 2.4295574838960032, + "learning_rate": 2.855520280234336e-07, + "loss": 0.174, + "step": 12892 + }, + { + "epoch": 0.895222885710318, + "grad_norm": 4.274389114395069, + "learning_rate": 2.8517757444739836e-07, + "loss": 0.4211, + "step": 12893 + }, + { + "epoch": 0.8952923205110401, + "grad_norm": 3.5620174423235063, + "learning_rate": 2.848033593417454e-07, + "loss": 0.3719, + "step": 12894 + }, + { + "epoch": 0.8953617553117622, + "grad_norm": 3.357212606211454, + "learning_rate": 2.844293827254041e-07, + "loss": 0.233, + "step": 12895 + }, + { + "epoch": 0.8954311901124844, + "grad_norm": 3.553012340361745, + "learning_rate": 2.8405564461728706e-07, + "loss": 0.3876, + "step": 12896 + }, + { + "epoch": 0.8955006249132065, + "grad_norm": 4.705628512769024, + "learning_rate": 2.836821450363003e-07, + "loss": 0.6429, + "step": 12897 + }, + { + "epoch": 0.8955700597139287, + "grad_norm": 4.165164560393895, + "learning_rate": 2.833088840013343e-07, + "loss": 0.3633, + "step": 12898 + }, + { + "epoch": 0.8956394945146507, + "grad_norm": 3.1987366408535545, + "learning_rate": 2.829358615312661e-07, + "loss": 0.3947, + "step": 12899 + }, + { + "epoch": 0.8957089293153728, + "grad_norm": 4.530685887602706, + "learning_rate": 2.8256307764496503e-07, + "loss": 0.3641, + "step": 12900 + }, + { + "epoch": 0.895778364116095, + "grad_norm": 3.957455735579709, + "learning_rate": 2.82190532361285e-07, + "loss": 0.3392, + "step": 12901 + }, + { + "epoch": 0.8958477989168171, + "grad_norm": 6.111207436450076, + "learning_rate": 2.8181822569906983e-07, + "loss": 0.5198, + "step": 12902 + }, + { + "epoch": 0.8959172337175393, + "grad_norm": 3.501150484132737, + "learning_rate": 2.814461576771499e-07, + "loss": 0.3344, + "step": 12903 + }, + { + "epoch": 0.8959866685182614, + "grad_norm": 4.130746907424348, + "learning_rate": 2.8107432831434355e-07, + "loss": 0.4193, + "step": 12904 + }, + { + "epoch": 0.8960561033189834, + "grad_norm": 5.2765705484883, + "learning_rate": 2.8070273762945797e-07, + "loss": 0.6294, + "step": 12905 + }, + { + "epoch": 0.8961255381197056, + "grad_norm": 4.065809948273628, + "learning_rate": 2.803313856412876e-07, + "loss": 0.45, + "step": 12906 + }, + { + "epoch": 0.8961949729204277, + "grad_norm": 2.1505868102158785, + "learning_rate": 2.7996027236861514e-07, + "loss": 0.1173, + "step": 12907 + }, + { + "epoch": 0.8962644077211498, + "grad_norm": 3.6164522357719338, + "learning_rate": 2.7958939783021057e-07, + "loss": 0.3747, + "step": 12908 + }, + { + "epoch": 0.896333842521872, + "grad_norm": 3.2789791885258444, + "learning_rate": 2.7921876204483387e-07, + "loss": 0.3375, + "step": 12909 + }, + { + "epoch": 0.8964032773225941, + "grad_norm": 4.302051014171993, + "learning_rate": 2.7884836503122956e-07, + "loss": 0.4435, + "step": 12910 + }, + { + "epoch": 0.8964727121233163, + "grad_norm": 3.1925006951741755, + "learning_rate": 2.784782068081315e-07, + "loss": 0.3418, + "step": 12911 + }, + { + "epoch": 0.8965421469240383, + "grad_norm": 3.565139274210202, + "learning_rate": 2.7810828739426533e-07, + "loss": 0.349, + "step": 12912 + }, + { + "epoch": 0.8966115817247604, + "grad_norm": 4.853294259501187, + "learning_rate": 2.7773860680833706e-07, + "loss": 0.5076, + "step": 12913 + }, + { + "epoch": 0.8966810165254826, + "grad_norm": 3.257888405696199, + "learning_rate": 2.773691650690463e-07, + "loss": 0.2372, + "step": 12914 + }, + { + "epoch": 0.8967504513262047, + "grad_norm": 
2.3821591604489787, + "learning_rate": 2.769999621950803e-07, + "loss": 0.2213, + "step": 12915 + }, + { + "epoch": 0.8968198861269269, + "grad_norm": 2.9701936148645904, + "learning_rate": 2.766309982051113e-07, + "loss": 0.303, + "step": 12916 + }, + { + "epoch": 0.896889320927649, + "grad_norm": 4.258624028878511, + "learning_rate": 2.7626227311780105e-07, + "loss": 0.3848, + "step": 12917 + }, + { + "epoch": 0.896958755728371, + "grad_norm": 3.1360053188952812, + "learning_rate": 2.7589378695180027e-07, + "loss": 0.386, + "step": 12918 + }, + { + "epoch": 0.8970281905290932, + "grad_norm": 3.504461249644197, + "learning_rate": 2.7552553972574614e-07, + "loss": 0.3022, + "step": 12919 + }, + { + "epoch": 0.8970976253298153, + "grad_norm": 5.9034992727899205, + "learning_rate": 2.751575314582644e-07, + "loss": 0.4118, + "step": 12920 + }, + { + "epoch": 0.8971670601305374, + "grad_norm": 3.4579194707666634, + "learning_rate": 2.747897621679674e-07, + "loss": 0.2884, + "step": 12921 + }, + { + "epoch": 0.8972364949312596, + "grad_norm": 4.253551168506638, + "learning_rate": 2.74422231873458e-07, + "loss": 0.3824, + "step": 12922 + }, + { + "epoch": 0.8973059297319816, + "grad_norm": 3.8287897330114347, + "learning_rate": 2.740549405933246e-07, + "loss": 0.4417, + "step": 12923 + }, + { + "epoch": 0.8973753645327038, + "grad_norm": 3.3737153011687955, + "learning_rate": 2.736878883461441e-07, + "loss": 0.3343, + "step": 12924 + }, + { + "epoch": 0.8974447993334259, + "grad_norm": 3.141404413860597, + "learning_rate": 2.733210751504817e-07, + "loss": 0.228, + "step": 12925 + }, + { + "epoch": 0.897514234134148, + "grad_norm": 4.328134240873491, + "learning_rate": 2.7295450102489127e-07, + "loss": 0.5278, + "step": 12926 + }, + { + "epoch": 0.8975836689348702, + "grad_norm": 4.372730357774936, + "learning_rate": 2.7258816598791427e-07, + "loss": 0.3937, + "step": 12927 + }, + { + "epoch": 0.8976531037355923, + "grad_norm": 4.25056635375421, + "learning_rate": 2.722220700580763e-07, + "loss": 0.4751, + "step": 12928 + }, + { + "epoch": 0.8977225385363145, + "grad_norm": 4.049791838382141, + "learning_rate": 2.718562132538971e-07, + "loss": 0.4032, + "step": 12929 + }, + { + "epoch": 0.8977919733370365, + "grad_norm": 4.4959586251954535, + "learning_rate": 2.7149059559388126e-07, + "loss": 0.4552, + "step": 12930 + }, + { + "epoch": 0.8978614081377586, + "grad_norm": 4.12449014113677, + "learning_rate": 2.7112521709651896e-07, + "loss": 0.349, + "step": 12931 + }, + { + "epoch": 0.8979308429384808, + "grad_norm": 4.303215565335407, + "learning_rate": 2.7076007778029325e-07, + "loss": 0.3358, + "step": 12932 + }, + { + "epoch": 0.8980002777392029, + "grad_norm": 5.221679613651867, + "learning_rate": 2.70395177663671e-07, + "loss": 0.4333, + "step": 12933 + }, + { + "epoch": 0.898069712539925, + "grad_norm": 4.718617696215007, + "learning_rate": 2.700305167651085e-07, + "loss": 0.3898, + "step": 12934 + }, + { + "epoch": 0.8981391473406471, + "grad_norm": 4.018147620291798, + "learning_rate": 2.6966609510304995e-07, + "loss": 0.4427, + "step": 12935 + }, + { + "epoch": 0.8982085821413692, + "grad_norm": 3.3984143802994415, + "learning_rate": 2.693019126959284e-07, + "loss": 0.2871, + "step": 12936 + }, + { + "epoch": 0.8982780169420914, + "grad_norm": 4.354403276095537, + "learning_rate": 2.6893796956216244e-07, + "loss": 0.5377, + "step": 12937 + }, + { + "epoch": 0.8983474517428135, + "grad_norm": 3.5342686773579954, + "learning_rate": 2.685742657201601e-07, + "loss": 0.4195, + "step": 12938 + 
}, + { + "epoch": 0.8984168865435356, + "grad_norm": 5.135105535455678, + "learning_rate": 2.6821080118831775e-07, + "loss": 0.6695, + "step": 12939 + }, + { + "epoch": 0.8984863213442578, + "grad_norm": 3.7698273401362274, + "learning_rate": 2.67847575985018e-07, + "loss": 0.3073, + "step": 12940 + }, + { + "epoch": 0.8985557561449798, + "grad_norm": 3.3237398968772, + "learning_rate": 2.674845901286349e-07, + "loss": 0.3527, + "step": 12941 + }, + { + "epoch": 0.898625190945702, + "grad_norm": 3.659822417326642, + "learning_rate": 2.6712184363752447e-07, + "loss": 0.2923, + "step": 12942 + }, + { + "epoch": 0.8986946257464241, + "grad_norm": 2.7203776694679758, + "learning_rate": 2.667593365300353e-07, + "loss": 0.093, + "step": 12943 + }, + { + "epoch": 0.8987640605471462, + "grad_norm": 4.360891489953443, + "learning_rate": 2.663970688245038e-07, + "loss": 0.397, + "step": 12944 + }, + { + "epoch": 0.8988334953478684, + "grad_norm": 3.55398659231266, + "learning_rate": 2.66035040539252e-07, + "loss": 0.2696, + "step": 12945 + }, + { + "epoch": 0.8989029301485905, + "grad_norm": 3.938602362393521, + "learning_rate": 2.6567325169259085e-07, + "loss": 0.2985, + "step": 12946 + }, + { + "epoch": 0.8989723649493125, + "grad_norm": 3.7470886996640274, + "learning_rate": 2.6531170230281954e-07, + "loss": 0.4329, + "step": 12947 + }, + { + "epoch": 0.8990417997500347, + "grad_norm": 3.7273255572022754, + "learning_rate": 2.6495039238822463e-07, + "loss": 0.5164, + "step": 12948 + }, + { + "epoch": 0.8991112345507568, + "grad_norm": 3.236643231517121, + "learning_rate": 2.645893219670803e-07, + "loss": 0.2181, + "step": 12949 + }, + { + "epoch": 0.899180669351479, + "grad_norm": 4.069185383680679, + "learning_rate": 2.6422849105765034e-07, + "loss": 0.348, + "step": 12950 + }, + { + "epoch": 0.8992501041522011, + "grad_norm": 4.28488822157351, + "learning_rate": 2.6386789967818407e-07, + "loss": 0.57, + "step": 12951 + }, + { + "epoch": 0.8993195389529232, + "grad_norm": 4.91881961176447, + "learning_rate": 2.6350754784692014e-07, + "loss": 0.4561, + "step": 12952 + }, + { + "epoch": 0.8993889737536453, + "grad_norm": 4.533544274943324, + "learning_rate": 2.6314743558208457e-07, + "loss": 0.5344, + "step": 12953 + }, + { + "epoch": 0.8994584085543674, + "grad_norm": 4.886559031694449, + "learning_rate": 2.627875629018911e-07, + "loss": 0.8121, + "step": 12954 + }, + { + "epoch": 0.8995278433550896, + "grad_norm": 3.5113065457779356, + "learning_rate": 2.624279298245436e-07, + "loss": 0.3544, + "step": 12955 + }, + { + "epoch": 0.8995972781558117, + "grad_norm": 4.001815023685714, + "learning_rate": 2.6206853636823016e-07, + "loss": 0.4938, + "step": 12956 + }, + { + "epoch": 0.8996667129565338, + "grad_norm": 3.5534610699023053, + "learning_rate": 2.6170938255112745e-07, + "loss": 0.3296, + "step": 12957 + }, + { + "epoch": 0.899736147757256, + "grad_norm": 4.60547226414963, + "learning_rate": 2.613504683914031e-07, + "loss": 0.5194, + "step": 12958 + }, + { + "epoch": 0.899805582557978, + "grad_norm": 4.4658279320436804, + "learning_rate": 2.60991793907211e-07, + "loss": 0.4448, + "step": 12959 + }, + { + "epoch": 0.8998750173587002, + "grad_norm": 2.98950975394793, + "learning_rate": 2.6063335911668983e-07, + "loss": 0.2301, + "step": 12960 + }, + { + "epoch": 0.8999444521594223, + "grad_norm": 3.2029875421650003, + "learning_rate": 2.6027516403797135e-07, + "loss": 0.275, + "step": 12961 + }, + { + "epoch": 0.9000138869601444, + "grad_norm": 3.194784992993854, + "learning_rate": 
2.5991720868917216e-07, + "loss": 0.3154, + "step": 12962 + }, + { + "epoch": 0.9000833217608666, + "grad_norm": 4.982827886146601, + "learning_rate": 2.5955949308839545e-07, + "loss": 0.8683, + "step": 12963 + }, + { + "epoch": 0.9001527565615887, + "grad_norm": 3.9277956766939903, + "learning_rate": 2.592020172537363e-07, + "loss": 0.4277, + "step": 12964 + }, + { + "epoch": 0.9002221913623107, + "grad_norm": 4.372360976809634, + "learning_rate": 2.588447812032746e-07, + "loss": 0.4201, + "step": 12965 + }, + { + "epoch": 0.9002916261630329, + "grad_norm": 4.372097880989456, + "learning_rate": 2.5848778495507876e-07, + "loss": 0.5957, + "step": 12966 + }, + { + "epoch": 0.900361060963755, + "grad_norm": 4.85031711931894, + "learning_rate": 2.5813102852720487e-07, + "loss": 0.3726, + "step": 12967 + }, + { + "epoch": 0.9004304957644772, + "grad_norm": 3.6557133161209654, + "learning_rate": 2.577745119376984e-07, + "loss": 0.3604, + "step": 12968 + }, + { + "epoch": 0.9004999305651993, + "grad_norm": 5.812470646942752, + "learning_rate": 2.574182352045901e-07, + "loss": 0.6442, + "step": 12969 + }, + { + "epoch": 0.9005693653659214, + "grad_norm": 3.064031630593118, + "learning_rate": 2.570621983459021e-07, + "loss": 0.1917, + "step": 12970 + }, + { + "epoch": 0.9006388001666435, + "grad_norm": 3.3897776916762683, + "learning_rate": 2.5670640137964053e-07, + "loss": 0.3184, + "step": 12971 + }, + { + "epoch": 0.9007082349673656, + "grad_norm": 4.163334129236111, + "learning_rate": 2.563508443238011e-07, + "loss": 0.428, + "step": 12972 + }, + { + "epoch": 0.9007776697680878, + "grad_norm": 3.3823411763199918, + "learning_rate": 2.5599552719636933e-07, + "loss": 0.3516, + "step": 12973 + }, + { + "epoch": 0.9008471045688099, + "grad_norm": 3.6700561492783916, + "learning_rate": 2.556404500153148e-07, + "loss": 0.3557, + "step": 12974 + }, + { + "epoch": 0.900916539369532, + "grad_norm": 3.995218927239919, + "learning_rate": 2.5528561279859756e-07, + "loss": 0.2665, + "step": 12975 + }, + { + "epoch": 0.9009859741702542, + "grad_norm": 3.0094200666856783, + "learning_rate": 2.5493101556416544e-07, + "loss": 0.2654, + "step": 12976 + }, + { + "epoch": 0.9010554089709762, + "grad_norm": 3.2953052408947006, + "learning_rate": 2.5457665832995305e-07, + "loss": 0.3069, + "step": 12977 + }, + { + "epoch": 0.9011248437716983, + "grad_norm": 3.4710947342011225, + "learning_rate": 2.542225411138832e-07, + "loss": 0.2857, + "step": 12978 + }, + { + "epoch": 0.9011942785724205, + "grad_norm": 3.6863493458590737, + "learning_rate": 2.5386866393386775e-07, + "loss": 0.2976, + "step": 12979 + }, + { + "epoch": 0.9012637133731426, + "grad_norm": 3.451147237073787, + "learning_rate": 2.5351502680780406e-07, + "loss": 0.3346, + "step": 12980 + }, + { + "epoch": 0.9013331481738648, + "grad_norm": 3.1789780825732876, + "learning_rate": 2.531616297535794e-07, + "loss": 0.3592, + "step": 12981 + }, + { + "epoch": 0.9014025829745869, + "grad_norm": 6.056553169980843, + "learning_rate": 2.528084727890684e-07, + "loss": 0.5311, + "step": 12982 + }, + { + "epoch": 0.9014720177753089, + "grad_norm": 6.049113295295283, + "learning_rate": 2.524555559321329e-07, + "loss": 0.572, + "step": 12983 + }, + { + "epoch": 0.9015414525760311, + "grad_norm": 4.20719668247249, + "learning_rate": 2.5210287920062304e-07, + "loss": 0.4302, + "step": 12984 + }, + { + "epoch": 0.9016108873767532, + "grad_norm": 4.578346458285832, + "learning_rate": 2.517504426123768e-07, + "loss": 0.6333, + "step": 12985 + }, + { + "epoch": 
0.9016803221774754, + "grad_norm": 4.600342020563578, + "learning_rate": 2.5139824618521993e-07, + "loss": 0.5494, + "step": 12986 + }, + { + "epoch": 0.9017497569781975, + "grad_norm": 4.85189613511165, + "learning_rate": 2.5104628993696757e-07, + "loss": 0.4686, + "step": 12987 + }, + { + "epoch": 0.9018191917789196, + "grad_norm": 5.5573796484820015, + "learning_rate": 2.5069457388541883e-07, + "loss": 0.4881, + "step": 12988 + }, + { + "epoch": 0.9018886265796418, + "grad_norm": 2.868008636470154, + "learning_rate": 2.5034309804836454e-07, + "loss": 0.2633, + "step": 12989 + }, + { + "epoch": 0.9019580613803638, + "grad_norm": 2.755381922612412, + "learning_rate": 2.4999186244358153e-07, + "loss": 0.1875, + "step": 12990 + }, + { + "epoch": 0.9020274961810859, + "grad_norm": 6.203060277131062, + "learning_rate": 2.4964086708883674e-07, + "loss": 0.4863, + "step": 12991 + }, + { + "epoch": 0.9020969309818081, + "grad_norm": 5.574116650970451, + "learning_rate": 2.4929011200187926e-07, + "loss": 0.8191, + "step": 12992 + }, + { + "epoch": 0.9021663657825302, + "grad_norm": 4.696648482417634, + "learning_rate": 2.4893959720045323e-07, + "loss": 0.5019, + "step": 12993 + }, + { + "epoch": 0.9022358005832524, + "grad_norm": 3.4360410094352685, + "learning_rate": 2.485893227022862e-07, + "loss": 0.3141, + "step": 12994 + }, + { + "epoch": 0.9023052353839744, + "grad_norm": 4.0729095214234405, + "learning_rate": 2.4823928852509506e-07, + "loss": 0.4624, + "step": 12995 + }, + { + "epoch": 0.9023746701846965, + "grad_norm": 3.207610360015554, + "learning_rate": 2.478894946865829e-07, + "loss": 0.3684, + "step": 12996 + }, + { + "epoch": 0.9024441049854187, + "grad_norm": 6.801288089257314, + "learning_rate": 2.4753994120444326e-07, + "loss": 0.492, + "step": 12997 + }, + { + "epoch": 0.9025135397861408, + "grad_norm": 4.551896710699016, + "learning_rate": 2.4719062809635543e-07, + "loss": 0.5121, + "step": 12998 + }, + { + "epoch": 0.902582974586863, + "grad_norm": 2.589783132566977, + "learning_rate": 2.4684155537998743e-07, + "loss": 0.1387, + "step": 12999 + }, + { + "epoch": 0.9026524093875851, + "grad_norm": 3.5143753968419453, + "learning_rate": 2.464927230729952e-07, + "loss": 0.3518, + "step": 13000 + }, + { + "epoch": 0.9027218441883071, + "grad_norm": 4.704342507227891, + "learning_rate": 2.461441311930213e-07, + "loss": 0.4438, + "step": 13001 + }, + { + "epoch": 0.9027912789890293, + "grad_norm": 3.601530773408349, + "learning_rate": 2.457957797576993e-07, + "loss": 0.3508, + "step": 13002 + }, + { + "epoch": 0.9028607137897514, + "grad_norm": 4.9375105836349915, + "learning_rate": 2.454476687846463e-07, + "loss": 0.5646, + "step": 13003 + }, + { + "epoch": 0.9029301485904735, + "grad_norm": 3.7277551838373664, + "learning_rate": 2.4509979829146933e-07, + "loss": 0.4147, + "step": 13004 + }, + { + "epoch": 0.9029995833911957, + "grad_norm": 5.155712573765384, + "learning_rate": 2.447521682957654e-07, + "loss": 0.6069, + "step": 13005 + }, + { + "epoch": 0.9030690181919178, + "grad_norm": 3.0750698859186616, + "learning_rate": 2.444047788151144e-07, + "loss": 0.2836, + "step": 13006 + }, + { + "epoch": 0.90313845299264, + "grad_norm": 3.2028164443671314, + "learning_rate": 2.4405762986708937e-07, + "loss": 0.2498, + "step": 13007 + }, + { + "epoch": 0.903207887793362, + "grad_norm": 3.4606134672905893, + "learning_rate": 2.4371072146924744e-07, + "loss": 0.4066, + "step": 13008 + }, + { + "epoch": 0.9032773225940841, + "grad_norm": 3.6699471677955393, + "learning_rate": 
2.433640536391352e-07, + "loss": 0.4276, + "step": 13009 + }, + { + "epoch": 0.9033467573948063, + "grad_norm": 4.730326885375009, + "learning_rate": 2.430176263942863e-07, + "loss": 0.4253, + "step": 13010 + }, + { + "epoch": 0.9034161921955284, + "grad_norm": 4.0011606825698145, + "learning_rate": 2.426714397522234e-07, + "loss": 0.4129, + "step": 13011 + }, + { + "epoch": 0.9034856269962506, + "grad_norm": 5.757822786117658, + "learning_rate": 2.423254937304553e-07, + "loss": 0.469, + "step": 13012 + }, + { + "epoch": 0.9035550617969726, + "grad_norm": 3.7449379065922175, + "learning_rate": 2.419797883464808e-07, + "loss": 0.3974, + "step": 13013 + }, + { + "epoch": 0.9036244965976947, + "grad_norm": 3.6885168625379645, + "learning_rate": 2.4163432361778426e-07, + "loss": 0.3684, + "step": 13014 + }, + { + "epoch": 0.9036939313984169, + "grad_norm": 4.506636005222762, + "learning_rate": 2.412890995618383e-07, + "loss": 0.533, + "step": 13015 + }, + { + "epoch": 0.903763366199139, + "grad_norm": 4.506640079825281, + "learning_rate": 2.4094411619610627e-07, + "loss": 0.6252, + "step": 13016 + }, + { + "epoch": 0.9038328009998612, + "grad_norm": 4.143794407553553, + "learning_rate": 2.4059937353803467e-07, + "loss": 0.678, + "step": 13017 + }, + { + "epoch": 0.9039022358005833, + "grad_norm": 3.115126398988296, + "learning_rate": 2.4025487160506066e-07, + "loss": 0.3779, + "step": 13018 + }, + { + "epoch": 0.9039716706013053, + "grad_norm": 4.01327067181937, + "learning_rate": 2.3991061041461037e-07, + "loss": 0.4553, + "step": 13019 + }, + { + "epoch": 0.9040411054020275, + "grad_norm": 4.360936113192454, + "learning_rate": 2.395665899840949e-07, + "loss": 0.2577, + "step": 13020 + }, + { + "epoch": 0.9041105402027496, + "grad_norm": 4.462828322160175, + "learning_rate": 2.3922281033091356e-07, + "loss": 0.527, + "step": 13021 + }, + { + "epoch": 0.9041799750034717, + "grad_norm": 3.036306935145937, + "learning_rate": 2.388792714724564e-07, + "loss": 0.2916, + "step": 13022 + }, + { + "epoch": 0.9042494098041939, + "grad_norm": 4.919271859926876, + "learning_rate": 2.3853597342609836e-07, + "loss": 0.5928, + "step": 13023 + }, + { + "epoch": 0.904318844604916, + "grad_norm": 3.669787228989885, + "learning_rate": 2.3819291620920116e-07, + "loss": 0.393, + "step": 13024 + }, + { + "epoch": 0.9043882794056382, + "grad_norm": 4.323014778250983, + "learning_rate": 2.3785009983911866e-07, + "loss": 0.5831, + "step": 13025 + }, + { + "epoch": 0.9044577142063602, + "grad_norm": 2.6593313664283715, + "learning_rate": 2.3750752433318979e-07, + "loss": 0.2034, + "step": 13026 + }, + { + "epoch": 0.9045271490070823, + "grad_norm": 3.111678345988155, + "learning_rate": 2.3716518970874126e-07, + "loss": 0.2414, + "step": 13027 + }, + { + "epoch": 0.9045965838078045, + "grad_norm": 4.1727674296138595, + "learning_rate": 2.368230959830875e-07, + "loss": 0.425, + "step": 13028 + }, + { + "epoch": 0.9046660186085266, + "grad_norm": 3.1757675516671635, + "learning_rate": 2.3648124317353195e-07, + "loss": 0.4538, + "step": 13029 + }, + { + "epoch": 0.9047354534092488, + "grad_norm": 4.800423717454337, + "learning_rate": 2.361396312973646e-07, + "loss": 0.3738, + "step": 13030 + }, + { + "epoch": 0.9048048882099708, + "grad_norm": 3.7757961157067164, + "learning_rate": 2.3579826037186392e-07, + "loss": 0.4477, + "step": 13031 + }, + { + "epoch": 0.9048743230106929, + "grad_norm": 4.90570741745172, + "learning_rate": 2.3545713041429664e-07, + "loss": 0.4406, + "step": 13032 + }, + { + "epoch": 
0.9049437578114151, + "grad_norm": 3.106769926054841, + "learning_rate": 2.3511624144191503e-07, + "loss": 0.2469, + "step": 13033 + }, + { + "epoch": 0.9050131926121372, + "grad_norm": 5.747091596601599, + "learning_rate": 2.347755934719642e-07, + "loss": 0.4732, + "step": 13034 + }, + { + "epoch": 0.9050826274128593, + "grad_norm": 3.407454672069645, + "learning_rate": 2.3443518652166986e-07, + "loss": 0.2804, + "step": 13035 + }, + { + "epoch": 0.9051520622135815, + "grad_norm": 4.37999867356424, + "learning_rate": 2.340950206082515e-07, + "loss": 0.4581, + "step": 13036 + }, + { + "epoch": 0.9052214970143035, + "grad_norm": 3.8709224997868046, + "learning_rate": 2.3375509574891487e-07, + "loss": 0.5326, + "step": 13037 + }, + { + "epoch": 0.9052909318150257, + "grad_norm": 3.3948751929757144, + "learning_rate": 2.3341541196085116e-07, + "loss": 0.351, + "step": 13038 + }, + { + "epoch": 0.9053603666157478, + "grad_norm": 3.492627929989953, + "learning_rate": 2.3307596926124276e-07, + "loss": 0.2733, + "step": 13039 + }, + { + "epoch": 0.9054298014164699, + "grad_norm": 4.040892140245462, + "learning_rate": 2.3273676766725762e-07, + "loss": 0.5133, + "step": 13040 + }, + { + "epoch": 0.9054992362171921, + "grad_norm": 4.600885641962837, + "learning_rate": 2.3239780719605197e-07, + "loss": 0.6244, + "step": 13041 + }, + { + "epoch": 0.9055686710179142, + "grad_norm": 4.556268243693032, + "learning_rate": 2.320590878647705e-07, + "loss": 0.6481, + "step": 13042 + }, + { + "epoch": 0.9056381058186364, + "grad_norm": 4.781475339631384, + "learning_rate": 2.3172060969054499e-07, + "loss": 0.437, + "step": 13043 + }, + { + "epoch": 0.9057075406193584, + "grad_norm": 3.626838532706603, + "learning_rate": 2.3138237269049456e-07, + "loss": 0.3012, + "step": 13044 + }, + { + "epoch": 0.9057769754200805, + "grad_norm": 4.613560299179202, + "learning_rate": 2.3104437688172943e-07, + "loss": 0.4228, + "step": 13045 + }, + { + "epoch": 0.9058464102208027, + "grad_norm": 4.107885776825883, + "learning_rate": 2.3070662228134256e-07, + "loss": 0.1829, + "step": 13046 + }, + { + "epoch": 0.9059158450215248, + "grad_norm": 3.929738844200927, + "learning_rate": 2.3036910890641696e-07, + "loss": 0.492, + "step": 13047 + }, + { + "epoch": 0.9059852798222469, + "grad_norm": 4.126676063574219, + "learning_rate": 2.300318367740262e-07, + "loss": 0.3411, + "step": 13048 + }, + { + "epoch": 0.906054714622969, + "grad_norm": 3.480877673696189, + "learning_rate": 2.2969480590122662e-07, + "loss": 0.2988, + "step": 13049 + }, + { + "epoch": 0.9061241494236911, + "grad_norm": 4.680954651550427, + "learning_rate": 2.293580163050657e-07, + "loss": 0.4577, + "step": 13050 + }, + { + "epoch": 0.9061935842244133, + "grad_norm": 4.964504499439824, + "learning_rate": 2.2902146800257818e-07, + "loss": 0.3604, + "step": 13051 + }, + { + "epoch": 0.9062630190251354, + "grad_norm": 5.677240729048436, + "learning_rate": 2.2868516101078653e-07, + "loss": 0.3269, + "step": 13052 + }, + { + "epoch": 0.9063324538258575, + "grad_norm": 4.475095451962496, + "learning_rate": 2.2834909534669935e-07, + "loss": 0.1831, + "step": 13053 + }, + { + "epoch": 0.9064018886265797, + "grad_norm": 3.378598009062433, + "learning_rate": 2.2801327102731586e-07, + "loss": 0.2614, + "step": 13054 + }, + { + "epoch": 0.9064713234273017, + "grad_norm": 3.9248998180960855, + "learning_rate": 2.276776880696213e-07, + "loss": 0.2751, + "step": 13055 + }, + { + "epoch": 0.9065407582280239, + "grad_norm": 5.064390245017869, + "learning_rate": 
2.273423464905894e-07, + "loss": 0.6056, + "step": 13056 + }, + { + "epoch": 0.906610193028746, + "grad_norm": 4.180137343380059, + "learning_rate": 2.2700724630718095e-07, + "loss": 0.2763, + "step": 13057 + }, + { + "epoch": 0.9066796278294681, + "grad_norm": 2.8921803899519465, + "learning_rate": 2.2667238753634468e-07, + "loss": 0.2479, + "step": 13058 + }, + { + "epoch": 0.9067490626301903, + "grad_norm": 3.846601768127386, + "learning_rate": 2.2633777019501757e-07, + "loss": 0.3874, + "step": 13059 + }, + { + "epoch": 0.9068184974309124, + "grad_norm": 4.36868718087222, + "learning_rate": 2.260033943001244e-07, + "loss": 0.3805, + "step": 13060 + }, + { + "epoch": 0.9068879322316344, + "grad_norm": 3.8000645180588597, + "learning_rate": 2.2566925986857725e-07, + "loss": 0.3017, + "step": 13061 + }, + { + "epoch": 0.9069573670323566, + "grad_norm": 3.397392851789259, + "learning_rate": 2.2533536691727697e-07, + "loss": 0.3284, + "step": 13062 + }, + { + "epoch": 0.9070268018330787, + "grad_norm": 3.881401928406966, + "learning_rate": 2.2500171546311068e-07, + "loss": 0.3942, + "step": 13063 + }, + { + "epoch": 0.9070962366338009, + "grad_norm": 3.1282542593337404, + "learning_rate": 2.2466830552295426e-07, + "loss": 0.2871, + "step": 13064 + }, + { + "epoch": 0.907165671434523, + "grad_norm": 2.878897714053635, + "learning_rate": 2.243351371136704e-07, + "loss": 0.2043, + "step": 13065 + }, + { + "epoch": 0.9072351062352451, + "grad_norm": 4.720691268587579, + "learning_rate": 2.2400221025211278e-07, + "loss": 0.4706, + "step": 13066 + }, + { + "epoch": 0.9073045410359672, + "grad_norm": 3.4750226630922, + "learning_rate": 2.2366952495511796e-07, + "loss": 0.3156, + "step": 13067 + }, + { + "epoch": 0.9073739758366893, + "grad_norm": 2.057385681840549, + "learning_rate": 2.2333708123951414e-07, + "loss": 0.2007, + "step": 13068 + }, + { + "epoch": 0.9074434106374115, + "grad_norm": 4.938059011505567, + "learning_rate": 2.2300487912211623e-07, + "loss": 0.557, + "step": 13069 + }, + { + "epoch": 0.9075128454381336, + "grad_norm": 4.547617072396145, + "learning_rate": 2.2267291861972463e-07, + "loss": 0.5346, + "step": 13070 + }, + { + "epoch": 0.9075822802388557, + "grad_norm": 5.1877502980555406, + "learning_rate": 2.2234119974913148e-07, + "loss": 0.7655, + "step": 13071 + }, + { + "epoch": 0.9076517150395779, + "grad_norm": 9.408937863179526, + "learning_rate": 2.220097225271145e-07, + "loss": 0.3667, + "step": 13072 + }, + { + "epoch": 0.9077211498403, + "grad_norm": 3.5700303734613437, + "learning_rate": 2.2167848697043858e-07, + "loss": 0.3974, + "step": 13073 + }, + { + "epoch": 0.9077905846410221, + "grad_norm": 3.1885770264765627, + "learning_rate": 2.2134749309585813e-07, + "loss": 0.2297, + "step": 13074 + }, + { + "epoch": 0.9078600194417442, + "grad_norm": 4.034696146134198, + "learning_rate": 2.2101674092011417e-07, + "loss": 0.4953, + "step": 13075 + }, + { + "epoch": 0.9079294542424663, + "grad_norm": 4.090098010251932, + "learning_rate": 2.2068623045993498e-07, + "loss": 0.4153, + "step": 13076 + }, + { + "epoch": 0.9079988890431885, + "grad_norm": 3.391655356363241, + "learning_rate": 2.203559617320389e-07, + "loss": 0.3102, + "step": 13077 + }, + { + "epoch": 0.9080683238439106, + "grad_norm": 3.079239633772833, + "learning_rate": 2.2002593475312972e-07, + "loss": 0.2552, + "step": 13078 + }, + { + "epoch": 0.9081377586446326, + "grad_norm": 2.6520299463217714, + "learning_rate": 2.1969614953989914e-07, + "loss": 0.2322, + "step": 13079 + }, + { + "epoch": 
0.9082071934453548, + "grad_norm": 4.470167912877859, + "learning_rate": 2.1936660610902882e-07, + "loss": 0.4205, + "step": 13080 + }, + { + "epoch": 0.9082766282460769, + "grad_norm": 3.903469663391334, + "learning_rate": 2.1903730447718596e-07, + "loss": 0.3695, + "step": 13081 + }, + { + "epoch": 0.9083460630467991, + "grad_norm": 4.3259627401575536, + "learning_rate": 2.1870824466102503e-07, + "loss": 0.49, + "step": 13082 + }, + { + "epoch": 0.9084154978475212, + "grad_norm": 4.824051317464986, + "learning_rate": 2.1837942667719157e-07, + "loss": 0.5019, + "step": 13083 + }, + { + "epoch": 0.9084849326482433, + "grad_norm": 4.041752634741632, + "learning_rate": 2.1805085054231622e-07, + "loss": 0.4854, + "step": 13084 + }, + { + "epoch": 0.9085543674489654, + "grad_norm": 6.5575539073310205, + "learning_rate": 2.1772251627301676e-07, + "loss": 0.4013, + "step": 13085 + }, + { + "epoch": 0.9086238022496875, + "grad_norm": 4.611684692420382, + "learning_rate": 2.173944238859016e-07, + "loss": 0.5457, + "step": 13086 + }, + { + "epoch": 0.9086932370504097, + "grad_norm": 3.72698208210969, + "learning_rate": 2.1706657339756466e-07, + "loss": 0.3076, + "step": 13087 + }, + { + "epoch": 0.9087626718511318, + "grad_norm": 3.5867654864515948, + "learning_rate": 2.1673896482458768e-07, + "loss": 0.4468, + "step": 13088 + }, + { + "epoch": 0.9088321066518539, + "grad_norm": 4.879969154658898, + "learning_rate": 2.1641159818354184e-07, + "loss": 0.5419, + "step": 13089 + }, + { + "epoch": 0.9089015414525761, + "grad_norm": 3.1424057618503256, + "learning_rate": 2.1608447349098393e-07, + "loss": 0.2672, + "step": 13090 + }, + { + "epoch": 0.9089709762532981, + "grad_norm": 3.3077327260424587, + "learning_rate": 2.1575759076346014e-07, + "loss": 0.3627, + "step": 13091 + }, + { + "epoch": 0.9090404110540202, + "grad_norm": 3.2595704214147245, + "learning_rate": 2.154309500175039e-07, + "loss": 0.3339, + "step": 13092 + }, + { + "epoch": 0.9091098458547424, + "grad_norm": 3.1818829550765457, + "learning_rate": 2.1510455126963592e-07, + "loss": 0.3083, + "step": 13093 + }, + { + "epoch": 0.9091792806554645, + "grad_norm": 3.6306790361797634, + "learning_rate": 2.1477839453636518e-07, + "loss": 0.4544, + "step": 13094 + }, + { + "epoch": 0.9092487154561867, + "grad_norm": 4.090606769185803, + "learning_rate": 2.144524798341885e-07, + "loss": 0.4622, + "step": 13095 + }, + { + "epoch": 0.9093181502569088, + "grad_norm": 4.656064699023622, + "learning_rate": 2.1412680717958934e-07, + "loss": 0.5533, + "step": 13096 + }, + { + "epoch": 0.9093875850576308, + "grad_norm": 4.181148433151871, + "learning_rate": 2.1380137658904176e-07, + "loss": 0.2659, + "step": 13097 + }, + { + "epoch": 0.909457019858353, + "grad_norm": 3.0973463079305166, + "learning_rate": 2.1347618807900482e-07, + "loss": 0.3757, + "step": 13098 + }, + { + "epoch": 0.9095264546590751, + "grad_norm": 4.124002180933459, + "learning_rate": 2.1315124166592484e-07, + "loss": 0.5821, + "step": 13099 + }, + { + "epoch": 0.9095958894597973, + "grad_norm": 5.183879174023916, + "learning_rate": 2.1282653736623914e-07, + "loss": 0.5093, + "step": 13100 + }, + { + "epoch": 0.9096653242605194, + "grad_norm": 5.2218200376180866, + "learning_rate": 2.1250207519636967e-07, + "loss": 0.5617, + "step": 13101 + }, + { + "epoch": 0.9097347590612415, + "grad_norm": 4.970940100298773, + "learning_rate": 2.1217785517272827e-07, + "loss": 0.5363, + "step": 13102 + }, + { + "epoch": 0.9098041938619637, + "grad_norm": 3.6929497502478412, + "learning_rate": 
2.1185387731171293e-07, + "loss": 0.4188, + "step": 13103 + }, + { + "epoch": 0.9098736286626857, + "grad_norm": 3.987627841430215, + "learning_rate": 2.1153014162971052e-07, + "loss": 0.2798, + "step": 13104 + }, + { + "epoch": 0.9099430634634078, + "grad_norm": 4.979498710588045, + "learning_rate": 2.1120664814309522e-07, + "loss": 0.5792, + "step": 13105 + }, + { + "epoch": 0.91001249826413, + "grad_norm": 3.6260793420066806, + "learning_rate": 2.1088339686822834e-07, + "loss": 0.4317, + "step": 13106 + }, + { + "epoch": 0.9100819330648521, + "grad_norm": 3.912139358259877, + "learning_rate": 2.1056038782146015e-07, + "loss": 0.2036, + "step": 13107 + }, + { + "epoch": 0.9101513678655743, + "grad_norm": 4.587462412676917, + "learning_rate": 2.1023762101912704e-07, + "loss": 0.3209, + "step": 13108 + }, + { + "epoch": 0.9102208026662963, + "grad_norm": 3.7109577291065663, + "learning_rate": 2.0991509647755703e-07, + "loss": 0.368, + "step": 13109 + }, + { + "epoch": 0.9102902374670184, + "grad_norm": 3.0770212384633875, + "learning_rate": 2.095928142130599e-07, + "loss": 0.2124, + "step": 13110 + }, + { + "epoch": 0.9103596722677406, + "grad_norm": 4.305743383728951, + "learning_rate": 2.0927077424193698e-07, + "loss": 0.418, + "step": 13111 + }, + { + "epoch": 0.9104291070684627, + "grad_norm": 3.619542197417392, + "learning_rate": 2.0894897658047863e-07, + "loss": 0.4248, + "step": 13112 + }, + { + "epoch": 0.9104985418691849, + "grad_norm": 3.12456480609724, + "learning_rate": 2.0862742124495849e-07, + "loss": 0.3744, + "step": 13113 + }, + { + "epoch": 0.910567976669907, + "grad_norm": 2.979345355795465, + "learning_rate": 2.0830610825164132e-07, + "loss": 0.2418, + "step": 13114 + }, + { + "epoch": 0.910637411470629, + "grad_norm": 4.92449446593579, + "learning_rate": 2.079850376167797e-07, + "loss": 0.5774, + "step": 13115 + }, + { + "epoch": 0.9107068462713512, + "grad_norm": 4.43583426950556, + "learning_rate": 2.0766420935661224e-07, + "loss": 0.6745, + "step": 13116 + }, + { + "epoch": 0.9107762810720733, + "grad_norm": 4.486465249955106, + "learning_rate": 2.0734362348736602e-07, + "loss": 0.5872, + "step": 13117 + }, + { + "epoch": 0.9108457158727954, + "grad_norm": 5.384097997897376, + "learning_rate": 2.070232800252564e-07, + "loss": 0.5412, + "step": 13118 + }, + { + "epoch": 0.9109151506735176, + "grad_norm": 4.35103971016548, + "learning_rate": 2.0670317898648539e-07, + "loss": 0.3021, + "step": 13119 + }, + { + "epoch": 0.9109845854742397, + "grad_norm": 4.31781133386301, + "learning_rate": 2.0638332038724395e-07, + "loss": 0.4234, + "step": 13120 + }, + { + "epoch": 0.9110540202749619, + "grad_norm": 4.731078052258188, + "learning_rate": 2.060637042437097e-07, + "loss": 0.5315, + "step": 13121 + }, + { + "epoch": 0.9111234550756839, + "grad_norm": 5.107630053721914, + "learning_rate": 2.0574433057204857e-07, + "loss": 0.448, + "step": 13122 + }, + { + "epoch": 0.911192889876406, + "grad_norm": 3.2288047178390507, + "learning_rate": 2.0542519938841377e-07, + "loss": 0.3396, + "step": 13123 + }, + { + "epoch": 0.9112623246771282, + "grad_norm": 2.904088071258934, + "learning_rate": 2.0510631070894738e-07, + "loss": 0.1951, + "step": 13124 + }, + { + "epoch": 0.9113317594778503, + "grad_norm": 4.206015643693509, + "learning_rate": 2.0478766454977705e-07, + "loss": 0.2977, + "step": 13125 + }, + { + "epoch": 0.9114011942785725, + "grad_norm": 4.458746333437605, + "learning_rate": 2.0446926092702157e-07, + "loss": 0.5085, + "step": 13126 + }, + { + "epoch": 
0.9114706290792945, + "grad_norm": 4.213553729978643, + "learning_rate": 2.041510998567847e-07, + "loss": 0.5041, + "step": 13127 + }, + { + "epoch": 0.9115400638800166, + "grad_norm": 3.7938070538951405, + "learning_rate": 2.0383318135515696e-07, + "loss": 0.3449, + "step": 13128 + }, + { + "epoch": 0.9116094986807388, + "grad_norm": 4.281740109494131, + "learning_rate": 2.035155054382204e-07, + "loss": 0.5241, + "step": 13129 + }, + { + "epoch": 0.9116789334814609, + "grad_norm": 5.480033043775263, + "learning_rate": 2.031980721220428e-07, + "loss": 0.3525, + "step": 13130 + }, + { + "epoch": 0.911748368282183, + "grad_norm": 4.235983615570748, + "learning_rate": 2.0288088142267738e-07, + "loss": 0.4991, + "step": 13131 + }, + { + "epoch": 0.9118178030829052, + "grad_norm": 4.896283107934131, + "learning_rate": 2.0256393335616908e-07, + "loss": 0.4829, + "step": 13132 + }, + { + "epoch": 0.9118872378836272, + "grad_norm": 4.217037722044566, + "learning_rate": 2.02247227938549e-07, + "loss": 0.4453, + "step": 13133 + }, + { + "epoch": 0.9119566726843494, + "grad_norm": 3.6659673916493296, + "learning_rate": 2.0193076518583432e-07, + "loss": 0.3288, + "step": 13134 + }, + { + "epoch": 0.9120261074850715, + "grad_norm": 3.6400615594539003, + "learning_rate": 2.0161454511403278e-07, + "loss": 0.2917, + "step": 13135 + }, + { + "epoch": 0.9120955422857936, + "grad_norm": 5.996333766272976, + "learning_rate": 2.012985677391377e-07, + "loss": 0.7983, + "step": 13136 + }, + { + "epoch": 0.9121649770865158, + "grad_norm": 5.320652772708727, + "learning_rate": 2.0098283307713074e-07, + "loss": 0.7372, + "step": 13137 + }, + { + "epoch": 0.9122344118872379, + "grad_norm": 4.41517487059579, + "learning_rate": 2.0066734114398133e-07, + "loss": 0.3472, + "step": 13138 + }, + { + "epoch": 0.91230384668796, + "grad_norm": 3.077609143357197, + "learning_rate": 2.003520919556473e-07, + "loss": 0.2265, + "step": 13139 + }, + { + "epoch": 0.9123732814886821, + "grad_norm": 4.421108047239686, + "learning_rate": 2.000370855280731e-07, + "loss": 0.3491, + "step": 13140 + }, + { + "epoch": 0.9124427162894042, + "grad_norm": 3.770967885855413, + "learning_rate": 1.997223218771921e-07, + "loss": 0.429, + "step": 13141 + }, + { + "epoch": 0.9125121510901264, + "grad_norm": 3.767351199194019, + "learning_rate": 1.9940780101892433e-07, + "loss": 0.288, + "step": 13142 + }, + { + "epoch": 0.9125815858908485, + "grad_norm": 4.04283038612596, + "learning_rate": 1.990935229691765e-07, + "loss": 0.3414, + "step": 13143 + }, + { + "epoch": 0.9126510206915707, + "grad_norm": 5.234372040102413, + "learning_rate": 1.9877948774384703e-07, + "loss": 0.456, + "step": 13144 + }, + { + "epoch": 0.9127204554922927, + "grad_norm": 4.301840014487488, + "learning_rate": 1.9846569535881767e-07, + "loss": 0.3411, + "step": 13145 + }, + { + "epoch": 0.9127898902930148, + "grad_norm": 4.345692905338997, + "learning_rate": 1.9815214582995955e-07, + "loss": 0.4133, + "step": 13146 + }, + { + "epoch": 0.912859325093737, + "grad_norm": 4.620465126924886, + "learning_rate": 1.9783883917313285e-07, + "loss": 0.6071, + "step": 13147 + }, + { + "epoch": 0.9129287598944591, + "grad_norm": 2.7182267876185304, + "learning_rate": 1.9752577540418315e-07, + "loss": 0.169, + "step": 13148 + }, + { + "epoch": 0.9129981946951812, + "grad_norm": 3.59505303118981, + "learning_rate": 1.9721295453894618e-07, + "loss": 0.3005, + "step": 13149 + }, + { + "epoch": 0.9130676294959034, + "grad_norm": 4.129588110346204, + "learning_rate": 1.9690037659324256e-07, + 
"loss": 0.4218, + "step": 13150 + }, + { + "epoch": 0.9131370642966254, + "grad_norm": 3.874565154033344, + "learning_rate": 1.9658804158288303e-07, + "loss": 0.5208, + "step": 13151 + }, + { + "epoch": 0.9132064990973476, + "grad_norm": 4.232318982492772, + "learning_rate": 1.9627594952366491e-07, + "loss": 0.5539, + "step": 13152 + }, + { + "epoch": 0.9132759338980697, + "grad_norm": 3.786113284056205, + "learning_rate": 1.9596410043137393e-07, + "loss": 0.4187, + "step": 13153 + }, + { + "epoch": 0.9133453686987918, + "grad_norm": 4.288814734476795, + "learning_rate": 1.9565249432178192e-07, + "loss": 0.5484, + "step": 13154 + }, + { + "epoch": 0.913414803499514, + "grad_norm": 4.6077161340572905, + "learning_rate": 1.953411312106507e-07, + "loss": 0.581, + "step": 13155 + }, + { + "epoch": 0.9134842383002361, + "grad_norm": 3.7002858088458694, + "learning_rate": 1.950300111137282e-07, + "loss": 0.2884, + "step": 13156 + }, + { + "epoch": 0.9135536731009583, + "grad_norm": 3.4228897042453195, + "learning_rate": 1.947191340467497e-07, + "loss": 0.2982, + "step": 13157 + }, + { + "epoch": 0.9136231079016803, + "grad_norm": 4.4247415412960045, + "learning_rate": 1.9440850002544086e-07, + "loss": 0.5913, + "step": 13158 + }, + { + "epoch": 0.9136925427024024, + "grad_norm": 5.161644179865504, + "learning_rate": 1.940981090655125e-07, + "loss": 0.5296, + "step": 13159 + }, + { + "epoch": 0.9137619775031246, + "grad_norm": 4.240518231752646, + "learning_rate": 1.9378796118266208e-07, + "loss": 0.3994, + "step": 13160 + }, + { + "epoch": 0.9138314123038467, + "grad_norm": 4.530987075162484, + "learning_rate": 1.9347805639257923e-07, + "loss": 0.4318, + "step": 13161 + }, + { + "epoch": 0.9139008471045688, + "grad_norm": 4.516957955514719, + "learning_rate": 1.9316839471093752e-07, + "loss": 0.2844, + "step": 13162 + }, + { + "epoch": 0.913970281905291, + "grad_norm": 3.207712536734829, + "learning_rate": 1.9285897615339776e-07, + "loss": 0.1931, + "step": 13163 + }, + { + "epoch": 0.914039716706013, + "grad_norm": 4.03011072165392, + "learning_rate": 1.9254980073561246e-07, + "loss": 0.5403, + "step": 13164 + }, + { + "epoch": 0.9141091515067352, + "grad_norm": 3.709004275165941, + "learning_rate": 1.92240868473218e-07, + "loss": 0.2588, + "step": 13165 + }, + { + "epoch": 0.9141785863074573, + "grad_norm": 4.849142845541423, + "learning_rate": 1.919321793818396e-07, + "loss": 0.34, + "step": 13166 + }, + { + "epoch": 0.9142480211081794, + "grad_norm": 4.836180380826499, + "learning_rate": 1.916237334770915e-07, + "loss": 0.5642, + "step": 13167 + }, + { + "epoch": 0.9143174559089016, + "grad_norm": 3.4192675363392953, + "learning_rate": 1.91315530774574e-07, + "loss": 0.272, + "step": 13168 + }, + { + "epoch": 0.9143868907096236, + "grad_norm": 4.461030118373413, + "learning_rate": 1.9100757128987513e-07, + "loss": 0.5749, + "step": 13169 + }, + { + "epoch": 0.9144563255103458, + "grad_norm": 3.5118586422396962, + "learning_rate": 1.9069985503857135e-07, + "loss": 0.3602, + "step": 13170 + }, + { + "epoch": 0.9145257603110679, + "grad_norm": 4.182774019121627, + "learning_rate": 1.9039238203622745e-07, + "loss": 0.349, + "step": 13171 + }, + { + "epoch": 0.91459519511179, + "grad_norm": 4.905202071340501, + "learning_rate": 1.9008515229839318e-07, + "loss": 0.3973, + "step": 13172 + }, + { + "epoch": 0.9146646299125122, + "grad_norm": 4.413039607281084, + "learning_rate": 1.8977816584061115e-07, + "loss": 0.6284, + "step": 13173 + }, + { + "epoch": 0.9147340647132343, + "grad_norm": 
4.085802079557506, + "learning_rate": 1.89471422678405e-07, + "loss": 0.459, + "step": 13174 + }, + { + "epoch": 0.9148034995139563, + "grad_norm": 2.3108712034100463, + "learning_rate": 1.8916492282729072e-07, + "loss": 0.2271, + "step": 13175 + }, + { + "epoch": 0.9148729343146785, + "grad_norm": 4.3812902883413525, + "learning_rate": 1.8885866630277083e-07, + "loss": 0.4005, + "step": 13176 + }, + { + "epoch": 0.9149423691154006, + "grad_norm": 3.862582454130768, + "learning_rate": 1.8855265312033577e-07, + "loss": 0.483, + "step": 13177 + }, + { + "epoch": 0.9150118039161228, + "grad_norm": 3.084282224808126, + "learning_rate": 1.8824688329546315e-07, + "loss": 0.3058, + "step": 13178 + }, + { + "epoch": 0.9150812387168449, + "grad_norm": 2.5293551785501323, + "learning_rate": 1.8794135684361837e-07, + "loss": 0.1241, + "step": 13179 + }, + { + "epoch": 0.915150673517567, + "grad_norm": 4.268197955635075, + "learning_rate": 1.8763607378025406e-07, + "loss": 0.4241, + "step": 13180 + }, + { + "epoch": 0.9152201083182891, + "grad_norm": 3.707227929041444, + "learning_rate": 1.8733103412081177e-07, + "loss": 0.3503, + "step": 13181 + }, + { + "epoch": 0.9152895431190112, + "grad_norm": 5.877329062520276, + "learning_rate": 1.8702623788072028e-07, + "loss": 0.7088, + "step": 13182 + }, + { + "epoch": 0.9153589779197334, + "grad_norm": 3.5167450667373106, + "learning_rate": 1.86721685075395e-07, + "loss": 0.3157, + "step": 13183 + }, + { + "epoch": 0.9154284127204555, + "grad_norm": 4.546791752627194, + "learning_rate": 1.8641737572024033e-07, + "loss": 0.5028, + "step": 13184 + }, + { + "epoch": 0.9154978475211776, + "grad_norm": 4.144392595469462, + "learning_rate": 1.8611330983064779e-07, + "loss": 0.2806, + "step": 13185 + }, + { + "epoch": 0.9155672823218998, + "grad_norm": 4.256470108514353, + "learning_rate": 1.8580948742199622e-07, + "loss": 0.4336, + "step": 13186 + }, + { + "epoch": 0.9156367171226218, + "grad_norm": 4.261859307341919, + "learning_rate": 1.8550590850965443e-07, + "loss": 0.4324, + "step": 13187 + }, + { + "epoch": 0.9157061519233439, + "grad_norm": 3.52455772933577, + "learning_rate": 1.8520257310897516e-07, + "loss": 0.3491, + "step": 13188 + }, + { + "epoch": 0.9157755867240661, + "grad_norm": 4.672633881327756, + "learning_rate": 1.848994812353011e-07, + "loss": 0.52, + "step": 13189 + }, + { + "epoch": 0.9158450215247882, + "grad_norm": 5.053456379977315, + "learning_rate": 1.8459663290396278e-07, + "loss": 0.2968, + "step": 13190 + }, + { + "epoch": 0.9159144563255104, + "grad_norm": 4.199483525488225, + "learning_rate": 1.8429402813027853e-07, + "loss": 0.5053, + "step": 13191 + }, + { + "epoch": 0.9159838911262325, + "grad_norm": 5.206677005417869, + "learning_rate": 1.8399166692955217e-07, + "loss": 0.5118, + "step": 13192 + }, + { + "epoch": 0.9160533259269545, + "grad_norm": 5.094949363906195, + "learning_rate": 1.836895493170776e-07, + "loss": 0.4364, + "step": 13193 + }, + { + "epoch": 0.9161227607276767, + "grad_norm": 4.410648550347962, + "learning_rate": 1.833876753081365e-07, + "loss": 0.3204, + "step": 13194 + }, + { + "epoch": 0.9161921955283988, + "grad_norm": 4.071826634437693, + "learning_rate": 1.8308604491799554e-07, + "loss": 0.4599, + "step": 13195 + }, + { + "epoch": 0.916261630329121, + "grad_norm": 3.327936807243054, + "learning_rate": 1.8278465816191194e-07, + "loss": 0.2187, + "step": 13196 + }, + { + "epoch": 0.9163310651298431, + "grad_norm": 3.4793058211951178, + "learning_rate": 1.8248351505512906e-07, + "loss": 0.2691, + "step": 
13197 + }, + { + "epoch": 0.9164004999305652, + "grad_norm": 4.279305454266996, + "learning_rate": 1.821826156128792e-07, + "loss": 0.444, + "step": 13198 + }, + { + "epoch": 0.9164699347312874, + "grad_norm": 5.630757092699069, + "learning_rate": 1.818819598503807e-07, + "loss": 0.4323, + "step": 13199 + }, + { + "epoch": 0.9165393695320094, + "grad_norm": 4.524862603023, + "learning_rate": 1.815815477828403e-07, + "loss": 0.5411, + "step": 13200 + }, + { + "epoch": 0.9166088043327316, + "grad_norm": 3.7393453977510585, + "learning_rate": 1.8128137942545254e-07, + "loss": 0.3696, + "step": 13201 + }, + { + "epoch": 0.9166782391334537, + "grad_norm": 3.694405157551287, + "learning_rate": 1.8098145479340078e-07, + "loss": 0.3588, + "step": 13202 + }, + { + "epoch": 0.9167476739341758, + "grad_norm": 3.2889094398678904, + "learning_rate": 1.8068177390185348e-07, + "loss": 0.3362, + "step": 13203 + }, + { + "epoch": 0.916817108734898, + "grad_norm": 3.7868491226048087, + "learning_rate": 1.8038233676596796e-07, + "loss": 0.4876, + "step": 13204 + }, + { + "epoch": 0.91688654353562, + "grad_norm": 4.347107936584964, + "learning_rate": 1.800831434008915e-07, + "loss": 0.3629, + "step": 13205 + }, + { + "epoch": 0.9169559783363421, + "grad_norm": 3.3288660794735896, + "learning_rate": 1.7978419382175428e-07, + "loss": 0.2376, + "step": 13206 + }, + { + "epoch": 0.9170254131370643, + "grad_norm": 4.357758878373558, + "learning_rate": 1.794854880436786e-07, + "loss": 0.3279, + "step": 13207 + }, + { + "epoch": 0.9170948479377864, + "grad_norm": 3.7916209749486818, + "learning_rate": 1.7918702608177186e-07, + "loss": 0.3335, + "step": 13208 + }, + { + "epoch": 0.9171642827385086, + "grad_norm": 4.068544470916159, + "learning_rate": 1.7888880795113083e-07, + "loss": 0.6633, + "step": 13209 + }, + { + "epoch": 0.9172337175392307, + "grad_norm": 4.598300631864856, + "learning_rate": 1.7859083366683793e-07, + "loss": 0.5846, + "step": 13210 + }, + { + "epoch": 0.9173031523399527, + "grad_norm": 4.877864533407342, + "learning_rate": 1.78293103243965e-07, + "loss": 0.5308, + "step": 13211 + }, + { + "epoch": 0.9173725871406749, + "grad_norm": 4.012021340204592, + "learning_rate": 1.7799561669757103e-07, + "loss": 0.4508, + "step": 13212 + }, + { + "epoch": 0.917442021941397, + "grad_norm": 4.056358268684323, + "learning_rate": 1.776983740427024e-07, + "loss": 0.456, + "step": 13213 + }, + { + "epoch": 0.9175114567421192, + "grad_norm": 4.065072105354813, + "learning_rate": 1.774013752943926e-07, + "loss": 0.5014, + "step": 13214 + }, + { + "epoch": 0.9175808915428413, + "grad_norm": 3.8114492903877513, + "learning_rate": 1.771046204676641e-07, + "loss": 0.2906, + "step": 13215 + }, + { + "epoch": 0.9176503263435634, + "grad_norm": 5.640629438637088, + "learning_rate": 1.7680810957752702e-07, + "loss": 0.38, + "step": 13216 + }, + { + "epoch": 0.9177197611442856, + "grad_norm": 3.9378314589925454, + "learning_rate": 1.765118426389778e-07, + "loss": 0.3722, + "step": 13217 + }, + { + "epoch": 0.9177891959450076, + "grad_norm": 4.866989601091155, + "learning_rate": 1.7621581966700051e-07, + "loss": 0.4743, + "step": 13218 + }, + { + "epoch": 0.9178586307457297, + "grad_norm": 5.976386976635638, + "learning_rate": 1.7592004067657041e-07, + "loss": 0.7048, + "step": 13219 + }, + { + "epoch": 0.9179280655464519, + "grad_norm": 3.2847469562493203, + "learning_rate": 1.7562450568264444e-07, + "loss": 0.2608, + "step": 13220 + }, + { + "epoch": 0.917997500347174, + "grad_norm": 3.9599363592648653, + 
"learning_rate": 1.7532921470017173e-07, + "loss": 0.4972, + "step": 13221 + }, + { + "epoch": 0.9180669351478962, + "grad_norm": 3.76859785500059, + "learning_rate": 1.750341677440881e-07, + "loss": 0.3654, + "step": 13222 + }, + { + "epoch": 0.9181363699486182, + "grad_norm": 3.584613913306775, + "learning_rate": 1.7473936482931718e-07, + "loss": 0.3657, + "step": 13223 + }, + { + "epoch": 0.9182058047493403, + "grad_norm": 4.819573222505809, + "learning_rate": 1.744448059707682e-07, + "loss": 0.6046, + "step": 13224 + }, + { + "epoch": 0.9182752395500625, + "grad_norm": 3.405697693611392, + "learning_rate": 1.7415049118334083e-07, + "loss": 0.296, + "step": 13225 + }, + { + "epoch": 0.9183446743507846, + "grad_norm": 4.185277550514758, + "learning_rate": 1.738564204819204e-07, + "loss": 0.4308, + "step": 13226 + }, + { + "epoch": 0.9184141091515068, + "grad_norm": 5.09992880037529, + "learning_rate": 1.7356259388138176e-07, + "loss": 0.4437, + "step": 13227 + }, + { + "epoch": 0.9184835439522289, + "grad_norm": 4.694179696395701, + "learning_rate": 1.7326901139658514e-07, + "loss": 0.5961, + "step": 13228 + }, + { + "epoch": 0.9185529787529509, + "grad_norm": 3.5838688843420106, + "learning_rate": 1.729756730423804e-07, + "loss": 0.3195, + "step": 13229 + }, + { + "epoch": 0.9186224135536731, + "grad_norm": 3.6760958948571845, + "learning_rate": 1.7268257883360394e-07, + "loss": 0.4393, + "step": 13230 + }, + { + "epoch": 0.9186918483543952, + "grad_norm": 2.9584001868479812, + "learning_rate": 1.723897287850801e-07, + "loss": 0.3919, + "step": 13231 + }, + { + "epoch": 0.9187612831551173, + "grad_norm": 4.722996632878373, + "learning_rate": 1.7209712291162085e-07, + "loss": 0.6724, + "step": 13232 + }, + { + "epoch": 0.9188307179558395, + "grad_norm": 4.25715981972975, + "learning_rate": 1.7180476122802548e-07, + "loss": 0.4906, + "step": 13233 + }, + { + "epoch": 0.9189001527565616, + "grad_norm": 3.810037760473995, + "learning_rate": 1.7151264374908273e-07, + "loss": 0.3251, + "step": 13234 + }, + { + "epoch": 0.9189695875572838, + "grad_norm": 5.391328664969378, + "learning_rate": 1.712207704895663e-07, + "loss": 0.7413, + "step": 13235 + }, + { + "epoch": 0.9190390223580058, + "grad_norm": 2.7894993101818946, + "learning_rate": 1.7092914146423834e-07, + "loss": 0.2573, + "step": 13236 + }, + { + "epoch": 0.9191084571587279, + "grad_norm": 3.5334720819866305, + "learning_rate": 1.7063775668785143e-07, + "loss": 0.4482, + "step": 13237 + }, + { + "epoch": 0.9191778919594501, + "grad_norm": 3.2130815993870376, + "learning_rate": 1.7034661617514102e-07, + "loss": 0.3157, + "step": 13238 + }, + { + "epoch": 0.9192473267601722, + "grad_norm": 2.9479394920341764, + "learning_rate": 1.700557199408337e-07, + "loss": 0.2149, + "step": 13239 + }, + { + "epoch": 0.9193167615608944, + "grad_norm": 4.382621519179842, + "learning_rate": 1.697650679996432e-07, + "loss": 0.502, + "step": 13240 + }, + { + "epoch": 0.9193861963616164, + "grad_norm": 3.9385369364031715, + "learning_rate": 1.6947466036626947e-07, + "loss": 0.4312, + "step": 13241 + }, + { + "epoch": 0.9194556311623385, + "grad_norm": 5.21086882412259, + "learning_rate": 1.691844970554013e-07, + "loss": 0.615, + "step": 13242 + }, + { + "epoch": 0.9195250659630607, + "grad_norm": 5.038349045925452, + "learning_rate": 1.6889457808171473e-07, + "loss": 0.6033, + "step": 13243 + }, + { + "epoch": 0.9195945007637828, + "grad_norm": 4.33985111383103, + "learning_rate": 1.6860490345987358e-07, + "loss": 0.3521, + "step": 13244 + }, + { + 
"epoch": 0.9196639355645049, + "grad_norm": 3.6348218633322915, + "learning_rate": 1.6831547320452945e-07, + "loss": 0.3688, + "step": 13245 + }, + { + "epoch": 0.9197333703652271, + "grad_norm": 2.2391799227611715, + "learning_rate": 1.6802628733032123e-07, + "loss": 0.1203, + "step": 13246 + }, + { + "epoch": 0.9198028051659491, + "grad_norm": 4.156520408728646, + "learning_rate": 1.6773734585187495e-07, + "loss": 0.497, + "step": 13247 + }, + { + "epoch": 0.9198722399666713, + "grad_norm": 5.057616130028476, + "learning_rate": 1.6744864878380728e-07, + "loss": 0.4434, + "step": 13248 + }, + { + "epoch": 0.9199416747673934, + "grad_norm": 4.116897760876217, + "learning_rate": 1.6716019614071766e-07, + "loss": 0.4944, + "step": 13249 + }, + { + "epoch": 0.9200111095681155, + "grad_norm": 3.288221731762155, + "learning_rate": 1.6687198793719605e-07, + "loss": 0.2487, + "step": 13250 + }, + { + "epoch": 0.9200805443688377, + "grad_norm": 3.3360130806993156, + "learning_rate": 1.6658402418782083e-07, + "loss": 0.2485, + "step": 13251 + }, + { + "epoch": 0.9201499791695598, + "grad_norm": 4.1688821330911665, + "learning_rate": 1.6629630490715642e-07, + "loss": 0.3957, + "step": 13252 + }, + { + "epoch": 0.920219413970282, + "grad_norm": 4.602391138795815, + "learning_rate": 1.6600883010975454e-07, + "loss": 0.5724, + "step": 13253 + }, + { + "epoch": 0.920288848771004, + "grad_norm": 3.65147619981418, + "learning_rate": 1.6572159981015634e-07, + "loss": 0.3903, + "step": 13254 + }, + { + "epoch": 0.9203582835717261, + "grad_norm": 5.170488632408698, + "learning_rate": 1.654346140228902e-07, + "loss": 0.6156, + "step": 13255 + }, + { + "epoch": 0.9204277183724483, + "grad_norm": 6.4843181972837804, + "learning_rate": 1.651478727624689e-07, + "loss": 0.4088, + "step": 13256 + }, + { + "epoch": 0.9204971531731704, + "grad_norm": 3.4931291236595743, + "learning_rate": 1.6486137604339813e-07, + "loss": 0.4011, + "step": 13257 + }, + { + "epoch": 0.9205665879738926, + "grad_norm": 3.7680024008452033, + "learning_rate": 1.6457512388016795e-07, + "loss": 0.4589, + "step": 13258 + }, + { + "epoch": 0.9206360227746146, + "grad_norm": 3.1342241125727073, + "learning_rate": 1.6428911628725563e-07, + "loss": 0.3699, + "step": 13259 + }, + { + "epoch": 0.9207054575753367, + "grad_norm": 4.226559482047216, + "learning_rate": 1.6400335327912854e-07, + "loss": 0.3166, + "step": 13260 + }, + { + "epoch": 0.9207748923760589, + "grad_norm": 2.955892670788817, + "learning_rate": 1.6371783487023897e-07, + "loss": 0.2298, + "step": 13261 + }, + { + "epoch": 0.920844327176781, + "grad_norm": 5.034508761736175, + "learning_rate": 1.6343256107502925e-07, + "loss": 0.6193, + "step": 13262 + }, + { + "epoch": 0.9209137619775031, + "grad_norm": 4.803019608120732, + "learning_rate": 1.631475319079273e-07, + "loss": 0.3488, + "step": 13263 + }, + { + "epoch": 0.9209831967782253, + "grad_norm": 4.285440672482229, + "learning_rate": 1.628627473833494e-07, + "loss": 0.5497, + "step": 13264 + }, + { + "epoch": 0.9210526315789473, + "grad_norm": 4.84450915866471, + "learning_rate": 1.6257820751570063e-07, + "loss": 0.2294, + "step": 13265 + }, + { + "epoch": 0.9211220663796695, + "grad_norm": 3.6426051437246545, + "learning_rate": 1.622939123193723e-07, + "loss": 0.3159, + "step": 13266 + }, + { + "epoch": 0.9211915011803916, + "grad_norm": 4.312335916224246, + "learning_rate": 1.6200986180874346e-07, + "loss": 0.5334, + "step": 13267 + }, + { + "epoch": 0.9212609359811137, + "grad_norm": 5.3598060493380055, + "learning_rate": 
1.6172605599818147e-07, + "loss": 0.6245, + "step": 13268 + }, + { + "epoch": 0.9213303707818359, + "grad_norm": 3.9457633467402125, + "learning_rate": 1.61442494902041e-07, + "loss": 0.3488, + "step": 13269 + }, + { + "epoch": 0.921399805582558, + "grad_norm": 3.5351825237952696, + "learning_rate": 1.611591785346628e-07, + "loss": 0.442, + "step": 13270 + }, + { + "epoch": 0.9214692403832802, + "grad_norm": 4.384392902487356, + "learning_rate": 1.6087610691037868e-07, + "loss": 0.6413, + "step": 13271 + }, + { + "epoch": 0.9215386751840022, + "grad_norm": 2.852414864167171, + "learning_rate": 1.6059328004350504e-07, + "loss": 0.191, + "step": 13272 + }, + { + "epoch": 0.9216081099847243, + "grad_norm": 3.9789472758633657, + "learning_rate": 1.6031069794834709e-07, + "loss": 0.245, + "step": 13273 + }, + { + "epoch": 0.9216775447854465, + "grad_norm": 3.9672704089694544, + "learning_rate": 1.600283606391978e-07, + "loss": 0.5368, + "step": 13274 + }, + { + "epoch": 0.9217469795861686, + "grad_norm": 3.295467421759712, + "learning_rate": 1.5974626813033743e-07, + "loss": 0.3576, + "step": 13275 + }, + { + "epoch": 0.9218164143868907, + "grad_norm": 3.9857068558116313, + "learning_rate": 1.594644204360335e-07, + "loss": 0.4698, + "step": 13276 + }, + { + "epoch": 0.9218858491876128, + "grad_norm": 5.939251652722061, + "learning_rate": 1.5918281757054122e-07, + "loss": 0.3042, + "step": 13277 + }, + { + "epoch": 0.9219552839883349, + "grad_norm": 3.4526225540413256, + "learning_rate": 1.5890145954810478e-07, + "loss": 0.2741, + "step": 13278 + }, + { + "epoch": 0.9220247187890571, + "grad_norm": 4.034337080556549, + "learning_rate": 1.5862034638295388e-07, + "loss": 0.4025, + "step": 13279 + }, + { + "epoch": 0.9220941535897792, + "grad_norm": 4.026573781583536, + "learning_rate": 1.5833947808930828e-07, + "loss": 0.5206, + "step": 13280 + }, + { + "epoch": 0.9221635883905013, + "grad_norm": 2.6585815587371022, + "learning_rate": 1.5805885468137329e-07, + "loss": 0.1908, + "step": 13281 + }, + { + "epoch": 0.9222330231912235, + "grad_norm": 4.11864478898156, + "learning_rate": 1.577784761733414e-07, + "loss": 0.4444, + "step": 13282 + }, + { + "epoch": 0.9223024579919455, + "grad_norm": 3.553709735341548, + "learning_rate": 1.5749834257939523e-07, + "loss": 0.2517, + "step": 13283 + }, + { + "epoch": 0.9223718927926677, + "grad_norm": 11.038280186854696, + "learning_rate": 1.5721845391370394e-07, + "loss": 0.5494, + "step": 13284 + }, + { + "epoch": 0.9224413275933898, + "grad_norm": 7.965257479315802, + "learning_rate": 1.5693881019042235e-07, + "loss": 0.3387, + "step": 13285 + }, + { + "epoch": 0.9225107623941119, + "grad_norm": 4.222757466805156, + "learning_rate": 1.566594114236958e-07, + "loss": 0.383, + "step": 13286 + }, + { + "epoch": 0.9225801971948341, + "grad_norm": 3.6959150346619967, + "learning_rate": 1.5638025762765518e-07, + "loss": 0.3647, + "step": 13287 + }, + { + "epoch": 0.9226496319955562, + "grad_norm": 4.193364213624366, + "learning_rate": 1.561013488164209e-07, + "loss": 0.4148, + "step": 13288 + }, + { + "epoch": 0.9227190667962782, + "grad_norm": 4.912501551946821, + "learning_rate": 1.5582268500409893e-07, + "loss": 0.5047, + "step": 13289 + }, + { + "epoch": 0.9227885015970004, + "grad_norm": 3.964488784649308, + "learning_rate": 1.5554426620478402e-07, + "loss": 0.4374, + "step": 13290 + }, + { + "epoch": 0.9228579363977225, + "grad_norm": 3.3647218424063112, + "learning_rate": 1.552660924325583e-07, + "loss": 0.4019, + "step": 13291 + }, + { + "epoch": 
0.9229273711984447, + "grad_norm": 4.651062096247276, + "learning_rate": 1.5498816370149105e-07, + "loss": 0.5114, + "step": 13292 + }, + { + "epoch": 0.9229968059991668, + "grad_norm": 11.359765893872696, + "learning_rate": 1.547104800256405e-07, + "loss": 0.3849, + "step": 13293 + }, + { + "epoch": 0.9230662407998889, + "grad_norm": 4.297665333684869, + "learning_rate": 1.5443304141905092e-07, + "loss": 0.5264, + "step": 13294 + }, + { + "epoch": 0.923135675600611, + "grad_norm": 3.2419756232215753, + "learning_rate": 1.5415584789575444e-07, + "loss": 0.2964, + "step": 13295 + }, + { + "epoch": 0.9232051104013331, + "grad_norm": 3.8380765077570347, + "learning_rate": 1.5387889946977207e-07, + "loss": 0.6351, + "step": 13296 + }, + { + "epoch": 0.9232745452020553, + "grad_norm": 4.2556913476755565, + "learning_rate": 1.5360219615511097e-07, + "loss": 0.5869, + "step": 13297 + }, + { + "epoch": 0.9233439800027774, + "grad_norm": 3.2453627362161837, + "learning_rate": 1.5332573796576767e-07, + "loss": 0.2551, + "step": 13298 + }, + { + "epoch": 0.9234134148034995, + "grad_norm": 4.567420446415648, + "learning_rate": 1.5304952491572322e-07, + "loss": 0.4406, + "step": 13299 + }, + { + "epoch": 0.9234828496042217, + "grad_norm": 3.7246895345484963, + "learning_rate": 1.5277355701894924e-07, + "loss": 0.314, + "step": 13300 + }, + { + "epoch": 0.9235522844049437, + "grad_norm": 4.084053511806754, + "learning_rate": 1.5249783428940456e-07, + "loss": 0.3632, + "step": 13301 + }, + { + "epoch": 0.9236217192056658, + "grad_norm": 2.898323971987378, + "learning_rate": 1.5222235674103304e-07, + "loss": 0.2198, + "step": 13302 + }, + { + "epoch": 0.923691154006388, + "grad_norm": 3.710216119772604, + "learning_rate": 1.5194712438776904e-07, + "loss": 0.4989, + "step": 13303 + }, + { + "epoch": 0.9237605888071101, + "grad_norm": 3.64956909979816, + "learning_rate": 1.5167213724353426e-07, + "loss": 0.2736, + "step": 13304 + }, + { + "epoch": 0.9238300236078323, + "grad_norm": 3.9450867521006434, + "learning_rate": 1.5139739532223642e-07, + "loss": 0.4004, + "step": 13305 + }, + { + "epoch": 0.9238994584085544, + "grad_norm": 3.7613422047895995, + "learning_rate": 1.5112289863777163e-07, + "loss": 0.3616, + "step": 13306 + }, + { + "epoch": 0.9239688932092764, + "grad_norm": 5.211063469598256, + "learning_rate": 1.508486472040238e-07, + "loss": 0.4765, + "step": 13307 + }, + { + "epoch": 0.9240383280099986, + "grad_norm": 4.061588904009952, + "learning_rate": 1.505746410348635e-07, + "loss": 0.3618, + "step": 13308 + }, + { + "epoch": 0.9241077628107207, + "grad_norm": 4.531682732207523, + "learning_rate": 1.5030088014415133e-07, + "loss": 0.5235, + "step": 13309 + }, + { + "epoch": 0.9241771976114429, + "grad_norm": 3.1638927643588213, + "learning_rate": 1.5002736454573287e-07, + "loss": 0.2853, + "step": 13310 + }, + { + "epoch": 0.924246632412165, + "grad_norm": 3.8604299931400887, + "learning_rate": 1.497540942534409e-07, + "loss": 0.3547, + "step": 13311 + }, + { + "epoch": 0.9243160672128871, + "grad_norm": 3.348296392298224, + "learning_rate": 1.4948106928109995e-07, + "loss": 0.248, + "step": 13312 + }, + { + "epoch": 0.9243855020136093, + "grad_norm": 3.8796676490964517, + "learning_rate": 1.4920828964251733e-07, + "loss": 0.3375, + "step": 13313 + }, + { + "epoch": 0.9244549368143313, + "grad_norm": 3.632342457620517, + "learning_rate": 1.4893575535148973e-07, + "loss": 0.3273, + "step": 13314 + }, + { + "epoch": 0.9245243716150535, + "grad_norm": 3.4475054608039644, + "learning_rate": 
1.4866346642180286e-07, + "loss": 0.4601, + "step": 13315 + }, + { + "epoch": 0.9245938064157756, + "grad_norm": 4.380433506883066, + "learning_rate": 1.483914228672284e-07, + "loss": 0.4999, + "step": 13316 + }, + { + "epoch": 0.9246632412164977, + "grad_norm": 2.5281999448329056, + "learning_rate": 1.4811962470152485e-07, + "loss": 0.1931, + "step": 13317 + }, + { + "epoch": 0.9247326760172199, + "grad_norm": 2.913557164250303, + "learning_rate": 1.478480719384412e-07, + "loss": 0.2598, + "step": 13318 + }, + { + "epoch": 0.924802110817942, + "grad_norm": 4.6090760493367915, + "learning_rate": 1.4757676459171145e-07, + "loss": 0.492, + "step": 13319 + }, + { + "epoch": 0.924871545618664, + "grad_norm": 3.0279305150821787, + "learning_rate": 1.4730570267505796e-07, + "loss": 0.2288, + "step": 13320 + }, + { + "epoch": 0.9249409804193862, + "grad_norm": 4.103128710965601, + "learning_rate": 1.4703488620219087e-07, + "loss": 0.3181, + "step": 13321 + }, + { + "epoch": 0.9250104152201083, + "grad_norm": 3.7922170834422384, + "learning_rate": 1.4676431518680757e-07, + "loss": 0.4872, + "step": 13322 + }, + { + "epoch": 0.9250798500208305, + "grad_norm": 4.4807413462046295, + "learning_rate": 1.4649398964259376e-07, + "loss": 0.5223, + "step": 13323 + }, + { + "epoch": 0.9251492848215526, + "grad_norm": 3.448960407105857, + "learning_rate": 1.4622390958322185e-07, + "loss": 0.3404, + "step": 13324 + }, + { + "epoch": 0.9252187196222746, + "grad_norm": 3.8986114075130383, + "learning_rate": 1.4595407502235203e-07, + "loss": 0.4343, + "step": 13325 + }, + { + "epoch": 0.9252881544229968, + "grad_norm": 3.6105242861733755, + "learning_rate": 1.4568448597363227e-07, + "loss": 0.3114, + "step": 13326 + }, + { + "epoch": 0.9253575892237189, + "grad_norm": 3.6050439825245832, + "learning_rate": 1.4541514245069776e-07, + "loss": 0.419, + "step": 13327 + }, + { + "epoch": 0.9254270240244411, + "grad_norm": 4.664839831309387, + "learning_rate": 1.4514604446717207e-07, + "loss": 0.4244, + "step": 13328 + }, + { + "epoch": 0.9254964588251632, + "grad_norm": 3.8118731179601166, + "learning_rate": 1.448771920366665e-07, + "loss": 0.2723, + "step": 13329 + }, + { + "epoch": 0.9255658936258853, + "grad_norm": 4.338576541188535, + "learning_rate": 1.4460858517277855e-07, + "loss": 0.3205, + "step": 13330 + }, + { + "epoch": 0.9256353284266075, + "grad_norm": 3.4887750455650575, + "learning_rate": 1.4434022388909342e-07, + "loss": 0.3753, + "step": 13331 + }, + { + "epoch": 0.9257047632273295, + "grad_norm": 3.9332030672595106, + "learning_rate": 1.4407210819918582e-07, + "loss": 0.3537, + "step": 13332 + }, + { + "epoch": 0.9257741980280516, + "grad_norm": 2.361709121622135, + "learning_rate": 1.4380423811661603e-07, + "loss": 0.2156, + "step": 13333 + }, + { + "epoch": 0.9258436328287738, + "grad_norm": 4.921957634251678, + "learning_rate": 1.4353661365493265e-07, + "loss": 0.3616, + "step": 13334 + }, + { + "epoch": 0.9259130676294959, + "grad_norm": 4.662194249980464, + "learning_rate": 1.4326923482767153e-07, + "loss": 0.6619, + "step": 13335 + }, + { + "epoch": 0.9259825024302181, + "grad_norm": 3.514248205787015, + "learning_rate": 1.4300210164835737e-07, + "loss": 0.4691, + "step": 13336 + }, + { + "epoch": 0.9260519372309401, + "grad_norm": 4.317618879115888, + "learning_rate": 1.427352141305005e-07, + "loss": 0.3852, + "step": 13337 + }, + { + "epoch": 0.9261213720316622, + "grad_norm": 3.3425766621548973, + "learning_rate": 1.4246857228760015e-07, + "loss": 0.4199, + "step": 13338 + }, + { + "epoch": 
0.9261908068323844, + "grad_norm": 2.3232312131200716, + "learning_rate": 1.4220217613314325e-07, + "loss": 0.1737, + "step": 13339 + }, + { + "epoch": 0.9262602416331065, + "grad_norm": 8.01487637849613, + "learning_rate": 1.4193602568060182e-07, + "loss": 0.3074, + "step": 13340 + }, + { + "epoch": 0.9263296764338287, + "grad_norm": 3.449634575308138, + "learning_rate": 1.4167012094344068e-07, + "loss": 0.3382, + "step": 13341 + }, + { + "epoch": 0.9263991112345508, + "grad_norm": 4.410114519094716, + "learning_rate": 1.4140446193510682e-07, + "loss": 0.322, + "step": 13342 + }, + { + "epoch": 0.9264685460352728, + "grad_norm": 3.4394082796744736, + "learning_rate": 1.4113904866903672e-07, + "loss": 0.379, + "step": 13343 + }, + { + "epoch": 0.926537980835995, + "grad_norm": 3.6781233059270493, + "learning_rate": 1.408738811586563e-07, + "loss": 0.3576, + "step": 13344 + }, + { + "epoch": 0.9266074156367171, + "grad_norm": 4.3628366633387365, + "learning_rate": 1.4060895941737596e-07, + "loss": 0.6982, + "step": 13345 + }, + { + "epoch": 0.9266768504374392, + "grad_norm": 3.9871849731194287, + "learning_rate": 1.4034428345859496e-07, + "loss": 0.2542, + "step": 13346 + }, + { + "epoch": 0.9267462852381614, + "grad_norm": 5.942378573209138, + "learning_rate": 1.4007985329570205e-07, + "loss": 0.6101, + "step": 13347 + }, + { + "epoch": 0.9268157200388835, + "grad_norm": 4.346096340257077, + "learning_rate": 1.3981566894207044e-07, + "loss": 0.5666, + "step": 13348 + }, + { + "epoch": 0.9268851548396057, + "grad_norm": 4.280100559377925, + "learning_rate": 1.3955173041106274e-07, + "loss": 0.5099, + "step": 13349 + }, + { + "epoch": 0.9269545896403277, + "grad_norm": 3.328453778222543, + "learning_rate": 1.392880377160283e-07, + "loss": 0.2706, + "step": 13350 + }, + { + "epoch": 0.9270240244410498, + "grad_norm": 3.8451592399104157, + "learning_rate": 1.390245908703053e-07, + "loss": 0.4853, + "step": 13351 + }, + { + "epoch": 0.927093459241772, + "grad_norm": 4.248739999242768, + "learning_rate": 1.38761389887217e-07, + "loss": 0.5379, + "step": 13352 + }, + { + "epoch": 0.9271628940424941, + "grad_norm": 4.279608584608455, + "learning_rate": 1.3849843478007773e-07, + "loss": 0.6773, + "step": 13353 + }, + { + "epoch": 0.9272323288432163, + "grad_norm": 8.97334313142825, + "learning_rate": 1.3823572556218578e-07, + "loss": 0.1931, + "step": 13354 + }, + { + "epoch": 0.9273017636439383, + "grad_norm": 3.333794703994239, + "learning_rate": 1.3797326224682938e-07, + "loss": 0.3352, + "step": 13355 + }, + { + "epoch": 0.9273711984446604, + "grad_norm": 3.0496757173581077, + "learning_rate": 1.3771104484728405e-07, + "loss": 0.3036, + "step": 13356 + }, + { + "epoch": 0.9274406332453826, + "grad_norm": 4.705592693072218, + "learning_rate": 1.3744907337681134e-07, + "loss": 0.4031, + "step": 13357 + }, + { + "epoch": 0.9275100680461047, + "grad_norm": 3.5077775476340896, + "learning_rate": 1.3718734784866238e-07, + "loss": 0.3633, + "step": 13358 + }, + { + "epoch": 0.9275795028468268, + "grad_norm": 3.7177010346117574, + "learning_rate": 1.3692586827607545e-07, + "loss": 0.3666, + "step": 13359 + }, + { + "epoch": 0.927648937647549, + "grad_norm": 4.018276430880449, + "learning_rate": 1.3666463467227386e-07, + "loss": 0.3882, + "step": 13360 + }, + { + "epoch": 0.927718372448271, + "grad_norm": 3.4497912868772227, + "learning_rate": 1.364036470504726e-07, + "loss": 0.3745, + "step": 13361 + }, + { + "epoch": 0.9277878072489932, + "grad_norm": 3.667429090884349, + "learning_rate": 
1.361429054238722e-07, + "loss": 0.3754, + "step": 13362 + }, + { + "epoch": 0.9278572420497153, + "grad_norm": 4.0263715034177805, + "learning_rate": 1.3588240980565825e-07, + "loss": 0.3888, + "step": 13363 + }, + { + "epoch": 0.9279266768504374, + "grad_norm": 3.0923393133320016, + "learning_rate": 1.3562216020900852e-07, + "loss": 0.2734, + "step": 13364 + }, + { + "epoch": 0.9279961116511596, + "grad_norm": 4.676160870650495, + "learning_rate": 1.3536215664708585e-07, + "loss": 0.4859, + "step": 13365 + }, + { + "epoch": 0.9280655464518817, + "grad_norm": 3.1269741153218304, + "learning_rate": 1.3510239913304024e-07, + "loss": 0.1929, + "step": 13366 + }, + { + "epoch": 0.9281349812526039, + "grad_norm": 4.743728173270907, + "learning_rate": 1.348428876800101e-07, + "loss": 0.4814, + "step": 13367 + }, + { + "epoch": 0.9282044160533259, + "grad_norm": 3.918670705184777, + "learning_rate": 1.345836223011221e-07, + "loss": 0.2752, + "step": 13368 + }, + { + "epoch": 0.928273850854048, + "grad_norm": 4.015663948749614, + "learning_rate": 1.34324603009488e-07, + "loss": 0.4704, + "step": 13369 + }, + { + "epoch": 0.9283432856547702, + "grad_norm": 3.9021087013061253, + "learning_rate": 1.3406582981821016e-07, + "loss": 0.4257, + "step": 13370 + }, + { + "epoch": 0.9284127204554923, + "grad_norm": 4.300624593773659, + "learning_rate": 1.3380730274037634e-07, + "loss": 0.4127, + "step": 13371 + }, + { + "epoch": 0.9284821552562145, + "grad_norm": 4.149234118757186, + "learning_rate": 1.3354902178906227e-07, + "loss": 0.5341, + "step": 13372 + }, + { + "epoch": 0.9285515900569365, + "grad_norm": 4.153791252283487, + "learning_rate": 1.33290986977333e-07, + "loss": 0.6317, + "step": 13373 + }, + { + "epoch": 0.9286210248576586, + "grad_norm": 4.423569173936873, + "learning_rate": 1.3303319831823759e-07, + "loss": 0.4995, + "step": 13374 + }, + { + "epoch": 0.9286904596583808, + "grad_norm": 4.210045115366354, + "learning_rate": 1.3277565582481555e-07, + "loss": 0.4809, + "step": 13375 + }, + { + "epoch": 0.9287598944591029, + "grad_norm": 6.279177272831838, + "learning_rate": 1.3251835951009428e-07, + "loss": 0.9155, + "step": 13376 + }, + { + "epoch": 0.928829329259825, + "grad_norm": 4.056269349944527, + "learning_rate": 1.3226130938708504e-07, + "loss": 0.3655, + "step": 13377 + }, + { + "epoch": 0.9288987640605472, + "grad_norm": 4.256605815422928, + "learning_rate": 1.3200450546879129e-07, + "loss": 0.4373, + "step": 13378 + }, + { + "epoch": 0.9289681988612692, + "grad_norm": 3.4137491845716905, + "learning_rate": 1.3174794776820154e-07, + "loss": 0.4286, + "step": 13379 + }, + { + "epoch": 0.9290376336619914, + "grad_norm": 3.3266658383358365, + "learning_rate": 1.31491636298291e-07, + "loss": 0.3604, + "step": 13380 + }, + { + "epoch": 0.9291070684627135, + "grad_norm": 3.6086989314861846, + "learning_rate": 1.3123557107202478e-07, + "loss": 0.3309, + "step": 13381 + }, + { + "epoch": 0.9291765032634356, + "grad_norm": 3.306958706280662, + "learning_rate": 1.3097975210235424e-07, + "loss": 0.2856, + "step": 13382 + }, + { + "epoch": 0.9292459380641578, + "grad_norm": 5.032928133386874, + "learning_rate": 1.3072417940221792e-07, + "loss": 0.5646, + "step": 13383 + }, + { + "epoch": 0.9293153728648799, + "grad_norm": 4.713520359021732, + "learning_rate": 1.304688529845427e-07, + "loss": 0.58, + "step": 13384 + }, + { + "epoch": 0.929384807665602, + "grad_norm": 4.509716528398813, + "learning_rate": 1.302137728622427e-07, + "loss": 0.4278, + "step": 13385 + }, + { + "epoch": 
0.9294542424663241, + "grad_norm": 3.986201257290607, + "learning_rate": 1.299589390482192e-07, + "loss": 0.4246, + "step": 13386 + }, + { + "epoch": 0.9295236772670462, + "grad_norm": 4.151068229343168, + "learning_rate": 1.2970435155536198e-07, + "loss": 0.3676, + "step": 13387 + }, + { + "epoch": 0.9295931120677684, + "grad_norm": 4.544722756172872, + "learning_rate": 1.2945001039654792e-07, + "loss": 0.539, + "step": 13388 + }, + { + "epoch": 0.9296625468684905, + "grad_norm": 5.739027037458772, + "learning_rate": 1.2919591558464006e-07, + "loss": 0.6711, + "step": 13389 + }, + { + "epoch": 0.9297319816692126, + "grad_norm": 3.6361227206766826, + "learning_rate": 1.2894206713249146e-07, + "loss": 0.4703, + "step": 13390 + }, + { + "epoch": 0.9298014164699347, + "grad_norm": 2.4565688423590992, + "learning_rate": 1.2868846505294185e-07, + "loss": 0.2294, + "step": 13391 + }, + { + "epoch": 0.9298708512706568, + "grad_norm": 3.286327270538481, + "learning_rate": 1.2843510935881655e-07, + "loss": 0.2578, + "step": 13392 + }, + { + "epoch": 0.929940286071379, + "grad_norm": 3.128018898931536, + "learning_rate": 1.281820000629308e-07, + "loss": 0.2529, + "step": 13393 + }, + { + "epoch": 0.9300097208721011, + "grad_norm": 4.21390290092579, + "learning_rate": 1.2792913717808775e-07, + "loss": 0.3177, + "step": 13394 + }, + { + "epoch": 0.9300791556728232, + "grad_norm": 5.478445347797341, + "learning_rate": 1.2767652071707437e-07, + "loss": 0.4805, + "step": 13395 + }, + { + "epoch": 0.9301485904735454, + "grad_norm": 3.7802128615196713, + "learning_rate": 1.2742415069267044e-07, + "loss": 0.4558, + "step": 13396 + }, + { + "epoch": 0.9302180252742674, + "grad_norm": 4.064468842600926, + "learning_rate": 1.2717202711763854e-07, + "loss": 0.4456, + "step": 13397 + }, + { + "epoch": 0.9302874600749896, + "grad_norm": 3.531028681754328, + "learning_rate": 1.2692015000473178e-07, + "loss": 0.2084, + "step": 13398 + }, + { + "epoch": 0.9303568948757117, + "grad_norm": 4.249615000533118, + "learning_rate": 1.2666851936668944e-07, + "loss": 0.4942, + "step": 13399 + }, + { + "epoch": 0.9304263296764338, + "grad_norm": 3.578678679234296, + "learning_rate": 1.264171352162391e-07, + "loss": 0.3868, + "step": 13400 + }, + { + "epoch": 0.930495764477156, + "grad_norm": 5.720178640214364, + "learning_rate": 1.2616599756609505e-07, + "loss": 0.4775, + "step": 13401 + }, + { + "epoch": 0.9305651992778781, + "grad_norm": 4.845708447135187, + "learning_rate": 1.259151064289593e-07, + "loss": 0.52, + "step": 13402 + }, + { + "epoch": 0.9306346340786001, + "grad_norm": 4.212503304641778, + "learning_rate": 1.2566446181752289e-07, + "loss": 0.5581, + "step": 13403 + }, + { + "epoch": 0.9307040688793223, + "grad_norm": 2.386083942465921, + "learning_rate": 1.2541406374446118e-07, + "loss": 0.1476, + "step": 13404 + }, + { + "epoch": 0.9307735036800444, + "grad_norm": 4.18141908470409, + "learning_rate": 1.2516391222244127e-07, + "loss": 0.4586, + "step": 13405 + }, + { + "epoch": 0.9308429384807666, + "grad_norm": 3.5163799426976374, + "learning_rate": 1.249140072641142e-07, + "loss": 0.3246, + "step": 13406 + }, + { + "epoch": 0.9309123732814887, + "grad_norm": 4.178001038043437, + "learning_rate": 1.246643488821192e-07, + "loss": 0.468, + "step": 13407 + }, + { + "epoch": 0.9309818080822108, + "grad_norm": 5.642762921846356, + "learning_rate": 1.244149370890857e-07, + "loss": 0.563, + "step": 13408 + }, + { + "epoch": 0.931051242882933, + "grad_norm": 3.048448747338096, + "learning_rate": 
1.2416577189762746e-07, + "loss": 0.2191, + "step": 13409 + }, + { + "epoch": 0.931120677683655, + "grad_norm": 3.2086226890240916, + "learning_rate": 1.2391685332034665e-07, + "loss": 0.2427, + "step": 13410 + }, + { + "epoch": 0.9311901124843772, + "grad_norm": 3.4641191975310344, + "learning_rate": 1.2366818136983427e-07, + "loss": 0.3698, + "step": 13411 + }, + { + "epoch": 0.9312595472850993, + "grad_norm": 3.545007994079871, + "learning_rate": 1.2341975605866695e-07, + "loss": 0.2668, + "step": 13412 + }, + { + "epoch": 0.9313289820858214, + "grad_norm": 5.804644124989576, + "learning_rate": 1.2317157739941022e-07, + "loss": 0.4835, + "step": 13413 + }, + { + "epoch": 0.9313984168865436, + "grad_norm": 4.515875968365035, + "learning_rate": 1.2292364540461677e-07, + "loss": 0.5697, + "step": 13414 + }, + { + "epoch": 0.9314678516872656, + "grad_norm": 3.1315653553394527, + "learning_rate": 1.2267596008682603e-07, + "loss": 0.2375, + "step": 13415 + }, + { + "epoch": 0.9315372864879877, + "grad_norm": 3.3184663759057296, + "learning_rate": 1.2242852145856688e-07, + "loss": 0.2525, + "step": 13416 + }, + { + "epoch": 0.9316067212887099, + "grad_norm": 3.424081520205238, + "learning_rate": 1.2218132953235317e-07, + "loss": 0.3291, + "step": 13417 + }, + { + "epoch": 0.931676156089432, + "grad_norm": 3.813937944151166, + "learning_rate": 1.219343843206877e-07, + "loss": 0.3237, + "step": 13418 + }, + { + "epoch": 0.9317455908901542, + "grad_norm": 3.661951809017704, + "learning_rate": 1.2168768583606273e-07, + "loss": 0.4094, + "step": 13419 + }, + { + "epoch": 0.9318150256908763, + "grad_norm": 3.929789939462007, + "learning_rate": 1.2144123409095377e-07, + "loss": 0.2826, + "step": 13420 + }, + { + "epoch": 0.9318844604915983, + "grad_norm": 10.832490776522803, + "learning_rate": 1.2119502909782587e-07, + "loss": 0.5107, + "step": 13421 + }, + { + "epoch": 0.9319538952923205, + "grad_norm": 5.956124979801653, + "learning_rate": 1.2094907086913354e-07, + "loss": 0.4399, + "step": 13422 + }, + { + "epoch": 0.9320233300930426, + "grad_norm": 2.8859685636733388, + "learning_rate": 1.2070335941731627e-07, + "loss": 0.2168, + "step": 13423 + }, + { + "epoch": 0.9320927648937648, + "grad_norm": 3.554188485274006, + "learning_rate": 1.204578947548013e-07, + "loss": 0.3635, + "step": 13424 + }, + { + "epoch": 0.9321621996944869, + "grad_norm": 8.690797818111259, + "learning_rate": 1.202126768940043e-07, + "loss": 0.3039, + "step": 13425 + }, + { + "epoch": 0.932231634495209, + "grad_norm": 3.701417700911383, + "learning_rate": 1.199677058473292e-07, + "loss": 0.3535, + "step": 13426 + }, + { + "epoch": 0.9323010692959312, + "grad_norm": 4.498592734422018, + "learning_rate": 1.1972298162716445e-07, + "loss": 0.4338, + "step": 13427 + }, + { + "epoch": 0.9323705040966532, + "grad_norm": 3.516900149016874, + "learning_rate": 1.1947850424588901e-07, + "loss": 0.3017, + "step": 13428 + }, + { + "epoch": 0.9324399388973754, + "grad_norm": 4.979195014796428, + "learning_rate": 1.192342737158686e-07, + "loss": 0.5719, + "step": 13429 + }, + { + "epoch": 0.9325093736980975, + "grad_norm": 3.591473416861948, + "learning_rate": 1.189902900494555e-07, + "loss": 0.435, + "step": 13430 + }, + { + "epoch": 0.9325788084988196, + "grad_norm": 3.8272588083386583, + "learning_rate": 1.1874655325898987e-07, + "loss": 0.545, + "step": 13431 + }, + { + "epoch": 0.9326482432995418, + "grad_norm": 3.195703194198509, + "learning_rate": 1.1850306335680073e-07, + "loss": 0.2745, + "step": 13432 + }, + { + "epoch": 
0.9327176781002638, + "grad_norm": 3.9146850941400584, + "learning_rate": 1.1825982035520211e-07, + "loss": 0.3814, + "step": 13433 + }, + { + "epoch": 0.9327871129009859, + "grad_norm": 6.145934325374261, + "learning_rate": 1.1801682426649863e-07, + "loss": 0.3428, + "step": 13434 + }, + { + "epoch": 0.9328565477017081, + "grad_norm": 3.058108617903866, + "learning_rate": 1.1777407510297934e-07, + "loss": 0.2451, + "step": 13435 + }, + { + "epoch": 0.9329259825024302, + "grad_norm": 3.8314845893129066, + "learning_rate": 1.1753157287692218e-07, + "loss": 0.4413, + "step": 13436 + }, + { + "epoch": 0.9329954173031524, + "grad_norm": 4.860471434691263, + "learning_rate": 1.1728931760059403e-07, + "loss": 0.5077, + "step": 13437 + }, + { + "epoch": 0.9330648521038745, + "grad_norm": 4.360127232377627, + "learning_rate": 1.170473092862462e-07, + "loss": 0.4182, + "step": 13438 + }, + { + "epoch": 0.9331342869045965, + "grad_norm": 4.01883914287503, + "learning_rate": 1.1680554794612053e-07, + "loss": 0.4122, + "step": 13439 + }, + { + "epoch": 0.9332037217053187, + "grad_norm": 3.5192261733944483, + "learning_rate": 1.1656403359244506e-07, + "loss": 0.2713, + "step": 13440 + }, + { + "epoch": 0.9332731565060408, + "grad_norm": 4.759885201869912, + "learning_rate": 1.1632276623743388e-07, + "loss": 0.5096, + "step": 13441 + }, + { + "epoch": 0.933342591306763, + "grad_norm": 4.484439959724089, + "learning_rate": 1.1608174589329169e-07, + "loss": 0.4654, + "step": 13442 + }, + { + "epoch": 0.9334120261074851, + "grad_norm": 5.359800097867683, + "learning_rate": 1.158409725722076e-07, + "loss": 0.5306, + "step": 13443 + }, + { + "epoch": 0.9334814609082072, + "grad_norm": 2.883298355208876, + "learning_rate": 1.156004462863608e-07, + "loss": 0.2709, + "step": 13444 + }, + { + "epoch": 0.9335508957089294, + "grad_norm": 3.672999199793784, + "learning_rate": 1.1536016704791653e-07, + "loss": 0.2555, + "step": 13445 + }, + { + "epoch": 0.9336203305096514, + "grad_norm": 4.696010194572382, + "learning_rate": 1.151201348690273e-07, + "loss": 0.4557, + "step": 13446 + }, + { + "epoch": 0.9336897653103735, + "grad_norm": 4.325958261413924, + "learning_rate": 1.1488034976183337e-07, + "loss": 0.4916, + "step": 13447 + }, + { + "epoch": 0.9337592001110957, + "grad_norm": 4.000456079808372, + "learning_rate": 1.1464081173846453e-07, + "loss": 0.4493, + "step": 13448 + }, + { + "epoch": 0.9338286349118178, + "grad_norm": 5.860291211382361, + "learning_rate": 1.1440152081103495e-07, + "loss": 0.6867, + "step": 13449 + }, + { + "epoch": 0.93389806971254, + "grad_norm": 3.4304650129905254, + "learning_rate": 1.1416247699164774e-07, + "loss": 0.262, + "step": 13450 + }, + { + "epoch": 0.933967504513262, + "grad_norm": 4.324414033478601, + "learning_rate": 1.1392368029239432e-07, + "loss": 0.5765, + "step": 13451 + }, + { + "epoch": 0.9340369393139841, + "grad_norm": 4.1090117819007945, + "learning_rate": 1.136851307253517e-07, + "loss": 0.6206, + "step": 13452 + }, + { + "epoch": 0.9341063741147063, + "grad_norm": 22.70521511503073, + "learning_rate": 1.134468283025858e-07, + "loss": 0.446, + "step": 13453 + }, + { + "epoch": 0.9341758089154284, + "grad_norm": 3.1375896847645017, + "learning_rate": 1.1320877303614974e-07, + "loss": 0.1845, + "step": 13454 + }, + { + "epoch": 0.9342452437161506, + "grad_norm": 3.9366941394620176, + "learning_rate": 1.1297096493808502e-07, + "loss": 0.5268, + "step": 13455 + }, + { + "epoch": 0.9343146785168727, + "grad_norm": 4.727456453335802, + "learning_rate": 
1.127334040204181e-07, + "loss": 0.4459, + "step": 13456 + }, + { + "epoch": 0.9343841133175947, + "grad_norm": 3.5436875273224477, + "learning_rate": 1.124960902951655e-07, + "loss": 0.2426, + "step": 13457 + }, + { + "epoch": 0.9344535481183169, + "grad_norm": 4.060824085443516, + "learning_rate": 1.122590237743304e-07, + "loss": 0.5865, + "step": 13458 + }, + { + "epoch": 0.934522982919039, + "grad_norm": 4.592407023884917, + "learning_rate": 1.1202220446990264e-07, + "loss": 0.4623, + "step": 13459 + }, + { + "epoch": 0.9345924177197611, + "grad_norm": 4.654510570030044, + "learning_rate": 1.1178563239386042e-07, + "loss": 0.4249, + "step": 13460 + }, + { + "epoch": 0.9346618525204833, + "grad_norm": 3.3427191398895735, + "learning_rate": 1.1154930755816973e-07, + "loss": 0.4212, + "step": 13461 + }, + { + "epoch": 0.9347312873212054, + "grad_norm": 3.9900870445539063, + "learning_rate": 1.1131322997478378e-07, + "loss": 0.4292, + "step": 13462 + }, + { + "epoch": 0.9348007221219276, + "grad_norm": 3.6701892258213076, + "learning_rate": 1.1107739965564247e-07, + "loss": 0.4247, + "step": 13463 + }, + { + "epoch": 0.9348701569226496, + "grad_norm": 4.091622478104981, + "learning_rate": 1.1084181661267401e-07, + "loss": 0.3738, + "step": 13464 + }, + { + "epoch": 0.9349395917233717, + "grad_norm": 3.8968206805966217, + "learning_rate": 1.106064808577939e-07, + "loss": 0.3929, + "step": 13465 + }, + { + "epoch": 0.9350090265240939, + "grad_norm": 3.8833799006397878, + "learning_rate": 1.1037139240290595e-07, + "loss": 0.3367, + "step": 13466 + }, + { + "epoch": 0.935078461324816, + "grad_norm": 2.8563722383506667, + "learning_rate": 1.1013655125989897e-07, + "loss": 0.2624, + "step": 13467 + }, + { + "epoch": 0.9351478961255382, + "grad_norm": 4.840690267749723, + "learning_rate": 1.0990195744065235e-07, + "loss": 0.519, + "step": 13468 + }, + { + "epoch": 0.9352173309262602, + "grad_norm": 4.625550267150267, + "learning_rate": 1.096676109570316e-07, + "loss": 0.6056, + "step": 13469 + }, + { + "epoch": 0.9352867657269823, + "grad_norm": 4.352917068098696, + "learning_rate": 1.094335118208889e-07, + "loss": 0.5337, + "step": 13470 + }, + { + "epoch": 0.9353562005277045, + "grad_norm": 3.818749752352249, + "learning_rate": 1.0919966004406535e-07, + "loss": 0.49, + "step": 13471 + }, + { + "epoch": 0.9354256353284266, + "grad_norm": 7.667444718670821, + "learning_rate": 1.0896605563838814e-07, + "loss": 0.6346, + "step": 13472 + }, + { + "epoch": 0.9354950701291487, + "grad_norm": 2.9323667467715713, + "learning_rate": 1.0873269861567393e-07, + "loss": 0.19, + "step": 13473 + }, + { + "epoch": 0.9355645049298709, + "grad_norm": 4.248786566800276, + "learning_rate": 1.0849958898772495e-07, + "loss": 0.4926, + "step": 13474 + }, + { + "epoch": 0.935633939730593, + "grad_norm": 3.7745792531890037, + "learning_rate": 1.082667267663312e-07, + "loss": 0.2563, + "step": 13475 + }, + { + "epoch": 0.9357033745313151, + "grad_norm": 3.976548065217878, + "learning_rate": 1.0803411196327163e-07, + "loss": 0.4498, + "step": 13476 + }, + { + "epoch": 0.9357728093320372, + "grad_norm": 4.872525356625967, + "learning_rate": 1.0780174459031068e-07, + "loss": 0.4766, + "step": 13477 + }, + { + "epoch": 0.9358422441327593, + "grad_norm": 3.833434856429584, + "learning_rate": 1.0756962465920173e-07, + "loss": 0.3923, + "step": 13478 + }, + { + "epoch": 0.9359116789334815, + "grad_norm": 2.759833034624004, + "learning_rate": 1.073377521816843e-07, + "loss": 0.2756, + "step": 13479 + }, + { + "epoch": 
0.9359811137342036, + "grad_norm": 4.562036650345542, + "learning_rate": 1.0710612716948787e-07, + "loss": 0.4028, + "step": 13480 + }, + { + "epoch": 0.9360505485349258, + "grad_norm": 4.444843515264536, + "learning_rate": 1.0687474963432643e-07, + "loss": 0.5697, + "step": 13481 + }, + { + "epoch": 0.9361199833356478, + "grad_norm": 5.229619455561418, + "learning_rate": 1.0664361958790281e-07, + "loss": 0.2508, + "step": 13482 + }, + { + "epoch": 0.9361894181363699, + "grad_norm": 6.434617031694959, + "learning_rate": 1.064127370419088e-07, + "loss": 0.8769, + "step": 13483 + }, + { + "epoch": 0.9362588529370921, + "grad_norm": 4.577895472317831, + "learning_rate": 1.061821020080206e-07, + "loss": 0.5083, + "step": 13484 + }, + { + "epoch": 0.9363282877378142, + "grad_norm": 4.066058276183629, + "learning_rate": 1.0595171449790331e-07, + "loss": 0.3382, + "step": 13485 + }, + { + "epoch": 0.9363977225385363, + "grad_norm": 3.137949752217085, + "learning_rate": 1.0572157452321097e-07, + "loss": 0.4231, + "step": 13486 + }, + { + "epoch": 0.9364671573392584, + "grad_norm": 2.750542677603122, + "learning_rate": 1.0549168209558314e-07, + "loss": 0.237, + "step": 13487 + }, + { + "epoch": 0.9365365921399805, + "grad_norm": 2.7685388448366863, + "learning_rate": 1.0526203722664718e-07, + "loss": 0.1481, + "step": 13488 + }, + { + "epoch": 0.9366060269407027, + "grad_norm": 4.0841931261152435, + "learning_rate": 1.0503263992801883e-07, + "loss": 0.4172, + "step": 13489 + }, + { + "epoch": 0.9366754617414248, + "grad_norm": 4.029558062267512, + "learning_rate": 1.0480349021130043e-07, + "loss": 0.4543, + "step": 13490 + }, + { + "epoch": 0.9367448965421469, + "grad_norm": 5.334640452208785, + "learning_rate": 1.0457458808808218e-07, + "loss": 0.5332, + "step": 13491 + }, + { + "epoch": 0.9368143313428691, + "grad_norm": 5.748058963015037, + "learning_rate": 1.0434593356994205e-07, + "loss": 0.6989, + "step": 13492 + }, + { + "epoch": 0.9368837661435911, + "grad_norm": 4.104095274790957, + "learning_rate": 1.041175266684441e-07, + "loss": 0.5037, + "step": 13493 + }, + { + "epoch": 0.9369532009443133, + "grad_norm": 6.506968511149294, + "learning_rate": 1.0388936739514244e-07, + "loss": 0.4864, + "step": 13494 + }, + { + "epoch": 0.9370226357450354, + "grad_norm": 3.9416774335824765, + "learning_rate": 1.0366145576157561e-07, + "loss": 0.4411, + "step": 13495 + }, + { + "epoch": 0.9370920705457575, + "grad_norm": 5.431260756563639, + "learning_rate": 1.0343379177927216e-07, + "loss": 0.6711, + "step": 13496 + }, + { + "epoch": 0.9371615053464797, + "grad_norm": 5.361559508726321, + "learning_rate": 1.0320637545974565e-07, + "loss": 0.5056, + "step": 13497 + }, + { + "epoch": 0.9372309401472018, + "grad_norm": 4.041141580723797, + "learning_rate": 1.0297920681450135e-07, + "loss": 0.3833, + "step": 13498 + }, + { + "epoch": 0.937300374947924, + "grad_norm": 3.6765910103796267, + "learning_rate": 1.0275228585502561e-07, + "loss": 0.4067, + "step": 13499 + }, + { + "epoch": 0.937369809748646, + "grad_norm": 4.064952658681537, + "learning_rate": 1.0252561259279869e-07, + "loss": 0.4555, + "step": 13500 + }, + { + "epoch": 0.9374392445493681, + "grad_norm": 3.5411392097851335, + "learning_rate": 1.0229918703928476e-07, + "loss": 0.4584, + "step": 13501 + }, + { + "epoch": 0.9375086793500903, + "grad_norm": 4.4878025421574455, + "learning_rate": 1.0207300920593521e-07, + "loss": 0.5027, + "step": 13502 + }, + { + "epoch": 0.9375781141508124, + "grad_norm": 4.813316529581607, + "learning_rate": 
1.0184707910419034e-07, + "loss": 0.361, + "step": 13503 + }, + { + "epoch": 0.9376475489515345, + "grad_norm": 3.8300531472896817, + "learning_rate": 1.0162139674547767e-07, + "loss": 0.4098, + "step": 13504 + }, + { + "epoch": 0.9377169837522566, + "grad_norm": 3.84495921170896, + "learning_rate": 1.0139596214121194e-07, + "loss": 0.3125, + "step": 13505 + }, + { + "epoch": 0.9377864185529787, + "grad_norm": 3.7938559796447957, + "learning_rate": 1.0117077530279573e-07, + "loss": 0.3513, + "step": 13506 + }, + { + "epoch": 0.9378558533537009, + "grad_norm": 3.921435496293258, + "learning_rate": 1.009458362416177e-07, + "loss": 0.3348, + "step": 13507 + }, + { + "epoch": 0.937925288154423, + "grad_norm": 4.391843412748748, + "learning_rate": 1.0072114496905594e-07, + "loss": 0.5073, + "step": 13508 + }, + { + "epoch": 0.9379947229551451, + "grad_norm": 2.8221074311109873, + "learning_rate": 1.0049670149647472e-07, + "loss": 0.2055, + "step": 13509 + }, + { + "epoch": 0.9380641577558673, + "grad_norm": 3.783487249674332, + "learning_rate": 1.0027250583522608e-07, + "loss": 0.4454, + "step": 13510 + }, + { + "epoch": 0.9381335925565893, + "grad_norm": 3.781246958037558, + "learning_rate": 1.0004855799664925e-07, + "loss": 0.3285, + "step": 13511 + }, + { + "epoch": 0.9382030273573115, + "grad_norm": 4.1040878131815015, + "learning_rate": 9.982485799207297e-08, + "loss": 0.3678, + "step": 13512 + }, + { + "epoch": 0.9382724621580336, + "grad_norm": 4.162139424537777, + "learning_rate": 9.960140583280931e-08, + "loss": 0.3741, + "step": 13513 + }, + { + "epoch": 0.9383418969587557, + "grad_norm": 3.469408287752279, + "learning_rate": 9.937820153016142e-08, + "loss": 0.4232, + "step": 13514 + }, + { + "epoch": 0.9384113317594779, + "grad_norm": 4.556789795129862, + "learning_rate": 9.915524509541918e-08, + "loss": 0.2777, + "step": 13515 + }, + { + "epoch": 0.9384807665602, + "grad_norm": 3.489778945188901, + "learning_rate": 9.893253653985912e-08, + "loss": 0.2998, + "step": 13516 + }, + { + "epoch": 0.938550201360922, + "grad_norm": 4.285413433780867, + "learning_rate": 9.871007587474446e-08, + "loss": 0.3929, + "step": 13517 + }, + { + "epoch": 0.9386196361616442, + "grad_norm": 3.9591833749435175, + "learning_rate": 9.84878631113284e-08, + "loss": 0.3959, + "step": 13518 + }, + { + "epoch": 0.9386890709623663, + "grad_norm": 5.387392372611148, + "learning_rate": 9.826589826085031e-08, + "loss": 0.4413, + "step": 13519 + }, + { + "epoch": 0.9387585057630885, + "grad_norm": 2.8635993681352456, + "learning_rate": 9.804418133453619e-08, + "loss": 0.1934, + "step": 13520 + }, + { + "epoch": 0.9388279405638106, + "grad_norm": 4.866964687212404, + "learning_rate": 9.782271234359986e-08, + "loss": 0.5989, + "step": 13521 + }, + { + "epoch": 0.9388973753645327, + "grad_norm": 4.643351610670739, + "learning_rate": 9.760149129924402e-08, + "loss": 0.338, + "step": 13522 + }, + { + "epoch": 0.9389668101652549, + "grad_norm": 2.906174147518404, + "learning_rate": 9.738051821265748e-08, + "loss": 0.2335, + "step": 13523 + }, + { + "epoch": 0.9390362449659769, + "grad_norm": 3.5292706143830177, + "learning_rate": 9.715979309501633e-08, + "loss": 0.3101, + "step": 13524 + }, + { + "epoch": 0.9391056797666991, + "grad_norm": 4.745984302493065, + "learning_rate": 9.693931595748495e-08, + "loss": 0.4334, + "step": 13525 + }, + { + "epoch": 0.9391751145674212, + "grad_norm": 5.195262732164282, + "learning_rate": 9.6719086811215e-08, + "loss": 0.7283, + "step": 13526 + }, + { + "epoch": 0.9392445493681433, + 
"grad_norm": 2.9315489056034716, + "learning_rate": 9.649910566734533e-08, + "loss": 0.1775, + "step": 13527 + }, + { + "epoch": 0.9393139841688655, + "grad_norm": 3.4572051785343763, + "learning_rate": 9.627937253700148e-08, + "loss": 0.5215, + "step": 13528 + }, + { + "epoch": 0.9393834189695875, + "grad_norm": 4.312134973301884, + "learning_rate": 9.605988743129846e-08, + "loss": 0.3665, + "step": 13529 + }, + { + "epoch": 0.9394528537703096, + "grad_norm": 3.9660749592026545, + "learning_rate": 9.584065036133738e-08, + "loss": 0.434, + "step": 13530 + }, + { + "epoch": 0.9395222885710318, + "grad_norm": 4.941213570084439, + "learning_rate": 9.562166133820605e-08, + "loss": 0.5513, + "step": 13531 + }, + { + "epoch": 0.9395917233717539, + "grad_norm": 2.726472287545314, + "learning_rate": 9.54029203729817e-08, + "loss": 0.3021, + "step": 13532 + }, + { + "epoch": 0.9396611581724761, + "grad_norm": 3.0720416731053777, + "learning_rate": 9.518442747672884e-08, + "loss": 0.3117, + "step": 13533 + }, + { + "epoch": 0.9397305929731982, + "grad_norm": 3.187475375582225, + "learning_rate": 9.496618266049585e-08, + "loss": 0.3082, + "step": 13534 + }, + { + "epoch": 0.9398000277739202, + "grad_norm": 4.366750879342117, + "learning_rate": 9.474818593532387e-08, + "loss": 0.2929, + "step": 13535 + }, + { + "epoch": 0.9398694625746424, + "grad_norm": 3.7075092609496796, + "learning_rate": 9.453043731223744e-08, + "loss": 0.3566, + "step": 13536 + }, + { + "epoch": 0.9399388973753645, + "grad_norm": 4.5483399104013, + "learning_rate": 9.431293680225107e-08, + "loss": 0.301, + "step": 13537 + }, + { + "epoch": 0.9400083321760867, + "grad_norm": 2.677531624056893, + "learning_rate": 9.409568441636486e-08, + "loss": 0.1611, + "step": 13538 + }, + { + "epoch": 0.9400777669768088, + "grad_norm": 4.087009267678147, + "learning_rate": 9.387868016556778e-08, + "loss": 0.4068, + "step": 13539 + }, + { + "epoch": 0.9401472017775309, + "grad_norm": 5.262953592620084, + "learning_rate": 9.366192406083496e-08, + "loss": 0.6833, + "step": 13540 + }, + { + "epoch": 0.940216636578253, + "grad_norm": 3.8583115109245805, + "learning_rate": 9.344541611313097e-08, + "loss": 0.3615, + "step": 13541 + }, + { + "epoch": 0.9402860713789751, + "grad_norm": 3.554101766266527, + "learning_rate": 9.322915633340478e-08, + "loss": 0.3903, + "step": 13542 + }, + { + "epoch": 0.9403555061796972, + "grad_norm": 3.349736085075466, + "learning_rate": 9.301314473259548e-08, + "loss": 0.3232, + "step": 13543 + }, + { + "epoch": 0.9404249409804194, + "grad_norm": 3.43454475157128, + "learning_rate": 9.279738132162986e-08, + "loss": 0.3465, + "step": 13544 + }, + { + "epoch": 0.9404943757811415, + "grad_norm": 4.656818229672937, + "learning_rate": 9.258186611141861e-08, + "loss": 0.6034, + "step": 13545 + }, + { + "epoch": 0.9405638105818637, + "grad_norm": 4.311806484004886, + "learning_rate": 9.23665991128636e-08, + "loss": 0.4458, + "step": 13546 + }, + { + "epoch": 0.9406332453825857, + "grad_norm": 3.46112641615352, + "learning_rate": 9.215158033685279e-08, + "loss": 0.4616, + "step": 13547 + }, + { + "epoch": 0.9407026801833078, + "grad_norm": 6.5717914993356565, + "learning_rate": 9.193680979426189e-08, + "loss": 0.4539, + "step": 13548 + }, + { + "epoch": 0.94077211498403, + "grad_norm": 4.4687906266355055, + "learning_rate": 9.17222874959528e-08, + "loss": 0.3101, + "step": 13549 + }, + { + "epoch": 0.9408415497847521, + "grad_norm": 4.970795925362148, + "learning_rate": 9.150801345277627e-08, + "loss": 0.7092, + "step": 13550 
+ }, + { + "epoch": 0.9409109845854743, + "grad_norm": 4.488446301791286, + "learning_rate": 9.12939876755703e-08, + "loss": 0.6194, + "step": 13551 + }, + { + "epoch": 0.9409804193861964, + "grad_norm": 4.310892199117376, + "learning_rate": 9.108021017516011e-08, + "loss": 0.4281, + "step": 13552 + }, + { + "epoch": 0.9410498541869184, + "grad_norm": 4.119755353454573, + "learning_rate": 9.086668096235763e-08, + "loss": 0.3699, + "step": 13553 + }, + { + "epoch": 0.9411192889876406, + "grad_norm": 3.4131732832625707, + "learning_rate": 9.065340004796363e-08, + "loss": 0.3681, + "step": 13554 + }, + { + "epoch": 0.9411887237883627, + "grad_norm": 4.158828961030812, + "learning_rate": 9.044036744276563e-08, + "loss": 0.4205, + "step": 13555 + }, + { + "epoch": 0.9412581585890849, + "grad_norm": 4.227836134912753, + "learning_rate": 9.022758315753777e-08, + "loss": 0.4319, + "step": 13556 + }, + { + "epoch": 0.941327593389807, + "grad_norm": 3.5729394484009966, + "learning_rate": 9.001504720304255e-08, + "loss": 0.3436, + "step": 13557 + }, + { + "epoch": 0.9413970281905291, + "grad_norm": 3.606500028263425, + "learning_rate": 8.980275959003137e-08, + "loss": 0.4628, + "step": 13558 + }, + { + "epoch": 0.9414664629912513, + "grad_norm": 4.87273642752597, + "learning_rate": 8.959072032923955e-08, + "loss": 0.4831, + "step": 13559 + }, + { + "epoch": 0.9415358977919733, + "grad_norm": 3.2680077011066597, + "learning_rate": 8.937892943139293e-08, + "loss": 0.2821, + "step": 13560 + }, + { + "epoch": 0.9416053325926954, + "grad_norm": 5.912828145509751, + "learning_rate": 8.916738690720294e-08, + "loss": 0.4134, + "step": 13561 + }, + { + "epoch": 0.9416747673934176, + "grad_norm": 3.1993500918691784, + "learning_rate": 8.89560927673705e-08, + "loss": 0.2994, + "step": 13562 + }, + { + "epoch": 0.9417442021941397, + "grad_norm": 3.788071428517056, + "learning_rate": 8.874504702258035e-08, + "loss": 0.3698, + "step": 13563 + }, + { + "epoch": 0.9418136369948619, + "grad_norm": 4.688298209887736, + "learning_rate": 8.853424968350898e-08, + "loss": 0.4245, + "step": 13564 + }, + { + "epoch": 0.941883071795584, + "grad_norm": 4.308820410261503, + "learning_rate": 8.832370076081732e-08, + "loss": 0.4014, + "step": 13565 + }, + { + "epoch": 0.941952506596306, + "grad_norm": 4.5693004033117885, + "learning_rate": 8.811340026515514e-08, + "loss": 0.6363, + "step": 13566 + }, + { + "epoch": 0.9420219413970282, + "grad_norm": 3.283505656763116, + "learning_rate": 8.790334820715896e-08, + "loss": 0.3479, + "step": 13567 + }, + { + "epoch": 0.9420913761977503, + "grad_norm": 5.464350971884272, + "learning_rate": 8.769354459745305e-08, + "loss": 0.9185, + "step": 13568 + }, + { + "epoch": 0.9421608109984725, + "grad_norm": 3.9863662020789072, + "learning_rate": 8.748398944664893e-08, + "loss": 0.5141, + "step": 13569 + }, + { + "epoch": 0.9422302457991946, + "grad_norm": 4.719082102141583, + "learning_rate": 8.72746827653459e-08, + "loss": 0.4558, + "step": 13570 + }, + { + "epoch": 0.9422996805999166, + "grad_norm": 4.599825972209393, + "learning_rate": 8.706562456412993e-08, + "loss": 0.4936, + "step": 13571 + }, + { + "epoch": 0.9423691154006388, + "grad_norm": 5.569131630842515, + "learning_rate": 8.685681485357533e-08, + "loss": 0.4829, + "step": 13572 + }, + { + "epoch": 0.9424385502013609, + "grad_norm": 4.767376147783786, + "learning_rate": 8.664825364424423e-08, + "loss": 0.526, + "step": 13573 + }, + { + "epoch": 0.942507985002083, + "grad_norm": 2.891391760687909, + "learning_rate": 
8.643994094668373e-08, + "loss": 0.1826, + "step": 13574 + }, + { + "epoch": 0.9425774198028052, + "grad_norm": 4.463103628812413, + "learning_rate": 8.623187677143097e-08, + "loss": 0.4217, + "step": 13575 + }, + { + "epoch": 0.9426468546035273, + "grad_norm": 4.412614767505137, + "learning_rate": 8.602406112901029e-08, + "loss": 0.3742, + "step": 13576 + }, + { + "epoch": 0.9427162894042495, + "grad_norm": 4.148445632782792, + "learning_rate": 8.581649402993164e-08, + "loss": 0.6073, + "step": 13577 + }, + { + "epoch": 0.9427857242049715, + "grad_norm": 3.3867396065274824, + "learning_rate": 8.560917548469328e-08, + "loss": 0.4722, + "step": 13578 + }, + { + "epoch": 0.9428551590056936, + "grad_norm": 3.4053379611300403, + "learning_rate": 8.540210550378291e-08, + "loss": 0.3951, + "step": 13579 + }, + { + "epoch": 0.9429245938064158, + "grad_norm": 4.337626772882801, + "learning_rate": 8.519528409767219e-08, + "loss": 0.4722, + "step": 13580 + }, + { + "epoch": 0.9429940286071379, + "grad_norm": 5.557160679922473, + "learning_rate": 8.498871127682273e-08, + "loss": 0.5584, + "step": 13581 + }, + { + "epoch": 0.9430634634078601, + "grad_norm": 4.287362822264059, + "learning_rate": 8.47823870516823e-08, + "loss": 0.4428, + "step": 13582 + }, + { + "epoch": 0.9431328982085821, + "grad_norm": 3.9072783643594406, + "learning_rate": 8.457631143268696e-08, + "loss": 0.3192, + "step": 13583 + }, + { + "epoch": 0.9432023330093042, + "grad_norm": 3.2883531684664113, + "learning_rate": 8.437048443026008e-08, + "loss": 0.3507, + "step": 13584 + }, + { + "epoch": 0.9432717678100264, + "grad_norm": 5.314703373564217, + "learning_rate": 8.41649060548111e-08, + "loss": 0.7017, + "step": 13585 + }, + { + "epoch": 0.9433412026107485, + "grad_norm": 3.2570621722187645, + "learning_rate": 8.39595763167389e-08, + "loss": 0.2558, + "step": 13586 + }, + { + "epoch": 0.9434106374114706, + "grad_norm": 3.3661333885657276, + "learning_rate": 8.375449522642798e-08, + "loss": 0.3303, + "step": 13587 + }, + { + "epoch": 0.9434800722121928, + "grad_norm": 3.470174114466503, + "learning_rate": 8.35496627942517e-08, + "loss": 0.2422, + "step": 13588 + }, + { + "epoch": 0.9435495070129148, + "grad_norm": 3.878313362439, + "learning_rate": 8.33450790305701e-08, + "loss": 0.5227, + "step": 13589 + }, + { + "epoch": 0.943618941813637, + "grad_norm": 3.5809938880428884, + "learning_rate": 8.314074394573102e-08, + "loss": 0.441, + "step": 13590 + }, + { + "epoch": 0.9436883766143591, + "grad_norm": 4.175595098868105, + "learning_rate": 8.293665755006952e-08, + "loss": 0.3558, + "step": 13591 + }, + { + "epoch": 0.9437578114150812, + "grad_norm": 4.350352064433606, + "learning_rate": 8.273281985390736e-08, + "loss": 0.4766, + "step": 13592 + }, + { + "epoch": 0.9438272462158034, + "grad_norm": 4.693164219525063, + "learning_rate": 8.252923086755515e-08, + "loss": 0.3884, + "step": 13593 + }, + { + "epoch": 0.9438966810165255, + "grad_norm": 4.43000429430502, + "learning_rate": 8.232589060131024e-08, + "loss": 0.4014, + "step": 13594 + }, + { + "epoch": 0.9439661158172477, + "grad_norm": 4.18384862514878, + "learning_rate": 8.212279906545661e-08, + "loss": 0.3991, + "step": 13595 + }, + { + "epoch": 0.9440355506179697, + "grad_norm": 3.8866349778943845, + "learning_rate": 8.191995627026716e-08, + "loss": 0.2645, + "step": 13596 + }, + { + "epoch": 0.9441049854186918, + "grad_norm": 3.835499506881386, + "learning_rate": 8.17173622260009e-08, + "loss": 0.2673, + "step": 13597 + }, + { + "epoch": 0.944174420219414, + 
"grad_norm": 2.7804260265165714, + "learning_rate": 8.15150169429052e-08, + "loss": 0.1906, + "step": 13598 + }, + { + "epoch": 0.9442438550201361, + "grad_norm": 3.726927363715068, + "learning_rate": 8.131292043121408e-08, + "loss": 0.3155, + "step": 13599 + }, + { + "epoch": 0.9443132898208582, + "grad_norm": 2.636932593239316, + "learning_rate": 8.111107270114993e-08, + "loss": 0.29, + "step": 13600 + }, + { + "epoch": 0.9443827246215803, + "grad_norm": 5.075182793939531, + "learning_rate": 8.090947376292125e-08, + "loss": 0.4154, + "step": 13601 + }, + { + "epoch": 0.9444521594223024, + "grad_norm": 4.516224788902047, + "learning_rate": 8.070812362672541e-08, + "loss": 0.5251, + "step": 13602 + }, + { + "epoch": 0.9445215942230246, + "grad_norm": 3.01062600771442, + "learning_rate": 8.050702230274598e-08, + "loss": 0.2955, + "step": 13603 + }, + { + "epoch": 0.9445910290237467, + "grad_norm": 4.732334170640067, + "learning_rate": 8.03061698011537e-08, + "loss": 0.6322, + "step": 13604 + }, + { + "epoch": 0.9446604638244688, + "grad_norm": 7.58345074903841, + "learning_rate": 8.010556613210985e-08, + "loss": 0.5992, + "step": 13605 + }, + { + "epoch": 0.944729898625191, + "grad_norm": 5.151881953073426, + "learning_rate": 7.990521130575856e-08, + "loss": 0.5578, + "step": 13606 + }, + { + "epoch": 0.944799333425913, + "grad_norm": 4.599824654385063, + "learning_rate": 7.970510533223396e-08, + "loss": 0.3198, + "step": 13607 + }, + { + "epoch": 0.9448687682266352, + "grad_norm": 4.76039793827116, + "learning_rate": 7.950524822165794e-08, + "loss": 0.4448, + "step": 13608 + }, + { + "epoch": 0.9449382030273573, + "grad_norm": 3.1831429451178495, + "learning_rate": 7.930563998413798e-08, + "loss": 0.3076, + "step": 13609 + }, + { + "epoch": 0.9450076378280794, + "grad_norm": 3.331479866424242, + "learning_rate": 7.9106280629771e-08, + "loss": 0.3245, + "step": 13610 + }, + { + "epoch": 0.9450770726288016, + "grad_norm": 3.4857400326875596, + "learning_rate": 7.890717016864003e-08, + "loss": 0.2541, + "step": 13611 + }, + { + "epoch": 0.9451465074295237, + "grad_norm": 4.380912193402173, + "learning_rate": 7.870830861081535e-08, + "loss": 0.3585, + "step": 13612 + }, + { + "epoch": 0.9452159422302459, + "grad_norm": 4.0616374755369336, + "learning_rate": 7.850969596635616e-08, + "loss": 0.3385, + "step": 13613 + }, + { + "epoch": 0.9452853770309679, + "grad_norm": 4.30601052713788, + "learning_rate": 7.831133224530718e-08, + "loss": 0.5041, + "step": 13614 + }, + { + "epoch": 0.94535481183169, + "grad_norm": 4.180230425362507, + "learning_rate": 7.811321745770206e-08, + "loss": 0.568, + "step": 13615 + }, + { + "epoch": 0.9454242466324122, + "grad_norm": 3.3222430442037307, + "learning_rate": 7.791535161356057e-08, + "loss": 0.3584, + "step": 13616 + }, + { + "epoch": 0.9454936814331343, + "grad_norm": 4.2963917001234035, + "learning_rate": 7.771773472289079e-08, + "loss": 0.3711, + "step": 13617 + }, + { + "epoch": 0.9455631162338564, + "grad_norm": 3.3858535642324203, + "learning_rate": 7.752036679568809e-08, + "loss": 0.261, + "step": 13618 + }, + { + "epoch": 0.9456325510345786, + "grad_norm": 5.180334421663684, + "learning_rate": 7.732324784193556e-08, + "loss": 0.4812, + "step": 13619 + }, + { + "epoch": 0.9457019858353006, + "grad_norm": 3.7477264181699623, + "learning_rate": 7.712637787160249e-08, + "loss": 0.3107, + "step": 13620 + }, + { + "epoch": 0.9457714206360228, + "grad_norm": 3.5983490600353543, + "learning_rate": 7.692975689464643e-08, + "loss": 0.3771, + "step": 13621 + 
}, + { + "epoch": 0.9458408554367449, + "grad_norm": 4.630320842483007, + "learning_rate": 7.673338492101278e-08, + "loss": 0.5198, + "step": 13622 + }, + { + "epoch": 0.945910290237467, + "grad_norm": 3.7944668473495917, + "learning_rate": 7.65372619606336e-08, + "loss": 0.3566, + "step": 13623 + }, + { + "epoch": 0.9459797250381892, + "grad_norm": 4.344666272104659, + "learning_rate": 7.634138802342761e-08, + "loss": 0.4359, + "step": 13624 + }, + { + "epoch": 0.9460491598389112, + "grad_norm": 3.8892797514625874, + "learning_rate": 7.6145763119303e-08, + "loss": 0.5293, + "step": 13625 + }, + { + "epoch": 0.9461185946396334, + "grad_norm": 2.868281995099764, + "learning_rate": 7.595038725815463e-08, + "loss": 0.3005, + "step": 13626 + }, + { + "epoch": 0.9461880294403555, + "grad_norm": 4.6190484295314596, + "learning_rate": 7.575526044986293e-08, + "loss": 0.616, + "step": 13627 + }, + { + "epoch": 0.9462574642410776, + "grad_norm": 4.131773625339716, + "learning_rate": 7.556038270429833e-08, + "loss": 0.5011, + "step": 13628 + }, + { + "epoch": 0.9463268990417998, + "grad_norm": 3.551755689602933, + "learning_rate": 7.536575403131741e-08, + "loss": 0.3168, + "step": 13629 + }, + { + "epoch": 0.9463963338425219, + "grad_norm": 4.26427803506579, + "learning_rate": 7.517137444076394e-08, + "loss": 0.3911, + "step": 13630 + }, + { + "epoch": 0.9464657686432439, + "grad_norm": 3.67287463096618, + "learning_rate": 7.497724394246952e-08, + "loss": 0.4951, + "step": 13631 + }, + { + "epoch": 0.9465352034439661, + "grad_norm": 3.9445375479516085, + "learning_rate": 7.478336254625296e-08, + "loss": 0.4539, + "step": 13632 + }, + { + "epoch": 0.9466046382446882, + "grad_norm": 3.113842843150875, + "learning_rate": 7.458973026192085e-08, + "loss": 0.2598, + "step": 13633 + }, + { + "epoch": 0.9466740730454104, + "grad_norm": 3.8740852515980846, + "learning_rate": 7.439634709926647e-08, + "loss": 0.3621, + "step": 13634 + }, + { + "epoch": 0.9467435078461325, + "grad_norm": 5.805722662976317, + "learning_rate": 7.420321306807143e-08, + "loss": 0.5896, + "step": 13635 + }, + { + "epoch": 0.9468129426468546, + "grad_norm": 3.461579509130838, + "learning_rate": 7.401032817810294e-08, + "loss": 0.3879, + "step": 13636 + }, + { + "epoch": 0.9468823774475768, + "grad_norm": 3.724007203079931, + "learning_rate": 7.38176924391193e-08, + "loss": 0.5304, + "step": 13637 + }, + { + "epoch": 0.9469518122482988, + "grad_norm": 4.182345340163832, + "learning_rate": 7.362530586086159e-08, + "loss": 0.5385, + "step": 13638 + }, + { + "epoch": 0.947021247049021, + "grad_norm": 4.294224826126223, + "learning_rate": 7.343316845306148e-08, + "loss": 0.5147, + "step": 13639 + }, + { + "epoch": 0.9470906818497431, + "grad_norm": 4.439940147242975, + "learning_rate": 7.32412802254373e-08, + "loss": 0.598, + "step": 13640 + }, + { + "epoch": 0.9471601166504652, + "grad_norm": 4.51005899672905, + "learning_rate": 7.304964118769353e-08, + "loss": 0.4493, + "step": 13641 + }, + { + "epoch": 0.9472295514511874, + "grad_norm": 3.928415110307402, + "learning_rate": 7.285825134952463e-08, + "loss": 0.3015, + "step": 13642 + }, + { + "epoch": 0.9472989862519094, + "grad_norm": 3.4409527267050555, + "learning_rate": 7.266711072060895e-08, + "loss": 0.334, + "step": 13643 + }, + { + "epoch": 0.9473684210526315, + "grad_norm": 5.13573259161139, + "learning_rate": 7.247621931061599e-08, + "loss": 0.7405, + "step": 13644 + }, + { + "epoch": 0.9474378558533537, + "grad_norm": 4.480573953211173, + "learning_rate": 
7.228557712919914e-08, + "loss": 0.4192, + "step": 13645 + }, + { + "epoch": 0.9475072906540758, + "grad_norm": 3.8821490364372315, + "learning_rate": 7.209518418600236e-08, + "loss": 0.4955, + "step": 13646 + }, + { + "epoch": 0.947576725454798, + "grad_norm": 3.355088879117939, + "learning_rate": 7.190504049065462e-08, + "loss": 0.2372, + "step": 13647 + }, + { + "epoch": 0.9476461602555201, + "grad_norm": 5.5295241750359025, + "learning_rate": 7.171514605277374e-08, + "loss": 0.6387, + "step": 13648 + }, + { + "epoch": 0.9477155950562421, + "grad_norm": 3.714547535016453, + "learning_rate": 7.152550088196375e-08, + "loss": 0.4036, + "step": 13649 + }, + { + "epoch": 0.9477850298569643, + "grad_norm": 4.46452770464274, + "learning_rate": 7.13361049878164e-08, + "loss": 0.5211, + "step": 13650 + }, + { + "epoch": 0.9478544646576864, + "grad_norm": 4.312987851264458, + "learning_rate": 7.114695837991293e-08, + "loss": 0.4387, + "step": 13651 + }, + { + "epoch": 0.9479238994584086, + "grad_norm": 4.731264961559913, + "learning_rate": 7.095806106781788e-08, + "loss": 0.5521, + "step": 13652 + }, + { + "epoch": 0.9479933342591307, + "grad_norm": 4.058083029633025, + "learning_rate": 7.076941306108698e-08, + "loss": 0.3503, + "step": 13653 + }, + { + "epoch": 0.9480627690598528, + "grad_norm": 4.75617517251498, + "learning_rate": 7.05810143692609e-08, + "loss": 0.5376, + "step": 13654 + }, + { + "epoch": 0.948132203860575, + "grad_norm": 3.3126038554675783, + "learning_rate": 7.039286500187037e-08, + "loss": 0.2923, + "step": 13655 + }, + { + "epoch": 0.948201638661297, + "grad_norm": 4.1791718164097045, + "learning_rate": 7.020496496842888e-08, + "loss": 0.4386, + "step": 13656 + }, + { + "epoch": 0.9482710734620191, + "grad_norm": 4.218528832283942, + "learning_rate": 7.001731427844272e-08, + "loss": 0.3796, + "step": 13657 + }, + { + "epoch": 0.9483405082627413, + "grad_norm": 3.6979818182633526, + "learning_rate": 6.982991294140262e-08, + "loss": 0.2869, + "step": 13658 + }, + { + "epoch": 0.9484099430634634, + "grad_norm": 2.35607874179691, + "learning_rate": 6.964276096678546e-08, + "loss": 0.1814, + "step": 13659 + }, + { + "epoch": 0.9484793778641856, + "grad_norm": 4.123098524745145, + "learning_rate": 6.945585836405922e-08, + "loss": 0.435, + "step": 13660 + }, + { + "epoch": 0.9485488126649076, + "grad_norm": 4.655868099093231, + "learning_rate": 6.926920514267576e-08, + "loss": 0.5642, + "step": 13661 + }, + { + "epoch": 0.9486182474656297, + "grad_norm": 2.598059996934651, + "learning_rate": 6.908280131207701e-08, + "loss": 0.1911, + "step": 13662 + }, + { + "epoch": 0.9486876822663519, + "grad_norm": 4.457444072110447, + "learning_rate": 6.889664688168984e-08, + "loss": 0.6129, + "step": 13663 + }, + { + "epoch": 0.948757117067074, + "grad_norm": 3.315774232290024, + "learning_rate": 6.871074186093118e-08, + "loss": 0.2844, + "step": 13664 + }, + { + "epoch": 0.9488265518677962, + "grad_norm": 3.927788001597242, + "learning_rate": 6.852508625920184e-08, + "loss": 0.3313, + "step": 13665 + }, + { + "epoch": 0.9488959866685183, + "grad_norm": 3.8207391627188154, + "learning_rate": 6.833968008589486e-08, + "loss": 0.346, + "step": 13666 + }, + { + "epoch": 0.9489654214692403, + "grad_norm": 3.276000249567987, + "learning_rate": 6.815452335038552e-08, + "loss": 0.2086, + "step": 13667 + }, + { + "epoch": 0.9490348562699625, + "grad_norm": 3.372665226802382, + "learning_rate": 6.796961606203912e-08, + "loss": 0.6035, + "step": 13668 + }, + { + "epoch": 0.9491042910706846, + 
"grad_norm": 3.5885233580748275, + "learning_rate": 6.778495823020925e-08, + "loss": 0.4418, + "step": 13669 + }, + { + "epoch": 0.9491737258714068, + "grad_norm": 3.704607490842046, + "learning_rate": 6.760054986423459e-08, + "loss": 0.4242, + "step": 13670 + }, + { + "epoch": 0.9492431606721289, + "grad_norm": 5.301991881890821, + "learning_rate": 6.741639097344322e-08, + "loss": 0.5544, + "step": 13671 + }, + { + "epoch": 0.949312595472851, + "grad_norm": 3.1617610184585563, + "learning_rate": 6.723248156714879e-08, + "loss": 0.2743, + "step": 13672 + }, + { + "epoch": 0.9493820302735732, + "grad_norm": 2.9273173446321006, + "learning_rate": 6.704882165465388e-08, + "loss": 0.2733, + "step": 13673 + }, + { + "epoch": 0.9494514650742952, + "grad_norm": 4.0490038541021915, + "learning_rate": 6.686541124524715e-08, + "loss": 0.3737, + "step": 13674 + }, + { + "epoch": 0.9495208998750173, + "grad_norm": 3.5924407858174585, + "learning_rate": 6.668225034820619e-08, + "loss": 0.4399, + "step": 13675 + }, + { + "epoch": 0.9495903346757395, + "grad_norm": 3.43117871428001, + "learning_rate": 6.649933897279471e-08, + "loss": 0.3219, + "step": 13676 + }, + { + "epoch": 0.9496597694764616, + "grad_norm": 3.7217145665656006, + "learning_rate": 6.631667712826362e-08, + "loss": 0.2544, + "step": 13677 + }, + { + "epoch": 0.9497292042771838, + "grad_norm": 3.758525541437729, + "learning_rate": 6.613426482385222e-08, + "loss": 0.3291, + "step": 13678 + }, + { + "epoch": 0.9497986390779058, + "grad_norm": 4.258746501467238, + "learning_rate": 6.595210206878644e-08, + "loss": 0.3361, + "step": 13679 + }, + { + "epoch": 0.9498680738786279, + "grad_norm": 3.554279115930025, + "learning_rate": 6.57701888722806e-08, + "loss": 0.2929, + "step": 13680 + }, + { + "epoch": 0.9499375086793501, + "grad_norm": 3.7673448291774734, + "learning_rate": 6.558852524353509e-08, + "loss": 0.4341, + "step": 13681 + }, + { + "epoch": 0.9500069434800722, + "grad_norm": 3.351238846744773, + "learning_rate": 6.54071111917376e-08, + "loss": 0.3523, + "step": 13682 + }, + { + "epoch": 0.9500763782807944, + "grad_norm": 3.1161316831082817, + "learning_rate": 6.522594672606519e-08, + "loss": 0.3187, + "step": 13683 + }, + { + "epoch": 0.9501458130815165, + "grad_norm": 5.358799128246562, + "learning_rate": 6.504503185568056e-08, + "loss": 0.6084, + "step": 13684 + }, + { + "epoch": 0.9502152478822385, + "grad_norm": 5.625555475746649, + "learning_rate": 6.486436658973305e-08, + "loss": 0.4486, + "step": 13685 + }, + { + "epoch": 0.9502846826829607, + "grad_norm": 3.8169861363144126, + "learning_rate": 6.468395093736201e-08, + "loss": 0.4327, + "step": 13686 + }, + { + "epoch": 0.9503541174836828, + "grad_norm": 3.6956254941635147, + "learning_rate": 6.450378490769238e-08, + "loss": 0.4737, + "step": 13687 + }, + { + "epoch": 0.9504235522844049, + "grad_norm": 3.5095411747770098, + "learning_rate": 6.432386850983575e-08, + "loss": 0.2572, + "step": 13688 + }, + { + "epoch": 0.9504929870851271, + "grad_norm": 4.259990413136725, + "learning_rate": 6.414420175289316e-08, + "loss": 0.4709, + "step": 13689 + }, + { + "epoch": 0.9505624218858492, + "grad_norm": 3.8901611509865326, + "learning_rate": 6.396478464595124e-08, + "loss": 0.4531, + "step": 13690 + }, + { + "epoch": 0.9506318566865714, + "grad_norm": 4.616724500012755, + "learning_rate": 6.378561719808495e-08, + "loss": 0.655, + "step": 13691 + }, + { + "epoch": 0.9507012914872934, + "grad_norm": 3.4537477401329206, + "learning_rate": 6.360669941835707e-08, + "loss": 0.275, + 
"step": 13692 + }, + { + "epoch": 0.9507707262880155, + "grad_norm": 3.512772786078367, + "learning_rate": 6.342803131581587e-08, + "loss": 0.3024, + "step": 13693 + }, + { + "epoch": 0.9508401610887377, + "grad_norm": 4.8958741741450815, + "learning_rate": 6.32496128994986e-08, + "loss": 0.5526, + "step": 13694 + }, + { + "epoch": 0.9509095958894598, + "grad_norm": 3.865406298166846, + "learning_rate": 6.307144417843025e-08, + "loss": 0.41, + "step": 13695 + }, + { + "epoch": 0.950979030690182, + "grad_norm": 4.008626378788147, + "learning_rate": 6.28935251616214e-08, + "loss": 0.3548, + "step": 13696 + }, + { + "epoch": 0.951048465490904, + "grad_norm": 3.234938443853972, + "learning_rate": 6.271585585807095e-08, + "loss": 0.2858, + "step": 13697 + }, + { + "epoch": 0.9511179002916261, + "grad_norm": 3.2906952625792756, + "learning_rate": 6.253843627676614e-08, + "loss": 0.2802, + "step": 13698 + }, + { + "epoch": 0.9511873350923483, + "grad_norm": 2.9383512680773967, + "learning_rate": 6.236126642667983e-08, + "loss": 0.2325, + "step": 13699 + }, + { + "epoch": 0.9512567698930704, + "grad_norm": 3.6388071059489047, + "learning_rate": 6.218434631677316e-08, + "loss": 0.4709, + "step": 13700 + }, + { + "epoch": 0.9513262046937925, + "grad_norm": 3.513599857057769, + "learning_rate": 6.200767595599566e-08, + "loss": 0.3443, + "step": 13701 + }, + { + "epoch": 0.9513956394945147, + "grad_norm": 2.98873291672003, + "learning_rate": 6.183125535328072e-08, + "loss": 0.2342, + "step": 13702 + }, + { + "epoch": 0.9514650742952367, + "grad_norm": 3.0775042067561578, + "learning_rate": 6.165508451755397e-08, + "loss": 0.2925, + "step": 13703 + }, + { + "epoch": 0.9515345090959589, + "grad_norm": 3.4704343870757812, + "learning_rate": 6.147916345772497e-08, + "loss": 0.206, + "step": 13704 + }, + { + "epoch": 0.951603943896681, + "grad_norm": 3.0964683795669963, + "learning_rate": 6.130349218269104e-08, + "loss": 0.3312, + "step": 13705 + }, + { + "epoch": 0.9516733786974031, + "grad_norm": 3.5393836860593177, + "learning_rate": 6.112807070133842e-08, + "loss": 0.2131, + "step": 13706 + }, + { + "epoch": 0.9517428134981253, + "grad_norm": 4.569400179364535, + "learning_rate": 6.095289902253887e-08, + "loss": 0.4264, + "step": 13707 + }, + { + "epoch": 0.9518122482988474, + "grad_norm": 5.161394653168402, + "learning_rate": 6.07779771551531e-08, + "loss": 0.5866, + "step": 13708 + }, + { + "epoch": 0.9518816830995696, + "grad_norm": 5.0977324811951945, + "learning_rate": 6.060330510802848e-08, + "loss": 0.3997, + "step": 13709 + }, + { + "epoch": 0.9519511179002916, + "grad_norm": 4.759533807947581, + "learning_rate": 6.042888288999904e-08, + "loss": 0.4045, + "step": 13710 + }, + { + "epoch": 0.9520205527010137, + "grad_norm": 5.505368708477018, + "learning_rate": 6.025471050988662e-08, + "loss": 0.5899, + "step": 13711 + }, + { + "epoch": 0.9520899875017359, + "grad_norm": 5.442442112067745, + "learning_rate": 6.008078797650196e-08, + "loss": 0.5671, + "step": 13712 + }, + { + "epoch": 0.952159422302458, + "grad_norm": 4.057394585951996, + "learning_rate": 5.990711529864079e-08, + "loss": 0.5129, + "step": 13713 + }, + { + "epoch": 0.9522288571031801, + "grad_norm": 2.659618891571195, + "learning_rate": 5.973369248508775e-08, + "loss": 0.2432, + "step": 13714 + }, + { + "epoch": 0.9522982919039022, + "grad_norm": 4.104167267102356, + "learning_rate": 5.9560519544614725e-08, + "loss": 0.6073, + "step": 13715 + }, + { + "epoch": 0.9523677267046243, + "grad_norm": 4.607507065467092, + 
"learning_rate": 5.938759648597969e-08, + "loss": 0.3594, + "step": 13716 + }, + { + "epoch": 0.9524371615053465, + "grad_norm": 4.846589279139945, + "learning_rate": 5.9214923317928996e-08, + "loss": 0.4522, + "step": 13717 + }, + { + "epoch": 0.9525065963060686, + "grad_norm": 4.119954192191566, + "learning_rate": 5.904250004919676e-08, + "loss": 0.4353, + "step": 13718 + }, + { + "epoch": 0.9525760311067907, + "grad_norm": 4.648236196200457, + "learning_rate": 5.8870326688503785e-08, + "loss": 0.7015, + "step": 13719 + }, + { + "epoch": 0.9526454659075129, + "grad_norm": 4.080707223352292, + "learning_rate": 5.869840324455867e-08, + "loss": 0.3911, + "step": 13720 + }, + { + "epoch": 0.952714900708235, + "grad_norm": 2.8789349070511805, + "learning_rate": 5.852672972605722e-08, + "loss": 0.2019, + "step": 13721 + }, + { + "epoch": 0.9527843355089571, + "grad_norm": 3.410282571025948, + "learning_rate": 5.835530614168139e-08, + "loss": 0.2992, + "step": 13722 + }, + { + "epoch": 0.9528537703096792, + "grad_norm": 4.894677996671319, + "learning_rate": 5.818413250010313e-08, + "loss": 0.239, + "step": 13723 + }, + { + "epoch": 0.9529232051104013, + "grad_norm": 3.3017609860449375, + "learning_rate": 5.801320880997885e-08, + "loss": 0.4211, + "step": 13724 + }, + { + "epoch": 0.9529926399111235, + "grad_norm": 3.6449909829302447, + "learning_rate": 5.784253507995441e-08, + "loss": 0.4747, + "step": 13725 + }, + { + "epoch": 0.9530620747118456, + "grad_norm": 2.844534890946508, + "learning_rate": 5.7672111318661796e-08, + "loss": 0.2872, + "step": 13726 + }, + { + "epoch": 0.9531315095125678, + "grad_norm": 4.253589585625809, + "learning_rate": 5.750193753472133e-08, + "loss": 0.433, + "step": 13727 + }, + { + "epoch": 0.9532009443132898, + "grad_norm": 4.573107826333037, + "learning_rate": 5.733201373674002e-08, + "loss": 0.4319, + "step": 13728 + }, + { + "epoch": 0.9532703791140119, + "grad_norm": 3.8093677377233943, + "learning_rate": 5.716233993331266e-08, + "loss": 0.3048, + "step": 13729 + }, + { + "epoch": 0.9533398139147341, + "grad_norm": 3.9826577582816105, + "learning_rate": 5.6992916133020713e-08, + "loss": 0.5305, + "step": 13730 + }, + { + "epoch": 0.9534092487154562, + "grad_norm": 5.936500169456354, + "learning_rate": 5.682374234443344e-08, + "loss": 0.4476, + "step": 13731 + }, + { + "epoch": 0.9534786835161783, + "grad_norm": 3.5337222091478004, + "learning_rate": 5.665481857610844e-08, + "loss": 0.3033, + "step": 13732 + }, + { + "epoch": 0.9535481183169005, + "grad_norm": 4.474230508273001, + "learning_rate": 5.648614483658887e-08, + "loss": 0.3867, + "step": 13733 + }, + { + "epoch": 0.9536175531176225, + "grad_norm": 3.4395490258977115, + "learning_rate": 5.6317721134405145e-08, + "loss": 0.403, + "step": 13734 + }, + { + "epoch": 0.9536869879183447, + "grad_norm": 4.449479349855479, + "learning_rate": 5.614954747807766e-08, + "loss": 0.5633, + "step": 13735 + }, + { + "epoch": 0.9537564227190668, + "grad_norm": 3.602797052026786, + "learning_rate": 5.5981623876111833e-08, + "loss": 0.4116, + "step": 13736 + }, + { + "epoch": 0.9538258575197889, + "grad_norm": 3.8764715732710227, + "learning_rate": 5.5813950337000876e-08, + "loss": 0.2844, + "step": 13737 + }, + { + "epoch": 0.9538952923205111, + "grad_norm": 3.8746349156932447, + "learning_rate": 5.564652686922523e-08, + "loss": 0.4043, + "step": 13738 + }, + { + "epoch": 0.9539647271212331, + "grad_norm": 2.831249944933847, + "learning_rate": 5.547935348125366e-08, + "loss": 0.1951, + "step": 13739 + }, + { + 
"epoch": 0.9540341619219553, + "grad_norm": 5.002261056652214, + "learning_rate": 5.531243018154109e-08, + "loss": 0.6011, + "step": 13740 + }, + { + "epoch": 0.9541035967226774, + "grad_norm": 3.5291833462110653, + "learning_rate": 5.5145756978530754e-08, + "loss": 0.3471, + "step": 13741 + }, + { + "epoch": 0.9541730315233995, + "grad_norm": 4.264188643167261, + "learning_rate": 5.4979333880652574e-08, + "loss": 0.5141, + "step": 13742 + }, + { + "epoch": 0.9542424663241217, + "grad_norm": 3.9364891155309274, + "learning_rate": 5.4813160896323714e-08, + "loss": 0.3907, + "step": 13743 + }, + { + "epoch": 0.9543119011248438, + "grad_norm": 4.387793071077238, + "learning_rate": 5.4647238033949666e-08, + "loss": 0.3864, + "step": 13744 + }, + { + "epoch": 0.9543813359255658, + "grad_norm": 4.822898455030683, + "learning_rate": 5.4481565301922057e-08, + "loss": 0.5582, + "step": 13745 + }, + { + "epoch": 0.954450770726288, + "grad_norm": 4.938522930095148, + "learning_rate": 5.431614270862029e-08, + "loss": 0.7861, + "step": 13746 + }, + { + "epoch": 0.9545202055270101, + "grad_norm": 4.066376041766102, + "learning_rate": 5.4150970262412114e-08, + "loss": 0.2856, + "step": 13747 + }, + { + "epoch": 0.9545896403277323, + "grad_norm": 3.6130713492136137, + "learning_rate": 5.398604797165141e-08, + "loss": 0.3425, + "step": 13748 + }, + { + "epoch": 0.9546590751284544, + "grad_norm": 4.881464304116736, + "learning_rate": 5.3821375844678725e-08, + "loss": 0.4625, + "step": 13749 + }, + { + "epoch": 0.9547285099291765, + "grad_norm": 4.86315785564698, + "learning_rate": 5.365695388982461e-08, + "loss": 0.668, + "step": 13750 + }, + { + "epoch": 0.9547979447298987, + "grad_norm": 4.414774526005412, + "learning_rate": 5.349278211540465e-08, + "loss": 0.4378, + "step": 13751 + }, + { + "epoch": 0.9548673795306207, + "grad_norm": 5.212315794946315, + "learning_rate": 5.332886052972219e-08, + "loss": 0.4562, + "step": 13752 + }, + { + "epoch": 0.9549368143313429, + "grad_norm": 4.244604861683294, + "learning_rate": 5.3165189141068385e-08, + "loss": 0.481, + "step": 13753 + }, + { + "epoch": 0.955006249132065, + "grad_norm": 2.623848249585915, + "learning_rate": 5.300176795772161e-08, + "loss": 0.0998, + "step": 13754 + }, + { + "epoch": 0.9550756839327871, + "grad_norm": 3.1011437769689216, + "learning_rate": 5.2838596987947464e-08, + "loss": 0.2928, + "step": 13755 + }, + { + "epoch": 0.9551451187335093, + "grad_norm": 4.816876318480773, + "learning_rate": 5.2675676239999365e-08, + "loss": 0.4824, + "step": 13756 + }, + { + "epoch": 0.9552145535342313, + "grad_norm": 3.3565758188776256, + "learning_rate": 5.2513005722116816e-08, + "loss": 0.3917, + "step": 13757 + }, + { + "epoch": 0.9552839883349534, + "grad_norm": 3.345690512458578, + "learning_rate": 5.235058544252825e-08, + "loss": 0.2665, + "step": 13758 + }, + { + "epoch": 0.9553534231356756, + "grad_norm": 3.701444267394486, + "learning_rate": 5.218841540944819e-08, + "loss": 0.4149, + "step": 13759 + }, + { + "epoch": 0.9554228579363977, + "grad_norm": 3.583136724246739, + "learning_rate": 5.202649563107898e-08, + "loss": 0.3153, + "step": 13760 + }, + { + "epoch": 0.9554922927371199, + "grad_norm": 6.221288795909009, + "learning_rate": 5.1864826115611275e-08, + "loss": 0.3226, + "step": 13761 + }, + { + "epoch": 0.955561727537842, + "grad_norm": 4.410783839641024, + "learning_rate": 5.170340687122133e-08, + "loss": 0.3513, + "step": 13762 + }, + { + "epoch": 0.955631162338564, + "grad_norm": 4.9048981793991215, + "learning_rate": 
5.1542237906073154e-08, + "loss": 0.4872, + "step": 13763 + }, + { + "epoch": 0.9557005971392862, + "grad_norm": 4.0147064224730995, + "learning_rate": 5.1381319228319684e-08, + "loss": 0.4485, + "step": 13764 + }, + { + "epoch": 0.9557700319400083, + "grad_norm": 4.065462247455766, + "learning_rate": 5.12206508460994e-08, + "loss": 0.3438, + "step": 13765 + }, + { + "epoch": 0.9558394667407305, + "grad_norm": 5.005870930483714, + "learning_rate": 5.106023276753802e-08, + "loss": 0.5467, + "step": 13766 + }, + { + "epoch": 0.9559089015414526, + "grad_norm": 4.1956769404460195, + "learning_rate": 5.090006500075018e-08, + "loss": 0.4811, + "step": 13767 + }, + { + "epoch": 0.9559783363421747, + "grad_norm": 3.164243683731383, + "learning_rate": 5.074014755383716e-08, + "loss": 0.206, + "step": 13768 + }, + { + "epoch": 0.9560477711428969, + "grad_norm": 4.034796986554963, + "learning_rate": 5.0580480434886946e-08, + "loss": 0.3752, + "step": 13769 + }, + { + "epoch": 0.9561172059436189, + "grad_norm": 3.1577514924323142, + "learning_rate": 5.04210636519753e-08, + "loss": 0.3422, + "step": 13770 + }, + { + "epoch": 0.956186640744341, + "grad_norm": 4.621330442701489, + "learning_rate": 5.026189721316521e-08, + "loss": 0.4951, + "step": 13771 + }, + { + "epoch": 0.9562560755450632, + "grad_norm": 3.645406553491685, + "learning_rate": 5.010298112650691e-08, + "loss": 0.3262, + "step": 13772 + }, + { + "epoch": 0.9563255103457853, + "grad_norm": 2.738805959666506, + "learning_rate": 4.9944315400039524e-08, + "loss": 0.165, + "step": 13773 + }, + { + "epoch": 0.9563949451465075, + "grad_norm": 5.5379366339991165, + "learning_rate": 4.978590004178718e-08, + "loss": 0.6481, + "step": 13774 + }, + { + "epoch": 0.9564643799472295, + "grad_norm": 2.812040631679995, + "learning_rate": 4.962773505976182e-08, + "loss": 0.2133, + "step": 13775 + }, + { + "epoch": 0.9565338147479516, + "grad_norm": 4.185047982000624, + "learning_rate": 4.94698204619648e-08, + "loss": 0.4786, + "step": 13776 + }, + { + "epoch": 0.9566032495486738, + "grad_norm": 4.22031613308588, + "learning_rate": 4.931215625638197e-08, + "loss": 0.4203, + "step": 13777 + }, + { + "epoch": 0.9566726843493959, + "grad_norm": 3.2502871450722446, + "learning_rate": 4.91547424509875e-08, + "loss": 0.3303, + "step": 13778 + }, + { + "epoch": 0.9567421191501181, + "grad_norm": 2.840915325002215, + "learning_rate": 4.8997579053744473e-08, + "loss": 0.1366, + "step": 13779 + }, + { + "epoch": 0.9568115539508402, + "grad_norm": 3.7913872967781597, + "learning_rate": 4.8840666072601516e-08, + "loss": 0.3388, + "step": 13780 + }, + { + "epoch": 0.9568809887515622, + "grad_norm": 3.46239936309188, + "learning_rate": 4.868400351549507e-08, + "loss": 0.2844, + "step": 13781 + }, + { + "epoch": 0.9569504235522844, + "grad_norm": 6.718760040575691, + "learning_rate": 4.852759139034935e-08, + "loss": 0.4516, + "step": 13782 + }, + { + "epoch": 0.9570198583530065, + "grad_norm": 3.662048534926939, + "learning_rate": 4.837142970507469e-08, + "loss": 0.3151, + "step": 13783 + }, + { + "epoch": 0.9570892931537287, + "grad_norm": 4.143910616518008, + "learning_rate": 4.8215518467569775e-08, + "loss": 0.419, + "step": 13784 + }, + { + "epoch": 0.9571587279544508, + "grad_norm": 2.8260291756886793, + "learning_rate": 4.8059857685720524e-08, + "loss": 0.3153, + "step": 13785 + }, + { + "epoch": 0.9572281627551729, + "grad_norm": 5.632561323738521, + "learning_rate": 4.790444736740063e-08, + "loss": 0.4623, + "step": 13786 + }, + { + "epoch": 0.957297597555895, + 
"grad_norm": 4.533362266438861, + "learning_rate": 4.7749287520469925e-08, + "loss": 0.3911, + "step": 13787 + }, + { + "epoch": 0.9573670323566171, + "grad_norm": 4.030234423717611, + "learning_rate": 4.759437815277601e-08, + "loss": 0.4466, + "step": 13788 + }, + { + "epoch": 0.9574364671573392, + "grad_norm": 3.760078342574025, + "learning_rate": 4.743971927215485e-08, + "loss": 0.4153, + "step": 13789 + }, + { + "epoch": 0.9575059019580614, + "grad_norm": 5.438482204493829, + "learning_rate": 4.728531088642851e-08, + "loss": 0.5471, + "step": 13790 + }, + { + "epoch": 0.9575753367587835, + "grad_norm": 3.5605754782880865, + "learning_rate": 4.713115300340687e-08, + "loss": 0.4443, + "step": 13791 + }, + { + "epoch": 0.9576447715595057, + "grad_norm": 3.2605874775568604, + "learning_rate": 4.697724563088646e-08, + "loss": 0.23, + "step": 13792 + }, + { + "epoch": 0.9577142063602277, + "grad_norm": 3.8953526992560596, + "learning_rate": 4.682358877665272e-08, + "loss": 0.4293, + "step": 13793 + }, + { + "epoch": 0.9577836411609498, + "grad_norm": 3.688126146802298, + "learning_rate": 4.6670182448476654e-08, + "loss": 0.5331, + "step": 13794 + }, + { + "epoch": 0.957853075961672, + "grad_norm": 3.6621761384077947, + "learning_rate": 4.6517026654117614e-08, + "loss": 0.3177, + "step": 13795 + }, + { + "epoch": 0.9579225107623941, + "grad_norm": 2.9829489682129067, + "learning_rate": 4.6364121401322185e-08, + "loss": 0.2041, + "step": 13796 + }, + { + "epoch": 0.9579919455631163, + "grad_norm": 3.8755040473465487, + "learning_rate": 4.6211466697824724e-08, + "loss": 0.3712, + "step": 13797 + }, + { + "epoch": 0.9580613803638384, + "grad_norm": 3.7293845240685735, + "learning_rate": 4.605906255134518e-08, + "loss": 0.4221, + "step": 13798 + }, + { + "epoch": 0.9581308151645604, + "grad_norm": 5.200251316713918, + "learning_rate": 4.590690896959238e-08, + "loss": 0.5624, + "step": 13799 + }, + { + "epoch": 0.9582002499652826, + "grad_norm": 3.5477653655164607, + "learning_rate": 4.5755005960262386e-08, + "loss": 0.2367, + "step": 13800 + }, + { + "epoch": 0.9582696847660047, + "grad_norm": 3.55678544387724, + "learning_rate": 4.5603353531037954e-08, + "loss": 0.3706, + "step": 13801 + }, + { + "epoch": 0.9583391195667268, + "grad_norm": 4.019138298153231, + "learning_rate": 4.54519516895896e-08, + "loss": 0.3819, + "step": 13802 + }, + { + "epoch": 0.958408554367449, + "grad_norm": 4.1423853727256965, + "learning_rate": 4.53008004435751e-08, + "loss": 0.4493, + "step": 13803 + }, + { + "epoch": 0.9584779891681711, + "grad_norm": 3.439216853021881, + "learning_rate": 4.514989980063944e-08, + "loss": 0.3431, + "step": 13804 + }, + { + "epoch": 0.9585474239688933, + "grad_norm": 4.119036257431424, + "learning_rate": 4.499924976841541e-08, + "loss": 0.3071, + "step": 13805 + }, + { + "epoch": 0.9586168587696153, + "grad_norm": 4.3364542957782115, + "learning_rate": 4.484885035452191e-08, + "loss": 0.4829, + "step": 13806 + }, + { + "epoch": 0.9586862935703374, + "grad_norm": 4.154648087154131, + "learning_rate": 4.469870156656619e-08, + "loss": 0.4828, + "step": 13807 + }, + { + "epoch": 0.9587557283710596, + "grad_norm": 4.386339857521736, + "learning_rate": 4.454880341214385e-08, + "loss": 0.414, + "step": 13808 + }, + { + "epoch": 0.9588251631717817, + "grad_norm": 4.2006746850425865, + "learning_rate": 4.4399155898834365e-08, + "loss": 0.2814, + "step": 13809 + }, + { + "epoch": 0.9588945979725039, + "grad_norm": 4.4862546746366885, + "learning_rate": 4.4249759034208916e-08, + "loss": 0.5541, 
+ "step": 13810 + }, + { + "epoch": 0.958964032773226, + "grad_norm": 5.247518739724254, + "learning_rate": 4.410061282582256e-08, + "loss": 0.4731, + "step": 13811 + }, + { + "epoch": 0.959033467573948, + "grad_norm": 3.427119471248524, + "learning_rate": 4.395171728121872e-08, + "loss": 0.2677, + "step": 13812 + }, + { + "epoch": 0.9591029023746702, + "grad_norm": 5.207259037309656, + "learning_rate": 4.380307240792969e-08, + "loss": 0.5715, + "step": 13813 + }, + { + "epoch": 0.9591723371753923, + "grad_norm": 3.723407690108263, + "learning_rate": 4.365467821347225e-08, + "loss": 0.3877, + "step": 13814 + }, + { + "epoch": 0.9592417719761144, + "grad_norm": 4.655179146418831, + "learning_rate": 4.350653470535315e-08, + "loss": 0.4317, + "step": 13815 + }, + { + "epoch": 0.9593112067768366, + "grad_norm": 4.75184632912496, + "learning_rate": 4.3358641891064757e-08, + "loss": 0.4672, + "step": 13816 + }, + { + "epoch": 0.9593806415775586, + "grad_norm": 4.343633391023645, + "learning_rate": 4.3210999778087184e-08, + "loss": 0.4242, + "step": 13817 + }, + { + "epoch": 0.9594500763782808, + "grad_norm": 4.742145894209304, + "learning_rate": 4.30636083738889e-08, + "loss": 0.5523, + "step": 13818 + }, + { + "epoch": 0.9595195111790029, + "grad_norm": 3.63360962971277, + "learning_rate": 4.291646768592339e-08, + "loss": 0.1969, + "step": 13819 + }, + { + "epoch": 0.959588945979725, + "grad_norm": 4.658256364052358, + "learning_rate": 4.2769577721634146e-08, + "loss": 0.5623, + "step": 13820 + }, + { + "epoch": 0.9596583807804472, + "grad_norm": 3.1681681870570597, + "learning_rate": 4.262293848844967e-08, + "loss": 0.3458, + "step": 13821 + }, + { + "epoch": 0.9597278155811693, + "grad_norm": 3.1429083666928124, + "learning_rate": 4.2476549993787363e-08, + "loss": 0.3386, + "step": 13822 + }, + { + "epoch": 0.9597972503818915, + "grad_norm": 3.758413772178014, + "learning_rate": 4.233041224505241e-08, + "loss": 0.32, + "step": 13823 + }, + { + "epoch": 0.9598666851826135, + "grad_norm": 4.712591199974532, + "learning_rate": 4.21845252496339e-08, + "loss": 0.3828, + "step": 13824 + }, + { + "epoch": 0.9599361199833356, + "grad_norm": 3.370755783479801, + "learning_rate": 4.2038889014912595e-08, + "loss": 0.4166, + "step": 13825 + }, + { + "epoch": 0.9600055547840578, + "grad_norm": 4.8701764259809215, + "learning_rate": 4.189350354825428e-08, + "loss": 0.4534, + "step": 13826 + }, + { + "epoch": 0.9600749895847799, + "grad_norm": 3.9778007454806343, + "learning_rate": 4.174836885701139e-08, + "loss": 0.2872, + "step": 13827 + }, + { + "epoch": 0.960144424385502, + "grad_norm": 4.249046231927962, + "learning_rate": 4.160348494852528e-08, + "loss": 0.5902, + "step": 13828 + }, + { + "epoch": 0.9602138591862242, + "grad_norm": 4.7314813108457, + "learning_rate": 4.1458851830124545e-08, + "loss": 0.4503, + "step": 13829 + }, + { + "epoch": 0.9602832939869462, + "grad_norm": 4.299908866757219, + "learning_rate": 4.131446950912388e-08, + "loss": 0.4853, + "step": 13830 + }, + { + "epoch": 0.9603527287876684, + "grad_norm": 4.087174207564177, + "learning_rate": 4.1170337992826325e-08, + "loss": 0.3416, + "step": 13831 + }, + { + "epoch": 0.9604221635883905, + "grad_norm": 3.2249131898160246, + "learning_rate": 4.102645728852217e-08, + "loss": 0.2984, + "step": 13832 + }, + { + "epoch": 0.9604915983891126, + "grad_norm": 4.891650256840407, + "learning_rate": 4.0882827403487814e-08, + "loss": 0.4193, + "step": 13833 + }, + { + "epoch": 0.9605610331898348, + "grad_norm": 4.020020612618311, + 
"learning_rate": 4.0739448344988554e-08, + "loss": 0.3364, + "step": 13834 + }, + { + "epoch": 0.9606304679905568, + "grad_norm": 3.6139678591424387, + "learning_rate": 4.059632012027637e-08, + "loss": 0.4618, + "step": 13835 + }, + { + "epoch": 0.960699902791279, + "grad_norm": 3.303165549296177, + "learning_rate": 4.045344273658991e-08, + "loss": 0.3134, + "step": 13836 + }, + { + "epoch": 0.9607693375920011, + "grad_norm": 3.758252980498182, + "learning_rate": 4.031081620115673e-08, + "loss": 0.3988, + "step": 13837 + }, + { + "epoch": 0.9608387723927232, + "grad_norm": 4.530037871823786, + "learning_rate": 4.0168440521189954e-08, + "loss": 0.3457, + "step": 13838 + }, + { + "epoch": 0.9609082071934454, + "grad_norm": 3.8646392516747667, + "learning_rate": 4.002631570389104e-08, + "loss": 0.188, + "step": 13839 + }, + { + "epoch": 0.9609776419941675, + "grad_norm": 4.329407701100087, + "learning_rate": 3.9884441756448676e-08, + "loss": 0.4367, + "step": 13840 + }, + { + "epoch": 0.9610470767948895, + "grad_norm": 4.600340799932416, + "learning_rate": 3.9742818686038244e-08, + "loss": 0.5344, + "step": 13841 + }, + { + "epoch": 0.9611165115956117, + "grad_norm": 3.702597773537803, + "learning_rate": 3.9601446499822894e-08, + "loss": 0.5135, + "step": 13842 + }, + { + "epoch": 0.9611859463963338, + "grad_norm": 4.5623747942811255, + "learning_rate": 3.946032520495413e-08, + "loss": 0.5533, + "step": 13843 + }, + { + "epoch": 0.961255381197056, + "grad_norm": 3.7726541753239977, + "learning_rate": 3.931945480856847e-08, + "loss": 0.4268, + "step": 13844 + }, + { + "epoch": 0.9613248159977781, + "grad_norm": 3.6886432017973907, + "learning_rate": 3.9178835317791317e-08, + "loss": 0.2752, + "step": 13845 + }, + { + "epoch": 0.9613942507985002, + "grad_norm": 4.245120722768961, + "learning_rate": 3.903846673973477e-08, + "loss": 0.4737, + "step": 13846 + }, + { + "epoch": 0.9614636855992224, + "grad_norm": 4.339663662356036, + "learning_rate": 3.889834908149925e-08, + "loss": 0.4936, + "step": 13847 + }, + { + "epoch": 0.9615331203999444, + "grad_norm": 3.128509208211385, + "learning_rate": 3.8758482350171325e-08, + "loss": 0.2537, + "step": 13848 + }, + { + "epoch": 0.9616025552006666, + "grad_norm": 4.929401318062302, + "learning_rate": 3.8618866552825874e-08, + "loss": 0.475, + "step": 13849 + }, + { + "epoch": 0.9616719900013887, + "grad_norm": 3.4409667197554143, + "learning_rate": 3.847950169652282e-08, + "loss": 0.3655, + "step": 13850 + }, + { + "epoch": 0.9617414248021108, + "grad_norm": 4.465699661608561, + "learning_rate": 3.8340387788313174e-08, + "loss": 0.5647, + "step": 13851 + }, + { + "epoch": 0.961810859602833, + "grad_norm": 3.7656554880294215, + "learning_rate": 3.820152483523187e-08, + "loss": 0.3255, + "step": 13852 + }, + { + "epoch": 0.961880294403555, + "grad_norm": 6.9087627607817215, + "learning_rate": 3.806291284430275e-08, + "loss": 0.603, + "step": 13853 + }, + { + "epoch": 0.9619497292042772, + "grad_norm": 3.7872487150111525, + "learning_rate": 3.792455182253685e-08, + "loss": 0.4127, + "step": 13854 + }, + { + "epoch": 0.9620191640049993, + "grad_norm": 7.69238565634778, + "learning_rate": 3.778644177693247e-08, + "loss": 0.6673, + "step": 13855 + }, + { + "epoch": 0.9620885988057214, + "grad_norm": 3.8270105228991014, + "learning_rate": 3.7648582714474026e-08, + "loss": 0.3714, + "step": 13856 + }, + { + "epoch": 0.9621580336064436, + "grad_norm": 3.283479712176172, + "learning_rate": 3.7510974642135375e-08, + "loss": 0.227, + "step": 13857 + }, + { + 
"epoch": 0.9622274684071657, + "grad_norm": 2.9699510666847657, + "learning_rate": 3.737361756687596e-08, + "loss": 0.3364, + "step": 13858 + }, + { + "epoch": 0.9622969032078877, + "grad_norm": 4.708008899051446, + "learning_rate": 3.7236511495643557e-08, + "loss": 0.513, + "step": 13859 + }, + { + "epoch": 0.9623663380086099, + "grad_norm": 3.935371065598654, + "learning_rate": 3.709965643537261e-08, + "loss": 0.3193, + "step": 13860 + }, + { + "epoch": 0.962435772809332, + "grad_norm": 4.575474964215627, + "learning_rate": 3.6963052392985364e-08, + "loss": 0.5336, + "step": 13861 + }, + { + "epoch": 0.9625052076100542, + "grad_norm": 3.7754618598643965, + "learning_rate": 3.682669937539074e-08, + "loss": 0.4019, + "step": 13862 + }, + { + "epoch": 0.9625746424107763, + "grad_norm": 3.0647322849142227, + "learning_rate": 3.669059738948544e-08, + "loss": 0.2647, + "step": 13863 + }, + { + "epoch": 0.9626440772114984, + "grad_norm": 4.9964011116632605, + "learning_rate": 3.6554746442152843e-08, + "loss": 0.5095, + "step": 13864 + }, + { + "epoch": 0.9627135120122206, + "grad_norm": 6.0119350438537795, + "learning_rate": 3.641914654026524e-08, + "loss": 0.5313, + "step": 13865 + }, + { + "epoch": 0.9627829468129426, + "grad_norm": 4.191329280499595, + "learning_rate": 3.6283797690680466e-08, + "loss": 0.3224, + "step": 13866 + }, + { + "epoch": 0.9628523816136648, + "grad_norm": 3.802544345205221, + "learning_rate": 3.614869990024417e-08, + "loss": 0.4551, + "step": 13867 + }, + { + "epoch": 0.9629218164143869, + "grad_norm": 4.182819548930081, + "learning_rate": 3.601385317578976e-08, + "loss": 0.2626, + "step": 13868 + }, + { + "epoch": 0.962991251215109, + "grad_norm": 5.381595996103115, + "learning_rate": 3.58792575241379e-08, + "loss": 0.4892, + "step": 13869 + }, + { + "epoch": 0.9630606860158312, + "grad_norm": 4.179363144423296, + "learning_rate": 3.5744912952095366e-08, + "loss": 0.441, + "step": 13870 + }, + { + "epoch": 0.9631301208165532, + "grad_norm": 3.09670662235687, + "learning_rate": 3.5610819466457834e-08, + "loss": 0.2507, + "step": 13871 + }, + { + "epoch": 0.9631995556172753, + "grad_norm": 4.087113320364595, + "learning_rate": 3.5476977074007654e-08, + "loss": 0.378, + "step": 13872 + }, + { + "epoch": 0.9632689904179975, + "grad_norm": 4.511752164745252, + "learning_rate": 3.5343385781513865e-08, + "loss": 0.5731, + "step": 13873 + }, + { + "epoch": 0.9633384252187196, + "grad_norm": 3.216509604128841, + "learning_rate": 3.521004559573438e-08, + "loss": 0.3191, + "step": 13874 + }, + { + "epoch": 0.9634078600194418, + "grad_norm": 3.3513061953819943, + "learning_rate": 3.507695652341214e-08, + "loss": 0.2203, + "step": 13875 + }, + { + "epoch": 0.9634772948201639, + "grad_norm": 4.513362211202487, + "learning_rate": 3.494411857127955e-08, + "loss": 0.6145, + "step": 13876 + }, + { + "epoch": 0.9635467296208859, + "grad_norm": 4.5578955948467215, + "learning_rate": 3.481153174605512e-08, + "loss": 0.5258, + "step": 13877 + }, + { + "epoch": 0.9636161644216081, + "grad_norm": 3.202541192684505, + "learning_rate": 3.467919605444458e-08, + "loss": 0.2442, + "step": 13878 + }, + { + "epoch": 0.9636855992223302, + "grad_norm": 3.1658711881299553, + "learning_rate": 3.454711150314205e-08, + "loss": 0.2255, + "step": 13879 + }, + { + "epoch": 0.9637550340230524, + "grad_norm": 3.515793754764216, + "learning_rate": 3.441527809882772e-08, + "loss": 0.1941, + "step": 13880 + }, + { + "epoch": 0.9638244688237745, + "grad_norm": 3.697013486145506, + "learning_rate": 
3.4283695848169596e-08, + "loss": 0.3346, + "step": 13881 + }, + { + "epoch": 0.9638939036244966, + "grad_norm": 4.799943640311567, + "learning_rate": 3.415236475782291e-08, + "loss": 0.263, + "step": 13882 + }, + { + "epoch": 0.9639633384252188, + "grad_norm": 3.291851007490705, + "learning_rate": 3.402128483443068e-08, + "loss": 0.2072, + "step": 13883 + }, + { + "epoch": 0.9640327732259408, + "grad_norm": 2.839818722319311, + "learning_rate": 3.38904560846226e-08, + "loss": 0.2364, + "step": 13884 + }, + { + "epoch": 0.9641022080266629, + "grad_norm": 3.9703807204503305, + "learning_rate": 3.375987851501506e-08, + "loss": 0.3083, + "step": 13885 + }, + { + "epoch": 0.9641716428273851, + "grad_norm": 4.624207629727348, + "learning_rate": 3.362955213221386e-08, + "loss": 0.5569, + "step": 13886 + }, + { + "epoch": 0.9642410776281072, + "grad_norm": 3.4817969193369405, + "learning_rate": 3.349947694280986e-08, + "loss": 0.2156, + "step": 13887 + }, + { + "epoch": 0.9643105124288294, + "grad_norm": 3.691874359185989, + "learning_rate": 3.336965295338224e-08, + "loss": 0.4888, + "step": 13888 + }, + { + "epoch": 0.9643799472295514, + "grad_norm": 2.9049687006134506, + "learning_rate": 3.32400801704974e-08, + "loss": 0.2363, + "step": 13889 + }, + { + "epoch": 0.9644493820302735, + "grad_norm": 3.363611821759633, + "learning_rate": 3.311075860070956e-08, + "loss": 0.393, + "step": 13890 + }, + { + "epoch": 0.9645188168309957, + "grad_norm": 3.7991262378174597, + "learning_rate": 3.298168825055848e-08, + "loss": 0.3684, + "step": 13891 + }, + { + "epoch": 0.9645882516317178, + "grad_norm": 5.125278126407595, + "learning_rate": 3.2852869126573375e-08, + "loss": 0.7835, + "step": 13892 + }, + { + "epoch": 0.96465768643244, + "grad_norm": 3.940684579465366, + "learning_rate": 3.27243012352696e-08, + "loss": 0.4326, + "step": 13893 + }, + { + "epoch": 0.9647271212331621, + "grad_norm": 3.5985701664257577, + "learning_rate": 3.259598458314917e-08, + "loss": 0.4241, + "step": 13894 + }, + { + "epoch": 0.9647965560338841, + "grad_norm": 3.2520534912901007, + "learning_rate": 3.2467919176703554e-08, + "loss": 0.35, + "step": 13895 + }, + { + "epoch": 0.9648659908346063, + "grad_norm": 3.478673188989901, + "learning_rate": 3.234010502240925e-08, + "loss": 0.2868, + "step": 13896 + }, + { + "epoch": 0.9649354256353284, + "grad_norm": 3.625263396999371, + "learning_rate": 3.221254212673108e-08, + "loss": 0.3143, + "step": 13897 + }, + { + "epoch": 0.9650048604360505, + "grad_norm": 4.651318088961176, + "learning_rate": 3.2085230496121113e-08, + "loss": 0.5551, + "step": 13898 + }, + { + "epoch": 0.9650742952367727, + "grad_norm": 3.9267170889251823, + "learning_rate": 3.195817013701863e-08, + "loss": 0.2931, + "step": 13899 + }, + { + "epoch": 0.9651437300374948, + "grad_norm": 3.9273372507117843, + "learning_rate": 3.183136105584961e-08, + "loss": 0.4554, + "step": 13900 + }, + { + "epoch": 0.965213164838217, + "grad_norm": 3.5188110258859866, + "learning_rate": 3.170480325902947e-08, + "loss": 0.3658, + "step": 13901 + }, + { + "epoch": 0.965282599638939, + "grad_norm": 3.746477851652635, + "learning_rate": 3.157849675295754e-08, + "loss": 0.3493, + "step": 13902 + }, + { + "epoch": 0.9653520344396611, + "grad_norm": 3.937142812994007, + "learning_rate": 3.145244154402316e-08, + "loss": 0.3912, + "step": 13903 + }, + { + "epoch": 0.9654214692403833, + "grad_norm": 4.259630467268845, + "learning_rate": 3.132663763860233e-08, + "loss": 0.5141, + "step": 13904 + }, + { + "epoch": 0.9654909040411054, + 
"grad_norm": 3.824007358663462, + "learning_rate": 3.120108504305774e-08, + "loss": 0.4208, + "step": 13905 + }, + { + "epoch": 0.9655603388418276, + "grad_norm": 7.103374291264321, + "learning_rate": 3.1075783763739876e-08, + "loss": 0.6728, + "step": 13906 + }, + { + "epoch": 0.9656297736425496, + "grad_norm": 3.716147070598669, + "learning_rate": 3.095073380698588e-08, + "loss": 0.4646, + "step": 13907 + }, + { + "epoch": 0.9656992084432717, + "grad_norm": 4.21908422581669, + "learning_rate": 3.082593517912069e-08, + "loss": 0.4748, + "step": 13908 + }, + { + "epoch": 0.9657686432439939, + "grad_norm": 3.5916909202712373, + "learning_rate": 3.070138788645704e-08, + "loss": 0.4576, + "step": 13909 + }, + { + "epoch": 0.965838078044716, + "grad_norm": 4.029572919934993, + "learning_rate": 3.0577091935294345e-08, + "loss": 0.6416, + "step": 13910 + }, + { + "epoch": 0.9659075128454382, + "grad_norm": 5.220624757156848, + "learning_rate": 3.045304733191812e-08, + "loss": 0.5375, + "step": 13911 + }, + { + "epoch": 0.9659769476461603, + "grad_norm": 2.1084965171159125, + "learning_rate": 3.0329254082604454e-08, + "loss": 0.084, + "step": 13912 + }, + { + "epoch": 0.9660463824468823, + "grad_norm": 3.4264187075226173, + "learning_rate": 3.020571219361279e-08, + "loss": 0.3971, + "step": 13913 + }, + { + "epoch": 0.9661158172476045, + "grad_norm": 2.8629674011372024, + "learning_rate": 3.0082421671192576e-08, + "loss": 0.2313, + "step": 13914 + }, + { + "epoch": 0.9661852520483266, + "grad_norm": 4.098420077202031, + "learning_rate": 2.9959382521579926e-08, + "loss": 0.4725, + "step": 13915 + }, + { + "epoch": 0.9662546868490487, + "grad_norm": 3.526280752932261, + "learning_rate": 2.983659475099765e-08, + "loss": 0.4076, + "step": 13916 + }, + { + "epoch": 0.9663241216497709, + "grad_norm": 4.028203215288937, + "learning_rate": 2.9714058365655775e-08, + "loss": 0.5523, + "step": 13917 + }, + { + "epoch": 0.966393556450493, + "grad_norm": 4.108010214760812, + "learning_rate": 2.959177337175323e-08, + "loss": 0.4114, + "step": 13918 + }, + { + "epoch": 0.9664629912512152, + "grad_norm": 4.635602410599053, + "learning_rate": 2.946973977547396e-08, + "loss": 0.3416, + "step": 13919 + }, + { + "epoch": 0.9665324260519372, + "grad_norm": 3.6684954895781363, + "learning_rate": 2.9347957582990806e-08, + "loss": 0.3796, + "step": 13920 + }, + { + "epoch": 0.9666018608526593, + "grad_norm": 3.499867262010852, + "learning_rate": 2.922642680046328e-08, + "loss": 0.2188, + "step": 13921 + }, + { + "epoch": 0.9666712956533815, + "grad_norm": 5.50023340497982, + "learning_rate": 2.9105147434038693e-08, + "loss": 0.5761, + "step": 13922 + }, + { + "epoch": 0.9667407304541036, + "grad_norm": 3.5674508432278267, + "learning_rate": 2.898411948985047e-08, + "loss": 0.4676, + "step": 13923 + }, + { + "epoch": 0.9668101652548258, + "grad_norm": 2.87132609284704, + "learning_rate": 2.886334297402038e-08, + "loss": 0.2831, + "step": 13924 + }, + { + "epoch": 0.9668796000555478, + "grad_norm": 3.5372852542334465, + "learning_rate": 2.8742817892657427e-08, + "loss": 0.3986, + "step": 13925 + }, + { + "epoch": 0.9669490348562699, + "grad_norm": 3.869300358259012, + "learning_rate": 2.862254425185673e-08, + "loss": 0.5334, + "step": 13926 + }, + { + "epoch": 0.9670184696569921, + "grad_norm": 3.475380526051635, + "learning_rate": 2.8502522057702876e-08, + "loss": 0.3608, + "step": 13927 + }, + { + "epoch": 0.9670879044577142, + "grad_norm": 4.050096232006329, + "learning_rate": 2.8382751316266e-08, + "loss": 0.3324, + 
"step": 13928 + }, + { + "epoch": 0.9671573392584363, + "grad_norm": 3.790407215323487, + "learning_rate": 2.8263232033602926e-08, + "loss": 0.2709, + "step": 13929 + }, + { + "epoch": 0.9672267740591585, + "grad_norm": 4.753498772660416, + "learning_rate": 2.814396421576049e-08, + "loss": 0.5267, + "step": 13930 + }, + { + "epoch": 0.9672962088598805, + "grad_norm": 4.945551648944414, + "learning_rate": 2.802494786876997e-08, + "loss": 0.5693, + "step": 13931 + }, + { + "epoch": 0.9673656436606027, + "grad_norm": 3.178679104435819, + "learning_rate": 2.7906182998650998e-08, + "loss": 0.2418, + "step": 13932 + }, + { + "epoch": 0.9674350784613248, + "grad_norm": 4.115920893131331, + "learning_rate": 2.7787669611412106e-08, + "loss": 0.2757, + "step": 13933 + }, + { + "epoch": 0.9675045132620469, + "grad_norm": 4.068440622896221, + "learning_rate": 2.766940771304516e-08, + "loss": 0.2915, + "step": 13934 + }, + { + "epoch": 0.9675739480627691, + "grad_norm": 4.514547329071096, + "learning_rate": 2.7551397309533712e-08, + "loss": 0.6216, + "step": 13935 + }, + { + "epoch": 0.9676433828634912, + "grad_norm": 2.714369859630633, + "learning_rate": 2.7433638406845763e-08, + "loss": 0.1784, + "step": 13936 + }, + { + "epoch": 0.9677128176642134, + "grad_norm": 5.4820121104677035, + "learning_rate": 2.7316131010937664e-08, + "loss": 0.5198, + "step": 13937 + }, + { + "epoch": 0.9677822524649354, + "grad_norm": 3.3829991763733127, + "learning_rate": 2.7198875127752987e-08, + "loss": 0.3666, + "step": 13938 + }, + { + "epoch": 0.9678516872656575, + "grad_norm": 4.45692373172024, + "learning_rate": 2.708187076322144e-08, + "loss": 0.6309, + "step": 13939 + }, + { + "epoch": 0.9679211220663797, + "grad_norm": 4.379398574176032, + "learning_rate": 2.6965117923262175e-08, + "loss": 0.4919, + "step": 13940 + }, + { + "epoch": 0.9679905568671018, + "grad_norm": 5.470372126905691, + "learning_rate": 2.6848616613779355e-08, + "loss": 0.5843, + "step": 13941 + }, + { + "epoch": 0.9680599916678239, + "grad_norm": 4.03698712234339, + "learning_rate": 2.6732366840666047e-08, + "loss": 0.4028, + "step": 13942 + }, + { + "epoch": 0.968129426468546, + "grad_norm": 3.3348693931461693, + "learning_rate": 2.661636860980199e-08, + "loss": 0.3513, + "step": 13943 + }, + { + "epoch": 0.9681988612692681, + "grad_norm": 3.972175910683446, + "learning_rate": 2.6500621927054716e-08, + "loss": 0.3687, + "step": 13944 + }, + { + "epoch": 0.9682682960699903, + "grad_norm": 4.860776908972846, + "learning_rate": 2.638512679827787e-08, + "loss": 0.5181, + "step": 13945 + }, + { + "epoch": 0.9683377308707124, + "grad_norm": 3.4581590627111676, + "learning_rate": 2.6269883229312897e-08, + "loss": 0.295, + "step": 13946 + }, + { + "epoch": 0.9684071656714345, + "grad_norm": 3.0561549584548975, + "learning_rate": 2.615489122599013e-08, + "loss": 0.2337, + "step": 13947 + }, + { + "epoch": 0.9684766004721567, + "grad_norm": 3.299673438627948, + "learning_rate": 2.604015079412381e-08, + "loss": 0.3189, + "step": 13948 + }, + { + "epoch": 0.9685460352728787, + "grad_norm": 3.648966254590199, + "learning_rate": 2.5925661939518176e-08, + "loss": 0.3434, + "step": 13949 + }, + { + "epoch": 0.9686154700736009, + "grad_norm": 3.384671444658221, + "learning_rate": 2.581142466796416e-08, + "loss": 0.3467, + "step": 13950 + }, + { + "epoch": 0.968684904874323, + "grad_norm": 4.618378282876279, + "learning_rate": 2.5697438985239908e-08, + "loss": 0.5661, + "step": 13951 + }, + { + "epoch": 0.9687543396750451, + "grad_norm": 3.399563428698885, + 
"learning_rate": 2.558370489711026e-08, + "loss": 0.3341, + "step": 13952 + }, + { + "epoch": 0.9688237744757673, + "grad_norm": 5.039923006690745, + "learning_rate": 2.5470222409327282e-08, + "loss": 0.5195, + "step": 13953 + }, + { + "epoch": 0.9688932092764894, + "grad_norm": 5.5412437171272835, + "learning_rate": 2.5356991527631934e-08, + "loss": 0.5078, + "step": 13954 + }, + { + "epoch": 0.9689626440772114, + "grad_norm": 4.684621506584292, + "learning_rate": 2.524401225775075e-08, + "loss": 0.5066, + "step": 13955 + }, + { + "epoch": 0.9690320788779336, + "grad_norm": 4.354608301912683, + "learning_rate": 2.5131284605398597e-08, + "loss": 0.5612, + "step": 13956 + }, + { + "epoch": 0.9691015136786557, + "grad_norm": 2.878837585698226, + "learning_rate": 2.501880857627592e-08, + "loss": 0.1896, + "step": 13957 + }, + { + "epoch": 0.9691709484793779, + "grad_norm": 4.692310733848334, + "learning_rate": 2.4906584176072612e-08, + "loss": 0.6131, + "step": 13958 + }, + { + "epoch": 0.9692403832801, + "grad_norm": 4.542866085970759, + "learning_rate": 2.4794611410464132e-08, + "loss": 0.3039, + "step": 13959 + }, + { + "epoch": 0.9693098180808221, + "grad_norm": 4.400934802127237, + "learning_rate": 2.4682890285114835e-08, + "loss": 0.5373, + "step": 13960 + }, + { + "epoch": 0.9693792528815443, + "grad_norm": 4.139093443720174, + "learning_rate": 2.457142080567465e-08, + "loss": 0.4823, + "step": 13961 + }, + { + "epoch": 0.9694486876822663, + "grad_norm": 3.096100549334194, + "learning_rate": 2.44602029777824e-08, + "loss": 0.2249, + "step": 13962 + }, + { + "epoch": 0.9695181224829885, + "grad_norm": 4.129648384323139, + "learning_rate": 2.434923680706247e-08, + "loss": 0.3959, + "step": 13963 + }, + { + "epoch": 0.9695875572837106, + "grad_norm": 3.5392475179846046, + "learning_rate": 2.4238522299128153e-08, + "loss": 0.3351, + "step": 13964 + }, + { + "epoch": 0.9696569920844327, + "grad_norm": 4.5226004228687735, + "learning_rate": 2.4128059459578857e-08, + "loss": 0.6346, + "step": 13965 + }, + { + "epoch": 0.9697264268851549, + "grad_norm": 4.413413076664162, + "learning_rate": 2.401784829400178e-08, + "loss": 0.3915, + "step": 13966 + }, + { + "epoch": 0.969795861685877, + "grad_norm": 3.383218116125762, + "learning_rate": 2.390788880797079e-08, + "loss": 0.4067, + "step": 13967 + }, + { + "epoch": 0.9698652964865991, + "grad_norm": 2.589696996819408, + "learning_rate": 2.379818100704867e-08, + "loss": 0.1912, + "step": 13968 + }, + { + "epoch": 0.9699347312873212, + "grad_norm": 4.407675251681019, + "learning_rate": 2.36887248967832e-08, + "loss": 0.406, + "step": 13969 + }, + { + "epoch": 0.9700041660880433, + "grad_norm": 4.516638036766161, + "learning_rate": 2.3579520482710504e-08, + "loss": 0.5152, + "step": 13970 + }, + { + "epoch": 0.9700736008887655, + "grad_norm": 4.604787964476955, + "learning_rate": 2.3470567770355058e-08, + "loss": 0.7047, + "step": 13971 + }, + { + "epoch": 0.9701430356894876, + "grad_norm": 3.9292022770929997, + "learning_rate": 2.3361866765226894e-08, + "loss": 0.4295, + "step": 13972 + }, + { + "epoch": 0.9702124704902096, + "grad_norm": 4.628537408208618, + "learning_rate": 2.3253417472823837e-08, + "loss": 0.4073, + "step": 13973 + }, + { + "epoch": 0.9702819052909318, + "grad_norm": 3.7994650360175917, + "learning_rate": 2.3145219898631498e-08, + "loss": 0.4849, + "step": 13974 + }, + { + "epoch": 0.9703513400916539, + "grad_norm": 5.082416493744106, + "learning_rate": 2.3037274048122173e-08, + "loss": 0.6425, + "step": 13975 + }, + { + 
"epoch": 0.9704207748923761, + "grad_norm": 4.4076748883366355, + "learning_rate": 2.292957992675593e-08, + "loss": 0.4873, + "step": 13976 + }, + { + "epoch": 0.9704902096930982, + "grad_norm": 3.7700442046890354, + "learning_rate": 2.2822137539979528e-08, + "loss": 0.2949, + "step": 13977 + }, + { + "epoch": 0.9705596444938203, + "grad_norm": 4.593228253565943, + "learning_rate": 2.2714946893226952e-08, + "loss": 0.3469, + "step": 13978 + }, + { + "epoch": 0.9706290792945425, + "grad_norm": 4.036183114120982, + "learning_rate": 2.2608007991920532e-08, + "loss": 0.402, + "step": 13979 + }, + { + "epoch": 0.9706985140952645, + "grad_norm": 5.216959280318881, + "learning_rate": 2.2501320841468722e-08, + "loss": 0.4756, + "step": 13980 + }, + { + "epoch": 0.9707679488959867, + "grad_norm": 3.5811915559320036, + "learning_rate": 2.2394885447267757e-08, + "loss": 0.4073, + "step": 13981 + }, + { + "epoch": 0.9708373836967088, + "grad_norm": 3.962072468992664, + "learning_rate": 2.2288701814701107e-08, + "loss": 0.3681, + "step": 13982 + }, + { + "epoch": 0.9709068184974309, + "grad_norm": 3.857228661279531, + "learning_rate": 2.2182769949138928e-08, + "loss": 0.4605, + "step": 13983 + }, + { + "epoch": 0.9709762532981531, + "grad_norm": 5.4152422344940705, + "learning_rate": 2.2077089855939705e-08, + "loss": 0.388, + "step": 13984 + }, + { + "epoch": 0.9710456880988751, + "grad_norm": 4.665761789217447, + "learning_rate": 2.1971661540448053e-08, + "loss": 0.5015, + "step": 13985 + }, + { + "epoch": 0.9711151228995972, + "grad_norm": 3.4001502341896264, + "learning_rate": 2.186648500799693e-08, + "loss": 0.2066, + "step": 13986 + }, + { + "epoch": 0.9711845577003194, + "grad_norm": 3.597338897756042, + "learning_rate": 2.1761560263905968e-08, + "loss": 0.4317, + "step": 13987 + }, + { + "epoch": 0.9712539925010415, + "grad_norm": 4.142210765077964, + "learning_rate": 2.1656887313482034e-08, + "loss": 0.498, + "step": 13988 + }, + { + "epoch": 0.9713234273017637, + "grad_norm": 4.25314399287038, + "learning_rate": 2.1552466162019224e-08, + "loss": 0.5422, + "step": 13989 + }, + { + "epoch": 0.9713928621024858, + "grad_norm": 3.9778184826208363, + "learning_rate": 2.1448296814798873e-08, + "loss": 0.4588, + "step": 13990 + }, + { + "epoch": 0.9714622969032078, + "grad_norm": 4.1594816750134305, + "learning_rate": 2.1344379277090653e-08, + "loss": 0.4741, + "step": 13991 + }, + { + "epoch": 0.97153173170393, + "grad_norm": 3.7406261776150664, + "learning_rate": 2.124071355414925e-08, + "loss": 0.2361, + "step": 13992 + }, + { + "epoch": 0.9716011665046521, + "grad_norm": 5.712416804111109, + "learning_rate": 2.1137299651219357e-08, + "loss": 0.3263, + "step": 13993 + }, + { + "epoch": 0.9716706013053743, + "grad_norm": 3.2608341873698468, + "learning_rate": 2.1034137573531232e-08, + "loss": 0.1761, + "step": 13994 + }, + { + "epoch": 0.9717400361060964, + "grad_norm": 3.99311135850991, + "learning_rate": 2.0931227326301262e-08, + "loss": 0.4674, + "step": 13995 + }, + { + "epoch": 0.9718094709068185, + "grad_norm": 4.505905708922432, + "learning_rate": 2.0828568914736392e-08, + "loss": 0.5078, + "step": 13996 + }, + { + "epoch": 0.9718789057075407, + "grad_norm": 2.8261682407864397, + "learning_rate": 2.0726162344028022e-08, + "loss": 0.1952, + "step": 13997 + }, + { + "epoch": 0.9719483405082627, + "grad_norm": 4.0033332747957315, + "learning_rate": 2.0624007619355903e-08, + "loss": 0.2589, + "step": 13998 + }, + { + "epoch": 0.9720177753089848, + "grad_norm": 9.477326408079183, + 
"learning_rate": 2.0522104745887006e-08, + "loss": 0.4496, + "step": 13999 + }, + { + "epoch": 0.972087210109707, + "grad_norm": 5.021280389494722, + "learning_rate": 2.042045372877499e-08, + "loss": 0.3323, + "step": 14000 + }, + { + "epoch": 0.9721566449104291, + "grad_norm": 4.302617182703042, + "learning_rate": 2.0319054573162412e-08, + "loss": 0.2489, + "step": 14001 + }, + { + "epoch": 0.9722260797111513, + "grad_norm": 3.610580336626151, + "learning_rate": 2.0217907284176274e-08, + "loss": 0.219, + "step": 14002 + }, + { + "epoch": 0.9722955145118733, + "grad_norm": 4.345118384187324, + "learning_rate": 2.0117011866934156e-08, + "loss": 0.5438, + "step": 14003 + }, + { + "epoch": 0.9723649493125954, + "grad_norm": 3.8227164733982075, + "learning_rate": 2.0016368326538084e-08, + "loss": 0.3896, + "step": 14004 + }, + { + "epoch": 0.9724343841133176, + "grad_norm": 1.7397228874314912, + "learning_rate": 1.9915976668078983e-08, + "loss": 0.0844, + "step": 14005 + }, + { + "epoch": 0.9725038189140397, + "grad_norm": 4.553455310483831, + "learning_rate": 1.9815836896634466e-08, + "loss": 0.6173, + "step": 14006 + }, + { + "epoch": 0.9725732537147619, + "grad_norm": 4.1421629355474785, + "learning_rate": 1.971594901726881e-08, + "loss": 0.4996, + "step": 14007 + }, + { + "epoch": 0.972642688515484, + "grad_norm": 4.355582354489459, + "learning_rate": 1.961631303503575e-08, + "loss": 0.7239, + "step": 14008 + }, + { + "epoch": 0.972712123316206, + "grad_norm": 5.153257124962559, + "learning_rate": 1.951692895497348e-08, + "loss": 0.7, + "step": 14009 + }, + { + "epoch": 0.9727815581169282, + "grad_norm": 3.996943419966421, + "learning_rate": 1.941779678210909e-08, + "loss": 0.4551, + "step": 14010 + }, + { + "epoch": 0.9728509929176503, + "grad_norm": 4.153744690478692, + "learning_rate": 1.9318916521456897e-08, + "loss": 0.4887, + "step": 14011 + }, + { + "epoch": 0.9729204277183724, + "grad_norm": 3.962604828600932, + "learning_rate": 1.922028817801791e-08, + "loss": 0.4199, + "step": 14012 + }, + { + "epoch": 0.9729898625190946, + "grad_norm": 4.870594754406823, + "learning_rate": 1.9121911756780355e-08, + "loss": 0.4947, + "step": 14013 + }, + { + "epoch": 0.9730592973198167, + "grad_norm": 4.281700257933232, + "learning_rate": 1.902378726272025e-08, + "loss": 0.3289, + "step": 14014 + }, + { + "epoch": 0.9731287321205389, + "grad_norm": 3.558708207213513, + "learning_rate": 1.892591470080085e-08, + "loss": 0.3555, + "step": 14015 + }, + { + "epoch": 0.9731981669212609, + "grad_norm": 3.4708083130096505, + "learning_rate": 1.8828294075972085e-08, + "loss": 0.2805, + "step": 14016 + }, + { + "epoch": 0.973267601721983, + "grad_norm": 3.6734092229500144, + "learning_rate": 1.8730925393171674e-08, + "loss": 0.2815, + "step": 14017 + }, + { + "epoch": 0.9733370365227052, + "grad_norm": 3.0859388753168786, + "learning_rate": 1.8633808657324558e-08, + "loss": 0.2198, + "step": 14018 + }, + { + "epoch": 0.9734064713234273, + "grad_norm": 3.924874347953349, + "learning_rate": 1.8536943873342374e-08, + "loss": 0.5066, + "step": 14019 + }, + { + "epoch": 0.9734759061241495, + "grad_norm": 2.799261342492207, + "learning_rate": 1.8440331046124525e-08, + "loss": 0.1969, + "step": 14020 + }, + { + "epoch": 0.9735453409248715, + "grad_norm": 5.383938386457754, + "learning_rate": 1.8343970180557668e-08, + "loss": 0.5267, + "step": 14021 + }, + { + "epoch": 0.9736147757255936, + "grad_norm": 2.6840559973776013, + "learning_rate": 1.8247861281516232e-08, + "loss": 0.2126, + "step": 14022 + }, + { + 
"epoch": 0.9736842105263158, + "grad_norm": 4.4850777318586585, + "learning_rate": 1.815200435386022e-08, + "loss": 0.5508, + "step": 14023 + }, + { + "epoch": 0.9737536453270379, + "grad_norm": 4.594805184212075, + "learning_rate": 1.805639940243853e-08, + "loss": 0.6097, + "step": 14024 + }, + { + "epoch": 0.9738230801277601, + "grad_norm": 2.7276943312260005, + "learning_rate": 1.7961046432086736e-08, + "loss": 0.172, + "step": 14025 + }, + { + "epoch": 0.9738925149284822, + "grad_norm": 3.6331541129600162, + "learning_rate": 1.7865945447628208e-08, + "loss": 0.3603, + "step": 14026 + }, + { + "epoch": 0.9739619497292042, + "grad_norm": 4.995939059703874, + "learning_rate": 1.7771096453871873e-08, + "loss": 0.5302, + "step": 14027 + }, + { + "epoch": 0.9740313845299264, + "grad_norm": 4.931120298441444, + "learning_rate": 1.7676499455615557e-08, + "loss": 0.6647, + "step": 14028 + }, + { + "epoch": 0.9741008193306485, + "grad_norm": 3.702582140068964, + "learning_rate": 1.758215445764433e-08, + "loss": 0.314, + "step": 14029 + }, + { + "epoch": 0.9741702541313706, + "grad_norm": 3.7634410155971256, + "learning_rate": 1.748806146472992e-08, + "loss": 0.3839, + "step": 14030 + }, + { + "epoch": 0.9742396889320928, + "grad_norm": 3.2031003032610417, + "learning_rate": 1.739422048163131e-08, + "loss": 0.3455, + "step": 14031 + }, + { + "epoch": 0.9743091237328149, + "grad_norm": 4.242435519955967, + "learning_rate": 1.730063151309469e-08, + "loss": 0.3657, + "step": 14032 + }, + { + "epoch": 0.974378558533537, + "grad_norm": 2.776620079247058, + "learning_rate": 1.7207294563853505e-08, + "loss": 0.2832, + "step": 14033 + }, + { + "epoch": 0.9744479933342591, + "grad_norm": 4.650814779014532, + "learning_rate": 1.7114209638628975e-08, + "loss": 0.339, + "step": 14034 + }, + { + "epoch": 0.9745174281349812, + "grad_norm": 2.997803483146635, + "learning_rate": 1.702137674212956e-08, + "loss": 0.2789, + "step": 14035 + }, + { + "epoch": 0.9745868629357034, + "grad_norm": 2.9351497361518826, + "learning_rate": 1.692879587904983e-08, + "loss": 0.2489, + "step": 14036 + }, + { + "epoch": 0.9746562977364255, + "grad_norm": 3.8783029495589414, + "learning_rate": 1.6836467054073825e-08, + "loss": 0.3264, + "step": 14037 + }, + { + "epoch": 0.9747257325371477, + "grad_norm": 5.293899694224254, + "learning_rate": 1.674439027186947e-08, + "loss": 0.6804, + "step": 14038 + }, + { + "epoch": 0.9747951673378698, + "grad_norm": 4.468216425530285, + "learning_rate": 1.6652565537095268e-08, + "loss": 0.6283, + "step": 14039 + }, + { + "epoch": 0.9748646021385918, + "grad_norm": 5.453208909883312, + "learning_rate": 1.6560992854395274e-08, + "loss": 0.7144, + "step": 14040 + }, + { + "epoch": 0.974934036939314, + "grad_norm": 3.8511193064651463, + "learning_rate": 1.6469672228401345e-08, + "loss": 0.4038, + "step": 14041 + }, + { + "epoch": 0.9750034717400361, + "grad_norm": 4.650049111742977, + "learning_rate": 1.6378603663732006e-08, + "loss": 0.4701, + "step": 14042 + }, + { + "epoch": 0.9750729065407582, + "grad_norm": 3.366645068576717, + "learning_rate": 1.628778716499302e-08, + "loss": 0.2936, + "step": 14043 + }, + { + "epoch": 0.9751423413414804, + "grad_norm": 3.4702266533829884, + "learning_rate": 1.619722273677904e-08, + "loss": 0.2644, + "step": 14044 + }, + { + "epoch": 0.9752117761422024, + "grad_norm": 5.199502611004693, + "learning_rate": 1.610691038366974e-08, + "loss": 0.6135, + "step": 14045 + }, + { + "epoch": 0.9752812109429246, + "grad_norm": 5.093024797791528, + "learning_rate": 
1.6016850110233128e-08, + "loss": 0.6644, + "step": 14046 + }, + { + "epoch": 0.9753506457436467, + "grad_norm": 4.637626062284038, + "learning_rate": 1.5927041921024456e-08, + "loss": 0.5238, + "step": 14047 + }, + { + "epoch": 0.9754200805443688, + "grad_norm": 4.245040285105386, + "learning_rate": 1.5837485820586197e-08, + "loss": 0.3344, + "step": 14048 + }, + { + "epoch": 0.975489515345091, + "grad_norm": 3.806489248533676, + "learning_rate": 1.574818181344806e-08, + "loss": 0.3949, + "step": 14049 + }, + { + "epoch": 0.9755589501458131, + "grad_norm": 4.715285661589339, + "learning_rate": 1.565912990412699e-08, + "loss": 0.5764, + "step": 14050 + }, + { + "epoch": 0.9756283849465353, + "grad_norm": 4.132531433645996, + "learning_rate": 1.5570330097126607e-08, + "loss": 0.4733, + "step": 14051 + }, + { + "epoch": 0.9756978197472573, + "grad_norm": 3.2754803373674473, + "learning_rate": 1.5481782396938872e-08, + "loss": 0.3494, + "step": 14052 + }, + { + "epoch": 0.9757672545479794, + "grad_norm": 3.3347359764122073, + "learning_rate": 1.5393486808041867e-08, + "loss": 0.3196, + "step": 14053 + }, + { + "epoch": 0.9758366893487016, + "grad_norm": 4.447169383680507, + "learning_rate": 1.5305443334902583e-08, + "loss": 0.4422, + "step": 14054 + }, + { + "epoch": 0.9759061241494237, + "grad_norm": 3.969648705020858, + "learning_rate": 1.5217651981973004e-08, + "loss": 0.3482, + "step": 14055 + }, + { + "epoch": 0.9759755589501458, + "grad_norm": 2.1921582542511713, + "learning_rate": 1.513011275369347e-08, + "loss": 0.1054, + "step": 14056 + }, + { + "epoch": 0.976044993750868, + "grad_norm": 3.8832505255716, + "learning_rate": 1.5042825654492665e-08, + "loss": 0.3924, + "step": 14057 + }, + { + "epoch": 0.97611442855159, + "grad_norm": 3.289537179477533, + "learning_rate": 1.4955790688784833e-08, + "loss": 0.3595, + "step": 14058 + }, + { + "epoch": 0.9761838633523122, + "grad_norm": 3.0644739477825396, + "learning_rate": 1.4869007860972007e-08, + "loss": 0.2068, + "step": 14059 + }, + { + "epoch": 0.9762532981530343, + "grad_norm": 4.096612005375542, + "learning_rate": 1.4782477175443456e-08, + "loss": 0.3175, + "step": 14060 + }, + { + "epoch": 0.9763227329537564, + "grad_norm": 3.667531207697449, + "learning_rate": 1.4696198636576231e-08, + "loss": 0.4892, + "step": 14061 + }, + { + "epoch": 0.9763921677544786, + "grad_norm": 3.9123484108005058, + "learning_rate": 1.4610172248734067e-08, + "loss": 0.575, + "step": 14062 + }, + { + "epoch": 0.9764616025552006, + "grad_norm": 4.419122039267187, + "learning_rate": 1.4524398016267927e-08, + "loss": 0.5252, + "step": 14063 + }, + { + "epoch": 0.9765310373559228, + "grad_norm": 2.884299421029459, + "learning_rate": 1.4438875943516562e-08, + "loss": 0.3354, + "step": 14064 + }, + { + "epoch": 0.9766004721566449, + "grad_norm": 4.4642855114783275, + "learning_rate": 1.43536060348054e-08, + "loss": 0.4586, + "step": 14065 + }, + { + "epoch": 0.976669906957367, + "grad_norm": 3.725707585551653, + "learning_rate": 1.4268588294447105e-08, + "loss": 0.4864, + "step": 14066 + }, + { + "epoch": 0.9767393417580892, + "grad_norm": 4.453230055797431, + "learning_rate": 1.418382272674157e-08, + "loss": 0.5683, + "step": 14067 + }, + { + "epoch": 0.9768087765588113, + "grad_norm": 3.72750020637448, + "learning_rate": 1.4099309335976474e-08, + "loss": 0.2964, + "step": 14068 + }, + { + "epoch": 0.9768782113595333, + "grad_norm": 3.645604244971247, + "learning_rate": 1.4015048126426734e-08, + "loss": 0.4134, + "step": 14069 + }, + { + "epoch": 
0.9769476461602555, + "grad_norm": 5.784177350338976, + "learning_rate": 1.3931039102353383e-08, + "loss": 0.6415, + "step": 14070 + }, + { + "epoch": 0.9770170809609776, + "grad_norm": 4.146574278201863, + "learning_rate": 1.3847282268006357e-08, + "loss": 0.3822, + "step": 14071 + }, + { + "epoch": 0.9770865157616998, + "grad_norm": 3.8973869437181103, + "learning_rate": 1.376377762762171e-08, + "loss": 0.3145, + "step": 14072 + }, + { + "epoch": 0.9771559505624219, + "grad_norm": 3.7636944714959606, + "learning_rate": 1.3680525185422177e-08, + "loss": 0.2118, + "step": 14073 + }, + { + "epoch": 0.977225385363144, + "grad_norm": 4.758499981745516, + "learning_rate": 1.3597524945619945e-08, + "loss": 0.4027, + "step": 14074 + }, + { + "epoch": 0.9772948201638662, + "grad_norm": 3.0733841253417107, + "learning_rate": 1.3514776912412208e-08, + "loss": 0.2956, + "step": 14075 + }, + { + "epoch": 0.9773642549645882, + "grad_norm": 4.140393052224029, + "learning_rate": 1.3432281089983956e-08, + "loss": 0.4269, + "step": 14076 + }, + { + "epoch": 0.9774336897653104, + "grad_norm": 3.5635142593657636, + "learning_rate": 1.335003748250907e-08, + "loss": 0.3161, + "step": 14077 + }, + { + "epoch": 0.9775031245660325, + "grad_norm": 2.8875187195242598, + "learning_rate": 1.3268046094145892e-08, + "loss": 0.2647, + "step": 14078 + }, + { + "epoch": 0.9775725593667546, + "grad_norm": 4.12734233227536, + "learning_rate": 1.3186306929042214e-08, + "loss": 0.4996, + "step": 14079 + }, + { + "epoch": 0.9776419941674768, + "grad_norm": 3.6233855783744864, + "learning_rate": 1.3104819991332506e-08, + "loss": 0.4636, + "step": 14080 + }, + { + "epoch": 0.9777114289681988, + "grad_norm": 4.7655919438223515, + "learning_rate": 1.3023585285137918e-08, + "loss": 0.4999, + "step": 14081 + }, + { + "epoch": 0.977780863768921, + "grad_norm": 4.338123120796176, + "learning_rate": 1.2942602814566829e-08, + "loss": 0.3928, + "step": 14082 + }, + { + "epoch": 0.9778502985696431, + "grad_norm": 3.474490758938959, + "learning_rate": 1.2861872583715962e-08, + "loss": 0.3457, + "step": 14083 + }, + { + "epoch": 0.9779197333703652, + "grad_norm": 4.189072065583572, + "learning_rate": 1.2781394596668162e-08, + "loss": 0.5384, + "step": 14084 + }, + { + "epoch": 0.9779891681710874, + "grad_norm": 10.838476821743498, + "learning_rate": 1.2701168857494062e-08, + "loss": 0.3271, + "step": 14085 + }, + { + "epoch": 0.9780586029718095, + "grad_norm": 4.401879839841229, + "learning_rate": 1.262119537025097e-08, + "loss": 0.3156, + "step": 14086 + }, + { + "epoch": 0.9781280377725315, + "grad_norm": 3.7527818008339753, + "learning_rate": 1.2541474138985099e-08, + "loss": 0.3326, + "step": 14087 + }, + { + "epoch": 0.9781974725732537, + "grad_norm": 3.366481565929875, + "learning_rate": 1.2462005167726553e-08, + "loss": 0.3272, + "step": 14088 + }, + { + "epoch": 0.9782669073739758, + "grad_norm": 3.968869740589805, + "learning_rate": 1.238278846049712e-08, + "loss": 0.5455, + "step": 14089 + }, + { + "epoch": 0.978336342174698, + "grad_norm": 4.4403806593110975, + "learning_rate": 1.2303824021301925e-08, + "loss": 0.3175, + "step": 14090 + }, + { + "epoch": 0.9784057769754201, + "grad_norm": 4.492118851394086, + "learning_rate": 1.2225111854134997e-08, + "loss": 0.547, + "step": 14091 + }, + { + "epoch": 0.9784752117761422, + "grad_norm": 4.1103904018939055, + "learning_rate": 1.2146651962978151e-08, + "loss": 0.42, + "step": 14092 + }, + { + "epoch": 0.9785446465768644, + "grad_norm": 4.096429101482497, + "learning_rate": 
1.206844435179988e-08, + "loss": 0.4513, + "step": 14093 + }, + { + "epoch": 0.9786140813775864, + "grad_norm": 4.000859251593539, + "learning_rate": 1.1990489024554797e-08, + "loss": 0.4535, + "step": 14094 + }, + { + "epoch": 0.9786835161783086, + "grad_norm": 3.9083888157693965, + "learning_rate": 1.191278598518697e-08, + "loss": 0.3842, + "step": 14095 + }, + { + "epoch": 0.9787529509790307, + "grad_norm": 3.001478569978702, + "learning_rate": 1.1835335237626033e-08, + "loss": 0.2125, + "step": 14096 + }, + { + "epoch": 0.9788223857797528, + "grad_norm": 3.6682027844070113, + "learning_rate": 1.1758136785788854e-08, + "loss": 0.3298, + "step": 14097 + }, + { + "epoch": 0.978891820580475, + "grad_norm": 4.57049687022491, + "learning_rate": 1.1681190633581197e-08, + "loss": 0.5791, + "step": 14098 + }, + { + "epoch": 0.978961255381197, + "grad_norm": 3.7324466620370873, + "learning_rate": 1.1604496784893838e-08, + "loss": 0.3683, + "step": 14099 + }, + { + "epoch": 0.9790306901819191, + "grad_norm": 4.135751380637167, + "learning_rate": 1.1528055243605896e-08, + "loss": 0.5112, + "step": 14100 + }, + { + "epoch": 0.9791001249826413, + "grad_norm": 4.929288847928859, + "learning_rate": 1.1451866013584834e-08, + "loss": 0.7065, + "step": 14101 + }, + { + "epoch": 0.9791695597833634, + "grad_norm": 3.628106871153336, + "learning_rate": 1.1375929098683124e-08, + "loss": 0.425, + "step": 14102 + }, + { + "epoch": 0.9792389945840856, + "grad_norm": 5.046142894497169, + "learning_rate": 1.1300244502742142e-08, + "loss": 0.6785, + "step": 14103 + }, + { + "epoch": 0.9793084293848077, + "grad_norm": 4.1546328291859576, + "learning_rate": 1.1224812229589376e-08, + "loss": 0.4606, + "step": 14104 + }, + { + "epoch": 0.9793778641855297, + "grad_norm": 3.470277117568092, + "learning_rate": 1.1149632283040667e-08, + "loss": 0.342, + "step": 14105 + }, + { + "epoch": 0.9794472989862519, + "grad_norm": 4.589791903990124, + "learning_rate": 1.1074704666897972e-08, + "loss": 0.5509, + "step": 14106 + }, + { + "epoch": 0.979516733786974, + "grad_norm": 3.882001590830077, + "learning_rate": 1.1000029384951593e-08, + "loss": 0.4499, + "step": 14107 + }, + { + "epoch": 0.9795861685876962, + "grad_norm": 3.8949986906195138, + "learning_rate": 1.092560644097851e-08, + "loss": 0.3785, + "step": 14108 + }, + { + "epoch": 0.9796556033884183, + "grad_norm": 3.8848782925952796, + "learning_rate": 1.0851435838742374e-08, + "loss": 0.2903, + "step": 14109 + }, + { + "epoch": 0.9797250381891404, + "grad_norm": 2.693951429835676, + "learning_rate": 1.0777517581995189e-08, + "loss": 0.164, + "step": 14110 + }, + { + "epoch": 0.9797944729898626, + "grad_norm": 2.790320800779656, + "learning_rate": 1.0703851674475074e-08, + "loss": 0.259, + "step": 14111 + }, + { + "epoch": 0.9798639077905846, + "grad_norm": 5.045466073428521, + "learning_rate": 1.0630438119909048e-08, + "loss": 0.4457, + "step": 14112 + }, + { + "epoch": 0.9799333425913067, + "grad_norm": 3.918648184439637, + "learning_rate": 1.055727692200914e-08, + "loss": 0.4784, + "step": 14113 + }, + { + "epoch": 0.9800027773920289, + "grad_norm": 4.769809544447168, + "learning_rate": 1.0484368084476281e-08, + "loss": 0.376, + "step": 14114 + }, + { + "epoch": 0.980072212192751, + "grad_norm": 3.871387398081835, + "learning_rate": 1.0411711610998632e-08, + "loss": 0.326, + "step": 14115 + }, + { + "epoch": 0.9801416469934732, + "grad_norm": 5.198775243204175, + "learning_rate": 1.0339307505249918e-08, + "loss": 0.6159, + "step": 14116 + }, + { + "epoch": 
0.9802110817941952, + "grad_norm": 4.189067507587056, + "learning_rate": 1.0267155770892767e-08, + "loss": 0.6066, + "step": 14117 + }, + { + "epoch": 0.9802805165949173, + "grad_norm": 5.505401133345937, + "learning_rate": 1.0195256411577037e-08, + "loss": 0.535, + "step": 14118 + }, + { + "epoch": 0.9803499513956395, + "grad_norm": 5.734626077939506, + "learning_rate": 1.0123609430938709e-08, + "loss": 0.7098, + "step": 14119 + }, + { + "epoch": 0.9804193861963616, + "grad_norm": 3.9781117663079337, + "learning_rate": 1.0052214832601547e-08, + "loss": 0.3901, + "step": 14120 + }, + { + "epoch": 0.9804888209970838, + "grad_norm": 3.367742073536449, + "learning_rate": 9.981072620177112e-09, + "loss": 0.1947, + "step": 14121 + }, + { + "epoch": 0.9805582557978059, + "grad_norm": 4.082077112135187, + "learning_rate": 9.910182797263079e-09, + "loss": 0.3973, + "step": 14122 + }, + { + "epoch": 0.980627690598528, + "grad_norm": 6.02665435647441, + "learning_rate": 9.839545367446024e-09, + "loss": 0.4334, + "step": 14123 + }, + { + "epoch": 0.9806971253992501, + "grad_norm": 3.826928592624585, + "learning_rate": 9.769160334297533e-09, + "loss": 0.5478, + "step": 14124 + }, + { + "epoch": 0.9807665601999722, + "grad_norm": 4.3772798889108495, + "learning_rate": 9.699027701378095e-09, + "loss": 0.4715, + "step": 14125 + }, + { + "epoch": 0.9808359950006943, + "grad_norm": 4.607695835737244, + "learning_rate": 9.629147472234868e-09, + "loss": 0.3438, + "step": 14126 + }, + { + "epoch": 0.9809054298014165, + "grad_norm": 4.212014137988611, + "learning_rate": 9.559519650402805e-09, + "loss": 0.3875, + "step": 14127 + }, + { + "epoch": 0.9809748646021386, + "grad_norm": 4.754714971375352, + "learning_rate": 9.490144239402976e-09, + "loss": 0.5744, + "step": 14128 + }, + { + "epoch": 0.9810442994028608, + "grad_norm": 4.943645687968767, + "learning_rate": 9.421021242744244e-09, + "loss": 0.4683, + "step": 14129 + }, + { + "epoch": 0.9811137342035828, + "grad_norm": 4.671313135898658, + "learning_rate": 9.352150663923254e-09, + "loss": 0.7006, + "step": 14130 + }, + { + "epoch": 0.9811831690043049, + "grad_norm": 3.6089221112469816, + "learning_rate": 9.283532506422777e-09, + "loss": 0.3423, + "step": 14131 + }, + { + "epoch": 0.9812526038050271, + "grad_norm": 4.696693145201252, + "learning_rate": 9.215166773714479e-09, + "loss": 0.5602, + "step": 14132 + }, + { + "epoch": 0.9813220386057492, + "grad_norm": 8.366195893118412, + "learning_rate": 9.147053469255041e-09, + "loss": 0.6616, + "step": 14133 + }, + { + "epoch": 0.9813914734064714, + "grad_norm": 4.371286666423065, + "learning_rate": 9.079192596490594e-09, + "loss": 0.4674, + "step": 14134 + }, + { + "epoch": 0.9814609082071934, + "grad_norm": 2.5539821177237454, + "learning_rate": 9.011584158852838e-09, + "loss": 0.2391, + "step": 14135 + }, + { + "epoch": 0.9815303430079155, + "grad_norm": 2.4957108760730704, + "learning_rate": 8.944228159761259e-09, + "loss": 0.1908, + "step": 14136 + }, + { + "epoch": 0.9815997778086377, + "grad_norm": 5.590215268791873, + "learning_rate": 8.877124602623133e-09, + "loss": 0.4717, + "step": 14137 + }, + { + "epoch": 0.9816692126093598, + "grad_norm": 4.554911578679251, + "learning_rate": 8.810273490831855e-09, + "loss": 0.4343, + "step": 14138 + }, + { + "epoch": 0.981738647410082, + "grad_norm": 3.609519446723891, + "learning_rate": 8.743674827769166e-09, + "loss": 0.3441, + "step": 14139 + }, + { + "epoch": 0.9818080822108041, + "grad_norm": 4.682400225371072, + "learning_rate": 8.677328616803482e-09, + 
"loss": 0.5968, + "step": 14140 + }, + { + "epoch": 0.9818775170115261, + "grad_norm": 5.29781204664176, + "learning_rate": 8.611234861290451e-09, + "loss": 0.559, + "step": 14141 + }, + { + "epoch": 0.9819469518122483, + "grad_norm": 4.065611354346616, + "learning_rate": 8.545393564572957e-09, + "loss": 0.4812, + "step": 14142 + }, + { + "epoch": 0.9820163866129704, + "grad_norm": 5.389734326414243, + "learning_rate": 8.479804729981667e-09, + "loss": 0.659, + "step": 14143 + }, + { + "epoch": 0.9820858214136925, + "grad_norm": 3.01990955899393, + "learning_rate": 8.414468360832817e-09, + "loss": 0.3784, + "step": 14144 + }, + { + "epoch": 0.9821552562144147, + "grad_norm": 4.176493850689642, + "learning_rate": 8.349384460432652e-09, + "loss": 0.4268, + "step": 14145 + }, + { + "epoch": 0.9822246910151368, + "grad_norm": 4.074199684074437, + "learning_rate": 8.284553032071319e-09, + "loss": 0.4792, + "step": 14146 + }, + { + "epoch": 0.982294125815859, + "grad_norm": 4.131093091531945, + "learning_rate": 8.219974079029524e-09, + "loss": 0.5293, + "step": 14147 + }, + { + "epoch": 0.982363560616581, + "grad_norm": 3.8749695924108445, + "learning_rate": 8.155647604572437e-09, + "loss": 0.4293, + "step": 14148 + }, + { + "epoch": 0.9824329954173031, + "grad_norm": 3.8739585367146914, + "learning_rate": 8.091573611954118e-09, + "loss": 0.3286, + "step": 14149 + }, + { + "epoch": 0.9825024302180253, + "grad_norm": 7.368468135141709, + "learning_rate": 8.02775210441531e-09, + "loss": 0.3853, + "step": 14150 + }, + { + "epoch": 0.9825718650187474, + "grad_norm": 3.826584228956766, + "learning_rate": 7.964183085183986e-09, + "loss": 0.497, + "step": 14151 + }, + { + "epoch": 0.9826412998194696, + "grad_norm": 4.300948281869529, + "learning_rate": 7.90086655747535e-09, + "loss": 0.2242, + "step": 14152 + }, + { + "epoch": 0.9827107346201917, + "grad_norm": 4.749027840000823, + "learning_rate": 7.837802524491844e-09, + "loss": 0.4977, + "step": 14153 + }, + { + "epoch": 0.9827801694209137, + "grad_norm": 5.04991920666717, + "learning_rate": 7.774990989423692e-09, + "loss": 0.5446, + "step": 14154 + }, + { + "epoch": 0.9828496042216359, + "grad_norm": 3.7447001871968, + "learning_rate": 7.712431955447242e-09, + "loss": 0.3505, + "step": 14155 + }, + { + "epoch": 0.982919039022358, + "grad_norm": 4.657464663673959, + "learning_rate": 7.650125425726628e-09, + "loss": 0.562, + "step": 14156 + }, + { + "epoch": 0.9829884738230801, + "grad_norm": 2.992367973552169, + "learning_rate": 7.588071403413221e-09, + "loss": 0.353, + "step": 14157 + }, + { + "epoch": 0.9830579086238023, + "grad_norm": 3.825488736538764, + "learning_rate": 7.526269891646176e-09, + "loss": 0.4779, + "step": 14158 + }, + { + "epoch": 0.9831273434245243, + "grad_norm": 3.597152256526084, + "learning_rate": 7.464720893551324e-09, + "loss": 0.372, + "step": 14159 + }, + { + "epoch": 0.9831967782252465, + "grad_norm": 4.876675183059779, + "learning_rate": 7.403424412241178e-09, + "loss": 0.5298, + "step": 14160 + }, + { + "epoch": 0.9832662130259686, + "grad_norm": 3.755935809743802, + "learning_rate": 7.342380450816034e-09, + "loss": 0.3097, + "step": 14161 + }, + { + "epoch": 0.9833356478266907, + "grad_norm": 4.376632789534533, + "learning_rate": 7.281589012363976e-09, + "loss": 0.4964, + "step": 14162 + }, + { + "epoch": 0.9834050826274129, + "grad_norm": 3.912126786450365, + "learning_rate": 7.22105009995977e-09, + "loss": 0.3354, + "step": 14163 + }, + { + "epoch": 0.983474517428135, + "grad_norm": 3.6394292092041023, + 
"learning_rate": 7.160763716664853e-09, + "loss": 0.3885, + "step": 14164 + }, + { + "epoch": 0.9835439522288572, + "grad_norm": 2.919243442843382, + "learning_rate": 7.10072986552901e-09, + "loss": 0.1903, + "step": 14165 + }, + { + "epoch": 0.9836133870295792, + "grad_norm": 4.173854438218789, + "learning_rate": 7.0409485495881444e-09, + "loss": 0.431, + "step": 14166 + }, + { + "epoch": 0.9836828218303013, + "grad_norm": 4.686502651890082, + "learning_rate": 6.981419771865949e-09, + "loss": 0.671, + "step": 14167 + }, + { + "epoch": 0.9837522566310235, + "grad_norm": 4.120819515339244, + "learning_rate": 6.922143535373904e-09, + "loss": 0.454, + "step": 14168 + }, + { + "epoch": 0.9838216914317456, + "grad_norm": 4.331144582877351, + "learning_rate": 6.863119843109611e-09, + "loss": 0.4561, + "step": 14169 + }, + { + "epoch": 0.9838911262324677, + "grad_norm": 5.596195732068436, + "learning_rate": 6.80434869805846e-09, + "loss": 0.3442, + "step": 14170 + }, + { + "epoch": 0.9839605610331899, + "grad_norm": 4.89207446982786, + "learning_rate": 6.7458301031936295e-09, + "loss": 0.5232, + "step": 14171 + }, + { + "epoch": 0.9840299958339119, + "grad_norm": 3.4273103989318034, + "learning_rate": 6.687564061474416e-09, + "loss": 0.3381, + "step": 14172 + }, + { + "epoch": 0.9840994306346341, + "grad_norm": 3.229415934682325, + "learning_rate": 6.629550575847355e-09, + "loss": 0.2252, + "step": 14173 + }, + { + "epoch": 0.9841688654353562, + "grad_norm": 3.1772466200707856, + "learning_rate": 6.571789649247873e-09, + "loss": 0.2441, + "step": 14174 + }, + { + "epoch": 0.9842383002360783, + "grad_norm": 3.5274997732370528, + "learning_rate": 6.514281284596413e-09, + "loss": 0.3999, + "step": 14175 + }, + { + "epoch": 0.9843077350368005, + "grad_norm": 4.4079541734597845, + "learning_rate": 6.457025484802315e-09, + "loss": 0.3907, + "step": 14176 + }, + { + "epoch": 0.9843771698375225, + "grad_norm": 4.122472300530622, + "learning_rate": 6.40002225276104e-09, + "loss": 0.5338, + "step": 14177 + }, + { + "epoch": 0.9844466046382447, + "grad_norm": 3.589075220721958, + "learning_rate": 6.3432715913563926e-09, + "loss": 0.3137, + "step": 14178 + }, + { + "epoch": 0.9845160394389668, + "grad_norm": 4.209194871194281, + "learning_rate": 6.286773503458299e-09, + "loss": 0.34, + "step": 14179 + }, + { + "epoch": 0.9845854742396889, + "grad_norm": 4.317748281712373, + "learning_rate": 6.230527991923918e-09, + "loss": 0.4693, + "step": 14180 + }, + { + "epoch": 0.9846549090404111, + "grad_norm": 4.504707847332406, + "learning_rate": 6.174535059598752e-09, + "loss": 0.4781, + "step": 14181 + }, + { + "epoch": 0.9847243438411332, + "grad_norm": 4.086500518127666, + "learning_rate": 6.118794709314979e-09, + "loss": 0.3124, + "step": 14182 + }, + { + "epoch": 0.9847937786418552, + "grad_norm": 4.488603063683037, + "learning_rate": 6.063306943891456e-09, + "loss": 0.5561, + "step": 14183 + }, + { + "epoch": 0.9848632134425774, + "grad_norm": 3.57468711045499, + "learning_rate": 6.008071766135382e-09, + "loss": 0.4904, + "step": 14184 + }, + { + "epoch": 0.9849326482432995, + "grad_norm": 5.985968591119745, + "learning_rate": 5.953089178839522e-09, + "loss": 0.3403, + "step": 14185 + }, + { + "epoch": 0.9850020830440217, + "grad_norm": 3.71301667238091, + "learning_rate": 5.898359184784985e-09, + "loss": 0.518, + "step": 14186 + }, + { + "epoch": 0.9850715178447438, + "grad_norm": 4.557541910643263, + "learning_rate": 5.843881786740668e-09, + "loss": 0.3161, + "step": 14187 + }, + { + "epoch": 
0.9851409526454659, + "grad_norm": 3.526151989708653, + "learning_rate": 5.7896569874610345e-09, + "loss": 0.3959, + "step": 14188 + }, + { + "epoch": 0.985210387446188, + "grad_norm": 2.8416666859683675, + "learning_rate": 5.735684789689444e-09, + "loss": 0.2019, + "step": 14189 + }, + { + "epoch": 0.9852798222469101, + "grad_norm": 3.4584920144762927, + "learning_rate": 5.681965196155937e-09, + "loss": 0.3093, + "step": 14190 + }, + { + "epoch": 0.9853492570476323, + "grad_norm": 4.14636616090418, + "learning_rate": 5.628498209576672e-09, + "loss": 0.4154, + "step": 14191 + }, + { + "epoch": 0.9854186918483544, + "grad_norm": 3.9049732087820788, + "learning_rate": 5.575283832656708e-09, + "loss": 0.3858, + "step": 14192 + }, + { + "epoch": 0.9854881266490765, + "grad_norm": 3.8332280371123515, + "learning_rate": 5.522322068087227e-09, + "loss": 0.2814, + "step": 14193 + }, + { + "epoch": 0.9855575614497987, + "grad_norm": 5.171097059368011, + "learning_rate": 5.469612918547196e-09, + "loss": 0.352, + "step": 14194 + }, + { + "epoch": 0.9856269962505207, + "grad_norm": 3.3162668708479477, + "learning_rate": 5.41715638670226e-09, + "loss": 0.2855, + "step": 14195 + }, + { + "epoch": 0.9856964310512429, + "grad_norm": 3.629310011488377, + "learning_rate": 5.3649524752058535e-09, + "loss": 0.2959, + "step": 14196 + }, + { + "epoch": 0.985765865851965, + "grad_norm": 5.003806680354501, + "learning_rate": 5.313001186698641e-09, + "loss": 0.3164, + "step": 14197 + }, + { + "epoch": 0.9858353006526871, + "grad_norm": 4.344733664439827, + "learning_rate": 5.26130252380741e-09, + "loss": 0.4822, + "step": 14198 + }, + { + "epoch": 0.9859047354534093, + "grad_norm": 3.864044038068947, + "learning_rate": 5.209856489147847e-09, + "loss": 0.4809, + "step": 14199 + }, + { + "epoch": 0.9859741702541314, + "grad_norm": 3.149019775061917, + "learning_rate": 5.158663085321758e-09, + "loss": 0.3169, + "step": 14200 + }, + { + "epoch": 0.9860436050548534, + "grad_norm": 4.4814503851221055, + "learning_rate": 5.107722314918184e-09, + "loss": 0.5874, + "step": 14201 + }, + { + "epoch": 0.9861130398555756, + "grad_norm": 3.0312936100976198, + "learning_rate": 5.057034180514509e-09, + "loss": 0.3351, + "step": 14202 + }, + { + "epoch": 0.9861824746562977, + "grad_norm": 2.906331784651308, + "learning_rate": 5.00659868467368e-09, + "loss": 0.3824, + "step": 14203 + }, + { + "epoch": 0.9862519094570199, + "grad_norm": 3.7906194678540004, + "learning_rate": 4.956415829946437e-09, + "loss": 0.312, + "step": 14204 + }, + { + "epoch": 0.986321344257742, + "grad_norm": 4.8510019580673704, + "learning_rate": 4.906485618871859e-09, + "loss": 0.6608, + "step": 14205 + }, + { + "epoch": 0.9863907790584641, + "grad_norm": 3.7813065156221333, + "learning_rate": 4.856808053974593e-09, + "loss": 0.3299, + "step": 14206 + }, + { + "epoch": 0.9864602138591863, + "grad_norm": 4.133393283046333, + "learning_rate": 4.807383137767629e-09, + "loss": 0.3333, + "step": 14207 + }, + { + "epoch": 0.9865296486599083, + "grad_norm": 4.143413824971348, + "learning_rate": 4.75821087275119e-09, + "loss": 0.3314, + "step": 14208 + }, + { + "epoch": 0.9865990834606305, + "grad_norm": 3.924968519938344, + "learning_rate": 4.7092912614116195e-09, + "loss": 0.3509, + "step": 14209 + }, + { + "epoch": 0.9866685182613526, + "grad_norm": 4.458364063164606, + "learning_rate": 4.660624306223605e-09, + "loss": 0.3226, + "step": 14210 + }, + { + "epoch": 0.9867379530620747, + "grad_norm": 3.1188221630282777, + "learning_rate": 4.61221000964851e-09, + 
"loss": 0.335, + "step": 14211 + }, + { + "epoch": 0.9868073878627969, + "grad_norm": 2.6807442275385607, + "learning_rate": 4.5640483741349325e-09, + "loss": 0.2218, + "step": 14212 + }, + { + "epoch": 0.986876822663519, + "grad_norm": 4.610058498184213, + "learning_rate": 4.516139402119813e-09, + "loss": 0.6192, + "step": 14213 + }, + { + "epoch": 0.986946257464241, + "grad_norm": 4.009593942032591, + "learning_rate": 4.4684830960251e-09, + "loss": 0.5193, + "step": 14214 + }, + { + "epoch": 0.9870156922649632, + "grad_norm": 3.680663467870674, + "learning_rate": 4.421079458261646e-09, + "loss": 0.3762, + "step": 14215 + }, + { + "epoch": 0.9870851270656853, + "grad_norm": 3.1512938151262007, + "learning_rate": 4.373928491226975e-09, + "loss": 0.235, + "step": 14216 + }, + { + "epoch": 0.9871545618664075, + "grad_norm": 3.2395615532317663, + "learning_rate": 4.327030197306403e-09, + "loss": 0.3058, + "step": 14217 + }, + { + "epoch": 0.9872239966671296, + "grad_norm": 4.980506581599429, + "learning_rate": 4.2803845788713664e-09, + "loss": 0.4203, + "step": 14218 + }, + { + "epoch": 0.9872934314678516, + "grad_norm": 3.744771345696337, + "learning_rate": 4.233991638281642e-09, + "loss": 0.4522, + "step": 14219 + }, + { + "epoch": 0.9873628662685738, + "grad_norm": 4.119476329631578, + "learning_rate": 4.187851377883689e-09, + "loss": 0.4914, + "step": 14220 + }, + { + "epoch": 0.9874323010692959, + "grad_norm": 3.317723696709438, + "learning_rate": 4.14196380001064e-09, + "loss": 0.2931, + "step": 14221 + }, + { + "epoch": 0.9875017358700181, + "grad_norm": 5.218260272938277, + "learning_rate": 4.096328906984526e-09, + "loss": 0.585, + "step": 14222 + }, + { + "epoch": 0.9875711706707402, + "grad_norm": 3.230147301402645, + "learning_rate": 4.0509467011123906e-09, + "loss": 0.3427, + "step": 14223 + }, + { + "epoch": 0.9876406054714623, + "grad_norm": 3.109538729330601, + "learning_rate": 4.005817184690175e-09, + "loss": 0.3414, + "step": 14224 + }, + { + "epoch": 0.9877100402721845, + "grad_norm": 3.7601146698927734, + "learning_rate": 3.960940359999943e-09, + "loss": 0.3362, + "step": 14225 + }, + { + "epoch": 0.9877794750729065, + "grad_norm": 4.439727874924048, + "learning_rate": 3.9163162293126554e-09, + "loss": 0.2895, + "step": 14226 + }, + { + "epoch": 0.9878489098736286, + "grad_norm": 2.508634621559771, + "learning_rate": 3.871944794884286e-09, + "loss": 0.1936, + "step": 14227 + }, + { + "epoch": 0.9879183446743508, + "grad_norm": 4.20587110479154, + "learning_rate": 3.82782605895915e-09, + "loss": 0.2941, + "step": 14228 + }, + { + "epoch": 0.9879877794750729, + "grad_norm": 4.122638343125635, + "learning_rate": 3.783960023769906e-09, + "loss": 0.538, + "step": 14229 + }, + { + "epoch": 0.9880572142757951, + "grad_norm": 4.213129670831205, + "learning_rate": 3.7403466915336695e-09, + "loss": 0.4974, + "step": 14230 + }, + { + "epoch": 0.9881266490765171, + "grad_norm": 3.754216109402717, + "learning_rate": 3.6969860644570087e-09, + "loss": 0.3353, + "step": 14231 + }, + { + "epoch": 0.9881960838772392, + "grad_norm": 4.953921450927265, + "learning_rate": 3.6538781447331694e-09, + "loss": 0.5899, + "step": 14232 + }, + { + "epoch": 0.9882655186779614, + "grad_norm": 3.4467166992592237, + "learning_rate": 3.6110229345426297e-09, + "loss": 0.3421, + "step": 14233 + }, + { + "epoch": 0.9883349534786835, + "grad_norm": 3.506998569487072, + "learning_rate": 3.5684204360525443e-09, + "loss": 0.4057, + "step": 14234 + }, + { + "epoch": 0.9884043882794057, + "grad_norm": 
3.0907568675238353, + "learning_rate": 3.526070651417857e-09, + "loss": 0.2455, + "step": 14235 + }, + { + "epoch": 0.9884738230801278, + "grad_norm": 3.362668306929536, + "learning_rate": 3.483973582780742e-09, + "loss": 0.2925, + "step": 14236 + }, + { + "epoch": 0.9885432578808498, + "grad_norm": 3.4578164349244984, + "learning_rate": 3.442129232270608e-09, + "loss": 0.2931, + "step": 14237 + }, + { + "epoch": 0.988612692681572, + "grad_norm": 3.85679652992569, + "learning_rate": 3.4005376020029848e-09, + "loss": 0.4281, + "step": 14238 + }, + { + "epoch": 0.9886821274822941, + "grad_norm": 3.0586036118853728, + "learning_rate": 3.3591986940828546e-09, + "loss": 0.2463, + "step": 14239 + }, + { + "epoch": 0.9887515622830162, + "grad_norm": 3.803732510223765, + "learning_rate": 3.318112510599658e-09, + "loss": 0.2636, + "step": 14240 + }, + { + "epoch": 0.9888209970837384, + "grad_norm": 2.8853049483691926, + "learning_rate": 3.2772790536328426e-09, + "loss": 0.1564, + "step": 14241 + }, + { + "epoch": 0.9888904318844605, + "grad_norm": 3.7314160271238386, + "learning_rate": 3.2366983252463126e-09, + "loss": 0.3955, + "step": 14242 + }, + { + "epoch": 0.9889598666851827, + "grad_norm": 4.545927575892611, + "learning_rate": 3.1963703274939805e-09, + "loss": 0.3805, + "step": 14243 + }, + { + "epoch": 0.9890293014859047, + "grad_norm": 4.248724984063371, + "learning_rate": 3.1562950624147713e-09, + "loss": 0.5861, + "step": 14244 + }, + { + "epoch": 0.9890987362866268, + "grad_norm": 4.253931839935822, + "learning_rate": 3.1164725320359525e-09, + "loss": 0.5171, + "step": 14245 + }, + { + "epoch": 0.989168171087349, + "grad_norm": 5.316945959766639, + "learning_rate": 3.0769027383714677e-09, + "loss": 0.4917, + "step": 14246 + }, + { + "epoch": 0.9892376058880711, + "grad_norm": 4.058011368721931, + "learning_rate": 3.03758568342305e-09, + "loss": 0.5262, + "step": 14247 + }, + { + "epoch": 0.9893070406887933, + "grad_norm": 3.4919473846590545, + "learning_rate": 2.9985213691791082e-09, + "loss": 0.2666, + "step": 14248 + }, + { + "epoch": 0.9893764754895154, + "grad_norm": 3.318359125052282, + "learning_rate": 2.9597097976152845e-09, + "loss": 0.3349, + "step": 14249 + }, + { + "epoch": 0.9894459102902374, + "grad_norm": 3.1524749485761334, + "learning_rate": 2.921150970694453e-09, + "loss": 0.2583, + "step": 14250 + }, + { + "epoch": 0.9895153450909596, + "grad_norm": 5.137345018754172, + "learning_rate": 2.8828448903678307e-09, + "loss": 0.4136, + "step": 14251 + }, + { + "epoch": 0.9895847798916817, + "grad_norm": 4.78156723056081, + "learning_rate": 2.844791558572202e-09, + "loss": 0.4089, + "step": 14252 + }, + { + "epoch": 0.9896542146924038, + "grad_norm": 3.8922416130471724, + "learning_rate": 2.806990977232138e-09, + "loss": 0.4361, + "step": 14253 + }, + { + "epoch": 0.989723649493126, + "grad_norm": 5.229318373526022, + "learning_rate": 2.769443148259443e-09, + "loss": 0.5801, + "step": 14254 + }, + { + "epoch": 0.989793084293848, + "grad_norm": 2.4235266948588223, + "learning_rate": 2.7321480735542638e-09, + "loss": 0.1813, + "step": 14255 + }, + { + "epoch": 0.9898625190945702, + "grad_norm": 2.8685022915012044, + "learning_rate": 2.695105755001759e-09, + "loss": 0.2734, + "step": 14256 + }, + { + "epoch": 0.9899319538952923, + "grad_norm": 3.2518068959560114, + "learning_rate": 2.658316194475985e-09, + "loss": 0.3173, + "step": 14257 + }, + { + "epoch": 0.9900013886960144, + "grad_norm": 3.1318949610475864, + "learning_rate": 2.621779393838231e-09, + "loss": 0.294, + "step": 
14258 + }, + { + "epoch": 0.9900708234967366, + "grad_norm": 3.538063291172598, + "learning_rate": 2.5854953549353524e-09, + "loss": 0.3819, + "step": 14259 + }, + { + "epoch": 0.9901402582974587, + "grad_norm": 3.3415004386966904, + "learning_rate": 2.5494640796036585e-09, + "loss": 0.3756, + "step": 14260 + }, + { + "epoch": 0.9902096930981809, + "grad_norm": 3.5622495862132495, + "learning_rate": 2.51368556966447e-09, + "loss": 0.3242, + "step": 14261 + }, + { + "epoch": 0.9902791278989029, + "grad_norm": 3.7482023050587836, + "learning_rate": 2.4781598269285613e-09, + "loss": 0.4508, + "step": 14262 + }, + { + "epoch": 0.990348562699625, + "grad_norm": 3.9950671345953728, + "learning_rate": 2.4428868531922722e-09, + "loss": 0.321, + "step": 14263 + }, + { + "epoch": 0.9904179975003472, + "grad_norm": 3.2183729135146186, + "learning_rate": 2.407866650239177e-09, + "loss": 0.3491, + "step": 14264 + }, + { + "epoch": 0.9904874323010693, + "grad_norm": 1.6428671812482867, + "learning_rate": 2.3730992198411907e-09, + "loss": 0.1066, + "step": 14265 + }, + { + "epoch": 0.9905568671017915, + "grad_norm": 4.01500593309831, + "learning_rate": 2.338584563756907e-09, + "loss": 0.4169, + "step": 14266 + }, + { + "epoch": 0.9906263019025136, + "grad_norm": 4.395543049611679, + "learning_rate": 2.3043226837315967e-09, + "loss": 0.5331, + "step": 14267 + }, + { + "epoch": 0.9906957367032356, + "grad_norm": 4.7131437188914855, + "learning_rate": 2.2703135814983178e-09, + "loss": 0.6364, + "step": 14268 + }, + { + "epoch": 0.9907651715039578, + "grad_norm": 4.4089483284487025, + "learning_rate": 2.236557258777361e-09, + "loss": 0.5121, + "step": 14269 + }, + { + "epoch": 0.9908346063046799, + "grad_norm": 2.8187029126115704, + "learning_rate": 2.203053717276249e-09, + "loss": 0.1886, + "step": 14270 + }, + { + "epoch": 0.990904041105402, + "grad_norm": 3.68986898732914, + "learning_rate": 2.169802958688627e-09, + "loss": 0.2882, + "step": 14271 + }, + { + "epoch": 0.9909734759061242, + "grad_norm": 3.5070658445179195, + "learning_rate": 2.1368049846975936e-09, + "loss": 0.3263, + "step": 14272 + }, + { + "epoch": 0.9910429107068462, + "grad_norm": 3.4575929077687424, + "learning_rate": 2.1040597969712586e-09, + "loss": 0.1993, + "step": 14273 + }, + { + "epoch": 0.9911123455075684, + "grad_norm": 4.921026638718083, + "learning_rate": 2.0715673971660744e-09, + "loss": 0.7173, + "step": 14274 + }, + { + "epoch": 0.9911817803082905, + "grad_norm": 5.689504801078911, + "learning_rate": 2.0393277869251717e-09, + "loss": 0.59, + "step": 14275 + }, + { + "epoch": 0.9912512151090126, + "grad_norm": 3.233557456374596, + "learning_rate": 2.0073409678800227e-09, + "loss": 0.2097, + "step": 14276 + }, + { + "epoch": 0.9913206499097348, + "grad_norm": 4.570685483882463, + "learning_rate": 1.9756069416476677e-09, + "loss": 0.5052, + "step": 14277 + }, + { + "epoch": 0.9913900847104569, + "grad_norm": 3.970399761257439, + "learning_rate": 1.9441257098334886e-09, + "loss": 0.3548, + "step": 14278 + }, + { + "epoch": 0.991459519511179, + "grad_norm": 4.3469033650536195, + "learning_rate": 1.9128972740295458e-09, + "loss": 0.4343, + "step": 14279 + }, + { + "epoch": 0.9915289543119011, + "grad_norm": 3.274271664665922, + "learning_rate": 1.8819216358156865e-09, + "loss": 0.4066, + "step": 14280 + }, + { + "epoch": 0.9915983891126232, + "grad_norm": 3.621070949269692, + "learning_rate": 1.8511987967589906e-09, + "loss": 0.3817, + "step": 14281 + }, + { + "epoch": 0.9916678239133454, + "grad_norm": 3.8249458126028766, + 
"learning_rate": 1.8207287584121046e-09, + "loss": 0.3736, + "step": 14282 + }, + { + "epoch": 0.9917372587140675, + "grad_norm": 4.146319164937174, + "learning_rate": 1.7905115223171287e-09, + "loss": 0.6416, + "step": 14283 + }, + { + "epoch": 0.9918066935147896, + "grad_norm": 3.9439980721220382, + "learning_rate": 1.7605470900028399e-09, + "loss": 0.459, + "step": 14284 + }, + { + "epoch": 0.9918761283155118, + "grad_norm": 4.110899005641816, + "learning_rate": 1.7308354629835822e-09, + "loss": 0.3902, + "step": 14285 + }, + { + "epoch": 0.9919455631162338, + "grad_norm": 3.81131684874795, + "learning_rate": 1.7013766427625976e-09, + "loss": 0.4883, + "step": 14286 + }, + { + "epoch": 0.992014997916956, + "grad_norm": 4.327292220682511, + "learning_rate": 1.6721706308303609e-09, + "loss": 0.3972, + "step": 14287 + }, + { + "epoch": 0.9920844327176781, + "grad_norm": 4.619631209062738, + "learning_rate": 1.6432174286634684e-09, + "loss": 0.4488, + "step": 14288 + }, + { + "epoch": 0.9921538675184002, + "grad_norm": 3.716669698779563, + "learning_rate": 1.6145170377268593e-09, + "loss": 0.1797, + "step": 14289 + }, + { + "epoch": 0.9922233023191224, + "grad_norm": 2.8123973836239937, + "learning_rate": 1.5860694594721504e-09, + "loss": 0.2102, + "step": 14290 + }, + { + "epoch": 0.9922927371198444, + "grad_norm": 2.4821299991790053, + "learning_rate": 1.5578746953376355e-09, + "loss": 0.1956, + "step": 14291 + }, + { + "epoch": 0.9923621719205666, + "grad_norm": 2.9432849741502443, + "learning_rate": 1.5299327467499515e-09, + "loss": 0.1909, + "step": 14292 + }, + { + "epoch": 0.9924316067212887, + "grad_norm": 3.3279888053593556, + "learning_rate": 1.5022436151218566e-09, + "loss": 0.3511, + "step": 14293 + }, + { + "epoch": 0.9925010415220108, + "grad_norm": 4.1882467731649555, + "learning_rate": 1.4748073018538978e-09, + "loss": 0.4041, + "step": 14294 + }, + { + "epoch": 0.992570476322733, + "grad_norm": 4.999210832809188, + "learning_rate": 1.4476238083344086e-09, + "loss": 0.5945, + "step": 14295 + }, + { + "epoch": 0.9926399111234551, + "grad_norm": 3.676634550792359, + "learning_rate": 1.4206931359378452e-09, + "loss": 0.341, + "step": 14296 + }, + { + "epoch": 0.9927093459241771, + "grad_norm": 5.204906464761587, + "learning_rate": 1.394015286025896e-09, + "loss": 0.6376, + "step": 14297 + }, + { + "epoch": 0.9927787807248993, + "grad_norm": 2.9809916611844356, + "learning_rate": 1.3675902599485923e-09, + "loss": 0.2389, + "step": 14298 + }, + { + "epoch": 0.9928482155256214, + "grad_norm": 4.788205271186599, + "learning_rate": 1.3414180590426428e-09, + "loss": 0.4541, + "step": 14299 + }, + { + "epoch": 0.9929176503263436, + "grad_norm": 4.396971422959542, + "learning_rate": 1.3154986846308782e-09, + "loss": 0.4539, + "step": 14300 + }, + { + "epoch": 0.9929870851270657, + "grad_norm": 3.4993828652186894, + "learning_rate": 1.2898321380250268e-09, + "loss": 0.2373, + "step": 14301 + }, + { + "epoch": 0.9930565199277878, + "grad_norm": 4.241135310146371, + "learning_rate": 1.2644184205229393e-09, + "loss": 0.3309, + "step": 14302 + }, + { + "epoch": 0.99312595472851, + "grad_norm": 4.163670563864669, + "learning_rate": 1.2392575334102542e-09, + "loss": 0.3438, + "step": 14303 + }, + { + "epoch": 0.993195389529232, + "grad_norm": 4.928342356441496, + "learning_rate": 1.214349477959287e-09, + "loss": 0.4909, + "step": 14304 + }, + { + "epoch": 0.9932648243299542, + "grad_norm": 3.6313620635274764, + "learning_rate": 1.1896942554301404e-09, + "loss": 0.477, + "step": 14305 + }, + { 
+ "epoch": 0.9933342591306763, + "grad_norm": 3.471134556966968, + "learning_rate": 1.1652918670695957e-09, + "loss": 0.3222, + "step": 14306 + }, + { + "epoch": 0.9934036939313984, + "grad_norm": 3.5291741071732075, + "learning_rate": 1.141142314111665e-09, + "loss": 0.4845, + "step": 14307 + }, + { + "epoch": 0.9934731287321206, + "grad_norm": 3.765400422710313, + "learning_rate": 1.1172455977787044e-09, + "loss": 0.4228, + "step": 14308 + }, + { + "epoch": 0.9935425635328426, + "grad_norm": 3.804016674577753, + "learning_rate": 1.0936017192786363e-09, + "loss": 0.5, + "step": 14309 + }, + { + "epoch": 0.9936119983335647, + "grad_norm": 3.1696330186674877, + "learning_rate": 1.0702106798077261e-09, + "loss": 0.2484, + "step": 14310 + }, + { + "epoch": 0.9936814331342869, + "grad_norm": 3.057451940789772, + "learning_rate": 1.0470724805489163e-09, + "loss": 0.3217, + "step": 14311 + }, + { + "epoch": 0.993750867935009, + "grad_norm": 4.276196679236634, + "learning_rate": 1.024187122672382e-09, + "loss": 0.4818, + "step": 14312 + }, + { + "epoch": 0.9938203027357312, + "grad_norm": 3.498487589320478, + "learning_rate": 1.0015546073355309e-09, + "loss": 0.2358, + "step": 14313 + }, + { + "epoch": 0.9938897375364533, + "grad_norm": 3.803493460886441, + "learning_rate": 9.791749356835579e-10, + "loss": 0.3194, + "step": 14314 + }, + { + "epoch": 0.9939591723371753, + "grad_norm": 3.811656074957991, + "learning_rate": 9.570481088483352e-10, + "loss": 0.269, + "step": 14315 + }, + { + "epoch": 0.9940286071378975, + "grad_norm": 3.935123208256896, + "learning_rate": 9.351741279484128e-10, + "loss": 0.3469, + "step": 14316 + }, + { + "epoch": 0.9940980419386196, + "grad_norm": 2.70180021577124, + "learning_rate": 9.135529940906829e-10, + "loss": 0.1904, + "step": 14317 + }, + { + "epoch": 0.9941674767393418, + "grad_norm": 3.241460830434318, + "learning_rate": 8.921847083687152e-10, + "loss": 0.2558, + "step": 14318 + }, + { + "epoch": 0.9942369115400639, + "grad_norm": 3.612127990033217, + "learning_rate": 8.710692718627567e-10, + "loss": 0.4021, + "step": 14319 + }, + { + "epoch": 0.994306346340786, + "grad_norm": 5.32856884521121, + "learning_rate": 8.502066856413971e-10, + "loss": 0.2568, + "step": 14320 + }, + { + "epoch": 0.9943757811415082, + "grad_norm": 4.136154519001808, + "learning_rate": 8.295969507599033e-10, + "loss": 0.4283, + "step": 14321 + }, + { + "epoch": 0.9944452159422302, + "grad_norm": 6.247594261274532, + "learning_rate": 8.092400682602198e-10, + "loss": 0.8369, + "step": 14322 + }, + { + "epoch": 0.9945146507429524, + "grad_norm": 6.899591566016916, + "learning_rate": 7.891360391726332e-10, + "loss": 0.5912, + "step": 14323 + }, + { + "epoch": 0.9945840855436745, + "grad_norm": 3.6852629169544016, + "learning_rate": 7.692848645129979e-10, + "loss": 0.2857, + "step": 14324 + }, + { + "epoch": 0.9946535203443966, + "grad_norm": 4.620357253190392, + "learning_rate": 7.496865452866209e-10, + "loss": 0.3683, + "step": 14325 + }, + { + "epoch": 0.9947229551451188, + "grad_norm": 4.259646133206421, + "learning_rate": 7.303410824832657e-10, + "loss": 0.356, + "step": 14326 + }, + { + "epoch": 0.9947923899458408, + "grad_norm": 3.3375291263487274, + "learning_rate": 7.112484770827044e-10, + "loss": 0.3905, + "step": 14327 + }, + { + "epoch": 0.9948618247465629, + "grad_norm": 2.3152696550521927, + "learning_rate": 6.924087300497206e-10, + "loss": 0.1936, + "step": 14328 + }, + { + "epoch": 0.9949312595472851, + "grad_norm": 4.3917848806331925, + "learning_rate": 
6.738218423379961e-10, + "loss": 0.4615, + "step": 14329 + }, + { + "epoch": 0.9950006943480072, + "grad_norm": 1.8163046293100398, + "learning_rate": 6.554878148873344e-10, + "loss": 0.1138, + "step": 14330 + }, + { + "epoch": 0.9950701291487294, + "grad_norm": 3.399325711960607, + "learning_rate": 6.37406648624217e-10, + "loss": 0.3499, + "step": 14331 + }, + { + "epoch": 0.9951395639494515, + "grad_norm": 4.652243487534354, + "learning_rate": 6.195783444645776e-10, + "loss": 0.496, + "step": 14332 + }, + { + "epoch": 0.9952089987501735, + "grad_norm": 3.7719703148456194, + "learning_rate": 6.020029033093622e-10, + "loss": 0.3249, + "step": 14333 + }, + { + "epoch": 0.9952784335508957, + "grad_norm": 3.41673245698817, + "learning_rate": 5.846803260478595e-10, + "loss": 0.2825, + "step": 14334 + }, + { + "epoch": 0.9953478683516178, + "grad_norm": 4.828105014118911, + "learning_rate": 5.676106135554805e-10, + "loss": 0.5024, + "step": 14335 + }, + { + "epoch": 0.99541730315234, + "grad_norm": 4.223585567026278, + "learning_rate": 5.507937666965335e-10, + "loss": 0.5431, + "step": 14336 + }, + { + "epoch": 0.9954867379530621, + "grad_norm": 4.876472785475875, + "learning_rate": 5.342297863208945e-10, + "loss": 0.3773, + "step": 14337 + }, + { + "epoch": 0.9955561727537842, + "grad_norm": 4.052987243835515, + "learning_rate": 5.179186732667818e-10, + "loss": 0.5475, + "step": 14338 + }, + { + "epoch": 0.9956256075545064, + "grad_norm": 4.292254185584279, + "learning_rate": 5.018604283590911e-10, + "loss": 0.4824, + "step": 14339 + }, + { + "epoch": 0.9956950423552284, + "grad_norm": 4.452921171595044, + "learning_rate": 4.860550524099505e-10, + "loss": 0.5335, + "step": 14340 + }, + { + "epoch": 0.9957644771559505, + "grad_norm": 4.378897176409008, + "learning_rate": 4.705025462187207e-10, + "loss": 0.5605, + "step": 14341 + }, + { + "epoch": 0.9958339119566727, + "grad_norm": 4.4018985299093, + "learning_rate": 4.552029105719946e-10, + "loss": 0.5458, + "step": 14342 + }, + { + "epoch": 0.9959033467573948, + "grad_norm": 4.274582997510982, + "learning_rate": 4.401561462441528e-10, + "loss": 0.5652, + "step": 14343 + }, + { + "epoch": 0.995972781558117, + "grad_norm": 3.526885518426328, + "learning_rate": 4.25362253995143e-10, + "loss": 0.1884, + "step": 14344 + }, + { + "epoch": 0.996042216358839, + "grad_norm": 3.064406321902503, + "learning_rate": 4.1082123457436564e-10, + "loss": 0.2163, + "step": 14345 + }, + { + "epoch": 0.9961116511595611, + "grad_norm": 3.439231271991117, + "learning_rate": 3.965330887167884e-10, + "loss": 0.3964, + "step": 14346 + }, + { + "epoch": 0.9961810859602833, + "grad_norm": 3.9051098654732215, + "learning_rate": 3.8249781714572167e-10, + "loss": 0.5116, + "step": 14347 + }, + { + "epoch": 0.9962505207610054, + "grad_norm": 4.59841724202004, + "learning_rate": 3.687154205694876e-10, + "loss": 0.5343, + "step": 14348 + }, + { + "epoch": 0.9963199555617276, + "grad_norm": 3.7779883918339126, + "learning_rate": 3.551858996869717e-10, + "loss": 0.4635, + "step": 14349 + }, + { + "epoch": 0.9963893903624497, + "grad_norm": 4.713106235984339, + "learning_rate": 3.4190925518096106e-10, + "loss": 0.546, + "step": 14350 + }, + { + "epoch": 0.9964588251631717, + "grad_norm": 4.271927391880247, + "learning_rate": 3.288854877242509e-10, + "loss": 0.4137, + "step": 14351 + }, + { + "epoch": 0.9965282599638939, + "grad_norm": 4.631151367705543, + "learning_rate": 3.161145979746483e-10, + "loss": 0.5257, + "step": 14352 + }, + { + "epoch": 0.996597694764616, + 
"grad_norm": 3.8280930635546975, + "learning_rate": 3.0359658657830306e-10, + "loss": 0.4239, + "step": 14353 + }, + { + "epoch": 0.9966671295653381, + "grad_norm": 3.6349747874549223, + "learning_rate": 2.9133145416915253e-10, + "loss": 0.374, + "step": 14354 + }, + { + "epoch": 0.9967365643660603, + "grad_norm": 3.7011679805033055, + "learning_rate": 2.793192013661461e-10, + "loss": 0.3401, + "step": 14355 + }, + { + "epoch": 0.9968059991667824, + "grad_norm": 4.464926053303731, + "learning_rate": 2.67559828778241e-10, + "loss": 0.6501, + "step": 14356 + }, + { + "epoch": 0.9968754339675046, + "grad_norm": 4.7481025144504665, + "learning_rate": 2.560533369994067e-10, + "loss": 0.3476, + "step": 14357 + }, + { + "epoch": 0.9969448687682266, + "grad_norm": 4.131734328997915, + "learning_rate": 2.4479972661139993e-10, + "loss": 0.4138, + "step": 14358 + }, + { + "epoch": 0.9970143035689487, + "grad_norm": 4.881259808307799, + "learning_rate": 2.3379899818432025e-10, + "loss": 0.5463, + "step": 14359 + }, + { + "epoch": 0.9970837383696709, + "grad_norm": 6.08629152175895, + "learning_rate": 2.2305115227383433e-10, + "loss": 0.285, + "step": 14360 + }, + { + "epoch": 0.997153173170393, + "grad_norm": 3.9147647844031073, + "learning_rate": 2.125561894239514e-10, + "loss": 0.4627, + "step": 14361 + }, + { + "epoch": 0.9972226079711152, + "grad_norm": 4.296743034553536, + "learning_rate": 2.0231411016480295e-10, + "loss": 0.4637, + "step": 14362 + }, + { + "epoch": 0.9972920427718373, + "grad_norm": 4.869943486579398, + "learning_rate": 1.9232491501541827e-10, + "loss": 0.498, + "step": 14363 + }, + { + "epoch": 0.9973614775725593, + "grad_norm": 2.913578423169678, + "learning_rate": 1.8258860448094885e-10, + "loss": 0.2188, + "step": 14364 + }, + { + "epoch": 0.9974309123732815, + "grad_norm": 4.394009231080312, + "learning_rate": 1.7310517905266832e-10, + "loss": 0.3591, + "step": 14365 + }, + { + "epoch": 0.9975003471740036, + "grad_norm": 4.473613556361954, + "learning_rate": 1.638746392113033e-10, + "loss": 0.351, + "step": 14366 + }, + { + "epoch": 0.9975697819747257, + "grad_norm": 4.527002859289881, + "learning_rate": 1.5489698542370258e-10, + "loss": 0.298, + "step": 14367 + }, + { + "epoch": 0.9976392167754479, + "grad_norm": 4.931518392819714, + "learning_rate": 1.4617221814339222e-10, + "loss": 0.3902, + "step": 14368 + }, + { + "epoch": 0.99770865157617, + "grad_norm": 3.295643777382922, + "learning_rate": 1.3770033781224101e-10, + "loss": 0.2652, + "step": 14369 + }, + { + "epoch": 0.9977780863768921, + "grad_norm": 4.336957428345614, + "learning_rate": 1.2948134485879504e-10, + "loss": 0.4922, + "step": 14370 + }, + { + "epoch": 0.9978475211776142, + "grad_norm": 3.800642716750994, + "learning_rate": 1.2151523969772262e-10, + "loss": 0.4345, + "step": 14371 + }, + { + "epoch": 0.9979169559783363, + "grad_norm": 5.637153835001134, + "learning_rate": 1.1380202273314489e-10, + "loss": 0.4615, + "step": 14372 + }, + { + "epoch": 0.9979863907790585, + "grad_norm": 4.372711692886307, + "learning_rate": 1.0634169435419505e-10, + "loss": 0.3265, + "step": 14373 + }, + { + "epoch": 0.9980558255797806, + "grad_norm": 4.03590999986349, + "learning_rate": 9.913425493945916e-11, + "loss": 0.2412, + "step": 14374 + }, + { + "epoch": 0.9981252603805028, + "grad_norm": 3.0456800452687065, + "learning_rate": 9.217970485198013e-11, + "loss": 0.3345, + "step": 14375 + }, + { + "epoch": 0.9981946951812248, + "grad_norm": 4.965162411580839, + "learning_rate": 8.54780444448089e-11, + "loss": 0.5296, + 
"step": 14376 + }, + { + "epoch": 0.9982641299819469, + "grad_norm": 4.744323212513148, + "learning_rate": 7.902927405600835e-11, + "loss": 0.5469, + "step": 14377 + }, + { + "epoch": 0.9983335647826691, + "grad_norm": 4.124086971903773, + "learning_rate": 7.28333940125392e-11, + "loss": 0.3947, + "step": 14378 + }, + { + "epoch": 0.9984029995833912, + "grad_norm": 3.4519323720795585, + "learning_rate": 6.68904046269292e-11, + "loss": 0.2258, + "step": 14379 + }, + { + "epoch": 0.9984724343841134, + "grad_norm": 5.76855628145136, + "learning_rate": 6.120030620060391e-11, + "loss": 0.6673, + "step": 14380 + }, + { + "epoch": 0.9985418691848355, + "grad_norm": 3.891626573084236, + "learning_rate": 5.576309902055599e-11, + "loss": 0.4636, + "step": 14381 + }, + { + "epoch": 0.9986113039855575, + "grad_norm": 3.159311806430729, + "learning_rate": 5.0578783362675856e-11, + "loss": 0.3372, + "step": 14382 + }, + { + "epoch": 0.9986807387862797, + "grad_norm": 4.208190358657429, + "learning_rate": 4.5647359488421025e-11, + "loss": 0.473, + "step": 14383 + }, + { + "epoch": 0.9987501735870018, + "grad_norm": 3.1290034236584243, + "learning_rate": 4.096882764759169e-11, + "loss": 0.2771, + "step": 14384 + }, + { + "epoch": 0.9988196083877239, + "grad_norm": 3.589393401645181, + "learning_rate": 3.6543188076110235e-11, + "loss": 0.2638, + "step": 14385 + }, + { + "epoch": 0.9988890431884461, + "grad_norm": 4.011885412095503, + "learning_rate": 3.237044099935194e-11, + "loss": 0.2933, + "step": 14386 + }, + { + "epoch": 0.9989584779891681, + "grad_norm": 2.367969525572387, + "learning_rate": 2.8450586626593836e-11, + "loss": 0.2107, + "step": 14387 + }, + { + "epoch": 0.9990279127898903, + "grad_norm": 3.986704134123939, + "learning_rate": 2.4783625157676072e-11, + "loss": 0.3447, + "step": 14388 + }, + { + "epoch": 0.9990973475906124, + "grad_norm": 3.0388981460566695, + "learning_rate": 2.136955677689567e-11, + "loss": 0.1861, + "step": 14389 + }, + { + "epoch": 0.9991667823913345, + "grad_norm": 3.0442833650455903, + "learning_rate": 1.8208381657447426e-11, + "loss": 0.3338, + "step": 14390 + }, + { + "epoch": 0.9992362171920567, + "grad_norm": 4.509848356790682, + "learning_rate": 1.5300099959203453e-11, + "loss": 0.4787, + "step": 14391 + }, + { + "epoch": 0.9993056519927788, + "grad_norm": 4.103838688282994, + "learning_rate": 1.2644711829268297e-11, + "loss": 0.31, + "step": 14392 + }, + { + "epoch": 0.999375086793501, + "grad_norm": 3.262261419453084, + "learning_rate": 1.024221740197895e-11, + "loss": 0.2631, + "step": 14393 + }, + { + "epoch": 0.999444521594223, + "grad_norm": 2.661518672983248, + "learning_rate": 8.092616798904829e-12, + "loss": 0.1516, + "step": 14394 + }, + { + "epoch": 0.9995139563949451, + "grad_norm": 3.9376351127357148, + "learning_rate": 6.195910128292682e-12, + "loss": 0.4019, + "step": 14395 + }, + { + "epoch": 0.9995833911956673, + "grad_norm": 4.374059515767892, + "learning_rate": 4.5520974867319104e-12, + "loss": 0.5041, + "step": 14396 + }, + { + "epoch": 0.9996528259963894, + "grad_norm": 2.606162531344671, + "learning_rate": 3.16117895693413e-12, + "loss": 0.3171, + "step": 14397 + }, + { + "epoch": 0.9997222607971115, + "grad_norm": 3.9646854163658993, + "learning_rate": 2.0231546088433917e-12, + "loss": 0.5151, + "step": 14398 + }, + { + "epoch": 0.9997916955978337, + "grad_norm": 5.148206642292722, + "learning_rate": 1.1380245013015156e-12, + "loss": 0.3505, + "step": 14399 + }, + { + "epoch": 0.9998611303985557, + "grad_norm": 6.692968332845101, + 
"learning_rate": 5.057886776071996e-13, + "loss": 0.3662, + "step": 14400 + }, + { + "epoch": 0.9999305651992779, + "grad_norm": 3.977562708427656, + "learning_rate": 1.2644717106713445e-13, + "loss": 0.2606, + "step": 14401 + }, + { + "epoch": 1.0, + "grad_norm": 4.821303598316035, + "learning_rate": 0.0, + "loss": 0.4289, + "step": 14402 + }, + { + "epoch": 1.0, + "step": 14402, + "total_flos": 153613769664512.0, + "train_loss": 0.47091111083759557, + "train_runtime": 42172.3476, + "train_samples_per_second": 2.732, + "train_steps_per_second": 0.342 + } + ], + "logging_steps": 1.0, + "max_steps": 14402, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 5000, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 153613769664512.0, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}