{ "best_metric": null, "best_model_checkpoint": null, "epoch": 1.0, "eval_steps": 500, "global_step": 9302, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.0, "grad_norm": 5.132667040745654, "learning_rate": 3.571428571428572e-08, "loss": 1.1077, "step": 1 }, { "epoch": 0.0, "grad_norm": 3.5832425816734204, "learning_rate": 7.142857142857144e-08, "loss": 1.0251, "step": 2 }, { "epoch": 0.0, "grad_norm": 5.081331918493752, "learning_rate": 1.0714285714285716e-07, "loss": 1.1024, "step": 3 }, { "epoch": 0.0, "grad_norm": 3.8633328566479195, "learning_rate": 1.4285714285714287e-07, "loss": 1.1216, "step": 4 }, { "epoch": 0.0, "grad_norm": 0.7936032871635522, "learning_rate": 1.7857142857142858e-07, "loss": 1.0761, "step": 5 }, { "epoch": 0.0, "grad_norm": 4.083283544203863, "learning_rate": 2.142857142857143e-07, "loss": 1.1378, "step": 6 }, { "epoch": 0.0, "grad_norm": 4.990553192653829, "learning_rate": 2.5000000000000004e-07, "loss": 1.1512, "step": 7 }, { "epoch": 0.0, "grad_norm": 4.780291733936853, "learning_rate": 2.8571428571428575e-07, "loss": 1.1272, "step": 8 }, { "epoch": 0.0, "grad_norm": 4.798410582076193, "learning_rate": 3.214285714285714e-07, "loss": 1.1643, "step": 9 }, { "epoch": 0.0, "grad_norm": 4.618792809252698, "learning_rate": 3.5714285714285716e-07, "loss": 1.0503, "step": 10 }, { "epoch": 0.0, "grad_norm": 4.489419747018667, "learning_rate": 3.9285714285714286e-07, "loss": 1.1526, "step": 11 }, { "epoch": 0.0, "grad_norm": 3.737918990138968, "learning_rate": 4.285714285714286e-07, "loss": 1.1737, "step": 12 }, { "epoch": 0.0, "grad_norm": 4.9032250615675395, "learning_rate": 4.642857142857143e-07, "loss": 1.0996, "step": 13 }, { "epoch": 0.0, "grad_norm": 0.7614359569379131, "learning_rate": 5.000000000000001e-07, "loss": 1.0415, "step": 14 }, { "epoch": 0.0, "grad_norm": 3.6846012859223602, "learning_rate": 5.357142857142857e-07, "loss": 1.1706, "step": 15 }, { "epoch": 0.0, "grad_norm": 4.363759732901119, "learning_rate": 5.714285714285715e-07, "loss": 1.1177, "step": 16 }, { "epoch": 0.0, "grad_norm": 3.965490335364641, "learning_rate": 6.071428571428572e-07, "loss": 1.1345, "step": 17 }, { "epoch": 0.0, "grad_norm": 4.217408774339712, "learning_rate": 6.428571428571428e-07, "loss": 1.1446, "step": 18 }, { "epoch": 0.0, "grad_norm": 4.288632652626488, "learning_rate": 6.785714285714286e-07, "loss": 1.1303, "step": 19 }, { "epoch": 0.0, "grad_norm": 4.036957671044967, "learning_rate": 7.142857142857143e-07, "loss": 1.1462, "step": 20 }, { "epoch": 0.0, "grad_norm": 4.503082109980841, "learning_rate": 7.5e-07, "loss": 1.1061, "step": 21 }, { "epoch": 0.0, "grad_norm": 4.075934264654508, "learning_rate": 7.857142857142857e-07, "loss": 1.1608, "step": 22 }, { "epoch": 0.0, "grad_norm": 3.8841677561501564, "learning_rate": 8.214285714285715e-07, "loss": 1.1, "step": 23 }, { "epoch": 0.0, "grad_norm": 0.7592592592734663, "learning_rate": 8.571428571428572e-07, "loss": 1.0412, "step": 24 }, { "epoch": 0.0, "grad_norm": 3.2771766359738415, "learning_rate": 8.928571428571429e-07, "loss": 1.1736, "step": 25 }, { "epoch": 0.0, "grad_norm": 3.9519357053838924, "learning_rate": 9.285714285714287e-07, "loss": 1.1072, "step": 26 }, { "epoch": 0.0, "grad_norm": 3.6181542903595982, "learning_rate": 9.642857142857145e-07, "loss": 1.1146, "step": 27 }, { "epoch": 0.0, "grad_norm": 3.1157188676659278, "learning_rate": 1.0000000000000002e-06, "loss": 1.1193, "step": 28 }, { "epoch": 0.0, "grad_norm": 
3.85238806546678, "learning_rate": 1.0357142857142859e-06, "loss": 1.1558, "step": 29 }, { "epoch": 0.0, "grad_norm": 0.7945059133569435, "learning_rate": 1.0714285714285714e-06, "loss": 1.0713, "step": 30 }, { "epoch": 0.0, "grad_norm": 3.182934667495162, "learning_rate": 1.1071428571428573e-06, "loss": 1.1067, "step": 31 }, { "epoch": 0.0, "grad_norm": 2.60556636198004, "learning_rate": 1.142857142857143e-06, "loss": 1.0752, "step": 32 }, { "epoch": 0.0, "grad_norm": 2.286711288434339, "learning_rate": 1.1785714285714287e-06, "loss": 1.0178, "step": 33 }, { "epoch": 0.0, "grad_norm": 2.6556224312393986, "learning_rate": 1.2142857142857144e-06, "loss": 1.0089, "step": 34 }, { "epoch": 0.0, "grad_norm": 3.799681523216164, "learning_rate": 1.25e-06, "loss": 1.1192, "step": 35 }, { "epoch": 0.0, "grad_norm": 2.448133385030778, "learning_rate": 1.2857142857142856e-06, "loss": 1.0778, "step": 36 }, { "epoch": 0.0, "grad_norm": 2.1832669597916006, "learning_rate": 1.3214285714285715e-06, "loss": 1.0193, "step": 37 }, { "epoch": 0.0, "grad_norm": 2.3479972378836447, "learning_rate": 1.3571428571428572e-06, "loss": 1.0401, "step": 38 }, { "epoch": 0.0, "grad_norm": 2.1381509060973554, "learning_rate": 1.392857142857143e-06, "loss": 1.0512, "step": 39 }, { "epoch": 0.0, "grad_norm": 0.8779552164019765, "learning_rate": 1.4285714285714286e-06, "loss": 1.063, "step": 40 }, { "epoch": 0.0, "grad_norm": 2.428905843479653, "learning_rate": 1.4642857142857145e-06, "loss": 1.0743, "step": 41 }, { "epoch": 0.0, "grad_norm": 2.124776423877994, "learning_rate": 1.5e-06, "loss": 1.0816, "step": 42 }, { "epoch": 0.0, "grad_norm": 2.0109705235401445, "learning_rate": 1.535714285714286e-06, "loss": 0.9992, "step": 43 }, { "epoch": 0.0, "grad_norm": 2.1162201902784097, "learning_rate": 1.5714285714285714e-06, "loss": 1.0692, "step": 44 }, { "epoch": 0.0, "grad_norm": 2.142106122929943, "learning_rate": 1.6071428571428574e-06, "loss": 1.135, "step": 45 }, { "epoch": 0.0, "grad_norm": 2.065089444816147, "learning_rate": 1.642857142857143e-06, "loss": 1.0484, "step": 46 }, { "epoch": 0.01, "grad_norm": 1.8847204976170282, "learning_rate": 1.6785714285714286e-06, "loss": 1.1077, "step": 47 }, { "epoch": 0.01, "grad_norm": 2.0327528518531324, "learning_rate": 1.7142857142857145e-06, "loss": 1.1642, "step": 48 }, { "epoch": 0.01, "grad_norm": 2.0341463283734225, "learning_rate": 1.75e-06, "loss": 1.1166, "step": 49 }, { "epoch": 0.01, "grad_norm": 1.8338487229571316, "learning_rate": 1.7857142857142859e-06, "loss": 0.9445, "step": 50 }, { "epoch": 0.01, "grad_norm": 1.967377778585595, "learning_rate": 1.8214285714285716e-06, "loss": 1.1091, "step": 51 }, { "epoch": 0.01, "grad_norm": 2.1284920128828375, "learning_rate": 1.8571428571428573e-06, "loss": 1.1126, "step": 52 }, { "epoch": 0.01, "grad_norm": 2.0672831930590867, "learning_rate": 1.892857142857143e-06, "loss": 1.0507, "step": 53 }, { "epoch": 0.01, "grad_norm": 2.0333319879417613, "learning_rate": 1.928571428571429e-06, "loss": 1.0415, "step": 54 }, { "epoch": 0.01, "grad_norm": 1.8786659219203434, "learning_rate": 1.9642857142857144e-06, "loss": 1.0165, "step": 55 }, { "epoch": 0.01, "grad_norm": 1.918407820837012, "learning_rate": 2.0000000000000003e-06, "loss": 1.0638, "step": 56 }, { "epoch": 0.01, "grad_norm": 0.8516765852067223, "learning_rate": 2.035714285714286e-06, "loss": 1.0447, "step": 57 }, { "epoch": 0.01, "grad_norm": 2.4320660831231042, "learning_rate": 2.0714285714285717e-06, "loss": 1.0313, "step": 58 }, { "epoch": 0.01, "grad_norm": 
1.8872259842859693, "learning_rate": 2.1071428571428572e-06, "loss": 1.0493, "step": 59 }, { "epoch": 0.01, "grad_norm": 2.2014917685697886, "learning_rate": 2.1428571428571427e-06, "loss": 1.0829, "step": 60 }, { "epoch": 0.01, "grad_norm": 1.7494056578946249, "learning_rate": 2.1785714285714286e-06, "loss": 1.0281, "step": 61 }, { "epoch": 0.01, "grad_norm": 1.8926843897702263, "learning_rate": 2.2142857142857146e-06, "loss": 1.0901, "step": 62 }, { "epoch": 0.01, "grad_norm": 1.9982540726924571, "learning_rate": 2.25e-06, "loss": 1.0307, "step": 63 }, { "epoch": 0.01, "grad_norm": 1.9553156105718255, "learning_rate": 2.285714285714286e-06, "loss": 1.0379, "step": 64 }, { "epoch": 0.01, "grad_norm": 1.7508115743290498, "learning_rate": 2.321428571428572e-06, "loss": 1.0175, "step": 65 }, { "epoch": 0.01, "grad_norm": 1.76567899787695, "learning_rate": 2.3571428571428574e-06, "loss": 0.9992, "step": 66 }, { "epoch": 0.01, "grad_norm": 1.9818949212936914, "learning_rate": 2.3928571428571433e-06, "loss": 1.0146, "step": 67 }, { "epoch": 0.01, "grad_norm": 1.9235911390313425, "learning_rate": 2.428571428571429e-06, "loss": 1.0556, "step": 68 }, { "epoch": 0.01, "grad_norm": 1.9334278051472502, "learning_rate": 2.4642857142857147e-06, "loss": 0.9865, "step": 69 }, { "epoch": 0.01, "grad_norm": 1.9175828101791368, "learning_rate": 2.5e-06, "loss": 0.9776, "step": 70 }, { "epoch": 0.01, "grad_norm": 1.9516094394347074, "learning_rate": 2.5357142857142857e-06, "loss": 0.933, "step": 71 }, { "epoch": 0.01, "grad_norm": 1.802534782906283, "learning_rate": 2.571428571428571e-06, "loss": 1.016, "step": 72 }, { "epoch": 0.01, "grad_norm": 1.9136428941209254, "learning_rate": 2.6071428571428575e-06, "loss": 0.9673, "step": 73 }, { "epoch": 0.01, "grad_norm": 1.94501972139913, "learning_rate": 2.642857142857143e-06, "loss": 1.0887, "step": 74 }, { "epoch": 0.01, "grad_norm": 1.913293449765966, "learning_rate": 2.6785714285714285e-06, "loss": 1.0172, "step": 75 }, { "epoch": 0.01, "grad_norm": 1.817372303588891, "learning_rate": 2.7142857142857144e-06, "loss": 1.1163, "step": 76 }, { "epoch": 0.01, "grad_norm": 1.9481917224020602, "learning_rate": 2.7500000000000004e-06, "loss": 1.011, "step": 77 }, { "epoch": 0.01, "grad_norm": 1.788267732281819, "learning_rate": 2.785714285714286e-06, "loss": 0.997, "step": 78 }, { "epoch": 0.01, "grad_norm": 1.8784637878723836, "learning_rate": 2.8214285714285718e-06, "loss": 0.9679, "step": 79 }, { "epoch": 0.01, "grad_norm": 1.8537341419288107, "learning_rate": 2.8571428571428573e-06, "loss": 1.0761, "step": 80 }, { "epoch": 0.01, "grad_norm": 1.8078542130867514, "learning_rate": 2.892857142857143e-06, "loss": 1.082, "step": 81 }, { "epoch": 0.01, "grad_norm": 2.0120693392018367, "learning_rate": 2.928571428571429e-06, "loss": 0.9997, "step": 82 }, { "epoch": 0.01, "grad_norm": 1.858289061402852, "learning_rate": 2.9642857142857146e-06, "loss": 0.9857, "step": 83 }, { "epoch": 0.01, "grad_norm": 3.443760850229742, "learning_rate": 3e-06, "loss": 1.0857, "step": 84 }, { "epoch": 0.01, "grad_norm": 1.8929874508264222, "learning_rate": 3.0357142857142856e-06, "loss": 1.0229, "step": 85 }, { "epoch": 0.01, "grad_norm": 1.6712403104381248, "learning_rate": 3.071428571428572e-06, "loss": 0.9918, "step": 86 }, { "epoch": 0.01, "grad_norm": 1.9400055474881428, "learning_rate": 3.1071428571428574e-06, "loss": 0.9699, "step": 87 }, { "epoch": 0.01, "grad_norm": 1.8587272521034581, "learning_rate": 3.142857142857143e-06, "loss": 1.0221, "step": 88 }, { "epoch": 0.01, 
"grad_norm": 1.7873447084415033, "learning_rate": 3.178571428571429e-06, "loss": 0.9635, "step": 89 }, { "epoch": 0.01, "grad_norm": 0.840087749643695, "learning_rate": 3.2142857142857147e-06, "loss": 1.0235, "step": 90 }, { "epoch": 0.01, "grad_norm": 1.727243481814505, "learning_rate": 3.2500000000000002e-06, "loss": 0.9754, "step": 91 }, { "epoch": 0.01, "grad_norm": 1.8327650161161178, "learning_rate": 3.285714285714286e-06, "loss": 0.9965, "step": 92 }, { "epoch": 0.01, "grad_norm": 1.9350250589762261, "learning_rate": 3.3214285714285716e-06, "loss": 1.0383, "step": 93 }, { "epoch": 0.01, "grad_norm": 1.8991733726284912, "learning_rate": 3.357142857142857e-06, "loss": 1.0678, "step": 94 }, { "epoch": 0.01, "grad_norm": 1.7973465752671096, "learning_rate": 3.3928571428571435e-06, "loss": 1.0978, "step": 95 }, { "epoch": 0.01, "grad_norm": 1.9533674203712352, "learning_rate": 3.428571428571429e-06, "loss": 0.9195, "step": 96 }, { "epoch": 0.01, "grad_norm": 1.9088003679630476, "learning_rate": 3.4642857142857145e-06, "loss": 0.9903, "step": 97 }, { "epoch": 0.01, "grad_norm": 1.8372828926002955, "learning_rate": 3.5e-06, "loss": 1.0251, "step": 98 }, { "epoch": 0.01, "grad_norm": 1.9696122216873582, "learning_rate": 3.5357142857142863e-06, "loss": 1.0034, "step": 99 }, { "epoch": 0.01, "grad_norm": 1.8380300025142526, "learning_rate": 3.5714285714285718e-06, "loss": 0.9751, "step": 100 }, { "epoch": 0.01, "grad_norm": 1.847954449033685, "learning_rate": 3.6071428571428573e-06, "loss": 0.9459, "step": 101 }, { "epoch": 0.01, "grad_norm": 1.8832981594056148, "learning_rate": 3.642857142857143e-06, "loss": 1.0059, "step": 102 }, { "epoch": 0.01, "grad_norm": 1.8547492680917304, "learning_rate": 3.678571428571429e-06, "loss": 0.9932, "step": 103 }, { "epoch": 0.01, "grad_norm": 1.8657425751623185, "learning_rate": 3.7142857142857146e-06, "loss": 1.002, "step": 104 }, { "epoch": 0.01, "grad_norm": 1.6665780702081876, "learning_rate": 3.7500000000000005e-06, "loss": 1.0593, "step": 105 }, { "epoch": 0.01, "grad_norm": 1.7964712148810413, "learning_rate": 3.785714285714286e-06, "loss": 0.9619, "step": 106 }, { "epoch": 0.01, "grad_norm": 1.7553910708732687, "learning_rate": 3.8214285714285715e-06, "loss": 1.01, "step": 107 }, { "epoch": 0.01, "grad_norm": 1.7747469002432539, "learning_rate": 3.857142857142858e-06, "loss": 1.0268, "step": 108 }, { "epoch": 0.01, "grad_norm": 1.9173742281169184, "learning_rate": 3.892857142857143e-06, "loss": 1.0891, "step": 109 }, { "epoch": 0.01, "grad_norm": 1.7949439015706234, "learning_rate": 3.928571428571429e-06, "loss": 0.983, "step": 110 }, { "epoch": 0.01, "grad_norm": 1.867520643718796, "learning_rate": 3.964285714285714e-06, "loss": 0.9982, "step": 111 }, { "epoch": 0.01, "grad_norm": 1.8034726808554176, "learning_rate": 4.000000000000001e-06, "loss": 1.0578, "step": 112 }, { "epoch": 0.01, "grad_norm": 1.8141924506809295, "learning_rate": 4.035714285714286e-06, "loss": 1.001, "step": 113 }, { "epoch": 0.01, "grad_norm": 1.7976283640270174, "learning_rate": 4.071428571428572e-06, "loss": 0.9854, "step": 114 }, { "epoch": 0.01, "grad_norm": 1.797596603852026, "learning_rate": 4.107142857142857e-06, "loss": 0.9949, "step": 115 }, { "epoch": 0.01, "grad_norm": 1.6659524586669274, "learning_rate": 4.1428571428571435e-06, "loss": 1.0108, "step": 116 }, { "epoch": 0.01, "grad_norm": 0.8330019349258238, "learning_rate": 4.178571428571429e-06, "loss": 1.044, "step": 117 }, { "epoch": 0.01, "grad_norm": 1.7917795096457647, "learning_rate": 
4.2142857142857145e-06, "loss": 0.9551, "step": 118 }, { "epoch": 0.01, "grad_norm": 1.8926908130434266, "learning_rate": 4.25e-06, "loss": 1.0879, "step": 119 }, { "epoch": 0.01, "grad_norm": 0.8494192643378685, "learning_rate": 4.2857142857142855e-06, "loss": 1.0516, "step": 120 }, { "epoch": 0.01, "grad_norm": 1.7928814318941266, "learning_rate": 4.321428571428572e-06, "loss": 0.9981, "step": 121 }, { "epoch": 0.01, "grad_norm": 1.7244277737182516, "learning_rate": 4.357142857142857e-06, "loss": 1.0243, "step": 122 }, { "epoch": 0.01, "grad_norm": 1.8920283111381664, "learning_rate": 4.392857142857143e-06, "loss": 1.0406, "step": 123 }, { "epoch": 0.01, "grad_norm": 1.8754037128801189, "learning_rate": 4.428571428571429e-06, "loss": 0.9638, "step": 124 }, { "epoch": 0.01, "grad_norm": 1.911665558253273, "learning_rate": 4.464285714285715e-06, "loss": 1.0164, "step": 125 }, { "epoch": 0.01, "grad_norm": 1.772633550590763, "learning_rate": 4.5e-06, "loss": 0.9896, "step": 126 }, { "epoch": 0.01, "grad_norm": 1.669546660563049, "learning_rate": 4.5357142857142865e-06, "loss": 0.9915, "step": 127 }, { "epoch": 0.01, "grad_norm": 1.9082535381339016, "learning_rate": 4.571428571428572e-06, "loss": 0.998, "step": 128 }, { "epoch": 0.01, "grad_norm": 1.8817733444830658, "learning_rate": 4.6071428571428574e-06, "loss": 0.9828, "step": 129 }, { "epoch": 0.01, "grad_norm": 1.9198395496696505, "learning_rate": 4.642857142857144e-06, "loss": 1.0489, "step": 130 }, { "epoch": 0.01, "grad_norm": 1.8826157598716975, "learning_rate": 4.678571428571429e-06, "loss": 0.9634, "step": 131 }, { "epoch": 0.01, "grad_norm": 1.735718457310413, "learning_rate": 4.714285714285715e-06, "loss": 0.9558, "step": 132 }, { "epoch": 0.01, "grad_norm": 1.8876893880232615, "learning_rate": 4.75e-06, "loss": 0.9628, "step": 133 }, { "epoch": 0.01, "grad_norm": 0.8906222378020202, "learning_rate": 4.785714285714287e-06, "loss": 1.0632, "step": 134 }, { "epoch": 0.01, "grad_norm": 1.7780584176322876, "learning_rate": 4.821428571428572e-06, "loss": 0.8832, "step": 135 }, { "epoch": 0.01, "grad_norm": 1.949660343989006, "learning_rate": 4.857142857142858e-06, "loss": 0.9787, "step": 136 }, { "epoch": 0.01, "grad_norm": 1.8651817922616587, "learning_rate": 4.892857142857143e-06, "loss": 1.0215, "step": 137 }, { "epoch": 0.01, "grad_norm": 1.8595636989861604, "learning_rate": 4.928571428571429e-06, "loss": 1.0211, "step": 138 }, { "epoch": 0.01, "grad_norm": 1.9155569626611333, "learning_rate": 4.964285714285715e-06, "loss": 1.0025, "step": 139 }, { "epoch": 0.02, "grad_norm": 1.854851167247576, "learning_rate": 5e-06, "loss": 1.0933, "step": 140 }, { "epoch": 0.02, "grad_norm": 2.1191389008275445, "learning_rate": 5.035714285714286e-06, "loss": 0.9626, "step": 141 }, { "epoch": 0.02, "grad_norm": 0.8318167230835649, "learning_rate": 5.071428571428571e-06, "loss": 1.0578, "step": 142 }, { "epoch": 0.02, "grad_norm": 1.9756106842049228, "learning_rate": 5.107142857142857e-06, "loss": 1.0221, "step": 143 }, { "epoch": 0.02, "grad_norm": 0.8017593586744073, "learning_rate": 5.142857142857142e-06, "loss": 1.0654, "step": 144 }, { "epoch": 0.02, "grad_norm": 1.8686875983722218, "learning_rate": 5.1785714285714296e-06, "loss": 1.0484, "step": 145 }, { "epoch": 0.02, "grad_norm": 2.142993315548542, "learning_rate": 5.214285714285715e-06, "loss": 1.0742, "step": 146 }, { "epoch": 0.02, "grad_norm": 1.758586210268953, "learning_rate": 5.2500000000000006e-06, "loss": 0.9645, "step": 147 }, { "epoch": 0.02, "grad_norm": 1.8497120419127009, 
"learning_rate": 5.285714285714286e-06, "loss": 0.966, "step": 148 }, { "epoch": 0.02, "grad_norm": 2.19940116235944, "learning_rate": 5.3214285714285715e-06, "loss": 1.0068, "step": 149 }, { "epoch": 0.02, "grad_norm": 1.9107416277972262, "learning_rate": 5.357142857142857e-06, "loss": 0.9777, "step": 150 }, { "epoch": 0.02, "grad_norm": 1.7854396855590269, "learning_rate": 5.392857142857143e-06, "loss": 1.0025, "step": 151 }, { "epoch": 0.02, "grad_norm": 0.849147562026073, "learning_rate": 5.428571428571429e-06, "loss": 1.0689, "step": 152 }, { "epoch": 0.02, "grad_norm": 1.9479106386506027, "learning_rate": 5.464285714285714e-06, "loss": 1.0963, "step": 153 }, { "epoch": 0.02, "grad_norm": 1.8601881293482652, "learning_rate": 5.500000000000001e-06, "loss": 0.9894, "step": 154 }, { "epoch": 0.02, "grad_norm": 0.8068654290580078, "learning_rate": 5.535714285714286e-06, "loss": 1.0484, "step": 155 }, { "epoch": 0.02, "grad_norm": 1.9335778532937649, "learning_rate": 5.571428571428572e-06, "loss": 0.9808, "step": 156 }, { "epoch": 0.02, "grad_norm": 3.671782156709297, "learning_rate": 5.607142857142858e-06, "loss": 0.9491, "step": 157 }, { "epoch": 0.02, "grad_norm": 1.865357045098957, "learning_rate": 5.6428571428571435e-06, "loss": 1.0005, "step": 158 }, { "epoch": 0.02, "grad_norm": 1.8325287278744955, "learning_rate": 5.678571428571429e-06, "loss": 1.0134, "step": 159 }, { "epoch": 0.02, "grad_norm": 1.8969848642682183, "learning_rate": 5.7142857142857145e-06, "loss": 0.9595, "step": 160 }, { "epoch": 0.02, "grad_norm": 2.0133514679110736, "learning_rate": 5.75e-06, "loss": 1.0015, "step": 161 }, { "epoch": 0.02, "grad_norm": 1.913586407953343, "learning_rate": 5.785714285714286e-06, "loss": 0.9709, "step": 162 }, { "epoch": 0.02, "grad_norm": 1.7144487151715317, "learning_rate": 5.821428571428573e-06, "loss": 0.8927, "step": 163 }, { "epoch": 0.02, "grad_norm": 1.851837740629236, "learning_rate": 5.857142857142858e-06, "loss": 0.9865, "step": 164 }, { "epoch": 0.02, "grad_norm": 1.8280151614241462, "learning_rate": 5.892857142857144e-06, "loss": 0.9441, "step": 165 }, { "epoch": 0.02, "grad_norm": 1.7242042075904989, "learning_rate": 5.928571428571429e-06, "loss": 0.984, "step": 166 }, { "epoch": 0.02, "grad_norm": 0.9161404562957403, "learning_rate": 5.964285714285715e-06, "loss": 1.0448, "step": 167 }, { "epoch": 0.02, "grad_norm": 1.7210219289051765, "learning_rate": 6e-06, "loss": 1.0076, "step": 168 }, { "epoch": 0.02, "grad_norm": 1.9161873976847124, "learning_rate": 6.035714285714286e-06, "loss": 1.0101, "step": 169 }, { "epoch": 0.02, "grad_norm": 1.994381188481452, "learning_rate": 6.071428571428571e-06, "loss": 1.0199, "step": 170 }, { "epoch": 0.02, "grad_norm": 1.8155916608768936, "learning_rate": 6.107142857142858e-06, "loss": 1.0366, "step": 171 }, { "epoch": 0.02, "grad_norm": 1.8529987765332334, "learning_rate": 6.142857142857144e-06, "loss": 1.0381, "step": 172 }, { "epoch": 0.02, "grad_norm": 1.7646415075275224, "learning_rate": 6.178571428571429e-06, "loss": 0.9645, "step": 173 }, { "epoch": 0.02, "grad_norm": 1.9556687103694002, "learning_rate": 6.214285714285715e-06, "loss": 1.0198, "step": 174 }, { "epoch": 0.02, "grad_norm": 1.8690329443893716, "learning_rate": 6.25e-06, "loss": 0.9835, "step": 175 }, { "epoch": 0.02, "grad_norm": 1.9878692524987729, "learning_rate": 6.285714285714286e-06, "loss": 1.0341, "step": 176 }, { "epoch": 0.02, "grad_norm": 1.7552142151229924, "learning_rate": 6.321428571428571e-06, "loss": 1.0388, "step": 177 }, { "epoch": 0.02, 
"grad_norm": 1.9164847564790324, "learning_rate": 6.357142857142858e-06, "loss": 0.9507, "step": 178 }, { "epoch": 0.02, "grad_norm": 1.8482424875142105, "learning_rate": 6.392857142857143e-06, "loss": 0.9355, "step": 179 }, { "epoch": 0.02, "grad_norm": 1.8769436691779848, "learning_rate": 6.4285714285714295e-06, "loss": 1.1162, "step": 180 }, { "epoch": 0.02, "grad_norm": 1.7499514391959903, "learning_rate": 6.464285714285715e-06, "loss": 1.0159, "step": 181 }, { "epoch": 0.02, "grad_norm": 1.7872268649535845, "learning_rate": 6.5000000000000004e-06, "loss": 0.9953, "step": 182 }, { "epoch": 0.02, "grad_norm": 1.8434107714580807, "learning_rate": 6.535714285714286e-06, "loss": 0.9673, "step": 183 }, { "epoch": 0.02, "grad_norm": 1.932556414731293, "learning_rate": 6.571428571428572e-06, "loss": 1.0159, "step": 184 }, { "epoch": 0.02, "grad_norm": 1.7812262305105497, "learning_rate": 6.607142857142858e-06, "loss": 1.0057, "step": 185 }, { "epoch": 0.02, "grad_norm": 1.9146929237583254, "learning_rate": 6.642857142857143e-06, "loss": 0.952, "step": 186 }, { "epoch": 0.02, "grad_norm": 2.050237576845038, "learning_rate": 6.678571428571429e-06, "loss": 1.0187, "step": 187 }, { "epoch": 0.02, "grad_norm": 1.8055046835895017, "learning_rate": 6.714285714285714e-06, "loss": 0.9374, "step": 188 }, { "epoch": 0.02, "grad_norm": 1.7428815471800148, "learning_rate": 6.750000000000001e-06, "loss": 0.9654, "step": 189 }, { "epoch": 0.02, "grad_norm": 1.7290508613297726, "learning_rate": 6.785714285714287e-06, "loss": 0.979, "step": 190 }, { "epoch": 0.02, "grad_norm": 1.8709292729971096, "learning_rate": 6.8214285714285724e-06, "loss": 1.032, "step": 191 }, { "epoch": 0.02, "grad_norm": 1.8539278040187954, "learning_rate": 6.857142857142858e-06, "loss": 0.9922, "step": 192 }, { "epoch": 0.02, "grad_norm": 1.8943122990472057, "learning_rate": 6.892857142857143e-06, "loss": 0.9647, "step": 193 }, { "epoch": 0.02, "grad_norm": 1.9487611423888436, "learning_rate": 6.928571428571429e-06, "loss": 0.958, "step": 194 }, { "epoch": 0.02, "grad_norm": 1.9710707176492916, "learning_rate": 6.964285714285714e-06, "loss": 0.9513, "step": 195 }, { "epoch": 0.02, "grad_norm": 1.8810856608114985, "learning_rate": 7e-06, "loss": 1.0234, "step": 196 }, { "epoch": 0.02, "grad_norm": 1.9246220265109757, "learning_rate": 7.035714285714287e-06, "loss": 0.9576, "step": 197 }, { "epoch": 0.02, "grad_norm": 1.9171919203292742, "learning_rate": 7.0714285714285726e-06, "loss": 1.0097, "step": 198 }, { "epoch": 0.02, "grad_norm": 1.8111290026886595, "learning_rate": 7.107142857142858e-06, "loss": 1.0222, "step": 199 }, { "epoch": 0.02, "grad_norm": 1.8889835487175084, "learning_rate": 7.1428571428571436e-06, "loss": 0.9164, "step": 200 }, { "epoch": 0.02, "grad_norm": 1.9183040195395535, "learning_rate": 7.178571428571429e-06, "loss": 1.0267, "step": 201 }, { "epoch": 0.02, "grad_norm": 1.796736996058039, "learning_rate": 7.2142857142857145e-06, "loss": 0.9211, "step": 202 }, { "epoch": 0.02, "grad_norm": 1.725403341102231, "learning_rate": 7.25e-06, "loss": 0.9405, "step": 203 }, { "epoch": 0.02, "grad_norm": 1.803756098788917, "learning_rate": 7.285714285714286e-06, "loss": 0.9899, "step": 204 }, { "epoch": 0.02, "grad_norm": 1.8910097314906544, "learning_rate": 7.321428571428572e-06, "loss": 0.9688, "step": 205 }, { "epoch": 0.02, "grad_norm": 1.7738192196269755, "learning_rate": 7.357142857142858e-06, "loss": 1.0411, "step": 206 }, { "epoch": 0.02, "grad_norm": 1.8355213449343943, "learning_rate": 7.392857142857144e-06, 
"loss": 0.9451, "step": 207 }, { "epoch": 0.02, "grad_norm": 0.9872759071908677, "learning_rate": 7.428571428571429e-06, "loss": 1.0579, "step": 208 }, { "epoch": 0.02, "grad_norm": 2.036220596836638, "learning_rate": 7.464285714285715e-06, "loss": 1.0807, "step": 209 }, { "epoch": 0.02, "grad_norm": 2.046736507093887, "learning_rate": 7.500000000000001e-06, "loss": 0.9566, "step": 210 }, { "epoch": 0.02, "grad_norm": 1.834145401119039, "learning_rate": 7.5357142857142865e-06, "loss": 0.9485, "step": 211 }, { "epoch": 0.02, "grad_norm": 1.8167955776419433, "learning_rate": 7.571428571428572e-06, "loss": 1.0094, "step": 212 }, { "epoch": 0.02, "grad_norm": 1.7953286542018367, "learning_rate": 7.6071428571428575e-06, "loss": 0.9822, "step": 213 }, { "epoch": 0.02, "grad_norm": 1.934893444982141, "learning_rate": 7.642857142857143e-06, "loss": 0.9906, "step": 214 }, { "epoch": 0.02, "grad_norm": 1.8117607424872288, "learning_rate": 7.67857142857143e-06, "loss": 0.9645, "step": 215 }, { "epoch": 0.02, "grad_norm": 1.8050905557334838, "learning_rate": 7.714285714285716e-06, "loss": 0.9649, "step": 216 }, { "epoch": 0.02, "grad_norm": 2.0057293784376, "learning_rate": 7.75e-06, "loss": 1.005, "step": 217 }, { "epoch": 0.02, "grad_norm": 1.7653909964252372, "learning_rate": 7.785714285714287e-06, "loss": 0.9747, "step": 218 }, { "epoch": 0.02, "grad_norm": 1.8667043820377498, "learning_rate": 7.821428571428571e-06, "loss": 1.0398, "step": 219 }, { "epoch": 0.02, "grad_norm": 1.9457652331065496, "learning_rate": 7.857142857142858e-06, "loss": 0.9192, "step": 220 }, { "epoch": 0.02, "grad_norm": 1.8650452583254298, "learning_rate": 7.892857142857144e-06, "loss": 0.9619, "step": 221 }, { "epoch": 0.02, "grad_norm": 1.8425348807571091, "learning_rate": 7.928571428571429e-06, "loss": 0.9311, "step": 222 }, { "epoch": 0.02, "grad_norm": 1.095624587213627, "learning_rate": 7.964285714285715e-06, "loss": 1.0646, "step": 223 }, { "epoch": 0.02, "grad_norm": 2.0660069130445873, "learning_rate": 8.000000000000001e-06, "loss": 1.0495, "step": 224 }, { "epoch": 0.02, "grad_norm": 1.8776626176180948, "learning_rate": 8.035714285714286e-06, "loss": 0.8902, "step": 225 }, { "epoch": 0.02, "grad_norm": 0.8569786061484821, "learning_rate": 8.071428571428572e-06, "loss": 1.0566, "step": 226 }, { "epoch": 0.02, "grad_norm": 1.944510348870039, "learning_rate": 8.107142857142859e-06, "loss": 0.9745, "step": 227 }, { "epoch": 0.02, "grad_norm": 1.7657772602076074, "learning_rate": 8.142857142857143e-06, "loss": 1.0124, "step": 228 }, { "epoch": 0.02, "grad_norm": 1.8081497041727166, "learning_rate": 8.17857142857143e-06, "loss": 0.9777, "step": 229 }, { "epoch": 0.02, "grad_norm": 1.7912166529817861, "learning_rate": 8.214285714285714e-06, "loss": 1.0445, "step": 230 }, { "epoch": 0.02, "grad_norm": 1.8123940070158067, "learning_rate": 8.25e-06, "loss": 1.0406, "step": 231 }, { "epoch": 0.02, "grad_norm": 1.8576805497501068, "learning_rate": 8.285714285714287e-06, "loss": 0.9653, "step": 232 }, { "epoch": 0.03, "grad_norm": 1.9225745882521939, "learning_rate": 8.321428571428573e-06, "loss": 1.0605, "step": 233 }, { "epoch": 0.03, "grad_norm": 1.7395879984469136, "learning_rate": 8.357142857142858e-06, "loss": 0.9636, "step": 234 }, { "epoch": 0.03, "grad_norm": 1.9006397505812607, "learning_rate": 8.392857142857144e-06, "loss": 0.9565, "step": 235 }, { "epoch": 0.03, "grad_norm": 1.291257477520151, "learning_rate": 8.428571428571429e-06, "loss": 1.043, "step": 236 }, { "epoch": 0.03, "grad_norm": 1.8203677694879266, 
"learning_rate": 8.464285714285715e-06, "loss": 0.9956, "step": 237 }, { "epoch": 0.03, "grad_norm": 1.7966100172996862, "learning_rate": 8.5e-06, "loss": 1.0042, "step": 238 }, { "epoch": 0.03, "grad_norm": 1.7573668767925414, "learning_rate": 8.535714285714286e-06, "loss": 0.9407, "step": 239 }, { "epoch": 0.03, "grad_norm": 1.8643526191975748, "learning_rate": 8.571428571428571e-06, "loss": 1.0195, "step": 240 }, { "epoch": 0.03, "grad_norm": 1.8545606002743658, "learning_rate": 8.607142857142859e-06, "loss": 1.012, "step": 241 }, { "epoch": 0.03, "grad_norm": 1.8015753058981967, "learning_rate": 8.642857142857144e-06, "loss": 0.9649, "step": 242 }, { "epoch": 0.03, "grad_norm": 2.1035101534370813, "learning_rate": 8.67857142857143e-06, "loss": 0.9757, "step": 243 }, { "epoch": 0.03, "grad_norm": 1.7262336482910128, "learning_rate": 8.714285714285715e-06, "loss": 1.0357, "step": 244 }, { "epoch": 0.03, "grad_norm": 1.781661870931318, "learning_rate": 8.750000000000001e-06, "loss": 0.9269, "step": 245 }, { "epoch": 0.03, "grad_norm": 1.7344842248404833, "learning_rate": 8.785714285714286e-06, "loss": 0.978, "step": 246 }, { "epoch": 0.03, "grad_norm": 2.2122058143163352, "learning_rate": 8.821428571428572e-06, "loss": 0.9117, "step": 247 }, { "epoch": 0.03, "grad_norm": 1.7336709645662929, "learning_rate": 8.857142857142858e-06, "loss": 0.9877, "step": 248 }, { "epoch": 0.03, "grad_norm": 1.7301957684985823, "learning_rate": 8.892857142857143e-06, "loss": 0.8862, "step": 249 }, { "epoch": 0.03, "grad_norm": 1.9050622733091505, "learning_rate": 8.92857142857143e-06, "loss": 0.912, "step": 250 }, { "epoch": 0.03, "grad_norm": 1.8358700381944053, "learning_rate": 8.964285714285716e-06, "loss": 0.9989, "step": 251 }, { "epoch": 0.03, "grad_norm": 1.735802137705787, "learning_rate": 9e-06, "loss": 0.9906, "step": 252 }, { "epoch": 0.03, "grad_norm": 1.7854181328182608, "learning_rate": 9.035714285714287e-06, "loss": 1.0675, "step": 253 }, { "epoch": 0.03, "grad_norm": 1.9189468675670451, "learning_rate": 9.071428571428573e-06, "loss": 0.9858, "step": 254 }, { "epoch": 0.03, "grad_norm": 2.032427778485921, "learning_rate": 9.107142857142858e-06, "loss": 1.0544, "step": 255 }, { "epoch": 0.03, "grad_norm": 1.8066424778786374, "learning_rate": 9.142857142857144e-06, "loss": 1.0026, "step": 256 }, { "epoch": 0.03, "grad_norm": 1.9110162535480548, "learning_rate": 9.178571428571429e-06, "loss": 0.9976, "step": 257 }, { "epoch": 0.03, "grad_norm": 1.7861930824601886, "learning_rate": 9.214285714285715e-06, "loss": 0.9512, "step": 258 }, { "epoch": 0.03, "grad_norm": 1.770632806920664, "learning_rate": 9.250000000000001e-06, "loss": 0.9302, "step": 259 }, { "epoch": 0.03, "grad_norm": 1.9557164474413924, "learning_rate": 9.285714285714288e-06, "loss": 1.0303, "step": 260 }, { "epoch": 0.03, "grad_norm": 1.81306497888572, "learning_rate": 9.321428571428572e-06, "loss": 0.961, "step": 261 }, { "epoch": 0.03, "grad_norm": 1.1778120221806583, "learning_rate": 9.357142857142859e-06, "loss": 1.0658, "step": 262 }, { "epoch": 0.03, "grad_norm": 1.9161924776149604, "learning_rate": 9.392857142857143e-06, "loss": 0.9645, "step": 263 }, { "epoch": 0.03, "grad_norm": 1.8556829068296476, "learning_rate": 9.42857142857143e-06, "loss": 0.8909, "step": 264 }, { "epoch": 0.03, "grad_norm": 1.8808322974032767, "learning_rate": 9.464285714285714e-06, "loss": 1.0483, "step": 265 }, { "epoch": 0.03, "grad_norm": 1.0288988239443093, "learning_rate": 9.5e-06, "loss": 1.0503, "step": 266 }, { "epoch": 0.03, "grad_norm": 
1.9520986111176257, "learning_rate": 9.535714285714287e-06, "loss": 1.0207, "step": 267 }, { "epoch": 0.03, "grad_norm": 1.807397773832005, "learning_rate": 9.571428571428573e-06, "loss": 0.9126, "step": 268 }, { "epoch": 0.03, "grad_norm": 1.7665364979436666, "learning_rate": 9.607142857142858e-06, "loss": 0.9769, "step": 269 }, { "epoch": 0.03, "grad_norm": 2.0276055140102667, "learning_rate": 9.642857142857144e-06, "loss": 0.9747, "step": 270 }, { "epoch": 0.03, "grad_norm": 1.9350435118211642, "learning_rate": 9.678571428571429e-06, "loss": 0.9097, "step": 271 }, { "epoch": 0.03, "grad_norm": 0.8864951736289273, "learning_rate": 9.714285714285715e-06, "loss": 1.0829, "step": 272 }, { "epoch": 0.03, "grad_norm": 1.714042820673516, "learning_rate": 9.75e-06, "loss": 0.9118, "step": 273 }, { "epoch": 0.03, "grad_norm": 1.7691063494705084, "learning_rate": 9.785714285714286e-06, "loss": 0.9189, "step": 274 }, { "epoch": 0.03, "grad_norm": 1.9451414181368836, "learning_rate": 9.821428571428573e-06, "loss": 0.9581, "step": 275 }, { "epoch": 0.03, "grad_norm": 1.7953846137041707, "learning_rate": 9.857142857142859e-06, "loss": 0.9632, "step": 276 }, { "epoch": 0.03, "grad_norm": 1.8061258794044173, "learning_rate": 9.892857142857143e-06, "loss": 0.9539, "step": 277 }, { "epoch": 0.03, "grad_norm": 1.007758057953556, "learning_rate": 9.92857142857143e-06, "loss": 1.1074, "step": 278 }, { "epoch": 0.03, "grad_norm": 0.9535907710363158, "learning_rate": 9.964285714285714e-06, "loss": 1.0491, "step": 279 }, { "epoch": 0.03, "grad_norm": 0.838674484696826, "learning_rate": 1e-05, "loss": 1.0559, "step": 280 }, { "epoch": 0.03, "grad_norm": 1.9413909246598793, "learning_rate": 9.999999696866382e-06, "loss": 0.9741, "step": 281 }, { "epoch": 0.03, "grad_norm": 1.9014065870768257, "learning_rate": 9.999998787465562e-06, "loss": 1.0312, "step": 282 }, { "epoch": 0.03, "grad_norm": 1.9284806265774397, "learning_rate": 9.999997271797653e-06, "loss": 1.0811, "step": 283 }, { "epoch": 0.03, "grad_norm": 1.7991993311833812, "learning_rate": 9.999995149862836e-06, "loss": 0.938, "step": 284 }, { "epoch": 0.03, "grad_norm": 1.826482791858458, "learning_rate": 9.999992421661369e-06, "loss": 1.0089, "step": 285 }, { "epoch": 0.03, "grad_norm": 1.7407124628486075, "learning_rate": 9.999989087193582e-06, "loss": 1.0458, "step": 286 }, { "epoch": 0.03, "grad_norm": 1.8169931241354556, "learning_rate": 9.999985146459882e-06, "loss": 0.9556, "step": 287 }, { "epoch": 0.03, "grad_norm": 1.852519306063477, "learning_rate": 9.999980599460747e-06, "loss": 0.9774, "step": 288 }, { "epoch": 0.03, "grad_norm": 1.79173319189818, "learning_rate": 9.999975446196725e-06, "loss": 0.9972, "step": 289 }, { "epoch": 0.03, "grad_norm": 1.981412318188983, "learning_rate": 9.999969686668442e-06, "loss": 0.9291, "step": 290 }, { "epoch": 0.03, "grad_norm": 1.8168744903192728, "learning_rate": 9.9999633208766e-06, "loss": 0.9561, "step": 291 }, { "epoch": 0.03, "grad_norm": 1.6672658485947125, "learning_rate": 9.999956348821965e-06, "loss": 0.9927, "step": 292 }, { "epoch": 0.03, "grad_norm": 1.7859722035567993, "learning_rate": 9.999948770505386e-06, "loss": 0.9362, "step": 293 }, { "epoch": 0.03, "grad_norm": 1.7537741457767284, "learning_rate": 9.99994058592778e-06, "loss": 1.0143, "step": 294 }, { "epoch": 0.03, "grad_norm": 1.8105585270396467, "learning_rate": 9.999931795090143e-06, "loss": 0.9353, "step": 295 }, { "epoch": 0.03, "grad_norm": 1.7608932258417644, "learning_rate": 9.999922397993537e-06, "loss": 0.9088, "step": 296 
}, { "epoch": 0.03, "grad_norm": 2.0198290778560577, "learning_rate": 9.999912394639105e-06, "loss": 0.8845, "step": 297 }, { "epoch": 0.03, "grad_norm": 1.7707371639021934, "learning_rate": 9.999901785028055e-06, "loss": 0.9302, "step": 298 }, { "epoch": 0.03, "grad_norm": 1.8824966806420331, "learning_rate": 9.999890569161676e-06, "loss": 0.9837, "step": 299 }, { "epoch": 0.03, "grad_norm": 1.8653828960816923, "learning_rate": 9.99987874704133e-06, "loss": 1.0813, "step": 300 }, { "epoch": 0.03, "grad_norm": 1.8399969651795043, "learning_rate": 9.99986631866845e-06, "loss": 1.0106, "step": 301 }, { "epoch": 0.03, "grad_norm": 1.8724158233403128, "learning_rate": 9.999853284044539e-06, "loss": 0.9073, "step": 302 }, { "epoch": 0.03, "grad_norm": 1.8148194528040043, "learning_rate": 9.99983964317118e-06, "loss": 0.9504, "step": 303 }, { "epoch": 0.03, "grad_norm": 1.8698145593419844, "learning_rate": 9.99982539605003e-06, "loss": 1.0199, "step": 304 }, { "epoch": 0.03, "grad_norm": 1.948495131611859, "learning_rate": 9.999810542682811e-06, "loss": 1.0659, "step": 305 }, { "epoch": 0.03, "grad_norm": 1.0240633345617711, "learning_rate": 9.999795083071328e-06, "loss": 1.0572, "step": 306 }, { "epoch": 0.03, "grad_norm": 1.9271320490521096, "learning_rate": 9.999779017217454e-06, "loss": 0.9408, "step": 307 }, { "epoch": 0.03, "grad_norm": 1.799310225281412, "learning_rate": 9.999762345123137e-06, "loss": 1.0931, "step": 308 }, { "epoch": 0.03, "grad_norm": 1.8771991773467342, "learning_rate": 9.999745066790398e-06, "loss": 1.0826, "step": 309 }, { "epoch": 0.03, "grad_norm": 0.911607519215812, "learning_rate": 9.999727182221335e-06, "loss": 1.0542, "step": 310 }, { "epoch": 0.03, "grad_norm": 1.8276600341947764, "learning_rate": 9.999708691418113e-06, "loss": 0.963, "step": 311 }, { "epoch": 0.03, "grad_norm": 1.9260927865590318, "learning_rate": 9.999689594382977e-06, "loss": 0.9583, "step": 312 }, { "epoch": 0.03, "grad_norm": 1.769568358359543, "learning_rate": 9.99966989111824e-06, "loss": 0.9126, "step": 313 }, { "epoch": 0.03, "grad_norm": 1.9279884601418389, "learning_rate": 9.999649581626292e-06, "loss": 0.957, "step": 314 }, { "epoch": 0.03, "grad_norm": 2.0169806444231524, "learning_rate": 9.999628665909597e-06, "loss": 1.0059, "step": 315 }, { "epoch": 0.03, "grad_norm": 1.769920040310968, "learning_rate": 9.999607143970689e-06, "loss": 0.9079, "step": 316 }, { "epoch": 0.03, "grad_norm": 1.3205038104644193, "learning_rate": 9.99958501581218e-06, "loss": 1.0608, "step": 317 }, { "epoch": 0.03, "grad_norm": 1.9634857245188009, "learning_rate": 9.999562281436749e-06, "loss": 0.8918, "step": 318 }, { "epoch": 0.03, "grad_norm": 1.7511875874435554, "learning_rate": 9.999538940847156e-06, "loss": 0.9796, "step": 319 }, { "epoch": 0.03, "grad_norm": 1.7254704409678374, "learning_rate": 9.999514994046232e-06, "loss": 1.0014, "step": 320 }, { "epoch": 0.03, "grad_norm": 1.7490919194395176, "learning_rate": 9.999490441036877e-06, "loss": 1.0146, "step": 321 }, { "epoch": 0.03, "grad_norm": 1.9439622195078756, "learning_rate": 9.999465281822072e-06, "loss": 0.9571, "step": 322 }, { "epoch": 0.03, "grad_norm": 1.782253938648281, "learning_rate": 9.999439516404864e-06, "loss": 1.0272, "step": 323 }, { "epoch": 0.03, "grad_norm": 1.9432956938428907, "learning_rate": 9.99941314478838e-06, "loss": 0.9866, "step": 324 }, { "epoch": 0.03, "grad_norm": 1.8152523837360088, "learning_rate": 9.999386166975818e-06, "loss": 0.9852, "step": 325 }, { "epoch": 0.04, "grad_norm": 1.7937956475411325, 
"learning_rate": 9.999358582970444e-06, "loss": 1.0544, "step": 326 }, { "epoch": 0.04, "grad_norm": 1.795258404936494, "learning_rate": 9.99933039277561e-06, "loss": 1.0072, "step": 327 }, { "epoch": 0.04, "grad_norm": 1.960989877077747, "learning_rate": 9.999301596394728e-06, "loss": 1.0839, "step": 328 }, { "epoch": 0.04, "grad_norm": 2.8300759682454655, "learning_rate": 9.999272193831294e-06, "loss": 0.9527, "step": 329 }, { "epoch": 0.04, "grad_norm": 1.7395742534137697, "learning_rate": 9.999242185088871e-06, "loss": 0.9977, "step": 330 }, { "epoch": 0.04, "grad_norm": 1.790302817546493, "learning_rate": 9.999211570171098e-06, "loss": 1.0114, "step": 331 }, { "epoch": 0.04, "grad_norm": 1.9012583446470444, "learning_rate": 9.999180349081688e-06, "loss": 0.981, "step": 332 }, { "epoch": 0.04, "grad_norm": 1.7391960130633908, "learning_rate": 9.999148521824424e-06, "loss": 1.0103, "step": 333 }, { "epoch": 0.04, "grad_norm": 1.8080327218213963, "learning_rate": 9.999116088403168e-06, "loss": 1.0077, "step": 334 }, { "epoch": 0.04, "grad_norm": 1.789355024851214, "learning_rate": 9.999083048821852e-06, "loss": 1.0322, "step": 335 }, { "epoch": 0.04, "grad_norm": 1.7604125625680604, "learning_rate": 9.999049403084481e-06, "loss": 1.0077, "step": 336 }, { "epoch": 0.04, "grad_norm": 1.751969691466195, "learning_rate": 9.999015151195137e-06, "loss": 0.9632, "step": 337 }, { "epoch": 0.04, "grad_norm": 1.9422430315733938, "learning_rate": 9.99898029315797e-06, "loss": 1.045, "step": 338 }, { "epoch": 0.04, "grad_norm": 1.8526265905252293, "learning_rate": 9.998944828977209e-06, "loss": 0.9418, "step": 339 }, { "epoch": 0.04, "grad_norm": 1.7598020549446747, "learning_rate": 9.998908758657153e-06, "loss": 0.9381, "step": 340 }, { "epoch": 0.04, "grad_norm": 1.947987701677072, "learning_rate": 9.998872082202178e-06, "loss": 0.9885, "step": 341 }, { "epoch": 0.04, "grad_norm": 1.8449518459226886, "learning_rate": 9.998834799616729e-06, "loss": 0.9486, "step": 342 }, { "epoch": 0.04, "grad_norm": 1.7242455557867136, "learning_rate": 9.998796910905326e-06, "loss": 1.0132, "step": 343 }, { "epoch": 0.04, "grad_norm": 1.8378995486721486, "learning_rate": 9.998758416072563e-06, "loss": 0.9658, "step": 344 }, { "epoch": 0.04, "grad_norm": 1.7452730650892918, "learning_rate": 9.99871931512311e-06, "loss": 0.9142, "step": 345 }, { "epoch": 0.04, "grad_norm": 1.7216184771824707, "learning_rate": 9.998679608061706e-06, "loss": 0.8848, "step": 346 }, { "epoch": 0.04, "grad_norm": 1.771244228117099, "learning_rate": 9.998639294893168e-06, "loss": 0.9513, "step": 347 }, { "epoch": 0.04, "grad_norm": 1.8524535352268292, "learning_rate": 9.998598375622382e-06, "loss": 0.878, "step": 348 }, { "epoch": 0.04, "grad_norm": 1.7574556626857367, "learning_rate": 9.99855685025431e-06, "loss": 0.9874, "step": 349 }, { "epoch": 0.04, "grad_norm": 1.936301479766512, "learning_rate": 9.998514718793986e-06, "loss": 1.01, "step": 350 }, { "epoch": 0.04, "grad_norm": 1.743238898998547, "learning_rate": 9.998471981246522e-06, "loss": 0.9206, "step": 351 }, { "epoch": 0.04, "grad_norm": 1.908407426072886, "learning_rate": 9.998428637617096e-06, "loss": 1.0315, "step": 352 }, { "epoch": 0.04, "grad_norm": 1.887323151948872, "learning_rate": 9.998384687910968e-06, "loss": 1.0054, "step": 353 }, { "epoch": 0.04, "grad_norm": 1.8689358502968432, "learning_rate": 9.998340132133462e-06, "loss": 0.9786, "step": 354 }, { "epoch": 0.04, "grad_norm": 1.766866877247742, "learning_rate": 9.998294970289984e-06, "loss": 0.974, "step": 355 
}, { "epoch": 0.04, "grad_norm": 1.668049969560548, "learning_rate": 9.998249202386008e-06, "loss": 0.9332, "step": 356 }, { "epoch": 0.04, "grad_norm": 1.7810907957387372, "learning_rate": 9.998202828427087e-06, "loss": 1.072, "step": 357 }, { "epoch": 0.04, "grad_norm": 1.7810003122175218, "learning_rate": 9.99815584841884e-06, "loss": 1.0325, "step": 358 }, { "epoch": 0.04, "grad_norm": 1.8694430603696126, "learning_rate": 9.998108262366965e-06, "loss": 0.9572, "step": 359 }, { "epoch": 0.04, "grad_norm": 2.1378034244315067, "learning_rate": 9.998060070277232e-06, "loss": 0.9627, "step": 360 }, { "epoch": 0.04, "grad_norm": 1.8597088740879761, "learning_rate": 9.998011272155486e-06, "loss": 0.9268, "step": 361 }, { "epoch": 0.04, "grad_norm": 1.9075515554522582, "learning_rate": 9.997961868007641e-06, "loss": 0.9384, "step": 362 }, { "epoch": 0.04, "grad_norm": 1.1093648375171188, "learning_rate": 9.99791185783969e-06, "loss": 1.0893, "step": 363 }, { "epoch": 0.04, "grad_norm": 1.6774363515926407, "learning_rate": 9.997861241657695e-06, "loss": 0.9866, "step": 364 }, { "epoch": 0.04, "grad_norm": 1.9226192210210127, "learning_rate": 9.997810019467794e-06, "loss": 0.9917, "step": 365 }, { "epoch": 0.04, "grad_norm": 1.7369964385383962, "learning_rate": 9.997758191276199e-06, "loss": 0.9783, "step": 366 }, { "epoch": 0.04, "grad_norm": 1.768290530051918, "learning_rate": 9.997705757089193e-06, "loss": 0.9573, "step": 367 }, { "epoch": 0.04, "grad_norm": 1.7267396405436175, "learning_rate": 9.997652716913135e-06, "loss": 0.9405, "step": 368 }, { "epoch": 0.04, "grad_norm": 1.7974486258138789, "learning_rate": 9.997599070754454e-06, "loss": 0.972, "step": 369 }, { "epoch": 0.04, "grad_norm": 1.7718921771971852, "learning_rate": 9.997544818619658e-06, "loss": 0.9679, "step": 370 }, { "epoch": 0.04, "grad_norm": 1.7677117622153677, "learning_rate": 9.99748996051532e-06, "loss": 1.0525, "step": 371 }, { "epoch": 0.04, "grad_norm": 1.8489389044812485, "learning_rate": 9.997434496448099e-06, "loss": 0.9741, "step": 372 }, { "epoch": 0.04, "grad_norm": 1.8615316587238664, "learning_rate": 9.997378426424716e-06, "loss": 0.9277, "step": 373 }, { "epoch": 0.04, "grad_norm": 1.7626686084934176, "learning_rate": 9.99732175045197e-06, "loss": 1.0037, "step": 374 }, { "epoch": 0.04, "grad_norm": 1.842797240944744, "learning_rate": 9.997264468536734e-06, "loss": 0.9722, "step": 375 }, { "epoch": 0.04, "grad_norm": 1.740147069318428, "learning_rate": 9.99720658068595e-06, "loss": 0.9897, "step": 376 }, { "epoch": 0.04, "grad_norm": 1.4163507091164966, "learning_rate": 9.997148086906643e-06, "loss": 1.0379, "step": 377 }, { "epoch": 0.04, "grad_norm": 1.7718206364251659, "learning_rate": 9.997088987205902e-06, "loss": 1.0424, "step": 378 }, { "epoch": 0.04, "grad_norm": 1.7341436912414976, "learning_rate": 9.997029281590893e-06, "loss": 0.9789, "step": 379 }, { "epoch": 0.04, "grad_norm": 1.8998843933957248, "learning_rate": 9.996968970068857e-06, "loss": 0.9529, "step": 380 }, { "epoch": 0.04, "grad_norm": 1.8303203647864688, "learning_rate": 9.996908052647107e-06, "loss": 1.0387, "step": 381 }, { "epoch": 0.04, "grad_norm": 1.0217903943934874, "learning_rate": 9.996846529333028e-06, "loss": 1.0489, "step": 382 }, { "epoch": 0.04, "grad_norm": 1.7377397366551048, "learning_rate": 9.99678440013408e-06, "loss": 0.8791, "step": 383 }, { "epoch": 0.04, "grad_norm": 1.9158974151548998, "learning_rate": 9.996721665057796e-06, "loss": 0.8786, "step": 384 }, { "epoch": 0.04, "grad_norm": 1.8046078589216081, 
"learning_rate": 9.996658324111786e-06, "loss": 0.9756, "step": 385 }, { "epoch": 0.04, "grad_norm": 1.723119485274888, "learning_rate": 9.996594377303727e-06, "loss": 0.9463, "step": 386 }, { "epoch": 0.04, "grad_norm": 1.822813652888169, "learning_rate": 9.996529824641375e-06, "loss": 0.919, "step": 387 }, { "epoch": 0.04, "grad_norm": 1.7650058929683152, "learning_rate": 9.996464666132554e-06, "loss": 0.9869, "step": 388 }, { "epoch": 0.04, "grad_norm": 1.8839699219901074, "learning_rate": 9.996398901785166e-06, "loss": 1.0376, "step": 389 }, { "epoch": 0.04, "grad_norm": 1.989083906714401, "learning_rate": 9.996332531607189e-06, "loss": 0.9248, "step": 390 }, { "epoch": 0.04, "grad_norm": 0.9767177439526261, "learning_rate": 9.996265555606666e-06, "loss": 1.1028, "step": 391 }, { "epoch": 0.04, "grad_norm": 1.8001547919049068, "learning_rate": 9.996197973791722e-06, "loss": 0.9378, "step": 392 }, { "epoch": 0.04, "grad_norm": 1.8601221209189258, "learning_rate": 9.996129786170545e-06, "loss": 0.9148, "step": 393 }, { "epoch": 0.04, "grad_norm": 1.7460548937345546, "learning_rate": 9.996060992751411e-06, "loss": 0.9483, "step": 394 }, { "epoch": 0.04, "grad_norm": 1.948467329702224, "learning_rate": 9.995991593542655e-06, "loss": 1.0333, "step": 395 }, { "epoch": 0.04, "grad_norm": 1.6954938973889058, "learning_rate": 9.995921588552694e-06, "loss": 0.9458, "step": 396 }, { "epoch": 0.04, "grad_norm": 1.7805111411197816, "learning_rate": 9.99585097779002e-06, "loss": 1.0234, "step": 397 }, { "epoch": 0.04, "grad_norm": 1.8173023960655723, "learning_rate": 9.995779761263188e-06, "loss": 0.9241, "step": 398 }, { "epoch": 0.04, "grad_norm": 1.780629413302553, "learning_rate": 9.995707938980841e-06, "loss": 0.9113, "step": 399 }, { "epoch": 0.04, "grad_norm": 1.923387366399058, "learning_rate": 9.99563551095168e-06, "loss": 0.9415, "step": 400 }, { "epoch": 0.04, "grad_norm": 1.1846628128580843, "learning_rate": 9.995562477184492e-06, "loss": 1.0783, "step": 401 }, { "epoch": 0.04, "grad_norm": 2.067450718611197, "learning_rate": 9.995488837688132e-06, "loss": 0.9961, "step": 402 }, { "epoch": 0.04, "grad_norm": 1.8192408543082925, "learning_rate": 9.995414592471528e-06, "loss": 0.9835, "step": 403 }, { "epoch": 0.04, "grad_norm": 1.9682916396457573, "learning_rate": 9.995339741543682e-06, "loss": 0.9839, "step": 404 }, { "epoch": 0.04, "grad_norm": 1.8957509477365946, "learning_rate": 9.995264284913671e-06, "loss": 1.0281, "step": 405 }, { "epoch": 0.04, "grad_norm": 1.8091428564274485, "learning_rate": 9.995188222590646e-06, "loss": 0.989, "step": 406 }, { "epoch": 0.04, "grad_norm": 1.7407792514054405, "learning_rate": 9.995111554583825e-06, "loss": 0.9978, "step": 407 }, { "epoch": 0.04, "grad_norm": 1.826377586139703, "learning_rate": 9.995034280902508e-06, "loss": 1.001, "step": 408 }, { "epoch": 0.04, "grad_norm": 1.7977044321030833, "learning_rate": 9.994956401556067e-06, "loss": 0.8442, "step": 409 }, { "epoch": 0.04, "grad_norm": 1.7611367649859104, "learning_rate": 9.994877916553937e-06, "loss": 1.0238, "step": 410 }, { "epoch": 0.04, "grad_norm": 1.9552743499553855, "learning_rate": 9.994798825905644e-06, "loss": 1.0599, "step": 411 }, { "epoch": 0.04, "grad_norm": 1.2054686137262969, "learning_rate": 9.99471912962077e-06, "loss": 1.0891, "step": 412 }, { "epoch": 0.04, "grad_norm": 1.6706010293151303, "learning_rate": 9.994638827708986e-06, "loss": 0.9328, "step": 413 }, { "epoch": 0.04, "grad_norm": 1.948189979164849, "learning_rate": 9.994557920180024e-06, "loss": 1.0057, 
"step": 414 }, { "epoch": 0.04, "grad_norm": 1.8265247740545638, "learning_rate": 9.994476407043694e-06, "loss": 0.895, "step": 415 }, { "epoch": 0.04, "grad_norm": 2.5305730347541093, "learning_rate": 9.99439428830988e-06, "loss": 0.953, "step": 416 }, { "epoch": 0.04, "grad_norm": 1.9373030008073546, "learning_rate": 9.994311563988543e-06, "loss": 0.916, "step": 417 }, { "epoch": 0.04, "grad_norm": 1.8555670580350845, "learning_rate": 9.99422823408971e-06, "loss": 0.9907, "step": 418 }, { "epoch": 0.05, "grad_norm": 1.763068302574431, "learning_rate": 9.994144298623485e-06, "loss": 0.875, "step": 419 }, { "epoch": 0.05, "grad_norm": 1.2126877008153958, "learning_rate": 9.994059757600047e-06, "loss": 1.1077, "step": 420 }, { "epoch": 0.05, "grad_norm": 1.0319541692222705, "learning_rate": 9.993974611029646e-06, "loss": 1.0642, "step": 421 }, { "epoch": 0.05, "grad_norm": 1.7862262757935998, "learning_rate": 9.993888858922606e-06, "loss": 0.9159, "step": 422 }, { "epoch": 0.05, "grad_norm": 1.8430679573996356, "learning_rate": 9.993802501289327e-06, "loss": 0.9196, "step": 423 }, { "epoch": 0.05, "grad_norm": 1.2034916194647973, "learning_rate": 9.993715538140277e-06, "loss": 1.0918, "step": 424 }, { "epoch": 0.05, "grad_norm": 1.1434693347387117, "learning_rate": 9.993627969486003e-06, "loss": 1.0497, "step": 425 }, { "epoch": 0.05, "grad_norm": 0.9667992296409438, "learning_rate": 9.993539795337122e-06, "loss": 1.0522, "step": 426 }, { "epoch": 0.05, "grad_norm": 1.7356235034275533, "learning_rate": 9.993451015704324e-06, "loss": 0.8628, "step": 427 }, { "epoch": 0.05, "grad_norm": 1.03162997263821, "learning_rate": 9.993361630598377e-06, "loss": 1.0795, "step": 428 }, { "epoch": 0.05, "grad_norm": 1.095996321926192, "learning_rate": 9.993271640030116e-06, "loss": 1.054, "step": 429 }, { "epoch": 0.05, "grad_norm": 1.8531287594286143, "learning_rate": 9.993181044010456e-06, "loss": 0.9752, "step": 430 }, { "epoch": 0.05, "grad_norm": 1.7220756831092734, "learning_rate": 9.993089842550379e-06, "loss": 0.9634, "step": 431 }, { "epoch": 0.05, "grad_norm": 1.7837394850945063, "learning_rate": 9.992998035660946e-06, "loss": 0.9995, "step": 432 }, { "epoch": 0.05, "grad_norm": 1.7110706526802253, "learning_rate": 9.992905623353287e-06, "loss": 0.9145, "step": 433 }, { "epoch": 0.05, "grad_norm": 1.6534140467040328, "learning_rate": 9.992812605638606e-06, "loss": 0.9078, "step": 434 }, { "epoch": 0.05, "grad_norm": 1.746210291124398, "learning_rate": 9.992718982528187e-06, "loss": 0.8916, "step": 435 }, { "epoch": 0.05, "grad_norm": 1.935310891172882, "learning_rate": 9.992624754033377e-06, "loss": 0.9946, "step": 436 }, { "epoch": 0.05, "grad_norm": 1.7873889464337964, "learning_rate": 9.992529920165603e-06, "loss": 0.8361, "step": 437 }, { "epoch": 0.05, "grad_norm": 1.8612968636748695, "learning_rate": 9.992434480936365e-06, "loss": 1.0078, "step": 438 }, { "epoch": 0.05, "grad_norm": 1.7918394896269425, "learning_rate": 9.992338436357235e-06, "loss": 0.9402, "step": 439 }, { "epoch": 0.05, "grad_norm": 1.9078193197942677, "learning_rate": 9.992241786439859e-06, "loss": 0.9495, "step": 440 }, { "epoch": 0.05, "grad_norm": 1.8319620670045862, "learning_rate": 9.992144531195955e-06, "loss": 0.935, "step": 441 }, { "epoch": 0.05, "grad_norm": 1.8449293510185172, "learning_rate": 9.992046670637316e-06, "loss": 0.9734, "step": 442 }, { "epoch": 0.05, "grad_norm": 1.8730803588651936, "learning_rate": 9.991948204775808e-06, "loss": 0.9007, "step": 443 }, { "epoch": 0.05, "grad_norm": 
1.8714182092309293, "learning_rate": 9.99184913362337e-06, "loss": 1.009, "step": 444 }, { "epoch": 0.05, "grad_norm": 1.811743033637239, "learning_rate": 9.991749457192015e-06, "loss": 1.0229, "step": 445 }, { "epoch": 0.05, "grad_norm": 1.89992424778901, "learning_rate": 9.991649175493829e-06, "loss": 0.9306, "step": 446 }, { "epoch": 0.05, "grad_norm": 1.767758615947386, "learning_rate": 9.991548288540971e-06, "loss": 0.8862, "step": 447 }, { "epoch": 0.05, "grad_norm": 1.717951201708389, "learning_rate": 9.991446796345676e-06, "loss": 0.9847, "step": 448 }, { "epoch": 0.05, "grad_norm": 1.8102586640244867, "learning_rate": 9.991344698920248e-06, "loss": 1.0063, "step": 449 }, { "epoch": 0.05, "grad_norm": 1.8310093291758218, "learning_rate": 9.991241996277068e-06, "loss": 0.8896, "step": 450 }, { "epoch": 0.05, "grad_norm": 1.770758327814944, "learning_rate": 9.991138688428588e-06, "loss": 1.0795, "step": 451 }, { "epoch": 0.05, "grad_norm": 1.8460990532070476, "learning_rate": 9.991034775387336e-06, "loss": 0.9816, "step": 452 }, { "epoch": 0.05, "grad_norm": 1.6429231499080639, "learning_rate": 9.99093025716591e-06, "loss": 0.9052, "step": 453 }, { "epoch": 0.05, "grad_norm": 1.9432220732897085, "learning_rate": 9.990825133776983e-06, "loss": 1.0062, "step": 454 }, { "epoch": 0.05, "grad_norm": 1.7485747471408606, "learning_rate": 9.990719405233302e-06, "loss": 0.9437, "step": 455 }, { "epoch": 0.05, "grad_norm": 1.790664740982909, "learning_rate": 9.99061307154769e-06, "loss": 1.0697, "step": 456 }, { "epoch": 0.05, "grad_norm": 1.8680188794611297, "learning_rate": 9.990506132733036e-06, "loss": 1.025, "step": 457 }, { "epoch": 0.05, "grad_norm": 1.2723494579556252, "learning_rate": 9.99039858880231e-06, "loss": 1.0975, "step": 458 }, { "epoch": 0.05, "grad_norm": 1.7684429078817845, "learning_rate": 9.99029043976855e-06, "loss": 0.9523, "step": 459 }, { "epoch": 0.05, "grad_norm": 1.9800672354643611, "learning_rate": 9.990181685644869e-06, "loss": 0.9782, "step": 460 }, { "epoch": 0.05, "grad_norm": 1.7497641597424318, "learning_rate": 9.990072326444456e-06, "loss": 0.8998, "step": 461 }, { "epoch": 0.05, "grad_norm": 1.7678804701646167, "learning_rate": 9.98996236218057e-06, "loss": 1.0159, "step": 462 }, { "epoch": 0.05, "grad_norm": 1.8333399979742488, "learning_rate": 9.989851792866544e-06, "loss": 0.9202, "step": 463 }, { "epoch": 0.05, "grad_norm": 1.8342458871717588, "learning_rate": 9.989740618515787e-06, "loss": 0.8786, "step": 464 }, { "epoch": 0.05, "grad_norm": 1.8275362399611232, "learning_rate": 9.989628839141777e-06, "loss": 0.9931, "step": 465 }, { "epoch": 0.05, "grad_norm": 1.776468802584567, "learning_rate": 9.989516454758066e-06, "loss": 0.9564, "step": 466 }, { "epoch": 0.05, "grad_norm": 1.779785337605654, "learning_rate": 9.989403465378286e-06, "loss": 0.9898, "step": 467 }, { "epoch": 0.05, "grad_norm": 1.2474927320282596, "learning_rate": 9.989289871016134e-06, "loss": 1.057, "step": 468 }, { "epoch": 0.05, "grad_norm": 1.8832695871158862, "learning_rate": 9.989175671685383e-06, "loss": 0.9224, "step": 469 }, { "epoch": 0.05, "grad_norm": 0.8812481153777465, "learning_rate": 9.989060867399884e-06, "loss": 1.075, "step": 470 }, { "epoch": 0.05, "grad_norm": 1.852361736150292, "learning_rate": 9.988945458173553e-06, "loss": 1.0411, "step": 471 }, { "epoch": 0.05, "grad_norm": 1.7585999424822527, "learning_rate": 9.988829444020388e-06, "loss": 0.9183, "step": 472 }, { "epoch": 0.05, "grad_norm": 1.9517448301169975, "learning_rate": 9.988712824954452e-06, 
"loss": 0.9947, "step": 473 }, { "epoch": 0.05, "grad_norm": 1.928191014054902, "learning_rate": 9.988595600989887e-06, "loss": 0.966, "step": 474 }, { "epoch": 0.05, "grad_norm": 1.7355021778443462, "learning_rate": 9.988477772140908e-06, "loss": 0.9682, "step": 475 }, { "epoch": 0.05, "grad_norm": 1.8370788754365992, "learning_rate": 9.988359338421801e-06, "loss": 1.0217, "step": 476 }, { "epoch": 0.05, "grad_norm": 1.8807883614600507, "learning_rate": 9.988240299846927e-06, "loss": 0.9342, "step": 477 }, { "epoch": 0.05, "grad_norm": 1.3272969544545015, "learning_rate": 9.988120656430718e-06, "loss": 1.0586, "step": 478 }, { "epoch": 0.05, "grad_norm": 1.9627495429852833, "learning_rate": 9.988000408187684e-06, "loss": 0.9488, "step": 479 }, { "epoch": 0.05, "grad_norm": 1.8990878635514887, "learning_rate": 9.987879555132404e-06, "loss": 0.9213, "step": 480 }, { "epoch": 0.05, "grad_norm": 1.9523811405836686, "learning_rate": 9.987758097279535e-06, "loss": 0.9519, "step": 481 }, { "epoch": 0.05, "grad_norm": 1.9348596004746337, "learning_rate": 9.987636034643798e-06, "loss": 0.9765, "step": 482 }, { "epoch": 0.05, "grad_norm": 1.8986060031488083, "learning_rate": 9.987513367239995e-06, "loss": 0.8812, "step": 483 }, { "epoch": 0.05, "grad_norm": 1.7834458054495186, "learning_rate": 9.987390095083005e-06, "loss": 0.9296, "step": 484 }, { "epoch": 0.05, "grad_norm": 1.7732203264226643, "learning_rate": 9.987266218187771e-06, "loss": 0.8164, "step": 485 }, { "epoch": 0.05, "grad_norm": 1.9347718463354657, "learning_rate": 9.987141736569314e-06, "loss": 0.9611, "step": 486 }, { "epoch": 0.05, "grad_norm": 1.8765773287844287, "learning_rate": 9.98701665024273e-06, "loss": 0.9491, "step": 487 }, { "epoch": 0.05, "grad_norm": 1.8150677633824404, "learning_rate": 9.986890959223181e-06, "loss": 0.9206, "step": 488 }, { "epoch": 0.05, "grad_norm": 1.8867460056400567, "learning_rate": 9.986764663525913e-06, "loss": 0.9396, "step": 489 }, { "epoch": 0.05, "grad_norm": 1.8298813753799374, "learning_rate": 9.986637763166236e-06, "loss": 1.0279, "step": 490 }, { "epoch": 0.05, "grad_norm": 1.6398417232609694, "learning_rate": 9.986510258159541e-06, "loss": 0.9621, "step": 491 }, { "epoch": 0.05, "grad_norm": 1.7268728251216694, "learning_rate": 9.986382148521283e-06, "loss": 0.9665, "step": 492 }, { "epoch": 0.05, "grad_norm": 1.8633207629215438, "learning_rate": 9.986253434267002e-06, "loss": 0.8896, "step": 493 }, { "epoch": 0.05, "grad_norm": 1.8284720123352507, "learning_rate": 9.9861241154123e-06, "loss": 0.965, "step": 494 }, { "epoch": 0.05, "grad_norm": 1.8156493599253845, "learning_rate": 9.98599419197286e-06, "loss": 1.0073, "step": 495 }, { "epoch": 0.05, "grad_norm": 1.9796908925385863, "learning_rate": 9.985863663964434e-06, "loss": 1.0066, "step": 496 }, { "epoch": 0.05, "grad_norm": 1.8337812904898183, "learning_rate": 9.98573253140285e-06, "loss": 0.9401, "step": 497 }, { "epoch": 0.05, "grad_norm": 1.7559436180098102, "learning_rate": 9.98560079430401e-06, "loss": 1.019, "step": 498 }, { "epoch": 0.05, "grad_norm": 1.1679472626168819, "learning_rate": 9.985468452683882e-06, "loss": 1.072, "step": 499 }, { "epoch": 0.05, "grad_norm": 0.922452023770267, "learning_rate": 9.98533550655852e-06, "loss": 1.0815, "step": 500 }, { "epoch": 0.05, "grad_norm": 1.7237522608965452, "learning_rate": 9.985201955944039e-06, "loss": 0.891, "step": 501 }, { "epoch": 0.05, "grad_norm": 1.8582005561984585, "learning_rate": 9.985067800856636e-06, "loss": 1.0206, "step": 502 }, { "epoch": 0.05, 
"grad_norm": 1.7448129739558724, "learning_rate": 9.984933041312575e-06, "loss": 0.9692, "step": 503 }, { "epoch": 0.05, "grad_norm": 1.8282086515360507, "learning_rate": 9.984797677328195e-06, "loss": 0.9114, "step": 504 }, { "epoch": 0.05, "grad_norm": 1.7351393366243573, "learning_rate": 9.984661708919913e-06, "loss": 0.9995, "step": 505 }, { "epoch": 0.05, "grad_norm": 1.7591151114795303, "learning_rate": 9.984525136104214e-06, "loss": 1.0112, "step": 506 }, { "epoch": 0.05, "grad_norm": 1.868634073105971, "learning_rate": 9.98438795889766e-06, "loss": 0.9551, "step": 507 }, { "epoch": 0.05, "grad_norm": 1.7998162548535677, "learning_rate": 9.984250177316881e-06, "loss": 0.9458, "step": 508 }, { "epoch": 0.05, "grad_norm": 1.8821961179539208, "learning_rate": 9.984111791378583e-06, "loss": 0.9718, "step": 509 }, { "epoch": 0.05, "grad_norm": 1.7585068521771228, "learning_rate": 9.983972801099548e-06, "loss": 0.9697, "step": 510 }, { "epoch": 0.05, "grad_norm": 1.7915986259264736, "learning_rate": 9.98383320649663e-06, "loss": 0.9561, "step": 511 }, { "epoch": 0.06, "grad_norm": 1.678142637420618, "learning_rate": 9.983693007586754e-06, "loss": 0.8566, "step": 512 }, { "epoch": 0.06, "grad_norm": 1.749509545081998, "learning_rate": 9.983552204386916e-06, "loss": 0.936, "step": 513 }, { "epoch": 0.06, "grad_norm": 1.834234008138445, "learning_rate": 9.983410796914197e-06, "loss": 0.952, "step": 514 }, { "epoch": 0.06, "grad_norm": 2.033294029345438, "learning_rate": 9.983268785185736e-06, "loss": 1.003, "step": 515 }, { "epoch": 0.06, "grad_norm": 1.888426292024446, "learning_rate": 9.983126169218754e-06, "loss": 1.036, "step": 516 }, { "epoch": 0.06, "grad_norm": 1.8609013930333695, "learning_rate": 9.982982949030546e-06, "loss": 0.9874, "step": 517 }, { "epoch": 0.06, "grad_norm": 2.0116723727488406, "learning_rate": 9.982839124638474e-06, "loss": 1.0292, "step": 518 }, { "epoch": 0.06, "grad_norm": 1.9039604388176836, "learning_rate": 9.982694696059982e-06, "loss": 0.9762, "step": 519 }, { "epoch": 0.06, "grad_norm": 1.7936355511215125, "learning_rate": 9.982549663312582e-06, "loss": 0.8692, "step": 520 }, { "epoch": 0.06, "grad_norm": 1.7573773134145743, "learning_rate": 9.982404026413854e-06, "loss": 0.9193, "step": 521 }, { "epoch": 0.06, "grad_norm": 1.874692404142349, "learning_rate": 9.982257785381464e-06, "loss": 0.9192, "step": 522 }, { "epoch": 0.06, "grad_norm": 1.761098423029259, "learning_rate": 9.982110940233139e-06, "loss": 1.0955, "step": 523 }, { "epoch": 0.06, "grad_norm": 1.6651444864648541, "learning_rate": 9.981963490986687e-06, "loss": 0.9885, "step": 524 }, { "epoch": 0.06, "grad_norm": 1.863108919064453, "learning_rate": 9.981815437659987e-06, "loss": 0.9783, "step": 525 }, { "epoch": 0.06, "grad_norm": 1.7341218868163277, "learning_rate": 9.98166678027099e-06, "loss": 0.859, "step": 526 }, { "epoch": 0.06, "grad_norm": 1.8583856446853357, "learning_rate": 9.98151751883772e-06, "loss": 0.9862, "step": 527 }, { "epoch": 0.06, "grad_norm": 1.7902381185921945, "learning_rate": 9.981367653378278e-06, "loss": 0.9678, "step": 528 }, { "epoch": 0.06, "grad_norm": 1.7232482103771993, "learning_rate": 9.981217183910834e-06, "loss": 0.9854, "step": 529 }, { "epoch": 0.06, "grad_norm": 1.7245166406569976, "learning_rate": 9.981066110453634e-06, "loss": 1.044, "step": 530 }, { "epoch": 0.06, "grad_norm": 1.8755340432400052, "learning_rate": 9.980914433024997e-06, "loss": 0.9439, "step": 531 }, { "epoch": 0.06, "grad_norm": 1.8180651767153069, "learning_rate": 
9.980762151643313e-06, "loss": 0.9877, "step": 532 }, { "epoch": 0.06, "grad_norm": 1.6642117033393633, "learning_rate": 9.980609266327046e-06, "loss": 0.9304, "step": 533 }, { "epoch": 0.06, "grad_norm": 1.770751243847106, "learning_rate": 9.980455777094733e-06, "loss": 0.9437, "step": 534 }, { "epoch": 0.06, "grad_norm": 1.6630684700108163, "learning_rate": 9.98030168396499e-06, "loss": 0.9379, "step": 535 }, { "epoch": 0.06, "grad_norm": 1.8048619734291211, "learning_rate": 9.980146986956495e-06, "loss": 0.9545, "step": 536 }, { "epoch": 0.06, "grad_norm": 1.8298762898602046, "learning_rate": 9.979991686088012e-06, "loss": 0.9512, "step": 537 }, { "epoch": 0.06, "grad_norm": 1.9104819626973562, "learning_rate": 9.979835781378366e-06, "loss": 1.0086, "step": 538 }, { "epoch": 0.06, "grad_norm": 1.8346423261750149, "learning_rate": 9.979679272846463e-06, "loss": 0.9459, "step": 539 }, { "epoch": 0.06, "grad_norm": 1.7542480905298683, "learning_rate": 9.979522160511282e-06, "loss": 1.0184, "step": 540 }, { "epoch": 0.06, "grad_norm": 1.7604193676146613, "learning_rate": 9.979364444391873e-06, "loss": 0.9596, "step": 541 }, { "epoch": 0.06, "grad_norm": 1.7285763067588011, "learning_rate": 9.979206124507357e-06, "loss": 0.9392, "step": 542 }, { "epoch": 0.06, "grad_norm": 1.7086746994545379, "learning_rate": 9.979047200876932e-06, "loss": 0.9275, "step": 543 }, { "epoch": 0.06, "grad_norm": 1.8537858338204671, "learning_rate": 9.97888767351987e-06, "loss": 1.0081, "step": 544 }, { "epoch": 0.06, "grad_norm": 1.8793186820578498, "learning_rate": 9.978727542455512e-06, "loss": 0.9517, "step": 545 }, { "epoch": 0.06, "grad_norm": 1.7469288461512746, "learning_rate": 9.978566807703275e-06, "loss": 1.1121, "step": 546 }, { "epoch": 0.06, "grad_norm": 1.8215703390582285, "learning_rate": 9.978405469282648e-06, "loss": 0.9125, "step": 547 }, { "epoch": 0.06, "grad_norm": 1.1155966186705373, "learning_rate": 9.978243527213196e-06, "loss": 1.0629, "step": 548 }, { "epoch": 0.06, "grad_norm": 1.7365975649787089, "learning_rate": 9.978080981514554e-06, "loss": 0.9202, "step": 549 }, { "epoch": 0.06, "grad_norm": 1.7687962806959705, "learning_rate": 9.977917832206431e-06, "loss": 0.9539, "step": 550 }, { "epoch": 0.06, "grad_norm": 1.7911541862755394, "learning_rate": 9.977754079308608e-06, "loss": 0.9884, "step": 551 }, { "epoch": 0.06, "grad_norm": 1.9017310930732227, "learning_rate": 9.977589722840943e-06, "loss": 0.9943, "step": 552 }, { "epoch": 0.06, "grad_norm": 2.138313033014973, "learning_rate": 9.977424762823364e-06, "loss": 0.845, "step": 553 }, { "epoch": 0.06, "grad_norm": 1.8215438597774032, "learning_rate": 9.977259199275871e-06, "loss": 0.9565, "step": 554 }, { "epoch": 0.06, "grad_norm": 1.9462909274871398, "learning_rate": 9.977093032218544e-06, "loss": 1.0496, "step": 555 }, { "epoch": 0.06, "grad_norm": 1.8921178982747826, "learning_rate": 9.976926261671524e-06, "loss": 0.9938, "step": 556 }, { "epoch": 0.06, "grad_norm": 1.8287017846381217, "learning_rate": 9.97675888765504e-06, "loss": 0.9369, "step": 557 }, { "epoch": 0.06, "grad_norm": 1.7895611904073543, "learning_rate": 9.976590910189382e-06, "loss": 0.9526, "step": 558 }, { "epoch": 0.06, "grad_norm": 1.846818399416353, "learning_rate": 9.976422329294919e-06, "loss": 0.9289, "step": 559 }, { "epoch": 0.06, "grad_norm": 1.8702061937596497, "learning_rate": 9.976253144992094e-06, "loss": 0.9949, "step": 560 }, { "epoch": 0.06, "grad_norm": 1.955839330032564, "learning_rate": 9.976083357301419e-06, "loss": 0.9309, "step": 561 }, 
{ "epoch": 0.06, "grad_norm": 1.8023863188200453, "learning_rate": 9.97591296624348e-06, "loss": 0.9942, "step": 562 }, { "epoch": 0.06, "grad_norm": 1.7903171343827091, "learning_rate": 9.97574197183894e-06, "loss": 0.9201, "step": 563 }, { "epoch": 0.06, "grad_norm": 1.8089959609657595, "learning_rate": 9.975570374108531e-06, "loss": 0.8746, "step": 564 }, { "epoch": 0.06, "grad_norm": 1.8127768129223576, "learning_rate": 9.975398173073064e-06, "loss": 0.9709, "step": 565 }, { "epoch": 0.06, "grad_norm": 1.9045708550657638, "learning_rate": 9.975225368753412e-06, "loss": 1.0174, "step": 566 }, { "epoch": 0.06, "grad_norm": 1.7450261882038183, "learning_rate": 9.975051961170533e-06, "loss": 0.953, "step": 567 }, { "epoch": 0.06, "grad_norm": 1.9166097491832965, "learning_rate": 9.974877950345453e-06, "loss": 0.9467, "step": 568 }, { "epoch": 0.06, "grad_norm": 1.8483370251536706, "learning_rate": 9.97470333629927e-06, "loss": 0.9745, "step": 569 }, { "epoch": 0.06, "grad_norm": 1.9224517206725216, "learning_rate": 9.974528119053158e-06, "loss": 1.0604, "step": 570 }, { "epoch": 0.06, "grad_norm": 1.8135361467431292, "learning_rate": 9.97435229862836e-06, "loss": 0.989, "step": 571 }, { "epoch": 0.06, "grad_norm": 1.7391744135358165, "learning_rate": 9.974175875046197e-06, "loss": 0.8963, "step": 572 }, { "epoch": 0.06, "grad_norm": 1.8259354012034896, "learning_rate": 9.973998848328062e-06, "loss": 0.9903, "step": 573 }, { "epoch": 0.06, "grad_norm": 1.873932729050753, "learning_rate": 9.973821218495417e-06, "loss": 0.8828, "step": 574 }, { "epoch": 0.06, "grad_norm": 1.7524517468682828, "learning_rate": 9.973642985569803e-06, "loss": 0.9067, "step": 575 }, { "epoch": 0.06, "grad_norm": 2.0015384145181394, "learning_rate": 9.973464149572829e-06, "loss": 1.0994, "step": 576 }, { "epoch": 0.06, "grad_norm": 1.897425966186243, "learning_rate": 9.97328471052618e-06, "loss": 1.0066, "step": 577 }, { "epoch": 0.06, "grad_norm": 1.2178348468792712, "learning_rate": 9.973104668451617e-06, "loss": 1.0475, "step": 578 }, { "epoch": 0.06, "grad_norm": 1.912013919624642, "learning_rate": 9.972924023370967e-06, "loss": 1.006, "step": 579 }, { "epoch": 0.06, "grad_norm": 2.0686877461036826, "learning_rate": 9.972742775306135e-06, "loss": 1.0357, "step": 580 }, { "epoch": 0.06, "grad_norm": 2.017591536114641, "learning_rate": 9.972560924279098e-06, "loss": 1.0012, "step": 581 }, { "epoch": 0.06, "grad_norm": 2.2379380531776647, "learning_rate": 9.972378470311906e-06, "loss": 1.0291, "step": 582 }, { "epoch": 0.06, "grad_norm": 1.9020642874721987, "learning_rate": 9.97219541342668e-06, "loss": 0.9986, "step": 583 }, { "epoch": 0.06, "grad_norm": 1.892875164281699, "learning_rate": 9.97201175364562e-06, "loss": 0.9579, "step": 584 }, { "epoch": 0.06, "grad_norm": 1.830561365390589, "learning_rate": 9.971827490990993e-06, "loss": 0.9995, "step": 585 }, { "epoch": 0.06, "grad_norm": 1.9935730223840498, "learning_rate": 9.971642625485144e-06, "loss": 0.9408, "step": 586 }, { "epoch": 0.06, "grad_norm": 1.8769784060918686, "learning_rate": 9.971457157150485e-06, "loss": 0.9592, "step": 587 }, { "epoch": 0.06, "grad_norm": 2.04349603128331, "learning_rate": 9.971271086009509e-06, "loss": 0.9998, "step": 588 }, { "epoch": 0.06, "grad_norm": 1.9254072544653607, "learning_rate": 9.971084412084773e-06, "loss": 0.9745, "step": 589 }, { "epoch": 0.06, "grad_norm": 2.2928783245331386, "learning_rate": 9.970897135398914e-06, "loss": 1.1144, "step": 590 }, { "epoch": 0.06, "grad_norm": 1.859779802438604, 
"learning_rate": 9.97070925597464e-06, "loss": 1.0651, "step": 591 }, { "epoch": 0.06, "grad_norm": 1.9521111212451354, "learning_rate": 9.970520773834734e-06, "loss": 0.8919, "step": 592 }, { "epoch": 0.06, "grad_norm": 1.0769150280121682, "learning_rate": 9.970331689002046e-06, "loss": 1.0606, "step": 593 }, { "epoch": 0.06, "grad_norm": 1.9367091767594273, "learning_rate": 9.970142001499505e-06, "loss": 0.932, "step": 594 }, { "epoch": 0.06, "grad_norm": 1.9241743588307845, "learning_rate": 9.969951711350114e-06, "loss": 0.9728, "step": 595 }, { "epoch": 0.06, "grad_norm": 1.7815244053234291, "learning_rate": 9.969760818576943e-06, "loss": 0.9051, "step": 596 }, { "epoch": 0.06, "grad_norm": 1.7541410479484068, "learning_rate": 9.969569323203138e-06, "loss": 0.9613, "step": 597 }, { "epoch": 0.06, "grad_norm": 1.9976257598875988, "learning_rate": 9.96937722525192e-06, "loss": 0.9828, "step": 598 }, { "epoch": 0.06, "grad_norm": 1.9730089547222298, "learning_rate": 9.969184524746585e-06, "loss": 0.9612, "step": 599 }, { "epoch": 0.06, "grad_norm": 1.9912894581740013, "learning_rate": 9.96899122171049e-06, "loss": 0.9905, "step": 600 }, { "epoch": 0.06, "grad_norm": 1.8575772946937077, "learning_rate": 9.968797316167083e-06, "loss": 0.958, "step": 601 }, { "epoch": 0.06, "grad_norm": 1.8819876995599274, "learning_rate": 9.96860280813987e-06, "loss": 0.9882, "step": 602 }, { "epoch": 0.06, "grad_norm": 2.6457416407968006, "learning_rate": 9.968407697652436e-06, "loss": 1.0823, "step": 603 }, { "epoch": 0.06, "grad_norm": 1.817364641800979, "learning_rate": 9.96821198472844e-06, "loss": 0.9558, "step": 604 }, { "epoch": 0.07, "grad_norm": 1.8771531816842049, "learning_rate": 9.968015669391613e-06, "loss": 1.0313, "step": 605 }, { "epoch": 0.07, "grad_norm": 1.837738421712122, "learning_rate": 9.967818751665759e-06, "loss": 1.0452, "step": 606 }, { "epoch": 0.07, "grad_norm": 1.9799716024859662, "learning_rate": 9.967621231574754e-06, "loss": 0.9136, "step": 607 }, { "epoch": 0.07, "grad_norm": 1.8973139433595638, "learning_rate": 9.96742310914255e-06, "loss": 1.0104, "step": 608 }, { "epoch": 0.07, "grad_norm": 1.8616875016391605, "learning_rate": 9.967224384393168e-06, "loss": 1.0116, "step": 609 }, { "epoch": 0.07, "grad_norm": 1.9552365160279477, "learning_rate": 9.967025057350705e-06, "loss": 0.9468, "step": 610 }, { "epoch": 0.07, "grad_norm": 1.8682790571888328, "learning_rate": 9.96682512803933e-06, "loss": 0.934, "step": 611 }, { "epoch": 0.07, "grad_norm": 1.8512425230724565, "learning_rate": 9.966624596483284e-06, "loss": 1.0202, "step": 612 }, { "epoch": 0.07, "grad_norm": 1.7897736543785032, "learning_rate": 9.966423462706885e-06, "loss": 0.9423, "step": 613 }, { "epoch": 0.07, "grad_norm": 1.8637879693105739, "learning_rate": 9.966221726734518e-06, "loss": 1.016, "step": 614 }, { "epoch": 0.07, "grad_norm": 1.8289382477344223, "learning_rate": 9.966019388590645e-06, "loss": 1.0386, "step": 615 }, { "epoch": 0.07, "grad_norm": 1.7184673263447736, "learning_rate": 9.965816448299801e-06, "loss": 0.9467, "step": 616 }, { "epoch": 0.07, "grad_norm": 1.9270740641333812, "learning_rate": 9.965612905886593e-06, "loss": 0.9502, "step": 617 }, { "epoch": 0.07, "grad_norm": 1.8449413187934345, "learning_rate": 9.965408761375702e-06, "loss": 0.9439, "step": 618 }, { "epoch": 0.07, "grad_norm": 1.8221184142114624, "learning_rate": 9.965204014791879e-06, "loss": 0.9395, "step": 619 }, { "epoch": 0.07, "grad_norm": 1.8938004785244031, "learning_rate": 9.964998666159952e-06, "loss": 0.9725, 
"step": 620 }, { "epoch": 0.07, "grad_norm": 2.0583541343532343, "learning_rate": 9.96479271550482e-06, "loss": 0.854, "step": 621 }, { "epoch": 0.07, "grad_norm": 1.956440267051593, "learning_rate": 9.964586162851455e-06, "loss": 0.9236, "step": 622 }, { "epoch": 0.07, "grad_norm": 1.775693996598594, "learning_rate": 9.964379008224902e-06, "loss": 0.9357, "step": 623 }, { "epoch": 0.07, "grad_norm": 1.6530095208317594, "learning_rate": 9.96417125165028e-06, "loss": 0.9268, "step": 624 }, { "epoch": 0.07, "grad_norm": 1.9327439029302136, "learning_rate": 9.963962893152779e-06, "loss": 0.9302, "step": 625 }, { "epoch": 0.07, "grad_norm": 1.8169518109892377, "learning_rate": 9.963753932757663e-06, "loss": 0.9559, "step": 626 }, { "epoch": 0.07, "grad_norm": 1.9687021807597014, "learning_rate": 9.96354437049027e-06, "loss": 0.9264, "step": 627 }, { "epoch": 0.07, "grad_norm": 1.7503101529375042, "learning_rate": 9.963334206376011e-06, "loss": 0.9384, "step": 628 }, { "epoch": 0.07, "grad_norm": 1.0987366385180621, "learning_rate": 9.96312344044037e-06, "loss": 1.0687, "step": 629 }, { "epoch": 0.07, "grad_norm": 1.7913385847332246, "learning_rate": 9.962912072708898e-06, "loss": 0.9251, "step": 630 }, { "epoch": 0.07, "grad_norm": 1.7342714295246835, "learning_rate": 9.96270010320723e-06, "loss": 1.038, "step": 631 }, { "epoch": 0.07, "grad_norm": 1.7780518676028247, "learning_rate": 9.962487531961064e-06, "loss": 0.9799, "step": 632 }, { "epoch": 0.07, "grad_norm": 1.788720066371652, "learning_rate": 9.962274358996178e-06, "loss": 0.9225, "step": 633 }, { "epoch": 0.07, "grad_norm": 1.7654704240372205, "learning_rate": 9.962060584338416e-06, "loss": 0.9222, "step": 634 }, { "epoch": 0.07, "grad_norm": 1.7625696357265461, "learning_rate": 9.961846208013703e-06, "loss": 0.9726, "step": 635 }, { "epoch": 0.07, "grad_norm": 1.6841560229671029, "learning_rate": 9.961631230048032e-06, "loss": 0.9397, "step": 636 }, { "epoch": 0.07, "grad_norm": 0.9103175919775225, "learning_rate": 9.961415650467468e-06, "loss": 1.0677, "step": 637 }, { "epoch": 0.07, "grad_norm": 1.768671834701704, "learning_rate": 9.961199469298151e-06, "loss": 1.0266, "step": 638 }, { "epoch": 0.07, "grad_norm": 1.78625536991518, "learning_rate": 9.960982686566295e-06, "loss": 0.9044, "step": 639 }, { "epoch": 0.07, "grad_norm": 1.7628255178663492, "learning_rate": 9.960765302298185e-06, "loss": 0.8897, "step": 640 }, { "epoch": 0.07, "grad_norm": 1.7237950091053256, "learning_rate": 9.960547316520182e-06, "loss": 0.9906, "step": 641 }, { "epoch": 0.07, "grad_norm": 0.8392334080878924, "learning_rate": 9.960328729258713e-06, "loss": 1.0882, "step": 642 }, { "epoch": 0.07, "grad_norm": 1.8005071344569594, "learning_rate": 9.960109540540283e-06, "loss": 1.0159, "step": 643 }, { "epoch": 0.07, "grad_norm": 1.7761629381340716, "learning_rate": 9.959889750391474e-06, "loss": 1.0221, "step": 644 }, { "epoch": 0.07, "grad_norm": 1.7060716439274553, "learning_rate": 9.959669358838932e-06, "loss": 0.9978, "step": 645 }, { "epoch": 0.07, "grad_norm": 1.8953033852689745, "learning_rate": 9.959448365909383e-06, "loss": 0.9311, "step": 646 }, { "epoch": 0.07, "grad_norm": 1.7863509286896406, "learning_rate": 9.959226771629623e-06, "loss": 0.8935, "step": 647 }, { "epoch": 0.07, "grad_norm": 1.728604807127596, "learning_rate": 9.959004576026516e-06, "loss": 0.9478, "step": 648 }, { "epoch": 0.07, "grad_norm": 1.8419916827797762, "learning_rate": 9.95878177912701e-06, "loss": 0.9079, "step": 649 }, { "epoch": 0.07, "grad_norm": 
1.2087797719122222, "learning_rate": 9.958558380958118e-06, "loss": 1.0666, "step": 650 }, { "epoch": 0.07, "grad_norm": 1.7791755105817448, "learning_rate": 9.958334381546927e-06, "loss": 0.9997, "step": 651 }, { "epoch": 0.07, "grad_norm": 1.8707170135919433, "learning_rate": 9.958109780920598e-06, "loss": 0.8551, "step": 652 }, { "epoch": 0.07, "grad_norm": 1.9541392269333469, "learning_rate": 9.957884579106365e-06, "loss": 0.9551, "step": 653 }, { "epoch": 0.07, "grad_norm": 1.905994967707482, "learning_rate": 9.957658776131536e-06, "loss": 0.9826, "step": 654 }, { "epoch": 0.07, "grad_norm": 1.899777785878718, "learning_rate": 9.957432372023487e-06, "loss": 0.9648, "step": 655 }, { "epoch": 0.07, "grad_norm": 1.0979333253902503, "learning_rate": 9.957205366809673e-06, "loss": 1.1097, "step": 656 }, { "epoch": 0.07, "grad_norm": 0.8775999868428007, "learning_rate": 9.956977760517617e-06, "loss": 1.0705, "step": 657 }, { "epoch": 0.07, "grad_norm": 1.9362360310686466, "learning_rate": 9.956749553174919e-06, "loss": 0.9588, "step": 658 }, { "epoch": 0.07, "grad_norm": 1.9388211625572105, "learning_rate": 9.956520744809249e-06, "loss": 0.9761, "step": 659 }, { "epoch": 0.07, "grad_norm": 1.8519198696963795, "learning_rate": 9.956291335448351e-06, "loss": 1.003, "step": 660 }, { "epoch": 0.07, "grad_norm": 1.8454020936116626, "learning_rate": 9.95606132512004e-06, "loss": 0.9225, "step": 661 }, { "epoch": 0.07, "grad_norm": 1.8060025455197584, "learning_rate": 9.95583071385221e-06, "loss": 1.0429, "step": 662 }, { "epoch": 0.07, "grad_norm": 1.7171723636224656, "learning_rate": 9.95559950167282e-06, "loss": 0.9743, "step": 663 }, { "epoch": 0.07, "grad_norm": 1.8787139336762622, "learning_rate": 9.955367688609904e-06, "loss": 1.003, "step": 664 }, { "epoch": 0.07, "grad_norm": 1.6749247472999584, "learning_rate": 9.955135274691575e-06, "loss": 0.934, "step": 665 }, { "epoch": 0.07, "grad_norm": 1.7332859395977531, "learning_rate": 9.954902259946008e-06, "loss": 0.9183, "step": 666 }, { "epoch": 0.07, "grad_norm": 1.9021831293417901, "learning_rate": 9.954668644401462e-06, "loss": 1.0768, "step": 667 }, { "epoch": 0.07, "grad_norm": 1.856143973447642, "learning_rate": 9.95443442808626e-06, "loss": 1.015, "step": 668 }, { "epoch": 0.07, "grad_norm": 1.8090075540328545, "learning_rate": 9.954199611028803e-06, "loss": 0.9164, "step": 669 }, { "epoch": 0.07, "grad_norm": 1.7650216485821708, "learning_rate": 9.953964193257563e-06, "loss": 1.0329, "step": 670 }, { "epoch": 0.07, "grad_norm": 1.708200062633118, "learning_rate": 9.953728174801089e-06, "loss": 0.9691, "step": 671 }, { "epoch": 0.07, "grad_norm": 1.842674374882094, "learning_rate": 9.953491555687994e-06, "loss": 1.066, "step": 672 }, { "epoch": 0.07, "grad_norm": 1.7718209773301226, "learning_rate": 9.95325433594697e-06, "loss": 0.917, "step": 673 }, { "epoch": 0.07, "grad_norm": 1.4880659865595764, "learning_rate": 9.95301651560678e-06, "loss": 1.0497, "step": 674 }, { "epoch": 0.07, "grad_norm": 1.1691065286797593, "learning_rate": 9.952778094696263e-06, "loss": 1.0805, "step": 675 }, { "epoch": 0.07, "grad_norm": 1.7628223270057626, "learning_rate": 9.952539073244326e-06, "loss": 0.9391, "step": 676 }, { "epoch": 0.07, "grad_norm": 0.9530968609954608, "learning_rate": 9.952299451279956e-06, "loss": 1.0834, "step": 677 }, { "epoch": 0.07, "grad_norm": 1.7191467821085589, "learning_rate": 9.952059228832202e-06, "loss": 0.987, "step": 678 }, { "epoch": 0.07, "grad_norm": 1.9518416576120086, "learning_rate": 9.951818405930194e-06, 
"loss": 0.915, "step": 679 }, { "epoch": 0.07, "grad_norm": 1.8824786204070405, "learning_rate": 9.951576982603133e-06, "loss": 1.0249, "step": 680 }, { "epoch": 0.07, "grad_norm": 1.9540247887414999, "learning_rate": 9.951334958880292e-06, "loss": 0.9899, "step": 681 }, { "epoch": 0.07, "grad_norm": 1.8103136107534201, "learning_rate": 9.951092334791018e-06, "loss": 0.9051, "step": 682 }, { "epoch": 0.07, "grad_norm": 1.9171593119399666, "learning_rate": 9.950849110364729e-06, "loss": 1.0154, "step": 683 }, { "epoch": 0.07, "grad_norm": 1.8212591907813733, "learning_rate": 9.950605285630917e-06, "loss": 0.9529, "step": 684 }, { "epoch": 0.07, "grad_norm": 1.7956146149315781, "learning_rate": 9.950360860619147e-06, "loss": 0.9842, "step": 685 }, { "epoch": 0.07, "grad_norm": 1.8475635658461347, "learning_rate": 9.950115835359055e-06, "loss": 0.9212, "step": 686 }, { "epoch": 0.07, "grad_norm": 1.6732753547058545, "learning_rate": 9.949870209880355e-06, "loss": 0.9607, "step": 687 }, { "epoch": 0.07, "grad_norm": 1.7881646180243027, "learning_rate": 9.949623984212825e-06, "loss": 0.9131, "step": 688 }, { "epoch": 0.07, "grad_norm": 1.7632314528143287, "learning_rate": 9.949377158386324e-06, "loss": 0.9592, "step": 689 }, { "epoch": 0.07, "grad_norm": 1.72638724645718, "learning_rate": 9.949129732430779e-06, "loss": 0.9474, "step": 690 }, { "epoch": 0.07, "grad_norm": 1.75036474727148, "learning_rate": 9.948881706376193e-06, "loss": 0.9568, "step": 691 }, { "epoch": 0.07, "grad_norm": 1.6302994955465049, "learning_rate": 9.948633080252637e-06, "loss": 1.0904, "step": 692 }, { "epoch": 0.07, "grad_norm": 1.720832326694608, "learning_rate": 9.94838385409026e-06, "loss": 0.9587, "step": 693 }, { "epoch": 0.07, "grad_norm": 1.0458443644499276, "learning_rate": 9.94813402791928e-06, "loss": 1.0918, "step": 694 }, { "epoch": 0.07, "grad_norm": 1.9654229081399957, "learning_rate": 9.947883601769992e-06, "loss": 0.9023, "step": 695 }, { "epoch": 0.07, "grad_norm": 1.9860269735905858, "learning_rate": 9.947632575672758e-06, "loss": 0.9692, "step": 696 }, { "epoch": 0.07, "grad_norm": 1.7071028298728652, "learning_rate": 9.947380949658018e-06, "loss": 0.9506, "step": 697 }, { "epoch": 0.08, "grad_norm": 1.7538962637600446, "learning_rate": 9.947128723756282e-06, "loss": 0.8981, "step": 698 }, { "epoch": 0.08, "grad_norm": 1.2947361655354426, "learning_rate": 9.946875897998131e-06, "loss": 1.0773, "step": 699 }, { "epoch": 0.08, "grad_norm": 1.8541641733446406, "learning_rate": 9.946622472414224e-06, "loss": 0.9427, "step": 700 }, { "epoch": 0.08, "grad_norm": 1.9265828758974637, "learning_rate": 9.946368447035291e-06, "loss": 0.9609, "step": 701 }, { "epoch": 0.08, "grad_norm": 1.81874713320279, "learning_rate": 9.946113821892129e-06, "loss": 0.9073, "step": 702 }, { "epoch": 0.08, "grad_norm": 1.0227154371529525, "learning_rate": 9.945858597015614e-06, "loss": 1.0915, "step": 703 }, { "epoch": 0.08, "grad_norm": 1.6918596382511752, "learning_rate": 9.945602772436693e-06, "loss": 0.9359, "step": 704 }, { "epoch": 0.08, "grad_norm": 1.8193262539606974, "learning_rate": 9.945346348186385e-06, "loss": 0.9487, "step": 705 }, { "epoch": 0.08, "grad_norm": 1.7522003758107525, "learning_rate": 9.945089324295785e-06, "loss": 0.9456, "step": 706 }, { "epoch": 0.08, "grad_norm": 1.896463535938412, "learning_rate": 9.944831700796055e-06, "loss": 0.9862, "step": 707 }, { "epoch": 0.08, "grad_norm": 1.8321035180909384, "learning_rate": 9.944573477718435e-06, "loss": 1.0236, "step": 708 }, { "epoch": 0.08, 
"grad_norm": 1.6590614298208861, "learning_rate": 9.944314655094233e-06, "loss": 0.9222, "step": 709 }, { "epoch": 0.08, "grad_norm": 1.8113615471791165, "learning_rate": 9.944055232954834e-06, "loss": 1.0818, "step": 710 }, { "epoch": 0.08, "grad_norm": 1.7238807744908444, "learning_rate": 9.943795211331692e-06, "loss": 0.9948, "step": 711 }, { "epoch": 0.08, "grad_norm": 1.6970854361188064, "learning_rate": 9.943534590256338e-06, "loss": 0.993, "step": 712 }, { "epoch": 0.08, "grad_norm": 1.146543441409252, "learning_rate": 9.943273369760373e-06, "loss": 1.0946, "step": 713 }, { "epoch": 0.08, "grad_norm": 1.8770392079366378, "learning_rate": 9.943011549875468e-06, "loss": 0.9566, "step": 714 }, { "epoch": 0.08, "grad_norm": 0.9521917888426603, "learning_rate": 9.942749130633371e-06, "loss": 1.0605, "step": 715 }, { "epoch": 0.08, "grad_norm": 1.7807992782522637, "learning_rate": 9.942486112065903e-06, "loss": 0.9806, "step": 716 }, { "epoch": 0.08, "grad_norm": 1.9385575090178524, "learning_rate": 9.942222494204956e-06, "loss": 0.8975, "step": 717 }, { "epoch": 0.08, "grad_norm": 1.7069462399149318, "learning_rate": 9.941958277082491e-06, "loss": 0.9507, "step": 718 }, { "epoch": 0.08, "grad_norm": 1.8106305629744721, "learning_rate": 9.941693460730549e-06, "loss": 0.9822, "step": 719 }, { "epoch": 0.08, "grad_norm": 1.8201826080289825, "learning_rate": 9.941428045181236e-06, "loss": 0.9576, "step": 720 }, { "epoch": 0.08, "grad_norm": 1.763219712486219, "learning_rate": 9.941162030466738e-06, "loss": 0.9385, "step": 721 }, { "epoch": 0.08, "grad_norm": 1.8086066736383897, "learning_rate": 9.940895416619308e-06, "loss": 1.0173, "step": 722 }, { "epoch": 0.08, "grad_norm": 1.4087884379310707, "learning_rate": 9.940628203671277e-06, "loss": 1.0614, "step": 723 }, { "epoch": 0.08, "grad_norm": 1.7889845618807796, "learning_rate": 9.940360391655042e-06, "loss": 0.99, "step": 724 }, { "epoch": 0.08, "grad_norm": 1.8224591862233448, "learning_rate": 9.940091980603077e-06, "loss": 1.0077, "step": 725 }, { "epoch": 0.08, "grad_norm": 1.7157721857885622, "learning_rate": 9.93982297054793e-06, "loss": 0.9752, "step": 726 }, { "epoch": 0.08, "grad_norm": 1.7476637291527775, "learning_rate": 9.939553361522216e-06, "loss": 0.9338, "step": 727 }, { "epoch": 0.08, "grad_norm": 1.8725732835583182, "learning_rate": 9.939283153558628e-06, "loss": 0.9515, "step": 728 }, { "epoch": 0.08, "grad_norm": 1.8196883956992354, "learning_rate": 9.93901234668993e-06, "loss": 0.9523, "step": 729 }, { "epoch": 0.08, "grad_norm": 1.852767824352539, "learning_rate": 9.938740940948958e-06, "loss": 0.8896, "step": 730 }, { "epoch": 0.08, "grad_norm": 1.8279283348121713, "learning_rate": 9.93846893636862e-06, "loss": 0.9793, "step": 731 }, { "epoch": 0.08, "grad_norm": 1.7601982567831413, "learning_rate": 9.938196332981897e-06, "loss": 0.9993, "step": 732 }, { "epoch": 0.08, "grad_norm": 1.8075529151933076, "learning_rate": 9.937923130821845e-06, "loss": 0.9624, "step": 733 }, { "epoch": 0.08, "grad_norm": 1.8088169140742196, "learning_rate": 9.93764932992159e-06, "loss": 0.8712, "step": 734 }, { "epoch": 0.08, "grad_norm": 1.7456650635646105, "learning_rate": 9.937374930314329e-06, "loss": 0.8955, "step": 735 }, { "epoch": 0.08, "grad_norm": 1.820080867070887, "learning_rate": 9.937099932033338e-06, "loss": 0.972, "step": 736 }, { "epoch": 0.08, "grad_norm": 1.8135064108896235, "learning_rate": 9.93682433511196e-06, "loss": 0.9319, "step": 737 }, { "epoch": 0.08, "grad_norm": 1.8341605777113597, "learning_rate": 
9.93654813958361e-06, "loss": 1.0143, "step": 738 }, { "epoch": 0.08, "grad_norm": 1.689388119074655, "learning_rate": 9.93627134548178e-06, "loss": 0.9801, "step": 739 }, { "epoch": 0.08, "grad_norm": 1.0866878366430799, "learning_rate": 9.935993952840033e-06, "loss": 1.0823, "step": 740 }, { "epoch": 0.08, "grad_norm": 1.8593270576815693, "learning_rate": 9.935715961691999e-06, "loss": 0.9909, "step": 741 }, { "epoch": 0.08, "grad_norm": 0.8616278854311283, "learning_rate": 9.93543737207139e-06, "loss": 1.0867, "step": 742 }, { "epoch": 0.08, "grad_norm": 0.8438166281667455, "learning_rate": 9.935158184011986e-06, "loss": 1.077, "step": 743 }, { "epoch": 0.08, "grad_norm": 1.6302606809454292, "learning_rate": 9.934878397547636e-06, "loss": 0.8916, "step": 744 }, { "epoch": 0.08, "grad_norm": 1.7674304057868386, "learning_rate": 9.934598012712268e-06, "loss": 0.913, "step": 745 }, { "epoch": 0.08, "grad_norm": 1.7740834844671542, "learning_rate": 9.93431702953988e-06, "loss": 1.0073, "step": 746 }, { "epoch": 0.08, "grad_norm": 1.742712634550345, "learning_rate": 9.934035448064538e-06, "loss": 0.9415, "step": 747 }, { "epoch": 0.08, "grad_norm": 1.8245864128902234, "learning_rate": 9.933753268320391e-06, "loss": 0.9103, "step": 748 }, { "epoch": 0.08, "grad_norm": 1.7466188233865187, "learning_rate": 9.933470490341651e-06, "loss": 0.9975, "step": 749 }, { "epoch": 0.08, "grad_norm": 1.8248312212161273, "learning_rate": 9.933187114162604e-06, "loss": 1.0739, "step": 750 }, { "epoch": 0.08, "grad_norm": 1.7719863992975844, "learning_rate": 9.932903139817612e-06, "loss": 0.9883, "step": 751 }, { "epoch": 0.08, "grad_norm": 1.8415070340387925, "learning_rate": 9.932618567341107e-06, "loss": 0.9719, "step": 752 }, { "epoch": 0.08, "grad_norm": 1.796157909232651, "learning_rate": 9.932333396767597e-06, "loss": 0.9084, "step": 753 }, { "epoch": 0.08, "grad_norm": 1.7453697848339382, "learning_rate": 9.932047628131659e-06, "loss": 0.8782, "step": 754 }, { "epoch": 0.08, "grad_norm": 1.7117725873989906, "learning_rate": 9.931761261467943e-06, "loss": 0.974, "step": 755 }, { "epoch": 0.08, "grad_norm": 1.6937052868547227, "learning_rate": 9.93147429681117e-06, "loss": 0.9048, "step": 756 }, { "epoch": 0.08, "grad_norm": 1.7995422616418353, "learning_rate": 9.931186734196137e-06, "loss": 0.9049, "step": 757 }, { "epoch": 0.08, "grad_norm": 1.7728240453817152, "learning_rate": 9.930898573657712e-06, "loss": 0.9533, "step": 758 }, { "epoch": 0.08, "grad_norm": 1.6739001112850005, "learning_rate": 9.930609815230838e-06, "loss": 0.9506, "step": 759 }, { "epoch": 0.08, "grad_norm": 1.4264406399843927, "learning_rate": 9.930320458950525e-06, "loss": 1.0621, "step": 760 }, { "epoch": 0.08, "grad_norm": 1.9943067906601395, "learning_rate": 9.930030504851857e-06, "loss": 0.9967, "step": 761 }, { "epoch": 0.08, "grad_norm": 1.8847757964544065, "learning_rate": 9.929739952969995e-06, "loss": 0.9251, "step": 762 }, { "epoch": 0.08, "grad_norm": 0.9348071031777235, "learning_rate": 9.929448803340167e-06, "loss": 1.0657, "step": 763 }, { "epoch": 0.08, "grad_norm": 1.8507551867523058, "learning_rate": 9.929157055997677e-06, "loss": 1.0051, "step": 764 }, { "epoch": 0.08, "grad_norm": 1.8662079598995955, "learning_rate": 9.928864710977903e-06, "loss": 0.9403, "step": 765 }, { "epoch": 0.08, "grad_norm": 1.6924564742815962, "learning_rate": 9.928571768316289e-06, "loss": 0.8989, "step": 766 }, { "epoch": 0.08, "grad_norm": 1.7992176984343593, "learning_rate": 9.928278228048358e-06, "loss": 1.0326, "step": 767 }, { 
"epoch": 0.08, "grad_norm": 1.7175018853464668, "learning_rate": 9.927984090209698e-06, "loss": 1.0425, "step": 768 }, { "epoch": 0.08, "grad_norm": 1.3479413320398927, "learning_rate": 9.927689354835983e-06, "loss": 1.0618, "step": 769 }, { "epoch": 0.08, "grad_norm": 1.840870084538823, "learning_rate": 9.927394021962942e-06, "loss": 0.9972, "step": 770 }, { "epoch": 0.08, "grad_norm": 1.8291575510899423, "learning_rate": 9.927098091626388e-06, "loss": 1.0217, "step": 771 }, { "epoch": 0.08, "grad_norm": 1.815872423949907, "learning_rate": 9.926801563862205e-06, "loss": 1.0031, "step": 772 }, { "epoch": 0.08, "grad_norm": 1.7699865405879576, "learning_rate": 9.926504438706348e-06, "loss": 1.0249, "step": 773 }, { "epoch": 0.08, "grad_norm": 1.8638404384370981, "learning_rate": 9.926206716194842e-06, "loss": 0.9735, "step": 774 }, { "epoch": 0.08, "grad_norm": 1.8466288413453076, "learning_rate": 9.92590839636379e-06, "loss": 0.8829, "step": 775 }, { "epoch": 0.08, "grad_norm": 1.9281617018916826, "learning_rate": 9.925609479249363e-06, "loss": 0.9683, "step": 776 }, { "epoch": 0.08, "grad_norm": 1.863361796006873, "learning_rate": 9.925309964887803e-06, "loss": 0.9628, "step": 777 }, { "epoch": 0.08, "grad_norm": 1.9116470414934745, "learning_rate": 9.925009853315433e-06, "loss": 1.036, "step": 778 }, { "epoch": 0.08, "grad_norm": 1.7730855246377493, "learning_rate": 9.924709144568635e-06, "loss": 0.9992, "step": 779 }, { "epoch": 0.08, "grad_norm": 1.9445079988777663, "learning_rate": 9.924407838683878e-06, "loss": 0.9462, "step": 780 }, { "epoch": 0.08, "grad_norm": 1.7966060909369934, "learning_rate": 9.924105935697694e-06, "loss": 0.9882, "step": 781 }, { "epoch": 0.08, "grad_norm": 1.7747116960351288, "learning_rate": 9.923803435646688e-06, "loss": 0.9429, "step": 782 }, { "epoch": 0.08, "grad_norm": 1.7830219888159082, "learning_rate": 9.923500338567542e-06, "loss": 0.8295, "step": 783 }, { "epoch": 0.08, "grad_norm": 1.7177629078829748, "learning_rate": 9.923196644497004e-06, "loss": 0.9394, "step": 784 }, { "epoch": 0.08, "grad_norm": 1.8316271252569067, "learning_rate": 9.9228923534719e-06, "loss": 0.968, "step": 785 }, { "epoch": 0.08, "grad_norm": 1.9414775391803776, "learning_rate": 9.922587465529128e-06, "loss": 1.0062, "step": 786 }, { "epoch": 0.08, "grad_norm": 1.7061566906701089, "learning_rate": 9.922281980705654e-06, "loss": 0.9501, "step": 787 }, { "epoch": 0.08, "grad_norm": 2.029514244238478, "learning_rate": 9.921975899038521e-06, "loss": 0.8408, "step": 788 }, { "epoch": 0.08, "grad_norm": 1.671320723077056, "learning_rate": 9.92166922056484e-06, "loss": 0.9702, "step": 789 }, { "epoch": 0.08, "grad_norm": 1.9060640324258404, "learning_rate": 9.921361945321801e-06, "loss": 0.9949, "step": 790 }, { "epoch": 0.09, "grad_norm": 1.7566011580452403, "learning_rate": 9.921054073346658e-06, "loss": 0.8964, "step": 791 }, { "epoch": 0.09, "grad_norm": 1.7578820136460285, "learning_rate": 9.920745604676744e-06, "loss": 0.9451, "step": 792 }, { "epoch": 0.09, "grad_norm": 1.8078783994268042, "learning_rate": 9.92043653934946e-06, "loss": 0.9713, "step": 793 }, { "epoch": 0.09, "grad_norm": 1.7780143426814108, "learning_rate": 9.920126877402283e-06, "loss": 0.8989, "step": 794 }, { "epoch": 0.09, "grad_norm": 1.7502681274849756, "learning_rate": 9.91981661887276e-06, "loss": 0.9007, "step": 795 }, { "epoch": 0.09, "grad_norm": 1.8431526590150353, "learning_rate": 9.919505763798508e-06, "loss": 1.0035, "step": 796 }, { "epoch": 0.09, "grad_norm": 1.7281172804129425, 
"learning_rate": 9.919194312217227e-06, "loss": 0.8723, "step": 797 }, { "epoch": 0.09, "grad_norm": 1.750776978331979, "learning_rate": 9.918882264166672e-06, "loss": 0.9387, "step": 798 }, { "epoch": 0.09, "grad_norm": 1.8585927174532484, "learning_rate": 9.918569619684687e-06, "loss": 0.9037, "step": 799 }, { "epoch": 0.09, "grad_norm": 1.794742139687047, "learning_rate": 9.918256378809178e-06, "loss": 0.9911, "step": 800 }, { "epoch": 0.09, "grad_norm": 1.69951349192749, "learning_rate": 9.917942541578127e-06, "loss": 0.9548, "step": 801 }, { "epoch": 0.09, "grad_norm": 0.9640403251318759, "learning_rate": 9.917628108029589e-06, "loss": 1.094, "step": 802 }, { "epoch": 0.09, "grad_norm": 1.688444744519535, "learning_rate": 9.91731307820169e-06, "loss": 0.9455, "step": 803 }, { "epoch": 0.09, "grad_norm": 1.759844535189644, "learning_rate": 9.916997452132625e-06, "loss": 0.9644, "step": 804 }, { "epoch": 0.09, "grad_norm": 1.8403238606779961, "learning_rate": 9.91668122986067e-06, "loss": 1.0178, "step": 805 }, { "epoch": 0.09, "grad_norm": 0.9280707944688548, "learning_rate": 9.916364411424164e-06, "loss": 1.0721, "step": 806 }, { "epoch": 0.09, "grad_norm": 1.7325376430503456, "learning_rate": 9.916046996861524e-06, "loss": 0.9298, "step": 807 }, { "epoch": 0.09, "grad_norm": 1.7058257406671034, "learning_rate": 9.915728986211237e-06, "loss": 0.9109, "step": 808 }, { "epoch": 0.09, "grad_norm": 1.930390188761472, "learning_rate": 9.915410379511866e-06, "loss": 0.9922, "step": 809 }, { "epoch": 0.09, "grad_norm": 1.7360505353013305, "learning_rate": 9.915091176802037e-06, "loss": 0.9615, "step": 810 }, { "epoch": 0.09, "grad_norm": 0.9161336619173658, "learning_rate": 9.91477137812046e-06, "loss": 1.1304, "step": 811 }, { "epoch": 0.09, "grad_norm": 1.7382963703023766, "learning_rate": 9.914450983505908e-06, "loss": 0.9176, "step": 812 }, { "epoch": 0.09, "grad_norm": 1.7092472530553176, "learning_rate": 9.914129992997232e-06, "loss": 0.8817, "step": 813 }, { "epoch": 0.09, "grad_norm": 0.8866513087285066, "learning_rate": 9.913808406633354e-06, "loss": 1.0628, "step": 814 }, { "epoch": 0.09, "grad_norm": 1.7451650908923817, "learning_rate": 9.913486224453266e-06, "loss": 0.9583, "step": 815 }, { "epoch": 0.09, "grad_norm": 1.7807043554573663, "learning_rate": 9.913163446496034e-06, "loss": 0.9031, "step": 816 }, { "epoch": 0.09, "grad_norm": 1.8449517807695555, "learning_rate": 9.912840072800795e-06, "loss": 1.0842, "step": 817 }, { "epoch": 0.09, "grad_norm": 1.9266464083336783, "learning_rate": 9.912516103406759e-06, "loss": 0.9285, "step": 818 }, { "epoch": 0.09, "grad_norm": 1.811910705681027, "learning_rate": 9.912191538353213e-06, "loss": 0.901, "step": 819 }, { "epoch": 0.09, "grad_norm": 1.7882865572956181, "learning_rate": 9.911866377679505e-06, "loss": 0.912, "step": 820 }, { "epoch": 0.09, "grad_norm": 1.7969028009982195, "learning_rate": 9.911540621425067e-06, "loss": 0.8945, "step": 821 }, { "epoch": 0.09, "grad_norm": 1.0191178327763168, "learning_rate": 9.911214269629394e-06, "loss": 1.089, "step": 822 }, { "epoch": 0.09, "grad_norm": 1.741187876151006, "learning_rate": 9.91088732233206e-06, "loss": 0.8335, "step": 823 }, { "epoch": 0.09, "grad_norm": 1.732133495398089, "learning_rate": 9.910559779572708e-06, "loss": 0.9242, "step": 824 }, { "epoch": 0.09, "grad_norm": 1.7343768237601058, "learning_rate": 9.910231641391053e-06, "loss": 0.9482, "step": 825 }, { "epoch": 0.09, "grad_norm": 1.7418414048653905, "learning_rate": 9.909902907826884e-06, "loss": 0.9877, "step": 
826 }, { "epoch": 0.09, "grad_norm": 1.7349789307668178, "learning_rate": 9.90957357892006e-06, "loss": 0.9746, "step": 827 }, { "epoch": 0.09, "grad_norm": 1.7533232100316827, "learning_rate": 9.909243654710514e-06, "loss": 0.9751, "step": 828 }, { "epoch": 0.09, "grad_norm": 1.8124054867314066, "learning_rate": 9.90891313523825e-06, "loss": 0.8991, "step": 829 }, { "epoch": 0.09, "grad_norm": 1.8526191852080973, "learning_rate": 9.908582020543345e-06, "loss": 0.9134, "step": 830 }, { "epoch": 0.09, "grad_norm": 1.7946310001884849, "learning_rate": 9.908250310665947e-06, "loss": 0.9749, "step": 831 }, { "epoch": 0.09, "grad_norm": 1.6836509484891176, "learning_rate": 9.907918005646277e-06, "loss": 0.9909, "step": 832 }, { "epoch": 0.09, "grad_norm": 1.6965789082414533, "learning_rate": 9.90758510552463e-06, "loss": 0.8995, "step": 833 }, { "epoch": 0.09, "grad_norm": 1.8365483575889425, "learning_rate": 9.907251610341369e-06, "loss": 0.9616, "step": 834 }, { "epoch": 0.09, "grad_norm": 1.7993294714917092, "learning_rate": 9.906917520136932e-06, "loss": 1.0056, "step": 835 }, { "epoch": 0.09, "grad_norm": 1.7558556448383609, "learning_rate": 9.90658283495183e-06, "loss": 0.986, "step": 836 }, { "epoch": 0.09, "grad_norm": 1.8431305729429366, "learning_rate": 9.906247554826645e-06, "loss": 0.9029, "step": 837 }, { "epoch": 0.09, "grad_norm": 1.8425183998237253, "learning_rate": 9.905911679802028e-06, "loss": 0.96, "step": 838 }, { "epoch": 0.09, "grad_norm": 0.9681811428674642, "learning_rate": 9.905575209918705e-06, "loss": 1.0814, "step": 839 }, { "epoch": 0.09, "grad_norm": 1.7098033686864729, "learning_rate": 9.905238145217477e-06, "loss": 0.9165, "step": 840 }, { "epoch": 0.09, "grad_norm": 1.768674657630725, "learning_rate": 9.904900485739215e-06, "loss": 0.9518, "step": 841 }, { "epoch": 0.09, "grad_norm": 1.7481140936861697, "learning_rate": 9.904562231524856e-06, "loss": 0.8626, "step": 842 }, { "epoch": 0.09, "grad_norm": 1.7232018000134481, "learning_rate": 9.904223382615418e-06, "loss": 1.032, "step": 843 }, { "epoch": 0.09, "grad_norm": 4.460709642359016, "learning_rate": 9.90388393905199e-06, "loss": 1.0068, "step": 844 }, { "epoch": 0.09, "grad_norm": 1.7847390679904496, "learning_rate": 9.903543900875727e-06, "loss": 0.9547, "step": 845 }, { "epoch": 0.09, "grad_norm": 1.8188187988716773, "learning_rate": 9.903203268127861e-06, "loss": 0.8809, "step": 846 }, { "epoch": 0.09, "grad_norm": 1.8959188841722867, "learning_rate": 9.902862040849694e-06, "loss": 1.045, "step": 847 }, { "epoch": 0.09, "grad_norm": 1.9304520747760463, "learning_rate": 9.902520219082604e-06, "loss": 0.9593, "step": 848 }, { "epoch": 0.09, "grad_norm": 1.6853604254058532, "learning_rate": 9.902177802868033e-06, "loss": 0.997, "step": 849 }, { "epoch": 0.09, "grad_norm": 1.9046856757594037, "learning_rate": 9.901834792247504e-06, "loss": 0.9276, "step": 850 }, { "epoch": 0.09, "grad_norm": 1.856163675430937, "learning_rate": 9.901491187262608e-06, "loss": 0.8387, "step": 851 }, { "epoch": 0.09, "grad_norm": 1.6901082580280227, "learning_rate": 9.901146987955008e-06, "loss": 0.9335, "step": 852 }, { "epoch": 0.09, "grad_norm": 1.7626665574979539, "learning_rate": 9.900802194366439e-06, "loss": 0.9634, "step": 853 }, { "epoch": 0.09, "grad_norm": 1.7189162383293837, "learning_rate": 9.900456806538707e-06, "loss": 0.8783, "step": 854 }, { "epoch": 0.09, "grad_norm": 1.798664545332496, "learning_rate": 9.900110824513693e-06, "loss": 0.9507, "step": 855 }, { "epoch": 0.09, "grad_norm": 1.7933570794802838, 
"learning_rate": 9.899764248333348e-06, "loss": 1.0097, "step": 856 }, { "epoch": 0.09, "grad_norm": 1.8045860649415821, "learning_rate": 9.899417078039695e-06, "loss": 0.9356, "step": 857 }, { "epoch": 0.09, "grad_norm": 1.793830812148432, "learning_rate": 9.899069313674832e-06, "loss": 0.9589, "step": 858 }, { "epoch": 0.09, "grad_norm": 1.8818990693521418, "learning_rate": 9.898720955280926e-06, "loss": 0.872, "step": 859 }, { "epoch": 0.09, "grad_norm": 1.7388290430716535, "learning_rate": 9.898372002900214e-06, "loss": 0.9796, "step": 860 }, { "epoch": 0.09, "grad_norm": 1.8184896287341015, "learning_rate": 9.89802245657501e-06, "loss": 0.938, "step": 861 }, { "epoch": 0.09, "grad_norm": 1.751753767791437, "learning_rate": 9.897672316347697e-06, "loss": 0.9233, "step": 862 }, { "epoch": 0.09, "grad_norm": 1.777873794624135, "learning_rate": 9.897321582260731e-06, "loss": 0.9132, "step": 863 }, { "epoch": 0.09, "grad_norm": 1.875554789561168, "learning_rate": 9.896970254356638e-06, "loss": 0.997, "step": 864 }, { "epoch": 0.09, "grad_norm": 1.7159141108327025, "learning_rate": 9.896618332678021e-06, "loss": 0.9756, "step": 865 }, { "epoch": 0.09, "grad_norm": 1.802988055170041, "learning_rate": 9.89626581726755e-06, "loss": 0.9552, "step": 866 }, { "epoch": 0.09, "grad_norm": 1.788193234998761, "learning_rate": 9.895912708167968e-06, "loss": 0.939, "step": 867 }, { "epoch": 0.09, "grad_norm": 1.967937412553483, "learning_rate": 9.89555900542209e-06, "loss": 0.9564, "step": 868 }, { "epoch": 0.09, "grad_norm": 1.9584590750178426, "learning_rate": 9.895204709072807e-06, "loss": 0.8972, "step": 869 }, { "epoch": 0.09, "grad_norm": 1.776914883093344, "learning_rate": 9.894849819163076e-06, "loss": 0.9836, "step": 870 }, { "epoch": 0.09, "grad_norm": 1.657850519074155, "learning_rate": 9.894494335735929e-06, "loss": 0.9783, "step": 871 }, { "epoch": 0.09, "grad_norm": 1.82980919779469, "learning_rate": 9.894138258834471e-06, "loss": 0.9402, "step": 872 }, { "epoch": 0.09, "grad_norm": 1.7292430648274233, "learning_rate": 9.893781588501876e-06, "loss": 1.0016, "step": 873 }, { "epoch": 0.09, "grad_norm": 1.845714277354703, "learning_rate": 9.893424324781392e-06, "loss": 0.919, "step": 874 }, { "epoch": 0.09, "grad_norm": 1.6966738622607047, "learning_rate": 9.893066467716337e-06, "loss": 0.9552, "step": 875 }, { "epoch": 0.09, "grad_norm": 1.6981289358887794, "learning_rate": 9.892708017350106e-06, "loss": 0.9049, "step": 876 }, { "epoch": 0.09, "grad_norm": 1.8229690190196253, "learning_rate": 9.892348973726159e-06, "loss": 0.954, "step": 877 }, { "epoch": 0.09, "grad_norm": 1.7485689573753123, "learning_rate": 9.891989336888033e-06, "loss": 0.9521, "step": 878 }, { "epoch": 0.09, "grad_norm": 1.6752052159666504, "learning_rate": 9.891629106879333e-06, "loss": 0.9024, "step": 879 }, { "epoch": 0.09, "grad_norm": 1.7313966774914158, "learning_rate": 9.891268283743742e-06, "loss": 0.9875, "step": 880 }, { "epoch": 0.09, "grad_norm": 1.830922032405926, "learning_rate": 9.890906867525007e-06, "loss": 0.9455, "step": 881 }, { "epoch": 0.09, "grad_norm": 1.8109162507724996, "learning_rate": 9.890544858266955e-06, "loss": 0.9871, "step": 882 }, { "epoch": 0.09, "grad_norm": 1.7417205306759183, "learning_rate": 9.890182256013477e-06, "loss": 0.9317, "step": 883 }, { "epoch": 0.1, "grad_norm": 1.7853655787558373, "learning_rate": 9.889819060808541e-06, "loss": 0.9226, "step": 884 }, { "epoch": 0.1, "grad_norm": 0.9554376378711412, "learning_rate": 9.889455272696187e-06, "loss": 1.0656, "step": 885 
}, { "epoch": 0.1, "grad_norm": 1.8857809456398171, "learning_rate": 9.889090891720526e-06, "loss": 0.8675, "step": 886 }, { "epoch": 0.1, "grad_norm": 1.6424155974287267, "learning_rate": 9.888725917925736e-06, "loss": 0.8298, "step": 887 }, { "epoch": 0.1, "grad_norm": 1.7808522499120072, "learning_rate": 9.888360351356077e-06, "loss": 0.8964, "step": 888 }, { "epoch": 0.1, "grad_norm": 1.7627066056269047, "learning_rate": 9.887994192055872e-06, "loss": 1.0054, "step": 889 }, { "epoch": 0.1, "grad_norm": 1.979767168464868, "learning_rate": 9.887627440069519e-06, "loss": 1.0152, "step": 890 }, { "epoch": 0.1, "grad_norm": 1.74866393019557, "learning_rate": 9.887260095441489e-06, "loss": 0.9841, "step": 891 }, { "epoch": 0.1, "grad_norm": 1.7157595730196746, "learning_rate": 9.886892158216324e-06, "loss": 0.9529, "step": 892 }, { "epoch": 0.1, "grad_norm": 1.7763716533564495, "learning_rate": 9.886523628438635e-06, "loss": 0.928, "step": 893 }, { "epoch": 0.1, "grad_norm": 1.8212550824086182, "learning_rate": 9.886154506153112e-06, "loss": 0.8341, "step": 894 }, { "epoch": 0.1, "grad_norm": 1.8343347038075255, "learning_rate": 9.88578479140451e-06, "loss": 0.9257, "step": 895 }, { "epoch": 0.1, "grad_norm": 1.7051211703837204, "learning_rate": 9.885414484237657e-06, "loss": 0.9491, "step": 896 }, { "epoch": 0.1, "grad_norm": 1.7976184374351234, "learning_rate": 9.885043584697457e-06, "loss": 0.9456, "step": 897 }, { "epoch": 0.1, "grad_norm": 1.8060206001502357, "learning_rate": 9.88467209282888e-06, "loss": 1.0235, "step": 898 }, { "epoch": 0.1, "grad_norm": 1.81298686945845, "learning_rate": 9.88430000867697e-06, "loss": 0.9829, "step": 899 }, { "epoch": 0.1, "grad_norm": 0.970570114340302, "learning_rate": 9.883927332286849e-06, "loss": 1.0613, "step": 900 }, { "epoch": 0.1, "grad_norm": 1.7854817827966023, "learning_rate": 9.883554063703697e-06, "loss": 0.9537, "step": 901 }, { "epoch": 0.1, "grad_norm": 1.927996103870089, "learning_rate": 9.883180202972781e-06, "loss": 1.0228, "step": 902 }, { "epoch": 0.1, "grad_norm": 1.856514683923491, "learning_rate": 9.882805750139432e-06, "loss": 0.9382, "step": 903 }, { "epoch": 0.1, "grad_norm": 1.8808958480854923, "learning_rate": 9.88243070524905e-06, "loss": 1.0414, "step": 904 }, { "epoch": 0.1, "grad_norm": 1.8125918194183057, "learning_rate": 9.882055068347114e-06, "loss": 0.9859, "step": 905 }, { "epoch": 0.1, "grad_norm": 1.8565888928975418, "learning_rate": 9.881678839479169e-06, "loss": 0.9809, "step": 906 }, { "epoch": 0.1, "grad_norm": 1.8011154265151827, "learning_rate": 9.881302018690834e-06, "loss": 0.9417, "step": 907 }, { "epoch": 0.1, "grad_norm": 1.8422978245339494, "learning_rate": 9.880924606027802e-06, "loss": 0.9778, "step": 908 }, { "epoch": 0.1, "grad_norm": 1.7574599608364259, "learning_rate": 9.880546601535835e-06, "loss": 0.9358, "step": 909 }, { "epoch": 0.1, "grad_norm": 1.821164318921587, "learning_rate": 9.880168005260766e-06, "loss": 1.0327, "step": 910 }, { "epoch": 0.1, "grad_norm": 1.81373773273064, "learning_rate": 9.879788817248503e-06, "loss": 0.9495, "step": 911 }, { "epoch": 0.1, "grad_norm": 1.8325467629904657, "learning_rate": 9.879409037545022e-06, "loss": 0.9436, "step": 912 }, { "epoch": 0.1, "grad_norm": 1.7707345048997194, "learning_rate": 9.879028666196373e-06, "loss": 0.9573, "step": 913 }, { "epoch": 0.1, "grad_norm": 1.6085397190552688, "learning_rate": 9.878647703248678e-06, "loss": 0.8504, "step": 914 }, { "epoch": 0.1, "grad_norm": 1.662080121129869, "learning_rate": 9.878266148748129e-06, 
"loss": 0.9391, "step": 915 }, { "epoch": 0.1, "grad_norm": 1.2466025279825363, "learning_rate": 9.877884002740992e-06, "loss": 1.0855, "step": 916 }, { "epoch": 0.1, "grad_norm": 1.8343435128597174, "learning_rate": 9.877501265273603e-06, "loss": 0.982, "step": 917 }, { "epoch": 0.1, "grad_norm": 1.7925314912201602, "learning_rate": 9.87711793639237e-06, "loss": 0.9993, "step": 918 }, { "epoch": 0.1, "grad_norm": 1.7650708300779172, "learning_rate": 9.876734016143775e-06, "loss": 0.8859, "step": 919 }, { "epoch": 0.1, "grad_norm": 1.8964994349322162, "learning_rate": 9.876349504574365e-06, "loss": 0.9441, "step": 920 }, { "epoch": 0.1, "grad_norm": 1.7176745169036531, "learning_rate": 9.875964401730769e-06, "loss": 0.9091, "step": 921 }, { "epoch": 0.1, "grad_norm": 1.6933410878690973, "learning_rate": 9.875578707659678e-06, "loss": 0.8961, "step": 922 }, { "epoch": 0.1, "grad_norm": 1.7363253413599269, "learning_rate": 9.87519242240786e-06, "loss": 0.987, "step": 923 }, { "epoch": 0.1, "grad_norm": 1.907173957984541, "learning_rate": 9.874805546022152e-06, "loss": 1.0431, "step": 924 }, { "epoch": 0.1, "grad_norm": 1.0433577696012308, "learning_rate": 9.874418078549467e-06, "loss": 1.0777, "step": 925 }, { "epoch": 0.1, "grad_norm": 1.9675203321833825, "learning_rate": 9.874030020036787e-06, "loss": 0.9205, "step": 926 }, { "epoch": 0.1, "grad_norm": 1.8234458795355297, "learning_rate": 9.873641370531161e-06, "loss": 0.9537, "step": 927 }, { "epoch": 0.1, "grad_norm": 1.7185461654092753, "learning_rate": 9.873252130079718e-06, "loss": 0.9812, "step": 928 }, { "epoch": 0.1, "grad_norm": 1.7871474028242238, "learning_rate": 9.872862298729653e-06, "loss": 0.9619, "step": 929 }, { "epoch": 0.1, "grad_norm": 0.943918554776973, "learning_rate": 9.872471876528235e-06, "loss": 1.0908, "step": 930 }, { "epoch": 0.1, "grad_norm": 1.8069191798692539, "learning_rate": 9.872080863522805e-06, "loss": 0.921, "step": 931 }, { "epoch": 0.1, "grad_norm": 1.793440181508253, "learning_rate": 9.871689259760772e-06, "loss": 0.9064, "step": 932 }, { "epoch": 0.1, "grad_norm": 1.789904121655422, "learning_rate": 9.871297065289623e-06, "loss": 0.9087, "step": 933 }, { "epoch": 0.1, "grad_norm": 0.8885909772120167, "learning_rate": 9.87090428015691e-06, "loss": 1.0759, "step": 934 }, { "epoch": 0.1, "grad_norm": 1.7725739192521233, "learning_rate": 9.870510904410262e-06, "loss": 0.9812, "step": 935 }, { "epoch": 0.1, "grad_norm": 1.7880024017925895, "learning_rate": 9.870116938097374e-06, "loss": 1.0041, "step": 936 }, { "epoch": 0.1, "grad_norm": 1.7802382088925373, "learning_rate": 9.869722381266016e-06, "loss": 0.9234, "step": 937 }, { "epoch": 0.1, "grad_norm": 1.74797009086092, "learning_rate": 9.869327233964032e-06, "loss": 0.968, "step": 938 }, { "epoch": 0.1, "grad_norm": 1.7845851679050015, "learning_rate": 9.868931496239335e-06, "loss": 0.8331, "step": 939 }, { "epoch": 0.1, "grad_norm": 1.786188650736567, "learning_rate": 9.868535168139907e-06, "loss": 0.9683, "step": 940 }, { "epoch": 0.1, "grad_norm": 1.7498469482370298, "learning_rate": 9.868138249713805e-06, "loss": 0.9917, "step": 941 }, { "epoch": 0.1, "grad_norm": 1.7550107891927942, "learning_rate": 9.86774074100916e-06, "loss": 0.9222, "step": 942 }, { "epoch": 0.1, "grad_norm": 1.7734088676702036, "learning_rate": 9.867342642074165e-06, "loss": 0.957, "step": 943 }, { "epoch": 0.1, "grad_norm": 1.8130629504027158, "learning_rate": 9.866943952957097e-06, "loss": 0.9783, "step": 944 }, { "epoch": 0.1, "grad_norm": 1.783181479678194, 
"learning_rate": 9.866544673706294e-06, "loss": 0.952, "step": 945 }, { "epoch": 0.1, "grad_norm": 1.8070743492645691, "learning_rate": 9.866144804370172e-06, "loss": 0.957, "step": 946 }, { "epoch": 0.1, "grad_norm": 1.7865090166401243, "learning_rate": 9.865744344997217e-06, "loss": 0.9805, "step": 947 }, { "epoch": 0.1, "grad_norm": 1.736712499937743, "learning_rate": 9.865343295635986e-06, "loss": 0.9228, "step": 948 }, { "epoch": 0.1, "grad_norm": 1.7343624218746638, "learning_rate": 9.864941656335105e-06, "loss": 0.9255, "step": 949 }, { "epoch": 0.1, "grad_norm": 1.6993565969738738, "learning_rate": 9.864539427143277e-06, "loss": 0.8653, "step": 950 }, { "epoch": 0.1, "grad_norm": 1.7952386953609754, "learning_rate": 9.864136608109274e-06, "loss": 0.9186, "step": 951 }, { "epoch": 0.1, "grad_norm": 1.7070412665095087, "learning_rate": 9.863733199281938e-06, "loss": 0.9977, "step": 952 }, { "epoch": 0.1, "grad_norm": 1.7091637326615385, "learning_rate": 9.863329200710183e-06, "loss": 0.9348, "step": 953 }, { "epoch": 0.1, "grad_norm": 1.7783376184079491, "learning_rate": 9.862924612442996e-06, "loss": 0.9954, "step": 954 }, { "epoch": 0.1, "grad_norm": 1.9280078714553308, "learning_rate": 9.862519434529435e-06, "loss": 0.9597, "step": 955 }, { "epoch": 0.1, "grad_norm": 1.8527421676289169, "learning_rate": 9.862113667018628e-06, "loss": 1.004, "step": 956 }, { "epoch": 0.1, "grad_norm": 1.8654319695692816, "learning_rate": 9.861707309959777e-06, "loss": 1.0228, "step": 957 }, { "epoch": 0.1, "grad_norm": 1.8068180700733758, "learning_rate": 9.861300363402154e-06, "loss": 0.9579, "step": 958 }, { "epoch": 0.1, "grad_norm": 1.7765839260998324, "learning_rate": 9.860892827395104e-06, "loss": 0.96, "step": 959 }, { "epoch": 0.1, "grad_norm": 1.821942317171618, "learning_rate": 9.860484701988038e-06, "loss": 0.9458, "step": 960 }, { "epoch": 0.1, "grad_norm": 1.6575431038625992, "learning_rate": 9.860075987230446e-06, "loss": 0.9847, "step": 961 }, { "epoch": 0.1, "grad_norm": 1.6364947500360234, "learning_rate": 9.859666683171886e-06, "loss": 0.8716, "step": 962 }, { "epoch": 0.1, "grad_norm": 1.8407579272755996, "learning_rate": 9.859256789861986e-06, "loss": 0.9524, "step": 963 }, { "epoch": 0.1, "grad_norm": 1.8312335699813784, "learning_rate": 9.858846307350449e-06, "loss": 0.9431, "step": 964 }, { "epoch": 0.1, "grad_norm": 1.7870624002242408, "learning_rate": 9.858435235687046e-06, "loss": 0.9827, "step": 965 }, { "epoch": 0.1, "grad_norm": 1.778812764134227, "learning_rate": 9.85802357492162e-06, "loss": 0.9986, "step": 966 }, { "epoch": 0.1, "grad_norm": 1.8624679934326214, "learning_rate": 9.857611325104088e-06, "loss": 0.9496, "step": 967 }, { "epoch": 0.1, "grad_norm": 2.0749965364838965, "learning_rate": 9.857198486284436e-06, "loss": 0.9997, "step": 968 }, { "epoch": 0.1, "grad_norm": 0.962839183190603, "learning_rate": 9.856785058512722e-06, "loss": 1.088, "step": 969 }, { "epoch": 0.1, "grad_norm": 1.826861056262288, "learning_rate": 9.856371041839077e-06, "loss": 0.9124, "step": 970 }, { "epoch": 0.1, "grad_norm": 1.7998874824167328, "learning_rate": 9.8559564363137e-06, "loss": 1.0311, "step": 971 }, { "epoch": 0.1, "grad_norm": 1.8646915243817053, "learning_rate": 9.855541241986864e-06, "loss": 0.9469, "step": 972 }, { "epoch": 0.1, "grad_norm": 1.7261471930948713, "learning_rate": 9.855125458908912e-06, "loss": 0.8794, "step": 973 }, { "epoch": 0.1, "grad_norm": 1.8115072836574875, "learning_rate": 9.854709087130261e-06, "loss": 0.9968, "step": 974 }, { "epoch": 0.1, 
"grad_norm": 1.7395941661014613, "learning_rate": 9.854292126701397e-06, "loss": 0.9884, "step": 975 }, { "epoch": 0.1, "grad_norm": 1.6195324062942877, "learning_rate": 9.853874577672875e-06, "loss": 0.9168, "step": 976 }, { "epoch": 0.11, "grad_norm": 0.9336280077133761, "learning_rate": 9.853456440095329e-06, "loss": 1.0776, "step": 977 }, { "epoch": 0.11, "grad_norm": 1.6924047582395587, "learning_rate": 9.853037714019455e-06, "loss": 0.9587, "step": 978 }, { "epoch": 0.11, "grad_norm": 1.7302928999595852, "learning_rate": 9.85261839949603e-06, "loss": 0.9836, "step": 979 }, { "epoch": 0.11, "grad_norm": 1.8250592146880602, "learning_rate": 9.852198496575893e-06, "loss": 0.969, "step": 980 }, { "epoch": 0.11, "grad_norm": 1.5987937940221362, "learning_rate": 9.85177800530996e-06, "loss": 0.93, "step": 981 }, { "epoch": 0.11, "grad_norm": 1.8164032112643722, "learning_rate": 9.851356925749218e-06, "loss": 1.0004, "step": 982 }, { "epoch": 0.11, "grad_norm": 1.8784685537036336, "learning_rate": 9.850935257944722e-06, "loss": 0.8654, "step": 983 }, { "epoch": 0.11, "grad_norm": 1.7868661690290244, "learning_rate": 9.850513001947604e-06, "loss": 0.938, "step": 984 }, { "epoch": 0.11, "grad_norm": 1.756513101119036, "learning_rate": 9.850090157809062e-06, "loss": 1.0043, "step": 985 }, { "epoch": 0.11, "grad_norm": 1.8329671738514197, "learning_rate": 9.849666725580366e-06, "loss": 0.935, "step": 986 }, { "epoch": 0.11, "grad_norm": 1.7801831076219174, "learning_rate": 9.849242705312863e-06, "loss": 0.9776, "step": 987 }, { "epoch": 0.11, "grad_norm": 1.8129292290267487, "learning_rate": 9.848818097057962e-06, "loss": 0.9517, "step": 988 }, { "epoch": 0.11, "grad_norm": 1.7482559362234948, "learning_rate": 9.848392900867151e-06, "loss": 1.0212, "step": 989 }, { "epoch": 0.11, "grad_norm": 1.9672338540711949, "learning_rate": 9.847967116791985e-06, "loss": 0.9408, "step": 990 }, { "epoch": 0.11, "grad_norm": 1.7897730064924482, "learning_rate": 9.847540744884094e-06, "loss": 1.0049, "step": 991 }, { "epoch": 0.11, "grad_norm": 1.6817636084648129, "learning_rate": 9.847113785195175e-06, "loss": 0.9242, "step": 992 }, { "epoch": 0.11, "grad_norm": 1.8249791233016752, "learning_rate": 9.846686237776999e-06, "loss": 1.006, "step": 993 }, { "epoch": 0.11, "grad_norm": 1.9357126804069176, "learning_rate": 9.846258102681407e-06, "loss": 0.881, "step": 994 }, { "epoch": 0.11, "grad_norm": 1.8692610867033181, "learning_rate": 9.845829379960312e-06, "loss": 0.8461, "step": 995 }, { "epoch": 0.11, "grad_norm": 1.8470217278899053, "learning_rate": 9.845400069665701e-06, "loss": 0.9798, "step": 996 }, { "epoch": 0.11, "grad_norm": 1.0139386725154416, "learning_rate": 9.844970171849625e-06, "loss": 1.0896, "step": 997 }, { "epoch": 0.11, "grad_norm": 2.1602670435074516, "learning_rate": 9.844539686564213e-06, "loss": 1.0017, "step": 998 }, { "epoch": 0.11, "grad_norm": 1.8588536576422883, "learning_rate": 9.844108613861663e-06, "loss": 1.0023, "step": 999 }, { "epoch": 0.11, "grad_norm": 1.700574006958072, "learning_rate": 9.843676953794242e-06, "loss": 0.8836, "step": 1000 }, { "epoch": 0.11, "grad_norm": 1.7181997383951344, "learning_rate": 9.843244706414292e-06, "loss": 0.9453, "step": 1001 }, { "epoch": 0.11, "grad_norm": 1.8298449700677073, "learning_rate": 9.842811871774224e-06, "loss": 0.9068, "step": 1002 }, { "epoch": 0.11, "grad_norm": 1.7618353343782998, "learning_rate": 9.842378449926521e-06, "loss": 0.9387, "step": 1003 }, { "epoch": 0.11, "grad_norm": 1.8723621462859346, "learning_rate": 
9.841944440923738e-06, "loss": 0.9411, "step": 1004 }, { "epoch": 0.11, "grad_norm": 1.7180383125276784, "learning_rate": 9.841509844818498e-06, "loss": 0.9533, "step": 1005 }, { "epoch": 0.11, "grad_norm": 1.7886269511756854, "learning_rate": 9.841074661663497e-06, "loss": 0.9312, "step": 1006 }, { "epoch": 0.11, "grad_norm": 1.218821639247775, "learning_rate": 9.840638891511505e-06, "loss": 1.0581, "step": 1007 }, { "epoch": 0.11, "grad_norm": 1.7451178320447245, "learning_rate": 9.840202534415358e-06, "loss": 0.9473, "step": 1008 }, { "epoch": 0.11, "grad_norm": 1.8057909459822308, "learning_rate": 9.839765590427968e-06, "loss": 0.9667, "step": 1009 }, { "epoch": 0.11, "grad_norm": 1.7384824199482714, "learning_rate": 9.839328059602316e-06, "loss": 0.9005, "step": 1010 }, { "epoch": 0.11, "grad_norm": 1.6484595911957376, "learning_rate": 9.838889941991451e-06, "loss": 0.9097, "step": 1011 }, { "epoch": 0.11, "grad_norm": 1.7415414223270598, "learning_rate": 9.838451237648499e-06, "loss": 0.9625, "step": 1012 }, { "epoch": 0.11, "grad_norm": 1.8318880362447547, "learning_rate": 9.838011946626654e-06, "loss": 0.9928, "step": 1013 }, { "epoch": 0.11, "grad_norm": 2.0768110764943803, "learning_rate": 9.83757206897918e-06, "loss": 1.051, "step": 1014 }, { "epoch": 0.11, "grad_norm": 1.931089254987397, "learning_rate": 9.837131604759416e-06, "loss": 0.9729, "step": 1015 }, { "epoch": 0.11, "grad_norm": 1.7960000185747274, "learning_rate": 9.836690554020769e-06, "loss": 0.9264, "step": 1016 }, { "epoch": 0.11, "grad_norm": 1.762389443245749, "learning_rate": 9.836248916816718e-06, "loss": 0.9156, "step": 1017 }, { "epoch": 0.11, "grad_norm": 1.752816001636378, "learning_rate": 9.835806693200812e-06, "loss": 0.9202, "step": 1018 }, { "epoch": 0.11, "grad_norm": 1.898639377393412, "learning_rate": 9.835363883226675e-06, "loss": 0.9206, "step": 1019 }, { "epoch": 0.11, "grad_norm": 1.669329753830521, "learning_rate": 9.834920486947994e-06, "loss": 0.9295, "step": 1020 }, { "epoch": 0.11, "grad_norm": 1.750657614307251, "learning_rate": 9.834476504418537e-06, "loss": 0.9744, "step": 1021 }, { "epoch": 0.11, "grad_norm": 1.928201607176009, "learning_rate": 9.834031935692136e-06, "loss": 0.8937, "step": 1022 }, { "epoch": 0.11, "grad_norm": 1.7317418931000343, "learning_rate": 9.833586780822698e-06, "loss": 0.9318, "step": 1023 }, { "epoch": 0.11, "grad_norm": 1.8596109093142383, "learning_rate": 9.8331410398642e-06, "loss": 0.9689, "step": 1024 }, { "epoch": 0.11, "grad_norm": 1.7974015634012337, "learning_rate": 9.832694712870687e-06, "loss": 0.9388, "step": 1025 }, { "epoch": 0.11, "grad_norm": 1.7786555667025423, "learning_rate": 9.83224779989628e-06, "loss": 0.9819, "step": 1026 }, { "epoch": 0.11, "grad_norm": 1.7524301458796527, "learning_rate": 9.831800300995167e-06, "loss": 1.0324, "step": 1027 }, { "epoch": 0.11, "grad_norm": 1.6551019641741704, "learning_rate": 9.83135221622161e-06, "loss": 1.0417, "step": 1028 }, { "epoch": 0.11, "grad_norm": 1.694109910291689, "learning_rate": 9.830903545629942e-06, "loss": 0.9071, "step": 1029 }, { "epoch": 0.11, "grad_norm": 1.7588396521708929, "learning_rate": 9.830454289274563e-06, "loss": 0.8541, "step": 1030 }, { "epoch": 0.11, "grad_norm": 1.7291720210703856, "learning_rate": 9.830004447209949e-06, "loss": 0.968, "step": 1031 }, { "epoch": 0.11, "grad_norm": 1.700967122701236, "learning_rate": 9.829554019490644e-06, "loss": 0.9379, "step": 1032 }, { "epoch": 0.11, "grad_norm": 1.634810475927666, "learning_rate": 9.829103006171264e-06, "loss": 
0.9892, "step": 1033 }, { "epoch": 0.11, "grad_norm": 1.6760866654998925, "learning_rate": 9.828651407306495e-06, "loss": 0.9784, "step": 1034 }, { "epoch": 0.11, "grad_norm": 1.6740977614798291, "learning_rate": 9.828199222951099e-06, "loss": 0.9834, "step": 1035 }, { "epoch": 0.11, "grad_norm": 1.6694694417771454, "learning_rate": 9.8277464531599e-06, "loss": 0.9068, "step": 1036 }, { "epoch": 0.11, "grad_norm": 1.161989127970139, "learning_rate": 9.8272930979878e-06, "loss": 1.0748, "step": 1037 }, { "epoch": 0.11, "grad_norm": 1.9743562979785556, "learning_rate": 9.826839157489768e-06, "loss": 0.9554, "step": 1038 }, { "epoch": 0.11, "grad_norm": 1.8637476009368845, "learning_rate": 9.826384631720849e-06, "loss": 0.947, "step": 1039 }, { "epoch": 0.11, "grad_norm": 1.810381557043378, "learning_rate": 9.825929520736155e-06, "loss": 0.9726, "step": 1040 }, { "epoch": 0.11, "grad_norm": 1.8172722380972584, "learning_rate": 9.825473824590867e-06, "loss": 0.9383, "step": 1041 }, { "epoch": 0.11, "grad_norm": 1.8940697338345922, "learning_rate": 9.825017543340244e-06, "loss": 0.9698, "step": 1042 }, { "epoch": 0.11, "grad_norm": 1.812893484359614, "learning_rate": 9.82456067703961e-06, "loss": 0.9293, "step": 1043 }, { "epoch": 0.11, "grad_norm": 1.769184440301945, "learning_rate": 9.82410322574436e-06, "loss": 0.9881, "step": 1044 }, { "epoch": 0.11, "grad_norm": 1.7476337618368556, "learning_rate": 9.823645189509961e-06, "loss": 0.9206, "step": 1045 }, { "epoch": 0.11, "grad_norm": 1.712289227917583, "learning_rate": 9.823186568391957e-06, "loss": 0.8945, "step": 1046 }, { "epoch": 0.11, "grad_norm": 1.8362004300488781, "learning_rate": 9.82272736244595e-06, "loss": 0.9478, "step": 1047 }, { "epoch": 0.11, "grad_norm": 1.7044043586888376, "learning_rate": 9.822267571727624e-06, "loss": 0.8737, "step": 1048 }, { "epoch": 0.11, "grad_norm": 1.7451132865069126, "learning_rate": 9.821807196292732e-06, "loss": 0.9271, "step": 1049 }, { "epoch": 0.11, "grad_norm": 1.1777265584003949, "learning_rate": 9.821346236197092e-06, "loss": 1.057, "step": 1050 }, { "epoch": 0.11, "grad_norm": 1.7969767375332188, "learning_rate": 9.820884691496601e-06, "loss": 0.9736, "step": 1051 }, { "epoch": 0.11, "grad_norm": 1.7379516077333648, "learning_rate": 9.82042256224722e-06, "loss": 0.9556, "step": 1052 }, { "epoch": 0.11, "grad_norm": 1.643244233541863, "learning_rate": 9.819959848504986e-06, "loss": 0.9117, "step": 1053 }, { "epoch": 0.11, "grad_norm": 1.8117506157191972, "learning_rate": 9.819496550326002e-06, "loss": 0.9194, "step": 1054 }, { "epoch": 0.11, "grad_norm": 1.8572414590857225, "learning_rate": 9.819032667766447e-06, "loss": 0.8787, "step": 1055 }, { "epoch": 0.11, "grad_norm": 1.7268845782212832, "learning_rate": 9.818568200882566e-06, "loss": 0.9105, "step": 1056 }, { "epoch": 0.11, "grad_norm": 1.7499943828483917, "learning_rate": 9.81810314973068e-06, "loss": 0.9514, "step": 1057 }, { "epoch": 0.11, "grad_norm": 1.1539247255425136, "learning_rate": 9.817637514367175e-06, "loss": 1.0687, "step": 1058 }, { "epoch": 0.11, "grad_norm": 1.0071822664097232, "learning_rate": 9.817171294848514e-06, "loss": 1.0907, "step": 1059 }, { "epoch": 0.11, "grad_norm": 1.7064070197943426, "learning_rate": 9.816704491231226e-06, "loss": 0.9668, "step": 1060 }, { "epoch": 0.11, "grad_norm": 1.0232510412023972, "learning_rate": 9.816237103571913e-06, "loss": 1.1092, "step": 1061 }, { "epoch": 0.11, "grad_norm": 1.9001613872713028, "learning_rate": 9.815769131927247e-06, "loss": 0.9514, "step": 1062 }, { 
"epoch": 0.11, "grad_norm": 1.8633149316002269, "learning_rate": 9.81530057635397e-06, "loss": 0.8809, "step": 1063 }, { "epoch": 0.11, "grad_norm": 1.670818616827823, "learning_rate": 9.814831436908897e-06, "loss": 0.9813, "step": 1064 }, { "epoch": 0.11, "grad_norm": 1.6838320261347404, "learning_rate": 9.814361713648914e-06, "loss": 0.9507, "step": 1065 }, { "epoch": 0.11, "grad_norm": 1.7167523344969506, "learning_rate": 9.813891406630975e-06, "loss": 0.9961, "step": 1066 }, { "epoch": 0.11, "grad_norm": 1.8456744000164487, "learning_rate": 9.81342051591211e-06, "loss": 0.9546, "step": 1067 }, { "epoch": 0.11, "grad_norm": 1.9280740862459023, "learning_rate": 9.812949041549408e-06, "loss": 0.9787, "step": 1068 }, { "epoch": 0.11, "grad_norm": 1.84611676794399, "learning_rate": 9.812476983600046e-06, "loss": 0.9256, "step": 1069 }, { "epoch": 0.12, "grad_norm": 1.800919827128619, "learning_rate": 9.812004342121257e-06, "loss": 0.9749, "step": 1070 }, { "epoch": 0.12, "grad_norm": 1.8455604703422732, "learning_rate": 9.811531117170353e-06, "loss": 1.0197, "step": 1071 }, { "epoch": 0.12, "grad_norm": 1.685741152848005, "learning_rate": 9.811057308804711e-06, "loss": 1.023, "step": 1072 }, { "epoch": 0.12, "grad_norm": 1.710455384377104, "learning_rate": 9.810582917081786e-06, "loss": 1.0491, "step": 1073 }, { "epoch": 0.12, "grad_norm": 1.7600125017366672, "learning_rate": 9.810107942059097e-06, "loss": 0.972, "step": 1074 }, { "epoch": 0.12, "grad_norm": 1.6024080768493354, "learning_rate": 9.809632383794236e-06, "loss": 0.9531, "step": 1075 }, { "epoch": 0.12, "grad_norm": 1.8615663794713686, "learning_rate": 9.809156242344868e-06, "loss": 1.0343, "step": 1076 }, { "epoch": 0.12, "grad_norm": 1.7532352719641884, "learning_rate": 9.808679517768727e-06, "loss": 0.9091, "step": 1077 }, { "epoch": 0.12, "grad_norm": 1.77917124210932, "learning_rate": 9.808202210123617e-06, "loss": 0.9827, "step": 1078 }, { "epoch": 0.12, "grad_norm": 1.7836323310069275, "learning_rate": 9.80772431946741e-06, "loss": 0.8918, "step": 1079 }, { "epoch": 0.12, "grad_norm": 1.7774652821414163, "learning_rate": 9.807245845858056e-06, "loss": 0.9467, "step": 1080 }, { "epoch": 0.12, "grad_norm": 2.0184982058455514, "learning_rate": 9.80676678935357e-06, "loss": 0.967, "step": 1081 }, { "epoch": 0.12, "grad_norm": 1.7003372981375924, "learning_rate": 9.80628715001204e-06, "loss": 0.9151, "step": 1082 }, { "epoch": 0.12, "grad_norm": 1.8329061591491158, "learning_rate": 9.805806927891622e-06, "loss": 0.9438, "step": 1083 }, { "epoch": 0.12, "grad_norm": 1.774727187159285, "learning_rate": 9.805326123050547e-06, "loss": 0.9494, "step": 1084 }, { "epoch": 0.12, "grad_norm": 1.8568606668103063, "learning_rate": 9.804844735547113e-06, "loss": 0.9996, "step": 1085 }, { "epoch": 0.12, "grad_norm": 1.2791177485487049, "learning_rate": 9.804362765439688e-06, "loss": 1.0832, "step": 1086 }, { "epoch": 0.12, "grad_norm": 1.8032529382948508, "learning_rate": 9.803880212786716e-06, "loss": 0.9669, "step": 1087 }, { "epoch": 0.12, "grad_norm": 1.7127224239865193, "learning_rate": 9.803397077646706e-06, "loss": 0.9389, "step": 1088 }, { "epoch": 0.12, "grad_norm": 1.8149416370023337, "learning_rate": 9.80291336007824e-06, "loss": 0.8991, "step": 1089 }, { "epoch": 0.12, "grad_norm": 1.760311639847556, "learning_rate": 9.802429060139973e-06, "loss": 0.9495, "step": 1090 }, { "epoch": 0.12, "grad_norm": 1.8429835200172284, "learning_rate": 9.801944177890626e-06, "loss": 0.9852, "step": 1091 }, { "epoch": 0.12, "grad_norm": 
1.8350620456825846, "learning_rate": 9.801458713388991e-06, "loss": 0.9803, "step": 1092 }, { "epoch": 0.12, "grad_norm": 1.882108257764037, "learning_rate": 9.800972666693936e-06, "loss": 0.9215, "step": 1093 }, { "epoch": 0.12, "grad_norm": 1.9755543710455752, "learning_rate": 9.800486037864393e-06, "loss": 0.9311, "step": 1094 }, { "epoch": 0.12, "grad_norm": 1.1432965521731466, "learning_rate": 9.799998826959366e-06, "loss": 1.0493, "step": 1095 }, { "epoch": 0.12, "grad_norm": 1.8840183059162294, "learning_rate": 9.799511034037934e-06, "loss": 0.957, "step": 1096 }, { "epoch": 0.12, "grad_norm": 1.7858058105779706, "learning_rate": 9.799022659159243e-06, "loss": 0.905, "step": 1097 }, { "epoch": 0.12, "grad_norm": 1.9409426172326323, "learning_rate": 9.798533702382512e-06, "loss": 1.0083, "step": 1098 }, { "epoch": 0.12, "grad_norm": 1.8394467919465047, "learning_rate": 9.798044163767022e-06, "loss": 0.9388, "step": 1099 }, { "epoch": 0.12, "grad_norm": 1.9554723120353845, "learning_rate": 9.797554043372139e-06, "loss": 0.9869, "step": 1100 }, { "epoch": 0.12, "grad_norm": 1.741561855656167, "learning_rate": 9.79706334125729e-06, "loss": 0.9568, "step": 1101 }, { "epoch": 0.12, "grad_norm": 1.9362479774256827, "learning_rate": 9.796572057481969e-06, "loss": 1.0453, "step": 1102 }, { "epoch": 0.12, "grad_norm": 1.7547957918734698, "learning_rate": 9.79608019210575e-06, "loss": 1.0017, "step": 1103 }, { "epoch": 0.12, "grad_norm": 1.9790835346110744, "learning_rate": 9.795587745188277e-06, "loss": 0.9622, "step": 1104 }, { "epoch": 0.12, "grad_norm": 1.7746915491668618, "learning_rate": 9.795094716789252e-06, "loss": 0.9815, "step": 1105 }, { "epoch": 0.12, "grad_norm": 1.7750417008918324, "learning_rate": 9.794601106968466e-06, "loss": 0.9146, "step": 1106 }, { "epoch": 0.12, "grad_norm": 1.831413815780519, "learning_rate": 9.794106915785764e-06, "loss": 0.9479, "step": 1107 }, { "epoch": 0.12, "grad_norm": 1.6643258761149287, "learning_rate": 9.79361214330107e-06, "loss": 0.9926, "step": 1108 }, { "epoch": 0.12, "grad_norm": 1.8919240235227575, "learning_rate": 9.793116789574379e-06, "loss": 1.0029, "step": 1109 }, { "epoch": 0.12, "grad_norm": 1.7438980242862387, "learning_rate": 9.792620854665752e-06, "loss": 0.883, "step": 1110 }, { "epoch": 0.12, "grad_norm": 1.8180112242115445, "learning_rate": 9.792124338635325e-06, "loss": 0.993, "step": 1111 }, { "epoch": 0.12, "grad_norm": 1.816636483697899, "learning_rate": 9.7916272415433e-06, "loss": 1.0401, "step": 1112 }, { "epoch": 0.12, "grad_norm": 1.7509090735478468, "learning_rate": 9.791129563449953e-06, "loss": 0.9632, "step": 1113 }, { "epoch": 0.12, "grad_norm": 1.8267204320972894, "learning_rate": 9.790631304415627e-06, "loss": 0.9639, "step": 1114 }, { "epoch": 0.12, "grad_norm": 1.7711127633528847, "learning_rate": 9.790132464500741e-06, "loss": 1.001, "step": 1115 }, { "epoch": 0.12, "grad_norm": 1.1077124772900868, "learning_rate": 9.789633043765781e-06, "loss": 1.0949, "step": 1116 }, { "epoch": 0.12, "grad_norm": 0.9305025462701941, "learning_rate": 9.7891330422713e-06, "loss": 1.0779, "step": 1117 }, { "epoch": 0.12, "grad_norm": 1.7243184975084127, "learning_rate": 9.788632460077928e-06, "loss": 0.9928, "step": 1118 }, { "epoch": 0.12, "grad_norm": 1.7037677988003548, "learning_rate": 9.788131297246362e-06, "loss": 0.972, "step": 1119 }, { "epoch": 0.12, "grad_norm": 1.7798806860339067, "learning_rate": 9.787629553837368e-06, "loss": 0.947, "step": 1120 }, { "epoch": 0.12, "grad_norm": 1.6582626056215375, 
"learning_rate": 9.787127229911783e-06, "loss": 0.8571, "step": 1121 }, { "epoch": 0.12, "grad_norm": 2.033400470010361, "learning_rate": 9.786624325530521e-06, "loss": 0.9424, "step": 1122 }, { "epoch": 0.12, "grad_norm": 1.839132253265899, "learning_rate": 9.786120840754557e-06, "loss": 0.8955, "step": 1123 }, { "epoch": 0.12, "grad_norm": 1.3442596771492645, "learning_rate": 9.78561677564494e-06, "loss": 1.0709, "step": 1124 }, { "epoch": 0.12, "grad_norm": 1.7035337853042138, "learning_rate": 9.78511213026279e-06, "loss": 0.9714, "step": 1125 }, { "epoch": 0.12, "grad_norm": 1.932330612454489, "learning_rate": 9.784606904669298e-06, "loss": 0.9643, "step": 1126 }, { "epoch": 0.12, "grad_norm": 1.8038822373487082, "learning_rate": 9.784101098925723e-06, "loss": 1.0149, "step": 1127 }, { "epoch": 0.12, "grad_norm": 1.6897023886943041, "learning_rate": 9.783594713093399e-06, "loss": 0.9779, "step": 1128 }, { "epoch": 0.12, "grad_norm": 0.8829041784730604, "learning_rate": 9.783087747233722e-06, "loss": 1.0874, "step": 1129 }, { "epoch": 0.12, "grad_norm": 0.8730841857749171, "learning_rate": 9.782580201408165e-06, "loss": 1.0855, "step": 1130 }, { "epoch": 0.12, "grad_norm": 1.8725353698414466, "learning_rate": 9.782072075678273e-06, "loss": 0.8449, "step": 1131 }, { "epoch": 0.12, "grad_norm": 2.0773312155579124, "learning_rate": 9.781563370105654e-06, "loss": 0.9481, "step": 1132 }, { "epoch": 0.12, "grad_norm": 1.9102606387047252, "learning_rate": 9.781054084751993e-06, "loss": 0.8468, "step": 1133 }, { "epoch": 0.12, "grad_norm": 1.7930604081479018, "learning_rate": 9.78054421967904e-06, "loss": 0.9337, "step": 1134 }, { "epoch": 0.12, "grad_norm": 1.8473971319906888, "learning_rate": 9.78003377494862e-06, "loss": 0.9305, "step": 1135 }, { "epoch": 0.12, "grad_norm": 1.85176056028402, "learning_rate": 9.779522750622626e-06, "loss": 0.9797, "step": 1136 }, { "epoch": 0.12, "grad_norm": 1.7397960692669328, "learning_rate": 9.77901114676302e-06, "loss": 0.9492, "step": 1137 }, { "epoch": 0.12, "grad_norm": 1.73717477710704, "learning_rate": 9.778498963431838e-06, "loss": 0.8659, "step": 1138 }, { "epoch": 0.12, "grad_norm": 1.7682856161597194, "learning_rate": 9.77798620069118e-06, "loss": 0.9341, "step": 1139 }, { "epoch": 0.12, "grad_norm": 1.8309610001723984, "learning_rate": 9.777472858603227e-06, "loss": 0.9559, "step": 1140 }, { "epoch": 0.12, "grad_norm": 1.7363188810192791, "learning_rate": 9.776958937230216e-06, "loss": 0.8937, "step": 1141 }, { "epoch": 0.12, "grad_norm": 1.871640621744642, "learning_rate": 9.776444436634467e-06, "loss": 0.86, "step": 1142 }, { "epoch": 0.12, "grad_norm": 1.7276594801751906, "learning_rate": 9.775929356878362e-06, "loss": 0.9755, "step": 1143 }, { "epoch": 0.12, "grad_norm": 1.7628302899777013, "learning_rate": 9.775413698024358e-06, "loss": 0.9392, "step": 1144 }, { "epoch": 0.12, "grad_norm": 2.039096521583918, "learning_rate": 9.77489746013498e-06, "loss": 1.0588, "step": 1145 }, { "epoch": 0.12, "grad_norm": 1.0926795984818383, "learning_rate": 9.774380643272824e-06, "loss": 1.0825, "step": 1146 }, { "epoch": 0.12, "grad_norm": 1.6992906775863967, "learning_rate": 9.773863247500554e-06, "loss": 0.9219, "step": 1147 }, { "epoch": 0.12, "grad_norm": 1.8617765892884641, "learning_rate": 9.773345272880908e-06, "loss": 0.954, "step": 1148 }, { "epoch": 0.12, "grad_norm": 1.829431328990108, "learning_rate": 9.77282671947669e-06, "loss": 0.935, "step": 1149 }, { "epoch": 0.12, "grad_norm": 0.8932176065547315, "learning_rate": 9.77230758735078e-06, 
"loss": 1.1008, "step": 1150 }, { "epoch": 0.12, "grad_norm": 1.7197101910280772, "learning_rate": 9.771787876566121e-06, "loss": 0.8337, "step": 1151 }, { "epoch": 0.12, "grad_norm": 1.6810711385239918, "learning_rate": 9.771267587185732e-06, "loss": 0.9056, "step": 1152 }, { "epoch": 0.12, "grad_norm": 1.7277927675279006, "learning_rate": 9.770746719272696e-06, "loss": 0.9002, "step": 1153 }, { "epoch": 0.12, "grad_norm": 1.7259773786406831, "learning_rate": 9.770225272890177e-06, "loss": 0.9414, "step": 1154 }, { "epoch": 0.12, "grad_norm": 1.7368740244304055, "learning_rate": 9.769703248101397e-06, "loss": 0.9126, "step": 1155 }, { "epoch": 0.12, "grad_norm": 1.8125964343318284, "learning_rate": 9.769180644969653e-06, "loss": 0.9877, "step": 1156 }, { "epoch": 0.12, "grad_norm": 1.129001504975424, "learning_rate": 9.768657463558316e-06, "loss": 1.0956, "step": 1157 }, { "epoch": 0.12, "grad_norm": 1.6693471625847693, "learning_rate": 9.76813370393082e-06, "loss": 0.9593, "step": 1158 }, { "epoch": 0.12, "grad_norm": 1.874832684996006, "learning_rate": 9.767609366150675e-06, "loss": 0.8864, "step": 1159 }, { "epoch": 0.12, "grad_norm": 1.7556508292751147, "learning_rate": 9.767084450281457e-06, "loss": 1.0714, "step": 1160 }, { "epoch": 0.12, "grad_norm": 1.8851490659830241, "learning_rate": 9.766558956386814e-06, "loss": 0.9562, "step": 1161 }, { "epoch": 0.12, "grad_norm": 1.724498562009284, "learning_rate": 9.766032884530466e-06, "loss": 0.9199, "step": 1162 }, { "epoch": 0.13, "grad_norm": 1.779881992187159, "learning_rate": 9.765506234776201e-06, "loss": 0.9725, "step": 1163 }, { "epoch": 0.13, "grad_norm": 1.7560831366724476, "learning_rate": 9.764979007187874e-06, "loss": 0.9687, "step": 1164 }, { "epoch": 0.13, "grad_norm": 1.8925677576649702, "learning_rate": 9.764451201829415e-06, "loss": 0.9948, "step": 1165 }, { "epoch": 0.13, "grad_norm": 1.735458900323121, "learning_rate": 9.763922818764824e-06, "loss": 0.9654, "step": 1166 }, { "epoch": 0.13, "grad_norm": 1.6651433238646258, "learning_rate": 9.763393858058165e-06, "loss": 0.9009, "step": 1167 }, { "epoch": 0.13, "grad_norm": 1.8590380913076896, "learning_rate": 9.76286431977358e-06, "loss": 0.882, "step": 1168 }, { "epoch": 0.13, "grad_norm": 1.1555532803839814, "learning_rate": 9.762334203975277e-06, "loss": 1.055, "step": 1169 }, { "epoch": 0.13, "grad_norm": 1.7482108525015174, "learning_rate": 9.761803510727532e-06, "loss": 0.9774, "step": 1170 }, { "epoch": 0.13, "grad_norm": 1.836801299609463, "learning_rate": 9.761272240094695e-06, "loss": 1.0109, "step": 1171 }, { "epoch": 0.13, "grad_norm": 1.7148262171388866, "learning_rate": 9.760740392141186e-06, "loss": 0.8835, "step": 1172 }, { "epoch": 0.13, "grad_norm": 0.8583512028818457, "learning_rate": 9.76020796693149e-06, "loss": 1.0661, "step": 1173 }, { "epoch": 0.13, "grad_norm": 1.771380201318604, "learning_rate": 9.759674964530167e-06, "loss": 0.9806, "step": 1174 }, { "epoch": 0.13, "grad_norm": 1.7048611706393197, "learning_rate": 9.759141385001846e-06, "loss": 0.9796, "step": 1175 }, { "epoch": 0.13, "grad_norm": 1.666934776115197, "learning_rate": 9.758607228411226e-06, "loss": 0.9507, "step": 1176 }, { "epoch": 0.13, "grad_norm": 1.8579198394441183, "learning_rate": 9.758072494823072e-06, "loss": 0.8535, "step": 1177 }, { "epoch": 0.13, "grad_norm": 1.731610721355881, "learning_rate": 9.757537184302225e-06, "loss": 0.8855, "step": 1178 }, { "epoch": 0.13, "grad_norm": 1.8567773312664542, "learning_rate": 9.757001296913594e-06, "loss": 0.9325, "step": 1179 
}, { "epoch": 0.13, "grad_norm": 1.7115101500042478, "learning_rate": 9.756464832722155e-06, "loss": 0.9759, "step": 1180 }, { "epoch": 0.13, "grad_norm": 1.7928370572520054, "learning_rate": 9.755927791792956e-06, "loss": 0.9614, "step": 1181 }, { "epoch": 0.13, "grad_norm": 1.8802490173635868, "learning_rate": 9.755390174191117e-06, "loss": 0.9717, "step": 1182 }, { "epoch": 0.13, "grad_norm": 1.6889013721166843, "learning_rate": 9.754851979981827e-06, "loss": 0.9714, "step": 1183 }, { "epoch": 0.13, "grad_norm": 1.7813643126599725, "learning_rate": 9.75431320923034e-06, "loss": 0.9536, "step": 1184 }, { "epoch": 0.13, "grad_norm": 1.76605807977974, "learning_rate": 9.753773862001985e-06, "loss": 0.9965, "step": 1185 }, { "epoch": 0.13, "grad_norm": 1.5913301669398983, "learning_rate": 9.753233938362161e-06, "loss": 0.8939, "step": 1186 }, { "epoch": 0.13, "grad_norm": 1.6881318965280296, "learning_rate": 9.752693438376337e-06, "loss": 0.9475, "step": 1187 }, { "epoch": 0.13, "grad_norm": 1.0947588332008547, "learning_rate": 9.752152362110046e-06, "loss": 1.0438, "step": 1188 }, { "epoch": 0.13, "grad_norm": 0.9686766294967777, "learning_rate": 9.751610709628898e-06, "loss": 1.0701, "step": 1189 }, { "epoch": 0.13, "grad_norm": 1.8673464143885388, "learning_rate": 9.751068480998572e-06, "loss": 0.987, "step": 1190 }, { "epoch": 0.13, "grad_norm": 1.7844467966453497, "learning_rate": 9.750525676284813e-06, "loss": 0.9515, "step": 1191 }, { "epoch": 0.13, "grad_norm": 1.7696494984768467, "learning_rate": 9.749982295553437e-06, "loss": 0.9391, "step": 1192 }, { "epoch": 0.13, "grad_norm": 1.8158550291199695, "learning_rate": 9.749438338870332e-06, "loss": 1.0003, "step": 1193 }, { "epoch": 0.13, "grad_norm": 1.7976566013889366, "learning_rate": 9.748893806301455e-06, "loss": 0.9392, "step": 1194 }, { "epoch": 0.13, "grad_norm": 1.9332542237192982, "learning_rate": 9.748348697912833e-06, "loss": 0.9493, "step": 1195 }, { "epoch": 0.13, "grad_norm": 1.604440152543112, "learning_rate": 9.74780301377056e-06, "loss": 1.0833, "step": 1196 }, { "epoch": 0.13, "grad_norm": 1.8255608816105517, "learning_rate": 9.747256753940803e-06, "loss": 0.9578, "step": 1197 }, { "epoch": 0.13, "grad_norm": 1.7809087848827114, "learning_rate": 9.746709918489801e-06, "loss": 0.8719, "step": 1198 }, { "epoch": 0.13, "grad_norm": 1.65431478005747, "learning_rate": 9.746162507483854e-06, "loss": 0.9051, "step": 1199 }, { "epoch": 0.13, "grad_norm": 1.9627018848274116, "learning_rate": 9.745614520989341e-06, "loss": 0.9859, "step": 1200 }, { "epoch": 0.13, "grad_norm": 1.913291515226628, "learning_rate": 9.745065959072708e-06, "loss": 0.9065, "step": 1201 }, { "epoch": 0.13, "grad_norm": 1.8686220949555998, "learning_rate": 9.74451682180047e-06, "loss": 0.9349, "step": 1202 }, { "epoch": 0.13, "grad_norm": 1.9723400638928938, "learning_rate": 9.743967109239207e-06, "loss": 0.8769, "step": 1203 }, { "epoch": 0.13, "grad_norm": 1.8034736380117218, "learning_rate": 9.743416821455577e-06, "loss": 0.9652, "step": 1204 }, { "epoch": 0.13, "grad_norm": 1.81550973541998, "learning_rate": 9.742865958516307e-06, "loss": 0.9602, "step": 1205 }, { "epoch": 0.13, "grad_norm": 1.7542400887946263, "learning_rate": 9.742314520488188e-06, "loss": 0.9109, "step": 1206 }, { "epoch": 0.13, "grad_norm": 1.7646559733238234, "learning_rate": 9.741762507438084e-06, "loss": 0.8948, "step": 1207 }, { "epoch": 0.13, "grad_norm": 1.8779758906380966, "learning_rate": 9.741209919432928e-06, "loss": 0.9726, "step": 1208 }, { "epoch": 0.13, 
"grad_norm": 1.8320607531141995, "learning_rate": 9.740656756539724e-06, "loss": 0.9371, "step": 1209 }, { "epoch": 0.13, "grad_norm": 1.8765413675366769, "learning_rate": 9.740103018825544e-06, "loss": 0.9057, "step": 1210 }, { "epoch": 0.13, "grad_norm": 1.9908868725287112, "learning_rate": 9.739548706357533e-06, "loss": 0.904, "step": 1211 }, { "epoch": 0.13, "grad_norm": 1.6610421176473633, "learning_rate": 9.7389938192029e-06, "loss": 0.9286, "step": 1212 }, { "epoch": 0.13, "grad_norm": 1.9231587102047503, "learning_rate": 9.73843835742893e-06, "loss": 0.9101, "step": 1213 }, { "epoch": 0.13, "grad_norm": 1.8222129549290134, "learning_rate": 9.737882321102973e-06, "loss": 1.0269, "step": 1214 }, { "epoch": 0.13, "grad_norm": 1.8299883463824855, "learning_rate": 9.73732571029245e-06, "loss": 0.9079, "step": 1215 }, { "epoch": 0.13, "grad_norm": 3.712060848562136, "learning_rate": 9.736768525064852e-06, "loss": 0.8776, "step": 1216 }, { "epoch": 0.13, "grad_norm": 1.8585795908956828, "learning_rate": 9.736210765487741e-06, "loss": 0.9741, "step": 1217 }, { "epoch": 0.13, "grad_norm": 1.841056951620945, "learning_rate": 9.735652431628748e-06, "loss": 0.9681, "step": 1218 }, { "epoch": 0.13, "grad_norm": 1.8485960722266441, "learning_rate": 9.73509352355557e-06, "loss": 0.8837, "step": 1219 }, { "epoch": 0.13, "grad_norm": 1.9668234966147329, "learning_rate": 9.734534041335976e-06, "loss": 0.9026, "step": 1220 }, { "epoch": 0.13, "grad_norm": 1.7998547441960084, "learning_rate": 9.73397398503781e-06, "loss": 0.9372, "step": 1221 }, { "epoch": 0.13, "grad_norm": 1.7652738569703286, "learning_rate": 9.733413354728978e-06, "loss": 0.8862, "step": 1222 }, { "epoch": 0.13, "grad_norm": 1.8306394523063005, "learning_rate": 9.732852150477456e-06, "loss": 0.9897, "step": 1223 }, { "epoch": 0.13, "grad_norm": 1.7943897000149613, "learning_rate": 9.732290372351295e-06, "loss": 0.8629, "step": 1224 }, { "epoch": 0.13, "grad_norm": 1.7186497667282286, "learning_rate": 9.731728020418612e-06, "loss": 0.8658, "step": 1225 }, { "epoch": 0.13, "grad_norm": 1.8285182177430324, "learning_rate": 9.731165094747593e-06, "loss": 0.8435, "step": 1226 }, { "epoch": 0.13, "grad_norm": 1.763297876847239, "learning_rate": 9.730601595406497e-06, "loss": 0.9361, "step": 1227 }, { "epoch": 0.13, "grad_norm": 1.8446768054265583, "learning_rate": 9.730037522463647e-06, "loss": 1.0085, "step": 1228 }, { "epoch": 0.13, "grad_norm": 1.8919520221084818, "learning_rate": 9.72947287598744e-06, "loss": 0.9534, "step": 1229 }, { "epoch": 0.13, "grad_norm": 1.812571374767467, "learning_rate": 9.728907656046344e-06, "loss": 0.9691, "step": 1230 }, { "epoch": 0.13, "grad_norm": 1.8595529542956106, "learning_rate": 9.72834186270889e-06, "loss": 1.0088, "step": 1231 }, { "epoch": 0.13, "grad_norm": 1.8890378025150192, "learning_rate": 9.727775496043685e-06, "loss": 0.9857, "step": 1232 }, { "epoch": 0.13, "grad_norm": 1.8604894870630209, "learning_rate": 9.727208556119401e-06, "loss": 1.0162, "step": 1233 }, { "epoch": 0.13, "grad_norm": 0.9827885363499883, "learning_rate": 9.726641043004782e-06, "loss": 1.074, "step": 1234 }, { "epoch": 0.13, "grad_norm": 1.7994014319664224, "learning_rate": 9.726072956768644e-06, "loss": 0.9154, "step": 1235 }, { "epoch": 0.13, "grad_norm": 2.0611412327825405, "learning_rate": 9.725504297479865e-06, "loss": 0.9611, "step": 1236 }, { "epoch": 0.13, "grad_norm": 1.7433485532841393, "learning_rate": 9.7249350652074e-06, "loss": 1.0075, "step": 1237 }, { "epoch": 0.13, "grad_norm": 1.7258647403929175, 
"learning_rate": 9.724365260020267e-06, "loss": 0.9223, "step": 1238 }, { "epoch": 0.13, "grad_norm": 1.777195581948761, "learning_rate": 9.723794881987561e-06, "loss": 0.9695, "step": 1239 }, { "epoch": 0.13, "grad_norm": 1.7538343864266914, "learning_rate": 9.723223931178439e-06, "loss": 0.9321, "step": 1240 }, { "epoch": 0.13, "grad_norm": 1.740926222844274, "learning_rate": 9.722652407662129e-06, "loss": 0.9413, "step": 1241 }, { "epoch": 0.13, "grad_norm": 1.9250698917754199, "learning_rate": 9.722080311507938e-06, "loss": 0.9888, "step": 1242 }, { "epoch": 0.13, "grad_norm": 1.8705263775410779, "learning_rate": 9.721507642785225e-06, "loss": 0.896, "step": 1243 }, { "epoch": 0.13, "grad_norm": 1.71888811367962, "learning_rate": 9.720934401563438e-06, "loss": 1.0403, "step": 1244 }, { "epoch": 0.13, "grad_norm": 1.6833230077152903, "learning_rate": 9.720360587912075e-06, "loss": 1.0131, "step": 1245 }, { "epoch": 0.13, "grad_norm": 1.6538990010705523, "learning_rate": 9.71978620190072e-06, "loss": 0.9447, "step": 1246 }, { "epoch": 0.13, "grad_norm": 1.7347180559872641, "learning_rate": 9.719211243599015e-06, "loss": 0.8859, "step": 1247 }, { "epoch": 0.13, "grad_norm": 1.8130683519951711, "learning_rate": 9.718635713076677e-06, "loss": 1.0101, "step": 1248 }, { "epoch": 0.13, "grad_norm": 1.6560385235192612, "learning_rate": 9.718059610403493e-06, "loss": 0.9231, "step": 1249 }, { "epoch": 0.13, "grad_norm": 1.8177146048601935, "learning_rate": 9.717482935649313e-06, "loss": 0.9771, "step": 1250 }, { "epoch": 0.13, "grad_norm": 1.7395339593735508, "learning_rate": 9.716905688884065e-06, "loss": 0.9146, "step": 1251 }, { "epoch": 0.13, "grad_norm": 1.7389112591296354, "learning_rate": 9.716327870177739e-06, "loss": 0.929, "step": 1252 }, { "epoch": 0.13, "grad_norm": 1.7990127525203634, "learning_rate": 9.7157494796004e-06, "loss": 0.9416, "step": 1253 }, { "epoch": 0.13, "grad_norm": 1.8008901225028542, "learning_rate": 9.71517051722218e-06, "loss": 0.9843, "step": 1254 }, { "epoch": 0.13, "grad_norm": 1.695225925972305, "learning_rate": 9.714590983113279e-06, "loss": 0.9486, "step": 1255 }, { "epoch": 0.14, "grad_norm": 2.0323666532676747, "learning_rate": 9.714010877343967e-06, "loss": 0.9838, "step": 1256 }, { "epoch": 0.14, "grad_norm": 1.7474522306776232, "learning_rate": 9.713430199984583e-06, "loss": 0.9453, "step": 1257 }, { "epoch": 0.14, "grad_norm": 1.7329963737080902, "learning_rate": 9.71284895110554e-06, "loss": 0.9651, "step": 1258 }, { "epoch": 0.14, "grad_norm": 1.655091167228909, "learning_rate": 9.712267130777313e-06, "loss": 0.8903, "step": 1259 }, { "epoch": 0.14, "grad_norm": 1.6270740211826473, "learning_rate": 9.71168473907045e-06, "loss": 1.0478, "step": 1260 }, { "epoch": 0.14, "grad_norm": 1.8726126384154236, "learning_rate": 9.71110177605557e-06, "loss": 0.9691, "step": 1261 }, { "epoch": 0.14, "grad_norm": 1.8305500669363823, "learning_rate": 9.710518241803357e-06, "loss": 1.0793, "step": 1262 }, { "epoch": 0.14, "grad_norm": 1.7136656451697307, "learning_rate": 9.70993413638457e-06, "loss": 0.9529, "step": 1263 }, { "epoch": 0.14, "grad_norm": 1.813780300934652, "learning_rate": 9.709349459870028e-06, "loss": 0.9345, "step": 1264 }, { "epoch": 0.14, "grad_norm": 1.6472900963296162, "learning_rate": 9.70876421233063e-06, "loss": 0.9406, "step": 1265 }, { "epoch": 0.14, "grad_norm": 1.723428613503388, "learning_rate": 9.708178393837338e-06, "loss": 1.0019, "step": 1266 }, { "epoch": 0.14, "grad_norm": 1.858267166961602, "learning_rate": 
9.707592004461183e-06, "loss": 0.9419, "step": 1267 }, { "epoch": 0.14, "grad_norm": 1.8143513651790806, "learning_rate": 9.707005044273268e-06, "loss": 0.914, "step": 1268 }, { "epoch": 0.14, "grad_norm": 1.7384561200174327, "learning_rate": 9.706417513344764e-06, "loss": 0.9817, "step": 1269 }, { "epoch": 0.14, "grad_norm": 1.7266648002075469, "learning_rate": 9.705829411746912e-06, "loss": 0.9013, "step": 1270 }, { "epoch": 0.14, "grad_norm": 1.7628945304962549, "learning_rate": 9.70524073955102e-06, "loss": 0.978, "step": 1271 }, { "epoch": 0.14, "grad_norm": 1.7109629466915632, "learning_rate": 9.704651496828466e-06, "loss": 0.9939, "step": 1272 }, { "epoch": 0.14, "grad_norm": 1.6675938200800113, "learning_rate": 9.704061683650701e-06, "loss": 0.8742, "step": 1273 }, { "epoch": 0.14, "grad_norm": 1.6415771502746803, "learning_rate": 9.703471300089237e-06, "loss": 0.8555, "step": 1274 }, { "epoch": 0.14, "grad_norm": 1.0159705629237386, "learning_rate": 9.702880346215664e-06, "loss": 1.061, "step": 1275 }, { "epoch": 0.14, "grad_norm": 1.8782974841500362, "learning_rate": 9.702288822101634e-06, "loss": 0.9643, "step": 1276 }, { "epoch": 0.14, "grad_norm": 1.9142163252362512, "learning_rate": 9.701696727818875e-06, "loss": 0.9218, "step": 1277 }, { "epoch": 0.14, "grad_norm": 1.7945349869411686, "learning_rate": 9.701104063439177e-06, "loss": 0.9468, "step": 1278 }, { "epoch": 0.14, "grad_norm": 1.6823058108530389, "learning_rate": 9.700510829034404e-06, "loss": 0.894, "step": 1279 }, { "epoch": 0.14, "grad_norm": 1.6471700079296356, "learning_rate": 9.699917024676489e-06, "loss": 0.9257, "step": 1280 }, { "epoch": 0.14, "grad_norm": 1.6269068923806016, "learning_rate": 9.699322650437433e-06, "loss": 0.8255, "step": 1281 }, { "epoch": 0.14, "grad_norm": 1.7222566133072286, "learning_rate": 9.698727706389301e-06, "loss": 0.9025, "step": 1282 }, { "epoch": 0.14, "grad_norm": 1.96198972310481, "learning_rate": 9.698132192604239e-06, "loss": 0.8239, "step": 1283 }, { "epoch": 0.14, "grad_norm": 1.7052648327891688, "learning_rate": 9.69753610915445e-06, "loss": 0.9713, "step": 1284 }, { "epoch": 0.14, "grad_norm": 1.7640973430706142, "learning_rate": 9.696939456112213e-06, "loss": 0.9105, "step": 1285 }, { "epoch": 0.14, "grad_norm": 1.7377684409884142, "learning_rate": 9.696342233549874e-06, "loss": 0.8673, "step": 1286 }, { "epoch": 0.14, "grad_norm": 1.8058118218246957, "learning_rate": 9.695744441539849e-06, "loss": 1.0093, "step": 1287 }, { "epoch": 0.14, "grad_norm": 1.747296920871417, "learning_rate": 9.695146080154622e-06, "loss": 0.9903, "step": 1288 }, { "epoch": 0.14, "grad_norm": 1.76291060426145, "learning_rate": 9.694547149466745e-06, "loss": 0.9424, "step": 1289 }, { "epoch": 0.14, "grad_norm": 1.8177909459653936, "learning_rate": 9.693947649548843e-06, "loss": 0.9511, "step": 1290 }, { "epoch": 0.14, "grad_norm": 1.750848039221567, "learning_rate": 9.693347580473604e-06, "loss": 0.8586, "step": 1291 }, { "epoch": 0.14, "grad_norm": 1.8107141727760652, "learning_rate": 9.692746942313792e-06, "loss": 0.9398, "step": 1292 }, { "epoch": 0.14, "grad_norm": 1.1208810158622644, "learning_rate": 9.692145735142236e-06, "loss": 1.0863, "step": 1293 }, { "epoch": 0.14, "grad_norm": 1.758835623577077, "learning_rate": 9.691543959031831e-06, "loss": 1.0542, "step": 1294 }, { "epoch": 0.14, "grad_norm": 1.8757005104982023, "learning_rate": 9.690941614055547e-06, "loss": 0.9939, "step": 1295 }, { "epoch": 0.14, "grad_norm": 1.8712646055539242, "learning_rate": 9.690338700286422e-06, 
"loss": 0.9999, "step": 1296 }, { "epoch": 0.14, "grad_norm": 1.81666852819241, "learning_rate": 9.689735217797559e-06, "loss": 0.8466, "step": 1297 }, { "epoch": 0.14, "grad_norm": 1.6671873570281903, "learning_rate": 9.689131166662131e-06, "loss": 0.9924, "step": 1298 }, { "epoch": 0.14, "grad_norm": 1.9037719001113205, "learning_rate": 9.688526546953384e-06, "loss": 0.9326, "step": 1299 }, { "epoch": 0.14, "grad_norm": 1.7457763336401118, "learning_rate": 9.68792135874463e-06, "loss": 0.9576, "step": 1300 }, { "epoch": 0.14, "grad_norm": 1.790134778334665, "learning_rate": 9.687315602109248e-06, "loss": 0.9133, "step": 1301 }, { "epoch": 0.14, "grad_norm": 1.9387600455288883, "learning_rate": 9.686709277120692e-06, "loss": 0.9792, "step": 1302 }, { "epoch": 0.14, "grad_norm": 1.7072807738289963, "learning_rate": 9.686102383852478e-06, "loss": 0.9369, "step": 1303 }, { "epoch": 0.14, "grad_norm": 1.858926821639685, "learning_rate": 9.685494922378194e-06, "loss": 1.0107, "step": 1304 }, { "epoch": 0.14, "grad_norm": 1.8107108248500081, "learning_rate": 9.684886892771496e-06, "loss": 0.9232, "step": 1305 }, { "epoch": 0.14, "grad_norm": 2.210888916342686, "learning_rate": 9.684278295106112e-06, "loss": 0.8916, "step": 1306 }, { "epoch": 0.14, "grad_norm": 1.9312078253379261, "learning_rate": 9.683669129455837e-06, "loss": 1.0018, "step": 1307 }, { "epoch": 0.14, "grad_norm": 1.693339068322031, "learning_rate": 9.683059395894531e-06, "loss": 0.8573, "step": 1308 }, { "epoch": 0.14, "grad_norm": 1.7609878790371245, "learning_rate": 9.682449094496129e-06, "loss": 0.9593, "step": 1309 }, { "epoch": 0.14, "grad_norm": 1.7791779636723473, "learning_rate": 9.681838225334632e-06, "loss": 0.8852, "step": 1310 }, { "epoch": 0.14, "grad_norm": 1.8435741831611834, "learning_rate": 9.681226788484108e-06, "loss": 0.8646, "step": 1311 }, { "epoch": 0.14, "grad_norm": 1.9005094583290882, "learning_rate": 9.680614784018699e-06, "loss": 0.9362, "step": 1312 }, { "epoch": 0.14, "grad_norm": 1.6833583214904495, "learning_rate": 9.68000221201261e-06, "loss": 0.9593, "step": 1313 }, { "epoch": 0.14, "grad_norm": 1.805499964168594, "learning_rate": 9.67938907254012e-06, "loss": 0.9615, "step": 1314 }, { "epoch": 0.14, "grad_norm": 1.3072561085902317, "learning_rate": 9.67877536567557e-06, "loss": 1.1319, "step": 1315 }, { "epoch": 0.14, "grad_norm": 1.7784117500025727, "learning_rate": 9.678161091493378e-06, "loss": 0.9815, "step": 1316 }, { "epoch": 0.14, "grad_norm": 1.8026128541578461, "learning_rate": 9.677546250068025e-06, "loss": 0.9723, "step": 1317 }, { "epoch": 0.14, "grad_norm": 2.486980377449916, "learning_rate": 9.676930841474064e-06, "loss": 0.9959, "step": 1318 }, { "epoch": 0.14, "grad_norm": 0.8863922286334577, "learning_rate": 9.676314865786114e-06, "loss": 1.0542, "step": 1319 }, { "epoch": 0.14, "grad_norm": 1.75046126424522, "learning_rate": 9.675698323078865e-06, "loss": 0.9753, "step": 1320 }, { "epoch": 0.14, "grad_norm": 1.9399716336946635, "learning_rate": 9.675081213427076e-06, "loss": 1.0143, "step": 1321 }, { "epoch": 0.14, "grad_norm": 1.7157181648743232, "learning_rate": 9.67446353690557e-06, "loss": 0.9659, "step": 1322 }, { "epoch": 0.14, "grad_norm": 1.8282768376815017, "learning_rate": 9.673845293589246e-06, "loss": 0.9337, "step": 1323 }, { "epoch": 0.14, "grad_norm": 1.7252122580881186, "learning_rate": 9.673226483553068e-06, "loss": 0.9336, "step": 1324 }, { "epoch": 0.14, "grad_norm": 1.8514618276661683, "learning_rate": 9.672607106872067e-06, "loss": 0.9351, "step": 1325 }, 
{ "epoch": 0.14, "grad_norm": 1.6597692093469811, "learning_rate": 9.671987163621344e-06, "loss": 0.9571, "step": 1326 }, { "epoch": 0.14, "grad_norm": 1.7046655882246773, "learning_rate": 9.671366653876071e-06, "loss": 1.0213, "step": 1327 }, { "epoch": 0.14, "grad_norm": 3.1981672147474396, "learning_rate": 9.670745577711488e-06, "loss": 0.9167, "step": 1328 }, { "epoch": 0.14, "grad_norm": 2.0390628311009635, "learning_rate": 9.670123935202903e-06, "loss": 1.0578, "step": 1329 }, { "epoch": 0.14, "grad_norm": 1.8354635814158557, "learning_rate": 9.669501726425687e-06, "loss": 0.9165, "step": 1330 }, { "epoch": 0.14, "grad_norm": 1.7475007969030938, "learning_rate": 9.668878951455289e-06, "loss": 0.9494, "step": 1331 }, { "epoch": 0.14, "grad_norm": 1.066607607222415, "learning_rate": 9.668255610367223e-06, "loss": 1.0763, "step": 1332 }, { "epoch": 0.14, "grad_norm": 0.9332562758103905, "learning_rate": 9.66763170323707e-06, "loss": 1.0519, "step": 1333 }, { "epoch": 0.14, "grad_norm": 1.7690239520875373, "learning_rate": 9.66700723014048e-06, "loss": 0.8634, "step": 1334 }, { "epoch": 0.14, "grad_norm": 1.8655210938415188, "learning_rate": 9.666382191153175e-06, "loss": 0.8994, "step": 1335 }, { "epoch": 0.14, "grad_norm": 1.7214682814997069, "learning_rate": 9.665756586350942e-06, "loss": 0.8422, "step": 1336 }, { "epoch": 0.14, "grad_norm": 1.9189520783844412, "learning_rate": 9.665130415809636e-06, "loss": 0.9295, "step": 1337 }, { "epoch": 0.14, "grad_norm": 1.7555819689715926, "learning_rate": 9.664503679605186e-06, "loss": 0.9372, "step": 1338 }, { "epoch": 0.14, "grad_norm": 1.5120281178561668, "learning_rate": 9.663876377813583e-06, "loss": 1.0979, "step": 1339 }, { "epoch": 0.14, "grad_norm": 1.8887715016352102, "learning_rate": 9.663248510510891e-06, "loss": 0.9317, "step": 1340 }, { "epoch": 0.14, "grad_norm": 1.78285620538769, "learning_rate": 9.66262007777324e-06, "loss": 0.9119, "step": 1341 }, { "epoch": 0.14, "grad_norm": 1.8099054179515521, "learning_rate": 9.661991079676829e-06, "loss": 0.9395, "step": 1342 }, { "epoch": 0.14, "grad_norm": 1.8409298147675015, "learning_rate": 9.661361516297929e-06, "loss": 1.0039, "step": 1343 }, { "epoch": 0.14, "grad_norm": 1.7532029746844264, "learning_rate": 9.660731387712874e-06, "loss": 0.8862, "step": 1344 }, { "epoch": 0.14, "grad_norm": 0.8677637739314561, "learning_rate": 9.66010069399807e-06, "loss": 1.0623, "step": 1345 }, { "epoch": 0.14, "grad_norm": 1.6661236538454791, "learning_rate": 9.659469435229992e-06, "loss": 0.9802, "step": 1346 }, { "epoch": 0.14, "grad_norm": 1.7228145119940494, "learning_rate": 9.658837611485181e-06, "loss": 0.8587, "step": 1347 }, { "epoch": 0.14, "grad_norm": 1.9827889608803497, "learning_rate": 9.658205222840249e-06, "loss": 0.9886, "step": 1348 }, { "epoch": 0.15, "grad_norm": 1.7001093162072172, "learning_rate": 9.657572269371873e-06, "loss": 0.9809, "step": 1349 }, { "epoch": 0.15, "grad_norm": 1.7098572325468708, "learning_rate": 9.656938751156803e-06, "loss": 0.9224, "step": 1350 }, { "epoch": 0.15, "grad_norm": 1.8442169760629519, "learning_rate": 9.656304668271855e-06, "loss": 0.9046, "step": 1351 }, { "epoch": 0.15, "grad_norm": 1.7725951821894057, "learning_rate": 9.655670020793911e-06, "loss": 0.903, "step": 1352 }, { "epoch": 0.15, "grad_norm": 1.81089926782642, "learning_rate": 9.655034808799928e-06, "loss": 0.9439, "step": 1353 }, { "epoch": 0.15, "grad_norm": 1.7046825599693822, "learning_rate": 9.654399032366925e-06, "loss": 0.8834, "step": 1354 }, { "epoch": 0.15, 
"grad_norm": 1.0804309527702856, "learning_rate": 9.653762691571995e-06, "loss": 1.0607, "step": 1355 }, { "epoch": 0.15, "grad_norm": 1.695882114293869, "learning_rate": 9.653125786492295e-06, "loss": 1.0056, "step": 1356 }, { "epoch": 0.15, "grad_norm": 1.7420609459047227, "learning_rate": 9.652488317205048e-06, "loss": 0.9403, "step": 1357 }, { "epoch": 0.15, "grad_norm": 1.793860685473266, "learning_rate": 9.651850283787557e-06, "loss": 0.9239, "step": 1358 }, { "epoch": 0.15, "grad_norm": 1.7042076451463992, "learning_rate": 9.65121168631718e-06, "loss": 0.9065, "step": 1359 }, { "epoch": 0.15, "grad_norm": 1.7708722354834414, "learning_rate": 9.65057252487135e-06, "loss": 0.9533, "step": 1360 }, { "epoch": 0.15, "grad_norm": 1.6386179550890096, "learning_rate": 9.64993279952757e-06, "loss": 0.9549, "step": 1361 }, { "epoch": 0.15, "grad_norm": 1.8536643936212485, "learning_rate": 9.649292510363408e-06, "loss": 0.9241, "step": 1362 }, { "epoch": 0.15, "grad_norm": 1.7339524449091333, "learning_rate": 9.648651657456498e-06, "loss": 0.9297, "step": 1363 }, { "epoch": 0.15, "grad_norm": 1.7631514901166518, "learning_rate": 9.64801024088455e-06, "loss": 0.9363, "step": 1364 }, { "epoch": 0.15, "grad_norm": 1.6965774736226111, "learning_rate": 9.647368260725336e-06, "loss": 0.8785, "step": 1365 }, { "epoch": 0.15, "grad_norm": 1.706803745351107, "learning_rate": 9.646725717056697e-06, "loss": 0.911, "step": 1366 }, { "epoch": 0.15, "grad_norm": 1.7600289695431448, "learning_rate": 9.646082609956546e-06, "loss": 0.915, "step": 1367 }, { "epoch": 0.15, "grad_norm": 1.7488046788826288, "learning_rate": 9.645438939502863e-06, "loss": 0.9704, "step": 1368 }, { "epoch": 0.15, "grad_norm": 1.7130732761905072, "learning_rate": 9.644794705773691e-06, "loss": 0.9354, "step": 1369 }, { "epoch": 0.15, "grad_norm": 1.1601707805369146, "learning_rate": 9.644149908847149e-06, "loss": 1.06, "step": 1370 }, { "epoch": 0.15, "grad_norm": 1.69304970988638, "learning_rate": 9.64350454880142e-06, "loss": 0.9424, "step": 1371 }, { "epoch": 0.15, "grad_norm": 1.9005509963414922, "learning_rate": 9.642858625714753e-06, "loss": 0.9743, "step": 1372 }, { "epoch": 0.15, "grad_norm": 1.7852006723405949, "learning_rate": 9.642212139665474e-06, "loss": 0.9634, "step": 1373 }, { "epoch": 0.15, "grad_norm": 1.8685194685507618, "learning_rate": 9.641565090731967e-06, "loss": 1.0291, "step": 1374 }, { "epoch": 0.15, "grad_norm": 1.8566019828610887, "learning_rate": 9.640917478992693e-06, "loss": 0.9851, "step": 1375 }, { "epoch": 0.15, "grad_norm": 1.7854845875722785, "learning_rate": 9.640269304526175e-06, "loss": 0.8802, "step": 1376 }, { "epoch": 0.15, "grad_norm": 1.7813969315515208, "learning_rate": 9.639620567411005e-06, "loss": 0.9305, "step": 1377 }, { "epoch": 0.15, "grad_norm": 1.7420129421277366, "learning_rate": 9.638971267725847e-06, "loss": 1.0297, "step": 1378 }, { "epoch": 0.15, "grad_norm": 1.8008666170015144, "learning_rate": 9.638321405549429e-06, "loss": 0.9802, "step": 1379 }, { "epoch": 0.15, "grad_norm": 1.958862742974025, "learning_rate": 9.63767098096055e-06, "loss": 0.7991, "step": 1380 }, { "epoch": 0.15, "grad_norm": 1.7704898569672072, "learning_rate": 9.637019994038076e-06, "loss": 0.8174, "step": 1381 }, { "epoch": 0.15, "grad_norm": 1.9454146184495025, "learning_rate": 9.636368444860942e-06, "loss": 0.97, "step": 1382 }, { "epoch": 0.15, "grad_norm": 1.6956410727746987, "learning_rate": 9.63571633350815e-06, "loss": 0.8851, "step": 1383 }, { "epoch": 0.15, "grad_norm": 1.7753421821822153, 
"learning_rate": 9.635063660058772e-06, "loss": 0.979, "step": 1384 }, { "epoch": 0.15, "grad_norm": 1.826476811546799, "learning_rate": 9.634410424591942e-06, "loss": 1.0011, "step": 1385 }, { "epoch": 0.15, "grad_norm": 1.770390285888198, "learning_rate": 9.633756627186874e-06, "loss": 0.9638, "step": 1386 }, { "epoch": 0.15, "grad_norm": 1.7045419233686316, "learning_rate": 9.63310226792284e-06, "loss": 0.8683, "step": 1387 }, { "epoch": 0.15, "grad_norm": 1.7270966668884549, "learning_rate": 9.632447346879182e-06, "loss": 1.0009, "step": 1388 }, { "epoch": 0.15, "grad_norm": 2.022446249033957, "learning_rate": 9.631791864135313e-06, "loss": 0.9553, "step": 1389 }, { "epoch": 0.15, "grad_norm": 1.7648309623355223, "learning_rate": 9.631135819770711e-06, "loss": 0.8933, "step": 1390 }, { "epoch": 0.15, "grad_norm": 1.7833654836449024, "learning_rate": 9.630479213864928e-06, "loss": 0.9827, "step": 1391 }, { "epoch": 0.15, "grad_norm": 1.740149213184367, "learning_rate": 9.629822046497575e-06, "loss": 0.9161, "step": 1392 }, { "epoch": 0.15, "grad_norm": 1.7662673731343899, "learning_rate": 9.629164317748336e-06, "loss": 0.9325, "step": 1393 }, { "epoch": 0.15, "grad_norm": 1.7731996284175147, "learning_rate": 9.628506027696967e-06, "loss": 0.8705, "step": 1394 }, { "epoch": 0.15, "grad_norm": 1.7094195388265079, "learning_rate": 9.627847176423283e-06, "loss": 0.8823, "step": 1395 }, { "epoch": 0.15, "grad_norm": 1.9022802877599772, "learning_rate": 9.627187764007175e-06, "loss": 0.8905, "step": 1396 }, { "epoch": 0.15, "grad_norm": 1.8615816218104457, "learning_rate": 9.6265277905286e-06, "loss": 0.961, "step": 1397 }, { "epoch": 0.15, "grad_norm": 1.7684005418903954, "learning_rate": 9.625867256067577e-06, "loss": 0.9207, "step": 1398 }, { "epoch": 0.15, "grad_norm": 1.8845015944913246, "learning_rate": 9.625206160704204e-06, "loss": 0.9769, "step": 1399 }, { "epoch": 0.15, "grad_norm": 1.625433004093357, "learning_rate": 9.624544504518637e-06, "loss": 0.9197, "step": 1400 }, { "epoch": 0.15, "grad_norm": 1.6422140537518477, "learning_rate": 9.623882287591106e-06, "loss": 0.8393, "step": 1401 }, { "epoch": 0.15, "grad_norm": 1.8103291103331431, "learning_rate": 9.623219510001907e-06, "loss": 0.9786, "step": 1402 }, { "epoch": 0.15, "grad_norm": 1.7953363283505914, "learning_rate": 9.622556171831404e-06, "loss": 0.9641, "step": 1403 }, { "epoch": 0.15, "grad_norm": 1.7144848456947013, "learning_rate": 9.621892273160028e-06, "loss": 0.9311, "step": 1404 }, { "epoch": 0.15, "grad_norm": 1.8153083903694607, "learning_rate": 9.62122781406828e-06, "loss": 0.9042, "step": 1405 }, { "epoch": 0.15, "grad_norm": 1.7453583393893406, "learning_rate": 9.62056279463673e-06, "loss": 0.9359, "step": 1406 }, { "epoch": 0.15, "grad_norm": 1.659512785089936, "learning_rate": 9.619897214946007e-06, "loss": 0.8652, "step": 1407 }, { "epoch": 0.15, "grad_norm": 1.733328565287686, "learning_rate": 9.619231075076824e-06, "loss": 0.9588, "step": 1408 }, { "epoch": 0.15, "grad_norm": 1.6997106195565865, "learning_rate": 9.618564375109946e-06, "loss": 0.9283, "step": 1409 }, { "epoch": 0.15, "grad_norm": 1.8751968559908663, "learning_rate": 9.617897115126216e-06, "loss": 1.0544, "step": 1410 }, { "epoch": 0.15, "grad_norm": 1.9064539780729668, "learning_rate": 9.617229295206538e-06, "loss": 0.9258, "step": 1411 }, { "epoch": 0.15, "grad_norm": 1.6711881808897096, "learning_rate": 9.616560915431892e-06, "loss": 0.9444, "step": 1412 }, { "epoch": 0.15, "grad_norm": 1.8847200174043122, "learning_rate": 
9.61589197588332e-06, "loss": 0.9827, "step": 1413 }, { "epoch": 0.15, "grad_norm": 1.8738157795787422, "learning_rate": 9.61522247664193e-06, "loss": 1.0495, "step": 1414 }, { "epoch": 0.15, "grad_norm": 1.7425273221469568, "learning_rate": 9.614552417788906e-06, "loss": 0.9573, "step": 1415 }, { "epoch": 0.15, "grad_norm": 1.7575288608399968, "learning_rate": 9.613881799405492e-06, "loss": 0.9183, "step": 1416 }, { "epoch": 0.15, "grad_norm": 1.8834375103441199, "learning_rate": 9.613210621573003e-06, "loss": 0.9823, "step": 1417 }, { "epoch": 0.15, "grad_norm": 1.7690878548189666, "learning_rate": 9.612538884372821e-06, "loss": 0.9225, "step": 1418 }, { "epoch": 0.15, "grad_norm": 1.6542470625678858, "learning_rate": 9.611866587886399e-06, "loss": 0.9525, "step": 1419 }, { "epoch": 0.15, "grad_norm": 1.7621813487367117, "learning_rate": 9.611193732195254e-06, "loss": 0.9662, "step": 1420 }, { "epoch": 0.15, "grad_norm": 1.1506346680658193, "learning_rate": 9.61052031738097e-06, "loss": 1.0839, "step": 1421 }, { "epoch": 0.15, "grad_norm": 1.8449001583598301, "learning_rate": 9.609846343525204e-06, "loss": 0.9651, "step": 1422 }, { "epoch": 0.15, "grad_norm": 1.7414555121333162, "learning_rate": 9.609171810709676e-06, "loss": 0.9264, "step": 1423 }, { "epoch": 0.15, "grad_norm": 0.9024877412828572, "learning_rate": 9.608496719016176e-06, "loss": 1.0762, "step": 1424 }, { "epoch": 0.15, "grad_norm": 1.752707612053309, "learning_rate": 9.60782106852656e-06, "loss": 0.8828, "step": 1425 }, { "epoch": 0.15, "grad_norm": 1.0026795989633763, "learning_rate": 9.607144859322755e-06, "loss": 1.0746, "step": 1426 }, { "epoch": 0.15, "grad_norm": 1.8172629353399616, "learning_rate": 9.60646809148675e-06, "loss": 0.9938, "step": 1427 }, { "epoch": 0.15, "grad_norm": 1.6832733744100667, "learning_rate": 9.60579076510061e-06, "loss": 0.9274, "step": 1428 }, { "epoch": 0.15, "grad_norm": 0.9206543551158937, "learning_rate": 9.605112880246463e-06, "loss": 1.0608, "step": 1429 }, { "epoch": 0.15, "grad_norm": 1.7606615110417487, "learning_rate": 9.6044344370065e-06, "loss": 0.8544, "step": 1430 }, { "epoch": 0.15, "grad_norm": 1.7427976038830009, "learning_rate": 9.603755435462989e-06, "loss": 0.9507, "step": 1431 }, { "epoch": 0.15, "grad_norm": 1.6525341772533055, "learning_rate": 9.60307587569826e-06, "loss": 0.8523, "step": 1432 }, { "epoch": 0.15, "grad_norm": 1.8312518195293592, "learning_rate": 9.602395757794712e-06, "loss": 0.9351, "step": 1433 }, { "epoch": 0.15, "grad_norm": 1.7165173862702916, "learning_rate": 9.60171508183481e-06, "loss": 0.9171, "step": 1434 }, { "epoch": 0.15, "grad_norm": 1.8730417273271636, "learning_rate": 9.601033847901092e-06, "loss": 0.9468, "step": 1435 }, { "epoch": 0.15, "grad_norm": 1.8074233371503403, "learning_rate": 9.600352056076154e-06, "loss": 0.9305, "step": 1436 }, { "epoch": 0.15, "grad_norm": 1.0297949535982869, "learning_rate": 9.599669706442674e-06, "loss": 1.1048, "step": 1437 }, { "epoch": 0.15, "grad_norm": 1.913167798266604, "learning_rate": 9.59898679908338e-06, "loss": 0.8638, "step": 1438 }, { "epoch": 0.15, "grad_norm": 1.8267080809909488, "learning_rate": 9.598303334081084e-06, "loss": 0.916, "step": 1439 }, { "epoch": 0.15, "grad_norm": 1.8397781019634896, "learning_rate": 9.597619311518658e-06, "loss": 0.9389, "step": 1440 }, { "epoch": 0.15, "grad_norm": 1.7805768704962996, "learning_rate": 9.596934731479038e-06, "loss": 0.8918, "step": 1441 }, { "epoch": 0.16, "grad_norm": 1.8042871516598047, "learning_rate": 9.596249594045233e-06, "loss": 
0.9516, "step": 1442 }, { "epoch": 0.16, "grad_norm": 1.7528771138655306, "learning_rate": 9.59556389930032e-06, "loss": 0.9505, "step": 1443 }, { "epoch": 0.16, "grad_norm": 1.0070859145349267, "learning_rate": 9.594877647327441e-06, "loss": 1.0661, "step": 1444 }, { "epoch": 0.16, "grad_norm": 1.700460230704621, "learning_rate": 9.594190838209806e-06, "loss": 0.8955, "step": 1445 }, { "epoch": 0.16, "grad_norm": 1.796119565528654, "learning_rate": 9.593503472030693e-06, "loss": 1.0348, "step": 1446 }, { "epoch": 0.16, "grad_norm": 1.7041173326119494, "learning_rate": 9.592815548873448e-06, "loss": 0.8967, "step": 1447 }, { "epoch": 0.16, "grad_norm": 1.7426517227021265, "learning_rate": 9.592127068821484e-06, "loss": 1.0777, "step": 1448 }, { "epoch": 0.16, "grad_norm": 1.848973488224146, "learning_rate": 9.591438031958282e-06, "loss": 1.0168, "step": 1449 }, { "epoch": 0.16, "grad_norm": 1.8584809255109977, "learning_rate": 9.59074843836739e-06, "loss": 0.9333, "step": 1450 }, { "epoch": 0.16, "grad_norm": 1.6375930421210068, "learning_rate": 9.59005828813242e-06, "loss": 0.9812, "step": 1451 }, { "epoch": 0.16, "grad_norm": 1.6748034967136696, "learning_rate": 9.589367581337062e-06, "loss": 0.854, "step": 1452 }, { "epoch": 0.16, "grad_norm": 1.7422802462084397, "learning_rate": 9.58867631806506e-06, "loss": 0.9287, "step": 1453 }, { "epoch": 0.16, "grad_norm": 1.8116740123279091, "learning_rate": 9.587984498400236e-06, "loss": 1.0021, "step": 1454 }, { "epoch": 0.16, "grad_norm": 1.7490236950223288, "learning_rate": 9.587292122426475e-06, "loss": 0.9234, "step": 1455 }, { "epoch": 0.16, "grad_norm": 1.6769719591071819, "learning_rate": 9.586599190227728e-06, "loss": 0.9893, "step": 1456 }, { "epoch": 0.16, "grad_norm": 1.8218259667147807, "learning_rate": 9.585905701888019e-06, "loss": 0.9153, "step": 1457 }, { "epoch": 0.16, "grad_norm": 1.7692596437189836, "learning_rate": 9.58521165749143e-06, "loss": 0.9391, "step": 1458 }, { "epoch": 0.16, "grad_norm": 1.9319441460901332, "learning_rate": 9.584517057122122e-06, "loss": 0.9197, "step": 1459 }, { "epoch": 0.16, "grad_norm": 1.785876796632594, "learning_rate": 9.583821900864316e-06, "loss": 0.9373, "step": 1460 }, { "epoch": 0.16, "grad_norm": 1.7772596216568746, "learning_rate": 9.583126188802301e-06, "loss": 0.9584, "step": 1461 }, { "epoch": 0.16, "grad_norm": 1.7079353356490585, "learning_rate": 9.582429921020436e-06, "loss": 0.9423, "step": 1462 }, { "epoch": 0.16, "grad_norm": 1.8163617814990844, "learning_rate": 9.581733097603146e-06, "loss": 0.9219, "step": 1463 }, { "epoch": 0.16, "grad_norm": 1.8811264923395157, "learning_rate": 9.581035718634919e-06, "loss": 0.9903, "step": 1464 }, { "epoch": 0.16, "grad_norm": 1.6829421499951367, "learning_rate": 9.58033778420032e-06, "loss": 0.9204, "step": 1465 }, { "epoch": 0.16, "grad_norm": 1.7351549270581252, "learning_rate": 9.579639294383974e-06, "loss": 0.9297, "step": 1466 }, { "epoch": 0.16, "grad_norm": 1.923461698038161, "learning_rate": 9.578940249270573e-06, "loss": 0.9457, "step": 1467 }, { "epoch": 0.16, "grad_norm": 1.8909901997231626, "learning_rate": 9.578240648944882e-06, "loss": 0.9275, "step": 1468 }, { "epoch": 0.16, "grad_norm": 1.7369715401791137, "learning_rate": 9.577540493491731e-06, "loss": 0.9252, "step": 1469 }, { "epoch": 0.16, "grad_norm": 1.7988155171833973, "learning_rate": 9.576839782996013e-06, "loss": 1.0008, "step": 1470 }, { "epoch": 0.16, "grad_norm": 1.7887788494593262, "learning_rate": 9.576138517542691e-06, "loss": 1.0239, "step": 1471 }, { 
"epoch": 0.16, "grad_norm": 1.807498054208189, "learning_rate": 9.575436697216798e-06, "loss": 0.9261, "step": 1472 }, { "epoch": 0.16, "grad_norm": 1.9799835770722665, "learning_rate": 9.574734322103431e-06, "loss": 0.9492, "step": 1473 }, { "epoch": 0.16, "grad_norm": 1.0846591782420674, "learning_rate": 9.574031392287756e-06, "loss": 1.0683, "step": 1474 }, { "epoch": 0.16, "grad_norm": 1.6984755730529613, "learning_rate": 9.573327907855006e-06, "loss": 0.8705, "step": 1475 }, { "epoch": 0.16, "grad_norm": 1.9126039045040988, "learning_rate": 9.572623868890482e-06, "loss": 0.9198, "step": 1476 }, { "epoch": 0.16, "grad_norm": 1.776189328225635, "learning_rate": 9.571919275479549e-06, "loss": 0.9325, "step": 1477 }, { "epoch": 0.16, "grad_norm": 0.8854589844152346, "learning_rate": 9.57121412770764e-06, "loss": 1.0957, "step": 1478 }, { "epoch": 0.16, "grad_norm": 1.6644984567232182, "learning_rate": 9.570508425660261e-06, "loss": 0.879, "step": 1479 }, { "epoch": 0.16, "grad_norm": 0.8963164863101774, "learning_rate": 9.569802169422978e-06, "loss": 1.0621, "step": 1480 }, { "epoch": 0.16, "grad_norm": 1.6499888132985112, "learning_rate": 9.569095359081426e-06, "loss": 0.9274, "step": 1481 }, { "epoch": 0.16, "grad_norm": 1.775357426320905, "learning_rate": 9.56838799472131e-06, "loss": 0.8879, "step": 1482 }, { "epoch": 0.16, "grad_norm": 1.7619305700154273, "learning_rate": 9.567680076428401e-06, "loss": 0.9135, "step": 1483 }, { "epoch": 0.16, "grad_norm": 0.8658049953436682, "learning_rate": 9.566971604288535e-06, "loss": 1.0586, "step": 1484 }, { "epoch": 0.16, "grad_norm": 0.883498273910403, "learning_rate": 9.566262578387616e-06, "loss": 1.0952, "step": 1485 }, { "epoch": 0.16, "grad_norm": 1.7860736113050653, "learning_rate": 9.56555299881162e-06, "loss": 0.999, "step": 1486 }, { "epoch": 0.16, "grad_norm": 1.788308283411982, "learning_rate": 9.564842865646581e-06, "loss": 1.0242, "step": 1487 }, { "epoch": 0.16, "grad_norm": 1.7915569612269753, "learning_rate": 9.564132178978607e-06, "loss": 0.9776, "step": 1488 }, { "epoch": 0.16, "grad_norm": 1.9244510751017048, "learning_rate": 9.563420938893872e-06, "loss": 0.9698, "step": 1489 }, { "epoch": 0.16, "grad_norm": 1.7809793029634062, "learning_rate": 9.562709145478616e-06, "loss": 0.9303, "step": 1490 }, { "epoch": 0.16, "grad_norm": 1.794132424495073, "learning_rate": 9.561996798819146e-06, "loss": 0.9136, "step": 1491 }, { "epoch": 0.16, "grad_norm": 1.7885808940988799, "learning_rate": 9.561283899001837e-06, "loss": 0.9912, "step": 1492 }, { "epoch": 0.16, "grad_norm": 0.9952579796826119, "learning_rate": 9.560570446113128e-06, "loss": 1.0934, "step": 1493 }, { "epoch": 0.16, "grad_norm": 1.7479208534644994, "learning_rate": 9.559856440239531e-06, "loss": 0.9353, "step": 1494 }, { "epoch": 0.16, "grad_norm": 1.6682315968058588, "learning_rate": 9.55914188146762e-06, "loss": 0.8799, "step": 1495 }, { "epoch": 0.16, "grad_norm": 1.9186530244838425, "learning_rate": 9.558426769884039e-06, "loss": 0.9481, "step": 1496 }, { "epoch": 0.16, "grad_norm": 1.6631815023849306, "learning_rate": 9.557711105575496e-06, "loss": 0.9056, "step": 1497 }, { "epoch": 0.16, "grad_norm": 1.7643626574075302, "learning_rate": 9.55699488862877e-06, "loss": 0.8479, "step": 1498 }, { "epoch": 0.16, "grad_norm": 1.8944318627905647, "learning_rate": 9.556278119130702e-06, "loss": 0.9493, "step": 1499 }, { "epoch": 0.16, "grad_norm": 1.7930574629291882, "learning_rate": 9.555560797168203e-06, "loss": 0.8814, "step": 1500 }, { "epoch": 0.16, "grad_norm": 
1.7956590508579462, "learning_rate": 9.554842922828254e-06, "loss": 0.8887, "step": 1501 }, { "epoch": 0.16, "grad_norm": 1.6233746068238766, "learning_rate": 9.554124496197899e-06, "loss": 0.8853, "step": 1502 }, { "epoch": 0.16, "grad_norm": 1.8621655083509916, "learning_rate": 9.553405517364246e-06, "loss": 0.9964, "step": 1503 }, { "epoch": 0.16, "grad_norm": 0.9785469971424361, "learning_rate": 9.552685986414477e-06, "loss": 1.0724, "step": 1504 }, { "epoch": 0.16, "grad_norm": 1.6539520670444752, "learning_rate": 9.551965903435836e-06, "loss": 0.9225, "step": 1505 }, { "epoch": 0.16, "grad_norm": 1.795626046826408, "learning_rate": 9.551245268515637e-06, "loss": 0.9606, "step": 1506 }, { "epoch": 0.16, "grad_norm": 1.853922286442062, "learning_rate": 9.550524081741256e-06, "loss": 0.9395, "step": 1507 }, { "epoch": 0.16, "grad_norm": 1.7391171053569394, "learning_rate": 9.549802343200146e-06, "loss": 0.9164, "step": 1508 }, { "epoch": 0.16, "grad_norm": 1.6615961641332677, "learning_rate": 9.549080052979814e-06, "loss": 0.9132, "step": 1509 }, { "epoch": 0.16, "grad_norm": 0.9129865662740381, "learning_rate": 9.548357211167842e-06, "loss": 1.0955, "step": 1510 }, { "epoch": 0.16, "grad_norm": 1.7780192844165246, "learning_rate": 9.54763381785188e-06, "loss": 1.0247, "step": 1511 }, { "epoch": 0.16, "grad_norm": 0.8701512239910684, "learning_rate": 9.546909873119637e-06, "loss": 1.0618, "step": 1512 }, { "epoch": 0.16, "grad_norm": 0.825190625487844, "learning_rate": 9.5461853770589e-06, "loss": 1.049, "step": 1513 }, { "epoch": 0.16, "grad_norm": 1.7308811109640356, "learning_rate": 9.545460329757508e-06, "loss": 0.8756, "step": 1514 }, { "epoch": 0.16, "grad_norm": 1.8222613359962494, "learning_rate": 9.544734731303383e-06, "loss": 0.8985, "step": 1515 }, { "epoch": 0.16, "grad_norm": 1.7045491989571553, "learning_rate": 9.544008581784503e-06, "loss": 0.9322, "step": 1516 }, { "epoch": 0.16, "grad_norm": 0.9376396489539779, "learning_rate": 9.543281881288918e-06, "loss": 1.0352, "step": 1517 }, { "epoch": 0.16, "grad_norm": 1.8429203598553712, "learning_rate": 9.542554629904742e-06, "loss": 0.9538, "step": 1518 }, { "epoch": 0.16, "grad_norm": 1.8521445078904208, "learning_rate": 9.541826827720156e-06, "loss": 0.9153, "step": 1519 }, { "epoch": 0.16, "grad_norm": 1.7726482231253609, "learning_rate": 9.541098474823408e-06, "loss": 0.9313, "step": 1520 }, { "epoch": 0.16, "grad_norm": 1.942629681432199, "learning_rate": 9.540369571302816e-06, "loss": 0.9992, "step": 1521 }, { "epoch": 0.16, "grad_norm": 1.7332496241713975, "learning_rate": 9.539640117246758e-06, "loss": 0.8966, "step": 1522 }, { "epoch": 0.16, "grad_norm": 1.7502675788458923, "learning_rate": 9.538910112743689e-06, "loss": 0.9081, "step": 1523 }, { "epoch": 0.16, "grad_norm": 1.8238649915952863, "learning_rate": 9.538179557882119e-06, "loss": 0.8455, "step": 1524 }, { "epoch": 0.16, "grad_norm": 1.6886277968199084, "learning_rate": 9.53744845275063e-06, "loss": 0.8818, "step": 1525 }, { "epoch": 0.16, "grad_norm": 1.735663462504771, "learning_rate": 9.536716797437876e-06, "loss": 1.0018, "step": 1526 }, { "epoch": 0.16, "grad_norm": 1.7631497545731312, "learning_rate": 9.535984592032568e-06, "loss": 0.9073, "step": 1527 }, { "epoch": 0.16, "grad_norm": 1.686686930994495, "learning_rate": 9.535251836623491e-06, "loss": 0.8776, "step": 1528 }, { "epoch": 0.16, "grad_norm": 1.7324681530689945, "learning_rate": 9.534518531299494e-06, "loss": 0.9576, "step": 1529 }, { "epoch": 0.16, "grad_norm": 0.99698055434455, 
"learning_rate": 9.533784676149493e-06, "loss": 1.0729, "step": 1530 }, { "epoch": 0.16, "grad_norm": 0.932345823975987, "learning_rate": 9.533050271262468e-06, "loss": 1.0633, "step": 1531 }, { "epoch": 0.16, "grad_norm": 1.6957282205133997, "learning_rate": 9.53231531672747e-06, "loss": 0.9416, "step": 1532 }, { "epoch": 0.16, "grad_norm": 1.9018591607564523, "learning_rate": 9.531579812633616e-06, "loss": 0.9547, "step": 1533 }, { "epoch": 0.16, "grad_norm": 1.7956514332877205, "learning_rate": 9.530843759070085e-06, "loss": 0.9372, "step": 1534 }, { "epoch": 0.17, "grad_norm": 1.751794833552981, "learning_rate": 9.530107156126129e-06, "loss": 0.9329, "step": 1535 }, { "epoch": 0.17, "grad_norm": 1.8049222621106435, "learning_rate": 9.529370003891063e-06, "loss": 0.9409, "step": 1536 }, { "epoch": 0.17, "grad_norm": 1.8368229853757845, "learning_rate": 9.528632302454267e-06, "loss": 0.9251, "step": 1537 }, { "epoch": 0.17, "grad_norm": 1.7712436479689058, "learning_rate": 9.527894051905194e-06, "loss": 1.0048, "step": 1538 }, { "epoch": 0.17, "grad_norm": 1.6860835177225004, "learning_rate": 9.527155252333357e-06, "loss": 0.9355, "step": 1539 }, { "epoch": 0.17, "grad_norm": 1.818524618931703, "learning_rate": 9.526415903828338e-06, "loss": 0.9591, "step": 1540 }, { "epoch": 0.17, "grad_norm": 1.73315252080244, "learning_rate": 9.525676006479785e-06, "loss": 0.9697, "step": 1541 }, { "epoch": 0.17, "grad_norm": 1.7616940147017521, "learning_rate": 9.524935560377414e-06, "loss": 0.864, "step": 1542 }, { "epoch": 0.17, "grad_norm": 1.855426647962338, "learning_rate": 9.524194565611007e-06, "loss": 0.9304, "step": 1543 }, { "epoch": 0.17, "grad_norm": 1.8273627965257853, "learning_rate": 9.52345302227041e-06, "loss": 1.0417, "step": 1544 }, { "epoch": 0.17, "grad_norm": 1.8129023435010687, "learning_rate": 9.522710930445542e-06, "loss": 0.9342, "step": 1545 }, { "epoch": 0.17, "grad_norm": 1.8393727213013886, "learning_rate": 9.52196829022638e-06, "loss": 0.9886, "step": 1546 }, { "epoch": 0.17, "grad_norm": 1.8010432444774538, "learning_rate": 9.521225101702973e-06, "loss": 0.9397, "step": 1547 }, { "epoch": 0.17, "grad_norm": 1.8917464923998197, "learning_rate": 9.520481364965436e-06, "loss": 0.9216, "step": 1548 }, { "epoch": 0.17, "grad_norm": 1.7180294102427878, "learning_rate": 9.519737080103949e-06, "loss": 0.886, "step": 1549 }, { "epoch": 0.17, "grad_norm": 1.716168255277452, "learning_rate": 9.518992247208757e-06, "loss": 1.0659, "step": 1550 }, { "epoch": 0.17, "grad_norm": 1.4818956791541413, "learning_rate": 9.51824686637018e-06, "loss": 1.0851, "step": 1551 }, { "epoch": 0.17, "grad_norm": 1.7873947719702918, "learning_rate": 9.517500937678591e-06, "loss": 0.8801, "step": 1552 }, { "epoch": 0.17, "grad_norm": 1.8554870371227414, "learning_rate": 9.51675446122444e-06, "loss": 0.9329, "step": 1553 }, { "epoch": 0.17, "grad_norm": 1.8147428582141027, "learning_rate": 9.516007437098238e-06, "loss": 0.8973, "step": 1554 }, { "epoch": 0.17, "grad_norm": 1.7790162313804563, "learning_rate": 9.515259865390565e-06, "loss": 0.8941, "step": 1555 }, { "epoch": 0.17, "grad_norm": 1.8090950689132357, "learning_rate": 9.514511746192067e-06, "loss": 0.9054, "step": 1556 }, { "epoch": 0.17, "grad_norm": 1.834340272516845, "learning_rate": 9.513763079593457e-06, "loss": 0.9553, "step": 1557 }, { "epoch": 0.17, "grad_norm": 1.829696085841028, "learning_rate": 9.513013865685511e-06, "loss": 0.9906, "step": 1558 }, { "epoch": 0.17, "grad_norm": 1.7354323245986263, "learning_rate": 
9.512264104559077e-06, "loss": 0.9353, "step": 1559 }, { "epoch": 0.17, "grad_norm": 1.782968330099768, "learning_rate": 9.511513796305063e-06, "loss": 0.9714, "step": 1560 }, { "epoch": 0.17, "grad_norm": 1.7612703633592532, "learning_rate": 9.510762941014446e-06, "loss": 0.8955, "step": 1561 }, { "epoch": 0.17, "grad_norm": 2.5247018338685168, "learning_rate": 9.510011538778275e-06, "loss": 0.9456, "step": 1562 }, { "epoch": 0.17, "grad_norm": 1.7817367562990276, "learning_rate": 9.509259589687653e-06, "loss": 0.9536, "step": 1563 }, { "epoch": 0.17, "grad_norm": 1.6190247079874307, "learning_rate": 9.50850709383376e-06, "loss": 1.014, "step": 1564 }, { "epoch": 0.17, "grad_norm": 1.278281096942023, "learning_rate": 9.507754051307842e-06, "loss": 1.0577, "step": 1565 }, { "epoch": 0.17, "grad_norm": 1.8701996432779897, "learning_rate": 9.507000462201202e-06, "loss": 1.0803, "step": 1566 }, { "epoch": 0.17, "grad_norm": 0.9105876298342516, "learning_rate": 9.50624632660522e-06, "loss": 1.0066, "step": 1567 }, { "epoch": 0.17, "grad_norm": 1.9314157988698444, "learning_rate": 9.505491644611334e-06, "loss": 0.9443, "step": 1568 }, { "epoch": 0.17, "grad_norm": 1.6952717504623727, "learning_rate": 9.504736416311053e-06, "loss": 0.9094, "step": 1569 }, { "epoch": 0.17, "grad_norm": 1.7325830326206304, "learning_rate": 9.503980641795952e-06, "loss": 0.8888, "step": 1570 }, { "epoch": 0.17, "grad_norm": 1.9293331985245432, "learning_rate": 9.503224321157671e-06, "loss": 0.9527, "step": 1571 }, { "epoch": 0.17, "grad_norm": 1.7845539183150374, "learning_rate": 9.502467454487916e-06, "loss": 0.9819, "step": 1572 }, { "epoch": 0.17, "grad_norm": 1.8699764226903586, "learning_rate": 9.501710041878457e-06, "loss": 0.9186, "step": 1573 }, { "epoch": 0.17, "grad_norm": 1.7201167658587087, "learning_rate": 9.50095208342114e-06, "loss": 0.9955, "step": 1574 }, { "epoch": 0.17, "grad_norm": 1.7885324426374476, "learning_rate": 9.500193579207863e-06, "loss": 0.9664, "step": 1575 }, { "epoch": 0.17, "grad_norm": 1.7543327371272046, "learning_rate": 9.499434529330601e-06, "loss": 0.9355, "step": 1576 }, { "epoch": 0.17, "grad_norm": 1.8054051675052007, "learning_rate": 9.498674933881392e-06, "loss": 0.9407, "step": 1577 }, { "epoch": 0.17, "grad_norm": 1.9167670872810607, "learning_rate": 9.497914792952334e-06, "loss": 0.9918, "step": 1578 }, { "epoch": 0.17, "grad_norm": 1.6363675984794586, "learning_rate": 9.497154106635604e-06, "loss": 0.9507, "step": 1579 }, { "epoch": 0.17, "grad_norm": 1.8280664953722363, "learning_rate": 9.496392875023433e-06, "loss": 0.9117, "step": 1580 }, { "epoch": 0.17, "grad_norm": 1.8179916217237777, "learning_rate": 9.495631098208125e-06, "loss": 0.9088, "step": 1581 }, { "epoch": 0.17, "grad_norm": 1.8249914220542176, "learning_rate": 9.494868776282047e-06, "loss": 0.9137, "step": 1582 }, { "epoch": 0.17, "grad_norm": 1.7461654862068758, "learning_rate": 9.494105909337635e-06, "loss": 0.9636, "step": 1583 }, { "epoch": 0.17, "grad_norm": 1.8669692812582928, "learning_rate": 9.493342497467386e-06, "loss": 0.9348, "step": 1584 }, { "epoch": 0.17, "grad_norm": 1.6526695338393733, "learning_rate": 9.49257854076387e-06, "loss": 0.8929, "step": 1585 }, { "epoch": 0.17, "grad_norm": 1.8097276602422698, "learning_rate": 9.491814039319716e-06, "loss": 0.9531, "step": 1586 }, { "epoch": 0.17, "grad_norm": 1.7848455431796935, "learning_rate": 9.491048993227626e-06, "loss": 0.9157, "step": 1587 }, { "epoch": 0.17, "grad_norm": 1.8571682511734318, "learning_rate": 9.490283402580362e-06, 
"loss": 0.9349, "step": 1588 }, { "epoch": 0.17, "grad_norm": 1.7167589560386909, "learning_rate": 9.489517267470754e-06, "loss": 0.8696, "step": 1589 }, { "epoch": 0.17, "grad_norm": 1.951280991041683, "learning_rate": 9.488750587991701e-06, "loss": 1.0203, "step": 1590 }, { "epoch": 0.17, "grad_norm": 1.7177153904997686, "learning_rate": 9.487983364236163e-06, "loss": 1.0105, "step": 1591 }, { "epoch": 0.17, "grad_norm": 1.8735576881426454, "learning_rate": 9.48721559629717e-06, "loss": 0.8333, "step": 1592 }, { "epoch": 0.17, "grad_norm": 1.7055730593186267, "learning_rate": 9.486447284267816e-06, "loss": 1.0496, "step": 1593 }, { "epoch": 0.17, "grad_norm": 1.6816028860308425, "learning_rate": 9.485678428241264e-06, "loss": 0.9392, "step": 1594 }, { "epoch": 0.17, "grad_norm": 1.7563785817696747, "learning_rate": 9.484909028310735e-06, "loss": 0.9545, "step": 1595 }, { "epoch": 0.17, "grad_norm": 0.9811548929859668, "learning_rate": 9.484139084569526e-06, "loss": 1.0301, "step": 1596 }, { "epoch": 0.17, "grad_norm": 2.0281016942196723, "learning_rate": 9.483368597110993e-06, "loss": 1.0035, "step": 1597 }, { "epoch": 0.17, "grad_norm": 1.067732895678647, "learning_rate": 9.48259756602856e-06, "loss": 1.0671, "step": 1598 }, { "epoch": 0.17, "grad_norm": 1.07628810549085, "learning_rate": 9.48182599141572e-06, "loss": 1.0963, "step": 1599 }, { "epoch": 0.17, "grad_norm": 1.6807206024487984, "learning_rate": 9.481053873366027e-06, "loss": 0.9125, "step": 1600 }, { "epoch": 0.17, "grad_norm": 1.7711055404415776, "learning_rate": 9.480281211973103e-06, "loss": 0.9392, "step": 1601 }, { "epoch": 0.17, "grad_norm": 1.8789025480854984, "learning_rate": 9.479508007330638e-06, "loss": 0.9708, "step": 1602 }, { "epoch": 0.17, "grad_norm": 1.8169271454458398, "learning_rate": 9.478734259532382e-06, "loss": 0.9445, "step": 1603 }, { "epoch": 0.17, "grad_norm": 1.8440287717479045, "learning_rate": 9.477959968672156e-06, "loss": 0.9278, "step": 1604 }, { "epoch": 0.17, "grad_norm": 1.7337011372022615, "learning_rate": 9.477185134843847e-06, "loss": 1.0127, "step": 1605 }, { "epoch": 0.17, "grad_norm": 1.779199996416407, "learning_rate": 9.476409758141404e-06, "loss": 1.0169, "step": 1606 }, { "epoch": 0.17, "grad_norm": 1.626350079092121, "learning_rate": 9.475633838658847e-06, "loss": 0.8917, "step": 1607 }, { "epoch": 0.17, "grad_norm": 1.9024358530279832, "learning_rate": 9.474857376490257e-06, "loss": 1.0131, "step": 1608 }, { "epoch": 0.17, "grad_norm": 1.74767419509368, "learning_rate": 9.474080371729782e-06, "loss": 0.8896, "step": 1609 }, { "epoch": 0.17, "grad_norm": 1.6394150252177642, "learning_rate": 9.473302824471637e-06, "loss": 0.8858, "step": 1610 }, { "epoch": 0.17, "grad_norm": 1.7423654745419432, "learning_rate": 9.472524734810105e-06, "loss": 0.9331, "step": 1611 }, { "epoch": 0.17, "grad_norm": 1.7644524200453582, "learning_rate": 9.471746102839527e-06, "loss": 0.9179, "step": 1612 }, { "epoch": 0.17, "grad_norm": 1.220360376672148, "learning_rate": 9.470966928654321e-06, "loss": 1.0978, "step": 1613 }, { "epoch": 0.17, "grad_norm": 1.8032750110490727, "learning_rate": 9.470187212348958e-06, "loss": 0.9178, "step": 1614 }, { "epoch": 0.17, "grad_norm": 1.8356720451855801, "learning_rate": 9.469406954017985e-06, "loss": 0.9292, "step": 1615 }, { "epoch": 0.17, "grad_norm": 0.9477171034498245, "learning_rate": 9.468626153756011e-06, "loss": 1.1025, "step": 1616 }, { "epoch": 0.17, "grad_norm": 1.862931298075745, "learning_rate": 9.46784481165771e-06, "loss": 1.0104, "step": 1617 
}, { "epoch": 0.17, "grad_norm": 1.768769683658307, "learning_rate": 9.467062927817822e-06, "loss": 0.9502, "step": 1618 }, { "epoch": 0.17, "grad_norm": 0.88976214116001, "learning_rate": 9.466280502331156e-06, "loss": 1.0323, "step": 1619 }, { "epoch": 0.17, "grad_norm": 1.7701421486604245, "learning_rate": 9.465497535292579e-06, "loss": 0.938, "step": 1620 }, { "epoch": 0.17, "grad_norm": 1.765281719228903, "learning_rate": 9.464714026797032e-06, "loss": 0.8698, "step": 1621 }, { "epoch": 0.17, "grad_norm": 1.8079039872934388, "learning_rate": 9.463929976939516e-06, "loss": 0.954, "step": 1622 }, { "epoch": 0.17, "grad_norm": 1.7234135617783135, "learning_rate": 9.463145385815102e-06, "loss": 0.9305, "step": 1623 }, { "epoch": 0.17, "grad_norm": 1.6557501508638002, "learning_rate": 9.462360253518923e-06, "loss": 0.8903, "step": 1624 }, { "epoch": 0.17, "grad_norm": 1.8457024241595237, "learning_rate": 9.461574580146179e-06, "loss": 0.9379, "step": 1625 }, { "epoch": 0.17, "grad_norm": 1.8071902686092984, "learning_rate": 9.460788365792135e-06, "loss": 0.9648, "step": 1626 }, { "epoch": 0.17, "grad_norm": 1.9248373207448017, "learning_rate": 9.460001610552125e-06, "loss": 0.9292, "step": 1627 }, { "epoch": 0.18, "grad_norm": 1.740968994604588, "learning_rate": 9.45921431452154e-06, "loss": 0.9059, "step": 1628 }, { "epoch": 0.18, "grad_norm": 1.8340951487338804, "learning_rate": 9.45842647779585e-06, "loss": 0.9302, "step": 1629 }, { "epoch": 0.18, "grad_norm": 1.7695954559215235, "learning_rate": 9.457638100470577e-06, "loss": 0.8537, "step": 1630 }, { "epoch": 0.18, "grad_norm": 1.086350342743236, "learning_rate": 9.456849182641318e-06, "loss": 1.0556, "step": 1631 }, { "epoch": 0.18, "grad_norm": 1.8687061764855715, "learning_rate": 9.45605972440373e-06, "loss": 0.893, "step": 1632 }, { "epoch": 0.18, "grad_norm": 1.9159875056050855, "learning_rate": 9.455269725853538e-06, "loss": 0.9313, "step": 1633 }, { "epoch": 0.18, "grad_norm": 1.7296572387104225, "learning_rate": 9.45447918708653e-06, "loss": 0.9379, "step": 1634 }, { "epoch": 0.18, "grad_norm": 1.7670363777478117, "learning_rate": 9.453688108198567e-06, "loss": 0.9185, "step": 1635 }, { "epoch": 0.18, "grad_norm": 1.656734510161265, "learning_rate": 9.452896489285563e-06, "loss": 0.9733, "step": 1636 }, { "epoch": 0.18, "grad_norm": 1.7392638484626881, "learning_rate": 9.45210433044351e-06, "loss": 0.976, "step": 1637 }, { "epoch": 0.18, "grad_norm": 1.0073051601870442, "learning_rate": 9.451311631768459e-06, "loss": 1.0418, "step": 1638 }, { "epoch": 0.18, "grad_norm": 1.7278805469677507, "learning_rate": 9.450518393356527e-06, "loss": 0.9823, "step": 1639 }, { "epoch": 0.18, "grad_norm": 1.7944010060562974, "learning_rate": 9.449724615303894e-06, "loss": 0.9177, "step": 1640 }, { "epoch": 0.18, "grad_norm": 1.7058312883371887, "learning_rate": 9.448930297706813e-06, "loss": 0.9326, "step": 1641 }, { "epoch": 0.18, "grad_norm": 1.8196673192989918, "learning_rate": 9.448135440661595e-06, "loss": 0.9506, "step": 1642 }, { "epoch": 0.18, "grad_norm": 1.7291427316863979, "learning_rate": 9.44734004426462e-06, "loss": 0.9078, "step": 1643 }, { "epoch": 0.18, "grad_norm": 1.8403164886108503, "learning_rate": 9.446544108612332e-06, "loss": 0.9225, "step": 1644 }, { "epoch": 0.18, "grad_norm": 1.9127663778519497, "learning_rate": 9.445747633801241e-06, "loss": 0.93, "step": 1645 }, { "epoch": 0.18, "grad_norm": 1.7592111347234174, "learning_rate": 9.444950619927924e-06, "loss": 0.8959, "step": 1646 }, { "epoch": 0.18, "grad_norm": 
1.8022025253155796, "learning_rate": 9.44415306708902e-06, "loss": 0.9831, "step": 1647 }, { "epoch": 0.18, "grad_norm": 1.820025829321247, "learning_rate": 9.443354975381233e-06, "loss": 0.8694, "step": 1648 }, { "epoch": 0.18, "grad_norm": 1.750461400262516, "learning_rate": 9.44255634490134e-06, "loss": 0.8559, "step": 1649 }, { "epoch": 0.18, "grad_norm": 1.830341755116359, "learning_rate": 9.441757175746171e-06, "loss": 0.834, "step": 1650 }, { "epoch": 0.18, "grad_norm": 1.7265623475677363, "learning_rate": 9.440957468012633e-06, "loss": 0.8895, "step": 1651 }, { "epoch": 0.18, "grad_norm": 1.6538500801793754, "learning_rate": 9.440157221797692e-06, "loss": 0.904, "step": 1652 }, { "epoch": 0.18, "grad_norm": 1.8371413480294632, "learning_rate": 9.43935643719838e-06, "loss": 0.8971, "step": 1653 }, { "epoch": 0.18, "grad_norm": 1.8165757942635419, "learning_rate": 9.438555114311796e-06, "loss": 0.9567, "step": 1654 }, { "epoch": 0.18, "grad_norm": 1.81669489268732, "learning_rate": 9.4377532532351e-06, "loss": 0.8942, "step": 1655 }, { "epoch": 0.18, "grad_norm": 1.8451808848962241, "learning_rate": 9.436950854065526e-06, "loss": 0.9541, "step": 1656 }, { "epoch": 0.18, "grad_norm": 1.8458044779466614, "learning_rate": 9.436147916900361e-06, "loss": 0.8467, "step": 1657 }, { "epoch": 0.18, "grad_norm": 1.722324078905247, "learning_rate": 9.43534444183697e-06, "loss": 0.8859, "step": 1658 }, { "epoch": 0.18, "grad_norm": 1.691866988807825, "learning_rate": 9.434540428972772e-06, "loss": 0.9492, "step": 1659 }, { "epoch": 0.18, "grad_norm": 1.7502877887173267, "learning_rate": 9.433735878405262e-06, "loss": 0.8941, "step": 1660 }, { "epoch": 0.18, "grad_norm": 1.7792898371870884, "learning_rate": 9.43293079023199e-06, "loss": 1.025, "step": 1661 }, { "epoch": 0.18, "grad_norm": 1.762664681017269, "learning_rate": 9.432125164550576e-06, "loss": 0.8761, "step": 1662 }, { "epoch": 0.18, "grad_norm": 1.8601389065199223, "learning_rate": 9.431319001458705e-06, "loss": 1.0363, "step": 1663 }, { "epoch": 0.18, "grad_norm": 1.7526931412635853, "learning_rate": 9.43051230105413e-06, "loss": 0.9018, "step": 1664 }, { "epoch": 0.18, "grad_norm": 1.114019706039584, "learning_rate": 9.429705063434664e-06, "loss": 1.0547, "step": 1665 }, { "epoch": 0.18, "grad_norm": 1.8480240255875005, "learning_rate": 9.428897288698189e-06, "loss": 0.9316, "step": 1666 }, { "epoch": 0.18, "grad_norm": 1.635479378220326, "learning_rate": 9.428088976942647e-06, "loss": 0.9115, "step": 1667 }, { "epoch": 0.18, "grad_norm": 1.790600979480228, "learning_rate": 9.427280128266049e-06, "loss": 0.9439, "step": 1668 }, { "epoch": 0.18, "grad_norm": 1.7525994600566974, "learning_rate": 9.426470742766476e-06, "loss": 0.9443, "step": 1669 }, { "epoch": 0.18, "grad_norm": 1.7789619974972335, "learning_rate": 9.425660820542064e-06, "loss": 0.8716, "step": 1670 }, { "epoch": 0.18, "grad_norm": 1.8117298472593428, "learning_rate": 9.424850361691019e-06, "loss": 1.0137, "step": 1671 }, { "epoch": 0.18, "grad_norm": 1.7097333731050373, "learning_rate": 9.424039366311612e-06, "loss": 0.9298, "step": 1672 }, { "epoch": 0.18, "grad_norm": 1.2594893555758384, "learning_rate": 9.423227834502183e-06, "loss": 1.0603, "step": 1673 }, { "epoch": 0.18, "grad_norm": 1.8179521064441009, "learning_rate": 9.422415766361125e-06, "loss": 0.9371, "step": 1674 }, { "epoch": 0.18, "grad_norm": 1.730241451741859, "learning_rate": 9.421603161986912e-06, "loss": 0.9825, "step": 1675 }, { "epoch": 0.18, "grad_norm": 1.8318522423858585, "learning_rate": 
9.420790021478073e-06, "loss": 0.9114, "step": 1676 }, { "epoch": 0.18, "grad_norm": 1.659267656269558, "learning_rate": 9.419976344933201e-06, "loss": 0.9546, "step": 1677 }, { "epoch": 0.18, "grad_norm": 1.9429111508718713, "learning_rate": 9.419162132450961e-06, "loss": 0.9045, "step": 1678 }, { "epoch": 0.18, "grad_norm": 1.7985025022189522, "learning_rate": 9.418347384130076e-06, "loss": 1.0022, "step": 1679 }, { "epoch": 0.18, "grad_norm": 1.7071283665607146, "learning_rate": 9.41753210006934e-06, "loss": 0.9458, "step": 1680 }, { "epoch": 0.18, "grad_norm": 1.7996364207984064, "learning_rate": 9.416716280367605e-06, "loss": 0.9557, "step": 1681 }, { "epoch": 0.18, "grad_norm": 1.851427660718819, "learning_rate": 9.415899925123796e-06, "loss": 0.9572, "step": 1682 }, { "epoch": 0.18, "grad_norm": 1.6162901433304024, "learning_rate": 9.415083034436896e-06, "loss": 0.9134, "step": 1683 }, { "epoch": 0.18, "grad_norm": 1.8304412921571063, "learning_rate": 9.414265608405956e-06, "loss": 1.0524, "step": 1684 }, { "epoch": 0.18, "grad_norm": 1.7312831483848148, "learning_rate": 9.413447647130096e-06, "loss": 0.9604, "step": 1685 }, { "epoch": 0.18, "grad_norm": 1.6867956895027407, "learning_rate": 9.412629150708493e-06, "loss": 0.9964, "step": 1686 }, { "epoch": 0.18, "grad_norm": 1.7260900851908219, "learning_rate": 9.41181011924039e-06, "loss": 0.8596, "step": 1687 }, { "epoch": 0.18, "grad_norm": 1.8338422456172219, "learning_rate": 9.4109905528251e-06, "loss": 0.9942, "step": 1688 }, { "epoch": 0.18, "grad_norm": 6.276983262554703, "learning_rate": 9.410170451562002e-06, "loss": 0.8987, "step": 1689 }, { "epoch": 0.18, "grad_norm": 1.7464955134675695, "learning_rate": 9.40934981555053e-06, "loss": 0.9934, "step": 1690 }, { "epoch": 0.18, "grad_norm": 0.9891399845125455, "learning_rate": 9.408528644890191e-06, "loss": 1.0958, "step": 1691 }, { "epoch": 0.18, "grad_norm": 1.8236482413130668, "learning_rate": 9.407706939680557e-06, "loss": 0.97, "step": 1692 }, { "epoch": 0.18, "grad_norm": 1.956203143244961, "learning_rate": 9.406884700021261e-06, "loss": 0.9836, "step": 1693 }, { "epoch": 0.18, "grad_norm": 1.7783697360440844, "learning_rate": 9.406061926012001e-06, "loss": 0.9919, "step": 1694 }, { "epoch": 0.18, "grad_norm": 1.7980040165228948, "learning_rate": 9.405238617752543e-06, "loss": 0.9264, "step": 1695 }, { "epoch": 0.18, "grad_norm": 1.8977718336965468, "learning_rate": 9.404414775342715e-06, "loss": 0.9411, "step": 1696 }, { "epoch": 0.18, "grad_norm": 1.716048278216333, "learning_rate": 9.403590398882411e-06, "loss": 0.8783, "step": 1697 }, { "epoch": 0.18, "grad_norm": 1.8687999347529645, "learning_rate": 9.402765488471592e-06, "loss": 0.9226, "step": 1698 }, { "epoch": 0.18, "grad_norm": 1.7921786998645939, "learning_rate": 9.401940044210276e-06, "loss": 1.0053, "step": 1699 }, { "epoch": 0.18, "grad_norm": 1.8893247857046835, "learning_rate": 9.401114066198556e-06, "loss": 0.9306, "step": 1700 }, { "epoch": 0.18, "grad_norm": 1.911770251978091, "learning_rate": 9.400287554536582e-06, "loss": 0.9063, "step": 1701 }, { "epoch": 0.18, "grad_norm": 1.7639527171051343, "learning_rate": 9.399460509324571e-06, "loss": 0.947, "step": 1702 }, { "epoch": 0.18, "grad_norm": 1.678642982496663, "learning_rate": 9.398632930662805e-06, "loss": 0.9691, "step": 1703 }, { "epoch": 0.18, "grad_norm": 1.7640254632149746, "learning_rate": 9.397804818651634e-06, "loss": 0.9074, "step": 1704 }, { "epoch": 0.18, "grad_norm": 1.7275336488265538, "learning_rate": 9.396976173391467e-06, "loss": 
0.9465, "step": 1705 }, { "epoch": 0.18, "grad_norm": 1.6870582043790836, "learning_rate": 9.39614699498278e-06, "loss": 0.9308, "step": 1706 }, { "epoch": 0.18, "grad_norm": 1.875282164695613, "learning_rate": 9.395317283526113e-06, "loss": 0.8677, "step": 1707 }, { "epoch": 0.18, "grad_norm": 1.0021100237895224, "learning_rate": 9.394487039122074e-06, "loss": 1.0353, "step": 1708 }, { "epoch": 0.18, "grad_norm": 1.685631021467314, "learning_rate": 9.39365626187133e-06, "loss": 0.9138, "step": 1709 }, { "epoch": 0.18, "grad_norm": 1.6326056023592486, "learning_rate": 9.392824951874618e-06, "loss": 0.9474, "step": 1710 }, { "epoch": 0.18, "grad_norm": 0.8360342479292169, "learning_rate": 9.391993109232735e-06, "loss": 1.0661, "step": 1711 }, { "epoch": 0.18, "grad_norm": 1.7775888298773734, "learning_rate": 9.391160734046548e-06, "loss": 0.8829, "step": 1712 }, { "epoch": 0.18, "grad_norm": 1.7382127354905041, "learning_rate": 9.390327826416981e-06, "loss": 0.956, "step": 1713 }, { "epoch": 0.18, "grad_norm": 0.8915432667767361, "learning_rate": 9.38949438644503e-06, "loss": 1.0573, "step": 1714 }, { "epoch": 0.18, "grad_norm": 1.8271534237491605, "learning_rate": 9.388660414231752e-06, "loss": 0.946, "step": 1715 }, { "epoch": 0.18, "grad_norm": 1.8903463557802382, "learning_rate": 9.387825909878269e-06, "loss": 0.9542, "step": 1716 }, { "epoch": 0.18, "grad_norm": 1.756842681333753, "learning_rate": 9.386990873485767e-06, "loss": 0.9901, "step": 1717 }, { "epoch": 0.18, "grad_norm": 1.7395299761818248, "learning_rate": 9.386155305155498e-06, "loss": 0.8932, "step": 1718 }, { "epoch": 0.18, "grad_norm": 2.007461065323802, "learning_rate": 9.385319204988777e-06, "loss": 0.8891, "step": 1719 }, { "epoch": 0.18, "grad_norm": 1.7450897912687702, "learning_rate": 9.384482573086983e-06, "loss": 0.9365, "step": 1720 }, { "epoch": 0.19, "grad_norm": 1.74741028697665, "learning_rate": 9.383645409551561e-06, "loss": 0.9097, "step": 1721 }, { "epoch": 0.19, "grad_norm": 0.9872578539790238, "learning_rate": 9.382807714484021e-06, "loss": 1.0602, "step": 1722 }, { "epoch": 0.19, "grad_norm": 0.8811157894796312, "learning_rate": 9.381969487985936e-06, "loss": 1.0649, "step": 1723 }, { "epoch": 0.19, "grad_norm": 1.8183150384506244, "learning_rate": 9.381130730158944e-06, "loss": 0.9838, "step": 1724 }, { "epoch": 0.19, "grad_norm": 1.6657369674080504, "learning_rate": 9.380291441104748e-06, "loss": 0.9889, "step": 1725 }, { "epoch": 0.19, "grad_norm": 1.7400245509389738, "learning_rate": 9.379451620925112e-06, "loss": 0.9416, "step": 1726 }, { "epoch": 0.19, "grad_norm": 1.773389312861087, "learning_rate": 9.378611269721867e-06, "loss": 0.9152, "step": 1727 }, { "epoch": 0.19, "grad_norm": 1.7173663822412004, "learning_rate": 9.377770387596911e-06, "loss": 0.954, "step": 1728 }, { "epoch": 0.19, "grad_norm": 1.886290186201423, "learning_rate": 9.376928974652205e-06, "loss": 0.9685, "step": 1729 }, { "epoch": 0.19, "grad_norm": 1.173724170079782, "learning_rate": 9.37608703098977e-06, "loss": 1.0623, "step": 1730 }, { "epoch": 0.19, "grad_norm": 1.7162980369423855, "learning_rate": 9.375244556711696e-06, "loss": 0.8327, "step": 1731 }, { "epoch": 0.19, "grad_norm": 1.818891667608514, "learning_rate": 9.374401551920135e-06, "loss": 0.9076, "step": 1732 }, { "epoch": 0.19, "grad_norm": 1.807792908512656, "learning_rate": 9.373558016717306e-06, "loss": 0.8853, "step": 1733 }, { "epoch": 0.19, "grad_norm": 1.770136249884862, "learning_rate": 9.37271395120549e-06, "loss": 0.9191, "step": 1734 }, { "epoch": 
0.19, "grad_norm": 0.8245802616585581, "learning_rate": 9.37186935548703e-06, "loss": 1.0523, "step": 1735 }, { "epoch": 0.19, "grad_norm": 1.682712444213008, "learning_rate": 9.371024229664342e-06, "loss": 0.8898, "step": 1736 }, { "epoch": 0.19, "grad_norm": 1.9603860104010589, "learning_rate": 9.370178573839894e-06, "loss": 0.9586, "step": 1737 }, { "epoch": 0.19, "grad_norm": 0.8444404000709858, "learning_rate": 9.36933238811623e-06, "loss": 1.0851, "step": 1738 }, { "epoch": 0.19, "grad_norm": 1.7520007827510016, "learning_rate": 9.368485672595949e-06, "loss": 0.9783, "step": 1739 }, { "epoch": 0.19, "grad_norm": 1.8687984153549504, "learning_rate": 9.36763842738172e-06, "loss": 0.9161, "step": 1740 }, { "epoch": 0.19, "grad_norm": 1.785100797770794, "learning_rate": 9.366790652576274e-06, "loss": 0.9118, "step": 1741 }, { "epoch": 0.19, "grad_norm": 1.8303576721024377, "learning_rate": 9.365942348282407e-06, "loss": 0.9912, "step": 1742 }, { "epoch": 0.19, "grad_norm": 0.8976531811747362, "learning_rate": 9.365093514602979e-06, "loss": 1.0925, "step": 1743 }, { "epoch": 0.19, "grad_norm": 1.806749114395195, "learning_rate": 9.364244151640913e-06, "loss": 0.9934, "step": 1744 }, { "epoch": 0.19, "grad_norm": 1.7479843534337158, "learning_rate": 9.363394259499197e-06, "loss": 0.8819, "step": 1745 }, { "epoch": 0.19, "grad_norm": 0.8609222091437427, "learning_rate": 9.362543838280886e-06, "loss": 1.0513, "step": 1746 }, { "epoch": 0.19, "grad_norm": 1.77354517792749, "learning_rate": 9.361692888089094e-06, "loss": 0.9278, "step": 1747 }, { "epoch": 0.19, "grad_norm": 1.79386575789392, "learning_rate": 9.360841409027002e-06, "loss": 0.9378, "step": 1748 }, { "epoch": 0.19, "grad_norm": 1.744791541652462, "learning_rate": 9.359989401197853e-06, "loss": 0.9286, "step": 1749 }, { "epoch": 0.19, "grad_norm": 1.7402018810828206, "learning_rate": 9.359136864704962e-06, "loss": 0.9058, "step": 1750 }, { "epoch": 0.19, "grad_norm": 1.7596778410455853, "learning_rate": 9.358283799651694e-06, "loss": 0.9815, "step": 1751 }, { "epoch": 0.19, "grad_norm": 1.754761807858723, "learning_rate": 9.357430206141492e-06, "loss": 0.9455, "step": 1752 }, { "epoch": 0.19, "grad_norm": 1.7327405049413305, "learning_rate": 9.356576084277856e-06, "loss": 0.9538, "step": 1753 }, { "epoch": 0.19, "grad_norm": 1.8396020467896548, "learning_rate": 9.355721434164349e-06, "loss": 0.92, "step": 1754 }, { "epoch": 0.19, "grad_norm": 1.7726794497885092, "learning_rate": 9.354866255904602e-06, "loss": 0.8798, "step": 1755 }, { "epoch": 0.19, "grad_norm": 1.9095979274070243, "learning_rate": 9.354010549602308e-06, "loss": 0.916, "step": 1756 }, { "epoch": 0.19, "grad_norm": 1.6597055764282813, "learning_rate": 9.353154315361223e-06, "loss": 0.9121, "step": 1757 }, { "epoch": 0.19, "grad_norm": 1.8546264695961023, "learning_rate": 9.352297553285172e-06, "loss": 0.9795, "step": 1758 }, { "epoch": 0.19, "grad_norm": 1.686545908487543, "learning_rate": 9.351440263478036e-06, "loss": 0.9389, "step": 1759 }, { "epoch": 0.19, "grad_norm": 1.685972669062883, "learning_rate": 9.350582446043768e-06, "loss": 0.9533, "step": 1760 }, { "epoch": 0.19, "grad_norm": 1.6002553818014105, "learning_rate": 9.34972410108638e-06, "loss": 0.9161, "step": 1761 }, { "epoch": 0.19, "grad_norm": 1.7821144165092586, "learning_rate": 9.348865228709947e-06, "loss": 0.9715, "step": 1762 }, { "epoch": 0.19, "grad_norm": 1.8242500155584662, "learning_rate": 9.348005829018613e-06, "loss": 0.9391, "step": 1763 }, { "epoch": 0.19, "grad_norm": 
1.8326264201222315, "learning_rate": 9.347145902116583e-06, "loss": 0.9884, "step": 1764 }, { "epoch": 0.19, "grad_norm": 1.6548577983005488, "learning_rate": 9.346285448108125e-06, "loss": 0.9427, "step": 1765 }, { "epoch": 0.19, "grad_norm": 1.9314200512681357, "learning_rate": 9.345424467097572e-06, "loss": 0.9757, "step": 1766 }, { "epoch": 0.19, "grad_norm": 1.650666830597433, "learning_rate": 9.344562959189321e-06, "loss": 0.9408, "step": 1767 }, { "epoch": 0.19, "grad_norm": 1.766967884709704, "learning_rate": 9.343700924487835e-06, "loss": 0.8825, "step": 1768 }, { "epoch": 0.19, "grad_norm": 1.7275370066164668, "learning_rate": 9.342838363097634e-06, "loss": 0.9065, "step": 1769 }, { "epoch": 0.19, "grad_norm": 1.781654308048231, "learning_rate": 9.341975275123314e-06, "loss": 0.8703, "step": 1770 }, { "epoch": 0.19, "grad_norm": 2.0388997801494995, "learning_rate": 9.34111166066952e-06, "loss": 0.9073, "step": 1771 }, { "epoch": 0.19, "grad_norm": 1.9582044532087621, "learning_rate": 9.34024751984097e-06, "loss": 0.9287, "step": 1772 }, { "epoch": 0.19, "grad_norm": 1.8782088926863287, "learning_rate": 9.339382852742447e-06, "loss": 0.9203, "step": 1773 }, { "epoch": 0.19, "grad_norm": 1.7621544218526413, "learning_rate": 9.338517659478792e-06, "loss": 0.9262, "step": 1774 }, { "epoch": 0.19, "grad_norm": 1.8200918598568752, "learning_rate": 9.337651940154914e-06, "loss": 0.9448, "step": 1775 }, { "epoch": 0.19, "grad_norm": 1.7470440485922911, "learning_rate": 9.336785694875785e-06, "loss": 0.9773, "step": 1776 }, { "epoch": 0.19, "grad_norm": 1.5642024307005644, "learning_rate": 9.335918923746438e-06, "loss": 0.8281, "step": 1777 }, { "epoch": 0.19, "grad_norm": 1.7097256380956931, "learning_rate": 9.335051626871973e-06, "loss": 0.9031, "step": 1778 }, { "epoch": 0.19, "grad_norm": 1.697528024305107, "learning_rate": 9.334183804357555e-06, "loss": 0.9291, "step": 1779 }, { "epoch": 0.19, "grad_norm": 1.8407619815860479, "learning_rate": 9.333315456308407e-06, "loss": 0.893, "step": 1780 }, { "epoch": 0.19, "grad_norm": 1.8555201941547579, "learning_rate": 9.332446582829822e-06, "loss": 0.8904, "step": 1781 }, { "epoch": 0.19, "grad_norm": 0.9246445703760912, "learning_rate": 9.33157718402715e-06, "loss": 1.0409, "step": 1782 }, { "epoch": 0.19, "grad_norm": 1.6467760915998826, "learning_rate": 9.330707260005814e-06, "loss": 0.9291, "step": 1783 }, { "epoch": 0.19, "grad_norm": 1.829094922897823, "learning_rate": 9.329836810871291e-06, "loss": 0.9296, "step": 1784 }, { "epoch": 0.19, "grad_norm": 1.695543105295909, "learning_rate": 9.328965836729128e-06, "loss": 0.8795, "step": 1785 }, { "epoch": 0.19, "grad_norm": 1.8156998307582315, "learning_rate": 9.328094337684933e-06, "loss": 0.9188, "step": 1786 }, { "epoch": 0.19, "grad_norm": 1.6837655551636772, "learning_rate": 9.327222313844377e-06, "loss": 0.872, "step": 1787 }, { "epoch": 0.19, "grad_norm": 2.230197384319136, "learning_rate": 9.326349765313199e-06, "loss": 1.007, "step": 1788 }, { "epoch": 0.19, "grad_norm": 0.9701909453835805, "learning_rate": 9.325476692197197e-06, "loss": 1.0777, "step": 1789 }, { "epoch": 0.19, "grad_norm": 1.6385497318681554, "learning_rate": 9.324603094602232e-06, "loss": 0.8702, "step": 1790 }, { "epoch": 0.19, "grad_norm": 1.841278967636332, "learning_rate": 9.323728972634234e-06, "loss": 0.9683, "step": 1791 }, { "epoch": 0.19, "grad_norm": 1.7037357736735774, "learning_rate": 9.322854326399192e-06, "loss": 0.9455, "step": 1792 }, { "epoch": 0.19, "grad_norm": 1.7448140330582516, 
"learning_rate": 9.32197915600316e-06, "loss": 0.9036, "step": 1793 }, { "epoch": 0.19, "grad_norm": 1.7820526916193953, "learning_rate": 9.321103461552254e-06, "loss": 0.919, "step": 1794 }, { "epoch": 0.19, "grad_norm": 1.700915342819063, "learning_rate": 9.320227243152657e-06, "loss": 0.9322, "step": 1795 }, { "epoch": 0.19, "grad_norm": 1.7778023382917663, "learning_rate": 9.319350500910613e-06, "loss": 1.0182, "step": 1796 }, { "epoch": 0.19, "grad_norm": 1.7556500912438093, "learning_rate": 9.318473234932428e-06, "loss": 0.932, "step": 1797 }, { "epoch": 0.19, "grad_norm": 1.7218221285388764, "learning_rate": 9.317595445324477e-06, "loss": 0.8063, "step": 1798 }, { "epoch": 0.19, "grad_norm": 1.8407928481093185, "learning_rate": 9.316717132193193e-06, "loss": 0.9927, "step": 1799 }, { "epoch": 0.19, "grad_norm": 1.9881181254512363, "learning_rate": 9.315838295645074e-06, "loss": 0.9339, "step": 1800 }, { "epoch": 0.19, "grad_norm": 1.7534256507588757, "learning_rate": 9.314958935786684e-06, "loss": 0.9479, "step": 1801 }, { "epoch": 0.19, "grad_norm": 1.7442417586911638, "learning_rate": 9.314079052724645e-06, "loss": 0.93, "step": 1802 }, { "epoch": 0.19, "grad_norm": 1.7582023410301424, "learning_rate": 9.313198646565648e-06, "loss": 0.9448, "step": 1803 }, { "epoch": 0.19, "grad_norm": 1.8034612792690485, "learning_rate": 9.312317717416448e-06, "loss": 0.9446, "step": 1804 }, { "epoch": 0.19, "grad_norm": 1.8207857613822092, "learning_rate": 9.311436265383856e-06, "loss": 0.9265, "step": 1805 }, { "epoch": 0.19, "grad_norm": 1.755879362342169, "learning_rate": 9.310554290574754e-06, "loss": 0.9036, "step": 1806 }, { "epoch": 0.19, "grad_norm": 1.8196916446957612, "learning_rate": 9.309671793096082e-06, "loss": 0.9843, "step": 1807 }, { "epoch": 0.19, "grad_norm": 1.8259028164946325, "learning_rate": 9.308788773054848e-06, "loss": 0.9441, "step": 1808 }, { "epoch": 0.19, "grad_norm": 1.7506992265911372, "learning_rate": 9.307905230558123e-06, "loss": 0.9003, "step": 1809 }, { "epoch": 0.19, "grad_norm": 0.9348390311276369, "learning_rate": 9.307021165713034e-06, "loss": 1.0728, "step": 1810 }, { "epoch": 0.19, "grad_norm": 0.8950757941280718, "learning_rate": 9.30613657862678e-06, "loss": 1.089, "step": 1811 }, { "epoch": 0.19, "grad_norm": 1.7776572878461376, "learning_rate": 9.305251469406621e-06, "loss": 0.8855, "step": 1812 }, { "epoch": 0.19, "grad_norm": 1.7863774216540826, "learning_rate": 9.304365838159882e-06, "loss": 0.9823, "step": 1813 }, { "epoch": 0.2, "grad_norm": 1.552522256098939, "learning_rate": 9.303479684993943e-06, "loss": 0.9288, "step": 1814 }, { "epoch": 0.2, "grad_norm": 1.8666547722967624, "learning_rate": 9.302593010016255e-06, "loss": 0.9639, "step": 1815 }, { "epoch": 0.2, "grad_norm": 1.7985689090779444, "learning_rate": 9.301705813334332e-06, "loss": 0.8416, "step": 1816 }, { "epoch": 0.2, "grad_norm": 1.7903888698289745, "learning_rate": 9.30081809505575e-06, "loss": 0.9327, "step": 1817 }, { "epoch": 0.2, "grad_norm": 1.7064702630194581, "learning_rate": 9.299929855288145e-06, "loss": 0.8514, "step": 1818 }, { "epoch": 0.2, "grad_norm": 1.746260069970438, "learning_rate": 9.299041094139222e-06, "loss": 0.9537, "step": 1819 }, { "epoch": 0.2, "grad_norm": 1.7040369892385567, "learning_rate": 9.298151811716745e-06, "loss": 0.9251, "step": 1820 }, { "epoch": 0.2, "grad_norm": 1.7503350095215338, "learning_rate": 9.297262008128544e-06, "loss": 0.9434, "step": 1821 }, { "epoch": 0.2, "grad_norm": 1.824164418442658, "learning_rate": 
9.296371683482508e-06, "loss": 0.9574, "step": 1822 }, { "epoch": 0.2, "grad_norm": 1.604268613892651, "learning_rate": 9.295480837886595e-06, "loss": 0.9298, "step": 1823 }, { "epoch": 0.2, "grad_norm": 1.7468451005464873, "learning_rate": 9.294589471448819e-06, "loss": 0.9784, "step": 1824 }, { "epoch": 0.2, "grad_norm": 1.7733493999230137, "learning_rate": 9.293697584277266e-06, "loss": 0.8951, "step": 1825 }, { "epoch": 0.2, "grad_norm": 1.6832605816026116, "learning_rate": 9.292805176480077e-06, "loss": 0.987, "step": 1826 }, { "epoch": 0.2, "grad_norm": 1.877231066029553, "learning_rate": 9.291912248165462e-06, "loss": 0.9943, "step": 1827 }, { "epoch": 0.2, "grad_norm": 1.7085726284566523, "learning_rate": 9.291018799441692e-06, "loss": 0.9338, "step": 1828 }, { "epoch": 0.2, "grad_norm": 1.7256740659041032, "learning_rate": 9.290124830417097e-06, "loss": 0.9365, "step": 1829 }, { "epoch": 0.2, "grad_norm": 2.538575987113729, "learning_rate": 9.289230341200075e-06, "loss": 0.9307, "step": 1830 }, { "epoch": 0.2, "grad_norm": 1.3475312689214456, "learning_rate": 9.288335331899089e-06, "loss": 1.0863, "step": 1831 }, { "epoch": 0.2, "grad_norm": 1.8768824636934855, "learning_rate": 9.28743980262266e-06, "loss": 1.0205, "step": 1832 }, { "epoch": 0.2, "grad_norm": 1.9390586983641143, "learning_rate": 9.286543753479372e-06, "loss": 0.9533, "step": 1833 }, { "epoch": 0.2, "grad_norm": 1.7434974476243281, "learning_rate": 9.285647184577878e-06, "loss": 0.9595, "step": 1834 }, { "epoch": 0.2, "grad_norm": 1.7208624336344924, "learning_rate": 9.284750096026886e-06, "loss": 0.9126, "step": 1835 }, { "epoch": 0.2, "grad_norm": 1.847226770154086, "learning_rate": 9.283852487935174e-06, "loss": 0.8367, "step": 1836 }, { "epoch": 0.2, "grad_norm": 2.0007329963835727, "learning_rate": 9.282954360411579e-06, "loss": 0.9466, "step": 1837 }, { "epoch": 0.2, "grad_norm": 1.8307194966465414, "learning_rate": 9.282055713565e-06, "loss": 0.9192, "step": 1838 }, { "epoch": 0.2, "grad_norm": 1.6531047767734504, "learning_rate": 9.28115654750441e-06, "loss": 0.9956, "step": 1839 }, { "epoch": 0.2, "grad_norm": 1.7991623053045893, "learning_rate": 9.280256862338822e-06, "loss": 0.8799, "step": 1840 }, { "epoch": 0.2, "grad_norm": 1.8347461879243223, "learning_rate": 9.279356658177337e-06, "loss": 0.9448, "step": 1841 }, { "epoch": 0.2, "grad_norm": 1.0182955352522247, "learning_rate": 9.278455935129103e-06, "loss": 1.0577, "step": 1842 }, { "epoch": 0.2, "grad_norm": 1.7375096391564306, "learning_rate": 9.277554693303337e-06, "loss": 0.9574, "step": 1843 }, { "epoch": 0.2, "grad_norm": 1.838341923274779, "learning_rate": 9.276652932809315e-06, "loss": 0.9177, "step": 1844 }, { "epoch": 0.2, "grad_norm": 1.8903074648011653, "learning_rate": 9.275750653756385e-06, "loss": 1.0053, "step": 1845 }, { "epoch": 0.2, "grad_norm": 1.738829040351453, "learning_rate": 9.274847856253946e-06, "loss": 0.953, "step": 1846 }, { "epoch": 0.2, "grad_norm": 1.8822018117669301, "learning_rate": 9.273944540411465e-06, "loss": 0.9243, "step": 1847 }, { "epoch": 0.2, "grad_norm": 1.6463768866952586, "learning_rate": 9.273040706338476e-06, "loss": 0.9678, "step": 1848 }, { "epoch": 0.2, "grad_norm": 1.769238046477046, "learning_rate": 9.272136354144569e-06, "loss": 0.9719, "step": 1849 }, { "epoch": 0.2, "grad_norm": 1.7172992350183847, "learning_rate": 9.271231483939403e-06, "loss": 0.9165, "step": 1850 }, { "epoch": 0.2, "grad_norm": 2.021291352187464, "learning_rate": 9.27032609583269e-06, "loss": 0.9904, "step": 1851 }, { 
"epoch": 0.2, "grad_norm": 1.7460976182349062, "learning_rate": 9.269420189934219e-06, "loss": 1.016, "step": 1852 }, { "epoch": 0.2, "grad_norm": 1.8008142021154498, "learning_rate": 9.26851376635383e-06, "loss": 0.9852, "step": 1853 }, { "epoch": 0.2, "grad_norm": 1.7223077810668341, "learning_rate": 9.267606825201433e-06, "loss": 0.9899, "step": 1854 }, { "epoch": 0.2, "grad_norm": 1.7058910764921766, "learning_rate": 9.266699366586992e-06, "loss": 0.9553, "step": 1855 }, { "epoch": 0.2, "grad_norm": 1.7452840246724481, "learning_rate": 9.265791390620547e-06, "loss": 0.9465, "step": 1856 }, { "epoch": 0.2, "grad_norm": 2.3819751045892827, "learning_rate": 9.26488289741219e-06, "loss": 0.8812, "step": 1857 }, { "epoch": 0.2, "grad_norm": 1.8648699474764825, "learning_rate": 9.263973887072074e-06, "loss": 0.9226, "step": 1858 }, { "epoch": 0.2, "grad_norm": 1.7958055821328507, "learning_rate": 9.263064359710428e-06, "loss": 0.872, "step": 1859 }, { "epoch": 0.2, "grad_norm": 1.0999266959027558, "learning_rate": 9.262154315437528e-06, "loss": 1.0782, "step": 1860 }, { "epoch": 0.2, "grad_norm": 1.8704991863521319, "learning_rate": 9.261243754363726e-06, "loss": 0.9617, "step": 1861 }, { "epoch": 0.2, "grad_norm": 0.8873386507933301, "learning_rate": 9.260332676599426e-06, "loss": 1.1003, "step": 1862 }, { "epoch": 0.2, "grad_norm": 1.8304825682040373, "learning_rate": 9.259421082255104e-06, "loss": 0.9339, "step": 1863 }, { "epoch": 0.2, "grad_norm": 1.8233412860484728, "learning_rate": 9.258508971441289e-06, "loss": 0.9645, "step": 1864 }, { "epoch": 0.2, "grad_norm": 1.801967920479368, "learning_rate": 9.25759634426858e-06, "loss": 0.8628, "step": 1865 }, { "epoch": 0.2, "grad_norm": 1.816464119954529, "learning_rate": 9.256683200847638e-06, "loss": 0.9329, "step": 1866 }, { "epoch": 0.2, "grad_norm": 1.7761026302078886, "learning_rate": 9.255769541289182e-06, "loss": 0.9358, "step": 1867 }, { "epoch": 0.2, "grad_norm": 2.009542918545504, "learning_rate": 9.254855365703997e-06, "loss": 0.9197, "step": 1868 }, { "epoch": 0.2, "grad_norm": 1.7656313833329986, "learning_rate": 9.253940674202931e-06, "loss": 0.9428, "step": 1869 }, { "epoch": 0.2, "grad_norm": 1.8082558098668886, "learning_rate": 9.25302546689689e-06, "loss": 0.9944, "step": 1870 }, { "epoch": 0.2, "grad_norm": 1.761377002250604, "learning_rate": 9.25210974389685e-06, "loss": 0.9599, "step": 1871 }, { "epoch": 0.2, "grad_norm": 1.6759222998936374, "learning_rate": 9.251193505313845e-06, "loss": 0.9614, "step": 1872 }, { "epoch": 0.2, "grad_norm": 1.6489028487714614, "learning_rate": 9.250276751258972e-06, "loss": 0.9479, "step": 1873 }, { "epoch": 0.2, "grad_norm": 1.815609536416996, "learning_rate": 9.249359481843389e-06, "loss": 0.9816, "step": 1874 }, { "epoch": 0.2, "grad_norm": 1.959187744583045, "learning_rate": 9.24844169717832e-06, "loss": 0.9655, "step": 1875 }, { "epoch": 0.2, "grad_norm": 2.0772192460809054, "learning_rate": 9.247523397375047e-06, "loss": 1.0347, "step": 1876 }, { "epoch": 0.2, "grad_norm": 1.802144048816886, "learning_rate": 9.24660458254492e-06, "loss": 0.9378, "step": 1877 }, { "epoch": 0.2, "grad_norm": 1.4350032285232037, "learning_rate": 9.245685252799346e-06, "loss": 1.0747, "step": 1878 }, { "epoch": 0.2, "grad_norm": 2.1314340863301897, "learning_rate": 9.244765408249798e-06, "loss": 0.9392, "step": 1879 }, { "epoch": 0.2, "grad_norm": 1.754997241278803, "learning_rate": 9.243845049007811e-06, "loss": 1.0273, "step": 1880 }, { "epoch": 0.2, "grad_norm": 1.7184841740951815, 
"learning_rate": 9.242924175184982e-06, "loss": 0.9322, "step": 1881 }, { "epoch": 0.2, "grad_norm": 1.8224043722200962, "learning_rate": 9.242002786892967e-06, "loss": 0.9573, "step": 1882 }, { "epoch": 0.2, "grad_norm": 1.727080341621473, "learning_rate": 9.241080884243492e-06, "loss": 0.9657, "step": 1883 }, { "epoch": 0.2, "grad_norm": 1.739393047694169, "learning_rate": 9.240158467348337e-06, "loss": 0.9188, "step": 1884 }, { "epoch": 0.2, "grad_norm": 1.9086289176822075, "learning_rate": 9.239235536319351e-06, "loss": 0.9648, "step": 1885 }, { "epoch": 0.2, "grad_norm": 1.1104797586184179, "learning_rate": 9.23831209126844e-06, "loss": 1.0312, "step": 1886 }, { "epoch": 0.2, "grad_norm": 1.7186667571263123, "learning_rate": 9.237388132307576e-06, "loss": 0.9911, "step": 1887 }, { "epoch": 0.2, "grad_norm": 0.9059308660291685, "learning_rate": 9.236463659548793e-06, "loss": 1.0682, "step": 1888 }, { "epoch": 0.2, "grad_norm": 1.8484201095349662, "learning_rate": 9.235538673104187e-06, "loss": 0.9949, "step": 1889 }, { "epoch": 0.2, "grad_norm": 1.712884427017372, "learning_rate": 9.234613173085913e-06, "loss": 0.9316, "step": 1890 }, { "epoch": 0.2, "grad_norm": 1.7710726546352922, "learning_rate": 9.233687159606195e-06, "loss": 0.9493, "step": 1891 }, { "epoch": 0.2, "grad_norm": 1.7899813051033093, "learning_rate": 9.232760632777311e-06, "loss": 0.9703, "step": 1892 }, { "epoch": 0.2, "grad_norm": 1.7220805068269962, "learning_rate": 9.23183359271161e-06, "loss": 0.9483, "step": 1893 }, { "epoch": 0.2, "grad_norm": 1.2041934668183552, "learning_rate": 9.230906039521495e-06, "loss": 1.0683, "step": 1894 }, { "epoch": 0.2, "grad_norm": 1.800817264327702, "learning_rate": 9.229977973319436e-06, "loss": 0.9755, "step": 1895 }, { "epoch": 0.2, "grad_norm": 1.7557702104161894, "learning_rate": 9.229049394217966e-06, "loss": 0.935, "step": 1896 }, { "epoch": 0.2, "grad_norm": 1.7523909232200132, "learning_rate": 9.228120302329678e-06, "loss": 0.995, "step": 1897 }, { "epoch": 0.2, "grad_norm": 1.7341058691265612, "learning_rate": 9.227190697767224e-06, "loss": 0.9823, "step": 1898 }, { "epoch": 0.2, "grad_norm": 1.8093974763259668, "learning_rate": 9.226260580643327e-06, "loss": 1.0165, "step": 1899 }, { "epoch": 0.2, "grad_norm": 1.701566868782907, "learning_rate": 9.225329951070762e-06, "loss": 0.9336, "step": 1900 }, { "epoch": 0.2, "grad_norm": 1.7643830016985842, "learning_rate": 9.224398809162376e-06, "loss": 0.9272, "step": 1901 }, { "epoch": 0.2, "grad_norm": 1.7193677240663048, "learning_rate": 9.22346715503107e-06, "loss": 0.9115, "step": 1902 }, { "epoch": 0.2, "grad_norm": 1.6523688576396802, "learning_rate": 9.222534988789811e-06, "loss": 1.0473, "step": 1903 }, { "epoch": 0.2, "grad_norm": 1.8254339957045995, "learning_rate": 9.221602310551627e-06, "loss": 0.9849, "step": 1904 }, { "epoch": 0.2, "grad_norm": 1.8164326724473965, "learning_rate": 9.220669120429608e-06, "loss": 1.0118, "step": 1905 }, { "epoch": 0.2, "grad_norm": 1.6952945644758017, "learning_rate": 9.219735418536907e-06, "loss": 0.9142, "step": 1906 }, { "epoch": 0.21, "grad_norm": 1.8127470274743847, "learning_rate": 9.21880120498674e-06, "loss": 0.9711, "step": 1907 }, { "epoch": 0.21, "grad_norm": 1.7777075350040212, "learning_rate": 9.217866479892383e-06, "loss": 0.952, "step": 1908 }, { "epoch": 0.21, "grad_norm": 1.8675985061156481, "learning_rate": 9.216931243367173e-06, "loss": 1.0273, "step": 1909 }, { "epoch": 0.21, "grad_norm": 1.7844054403162997, "learning_rate": 9.215995495524512e-06, "loss": 
0.8971, "step": 1910 }, { "epoch": 0.21, "grad_norm": 0.9795868481997869, "learning_rate": 9.215059236477864e-06, "loss": 1.0587, "step": 1911 }, { "epoch": 0.21, "grad_norm": 1.7062858728779509, "learning_rate": 9.21412246634075e-06, "loss": 0.8878, "step": 1912 }, { "epoch": 0.21, "grad_norm": 2.253490183547763, "learning_rate": 9.21318518522676e-06, "loss": 0.9562, "step": 1913 }, { "epoch": 0.21, "grad_norm": 1.6131464294757167, "learning_rate": 9.21224739324954e-06, "loss": 0.9003, "step": 1914 }, { "epoch": 0.21, "grad_norm": 1.8289091336103582, "learning_rate": 9.211309090522803e-06, "loss": 0.9115, "step": 1915 }, { "epoch": 0.21, "grad_norm": 1.7645742491245373, "learning_rate": 9.21037027716032e-06, "loss": 0.8872, "step": 1916 }, { "epoch": 0.21, "grad_norm": 1.6715217605712385, "learning_rate": 9.209430953275925e-06, "loss": 0.9059, "step": 1917 }, { "epoch": 0.21, "grad_norm": 1.7572363930732027, "learning_rate": 9.208491118983515e-06, "loss": 0.9447, "step": 1918 }, { "epoch": 0.21, "grad_norm": 1.7309616359720637, "learning_rate": 9.207550774397048e-06, "loss": 0.9098, "step": 1919 }, { "epoch": 0.21, "grad_norm": 1.6613283057277448, "learning_rate": 9.206609919630543e-06, "loss": 0.8194, "step": 1920 }, { "epoch": 0.21, "grad_norm": 1.820017003936623, "learning_rate": 9.205668554798084e-06, "loss": 0.9482, "step": 1921 }, { "epoch": 0.21, "grad_norm": 1.7141743574117203, "learning_rate": 9.204726680013813e-06, "loss": 0.9112, "step": 1922 }, { "epoch": 0.21, "grad_norm": 1.7079502598769296, "learning_rate": 9.203784295391938e-06, "loss": 0.955, "step": 1923 }, { "epoch": 0.21, "grad_norm": 1.7130405763889902, "learning_rate": 9.202841401046722e-06, "loss": 0.92, "step": 1924 }, { "epoch": 0.21, "grad_norm": 1.8284579852362663, "learning_rate": 9.201897997092497e-06, "loss": 1.0011, "step": 1925 }, { "epoch": 0.21, "grad_norm": 0.9640964629548258, "learning_rate": 9.200954083643654e-06, "loss": 1.0599, "step": 1926 }, { "epoch": 0.21, "grad_norm": 1.8076322487427539, "learning_rate": 9.200009660814646e-06, "loss": 0.9872, "step": 1927 }, { "epoch": 0.21, "grad_norm": 1.7460418256181771, "learning_rate": 9.199064728719988e-06, "loss": 0.9384, "step": 1928 }, { "epoch": 0.21, "grad_norm": 1.6510572174913778, "learning_rate": 9.198119287474254e-06, "loss": 0.9243, "step": 1929 }, { "epoch": 0.21, "grad_norm": 1.7380002672061294, "learning_rate": 9.197173337192082e-06, "loss": 1.0062, "step": 1930 }, { "epoch": 0.21, "grad_norm": 2.065750187065165, "learning_rate": 9.196226877988174e-06, "loss": 0.9388, "step": 1931 }, { "epoch": 0.21, "grad_norm": 1.799031228706279, "learning_rate": 9.195279909977293e-06, "loss": 0.8353, "step": 1932 }, { "epoch": 0.21, "grad_norm": 1.7871456475913279, "learning_rate": 9.194332433274256e-06, "loss": 0.8984, "step": 1933 }, { "epoch": 0.21, "grad_norm": 1.6330175217381413, "learning_rate": 9.193384447993954e-06, "loss": 0.8531, "step": 1934 }, { "epoch": 0.21, "grad_norm": 1.7079918150996587, "learning_rate": 9.192435954251329e-06, "loss": 0.9251, "step": 1935 }, { "epoch": 0.21, "grad_norm": 1.8067540213974942, "learning_rate": 9.191486952161393e-06, "loss": 0.8999, "step": 1936 }, { "epoch": 0.21, "grad_norm": 1.7074488265497858, "learning_rate": 9.190537441839212e-06, "loss": 0.8949, "step": 1937 }, { "epoch": 0.21, "grad_norm": 1.6080487497123068, "learning_rate": 9.189587423399919e-06, "loss": 0.9929, "step": 1938 }, { "epoch": 0.21, "grad_norm": 1.6855637728507937, "learning_rate": 9.188636896958708e-06, "loss": 0.9412, "step": 1939 }, { 
"epoch": 0.21, "grad_norm": 1.6811233933981744, "learning_rate": 9.187685862630833e-06, "loss": 0.9483, "step": 1940 }, { "epoch": 0.21, "grad_norm": 1.83258543017693, "learning_rate": 9.18673432053161e-06, "loss": 0.9655, "step": 1941 }, { "epoch": 0.21, "grad_norm": 1.6355337096686275, "learning_rate": 9.185782270776416e-06, "loss": 0.964, "step": 1942 }, { "epoch": 0.21, "grad_norm": 1.7439263912013652, "learning_rate": 9.184829713480691e-06, "loss": 1.0006, "step": 1943 }, { "epoch": 0.21, "grad_norm": 1.753830070938035, "learning_rate": 9.183876648759937e-06, "loss": 0.9466, "step": 1944 }, { "epoch": 0.21, "grad_norm": 1.7433409829098994, "learning_rate": 9.182923076729716e-06, "loss": 0.9783, "step": 1945 }, { "epoch": 0.21, "grad_norm": 1.8154363994905607, "learning_rate": 9.181968997505649e-06, "loss": 0.9937, "step": 1946 }, { "epoch": 0.21, "grad_norm": 1.7301117743495247, "learning_rate": 9.181014411203426e-06, "loss": 0.8778, "step": 1947 }, { "epoch": 0.21, "grad_norm": 1.7462066214556098, "learning_rate": 9.18005931793879e-06, "loss": 0.8916, "step": 1948 }, { "epoch": 0.21, "grad_norm": 1.7346272739587119, "learning_rate": 9.179103717827551e-06, "loss": 0.886, "step": 1949 }, { "epoch": 0.21, "grad_norm": 1.8157250308424318, "learning_rate": 9.178147610985578e-06, "loss": 0.8509, "step": 1950 }, { "epoch": 0.21, "grad_norm": 1.907719918967169, "learning_rate": 9.177190997528805e-06, "loss": 0.8829, "step": 1951 }, { "epoch": 0.21, "grad_norm": 2.070732855239176, "learning_rate": 9.176233877573219e-06, "loss": 0.9263, "step": 1952 }, { "epoch": 0.21, "grad_norm": 1.881909811871556, "learning_rate": 9.175276251234881e-06, "loss": 0.8981, "step": 1953 }, { "epoch": 0.21, "grad_norm": 2.2023823781281506, "learning_rate": 9.1743181186299e-06, "loss": 0.9203, "step": 1954 }, { "epoch": 0.21, "grad_norm": 1.8743617936782138, "learning_rate": 9.17335947987446e-06, "loss": 0.874, "step": 1955 }, { "epoch": 0.21, "grad_norm": 1.6858633318633545, "learning_rate": 9.172400335084793e-06, "loss": 0.9295, "step": 1956 }, { "epoch": 0.21, "grad_norm": 1.8299749008338706, "learning_rate": 9.171440684377201e-06, "loss": 0.906, "step": 1957 }, { "epoch": 0.21, "grad_norm": 1.8708404177649354, "learning_rate": 9.170480527868046e-06, "loss": 0.9131, "step": 1958 }, { "epoch": 0.21, "grad_norm": 1.7831789643063127, "learning_rate": 9.169519865673748e-06, "loss": 0.9699, "step": 1959 }, { "epoch": 0.21, "grad_norm": 1.6263784491331625, "learning_rate": 9.168558697910792e-06, "loss": 0.9357, "step": 1960 }, { "epoch": 0.21, "grad_norm": 1.8531462836832402, "learning_rate": 9.167597024695722e-06, "loss": 0.973, "step": 1961 }, { "epoch": 0.21, "grad_norm": 1.7458217831272926, "learning_rate": 9.166634846145146e-06, "loss": 0.9441, "step": 1962 }, { "epoch": 0.21, "grad_norm": 1.8338101350552134, "learning_rate": 9.165672162375731e-06, "loss": 0.8176, "step": 1963 }, { "epoch": 0.21, "grad_norm": 1.7833876917242943, "learning_rate": 9.164708973504203e-06, "loss": 0.9604, "step": 1964 }, { "epoch": 0.21, "grad_norm": 1.744128167259554, "learning_rate": 9.163745279647356e-06, "loss": 0.8866, "step": 1965 }, { "epoch": 0.21, "grad_norm": 1.0425432478350012, "learning_rate": 9.162781080922039e-06, "loss": 1.0636, "step": 1966 }, { "epoch": 0.21, "grad_norm": 1.741259031907156, "learning_rate": 9.161816377445164e-06, "loss": 0.9246, "step": 1967 }, { "epoch": 0.21, "grad_norm": 1.8325636565956467, "learning_rate": 9.160851169333704e-06, "loss": 0.9681, "step": 1968 }, { "epoch": 0.21, "grad_norm": 
1.8282740156943096, "learning_rate": 9.159885456704696e-06, "loss": 0.9145, "step": 1969 }, { "epoch": 0.21, "grad_norm": 0.8454847036119734, "learning_rate": 9.158919239675237e-06, "loss": 1.0265, "step": 1970 }, { "epoch": 0.21, "grad_norm": 1.8524124044119996, "learning_rate": 9.157952518362478e-06, "loss": 0.9422, "step": 1971 }, { "epoch": 0.21, "grad_norm": 1.7076001566816923, "learning_rate": 9.156985292883645e-06, "loss": 0.9501, "step": 1972 }, { "epoch": 0.21, "grad_norm": 1.8150945945841483, "learning_rate": 9.156017563356013e-06, "loss": 0.9645, "step": 1973 }, { "epoch": 0.21, "grad_norm": 1.8068897688800742, "learning_rate": 9.155049329896924e-06, "loss": 0.8709, "step": 1974 }, { "epoch": 0.21, "grad_norm": 1.7240076575381553, "learning_rate": 9.154080592623777e-06, "loss": 0.8868, "step": 1975 }, { "epoch": 0.21, "grad_norm": 1.7820911229507752, "learning_rate": 9.15311135165404e-06, "loss": 0.9454, "step": 1976 }, { "epoch": 0.21, "grad_norm": 1.8046001686002016, "learning_rate": 9.152141607105231e-06, "loss": 1.0538, "step": 1977 }, { "epoch": 0.21, "grad_norm": 1.7880007130694058, "learning_rate": 9.15117135909494e-06, "loss": 0.9394, "step": 1978 }, { "epoch": 0.21, "grad_norm": 1.176333230222553, "learning_rate": 9.15020060774081e-06, "loss": 1.0574, "step": 1979 }, { "epoch": 0.21, "grad_norm": 1.7154907261379015, "learning_rate": 9.149229353160546e-06, "loss": 0.9249, "step": 1980 }, { "epoch": 0.21, "grad_norm": 1.711133456808274, "learning_rate": 9.14825759547192e-06, "loss": 0.9297, "step": 1981 }, { "epoch": 0.21, "grad_norm": 1.7338962056504033, "learning_rate": 9.14728533479276e-06, "loss": 0.8757, "step": 1982 }, { "epoch": 0.21, "grad_norm": 0.8605516292058175, "learning_rate": 9.146312571240955e-06, "loss": 1.016, "step": 1983 }, { "epoch": 0.21, "grad_norm": 1.9102413862835839, "learning_rate": 9.145339304934453e-06, "loss": 0.9666, "step": 1984 }, { "epoch": 0.21, "grad_norm": 1.6929354084248291, "learning_rate": 9.144365535991273e-06, "loss": 0.9328, "step": 1985 }, { "epoch": 0.21, "grad_norm": 1.8152299667650476, "learning_rate": 9.143391264529482e-06, "loss": 1.0348, "step": 1986 }, { "epoch": 0.21, "grad_norm": 1.7466243149743401, "learning_rate": 9.142416490667217e-06, "loss": 0.8595, "step": 1987 }, { "epoch": 0.21, "grad_norm": 1.8326824901628382, "learning_rate": 9.14144121452267e-06, "loss": 0.9404, "step": 1988 }, { "epoch": 0.21, "grad_norm": 1.7347266573221758, "learning_rate": 9.1404654362141e-06, "loss": 0.911, "step": 1989 }, { "epoch": 0.21, "grad_norm": 1.6922171787383444, "learning_rate": 9.13948915585982e-06, "loss": 0.9196, "step": 1990 }, { "epoch": 0.21, "grad_norm": 1.7867279211598206, "learning_rate": 9.13851237357821e-06, "loss": 0.9403, "step": 1991 }, { "epoch": 0.21, "grad_norm": 0.9622757548302667, "learning_rate": 9.137535089487705e-06, "loss": 1.08, "step": 1992 }, { "epoch": 0.21, "grad_norm": 1.794744899221232, "learning_rate": 9.13655730370681e-06, "loss": 0.9066, "step": 1993 }, { "epoch": 0.21, "grad_norm": 1.7114966031589056, "learning_rate": 9.135579016354077e-06, "loss": 0.9057, "step": 1994 }, { "epoch": 0.21, "grad_norm": 1.783678781052789, "learning_rate": 9.134600227548133e-06, "loss": 0.9339, "step": 1995 }, { "epoch": 0.21, "grad_norm": 0.8616079528022769, "learning_rate": 9.133620937407656e-06, "loss": 1.0765, "step": 1996 }, { "epoch": 0.21, "grad_norm": 1.806231891121564, "learning_rate": 9.13264114605139e-06, "loss": 0.9005, "step": 1997 }, { "epoch": 0.21, "grad_norm": 1.6599378469707955, "learning_rate": 
9.131660853598139e-06, "loss": 0.9987, "step": 1998 }, { "epoch": 0.21, "grad_norm": 1.8407034843126884, "learning_rate": 9.130680060166764e-06, "loss": 0.9262, "step": 1999 }, { "epoch": 0.22, "grad_norm": 1.7616703572122319, "learning_rate": 9.129698765876191e-06, "loss": 0.9467, "step": 2000 }, { "epoch": 0.22, "grad_norm": 1.7704948726994363, "learning_rate": 9.128716970845407e-06, "loss": 0.9364, "step": 2001 }, { "epoch": 0.22, "grad_norm": 1.6724763735565322, "learning_rate": 9.127734675193454e-06, "loss": 0.9046, "step": 2002 }, { "epoch": 0.22, "grad_norm": 1.9252039183448373, "learning_rate": 9.126751879039442e-06, "loss": 0.913, "step": 2003 }, { "epoch": 0.22, "grad_norm": 1.807972552966048, "learning_rate": 9.12576858250254e-06, "loss": 0.9207, "step": 2004 }, { "epoch": 0.22, "grad_norm": 0.8972406342722112, "learning_rate": 9.12478478570197e-06, "loss": 1.0629, "step": 2005 }, { "epoch": 0.22, "grad_norm": 1.7374363063749585, "learning_rate": 9.123800488757027e-06, "loss": 0.9665, "step": 2006 }, { "epoch": 0.22, "grad_norm": 1.7645876656194983, "learning_rate": 9.122815691787056e-06, "loss": 0.9274, "step": 2007 }, { "epoch": 0.22, "grad_norm": 1.7849554629199056, "learning_rate": 9.121830394911471e-06, "loss": 0.8234, "step": 2008 }, { "epoch": 0.22, "grad_norm": 1.7489132991385636, "learning_rate": 9.120844598249738e-06, "loss": 0.9712, "step": 2009 }, { "epoch": 0.22, "grad_norm": 1.7689031761572482, "learning_rate": 9.119858301921391e-06, "loss": 0.9483, "step": 2010 }, { "epoch": 0.22, "grad_norm": 1.875902364873694, "learning_rate": 9.118871506046025e-06, "loss": 0.9142, "step": 2011 }, { "epoch": 0.22, "grad_norm": 1.800079356393919, "learning_rate": 9.117884210743287e-06, "loss": 0.9266, "step": 2012 }, { "epoch": 0.22, "grad_norm": 1.806287813430833, "learning_rate": 9.11689641613289e-06, "loss": 0.8733, "step": 2013 }, { "epoch": 0.22, "grad_norm": 0.9654706768499086, "learning_rate": 9.115908122334611e-06, "loss": 1.0616, "step": 2014 }, { "epoch": 0.22, "grad_norm": 1.8285384671339544, "learning_rate": 9.114919329468283e-06, "loss": 0.9547, "step": 2015 }, { "epoch": 0.22, "grad_norm": 1.6156231840262498, "learning_rate": 9.113930037653799e-06, "loss": 0.991, "step": 2016 }, { "epoch": 0.22, "grad_norm": 1.8602906061650213, "learning_rate": 9.112940247011116e-06, "loss": 0.9468, "step": 2017 }, { "epoch": 0.22, "grad_norm": 1.8690035661700586, "learning_rate": 9.111949957660248e-06, "loss": 0.9612, "step": 2018 }, { "epoch": 0.22, "grad_norm": 1.7556032481202462, "learning_rate": 9.110959169721272e-06, "loss": 0.9621, "step": 2019 }, { "epoch": 0.22, "grad_norm": 1.8119238569987048, "learning_rate": 9.109967883314323e-06, "loss": 1.0198, "step": 2020 }, { "epoch": 0.22, "grad_norm": 1.781791863960439, "learning_rate": 9.108976098559601e-06, "loss": 0.9422, "step": 2021 }, { "epoch": 0.22, "grad_norm": 1.6706407494460285, "learning_rate": 9.107983815577359e-06, "loss": 0.8778, "step": 2022 }, { "epoch": 0.22, "grad_norm": 1.5926693854778404, "learning_rate": 9.106991034487917e-06, "loss": 0.8268, "step": 2023 }, { "epoch": 0.22, "grad_norm": 1.8895972079387284, "learning_rate": 9.105997755411656e-06, "loss": 1.011, "step": 2024 }, { "epoch": 0.22, "grad_norm": 1.7668174462851103, "learning_rate": 9.10500397846901e-06, "loss": 0.8454, "step": 2025 }, { "epoch": 0.22, "grad_norm": 1.7614907238180986, "learning_rate": 9.104009703780478e-06, "loss": 1.0237, "step": 2026 }, { "epoch": 0.22, "grad_norm": 1.773006796288227, "learning_rate": 9.103014931466622e-06, 
"loss": 1.019, "step": 2027 }, { "epoch": 0.22, "grad_norm": 1.7470113057396002, "learning_rate": 9.102019661648061e-06, "loss": 0.953, "step": 2028 }, { "epoch": 0.22, "grad_norm": 1.7763088829541076, "learning_rate": 9.101023894445474e-06, "loss": 0.915, "step": 2029 }, { "epoch": 0.22, "grad_norm": 1.8139989681481696, "learning_rate": 9.100027629979599e-06, "loss": 0.8918, "step": 2030 }, { "epoch": 0.22, "grad_norm": 1.8097044932966238, "learning_rate": 9.099030868371241e-06, "loss": 0.987, "step": 2031 }, { "epoch": 0.22, "grad_norm": 1.8458615203568167, "learning_rate": 9.09803360974126e-06, "loss": 0.9091, "step": 2032 }, { "epoch": 0.22, "grad_norm": 1.7841846436001092, "learning_rate": 9.097035854210574e-06, "loss": 0.8944, "step": 2033 }, { "epoch": 0.22, "grad_norm": 1.903974254590973, "learning_rate": 9.096037601900166e-06, "loss": 1.0455, "step": 2034 }, { "epoch": 0.22, "grad_norm": 1.808336384004518, "learning_rate": 9.095038852931077e-06, "loss": 0.9046, "step": 2035 }, { "epoch": 0.22, "grad_norm": 1.7916841903918468, "learning_rate": 9.09403960742441e-06, "loss": 0.9039, "step": 2036 }, { "epoch": 0.22, "grad_norm": 1.7569978091041596, "learning_rate": 9.093039865501328e-06, "loss": 0.9101, "step": 2037 }, { "epoch": 0.22, "grad_norm": 1.733811857902193, "learning_rate": 9.09203962728305e-06, "loss": 0.9681, "step": 2038 }, { "epoch": 0.22, "grad_norm": 1.763277805303069, "learning_rate": 9.09103889289086e-06, "loss": 0.8328, "step": 2039 }, { "epoch": 0.22, "grad_norm": 1.7230424703294562, "learning_rate": 9.0900376624461e-06, "loss": 0.8593, "step": 2040 }, { "epoch": 0.22, "grad_norm": 1.6363534489276021, "learning_rate": 9.089035936070172e-06, "loss": 0.9459, "step": 2041 }, { "epoch": 0.22, "grad_norm": 1.6782533624285076, "learning_rate": 9.08803371388454e-06, "loss": 0.9391, "step": 2042 }, { "epoch": 0.22, "grad_norm": 1.6826036915294544, "learning_rate": 9.087030996010728e-06, "loss": 0.9393, "step": 2043 }, { "epoch": 0.22, "grad_norm": 1.7950544393713717, "learning_rate": 9.086027782570318e-06, "loss": 0.9637, "step": 2044 }, { "epoch": 0.22, "grad_norm": 1.7276078090214835, "learning_rate": 9.085024073684952e-06, "loss": 0.9055, "step": 2045 }, { "epoch": 0.22, "grad_norm": 1.766277783126572, "learning_rate": 9.084019869476332e-06, "loss": 0.9954, "step": 2046 }, { "epoch": 0.22, "grad_norm": 1.8146453698645704, "learning_rate": 9.083015170066225e-06, "loss": 0.8676, "step": 2047 }, { "epoch": 0.22, "grad_norm": 1.7566042710347756, "learning_rate": 9.082009975576452e-06, "loss": 0.8732, "step": 2048 }, { "epoch": 0.22, "grad_norm": 1.878409513251426, "learning_rate": 9.081004286128896e-06, "loss": 1.0147, "step": 2049 }, { "epoch": 0.22, "grad_norm": 1.8089996758901536, "learning_rate": 9.079998101845502e-06, "loss": 0.895, "step": 2050 }, { "epoch": 0.22, "grad_norm": 1.8572021115348551, "learning_rate": 9.07899142284827e-06, "loss": 0.9963, "step": 2051 }, { "epoch": 0.22, "grad_norm": 1.7620703972493956, "learning_rate": 9.077984249259268e-06, "loss": 0.9656, "step": 2052 }, { "epoch": 0.22, "grad_norm": 1.6591946235462511, "learning_rate": 9.076976581200616e-06, "loss": 0.8536, "step": 2053 }, { "epoch": 0.22, "grad_norm": 1.8187573154498395, "learning_rate": 9.075968418794499e-06, "loss": 0.9794, "step": 2054 }, { "epoch": 0.22, "grad_norm": 1.7846316936584588, "learning_rate": 9.074959762163158e-06, "loss": 0.8393, "step": 2055 }, { "epoch": 0.22, "grad_norm": 1.7213299987690351, "learning_rate": 9.073950611428896e-06, "loss": 0.9702, "step": 2056 }, { 
"epoch": 0.22, "grad_norm": 1.8510982197896972, "learning_rate": 9.07294096671408e-06, "loss": 0.9566, "step": 2057 }, { "epoch": 0.22, "grad_norm": 1.7188407516943063, "learning_rate": 9.071930828141128e-06, "loss": 0.893, "step": 2058 }, { "epoch": 0.22, "grad_norm": 1.8063694802017927, "learning_rate": 9.070920195832528e-06, "loss": 0.8768, "step": 2059 }, { "epoch": 0.22, "grad_norm": 1.7589722686088451, "learning_rate": 9.069909069910817e-06, "loss": 0.9648, "step": 2060 }, { "epoch": 0.22, "grad_norm": 1.6220508022338267, "learning_rate": 9.068897450498602e-06, "loss": 0.9474, "step": 2061 }, { "epoch": 0.22, "grad_norm": 1.9559677324400258, "learning_rate": 9.067885337718542e-06, "loss": 0.9136, "step": 2062 }, { "epoch": 0.22, "grad_norm": 1.7432672554334079, "learning_rate": 9.066872731693362e-06, "loss": 0.8915, "step": 2063 }, { "epoch": 0.22, "grad_norm": 1.9504174095244735, "learning_rate": 9.065859632545842e-06, "loss": 0.9169, "step": 2064 }, { "epoch": 0.22, "grad_norm": 1.6984129844659754, "learning_rate": 9.064846040398822e-06, "loss": 0.9181, "step": 2065 }, { "epoch": 0.22, "grad_norm": 1.680348236582505, "learning_rate": 9.06383195537521e-06, "loss": 0.9209, "step": 2066 }, { "epoch": 0.22, "grad_norm": 1.8328575974696761, "learning_rate": 9.062817377597961e-06, "loss": 0.9219, "step": 2067 }, { "epoch": 0.22, "grad_norm": 1.641925455201439, "learning_rate": 9.061802307190099e-06, "loss": 0.8707, "step": 2068 }, { "epoch": 0.22, "grad_norm": 1.6851491869100046, "learning_rate": 9.060786744274703e-06, "loss": 0.887, "step": 2069 }, { "epoch": 0.22, "grad_norm": 1.7619893985132553, "learning_rate": 9.059770688974916e-06, "loss": 0.9873, "step": 2070 }, { "epoch": 0.22, "grad_norm": 1.8489254786450469, "learning_rate": 9.058754141413936e-06, "loss": 0.9401, "step": 2071 }, { "epoch": 0.22, "grad_norm": 1.6614983260873726, "learning_rate": 9.057737101715024e-06, "loss": 0.9123, "step": 2072 }, { "epoch": 0.22, "grad_norm": 1.791029374629987, "learning_rate": 9.056719570001499e-06, "loss": 0.9578, "step": 2073 }, { "epoch": 0.22, "grad_norm": 1.7554113422704898, "learning_rate": 9.05570154639674e-06, "loss": 0.8831, "step": 2074 }, { "epoch": 0.22, "grad_norm": 1.6998267906889954, "learning_rate": 9.054683031024188e-06, "loss": 0.9713, "step": 2075 }, { "epoch": 0.22, "grad_norm": 1.7483819430015746, "learning_rate": 9.053664024007339e-06, "loss": 0.9672, "step": 2076 }, { "epoch": 0.22, "grad_norm": 1.7850456123159555, "learning_rate": 9.052644525469751e-06, "loss": 0.9354, "step": 2077 }, { "epoch": 0.22, "grad_norm": 1.5983164920077244, "learning_rate": 9.051624535535043e-06, "loss": 0.9331, "step": 2078 }, { "epoch": 0.22, "grad_norm": 1.6997536678097385, "learning_rate": 9.050604054326893e-06, "loss": 0.8764, "step": 2079 }, { "epoch": 0.22, "grad_norm": 1.7968568874869546, "learning_rate": 9.049583081969038e-06, "loss": 0.9694, "step": 2080 }, { "epoch": 0.22, "grad_norm": 1.806067178908236, "learning_rate": 9.04856161858527e-06, "loss": 0.9116, "step": 2081 }, { "epoch": 0.22, "grad_norm": 1.7534743600104805, "learning_rate": 9.04753966429945e-06, "loss": 0.885, "step": 2082 }, { "epoch": 0.22, "grad_norm": 0.9586778850174237, "learning_rate": 9.046517219235492e-06, "loss": 1.07, "step": 2083 }, { "epoch": 0.22, "grad_norm": 1.6958623745899681, "learning_rate": 9.045494283517371e-06, "loss": 0.9016, "step": 2084 }, { "epoch": 0.22, "grad_norm": 1.6524888632441994, "learning_rate": 9.044470857269121e-06, "loss": 0.8771, "step": 2085 }, { "epoch": 0.22, "grad_norm": 
1.6904209334105549, "learning_rate": 9.043446940614834e-06, "loss": 0.9209, "step": 2086 }, { "epoch": 0.22, "grad_norm": 1.7714665580818523, "learning_rate": 9.042422533678667e-06, "loss": 0.9213, "step": 2087 }, { "epoch": 0.22, "grad_norm": 1.936371201160908, "learning_rate": 9.041397636584833e-06, "loss": 0.9659, "step": 2088 }, { "epoch": 0.22, "grad_norm": 1.7471584111185459, "learning_rate": 9.0403722494576e-06, "loss": 0.9888, "step": 2089 }, { "epoch": 0.22, "grad_norm": 1.6799661517763989, "learning_rate": 9.039346372421304e-06, "loss": 0.9365, "step": 2090 }, { "epoch": 0.22, "grad_norm": 1.0128783018313607, "learning_rate": 9.038320005600335e-06, "loss": 1.0722, "step": 2091 }, { "epoch": 0.22, "grad_norm": 1.7459752076535546, "learning_rate": 9.037293149119144e-06, "loss": 0.9468, "step": 2092 }, { "epoch": 0.23, "grad_norm": 1.887321762699653, "learning_rate": 9.036265803102238e-06, "loss": 0.8913, "step": 2093 }, { "epoch": 0.23, "grad_norm": 1.7741390027371502, "learning_rate": 9.035237967674189e-06, "loss": 0.9597, "step": 2094 }, { "epoch": 0.23, "grad_norm": 1.7559332648108528, "learning_rate": 9.034209642959624e-06, "loss": 0.9659, "step": 2095 }, { "epoch": 0.23, "grad_norm": 1.8585174170935415, "learning_rate": 9.033180829083232e-06, "loss": 0.9336, "step": 2096 }, { "epoch": 0.23, "grad_norm": 1.7308241463426848, "learning_rate": 9.032151526169761e-06, "loss": 0.9714, "step": 2097 }, { "epoch": 0.23, "grad_norm": 1.7173827351268125, "learning_rate": 9.031121734344016e-06, "loss": 0.8977, "step": 2098 }, { "epoch": 0.23, "grad_norm": 1.6551805718104564, "learning_rate": 9.030091453730863e-06, "loss": 0.9542, "step": 2099 }, { "epoch": 0.23, "grad_norm": 1.0841225339396494, "learning_rate": 9.02906068445523e-06, "loss": 1.0792, "step": 2100 }, { "epoch": 0.23, "grad_norm": 0.9216218954203206, "learning_rate": 9.028029426642095e-06, "loss": 1.0552, "step": 2101 }, { "epoch": 0.23, "grad_norm": 1.7474996409752583, "learning_rate": 9.026997680416507e-06, "loss": 1.002, "step": 2102 }, { "epoch": 0.23, "grad_norm": 1.759584802480323, "learning_rate": 9.025965445903567e-06, "loss": 0.9576, "step": 2103 }, { "epoch": 0.23, "grad_norm": 1.8453118935501323, "learning_rate": 9.024932723228436e-06, "loss": 0.8601, "step": 2104 }, { "epoch": 0.23, "grad_norm": 1.7879436207752712, "learning_rate": 9.023899512516337e-06, "loss": 0.9901, "step": 2105 }, { "epoch": 0.23, "grad_norm": 1.7646454769840962, "learning_rate": 9.02286581389255e-06, "loss": 1.0398, "step": 2106 }, { "epoch": 0.23, "grad_norm": 1.7243040240545167, "learning_rate": 9.021831627482414e-06, "loss": 0.9319, "step": 2107 }, { "epoch": 0.23, "grad_norm": 1.7775521629013917, "learning_rate": 9.020796953411328e-06, "loss": 0.9728, "step": 2108 }, { "epoch": 0.23, "grad_norm": 1.7250758034844178, "learning_rate": 9.019761791804749e-06, "loss": 0.9096, "step": 2109 }, { "epoch": 0.23, "grad_norm": 1.7942744140564892, "learning_rate": 9.018726142788195e-06, "loss": 0.9436, "step": 2110 }, { "epoch": 0.23, "grad_norm": 1.7832292171728314, "learning_rate": 9.01769000648724e-06, "loss": 1.0329, "step": 2111 }, { "epoch": 0.23, "grad_norm": 1.7327836267277128, "learning_rate": 9.016653383027522e-06, "loss": 0.9155, "step": 2112 }, { "epoch": 0.23, "grad_norm": 1.5968188961320082, "learning_rate": 9.015616272534734e-06, "loss": 1.0114, "step": 2113 }, { "epoch": 0.23, "grad_norm": 1.7276855931267041, "learning_rate": 9.014578675134629e-06, "loss": 0.9979, "step": 2114 }, { "epoch": 0.23, "grad_norm": 1.7234218679489934, 
"learning_rate": 9.013540590953019e-06, "loss": 0.8758, "step": 2115 }, { "epoch": 0.23, "grad_norm": 1.6934949774508288, "learning_rate": 9.012502020115777e-06, "loss": 0.9696, "step": 2116 }, { "epoch": 0.23, "grad_norm": 1.9363392076716504, "learning_rate": 9.01146296274883e-06, "loss": 0.9088, "step": 2117 }, { "epoch": 0.23, "grad_norm": 1.552152781994723, "learning_rate": 9.01042341897817e-06, "loss": 1.086, "step": 2118 }, { "epoch": 0.23, "grad_norm": 1.7350917338251892, "learning_rate": 9.009383388929842e-06, "loss": 0.9481, "step": 2119 }, { "epoch": 0.23, "grad_norm": 1.8608947230585158, "learning_rate": 9.008342872729958e-06, "loss": 0.9233, "step": 2120 }, { "epoch": 0.23, "grad_norm": 1.9054266052237752, "learning_rate": 9.007301870504681e-06, "loss": 0.9518, "step": 2121 }, { "epoch": 0.23, "grad_norm": 1.810662070860422, "learning_rate": 9.006260382380238e-06, "loss": 0.9116, "step": 2122 }, { "epoch": 0.23, "grad_norm": 1.8604225000856063, "learning_rate": 9.005218408482912e-06, "loss": 0.9395, "step": 2123 }, { "epoch": 0.23, "grad_norm": 1.7441463572900258, "learning_rate": 9.004175948939045e-06, "loss": 0.9513, "step": 2124 }, { "epoch": 0.23, "grad_norm": 1.7303479468702503, "learning_rate": 9.003133003875039e-06, "loss": 0.9418, "step": 2125 }, { "epoch": 0.23, "grad_norm": 1.7903215283249656, "learning_rate": 9.002089573417356e-06, "loss": 0.9055, "step": 2126 }, { "epoch": 0.23, "grad_norm": 1.824790192114977, "learning_rate": 9.001045657692517e-06, "loss": 0.9084, "step": 2127 }, { "epoch": 0.23, "grad_norm": 1.860049981688993, "learning_rate": 9.000001256827096e-06, "loss": 0.9311, "step": 2128 }, { "epoch": 0.23, "grad_norm": 1.7360003938096522, "learning_rate": 8.998956370947733e-06, "loss": 0.9355, "step": 2129 }, { "epoch": 0.23, "grad_norm": 1.8818083172510638, "learning_rate": 8.997911000181124e-06, "loss": 0.9076, "step": 2130 }, { "epoch": 0.23, "grad_norm": 1.8108231129664885, "learning_rate": 8.996865144654023e-06, "loss": 0.9495, "step": 2131 }, { "epoch": 0.23, "grad_norm": 1.6721434721062056, "learning_rate": 8.995818804493244e-06, "loss": 0.9506, "step": 2132 }, { "epoch": 0.23, "grad_norm": 1.848871127699889, "learning_rate": 8.994771979825658e-06, "loss": 0.8994, "step": 2133 }, { "epoch": 0.23, "grad_norm": 1.7026413944525383, "learning_rate": 8.993724670778199e-06, "loss": 0.9674, "step": 2134 }, { "epoch": 0.23, "grad_norm": 1.7815576377227917, "learning_rate": 8.992676877477855e-06, "loss": 0.8743, "step": 2135 }, { "epoch": 0.23, "grad_norm": 1.7753246405369383, "learning_rate": 8.991628600051674e-06, "loss": 0.8371, "step": 2136 }, { "epoch": 0.23, "grad_norm": 1.7502814874125072, "learning_rate": 8.990579838626764e-06, "loss": 0.9025, "step": 2137 }, { "epoch": 0.23, "grad_norm": 1.7896942210522537, "learning_rate": 8.989530593330292e-06, "loss": 0.8898, "step": 2138 }, { "epoch": 0.23, "grad_norm": 1.7328745875524927, "learning_rate": 8.988480864289482e-06, "loss": 0.9533, "step": 2139 }, { "epoch": 0.23, "grad_norm": 1.7507694595789036, "learning_rate": 8.987430651631615e-06, "loss": 0.8727, "step": 2140 }, { "epoch": 0.23, "grad_norm": 1.8049676064187659, "learning_rate": 8.986379955484036e-06, "loss": 0.9354, "step": 2141 }, { "epoch": 0.23, "grad_norm": 2.255981423307433, "learning_rate": 8.985328775974142e-06, "loss": 0.8853, "step": 2142 }, { "epoch": 0.23, "grad_norm": 1.71076923993676, "learning_rate": 8.984277113229398e-06, "loss": 0.9674, "step": 2143 }, { "epoch": 0.23, "grad_norm": 1.0173392267794297, "learning_rate": 
8.983224967377314e-06, "loss": 1.0709, "step": 2144 }, { "epoch": 0.23, "grad_norm": 1.793846484961734, "learning_rate": 8.982172338545474e-06, "loss": 0.9486, "step": 2145 }, { "epoch": 0.23, "grad_norm": 1.8252315401520471, "learning_rate": 8.981119226861508e-06, "loss": 0.8972, "step": 2146 }, { "epoch": 0.23, "grad_norm": 1.765182270604645, "learning_rate": 8.980065632453111e-06, "loss": 1.0373, "step": 2147 }, { "epoch": 0.23, "grad_norm": 1.7745222321813605, "learning_rate": 8.979011555448035e-06, "loss": 1.0042, "step": 2148 }, { "epoch": 0.23, "grad_norm": 0.9356399896124354, "learning_rate": 8.97795699597409e-06, "loss": 1.0646, "step": 2149 }, { "epoch": 0.23, "grad_norm": 1.740709576278291, "learning_rate": 8.976901954159144e-06, "loss": 0.9292, "step": 2150 }, { "epoch": 0.23, "grad_norm": 1.758730698911262, "learning_rate": 8.975846430131127e-06, "loss": 0.9706, "step": 2151 }, { "epoch": 0.23, "grad_norm": 1.6656641973076396, "learning_rate": 8.974790424018022e-06, "loss": 0.9083, "step": 2152 }, { "epoch": 0.23, "grad_norm": 1.735611933034405, "learning_rate": 8.973733935947877e-06, "loss": 0.9401, "step": 2153 }, { "epoch": 0.23, "grad_norm": 1.8150238137943122, "learning_rate": 8.97267696604879e-06, "loss": 0.9469, "step": 2154 }, { "epoch": 0.23, "grad_norm": 1.7231737559138791, "learning_rate": 8.971619514448928e-06, "loss": 0.97, "step": 2155 }, { "epoch": 0.23, "grad_norm": 1.856552177901469, "learning_rate": 8.970561581276506e-06, "loss": 0.9753, "step": 2156 }, { "epoch": 0.23, "grad_norm": 1.623287975917867, "learning_rate": 8.969503166659803e-06, "loss": 0.941, "step": 2157 }, { "epoch": 0.23, "grad_norm": 1.7123576570853698, "learning_rate": 8.968444270727157e-06, "loss": 0.94, "step": 2158 }, { "epoch": 0.23, "grad_norm": 1.7712283067389945, "learning_rate": 8.967384893606962e-06, "loss": 0.9153, "step": 2159 }, { "epoch": 0.23, "grad_norm": 1.9578754036238484, "learning_rate": 8.96632503542767e-06, "loss": 0.9497, "step": 2160 }, { "epoch": 0.23, "grad_norm": 1.8453629765570332, "learning_rate": 8.965264696317795e-06, "loss": 0.938, "step": 2161 }, { "epoch": 0.23, "grad_norm": 1.6434096741656588, "learning_rate": 8.964203876405903e-06, "loss": 0.9218, "step": 2162 }, { "epoch": 0.23, "grad_norm": 1.819508938774416, "learning_rate": 8.963142575820626e-06, "loss": 0.9825, "step": 2163 }, { "epoch": 0.23, "grad_norm": 0.9039588724434431, "learning_rate": 8.962080794690648e-06, "loss": 1.0866, "step": 2164 }, { "epoch": 0.23, "grad_norm": 1.8498672100002458, "learning_rate": 8.961018533144716e-06, "loss": 0.9592, "step": 2165 }, { "epoch": 0.23, "grad_norm": 1.8033775233866292, "learning_rate": 8.95995579131163e-06, "loss": 0.9483, "step": 2166 }, { "epoch": 0.23, "grad_norm": 0.8882628379740451, "learning_rate": 8.95889256932025e-06, "loss": 1.0734, "step": 2167 }, { "epoch": 0.23, "grad_norm": 1.7831838646891607, "learning_rate": 8.9578288672995e-06, "loss": 0.891, "step": 2168 }, { "epoch": 0.23, "grad_norm": 1.8119052984443256, "learning_rate": 8.956764685378356e-06, "loss": 0.9477, "step": 2169 }, { "epoch": 0.23, "grad_norm": 2.1028235003862004, "learning_rate": 8.955700023685851e-06, "loss": 0.8349, "step": 2170 }, { "epoch": 0.23, "grad_norm": 1.7286313124458272, "learning_rate": 8.95463488235108e-06, "loss": 0.9291, "step": 2171 }, { "epoch": 0.23, "grad_norm": 1.6443897611646197, "learning_rate": 8.953569261503198e-06, "loss": 0.8461, "step": 2172 }, { "epoch": 0.23, "grad_norm": 1.0381723298649999, "learning_rate": 8.952503161271413e-06, "loss": 1.0719, 
"step": 2173 }, { "epoch": 0.23, "grad_norm": 1.7971667244067868, "learning_rate": 8.951436581784992e-06, "loss": 0.9064, "step": 2174 }, { "epoch": 0.23, "grad_norm": 1.7310115421921137, "learning_rate": 8.950369523173263e-06, "loss": 0.9677, "step": 2175 }, { "epoch": 0.23, "grad_norm": 1.9047382301973255, "learning_rate": 8.949301985565611e-06, "loss": 0.9465, "step": 2176 }, { "epoch": 0.23, "grad_norm": 1.8415883032187619, "learning_rate": 8.948233969091478e-06, "loss": 0.9414, "step": 2177 }, { "epoch": 0.23, "grad_norm": 1.7537268845957987, "learning_rate": 8.947165473880364e-06, "loss": 0.9174, "step": 2178 }, { "epoch": 0.23, "grad_norm": 1.8619044409735235, "learning_rate": 8.946096500061828e-06, "loss": 0.9995, "step": 2179 }, { "epoch": 0.23, "grad_norm": 1.8880796159636029, "learning_rate": 8.94502704776549e-06, "loss": 1.005, "step": 2180 }, { "epoch": 0.23, "grad_norm": 1.7544394320130334, "learning_rate": 8.943957117121018e-06, "loss": 0.851, "step": 2181 }, { "epoch": 0.23, "grad_norm": 1.7558704694232716, "learning_rate": 8.94288670825815e-06, "loss": 0.8879, "step": 2182 }, { "epoch": 0.23, "grad_norm": 2.6765868586316195, "learning_rate": 8.941815821306675e-06, "loss": 0.9509, "step": 2183 }, { "epoch": 0.23, "grad_norm": 1.6785842619919114, "learning_rate": 8.940744456396444e-06, "loss": 0.9352, "step": 2184 }, { "epoch": 0.23, "grad_norm": 1.883867740364058, "learning_rate": 8.93967261365736e-06, "loss": 0.9202, "step": 2185 }, { "epoch": 0.24, "grad_norm": 1.7952090744667841, "learning_rate": 8.93860029321939e-06, "loss": 0.9989, "step": 2186 }, { "epoch": 0.24, "grad_norm": 1.7271422275196493, "learning_rate": 8.937527495212555e-06, "loss": 0.9294, "step": 2187 }, { "epoch": 0.24, "grad_norm": 1.7966281354314397, "learning_rate": 8.93645421976694e-06, "loss": 0.9011, "step": 2188 }, { "epoch": 0.24, "grad_norm": 0.9856080334819769, "learning_rate": 8.935380467012675e-06, "loss": 1.0742, "step": 2189 }, { "epoch": 0.24, "grad_norm": 1.9822700747764161, "learning_rate": 8.934306237079963e-06, "loss": 1.018, "step": 2190 }, { "epoch": 0.24, "grad_norm": 1.8381191098744205, "learning_rate": 8.933231530099057e-06, "loss": 0.8594, "step": 2191 }, { "epoch": 0.24, "grad_norm": 1.9040242930042737, "learning_rate": 8.932156346200269e-06, "loss": 0.9122, "step": 2192 }, { "epoch": 0.24, "grad_norm": 1.7807746758322613, "learning_rate": 8.931080685513966e-06, "loss": 0.9001, "step": 2193 }, { "epoch": 0.24, "grad_norm": 1.80857488676619, "learning_rate": 8.930004548170577e-06, "loss": 0.9608, "step": 2194 }, { "epoch": 0.24, "grad_norm": 1.6873435435612767, "learning_rate": 8.92892793430059e-06, "loss": 0.9221, "step": 2195 }, { "epoch": 0.24, "grad_norm": 1.8172995397303782, "learning_rate": 8.927850844034545e-06, "loss": 0.9213, "step": 2196 }, { "epoch": 0.24, "grad_norm": 1.8645279967908626, "learning_rate": 8.926773277503042e-06, "loss": 1.0087, "step": 2197 }, { "epoch": 0.24, "grad_norm": 1.6585159467473778, "learning_rate": 8.925695234836743e-06, "loss": 0.8852, "step": 2198 }, { "epoch": 0.24, "grad_norm": 1.730985045790014, "learning_rate": 8.924616716166363e-06, "loss": 0.9172, "step": 2199 }, { "epoch": 0.24, "grad_norm": 2.11338070948821, "learning_rate": 8.923537721622675e-06, "loss": 0.9652, "step": 2200 }, { "epoch": 0.24, "grad_norm": 1.7301506169110117, "learning_rate": 8.922458251336512e-06, "loss": 0.9122, "step": 2201 }, { "epoch": 0.24, "grad_norm": 1.7745369243752658, "learning_rate": 8.921378305438763e-06, "loss": 1.0371, "step": 2202 }, { "epoch": 
0.24, "grad_norm": 2.1350645871432334, "learning_rate": 8.920297884060378e-06, "loss": 0.9403, "step": 2203 }, { "epoch": 0.24, "grad_norm": 1.0013453371618901, "learning_rate": 8.919216987332357e-06, "loss": 1.0655, "step": 2204 }, { "epoch": 0.24, "grad_norm": 1.9107170376434253, "learning_rate": 8.918135615385763e-06, "loss": 0.9088, "step": 2205 }, { "epoch": 0.24, "grad_norm": 1.8112727008068048, "learning_rate": 8.917053768351719e-06, "loss": 0.9997, "step": 2206 }, { "epoch": 0.24, "grad_norm": 1.6455256574911037, "learning_rate": 8.915971446361404e-06, "loss": 0.9264, "step": 2207 }, { "epoch": 0.24, "grad_norm": 1.6939464274422178, "learning_rate": 8.914888649546049e-06, "loss": 0.9247, "step": 2208 }, { "epoch": 0.24, "grad_norm": 1.7985990296214152, "learning_rate": 8.913805378036948e-06, "loss": 0.9887, "step": 2209 }, { "epoch": 0.24, "grad_norm": 1.7686874411135423, "learning_rate": 8.912721631965454e-06, "loss": 0.9194, "step": 2210 }, { "epoch": 0.24, "grad_norm": 1.7477330536976754, "learning_rate": 8.91163741146297e-06, "loss": 0.8807, "step": 2211 }, { "epoch": 0.24, "grad_norm": 1.6603041142877848, "learning_rate": 8.910552716660966e-06, "loss": 0.8424, "step": 2212 }, { "epoch": 0.24, "grad_norm": 1.8243212030836031, "learning_rate": 8.909467547690961e-06, "loss": 0.9215, "step": 2213 }, { "epoch": 0.24, "grad_norm": 1.7388751180644533, "learning_rate": 8.908381904684543e-06, "loss": 0.8704, "step": 2214 }, { "epoch": 0.24, "grad_norm": 1.7711008724226498, "learning_rate": 8.90729578777334e-06, "loss": 0.9716, "step": 2215 }, { "epoch": 0.24, "grad_norm": 1.675477382868681, "learning_rate": 8.906209197089055e-06, "loss": 0.9167, "step": 2216 }, { "epoch": 0.24, "grad_norm": 1.874648272016351, "learning_rate": 8.905122132763437e-06, "loss": 0.9242, "step": 2217 }, { "epoch": 0.24, "grad_norm": 1.7775010363714228, "learning_rate": 8.904034594928296e-06, "loss": 0.899, "step": 2218 }, { "epoch": 0.24, "grad_norm": 1.7882546177916347, "learning_rate": 8.902946583715503e-06, "loss": 0.9617, "step": 2219 }, { "epoch": 0.24, "grad_norm": 1.7242692666366477, "learning_rate": 8.901858099256981e-06, "loss": 0.9334, "step": 2220 }, { "epoch": 0.24, "grad_norm": 1.6973173672750974, "learning_rate": 8.900769141684714e-06, "loss": 0.9146, "step": 2221 }, { "epoch": 0.24, "grad_norm": 1.5744583914823165, "learning_rate": 8.899679711130737e-06, "loss": 0.8809, "step": 2222 }, { "epoch": 0.24, "grad_norm": 1.8411325563423053, "learning_rate": 8.898589807727154e-06, "loss": 0.9371, "step": 2223 }, { "epoch": 0.24, "grad_norm": 1.793718776578932, "learning_rate": 8.897499431606116e-06, "loss": 0.9927, "step": 2224 }, { "epoch": 0.24, "grad_norm": 1.745469146363477, "learning_rate": 8.896408582899835e-06, "loss": 0.9432, "step": 2225 }, { "epoch": 0.24, "grad_norm": 1.9943174017975462, "learning_rate": 8.89531726174058e-06, "loss": 0.9935, "step": 2226 }, { "epoch": 0.24, "grad_norm": 1.8533089556410962, "learning_rate": 8.894225468260676e-06, "loss": 0.9283, "step": 2227 }, { "epoch": 0.24, "grad_norm": 1.737238992237244, "learning_rate": 8.89313320259251e-06, "loss": 0.9106, "step": 2228 }, { "epoch": 0.24, "grad_norm": 1.6821878368741026, "learning_rate": 8.89204046486852e-06, "loss": 0.9088, "step": 2229 }, { "epoch": 0.24, "grad_norm": 1.7721624805253644, "learning_rate": 8.89094725522121e-06, "loss": 0.945, "step": 2230 }, { "epoch": 0.24, "grad_norm": 1.6789599450277066, "learning_rate": 8.889853573783128e-06, "loss": 0.9395, "step": 2231 }, { "epoch": 0.24, "grad_norm": 
0.9865132905899449, "learning_rate": 8.88875942068689e-06, "loss": 1.0907, "step": 2232 }, { "epoch": 0.24, "grad_norm": 1.7707237563927818, "learning_rate": 8.887664796065167e-06, "loss": 1.007, "step": 2233 }, { "epoch": 0.24, "grad_norm": 1.7327321338903077, "learning_rate": 8.886569700050682e-06, "loss": 0.9499, "step": 2234 }, { "epoch": 0.24, "grad_norm": 1.6609941785317417, "learning_rate": 8.885474132776223e-06, "loss": 1.0144, "step": 2235 }, { "epoch": 0.24, "grad_norm": 1.7971466485026215, "learning_rate": 8.884378094374633e-06, "loss": 0.9257, "step": 2236 }, { "epoch": 0.24, "grad_norm": 1.6732007998062515, "learning_rate": 8.883281584978805e-06, "loss": 0.8977, "step": 2237 }, { "epoch": 0.24, "grad_norm": 1.5950829934834445, "learning_rate": 8.882184604721697e-06, "loss": 0.8744, "step": 2238 }, { "epoch": 0.24, "grad_norm": 1.796367796687254, "learning_rate": 8.88108715373632e-06, "loss": 0.8753, "step": 2239 }, { "epoch": 0.24, "grad_norm": 1.6761346694556256, "learning_rate": 8.879989232155749e-06, "loss": 0.8674, "step": 2240 }, { "epoch": 0.24, "grad_norm": 1.6334798257469063, "learning_rate": 8.878890840113106e-06, "loss": 0.935, "step": 2241 }, { "epoch": 0.24, "grad_norm": 1.6653832277270355, "learning_rate": 8.877791977741575e-06, "loss": 0.9177, "step": 2242 }, { "epoch": 0.24, "grad_norm": 1.630908451655982, "learning_rate": 8.8766926451744e-06, "loss": 0.9233, "step": 2243 }, { "epoch": 0.24, "grad_norm": 1.738640881366043, "learning_rate": 8.875592842544875e-06, "loss": 0.971, "step": 2244 }, { "epoch": 0.24, "grad_norm": 1.7911522860601667, "learning_rate": 8.874492569986357e-06, "loss": 0.8742, "step": 2245 }, { "epoch": 0.24, "grad_norm": 1.8520767902506714, "learning_rate": 8.873391827632258e-06, "loss": 0.8793, "step": 2246 }, { "epoch": 0.24, "grad_norm": 1.7904641727825066, "learning_rate": 8.872290615616045e-06, "loss": 0.9416, "step": 2247 }, { "epoch": 0.24, "grad_norm": 1.7471378974494611, "learning_rate": 8.871188934071247e-06, "loss": 1.0072, "step": 2248 }, { "epoch": 0.24, "grad_norm": 1.7063788848449455, "learning_rate": 8.870086783131443e-06, "loss": 0.9087, "step": 2249 }, { "epoch": 0.24, "grad_norm": 1.865822919827851, "learning_rate": 8.868984162930276e-06, "loss": 0.8438, "step": 2250 }, { "epoch": 0.24, "grad_norm": 1.7674532944386452, "learning_rate": 8.86788107360144e-06, "loss": 0.8918, "step": 2251 }, { "epoch": 0.24, "grad_norm": 1.7627097684871929, "learning_rate": 8.866777515278688e-06, "loss": 0.9112, "step": 2252 }, { "epoch": 0.24, "grad_norm": 1.7430478952958695, "learning_rate": 8.865673488095833e-06, "loss": 0.9864, "step": 2253 }, { "epoch": 0.24, "grad_norm": 1.6258436351749288, "learning_rate": 8.86456899218674e-06, "loss": 0.9776, "step": 2254 }, { "epoch": 0.24, "grad_norm": 1.8216854377307679, "learning_rate": 8.863464027685333e-06, "loss": 0.857, "step": 2255 }, { "epoch": 0.24, "grad_norm": 1.710743894586576, "learning_rate": 8.862358594725595e-06, "loss": 0.9913, "step": 2256 }, { "epoch": 0.24, "grad_norm": 1.7745092078239628, "learning_rate": 8.86125269344156e-06, "loss": 0.8389, "step": 2257 }, { "epoch": 0.24, "grad_norm": 1.8375142355389107, "learning_rate": 8.860146323967325e-06, "loss": 0.9403, "step": 2258 }, { "epoch": 0.24, "grad_norm": 1.7574870962597215, "learning_rate": 8.85903948643704e-06, "loss": 0.9456, "step": 2259 }, { "epoch": 0.24, "grad_norm": 1.710870843020024, "learning_rate": 8.857932180984914e-06, "loss": 0.9384, "step": 2260 }, { "epoch": 0.24, "grad_norm": 1.8836820207829077, 
"learning_rate": 8.85682440774521e-06, "loss": 0.9122, "step": 2261 }, { "epoch": 0.24, "grad_norm": 1.8551566865436915, "learning_rate": 8.855716166852249e-06, "loss": 0.9094, "step": 2262 }, { "epoch": 0.24, "grad_norm": 1.8092320466854968, "learning_rate": 8.854607458440412e-06, "loss": 0.8936, "step": 2263 }, { "epoch": 0.24, "grad_norm": 1.7866078645718164, "learning_rate": 8.85349828264413e-06, "loss": 0.8796, "step": 2264 }, { "epoch": 0.24, "grad_norm": 1.6754175410174497, "learning_rate": 8.852388639597898e-06, "loss": 0.9656, "step": 2265 }, { "epoch": 0.24, "grad_norm": 1.8444137830722658, "learning_rate": 8.85127852943626e-06, "loss": 0.9579, "step": 2266 }, { "epoch": 0.24, "grad_norm": 1.8515803788838923, "learning_rate": 8.850167952293825e-06, "loss": 0.9207, "step": 2267 }, { "epoch": 0.24, "grad_norm": 1.8050500412271848, "learning_rate": 8.849056908305252e-06, "loss": 0.9088, "step": 2268 }, { "epoch": 0.24, "grad_norm": 1.9557694686164702, "learning_rate": 8.847945397605258e-06, "loss": 0.9472, "step": 2269 }, { "epoch": 0.24, "grad_norm": 1.7012857790340064, "learning_rate": 8.84683342032862e-06, "loss": 0.8648, "step": 2270 }, { "epoch": 0.24, "grad_norm": 1.7057368969309457, "learning_rate": 8.845720976610168e-06, "loss": 0.8667, "step": 2271 }, { "epoch": 0.24, "grad_norm": 1.8645723851443787, "learning_rate": 8.844608066584788e-06, "loss": 0.8871, "step": 2272 }, { "epoch": 0.24, "grad_norm": 1.6654056404436466, "learning_rate": 8.843494690387427e-06, "loss": 0.9505, "step": 2273 }, { "epoch": 0.24, "grad_norm": 1.5977966249949151, "learning_rate": 8.842380848153083e-06, "loss": 0.8786, "step": 2274 }, { "epoch": 0.24, "grad_norm": 1.840432615825486, "learning_rate": 8.841266540016814e-06, "loss": 0.983, "step": 2275 }, { "epoch": 0.24, "grad_norm": 1.82319319116273, "learning_rate": 8.840151766113735e-06, "loss": 0.9379, "step": 2276 }, { "epoch": 0.24, "grad_norm": 1.6957639057599092, "learning_rate": 8.839036526579014e-06, "loss": 0.9221, "step": 2277 }, { "epoch": 0.24, "grad_norm": 1.724156847600573, "learning_rate": 8.837920821547881e-06, "loss": 0.8796, "step": 2278 }, { "epoch": 0.25, "grad_norm": 1.700005610858219, "learning_rate": 8.836804651155617e-06, "loss": 0.8831, "step": 2279 }, { "epoch": 0.25, "grad_norm": 1.7041111868199266, "learning_rate": 8.83568801553756e-06, "loss": 0.8353, "step": 2280 }, { "epoch": 0.25, "grad_norm": 1.9304349656851505, "learning_rate": 8.834570914829108e-06, "loss": 0.9357, "step": 2281 }, { "epoch": 0.25, "grad_norm": 2.163751313698539, "learning_rate": 8.833453349165713e-06, "loss": 0.9612, "step": 2282 }, { "epoch": 0.25, "grad_norm": 1.7484077900047175, "learning_rate": 8.832335318682884e-06, "loss": 0.9553, "step": 2283 }, { "epoch": 0.25, "grad_norm": 0.9779020881919162, "learning_rate": 8.831216823516185e-06, "loss": 1.0956, "step": 2284 }, { "epoch": 0.25, "grad_norm": 1.7345772492850835, "learning_rate": 8.830097863801239e-06, "loss": 0.9192, "step": 2285 }, { "epoch": 0.25, "grad_norm": 1.960960288587253, "learning_rate": 8.828978439673721e-06, "loss": 0.946, "step": 2286 }, { "epoch": 0.25, "grad_norm": 1.7716092125926726, "learning_rate": 8.827858551269368e-06, "loss": 0.928, "step": 2287 }, { "epoch": 0.25, "grad_norm": 1.6934791683510433, "learning_rate": 8.826738198723967e-06, "loss": 0.9356, "step": 2288 }, { "epoch": 0.25, "grad_norm": 1.7063578381591806, "learning_rate": 8.825617382173369e-06, "loss": 0.9722, "step": 2289 }, { "epoch": 0.25, "grad_norm": 1.6723775729772732, "learning_rate": 
8.824496101753474e-06, "loss": 0.8904, "step": 2290 }, { "epoch": 0.25, "grad_norm": 1.8696009597020529, "learning_rate": 8.82337435760024e-06, "loss": 0.9402, "step": 2291 }, { "epoch": 0.25, "grad_norm": 1.7405233584444173, "learning_rate": 8.822252149849687e-06, "loss": 0.8992, "step": 2292 }, { "epoch": 0.25, "grad_norm": 1.8417353069930344, "learning_rate": 8.821129478637882e-06, "loss": 0.8839, "step": 2293 }, { "epoch": 0.25, "grad_norm": 1.7516243429425546, "learning_rate": 8.820006344100953e-06, "loss": 0.9211, "step": 2294 }, { "epoch": 0.25, "grad_norm": 1.7762148214051394, "learning_rate": 8.818882746375086e-06, "loss": 0.8858, "step": 2295 }, { "epoch": 0.25, "grad_norm": 1.9219412158292026, "learning_rate": 8.81775868559652e-06, "loss": 0.898, "step": 2296 }, { "epoch": 0.25, "grad_norm": 1.9138765907741295, "learning_rate": 8.816634161901553e-06, "loss": 0.8745, "step": 2297 }, { "epoch": 0.25, "grad_norm": 1.735940231667222, "learning_rate": 8.815509175426536e-06, "loss": 0.8589, "step": 2298 }, { "epoch": 0.25, "grad_norm": 1.8042703679364345, "learning_rate": 8.814383726307876e-06, "loss": 0.9251, "step": 2299 }, { "epoch": 0.25, "grad_norm": 1.819621580351289, "learning_rate": 8.81325781468204e-06, "loss": 0.8698, "step": 2300 }, { "epoch": 0.25, "grad_norm": 1.6657412622335976, "learning_rate": 8.812131440685545e-06, "loss": 0.9488, "step": 2301 }, { "epoch": 0.25, "grad_norm": 1.7417957221424745, "learning_rate": 8.811004604454973e-06, "loss": 0.8801, "step": 2302 }, { "epoch": 0.25, "grad_norm": 1.7855652671323816, "learning_rate": 8.809877306126954e-06, "loss": 0.9496, "step": 2303 }, { "epoch": 0.25, "grad_norm": 1.7373590624228923, "learning_rate": 8.808749545838177e-06, "loss": 0.8986, "step": 2304 }, { "epoch": 0.25, "grad_norm": 1.6973826252175284, "learning_rate": 8.807621323725387e-06, "loss": 0.8971, "step": 2305 }, { "epoch": 0.25, "grad_norm": 1.7037311780425466, "learning_rate": 8.806492639925383e-06, "loss": 0.8962, "step": 2306 }, { "epoch": 0.25, "grad_norm": 1.8521089654363712, "learning_rate": 8.805363494575026e-06, "loss": 0.9171, "step": 2307 }, { "epoch": 0.25, "grad_norm": 1.7426144935249974, "learning_rate": 8.804233887811224e-06, "loss": 1.0505, "step": 2308 }, { "epoch": 0.25, "grad_norm": 1.7186398819920323, "learning_rate": 8.803103819770948e-06, "loss": 0.9894, "step": 2309 }, { "epoch": 0.25, "grad_norm": 1.86370773412969, "learning_rate": 8.801973290591223e-06, "loss": 0.9554, "step": 2310 }, { "epoch": 0.25, "grad_norm": 1.101021544905293, "learning_rate": 8.800842300409129e-06, "loss": 1.0241, "step": 2311 }, { "epoch": 0.25, "grad_norm": 0.9089690733672087, "learning_rate": 8.799710849361804e-06, "loss": 1.0643, "step": 2312 }, { "epoch": 0.25, "grad_norm": 1.690893662155264, "learning_rate": 8.798578937586437e-06, "loss": 0.9375, "step": 2313 }, { "epoch": 0.25, "grad_norm": 1.8019177766880603, "learning_rate": 8.797446565220279e-06, "loss": 0.9265, "step": 2314 }, { "epoch": 0.25, "grad_norm": 1.1378893770859821, "learning_rate": 8.796313732400634e-06, "loss": 1.0682, "step": 2315 }, { "epoch": 0.25, "grad_norm": 1.8797171241528554, "learning_rate": 8.79518043926486e-06, "loss": 0.9612, "step": 2316 }, { "epoch": 0.25, "grad_norm": 1.7808559573716805, "learning_rate": 8.794046685950374e-06, "loss": 0.9891, "step": 2317 }, { "epoch": 0.25, "grad_norm": 1.7792274347163295, "learning_rate": 8.792912472594646e-06, "loss": 0.8401, "step": 2318 }, { "epoch": 0.25, "grad_norm": 1.7220294707984976, "learning_rate": 8.791777799335206e-06, 
"loss": 0.9237, "step": 2319 }, { "epoch": 0.25, "grad_norm": 1.824346193633782, "learning_rate": 8.790642666309637e-06, "loss": 0.8912, "step": 2320 }, { "epoch": 0.25, "grad_norm": 1.7348168957884444, "learning_rate": 8.789507073655575e-06, "loss": 0.8449, "step": 2321 }, { "epoch": 0.25, "grad_norm": 1.8747735352943342, "learning_rate": 8.788371021510713e-06, "loss": 0.9125, "step": 2322 }, { "epoch": 0.25, "grad_norm": 1.8443540259073414, "learning_rate": 8.787234510012808e-06, "loss": 0.9179, "step": 2323 }, { "epoch": 0.25, "grad_norm": 1.6345214924799176, "learning_rate": 8.786097539299661e-06, "loss": 0.9267, "step": 2324 }, { "epoch": 0.25, "grad_norm": 1.7255855353514613, "learning_rate": 8.784960109509134e-06, "loss": 0.9207, "step": 2325 }, { "epoch": 0.25, "grad_norm": 1.0979906977760512, "learning_rate": 8.783822220779145e-06, "loss": 1.0681, "step": 2326 }, { "epoch": 0.25, "grad_norm": 1.7348962102969565, "learning_rate": 8.782683873247668e-06, "loss": 0.9084, "step": 2327 }, { "epoch": 0.25, "grad_norm": 1.8441644700609707, "learning_rate": 8.781545067052731e-06, "loss": 0.987, "step": 2328 }, { "epoch": 0.25, "grad_norm": 0.8384333832232267, "learning_rate": 8.780405802332415e-06, "loss": 1.0594, "step": 2329 }, { "epoch": 0.25, "grad_norm": 1.7279788907648446, "learning_rate": 8.779266079224865e-06, "loss": 0.8792, "step": 2330 }, { "epoch": 0.25, "grad_norm": 1.880889557595136, "learning_rate": 8.778125897868273e-06, "loss": 0.9369, "step": 2331 }, { "epoch": 0.25, "grad_norm": 1.770534084427665, "learning_rate": 8.77698525840089e-06, "loss": 0.9948, "step": 2332 }, { "epoch": 0.25, "grad_norm": 1.818618539897555, "learning_rate": 8.775844160961024e-06, "loss": 1.0091, "step": 2333 }, { "epoch": 0.25, "grad_norm": 1.817389357221285, "learning_rate": 8.774702605687036e-06, "loss": 0.9243, "step": 2334 }, { "epoch": 0.25, "grad_norm": 1.8010495590084818, "learning_rate": 8.773560592717343e-06, "loss": 0.9478, "step": 2335 }, { "epoch": 0.25, "grad_norm": 1.7394516123365835, "learning_rate": 8.772418122190418e-06, "loss": 1.001, "step": 2336 }, { "epoch": 0.25, "grad_norm": 1.711819195945204, "learning_rate": 8.771275194244792e-06, "loss": 0.9386, "step": 2337 }, { "epoch": 0.25, "grad_norm": 1.7259075524906125, "learning_rate": 8.770131809019046e-06, "loss": 0.9906, "step": 2338 }, { "epoch": 0.25, "grad_norm": 1.7382129719012371, "learning_rate": 8.768987966651822e-06, "loss": 0.9466, "step": 2339 }, { "epoch": 0.25, "grad_norm": 1.8146802931959587, "learning_rate": 8.767843667281812e-06, "loss": 0.9404, "step": 2340 }, { "epoch": 0.25, "grad_norm": 1.7317072043893071, "learning_rate": 8.766698911047769e-06, "loss": 0.9411, "step": 2341 }, { "epoch": 0.25, "grad_norm": 1.7813389007301232, "learning_rate": 8.765553698088496e-06, "loss": 0.9892, "step": 2342 }, { "epoch": 0.25, "grad_norm": 1.7206874526220564, "learning_rate": 8.764408028542854e-06, "loss": 0.8401, "step": 2343 }, { "epoch": 0.25, "grad_norm": 1.8952718015961223, "learning_rate": 8.763261902549763e-06, "loss": 0.938, "step": 2344 }, { "epoch": 0.25, "grad_norm": 1.7574914750095616, "learning_rate": 8.762115320248192e-06, "loss": 1.0005, "step": 2345 }, { "epoch": 0.25, "grad_norm": 1.8309393443195099, "learning_rate": 8.760968281777167e-06, "loss": 0.936, "step": 2346 }, { "epoch": 0.25, "grad_norm": 1.7996672331657386, "learning_rate": 8.759820787275774e-06, "loss": 0.9611, "step": 2347 }, { "epoch": 0.25, "grad_norm": 1.8383364494066772, "learning_rate": 8.758672836883146e-06, "loss": 1.0341, "step": 
2348 }, { "epoch": 0.25, "grad_norm": 1.7977294833673378, "learning_rate": 8.75752443073848e-06, "loss": 0.8896, "step": 2349 }, { "epoch": 0.25, "grad_norm": 1.8740986537909834, "learning_rate": 8.756375568981023e-06, "loss": 0.9897, "step": 2350 }, { "epoch": 0.25, "grad_norm": 1.7708249750936327, "learning_rate": 8.755226251750077e-06, "loss": 0.8813, "step": 2351 }, { "epoch": 0.25, "grad_norm": 1.6598312294614797, "learning_rate": 8.754076479185003e-06, "loss": 0.952, "step": 2352 }, { "epoch": 0.25, "grad_norm": 1.8258352363325507, "learning_rate": 8.752926251425212e-06, "loss": 0.8532, "step": 2353 }, { "epoch": 0.25, "grad_norm": 1.6479451241116814, "learning_rate": 8.751775568610175e-06, "loss": 0.8961, "step": 2354 }, { "epoch": 0.25, "grad_norm": 1.7806631689790196, "learning_rate": 8.750624430879417e-06, "loss": 1.0017, "step": 2355 }, { "epoch": 0.25, "grad_norm": 1.8487523174115212, "learning_rate": 8.749472838372515e-06, "loss": 0.9091, "step": 2356 }, { "epoch": 0.25, "grad_norm": 1.6742383741062217, "learning_rate": 8.748320791229106e-06, "loss": 0.8938, "step": 2357 }, { "epoch": 0.25, "grad_norm": 1.67708277650289, "learning_rate": 8.747168289588878e-06, "loss": 1.0039, "step": 2358 }, { "epoch": 0.25, "grad_norm": 1.6917890812979146, "learning_rate": 8.746015333591578e-06, "loss": 0.927, "step": 2359 }, { "epoch": 0.25, "grad_norm": 1.832077258992601, "learning_rate": 8.744861923377e-06, "loss": 0.8814, "step": 2360 }, { "epoch": 0.25, "grad_norm": 1.7527850331884436, "learning_rate": 8.74370805908501e-06, "loss": 0.9525, "step": 2361 }, { "epoch": 0.25, "grad_norm": 1.635377042651314, "learning_rate": 8.742553740855507e-06, "loss": 0.917, "step": 2362 }, { "epoch": 0.25, "grad_norm": 1.7561452525927792, "learning_rate": 8.74139896882846e-06, "loss": 0.8933, "step": 2363 }, { "epoch": 0.25, "grad_norm": 1.7629114708971814, "learning_rate": 8.74024374314389e-06, "loss": 0.8581, "step": 2364 }, { "epoch": 0.25, "grad_norm": 1.7560154093972253, "learning_rate": 8.739088063941875e-06, "loss": 1.0191, "step": 2365 }, { "epoch": 0.25, "grad_norm": 1.663945976342402, "learning_rate": 8.737931931362537e-06, "loss": 0.8691, "step": 2366 }, { "epoch": 0.25, "grad_norm": 1.809476637438118, "learning_rate": 8.736775345546067e-06, "loss": 0.9411, "step": 2367 }, { "epoch": 0.25, "grad_norm": 1.865570988133656, "learning_rate": 8.735618306632706e-06, "loss": 0.94, "step": 2368 }, { "epoch": 0.25, "grad_norm": 1.775128519795085, "learning_rate": 8.734460814762743e-06, "loss": 0.8632, "step": 2369 }, { "epoch": 0.25, "grad_norm": 1.8551373112201106, "learning_rate": 8.733302870076535e-06, "loss": 0.9393, "step": 2370 }, { "epoch": 0.25, "grad_norm": 1.7972050672680964, "learning_rate": 8.732144472714481e-06, "loss": 0.9851, "step": 2371 }, { "epoch": 0.25, "grad_norm": 1.7491128139640266, "learning_rate": 8.730985622817043e-06, "loss": 0.8879, "step": 2372 }, { "epoch": 0.26, "grad_norm": 1.297385513623497, "learning_rate": 8.729826320524737e-06, "loss": 1.0685, "step": 2373 }, { "epoch": 0.26, "grad_norm": 1.714010666604908, "learning_rate": 8.72866656597813e-06, "loss": 0.959, "step": 2374 }, { "epoch": 0.26, "grad_norm": 1.9010578853874043, "learning_rate": 8.727506359317848e-06, "loss": 0.8628, "step": 2375 }, { "epoch": 0.26, "grad_norm": 1.8073689963075528, "learning_rate": 8.72634570068457e-06, "loss": 0.9923, "step": 2376 }, { "epoch": 0.26, "grad_norm": 1.6556452906935304, "learning_rate": 8.725184590219028e-06, "loss": 0.9122, "step": 2377 }, { "epoch": 0.26, "grad_norm": 
1.7001238568006234, "learning_rate": 8.724023028062011e-06, "loss": 0.8997, "step": 2378 }, { "epoch": 0.26, "grad_norm": 1.7823354709004577, "learning_rate": 8.722861014354363e-06, "loss": 0.9451, "step": 2379 }, { "epoch": 0.26, "grad_norm": 1.7115430386256802, "learning_rate": 8.721698549236982e-06, "loss": 0.8489, "step": 2380 }, { "epoch": 0.26, "grad_norm": 1.9572711424428628, "learning_rate": 8.720535632850824e-06, "loss": 0.8814, "step": 2381 }, { "epoch": 0.26, "grad_norm": 1.6934963814752633, "learning_rate": 8.719372265336893e-06, "loss": 0.9849, "step": 2382 }, { "epoch": 0.26, "grad_norm": 1.6763720959230195, "learning_rate": 8.718208446836252e-06, "loss": 0.912, "step": 2383 }, { "epoch": 0.26, "grad_norm": 1.8438502631010527, "learning_rate": 8.717044177490016e-06, "loss": 0.9256, "step": 2384 }, { "epoch": 0.26, "grad_norm": 1.7492248495930325, "learning_rate": 8.715879457439362e-06, "loss": 0.9181, "step": 2385 }, { "epoch": 0.26, "grad_norm": 1.7102807679431347, "learning_rate": 8.714714286825512e-06, "loss": 0.8908, "step": 2386 }, { "epoch": 0.26, "grad_norm": 1.7879939358125525, "learning_rate": 8.713548665789748e-06, "loss": 0.9611, "step": 2387 }, { "epoch": 0.26, "grad_norm": 1.646028710925101, "learning_rate": 8.712382594473405e-06, "loss": 0.8294, "step": 2388 }, { "epoch": 0.26, "grad_norm": 2.2280230785073463, "learning_rate": 8.711216073017875e-06, "loss": 0.9631, "step": 2389 }, { "epoch": 0.26, "grad_norm": 1.8447117162571527, "learning_rate": 8.710049101564602e-06, "loss": 0.9885, "step": 2390 }, { "epoch": 0.26, "grad_norm": 1.6017146206312893, "learning_rate": 8.708881680255084e-06, "loss": 0.9062, "step": 2391 }, { "epoch": 0.26, "grad_norm": 1.7302911117537718, "learning_rate": 8.707713809230875e-06, "loss": 0.9538, "step": 2392 }, { "epoch": 0.26, "grad_norm": 1.1646342553194156, "learning_rate": 8.706545488633587e-06, "loss": 1.1047, "step": 2393 }, { "epoch": 0.26, "grad_norm": 1.7444288806812078, "learning_rate": 8.705376718604877e-06, "loss": 0.9527, "step": 2394 }, { "epoch": 0.26, "grad_norm": 1.7061999019572438, "learning_rate": 8.704207499286467e-06, "loss": 0.9304, "step": 2395 }, { "epoch": 0.26, "grad_norm": 1.7700908359470797, "learning_rate": 8.703037830820128e-06, "loss": 0.9077, "step": 2396 }, { "epoch": 0.26, "grad_norm": 1.7387988253035824, "learning_rate": 8.701867713347686e-06, "loss": 0.9201, "step": 2397 }, { "epoch": 0.26, "grad_norm": 1.6939284969817738, "learning_rate": 8.70069714701102e-06, "loss": 0.885, "step": 2398 }, { "epoch": 0.26, "grad_norm": 1.7729418488467943, "learning_rate": 8.699526131952065e-06, "loss": 0.9034, "step": 2399 }, { "epoch": 0.26, "grad_norm": 0.927350897791296, "learning_rate": 8.698354668312816e-06, "loss": 1.0159, "step": 2400 }, { "epoch": 0.26, "grad_norm": 1.7415217059364518, "learning_rate": 8.697182756235311e-06, "loss": 0.9468, "step": 2401 }, { "epoch": 0.26, "grad_norm": 1.6778799843881669, "learning_rate": 8.696010395861651e-06, "loss": 0.9927, "step": 2402 }, { "epoch": 0.26, "grad_norm": 1.6326139638711061, "learning_rate": 8.694837587333989e-06, "loss": 0.8399, "step": 2403 }, { "epoch": 0.26, "grad_norm": 1.6936960579103433, "learning_rate": 8.69366433079453e-06, "loss": 0.9573, "step": 2404 }, { "epoch": 0.26, "grad_norm": 1.8013399609731544, "learning_rate": 8.69249062638554e-06, "loss": 0.9319, "step": 2405 }, { "epoch": 0.26, "grad_norm": 1.6746773493408487, "learning_rate": 8.691316474249329e-06, "loss": 0.8991, "step": 2406 }, { "epoch": 0.26, "grad_norm": 1.5792474222323174, 
"learning_rate": 8.69014187452827e-06, "loss": 0.9551, "step": 2407 }, { "epoch": 0.26, "grad_norm": 1.760524958958359, "learning_rate": 8.688966827364788e-06, "loss": 0.9171, "step": 2408 }, { "epoch": 0.26, "grad_norm": 1.789919512915395, "learning_rate": 8.68779133290136e-06, "loss": 0.961, "step": 2409 }, { "epoch": 0.26, "grad_norm": 1.7480722007484824, "learning_rate": 8.686615391280519e-06, "loss": 0.8728, "step": 2410 }, { "epoch": 0.26, "grad_norm": 0.9431781705892988, "learning_rate": 8.68543900264485e-06, "loss": 1.1135, "step": 2411 }, { "epoch": 0.26, "grad_norm": 1.7175193055801685, "learning_rate": 8.684262167136999e-06, "loss": 0.9835, "step": 2412 }, { "epoch": 0.26, "grad_norm": 1.8171572262563251, "learning_rate": 8.683084884899658e-06, "loss": 0.9108, "step": 2413 }, { "epoch": 0.26, "grad_norm": 1.7062935530704395, "learning_rate": 8.681907156075578e-06, "loss": 0.9795, "step": 2414 }, { "epoch": 0.26, "grad_norm": 1.9111796229774067, "learning_rate": 8.68072898080756e-06, "loss": 1.0238, "step": 2415 }, { "epoch": 0.26, "grad_norm": 1.7438556891838364, "learning_rate": 8.679550359238464e-06, "loss": 0.8529, "step": 2416 }, { "epoch": 0.26, "grad_norm": 1.7239260046899318, "learning_rate": 8.678371291511202e-06, "loss": 0.8956, "step": 2417 }, { "epoch": 0.26, "grad_norm": 1.851184388358164, "learning_rate": 8.67719177776874e-06, "loss": 0.8644, "step": 2418 }, { "epoch": 0.26, "grad_norm": 1.763477375099109, "learning_rate": 8.676011818154097e-06, "loss": 0.9083, "step": 2419 }, { "epoch": 0.26, "grad_norm": 1.7367579075925281, "learning_rate": 8.67483141281035e-06, "loss": 0.9629, "step": 2420 }, { "epoch": 0.26, "grad_norm": 1.7886986941192753, "learning_rate": 8.673650561880621e-06, "loss": 0.9144, "step": 2421 }, { "epoch": 0.26, "grad_norm": 1.7694078922774432, "learning_rate": 8.6724692655081e-06, "loss": 0.8746, "step": 2422 }, { "epoch": 0.26, "grad_norm": 1.7176793700985777, "learning_rate": 8.671287523836019e-06, "loss": 0.8601, "step": 2423 }, { "epoch": 0.26, "grad_norm": 1.7498294114916535, "learning_rate": 8.670105337007668e-06, "loss": 0.8916, "step": 2424 }, { "epoch": 0.26, "grad_norm": 1.8011530259450104, "learning_rate": 8.668922705166392e-06, "loss": 0.9213, "step": 2425 }, { "epoch": 0.26, "grad_norm": 1.7455938948884329, "learning_rate": 8.667739628455592e-06, "loss": 0.8951, "step": 2426 }, { "epoch": 0.26, "grad_norm": 1.8462481157794397, "learning_rate": 8.666556107018714e-06, "loss": 0.8605, "step": 2427 }, { "epoch": 0.26, "grad_norm": 1.6585592695060376, "learning_rate": 8.665372140999268e-06, "loss": 0.936, "step": 2428 }, { "epoch": 0.26, "grad_norm": 1.7887363994059737, "learning_rate": 8.664187730540815e-06, "loss": 0.9371, "step": 2429 }, { "epoch": 0.26, "grad_norm": 1.8361394118161676, "learning_rate": 8.663002875786965e-06, "loss": 0.8979, "step": 2430 }, { "epoch": 0.26, "grad_norm": 1.8026266836105669, "learning_rate": 8.661817576881391e-06, "loss": 0.9943, "step": 2431 }, { "epoch": 0.26, "grad_norm": 1.9391349801098203, "learning_rate": 8.660631833967809e-06, "loss": 0.9364, "step": 2432 }, { "epoch": 0.26, "grad_norm": 1.7429623059001227, "learning_rate": 8.65944564719e-06, "loss": 0.8566, "step": 2433 }, { "epoch": 0.26, "grad_norm": 1.686506180334736, "learning_rate": 8.658259016691786e-06, "loss": 0.9001, "step": 2434 }, { "epoch": 0.26, "grad_norm": 0.9811582285349567, "learning_rate": 8.657071942617057e-06, "loss": 1.0456, "step": 2435 }, { "epoch": 0.26, "grad_norm": 1.6710423876887421, "learning_rate": 
8.655884425109747e-06, "loss": 0.8858, "step": 2436 }, { "epoch": 0.26, "grad_norm": 1.612682423384455, "learning_rate": 8.654696464313847e-06, "loss": 0.9394, "step": 2437 }, { "epoch": 0.26, "grad_norm": 1.6892512202744252, "learning_rate": 8.6535080603734e-06, "loss": 0.8818, "step": 2438 }, { "epoch": 0.26, "grad_norm": 1.7457146443398053, "learning_rate": 8.652319213432505e-06, "loss": 0.9272, "step": 2439 }, { "epoch": 0.26, "grad_norm": 1.7850855460225186, "learning_rate": 8.651129923635316e-06, "loss": 0.9046, "step": 2440 }, { "epoch": 0.26, "grad_norm": 1.8276663082594697, "learning_rate": 8.649940191126033e-06, "loss": 0.9186, "step": 2441 }, { "epoch": 0.26, "grad_norm": 1.8439766995236482, "learning_rate": 8.64875001604892e-06, "loss": 0.9325, "step": 2442 }, { "epoch": 0.26, "grad_norm": 1.7460498939797564, "learning_rate": 8.647559398548292e-06, "loss": 0.8839, "step": 2443 }, { "epoch": 0.26, "grad_norm": 1.7622776491958172, "learning_rate": 8.646368338768508e-06, "loss": 0.9733, "step": 2444 }, { "epoch": 0.26, "grad_norm": 1.6988092882535963, "learning_rate": 8.645176836853992e-06, "loss": 1.0066, "step": 2445 }, { "epoch": 0.26, "grad_norm": 1.8219745254671376, "learning_rate": 8.643984892949218e-06, "loss": 0.9651, "step": 2446 }, { "epoch": 0.26, "grad_norm": 1.7165600497102744, "learning_rate": 8.642792507198714e-06, "loss": 0.9566, "step": 2447 }, { "epoch": 0.26, "grad_norm": 1.8769730299455667, "learning_rate": 8.64159967974706e-06, "loss": 0.9096, "step": 2448 }, { "epoch": 0.26, "grad_norm": 1.8225456688865855, "learning_rate": 8.64040641073889e-06, "loss": 0.9469, "step": 2449 }, { "epoch": 0.26, "grad_norm": 1.7023033948943453, "learning_rate": 8.63921270031889e-06, "loss": 0.9382, "step": 2450 }, { "epoch": 0.26, "grad_norm": 1.7620911993920305, "learning_rate": 8.638018548631805e-06, "loss": 0.941, "step": 2451 }, { "epoch": 0.26, "grad_norm": 1.825480318983913, "learning_rate": 8.636823955822432e-06, "loss": 0.9322, "step": 2452 }, { "epoch": 0.26, "grad_norm": 1.7925792214649459, "learning_rate": 8.635628922035613e-06, "loss": 0.9164, "step": 2453 }, { "epoch": 0.26, "grad_norm": 1.6741443424863673, "learning_rate": 8.634433447416254e-06, "loss": 0.905, "step": 2454 }, { "epoch": 0.26, "grad_norm": 1.9030140317010513, "learning_rate": 8.63323753210931e-06, "loss": 0.9305, "step": 2455 }, { "epoch": 0.26, "grad_norm": 1.7988515425464195, "learning_rate": 8.63204117625979e-06, "loss": 0.9945, "step": 2456 }, { "epoch": 0.26, "grad_norm": 1.776995206313872, "learning_rate": 8.630844380012756e-06, "loss": 0.8569, "step": 2457 }, { "epoch": 0.26, "grad_norm": 1.8373709401308416, "learning_rate": 8.629647143513322e-06, "loss": 0.9302, "step": 2458 }, { "epoch": 0.26, "grad_norm": 1.6393368507149435, "learning_rate": 8.628449466906659e-06, "loss": 0.8511, "step": 2459 }, { "epoch": 0.26, "grad_norm": 1.7752240529930936, "learning_rate": 8.627251350337988e-06, "loss": 0.9113, "step": 2460 }, { "epoch": 0.26, "grad_norm": 1.7078088782028154, "learning_rate": 8.626052793952588e-06, "loss": 0.921, "step": 2461 }, { "epoch": 0.26, "grad_norm": 0.9985290487041949, "learning_rate": 8.624853797895784e-06, "loss": 1.0759, "step": 2462 }, { "epoch": 0.26, "grad_norm": 1.803737083695028, "learning_rate": 8.623654362312962e-06, "loss": 0.9, "step": 2463 }, { "epoch": 0.26, "grad_norm": 1.6602419193079645, "learning_rate": 8.622454487349556e-06, "loss": 0.8751, "step": 2464 }, { "epoch": 0.26, "grad_norm": 1.8216009125841937, "learning_rate": 8.621254173151052e-06, "loss": 
0.8768, "step": 2465 }, { "epoch": 0.27, "grad_norm": 1.6428471822120607, "learning_rate": 8.620053419862997e-06, "loss": 0.8774, "step": 2466 }, { "epoch": 0.27, "grad_norm": 1.7330307936764666, "learning_rate": 8.618852227630986e-06, "loss": 0.9113, "step": 2467 }, { "epoch": 0.27, "grad_norm": 1.7681556544352635, "learning_rate": 8.617650596600665e-06, "loss": 0.9404, "step": 2468 }, { "epoch": 0.27, "grad_norm": 1.6500920082390649, "learning_rate": 8.616448526917737e-06, "loss": 0.9437, "step": 2469 }, { "epoch": 0.27, "grad_norm": 1.65131610204214, "learning_rate": 8.615246018727957e-06, "loss": 0.9913, "step": 2470 }, { "epoch": 0.27, "grad_norm": 1.8256431686893348, "learning_rate": 8.614043072177134e-06, "loss": 0.8844, "step": 2471 }, { "epoch": 0.27, "grad_norm": 1.8814306174777384, "learning_rate": 8.61283968741113e-06, "loss": 0.9532, "step": 2472 }, { "epoch": 0.27, "grad_norm": 2.0197438215628654, "learning_rate": 8.611635864575858e-06, "loss": 0.9209, "step": 2473 }, { "epoch": 0.27, "grad_norm": 1.7703169583836382, "learning_rate": 8.610431603817287e-06, "loss": 0.9773, "step": 2474 }, { "epoch": 0.27, "grad_norm": 1.7188444903311781, "learning_rate": 8.609226905281434e-06, "loss": 0.9365, "step": 2475 }, { "epoch": 0.27, "grad_norm": 1.819938953155006, "learning_rate": 8.608021769114378e-06, "loss": 0.989, "step": 2476 }, { "epoch": 0.27, "grad_norm": 0.9910546052503112, "learning_rate": 8.606816195462244e-06, "loss": 1.0919, "step": 2477 }, { "epoch": 0.27, "grad_norm": 1.7452015374875418, "learning_rate": 8.605610184471211e-06, "loss": 0.8936, "step": 2478 }, { "epoch": 0.27, "grad_norm": 2.1070025151721157, "learning_rate": 8.604403736287513e-06, "loss": 0.9244, "step": 2479 }, { "epoch": 0.27, "grad_norm": 1.8328090029537525, "learning_rate": 8.603196851057435e-06, "loss": 0.9097, "step": 2480 }, { "epoch": 0.27, "grad_norm": 1.804737255921941, "learning_rate": 8.601989528927317e-06, "loss": 0.9181, "step": 2481 }, { "epoch": 0.27, "grad_norm": 1.745638928341168, "learning_rate": 8.600781770043552e-06, "loss": 0.9713, "step": 2482 }, { "epoch": 0.27, "grad_norm": 1.6144904681570982, "learning_rate": 8.599573574552581e-06, "loss": 0.9593, "step": 2483 }, { "epoch": 0.27, "grad_norm": 1.7376525515275763, "learning_rate": 8.598364942600907e-06, "loss": 0.9063, "step": 2484 }, { "epoch": 0.27, "grad_norm": 1.6888803369834962, "learning_rate": 8.597155874335077e-06, "loss": 0.9492, "step": 2485 }, { "epoch": 0.27, "grad_norm": 1.5743218824327403, "learning_rate": 8.595946369901696e-06, "loss": 1.0023, "step": 2486 }, { "epoch": 0.27, "grad_norm": 1.7296140973810867, "learning_rate": 8.594736429447421e-06, "loss": 0.9606, "step": 2487 }, { "epoch": 0.27, "grad_norm": 1.69745796803244, "learning_rate": 8.593526053118962e-06, "loss": 0.8518, "step": 2488 }, { "epoch": 0.27, "grad_norm": 1.721398944632299, "learning_rate": 8.592315241063078e-06, "loss": 0.9289, "step": 2489 }, { "epoch": 0.27, "grad_norm": 1.7899866306878855, "learning_rate": 8.591103993426588e-06, "loss": 0.8511, "step": 2490 }, { "epoch": 0.27, "grad_norm": 1.6502318137308107, "learning_rate": 8.589892310356359e-06, "loss": 0.9624, "step": 2491 }, { "epoch": 0.27, "grad_norm": 1.729151913684393, "learning_rate": 8.588680191999308e-06, "loss": 0.9343, "step": 2492 }, { "epoch": 0.27, "grad_norm": 1.7524639362629248, "learning_rate": 8.587467638502414e-06, "loss": 0.9633, "step": 2493 }, { "epoch": 0.27, "grad_norm": 1.7146310600428751, "learning_rate": 8.5862546500127e-06, "loss": 0.8997, "step": 2494 }, { 
"epoch": 0.27, "grad_norm": 1.7449427125454655, "learning_rate": 8.585041226677247e-06, "loss": 0.9698, "step": 2495 }, { "epoch": 0.27, "grad_norm": 1.6653126773387807, "learning_rate": 8.583827368643185e-06, "loss": 0.9545, "step": 2496 }, { "epoch": 0.27, "grad_norm": 1.7528122431961486, "learning_rate": 8.5826130760577e-06, "loss": 0.9571, "step": 2497 }, { "epoch": 0.27, "grad_norm": 1.7761618676346362, "learning_rate": 8.581398349068029e-06, "loss": 0.8903, "step": 2498 }, { "epoch": 0.27, "grad_norm": 1.7683761331634082, "learning_rate": 8.58018318782146e-06, "loss": 0.924, "step": 2499 }, { "epoch": 0.27, "grad_norm": 1.7588087591206654, "learning_rate": 8.578967592465337e-06, "loss": 0.9075, "step": 2500 }, { "epoch": 0.27, "grad_norm": 0.9804192363450144, "learning_rate": 8.577751563147055e-06, "loss": 1.0642, "step": 2501 }, { "epoch": 0.27, "grad_norm": 0.8547548659192321, "learning_rate": 8.576535100014061e-06, "loss": 1.0702, "step": 2502 }, { "epoch": 0.27, "grad_norm": 1.6639268978327129, "learning_rate": 8.575318203213856e-06, "loss": 0.9383, "step": 2503 }, { "epoch": 0.27, "grad_norm": 1.7278945023287724, "learning_rate": 8.574100872893993e-06, "loss": 1.0086, "step": 2504 }, { "epoch": 0.27, "grad_norm": 1.6940903583135796, "learning_rate": 8.572883109202077e-06, "loss": 0.9676, "step": 2505 }, { "epoch": 0.27, "grad_norm": 0.9869919268163219, "learning_rate": 8.571664912285766e-06, "loss": 1.0709, "step": 2506 }, { "epoch": 0.27, "grad_norm": 1.8090535589569063, "learning_rate": 8.570446282292772e-06, "loss": 0.9351, "step": 2507 }, { "epoch": 0.27, "grad_norm": 1.8912921598615418, "learning_rate": 8.569227219370857e-06, "loss": 0.8556, "step": 2508 }, { "epoch": 0.27, "grad_norm": 1.7051853885029447, "learning_rate": 8.568007723667837e-06, "loss": 0.9056, "step": 2509 }, { "epoch": 0.27, "grad_norm": 1.8980951280342635, "learning_rate": 8.56678779533158e-06, "loss": 0.9061, "step": 2510 }, { "epoch": 0.27, "grad_norm": 1.7708773045604274, "learning_rate": 8.565567434510005e-06, "loss": 0.916, "step": 2511 }, { "epoch": 0.27, "grad_norm": 1.8556942782473356, "learning_rate": 8.564346641351087e-06, "loss": 0.8957, "step": 2512 }, { "epoch": 0.27, "grad_norm": 0.909167355487353, "learning_rate": 8.56312541600285e-06, "loss": 1.0551, "step": 2513 }, { "epoch": 0.27, "grad_norm": 1.6711757428283902, "learning_rate": 8.561903758613372e-06, "loss": 0.9073, "step": 2514 }, { "epoch": 0.27, "grad_norm": 1.7133018292570101, "learning_rate": 8.560681669330784e-06, "loss": 0.887, "step": 2515 }, { "epoch": 0.27, "grad_norm": 1.7689822897776997, "learning_rate": 8.559459148303268e-06, "loss": 0.9585, "step": 2516 }, { "epoch": 0.27, "grad_norm": 1.7096580997932758, "learning_rate": 8.55823619567906e-06, "loss": 0.961, "step": 2517 }, { "epoch": 0.27, "grad_norm": 1.6829407649497672, "learning_rate": 8.557012811606444e-06, "loss": 0.8537, "step": 2518 }, { "epoch": 0.27, "grad_norm": 1.7099731029499927, "learning_rate": 8.555788996233765e-06, "loss": 0.8999, "step": 2519 }, { "epoch": 0.27, "grad_norm": 0.8852470774460868, "learning_rate": 8.554564749709408e-06, "loss": 1.1142, "step": 2520 }, { "epoch": 0.27, "grad_norm": 1.7981583572550466, "learning_rate": 8.553340072181823e-06, "loss": 0.9203, "step": 2521 }, { "epoch": 0.27, "grad_norm": 1.8324423951383861, "learning_rate": 8.552114963799503e-06, "loss": 0.864, "step": 2522 }, { "epoch": 0.27, "grad_norm": 1.7864038851540567, "learning_rate": 8.550889424710998e-06, "loss": 0.8824, "step": 2523 }, { "epoch": 0.27, "grad_norm": 
1.7558540844592265, "learning_rate": 8.549663455064908e-06, "loss": 0.9754, "step": 2524 }, { "epoch": 0.27, "grad_norm": 1.9061760803937868, "learning_rate": 8.548437055009886e-06, "loss": 0.9671, "step": 2525 }, { "epoch": 0.27, "grad_norm": 1.7413427651256974, "learning_rate": 8.54721022469464e-06, "loss": 0.8393, "step": 2526 }, { "epoch": 0.27, "grad_norm": 1.7560792264343383, "learning_rate": 8.545982964267923e-06, "loss": 0.9143, "step": 2527 }, { "epoch": 0.27, "grad_norm": 2.188509281740642, "learning_rate": 8.544755273878547e-06, "loss": 0.8766, "step": 2528 }, { "epoch": 0.27, "grad_norm": 1.745137770544269, "learning_rate": 8.543527153675374e-06, "loss": 1.0131, "step": 2529 }, { "epoch": 0.27, "grad_norm": 1.7448957256431548, "learning_rate": 8.542298603807317e-06, "loss": 0.9939, "step": 2530 }, { "epoch": 0.27, "grad_norm": 1.8167657441770644, "learning_rate": 8.541069624423343e-06, "loss": 0.9531, "step": 2531 }, { "epoch": 0.27, "grad_norm": 1.7079958232553756, "learning_rate": 8.539840215672468e-06, "loss": 0.9118, "step": 2532 }, { "epoch": 0.27, "grad_norm": 1.7073337206425023, "learning_rate": 8.538610377703764e-06, "loss": 0.9325, "step": 2533 }, { "epoch": 0.27, "grad_norm": 1.8936395381168936, "learning_rate": 8.537380110666351e-06, "loss": 0.9747, "step": 2534 }, { "epoch": 0.27, "grad_norm": 1.6904594663124592, "learning_rate": 8.536149414709405e-06, "loss": 0.972, "step": 2535 }, { "epoch": 0.27, "grad_norm": 1.7202107880289506, "learning_rate": 8.534918289982152e-06, "loss": 0.9147, "step": 2536 }, { "epoch": 0.27, "grad_norm": 1.7480324379164305, "learning_rate": 8.533686736633869e-06, "loss": 0.9063, "step": 2537 }, { "epoch": 0.27, "grad_norm": 1.7520262915967066, "learning_rate": 8.532454754813888e-06, "loss": 0.9765, "step": 2538 }, { "epoch": 0.27, "grad_norm": 0.9210625485925725, "learning_rate": 8.531222344671588e-06, "loss": 1.0435, "step": 2539 }, { "epoch": 0.27, "grad_norm": 1.797578736103574, "learning_rate": 8.529989506356407e-06, "loss": 0.9493, "step": 2540 }, { "epoch": 0.27, "grad_norm": 2.25241054458183, "learning_rate": 8.528756240017826e-06, "loss": 0.948, "step": 2541 }, { "epoch": 0.27, "grad_norm": 1.7160886596987808, "learning_rate": 8.527522545805386e-06, "loss": 0.8814, "step": 2542 }, { "epoch": 0.27, "grad_norm": 1.7440870045135477, "learning_rate": 8.526288423868676e-06, "loss": 0.9733, "step": 2543 }, { "epoch": 0.27, "grad_norm": 1.6975794019065396, "learning_rate": 8.525053874357338e-06, "loss": 0.9614, "step": 2544 }, { "epoch": 0.27, "grad_norm": 1.8456087674186978, "learning_rate": 8.523818897421066e-06, "loss": 1.0155, "step": 2545 }, { "epoch": 0.27, "grad_norm": 1.798447431066723, "learning_rate": 8.522583493209605e-06, "loss": 1.0064, "step": 2546 }, { "epoch": 0.27, "grad_norm": 1.7991590782940976, "learning_rate": 8.521347661872749e-06, "loss": 0.9288, "step": 2547 }, { "epoch": 0.27, "grad_norm": 1.7909268845913582, "learning_rate": 8.520111403560349e-06, "loss": 0.8793, "step": 2548 }, { "epoch": 0.27, "grad_norm": 1.7337252748638132, "learning_rate": 8.518874718422307e-06, "loss": 0.8773, "step": 2549 }, { "epoch": 0.27, "grad_norm": 1.712702095182618, "learning_rate": 8.517637606608574e-06, "loss": 0.8562, "step": 2550 }, { "epoch": 0.27, "grad_norm": 0.9531910222578568, "learning_rate": 8.516400068269153e-06, "loss": 1.082, "step": 2551 }, { "epoch": 0.27, "grad_norm": 1.848233379286405, "learning_rate": 8.515162103554102e-06, "loss": 0.9054, "step": 2552 }, { "epoch": 0.27, "grad_norm": 1.6528794441443069, 
"learning_rate": 8.513923712613526e-06, "loss": 1.0199, "step": 2553 }, { "epoch": 0.27, "grad_norm": 0.8384480951643789, "learning_rate": 8.512684895597587e-06, "loss": 1.0706, "step": 2554 }, { "epoch": 0.27, "grad_norm": 1.7960463627806293, "learning_rate": 8.511445652656493e-06, "loss": 0.8862, "step": 2555 }, { "epoch": 0.27, "grad_norm": 1.755474264901654, "learning_rate": 8.510205983940508e-06, "loss": 0.9448, "step": 2556 }, { "epoch": 0.27, "grad_norm": 1.720770044125286, "learning_rate": 8.508965889599948e-06, "loss": 0.9122, "step": 2557 }, { "epoch": 0.27, "grad_norm": 1.7639823782774813, "learning_rate": 8.507725369785174e-06, "loss": 0.9164, "step": 2558 }, { "epoch": 0.28, "grad_norm": 1.5653401031216214, "learning_rate": 8.506484424646606e-06, "loss": 0.9464, "step": 2559 }, { "epoch": 0.28, "grad_norm": 1.7079759477726433, "learning_rate": 8.505243054334714e-06, "loss": 0.9168, "step": 2560 }, { "epoch": 0.28, "grad_norm": 1.738999459501983, "learning_rate": 8.504001259000017e-06, "loss": 0.8935, "step": 2561 }, { "epoch": 0.28, "grad_norm": 1.8371456695428081, "learning_rate": 8.502759038793086e-06, "loss": 0.9721, "step": 2562 }, { "epoch": 0.28, "grad_norm": 1.5854288018038207, "learning_rate": 8.501516393864549e-06, "loss": 0.8508, "step": 2563 }, { "epoch": 0.28, "grad_norm": 1.7172489600826966, "learning_rate": 8.500273324365074e-06, "loss": 0.8782, "step": 2564 }, { "epoch": 0.28, "grad_norm": 1.7219230345632563, "learning_rate": 8.499029830445393e-06, "loss": 1.0533, "step": 2565 }, { "epoch": 0.28, "grad_norm": 1.7330296440402628, "learning_rate": 8.497785912256282e-06, "loss": 0.9504, "step": 2566 }, { "epoch": 0.28, "grad_norm": 1.6955362249412649, "learning_rate": 8.496541569948571e-06, "loss": 0.9245, "step": 2567 }, { "epoch": 0.28, "grad_norm": 1.718202823065509, "learning_rate": 8.495296803673138e-06, "loss": 0.8904, "step": 2568 }, { "epoch": 0.28, "grad_norm": 1.6827154076704776, "learning_rate": 8.49405161358092e-06, "loss": 0.8937, "step": 2569 }, { "epoch": 0.28, "grad_norm": 1.7376731151980855, "learning_rate": 8.492805999822898e-06, "loss": 0.9812, "step": 2570 }, { "epoch": 0.28, "grad_norm": 1.9386421883754967, "learning_rate": 8.491559962550104e-06, "loss": 0.9138, "step": 2571 }, { "epoch": 0.28, "grad_norm": 1.8945497265588094, "learning_rate": 8.49031350191363e-06, "loss": 0.9849, "step": 2572 }, { "epoch": 0.28, "grad_norm": 1.7956170935595999, "learning_rate": 8.48906661806461e-06, "loss": 0.9508, "step": 2573 }, { "epoch": 0.28, "grad_norm": 1.7754365932910854, "learning_rate": 8.487819311154233e-06, "loss": 0.9535, "step": 2574 }, { "epoch": 0.28, "grad_norm": 1.8794382384300188, "learning_rate": 8.486571581333743e-06, "loss": 0.8962, "step": 2575 }, { "epoch": 0.28, "grad_norm": 1.7937174306410917, "learning_rate": 8.485323428754427e-06, "loss": 0.9768, "step": 2576 }, { "epoch": 0.28, "grad_norm": 1.7773282609429824, "learning_rate": 8.48407485356763e-06, "loss": 0.9078, "step": 2577 }, { "epoch": 0.28, "grad_norm": 1.8646867368635935, "learning_rate": 8.482825855924743e-06, "loss": 0.8525, "step": 2578 }, { "epoch": 0.28, "grad_norm": 1.6901706847202045, "learning_rate": 8.481576435977217e-06, "loss": 0.9423, "step": 2579 }, { "epoch": 0.28, "grad_norm": 1.7628442634298063, "learning_rate": 8.480326593876545e-06, "loss": 0.9239, "step": 2580 }, { "epoch": 0.28, "grad_norm": 1.1094219963095178, "learning_rate": 8.479076329774274e-06, "loss": 1.0517, "step": 2581 }, { "epoch": 0.28, "grad_norm": 1.6102205279164097, "learning_rate": 
8.477825643822003e-06, "loss": 0.9379, "step": 2582 }, { "epoch": 0.28, "grad_norm": 1.7385477973856034, "learning_rate": 8.476574536171385e-06, "loss": 0.9449, "step": 2583 }, { "epoch": 0.28, "grad_norm": 1.6785735707479192, "learning_rate": 8.475323006974116e-06, "loss": 0.9834, "step": 2584 }, { "epoch": 0.28, "grad_norm": 1.7157062630553985, "learning_rate": 8.474071056381953e-06, "loss": 0.8337, "step": 2585 }, { "epoch": 0.28, "grad_norm": 1.7320922808786041, "learning_rate": 8.472818684546697e-06, "loss": 0.9843, "step": 2586 }, { "epoch": 0.28, "grad_norm": 1.773813496638385, "learning_rate": 8.471565891620204e-06, "loss": 0.923, "step": 2587 }, { "epoch": 0.28, "grad_norm": 1.6820726665076502, "learning_rate": 8.470312677754377e-06, "loss": 0.8845, "step": 2588 }, { "epoch": 0.28, "grad_norm": 2.0732325247699444, "learning_rate": 8.469059043101176e-06, "loss": 0.8959, "step": 2589 }, { "epoch": 0.28, "grad_norm": 1.9483009257529063, "learning_rate": 8.467804987812603e-06, "loss": 0.9284, "step": 2590 }, { "epoch": 0.28, "grad_norm": 1.6916363965829946, "learning_rate": 8.466550512040722e-06, "loss": 0.9187, "step": 2591 }, { "epoch": 0.28, "grad_norm": 1.6975984320722732, "learning_rate": 8.465295615937641e-06, "loss": 0.8892, "step": 2592 }, { "epoch": 0.28, "grad_norm": 1.7753475867397381, "learning_rate": 8.464040299655518e-06, "loss": 0.8567, "step": 2593 }, { "epoch": 0.28, "grad_norm": 1.747386784806457, "learning_rate": 8.462784563346567e-06, "loss": 0.8873, "step": 2594 }, { "epoch": 0.28, "grad_norm": 1.6508356519570626, "learning_rate": 8.461528407163051e-06, "loss": 0.9508, "step": 2595 }, { "epoch": 0.28, "grad_norm": 0.9552568315655434, "learning_rate": 8.46027183125728e-06, "loss": 1.0809, "step": 2596 }, { "epoch": 0.28, "grad_norm": 1.7127428165651504, "learning_rate": 8.459014835781622e-06, "loss": 0.9326, "step": 2597 }, { "epoch": 0.28, "grad_norm": 1.7751405432908212, "learning_rate": 8.457757420888487e-06, "loss": 0.8169, "step": 2598 }, { "epoch": 0.28, "grad_norm": 1.7591020817304277, "learning_rate": 8.456499586730346e-06, "loss": 0.9029, "step": 2599 }, { "epoch": 0.28, "grad_norm": 1.7550604614597436, "learning_rate": 8.455241333459714e-06, "loss": 0.9044, "step": 2600 }, { "epoch": 0.28, "grad_norm": 1.8192353465068831, "learning_rate": 8.453982661229158e-06, "loss": 0.911, "step": 2601 }, { "epoch": 0.28, "grad_norm": 1.8249859401089377, "learning_rate": 8.452723570191298e-06, "loss": 0.8746, "step": 2602 }, { "epoch": 0.28, "grad_norm": 1.703184538855868, "learning_rate": 8.4514640604988e-06, "loss": 0.9262, "step": 2603 }, { "epoch": 0.28, "grad_norm": 1.7641586546249024, "learning_rate": 8.450204132304387e-06, "loss": 0.8108, "step": 2604 }, { "epoch": 0.28, "grad_norm": 1.7372717243764828, "learning_rate": 8.448943785760827e-06, "loss": 0.9382, "step": 2605 }, { "epoch": 0.28, "grad_norm": 1.8409697637621698, "learning_rate": 8.447683021020943e-06, "loss": 0.941, "step": 2606 }, { "epoch": 0.28, "grad_norm": 1.6909961113464358, "learning_rate": 8.446421838237606e-06, "loss": 0.8555, "step": 2607 }, { "epoch": 0.28, "grad_norm": 1.7745001724058014, "learning_rate": 8.445160237563742e-06, "loss": 0.9677, "step": 2608 }, { "epoch": 0.28, "grad_norm": 1.7799225062312554, "learning_rate": 8.44389821915232e-06, "loss": 0.8638, "step": 2609 }, { "epoch": 0.28, "grad_norm": 1.6954566451504058, "learning_rate": 8.442635783156365e-06, "loss": 0.8486, "step": 2610 }, { "epoch": 0.28, "grad_norm": 1.7981409186172237, "learning_rate": 8.441372929728955e-06, 
"loss": 0.8815, "step": 2611 }, { "epoch": 0.28, "grad_norm": 1.0482956565179307, "learning_rate": 8.44010965902321e-06, "loss": 1.0879, "step": 2612 }, { "epoch": 0.28, "grad_norm": 1.7584222387367217, "learning_rate": 8.438845971192313e-06, "loss": 0.937, "step": 2613 }, { "epoch": 0.28, "grad_norm": 1.9447779892074077, "learning_rate": 8.437581866389483e-06, "loss": 0.9686, "step": 2614 }, { "epoch": 0.28, "grad_norm": 1.715258059400726, "learning_rate": 8.436317344768004e-06, "loss": 0.9098, "step": 2615 }, { "epoch": 0.28, "grad_norm": 1.6744470877283355, "learning_rate": 8.435052406481196e-06, "loss": 0.8853, "step": 2616 }, { "epoch": 0.28, "grad_norm": 1.9504950314102734, "learning_rate": 8.433787051682444e-06, "loss": 0.9599, "step": 2617 }, { "epoch": 0.28, "grad_norm": 1.6897174407134756, "learning_rate": 8.432521280525174e-06, "loss": 0.9069, "step": 2618 }, { "epoch": 0.28, "grad_norm": 1.6534023091249135, "learning_rate": 8.431255093162864e-06, "loss": 0.9883, "step": 2619 }, { "epoch": 0.28, "grad_norm": 1.6663760692599767, "learning_rate": 8.429988489749045e-06, "loss": 0.9209, "step": 2620 }, { "epoch": 0.28, "grad_norm": 1.8502706037166818, "learning_rate": 8.428721470437296e-06, "loss": 0.9751, "step": 2621 }, { "epoch": 0.28, "grad_norm": 1.855344492547839, "learning_rate": 8.42745403538125e-06, "loss": 0.8843, "step": 2622 }, { "epoch": 0.28, "grad_norm": 1.1316661399333625, "learning_rate": 8.426186184734585e-06, "loss": 1.0403, "step": 2623 }, { "epoch": 0.28, "grad_norm": 1.8005642837293718, "learning_rate": 8.424917918651032e-06, "loss": 0.8829, "step": 2624 }, { "epoch": 0.28, "grad_norm": 1.8743843790242753, "learning_rate": 8.423649237284375e-06, "loss": 0.9562, "step": 2625 }, { "epoch": 0.28, "grad_norm": 1.797817086744795, "learning_rate": 8.422380140788446e-06, "loss": 0.9994, "step": 2626 }, { "epoch": 0.28, "grad_norm": 1.7396316483399437, "learning_rate": 8.421110629317123e-06, "loss": 0.9243, "step": 2627 }, { "epoch": 0.28, "grad_norm": 1.8260718730516479, "learning_rate": 8.419840703024344e-06, "loss": 0.918, "step": 2628 }, { "epoch": 0.28, "grad_norm": 1.7147826556676402, "learning_rate": 8.418570362064092e-06, "loss": 1.0168, "step": 2629 }, { "epoch": 0.28, "grad_norm": 1.7155964979321885, "learning_rate": 8.417299606590394e-06, "loss": 0.9469, "step": 2630 }, { "epoch": 0.28, "grad_norm": 1.9421527119116255, "learning_rate": 8.41602843675734e-06, "loss": 0.8977, "step": 2631 }, { "epoch": 0.28, "grad_norm": 1.6452464874127608, "learning_rate": 8.41475685271906e-06, "loss": 0.9006, "step": 2632 }, { "epoch": 0.28, "grad_norm": 1.7015816933350523, "learning_rate": 8.413484854629738e-06, "loss": 0.9426, "step": 2633 }, { "epoch": 0.28, "grad_norm": 1.8851094271960538, "learning_rate": 8.412212442643611e-06, "loss": 0.8894, "step": 2634 }, { "epoch": 0.28, "grad_norm": 1.634158953089184, "learning_rate": 8.41093961691496e-06, "loss": 0.9167, "step": 2635 }, { "epoch": 0.28, "grad_norm": 1.7876957785658818, "learning_rate": 8.409666377598125e-06, "loss": 0.7817, "step": 2636 }, { "epoch": 0.28, "grad_norm": 1.849030635849866, "learning_rate": 8.408392724847483e-06, "loss": 0.9658, "step": 2637 }, { "epoch": 0.28, "grad_norm": 1.8439699668543514, "learning_rate": 8.407118658817475e-06, "loss": 1.0209, "step": 2638 }, { "epoch": 0.28, "grad_norm": 1.7982761649350658, "learning_rate": 8.405844179662582e-06, "loss": 1.0246, "step": 2639 }, { "epoch": 0.28, "grad_norm": 1.695499513511908, "learning_rate": 8.40456928753734e-06, "loss": 0.9044, "step": 2640 
}, { "epoch": 0.28, "grad_norm": 1.1288580516136915, "learning_rate": 8.403293982596336e-06, "loss": 1.0903, "step": 2641 }, { "epoch": 0.28, "grad_norm": 1.835467699708121, "learning_rate": 8.402018264994203e-06, "loss": 0.9588, "step": 2642 }, { "epoch": 0.28, "grad_norm": 1.7370235041992217, "learning_rate": 8.400742134885627e-06, "loss": 0.8996, "step": 2643 }, { "epoch": 0.28, "grad_norm": 1.6527946909522542, "learning_rate": 8.399465592425342e-06, "loss": 0.9015, "step": 2644 }, { "epoch": 0.28, "grad_norm": 1.6916102380545264, "learning_rate": 8.398188637768136e-06, "loss": 0.8555, "step": 2645 }, { "epoch": 0.28, "grad_norm": 1.709860675499942, "learning_rate": 8.396911271068842e-06, "loss": 0.912, "step": 2646 }, { "epoch": 0.28, "grad_norm": 1.7404770532970635, "learning_rate": 8.395633492482345e-06, "loss": 0.937, "step": 2647 }, { "epoch": 0.28, "grad_norm": 1.859996815597986, "learning_rate": 8.394355302163578e-06, "loss": 0.9668, "step": 2648 }, { "epoch": 0.28, "grad_norm": 1.7359267131818361, "learning_rate": 8.393076700267534e-06, "loss": 0.9, "step": 2649 }, { "epoch": 0.28, "grad_norm": 1.7100969269928719, "learning_rate": 8.391797686949238e-06, "loss": 0.9554, "step": 2650 }, { "epoch": 0.28, "grad_norm": 1.2409941394393897, "learning_rate": 8.390518262363781e-06, "loss": 1.051, "step": 2651 }, { "epoch": 0.29, "grad_norm": 1.743061164349289, "learning_rate": 8.389238426666294e-06, "loss": 0.8602, "step": 2652 }, { "epoch": 0.29, "grad_norm": 1.7255311351340976, "learning_rate": 8.387958180011966e-06, "loss": 0.952, "step": 2653 }, { "epoch": 0.29, "grad_norm": 2.840780779020878, "learning_rate": 8.386677522556026e-06, "loss": 0.9251, "step": 2654 }, { "epoch": 0.29, "grad_norm": 1.818074267855949, "learning_rate": 8.385396454453763e-06, "loss": 0.9531, "step": 2655 }, { "epoch": 0.29, "grad_norm": 1.8241573473407016, "learning_rate": 8.384114975860507e-06, "loss": 0.8904, "step": 2656 }, { "epoch": 0.29, "grad_norm": 1.8143130407628905, "learning_rate": 8.382833086931643e-06, "loss": 0.955, "step": 2657 }, { "epoch": 0.29, "grad_norm": 1.7082477282834667, "learning_rate": 8.381550787822606e-06, "loss": 0.9442, "step": 2658 }, { "epoch": 0.29, "grad_norm": 1.813769931004606, "learning_rate": 8.380268078688877e-06, "loss": 0.9379, "step": 2659 }, { "epoch": 0.29, "grad_norm": 0.9471968912168555, "learning_rate": 8.378984959685992e-06, "loss": 1.0636, "step": 2660 }, { "epoch": 0.29, "grad_norm": 0.8823170383535623, "learning_rate": 8.377701430969529e-06, "loss": 1.0113, "step": 2661 }, { "epoch": 0.29, "grad_norm": 1.782832459748016, "learning_rate": 8.376417492695123e-06, "loss": 0.9564, "step": 2662 }, { "epoch": 0.29, "grad_norm": 1.9258122328249447, "learning_rate": 8.375133145018457e-06, "loss": 0.9549, "step": 2663 }, { "epoch": 0.29, "grad_norm": 1.9066642277122932, "learning_rate": 8.37384838809526e-06, "loss": 0.9075, "step": 2664 }, { "epoch": 0.29, "grad_norm": 1.7743454664416303, "learning_rate": 8.372563222081316e-06, "loss": 0.9824, "step": 2665 }, { "epoch": 0.29, "grad_norm": 1.7473475147276225, "learning_rate": 8.371277647132454e-06, "loss": 0.9312, "step": 2666 }, { "epoch": 0.29, "grad_norm": 1.6788261542554566, "learning_rate": 8.369991663404556e-06, "loss": 0.9184, "step": 2667 }, { "epoch": 0.29, "grad_norm": 1.683602206538167, "learning_rate": 8.368705271053547e-06, "loss": 0.9634, "step": 2668 }, { "epoch": 0.29, "grad_norm": 1.9017150511640872, "learning_rate": 8.367418470235412e-06, "loss": 0.8989, "step": 2669 }, { "epoch": 0.29, "grad_norm": 
1.6083489522985317, "learning_rate": 8.366131261106179e-06, "loss": 0.7707, "step": 2670 }, { "epoch": 0.29, "grad_norm": 1.6876181223498112, "learning_rate": 8.364843643821928e-06, "loss": 0.9048, "step": 2671 }, { "epoch": 0.29, "grad_norm": 1.647664941227452, "learning_rate": 8.36355561853878e-06, "loss": 0.9269, "step": 2672 }, { "epoch": 0.29, "grad_norm": 1.7622096331806898, "learning_rate": 8.36226718541292e-06, "loss": 0.9392, "step": 2673 }, { "epoch": 0.29, "grad_norm": 1.665938051692417, "learning_rate": 8.360978344600572e-06, "loss": 0.9564, "step": 2674 }, { "epoch": 0.29, "grad_norm": 1.7172261005916067, "learning_rate": 8.35968909625801e-06, "loss": 0.9068, "step": 2675 }, { "epoch": 0.29, "grad_norm": 1.678394513621128, "learning_rate": 8.358399440541567e-06, "loss": 0.8992, "step": 2676 }, { "epoch": 0.29, "grad_norm": 1.3715419765077594, "learning_rate": 8.357109377607611e-06, "loss": 1.0465, "step": 2677 }, { "epoch": 0.29, "grad_norm": 1.7960200173193683, "learning_rate": 8.355818907612569e-06, "loss": 0.9591, "step": 2678 }, { "epoch": 0.29, "grad_norm": 1.763986996707925, "learning_rate": 8.354528030712916e-06, "loss": 0.9313, "step": 2679 }, { "epoch": 0.29, "grad_norm": 1.8399837515510276, "learning_rate": 8.353236747065175e-06, "loss": 0.9687, "step": 2680 }, { "epoch": 0.29, "grad_norm": 1.7314591098805385, "learning_rate": 8.351945056825918e-06, "loss": 0.9257, "step": 2681 }, { "epoch": 0.29, "grad_norm": 1.6782753335080047, "learning_rate": 8.350652960151766e-06, "loss": 0.9402, "step": 2682 }, { "epoch": 0.29, "grad_norm": 1.746513737952812, "learning_rate": 8.349360457199392e-06, "loss": 0.8909, "step": 2683 }, { "epoch": 0.29, "grad_norm": 1.8061205911221019, "learning_rate": 8.348067548125515e-06, "loss": 0.9114, "step": 2684 }, { "epoch": 0.29, "grad_norm": 1.7596610277221119, "learning_rate": 8.346774233086906e-06, "loss": 0.9239, "step": 2685 }, { "epoch": 0.29, "grad_norm": 1.5327229850771662, "learning_rate": 8.34548051224038e-06, "loss": 0.8992, "step": 2686 }, { "epoch": 0.29, "grad_norm": 1.6502873466957853, "learning_rate": 8.344186385742811e-06, "loss": 0.9044, "step": 2687 }, { "epoch": 0.29, "grad_norm": 1.9395286084165593, "learning_rate": 8.342891853751114e-06, "loss": 0.9138, "step": 2688 }, { "epoch": 0.29, "grad_norm": 1.7330353443147073, "learning_rate": 8.341596916422255e-06, "loss": 0.9409, "step": 2689 }, { "epoch": 0.29, "grad_norm": 1.6875653507488733, "learning_rate": 8.34030157391325e-06, "loss": 0.922, "step": 2690 }, { "epoch": 0.29, "grad_norm": 1.1327084874262654, "learning_rate": 8.33900582638116e-06, "loss": 1.0953, "step": 2691 }, { "epoch": 0.29, "grad_norm": 1.7702519974024964, "learning_rate": 8.337709673983106e-06, "loss": 0.8898, "step": 2692 }, { "epoch": 0.29, "grad_norm": 1.7994040928248924, "learning_rate": 8.336413116876245e-06, "loss": 0.8392, "step": 2693 }, { "epoch": 0.29, "grad_norm": 1.912264281259712, "learning_rate": 8.335116155217793e-06, "loss": 0.9281, "step": 2694 }, { "epoch": 0.29, "grad_norm": 1.7735028590372974, "learning_rate": 8.333818789165009e-06, "loss": 0.9877, "step": 2695 }, { "epoch": 0.29, "grad_norm": 1.7630128282071538, "learning_rate": 8.332521018875202e-06, "loss": 1.0353, "step": 2696 }, { "epoch": 0.29, "grad_norm": 2.2983693167968013, "learning_rate": 8.331222844505732e-06, "loss": 0.8948, "step": 2697 }, { "epoch": 0.29, "grad_norm": 1.7637670354178085, "learning_rate": 8.32992426621401e-06, "loss": 0.9057, "step": 2698 }, { "epoch": 0.29, "grad_norm": 1.7155931075709128, 
"learning_rate": 8.32862528415749e-06, "loss": 0.9213, "step": 2699 }, { "epoch": 0.29, "grad_norm": 1.7810209448407306, "learning_rate": 8.327325898493677e-06, "loss": 0.9166, "step": 2700 }, { "epoch": 0.29, "grad_norm": 0.8805425341692593, "learning_rate": 8.32602610938013e-06, "loss": 1.0723, "step": 2701 }, { "epoch": 0.29, "grad_norm": 1.7871306816141626, "learning_rate": 8.32472591697445e-06, "loss": 0.8776, "step": 2702 }, { "epoch": 0.29, "grad_norm": 1.7662755858184496, "learning_rate": 8.323425321434291e-06, "loss": 0.9523, "step": 2703 }, { "epoch": 0.29, "grad_norm": 1.7951216627466384, "learning_rate": 8.322124322917354e-06, "loss": 0.8704, "step": 2704 }, { "epoch": 0.29, "grad_norm": 1.7425694502880882, "learning_rate": 8.320822921581388e-06, "loss": 0.9157, "step": 2705 }, { "epoch": 0.29, "grad_norm": 1.7295089647466184, "learning_rate": 8.319521117584195e-06, "loss": 0.8467, "step": 2706 }, { "epoch": 0.29, "grad_norm": 1.6785852328999458, "learning_rate": 8.318218911083622e-06, "loss": 0.8991, "step": 2707 }, { "epoch": 0.29, "grad_norm": 1.985076371177191, "learning_rate": 8.316916302237568e-06, "loss": 0.9487, "step": 2708 }, { "epoch": 0.29, "grad_norm": 0.8982798217976038, "learning_rate": 8.315613291203977e-06, "loss": 1.0459, "step": 2709 }, { "epoch": 0.29, "grad_norm": 1.7353343579690588, "learning_rate": 8.314309878140843e-06, "loss": 0.9358, "step": 2710 }, { "epoch": 0.29, "grad_norm": 1.7953583492918699, "learning_rate": 8.31300606320621e-06, "loss": 0.9766, "step": 2711 }, { "epoch": 0.29, "grad_norm": 1.718497371982298, "learning_rate": 8.311701846558172e-06, "loss": 0.8877, "step": 2712 }, { "epoch": 0.29, "grad_norm": 0.8574492711897579, "learning_rate": 8.310397228354864e-06, "loss": 1.0984, "step": 2713 }, { "epoch": 0.29, "grad_norm": 1.6380167005396855, "learning_rate": 8.309092208754483e-06, "loss": 0.9213, "step": 2714 }, { "epoch": 0.29, "grad_norm": 1.782591668735152, "learning_rate": 8.307786787915262e-06, "loss": 0.8366, "step": 2715 }, { "epoch": 0.29, "grad_norm": 1.6679433014847516, "learning_rate": 8.306480965995488e-06, "loss": 0.9905, "step": 2716 }, { "epoch": 0.29, "grad_norm": 1.6163176954143799, "learning_rate": 8.3051747431535e-06, "loss": 0.9058, "step": 2717 }, { "epoch": 0.29, "grad_norm": 1.7018227590720274, "learning_rate": 8.30386811954768e-06, "loss": 0.8824, "step": 2718 }, { "epoch": 0.29, "grad_norm": 1.6957234767748872, "learning_rate": 8.30256109533646e-06, "loss": 0.8786, "step": 2719 }, { "epoch": 0.29, "grad_norm": 1.8324622108291506, "learning_rate": 8.30125367067832e-06, "loss": 1.0019, "step": 2720 }, { "epoch": 0.29, "grad_norm": 1.7154783053720553, "learning_rate": 8.29994584573179e-06, "loss": 0.9289, "step": 2721 }, { "epoch": 0.29, "grad_norm": 1.6988482463980301, "learning_rate": 8.298637620655454e-06, "loss": 0.9085, "step": 2722 }, { "epoch": 0.29, "grad_norm": 1.7942469465855428, "learning_rate": 8.297328995607931e-06, "loss": 0.9633, "step": 2723 }, { "epoch": 0.29, "grad_norm": 1.6785819255676184, "learning_rate": 8.296019970747901e-06, "loss": 0.9594, "step": 2724 }, { "epoch": 0.29, "grad_norm": 1.746380807543437, "learning_rate": 8.294710546234086e-06, "loss": 0.8939, "step": 2725 }, { "epoch": 0.29, "grad_norm": 1.6461634906873621, "learning_rate": 8.29340072222526e-06, "loss": 0.8934, "step": 2726 }, { "epoch": 0.29, "grad_norm": 0.929523603310582, "learning_rate": 8.292090498880242e-06, "loss": 1.0736, "step": 2727 }, { "epoch": 0.29, "grad_norm": 1.7228344104052555, "learning_rate": 
8.2907798763579e-06, "loss": 0.8662, "step": 2728 }, { "epoch": 0.29, "grad_norm": 2.203611096269428, "learning_rate": 8.289468854817153e-06, "loss": 0.9843, "step": 2729 }, { "epoch": 0.29, "grad_norm": 0.8476155680000359, "learning_rate": 8.288157434416966e-06, "loss": 1.1097, "step": 2730 }, { "epoch": 0.29, "grad_norm": 2.092711603233642, "learning_rate": 8.286845615316357e-06, "loss": 0.8866, "step": 2731 }, { "epoch": 0.29, "grad_norm": 1.631884870900226, "learning_rate": 8.285533397674384e-06, "loss": 0.8052, "step": 2732 }, { "epoch": 0.29, "grad_norm": 1.808410889766534, "learning_rate": 8.284220781650158e-06, "loss": 0.9161, "step": 2733 }, { "epoch": 0.29, "grad_norm": 1.6264433908190032, "learning_rate": 8.282907767402842e-06, "loss": 0.9334, "step": 2734 }, { "epoch": 0.29, "grad_norm": 1.7557369368203102, "learning_rate": 8.281594355091642e-06, "loss": 0.9406, "step": 2735 }, { "epoch": 0.29, "grad_norm": 0.8772571145021079, "learning_rate": 8.280280544875812e-06, "loss": 1.06, "step": 2736 }, { "epoch": 0.29, "grad_norm": 1.7095666854816451, "learning_rate": 8.278966336914656e-06, "loss": 0.933, "step": 2737 }, { "epoch": 0.29, "grad_norm": 1.7816063460096223, "learning_rate": 8.277651731367528e-06, "loss": 0.8956, "step": 2738 }, { "epoch": 0.29, "grad_norm": 1.788961895320356, "learning_rate": 8.276336728393827e-06, "loss": 0.931, "step": 2739 }, { "epoch": 0.29, "grad_norm": 1.7373480983017346, "learning_rate": 8.275021328153005e-06, "loss": 0.978, "step": 2740 }, { "epoch": 0.29, "grad_norm": 1.7042862134644112, "learning_rate": 8.273705530804554e-06, "loss": 1.0133, "step": 2741 }, { "epoch": 0.29, "grad_norm": 1.899507680529035, "learning_rate": 8.272389336508022e-06, "loss": 0.9805, "step": 2742 }, { "epoch": 0.29, "grad_norm": 1.8309833873559225, "learning_rate": 8.271072745423001e-06, "loss": 0.9816, "step": 2743 }, { "epoch": 0.29, "grad_norm": 1.699465801321826, "learning_rate": 8.269755757709132e-06, "loss": 0.9308, "step": 2744 }, { "epoch": 0.3, "grad_norm": 0.9192797505869506, "learning_rate": 8.268438373526106e-06, "loss": 1.046, "step": 2745 }, { "epoch": 0.3, "grad_norm": 1.6999154119244861, "learning_rate": 8.267120593033659e-06, "loss": 0.891, "step": 2746 }, { "epoch": 0.3, "grad_norm": 1.8664738352806673, "learning_rate": 8.265802416391577e-06, "loss": 0.8752, "step": 2747 }, { "epoch": 0.3, "grad_norm": 1.7808979863105727, "learning_rate": 8.264483843759692e-06, "loss": 0.9879, "step": 2748 }, { "epoch": 0.3, "grad_norm": 0.8965778774583174, "learning_rate": 8.263164875297888e-06, "loss": 1.0868, "step": 2749 }, { "epoch": 0.3, "grad_norm": 1.6631241854303789, "learning_rate": 8.261845511166092e-06, "loss": 0.9544, "step": 2750 }, { "epoch": 0.3, "grad_norm": 1.8049196326966428, "learning_rate": 8.260525751524282e-06, "loss": 0.9211, "step": 2751 }, { "epoch": 0.3, "grad_norm": 1.7160576619567764, "learning_rate": 8.259205596532485e-06, "loss": 0.9124, "step": 2752 }, { "epoch": 0.3, "grad_norm": 1.686019164171783, "learning_rate": 8.257885046350773e-06, "loss": 0.9157, "step": 2753 }, { "epoch": 0.3, "grad_norm": 1.7221077898374724, "learning_rate": 8.256564101139266e-06, "loss": 0.8766, "step": 2754 }, { "epoch": 0.3, "grad_norm": 1.8231759610097789, "learning_rate": 8.255242761058136e-06, "loss": 0.957, "step": 2755 }, { "epoch": 0.3, "grad_norm": 1.759682193603437, "learning_rate": 8.2539210262676e-06, "loss": 0.945, "step": 2756 }, { "epoch": 0.3, "grad_norm": 1.8793164505391022, "learning_rate": 8.252598896927918e-06, "loss": 0.952, "step": 2757 
}, { "epoch": 0.3, "grad_norm": 1.822262619268538, "learning_rate": 8.25127637319941e-06, "loss": 0.9576, "step": 2758 }, { "epoch": 0.3, "grad_norm": 1.7788753680637348, "learning_rate": 8.24995345524243e-06, "loss": 0.9522, "step": 2759 }, { "epoch": 0.3, "grad_norm": 1.6838462749940153, "learning_rate": 8.248630143217391e-06, "loss": 0.9646, "step": 2760 }, { "epoch": 0.3, "grad_norm": 1.7959803781454455, "learning_rate": 8.247306437284748e-06, "loss": 1.0449, "step": 2761 }, { "epoch": 0.3, "grad_norm": 0.880919203775911, "learning_rate": 8.245982337605003e-06, "loss": 1.0622, "step": 2762 }, { "epoch": 0.3, "grad_norm": 1.622618891982184, "learning_rate": 8.244657844338708e-06, "loss": 0.8846, "step": 2763 }, { "epoch": 0.3, "grad_norm": 1.7643856711592325, "learning_rate": 8.243332957646465e-06, "loss": 0.8855, "step": 2764 }, { "epoch": 0.3, "grad_norm": 1.9391942248676322, "learning_rate": 8.242007677688919e-06, "loss": 0.931, "step": 2765 }, { "epoch": 0.3, "grad_norm": 1.7505159058787862, "learning_rate": 8.240682004626765e-06, "loss": 0.9939, "step": 2766 }, { "epoch": 0.3, "grad_norm": 1.7870261653742308, "learning_rate": 8.239355938620746e-06, "loss": 0.945, "step": 2767 }, { "epoch": 0.3, "grad_norm": 1.7457823445503122, "learning_rate": 8.238029479831651e-06, "loss": 0.9106, "step": 2768 }, { "epoch": 0.3, "grad_norm": 1.6643852951084417, "learning_rate": 8.236702628420319e-06, "loss": 0.9082, "step": 2769 }, { "epoch": 0.3, "grad_norm": 1.7248900881640263, "learning_rate": 8.235375384547634e-06, "loss": 0.9332, "step": 2770 }, { "epoch": 0.3, "grad_norm": 1.6714804928938063, "learning_rate": 8.23404774837453e-06, "loss": 0.8496, "step": 2771 }, { "epoch": 0.3, "grad_norm": 1.751678055902103, "learning_rate": 8.232719720061989e-06, "loss": 0.8999, "step": 2772 }, { "epoch": 0.3, "grad_norm": 0.9305454620126178, "learning_rate": 8.231391299771034e-06, "loss": 1.0055, "step": 2773 }, { "epoch": 0.3, "grad_norm": 1.7260134700929628, "learning_rate": 8.230062487662745e-06, "loss": 0.9145, "step": 2774 }, { "epoch": 0.3, "grad_norm": 1.677395840109699, "learning_rate": 8.228733283898244e-06, "loss": 0.8984, "step": 2775 }, { "epoch": 0.3, "grad_norm": 1.7529293727432356, "learning_rate": 8.2274036886387e-06, "loss": 0.9731, "step": 2776 }, { "epoch": 0.3, "grad_norm": 1.6944433358630042, "learning_rate": 8.226073702045334e-06, "loss": 0.8633, "step": 2777 }, { "epoch": 0.3, "grad_norm": 0.8480141334615585, "learning_rate": 8.224743324279407e-06, "loss": 1.0811, "step": 2778 }, { "epoch": 0.3, "grad_norm": 1.7254054131813572, "learning_rate": 8.223412555502238e-06, "loss": 0.9959, "step": 2779 }, { "epoch": 0.3, "grad_norm": 1.7424225148224677, "learning_rate": 8.222081395875182e-06, "loss": 0.8616, "step": 2780 }, { "epoch": 0.3, "grad_norm": 1.8218760512356698, "learning_rate": 8.220749845559648e-06, "loss": 0.9797, "step": 2781 }, { "epoch": 0.3, "grad_norm": 1.6962925290936426, "learning_rate": 8.219417904717091e-06, "loss": 0.9344, "step": 2782 }, { "epoch": 0.3, "grad_norm": 1.6802446339008914, "learning_rate": 8.218085573509016e-06, "loss": 1.0103, "step": 2783 }, { "epoch": 0.3, "grad_norm": 1.720263468485671, "learning_rate": 8.21675285209697e-06, "loss": 0.91, "step": 2784 }, { "epoch": 0.3, "grad_norm": 1.8081122650441672, "learning_rate": 8.21541974064255e-06, "loss": 0.8891, "step": 2785 }, { "epoch": 0.3, "grad_norm": 1.6801263846602599, "learning_rate": 8.214086239307402e-06, "loss": 0.9617, "step": 2786 }, { "epoch": 0.3, "grad_norm": 1.7513200525295334, 
"learning_rate": 8.212752348253216e-06, "loss": 0.9889, "step": 2787 }, { "epoch": 0.3, "grad_norm": 1.6632321991080574, "learning_rate": 8.211418067641734e-06, "loss": 0.9137, "step": 2788 }, { "epoch": 0.3, "grad_norm": 1.6936958150935835, "learning_rate": 8.210083397634738e-06, "loss": 0.9442, "step": 2789 }, { "epoch": 0.3, "grad_norm": 1.632199028005076, "learning_rate": 8.208748338394063e-06, "loss": 0.8623, "step": 2790 }, { "epoch": 0.3, "grad_norm": 1.8791645381117066, "learning_rate": 8.20741289008159e-06, "loss": 0.9696, "step": 2791 }, { "epoch": 0.3, "grad_norm": 1.7403213382327436, "learning_rate": 8.206077052859247e-06, "loss": 0.9606, "step": 2792 }, { "epoch": 0.3, "grad_norm": 1.700611017803361, "learning_rate": 8.204740826889009e-06, "loss": 0.9472, "step": 2793 }, { "epoch": 0.3, "grad_norm": 1.6728539355543606, "learning_rate": 8.203404212332897e-06, "loss": 0.9327, "step": 2794 }, { "epoch": 0.3, "grad_norm": 0.9094569990041427, "learning_rate": 8.20206720935298e-06, "loss": 1.0646, "step": 2795 }, { "epoch": 0.3, "grad_norm": 1.7263232240622515, "learning_rate": 8.200729818111372e-06, "loss": 0.8977, "step": 2796 }, { "epoch": 0.3, "grad_norm": 1.7276002853362364, "learning_rate": 8.199392038770243e-06, "loss": 0.9594, "step": 2797 }, { "epoch": 0.3, "grad_norm": 1.8271428048832092, "learning_rate": 8.198053871491798e-06, "loss": 0.8812, "step": 2798 }, { "epoch": 0.3, "grad_norm": 1.8169464200185546, "learning_rate": 8.196715316438295e-06, "loss": 0.8627, "step": 2799 }, { "epoch": 0.3, "grad_norm": 1.7537746440979876, "learning_rate": 8.19537637377204e-06, "loss": 0.948, "step": 2800 }, { "epoch": 0.3, "grad_norm": 1.7916949844352335, "learning_rate": 8.194037043655383e-06, "loss": 0.9641, "step": 2801 }, { "epoch": 0.3, "grad_norm": 1.7616545752731128, "learning_rate": 8.192697326250722e-06, "loss": 0.9124, "step": 2802 }, { "epoch": 0.3, "grad_norm": 1.6936851652706455, "learning_rate": 8.191357221720506e-06, "loss": 0.9201, "step": 2803 }, { "epoch": 0.3, "grad_norm": 1.9099671891053467, "learning_rate": 8.190016730227224e-06, "loss": 0.9585, "step": 2804 }, { "epoch": 0.3, "grad_norm": 1.7850895794432966, "learning_rate": 8.188675851933414e-06, "loss": 0.9881, "step": 2805 }, { "epoch": 0.3, "grad_norm": 1.8391320391949635, "learning_rate": 8.187334587001664e-06, "loss": 0.955, "step": 2806 }, { "epoch": 0.3, "grad_norm": 1.023557484890109, "learning_rate": 8.185992935594608e-06, "loss": 1.0914, "step": 2807 }, { "epoch": 0.3, "grad_norm": 1.8077328220047488, "learning_rate": 8.184650897874924e-06, "loss": 0.961, "step": 2808 }, { "epoch": 0.3, "grad_norm": 1.7982462090793578, "learning_rate": 8.18330847400534e-06, "loss": 0.9113, "step": 2809 }, { "epoch": 0.3, "grad_norm": 1.7523813734378182, "learning_rate": 8.181965664148629e-06, "loss": 0.9471, "step": 2810 }, { "epoch": 0.3, "grad_norm": 1.7926691801718706, "learning_rate": 8.180622468467612e-06, "loss": 0.8779, "step": 2811 }, { "epoch": 0.3, "grad_norm": 1.7603223730040563, "learning_rate": 8.179278887125152e-06, "loss": 0.9832, "step": 2812 }, { "epoch": 0.3, "grad_norm": 1.0126722844195852, "learning_rate": 8.17793492028417e-06, "loss": 1.0702, "step": 2813 }, { "epoch": 0.3, "grad_norm": 1.743455940782216, "learning_rate": 8.176590568107623e-06, "loss": 0.8831, "step": 2814 }, { "epoch": 0.3, "grad_norm": 1.7810972964787686, "learning_rate": 8.175245830758516e-06, "loss": 0.9399, "step": 2815 }, { "epoch": 0.3, "grad_norm": 1.6576693510758571, "learning_rate": 8.173900708399907e-06, "loss": 0.8413, 
"step": 2816 }, { "epoch": 0.3, "grad_norm": 1.6759733707638447, "learning_rate": 8.172555201194895e-06, "loss": 0.8622, "step": 2817 }, { "epoch": 0.3, "grad_norm": 1.7167791207919927, "learning_rate": 8.171209309306626e-06, "loss": 0.853, "step": 2818 }, { "epoch": 0.3, "grad_norm": 1.6837904562189485, "learning_rate": 8.169863032898297e-06, "loss": 0.8865, "step": 2819 }, { "epoch": 0.3, "grad_norm": 1.8210809224454656, "learning_rate": 8.168516372133145e-06, "loss": 0.929, "step": 2820 }, { "epoch": 0.3, "grad_norm": 1.7329153160651818, "learning_rate": 8.16716932717446e-06, "loss": 0.9093, "step": 2821 }, { "epoch": 0.3, "grad_norm": 1.8513229929891395, "learning_rate": 8.165821898185577e-06, "loss": 0.9891, "step": 2822 }, { "epoch": 0.3, "grad_norm": 1.7944468354068013, "learning_rate": 8.164474085329872e-06, "loss": 0.8756, "step": 2823 }, { "epoch": 0.3, "grad_norm": 0.956797991942685, "learning_rate": 8.163125888770777e-06, "loss": 1.0806, "step": 2824 }, { "epoch": 0.3, "grad_norm": 1.7871940791188323, "learning_rate": 8.161777308671763e-06, "loss": 0.9273, "step": 2825 }, { "epoch": 0.3, "grad_norm": 1.7556344359549003, "learning_rate": 8.160428345196347e-06, "loss": 0.9543, "step": 2826 }, { "epoch": 0.3, "grad_norm": 1.8228769957219948, "learning_rate": 8.159078998508101e-06, "loss": 0.8812, "step": 2827 }, { "epoch": 0.3, "grad_norm": 1.740974131494849, "learning_rate": 8.157729268770636e-06, "loss": 0.9684, "step": 2828 }, { "epoch": 0.3, "grad_norm": 1.6956669865645169, "learning_rate": 8.156379156147608e-06, "loss": 0.9403, "step": 2829 }, { "epoch": 0.3, "grad_norm": 1.7755018651352146, "learning_rate": 8.155028660802729e-06, "loss": 0.9671, "step": 2830 }, { "epoch": 0.3, "grad_norm": 1.7838456917116254, "learning_rate": 8.153677782899744e-06, "loss": 0.9101, "step": 2831 }, { "epoch": 0.3, "grad_norm": 1.6607161078193295, "learning_rate": 8.152326522602458e-06, "loss": 0.9342, "step": 2832 }, { "epoch": 0.3, "grad_norm": 0.9845393792026735, "learning_rate": 8.150974880074714e-06, "loss": 1.0851, "step": 2833 }, { "epoch": 0.3, "grad_norm": 1.7426784699647768, "learning_rate": 8.149622855480401e-06, "loss": 0.8844, "step": 2834 }, { "epoch": 0.3, "grad_norm": 1.7423948808625718, "learning_rate": 8.14827044898346e-06, "loss": 0.9315, "step": 2835 }, { "epoch": 0.3, "grad_norm": 1.676680525491925, "learning_rate": 8.146917660747872e-06, "loss": 0.9833, "step": 2836 }, { "epoch": 0.3, "grad_norm": 1.7294639602853639, "learning_rate": 8.145564490937669e-06, "loss": 0.8541, "step": 2837 }, { "epoch": 0.31, "grad_norm": 1.7865593454650484, "learning_rate": 8.144210939716929e-06, "loss": 0.9438, "step": 2838 }, { "epoch": 0.31, "grad_norm": 1.601630273824032, "learning_rate": 8.142857007249769e-06, "loss": 0.8159, "step": 2839 }, { "epoch": 0.31, "grad_norm": 1.7672434005824056, "learning_rate": 8.141502693700364e-06, "loss": 0.9836, "step": 2840 }, { "epoch": 0.31, "grad_norm": 1.6876311468679848, "learning_rate": 8.140147999232925e-06, "loss": 0.8766, "step": 2841 }, { "epoch": 0.31, "grad_norm": 1.675170392806443, "learning_rate": 8.138792924011719e-06, "loss": 0.9813, "step": 2842 }, { "epoch": 0.31, "grad_norm": 1.6803400543037053, "learning_rate": 8.137437468201047e-06, "loss": 0.9217, "step": 2843 }, { "epoch": 0.31, "grad_norm": 0.9569786048851022, "learning_rate": 8.136081631965268e-06, "loss": 1.0825, "step": 2844 }, { "epoch": 0.31, "grad_norm": 1.8166448450707677, "learning_rate": 8.134725415468777e-06, "loss": 0.8285, "step": 2845 }, { "epoch": 0.31, "grad_norm": 
1.7387706074150941, "learning_rate": 8.133368818876024e-06, "loss": 0.8705, "step": 2846 }, { "epoch": 0.31, "grad_norm": 0.85656786405602, "learning_rate": 8.132011842351497e-06, "loss": 1.0582, "step": 2847 }, { "epoch": 0.31, "grad_norm": 1.6475130628248664, "learning_rate": 8.130654486059739e-06, "loss": 0.9116, "step": 2848 }, { "epoch": 0.31, "grad_norm": 1.8119435398900445, "learning_rate": 8.12929675016533e-06, "loss": 0.9517, "step": 2849 }, { "epoch": 0.31, "grad_norm": 1.6826668565577119, "learning_rate": 8.127938634832901e-06, "loss": 0.9666, "step": 2850 }, { "epoch": 0.31, "grad_norm": 1.729977879929465, "learning_rate": 8.12658014022713e-06, "loss": 0.9478, "step": 2851 }, { "epoch": 0.31, "grad_norm": 1.7033297083711196, "learning_rate": 8.125221266512739e-06, "loss": 0.918, "step": 2852 }, { "epoch": 0.31, "grad_norm": 1.7893971628280614, "learning_rate": 8.123862013854496e-06, "loss": 0.949, "step": 2853 }, { "epoch": 0.31, "grad_norm": 1.7702862410351738, "learning_rate": 8.122502382417211e-06, "loss": 0.8918, "step": 2854 }, { "epoch": 0.31, "grad_norm": 1.8759427568499567, "learning_rate": 8.121142372365749e-06, "loss": 0.9856, "step": 2855 }, { "epoch": 0.31, "grad_norm": 1.765828197522128, "learning_rate": 8.119781983865014e-06, "loss": 0.9213, "step": 2856 }, { "epoch": 0.31, "grad_norm": 1.846691765707053, "learning_rate": 8.118421217079958e-06, "loss": 0.9246, "step": 2857 }, { "epoch": 0.31, "grad_norm": 1.7082448168913487, "learning_rate": 8.117060072175577e-06, "loss": 0.9159, "step": 2858 }, { "epoch": 0.31, "grad_norm": 0.9814860035968066, "learning_rate": 8.11569854931692e-06, "loss": 1.0534, "step": 2859 }, { "epoch": 0.31, "grad_norm": 1.638293813068346, "learning_rate": 8.114336648669069e-06, "loss": 0.9557, "step": 2860 }, { "epoch": 0.31, "grad_norm": 1.8078328195382445, "learning_rate": 8.112974370397163e-06, "loss": 1.0433, "step": 2861 }, { "epoch": 0.31, "grad_norm": 1.75571520895568, "learning_rate": 8.111611714666382e-06, "loss": 0.8815, "step": 2862 }, { "epoch": 0.31, "grad_norm": 1.6332668262539578, "learning_rate": 8.110248681641957e-06, "loss": 0.9062, "step": 2863 }, { "epoch": 0.31, "grad_norm": 1.7128781504148496, "learning_rate": 8.108885271489152e-06, "loss": 0.907, "step": 2864 }, { "epoch": 0.31, "grad_norm": 1.7604821249341827, "learning_rate": 8.107521484373293e-06, "loss": 0.8669, "step": 2865 }, { "epoch": 0.31, "grad_norm": 1.666299475466886, "learning_rate": 8.10615732045974e-06, "loss": 0.8494, "step": 2866 }, { "epoch": 0.31, "grad_norm": 1.66312537039253, "learning_rate": 8.104792779913903e-06, "loss": 0.9181, "step": 2867 }, { "epoch": 0.31, "grad_norm": 1.6796549361441824, "learning_rate": 8.103427862901237e-06, "loss": 0.9127, "step": 2868 }, { "epoch": 0.31, "grad_norm": 0.9634481405300964, "learning_rate": 8.102062569587245e-06, "loss": 1.0632, "step": 2869 }, { "epoch": 0.31, "grad_norm": 1.6989288504714395, "learning_rate": 8.10069690013747e-06, "loss": 0.9578, "step": 2870 }, { "epoch": 0.31, "grad_norm": 1.7012700594309373, "learning_rate": 8.099330854717507e-06, "loss": 0.9138, "step": 2871 }, { "epoch": 0.31, "grad_norm": 0.8266361267117774, "learning_rate": 8.097964433492995e-06, "loss": 1.0602, "step": 2872 }, { "epoch": 0.31, "grad_norm": 1.9204189545693313, "learning_rate": 8.096597636629612e-06, "loss": 0.9602, "step": 2873 }, { "epoch": 0.31, "grad_norm": 1.6538042611961672, "learning_rate": 8.09523046429309e-06, "loss": 0.7947, "step": 2874 }, { "epoch": 0.31, "grad_norm": 0.863480943022291, "learning_rate": 
8.093862916649206e-06, "loss": 1.0999, "step": 2875 }, { "epoch": 0.31, "grad_norm": 1.6469037531301154, "learning_rate": 8.092494993863775e-06, "loss": 0.9348, "step": 2876 }, { "epoch": 0.31, "grad_norm": 1.7379967267179726, "learning_rate": 8.091126696102666e-06, "loss": 0.9316, "step": 2877 }, { "epoch": 0.31, "grad_norm": 1.6474141358016106, "learning_rate": 8.089758023531788e-06, "loss": 0.8903, "step": 2878 }, { "epoch": 0.31, "grad_norm": 1.663640849162866, "learning_rate": 8.088388976317096e-06, "loss": 0.9303, "step": 2879 }, { "epoch": 0.31, "grad_norm": 0.8730585986589986, "learning_rate": 8.087019554624595e-06, "loss": 1.0487, "step": 2880 }, { "epoch": 0.31, "grad_norm": 1.8736874861150625, "learning_rate": 8.085649758620331e-06, "loss": 1.0197, "step": 2881 }, { "epoch": 0.31, "grad_norm": 1.711567417552114, "learning_rate": 8.084279588470395e-06, "loss": 0.8856, "step": 2882 }, { "epoch": 0.31, "grad_norm": 1.9085160248260284, "learning_rate": 8.082909044340926e-06, "loss": 0.8919, "step": 2883 }, { "epoch": 0.31, "grad_norm": 1.7127941738661436, "learning_rate": 8.081538126398105e-06, "loss": 0.8509, "step": 2884 }, { "epoch": 0.31, "grad_norm": 1.8730991458744477, "learning_rate": 8.080166834808166e-06, "loss": 0.9342, "step": 2885 }, { "epoch": 0.31, "grad_norm": 1.7544563738481065, "learning_rate": 8.078795169737377e-06, "loss": 0.8991, "step": 2886 }, { "epoch": 0.31, "grad_norm": 1.694982914036666, "learning_rate": 8.077423131352058e-06, "loss": 0.8669, "step": 2887 }, { "epoch": 0.31, "grad_norm": 2.263359502537878, "learning_rate": 8.076050719818577e-06, "loss": 0.9559, "step": 2888 }, { "epoch": 0.31, "grad_norm": 1.7157128411106437, "learning_rate": 8.074677935303343e-06, "loss": 0.9383, "step": 2889 }, { "epoch": 0.31, "grad_norm": 1.6557470052233758, "learning_rate": 8.073304777972806e-06, "loss": 0.9248, "step": 2890 }, { "epoch": 0.31, "grad_norm": 1.7197185685971936, "learning_rate": 8.071931247993472e-06, "loss": 0.8272, "step": 2891 }, { "epoch": 0.31, "grad_norm": 1.763888708393084, "learning_rate": 8.070557345531883e-06, "loss": 0.9762, "step": 2892 }, { "epoch": 0.31, "grad_norm": 1.7765761119169388, "learning_rate": 8.06918307075463e-06, "loss": 0.9896, "step": 2893 }, { "epoch": 0.31, "grad_norm": 1.740050352569229, "learning_rate": 8.067808423828347e-06, "loss": 0.9552, "step": 2894 }, { "epoch": 0.31, "grad_norm": 1.7920915483073587, "learning_rate": 8.066433404919718e-06, "loss": 0.8713, "step": 2895 }, { "epoch": 0.31, "grad_norm": 1.8601944826417283, "learning_rate": 8.065058014195466e-06, "loss": 0.9619, "step": 2896 }, { "epoch": 0.31, "grad_norm": 1.7543588636993248, "learning_rate": 8.063682251822363e-06, "loss": 0.8736, "step": 2897 }, { "epoch": 0.31, "grad_norm": 1.8395533613622328, "learning_rate": 8.062306117967224e-06, "loss": 0.861, "step": 2898 }, { "epoch": 0.31, "grad_norm": 1.8337220037637945, "learning_rate": 8.060929612796914e-06, "loss": 0.8865, "step": 2899 }, { "epoch": 0.31, "grad_norm": 1.7254885376233706, "learning_rate": 8.059552736478333e-06, "loss": 0.9309, "step": 2900 }, { "epoch": 0.31, "grad_norm": 1.6879788062038426, "learning_rate": 8.058175489178437e-06, "loss": 0.931, "step": 2901 }, { "epoch": 0.31, "grad_norm": 1.744595021716238, "learning_rate": 8.056797871064216e-06, "loss": 0.8438, "step": 2902 }, { "epoch": 0.31, "grad_norm": 1.6959573630851648, "learning_rate": 8.05541988230272e-06, "loss": 0.9046, "step": 2903 }, { "epoch": 0.31, "grad_norm": 1.7382972432655535, "learning_rate": 8.054041523061027e-06, 
"loss": 0.9145, "step": 2904 }, { "epoch": 0.31, "grad_norm": 1.7967584866206283, "learning_rate": 8.05266279350627e-06, "loss": 1.0066, "step": 2905 }, { "epoch": 0.31, "grad_norm": 1.726117619950246, "learning_rate": 8.051283693805624e-06, "loss": 0.8254, "step": 2906 }, { "epoch": 0.31, "grad_norm": 1.6305839145964693, "learning_rate": 8.049904224126312e-06, "loss": 0.8947, "step": 2907 }, { "epoch": 0.31, "grad_norm": 1.736890606645137, "learning_rate": 8.048524384635598e-06, "loss": 0.8799, "step": 2908 }, { "epoch": 0.31, "grad_norm": 1.7348075553905002, "learning_rate": 8.047144175500793e-06, "loss": 0.9313, "step": 2909 }, { "epoch": 0.31, "grad_norm": 1.0197252195445976, "learning_rate": 8.045763596889253e-06, "loss": 1.0553, "step": 2910 }, { "epoch": 0.31, "grad_norm": 0.8911318602830792, "learning_rate": 8.044382648968375e-06, "loss": 1.0697, "step": 2911 }, { "epoch": 0.31, "grad_norm": 1.8648565050352723, "learning_rate": 8.043001331905605e-06, "loss": 0.8657, "step": 2912 }, { "epoch": 0.31, "grad_norm": 1.7451521569135031, "learning_rate": 8.041619645868434e-06, "loss": 0.8611, "step": 2913 }, { "epoch": 0.31, "grad_norm": 1.673333981687425, "learning_rate": 8.040237591024394e-06, "loss": 0.8989, "step": 2914 }, { "epoch": 0.31, "grad_norm": 1.7488349593500943, "learning_rate": 8.038855167541064e-06, "loss": 0.925, "step": 2915 }, { "epoch": 0.31, "grad_norm": 1.6510866101260666, "learning_rate": 8.037472375586068e-06, "loss": 0.874, "step": 2916 }, { "epoch": 0.31, "grad_norm": 1.8626299467868987, "learning_rate": 8.036089215327077e-06, "loss": 1.0116, "step": 2917 }, { "epoch": 0.31, "grad_norm": 1.739515187137894, "learning_rate": 8.0347056869318e-06, "loss": 0.9359, "step": 2918 }, { "epoch": 0.31, "grad_norm": 1.7360371705141284, "learning_rate": 8.033321790567997e-06, "loss": 0.8768, "step": 2919 }, { "epoch": 0.31, "grad_norm": 1.7295647459792023, "learning_rate": 8.03193752640347e-06, "loss": 0.8981, "step": 2920 }, { "epoch": 0.31, "grad_norm": 1.2310059802688398, "learning_rate": 8.030552894606064e-06, "loss": 1.0667, "step": 2921 }, { "epoch": 0.31, "grad_norm": 1.7298853145476654, "learning_rate": 8.029167895343672e-06, "loss": 0.928, "step": 2922 }, { "epoch": 0.31, "grad_norm": 1.6233897064856069, "learning_rate": 8.02778252878423e-06, "loss": 0.8812, "step": 2923 }, { "epoch": 0.31, "grad_norm": 1.9131666267390448, "learning_rate": 8.026396795095717e-06, "loss": 0.9546, "step": 2924 }, { "epoch": 0.31, "grad_norm": 1.730614456620812, "learning_rate": 8.02501069444616e-06, "loss": 0.9281, "step": 2925 }, { "epoch": 0.31, "grad_norm": 1.7764591196803095, "learning_rate": 8.023624227003627e-06, "loss": 0.8615, "step": 2926 }, { "epoch": 0.31, "grad_norm": 1.712387957857228, "learning_rate": 8.022237392936232e-06, "loss": 1.0304, "step": 2927 }, { "epoch": 0.31, "grad_norm": 1.6311130112057033, "learning_rate": 8.020850192412134e-06, "loss": 0.8603, "step": 2928 }, { "epoch": 0.31, "grad_norm": 1.810281519117069, "learning_rate": 8.019462625599537e-06, "loss": 0.9525, "step": 2929 }, { "epoch": 0.31, "grad_norm": 1.6888513375698708, "learning_rate": 8.018074692666686e-06, "loss": 0.9356, "step": 2930 }, { "epoch": 0.32, "grad_norm": 1.7988212060366002, "learning_rate": 8.016686393781874e-06, "loss": 0.9487, "step": 2931 }, { "epoch": 0.32, "grad_norm": 1.6967369267536736, "learning_rate": 8.015297729113436e-06, "loss": 0.9826, "step": 2932 }, { "epoch": 0.32, "grad_norm": 1.6849113499071326, "learning_rate": 8.013908698829754e-06, "loss": 0.8889, "step": 2933 }, 
{ "epoch": 0.32, "grad_norm": 1.7808594686281816, "learning_rate": 8.012519303099251e-06, "loss": 1.0021, "step": 2934 }, { "epoch": 0.32, "grad_norm": 2.089876439198362, "learning_rate": 8.011129542090396e-06, "loss": 0.9069, "step": 2935 }, { "epoch": 0.32, "grad_norm": 1.8380134174696203, "learning_rate": 8.009739415971704e-06, "loss": 0.9077, "step": 2936 }, { "epoch": 0.32, "grad_norm": 1.7667444970933812, "learning_rate": 8.008348924911734e-06, "loss": 0.9719, "step": 2937 }, { "epoch": 0.32, "grad_norm": 1.0309818756778706, "learning_rate": 8.006958069079082e-06, "loss": 1.0557, "step": 2938 }, { "epoch": 0.32, "grad_norm": 0.9260482450569845, "learning_rate": 8.005566848642398e-06, "loss": 1.06, "step": 2939 }, { "epoch": 0.32, "grad_norm": 1.7180721071682272, "learning_rate": 8.004175263770373e-06, "loss": 0.8919, "step": 2940 }, { "epoch": 0.32, "grad_norm": 0.8725319571481909, "learning_rate": 8.00278331463174e-06, "loss": 1.0494, "step": 2941 }, { "epoch": 0.32, "grad_norm": 1.8256862139733991, "learning_rate": 8.001391001395278e-06, "loss": 0.9382, "step": 2942 }, { "epoch": 0.32, "grad_norm": 1.8326475006450833, "learning_rate": 7.99999832422981e-06, "loss": 0.9035, "step": 2943 }, { "epoch": 0.32, "grad_norm": 1.7115561834170654, "learning_rate": 7.998605283304201e-06, "loss": 0.9935, "step": 2944 }, { "epoch": 0.32, "grad_norm": 1.823027935733167, "learning_rate": 7.997211878787366e-06, "loss": 1.0149, "step": 2945 }, { "epoch": 0.32, "grad_norm": 1.85341726597383, "learning_rate": 7.995818110848256e-06, "loss": 0.8772, "step": 2946 }, { "epoch": 0.32, "grad_norm": 1.7159463726841102, "learning_rate": 7.994423979655873e-06, "loss": 0.8606, "step": 2947 }, { "epoch": 0.32, "grad_norm": 1.7203228487758915, "learning_rate": 7.993029485379258e-06, "loss": 0.9061, "step": 2948 }, { "epoch": 0.32, "grad_norm": 1.7505587927625392, "learning_rate": 7.991634628187499e-06, "loss": 0.9761, "step": 2949 }, { "epoch": 0.32, "grad_norm": 1.6611388646421599, "learning_rate": 7.99023940824973e-06, "loss": 0.8543, "step": 2950 }, { "epoch": 0.32, "grad_norm": 1.727963538352937, "learning_rate": 7.98884382573512e-06, "loss": 0.9496, "step": 2951 }, { "epoch": 0.32, "grad_norm": 1.6618549244602812, "learning_rate": 7.987447880812895e-06, "loss": 0.9276, "step": 2952 }, { "epoch": 0.32, "grad_norm": 1.7408533363420053, "learning_rate": 7.986051573652315e-06, "loss": 0.9183, "step": 2953 }, { "epoch": 0.32, "grad_norm": 1.6505203951835345, "learning_rate": 7.984654904422686e-06, "loss": 0.8685, "step": 2954 }, { "epoch": 0.32, "grad_norm": 1.833688639920115, "learning_rate": 7.983257873293362e-06, "loss": 0.8817, "step": 2955 }, { "epoch": 0.32, "grad_norm": 1.755261701533233, "learning_rate": 7.981860480433735e-06, "loss": 0.8418, "step": 2956 }, { "epoch": 0.32, "grad_norm": 1.7657951827138199, "learning_rate": 7.980462726013246e-06, "loss": 0.9233, "step": 2957 }, { "epoch": 0.32, "grad_norm": 1.9123686441348768, "learning_rate": 7.979064610201372e-06, "loss": 0.9375, "step": 2958 }, { "epoch": 0.32, "grad_norm": 1.8504300875096165, "learning_rate": 7.977666133167647e-06, "loss": 1.0314, "step": 2959 }, { "epoch": 0.32, "grad_norm": 1.767709834502355, "learning_rate": 7.976267295081637e-06, "loss": 0.9717, "step": 2960 }, { "epoch": 0.32, "grad_norm": 1.7015052437190794, "learning_rate": 7.974868096112957e-06, "loss": 0.9007, "step": 2961 }, { "epoch": 0.32, "grad_norm": 1.76733128058154, "learning_rate": 7.973468536431266e-06, "loss": 0.8969, "step": 2962 }, { "epoch": 0.32, "grad_norm": 
1.7987822606299657, "learning_rate": 7.972068616206261e-06, "loss": 0.8863, "step": 2963 }, { "epoch": 0.32, "grad_norm": 1.6685030764179183, "learning_rate": 7.970668335607692e-06, "loss": 0.9207, "step": 2964 }, { "epoch": 0.32, "grad_norm": 1.3498620727331005, "learning_rate": 7.969267694805345e-06, "loss": 1.0685, "step": 2965 }, { "epoch": 0.32, "grad_norm": 1.8507719264599134, "learning_rate": 7.967866693969053e-06, "loss": 0.9016, "step": 2966 }, { "epoch": 0.32, "grad_norm": 1.6764312063865234, "learning_rate": 7.966465333268693e-06, "loss": 0.9398, "step": 2967 }, { "epoch": 0.32, "grad_norm": 1.9455703125873531, "learning_rate": 7.965063612874184e-06, "loss": 0.8896, "step": 2968 }, { "epoch": 0.32, "grad_norm": 1.7060832490941273, "learning_rate": 7.963661532955492e-06, "loss": 0.891, "step": 2969 }, { "epoch": 0.32, "grad_norm": 1.7374244426753016, "learning_rate": 7.962259093682618e-06, "loss": 0.9264, "step": 2970 }, { "epoch": 0.32, "grad_norm": 1.7637257042302563, "learning_rate": 7.960856295225619e-06, "loss": 0.9111, "step": 2971 }, { "epoch": 0.32, "grad_norm": 1.7471803798653538, "learning_rate": 7.959453137754586e-06, "loss": 0.8759, "step": 2972 }, { "epoch": 0.32, "grad_norm": 1.7099133771672819, "learning_rate": 7.958049621439659e-06, "loss": 0.8675, "step": 2973 }, { "epoch": 0.32, "grad_norm": 1.01931654962637, "learning_rate": 7.956645746451014e-06, "loss": 1.0558, "step": 2974 }, { "epoch": 0.32, "grad_norm": 1.8118374219059752, "learning_rate": 7.95524151295888e-06, "loss": 0.8937, "step": 2975 }, { "epoch": 0.32, "grad_norm": 1.7442891052139102, "learning_rate": 7.953836921133526e-06, "loss": 0.859, "step": 2976 }, { "epoch": 0.32, "grad_norm": 2.7443088721289954, "learning_rate": 7.952431971145261e-06, "loss": 0.8601, "step": 2977 }, { "epoch": 0.32, "grad_norm": 1.8486025808592623, "learning_rate": 7.951026663164441e-06, "loss": 0.9132, "step": 2978 }, { "epoch": 0.32, "grad_norm": 1.7627401199208774, "learning_rate": 7.949620997361465e-06, "loss": 0.9483, "step": 2979 }, { "epoch": 0.32, "grad_norm": 1.6070348988286416, "learning_rate": 7.948214973906775e-06, "loss": 0.807, "step": 2980 }, { "epoch": 0.32, "grad_norm": 1.9076659717284925, "learning_rate": 7.946808592970851e-06, "loss": 0.958, "step": 2981 }, { "epoch": 0.32, "grad_norm": 1.6559070754735314, "learning_rate": 7.945401854724231e-06, "loss": 0.9406, "step": 2982 }, { "epoch": 0.32, "grad_norm": 1.705407010193344, "learning_rate": 7.943994759337479e-06, "loss": 1.0243, "step": 2983 }, { "epoch": 0.32, "grad_norm": 1.8206831405419486, "learning_rate": 7.942587306981213e-06, "loss": 0.9184, "step": 2984 }, { "epoch": 0.32, "grad_norm": 1.953593550419817, "learning_rate": 7.941179497826093e-06, "loss": 0.9542, "step": 2985 }, { "epoch": 0.32, "grad_norm": 1.766722529972484, "learning_rate": 7.939771332042818e-06, "loss": 0.9201, "step": 2986 }, { "epoch": 0.32, "grad_norm": 1.7906824996398598, "learning_rate": 7.938362809802134e-06, "loss": 0.9624, "step": 2987 }, { "epoch": 0.32, "grad_norm": 1.6741816641776495, "learning_rate": 7.936953931274828e-06, "loss": 0.9067, "step": 2988 }, { "epoch": 0.32, "grad_norm": 1.759505670868694, "learning_rate": 7.935544696631733e-06, "loss": 0.93, "step": 2989 }, { "epoch": 0.32, "grad_norm": 1.8037476091131501, "learning_rate": 7.934135106043726e-06, "loss": 0.8611, "step": 2990 }, { "epoch": 0.32, "grad_norm": 1.7422269021038563, "learning_rate": 7.93272515968172e-06, "loss": 0.9718, "step": 2991 }, { "epoch": 0.32, "grad_norm": 1.7453539235313276, 
"learning_rate": 7.931314857716677e-06, "loss": 0.8847, "step": 2992 }, { "epoch": 0.32, "grad_norm": 5.072127211437519, "learning_rate": 7.929904200319602e-06, "loss": 0.7712, "step": 2993 }, { "epoch": 0.32, "grad_norm": 1.7586598412861487, "learning_rate": 7.928493187661543e-06, "loss": 0.9442, "step": 2994 }, { "epoch": 0.32, "grad_norm": 1.7792507858132671, "learning_rate": 7.927081819913589e-06, "loss": 1.0287, "step": 2995 }, { "epoch": 0.32, "grad_norm": 1.851795925412325, "learning_rate": 7.92567009724687e-06, "loss": 0.8984, "step": 2996 }, { "epoch": 0.32, "grad_norm": 1.6501559489377569, "learning_rate": 7.92425801983257e-06, "loss": 0.8418, "step": 2997 }, { "epoch": 0.32, "grad_norm": 2.187925619145151, "learning_rate": 7.9228455878419e-06, "loss": 0.9444, "step": 2998 }, { "epoch": 0.32, "grad_norm": 1.694693462816271, "learning_rate": 7.921432801446128e-06, "loss": 0.9787, "step": 2999 }, { "epoch": 0.32, "grad_norm": 1.821450310522034, "learning_rate": 7.920019660816556e-06, "loss": 0.8504, "step": 3000 }, { "epoch": 0.32, "grad_norm": 1.5975235472576421, "learning_rate": 7.918606166124534e-06, "loss": 0.87, "step": 3001 }, { "epoch": 0.32, "grad_norm": 1.9426950529524436, "learning_rate": 7.917192317541452e-06, "loss": 0.9729, "step": 3002 }, { "epoch": 0.32, "grad_norm": 1.710853465313771, "learning_rate": 7.915778115238744e-06, "loss": 0.9292, "step": 3003 }, { "epoch": 0.32, "grad_norm": 1.6917003902401524, "learning_rate": 7.914363559387887e-06, "loss": 0.9098, "step": 3004 }, { "epoch": 0.32, "grad_norm": 1.6791101081119189, "learning_rate": 7.912948650160403e-06, "loss": 0.8839, "step": 3005 }, { "epoch": 0.32, "grad_norm": 1.7293874954884805, "learning_rate": 7.911533387727852e-06, "loss": 0.8854, "step": 3006 }, { "epoch": 0.32, "grad_norm": 1.6405422315090548, "learning_rate": 7.91011777226184e-06, "loss": 0.881, "step": 3007 }, { "epoch": 0.32, "grad_norm": 1.709493422018036, "learning_rate": 7.908701803934014e-06, "loss": 0.8996, "step": 3008 }, { "epoch": 0.32, "grad_norm": 1.8026355525597326, "learning_rate": 7.907285482916067e-06, "loss": 0.9754, "step": 3009 }, { "epoch": 0.32, "grad_norm": 1.7377275085786918, "learning_rate": 7.905868809379735e-06, "loss": 0.9349, "step": 3010 }, { "epoch": 0.32, "grad_norm": 1.814802883664392, "learning_rate": 7.904451783496789e-06, "loss": 1.001, "step": 3011 }, { "epoch": 0.32, "grad_norm": 1.7756232754016974, "learning_rate": 7.903034405439052e-06, "loss": 0.9522, "step": 3012 }, { "epoch": 0.32, "grad_norm": 1.6208715011103083, "learning_rate": 7.901616675378384e-06, "loss": 0.915, "step": 3013 }, { "epoch": 0.32, "grad_norm": 1.9284894481767172, "learning_rate": 7.900198593486691e-06, "loss": 0.9175, "step": 3014 }, { "epoch": 0.32, "grad_norm": 1.8205622467514608, "learning_rate": 7.89878015993592e-06, "loss": 0.9242, "step": 3015 }, { "epoch": 0.32, "grad_norm": 1.7856714094061996, "learning_rate": 7.897361374898063e-06, "loss": 0.951, "step": 3016 }, { "epoch": 0.32, "grad_norm": 1.7139198487546219, "learning_rate": 7.895942238545149e-06, "loss": 0.976, "step": 3017 }, { "epoch": 0.32, "grad_norm": 1.8101574280143737, "learning_rate": 7.894522751049255e-06, "loss": 0.9676, "step": 3018 }, { "epoch": 0.32, "grad_norm": 1.662349386177393, "learning_rate": 7.893102912582497e-06, "loss": 0.8866, "step": 3019 }, { "epoch": 0.32, "grad_norm": 1.85795273393601, "learning_rate": 7.89168272331704e-06, "loss": 0.9625, "step": 3020 }, { "epoch": 0.32, "grad_norm": 1.673722099552431, "learning_rate": 7.890262183425082e-06, 
"loss": 0.8752, "step": 3021 }, { "epoch": 0.32, "grad_norm": 1.8929166558111947, "learning_rate": 7.888841293078868e-06, "loss": 0.9555, "step": 3022 }, { "epoch": 0.32, "grad_norm": 1.8435378458037104, "learning_rate": 7.887420052450689e-06, "loss": 1.0, "step": 3023 }, { "epoch": 0.33, "grad_norm": 1.7014365406945766, "learning_rate": 7.885998461712875e-06, "loss": 0.9127, "step": 3024 }, { "epoch": 0.33, "grad_norm": 1.7433840655630795, "learning_rate": 7.8845765210378e-06, "loss": 0.9037, "step": 3025 }, { "epoch": 0.33, "grad_norm": 1.7595089236613992, "learning_rate": 7.883154230597874e-06, "loss": 0.879, "step": 3026 }, { "epoch": 0.33, "grad_norm": 1.7863327264883566, "learning_rate": 7.88173159056556e-06, "loss": 0.9323, "step": 3027 }, { "epoch": 0.33, "grad_norm": 1.558519662020936, "learning_rate": 7.880308601113353e-06, "loss": 0.9308, "step": 3028 }, { "epoch": 0.33, "grad_norm": 1.7096900760487257, "learning_rate": 7.8788852624138e-06, "loss": 0.9328, "step": 3029 }, { "epoch": 0.33, "grad_norm": 1.6583016778587245, "learning_rate": 7.877461574639485e-06, "loss": 0.9276, "step": 3030 }, { "epoch": 0.33, "grad_norm": 1.6991975447417853, "learning_rate": 7.876037537963032e-06, "loss": 0.8942, "step": 3031 }, { "epoch": 0.33, "grad_norm": 0.9099292348863601, "learning_rate": 7.874613152557113e-06, "loss": 1.0536, "step": 3032 }, { "epoch": 0.33, "grad_norm": 1.8116677911213934, "learning_rate": 7.873188418594438e-06, "loss": 0.9387, "step": 3033 }, { "epoch": 0.33, "grad_norm": 1.8340986946491094, "learning_rate": 7.871763336247764e-06, "loss": 0.9085, "step": 3034 }, { "epoch": 0.33, "grad_norm": 1.6757022910849841, "learning_rate": 7.870337905689882e-06, "loss": 0.9367, "step": 3035 }, { "epoch": 0.33, "grad_norm": 1.75404006273646, "learning_rate": 7.868912127093638e-06, "loss": 1.0033, "step": 3036 }, { "epoch": 0.33, "grad_norm": 1.738293970179308, "learning_rate": 7.867486000631902e-06, "loss": 0.9429, "step": 3037 }, { "epoch": 0.33, "grad_norm": 1.8313258671858774, "learning_rate": 7.866059526477608e-06, "loss": 0.9487, "step": 3038 }, { "epoch": 0.33, "grad_norm": 1.7814679939797695, "learning_rate": 7.864632704803712e-06, "loss": 0.8702, "step": 3039 }, { "epoch": 0.33, "grad_norm": 1.9006363824207952, "learning_rate": 7.863205535783227e-06, "loss": 0.9519, "step": 3040 }, { "epoch": 0.33, "grad_norm": 1.7216243912667661, "learning_rate": 7.861778019589198e-06, "loss": 0.9792, "step": 3041 }, { "epoch": 0.33, "grad_norm": 1.9321408733901384, "learning_rate": 7.86035015639472e-06, "loss": 0.9333, "step": 3042 }, { "epoch": 0.33, "grad_norm": 1.6825535336425743, "learning_rate": 7.858921946372923e-06, "loss": 0.8985, "step": 3043 }, { "epoch": 0.33, "grad_norm": 1.676270907219242, "learning_rate": 7.857493389696984e-06, "loss": 0.8921, "step": 3044 }, { "epoch": 0.33, "grad_norm": 1.6987940422330285, "learning_rate": 7.85606448654012e-06, "loss": 0.9349, "step": 3045 }, { "epoch": 0.33, "grad_norm": 1.7525260128011788, "learning_rate": 7.854635237075593e-06, "loss": 0.8494, "step": 3046 }, { "epoch": 0.33, "grad_norm": 1.8138621179583427, "learning_rate": 7.8532056414767e-06, "loss": 0.9907, "step": 3047 }, { "epoch": 0.33, "grad_norm": 1.710892024979379, "learning_rate": 7.851775699916785e-06, "loss": 0.9481, "step": 3048 }, { "epoch": 0.33, "grad_norm": 1.8192404818735401, "learning_rate": 7.850345412569237e-06, "loss": 0.9347, "step": 3049 }, { "epoch": 0.33, "grad_norm": 1.6641725503331903, "learning_rate": 7.84891477960748e-06, "loss": 0.9245, "step": 3050 }, { 
"epoch": 0.33, "grad_norm": 1.7644571012578258, "learning_rate": 7.847483801204984e-06, "loss": 0.8661, "step": 3051 }, { "epoch": 0.33, "grad_norm": 1.7231555570385668, "learning_rate": 7.846052477535263e-06, "loss": 1.0261, "step": 3052 }, { "epoch": 0.33, "grad_norm": 1.839528574825421, "learning_rate": 7.844620808771865e-06, "loss": 0.8672, "step": 3053 }, { "epoch": 0.33, "grad_norm": 1.81248959634371, "learning_rate": 7.843188795088386e-06, "loss": 0.9689, "step": 3054 }, { "epoch": 0.33, "grad_norm": 1.7149268407978189, "learning_rate": 7.841756436658466e-06, "loss": 0.8044, "step": 3055 }, { "epoch": 0.33, "grad_norm": 1.8091462278896084, "learning_rate": 7.84032373365578e-06, "loss": 0.9909, "step": 3056 }, { "epoch": 0.33, "grad_norm": 1.6670002264795896, "learning_rate": 7.838890686254049e-06, "loss": 0.8881, "step": 3057 }, { "epoch": 0.33, "grad_norm": 1.7291509427913374, "learning_rate": 7.837457294627036e-06, "loss": 0.9315, "step": 3058 }, { "epoch": 0.33, "grad_norm": 1.811722329245556, "learning_rate": 7.836023558948542e-06, "loss": 0.96, "step": 3059 }, { "epoch": 0.33, "grad_norm": 1.8580763249830132, "learning_rate": 7.834589479392416e-06, "loss": 0.9669, "step": 3060 }, { "epoch": 0.33, "grad_norm": 0.9421614663913902, "learning_rate": 7.833155056132543e-06, "loss": 1.1014, "step": 3061 }, { "epoch": 0.33, "grad_norm": 0.9030249652092395, "learning_rate": 7.831720289342853e-06, "loss": 1.0775, "step": 3062 }, { "epoch": 0.33, "grad_norm": 1.6742759831384806, "learning_rate": 7.830285179197314e-06, "loss": 0.872, "step": 3063 }, { "epoch": 0.33, "grad_norm": 1.7974940983109295, "learning_rate": 7.828849725869939e-06, "loss": 0.902, "step": 3064 }, { "epoch": 0.33, "grad_norm": 1.8254239885255714, "learning_rate": 7.827413929534784e-06, "loss": 0.8035, "step": 3065 }, { "epoch": 0.33, "grad_norm": 1.8127040522227016, "learning_rate": 7.825977790365941e-06, "loss": 1.0319, "step": 3066 }, { "epoch": 0.33, "grad_norm": 1.7526067857957455, "learning_rate": 7.824541308537548e-06, "loss": 0.9501, "step": 3067 }, { "epoch": 0.33, "grad_norm": 1.1699302195623185, "learning_rate": 7.823104484223785e-06, "loss": 1.1008, "step": 3068 }, { "epoch": 0.33, "grad_norm": 1.6239989971280733, "learning_rate": 7.821667317598871e-06, "loss": 0.9383, "step": 3069 }, { "epoch": 0.33, "grad_norm": 1.7431801886636598, "learning_rate": 7.820229808837065e-06, "loss": 0.922, "step": 3070 }, { "epoch": 0.33, "grad_norm": 1.8553771505080254, "learning_rate": 7.818791958112673e-06, "loss": 0.8473, "step": 3071 }, { "epoch": 0.33, "grad_norm": 1.7421454771045228, "learning_rate": 7.817353765600037e-06, "loss": 0.9114, "step": 3072 }, { "epoch": 0.33, "grad_norm": 1.6926811912076758, "learning_rate": 7.815915231473547e-06, "loss": 0.9939, "step": 3073 }, { "epoch": 0.33, "grad_norm": 1.7751188139799767, "learning_rate": 7.814476355907626e-06, "loss": 0.9232, "step": 3074 }, { "epoch": 0.33, "grad_norm": 1.679711572446368, "learning_rate": 7.813037139076743e-06, "loss": 0.8926, "step": 3075 }, { "epoch": 0.33, "grad_norm": 1.7646301580774613, "learning_rate": 7.811597581155407e-06, "loss": 0.9114, "step": 3076 }, { "epoch": 0.33, "grad_norm": 1.7108214067086627, "learning_rate": 7.810157682318174e-06, "loss": 0.9328, "step": 3077 }, { "epoch": 0.33, "grad_norm": 1.6909959319833827, "learning_rate": 7.808717442739634e-06, "loss": 0.9166, "step": 3078 }, { "epoch": 0.33, "grad_norm": 0.9853421847781072, "learning_rate": 7.807276862594421e-06, "loss": 1.0383, "step": 3079 }, { "epoch": 0.33, "grad_norm": 
1.6316838634500315, "learning_rate": 7.80583594205721e-06, "loss": 0.9612, "step": 3080 }, { "epoch": 0.33, "grad_norm": 1.8448612655194305, "learning_rate": 7.804394681302716e-06, "loss": 0.9005, "step": 3081 }, { "epoch": 0.33, "grad_norm": 1.7132070117470934, "learning_rate": 7.8029530805057e-06, "loss": 0.9552, "step": 3082 }, { "epoch": 0.33, "grad_norm": 1.706202567616441, "learning_rate": 7.80151113984096e-06, "loss": 0.9922, "step": 3083 }, { "epoch": 0.33, "grad_norm": 0.8007173450210207, "learning_rate": 7.800068859483336e-06, "loss": 1.0346, "step": 3084 }, { "epoch": 0.33, "grad_norm": 0.8236116896977254, "learning_rate": 7.798626239607709e-06, "loss": 1.0701, "step": 3085 }, { "epoch": 0.33, "grad_norm": 1.7874491025239632, "learning_rate": 7.797183280389002e-06, "loss": 0.9589, "step": 3086 }, { "epoch": 0.33, "grad_norm": 0.8064951285402403, "learning_rate": 7.79573998200218e-06, "loss": 1.067, "step": 3087 }, { "epoch": 0.33, "grad_norm": 1.8823639928384595, "learning_rate": 7.794296344622246e-06, "loss": 0.8493, "step": 3088 }, { "epoch": 0.33, "grad_norm": 0.8391652492898654, "learning_rate": 7.792852368424246e-06, "loss": 1.0862, "step": 3089 }, { "epoch": 0.33, "grad_norm": 1.74349621240828, "learning_rate": 7.791408053583269e-06, "loss": 0.8731, "step": 3090 }, { "epoch": 0.33, "grad_norm": 1.683579091777873, "learning_rate": 7.789963400274442e-06, "loss": 1.0047, "step": 3091 }, { "epoch": 0.33, "grad_norm": 1.7869768442234644, "learning_rate": 7.788518408672935e-06, "loss": 0.9433, "step": 3092 }, { "epoch": 0.33, "grad_norm": 1.628110287054426, "learning_rate": 7.787073078953955e-06, "loss": 0.824, "step": 3093 }, { "epoch": 0.33, "grad_norm": 1.7954317453355313, "learning_rate": 7.785627411292758e-06, "loss": 0.8562, "step": 3094 }, { "epoch": 0.33, "grad_norm": 1.8072226907099973, "learning_rate": 7.784181405864634e-06, "loss": 0.9221, "step": 3095 }, { "epoch": 0.33, "grad_norm": 1.791551113166244, "learning_rate": 7.782735062844915e-06, "loss": 0.9127, "step": 3096 }, { "epoch": 0.33, "grad_norm": 1.7477268436088218, "learning_rate": 7.781288382408975e-06, "loss": 0.8178, "step": 3097 }, { "epoch": 0.33, "grad_norm": 1.0290862247952075, "learning_rate": 7.779841364732233e-06, "loss": 1.0893, "step": 3098 }, { "epoch": 0.33, "grad_norm": 1.7883825532855406, "learning_rate": 7.778394009990139e-06, "loss": 0.9025, "step": 3099 }, { "epoch": 0.33, "grad_norm": 1.7288902112124946, "learning_rate": 7.776946318358195e-06, "loss": 0.9436, "step": 3100 }, { "epoch": 0.33, "grad_norm": 1.6044119336879408, "learning_rate": 7.775498290011935e-06, "loss": 0.9, "step": 3101 }, { "epoch": 0.33, "grad_norm": 1.7781942949351532, "learning_rate": 7.774049925126938e-06, "loss": 0.9097, "step": 3102 }, { "epoch": 0.33, "grad_norm": 1.7255158475959271, "learning_rate": 7.772601223878824e-06, "loss": 0.8739, "step": 3103 }, { "epoch": 0.33, "grad_norm": 1.7577469498160256, "learning_rate": 7.771152186443254e-06, "loss": 0.9593, "step": 3104 }, { "epoch": 0.33, "grad_norm": 1.7554665635875506, "learning_rate": 7.76970281299593e-06, "loss": 0.8615, "step": 3105 }, { "epoch": 0.33, "grad_norm": 1.8543523837070472, "learning_rate": 7.768253103712589e-06, "loss": 0.9263, "step": 3106 }, { "epoch": 0.33, "grad_norm": 1.738250216364438, "learning_rate": 7.766803058769014e-06, "loss": 0.9904, "step": 3107 }, { "epoch": 0.33, "grad_norm": 1.7029036888315305, "learning_rate": 7.765352678341032e-06, "loss": 0.8262, "step": 3108 }, { "epoch": 0.33, "grad_norm": 1.8144149750045406, 
"learning_rate": 7.763901962604502e-06, "loss": 0.9704, "step": 3109 }, { "epoch": 0.33, "grad_norm": 1.7120251507802666, "learning_rate": 7.762450911735333e-06, "loss": 0.9475, "step": 3110 }, { "epoch": 0.33, "grad_norm": 0.9764443285987277, "learning_rate": 7.760999525909465e-06, "loss": 1.0597, "step": 3111 }, { "epoch": 0.33, "grad_norm": 1.7435416313148284, "learning_rate": 7.759547805302888e-06, "loss": 0.8576, "step": 3112 }, { "epoch": 0.33, "grad_norm": 1.8228140515521871, "learning_rate": 7.758095750091625e-06, "loss": 0.8968, "step": 3113 }, { "epoch": 0.33, "grad_norm": 1.7465910484954197, "learning_rate": 7.756643360451744e-06, "loss": 1.0077, "step": 3114 }, { "epoch": 0.33, "grad_norm": 1.7478058870889237, "learning_rate": 7.75519063655935e-06, "loss": 0.8903, "step": 3115 }, { "epoch": 0.33, "grad_norm": 1.9053925793106998, "learning_rate": 7.753737578590594e-06, "loss": 0.9959, "step": 3116 }, { "epoch": 0.34, "grad_norm": 1.7503647211441604, "learning_rate": 7.752284186721664e-06, "loss": 0.9205, "step": 3117 }, { "epoch": 0.34, "grad_norm": 1.835444213382237, "learning_rate": 7.750830461128786e-06, "loss": 0.9286, "step": 3118 }, { "epoch": 0.34, "grad_norm": 1.7430450891325475, "learning_rate": 7.749376401988232e-06, "loss": 0.9379, "step": 3119 }, { "epoch": 0.34, "grad_norm": 1.7059204447083476, "learning_rate": 7.747922009476313e-06, "loss": 0.9411, "step": 3120 }, { "epoch": 0.34, "grad_norm": 1.63132569719763, "learning_rate": 7.746467283769374e-06, "loss": 0.9053, "step": 3121 }, { "epoch": 0.34, "grad_norm": 1.7386382057297571, "learning_rate": 7.745012225043809e-06, "loss": 0.9353, "step": 3122 }, { "epoch": 0.34, "grad_norm": 1.6416360045814167, "learning_rate": 7.743556833476047e-06, "loss": 0.9523, "step": 3123 }, { "epoch": 0.34, "grad_norm": 1.6876012725087925, "learning_rate": 7.742101109242562e-06, "loss": 0.9283, "step": 3124 }, { "epoch": 0.34, "grad_norm": 1.6750258492957932, "learning_rate": 7.740645052519863e-06, "loss": 0.9386, "step": 3125 }, { "epoch": 0.34, "grad_norm": 1.6636591690049638, "learning_rate": 7.739188663484505e-06, "loss": 0.9364, "step": 3126 }, { "epoch": 0.34, "grad_norm": 1.7665127980314514, "learning_rate": 7.737731942313077e-06, "loss": 0.8939, "step": 3127 }, { "epoch": 0.34, "grad_norm": 1.7402437497321308, "learning_rate": 7.736274889182211e-06, "loss": 0.9665, "step": 3128 }, { "epoch": 0.34, "grad_norm": 0.9464497607908496, "learning_rate": 7.734817504268582e-06, "loss": 1.0719, "step": 3129 }, { "epoch": 0.34, "grad_norm": 0.8924382248481586, "learning_rate": 7.733359787748904e-06, "loss": 1.0356, "step": 3130 }, { "epoch": 0.34, "grad_norm": 1.7947140282999494, "learning_rate": 7.731901739799927e-06, "loss": 0.8639, "step": 3131 }, { "epoch": 0.34, "grad_norm": 1.772435946349754, "learning_rate": 7.730443360598447e-06, "loss": 0.9656, "step": 3132 }, { "epoch": 0.34, "grad_norm": 0.9380020134859561, "learning_rate": 7.728984650321293e-06, "loss": 1.0508, "step": 3133 }, { "epoch": 0.34, "grad_norm": 1.8998805782205812, "learning_rate": 7.727525609145345e-06, "loss": 0.9303, "step": 3134 }, { "epoch": 0.34, "grad_norm": 1.671496285415158, "learning_rate": 7.726066237247514e-06, "loss": 0.8983, "step": 3135 }, { "epoch": 0.34, "grad_norm": 1.8500614052473616, "learning_rate": 7.72460653480475e-06, "loss": 0.961, "step": 3136 }, { "epoch": 0.34, "grad_norm": 1.742155677864123, "learning_rate": 7.723146501994054e-06, "loss": 0.9057, "step": 3137 }, { "epoch": 0.34, "grad_norm": 1.6955271258626596, "learning_rate": 
7.721686138992456e-06, "loss": 0.8517, "step": 3138 }, { "epoch": 0.34, "grad_norm": 1.707611666828549, "learning_rate": 7.72022544597703e-06, "loss": 0.9337, "step": 3139 }, { "epoch": 0.34, "grad_norm": 1.8852183420617958, "learning_rate": 7.718764423124892e-06, "loss": 0.9356, "step": 3140 }, { "epoch": 0.34, "grad_norm": 1.7420148000011606, "learning_rate": 7.717303070613193e-06, "loss": 0.983, "step": 3141 }, { "epoch": 0.34, "grad_norm": 1.7522962192274052, "learning_rate": 7.71584138861913e-06, "loss": 0.9595, "step": 3142 }, { "epoch": 0.34, "grad_norm": 1.8059922443969585, "learning_rate": 7.714379377319933e-06, "loss": 0.8687, "step": 3143 }, { "epoch": 0.34, "grad_norm": 1.6540211210751483, "learning_rate": 7.712917036892881e-06, "loss": 0.8285, "step": 3144 }, { "epoch": 0.34, "grad_norm": 1.0732036612055784, "learning_rate": 7.711454367515285e-06, "loss": 1.0854, "step": 3145 }, { "epoch": 0.34, "grad_norm": 1.7982009959934304, "learning_rate": 7.709991369364498e-06, "loss": 0.9898, "step": 3146 }, { "epoch": 0.34, "grad_norm": 1.7128714391638864, "learning_rate": 7.708528042617915e-06, "loss": 0.9621, "step": 3147 }, { "epoch": 0.34, "grad_norm": 1.7909820910298118, "learning_rate": 7.707064387452971e-06, "loss": 0.9693, "step": 3148 }, { "epoch": 0.34, "grad_norm": 1.6552508897388354, "learning_rate": 7.705600404047136e-06, "loss": 0.8178, "step": 3149 }, { "epoch": 0.34, "grad_norm": 1.8304325673145305, "learning_rate": 7.704136092577924e-06, "loss": 0.9607, "step": 3150 }, { "epoch": 0.34, "grad_norm": 1.7694254272745213, "learning_rate": 7.70267145322289e-06, "loss": 0.8832, "step": 3151 }, { "epoch": 0.34, "grad_norm": 1.786653127151144, "learning_rate": 7.701206486159622e-06, "loss": 0.9359, "step": 3152 }, { "epoch": 0.34, "grad_norm": 1.7727991978862936, "learning_rate": 7.699741191565758e-06, "loss": 0.9387, "step": 3153 }, { "epoch": 0.34, "grad_norm": 1.7958468161483594, "learning_rate": 7.698275569618965e-06, "loss": 0.9005, "step": 3154 }, { "epoch": 0.34, "grad_norm": 1.712626754340491, "learning_rate": 7.696809620496957e-06, "loss": 0.894, "step": 3155 }, { "epoch": 0.34, "grad_norm": 1.873992530855375, "learning_rate": 7.695343344377486e-06, "loss": 0.9205, "step": 3156 }, { "epoch": 0.34, "grad_norm": 1.5934773951072805, "learning_rate": 7.693876741438342e-06, "loss": 0.8493, "step": 3157 }, { "epoch": 0.34, "grad_norm": 1.7390521007988509, "learning_rate": 7.692409811857357e-06, "loss": 0.9531, "step": 3158 }, { "epoch": 0.34, "grad_norm": 0.9037258265048332, "learning_rate": 7.690942555812398e-06, "loss": 1.0875, "step": 3159 }, { "epoch": 0.34, "grad_norm": 1.6715584526933467, "learning_rate": 7.689474973481379e-06, "loss": 0.9829, "step": 3160 }, { "epoch": 0.34, "grad_norm": 1.7727309253045949, "learning_rate": 7.688007065042246e-06, "loss": 0.9582, "step": 3161 }, { "epoch": 0.34, "grad_norm": 1.7665136086372766, "learning_rate": 7.686538830672988e-06, "loss": 0.8438, "step": 3162 }, { "epoch": 0.34, "grad_norm": 1.6549521823296243, "learning_rate": 7.685070270551637e-06, "loss": 0.9558, "step": 3163 }, { "epoch": 0.34, "grad_norm": 1.8109040193701325, "learning_rate": 7.68360138485626e-06, "loss": 0.8631, "step": 3164 }, { "epoch": 0.34, "grad_norm": 1.8521614987644577, "learning_rate": 7.682132173764962e-06, "loss": 0.8477, "step": 3165 }, { "epoch": 0.34, "grad_norm": 1.9669715784235244, "learning_rate": 7.68066263745589e-06, "loss": 0.9767, "step": 3166 }, { "epoch": 0.34, "grad_norm": 1.7045841187736768, "learning_rate": 7.679192776107232e-06, 
"loss": 0.8847, "step": 3167 }, { "epoch": 0.34, "grad_norm": 1.7104221102304642, "learning_rate": 7.677722589897214e-06, "loss": 0.8685, "step": 3168 }, { "epoch": 0.34, "grad_norm": 1.6007350121258022, "learning_rate": 7.676252079004101e-06, "loss": 0.9054, "step": 3169 }, { "epoch": 0.34, "grad_norm": 1.8539976120946018, "learning_rate": 7.674781243606197e-06, "loss": 0.908, "step": 3170 }, { "epoch": 0.34, "grad_norm": 1.679287217833275, "learning_rate": 7.673310083881844e-06, "loss": 0.9406, "step": 3171 }, { "epoch": 0.34, "grad_norm": 1.6239845540371451, "learning_rate": 7.67183860000943e-06, "loss": 0.9131, "step": 3172 }, { "epoch": 0.34, "grad_norm": 1.6800973748451429, "learning_rate": 7.670366792167371e-06, "loss": 0.9436, "step": 3173 }, { "epoch": 0.34, "grad_norm": 1.8545596638374644, "learning_rate": 7.668894660534135e-06, "loss": 0.9884, "step": 3174 }, { "epoch": 0.34, "grad_norm": 1.6730062293627086, "learning_rate": 7.66742220528822e-06, "loss": 0.7663, "step": 3175 }, { "epoch": 0.34, "grad_norm": 1.7288454628087877, "learning_rate": 7.665949426608164e-06, "loss": 0.907, "step": 3176 }, { "epoch": 0.34, "grad_norm": 1.7209141364750085, "learning_rate": 7.664476324672552e-06, "loss": 0.9329, "step": 3177 }, { "epoch": 0.34, "grad_norm": 1.818573308681355, "learning_rate": 7.663002899659999e-06, "loss": 0.997, "step": 3178 }, { "epoch": 0.34, "grad_norm": 1.700003323472943, "learning_rate": 7.661529151749163e-06, "loss": 0.8711, "step": 3179 }, { "epoch": 0.34, "grad_norm": 1.8419405226913856, "learning_rate": 7.660055081118743e-06, "loss": 0.8962, "step": 3180 }, { "epoch": 0.34, "grad_norm": 1.7436673021173064, "learning_rate": 7.658580687947473e-06, "loss": 0.8893, "step": 3181 }, { "epoch": 0.34, "grad_norm": 2.0749518377331198, "learning_rate": 7.657105972414129e-06, "loss": 0.8989, "step": 3182 }, { "epoch": 0.34, "grad_norm": 1.5839894292687773, "learning_rate": 7.655630934697524e-06, "loss": 0.9468, "step": 3183 }, { "epoch": 0.34, "grad_norm": 1.9005557202354577, "learning_rate": 7.654155574976516e-06, "loss": 0.9651, "step": 3184 }, { "epoch": 0.34, "grad_norm": 1.7745447881853755, "learning_rate": 7.652679893429993e-06, "loss": 0.9408, "step": 3185 }, { "epoch": 0.34, "grad_norm": 1.694981590177117, "learning_rate": 7.651203890236886e-06, "loss": 0.9999, "step": 3186 }, { "epoch": 0.34, "grad_norm": 1.7053717191532503, "learning_rate": 7.64972756557617e-06, "loss": 0.9355, "step": 3187 }, { "epoch": 0.34, "grad_norm": 1.8275926477329423, "learning_rate": 7.64825091962685e-06, "loss": 0.8875, "step": 3188 }, { "epoch": 0.34, "grad_norm": 1.6957161672310734, "learning_rate": 7.646773952567976e-06, "loss": 0.9064, "step": 3189 }, { "epoch": 0.34, "grad_norm": 1.6682651430677493, "learning_rate": 7.645296664578635e-06, "loss": 0.8883, "step": 3190 }, { "epoch": 0.34, "grad_norm": 1.8167660103225869, "learning_rate": 7.643819055837955e-06, "loss": 0.9698, "step": 3191 }, { "epoch": 0.34, "grad_norm": 1.8470024637425095, "learning_rate": 7.6423411265251e-06, "loss": 0.8651, "step": 3192 }, { "epoch": 0.34, "grad_norm": 1.6943686315074165, "learning_rate": 7.640862876819272e-06, "loss": 0.8562, "step": 3193 }, { "epoch": 0.34, "grad_norm": 1.7079257970414865, "learning_rate": 7.639384306899716e-06, "loss": 0.9185, "step": 3194 }, { "epoch": 0.34, "grad_norm": 0.9606980082804817, "learning_rate": 7.637905416945716e-06, "loss": 1.0351, "step": 3195 }, { "epoch": 0.34, "grad_norm": 1.9095499546603583, "learning_rate": 7.636426207136588e-06, "loss": 0.9605, "step": 3196 
}, { "epoch": 0.34, "grad_norm": 1.7839643222703667, "learning_rate": 7.634946677651694e-06, "loss": 0.8723, "step": 3197 }, { "epoch": 0.34, "grad_norm": 1.737502651847597, "learning_rate": 7.633466828670431e-06, "loss": 0.8967, "step": 3198 }, { "epoch": 0.34, "grad_norm": 1.8221707882765172, "learning_rate": 7.631986660372235e-06, "loss": 0.9221, "step": 3199 }, { "epoch": 0.34, "grad_norm": 0.8719886660041634, "learning_rate": 7.630506172936583e-06, "loss": 1.0559, "step": 3200 }, { "epoch": 0.34, "grad_norm": 1.6636267825452606, "learning_rate": 7.6290253665429905e-06, "loss": 0.961, "step": 3201 }, { "epoch": 0.34, "grad_norm": 0.8605510747131204, "learning_rate": 7.627544241371006e-06, "loss": 1.0321, "step": 3202 }, { "epoch": 0.34, "grad_norm": 1.6999781964765814, "learning_rate": 7.6260627976002255e-06, "loss": 0.8288, "step": 3203 }, { "epoch": 0.34, "grad_norm": 1.8029288377285624, "learning_rate": 7.624581035410277e-06, "loss": 0.8991, "step": 3204 }, { "epoch": 0.34, "grad_norm": 1.8623788673407502, "learning_rate": 7.6230989549808296e-06, "loss": 0.9363, "step": 3205 }, { "epoch": 0.34, "grad_norm": 1.7993634371626448, "learning_rate": 7.621616556491591e-06, "loss": 0.9252, "step": 3206 }, { "epoch": 0.34, "grad_norm": 1.7789923061228887, "learning_rate": 7.620133840122306e-06, "loss": 0.9313, "step": 3207 }, { "epoch": 0.34, "grad_norm": 1.7690237035386838, "learning_rate": 7.618650806052761e-06, "loss": 0.9551, "step": 3208 }, { "epoch": 0.34, "grad_norm": 1.733476776135675, "learning_rate": 7.617167454462777e-06, "loss": 0.8977, "step": 3209 }, { "epoch": 0.35, "grad_norm": 1.7521930159293178, "learning_rate": 7.615683785532218e-06, "loss": 0.9803, "step": 3210 }, { "epoch": 0.35, "grad_norm": 1.6794386201277403, "learning_rate": 7.614199799440981e-06, "loss": 0.9543, "step": 3211 }, { "epoch": 0.35, "grad_norm": 0.8941148988641795, "learning_rate": 7.612715496369007e-06, "loss": 1.0675, "step": 3212 }, { "epoch": 0.35, "grad_norm": 1.788758815051161, "learning_rate": 7.61123087649627e-06, "loss": 1.0082, "step": 3213 }, { "epoch": 0.35, "grad_norm": 1.7540886054378615, "learning_rate": 7.609745940002786e-06, "loss": 0.8425, "step": 3214 }, { "epoch": 0.35, "grad_norm": 1.8595054254276828, "learning_rate": 7.608260687068611e-06, "loss": 0.8342, "step": 3215 }, { "epoch": 0.35, "grad_norm": 1.6745912398987448, "learning_rate": 7.606775117873837e-06, "loss": 0.9142, "step": 3216 }, { "epoch": 0.35, "grad_norm": 1.7913121430235512, "learning_rate": 7.605289232598592e-06, "loss": 0.8834, "step": 3217 }, { "epoch": 0.35, "grad_norm": 1.818131893843938, "learning_rate": 7.603803031423046e-06, "loss": 0.954, "step": 3218 }, { "epoch": 0.35, "grad_norm": 1.879072223539242, "learning_rate": 7.602316514527405e-06, "loss": 0.9507, "step": 3219 }, { "epoch": 0.35, "grad_norm": 1.7415104572545848, "learning_rate": 7.600829682091915e-06, "loss": 0.9244, "step": 3220 }, { "epoch": 0.35, "grad_norm": 1.8296540841332827, "learning_rate": 7.599342534296861e-06, "loss": 0.88, "step": 3221 }, { "epoch": 0.35, "grad_norm": 1.6980749837098486, "learning_rate": 7.597855071322562e-06, "loss": 0.9143, "step": 3222 }, { "epoch": 0.35, "grad_norm": 1.8152405207758098, "learning_rate": 7.59636729334938e-06, "loss": 0.9121, "step": 3223 }, { "epoch": 0.35, "grad_norm": 1.7965257454625034, "learning_rate": 7.594879200557712e-06, "loss": 0.8219, "step": 3224 }, { "epoch": 0.35, "grad_norm": 1.6844182892925543, "learning_rate": 7.593390793127997e-06, "loss": 0.8775, "step": 3225 }, { "epoch": 0.35, 
"grad_norm": 1.7020023385444039, "learning_rate": 7.591902071240705e-06, "loss": 0.8523, "step": 3226 }, { "epoch": 0.35, "grad_norm": 1.7076335555207474, "learning_rate": 7.590413035076354e-06, "loss": 0.9194, "step": 3227 }, { "epoch": 0.35, "grad_norm": 1.7963068200928247, "learning_rate": 7.5889236848154905e-06, "loss": 0.8944, "step": 3228 }, { "epoch": 0.35, "grad_norm": 1.7777189488158993, "learning_rate": 7.587434020638704e-06, "loss": 0.8987, "step": 3229 }, { "epoch": 0.35, "grad_norm": 1.8386141432948673, "learning_rate": 7.585944042726623e-06, "loss": 0.8552, "step": 3230 }, { "epoch": 0.35, "grad_norm": 1.8038523386424994, "learning_rate": 7.584453751259912e-06, "loss": 0.9377, "step": 3231 }, { "epoch": 0.35, "grad_norm": 1.6768570747188016, "learning_rate": 7.582963146419273e-06, "loss": 0.888, "step": 3232 }, { "epoch": 0.35, "grad_norm": 1.811970696976311, "learning_rate": 7.581472228385448e-06, "loss": 0.9752, "step": 3233 }, { "epoch": 0.35, "grad_norm": 1.7468139178931152, "learning_rate": 7.579980997339215e-06, "loss": 0.9799, "step": 3234 }, { "epoch": 0.35, "grad_norm": 0.9462443790179987, "learning_rate": 7.5784894534613925e-06, "loss": 1.0922, "step": 3235 }, { "epoch": 0.35, "grad_norm": 1.752540486288468, "learning_rate": 7.576997596932834e-06, "loss": 0.866, "step": 3236 }, { "epoch": 0.35, "grad_norm": 1.8034097010203642, "learning_rate": 7.575505427934433e-06, "loss": 0.9398, "step": 3237 }, { "epoch": 0.35, "grad_norm": 1.7524342166948448, "learning_rate": 7.5740129466471204e-06, "loss": 0.9187, "step": 3238 }, { "epoch": 0.35, "grad_norm": 1.6865178965199388, "learning_rate": 7.572520153251862e-06, "loss": 0.9509, "step": 3239 }, { "epoch": 0.35, "grad_norm": 1.7637744852630872, "learning_rate": 7.571027047929667e-06, "loss": 0.9478, "step": 3240 }, { "epoch": 0.35, "grad_norm": 1.7002997387038798, "learning_rate": 7.5695336308615785e-06, "loss": 0.958, "step": 3241 }, { "epoch": 0.35, "grad_norm": 1.7321206683135355, "learning_rate": 7.56803990222868e-06, "loss": 0.8852, "step": 3242 }, { "epoch": 0.35, "grad_norm": 1.75358728466011, "learning_rate": 7.56654586221209e-06, "loss": 0.9307, "step": 3243 }, { "epoch": 0.35, "grad_norm": 1.7529641677278303, "learning_rate": 7.565051510992964e-06, "loss": 0.8728, "step": 3244 }, { "epoch": 0.35, "grad_norm": 1.608867549877374, "learning_rate": 7.563556848752502e-06, "loss": 0.9105, "step": 3245 }, { "epoch": 0.35, "grad_norm": 1.7456707325625758, "learning_rate": 7.5620618756719325e-06, "loss": 0.9402, "step": 3246 }, { "epoch": 0.35, "grad_norm": 1.7613599751227094, "learning_rate": 7.560566591932526e-06, "loss": 0.8894, "step": 3247 }, { "epoch": 0.35, "grad_norm": 1.7069245997247633, "learning_rate": 7.559070997715596e-06, "loss": 0.916, "step": 3248 }, { "epoch": 0.35, "grad_norm": 1.8425068464616408, "learning_rate": 7.557575093202484e-06, "loss": 0.8975, "step": 3249 }, { "epoch": 0.35, "grad_norm": 1.8165796166336952, "learning_rate": 7.556078878574573e-06, "loss": 0.9078, "step": 3250 }, { "epoch": 0.35, "grad_norm": 1.752575378113844, "learning_rate": 7.554582354013288e-06, "loss": 0.9979, "step": 3251 }, { "epoch": 0.35, "grad_norm": 1.6976734361297443, "learning_rate": 7.553085519700085e-06, "loss": 0.8914, "step": 3252 }, { "epoch": 0.35, "grad_norm": 1.685476970027444, "learning_rate": 7.551588375816461e-06, "loss": 0.9057, "step": 3253 }, { "epoch": 0.35, "grad_norm": 1.6646624663466865, "learning_rate": 7.550090922543949e-06, "loss": 0.9255, "step": 3254 }, { "epoch": 0.35, "grad_norm": 
1.671410598278676, "learning_rate": 7.54859316006412e-06, "loss": 0.9241, "step": 3255 }, { "epoch": 0.35, "grad_norm": 1.6576226515766672, "learning_rate": 7.547095088558586e-06, "loss": 0.8211, "step": 3256 }, { "epoch": 0.35, "grad_norm": 1.6704685267339927, "learning_rate": 7.545596708208991e-06, "loss": 1.0216, "step": 3257 }, { "epoch": 0.35, "grad_norm": 1.69542926329254, "learning_rate": 7.544098019197018e-06, "loss": 0.8835, "step": 3258 }, { "epoch": 0.35, "grad_norm": 1.5936605632516918, "learning_rate": 7.542599021704391e-06, "loss": 0.8803, "step": 3259 }, { "epoch": 0.35, "grad_norm": 1.770487899666137, "learning_rate": 7.541099715912867e-06, "loss": 0.8214, "step": 3260 }, { "epoch": 0.35, "grad_norm": 1.7477233064981108, "learning_rate": 7.539600102004241e-06, "loss": 0.9945, "step": 3261 }, { "epoch": 0.35, "grad_norm": 1.7934872611365564, "learning_rate": 7.538100180160347e-06, "loss": 1.0322, "step": 3262 }, { "epoch": 0.35, "grad_norm": 1.7590684351205144, "learning_rate": 7.5365999505630575e-06, "loss": 0.9198, "step": 3263 }, { "epoch": 0.35, "grad_norm": 1.7633529656090035, "learning_rate": 7.5350994133942764e-06, "loss": 0.8568, "step": 3264 }, { "epoch": 0.35, "grad_norm": 1.9301144476852095, "learning_rate": 7.533598568835954e-06, "loss": 0.8365, "step": 3265 }, { "epoch": 0.35, "grad_norm": 1.791926145009191, "learning_rate": 7.53209741707007e-06, "loss": 0.891, "step": 3266 }, { "epoch": 0.35, "grad_norm": 1.7380834991297436, "learning_rate": 7.530595958278644e-06, "loss": 0.9147, "step": 3267 }, { "epoch": 0.35, "grad_norm": 1.6494701761725241, "learning_rate": 7.529094192643733e-06, "loss": 0.8571, "step": 3268 }, { "epoch": 0.35, "grad_norm": 1.7473442852751488, "learning_rate": 7.527592120347433e-06, "loss": 0.9325, "step": 3269 }, { "epoch": 0.35, "grad_norm": 1.75144451661381, "learning_rate": 7.526089741571876e-06, "loss": 0.9449, "step": 3270 }, { "epoch": 0.35, "grad_norm": 1.7281097186425467, "learning_rate": 7.524587056499227e-06, "loss": 0.9041, "step": 3271 }, { "epoch": 0.35, "grad_norm": 1.6590454150434881, "learning_rate": 7.523084065311695e-06, "loss": 0.8983, "step": 3272 }, { "epoch": 0.35, "grad_norm": 1.8579740345934426, "learning_rate": 7.521580768191521e-06, "loss": 0.9053, "step": 3273 }, { "epoch": 0.35, "grad_norm": 1.7470045275489403, "learning_rate": 7.520077165320986e-06, "loss": 0.887, "step": 3274 }, { "epoch": 0.35, "grad_norm": 1.6379848040178568, "learning_rate": 7.518573256882407e-06, "loss": 0.9262, "step": 3275 }, { "epoch": 0.35, "grad_norm": 1.7174552648963053, "learning_rate": 7.517069043058136e-06, "loss": 0.9276, "step": 3276 }, { "epoch": 0.35, "grad_norm": 1.580852153288313, "learning_rate": 7.515564524030568e-06, "loss": 0.8904, "step": 3277 }, { "epoch": 0.35, "grad_norm": 1.7244884890473644, "learning_rate": 7.514059699982128e-06, "loss": 0.9085, "step": 3278 }, { "epoch": 0.35, "grad_norm": 1.6069249931455998, "learning_rate": 7.512554571095282e-06, "loss": 0.8914, "step": 3279 }, { "epoch": 0.35, "grad_norm": 1.0577805211931215, "learning_rate": 7.511049137552533e-06, "loss": 1.0919, "step": 3280 }, { "epoch": 0.35, "grad_norm": 1.7551151519644463, "learning_rate": 7.509543399536419e-06, "loss": 0.8525, "step": 3281 }, { "epoch": 0.35, "grad_norm": 1.7273119192195896, "learning_rate": 7.508037357229515e-06, "loss": 0.9829, "step": 3282 }, { "epoch": 0.35, "grad_norm": 1.697972290513672, "learning_rate": 7.506531010814435e-06, "loss": 0.887, "step": 3283 }, { "epoch": 0.35, "grad_norm": 1.722066702915343, 
"learning_rate": 7.505024360473829e-06, "loss": 0.8507, "step": 3284 }, { "epoch": 0.35, "grad_norm": 0.9046665262633369, "learning_rate": 7.503517406390385e-06, "loss": 1.0987, "step": 3285 }, { "epoch": 0.35, "grad_norm": 0.8928219859526223, "learning_rate": 7.502010148746821e-06, "loss": 1.0615, "step": 3286 }, { "epoch": 0.35, "grad_norm": 1.7376419223983317, "learning_rate": 7.5005025877259045e-06, "loss": 0.9652, "step": 3287 }, { "epoch": 0.35, "grad_norm": 1.8014024314675128, "learning_rate": 7.4989947235104264e-06, "loss": 0.9315, "step": 3288 }, { "epoch": 0.35, "grad_norm": 1.9117966804886433, "learning_rate": 7.497486556283222e-06, "loss": 0.9107, "step": 3289 }, { "epoch": 0.35, "grad_norm": 1.73640326340925, "learning_rate": 7.495978086227164e-06, "loss": 0.9514, "step": 3290 }, { "epoch": 0.35, "grad_norm": 1.6915311699069033, "learning_rate": 7.494469313525159e-06, "loss": 0.8593, "step": 3291 }, { "epoch": 0.35, "grad_norm": 1.7450022154061327, "learning_rate": 7.4929602383601475e-06, "loss": 0.8731, "step": 3292 }, { "epoch": 0.35, "grad_norm": 1.7102579278956707, "learning_rate": 7.491450860915115e-06, "loss": 0.9368, "step": 3293 }, { "epoch": 0.35, "grad_norm": 1.715594217374372, "learning_rate": 7.489941181373075e-06, "loss": 1.0199, "step": 3294 }, { "epoch": 0.35, "grad_norm": 1.8003577081979956, "learning_rate": 7.488431199917081e-06, "loss": 0.9638, "step": 3295 }, { "epoch": 0.35, "grad_norm": 1.751998317382199, "learning_rate": 7.486920916730228e-06, "loss": 0.868, "step": 3296 }, { "epoch": 0.35, "grad_norm": 1.756928767150894, "learning_rate": 7.48541033199564e-06, "loss": 1.0205, "step": 3297 }, { "epoch": 0.35, "grad_norm": 1.6550424964521924, "learning_rate": 7.4838994458964785e-06, "loss": 0.9479, "step": 3298 }, { "epoch": 0.35, "grad_norm": 1.6883032413304706, "learning_rate": 7.482388258615947e-06, "loss": 0.91, "step": 3299 }, { "epoch": 0.35, "grad_norm": 1.7481837818496226, "learning_rate": 7.480876770337281e-06, "loss": 0.9805, "step": 3300 }, { "epoch": 0.35, "grad_norm": 1.8263325115939446, "learning_rate": 7.479364981243754e-06, "loss": 0.8588, "step": 3301 }, { "epoch": 0.35, "grad_norm": 1.7572162938301292, "learning_rate": 7.477852891518675e-06, "loss": 0.8628, "step": 3302 }, { "epoch": 0.36, "grad_norm": 1.8270750998523413, "learning_rate": 7.4763405013453906e-06, "loss": 0.9744, "step": 3303 }, { "epoch": 0.36, "grad_norm": 1.0534308597746713, "learning_rate": 7.474827810907284e-06, "loss": 1.0448, "step": 3304 }, { "epoch": 0.36, "grad_norm": 1.7846339277389076, "learning_rate": 7.473314820387772e-06, "loss": 0.7901, "step": 3305 }, { "epoch": 0.36, "grad_norm": 1.7110302417972643, "learning_rate": 7.471801529970312e-06, "loss": 0.9333, "step": 3306 }, { "epoch": 0.36, "grad_norm": 1.8347411262180946, "learning_rate": 7.470287939838394e-06, "loss": 0.9581, "step": 3307 }, { "epoch": 0.36, "grad_norm": 1.6973523481093706, "learning_rate": 7.468774050175548e-06, "loss": 0.9127, "step": 3308 }, { "epoch": 0.36, "grad_norm": 1.8961071483694014, "learning_rate": 7.467259861165335e-06, "loss": 0.954, "step": 3309 }, { "epoch": 0.36, "grad_norm": 1.7261615033362823, "learning_rate": 7.46574537299136e-06, "loss": 0.8639, "step": 3310 }, { "epoch": 0.36, "grad_norm": 1.6380139865200996, "learning_rate": 7.464230585837257e-06, "loss": 0.888, "step": 3311 }, { "epoch": 0.36, "grad_norm": 1.6555589446320618, "learning_rate": 7.462715499886701e-06, "loss": 0.8883, "step": 3312 }, { "epoch": 0.36, "grad_norm": 1.6983118008345057, "learning_rate": 
7.4612001153234e-06, "loss": 0.9286, "step": 3313 }, { "epoch": 0.36, "grad_norm": 1.7605505240987802, "learning_rate": 7.4596844323311e-06, "loss": 0.8708, "step": 3314 }, { "epoch": 0.36, "grad_norm": 1.6871151868612113, "learning_rate": 7.458168451093583e-06, "loss": 0.8482, "step": 3315 }, { "epoch": 0.36, "grad_norm": 1.7518828427566988, "learning_rate": 7.456652171794666e-06, "loss": 0.8993, "step": 3316 }, { "epoch": 0.36, "grad_norm": 1.6085552723407484, "learning_rate": 7.455135594618204e-06, "loss": 0.9141, "step": 3317 }, { "epoch": 0.36, "grad_norm": 1.9195026846731007, "learning_rate": 7.453618719748087e-06, "loss": 0.8732, "step": 3318 }, { "epoch": 0.36, "grad_norm": 1.7029781721289585, "learning_rate": 7.452101547368242e-06, "loss": 0.998, "step": 3319 }, { "epoch": 0.36, "grad_norm": 1.8748497300245488, "learning_rate": 7.450584077662628e-06, "loss": 0.962, "step": 3320 }, { "epoch": 0.36, "grad_norm": 1.8186491422533853, "learning_rate": 7.449066310815249e-06, "loss": 0.9288, "step": 3321 }, { "epoch": 0.36, "grad_norm": 1.7461666374926603, "learning_rate": 7.447548247010137e-06, "loss": 0.9137, "step": 3322 }, { "epoch": 0.36, "grad_norm": 1.7520519312904304, "learning_rate": 7.4460298864313606e-06, "loss": 0.9748, "step": 3323 }, { "epoch": 0.36, "grad_norm": 1.685064117045246, "learning_rate": 7.444511229263027e-06, "loss": 0.8902, "step": 3324 }, { "epoch": 0.36, "grad_norm": 1.7676762665251464, "learning_rate": 7.442992275689281e-06, "loss": 0.9702, "step": 3325 }, { "epoch": 0.36, "grad_norm": 1.9013027778275644, "learning_rate": 7.441473025894298e-06, "loss": 0.9272, "step": 3326 }, { "epoch": 0.36, "grad_norm": 1.6556882740715895, "learning_rate": 7.439953480062294e-06, "loss": 0.9084, "step": 3327 }, { "epoch": 0.36, "grad_norm": 1.8016120860563096, "learning_rate": 7.43843363837752e-06, "loss": 1.0024, "step": 3328 }, { "epoch": 0.36, "grad_norm": 1.0857740614042077, "learning_rate": 7.436913501024259e-06, "loss": 1.0428, "step": 3329 }, { "epoch": 0.36, "grad_norm": 1.7906442762242527, "learning_rate": 7.435393068186836e-06, "loss": 0.921, "step": 3330 }, { "epoch": 0.36, "grad_norm": 1.785233540155477, "learning_rate": 7.4338723400496075e-06, "loss": 0.8981, "step": 3331 }, { "epoch": 0.36, "grad_norm": 1.6413606470544102, "learning_rate": 7.432351316796964e-06, "loss": 0.9126, "step": 3332 }, { "epoch": 0.36, "grad_norm": 1.7899005083175799, "learning_rate": 7.430829998613342e-06, "loss": 0.8799, "step": 3333 }, { "epoch": 0.36, "grad_norm": 1.7768760339940515, "learning_rate": 7.429308385683199e-06, "loss": 0.7923, "step": 3334 }, { "epoch": 0.36, "grad_norm": 0.8885875206253508, "learning_rate": 7.427786478191041e-06, "loss": 1.0668, "step": 3335 }, { "epoch": 0.36, "grad_norm": 1.759935724746908, "learning_rate": 7.426264276321402e-06, "loss": 0.9477, "step": 3336 }, { "epoch": 0.36, "grad_norm": 1.7616315467701726, "learning_rate": 7.424741780258855e-06, "loss": 0.8702, "step": 3337 }, { "epoch": 0.36, "grad_norm": 1.830856127209951, "learning_rate": 7.423218990188008e-06, "loss": 0.955, "step": 3338 }, { "epoch": 0.36, "grad_norm": 1.8049959649239131, "learning_rate": 7.421695906293504e-06, "loss": 0.9023, "step": 3339 }, { "epoch": 0.36, "grad_norm": 0.8838853878118114, "learning_rate": 7.420172528760022e-06, "loss": 1.0727, "step": 3340 }, { "epoch": 0.36, "grad_norm": 1.7480358579933728, "learning_rate": 7.418648857772279e-06, "loss": 0.9406, "step": 3341 }, { "epoch": 0.36, "grad_norm": 1.6461169657610413, "learning_rate": 7.417124893515022e-06, 
"loss": 0.9234, "step": 3342 }, { "epoch": 0.36, "grad_norm": 1.763522855230025, "learning_rate": 7.4156006361730395e-06, "loss": 0.9633, "step": 3343 }, { "epoch": 0.36, "grad_norm": 1.8635987906876557, "learning_rate": 7.414076085931152e-06, "loss": 0.8921, "step": 3344 }, { "epoch": 0.36, "grad_norm": 1.6694079108516462, "learning_rate": 7.4125512429742166e-06, "loss": 0.8498, "step": 3345 }, { "epoch": 0.36, "grad_norm": 1.9387554904060442, "learning_rate": 7.411026107487123e-06, "loss": 0.9155, "step": 3346 }, { "epoch": 0.36, "grad_norm": 1.8332840469538487, "learning_rate": 7.409500679654805e-06, "loss": 0.9309, "step": 3347 }, { "epoch": 0.36, "grad_norm": 1.6873794724323066, "learning_rate": 7.407974959662222e-06, "loss": 0.8686, "step": 3348 }, { "epoch": 0.36, "grad_norm": 1.7726669245130953, "learning_rate": 7.406448947694374e-06, "loss": 1.0045, "step": 3349 }, { "epoch": 0.36, "grad_norm": 1.5891647834478793, "learning_rate": 7.404922643936294e-06, "loss": 0.8308, "step": 3350 }, { "epoch": 0.36, "grad_norm": 1.8354347368271844, "learning_rate": 7.403396048573052e-06, "loss": 0.896, "step": 3351 }, { "epoch": 0.36, "grad_norm": 1.7301188378539172, "learning_rate": 7.4018691617897545e-06, "loss": 0.8815, "step": 3352 }, { "epoch": 0.36, "grad_norm": 1.7468554946972616, "learning_rate": 7.40034198377154e-06, "loss": 0.9513, "step": 3353 }, { "epoch": 0.36, "grad_norm": 1.903649293058784, "learning_rate": 7.398814514703585e-06, "loss": 0.9608, "step": 3354 }, { "epoch": 0.36, "grad_norm": 1.867873737546824, "learning_rate": 7.397286754771099e-06, "loss": 0.9427, "step": 3355 }, { "epoch": 0.36, "grad_norm": 1.7182649002592478, "learning_rate": 7.39575870415933e-06, "loss": 0.9124, "step": 3356 }, { "epoch": 0.36, "grad_norm": 1.7457813898873142, "learning_rate": 7.394230363053558e-06, "loss": 0.8758, "step": 3357 }, { "epoch": 0.36, "grad_norm": 1.777804667996553, "learning_rate": 7.3927017316391024e-06, "loss": 0.9732, "step": 3358 }, { "epoch": 0.36, "grad_norm": 1.0199925982213331, "learning_rate": 7.391172810101311e-06, "loss": 1.0812, "step": 3359 }, { "epoch": 0.36, "grad_norm": 1.7367887303573601, "learning_rate": 7.389643598625574e-06, "loss": 0.852, "step": 3360 }, { "epoch": 0.36, "grad_norm": 1.6509855094140875, "learning_rate": 7.388114097397312e-06, "loss": 0.9238, "step": 3361 }, { "epoch": 0.36, "grad_norm": 1.8070664565500492, "learning_rate": 7.386584306601982e-06, "loss": 0.8389, "step": 3362 }, { "epoch": 0.36, "grad_norm": 1.692140573828619, "learning_rate": 7.385054226425077e-06, "loss": 0.8075, "step": 3363 }, { "epoch": 0.36, "grad_norm": 1.836891511694827, "learning_rate": 7.383523857052125e-06, "loss": 0.9303, "step": 3364 }, { "epoch": 0.36, "grad_norm": 1.807655871308822, "learning_rate": 7.381993198668689e-06, "loss": 0.9394, "step": 3365 }, { "epoch": 0.36, "grad_norm": 0.9761758390026731, "learning_rate": 7.380462251460364e-06, "loss": 1.0469, "step": 3366 }, { "epoch": 0.36, "grad_norm": 1.774982463810186, "learning_rate": 7.378931015612786e-06, "loss": 0.8895, "step": 3367 }, { "epoch": 0.36, "grad_norm": 1.7597933204675105, "learning_rate": 7.377399491311619e-06, "loss": 0.9243, "step": 3368 }, { "epoch": 0.36, "grad_norm": 1.7683208501624577, "learning_rate": 7.375867678742567e-06, "loss": 0.8965, "step": 3369 }, { "epoch": 0.36, "grad_norm": 1.7933521337735707, "learning_rate": 7.374335578091372e-06, "loss": 0.9665, "step": 3370 }, { "epoch": 0.36, "grad_norm": 0.8697269842382845, "learning_rate": 7.372803189543799e-06, "loss": 1.036, "step": 
3371 }, { "epoch": 0.36, "grad_norm": 1.6992990261014262, "learning_rate": 7.37127051328566e-06, "loss": 0.8811, "step": 3372 }, { "epoch": 0.36, "grad_norm": 1.6990455614411435, "learning_rate": 7.369737549502797e-06, "loss": 0.9821, "step": 3373 }, { "epoch": 0.36, "grad_norm": 1.7864955568474217, "learning_rate": 7.368204298381085e-06, "loss": 0.8734, "step": 3374 }, { "epoch": 0.36, "grad_norm": 1.604090882580611, "learning_rate": 7.366670760106439e-06, "loss": 0.8986, "step": 3375 }, { "epoch": 0.36, "grad_norm": 1.6866980777488114, "learning_rate": 7.365136934864804e-06, "loss": 0.9234, "step": 3376 }, { "epoch": 0.36, "grad_norm": 1.7553036531217971, "learning_rate": 7.3636028228421595e-06, "loss": 0.982, "step": 3377 }, { "epoch": 0.36, "grad_norm": 1.8151438967238653, "learning_rate": 7.3620684242245264e-06, "loss": 1.0423, "step": 3378 }, { "epoch": 0.36, "grad_norm": 1.730724790126759, "learning_rate": 7.360533739197952e-06, "loss": 0.9448, "step": 3379 }, { "epoch": 0.36, "grad_norm": 1.6941573262485696, "learning_rate": 7.358998767948525e-06, "loss": 0.9559, "step": 3380 }, { "epoch": 0.36, "grad_norm": 1.6157727713930206, "learning_rate": 7.357463510662365e-06, "loss": 0.8885, "step": 3381 }, { "epoch": 0.36, "grad_norm": 1.6743865004593845, "learning_rate": 7.355927967525626e-06, "loss": 0.923, "step": 3382 }, { "epoch": 0.36, "grad_norm": 1.6269144548954249, "learning_rate": 7.354392138724499e-06, "loss": 0.8998, "step": 3383 }, { "epoch": 0.36, "grad_norm": 1.7212943410745358, "learning_rate": 7.352856024445208e-06, "loss": 0.8217, "step": 3384 }, { "epoch": 0.36, "grad_norm": 1.7356861877373753, "learning_rate": 7.351319624874013e-06, "loss": 0.9484, "step": 3385 }, { "epoch": 0.36, "grad_norm": 1.795463863054259, "learning_rate": 7.349782940197208e-06, "loss": 0.9545, "step": 3386 }, { "epoch": 0.36, "grad_norm": 1.683754431552234, "learning_rate": 7.34824597060112e-06, "loss": 0.896, "step": 3387 }, { "epoch": 0.36, "grad_norm": 1.680330292599506, "learning_rate": 7.34670871627211e-06, "loss": 0.9006, "step": 3388 }, { "epoch": 0.36, "grad_norm": 0.9670086825680536, "learning_rate": 7.34517117739658e-06, "loss": 1.0633, "step": 3389 }, { "epoch": 0.36, "grad_norm": 1.7798192906324743, "learning_rate": 7.343633354160961e-06, "loss": 1.0078, "step": 3390 }, { "epoch": 0.36, "grad_norm": 1.7490574833579196, "learning_rate": 7.342095246751717e-06, "loss": 0.9926, "step": 3391 }, { "epoch": 0.36, "grad_norm": 1.6377152849727812, "learning_rate": 7.340556855355349e-06, "loss": 0.9025, "step": 3392 }, { "epoch": 0.36, "grad_norm": 1.768482964825861, "learning_rate": 7.339018180158395e-06, "loss": 0.9355, "step": 3393 }, { "epoch": 0.36, "grad_norm": 1.6231583351312866, "learning_rate": 7.33747922134742e-06, "loss": 0.9198, "step": 3394 }, { "epoch": 0.36, "grad_norm": 1.8476246861894299, "learning_rate": 7.335939979109032e-06, "loss": 0.8514, "step": 3395 }, { "epoch": 0.37, "grad_norm": 1.6178812077358735, "learning_rate": 7.334400453629869e-06, "loss": 0.8521, "step": 3396 }, { "epoch": 0.37, "grad_norm": 1.6684122014573253, "learning_rate": 7.332860645096604e-06, "loss": 0.8306, "step": 3397 }, { "epoch": 0.37, "grad_norm": 0.9369820314050638, "learning_rate": 7.331320553695942e-06, "loss": 1.0569, "step": 3398 }, { "epoch": 0.37, "grad_norm": 1.6376415899790333, "learning_rate": 7.329780179614625e-06, "loss": 0.8951, "step": 3399 }, { "epoch": 0.37, "grad_norm": 1.7392488393014507, "learning_rate": 7.328239523039431e-06, "loss": 0.9804, "step": 3400 }, { "epoch": 0.37, 
"grad_norm": 1.7282752591342359, "learning_rate": 7.3266985841571666e-06, "loss": 0.9047, "step": 3401 }, { "epoch": 0.37, "grad_norm": 1.7167152986144596, "learning_rate": 7.325157363154678e-06, "loss": 0.8986, "step": 3402 }, { "epoch": 0.37, "grad_norm": 1.7826895304474335, "learning_rate": 7.323615860218844e-06, "loss": 0.9639, "step": 3403 }, { "epoch": 0.37, "grad_norm": 1.649060803257622, "learning_rate": 7.322074075536575e-06, "loss": 0.8502, "step": 3404 }, { "epoch": 0.37, "grad_norm": 1.7382089958096956, "learning_rate": 7.320532009294818e-06, "loss": 0.8667, "step": 3405 }, { "epoch": 0.37, "grad_norm": 1.750635600304196, "learning_rate": 7.3189896616805565e-06, "loss": 0.9503, "step": 3406 }, { "epoch": 0.37, "grad_norm": 1.7102984071284362, "learning_rate": 7.317447032880804e-06, "loss": 0.9423, "step": 3407 }, { "epoch": 0.37, "grad_norm": 1.7459375123075902, "learning_rate": 7.315904123082609e-06, "loss": 0.9179, "step": 3408 }, { "epoch": 0.37, "grad_norm": 1.8225423335822546, "learning_rate": 7.314360932473054e-06, "loss": 0.9606, "step": 3409 }, { "epoch": 0.37, "grad_norm": 1.9239747391040125, "learning_rate": 7.312817461239259e-06, "loss": 0.898, "step": 3410 }, { "epoch": 0.37, "grad_norm": 1.7327283654368293, "learning_rate": 7.311273709568371e-06, "loss": 0.9005, "step": 3411 }, { "epoch": 0.37, "grad_norm": 1.9483837994623496, "learning_rate": 7.309729677647579e-06, "loss": 0.9513, "step": 3412 }, { "epoch": 0.37, "grad_norm": 0.9164921195129382, "learning_rate": 7.3081853656641e-06, "loss": 1.026, "step": 3413 }, { "epoch": 0.37, "grad_norm": 0.8797180029944156, "learning_rate": 7.306640773805188e-06, "loss": 1.0507, "step": 3414 }, { "epoch": 0.37, "grad_norm": 1.6616115847168296, "learning_rate": 7.30509590225813e-06, "loss": 0.9331, "step": 3415 }, { "epoch": 0.37, "grad_norm": 1.705659155990624, "learning_rate": 7.303550751210247e-06, "loss": 0.8546, "step": 3416 }, { "epoch": 0.37, "grad_norm": 1.6976483916926663, "learning_rate": 7.302005320848894e-06, "loss": 0.9457, "step": 3417 }, { "epoch": 0.37, "grad_norm": 1.7172659978139384, "learning_rate": 7.300459611361461e-06, "loss": 0.8174, "step": 3418 }, { "epoch": 0.37, "grad_norm": 1.050136016099378, "learning_rate": 7.298913622935366e-06, "loss": 1.0902, "step": 3419 }, { "epoch": 0.37, "grad_norm": 1.776453849163029, "learning_rate": 7.297367355758072e-06, "loss": 0.9521, "step": 3420 }, { "epoch": 0.37, "grad_norm": 1.8291316049357584, "learning_rate": 7.295820810017065e-06, "loss": 0.8535, "step": 3421 }, { "epoch": 0.37, "grad_norm": 1.801293992009186, "learning_rate": 7.294273985899868e-06, "loss": 0.9103, "step": 3422 }, { "epoch": 0.37, "grad_norm": 1.6750016380200354, "learning_rate": 7.292726883594043e-06, "loss": 0.8761, "step": 3423 }, { "epoch": 0.37, "grad_norm": 1.7792656006823968, "learning_rate": 7.291179503287178e-06, "loss": 0.8825, "step": 3424 }, { "epoch": 0.37, "grad_norm": 1.7017782707414377, "learning_rate": 7.2896318451668975e-06, "loss": 0.9076, "step": 3425 }, { "epoch": 0.37, "grad_norm": 1.6493662991980782, "learning_rate": 7.288083909420866e-06, "loss": 0.9552, "step": 3426 }, { "epoch": 0.37, "grad_norm": 0.9728495995493363, "learning_rate": 7.28653569623677e-06, "loss": 1.0732, "step": 3427 }, { "epoch": 0.37, "grad_norm": 1.6989569772599058, "learning_rate": 7.284987205802338e-06, "loss": 0.8616, "step": 3428 }, { "epoch": 0.37, "grad_norm": 1.7825258667492783, "learning_rate": 7.28343843830533e-06, "loss": 0.9139, "step": 3429 }, { "epoch": 0.37, "grad_norm": 
1.7364359931910722, "learning_rate": 7.281889393933539e-06, "loss": 0.8987, "step": 3430 }, { "epoch": 0.37, "grad_norm": 1.6970828888922183, "learning_rate": 7.280340072874792e-06, "loss": 0.9766, "step": 3431 }, { "epoch": 0.37, "grad_norm": 1.7681464261981494, "learning_rate": 7.27879047531695e-06, "loss": 0.9512, "step": 3432 }, { "epoch": 0.37, "grad_norm": 1.7880879198559911, "learning_rate": 7.277240601447908e-06, "loss": 0.8701, "step": 3433 }, { "epoch": 0.37, "grad_norm": 1.7315916656125943, "learning_rate": 7.27569045145559e-06, "loss": 0.9284, "step": 3434 }, { "epoch": 0.37, "grad_norm": 1.8191070206057929, "learning_rate": 7.27414002552796e-06, "loss": 0.9299, "step": 3435 }, { "epoch": 0.37, "grad_norm": 1.9153661427624924, "learning_rate": 7.272589323853012e-06, "loss": 0.8971, "step": 3436 }, { "epoch": 0.37, "grad_norm": 1.6866005279199747, "learning_rate": 7.271038346618774e-06, "loss": 0.8737, "step": 3437 }, { "epoch": 0.37, "grad_norm": 1.7364114128552566, "learning_rate": 7.2694870940133065e-06, "loss": 0.8989, "step": 3438 }, { "epoch": 0.37, "grad_norm": 1.8170618560555738, "learning_rate": 7.267935566224705e-06, "loss": 0.9275, "step": 3439 }, { "epoch": 0.37, "grad_norm": 1.6438185755896468, "learning_rate": 7.2663837634410976e-06, "loss": 0.85, "step": 3440 }, { "epoch": 0.37, "grad_norm": 1.8391216782632975, "learning_rate": 7.2648316858506455e-06, "loss": 0.9178, "step": 3441 }, { "epoch": 0.37, "grad_norm": 1.763562830263875, "learning_rate": 7.263279333641542e-06, "loss": 0.8728, "step": 3442 }, { "epoch": 0.37, "grad_norm": 1.9055553768871036, "learning_rate": 7.261726707002017e-06, "loss": 0.9763, "step": 3443 }, { "epoch": 0.37, "grad_norm": 1.7158823421334461, "learning_rate": 7.260173806120332e-06, "loss": 0.9151, "step": 3444 }, { "epoch": 0.37, "grad_norm": 1.8181105950998127, "learning_rate": 7.258620631184782e-06, "loss": 0.9461, "step": 3445 }, { "epoch": 0.37, "grad_norm": 1.6162058789610094, "learning_rate": 7.2570671823836925e-06, "loss": 0.8986, "step": 3446 }, { "epoch": 0.37, "grad_norm": 1.8027282026510234, "learning_rate": 7.255513459905426e-06, "loss": 0.9112, "step": 3447 }, { "epoch": 0.37, "grad_norm": 1.818396226187163, "learning_rate": 7.253959463938376e-06, "loss": 0.9245, "step": 3448 }, { "epoch": 0.37, "grad_norm": 1.716609130054721, "learning_rate": 7.252405194670973e-06, "loss": 0.8662, "step": 3449 }, { "epoch": 0.37, "grad_norm": 1.7486366638058954, "learning_rate": 7.250850652291671e-06, "loss": 0.9078, "step": 3450 }, { "epoch": 0.37, "grad_norm": 1.7590519895637964, "learning_rate": 7.24929583698897e-06, "loss": 0.8997, "step": 3451 }, { "epoch": 0.37, "grad_norm": 1.7400557813578819, "learning_rate": 7.247740748951394e-06, "loss": 0.8994, "step": 3452 }, { "epoch": 0.37, "grad_norm": 1.611342829961703, "learning_rate": 7.246185388367502e-06, "loss": 0.9758, "step": 3453 }, { "epoch": 0.37, "grad_norm": 1.6746883681659026, "learning_rate": 7.2446297554258895e-06, "loss": 0.8587, "step": 3454 }, { "epoch": 0.37, "grad_norm": 1.8189898540297689, "learning_rate": 7.24307385031518e-06, "loss": 0.89, "step": 3455 }, { "epoch": 0.37, "grad_norm": 1.8961803962971986, "learning_rate": 7.2415176732240335e-06, "loss": 0.9717, "step": 3456 }, { "epoch": 0.37, "grad_norm": 1.865525383092902, "learning_rate": 7.239961224341141e-06, "loss": 1.0283, "step": 3457 }, { "epoch": 0.37, "grad_norm": 1.6677886797848644, "learning_rate": 7.238404503855228e-06, "loss": 0.8762, "step": 3458 }, { "epoch": 0.37, "grad_norm": 1.885745128908834, 
"learning_rate": 7.236847511955051e-06, "loss": 0.9203, "step": 3459 }, { "epoch": 0.37, "grad_norm": 1.7486665106804422, "learning_rate": 7.235290248829403e-06, "loss": 0.9285, "step": 3460 }, { "epoch": 0.37, "grad_norm": 0.953345513921867, "learning_rate": 7.233732714667104e-06, "loss": 1.0971, "step": 3461 }, { "epoch": 0.37, "grad_norm": 1.8798253475424074, "learning_rate": 7.232174909657015e-06, "loss": 0.9326, "step": 3462 }, { "epoch": 0.37, "grad_norm": 1.8255256954074066, "learning_rate": 7.230616833988021e-06, "loss": 0.8921, "step": 3463 }, { "epoch": 0.37, "grad_norm": 1.6810973696398843, "learning_rate": 7.229058487849045e-06, "loss": 0.8748, "step": 3464 }, { "epoch": 0.37, "grad_norm": 1.699431875022201, "learning_rate": 7.227499871429047e-06, "loss": 0.9374, "step": 3465 }, { "epoch": 0.37, "grad_norm": 1.7002883843124876, "learning_rate": 7.225940984917007e-06, "loss": 0.8776, "step": 3466 }, { "epoch": 0.37, "grad_norm": 0.9270130161864797, "learning_rate": 7.224381828501947e-06, "loss": 1.064, "step": 3467 }, { "epoch": 0.37, "grad_norm": 1.7414476434478512, "learning_rate": 7.222822402372924e-06, "loss": 1.0389, "step": 3468 }, { "epoch": 0.37, "grad_norm": 1.8091634335373168, "learning_rate": 7.221262706719021e-06, "loss": 0.9602, "step": 3469 }, { "epoch": 0.37, "grad_norm": 1.7427205375852077, "learning_rate": 7.219702741729357e-06, "loss": 0.8409, "step": 3470 }, { "epoch": 0.37, "grad_norm": 1.7690577900169668, "learning_rate": 7.2181425075930845e-06, "loss": 0.9345, "step": 3471 }, { "epoch": 0.37, "grad_norm": 1.6724166838587162, "learning_rate": 7.2165820044993835e-06, "loss": 0.9216, "step": 3472 }, { "epoch": 0.37, "grad_norm": 1.7634310383908485, "learning_rate": 7.215021232637474e-06, "loss": 0.8481, "step": 3473 }, { "epoch": 0.37, "grad_norm": 1.7473934818222827, "learning_rate": 7.213460192196603e-06, "loss": 0.9625, "step": 3474 }, { "epoch": 0.37, "grad_norm": 1.7296853384106563, "learning_rate": 7.211898883366053e-06, "loss": 0.9293, "step": 3475 }, { "epoch": 0.37, "grad_norm": 1.651065852169429, "learning_rate": 7.210337306335138e-06, "loss": 0.9326, "step": 3476 }, { "epoch": 0.37, "grad_norm": 1.6860875418686192, "learning_rate": 7.208775461293205e-06, "loss": 0.845, "step": 3477 }, { "epoch": 0.37, "grad_norm": 1.7629839148290547, "learning_rate": 7.20721334842963e-06, "loss": 0.9358, "step": 3478 }, { "epoch": 0.37, "grad_norm": 1.7231969885581089, "learning_rate": 7.205650967933829e-06, "loss": 0.9448, "step": 3479 }, { "epoch": 0.37, "grad_norm": 0.9721639078528476, "learning_rate": 7.204088319995245e-06, "loss": 1.0673, "step": 3480 }, { "epoch": 0.37, "grad_norm": 1.6975784893916537, "learning_rate": 7.202525404803352e-06, "loss": 0.9274, "step": 3481 }, { "epoch": 0.37, "grad_norm": 1.6695056825090684, "learning_rate": 7.2009622225476616e-06, "loss": 0.832, "step": 3482 }, { "epoch": 0.37, "grad_norm": 1.6816281471161412, "learning_rate": 7.199398773417714e-06, "loss": 0.8584, "step": 3483 }, { "epoch": 0.37, "grad_norm": 1.7921014854808943, "learning_rate": 7.197835057603079e-06, "loss": 0.9024, "step": 3484 }, { "epoch": 0.37, "grad_norm": 1.7698940163372854, "learning_rate": 7.196271075293371e-06, "loss": 1.0021, "step": 3485 }, { "epoch": 0.37, "grad_norm": 1.6747633777943998, "learning_rate": 7.194706826678222e-06, "loss": 0.9277, "step": 3486 }, { "epoch": 0.37, "grad_norm": 1.7409962166720245, "learning_rate": 7.1931423119473035e-06, "loss": 0.8582, "step": 3487 }, { "epoch": 0.37, "grad_norm": 1.7812764980601534, "learning_rate": 
7.191577531290318e-06, "loss": 0.8233, "step": 3488 }, { "epoch": 0.38, "grad_norm": 1.767316632801242, "learning_rate": 7.190012484897002e-06, "loss": 0.9279, "step": 3489 }, { "epoch": 0.38, "grad_norm": 1.7692264559225885, "learning_rate": 7.188447172957121e-06, "loss": 0.9139, "step": 3490 }, { "epoch": 0.38, "grad_norm": 1.748707694661078, "learning_rate": 7.186881595660477e-06, "loss": 0.9114, "step": 3491 }, { "epoch": 0.38, "grad_norm": 1.711610896625979, "learning_rate": 7.1853157531969e-06, "loss": 0.9543, "step": 3492 }, { "epoch": 0.38, "grad_norm": 1.6984804499354145, "learning_rate": 7.183749645756254e-06, "loss": 0.893, "step": 3493 }, { "epoch": 0.38, "grad_norm": 1.6722394039213344, "learning_rate": 7.182183273528436e-06, "loss": 0.9737, "step": 3494 }, { "epoch": 0.38, "grad_norm": 2.0040722591900275, "learning_rate": 7.18061663670337e-06, "loss": 0.9287, "step": 3495 }, { "epoch": 0.38, "grad_norm": 1.75404960183231, "learning_rate": 7.1790497354710205e-06, "loss": 0.8583, "step": 3496 }, { "epoch": 0.38, "grad_norm": 1.7967910722965834, "learning_rate": 7.17748257002138e-06, "loss": 0.9323, "step": 3497 }, { "epoch": 0.38, "grad_norm": 1.8916596683543612, "learning_rate": 7.175915140544469e-06, "loss": 0.924, "step": 3498 }, { "epoch": 0.38, "grad_norm": 1.7454463320794427, "learning_rate": 7.174347447230346e-06, "loss": 0.9463, "step": 3499 }, { "epoch": 0.38, "grad_norm": 1.8483780455521097, "learning_rate": 7.172779490269099e-06, "loss": 0.9119, "step": 3500 }, { "epoch": 0.38, "grad_norm": 1.8976118256026944, "learning_rate": 7.171211269850847e-06, "loss": 0.8887, "step": 3501 }, { "epoch": 0.38, "grad_norm": 1.769158991198243, "learning_rate": 7.169642786165747e-06, "loss": 0.9119, "step": 3502 }, { "epoch": 0.38, "grad_norm": 1.8277474191183596, "learning_rate": 7.168074039403975e-06, "loss": 0.8757, "step": 3503 }, { "epoch": 0.38, "grad_norm": 1.6958585951090037, "learning_rate": 7.166505029755753e-06, "loss": 0.8831, "step": 3504 }, { "epoch": 0.38, "grad_norm": 1.7207825956886669, "learning_rate": 7.1649357574113275e-06, "loss": 0.8843, "step": 3505 }, { "epoch": 0.38, "grad_norm": 1.7815528148208544, "learning_rate": 7.163366222560977e-06, "loss": 0.8855, "step": 3506 }, { "epoch": 0.38, "grad_norm": 1.7192400789337186, "learning_rate": 7.161796425395014e-06, "loss": 0.8877, "step": 3507 }, { "epoch": 0.38, "grad_norm": 0.9783996606398329, "learning_rate": 7.160226366103782e-06, "loss": 1.0543, "step": 3508 }, { "epoch": 0.38, "grad_norm": 1.6270732648384407, "learning_rate": 7.158656044877654e-06, "loss": 0.8499, "step": 3509 }, { "epoch": 0.38, "grad_norm": 1.8241045020053068, "learning_rate": 7.15708546190704e-06, "loss": 0.996, "step": 3510 }, { "epoch": 0.38, "grad_norm": 1.8199280208426227, "learning_rate": 7.155514617382377e-06, "loss": 0.8553, "step": 3511 }, { "epoch": 0.38, "grad_norm": 1.7553806407382988, "learning_rate": 7.153943511494135e-06, "loss": 0.9134, "step": 3512 }, { "epoch": 0.38, "grad_norm": 1.8600354417877532, "learning_rate": 7.152372144432818e-06, "loss": 0.9632, "step": 3513 }, { "epoch": 0.38, "grad_norm": 1.7725834857013472, "learning_rate": 7.1508005163889575e-06, "loss": 0.9559, "step": 3514 }, { "epoch": 0.38, "grad_norm": 1.7069239121104403, "learning_rate": 7.1492286275531175e-06, "loss": 0.9619, "step": 3515 }, { "epoch": 0.38, "grad_norm": 1.7024056934719856, "learning_rate": 7.147656478115899e-06, "loss": 0.9334, "step": 3516 }, { "epoch": 0.38, "grad_norm": 1.7481645300916133, "learning_rate": 7.146084068267928e-06, 
"loss": 0.9439, "step": 3517 }, { "epoch": 0.38, "grad_norm": 1.7559461091840354, "learning_rate": 7.144511398199865e-06, "loss": 0.8736, "step": 3518 }, { "epoch": 0.38, "grad_norm": 1.8077910504595287, "learning_rate": 7.1429384681024e-06, "loss": 0.7897, "step": 3519 }, { "epoch": 0.38, "grad_norm": 1.6310237060081199, "learning_rate": 7.141365278166261e-06, "loss": 0.9498, "step": 3520 }, { "epoch": 0.38, "grad_norm": 1.7134204298878384, "learning_rate": 7.1397918285821965e-06, "loss": 0.9041, "step": 3521 }, { "epoch": 0.38, "grad_norm": 1.7598912789986503, "learning_rate": 7.138218119540998e-06, "loss": 0.8775, "step": 3522 }, { "epoch": 0.38, "grad_norm": 1.8203018974359149, "learning_rate": 7.1366441512334806e-06, "loss": 0.9972, "step": 3523 }, { "epoch": 0.38, "grad_norm": 1.6901003079524397, "learning_rate": 7.135069923850493e-06, "loss": 0.9122, "step": 3524 }, { "epoch": 0.38, "grad_norm": 1.7067658173682547, "learning_rate": 7.133495437582917e-06, "loss": 0.8729, "step": 3525 }, { "epoch": 0.38, "grad_norm": 1.739467279926299, "learning_rate": 7.131920692621663e-06, "loss": 0.8889, "step": 3526 }, { "epoch": 0.38, "grad_norm": 1.712169742091463, "learning_rate": 7.130345689157676e-06, "loss": 0.949, "step": 3527 }, { "epoch": 0.38, "grad_norm": 1.8493548837059157, "learning_rate": 7.12877042738193e-06, "loss": 0.948, "step": 3528 }, { "epoch": 0.38, "grad_norm": 1.760063625206818, "learning_rate": 7.127194907485431e-06, "loss": 0.9048, "step": 3529 }, { "epoch": 0.38, "grad_norm": 1.6929367389757077, "learning_rate": 7.125619129659215e-06, "loss": 0.9421, "step": 3530 }, { "epoch": 0.38, "grad_norm": 1.7601751255694287, "learning_rate": 7.1240430940943516e-06, "loss": 0.8783, "step": 3531 }, { "epoch": 0.38, "grad_norm": 1.6053897818718008, "learning_rate": 7.12246680098194e-06, "loss": 0.9588, "step": 3532 }, { "epoch": 0.38, "grad_norm": 1.7603438518916497, "learning_rate": 7.120890250513111e-06, "loss": 0.9433, "step": 3533 }, { "epoch": 0.38, "grad_norm": 2.220126126823655, "learning_rate": 7.119313442879029e-06, "loss": 0.9106, "step": 3534 }, { "epoch": 0.38, "grad_norm": 1.6541489141711925, "learning_rate": 7.1177363782708855e-06, "loss": 0.9885, "step": 3535 }, { "epoch": 0.38, "grad_norm": 1.9399622569272348, "learning_rate": 7.116159056879904e-06, "loss": 0.8974, "step": 3536 }, { "epoch": 0.38, "grad_norm": 1.794128294565982, "learning_rate": 7.114581478897342e-06, "loss": 0.9918, "step": 3537 }, { "epoch": 0.38, "grad_norm": 1.6915538964619918, "learning_rate": 7.113003644514486e-06, "loss": 0.8626, "step": 3538 }, { "epoch": 0.38, "grad_norm": 1.7960142506956844, "learning_rate": 7.111425553922653e-06, "loss": 0.8887, "step": 3539 }, { "epoch": 0.38, "grad_norm": 1.6931387091978012, "learning_rate": 7.109847207313192e-06, "loss": 0.968, "step": 3540 }, { "epoch": 0.38, "grad_norm": 1.773033659021171, "learning_rate": 7.108268604877484e-06, "loss": 0.9888, "step": 3541 }, { "epoch": 0.38, "grad_norm": 1.7818874216305898, "learning_rate": 7.1066897468069394e-06, "loss": 0.9943, "step": 3542 }, { "epoch": 0.38, "grad_norm": 1.7319535847860126, "learning_rate": 7.105110633292999e-06, "loss": 0.8553, "step": 3543 }, { "epoch": 0.38, "grad_norm": 0.9386846611193761, "learning_rate": 7.103531264527138e-06, "loss": 1.0136, "step": 3544 }, { "epoch": 0.38, "grad_norm": 0.8573797711984961, "learning_rate": 7.10195164070086e-06, "loss": 0.9914, "step": 3545 }, { "epoch": 0.38, "grad_norm": 1.7146029394373403, "learning_rate": 7.100371762005698e-06, "loss": 0.8788, "step": 
3546 }, { "epoch": 0.38, "grad_norm": 1.714033874303207, "learning_rate": 7.098791628633218e-06, "loss": 0.9106, "step": 3547 }, { "epoch": 0.38, "grad_norm": 1.6715255869318555, "learning_rate": 7.097211240775018e-06, "loss": 0.9186, "step": 3548 }, { "epoch": 0.38, "grad_norm": 1.6075976494129403, "learning_rate": 7.095630598622725e-06, "loss": 0.9878, "step": 3549 }, { "epoch": 0.38, "grad_norm": 1.6510073819639206, "learning_rate": 7.094049702367998e-06, "loss": 0.9107, "step": 3550 }, { "epoch": 0.38, "grad_norm": 1.6418862968378467, "learning_rate": 7.092468552202524e-06, "loss": 0.8797, "step": 3551 }, { "epoch": 0.38, "grad_norm": 1.8735806336054408, "learning_rate": 7.090887148318024e-06, "loss": 0.9168, "step": 3552 }, { "epoch": 0.38, "grad_norm": 1.735907317119064, "learning_rate": 7.089305490906251e-06, "loss": 0.9539, "step": 3553 }, { "epoch": 0.38, "grad_norm": 1.7198969613419666, "learning_rate": 7.0877235801589825e-06, "loss": 0.9026, "step": 3554 }, { "epoch": 0.38, "grad_norm": 1.7414039773427819, "learning_rate": 7.086141416268033e-06, "loss": 0.9493, "step": 3555 }, { "epoch": 0.38, "grad_norm": 1.3458528378871621, "learning_rate": 7.084558999425245e-06, "loss": 1.0523, "step": 3556 }, { "epoch": 0.38, "grad_norm": 1.7280329297943133, "learning_rate": 7.082976329822491e-06, "loss": 0.8749, "step": 3557 }, { "epoch": 0.38, "grad_norm": 1.7705729272248942, "learning_rate": 7.081393407651676e-06, "loss": 0.9312, "step": 3558 }, { "epoch": 0.38, "grad_norm": 2.4045948670533135, "learning_rate": 7.0798102331047345e-06, "loss": 0.8842, "step": 3559 }, { "epoch": 0.38, "grad_norm": 1.6168674531526348, "learning_rate": 7.078226806373632e-06, "loss": 0.9396, "step": 3560 }, { "epoch": 0.38, "grad_norm": 1.7586232755342333, "learning_rate": 7.076643127650367e-06, "loss": 0.9196, "step": 3561 }, { "epoch": 0.38, "grad_norm": 1.6949599209699688, "learning_rate": 7.075059197126961e-06, "loss": 0.8395, "step": 3562 }, { "epoch": 0.38, "grad_norm": 1.655052520846647, "learning_rate": 7.073475014995472e-06, "loss": 0.912, "step": 3563 }, { "epoch": 0.38, "grad_norm": 0.8686108262185473, "learning_rate": 7.071890581447992e-06, "loss": 1.0668, "step": 3564 }, { "epoch": 0.38, "grad_norm": 1.66861172282927, "learning_rate": 7.070305896676635e-06, "loss": 0.9405, "step": 3565 }, { "epoch": 0.38, "grad_norm": 1.708452050627153, "learning_rate": 7.068720960873552e-06, "loss": 0.9237, "step": 3566 }, { "epoch": 0.38, "grad_norm": 1.6503561574224743, "learning_rate": 7.067135774230919e-06, "loss": 0.8877, "step": 3567 }, { "epoch": 0.38, "grad_norm": 1.8480827476985013, "learning_rate": 7.065550336940947e-06, "loss": 0.8971, "step": 3568 }, { "epoch": 0.38, "grad_norm": 1.7398193950071896, "learning_rate": 7.063964649195874e-06, "loss": 0.8831, "step": 3569 }, { "epoch": 0.38, "grad_norm": 1.764079176478315, "learning_rate": 7.0623787111879734e-06, "loss": 0.9852, "step": 3570 }, { "epoch": 0.38, "grad_norm": 1.794083433980299, "learning_rate": 7.060792523109545e-06, "loss": 0.871, "step": 3571 }, { "epoch": 0.38, "grad_norm": 1.6832595331538067, "learning_rate": 7.059206085152918e-06, "loss": 0.8388, "step": 3572 }, { "epoch": 0.38, "grad_norm": 1.6982910753805576, "learning_rate": 7.057619397510453e-06, "loss": 0.9367, "step": 3573 }, { "epoch": 0.38, "grad_norm": 1.7589671389912003, "learning_rate": 7.056032460374542e-06, "loss": 0.862, "step": 3574 }, { "epoch": 0.38, "grad_norm": 1.7350236387515476, "learning_rate": 7.054445273937609e-06, "loss": 0.8671, "step": 3575 }, { "epoch": 0.38, 
"grad_norm": 1.852659575495065, "learning_rate": 7.052857838392104e-06, "loss": 0.9906, "step": 3576 }, { "epoch": 0.38, "grad_norm": 1.6926339876220031, "learning_rate": 7.0512701539305065e-06, "loss": 0.823, "step": 3577 }, { "epoch": 0.38, "grad_norm": 1.6561098253224362, "learning_rate": 7.049682220745332e-06, "loss": 0.9211, "step": 3578 }, { "epoch": 0.38, "grad_norm": 1.8232130648782472, "learning_rate": 7.048094039029123e-06, "loss": 0.9338, "step": 3579 }, { "epoch": 0.38, "grad_norm": 1.8125939761170358, "learning_rate": 7.046505608974448e-06, "loss": 0.9743, "step": 3580 }, { "epoch": 0.38, "grad_norm": 1.7408365118567453, "learning_rate": 7.044916930773915e-06, "loss": 0.8947, "step": 3581 }, { "epoch": 0.39, "grad_norm": 1.7737129317931215, "learning_rate": 7.043328004620154e-06, "loss": 0.9902, "step": 3582 }, { "epoch": 0.39, "grad_norm": 1.7646357180579426, "learning_rate": 7.041738830705828e-06, "loss": 0.9835, "step": 3583 }, { "epoch": 0.39, "grad_norm": 0.926701917454396, "learning_rate": 7.040149409223629e-06, "loss": 1.0542, "step": 3584 }, { "epoch": 0.39, "grad_norm": 1.7128493588012803, "learning_rate": 7.038559740366282e-06, "loss": 0.9246, "step": 3585 }, { "epoch": 0.39, "grad_norm": 1.7843757328494119, "learning_rate": 7.036969824326536e-06, "loss": 0.8649, "step": 3586 }, { "epoch": 0.39, "grad_norm": 1.7125016556112236, "learning_rate": 7.035379661297179e-06, "loss": 0.9618, "step": 3587 }, { "epoch": 0.39, "grad_norm": 1.6121399830975427, "learning_rate": 7.033789251471019e-06, "loss": 0.9234, "step": 3588 }, { "epoch": 0.39, "grad_norm": 0.8622084126529141, "learning_rate": 7.032198595040902e-06, "loss": 1.0326, "step": 3589 }, { "epoch": 0.39, "grad_norm": 1.6648160108726295, "learning_rate": 7.0306076921997e-06, "loss": 0.9494, "step": 3590 }, { "epoch": 0.39, "grad_norm": 1.7262735458229541, "learning_rate": 7.029016543140312e-06, "loss": 0.908, "step": 3591 }, { "epoch": 0.39, "grad_norm": 1.7035821157578581, "learning_rate": 7.027425148055676e-06, "loss": 1.0045, "step": 3592 }, { "epoch": 0.39, "grad_norm": 1.8150667815822368, "learning_rate": 7.025833507138751e-06, "loss": 0.9564, "step": 3593 }, { "epoch": 0.39, "grad_norm": 1.564818670908862, "learning_rate": 7.024241620582527e-06, "loss": 0.8421, "step": 3594 }, { "epoch": 0.39, "grad_norm": 1.8292583117408145, "learning_rate": 7.022649488580029e-06, "loss": 0.8817, "step": 3595 }, { "epoch": 0.39, "grad_norm": 1.700825587067748, "learning_rate": 7.021057111324307e-06, "loss": 0.8728, "step": 3596 }, { "epoch": 0.39, "grad_norm": 1.7051856044142253, "learning_rate": 7.019464489008443e-06, "loss": 0.8248, "step": 3597 }, { "epoch": 0.39, "grad_norm": 1.69796490832553, "learning_rate": 7.017871621825549e-06, "loss": 0.8725, "step": 3598 }, { "epoch": 0.39, "grad_norm": 1.7509850983263118, "learning_rate": 7.016278509968762e-06, "loss": 0.9395, "step": 3599 }, { "epoch": 0.39, "grad_norm": 1.74357438942604, "learning_rate": 7.014685153631255e-06, "loss": 0.9511, "step": 3600 }, { "epoch": 0.39, "grad_norm": 1.6876935005557043, "learning_rate": 7.013091553006228e-06, "loss": 0.9238, "step": 3601 }, { "epoch": 0.39, "grad_norm": 1.8300493357983656, "learning_rate": 7.01149770828691e-06, "loss": 0.9737, "step": 3602 }, { "epoch": 0.39, "grad_norm": 1.606938984260058, "learning_rate": 7.009903619666561e-06, "loss": 0.8865, "step": 3603 }, { "epoch": 0.39, "grad_norm": 1.6960378197072044, "learning_rate": 7.008309287338467e-06, "loss": 0.8711, "step": 3604 }, { "epoch": 0.39, "grad_norm": 
0.908480662872319, "learning_rate": 7.006714711495949e-06, "loss": 1.0797, "step": 3605 }, { "epoch": 0.39, "grad_norm": 1.7229790956755302, "learning_rate": 7.0051198923323535e-06, "loss": 0.8886, "step": 3606 }, { "epoch": 0.39, "grad_norm": 1.7752920922468276, "learning_rate": 7.0035248300410595e-06, "loss": 0.9011, "step": 3607 }, { "epoch": 0.39, "grad_norm": 1.7733270479359085, "learning_rate": 7.0019295248154714e-06, "loss": 0.8575, "step": 3608 }, { "epoch": 0.39, "grad_norm": 1.6264233153332548, "learning_rate": 7.000333976849029e-06, "loss": 0.8651, "step": 3609 }, { "epoch": 0.39, "grad_norm": 1.6092462203998321, "learning_rate": 6.998738186335194e-06, "loss": 0.8352, "step": 3610 }, { "epoch": 0.39, "grad_norm": 1.818348322029172, "learning_rate": 6.9971421534674625e-06, "loss": 1.0008, "step": 3611 }, { "epoch": 0.39, "grad_norm": 1.8252915006400738, "learning_rate": 6.99554587843936e-06, "loss": 0.9738, "step": 3612 }, { "epoch": 0.39, "grad_norm": 1.628535703389244, "learning_rate": 6.993949361444441e-06, "loss": 0.9075, "step": 3613 }, { "epoch": 0.39, "grad_norm": 1.973083286903235, "learning_rate": 6.992352602676288e-06, "loss": 0.9911, "step": 3614 }, { "epoch": 0.39, "grad_norm": 1.747209253765597, "learning_rate": 6.990755602328511e-06, "loss": 0.9526, "step": 3615 }, { "epoch": 0.39, "grad_norm": 1.8398221279292295, "learning_rate": 6.9891583605947565e-06, "loss": 0.9031, "step": 3616 }, { "epoch": 0.39, "grad_norm": 1.7261923784510973, "learning_rate": 6.987560877668692e-06, "loss": 0.9351, "step": 3617 }, { "epoch": 0.39, "grad_norm": 1.6943700548873455, "learning_rate": 6.9859631537440195e-06, "loss": 0.8773, "step": 3618 }, { "epoch": 0.39, "grad_norm": 1.6774115725503271, "learning_rate": 6.984365189014468e-06, "loss": 0.9466, "step": 3619 }, { "epoch": 0.39, "grad_norm": 1.7872511198569294, "learning_rate": 6.982766983673796e-06, "loss": 0.9934, "step": 3620 }, { "epoch": 0.39, "grad_norm": 1.7808734424974293, "learning_rate": 6.9811685379157916e-06, "loss": 0.9493, "step": 3621 }, { "epoch": 0.39, "grad_norm": 0.861382861203167, "learning_rate": 6.979569851934271e-06, "loss": 1.0723, "step": 3622 }, { "epoch": 0.39, "grad_norm": 1.891245373566788, "learning_rate": 6.977970925923081e-06, "loss": 0.929, "step": 3623 }, { "epoch": 0.39, "grad_norm": 1.8010821627236944, "learning_rate": 6.976371760076099e-06, "loss": 0.9234, "step": 3624 }, { "epoch": 0.39, "grad_norm": 1.6923243397254573, "learning_rate": 6.974772354587226e-06, "loss": 0.9559, "step": 3625 }, { "epoch": 0.39, "grad_norm": 1.7710853243633347, "learning_rate": 6.973172709650397e-06, "loss": 0.8709, "step": 3626 }, { "epoch": 0.39, "grad_norm": 1.6971943867156254, "learning_rate": 6.971572825459576e-06, "loss": 0.9049, "step": 3627 }, { "epoch": 0.39, "grad_norm": 1.735043608150645, "learning_rate": 6.96997270220875e-06, "loss": 0.8544, "step": 3628 }, { "epoch": 0.39, "grad_norm": 1.6541145806329642, "learning_rate": 6.9683723400919455e-06, "loss": 0.9346, "step": 3629 }, { "epoch": 0.39, "grad_norm": 1.8080198751977745, "learning_rate": 6.9667717393032065e-06, "loss": 1.0205, "step": 3630 }, { "epoch": 0.39, "grad_norm": 0.8772116250763214, "learning_rate": 6.965170900036614e-06, "loss": 1.0918, "step": 3631 }, { "epoch": 0.39, "grad_norm": 1.65482459642941, "learning_rate": 6.963569822486275e-06, "loss": 0.8737, "step": 3632 }, { "epoch": 0.39, "grad_norm": 1.7010884636340624, "learning_rate": 6.961968506846327e-06, "loss": 0.8542, "step": 3633 }, { "epoch": 0.39, "grad_norm": 1.7063148031770505, 
"learning_rate": 6.960366953310931e-06, "loss": 0.8811, "step": 3634 }, { "epoch": 0.39, "grad_norm": 1.7668854618493877, "learning_rate": 6.958765162074287e-06, "loss": 0.9082, "step": 3635 }, { "epoch": 0.39, "grad_norm": 1.7923528247129772, "learning_rate": 6.957163133330611e-06, "loss": 0.9789, "step": 3636 }, { "epoch": 0.39, "grad_norm": 1.609014366666203, "learning_rate": 6.9555608672741596e-06, "loss": 0.9025, "step": 3637 }, { "epoch": 0.39, "grad_norm": 0.8641406077462145, "learning_rate": 6.9539583640992096e-06, "loss": 1.0668, "step": 3638 }, { "epoch": 0.39, "grad_norm": 1.7531199417175816, "learning_rate": 6.9523556240000725e-06, "loss": 0.906, "step": 3639 }, { "epoch": 0.39, "grad_norm": 1.6857874244839477, "learning_rate": 6.9507526471710865e-06, "loss": 0.8895, "step": 3640 }, { "epoch": 0.39, "grad_norm": 1.7908512647272818, "learning_rate": 6.949149433806614e-06, "loss": 0.8874, "step": 3641 }, { "epoch": 0.39, "grad_norm": 1.7418529250113568, "learning_rate": 6.947545984101054e-06, "loss": 0.9729, "step": 3642 }, { "epoch": 0.39, "grad_norm": 0.8714058693541644, "learning_rate": 6.94594229824883e-06, "loss": 1.0396, "step": 3643 }, { "epoch": 0.39, "grad_norm": 0.8071138673207257, "learning_rate": 6.944338376444394e-06, "loss": 1.0111, "step": 3644 }, { "epoch": 0.39, "grad_norm": 1.759354803754439, "learning_rate": 6.942734218882225e-06, "loss": 0.8957, "step": 3645 }, { "epoch": 0.39, "grad_norm": 1.689225846792078, "learning_rate": 6.941129825756836e-06, "loss": 0.979, "step": 3646 }, { "epoch": 0.39, "grad_norm": 2.130986509949675, "learning_rate": 6.939525197262762e-06, "loss": 0.889, "step": 3647 }, { "epoch": 0.39, "grad_norm": 1.639713783453456, "learning_rate": 6.937920333594572e-06, "loss": 0.8984, "step": 3648 }, { "epoch": 0.39, "grad_norm": 1.7097915488843474, "learning_rate": 6.936315234946861e-06, "loss": 0.9067, "step": 3649 }, { "epoch": 0.39, "grad_norm": 1.8087871128239699, "learning_rate": 6.934709901514252e-06, "loss": 0.9976, "step": 3650 }, { "epoch": 0.39, "grad_norm": 1.7255688648281826, "learning_rate": 6.933104333491399e-06, "loss": 0.9276, "step": 3651 }, { "epoch": 0.39, "grad_norm": 1.9005083373032283, "learning_rate": 6.931498531072977e-06, "loss": 0.9214, "step": 3652 }, { "epoch": 0.39, "grad_norm": 1.666958058830387, "learning_rate": 6.929892494453703e-06, "loss": 0.8921, "step": 3653 }, { "epoch": 0.39, "grad_norm": 1.695282635440867, "learning_rate": 6.928286223828309e-06, "loss": 0.9089, "step": 3654 }, { "epoch": 0.39, "grad_norm": 1.7638583604870655, "learning_rate": 6.926679719391562e-06, "loss": 0.9392, "step": 3655 }, { "epoch": 0.39, "grad_norm": 1.8289464413422243, "learning_rate": 6.9250729813382585e-06, "loss": 0.9443, "step": 3656 }, { "epoch": 0.39, "grad_norm": 1.7496657726868867, "learning_rate": 6.923466009863219e-06, "loss": 1.0295, "step": 3657 }, { "epoch": 0.39, "grad_norm": 1.6332571260722488, "learning_rate": 6.921858805161295e-06, "loss": 0.8893, "step": 3658 }, { "epoch": 0.39, "grad_norm": 1.6852687531407198, "learning_rate": 6.920251367427362e-06, "loss": 1.0155, "step": 3659 }, { "epoch": 0.39, "grad_norm": 1.6986753239597217, "learning_rate": 6.918643696856333e-06, "loss": 0.8528, "step": 3660 }, { "epoch": 0.39, "grad_norm": 1.7148259665793568, "learning_rate": 6.917035793643142e-06, "loss": 0.8369, "step": 3661 }, { "epoch": 0.39, "grad_norm": 1.5803507473511513, "learning_rate": 6.915427657982752e-06, "loss": 0.8683, "step": 3662 }, { "epoch": 0.39, "grad_norm": 1.783435142997351, "learning_rate": 
6.913819290070154e-06, "loss": 0.8671, "step": 3663 }, { "epoch": 0.39, "grad_norm": 1.705001770528061, "learning_rate": 6.912210690100369e-06, "loss": 0.9762, "step": 3664 }, { "epoch": 0.39, "grad_norm": 1.7800298536374306, "learning_rate": 6.910601858268444e-06, "loss": 0.8176, "step": 3665 }, { "epoch": 0.39, "grad_norm": 1.67537335112625, "learning_rate": 6.90899279476946e-06, "loss": 0.9066, "step": 3666 }, { "epoch": 0.39, "grad_norm": 1.812873194083026, "learning_rate": 6.907383499798517e-06, "loss": 0.8636, "step": 3667 }, { "epoch": 0.39, "grad_norm": 1.7521746834407605, "learning_rate": 6.905773973550748e-06, "loss": 0.9094, "step": 3668 }, { "epoch": 0.39, "grad_norm": 1.745678422759384, "learning_rate": 6.904164216221315e-06, "loss": 0.8547, "step": 3669 }, { "epoch": 0.39, "grad_norm": 1.7778083272687444, "learning_rate": 6.902554228005406e-06, "loss": 0.9038, "step": 3670 }, { "epoch": 0.39, "grad_norm": 1.688256945852924, "learning_rate": 6.900944009098238e-06, "loss": 0.9702, "step": 3671 }, { "epoch": 0.39, "grad_norm": 1.7208661560683851, "learning_rate": 6.899333559695056e-06, "loss": 0.8875, "step": 3672 }, { "epoch": 0.39, "grad_norm": 1.629997570936904, "learning_rate": 6.897722879991131e-06, "loss": 0.9192, "step": 3673 }, { "epoch": 0.39, "grad_norm": 1.7847764791928995, "learning_rate": 6.896111970181765e-06, "loss": 0.9028, "step": 3674 }, { "epoch": 0.4, "grad_norm": 1.7153027589835654, "learning_rate": 6.894500830462285e-06, "loss": 0.9097, "step": 3675 }, { "epoch": 0.4, "grad_norm": 1.7074904038794971, "learning_rate": 6.892889461028048e-06, "loss": 0.9425, "step": 3676 }, { "epoch": 0.4, "grad_norm": 1.705484829404824, "learning_rate": 6.891277862074439e-06, "loss": 0.8809, "step": 3677 }, { "epoch": 0.4, "grad_norm": 1.0074110379284535, "learning_rate": 6.889666033796869e-06, "loss": 1.0331, "step": 3678 }, { "epoch": 0.4, "grad_norm": 1.726853823249063, "learning_rate": 6.888053976390776e-06, "loss": 0.9379, "step": 3679 }, { "epoch": 0.4, "grad_norm": 1.7687340792811335, "learning_rate": 6.886441690051631e-06, "loss": 0.9329, "step": 3680 }, { "epoch": 0.4, "grad_norm": 1.6423933673616542, "learning_rate": 6.884829174974927e-06, "loss": 0.8403, "step": 3681 }, { "epoch": 0.4, "grad_norm": 1.8623918017004886, "learning_rate": 6.883216431356188e-06, "loss": 0.9943, "step": 3682 }, { "epoch": 0.4, "grad_norm": 1.871937056500414, "learning_rate": 6.881603459390964e-06, "loss": 0.9839, "step": 3683 }, { "epoch": 0.4, "grad_norm": 1.743798689550891, "learning_rate": 6.879990259274832e-06, "loss": 0.9017, "step": 3684 }, { "epoch": 0.4, "grad_norm": 0.8666613682805704, "learning_rate": 6.878376831203402e-06, "loss": 1.0712, "step": 3685 }, { "epoch": 0.4, "grad_norm": 0.879389760472025, "learning_rate": 6.876763175372306e-06, "loss": 1.0401, "step": 3686 }, { "epoch": 0.4, "grad_norm": 1.7632567345522692, "learning_rate": 6.875149291977201e-06, "loss": 0.8705, "step": 3687 }, { "epoch": 0.4, "grad_norm": 1.864786728366429, "learning_rate": 6.873535181213784e-06, "loss": 0.8552, "step": 3688 }, { "epoch": 0.4, "grad_norm": 1.6820992011268312, "learning_rate": 6.871920843277765e-06, "loss": 0.8614, "step": 3689 }, { "epoch": 0.4, "grad_norm": 0.8495160183213671, "learning_rate": 6.87030627836489e-06, "loss": 1.0403, "step": 3690 }, { "epoch": 0.4, "grad_norm": 1.899335357881142, "learning_rate": 6.868691486670932e-06, "loss": 0.9286, "step": 3691 }, { "epoch": 0.4, "grad_norm": 1.7439479097153436, "learning_rate": 6.867076468391688e-06, "loss": 0.9474, "step": 
3692 }, { "epoch": 0.4, "grad_norm": 1.8186853891340966, "learning_rate": 6.865461223722987e-06, "loss": 0.9392, "step": 3693 }, { "epoch": 0.4, "grad_norm": 1.7642438398249785, "learning_rate": 6.86384575286068e-06, "loss": 0.877, "step": 3694 }, { "epoch": 0.4, "grad_norm": 2.147982875723684, "learning_rate": 6.862230056000648e-06, "loss": 0.9931, "step": 3695 }, { "epoch": 0.4, "grad_norm": 1.7402840310387402, "learning_rate": 6.860614133338804e-06, "loss": 0.8747, "step": 3696 }, { "epoch": 0.4, "grad_norm": 1.7634381472093945, "learning_rate": 6.858997985071082e-06, "loss": 1.0319, "step": 3697 }, { "epoch": 0.4, "grad_norm": 0.8313981938744797, "learning_rate": 6.857381611393445e-06, "loss": 1.0703, "step": 3698 }, { "epoch": 0.4, "grad_norm": 1.876817613513853, "learning_rate": 6.855765012501884e-06, "loss": 0.96, "step": 3699 }, { "epoch": 0.4, "grad_norm": 1.6995553086207764, "learning_rate": 6.8541481885924174e-06, "loss": 0.9551, "step": 3700 }, { "epoch": 0.4, "grad_norm": 1.7693112625411371, "learning_rate": 6.852531139861091e-06, "loss": 0.9888, "step": 3701 }, { "epoch": 0.4, "grad_norm": 0.8254403350141695, "learning_rate": 6.850913866503976e-06, "loss": 1.0925, "step": 3702 }, { "epoch": 0.4, "grad_norm": 1.680283346881603, "learning_rate": 6.849296368717176e-06, "loss": 0.8809, "step": 3703 }, { "epoch": 0.4, "grad_norm": 1.7844627933581727, "learning_rate": 6.847678646696814e-06, "loss": 0.8799, "step": 3704 }, { "epoch": 0.4, "grad_norm": 1.697828403326104, "learning_rate": 6.846060700639047e-06, "loss": 0.9546, "step": 3705 }, { "epoch": 0.4, "grad_norm": 1.7573637171302605, "learning_rate": 6.844442530740055e-06, "loss": 0.9018, "step": 3706 }, { "epoch": 0.4, "grad_norm": 1.7455961203347996, "learning_rate": 6.842824137196047e-06, "loss": 0.921, "step": 3707 }, { "epoch": 0.4, "grad_norm": 1.6999668014104816, "learning_rate": 6.84120552020326e-06, "loss": 0.9121, "step": 3708 }, { "epoch": 0.4, "grad_norm": 0.8956969697481791, "learning_rate": 6.839586679957957e-06, "loss": 1.058, "step": 3709 }, { "epoch": 0.4, "grad_norm": 1.7541173121499185, "learning_rate": 6.837967616656426e-06, "loss": 0.9004, "step": 3710 }, { "epoch": 0.4, "grad_norm": 1.8015058039687593, "learning_rate": 6.836348330494985e-06, "loss": 0.9553, "step": 3711 }, { "epoch": 0.4, "grad_norm": 1.5840961512474818, "learning_rate": 6.834728821669978e-06, "loss": 0.8442, "step": 3712 }, { "epoch": 0.4, "grad_norm": 0.8937536444855703, "learning_rate": 6.8331090903777754e-06, "loss": 1.0761, "step": 3713 }, { "epoch": 0.4, "grad_norm": 1.8280253509182, "learning_rate": 6.831489136814777e-06, "loss": 0.9211, "step": 3714 }, { "epoch": 0.4, "grad_norm": 1.636831188915033, "learning_rate": 6.829868961177406e-06, "loss": 0.8298, "step": 3715 }, { "epoch": 0.4, "grad_norm": 1.7076738444979378, "learning_rate": 6.8282485636621165e-06, "loss": 0.8862, "step": 3716 }, { "epoch": 0.4, "grad_norm": 1.836120942194683, "learning_rate": 6.826627944465384e-06, "loss": 0.8648, "step": 3717 }, { "epoch": 0.4, "grad_norm": 1.6527220215417386, "learning_rate": 6.825007103783716e-06, "loss": 0.8867, "step": 3718 }, { "epoch": 0.4, "grad_norm": 1.7162667018186137, "learning_rate": 6.8233860418136466e-06, "loss": 0.8734, "step": 3719 }, { "epoch": 0.4, "grad_norm": 1.709710387424364, "learning_rate": 6.821764758751732e-06, "loss": 0.9524, "step": 3720 }, { "epoch": 0.4, "grad_norm": 1.6790190215837812, "learning_rate": 6.820143254794559e-06, "loss": 0.871, "step": 3721 }, { "epoch": 0.4, "grad_norm": 1.034573991525158, 
"learning_rate": 6.818521530138743e-06, "loss": 1.0798, "step": 3722 }, { "epoch": 0.4, "grad_norm": 1.7075812638114842, "learning_rate": 6.816899584980922e-06, "loss": 0.8439, "step": 3723 }, { "epoch": 0.4, "grad_norm": 1.7981849960547498, "learning_rate": 6.815277419517761e-06, "loss": 0.9813, "step": 3724 }, { "epoch": 0.4, "grad_norm": 1.8486270231115924, "learning_rate": 6.8136550339459566e-06, "loss": 0.8872, "step": 3725 }, { "epoch": 0.4, "grad_norm": 1.7568373994344684, "learning_rate": 6.8120324284622255e-06, "loss": 0.9481, "step": 3726 }, { "epoch": 0.4, "grad_norm": 1.7144978788693175, "learning_rate": 6.810409603263315e-06, "loss": 0.8744, "step": 3727 }, { "epoch": 0.4, "grad_norm": 1.7387026891378985, "learning_rate": 6.808786558546e-06, "loss": 0.9416, "step": 3728 }, { "epoch": 0.4, "grad_norm": 1.8033905382000865, "learning_rate": 6.807163294507078e-06, "loss": 0.8616, "step": 3729 }, { "epoch": 0.4, "grad_norm": 1.673079297335638, "learning_rate": 6.805539811343377e-06, "loss": 0.7971, "step": 3730 }, { "epoch": 0.4, "grad_norm": 1.8261397689690728, "learning_rate": 6.803916109251749e-06, "loss": 0.957, "step": 3731 }, { "epoch": 0.4, "grad_norm": 1.7506908498476363, "learning_rate": 6.802292188429072e-06, "loss": 0.9321, "step": 3732 }, { "epoch": 0.4, "grad_norm": 1.7071283542021913, "learning_rate": 6.8006680490722566e-06, "loss": 0.9477, "step": 3733 }, { "epoch": 0.4, "grad_norm": 1.737197466964932, "learning_rate": 6.799043691378231e-06, "loss": 0.8985, "step": 3734 }, { "epoch": 0.4, "grad_norm": 1.710616059311271, "learning_rate": 6.797419115543955e-06, "loss": 0.8771, "step": 3735 }, { "epoch": 0.4, "grad_norm": 1.5396238290682722, "learning_rate": 6.795794321766416e-06, "loss": 0.8686, "step": 3736 }, { "epoch": 0.4, "grad_norm": 1.750814874095692, "learning_rate": 6.794169310242624e-06, "loss": 0.9697, "step": 3737 }, { "epoch": 0.4, "grad_norm": 1.677299708022049, "learning_rate": 6.7925440811696165e-06, "loss": 0.9001, "step": 3738 }, { "epoch": 0.4, "grad_norm": 1.8570951216973905, "learning_rate": 6.79091863474446e-06, "loss": 0.898, "step": 3739 }, { "epoch": 0.4, "grad_norm": 1.713846810234748, "learning_rate": 6.789292971164245e-06, "loss": 0.9287, "step": 3740 }, { "epoch": 0.4, "grad_norm": 1.8020721146555998, "learning_rate": 6.787667090626089e-06, "loss": 0.9547, "step": 3741 }, { "epoch": 0.4, "grad_norm": 1.626897982308909, "learning_rate": 6.786040993327135e-06, "loss": 0.846, "step": 3742 }, { "epoch": 0.4, "grad_norm": 1.6455487210144222, "learning_rate": 6.784414679464553e-06, "loss": 0.8831, "step": 3743 }, { "epoch": 0.4, "grad_norm": 1.663410696042311, "learning_rate": 6.7827881492355385e-06, "loss": 0.8859, "step": 3744 }, { "epoch": 0.4, "grad_norm": 0.8617414882032935, "learning_rate": 6.7811614028373166e-06, "loss": 1.0618, "step": 3745 }, { "epoch": 0.4, "grad_norm": 1.8125551621194524, "learning_rate": 6.779534440467132e-06, "loss": 0.9492, "step": 3746 }, { "epoch": 0.4, "grad_norm": 1.6218212009674808, "learning_rate": 6.777907262322263e-06, "loss": 0.8959, "step": 3747 }, { "epoch": 0.4, "grad_norm": 1.8343375643107682, "learning_rate": 6.7762798686000086e-06, "loss": 0.9681, "step": 3748 }, { "epoch": 0.4, "grad_norm": 1.696881416147612, "learning_rate": 6.774652259497696e-06, "loss": 0.9193, "step": 3749 }, { "epoch": 0.4, "grad_norm": 1.7826004206483415, "learning_rate": 6.773024435212678e-06, "loss": 0.9302, "step": 3750 }, { "epoch": 0.4, "grad_norm": 0.8319228669230851, "learning_rate": 6.7713963959423375e-06, "loss": 
1.0393, "step": 3751 }, { "epoch": 0.4, "grad_norm": 1.5836593806567643, "learning_rate": 6.769768141884074e-06, "loss": 0.8792, "step": 3752 }, { "epoch": 0.4, "grad_norm": 0.8505623136624468, "learning_rate": 6.768139673235323e-06, "loss": 1.0475, "step": 3753 }, { "epoch": 0.4, "grad_norm": 1.7150614047050985, "learning_rate": 6.7665109901935426e-06, "loss": 0.9164, "step": 3754 }, { "epoch": 0.4, "grad_norm": 1.7979499570366428, "learning_rate": 6.764882092956212e-06, "loss": 0.8846, "step": 3755 }, { "epoch": 0.4, "grad_norm": 1.7078001607472872, "learning_rate": 6.763252981720845e-06, "loss": 0.9538, "step": 3756 }, { "epoch": 0.4, "grad_norm": 2.33144393160548, "learning_rate": 6.7616236566849736e-06, "loss": 0.9049, "step": 3757 }, { "epoch": 0.4, "grad_norm": 1.6731505535393738, "learning_rate": 6.759994118046161e-06, "loss": 0.9553, "step": 3758 }, { "epoch": 0.4, "grad_norm": 1.6550027864088952, "learning_rate": 6.758364366001995e-06, "loss": 0.883, "step": 3759 }, { "epoch": 0.4, "grad_norm": 0.8331334553070108, "learning_rate": 6.756734400750087e-06, "loss": 1.0632, "step": 3760 }, { "epoch": 0.4, "grad_norm": 1.757811778120651, "learning_rate": 6.755104222488076e-06, "loss": 0.9113, "step": 3761 }, { "epoch": 0.4, "grad_norm": 1.615295593456092, "learning_rate": 6.7534738314136285e-06, "loss": 0.8752, "step": 3762 }, { "epoch": 0.4, "grad_norm": 1.775132222748083, "learning_rate": 6.751843227724433e-06, "loss": 0.8604, "step": 3763 }, { "epoch": 0.4, "grad_norm": 1.6472073846059767, "learning_rate": 6.7502124116182066e-06, "loss": 0.8993, "step": 3764 }, { "epoch": 0.4, "grad_norm": 0.8549509127193766, "learning_rate": 6.7485813832926905e-06, "loss": 1.0843, "step": 3765 }, { "epoch": 0.4, "grad_norm": 1.6136547802069838, "learning_rate": 6.746950142945654e-06, "loss": 0.9533, "step": 3766 }, { "epoch": 0.4, "grad_norm": 1.8084535781462003, "learning_rate": 6.7453186907748914e-06, "loss": 0.9102, "step": 3767 }, { "epoch": 0.41, "grad_norm": 1.7109587127042984, "learning_rate": 6.743687026978219e-06, "loss": 1.0018, "step": 3768 }, { "epoch": 0.41, "grad_norm": 1.6303294676907938, "learning_rate": 6.742055151753484e-06, "loss": 0.8704, "step": 3769 }, { "epoch": 0.41, "grad_norm": 1.6104343620415118, "learning_rate": 6.740423065298556e-06, "loss": 0.853, "step": 3770 }, { "epoch": 0.41, "grad_norm": 1.6996613994771286, "learning_rate": 6.7387907678113295e-06, "loss": 1.0272, "step": 3771 }, { "epoch": 0.41, "grad_norm": 1.81173269378801, "learning_rate": 6.73715825948973e-06, "loss": 0.8517, "step": 3772 }, { "epoch": 0.41, "grad_norm": 1.7244021989865903, "learning_rate": 6.735525540531702e-06, "loss": 0.9635, "step": 3773 }, { "epoch": 0.41, "grad_norm": 1.685113080814913, "learning_rate": 6.733892611135217e-06, "loss": 0.8163, "step": 3774 }, { "epoch": 0.41, "grad_norm": 1.6550017451405235, "learning_rate": 6.732259471498278e-06, "loss": 0.9632, "step": 3775 }, { "epoch": 0.41, "grad_norm": 1.7606791252224414, "learning_rate": 6.730626121818906e-06, "loss": 0.9009, "step": 3776 }, { "epoch": 0.41, "grad_norm": 1.674070988963595, "learning_rate": 6.728992562295149e-06, "loss": 0.944, "step": 3777 }, { "epoch": 0.41, "grad_norm": 1.8175621407834393, "learning_rate": 6.727358793125084e-06, "loss": 0.9644, "step": 3778 }, { "epoch": 0.41, "grad_norm": 1.642129249192693, "learning_rate": 6.72572481450681e-06, "loss": 0.9267, "step": 3779 }, { "epoch": 0.41, "grad_norm": 1.6857643110821325, "learning_rate": 6.724090626638452e-06, "loss": 0.906, "step": 3780 }, { "epoch": 0.41, 
"grad_norm": 1.693399275075128, "learning_rate": 6.722456229718163e-06, "loss": 0.9085, "step": 3781 }, { "epoch": 0.41, "grad_norm": 1.8320490535834988, "learning_rate": 6.720821623944117e-06, "loss": 0.8646, "step": 3782 }, { "epoch": 0.41, "grad_norm": 1.7590343990620032, "learning_rate": 6.7191868095145165e-06, "loss": 0.8571, "step": 3783 }, { "epoch": 0.41, "grad_norm": 1.7106374493728695, "learning_rate": 6.71755178662759e-06, "loss": 0.8587, "step": 3784 }, { "epoch": 0.41, "grad_norm": 1.7451363724212081, "learning_rate": 6.715916555481585e-06, "loss": 0.8942, "step": 3785 }, { "epoch": 0.41, "grad_norm": 1.689887042497531, "learning_rate": 6.714281116274783e-06, "loss": 0.9061, "step": 3786 }, { "epoch": 0.41, "grad_norm": 1.8080863228122521, "learning_rate": 6.712645469205488e-06, "loss": 0.9156, "step": 3787 }, { "epoch": 0.41, "grad_norm": 1.7354201692908116, "learning_rate": 6.711009614472023e-06, "loss": 0.8805, "step": 3788 }, { "epoch": 0.41, "grad_norm": 1.696291411565936, "learning_rate": 6.709373552272744e-06, "loss": 0.9623, "step": 3789 }, { "epoch": 0.41, "grad_norm": 1.7908329405629066, "learning_rate": 6.7077372828060294e-06, "loss": 0.8562, "step": 3790 }, { "epoch": 0.41, "grad_norm": 1.7902477224563222, "learning_rate": 6.70610080627028e-06, "loss": 0.8979, "step": 3791 }, { "epoch": 0.41, "grad_norm": 1.857914093764839, "learning_rate": 6.704464122863929e-06, "loss": 0.9804, "step": 3792 }, { "epoch": 0.41, "grad_norm": 0.9384256381126161, "learning_rate": 6.702827232785426e-06, "loss": 1.0803, "step": 3793 }, { "epoch": 0.41, "grad_norm": 1.6929236781205865, "learning_rate": 6.701190136233248e-06, "loss": 0.8658, "step": 3794 }, { "epoch": 0.41, "grad_norm": 1.7619154198679348, "learning_rate": 6.699552833405904e-06, "loss": 0.8731, "step": 3795 }, { "epoch": 0.41, "grad_norm": 1.7524271704097114, "learning_rate": 6.697915324501918e-06, "loss": 0.8959, "step": 3796 }, { "epoch": 0.41, "grad_norm": 0.8373574567558173, "learning_rate": 6.696277609719846e-06, "loss": 1.0541, "step": 3797 }, { "epoch": 0.41, "grad_norm": 1.892572661822782, "learning_rate": 6.694639689258265e-06, "loss": 0.8571, "step": 3798 }, { "epoch": 0.41, "grad_norm": 1.7228868993933018, "learning_rate": 6.6930015633157815e-06, "loss": 0.941, "step": 3799 }, { "epoch": 0.41, "grad_norm": 1.7706629058604655, "learning_rate": 6.691363232091019e-06, "loss": 0.9637, "step": 3800 }, { "epoch": 0.41, "grad_norm": 1.7442359043668563, "learning_rate": 6.6897246957826356e-06, "loss": 0.8838, "step": 3801 }, { "epoch": 0.41, "grad_norm": 1.7168805736997106, "learning_rate": 6.688085954589307e-06, "loss": 0.941, "step": 3802 }, { "epoch": 0.41, "grad_norm": 1.687654845367884, "learning_rate": 6.686447008709735e-06, "loss": 0.9362, "step": 3803 }, { "epoch": 0.41, "grad_norm": 0.9724080773294006, "learning_rate": 6.684807858342652e-06, "loss": 1.0228, "step": 3804 }, { "epoch": 0.41, "grad_norm": 1.7950247486428166, "learning_rate": 6.683168503686807e-06, "loss": 0.8895, "step": 3805 }, { "epoch": 0.41, "grad_norm": 0.8668694818664551, "learning_rate": 6.681528944940977e-06, "loss": 1.0589, "step": 3806 }, { "epoch": 0.41, "grad_norm": 1.65610554971664, "learning_rate": 6.6798891823039655e-06, "loss": 0.9491, "step": 3807 }, { "epoch": 0.41, "grad_norm": 0.849518928219065, "learning_rate": 6.6782492159746005e-06, "loss": 1.0995, "step": 3808 }, { "epoch": 0.41, "grad_norm": 1.845130575358366, "learning_rate": 6.676609046151732e-06, "loss": 0.8472, "step": 3809 }, { "epoch": 0.41, "grad_norm": 
1.7800059664355221, "learning_rate": 6.674968673034235e-06, "loss": 0.9232, "step": 3810 }, { "epoch": 0.41, "grad_norm": 1.773124242462841, "learning_rate": 6.6733280968210125e-06, "loss": 0.8427, "step": 3811 }, { "epoch": 0.41, "grad_norm": 1.653175895595962, "learning_rate": 6.67168731771099e-06, "loss": 0.8532, "step": 3812 }, { "epoch": 0.41, "grad_norm": 1.7440775503380563, "learning_rate": 6.670046335903116e-06, "loss": 0.8924, "step": 3813 }, { "epoch": 0.41, "grad_norm": 1.8321456922135435, "learning_rate": 6.668405151596366e-06, "loss": 0.9871, "step": 3814 }, { "epoch": 0.41, "grad_norm": 1.919470457788189, "learning_rate": 6.6667637649897424e-06, "loss": 1.0025, "step": 3815 }, { "epoch": 0.41, "grad_norm": 1.708580923251713, "learning_rate": 6.6651221762822635e-06, "loss": 0.8246, "step": 3816 }, { "epoch": 0.41, "grad_norm": 1.6280585666348544, "learning_rate": 6.6634803856729805e-06, "loss": 0.8471, "step": 3817 }, { "epoch": 0.41, "grad_norm": 1.7434820802522482, "learning_rate": 6.661838393360967e-06, "loss": 0.9325, "step": 3818 }, { "epoch": 0.41, "grad_norm": 1.7436636412597648, "learning_rate": 6.660196199545318e-06, "loss": 0.9595, "step": 3819 }, { "epoch": 0.41, "grad_norm": 1.8173168222149345, "learning_rate": 6.658553804425156e-06, "loss": 0.8969, "step": 3820 }, { "epoch": 0.41, "grad_norm": 1.7442794783196698, "learning_rate": 6.656911208199628e-06, "loss": 0.9485, "step": 3821 }, { "epoch": 0.41, "grad_norm": 1.0920182408093924, "learning_rate": 6.655268411067904e-06, "loss": 1.0554, "step": 3822 }, { "epoch": 0.41, "grad_norm": 1.8046815800837301, "learning_rate": 6.6536254132291765e-06, "loss": 0.8693, "step": 3823 }, { "epoch": 0.41, "grad_norm": 1.846421799904277, "learning_rate": 6.651982214882669e-06, "loss": 0.9414, "step": 3824 }, { "epoch": 0.41, "grad_norm": 1.832973563914968, "learning_rate": 6.650338816227623e-06, "loss": 0.9422, "step": 3825 }, { "epoch": 0.41, "grad_norm": 1.8462046300419594, "learning_rate": 6.6486952174633045e-06, "loss": 0.811, "step": 3826 }, { "epoch": 0.41, "grad_norm": 1.9099738734580007, "learning_rate": 6.647051418789008e-06, "loss": 0.8904, "step": 3827 }, { "epoch": 0.41, "grad_norm": 0.8745610623910935, "learning_rate": 6.645407420404048e-06, "loss": 1.0305, "step": 3828 }, { "epoch": 0.41, "grad_norm": 2.950037508660796, "learning_rate": 6.643763222507765e-06, "loss": 0.9034, "step": 3829 }, { "epoch": 0.41, "grad_norm": 1.7010749976426751, "learning_rate": 6.642118825299526e-06, "loss": 0.8652, "step": 3830 }, { "epoch": 0.41, "grad_norm": 1.7935358777836763, "learning_rate": 6.640474228978717e-06, "loss": 0.9422, "step": 3831 }, { "epoch": 0.41, "grad_norm": 0.8387611459340544, "learning_rate": 6.638829433744753e-06, "loss": 1.0706, "step": 3832 }, { "epoch": 0.41, "grad_norm": 1.7736813098131814, "learning_rate": 6.63718443979707e-06, "loss": 0.8283, "step": 3833 }, { "epoch": 0.41, "grad_norm": 1.7613412868740619, "learning_rate": 6.635539247335129e-06, "loss": 0.877, "step": 3834 }, { "epoch": 0.41, "grad_norm": 1.697444426360806, "learning_rate": 6.633893856558416e-06, "loss": 0.9267, "step": 3835 }, { "epoch": 0.41, "grad_norm": 1.8319397947035014, "learning_rate": 6.63224826766644e-06, "loss": 0.9298, "step": 3836 }, { "epoch": 0.41, "grad_norm": 1.7039154392053841, "learning_rate": 6.630602480858734e-06, "loss": 0.8515, "step": 3837 }, { "epoch": 0.41, "grad_norm": 1.7803712196467671, "learning_rate": 6.628956496334856e-06, "loss": 0.9085, "step": 3838 }, { "epoch": 0.41, "grad_norm": 1.8640364675587728, 
"learning_rate": 6.627310314294386e-06, "loss": 0.9888, "step": 3839 }, { "epoch": 0.41, "grad_norm": 1.7934083882854839, "learning_rate": 6.6256639349369326e-06, "loss": 0.8424, "step": 3840 }, { "epoch": 0.41, "grad_norm": 1.7860277104017066, "learning_rate": 6.624017358462122e-06, "loss": 0.8804, "step": 3841 }, { "epoch": 0.41, "grad_norm": 1.662337117575978, "learning_rate": 6.622370585069605e-06, "loss": 0.9006, "step": 3842 }, { "epoch": 0.41, "grad_norm": 0.9107321234658777, "learning_rate": 6.6207236149590635e-06, "loss": 1.0424, "step": 3843 }, { "epoch": 0.41, "grad_norm": 1.7304844065830798, "learning_rate": 6.619076448330197e-06, "loss": 0.8863, "step": 3844 }, { "epoch": 0.41, "grad_norm": 1.7110270107540906, "learning_rate": 6.6174290853827275e-06, "loss": 0.8523, "step": 3845 }, { "epoch": 0.41, "grad_norm": 1.7947723638006057, "learning_rate": 6.615781526316407e-06, "loss": 0.9608, "step": 3846 }, { "epoch": 0.41, "grad_norm": 0.8628670940753461, "learning_rate": 6.614133771331006e-06, "loss": 1.0333, "step": 3847 }, { "epoch": 0.41, "grad_norm": 2.053019135918526, "learning_rate": 6.612485820626318e-06, "loss": 0.9534, "step": 3848 }, { "epoch": 0.41, "grad_norm": 1.7457243225121548, "learning_rate": 6.610837674402167e-06, "loss": 0.8667, "step": 3849 }, { "epoch": 0.41, "grad_norm": 1.68558955804271, "learning_rate": 6.609189332858394e-06, "loss": 0.8522, "step": 3850 }, { "epoch": 0.41, "grad_norm": 1.6722147837025172, "learning_rate": 6.607540796194867e-06, "loss": 0.8847, "step": 3851 }, { "epoch": 0.41, "grad_norm": 1.8269058601456925, "learning_rate": 6.605892064611478e-06, "loss": 0.9067, "step": 3852 }, { "epoch": 0.41, "grad_norm": 1.6669734213477019, "learning_rate": 6.604243138308138e-06, "loss": 0.8944, "step": 3853 }, { "epoch": 0.41, "grad_norm": 1.6931219282539276, "learning_rate": 6.602594017484786e-06, "loss": 0.893, "step": 3854 }, { "epoch": 0.41, "grad_norm": 1.8834788772576254, "learning_rate": 6.600944702341385e-06, "loss": 0.9463, "step": 3855 }, { "epoch": 0.41, "grad_norm": 1.7777244843479938, "learning_rate": 6.599295193077921e-06, "loss": 0.9048, "step": 3856 }, { "epoch": 0.41, "grad_norm": 0.8806687181074289, "learning_rate": 6.5976454898944e-06, "loss": 1.047, "step": 3857 }, { "epoch": 0.41, "grad_norm": 1.837146852289688, "learning_rate": 6.595995592990856e-06, "loss": 0.9498, "step": 3858 }, { "epoch": 0.41, "grad_norm": 1.628324910723718, "learning_rate": 6.594345502567343e-06, "loss": 0.9036, "step": 3859 }, { "epoch": 0.41, "grad_norm": 1.6883453585095207, "learning_rate": 6.592695218823943e-06, "loss": 0.948, "step": 3860 }, { "epoch": 0.42, "grad_norm": 1.6154040961610314, "learning_rate": 6.591044741960755e-06, "loss": 0.8979, "step": 3861 }, { "epoch": 0.42, "grad_norm": 1.6721900192409, "learning_rate": 6.589394072177908e-06, "loss": 0.8374, "step": 3862 }, { "epoch": 0.42, "grad_norm": 1.653179880318606, "learning_rate": 6.58774320967555e-06, "loss": 0.8875, "step": 3863 }, { "epoch": 0.42, "grad_norm": 1.6942456114237976, "learning_rate": 6.586092154653855e-06, "loss": 0.957, "step": 3864 }, { "epoch": 0.42, "grad_norm": 1.78447349787036, "learning_rate": 6.584440907313017e-06, "loss": 0.9114, "step": 3865 }, { "epoch": 0.42, "grad_norm": 1.5381441869584935, "learning_rate": 6.582789467853257e-06, "loss": 0.9568, "step": 3866 }, { "epoch": 0.42, "grad_norm": 1.6679679679252688, "learning_rate": 6.581137836474817e-06, "loss": 0.9593, "step": 3867 }, { "epoch": 0.42, "grad_norm": 1.7474066060043723, "learning_rate": 
6.579486013377963e-06, "loss": 0.904, "step": 3868 }, { "epoch": 0.42, "grad_norm": 1.730546087814758, "learning_rate": 6.577833998762986e-06, "loss": 0.8936, "step": 3869 }, { "epoch": 0.42, "grad_norm": 2.059028743171172, "learning_rate": 6.576181792830194e-06, "loss": 0.8219, "step": 3870 }, { "epoch": 0.42, "grad_norm": 1.7583059057886148, "learning_rate": 6.574529395779928e-06, "loss": 1.01, "step": 3871 }, { "epoch": 0.42, "grad_norm": 1.7226902256103813, "learning_rate": 6.572876807812545e-06, "loss": 0.9866, "step": 3872 }, { "epoch": 0.42, "grad_norm": 1.711169858080197, "learning_rate": 6.571224029128426e-06, "loss": 0.9674, "step": 3873 }, { "epoch": 0.42, "grad_norm": 1.760698996191674, "learning_rate": 6.569571059927977e-06, "loss": 0.8762, "step": 3874 }, { "epoch": 0.42, "grad_norm": 1.724970697887914, "learning_rate": 6.567917900411625e-06, "loss": 0.8974, "step": 3875 }, { "epoch": 0.42, "grad_norm": 1.686425209175494, "learning_rate": 6.566264550779821e-06, "loss": 0.9049, "step": 3876 }, { "epoch": 0.42, "grad_norm": 1.6879831993005174, "learning_rate": 6.564611011233043e-06, "loss": 0.8525, "step": 3877 }, { "epoch": 0.42, "grad_norm": 1.7284367575849662, "learning_rate": 6.562957281971786e-06, "loss": 0.8917, "step": 3878 }, { "epoch": 0.42, "grad_norm": 1.8029067057431556, "learning_rate": 6.561303363196568e-06, "loss": 0.8967, "step": 3879 }, { "epoch": 0.42, "grad_norm": 1.6411528920854073, "learning_rate": 6.559649255107936e-06, "loss": 0.9489, "step": 3880 }, { "epoch": 0.42, "grad_norm": 1.799262702319796, "learning_rate": 6.557994957906455e-06, "loss": 0.8672, "step": 3881 }, { "epoch": 0.42, "grad_norm": 1.7653157234886843, "learning_rate": 6.556340471792713e-06, "loss": 0.9603, "step": 3882 }, { "epoch": 0.42, "grad_norm": 1.6652517212523792, "learning_rate": 6.554685796967324e-06, "loss": 0.9147, "step": 3883 }, { "epoch": 0.42, "grad_norm": 1.0330163361934335, "learning_rate": 6.553030933630922e-06, "loss": 1.0568, "step": 3884 }, { "epoch": 0.42, "grad_norm": 1.8135812050180837, "learning_rate": 6.551375881984166e-06, "loss": 0.8764, "step": 3885 }, { "epoch": 0.42, "grad_norm": 1.7775067955689776, "learning_rate": 6.549720642227735e-06, "loss": 0.8721, "step": 3886 }, { "epoch": 0.42, "grad_norm": 1.6771465572452495, "learning_rate": 6.548065214562333e-06, "loss": 0.8979, "step": 3887 }, { "epoch": 0.42, "grad_norm": 1.6996344054774135, "learning_rate": 6.546409599188687e-06, "loss": 0.891, "step": 3888 }, { "epoch": 0.42, "grad_norm": 1.897490996977252, "learning_rate": 6.5447537963075485e-06, "loss": 0.9201, "step": 3889 }, { "epoch": 0.42, "grad_norm": 1.9159743141321777, "learning_rate": 6.543097806119682e-06, "loss": 0.9008, "step": 3890 }, { "epoch": 0.42, "grad_norm": 1.800614194161361, "learning_rate": 6.54144162882589e-06, "loss": 0.8942, "step": 3891 }, { "epoch": 0.42, "grad_norm": 1.674415503151337, "learning_rate": 6.539785264626985e-06, "loss": 0.8977, "step": 3892 }, { "epoch": 0.42, "grad_norm": 1.7435893311053547, "learning_rate": 6.538128713723808e-06, "loss": 0.9838, "step": 3893 }, { "epoch": 0.42, "grad_norm": 1.612309517443607, "learning_rate": 6.536471976317223e-06, "loss": 0.7666, "step": 3894 }, { "epoch": 0.42, "grad_norm": 1.738024756971578, "learning_rate": 6.534815052608114e-06, "loss": 0.9103, "step": 3895 }, { "epoch": 0.42, "grad_norm": 1.6013460627046625, "learning_rate": 6.533157942797387e-06, "loss": 0.9597, "step": 3896 }, { "epoch": 0.42, "grad_norm": 1.8089269631135607, "learning_rate": 6.531500647085974e-06, "loss": 
0.9431, "step": 3897 }, { "epoch": 0.42, "grad_norm": 1.8385650563692855, "learning_rate": 6.529843165674828e-06, "loss": 0.8698, "step": 3898 }, { "epoch": 0.42, "grad_norm": 1.6575087445033134, "learning_rate": 6.528185498764925e-06, "loss": 0.9394, "step": 3899 }, { "epoch": 0.42, "grad_norm": 1.6187725412650043, "learning_rate": 6.526527646557261e-06, "loss": 0.9373, "step": 3900 }, { "epoch": 0.42, "grad_norm": 1.8889445948591954, "learning_rate": 6.5248696092528565e-06, "loss": 0.8645, "step": 3901 }, { "epoch": 0.42, "grad_norm": 1.6768692053843621, "learning_rate": 6.523211387052756e-06, "loss": 0.8966, "step": 3902 }, { "epoch": 0.42, "grad_norm": 1.6433509210642596, "learning_rate": 6.521552980158024e-06, "loss": 0.8593, "step": 3903 }, { "epoch": 0.42, "grad_norm": 1.7480503388626276, "learning_rate": 6.519894388769745e-06, "loss": 0.9619, "step": 3904 }, { "epoch": 0.42, "grad_norm": 1.0974724697563039, "learning_rate": 6.518235613089034e-06, "loss": 1.0788, "step": 3905 }, { "epoch": 0.42, "grad_norm": 1.745344751096273, "learning_rate": 6.516576653317019e-06, "loss": 0.9584, "step": 3906 }, { "epoch": 0.42, "grad_norm": 1.8307907422565648, "learning_rate": 6.514917509654856e-06, "loss": 1.0218, "step": 3907 }, { "epoch": 0.42, "grad_norm": 1.720183018886266, "learning_rate": 6.513258182303724e-06, "loss": 0.8331, "step": 3908 }, { "epoch": 0.42, "grad_norm": 1.7199463788782183, "learning_rate": 6.511598671464821e-06, "loss": 0.9171, "step": 3909 }, { "epoch": 0.42, "grad_norm": 1.6756481394455491, "learning_rate": 6.509938977339366e-06, "loss": 0.9113, "step": 3910 }, { "epoch": 0.42, "grad_norm": 1.721321574618392, "learning_rate": 6.508279100128605e-06, "loss": 0.8495, "step": 3911 }, { "epoch": 0.42, "grad_norm": 1.9797567203308861, "learning_rate": 6.5066190400338035e-06, "loss": 0.8256, "step": 3912 }, { "epoch": 0.42, "grad_norm": 1.8634837856969013, "learning_rate": 6.5049587972562485e-06, "loss": 0.9505, "step": 3913 }, { "epoch": 0.42, "grad_norm": 1.7727178916276758, "learning_rate": 6.503298371997252e-06, "loss": 0.8117, "step": 3914 }, { "epoch": 0.42, "grad_norm": 1.6939664743156453, "learning_rate": 6.501637764458145e-06, "loss": 0.9484, "step": 3915 }, { "epoch": 0.42, "grad_norm": 1.866707787904892, "learning_rate": 6.499976974840282e-06, "loss": 0.9027, "step": 3916 }, { "epoch": 0.42, "grad_norm": 1.7007876571985032, "learning_rate": 6.4983160033450395e-06, "loss": 0.8728, "step": 3917 }, { "epoch": 0.42, "grad_norm": 1.7444909663733197, "learning_rate": 6.496654850173816e-06, "loss": 0.8781, "step": 3918 }, { "epoch": 0.42, "grad_norm": 1.8385110408137166, "learning_rate": 6.494993515528032e-06, "loss": 0.9384, "step": 3919 }, { "epoch": 0.42, "grad_norm": 1.67044490482577, "learning_rate": 6.493331999609132e-06, "loss": 0.9758, "step": 3920 }, { "epoch": 0.42, "grad_norm": 1.7188206759933016, "learning_rate": 6.4916703026185755e-06, "loss": 0.9466, "step": 3921 }, { "epoch": 0.42, "grad_norm": 1.0032155462418202, "learning_rate": 6.490008424757855e-06, "loss": 1.0679, "step": 3922 }, { "epoch": 0.42, "grad_norm": 0.8985243074379652, "learning_rate": 6.488346366228475e-06, "loss": 1.0765, "step": 3923 }, { "epoch": 0.42, "grad_norm": 2.3451240214287137, "learning_rate": 6.4866841272319665e-06, "loss": 0.925, "step": 3924 }, { "epoch": 0.42, "grad_norm": 1.7369663048698247, "learning_rate": 6.485021707969883e-06, "loss": 0.9186, "step": 3925 }, { "epoch": 0.42, "grad_norm": 1.7967996895582161, "learning_rate": 6.483359108643798e-06, "loss": 0.8643, "step": 
3926 }, { "epoch": 0.42, "grad_norm": 2.0005325574915815, "learning_rate": 6.481696329455307e-06, "loss": 1.0052, "step": 3927 }, { "epoch": 0.42, "grad_norm": 1.7420558287588246, "learning_rate": 6.480033370606027e-06, "loss": 0.9119, "step": 3928 }, { "epoch": 0.42, "grad_norm": 1.6268550922206093, "learning_rate": 6.478370232297599e-06, "loss": 0.858, "step": 3929 }, { "epoch": 0.42, "grad_norm": 1.6912582964924663, "learning_rate": 6.476706914731683e-06, "loss": 0.8588, "step": 3930 }, { "epoch": 0.42, "grad_norm": 1.6199594314717038, "learning_rate": 6.475043418109966e-06, "loss": 0.9028, "step": 3931 }, { "epoch": 0.42, "grad_norm": 1.6886780898064158, "learning_rate": 6.473379742634145e-06, "loss": 0.9625, "step": 3932 }, { "epoch": 0.42, "grad_norm": 1.9870782148964425, "learning_rate": 6.471715888505952e-06, "loss": 0.9951, "step": 3933 }, { "epoch": 0.42, "grad_norm": 1.6640620305644407, "learning_rate": 6.470051855927134e-06, "loss": 0.9394, "step": 3934 }, { "epoch": 0.42, "grad_norm": 1.8339189250132935, "learning_rate": 6.46838764509946e-06, "loss": 0.9783, "step": 3935 }, { "epoch": 0.42, "grad_norm": 2.0192965646508627, "learning_rate": 6.466723256224723e-06, "loss": 0.9479, "step": 3936 }, { "epoch": 0.42, "grad_norm": 1.6930254999573584, "learning_rate": 6.465058689504734e-06, "loss": 0.8802, "step": 3937 }, { "epoch": 0.42, "grad_norm": 1.76282222904526, "learning_rate": 6.463393945141328e-06, "loss": 0.9176, "step": 3938 }, { "epoch": 0.42, "grad_norm": 1.839147944027083, "learning_rate": 6.4617290233363604e-06, "loss": 0.9054, "step": 3939 }, { "epoch": 0.42, "grad_norm": 1.7003596365584817, "learning_rate": 6.46006392429171e-06, "loss": 0.9559, "step": 3940 }, { "epoch": 0.42, "grad_norm": 1.5580418437253907, "learning_rate": 6.458398648209274e-06, "loss": 1.0624, "step": 3941 }, { "epoch": 0.42, "grad_norm": 1.7760285415949977, "learning_rate": 6.456733195290976e-06, "loss": 0.9325, "step": 3942 }, { "epoch": 0.42, "grad_norm": 1.7665553398276648, "learning_rate": 6.455067565738755e-06, "loss": 0.9125, "step": 3943 }, { "epoch": 0.42, "grad_norm": 1.697421218056344, "learning_rate": 6.453401759754574e-06, "loss": 0.9738, "step": 3944 }, { "epoch": 0.42, "grad_norm": 1.8520398869277461, "learning_rate": 6.451735777540421e-06, "loss": 0.8812, "step": 3945 }, { "epoch": 0.42, "grad_norm": 1.7470787357341448, "learning_rate": 6.450069619298299e-06, "loss": 0.8844, "step": 3946 }, { "epoch": 0.42, "grad_norm": 1.7516565544068061, "learning_rate": 6.448403285230238e-06, "loss": 1.0351, "step": 3947 }, { "epoch": 0.42, "grad_norm": 1.8413578513050175, "learning_rate": 6.446736775538284e-06, "loss": 0.945, "step": 3948 }, { "epoch": 0.42, "grad_norm": 1.7074259037795683, "learning_rate": 6.4450700904245076e-06, "loss": 0.953, "step": 3949 }, { "epoch": 0.42, "grad_norm": 2.116914206446147, "learning_rate": 6.443403230091002e-06, "loss": 0.8957, "step": 3950 }, { "epoch": 0.42, "grad_norm": 1.8465286713590245, "learning_rate": 6.4417361947398785e-06, "loss": 0.8642, "step": 3951 }, { "epoch": 0.42, "grad_norm": 1.8075240989910308, "learning_rate": 6.440068984573271e-06, "loss": 0.9479, "step": 3952 }, { "epoch": 0.42, "grad_norm": 1.7044332375628606, "learning_rate": 6.438401599793333e-06, "loss": 0.8821, "step": 3953 }, { "epoch": 0.43, "grad_norm": 1.7741657267084037, "learning_rate": 6.436734040602244e-06, "loss": 1.0103, "step": 3954 }, { "epoch": 0.43, "grad_norm": 1.7326726654355806, "learning_rate": 6.435066307202198e-06, "loss": 0.8898, "step": 3955 }, { "epoch": 
0.43, "grad_norm": 1.7888952608162794, "learning_rate": 6.4333983997954155e-06, "loss": 0.9306, "step": 3956 }, { "epoch": 0.43, "grad_norm": 0.9756505815040522, "learning_rate": 6.431730318584135e-06, "loss": 1.0298, "step": 3957 }, { "epoch": 0.43, "grad_norm": 1.7784356931637022, "learning_rate": 6.430062063770619e-06, "loss": 0.9815, "step": 3958 }, { "epoch": 0.43, "grad_norm": 1.7857967490961197, "learning_rate": 6.428393635557146e-06, "loss": 0.8673, "step": 3959 }, { "epoch": 0.43, "grad_norm": 1.6484432981785642, "learning_rate": 6.426725034146021e-06, "loss": 0.8174, "step": 3960 }, { "epoch": 0.43, "grad_norm": 1.7222272117479835, "learning_rate": 6.425056259739566e-06, "loss": 0.9717, "step": 3961 }, { "epoch": 0.43, "grad_norm": 1.7242847296163148, "learning_rate": 6.4233873125401266e-06, "loss": 0.9055, "step": 3962 }, { "epoch": 0.43, "grad_norm": 1.7856650571672923, "learning_rate": 6.4217181927500695e-06, "loss": 0.8802, "step": 3963 }, { "epoch": 0.43, "grad_norm": 1.5675209487734652, "learning_rate": 6.4200489005717806e-06, "loss": 0.842, "step": 3964 }, { "epoch": 0.43, "grad_norm": 1.6629797683005096, "learning_rate": 6.418379436207664e-06, "loss": 0.9133, "step": 3965 }, { "epoch": 0.43, "grad_norm": 1.9033203261972595, "learning_rate": 6.416709799860152e-06, "loss": 0.8796, "step": 3966 }, { "epoch": 0.43, "grad_norm": 1.7324247175128238, "learning_rate": 6.4150399917316945e-06, "loss": 0.9172, "step": 3967 }, { "epoch": 0.43, "grad_norm": 0.8699551433515686, "learning_rate": 6.41337001202476e-06, "loss": 1.0365, "step": 3968 }, { "epoch": 0.43, "grad_norm": 0.8835222291898363, "learning_rate": 6.411699860941836e-06, "loss": 1.0763, "step": 3969 }, { "epoch": 0.43, "grad_norm": 1.8684088455439272, "learning_rate": 6.410029538685438e-06, "loss": 0.9139, "step": 3970 }, { "epoch": 0.43, "grad_norm": 1.7258885343994286, "learning_rate": 6.408359045458099e-06, "loss": 0.9413, "step": 3971 }, { "epoch": 0.43, "grad_norm": 0.8320453269636148, "learning_rate": 6.4066883814623674e-06, "loss": 1.042, "step": 3972 }, { "epoch": 0.43, "grad_norm": 1.8506221638523055, "learning_rate": 6.4050175469008225e-06, "loss": 0.9586, "step": 3973 }, { "epoch": 0.43, "grad_norm": 1.7195702062108829, "learning_rate": 6.403346541976057e-06, "loss": 0.9008, "step": 3974 }, { "epoch": 0.43, "grad_norm": 1.7074178320775972, "learning_rate": 6.401675366890682e-06, "loss": 0.8398, "step": 3975 }, { "epoch": 0.43, "grad_norm": 1.8912538092868443, "learning_rate": 6.400004021847338e-06, "loss": 0.9583, "step": 3976 }, { "epoch": 0.43, "grad_norm": 1.7486165996178227, "learning_rate": 6.39833250704868e-06, "loss": 0.887, "step": 3977 }, { "epoch": 0.43, "grad_norm": 1.7044796776542033, "learning_rate": 6.396660822697384e-06, "loss": 0.923, "step": 3978 }, { "epoch": 0.43, "grad_norm": 1.7619137996741225, "learning_rate": 6.39498896899615e-06, "loss": 0.8927, "step": 3979 }, { "epoch": 0.43, "grad_norm": 1.739431671006542, "learning_rate": 6.393316946147692e-06, "loss": 0.8563, "step": 3980 }, { "epoch": 0.43, "grad_norm": 1.6361367810204508, "learning_rate": 6.391644754354751e-06, "loss": 0.8372, "step": 3981 }, { "epoch": 0.43, "grad_norm": 1.7833271201274037, "learning_rate": 6.389972393820088e-06, "loss": 0.9515, "step": 3982 }, { "epoch": 0.43, "grad_norm": 1.963016328471912, "learning_rate": 6.388299864746476e-06, "loss": 0.8727, "step": 3983 }, { "epoch": 0.43, "grad_norm": 1.838679891182311, "learning_rate": 6.386627167336724e-06, "loss": 1.0075, "step": 3984 }, { "epoch": 0.43, "grad_norm": 
1.7743819031502654, "learning_rate": 6.384954301793647e-06, "loss": 0.9329, "step": 3985 }, { "epoch": 0.43, "grad_norm": 2.1073906245492458, "learning_rate": 6.3832812683200825e-06, "loss": 0.9673, "step": 3986 }, { "epoch": 0.43, "grad_norm": 1.7631329856644022, "learning_rate": 6.381608067118899e-06, "loss": 0.8673, "step": 3987 }, { "epoch": 0.43, "grad_norm": 0.9778189708453292, "learning_rate": 6.379934698392973e-06, "loss": 1.0379, "step": 3988 }, { "epoch": 0.43, "grad_norm": 1.7192378265329664, "learning_rate": 6.378261162345207e-06, "loss": 0.8959, "step": 3989 }, { "epoch": 0.43, "grad_norm": 0.8995153084526877, "learning_rate": 6.3765874591785245e-06, "loss": 1.0392, "step": 3990 }, { "epoch": 0.43, "grad_norm": 1.648183731366141, "learning_rate": 6.374913589095866e-06, "loss": 0.9137, "step": 3991 }, { "epoch": 0.43, "grad_norm": 1.8193897880654242, "learning_rate": 6.373239552300194e-06, "loss": 0.8234, "step": 3992 }, { "epoch": 0.43, "grad_norm": 1.976808648283257, "learning_rate": 6.371565348994493e-06, "loss": 1.0109, "step": 3993 }, { "epoch": 0.43, "grad_norm": 1.71385962557775, "learning_rate": 6.3698909793817656e-06, "loss": 0.9742, "step": 3994 }, { "epoch": 0.43, "grad_norm": 1.8390663291313467, "learning_rate": 6.3682164436650326e-06, "loss": 0.878, "step": 3995 }, { "epoch": 0.43, "grad_norm": 1.6588951781326622, "learning_rate": 6.366541742047341e-06, "loss": 0.8933, "step": 3996 }, { "epoch": 0.43, "grad_norm": 1.6787808450692152, "learning_rate": 6.364866874731749e-06, "loss": 0.8814, "step": 3997 }, { "epoch": 0.43, "grad_norm": 1.593626293404446, "learning_rate": 6.363191841921345e-06, "loss": 0.923, "step": 3998 }, { "epoch": 0.43, "grad_norm": 1.8482915144525396, "learning_rate": 6.36151664381923e-06, "loss": 0.9699, "step": 3999 }, { "epoch": 0.43, "grad_norm": 1.7151242217610918, "learning_rate": 6.359841280628529e-06, "loss": 0.8672, "step": 4000 }, { "epoch": 0.43, "grad_norm": 1.787131297260499, "learning_rate": 6.358165752552383e-06, "loss": 0.8739, "step": 4001 }, { "epoch": 0.43, "grad_norm": 1.7099209625632075, "learning_rate": 6.356490059793959e-06, "loss": 0.8755, "step": 4002 }, { "epoch": 0.43, "grad_norm": 1.6391885664671784, "learning_rate": 6.354814202556437e-06, "loss": 0.9039, "step": 4003 }, { "epoch": 0.43, "grad_norm": 1.740366418645429, "learning_rate": 6.353138181043024e-06, "loss": 0.8881, "step": 4004 }, { "epoch": 0.43, "grad_norm": 1.7480268695522394, "learning_rate": 6.351461995456941e-06, "loss": 0.9565, "step": 4005 }, { "epoch": 0.43, "grad_norm": 2.0009328208351045, "learning_rate": 6.349785646001431e-06, "loss": 0.9025, "step": 4006 }, { "epoch": 0.43, "grad_norm": 1.8009717122738471, "learning_rate": 6.348109132879758e-06, "loss": 0.903, "step": 4007 }, { "epoch": 0.43, "grad_norm": 1.8504021963425317, "learning_rate": 6.346432456295206e-06, "loss": 0.9411, "step": 4008 }, { "epoch": 0.43, "grad_norm": 1.706813212320307, "learning_rate": 6.344755616451075e-06, "loss": 0.8389, "step": 4009 }, { "epoch": 0.43, "grad_norm": 1.7051543665825062, "learning_rate": 6.343078613550691e-06, "loss": 0.8774, "step": 4010 }, { "epoch": 0.43, "grad_norm": 1.7052958775982534, "learning_rate": 6.341401447797395e-06, "loss": 0.9244, "step": 4011 }, { "epoch": 0.43, "grad_norm": 1.7093526731171986, "learning_rate": 6.339724119394548e-06, "loss": 0.9053, "step": 4012 }, { "epoch": 0.43, "grad_norm": 1.1870874952329995, "learning_rate": 6.3380466285455336e-06, "loss": 1.0854, "step": 4013 }, { "epoch": 0.43, "grad_norm": 1.7625268872432795, 
"learning_rate": 6.336368975453752e-06, "loss": 0.8794, "step": 4014 }, { "epoch": 0.43, "grad_norm": 1.879391858440186, "learning_rate": 6.3346911603226245e-06, "loss": 0.9488, "step": 4015 }, { "epoch": 0.43, "grad_norm": 1.681810652228696, "learning_rate": 6.333013183355594e-06, "loss": 0.8792, "step": 4016 }, { "epoch": 0.43, "grad_norm": 1.8575135165372507, "learning_rate": 6.331335044756119e-06, "loss": 0.9689, "step": 4017 }, { "epoch": 0.43, "grad_norm": 1.663544587754942, "learning_rate": 6.329656744727679e-06, "loss": 0.9284, "step": 4018 }, { "epoch": 0.43, "grad_norm": 1.740260647611781, "learning_rate": 6.3279782834737755e-06, "loss": 0.9442, "step": 4019 }, { "epoch": 0.43, "grad_norm": 1.8368563061691017, "learning_rate": 6.326299661197926e-06, "loss": 0.8632, "step": 4020 }, { "epoch": 0.43, "grad_norm": 1.8817559943663245, "learning_rate": 6.324620878103671e-06, "loss": 0.8644, "step": 4021 }, { "epoch": 0.43, "grad_norm": 1.7738441166883678, "learning_rate": 6.322941934394568e-06, "loss": 0.9868, "step": 4022 }, { "epoch": 0.43, "grad_norm": 1.7011821433322205, "learning_rate": 6.321262830274193e-06, "loss": 0.8193, "step": 4023 }, { "epoch": 0.43, "grad_norm": 1.7111022798620577, "learning_rate": 6.319583565946147e-06, "loss": 0.8641, "step": 4024 }, { "epoch": 0.43, "grad_norm": 1.8569972600601368, "learning_rate": 6.317904141614043e-06, "loss": 0.8861, "step": 4025 }, { "epoch": 0.43, "grad_norm": 1.7422377554277533, "learning_rate": 6.3162245574815186e-06, "loss": 0.946, "step": 4026 }, { "epoch": 0.43, "grad_norm": 2.1030757388947032, "learning_rate": 6.314544813752229e-06, "loss": 0.8271, "step": 4027 }, { "epoch": 0.43, "grad_norm": 1.7798087861937413, "learning_rate": 6.312864910629848e-06, "loss": 0.9319, "step": 4028 }, { "epoch": 0.43, "grad_norm": 1.7006027908633905, "learning_rate": 6.311184848318072e-06, "loss": 0.9137, "step": 4029 }, { "epoch": 0.43, "grad_norm": 0.9191937380322668, "learning_rate": 6.309504627020612e-06, "loss": 1.0761, "step": 4030 }, { "epoch": 0.43, "grad_norm": 1.8511165826895926, "learning_rate": 6.3078242469412e-06, "loss": 0.9306, "step": 4031 }, { "epoch": 0.43, "grad_norm": 1.6776837616127325, "learning_rate": 6.306143708283592e-06, "loss": 0.8846, "step": 4032 }, { "epoch": 0.43, "grad_norm": 1.739803937852351, "learning_rate": 6.3044630112515545e-06, "loss": 0.9539, "step": 4033 }, { "epoch": 0.43, "grad_norm": 1.6866745276215105, "learning_rate": 6.30278215604888e-06, "loss": 0.8769, "step": 4034 }, { "epoch": 0.43, "grad_norm": 1.6633010456857806, "learning_rate": 6.3011011428793776e-06, "loss": 0.9749, "step": 4035 }, { "epoch": 0.43, "grad_norm": 1.7736829050694976, "learning_rate": 6.299419971946876e-06, "loss": 0.9624, "step": 4036 }, { "epoch": 0.43, "grad_norm": 1.7377674810961996, "learning_rate": 6.297738643455225e-06, "loss": 0.8626, "step": 4037 }, { "epoch": 0.43, "grad_norm": 0.8335939291285382, "learning_rate": 6.2960571576082875e-06, "loss": 1.0489, "step": 4038 }, { "epoch": 0.43, "grad_norm": 1.811280321684043, "learning_rate": 6.294375514609952e-06, "loss": 0.8904, "step": 4039 }, { "epoch": 0.43, "grad_norm": 1.6895227813165294, "learning_rate": 6.292693714664122e-06, "loss": 0.8624, "step": 4040 }, { "epoch": 0.43, "grad_norm": 1.6843977924332796, "learning_rate": 6.291011757974723e-06, "loss": 0.9358, "step": 4041 }, { "epoch": 0.43, "grad_norm": 1.840856944242189, "learning_rate": 6.289329644745699e-06, "loss": 0.984, "step": 4042 }, { "epoch": 0.43, "grad_norm": 1.698903197468234, "learning_rate": 
6.287647375181009e-06, "loss": 0.8886, "step": 4043 }, { "epoch": 0.43, "grad_norm": 1.7307850139654541, "learning_rate": 6.285964949484636e-06, "loss": 0.9188, "step": 4044 }, { "epoch": 0.43, "grad_norm": 1.7722243210098503, "learning_rate": 6.284282367860579e-06, "loss": 0.9404, "step": 4045 }, { "epoch": 0.43, "grad_norm": 1.7644428767719205, "learning_rate": 6.282599630512858e-06, "loss": 0.8908, "step": 4046 }, { "epoch": 0.44, "grad_norm": 1.7023937339725024, "learning_rate": 6.280916737645511e-06, "loss": 0.9174, "step": 4047 }, { "epoch": 0.44, "grad_norm": 1.755403687189088, "learning_rate": 6.2792336894625915e-06, "loss": 0.929, "step": 4048 }, { "epoch": 0.44, "grad_norm": 1.721359899662644, "learning_rate": 6.2775504861681775e-06, "loss": 0.883, "step": 4049 }, { "epoch": 0.44, "grad_norm": 1.6627270644213084, "learning_rate": 6.275867127966364e-06, "loss": 0.9124, "step": 4050 }, { "epoch": 0.44, "grad_norm": 1.7445372397088001, "learning_rate": 6.27418361506126e-06, "loss": 0.887, "step": 4051 }, { "epoch": 0.44, "grad_norm": 1.7162927245855721, "learning_rate": 6.272499947657002e-06, "loss": 0.976, "step": 4052 }, { "epoch": 0.44, "grad_norm": 0.829345682744822, "learning_rate": 6.270816125957739e-06, "loss": 1.0366, "step": 4053 }, { "epoch": 0.44, "grad_norm": 0.840423253429475, "learning_rate": 6.269132150167639e-06, "loss": 1.0693, "step": 4054 }, { "epoch": 0.44, "grad_norm": 1.6864759518847927, "learning_rate": 6.267448020490889e-06, "loss": 0.859, "step": 4055 }, { "epoch": 0.44, "grad_norm": 1.7011504529370403, "learning_rate": 6.265763737131698e-06, "loss": 0.8868, "step": 4056 }, { "epoch": 0.44, "grad_norm": 1.8716944692251618, "learning_rate": 6.26407930029429e-06, "loss": 0.9416, "step": 4057 }, { "epoch": 0.44, "grad_norm": 0.8751610096717009, "learning_rate": 6.26239471018291e-06, "loss": 1.067, "step": 4058 }, { "epoch": 0.44, "grad_norm": 1.7107574710553786, "learning_rate": 6.260709967001816e-06, "loss": 0.931, "step": 4059 }, { "epoch": 0.44, "grad_norm": 0.8801280309424162, "learning_rate": 6.259025070955296e-06, "loss": 1.0653, "step": 4060 }, { "epoch": 0.44, "grad_norm": 1.7437068473879977, "learning_rate": 6.2573400222476435e-06, "loss": 0.9227, "step": 4061 }, { "epoch": 0.44, "grad_norm": 1.7658704965340868, "learning_rate": 6.255654821083178e-06, "loss": 0.8665, "step": 4062 }, { "epoch": 0.44, "grad_norm": 1.6529819362262912, "learning_rate": 6.253969467666239e-06, "loss": 0.8847, "step": 4063 }, { "epoch": 0.44, "grad_norm": 1.8001116189743072, "learning_rate": 6.252283962201177e-06, "loss": 0.9323, "step": 4064 }, { "epoch": 0.44, "grad_norm": 0.8697944340743636, "learning_rate": 6.2505983048923675e-06, "loss": 1.0275, "step": 4065 }, { "epoch": 0.44, "grad_norm": 1.7934903149312482, "learning_rate": 6.2489124959442026e-06, "loss": 0.8545, "step": 4066 }, { "epoch": 0.44, "grad_norm": 1.6538995393255496, "learning_rate": 6.247226535561092e-06, "loss": 0.8296, "step": 4067 }, { "epoch": 0.44, "grad_norm": 1.754819005550529, "learning_rate": 6.245540423947463e-06, "loss": 0.9827, "step": 4068 }, { "epoch": 0.44, "grad_norm": 1.8111568447737512, "learning_rate": 6.243854161307765e-06, "loss": 0.9299, "step": 4069 }, { "epoch": 0.44, "grad_norm": 0.7887977407009261, "learning_rate": 6.242167747846461e-06, "loss": 1.0473, "step": 4070 }, { "epoch": 0.44, "grad_norm": 1.7272270334963726, "learning_rate": 6.240481183768036e-06, "loss": 0.9524, "step": 4071 }, { "epoch": 0.44, "grad_norm": 1.7026660414866062, "learning_rate": 6.2387944692769896e-06, 
"loss": 0.887, "step": 4072 }, { "epoch": 0.44, "grad_norm": 0.7984945734698372, "learning_rate": 6.237107604577843e-06, "loss": 1.0392, "step": 4073 }, { "epoch": 0.44, "grad_norm": 1.65285112778998, "learning_rate": 6.235420589875136e-06, "loss": 0.8028, "step": 4074 }, { "epoch": 0.44, "grad_norm": 1.7043596660159106, "learning_rate": 6.233733425373423e-06, "loss": 0.8184, "step": 4075 }, { "epoch": 0.44, "grad_norm": 1.5769506612191302, "learning_rate": 6.232046111277277e-06, "loss": 0.931, "step": 4076 }, { "epoch": 0.44, "grad_norm": 1.7743485144190552, "learning_rate": 6.230358647791293e-06, "loss": 0.905, "step": 4077 }, { "epoch": 0.44, "grad_norm": 1.684035043513552, "learning_rate": 6.228671035120083e-06, "loss": 0.9109, "step": 4078 }, { "epoch": 0.44, "grad_norm": 1.643274481846152, "learning_rate": 6.226983273468273e-06, "loss": 0.9696, "step": 4079 }, { "epoch": 0.44, "grad_norm": 1.6374746363121848, "learning_rate": 6.225295363040511e-06, "loss": 0.9106, "step": 4080 }, { "epoch": 0.44, "grad_norm": 1.6788832780940153, "learning_rate": 6.2236073040414625e-06, "loss": 0.973, "step": 4081 }, { "epoch": 0.44, "grad_norm": 1.7705298119470898, "learning_rate": 6.221919096675808e-06, "loss": 0.9792, "step": 4082 }, { "epoch": 0.44, "grad_norm": 1.721295931251655, "learning_rate": 6.220230741148253e-06, "loss": 0.836, "step": 4083 }, { "epoch": 0.44, "grad_norm": 1.755348691293493, "learning_rate": 6.218542237663513e-06, "loss": 0.9281, "step": 4084 }, { "epoch": 0.44, "grad_norm": 1.644138339325869, "learning_rate": 6.216853586426326e-06, "loss": 0.9661, "step": 4085 }, { "epoch": 0.44, "grad_norm": 1.8759942270728294, "learning_rate": 6.215164787641446e-06, "loss": 0.8915, "step": 4086 }, { "epoch": 0.44, "grad_norm": 1.6908116683292438, "learning_rate": 6.2134758415136465e-06, "loss": 0.9063, "step": 4087 }, { "epoch": 0.44, "grad_norm": 1.628252682535255, "learning_rate": 6.211786748247717e-06, "loss": 0.8381, "step": 4088 }, { "epoch": 0.44, "grad_norm": 2.53893147283309, "learning_rate": 6.210097508048467e-06, "loss": 0.9505, "step": 4089 }, { "epoch": 0.44, "grad_norm": 1.9076972425766632, "learning_rate": 6.208408121120722e-06, "loss": 0.9477, "step": 4090 }, { "epoch": 0.44, "grad_norm": 1.8303579836340071, "learning_rate": 6.206718587669327e-06, "loss": 0.8087, "step": 4091 }, { "epoch": 0.44, "grad_norm": 1.7217632112574377, "learning_rate": 6.205028907899143e-06, "loss": 0.9272, "step": 4092 }, { "epoch": 0.44, "grad_norm": 1.7062790110504575, "learning_rate": 6.203339082015049e-06, "loss": 0.9777, "step": 4093 }, { "epoch": 0.44, "grad_norm": 1.8178178035978312, "learning_rate": 6.2016491102219435e-06, "loss": 0.9908, "step": 4094 }, { "epoch": 0.44, "grad_norm": 0.9373185022399888, "learning_rate": 6.19995899272474e-06, "loss": 1.0453, "step": 4095 }, { "epoch": 0.44, "grad_norm": 1.6949696724126513, "learning_rate": 6.198268729728372e-06, "loss": 0.8657, "step": 4096 }, { "epoch": 0.44, "grad_norm": 1.764931852583288, "learning_rate": 6.1965783214377895e-06, "loss": 0.9579, "step": 4097 }, { "epoch": 0.44, "grad_norm": 1.8072555675100022, "learning_rate": 6.19488776805796e-06, "loss": 0.8671, "step": 4098 }, { "epoch": 0.44, "grad_norm": 1.7560189720097226, "learning_rate": 6.1931970697938695e-06, "loss": 0.81, "step": 4099 }, { "epoch": 0.44, "grad_norm": 1.7797001723841435, "learning_rate": 6.191506226850521e-06, "loss": 0.9307, "step": 4100 }, { "epoch": 0.44, "grad_norm": 1.7368902863373312, "learning_rate": 6.189815239432935e-06, "loss": 0.9122, "step": 4101 }, 
{ "epoch": 0.44, "grad_norm": 1.7307633906267719, "learning_rate": 6.188124107746148e-06, "loss": 0.8597, "step": 4102 }, { "epoch": 0.44, "grad_norm": 1.8966760810622938, "learning_rate": 6.186432831995218e-06, "loss": 0.8888, "step": 4103 }, { "epoch": 0.44, "grad_norm": 1.7357836786170073, "learning_rate": 6.184741412385217e-06, "loss": 0.8524, "step": 4104 }, { "epoch": 0.44, "grad_norm": 1.9123475037629505, "learning_rate": 6.183049849121233e-06, "loss": 0.9417, "step": 4105 }, { "epoch": 0.44, "grad_norm": 1.6856157918660948, "learning_rate": 6.18135814240838e-06, "loss": 0.9272, "step": 4106 }, { "epoch": 0.44, "grad_norm": 1.7840470728861872, "learning_rate": 6.179666292451776e-06, "loss": 0.9444, "step": 4107 }, { "epoch": 0.44, "grad_norm": 1.6803240122309366, "learning_rate": 6.1779742994565686e-06, "loss": 0.9686, "step": 4108 }, { "epoch": 0.44, "grad_norm": 0.9627885123668279, "learning_rate": 6.176282163627917e-06, "loss": 1.0448, "step": 4109 }, { "epoch": 0.44, "grad_norm": 1.7829164959392731, "learning_rate": 6.174589885170996e-06, "loss": 0.8701, "step": 4110 }, { "epoch": 0.44, "grad_norm": 1.755903774850548, "learning_rate": 6.172897464291005e-06, "loss": 0.9063, "step": 4111 }, { "epoch": 0.44, "grad_norm": 1.7555859358773085, "learning_rate": 6.171204901193151e-06, "loss": 0.8769, "step": 4112 }, { "epoch": 0.44, "grad_norm": 0.8686098413489176, "learning_rate": 6.169512196082663e-06, "loss": 1.0658, "step": 4113 }, { "epoch": 0.44, "grad_norm": 1.7992630422196227, "learning_rate": 6.167819349164792e-06, "loss": 0.9995, "step": 4114 }, { "epoch": 0.44, "grad_norm": 1.710193442257645, "learning_rate": 6.166126360644797e-06, "loss": 0.899, "step": 4115 }, { "epoch": 0.44, "grad_norm": 1.674506640978442, "learning_rate": 6.1644332307279616e-06, "loss": 0.8869, "step": 4116 }, { "epoch": 0.44, "grad_norm": 1.719516833122571, "learning_rate": 6.1627399596195835e-06, "loss": 0.9083, "step": 4117 }, { "epoch": 0.44, "grad_norm": 1.8047172641033573, "learning_rate": 6.161046547524976e-06, "loss": 0.8978, "step": 4118 }, { "epoch": 0.44, "grad_norm": 1.6801457428529358, "learning_rate": 6.15935299464947e-06, "loss": 0.8551, "step": 4119 }, { "epoch": 0.44, "grad_norm": 1.742807902931998, "learning_rate": 6.157659301198418e-06, "loss": 0.9632, "step": 4120 }, { "epoch": 0.44, "grad_norm": 1.762897988294096, "learning_rate": 6.1559654673771865e-06, "loss": 0.9121, "step": 4121 }, { "epoch": 0.44, "grad_norm": 1.8643169353518152, "learning_rate": 6.154271493391155e-06, "loss": 0.8988, "step": 4122 }, { "epoch": 0.44, "grad_norm": 1.9079408210279591, "learning_rate": 6.152577379445725e-06, "loss": 0.8817, "step": 4123 }, { "epoch": 0.44, "grad_norm": 1.70948683843766, "learning_rate": 6.150883125746314e-06, "loss": 0.902, "step": 4124 }, { "epoch": 0.44, "grad_norm": 1.8590094273255577, "learning_rate": 6.149188732498358e-06, "loss": 0.9021, "step": 4125 }, { "epoch": 0.44, "grad_norm": 1.7757050964141583, "learning_rate": 6.147494199907306e-06, "loss": 0.9212, "step": 4126 }, { "epoch": 0.44, "grad_norm": 1.7140371471382743, "learning_rate": 6.145799528178625e-06, "loss": 0.9023, "step": 4127 }, { "epoch": 0.44, "grad_norm": 1.7531331343336856, "learning_rate": 6.1441047175178025e-06, "loss": 0.8785, "step": 4128 }, { "epoch": 0.44, "grad_norm": 0.9679592964718644, "learning_rate": 6.142409768130339e-06, "loss": 1.0468, "step": 4129 }, { "epoch": 0.44, "grad_norm": 1.7086181600491628, "learning_rate": 6.14071468022175e-06, "loss": 0.8966, "step": 4130 }, { "epoch": 0.44, 
"grad_norm": 1.7193607118884877, "learning_rate": 6.1390194539975754e-06, "loss": 0.8933, "step": 4131 }, { "epoch": 0.44, "grad_norm": 1.7001835640469714, "learning_rate": 6.137324089663365e-06, "loss": 1.0109, "step": 4132 }, { "epoch": 0.44, "grad_norm": 1.7059096047730586, "learning_rate": 6.135628587424688e-06, "loss": 0.9374, "step": 4133 }, { "epoch": 0.44, "grad_norm": 1.775002580371089, "learning_rate": 6.133932947487129e-06, "loss": 0.9947, "step": 4134 }, { "epoch": 0.44, "grad_norm": 1.7486233352766245, "learning_rate": 6.132237170056291e-06, "loss": 0.9187, "step": 4135 }, { "epoch": 0.44, "grad_norm": 1.655032451748233, "learning_rate": 6.130541255337792e-06, "loss": 0.8396, "step": 4136 }, { "epoch": 0.44, "grad_norm": 1.6641662513128037, "learning_rate": 6.128845203537269e-06, "loss": 0.9251, "step": 4137 }, { "epoch": 0.44, "grad_norm": 1.6735195311240925, "learning_rate": 6.127149014860374e-06, "loss": 0.9133, "step": 4138 }, { "epoch": 0.44, "grad_norm": 2.212836225302568, "learning_rate": 6.125452689512774e-06, "loss": 0.8031, "step": 4139 }, { "epoch": 0.45, "grad_norm": 1.7329577904516487, "learning_rate": 6.123756227700156e-06, "loss": 0.9344, "step": 4140 }, { "epoch": 0.45, "grad_norm": 0.8415839705011045, "learning_rate": 6.122059629628221e-06, "loss": 1.0838, "step": 4141 }, { "epoch": 0.45, "grad_norm": 1.825704230416759, "learning_rate": 6.1203628955026874e-06, "loss": 0.9437, "step": 4142 }, { "epoch": 0.45, "grad_norm": 1.6848501693058413, "learning_rate": 6.118666025529292e-06, "loss": 0.8848, "step": 4143 }, { "epoch": 0.45, "grad_norm": 1.687868731574758, "learning_rate": 6.116969019913781e-06, "loss": 0.8752, "step": 4144 }, { "epoch": 0.45, "grad_norm": 1.8149817159463055, "learning_rate": 6.115271878861928e-06, "loss": 0.9666, "step": 4145 }, { "epoch": 0.45, "grad_norm": 1.7316397426823136, "learning_rate": 6.113574602579515e-06, "loss": 0.9504, "step": 4146 }, { "epoch": 0.45, "grad_norm": 1.763981974194585, "learning_rate": 6.1118771912723396e-06, "loss": 0.8779, "step": 4147 }, { "epoch": 0.45, "grad_norm": 1.8126288377661568, "learning_rate": 6.110179645146225e-06, "loss": 0.9693, "step": 4148 }, { "epoch": 0.45, "grad_norm": 1.7585475078131667, "learning_rate": 6.1084819644070004e-06, "loss": 0.8359, "step": 4149 }, { "epoch": 0.45, "grad_norm": 1.7328965246481918, "learning_rate": 6.106784149260514e-06, "loss": 0.8959, "step": 4150 }, { "epoch": 0.45, "grad_norm": 1.788033507189026, "learning_rate": 6.1050861999126355e-06, "loss": 0.9727, "step": 4151 }, { "epoch": 0.45, "grad_norm": 1.763627202658937, "learning_rate": 6.103388116569245e-06, "loss": 0.8772, "step": 4152 }, { "epoch": 0.45, "grad_norm": 1.6889138416481313, "learning_rate": 6.101689899436241e-06, "loss": 0.9229, "step": 4153 }, { "epoch": 0.45, "grad_norm": 1.8048592328971018, "learning_rate": 6.0999915487195395e-06, "loss": 0.915, "step": 4154 }, { "epoch": 0.45, "grad_norm": 1.8384004378930034, "learning_rate": 6.09829306462507e-06, "loss": 1.0008, "step": 4155 }, { "epoch": 0.45, "grad_norm": 1.8101276452294761, "learning_rate": 6.096594447358779e-06, "loss": 0.8511, "step": 4156 }, { "epoch": 0.45, "grad_norm": 1.7904662449676756, "learning_rate": 6.094895697126631e-06, "loss": 0.92, "step": 4157 }, { "epoch": 0.45, "grad_norm": 1.790616532908676, "learning_rate": 6.0931968141346054e-06, "loss": 0.9694, "step": 4158 }, { "epoch": 0.45, "grad_norm": 1.7711627198551483, "learning_rate": 6.091497798588699e-06, "loss": 0.9177, "step": 4159 }, { "epoch": 0.45, "grad_norm": 
1.625545222489051, "learning_rate": 6.08979865069492e-06, "loss": 0.7992, "step": 4160 }, { "epoch": 0.45, "grad_norm": 0.9112025291482904, "learning_rate": 6.088099370659297e-06, "loss": 1.0273, "step": 4161 }, { "epoch": 0.45, "grad_norm": 1.8204381485970937, "learning_rate": 6.086399958687875e-06, "loss": 0.9607, "step": 4162 }, { "epoch": 0.45, "grad_norm": 1.6948075399900262, "learning_rate": 6.0847004149867125e-06, "loss": 0.9564, "step": 4163 }, { "epoch": 0.45, "grad_norm": 0.8259800858785558, "learning_rate": 6.083000739761885e-06, "loss": 1.0227, "step": 4164 }, { "epoch": 0.45, "grad_norm": 1.7173192602465996, "learning_rate": 6.081300933219485e-06, "loss": 0.9705, "step": 4165 }, { "epoch": 0.45, "grad_norm": 1.7482220741144299, "learning_rate": 6.079600995565618e-06, "loss": 0.9117, "step": 4166 }, { "epoch": 0.45, "grad_norm": 0.8599253965464237, "learning_rate": 6.077900927006408e-06, "loss": 1.0675, "step": 4167 }, { "epoch": 0.45, "grad_norm": 1.7689279574854704, "learning_rate": 6.0762007277479955e-06, "loss": 1.0033, "step": 4168 }, { "epoch": 0.45, "grad_norm": 1.898672475557904, "learning_rate": 6.074500397996533e-06, "loss": 0.8752, "step": 4169 }, { "epoch": 0.45, "grad_norm": 1.6568772165901031, "learning_rate": 6.072799937958195e-06, "loss": 0.8991, "step": 4170 }, { "epoch": 0.45, "grad_norm": 1.6931204364974504, "learning_rate": 6.0710993478391645e-06, "loss": 0.9325, "step": 4171 }, { "epoch": 0.45, "grad_norm": 1.6199188526151513, "learning_rate": 6.0693986278456456e-06, "loss": 0.9271, "step": 4172 }, { "epoch": 0.45, "grad_norm": 1.6720757280955523, "learning_rate": 6.067697778183857e-06, "loss": 0.9944, "step": 4173 }, { "epoch": 0.45, "grad_norm": 1.6776850702841017, "learning_rate": 6.065996799060031e-06, "loss": 0.8823, "step": 4174 }, { "epoch": 0.45, "grad_norm": 1.864801747844051, "learning_rate": 6.064295690680418e-06, "loss": 0.9424, "step": 4175 }, { "epoch": 0.45, "grad_norm": 1.770172612206569, "learning_rate": 6.0625944532512845e-06, "loss": 0.9347, "step": 4176 }, { "epoch": 0.45, "grad_norm": 1.605435623035622, "learning_rate": 6.06089308697891e-06, "loss": 0.8525, "step": 4177 }, { "epoch": 0.45, "grad_norm": 1.74093231927897, "learning_rate": 6.05919159206959e-06, "loss": 0.8645, "step": 4178 }, { "epoch": 0.45, "grad_norm": 1.83362869926186, "learning_rate": 6.0574899687296385e-06, "loss": 0.9486, "step": 4179 }, { "epoch": 0.45, "grad_norm": 1.6872494288783138, "learning_rate": 6.055788217165384e-06, "loss": 0.902, "step": 4180 }, { "epoch": 0.45, "grad_norm": 1.8352697746365823, "learning_rate": 6.054086337583166e-06, "loss": 0.9087, "step": 4181 }, { "epoch": 0.45, "grad_norm": 1.8479977272584176, "learning_rate": 6.052384330189347e-06, "loss": 0.8954, "step": 4182 }, { "epoch": 0.45, "grad_norm": 1.8314958137444897, "learning_rate": 6.0506821951903e-06, "loss": 0.8738, "step": 4183 }, { "epoch": 0.45, "grad_norm": 0.9445605441243102, "learning_rate": 6.0489799327924125e-06, "loss": 1.0654, "step": 4184 }, { "epoch": 0.45, "grad_norm": 0.9012167092560271, "learning_rate": 6.0472775432020945e-06, "loss": 1.0733, "step": 4185 }, { "epoch": 0.45, "grad_norm": 1.6887057100296337, "learning_rate": 6.0455750266257625e-06, "loss": 0.9363, "step": 4186 }, { "epoch": 0.45, "grad_norm": 2.0889512013702944, "learning_rate": 6.043872383269855e-06, "loss": 0.8935, "step": 4187 }, { "epoch": 0.45, "grad_norm": 1.6822171667548373, "learning_rate": 6.04216961334082e-06, "loss": 0.859, "step": 4188 }, { "epoch": 0.45, "grad_norm": 1.991753969880893, 
"learning_rate": 6.040466717045127e-06, "loss": 0.912, "step": 4189 }, { "epoch": 0.45, "grad_norm": 1.7243561098013682, "learning_rate": 6.038763694589259e-06, "loss": 0.9024, "step": 4190 }, { "epoch": 0.45, "grad_norm": 1.667798355190635, "learning_rate": 6.0370605461797124e-06, "loss": 0.9364, "step": 4191 }, { "epoch": 0.45, "grad_norm": 1.702556719045791, "learning_rate": 6.035357272022997e-06, "loss": 0.9056, "step": 4192 }, { "epoch": 0.45, "grad_norm": 1.6750542875694823, "learning_rate": 6.033653872325644e-06, "loss": 0.8831, "step": 4193 }, { "epoch": 0.45, "grad_norm": 1.8223344963739267, "learning_rate": 6.031950347294196e-06, "loss": 0.9441, "step": 4194 }, { "epoch": 0.45, "grad_norm": 1.696875915901807, "learning_rate": 6.030246697135209e-06, "loss": 0.9297, "step": 4195 }, { "epoch": 0.45, "grad_norm": 1.734544743142106, "learning_rate": 6.028542922055261e-06, "loss": 0.8738, "step": 4196 }, { "epoch": 0.45, "grad_norm": 1.1163995622255767, "learning_rate": 6.026839022260936e-06, "loss": 1.0798, "step": 4197 }, { "epoch": 0.45, "grad_norm": 1.6709604116812578, "learning_rate": 6.0251349979588395e-06, "loss": 0.8527, "step": 4198 }, { "epoch": 0.45, "grad_norm": 1.7063395823550993, "learning_rate": 6.0234308493555906e-06, "loss": 0.8034, "step": 4199 }, { "epoch": 0.45, "grad_norm": 1.6817934376184094, "learning_rate": 6.021726576657823e-06, "loss": 0.8811, "step": 4200 }, { "epoch": 0.45, "grad_norm": 0.8639491505348974, "learning_rate": 6.0200221800721845e-06, "loss": 1.0423, "step": 4201 }, { "epoch": 0.45, "grad_norm": 1.7097166220123987, "learning_rate": 6.018317659805341e-06, "loss": 0.9341, "step": 4202 }, { "epoch": 0.45, "grad_norm": 1.8025752522178218, "learning_rate": 6.01661301606397e-06, "loss": 0.8928, "step": 4203 }, { "epoch": 0.45, "grad_norm": 1.8899662195314362, "learning_rate": 6.014908249054767e-06, "loss": 0.8807, "step": 4204 }, { "epoch": 0.45, "grad_norm": 1.6547038985566964, "learning_rate": 6.0132033589844395e-06, "loss": 0.8809, "step": 4205 }, { "epoch": 0.45, "grad_norm": 1.7031743476214085, "learning_rate": 6.011498346059712e-06, "loss": 0.9122, "step": 4206 }, { "epoch": 0.45, "grad_norm": 1.6879928488440026, "learning_rate": 6.009793210487323e-06, "loss": 0.9195, "step": 4207 }, { "epoch": 0.45, "grad_norm": 1.776632234515072, "learning_rate": 6.008087952474025e-06, "loss": 0.9295, "step": 4208 }, { "epoch": 0.45, "grad_norm": 1.7543257191893757, "learning_rate": 6.006382572226587e-06, "loss": 0.9405, "step": 4209 }, { "epoch": 0.45, "grad_norm": 1.8462332946543254, "learning_rate": 6.004677069951793e-06, "loss": 0.8343, "step": 4210 }, { "epoch": 0.45, "grad_norm": 1.7117119613914764, "learning_rate": 6.002971445856441e-06, "loss": 0.9286, "step": 4211 }, { "epoch": 0.45, "grad_norm": 1.9022704130135109, "learning_rate": 6.001265700147344e-06, "loss": 0.8858, "step": 4212 }, { "epoch": 0.45, "grad_norm": 1.739402805122118, "learning_rate": 5.999559833031328e-06, "loss": 0.8501, "step": 4213 }, { "epoch": 0.45, "grad_norm": 1.7626646900527327, "learning_rate": 5.997853844715237e-06, "loss": 0.9089, "step": 4214 }, { "epoch": 0.45, "grad_norm": 1.8759975857440898, "learning_rate": 5.996147735405926e-06, "loss": 0.9175, "step": 4215 }, { "epoch": 0.45, "grad_norm": 1.7941165170561362, "learning_rate": 5.994441505310269e-06, "loss": 0.9319, "step": 4216 }, { "epoch": 0.45, "grad_norm": 1.8850093931232688, "learning_rate": 5.992735154635151e-06, "loss": 0.8897, "step": 4217 }, { "epoch": 0.45, "grad_norm": 1.8003724313625709, "learning_rate": 
5.991028683587472e-06, "loss": 0.862, "step": 4218 }, { "epoch": 0.45, "grad_norm": 1.5674610925265062, "learning_rate": 5.98932209237415e-06, "loss": 0.9377, "step": 4219 }, { "epoch": 0.45, "grad_norm": 1.6836362719644644, "learning_rate": 5.987615381202112e-06, "loss": 0.8879, "step": 4220 }, { "epoch": 0.45, "grad_norm": 1.8092850972538288, "learning_rate": 5.985908550278306e-06, "loss": 0.8558, "step": 4221 }, { "epoch": 0.45, "grad_norm": 1.7639997714852444, "learning_rate": 5.98420159980969e-06, "loss": 0.9654, "step": 4222 }, { "epoch": 0.45, "grad_norm": 1.769710424022008, "learning_rate": 5.982494530003233e-06, "loss": 0.8426, "step": 4223 }, { "epoch": 0.45, "grad_norm": 1.7734298219549138, "learning_rate": 5.98078734106593e-06, "loss": 0.9465, "step": 4224 }, { "epoch": 0.45, "grad_norm": 1.740687329906866, "learning_rate": 5.97908003320478e-06, "loss": 0.8633, "step": 4225 }, { "epoch": 0.45, "grad_norm": 1.781047897234036, "learning_rate": 5.9773726066268e-06, "loss": 0.972, "step": 4226 }, { "epoch": 0.45, "grad_norm": 1.8462053547948605, "learning_rate": 5.9756650615390226e-06, "loss": 0.8841, "step": 4227 }, { "epoch": 0.45, "grad_norm": 0.9740545298295629, "learning_rate": 5.973957398148493e-06, "loss": 1.0412, "step": 4228 }, { "epoch": 0.45, "grad_norm": 1.6773555959092912, "learning_rate": 5.972249616662271e-06, "loss": 0.9932, "step": 4229 }, { "epoch": 0.45, "grad_norm": 1.7233561777747886, "learning_rate": 5.970541717287432e-06, "loss": 0.9499, "step": 4230 }, { "epoch": 0.45, "grad_norm": 1.776659568007338, "learning_rate": 5.968833700231062e-06, "loss": 0.9525, "step": 4231 }, { "epoch": 0.45, "grad_norm": 1.7306482699067538, "learning_rate": 5.967125565700266e-06, "loss": 0.9211, "step": 4232 }, { "epoch": 0.46, "grad_norm": 1.6726863561119891, "learning_rate": 5.965417313902162e-06, "loss": 0.9478, "step": 4233 }, { "epoch": 0.46, "grad_norm": 1.7176739152729557, "learning_rate": 5.9637089450438805e-06, "loss": 0.9949, "step": 4234 }, { "epoch": 0.46, "grad_norm": 1.717804756136136, "learning_rate": 5.962000459332566e-06, "loss": 0.9186, "step": 4235 }, { "epoch": 0.46, "grad_norm": 1.7704816125365117, "learning_rate": 5.96029185697538e-06, "loss": 0.9405, "step": 4236 }, { "epoch": 0.46, "grad_norm": 0.8219912416901443, "learning_rate": 5.958583138179494e-06, "loss": 1.0529, "step": 4237 }, { "epoch": 0.46, "grad_norm": 1.823968776385327, "learning_rate": 5.9568743031521e-06, "loss": 0.93, "step": 4238 }, { "epoch": 0.46, "grad_norm": 1.8856193668218744, "learning_rate": 5.955165352100399e-06, "loss": 0.8721, "step": 4239 }, { "epoch": 0.46, "grad_norm": 1.718614387197515, "learning_rate": 5.953456285231602e-06, "loss": 0.8777, "step": 4240 }, { "epoch": 0.46, "grad_norm": 1.7663012580521227, "learning_rate": 5.951747102752946e-06, "loss": 0.877, "step": 4241 }, { "epoch": 0.46, "grad_norm": 1.7048474312706776, "learning_rate": 5.950037804871673e-06, "loss": 0.8573, "step": 4242 }, { "epoch": 0.46, "grad_norm": 1.7539695336925303, "learning_rate": 5.948328391795038e-06, "loss": 0.9076, "step": 4243 }, { "epoch": 0.46, "grad_norm": 1.7562892889825772, "learning_rate": 5.9466188637303206e-06, "loss": 0.9824, "step": 4244 }, { "epoch": 0.46, "grad_norm": 1.6773661563568087, "learning_rate": 5.9449092208848e-06, "loss": 0.8925, "step": 4245 }, { "epoch": 0.46, "grad_norm": 1.6752701090936328, "learning_rate": 5.943199463465779e-06, "loss": 0.9111, "step": 4246 }, { "epoch": 0.46, "grad_norm": 1.7486793123120292, "learning_rate": 5.9414895916805705e-06, "loss": 
0.9232, "step": 4247 }, { "epoch": 0.46, "grad_norm": 1.6908071587137605, "learning_rate": 5.939779605736504e-06, "loss": 0.8137, "step": 4248 }, { "epoch": 0.46, "grad_norm": 1.7473477930003563, "learning_rate": 5.93806950584092e-06, "loss": 0.8317, "step": 4249 }, { "epoch": 0.46, "grad_norm": 1.6156932394585615, "learning_rate": 5.936359292201175e-06, "loss": 0.8703, "step": 4250 }, { "epoch": 0.46, "grad_norm": 1.8078277218393115, "learning_rate": 5.934648965024636e-06, "loss": 0.8293, "step": 4251 }, { "epoch": 0.46, "grad_norm": 1.6384893496248998, "learning_rate": 5.93293852451869e-06, "loss": 0.9121, "step": 4252 }, { "epoch": 0.46, "grad_norm": 1.575033611289935, "learning_rate": 5.931227970890732e-06, "loss": 0.9256, "step": 4253 }, { "epoch": 0.46, "grad_norm": 1.7964313342505334, "learning_rate": 5.92951730434817e-06, "loss": 0.8793, "step": 4254 }, { "epoch": 0.46, "grad_norm": 1.7706353809561772, "learning_rate": 5.927806525098432e-06, "loss": 0.8601, "step": 4255 }, { "epoch": 0.46, "grad_norm": 1.682682420705676, "learning_rate": 5.926095633348953e-06, "loss": 0.8827, "step": 4256 }, { "epoch": 0.46, "grad_norm": 1.7227003483172998, "learning_rate": 5.9243846293071846e-06, "loss": 0.8989, "step": 4257 }, { "epoch": 0.46, "grad_norm": 1.7027894467509042, "learning_rate": 5.922673513180596e-06, "loss": 0.9562, "step": 4258 }, { "epoch": 0.46, "grad_norm": 1.7394861850297625, "learning_rate": 5.9209622851766615e-06, "loss": 0.9081, "step": 4259 }, { "epoch": 0.46, "grad_norm": 1.853175846105959, "learning_rate": 5.919250945502875e-06, "loss": 0.857, "step": 4260 }, { "epoch": 0.46, "grad_norm": 1.9562459031184998, "learning_rate": 5.9175394943667415e-06, "loss": 0.8616, "step": 4261 }, { "epoch": 0.46, "grad_norm": 1.7369450636923218, "learning_rate": 5.915827931975782e-06, "loss": 0.9047, "step": 4262 }, { "epoch": 0.46, "grad_norm": 0.8864642680739013, "learning_rate": 5.914116258537528e-06, "loss": 1.0643, "step": 4263 }, { "epoch": 0.46, "grad_norm": 0.8781345712906389, "learning_rate": 5.912404474259525e-06, "loss": 1.0344, "step": 4264 }, { "epoch": 0.46, "grad_norm": 1.7446787164869402, "learning_rate": 5.910692579349336e-06, "loss": 0.8563, "step": 4265 }, { "epoch": 0.46, "grad_norm": 1.6737713352974184, "learning_rate": 5.908980574014532e-06, "loss": 0.9091, "step": 4266 }, { "epoch": 0.46, "grad_norm": 1.7718889829964044, "learning_rate": 5.907268458462699e-06, "loss": 0.9253, "step": 4267 }, { "epoch": 0.46, "grad_norm": 1.7780767141320226, "learning_rate": 5.905556232901437e-06, "loss": 0.9019, "step": 4268 }, { "epoch": 0.46, "grad_norm": 0.8431436385208149, "learning_rate": 5.90384389753836e-06, "loss": 1.034, "step": 4269 }, { "epoch": 0.46, "grad_norm": 1.881313864104935, "learning_rate": 5.902131452581097e-06, "loss": 0.8625, "step": 4270 }, { "epoch": 0.46, "grad_norm": 1.7501680225121614, "learning_rate": 5.900418898237281e-06, "loss": 0.9292, "step": 4271 }, { "epoch": 0.46, "grad_norm": 1.8245879395407776, "learning_rate": 5.8987062347145726e-06, "loss": 0.8894, "step": 4272 }, { "epoch": 0.46, "grad_norm": 1.7745937977611446, "learning_rate": 5.896993462220634e-06, "loss": 0.8966, "step": 4273 }, { "epoch": 0.46, "grad_norm": 1.6352873467373654, "learning_rate": 5.895280580963144e-06, "loss": 0.8852, "step": 4274 }, { "epoch": 0.46, "grad_norm": 1.751777238923234, "learning_rate": 5.8935675911498e-06, "loss": 0.958, "step": 4275 }, { "epoch": 0.46, "grad_norm": 1.848430217786073, "learning_rate": 5.891854492988303e-06, "loss": 0.8734, "step": 4276 }, { 
"epoch": 0.46, "grad_norm": 1.6723795175432834, "learning_rate": 5.890141286686375e-06, "loss": 0.8855, "step": 4277 }, { "epoch": 0.46, "grad_norm": 1.8169520772246268, "learning_rate": 5.888427972451747e-06, "loss": 0.8997, "step": 4278 }, { "epoch": 0.46, "grad_norm": 1.703262007909982, "learning_rate": 5.886714550492164e-06, "loss": 0.9783, "step": 4279 }, { "epoch": 0.46, "grad_norm": 1.734149021382422, "learning_rate": 5.8850010210153844e-06, "loss": 0.8702, "step": 4280 }, { "epoch": 0.46, "grad_norm": 1.7747630420125693, "learning_rate": 5.883287384229182e-06, "loss": 0.9107, "step": 4281 }, { "epoch": 0.46, "grad_norm": 1.788716356591265, "learning_rate": 5.881573640341336e-06, "loss": 0.906, "step": 4282 }, { "epoch": 0.46, "grad_norm": 1.729814698189558, "learning_rate": 5.879859789559649e-06, "loss": 0.9761, "step": 4283 }, { "epoch": 0.46, "grad_norm": 1.6354842820215787, "learning_rate": 5.878145832091929e-06, "loss": 0.8346, "step": 4284 }, { "epoch": 0.46, "grad_norm": 1.8784759212292081, "learning_rate": 5.876431768145998e-06, "loss": 0.8792, "step": 4285 }, { "epoch": 0.46, "grad_norm": 1.821416325371338, "learning_rate": 5.874717597929697e-06, "loss": 0.9151, "step": 4286 }, { "epoch": 0.46, "grad_norm": 1.6923328757063765, "learning_rate": 5.873003321650869e-06, "loss": 0.9621, "step": 4287 }, { "epoch": 0.46, "grad_norm": 0.8835304073228, "learning_rate": 5.871288939517378e-06, "loss": 1.0568, "step": 4288 }, { "epoch": 0.46, "grad_norm": 1.6401395836667843, "learning_rate": 5.8695744517371e-06, "loss": 0.8235, "step": 4289 }, { "epoch": 0.46, "grad_norm": 1.767899945410958, "learning_rate": 5.8678598585179226e-06, "loss": 0.8806, "step": 4290 }, { "epoch": 0.46, "grad_norm": 1.733540800078038, "learning_rate": 5.866145160067746e-06, "loss": 0.8872, "step": 4291 }, { "epoch": 0.46, "grad_norm": 1.725118061574199, "learning_rate": 5.8644303565944805e-06, "loss": 0.9158, "step": 4292 }, { "epoch": 0.46, "grad_norm": 1.6000106266898613, "learning_rate": 5.862715448306056e-06, "loss": 0.8362, "step": 4293 }, { "epoch": 0.46, "grad_norm": 1.7659261023299522, "learning_rate": 5.861000435410406e-06, "loss": 0.9669, "step": 4294 }, { "epoch": 0.46, "grad_norm": 1.6543448765509794, "learning_rate": 5.859285318115488e-06, "loss": 0.8886, "step": 4295 }, { "epoch": 0.46, "grad_norm": 1.6954925266652585, "learning_rate": 5.857570096629263e-06, "loss": 0.9006, "step": 4296 }, { "epoch": 0.46, "grad_norm": 1.6785283965901858, "learning_rate": 5.855854771159706e-06, "loss": 0.9192, "step": 4297 }, { "epoch": 0.46, "grad_norm": 1.687206935586285, "learning_rate": 5.854139341914808e-06, "loss": 0.8308, "step": 4298 }, { "epoch": 0.46, "grad_norm": 1.7646740196625315, "learning_rate": 5.8524238091025684e-06, "loss": 0.906, "step": 4299 }, { "epoch": 0.46, "grad_norm": 1.8273256207386464, "learning_rate": 5.850708172931005e-06, "loss": 0.8837, "step": 4300 }, { "epoch": 0.46, "grad_norm": 1.6832299428973034, "learning_rate": 5.848992433608143e-06, "loss": 0.9461, "step": 4301 }, { "epoch": 0.46, "grad_norm": 1.7575096572383964, "learning_rate": 5.847276591342021e-06, "loss": 0.9255, "step": 4302 }, { "epoch": 0.46, "grad_norm": 1.751066348809859, "learning_rate": 5.845560646340691e-06, "loss": 0.9738, "step": 4303 }, { "epoch": 0.46, "grad_norm": 1.7217843812268712, "learning_rate": 5.843844598812218e-06, "loss": 0.8337, "step": 4304 }, { "epoch": 0.46, "grad_norm": 1.7914325915333456, "learning_rate": 5.842128448964677e-06, "loss": 0.9462, "step": 4305 }, { "epoch": 0.46, "grad_norm": 
1.7017776142339078, "learning_rate": 5.84041219700616e-06, "loss": 0.9586, "step": 4306 }, { "epoch": 0.46, "grad_norm": 1.7140065350238547, "learning_rate": 5.838695843144766e-06, "loss": 0.9231, "step": 4307 }, { "epoch": 0.46, "grad_norm": 1.7225392057018107, "learning_rate": 5.836979387588611e-06, "loss": 0.9659, "step": 4308 }, { "epoch": 0.46, "grad_norm": 0.8647052960340152, "learning_rate": 5.835262830545817e-06, "loss": 1.0681, "step": 4309 }, { "epoch": 0.46, "grad_norm": 1.8192044238230678, "learning_rate": 5.833546172224527e-06, "loss": 0.9821, "step": 4310 }, { "epoch": 0.46, "grad_norm": 1.67182539202832, "learning_rate": 5.8318294128328885e-06, "loss": 0.9422, "step": 4311 }, { "epoch": 0.46, "grad_norm": 1.720461234822713, "learning_rate": 5.830112552579068e-06, "loss": 0.8885, "step": 4312 }, { "epoch": 0.46, "grad_norm": 1.8347582402011187, "learning_rate": 5.828395591671238e-06, "loss": 0.9903, "step": 4313 }, { "epoch": 0.46, "grad_norm": 1.6966756007422459, "learning_rate": 5.826678530317585e-06, "loss": 0.9046, "step": 4314 }, { "epoch": 0.46, "grad_norm": 1.5648275193457388, "learning_rate": 5.824961368726312e-06, "loss": 0.9417, "step": 4315 }, { "epoch": 0.46, "grad_norm": 1.7062030958926075, "learning_rate": 5.823244107105628e-06, "loss": 0.8618, "step": 4316 }, { "epoch": 0.46, "grad_norm": 1.933248628902336, "learning_rate": 5.821526745663758e-06, "loss": 0.9097, "step": 4317 }, { "epoch": 0.46, "grad_norm": 1.7658419677548072, "learning_rate": 5.8198092846089396e-06, "loss": 0.863, "step": 4318 }, { "epoch": 0.46, "grad_norm": 1.7273904409940526, "learning_rate": 5.818091724149417e-06, "loss": 0.9419, "step": 4319 }, { "epoch": 0.46, "grad_norm": 1.6376488801928495, "learning_rate": 5.816374064493453e-06, "loss": 0.881, "step": 4320 }, { "epoch": 0.46, "grad_norm": 1.7075981065594557, "learning_rate": 5.81465630584932e-06, "loss": 0.964, "step": 4321 }, { "epoch": 0.46, "grad_norm": 1.8815099912296813, "learning_rate": 5.812938448425299e-06, "loss": 0.9162, "step": 4322 }, { "epoch": 0.46, "grad_norm": 1.6225405223737397, "learning_rate": 5.811220492429692e-06, "loss": 0.9306, "step": 4323 }, { "epoch": 0.46, "grad_norm": 1.6361454733396816, "learning_rate": 5.809502438070801e-06, "loss": 0.7772, "step": 4324 }, { "epoch": 0.46, "grad_norm": 1.7562619549347187, "learning_rate": 5.80778428555695e-06, "loss": 0.9236, "step": 4325 }, { "epoch": 0.47, "grad_norm": 1.7720787192198995, "learning_rate": 5.8060660350964685e-06, "loss": 0.9124, "step": 4326 }, { "epoch": 0.47, "grad_norm": 1.6743200460432013, "learning_rate": 5.8043476868977025e-06, "loss": 0.8344, "step": 4327 }, { "epoch": 0.47, "grad_norm": 0.8811800336967499, "learning_rate": 5.802629241169006e-06, "loss": 1.0546, "step": 4328 }, { "epoch": 0.47, "grad_norm": 1.690157621779071, "learning_rate": 5.800910698118746e-06, "loss": 0.9243, "step": 4329 }, { "epoch": 0.47, "grad_norm": 1.7268529082398683, "learning_rate": 5.799192057955303e-06, "loss": 0.8579, "step": 4330 }, { "epoch": 0.47, "grad_norm": 1.7267972393192101, "learning_rate": 5.7974733208870686e-06, "loss": 0.9086, "step": 4331 }, { "epoch": 0.47, "grad_norm": 1.7468968471027715, "learning_rate": 5.795754487122444e-06, "loss": 0.9707, "step": 4332 }, { "epoch": 0.47, "grad_norm": 1.6661456279089482, "learning_rate": 5.794035556869843e-06, "loss": 0.9082, "step": 4333 }, { "epoch": 0.47, "grad_norm": 1.7209165527445007, "learning_rate": 5.792316530337696e-06, "loss": 0.9397, "step": 4334 }, { "epoch": 0.47, "grad_norm": 1.7664896586594365, 
"learning_rate": 5.790597407734437e-06, "loss": 0.9268, "step": 4335 }, { "epoch": 0.47, "grad_norm": 1.7738362724752272, "learning_rate": 5.788878189268516e-06, "loss": 0.8993, "step": 4336 }, { "epoch": 0.47, "grad_norm": 1.6874772805153853, "learning_rate": 5.787158875148396e-06, "loss": 0.8693, "step": 4337 }, { "epoch": 0.47, "grad_norm": 1.9704488355707166, "learning_rate": 5.785439465582549e-06, "loss": 1.0663, "step": 4338 }, { "epoch": 0.47, "grad_norm": 1.7014149290465737, "learning_rate": 5.783719960779458e-06, "loss": 0.8799, "step": 4339 }, { "epoch": 0.47, "grad_norm": 1.7803285993184608, "learning_rate": 5.78200036094762e-06, "loss": 0.8408, "step": 4340 }, { "epoch": 0.47, "grad_norm": 1.6676713181013065, "learning_rate": 5.780280666295544e-06, "loss": 0.8609, "step": 4341 }, { "epoch": 0.47, "grad_norm": 1.6392816024672245, "learning_rate": 5.778560877031744e-06, "loss": 0.8944, "step": 4342 }, { "epoch": 0.47, "grad_norm": 1.6731555632694153, "learning_rate": 5.7768409933647555e-06, "loss": 0.8302, "step": 4343 }, { "epoch": 0.47, "grad_norm": 1.6890219271176474, "learning_rate": 5.775121015503118e-06, "loss": 0.8722, "step": 4344 }, { "epoch": 0.47, "grad_norm": 1.701784821704374, "learning_rate": 5.773400943655385e-06, "loss": 1.0016, "step": 4345 }, { "epoch": 0.47, "grad_norm": 1.6413016385059154, "learning_rate": 5.771680778030122e-06, "loss": 0.9035, "step": 4346 }, { "epoch": 0.47, "grad_norm": 1.7642405622325543, "learning_rate": 5.769960518835902e-06, "loss": 0.99, "step": 4347 }, { "epoch": 0.47, "grad_norm": 1.855919933081425, "learning_rate": 5.768240166281317e-06, "loss": 0.9586, "step": 4348 }, { "epoch": 0.47, "grad_norm": 1.758492355538492, "learning_rate": 5.7665197205749636e-06, "loss": 0.9111, "step": 4349 }, { "epoch": 0.47, "grad_norm": 1.8175318259710618, "learning_rate": 5.764799181925449e-06, "loss": 0.866, "step": 4350 }, { "epoch": 0.47, "grad_norm": 1.721235413612071, "learning_rate": 5.763078550541399e-06, "loss": 0.9331, "step": 4351 }, { "epoch": 0.47, "grad_norm": 1.8006451985343097, "learning_rate": 5.761357826631444e-06, "loss": 0.895, "step": 4352 }, { "epoch": 0.47, "grad_norm": 1.7103740870150868, "learning_rate": 5.759637010404226e-06, "loss": 1.0018, "step": 4353 }, { "epoch": 0.47, "grad_norm": 1.5894063217712353, "learning_rate": 5.757916102068403e-06, "loss": 0.8712, "step": 4354 }, { "epoch": 0.47, "grad_norm": 2.023462572910606, "learning_rate": 5.75619510183264e-06, "loss": 0.8934, "step": 4355 }, { "epoch": 0.47, "grad_norm": 1.6190923394457406, "learning_rate": 5.754474009905613e-06, "loss": 0.9186, "step": 4356 }, { "epoch": 0.47, "grad_norm": 1.809752420592461, "learning_rate": 5.752752826496012e-06, "loss": 0.8934, "step": 4357 }, { "epoch": 0.47, "grad_norm": 1.7547080230966816, "learning_rate": 5.751031551812536e-06, "loss": 0.9264, "step": 4358 }, { "epoch": 0.47, "grad_norm": 1.848610475092547, "learning_rate": 5.749310186063892e-06, "loss": 0.9773, "step": 4359 }, { "epoch": 0.47, "grad_norm": 1.8259705643302377, "learning_rate": 5.74758872945881e-06, "loss": 0.8971, "step": 4360 }, { "epoch": 0.47, "grad_norm": 1.7016342958289672, "learning_rate": 5.745867182206013e-06, "loss": 0.9768, "step": 4361 }, { "epoch": 0.47, "grad_norm": 1.743427085760458, "learning_rate": 5.7441455445142505e-06, "loss": 0.8538, "step": 4362 }, { "epoch": 0.47, "grad_norm": 1.8119013095299743, "learning_rate": 5.7424238165922745e-06, "loss": 0.9652, "step": 4363 }, { "epoch": 0.47, "grad_norm": 1.7107975318307735, "learning_rate": 
5.74070199864885e-06, "loss": 0.9011, "step": 4364 }, { "epoch": 0.47, "grad_norm": 1.6979437689962997, "learning_rate": 5.7389800908927575e-06, "loss": 0.8717, "step": 4365 }, { "epoch": 0.47, "grad_norm": 1.8613498956215577, "learning_rate": 5.737258093532781e-06, "loss": 0.961, "step": 4366 }, { "epoch": 0.47, "grad_norm": 1.7767167747869528, "learning_rate": 5.735536006777717e-06, "loss": 0.9113, "step": 4367 }, { "epoch": 0.47, "grad_norm": 1.8073277804076218, "learning_rate": 5.733813830836377e-06, "loss": 0.9033, "step": 4368 }, { "epoch": 0.47, "grad_norm": 1.6884017835066574, "learning_rate": 5.73209156591758e-06, "loss": 0.8494, "step": 4369 }, { "epoch": 0.47, "grad_norm": 2.091243913649683, "learning_rate": 5.7303692122301566e-06, "loss": 0.8579, "step": 4370 }, { "epoch": 0.47, "grad_norm": 1.9874037224395742, "learning_rate": 5.728646769982951e-06, "loss": 0.9752, "step": 4371 }, { "epoch": 0.47, "grad_norm": 0.89495750280681, "learning_rate": 5.726924239384809e-06, "loss": 1.0992, "step": 4372 }, { "epoch": 0.47, "grad_norm": 1.6973270287082058, "learning_rate": 5.725201620644598e-06, "loss": 0.9101, "step": 4373 }, { "epoch": 0.47, "grad_norm": 0.8500862858025551, "learning_rate": 5.7234789139711914e-06, "loss": 1.0827, "step": 4374 }, { "epoch": 0.47, "grad_norm": 1.701599527327432, "learning_rate": 5.721756119573471e-06, "loss": 0.9428, "step": 4375 }, { "epoch": 0.47, "grad_norm": 2.022255303985465, "learning_rate": 5.720033237660334e-06, "loss": 0.8931, "step": 4376 }, { "epoch": 0.47, "grad_norm": 1.808461280304135, "learning_rate": 5.718310268440684e-06, "loss": 0.9132, "step": 4377 }, { "epoch": 0.47, "grad_norm": 1.7909915563865106, "learning_rate": 5.716587212123436e-06, "loss": 0.8678, "step": 4378 }, { "epoch": 0.47, "grad_norm": 1.7272557967321531, "learning_rate": 5.7148640689175194e-06, "loss": 0.8893, "step": 4379 }, { "epoch": 0.47, "grad_norm": 1.8429627517791256, "learning_rate": 5.713140839031871e-06, "loss": 0.8227, "step": 4380 }, { "epoch": 0.47, "grad_norm": 1.7817679701255957, "learning_rate": 5.711417522675437e-06, "loss": 0.9816, "step": 4381 }, { "epoch": 0.47, "grad_norm": 1.718625975627871, "learning_rate": 5.709694120057174e-06, "loss": 0.8872, "step": 4382 }, { "epoch": 0.47, "grad_norm": 1.726136968685944, "learning_rate": 5.707970631386054e-06, "loss": 0.9388, "step": 4383 }, { "epoch": 0.47, "grad_norm": 1.744502240291231, "learning_rate": 5.706247056871052e-06, "loss": 0.8295, "step": 4384 }, { "epoch": 0.47, "grad_norm": 1.740402938248681, "learning_rate": 5.704523396721162e-06, "loss": 0.9304, "step": 4385 }, { "epoch": 0.47, "grad_norm": 1.7095260944011252, "learning_rate": 5.702799651145381e-06, "loss": 0.8848, "step": 4386 }, { "epoch": 0.47, "grad_norm": 1.7219652715732094, "learning_rate": 5.701075820352718e-06, "loss": 0.9658, "step": 4387 }, { "epoch": 0.47, "grad_norm": 1.6797244914817266, "learning_rate": 5.699351904552196e-06, "loss": 0.8687, "step": 4388 }, { "epoch": 0.47, "grad_norm": 1.6895829540513125, "learning_rate": 5.697627903952845e-06, "loss": 0.8195, "step": 4389 }, { "epoch": 0.47, "grad_norm": 1.7694739505913462, "learning_rate": 5.695903818763703e-06, "loss": 0.8834, "step": 4390 }, { "epoch": 0.47, "grad_norm": 1.6748294607599246, "learning_rate": 5.694179649193826e-06, "loss": 0.8251, "step": 4391 }, { "epoch": 0.47, "grad_norm": 1.634224447186001, "learning_rate": 5.692455395452272e-06, "loss": 0.9045, "step": 4392 }, { "epoch": 0.47, "grad_norm": 1.841948887860489, "learning_rate": 5.6907310577481155e-06, 
"loss": 0.8925, "step": 4393 }, { "epoch": 0.47, "grad_norm": 1.804045643199622, "learning_rate": 5.6890066362904374e-06, "loss": 0.8938, "step": 4394 }, { "epoch": 0.47, "grad_norm": 1.8375508023946332, "learning_rate": 5.687282131288326e-06, "loss": 0.9708, "step": 4395 }, { "epoch": 0.47, "grad_norm": 1.758431757504343, "learning_rate": 5.68555754295089e-06, "loss": 0.8539, "step": 4396 }, { "epoch": 0.47, "grad_norm": 1.801633553963666, "learning_rate": 5.683832871487238e-06, "loss": 0.8795, "step": 4397 }, { "epoch": 0.47, "grad_norm": 1.6897793223317072, "learning_rate": 5.682108117106491e-06, "loss": 0.8703, "step": 4398 }, { "epoch": 0.47, "grad_norm": 1.7173681280307052, "learning_rate": 5.680383280017785e-06, "loss": 0.9006, "step": 4399 }, { "epoch": 0.47, "grad_norm": 0.9233956985962328, "learning_rate": 5.678658360430261e-06, "loss": 1.0783, "step": 4400 }, { "epoch": 0.47, "grad_norm": 1.8057696683195357, "learning_rate": 5.676933358553068e-06, "loss": 0.9528, "step": 4401 }, { "epoch": 0.47, "grad_norm": 1.7694931840829837, "learning_rate": 5.6752082745953765e-06, "loss": 0.8859, "step": 4402 }, { "epoch": 0.47, "grad_norm": 1.7382345113535036, "learning_rate": 5.673483108766349e-06, "loss": 0.9795, "step": 4403 }, { "epoch": 0.47, "grad_norm": 1.8703638602604409, "learning_rate": 5.671757861275176e-06, "loss": 0.9482, "step": 4404 }, { "epoch": 0.47, "grad_norm": 1.6826634622998033, "learning_rate": 5.670032532331045e-06, "loss": 0.8856, "step": 4405 }, { "epoch": 0.47, "grad_norm": 1.7224051876415518, "learning_rate": 5.668307122143159e-06, "loss": 0.9104, "step": 4406 }, { "epoch": 0.47, "grad_norm": 1.78867284449983, "learning_rate": 5.666581630920731e-06, "loss": 0.8498, "step": 4407 }, { "epoch": 0.47, "grad_norm": 0.8895480265320632, "learning_rate": 5.664856058872984e-06, "loss": 1.1357, "step": 4408 }, { "epoch": 0.47, "grad_norm": 1.8126786374096393, "learning_rate": 5.6631304062091456e-06, "loss": 0.9882, "step": 4409 }, { "epoch": 0.47, "grad_norm": 1.6491924368194437, "learning_rate": 5.661404673138459e-06, "loss": 0.9223, "step": 4410 }, { "epoch": 0.47, "grad_norm": 1.69417106399307, "learning_rate": 5.659678859870177e-06, "loss": 0.9057, "step": 4411 }, { "epoch": 0.47, "grad_norm": 1.752861565874866, "learning_rate": 5.657952966613558e-06, "loss": 0.9491, "step": 4412 }, { "epoch": 0.47, "grad_norm": 0.8398795317246768, "learning_rate": 5.656226993577875e-06, "loss": 1.0682, "step": 4413 }, { "epoch": 0.47, "grad_norm": 1.7757416164905326, "learning_rate": 5.654500940972405e-06, "loss": 0.9402, "step": 4414 }, { "epoch": 0.47, "grad_norm": 1.692296412796347, "learning_rate": 5.652774809006439e-06, "loss": 0.9632, "step": 4415 }, { "epoch": 0.47, "grad_norm": 1.8158247882279208, "learning_rate": 5.6510485978892785e-06, "loss": 0.9612, "step": 4416 }, { "epoch": 0.47, "grad_norm": 1.79296525299831, "learning_rate": 5.64932230783023e-06, "loss": 0.8147, "step": 4417 }, { "epoch": 0.47, "grad_norm": 1.6683349046646079, "learning_rate": 5.647595939038615e-06, "loss": 0.8932, "step": 4418 }, { "epoch": 0.48, "grad_norm": 1.7532381813383346, "learning_rate": 5.6458694917237575e-06, "loss": 0.8804, "step": 4419 }, { "epoch": 0.48, "grad_norm": 1.875324981687024, "learning_rate": 5.644142966094997e-06, "loss": 0.9121, "step": 4420 }, { "epoch": 0.48, "grad_norm": 1.7250095444930669, "learning_rate": 5.642416362361683e-06, "loss": 0.8858, "step": 4421 }, { "epoch": 0.48, "grad_norm": 1.7477866025864333, "learning_rate": 5.640689680733171e-06, "loss": 0.8414, "step": 
4422 }, { "epoch": 0.48, "grad_norm": 0.8346747913763409, "learning_rate": 5.638962921418825e-06, "loss": 1.0483, "step": 4423 }, { "epoch": 0.48, "grad_norm": 1.7822262298579257, "learning_rate": 5.6372360846280224e-06, "loss": 0.9589, "step": 4424 }, { "epoch": 0.48, "grad_norm": 0.8399044312559669, "learning_rate": 5.635509170570148e-06, "loss": 1.037, "step": 4425 }, { "epoch": 0.48, "grad_norm": 1.8897345073343004, "learning_rate": 5.633782179454594e-06, "loss": 0.8922, "step": 4426 }, { "epoch": 0.48, "grad_norm": 1.787888350903323, "learning_rate": 5.632055111490768e-06, "loss": 0.9482, "step": 4427 }, { "epoch": 0.48, "grad_norm": 0.8152861013273506, "learning_rate": 5.6303279668880794e-06, "loss": 1.0713, "step": 4428 }, { "epoch": 0.48, "grad_norm": 1.6874950696346482, "learning_rate": 5.6286007458559525e-06, "loss": 0.9487, "step": 4429 }, { "epoch": 0.48, "grad_norm": 1.9156831539560288, "learning_rate": 5.6268734486038164e-06, "loss": 0.8808, "step": 4430 }, { "epoch": 0.48, "grad_norm": 1.7602698730568485, "learning_rate": 5.6251460753411155e-06, "loss": 0.8671, "step": 4431 }, { "epoch": 0.48, "grad_norm": 1.6472782155053773, "learning_rate": 5.623418626277296e-06, "loss": 0.8815, "step": 4432 }, { "epoch": 0.48, "grad_norm": 1.7150741769902929, "learning_rate": 5.62169110162182e-06, "loss": 0.9352, "step": 4433 }, { "epoch": 0.48, "grad_norm": 1.8308043921206636, "learning_rate": 5.619963501584155e-06, "loss": 0.8969, "step": 4434 }, { "epoch": 0.48, "grad_norm": 1.7231722052151708, "learning_rate": 5.618235826373777e-06, "loss": 0.9038, "step": 4435 }, { "epoch": 0.48, "grad_norm": 1.6684172732091316, "learning_rate": 5.616508076200175e-06, "loss": 0.7979, "step": 4436 }, { "epoch": 0.48, "grad_norm": 1.7005244987061152, "learning_rate": 5.6147802512728425e-06, "loss": 0.9377, "step": 4437 }, { "epoch": 0.48, "grad_norm": 0.8403292853601343, "learning_rate": 5.613052351801284e-06, "loss": 1.0668, "step": 4438 }, { "epoch": 0.48, "grad_norm": 1.726947377021382, "learning_rate": 5.611324377995017e-06, "loss": 0.9467, "step": 4439 }, { "epoch": 0.48, "grad_norm": 0.8087873443779541, "learning_rate": 5.6095963300635585e-06, "loss": 1.0241, "step": 4440 }, { "epoch": 0.48, "grad_norm": 1.7533901335270679, "learning_rate": 5.607868208216445e-06, "loss": 0.9842, "step": 4441 }, { "epoch": 0.48, "grad_norm": 1.7716509060196968, "learning_rate": 5.606140012663215e-06, "loss": 0.8546, "step": 4442 }, { "epoch": 0.48, "grad_norm": 1.7499975141440463, "learning_rate": 5.604411743613418e-06, "loss": 0.96, "step": 4443 }, { "epoch": 0.48, "grad_norm": 1.7689560460983862, "learning_rate": 5.6026834012766155e-06, "loss": 0.8245, "step": 4444 }, { "epoch": 0.48, "grad_norm": 1.740357221417932, "learning_rate": 5.6009549858623735e-06, "loss": 0.8817, "step": 4445 }, { "epoch": 0.48, "grad_norm": 1.848031965802933, "learning_rate": 5.5992264975802646e-06, "loss": 0.9813, "step": 4446 }, { "epoch": 0.48, "grad_norm": 0.8391077751190138, "learning_rate": 5.597497936639879e-06, "loss": 1.0452, "step": 4447 }, { "epoch": 0.48, "grad_norm": 0.8202903954942881, "learning_rate": 5.595769303250809e-06, "loss": 1.0657, "step": 4448 }, { "epoch": 0.48, "grad_norm": 1.7870355975503178, "learning_rate": 5.594040597622654e-06, "loss": 0.8703, "step": 4449 }, { "epoch": 0.48, "grad_norm": 1.7269904186260898, "learning_rate": 5.592311819965033e-06, "loss": 0.942, "step": 4450 }, { "epoch": 0.48, "grad_norm": 0.8432462187731478, "learning_rate": 5.590582970487558e-06, "loss": 1.0937, "step": 4451 }, { 
"epoch": 0.48, "grad_norm": 1.8230030569635873, "learning_rate": 5.588854049399864e-06, "loss": 0.9654, "step": 4452 }, { "epoch": 0.48, "grad_norm": 1.6979856613558493, "learning_rate": 5.587125056911587e-06, "loss": 0.9216, "step": 4453 }, { "epoch": 0.48, "grad_norm": 1.8310370532320412, "learning_rate": 5.585395993232371e-06, "loss": 0.9739, "step": 4454 }, { "epoch": 0.48, "grad_norm": 1.7245828142409558, "learning_rate": 5.583666858571874e-06, "loss": 0.8603, "step": 4455 }, { "epoch": 0.48, "grad_norm": 1.68867402529456, "learning_rate": 5.581937653139757e-06, "loss": 0.9371, "step": 4456 }, { "epoch": 0.48, "grad_norm": 1.7943148084305938, "learning_rate": 5.580208377145693e-06, "loss": 0.8823, "step": 4457 }, { "epoch": 0.48, "grad_norm": 1.70423497123324, "learning_rate": 5.578479030799363e-06, "loss": 0.9329, "step": 4458 }, { "epoch": 0.48, "grad_norm": 1.759876556134152, "learning_rate": 5.576749614310457e-06, "loss": 0.8506, "step": 4459 }, { "epoch": 0.48, "grad_norm": 1.7584029333198166, "learning_rate": 5.575020127888672e-06, "loss": 0.9239, "step": 4460 }, { "epoch": 0.48, "grad_norm": 1.754783794346986, "learning_rate": 5.573290571743715e-06, "loss": 0.9784, "step": 4461 }, { "epoch": 0.48, "grad_norm": 0.8732674872020809, "learning_rate": 5.5715609460852985e-06, "loss": 1.0161, "step": 4462 }, { "epoch": 0.48, "grad_norm": 0.8346460293774912, "learning_rate": 5.5698312511231454e-06, "loss": 1.0411, "step": 4463 }, { "epoch": 0.48, "grad_norm": 1.7025912552605986, "learning_rate": 5.568101487066991e-06, "loss": 0.8691, "step": 4464 }, { "epoch": 0.48, "grad_norm": 1.7238269248882718, "learning_rate": 5.566371654126573e-06, "loss": 0.8562, "step": 4465 }, { "epoch": 0.48, "grad_norm": 1.674195755641617, "learning_rate": 5.564641752511637e-06, "loss": 0.9839, "step": 4466 }, { "epoch": 0.48, "grad_norm": 0.8666056456666086, "learning_rate": 5.562911782431943e-06, "loss": 1.0398, "step": 4467 }, { "epoch": 0.48, "grad_norm": 1.7062201295774624, "learning_rate": 5.561181744097255e-06, "loss": 0.895, "step": 4468 }, { "epoch": 0.48, "grad_norm": 2.640549685719731, "learning_rate": 5.559451637717346e-06, "loss": 0.9691, "step": 4469 }, { "epoch": 0.48, "grad_norm": 0.8607872043163429, "learning_rate": 5.557721463501997e-06, "loss": 1.0335, "step": 4470 }, { "epoch": 0.48, "grad_norm": 1.6426565399455073, "learning_rate": 5.555991221660998e-06, "loss": 0.8849, "step": 4471 }, { "epoch": 0.48, "grad_norm": 0.8158690942758193, "learning_rate": 5.554260912404146e-06, "loss": 1.0659, "step": 4472 }, { "epoch": 0.48, "grad_norm": 1.6770186589003653, "learning_rate": 5.5525305359412485e-06, "loss": 0.9103, "step": 4473 }, { "epoch": 0.48, "grad_norm": 1.8198258533851601, "learning_rate": 5.550800092482117e-06, "loss": 0.9469, "step": 4474 }, { "epoch": 0.48, "grad_norm": 1.875535777489499, "learning_rate": 5.549069582236577e-06, "loss": 0.8955, "step": 4475 }, { "epoch": 0.48, "grad_norm": 1.6917452485260298, "learning_rate": 5.547339005414457e-06, "loss": 0.8596, "step": 4476 }, { "epoch": 0.48, "grad_norm": 1.7767330298346917, "learning_rate": 5.545608362225594e-06, "loss": 0.8949, "step": 4477 }, { "epoch": 0.48, "grad_norm": 1.6559786149165756, "learning_rate": 5.543877652879838e-06, "loss": 0.9589, "step": 4478 }, { "epoch": 0.48, "grad_norm": 1.6823744541613697, "learning_rate": 5.542146877587042e-06, "loss": 0.8964, "step": 4479 }, { "epoch": 0.48, "grad_norm": 0.9399703813497159, "learning_rate": 5.540416036557064e-06, "loss": 1.0678, "step": 4480 }, { "epoch": 0.48, 
"grad_norm": 1.8958916218514894, "learning_rate": 5.538685129999782e-06, "loss": 0.9084, "step": 4481 }, { "epoch": 0.48, "grad_norm": 1.835581180906324, "learning_rate": 5.536954158125071e-06, "loss": 0.8757, "step": 4482 }, { "epoch": 0.48, "grad_norm": 1.7588361818462706, "learning_rate": 5.535223121142817e-06, "loss": 0.8766, "step": 4483 }, { "epoch": 0.48, "grad_norm": 1.693264456882381, "learning_rate": 5.5334920192629135e-06, "loss": 0.8754, "step": 4484 }, { "epoch": 0.48, "grad_norm": 1.8138872771006682, "learning_rate": 5.531760852695264e-06, "loss": 0.9052, "step": 4485 }, { "epoch": 0.48, "grad_norm": 1.7071780273815909, "learning_rate": 5.530029621649777e-06, "loss": 0.8715, "step": 4486 }, { "epoch": 0.48, "grad_norm": 1.7696725679921885, "learning_rate": 5.528298326336372e-06, "loss": 0.9761, "step": 4487 }, { "epoch": 0.48, "grad_norm": 1.77021995286334, "learning_rate": 5.5265669669649725e-06, "loss": 0.9308, "step": 4488 }, { "epoch": 0.48, "grad_norm": 1.8538213495987061, "learning_rate": 5.524835543745515e-06, "loss": 0.9196, "step": 4489 }, { "epoch": 0.48, "grad_norm": 1.6822729512115473, "learning_rate": 5.523104056887937e-06, "loss": 0.8081, "step": 4490 }, { "epoch": 0.48, "grad_norm": 1.7688068196747344, "learning_rate": 5.521372506602187e-06, "loss": 0.8839, "step": 4491 }, { "epoch": 0.48, "grad_norm": 1.8136981048524516, "learning_rate": 5.519640893098227e-06, "loss": 0.9514, "step": 4492 }, { "epoch": 0.48, "grad_norm": 1.7286030736407574, "learning_rate": 5.517909216586015e-06, "loss": 0.9201, "step": 4493 }, { "epoch": 0.48, "grad_norm": 1.7697904809857206, "learning_rate": 5.516177477275524e-06, "loss": 0.8694, "step": 4494 }, { "epoch": 0.48, "grad_norm": 1.8336896661387727, "learning_rate": 5.514445675376736e-06, "loss": 0.884, "step": 4495 }, { "epoch": 0.48, "grad_norm": 1.7412022300923362, "learning_rate": 5.512713811099636e-06, "loss": 0.8835, "step": 4496 }, { "epoch": 0.48, "grad_norm": 1.7070109001882205, "learning_rate": 5.510981884654217e-06, "loss": 0.9104, "step": 4497 }, { "epoch": 0.48, "grad_norm": 1.5698683202437849, "learning_rate": 5.509249896250486e-06, "loss": 0.7937, "step": 4498 }, { "epoch": 0.48, "grad_norm": 1.6364746365179967, "learning_rate": 5.507517846098447e-06, "loss": 0.9448, "step": 4499 }, { "epoch": 0.48, "grad_norm": 1.7903048684370588, "learning_rate": 5.505785734408121e-06, "loss": 0.9341, "step": 4500 }, { "epoch": 0.48, "grad_norm": 1.6832840485687173, "learning_rate": 5.504053561389531e-06, "loss": 0.852, "step": 4501 }, { "epoch": 0.48, "grad_norm": 0.9081902846469121, "learning_rate": 5.502321327252708e-06, "loss": 1.0367, "step": 4502 }, { "epoch": 0.48, "grad_norm": 1.7330104159153432, "learning_rate": 5.500589032207693e-06, "loss": 0.9159, "step": 4503 }, { "epoch": 0.48, "grad_norm": 1.5751818060098317, "learning_rate": 5.4988566764645315e-06, "loss": 0.9309, "step": 4504 }, { "epoch": 0.48, "grad_norm": 1.7095080923292634, "learning_rate": 5.497124260233278e-06, "loss": 0.9391, "step": 4505 }, { "epoch": 0.48, "grad_norm": 1.8726116025603265, "learning_rate": 5.495391783723993e-06, "loss": 0.9125, "step": 4506 }, { "epoch": 0.48, "grad_norm": 1.7277559920204868, "learning_rate": 5.493659247146749e-06, "loss": 1.0202, "step": 4507 }, { "epoch": 0.48, "grad_norm": 1.8453142056947867, "learning_rate": 5.491926650711618e-06, "loss": 0.9223, "step": 4508 }, { "epoch": 0.48, "grad_norm": 1.7134062460809283, "learning_rate": 5.490193994628685e-06, "loss": 0.9147, "step": 4509 }, { "epoch": 0.48, "grad_norm": 
1.6763299486919163, "learning_rate": 5.488461279108041e-06, "loss": 0.931, "step": 4510 }, { "epoch": 0.48, "grad_norm": 1.6719689144519583, "learning_rate": 5.486728504359781e-06, "loss": 0.7988, "step": 4511 }, { "epoch": 0.49, "grad_norm": 0.8806268342551522, "learning_rate": 5.484995670594012e-06, "loss": 1.0604, "step": 4512 }, { "epoch": 0.49, "grad_norm": 1.8528297403604825, "learning_rate": 5.483262778020847e-06, "loss": 0.9903, "step": 4513 }, { "epoch": 0.49, "grad_norm": 1.7233122080116507, "learning_rate": 5.481529826850404e-06, "loss": 0.9204, "step": 4514 }, { "epoch": 0.49, "grad_norm": 1.8724917390101656, "learning_rate": 5.479796817292809e-06, "loss": 0.9492, "step": 4515 }, { "epoch": 0.49, "grad_norm": 1.7332733474100304, "learning_rate": 5.478063749558195e-06, "loss": 0.9193, "step": 4516 }, { "epoch": 0.49, "grad_norm": 1.5131083453592364, "learning_rate": 5.476330623856705e-06, "loss": 0.8557, "step": 4517 }, { "epoch": 0.49, "grad_norm": 1.8462212356994248, "learning_rate": 5.474597440398483e-06, "loss": 0.971, "step": 4518 }, { "epoch": 0.49, "grad_norm": 1.6641438366282582, "learning_rate": 5.472864199393687e-06, "loss": 0.9228, "step": 4519 }, { "epoch": 0.49, "grad_norm": 1.61193354212265, "learning_rate": 5.4711309010524764e-06, "loss": 0.8517, "step": 4520 }, { "epoch": 0.49, "grad_norm": 1.5819466838445126, "learning_rate": 5.469397545585019e-06, "loss": 0.9544, "step": 4521 }, { "epoch": 0.49, "grad_norm": 1.7745831685862068, "learning_rate": 5.46766413320149e-06, "loss": 0.9264, "step": 4522 }, { "epoch": 0.49, "grad_norm": 1.7479392398203222, "learning_rate": 5.465930664112073e-06, "loss": 0.947, "step": 4523 }, { "epoch": 0.49, "grad_norm": 1.7699435927339686, "learning_rate": 5.46419713852696e-06, "loss": 0.9375, "step": 4524 }, { "epoch": 0.49, "grad_norm": 1.697043144119612, "learning_rate": 5.46246355665634e-06, "loss": 0.8739, "step": 4525 }, { "epoch": 0.49, "grad_norm": 1.6160032936047493, "learning_rate": 5.4607299187104205e-06, "loss": 0.9482, "step": 4526 }, { "epoch": 0.49, "grad_norm": 1.8449851611342056, "learning_rate": 5.458996224899409e-06, "loss": 0.9069, "step": 4527 }, { "epoch": 0.49, "grad_norm": 1.791852077255882, "learning_rate": 5.457262475433523e-06, "loss": 1.0281, "step": 4528 }, { "epoch": 0.49, "grad_norm": 1.709995209549197, "learning_rate": 5.455528670522987e-06, "loss": 0.9235, "step": 4529 }, { "epoch": 0.49, "grad_norm": 1.791368373247366, "learning_rate": 5.453794810378028e-06, "loss": 0.9479, "step": 4530 }, { "epoch": 0.49, "grad_norm": 1.7743050131258544, "learning_rate": 5.452060895208883e-06, "loss": 0.9537, "step": 4531 }, { "epoch": 0.49, "grad_norm": 1.698317462349543, "learning_rate": 5.450326925225798e-06, "loss": 0.9319, "step": 4532 }, { "epoch": 0.49, "grad_norm": 0.8034785590687042, "learning_rate": 5.44859290063902e-06, "loss": 1.0419, "step": 4533 }, { "epoch": 0.49, "grad_norm": 1.6673383191199331, "learning_rate": 5.446858821658806e-06, "loss": 0.8256, "step": 4534 }, { "epoch": 0.49, "grad_norm": 0.8114801495635167, "learning_rate": 5.44512468849542e-06, "loss": 1.0723, "step": 4535 }, { "epoch": 0.49, "grad_norm": 1.7819432992158732, "learning_rate": 5.44339050135913e-06, "loss": 0.9138, "step": 4536 }, { "epoch": 0.49, "grad_norm": 1.7496730093423345, "learning_rate": 5.441656260460213e-06, "loss": 0.9306, "step": 4537 }, { "epoch": 0.49, "grad_norm": 1.897373896804874, "learning_rate": 5.439921966008953e-06, "loss": 0.9549, "step": 4538 }, { "epoch": 0.49, "grad_norm": 1.8681262733048793, 
"learning_rate": 5.438187618215636e-06, "loss": 0.8661, "step": 4539 }, { "epoch": 0.49, "grad_norm": 1.721289786065305, "learning_rate": 5.436453217290562e-06, "loss": 0.8695, "step": 4540 }, { "epoch": 0.49, "grad_norm": 1.6557071408330994, "learning_rate": 5.43471876344403e-06, "loss": 0.9017, "step": 4541 }, { "epoch": 0.49, "grad_norm": 1.6925655724312247, "learning_rate": 5.432984256886348e-06, "loss": 0.9014, "step": 4542 }, { "epoch": 0.49, "grad_norm": 1.7838152948451536, "learning_rate": 5.431249697827833e-06, "loss": 0.8776, "step": 4543 }, { "epoch": 0.49, "grad_norm": 1.6716142034827348, "learning_rate": 5.429515086478805e-06, "loss": 0.9445, "step": 4544 }, { "epoch": 0.49, "grad_norm": 1.629113029553762, "learning_rate": 5.4277804230495935e-06, "loss": 0.8494, "step": 4545 }, { "epoch": 0.49, "grad_norm": 1.7549392390176415, "learning_rate": 5.426045707750529e-06, "loss": 0.9589, "step": 4546 }, { "epoch": 0.49, "grad_norm": 1.7831024032422127, "learning_rate": 5.424310940791954e-06, "loss": 0.93, "step": 4547 }, { "epoch": 0.49, "grad_norm": 1.7903773691550644, "learning_rate": 5.422576122384216e-06, "loss": 0.8886, "step": 4548 }, { "epoch": 0.49, "grad_norm": 1.7453412087419156, "learning_rate": 5.420841252737664e-06, "loss": 0.8657, "step": 4549 }, { "epoch": 0.49, "grad_norm": 1.6574610935402623, "learning_rate": 5.419106332062661e-06, "loss": 0.9364, "step": 4550 }, { "epoch": 0.49, "grad_norm": 1.7551367727458511, "learning_rate": 5.41737136056957e-06, "loss": 0.9112, "step": 4551 }, { "epoch": 0.49, "grad_norm": 1.7209471093287045, "learning_rate": 5.415636338468763e-06, "loss": 0.9443, "step": 4552 }, { "epoch": 0.49, "grad_norm": 1.808813854554889, "learning_rate": 5.413901265970616e-06, "loss": 0.9397, "step": 4553 }, { "epoch": 0.49, "grad_norm": 1.7447499149616428, "learning_rate": 5.412166143285515e-06, "loss": 0.8807, "step": 4554 }, { "epoch": 0.49, "grad_norm": 1.7024341385040747, "learning_rate": 5.410430970623847e-06, "loss": 0.9243, "step": 4555 }, { "epoch": 0.49, "grad_norm": 1.8156177987813094, "learning_rate": 5.408695748196009e-06, "loss": 0.9272, "step": 4556 }, { "epoch": 0.49, "grad_norm": 1.7028552537428028, "learning_rate": 5.406960476212403e-06, "loss": 0.8886, "step": 4557 }, { "epoch": 0.49, "grad_norm": 1.6560708558979378, "learning_rate": 5.405225154883436e-06, "loss": 0.902, "step": 4558 }, { "epoch": 0.49, "grad_norm": 1.782467321274546, "learning_rate": 5.403489784419521e-06, "loss": 0.8634, "step": 4559 }, { "epoch": 0.49, "grad_norm": 1.7021959633169952, "learning_rate": 5.40175436503108e-06, "loss": 0.8631, "step": 4560 }, { "epoch": 0.49, "grad_norm": 0.8941363983000871, "learning_rate": 5.400018896928537e-06, "loss": 1.0344, "step": 4561 }, { "epoch": 0.49, "grad_norm": 1.7703053327094236, "learning_rate": 5.398283380322323e-06, "loss": 0.8696, "step": 4562 }, { "epoch": 0.49, "grad_norm": 1.8130593245448887, "learning_rate": 5.396547815422876e-06, "loss": 0.9531, "step": 4563 }, { "epoch": 0.49, "grad_norm": 1.6952485660615186, "learning_rate": 5.3948122024406405e-06, "loss": 0.93, "step": 4564 }, { "epoch": 0.49, "grad_norm": 1.585048755078583, "learning_rate": 5.393076541586063e-06, "loss": 0.9373, "step": 4565 }, { "epoch": 0.49, "grad_norm": 1.6633059819815623, "learning_rate": 5.391340833069601e-06, "loss": 0.8809, "step": 4566 }, { "epoch": 0.49, "grad_norm": 1.6165995737699843, "learning_rate": 5.389605077101713e-06, "loss": 1.0072, "step": 4567 }, { "epoch": 0.49, "grad_norm": 1.7032233371267007, "learning_rate": 
5.387869273892866e-06, "loss": 0.9313, "step": 4568 }, { "epoch": 0.49, "grad_norm": 1.7533925224009796, "learning_rate": 5.386133423653532e-06, "loss": 0.9002, "step": 4569 }, { "epoch": 0.49, "grad_norm": 1.6612093620113988, "learning_rate": 5.3843975265941896e-06, "loss": 0.9982, "step": 4570 }, { "epoch": 0.49, "grad_norm": 1.6652564903991782, "learning_rate": 5.382661582925322e-06, "loss": 0.9214, "step": 4571 }, { "epoch": 0.49, "grad_norm": 1.7069809071106985, "learning_rate": 5.38092559285742e-06, "loss": 0.9141, "step": 4572 }, { "epoch": 0.49, "grad_norm": 1.7712882949730695, "learning_rate": 5.379189556600974e-06, "loss": 0.9097, "step": 4573 }, { "epoch": 0.49, "grad_norm": 1.748173324651378, "learning_rate": 5.3774534743664885e-06, "loss": 0.8223, "step": 4574 }, { "epoch": 0.49, "grad_norm": 1.7099773912089573, "learning_rate": 5.375717346364467e-06, "loss": 0.9251, "step": 4575 }, { "epoch": 0.49, "grad_norm": 1.7386085556116726, "learning_rate": 5.373981172805421e-06, "loss": 0.9596, "step": 4576 }, { "epoch": 0.49, "grad_norm": 1.8256980004521808, "learning_rate": 5.3722449538998725e-06, "loss": 0.8583, "step": 4577 }, { "epoch": 0.49, "grad_norm": 1.7020209597649143, "learning_rate": 5.370508689858336e-06, "loss": 0.8723, "step": 4578 }, { "epoch": 0.49, "grad_norm": 1.7029699950435835, "learning_rate": 5.368772380891345e-06, "loss": 0.9415, "step": 4579 }, { "epoch": 0.49, "grad_norm": 1.7523055066111035, "learning_rate": 5.367036027209431e-06, "loss": 0.9341, "step": 4580 }, { "epoch": 0.49, "grad_norm": 1.8471106687703471, "learning_rate": 5.365299629023134e-06, "loss": 0.9037, "step": 4581 }, { "epoch": 0.49, "grad_norm": 1.727985401031493, "learning_rate": 5.363563186542997e-06, "loss": 0.7947, "step": 4582 }, { "epoch": 0.49, "grad_norm": 1.8157869200489005, "learning_rate": 5.361826699979571e-06, "loss": 0.9399, "step": 4583 }, { "epoch": 0.49, "grad_norm": 1.7338578116199364, "learning_rate": 5.36009016954341e-06, "loss": 0.8736, "step": 4584 }, { "epoch": 0.49, "grad_norm": 1.7494954011122297, "learning_rate": 5.358353595445074e-06, "loss": 0.9028, "step": 4585 }, { "epoch": 0.49, "grad_norm": 1.6984709997572656, "learning_rate": 5.35661697789513e-06, "loss": 0.929, "step": 4586 }, { "epoch": 0.49, "grad_norm": 1.7399188352381914, "learning_rate": 5.354880317104145e-06, "loss": 0.9626, "step": 4587 }, { "epoch": 0.49, "grad_norm": 1.6890655330285105, "learning_rate": 5.353143613282702e-06, "loss": 0.8139, "step": 4588 }, { "epoch": 0.49, "grad_norm": 1.853510247665804, "learning_rate": 5.351406866641378e-06, "loss": 0.8917, "step": 4589 }, { "epoch": 0.49, "grad_norm": 0.8575042507183054, "learning_rate": 5.349670077390757e-06, "loss": 1.0534, "step": 4590 }, { "epoch": 0.49, "grad_norm": 1.73827405305754, "learning_rate": 5.347933245741435e-06, "loss": 0.9241, "step": 4591 }, { "epoch": 0.49, "grad_norm": 1.8380803560071384, "learning_rate": 5.346196371904009e-06, "loss": 0.9246, "step": 4592 }, { "epoch": 0.49, "grad_norm": 1.8254939727833401, "learning_rate": 5.344459456089078e-06, "loss": 0.9106, "step": 4593 }, { "epoch": 0.49, "grad_norm": 1.6816637952038043, "learning_rate": 5.342722498507251e-06, "loss": 0.8356, "step": 4594 }, { "epoch": 0.49, "grad_norm": 1.783708588022164, "learning_rate": 5.340985499369138e-06, "loss": 0.984, "step": 4595 }, { "epoch": 0.49, "grad_norm": 1.781469591974205, "learning_rate": 5.33924845888536e-06, "loss": 0.9861, "step": 4596 }, { "epoch": 0.49, "grad_norm": 1.6732049514752332, "learning_rate": 5.337511377266535e-06, 
"loss": 0.885, "step": 4597 }, { "epoch": 0.49, "grad_norm": 1.8156133969968213, "learning_rate": 5.335774254723293e-06, "loss": 0.8519, "step": 4598 }, { "epoch": 0.49, "grad_norm": 1.8270018084042678, "learning_rate": 5.334037091466265e-06, "loss": 0.9226, "step": 4599 }, { "epoch": 0.49, "grad_norm": 1.6076775701121917, "learning_rate": 5.332299887706087e-06, "loss": 0.9099, "step": 4600 }, { "epoch": 0.49, "grad_norm": 1.8093160941863644, "learning_rate": 5.330562643653402e-06, "loss": 0.9659, "step": 4601 }, { "epoch": 0.49, "grad_norm": 1.7685595039435356, "learning_rate": 5.3288253595188575e-06, "loss": 0.9433, "step": 4602 }, { "epoch": 0.49, "grad_norm": 1.7669769730839444, "learning_rate": 5.327088035513105e-06, "loss": 0.9388, "step": 4603 }, { "epoch": 0.49, "grad_norm": 1.7542731992182852, "learning_rate": 5.325350671846801e-06, "loss": 0.9532, "step": 4604 }, { "epoch": 0.5, "grad_norm": 2.280128039724611, "learning_rate": 5.323613268730605e-06, "loss": 0.9025, "step": 4605 }, { "epoch": 0.5, "grad_norm": 1.8616513211245294, "learning_rate": 5.321875826375185e-06, "loss": 0.8365, "step": 4606 }, { "epoch": 0.5, "grad_norm": 2.0563842223177935, "learning_rate": 5.32013834499121e-06, "loss": 0.9732, "step": 4607 }, { "epoch": 0.5, "grad_norm": 0.8761448601317794, "learning_rate": 5.3184008247893595e-06, "loss": 1.006, "step": 4608 }, { "epoch": 0.5, "grad_norm": 1.686803324532385, "learning_rate": 5.31666326598031e-06, "loss": 0.8534, "step": 4609 }, { "epoch": 0.5, "grad_norm": 1.7103775041712825, "learning_rate": 5.314925668774747e-06, "loss": 0.9691, "step": 4610 }, { "epoch": 0.5, "grad_norm": 1.7327069199147556, "learning_rate": 5.313188033383363e-06, "loss": 0.9939, "step": 4611 }, { "epoch": 0.5, "grad_norm": 1.6494755203400253, "learning_rate": 5.311450360016848e-06, "loss": 0.8311, "step": 4612 }, { "epoch": 0.5, "grad_norm": 1.7483451268583148, "learning_rate": 5.3097126488859044e-06, "loss": 0.8875, "step": 4613 }, { "epoch": 0.5, "grad_norm": 1.6990428584893351, "learning_rate": 5.3079749002012345e-06, "loss": 0.8689, "step": 4614 }, { "epoch": 0.5, "grad_norm": 1.8658343120710026, "learning_rate": 5.306237114173546e-06, "loss": 0.8778, "step": 4615 }, { "epoch": 0.5, "grad_norm": 1.6350843307691532, "learning_rate": 5.3044992910135515e-06, "loss": 0.8677, "step": 4616 }, { "epoch": 0.5, "grad_norm": 1.7249692060686297, "learning_rate": 5.302761430931969e-06, "loss": 0.8798, "step": 4617 }, { "epoch": 0.5, "grad_norm": 1.825229117102335, "learning_rate": 5.301023534139516e-06, "loss": 0.8657, "step": 4618 }, { "epoch": 0.5, "grad_norm": 1.6677073739588832, "learning_rate": 5.299285600846926e-06, "loss": 0.8882, "step": 4619 }, { "epoch": 0.5, "grad_norm": 1.8222687665777388, "learning_rate": 5.297547631264923e-06, "loss": 0.9094, "step": 4620 }, { "epoch": 0.5, "grad_norm": 1.7832214996826337, "learning_rate": 5.2958096256042435e-06, "loss": 0.9459, "step": 4621 }, { "epoch": 0.5, "grad_norm": 0.8134629872960856, "learning_rate": 5.294071584075628e-06, "loss": 1.0521, "step": 4622 }, { "epoch": 0.5, "grad_norm": 1.9240228333849383, "learning_rate": 5.2923335068898195e-06, "loss": 0.9827, "step": 4623 }, { "epoch": 0.5, "grad_norm": 1.7323451618256709, "learning_rate": 5.290595394257564e-06, "loss": 0.8631, "step": 4624 }, { "epoch": 0.5, "grad_norm": 1.7335165381453945, "learning_rate": 5.288857246389617e-06, "loss": 0.8743, "step": 4625 }, { "epoch": 0.5, "grad_norm": 1.6816715027045794, "learning_rate": 5.287119063496731e-06, "loss": 0.8159, "step": 4626 }, { 
"epoch": 0.5, "grad_norm": 1.7455824876020585, "learning_rate": 5.2853808457896695e-06, "loss": 0.8407, "step": 4627 }, { "epoch": 0.5, "grad_norm": 1.8105196590728867, "learning_rate": 5.2836425934791965e-06, "loss": 0.9235, "step": 4628 }, { "epoch": 0.5, "grad_norm": 1.6026037352297327, "learning_rate": 5.2819043067760824e-06, "loss": 0.9288, "step": 4629 }, { "epoch": 0.5, "grad_norm": 1.7512980273128977, "learning_rate": 5.280165985891098e-06, "loss": 0.926, "step": 4630 }, { "epoch": 0.5, "grad_norm": 1.672702842411126, "learning_rate": 5.278427631035022e-06, "loss": 0.8206, "step": 4631 }, { "epoch": 0.5, "grad_norm": 1.7513664316094502, "learning_rate": 5.276689242418635e-06, "loss": 0.8668, "step": 4632 }, { "epoch": 0.5, "grad_norm": 1.7326789867438002, "learning_rate": 5.274950820252725e-06, "loss": 0.8822, "step": 4633 }, { "epoch": 0.5, "grad_norm": 1.8908274588421048, "learning_rate": 5.27321236474808e-06, "loss": 0.9884, "step": 4634 }, { "epoch": 0.5, "grad_norm": 1.6917190503349, "learning_rate": 5.271473876115494e-06, "loss": 0.832, "step": 4635 }, { "epoch": 0.5, "grad_norm": 1.7061425485270514, "learning_rate": 5.269735354565764e-06, "loss": 0.8352, "step": 4636 }, { "epoch": 0.5, "grad_norm": 1.8046833829344417, "learning_rate": 5.267996800309693e-06, "loss": 0.892, "step": 4637 }, { "epoch": 0.5, "grad_norm": 1.879725768424691, "learning_rate": 5.266258213558084e-06, "loss": 0.957, "step": 4638 }, { "epoch": 0.5, "grad_norm": 1.6466577504153728, "learning_rate": 5.264519594521751e-06, "loss": 0.8427, "step": 4639 }, { "epoch": 0.5, "grad_norm": 1.715273852841163, "learning_rate": 5.262780943411504e-06, "loss": 0.874, "step": 4640 }, { "epoch": 0.5, "grad_norm": 1.902334733830288, "learning_rate": 5.261042260438163e-06, "loss": 0.8975, "step": 4641 }, { "epoch": 0.5, "grad_norm": 1.8035319549729754, "learning_rate": 5.259303545812546e-06, "loss": 0.8855, "step": 4642 }, { "epoch": 0.5, "grad_norm": 1.7085984408582833, "learning_rate": 5.257564799745481e-06, "loss": 1.0221, "step": 4643 }, { "epoch": 0.5, "grad_norm": 1.6900538493223507, "learning_rate": 5.255826022447796e-06, "loss": 0.9411, "step": 4644 }, { "epoch": 0.5, "grad_norm": 1.8055883979717955, "learning_rate": 5.254087214130324e-06, "loss": 0.9634, "step": 4645 }, { "epoch": 0.5, "grad_norm": 1.7806593542176847, "learning_rate": 5.252348375003902e-06, "loss": 0.909, "step": 4646 }, { "epoch": 0.5, "grad_norm": 1.7534707635130935, "learning_rate": 5.2506095052793695e-06, "loss": 0.8806, "step": 4647 }, { "epoch": 0.5, "grad_norm": 1.7935282121799816, "learning_rate": 5.24887060516757e-06, "loss": 0.868, "step": 4648 }, { "epoch": 0.5, "grad_norm": 0.8346726882323847, "learning_rate": 5.24713167487935e-06, "loss": 1.0241, "step": 4649 }, { "epoch": 0.5, "grad_norm": 1.7183385991024804, "learning_rate": 5.2453927146255646e-06, "loss": 0.9566, "step": 4650 }, { "epoch": 0.5, "grad_norm": 1.7032994750572283, "learning_rate": 5.2436537246170675e-06, "loss": 0.8738, "step": 4651 }, { "epoch": 0.5, "grad_norm": 1.7741417968228432, "learning_rate": 5.241914705064713e-06, "loss": 0.9429, "step": 4652 }, { "epoch": 0.5, "grad_norm": 0.8091718569413904, "learning_rate": 5.2401756561793686e-06, "loss": 1.0352, "step": 4653 }, { "epoch": 0.5, "grad_norm": 1.5473360986156506, "learning_rate": 5.238436578171899e-06, "loss": 0.8129, "step": 4654 }, { "epoch": 0.5, "grad_norm": 1.7037449023925755, "learning_rate": 5.236697471253168e-06, "loss": 0.9233, "step": 4655 }, { "epoch": 0.5, "grad_norm": 0.8282079654270955, 
"learning_rate": 5.2349583356340575e-06, "loss": 1.0523, "step": 4656 }, { "epoch": 0.5, "grad_norm": 1.7171287861296636, "learning_rate": 5.233219171525435e-06, "loss": 0.9306, "step": 4657 }, { "epoch": 0.5, "grad_norm": 1.6948430386018638, "learning_rate": 5.231479979138187e-06, "loss": 0.8593, "step": 4658 }, { "epoch": 0.5, "grad_norm": 1.679841879965499, "learning_rate": 5.229740758683191e-06, "loss": 0.8747, "step": 4659 }, { "epoch": 0.5, "grad_norm": 1.684122506833001, "learning_rate": 5.228001510371336e-06, "loss": 0.9217, "step": 4660 }, { "epoch": 0.5, "grad_norm": 1.6755217982249582, "learning_rate": 5.226262234413514e-06, "loss": 0.8043, "step": 4661 }, { "epoch": 0.5, "grad_norm": 1.8689141853034956, "learning_rate": 5.224522931020616e-06, "loss": 0.7967, "step": 4662 }, { "epoch": 0.5, "grad_norm": 1.7211060850798299, "learning_rate": 5.222783600403536e-06, "loss": 1.0156, "step": 4663 }, { "epoch": 0.5, "grad_norm": 1.6723564027846356, "learning_rate": 5.2210442427731774e-06, "loss": 0.9582, "step": 4664 }, { "epoch": 0.5, "grad_norm": 1.6867185025040994, "learning_rate": 5.2193048583404435e-06, "loss": 0.8332, "step": 4665 }, { "epoch": 0.5, "grad_norm": 1.6855970312046424, "learning_rate": 5.217565447316238e-06, "loss": 0.8853, "step": 4666 }, { "epoch": 0.5, "grad_norm": 1.7247923440963406, "learning_rate": 5.215826009911474e-06, "loss": 0.9259, "step": 4667 }, { "epoch": 0.5, "grad_norm": 1.865404951427475, "learning_rate": 5.214086546337061e-06, "loss": 0.9305, "step": 4668 }, { "epoch": 0.5, "grad_norm": 1.7124016475459252, "learning_rate": 5.212347056803916e-06, "loss": 0.9411, "step": 4669 }, { "epoch": 0.5, "grad_norm": 1.6782666019229384, "learning_rate": 5.210607541522959e-06, "loss": 0.9349, "step": 4670 }, { "epoch": 0.5, "grad_norm": 1.6806949785879604, "learning_rate": 5.208868000705109e-06, "loss": 0.9263, "step": 4671 }, { "epoch": 0.5, "grad_norm": 1.673224801467606, "learning_rate": 5.2071284345612975e-06, "loss": 0.8365, "step": 4672 }, { "epoch": 0.5, "grad_norm": 1.8106465989183727, "learning_rate": 5.205388843302447e-06, "loss": 0.9023, "step": 4673 }, { "epoch": 0.5, "grad_norm": 1.659037687953835, "learning_rate": 5.2036492271394915e-06, "loss": 0.8482, "step": 4674 }, { "epoch": 0.5, "grad_norm": 1.8107280254431233, "learning_rate": 5.201909586283365e-06, "loss": 0.8612, "step": 4675 }, { "epoch": 0.5, "grad_norm": 1.6390588979871388, "learning_rate": 5.200169920945005e-06, "loss": 0.8622, "step": 4676 }, { "epoch": 0.5, "grad_norm": 1.7561413646389288, "learning_rate": 5.198430231335353e-06, "loss": 0.9812, "step": 4677 }, { "epoch": 0.5, "grad_norm": 1.7206455174387931, "learning_rate": 5.19669051766535e-06, "loss": 0.8775, "step": 4678 }, { "epoch": 0.5, "grad_norm": 1.664345175163039, "learning_rate": 5.1949507801459455e-06, "loss": 0.8536, "step": 4679 }, { "epoch": 0.5, "grad_norm": 1.685887386860053, "learning_rate": 5.193211018988085e-06, "loss": 0.8213, "step": 4680 }, { "epoch": 0.5, "grad_norm": 1.7387999861424646, "learning_rate": 5.191471234402723e-06, "loss": 0.9391, "step": 4681 }, { "epoch": 0.5, "grad_norm": 1.822313074342098, "learning_rate": 5.189731426600814e-06, "loss": 0.8715, "step": 4682 }, { "epoch": 0.5, "grad_norm": 1.7007484205570726, "learning_rate": 5.187991595793314e-06, "loss": 0.799, "step": 4683 }, { "epoch": 0.5, "grad_norm": 1.7467479723325923, "learning_rate": 5.1862517421911875e-06, "loss": 0.8908, "step": 4684 }, { "epoch": 0.5, "grad_norm": 1.6794355854376535, "learning_rate": 5.184511866005393e-06, "loss": 
0.8693, "step": 4685 }, { "epoch": 0.5, "grad_norm": 1.6369118875533104, "learning_rate": 5.182771967446899e-06, "loss": 0.9464, "step": 4686 }, { "epoch": 0.5, "grad_norm": 1.722970626712947, "learning_rate": 5.181032046726674e-06, "loss": 0.9361, "step": 4687 }, { "epoch": 0.5, "grad_norm": 1.7125789688149462, "learning_rate": 5.17929210405569e-06, "loss": 0.9724, "step": 4688 }, { "epoch": 0.5, "grad_norm": 1.6530629707486482, "learning_rate": 5.177552139644919e-06, "loss": 0.9455, "step": 4689 }, { "epoch": 0.5, "grad_norm": 1.685861485122584, "learning_rate": 5.17581215370534e-06, "loss": 0.9516, "step": 4690 }, { "epoch": 0.5, "grad_norm": 1.692791703795029, "learning_rate": 5.17407214644793e-06, "loss": 0.8825, "step": 4691 }, { "epoch": 0.5, "grad_norm": 1.7719263972228192, "learning_rate": 5.172332118083673e-06, "loss": 0.9329, "step": 4692 }, { "epoch": 0.5, "grad_norm": 1.7086818876422143, "learning_rate": 5.170592068823554e-06, "loss": 0.939, "step": 4693 }, { "epoch": 0.5, "grad_norm": 1.6999018699786816, "learning_rate": 5.168851998878556e-06, "loss": 0.8727, "step": 4694 }, { "epoch": 0.5, "grad_norm": 1.676477408944589, "learning_rate": 5.1671119084596725e-06, "loss": 0.9673, "step": 4695 }, { "epoch": 0.5, "grad_norm": 0.8857400581083027, "learning_rate": 5.165371797777894e-06, "loss": 1.068, "step": 4696 }, { "epoch": 0.5, "grad_norm": 1.7767791450218338, "learning_rate": 5.163631667044213e-06, "loss": 0.8858, "step": 4697 }, { "epoch": 0.51, "grad_norm": 1.6537987324388463, "learning_rate": 5.1618915164696304e-06, "loss": 0.8885, "step": 4698 }, { "epoch": 0.51, "grad_norm": 1.7750384008407645, "learning_rate": 5.160151346265143e-06, "loss": 0.867, "step": 4699 }, { "epoch": 0.51, "grad_norm": 1.6805440214214908, "learning_rate": 5.158411156641752e-06, "loss": 0.8823, "step": 4700 }, { "epoch": 0.51, "grad_norm": 1.650033070846011, "learning_rate": 5.156670947810463e-06, "loss": 0.8647, "step": 4701 }, { "epoch": 0.51, "grad_norm": 1.8096955169928213, "learning_rate": 5.154930719982281e-06, "loss": 0.9324, "step": 4702 }, { "epoch": 0.51, "grad_norm": 1.698529259645741, "learning_rate": 5.153190473368213e-06, "loss": 0.934, "step": 4703 }, { "epoch": 0.51, "grad_norm": 1.6363590350479256, "learning_rate": 5.151450208179276e-06, "loss": 0.7885, "step": 4704 }, { "epoch": 0.51, "grad_norm": 1.799676340877873, "learning_rate": 5.149709924626476e-06, "loss": 0.8338, "step": 4705 }, { "epoch": 0.51, "grad_norm": 1.7272070122381717, "learning_rate": 5.147969622920833e-06, "loss": 0.8736, "step": 4706 }, { "epoch": 0.51, "grad_norm": 1.7504478431687218, "learning_rate": 5.146229303273364e-06, "loss": 0.9128, "step": 4707 }, { "epoch": 0.51, "grad_norm": 1.7287306899698407, "learning_rate": 5.144488965895084e-06, "loss": 0.9403, "step": 4708 }, { "epoch": 0.51, "grad_norm": 1.900063501428234, "learning_rate": 5.142748610997024e-06, "loss": 0.8998, "step": 4709 }, { "epoch": 0.51, "grad_norm": 1.6854663458028285, "learning_rate": 5.1410082387902e-06, "loss": 0.8287, "step": 4710 }, { "epoch": 0.51, "grad_norm": 1.6510741504136797, "learning_rate": 5.139267849485639e-06, "loss": 0.8786, "step": 4711 }, { "epoch": 0.51, "grad_norm": 1.8432290695556817, "learning_rate": 5.1375274432943745e-06, "loss": 1.0081, "step": 4712 }, { "epoch": 0.51, "grad_norm": 1.685499381294145, "learning_rate": 5.135787020427431e-06, "loss": 0.8827, "step": 4713 }, { "epoch": 0.51, "grad_norm": 1.7189958328087633, "learning_rate": 5.134046581095844e-06, "loss": 0.8548, "step": 4714 }, { "epoch": 0.51, 
"grad_norm": 1.7616004313609683, "learning_rate": 5.132306125510649e-06, "loss": 0.807, "step": 4715 }, { "epoch": 0.51, "grad_norm": 1.7868691143168831, "learning_rate": 5.130565653882877e-06, "loss": 0.9133, "step": 4716 }, { "epoch": 0.51, "grad_norm": 1.7772789080039024, "learning_rate": 5.12882516642357e-06, "loss": 0.9395, "step": 4717 }, { "epoch": 0.51, "grad_norm": 1.7178137006406249, "learning_rate": 5.127084663343769e-06, "loss": 0.9299, "step": 4718 }, { "epoch": 0.51, "grad_norm": 1.6934601053346077, "learning_rate": 5.125344144854514e-06, "loss": 0.8726, "step": 4719 }, { "epoch": 0.51, "grad_norm": 1.7345311422523366, "learning_rate": 5.123603611166849e-06, "loss": 0.8394, "step": 4720 }, { "epoch": 0.51, "grad_norm": 0.8912498098835481, "learning_rate": 5.121863062491819e-06, "loss": 1.097, "step": 4721 }, { "epoch": 0.51, "grad_norm": 1.7426498045054293, "learning_rate": 5.120122499040473e-06, "loss": 0.8008, "step": 4722 }, { "epoch": 0.51, "grad_norm": 1.9433080814967136, "learning_rate": 5.118381921023859e-06, "loss": 0.9114, "step": 4723 }, { "epoch": 0.51, "grad_norm": 1.7814654948141198, "learning_rate": 5.116641328653031e-06, "loss": 0.9349, "step": 4724 }, { "epoch": 0.51, "grad_norm": 0.8476925083307308, "learning_rate": 5.1149007221390386e-06, "loss": 1.0228, "step": 4725 }, { "epoch": 0.51, "grad_norm": 1.710626564060376, "learning_rate": 5.113160101692939e-06, "loss": 0.9077, "step": 4726 }, { "epoch": 0.51, "grad_norm": 1.6932430183902245, "learning_rate": 5.111419467525786e-06, "loss": 0.9194, "step": 4727 }, { "epoch": 0.51, "grad_norm": 1.7425876796972077, "learning_rate": 5.109678819848637e-06, "loss": 0.8282, "step": 4728 }, { "epoch": 0.51, "grad_norm": 1.7239156448404551, "learning_rate": 5.1079381588725544e-06, "loss": 0.934, "step": 4729 }, { "epoch": 0.51, "grad_norm": 1.7197050816190993, "learning_rate": 5.106197484808598e-06, "loss": 0.9461, "step": 4730 }, { "epoch": 0.51, "grad_norm": 1.6989920769219904, "learning_rate": 5.104456797867831e-06, "loss": 0.9028, "step": 4731 }, { "epoch": 0.51, "grad_norm": 1.8248161967967698, "learning_rate": 5.102716098261316e-06, "loss": 0.9419, "step": 4732 }, { "epoch": 0.51, "grad_norm": 1.819220088292649, "learning_rate": 5.10097538620012e-06, "loss": 1.0306, "step": 4733 }, { "epoch": 0.51, "grad_norm": 1.7244734188930948, "learning_rate": 5.09923466189531e-06, "loss": 1.0134, "step": 4734 }, { "epoch": 0.51, "grad_norm": 1.7259640537279293, "learning_rate": 5.097493925557956e-06, "loss": 0.8928, "step": 4735 }, { "epoch": 0.51, "grad_norm": 1.7209419001011022, "learning_rate": 5.095753177399127e-06, "loss": 0.9136, "step": 4736 }, { "epoch": 0.51, "grad_norm": 1.7071871727002306, "learning_rate": 5.0940124176298955e-06, "loss": 0.8989, "step": 4737 }, { "epoch": 0.51, "grad_norm": 1.8070499432221345, "learning_rate": 5.092271646461334e-06, "loss": 0.9953, "step": 4738 }, { "epoch": 0.51, "grad_norm": 1.7517178080971165, "learning_rate": 5.090530864104517e-06, "loss": 0.9309, "step": 4739 }, { "epoch": 0.51, "grad_norm": 1.6154290679092247, "learning_rate": 5.0887900707705205e-06, "loss": 0.9123, "step": 4740 }, { "epoch": 0.51, "grad_norm": 1.634135685952796, "learning_rate": 5.087049266670424e-06, "loss": 0.9218, "step": 4741 }, { "epoch": 0.51, "grad_norm": 1.7362638849291596, "learning_rate": 5.085308452015301e-06, "loss": 0.8782, "step": 4742 }, { "epoch": 0.51, "grad_norm": 1.653567802905575, "learning_rate": 5.0835676270162346e-06, "loss": 0.9128, "step": 4743 }, { "epoch": 0.51, "grad_norm": 
1.787941006730624, "learning_rate": 5.081826791884307e-06, "loss": 0.8422, "step": 4744 }, { "epoch": 0.51, "grad_norm": 1.6975393713476306, "learning_rate": 5.080085946830597e-06, "loss": 0.8667, "step": 4745 }, { "epoch": 0.51, "grad_norm": 1.7709590275934095, "learning_rate": 5.078345092066191e-06, "loss": 0.9821, "step": 4746 }, { "epoch": 0.51, "grad_norm": 1.6851676586736077, "learning_rate": 5.076604227802172e-06, "loss": 0.8617, "step": 4747 }, { "epoch": 0.51, "grad_norm": 1.6780966903850907, "learning_rate": 5.074863354249625e-06, "loss": 0.9429, "step": 4748 }, { "epoch": 0.51, "grad_norm": 1.7694564947855016, "learning_rate": 5.073122471619641e-06, "loss": 0.9326, "step": 4749 }, { "epoch": 0.51, "grad_norm": 0.9192236529921621, "learning_rate": 5.0713815801233025e-06, "loss": 1.0806, "step": 4750 }, { "epoch": 0.51, "grad_norm": 1.8094256937574864, "learning_rate": 5.069640679971702e-06, "loss": 0.9047, "step": 4751 }, { "epoch": 0.51, "grad_norm": 1.617099179600719, "learning_rate": 5.0678997713759305e-06, "loss": 0.9366, "step": 4752 }, { "epoch": 0.51, "grad_norm": 1.6585948603398726, "learning_rate": 5.066158854547076e-06, "loss": 0.9135, "step": 4753 }, { "epoch": 0.51, "grad_norm": 1.8328440620211266, "learning_rate": 5.064417929696233e-06, "loss": 0.9406, "step": 4754 }, { "epoch": 0.51, "grad_norm": 1.7884813054603999, "learning_rate": 5.062676997034494e-06, "loss": 0.9033, "step": 4755 }, { "epoch": 0.51, "grad_norm": 1.7596241395064007, "learning_rate": 5.060936056772952e-06, "loss": 0.8833, "step": 4756 }, { "epoch": 0.51, "grad_norm": 1.7707263034468876, "learning_rate": 5.059195109122705e-06, "loss": 0.905, "step": 4757 }, { "epoch": 0.51, "grad_norm": 1.8458093726400087, "learning_rate": 5.057454154294846e-06, "loss": 0.8706, "step": 4758 }, { "epoch": 0.51, "grad_norm": 1.6759211096121591, "learning_rate": 5.055713192500472e-06, "loss": 0.932, "step": 4759 }, { "epoch": 0.51, "grad_norm": 1.6801193331167659, "learning_rate": 5.053972223950682e-06, "loss": 0.9218, "step": 4760 }, { "epoch": 0.51, "grad_norm": 1.6539917691599473, "learning_rate": 5.052231248856574e-06, "loss": 0.885, "step": 4761 }, { "epoch": 0.51, "grad_norm": 1.7045298975200525, "learning_rate": 5.050490267429246e-06, "loss": 0.9031, "step": 4762 }, { "epoch": 0.51, "grad_norm": 1.7972579993698374, "learning_rate": 5.0487492798798e-06, "loss": 0.9407, "step": 4763 }, { "epoch": 0.51, "grad_norm": 1.9486738941212485, "learning_rate": 5.047008286419336e-06, "loss": 0.8981, "step": 4764 }, { "epoch": 0.51, "grad_norm": 1.6839135486027523, "learning_rate": 5.0452672872589535e-06, "loss": 0.9293, "step": 4765 }, { "epoch": 0.51, "grad_norm": 1.769080031595128, "learning_rate": 5.043526282609757e-06, "loss": 0.9376, "step": 4766 }, { "epoch": 0.51, "grad_norm": 1.6648809732260106, "learning_rate": 5.04178527268285e-06, "loss": 0.8499, "step": 4767 }, { "epoch": 0.51, "grad_norm": 0.9298363149288023, "learning_rate": 5.040044257689334e-06, "loss": 1.0565, "step": 4768 }, { "epoch": 0.51, "grad_norm": 1.8062610957216028, "learning_rate": 5.038303237840314e-06, "loss": 0.9375, "step": 4769 }, { "epoch": 0.51, "grad_norm": 1.6786032960274666, "learning_rate": 5.036562213346892e-06, "loss": 0.8815, "step": 4770 }, { "epoch": 0.51, "grad_norm": 1.7761196458151665, "learning_rate": 5.0348211844201786e-06, "loss": 0.9342, "step": 4771 }, { "epoch": 0.51, "grad_norm": 1.6903762677850032, "learning_rate": 5.033080151271276e-06, "loss": 0.9238, "step": 4772 }, { "epoch": 0.51, "grad_norm": 1.745540257783605, 
"learning_rate": 5.03133911411129e-06, "loss": 0.9402, "step": 4773 }, { "epoch": 0.51, "grad_norm": 1.6773748049526107, "learning_rate": 5.029598073151329e-06, "loss": 0.8583, "step": 4774 }, { "epoch": 0.51, "grad_norm": 1.8351588029202397, "learning_rate": 5.0278570286025e-06, "loss": 0.9198, "step": 4775 }, { "epoch": 0.51, "grad_norm": 1.822199635499785, "learning_rate": 5.0261159806759086e-06, "loss": 0.972, "step": 4776 }, { "epoch": 0.51, "grad_norm": 1.7550983007511867, "learning_rate": 5.024374929582665e-06, "loss": 0.8906, "step": 4777 }, { "epoch": 0.51, "grad_norm": 1.6938467824753478, "learning_rate": 5.022633875533879e-06, "loss": 0.9406, "step": 4778 }, { "epoch": 0.51, "grad_norm": 1.7262467488750828, "learning_rate": 5.020892818740655e-06, "loss": 0.9384, "step": 4779 }, { "epoch": 0.51, "grad_norm": 1.702656062676979, "learning_rate": 5.019151759414107e-06, "loss": 0.9074, "step": 4780 }, { "epoch": 0.51, "grad_norm": 1.796300295115342, "learning_rate": 5.0174106977653415e-06, "loss": 0.9345, "step": 4781 }, { "epoch": 0.51, "grad_norm": 1.8993377898522714, "learning_rate": 5.015669634005467e-06, "loss": 0.9306, "step": 4782 }, { "epoch": 0.51, "grad_norm": 1.6354457280224923, "learning_rate": 5.013928568345598e-06, "loss": 0.8325, "step": 4783 }, { "epoch": 0.51, "grad_norm": 0.9218539352515713, "learning_rate": 5.01218750099684e-06, "loss": 1.0467, "step": 4784 }, { "epoch": 0.51, "grad_norm": 1.6287606711371216, "learning_rate": 5.010446432170306e-06, "loss": 0.9311, "step": 4785 }, { "epoch": 0.51, "grad_norm": 1.7029420064127943, "learning_rate": 5.008705362077108e-06, "loss": 0.919, "step": 4786 }, { "epoch": 0.51, "grad_norm": 1.7368254245738373, "learning_rate": 5.006964290928351e-06, "loss": 0.8858, "step": 4787 }, { "epoch": 0.51, "grad_norm": 1.6024921740491467, "learning_rate": 5.0052232189351524e-06, "loss": 0.9305, "step": 4788 }, { "epoch": 0.51, "grad_norm": 1.6385307225849608, "learning_rate": 5.003482146308621e-06, "loss": 0.8846, "step": 4789 }, { "epoch": 0.51, "grad_norm": 1.626391623580546, "learning_rate": 5.001741073259866e-06, "loss": 0.9018, "step": 4790 }, { "epoch": 0.52, "grad_norm": 1.7115494651105547, "learning_rate": 5e-06, "loss": 0.8457, "step": 4791 }, { "epoch": 0.52, "grad_norm": 1.7034067908295565, "learning_rate": 4.998258926740135e-06, "loss": 0.8892, "step": 4792 }, { "epoch": 0.52, "grad_norm": 1.7617499344892245, "learning_rate": 4.99651785369138e-06, "loss": 0.9169, "step": 4793 }, { "epoch": 0.52, "grad_norm": 1.6863659439187473, "learning_rate": 4.9947767810648475e-06, "loss": 0.8515, "step": 4794 }, { "epoch": 0.52, "grad_norm": 1.7052613127497251, "learning_rate": 4.993035709071649e-06, "loss": 0.895, "step": 4795 }, { "epoch": 0.52, "grad_norm": 1.7535963883285903, "learning_rate": 4.991294637922893e-06, "loss": 0.9175, "step": 4796 }, { "epoch": 0.52, "grad_norm": 1.7451774280943328, "learning_rate": 4.989553567829696e-06, "loss": 0.9251, "step": 4797 }, { "epoch": 0.52, "grad_norm": 1.7221937410329757, "learning_rate": 4.987812499003162e-06, "loss": 0.8727, "step": 4798 }, { "epoch": 0.52, "grad_norm": 1.7688201644805153, "learning_rate": 4.986071431654405e-06, "loss": 0.852, "step": 4799 }, { "epoch": 0.52, "grad_norm": 1.7273363863419482, "learning_rate": 4.984330365994535e-06, "loss": 0.9188, "step": 4800 }, { "epoch": 0.52, "grad_norm": 1.8887239502477813, "learning_rate": 4.98258930223466e-06, "loss": 0.858, "step": 4801 }, { "epoch": 0.52, "grad_norm": 1.9072582683342727, "learning_rate": 4.980848240585895e-06, 
"loss": 0.8942, "step": 4802 }, { "epoch": 0.52, "grad_norm": 1.655868837620404, "learning_rate": 4.979107181259346e-06, "loss": 0.9452, "step": 4803 }, { "epoch": 0.52, "grad_norm": 1.6628130420812606, "learning_rate": 4.977366124466122e-06, "loss": 0.9178, "step": 4804 }, { "epoch": 0.52, "grad_norm": 1.7746133207574712, "learning_rate": 4.975625070417336e-06, "loss": 0.9363, "step": 4805 }, { "epoch": 0.52, "grad_norm": 1.7922534236053538, "learning_rate": 4.973884019324092e-06, "loss": 0.8927, "step": 4806 }, { "epoch": 0.52, "grad_norm": 1.6460441713949103, "learning_rate": 4.972142971397504e-06, "loss": 0.8145, "step": 4807 }, { "epoch": 0.52, "grad_norm": 0.917648631524272, "learning_rate": 4.970401926848674e-06, "loss": 1.0628, "step": 4808 }, { "epoch": 0.52, "grad_norm": 1.708967594211526, "learning_rate": 4.968660885888711e-06, "loss": 0.9043, "step": 4809 }, { "epoch": 0.52, "grad_norm": 1.730400383382931, "learning_rate": 4.966919848728726e-06, "loss": 0.8849, "step": 4810 }, { "epoch": 0.52, "grad_norm": 1.6496946654061142, "learning_rate": 4.965178815579823e-06, "loss": 0.8932, "step": 4811 }, { "epoch": 0.52, "grad_norm": 0.8227242241555114, "learning_rate": 4.963437786653108e-06, "loss": 1.0611, "step": 4812 }, { "epoch": 0.52, "grad_norm": 1.7230147049972075, "learning_rate": 4.961696762159688e-06, "loss": 0.9324, "step": 4813 }, { "epoch": 0.52, "grad_norm": 1.7401100799768645, "learning_rate": 4.959955742310668e-06, "loss": 0.86, "step": 4814 }, { "epoch": 0.52, "grad_norm": 1.6954359506760037, "learning_rate": 4.958214727317152e-06, "loss": 1.0114, "step": 4815 }, { "epoch": 0.52, "grad_norm": 1.7431264884551454, "learning_rate": 4.956473717390243e-06, "loss": 0.9806, "step": 4816 }, { "epoch": 0.52, "grad_norm": 0.8429996612992005, "learning_rate": 4.9547327127410465e-06, "loss": 1.0187, "step": 4817 }, { "epoch": 0.52, "grad_norm": 0.8435627514073136, "learning_rate": 4.952991713580666e-06, "loss": 1.0812, "step": 4818 }, { "epoch": 0.52, "grad_norm": 1.7850147919658506, "learning_rate": 4.951250720120203e-06, "loss": 0.9662, "step": 4819 }, { "epoch": 0.52, "grad_norm": 1.7801700603157813, "learning_rate": 4.949509732570756e-06, "loss": 0.8399, "step": 4820 }, { "epoch": 0.52, "grad_norm": 0.8506915405232696, "learning_rate": 4.947768751143428e-06, "loss": 1.0901, "step": 4821 }, { "epoch": 0.52, "grad_norm": 1.7591150263682396, "learning_rate": 4.94602777604932e-06, "loss": 0.8969, "step": 4822 }, { "epoch": 0.52, "grad_norm": 1.7194445729195134, "learning_rate": 4.9442868074995296e-06, "loss": 0.8947, "step": 4823 }, { "epoch": 0.52, "grad_norm": 1.6439758898773533, "learning_rate": 4.942545845705156e-06, "loss": 0.8358, "step": 4824 }, { "epoch": 0.52, "grad_norm": 1.6857027545425474, "learning_rate": 4.9408048908772965e-06, "loss": 0.8942, "step": 4825 }, { "epoch": 0.52, "grad_norm": 1.6991767035652792, "learning_rate": 4.939063943227049e-06, "loss": 0.863, "step": 4826 }, { "epoch": 0.52, "grad_norm": 1.8350409648325308, "learning_rate": 4.937323002965507e-06, "loss": 0.9225, "step": 4827 }, { "epoch": 0.52, "grad_norm": 1.6774764008375693, "learning_rate": 4.935582070303767e-06, "loss": 0.8259, "step": 4828 }, { "epoch": 0.52, "grad_norm": 1.7192534155454902, "learning_rate": 4.933841145452925e-06, "loss": 0.9671, "step": 4829 }, { "epoch": 0.52, "grad_norm": 1.7724993459495595, "learning_rate": 4.932100228624072e-06, "loss": 0.928, "step": 4830 }, { "epoch": 0.52, "grad_norm": 1.7446309758462493, "learning_rate": 4.9303593200282995e-06, "loss": 0.9082, 
"step": 4831 }, { "epoch": 0.52, "grad_norm": 1.6614335614089262, "learning_rate": 4.928618419876698e-06, "loss": 0.978, "step": 4832 }, { "epoch": 0.52, "grad_norm": 1.727420708655698, "learning_rate": 4.926877528380362e-06, "loss": 0.9348, "step": 4833 }, { "epoch": 0.52, "grad_norm": 1.6845516467933916, "learning_rate": 4.925136645750377e-06, "loss": 0.8845, "step": 4834 }, { "epoch": 0.52, "grad_norm": 1.7590560227274192, "learning_rate": 4.923395772197829e-06, "loss": 0.9784, "step": 4835 }, { "epoch": 0.52, "grad_norm": 1.705534386820642, "learning_rate": 4.9216549079338105e-06, "loss": 0.9163, "step": 4836 }, { "epoch": 0.52, "grad_norm": 1.7662771158122483, "learning_rate": 4.919914053169404e-06, "loss": 0.8164, "step": 4837 }, { "epoch": 0.52, "grad_norm": 1.6706093813394591, "learning_rate": 4.918173208115695e-06, "loss": 0.9098, "step": 4838 }, { "epoch": 0.52, "grad_norm": 1.6863149987741377, "learning_rate": 4.916432372983768e-06, "loss": 0.9285, "step": 4839 }, { "epoch": 0.52, "grad_norm": 1.8406799270566048, "learning_rate": 4.914691547984701e-06, "loss": 0.982, "step": 4840 }, { "epoch": 0.52, "grad_norm": 0.9635601251887823, "learning_rate": 4.912950733329579e-06, "loss": 1.0319, "step": 4841 }, { "epoch": 0.52, "grad_norm": 1.753867279481688, "learning_rate": 4.91120992922948e-06, "loss": 0.9833, "step": 4842 }, { "epoch": 0.52, "grad_norm": 0.9315705758611811, "learning_rate": 4.909469135895485e-06, "loss": 1.0183, "step": 4843 }, { "epoch": 0.52, "grad_norm": 1.7676605937450491, "learning_rate": 4.9077283535386675e-06, "loss": 0.853, "step": 4844 }, { "epoch": 0.52, "grad_norm": 1.7042795112689766, "learning_rate": 4.905987582370106e-06, "loss": 0.8703, "step": 4845 }, { "epoch": 0.52, "grad_norm": 1.7850443844228427, "learning_rate": 4.904246822600874e-06, "loss": 0.9188, "step": 4846 }, { "epoch": 0.52, "grad_norm": 1.7426662083404159, "learning_rate": 4.902506074442044e-06, "loss": 0.8057, "step": 4847 }, { "epoch": 0.52, "grad_norm": 1.7208155566425132, "learning_rate": 4.900765338104691e-06, "loss": 0.9724, "step": 4848 }, { "epoch": 0.52, "grad_norm": 1.729412391628358, "learning_rate": 4.899024613799881e-06, "loss": 0.8888, "step": 4849 }, { "epoch": 0.52, "grad_norm": 1.7370049990550247, "learning_rate": 4.897283901738687e-06, "loss": 0.9518, "step": 4850 }, { "epoch": 0.52, "grad_norm": 1.6483186413808462, "learning_rate": 4.8955432021321725e-06, "loss": 0.8348, "step": 4851 }, { "epoch": 0.52, "grad_norm": 1.7788386554899722, "learning_rate": 4.893802515191404e-06, "loss": 0.875, "step": 4852 }, { "epoch": 0.52, "grad_norm": 1.7471770332821512, "learning_rate": 4.892061841127446e-06, "loss": 1.0048, "step": 4853 }, { "epoch": 0.52, "grad_norm": 1.665553389328553, "learning_rate": 4.8903211801513645e-06, "loss": 0.8034, "step": 4854 }, { "epoch": 0.52, "grad_norm": 1.0009174307895428, "learning_rate": 4.888580532474216e-06, "loss": 1.0408, "step": 4855 }, { "epoch": 0.52, "grad_norm": 1.7172313399960755, "learning_rate": 4.886839898307062e-06, "loss": 1.0074, "step": 4856 }, { "epoch": 0.52, "grad_norm": 1.914026532487705, "learning_rate": 4.885099277860962e-06, "loss": 0.85, "step": 4857 }, { "epoch": 0.52, "grad_norm": 1.7021606974594914, "learning_rate": 4.883358671346969e-06, "loss": 0.8934, "step": 4858 }, { "epoch": 0.52, "grad_norm": 1.6670179251155857, "learning_rate": 4.88161807897614e-06, "loss": 0.8868, "step": 4859 }, { "epoch": 0.52, "grad_norm": 1.6802884049022515, "learning_rate": 4.879877500959529e-06, "loss": 0.9294, "step": 4860 }, { "epoch": 
0.52, "grad_norm": 1.6750226440965872, "learning_rate": 4.878136937508183e-06, "loss": 0.8543, "step": 4861 }, { "epoch": 0.52, "grad_norm": 1.7285709086271681, "learning_rate": 4.876396388833155e-06, "loss": 0.9236, "step": 4862 }, { "epoch": 0.52, "grad_norm": 1.810884848401018, "learning_rate": 4.874655855145488e-06, "loss": 0.983, "step": 4863 }, { "epoch": 0.52, "grad_norm": 1.7688546435412285, "learning_rate": 4.872915336656233e-06, "loss": 0.9332, "step": 4864 }, { "epoch": 0.52, "grad_norm": 1.6929533595809845, "learning_rate": 4.871174833576431e-06, "loss": 0.9325, "step": 4865 }, { "epoch": 0.52, "grad_norm": 1.6924147825152458, "learning_rate": 4.869434346117124e-06, "loss": 0.8823, "step": 4866 }, { "epoch": 0.52, "grad_norm": 1.721282898115799, "learning_rate": 4.867693874489353e-06, "loss": 0.8598, "step": 4867 }, { "epoch": 0.52, "grad_norm": 1.5959027853042433, "learning_rate": 4.865953418904156e-06, "loss": 0.9318, "step": 4868 }, { "epoch": 0.52, "grad_norm": 1.6625208677345755, "learning_rate": 4.864212979572569e-06, "loss": 0.9147, "step": 4869 }, { "epoch": 0.52, "grad_norm": 1.7730603584715678, "learning_rate": 4.862472556705626e-06, "loss": 0.9592, "step": 4870 }, { "epoch": 0.52, "grad_norm": 1.752387856832597, "learning_rate": 4.8607321505143614e-06, "loss": 0.8357, "step": 4871 }, { "epoch": 0.52, "grad_norm": 1.7462785312499836, "learning_rate": 4.858991761209803e-06, "loss": 0.9323, "step": 4872 }, { "epoch": 0.52, "grad_norm": 1.757173767268217, "learning_rate": 4.85725138900298e-06, "loss": 0.9003, "step": 4873 }, { "epoch": 0.52, "grad_norm": 1.81650699236033, "learning_rate": 4.855511034104916e-06, "loss": 0.8852, "step": 4874 }, { "epoch": 0.52, "grad_norm": 2.0143147896129596, "learning_rate": 4.853770696726639e-06, "loss": 0.9466, "step": 4875 }, { "epoch": 0.52, "grad_norm": 1.6719593650470217, "learning_rate": 4.852030377079169e-06, "loss": 0.9374, "step": 4876 }, { "epoch": 0.52, "grad_norm": 0.861831702530439, "learning_rate": 4.850290075373525e-06, "loss": 0.9821, "step": 4877 }, { "epoch": 0.52, "grad_norm": 1.7047592148532476, "learning_rate": 4.848549791820725e-06, "loss": 0.8953, "step": 4878 }, { "epoch": 0.52, "grad_norm": 1.8044570263578867, "learning_rate": 4.846809526631787e-06, "loss": 0.919, "step": 4879 }, { "epoch": 0.52, "grad_norm": 1.7041214123114021, "learning_rate": 4.845069280017721e-06, "loss": 0.9141, "step": 4880 }, { "epoch": 0.52, "grad_norm": 1.740102159523388, "learning_rate": 4.843329052189538e-06, "loss": 0.8715, "step": 4881 }, { "epoch": 0.52, "grad_norm": 1.6555008551474355, "learning_rate": 4.841588843358251e-06, "loss": 0.8648, "step": 4882 }, { "epoch": 0.52, "grad_norm": 1.688443291249926, "learning_rate": 4.839848653734859e-06, "loss": 0.9483, "step": 4883 }, { "epoch": 0.53, "grad_norm": 1.675964683963416, "learning_rate": 4.838108483530371e-06, "loss": 0.9117, "step": 4884 }, { "epoch": 0.53, "grad_norm": 1.7733940112563926, "learning_rate": 4.8363683329557885e-06, "loss": 0.8392, "step": 4885 }, { "epoch": 0.53, "grad_norm": 1.723104068905473, "learning_rate": 4.834628202222107e-06, "loss": 0.879, "step": 4886 }, { "epoch": 0.53, "grad_norm": 1.7076784268290062, "learning_rate": 4.832888091540328e-06, "loss": 0.9032, "step": 4887 }, { "epoch": 0.53, "grad_norm": 1.6965639441495983, "learning_rate": 4.831148001121445e-06, "loss": 0.8856, "step": 4888 }, { "epoch": 0.53, "grad_norm": 1.7562527913751769, "learning_rate": 4.829407931176447e-06, "loss": 0.8882, "step": 4889 }, { "epoch": 0.53, "grad_norm": 
1.805934885301784, "learning_rate": 4.827667881916327e-06, "loss": 0.9803, "step": 4890 }, { "epoch": 0.53, "grad_norm": 1.655831784148836, "learning_rate": 4.825927853552071e-06, "loss": 0.8735, "step": 4891 }, { "epoch": 0.53, "grad_norm": 1.7267496587135869, "learning_rate": 4.824187846294662e-06, "loss": 0.8875, "step": 4892 }, { "epoch": 0.53, "grad_norm": 1.6267473470805156, "learning_rate": 4.822447860355083e-06, "loss": 0.9006, "step": 4893 }, { "epoch": 0.53, "grad_norm": 1.6723025215625729, "learning_rate": 4.820707895944312e-06, "loss": 0.8885, "step": 4894 }, { "epoch": 0.53, "grad_norm": 1.6853589333631322, "learning_rate": 4.818967953273327e-06, "loss": 0.9058, "step": 4895 }, { "epoch": 0.53, "grad_norm": 1.7831704948342737, "learning_rate": 4.817228032553103e-06, "loss": 0.9068, "step": 4896 }, { "epoch": 0.53, "grad_norm": 1.6910273452221423, "learning_rate": 4.815488133994608e-06, "loss": 0.8924, "step": 4897 }, { "epoch": 0.53, "grad_norm": 1.7380444877150196, "learning_rate": 4.813748257808814e-06, "loss": 0.8337, "step": 4898 }, { "epoch": 0.53, "grad_norm": 1.765943202911277, "learning_rate": 4.8120084042066865e-06, "loss": 0.9243, "step": 4899 }, { "epoch": 0.53, "grad_norm": 1.710470832650906, "learning_rate": 4.810268573399187e-06, "loss": 0.8523, "step": 4900 }, { "epoch": 0.53, "grad_norm": 1.7174725698469502, "learning_rate": 4.8085287655972775e-06, "loss": 0.9295, "step": 4901 }, { "epoch": 0.53, "grad_norm": 1.940075942342574, "learning_rate": 4.806788981011916e-06, "loss": 0.8977, "step": 4902 }, { "epoch": 0.53, "grad_norm": 0.8416865749041946, "learning_rate": 4.805049219854058e-06, "loss": 1.0693, "step": 4903 }, { "epoch": 0.53, "grad_norm": 1.6555694395362475, "learning_rate": 4.803309482334652e-06, "loss": 0.9145, "step": 4904 }, { "epoch": 0.53, "grad_norm": 1.6922666094176584, "learning_rate": 4.801569768664649e-06, "loss": 0.8611, "step": 4905 }, { "epoch": 0.53, "grad_norm": 1.8759528360746873, "learning_rate": 4.799830079054996e-06, "loss": 0.9331, "step": 4906 }, { "epoch": 0.53, "grad_norm": 1.853245051424717, "learning_rate": 4.798090413716637e-06, "loss": 0.9654, "step": 4907 }, { "epoch": 0.53, "grad_norm": 1.7261593599767453, "learning_rate": 4.796350772860511e-06, "loss": 0.9421, "step": 4908 }, { "epoch": 0.53, "grad_norm": 1.7399779419358394, "learning_rate": 4.7946111566975545e-06, "loss": 1.0012, "step": 4909 }, { "epoch": 0.53, "grad_norm": 1.667344544063853, "learning_rate": 4.792871565438705e-06, "loss": 0.866, "step": 4910 }, { "epoch": 0.53, "grad_norm": 1.7048439457956874, "learning_rate": 4.7911319992948914e-06, "loss": 0.8638, "step": 4911 }, { "epoch": 0.53, "grad_norm": 1.6487921953177145, "learning_rate": 4.789392458477042e-06, "loss": 0.8658, "step": 4912 }, { "epoch": 0.53, "grad_norm": 1.7510075508692577, "learning_rate": 4.787652943196087e-06, "loss": 0.8571, "step": 4913 }, { "epoch": 0.53, "grad_norm": 1.7178197112208113, "learning_rate": 4.785913453662941e-06, "loss": 0.8907, "step": 4914 }, { "epoch": 0.53, "grad_norm": 1.7542090324121329, "learning_rate": 4.784173990088529e-06, "loss": 0.8424, "step": 4915 }, { "epoch": 0.53, "grad_norm": 1.7654776563926982, "learning_rate": 4.782434552683763e-06, "loss": 0.8811, "step": 4916 }, { "epoch": 0.53, "grad_norm": 1.7584989473836496, "learning_rate": 4.780695141659557e-06, "loss": 0.9332, "step": 4917 }, { "epoch": 0.53, "grad_norm": 1.7691730439629336, "learning_rate": 4.778955757226823e-06, "loss": 0.9158, "step": 4918 }, { "epoch": 0.53, "grad_norm": 1.7138177429477168, 
"learning_rate": 4.777216399596466e-06, "loss": 0.9147, "step": 4919 }, { "epoch": 0.53, "grad_norm": 1.7723941864952462, "learning_rate": 4.775477068979386e-06, "loss": 0.9024, "step": 4920 }, { "epoch": 0.53, "grad_norm": 0.8676529433827483, "learning_rate": 4.773737765586487e-06, "loss": 1.0353, "step": 4921 }, { "epoch": 0.53, "grad_norm": 1.6702229850783805, "learning_rate": 4.771998489628664e-06, "loss": 0.8439, "step": 4922 }, { "epoch": 0.53, "grad_norm": 1.6606929942531037, "learning_rate": 4.7702592413168095e-06, "loss": 0.8956, "step": 4923 }, { "epoch": 0.53, "grad_norm": 1.807287816449294, "learning_rate": 4.768520020861817e-06, "loss": 0.9506, "step": 4924 }, { "epoch": 0.53, "grad_norm": 0.8094032645768986, "learning_rate": 4.766780828474566e-06, "loss": 1.0188, "step": 4925 }, { "epoch": 0.53, "grad_norm": 1.7171466898882788, "learning_rate": 4.765041664365945e-06, "loss": 0.8827, "step": 4926 }, { "epoch": 0.53, "grad_norm": 0.8413143291446661, "learning_rate": 4.763302528746833e-06, "loss": 1.0625, "step": 4927 }, { "epoch": 0.53, "grad_norm": 1.7792857919709386, "learning_rate": 4.761563421828104e-06, "loss": 0.8941, "step": 4928 }, { "epoch": 0.53, "grad_norm": 1.6568057029664993, "learning_rate": 4.759824343820632e-06, "loss": 0.9548, "step": 4929 }, { "epoch": 0.53, "grad_norm": 1.7821034969480372, "learning_rate": 4.758085294935288e-06, "loss": 0.9084, "step": 4930 }, { "epoch": 0.53, "grad_norm": 1.8216061055780877, "learning_rate": 4.756346275382934e-06, "loss": 0.9277, "step": 4931 }, { "epoch": 0.53, "grad_norm": 1.728434840077039, "learning_rate": 4.754607285374435e-06, "loss": 0.9187, "step": 4932 }, { "epoch": 0.53, "grad_norm": 1.6379028606385058, "learning_rate": 4.75286832512065e-06, "loss": 0.8911, "step": 4933 }, { "epoch": 0.53, "grad_norm": 1.773831547618212, "learning_rate": 4.7511293948324325e-06, "loss": 0.937, "step": 4934 }, { "epoch": 0.53, "grad_norm": 1.6832624291615406, "learning_rate": 4.749390494720633e-06, "loss": 0.8854, "step": 4935 }, { "epoch": 0.53, "grad_norm": 1.7084327394601138, "learning_rate": 4.7476516249961e-06, "loss": 0.8857, "step": 4936 }, { "epoch": 0.53, "grad_norm": 0.8848238275333606, "learning_rate": 4.7459127858696765e-06, "loss": 1.0375, "step": 4937 }, { "epoch": 0.53, "grad_norm": 1.7727857331064247, "learning_rate": 4.744173977552205e-06, "loss": 0.9043, "step": 4938 }, { "epoch": 0.53, "grad_norm": 1.7041458754670333, "learning_rate": 4.74243520025452e-06, "loss": 0.9586, "step": 4939 }, { "epoch": 0.53, "grad_norm": 1.6784417678457635, "learning_rate": 4.7406964541874546e-06, "loss": 0.8888, "step": 4940 }, { "epoch": 0.53, "grad_norm": 1.6991211169308387, "learning_rate": 4.7389577395618395e-06, "loss": 0.9102, "step": 4941 }, { "epoch": 0.53, "grad_norm": 2.386887547925124, "learning_rate": 4.737219056588497e-06, "loss": 0.8723, "step": 4942 }, { "epoch": 0.53, "grad_norm": 1.6572630967689768, "learning_rate": 4.735480405478249e-06, "loss": 0.9218, "step": 4943 }, { "epoch": 0.53, "grad_norm": 1.6887765341586247, "learning_rate": 4.733741786441916e-06, "loss": 0.8702, "step": 4944 }, { "epoch": 0.53, "grad_norm": 1.666158809144583, "learning_rate": 4.73200319969031e-06, "loss": 0.9591, "step": 4945 }, { "epoch": 0.53, "grad_norm": 2.093306662233018, "learning_rate": 4.730264645434238e-06, "loss": 0.8523, "step": 4946 }, { "epoch": 0.53, "grad_norm": 1.6553631036949914, "learning_rate": 4.728526123884509e-06, "loss": 0.9287, "step": 4947 }, { "epoch": 0.53, "grad_norm": 1.7872925202747005, "learning_rate": 
4.7267876352519215e-06, "loss": 0.9384, "step": 4948 }, { "epoch": 0.53, "grad_norm": 1.745842003773778, "learning_rate": 4.725049179747276e-06, "loss": 0.9495, "step": 4949 }, { "epoch": 0.53, "grad_norm": 1.7608330213275287, "learning_rate": 4.723310757581365e-06, "loss": 0.8772, "step": 4950 }, { "epoch": 0.53, "grad_norm": 0.8632210263611498, "learning_rate": 4.721572368964979e-06, "loss": 1.0656, "step": 4951 }, { "epoch": 0.53, "grad_norm": 1.7111283778911603, "learning_rate": 4.719834014108903e-06, "loss": 0.9473, "step": 4952 }, { "epoch": 0.53, "grad_norm": 1.647769329340415, "learning_rate": 4.718095693223919e-06, "loss": 0.8684, "step": 4953 }, { "epoch": 0.53, "grad_norm": 1.6686118973624922, "learning_rate": 4.7163574065208034e-06, "loss": 0.8148, "step": 4954 }, { "epoch": 0.53, "grad_norm": 1.717168280452624, "learning_rate": 4.7146191542103305e-06, "loss": 0.8644, "step": 4955 }, { "epoch": 0.53, "grad_norm": 1.8039591716309638, "learning_rate": 4.712880936503271e-06, "loss": 0.9153, "step": 4956 }, { "epoch": 0.53, "grad_norm": 1.8243455927199925, "learning_rate": 4.7111427536103856e-06, "loss": 0.9185, "step": 4957 }, { "epoch": 0.53, "grad_norm": 0.8277846328534417, "learning_rate": 4.709404605742438e-06, "loss": 1.0335, "step": 4958 }, { "epoch": 0.53, "grad_norm": 1.7584402968009591, "learning_rate": 4.707666493110182e-06, "loss": 0.9273, "step": 4959 }, { "epoch": 0.53, "grad_norm": 1.724181600053812, "learning_rate": 4.7059284159243725e-06, "loss": 0.8672, "step": 4960 }, { "epoch": 0.53, "grad_norm": 1.6629145513539565, "learning_rate": 4.704190374395757e-06, "loss": 0.9043, "step": 4961 }, { "epoch": 0.53, "grad_norm": 1.7094265380277436, "learning_rate": 4.7024523687350775e-06, "loss": 0.9478, "step": 4962 }, { "epoch": 0.53, "grad_norm": 1.805608639238089, "learning_rate": 4.700714399153075e-06, "loss": 0.8706, "step": 4963 }, { "epoch": 0.53, "grad_norm": 1.7805025011859075, "learning_rate": 4.698976465860484e-06, "loss": 0.8466, "step": 4964 }, { "epoch": 0.53, "grad_norm": 1.7025785117051342, "learning_rate": 4.697238569068033e-06, "loss": 0.9355, "step": 4965 }, { "epoch": 0.53, "grad_norm": 1.6578604041334952, "learning_rate": 4.695500708986451e-06, "loss": 0.7994, "step": 4966 }, { "epoch": 0.53, "grad_norm": 1.6642505181482334, "learning_rate": 4.693762885826456e-06, "loss": 0.9109, "step": 4967 }, { "epoch": 0.53, "grad_norm": 1.8076434816373579, "learning_rate": 4.692025099798767e-06, "loss": 0.867, "step": 4968 }, { "epoch": 0.53, "grad_norm": 1.8817693193863512, "learning_rate": 4.690287351114097e-06, "loss": 0.9833, "step": 4969 }, { "epoch": 0.53, "grad_norm": 0.8551260413633194, "learning_rate": 4.688549639983154e-06, "loss": 1.0675, "step": 4970 }, { "epoch": 0.53, "grad_norm": 1.7052222427364945, "learning_rate": 4.686811966616639e-06, "loss": 0.9821, "step": 4971 }, { "epoch": 0.53, "grad_norm": 1.5530615187398884, "learning_rate": 4.685074331225254e-06, "loss": 0.8332, "step": 4972 }, { "epoch": 0.53, "grad_norm": 1.8035401098222605, "learning_rate": 4.683336734019692e-06, "loss": 0.8522, "step": 4973 }, { "epoch": 0.53, "grad_norm": 1.781242263206731, "learning_rate": 4.681599175210641e-06, "loss": 0.9823, "step": 4974 }, { "epoch": 0.53, "grad_norm": 1.6392703495582157, "learning_rate": 4.67986165500879e-06, "loss": 0.8032, "step": 4975 }, { "epoch": 0.53, "grad_norm": 1.859291988007837, "learning_rate": 4.678124173624816e-06, "loss": 0.8722, "step": 4976 }, { "epoch": 0.54, "grad_norm": 1.9749289010487534, "learning_rate": 
4.676386731269398e-06, "loss": 0.934, "step": 4977 }, { "epoch": 0.54, "grad_norm": 1.6749928775437437, "learning_rate": 4.674649328153202e-06, "loss": 0.9057, "step": 4978 }, { "epoch": 0.54, "grad_norm": 1.735243114327548, "learning_rate": 4.672911964486896e-06, "loss": 0.8714, "step": 4979 }, { "epoch": 0.54, "grad_norm": 1.8147422547627723, "learning_rate": 4.671174640481143e-06, "loss": 0.9172, "step": 4980 }, { "epoch": 0.54, "grad_norm": 1.7732669645356542, "learning_rate": 4.669437356346599e-06, "loss": 0.9292, "step": 4981 }, { "epoch": 0.54, "grad_norm": 1.6300231384242516, "learning_rate": 4.6677001122939134e-06, "loss": 0.8704, "step": 4982 }, { "epoch": 0.54, "grad_norm": 1.669247919210026, "learning_rate": 4.665962908533736e-06, "loss": 0.8896, "step": 4983 }, { "epoch": 0.54, "grad_norm": 1.6589565461227995, "learning_rate": 4.664225745276709e-06, "loss": 0.9298, "step": 4984 }, { "epoch": 0.54, "grad_norm": 1.6685161508745137, "learning_rate": 4.662488622733466e-06, "loss": 0.8821, "step": 4985 }, { "epoch": 0.54, "grad_norm": 1.8721036708805519, "learning_rate": 4.660751541114641e-06, "loss": 1.028, "step": 4986 }, { "epoch": 0.54, "grad_norm": 1.6353546693453442, "learning_rate": 4.659014500630863e-06, "loss": 0.8398, "step": 4987 }, { "epoch": 0.54, "grad_norm": 1.742223477508306, "learning_rate": 4.657277501492751e-06, "loss": 0.9114, "step": 4988 }, { "epoch": 0.54, "grad_norm": 1.730970515906384, "learning_rate": 4.655540543910924e-06, "loss": 0.8551, "step": 4989 }, { "epoch": 0.54, "grad_norm": 1.8031535527938525, "learning_rate": 4.653803628095993e-06, "loss": 0.9488, "step": 4990 }, { "epoch": 0.54, "grad_norm": 1.698132700217416, "learning_rate": 4.6520667542585655e-06, "loss": 0.8638, "step": 4991 }, { "epoch": 0.54, "grad_norm": 1.7247778728320484, "learning_rate": 4.650329922609244e-06, "loss": 0.8925, "step": 4992 }, { "epoch": 0.54, "grad_norm": 1.7127755718926827, "learning_rate": 4.648593133358625e-06, "loss": 0.8779, "step": 4993 }, { "epoch": 0.54, "grad_norm": 1.743547321415513, "learning_rate": 4.646856386717299e-06, "loss": 0.8683, "step": 4994 }, { "epoch": 0.54, "grad_norm": 1.6791253428177764, "learning_rate": 4.645119682895855e-06, "loss": 0.9073, "step": 4995 }, { "epoch": 0.54, "grad_norm": 1.7796117744745876, "learning_rate": 4.643383022104871e-06, "loss": 0.8133, "step": 4996 }, { "epoch": 0.54, "grad_norm": 1.7069230659059722, "learning_rate": 4.641646404554927e-06, "loss": 0.8602, "step": 4997 }, { "epoch": 0.54, "grad_norm": 1.8301916717115407, "learning_rate": 4.639909830456593e-06, "loss": 0.9255, "step": 4998 }, { "epoch": 0.54, "grad_norm": 1.7804799526353505, "learning_rate": 4.638173300020431e-06, "loss": 0.8825, "step": 4999 }, { "epoch": 0.54, "grad_norm": 1.6784487326405346, "learning_rate": 4.6364368134570046e-06, "loss": 0.9191, "step": 5000 }, { "epoch": 0.54, "grad_norm": 1.7723975731958774, "learning_rate": 4.634700370976867e-06, "loss": 0.9004, "step": 5001 }, { "epoch": 0.54, "grad_norm": 1.7325358022086093, "learning_rate": 4.63296397279057e-06, "loss": 0.8276, "step": 5002 }, { "epoch": 0.54, "grad_norm": 1.7925479346536586, "learning_rate": 4.631227619108657e-06, "loss": 0.9254, "step": 5003 }, { "epoch": 0.54, "grad_norm": 1.7001621334201689, "learning_rate": 4.629491310141666e-06, "loss": 0.9171, "step": 5004 }, { "epoch": 0.54, "grad_norm": 1.7739891625559263, "learning_rate": 4.62775504610013e-06, "loss": 0.9894, "step": 5005 }, { "epoch": 0.54, "grad_norm": 1.6571003259366335, "learning_rate": 4.626018827194579e-06, 
"loss": 0.8741, "step": 5006 }, { "epoch": 0.54, "grad_norm": 3.525887234208154, "learning_rate": 4.624282653635534e-06, "loss": 0.8256, "step": 5007 }, { "epoch": 0.54, "grad_norm": 1.7663074789739956, "learning_rate": 4.622546525633512e-06, "loss": 0.9049, "step": 5008 }, { "epoch": 0.54, "grad_norm": 1.805854410033661, "learning_rate": 4.6208104433990285e-06, "loss": 0.8538, "step": 5009 }, { "epoch": 0.54, "grad_norm": 1.711185162345477, "learning_rate": 4.619074407142582e-06, "loss": 0.9004, "step": 5010 }, { "epoch": 0.54, "grad_norm": 1.6145315307261079, "learning_rate": 4.617338417074679e-06, "loss": 0.8928, "step": 5011 }, { "epoch": 0.54, "grad_norm": 0.8348426907860208, "learning_rate": 4.615602473405812e-06, "loss": 1.0594, "step": 5012 }, { "epoch": 0.54, "grad_norm": 1.8929071157458481, "learning_rate": 4.6138665763464685e-06, "loss": 0.9084, "step": 5013 }, { "epoch": 0.54, "grad_norm": 1.6558015470080174, "learning_rate": 4.612130726107135e-06, "loss": 0.8653, "step": 5014 }, { "epoch": 0.54, "grad_norm": 0.7837267933952008, "learning_rate": 4.610394922898289e-06, "loss": 1.0622, "step": 5015 }, { "epoch": 0.54, "grad_norm": 1.7689384364313019, "learning_rate": 4.6086591669303995e-06, "loss": 0.8927, "step": 5016 }, { "epoch": 0.54, "grad_norm": 1.7657955137912844, "learning_rate": 4.6069234584139374e-06, "loss": 0.9039, "step": 5017 }, { "epoch": 0.54, "grad_norm": 1.7075077938714043, "learning_rate": 4.60518779755936e-06, "loss": 0.8896, "step": 5018 }, { "epoch": 0.54, "grad_norm": 1.7203469676313992, "learning_rate": 4.603452184577126e-06, "loss": 0.8656, "step": 5019 }, { "epoch": 0.54, "grad_norm": 1.8109647475423194, "learning_rate": 4.6017166196776795e-06, "loss": 0.9596, "step": 5020 }, { "epoch": 0.54, "grad_norm": 1.7598116607320102, "learning_rate": 4.599981103071465e-06, "loss": 0.9201, "step": 5021 }, { "epoch": 0.54, "grad_norm": 1.9637712383167458, "learning_rate": 4.5982456349689214e-06, "loss": 0.8877, "step": 5022 }, { "epoch": 0.54, "grad_norm": 1.7302577737454343, "learning_rate": 4.59651021558048e-06, "loss": 0.8767, "step": 5023 }, { "epoch": 0.54, "grad_norm": 1.7615645976647443, "learning_rate": 4.594774845116565e-06, "loss": 0.9147, "step": 5024 }, { "epoch": 0.54, "grad_norm": 1.7234266576942547, "learning_rate": 4.5930395237875986e-06, "loss": 0.9438, "step": 5025 }, { "epoch": 0.54, "grad_norm": 1.6561627694704253, "learning_rate": 4.5913042518039925e-06, "loss": 0.9181, "step": 5026 }, { "epoch": 0.54, "grad_norm": 1.7235208764288112, "learning_rate": 4.589569029376153e-06, "loss": 0.9335, "step": 5027 }, { "epoch": 0.54, "grad_norm": 1.7372581847311495, "learning_rate": 4.587833856714486e-06, "loss": 0.8573, "step": 5028 }, { "epoch": 0.54, "grad_norm": 1.7861011274551326, "learning_rate": 4.586098734029384e-06, "loss": 0.8677, "step": 5029 }, { "epoch": 0.54, "grad_norm": 1.753058950019016, "learning_rate": 4.5843636615312395e-06, "loss": 0.8736, "step": 5030 }, { "epoch": 0.54, "grad_norm": 1.6487596252175827, "learning_rate": 4.582628639430432e-06, "loss": 0.8226, "step": 5031 }, { "epoch": 0.54, "grad_norm": 1.926340413471015, "learning_rate": 4.58089366793734e-06, "loss": 0.9699, "step": 5032 }, { "epoch": 0.54, "grad_norm": 0.9302532562829554, "learning_rate": 4.579158747262337e-06, "loss": 1.0428, "step": 5033 }, { "epoch": 0.54, "grad_norm": 1.7067057246929194, "learning_rate": 4.577423877615787e-06, "loss": 0.8932, "step": 5034 }, { "epoch": 0.54, "grad_norm": 1.72422945801201, "learning_rate": 4.575689059208048e-06, "loss": 0.9247, 
"step": 5035 }, { "epoch": 0.54, "grad_norm": 1.669732234196792, "learning_rate": 4.573954292249472e-06, "loss": 0.9102, "step": 5036 }, { "epoch": 0.54, "grad_norm": 1.6499808331322126, "learning_rate": 4.572219576950408e-06, "loss": 0.9287, "step": 5037 }, { "epoch": 0.54, "grad_norm": 1.7047732945569647, "learning_rate": 4.570484913521196e-06, "loss": 0.9072, "step": 5038 }, { "epoch": 0.54, "grad_norm": 1.7275377215426253, "learning_rate": 4.568750302172168e-06, "loss": 0.9277, "step": 5039 }, { "epoch": 0.54, "grad_norm": 0.8483489524248907, "learning_rate": 4.5670157431136546e-06, "loss": 1.0403, "step": 5040 }, { "epoch": 0.54, "grad_norm": 1.777735679661856, "learning_rate": 4.565281236555973e-06, "loss": 0.8819, "step": 5041 }, { "epoch": 0.54, "grad_norm": 2.039740631661275, "learning_rate": 4.56354678270944e-06, "loss": 0.8272, "step": 5042 }, { "epoch": 0.54, "grad_norm": 1.7523403278141951, "learning_rate": 4.561812381784366e-06, "loss": 0.9625, "step": 5043 }, { "epoch": 0.54, "grad_norm": 1.8388913211727835, "learning_rate": 4.560078033991049e-06, "loss": 0.8953, "step": 5044 }, { "epoch": 0.54, "grad_norm": 1.7789843053556418, "learning_rate": 4.558343739539788e-06, "loss": 0.9269, "step": 5045 }, { "epoch": 0.54, "grad_norm": 1.7654709345697694, "learning_rate": 4.556609498640872e-06, "loss": 0.8727, "step": 5046 }, { "epoch": 0.54, "grad_norm": 1.7818276966848035, "learning_rate": 4.554875311504581e-06, "loss": 0.8723, "step": 5047 }, { "epoch": 0.54, "grad_norm": 1.697932740364964, "learning_rate": 4.553141178341195e-06, "loss": 0.9863, "step": 5048 }, { "epoch": 0.54, "grad_norm": 1.6824305633795558, "learning_rate": 4.551407099360981e-06, "loss": 0.9704, "step": 5049 }, { "epoch": 0.54, "grad_norm": 1.7135854110353326, "learning_rate": 4.549673074774203e-06, "loss": 0.9283, "step": 5050 }, { "epoch": 0.54, "grad_norm": 0.9700543804637186, "learning_rate": 4.547939104791118e-06, "loss": 1.086, "step": 5051 }, { "epoch": 0.54, "grad_norm": 1.7852315824655227, "learning_rate": 4.546205189621974e-06, "loss": 0.9471, "step": 5052 }, { "epoch": 0.54, "grad_norm": 1.859643354853795, "learning_rate": 4.544471329477015e-06, "loss": 0.9284, "step": 5053 }, { "epoch": 0.54, "grad_norm": 1.6878635022987745, "learning_rate": 4.542737524566479e-06, "loss": 0.8686, "step": 5054 }, { "epoch": 0.54, "grad_norm": 1.8612253151187004, "learning_rate": 4.541003775100592e-06, "loss": 0.8968, "step": 5055 }, { "epoch": 0.54, "grad_norm": 1.7984457437776935, "learning_rate": 4.539270081289581e-06, "loss": 0.8612, "step": 5056 }, { "epoch": 0.54, "grad_norm": 1.7529287772734765, "learning_rate": 4.537536443343662e-06, "loss": 0.9172, "step": 5057 }, { "epoch": 0.54, "grad_norm": 0.9105040137339048, "learning_rate": 4.5358028614730424e-06, "loss": 1.057, "step": 5058 }, { "epoch": 0.54, "grad_norm": 1.8573565328816046, "learning_rate": 4.534069335887927e-06, "loss": 0.8773, "step": 5059 }, { "epoch": 0.54, "grad_norm": 1.6647024113720141, "learning_rate": 4.53233586679851e-06, "loss": 0.8925, "step": 5060 }, { "epoch": 0.54, "grad_norm": 1.6437603938106602, "learning_rate": 4.530602454414983e-06, "loss": 0.8836, "step": 5061 }, { "epoch": 0.54, "grad_norm": 1.6790031076641545, "learning_rate": 4.528869098947526e-06, "loss": 0.9063, "step": 5062 }, { "epoch": 0.54, "grad_norm": 1.7009224807102945, "learning_rate": 4.527135800606314e-06, "loss": 0.9241, "step": 5063 }, { "epoch": 0.54, "grad_norm": 1.8088811832890863, "learning_rate": 4.5254025596015175e-06, "loss": 0.937, "step": 5064 }, { 
"epoch": 0.54, "grad_norm": 1.796524387875531, "learning_rate": 4.523669376143296e-06, "loss": 0.9108, "step": 5065 }, { "epoch": 0.54, "grad_norm": 1.6868482056734595, "learning_rate": 4.5219362504418055e-06, "loss": 0.8425, "step": 5066 }, { "epoch": 0.54, "grad_norm": 1.7925136584320065, "learning_rate": 4.5202031827071915e-06, "loss": 0.8644, "step": 5067 }, { "epoch": 0.54, "grad_norm": 1.8086046454500984, "learning_rate": 4.518470173149597e-06, "loss": 0.8587, "step": 5068 }, { "epoch": 0.54, "grad_norm": 1.7350504706157457, "learning_rate": 4.5167372219791545e-06, "loss": 0.8693, "step": 5069 }, { "epoch": 0.55, "grad_norm": 1.7731682982512782, "learning_rate": 4.515004329405988e-06, "loss": 0.9511, "step": 5070 }, { "epoch": 0.55, "grad_norm": 1.8003066529177185, "learning_rate": 4.51327149564022e-06, "loss": 0.9252, "step": 5071 }, { "epoch": 0.55, "grad_norm": 1.7340718783537605, "learning_rate": 4.511538720891963e-06, "loss": 0.8949, "step": 5072 }, { "epoch": 0.55, "grad_norm": 0.8489959949332184, "learning_rate": 4.509806005371317e-06, "loss": 1.0348, "step": 5073 }, { "epoch": 0.55, "grad_norm": 1.704508161067822, "learning_rate": 4.508073349288384e-06, "loss": 0.9494, "step": 5074 }, { "epoch": 0.55, "grad_norm": 1.6918237325462866, "learning_rate": 4.506340752853253e-06, "loss": 0.9074, "step": 5075 }, { "epoch": 0.55, "grad_norm": 1.8087203927931619, "learning_rate": 4.5046082162760076e-06, "loss": 0.9719, "step": 5076 }, { "epoch": 0.55, "grad_norm": 1.9106539094256585, "learning_rate": 4.502875739766725e-06, "loss": 0.9012, "step": 5077 }, { "epoch": 0.55, "grad_norm": 1.691077191373585, "learning_rate": 4.50114332353547e-06, "loss": 0.9144, "step": 5078 }, { "epoch": 0.55, "grad_norm": 1.6532460182826971, "learning_rate": 4.499410967792309e-06, "loss": 0.8378, "step": 5079 }, { "epoch": 0.55, "grad_norm": 1.7335520151124657, "learning_rate": 4.497678672747294e-06, "loss": 0.884, "step": 5080 }, { "epoch": 0.55, "grad_norm": 1.8326739353913888, "learning_rate": 4.49594643861047e-06, "loss": 0.9138, "step": 5081 }, { "epoch": 0.55, "grad_norm": 1.641661747628651, "learning_rate": 4.49421426559188e-06, "loss": 0.8142, "step": 5082 }, { "epoch": 0.55, "grad_norm": 1.801576561749422, "learning_rate": 4.492482153901554e-06, "loss": 0.9036, "step": 5083 }, { "epoch": 0.55, "grad_norm": 1.8114245631584247, "learning_rate": 4.490750103749516e-06, "loss": 0.9086, "step": 5084 }, { "epoch": 0.55, "grad_norm": 1.79377208638708, "learning_rate": 4.489018115345784e-06, "loss": 0.8882, "step": 5085 }, { "epoch": 0.55, "grad_norm": 0.8734219073654627, "learning_rate": 4.487286188900365e-06, "loss": 1.0784, "step": 5086 }, { "epoch": 0.55, "grad_norm": 1.7494986004574615, "learning_rate": 4.485554324623266e-06, "loss": 0.947, "step": 5087 }, { "epoch": 0.55, "grad_norm": 1.8168018163742186, "learning_rate": 4.483822522724477e-06, "loss": 0.8659, "step": 5088 }, { "epoch": 0.55, "grad_norm": 1.8926026892436059, "learning_rate": 4.4820907834139865e-06, "loss": 0.9076, "step": 5089 }, { "epoch": 0.55, "grad_norm": 0.856481874122231, "learning_rate": 4.480359106901775e-06, "loss": 1.0331, "step": 5090 }, { "epoch": 0.55, "grad_norm": 1.6718827012367594, "learning_rate": 4.478627493397813e-06, "loss": 0.8972, "step": 5091 }, { "epoch": 0.55, "grad_norm": 1.7502436528718952, "learning_rate": 4.476895943112064e-06, "loss": 0.8984, "step": 5092 }, { "epoch": 0.55, "grad_norm": 1.726810174230518, "learning_rate": 4.475164456254488e-06, "loss": 0.8437, "step": 5093 }, { "epoch": 0.55, 
"grad_norm": 0.8159787995132556, "learning_rate": 4.473433033035028e-06, "loss": 1.0656, "step": 5094 }, { "epoch": 0.55, "grad_norm": 1.790334279965691, "learning_rate": 4.4717016736636295e-06, "loss": 0.9884, "step": 5095 }, { "epoch": 0.55, "grad_norm": 1.7655706367790471, "learning_rate": 4.469970378350225e-06, "loss": 0.9905, "step": 5096 }, { "epoch": 0.55, "grad_norm": 0.7998595206612015, "learning_rate": 4.468239147304737e-06, "loss": 1.0056, "step": 5097 }, { "epoch": 0.55, "grad_norm": 1.6842333121518265, "learning_rate": 4.466507980737087e-06, "loss": 0.8824, "step": 5098 }, { "epoch": 0.55, "grad_norm": 1.6970587530288117, "learning_rate": 4.464776878857184e-06, "loss": 0.9723, "step": 5099 }, { "epoch": 0.55, "grad_norm": 1.6795141726325977, "learning_rate": 4.46304584187493e-06, "loss": 0.8496, "step": 5100 }, { "epoch": 0.55, "grad_norm": 2.1365707729525605, "learning_rate": 4.461314870000218e-06, "loss": 0.9222, "step": 5101 }, { "epoch": 0.55, "grad_norm": 1.7199809337766478, "learning_rate": 4.459583963442935e-06, "loss": 0.8673, "step": 5102 }, { "epoch": 0.55, "grad_norm": 1.7547711751982245, "learning_rate": 4.45785312241296e-06, "loss": 0.9287, "step": 5103 }, { "epoch": 0.55, "grad_norm": 1.6895454250056332, "learning_rate": 4.456122347120165e-06, "loss": 0.8547, "step": 5104 }, { "epoch": 0.55, "grad_norm": 1.6872180361089455, "learning_rate": 4.454391637774408e-06, "loss": 0.8644, "step": 5105 }, { "epoch": 0.55, "grad_norm": 1.6061034337759328, "learning_rate": 4.452660994585546e-06, "loss": 0.8151, "step": 5106 }, { "epoch": 0.55, "grad_norm": 1.615189873116892, "learning_rate": 4.450930417763425e-06, "loss": 0.8286, "step": 5107 }, { "epoch": 0.55, "grad_norm": 1.7012834249162005, "learning_rate": 4.449199907517885e-06, "loss": 0.9111, "step": 5108 }, { "epoch": 0.55, "grad_norm": 1.7212794677857668, "learning_rate": 4.447469464058753e-06, "loss": 0.8681, "step": 5109 }, { "epoch": 0.55, "grad_norm": 1.7867010508905863, "learning_rate": 4.445739087595855e-06, "loss": 0.8291, "step": 5110 }, { "epoch": 0.55, "grad_norm": 1.8086509906893826, "learning_rate": 4.444008778339003e-06, "loss": 1.0169, "step": 5111 }, { "epoch": 0.55, "grad_norm": 1.7267090826304217, "learning_rate": 4.4422785364980036e-06, "loss": 1.0024, "step": 5112 }, { "epoch": 0.55, "grad_norm": 1.7105640728394493, "learning_rate": 4.440548362282655e-06, "loss": 0.9026, "step": 5113 }, { "epoch": 0.55, "grad_norm": 1.8023257963659827, "learning_rate": 4.438818255902746e-06, "loss": 0.82, "step": 5114 }, { "epoch": 0.55, "grad_norm": 1.7501616753182352, "learning_rate": 4.437088217568059e-06, "loss": 0.9549, "step": 5115 }, { "epoch": 0.55, "grad_norm": 1.794981692531099, "learning_rate": 4.435358247488365e-06, "loss": 0.8704, "step": 5116 }, { "epoch": 0.55, "grad_norm": 1.9421394918310673, "learning_rate": 4.433628345873429e-06, "loss": 0.9005, "step": 5117 }, { "epoch": 0.55, "grad_norm": 1.7184590208842059, "learning_rate": 4.431898512933011e-06, "loss": 0.8938, "step": 5118 }, { "epoch": 0.55, "grad_norm": 1.766872714664663, "learning_rate": 4.430168748876855e-06, "loss": 0.9176, "step": 5119 }, { "epoch": 0.55, "grad_norm": 1.7305551056954995, "learning_rate": 4.428439053914702e-06, "loss": 0.9304, "step": 5120 }, { "epoch": 0.55, "grad_norm": 1.7510315489012667, "learning_rate": 4.426709428256287e-06, "loss": 0.9097, "step": 5121 }, { "epoch": 0.55, "grad_norm": 1.6834922885973587, "learning_rate": 4.424979872111329e-06, "loss": 0.8831, "step": 5122 }, { "epoch": 0.55, "grad_norm": 
1.717667872654713, "learning_rate": 4.423250385689543e-06, "loss": 0.9342, "step": 5123 }, { "epoch": 0.55, "grad_norm": 1.6060837868231885, "learning_rate": 4.421520969200637e-06, "loss": 0.8526, "step": 5124 }, { "epoch": 0.55, "grad_norm": 1.8606167282444606, "learning_rate": 4.419791622854309e-06, "loss": 0.8937, "step": 5125 }, { "epoch": 0.55, "grad_norm": 1.636562296784056, "learning_rate": 4.4180623468602455e-06, "loss": 0.8981, "step": 5126 }, { "epoch": 0.55, "grad_norm": 1.9406668728685015, "learning_rate": 4.416333141428129e-06, "loss": 0.9289, "step": 5127 }, { "epoch": 0.55, "grad_norm": 1.8103236328544385, "learning_rate": 4.414604006767631e-06, "loss": 0.9228, "step": 5128 }, { "epoch": 0.55, "grad_norm": 1.7720775706426872, "learning_rate": 4.412874943088416e-06, "loss": 0.866, "step": 5129 }, { "epoch": 0.55, "grad_norm": 1.5842441371802278, "learning_rate": 4.411145950600137e-06, "loss": 0.8817, "step": 5130 }, { "epoch": 0.55, "grad_norm": 1.743180288580314, "learning_rate": 4.4094170295124426e-06, "loss": 0.8885, "step": 5131 }, { "epoch": 0.55, "grad_norm": 1.6612511777810255, "learning_rate": 4.407688180034969e-06, "loss": 0.9288, "step": 5132 }, { "epoch": 0.55, "grad_norm": 2.003867326308389, "learning_rate": 4.405959402377345e-06, "loss": 0.8728, "step": 5133 }, { "epoch": 0.55, "grad_norm": 1.7689019387066054, "learning_rate": 4.4042306967491935e-06, "loss": 0.8969, "step": 5134 }, { "epoch": 0.55, "grad_norm": 1.6747953164820495, "learning_rate": 4.402502063360121e-06, "loss": 0.9634, "step": 5135 }, { "epoch": 0.55, "grad_norm": 1.8065807282744992, "learning_rate": 4.400773502419738e-06, "loss": 0.8834, "step": 5136 }, { "epoch": 0.55, "grad_norm": 1.8643115266123131, "learning_rate": 4.39904501413763e-06, "loss": 0.9427, "step": 5137 }, { "epoch": 0.55, "grad_norm": 1.7071078207940897, "learning_rate": 4.397316598723385e-06, "loss": 0.9812, "step": 5138 }, { "epoch": 0.55, "grad_norm": 0.8716453798864132, "learning_rate": 4.395588256386583e-06, "loss": 1.0674, "step": 5139 }, { "epoch": 0.55, "grad_norm": 1.6851216270952745, "learning_rate": 4.393859987336786e-06, "loss": 0.8752, "step": 5140 }, { "epoch": 0.55, "grad_norm": 1.588699914484462, "learning_rate": 4.392131791783557e-06, "loss": 0.9368, "step": 5141 }, { "epoch": 0.55, "grad_norm": 1.7913208980145345, "learning_rate": 4.390403669936443e-06, "loss": 0.8956, "step": 5142 }, { "epoch": 0.55, "grad_norm": 1.7413994973170699, "learning_rate": 4.3886756220049855e-06, "loss": 0.9145, "step": 5143 }, { "epoch": 0.55, "grad_norm": 1.7590427397202708, "learning_rate": 4.386947648198717e-06, "loss": 0.9279, "step": 5144 }, { "epoch": 0.55, "grad_norm": 1.6774553615689793, "learning_rate": 4.38521974872716e-06, "loss": 0.8735, "step": 5145 }, { "epoch": 0.55, "grad_norm": 1.7683266627589713, "learning_rate": 4.383491923799828e-06, "loss": 0.9771, "step": 5146 }, { "epoch": 0.55, "grad_norm": 1.7835347423256822, "learning_rate": 4.3817641736262255e-06, "loss": 0.943, "step": 5147 }, { "epoch": 0.55, "grad_norm": 1.7335684566555338, "learning_rate": 4.380036498415847e-06, "loss": 0.9028, "step": 5148 }, { "epoch": 0.55, "grad_norm": 1.7623361422228212, "learning_rate": 4.3783088983781815e-06, "loss": 0.9006, "step": 5149 }, { "epoch": 0.55, "grad_norm": 1.667910454355491, "learning_rate": 4.376581373722705e-06, "loss": 0.9683, "step": 5150 }, { "epoch": 0.55, "grad_norm": 1.6890897833543494, "learning_rate": 4.374853924658886e-06, "loss": 0.9259, "step": 5151 }, { "epoch": 0.55, "grad_norm": 1.8324155765875638, 
"learning_rate": 4.373126551396184e-06, "loss": 0.9931, "step": 5152 }, { "epoch": 0.55, "grad_norm": 1.7760150839671658, "learning_rate": 4.37139925414405e-06, "loss": 0.9249, "step": 5153 }, { "epoch": 0.55, "grad_norm": 1.7687630531632523, "learning_rate": 4.369672033111921e-06, "loss": 0.8591, "step": 5154 }, { "epoch": 0.55, "grad_norm": 0.8361678030774675, "learning_rate": 4.367944888509233e-06, "loss": 1.0678, "step": 5155 }, { "epoch": 0.55, "grad_norm": 1.7425093412631631, "learning_rate": 4.366217820545407e-06, "loss": 0.9046, "step": 5156 }, { "epoch": 0.55, "grad_norm": 1.7248029458744072, "learning_rate": 4.364490829429855e-06, "loss": 0.9018, "step": 5157 }, { "epoch": 0.55, "grad_norm": 1.7307422116002074, "learning_rate": 4.36276391537198e-06, "loss": 0.8693, "step": 5158 }, { "epoch": 0.55, "grad_norm": 1.8169200886809083, "learning_rate": 4.361037078581176e-06, "loss": 0.9446, "step": 5159 }, { "epoch": 0.55, "grad_norm": 1.6424990056199789, "learning_rate": 4.35931031926683e-06, "loss": 0.8716, "step": 5160 }, { "epoch": 0.55, "grad_norm": 1.7146585119890985, "learning_rate": 4.3575836376383175e-06, "loss": 0.9342, "step": 5161 }, { "epoch": 0.55, "grad_norm": 1.8211842445113322, "learning_rate": 4.3558570339050034e-06, "loss": 0.8531, "step": 5162 }, { "epoch": 0.56, "grad_norm": 1.7643228658517895, "learning_rate": 4.354130508276243e-06, "loss": 0.8839, "step": 5163 }, { "epoch": 0.56, "grad_norm": 1.7203456285505647, "learning_rate": 4.352404060961387e-06, "loss": 0.9007, "step": 5164 }, { "epoch": 0.56, "grad_norm": 0.8287165920418696, "learning_rate": 4.3506776921697705e-06, "loss": 1.0459, "step": 5165 }, { "epoch": 0.56, "grad_norm": 1.7961573720899304, "learning_rate": 4.3489514021107215e-06, "loss": 0.9529, "step": 5166 }, { "epoch": 0.56, "grad_norm": 1.7896551508395213, "learning_rate": 4.3472251909935635e-06, "loss": 0.9251, "step": 5167 }, { "epoch": 0.56, "grad_norm": 1.729208088159677, "learning_rate": 4.3454990590275966e-06, "loss": 0.8767, "step": 5168 }, { "epoch": 0.56, "grad_norm": 1.7266909598363769, "learning_rate": 4.343773006422128e-06, "loss": 0.8664, "step": 5169 }, { "epoch": 0.56, "grad_norm": 0.807781761334988, "learning_rate": 4.342047033386444e-06, "loss": 1.0587, "step": 5170 }, { "epoch": 0.56, "grad_norm": 1.670830287439024, "learning_rate": 4.3403211401298245e-06, "loss": 0.928, "step": 5171 }, { "epoch": 0.56, "grad_norm": 1.8339197640703069, "learning_rate": 4.338595326861542e-06, "loss": 0.9487, "step": 5172 }, { "epoch": 0.56, "grad_norm": 1.8627351602846731, "learning_rate": 4.336869593790857e-06, "loss": 0.9384, "step": 5173 }, { "epoch": 0.56, "grad_norm": 1.8474917374652085, "learning_rate": 4.335143941127018e-06, "loss": 0.905, "step": 5174 }, { "epoch": 0.56, "grad_norm": 1.8206866551215968, "learning_rate": 4.3334183690792695e-06, "loss": 0.8765, "step": 5175 }, { "epoch": 0.56, "grad_norm": 1.7065313109958729, "learning_rate": 4.3316928778568414e-06, "loss": 0.9527, "step": 5176 }, { "epoch": 0.56, "grad_norm": 1.6633947106582345, "learning_rate": 4.329967467668955e-06, "loss": 0.8799, "step": 5177 }, { "epoch": 0.56, "grad_norm": 1.7616176335129183, "learning_rate": 4.328242138724827e-06, "loss": 0.9171, "step": 5178 }, { "epoch": 0.56, "grad_norm": 1.7436189515475435, "learning_rate": 4.326516891233652e-06, "loss": 0.8841, "step": 5179 }, { "epoch": 0.56, "grad_norm": 1.8051649463451014, "learning_rate": 4.324791725404627e-06, "loss": 0.8725, "step": 5180 }, { "epoch": 0.56, "grad_norm": 1.7442729286939758, 
"learning_rate": 4.3230666414469325e-06, "loss": 0.925, "step": 5181 }, { "epoch": 0.56, "grad_norm": 0.8548427192518901, "learning_rate": 4.321341639569741e-06, "loss": 1.0145, "step": 5182 }, { "epoch": 0.56, "grad_norm": 1.7691357110891661, "learning_rate": 4.3196167199822155e-06, "loss": 0.8764, "step": 5183 }, { "epoch": 0.56, "grad_norm": 1.557931660663016, "learning_rate": 4.317891882893509e-06, "loss": 0.8624, "step": 5184 }, { "epoch": 0.56, "grad_norm": 1.7411703071822364, "learning_rate": 4.316167128512763e-06, "loss": 0.8814, "step": 5185 }, { "epoch": 0.56, "grad_norm": 1.7381190135608173, "learning_rate": 4.314442457049111e-06, "loss": 0.8998, "step": 5186 }, { "epoch": 0.56, "grad_norm": 1.8231275008124754, "learning_rate": 4.312717868711674e-06, "loss": 0.8584, "step": 5187 }, { "epoch": 0.56, "grad_norm": 1.8584119714062202, "learning_rate": 4.310993363709563e-06, "loss": 0.9113, "step": 5188 }, { "epoch": 0.56, "grad_norm": 1.8427930562903077, "learning_rate": 4.309268942251887e-06, "loss": 0.8443, "step": 5189 }, { "epoch": 0.56, "grad_norm": 1.7063623795224583, "learning_rate": 4.307544604547729e-06, "loss": 0.9114, "step": 5190 }, { "epoch": 0.56, "grad_norm": 1.681711518505076, "learning_rate": 4.305820350806176e-06, "loss": 0.82, "step": 5191 }, { "epoch": 0.56, "grad_norm": 0.8787990996744685, "learning_rate": 4.304096181236299e-06, "loss": 1.0272, "step": 5192 }, { "epoch": 0.56, "grad_norm": 1.8592710712841123, "learning_rate": 4.302372096047157e-06, "loss": 0.8113, "step": 5193 }, { "epoch": 0.56, "grad_norm": 2.834612918126684, "learning_rate": 4.300648095447806e-06, "loss": 0.9349, "step": 5194 }, { "epoch": 0.56, "grad_norm": 1.8008788753913036, "learning_rate": 4.298924179647283e-06, "loss": 0.9652, "step": 5195 }, { "epoch": 0.56, "grad_norm": 1.7702943002910667, "learning_rate": 4.297200348854621e-06, "loss": 0.9184, "step": 5196 }, { "epoch": 0.56, "grad_norm": 1.7864370409296657, "learning_rate": 4.295476603278839e-06, "loss": 0.9319, "step": 5197 }, { "epoch": 0.56, "grad_norm": 1.7360170364285181, "learning_rate": 4.293752943128948e-06, "loss": 0.7534, "step": 5198 }, { "epoch": 0.56, "grad_norm": 1.761449119024601, "learning_rate": 4.2920293686139485e-06, "loss": 0.8241, "step": 5199 }, { "epoch": 0.56, "grad_norm": 1.787800964764351, "learning_rate": 4.290305879942828e-06, "loss": 0.8716, "step": 5200 }, { "epoch": 0.56, "grad_norm": 1.768048893719742, "learning_rate": 4.288582477324567e-06, "loss": 0.8628, "step": 5201 }, { "epoch": 0.56, "grad_norm": 1.7747744929116274, "learning_rate": 4.286859160968131e-06, "loss": 0.961, "step": 5202 }, { "epoch": 0.56, "grad_norm": 1.7490486130252179, "learning_rate": 4.285135931082481e-06, "loss": 0.8395, "step": 5203 }, { "epoch": 0.56, "grad_norm": 1.6255006554610352, "learning_rate": 4.283412787876565e-06, "loss": 0.8423, "step": 5204 }, { "epoch": 0.56, "grad_norm": 1.7152960916147142, "learning_rate": 4.281689731559318e-06, "loss": 0.9065, "step": 5205 }, { "epoch": 0.56, "grad_norm": 1.5900430902540859, "learning_rate": 4.279966762339668e-06, "loss": 0.8583, "step": 5206 }, { "epoch": 0.56, "grad_norm": 0.793893928445111, "learning_rate": 4.27824388042653e-06, "loss": 1.045, "step": 5207 }, { "epoch": 0.56, "grad_norm": 1.7378023974690229, "learning_rate": 4.276521086028809e-06, "loss": 0.8462, "step": 5208 }, { "epoch": 0.56, "grad_norm": 1.713279240195725, "learning_rate": 4.274798379355403e-06, "loss": 0.9101, "step": 5209 }, { "epoch": 0.56, "grad_norm": 1.6802263666887662, "learning_rate": 
4.273075760615193e-06, "loss": 0.8951, "step": 5210 }, { "epoch": 0.56, "grad_norm": 1.8694385822561155, "learning_rate": 4.271353230017053e-06, "loss": 0.9101, "step": 5211 }, { "epoch": 0.56, "grad_norm": 1.8160688246512284, "learning_rate": 4.269630787769845e-06, "loss": 0.8517, "step": 5212 }, { "epoch": 0.56, "grad_norm": 1.760416692276652, "learning_rate": 4.2679084340824215e-06, "loss": 0.9636, "step": 5213 }, { "epoch": 0.56, "grad_norm": 1.7697291866185991, "learning_rate": 4.266186169163625e-06, "loss": 0.8994, "step": 5214 }, { "epoch": 0.56, "grad_norm": 1.5815943711868883, "learning_rate": 4.264463993222286e-06, "loss": 0.8798, "step": 5215 }, { "epoch": 0.56, "grad_norm": 0.8059696951057306, "learning_rate": 4.262741906467221e-06, "loss": 1.0339, "step": 5216 }, { "epoch": 0.56, "grad_norm": 1.7588830107980258, "learning_rate": 4.261019909107243e-06, "loss": 0.9098, "step": 5217 }, { "epoch": 0.56, "grad_norm": 1.6149628206228688, "learning_rate": 4.25929800135115e-06, "loss": 0.8407, "step": 5218 }, { "epoch": 0.56, "grad_norm": 1.679149652430536, "learning_rate": 4.257576183407726e-06, "loss": 0.8474, "step": 5219 }, { "epoch": 0.56, "grad_norm": 1.742732141119367, "learning_rate": 4.255854455485753e-06, "loss": 0.8758, "step": 5220 }, { "epoch": 0.56, "grad_norm": 1.7666383634761624, "learning_rate": 4.254132817793989e-06, "loss": 0.9133, "step": 5221 }, { "epoch": 0.56, "grad_norm": 1.7756314083981366, "learning_rate": 4.252411270541194e-06, "loss": 0.9224, "step": 5222 }, { "epoch": 0.56, "grad_norm": 1.5902194035921124, "learning_rate": 4.250689813936109e-06, "loss": 0.9309, "step": 5223 }, { "epoch": 0.56, "grad_norm": 1.6980404182673787, "learning_rate": 4.248968448187466e-06, "loss": 0.9574, "step": 5224 }, { "epoch": 0.56, "grad_norm": 1.7621367950409685, "learning_rate": 4.247247173503989e-06, "loss": 0.9441, "step": 5225 }, { "epoch": 0.56, "grad_norm": 1.8136494441005555, "learning_rate": 4.245525990094388e-06, "loss": 0.9271, "step": 5226 }, { "epoch": 0.56, "grad_norm": 1.672629940935499, "learning_rate": 4.2438048981673615e-06, "loss": 0.8321, "step": 5227 }, { "epoch": 0.56, "grad_norm": 1.7013877565767463, "learning_rate": 4.242083897931597e-06, "loss": 0.9009, "step": 5228 }, { "epoch": 0.56, "grad_norm": 1.6390775331908634, "learning_rate": 4.240362989595775e-06, "loss": 0.7961, "step": 5229 }, { "epoch": 0.56, "grad_norm": 0.8584400988238313, "learning_rate": 4.2386421733685575e-06, "loss": 1.0467, "step": 5230 }, { "epoch": 0.56, "grad_norm": 1.7093473916504491, "learning_rate": 4.236921449458603e-06, "loss": 0.9069, "step": 5231 }, { "epoch": 0.56, "grad_norm": 1.8003714341785333, "learning_rate": 4.235200818074553e-06, "loss": 0.8582, "step": 5232 }, { "epoch": 0.56, "grad_norm": 1.708074517742616, "learning_rate": 4.233480279425039e-06, "loss": 0.9642, "step": 5233 }, { "epoch": 0.56, "grad_norm": 1.6392092686386484, "learning_rate": 4.231759833718685e-06, "loss": 0.8667, "step": 5234 }, { "epoch": 0.56, "grad_norm": 1.893148663871309, "learning_rate": 4.2300394811641e-06, "loss": 0.9856, "step": 5235 }, { "epoch": 0.56, "grad_norm": 1.6562070741872026, "learning_rate": 4.22831922196988e-06, "loss": 0.9097, "step": 5236 }, { "epoch": 0.56, "grad_norm": 1.6410693515599672, "learning_rate": 4.2265990563446165e-06, "loss": 0.829, "step": 5237 }, { "epoch": 0.56, "grad_norm": 1.8069994993695149, "learning_rate": 4.224878984496884e-06, "loss": 0.9431, "step": 5238 }, { "epoch": 0.56, "grad_norm": 1.6388830579051825, "learning_rate": 4.223159006635245e-06, 
"loss": 0.8409, "step": 5239 }, { "epoch": 0.56, "grad_norm": 1.7438704241435903, "learning_rate": 4.221439122968256e-06, "loss": 0.8999, "step": 5240 }, { "epoch": 0.56, "grad_norm": 1.682974406713379, "learning_rate": 4.219719333704459e-06, "loss": 0.8671, "step": 5241 }, { "epoch": 0.56, "grad_norm": 1.6509525831181275, "learning_rate": 4.217999639052382e-06, "loss": 0.9191, "step": 5242 }, { "epoch": 0.56, "grad_norm": 1.7075361037311823, "learning_rate": 4.216280039220544e-06, "loss": 0.947, "step": 5243 }, { "epoch": 0.56, "grad_norm": 1.7635450525790939, "learning_rate": 4.214560534417453e-06, "loss": 0.8893, "step": 5244 }, { "epoch": 0.56, "grad_norm": 0.8270582442764346, "learning_rate": 4.2128411248516055e-06, "loss": 1.0316, "step": 5245 }, { "epoch": 0.56, "grad_norm": 1.7337356955959509, "learning_rate": 4.211121810731485e-06, "loss": 0.9692, "step": 5246 }, { "epoch": 0.56, "grad_norm": 1.7301010623673383, "learning_rate": 4.209402592265564e-06, "loss": 0.8359, "step": 5247 }, { "epoch": 0.56, "grad_norm": 1.7427658572906044, "learning_rate": 4.207683469662306e-06, "loss": 0.8248, "step": 5248 }, { "epoch": 0.56, "grad_norm": 1.7475329097881946, "learning_rate": 4.2059644431301575e-06, "loss": 0.8939, "step": 5249 }, { "epoch": 0.56, "grad_norm": 1.8781578509375443, "learning_rate": 4.204245512877557e-06, "loss": 0.9115, "step": 5250 }, { "epoch": 0.56, "grad_norm": 1.706616050941255, "learning_rate": 4.202526679112932e-06, "loss": 0.9613, "step": 5251 }, { "epoch": 0.56, "grad_norm": 6.825120608831123, "learning_rate": 4.200807942044699e-06, "loss": 1.0615, "step": 5252 }, { "epoch": 0.56, "grad_norm": 1.8224766992448589, "learning_rate": 4.199089301881256e-06, "loss": 0.9045, "step": 5253 }, { "epoch": 0.56, "grad_norm": 0.830492760241334, "learning_rate": 4.197370758830998e-06, "loss": 1.0746, "step": 5254 }, { "epoch": 0.56, "grad_norm": 1.6651264855953978, "learning_rate": 4.1956523131023e-06, "loss": 0.9037, "step": 5255 }, { "epoch": 0.57, "grad_norm": 1.7690287648234613, "learning_rate": 4.193933964903532e-06, "loss": 1.0082, "step": 5256 }, { "epoch": 0.57, "grad_norm": 1.7066958286915905, "learning_rate": 4.192215714443052e-06, "loss": 0.8032, "step": 5257 }, { "epoch": 0.57, "grad_norm": 0.7812540058642454, "learning_rate": 4.1904975619292e-06, "loss": 1.083, "step": 5258 }, { "epoch": 0.57, "grad_norm": 1.6889118183485556, "learning_rate": 4.18877950757031e-06, "loss": 0.9161, "step": 5259 }, { "epoch": 0.57, "grad_norm": 1.7413941419707901, "learning_rate": 4.187061551574701e-06, "loss": 0.8918, "step": 5260 }, { "epoch": 0.57, "grad_norm": 1.71968704328864, "learning_rate": 4.185343694150682e-06, "loss": 0.912, "step": 5261 }, { "epoch": 0.57, "grad_norm": 1.7374996491477825, "learning_rate": 4.1836259355065475e-06, "loss": 0.8983, "step": 5262 }, { "epoch": 0.57, "grad_norm": 1.68618223079258, "learning_rate": 4.181908275850586e-06, "loss": 0.8588, "step": 5263 }, { "epoch": 0.57, "grad_norm": 1.742417569120698, "learning_rate": 4.180190715391063e-06, "loss": 0.8876, "step": 5264 }, { "epoch": 0.57, "grad_norm": 1.7940490575975223, "learning_rate": 4.178473254336243e-06, "loss": 0.9172, "step": 5265 }, { "epoch": 0.57, "grad_norm": 1.7589492760254664, "learning_rate": 4.176755892894374e-06, "loss": 0.9245, "step": 5266 }, { "epoch": 0.57, "grad_norm": 1.7816306368295989, "learning_rate": 4.175038631273689e-06, "loss": 0.8826, "step": 5267 }, { "epoch": 0.57, "grad_norm": 1.7210342204333662, "learning_rate": 4.173321469682416e-06, "loss": 0.8767, "step": 5268 
}, { "epoch": 0.57, "grad_norm": 1.7770498719753853, "learning_rate": 4.171604408328765e-06, "loss": 0.8854, "step": 5269 }, { "epoch": 0.57, "grad_norm": 1.6502633300170177, "learning_rate": 4.1698874474209326e-06, "loss": 0.8413, "step": 5270 }, { "epoch": 0.57, "grad_norm": 1.774845626693493, "learning_rate": 4.1681705871671114e-06, "loss": 0.9341, "step": 5271 }, { "epoch": 0.57, "grad_norm": 1.6143898512427932, "learning_rate": 4.166453827775474e-06, "loss": 0.8462, "step": 5272 }, { "epoch": 0.57, "grad_norm": 1.639064417317491, "learning_rate": 4.164737169454185e-06, "loss": 0.9215, "step": 5273 }, { "epoch": 0.57, "grad_norm": 1.7448598031634466, "learning_rate": 4.1630206124113925e-06, "loss": 0.9223, "step": 5274 }, { "epoch": 0.57, "grad_norm": 1.6707938299038103, "learning_rate": 4.161304156855235e-06, "loss": 0.8026, "step": 5275 }, { "epoch": 0.57, "grad_norm": 0.809860532579585, "learning_rate": 4.159587802993842e-06, "loss": 1.0788, "step": 5276 }, { "epoch": 0.57, "grad_norm": 1.7048574201050153, "learning_rate": 4.157871551035324e-06, "loss": 0.837, "step": 5277 }, { "epoch": 0.57, "grad_norm": 1.612646737377047, "learning_rate": 4.156155401187783e-06, "loss": 0.8403, "step": 5278 }, { "epoch": 0.57, "grad_norm": 1.7293804228530885, "learning_rate": 4.15443935365931e-06, "loss": 1.0311, "step": 5279 }, { "epoch": 0.57, "grad_norm": 1.6142847174363417, "learning_rate": 4.1527234086579806e-06, "loss": 0.8549, "step": 5280 }, { "epoch": 0.57, "grad_norm": 1.6839282269428308, "learning_rate": 4.151007566391857e-06, "loss": 0.9223, "step": 5281 }, { "epoch": 0.57, "grad_norm": 1.7869393887568636, "learning_rate": 4.149291827068995e-06, "loss": 0.8674, "step": 5282 }, { "epoch": 0.57, "grad_norm": 1.6733283506405559, "learning_rate": 4.1475761908974315e-06, "loss": 0.906, "step": 5283 }, { "epoch": 0.57, "grad_norm": 1.7413237377571351, "learning_rate": 4.145860658085194e-06, "loss": 0.8485, "step": 5284 }, { "epoch": 0.57, "grad_norm": 1.6651980843571728, "learning_rate": 4.1441452288402965e-06, "loss": 0.9071, "step": 5285 }, { "epoch": 0.57, "grad_norm": 1.8281197660592983, "learning_rate": 4.142429903370739e-06, "loss": 0.8432, "step": 5286 }, { "epoch": 0.57, "grad_norm": 0.8828844721689901, "learning_rate": 4.140714681884513e-06, "loss": 1.0507, "step": 5287 }, { "epoch": 0.57, "grad_norm": 1.6962764278338047, "learning_rate": 4.1389995645895944e-06, "loss": 0.8533, "step": 5288 }, { "epoch": 0.57, "grad_norm": 1.6363315630697153, "learning_rate": 4.137284551693946e-06, "loss": 0.8752, "step": 5289 }, { "epoch": 0.57, "grad_norm": 2.2826618340300784, "learning_rate": 4.13556964340552e-06, "loss": 0.8596, "step": 5290 }, { "epoch": 0.57, "grad_norm": 1.5801687066033614, "learning_rate": 4.133854839932257e-06, "loss": 0.8778, "step": 5291 }, { "epoch": 0.57, "grad_norm": 1.7752846244560878, "learning_rate": 4.132140141482079e-06, "loss": 0.9377, "step": 5292 }, { "epoch": 0.57, "grad_norm": 1.856282286775335, "learning_rate": 4.1304255482629e-06, "loss": 0.9673, "step": 5293 }, { "epoch": 0.57, "grad_norm": 1.7463158389264535, "learning_rate": 4.128711060482624e-06, "loss": 0.8253, "step": 5294 }, { "epoch": 0.57, "grad_norm": 1.6896901097649875, "learning_rate": 4.126996678349133e-06, "loss": 0.9309, "step": 5295 }, { "epoch": 0.57, "grad_norm": 1.7388497352295709, "learning_rate": 4.125282402070307e-06, "loss": 0.8577, "step": 5296 }, { "epoch": 0.57, "grad_norm": 1.779369865979201, "learning_rate": 4.1235682318540035e-06, "loss": 0.9778, "step": 5297 }, { "epoch": 0.57, 
"grad_norm": 1.7143787873112328, "learning_rate": 4.121854167908072e-06, "loss": 0.8964, "step": 5298 }, { "epoch": 0.57, "grad_norm": 1.8205646997135942, "learning_rate": 4.120140210440352e-06, "loss": 0.8473, "step": 5299 }, { "epoch": 0.57, "grad_norm": 1.8934999377280766, "learning_rate": 4.1184263596586645e-06, "loss": 0.8857, "step": 5300 }, { "epoch": 0.57, "grad_norm": 1.7875284365607609, "learning_rate": 4.11671261577082e-06, "loss": 0.9344, "step": 5301 }, { "epoch": 0.57, "grad_norm": 1.7845150020833727, "learning_rate": 4.114998978984616e-06, "loss": 0.9505, "step": 5302 }, { "epoch": 0.57, "grad_norm": 1.9029426742163396, "learning_rate": 4.113285449507837e-06, "loss": 0.9654, "step": 5303 }, { "epoch": 0.57, "grad_norm": 1.6917276209738963, "learning_rate": 4.111572027548253e-06, "loss": 0.9363, "step": 5304 }, { "epoch": 0.57, "grad_norm": 1.6825947045669538, "learning_rate": 4.109858713313628e-06, "loss": 0.9245, "step": 5305 }, { "epoch": 0.57, "grad_norm": 0.8093897149213569, "learning_rate": 4.108145507011698e-06, "loss": 1.0505, "step": 5306 }, { "epoch": 0.57, "grad_norm": 1.8126412203451248, "learning_rate": 4.1064324088502026e-06, "loss": 0.8707, "step": 5307 }, { "epoch": 0.57, "grad_norm": 1.859055294760676, "learning_rate": 4.104719419036857e-06, "loss": 0.9273, "step": 5308 }, { "epoch": 0.57, "grad_norm": 1.6781502075804846, "learning_rate": 4.1030065377793675e-06, "loss": 0.8983, "step": 5309 }, { "epoch": 0.57, "grad_norm": 1.583928067420168, "learning_rate": 4.101293765285429e-06, "loss": 0.8204, "step": 5310 }, { "epoch": 0.57, "grad_norm": 1.8910080153479412, "learning_rate": 4.09958110176272e-06, "loss": 0.945, "step": 5311 }, { "epoch": 0.57, "grad_norm": 0.8261901627628784, "learning_rate": 4.097868547418905e-06, "loss": 1.0557, "step": 5312 }, { "epoch": 0.57, "grad_norm": 1.6867258668078684, "learning_rate": 4.09615610246164e-06, "loss": 0.9062, "step": 5313 }, { "epoch": 0.57, "grad_norm": 1.811598382882989, "learning_rate": 4.0944437670985635e-06, "loss": 0.9409, "step": 5314 }, { "epoch": 0.57, "grad_norm": 1.6183886284606244, "learning_rate": 4.0927315415373016e-06, "loss": 0.8833, "step": 5315 }, { "epoch": 0.57, "grad_norm": 1.6883529564181532, "learning_rate": 4.091019425985471e-06, "loss": 0.8733, "step": 5316 }, { "epoch": 0.57, "grad_norm": 1.7662957969264452, "learning_rate": 4.089307420650665e-06, "loss": 0.9222, "step": 5317 }, { "epoch": 0.57, "grad_norm": 1.6543661592522836, "learning_rate": 4.087595525740476e-06, "loss": 0.8671, "step": 5318 }, { "epoch": 0.57, "grad_norm": 1.67514759014006, "learning_rate": 4.085883741462474e-06, "loss": 0.8294, "step": 5319 }, { "epoch": 0.57, "grad_norm": 1.6793032705514526, "learning_rate": 4.084172068024219e-06, "loss": 0.9558, "step": 5320 }, { "epoch": 0.57, "grad_norm": 1.7903096675862071, "learning_rate": 4.082460505633259e-06, "loss": 0.8494, "step": 5321 }, { "epoch": 0.57, "grad_norm": 1.8793282370963615, "learning_rate": 4.080749054497127e-06, "loss": 0.9165, "step": 5322 }, { "epoch": 0.57, "grad_norm": 1.6937905572250167, "learning_rate": 4.079037714823341e-06, "loss": 0.8642, "step": 5323 }, { "epoch": 0.57, "grad_norm": 1.830906912396178, "learning_rate": 4.077326486819405e-06, "loss": 0.8994, "step": 5324 }, { "epoch": 0.57, "grad_norm": 1.6791987304762914, "learning_rate": 4.075615370692815e-06, "loss": 0.8562, "step": 5325 }, { "epoch": 0.57, "grad_norm": 1.8116209696419372, "learning_rate": 4.073904366651049e-06, "loss": 0.8868, "step": 5326 }, { "epoch": 0.57, "grad_norm": 
0.8543754156506511, "learning_rate": 4.072193474901571e-06, "loss": 1.033, "step": 5327 }, { "epoch": 0.57, "grad_norm": 1.7895339809347135, "learning_rate": 4.070482695651832e-06, "loss": 0.8826, "step": 5328 }, { "epoch": 0.57, "grad_norm": 1.7299747082524215, "learning_rate": 4.068772029109271e-06, "loss": 0.832, "step": 5329 }, { "epoch": 0.57, "grad_norm": 1.8191728701524195, "learning_rate": 4.067061475481311e-06, "loss": 0.8756, "step": 5330 }, { "epoch": 0.57, "grad_norm": 1.7044544067464777, "learning_rate": 4.0653510349753645e-06, "loss": 0.9195, "step": 5331 }, { "epoch": 0.57, "grad_norm": 1.6615041519299651, "learning_rate": 4.063640707798826e-06, "loss": 0.9459, "step": 5332 }, { "epoch": 0.57, "grad_norm": 1.6608012764746711, "learning_rate": 4.061930494159081e-06, "loss": 0.8293, "step": 5333 }, { "epoch": 0.57, "grad_norm": 1.8209221601213084, "learning_rate": 4.060220394263497e-06, "loss": 0.9454, "step": 5334 }, { "epoch": 0.57, "grad_norm": 1.774325591506145, "learning_rate": 4.0585104083194295e-06, "loss": 0.9125, "step": 5335 }, { "epoch": 0.57, "grad_norm": 1.7477163642703712, "learning_rate": 4.056800536534222e-06, "loss": 0.9657, "step": 5336 }, { "epoch": 0.57, "grad_norm": 1.775826408680486, "learning_rate": 4.0550907791152025e-06, "loss": 0.8844, "step": 5337 }, { "epoch": 0.57, "grad_norm": 1.6841843407390913, "learning_rate": 4.053381136269682e-06, "loss": 0.8409, "step": 5338 }, { "epoch": 0.57, "grad_norm": 1.8800486690773819, "learning_rate": 4.0516716082049625e-06, "loss": 0.9136, "step": 5339 }, { "epoch": 0.57, "grad_norm": 1.983303803269195, "learning_rate": 4.049962195128328e-06, "loss": 0.9027, "step": 5340 }, { "epoch": 0.57, "grad_norm": 1.8097911053361404, "learning_rate": 4.0482528972470545e-06, "loss": 0.8761, "step": 5341 }, { "epoch": 0.57, "grad_norm": 1.8364976953788503, "learning_rate": 4.046543714768399e-06, "loss": 0.9019, "step": 5342 }, { "epoch": 0.57, "grad_norm": 1.8271791686113197, "learning_rate": 4.044834647899603e-06, "loss": 0.8965, "step": 5343 }, { "epoch": 0.57, "grad_norm": 0.8543084115340847, "learning_rate": 4.043125696847901e-06, "loss": 1.0339, "step": 5344 }, { "epoch": 0.57, "grad_norm": 1.7247376147945515, "learning_rate": 4.041416861820506e-06, "loss": 0.878, "step": 5345 }, { "epoch": 0.57, "grad_norm": 1.7929528337061738, "learning_rate": 4.039708143024621e-06, "loss": 0.9295, "step": 5346 }, { "epoch": 0.57, "grad_norm": 1.7207452756492931, "learning_rate": 4.037999540667436e-06, "loss": 0.8749, "step": 5347 }, { "epoch": 0.57, "grad_norm": 0.8159327383061363, "learning_rate": 4.036291054956122e-06, "loss": 1.0589, "step": 5348 }, { "epoch": 0.58, "grad_norm": 1.742623459107978, "learning_rate": 4.0345826860978395e-06, "loss": 0.8914, "step": 5349 }, { "epoch": 0.58, "grad_norm": 1.7560931571487364, "learning_rate": 4.0328744342997355e-06, "loss": 0.9126, "step": 5350 }, { "epoch": 0.58, "grad_norm": 0.819180724055411, "learning_rate": 4.031166299768939e-06, "loss": 1.0488, "step": 5351 }, { "epoch": 0.58, "grad_norm": 1.7520332794232376, "learning_rate": 4.029458282712571e-06, "loss": 0.9031, "step": 5352 }, { "epoch": 0.58, "grad_norm": 1.7596683633419, "learning_rate": 4.0277503833377306e-06, "loss": 0.8463, "step": 5353 }, { "epoch": 0.58, "grad_norm": 1.642914650350948, "learning_rate": 4.026042601851509e-06, "loss": 0.8806, "step": 5354 }, { "epoch": 0.58, "grad_norm": 0.8289970853285702, "learning_rate": 4.024334938460978e-06, "loss": 1.039, "step": 5355 }, { "epoch": 0.58, "grad_norm": 1.8358950126227351, 
"learning_rate": 4.022627393373201e-06, "loss": 0.865, "step": 5356 }, { "epoch": 0.58, "grad_norm": 1.7175001865611605, "learning_rate": 4.020919966795221e-06, "loss": 0.8852, "step": 5357 }, { "epoch": 0.58, "grad_norm": 1.6861641380403398, "learning_rate": 4.019212658934073e-06, "loss": 1.0017, "step": 5358 }, { "epoch": 0.58, "grad_norm": 1.7723028044433262, "learning_rate": 4.01750546999677e-06, "loss": 0.9758, "step": 5359 }, { "epoch": 0.58, "grad_norm": 1.628643355773677, "learning_rate": 4.015798400190314e-06, "loss": 0.8955, "step": 5360 }, { "epoch": 0.58, "grad_norm": 1.7623445209197022, "learning_rate": 4.014091449721697e-06, "loss": 0.9051, "step": 5361 }, { "epoch": 0.58, "grad_norm": 1.7360172403103387, "learning_rate": 4.01238461879789e-06, "loss": 0.9131, "step": 5362 }, { "epoch": 0.58, "grad_norm": 1.763367313769858, "learning_rate": 4.0106779076258515e-06, "loss": 0.8855, "step": 5363 }, { "epoch": 0.58, "grad_norm": 1.6607262054373768, "learning_rate": 4.0089713164125285e-06, "loss": 0.8264, "step": 5364 }, { "epoch": 0.58, "grad_norm": 1.574827716797458, "learning_rate": 4.007264845364851e-06, "loss": 0.8892, "step": 5365 }, { "epoch": 0.58, "grad_norm": 1.8080301399018799, "learning_rate": 4.005558494689732e-06, "loss": 0.869, "step": 5366 }, { "epoch": 0.58, "grad_norm": 1.7592629383423783, "learning_rate": 4.0038522645940746e-06, "loss": 1.0576, "step": 5367 }, { "epoch": 0.58, "grad_norm": 1.865349476636154, "learning_rate": 4.002146155284765e-06, "loss": 0.8947, "step": 5368 }, { "epoch": 0.58, "grad_norm": 1.6405058178867107, "learning_rate": 4.0004401669686746e-06, "loss": 0.8777, "step": 5369 }, { "epoch": 0.58, "grad_norm": 1.6215616543685971, "learning_rate": 3.998734299852659e-06, "loss": 0.7906, "step": 5370 }, { "epoch": 0.58, "grad_norm": 1.7190300181569405, "learning_rate": 3.99702855414356e-06, "loss": 0.8627, "step": 5371 }, { "epoch": 0.58, "grad_norm": 1.7561891417528654, "learning_rate": 3.9953229300482085e-06, "loss": 0.8421, "step": 5372 }, { "epoch": 0.58, "grad_norm": 1.6401439191065865, "learning_rate": 3.993617427773416e-06, "loss": 0.8078, "step": 5373 }, { "epoch": 0.58, "grad_norm": 1.7272773878587546, "learning_rate": 3.991912047525976e-06, "loss": 0.8322, "step": 5374 }, { "epoch": 0.58, "grad_norm": 1.7008708661661323, "learning_rate": 3.99020678951268e-06, "loss": 0.8927, "step": 5375 }, { "epoch": 0.58, "grad_norm": 1.8056917085061484, "learning_rate": 3.9885016539402896e-06, "loss": 0.8634, "step": 5376 }, { "epoch": 0.58, "grad_norm": 1.7502036204964766, "learning_rate": 3.986796641015561e-06, "loss": 0.9219, "step": 5377 }, { "epoch": 0.58, "grad_norm": 1.7215346983706339, "learning_rate": 3.985091750945234e-06, "loss": 0.936, "step": 5378 }, { "epoch": 0.58, "grad_norm": 1.7431472573677849, "learning_rate": 3.983386983936031e-06, "loss": 0.8869, "step": 5379 }, { "epoch": 0.58, "grad_norm": 1.7275006394336032, "learning_rate": 3.981682340194661e-06, "loss": 0.8891, "step": 5380 }, { "epoch": 0.58, "grad_norm": 1.8336665015895801, "learning_rate": 3.979977819927818e-06, "loss": 0.8744, "step": 5381 }, { "epoch": 0.58, "grad_norm": 1.7175307290944628, "learning_rate": 3.97827342334218e-06, "loss": 0.8545, "step": 5382 }, { "epoch": 0.58, "grad_norm": 1.7355177904443462, "learning_rate": 3.976569150644411e-06, "loss": 0.9773, "step": 5383 }, { "epoch": 0.58, "grad_norm": 1.764640346382825, "learning_rate": 3.974865002041163e-06, "loss": 0.8943, "step": 5384 }, { "epoch": 0.58, "grad_norm": 1.8233890580940408, "learning_rate": 
3.973160977739065e-06, "loss": 0.9371, "step": 5385 }, { "epoch": 0.58, "grad_norm": 1.6642661120027258, "learning_rate": 3.97145707794474e-06, "loss": 0.8364, "step": 5386 }, { "epoch": 0.58, "grad_norm": 1.7242038750971047, "learning_rate": 3.969753302864791e-06, "loss": 0.8774, "step": 5387 }, { "epoch": 0.58, "grad_norm": 1.7291887370341168, "learning_rate": 3.9680496527058055e-06, "loss": 0.9539, "step": 5388 }, { "epoch": 0.58, "grad_norm": 1.8325325967214428, "learning_rate": 3.966346127674356e-06, "loss": 0.8919, "step": 5389 }, { "epoch": 0.58, "grad_norm": 1.9136170314591707, "learning_rate": 3.964642727977004e-06, "loss": 0.9203, "step": 5390 }, { "epoch": 0.58, "grad_norm": 1.6997352862715425, "learning_rate": 3.96293945382029e-06, "loss": 0.9209, "step": 5391 }, { "epoch": 0.58, "grad_norm": 1.6713290698052294, "learning_rate": 3.9612363054107425e-06, "loss": 0.853, "step": 5392 }, { "epoch": 0.58, "grad_norm": 1.6925650053166064, "learning_rate": 3.959533282954874e-06, "loss": 0.8738, "step": 5393 }, { "epoch": 0.58, "grad_norm": 1.7189002479504698, "learning_rate": 3.957830386659181e-06, "loss": 0.8171, "step": 5394 }, { "epoch": 0.58, "grad_norm": 1.7434379545998133, "learning_rate": 3.956127616730148e-06, "loss": 0.9529, "step": 5395 }, { "epoch": 0.58, "grad_norm": 1.7811407780823432, "learning_rate": 3.954424973374239e-06, "loss": 0.8622, "step": 5396 }, { "epoch": 0.58, "grad_norm": 0.8258810123332079, "learning_rate": 3.952722456797906e-06, "loss": 1.0486, "step": 5397 }, { "epoch": 0.58, "grad_norm": 1.635994821493495, "learning_rate": 3.9510200672075874e-06, "loss": 0.8598, "step": 5398 }, { "epoch": 0.58, "grad_norm": 1.7817125727015353, "learning_rate": 3.949317804809701e-06, "loss": 0.8878, "step": 5399 }, { "epoch": 0.58, "grad_norm": 1.9376377275647474, "learning_rate": 3.9476156698106555e-06, "loss": 0.9785, "step": 5400 }, { "epoch": 0.58, "grad_norm": 2.139339617380846, "learning_rate": 3.945913662416836e-06, "loss": 0.8852, "step": 5401 }, { "epoch": 0.58, "grad_norm": 1.6865921111538404, "learning_rate": 3.944211782834618e-06, "loss": 0.8676, "step": 5402 }, { "epoch": 0.58, "grad_norm": 1.7657886015420805, "learning_rate": 3.942510031270363e-06, "loss": 0.891, "step": 5403 }, { "epoch": 0.58, "grad_norm": 1.768086639322376, "learning_rate": 3.940808407930412e-06, "loss": 0.8701, "step": 5404 }, { "epoch": 0.58, "grad_norm": 1.7567879271773947, "learning_rate": 3.939106913021091e-06, "loss": 0.9116, "step": 5405 }, { "epoch": 0.58, "grad_norm": 1.7287134320510633, "learning_rate": 3.937405546748716e-06, "loss": 0.8697, "step": 5406 }, { "epoch": 0.58, "grad_norm": 1.721140395896254, "learning_rate": 3.935704309319583e-06, "loss": 0.9731, "step": 5407 }, { "epoch": 0.58, "grad_norm": 1.8536309978034724, "learning_rate": 3.934003200939969e-06, "loss": 0.9147, "step": 5408 }, { "epoch": 0.58, "grad_norm": 0.8405020214449676, "learning_rate": 3.932302221816144e-06, "loss": 1.0494, "step": 5409 }, { "epoch": 0.58, "grad_norm": 0.8269520425725289, "learning_rate": 3.930601372154355e-06, "loss": 0.9956, "step": 5410 }, { "epoch": 0.58, "grad_norm": 1.8563436517405723, "learning_rate": 3.928900652160837e-06, "loss": 0.9047, "step": 5411 }, { "epoch": 0.58, "grad_norm": 1.6947305189128332, "learning_rate": 3.927200062041808e-06, "loss": 0.9007, "step": 5412 }, { "epoch": 0.58, "grad_norm": 1.7031677346707028, "learning_rate": 3.925499602003468e-06, "loss": 0.9535, "step": 5413 }, { "epoch": 0.58, "grad_norm": 1.6451226280581113, "learning_rate": 
3.923799272252007e-06, "loss": 0.8528, "step": 5414 }, { "epoch": 0.58, "grad_norm": 1.7944068765305448, "learning_rate": 3.9220990729935935e-06, "loss": 0.9218, "step": 5415 }, { "epoch": 0.58, "grad_norm": 1.804814683069681, "learning_rate": 3.920399004434383e-06, "loss": 0.896, "step": 5416 }, { "epoch": 0.58, "grad_norm": 1.6394907965895702, "learning_rate": 3.918699066780517e-06, "loss": 0.8655, "step": 5417 }, { "epoch": 0.58, "grad_norm": 1.750073308375723, "learning_rate": 3.916999260238116e-06, "loss": 0.8901, "step": 5418 }, { "epoch": 0.58, "grad_norm": 1.7459733081088256, "learning_rate": 3.915299585013288e-06, "loss": 0.9197, "step": 5419 }, { "epoch": 0.58, "grad_norm": 1.8374589015824432, "learning_rate": 3.913600041312125e-06, "loss": 0.9443, "step": 5420 }, { "epoch": 0.58, "grad_norm": 1.726264835632272, "learning_rate": 3.911900629340703e-06, "loss": 0.8936, "step": 5421 }, { "epoch": 0.58, "grad_norm": 1.6633657824665615, "learning_rate": 3.910201349305082e-06, "loss": 0.8277, "step": 5422 }, { "epoch": 0.58, "grad_norm": 1.717638574976483, "learning_rate": 3.908502201411304e-06, "loss": 0.8968, "step": 5423 }, { "epoch": 0.58, "grad_norm": 0.8539644854705541, "learning_rate": 3.906803185865395e-06, "loss": 1.0592, "step": 5424 }, { "epoch": 0.58, "grad_norm": 1.7960764283126927, "learning_rate": 3.9051043028733696e-06, "loss": 0.8519, "step": 5425 }, { "epoch": 0.58, "grad_norm": 0.8158328044882288, "learning_rate": 3.903405552641222e-06, "loss": 1.051, "step": 5426 }, { "epoch": 0.58, "grad_norm": 1.7676458568350797, "learning_rate": 3.901706935374933e-06, "loss": 0.8988, "step": 5427 }, { "epoch": 0.58, "grad_norm": 1.796331161895292, "learning_rate": 3.900008451280462e-06, "loss": 0.8494, "step": 5428 }, { "epoch": 0.58, "grad_norm": 1.797479930601746, "learning_rate": 3.89831010056376e-06, "loss": 0.8484, "step": 5429 }, { "epoch": 0.58, "grad_norm": 1.6498646801821606, "learning_rate": 3.896611883430756e-06, "loss": 0.9388, "step": 5430 }, { "epoch": 0.58, "grad_norm": 1.7101636998447212, "learning_rate": 3.894913800087365e-06, "loss": 0.9332, "step": 5431 }, { "epoch": 0.58, "grad_norm": 1.8819357275059887, "learning_rate": 3.8932158507394885e-06, "loss": 0.9042, "step": 5432 }, { "epoch": 0.58, "grad_norm": 1.6483786301298522, "learning_rate": 3.891518035593003e-06, "loss": 0.9179, "step": 5433 }, { "epoch": 0.58, "grad_norm": 1.7421259523552843, "learning_rate": 3.889820354853777e-06, "loss": 0.8305, "step": 5434 }, { "epoch": 0.58, "grad_norm": 1.7466925982565225, "learning_rate": 3.888122808727661e-06, "loss": 0.8568, "step": 5435 }, { "epoch": 0.58, "grad_norm": 1.6957028071264761, "learning_rate": 3.886425397420487e-06, "loss": 0.8648, "step": 5436 }, { "epoch": 0.58, "grad_norm": 1.7117086125125631, "learning_rate": 3.884728121138073e-06, "loss": 0.8752, "step": 5437 }, { "epoch": 0.58, "grad_norm": 1.8003968197385485, "learning_rate": 3.8830309800862196e-06, "loss": 0.9021, "step": 5438 }, { "epoch": 0.58, "grad_norm": 1.756984990195337, "learning_rate": 3.88133397447071e-06, "loss": 0.8687, "step": 5439 }, { "epoch": 0.58, "grad_norm": 1.7400245643971342, "learning_rate": 3.879637104497313e-06, "loss": 0.9994, "step": 5440 }, { "epoch": 0.58, "grad_norm": 1.7150744571696062, "learning_rate": 3.87794037037178e-06, "loss": 0.9498, "step": 5441 }, { "epoch": 0.59, "grad_norm": 1.6773725019316044, "learning_rate": 3.876243772299844e-06, "loss": 0.8855, "step": 5442 }, { "epoch": 0.59, "grad_norm": 1.691879758081048, "learning_rate": 3.8745473104872276e-06, 
"loss": 0.8908, "step": 5443 }, { "epoch": 0.59, "grad_norm": 1.743186355602486, "learning_rate": 3.872850985139628e-06, "loss": 0.9473, "step": 5444 }, { "epoch": 0.59, "grad_norm": 1.686695104766522, "learning_rate": 3.871154796462732e-06, "loss": 0.8564, "step": 5445 }, { "epoch": 0.59, "grad_norm": 1.7376438567262453, "learning_rate": 3.86945874466221e-06, "loss": 0.8553, "step": 5446 }, { "epoch": 0.59, "grad_norm": 1.803152040787705, "learning_rate": 3.86776282994371e-06, "loss": 0.9532, "step": 5447 }, { "epoch": 0.59, "grad_norm": 1.756656787083854, "learning_rate": 3.866067052512872e-06, "loss": 0.9316, "step": 5448 }, { "epoch": 0.59, "grad_norm": 1.8172871254978917, "learning_rate": 3.864371412575314e-06, "loss": 0.8864, "step": 5449 }, { "epoch": 0.59, "grad_norm": 1.6911606235084264, "learning_rate": 3.862675910336637e-06, "loss": 0.8945, "step": 5450 }, { "epoch": 0.59, "grad_norm": 1.7366971283236292, "learning_rate": 3.860980546002425e-06, "loss": 0.8927, "step": 5451 }, { "epoch": 0.59, "grad_norm": 1.7013086058548021, "learning_rate": 3.859285319778251e-06, "loss": 0.8436, "step": 5452 }, { "epoch": 0.59, "grad_norm": 1.609641865517127, "learning_rate": 3.857590231869665e-06, "loss": 0.8377, "step": 5453 }, { "epoch": 0.59, "grad_norm": 1.7931817207887, "learning_rate": 3.8558952824822e-06, "loss": 0.9228, "step": 5454 }, { "epoch": 0.59, "grad_norm": 1.7430235735648119, "learning_rate": 3.854200471821376e-06, "loss": 0.8946, "step": 5455 }, { "epoch": 0.59, "grad_norm": 1.7813790399090625, "learning_rate": 3.852505800092696e-06, "loss": 0.9269, "step": 5456 }, { "epoch": 0.59, "grad_norm": 1.7354176585258403, "learning_rate": 3.850811267501643e-06, "loss": 0.8851, "step": 5457 }, { "epoch": 0.59, "grad_norm": 1.7898293987829987, "learning_rate": 3.8491168742536865e-06, "loss": 0.8902, "step": 5458 }, { "epoch": 0.59, "grad_norm": 1.7430770786755125, "learning_rate": 3.847422620554276e-06, "loss": 0.865, "step": 5459 }, { "epoch": 0.59, "grad_norm": 1.7290401468512222, "learning_rate": 3.845728506608847e-06, "loss": 0.9232, "step": 5460 }, { "epoch": 0.59, "grad_norm": 1.6740039054225269, "learning_rate": 3.844034532622816e-06, "loss": 0.8693, "step": 5461 }, { "epoch": 0.59, "grad_norm": 1.7989609740296304, "learning_rate": 3.842340698801581e-06, "loss": 0.9529, "step": 5462 }, { "epoch": 0.59, "grad_norm": 1.6472626924425813, "learning_rate": 3.84064700535053e-06, "loss": 0.8408, "step": 5463 }, { "epoch": 0.59, "grad_norm": 1.763357469811722, "learning_rate": 3.838953452475026e-06, "loss": 0.8912, "step": 5464 }, { "epoch": 0.59, "grad_norm": 1.7773832782659156, "learning_rate": 3.837260040380419e-06, "loss": 0.8775, "step": 5465 }, { "epoch": 0.59, "grad_norm": 1.6866741221911763, "learning_rate": 3.83556676927204e-06, "loss": 0.8425, "step": 5466 }, { "epoch": 0.59, "grad_norm": 1.7696767876568005, "learning_rate": 3.833873639355204e-06, "loss": 0.865, "step": 5467 }, { "epoch": 0.59, "grad_norm": 1.723225353754857, "learning_rate": 3.83218065083521e-06, "loss": 0.905, "step": 5468 }, { "epoch": 0.59, "grad_norm": 1.72041010012403, "learning_rate": 3.830487803917338e-06, "loss": 0.7877, "step": 5469 }, { "epoch": 0.59, "grad_norm": 1.7185076581387875, "learning_rate": 3.828795098806851e-06, "loss": 0.8756, "step": 5470 }, { "epoch": 0.59, "grad_norm": 1.7458230221506337, "learning_rate": 3.827102535708998e-06, "loss": 0.8937, "step": 5471 }, { "epoch": 0.59, "grad_norm": 1.7300387753129531, "learning_rate": 3.825410114829005e-06, "loss": 0.92, "step": 5472 }, { 
"epoch": 0.59, "grad_norm": 0.8462507511677857, "learning_rate": 3.823717836372084e-06, "loss": 1.0439, "step": 5473 }, { "epoch": 0.59, "grad_norm": 1.7361929077376925, "learning_rate": 3.822025700543431e-06, "loss": 0.9178, "step": 5474 }, { "epoch": 0.59, "grad_norm": 1.6459593273342064, "learning_rate": 3.820333707548225e-06, "loss": 0.9242, "step": 5475 }, { "epoch": 0.59, "grad_norm": 1.7069191863913327, "learning_rate": 3.818641857591623e-06, "loss": 0.932, "step": 5476 }, { "epoch": 0.59, "grad_norm": 1.7296906975506454, "learning_rate": 3.8169501508787686e-06, "loss": 0.8599, "step": 5477 }, { "epoch": 0.59, "grad_norm": 1.6021456098522204, "learning_rate": 3.815258587614785e-06, "loss": 0.8898, "step": 5478 }, { "epoch": 0.59, "grad_norm": 1.686541510676499, "learning_rate": 3.8135671680047837e-06, "loss": 0.8729, "step": 5479 }, { "epoch": 0.59, "grad_norm": 1.6598196541498524, "learning_rate": 3.8118758922538533e-06, "loss": 0.8557, "step": 5480 }, { "epoch": 0.59, "grad_norm": 1.7353164560017316, "learning_rate": 3.8101847605670663e-06, "loss": 0.9466, "step": 5481 }, { "epoch": 0.59, "grad_norm": 1.9020871229345766, "learning_rate": 3.80849377314948e-06, "loss": 1.0161, "step": 5482 }, { "epoch": 0.59, "grad_norm": 1.661237256577535, "learning_rate": 3.806802930206131e-06, "loss": 0.9389, "step": 5483 }, { "epoch": 0.59, "grad_norm": 1.7926240697092408, "learning_rate": 3.8051122319420406e-06, "loss": 0.8601, "step": 5484 }, { "epoch": 0.59, "grad_norm": 1.6725787600985047, "learning_rate": 3.803421678562213e-06, "loss": 0.8131, "step": 5485 }, { "epoch": 0.59, "grad_norm": 1.7037017640939962, "learning_rate": 3.801731270271629e-06, "loss": 0.8705, "step": 5486 }, { "epoch": 0.59, "grad_norm": 1.733240109372076, "learning_rate": 3.8000410072752614e-06, "loss": 0.9752, "step": 5487 }, { "epoch": 0.59, "grad_norm": 1.6740980854060834, "learning_rate": 3.7983508897780586e-06, "loss": 0.9261, "step": 5488 }, { "epoch": 0.59, "grad_norm": 1.6828176414591034, "learning_rate": 3.7966609179849528e-06, "loss": 0.8448, "step": 5489 }, { "epoch": 0.59, "grad_norm": 1.7853829025214263, "learning_rate": 3.794971092100858e-06, "loss": 0.8776, "step": 5490 }, { "epoch": 0.59, "grad_norm": 1.7461555151667107, "learning_rate": 3.7932814123306737e-06, "loss": 0.9061, "step": 5491 }, { "epoch": 0.59, "grad_norm": 1.7014574257530117, "learning_rate": 3.7915918788792793e-06, "loss": 0.8533, "step": 5492 }, { "epoch": 0.59, "grad_norm": 0.8514063147977666, "learning_rate": 3.7899024919515337e-06, "loss": 1.0698, "step": 5493 }, { "epoch": 0.59, "grad_norm": 1.654335551874766, "learning_rate": 3.788213251752284e-06, "loss": 0.8629, "step": 5494 }, { "epoch": 0.59, "grad_norm": 1.786657676875161, "learning_rate": 3.7865241584863547e-06, "loss": 0.9714, "step": 5495 }, { "epoch": 0.59, "grad_norm": 1.6828499502069274, "learning_rate": 3.7848352123585562e-06, "loss": 0.9156, "step": 5496 }, { "epoch": 0.59, "grad_norm": 1.7540698700250033, "learning_rate": 3.7831464135736767e-06, "loss": 0.8709, "step": 5497 }, { "epoch": 0.59, "grad_norm": 1.7795811022332515, "learning_rate": 3.7814577623364885e-06, "loss": 0.9121, "step": 5498 }, { "epoch": 0.59, "grad_norm": 1.7274507817122284, "learning_rate": 3.7797692588517487e-06, "loss": 0.9814, "step": 5499 }, { "epoch": 0.59, "grad_norm": 1.7486240768528967, "learning_rate": 3.7780809033241927e-06, "loss": 0.8676, "step": 5500 }, { "epoch": 0.59, "grad_norm": 1.5966822051917842, "learning_rate": 3.776392695958539e-06, "loss": 0.8885, "step": 5501 }, { 
"epoch": 0.59, "grad_norm": 1.80495886006849, "learning_rate": 3.7747046369594905e-06, "loss": 0.9237, "step": 5502 }, { "epoch": 0.59, "grad_norm": 1.6213563534859778, "learning_rate": 3.7730167265317288e-06, "loss": 0.8893, "step": 5503 }, { "epoch": 0.59, "grad_norm": 1.6999256338477609, "learning_rate": 3.771328964879918e-06, "loss": 0.8059, "step": 5504 }, { "epoch": 0.59, "grad_norm": 0.8298462868081797, "learning_rate": 3.7696413522087068e-06, "loss": 1.0512, "step": 5505 }, { "epoch": 0.59, "grad_norm": 1.6460561723246052, "learning_rate": 3.7679538887227247e-06, "loss": 0.886, "step": 5506 }, { "epoch": 0.59, "grad_norm": 1.8052042822676628, "learning_rate": 3.7662665746265803e-06, "loss": 0.9229, "step": 5507 }, { "epoch": 0.59, "grad_norm": 1.7707642311885385, "learning_rate": 3.7645794101248666e-06, "loss": 0.9157, "step": 5508 }, { "epoch": 0.59, "grad_norm": 1.700475976841979, "learning_rate": 3.7628923954221575e-06, "loss": 0.8968, "step": 5509 }, { "epoch": 0.59, "grad_norm": 1.7952408126455726, "learning_rate": 3.7612055307230117e-06, "loss": 0.8726, "step": 5510 }, { "epoch": 0.59, "grad_norm": 1.8081859739198791, "learning_rate": 3.759518816231966e-06, "loss": 0.8969, "step": 5511 }, { "epoch": 0.59, "grad_norm": 1.7701824496304486, "learning_rate": 3.7578322521535395e-06, "loss": 0.9249, "step": 5512 }, { "epoch": 0.59, "grad_norm": 1.8076081419365964, "learning_rate": 3.7561458386922355e-06, "loss": 0.8981, "step": 5513 }, { "epoch": 0.59, "grad_norm": 1.7708094719287937, "learning_rate": 3.754459576052537e-06, "loss": 0.8501, "step": 5514 }, { "epoch": 0.59, "grad_norm": 1.6887222515511375, "learning_rate": 3.752773464438909e-06, "loss": 0.9305, "step": 5515 }, { "epoch": 0.59, "grad_norm": 1.6817858462321236, "learning_rate": 3.7510875040557974e-06, "loss": 0.8541, "step": 5516 }, { "epoch": 0.59, "grad_norm": 1.7238824278525104, "learning_rate": 3.749401695107634e-06, "loss": 0.8835, "step": 5517 }, { "epoch": 0.59, "grad_norm": 0.8473853542106831, "learning_rate": 3.7477160377988247e-06, "loss": 1.0516, "step": 5518 }, { "epoch": 0.59, "grad_norm": 1.7850614146026114, "learning_rate": 3.746030532333763e-06, "loss": 0.8477, "step": 5519 }, { "epoch": 0.59, "grad_norm": 1.7617801947645448, "learning_rate": 3.7443451789168235e-06, "loss": 0.9219, "step": 5520 }, { "epoch": 0.59, "grad_norm": 1.7465378366580957, "learning_rate": 3.7426599777523577e-06, "loss": 0.9084, "step": 5521 }, { "epoch": 0.59, "grad_norm": 1.6941672962295955, "learning_rate": 3.7409749290447062e-06, "loss": 0.8359, "step": 5522 }, { "epoch": 0.59, "grad_norm": 1.7252952322528343, "learning_rate": 3.7392900329981845e-06, "loss": 0.8662, "step": 5523 }, { "epoch": 0.59, "grad_norm": 1.9807967411887528, "learning_rate": 3.7376052898170916e-06, "loss": 0.9074, "step": 5524 }, { "epoch": 0.59, "grad_norm": 1.6577665830496298, "learning_rate": 3.735920699705711e-06, "loss": 0.8602, "step": 5525 }, { "epoch": 0.59, "grad_norm": 1.814942787120635, "learning_rate": 3.734236262868303e-06, "loss": 0.9772, "step": 5526 }, { "epoch": 0.59, "grad_norm": 1.6371620702206282, "learning_rate": 3.732551979509113e-06, "loss": 0.8678, "step": 5527 }, { "epoch": 0.59, "grad_norm": 1.7816612387157116, "learning_rate": 3.7308678498323648e-06, "loss": 0.8986, "step": 5528 }, { "epoch": 0.59, "grad_norm": 1.6849919158264808, "learning_rate": 3.7291838740422627e-06, "loss": 0.9234, "step": 5529 }, { "epoch": 0.59, "grad_norm": 0.8378444394254156, "learning_rate": 3.7275000523429993e-06, "loss": 1.0636, "step": 5530 }, { 
"epoch": 0.59, "grad_norm": 1.7395234485226971, "learning_rate": 3.725816384938741e-06, "loss": 0.9207, "step": 5531 }, { "epoch": 0.59, "grad_norm": 1.6559729038878623, "learning_rate": 3.7241328720336377e-06, "loss": 0.8375, "step": 5532 }, { "epoch": 0.59, "grad_norm": 1.6969503654704081, "learning_rate": 3.722449513831823e-06, "loss": 0.8953, "step": 5533 }, { "epoch": 0.59, "grad_norm": 1.8623147189327178, "learning_rate": 3.7207663105374098e-06, "loss": 0.8894, "step": 5534 }, { "epoch": 0.6, "grad_norm": 1.824315878473531, "learning_rate": 3.7190832623544902e-06, "loss": 0.889, "step": 5535 }, { "epoch": 0.6, "grad_norm": 1.7786135998824093, "learning_rate": 3.717400369487142e-06, "loss": 0.9342, "step": 5536 }, { "epoch": 0.6, "grad_norm": 0.8191369854770012, "learning_rate": 3.715717632139421e-06, "loss": 1.0468, "step": 5537 }, { "epoch": 0.6, "grad_norm": 0.8434284793520319, "learning_rate": 3.714035050515366e-06, "loss": 1.0813, "step": 5538 }, { "epoch": 0.6, "grad_norm": 0.8337268659602917, "learning_rate": 3.7123526248189935e-06, "loss": 1.0859, "step": 5539 }, { "epoch": 0.6, "grad_norm": 1.85344794231511, "learning_rate": 3.7106703552543026e-06, "loss": 0.9078, "step": 5540 }, { "epoch": 0.6, "grad_norm": 1.8296879315320331, "learning_rate": 3.7089882420252775e-06, "loss": 0.8333, "step": 5541 }, { "epoch": 0.6, "grad_norm": 1.8093035167059648, "learning_rate": 3.7073062853358794e-06, "loss": 0.9443, "step": 5542 }, { "epoch": 0.6, "grad_norm": 1.7924929683229873, "learning_rate": 3.7056244853900493e-06, "loss": 0.8867, "step": 5543 }, { "epoch": 0.6, "grad_norm": 1.6732644454675774, "learning_rate": 3.7039428423917138e-06, "loss": 0.9382, "step": 5544 }, { "epoch": 0.6, "grad_norm": 1.6566454971099709, "learning_rate": 3.7022613565447774e-06, "loss": 0.8017, "step": 5545 }, { "epoch": 0.6, "grad_norm": 1.6955677673990874, "learning_rate": 3.7005800280531244e-06, "loss": 0.8959, "step": 5546 }, { "epoch": 0.6, "grad_norm": 1.7453767405157152, "learning_rate": 3.6988988571206224e-06, "loss": 0.896, "step": 5547 }, { "epoch": 0.6, "grad_norm": 0.8263251412075266, "learning_rate": 3.697217843951121e-06, "loss": 1.0443, "step": 5548 }, { "epoch": 0.6, "grad_norm": 1.7947877778623598, "learning_rate": 3.6955369887484476e-06, "loss": 0.9294, "step": 5549 }, { "epoch": 0.6, "grad_norm": 1.6611706798070793, "learning_rate": 3.693856291716411e-06, "loss": 0.8967, "step": 5550 }, { "epoch": 0.6, "grad_norm": 1.7536699755198983, "learning_rate": 3.692175753058802e-06, "loss": 0.9205, "step": 5551 }, { "epoch": 0.6, "grad_norm": 1.7144336632932475, "learning_rate": 3.69049537297939e-06, "loss": 0.842, "step": 5552 }, { "epoch": 0.6, "grad_norm": 1.6686949893356477, "learning_rate": 3.6888151516819303e-06, "loss": 1.0578, "step": 5553 }, { "epoch": 0.6, "grad_norm": 1.8039866198202736, "learning_rate": 3.6871350893701533e-06, "loss": 0.9516, "step": 5554 }, { "epoch": 0.6, "grad_norm": 1.7297261461546214, "learning_rate": 3.6854551862477724e-06, "loss": 0.8363, "step": 5555 }, { "epoch": 0.6, "grad_norm": 1.6216737965885455, "learning_rate": 3.6837754425184823e-06, "loss": 0.9557, "step": 5556 }, { "epoch": 0.6, "grad_norm": 1.7194931369470228, "learning_rate": 3.6820958583859585e-06, "loss": 0.9312, "step": 5557 }, { "epoch": 0.6, "grad_norm": 1.8466179637176745, "learning_rate": 3.680416434053854e-06, "loss": 0.8714, "step": 5558 }, { "epoch": 0.6, "grad_norm": 2.358048567269164, "learning_rate": 3.678737169725809e-06, "loss": 0.8741, "step": 5559 }, { "epoch": 0.6, "grad_norm": 
0.807373975030623, "learning_rate": 3.6770580656054343e-06, "loss": 1.0392, "step": 5560 }, { "epoch": 0.6, "grad_norm": 1.7940286785973945, "learning_rate": 3.6753791218963306e-06, "loss": 0.9257, "step": 5561 }, { "epoch": 0.6, "grad_norm": 1.6461386631210442, "learning_rate": 3.673700338802076e-06, "loss": 0.8772, "step": 5562 }, { "epoch": 0.6, "grad_norm": 1.8128366282386044, "learning_rate": 3.672021716526226e-06, "loss": 0.9334, "step": 5563 }, { "epoch": 0.6, "grad_norm": 1.6670290088021622, "learning_rate": 3.670343255272322e-06, "loss": 0.8808, "step": 5564 }, { "epoch": 0.6, "grad_norm": 0.8422077333538174, "learning_rate": 3.668664955243883e-06, "loss": 1.0345, "step": 5565 }, { "epoch": 0.6, "grad_norm": 1.7020559194372507, "learning_rate": 3.666986816644407e-06, "loss": 0.8807, "step": 5566 }, { "epoch": 0.6, "grad_norm": 1.7722858973293232, "learning_rate": 3.665308839677375e-06, "loss": 0.8448, "step": 5567 }, { "epoch": 0.6, "grad_norm": 1.6751062232187195, "learning_rate": 3.663631024546249e-06, "loss": 0.8203, "step": 5568 }, { "epoch": 0.6, "grad_norm": 1.8437965099377183, "learning_rate": 3.6619533714544664e-06, "loss": 0.9149, "step": 5569 }, { "epoch": 0.6, "grad_norm": 1.9265827081628075, "learning_rate": 3.6602758806054537e-06, "loss": 0.9376, "step": 5570 }, { "epoch": 0.6, "grad_norm": 0.8125044001298872, "learning_rate": 3.6585985522026067e-06, "loss": 1.0411, "step": 5571 }, { "epoch": 0.6, "grad_norm": 1.809003710362506, "learning_rate": 3.6569213864493103e-06, "loss": 0.9056, "step": 5572 }, { "epoch": 0.6, "grad_norm": 1.7366758178844799, "learning_rate": 3.655244383548926e-06, "loss": 0.8897, "step": 5573 }, { "epoch": 0.6, "grad_norm": 0.8422687025962967, "learning_rate": 3.6535675437047955e-06, "loss": 1.0362, "step": 5574 }, { "epoch": 0.6, "grad_norm": 1.8131825489629856, "learning_rate": 3.651890867120243e-06, "loss": 0.888, "step": 5575 }, { "epoch": 0.6, "grad_norm": 1.6846898702464828, "learning_rate": 3.6502143539985706e-06, "loss": 0.9992, "step": 5576 }, { "epoch": 0.6, "grad_norm": 1.7520949171665252, "learning_rate": 3.64853800454306e-06, "loss": 0.9715, "step": 5577 }, { "epoch": 0.6, "grad_norm": 1.6527210380942445, "learning_rate": 3.646861818956977e-06, "loss": 0.9576, "step": 5578 }, { "epoch": 0.6, "grad_norm": 1.889509295455256, "learning_rate": 3.6451857974435635e-06, "loss": 0.9303, "step": 5579 }, { "epoch": 0.6, "grad_norm": 0.8204522035795471, "learning_rate": 3.6435099402060437e-06, "loss": 1.0416, "step": 5580 }, { "epoch": 0.6, "grad_norm": 0.8544432294865565, "learning_rate": 3.6418342474476186e-06, "loss": 1.0411, "step": 5581 }, { "epoch": 0.6, "grad_norm": 1.858222558500551, "learning_rate": 3.640158719371473e-06, "loss": 0.9392, "step": 5582 }, { "epoch": 0.6, "grad_norm": 0.8077654630402038, "learning_rate": 3.638483356180771e-06, "loss": 1.0232, "step": 5583 }, { "epoch": 0.6, "grad_norm": 1.7065771527257185, "learning_rate": 3.636808158078656e-06, "loss": 0.9575, "step": 5584 }, { "epoch": 0.6, "grad_norm": 1.602905210611717, "learning_rate": 3.635133125268252e-06, "loss": 0.7753, "step": 5585 }, { "epoch": 0.6, "grad_norm": 0.8023345408645265, "learning_rate": 3.633458257952661e-06, "loss": 1.0294, "step": 5586 }, { "epoch": 0.6, "grad_norm": 2.0471990877139636, "learning_rate": 3.631783556334968e-06, "loss": 0.8164, "step": 5587 }, { "epoch": 0.6, "grad_norm": 1.6967693393904264, "learning_rate": 3.6301090206182365e-06, "loss": 0.8765, "step": 5588 }, { "epoch": 0.6, "grad_norm": 1.7100124040188183, "learning_rate": 
3.628434651005507e-06, "loss": 0.9467, "step": 5589 }, { "epoch": 0.6, "grad_norm": 1.7987208693960448, "learning_rate": 3.6267604476998063e-06, "loss": 0.8698, "step": 5590 }, { "epoch": 0.6, "grad_norm": 1.850664274180435, "learning_rate": 3.625086410904136e-06, "loss": 0.9044, "step": 5591 }, { "epoch": 0.6, "grad_norm": 1.702221572553094, "learning_rate": 3.623412540821478e-06, "loss": 0.8585, "step": 5592 }, { "epoch": 0.6, "grad_norm": 1.6014603960380234, "learning_rate": 3.621738837654795e-06, "loss": 0.8303, "step": 5593 }, { "epoch": 0.6, "grad_norm": 1.6383275631227547, "learning_rate": 3.620065301607029e-06, "loss": 0.9033, "step": 5594 }, { "epoch": 0.6, "grad_norm": 1.6856015433999665, "learning_rate": 3.618391932881102e-06, "loss": 0.9159, "step": 5595 }, { "epoch": 0.6, "grad_norm": 0.8184283673595089, "learning_rate": 3.616718731679918e-06, "loss": 1.0658, "step": 5596 }, { "epoch": 0.6, "grad_norm": 1.6326251370703713, "learning_rate": 3.6150456982063555e-06, "loss": 0.8641, "step": 5597 }, { "epoch": 0.6, "grad_norm": 1.7077566976961511, "learning_rate": 3.6133728326632765e-06, "loss": 0.8769, "step": 5598 }, { "epoch": 0.6, "grad_norm": 0.8102771075159586, "learning_rate": 3.611700135253523e-06, "loss": 1.0335, "step": 5599 }, { "epoch": 0.6, "grad_norm": 1.7561055856307544, "learning_rate": 3.6100276061799133e-06, "loss": 0.9163, "step": 5600 }, { "epoch": 0.6, "grad_norm": 1.7921173651756581, "learning_rate": 3.6083552456452487e-06, "loss": 0.9182, "step": 5601 }, { "epoch": 0.6, "grad_norm": 1.87667579763137, "learning_rate": 3.60668305385231e-06, "loss": 0.8589, "step": 5602 }, { "epoch": 0.6, "grad_norm": 1.8322814743914788, "learning_rate": 3.6050110310038533e-06, "loss": 0.9011, "step": 5603 }, { "epoch": 0.6, "grad_norm": 1.6058641836457963, "learning_rate": 3.603339177302618e-06, "loss": 0.8726, "step": 5604 }, { "epoch": 0.6, "grad_norm": 1.769992996334562, "learning_rate": 3.6016674929513218e-06, "loss": 0.9335, "step": 5605 }, { "epoch": 0.6, "grad_norm": 1.7324418397678591, "learning_rate": 3.599995978152664e-06, "loss": 0.8575, "step": 5606 }, { "epoch": 0.6, "grad_norm": 1.822640472887243, "learning_rate": 3.5983246331093196e-06, "loss": 0.9061, "step": 5607 }, { "epoch": 0.6, "grad_norm": 0.8600725223371645, "learning_rate": 3.5966534580239454e-06, "loss": 1.0364, "step": 5608 }, { "epoch": 0.6, "grad_norm": 1.7365507643690683, "learning_rate": 3.5949824530991783e-06, "loss": 0.8715, "step": 5609 }, { "epoch": 0.6, "grad_norm": 1.7477268774541044, "learning_rate": 3.5933116185376325e-06, "loss": 0.9084, "step": 5610 }, { "epoch": 0.6, "grad_norm": 1.696317442501295, "learning_rate": 3.591640954541903e-06, "loss": 0.9295, "step": 5611 }, { "epoch": 0.6, "grad_norm": 1.891740035280818, "learning_rate": 3.5899704613145637e-06, "loss": 0.8756, "step": 5612 }, { "epoch": 0.6, "grad_norm": 1.729968150102001, "learning_rate": 3.588300139058165e-06, "loss": 0.8106, "step": 5613 }, { "epoch": 0.6, "grad_norm": 1.7773379505569598, "learning_rate": 3.586629987975243e-06, "loss": 0.8996, "step": 5614 }, { "epoch": 0.6, "grad_norm": 1.7604489080700891, "learning_rate": 3.5849600082683068e-06, "loss": 0.8485, "step": 5615 }, { "epoch": 0.6, "grad_norm": 0.8666187768677037, "learning_rate": 3.5832902001398486e-06, "loss": 1.0497, "step": 5616 }, { "epoch": 0.6, "grad_norm": 1.737951671921557, "learning_rate": 3.581620563792336e-06, "loss": 0.8886, "step": 5617 }, { "epoch": 0.6, "grad_norm": 1.7730476964235946, "learning_rate": 3.5799510994282215e-06, "loss": 0.9036, 
"step": 5618 }, { "epoch": 0.6, "grad_norm": 0.7986216097863812, "learning_rate": 3.5782818072499313e-06, "loss": 1.0632, "step": 5619 }, { "epoch": 0.6, "grad_norm": 1.753989187941031, "learning_rate": 3.5766126874598734e-06, "loss": 0.8624, "step": 5620 }, { "epoch": 0.6, "grad_norm": 1.6413928296415943, "learning_rate": 3.574943740260435e-06, "loss": 0.9417, "step": 5621 }, { "epoch": 0.6, "grad_norm": 0.810872734070383, "learning_rate": 3.57327496585398e-06, "loss": 1.0286, "step": 5622 }, { "epoch": 0.6, "grad_norm": 1.771685237740851, "learning_rate": 3.571606364442857e-06, "loss": 0.9652, "step": 5623 }, { "epoch": 0.6, "grad_norm": 1.69409231725767, "learning_rate": 3.569937936229384e-06, "loss": 0.8889, "step": 5624 }, { "epoch": 0.6, "grad_norm": 1.8137739382316567, "learning_rate": 3.568269681415866e-06, "loss": 0.9252, "step": 5625 }, { "epoch": 0.6, "grad_norm": 1.6934537123820026, "learning_rate": 3.5666016002045857e-06, "loss": 0.8559, "step": 5626 }, { "epoch": 0.6, "grad_norm": 1.69622061966645, "learning_rate": 3.5649336927978036e-06, "loss": 0.8786, "step": 5627 }, { "epoch": 0.61, "grad_norm": 1.7769454870897883, "learning_rate": 3.563265959397757e-06, "loss": 0.9224, "step": 5628 }, { "epoch": 0.61, "grad_norm": 1.6697460944034928, "learning_rate": 3.561598400206667e-06, "loss": 0.8459, "step": 5629 }, { "epoch": 0.61, "grad_norm": 1.7976141428353352, "learning_rate": 3.559931015426731e-06, "loss": 0.8522, "step": 5630 }, { "epoch": 0.61, "grad_norm": 1.7936161488417472, "learning_rate": 3.5582638052601227e-06, "loss": 0.9381, "step": 5631 }, { "epoch": 0.61, "grad_norm": 1.601610348015723, "learning_rate": 3.556596769908999e-06, "loss": 0.9252, "step": 5632 }, { "epoch": 0.61, "grad_norm": 1.633213386122276, "learning_rate": 3.554929909575494e-06, "loss": 0.8121, "step": 5633 }, { "epoch": 0.61, "grad_norm": 1.658230050768033, "learning_rate": 3.5532632244617184e-06, "loss": 0.9114, "step": 5634 }, { "epoch": 0.61, "grad_norm": 1.7586741954537217, "learning_rate": 3.551596714769765e-06, "loss": 0.814, "step": 5635 }, { "epoch": 0.61, "grad_norm": 1.7818636521949829, "learning_rate": 3.5499303807017018e-06, "loss": 0.8759, "step": 5636 }, { "epoch": 0.61, "grad_norm": 1.779964203245575, "learning_rate": 3.54826422245958e-06, "loss": 0.8645, "step": 5637 }, { "epoch": 0.61, "grad_norm": 1.7685497562787866, "learning_rate": 3.546598240245427e-06, "loss": 0.8949, "step": 5638 }, { "epoch": 0.61, "grad_norm": 1.7985707910537034, "learning_rate": 3.5449324342612464e-06, "loss": 0.7795, "step": 5639 }, { "epoch": 0.61, "grad_norm": 1.858739349362007, "learning_rate": 3.5432668047090258e-06, "loss": 0.9002, "step": 5640 }, { "epoch": 0.61, "grad_norm": 1.6748499532546481, "learning_rate": 3.5416013517907265e-06, "loss": 0.8971, "step": 5641 }, { "epoch": 0.61, "grad_norm": 1.7903718507677502, "learning_rate": 3.5399360757082918e-06, "loss": 0.8844, "step": 5642 }, { "epoch": 0.61, "grad_norm": 0.8907651267994066, "learning_rate": 3.5382709766636404e-06, "loss": 1.0004, "step": 5643 }, { "epoch": 0.61, "grad_norm": 1.7316705781084434, "learning_rate": 3.5366060548586745e-06, "loss": 0.8748, "step": 5644 }, { "epoch": 0.61, "grad_norm": 1.7529696579026233, "learning_rate": 3.5349413104952686e-06, "loss": 0.8693, "step": 5645 }, { "epoch": 0.61, "grad_norm": 1.7223377312626231, "learning_rate": 3.5332767437752792e-06, "loss": 0.8427, "step": 5646 }, { "epoch": 0.61, "grad_norm": 1.8367606486825332, "learning_rate": 3.531612354900542e-06, "loss": 0.87, "step": 5647 }, { "epoch": 
0.61, "grad_norm": 1.7233465693958416, "learning_rate": 3.5299481440728673e-06, "loss": 0.9799, "step": 5648 }, { "epoch": 0.61, "grad_norm": 0.8272221517110389, "learning_rate": 3.528284111494049e-06, "loss": 1.044, "step": 5649 }, { "epoch": 0.61, "grad_norm": 1.7710311878509273, "learning_rate": 3.526620257365857e-06, "loss": 0.9254, "step": 5650 }, { "epoch": 0.61, "grad_norm": 0.8145597223891708, "learning_rate": 3.5249565818900365e-06, "loss": 1.0627, "step": 5651 }, { "epoch": 0.61, "grad_norm": 1.9221674932841766, "learning_rate": 3.5232930852683168e-06, "loss": 0.8769, "step": 5652 }, { "epoch": 0.61, "grad_norm": 1.634025118892234, "learning_rate": 3.521629767702401e-06, "loss": 0.9536, "step": 5653 }, { "epoch": 0.61, "grad_norm": 1.6873157883503735, "learning_rate": 3.5199666293939725e-06, "loss": 0.8699, "step": 5654 }, { "epoch": 0.61, "grad_norm": 1.8680320553975331, "learning_rate": 3.518303670544696e-06, "loss": 0.9231, "step": 5655 }, { "epoch": 0.61, "grad_norm": 0.8092927869223665, "learning_rate": 3.5166408913562033e-06, "loss": 1.0694, "step": 5656 }, { "epoch": 0.61, "grad_norm": 0.8384536799090528, "learning_rate": 3.5149782920301178e-06, "loss": 1.0536, "step": 5657 }, { "epoch": 0.61, "grad_norm": 1.6646442299263833, "learning_rate": 3.513315872768035e-06, "loss": 0.894, "step": 5658 }, { "epoch": 0.61, "grad_norm": 1.6578216398771763, "learning_rate": 3.5116536337715255e-06, "loss": 0.917, "step": 5659 }, { "epoch": 0.61, "grad_norm": 1.7214383106650173, "learning_rate": 3.5099915752421467e-06, "loss": 0.8678, "step": 5660 }, { "epoch": 0.61, "grad_norm": 1.7684468400330795, "learning_rate": 3.5083296973814253e-06, "loss": 0.8944, "step": 5661 }, { "epoch": 0.61, "grad_norm": 1.7835645607360697, "learning_rate": 3.5066680003908695e-06, "loss": 0.8845, "step": 5662 }, { "epoch": 0.61, "grad_norm": 1.7929621316165107, "learning_rate": 3.5050064844719677e-06, "loss": 0.871, "step": 5663 }, { "epoch": 0.61, "grad_norm": 1.6647261201456773, "learning_rate": 3.503345149826185e-06, "loss": 0.8678, "step": 5664 }, { "epoch": 0.61, "grad_norm": 1.8889024873909521, "learning_rate": 3.5016839966549626e-06, "loss": 0.9782, "step": 5665 }, { "epoch": 0.61, "grad_norm": 1.7755315105945544, "learning_rate": 3.50002302515972e-06, "loss": 0.9652, "step": 5666 }, { "epoch": 0.61, "grad_norm": 1.6269696659026092, "learning_rate": 3.498362235541856e-06, "loss": 0.8726, "step": 5667 }, { "epoch": 0.61, "grad_norm": 1.723174528423652, "learning_rate": 3.4967016280027494e-06, "loss": 0.8852, "step": 5668 }, { "epoch": 0.61, "grad_norm": 1.7534294896856653, "learning_rate": 3.4950412027437523e-06, "loss": 0.875, "step": 5669 }, { "epoch": 0.61, "grad_norm": 0.8394842374752671, "learning_rate": 3.4933809599661973e-06, "loss": 1.0248, "step": 5670 }, { "epoch": 0.61, "grad_norm": 0.8354183268277448, "learning_rate": 3.491720899871396e-06, "loss": 1.0445, "step": 5671 }, { "epoch": 0.61, "grad_norm": 1.725845834623862, "learning_rate": 3.4900610226606346e-06, "loss": 0.8397, "step": 5672 }, { "epoch": 0.61, "grad_norm": 1.7689199007674459, "learning_rate": 3.4884013285351796e-06, "loss": 0.8842, "step": 5673 }, { "epoch": 0.61, "grad_norm": 1.7047701766087395, "learning_rate": 3.4867418176962755e-06, "loss": 0.8868, "step": 5674 }, { "epoch": 0.61, "grad_norm": 1.7106233871873942, "learning_rate": 3.485082490345143e-06, "loss": 0.9266, "step": 5675 }, { "epoch": 0.61, "grad_norm": 1.8601754239861215, "learning_rate": 3.4834233466829827e-06, "loss": 0.9526, "step": 5676 }, { "epoch": 0.61, 
"grad_norm": 1.6917923766884189, "learning_rate": 3.4817643869109683e-06, "loss": 0.9166, "step": 5677 }, { "epoch": 0.61, "grad_norm": 1.7247029501181677, "learning_rate": 3.4801056112302554e-06, "loss": 0.9771, "step": 5678 }, { "epoch": 0.61, "grad_norm": 1.7376642320691873, "learning_rate": 3.4784470198419783e-06, "loss": 0.8287, "step": 5679 }, { "epoch": 0.61, "grad_norm": 1.6852546240687505, "learning_rate": 3.4767886129472455e-06, "loss": 0.9039, "step": 5680 }, { "epoch": 0.61, "grad_norm": 1.7082633040690438, "learning_rate": 3.4751303907471444e-06, "loss": 0.9118, "step": 5681 }, { "epoch": 0.61, "grad_norm": 1.7633879171618096, "learning_rate": 3.47347235344274e-06, "loss": 1.0027, "step": 5682 }, { "epoch": 0.61, "grad_norm": 1.9377356455620571, "learning_rate": 3.471814501235076e-06, "loss": 0.8162, "step": 5683 }, { "epoch": 0.61, "grad_norm": 1.626445069527124, "learning_rate": 3.4701568343251723e-06, "loss": 0.8798, "step": 5684 }, { "epoch": 0.61, "grad_norm": 1.7281177915022998, "learning_rate": 3.468499352914026e-06, "loss": 0.8811, "step": 5685 }, { "epoch": 0.61, "grad_norm": 1.8457188744192228, "learning_rate": 3.4668420572026158e-06, "loss": 0.867, "step": 5686 }, { "epoch": 0.61, "grad_norm": 1.7344652576529356, "learning_rate": 3.4651849473918887e-06, "loss": 0.8594, "step": 5687 }, { "epoch": 0.61, "grad_norm": 1.766703886054441, "learning_rate": 3.463528023682779e-06, "loss": 0.9157, "step": 5688 }, { "epoch": 0.61, "grad_norm": 1.6969499748763655, "learning_rate": 3.461871286276194e-06, "loss": 0.9073, "step": 5689 }, { "epoch": 0.61, "grad_norm": 1.7195402374126778, "learning_rate": 3.4602147353730165e-06, "loss": 0.9796, "step": 5690 }, { "epoch": 0.61, "grad_norm": 1.6957301264341562, "learning_rate": 3.4585583711741115e-06, "loss": 0.9358, "step": 5691 }, { "epoch": 0.61, "grad_norm": 1.6187384947830747, "learning_rate": 3.456902193880319e-06, "loss": 0.9426, "step": 5692 }, { "epoch": 0.61, "grad_norm": 1.634320839798652, "learning_rate": 3.455246203692454e-06, "loss": 0.8466, "step": 5693 }, { "epoch": 0.61, "grad_norm": 1.7978086798186974, "learning_rate": 3.453590400811313e-06, "loss": 0.924, "step": 5694 }, { "epoch": 0.61, "grad_norm": 1.7418443981846683, "learning_rate": 3.4519347854376677e-06, "loss": 0.8565, "step": 5695 }, { "epoch": 0.61, "grad_norm": 1.7197169201939806, "learning_rate": 3.450279357772266e-06, "loss": 0.9083, "step": 5696 }, { "epoch": 0.61, "grad_norm": 1.7474437701082768, "learning_rate": 3.4486241180158373e-06, "loss": 0.9903, "step": 5697 }, { "epoch": 0.61, "grad_norm": 1.635727934254354, "learning_rate": 3.44696906636908e-06, "loss": 0.9267, "step": 5698 }, { "epoch": 0.61, "grad_norm": 1.766418775765097, "learning_rate": 3.445314203032678e-06, "loss": 1.0015, "step": 5699 }, { "epoch": 0.61, "grad_norm": 1.6472864912368657, "learning_rate": 3.443659528207289e-06, "loss": 0.8065, "step": 5700 }, { "epoch": 0.61, "grad_norm": 0.8452692700400021, "learning_rate": 3.442005042093547e-06, "loss": 1.0823, "step": 5701 }, { "epoch": 0.61, "grad_norm": 1.8802698939429319, "learning_rate": 3.440350744892066e-06, "loss": 0.9812, "step": 5702 }, { "epoch": 0.61, "grad_norm": 1.6986588522164532, "learning_rate": 3.4386966368034335e-06, "loss": 0.897, "step": 5703 }, { "epoch": 0.61, "grad_norm": 1.7856493007613126, "learning_rate": 3.437042718028215e-06, "loss": 0.9253, "step": 5704 }, { "epoch": 0.61, "grad_norm": 1.75474866167282, "learning_rate": 3.435388988766958e-06, "loss": 0.8866, "step": 5705 }, { "epoch": 0.61, "grad_norm": 
1.7445865708488477, "learning_rate": 3.4337354492201786e-06, "loss": 0.8907, "step": 5706 }, { "epoch": 0.61, "grad_norm": 1.622242984414676, "learning_rate": 3.4320820995883776e-06, "loss": 0.989, "step": 5707 }, { "epoch": 0.61, "grad_norm": 1.7533171983478932, "learning_rate": 3.430428940072026e-06, "loss": 0.8435, "step": 5708 }, { "epoch": 0.61, "grad_norm": 1.7757032678984757, "learning_rate": 3.4287759708715753e-06, "loss": 0.934, "step": 5709 }, { "epoch": 0.61, "grad_norm": 1.7217221974408432, "learning_rate": 3.427123192187456e-06, "loss": 0.9206, "step": 5710 }, { "epoch": 0.61, "grad_norm": 1.8145076883315294, "learning_rate": 3.4254706042200724e-06, "loss": 0.8656, "step": 5711 }, { "epoch": 0.61, "grad_norm": 1.7386062100273125, "learning_rate": 3.4238182071698066e-06, "loss": 0.9234, "step": 5712 }, { "epoch": 0.61, "grad_norm": 1.7669339235805144, "learning_rate": 3.422166001237016e-06, "loss": 0.8319, "step": 5713 }, { "epoch": 0.61, "grad_norm": 1.7099214016120043, "learning_rate": 3.4205139866220384e-06, "loss": 0.8891, "step": 5714 }, { "epoch": 0.61, "grad_norm": 1.7658374518382718, "learning_rate": 3.418862163525185e-06, "loss": 0.9116, "step": 5715 }, { "epoch": 0.61, "grad_norm": 1.7625864898946653, "learning_rate": 3.417210532146744e-06, "loss": 0.8948, "step": 5716 }, { "epoch": 0.61, "grad_norm": 1.710508732239102, "learning_rate": 3.415559092686984e-06, "loss": 0.9357, "step": 5717 }, { "epoch": 0.61, "grad_norm": 1.6971155993836997, "learning_rate": 3.4139078453461472e-06, "loss": 0.947, "step": 5718 }, { "epoch": 0.61, "grad_norm": 1.72881275206995, "learning_rate": 3.412256790324452e-06, "loss": 0.8991, "step": 5719 }, { "epoch": 0.61, "grad_norm": 1.934393845436041, "learning_rate": 3.4106059278220938e-06, "loss": 0.8343, "step": 5720 }, { "epoch": 0.62, "grad_norm": 1.757278858994872, "learning_rate": 3.408955258039246e-06, "loss": 0.8983, "step": 5721 }, { "epoch": 0.62, "grad_norm": 1.7240995504068202, "learning_rate": 3.407304781176059e-06, "loss": 0.8783, "step": 5722 }, { "epoch": 0.62, "grad_norm": 1.7134395421524382, "learning_rate": 3.4056544974326584e-06, "loss": 0.9777, "step": 5723 }, { "epoch": 0.62, "grad_norm": 1.6101468793947178, "learning_rate": 3.404004407009145e-06, "loss": 0.8154, "step": 5724 }, { "epoch": 0.62, "grad_norm": 1.6959294039152442, "learning_rate": 3.402354510105601e-06, "loss": 0.9545, "step": 5725 }, { "epoch": 0.62, "grad_norm": 1.7402670397680935, "learning_rate": 3.4007048069220807e-06, "loss": 0.9122, "step": 5726 }, { "epoch": 0.62, "grad_norm": 1.6855463297375057, "learning_rate": 3.3990552976586144e-06, "loss": 0.8455, "step": 5727 }, { "epoch": 0.62, "grad_norm": 1.7361777118985908, "learning_rate": 3.397405982515214e-06, "loss": 0.8388, "step": 5728 }, { "epoch": 0.62, "grad_norm": 0.7871447523058728, "learning_rate": 3.3957568616918644e-06, "loss": 1.0172, "step": 5729 }, { "epoch": 0.62, "grad_norm": 1.6660135005365655, "learning_rate": 3.3941079353885255e-06, "loss": 0.8818, "step": 5730 }, { "epoch": 0.62, "grad_norm": 1.6981787455385082, "learning_rate": 3.3924592038051345e-06, "loss": 0.8864, "step": 5731 }, { "epoch": 0.62, "grad_norm": 1.7832170232417044, "learning_rate": 3.390810667141606e-06, "loss": 0.9094, "step": 5732 }, { "epoch": 0.62, "grad_norm": 1.6923568125066233, "learning_rate": 3.3891623255978347e-06, "loss": 0.9375, "step": 5733 }, { "epoch": 0.62, "grad_norm": 1.8724790286602109, "learning_rate": 3.387514179373683e-06, "loss": 0.8947, "step": 5734 }, { "epoch": 0.62, "grad_norm": 
1.8291648810467946, "learning_rate": 3.385866228668996e-06, "loss": 0.8931, "step": 5735 }, { "epoch": 0.62, "grad_norm": 1.8135187869317142, "learning_rate": 3.3842184736835943e-06, "loss": 0.8801, "step": 5736 }, { "epoch": 0.62, "grad_norm": 1.765389729239931, "learning_rate": 3.382570914617273e-06, "loss": 0.9826, "step": 5737 }, { "epoch": 0.62, "grad_norm": 1.904392359900347, "learning_rate": 3.3809235516698045e-06, "loss": 0.9555, "step": 5738 }, { "epoch": 0.62, "grad_norm": 1.7379480911842708, "learning_rate": 3.379276385040938e-06, "loss": 0.9911, "step": 5739 }, { "epoch": 0.62, "grad_norm": 1.7360384975289973, "learning_rate": 3.3776294149303956e-06, "loss": 0.9569, "step": 5740 }, { "epoch": 0.62, "grad_norm": 1.7899757211305796, "learning_rate": 3.375982641537881e-06, "loss": 0.8751, "step": 5741 }, { "epoch": 0.62, "grad_norm": 0.8244468884082404, "learning_rate": 3.3743360650630695e-06, "loss": 1.0447, "step": 5742 }, { "epoch": 0.62, "grad_norm": 0.8264718254936257, "learning_rate": 3.3726896857056145e-06, "loss": 1.0654, "step": 5743 }, { "epoch": 0.62, "grad_norm": 1.669230663865067, "learning_rate": 3.3710435036651453e-06, "loss": 0.9431, "step": 5744 }, { "epoch": 0.62, "grad_norm": 1.741028265127649, "learning_rate": 3.369397519141267e-06, "loss": 0.8751, "step": 5745 }, { "epoch": 0.62, "grad_norm": 1.7405595169561119, "learning_rate": 3.3677517323335616e-06, "loss": 0.8688, "step": 5746 }, { "epoch": 0.62, "grad_norm": 1.79414594356946, "learning_rate": 3.3661061434415847e-06, "loss": 1.0066, "step": 5747 }, { "epoch": 0.62, "grad_norm": 1.7589790136098502, "learning_rate": 3.3644607526648722e-06, "loss": 0.9194, "step": 5748 }, { "epoch": 0.62, "grad_norm": 1.7254000271549643, "learning_rate": 3.3628155602029312e-06, "loss": 0.9111, "step": 5749 }, { "epoch": 0.62, "grad_norm": 1.7068781784892262, "learning_rate": 3.3611705662552494e-06, "loss": 0.8894, "step": 5750 }, { "epoch": 0.62, "grad_norm": 1.6418302022950935, "learning_rate": 3.3595257710212853e-06, "loss": 0.8876, "step": 5751 }, { "epoch": 0.62, "grad_norm": 1.7475609693421148, "learning_rate": 3.3578811747004757e-06, "loss": 0.9347, "step": 5752 }, { "epoch": 0.62, "grad_norm": 1.7267591491952203, "learning_rate": 3.356236777492236e-06, "loss": 0.8496, "step": 5753 }, { "epoch": 0.62, "grad_norm": 1.6164003727443874, "learning_rate": 3.3545925795959545e-06, "loss": 0.8285, "step": 5754 }, { "epoch": 0.62, "grad_norm": 1.722020446152228, "learning_rate": 3.3529485812109934e-06, "loss": 0.9692, "step": 5755 }, { "epoch": 0.62, "grad_norm": 1.706702860051739, "learning_rate": 3.3513047825366968e-06, "loss": 0.8232, "step": 5756 }, { "epoch": 0.62, "grad_norm": 1.7650500657551864, "learning_rate": 3.349661183772379e-06, "loss": 0.9264, "step": 5757 }, { "epoch": 0.62, "grad_norm": 1.7956699121435815, "learning_rate": 3.3480177851173314e-06, "loss": 0.8948, "step": 5758 }, { "epoch": 0.62, "grad_norm": 1.7025879829427009, "learning_rate": 3.346374586770823e-06, "loss": 0.9015, "step": 5759 }, { "epoch": 0.62, "grad_norm": 1.82375281035598, "learning_rate": 3.3447315889320985e-06, "loss": 0.918, "step": 5760 }, { "epoch": 0.62, "grad_norm": 1.632183415309447, "learning_rate": 3.343088791800374e-06, "loss": 0.8812, "step": 5761 }, { "epoch": 0.62, "grad_norm": 1.811219170301259, "learning_rate": 3.341446195574846e-06, "loss": 0.8749, "step": 5762 }, { "epoch": 0.62, "grad_norm": 1.7520473981541287, "learning_rate": 3.339803800454684e-06, "loss": 0.9729, "step": 5763 }, { "epoch": 0.62, "grad_norm": 
1.7889797391941364, "learning_rate": 3.3381616066390355e-06, "loss": 0.9018, "step": 5764 }, { "epoch": 0.62, "grad_norm": 1.7494296061428083, "learning_rate": 3.3365196143270207e-06, "loss": 0.8926, "step": 5765 }, { "epoch": 0.62, "grad_norm": 1.7628053667965524, "learning_rate": 3.334877823717737e-06, "loss": 0.8574, "step": 5766 }, { "epoch": 0.62, "grad_norm": 1.751832616726469, "learning_rate": 3.3332362350102592e-06, "loss": 0.8969, "step": 5767 }, { "epoch": 0.62, "grad_norm": 1.7195571774843181, "learning_rate": 3.3315948484036344e-06, "loss": 0.9042, "step": 5768 }, { "epoch": 0.62, "grad_norm": 1.762227091301854, "learning_rate": 3.3299536640968844e-06, "loss": 0.925, "step": 5769 }, { "epoch": 0.62, "grad_norm": 1.9110697913040866, "learning_rate": 3.328312682289011e-06, "loss": 0.955, "step": 5770 }, { "epoch": 0.62, "grad_norm": 1.700169285435469, "learning_rate": 3.3266719031789896e-06, "loss": 0.8472, "step": 5771 }, { "epoch": 0.62, "grad_norm": 1.6896652932168508, "learning_rate": 3.3250313269657673e-06, "loss": 0.8414, "step": 5772 }, { "epoch": 0.62, "grad_norm": 1.7818536432657286, "learning_rate": 3.3233909538482712e-06, "loss": 0.854, "step": 5773 }, { "epoch": 0.62, "grad_norm": 1.6985143627535695, "learning_rate": 3.321750784025401e-06, "loss": 0.93, "step": 5774 }, { "epoch": 0.62, "grad_norm": 1.6279143683199, "learning_rate": 3.320110817696035e-06, "loss": 0.9123, "step": 5775 }, { "epoch": 0.62, "grad_norm": 1.9555096071316398, "learning_rate": 3.3184710550590244e-06, "loss": 0.9382, "step": 5776 }, { "epoch": 0.62, "grad_norm": 2.103852717527289, "learning_rate": 3.316831496313195e-06, "loss": 0.8883, "step": 5777 }, { "epoch": 0.62, "grad_norm": 1.971856442751516, "learning_rate": 3.3151921416573486e-06, "loss": 0.8985, "step": 5778 }, { "epoch": 0.62, "grad_norm": 0.8829336693496748, "learning_rate": 3.3135529912902644e-06, "loss": 1.0066, "step": 5779 }, { "epoch": 0.62, "grad_norm": 1.786030582197453, "learning_rate": 3.3119140454106945e-06, "loss": 0.8347, "step": 5780 }, { "epoch": 0.62, "grad_norm": 1.8193071348438765, "learning_rate": 3.310275304217365e-06, "loss": 0.8452, "step": 5781 }, { "epoch": 0.62, "grad_norm": 1.7117178894294423, "learning_rate": 3.3086367679089824e-06, "loss": 0.9706, "step": 5782 }, { "epoch": 0.62, "grad_norm": 1.6892220202739219, "learning_rate": 3.306998436684221e-06, "loss": 0.8568, "step": 5783 }, { "epoch": 0.62, "grad_norm": 1.8330533094692414, "learning_rate": 3.3053603107417365e-06, "loss": 0.9201, "step": 5784 }, { "epoch": 0.62, "grad_norm": 1.6459899130515607, "learning_rate": 3.3037223902801565e-06, "loss": 0.803, "step": 5785 }, { "epoch": 0.62, "grad_norm": 1.8012086481251026, "learning_rate": 3.302084675498083e-06, "loss": 0.9313, "step": 5786 }, { "epoch": 0.62, "grad_norm": 1.637243855471105, "learning_rate": 3.3004471665940975e-06, "loss": 0.8225, "step": 5787 }, { "epoch": 0.62, "grad_norm": 1.7829961251308677, "learning_rate": 3.2988098637667524e-06, "loss": 0.8458, "step": 5788 }, { "epoch": 0.62, "grad_norm": 1.613536358794607, "learning_rate": 3.297172767214576e-06, "loss": 0.883, "step": 5789 }, { "epoch": 0.62, "grad_norm": 1.670723223885938, "learning_rate": 3.2955358771360725e-06, "loss": 0.8946, "step": 5790 }, { "epoch": 0.62, "grad_norm": 1.6456907278972213, "learning_rate": 3.293899193729719e-06, "loss": 0.9107, "step": 5791 }, { "epoch": 0.62, "grad_norm": 1.723710093010037, "learning_rate": 3.2922627171939726e-06, "loss": 0.897, "step": 5792 }, { "epoch": 0.62, "grad_norm": 1.748523795570614, 
"learning_rate": 3.2906264477272576e-06, "loss": 0.872, "step": 5793 }, { "epoch": 0.62, "grad_norm": 1.7301139740521405, "learning_rate": 3.288990385527978e-06, "loss": 0.9024, "step": 5794 }, { "epoch": 0.62, "grad_norm": 1.7835934813411236, "learning_rate": 3.2873545307945144e-06, "loss": 0.9094, "step": 5795 }, { "epoch": 0.62, "grad_norm": 1.9027777499611487, "learning_rate": 3.2857188837252175e-06, "loss": 0.9486, "step": 5796 }, { "epoch": 0.62, "grad_norm": 1.7222171748702142, "learning_rate": 3.2840834445184156e-06, "loss": 0.883, "step": 5797 }, { "epoch": 0.62, "grad_norm": 1.75808612779035, "learning_rate": 3.2824482133724124e-06, "loss": 0.8763, "step": 5798 }, { "epoch": 0.62, "grad_norm": 1.7571828199912405, "learning_rate": 3.2808131904854843e-06, "loss": 0.8693, "step": 5799 }, { "epoch": 0.62, "grad_norm": 1.638493796332804, "learning_rate": 3.2791783760558836e-06, "loss": 0.9599, "step": 5800 }, { "epoch": 0.62, "grad_norm": 0.817029349532796, "learning_rate": 3.277543770281838e-06, "loss": 1.0665, "step": 5801 }, { "epoch": 0.62, "grad_norm": 1.7667230253082553, "learning_rate": 3.2759093733615482e-06, "loss": 0.9491, "step": 5802 }, { "epoch": 0.62, "grad_norm": 0.7888898816856202, "learning_rate": 3.274275185493192e-06, "loss": 1.0475, "step": 5803 }, { "epoch": 0.62, "grad_norm": 1.7130336450099488, "learning_rate": 3.272641206874918e-06, "loss": 0.9359, "step": 5804 }, { "epoch": 0.62, "grad_norm": 1.660348242138406, "learning_rate": 3.2710074377048516e-06, "loss": 0.8718, "step": 5805 }, { "epoch": 0.62, "grad_norm": 1.7042370200609216, "learning_rate": 3.269373878181096e-06, "loss": 0.9358, "step": 5806 }, { "epoch": 0.62, "grad_norm": 1.7189607946490364, "learning_rate": 3.2677405285017227e-06, "loss": 0.9056, "step": 5807 }, { "epoch": 0.62, "grad_norm": 1.7544762190067664, "learning_rate": 3.266107388864783e-06, "loss": 0.965, "step": 5808 }, { "epoch": 0.62, "grad_norm": 1.804614382010377, "learning_rate": 3.264474459468299e-06, "loss": 0.9378, "step": 5809 }, { "epoch": 0.62, "grad_norm": 1.7687082694966068, "learning_rate": 3.262841740510271e-06, "loss": 0.8667, "step": 5810 }, { "epoch": 0.62, "grad_norm": 1.6952843148052628, "learning_rate": 3.2612092321886713e-06, "loss": 0.8818, "step": 5811 }, { "epoch": 0.62, "grad_norm": 1.7620733424580628, "learning_rate": 3.259576934701445e-06, "loss": 0.8665, "step": 5812 }, { "epoch": 0.62, "grad_norm": 0.8298126042883063, "learning_rate": 3.2579448482465194e-06, "loss": 1.0597, "step": 5813 }, { "epoch": 0.63, "grad_norm": 1.638757599668099, "learning_rate": 3.256312973021783e-06, "loss": 0.8826, "step": 5814 }, { "epoch": 0.63, "grad_norm": 1.8382741582075028, "learning_rate": 3.254681309225111e-06, "loss": 0.8486, "step": 5815 }, { "epoch": 0.63, "grad_norm": 0.8396038098906063, "learning_rate": 3.2530498570543477e-06, "loss": 1.0856, "step": 5816 }, { "epoch": 0.63, "grad_norm": 1.778140043055113, "learning_rate": 3.2514186167073103e-06, "loss": 0.9371, "step": 5817 }, { "epoch": 0.63, "grad_norm": 1.7919930539222075, "learning_rate": 3.2497875883817955e-06, "loss": 0.9046, "step": 5818 }, { "epoch": 0.63, "grad_norm": 1.7955919999162238, "learning_rate": 3.248156772275569e-06, "loss": 0.9549, "step": 5819 }, { "epoch": 0.63, "grad_norm": 1.696499604060048, "learning_rate": 3.2465261685863723e-06, "loss": 0.915, "step": 5820 }, { "epoch": 0.63, "grad_norm": 1.7711061301234048, "learning_rate": 3.244895777511925e-06, "loss": 0.8534, "step": 5821 }, { "epoch": 0.63, "grad_norm": 1.6958458087679902, 
"learning_rate": 3.243265599249914e-06, "loss": 0.9142, "step": 5822 }, { "epoch": 0.63, "grad_norm": 1.8918054880134345, "learning_rate": 3.2416356339980056e-06, "loss": 0.8632, "step": 5823 }, { "epoch": 0.63, "grad_norm": 1.7062613504988868, "learning_rate": 3.240005881953841e-06, "loss": 0.8859, "step": 5824 }, { "epoch": 0.63, "grad_norm": 1.8250422472016088, "learning_rate": 3.2383763433150277e-06, "loss": 0.9737, "step": 5825 }, { "epoch": 0.63, "grad_norm": 1.6476670911839033, "learning_rate": 3.236747018279157e-06, "loss": 0.8399, "step": 5826 }, { "epoch": 0.63, "grad_norm": 1.681962846605396, "learning_rate": 3.23511790704379e-06, "loss": 0.8563, "step": 5827 }, { "epoch": 0.63, "grad_norm": 1.5896146058912484, "learning_rate": 3.23348900980646e-06, "loss": 0.8947, "step": 5828 }, { "epoch": 0.63, "grad_norm": 1.8609636379948766, "learning_rate": 3.2318603267646774e-06, "loss": 0.8716, "step": 5829 }, { "epoch": 0.63, "grad_norm": 1.734706335680236, "learning_rate": 3.2302318581159268e-06, "loss": 0.8395, "step": 5830 }, { "epoch": 0.63, "grad_norm": 1.7055075314447223, "learning_rate": 3.228603604057664e-06, "loss": 0.8816, "step": 5831 }, { "epoch": 0.63, "grad_norm": 1.661771339457933, "learning_rate": 3.226975564787322e-06, "loss": 0.9296, "step": 5832 }, { "epoch": 0.63, "grad_norm": 0.8081030255513815, "learning_rate": 3.2253477405023047e-06, "loss": 1.0326, "step": 5833 }, { "epoch": 0.63, "grad_norm": 1.7450600295028296, "learning_rate": 3.2237201313999927e-06, "loss": 0.9199, "step": 5834 }, { "epoch": 0.63, "grad_norm": 1.7860641091909126, "learning_rate": 3.2220927376777393e-06, "loss": 0.9594, "step": 5835 }, { "epoch": 0.63, "grad_norm": 1.718613978015559, "learning_rate": 3.2204655595328693e-06, "loss": 0.88, "step": 5836 }, { "epoch": 0.63, "grad_norm": 1.8418347140608273, "learning_rate": 3.2188385971626855e-06, "loss": 0.9128, "step": 5837 }, { "epoch": 0.63, "grad_norm": 1.7116959615346439, "learning_rate": 3.2172118507644624e-06, "loss": 0.9798, "step": 5838 }, { "epoch": 0.63, "grad_norm": 1.644165060120402, "learning_rate": 3.215585320535449e-06, "loss": 0.9366, "step": 5839 }, { "epoch": 0.63, "grad_norm": 1.682685171140966, "learning_rate": 3.2139590066728667e-06, "loss": 0.8643, "step": 5840 }, { "epoch": 0.63, "grad_norm": 1.714710437004348, "learning_rate": 3.212332909373912e-06, "loss": 0.9016, "step": 5841 }, { "epoch": 0.63, "grad_norm": 1.6193632366649306, "learning_rate": 3.2107070288357557e-06, "loss": 0.8802, "step": 5842 }, { "epoch": 0.63, "grad_norm": 1.8139165637713215, "learning_rate": 3.2090813652555396e-06, "loss": 0.8576, "step": 5843 }, { "epoch": 0.63, "grad_norm": 1.7101282848964754, "learning_rate": 3.207455918830384e-06, "loss": 0.9138, "step": 5844 }, { "epoch": 0.63, "grad_norm": 1.773345262055934, "learning_rate": 3.205830689757379e-06, "loss": 0.9478, "step": 5845 }, { "epoch": 0.63, "grad_norm": 1.8002754430276613, "learning_rate": 3.204205678233586e-06, "loss": 0.9529, "step": 5846 }, { "epoch": 0.63, "grad_norm": 1.8139947586285003, "learning_rate": 3.202580884456047e-06, "loss": 0.9228, "step": 5847 }, { "epoch": 0.63, "grad_norm": 1.766318365203739, "learning_rate": 3.2009563086217703e-06, "loss": 0.9484, "step": 5848 }, { "epoch": 0.63, "grad_norm": 1.8162479348117928, "learning_rate": 3.1993319509277455e-06, "loss": 0.897, "step": 5849 }, { "epoch": 0.63, "grad_norm": 1.7993666149127705, "learning_rate": 3.1977078115709285e-06, "loss": 0.9509, "step": 5850 }, { "epoch": 0.63, "grad_norm": 1.8798044213271798, 
"learning_rate": 3.1960838907482524e-06, "loss": 0.9063, "step": 5851 }, { "epoch": 0.63, "grad_norm": 1.7897726830714662, "learning_rate": 3.194460188656624e-06, "loss": 0.8724, "step": 5852 }, { "epoch": 0.63, "grad_norm": 1.693502055303962, "learning_rate": 3.1928367054929234e-06, "loss": 0.9001, "step": 5853 }, { "epoch": 0.63, "grad_norm": 1.7186852925788019, "learning_rate": 3.1912134414540008e-06, "loss": 0.9553, "step": 5854 }, { "epoch": 0.63, "grad_norm": 1.6270424406216109, "learning_rate": 3.1895903967366847e-06, "loss": 0.8204, "step": 5855 }, { "epoch": 0.63, "grad_norm": 1.759408938722275, "learning_rate": 3.1879675715377766e-06, "loss": 0.9269, "step": 5856 }, { "epoch": 0.63, "grad_norm": 1.7325589827577368, "learning_rate": 3.186344966054046e-06, "loss": 0.8269, "step": 5857 }, { "epoch": 0.63, "grad_norm": 1.6812588213180517, "learning_rate": 3.1847225804822408e-06, "loss": 0.8368, "step": 5858 }, { "epoch": 0.63, "grad_norm": 1.7020839215752526, "learning_rate": 3.18310041501908e-06, "loss": 0.9014, "step": 5859 }, { "epoch": 0.63, "grad_norm": 1.8185721354319937, "learning_rate": 3.181478469861259e-06, "loss": 0.9388, "step": 5860 }, { "epoch": 0.63, "grad_norm": 1.6911778680923761, "learning_rate": 3.179856745205442e-06, "loss": 0.8233, "step": 5861 }, { "epoch": 0.63, "grad_norm": 1.7263573971030368, "learning_rate": 3.1782352412482693e-06, "loss": 0.8779, "step": 5862 }, { "epoch": 0.63, "grad_norm": 1.6619367258931648, "learning_rate": 3.176613958186355e-06, "loss": 0.8337, "step": 5863 }, { "epoch": 0.63, "grad_norm": 1.7507500152488946, "learning_rate": 3.1749928962162845e-06, "loss": 0.8808, "step": 5864 }, { "epoch": 0.63, "grad_norm": 1.82462849945111, "learning_rate": 3.1733720555346158e-06, "loss": 0.9555, "step": 5865 }, { "epoch": 0.63, "grad_norm": 1.8058423835082655, "learning_rate": 3.1717514363378864e-06, "loss": 0.9253, "step": 5866 }, { "epoch": 0.63, "grad_norm": 0.809882810738741, "learning_rate": 3.170131038822595e-06, "loss": 1.068, "step": 5867 }, { "epoch": 0.63, "grad_norm": 1.6765038888351997, "learning_rate": 3.1685108631852244e-06, "loss": 0.9453, "step": 5868 }, { "epoch": 0.63, "grad_norm": 1.6872054348538064, "learning_rate": 3.1668909096222254e-06, "loss": 0.8332, "step": 5869 }, { "epoch": 0.63, "grad_norm": 1.6314366293187808, "learning_rate": 3.1652711783300234e-06, "loss": 0.9802, "step": 5870 }, { "epoch": 0.63, "grad_norm": 1.7319075843527971, "learning_rate": 3.163651669505017e-06, "loss": 0.9108, "step": 5871 }, { "epoch": 0.63, "grad_norm": 1.6905214576276417, "learning_rate": 3.1620323833435763e-06, "loss": 0.9665, "step": 5872 }, { "epoch": 0.63, "grad_norm": 1.6689876058058062, "learning_rate": 3.160413320042045e-06, "loss": 0.8354, "step": 5873 }, { "epoch": 0.63, "grad_norm": 1.7242511033724275, "learning_rate": 3.15879447979674e-06, "loss": 0.9233, "step": 5874 }, { "epoch": 0.63, "grad_norm": 1.7616438454034957, "learning_rate": 3.157175862803953e-06, "loss": 0.8893, "step": 5875 }, { "epoch": 0.63, "grad_norm": 1.7910306081815668, "learning_rate": 3.155557469259946e-06, "loss": 0.8419, "step": 5876 }, { "epoch": 0.63, "grad_norm": 1.8121446284266336, "learning_rate": 3.153939299360956e-06, "loss": 0.9873, "step": 5877 }, { "epoch": 0.63, "grad_norm": 1.6781771244354162, "learning_rate": 3.1523213533031884e-06, "loss": 0.897, "step": 5878 }, { "epoch": 0.63, "grad_norm": 1.7116553834123547, "learning_rate": 3.150703631282826e-06, "loss": 0.8951, "step": 5879 }, { "epoch": 0.63, "grad_norm": 1.8551495244832301, 
"learning_rate": 3.149086133496025e-06, "loss": 0.9029, "step": 5880 }, { "epoch": 0.63, "grad_norm": 1.7034610374779258, "learning_rate": 3.1474688601389113e-06, "loss": 0.8916, "step": 5881 }, { "epoch": 0.63, "grad_norm": 1.7071151458351785, "learning_rate": 3.145851811407584e-06, "loss": 0.8459, "step": 5882 }, { "epoch": 0.63, "grad_norm": 1.7485908135064498, "learning_rate": 3.144234987498117e-06, "loss": 0.9286, "step": 5883 }, { "epoch": 0.63, "grad_norm": 1.977034554165767, "learning_rate": 3.1426183886065565e-06, "loss": 0.947, "step": 5884 }, { "epoch": 0.63, "grad_norm": 1.702660258914151, "learning_rate": 3.141002014928918e-06, "loss": 0.8267, "step": 5885 }, { "epoch": 0.63, "grad_norm": 1.6175068476557635, "learning_rate": 3.1393858666611958e-06, "loss": 0.8594, "step": 5886 }, { "epoch": 0.63, "grad_norm": 1.7277996217375142, "learning_rate": 3.1377699439993525e-06, "loss": 0.9464, "step": 5887 }, { "epoch": 0.63, "grad_norm": 1.6841837664110364, "learning_rate": 3.1361542471393232e-06, "loss": 0.8136, "step": 5888 }, { "epoch": 0.63, "grad_norm": 1.6172030697683588, "learning_rate": 3.1345387762770165e-06, "loss": 0.9168, "step": 5889 }, { "epoch": 0.63, "grad_norm": 1.7724043498871818, "learning_rate": 3.132923531608313e-06, "loss": 0.9275, "step": 5890 }, { "epoch": 0.63, "grad_norm": 0.8871575303102432, "learning_rate": 3.13130851332907e-06, "loss": 1.0896, "step": 5891 }, { "epoch": 0.63, "grad_norm": 1.8027824178855458, "learning_rate": 3.1296937216351115e-06, "loss": 0.8817, "step": 5892 }, { "epoch": 0.63, "grad_norm": 1.6955780874868664, "learning_rate": 3.128079156722236e-06, "loss": 0.9195, "step": 5893 }, { "epoch": 0.63, "grad_norm": 1.8026061750144518, "learning_rate": 3.126464818786218e-06, "loss": 0.878, "step": 5894 }, { "epoch": 0.63, "grad_norm": 1.8456182732961544, "learning_rate": 3.124850708022799e-06, "loss": 0.9758, "step": 5895 }, { "epoch": 0.63, "grad_norm": 1.8926715493755302, "learning_rate": 3.1232368246276956e-06, "loss": 0.8442, "step": 5896 }, { "epoch": 0.63, "grad_norm": 1.8223375152386583, "learning_rate": 3.121623168796598e-06, "loss": 0.8765, "step": 5897 }, { "epoch": 0.63, "grad_norm": 1.7724458824086222, "learning_rate": 3.120009740725169e-06, "loss": 0.904, "step": 5898 }, { "epoch": 0.63, "grad_norm": 1.7367399672833368, "learning_rate": 3.118396540609038e-06, "loss": 0.9019, "step": 5899 }, { "epoch": 0.63, "grad_norm": 1.6642235151955018, "learning_rate": 3.116783568643814e-06, "loss": 0.8635, "step": 5900 }, { "epoch": 0.63, "grad_norm": 1.7740724111473123, "learning_rate": 3.115170825025074e-06, "loss": 0.8207, "step": 5901 }, { "epoch": 0.63, "grad_norm": 1.7005533921918339, "learning_rate": 3.1135583099483703e-06, "loss": 0.8672, "step": 5902 }, { "epoch": 0.63, "grad_norm": 1.8503859378528282, "learning_rate": 3.1119460236092246e-06, "loss": 0.8917, "step": 5903 }, { "epoch": 0.63, "grad_norm": 0.8095082068170109, "learning_rate": 3.1103339662031328e-06, "loss": 1.0401, "step": 5904 }, { "epoch": 0.63, "grad_norm": 1.7662652192247699, "learning_rate": 3.1087221379255615e-06, "loss": 0.9392, "step": 5905 }, { "epoch": 0.63, "grad_norm": 1.9375254254121423, "learning_rate": 3.1071105389719523e-06, "loss": 0.9165, "step": 5906 }, { "epoch": 0.64, "grad_norm": 1.7984812071496903, "learning_rate": 3.1054991695377156e-06, "loss": 0.8915, "step": 5907 }, { "epoch": 0.64, "grad_norm": 1.707407525132162, "learning_rate": 3.103888029818235e-06, "loss": 0.8661, "step": 5908 }, { "epoch": 0.64, "grad_norm": 1.8170398232572122, 
"learning_rate": 3.102277120008871e-06, "loss": 0.9539, "step": 5909 }, { "epoch": 0.64, "grad_norm": 1.6988510130241157, "learning_rate": 3.100666440304946e-06, "loss": 0.9478, "step": 5910 }, { "epoch": 0.64, "grad_norm": 1.5817312829675299, "learning_rate": 3.0990559909017635e-06, "loss": 0.8782, "step": 5911 }, { "epoch": 0.64, "grad_norm": 0.8304123532334345, "learning_rate": 3.0974457719945953e-06, "loss": 1.0577, "step": 5912 }, { "epoch": 0.64, "grad_norm": 1.7057524585909356, "learning_rate": 3.095835783778686e-06, "loss": 0.9172, "step": 5913 }, { "epoch": 0.64, "grad_norm": 1.780023688449989, "learning_rate": 3.0942260264492537e-06, "loss": 0.8883, "step": 5914 }, { "epoch": 0.64, "grad_norm": 0.8276461051412023, "learning_rate": 3.0926165002014854e-06, "loss": 1.0582, "step": 5915 }, { "epoch": 0.64, "grad_norm": 1.6817030771110815, "learning_rate": 3.091007205230541e-06, "loss": 0.8155, "step": 5916 }, { "epoch": 0.64, "grad_norm": 1.71056639917109, "learning_rate": 3.0893981417315556e-06, "loss": 0.9186, "step": 5917 }, { "epoch": 0.64, "grad_norm": 1.7902469432423358, "learning_rate": 3.0877893098996324e-06, "loss": 0.9168, "step": 5918 }, { "epoch": 0.64, "grad_norm": 0.8247880020586374, "learning_rate": 3.086180709929849e-06, "loss": 1.0401, "step": 5919 }, { "epoch": 0.64, "grad_norm": 1.8095971890401104, "learning_rate": 3.0845723420172514e-06, "loss": 0.9764, "step": 5920 }, { "epoch": 0.64, "grad_norm": 1.6950450251188678, "learning_rate": 3.08296420635686e-06, "loss": 0.9544, "step": 5921 }, { "epoch": 0.64, "grad_norm": 2.0910220658295766, "learning_rate": 3.0813563031436676e-06, "loss": 0.9251, "step": 5922 }, { "epoch": 0.64, "grad_norm": 1.6838286193708225, "learning_rate": 3.079748632572639e-06, "loss": 0.9142, "step": 5923 }, { "epoch": 0.64, "grad_norm": 1.7032773692498633, "learning_rate": 3.0781411948387074e-06, "loss": 0.9118, "step": 5924 }, { "epoch": 0.64, "grad_norm": 1.918452891098777, "learning_rate": 3.0765339901367826e-06, "loss": 0.9139, "step": 5925 }, { "epoch": 0.64, "grad_norm": 1.7036606501229774, "learning_rate": 3.074927018661743e-06, "loss": 0.8939, "step": 5926 }, { "epoch": 0.64, "grad_norm": 1.616987712187354, "learning_rate": 3.073320280608437e-06, "loss": 0.9406, "step": 5927 }, { "epoch": 0.64, "grad_norm": 1.8117324214979824, "learning_rate": 3.071713776171692e-06, "loss": 0.8702, "step": 5928 }, { "epoch": 0.64, "grad_norm": 1.879351501104517, "learning_rate": 3.070107505546298e-06, "loss": 0.9429, "step": 5929 }, { "epoch": 0.64, "grad_norm": 1.731158276582898, "learning_rate": 3.0685014689270244e-06, "loss": 0.7974, "step": 5930 }, { "epoch": 0.64, "grad_norm": 1.6794122072728714, "learning_rate": 3.0668956665086047e-06, "loss": 0.8496, "step": 5931 }, { "epoch": 0.64, "grad_norm": 1.8182068221685521, "learning_rate": 3.0652900984857492e-06, "loss": 0.9865, "step": 5932 }, { "epoch": 0.64, "grad_norm": 1.7515375859336404, "learning_rate": 3.0636847650531404e-06, "loss": 0.8685, "step": 5933 }, { "epoch": 0.64, "grad_norm": 1.6439952511456322, "learning_rate": 3.0620796664054294e-06, "loss": 0.8575, "step": 5934 }, { "epoch": 0.64, "grad_norm": 1.7981703670205724, "learning_rate": 3.0604748027372393e-06, "loss": 0.9268, "step": 5935 }, { "epoch": 0.64, "grad_norm": 1.7069045138522576, "learning_rate": 3.0588701742431654e-06, "loss": 0.9177, "step": 5936 }, { "epoch": 0.64, "grad_norm": 1.7098471859973892, "learning_rate": 3.057265781117776e-06, "loss": 0.9167, "step": 5937 }, { "epoch": 0.64, "grad_norm": 1.7173485875129368, 
"learning_rate": 3.055661623555608e-06, "loss": 0.861, "step": 5938 }, { "epoch": 0.64, "grad_norm": 1.69203311469032, "learning_rate": 3.0540577017511703e-06, "loss": 0.8739, "step": 5939 }, { "epoch": 0.64, "grad_norm": 0.8165688591674226, "learning_rate": 3.0524540158989478e-06, "loss": 1.0237, "step": 5940 }, { "epoch": 0.64, "grad_norm": 1.7626798376281505, "learning_rate": 3.0508505661933873e-06, "loss": 0.9922, "step": 5941 }, { "epoch": 0.64, "grad_norm": 1.725256495051611, "learning_rate": 3.049247352828917e-06, "loss": 0.8986, "step": 5942 }, { "epoch": 0.64, "grad_norm": 1.8081005236479097, "learning_rate": 3.0476443759999296e-06, "loss": 0.9254, "step": 5943 }, { "epoch": 0.64, "grad_norm": 1.8188434452971831, "learning_rate": 3.0460416359007917e-06, "loss": 0.967, "step": 5944 }, { "epoch": 0.64, "grad_norm": 1.6943497285145859, "learning_rate": 3.0444391327258425e-06, "loss": 0.867, "step": 5945 }, { "epoch": 0.64, "grad_norm": 0.7926859645603526, "learning_rate": 3.0428368666693907e-06, "loss": 1.0175, "step": 5946 }, { "epoch": 0.64, "grad_norm": 1.7407891648600382, "learning_rate": 3.041234837925715e-06, "loss": 0.937, "step": 5947 }, { "epoch": 0.64, "grad_norm": 1.7844941206071652, "learning_rate": 3.039633046689069e-06, "loss": 0.8568, "step": 5948 }, { "epoch": 0.64, "grad_norm": 1.6509290964576664, "learning_rate": 3.038031493153675e-06, "loss": 0.8999, "step": 5949 }, { "epoch": 0.64, "grad_norm": 0.8123154619729146, "learning_rate": 3.0364301775137246e-06, "loss": 1.046, "step": 5950 }, { "epoch": 0.64, "grad_norm": 1.7764822099963673, "learning_rate": 3.0348290999633877e-06, "loss": 0.9426, "step": 5951 }, { "epoch": 0.64, "grad_norm": 1.6771879028974497, "learning_rate": 3.033228260696795e-06, "loss": 0.9158, "step": 5952 }, { "epoch": 0.64, "grad_norm": 1.6875834981263063, "learning_rate": 3.031627659908057e-06, "loss": 0.8935, "step": 5953 }, { "epoch": 0.64, "grad_norm": 0.7973580216136049, "learning_rate": 3.0300272977912516e-06, "loss": 1.0494, "step": 5954 }, { "epoch": 0.64, "grad_norm": 1.6045989277864892, "learning_rate": 3.028427174540426e-06, "loss": 0.8222, "step": 5955 }, { "epoch": 0.64, "grad_norm": 1.8550055334470514, "learning_rate": 3.0268272903496036e-06, "loss": 0.9832, "step": 5956 }, { "epoch": 0.64, "grad_norm": 1.7728818648426052, "learning_rate": 3.0252276454127753e-06, "loss": 0.8825, "step": 5957 }, { "epoch": 0.64, "grad_norm": 1.7010165986880494, "learning_rate": 3.023628239923902e-06, "loss": 0.817, "step": 5958 }, { "epoch": 0.64, "grad_norm": 0.8348257350184076, "learning_rate": 3.0220290740769193e-06, "loss": 1.0267, "step": 5959 }, { "epoch": 0.64, "grad_norm": 1.7380103659541342, "learning_rate": 3.0204301480657306e-06, "loss": 0.8772, "step": 5960 }, { "epoch": 0.64, "grad_norm": 1.7714315071128273, "learning_rate": 3.0188314620842097e-06, "loss": 0.9352, "step": 5961 }, { "epoch": 0.64, "grad_norm": 0.8143390478487336, "learning_rate": 3.0172330163262072e-06, "loss": 1.016, "step": 5962 }, { "epoch": 0.64, "grad_norm": 1.7505343289147601, "learning_rate": 3.0156348109855337e-06, "loss": 0.9167, "step": 5963 }, { "epoch": 0.64, "grad_norm": 1.8094075966124814, "learning_rate": 3.014036846255982e-06, "loss": 0.8797, "step": 5964 }, { "epoch": 0.64, "grad_norm": 1.7562080751011817, "learning_rate": 3.0124391223313093e-06, "loss": 0.9049, "step": 5965 }, { "epoch": 0.64, "grad_norm": 1.7706505604968563, "learning_rate": 3.0108416394052443e-06, "loss": 0.9225, "step": 5966 }, { "epoch": 0.64, "grad_norm": 1.7200430098579997, 
"learning_rate": 3.009244397671489e-06, "loss": 0.822, "step": 5967 }, { "epoch": 0.64, "grad_norm": 0.8031389733107772, "learning_rate": 3.007647397323714e-06, "loss": 1.0237, "step": 5968 }, { "epoch": 0.64, "grad_norm": 1.7894576419525712, "learning_rate": 3.00605063855556e-06, "loss": 0.8858, "step": 5969 }, { "epoch": 0.64, "grad_norm": 1.7302050680397307, "learning_rate": 3.0044541215606398e-06, "loss": 0.8412, "step": 5970 }, { "epoch": 0.64, "grad_norm": 0.7829716701145696, "learning_rate": 3.002857846532538e-06, "loss": 1.0205, "step": 5971 }, { "epoch": 0.64, "grad_norm": 1.679517764147183, "learning_rate": 3.0012618136648085e-06, "loss": 0.9287, "step": 5972 }, { "epoch": 0.64, "grad_norm": 1.840580883876344, "learning_rate": 2.9996660231509742e-06, "loss": 0.8858, "step": 5973 }, { "epoch": 0.64, "grad_norm": 1.6860806230385499, "learning_rate": 2.9980704751845302e-06, "loss": 0.8518, "step": 5974 }, { "epoch": 0.64, "grad_norm": 1.7400871044752215, "learning_rate": 2.996475169958942e-06, "loss": 0.94, "step": 5975 }, { "epoch": 0.64, "grad_norm": 1.7440812409815565, "learning_rate": 2.9948801076676477e-06, "loss": 0.9361, "step": 5976 }, { "epoch": 0.64, "grad_norm": 1.737919860365998, "learning_rate": 2.993285288504053e-06, "loss": 0.9233, "step": 5977 }, { "epoch": 0.64, "grad_norm": 1.7584902462603629, "learning_rate": 2.9916907126615343e-06, "loss": 0.9503, "step": 5978 }, { "epoch": 0.64, "grad_norm": 1.6395309564306122, "learning_rate": 2.9900963803334417e-06, "loss": 0.8621, "step": 5979 }, { "epoch": 0.64, "grad_norm": 1.7413119372710852, "learning_rate": 2.9885022917130914e-06, "loss": 0.8919, "step": 5980 }, { "epoch": 0.64, "grad_norm": 1.6809644726035522, "learning_rate": 2.986908446993772e-06, "loss": 0.8671, "step": 5981 }, { "epoch": 0.64, "grad_norm": 1.6494030906710535, "learning_rate": 2.9853148463687455e-06, "loss": 0.9215, "step": 5982 }, { "epoch": 0.64, "grad_norm": 1.738825957280495, "learning_rate": 2.9837214900312396e-06, "loss": 0.8628, "step": 5983 }, { "epoch": 0.64, "grad_norm": 0.8068360252179506, "learning_rate": 2.9821283781744537e-06, "loss": 1.0391, "step": 5984 }, { "epoch": 0.64, "grad_norm": 1.625075188469242, "learning_rate": 2.980535510991558e-06, "loss": 0.8903, "step": 5985 }, { "epoch": 0.64, "grad_norm": 1.6781251703528512, "learning_rate": 2.978942888675693e-06, "loss": 0.8902, "step": 5986 }, { "epoch": 0.64, "grad_norm": 1.7113889524109467, "learning_rate": 2.977350511419972e-06, "loss": 0.8454, "step": 5987 }, { "epoch": 0.64, "grad_norm": 1.8034136801238725, "learning_rate": 2.9757583794174744e-06, "loss": 0.8418, "step": 5988 }, { "epoch": 0.64, "grad_norm": 1.6942994468917052, "learning_rate": 2.9741664928612506e-06, "loss": 0.8373, "step": 5989 }, { "epoch": 0.64, "grad_norm": 1.759702015861477, "learning_rate": 2.972574851944325e-06, "loss": 0.9098, "step": 5990 }, { "epoch": 0.64, "grad_norm": 1.73186605271841, "learning_rate": 2.9709834568596873e-06, "loss": 0.8729, "step": 5991 }, { "epoch": 0.64, "grad_norm": 1.6970701558848489, "learning_rate": 2.9693923078003005e-06, "loss": 0.899, "step": 5992 }, { "epoch": 0.64, "grad_norm": 1.7270454880222068, "learning_rate": 2.9678014049591e-06, "loss": 0.9201, "step": 5993 }, { "epoch": 0.64, "grad_norm": 1.789339017907301, "learning_rate": 2.9662107485289815e-06, "loss": 0.8291, "step": 5994 }, { "epoch": 0.64, "grad_norm": 1.7770931892178092, "learning_rate": 2.964620338702823e-06, "loss": 0.874, "step": 5995 }, { "epoch": 0.64, "grad_norm": 1.8974762030576109, "learning_rate": 
2.9630301756734646e-06, "loss": 0.9253, "step": 5996 }, { "epoch": 0.64, "grad_norm": 1.740669981591448, "learning_rate": 2.96144025963372e-06, "loss": 0.8837, "step": 5997 }, { "epoch": 0.64, "grad_norm": 1.6977449563437372, "learning_rate": 2.959850590776372e-06, "loss": 0.9595, "step": 5998 }, { "epoch": 0.64, "grad_norm": 1.7593274925550832, "learning_rate": 2.958261169294174e-06, "loss": 0.8789, "step": 5999 }, { "epoch": 0.65, "grad_norm": 1.7578438580062556, "learning_rate": 2.9566719953798474e-06, "loss": 0.8369, "step": 6000 }, { "epoch": 0.65, "grad_norm": 1.7524473715110551, "learning_rate": 2.9550830692260856e-06, "loss": 0.8574, "step": 6001 }, { "epoch": 0.65, "grad_norm": 1.7793799471767124, "learning_rate": 2.953494391025552e-06, "loss": 0.9759, "step": 6002 }, { "epoch": 0.65, "grad_norm": 1.7594154263664985, "learning_rate": 2.951905960970879e-06, "loss": 0.9248, "step": 6003 }, { "epoch": 0.65, "grad_norm": 1.7603678001473364, "learning_rate": 2.95031777925467e-06, "loss": 0.8848, "step": 6004 }, { "epoch": 0.65, "grad_norm": 1.809113419279355, "learning_rate": 2.9487298460694955e-06, "loss": 0.9269, "step": 6005 }, { "epoch": 0.65, "grad_norm": 1.746570422010187, "learning_rate": 2.9471421616078987e-06, "loss": 0.8938, "step": 6006 }, { "epoch": 0.65, "grad_norm": 1.6448328629558564, "learning_rate": 2.9455547260623926e-06, "loss": 0.8646, "step": 6007 }, { "epoch": 0.65, "grad_norm": 1.7160312740831634, "learning_rate": 2.9439675396254586e-06, "loss": 0.9686, "step": 6008 }, { "epoch": 0.65, "grad_norm": 1.7851820131759302, "learning_rate": 2.9423806024895475e-06, "loss": 0.9191, "step": 6009 }, { "epoch": 0.65, "grad_norm": 1.7940048909883966, "learning_rate": 2.9407939148470834e-06, "loss": 0.9155, "step": 6010 }, { "epoch": 0.65, "grad_norm": 0.8216422592687966, "learning_rate": 2.939207476890456e-06, "loss": 1.0329, "step": 6011 }, { "epoch": 0.65, "grad_norm": 1.7354790876734008, "learning_rate": 2.937621288812026e-06, "loss": 0.9656, "step": 6012 }, { "epoch": 0.65, "grad_norm": 1.7888611400271068, "learning_rate": 2.9360353508041257e-06, "loss": 0.8988, "step": 6013 }, { "epoch": 0.65, "grad_norm": 1.7453972062441918, "learning_rate": 2.9344496630590548e-06, "loss": 0.8861, "step": 6014 }, { "epoch": 0.65, "grad_norm": 1.618093670826447, "learning_rate": 2.9328642257690835e-06, "loss": 0.8745, "step": 6015 }, { "epoch": 0.65, "grad_norm": 1.7286287431528895, "learning_rate": 2.931279039126451e-06, "loss": 0.9263, "step": 6016 }, { "epoch": 0.65, "grad_norm": 1.7158371445740197, "learning_rate": 2.9296941033233662e-06, "loss": 0.8541, "step": 6017 }, { "epoch": 0.65, "grad_norm": 1.7616907820192784, "learning_rate": 2.9281094185520097e-06, "loss": 0.9177, "step": 6018 }, { "epoch": 0.65, "grad_norm": 1.8819569321560612, "learning_rate": 2.9265249850045284e-06, "loss": 0.9371, "step": 6019 }, { "epoch": 0.65, "grad_norm": 1.74306855507922, "learning_rate": 2.924940802873041e-06, "loss": 0.8426, "step": 6020 }, { "epoch": 0.65, "grad_norm": 1.6661061955898464, "learning_rate": 2.9233568723496354e-06, "loss": 0.9155, "step": 6021 }, { "epoch": 0.65, "grad_norm": 1.6578804687695754, "learning_rate": 2.9217731936263683e-06, "loss": 0.8616, "step": 6022 }, { "epoch": 0.65, "grad_norm": 1.7127656192611829, "learning_rate": 2.9201897668952655e-06, "loss": 0.912, "step": 6023 }, { "epoch": 0.65, "grad_norm": 0.8235003911092991, "learning_rate": 2.918606592348324e-06, "loss": 1.0367, "step": 6024 }, { "epoch": 0.65, "grad_norm": 0.8349585148487906, "learning_rate": 
2.9170236701775113e-06, "loss": 1.0247, "step": 6025 }, { "epoch": 0.65, "grad_norm": 1.7789840478179613, "learning_rate": 2.9154410005747586e-06, "loss": 0.8222, "step": 6026 }, { "epoch": 0.65, "grad_norm": 1.765453500414764, "learning_rate": 2.9138585837319694e-06, "loss": 0.8439, "step": 6027 }, { "epoch": 0.65, "grad_norm": 1.8536606188868114, "learning_rate": 2.912276419841019e-06, "loss": 0.9478, "step": 6028 }, { "epoch": 0.65, "grad_norm": 1.8497212071649534, "learning_rate": 2.9106945090937524e-06, "loss": 0.913, "step": 6029 }, { "epoch": 0.65, "grad_norm": 2.024634847031988, "learning_rate": 2.9091128516819767e-06, "loss": 0.8534, "step": 6030 }, { "epoch": 0.65, "grad_norm": 1.8227162172449467, "learning_rate": 2.9075314477974764e-06, "loss": 0.9863, "step": 6031 }, { "epoch": 0.65, "grad_norm": 1.6940789843880182, "learning_rate": 2.905950297632004e-06, "loss": 0.8558, "step": 6032 }, { "epoch": 0.65, "grad_norm": 1.6851608904491584, "learning_rate": 2.9043694013772756e-06, "loss": 0.8513, "step": 6033 }, { "epoch": 0.65, "grad_norm": 1.7066822500640777, "learning_rate": 2.9027887592249817e-06, "loss": 0.9334, "step": 6034 }, { "epoch": 0.65, "grad_norm": 1.7286416496596715, "learning_rate": 2.9012083713667833e-06, "loss": 0.911, "step": 6035 }, { "epoch": 0.65, "grad_norm": 1.7863654302441503, "learning_rate": 2.8996282379943053e-06, "loss": 0.9281, "step": 6036 }, { "epoch": 0.65, "grad_norm": 1.7557832930313828, "learning_rate": 2.8980483592991426e-06, "loss": 0.9111, "step": 6037 }, { "epoch": 0.65, "grad_norm": 1.7098172894974268, "learning_rate": 2.896468735472863e-06, "loss": 0.86, "step": 6038 }, { "epoch": 0.65, "grad_norm": 1.7804800660102624, "learning_rate": 2.894889366707002e-06, "loss": 0.9099, "step": 6039 }, { "epoch": 0.65, "grad_norm": 1.7691636988629234, "learning_rate": 2.8933102531930622e-06, "loss": 1.0019, "step": 6040 }, { "epoch": 0.65, "grad_norm": 1.8042255133971408, "learning_rate": 2.8917313951225164e-06, "loss": 0.8375, "step": 6041 }, { "epoch": 0.65, "grad_norm": 1.7254406629344132, "learning_rate": 2.8901527926868088e-06, "loss": 0.9047, "step": 6042 }, { "epoch": 0.65, "grad_norm": 1.775340304987951, "learning_rate": 2.888574446077348e-06, "loss": 0.8659, "step": 6043 }, { "epoch": 0.65, "grad_norm": 1.8143593539774812, "learning_rate": 2.886996355485514e-06, "loss": 0.9103, "step": 6044 }, { "epoch": 0.65, "grad_norm": 1.7762774877316694, "learning_rate": 2.885418521102659e-06, "loss": 0.8831, "step": 6045 }, { "epoch": 0.65, "grad_norm": 1.61466674779323, "learning_rate": 2.8838409431200976e-06, "loss": 0.8773, "step": 6046 }, { "epoch": 0.65, "grad_norm": 1.9349153091569657, "learning_rate": 2.882263621729116e-06, "loss": 0.8956, "step": 6047 }, { "epoch": 0.65, "grad_norm": 1.858944405783447, "learning_rate": 2.880686557120973e-06, "loss": 0.8307, "step": 6048 }, { "epoch": 0.65, "grad_norm": 1.628088203636189, "learning_rate": 2.8791097494868896e-06, "loss": 0.8353, "step": 6049 }, { "epoch": 0.65, "grad_norm": 1.5899853853130876, "learning_rate": 2.877533199018061e-06, "loss": 0.8783, "step": 6050 }, { "epoch": 0.65, "grad_norm": 1.8280499929353247, "learning_rate": 2.875956905905651e-06, "loss": 0.8927, "step": 6051 }, { "epoch": 0.65, "grad_norm": 1.7767763436892685, "learning_rate": 2.8743808703407866e-06, "loss": 0.9425, "step": 6052 }, { "epoch": 0.65, "grad_norm": 0.8142903115228977, "learning_rate": 2.87280509251457e-06, "loss": 0.9859, "step": 6053 }, { "epoch": 0.65, "grad_norm": 1.6492030419701122, "learning_rate": 
2.8712295726180717e-06, "loss": 0.8989, "step": 6054 }, { "epoch": 0.65, "grad_norm": 1.8452905252490812, "learning_rate": 2.8696543108423246e-06, "loss": 0.8865, "step": 6055 }, { "epoch": 0.65, "grad_norm": 0.832295767403557, "learning_rate": 2.8680793073783363e-06, "loss": 1.0378, "step": 6056 }, { "epoch": 0.65, "grad_norm": 1.70554044412064, "learning_rate": 2.866504562417086e-06, "loss": 0.8911, "step": 6057 }, { "epoch": 0.65, "grad_norm": 1.778417207166715, "learning_rate": 2.864930076149509e-06, "loss": 0.9217, "step": 6058 }, { "epoch": 0.65, "grad_norm": 1.7198575500235072, "learning_rate": 2.8633558487665215e-06, "loss": 0.8713, "step": 6059 }, { "epoch": 0.65, "grad_norm": 1.634632637012075, "learning_rate": 2.861781880459005e-06, "loss": 0.8268, "step": 6060 }, { "epoch": 0.65, "grad_norm": 1.9047645191316849, "learning_rate": 2.8602081714178043e-06, "loss": 0.8769, "step": 6061 }, { "epoch": 0.65, "grad_norm": 1.7232663563317052, "learning_rate": 2.8586347218337406e-06, "loss": 0.9415, "step": 6062 }, { "epoch": 0.65, "grad_norm": 1.6429387289573003, "learning_rate": 2.8570615318976013e-06, "loss": 0.8474, "step": 6063 }, { "epoch": 0.65, "grad_norm": 1.738225941033339, "learning_rate": 2.855488601800137e-06, "loss": 0.8276, "step": 6064 }, { "epoch": 0.65, "grad_norm": 1.630078377791851, "learning_rate": 2.853915931732073e-06, "loss": 0.875, "step": 6065 }, { "epoch": 0.65, "grad_norm": 1.6769466404880535, "learning_rate": 2.852343521884103e-06, "loss": 0.9314, "step": 6066 }, { "epoch": 0.65, "grad_norm": 1.8019745524330946, "learning_rate": 2.850771372446884e-06, "loss": 0.9682, "step": 6067 }, { "epoch": 0.65, "grad_norm": 1.664919311853383, "learning_rate": 2.8491994836110454e-06, "loss": 0.9121, "step": 6068 }, { "epoch": 0.65, "grad_norm": 0.8311633682934464, "learning_rate": 2.8476278555671834e-06, "loss": 1.0422, "step": 6069 }, { "epoch": 0.65, "grad_norm": 1.9697373497803674, "learning_rate": 2.846056488505866e-06, "loss": 0.8938, "step": 6070 }, { "epoch": 0.65, "grad_norm": 1.6921412618593161, "learning_rate": 2.844485382617624e-06, "loss": 0.8738, "step": 6071 }, { "epoch": 0.65, "grad_norm": 1.672874282792823, "learning_rate": 2.84291453809296e-06, "loss": 0.9525, "step": 6072 }, { "epoch": 0.65, "grad_norm": 1.7222164872068368, "learning_rate": 2.8413439551223472e-06, "loss": 0.8762, "step": 6073 }, { "epoch": 0.65, "grad_norm": 1.7768337113099122, "learning_rate": 2.839773633896219e-06, "loss": 0.9038, "step": 6074 }, { "epoch": 0.65, "grad_norm": 1.6849617734951094, "learning_rate": 2.8382035746049863e-06, "loss": 0.865, "step": 6075 }, { "epoch": 0.65, "grad_norm": 1.775290582768773, "learning_rate": 2.836633777439024e-06, "loss": 0.9265, "step": 6076 }, { "epoch": 0.65, "grad_norm": 1.6670501314048238, "learning_rate": 2.8350642425886734e-06, "loss": 0.9609, "step": 6077 }, { "epoch": 0.65, "grad_norm": 1.749364522715793, "learning_rate": 2.833494970244248e-06, "loss": 0.8145, "step": 6078 }, { "epoch": 0.65, "grad_norm": 1.6736627553678913, "learning_rate": 2.831925960596027e-06, "loss": 0.9642, "step": 6079 }, { "epoch": 0.65, "grad_norm": 1.8997364502248302, "learning_rate": 2.830357213834256e-06, "loss": 0.8854, "step": 6080 }, { "epoch": 0.65, "grad_norm": 1.7107816666517686, "learning_rate": 2.8287887301491522e-06, "loss": 0.9282, "step": 6081 }, { "epoch": 0.65, "grad_norm": 1.6829469479402708, "learning_rate": 2.827220509730903e-06, "loss": 0.9168, "step": 6082 }, { "epoch": 0.65, "grad_norm": 1.686013133551253, "learning_rate": 
2.8256525527696554e-06, "loss": 0.8921, "step": 6083 }, { "epoch": 0.65, "grad_norm": 1.6752289618941414, "learning_rate": 2.8240848594555314e-06, "loss": 0.9276, "step": 6084 }, { "epoch": 0.65, "grad_norm": 1.5764214290328076, "learning_rate": 2.822517429978622e-06, "loss": 0.8378, "step": 6085 }, { "epoch": 0.65, "grad_norm": 1.6422650037291362, "learning_rate": 2.8209502645289795e-06, "loss": 0.8236, "step": 6086 }, { "epoch": 0.65, "grad_norm": 1.7487957223460724, "learning_rate": 2.8193833632966296e-06, "loss": 0.8647, "step": 6087 }, { "epoch": 0.65, "grad_norm": 1.8092256444254193, "learning_rate": 2.817816726471566e-06, "loss": 0.9285, "step": 6088 }, { "epoch": 0.65, "grad_norm": 0.8236141328707458, "learning_rate": 2.816250354243748e-06, "loss": 1.0566, "step": 6089 }, { "epoch": 0.65, "grad_norm": 1.6693660660672482, "learning_rate": 2.814684246803101e-06, "loss": 0.8262, "step": 6090 }, { "epoch": 0.65, "grad_norm": 1.7125257832949223, "learning_rate": 2.8131184043395256e-06, "loss": 0.8362, "step": 6091 }, { "epoch": 0.65, "grad_norm": 1.7759666143815545, "learning_rate": 2.8115528270428798e-06, "loss": 0.9318, "step": 6092 }, { "epoch": 0.66, "grad_norm": 1.589969089918983, "learning_rate": 2.8099875151029987e-06, "loss": 0.8193, "step": 6093 }, { "epoch": 0.66, "grad_norm": 1.8961872838356477, "learning_rate": 2.808422468709684e-06, "loss": 0.8918, "step": 6094 }, { "epoch": 0.66, "grad_norm": 1.6670071855491393, "learning_rate": 2.8068576880526986e-06, "loss": 0.8798, "step": 6095 }, { "epoch": 0.66, "grad_norm": 1.7368612222942021, "learning_rate": 2.805293173321779e-06, "loss": 0.9518, "step": 6096 }, { "epoch": 0.66, "grad_norm": 1.8193291967282579, "learning_rate": 2.8037289247066307e-06, "loss": 0.9421, "step": 6097 }, { "epoch": 0.66, "grad_norm": 1.7154085092020732, "learning_rate": 2.8021649423969198e-06, "loss": 0.8796, "step": 6098 }, { "epoch": 0.66, "grad_norm": 1.8348735251047712, "learning_rate": 2.800601226582289e-06, "loss": 0.9242, "step": 6099 }, { "epoch": 0.66, "grad_norm": 1.6148475010514527, "learning_rate": 2.7990377774523397e-06, "loss": 0.8657, "step": 6100 }, { "epoch": 0.66, "grad_norm": 1.7385997736522063, "learning_rate": 2.79747459519665e-06, "loss": 0.8558, "step": 6101 }, { "epoch": 0.66, "grad_norm": 1.7598897283801989, "learning_rate": 2.795911680004757e-06, "loss": 0.8774, "step": 6102 }, { "epoch": 0.66, "grad_norm": 1.6602601522277274, "learning_rate": 2.7943490320661703e-06, "loss": 0.8586, "step": 6103 }, { "epoch": 0.66, "grad_norm": 1.635301117132657, "learning_rate": 2.7927866515703705e-06, "loss": 0.7736, "step": 6104 }, { "epoch": 0.66, "grad_norm": 1.7754370969891193, "learning_rate": 2.7912245387067972e-06, "loss": 0.9574, "step": 6105 }, { "epoch": 0.66, "grad_norm": 1.7642830933516673, "learning_rate": 2.7896626936648622e-06, "loss": 0.8804, "step": 6106 }, { "epoch": 0.66, "grad_norm": 1.6572626116735314, "learning_rate": 2.7881011166339485e-06, "loss": 0.8855, "step": 6107 }, { "epoch": 0.66, "grad_norm": 1.7311138507231523, "learning_rate": 2.7865398078033976e-06, "loss": 0.9892, "step": 6108 }, { "epoch": 0.66, "grad_norm": 1.7606384982214118, "learning_rate": 2.784978767362528e-06, "loss": 0.9203, "step": 6109 }, { "epoch": 0.66, "grad_norm": 1.6820993322318152, "learning_rate": 2.783417995500619e-06, "loss": 0.9415, "step": 6110 }, { "epoch": 0.66, "grad_norm": 1.7114924009000194, "learning_rate": 2.7818574924069184e-06, "loss": 0.8152, "step": 6111 }, { "epoch": 0.66, "grad_norm": 1.75787421656962, "learning_rate": 
2.7802972582706434e-06, "loss": 0.8778, "step": 6112 }, { "epoch": 0.66, "grad_norm": 0.7979792417167832, "learning_rate": 2.778737293280981e-06, "loss": 1.012, "step": 6113 }, { "epoch": 0.66, "grad_norm": 1.7866699431767055, "learning_rate": 2.777177597627077e-06, "loss": 0.9008, "step": 6114 }, { "epoch": 0.66, "grad_norm": 0.8327443518825703, "learning_rate": 2.7756181714980525e-06, "loss": 1.0168, "step": 6115 }, { "epoch": 0.66, "grad_norm": 1.828361947255035, "learning_rate": 2.774059015082996e-06, "loss": 0.8671, "step": 6116 }, { "epoch": 0.66, "grad_norm": 1.7077204830703197, "learning_rate": 2.772500128570955e-06, "loss": 0.8736, "step": 6117 }, { "epoch": 0.66, "grad_norm": 1.780330384818927, "learning_rate": 2.770941512150953e-06, "loss": 0.9395, "step": 6118 }, { "epoch": 0.66, "grad_norm": 1.8020072770312363, "learning_rate": 2.76938316601198e-06, "loss": 0.8437, "step": 6119 }, { "epoch": 0.66, "grad_norm": 1.830277331522214, "learning_rate": 2.7678250903429875e-06, "loss": 0.8965, "step": 6120 }, { "epoch": 0.66, "grad_norm": 1.7055387144444438, "learning_rate": 2.7662672853328966e-06, "loss": 0.8392, "step": 6121 }, { "epoch": 0.66, "grad_norm": 1.6717683683712818, "learning_rate": 2.7647097511706e-06, "loss": 0.8614, "step": 6122 }, { "epoch": 0.66, "grad_norm": 1.702798164971416, "learning_rate": 2.7631524880449513e-06, "loss": 0.9627, "step": 6123 }, { "epoch": 0.66, "grad_norm": 1.831077564777693, "learning_rate": 2.7615954961447733e-06, "loss": 0.8586, "step": 6124 }, { "epoch": 0.66, "grad_norm": 1.6853325022880683, "learning_rate": 2.7600387756588616e-06, "loss": 0.9364, "step": 6125 }, { "epoch": 0.66, "grad_norm": 1.8018666698121288, "learning_rate": 2.7584823267759686e-06, "loss": 0.799, "step": 6126 }, { "epoch": 0.66, "grad_norm": 1.7595746619995463, "learning_rate": 2.7569261496848203e-06, "loss": 0.925, "step": 6127 }, { "epoch": 0.66, "grad_norm": 0.8196257907160969, "learning_rate": 2.7553702445741125e-06, "loss": 1.0466, "step": 6128 }, { "epoch": 0.66, "grad_norm": 1.806424488256776, "learning_rate": 2.753814611632498e-06, "loss": 0.8055, "step": 6129 }, { "epoch": 0.66, "grad_norm": 1.700399070990565, "learning_rate": 2.752259251048606e-06, "loss": 0.8753, "step": 6130 }, { "epoch": 0.66, "grad_norm": 1.6877855745986927, "learning_rate": 2.750704163011031e-06, "loss": 0.8591, "step": 6131 }, { "epoch": 0.66, "grad_norm": 1.7878932804134957, "learning_rate": 2.749149347708331e-06, "loss": 0.9575, "step": 6132 }, { "epoch": 0.66, "grad_norm": 1.8296910250790603, "learning_rate": 2.7475948053290302e-06, "loss": 0.9094, "step": 6133 }, { "epoch": 0.66, "grad_norm": 1.660713281541635, "learning_rate": 2.7460405360616243e-06, "loss": 0.9381, "step": 6134 }, { "epoch": 0.66, "grad_norm": 1.7339096091046362, "learning_rate": 2.7444865400945763e-06, "loss": 0.9092, "step": 6135 }, { "epoch": 0.66, "grad_norm": 1.7005716654999283, "learning_rate": 2.7429328176163088e-06, "loss": 0.8896, "step": 6136 }, { "epoch": 0.66, "grad_norm": 1.7424268511699303, "learning_rate": 2.741379368815219e-06, "loss": 0.8578, "step": 6137 }, { "epoch": 0.66, "grad_norm": 1.7134153301420674, "learning_rate": 2.7398261938796688e-06, "loss": 0.8535, "step": 6138 }, { "epoch": 0.66, "grad_norm": 1.7039167623203721, "learning_rate": 2.7382732929979833e-06, "loss": 0.846, "step": 6139 }, { "epoch": 0.66, "grad_norm": 1.6881739499677193, "learning_rate": 2.7367206663584578e-06, "loss": 0.9552, "step": 6140 }, { "epoch": 0.66, "grad_norm": 1.7171640333496894, "learning_rate": 
2.7351683141493566e-06, "loss": 0.8075, "step": 6141 }, { "epoch": 0.66, "grad_norm": 1.8379567687547802, "learning_rate": 2.733616236558905e-06, "loss": 0.8996, "step": 6142 }, { "epoch": 0.66, "grad_norm": 1.6275172951458954, "learning_rate": 2.7320644337752966e-06, "loss": 0.8432, "step": 6143 }, { "epoch": 0.66, "grad_norm": 1.6712431521403892, "learning_rate": 2.7305129059866956e-06, "loss": 0.8203, "step": 6144 }, { "epoch": 0.66, "grad_norm": 1.7396737297790197, "learning_rate": 2.728961653381228e-06, "loss": 0.9509, "step": 6145 }, { "epoch": 0.66, "grad_norm": 1.8209484307315307, "learning_rate": 2.7274106761469887e-06, "loss": 0.9298, "step": 6146 }, { "epoch": 0.66, "grad_norm": 1.9527494138888886, "learning_rate": 2.7258599744720416e-06, "loss": 0.9489, "step": 6147 }, { "epoch": 0.66, "grad_norm": 1.7011918620207098, "learning_rate": 2.7243095485444103e-06, "loss": 0.8481, "step": 6148 }, { "epoch": 0.66, "grad_norm": 1.8191168547401848, "learning_rate": 2.722759398552093e-06, "loss": 0.8755, "step": 6149 }, { "epoch": 0.66, "grad_norm": 1.7166966132940573, "learning_rate": 2.7212095246830505e-06, "loss": 0.8506, "step": 6150 }, { "epoch": 0.66, "grad_norm": 1.7393814112851602, "learning_rate": 2.719659927125208e-06, "loss": 0.8868, "step": 6151 }, { "epoch": 0.66, "grad_norm": 1.6628590426973615, "learning_rate": 2.718110606066462e-06, "loss": 0.8747, "step": 6152 }, { "epoch": 0.66, "grad_norm": 1.8767285961531108, "learning_rate": 2.7165615616946726e-06, "loss": 0.9128, "step": 6153 }, { "epoch": 0.66, "grad_norm": 1.8381295079296902, "learning_rate": 2.715012794197664e-06, "loss": 0.899, "step": 6154 }, { "epoch": 0.66, "grad_norm": 1.8012285743553142, "learning_rate": 2.7134643037632315e-06, "loss": 0.8646, "step": 6155 }, { "epoch": 0.66, "grad_norm": 1.7164834442786616, "learning_rate": 2.711916090579137e-06, "loss": 0.8701, "step": 6156 }, { "epoch": 0.66, "grad_norm": 1.6802485651173003, "learning_rate": 2.710368154833103e-06, "loss": 0.8475, "step": 6157 }, { "epoch": 0.66, "grad_norm": 1.701378978988291, "learning_rate": 2.7088204967128236e-06, "loss": 0.8761, "step": 6158 }, { "epoch": 0.66, "grad_norm": 1.8182565638409682, "learning_rate": 2.7072731164059594e-06, "loss": 0.9362, "step": 6159 }, { "epoch": 0.66, "grad_norm": 1.6926862301343832, "learning_rate": 2.7057260141001328e-06, "loss": 0.9149, "step": 6160 }, { "epoch": 0.66, "grad_norm": 1.6524880193801237, "learning_rate": 2.704179189982936e-06, "loss": 0.856, "step": 6161 }, { "epoch": 0.66, "grad_norm": 1.7574193113715393, "learning_rate": 2.70263264424193e-06, "loss": 0.9467, "step": 6162 }, { "epoch": 0.66, "grad_norm": 1.7510876589350153, "learning_rate": 2.7010863770646357e-06, "loss": 0.8388, "step": 6163 }, { "epoch": 0.66, "grad_norm": 1.9298244623026277, "learning_rate": 2.699540388638542e-06, "loss": 0.9799, "step": 6164 }, { "epoch": 0.66, "grad_norm": 1.6913001211307064, "learning_rate": 2.6979946791511066e-06, "loss": 0.8799, "step": 6165 }, { "epoch": 0.66, "grad_norm": 1.7412542077277038, "learning_rate": 2.696449248789754e-06, "loss": 0.8942, "step": 6166 }, { "epoch": 0.66, "grad_norm": 1.8776618189144318, "learning_rate": 2.6949040977418706e-06, "loss": 0.9305, "step": 6167 }, { "epoch": 0.66, "grad_norm": 1.6211607056577284, "learning_rate": 2.693359226194812e-06, "loss": 0.9374, "step": 6168 }, { "epoch": 0.66, "grad_norm": 1.6755559823174764, "learning_rate": 2.691814634335902e-06, "loss": 0.8806, "step": 6169 }, { "epoch": 0.66, "grad_norm": 1.8616153380947458, "learning_rate": 
2.6902703223524216e-06, "loss": 0.9068, "step": 6170 }, { "epoch": 0.66, "grad_norm": 1.7474974771450928, "learning_rate": 2.688726290431629e-06, "loss": 0.9237, "step": 6171 }, { "epoch": 0.66, "grad_norm": 1.6975096027185144, "learning_rate": 2.687182538760743e-06, "loss": 0.848, "step": 6172 }, { "epoch": 0.66, "grad_norm": 1.7077989609981274, "learning_rate": 2.6856390675269478e-06, "loss": 0.86, "step": 6173 }, { "epoch": 0.66, "grad_norm": 1.609554740471953, "learning_rate": 2.684095876917393e-06, "loss": 0.9436, "step": 6174 }, { "epoch": 0.66, "grad_norm": 1.6832056388017014, "learning_rate": 2.6825529671191984e-06, "loss": 0.9061, "step": 6175 }, { "epoch": 0.66, "grad_norm": 1.725019550304898, "learning_rate": 2.681010338319444e-06, "loss": 0.8383, "step": 6176 }, { "epoch": 0.66, "grad_norm": 1.678435866897065, "learning_rate": 2.6794679907051817e-06, "loss": 0.9535, "step": 6177 }, { "epoch": 0.66, "grad_norm": 1.6690741678490348, "learning_rate": 2.6779259244634272e-06, "loss": 0.8366, "step": 6178 }, { "epoch": 0.66, "grad_norm": 1.7510164623022888, "learning_rate": 2.6763841397811576e-06, "loss": 0.9418, "step": 6179 }, { "epoch": 0.66, "grad_norm": 1.7566829317472894, "learning_rate": 2.674842636845322e-06, "loss": 0.9314, "step": 6180 }, { "epoch": 0.66, "grad_norm": 0.813481297400205, "learning_rate": 2.673301415842835e-06, "loss": 1.046, "step": 6181 }, { "epoch": 0.66, "grad_norm": 1.6435635367913084, "learning_rate": 2.67176047696057e-06, "loss": 0.8794, "step": 6182 }, { "epoch": 0.66, "grad_norm": 1.6996318007317546, "learning_rate": 2.670219820385374e-06, "loss": 0.8346, "step": 6183 }, { "epoch": 0.66, "grad_norm": 1.7106808405689775, "learning_rate": 2.6686794463040613e-06, "loss": 0.9316, "step": 6184 }, { "epoch": 0.66, "grad_norm": 1.627998981607846, "learning_rate": 2.6671393549033984e-06, "loss": 0.8758, "step": 6185 }, { "epoch": 0.67, "grad_norm": 1.6915832936319433, "learning_rate": 2.665599546370131e-06, "loss": 0.8529, "step": 6186 }, { "epoch": 0.67, "grad_norm": 1.715970477488259, "learning_rate": 2.6640600208909697e-06, "loss": 0.9359, "step": 6187 }, { "epoch": 0.67, "grad_norm": 2.1142375692527695, "learning_rate": 2.662520778652581e-06, "loss": 0.8449, "step": 6188 }, { "epoch": 0.67, "grad_norm": 1.781565197401211, "learning_rate": 2.660981819841607e-06, "loss": 0.8362, "step": 6189 }, { "epoch": 0.67, "grad_norm": 1.6902480019670985, "learning_rate": 2.6594431446446528e-06, "loss": 0.8901, "step": 6190 }, { "epoch": 0.67, "grad_norm": 1.6496129533624062, "learning_rate": 2.657904753248285e-06, "loss": 0.7987, "step": 6191 }, { "epoch": 0.67, "grad_norm": 1.7316616197052805, "learning_rate": 2.6563666458390396e-06, "loss": 0.908, "step": 6192 }, { "epoch": 0.67, "grad_norm": 1.7553495351245245, "learning_rate": 2.65482882260342e-06, "loss": 0.9397, "step": 6193 }, { "epoch": 0.67, "grad_norm": 1.6934216274031424, "learning_rate": 2.6532912837278896e-06, "loss": 0.9114, "step": 6194 }, { "epoch": 0.67, "grad_norm": 0.823232650728939, "learning_rate": 2.6517540293988832e-06, "loss": 1.0912, "step": 6195 }, { "epoch": 0.67, "grad_norm": 0.8743201258744652, "learning_rate": 2.650217059802794e-06, "loss": 1.0428, "step": 6196 }, { "epoch": 0.67, "grad_norm": 1.728273483205488, "learning_rate": 2.648680375125989e-06, "loss": 0.8976, "step": 6197 }, { "epoch": 0.67, "grad_norm": 1.7852192510222362, "learning_rate": 2.6471439755547934e-06, "loss": 0.8204, "step": 6198 }, { "epoch": 0.67, "grad_norm": 1.7876489853246944, "learning_rate": 
2.645607861275502e-06, "loss": 0.9146, "step": 6199 }, { "epoch": 0.67, "grad_norm": 1.7495815991025765, "learning_rate": 2.644072032474376e-06, "loss": 0.9586, "step": 6200 }, { "epoch": 0.67, "grad_norm": 1.6563216523725741, "learning_rate": 2.642536489337636e-06, "loss": 0.9673, "step": 6201 }, { "epoch": 0.67, "grad_norm": 1.8265955239181466, "learning_rate": 2.6410012320514742e-06, "loss": 0.8628, "step": 6202 }, { "epoch": 0.67, "grad_norm": 1.7571581376497385, "learning_rate": 2.639466260802048e-06, "loss": 0.9667, "step": 6203 }, { "epoch": 0.67, "grad_norm": 1.7925343909729858, "learning_rate": 2.6379315757754744e-06, "loss": 0.9068, "step": 6204 }, { "epoch": 0.67, "grad_norm": 1.6086819683171814, "learning_rate": 2.6363971771578417e-06, "loss": 0.8885, "step": 6205 }, { "epoch": 0.67, "grad_norm": 1.8424749613468725, "learning_rate": 2.6348630651351997e-06, "loss": 0.8389, "step": 6206 }, { "epoch": 0.67, "grad_norm": 1.854209122636025, "learning_rate": 2.6333292398935636e-06, "loss": 0.9361, "step": 6207 }, { "epoch": 0.67, "grad_norm": 1.7259709078000067, "learning_rate": 2.6317957016189155e-06, "loss": 0.9704, "step": 6208 }, { "epoch": 0.67, "grad_norm": 1.7515886533757585, "learning_rate": 2.6302624504972054e-06, "loss": 0.8861, "step": 6209 }, { "epoch": 0.67, "grad_norm": 1.7301643400860627, "learning_rate": 2.628729486714341e-06, "loss": 0.8962, "step": 6210 }, { "epoch": 0.67, "grad_norm": 1.6698217474603771, "learning_rate": 2.627196810456201e-06, "loss": 0.9055, "step": 6211 }, { "epoch": 0.67, "grad_norm": 0.8099011066801103, "learning_rate": 2.62566442190863e-06, "loss": 1.037, "step": 6212 }, { "epoch": 0.67, "grad_norm": 1.6395785259050835, "learning_rate": 2.624132321257432e-06, "loss": 0.9171, "step": 6213 }, { "epoch": 0.67, "grad_norm": 1.6848556822820735, "learning_rate": 2.622600508688381e-06, "loss": 0.932, "step": 6214 }, { "epoch": 0.67, "grad_norm": 1.7889213707058302, "learning_rate": 2.621068984387216e-06, "loss": 0.8899, "step": 6215 }, { "epoch": 0.67, "grad_norm": 1.7345693881479076, "learning_rate": 2.6195377485396377e-06, "loss": 0.8741, "step": 6216 }, { "epoch": 0.67, "grad_norm": 1.7477254963087112, "learning_rate": 2.6180068013313133e-06, "loss": 0.8369, "step": 6217 }, { "epoch": 0.67, "grad_norm": 0.7926610085563496, "learning_rate": 2.616476142947877e-06, "loss": 1.0007, "step": 6218 }, { "epoch": 0.67, "grad_norm": 1.6474256170837194, "learning_rate": 2.614945773574924e-06, "loss": 0.8531, "step": 6219 }, { "epoch": 0.67, "grad_norm": 1.6661381948890501, "learning_rate": 2.6134156933980186e-06, "loss": 0.8473, "step": 6220 }, { "epoch": 0.67, "grad_norm": 1.813394203858817, "learning_rate": 2.61188590260269e-06, "loss": 0.9301, "step": 6221 }, { "epoch": 0.67, "grad_norm": 1.668248392688055, "learning_rate": 2.610356401374427e-06, "loss": 0.8517, "step": 6222 }, { "epoch": 0.67, "grad_norm": 1.9081147575290032, "learning_rate": 2.608827189898688e-06, "loss": 0.9288, "step": 6223 }, { "epoch": 0.67, "grad_norm": 1.696113848464344, "learning_rate": 2.6072982683608992e-06, "loss": 0.9001, "step": 6224 }, { "epoch": 0.67, "grad_norm": 1.7235785910801638, "learning_rate": 2.6057696369464412e-06, "loss": 0.8589, "step": 6225 }, { "epoch": 0.67, "grad_norm": 1.6694403220797782, "learning_rate": 2.6042412958406717e-06, "loss": 0.8371, "step": 6226 }, { "epoch": 0.67, "grad_norm": 1.674340469936745, "learning_rate": 2.6027132452289015e-06, "loss": 0.8662, "step": 6227 }, { "epoch": 0.67, "grad_norm": 1.5955827785615304, "learning_rate": 
2.601185485296418e-06, "loss": 0.9183, "step": 6228 }, { "epoch": 0.67, "grad_norm": 1.698844018872994, "learning_rate": 2.5996580162284618e-06, "loss": 0.8316, "step": 6229 }, { "epoch": 0.67, "grad_norm": 1.7184580310036721, "learning_rate": 2.5981308382102467e-06, "loss": 0.9349, "step": 6230 }, { "epoch": 0.67, "grad_norm": 1.8048176256362345, "learning_rate": 2.5966039514269494e-06, "loss": 0.9832, "step": 6231 }, { "epoch": 0.67, "grad_norm": 1.6756674030837515, "learning_rate": 2.595077356063708e-06, "loss": 0.919, "step": 6232 }, { "epoch": 0.67, "grad_norm": 1.668349327487681, "learning_rate": 2.5935510523056268e-06, "loss": 0.8892, "step": 6233 }, { "epoch": 0.67, "grad_norm": 1.7747301334217365, "learning_rate": 2.592025040337779e-06, "loss": 0.8672, "step": 6234 }, { "epoch": 0.67, "grad_norm": 1.8341675821986696, "learning_rate": 2.5904993203451957e-06, "loss": 0.8769, "step": 6235 }, { "epoch": 0.67, "grad_norm": 1.6727155224561752, "learning_rate": 2.5889738925128753e-06, "loss": 0.8948, "step": 6236 }, { "epoch": 0.67, "grad_norm": 1.6194755173972684, "learning_rate": 2.587448757025787e-06, "loss": 0.8658, "step": 6237 }, { "epoch": 0.67, "grad_norm": 1.7618496047116412, "learning_rate": 2.585923914068851e-06, "loss": 0.9491, "step": 6238 }, { "epoch": 0.67, "grad_norm": 1.768186687704413, "learning_rate": 2.5843993638269618e-06, "loss": 0.9313, "step": 6239 }, { "epoch": 0.67, "grad_norm": 1.9522179335423024, "learning_rate": 2.58287510648498e-06, "loss": 0.835, "step": 6240 }, { "epoch": 0.67, "grad_norm": 0.8617733968542577, "learning_rate": 2.5813511422277227e-06, "loss": 1.0341, "step": 6241 }, { "epoch": 0.67, "grad_norm": 1.7929784821512722, "learning_rate": 2.579827471239978e-06, "loss": 0.8373, "step": 6242 }, { "epoch": 0.67, "grad_norm": 0.8244143928671318, "learning_rate": 2.5783040937064973e-06, "loss": 1.0385, "step": 6243 }, { "epoch": 0.67, "grad_norm": 1.746841577584406, "learning_rate": 2.5767810098119925e-06, "loss": 0.9423, "step": 6244 }, { "epoch": 0.67, "grad_norm": 1.7203119142976462, "learning_rate": 2.575258219741145e-06, "loss": 0.8609, "step": 6245 }, { "epoch": 0.67, "grad_norm": 0.8166916344841809, "learning_rate": 2.573735723678599e-06, "loss": 1.0295, "step": 6246 }, { "epoch": 0.67, "grad_norm": 1.7848081194818683, "learning_rate": 2.5722135218089596e-06, "loss": 0.8538, "step": 6247 }, { "epoch": 0.67, "grad_norm": 1.7623487772581938, "learning_rate": 2.570691614316802e-06, "loss": 0.9245, "step": 6248 }, { "epoch": 0.67, "grad_norm": 1.6404582625312074, "learning_rate": 2.5691700013866615e-06, "loss": 0.8274, "step": 6249 }, { "epoch": 0.67, "grad_norm": 1.7058493750051824, "learning_rate": 2.5676486832030366e-06, "loss": 0.8657, "step": 6250 }, { "epoch": 0.67, "grad_norm": 1.665627661706924, "learning_rate": 2.5661276599503946e-06, "loss": 0.9219, "step": 6251 }, { "epoch": 0.67, "grad_norm": 1.6173258466095692, "learning_rate": 2.5646069318131663e-06, "loss": 0.8614, "step": 6252 }, { "epoch": 0.67, "grad_norm": 0.8056869964177008, "learning_rate": 2.563086498975742e-06, "loss": 1.0238, "step": 6253 }, { "epoch": 0.67, "grad_norm": 2.5857994653785266, "learning_rate": 2.5615663616224807e-06, "loss": 0.8853, "step": 6254 }, { "epoch": 0.67, "grad_norm": 1.7033636318659988, "learning_rate": 2.560046519937707e-06, "loss": 0.9227, "step": 6255 }, { "epoch": 0.67, "grad_norm": 1.7328194119138727, "learning_rate": 2.558526974105703e-06, "loss": 0.8902, "step": 6256 }, { "epoch": 0.67, "grad_norm": 1.7651588127763196, "learning_rate": 
2.5570077243107195e-06, "loss": 0.8991, "step": 6257 }, { "epoch": 0.67, "grad_norm": 1.8233859792312979, "learning_rate": 2.5554887707369734e-06, "loss": 0.8671, "step": 6258 }, { "epoch": 0.67, "grad_norm": 1.7853192415757095, "learning_rate": 2.5539701135686424e-06, "loss": 0.9258, "step": 6259 }, { "epoch": 0.67, "grad_norm": 1.8833438065441415, "learning_rate": 2.552451752989865e-06, "loss": 0.9093, "step": 6260 }, { "epoch": 0.67, "grad_norm": 1.7033180450457077, "learning_rate": 2.5509336891847516e-06, "loss": 1.0048, "step": 6261 }, { "epoch": 0.67, "grad_norm": 1.7738970481845326, "learning_rate": 2.549415922337373e-06, "loss": 0.8613, "step": 6262 }, { "epoch": 0.67, "grad_norm": 1.8354950668127237, "learning_rate": 2.5478984526317596e-06, "loss": 0.9139, "step": 6263 }, { "epoch": 0.67, "grad_norm": 1.7193073421244744, "learning_rate": 2.5463812802519133e-06, "loss": 0.9214, "step": 6264 }, { "epoch": 0.67, "grad_norm": 0.8390743602576256, "learning_rate": 2.544864405381797e-06, "loss": 1.0755, "step": 6265 }, { "epoch": 0.67, "grad_norm": 1.694436913237053, "learning_rate": 2.543347828205335e-06, "loss": 0.9062, "step": 6266 }, { "epoch": 0.67, "grad_norm": 1.7571316115903421, "learning_rate": 2.541831548906417e-06, "loss": 0.9149, "step": 6267 }, { "epoch": 0.67, "grad_norm": 1.7909768759227815, "learning_rate": 2.5403155676689005e-06, "loss": 0.9436, "step": 6268 }, { "epoch": 0.67, "grad_norm": 1.6163432616387976, "learning_rate": 2.5387998846766016e-06, "loss": 1.0191, "step": 6269 }, { "epoch": 0.67, "grad_norm": 1.7445657368293812, "learning_rate": 2.5372845001132995e-06, "loss": 0.9784, "step": 6270 }, { "epoch": 0.67, "grad_norm": 1.7309628117703755, "learning_rate": 2.5357694141627444e-06, "loss": 0.8894, "step": 6271 }, { "epoch": 0.67, "grad_norm": 1.71367086546903, "learning_rate": 2.534254627008641e-06, "loss": 0.8659, "step": 6272 }, { "epoch": 0.67, "grad_norm": 1.8782553533182014, "learning_rate": 2.532740138834665e-06, "loss": 0.9021, "step": 6273 }, { "epoch": 0.67, "grad_norm": 1.659536796246654, "learning_rate": 2.5312259498244544e-06, "loss": 0.8797, "step": 6274 }, { "epoch": 0.67, "grad_norm": 1.7219781013255586, "learning_rate": 2.5297120601616078e-06, "loss": 0.9111, "step": 6275 }, { "epoch": 0.67, "grad_norm": 1.7275326697380633, "learning_rate": 2.5281984700296893e-06, "loss": 0.8625, "step": 6276 }, { "epoch": 0.67, "grad_norm": 1.751038908019539, "learning_rate": 2.52668517961223e-06, "loss": 0.8456, "step": 6277 }, { "epoch": 0.67, "grad_norm": 1.7850071231354494, "learning_rate": 2.5251721890927173e-06, "loss": 0.8947, "step": 6278 }, { "epoch": 0.68, "grad_norm": 1.7948236548349568, "learning_rate": 2.5236594986546115e-06, "loss": 0.8916, "step": 6279 }, { "epoch": 0.68, "grad_norm": 0.7790145071690193, "learning_rate": 2.5221471084813275e-06, "loss": 1.0667, "step": 6280 }, { "epoch": 0.68, "grad_norm": 1.6694011775816162, "learning_rate": 2.5206350187562478e-06, "loss": 0.8233, "step": 6281 }, { "epoch": 0.68, "grad_norm": 1.7924469931292124, "learning_rate": 2.51912322966272e-06, "loss": 0.8215, "step": 6282 }, { "epoch": 0.68, "grad_norm": 1.7382225125943191, "learning_rate": 2.517611741384055e-06, "loss": 0.8745, "step": 6283 }, { "epoch": 0.68, "grad_norm": 1.784656675374681, "learning_rate": 2.516100554103523e-06, "loss": 0.8867, "step": 6284 }, { "epoch": 0.68, "grad_norm": 1.7235178279457042, "learning_rate": 2.514589668004362e-06, "loss": 0.9218, "step": 6285 }, { "epoch": 0.68, "grad_norm": 1.7003293711459908, "learning_rate": 
2.513079083269774e-06, "loss": 0.87, "step": 6286 }, { "epoch": 0.68, "grad_norm": 1.7622962016753754, "learning_rate": 2.511568800082919e-06, "loss": 0.8675, "step": 6287 }, { "epoch": 0.68, "grad_norm": 1.7634359918302613, "learning_rate": 2.5100588186269257e-06, "loss": 0.9789, "step": 6288 }, { "epoch": 0.68, "grad_norm": 1.7062418159122503, "learning_rate": 2.508549139084887e-06, "loss": 0.8949, "step": 6289 }, { "epoch": 0.68, "grad_norm": 1.8258182315515412, "learning_rate": 2.5070397616398546e-06, "loss": 0.8839, "step": 6290 }, { "epoch": 0.68, "grad_norm": 1.7253363598707276, "learning_rate": 2.5055306864748434e-06, "loss": 0.8195, "step": 6291 }, { "epoch": 0.68, "grad_norm": 0.8035688670621712, "learning_rate": 2.504021913772836e-06, "loss": 1.0734, "step": 6292 }, { "epoch": 0.68, "grad_norm": 1.8141134361941402, "learning_rate": 2.502513443716779e-06, "loss": 0.945, "step": 6293 }, { "epoch": 0.68, "grad_norm": 1.702694864436999, "learning_rate": 2.5010052764895752e-06, "loss": 0.9183, "step": 6294 }, { "epoch": 0.68, "grad_norm": 1.6997325147671798, "learning_rate": 2.4994974122740968e-06, "loss": 0.9226, "step": 6295 }, { "epoch": 0.68, "grad_norm": 1.7691628706769729, "learning_rate": 2.4979898512531796e-06, "loss": 0.9788, "step": 6296 }, { "epoch": 0.68, "grad_norm": 1.8579654144770366, "learning_rate": 2.4964825936096164e-06, "loss": 0.8455, "step": 6297 }, { "epoch": 0.68, "grad_norm": 1.734242582088059, "learning_rate": 2.4949756395261706e-06, "loss": 0.8553, "step": 6298 }, { "epoch": 0.68, "grad_norm": 1.72127889455071, "learning_rate": 2.493468989185566e-06, "loss": 0.9023, "step": 6299 }, { "epoch": 0.68, "grad_norm": 1.8492916319245993, "learning_rate": 2.4919626427704873e-06, "loss": 0.8853, "step": 6300 }, { "epoch": 0.68, "grad_norm": 1.7081866454464063, "learning_rate": 2.4904566004635835e-06, "loss": 0.8857, "step": 6301 }, { "epoch": 0.68, "grad_norm": 1.8092648036106815, "learning_rate": 2.4889508624474694e-06, "loss": 0.96, "step": 6302 }, { "epoch": 0.68, "grad_norm": 1.734881060649047, "learning_rate": 2.4874454289047194e-06, "loss": 0.8888, "step": 6303 }, { "epoch": 0.68, "grad_norm": 1.6413113648280484, "learning_rate": 2.4859403000178727e-06, "loss": 0.8875, "step": 6304 }, { "epoch": 0.68, "grad_norm": 0.8255599126601714, "learning_rate": 2.484435475969434e-06, "loss": 1.0331, "step": 6305 }, { "epoch": 0.68, "grad_norm": 1.5936253508054463, "learning_rate": 2.4829309569418647e-06, "loss": 0.8797, "step": 6306 }, { "epoch": 0.68, "grad_norm": 1.6760817727923698, "learning_rate": 2.4814267431175942e-06, "loss": 0.8882, "step": 6307 }, { "epoch": 0.68, "grad_norm": 1.7049749146747466, "learning_rate": 2.4799228346790157e-06, "loss": 0.8693, "step": 6308 }, { "epoch": 0.68, "grad_norm": 0.7938586693913281, "learning_rate": 2.47841923180848e-06, "loss": 1.0454, "step": 6309 }, { "epoch": 0.68, "grad_norm": 1.659684506317678, "learning_rate": 2.4769159346883053e-06, "loss": 0.8767, "step": 6310 }, { "epoch": 0.68, "grad_norm": 1.759498728230593, "learning_rate": 2.4754129435007757e-06, "loss": 0.7966, "step": 6311 }, { "epoch": 0.68, "grad_norm": 1.7735586144994937, "learning_rate": 2.4739102584281268e-06, "loss": 0.8261, "step": 6312 }, { "epoch": 0.68, "grad_norm": 1.6222596364976718, "learning_rate": 2.4724078796525674e-06, "loss": 0.8279, "step": 6313 }, { "epoch": 0.68, "grad_norm": 1.7357147757004663, "learning_rate": 2.4709058073562688e-06, "loss": 0.896, "step": 6314 }, { "epoch": 0.68, "grad_norm": 1.6736312475252992, "learning_rate": 
2.4694040417213584e-06, "loss": 0.84, "step": 6315 }, { "epoch": 0.68, "grad_norm": 1.712387500991326, "learning_rate": 2.467902582929931e-06, "loss": 0.8809, "step": 6316 }, { "epoch": 0.68, "grad_norm": 0.8312300503986002, "learning_rate": 2.4664014311640483e-06, "loss": 1.0661, "step": 6317 }, { "epoch": 0.68, "grad_norm": 1.8406427409416657, "learning_rate": 2.464900586605724e-06, "loss": 0.8822, "step": 6318 }, { "epoch": 0.68, "grad_norm": 1.6642163541767439, "learning_rate": 2.463400049436944e-06, "loss": 0.8972, "step": 6319 }, { "epoch": 0.68, "grad_norm": 1.6403885250513497, "learning_rate": 2.4618998198396544e-06, "loss": 0.8535, "step": 6320 }, { "epoch": 0.68, "grad_norm": 1.7939277976489716, "learning_rate": 2.46039989799576e-06, "loss": 0.8749, "step": 6321 }, { "epoch": 0.68, "grad_norm": 1.7437980695061905, "learning_rate": 2.4589002840871356e-06, "loss": 0.9086, "step": 6322 }, { "epoch": 0.68, "grad_norm": 1.7420456231035646, "learning_rate": 2.4574009782956098e-06, "loss": 0.8592, "step": 6323 }, { "epoch": 0.68, "grad_norm": 1.681034388239921, "learning_rate": 2.455901980802983e-06, "loss": 0.9303, "step": 6324 }, { "epoch": 0.68, "grad_norm": 1.7403804341569369, "learning_rate": 2.4544032917910105e-06, "loss": 0.8293, "step": 6325 }, { "epoch": 0.68, "grad_norm": 1.7589515871965098, "learning_rate": 2.4529049114414143e-06, "loss": 0.9118, "step": 6326 }, { "epoch": 0.68, "grad_norm": 1.6439032633449087, "learning_rate": 2.451406839935881e-06, "loss": 0.8802, "step": 6327 }, { "epoch": 0.68, "grad_norm": 1.70280600615638, "learning_rate": 2.4499090774560525e-06, "loss": 0.8553, "step": 6328 }, { "epoch": 0.68, "grad_norm": 0.7901878989428093, "learning_rate": 2.4484116241835406e-06, "loss": 1.0551, "step": 6329 }, { "epoch": 0.68, "grad_norm": 1.7773834627293514, "learning_rate": 2.446914480299917e-06, "loss": 0.8611, "step": 6330 }, { "epoch": 0.68, "grad_norm": 1.7882227483365227, "learning_rate": 2.4454176459867128e-06, "loss": 0.8776, "step": 6331 }, { "epoch": 0.68, "grad_norm": 1.784805683420405, "learning_rate": 2.4439211214254277e-06, "loss": 0.8604, "step": 6332 }, { "epoch": 0.68, "grad_norm": 0.8284797712547111, "learning_rate": 2.4424249067975193e-06, "loss": 1.05, "step": 6333 }, { "epoch": 0.68, "grad_norm": 1.7737240075618959, "learning_rate": 2.4409290022844063e-06, "loss": 0.9075, "step": 6334 }, { "epoch": 0.68, "grad_norm": 1.778426538358641, "learning_rate": 2.4394334080674737e-06, "loss": 0.8287, "step": 6335 }, { "epoch": 0.68, "grad_norm": 1.7130205874184843, "learning_rate": 2.4379381243280705e-06, "loss": 0.8397, "step": 6336 }, { "epoch": 0.68, "grad_norm": 1.9545372288863674, "learning_rate": 2.4364431512475e-06, "loss": 0.9525, "step": 6337 }, { "epoch": 0.68, "grad_norm": 1.7313769113621853, "learning_rate": 2.4349484890070357e-06, "loss": 0.9567, "step": 6338 }, { "epoch": 0.68, "grad_norm": 1.6923486560772174, "learning_rate": 2.433454137787912e-06, "loss": 0.9243, "step": 6339 }, { "epoch": 0.68, "grad_norm": 1.6825799692952383, "learning_rate": 2.4319600977713202e-06, "loss": 0.8907, "step": 6340 }, { "epoch": 0.68, "grad_norm": 1.6735757110258604, "learning_rate": 2.4304663691384206e-06, "loss": 0.9446, "step": 6341 }, { "epoch": 0.68, "grad_norm": 1.7574880840258724, "learning_rate": 2.4289729520703335e-06, "loss": 0.8246, "step": 6342 }, { "epoch": 0.68, "grad_norm": 1.7286652635244675, "learning_rate": 2.4274798467481397e-06, "loss": 0.8394, "step": 6343 }, { "epoch": 0.68, "grad_norm": 1.804380902965081, "learning_rate": 
2.4259870533528816e-06, "loss": 0.8157, "step": 6344 }, { "epoch": 0.68, "grad_norm": 1.619433649124254, "learning_rate": 2.4244945720655693e-06, "loss": 0.8605, "step": 6345 }, { "epoch": 0.68, "grad_norm": 1.6771545394488558, "learning_rate": 2.4230024030671674e-06, "loss": 0.9685, "step": 6346 }, { "epoch": 0.68, "grad_norm": 0.8188077663120488, "learning_rate": 2.421510546538608e-06, "loss": 1.0438, "step": 6347 }, { "epoch": 0.68, "grad_norm": 0.7957326103343462, "learning_rate": 2.4200190026607863e-06, "loss": 1.0169, "step": 6348 }, { "epoch": 0.68, "grad_norm": 1.6400952560309199, "learning_rate": 2.418527771614553e-06, "loss": 0.9201, "step": 6349 }, { "epoch": 0.68, "grad_norm": 1.7626544881990391, "learning_rate": 2.4170368535807276e-06, "loss": 0.9058, "step": 6350 }, { "epoch": 0.68, "grad_norm": 1.795793615419143, "learning_rate": 2.41554624874009e-06, "loss": 0.9046, "step": 6351 }, { "epoch": 0.68, "grad_norm": 1.7285805637971123, "learning_rate": 2.414055957273378e-06, "loss": 0.8933, "step": 6352 }, { "epoch": 0.68, "grad_norm": 1.7044415277680376, "learning_rate": 2.412565979361298e-06, "loss": 0.8826, "step": 6353 }, { "epoch": 0.68, "grad_norm": 0.7692479680389944, "learning_rate": 2.411076315184511e-06, "loss": 1.035, "step": 6354 }, { "epoch": 0.68, "grad_norm": 1.732504776997473, "learning_rate": 2.409586964923649e-06, "loss": 0.8953, "step": 6355 }, { "epoch": 0.68, "grad_norm": 1.7191306763940477, "learning_rate": 2.4080979287592956e-06, "loss": 0.8896, "step": 6356 }, { "epoch": 0.68, "grad_norm": 1.679213792696338, "learning_rate": 2.4066092068720044e-06, "loss": 0.9286, "step": 6357 }, { "epoch": 0.68, "grad_norm": 1.7370330393150843, "learning_rate": 2.4051207994422886e-06, "loss": 0.9929, "step": 6358 }, { "epoch": 0.68, "grad_norm": 1.6569850276375588, "learning_rate": 2.4036327066506204e-06, "loss": 0.8631, "step": 6359 }, { "epoch": 0.68, "grad_norm": 1.684328953303427, "learning_rate": 2.402144928677438e-06, "loss": 0.9678, "step": 6360 }, { "epoch": 0.68, "grad_norm": 1.7581061140868317, "learning_rate": 2.4006574657031406e-06, "loss": 0.961, "step": 6361 }, { "epoch": 0.68, "grad_norm": 1.6182529012931322, "learning_rate": 2.399170317908085e-06, "loss": 0.9204, "step": 6362 }, { "epoch": 0.68, "grad_norm": 1.6800318427721672, "learning_rate": 2.3976834854725945e-06, "loss": 0.8624, "step": 6363 }, { "epoch": 0.68, "grad_norm": 1.7326101338727924, "learning_rate": 2.396196968576957e-06, "loss": 0.8201, "step": 6364 }, { "epoch": 0.68, "grad_norm": 1.6964426323139046, "learning_rate": 2.3947107674014097e-06, "loss": 0.8008, "step": 6365 }, { "epoch": 0.68, "grad_norm": 0.7852349429707048, "learning_rate": 2.393224882126164e-06, "loss": 1.0117, "step": 6366 }, { "epoch": 0.68, "grad_norm": 1.7906967346692242, "learning_rate": 2.39173931293139e-06, "loss": 0.8804, "step": 6367 }, { "epoch": 0.68, "grad_norm": 1.5991529197963048, "learning_rate": 2.3902540599972145e-06, "loss": 0.8894, "step": 6368 }, { "epoch": 0.68, "grad_norm": 1.7571790425556097, "learning_rate": 2.388769123503731e-06, "loss": 0.9482, "step": 6369 }, { "epoch": 0.68, "grad_norm": 1.8279520923737917, "learning_rate": 2.387284503630996e-06, "loss": 0.8651, "step": 6370 }, { "epoch": 0.68, "grad_norm": 1.7597453783842736, "learning_rate": 2.38580020055902e-06, "loss": 0.8438, "step": 6371 }, { "epoch": 0.69, "grad_norm": 1.649568201050979, "learning_rate": 2.3843162144677828e-06, "loss": 0.8818, "step": 6372 }, { "epoch": 0.69, "grad_norm": 1.7651789362003865, "learning_rate": 
2.3828325455372235e-06, "loss": 0.8626, "step": 6373 }, { "epoch": 0.69, "grad_norm": 1.6981179040575505, "learning_rate": 2.3813491939472395e-06, "loss": 0.9027, "step": 6374 }, { "epoch": 0.69, "grad_norm": 1.857453234191097, "learning_rate": 2.379866159877695e-06, "loss": 0.8993, "step": 6375 }, { "epoch": 0.69, "grad_norm": 1.8223534122546445, "learning_rate": 2.3783834435084118e-06, "loss": 0.9394, "step": 6376 }, { "epoch": 0.69, "grad_norm": 1.747991650920335, "learning_rate": 2.376901045019172e-06, "loss": 0.9611, "step": 6377 }, { "epoch": 0.69, "grad_norm": 1.7375928943168886, "learning_rate": 2.375418964589724e-06, "loss": 0.8438, "step": 6378 }, { "epoch": 0.69, "grad_norm": 1.6699328607708452, "learning_rate": 2.3739372023997766e-06, "loss": 0.9418, "step": 6379 }, { "epoch": 0.69, "grad_norm": 1.8601396381478732, "learning_rate": 2.3724557586289948e-06, "loss": 0.9251, "step": 6380 }, { "epoch": 0.69, "grad_norm": 1.6598587504071074, "learning_rate": 2.370974633457011e-06, "loss": 0.9022, "step": 6381 }, { "epoch": 0.69, "grad_norm": 1.640404748916299, "learning_rate": 2.3694938270634187e-06, "loss": 0.856, "step": 6382 }, { "epoch": 0.69, "grad_norm": 1.780357684810993, "learning_rate": 2.368013339627766e-06, "loss": 0.9752, "step": 6383 }, { "epoch": 0.69, "grad_norm": 1.6577966730502312, "learning_rate": 2.3665331713295702e-06, "loss": 0.8679, "step": 6384 }, { "epoch": 0.69, "grad_norm": 1.6394701337816577, "learning_rate": 2.3650533223483087e-06, "loss": 0.8654, "step": 6385 }, { "epoch": 0.69, "grad_norm": 1.6092014135355948, "learning_rate": 2.363573792863415e-06, "loss": 0.8398, "step": 6386 }, { "epoch": 0.69, "grad_norm": 0.7970457322296525, "learning_rate": 2.3620945830542865e-06, "loss": 1.063, "step": 6387 }, { "epoch": 0.69, "grad_norm": 1.7806065649340237, "learning_rate": 2.360615693100284e-06, "loss": 0.8465, "step": 6388 }, { "epoch": 0.69, "grad_norm": 1.83389565021063, "learning_rate": 2.35913712318073e-06, "loss": 0.9759, "step": 6389 }, { "epoch": 0.69, "grad_norm": 1.870641427461459, "learning_rate": 2.3576588734749022e-06, "loss": 0.9264, "step": 6390 }, { "epoch": 0.69, "grad_norm": 1.907716870294996, "learning_rate": 2.3561809441620458e-06, "loss": 0.9462, "step": 6391 }, { "epoch": 0.69, "grad_norm": 1.7560314380246362, "learning_rate": 2.3547033354213663e-06, "loss": 0.8711, "step": 6392 }, { "epoch": 0.69, "grad_norm": 1.718082464890501, "learning_rate": 2.3532260474320253e-06, "loss": 0.9093, "step": 6393 }, { "epoch": 0.69, "grad_norm": 1.8607681501190125, "learning_rate": 2.3517490803731506e-06, "loss": 0.9109, "step": 6394 }, { "epoch": 0.69, "grad_norm": 1.629149739466703, "learning_rate": 2.350272434423832e-06, "loss": 0.8486, "step": 6395 }, { "epoch": 0.69, "grad_norm": 1.7286045547375022, "learning_rate": 2.3487961097631157e-06, "loss": 0.889, "step": 6396 }, { "epoch": 0.69, "grad_norm": 1.6485881294266451, "learning_rate": 2.347320106570009e-06, "loss": 0.8467, "step": 6397 }, { "epoch": 0.69, "grad_norm": 1.6462412616095465, "learning_rate": 2.3458444250234867e-06, "loss": 0.891, "step": 6398 }, { "epoch": 0.69, "grad_norm": 1.7789994729182725, "learning_rate": 2.3443690653024766e-06, "loss": 0.9041, "step": 6399 }, { "epoch": 0.69, "grad_norm": 1.7640392222154468, "learning_rate": 2.342894027585872e-06, "loss": 0.8624, "step": 6400 }, { "epoch": 0.69, "grad_norm": 1.7648592280305473, "learning_rate": 2.341419312052529e-06, "loss": 0.8123, "step": 6401 }, { "epoch": 0.69, "grad_norm": 1.5786820035097453, "learning_rate": 
2.3399449188812585e-06, "loss": 0.8792, "step": 6402 }, { "epoch": 0.69, "grad_norm": 1.6446761369502103, "learning_rate": 2.338470848250837e-06, "loss": 0.952, "step": 6403 }, { "epoch": 0.69, "grad_norm": 1.7453703479620972, "learning_rate": 2.3369971003400023e-06, "loss": 0.8966, "step": 6404 }, { "epoch": 0.69, "grad_norm": 1.7360531874124625, "learning_rate": 2.3355236753274485e-06, "loss": 0.8973, "step": 6405 }, { "epoch": 0.69, "grad_norm": 1.682536674756058, "learning_rate": 2.3340505733918366e-06, "loss": 0.8975, "step": 6406 }, { "epoch": 0.69, "grad_norm": 1.6906074913766744, "learning_rate": 2.3325777947117834e-06, "loss": 0.9044, "step": 6407 }, { "epoch": 0.69, "grad_norm": 1.763309128437027, "learning_rate": 2.3311053394658673e-06, "loss": 0.9257, "step": 6408 }, { "epoch": 0.69, "grad_norm": 1.7794618159630347, "learning_rate": 2.3296332078326295e-06, "loss": 0.8532, "step": 6409 }, { "epoch": 0.69, "grad_norm": 1.9017116519705788, "learning_rate": 2.3281613999905735e-06, "loss": 0.8143, "step": 6410 }, { "epoch": 0.69, "grad_norm": 1.6167622553043741, "learning_rate": 2.326689916118157e-06, "loss": 0.9034, "step": 6411 }, { "epoch": 0.69, "grad_norm": 1.6832448697424574, "learning_rate": 2.3252187563938043e-06, "loss": 0.854, "step": 6412 }, { "epoch": 0.69, "grad_norm": 1.8022472792218662, "learning_rate": 2.3237479209959007e-06, "loss": 0.9031, "step": 6413 }, { "epoch": 0.69, "grad_norm": 1.6857281545105625, "learning_rate": 2.3222774101027864e-06, "loss": 0.8264, "step": 6414 }, { "epoch": 0.69, "grad_norm": 1.8342658948264268, "learning_rate": 2.3208072238927676e-06, "loss": 0.9711, "step": 6415 }, { "epoch": 0.69, "grad_norm": 1.6052059113500816, "learning_rate": 2.3193373625441113e-06, "loss": 0.9606, "step": 6416 }, { "epoch": 0.69, "grad_norm": 1.9129378632159246, "learning_rate": 2.3178678262350413e-06, "loss": 0.9165, "step": 6417 }, { "epoch": 0.69, "grad_norm": 1.7979017153175558, "learning_rate": 2.3163986151437425e-06, "loss": 0.8838, "step": 6418 }, { "epoch": 0.69, "grad_norm": 0.8112027081461685, "learning_rate": 2.314929729448363e-06, "loss": 1.0528, "step": 6419 }, { "epoch": 0.69, "grad_norm": 1.8016567224249653, "learning_rate": 2.313461169327013e-06, "loss": 0.8438, "step": 6420 }, { "epoch": 0.69, "grad_norm": 1.6190363985458798, "learning_rate": 2.311992934957756e-06, "loss": 0.9004, "step": 6421 }, { "epoch": 0.69, "grad_norm": 1.7395807127674972, "learning_rate": 2.3105250265186223e-06, "loss": 0.8941, "step": 6422 }, { "epoch": 0.69, "grad_norm": 1.7801445618860856, "learning_rate": 2.3090574441876037e-06, "loss": 0.8477, "step": 6423 }, { "epoch": 0.69, "grad_norm": 1.684259725232249, "learning_rate": 2.3075901881426447e-06, "loss": 0.9577, "step": 6424 }, { "epoch": 0.69, "grad_norm": 1.6512595673873343, "learning_rate": 2.306123258561658e-06, "loss": 0.876, "step": 6425 }, { "epoch": 0.69, "grad_norm": 1.7376056379547626, "learning_rate": 2.3046566556225147e-06, "loss": 0.9278, "step": 6426 }, { "epoch": 0.69, "grad_norm": 1.640799043452101, "learning_rate": 2.303190379503043e-06, "loss": 0.8539, "step": 6427 }, { "epoch": 0.69, "grad_norm": 0.837145620016028, "learning_rate": 2.3017244303810367e-06, "loss": 1.0359, "step": 6428 }, { "epoch": 0.69, "grad_norm": 1.678453617185128, "learning_rate": 2.3002588084342453e-06, "loss": 0.9749, "step": 6429 }, { "epoch": 0.69, "grad_norm": 1.7322387791929172, "learning_rate": 2.2987935138403786e-06, "loss": 0.8729, "step": 6430 }, { "epoch": 0.69, "grad_norm": 1.787351980524748, "learning_rate": 
2.2973285467771117e-06, "loss": 0.8499, "step": 6431 }, { "epoch": 0.69, "grad_norm": 1.7297912063073069, "learning_rate": 2.2958639074220773e-06, "loss": 0.9498, "step": 6432 }, { "epoch": 0.69, "grad_norm": 1.7682062023649043, "learning_rate": 2.2943995959528654e-06, "loss": 0.9066, "step": 6433 }, { "epoch": 0.69, "grad_norm": 1.6911226896456393, "learning_rate": 2.2929356125470297e-06, "loss": 0.8444, "step": 6434 }, { "epoch": 0.69, "grad_norm": 1.7473526372955408, "learning_rate": 2.2914719573820854e-06, "loss": 0.8904, "step": 6435 }, { "epoch": 0.69, "grad_norm": 1.709803939428721, "learning_rate": 2.2900086306355024e-06, "loss": 0.8879, "step": 6436 }, { "epoch": 0.69, "grad_norm": 1.714766430048214, "learning_rate": 2.2885456324847156e-06, "loss": 0.9938, "step": 6437 }, { "epoch": 0.69, "grad_norm": 1.7734537824627243, "learning_rate": 2.2870829631071207e-06, "loss": 0.7769, "step": 6438 }, { "epoch": 0.69, "grad_norm": 1.6265006960462027, "learning_rate": 2.2856206226800686e-06, "loss": 0.8827, "step": 6439 }, { "epoch": 0.69, "grad_norm": 1.7953922639171864, "learning_rate": 2.284158611380873e-06, "loss": 0.8401, "step": 6440 }, { "epoch": 0.69, "grad_norm": 1.6539623981731828, "learning_rate": 2.2826969293868103e-06, "loss": 0.8675, "step": 6441 }, { "epoch": 0.69, "grad_norm": 1.6732542436388826, "learning_rate": 2.2812355768751106e-06, "loss": 0.8402, "step": 6442 }, { "epoch": 0.69, "grad_norm": 1.7148741805280188, "learning_rate": 2.2797745540229703e-06, "loss": 0.969, "step": 6443 }, { "epoch": 0.69, "grad_norm": 1.8267873061019673, "learning_rate": 2.2783138610075454e-06, "loss": 0.9714, "step": 6444 }, { "epoch": 0.69, "grad_norm": 1.718420172799168, "learning_rate": 2.2768534980059464e-06, "loss": 0.8913, "step": 6445 }, { "epoch": 0.69, "grad_norm": 1.7543979168408128, "learning_rate": 2.2753934651952485e-06, "loss": 0.9188, "step": 6446 }, { "epoch": 0.69, "grad_norm": 1.6958023606060988, "learning_rate": 2.273933762752488e-06, "loss": 0.8422, "step": 6447 }, { "epoch": 0.69, "grad_norm": 1.6728451610751116, "learning_rate": 2.2724743908546553e-06, "loss": 0.9664, "step": 6448 }, { "epoch": 0.69, "grad_norm": 1.7736551340772795, "learning_rate": 2.271015349678707e-06, "loss": 0.8438, "step": 6449 }, { "epoch": 0.69, "grad_norm": 1.7341991670853147, "learning_rate": 2.2695566394015552e-06, "loss": 0.8442, "step": 6450 }, { "epoch": 0.69, "grad_norm": 1.8223362674719412, "learning_rate": 2.268098260200075e-06, "loss": 0.8811, "step": 6451 }, { "epoch": 0.69, "grad_norm": 1.6889992774898281, "learning_rate": 2.2666402122510976e-06, "loss": 0.8354, "step": 6452 }, { "epoch": 0.69, "grad_norm": 1.7202953775164767, "learning_rate": 2.2651824957314177e-06, "loss": 0.8754, "step": 6453 }, { "epoch": 0.69, "grad_norm": 1.8086863280767134, "learning_rate": 2.2637251108177903e-06, "loss": 0.9026, "step": 6454 }, { "epoch": 0.69, "grad_norm": 1.7791433870769235, "learning_rate": 2.262268057686925e-06, "loss": 0.8948, "step": 6455 }, { "epoch": 0.69, "grad_norm": 1.704754465481641, "learning_rate": 2.260811336515496e-06, "loss": 0.911, "step": 6456 }, { "epoch": 0.69, "grad_norm": 1.6309859294236584, "learning_rate": 2.259354947480138e-06, "loss": 0.9137, "step": 6457 }, { "epoch": 0.69, "grad_norm": 1.7905060768889112, "learning_rate": 2.257898890757439e-06, "loss": 0.9301, "step": 6458 }, { "epoch": 0.69, "grad_norm": 1.6854299724254367, "learning_rate": 2.2564431665239545e-06, "loss": 0.9584, "step": 6459 }, { "epoch": 0.69, "grad_norm": 1.8071680285834402, "learning_rate": 
2.2549877749561942e-06, "loss": 0.8846, "step": 6460 }, { "epoch": 0.69, "grad_norm": 1.966384103576259, "learning_rate": 2.2535327162306286e-06, "loss": 0.9001, "step": 6461 }, { "epoch": 0.69, "grad_norm": 1.644441686769002, "learning_rate": 2.2520779905236895e-06, "loss": 0.8767, "step": 6462 }, { "epoch": 0.69, "grad_norm": 1.7641531725257853, "learning_rate": 2.2506235980117695e-06, "loss": 0.8134, "step": 6463 }, { "epoch": 0.69, "grad_norm": 1.6698888397631382, "learning_rate": 2.2491695388712147e-06, "loss": 0.8875, "step": 6464 }, { "epoch": 0.7, "grad_norm": 1.7140474633689036, "learning_rate": 2.2477158132783373e-06, "loss": 0.8336, "step": 6465 }, { "epoch": 0.7, "grad_norm": 1.6292820705781816, "learning_rate": 2.2462624214094074e-06, "loss": 0.8117, "step": 6466 }, { "epoch": 0.7, "grad_norm": 1.703032164635888, "learning_rate": 2.244809363440651e-06, "loss": 0.8762, "step": 6467 }, { "epoch": 0.7, "grad_norm": 0.822290143682642, "learning_rate": 2.2433566395482577e-06, "loss": 1.0459, "step": 6468 }, { "epoch": 0.7, "grad_norm": 1.8433594189442188, "learning_rate": 2.241904249908377e-06, "loss": 0.9836, "step": 6469 }, { "epoch": 0.7, "grad_norm": 1.8341475622017183, "learning_rate": 2.2404521946971153e-06, "loss": 0.9301, "step": 6470 }, { "epoch": 0.7, "grad_norm": 1.7042718432280926, "learning_rate": 2.2390004740905363e-06, "loss": 0.8782, "step": 6471 }, { "epoch": 0.7, "grad_norm": 1.7864534021631688, "learning_rate": 2.2375490882646698e-06, "loss": 0.9144, "step": 6472 }, { "epoch": 0.7, "grad_norm": 1.6414715419715076, "learning_rate": 2.2360980373954988e-06, "loss": 0.8805, "step": 6473 }, { "epoch": 0.7, "grad_norm": 1.6337315145944105, "learning_rate": 2.2346473216589694e-06, "loss": 0.8905, "step": 6474 }, { "epoch": 0.7, "grad_norm": 1.6777478932148686, "learning_rate": 2.233196941230988e-06, "loss": 0.8884, "step": 6475 }, { "epoch": 0.7, "grad_norm": 1.7425050400721716, "learning_rate": 2.2317468962874136e-06, "loss": 0.9187, "step": 6476 }, { "epoch": 0.7, "grad_norm": 1.7734889528466828, "learning_rate": 2.230297187004072e-06, "loss": 0.9496, "step": 6477 }, { "epoch": 0.7, "grad_norm": 1.9224113032142296, "learning_rate": 2.2288478135567466e-06, "loss": 0.9179, "step": 6478 }, { "epoch": 0.7, "grad_norm": 0.8311997805937364, "learning_rate": 2.2273987761211755e-06, "loss": 1.0378, "step": 6479 }, { "epoch": 0.7, "grad_norm": 1.850258514323672, "learning_rate": 2.225950074873064e-06, "loss": 0.9207, "step": 6480 }, { "epoch": 0.7, "grad_norm": 1.7809591364767459, "learning_rate": 2.2245017099880667e-06, "loss": 0.9104, "step": 6481 }, { "epoch": 0.7, "grad_norm": 1.7240391872122136, "learning_rate": 2.223053681641808e-06, "loss": 0.9074, "step": 6482 }, { "epoch": 0.7, "grad_norm": 1.6664542788668992, "learning_rate": 2.2216059900098624e-06, "loss": 0.881, "step": 6483 }, { "epoch": 0.7, "grad_norm": 1.8632306943209074, "learning_rate": 2.220158635267769e-06, "loss": 0.8161, "step": 6484 }, { "epoch": 0.7, "grad_norm": 0.7832573405557359, "learning_rate": 2.2187116175910263e-06, "loss": 1.0424, "step": 6485 }, { "epoch": 0.7, "grad_norm": 1.6652644140554596, "learning_rate": 2.2172649371550863e-06, "loss": 0.8183, "step": 6486 }, { "epoch": 0.7, "grad_norm": 1.7201456017107313, "learning_rate": 2.2158185941353672e-06, "loss": 0.8584, "step": 6487 }, { "epoch": 0.7, "grad_norm": 1.8318461269806292, "learning_rate": 2.2143725887072433e-06, "loss": 0.8774, "step": 6488 }, { "epoch": 0.7, "grad_norm": 1.7415917176426496, "learning_rate": 2.212926921046045e-06,
"loss": 0.9731, "step": 6489 }, { "epoch": 0.7, "grad_norm": 0.8229419483855842, "learning_rate": 2.2114815913270654e-06, "loss": 1.0442, "step": 6490 }, { "epoch": 0.7, "grad_norm": 1.7100792690292168, "learning_rate": 2.2100365997255603e-06, "loss": 0.8463, "step": 6491 }, { "epoch": 0.7, "grad_norm": 1.7146701057287772, "learning_rate": 2.2085919464167327e-06, "loss": 0.8774, "step": 6492 }, { "epoch": 0.7, "grad_norm": 1.7712598130870785, "learning_rate": 2.2071476315757547e-06, "loss": 0.9057, "step": 6493 }, { "epoch": 0.7, "grad_norm": 1.7653120402700195, "learning_rate": 2.2057036553777565e-06, "loss": 0.8445, "step": 6494 }, { "epoch": 0.7, "grad_norm": 1.8224215311535858, "learning_rate": 2.2042600179978217e-06, "loss": 0.8458, "step": 6495 }, { "epoch": 0.7, "grad_norm": 1.679261289247971, "learning_rate": 2.2028167196109984e-06, "loss": 0.8481, "step": 6496 }, { "epoch": 0.7, "grad_norm": 1.7110145221830693, "learning_rate": 2.201373760392293e-06, "loss": 0.8785, "step": 6497 }, { "epoch": 0.7, "grad_norm": 1.8873982272738539, "learning_rate": 2.1999311405166653e-06, "loss": 1.0019, "step": 6498 }, { "epoch": 0.7, "grad_norm": 1.6345783333742534, "learning_rate": 2.1984888601590402e-06, "loss": 0.8492, "step": 6499 }, { "epoch": 0.7, "grad_norm": 1.6164480458479393, "learning_rate": 2.1970469194943013e-06, "loss": 0.8275, "step": 6500 }, { "epoch": 0.7, "grad_norm": 1.6168536132642155, "learning_rate": 2.195605318697284e-06, "loss": 0.8976, "step": 6501 }, { "epoch": 0.7, "grad_norm": 1.7392008653487319, "learning_rate": 2.194164057942793e-06, "loss": 0.8533, "step": 6502 }, { "epoch": 0.7, "grad_norm": 1.8425339924552915, "learning_rate": 2.1927231374055824e-06, "loss": 0.8477, "step": 6503 }, { "epoch": 0.7, "grad_norm": 1.7486191085993927, "learning_rate": 2.191282557260368e-06, "loss": 0.892, "step": 6504 }, { "epoch": 0.7, "grad_norm": 0.8196822167068794, "learning_rate": 2.1898423176818266e-06, "loss": 1.0546, "step": 6505 }, { "epoch": 0.7, "grad_norm": 1.804972751044285, "learning_rate": 2.188402418844594e-06, "loss": 0.9826, "step": 6506 }, { "epoch": 0.7, "grad_norm": 1.7490978399672044, "learning_rate": 2.1869628609232595e-06, "loss": 0.9523, "step": 6507 }, { "epoch": 0.7, "grad_norm": 1.6456700300755442, "learning_rate": 2.185523644092376e-06, "loss": 0.8537, "step": 6508 }, { "epoch": 0.7, "grad_norm": 1.747714028169617, "learning_rate": 2.1840847685264553e-06, "loss": 0.8765, "step": 6509 }, { "epoch": 0.7, "grad_norm": 1.8602985304062363, "learning_rate": 2.182646234399963e-06, "loss": 0.884, "step": 6510 }, { "epoch": 0.7, "grad_norm": 1.6904509006898039, "learning_rate": 2.181208041887327e-06, "loss": 0.8733, "step": 6511 }, { "epoch": 0.7, "grad_norm": 1.7180806610484716, "learning_rate": 2.179770191162936e-06, "loss": 0.8612, "step": 6512 }, { "epoch": 0.7, "grad_norm": 1.7795506390134943, "learning_rate": 2.1783326824011325e-06, "loss": 0.9185, "step": 6513 }, { "epoch": 0.7, "grad_norm": 1.7702820459645796, "learning_rate": 2.1768955157762165e-06, "loss": 0.8839, "step": 6514 }, { "epoch": 0.7, "grad_norm": 1.84986440693234, "learning_rate": 2.1754586914624524e-06, "loss": 0.9714, "step": 6515 }, { "epoch": 0.7, "grad_norm": 1.6903287549363535, "learning_rate": 2.174022209634061e-06, "loss": 0.9282, "step": 6516 }, { "epoch": 0.7, "grad_norm": 0.7930715009173661, "learning_rate": 2.172586070465218e-06, "loss": 1.0627, "step": 6517 }, { "epoch": 0.7, "grad_norm": 1.8065462985657579, "learning_rate": 2.171150274130061e-06, "loss": 0.9436, "step": 6518 }, { 
"epoch": 0.7, "grad_norm": 1.7428430462294333, "learning_rate": 2.169714820802688e-06, "loss": 0.9408, "step": 6519 }, { "epoch": 0.7, "grad_norm": 1.7579717672960957, "learning_rate": 2.168279710657149e-06, "loss": 0.8797, "step": 6520 }, { "epoch": 0.7, "grad_norm": 1.704672862183652, "learning_rate": 2.1668449438674573e-06, "loss": 0.8725, "step": 6521 }, { "epoch": 0.7, "grad_norm": 1.608623656331159, "learning_rate": 2.165410520607585e-06, "loss": 0.8339, "step": 6522 }, { "epoch": 0.7, "grad_norm": 1.7820718510333096, "learning_rate": 2.1639764410514593e-06, "loss": 0.9533, "step": 6523 }, { "epoch": 0.7, "grad_norm": 1.8121246062051826, "learning_rate": 2.162542705372966e-06, "loss": 0.9224, "step": 6524 }, { "epoch": 0.7, "grad_norm": 1.7451680080069862, "learning_rate": 2.1611093137459533e-06, "loss": 0.888, "step": 6525 }, { "epoch": 0.7, "grad_norm": 1.7877371388730705, "learning_rate": 2.159676266344222e-06, "loss": 0.9034, "step": 6526 }, { "epoch": 0.7, "grad_norm": 1.7508309475572, "learning_rate": 2.158243563341535e-06, "loss": 0.8797, "step": 6527 }, { "epoch": 0.7, "grad_norm": 1.6283336682703422, "learning_rate": 2.1568112049116152e-06, "loss": 0.868, "step": 6528 }, { "epoch": 0.7, "grad_norm": 1.814573546727918, "learning_rate": 2.155379191228137e-06, "loss": 0.8615, "step": 6529 }, { "epoch": 0.7, "grad_norm": 0.7989014016275456, "learning_rate": 2.1539475224647382e-06, "loss": 1.0468, "step": 6530 }, { "epoch": 0.7, "grad_norm": 1.6934492282860902, "learning_rate": 2.1525161987950165e-06, "loss": 0.8293, "step": 6531 }, { "epoch": 0.7, "grad_norm": 1.7291628375835078, "learning_rate": 2.1510852203925208e-06, "loss": 0.9606, "step": 6532 }, { "epoch": 0.7, "grad_norm": 1.7402937626098416, "learning_rate": 2.149654587430765e-06, "loss": 0.8741, "step": 6533 }, { "epoch": 0.7, "grad_norm": 1.7789184130482414, "learning_rate": 2.148224300083216e-06, "loss": 0.8279, "step": 6534 }, { "epoch": 0.7, "grad_norm": 1.7488139381423606, "learning_rate": 2.1467943585233037e-06, "loss": 0.8515, "step": 6535 }, { "epoch": 0.7, "grad_norm": 1.6871851775977171, "learning_rate": 2.14536476292441e-06, "loss": 0.8904, "step": 6536 }, { "epoch": 0.7, "grad_norm": 0.8087439925144518, "learning_rate": 2.143935513459882e-06, "loss": 1.0543, "step": 6537 }, { "epoch": 0.7, "grad_norm": 1.869935151028315, "learning_rate": 2.1425066103030174e-06, "loss": 0.8462, "step": 6538 }, { "epoch": 0.7, "grad_norm": 0.8089762425976452, "learning_rate": 2.141078053627078e-06, "loss": 1.0616, "step": 6539 }, { "epoch": 0.7, "grad_norm": 1.732904262014079, "learning_rate": 2.1396498436052824e-06, "loss": 0.9287, "step": 6540 }, { "epoch": 0.7, "grad_norm": 1.862995675573417, "learning_rate": 2.1382219804108023e-06, "loss": 0.9363, "step": 6541 }, { "epoch": 0.7, "grad_norm": 1.6700893245665551, "learning_rate": 2.136794464216774e-06, "loss": 0.7933, "step": 6542 }, { "epoch": 0.7, "grad_norm": 1.8855387731039055, "learning_rate": 2.135367295196289e-06, "loss": 0.9527, "step": 6543 }, { "epoch": 0.7, "grad_norm": 1.7674023667883219, "learning_rate": 2.133940473522395e-06, "loss": 0.9696, "step": 6544 }, { "epoch": 0.7, "grad_norm": 1.659872679461199, "learning_rate": 2.132513999368098e-06, "loss": 0.914, "step": 6545 }, { "epoch": 0.7, "grad_norm": 0.7660987234609811, "learning_rate": 2.1310878729063645e-06, "loss": 1.0418, "step": 6546 }, { "epoch": 0.7, "grad_norm": 1.8053046035028604, "learning_rate": 2.1296620943101187e-06, "loss": 0.8083, "step": 6547 }, { "epoch": 0.7, "grad_norm": 1.6437885934811287, 
"learning_rate": 2.128236663752238e-06, "loss": 0.9065, "step": 6548 }, { "epoch": 0.7, "grad_norm": 1.774525129844558, "learning_rate": 2.1268115814055616e-06, "loss": 0.8546, "step": 6549 }, { "epoch": 0.7, "grad_norm": 1.7359510073685382, "learning_rate": 2.1253868474428892e-06, "loss": 0.9053, "step": 6550 }, { "epoch": 0.7, "grad_norm": 1.8131639876813324, "learning_rate": 2.123962462036969e-06, "loss": 0.8777, "step": 6551 }, { "epoch": 0.7, "grad_norm": 1.8687424577741796, "learning_rate": 2.122538425360516e-06, "loss": 0.9315, "step": 6552 }, { "epoch": 0.7, "grad_norm": 2.4971173166682594, "learning_rate": 2.1211147375862005e-06, "loss": 0.8363, "step": 6553 }, { "epoch": 0.7, "grad_norm": 1.906942881193207, "learning_rate": 2.1196913988866467e-06, "loss": 0.8663, "step": 6554 }, { "epoch": 0.7, "grad_norm": 1.6336544506942645, "learning_rate": 2.1182684094344425e-06, "loss": 0.9543, "step": 6555 }, { "epoch": 0.7, "grad_norm": 1.8278470624935752, "learning_rate": 2.116845769402128e-06, "loss": 0.9389, "step": 6556 }, { "epoch": 0.7, "grad_norm": 1.7816537710535942, "learning_rate": 2.1154234789622023e-06, "loss": 0.9152, "step": 6557 }, { "epoch": 0.71, "grad_norm": 1.7250078997103446, "learning_rate": 2.114001538287125e-06, "loss": 0.8605, "step": 6558 }, { "epoch": 0.71, "grad_norm": 1.735779656413698, "learning_rate": 2.1125799475493116e-06, "loss": 0.8959, "step": 6559 }, { "epoch": 0.71, "grad_norm": 1.7633399344610248, "learning_rate": 2.1111587069211324e-06, "loss": 0.9171, "step": 6560 }, { "epoch": 0.71, "grad_norm": 1.7970626629178774, "learning_rate": 2.1097378165749192e-06, "loss": 0.877, "step": 6561 }, { "epoch": 0.71, "grad_norm": 0.8155411533445395, "learning_rate": 2.1083172766829624e-06, "loss": 1.0459, "step": 6562 }, { "epoch": 0.71, "grad_norm": 1.7974433490189392, "learning_rate": 2.1068970874175028e-06, "loss": 0.9685, "step": 6563 }, { "epoch": 0.71, "grad_norm": 1.560233100474211, "learning_rate": 2.1054772489507454e-06, "loss": 0.8488, "step": 6564 }, { "epoch": 0.71, "grad_norm": 2.4171123543495603, "learning_rate": 2.1040577614548523e-06, "loss": 0.9033, "step": 6565 }, { "epoch": 0.71, "grad_norm": 1.7332220712905924, "learning_rate": 2.1026386251019394e-06, "loss": 0.8648, "step": 6566 }, { "epoch": 0.71, "grad_norm": 0.801269275748386, "learning_rate": 2.1012198400640804e-06, "loss": 1.0624, "step": 6567 }, { "epoch": 0.71, "grad_norm": 1.604394175743742, "learning_rate": 2.099801406513311e-06, "loss": 0.866, "step": 6568 }, { "epoch": 0.71, "grad_norm": 1.7125115013387315, "learning_rate": 2.0983833246216177e-06, "loss": 0.9311, "step": 6569 }, { "epoch": 0.71, "grad_norm": 1.7415911016110615, "learning_rate": 2.0969655945609496e-06, "loss": 0.9323, "step": 6570 }, { "epoch": 0.71, "grad_norm": 1.6713327675086096, "learning_rate": 2.095548216503214e-06, "loss": 0.9172, "step": 6571 }, { "epoch": 0.71, "grad_norm": 1.8518350382764888, "learning_rate": 2.0941311906202672e-06, "loss": 0.9405, "step": 6572 }, { "epoch": 0.71, "grad_norm": 1.7280119736042032, "learning_rate": 2.0927145170839324e-06, "loss": 0.858, "step": 6573 }, { "epoch": 0.71, "grad_norm": 1.7566571361783188, "learning_rate": 2.0912981960659873e-06, "loss": 0.8805, "step": 6574 }, { "epoch": 0.71, "grad_norm": 1.6999652566728793, "learning_rate": 2.0898822277381614e-06, "loss": 0.8304, "step": 6575 }, { "epoch": 0.71, "grad_norm": 1.8014802545931343, "learning_rate": 2.0884666122721504e-06, "loss": 0.9092, "step": 6576 }, { "epoch": 0.71, "grad_norm": 1.905328512619001, 
"learning_rate": 2.0870513498395976e-06, "loss": 0.824, "step": 6577 }, { "epoch": 0.71, "grad_norm": 1.7224313557461404, "learning_rate": 2.085636440612114e-06, "loss": 0.8988, "step": 6578 }, { "epoch": 0.71, "grad_norm": 1.7329489280156887, "learning_rate": 2.0842218847612574e-06, "loss": 0.954, "step": 6579 }, { "epoch": 0.71, "grad_norm": 1.695717943358972, "learning_rate": 2.0828076824585484e-06, "loss": 0.8973, "step": 6580 }, { "epoch": 0.71, "grad_norm": 1.7787279887618899, "learning_rate": 2.081393833875468e-06, "loss": 0.9379, "step": 6581 }, { "epoch": 0.71, "grad_norm": 1.861501847671149, "learning_rate": 2.0799803391834444e-06, "loss": 0.906, "step": 6582 }, { "epoch": 0.71, "grad_norm": 1.726323229737557, "learning_rate": 2.0785671985538724e-06, "loss": 0.9899, "step": 6583 }, { "epoch": 0.71, "grad_norm": 1.6718726863618756, "learning_rate": 2.0771544121581004e-06, "loss": 0.8178, "step": 6584 }, { "epoch": 0.71, "grad_norm": 1.7480456928302555, "learning_rate": 2.075741980167431e-06, "loss": 0.9852, "step": 6585 }, { "epoch": 0.71, "grad_norm": 1.7602492031713983, "learning_rate": 2.07432990275313e-06, "loss": 0.8519, "step": 6586 }, { "epoch": 0.71, "grad_norm": 1.6764569899839548, "learning_rate": 2.0729181800864147e-06, "loss": 0.8535, "step": 6587 }, { "epoch": 0.71, "grad_norm": 1.7853781465766567, "learning_rate": 2.0715068123384593e-06, "loss": 0.9452, "step": 6588 }, { "epoch": 0.71, "grad_norm": 1.804465003638183, "learning_rate": 2.0700957996803983e-06, "loss": 0.871, "step": 6589 }, { "epoch": 0.71, "grad_norm": 1.8413857129108644, "learning_rate": 2.0686851422833248e-06, "loss": 0.9544, "step": 6590 }, { "epoch": 0.71, "grad_norm": 1.6845093658371615, "learning_rate": 2.067274840318282e-06, "loss": 0.9021, "step": 6591 }, { "epoch": 0.71, "grad_norm": 1.9848772686674427, "learning_rate": 2.0658648939562752e-06, "loss": 0.9191, "step": 6592 }, { "epoch": 0.71, "grad_norm": 1.8216597560982837, "learning_rate": 2.0644553033682675e-06, "loss": 0.8991, "step": 6593 }, { "epoch": 0.71, "grad_norm": 1.7060643321128839, "learning_rate": 2.0630460687251722e-06, "loss": 0.8565, "step": 6594 }, { "epoch": 0.71, "grad_norm": 1.832488231026425, "learning_rate": 2.0616371901978672e-06, "loss": 0.9113, "step": 6595 }, { "epoch": 0.71, "grad_norm": 1.7738437949181498, "learning_rate": 2.060228667957184e-06, "loss": 0.924, "step": 6596 }, { "epoch": 0.71, "grad_norm": 1.6528044742322285, "learning_rate": 2.05882050217391e-06, "loss": 0.9417, "step": 6597 }, { "epoch": 0.71, "grad_norm": 0.8012859691026287, "learning_rate": 2.0574126930187882e-06, "loss": 1.0514, "step": 6598 }, { "epoch": 0.71, "grad_norm": 1.6518159959119059, "learning_rate": 2.0560052406625236e-06, "loss": 0.8148, "step": 6599 }, { "epoch": 0.71, "grad_norm": 1.7070087920227248, "learning_rate": 2.054598145275772e-06, "loss": 0.8351, "step": 6600 }, { "epoch": 0.71, "grad_norm": 1.7212968647519868, "learning_rate": 2.0531914070291485e-06, "loss": 0.8988, "step": 6601 }, { "epoch": 0.71, "grad_norm": 1.7681325986910603, "learning_rate": 2.0517850260932288e-06, "loss": 0.847, "step": 6602 }, { "epoch": 0.71, "grad_norm": 1.7900270133362173, "learning_rate": 2.050379002638537e-06, "loss": 0.8952, "step": 6603 }, { "epoch": 0.71, "grad_norm": 1.9298472715869732, "learning_rate": 2.048973336835559e-06, "loss": 0.886, "step": 6604 }, { "epoch": 0.71, "grad_norm": 1.7825152998053173, "learning_rate": 2.04756802885474e-06, "loss": 0.9348, "step": 6605 }, { "epoch": 0.71, "grad_norm": 1.7034938940169955, 
"learning_rate": 2.0461630788664746e-06, "loss": 0.9092, "step": 6606 }, { "epoch": 0.71, "grad_norm": 0.8139634288653974, "learning_rate": 2.044758487041119e-06, "loss": 1.0336, "step": 6607 }, { "epoch": 0.71, "grad_norm": 0.7927525541022816, "learning_rate": 2.0433542535489868e-06, "loss": 1.0624, "step": 6608 }, { "epoch": 0.71, "grad_norm": 1.741321114110323, "learning_rate": 2.0419503785603444e-06, "loss": 0.8042, "step": 6609 }, { "epoch": 0.71, "grad_norm": 0.7929716813261991, "learning_rate": 2.0405468622454157e-06, "loss": 1.0722, "step": 6610 }, { "epoch": 0.71, "grad_norm": 0.8319185123593109, "learning_rate": 2.039143704774382e-06, "loss": 1.0648, "step": 6611 }, { "epoch": 0.71, "grad_norm": 0.8325420973332729, "learning_rate": 2.037740906317383e-06, "loss": 1.0732, "step": 6612 }, { "epoch": 0.71, "grad_norm": 1.698885943887545, "learning_rate": 2.03633846704451e-06, "loss": 0.9319, "step": 6613 }, { "epoch": 0.71, "grad_norm": 1.793832422527581, "learning_rate": 2.034936387125816e-06, "loss": 0.8815, "step": 6614 }, { "epoch": 0.71, "grad_norm": 1.682269625795327, "learning_rate": 2.033534666731308e-06, "loss": 0.9237, "step": 6615 }, { "epoch": 0.71, "grad_norm": 1.7594399177308309, "learning_rate": 2.032133306030948e-06, "loss": 0.8752, "step": 6616 }, { "epoch": 0.71, "grad_norm": 1.7832779786521455, "learning_rate": 2.0307323051946554e-06, "loss": 0.8903, "step": 6617 }, { "epoch": 0.71, "grad_norm": 1.6436473382487349, "learning_rate": 2.0293316643923112e-06, "loss": 0.8276, "step": 6618 }, { "epoch": 0.71, "grad_norm": 1.9230790960183213, "learning_rate": 2.0279313837937407e-06, "loss": 0.8859, "step": 6619 }, { "epoch": 0.71, "grad_norm": 1.7079254024673398, "learning_rate": 2.026531463568736e-06, "loss": 0.9078, "step": 6620 }, { "epoch": 0.71, "grad_norm": 1.7036267331578427, "learning_rate": 2.0251319038870448e-06, "loss": 0.8147, "step": 6621 }, { "epoch": 0.71, "grad_norm": 1.8232668128603995, "learning_rate": 2.023732704918364e-06, "loss": 0.8379, "step": 6622 }, { "epoch": 0.71, "grad_norm": 0.8251597426370927, "learning_rate": 2.0223338668323534e-06, "loss": 1.0461, "step": 6623 }, { "epoch": 0.71, "grad_norm": 1.6319486597494803, "learning_rate": 2.0209353897986288e-06, "loss": 0.9223, "step": 6624 }, { "epoch": 0.71, "grad_norm": 1.8710712585047917, "learning_rate": 2.019537273986757e-06, "loss": 0.8417, "step": 6625 }, { "epoch": 0.71, "grad_norm": 1.7435377767995675, "learning_rate": 2.0181395195662658e-06, "loss": 0.928, "step": 6626 }, { "epoch": 0.71, "grad_norm": 1.6680285616912485, "learning_rate": 2.0167421267066396e-06, "loss": 0.8637, "step": 6627 }, { "epoch": 0.71, "grad_norm": 1.7973981059527666, "learning_rate": 2.015345095577314e-06, "loss": 0.9076, "step": 6628 }, { "epoch": 0.71, "grad_norm": 1.695107518504038, "learning_rate": 2.0139484263476867e-06, "loss": 0.952, "step": 6629 }, { "epoch": 0.71, "grad_norm": 1.691062627197711, "learning_rate": 2.0125521191871054e-06, "loss": 0.8496, "step": 6630 }, { "epoch": 0.71, "grad_norm": 1.7017830952546598, "learning_rate": 2.0111561742648807e-06, "loss": 0.8314, "step": 6631 }, { "epoch": 0.71, "grad_norm": 1.6914953698486894, "learning_rate": 2.0097605917502722e-06, "loss": 0.9175, "step": 6632 }, { "epoch": 0.71, "grad_norm": 1.7496125835090524, "learning_rate": 2.0083653718125028e-06, "loss": 0.8663, "step": 6633 }, { "epoch": 0.71, "grad_norm": 1.7200634243282291, "learning_rate": 2.0069705146207443e-06, "loss": 0.8756, "step": 6634 }, { "epoch": 0.71, "grad_norm": 1.6758419587361633, 
"learning_rate": 2.005576020344129e-06, "loss": 0.9793, "step": 6635 }, { "epoch": 0.71, "grad_norm": 1.7826588594816537, "learning_rate": 2.004181889151746e-06, "loss": 0.8803, "step": 6636 }, { "epoch": 0.71, "grad_norm": 0.7968083372733458, "learning_rate": 2.002788121212636e-06, "loss": 1.059, "step": 6637 }, { "epoch": 0.71, "grad_norm": 0.7634950205502977, "learning_rate": 2.0013947166957994e-06, "loss": 1.074, "step": 6638 }, { "epoch": 0.71, "grad_norm": 1.7469682647173377, "learning_rate": 2.0000016757701924e-06, "loss": 0.8641, "step": 6639 }, { "epoch": 0.71, "grad_norm": 1.7798595833165498, "learning_rate": 1.998608998604725e-06, "loss": 0.8915, "step": 6640 }, { "epoch": 0.71, "grad_norm": 1.6317725214390892, "learning_rate": 1.997216685368262e-06, "loss": 0.8831, "step": 6641 }, { "epoch": 0.71, "grad_norm": 0.773682462898771, "learning_rate": 1.995824736229628e-06, "loss": 1.0586, "step": 6642 }, { "epoch": 0.71, "grad_norm": 1.6821681318247257, "learning_rate": 1.9944331513576038e-06, "loss": 0.9522, "step": 6643 }, { "epoch": 0.71, "grad_norm": 1.71955930318639, "learning_rate": 1.9930419309209197e-06, "loss": 0.9543, "step": 6644 }, { "epoch": 0.71, "grad_norm": 1.691716800639837, "learning_rate": 1.9916510750882685e-06, "loss": 0.8728, "step": 6645 }, { "epoch": 0.71, "grad_norm": 1.7538000774750189, "learning_rate": 1.990260584028297e-06, "loss": 0.8473, "step": 6646 }, { "epoch": 0.71, "grad_norm": 1.743677045300998, "learning_rate": 1.988870457909604e-06, "loss": 1.0128, "step": 6647 }, { "epoch": 0.71, "grad_norm": 1.699347455894714, "learning_rate": 1.987480696900749e-06, "loss": 0.8996, "step": 6648 }, { "epoch": 0.71, "grad_norm": 1.724643892740598, "learning_rate": 1.9860913011702477e-06, "loss": 0.8716, "step": 6649 }, { "epoch": 0.71, "grad_norm": 1.6796773451037332, "learning_rate": 1.984702270886566e-06, "loss": 0.9562, "step": 6650 }, { "epoch": 0.72, "grad_norm": 1.7955667282605543, "learning_rate": 1.983313606218128e-06, "loss": 0.8918, "step": 6651 }, { "epoch": 0.72, "grad_norm": 1.713753932419462, "learning_rate": 1.981925307333317e-06, "loss": 0.8297, "step": 6652 }, { "epoch": 0.72, "grad_norm": 1.7615541066283877, "learning_rate": 1.980537374400465e-06, "loss": 0.904, "step": 6653 }, { "epoch": 0.72, "grad_norm": 1.8166905439886167, "learning_rate": 1.979149807587866e-06, "loss": 0.8986, "step": 6654 }, { "epoch": 0.72, "grad_norm": 1.6316944122374408, "learning_rate": 1.97776260706377e-06, "loss": 0.8339, "step": 6655 }, { "epoch": 0.72, "grad_norm": 1.802952069216094, "learning_rate": 1.9763757729963746e-06, "loss": 0.8883, "step": 6656 }, { "epoch": 0.72, "grad_norm": 1.7396031612093914, "learning_rate": 1.974989305553841e-06, "loss": 0.8402, "step": 6657 }, { "epoch": 0.72, "grad_norm": 1.7589201487340693, "learning_rate": 1.9736032049042846e-06, "loss": 0.8751, "step": 6658 }, { "epoch": 0.72, "grad_norm": 1.7807386429536298, "learning_rate": 1.9722174712157714e-06, "loss": 0.894, "step": 6659 }, { "epoch": 0.72, "grad_norm": 1.7354546466718792, "learning_rate": 1.97083210465633e-06, "loss": 0.9415, "step": 6660 }, { "epoch": 0.72, "grad_norm": 0.7986314757071624, "learning_rate": 1.9694471053939375e-06, "loss": 1.0571, "step": 6661 }, { "epoch": 0.72, "grad_norm": 1.7314159187374094, "learning_rate": 1.9680624735965327e-06, "loss": 0.898, "step": 6662 }, { "epoch": 0.72, "grad_norm": 1.721440805795135, "learning_rate": 1.966678209432004e-06, "loss": 0.9505, "step": 6663 }, { "epoch": 0.72, "grad_norm": 1.6597960366376177, "learning_rate": 
1.965294313068202e-06, "loss": 0.8927, "step": 6664 }, { "epoch": 0.72, "grad_norm": 1.6118661143688366, "learning_rate": 1.9639107846729246e-06, "loss": 0.9029, "step": 6665 }, { "epoch": 0.72, "grad_norm": 1.7697234615120874, "learning_rate": 1.962527624413932e-06, "loss": 0.9011, "step": 6666 }, { "epoch": 0.72, "grad_norm": 2.081322024491878, "learning_rate": 1.961144832458938e-06, "loss": 0.8904, "step": 6667 }, { "epoch": 0.72, "grad_norm": 1.704638803918307, "learning_rate": 1.9597624089756078e-06, "loss": 0.7947, "step": 6668 }, { "epoch": 0.72, "grad_norm": 1.8230635386830232, "learning_rate": 1.958380354131567e-06, "loss": 0.8, "step": 6669 }, { "epoch": 0.72, "grad_norm": 1.616479410968544, "learning_rate": 1.9569986680943958e-06, "loss": 0.8914, "step": 6670 }, { "epoch": 0.72, "grad_norm": 1.658407105869247, "learning_rate": 1.9556173510316273e-06, "loss": 0.835, "step": 6671 }, { "epoch": 0.72, "grad_norm": 1.6491299562393036, "learning_rate": 1.9542364031107486e-06, "loss": 0.8901, "step": 6672 }, { "epoch": 0.72, "grad_norm": 1.7554670452497472, "learning_rate": 1.9528558244992063e-06, "loss": 0.8982, "step": 6673 }, { "epoch": 0.72, "grad_norm": 1.8739560360919851, "learning_rate": 1.9514756153644025e-06, "loss": 0.9125, "step": 6674 }, { "epoch": 0.72, "grad_norm": 1.7009730922870576, "learning_rate": 1.9500957758736884e-06, "loss": 0.9347, "step": 6675 }, { "epoch": 0.72, "grad_norm": 1.8331114646630717, "learning_rate": 1.948716306194376e-06, "loss": 0.769, "step": 6676 }, { "epoch": 0.72, "grad_norm": 1.7212746599080477, "learning_rate": 1.947337206493732e-06, "loss": 0.9008, "step": 6677 }, { "epoch": 0.72, "grad_norm": 1.7166641488457437, "learning_rate": 1.945958476938975e-06, "loss": 0.8605, "step": 6678 }, { "epoch": 0.72, "grad_norm": 1.6224586526106506, "learning_rate": 1.944580117697281e-06, "loss": 0.8318, "step": 6679 }, { "epoch": 0.72, "grad_norm": 1.7261616320284379, "learning_rate": 1.9432021289357833e-06, "loss": 0.8132, "step": 6680 }, { "epoch": 0.72, "grad_norm": 0.818350782995743, "learning_rate": 1.941824510821564e-06, "loss": 1.0489, "step": 6681 }, { "epoch": 0.72, "grad_norm": 1.638555482437467, "learning_rate": 1.9404472635216674e-06, "loss": 0.8358, "step": 6682 }, { "epoch": 0.72, "grad_norm": 1.7424471071570804, "learning_rate": 1.9390703872030885e-06, "loss": 0.9079, "step": 6683 }, { "epoch": 0.72, "grad_norm": 1.8907141059788921, "learning_rate": 1.937693882032776e-06, "loss": 0.8711, "step": 6684 }, { "epoch": 0.72, "grad_norm": 1.8659250193383614, "learning_rate": 1.936317748177637e-06, "loss": 0.8603, "step": 6685 }, { "epoch": 0.72, "grad_norm": 1.9011114946213044, "learning_rate": 1.934941985804536e-06, "loss": 0.8845, "step": 6686 }, { "epoch": 0.72, "grad_norm": 1.6517142975114083, "learning_rate": 1.933566595080284e-06, "loss": 0.9299, "step": 6687 }, { "epoch": 0.72, "grad_norm": 1.7111427009698037, "learning_rate": 1.9321915761716536e-06, "loss": 0.8937, "step": 6688 }, { "epoch": 0.72, "grad_norm": 1.6586672732435885, "learning_rate": 1.930816929245373e-06, "loss": 0.9177, "step": 6689 }, { "epoch": 0.72, "grad_norm": 1.7588969376173822, "learning_rate": 1.9294426544681183e-06, "loss": 1.0057, "step": 6690 }, { "epoch": 0.72, "grad_norm": 0.8007735289826785, "learning_rate": 1.9280687520065285e-06, "loss": 1.0275, "step": 6691 }, { "epoch": 0.72, "grad_norm": 1.9322877696298502, "learning_rate": 1.926695222027194e-06, "loss": 0.8988, "step": 6692 }, { "epoch": 0.72, "grad_norm": 0.8112134553442374, "learning_rate": 
1.9253220646966596e-06, "loss": 1.0534, "step": 6693 }, { "epoch": 0.72, "grad_norm": 1.7972933628167935, "learning_rate": 1.923949280181423e-06, "loss": 0.9698, "step": 6694 }, { "epoch": 0.72, "grad_norm": 1.7053878352988527, "learning_rate": 1.922576868647943e-06, "loss": 0.8591, "step": 6695 }, { "epoch": 0.72, "grad_norm": 1.8280487038002564, "learning_rate": 1.921204830262625e-06, "loss": 0.8398, "step": 6696 }, { "epoch": 0.72, "grad_norm": 1.7834764977368278, "learning_rate": 1.919833165191836e-06, "loss": 0.8928, "step": 6697 }, { "epoch": 0.72, "grad_norm": 1.738724498861658, "learning_rate": 1.918461873601896e-06, "loss": 0.9567, "step": 6698 }, { "epoch": 0.72, "grad_norm": 1.757378633708845, "learning_rate": 1.917090955659076e-06, "loss": 0.8507, "step": 6699 }, { "epoch": 0.72, "grad_norm": 1.6925602036805698, "learning_rate": 1.9157204115296063e-06, "loss": 0.8778, "step": 6700 }, { "epoch": 0.72, "grad_norm": 0.7886945433566005, "learning_rate": 1.914350241379671e-06, "loss": 0.9811, "step": 6701 }, { "epoch": 0.72, "grad_norm": 1.6705505192461292, "learning_rate": 1.9129804453754053e-06, "loss": 0.8708, "step": 6702 }, { "epoch": 0.72, "grad_norm": 1.7300394029316584, "learning_rate": 1.911611023682905e-06, "loss": 0.8864, "step": 6703 }, { "epoch": 0.72, "grad_norm": 1.7045684385721467, "learning_rate": 1.9102419764682133e-06, "loss": 0.901, "step": 6704 }, { "epoch": 0.72, "grad_norm": 1.683789075295256, "learning_rate": 1.908873303897336e-06, "loss": 0.8427, "step": 6705 }, { "epoch": 0.72, "grad_norm": 1.7609103524366228, "learning_rate": 1.9075050061362255e-06, "loss": 0.8994, "step": 6706 }, { "epoch": 0.72, "grad_norm": 1.8884080763692728, "learning_rate": 1.9061370833507947e-06, "loss": 0.9191, "step": 6707 }, { "epoch": 0.72, "grad_norm": 1.6898067393719378, "learning_rate": 1.90476953570691e-06, "loss": 0.9479, "step": 6708 }, { "epoch": 0.72, "grad_norm": 0.8133154312922324, "learning_rate": 1.9034023633703885e-06, "loss": 1.0248, "step": 6709 }, { "epoch": 0.72, "grad_norm": 0.7610835156185274, "learning_rate": 1.9020355665070062e-06, "loss": 1.0294, "step": 6710 }, { "epoch": 0.72, "grad_norm": 1.7318741600489804, "learning_rate": 1.9006691452824932e-06, "loss": 0.8938, "step": 6711 }, { "epoch": 0.72, "grad_norm": 1.714713643247803, "learning_rate": 1.8993030998625295e-06, "loss": 0.9123, "step": 6712 }, { "epoch": 0.72, "grad_norm": 1.7064021100613893, "learning_rate": 1.8979374304127567e-06, "loss": 0.9344, "step": 6713 }, { "epoch": 0.72, "grad_norm": 2.011661736289325, "learning_rate": 1.896572137098765e-06, "loss": 0.9062, "step": 6714 }, { "epoch": 0.72, "grad_norm": 0.7904086199943855, "learning_rate": 1.8952072200860988e-06, "loss": 1.0447, "step": 6715 }, { "epoch": 0.72, "grad_norm": 1.7996827818822383, "learning_rate": 1.8938426795402614e-06, "loss": 0.8891, "step": 6716 }, { "epoch": 0.72, "grad_norm": 1.7096018071105226, "learning_rate": 1.8924785156267089e-06, "loss": 0.8354, "step": 6717 }, { "epoch": 0.72, "grad_norm": 1.767828307489623, "learning_rate": 1.8911147285108484e-06, "loss": 0.9179, "step": 6718 }, { "epoch": 0.72, "grad_norm": 1.7271965404623815, "learning_rate": 1.8897513183580445e-06, "loss": 0.8816, "step": 6719 }, { "epoch": 0.72, "grad_norm": 2.1629365776725202, "learning_rate": 1.8883882853336183e-06, "loss": 0.8545, "step": 6720 }, { "epoch": 0.72, "grad_norm": 1.681382392205773, "learning_rate": 1.8870256296028378e-06, "loss": 0.8798, "step": 6721 }, { "epoch": 0.72, "grad_norm": 1.729826254847735, "learning_rate": 
1.8856633513309313e-06, "loss": 0.9264, "step": 6722 }, { "epoch": 0.72, "grad_norm": 1.7460456052860156, "learning_rate": 1.8843014506830826e-06, "loss": 0.9406, "step": 6723 }, { "epoch": 0.72, "grad_norm": 1.7895398124502808, "learning_rate": 1.8829399278244243e-06, "loss": 0.9232, "step": 6724 }, { "epoch": 0.72, "grad_norm": 1.7847565912451042, "learning_rate": 1.8815787829200438e-06, "loss": 0.9072, "step": 6725 }, { "epoch": 0.72, "grad_norm": 1.7191372852981854, "learning_rate": 1.880218016134987e-06, "loss": 0.9062, "step": 6726 }, { "epoch": 0.72, "grad_norm": 1.6997387214582256, "learning_rate": 1.878857627634253e-06, "loss": 0.923, "step": 6727 }, { "epoch": 0.72, "grad_norm": 1.690622040158969, "learning_rate": 1.8774976175827898e-06, "loss": 0.9001, "step": 6728 }, { "epoch": 0.72, "grad_norm": 1.7551763542787262, "learning_rate": 1.8761379861455076e-06, "loss": 0.8547, "step": 6729 }, { "epoch": 0.72, "grad_norm": 1.9137302223990171, "learning_rate": 1.874778733487262e-06, "loss": 0.8308, "step": 6730 }, { "epoch": 0.72, "grad_norm": 1.6328445469449737, "learning_rate": 1.8734198597728698e-06, "loss": 0.9271, "step": 6731 }, { "epoch": 0.72, "grad_norm": 1.7170599354966187, "learning_rate": 1.8720613651670999e-06, "loss": 0.8448, "step": 6732 }, { "epoch": 0.72, "grad_norm": 1.5388679068838917, "learning_rate": 1.8707032498346711e-06, "loss": 0.913, "step": 6733 }, { "epoch": 0.72, "grad_norm": 1.6875461713603581, "learning_rate": 1.8693455139402621e-06, "loss": 0.8645, "step": 6734 }, { "epoch": 0.72, "grad_norm": 1.8153761450798231, "learning_rate": 1.8679881576485042e-06, "loss": 0.91, "step": 6735 }, { "epoch": 0.72, "grad_norm": 1.7268004723532482, "learning_rate": 1.8666311811239796e-06, "loss": 0.9819, "step": 6736 }, { "epoch": 0.72, "grad_norm": 1.8440627641595173, "learning_rate": 1.8652745845312253e-06, "loss": 0.8178, "step": 6737 }, { "epoch": 0.72, "grad_norm": 1.819933123721399, "learning_rate": 1.8639183680347345e-06, "loss": 0.9127, "step": 6738 }, { "epoch": 0.72, "grad_norm": 1.7152058404761696, "learning_rate": 1.8625625317989548e-06, "loss": 0.9468, "step": 6739 }, { "epoch": 0.72, "grad_norm": 1.615328647068905, "learning_rate": 1.8612070759882827e-06, "loss": 0.811, "step": 6740 }, { "epoch": 0.72, "grad_norm": 1.7946315662042833, "learning_rate": 1.8598520007670744e-06, "loss": 0.9438, "step": 6741 }, { "epoch": 0.72, "grad_norm": 1.727598999381991, "learning_rate": 1.8584973062996382e-06, "loss": 0.8616, "step": 6742 }, { "epoch": 0.72, "grad_norm": 1.7560514474056543, "learning_rate": 1.8571429927502322e-06, "loss": 0.8657, "step": 6743 }, { "epoch": 0.73, "grad_norm": 1.7969532431497461, "learning_rate": 1.8557890602830731e-06, "loss": 0.8704, "step": 6744 }, { "epoch": 0.73, "grad_norm": 0.7846208435150089, "learning_rate": 1.854435509062334e-06, "loss": 1.0643, "step": 6745 }, { "epoch": 0.73, "grad_norm": 1.6655694653684243, "learning_rate": 1.8530823392521302e-06, "loss": 0.9401, "step": 6746 }, { "epoch": 0.73, "grad_norm": 1.8366621710916666, "learning_rate": 1.8517295510165416e-06, "loss": 0.9651, "step": 6747 }, { "epoch": 0.73, "grad_norm": 1.6920321586426823, "learning_rate": 1.8503771445196005e-06, "loss": 0.8207, "step": 6748 }, { "epoch": 0.73, "grad_norm": 1.6308593347114158, "learning_rate": 1.849025119925288e-06, "loss": 0.9872, "step": 6749 }, { "epoch": 0.73, "grad_norm": 1.611261217699422, "learning_rate": 1.847673477397542e-06, "loss": 0.9366, "step": 6750 }, { "epoch": 0.73, "grad_norm": 1.735059774018659, "learning_rate": 
1.8463222171002565e-06, "loss": 0.8652, "step": 6751 }, { "epoch": 0.73, "grad_norm": 1.7260471724143036, "learning_rate": 1.8449713391972734e-06, "loss": 0.9177, "step": 6752 }, { "epoch": 0.73, "grad_norm": 1.77926381564285, "learning_rate": 1.8436208438523916e-06, "loss": 0.9955, "step": 6753 }, { "epoch": 0.73, "grad_norm": 1.8449248119200312, "learning_rate": 1.8422707312293663e-06, "loss": 0.8881, "step": 6754 }, { "epoch": 0.73, "grad_norm": 1.7865063621936539, "learning_rate": 1.8409210014918994e-06, "loss": 0.9436, "step": 6755 }, { "epoch": 0.73, "grad_norm": 1.8172921972911207, "learning_rate": 1.839571654803654e-06, "loss": 0.9105, "step": 6756 }, { "epoch": 0.73, "grad_norm": 1.625792545752456, "learning_rate": 1.8382226913282391e-06, "loss": 0.9343, "step": 6757 }, { "epoch": 0.73, "grad_norm": 1.7477379331484049, "learning_rate": 1.8368741112292255e-06, "loss": 0.9182, "step": 6758 }, { "epoch": 0.73, "grad_norm": 1.7917617849647247, "learning_rate": 1.8355259146701282e-06, "loss": 0.9491, "step": 6759 }, { "epoch": 0.73, "grad_norm": 1.7295379874476284, "learning_rate": 1.8341781018144256e-06, "loss": 0.8652, "step": 6760 }, { "epoch": 0.73, "grad_norm": 1.7271874629701593, "learning_rate": 1.8328306728255407e-06, "loss": 0.8769, "step": 6761 }, { "epoch": 0.73, "grad_norm": 1.741468574340731, "learning_rate": 1.8314836278668558e-06, "loss": 0.9108, "step": 6762 }, { "epoch": 0.73, "grad_norm": 1.7484048436737174, "learning_rate": 1.830136967101706e-06, "loss": 0.9058, "step": 6763 }, { "epoch": 0.73, "grad_norm": 1.7055018806028972, "learning_rate": 1.8287906906933755e-06, "loss": 0.9225, "step": 6764 }, { "epoch": 0.73, "grad_norm": 1.6526425958890114, "learning_rate": 1.8274447988051065e-06, "loss": 0.8998, "step": 6765 }, { "epoch": 0.73, "grad_norm": 1.7344644028285687, "learning_rate": 1.8260992916000947e-06, "loss": 0.863, "step": 6766 }, { "epoch": 0.73, "grad_norm": 1.6894064893115037, "learning_rate": 1.824754169241486e-06, "loss": 0.9949, "step": 6767 }, { "epoch": 0.73, "grad_norm": 1.7574064312687918, "learning_rate": 1.8234094318923795e-06, "loss": 0.8673, "step": 6768 }, { "epoch": 0.73, "grad_norm": 1.731909227435955, "learning_rate": 1.82206507971583e-06, "loss": 0.8098, "step": 6769 }, { "epoch": 0.73, "grad_norm": 0.799540911919679, "learning_rate": 1.8207211128748482e-06, "loss": 1.0364, "step": 6770 }, { "epoch": 0.73, "grad_norm": 1.8189181789756352, "learning_rate": 1.8193775315323903e-06, "loss": 0.891, "step": 6771 }, { "epoch": 0.73, "grad_norm": 1.734861886118441, "learning_rate": 1.8180343358513714e-06, "loss": 0.8983, "step": 6772 }, { "epoch": 0.73, "grad_norm": 1.7261942102713896, "learning_rate": 1.8166915259946617e-06, "loss": 0.947, "step": 6773 }, { "epoch": 0.73, "grad_norm": 1.7020924554472685, "learning_rate": 1.8153491021250763e-06, "loss": 0.8972, "step": 6774 }, { "epoch": 0.73, "grad_norm": 1.6674441194986105, "learning_rate": 1.814007064405392e-06, "loss": 0.8661, "step": 6775 }, { "epoch": 0.73, "grad_norm": 0.8388079950967003, "learning_rate": 1.8126654129983367e-06, "loss": 1.0347, "step": 6776 }, { "epoch": 0.73, "grad_norm": 0.7876221362515354, "learning_rate": 1.8113241480665883e-06, "loss": 1.0516, "step": 6777 }, { "epoch": 0.73, "grad_norm": 1.871283718528524, "learning_rate": 1.8099832697727787e-06, "loss": 0.8717, "step": 6778 }, { "epoch": 0.73, "grad_norm": 0.7801931243741674, "learning_rate": 1.8086427782794964e-06, "loss": 1.0158, "step": 6779 }, { "epoch": 0.73, "grad_norm": 1.762568277918021, "learning_rate": 
1.8073026737492783e-06, "loss": 0.857, "step": 6780 }, { "epoch": 0.73, "grad_norm": 1.688306545191722, "learning_rate": 1.8059629563446173e-06, "loss": 0.8758, "step": 6781 }, { "epoch": 0.73, "grad_norm": 1.8380639240245642, "learning_rate": 1.8046236262279615e-06, "loss": 0.8622, "step": 6782 }, { "epoch": 0.73, "grad_norm": 1.6784512123660265, "learning_rate": 1.8032846835617056e-06, "loss": 0.8366, "step": 6783 }, { "epoch": 0.73, "grad_norm": 1.6943559806938433, "learning_rate": 1.8019461285082023e-06, "loss": 0.8704, "step": 6784 }, { "epoch": 0.73, "grad_norm": 0.799322959335075, "learning_rate": 1.8006079612297583e-06, "loss": 1.043, "step": 6785 }, { "epoch": 0.73, "grad_norm": 1.730418408913277, "learning_rate": 1.7992701818886271e-06, "loss": 0.8881, "step": 6786 }, { "epoch": 0.73, "grad_norm": 1.8020664794691135, "learning_rate": 1.7979327906470206e-06, "loss": 0.842, "step": 6787 }, { "epoch": 0.73, "grad_norm": 1.762974201609828, "learning_rate": 1.7965957876671047e-06, "loss": 0.9212, "step": 6788 }, { "epoch": 0.73, "grad_norm": 1.7980887024419843, "learning_rate": 1.795259173110993e-06, "loss": 0.8537, "step": 6789 }, { "epoch": 0.73, "grad_norm": 1.6637994213221692, "learning_rate": 1.7939229471407532e-06, "loss": 0.9067, "step": 6790 }, { "epoch": 0.73, "grad_norm": 1.6649602236552614, "learning_rate": 1.792587109918411e-06, "loss": 0.8135, "step": 6791 }, { "epoch": 0.73, "grad_norm": 1.681264371358223, "learning_rate": 1.7912516616059377e-06, "loss": 0.8342, "step": 6792 }, { "epoch": 0.73, "grad_norm": 1.910552699651834, "learning_rate": 1.7899166023652625e-06, "loss": 0.9898, "step": 6793 }, { "epoch": 0.73, "grad_norm": 1.6727610248705524, "learning_rate": 1.788581932358268e-06, "loss": 0.8477, "step": 6794 }, { "epoch": 0.73, "grad_norm": 1.7248290584335482, "learning_rate": 1.787247651746784e-06, "loss": 0.8808, "step": 6795 }, { "epoch": 0.73, "grad_norm": 1.769438178138947, "learning_rate": 1.785913760692598e-06, "loss": 0.8693, "step": 6796 }, { "epoch": 0.73, "grad_norm": 1.6143237499419798, "learning_rate": 1.7845802593574512e-06, "loss": 0.8722, "step": 6797 }, { "epoch": 0.73, "grad_norm": 1.689145053575692, "learning_rate": 1.7832471479030328e-06, "loss": 0.8824, "step": 6798 }, { "epoch": 0.73, "grad_norm": 1.7227798498392426, "learning_rate": 1.781914426490986e-06, "loss": 0.925, "step": 6799 }, { "epoch": 0.73, "grad_norm": 1.8114971463745766, "learning_rate": 1.7805820952829095e-06, "loss": 0.785, "step": 6800 }, { "epoch": 0.73, "grad_norm": 1.8374163404893544, "learning_rate": 1.7792501544403545e-06, "loss": 0.9134, "step": 6801 }, { "epoch": 0.73, "grad_norm": 1.6712388218340632, "learning_rate": 1.7779186041248204e-06, "loss": 0.8822, "step": 6802 }, { "epoch": 0.73, "grad_norm": 1.8459740488456966, "learning_rate": 1.7765874444977638e-06, "loss": 0.8959, "step": 6803 }, { "epoch": 0.73, "grad_norm": 0.8000833429945955, "learning_rate": 1.7752566757205936e-06, "loss": 1.0394, "step": 6804 }, { "epoch": 0.73, "grad_norm": 1.7143934114306176, "learning_rate": 1.7739262979546674e-06, "loss": 0.8324, "step": 6805 }, { "epoch": 0.73, "grad_norm": 1.670417976056517, "learning_rate": 1.7725963113612998e-06, "loss": 0.9585, "step": 6806 }, { "epoch": 0.73, "grad_norm": 1.7415364605358754, "learning_rate": 1.771266716101757e-06, "loss": 0.8391, "step": 6807 }, { "epoch": 0.73, "grad_norm": 1.6931484007181608, "learning_rate": 1.7699375123372554e-06, "loss": 0.8894, "step": 6808 }, { "epoch": 0.73, "grad_norm": 1.6954730000542293, "learning_rate": 
1.768608700228967e-06, "loss": 0.9063, "step": 6809 }, { "epoch": 0.73, "grad_norm": 1.690278557438335, "learning_rate": 1.767280279938014e-06, "loss": 0.8457, "step": 6810 }, { "epoch": 0.73, "grad_norm": 1.6597553780400327, "learning_rate": 1.7659522516254707e-06, "loss": 0.9008, "step": 6811 }, { "epoch": 0.73, "grad_norm": 1.7089649572179197, "learning_rate": 1.764624615452366e-06, "loss": 0.8602, "step": 6812 }, { "epoch": 0.73, "grad_norm": 1.7441067620336643, "learning_rate": 1.7632973715796825e-06, "loss": 0.8875, "step": 6813 }, { "epoch": 0.73, "grad_norm": 1.6035093273385714, "learning_rate": 1.7619705201683495e-06, "loss": 0.8492, "step": 6814 }, { "epoch": 0.73, "grad_norm": 1.5987850700137118, "learning_rate": 1.7606440613792547e-06, "loss": 0.8182, "step": 6815 }, { "epoch": 0.73, "grad_norm": 1.7140255392667343, "learning_rate": 1.7593179953732364e-06, "loss": 0.8908, "step": 6816 }, { "epoch": 0.73, "grad_norm": 1.640938883807315, "learning_rate": 1.7579923223110818e-06, "loss": 0.9475, "step": 6817 }, { "epoch": 0.73, "grad_norm": 1.8509082320612162, "learning_rate": 1.7566670423535348e-06, "loss": 0.8842, "step": 6818 }, { "epoch": 0.73, "grad_norm": 1.7037062969734111, "learning_rate": 1.7553421556612927e-06, "loss": 0.8815, "step": 6819 }, { "epoch": 0.73, "grad_norm": 1.6966699520604616, "learning_rate": 1.754017662395e-06, "loss": 0.9197, "step": 6820 }, { "epoch": 0.73, "grad_norm": 1.8031852482407524, "learning_rate": 1.7526935627152542e-06, "loss": 0.9265, "step": 6821 }, { "epoch": 0.73, "grad_norm": 1.7120420373925513, "learning_rate": 1.75136985678261e-06, "loss": 0.9101, "step": 6822 }, { "epoch": 0.73, "grad_norm": 1.7524470028123695, "learning_rate": 1.750046544757571e-06, "loss": 0.9565, "step": 6823 }, { "epoch": 0.73, "grad_norm": 1.6446071448235784, "learning_rate": 1.7487236268005919e-06, "loss": 0.8612, "step": 6824 }, { "epoch": 0.73, "grad_norm": 1.6151391056585793, "learning_rate": 1.7474011030720834e-06, "loss": 0.9167, "step": 6825 }, { "epoch": 0.73, "grad_norm": 1.7168642476755656, "learning_rate": 1.7460789737324025e-06, "loss": 0.8656, "step": 6826 }, { "epoch": 0.73, "grad_norm": 1.764050426994709, "learning_rate": 1.7447572389418643e-06, "loss": 0.8756, "step": 6827 }, { "epoch": 0.73, "grad_norm": 1.832349362677392, "learning_rate": 1.7434358988607353e-06, "loss": 0.9614, "step": 6828 }, { "epoch": 0.73, "grad_norm": 1.751617123529363, "learning_rate": 1.7421149536492282e-06, "loss": 0.8392, "step": 6829 }, { "epoch": 0.73, "grad_norm": 1.7247464499008838, "learning_rate": 1.7407944034675173e-06, "loss": 0.861, "step": 6830 }, { "epoch": 0.73, "grad_norm": 0.7882554100636091, "learning_rate": 1.7394742484757187e-06, "loss": 1.0422, "step": 6831 }, { "epoch": 0.73, "grad_norm": 1.5938085899511738, "learning_rate": 1.7381544888339103e-06, "loss": 0.9301, "step": 6832 }, { "epoch": 0.73, "grad_norm": 1.7140139302773292, "learning_rate": 1.7368351247021136e-06, "loss": 0.8918, "step": 6833 }, { "epoch": 0.73, "grad_norm": 1.7270086826781677, "learning_rate": 1.7355161562403078e-06, "loss": 0.81, "step": 6834 }, { "epoch": 0.73, "grad_norm": 1.5778159210099778, "learning_rate": 1.7341975836084247e-06, "loss": 0.8605, "step": 6835 }, { "epoch": 0.73, "grad_norm": 1.6597368886586847, "learning_rate": 1.7328794069663414e-06, "loss": 0.804, "step": 6836 }, { "epoch": 0.74, "grad_norm": 1.7034721406773479, "learning_rate": 1.7315616264738933e-06, "loss": 0.9237, "step": 6837 }, { "epoch": 0.74, "grad_norm": 1.7029987397866007, "learning_rate": 
1.7302442422908677e-06, "loss": 0.8979, "step": 6838 }, { "epoch": 0.74, "grad_norm": 1.8591539241036474, "learning_rate": 1.7289272545769987e-06, "loss": 0.9419, "step": 6839 }, { "epoch": 0.74, "grad_norm": 1.7861626747357349, "learning_rate": 1.7276106634919775e-06, "loss": 0.8741, "step": 6840 }, { "epoch": 0.74, "grad_norm": 1.6828618522195882, "learning_rate": 1.726294469195448e-06, "loss": 0.8585, "step": 6841 }, { "epoch": 0.74, "grad_norm": 1.816664303416701, "learning_rate": 1.7249786718469968e-06, "loss": 1.0151, "step": 6842 }, { "epoch": 0.74, "grad_norm": 0.7936861674238076, "learning_rate": 1.723663271606173e-06, "loss": 1.067, "step": 6843 }, { "epoch": 0.74, "grad_norm": 1.7433202108712524, "learning_rate": 1.7223482686324738e-06, "loss": 0.9628, "step": 6844 }, { "epoch": 0.74, "grad_norm": 1.6835446560491487, "learning_rate": 1.7210336630853453e-06, "loss": 0.8582, "step": 6845 }, { "epoch": 0.74, "grad_norm": 1.7244429339364802, "learning_rate": 1.7197194551241896e-06, "loss": 0.8189, "step": 6846 }, { "epoch": 0.74, "grad_norm": 1.8230269255135336, "learning_rate": 1.7184056449083603e-06, "loss": 0.9038, "step": 6847 }, { "epoch": 0.74, "grad_norm": 1.7982135554042011, "learning_rate": 1.7170922325971585e-06, "loss": 0.8781, "step": 6848 }, { "epoch": 0.74, "grad_norm": 1.5882710165572504, "learning_rate": 1.7157792183498413e-06, "loss": 0.9338, "step": 6849 }, { "epoch": 0.74, "grad_norm": 1.8340272743340436, "learning_rate": 1.714466602325618e-06, "loss": 0.8819, "step": 6850 }, { "epoch": 0.74, "grad_norm": 1.8709551624988427, "learning_rate": 1.7131543846836457e-06, "loss": 0.8402, "step": 6851 }, { "epoch": 0.74, "grad_norm": 1.6349161666153629, "learning_rate": 1.7118425655830346e-06, "loss": 0.9073, "step": 6852 }, { "epoch": 0.74, "grad_norm": 1.9498820828583627, "learning_rate": 1.7105311451828483e-06, "loss": 0.9421, "step": 6853 }, { "epoch": 0.74, "grad_norm": 1.698467816983888, "learning_rate": 1.709220123642103e-06, "loss": 0.9279, "step": 6854 }, { "epoch": 0.74, "grad_norm": 1.6953958669953584, "learning_rate": 1.707909501119761e-06, "loss": 0.8229, "step": 6855 }, { "epoch": 0.74, "grad_norm": 1.8425572802114498, "learning_rate": 1.706599277774743e-06, "loss": 0.8765, "step": 6856 }, { "epoch": 0.74, "grad_norm": 1.7068201126253246, "learning_rate": 1.705289453765915e-06, "loss": 0.913, "step": 6857 }, { "epoch": 0.74, "grad_norm": 1.7751368991901568, "learning_rate": 1.7039800292520997e-06, "loss": 0.8991, "step": 6858 }, { "epoch": 0.74, "grad_norm": 1.7273887440098223, "learning_rate": 1.7026710043920702e-06, "loss": 0.9214, "step": 6859 }, { "epoch": 0.74, "grad_norm": 1.7727513397365018, "learning_rate": 1.701362379344547e-06, "loss": 0.8671, "step": 6860 }, { "epoch": 0.74, "grad_norm": 1.6185274977732638, "learning_rate": 1.7000541542682087e-06, "loss": 0.8384, "step": 6861 }, { "epoch": 0.74, "grad_norm": 1.621562280134425, "learning_rate": 1.6987463293216817e-06, "loss": 0.9192, "step": 6862 }, { "epoch": 0.74, "grad_norm": 1.7044073868024594, "learning_rate": 1.697438904663543e-06, "loss": 0.9034, "step": 6863 }, { "epoch": 0.74, "grad_norm": 0.8136007489314534, "learning_rate": 1.696131880452322e-06, "loss": 1.0781, "step": 6864 }, { "epoch": 0.74, "grad_norm": 1.582588482545356, "learning_rate": 1.6948252568465001e-06, "loss": 0.7868, "step": 6865 }, { "epoch": 0.74, "grad_norm": 1.6796319411133531, "learning_rate": 1.6935190340045121e-06, "loss": 0.8994, "step": 6866 }, { "epoch": 0.74, "grad_norm": 1.7769314599767678, "learning_rate": 
1.692213212084739e-06, "loss": 0.9106, "step": 6867 }, { "epoch": 0.74, "grad_norm": 1.700661313506012, "learning_rate": 1.690907791245518e-06, "loss": 0.9092, "step": 6868 }, { "epoch": 0.74, "grad_norm": 1.7341927765821927, "learning_rate": 1.6896027716451364e-06, "loss": 0.9427, "step": 6869 }, { "epoch": 0.74, "grad_norm": 1.697018346472416, "learning_rate": 1.6882981534418301e-06, "loss": 0.9228, "step": 6870 }, { "epoch": 0.74, "grad_norm": 1.775873086513128, "learning_rate": 1.6869939367937899e-06, "loss": 0.9057, "step": 6871 }, { "epoch": 0.74, "grad_norm": 0.8133729458009092, "learning_rate": 1.6856901218591598e-06, "loss": 1.0886, "step": 6872 }, { "epoch": 0.74, "grad_norm": 2.0108320401450697, "learning_rate": 1.6843867087960252e-06, "loss": 0.9878, "step": 6873 }, { "epoch": 0.74, "grad_norm": 1.7113148831228198, "learning_rate": 1.6830836977624326e-06, "loss": 0.8872, "step": 6874 }, { "epoch": 0.74, "grad_norm": 1.8987783013882376, "learning_rate": 1.6817810889163788e-06, "loss": 0.8558, "step": 6875 }, { "epoch": 0.74, "grad_norm": 1.8668357817895518, "learning_rate": 1.6804788824158058e-06, "loss": 0.9344, "step": 6876 }, { "epoch": 0.74, "grad_norm": 1.6915126402369727, "learning_rate": 1.6791770784186128e-06, "loss": 0.9469, "step": 6877 }, { "epoch": 0.74, "grad_norm": 1.8836868755669443, "learning_rate": 1.6778756770826488e-06, "loss": 0.9198, "step": 6878 }, { "epoch": 0.74, "grad_norm": 1.703013353621644, "learning_rate": 1.6765746785657104e-06, "loss": 0.8972, "step": 6879 }, { "epoch": 0.74, "grad_norm": 1.8463880418994392, "learning_rate": 1.6752740830255503e-06, "loss": 0.9379, "step": 6880 }, { "epoch": 0.74, "grad_norm": 1.660280151209288, "learning_rate": 1.6739738906198711e-06, "loss": 0.9099, "step": 6881 }, { "epoch": 0.74, "grad_norm": 1.6430753569167227, "learning_rate": 1.6726741015063225e-06, "loss": 0.9551, "step": 6882 }, { "epoch": 0.74, "grad_norm": 1.7091741672406278, "learning_rate": 1.671374715842512e-06, "loss": 0.902, "step": 6883 }, { "epoch": 0.74, "grad_norm": 1.7254690458154878, "learning_rate": 1.6700757337859907e-06, "loss": 0.8829, "step": 6884 }, { "epoch": 0.74, "grad_norm": 1.7993757151710104, "learning_rate": 1.668777155494269e-06, "loss": 0.9484, "step": 6885 }, { "epoch": 0.74, "grad_norm": 1.804461091842194, "learning_rate": 1.6674789811247993e-06, "loss": 0.8301, "step": 6886 }, { "epoch": 0.74, "grad_norm": 1.7916770419703338, "learning_rate": 1.666181210834994e-06, "loss": 0.9179, "step": 6887 }, { "epoch": 0.74, "grad_norm": 1.7870747253228467, "learning_rate": 1.6648838447822084e-06, "loss": 0.8647, "step": 6888 }, { "epoch": 0.74, "grad_norm": 0.7699141677728926, "learning_rate": 1.6635868831237551e-06, "loss": 1.0449, "step": 6889 }, { "epoch": 0.74, "grad_norm": 1.7952977059862438, "learning_rate": 1.6622903260168954e-06, "loss": 0.9131, "step": 6890 }, { "epoch": 0.74, "grad_norm": 1.7777452815092758, "learning_rate": 1.6609941736188395e-06, "loss": 0.939, "step": 6891 }, { "epoch": 0.74, "grad_norm": 1.677261198504098, "learning_rate": 1.6596984260867516e-06, "loss": 0.9256, "step": 6892 }, { "epoch": 0.74, "grad_norm": 1.6643533624785818, "learning_rate": 1.6584030835777465e-06, "loss": 0.8062, "step": 6893 }, { "epoch": 0.74, "grad_norm": 1.6822830117344607, "learning_rate": 1.6571081462488876e-06, "loss": 0.8812, "step": 6894 }, { "epoch": 0.74, "grad_norm": 0.8064495220306743, "learning_rate": 1.655813614257189e-06, "loss": 1.0295, "step": 6895 }, { "epoch": 0.74, "grad_norm": 1.8491145921954455, "learning_rate": 
1.6545194877596194e-06, "loss": 0.924, "step": 6896 }, { "epoch": 0.74, "grad_norm": 1.7273679433025995, "learning_rate": 1.6532257669130968e-06, "loss": 0.9185, "step": 6897 }, { "epoch": 0.74, "grad_norm": 1.7384990424684188, "learning_rate": 1.6519324518744867e-06, "loss": 0.881, "step": 6898 }, { "epoch": 0.74, "grad_norm": 1.917735422829965, "learning_rate": 1.650639542800609e-06, "loss": 0.8958, "step": 6899 }, { "epoch": 0.74, "grad_norm": 1.733737055136843, "learning_rate": 1.6493470398482352e-06, "loss": 0.8784, "step": 6900 }, { "epoch": 0.74, "grad_norm": 1.7076256448992382, "learning_rate": 1.6480549431740834e-06, "loss": 0.9114, "step": 6901 }, { "epoch": 0.74, "grad_norm": 1.6846432331129724, "learning_rate": 1.646763252934825e-06, "loss": 0.804, "step": 6902 }, { "epoch": 0.74, "grad_norm": 1.725923458970688, "learning_rate": 1.645471969287084e-06, "loss": 0.9125, "step": 6903 }, { "epoch": 0.74, "grad_norm": 1.7679344107230115, "learning_rate": 1.644181092387432e-06, "loss": 0.8939, "step": 6904 }, { "epoch": 0.74, "grad_norm": 1.6542075245429493, "learning_rate": 1.6428906223923902e-06, "loss": 0.8355, "step": 6905 }, { "epoch": 0.74, "grad_norm": 1.6529529041152011, "learning_rate": 1.6416005594584355e-06, "loss": 0.9187, "step": 6906 }, { "epoch": 0.74, "grad_norm": 0.8035164497075127, "learning_rate": 1.6403109037419895e-06, "loss": 1.0725, "step": 6907 }, { "epoch": 0.74, "grad_norm": 0.7814105908416771, "learning_rate": 1.6390216553994292e-06, "loss": 1.0219, "step": 6908 }, { "epoch": 0.74, "grad_norm": 1.8286350273943441, "learning_rate": 1.6377328145870824e-06, "loss": 0.7898, "step": 6909 }, { "epoch": 0.74, "grad_norm": 1.6227577853108464, "learning_rate": 1.6364443814612207e-06, "loss": 0.8287, "step": 6910 }, { "epoch": 0.74, "grad_norm": 1.6417595165842345, "learning_rate": 1.6351563561780742e-06, "loss": 0.8393, "step": 6911 }, { "epoch": 0.74, "grad_norm": 1.7840474310224526, "learning_rate": 1.6338687388938218e-06, "loss": 0.9293, "step": 6912 }, { "epoch": 0.74, "grad_norm": 1.6835820108095845, "learning_rate": 1.6325815297645875e-06, "loss": 0.9643, "step": 6913 }, { "epoch": 0.74, "grad_norm": 1.5435596842207269, "learning_rate": 1.631294728946452e-06, "loss": 0.9091, "step": 6914 }, { "epoch": 0.74, "grad_norm": 1.6891954212070406, "learning_rate": 1.6300083365954462e-06, "loss": 0.9482, "step": 6915 }, { "epoch": 0.74, "grad_norm": 1.7460184661761975, "learning_rate": 1.6287223528675478e-06, "loss": 0.829, "step": 6916 }, { "epoch": 0.74, "grad_norm": 1.8679713008348668, "learning_rate": 1.6274367779186845e-06, "loss": 0.9668, "step": 6917 }, { "epoch": 0.74, "grad_norm": 1.7357075312241537, "learning_rate": 1.6261516119047393e-06, "loss": 0.9227, "step": 6918 }, { "epoch": 0.74, "grad_norm": 1.7040071331055202, "learning_rate": 1.6248668549815445e-06, "loss": 0.8616, "step": 6919 }, { "epoch": 0.74, "grad_norm": 1.6513700612776312, "learning_rate": 1.6235825073048772e-06, "loss": 0.9038, "step": 6920 }, { "epoch": 0.74, "grad_norm": 1.7297525587560503, "learning_rate": 1.6222985690304732e-06, "loss": 0.9246, "step": 6921 }, { "epoch": 0.74, "grad_norm": 1.7506410967908024, "learning_rate": 1.62101504031401e-06, "loss": 0.8613, "step": 6922 }, { "epoch": 0.74, "grad_norm": 0.7940945581054802, "learning_rate": 1.6197319213111235e-06, "loss": 1.0614, "step": 6923 }, { "epoch": 0.74, "grad_norm": 1.7397047232705767, "learning_rate": 1.6184492121773958e-06, "loss": 0.882, "step": 6924 }, { "epoch": 0.74, "grad_norm": 1.7275506998483345, "learning_rate": 
1.6171669130683593e-06, "loss": 0.8368, "step": 6925 }, { "epoch": 0.74, "grad_norm": 1.7224315962608217, "learning_rate": 1.6158850241394957e-06, "loss": 0.9116, "step": 6926 }, { "epoch": 0.74, "grad_norm": 1.7201018195694147, "learning_rate": 1.6146035455462395e-06, "loss": 0.9029, "step": 6927 }, { "epoch": 0.74, "grad_norm": 1.6663574989875263, "learning_rate": 1.613322477443976e-06, "loss": 0.8262, "step": 6928 }, { "epoch": 0.74, "grad_norm": 1.7246444915426904, "learning_rate": 1.6120418199880366e-06, "loss": 0.8797, "step": 6929 }, { "epoch": 0.75, "grad_norm": 1.7128938946104602, "learning_rate": 1.6107615733337061e-06, "loss": 0.8596, "step": 6930 }, { "epoch": 0.75, "grad_norm": 1.7589873126369537, "learning_rate": 1.6094817376362214e-06, "loss": 0.7933, "step": 6931 }, { "epoch": 0.75, "grad_norm": 1.6961945299549808, "learning_rate": 1.6082023130507628e-06, "loss": 0.8489, "step": 6932 }, { "epoch": 0.75, "grad_norm": 1.7444580569970618, "learning_rate": 1.6069232997324679e-06, "loss": 0.8084, "step": 6933 }, { "epoch": 0.75, "grad_norm": 1.7757612283445128, "learning_rate": 1.6056446978364215e-06, "loss": 0.8378, "step": 6934 }, { "epoch": 0.75, "grad_norm": 1.7071203088551532, "learning_rate": 1.6043665075176561e-06, "loss": 0.8658, "step": 6935 }, { "epoch": 0.75, "grad_norm": 1.6705363177105303, "learning_rate": 1.6030887289311604e-06, "loss": 0.8658, "step": 6936 }, { "epoch": 0.75, "grad_norm": 1.6747912177989717, "learning_rate": 1.6018113622318666e-06, "loss": 0.8301, "step": 6937 }, { "epoch": 0.75, "grad_norm": 1.9154193765964813, "learning_rate": 1.6005344075746586e-06, "loss": 0.9773, "step": 6938 }, { "epoch": 0.75, "grad_norm": 1.770323721074381, "learning_rate": 1.5992578651143742e-06, "loss": 0.9029, "step": 6939 }, { "epoch": 0.75, "grad_norm": 1.8890726432441385, "learning_rate": 1.5979817350057991e-06, "loss": 0.9035, "step": 6940 }, { "epoch": 0.75, "grad_norm": 1.6953706263399888, "learning_rate": 1.5967060174036652e-06, "loss": 0.9298, "step": 6941 }, { "epoch": 0.75, "grad_norm": 1.6848022856465479, "learning_rate": 1.59543071246266e-06, "loss": 0.8803, "step": 6942 }, { "epoch": 0.75, "grad_norm": 1.7171093647064752, "learning_rate": 1.5941558203374198e-06, "loss": 0.9093, "step": 6943 }, { "epoch": 0.75, "grad_norm": 1.6756632979798511, "learning_rate": 1.5928813411825267e-06, "loss": 0.9145, "step": 6944 }, { "epoch": 0.75, "grad_norm": 0.8209436873978789, "learning_rate": 1.591607275152517e-06, "loss": 1.0502, "step": 6945 }, { "epoch": 0.75, "grad_norm": 1.8936097643129102, "learning_rate": 1.5903336224018774e-06, "loss": 0.8349, "step": 6946 }, { "epoch": 0.75, "grad_norm": 1.781170574716261, "learning_rate": 1.5890603830850403e-06, "loss": 0.9187, "step": 6947 }, { "epoch": 0.75, "grad_norm": 1.710306038367912, "learning_rate": 1.58778755735639e-06, "loss": 0.9744, "step": 6948 }, { "epoch": 0.75, "grad_norm": 1.8514330108392898, "learning_rate": 1.586515145370262e-06, "loss": 0.9615, "step": 6949 }, { "epoch": 0.75, "grad_norm": 1.700461809792858, "learning_rate": 1.5852431472809426e-06, "loss": 0.9278, "step": 6950 }, { "epoch": 0.75, "grad_norm": 1.7220535444817868, "learning_rate": 1.5839715632426623e-06, "loss": 0.834, "step": 6951 }, { "epoch": 0.75, "grad_norm": 1.7726919873226807, "learning_rate": 1.582700393409608e-06, "loss": 0.8332, "step": 6952 }, { "epoch": 0.75, "grad_norm": 1.7138274532707232, "learning_rate": 1.5814296379359107e-06, "loss": 0.9264, "step": 6953 }, { "epoch": 0.75, "grad_norm": 1.6806685161949746, "learning_rate": 
1.5801592969756558e-06, "loss": 0.9383, "step": 6954 }, { "epoch": 0.75, "grad_norm": 1.7484392381234368, "learning_rate": 1.5788893706828773e-06, "loss": 0.8985, "step": 6955 }, { "epoch": 0.75, "grad_norm": 1.721822778145402, "learning_rate": 1.5776198592115555e-06, "loss": 0.8841, "step": 6956 }, { "epoch": 0.75, "grad_norm": 1.753028902912283, "learning_rate": 1.5763507627156265e-06, "loss": 0.9235, "step": 6957 }, { "epoch": 0.75, "grad_norm": 1.6317271525181802, "learning_rate": 1.5750820813489688e-06, "loss": 0.826, "step": 6958 }, { "epoch": 0.75, "grad_norm": 0.8122242947217784, "learning_rate": 1.5738138152654175e-06, "loss": 1.0678, "step": 6959 }, { "epoch": 0.75, "grad_norm": 1.6383249368385338, "learning_rate": 1.572545964618752e-06, "loss": 0.8806, "step": 6960 }, { "epoch": 0.75, "grad_norm": 1.774295674843163, "learning_rate": 1.5712785295627037e-06, "loss": 0.9169, "step": 6961 }, { "epoch": 0.75, "grad_norm": 1.728543292789014, "learning_rate": 1.5700115102509562e-06, "loss": 0.9564, "step": 6962 }, { "epoch": 0.75, "grad_norm": 1.7565814655714278, "learning_rate": 1.5687449068371368e-06, "loss": 0.9207, "step": 6963 }, { "epoch": 0.75, "grad_norm": 1.7620906210684442, "learning_rate": 1.5674787194748264e-06, "loss": 0.9111, "step": 6964 }, { "epoch": 0.75, "grad_norm": 1.72802578801936, "learning_rate": 1.566212948317557e-06, "loss": 0.9569, "step": 6965 }, { "epoch": 0.75, "grad_norm": 0.8009215546920747, "learning_rate": 1.5649475935188034e-06, "loss": 1.0538, "step": 6966 }, { "epoch": 0.75, "grad_norm": 1.7618794239157773, "learning_rate": 1.5636826552319973e-06, "loss": 0.8709, "step": 6967 }, { "epoch": 0.75, "grad_norm": 1.6753239312225756, "learning_rate": 1.5624181336105188e-06, "loss": 0.9109, "step": 6968 }, { "epoch": 0.75, "grad_norm": 1.7170890255168159, "learning_rate": 1.5611540288076892e-06, "loss": 1.0002, "step": 6969 }, { "epoch": 0.75, "grad_norm": 1.742737651758367, "learning_rate": 1.5598903409767895e-06, "loss": 0.8993, "step": 6970 }, { "epoch": 0.75, "grad_norm": 1.7780289816749608, "learning_rate": 1.5586270702710477e-06, "loss": 0.9139, "step": 6971 }, { "epoch": 0.75, "grad_norm": 1.6824981207035556, "learning_rate": 1.5573642168436358e-06, "loss": 0.8561, "step": 6972 }, { "epoch": 0.75, "grad_norm": 1.7948615409894175, "learning_rate": 1.5561017808476815e-06, "loss": 0.8955, "step": 6973 }, { "epoch": 0.75, "grad_norm": 1.8320705521433387, "learning_rate": 1.5548397624362605e-06, "loss": 0.8727, "step": 6974 }, { "epoch": 0.75, "grad_norm": 1.6452279104494536, "learning_rate": 1.5535781617623942e-06, "loss": 0.8845, "step": 6975 }, { "epoch": 0.75, "grad_norm": 1.7020957105647088, "learning_rate": 1.5523169789790576e-06, "loss": 0.8593, "step": 6976 }, { "epoch": 0.75, "grad_norm": 1.8085986244526793, "learning_rate": 1.5510562142391744e-06, "loss": 0.9049, "step": 6977 }, { "epoch": 0.75, "grad_norm": 1.7592665236511282, "learning_rate": 1.549795867695616e-06, "loss": 0.9266, "step": 6978 }, { "epoch": 0.75, "grad_norm": 0.7920620028604474, "learning_rate": 1.5485359395012012e-06, "loss": 1.0587, "step": 6979 }, { "epoch": 0.75, "grad_norm": 1.7050143549083525, "learning_rate": 1.547276429808703e-06, "loss": 0.8679, "step": 6980 }, { "epoch": 0.75, "grad_norm": 1.7257886152298951, "learning_rate": 1.5460173387708427e-06, "loss": 0.9222, "step": 6981 }, { "epoch": 0.75, "grad_norm": 1.7152734046663634, "learning_rate": 1.544758666540286e-06, "loss": 0.9729, "step": 6982 }, { "epoch": 0.75, "grad_norm": 1.7310456787021276, "learning_rate": 
1.5435004132696547e-06, "loss": 0.8264, "step": 6983 }, { "epoch": 0.75, "grad_norm": 1.81729238627351, "learning_rate": 1.5422425791115132e-06, "loss": 0.9172, "step": 6984 }, { "epoch": 0.75, "grad_norm": 0.8002060094670137, "learning_rate": 1.5409851642183792e-06, "loss": 1.0436, "step": 6985 }, { "epoch": 0.75, "grad_norm": 1.9106706803761853, "learning_rate": 1.5397281687427212e-06, "loss": 0.9295, "step": 6986 }, { "epoch": 0.75, "grad_norm": 0.7687421945250164, "learning_rate": 1.5384715928369503e-06, "loss": 1.0349, "step": 6987 }, { "epoch": 0.75, "grad_norm": 1.7422877874302911, "learning_rate": 1.5372154366534325e-06, "loss": 0.9174, "step": 6988 }, { "epoch": 0.75, "grad_norm": 1.8252831550986466, "learning_rate": 1.5359597003444827e-06, "loss": 0.9896, "step": 6989 }, { "epoch": 0.75, "grad_norm": 1.7886248774729665, "learning_rate": 1.5347043840623617e-06, "loss": 0.8882, "step": 6990 }, { "epoch": 0.75, "grad_norm": 1.716980050286474, "learning_rate": 1.5334494879592786e-06, "loss": 0.8166, "step": 6991 }, { "epoch": 0.75, "grad_norm": 1.6884545459077325, "learning_rate": 1.5321950121873968e-06, "loss": 0.8345, "step": 6992 }, { "epoch": 0.75, "grad_norm": 1.6879131541119972, "learning_rate": 1.5309409568988264e-06, "loss": 0.8167, "step": 6993 }, { "epoch": 0.75, "grad_norm": 1.6796183951825223, "learning_rate": 1.5296873222456232e-06, "loss": 0.8919, "step": 6994 }, { "epoch": 0.75, "grad_norm": 1.7680887274351678, "learning_rate": 1.528434108379796e-06, "loss": 0.8589, "step": 6995 }, { "epoch": 0.75, "grad_norm": 1.8847060692184536, "learning_rate": 1.5271813154533033e-06, "loss": 0.9908, "step": 6996 }, { "epoch": 0.75, "grad_norm": 1.931416564405431, "learning_rate": 1.5259289436180468e-06, "loss": 0.9782, "step": 6997 }, { "epoch": 0.75, "grad_norm": 1.7510080313844953, "learning_rate": 1.524676993025883e-06, "loss": 0.8821, "step": 6998 }, { "epoch": 0.75, "grad_norm": 1.8177369009062037, "learning_rate": 1.5234254638286183e-06, "loss": 0.8809, "step": 6999 }, { "epoch": 0.75, "grad_norm": 1.8762523144608232, "learning_rate": 1.5221743561779988e-06, "loss": 0.8537, "step": 7000 }, { "epoch": 0.75, "grad_norm": 2.37520519563173, "learning_rate": 1.520923670225728e-06, "loss": 0.9237, "step": 7001 }, { "epoch": 0.75, "grad_norm": 1.7973831831114462, "learning_rate": 1.5196734061234581e-06, "loss": 0.8955, "step": 7002 }, { "epoch": 0.75, "grad_norm": 1.7370570396784173, "learning_rate": 1.5184235640227846e-06, "loss": 0.8164, "step": 7003 }, { "epoch": 0.75, "grad_norm": 1.8576582802817276, "learning_rate": 1.5171741440752568e-06, "loss": 0.9282, "step": 7004 }, { "epoch": 0.75, "grad_norm": 1.777703248050023, "learning_rate": 1.5159251464323732e-06, "loss": 0.8657, "step": 7005 }, { "epoch": 0.75, "grad_norm": 2.086855378505545, "learning_rate": 1.5146765712455747e-06, "loss": 0.8705, "step": 7006 }, { "epoch": 0.75, "grad_norm": 1.7993501356761152, "learning_rate": 1.5134284186662585e-06, "loss": 0.8701, "step": 7007 }, { "epoch": 0.75, "grad_norm": 1.6677224561185113, "learning_rate": 1.5121806888457674e-06, "loss": 0.8851, "step": 7008 }, { "epoch": 0.75, "grad_norm": 0.7645903764555948, "learning_rate": 1.5109333819353904e-06, "loss": 1.0185, "step": 7009 }, { "epoch": 0.75, "grad_norm": 1.7736482499444295, "learning_rate": 1.5096864980863718e-06, "loss": 0.8592, "step": 7010 }, { "epoch": 0.75, "grad_norm": 1.738990667294761, "learning_rate": 1.5084400374498964e-06, "loss": 0.9233, "step": 7011 }, { "epoch": 0.75, "grad_norm": 0.8197102317284632, "learning_rate": 
1.507194000177105e-06, "loss": 1.0702, "step": 7012 }, { "epoch": 0.75, "grad_norm": 1.7982266840924497, "learning_rate": 1.5059483864190817e-06, "loss": 0.8755, "step": 7013 }, { "epoch": 0.75, "grad_norm": 1.6627801880163413, "learning_rate": 1.5047031963268617e-06, "loss": 0.9415, "step": 7014 }, { "epoch": 0.75, "grad_norm": 1.8317666729043471, "learning_rate": 1.503458430051431e-06, "loss": 0.8968, "step": 7015 }, { "epoch": 0.75, "grad_norm": 1.72987622534171, "learning_rate": 1.5022140877437186e-06, "loss": 0.9175, "step": 7016 }, { "epoch": 0.75, "grad_norm": 1.7600079737548184, "learning_rate": 1.500970169554608e-06, "loss": 0.857, "step": 7017 }, { "epoch": 0.75, "grad_norm": 2.0117934657736227, "learning_rate": 1.4997266756349265e-06, "loss": 0.8898, "step": 7018 }, { "epoch": 0.75, "grad_norm": 0.8087090775029488, "learning_rate": 1.4984836061354524e-06, "loss": 1.025, "step": 7019 }, { "epoch": 0.75, "grad_norm": 1.8811138831201464, "learning_rate": 1.497240961206914e-06, "loss": 0.9023, "step": 7020 }, { "epoch": 0.75, "grad_norm": 1.7986943883005735, "learning_rate": 1.4959987409999855e-06, "loss": 0.8772, "step": 7021 }, { "epoch": 0.75, "grad_norm": 1.7134760992588565, "learning_rate": 1.4947569456652877e-06, "loss": 0.8729, "step": 7022 }, { "epoch": 0.75, "grad_norm": 1.7531675057016387, "learning_rate": 1.4935155753533947e-06, "loss": 0.9017, "step": 7023 }, { "epoch": 0.76, "grad_norm": 1.6880249967291248, "learning_rate": 1.4922746302148283e-06, "loss": 0.8957, "step": 7024 }, { "epoch": 0.76, "grad_norm": 0.8014702835345631, "learning_rate": 1.4910341104000548e-06, "loss": 1.0335, "step": 7025 }, { "epoch": 0.76, "grad_norm": 1.7087155617443626, "learning_rate": 1.4897940160594926e-06, "loss": 0.8772, "step": 7026 }, { "epoch": 0.76, "grad_norm": 1.8562933028465036, "learning_rate": 1.4885543473435088e-06, "loss": 0.9163, "step": 7027 }, { "epoch": 0.76, "grad_norm": 1.6664291551411319, "learning_rate": 1.4873151044024149e-06, "loss": 0.8162, "step": 7028 }, { "epoch": 0.76, "grad_norm": 1.6848821856691956, "learning_rate": 1.4860762873864741e-06, "loss": 0.9225, "step": 7029 }, { "epoch": 0.76, "grad_norm": 1.845625665794136, "learning_rate": 1.4848378964458997e-06, "loss": 0.8225, "step": 7030 }, { "epoch": 0.76, "grad_norm": 1.6714334849685386, "learning_rate": 1.4835999317308492e-06, "loss": 0.8776, "step": 7031 }, { "epoch": 0.76, "grad_norm": 1.5663729766136538, "learning_rate": 1.4823623933914277e-06, "loss": 0.8214, "step": 7032 }, { "epoch": 0.76, "grad_norm": 1.7089695380977425, "learning_rate": 1.4811252815776955e-06, "loss": 0.847, "step": 7033 }, { "epoch": 0.76, "grad_norm": 1.633793044996098, "learning_rate": 1.479888596439652e-06, "loss": 0.8537, "step": 7034 }, { "epoch": 0.76, "grad_norm": 1.7454438218440775, "learning_rate": 1.4786523381272521e-06, "loss": 0.9549, "step": 7035 }, { "epoch": 0.76, "grad_norm": 0.7765962433907524, "learning_rate": 1.4774165067903984e-06, "loss": 1.0476, "step": 7036 }, { "epoch": 0.76, "grad_norm": 1.7271359020886792, "learning_rate": 1.4761811025789352e-06, "loss": 0.9075, "step": 7037 }, { "epoch": 0.76, "grad_norm": 1.7392364165148841, "learning_rate": 1.4749461256426617e-06, "loss": 0.8996, "step": 7038 }, { "epoch": 0.76, "grad_norm": 1.8701356227209658, "learning_rate": 1.4737115761313249e-06, "loss": 0.9331, "step": 7039 }, { "epoch": 0.76, "grad_norm": 1.658571385100619, "learning_rate": 1.4724774541946145e-06, "loss": 0.9195, "step": 7040 }, { "epoch": 0.76, "grad_norm": 1.6540504394261784, "learning_rate": 
1.4712437599821743e-06, "loss": 0.8746, "step": 7041 }, { "epoch": 0.76, "grad_norm": 0.8527748439086306, "learning_rate": 1.4700104936435955e-06, "loss": 1.0221, "step": 7042 }, { "epoch": 0.76, "grad_norm": 1.74978944045805, "learning_rate": 1.468777655328414e-06, "loss": 0.9274, "step": 7043 }, { "epoch": 0.76, "grad_norm": 1.655168189879266, "learning_rate": 1.467545245186114e-06, "loss": 0.8245, "step": 7044 }, { "epoch": 0.76, "grad_norm": 1.7013224245299177, "learning_rate": 1.4663132633661315e-06, "loss": 0.9366, "step": 7045 }, { "epoch": 0.76, "grad_norm": 0.7937662254172413, "learning_rate": 1.4650817100178494e-06, "loss": 1.0461, "step": 7046 }, { "epoch": 0.76, "grad_norm": 1.7950435440689796, "learning_rate": 1.4638505852905955e-06, "loss": 0.9152, "step": 7047 }, { "epoch": 0.76, "grad_norm": 1.825621835578695, "learning_rate": 1.4626198893336507e-06, "loss": 0.8963, "step": 7048 }, { "epoch": 0.76, "grad_norm": 1.6285456283843853, "learning_rate": 1.4613896222962376e-06, "loss": 0.8471, "step": 7049 }, { "epoch": 0.76, "grad_norm": 1.6962219734541413, "learning_rate": 1.4601597843275328e-06, "loss": 0.9119, "step": 7050 }, { "epoch": 0.76, "grad_norm": 1.7539775794789876, "learning_rate": 1.4589303755766586e-06, "loss": 0.8411, "step": 7051 }, { "epoch": 0.76, "grad_norm": 1.7764619057409665, "learning_rate": 1.457701396192685e-06, "loss": 0.8666, "step": 7052 }, { "epoch": 0.76, "grad_norm": 1.6101294112231987, "learning_rate": 1.4564728463246274e-06, "loss": 0.8836, "step": 7053 }, { "epoch": 0.76, "grad_norm": 1.6723515291619309, "learning_rate": 1.4552447261214536e-06, "loss": 1.0025, "step": 7054 }, { "epoch": 0.76, "grad_norm": 1.7730777264588926, "learning_rate": 1.4540170357320788e-06, "loss": 0.8679, "step": 7055 }, { "epoch": 0.76, "grad_norm": 1.7547192806745964, "learning_rate": 1.4527897753053621e-06, "loss": 0.9042, "step": 7056 }, { "epoch": 0.76, "grad_norm": 1.728649021008933, "learning_rate": 1.4515629449901142e-06, "loss": 0.8777, "step": 7057 }, { "epoch": 0.76, "grad_norm": 1.7011515005087847, "learning_rate": 1.4503365449350938e-06, "loss": 0.8479, "step": 7058 }, { "epoch": 0.76, "grad_norm": 1.7414315692740892, "learning_rate": 1.4491105752890034e-06, "loss": 0.9368, "step": 7059 }, { "epoch": 0.76, "grad_norm": 1.7345481907189801, "learning_rate": 1.4478850362004976e-06, "loss": 0.852, "step": 7060 }, { "epoch": 0.76, "grad_norm": 1.6700186698334887, "learning_rate": 1.4466599278181786e-06, "loss": 0.8989, "step": 7061 }, { "epoch": 0.76, "grad_norm": 1.6931873727662659, "learning_rate": 1.4454352502905922e-06, "loss": 0.868, "step": 7062 }, { "epoch": 0.76, "grad_norm": 1.7775204931903898, "learning_rate": 1.4442110037662377e-06, "loss": 0.9074, "step": 7063 }, { "epoch": 0.76, "grad_norm": 1.7828795903853583, "learning_rate": 1.4429871883935577e-06, "loss": 0.8461, "step": 7064 }, { "epoch": 0.76, "grad_norm": 1.7814603854286049, "learning_rate": 1.441763804320942e-06, "loss": 0.8962, "step": 7065 }, { "epoch": 0.76, "grad_norm": 1.695409950223556, "learning_rate": 1.440540851696733e-06, "loss": 0.8856, "step": 7066 }, { "epoch": 0.76, "grad_norm": 1.705997000916623, "learning_rate": 1.4393183306692176e-06, "loss": 0.8671, "step": 7067 }, { "epoch": 0.76, "grad_norm": 1.7019530298357515, "learning_rate": 1.4380962413866289e-06, "loss": 0.8279, "step": 7068 }, { "epoch": 0.76, "grad_norm": 1.7242429954254317, "learning_rate": 1.4368745839971509e-06, "loss": 0.8827, "step": 7069 }, { "epoch": 0.76, "grad_norm": 1.6419293694439554, "learning_rate": 
1.4356533586489152e-06, "loss": 0.9082, "step": 7070 }, { "epoch": 0.76, "grad_norm": 1.7414080129648566, "learning_rate": 1.4344325654899965e-06, "loss": 0.87, "step": 7071 }, { "epoch": 0.76, "grad_norm": 1.721749899847165, "learning_rate": 1.4332122046684211e-06, "loss": 0.9533, "step": 7072 }, { "epoch": 0.76, "grad_norm": 1.865263929000295, "learning_rate": 1.4319922763321641e-06, "loss": 0.9077, "step": 7073 }, { "epoch": 0.76, "grad_norm": 1.677624904590976, "learning_rate": 1.430772780629145e-06, "loss": 0.9316, "step": 7074 }, { "epoch": 0.76, "grad_norm": 1.6404621078309713, "learning_rate": 1.4295537177072289e-06, "loss": 0.9339, "step": 7075 }, { "epoch": 0.76, "grad_norm": 1.679548717134846, "learning_rate": 1.4283350877142343e-06, "loss": 0.9049, "step": 7076 }, { "epoch": 0.76, "grad_norm": 1.8451165980034023, "learning_rate": 1.4271168907979249e-06, "loss": 1.011, "step": 7077 }, { "epoch": 0.76, "grad_norm": 1.823830692431705, "learning_rate": 1.4258991271060086e-06, "loss": 0.902, "step": 7078 }, { "epoch": 0.76, "grad_norm": 1.6514477058876962, "learning_rate": 1.4246817967861464e-06, "loss": 0.9112, "step": 7079 }, { "epoch": 0.76, "grad_norm": 1.7877311718636557, "learning_rate": 1.4234648999859413e-06, "loss": 0.7937, "step": 7080 }, { "epoch": 0.76, "grad_norm": 1.7102862580260139, "learning_rate": 1.4222484368529472e-06, "loss": 0.8542, "step": 7081 }, { "epoch": 0.76, "grad_norm": 1.6522506584506498, "learning_rate": 1.4210324075346654e-06, "loss": 0.8632, "step": 7082 }, { "epoch": 0.76, "grad_norm": 1.685773869177117, "learning_rate": 1.4198168121785416e-06, "loss": 0.8619, "step": 7083 }, { "epoch": 0.76, "grad_norm": 1.7058197976865817, "learning_rate": 1.418601650931974e-06, "loss": 0.8104, "step": 7084 }, { "epoch": 0.76, "grad_norm": 1.640643042487272, "learning_rate": 1.417386923942301e-06, "loss": 0.8195, "step": 7085 }, { "epoch": 0.76, "grad_norm": 1.6609778740414582, "learning_rate": 1.4161726313568165e-06, "loss": 0.8795, "step": 7086 }, { "epoch": 0.76, "grad_norm": 1.8171929364411803, "learning_rate": 1.4149587733227543e-06, "loss": 0.8257, "step": 7087 }, { "epoch": 0.76, "grad_norm": 0.7990363265696868, "learning_rate": 1.4137453499872999e-06, "loss": 1.0239, "step": 7088 }, { "epoch": 0.76, "grad_norm": 1.7272460015542976, "learning_rate": 1.4125323614975878e-06, "loss": 0.9006, "step": 7089 }, { "epoch": 0.76, "grad_norm": 1.6502405281089467, "learning_rate": 1.4113198080006929e-06, "loss": 0.8663, "step": 7090 }, { "epoch": 0.76, "grad_norm": 1.6613112569467894, "learning_rate": 1.4101076896436427e-06, "loss": 0.9191, "step": 7091 }, { "epoch": 0.76, "grad_norm": 1.760510031326903, "learning_rate": 1.4088960065734137e-06, "loss": 0.8918, "step": 7092 }, { "epoch": 0.76, "grad_norm": 1.8004943788997478, "learning_rate": 1.4076847589369225e-06, "loss": 0.9684, "step": 7093 }, { "epoch": 0.76, "grad_norm": 1.7314136892005974, "learning_rate": 1.406473946881039e-06, "loss": 0.939, "step": 7094 }, { "epoch": 0.76, "grad_norm": 1.718912271091685, "learning_rate": 1.4052635705525813e-06, "loss": 0.8655, "step": 7095 }, { "epoch": 0.76, "grad_norm": 1.7450747248848246, "learning_rate": 1.4040536300983053e-06, "loss": 0.9333, "step": 7096 }, { "epoch": 0.76, "grad_norm": 1.6689581339057178, "learning_rate": 1.4028441256649238e-06, "loss": 0.8489, "step": 7097 }, { "epoch": 0.76, "grad_norm": 1.7440651038903439, "learning_rate": 1.401635057399095e-06, "loss": 0.8751, "step": 7098 }, { "epoch": 0.76, "grad_norm": 0.8000199991590979, "learning_rate": 
1.4004264254474193e-06, "loss": 1.049, "step": 7099 }, { "epoch": 0.76, "grad_norm": 1.7741082713580016, "learning_rate": 1.3992182299564493e-06, "loss": 0.8593, "step": 7100 }, { "epoch": 0.76, "grad_norm": 1.7437981846703563, "learning_rate": 1.3980104710726843e-06, "loss": 0.852, "step": 7101 }, { "epoch": 0.76, "grad_norm": 1.8463664216158018, "learning_rate": 1.3968031489425659e-06, "loss": 0.8804, "step": 7102 }, { "epoch": 0.76, "grad_norm": 1.7346995920068333, "learning_rate": 1.395596263712488e-06, "loss": 0.8102, "step": 7103 }, { "epoch": 0.76, "grad_norm": 1.8346774280396432, "learning_rate": 1.3943898155287905e-06, "loss": 0.8929, "step": 7104 }, { "epoch": 0.76, "grad_norm": 1.691891691998727, "learning_rate": 1.3931838045377587e-06, "loss": 0.8804, "step": 7105 }, { "epoch": 0.76, "grad_norm": 1.7306681765222538, "learning_rate": 1.3919782308856233e-06, "loss": 0.9053, "step": 7106 }, { "epoch": 0.76, "grad_norm": 1.7044878452262733, "learning_rate": 1.3907730947185665e-06, "loss": 0.8695, "step": 7107 }, { "epoch": 0.76, "grad_norm": 1.6179002405648044, "learning_rate": 1.3895683961827166e-06, "loss": 0.9345, "step": 7108 }, { "epoch": 0.76, "grad_norm": 1.7825899374075032, "learning_rate": 1.3883641354241439e-06, "loss": 0.905, "step": 7109 }, { "epoch": 0.76, "grad_norm": 1.8071416559410125, "learning_rate": 1.3871603125888705e-06, "loss": 0.8613, "step": 7110 }, { "epoch": 0.76, "grad_norm": 1.818590373318333, "learning_rate": 1.3859569278228668e-06, "loss": 0.8936, "step": 7111 }, { "epoch": 0.76, "grad_norm": 1.6924579673666171, "learning_rate": 1.3847539812720434e-06, "loss": 0.79, "step": 7112 }, { "epoch": 0.76, "grad_norm": 1.7076327554381066, "learning_rate": 1.3835514730822647e-06, "loss": 0.7969, "step": 7113 }, { "epoch": 0.76, "grad_norm": 1.7189194524465523, "learning_rate": 1.3823494033993363e-06, "loss": 0.8821, "step": 7114 }, { "epoch": 0.76, "grad_norm": 1.72156953665311, "learning_rate": 1.3811477723690148e-06, "loss": 0.8289, "step": 7115 }, { "epoch": 0.76, "grad_norm": 1.7166403067589227, "learning_rate": 1.3799465801370032e-06, "loss": 0.8031, "step": 7116 }, { "epoch": 0.77, "grad_norm": 1.6315823888977337, "learning_rate": 1.378745826848949e-06, "loss": 0.9167, "step": 7117 }, { "epoch": 0.77, "grad_norm": 1.8498857251118632, "learning_rate": 1.3775455126504466e-06, "loss": 0.933, "step": 7118 }, { "epoch": 0.77, "grad_norm": 1.8869209821413018, "learning_rate": 1.3763456376870387e-06, "loss": 0.9036, "step": 7119 }, { "epoch": 0.77, "grad_norm": 1.7205644818815182, "learning_rate": 1.3751462021042167e-06, "loss": 0.8209, "step": 7120 }, { "epoch": 0.77, "grad_norm": 1.670123607955723, "learning_rate": 1.373947206047413e-06, "loss": 0.9136, "step": 7121 }, { "epoch": 0.77, "grad_norm": 1.7509639984797516, "learning_rate": 1.3727486496620113e-06, "loss": 0.8942, "step": 7122 }, { "epoch": 0.77, "grad_norm": 1.7321404207335946, "learning_rate": 1.3715505330933426e-06, "loss": 0.8957, "step": 7123 }, { "epoch": 0.77, "grad_norm": 1.7066066536545388, "learning_rate": 1.3703528564866792e-06, "loss": 0.8687, "step": 7124 }, { "epoch": 0.77, "grad_norm": 1.67954504178895, "learning_rate": 1.3691556199872453e-06, "loss": 0.8247, "step": 7125 }, { "epoch": 0.77, "grad_norm": 1.7874813747642309, "learning_rate": 1.367958823740213e-06, "loss": 0.8825, "step": 7126 }, { "epoch": 0.77, "grad_norm": 1.694268184208488, "learning_rate": 1.366762467890692e-06, "loss": 0.9211, "step": 7127 }, { "epoch": 0.77, "grad_norm": 1.8146963494786443, "learning_rate": 
1.3655665525837475e-06, "loss": 0.8677, "step": 7128 }, { "epoch": 0.77, "grad_norm": 1.7000439038511104, "learning_rate": 1.3643710779643892e-06, "loss": 0.8259, "step": 7129 }, { "epoch": 0.77, "grad_norm": 1.7527552411412317, "learning_rate": 1.3631760441775704e-06, "loss": 0.956, "step": 7130 }, { "epoch": 0.77, "grad_norm": 1.7508120056334895, "learning_rate": 1.3619814513681944e-06, "loss": 0.8902, "step": 7131 }, { "epoch": 0.77, "grad_norm": 1.7394477838486113, "learning_rate": 1.3607872996811112e-06, "loss": 0.9244, "step": 7132 }, { "epoch": 0.77, "grad_norm": 1.6870177275022338, "learning_rate": 1.3595935892611123e-06, "loss": 0.8536, "step": 7133 }, { "epoch": 0.77, "grad_norm": 0.7698462856319925, "learning_rate": 1.3584003202529417e-06, "loss": 1.0608, "step": 7134 }, { "epoch": 0.77, "grad_norm": 1.7768129170651232, "learning_rate": 1.3572074928012878e-06, "loss": 0.8246, "step": 7135 }, { "epoch": 0.77, "grad_norm": 1.779485721514264, "learning_rate": 1.3560151070507827e-06, "loss": 0.9046, "step": 7136 }, { "epoch": 0.77, "grad_norm": 0.7910171414549776, "learning_rate": 1.3548231631460096e-06, "loss": 1.037, "step": 7137 }, { "epoch": 0.77, "grad_norm": 0.7725884225224293, "learning_rate": 1.3536316612314937e-06, "loss": 1.08, "step": 7138 }, { "epoch": 0.77, "grad_norm": 1.780118900891426, "learning_rate": 1.3524406014517116e-06, "loss": 0.9015, "step": 7139 }, { "epoch": 0.77, "grad_norm": 1.7316668702437834, "learning_rate": 1.3512499839510795e-06, "loss": 0.9509, "step": 7140 }, { "epoch": 0.77, "grad_norm": 1.649442707837179, "learning_rate": 1.3500598088739664e-06, "loss": 0.8908, "step": 7141 }, { "epoch": 0.77, "grad_norm": 1.6940994482813907, "learning_rate": 1.3488700763646862e-06, "loss": 0.87, "step": 7142 }, { "epoch": 0.77, "grad_norm": 0.8098702775948423, "learning_rate": 1.3476807865674951e-06, "loss": 1.0477, "step": 7143 }, { "epoch": 0.77, "grad_norm": 0.8070855001654088, "learning_rate": 1.3464919396266018e-06, "loss": 1.0366, "step": 7144 }, { "epoch": 0.77, "grad_norm": 1.7590045322739305, "learning_rate": 1.3453035356861544e-06, "loss": 0.8908, "step": 7145 }, { "epoch": 0.77, "grad_norm": 1.7726707818935807, "learning_rate": 1.3441155748902534e-06, "loss": 0.9473, "step": 7146 }, { "epoch": 0.77, "grad_norm": 1.7054039848118565, "learning_rate": 1.342928057382944e-06, "loss": 0.8981, "step": 7147 }, { "epoch": 0.77, "grad_norm": 1.7491444779381635, "learning_rate": 1.3417409833082157e-06, "loss": 0.9734, "step": 7148 }, { "epoch": 0.77, "grad_norm": 1.7032986220714532, "learning_rate": 1.340554352810003e-06, "loss": 0.8498, "step": 7149 }, { "epoch": 0.77, "grad_norm": 1.6138173839538594, "learning_rate": 1.3393681660321917e-06, "loss": 0.8324, "step": 7150 }, { "epoch": 0.77, "grad_norm": 1.787644550916954, "learning_rate": 1.3381824231186113e-06, "loss": 0.8859, "step": 7151 }, { "epoch": 0.77, "grad_norm": 0.8044161855049358, "learning_rate": 1.3369971242130354e-06, "loss": 1.0687, "step": 7152 }, { "epoch": 0.77, "grad_norm": 1.6363572998328166, "learning_rate": 1.3358122694591862e-06, "loss": 0.962, "step": 7153 }, { "epoch": 0.77, "grad_norm": 1.7640402449574797, "learning_rate": 1.3346278590007334e-06, "loss": 0.8958, "step": 7154 }, { "epoch": 0.77, "grad_norm": 1.7088826617174575, "learning_rate": 1.333443892981287e-06, "loss": 0.8884, "step": 7155 }, { "epoch": 0.77, "grad_norm": 1.6808109325821585, "learning_rate": 1.33226037154441e-06, "loss": 0.9792, "step": 7156 }, { "epoch": 0.77, "grad_norm": 0.7671699711691871, "learning_rate": 
1.3310772948336086e-06, "loss": 1.0044, "step": 7157 }, { "epoch": 0.77, "grad_norm": 1.7491708799811367, "learning_rate": 1.3298946629923337e-06, "loss": 0.8957, "step": 7158 }, { "epoch": 0.77, "grad_norm": 1.6872110482674947, "learning_rate": 1.3287124761639824e-06, "loss": 0.916, "step": 7159 }, { "epoch": 0.77, "grad_norm": 1.6939223139477135, "learning_rate": 1.327530734491902e-06, "loss": 0.8547, "step": 7160 }, { "epoch": 0.77, "grad_norm": 1.7859580856893522, "learning_rate": 1.3263494381193792e-06, "loss": 0.9199, "step": 7161 }, { "epoch": 0.77, "grad_norm": 1.6506799585137797, "learning_rate": 1.3251685871896519e-06, "loss": 0.8938, "step": 7162 }, { "epoch": 0.77, "grad_norm": 1.7931945275530317, "learning_rate": 1.3239881818459044e-06, "loss": 0.963, "step": 7163 }, { "epoch": 0.77, "grad_norm": 1.6349486341012596, "learning_rate": 1.322808222231261e-06, "loss": 0.9378, "step": 7164 }, { "epoch": 0.77, "grad_norm": 0.7906801964229425, "learning_rate": 1.3216287084887985e-06, "loss": 1.0656, "step": 7165 }, { "epoch": 0.77, "grad_norm": 1.8182989342485274, "learning_rate": 1.3204496407615374e-06, "loss": 0.9266, "step": 7166 }, { "epoch": 0.77, "grad_norm": 1.6600695546441302, "learning_rate": 1.3192710191924413e-06, "loss": 0.8201, "step": 7167 }, { "epoch": 0.77, "grad_norm": 1.7022127034849772, "learning_rate": 1.3180928439244233e-06, "loss": 0.9085, "step": 7168 }, { "epoch": 0.77, "grad_norm": 0.7851094496087841, "learning_rate": 1.3169151151003435e-06, "loss": 1.0329, "step": 7169 }, { "epoch": 0.77, "grad_norm": 1.7212717211180324, "learning_rate": 1.3157378328630027e-06, "loss": 0.8333, "step": 7170 }, { "epoch": 0.77, "grad_norm": 1.7625920264468666, "learning_rate": 1.3145609973551505e-06, "loss": 0.83, "step": 7171 }, { "epoch": 0.77, "grad_norm": 1.7236212450406387, "learning_rate": 1.3133846087194823e-06, "loss": 0.8883, "step": 7172 }, { "epoch": 0.77, "grad_norm": 1.7708630358750201, "learning_rate": 1.3122086670986422e-06, "loss": 0.9148, "step": 7173 }, { "epoch": 0.77, "grad_norm": 1.7447064767902556, "learning_rate": 1.3110331726352132e-06, "loss": 0.8909, "step": 7174 }, { "epoch": 0.77, "grad_norm": 1.946780965607682, "learning_rate": 1.3098581254717313e-06, "loss": 0.9584, "step": 7175 }, { "epoch": 0.77, "grad_norm": 1.7723214645731253, "learning_rate": 1.3086835257506719e-06, "loss": 0.8672, "step": 7176 }, { "epoch": 0.77, "grad_norm": 1.700055720138507, "learning_rate": 1.3075093736144612e-06, "loss": 0.9927, "step": 7177 }, { "epoch": 0.77, "grad_norm": 1.5799844376896903, "learning_rate": 1.30633566920547e-06, "loss": 0.8789, "step": 7178 }, { "epoch": 0.77, "grad_norm": 1.75596273495497, "learning_rate": 1.3051624126660134e-06, "loss": 0.952, "step": 7179 }, { "epoch": 0.77, "grad_norm": 1.6916138863707135, "learning_rate": 1.3039896041383505e-06, "loss": 0.9306, "step": 7180 }, { "epoch": 0.77, "grad_norm": 0.8000096446973306, "learning_rate": 1.3028172437646901e-06, "loss": 1.0416, "step": 7181 }, { "epoch": 0.77, "grad_norm": 1.6553935119011225, "learning_rate": 1.3016453316871868e-06, "loss": 0.8004, "step": 7182 }, { "epoch": 0.77, "grad_norm": 1.6611702843670078, "learning_rate": 1.3004738680479357e-06, "loss": 1.0027, "step": 7183 }, { "epoch": 0.77, "grad_norm": 1.7117314630019973, "learning_rate": 1.2993028529889817e-06, "loss": 0.853, "step": 7184 }, { "epoch": 0.77, "grad_norm": 1.759996530845333, "learning_rate": 1.2981322866523171e-06, "loss": 0.9686, "step": 7185 }, { "epoch": 0.77, "grad_norm": 1.8336636413304956, "learning_rate": 
1.2969621691798733e-06, "loss": 0.8435, "step": 7186 }, { "epoch": 0.77, "grad_norm": 1.7583826312310695, "learning_rate": 1.295792500713533e-06, "loss": 0.917, "step": 7187 }, { "epoch": 0.77, "grad_norm": 1.7013166412794325, "learning_rate": 1.2946232813951236e-06, "loss": 0.8528, "step": 7188 }, { "epoch": 0.77, "grad_norm": 0.7814776755600937, "learning_rate": 1.2934545113664142e-06, "loss": 1.0522, "step": 7189 }, { "epoch": 0.77, "grad_norm": 1.6847704344625294, "learning_rate": 1.2922861907691258e-06, "loss": 0.8978, "step": 7190 }, { "epoch": 0.77, "grad_norm": 1.6760121896866862, "learning_rate": 1.2911183197449183e-06, "loss": 0.8732, "step": 7191 }, { "epoch": 0.77, "grad_norm": 1.7694806264855285, "learning_rate": 1.2899508984354002e-06, "loss": 0.8387, "step": 7192 }, { "epoch": 0.77, "grad_norm": 1.6901784771049233, "learning_rate": 1.2887839269821262e-06, "loss": 0.8889, "step": 7193 }, { "epoch": 0.77, "grad_norm": 1.7684566186355903, "learning_rate": 1.2876174055265967e-06, "loss": 0.9128, "step": 7194 }, { "epoch": 0.77, "grad_norm": 1.7596035363775542, "learning_rate": 1.2864513342102536e-06, "loss": 0.9033, "step": 7195 }, { "epoch": 0.77, "grad_norm": 1.5638625115698919, "learning_rate": 1.285285713174489e-06, "loss": 0.8551, "step": 7196 }, { "epoch": 0.77, "grad_norm": 1.708286391534676, "learning_rate": 1.2841205425606396e-06, "loss": 0.9848, "step": 7197 }, { "epoch": 0.77, "grad_norm": 1.7394157424499348, "learning_rate": 1.2829558225099836e-06, "loss": 0.8693, "step": 7198 }, { "epoch": 0.77, "grad_norm": 1.818599205873659, "learning_rate": 1.2817915531637492e-06, "loss": 0.8766, "step": 7199 }, { "epoch": 0.77, "grad_norm": 1.6209794548139278, "learning_rate": 1.2806277346631085e-06, "loss": 0.7363, "step": 7200 }, { "epoch": 0.77, "grad_norm": 0.7814123130324737, "learning_rate": 1.279464367149178e-06, "loss": 1.0289, "step": 7201 }, { "epoch": 0.77, "grad_norm": 1.8237910862608124, "learning_rate": 1.2783014507630175e-06, "loss": 0.7712, "step": 7202 }, { "epoch": 0.77, "grad_norm": 1.7439312381746799, "learning_rate": 1.2771389856456374e-06, "loss": 0.9298, "step": 7203 }, { "epoch": 0.77, "grad_norm": 1.7132786996767635, "learning_rate": 1.275976971937991e-06, "loss": 0.8762, "step": 7204 }, { "epoch": 0.77, "grad_norm": 1.7062495412858127, "learning_rate": 1.2748154097809745e-06, "loss": 0.8416, "step": 7205 }, { "epoch": 0.77, "grad_norm": 1.7478705638405603, "learning_rate": 1.2736542993154316e-06, "loss": 0.8572, "step": 7206 }, { "epoch": 0.77, "grad_norm": 1.6966726612536829, "learning_rate": 1.2724936406821536e-06, "loss": 0.9788, "step": 7207 }, { "epoch": 0.77, "grad_norm": 1.768531197698784, "learning_rate": 1.2713334340218703e-06, "loss": 0.8916, "step": 7208 }, { "epoch": 0.77, "grad_norm": 1.6339292907640433, "learning_rate": 1.2701736794752645e-06, "loss": 0.962, "step": 7209 }, { "epoch": 0.78, "grad_norm": 1.7623883346738256, "learning_rate": 1.2690143771829572e-06, "loss": 0.8711, "step": 7210 }, { "epoch": 0.78, "grad_norm": 1.7175489762755871, "learning_rate": 1.267855527285521e-06, "loss": 0.9507, "step": 7211 }, { "epoch": 0.78, "grad_norm": 1.6672798221942142, "learning_rate": 1.266697129923467e-06, "loss": 0.8898, "step": 7212 }, { "epoch": 0.78, "grad_norm": 1.738222051448157, "learning_rate": 1.2655391852372583e-06, "loss": 0.9, "step": 7213 }, { "epoch": 0.78, "grad_norm": 1.7478545988833027, "learning_rate": 1.264381693367297e-06, "loss": 0.8855, "step": 7214 }, { "epoch": 0.78, "grad_norm": 1.892509551969094, "learning_rate": 
1.2632246544539333e-06, "loss": 0.9423, "step": 7215 }, { "epoch": 0.78, "grad_norm": 1.6997650773381268, "learning_rate": 1.2620680686374649e-06, "loss": 0.7884, "step": 7216 }, { "epoch": 0.78, "grad_norm": 1.7560487935186873, "learning_rate": 1.2609119360581278e-06, "loss": 0.8574, "step": 7217 }, { "epoch": 0.78, "grad_norm": 1.8371574345628348, "learning_rate": 1.2597562568561095e-06, "loss": 0.9396, "step": 7218 }, { "epoch": 0.78, "grad_norm": 1.8417726652325708, "learning_rate": 1.2586010311715408e-06, "loss": 0.8285, "step": 7219 }, { "epoch": 0.78, "grad_norm": 1.706859373676377, "learning_rate": 1.257446259144494e-06, "loss": 0.9043, "step": 7220 }, { "epoch": 0.78, "grad_norm": 1.662084758529687, "learning_rate": 1.2562919409149916e-06, "loss": 0.9641, "step": 7221 }, { "epoch": 0.78, "grad_norm": 1.6935184152409044, "learning_rate": 1.2551380766230003e-06, "loss": 0.7848, "step": 7222 }, { "epoch": 0.78, "grad_norm": 1.6638082644001209, "learning_rate": 1.253984666408425e-06, "loss": 0.9693, "step": 7223 }, { "epoch": 0.78, "grad_norm": 1.727243088227625, "learning_rate": 1.2528317104111226e-06, "loss": 0.8735, "step": 7224 }, { "epoch": 0.78, "grad_norm": 1.8144125837509333, "learning_rate": 1.2516792087708963e-06, "loss": 0.8958, "step": 7225 }, { "epoch": 0.78, "grad_norm": 1.6266378421540586, "learning_rate": 1.2505271616274861e-06, "loss": 0.9876, "step": 7226 }, { "epoch": 0.78, "grad_norm": 1.8140259703830728, "learning_rate": 1.2493755691205845e-06, "loss": 0.8344, "step": 7227 }, { "epoch": 0.78, "grad_norm": 1.6778111520543377, "learning_rate": 1.2482244313898268e-06, "loss": 0.8629, "step": 7228 }, { "epoch": 0.78, "grad_norm": 1.7874628393428538, "learning_rate": 1.247073748574789e-06, "loss": 0.8016, "step": 7229 }, { "epoch": 0.78, "grad_norm": 1.7162557196400536, "learning_rate": 1.2459235208149984e-06, "loss": 0.8904, "step": 7230 }, { "epoch": 0.78, "grad_norm": 1.7612406123528435, "learning_rate": 1.2447737482499245e-06, "loss": 0.8543, "step": 7231 }, { "epoch": 0.78, "grad_norm": 0.8234760461342195, "learning_rate": 1.24362443101898e-06, "loss": 1.0432, "step": 7232 }, { "epoch": 0.78, "grad_norm": 1.6212559871747572, "learning_rate": 1.2424755692615214e-06, "loss": 0.8596, "step": 7233 }, { "epoch": 0.78, "grad_norm": 1.8629887040681894, "learning_rate": 1.2413271631168544e-06, "loss": 0.9488, "step": 7234 }, { "epoch": 0.78, "grad_norm": 1.735056008313184, "learning_rate": 1.2401792127242284e-06, "loss": 0.8535, "step": 7235 }, { "epoch": 0.78, "grad_norm": 1.7909709772165332, "learning_rate": 1.2390317182228334e-06, "loss": 0.9436, "step": 7236 }, { "epoch": 0.78, "grad_norm": 1.7153653017902766, "learning_rate": 1.2378846797518091e-06, "loss": 0.978, "step": 7237 }, { "epoch": 0.78, "grad_norm": 1.7488306602794066, "learning_rate": 1.2367380974502386e-06, "loss": 0.928, "step": 7238 }, { "epoch": 0.78, "grad_norm": 1.8985655766062213, "learning_rate": 1.2355919714571457e-06, "loss": 0.8793, "step": 7239 }, { "epoch": 0.78, "grad_norm": 1.7847978891670986, "learning_rate": 1.234446301911506e-06, "loss": 0.8729, "step": 7240 }, { "epoch": 0.78, "grad_norm": 1.7058192334055207, "learning_rate": 1.2333010889522328e-06, "loss": 0.8262, "step": 7241 }, { "epoch": 0.78, "grad_norm": 1.8251233560909994, "learning_rate": 1.2321563327181885e-06, "loss": 0.8885, "step": 7242 }, { "epoch": 0.78, "grad_norm": 1.6889597477983211, "learning_rate": 1.2310120333481795e-06, "loss": 0.8388, "step": 7243 }, { "epoch": 0.78, "grad_norm": 1.7743877453412082, "learning_rate": 
1.229868190980955e-06, "loss": 0.8893, "step": 7244 }, { "epoch": 0.78, "grad_norm": 1.9246085897594924, "learning_rate": 1.2287248057552094e-06, "loss": 0.8424, "step": 7245 }, { "epoch": 0.78, "grad_norm": 1.6610038855476332, "learning_rate": 1.2275818778095821e-06, "loss": 0.8882, "step": 7246 }, { "epoch": 0.78, "grad_norm": 1.6975806634638708, "learning_rate": 1.226439407282659e-06, "loss": 0.9154, "step": 7247 }, { "epoch": 0.78, "grad_norm": 1.8990647885360525, "learning_rate": 1.225297394312966e-06, "loss": 0.9109, "step": 7248 }, { "epoch": 0.78, "grad_norm": 1.793848435127848, "learning_rate": 1.224155839038977e-06, "loss": 0.8424, "step": 7249 }, { "epoch": 0.78, "grad_norm": 1.617932472610186, "learning_rate": 1.2230147415991117e-06, "loss": 0.8161, "step": 7250 }, { "epoch": 0.78, "grad_norm": 1.7080856336516712, "learning_rate": 1.2218741021317282e-06, "loss": 0.8861, "step": 7251 }, { "epoch": 0.78, "grad_norm": 1.7051092833788617, "learning_rate": 1.2207339207751356e-06, "loss": 0.8134, "step": 7252 }, { "epoch": 0.78, "grad_norm": 1.850486419772358, "learning_rate": 1.2195941976675869e-06, "loss": 0.8992, "step": 7253 }, { "epoch": 0.78, "grad_norm": 1.795881537220781, "learning_rate": 1.2184549329472717e-06, "loss": 0.9301, "step": 7254 }, { "epoch": 0.78, "grad_norm": 1.6230241615577008, "learning_rate": 1.2173161267523332e-06, "loss": 0.8836, "step": 7255 }, { "epoch": 0.78, "grad_norm": 1.676842288983878, "learning_rate": 1.2161777792208563e-06, "loss": 0.9149, "step": 7256 }, { "epoch": 0.78, "grad_norm": 1.8035981244138628, "learning_rate": 1.2150398904908673e-06, "loss": 0.9763, "step": 7257 }, { "epoch": 0.78, "grad_norm": 1.715091564754427, "learning_rate": 1.2139024607003401e-06, "loss": 0.8615, "step": 7258 }, { "epoch": 0.78, "grad_norm": 1.620080625460703, "learning_rate": 1.2127654899871937e-06, "loss": 0.8719, "step": 7259 }, { "epoch": 0.78, "grad_norm": 1.6924009158066093, "learning_rate": 1.2116289784892872e-06, "loss": 0.8704, "step": 7260 }, { "epoch": 0.78, "grad_norm": 1.8601010629438974, "learning_rate": 1.210492926344427e-06, "loss": 0.8966, "step": 7261 }, { "epoch": 0.78, "grad_norm": 0.8179378849969926, "learning_rate": 1.2093573336903652e-06, "loss": 1.0601, "step": 7262 }, { "epoch": 0.78, "grad_norm": 1.8592943615146251, "learning_rate": 1.2082222006647942e-06, "loss": 0.9687, "step": 7263 }, { "epoch": 0.78, "grad_norm": 1.7073166809431428, "learning_rate": 1.207087527405355e-06, "loss": 0.9387, "step": 7264 }, { "epoch": 0.78, "grad_norm": 1.729277453216935, "learning_rate": 1.2059533140496277e-06, "loss": 0.8445, "step": 7265 }, { "epoch": 0.78, "grad_norm": 1.7487542353354095, "learning_rate": 1.2048195607351421e-06, "loss": 0.9179, "step": 7266 }, { "epoch": 0.78, "grad_norm": 1.819094323175927, "learning_rate": 1.2036862675993678e-06, "loss": 0.8479, "step": 7267 }, { "epoch": 0.78, "grad_norm": 1.5843550845369212, "learning_rate": 1.2025534347797214e-06, "loss": 0.8669, "step": 7268 }, { "epoch": 0.78, "grad_norm": 1.7613961938061535, "learning_rate": 1.201421062413564e-06, "loss": 0.9019, "step": 7269 }, { "epoch": 0.78, "grad_norm": 0.8018674362679901, "learning_rate": 1.2002891506381976e-06, "loss": 1.0271, "step": 7270 }, { "epoch": 0.78, "grad_norm": 1.705159139983316, "learning_rate": 1.199157699590872e-06, "loss": 0.8579, "step": 7271 }, { "epoch": 0.78, "grad_norm": 1.8121547798380657, "learning_rate": 1.1980267094087777e-06, "loss": 0.8958, "step": 7272 }, { "epoch": 0.78, "grad_norm": 1.7082727224614864, "learning_rate": 
1.1968961802290524e-06, "loss": 0.848, "step": 7273 }, { "epoch": 0.78, "grad_norm": 1.8063511704123212, "learning_rate": 1.1957661121887782e-06, "loss": 0.8267, "step": 7274 }, { "epoch": 0.78, "grad_norm": 1.727891629099468, "learning_rate": 1.1946365054249775e-06, "loss": 0.8356, "step": 7275 }, { "epoch": 0.78, "grad_norm": 0.8380143292176646, "learning_rate": 1.1935073600746184e-06, "loss": 1.0375, "step": 7276 }, { "epoch": 0.78, "grad_norm": 1.7762505653514151, "learning_rate": 1.1923786762746148e-06, "loss": 0.8985, "step": 7277 }, { "epoch": 0.78, "grad_norm": 1.8333118665826367, "learning_rate": 1.1912504541618252e-06, "loss": 0.8809, "step": 7278 }, { "epoch": 0.78, "grad_norm": 1.7514522638972865, "learning_rate": 1.1901226938730471e-06, "loss": 0.9362, "step": 7279 }, { "epoch": 0.78, "grad_norm": 1.5566338765683723, "learning_rate": 1.1889953955450273e-06, "loss": 0.9149, "step": 7280 }, { "epoch": 0.78, "grad_norm": 1.7582517642218802, "learning_rate": 1.1878685593144558e-06, "loss": 0.9387, "step": 7281 }, { "epoch": 0.78, "grad_norm": 1.687905336359075, "learning_rate": 1.1867421853179622e-06, "loss": 0.9652, "step": 7282 }, { "epoch": 0.78, "grad_norm": 1.6968170896379013, "learning_rate": 1.185616273692125e-06, "loss": 0.8653, "step": 7283 }, { "epoch": 0.78, "grad_norm": 1.6951992931198046, "learning_rate": 1.1844908245734659e-06, "loss": 0.8973, "step": 7284 }, { "epoch": 0.78, "grad_norm": 1.6989555865090609, "learning_rate": 1.183365838098449e-06, "loss": 0.9782, "step": 7285 }, { "epoch": 0.78, "grad_norm": 1.6391295443282756, "learning_rate": 1.1822413144034806e-06, "loss": 0.8502, "step": 7286 }, { "epoch": 0.78, "grad_norm": 1.813290859458668, "learning_rate": 1.181117253624916e-06, "loss": 0.9087, "step": 7287 }, { "epoch": 0.78, "grad_norm": 0.8033032078115374, "learning_rate": 1.1799936558990483e-06, "loss": 1.071, "step": 7288 }, { "epoch": 0.78, "grad_norm": 1.7498922904527956, "learning_rate": 1.1788705213621199e-06, "loss": 0.9208, "step": 7289 }, { "epoch": 0.78, "grad_norm": 1.6194658761366059, "learning_rate": 1.1777478501503154e-06, "loss": 0.9911, "step": 7290 }, { "epoch": 0.78, "grad_norm": 1.6904160307365548, "learning_rate": 1.17662564239976e-06, "loss": 0.905, "step": 7291 }, { "epoch": 0.78, "grad_norm": 1.7040061486075442, "learning_rate": 1.1755038982465266e-06, "loss": 0.767, "step": 7292 }, { "epoch": 0.78, "grad_norm": 1.6634699534595698, "learning_rate": 1.174382617826632e-06, "loss": 0.8905, "step": 7293 }, { "epoch": 0.78, "grad_norm": 1.7286886154560686, "learning_rate": 1.1732618012760327e-06, "loss": 0.8822, "step": 7294 }, { "epoch": 0.78, "grad_norm": 1.8541035993595005, "learning_rate": 1.1721414487306326e-06, "loss": 0.8793, "step": 7295 }, { "epoch": 0.78, "grad_norm": 1.6430324034485775, "learning_rate": 1.1710215603262798e-06, "loss": 0.9538, "step": 7296 }, { "epoch": 0.78, "grad_norm": 0.8254952030840428, "learning_rate": 1.1699021361987634e-06, "loss": 1.0378, "step": 7297 }, { "epoch": 0.78, "grad_norm": 1.764530007115625, "learning_rate": 1.1687831764838158e-06, "loss": 0.8967, "step": 7298 }, { "epoch": 0.78, "grad_norm": 1.8164304808463245, "learning_rate": 1.1676646813171166e-06, "loss": 0.8823, "step": 7299 }, { "epoch": 0.78, "grad_norm": 1.7690369758376498, "learning_rate": 1.1665466508342876e-06, "loss": 0.8645, "step": 7300 }, { "epoch": 0.78, "grad_norm": 1.7461822654313346, "learning_rate": 1.1654290851708921e-06, "loss": 0.901, "step": 7301 }, { "epoch": 0.78, "grad_norm": 1.7227374696377924, "learning_rate": 
1.16431198446244e-06, "loss": 0.9777, "step": 7302 }, { "epoch": 0.79, "grad_norm": 0.7840131721652494, "learning_rate": 1.1631953488443847e-06, "loss": 1.0426, "step": 7303 }, { "epoch": 0.79, "grad_norm": 1.7781163577538208, "learning_rate": 1.1620791784521196e-06, "loss": 0.8509, "step": 7304 }, { "epoch": 0.79, "grad_norm": 1.6213727986572777, "learning_rate": 1.1609634734209867e-06, "loss": 0.9202, "step": 7305 }, { "epoch": 0.79, "grad_norm": 1.79768087742204, "learning_rate": 1.1598482338862676e-06, "loss": 0.973, "step": 7306 }, { "epoch": 0.79, "grad_norm": 1.6748183431560135, "learning_rate": 1.158733459983188e-06, "loss": 0.8894, "step": 7307 }, { "epoch": 0.79, "grad_norm": 2.1918140233694134, "learning_rate": 1.1576191518469193e-06, "loss": 0.9032, "step": 7308 }, { "epoch": 0.79, "grad_norm": 1.683846178775958, "learning_rate": 1.156505309612576e-06, "loss": 0.8945, "step": 7309 }, { "epoch": 0.79, "grad_norm": 0.7742832052351933, "learning_rate": 1.1553919334152137e-06, "loss": 1.0328, "step": 7310 }, { "epoch": 0.79, "grad_norm": 0.7800821613076367, "learning_rate": 1.1542790233898333e-06, "loss": 1.053, "step": 7311 }, { "epoch": 0.79, "grad_norm": 1.8131941173806192, "learning_rate": 1.1531665796713814e-06, "loss": 0.939, "step": 7312 }, { "epoch": 0.79, "grad_norm": 1.7111820576782042, "learning_rate": 1.1520546023947421e-06, "loss": 0.9051, "step": 7313 }, { "epoch": 0.79, "grad_norm": 1.7934914716172021, "learning_rate": 1.1509430916947483e-06, "loss": 0.9698, "step": 7314 }, { "epoch": 0.79, "grad_norm": 1.7324047509609322, "learning_rate": 1.149832047706176e-06, "loss": 0.8245, "step": 7315 }, { "epoch": 0.79, "grad_norm": 1.6513134170744688, "learning_rate": 1.1487214705637395e-06, "loss": 0.7149, "step": 7316 }, { "epoch": 0.79, "grad_norm": 0.7894830175100631, "learning_rate": 1.1476113604021039e-06, "loss": 1.0215, "step": 7317 }, { "epoch": 0.79, "grad_norm": 1.6655822276533985, "learning_rate": 1.1465017173558717e-06, "loss": 0.9106, "step": 7318 }, { "epoch": 0.79, "grad_norm": 1.7008368810267882, "learning_rate": 1.1453925415595902e-06, "loss": 0.8867, "step": 7319 }, { "epoch": 0.79, "grad_norm": 1.6427844004435166, "learning_rate": 1.1442838331477512e-06, "loss": 0.8575, "step": 7320 }, { "epoch": 0.79, "grad_norm": 1.6872034371786135, "learning_rate": 1.143175592254792e-06, "loss": 0.9202, "step": 7321 }, { "epoch": 0.79, "grad_norm": 1.5761945999185591, "learning_rate": 1.142067819015088e-06, "loss": 0.8351, "step": 7322 }, { "epoch": 0.79, "grad_norm": 1.68627768381419, "learning_rate": 1.1409605135629603e-06, "loss": 0.865, "step": 7323 }, { "epoch": 0.79, "grad_norm": 1.6962375686957685, "learning_rate": 1.1398536760326762e-06, "loss": 0.8315, "step": 7324 }, { "epoch": 0.79, "grad_norm": 1.6936647144432988, "learning_rate": 1.1387473065584404e-06, "loss": 0.8806, "step": 7325 }, { "epoch": 0.79, "grad_norm": 1.6337509633018175, "learning_rate": 1.1376414052744055e-06, "loss": 0.8792, "step": 7326 }, { "epoch": 0.79, "grad_norm": 1.7141847906504069, "learning_rate": 1.1365359723146673e-06, "loss": 0.8596, "step": 7327 }, { "epoch": 0.79, "grad_norm": 1.7361731874652355, "learning_rate": 1.1354310078132618e-06, "loss": 0.8996, "step": 7328 }, { "epoch": 0.79, "grad_norm": 1.8100217980055973, "learning_rate": 1.1343265119041685e-06, "loss": 0.9241, "step": 7329 }, { "epoch": 0.79, "grad_norm": 1.7666386594077366, "learning_rate": 1.1332224847213125e-06, "loss": 0.8797, "step": 7330 }, { "epoch": 0.79, "grad_norm": 1.6525452492622426, "learning_rate": 
1.1321189263985622e-06, "loss": 0.8439, "step": 7331 }, { "epoch": 0.79, "grad_norm": 1.833807389219447, "learning_rate": 1.1310158370697254e-06, "loss": 0.9075, "step": 7332 }, { "epoch": 0.79, "grad_norm": 1.726151570350535, "learning_rate": 1.1299132168685567e-06, "loss": 0.927, "step": 7333 }, { "epoch": 0.79, "grad_norm": 1.6779789018614633, "learning_rate": 1.1288110659287544e-06, "loss": 0.9366, "step": 7334 }, { "epoch": 0.79, "grad_norm": 1.634732437851536, "learning_rate": 1.127709384383955e-06, "loss": 0.8908, "step": 7335 }, { "epoch": 0.79, "grad_norm": 1.6429823126696996, "learning_rate": 1.1266081723677435e-06, "loss": 0.8306, "step": 7336 }, { "epoch": 0.79, "grad_norm": 0.7934801743396125, "learning_rate": 1.1255074300136437e-06, "loss": 1.0669, "step": 7337 }, { "epoch": 0.79, "grad_norm": 1.7582488421103417, "learning_rate": 1.124407157455127e-06, "loss": 0.957, "step": 7338 }, { "epoch": 0.79, "grad_norm": 1.766126857111412, "learning_rate": 1.1233073548256019e-06, "loss": 0.8935, "step": 7339 }, { "epoch": 0.79, "grad_norm": 1.7133015421767666, "learning_rate": 1.1222080222584265e-06, "loss": 0.866, "step": 7340 }, { "epoch": 0.79, "grad_norm": 1.788667575980634, "learning_rate": 1.1211091598868956e-06, "loss": 1.006, "step": 7341 }, { "epoch": 0.79, "grad_norm": 1.7316929614193102, "learning_rate": 1.1200107678442518e-06, "loss": 0.9014, "step": 7342 }, { "epoch": 0.79, "grad_norm": 1.8606435970548227, "learning_rate": 1.11891284626368e-06, "loss": 0.9248, "step": 7343 }, { "epoch": 0.79, "grad_norm": 1.759588217049405, "learning_rate": 1.1178153952783045e-06, "loss": 0.8756, "step": 7344 }, { "epoch": 0.79, "grad_norm": 1.761021070146567, "learning_rate": 1.1167184150211962e-06, "loss": 0.9426, "step": 7345 }, { "epoch": 0.79, "grad_norm": 0.7840092602368611, "learning_rate": 1.1156219056253692e-06, "loss": 1.0585, "step": 7346 }, { "epoch": 0.79, "grad_norm": 1.7221077545435126, "learning_rate": 1.1145258672237764e-06, "loss": 0.9068, "step": 7347 }, { "epoch": 0.79, "grad_norm": 1.7426769487424307, "learning_rate": 1.1134302999493174e-06, "loss": 0.8833, "step": 7348 }, { "epoch": 0.79, "grad_norm": 2.049281492564361, "learning_rate": 1.1123352039348362e-06, "loss": 0.9628, "step": 7349 }, { "epoch": 0.79, "grad_norm": 1.7068670059246993, "learning_rate": 1.1112405793131114e-06, "loss": 0.8925, "step": 7350 }, { "epoch": 0.79, "grad_norm": 1.7224558564592949, "learning_rate": 1.1101464262168733e-06, "loss": 0.9303, "step": 7351 }, { "epoch": 0.79, "grad_norm": 1.7327591815267886, "learning_rate": 1.1090527447787924e-06, "loss": 0.9608, "step": 7352 }, { "epoch": 0.79, "grad_norm": 1.8690034051988902, "learning_rate": 1.1079595351314793e-06, "loss": 0.9057, "step": 7353 }, { "epoch": 0.79, "grad_norm": 1.6892630725582283, "learning_rate": 1.1068667974074903e-06, "loss": 0.8603, "step": 7354 }, { "epoch": 0.79, "grad_norm": 1.6824083324658907, "learning_rate": 1.1057745317393254e-06, "loss": 0.7772, "step": 7355 }, { "epoch": 0.79, "grad_norm": 1.7352508705743914, "learning_rate": 1.1046827382594227e-06, "loss": 0.8726, "step": 7356 }, { "epoch": 0.79, "grad_norm": 1.7299157342692528, "learning_rate": 1.1035914171001665e-06, "loss": 0.9503, "step": 7357 }, { "epoch": 0.79, "grad_norm": 1.9452884007979727, "learning_rate": 1.1025005683938862e-06, "loss": 0.8861, "step": 7358 }, { "epoch": 0.79, "grad_norm": 1.760253546847027, "learning_rate": 1.1014101922728482e-06, "loss": 0.8722, "step": 7359 }, { "epoch": 0.79, "grad_norm": 1.7855408769882823, "learning_rate": 
1.1003202888692633e-06, "loss": 0.9674, "step": 7360 }, { "epoch": 0.79, "grad_norm": 0.8386437135271612, "learning_rate": 1.0992308583152878e-06, "loss": 1.055, "step": 7361 }, { "epoch": 0.79, "grad_norm": 1.7650631422308378, "learning_rate": 1.09814190074302e-06, "loss": 0.8728, "step": 7362 }, { "epoch": 0.79, "grad_norm": 1.7471761334865932, "learning_rate": 1.0970534162844977e-06, "loss": 0.8929, "step": 7363 }, { "epoch": 0.79, "grad_norm": 1.7316805681300618, "learning_rate": 1.0959654050717034e-06, "loss": 0.8333, "step": 7364 }, { "epoch": 0.79, "grad_norm": 1.762640180827757, "learning_rate": 1.0948778672365646e-06, "loss": 0.9034, "step": 7365 }, { "epoch": 0.79, "grad_norm": 1.6079687970532943, "learning_rate": 1.0937908029109461e-06, "loss": 0.8815, "step": 7366 }, { "epoch": 0.79, "grad_norm": 1.7137084160566847, "learning_rate": 1.0927042122266613e-06, "loss": 0.926, "step": 7367 }, { "epoch": 0.79, "grad_norm": 1.8183519820313863, "learning_rate": 1.0916180953154592e-06, "loss": 0.8856, "step": 7368 }, { "epoch": 0.79, "grad_norm": 1.7871112391371549, "learning_rate": 1.0905324523090376e-06, "loss": 0.8018, "step": 7369 }, { "epoch": 0.79, "grad_norm": 1.6429311404803781, "learning_rate": 1.0894472833390357e-06, "loss": 0.9549, "step": 7370 }, { "epoch": 0.79, "grad_norm": 1.6899767084562851, "learning_rate": 1.0883625885370319e-06, "loss": 0.7481, "step": 7371 }, { "epoch": 0.79, "grad_norm": 1.7510208597576167, "learning_rate": 1.0872783680345488e-06, "loss": 0.9352, "step": 7372 }, { "epoch": 0.79, "grad_norm": 0.8158205445576651, "learning_rate": 1.0861946219630527e-06, "loss": 1.0545, "step": 7373 }, { "epoch": 0.79, "grad_norm": 1.679353629155164, "learning_rate": 1.0851113504539528e-06, "loss": 0.8479, "step": 7374 }, { "epoch": 0.79, "grad_norm": 1.7264411555008512, "learning_rate": 1.084028553638597e-06, "loss": 0.8286, "step": 7375 }, { "epoch": 0.79, "grad_norm": 1.7223374015031898, "learning_rate": 1.0829462316482798e-06, "loss": 0.8928, "step": 7376 }, { "epoch": 0.79, "grad_norm": 0.7830326446248905, "learning_rate": 1.0818643846142373e-06, "loss": 1.0336, "step": 7377 }, { "epoch": 0.79, "grad_norm": 1.6518626850335538, "learning_rate": 1.0807830126676444e-06, "loss": 0.882, "step": 7378 }, { "epoch": 0.79, "grad_norm": 1.6861574259416512, "learning_rate": 1.0797021159396231e-06, "loss": 0.8796, "step": 7379 }, { "epoch": 0.79, "grad_norm": 1.746967941865538, "learning_rate": 1.0786216945612371e-06, "loss": 0.8542, "step": 7380 }, { "epoch": 0.79, "grad_norm": 1.717896304240222, "learning_rate": 1.0775417486634893e-06, "loss": 0.8743, "step": 7381 }, { "epoch": 0.79, "grad_norm": 1.7665092728621261, "learning_rate": 1.076462278377326e-06, "loss": 0.9116, "step": 7382 }, { "epoch": 0.79, "grad_norm": 1.8316118946700117, "learning_rate": 1.0753832838336398e-06, "loss": 0.8811, "step": 7383 }, { "epoch": 0.79, "grad_norm": 1.7455592113064085, "learning_rate": 1.0743047651632587e-06, "loss": 0.8637, "step": 7384 }, { "epoch": 0.79, "grad_norm": 1.677612755007564, "learning_rate": 1.073226722496959e-06, "loss": 0.8822, "step": 7385 }, { "epoch": 0.79, "grad_norm": 1.7643441701564833, "learning_rate": 1.0721491559654579e-06, "loss": 0.8596, "step": 7386 }, { "epoch": 0.79, "grad_norm": 1.8068590676367027, "learning_rate": 1.0710720656994116e-06, "loss": 0.9888, "step": 7387 }, { "epoch": 0.79, "grad_norm": 1.6973538948144187, "learning_rate": 1.0699954518294225e-06, "loss": 0.8614, "step": 7388 }, { "epoch": 0.79, "grad_norm": 1.7135942906969515, "learning_rate": 
1.0689193144860356e-06, "loss": 0.8789, "step": 7389 }, { "epoch": 0.79, "grad_norm": 1.850454387646111, "learning_rate": 1.0678436537997321e-06, "loss": 0.9111, "step": 7390 }, { "epoch": 0.79, "grad_norm": 1.7407976014197732, "learning_rate": 1.066768469900944e-06, "loss": 0.8601, "step": 7391 }, { "epoch": 0.79, "grad_norm": 1.6982264947338424, "learning_rate": 1.065693762920037e-06, "loss": 0.9358, "step": 7392 }, { "epoch": 0.79, "grad_norm": 1.8075752207421827, "learning_rate": 1.064619532987326e-06, "loss": 0.9091, "step": 7393 }, { "epoch": 0.79, "grad_norm": 1.7552954678345043, "learning_rate": 1.063545780233063e-06, "loss": 0.9515, "step": 7394 }, { "epoch": 0.79, "grad_norm": 1.6965134406390194, "learning_rate": 1.0624725047874452e-06, "loss": 0.8763, "step": 7395 }, { "epoch": 0.8, "grad_norm": 1.6647411432965047, "learning_rate": 1.061399706780612e-06, "loss": 0.8686, "step": 7396 }, { "epoch": 0.8, "grad_norm": 0.8021943512968018, "learning_rate": 1.0603273863426412e-06, "loss": 1.0537, "step": 7397 }, { "epoch": 0.8, "grad_norm": 1.6183031982979021, "learning_rate": 1.0592555436035573e-06, "loss": 0.9117, "step": 7398 }, { "epoch": 0.8, "grad_norm": 1.8194397155092208, "learning_rate": 1.0581841786933262e-06, "loss": 0.8841, "step": 7399 }, { "epoch": 0.8, "grad_norm": 1.7181061387553238, "learning_rate": 1.0571132917418508e-06, "loss": 0.9217, "step": 7400 }, { "epoch": 0.8, "grad_norm": 0.7844767536680001, "learning_rate": 1.0560428828789837e-06, "loss": 1.0515, "step": 7401 }, { "epoch": 0.8, "grad_norm": 1.818362061063144, "learning_rate": 1.0549729522345142e-06, "loss": 0.9565, "step": 7402 }, { "epoch": 0.8, "grad_norm": 1.8463317569559128, "learning_rate": 1.0539034999381731e-06, "loss": 0.9145, "step": 7403 }, { "epoch": 0.8, "grad_norm": 1.8020787141701602, "learning_rate": 1.052834526119637e-06, "loss": 0.8904, "step": 7404 }, { "epoch": 0.8, "grad_norm": 1.7138913299900018, "learning_rate": 1.0517660309085242e-06, "loss": 0.8639, "step": 7405 }, { "epoch": 0.8, "grad_norm": 0.7680815582276053, "learning_rate": 1.0506980144343898e-06, "loss": 1.063, "step": 7406 }, { "epoch": 0.8, "grad_norm": 1.7405837125287273, "learning_rate": 1.0496304768267373e-06, "loss": 0.833, "step": 7407 }, { "epoch": 0.8, "grad_norm": 1.86446043659506, "learning_rate": 1.048563418215009e-06, "loss": 0.8735, "step": 7408 }, { "epoch": 0.8, "grad_norm": 1.7140735494978634, "learning_rate": 1.0474968387285884e-06, "loss": 0.9337, "step": 7409 }, { "epoch": 0.8, "grad_norm": 0.769117581804003, "learning_rate": 1.046430738496802e-06, "loss": 1.0563, "step": 7410 }, { "epoch": 0.8, "grad_norm": 0.8336734305366362, "learning_rate": 1.04536511764892e-06, "loss": 1.0481, "step": 7411 }, { "epoch": 0.8, "grad_norm": 1.7078514003734424, "learning_rate": 1.044299976314151e-06, "loss": 0.8969, "step": 7412 }, { "epoch": 0.8, "grad_norm": 0.7906107510259344, "learning_rate": 1.0432353146216455e-06, "loss": 1.0405, "step": 7413 }, { "epoch": 0.8, "grad_norm": 1.8252322774369518, "learning_rate": 1.0421711327005014e-06, "loss": 1.0166, "step": 7414 }, { "epoch": 0.8, "grad_norm": 1.8049787129305694, "learning_rate": 1.0411074306797502e-06, "loss": 0.9323, "step": 7415 }, { "epoch": 0.8, "grad_norm": 1.737505306210331, "learning_rate": 1.0400442086883717e-06, "loss": 0.845, "step": 7416 }, { "epoch": 0.8, "grad_norm": 1.7177254183456578, "learning_rate": 1.038981466855286e-06, "loss": 0.9109, "step": 7417 }, { "epoch": 0.8, "grad_norm": 1.6679515561667353, "learning_rate": 1.0379192053093523e-06, "loss": 
0.9393, "step": 7418 }, { "epoch": 0.8, "grad_norm": 1.6638152849304269, "learning_rate": 1.036857424179374e-06, "loss": 0.916, "step": 7419 }, { "epoch": 0.8, "grad_norm": 1.766203513185313, "learning_rate": 1.0357961235940978e-06, "loss": 0.8481, "step": 7420 }, { "epoch": 0.8, "grad_norm": 1.6932190004303862, "learning_rate": 1.0347353036822061e-06, "loss": 0.9329, "step": 7421 }, { "epoch": 0.8, "grad_norm": 1.6589993131162466, "learning_rate": 1.0336749645723298e-06, "loss": 0.8272, "step": 7422 }, { "epoch": 0.8, "grad_norm": 1.8156555092119184, "learning_rate": 1.0326151063930396e-06, "loss": 0.796, "step": 7423 }, { "epoch": 0.8, "grad_norm": 1.6786141058746926, "learning_rate": 1.031555729272845e-06, "loss": 0.9064, "step": 7424 }, { "epoch": 0.8, "grad_norm": 1.716384580083553, "learning_rate": 1.0304968333401983e-06, "loss": 0.8568, "step": 7425 }, { "epoch": 0.8, "grad_norm": 1.9242271727566926, "learning_rate": 1.0294384187234952e-06, "loss": 0.8794, "step": 7426 }, { "epoch": 0.8, "grad_norm": 0.7847599215961499, "learning_rate": 1.0283804855510744e-06, "loss": 1.0286, "step": 7427 }, { "epoch": 0.8, "grad_norm": 1.713969873637573, "learning_rate": 1.0273230339512103e-06, "loss": 0.8943, "step": 7428 }, { "epoch": 0.8, "grad_norm": 0.7959527533467823, "learning_rate": 1.0262660640521245e-06, "loss": 1.0106, "step": 7429 }, { "epoch": 0.8, "grad_norm": 1.7005092406688935, "learning_rate": 1.0252095759819785e-06, "loss": 0.9045, "step": 7430 }, { "epoch": 0.8, "grad_norm": 1.6279413463219636, "learning_rate": 1.0241535698688742e-06, "loss": 0.8724, "step": 7431 }, { "epoch": 0.8, "grad_norm": 1.7374742975204573, "learning_rate": 1.0230980458408574e-06, "loss": 0.8693, "step": 7432 }, { "epoch": 0.8, "grad_norm": 1.6512880788237159, "learning_rate": 1.0220430040259116e-06, "loss": 0.8845, "step": 7433 }, { "epoch": 0.8, "grad_norm": 1.6915629157491914, "learning_rate": 1.020988444551967e-06, "loss": 0.8173, "step": 7434 }, { "epoch": 0.8, "grad_norm": 1.7551003206098652, "learning_rate": 1.0199343675468898e-06, "loss": 0.9321, "step": 7435 }, { "epoch": 0.8, "grad_norm": 1.777347136653578, "learning_rate": 1.0188807731384932e-06, "loss": 0.9098, "step": 7436 }, { "epoch": 0.8, "grad_norm": 1.6950701417138496, "learning_rate": 1.0178276614545269e-06, "loss": 0.8939, "step": 7437 }, { "epoch": 0.8, "grad_norm": 1.751724881127983, "learning_rate": 1.016775032622685e-06, "loss": 0.9137, "step": 7438 }, { "epoch": 0.8, "grad_norm": 1.7561071905334344, "learning_rate": 1.0157228867706042e-06, "loss": 0.9124, "step": 7439 }, { "epoch": 0.8, "grad_norm": 1.6359660680458328, "learning_rate": 1.0146712240258578e-06, "loss": 0.8359, "step": 7440 }, { "epoch": 0.8, "grad_norm": 1.6472994814005986, "learning_rate": 1.013620044515965e-06, "loss": 0.8458, "step": 7441 }, { "epoch": 0.8, "grad_norm": 1.6671174211023274, "learning_rate": 1.0125693483683863e-06, "loss": 0.8299, "step": 7442 }, { "epoch": 0.8, "grad_norm": 1.8525611424949733, "learning_rate": 1.0115191357105192e-06, "loss": 0.854, "step": 7443 }, { "epoch": 0.8, "grad_norm": 1.7592724136367586, "learning_rate": 1.010469406669709e-06, "loss": 0.8737, "step": 7444 }, { "epoch": 0.8, "grad_norm": 1.8775773936378102, "learning_rate": 1.0094201613732374e-06, "loss": 0.8721, "step": 7445 }, { "epoch": 0.8, "grad_norm": 1.8082533765831228, "learning_rate": 1.008371399948327e-06, "loss": 0.9318, "step": 7446 }, { "epoch": 0.8, "grad_norm": 1.7821285921448549, "learning_rate": 1.0073231225221457e-06, "loss": 0.8947, "step": 7447 }, { 
"epoch": 0.8, "grad_norm": 1.6384583744927261, "learning_rate": 1.0062753292218025e-06, "loss": 0.8721, "step": 7448 }, { "epoch": 0.8, "grad_norm": 1.6549292838909113, "learning_rate": 1.0052280201743426e-06, "loss": 0.9225, "step": 7449 }, { "epoch": 0.8, "grad_norm": 1.7209440829804084, "learning_rate": 1.004181195506757e-06, "loss": 0.8835, "step": 7450 }, { "epoch": 0.8, "grad_norm": 1.7810106313508058, "learning_rate": 1.0031348553459785e-06, "loss": 0.8917, "step": 7451 }, { "epoch": 0.8, "grad_norm": 0.7987806226766205, "learning_rate": 1.002088999818877e-06, "loss": 1.0493, "step": 7452 }, { "epoch": 0.8, "grad_norm": 1.777629513077754, "learning_rate": 1.0010436290522674e-06, "loss": 0.9167, "step": 7453 }, { "epoch": 0.8, "grad_norm": 1.6421252603242091, "learning_rate": 9.999987431729052e-07, "loss": 0.8712, "step": 7454 }, { "epoch": 0.8, "grad_norm": 1.6940357000473558, "learning_rate": 9.989543423074855e-07, "loss": 0.8568, "step": 7455 }, { "epoch": 0.8, "grad_norm": 1.619756720744302, "learning_rate": 9.979104265826438e-07, "loss": 0.9345, "step": 7456 }, { "epoch": 0.8, "grad_norm": 1.6931185788219418, "learning_rate": 9.968669961249611e-07, "loss": 0.8134, "step": 7457 }, { "epoch": 0.8, "grad_norm": 1.697006850818595, "learning_rate": 9.95824051060957e-07, "loss": 0.8873, "step": 7458 }, { "epoch": 0.8, "grad_norm": 1.7123681122975698, "learning_rate": 9.947815915170895e-07, "loss": 0.856, "step": 7459 }, { "epoch": 0.8, "grad_norm": 1.709630604862187, "learning_rate": 9.937396176197623e-07, "loss": 0.8706, "step": 7460 }, { "epoch": 0.8, "grad_norm": 1.8597341142274448, "learning_rate": 9.926981294953192e-07, "loss": 0.9058, "step": 7461 }, { "epoch": 0.8, "grad_norm": 1.693761452785304, "learning_rate": 9.916571272700421e-07, "loss": 0.9142, "step": 7462 }, { "epoch": 0.8, "grad_norm": 1.6494704990828555, "learning_rate": 9.906166110701588e-07, "loss": 0.8724, "step": 7463 }, { "epoch": 0.8, "grad_norm": 1.750118477293998, "learning_rate": 9.895765810218322e-07, "loss": 0.8256, "step": 7464 }, { "epoch": 0.8, "grad_norm": 2.035241925263039, "learning_rate": 9.885370372511727e-07, "loss": 0.8364, "step": 7465 }, { "epoch": 0.8, "grad_norm": 1.5941246686554178, "learning_rate": 9.874979798842254e-07, "loss": 0.846, "step": 7466 }, { "epoch": 0.8, "grad_norm": 1.7307034688370917, "learning_rate": 9.86459409046983e-07, "loss": 0.9948, "step": 7467 }, { "epoch": 0.8, "grad_norm": 1.6733351971333528, "learning_rate": 9.85421324865372e-07, "loss": 0.9172, "step": 7468 }, { "epoch": 0.8, "grad_norm": 1.6672178382938263, "learning_rate": 9.843837274652667e-07, "loss": 0.8986, "step": 7469 }, { "epoch": 0.8, "grad_norm": 1.755942214009784, "learning_rate": 9.833466169724792e-07, "loss": 0.9332, "step": 7470 }, { "epoch": 0.8, "grad_norm": 1.778365900496306, "learning_rate": 9.823099935127605e-07, "loss": 0.9592, "step": 7471 }, { "epoch": 0.8, "grad_norm": 1.7441445039321022, "learning_rate": 9.812738572118063e-07, "loss": 0.9224, "step": 7472 }, { "epoch": 0.8, "grad_norm": 1.7421832119941743, "learning_rate": 9.802382081952528e-07, "loss": 0.9447, "step": 7473 }, { "epoch": 0.8, "grad_norm": 1.7454891227623683, "learning_rate": 9.792030465886736e-07, "loss": 0.8702, "step": 7474 }, { "epoch": 0.8, "grad_norm": 1.6384831352355456, "learning_rate": 9.781683725175867e-07, "loss": 0.9162, "step": 7475 }, { "epoch": 0.8, "grad_norm": 1.7305653279973348, "learning_rate": 9.771341861074523e-07, "loss": 0.9643, "step": 7476 }, { "epoch": 0.8, "grad_norm": 1.757569008966683, 
"learning_rate": 9.761004874836643e-07, "loss": 0.9591, "step": 7477 }, { "epoch": 0.8, "grad_norm": 1.8188570467466174, "learning_rate": 9.750672767715651e-07, "loss": 0.9423, "step": 7478 }, { "epoch": 0.8, "grad_norm": 1.7850000952825742, "learning_rate": 9.740345540964359e-07, "loss": 0.9163, "step": 7479 }, { "epoch": 0.8, "grad_norm": 1.6809155880382092, "learning_rate": 9.730023195834948e-07, "loss": 0.8204, "step": 7480 }, { "epoch": 0.8, "grad_norm": 1.6069458568819561, "learning_rate": 9.71970573357906e-07, "loss": 0.9232, "step": 7481 }, { "epoch": 0.8, "grad_norm": 1.775462146831102, "learning_rate": 9.709393155447734e-07, "loss": 0.8609, "step": 7482 }, { "epoch": 0.8, "grad_norm": 1.7943762815321713, "learning_rate": 9.699085462691376e-07, "loss": 0.8485, "step": 7483 }, { "epoch": 0.8, "grad_norm": 1.6088010007993747, "learning_rate": 9.688782656559842e-07, "loss": 0.9491, "step": 7484 }, { "epoch": 0.8, "grad_norm": 1.6491506624924286, "learning_rate": 9.678484738302401e-07, "loss": 0.8848, "step": 7485 }, { "epoch": 0.8, "grad_norm": 1.7396473431974164, "learning_rate": 9.66819170916769e-07, "loss": 0.8493, "step": 7486 }, { "epoch": 0.8, "grad_norm": 1.7575140822242683, "learning_rate": 9.65790357040377e-07, "loss": 0.9016, "step": 7487 }, { "epoch": 0.8, "grad_norm": 1.8377030982020464, "learning_rate": 9.647620323258123e-07, "loss": 0.9055, "step": 7488 }, { "epoch": 0.81, "grad_norm": 0.7688247978885039, "learning_rate": 9.637341968977638e-07, "loss": 1.0315, "step": 7489 }, { "epoch": 0.81, "grad_norm": 1.6911437341489899, "learning_rate": 9.62706850880858e-07, "loss": 0.8699, "step": 7490 }, { "epoch": 0.81, "grad_norm": 1.6089855880343522, "learning_rate": 9.616799943996652e-07, "loss": 0.9499, "step": 7491 }, { "epoch": 0.81, "grad_norm": 1.752606019649648, "learning_rate": 9.606536275786965e-07, "loss": 0.9248, "step": 7492 }, { "epoch": 0.81, "grad_norm": 1.7145776107280355, "learning_rate": 9.596277505423996e-07, "loss": 0.9204, "step": 7493 }, { "epoch": 0.81, "grad_norm": 1.7380905271503913, "learning_rate": 9.586023634151675e-07, "loss": 0.8699, "step": 7494 }, { "epoch": 0.81, "grad_norm": 1.8850460520328105, "learning_rate": 9.575774663213327e-07, "loss": 0.9142, "step": 7495 }, { "epoch": 0.81, "grad_norm": 1.6952259179932236, "learning_rate": 9.565530593851657e-07, "loss": 0.8621, "step": 7496 }, { "epoch": 0.81, "grad_norm": 1.8650568778700318, "learning_rate": 9.55529142730881e-07, "loss": 0.865, "step": 7497 }, { "epoch": 0.81, "grad_norm": 1.6995718713193269, "learning_rate": 9.545057164826317e-07, "loss": 0.872, "step": 7498 }, { "epoch": 0.81, "grad_norm": 1.7737561687512202, "learning_rate": 9.534827807645091e-07, "loss": 0.8579, "step": 7499 }, { "epoch": 0.81, "grad_norm": 1.911708725321387, "learning_rate": 9.524603357005502e-07, "loss": 0.9102, "step": 7500 }, { "epoch": 0.81, "grad_norm": 0.8176478444179738, "learning_rate": 9.51438381414731e-07, "loss": 1.0314, "step": 7501 }, { "epoch": 0.81, "grad_norm": 0.7911021447058241, "learning_rate": 9.504169180309641e-07, "loss": 1.0475, "step": 7502 }, { "epoch": 0.81, "grad_norm": 1.7801279015928657, "learning_rate": 9.493959456731072e-07, "loss": 0.8582, "step": 7503 }, { "epoch": 0.81, "grad_norm": 1.8444219055110163, "learning_rate": 9.483754644649573e-07, "loss": 0.8537, "step": 7504 }, { "epoch": 0.81, "grad_norm": 1.7777083848354767, "learning_rate": 9.473554745302494e-07, "loss": 0.8392, "step": 7505 }, { "epoch": 0.81, "grad_norm": 1.7898371461719043, "learning_rate": 
9.463359759926616e-07, "loss": 0.9005, "step": 7506 }, { "epoch": 0.81, "grad_norm": 1.6989355218401136, "learning_rate": 9.453169689758135e-07, "loss": 0.8679, "step": 7507 }, { "epoch": 0.81, "grad_norm": 0.7952566528706494, "learning_rate": 9.442984536032612e-07, "loss": 1.0325, "step": 7508 }, { "epoch": 0.81, "grad_norm": 1.727238212882518, "learning_rate": 9.432804299985021e-07, "loss": 0.9495, "step": 7509 }, { "epoch": 0.81, "grad_norm": 1.7622294009983397, "learning_rate": 9.422628982849786e-07, "loss": 0.9228, "step": 7510 }, { "epoch": 0.81, "grad_norm": 1.7350765076741295, "learning_rate": 9.412458585860657e-07, "loss": 0.8686, "step": 7511 }, { "epoch": 0.81, "grad_norm": 1.9298925196996435, "learning_rate": 9.402293110250854e-07, "loss": 0.9279, "step": 7512 }, { "epoch": 0.81, "grad_norm": 1.6259806623637474, "learning_rate": 9.392132557252986e-07, "loss": 0.8543, "step": 7513 }, { "epoch": 0.81, "grad_norm": 1.7627126617419873, "learning_rate": 9.381976928099029e-07, "loss": 0.8303, "step": 7514 }, { "epoch": 0.81, "grad_norm": 1.7279499528250697, "learning_rate": 9.371826224020397e-07, "loss": 0.9442, "step": 7515 }, { "epoch": 0.81, "grad_norm": 1.8633324024823712, "learning_rate": 9.361680446247923e-07, "loss": 0.8522, "step": 7516 }, { "epoch": 0.81, "grad_norm": 1.5828403511894253, "learning_rate": 9.351539596011777e-07, "loss": 0.9122, "step": 7517 }, { "epoch": 0.81, "grad_norm": 0.7740955846138671, "learning_rate": 9.341403674541605e-07, "loss": 1.0264, "step": 7518 }, { "epoch": 0.81, "grad_norm": 1.6644677108266288, "learning_rate": 9.3312726830664e-07, "loss": 0.8328, "step": 7519 }, { "epoch": 0.81, "grad_norm": 1.6700162191088324, "learning_rate": 9.321146622814597e-07, "loss": 0.9813, "step": 7520 }, { "epoch": 0.81, "grad_norm": 1.8486260036708304, "learning_rate": 9.311025495013998e-07, "loss": 0.877, "step": 7521 }, { "epoch": 0.81, "grad_norm": 1.7762072785453806, "learning_rate": 9.300909300891831e-07, "loss": 0.9383, "step": 7522 }, { "epoch": 0.81, "grad_norm": 1.7816728927581338, "learning_rate": 9.290798041674737e-07, "loss": 0.9247, "step": 7523 }, { "epoch": 0.81, "grad_norm": 1.6263595234163108, "learning_rate": 9.280691718588713e-07, "loss": 0.9394, "step": 7524 }, { "epoch": 0.81, "grad_norm": 0.7951857027251206, "learning_rate": 9.270590332859202e-07, "loss": 1.0641, "step": 7525 }, { "epoch": 0.81, "grad_norm": 1.7693634543349892, "learning_rate": 9.260493885711036e-07, "loss": 0.8831, "step": 7526 }, { "epoch": 0.81, "grad_norm": 1.8409739002620005, "learning_rate": 9.250402378368428e-07, "loss": 0.9508, "step": 7527 }, { "epoch": 0.81, "grad_norm": 1.8049055558744254, "learning_rate": 9.240315812055029e-07, "loss": 0.9077, "step": 7528 }, { "epoch": 0.81, "grad_norm": 1.7183665578837524, "learning_rate": 9.230234187993859e-07, "loss": 0.93, "step": 7529 }, { "epoch": 0.81, "grad_norm": 1.7504152190428186, "learning_rate": 9.220157507407335e-07, "loss": 0.8819, "step": 7530 }, { "epoch": 0.81, "grad_norm": 1.8363921517880848, "learning_rate": 9.210085771517297e-07, "loss": 0.8664, "step": 7531 }, { "epoch": 0.81, "grad_norm": 1.6628223863276888, "learning_rate": 9.200018981545001e-07, "loss": 0.8601, "step": 7532 }, { "epoch": 0.81, "grad_norm": 1.556040270231309, "learning_rate": 9.189957138711053e-07, "loss": 0.8952, "step": 7533 }, { "epoch": 0.81, "grad_norm": 1.7106669614245364, "learning_rate": 9.17990024423549e-07, "loss": 0.9248, "step": 7534 }, { "epoch": 0.81, "grad_norm": 1.6069223506576378, "learning_rate": 9.169848299337763e-07, 
"loss": 0.8808, "step": 7535 }, { "epoch": 0.81, "grad_norm": 1.8078842526553374, "learning_rate": 9.159801305236687e-07, "loss": 0.8823, "step": 7536 }, { "epoch": 0.81, "grad_norm": 1.6980569363773421, "learning_rate": 9.149759263150493e-07, "loss": 0.792, "step": 7537 }, { "epoch": 0.81, "grad_norm": 1.7711587688808093, "learning_rate": 9.139722174296839e-07, "loss": 0.9041, "step": 7538 }, { "epoch": 0.81, "grad_norm": 1.7428206973434905, "learning_rate": 9.129690039892736e-07, "loss": 0.9107, "step": 7539 }, { "epoch": 0.81, "grad_norm": 1.7996695531159022, "learning_rate": 9.119662861154599e-07, "loss": 0.9042, "step": 7540 }, { "epoch": 0.81, "grad_norm": 1.689083312126614, "learning_rate": 9.109640639298295e-07, "loss": 0.9111, "step": 7541 }, { "epoch": 0.81, "grad_norm": 1.7631554352642633, "learning_rate": 9.099623375539019e-07, "loss": 0.9297, "step": 7542 }, { "epoch": 0.81, "grad_norm": 1.7712852959891103, "learning_rate": 9.089611071091414e-07, "loss": 0.9377, "step": 7543 }, { "epoch": 0.81, "grad_norm": 1.6318152716621532, "learning_rate": 9.079603727169523e-07, "loss": 0.8848, "step": 7544 }, { "epoch": 0.81, "grad_norm": 1.680134057195605, "learning_rate": 9.069601344986734e-07, "loss": 0.9136, "step": 7545 }, { "epoch": 0.81, "grad_norm": 1.8089847754045532, "learning_rate": 9.059603925755895e-07, "loss": 0.9491, "step": 7546 }, { "epoch": 0.81, "grad_norm": 1.6743156099227399, "learning_rate": 9.049611470689235e-07, "loss": 0.9003, "step": 7547 }, { "epoch": 0.81, "grad_norm": 1.733316341571945, "learning_rate": 9.039623980998347e-07, "loss": 0.9003, "step": 7548 }, { "epoch": 0.81, "grad_norm": 1.6374182962853234, "learning_rate": 9.029641457894267e-07, "loss": 0.8682, "step": 7549 }, { "epoch": 0.81, "grad_norm": 1.8365043625738997, "learning_rate": 9.019663902587416e-07, "loss": 0.887, "step": 7550 }, { "epoch": 0.81, "grad_norm": 1.9261231977697735, "learning_rate": 9.009691316287599e-07, "loss": 0.9191, "step": 7551 }, { "epoch": 0.81, "grad_norm": 1.7966914458141496, "learning_rate": 8.999723700204011e-07, "loss": 0.837, "step": 7552 }, { "epoch": 0.81, "grad_norm": 1.812251504940816, "learning_rate": 8.989761055545276e-07, "loss": 0.9293, "step": 7553 }, { "epoch": 0.81, "grad_norm": 0.8338486516434905, "learning_rate": 8.979803383519409e-07, "loss": 1.0231, "step": 7554 }, { "epoch": 0.81, "grad_norm": 1.7248498510981238, "learning_rate": 8.969850685333786e-07, "loss": 0.8824, "step": 7555 }, { "epoch": 0.81, "grad_norm": 1.5873929539741345, "learning_rate": 8.959902962195222e-07, "loss": 0.8746, "step": 7556 }, { "epoch": 0.81, "grad_norm": 1.6070394778845973, "learning_rate": 8.949960215309922e-07, "loss": 0.8782, "step": 7557 }, { "epoch": 0.81, "grad_norm": 1.6904616105626713, "learning_rate": 8.940022445883451e-07, "loss": 0.8486, "step": 7558 }, { "epoch": 0.81, "grad_norm": 1.6566832365377089, "learning_rate": 8.930089655120833e-07, "loss": 0.903, "step": 7559 }, { "epoch": 0.81, "grad_norm": 1.6577357780400859, "learning_rate": 8.920161844226416e-07, "loss": 0.8257, "step": 7560 }, { "epoch": 0.81, "grad_norm": 1.7689955987780956, "learning_rate": 8.910239014404016e-07, "loss": 0.8464, "step": 7561 }, { "epoch": 0.81, "grad_norm": 1.665637878029967, "learning_rate": 8.900321166856774e-07, "loss": 0.8585, "step": 7562 }, { "epoch": 0.81, "grad_norm": 1.8561626172669419, "learning_rate": 8.890408302787301e-07, "loss": 0.8627, "step": 7563 }, { "epoch": 0.81, "grad_norm": 1.7327119902903307, "learning_rate": 8.880500423397531e-07, "loss": 0.9095, "step": 
7564 }, { "epoch": 0.81, "grad_norm": 1.7095745206556874, "learning_rate": 8.870597529888847e-07, "loss": 0.8933, "step": 7565 }, { "epoch": 0.81, "grad_norm": 1.6382550813919294, "learning_rate": 8.860699623462021e-07, "loss": 0.9012, "step": 7566 }, { "epoch": 0.81, "grad_norm": 1.782585531442091, "learning_rate": 8.850806705317183e-07, "loss": 0.9337, "step": 7567 }, { "epoch": 0.81, "grad_norm": 1.7589552283248235, "learning_rate": 8.84091877665389e-07, "loss": 0.8269, "step": 7568 }, { "epoch": 0.81, "grad_norm": 1.703979063988604, "learning_rate": 8.83103583867111e-07, "loss": 0.9244, "step": 7569 }, { "epoch": 0.81, "grad_norm": 1.7305442823056714, "learning_rate": 8.82115789256715e-07, "loss": 0.896, "step": 7570 }, { "epoch": 0.81, "grad_norm": 1.6470216085790148, "learning_rate": 8.811284939539771e-07, "loss": 0.8724, "step": 7571 }, { "epoch": 0.81, "grad_norm": 1.6696528080458362, "learning_rate": 8.801416980786099e-07, "loss": 0.8956, "step": 7572 }, { "epoch": 0.81, "grad_norm": 1.6175418608078778, "learning_rate": 8.791554017502634e-07, "loss": 0.9221, "step": 7573 }, { "epoch": 0.81, "grad_norm": 1.7315639921370438, "learning_rate": 8.781696050885313e-07, "loss": 0.9266, "step": 7574 }, { "epoch": 0.81, "grad_norm": 1.6739014901228928, "learning_rate": 8.771843082129455e-07, "loss": 0.861, "step": 7575 }, { "epoch": 0.81, "grad_norm": 1.7234351367320297, "learning_rate": 8.76199511242975e-07, "loss": 0.9135, "step": 7576 }, { "epoch": 0.81, "grad_norm": 1.7239969873869712, "learning_rate": 8.752152142980308e-07, "loss": 0.9117, "step": 7577 }, { "epoch": 0.81, "grad_norm": 1.6260054618043283, "learning_rate": 8.74231417497463e-07, "loss": 0.869, "step": 7578 }, { "epoch": 0.81, "grad_norm": 1.8111370010175276, "learning_rate": 8.732481209605587e-07, "loss": 0.8779, "step": 7579 }, { "epoch": 0.81, "grad_norm": 1.8087168097332318, "learning_rate": 8.722653248065466e-07, "loss": 0.8277, "step": 7580 }, { "epoch": 0.81, "grad_norm": 1.7791708064155636, "learning_rate": 8.712830291545954e-07, "loss": 0.8666, "step": 7581 }, { "epoch": 0.82, "grad_norm": 1.6474602914598075, "learning_rate": 8.70301234123811e-07, "loss": 0.9119, "step": 7582 }, { "epoch": 0.82, "grad_norm": 1.7460997560935734, "learning_rate": 8.693199398332381e-07, "loss": 0.9421, "step": 7583 }, { "epoch": 0.82, "grad_norm": 1.7966163313855352, "learning_rate": 8.683391464018625e-07, "loss": 0.864, "step": 7584 }, { "epoch": 0.82, "grad_norm": 1.7671606641699713, "learning_rate": 8.673588539486117e-07, "loss": 0.9212, "step": 7585 }, { "epoch": 0.82, "grad_norm": 1.6748575705175561, "learning_rate": 8.663790625923451e-07, "loss": 0.8446, "step": 7586 }, { "epoch": 0.82, "grad_norm": 1.7339476893560808, "learning_rate": 8.653997724518682e-07, "loss": 0.8464, "step": 7587 }, { "epoch": 0.82, "grad_norm": 1.6716303170084572, "learning_rate": 8.644209836459244e-07, "loss": 0.8786, "step": 7588 }, { "epoch": 0.82, "grad_norm": 1.728454058044721, "learning_rate": 8.634426962931925e-07, "loss": 0.8918, "step": 7589 }, { "epoch": 0.82, "grad_norm": 1.648071284751071, "learning_rate": 8.62464910512294e-07, "loss": 0.8854, "step": 7590 }, { "epoch": 0.82, "grad_norm": 1.7520429261716601, "learning_rate": 8.614876264217914e-07, "loss": 0.8482, "step": 7591 }, { "epoch": 0.82, "grad_norm": 1.810777248898115, "learning_rate": 8.60510844140181e-07, "loss": 0.9323, "step": 7592 }, { "epoch": 0.82, "grad_norm": 1.7210160077577847, "learning_rate": 8.59534563785901e-07, "loss": 0.9929, "step": 7593 }, { "epoch": 0.82, 
"grad_norm": 1.7303321965522753, "learning_rate": 8.585587854773308e-07, "loss": 0.933, "step": 7594 }, { "epoch": 0.82, "grad_norm": 0.792656412644984, "learning_rate": 8.575835093327845e-07, "loss": 1.05, "step": 7595 }, { "epoch": 0.82, "grad_norm": 1.7949874893749755, "learning_rate": 8.566087354705182e-07, "loss": 0.9806, "step": 7596 }, { "epoch": 0.82, "grad_norm": 1.8993914634868043, "learning_rate": 8.556344640087289e-07, "loss": 0.8789, "step": 7597 }, { "epoch": 0.82, "grad_norm": 1.7297552850615328, "learning_rate": 8.546606950655467e-07, "loss": 0.8528, "step": 7598 }, { "epoch": 0.82, "grad_norm": 1.7924984276988014, "learning_rate": 8.536874287590469e-07, "loss": 0.8846, "step": 7599 }, { "epoch": 0.82, "grad_norm": 2.202840029078937, "learning_rate": 8.527146652072421e-07, "loss": 0.8735, "step": 7600 }, { "epoch": 0.82, "grad_norm": 1.7175521049308382, "learning_rate": 8.517424045280809e-07, "loss": 0.8858, "step": 7601 }, { "epoch": 0.82, "grad_norm": 1.5930632590041474, "learning_rate": 8.507706468394544e-07, "loss": 0.8922, "step": 7602 }, { "epoch": 0.82, "grad_norm": 1.7593662082744626, "learning_rate": 8.497993922591935e-07, "loss": 0.892, "step": 7603 }, { "epoch": 0.82, "grad_norm": 1.6454711983992356, "learning_rate": 8.488286409050623e-07, "loss": 0.8895, "step": 7604 }, { "epoch": 0.82, "grad_norm": 1.722399914857841, "learning_rate": 8.478583928947692e-07, "loss": 0.8724, "step": 7605 }, { "epoch": 0.82, "grad_norm": 1.5900776065208047, "learning_rate": 8.46888648345962e-07, "loss": 0.9121, "step": 7606 }, { "epoch": 0.82, "grad_norm": 1.6671855867245031, "learning_rate": 8.459194073762228e-07, "loss": 0.9002, "step": 7607 }, { "epoch": 0.82, "grad_norm": 1.586221036620047, "learning_rate": 8.449506701030774e-07, "loss": 0.9057, "step": 7608 }, { "epoch": 0.82, "grad_norm": 1.747596861845314, "learning_rate": 8.439824366439886e-07, "loss": 0.846, "step": 7609 }, { "epoch": 0.82, "grad_norm": 1.8539056989636502, "learning_rate": 8.430147071163558e-07, "loss": 0.7976, "step": 7610 }, { "epoch": 0.82, "grad_norm": 1.6543679197811214, "learning_rate": 8.420474816375213e-07, "loss": 0.988, "step": 7611 }, { "epoch": 0.82, "grad_norm": 1.580812976734974, "learning_rate": 8.410807603247656e-07, "loss": 0.8736, "step": 7612 }, { "epoch": 0.82, "grad_norm": 1.7004682790652799, "learning_rate": 8.401145432953045e-07, "loss": 0.8903, "step": 7613 }, { "epoch": 0.82, "grad_norm": 1.75099238841902, "learning_rate": 8.391488306662976e-07, "loss": 0.9469, "step": 7614 }, { "epoch": 0.82, "grad_norm": 1.766864429740249, "learning_rate": 8.381836225548384e-07, "loss": 0.8404, "step": 7615 }, { "epoch": 0.82, "grad_norm": 1.9115772791078784, "learning_rate": 8.372189190779639e-07, "loss": 0.8333, "step": 7616 }, { "epoch": 0.82, "grad_norm": 0.7787133087559813, "learning_rate": 8.362547203526456e-07, "loss": 1.0232, "step": 7617 }, { "epoch": 0.82, "grad_norm": 1.727582878495329, "learning_rate": 8.35291026495797e-07, "loss": 0.8673, "step": 7618 }, { "epoch": 0.82, "grad_norm": 1.6727302304855556, "learning_rate": 8.343278376242709e-07, "loss": 0.9213, "step": 7619 }, { "epoch": 0.82, "grad_norm": 1.6540936053112212, "learning_rate": 8.333651538548543e-07, "loss": 0.8662, "step": 7620 }, { "epoch": 0.82, "grad_norm": 0.7987174808280918, "learning_rate": 8.324029753042773e-07, "loss": 1.0317, "step": 7621 }, { "epoch": 0.82, "grad_norm": 1.7367812444208781, "learning_rate": 8.314413020892087e-07, "loss": 0.9754, "step": 7622 }, { "epoch": 0.82, "grad_norm": 0.7773400860108092, 
"learning_rate": 8.304801343262525e-07, "loss": 1.0343, "step": 7623 }, { "epoch": 0.82, "grad_norm": 1.6779815990466356, "learning_rate": 8.295194721319555e-07, "loss": 0.8141, "step": 7624 }, { "epoch": 0.82, "grad_norm": 1.8139981294878667, "learning_rate": 8.285593156228006e-07, "loss": 0.915, "step": 7625 }, { "epoch": 0.82, "grad_norm": 1.727122596020832, "learning_rate": 8.275996649152085e-07, "loss": 0.9542, "step": 7626 }, { "epoch": 0.82, "grad_norm": 1.7495634345084157, "learning_rate": 8.266405201255412e-07, "loss": 0.8811, "step": 7627 }, { "epoch": 0.82, "grad_norm": 2.183515539138076, "learning_rate": 8.256818813701e-07, "loss": 0.9013, "step": 7628 }, { "epoch": 0.82, "grad_norm": 1.6403317948729157, "learning_rate": 8.247237487651205e-07, "loss": 0.8305, "step": 7629 }, { "epoch": 0.82, "grad_norm": 1.660066506854636, "learning_rate": 8.237661224267807e-07, "loss": 0.9092, "step": 7630 }, { "epoch": 0.82, "grad_norm": 1.7173635046228868, "learning_rate": 8.228090024711977e-07, "loss": 0.9305, "step": 7631 }, { "epoch": 0.82, "grad_norm": 1.6790732332000802, "learning_rate": 8.218523890144225e-07, "loss": 0.8504, "step": 7632 }, { "epoch": 0.82, "grad_norm": 1.7899474937425524, "learning_rate": 8.208962821724498e-07, "loss": 0.9235, "step": 7633 }, { "epoch": 0.82, "grad_norm": 1.7258139307383407, "learning_rate": 8.199406820612115e-07, "loss": 0.8799, "step": 7634 }, { "epoch": 0.82, "grad_norm": 1.6482010493468788, "learning_rate": 8.189855887965764e-07, "loss": 0.8391, "step": 7635 }, { "epoch": 0.82, "grad_norm": 1.704140883915139, "learning_rate": 8.180310024943517e-07, "loss": 0.8237, "step": 7636 }, { "epoch": 0.82, "grad_norm": 1.971538319681906, "learning_rate": 8.170769232702869e-07, "loss": 0.8839, "step": 7637 }, { "epoch": 0.82, "grad_norm": 1.6987021177148245, "learning_rate": 8.161233512400641e-07, "loss": 0.9097, "step": 7638 }, { "epoch": 0.82, "grad_norm": 1.907528445529689, "learning_rate": 8.15170286519309e-07, "loss": 0.9025, "step": 7639 }, { "epoch": 0.82, "grad_norm": 1.6101558918348347, "learning_rate": 8.142177292235854e-07, "loss": 0.8676, "step": 7640 }, { "epoch": 0.82, "grad_norm": 1.6558606851998914, "learning_rate": 8.132656794683914e-07, "loss": 0.9027, "step": 7641 }, { "epoch": 0.82, "grad_norm": 0.7806888554198398, "learning_rate": 8.123141373691678e-07, "loss": 1.0416, "step": 7642 }, { "epoch": 0.82, "grad_norm": 1.7276992420040653, "learning_rate": 8.113631030412932e-07, "loss": 0.9631, "step": 7643 }, { "epoch": 0.82, "grad_norm": 1.8310473963281162, "learning_rate": 8.104125766000814e-07, "loss": 0.877, "step": 7644 }, { "epoch": 0.82, "grad_norm": 1.6476861104843405, "learning_rate": 8.094625581607901e-07, "loss": 0.8596, "step": 7645 }, { "epoch": 0.82, "grad_norm": 1.6449035505441318, "learning_rate": 8.085130478386089e-07, "loss": 0.9145, "step": 7646 }, { "epoch": 0.82, "grad_norm": 1.6675686623192103, "learning_rate": 8.075640457486722e-07, "loss": 0.964, "step": 7647 }, { "epoch": 0.82, "grad_norm": 1.8523007427580116, "learning_rate": 8.066155520060476e-07, "loss": 0.9197, "step": 7648 }, { "epoch": 0.82, "grad_norm": 1.7891302598776317, "learning_rate": 8.056675667257441e-07, "loss": 0.894, "step": 7649 }, { "epoch": 0.82, "grad_norm": 1.801769979750733, "learning_rate": 8.047200900227092e-07, "loss": 0.9074, "step": 7650 }, { "epoch": 0.82, "grad_norm": 1.6137443757240406, "learning_rate": 8.037731220118256e-07, "loss": 0.8728, "step": 7651 }, { "epoch": 0.82, "grad_norm": 1.6807320722641177, "learning_rate": 
8.028266628079173e-07, "loss": 0.8418, "step": 7652 }, { "epoch": 0.82, "grad_norm": 1.6853924993509217, "learning_rate": 8.018807125257478e-07, "loss": 0.8588, "step": 7653 }, { "epoch": 0.82, "grad_norm": 1.7842750176187387, "learning_rate": 8.009352712800128e-07, "loss": 0.8706, "step": 7654 }, { "epoch": 0.82, "grad_norm": 1.759825912357956, "learning_rate": 7.999903391853547e-07, "loss": 0.8628, "step": 7655 }, { "epoch": 0.82, "grad_norm": 1.7990374171003884, "learning_rate": 7.990459163563474e-07, "loss": 0.8726, "step": 7656 }, { "epoch": 0.82, "grad_norm": 0.7871925615883618, "learning_rate": 7.981020029075043e-07, "loss": 1.0623, "step": 7657 }, { "epoch": 0.82, "grad_norm": 1.632537567379499, "learning_rate": 7.971585989532793e-07, "loss": 0.894, "step": 7658 }, { "epoch": 0.82, "grad_norm": 1.664499421824791, "learning_rate": 7.962157046080648e-07, "loss": 0.8328, "step": 7659 }, { "epoch": 0.82, "grad_norm": 1.9746165672990794, "learning_rate": 7.952733199861879e-07, "loss": 0.8781, "step": 7660 }, { "epoch": 0.82, "grad_norm": 1.801139788687762, "learning_rate": 7.943314452019169e-07, "loss": 0.9396, "step": 7661 }, { "epoch": 0.82, "grad_norm": 1.7184413942738999, "learning_rate": 7.933900803694578e-07, "loss": 0.9379, "step": 7662 }, { "epoch": 0.82, "grad_norm": 1.7900193814473193, "learning_rate": 7.924492256029531e-07, "loss": 0.8515, "step": 7663 }, { "epoch": 0.82, "grad_norm": 1.7872169143727652, "learning_rate": 7.915088810164856e-07, "loss": 0.9278, "step": 7664 }, { "epoch": 0.82, "grad_norm": 1.8368366197827093, "learning_rate": 7.905690467240762e-07, "loss": 0.9348, "step": 7665 }, { "epoch": 0.82, "grad_norm": 1.7850600128005745, "learning_rate": 7.896297228396821e-07, "loss": 0.8847, "step": 7666 }, { "epoch": 0.82, "grad_norm": 1.752444361923007, "learning_rate": 7.886909094771983e-07, "loss": 0.886, "step": 7667 }, { "epoch": 0.82, "grad_norm": 1.7292238430428717, "learning_rate": 7.877526067504615e-07, "loss": 0.8447, "step": 7668 }, { "epoch": 0.82, "grad_norm": 1.753016418858087, "learning_rate": 7.868148147732413e-07, "loss": 0.9036, "step": 7669 }, { "epoch": 0.82, "grad_norm": 1.7189331177129563, "learning_rate": 7.858775336592506e-07, "loss": 0.8238, "step": 7670 }, { "epoch": 0.82, "grad_norm": 0.8074416941998166, "learning_rate": 7.849407635221378e-07, "loss": 1.0675, "step": 7671 }, { "epoch": 0.82, "grad_norm": 1.743708634926743, "learning_rate": 7.840045044754879e-07, "loss": 0.9145, "step": 7672 }, { "epoch": 0.82, "grad_norm": 1.753232106608159, "learning_rate": 7.830687566328266e-07, "loss": 0.793, "step": 7673 }, { "epoch": 0.82, "grad_norm": 1.7407737711978193, "learning_rate": 7.82133520107618e-07, "loss": 0.8413, "step": 7674 }, { "epoch": 0.83, "grad_norm": 1.6788612279997834, "learning_rate": 7.811987950132599e-07, "loss": 0.9864, "step": 7675 }, { "epoch": 0.83, "grad_norm": 1.7049726908434872, "learning_rate": 7.802645814630921e-07, "loss": 0.8516, "step": 7676 }, { "epoch": 0.83, "grad_norm": 1.7354495843714315, "learning_rate": 7.79330879570393e-07, "loss": 0.8664, "step": 7677 }, { "epoch": 0.83, "grad_norm": 1.657748055202429, "learning_rate": 7.783976894483752e-07, "loss": 0.8001, "step": 7678 }, { "epoch": 0.83, "grad_norm": 1.7373619010882853, "learning_rate": 7.774650112101911e-07, "loss": 0.8403, "step": 7679 }, { "epoch": 0.83, "grad_norm": 1.6857473827878116, "learning_rate": 7.765328449689313e-07, "loss": 0.8987, "step": 7680 }, { "epoch": 0.83, "grad_norm": 1.8546243186991298, "learning_rate": 7.756011908376254e-07, "loss": 
0.8931, "step": 7681 }, { "epoch": 0.83, "grad_norm": 1.7916737711606063, "learning_rate": 7.746700489292381e-07, "loss": 0.8545, "step": 7682 }, { "epoch": 0.83, "grad_norm": 1.6916002910570889, "learning_rate": 7.737394193566744e-07, "loss": 0.8788, "step": 7683 }, { "epoch": 0.83, "grad_norm": 1.6652998494777649, "learning_rate": 7.72809302232777e-07, "loss": 0.9267, "step": 7684 }, { "epoch": 0.83, "grad_norm": 1.6658168320928952, "learning_rate": 7.718796976703241e-07, "loss": 0.9264, "step": 7685 }, { "epoch": 0.83, "grad_norm": 1.7983250095518972, "learning_rate": 7.709506057820343e-07, "loss": 0.8967, "step": 7686 }, { "epoch": 0.83, "grad_norm": 1.7037808040586886, "learning_rate": 7.700220266805647e-07, "loss": 0.8677, "step": 7687 }, { "epoch": 0.83, "grad_norm": 1.6387884239421937, "learning_rate": 7.690939604785069e-07, "loss": 0.8573, "step": 7688 }, { "epoch": 0.83, "grad_norm": 1.745865446843562, "learning_rate": 7.681664072883915e-07, "loss": 0.8585, "step": 7689 }, { "epoch": 0.83, "grad_norm": 1.7443588843556945, "learning_rate": 7.672393672226902e-07, "loss": 0.888, "step": 7690 }, { "epoch": 0.83, "grad_norm": 1.7502192909681642, "learning_rate": 7.663128403938063e-07, "loss": 0.9636, "step": 7691 }, { "epoch": 0.83, "grad_norm": 1.6140759491160124, "learning_rate": 7.653868269140868e-07, "loss": 0.8691, "step": 7692 }, { "epoch": 0.83, "grad_norm": 0.7733010970245242, "learning_rate": 7.644613268958146e-07, "loss": 1.0444, "step": 7693 }, { "epoch": 0.83, "grad_norm": 1.631721996678535, "learning_rate": 7.635363404512069e-07, "loss": 0.8333, "step": 7694 }, { "epoch": 0.83, "grad_norm": 1.6930548948147035, "learning_rate": 7.626118676924238e-07, "loss": 0.9316, "step": 7695 }, { "epoch": 0.83, "grad_norm": 1.830490619158252, "learning_rate": 7.616879087315615e-07, "loss": 0.9333, "step": 7696 }, { "epoch": 0.83, "grad_norm": 0.8203816061363631, "learning_rate": 7.607644636806505e-07, "loss": 1.0275, "step": 7697 }, { "epoch": 0.83, "grad_norm": 1.7362689319455866, "learning_rate": 7.59841532651664e-07, "loss": 0.8702, "step": 7698 }, { "epoch": 0.83, "grad_norm": 1.8144170470625411, "learning_rate": 7.589191157565102e-07, "loss": 0.9276, "step": 7699 }, { "epoch": 0.83, "grad_norm": 0.7748218256434314, "learning_rate": 7.579972131070335e-07, "loss": 1.0423, "step": 7700 }, { "epoch": 0.83, "grad_norm": 1.8963666771912087, "learning_rate": 7.570758248150195e-07, "loss": 0.9085, "step": 7701 }, { "epoch": 0.83, "grad_norm": 1.704489755929583, "learning_rate": 7.561549509921901e-07, "loss": 0.9449, "step": 7702 }, { "epoch": 0.83, "grad_norm": 1.6917353817943295, "learning_rate": 7.552345917502025e-07, "loss": 1.0182, "step": 7703 }, { "epoch": 0.83, "grad_norm": 1.7534234077322564, "learning_rate": 7.543147472006545e-07, "loss": 0.9417, "step": 7704 }, { "epoch": 0.83, "grad_norm": 1.6608910408783957, "learning_rate": 7.533954174550817e-07, "loss": 0.9031, "step": 7705 }, { "epoch": 0.83, "grad_norm": 1.7831497124393656, "learning_rate": 7.524766026249535e-07, "loss": 0.9568, "step": 7706 }, { "epoch": 0.83, "grad_norm": 1.7903010117951579, "learning_rate": 7.515583028216811e-07, "loss": 0.8577, "step": 7707 }, { "epoch": 0.83, "grad_norm": 1.7881487043017859, "learning_rate": 7.506405181566123e-07, "loss": 0.9961, "step": 7708 }, { "epoch": 0.83, "grad_norm": 1.7429719072442948, "learning_rate": 7.4972324874103e-07, "loss": 0.8588, "step": 7709 }, { "epoch": 0.83, "grad_norm": 0.7839743603731708, "learning_rate": 7.488064946861556e-07, "loss": 1.0408, "step": 7710 }, { 
"epoch": 0.83, "grad_norm": 1.7404313576230663, "learning_rate": 7.478902561031504e-07, "loss": 0.8622, "step": 7711 }, { "epoch": 0.83, "grad_norm": 0.7938450924002184, "learning_rate": 7.469745331031114e-07, "loss": 1.031, "step": 7712 }, { "epoch": 0.83, "grad_norm": 1.7798506293184613, "learning_rate": 7.460593257970716e-07, "loss": 0.8807, "step": 7713 }, { "epoch": 0.83, "grad_norm": 1.7359288674529645, "learning_rate": 7.451446342960044e-07, "loss": 0.9059, "step": 7714 }, { "epoch": 0.83, "grad_norm": 0.7817997384596359, "learning_rate": 7.442304587108202e-07, "loss": 1.0652, "step": 7715 }, { "epoch": 0.83, "grad_norm": 1.8067934781463348, "learning_rate": 7.433167991523632e-07, "loss": 0.8564, "step": 7716 }, { "epoch": 0.83, "grad_norm": 1.6671905178644464, "learning_rate": 7.424036557314196e-07, "loss": 0.8003, "step": 7717 }, { "epoch": 0.83, "grad_norm": 1.6677455572054083, "learning_rate": 7.414910285587118e-07, "loss": 0.9658, "step": 7718 }, { "epoch": 0.83, "grad_norm": 2.392109792983048, "learning_rate": 7.405789177448986e-07, "loss": 0.8123, "step": 7719 }, { "epoch": 0.83, "grad_norm": 1.7120892186654093, "learning_rate": 7.396673234005741e-07, "loss": 0.8959, "step": 7720 }, { "epoch": 0.83, "grad_norm": 1.6724411719694086, "learning_rate": 7.387562456362762e-07, "loss": 0.8414, "step": 7721 }, { "epoch": 0.83, "grad_norm": 1.7387898802948645, "learning_rate": 7.378456845624726e-07, "loss": 0.8507, "step": 7722 }, { "epoch": 0.83, "grad_norm": 1.7744144790886418, "learning_rate": 7.369356402895739e-07, "loss": 0.896, "step": 7723 }, { "epoch": 0.83, "grad_norm": 1.7535744184587503, "learning_rate": 7.360261129279272e-07, "loss": 0.8594, "step": 7724 }, { "epoch": 0.83, "grad_norm": 1.660808961354507, "learning_rate": 7.351171025878129e-07, "loss": 0.7881, "step": 7725 }, { "epoch": 0.83, "grad_norm": 1.6583237907325992, "learning_rate": 7.342086093794532e-07, "loss": 0.89, "step": 7726 }, { "epoch": 0.83, "grad_norm": 1.7523281388918133, "learning_rate": 7.333006334130078e-07, "loss": 0.9534, "step": 7727 }, { "epoch": 0.83, "grad_norm": 1.7313692080126075, "learning_rate": 7.323931747985679e-07, "loss": 0.8686, "step": 7728 }, { "epoch": 0.83, "grad_norm": 1.7739313161210712, "learning_rate": 7.314862336461692e-07, "loss": 0.8238, "step": 7729 }, { "epoch": 0.83, "grad_norm": 1.6780867567610633, "learning_rate": 7.305798100657823e-07, "loss": 0.8913, "step": 7730 }, { "epoch": 0.83, "grad_norm": 1.7783599336694502, "learning_rate": 7.2967390416731e-07, "loss": 0.947, "step": 7731 }, { "epoch": 0.83, "grad_norm": 1.7563416570546309, "learning_rate": 7.287685160605995e-07, "loss": 0.9241, "step": 7732 }, { "epoch": 0.83, "grad_norm": 1.7506834869381207, "learning_rate": 7.278636458554322e-07, "loss": 0.8692, "step": 7733 }, { "epoch": 0.83, "grad_norm": 1.6902865974098378, "learning_rate": 7.269592936615249e-07, "loss": 0.8747, "step": 7734 }, { "epoch": 0.83, "grad_norm": 0.7772210872584498, "learning_rate": 7.260554595885349e-07, "loss": 1.0479, "step": 7735 }, { "epoch": 0.83, "grad_norm": 1.7561431222767219, "learning_rate": 7.251521437460562e-07, "loss": 0.8388, "step": 7736 }, { "epoch": 0.83, "grad_norm": 1.7239177936464762, "learning_rate": 7.242493462436162e-07, "loss": 0.8851, "step": 7737 }, { "epoch": 0.83, "grad_norm": 1.8262056957339523, "learning_rate": 7.233470671906845e-07, "loss": 0.8667, "step": 7738 }, { "epoch": 0.83, "grad_norm": 1.6841975692543876, "learning_rate": 7.224453066966652e-07, "loss": 0.8901, "step": 7739 }, { "epoch": 0.83, 
"grad_norm": 1.7915975259328318, "learning_rate": 7.215440648708983e-07, "loss": 0.8739, "step": 7740 }, { "epoch": 0.83, "grad_norm": 1.7362008657223553, "learning_rate": 7.206433418226649e-07, "loss": 0.8805, "step": 7741 }, { "epoch": 0.83, "grad_norm": 1.7398175663805546, "learning_rate": 7.197431376611785e-07, "loss": 0.8579, "step": 7742 }, { "epoch": 0.83, "grad_norm": 1.844613770077535, "learning_rate": 7.18843452495594e-07, "loss": 0.8581, "step": 7743 }, { "epoch": 0.83, "grad_norm": 1.6726879315024692, "learning_rate": 7.179442864349989e-07, "loss": 0.9121, "step": 7744 }, { "epoch": 0.83, "grad_norm": 1.7302548496239603, "learning_rate": 7.170456395884218e-07, "loss": 0.9347, "step": 7745 }, { "epoch": 0.83, "grad_norm": 1.7523101409781034, "learning_rate": 7.161475120648276e-07, "loss": 0.843, "step": 7746 }, { "epoch": 0.83, "grad_norm": 1.759028985512471, "learning_rate": 7.152499039731153e-07, "loss": 0.8395, "step": 7747 }, { "epoch": 0.83, "grad_norm": 2.134177032191232, "learning_rate": 7.143528154221236e-07, "loss": 0.8703, "step": 7748 }, { "epoch": 0.83, "grad_norm": 1.6894303673661402, "learning_rate": 7.13456246520629e-07, "loss": 0.8832, "step": 7749 }, { "epoch": 0.83, "grad_norm": 1.7143994988240272, "learning_rate": 7.125601973773416e-07, "loss": 0.9664, "step": 7750 }, { "epoch": 0.83, "grad_norm": 1.7557590162891064, "learning_rate": 7.116646681009121e-07, "loss": 0.9363, "step": 7751 }, { "epoch": 0.83, "grad_norm": 1.6484940923910598, "learning_rate": 7.107696587999264e-07, "loss": 0.9637, "step": 7752 }, { "epoch": 0.83, "grad_norm": 1.8807956121737608, "learning_rate": 7.098751695829048e-07, "loss": 0.8352, "step": 7753 }, { "epoch": 0.83, "grad_norm": 1.7150758448677024, "learning_rate": 7.089812005583097e-07, "loss": 0.8571, "step": 7754 }, { "epoch": 0.83, "grad_norm": 1.7293091944638441, "learning_rate": 7.080877518345386e-07, "loss": 0.888, "step": 7755 }, { "epoch": 0.83, "grad_norm": 1.8285704812578263, "learning_rate": 7.071948235199227e-07, "loss": 0.9282, "step": 7756 }, { "epoch": 0.83, "grad_norm": 1.6840902766428387, "learning_rate": 7.063024157227343e-07, "loss": 0.9191, "step": 7757 }, { "epoch": 0.83, "grad_norm": 0.8084632743774316, "learning_rate": 7.054105285511814e-07, "loss": 1.0619, "step": 7758 }, { "epoch": 0.83, "grad_norm": 1.767387931476611, "learning_rate": 7.045191621134067e-07, "loss": 0.9149, "step": 7759 }, { "epoch": 0.83, "grad_norm": 1.7672562075077562, "learning_rate": 7.036283165174923e-07, "loss": 0.8776, "step": 7760 }, { "epoch": 0.83, "grad_norm": 1.6368711205294566, "learning_rate": 7.027379918714578e-07, "loss": 0.8671, "step": 7761 }, { "epoch": 0.83, "grad_norm": 1.6896878887050955, "learning_rate": 7.018481882832562e-07, "loss": 0.963, "step": 7762 }, { "epoch": 0.83, "grad_norm": 1.6726730560215048, "learning_rate": 7.009589058607791e-07, "loss": 0.8739, "step": 7763 }, { "epoch": 0.83, "grad_norm": 1.5900148558234117, "learning_rate": 7.000701447118563e-07, "loss": 0.7669, "step": 7764 }, { "epoch": 0.83, "grad_norm": 1.7609548318208832, "learning_rate": 6.991819049442516e-07, "loss": 0.8467, "step": 7765 }, { "epoch": 0.83, "grad_norm": 1.6644074246235667, "learning_rate": 6.982941866656684e-07, "loss": 0.9442, "step": 7766 }, { "epoch": 0.83, "grad_norm": 1.673595463654634, "learning_rate": 6.974069899837465e-07, "loss": 0.8699, "step": 7767 }, { "epoch": 0.84, "grad_norm": 1.6805623622188068, "learning_rate": 6.96520315006059e-07, "loss": 0.9298, "step": 7768 }, { "epoch": 0.84, "grad_norm": 
1.7508270345304147, "learning_rate": 6.956341618401197e-07, "loss": 0.9307, "step": 7769 }, { "epoch": 0.84, "grad_norm": 1.8030828713312694, "learning_rate": 6.947485305933787e-07, "loss": 0.9312, "step": 7770 }, { "epoch": 0.84, "grad_norm": 1.6886665874151885, "learning_rate": 6.938634213732198e-07, "loss": 0.8978, "step": 7771 }, { "epoch": 0.84, "grad_norm": 1.715262980328026, "learning_rate": 6.929788342869675e-07, "loss": 0.8559, "step": 7772 }, { "epoch": 0.84, "grad_norm": 1.6603698556265347, "learning_rate": 6.92094769441879e-07, "loss": 0.9629, "step": 7773 }, { "epoch": 0.84, "grad_norm": 0.798170876604677, "learning_rate": 6.912112269451526e-07, "loss": 1.0498, "step": 7774 }, { "epoch": 0.84, "grad_norm": 1.7423056205282816, "learning_rate": 6.903282069039185e-07, "loss": 0.9595, "step": 7775 }, { "epoch": 0.84, "grad_norm": 1.8531583556840845, "learning_rate": 6.894457094252471e-07, "loss": 0.8686, "step": 7776 }, { "epoch": 0.84, "grad_norm": 1.6618977155343508, "learning_rate": 6.88563734616145e-07, "loss": 0.8046, "step": 7777 }, { "epoch": 0.84, "grad_norm": 1.7107939615914105, "learning_rate": 6.876822825835528e-07, "loss": 0.9147, "step": 7778 }, { "epoch": 0.84, "grad_norm": 1.7003297204419012, "learning_rate": 6.868013534343509e-07, "loss": 0.8589, "step": 7779 }, { "epoch": 0.84, "grad_norm": 0.8023776461057955, "learning_rate": 6.859209472753558e-07, "loss": 1.0354, "step": 7780 }, { "epoch": 0.84, "grad_norm": 1.8435252959107828, "learning_rate": 6.850410642133176e-07, "loss": 0.819, "step": 7781 }, { "epoch": 0.84, "grad_norm": 1.7567202768305095, "learning_rate": 6.841617043549259e-07, "loss": 0.8827, "step": 7782 }, { "epoch": 0.84, "grad_norm": 1.5776943904280234, "learning_rate": 6.832828678068093e-07, "loss": 0.9547, "step": 7783 }, { "epoch": 0.84, "grad_norm": 1.5914041671451347, "learning_rate": 6.824045546755243e-07, "loss": 0.9087, "step": 7784 }, { "epoch": 0.84, "grad_norm": 0.7872685828415734, "learning_rate": 6.815267650675722e-07, "loss": 1.066, "step": 7785 }, { "epoch": 0.84, "grad_norm": 1.8242710110947193, "learning_rate": 6.806494990893892e-07, "loss": 0.8739, "step": 7786 }, { "epoch": 0.84, "grad_norm": 2.0383153332442028, "learning_rate": 6.797727568473444e-07, "loss": 0.9297, "step": 7787 }, { "epoch": 0.84, "grad_norm": 1.6980530067068784, "learning_rate": 6.788965384477464e-07, "loss": 0.8814, "step": 7788 }, { "epoch": 0.84, "grad_norm": 1.857856490852591, "learning_rate": 6.780208439968416e-07, "loss": 0.9448, "step": 7789 }, { "epoch": 0.84, "grad_norm": 0.7832387571216752, "learning_rate": 6.771456736008086e-07, "loss": 1.0819, "step": 7790 }, { "epoch": 0.84, "grad_norm": 1.7201232613596555, "learning_rate": 6.762710273657658e-07, "loss": 0.8644, "step": 7791 }, { "epoch": 0.84, "grad_norm": 0.7883479208114924, "learning_rate": 6.753969053977683e-07, "loss": 1.0676, "step": 7792 }, { "epoch": 0.84, "grad_norm": 1.817404287289521, "learning_rate": 6.745233078028041e-07, "loss": 0.911, "step": 7793 }, { "epoch": 0.84, "grad_norm": 1.8406251993547607, "learning_rate": 6.736502346868018e-07, "loss": 0.9847, "step": 7794 }, { "epoch": 0.84, "grad_norm": 1.7015104907799106, "learning_rate": 6.72777686155624e-07, "loss": 0.9438, "step": 7795 }, { "epoch": 0.84, "grad_norm": 1.8575108680595303, "learning_rate": 6.719056623150688e-07, "loss": 0.9116, "step": 7796 }, { "epoch": 0.84, "grad_norm": 1.6710221987603033, "learning_rate": 6.710341632708734e-07, "loss": 0.8989, "step": 7797 }, { "epoch": 0.84, "grad_norm": 1.6475211377818015, 
"learning_rate": 6.701631891287108e-07, "loss": 0.8826, "step": 7798 }, { "epoch": 0.84, "grad_norm": 1.8001884357317572, "learning_rate": 6.692927399941879e-07, "loss": 0.9042, "step": 7799 }, { "epoch": 0.84, "grad_norm": 1.7881848965239275, "learning_rate": 6.6842281597285e-07, "loss": 0.9855, "step": 7800 }, { "epoch": 0.84, "grad_norm": 1.7940411516796069, "learning_rate": 6.675534171701803e-07, "loss": 0.8819, "step": 7801 }, { "epoch": 0.84, "grad_norm": 1.8609950757713096, "learning_rate": 6.666845436915942e-07, "loss": 0.8528, "step": 7802 }, { "epoch": 0.84, "grad_norm": 1.7857443241832727, "learning_rate": 6.658161956424458e-07, "loss": 0.9352, "step": 7803 }, { "epoch": 0.84, "grad_norm": 1.7723532933983268, "learning_rate": 6.64948373128027e-07, "loss": 0.9089, "step": 7804 }, { "epoch": 0.84, "grad_norm": 1.739182346768685, "learning_rate": 6.640810762535637e-07, "loss": 0.9433, "step": 7805 }, { "epoch": 0.84, "grad_norm": 1.7099544049750004, "learning_rate": 6.632143051242168e-07, "loss": 0.8763, "step": 7806 }, { "epoch": 0.84, "grad_norm": 1.7362308400147566, "learning_rate": 6.623480598450865e-07, "loss": 0.8856, "step": 7807 }, { "epoch": 0.84, "grad_norm": 1.638090381610639, "learning_rate": 6.61482340521209e-07, "loss": 0.8458, "step": 7808 }, { "epoch": 0.84, "grad_norm": 1.7894078014379493, "learning_rate": 6.606171472575539e-07, "loss": 0.919, "step": 7809 }, { "epoch": 0.84, "grad_norm": 1.6562597616812074, "learning_rate": 6.597524801590299e-07, "loss": 0.8493, "step": 7810 }, { "epoch": 0.84, "grad_norm": 1.7839685668426282, "learning_rate": 6.588883393304818e-07, "loss": 0.997, "step": 7811 }, { "epoch": 0.84, "grad_norm": 1.7414000036486947, "learning_rate": 6.580247248766875e-07, "loss": 0.8344, "step": 7812 }, { "epoch": 0.84, "grad_norm": 1.69601914283341, "learning_rate": 6.571616369023642e-07, "loss": 0.862, "step": 7813 }, { "epoch": 0.84, "grad_norm": 1.697070390605251, "learning_rate": 6.562990755121662e-07, "loss": 0.9495, "step": 7814 }, { "epoch": 0.84, "grad_norm": 1.6145975173619047, "learning_rate": 6.554370408106797e-07, "loss": 0.7864, "step": 7815 }, { "epoch": 0.84, "grad_norm": 1.7424470108045005, "learning_rate": 6.545755329024295e-07, "loss": 0.8309, "step": 7816 }, { "epoch": 0.84, "grad_norm": 1.8234251170996645, "learning_rate": 6.537145518918775e-07, "loss": 0.9436, "step": 7817 }, { "epoch": 0.84, "grad_norm": 0.7701724382962414, "learning_rate": 6.528540978834191e-07, "loss": 1.0135, "step": 7818 }, { "epoch": 0.84, "grad_norm": 1.7290914003082234, "learning_rate": 6.519941709813881e-07, "loss": 0.8811, "step": 7819 }, { "epoch": 0.84, "grad_norm": 1.6961596717780516, "learning_rate": 6.511347712900545e-07, "loss": 0.8553, "step": 7820 }, { "epoch": 0.84, "grad_norm": 1.8626118287178162, "learning_rate": 6.502758989136221e-07, "loss": 0.8108, "step": 7821 }, { "epoch": 0.84, "grad_norm": 0.8126082532986698, "learning_rate": 6.494175539562326e-07, "loss": 1.0517, "step": 7822 }, { "epoch": 0.84, "grad_norm": 1.7187137452564634, "learning_rate": 6.485597365219648e-07, "loss": 0.9059, "step": 7823 }, { "epoch": 0.84, "grad_norm": 1.7864708112473484, "learning_rate": 6.47702446714829e-07, "loss": 0.866, "step": 7824 }, { "epoch": 0.84, "grad_norm": 1.682651782942201, "learning_rate": 6.468456846387777e-07, "loss": 0.8549, "step": 7825 }, { "epoch": 0.84, "grad_norm": 0.7710435878216572, "learning_rate": 6.459894503976943e-07, "loss": 1.0419, "step": 7826 }, { "epoch": 0.84, "grad_norm": 1.7307086018090372, "learning_rate": 
6.451337440953997e-07, "loss": 0.8997, "step": 7827 }, { "epoch": 0.84, "grad_norm": 0.8047019490359074, "learning_rate": 6.442785658356526e-07, "loss": 1.0562, "step": 7828 }, { "epoch": 0.84, "grad_norm": 1.75444673267608, "learning_rate": 6.434239157221467e-07, "loss": 0.9085, "step": 7829 }, { "epoch": 0.84, "grad_norm": 1.6914596953714456, "learning_rate": 6.425697938585091e-07, "loss": 0.8214, "step": 7830 }, { "epoch": 0.84, "grad_norm": 1.7377406989013107, "learning_rate": 6.417162003483062e-07, "loss": 0.9251, "step": 7831 }, { "epoch": 0.84, "grad_norm": 1.6702037485460557, "learning_rate": 6.408631352950406e-07, "loss": 0.8693, "step": 7832 }, { "epoch": 0.84, "grad_norm": 1.7169569390535613, "learning_rate": 6.400105988021471e-07, "loss": 0.8876, "step": 7833 }, { "epoch": 0.84, "grad_norm": 1.7723681688272388, "learning_rate": 6.391585909729998e-07, "loss": 0.8933, "step": 7834 }, { "epoch": 0.84, "grad_norm": 1.8281165037805127, "learning_rate": 6.38307111910908e-07, "loss": 0.8551, "step": 7835 }, { "epoch": 0.84, "grad_norm": 1.7369107429891015, "learning_rate": 6.374561617191166e-07, "loss": 0.975, "step": 7836 }, { "epoch": 0.84, "grad_norm": 1.7288818712559797, "learning_rate": 6.366057405008041e-07, "loss": 0.8239, "step": 7837 }, { "epoch": 0.84, "grad_norm": 1.7348096738844598, "learning_rate": 6.357558483590887e-07, "loss": 0.8116, "step": 7838 }, { "epoch": 0.84, "grad_norm": 1.7610652853270758, "learning_rate": 6.349064853970233e-07, "loss": 0.8702, "step": 7839 }, { "epoch": 0.84, "grad_norm": 1.706565673429808, "learning_rate": 6.340576517175939e-07, "loss": 0.8371, "step": 7840 }, { "epoch": 0.84, "grad_norm": 1.8033589397598428, "learning_rate": 6.332093474237266e-07, "loss": 0.945, "step": 7841 }, { "epoch": 0.84, "grad_norm": 0.8054077753994097, "learning_rate": 6.323615726182813e-07, "loss": 1.0775, "step": 7842 }, { "epoch": 0.84, "grad_norm": 1.6278831953214516, "learning_rate": 6.31514327404052e-07, "loss": 0.8906, "step": 7843 }, { "epoch": 0.84, "grad_norm": 1.711354896686527, "learning_rate": 6.306676118837707e-07, "loss": 0.8573, "step": 7844 }, { "epoch": 0.84, "grad_norm": 1.64182038273929, "learning_rate": 6.29821426160106e-07, "loss": 0.9186, "step": 7845 }, { "epoch": 0.84, "grad_norm": 1.820714873483598, "learning_rate": 6.289757703356597e-07, "loss": 0.8337, "step": 7846 }, { "epoch": 0.84, "grad_norm": 1.8819564530819983, "learning_rate": 6.281306445129698e-07, "loss": 0.9314, "step": 7847 }, { "epoch": 0.84, "grad_norm": 1.6144000210893918, "learning_rate": 6.27286048794512e-07, "loss": 0.8603, "step": 7848 }, { "epoch": 0.84, "grad_norm": 1.7762129548058245, "learning_rate": 6.264419832826946e-07, "loss": 0.9058, "step": 7849 }, { "epoch": 0.84, "grad_norm": 1.74795955103121, "learning_rate": 6.255984480798649e-07, "loss": 0.9265, "step": 7850 }, { "epoch": 0.84, "grad_norm": 1.7602707935884465, "learning_rate": 6.247554432883052e-07, "loss": 0.8588, "step": 7851 }, { "epoch": 0.84, "grad_norm": 1.7984519460961446, "learning_rate": 6.239129690102308e-07, "loss": 0.8724, "step": 7852 }, { "epoch": 0.84, "grad_norm": 1.7326844208148713, "learning_rate": 6.230710253477956e-07, "loss": 0.9146, "step": 7853 }, { "epoch": 0.84, "grad_norm": 1.6572665232377881, "learning_rate": 6.222296124030891e-07, "loss": 0.8222, "step": 7854 }, { "epoch": 0.84, "grad_norm": 1.736445141453136, "learning_rate": 6.213887302781335e-07, "loss": 0.8834, "step": 7855 }, { "epoch": 0.84, "grad_norm": 1.9065246129167421, "learning_rate": 6.205483790748901e-07, "loss": 
0.9383, "step": 7856 }, { "epoch": 0.84, "grad_norm": 1.7246369509476258, "learning_rate": 6.197085588952556e-07, "loss": 0.9117, "step": 7857 }, { "epoch": 0.84, "grad_norm": 1.6033146934687563, "learning_rate": 6.188692698410576e-07, "loss": 0.8369, "step": 7858 }, { "epoch": 0.84, "grad_norm": 1.7719305246759598, "learning_rate": 6.180305120140645e-07, "loss": 0.812, "step": 7859 }, { "epoch": 0.84, "grad_norm": 2.0768196385484505, "learning_rate": 6.171922855159801e-07, "loss": 0.858, "step": 7860 }, { "epoch": 0.85, "grad_norm": 1.7635306030902915, "learning_rate": 6.163545904484397e-07, "loss": 0.939, "step": 7861 }, { "epoch": 0.85, "grad_norm": 1.6984876584930622, "learning_rate": 6.155174269130182e-07, "loss": 0.9281, "step": 7862 }, { "epoch": 0.85, "grad_norm": 1.8909000323797545, "learning_rate": 6.146807950112249e-07, "loss": 0.9912, "step": 7863 }, { "epoch": 0.85, "grad_norm": 1.801336717277184, "learning_rate": 6.138446948445032e-07, "loss": 0.934, "step": 7864 }, { "epoch": 0.85, "grad_norm": 1.8630002506827297, "learning_rate": 6.130091265142329e-07, "loss": 0.9332, "step": 7865 }, { "epoch": 0.85, "grad_norm": 1.9374952670812275, "learning_rate": 6.121740901217322e-07, "loss": 0.9203, "step": 7866 }, { "epoch": 0.85, "grad_norm": 1.6589477516210713, "learning_rate": 6.113395857682486e-07, "loss": 0.8926, "step": 7867 }, { "epoch": 0.85, "grad_norm": 1.7763166137758148, "learning_rate": 6.105056135549714e-07, "loss": 0.8664, "step": 7868 }, { "epoch": 0.85, "grad_norm": 1.7861660980520329, "learning_rate": 6.096721735830202e-07, "loss": 0.8506, "step": 7869 }, { "epoch": 0.85, "grad_norm": 1.7013882394706052, "learning_rate": 6.088392659534547e-07, "loss": 0.9368, "step": 7870 }, { "epoch": 0.85, "grad_norm": 1.743811797414318, "learning_rate": 6.080068907672659e-07, "loss": 1.0021, "step": 7871 }, { "epoch": 0.85, "grad_norm": 1.6497088447310508, "learning_rate": 6.071750481253835e-07, "loss": 0.8624, "step": 7872 }, { "epoch": 0.85, "grad_norm": 1.7651581393806608, "learning_rate": 6.063437381286719e-07, "loss": 0.905, "step": 7873 }, { "epoch": 0.85, "grad_norm": 1.768843292301885, "learning_rate": 6.055129608779276e-07, "loss": 0.9466, "step": 7874 }, { "epoch": 0.85, "grad_norm": 1.6552891612375364, "learning_rate": 6.046827164738872e-07, "loss": 0.8988, "step": 7875 }, { "epoch": 0.85, "grad_norm": 2.0688674203397714, "learning_rate": 6.03853005017222e-07, "loss": 0.944, "step": 7876 }, { "epoch": 0.85, "grad_norm": 1.651855956280592, "learning_rate": 6.030238266085342e-07, "loss": 0.949, "step": 7877 }, { "epoch": 0.85, "grad_norm": 0.7843709313063025, "learning_rate": 6.021951813483673e-07, "loss": 1.0395, "step": 7878 }, { "epoch": 0.85, "grad_norm": 1.7636916448887057, "learning_rate": 6.013670693371959e-07, "loss": 0.8859, "step": 7879 }, { "epoch": 0.85, "grad_norm": 1.6500553374974933, "learning_rate": 6.005394906754309e-07, "loss": 0.8215, "step": 7880 }, { "epoch": 0.85, "grad_norm": 1.6994022327112646, "learning_rate": 5.997124454634201e-07, "loss": 0.8624, "step": 7881 }, { "epoch": 0.85, "grad_norm": 1.7112742039458833, "learning_rate": 5.988859338014457e-07, "loss": 0.9346, "step": 7882 }, { "epoch": 0.85, "grad_norm": 1.7179344729269632, "learning_rate": 5.980599557897243e-07, "loss": 0.9236, "step": 7883 }, { "epoch": 0.85, "grad_norm": 1.6388942901543608, "learning_rate": 5.972345115284094e-07, "loss": 0.8503, "step": 7884 }, { "epoch": 0.85, "grad_norm": 1.713928739915737, "learning_rate": 5.96409601117589e-07, "loss": 0.8694, "step": 7885 }, { 
"epoch": 0.85, "grad_norm": 1.7129968476476136, "learning_rate": 5.955852246572852e-07, "loss": 0.8432, "step": 7886 }, { "epoch": 0.85, "grad_norm": 1.7667010812511776, "learning_rate": 5.947613822474574e-07, "loss": 0.8318, "step": 7887 }, { "epoch": 0.85, "grad_norm": 1.865419860719586, "learning_rate": 5.939380739880002e-07, "loss": 0.8955, "step": 7888 }, { "epoch": 0.85, "grad_norm": 1.6611657557032762, "learning_rate": 5.93115299978741e-07, "loss": 0.8858, "step": 7889 }, { "epoch": 0.85, "grad_norm": 1.8856513144596931, "learning_rate": 5.922930603194438e-07, "loss": 0.9238, "step": 7890 }, { "epoch": 0.85, "grad_norm": 1.7093377542438546, "learning_rate": 5.9147135510981e-07, "loss": 0.8986, "step": 7891 }, { "epoch": 0.85, "grad_norm": 1.766929537984134, "learning_rate": 5.906501844494716e-07, "loss": 0.851, "step": 7892 }, { "epoch": 0.85, "grad_norm": 0.7746675384328152, "learning_rate": 5.898295484379996e-07, "loss": 0.9955, "step": 7893 }, { "epoch": 0.85, "grad_norm": 1.646247497935098, "learning_rate": 5.890094471749003e-07, "loss": 0.8565, "step": 7894 }, { "epoch": 0.85, "grad_norm": 1.7517502990098466, "learning_rate": 5.881898807596115e-07, "loss": 0.8728, "step": 7895 }, { "epoch": 0.85, "grad_norm": 1.8189811847612463, "learning_rate": 5.873708492915093e-07, "loss": 0.8667, "step": 7896 }, { "epoch": 0.85, "grad_norm": 1.7768415306282674, "learning_rate": 5.865523528699051e-07, "loss": 0.9572, "step": 7897 }, { "epoch": 0.85, "grad_norm": 1.697541629888557, "learning_rate": 5.857343915940434e-07, "loss": 0.9526, "step": 7898 }, { "epoch": 0.85, "grad_norm": 1.8067957386047127, "learning_rate": 5.849169655631054e-07, "loss": 0.9177, "step": 7899 }, { "epoch": 0.85, "grad_norm": 1.6057550101116984, "learning_rate": 5.841000748762055e-07, "loss": 0.8896, "step": 7900 }, { "epoch": 0.85, "grad_norm": 1.6345049469928137, "learning_rate": 5.832837196323965e-07, "loss": 0.8056, "step": 7901 }, { "epoch": 0.85, "grad_norm": 1.7121396696200009, "learning_rate": 5.824678999306622e-07, "loss": 0.9083, "step": 7902 }, { "epoch": 0.85, "grad_norm": 1.7499764787668752, "learning_rate": 5.816526158699243e-07, "loss": 0.949, "step": 7903 }, { "epoch": 0.85, "grad_norm": 1.802574852045122, "learning_rate": 5.8083786754904e-07, "loss": 0.8465, "step": 7904 }, { "epoch": 0.85, "grad_norm": 1.7240040921421307, "learning_rate": 5.800236550667992e-07, "loss": 0.8745, "step": 7905 }, { "epoch": 0.85, "grad_norm": 1.7497590017391875, "learning_rate": 5.792099785219274e-07, "loss": 0.8233, "step": 7906 }, { "epoch": 0.85, "grad_norm": 1.7341430918906509, "learning_rate": 5.783968380130877e-07, "loss": 0.8803, "step": 7907 }, { "epoch": 0.85, "grad_norm": 1.6755727790916526, "learning_rate": 5.775842336388742e-07, "loss": 0.8515, "step": 7908 }, { "epoch": 0.85, "grad_norm": 1.6778542515605335, "learning_rate": 5.767721654978187e-07, "loss": 0.8473, "step": 7909 }, { "epoch": 0.85, "grad_norm": 1.6475146757503896, "learning_rate": 5.759606336883894e-07, "loss": 0.96, "step": 7910 }, { "epoch": 0.85, "grad_norm": 1.8076493170651007, "learning_rate": 5.751496383089833e-07, "loss": 0.8496, "step": 7911 }, { "epoch": 0.85, "grad_norm": 1.8327267749987748, "learning_rate": 5.743391794579379e-07, "loss": 0.9199, "step": 7912 }, { "epoch": 0.85, "grad_norm": 1.9226681358014712, "learning_rate": 5.735292572335261e-07, "loss": 0.9428, "step": 7913 }, { "epoch": 0.85, "grad_norm": 1.7628440127150906, "learning_rate": 5.727198717339511e-07, "loss": 0.9225, "step": 7914 }, { "epoch": 0.85, "grad_norm": 
1.6505130252161535, "learning_rate": 5.719110230573543e-07, "loss": 0.8363, "step": 7915 }, { "epoch": 0.85, "grad_norm": 1.7350253381262497, "learning_rate": 5.711027113018136e-07, "loss": 0.8995, "step": 7916 }, { "epoch": 0.85, "grad_norm": 1.783277244904883, "learning_rate": 5.702949365653365e-07, "loss": 0.8799, "step": 7917 }, { "epoch": 0.85, "grad_norm": 0.7757796319785997, "learning_rate": 5.694876989458698e-07, "loss": 1.01, "step": 7918 }, { "epoch": 0.85, "grad_norm": 1.8000712806215133, "learning_rate": 5.68680998541295e-07, "loss": 0.876, "step": 7919 }, { "epoch": 0.85, "grad_norm": 1.7389306840694156, "learning_rate": 5.678748354494251e-07, "loss": 0.825, "step": 7920 }, { "epoch": 0.85, "grad_norm": 1.5345556694922722, "learning_rate": 5.670692097680124e-07, "loss": 0.8793, "step": 7921 }, { "epoch": 0.85, "grad_norm": 0.8091850847721855, "learning_rate": 5.662641215947406e-07, "loss": 1.036, "step": 7922 }, { "epoch": 0.85, "grad_norm": 1.7520169895629933, "learning_rate": 5.654595710272287e-07, "loss": 0.8793, "step": 7923 }, { "epoch": 0.85, "grad_norm": 1.753883343669283, "learning_rate": 5.646555581630319e-07, "loss": 0.8531, "step": 7924 }, { "epoch": 0.85, "grad_norm": 0.7719799634592018, "learning_rate": 5.638520830996402e-07, "loss": 1.026, "step": 7925 }, { "epoch": 0.85, "grad_norm": 1.688578778612078, "learning_rate": 5.630491459344766e-07, "loss": 0.7993, "step": 7926 }, { "epoch": 0.85, "grad_norm": 1.7145116085454655, "learning_rate": 5.622467467649007e-07, "loss": 0.8536, "step": 7927 }, { "epoch": 0.85, "grad_norm": 1.7087940050712067, "learning_rate": 5.614448856882066e-07, "loss": 0.8414, "step": 7928 }, { "epoch": 0.85, "grad_norm": 1.6713232539888867, "learning_rate": 5.60643562801621e-07, "loss": 0.9017, "step": 7929 }, { "epoch": 0.85, "grad_norm": 1.7137163936424917, "learning_rate": 5.598427782023086e-07, "loss": 0.8653, "step": 7930 }, { "epoch": 0.85, "grad_norm": 1.8587799743530178, "learning_rate": 5.590425319873683e-07, "loss": 0.9692, "step": 7931 }, { "epoch": 0.85, "grad_norm": 1.9794648227149074, "learning_rate": 5.582428242538307e-07, "loss": 0.8679, "step": 7932 }, { "epoch": 0.85, "grad_norm": 1.7824274913346327, "learning_rate": 5.574436550986628e-07, "loss": 0.9178, "step": 7933 }, { "epoch": 0.85, "grad_norm": 1.7770633777902116, "learning_rate": 5.566450246187672e-07, "loss": 0.9244, "step": 7934 }, { "epoch": 0.85, "grad_norm": 1.804258750381648, "learning_rate": 5.558469329109823e-07, "loss": 0.8725, "step": 7935 }, { "epoch": 0.85, "grad_norm": 1.7220075284791883, "learning_rate": 5.550493800720774e-07, "loss": 0.9412, "step": 7936 }, { "epoch": 0.85, "grad_norm": 0.7733129929533094, "learning_rate": 5.542523661987586e-07, "loss": 1.0002, "step": 7937 }, { "epoch": 0.85, "grad_norm": 0.7983730296490733, "learning_rate": 5.534558913876686e-07, "loss": 1.002, "step": 7938 }, { "epoch": 0.85, "grad_norm": 0.8083137484727841, "learning_rate": 5.526599557353807e-07, "loss": 1.0522, "step": 7939 }, { "epoch": 0.85, "grad_norm": 1.7854058358040377, "learning_rate": 5.518645593384047e-07, "loss": 0.8752, "step": 7940 }, { "epoch": 0.85, "grad_norm": 1.7636924112774797, "learning_rate": 5.510697022931876e-07, "loss": 1.0185, "step": 7941 }, { "epoch": 0.85, "grad_norm": 1.7782722237893485, "learning_rate": 5.502753846961067e-07, "loss": 0.8864, "step": 7942 }, { "epoch": 0.85, "grad_norm": 0.7598582535354647, "learning_rate": 5.494816066434749e-07, "loss": 1.0607, "step": 7943 }, { "epoch": 0.85, "grad_norm": 1.7367699302688586, 
"learning_rate": 5.486883682315425e-07, "loss": 0.933, "step": 7944 }, { "epoch": 0.85, "grad_norm": 1.6583462118643466, "learning_rate": 5.478956695564902e-07, "loss": 0.8613, "step": 7945 }, { "epoch": 0.85, "grad_norm": 1.7753376634860525, "learning_rate": 5.471035107144374e-07, "loss": 0.9057, "step": 7946 }, { "epoch": 0.85, "grad_norm": 1.7955977466345596, "learning_rate": 5.46311891801436e-07, "loss": 0.8905, "step": 7947 }, { "epoch": 0.85, "grad_norm": 1.7435345691058008, "learning_rate": 5.455208129134704e-07, "loss": 0.8429, "step": 7948 }, { "epoch": 0.85, "grad_norm": 1.838567215552812, "learning_rate": 5.447302741464638e-07, "loss": 0.9159, "step": 7949 }, { "epoch": 0.85, "grad_norm": 1.749712080414966, "learning_rate": 5.439402755962719e-07, "loss": 0.8869, "step": 7950 }, { "epoch": 0.85, "grad_norm": 1.6114560141957042, "learning_rate": 5.43150817358683e-07, "loss": 0.8519, "step": 7951 }, { "epoch": 0.85, "grad_norm": 1.6649242820896324, "learning_rate": 5.423618995294239e-07, "loss": 0.8999, "step": 7952 }, { "epoch": 0.85, "grad_norm": 1.8130109141590365, "learning_rate": 5.415735222041518e-07, "loss": 0.8223, "step": 7953 }, { "epoch": 0.86, "grad_norm": 1.8010902159043953, "learning_rate": 5.407856854784599e-07, "loss": 0.957, "step": 7954 }, { "epoch": 0.86, "grad_norm": 1.6867213101529297, "learning_rate": 5.399983894478772e-07, "loss": 0.8573, "step": 7955 }, { "epoch": 0.86, "grad_norm": 1.846847216205357, "learning_rate": 5.392116342078662e-07, "loss": 0.8121, "step": 7956 }, { "epoch": 0.86, "grad_norm": 1.7286034379292898, "learning_rate": 5.384254198538224e-07, "loss": 0.869, "step": 7957 }, { "epoch": 0.86, "grad_norm": 1.6636311259950418, "learning_rate": 5.376397464810779e-07, "loss": 0.82, "step": 7958 }, { "epoch": 0.86, "grad_norm": 1.8122530121909868, "learning_rate": 5.36854614184899e-07, "loss": 0.9347, "step": 7959 }, { "epoch": 0.86, "grad_norm": 1.804172766116558, "learning_rate": 5.360700230604842e-07, "loss": 0.9893, "step": 7960 }, { "epoch": 0.86, "grad_norm": 1.6640679028720096, "learning_rate": 5.352859732029686e-07, "loss": 0.8365, "step": 7961 }, { "epoch": 0.86, "grad_norm": 1.7716065276171995, "learning_rate": 5.345024647074221e-07, "loss": 0.9081, "step": 7962 }, { "epoch": 0.86, "grad_norm": 1.7670520956670592, "learning_rate": 5.337194976688464e-07, "loss": 0.9582, "step": 7963 }, { "epoch": 0.86, "grad_norm": 1.599721672768864, "learning_rate": 5.329370721821781e-07, "loss": 0.8467, "step": 7964 }, { "epoch": 0.86, "grad_norm": 1.7575973758919392, "learning_rate": 5.321551883422904e-07, "loss": 0.9027, "step": 7965 }, { "epoch": 0.86, "grad_norm": 1.670566651606589, "learning_rate": 5.3137384624399e-07, "loss": 0.8479, "step": 7966 }, { "epoch": 0.86, "grad_norm": 1.6620611459241823, "learning_rate": 5.305930459820158e-07, "loss": 0.8464, "step": 7967 }, { "epoch": 0.86, "grad_norm": 1.8138824929006832, "learning_rate": 5.298127876510429e-07, "loss": 0.8844, "step": 7968 }, { "epoch": 0.86, "grad_norm": 1.7538607543266584, "learning_rate": 5.29033071345681e-07, "loss": 0.9612, "step": 7969 }, { "epoch": 0.86, "grad_norm": 1.7459935380197018, "learning_rate": 5.282538971604729e-07, "loss": 0.9495, "step": 7970 }, { "epoch": 0.86, "grad_norm": 1.7253545031312323, "learning_rate": 5.274752651898957e-07, "loss": 0.8859, "step": 7971 }, { "epoch": 0.86, "grad_norm": 1.7689301395781445, "learning_rate": 5.266971755283628e-07, "loss": 0.9595, "step": 7972 }, { "epoch": 0.86, "grad_norm": 1.6964134773527824, "learning_rate": 
5.259196282702178e-07, "loss": 0.8575, "step": 7973 }, { "epoch": 0.86, "grad_norm": 1.6822730003470077, "learning_rate": 5.25142623509744e-07, "loss": 0.8421, "step": 7974 }, { "epoch": 0.86, "grad_norm": 1.6508261834536417, "learning_rate": 5.243661613411544e-07, "loss": 0.8738, "step": 7975 }, { "epoch": 0.86, "grad_norm": 1.8144014282122483, "learning_rate": 5.235902418585958e-07, "loss": 0.8259, "step": 7976 }, { "epoch": 0.86, "grad_norm": 1.7920097811792606, "learning_rate": 5.228148651561537e-07, "loss": 0.8071, "step": 7977 }, { "epoch": 0.86, "grad_norm": 1.670264366394135, "learning_rate": 5.22040031327845e-07, "loss": 0.8672, "step": 7978 }, { "epoch": 0.86, "grad_norm": 1.7020123838195784, "learning_rate": 5.212657404676192e-07, "loss": 0.9179, "step": 7979 }, { "epoch": 0.86, "grad_norm": 1.7464520726371668, "learning_rate": 5.204919926693635e-07, "loss": 0.9002, "step": 7980 }, { "epoch": 0.86, "grad_norm": 1.708216445308849, "learning_rate": 5.197187880268973e-07, "loss": 0.9451, "step": 7981 }, { "epoch": 0.86, "grad_norm": 1.7912226423136715, "learning_rate": 5.189461266339735e-07, "loss": 0.8775, "step": 7982 }, { "epoch": 0.86, "grad_norm": 0.7927089787515248, "learning_rate": 5.181740085842795e-07, "loss": 1.0352, "step": 7983 }, { "epoch": 0.86, "grad_norm": 1.5725626564890451, "learning_rate": 5.174024339714401e-07, "loss": 0.9518, "step": 7984 }, { "epoch": 0.86, "grad_norm": 1.7957600221265526, "learning_rate": 5.166314028890091e-07, "loss": 0.9612, "step": 7985 }, { "epoch": 0.86, "grad_norm": 1.6649172717144423, "learning_rate": 5.158609154304761e-07, "loss": 0.7555, "step": 7986 }, { "epoch": 0.86, "grad_norm": 1.6134719274800247, "learning_rate": 5.150909716892671e-07, "loss": 0.855, "step": 7987 }, { "epoch": 0.86, "grad_norm": 1.5990768105394935, "learning_rate": 5.143215717587385e-07, "loss": 0.8743, "step": 7988 }, { "epoch": 0.86, "grad_norm": 1.7064954520102924, "learning_rate": 5.13552715732184e-07, "loss": 0.8455, "step": 7989 }, { "epoch": 0.86, "grad_norm": 1.7383807833040477, "learning_rate": 5.127844037028313e-07, "loss": 0.8494, "step": 7990 }, { "epoch": 0.86, "grad_norm": 1.6323407361198568, "learning_rate": 5.120166357638378e-07, "loss": 0.8649, "step": 7991 }, { "epoch": 0.86, "grad_norm": 1.741055547598097, "learning_rate": 5.112494120083e-07, "loss": 0.9117, "step": 7992 }, { "epoch": 0.86, "grad_norm": 1.7083230821739126, "learning_rate": 5.104827325292466e-07, "loss": 0.8849, "step": 7993 }, { "epoch": 0.86, "grad_norm": 1.7497514562162197, "learning_rate": 5.097165974196389e-07, "loss": 0.8295, "step": 7994 }, { "epoch": 0.86, "grad_norm": 0.7816401543559374, "learning_rate": 5.089510067723752e-07, "loss": 1.0412, "step": 7995 }, { "epoch": 0.86, "grad_norm": 1.7130542851389865, "learning_rate": 5.081859606802841e-07, "loss": 0.8306, "step": 7996 }, { "epoch": 0.86, "grad_norm": 1.585374111772938, "learning_rate": 5.074214592361315e-07, "loss": 0.8809, "step": 7997 }, { "epoch": 0.86, "grad_norm": 1.704889214866008, "learning_rate": 5.066575025326148e-07, "loss": 1.0048, "step": 7998 }, { "epoch": 0.86, "grad_norm": 1.688307570904051, "learning_rate": 5.05894090662366e-07, "loss": 0.8727, "step": 7999 }, { "epoch": 0.86, "grad_norm": 0.7910509254765243, "learning_rate": 5.051312237179539e-07, "loss": 1.0374, "step": 8000 }, { "epoch": 0.86, "grad_norm": 1.8153528947878643, "learning_rate": 5.043689017918757e-07, "loss": 0.9174, "step": 8001 }, { "epoch": 0.86, "grad_norm": 1.816183859868071, "learning_rate": 5.036071249765673e-07, "loss": 
0.8121, "step": 8002 }, { "epoch": 0.86, "grad_norm": 1.7348330039927427, "learning_rate": 5.028458933643976e-07, "loss": 0.9448, "step": 8003 }, { "epoch": 0.86, "grad_norm": 1.70606789961362, "learning_rate": 5.020852070476657e-07, "loss": 0.8248, "step": 8004 }, { "epoch": 0.86, "grad_norm": 1.777563833275154, "learning_rate": 5.013250661186103e-07, "loss": 0.8236, "step": 8005 }, { "epoch": 0.86, "grad_norm": 1.7055021383175055, "learning_rate": 5.005654706694002e-07, "loss": 0.9108, "step": 8006 }, { "epoch": 0.86, "grad_norm": 1.719270678615359, "learning_rate": 4.998064207921377e-07, "loss": 0.8781, "step": 8007 }, { "epoch": 0.86, "grad_norm": 1.7760079722646493, "learning_rate": 4.990479165788609e-07, "loss": 0.9381, "step": 8008 }, { "epoch": 0.86, "grad_norm": 1.8193308033775668, "learning_rate": 4.982899581215428e-07, "loss": 0.9143, "step": 8009 }, { "epoch": 0.86, "grad_norm": 1.7383998627765311, "learning_rate": 4.975325455120855e-07, "loss": 0.8934, "step": 8010 }, { "epoch": 0.86, "grad_norm": 1.729471515307609, "learning_rate": 4.9677567884233e-07, "loss": 0.9249, "step": 8011 }, { "epoch": 0.86, "grad_norm": 1.8763879913062969, "learning_rate": 4.96019358204049e-07, "loss": 0.822, "step": 8012 }, { "epoch": 0.86, "grad_norm": 1.6426464075305853, "learning_rate": 4.952635836889475e-07, "loss": 0.8629, "step": 8013 }, { "epoch": 0.86, "grad_norm": 1.6671705124592293, "learning_rate": 4.94508355388667e-07, "loss": 0.8708, "step": 8014 }, { "epoch": 0.86, "grad_norm": 1.6947551754001913, "learning_rate": 4.937536733947817e-07, "loss": 0.9263, "step": 8015 }, { "epoch": 0.86, "grad_norm": 1.7346097204123276, "learning_rate": 4.92999537798799e-07, "loss": 0.8393, "step": 8016 }, { "epoch": 0.86, "grad_norm": 1.8675047846233566, "learning_rate": 4.922459486921594e-07, "loss": 0.8126, "step": 8017 }, { "epoch": 0.86, "grad_norm": 1.7063534299477443, "learning_rate": 4.914929061662399e-07, "loss": 0.8365, "step": 8018 }, { "epoch": 0.86, "grad_norm": 0.8048899249347088, "learning_rate": 4.907404103123481e-07, "loss": 1.0782, "step": 8019 }, { "epoch": 0.86, "grad_norm": 1.7250709764390129, "learning_rate": 4.89988461221727e-07, "loss": 0.82, "step": 8020 }, { "epoch": 0.86, "grad_norm": 1.776156454801782, "learning_rate": 4.89237058985555e-07, "loss": 0.9719, "step": 8021 }, { "epoch": 0.86, "grad_norm": 1.6358633380519503, "learning_rate": 4.88486203694939e-07, "loss": 0.8769, "step": 8022 }, { "epoch": 0.86, "grad_norm": 1.6784351050334996, "learning_rate": 4.877358954409245e-07, "loss": 0.8654, "step": 8023 }, { "epoch": 0.86, "grad_norm": 1.8868439257367278, "learning_rate": 4.869861343144894e-07, "loss": 0.8079, "step": 8024 }, { "epoch": 0.86, "grad_norm": 1.7352715391140265, "learning_rate": 4.862369204065437e-07, "loss": 0.8855, "step": 8025 }, { "epoch": 0.86, "grad_norm": 1.6783778390905457, "learning_rate": 4.854882538079331e-07, "loss": 0.8556, "step": 8026 }, { "epoch": 0.86, "grad_norm": 1.6363815257874872, "learning_rate": 4.847401346094361e-07, "loss": 0.8393, "step": 8027 }, { "epoch": 0.86, "grad_norm": 0.7805608358024557, "learning_rate": 4.839925629017638e-07, "loss": 1.0733, "step": 8028 }, { "epoch": 0.86, "grad_norm": 1.7247545916252052, "learning_rate": 4.832455387755619e-07, "loss": 0.9149, "step": 8029 }, { "epoch": 0.86, "grad_norm": 1.6600147169553279, "learning_rate": 4.824990623214099e-07, "loss": 0.9254, "step": 8030 }, { "epoch": 0.86, "grad_norm": 1.816016405509356, "learning_rate": 4.817531336298221e-07, "loss": 0.8752, "step": 8031 }, { "epoch": 
0.86, "grad_norm": 1.6513352468162448, "learning_rate": 4.810077527912422e-07, "loss": 0.8738, "step": 8032 }, { "epoch": 0.86, "grad_norm": 1.6491773509683738, "learning_rate": 4.802629198960517e-07, "loss": 0.9033, "step": 8033 }, { "epoch": 0.86, "grad_norm": 1.6435320237552906, "learning_rate": 4.795186350345648e-07, "loss": 0.8562, "step": 8034 }, { "epoch": 0.86, "grad_norm": 0.776172509483379, "learning_rate": 4.787748982970275e-07, "loss": 0.9996, "step": 8035 }, { "epoch": 0.86, "grad_norm": 1.5376376380411116, "learning_rate": 4.780317097736203e-07, "loss": 0.8893, "step": 8036 }, { "epoch": 0.86, "grad_norm": 1.7687407000085977, "learning_rate": 4.772890695544602e-07, "loss": 0.9107, "step": 8037 }, { "epoch": 0.86, "grad_norm": 1.6244048452696227, "learning_rate": 4.7654697772959057e-07, "loss": 0.9174, "step": 8038 }, { "epoch": 0.86, "grad_norm": 2.81002711906273, "learning_rate": 4.758054343889945e-07, "loss": 0.9022, "step": 8039 }, { "epoch": 0.86, "grad_norm": 1.7510325643806377, "learning_rate": 4.750644396225873e-07, "loss": 0.8804, "step": 8040 }, { "epoch": 0.86, "grad_norm": 1.6776147876584806, "learning_rate": 4.743239935202165e-07, "loss": 0.8936, "step": 8041 }, { "epoch": 0.86, "grad_norm": 1.7775054578373586, "learning_rate": 4.735840961716631e-07, "loss": 0.837, "step": 8042 }, { "epoch": 0.86, "grad_norm": 1.7725974036631507, "learning_rate": 4.728447476666442e-07, "loss": 0.8984, "step": 8043 }, { "epoch": 0.86, "grad_norm": 1.8185002329916373, "learning_rate": 4.7210594809480647e-07, "loss": 0.956, "step": 8044 }, { "epoch": 0.86, "grad_norm": 1.8460188304989817, "learning_rate": 4.713676975457321e-07, "loss": 0.9083, "step": 8045 }, { "epoch": 0.86, "grad_norm": 1.7111886197232955, "learning_rate": 4.7062999610893835e-07, "loss": 0.8499, "step": 8046 }, { "epoch": 0.87, "grad_norm": 1.7751571878681345, "learning_rate": 4.698928438738715e-07, "loss": 0.9082, "step": 8047 }, { "epoch": 0.87, "grad_norm": 1.802182662732776, "learning_rate": 4.691562409299161e-07, "loss": 0.8759, "step": 8048 }, { "epoch": 0.87, "grad_norm": 0.7922927390253922, "learning_rate": 4.6842018736638685e-07, "loss": 1.0281, "step": 8049 }, { "epoch": 0.87, "grad_norm": 1.727938724111258, "learning_rate": 4.6768468327253127e-07, "loss": 0.9053, "step": 8050 }, { "epoch": 0.87, "grad_norm": 2.4594072211268045, "learning_rate": 4.66949728737533e-07, "loss": 0.8201, "step": 8051 }, { "epoch": 0.87, "grad_norm": 1.6930996005957646, "learning_rate": 4.6621532385050904e-07, "loss": 0.9343, "step": 8052 }, { "epoch": 0.87, "grad_norm": 1.7170146122482113, "learning_rate": 4.654814687005066e-07, "loss": 0.9348, "step": 8053 }, { "epoch": 0.87, "grad_norm": 1.884854534956769, "learning_rate": 4.6474816337650883e-07, "loss": 0.8467, "step": 8054 }, { "epoch": 0.87, "grad_norm": 1.7174408997173494, "learning_rate": 4.640154079674325e-07, "loss": 0.8094, "step": 8055 }, { "epoch": 0.87, "grad_norm": 1.87605869127692, "learning_rate": 4.6328320256212534e-07, "loss": 0.8767, "step": 8056 }, { "epoch": 0.87, "grad_norm": 1.8648936647214198, "learning_rate": 4.625515472493697e-07, "loss": 0.8017, "step": 8057 }, { "epoch": 0.87, "grad_norm": 1.7046386164481417, "learning_rate": 4.618204421178835e-07, "loss": 0.8383, "step": 8058 }, { "epoch": 0.87, "grad_norm": 1.7150157750160273, "learning_rate": 4.610898872563135e-07, "loss": 0.9014, "step": 8059 }, { "epoch": 0.87, "grad_norm": 1.7393285090018638, "learning_rate": 4.603598827532424e-07, "loss": 0.8787, "step": 8060 }, { "epoch": 0.87, "grad_norm": 
1.7421368147517011, "learning_rate": 4.596304286971853e-07, "loss": 0.9119, "step": 8061 }, { "epoch": 0.87, "grad_norm": 1.7552876524109977, "learning_rate": 4.589015251765933e-07, "loss": 0.886, "step": 8062 }, { "epoch": 0.87, "grad_norm": 1.7740399297622256, "learning_rate": 4.58173172279846e-07, "loss": 0.9273, "step": 8063 }, { "epoch": 0.87, "grad_norm": 1.6619775429351784, "learning_rate": 4.574453700952591e-07, "loss": 0.8164, "step": 8064 }, { "epoch": 0.87, "grad_norm": 1.7436017289728163, "learning_rate": 4.567181187110831e-07, "loss": 0.8232, "step": 8065 }, { "epoch": 0.87, "grad_norm": 1.6712952050676817, "learning_rate": 4.5599141821549673e-07, "loss": 0.951, "step": 8066 }, { "epoch": 0.87, "grad_norm": 1.6730732802771922, "learning_rate": 4.552652686966169e-07, "loss": 0.9615, "step": 8067 }, { "epoch": 0.87, "grad_norm": 1.770916945578325, "learning_rate": 4.545396702424926e-07, "loss": 0.9703, "step": 8068 }, { "epoch": 0.87, "grad_norm": 1.7442625841483845, "learning_rate": 4.5381462294110336e-07, "loss": 0.8779, "step": 8069 }, { "epoch": 0.87, "grad_norm": 1.6576426138856928, "learning_rate": 4.5309012688036335e-07, "loss": 0.8649, "step": 8070 }, { "epoch": 0.87, "grad_norm": 1.7120236968782254, "learning_rate": 4.5236618214812233e-07, "loss": 0.8468, "step": 8071 }, { "epoch": 0.87, "grad_norm": 1.7924976104291408, "learning_rate": 4.516427888321584e-07, "loss": 0.8688, "step": 8072 }, { "epoch": 0.87, "grad_norm": 1.7277319154563702, "learning_rate": 4.5091994702018695e-07, "loss": 0.8545, "step": 8073 }, { "epoch": 0.87, "grad_norm": 1.6849481146237761, "learning_rate": 4.5019765679985614e-07, "loss": 0.9143, "step": 8074 }, { "epoch": 0.87, "grad_norm": 1.6561628558090684, "learning_rate": 4.4947591825874383e-07, "loss": 0.9423, "step": 8075 }, { "epoch": 0.87, "grad_norm": 1.6758387354156294, "learning_rate": 4.487547314843643e-07, "loss": 0.9856, "step": 8076 }, { "epoch": 0.87, "grad_norm": 1.7187461631143157, "learning_rate": 4.480340965641655e-07, "loss": 0.8725, "step": 8077 }, { "epoch": 0.87, "grad_norm": 1.7163908638667729, "learning_rate": 4.473140135855247e-07, "loss": 0.916, "step": 8078 }, { "epoch": 0.87, "grad_norm": 1.8488989254824135, "learning_rate": 4.4659448263575544e-07, "loss": 0.9783, "step": 8079 }, { "epoch": 0.87, "grad_norm": 1.8034663591798084, "learning_rate": 4.458755038021029e-07, "loss": 0.9117, "step": 8080 }, { "epoch": 0.87, "grad_norm": 1.7048816914822535, "learning_rate": 4.451570771717467e-07, "loss": 0.8337, "step": 8081 }, { "epoch": 0.87, "grad_norm": 1.745741296671989, "learning_rate": 4.444392028317968e-07, "loss": 0.8995, "step": 8082 }, { "epoch": 0.87, "grad_norm": 1.6921835156004805, "learning_rate": 4.437218808693e-07, "loss": 0.88, "step": 8083 }, { "epoch": 0.87, "grad_norm": 1.618272883163135, "learning_rate": 4.4300511137123193e-07, "loss": 0.905, "step": 8084 }, { "epoch": 0.87, "grad_norm": 0.7878331536409675, "learning_rate": 4.4228889442450463e-07, "loss": 1.0481, "step": 8085 }, { "epoch": 0.87, "grad_norm": 1.7804815306051947, "learning_rate": 4.4157323011596254e-07, "loss": 0.8395, "step": 8086 }, { "epoch": 0.87, "grad_norm": 1.6968938840559376, "learning_rate": 4.408581185323807e-07, "loss": 0.8233, "step": 8087 }, { "epoch": 0.87, "grad_norm": 1.6202629040841932, "learning_rate": 4.4014355976046974e-07, "loss": 0.8339, "step": 8088 }, { "epoch": 0.87, "grad_norm": 1.8398557056051188, "learning_rate": 4.394295538868731e-07, "loss": 0.881, "step": 8089 }, { "epoch": 0.87, "grad_norm": 
1.7092206346052676, "learning_rate": 4.3871610099816606e-07, "loss": 0.8661, "step": 8090 }, { "epoch": 0.87, "grad_norm": 1.70297316506603, "learning_rate": 4.3800320118085604e-07, "loss": 0.8999, "step": 8091 }, { "epoch": 0.87, "grad_norm": 1.6831996585824591, "learning_rate": 4.37290854521385e-07, "loss": 0.8622, "step": 8092 }, { "epoch": 0.87, "grad_norm": 1.667577789771012, "learning_rate": 4.3657906110612935e-07, "loss": 0.8953, "step": 8093 }, { "epoch": 0.87, "grad_norm": 1.722015589901042, "learning_rate": 4.358678210213935e-07, "loss": 0.9319, "step": 8094 }, { "epoch": 0.87, "grad_norm": 1.721836406912448, "learning_rate": 4.3515713435342e-07, "loss": 0.8599, "step": 8095 }, { "epoch": 0.87, "grad_norm": 1.7838899583779377, "learning_rate": 4.3444700118838167e-07, "loss": 0.8881, "step": 8096 }, { "epoch": 0.87, "grad_norm": 1.8084587348900927, "learning_rate": 4.337374216123841e-07, "loss": 0.8989, "step": 8097 }, { "epoch": 0.87, "grad_norm": 1.8681547234807678, "learning_rate": 4.330283957114656e-07, "loss": 0.8839, "step": 8098 }, { "epoch": 0.87, "grad_norm": 2.0044976851354743, "learning_rate": 4.3231992357160026e-07, "loss": 0.7993, "step": 8099 }, { "epoch": 0.87, "grad_norm": 1.6738531083260109, "learning_rate": 4.316120052786904e-07, "loss": 0.8744, "step": 8100 }, { "epoch": 0.87, "grad_norm": 1.8382023475712965, "learning_rate": 4.3090464091857576e-07, "loss": 0.9305, "step": 8101 }, { "epoch": 0.87, "grad_norm": 1.6836123502412175, "learning_rate": 4.3019783057702493e-07, "loss": 0.875, "step": 8102 }, { "epoch": 0.87, "grad_norm": 1.719647776787755, "learning_rate": 4.29491574339741e-07, "loss": 0.9556, "step": 8103 }, { "epoch": 0.87, "grad_norm": 1.815766205458188, "learning_rate": 4.287858722923605e-07, "loss": 0.8301, "step": 8104 }, { "epoch": 0.87, "grad_norm": 0.755622917717037, "learning_rate": 4.2808072452045377e-07, "loss": 1.0294, "step": 8105 }, { "epoch": 0.87, "grad_norm": 1.7210863465768784, "learning_rate": 4.2737613110951924e-07, "loss": 0.8171, "step": 8106 }, { "epoch": 0.87, "grad_norm": 1.8334825786884967, "learning_rate": 4.266720921449935e-07, "loss": 0.8408, "step": 8107 }, { "epoch": 0.87, "grad_norm": 1.6728134575584162, "learning_rate": 4.2596860771224435e-07, "loss": 0.8195, "step": 8108 }, { "epoch": 0.87, "grad_norm": 2.3156528578368825, "learning_rate": 4.252656778965697e-07, "loss": 0.8667, "step": 8109 }, { "epoch": 0.87, "grad_norm": 1.7392662344132004, "learning_rate": 4.2456330278320245e-07, "loss": 0.8534, "step": 8110 }, { "epoch": 0.87, "grad_norm": 1.725682930397735, "learning_rate": 4.238614824573101e-07, "loss": 0.8911, "step": 8111 }, { "epoch": 0.87, "grad_norm": 1.8089098699349329, "learning_rate": 4.2316021700398945e-07, "loss": 0.794, "step": 8112 }, { "epoch": 0.87, "grad_norm": 1.7532770217872775, "learning_rate": 4.2245950650827037e-07, "loss": 0.9476, "step": 8113 }, { "epoch": 0.87, "grad_norm": 0.7745959127619306, "learning_rate": 4.2175935105511767e-07, "loss": 1.0359, "step": 8114 }, { "epoch": 0.87, "grad_norm": 1.7831060897720843, "learning_rate": 4.2105975072942674e-07, "loss": 0.8605, "step": 8115 }, { "epoch": 0.87, "grad_norm": 0.760119585753882, "learning_rate": 4.2036070561602695e-07, "loss": 1.0839, "step": 8116 }, { "epoch": 0.87, "grad_norm": 1.6661119537642035, "learning_rate": 4.1966221579968103e-07, "loss": 0.949, "step": 8117 }, { "epoch": 0.87, "grad_norm": 1.9474873672361783, "learning_rate": 4.1896428136508126e-07, "loss": 0.9884, "step": 8118 }, { "epoch": 0.87, "grad_norm": 
1.6237346443567215, "learning_rate": 4.182669023968561e-07, "loss": 0.8585, "step": 8119 }, { "epoch": 0.87, "grad_norm": 1.786639003616739, "learning_rate": 4.1757007897956503e-07, "loss": 0.9695, "step": 8120 }, { "epoch": 0.87, "grad_norm": 0.7787767312588021, "learning_rate": 4.1687381119769887e-07, "loss": 1.003, "step": 8121 }, { "epoch": 0.87, "grad_norm": 0.8079335505777928, "learning_rate": 4.161780991356845e-07, "loss": 1.0626, "step": 8122 }, { "epoch": 0.87, "grad_norm": 1.701481883475435, "learning_rate": 4.154829428778778e-07, "loss": 0.8897, "step": 8123 }, { "epoch": 0.87, "grad_norm": 1.818219366432782, "learning_rate": 4.1478834250857027e-07, "loss": 0.9126, "step": 8124 }, { "epoch": 0.87, "grad_norm": 2.260961382099815, "learning_rate": 4.140942981119833e-07, "loss": 0.9304, "step": 8125 }, { "epoch": 0.87, "grad_norm": 1.72885634959852, "learning_rate": 4.134008097722725e-07, "loss": 0.8211, "step": 8126 }, { "epoch": 0.87, "grad_norm": 1.6378426653786742, "learning_rate": 4.127078775735266e-07, "loss": 0.799, "step": 8127 }, { "epoch": 0.87, "grad_norm": 1.7102947427228954, "learning_rate": 4.1201550159976454e-07, "loss": 0.9564, "step": 8128 }, { "epoch": 0.87, "grad_norm": 1.824772417577214, "learning_rate": 4.113236819349403e-07, "loss": 0.927, "step": 8129 }, { "epoch": 0.87, "grad_norm": 1.702348057123386, "learning_rate": 4.1063241866293955e-07, "loss": 0.8913, "step": 8130 }, { "epoch": 0.87, "grad_norm": 1.8187677372983375, "learning_rate": 4.099417118675797e-07, "loss": 0.8811, "step": 8131 }, { "epoch": 0.87, "grad_norm": 1.8205760936554345, "learning_rate": 4.092515616326126e-07, "loss": 0.9485, "step": 8132 }, { "epoch": 0.87, "grad_norm": 0.7679182208883465, "learning_rate": 4.0856196804171965e-07, "loss": 1.0463, "step": 8133 }, { "epoch": 0.87, "grad_norm": 0.7803805669142488, "learning_rate": 4.078729311785168e-07, "loss": 1.0325, "step": 8134 }, { "epoch": 0.87, "grad_norm": 1.8440505103888065, "learning_rate": 4.071844511265527e-07, "loss": 0.9139, "step": 8135 }, { "epoch": 0.87, "grad_norm": 1.7882096588328067, "learning_rate": 4.064965279693084e-07, "loss": 0.9549, "step": 8136 }, { "epoch": 0.87, "grad_norm": 1.7930667856350735, "learning_rate": 4.058091617901949e-07, "loss": 0.8789, "step": 8137 }, { "epoch": 0.87, "grad_norm": 1.7666321976988473, "learning_rate": 4.0512235267256007e-07, "loss": 0.9522, "step": 8138 }, { "epoch": 0.87, "grad_norm": 1.6579234455843967, "learning_rate": 4.044361006996811e-07, "loss": 0.8823, "step": 8139 }, { "epoch": 0.88, "grad_norm": 1.7610444687758813, "learning_rate": 4.0375040595476753e-07, "loss": 0.9515, "step": 8140 }, { "epoch": 0.88, "grad_norm": 0.779367572345615, "learning_rate": 4.0306526852096296e-07, "loss": 1.0282, "step": 8141 }, { "epoch": 0.88, "grad_norm": 1.6695118733110033, "learning_rate": 4.023806884813436e-07, "loss": 0.791, "step": 8142 }, { "epoch": 0.88, "grad_norm": 1.7501156902116224, "learning_rate": 4.016966659189159e-07, "loss": 0.8607, "step": 8143 }, { "epoch": 0.88, "grad_norm": 1.7310092272451592, "learning_rate": 4.0101320091661954e-07, "loss": 0.9122, "step": 8144 }, { "epoch": 0.88, "grad_norm": 1.8176749191318708, "learning_rate": 4.003302935573289e-07, "loss": 0.9541, "step": 8145 }, { "epoch": 0.88, "grad_norm": 1.7134905405841765, "learning_rate": 3.996479439238465e-07, "loss": 0.9011, "step": 8146 }, { "epoch": 0.88, "grad_norm": 1.7755983719651804, "learning_rate": 3.989661520989102e-07, "loss": 0.8028, "step": 8147 }, { "epoch": 0.88, "grad_norm": 1.6696769578005788, 
"learning_rate": 3.9828491816519153e-07, "loss": 0.8945, "step": 8148 }, { "epoch": 0.88, "grad_norm": 0.777880467216096, "learning_rate": 3.9760424220529e-07, "loss": 1.0282, "step": 8149 }, { "epoch": 0.88, "grad_norm": 1.5865532294561977, "learning_rate": 3.969241243017413e-07, "loss": 0.8616, "step": 8150 }, { "epoch": 0.88, "grad_norm": 1.7647819676090695, "learning_rate": 3.962445645370122e-07, "loss": 0.9001, "step": 8151 }, { "epoch": 0.88, "grad_norm": 1.7510273029426933, "learning_rate": 3.955655629935007e-07, "loss": 0.7952, "step": 8152 }, { "epoch": 0.88, "grad_norm": 1.6593281449366233, "learning_rate": 3.9488711975353866e-07, "loss": 0.8933, "step": 8153 }, { "epoch": 0.88, "grad_norm": 1.665852040270858, "learning_rate": 3.9420923489939034e-07, "loss": 0.8471, "step": 8154 }, { "epoch": 0.88, "grad_norm": 1.7407635567241004, "learning_rate": 3.935319085132505e-07, "loss": 0.8895, "step": 8155 }, { "epoch": 0.88, "grad_norm": 1.6318697124188302, "learning_rate": 3.928551406772468e-07, "loss": 0.9089, "step": 8156 }, { "epoch": 0.88, "grad_norm": 1.6389472793304734, "learning_rate": 3.921789314734409e-07, "loss": 0.8909, "step": 8157 }, { "epoch": 0.88, "grad_norm": 1.688808798869453, "learning_rate": 3.9150328098382593e-07, "loss": 0.7868, "step": 8158 }, { "epoch": 0.88, "grad_norm": 1.8258002826014934, "learning_rate": 3.908281892903254e-07, "loss": 0.9826, "step": 8159 }, { "epoch": 0.88, "grad_norm": 1.683348948672413, "learning_rate": 3.9015365647479655e-07, "loss": 0.815, "step": 8160 }, { "epoch": 0.88, "grad_norm": 1.6876235519214713, "learning_rate": 3.8947968261903055e-07, "loss": 0.8485, "step": 8161 }, { "epoch": 0.88, "grad_norm": 1.722380106101749, "learning_rate": 3.8880626780474717e-07, "loss": 0.8597, "step": 8162 }, { "epoch": 0.88, "grad_norm": 1.6771966477792897, "learning_rate": 3.88133412113601e-07, "loss": 0.7686, "step": 8163 }, { "epoch": 0.88, "grad_norm": 1.723507518340281, "learning_rate": 3.874611156271801e-07, "loss": 0.8606, "step": 8164 }, { "epoch": 0.88, "grad_norm": 1.818899301957327, "learning_rate": 3.8678937842699884e-07, "loss": 0.874, "step": 8165 }, { "epoch": 0.88, "grad_norm": 1.8647459702706561, "learning_rate": 3.8611820059450913e-07, "loss": 0.9046, "step": 8166 }, { "epoch": 0.88, "grad_norm": 0.7928131330933363, "learning_rate": 3.8544758221109534e-07, "loss": 1.075, "step": 8167 }, { "epoch": 0.88, "grad_norm": 1.791917699158972, "learning_rate": 3.8477752335807026e-07, "loss": 0.8587, "step": 8168 }, { "epoch": 0.88, "grad_norm": 1.729018501468878, "learning_rate": 3.841080241166811e-07, "loss": 0.9033, "step": 8169 }, { "epoch": 0.88, "grad_norm": 1.7988438949769137, "learning_rate": 3.834390845681091e-07, "loss": 0.8905, "step": 8170 }, { "epoch": 0.88, "grad_norm": 1.9008796033054556, "learning_rate": 3.82770704793462e-07, "loss": 0.879, "step": 8171 }, { "epoch": 0.88, "grad_norm": 1.7178303932301533, "learning_rate": 3.821028848737857e-07, "loss": 0.8752, "step": 8172 }, { "epoch": 0.88, "grad_norm": 1.7143364164150483, "learning_rate": 3.8143562489005525e-07, "loss": 0.8755, "step": 8173 }, { "epoch": 0.88, "grad_norm": 1.6951119947358289, "learning_rate": 3.807689249231772e-07, "loss": 0.9141, "step": 8174 }, { "epoch": 0.88, "grad_norm": 1.7760369745336935, "learning_rate": 3.801027850539929e-07, "loss": 0.836, "step": 8175 }, { "epoch": 0.88, "grad_norm": 1.6641904400138208, "learning_rate": 3.794372053632722e-07, "loss": 0.8125, "step": 8176 }, { "epoch": 0.88, "grad_norm": 1.6883503627025096, "learning_rate": 
3.787721859317206e-07, "loss": 0.8758, "step": 8177 }, { "epoch": 0.88, "grad_norm": 0.773589767564627, "learning_rate": 3.7810772683997245e-07, "loss": 1.085, "step": 8178 }, { "epoch": 0.88, "grad_norm": 1.7734077416308642, "learning_rate": 3.774438281685977e-07, "loss": 0.8774, "step": 8179 }, { "epoch": 0.88, "grad_norm": 1.7624409633092848, "learning_rate": 3.767804899980937e-07, "loss": 0.9973, "step": 8180 }, { "epoch": 0.88, "grad_norm": 1.6583213643361574, "learning_rate": 3.761177124088944e-07, "loss": 0.9263, "step": 8181 }, { "epoch": 0.88, "grad_norm": 1.6559030408712874, "learning_rate": 3.754554954813644e-07, "loss": 0.9087, "step": 8182 }, { "epoch": 0.88, "grad_norm": 1.7653366808982298, "learning_rate": 3.747938392957973e-07, "loss": 0.8727, "step": 8183 }, { "epoch": 0.88, "grad_norm": 1.8141702277947844, "learning_rate": 3.7413274393242327e-07, "loss": 0.8631, "step": 8184 }, { "epoch": 0.88, "grad_norm": 1.7277509683651031, "learning_rate": 3.7347220947140273e-07, "loss": 0.8906, "step": 8185 }, { "epoch": 0.88, "grad_norm": 1.7297884823958671, "learning_rate": 3.7281223599282656e-07, "loss": 0.9022, "step": 8186 }, { "epoch": 0.88, "grad_norm": 1.6981736781539112, "learning_rate": 3.7215282357671803e-07, "loss": 0.9113, "step": 8187 }, { "epoch": 0.88, "grad_norm": 1.6502757275622002, "learning_rate": 3.7149397230303473e-07, "loss": 0.9434, "step": 8188 }, { "epoch": 0.88, "grad_norm": 1.7419547375187157, "learning_rate": 3.7083568225166456e-07, "loss": 0.8514, "step": 8189 }, { "epoch": 0.88, "grad_norm": 2.1287243611026656, "learning_rate": 3.701779535024269e-07, "loss": 0.9456, "step": 8190 }, { "epoch": 0.88, "grad_norm": 1.7395438612625287, "learning_rate": 3.6952078613507304e-07, "loss": 0.9324, "step": 8191 }, { "epoch": 0.88, "grad_norm": 1.7368789779615625, "learning_rate": 3.688641802292892e-07, "loss": 0.8322, "step": 8192 }, { "epoch": 0.88, "grad_norm": 1.7343094963255603, "learning_rate": 3.682081358646883e-07, "loss": 0.8903, "step": 8193 }, { "epoch": 0.88, "grad_norm": 1.5542959484142138, "learning_rate": 3.67552653120819e-07, "loss": 0.844, "step": 8194 }, { "epoch": 0.88, "grad_norm": 1.79854860654558, "learning_rate": 3.668977320771616e-07, "loss": 0.897, "step": 8195 }, { "epoch": 0.88, "grad_norm": 1.7563731980988884, "learning_rate": 3.6624337281312704e-07, "loss": 0.9285, "step": 8196 }, { "epoch": 0.88, "grad_norm": 1.704541133324898, "learning_rate": 3.6558957540805794e-07, "loss": 0.8265, "step": 8197 }, { "epoch": 0.88, "grad_norm": 1.707805061123568, "learning_rate": 3.649363399412309e-07, "loss": 0.9372, "step": 8198 }, { "epoch": 0.88, "grad_norm": 1.7370905444092848, "learning_rate": 3.642836664918509e-07, "loss": 0.8409, "step": 8199 }, { "epoch": 0.88, "grad_norm": 1.7284720708583472, "learning_rate": 3.6363155513905847e-07, "loss": 0.93, "step": 8200 }, { "epoch": 0.88, "grad_norm": 1.7071436315797968, "learning_rate": 3.629800059619248e-07, "loss": 0.9168, "step": 8201 }, { "epoch": 0.88, "grad_norm": 1.7540245316243035, "learning_rate": 3.6232901903945075e-07, "loss": 0.8467, "step": 8202 }, { "epoch": 0.88, "grad_norm": 1.741262651953551, "learning_rate": 3.616785944505713e-07, "loss": 0.9054, "step": 8203 }, { "epoch": 0.88, "grad_norm": 0.8118644964216342, "learning_rate": 3.61028732274154e-07, "loss": 0.9933, "step": 8204 }, { "epoch": 0.88, "grad_norm": 1.6980224099886094, "learning_rate": 3.603794325889953e-07, "loss": 0.9186, "step": 8205 }, { "epoch": 0.88, "grad_norm": 1.713919473476343, "learning_rate": 
3.5973069547382556e-07, "loss": 0.7973, "step": 8206 }, { "epoch": 0.88, "grad_norm": 1.7141362790152725, "learning_rate": 3.5908252100730735e-07, "loss": 0.912, "step": 8207 }, { "epoch": 0.88, "grad_norm": 1.6281356580153694, "learning_rate": 3.584349092680328e-07, "loss": 0.821, "step": 8208 }, { "epoch": 0.88, "grad_norm": 5.621482759413654, "learning_rate": 3.577878603345269e-07, "loss": 0.951, "step": 8209 }, { "epoch": 0.88, "grad_norm": 1.7572882205238864, "learning_rate": 3.5714137428524754e-07, "loss": 0.9538, "step": 8210 }, { "epoch": 0.88, "grad_norm": 1.6877925181437599, "learning_rate": 3.5649545119858244e-07, "loss": 0.9048, "step": 8211 }, { "epoch": 0.88, "grad_norm": 1.6772556997647348, "learning_rate": 3.558500911528523e-07, "loss": 0.9105, "step": 8212 }, { "epoch": 0.88, "grad_norm": 0.7774330625583736, "learning_rate": 3.552052942263101e-07, "loss": 1.0366, "step": 8213 }, { "epoch": 0.88, "grad_norm": 1.7705531994266366, "learning_rate": 3.5456106049713835e-07, "loss": 0.7759, "step": 8214 }, { "epoch": 0.88, "grad_norm": 0.7755764200168127, "learning_rate": 3.5391739004345336e-07, "loss": 1.0619, "step": 8215 }, { "epoch": 0.88, "grad_norm": 1.6460772463533957, "learning_rate": 3.5327428294330333e-07, "loss": 0.9031, "step": 8216 }, { "epoch": 0.88, "grad_norm": 0.8196801791652817, "learning_rate": 3.5263173927466585e-07, "loss": 1.0199, "step": 8217 }, { "epoch": 0.88, "grad_norm": 1.6113002643086793, "learning_rate": 3.5198975911545143e-07, "loss": 0.8825, "step": 8218 }, { "epoch": 0.88, "grad_norm": 1.693174227300383, "learning_rate": 3.5134834254350213e-07, "loss": 0.8883, "step": 8219 }, { "epoch": 0.88, "grad_norm": 1.7251567389026323, "learning_rate": 3.5070748963659417e-07, "loss": 0.9188, "step": 8220 }, { "epoch": 0.88, "grad_norm": 1.855696886155795, "learning_rate": 3.5006720047243037e-07, "loss": 0.9299, "step": 8221 }, { "epoch": 0.88, "grad_norm": 1.6225522699011063, "learning_rate": 3.494274751286497e-07, "loss": 0.9041, "step": 8222 }, { "epoch": 0.88, "grad_norm": 1.9062900312357827, "learning_rate": 3.487883136828213e-07, "loss": 0.8708, "step": 8223 }, { "epoch": 0.88, "grad_norm": 1.8826443907434436, "learning_rate": 3.481497162124442e-07, "loss": 0.8647, "step": 8224 }, { "epoch": 0.88, "grad_norm": 1.733252118139951, "learning_rate": 3.4751168279495094e-07, "loss": 0.9538, "step": 8225 }, { "epoch": 0.88, "grad_norm": 1.6118799716555123, "learning_rate": 3.468742135077069e-07, "loss": 0.953, "step": 8226 }, { "epoch": 0.88, "grad_norm": 1.6836291484866632, "learning_rate": 3.4623730842800573e-07, "loss": 0.8231, "step": 8227 }, { "epoch": 0.88, "grad_norm": 1.6650101702332119, "learning_rate": 3.4560096763307516e-07, "loss": 0.887, "step": 8228 }, { "epoch": 0.88, "grad_norm": 1.85128967855895, "learning_rate": 3.4496519120007344e-07, "loss": 0.8312, "step": 8229 }, { "epoch": 0.88, "grad_norm": 1.7695420264761204, "learning_rate": 3.4432997920609e-07, "loss": 0.8495, "step": 8230 }, { "epoch": 0.88, "grad_norm": 0.8048646045718002, "learning_rate": 3.4369533172814715e-07, "loss": 1.0251, "step": 8231 }, { "epoch": 0.88, "grad_norm": 1.8619569597291394, "learning_rate": 3.4306124884319894e-07, "loss": 0.9592, "step": 8232 }, { "epoch": 0.89, "grad_norm": 1.6743000221666884, "learning_rate": 3.424277306281282e-07, "loss": 0.8287, "step": 8233 }, { "epoch": 0.89, "grad_norm": 1.7458004827712252, "learning_rate": 3.417947771597524e-07, "loss": 0.9071, "step": 8234 }, { "epoch": 0.89, "grad_norm": 1.799389954025595, "learning_rate": 
3.411623885148202e-07, "loss": 0.8221, "step": 8235 }, { "epoch": 0.89, "grad_norm": 1.7014900791466614, "learning_rate": 3.4053056477000856e-07, "loss": 0.8628, "step": 8236 }, { "epoch": 0.89, "grad_norm": 1.6772054513650267, "learning_rate": 3.3989930600192953e-07, "loss": 0.968, "step": 8237 }, { "epoch": 0.89, "grad_norm": 1.7471942067528425, "learning_rate": 3.3926861228712637e-07, "loss": 0.9099, "step": 8238 }, { "epoch": 0.89, "grad_norm": 1.6797339124180506, "learning_rate": 3.386384837020723e-07, "loss": 0.8827, "step": 8239 }, { "epoch": 0.89, "grad_norm": 1.8036985537139214, "learning_rate": 3.3800892032317123e-07, "loss": 0.8261, "step": 8240 }, { "epoch": 0.89, "grad_norm": 1.8903091362822204, "learning_rate": 3.373799222267615e-07, "loss": 0.9073, "step": 8241 }, { "epoch": 0.89, "grad_norm": 1.7790311194577395, "learning_rate": 3.3675148948910996e-07, "loss": 0.9138, "step": 8242 }, { "epoch": 0.89, "grad_norm": 0.8049835051729738, "learning_rate": 3.3612362218641727e-07, "loss": 1.0387, "step": 8243 }, { "epoch": 0.89, "grad_norm": 1.7595528933411977, "learning_rate": 3.354963203948147e-07, "loss": 0.8983, "step": 8244 }, { "epoch": 0.89, "grad_norm": 1.7930536151247514, "learning_rate": 3.348695841903637e-07, "loss": 0.872, "step": 8245 }, { "epoch": 0.89, "grad_norm": 1.7480891087035033, "learning_rate": 3.3424341364905853e-07, "loss": 0.8826, "step": 8246 }, { "epoch": 0.89, "grad_norm": 1.7658621787608952, "learning_rate": 3.3361780884682615e-07, "loss": 0.8241, "step": 8247 }, { "epoch": 0.89, "grad_norm": 1.7903514472539686, "learning_rate": 3.3299276985952034e-07, "loss": 0.8953, "step": 8248 }, { "epoch": 0.89, "grad_norm": 1.7932063030370908, "learning_rate": 3.323682967629321e-07, "loss": 0.9293, "step": 8249 }, { "epoch": 0.89, "grad_norm": 1.9294572180963336, "learning_rate": 3.317443896327788e-07, "loss": 0.8946, "step": 8250 }, { "epoch": 0.89, "grad_norm": 1.8020539495030725, "learning_rate": 3.311210485447125e-07, "loss": 0.9875, "step": 8251 }, { "epoch": 0.89, "grad_norm": 0.8325587832327367, "learning_rate": 3.3049827357431506e-07, "loss": 1.0283, "step": 8252 }, { "epoch": 0.89, "grad_norm": 1.7110551367060656, "learning_rate": 3.2987606479709946e-07, "loss": 0.8225, "step": 8253 }, { "epoch": 0.89, "grad_norm": 1.7201780684385002, "learning_rate": 3.2925442228851246e-07, "loss": 0.9142, "step": 8254 }, { "epoch": 0.89, "grad_norm": 1.664246445006223, "learning_rate": 3.286333461239288e-07, "loss": 0.8967, "step": 8255 }, { "epoch": 0.89, "grad_norm": 1.6771430242901224, "learning_rate": 3.2801283637865613e-07, "loss": 0.7718, "step": 8256 }, { "epoch": 0.89, "grad_norm": 1.7287444116149755, "learning_rate": 3.2739289312793467e-07, "loss": 0.9445, "step": 8257 }, { "epoch": 0.89, "grad_norm": 1.7586567174474266, "learning_rate": 3.2677351644693323e-07, "loss": 0.9085, "step": 8258 }, { "epoch": 0.89, "grad_norm": 1.7149182479309786, "learning_rate": 3.261547064107551e-07, "loss": 0.8462, "step": 8259 }, { "epoch": 0.89, "grad_norm": 1.7307791061981495, "learning_rate": 3.2553646309443133e-07, "loss": 0.8548, "step": 8260 }, { "epoch": 0.89, "grad_norm": 1.5912068002473503, "learning_rate": 3.2491878657292643e-07, "loss": 0.8272, "step": 8261 }, { "epoch": 0.89, "grad_norm": 1.7368098575044884, "learning_rate": 3.24301676921136e-07, "loss": 0.8853, "step": 8262 }, { "epoch": 0.89, "grad_norm": 1.7770227770216724, "learning_rate": 3.2368513421388745e-07, "loss": 0.8144, "step": 8263 }, { "epoch": 0.89, "grad_norm": 1.783000880048568, "learning_rate": 
3.230691585259371e-07, "loss": 0.9038, "step": 8264 }, { "epoch": 0.89, "grad_norm": 1.7657973176558202, "learning_rate": 3.2245374993197577e-07, "loss": 0.8617, "step": 8265 }, { "epoch": 0.89, "grad_norm": 1.7646293345475237, "learning_rate": 3.218389085066237e-07, "loss": 0.9276, "step": 8266 }, { "epoch": 0.89, "grad_norm": 1.745825437204345, "learning_rate": 3.212246343244313e-07, "loss": 0.9316, "step": 8267 }, { "epoch": 0.89, "grad_norm": 1.7224294316372664, "learning_rate": 3.2061092745988175e-07, "loss": 0.8668, "step": 8268 }, { "epoch": 0.89, "grad_norm": 0.7830915441469475, "learning_rate": 3.1999778798739057e-07, "loss": 1.061, "step": 8269 }, { "epoch": 0.89, "grad_norm": 1.7398840659731118, "learning_rate": 3.1938521598130213e-07, "loss": 0.8412, "step": 8270 }, { "epoch": 0.89, "grad_norm": 1.5959840269064922, "learning_rate": 3.18773211515892e-07, "loss": 0.8359, "step": 8271 }, { "epoch": 0.89, "grad_norm": 1.7159229727459169, "learning_rate": 3.181617746653687e-07, "loss": 0.938, "step": 8272 }, { "epoch": 0.89, "grad_norm": 1.7288830507637387, "learning_rate": 3.1755090550387167e-07, "loss": 0.8879, "step": 8273 }, { "epoch": 0.89, "grad_norm": 1.7225100291805466, "learning_rate": 3.169406041054696e-07, "loss": 0.8606, "step": 8274 }, { "epoch": 0.89, "grad_norm": 1.7013841561081522, "learning_rate": 3.163308705441648e-07, "loss": 0.8811, "step": 8275 }, { "epoch": 0.89, "grad_norm": 1.6950553143608977, "learning_rate": 3.1572170489388823e-07, "loss": 0.8879, "step": 8276 }, { "epoch": 0.89, "grad_norm": 1.6317523985256812, "learning_rate": 3.151131072285041e-07, "loss": 0.8789, "step": 8277 }, { "epoch": 0.89, "grad_norm": 1.781445641160241, "learning_rate": 3.145050776218078e-07, "loss": 0.9204, "step": 8278 }, { "epoch": 0.89, "grad_norm": 1.7186599361799402, "learning_rate": 3.1389761614752376e-07, "loss": 0.9153, "step": 8279 }, { "epoch": 0.89, "grad_norm": 1.7618082148956338, "learning_rate": 3.132907228793086e-07, "loss": 0.9007, "step": 8280 }, { "epoch": 0.89, "grad_norm": 1.768144975270962, "learning_rate": 3.126843978907518e-07, "loss": 0.9053, "step": 8281 }, { "epoch": 0.89, "grad_norm": 1.701342747693401, "learning_rate": 3.1207864125537167e-07, "loss": 0.9245, "step": 8282 }, { "epoch": 0.89, "grad_norm": 1.7650422614169508, "learning_rate": 3.1147345304661736e-07, "loss": 0.927, "step": 8283 }, { "epoch": 0.89, "grad_norm": 1.5649051054413041, "learning_rate": 3.108688333378701e-07, "loss": 0.8462, "step": 8284 }, { "epoch": 0.89, "grad_norm": 1.6401365902808507, "learning_rate": 3.1026478220244406e-07, "loss": 0.8064, "step": 8285 }, { "epoch": 0.89, "grad_norm": 1.628356480846652, "learning_rate": 3.0966129971358003e-07, "loss": 0.7961, "step": 8286 }, { "epoch": 0.89, "grad_norm": 1.7647937697238762, "learning_rate": 3.090583859444535e-07, "loss": 0.8838, "step": 8287 }, { "epoch": 0.89, "grad_norm": 1.7059483862094622, "learning_rate": 3.084560409681703e-07, "loss": 0.8476, "step": 8288 }, { "epoch": 0.89, "grad_norm": 1.730200703057485, "learning_rate": 3.078542648577659e-07, "loss": 0.8784, "step": 8289 }, { "epoch": 0.89, "grad_norm": 1.750077237880868, "learning_rate": 3.0725305768620815e-07, "loss": 0.9091, "step": 8290 }, { "epoch": 0.89, "grad_norm": 1.7395107309558624, "learning_rate": 3.0665241952639704e-07, "loss": 0.8836, "step": 8291 }, { "epoch": 0.89, "grad_norm": 1.6398811306243006, "learning_rate": 3.0605235045115867e-07, "loss": 0.8735, "step": 8292 }, { "epoch": 0.89, "grad_norm": 1.7090003297543044, "learning_rate": 
3.0545285053325604e-07, "loss": 0.8159, "step": 8293 }, { "epoch": 0.89, "grad_norm": 1.7567922594390677, "learning_rate": 3.048539198453798e-07, "loss": 0.9556, "step": 8294 }, { "epoch": 0.89, "grad_norm": 1.6303668623500427, "learning_rate": 3.04255558460152e-07, "loss": 0.7677, "step": 8295 }, { "epoch": 0.89, "grad_norm": 1.6858998984106728, "learning_rate": 3.036577664501267e-07, "loss": 0.9164, "step": 8296 }, { "epoch": 0.89, "grad_norm": 1.759862512496004, "learning_rate": 3.030605438877882e-07, "loss": 0.9276, "step": 8297 }, { "epoch": 0.89, "grad_norm": 1.7814795410738065, "learning_rate": 3.024638908455513e-07, "loss": 0.8408, "step": 8298 }, { "epoch": 0.89, "grad_norm": 1.6193404930908049, "learning_rate": 3.0186780739576204e-07, "loss": 0.8317, "step": 8299 }, { "epoch": 0.89, "grad_norm": 1.6828976996261342, "learning_rate": 3.012722936106993e-07, "loss": 0.9197, "step": 8300 }, { "epoch": 0.89, "grad_norm": 1.7360006352193476, "learning_rate": 3.0067734956256867e-07, "loss": 0.8787, "step": 8301 }, { "epoch": 0.89, "grad_norm": 1.716712007892586, "learning_rate": 3.000829753235118e-07, "loss": 0.8195, "step": 8302 }, { "epoch": 0.89, "grad_norm": 1.7279039330397081, "learning_rate": 2.9948917096559617e-07, "loss": 0.9261, "step": 8303 }, { "epoch": 0.89, "grad_norm": 1.8128127608369178, "learning_rate": 2.9889593656082405e-07, "loss": 0.9598, "step": 8304 }, { "epoch": 0.89, "grad_norm": 1.7257820544096112, "learning_rate": 2.983032721811263e-07, "loss": 0.9341, "step": 8305 }, { "epoch": 0.89, "grad_norm": 1.8916615514831667, "learning_rate": 2.9771117789836713e-07, "loss": 0.8638, "step": 8306 }, { "epoch": 0.89, "grad_norm": 1.6746276576816008, "learning_rate": 2.971196537843379e-07, "loss": 0.8717, "step": 8307 }, { "epoch": 0.89, "grad_norm": 1.891322867628431, "learning_rate": 2.9652869991076414e-07, "loss": 0.9272, "step": 8308 }, { "epoch": 0.89, "grad_norm": 1.6125276485623985, "learning_rate": 2.9593831634930127e-07, "loss": 0.8787, "step": 8309 }, { "epoch": 0.89, "grad_norm": 1.5978542956039876, "learning_rate": 2.953485031715342e-07, "loss": 0.8887, "step": 8310 }, { "epoch": 0.89, "grad_norm": 1.701120392388444, "learning_rate": 2.947592604489807e-07, "loss": 0.9508, "step": 8311 }, { "epoch": 0.89, "grad_norm": 1.6976429829658428, "learning_rate": 2.9417058825308874e-07, "loss": 0.8175, "step": 8312 }, { "epoch": 0.89, "grad_norm": 1.9023824783139522, "learning_rate": 2.935824866552367e-07, "loss": 0.8653, "step": 8313 }, { "epoch": 0.89, "grad_norm": 0.7769641107788798, "learning_rate": 2.929949557267331e-07, "loss": 1.0537, "step": 8314 }, { "epoch": 0.89, "grad_norm": 1.6354062561497018, "learning_rate": 2.9240799553881813e-07, "loss": 0.8564, "step": 8315 }, { "epoch": 0.89, "grad_norm": 1.756939835427688, "learning_rate": 2.918216061626644e-07, "loss": 0.9193, "step": 8316 }, { "epoch": 0.89, "grad_norm": 1.5930890217357605, "learning_rate": 2.912357876693711e-07, "loss": 0.9139, "step": 8317 }, { "epoch": 0.89, "grad_norm": 1.7678403777636653, "learning_rate": 2.9065054012997305e-07, "loss": 0.8984, "step": 8318 }, { "epoch": 0.89, "grad_norm": 0.7690569541035667, "learning_rate": 2.900658636154324e-07, "loss": 0.999, "step": 8319 }, { "epoch": 0.89, "grad_norm": 1.7442246667689518, "learning_rate": 2.894817581966436e-07, "loss": 0.8686, "step": 8320 }, { "epoch": 0.89, "grad_norm": 1.784033595959471, "learning_rate": 2.8889822394443047e-07, "loss": 0.8895, "step": 8321 }, { "epoch": 0.89, "grad_norm": 1.7301961162666224, "learning_rate": 
2.883152609295503e-07, "loss": 0.8947, "step": 8322 }, { "epoch": 0.89, "grad_norm": 1.8060890418602493, "learning_rate": 2.8773286922268885e-07, "loss": 0.9192, "step": 8323 }, { "epoch": 0.89, "grad_norm": 1.5734727175261571, "learning_rate": 2.871510488944612e-07, "loss": 0.9217, "step": 8324 }, { "epoch": 0.89, "grad_norm": 1.9352344545709945, "learning_rate": 2.8656980001541766e-07, "loss": 0.8876, "step": 8325 }, { "epoch": 0.9, "grad_norm": 0.7989544039765482, "learning_rate": 2.859891226560346e-07, "loss": 1.0072, "step": 8326 }, { "epoch": 0.9, "grad_norm": 1.7527492240774807, "learning_rate": 2.854090168867224e-07, "loss": 0.8461, "step": 8327 }, { "epoch": 0.9, "grad_norm": 1.8067250440673923, "learning_rate": 2.848294827778214e-07, "loss": 0.9115, "step": 8328 }, { "epoch": 0.9, "grad_norm": 1.7137661277835317, "learning_rate": 2.8425052039959986e-07, "loss": 0.8397, "step": 8329 }, { "epoch": 0.9, "grad_norm": 1.6971533338129419, "learning_rate": 2.836721298222611e-07, "loss": 0.8713, "step": 8330 }, { "epoch": 0.9, "grad_norm": 1.746670294137461, "learning_rate": 2.830943111159368e-07, "loss": 0.8809, "step": 8331 }, { "epoch": 0.9, "grad_norm": 1.7232542262427302, "learning_rate": 2.8251706435068806e-07, "loss": 0.8636, "step": 8332 }, { "epoch": 0.9, "grad_norm": 1.7228696503616736, "learning_rate": 2.8194038959650895e-07, "loss": 0.9181, "step": 8333 }, { "epoch": 0.9, "grad_norm": 0.7863185987799219, "learning_rate": 2.813642869233235e-07, "loss": 1.0583, "step": 8334 }, { "epoch": 0.9, "grad_norm": 1.7060508284130371, "learning_rate": 2.8078875640098646e-07, "loss": 0.9059, "step": 8335 }, { "epoch": 0.9, "grad_norm": 1.6559340539382477, "learning_rate": 2.8021379809928137e-07, "loss": 0.9174, "step": 8336 }, { "epoch": 0.9, "grad_norm": 1.7585103187938036, "learning_rate": 2.796394120879259e-07, "loss": 0.91, "step": 8337 }, { "epoch": 0.9, "grad_norm": 2.8612956859620318, "learning_rate": 2.790655984365642e-07, "loss": 0.8718, "step": 8338 }, { "epoch": 0.9, "grad_norm": 1.651371288112955, "learning_rate": 2.7849235721477407e-07, "loss": 0.9186, "step": 8339 }, { "epoch": 0.9, "grad_norm": 1.7312306160003528, "learning_rate": 2.779196884920643e-07, "loss": 0.7828, "step": 8340 }, { "epoch": 0.9, "grad_norm": 1.7241040755330146, "learning_rate": 2.7734759233787046e-07, "loss": 0.8543, "step": 8341 }, { "epoch": 0.9, "grad_norm": 1.6961230174611481, "learning_rate": 2.7677606882156314e-07, "loss": 0.898, "step": 8342 }, { "epoch": 0.9, "grad_norm": 1.7704046932855195, "learning_rate": 2.762051180124414e-07, "loss": 0.8781, "step": 8343 }, { "epoch": 0.9, "grad_norm": 1.7873476692264727, "learning_rate": 2.7563473997973434e-07, "loss": 0.8949, "step": 8344 }, { "epoch": 0.9, "grad_norm": 1.8984121809831151, "learning_rate": 2.750649347926021e-07, "loss": 0.8325, "step": 8345 }, { "epoch": 0.9, "grad_norm": 0.7712463258793013, "learning_rate": 2.7449570252013557e-07, "loss": 1.0244, "step": 8346 }, { "epoch": 0.9, "grad_norm": 1.7477946343504673, "learning_rate": 2.7392704323135676e-07, "loss": 0.9346, "step": 8347 }, { "epoch": 0.9, "grad_norm": 1.8349171715912473, "learning_rate": 2.733589569952172e-07, "loss": 0.8337, "step": 8348 }, { "epoch": 0.9, "grad_norm": 1.6531814841103745, "learning_rate": 2.7279144388059895e-07, "loss": 0.8791, "step": 8349 }, { "epoch": 0.9, "grad_norm": 1.6560968214091845, "learning_rate": 2.7222450395631593e-07, "loss": 0.8324, "step": 8350 }, { "epoch": 0.9, "grad_norm": 1.770585030882444, "learning_rate": 2.7165813729111035e-07, "loss": 
0.9123, "step": 8351 }, { "epoch": 0.9, "grad_norm": 1.9754424032871574, "learning_rate": 2.7109234395365666e-07, "loss": 0.8646, "step": 8352 }, { "epoch": 0.9, "grad_norm": 1.7730158791832826, "learning_rate": 2.705271240125601e-07, "loss": 0.8872, "step": 8353 }, { "epoch": 0.9, "grad_norm": 1.7241171443052736, "learning_rate": 2.699624775363541e-07, "loss": 0.8521, "step": 8354 }, { "epoch": 0.9, "grad_norm": 0.8021656756133565, "learning_rate": 2.6939840459350497e-07, "loss": 1.0402, "step": 8355 }, { "epoch": 0.9, "grad_norm": 1.8295368356647344, "learning_rate": 2.688349052524081e-07, "loss": 0.8653, "step": 8356 }, { "epoch": 0.9, "grad_norm": 1.8299443167732117, "learning_rate": 2.6827197958138927e-07, "loss": 0.789, "step": 8357 }, { "epoch": 0.9, "grad_norm": 1.6279266773442387, "learning_rate": 2.6770962764870625e-07, "loss": 0.879, "step": 8358 }, { "epoch": 0.9, "grad_norm": 1.6396356738048945, "learning_rate": 2.6714784952254546e-07, "loss": 0.8332, "step": 8359 }, { "epoch": 0.9, "grad_norm": 1.8739378759025611, "learning_rate": 2.665866452710242e-07, "loss": 0.8668, "step": 8360 }, { "epoch": 0.9, "grad_norm": 1.7274792989861147, "learning_rate": 2.6602601496219073e-07, "loss": 0.8622, "step": 8361 }, { "epoch": 0.9, "grad_norm": 0.8215880602488403, "learning_rate": 2.65465958664024e-07, "loss": 1.0652, "step": 8362 }, { "epoch": 0.9, "grad_norm": 0.8149898291187118, "learning_rate": 2.6490647644443145e-07, "loss": 1.0473, "step": 8363 }, { "epoch": 0.9, "grad_norm": 1.6967209402491683, "learning_rate": 2.643475683712532e-07, "loss": 0.9706, "step": 8364 }, { "epoch": 0.9, "grad_norm": 1.8754854217414039, "learning_rate": 2.6378923451225893e-07, "loss": 0.795, "step": 8365 }, { "epoch": 0.9, "grad_norm": 1.7018712744058277, "learning_rate": 2.632314749351483e-07, "loss": 0.9416, "step": 8366 }, { "epoch": 0.9, "grad_norm": 1.8964919139136545, "learning_rate": 2.6267428970755124e-07, "loss": 0.9684, "step": 8367 }, { "epoch": 0.9, "grad_norm": 1.7738727748780305, "learning_rate": 2.6211767889702813e-07, "loss": 0.868, "step": 8368 }, { "epoch": 0.9, "grad_norm": 1.576380069339271, "learning_rate": 2.61561642571071e-07, "loss": 0.94, "step": 8369 }, { "epoch": 0.9, "grad_norm": 1.7461751968087937, "learning_rate": 2.610061807971004e-07, "loss": 0.8751, "step": 8370 }, { "epoch": 0.9, "grad_norm": 1.7166950632434579, "learning_rate": 2.6045129364246857e-07, "loss": 0.8049, "step": 8371 }, { "epoch": 0.9, "grad_norm": 0.7769283108857284, "learning_rate": 2.598969811744562e-07, "loss": 1.0655, "step": 8372 }, { "epoch": 0.9, "grad_norm": 1.7150001076598882, "learning_rate": 2.593432434602766e-07, "loss": 0.9434, "step": 8373 }, { "epoch": 0.9, "grad_norm": 1.672542349634942, "learning_rate": 2.5879008056707286e-07, "loss": 0.9167, "step": 8374 }, { "epoch": 0.9, "grad_norm": 1.7018201318770547, "learning_rate": 2.5823749256191686e-07, "loss": 0.824, "step": 8375 }, { "epoch": 0.9, "grad_norm": 1.7319519380302282, "learning_rate": 2.5768547951181276e-07, "loss": 0.9228, "step": 8376 }, { "epoch": 0.9, "grad_norm": 1.653157083413519, "learning_rate": 2.5713404148369316e-07, "loss": 0.8738, "step": 8377 }, { "epoch": 0.9, "grad_norm": 1.9419672351449841, "learning_rate": 2.5658317854442283e-07, "loss": 0.9794, "step": 8378 }, { "epoch": 0.9, "grad_norm": 1.7488285223565594, "learning_rate": 2.56032890760794e-07, "loss": 0.8887, "step": 8379 }, { "epoch": 0.9, "grad_norm": 1.7065456769529799, "learning_rate": 2.5548317819953205e-07, "loss": 0.9228, "step": 8380 }, { "epoch": 0.9, 
"grad_norm": 1.7470408461025857, "learning_rate": 2.5493404092729267e-07, "loss": 0.9296, "step": 8381 }, { "epoch": 0.9, "grad_norm": 1.8351294771186066, "learning_rate": 2.543854790106587e-07, "loss": 0.8794, "step": 8382 }, { "epoch": 0.9, "grad_norm": 1.719370269714994, "learning_rate": 2.538374925161463e-07, "loss": 0.8651, "step": 8383 }, { "epoch": 0.9, "grad_norm": 0.7963835562171078, "learning_rate": 2.5329008151020076e-07, "loss": 1.06, "step": 8384 }, { "epoch": 0.9, "grad_norm": 1.8770621158887415, "learning_rate": 2.5274324605919664e-07, "loss": 0.9537, "step": 8385 }, { "epoch": 0.9, "grad_norm": 1.9152526542136643, "learning_rate": 2.5219698622944044e-07, "loss": 0.9409, "step": 8386 }, { "epoch": 0.9, "grad_norm": 1.7078238432815724, "learning_rate": 2.5165130208716913e-07, "loss": 0.9924, "step": 8387 }, { "epoch": 0.9, "grad_norm": 1.6103081463452098, "learning_rate": 2.5110619369854594e-07, "loss": 0.8259, "step": 8388 }, { "epoch": 0.9, "grad_norm": 1.918953951719248, "learning_rate": 2.505616611296685e-07, "loss": 0.8913, "step": 8389 }, { "epoch": 0.9, "grad_norm": 1.7893580895071342, "learning_rate": 2.500177044465646e-07, "loss": 0.8251, "step": 8390 }, { "epoch": 0.9, "grad_norm": 1.6967829828997167, "learning_rate": 2.494743237151892e-07, "loss": 0.822, "step": 8391 }, { "epoch": 0.9, "grad_norm": 1.9384750155627701, "learning_rate": 2.489315190014291e-07, "loss": 0.8577, "step": 8392 }, { "epoch": 0.9, "grad_norm": 1.6954704728568617, "learning_rate": 2.483892903711027e-07, "loss": 0.8249, "step": 8393 }, { "epoch": 0.9, "grad_norm": 1.795808404921678, "learning_rate": 2.4784763788995527e-07, "loss": 0.9206, "step": 8394 }, { "epoch": 0.9, "grad_norm": 1.7046670857868997, "learning_rate": 2.473065616236647e-07, "loss": 0.846, "step": 8395 }, { "epoch": 0.9, "grad_norm": 1.720535573088482, "learning_rate": 2.467660616378398e-07, "loss": 0.9594, "step": 8396 }, { "epoch": 0.9, "grad_norm": 1.7085437143882398, "learning_rate": 2.462261379980163e-07, "loss": 0.925, "step": 8397 }, { "epoch": 0.9, "grad_norm": 1.7935701679262799, "learning_rate": 2.4568679076966197e-07, "loss": 0.8651, "step": 8398 }, { "epoch": 0.9, "grad_norm": 1.719374768746955, "learning_rate": 2.451480200181744e-07, "loss": 0.8945, "step": 8399 }, { "epoch": 0.9, "grad_norm": 1.7209782763431611, "learning_rate": 2.44609825808883e-07, "loss": 0.891, "step": 8400 }, { "epoch": 0.9, "grad_norm": 1.7408933591433935, "learning_rate": 2.44072208207044e-07, "loss": 0.9241, "step": 8401 }, { "epoch": 0.9, "grad_norm": 1.7867322790798434, "learning_rate": 2.435351672778463e-07, "loss": 0.8921, "step": 8402 }, { "epoch": 0.9, "grad_norm": 1.7211992872557857, "learning_rate": 2.4299870308640726e-07, "loss": 0.8551, "step": 8403 }, { "epoch": 0.9, "grad_norm": 1.6660130091027334, "learning_rate": 2.4246281569777485e-07, "loss": 0.8792, "step": 8404 }, { "epoch": 0.9, "grad_norm": 1.7707080399922959, "learning_rate": 2.4192750517692877e-07, "loss": 0.8595, "step": 8405 }, { "epoch": 0.9, "grad_norm": 1.8222771568290337, "learning_rate": 2.4139277158877537e-07, "loss": 0.8393, "step": 8406 }, { "epoch": 0.9, "grad_norm": 1.741653652942318, "learning_rate": 2.40858614998154e-07, "loss": 0.8889, "step": 8407 }, { "epoch": 0.9, "grad_norm": 1.7221904548262505, "learning_rate": 2.403250354698333e-07, "loss": 0.8019, "step": 8408 }, { "epoch": 0.9, "grad_norm": 1.693263474033574, "learning_rate": 2.3979203306851165e-07, "loss": 0.796, "step": 8409 }, { "epoch": 0.9, "grad_norm": 1.7828284411830166, "learning_rate": 
2.3925960785881554e-07, "loss": 0.934, "step": 8410 }, { "epoch": 0.9, "grad_norm": 1.6880230606022133, "learning_rate": 2.3872775990530505e-07, "loss": 0.8203, "step": 8411 }, { "epoch": 0.9, "grad_norm": 1.8002601522633312, "learning_rate": 2.381964892724692e-07, "loss": 0.8468, "step": 8412 }, { "epoch": 0.9, "grad_norm": 1.7974549445599064, "learning_rate": 2.3766579602472474e-07, "loss": 0.9479, "step": 8413 }, { "epoch": 0.9, "grad_norm": 1.752061782235739, "learning_rate": 2.3713568022642018e-07, "loss": 0.8755, "step": 8414 }, { "epoch": 0.9, "grad_norm": 1.704083552680782, "learning_rate": 2.3660614194183584e-07, "loss": 0.9732, "step": 8415 }, { "epoch": 0.9, "grad_norm": 1.7198376069406256, "learning_rate": 2.3607718123517753e-07, "loss": 0.9093, "step": 8416 }, { "epoch": 0.9, "grad_norm": 1.6589696253128774, "learning_rate": 2.3554879817058507e-07, "loss": 0.8737, "step": 8417 }, { "epoch": 0.9, "grad_norm": 1.7727194899693068, "learning_rate": 2.3502099281212775e-07, "loss": 0.8379, "step": 8418 }, { "epoch": 0.91, "grad_norm": 1.6256817978482674, "learning_rate": 2.3449376522380106e-07, "loss": 0.8583, "step": 8419 }, { "epoch": 0.91, "grad_norm": 1.7510024964021975, "learning_rate": 2.3396711546953444e-07, "loss": 0.8703, "step": 8420 }, { "epoch": 0.91, "grad_norm": 1.8269968374734757, "learning_rate": 2.3344104361318677e-07, "loss": 0.8256, "step": 8421 }, { "epoch": 0.91, "grad_norm": 0.8023354534019362, "learning_rate": 2.329155497185448e-07, "loss": 1.0808, "step": 8422 }, { "epoch": 0.91, "grad_norm": 0.7643709267344734, "learning_rate": 2.3239063384932703e-07, "loss": 1.0539, "step": 8423 }, { "epoch": 0.91, "grad_norm": 1.721198422373936, "learning_rate": 2.3186629606918198e-07, "loss": 0.8703, "step": 8424 }, { "epoch": 0.91, "grad_norm": 1.7807718538188582, "learning_rate": 2.3134253644168604e-07, "loss": 0.9133, "step": 8425 }, { "epoch": 0.91, "grad_norm": 1.7074900577158572, "learning_rate": 2.308193550303478e-07, "loss": 0.9227, "step": 8426 }, { "epoch": 0.91, "grad_norm": 1.7204427297591909, "learning_rate": 2.3029675189860546e-07, "loss": 0.9105, "step": 8427 }, { "epoch": 0.91, "grad_norm": 1.6632258403211164, "learning_rate": 2.297747271098244e-07, "loss": 0.9132, "step": 8428 }, { "epoch": 0.91, "grad_norm": 1.7332858661318864, "learning_rate": 2.2925328072730402e-07, "loss": 0.8888, "step": 8429 }, { "epoch": 0.91, "grad_norm": 1.7260645347112678, "learning_rate": 2.287324128142704e-07, "loss": 0.9496, "step": 8430 }, { "epoch": 0.91, "grad_norm": 1.6612688491627783, "learning_rate": 2.2821212343388077e-07, "loss": 0.9502, "step": 8431 }, { "epoch": 0.91, "grad_norm": 1.678469187492468, "learning_rate": 2.2769241264922194e-07, "loss": 0.8257, "step": 8432 }, { "epoch": 0.91, "grad_norm": 1.7129464758033428, "learning_rate": 2.2717328052331122e-07, "loss": 0.8725, "step": 8433 }, { "epoch": 0.91, "grad_norm": 1.7036683279732037, "learning_rate": 2.2665472711909387e-07, "loss": 0.9064, "step": 8434 }, { "epoch": 0.91, "grad_norm": 1.7184377785281197, "learning_rate": 2.2613675249944677e-07, "loss": 0.8989, "step": 8435 }, { "epoch": 0.91, "grad_norm": 1.8476081155428064, "learning_rate": 2.256193567271775e-07, "loss": 0.9201, "step": 8436 }, { "epoch": 0.91, "grad_norm": 1.6252869184458472, "learning_rate": 2.251025398650203e-07, "loss": 0.943, "step": 8437 }, { "epoch": 0.91, "grad_norm": 1.832413032139026, "learning_rate": 2.2458630197564225e-07, "loss": 0.9019, "step": 8438 }, { "epoch": 0.91, "grad_norm": 1.6138752285384295, "learning_rate": 
2.240706431216383e-07, "loss": 0.8514, "step": 8439 }, { "epoch": 0.91, "grad_norm": 1.7054899909506267, "learning_rate": 2.2355556336553452e-07, "loss": 0.8949, "step": 8440 }, { "epoch": 0.91, "grad_norm": 1.6680582260049517, "learning_rate": 2.2304106276978432e-07, "loss": 0.8939, "step": 8441 }, { "epoch": 0.91, "grad_norm": 1.7222854413360502, "learning_rate": 2.2252714139677445e-07, "loss": 0.9155, "step": 8442 }, { "epoch": 0.91, "grad_norm": 1.7430794003572796, "learning_rate": 2.2201379930882006e-07, "loss": 0.871, "step": 8443 }, { "epoch": 0.91, "grad_norm": 1.7575622928383645, "learning_rate": 2.2150103656816357e-07, "loss": 0.9337, "step": 8444 }, { "epoch": 0.91, "grad_norm": 1.7176389137049595, "learning_rate": 2.2098885323698027e-07, "loss": 0.8712, "step": 8445 }, { "epoch": 0.91, "grad_norm": 1.910411707725528, "learning_rate": 2.2047724937737547e-07, "loss": 0.8774, "step": 8446 }, { "epoch": 0.91, "grad_norm": 1.695132597174356, "learning_rate": 2.1996622505138064e-07, "loss": 0.8792, "step": 8447 }, { "epoch": 0.91, "grad_norm": 1.8518833280298195, "learning_rate": 2.1945578032096016e-07, "loss": 0.9209, "step": 8448 }, { "epoch": 0.91, "grad_norm": 1.8160747466788103, "learning_rate": 2.1894591524800835e-07, "loss": 0.9423, "step": 8449 }, { "epoch": 0.91, "grad_norm": 1.7455418439040133, "learning_rate": 2.1843662989434687e-07, "loss": 0.8595, "step": 8450 }, { "epoch": 0.91, "grad_norm": 1.6669567059849877, "learning_rate": 2.179279243217286e-07, "loss": 0.912, "step": 8451 }, { "epoch": 0.91, "grad_norm": 1.6944599810927288, "learning_rate": 2.1741979859183582e-07, "loss": 0.9169, "step": 8452 }, { "epoch": 0.91, "grad_norm": 1.6891837424699985, "learning_rate": 2.169122527662798e-07, "loss": 0.9134, "step": 8453 }, { "epoch": 0.91, "grad_norm": 1.7923096002992829, "learning_rate": 2.1640528690660302e-07, "loss": 0.9626, "step": 8454 }, { "epoch": 0.91, "grad_norm": 1.703668965755326, "learning_rate": 2.158989010742779e-07, "loss": 0.8634, "step": 8455 }, { "epoch": 0.91, "grad_norm": 1.793890274039902, "learning_rate": 2.1539309533070318e-07, "loss": 0.9155, "step": 8456 }, { "epoch": 0.91, "grad_norm": 1.6939145646838638, "learning_rate": 2.1488786973721088e-07, "loss": 0.8834, "step": 8457 }, { "epoch": 0.91, "grad_norm": 1.7453927219303724, "learning_rate": 2.1438322435506198e-07, "loss": 0.9499, "step": 8458 }, { "epoch": 0.91, "grad_norm": 1.9635421569272278, "learning_rate": 2.138791592454448e-07, "loss": 0.9039, "step": 8459 }, { "epoch": 0.91, "grad_norm": 1.7649214325215867, "learning_rate": 2.1337567446947982e-07, "loss": 0.8305, "step": 8460 }, { "epoch": 0.91, "grad_norm": 1.8188915626847422, "learning_rate": 2.1287277008821662e-07, "loss": 0.9119, "step": 8461 }, { "epoch": 0.91, "grad_norm": 1.747092580848208, "learning_rate": 2.1237044616263413e-07, "loss": 0.8697, "step": 8462 }, { "epoch": 0.91, "grad_norm": 1.675739433841438, "learning_rate": 2.118687027536398e-07, "loss": 0.8252, "step": 8463 }, { "epoch": 0.91, "grad_norm": 1.8392976662828129, "learning_rate": 2.113675399220727e-07, "loss": 0.928, "step": 8464 }, { "epoch": 0.91, "grad_norm": 1.7327069080525423, "learning_rate": 2.1086695772870092e-07, "loss": 0.8857, "step": 8465 }, { "epoch": 0.91, "grad_norm": 0.8065015711432533, "learning_rate": 2.1036695623422033e-07, "loss": 1.0524, "step": 8466 }, { "epoch": 0.91, "grad_norm": 1.6886803154044214, "learning_rate": 2.0986753549925963e-07, "loss": 0.8886, "step": 8467 }, { "epoch": 0.91, "grad_norm": 1.7236494289751492, "learning_rate": 
2.0936869558437322e-07, "loss": 0.8714, "step": 8468 }, { "epoch": 0.91, "grad_norm": 1.8117039882009591, "learning_rate": 2.0887043655004825e-07, "loss": 0.9404, "step": 8469 }, { "epoch": 0.91, "grad_norm": 1.6567330758464889, "learning_rate": 2.0837275845670135e-07, "loss": 0.8387, "step": 8470 }, { "epoch": 0.91, "grad_norm": 1.713941943336574, "learning_rate": 2.0787566136467708e-07, "loss": 0.8128, "step": 8471 }, { "epoch": 0.91, "grad_norm": 0.7699394482941314, "learning_rate": 2.0737914533424885e-07, "loss": 1.0469, "step": 8472 }, { "epoch": 0.91, "grad_norm": 1.7326594808579814, "learning_rate": 2.0688321042562186e-07, "loss": 0.844, "step": 8473 }, { "epoch": 0.91, "grad_norm": 1.7034135984118404, "learning_rate": 2.0638785669893026e-07, "loss": 0.8652, "step": 8474 }, { "epoch": 0.91, "grad_norm": 1.6692342093247456, "learning_rate": 2.0589308421423704e-07, "loss": 0.8674, "step": 8475 }, { "epoch": 0.91, "grad_norm": 1.710778118295827, "learning_rate": 2.053988930315348e-07, "loss": 0.8904, "step": 8476 }, { "epoch": 0.91, "grad_norm": 0.7821696875223789, "learning_rate": 2.0490528321074732e-07, "loss": 1.0428, "step": 8477 }, { "epoch": 0.91, "grad_norm": 1.6976011601590875, "learning_rate": 2.0441225481172444e-07, "loss": 0.9578, "step": 8478 }, { "epoch": 0.91, "grad_norm": 0.7993345218138387, "learning_rate": 2.0391980789424892e-07, "loss": 1.0652, "step": 8479 }, { "epoch": 0.91, "grad_norm": 1.682202090907931, "learning_rate": 2.034279425180319e-07, "loss": 0.9084, "step": 8480 }, { "epoch": 0.91, "grad_norm": 1.753365719757043, "learning_rate": 2.0293665874271228e-07, "loss": 0.8736, "step": 8481 }, { "epoch": 0.91, "grad_norm": 1.7986869587500376, "learning_rate": 2.0244595662786138e-07, "loss": 0.8739, "step": 8482 }, { "epoch": 0.91, "grad_norm": 1.6918596011043319, "learning_rate": 2.019558362329782e-07, "loss": 0.8443, "step": 8483 }, { "epoch": 0.91, "grad_norm": 1.756852160939759, "learning_rate": 2.014662976174908e-07, "loss": 0.865, "step": 8484 }, { "epoch": 0.91, "grad_norm": 1.7422059397927088, "learning_rate": 2.0097734084075726e-07, "loss": 0.8756, "step": 8485 }, { "epoch": 0.91, "grad_norm": 1.7333984969005671, "learning_rate": 2.004889659620668e-07, "loss": 0.8575, "step": 8486 }, { "epoch": 0.91, "grad_norm": 1.8960209357448137, "learning_rate": 2.0000117304063483e-07, "loss": 0.8923, "step": 8487 }, { "epoch": 0.91, "grad_norm": 1.7626632707194032, "learning_rate": 1.9951396213560904e-07, "loss": 0.9281, "step": 8488 }, { "epoch": 0.91, "grad_norm": 1.7132937473455472, "learning_rate": 1.9902733330606605e-07, "loss": 0.8907, "step": 8489 }, { "epoch": 0.91, "grad_norm": 1.7014448067685382, "learning_rate": 1.9854128661100924e-07, "loss": 0.7954, "step": 8490 }, { "epoch": 0.91, "grad_norm": 1.7639021809221131, "learning_rate": 1.980558221093748e-07, "loss": 0.9172, "step": 8491 }, { "epoch": 0.91, "grad_norm": 1.806218603040422, "learning_rate": 1.9757093986002729e-07, "loss": 0.8525, "step": 8492 }, { "epoch": 0.91, "grad_norm": 1.6727564552144012, "learning_rate": 1.9708663992175969e-07, "loss": 0.881, "step": 8493 }, { "epoch": 0.91, "grad_norm": 1.6952936396284455, "learning_rate": 1.966029223532945e-07, "loss": 0.8536, "step": 8494 }, { "epoch": 0.91, "grad_norm": 1.6982785184908866, "learning_rate": 1.9611978721328473e-07, "loss": 0.8891, "step": 8495 }, { "epoch": 0.91, "grad_norm": 1.7711968866501198, "learning_rate": 1.9563723456031303e-07, "loss": 0.8332, "step": 8496 }, { "epoch": 0.91, "grad_norm": 1.6929181513787197, "learning_rate": 
1.9515526445288923e-07, "loss": 0.9622, "step": 8497 }, { "epoch": 0.91, "grad_norm": 2.1043061295119343, "learning_rate": 1.946738769494555e-07, "loss": 0.8803, "step": 8498 }, { "epoch": 0.91, "grad_norm": 1.6806616032077493, "learning_rate": 1.9419307210837955e-07, "loss": 0.8256, "step": 8499 }, { "epoch": 0.91, "grad_norm": 1.730238576405702, "learning_rate": 1.9371284998796146e-07, "loss": 0.8754, "step": 8500 }, { "epoch": 0.91, "grad_norm": 1.792395664226214, "learning_rate": 1.9323321064643131e-07, "loss": 0.7976, "step": 8501 }, { "epoch": 0.91, "grad_norm": 0.7664670932858075, "learning_rate": 1.9275415414194476e-07, "loss": 1.0223, "step": 8502 }, { "epoch": 0.91, "grad_norm": 0.7972478238874077, "learning_rate": 1.9227568053259094e-07, "loss": 1.0165, "step": 8503 }, { "epoch": 0.91, "grad_norm": 1.7192192013580099, "learning_rate": 1.9179778987638508e-07, "loss": 0.8467, "step": 8504 }, { "epoch": 0.91, "grad_norm": 1.7802727537977003, "learning_rate": 1.913204822312742e-07, "loss": 0.902, "step": 8505 }, { "epoch": 0.91, "grad_norm": 1.7024995147397617, "learning_rate": 1.9084375765513197e-07, "loss": 0.9046, "step": 8506 }, { "epoch": 0.91, "grad_norm": 1.7209341527423896, "learning_rate": 1.9036761620576438e-07, "loss": 0.8791, "step": 8507 }, { "epoch": 0.91, "grad_norm": 1.8096865620255889, "learning_rate": 1.898920579409047e-07, "loss": 0.8976, "step": 8508 }, { "epoch": 0.91, "grad_norm": 1.7725630223497932, "learning_rate": 1.894170829182157e-07, "loss": 0.8603, "step": 8509 }, { "epoch": 0.91, "grad_norm": 1.6244322786712522, "learning_rate": 1.8894269119528963e-07, "loss": 0.9762, "step": 8510 }, { "epoch": 0.91, "grad_norm": 1.6681300451454673, "learning_rate": 1.8846888282964936e-07, "loss": 0.9358, "step": 8511 }, { "epoch": 0.92, "grad_norm": 1.6045761553831774, "learning_rate": 1.8799565787874397e-07, "loss": 0.8531, "step": 8512 }, { "epoch": 0.92, "grad_norm": 0.7400974822927735, "learning_rate": 1.8752301639995528e-07, "loss": 1.0396, "step": 8513 }, { "epoch": 0.92, "grad_norm": 1.728215221601243, "learning_rate": 1.8705095845059252e-07, "loss": 0.8429, "step": 8514 }, { "epoch": 0.92, "grad_norm": 1.7616294074481065, "learning_rate": 1.8657948408789261e-07, "loss": 0.9534, "step": 8515 }, { "epoch": 0.92, "grad_norm": 1.7567945222315553, "learning_rate": 1.8610859336902488e-07, "loss": 0.9328, "step": 8516 }, { "epoch": 0.92, "grad_norm": 1.753965621669419, "learning_rate": 1.8563828635108694e-07, "loss": 0.8379, "step": 8517 }, { "epoch": 0.92, "grad_norm": 0.7738314017867929, "learning_rate": 1.8516856309110375e-07, "loss": 1.0479, "step": 8518 }, { "epoch": 0.92, "grad_norm": 1.7415406504982403, "learning_rate": 1.8469942364603145e-07, "loss": 0.9295, "step": 8519 }, { "epoch": 0.92, "grad_norm": 1.675114518750102, "learning_rate": 1.8423086807275558e-07, "loss": 0.9131, "step": 8520 }, { "epoch": 0.92, "grad_norm": 1.7979009052966526, "learning_rate": 1.8376289642808854e-07, "loss": 0.8788, "step": 8521 }, { "epoch": 0.92, "grad_norm": 1.6164700638480836, "learning_rate": 1.8329550876877488e-07, "loss": 0.8423, "step": 8522 }, { "epoch": 0.92, "grad_norm": 1.88115533334524, "learning_rate": 1.8282870515148654e-07, "loss": 0.8835, "step": 8523 }, { "epoch": 0.92, "grad_norm": 0.7723357516652573, "learning_rate": 1.8236248563282545e-07, "loss": 1.0316, "step": 8524 }, { "epoch": 0.92, "grad_norm": 1.7045642686426519, "learning_rate": 1.8189685026932137e-07, "loss": 0.9114, "step": 8525 }, { "epoch": 0.92, "grad_norm": 1.7512391285362112, "learning_rate": 
1.8143179911743415e-07, "loss": 0.9187, "step": 8526 }, { "epoch": 0.92, "grad_norm": 0.7594527495060834, "learning_rate": 1.8096733223355477e-07, "loss": 1.0647, "step": 8527 }, { "epoch": 0.92, "grad_norm": 1.6821180999355072, "learning_rate": 1.805034496739988e-07, "loss": 0.9178, "step": 8528 }, { "epoch": 0.92, "grad_norm": 1.6517212399609746, "learning_rate": 1.8004015149501564e-07, "loss": 0.8735, "step": 8529 }, { "epoch": 0.92, "grad_norm": 0.7777244297333116, "learning_rate": 1.795774377527809e-07, "loss": 1.072, "step": 8530 }, { "epoch": 0.92, "grad_norm": 0.7602072672588146, "learning_rate": 1.7911530850339976e-07, "loss": 1.042, "step": 8531 }, { "epoch": 0.92, "grad_norm": 1.8446555072881352, "learning_rate": 1.7865376380290845e-07, "loss": 0.8834, "step": 8532 }, { "epoch": 0.92, "grad_norm": 0.7885163155853431, "learning_rate": 1.7819280370726943e-07, "loss": 1.028, "step": 8533 }, { "epoch": 0.92, "grad_norm": 0.7696796241308165, "learning_rate": 1.7773242827237635e-07, "loss": 1.0395, "step": 8534 }, { "epoch": 0.92, "grad_norm": 1.665116675871728, "learning_rate": 1.7727263755405178e-07, "loss": 0.9253, "step": 8535 }, { "epoch": 0.92, "grad_norm": 2.018858750091648, "learning_rate": 1.7681343160804608e-07, "loss": 0.8293, "step": 8536 }, { "epoch": 0.92, "grad_norm": 0.812482035625714, "learning_rate": 1.7635481049003979e-07, "loss": 1.0238, "step": 8537 }, { "epoch": 0.92, "grad_norm": 1.6641465448501882, "learning_rate": 1.7589677425564222e-07, "loss": 0.9254, "step": 8538 }, { "epoch": 0.92, "grad_norm": 1.7543591172854383, "learning_rate": 1.7543932296039234e-07, "loss": 0.9038, "step": 8539 }, { "epoch": 0.92, "grad_norm": 1.8754535017864549, "learning_rate": 1.7498245665975689e-07, "loss": 0.8137, "step": 8540 }, { "epoch": 0.92, "grad_norm": 1.6411972501537255, "learning_rate": 1.7452617540913265e-07, "loss": 0.8819, "step": 8541 }, { "epoch": 0.92, "grad_norm": 1.7660797706993099, "learning_rate": 1.7407047926384646e-07, "loss": 0.9021, "step": 8542 }, { "epoch": 0.92, "grad_norm": 1.7731319376752432, "learning_rate": 1.7361536827915138e-07, "loss": 0.8874, "step": 8543 }, { "epoch": 0.92, "grad_norm": 1.6959005306150499, "learning_rate": 1.7316084251023214e-07, "loss": 0.9626, "step": 8544 }, { "epoch": 0.92, "grad_norm": 1.6749510753422585, "learning_rate": 1.727069020122024e-07, "loss": 0.9228, "step": 8545 }, { "epoch": 0.92, "grad_norm": 1.7582386757157733, "learning_rate": 1.722535468401021e-07, "loss": 0.8895, "step": 8546 }, { "epoch": 0.92, "grad_norm": 1.6423586996199162, "learning_rate": 1.7180077704890274e-07, "loss": 0.911, "step": 8547 }, { "epoch": 0.92, "grad_norm": 1.8098696594569355, "learning_rate": 1.7134859269350546e-07, "loss": 0.9774, "step": 8548 }, { "epoch": 0.92, "grad_norm": 1.643070687851437, "learning_rate": 1.7089699382873748e-07, "loss": 0.8728, "step": 8549 }, { "epoch": 0.92, "grad_norm": 1.7424715862674363, "learning_rate": 1.7044598050935722e-07, "loss": 0.8723, "step": 8550 }, { "epoch": 0.92, "grad_norm": 1.7171669083927323, "learning_rate": 1.6999555279005264e-07, "loss": 0.8998, "step": 8551 }, { "epoch": 0.92, "grad_norm": 1.7666861252827353, "learning_rate": 1.695457107254378e-07, "loss": 0.8664, "step": 8552 }, { "epoch": 0.92, "grad_norm": 1.803948347648774, "learning_rate": 1.6909645437005906e-07, "loss": 0.866, "step": 8553 }, { "epoch": 0.92, "grad_norm": 1.7417987067852403, "learning_rate": 1.6864778377839064e-07, "loss": 0.8685, "step": 8554 }, { "epoch": 0.92, "grad_norm": 1.7885228084323712, "learning_rate": 
1.6819969900483347e-07, "loss": 0.9358, "step": 8555 }, { "epoch": 0.92, "grad_norm": 1.748405721495499, "learning_rate": 1.6775220010372185e-07, "loss": 0.8232, "step": 8556 }, { "epoch": 0.92, "grad_norm": 1.6992602968009074, "learning_rate": 1.6730528712931404e-07, "loss": 0.9112, "step": 8557 }, { "epoch": 0.92, "grad_norm": 1.722812051708577, "learning_rate": 1.6685896013580226e-07, "loss": 0.8543, "step": 8558 }, { "epoch": 0.92, "grad_norm": 1.7436185208111048, "learning_rate": 1.6641321917730268e-07, "loss": 0.8767, "step": 8559 }, { "epoch": 0.92, "grad_norm": 1.6559671466711385, "learning_rate": 1.6596806430786428e-07, "loss": 0.8702, "step": 8560 }, { "epoch": 0.92, "grad_norm": 1.8175757998681108, "learning_rate": 1.6552349558146442e-07, "loss": 0.8718, "step": 8561 }, { "epoch": 0.92, "grad_norm": 1.9094451019551455, "learning_rate": 1.650795130520072e-07, "loss": 0.834, "step": 8562 }, { "epoch": 0.92, "grad_norm": 1.6691151087680551, "learning_rate": 1.6463611677332792e-07, "loss": 0.8769, "step": 8563 }, { "epoch": 0.92, "grad_norm": 1.8929159848740218, "learning_rate": 1.6419330679918855e-07, "loss": 0.9367, "step": 8564 }, { "epoch": 0.92, "grad_norm": 1.7709278948730423, "learning_rate": 1.6375108318328282e-07, "loss": 0.9334, "step": 8565 }, { "epoch": 0.92, "grad_norm": 1.9020729073538922, "learning_rate": 1.633094459792317e-07, "loss": 0.9192, "step": 8566 }, { "epoch": 0.92, "grad_norm": 1.7258142148819962, "learning_rate": 1.6286839524058462e-07, "loss": 0.9369, "step": 8567 }, { "epoch": 0.92, "grad_norm": 1.6046401197121125, "learning_rate": 1.6242793102082043e-07, "loss": 0.8619, "step": 8568 }, { "epoch": 0.92, "grad_norm": 1.7651653168311088, "learning_rate": 1.6198805337334755e-07, "loss": 0.9917, "step": 8569 }, { "epoch": 0.92, "grad_norm": 1.606156060285631, "learning_rate": 1.6154876235150273e-07, "loss": 0.9296, "step": 8570 }, { "epoch": 0.92, "grad_norm": 0.7712999735482209, "learning_rate": 1.6111005800855063e-07, "loss": 1.009, "step": 8571 }, { "epoch": 0.92, "grad_norm": 1.714640988178553, "learning_rate": 1.6067194039768585e-07, "loss": 0.8256, "step": 8572 }, { "epoch": 0.92, "grad_norm": 1.8085848849985176, "learning_rate": 1.6023440957203262e-07, "loss": 0.9058, "step": 8573 }, { "epoch": 0.92, "grad_norm": 1.741224452877189, "learning_rate": 1.5979746558464237e-07, "loss": 0.7891, "step": 8574 }, { "epoch": 0.92, "grad_norm": 1.910881743105309, "learning_rate": 1.593611084884955e-07, "loss": 0.9369, "step": 8575 }, { "epoch": 0.92, "grad_norm": 1.6899516118649593, "learning_rate": 1.589253383365036e-07, "loss": 0.822, "step": 8576 }, { "epoch": 0.92, "grad_norm": 1.742788046430878, "learning_rate": 1.584901551815038e-07, "loss": 0.8711, "step": 8577 }, { "epoch": 0.92, "grad_norm": 1.6805741548736162, "learning_rate": 1.5805555907626336e-07, "loss": 0.8182, "step": 8578 }, { "epoch": 0.92, "grad_norm": 1.6785250935364462, "learning_rate": 1.5762155007347957e-07, "loss": 0.881, "step": 8579 }, { "epoch": 0.92, "grad_norm": 1.7208351744492294, "learning_rate": 1.57188128225777e-07, "loss": 0.8583, "step": 8580 }, { "epoch": 0.92, "grad_norm": 1.885055618111959, "learning_rate": 1.5675529358570918e-07, "loss": 0.9109, "step": 8581 }, { "epoch": 0.92, "grad_norm": 1.6745257921811123, "learning_rate": 1.5632304620575965e-07, "loss": 0.8848, "step": 8582 }, { "epoch": 0.92, "grad_norm": 1.5965811298179093, "learning_rate": 1.5589138613833933e-07, "loss": 0.8847, "step": 8583 }, { "epoch": 0.92, "grad_norm": 1.6985480216095172, "learning_rate": 
1.5546031343578793e-07, "loss": 0.7977, "step": 8584 }, { "epoch": 0.92, "grad_norm": 1.7027249039381507, "learning_rate": 1.5502982815037648e-07, "loss": 0.9643, "step": 8585 }, { "epoch": 0.92, "grad_norm": 1.7083201196376805, "learning_rate": 1.5459993033430043e-07, "loss": 0.8199, "step": 8586 }, { "epoch": 0.92, "grad_norm": 1.6519003964403711, "learning_rate": 1.5417062003968808e-07, "loss": 0.9287, "step": 8587 }, { "epoch": 0.92, "grad_norm": 1.6143944983311755, "learning_rate": 1.537418973185939e-07, "loss": 0.7871, "step": 8588 }, { "epoch": 0.92, "grad_norm": 2.1449883088908543, "learning_rate": 1.5331376222300297e-07, "loss": 0.9641, "step": 8589 }, { "epoch": 0.92, "grad_norm": 1.7421187784891359, "learning_rate": 1.5288621480482658e-07, "loss": 0.9642, "step": 8590 }, { "epoch": 0.92, "grad_norm": 1.8935650249451195, "learning_rate": 1.524592551159071e-07, "loss": 0.9316, "step": 8591 }, { "epoch": 0.92, "grad_norm": 1.8363662934056069, "learning_rate": 1.520328832080159e-07, "loss": 0.8487, "step": 8592 }, { "epoch": 0.92, "grad_norm": 1.7115228840756376, "learning_rate": 1.5160709913285e-07, "loss": 0.8388, "step": 8593 }, { "epoch": 0.92, "grad_norm": 1.7042478061518613, "learning_rate": 1.5118190294203915e-07, "loss": 0.8898, "step": 8594 }, { "epoch": 0.92, "grad_norm": 1.6773195577168234, "learning_rate": 1.5075729468713829e-07, "loss": 0.9223, "step": 8595 }, { "epoch": 0.92, "grad_norm": 1.7626343854936184, "learning_rate": 1.5033327441963398e-07, "loss": 0.892, "step": 8596 }, { "epoch": 0.92, "grad_norm": 1.647664347513808, "learning_rate": 1.4990984219093897e-07, "loss": 0.882, "step": 8597 }, { "epoch": 0.92, "grad_norm": 1.7576961355777236, "learning_rate": 1.4948699805239718e-07, "loss": 0.8661, "step": 8598 }, { "epoch": 0.92, "grad_norm": 1.843885358596449, "learning_rate": 1.490647420552782e-07, "loss": 0.9258, "step": 8599 }, { "epoch": 0.92, "grad_norm": 0.7791036932392247, "learning_rate": 1.486430742507833e-07, "loss": 1.038, "step": 8600 }, { "epoch": 0.92, "grad_norm": 1.611416654882545, "learning_rate": 1.4822199469004094e-07, "loss": 0.8511, "step": 8601 }, { "epoch": 0.92, "grad_norm": 1.7663359506786613, "learning_rate": 1.4780150342410814e-07, "loss": 0.8642, "step": 8602 }, { "epoch": 0.92, "grad_norm": 2.248257448631594, "learning_rate": 1.473816005039713e-07, "loss": 0.8756, "step": 8603 }, { "epoch": 0.92, "grad_norm": 1.664191794382842, "learning_rate": 1.4696228598054473e-07, "loss": 0.926, "step": 8604 }, { "epoch": 0.93, "grad_norm": 0.781773582720752, "learning_rate": 1.4654355990467217e-07, "loss": 1.0483, "step": 8605 }, { "epoch": 0.93, "grad_norm": 1.687709807486767, "learning_rate": 1.4612542232712522e-07, "loss": 0.9152, "step": 8606 }, { "epoch": 0.93, "grad_norm": 1.713970020227116, "learning_rate": 1.4570787329860504e-07, "loss": 0.8974, "step": 8607 }, { "epoch": 0.93, "grad_norm": 1.8031453211671327, "learning_rate": 1.4529091286973994e-07, "loss": 0.9181, "step": 8608 }, { "epoch": 0.93, "grad_norm": 1.7843435250796245, "learning_rate": 1.44874541091089e-07, "loss": 0.9556, "step": 8609 }, { "epoch": 0.93, "grad_norm": 1.8064631469895245, "learning_rate": 1.444587580131379e-07, "loss": 0.8684, "step": 8610 }, { "epoch": 0.93, "grad_norm": 1.7298369812512413, "learning_rate": 1.4404356368630134e-07, "loss": 0.8653, "step": 8611 }, { "epoch": 0.93, "grad_norm": 1.7255770693568269, "learning_rate": 1.4362895816092403e-07, "loss": 0.8636, "step": 8612 }, { "epoch": 0.93, "grad_norm": 0.8130630922670764, "learning_rate": 
1.4321494148727855e-07, "loss": 1.0349, "step": 8613 }, { "epoch": 0.93, "grad_norm": 1.7420409139949444, "learning_rate": 1.4280151371556472e-07, "loss": 1.0132, "step": 8614 }, { "epoch": 0.93, "grad_norm": 2.30230043707605, "learning_rate": 1.4238867489591302e-07, "loss": 0.8974, "step": 8615 }, { "epoch": 0.93, "grad_norm": 1.6548265642707094, "learning_rate": 1.4197642507838115e-07, "loss": 0.9238, "step": 8616 }, { "epoch": 0.93, "grad_norm": 1.9029855809286464, "learning_rate": 1.4156476431295586e-07, "loss": 0.856, "step": 8617 }, { "epoch": 0.93, "grad_norm": 2.0342822602750545, "learning_rate": 1.4115369264955215e-07, "loss": 0.8207, "step": 8618 }, { "epoch": 0.93, "grad_norm": 1.7511513432428554, "learning_rate": 1.4074321013801462e-07, "loss": 0.8704, "step": 8619 }, { "epoch": 0.93, "grad_norm": 1.7209689901016054, "learning_rate": 1.4033331682811568e-07, "loss": 0.818, "step": 8620 }, { "epoch": 0.93, "grad_norm": 1.651193755266146, "learning_rate": 1.3992401276955502e-07, "loss": 0.9065, "step": 8621 }, { "epoch": 0.93, "grad_norm": 1.7734970710199283, "learning_rate": 1.3951529801196295e-07, "loss": 0.9759, "step": 8622 }, { "epoch": 0.93, "grad_norm": 0.7350606916823506, "learning_rate": 1.391071726048987e-07, "loss": 1.0326, "step": 8623 }, { "epoch": 0.93, "grad_norm": 1.7185308467968199, "learning_rate": 1.3869963659784657e-07, "loss": 0.8437, "step": 8624 }, { "epoch": 0.93, "grad_norm": 1.8911353916896554, "learning_rate": 1.3829269004022426e-07, "loss": 0.9339, "step": 8625 }, { "epoch": 0.93, "grad_norm": 1.8372614409965347, "learning_rate": 1.3788633298137288e-07, "loss": 0.8918, "step": 8626 }, { "epoch": 0.93, "grad_norm": 1.8143927551793197, "learning_rate": 1.3748056547056632e-07, "loss": 0.9107, "step": 8627 }, { "epoch": 0.93, "grad_norm": 1.6863219394322964, "learning_rate": 1.3707538755700522e-07, "loss": 0.8685, "step": 8628 }, { "epoch": 0.93, "grad_norm": 1.689296259782041, "learning_rate": 1.3667079928981807e-07, "loss": 0.9151, "step": 8629 }, { "epoch": 0.93, "grad_norm": 1.77867239537121, "learning_rate": 1.3626680071806342e-07, "loss": 0.8845, "step": 8630 }, { "epoch": 0.93, "grad_norm": 1.658972879340762, "learning_rate": 1.358633918907265e-07, "loss": 0.8578, "step": 8631 }, { "epoch": 0.93, "grad_norm": 1.8144927322028328, "learning_rate": 1.354605728567232e-07, "loss": 0.8501, "step": 8632 }, { "epoch": 0.93, "grad_norm": 1.6879652895161257, "learning_rate": 1.3505834366489502e-07, "loss": 0.9072, "step": 8633 }, { "epoch": 0.93, "grad_norm": 1.692956573005911, "learning_rate": 1.3465670436401513e-07, "loss": 0.8445, "step": 8634 }, { "epoch": 0.93, "grad_norm": 0.7670710228349, "learning_rate": 1.3425565500278348e-07, "loss": 1.0488, "step": 8635 }, { "epoch": 0.93, "grad_norm": 1.6600288723290713, "learning_rate": 1.3385519562982842e-07, "loss": 0.8662, "step": 8636 }, { "epoch": 0.93, "grad_norm": 1.8537000796519754, "learning_rate": 1.334553262937066e-07, "loss": 0.9443, "step": 8637 }, { "epoch": 0.93, "grad_norm": 1.866380707334537, "learning_rate": 1.3305604704290432e-07, "loss": 0.9162, "step": 8638 }, { "epoch": 0.93, "grad_norm": 1.6739359278297261, "learning_rate": 1.32657357925835e-07, "loss": 0.9714, "step": 8639 }, { "epoch": 0.93, "grad_norm": 1.6035213794021221, "learning_rate": 1.322592589908417e-07, "loss": 0.8831, "step": 8640 }, { "epoch": 0.93, "grad_norm": 1.802876835066766, "learning_rate": 1.318617502861952e-07, "loss": 0.8485, "step": 8641 }, { "epoch": 0.93, "grad_norm": 1.6553619531839114, "learning_rate": 
1.3146483186009418e-07, "loss": 0.8322, "step": 8642 }, { "epoch": 0.93, "grad_norm": 0.7674791427060506, "learning_rate": 1.3106850376066626e-07, "loss": 1.0342, "step": 8643 }, { "epoch": 0.93, "grad_norm": 0.7942198810367045, "learning_rate": 1.3067276603596856e-07, "loss": 1.0256, "step": 8644 }, { "epoch": 0.93, "grad_norm": 1.664552276911771, "learning_rate": 1.3027761873398437e-07, "loss": 0.985, "step": 8645 }, { "epoch": 0.93, "grad_norm": 1.7239029450737329, "learning_rate": 1.2988306190262757e-07, "loss": 0.9023, "step": 8646 }, { "epoch": 0.93, "grad_norm": 1.7248444248076231, "learning_rate": 1.2948909558974045e-07, "loss": 0.8841, "step": 8647 }, { "epoch": 0.93, "grad_norm": 1.732531426524349, "learning_rate": 1.2909571984309032e-07, "loss": 0.8131, "step": 8648 }, { "epoch": 0.93, "grad_norm": 1.6443824266504774, "learning_rate": 1.2870293471037743e-07, "loss": 0.8407, "step": 8649 }, { "epoch": 0.93, "grad_norm": 1.6113504178037183, "learning_rate": 1.2831074023922752e-07, "loss": 0.839, "step": 8650 }, { "epoch": 0.93, "grad_norm": 1.7367237237901707, "learning_rate": 1.279191364771959e-07, "loss": 0.8609, "step": 8651 }, { "epoch": 0.93, "grad_norm": 1.6241966916615753, "learning_rate": 1.2752812347176514e-07, "loss": 0.8761, "step": 8652 }, { "epoch": 0.93, "grad_norm": 1.692614323211419, "learning_rate": 1.271377012703473e-07, "loss": 0.8808, "step": 8653 }, { "epoch": 0.93, "grad_norm": 1.707329389195086, "learning_rate": 1.2674786992028287e-07, "loss": 0.886, "step": 8654 }, { "epoch": 0.93, "grad_norm": 0.778124637957207, "learning_rate": 1.263586294688396e-07, "loss": 1.0452, "step": 8655 }, { "epoch": 0.93, "grad_norm": 1.7941056500910548, "learning_rate": 1.259699799632147e-07, "loss": 0.8921, "step": 8656 }, { "epoch": 0.93, "grad_norm": 1.6272474393238914, "learning_rate": 1.2558192145053327e-07, "loss": 0.9049, "step": 8657 }, { "epoch": 0.93, "grad_norm": 1.6868110655488864, "learning_rate": 1.251944539778477e-07, "loss": 0.8849, "step": 8658 }, { "epoch": 0.93, "grad_norm": 1.9194643456643747, "learning_rate": 1.2480757759214145e-07, "loss": 0.9094, "step": 8659 }, { "epoch": 0.93, "grad_norm": 1.696345155028626, "learning_rate": 1.2442129234032373e-07, "loss": 0.8693, "step": 8660 }, { "epoch": 0.93, "grad_norm": 1.7626151921816402, "learning_rate": 1.2403559826923205e-07, "loss": 0.7961, "step": 8661 }, { "epoch": 0.93, "grad_norm": 1.7404996254326943, "learning_rate": 1.236504954256351e-07, "loss": 0.9566, "step": 8662 }, { "epoch": 0.93, "grad_norm": 1.679352627921948, "learning_rate": 1.2326598385622723e-07, "loss": 0.833, "step": 8663 }, { "epoch": 0.93, "grad_norm": 1.6771439780358839, "learning_rate": 1.2288206360763056e-07, "loss": 0.8738, "step": 8664 }, { "epoch": 0.93, "grad_norm": 1.701352170001282, "learning_rate": 1.2249873472639783e-07, "loss": 0.8534, "step": 8665 }, { "epoch": 0.93, "grad_norm": 1.7715490483093246, "learning_rate": 1.2211599725900914e-07, "loss": 0.9436, "step": 8666 }, { "epoch": 0.93, "grad_norm": 1.823500016875931, "learning_rate": 1.2173385125187177e-07, "loss": 0.8881, "step": 8667 }, { "epoch": 0.93, "grad_norm": 1.9388614534985507, "learning_rate": 1.213522967513231e-07, "loss": 0.8037, "step": 8668 }, { "epoch": 0.93, "grad_norm": 1.680898710735779, "learning_rate": 1.2097133380362835e-07, "loss": 0.8771, "step": 8669 }, { "epoch": 0.93, "grad_norm": 1.76648139891266, "learning_rate": 1.2059096245497948e-07, "loss": 0.8195, "step": 8670 }, { "epoch": 0.93, "grad_norm": 1.6935585888768638, "learning_rate": 
1.202111827514979e-07, "loss": 0.8861, "step": 8671 }, { "epoch": 0.93, "grad_norm": 0.8098601469387469, "learning_rate": 1.1983199473923457e-07, "loss": 1.0908, "step": 8672 }, { "epoch": 0.93, "grad_norm": 1.6684278826931422, "learning_rate": 1.1945339846416603e-07, "loss": 0.9437, "step": 8673 }, { "epoch": 0.93, "grad_norm": 1.754932193984157, "learning_rate": 1.1907539397219837e-07, "loss": 0.8732, "step": 8674 }, { "epoch": 0.93, "grad_norm": 1.6832489565316293, "learning_rate": 1.1869798130916654e-07, "loss": 0.9166, "step": 8675 }, { "epoch": 0.93, "grad_norm": 1.6736674346043008, "learning_rate": 1.1832116052083232e-07, "loss": 0.9056, "step": 8676 }, { "epoch": 0.93, "grad_norm": 1.8126309096571516, "learning_rate": 1.1794493165288745e-07, "loss": 0.8342, "step": 8677 }, { "epoch": 0.93, "grad_norm": 1.6860264305810373, "learning_rate": 1.1756929475095103e-07, "loss": 0.8266, "step": 8678 }, { "epoch": 0.93, "grad_norm": 1.7989676083173267, "learning_rate": 1.1719424986056938e-07, "loss": 0.8745, "step": 8679 }, { "epoch": 0.93, "grad_norm": 1.6671562433377647, "learning_rate": 1.168197970272189e-07, "loss": 0.838, "step": 8680 }, { "epoch": 0.93, "grad_norm": 1.7190502965856658, "learning_rate": 1.1644593629630274e-07, "loss": 0.9228, "step": 8681 }, { "epoch": 0.93, "grad_norm": 1.7165512355743622, "learning_rate": 1.1607266771315295e-07, "loss": 0.9108, "step": 8682 }, { "epoch": 0.93, "grad_norm": 1.6235037306610982, "learning_rate": 1.1569999132303001e-07, "loss": 0.8189, "step": 8683 }, { "epoch": 0.93, "grad_norm": 1.7018717058913932, "learning_rate": 1.1532790717112164e-07, "loss": 0.9212, "step": 8684 }, { "epoch": 0.93, "grad_norm": 1.7679287715121683, "learning_rate": 1.1495641530254454e-07, "loss": 0.8869, "step": 8685 }, { "epoch": 0.93, "grad_norm": 1.677306390973604, "learning_rate": 1.1458551576234322e-07, "loss": 0.8992, "step": 8686 }, { "epoch": 0.93, "grad_norm": 1.8237467326056576, "learning_rate": 1.1421520859549063e-07, "loss": 0.9093, "step": 8687 }, { "epoch": 0.93, "grad_norm": 2.4676196670502355, "learning_rate": 1.1384549384688804e-07, "loss": 0.8605, "step": 8688 }, { "epoch": 0.93, "grad_norm": 1.7721265487395383, "learning_rate": 1.134763715613646e-07, "loss": 0.8752, "step": 8689 }, { "epoch": 0.93, "grad_norm": 1.746287931031475, "learning_rate": 1.1310784178367728e-07, "loss": 0.8731, "step": 8690 }, { "epoch": 0.93, "grad_norm": 0.8021840596513031, "learning_rate": 1.1273990455851203e-07, "loss": 1.0133, "step": 8691 }, { "epoch": 0.93, "grad_norm": 1.7972779039730296, "learning_rate": 1.1237255993048202e-07, "loss": 0.9204, "step": 8692 }, { "epoch": 0.93, "grad_norm": 1.5821575651945894, "learning_rate": 1.1200580794412997e-07, "loss": 0.786, "step": 8693 }, { "epoch": 0.93, "grad_norm": 1.685144881516343, "learning_rate": 1.1163964864392474e-07, "loss": 0.8352, "step": 8694 }, { "epoch": 0.93, "grad_norm": 1.802961833034365, "learning_rate": 1.1127408207426471e-07, "loss": 0.8492, "step": 8695 }, { "epoch": 0.93, "grad_norm": 0.7771086493368791, "learning_rate": 1.109091082794761e-07, "loss": 1.0999, "step": 8696 }, { "epoch": 0.93, "grad_norm": 1.8864989562134697, "learning_rate": 1.1054472730381405e-07, "loss": 0.8683, "step": 8697 }, { "epoch": 0.94, "grad_norm": 1.7794715471711953, "learning_rate": 1.1018093919145989e-07, "loss": 0.9312, "step": 8698 }, { "epoch": 0.94, "grad_norm": 1.8782141428372994, "learning_rate": 1.0981774398652446e-07, "loss": 0.8952, "step": 8699 }, { "epoch": 0.94, "grad_norm": 1.6887724005940208, 
"learning_rate": 1.0945514173304695e-07, "loss": 0.836, "step": 8700 }, { "epoch": 0.94, "grad_norm": 1.6561020357661516, "learning_rate": 1.0909313247499331e-07, "loss": 0.9074, "step": 8701 }, { "epoch": 0.94, "grad_norm": 1.807447588981726, "learning_rate": 1.08731716256259e-07, "loss": 0.8806, "step": 8702 }, { "epoch": 0.94, "grad_norm": 1.6915235198421559, "learning_rate": 1.0837089312066729e-07, "loss": 0.8927, "step": 8703 }, { "epoch": 0.94, "grad_norm": 1.6864662471900609, "learning_rate": 1.0801066311196872e-07, "loss": 0.8595, "step": 8704 }, { "epoch": 0.94, "grad_norm": 1.910660121310086, "learning_rate": 1.0765102627384227e-07, "loss": 0.8439, "step": 8705 }, { "epoch": 0.94, "grad_norm": 1.8040777317197338, "learning_rate": 1.0729198264989582e-07, "loss": 0.8684, "step": 8706 }, { "epoch": 0.94, "grad_norm": 1.6611983708931068, "learning_rate": 1.0693353228366344e-07, "loss": 0.7713, "step": 8707 }, { "epoch": 0.94, "grad_norm": 1.7055110134140223, "learning_rate": 1.0657567521860978e-07, "loss": 0.8861, "step": 8708 }, { "epoch": 0.94, "grad_norm": 1.7751152070795508, "learning_rate": 1.0621841149812573e-07, "loss": 0.8178, "step": 8709 }, { "epoch": 0.94, "grad_norm": 1.734301326268237, "learning_rate": 1.0586174116552994e-07, "loss": 0.8762, "step": 8710 }, { "epoch": 0.94, "grad_norm": 1.625066302517425, "learning_rate": 1.0550566426407117e-07, "loss": 0.8754, "step": 8711 }, { "epoch": 0.94, "grad_norm": 1.6829850258789252, "learning_rate": 1.0515018083692486e-07, "loss": 0.9457, "step": 8712 }, { "epoch": 0.94, "grad_norm": 1.785946456771606, "learning_rate": 1.0479529092719376e-07, "loss": 0.885, "step": 8713 }, { "epoch": 0.94, "grad_norm": 1.8912625434400956, "learning_rate": 1.0444099457791013e-07, "loss": 0.8776, "step": 8714 }, { "epoch": 0.94, "grad_norm": 0.7802010199443186, "learning_rate": 1.0408729183203403e-07, "loss": 0.9867, "step": 8715 }, { "epoch": 0.94, "grad_norm": 1.7398046738498467, "learning_rate": 1.0373418273245228e-07, "loss": 0.9068, "step": 8716 }, { "epoch": 0.94, "grad_norm": 1.6551546580853094, "learning_rate": 1.0338166732198062e-07, "loss": 0.8787, "step": 8717 }, { "epoch": 0.94, "grad_norm": 1.7321368640287063, "learning_rate": 1.0302974564336265e-07, "loss": 0.9183, "step": 8718 }, { "epoch": 0.94, "grad_norm": 0.768267936714945, "learning_rate": 1.0267841773927089e-07, "loss": 1.0255, "step": 8719 }, { "epoch": 0.94, "grad_norm": 1.6649649099514554, "learning_rate": 1.0232768365230462e-07, "loss": 0.7575, "step": 8720 }, { "epoch": 0.94, "grad_norm": 1.650647563833598, "learning_rate": 1.01977543424992e-07, "loss": 0.9331, "step": 8721 }, { "epoch": 0.94, "grad_norm": 1.7096864216095242, "learning_rate": 1.0162799709978743e-07, "loss": 0.9114, "step": 8722 }, { "epoch": 0.94, "grad_norm": 1.6977216704583267, "learning_rate": 1.012790447190759e-07, "loss": 0.8739, "step": 8723 }, { "epoch": 0.94, "grad_norm": 1.654371209262714, "learning_rate": 1.0093068632516855e-07, "loss": 0.8947, "step": 8724 }, { "epoch": 0.94, "grad_norm": 1.6502943794088205, "learning_rate": 1.005829219603055e-07, "loss": 0.8863, "step": 8725 }, { "epoch": 0.94, "grad_norm": 1.7266107456447235, "learning_rate": 1.0023575166665356e-07, "loss": 0.9146, "step": 8726 }, { "epoch": 0.94, "grad_norm": 1.7191674171784719, "learning_rate": 9.98891754863085e-08, "loss": 0.9053, "step": 8727 }, { "epoch": 0.94, "grad_norm": 1.6508448271613672, "learning_rate": 9.954319346129504e-08, "loss": 0.8483, "step": 8728 }, { "epoch": 0.94, "grad_norm": 1.6806593406810597, 
"learning_rate": 9.919780563356296e-08, "loss": 0.8375, "step": 8729 }, { "epoch": 0.94, "grad_norm": 1.756659986191418, "learning_rate": 9.885301204499321e-08, "loss": 0.8698, "step": 8730 }, { "epoch": 0.94, "grad_norm": 1.6574648906527947, "learning_rate": 9.850881273739232e-08, "loss": 0.9046, "step": 8731 }, { "epoch": 0.94, "grad_norm": 1.7825988557562138, "learning_rate": 9.816520775249583e-08, "loss": 0.9176, "step": 8732 }, { "epoch": 0.94, "grad_norm": 1.9055536481280362, "learning_rate": 9.782219713196706e-08, "loss": 0.8832, "step": 8733 }, { "epoch": 0.94, "grad_norm": 1.701554984357639, "learning_rate": 9.747978091739774e-08, "loss": 0.8438, "step": 8734 }, { "epoch": 0.94, "grad_norm": 1.7732869975969348, "learning_rate": 9.713795915030577e-08, "loss": 0.8652, "step": 8735 }, { "epoch": 0.94, "grad_norm": 1.6621618550626442, "learning_rate": 9.679673187214023e-08, "loss": 0.9237, "step": 8736 }, { "epoch": 0.94, "grad_norm": 1.6623637146218884, "learning_rate": 9.645609912427412e-08, "loss": 0.8362, "step": 8737 }, { "epoch": 0.94, "grad_norm": 1.743433721770903, "learning_rate": 9.611606094801052e-08, "loss": 0.927, "step": 8738 }, { "epoch": 0.94, "grad_norm": 1.6880934156260312, "learning_rate": 9.577661738458144e-08, "loss": 0.8818, "step": 8739 }, { "epoch": 0.94, "grad_norm": 1.8033331168405395, "learning_rate": 9.543776847514508e-08, "loss": 0.8549, "step": 8740 }, { "epoch": 0.94, "grad_norm": 1.8162257005797307, "learning_rate": 9.509951426078745e-08, "loss": 0.8896, "step": 8741 }, { "epoch": 0.94, "grad_norm": 1.8397332680300942, "learning_rate": 9.476185478252354e-08, "loss": 0.8801, "step": 8742 }, { "epoch": 0.94, "grad_norm": 0.7746170797697094, "learning_rate": 9.442479008129557e-08, "loss": 1.0676, "step": 8743 }, { "epoch": 0.94, "grad_norm": 0.7582210475904232, "learning_rate": 9.40883201979742e-08, "loss": 1.008, "step": 8744 }, { "epoch": 0.94, "grad_norm": 1.7809745981474663, "learning_rate": 9.37524451733568e-08, "loss": 0.9593, "step": 8745 }, { "epoch": 0.94, "grad_norm": 1.7212151641640505, "learning_rate": 9.341716504817022e-08, "loss": 0.9188, "step": 8746 }, { "epoch": 0.94, "grad_norm": 1.712081039534405, "learning_rate": 9.308247986306862e-08, "loss": 0.9496, "step": 8747 }, { "epoch": 0.94, "grad_norm": 0.7772235921773951, "learning_rate": 9.274838965863175e-08, "loss": 1.0282, "step": 8748 }, { "epoch": 0.94, "grad_norm": 1.7538186775136482, "learning_rate": 9.241489447537111e-08, "loss": 0.8495, "step": 8749 }, { "epoch": 0.94, "grad_norm": 1.7060304883884183, "learning_rate": 9.208199435372379e-08, "loss": 0.844, "step": 8750 }, { "epoch": 0.94, "grad_norm": 0.7742980726500839, "learning_rate": 9.174968933405415e-08, "loss": 1.0129, "step": 8751 }, { "epoch": 0.94, "grad_norm": 1.7265033516315342, "learning_rate": 9.141797945665609e-08, "loss": 0.9487, "step": 8752 }, { "epoch": 0.94, "grad_norm": 1.9427486818306074, "learning_rate": 9.108686476175132e-08, "loss": 0.9347, "step": 8753 }, { "epoch": 0.94, "grad_norm": 1.729457791940581, "learning_rate": 9.075634528948718e-08, "loss": 0.8118, "step": 8754 }, { "epoch": 0.94, "grad_norm": 1.7991561550423116, "learning_rate": 9.042642107994103e-08, "loss": 0.8532, "step": 8755 }, { "epoch": 0.94, "grad_norm": 1.7364859575347862, "learning_rate": 9.009709217311702e-08, "loss": 0.8869, "step": 8756 }, { "epoch": 0.94, "grad_norm": 1.7292755646672495, "learning_rate": 8.976835860894762e-08, "loss": 0.857, "step": 8757 }, { "epoch": 0.94, "grad_norm": 1.6628914866427786, "learning_rate": 
8.944022042729317e-08, "loss": 0.9134, "step": 8758 }, { "epoch": 0.94, "grad_norm": 1.7078757929066355, "learning_rate": 8.91126776679413e-08, "loss": 0.8594, "step": 8759 }, { "epoch": 0.94, "grad_norm": 1.7924786921108733, "learning_rate": 8.878573037060689e-08, "loss": 0.8872, "step": 8760 }, { "epoch": 0.94, "grad_norm": 0.777690336494681, "learning_rate": 8.845937857493492e-08, "loss": 0.9944, "step": 8761 }, { "epoch": 0.94, "grad_norm": 1.7146020668537298, "learning_rate": 8.813362232049594e-08, "loss": 0.96, "step": 8762 }, { "epoch": 0.94, "grad_norm": 1.7702086092082934, "learning_rate": 8.780846164678836e-08, "loss": 0.8883, "step": 8763 }, { "epoch": 0.94, "grad_norm": 1.793273497276005, "learning_rate": 8.748389659324008e-08, "loss": 0.9039, "step": 8764 }, { "epoch": 0.94, "grad_norm": 1.8563030741518667, "learning_rate": 8.715992719920575e-08, "loss": 0.8821, "step": 8765 }, { "epoch": 0.94, "grad_norm": 1.692136041261812, "learning_rate": 8.683655350396724e-08, "loss": 0.8431, "step": 8766 }, { "epoch": 0.94, "grad_norm": 1.8242016865772352, "learning_rate": 8.651377554673435e-08, "loss": 0.9337, "step": 8767 }, { "epoch": 0.94, "grad_norm": 1.7128776837757196, "learning_rate": 8.619159336664684e-08, "loss": 0.8677, "step": 8768 }, { "epoch": 0.94, "grad_norm": 1.8163318864051665, "learning_rate": 8.587000700276793e-08, "loss": 0.8186, "step": 8769 }, { "epoch": 0.94, "grad_norm": 1.6389646007746306, "learning_rate": 8.554901649409253e-08, "loss": 0.8349, "step": 8770 }, { "epoch": 0.94, "grad_norm": 1.7453384275113522, "learning_rate": 8.522862187954173e-08, "loss": 0.8911, "step": 8771 }, { "epoch": 0.94, "grad_norm": 1.4937871094428208, "learning_rate": 8.49088231979639e-08, "loss": 0.8432, "step": 8772 }, { "epoch": 0.94, "grad_norm": 1.7842683633016247, "learning_rate": 8.458962048813634e-08, "loss": 0.9421, "step": 8773 }, { "epoch": 0.94, "grad_norm": 1.7105912446655767, "learning_rate": 8.427101378876368e-08, "loss": 0.8833, "step": 8774 }, { "epoch": 0.94, "grad_norm": 1.7085382583201034, "learning_rate": 8.39530031384772e-08, "loss": 0.8722, "step": 8775 }, { "epoch": 0.94, "grad_norm": 1.713771971270094, "learning_rate": 8.36355885758372e-08, "loss": 0.9537, "step": 8776 }, { "epoch": 0.94, "grad_norm": 1.7232713531532322, "learning_rate": 8.331877013933176e-08, "loss": 0.8804, "step": 8777 }, { "epoch": 0.94, "grad_norm": 1.8043590291619638, "learning_rate": 8.300254786737627e-08, "loss": 0.8998, "step": 8778 }, { "epoch": 0.94, "grad_norm": 1.748231085629352, "learning_rate": 8.268692179831228e-08, "loss": 0.9492, "step": 8779 }, { "epoch": 0.94, "grad_norm": 1.944237886563713, "learning_rate": 8.237189197041196e-08, "loss": 1.0028, "step": 8780 }, { "epoch": 0.94, "grad_norm": 1.725507934137953, "learning_rate": 8.205745842187363e-08, "loss": 0.7632, "step": 8781 }, { "epoch": 0.94, "grad_norm": 1.742370024203653, "learning_rate": 8.174362119082291e-08, "loss": 0.8635, "step": 8782 }, { "epoch": 0.94, "grad_norm": 1.8342276961445105, "learning_rate": 8.143038031531381e-08, "loss": 0.875, "step": 8783 }, { "epoch": 0.94, "grad_norm": 1.8158857503321166, "learning_rate": 8.111773583332871e-08, "loss": 0.9179, "step": 8784 }, { "epoch": 0.94, "grad_norm": 1.7771395373419845, "learning_rate": 8.080568778277509e-08, "loss": 0.9493, "step": 8785 }, { "epoch": 0.94, "grad_norm": 1.783695062681277, "learning_rate": 8.049423620149155e-08, "loss": 0.8192, "step": 8786 }, { "epoch": 0.94, "grad_norm": 1.6812631544417553, "learning_rate": 8.018338112724178e-08, "loss": 
0.8695, "step": 8787 }, { "epoch": 0.94, "grad_norm": 1.6300141931628267, "learning_rate": 7.98731225977184e-08, "loss": 0.8343, "step": 8788 }, { "epoch": 0.94, "grad_norm": 1.6797131687964026, "learning_rate": 7.956346065054132e-08, "loss": 0.8064, "step": 8789 }, { "epoch": 0.94, "grad_norm": 1.6378605104665973, "learning_rate": 7.925439532325774e-08, "loss": 0.8713, "step": 8790 }, { "epoch": 0.95, "grad_norm": 1.861710388795536, "learning_rate": 7.894592665334266e-08, "loss": 0.8817, "step": 8791 }, { "epoch": 0.95, "grad_norm": 1.7381407927136665, "learning_rate": 7.863805467820007e-08, "loss": 0.9509, "step": 8792 }, { "epoch": 0.95, "grad_norm": 1.728224629830989, "learning_rate": 7.833077943515954e-08, "loss": 0.9442, "step": 8793 }, { "epoch": 0.95, "grad_norm": 1.7252641636734705, "learning_rate": 7.802410096147961e-08, "loss": 0.9085, "step": 8794 }, { "epoch": 0.95, "grad_norm": 0.7766720271773432, "learning_rate": 7.771801929434608e-08, "loss": 1.0634, "step": 8795 }, { "epoch": 0.95, "grad_norm": 1.9603008739156298, "learning_rate": 7.74125344708726e-08, "loss": 0.8883, "step": 8796 }, { "epoch": 0.95, "grad_norm": 1.7685373426472952, "learning_rate": 7.71076465281001e-08, "loss": 0.8771, "step": 8797 }, { "epoch": 0.95, "grad_norm": 1.671431423901742, "learning_rate": 7.680335550299678e-08, "loss": 0.8536, "step": 8798 }, { "epoch": 0.95, "grad_norm": 1.800484202467439, "learning_rate": 7.64996614324598e-08, "loss": 0.9059, "step": 8799 }, { "epoch": 0.95, "grad_norm": 1.7373861939408914, "learning_rate": 7.619656435331302e-08, "loss": 0.8655, "step": 8800 }, { "epoch": 0.95, "grad_norm": 1.7105118362804141, "learning_rate": 7.589406430230706e-08, "loss": 0.9478, "step": 8801 }, { "epoch": 0.95, "grad_norm": 1.7741530499471447, "learning_rate": 7.559216131612256e-08, "loss": 0.9314, "step": 8802 }, { "epoch": 0.95, "grad_norm": 1.6790622925476417, "learning_rate": 7.529085543136472e-08, "loss": 0.8588, "step": 8803 }, { "epoch": 0.95, "grad_norm": 1.646597252695301, "learning_rate": 7.499014668456872e-08, "loss": 0.9009, "step": 8804 }, { "epoch": 0.95, "grad_norm": 1.819361423177791, "learning_rate": 7.469003511219708e-08, "loss": 0.9112, "step": 8805 }, { "epoch": 0.95, "grad_norm": 1.661352790042645, "learning_rate": 7.439052075063847e-08, "loss": 0.9193, "step": 8806 }, { "epoch": 0.95, "grad_norm": 1.6938598123814248, "learning_rate": 7.409160363621049e-08, "loss": 0.8867, "step": 8807 }, { "epoch": 0.95, "grad_norm": 1.748930993359509, "learning_rate": 7.379328380515805e-08, "loss": 0.8952, "step": 8808 }, { "epoch": 0.95, "grad_norm": 1.7017792350390337, "learning_rate": 7.349556129365276e-08, "loss": 0.8428, "step": 8809 }, { "epoch": 0.95, "grad_norm": 1.7104673229158864, "learning_rate": 7.31984361377952e-08, "loss": 0.8835, "step": 8810 }, { "epoch": 0.95, "grad_norm": 1.721271285473092, "learning_rate": 7.290190837361211e-08, "loss": 0.8317, "step": 8811 }, { "epoch": 0.95, "grad_norm": 1.6217396730571771, "learning_rate": 7.260597803705971e-08, "loss": 0.8727, "step": 8812 }, { "epoch": 0.95, "grad_norm": 1.7369141221640836, "learning_rate": 7.231064516401932e-08, "loss": 0.8797, "step": 8813 }, { "epoch": 0.95, "grad_norm": 1.8638751535000941, "learning_rate": 7.201590979030116e-08, "loss": 0.8864, "step": 8814 }, { "epoch": 0.95, "grad_norm": 0.7853878754829022, "learning_rate": 7.172177195164387e-08, "loss": 1.044, "step": 8815 }, { "epoch": 0.95, "grad_norm": 1.6186404965288494, "learning_rate": 7.142823168371171e-08, "loss": 0.7876, "step": 8816 }, { 
"epoch": 0.95, "grad_norm": 1.579585154508702, "learning_rate": 7.113528902209787e-08, "loss": 0.8693, "step": 8817 }, { "epoch": 0.95, "grad_norm": 1.8001981285371624, "learning_rate": 7.084294400232283e-08, "loss": 0.8813, "step": 8818 }, { "epoch": 0.95, "grad_norm": 0.8180598032321683, "learning_rate": 7.055119665983379e-08, "loss": 1.0557, "step": 8819 }, { "epoch": 0.95, "grad_norm": 1.7977290683950893, "learning_rate": 7.026004703000689e-08, "loss": 0.825, "step": 8820 }, { "epoch": 0.95, "grad_norm": 1.7001088242551985, "learning_rate": 6.9969495148145e-08, "loss": 0.9354, "step": 8821 }, { "epoch": 0.95, "grad_norm": 1.797356298583357, "learning_rate": 6.967954104947771e-08, "loss": 0.8669, "step": 8822 }, { "epoch": 0.95, "grad_norm": 0.8009441176804548, "learning_rate": 6.939018476916359e-08, "loss": 1.0258, "step": 8823 }, { "epoch": 0.95, "grad_norm": 1.9012258700201452, "learning_rate": 6.91014263422879e-08, "loss": 0.9468, "step": 8824 }, { "epoch": 0.95, "grad_norm": 1.6712749695536122, "learning_rate": 6.881326580386372e-08, "loss": 0.8585, "step": 8825 }, { "epoch": 0.95, "grad_norm": 1.6949411744251415, "learning_rate": 6.852570318883145e-08, "loss": 0.8451, "step": 8826 }, { "epoch": 0.95, "grad_norm": 1.7847498028469047, "learning_rate": 6.823873853205931e-08, "loss": 0.9019, "step": 8827 }, { "epoch": 0.95, "grad_norm": 1.8301198485257983, "learning_rate": 6.79523718683417e-08, "loss": 0.8909, "step": 8828 }, { "epoch": 0.95, "grad_norm": 1.7997265839082333, "learning_rate": 6.766660323240303e-08, "loss": 0.9405, "step": 8829 }, { "epoch": 0.95, "grad_norm": 1.7141982040544148, "learning_rate": 6.738143265889285e-08, "loss": 0.896, "step": 8830 }, { "epoch": 0.95, "grad_norm": 1.6634732983490532, "learning_rate": 6.709686018238958e-08, "loss": 0.8502, "step": 8831 }, { "epoch": 0.95, "grad_norm": 1.803037141831488, "learning_rate": 6.681288583739787e-08, "loss": 0.9035, "step": 8832 }, { "epoch": 0.95, "grad_norm": 1.6619339651063416, "learning_rate": 6.652950965835181e-08, "loss": 0.793, "step": 8833 }, { "epoch": 0.95, "grad_norm": 1.6958177435297506, "learning_rate": 6.624673167961004e-08, "loss": 0.9443, "step": 8834 }, { "epoch": 0.95, "grad_norm": 1.5378204699646796, "learning_rate": 6.59645519354618e-08, "loss": 0.8718, "step": 8835 }, { "epoch": 0.95, "grad_norm": 1.764638309614953, "learning_rate": 6.568297046012196e-08, "loss": 0.9329, "step": 8836 }, { "epoch": 0.95, "grad_norm": 1.7290949591877172, "learning_rate": 6.540198728773262e-08, "loss": 0.9618, "step": 8837 }, { "epoch": 0.95, "grad_norm": 1.872213947790235, "learning_rate": 6.512160245236432e-08, "loss": 0.9711, "step": 8838 }, { "epoch": 0.95, "grad_norm": 1.8483003544116654, "learning_rate": 6.484181598801542e-08, "loss": 0.9476, "step": 8839 }, { "epoch": 0.95, "grad_norm": 1.8165979406763404, "learning_rate": 6.456262792861046e-08, "loss": 0.9298, "step": 8840 }, { "epoch": 0.95, "grad_norm": 1.743048501710946, "learning_rate": 6.428403830800123e-08, "loss": 0.9003, "step": 8841 }, { "epoch": 0.95, "grad_norm": 0.7804193480048996, "learning_rate": 6.400604715996905e-08, "loss": 1.0569, "step": 8842 }, { "epoch": 0.95, "grad_norm": 1.7242733565395683, "learning_rate": 6.372865451822086e-08, "loss": 0.8394, "step": 8843 }, { "epoch": 0.95, "grad_norm": 1.6835111351209955, "learning_rate": 6.345186041639029e-08, "loss": 0.8691, "step": 8844 }, { "epoch": 0.95, "grad_norm": 1.7500758629832467, "learning_rate": 6.317566488804105e-08, "loss": 0.8303, "step": 8845 }, { "epoch": 0.95, "grad_norm": 
1.699550610875349, "learning_rate": 6.290006796666249e-08, "loss": 0.8955, "step": 8846 }, { "epoch": 0.95, "grad_norm": 1.7506613634780688, "learning_rate": 6.262506968567061e-08, "loss": 0.9774, "step": 8847 }, { "epoch": 0.95, "grad_norm": 1.7124983288795061, "learning_rate": 6.235067007841155e-08, "loss": 0.8778, "step": 8848 }, { "epoch": 0.95, "grad_norm": 1.8466498050615583, "learning_rate": 6.207686917815592e-08, "loss": 0.899, "step": 8849 }, { "epoch": 0.95, "grad_norm": 1.7591888241633542, "learning_rate": 6.18036670181038e-08, "loss": 0.8723, "step": 8850 }, { "epoch": 0.95, "grad_norm": 1.61232954830691, "learning_rate": 6.153106363138151e-08, "loss": 0.8892, "step": 8851 }, { "epoch": 0.95, "grad_norm": 1.6548054021145893, "learning_rate": 6.125905905104368e-08, "loss": 0.8387, "step": 8852 }, { "epoch": 0.95, "grad_norm": 1.5697542340565545, "learning_rate": 6.098765331007061e-08, "loss": 0.8567, "step": 8853 }, { "epoch": 0.95, "grad_norm": 1.797859432906638, "learning_rate": 6.071684644137265e-08, "loss": 0.9176, "step": 8854 }, { "epoch": 0.95, "grad_norm": 1.69822809057246, "learning_rate": 6.044663847778521e-08, "loss": 0.8485, "step": 8855 }, { "epoch": 0.95, "grad_norm": 1.8179272236354664, "learning_rate": 6.017702945207149e-08, "loss": 0.913, "step": 8856 }, { "epoch": 0.95, "grad_norm": 1.7251087436150738, "learning_rate": 5.990801939692315e-08, "loss": 0.892, "step": 8857 }, { "epoch": 0.95, "grad_norm": 0.7816913788683594, "learning_rate": 5.963960834495908e-08, "loss": 1.0217, "step": 8858 }, { "epoch": 0.95, "grad_norm": 1.71616828150927, "learning_rate": 5.937179632872436e-08, "loss": 0.8829, "step": 8859 }, { "epoch": 0.95, "grad_norm": 1.6681775415608753, "learning_rate": 5.910458338069192e-08, "loss": 0.8707, "step": 8860 }, { "epoch": 0.95, "grad_norm": 1.6147134810100792, "learning_rate": 5.8837969533263595e-08, "loss": 0.9361, "step": 8861 }, { "epoch": 0.95, "grad_norm": 1.7241914758552606, "learning_rate": 5.857195481876521e-08, "loss": 0.8721, "step": 8862 }, { "epoch": 0.95, "grad_norm": 1.7194770741885168, "learning_rate": 5.830653926945373e-08, "loss": 0.9785, "step": 8863 }, { "epoch": 0.95, "grad_norm": 1.6891964027238264, "learning_rate": 5.804172291751064e-08, "loss": 0.8501, "step": 8864 }, { "epoch": 0.95, "grad_norm": 1.7327046697770807, "learning_rate": 5.777750579504582e-08, "loss": 0.8441, "step": 8865 }, { "epoch": 0.95, "grad_norm": 1.751938693775302, "learning_rate": 5.751388793409696e-08, "loss": 0.9844, "step": 8866 }, { "epoch": 0.95, "grad_norm": 0.8386238707148712, "learning_rate": 5.725086936662905e-08, "loss": 1.0297, "step": 8867 }, { "epoch": 0.95, "grad_norm": 0.7462363264918382, "learning_rate": 5.6988450124533244e-08, "loss": 0.9933, "step": 8868 }, { "epoch": 0.95, "grad_norm": 1.8599863604485445, "learning_rate": 5.672663023962854e-08, "loss": 1.0067, "step": 8869 }, { "epoch": 0.95, "grad_norm": 1.8456713218337186, "learning_rate": 5.646540974366288e-08, "loss": 0.9604, "step": 8870 }, { "epoch": 0.95, "grad_norm": 1.738856334355234, "learning_rate": 5.620478866830814e-08, "loss": 0.8973, "step": 8871 }, { "epoch": 0.95, "grad_norm": 1.8165274407170784, "learning_rate": 5.594476704516738e-08, "loss": 0.8524, "step": 8872 }, { "epoch": 0.95, "grad_norm": 1.7069556373185137, "learning_rate": 5.568534490576816e-08, "loss": 0.8868, "step": 8873 }, { "epoch": 0.95, "grad_norm": 1.699828107224752, "learning_rate": 5.542652228156698e-08, "loss": 0.8865, "step": 8874 }, { "epoch": 0.95, "grad_norm": 1.8733041992213415, 
"learning_rate": 5.5168299203945953e-08, "loss": 0.8818, "step": 8875 }, { "epoch": 0.95, "grad_norm": 1.6901173063260295, "learning_rate": 5.491067570421615e-08, "loss": 0.8674, "step": 8876 }, { "epoch": 0.95, "grad_norm": 1.6086175012147987, "learning_rate": 5.465365181361537e-08, "loss": 0.8415, "step": 8877 }, { "epoch": 0.95, "grad_norm": 1.7622831247538544, "learning_rate": 5.439722756330812e-08, "loss": 0.9099, "step": 8878 }, { "epoch": 0.95, "grad_norm": 1.671792320404714, "learning_rate": 5.4141402984387304e-08, "loss": 0.8571, "step": 8879 }, { "epoch": 0.95, "grad_norm": 1.7779374968832276, "learning_rate": 5.388617810787256e-08, "loss": 0.8924, "step": 8880 }, { "epoch": 0.95, "grad_norm": 1.7545494700455597, "learning_rate": 5.363155296471079e-08, "loss": 0.8814, "step": 8881 }, { "epoch": 0.95, "grad_norm": 1.7355268452741641, "learning_rate": 5.3377527585775635e-08, "loss": 0.8626, "step": 8882 }, { "epoch": 0.95, "grad_norm": 1.7587017700937684, "learning_rate": 5.3124102001868574e-08, "loss": 0.8561, "step": 8883 }, { "epoch": 0.96, "grad_norm": 1.6831612168029113, "learning_rate": 5.287127624371946e-08, "loss": 0.8679, "step": 8884 }, { "epoch": 0.96, "grad_norm": 1.6889580196336205, "learning_rate": 5.261905034198267e-08, "loss": 0.8795, "step": 8885 }, { "epoch": 0.96, "grad_norm": 0.765930972216828, "learning_rate": 5.236742432724262e-08, "loss": 1.0508, "step": 8886 }, { "epoch": 0.96, "grad_norm": 1.7923678839673987, "learning_rate": 5.211639823000936e-08, "loss": 0.9425, "step": 8887 }, { "epoch": 0.96, "grad_norm": 0.8011042401078649, "learning_rate": 5.1865972080720195e-08, "loss": 1.0891, "step": 8888 }, { "epoch": 0.96, "grad_norm": 1.7539197783992857, "learning_rate": 5.16161459097414e-08, "loss": 0.9203, "step": 8889 }, { "epoch": 0.96, "grad_norm": 1.705251960380442, "learning_rate": 5.13669197473643e-08, "loss": 0.859, "step": 8890 }, { "epoch": 0.96, "grad_norm": 1.6878493932151493, "learning_rate": 5.11182936238086e-08, "loss": 0.8167, "step": 8891 }, { "epoch": 0.96, "grad_norm": 1.5971559035553664, "learning_rate": 5.0870267569221864e-08, "loss": 0.9114, "step": 8892 }, { "epoch": 0.96, "grad_norm": 1.6730817709332049, "learning_rate": 5.0622841613676695e-08, "loss": 0.9578, "step": 8893 }, { "epoch": 0.96, "grad_norm": 1.6941228875662697, "learning_rate": 5.0376015787175215e-08, "loss": 0.9023, "step": 8894 }, { "epoch": 0.96, "grad_norm": 1.848185508010583, "learning_rate": 5.0129790119646804e-08, "loss": 0.8739, "step": 8895 }, { "epoch": 0.96, "grad_norm": 1.7554975845581249, "learning_rate": 4.988416464094481e-08, "loss": 0.8673, "step": 8896 }, { "epoch": 0.96, "grad_norm": 1.763305474072755, "learning_rate": 4.963913938085374e-08, "loss": 0.8286, "step": 8897 }, { "epoch": 0.96, "grad_norm": 1.7380162147368385, "learning_rate": 4.939471436908427e-08, "loss": 0.9477, "step": 8898 }, { "epoch": 0.96, "grad_norm": 1.6987738026288308, "learning_rate": 4.9150889635272145e-08, "loss": 0.9271, "step": 8899 }, { "epoch": 0.96, "grad_norm": 1.7148690992722675, "learning_rate": 4.890766520898316e-08, "loss": 0.8764, "step": 8900 }, { "epoch": 0.96, "grad_norm": 1.6698686989127507, "learning_rate": 4.8665041119708714e-08, "loss": 0.8878, "step": 8901 }, { "epoch": 0.96, "grad_norm": 1.7169047369672084, "learning_rate": 4.8423017396868054e-08, "loss": 0.9491, "step": 8902 }, { "epoch": 0.96, "grad_norm": 1.6538514911559579, "learning_rate": 4.818159406980716e-08, "loss": 0.8639, "step": 8903 }, { "epoch": 0.96, "grad_norm": 1.9285773021157036, 
"learning_rate": 4.7940771167799274e-08, "loss": 0.8546, "step": 8904 }, { "epoch": 0.96, "grad_norm": 1.6899697224651593, "learning_rate": 4.770054872004548e-08, "loss": 0.8791, "step": 8905 }, { "epoch": 0.96, "grad_norm": 1.7001991090521789, "learning_rate": 4.7460926755673595e-08, "loss": 0.8987, "step": 8906 }, { "epoch": 0.96, "grad_norm": 1.7738860319744307, "learning_rate": 4.72219053037376e-08, "loss": 1.0038, "step": 8907 }, { "epoch": 0.96, "grad_norm": 1.7701036416453981, "learning_rate": 4.698348439322098e-08, "loss": 0.827, "step": 8908 }, { "epoch": 0.96, "grad_norm": 0.7863045138553155, "learning_rate": 4.674566405303227e-08, "loss": 1.0146, "step": 8909 }, { "epoch": 0.96, "grad_norm": 1.6208247051974858, "learning_rate": 4.65084443120084e-08, "loss": 0.8782, "step": 8910 }, { "epoch": 0.96, "grad_norm": 0.7880261381925586, "learning_rate": 4.627182519891305e-08, "loss": 1.0752, "step": 8911 }, { "epoch": 0.96, "grad_norm": 1.7063538310636899, "learning_rate": 4.6035806742436575e-08, "loss": 0.825, "step": 8912 }, { "epoch": 0.96, "grad_norm": 1.6472008082219505, "learning_rate": 4.5800388971197764e-08, "loss": 0.9382, "step": 8913 }, { "epoch": 0.96, "grad_norm": 1.672508794677758, "learning_rate": 4.5565571913741005e-08, "loss": 0.8332, "step": 8914 }, { "epoch": 0.96, "grad_norm": 1.6763003775437368, "learning_rate": 4.533135559853963e-08, "loss": 0.8696, "step": 8915 }, { "epoch": 0.96, "grad_norm": 1.8043807929063636, "learning_rate": 4.5097740053993144e-08, "loss": 0.8836, "step": 8916 }, { "epoch": 0.96, "grad_norm": 1.6326466386802474, "learning_rate": 4.486472530842723e-08, "loss": 0.8574, "step": 8917 }, { "epoch": 0.96, "grad_norm": 1.7313987111186204, "learning_rate": 4.463231139009649e-08, "loss": 0.9043, "step": 8918 }, { "epoch": 0.96, "grad_norm": 1.6927496784043867, "learning_rate": 4.440049832718174e-08, "loss": 0.9356, "step": 8919 }, { "epoch": 0.96, "grad_norm": 1.870356306661973, "learning_rate": 4.416928614779104e-08, "loss": 0.9193, "step": 8920 }, { "epoch": 0.96, "grad_norm": 1.705461592435337, "learning_rate": 4.393867487995973e-08, "loss": 0.924, "step": 8921 }, { "epoch": 0.96, "grad_norm": 1.7317126631987951, "learning_rate": 4.370866455165046e-08, "loss": 0.9012, "step": 8922 }, { "epoch": 0.96, "grad_norm": 1.7245441685321954, "learning_rate": 4.347925519075258e-08, "loss": 0.8882, "step": 8923 }, { "epoch": 0.96, "grad_norm": 1.8179619861907725, "learning_rate": 4.325044682508217e-08, "loss": 0.8556, "step": 8924 }, { "epoch": 0.96, "grad_norm": 1.848362654376589, "learning_rate": 4.3022239482384264e-08, "loss": 0.903, "step": 8925 }, { "epoch": 0.96, "grad_norm": 1.5910201405758972, "learning_rate": 4.279463319032895e-08, "loss": 0.8657, "step": 8926 }, { "epoch": 0.96, "grad_norm": 1.6620280011002693, "learning_rate": 4.256762797651415e-08, "loss": 0.8268, "step": 8927 }, { "epoch": 0.96, "grad_norm": 1.751251381784036, "learning_rate": 4.234122386846562e-08, "loss": 0.9114, "step": 8928 }, { "epoch": 0.96, "grad_norm": 0.7932472079022234, "learning_rate": 4.2115420893635296e-08, "loss": 1.0179, "step": 8929 }, { "epoch": 0.96, "grad_norm": 1.6705358779688988, "learning_rate": 4.1890219079402384e-08, "loss": 0.9032, "step": 8930 }, { "epoch": 0.96, "grad_norm": 0.7754452620224036, "learning_rate": 4.166561845307393e-08, "loss": 1.0661, "step": 8931 }, { "epoch": 0.96, "grad_norm": 1.9250148332686925, "learning_rate": 4.1441619041883155e-08, "loss": 0.9392, "step": 8932 }, { "epoch": 0.96, "grad_norm": 1.6643434823376908, "learning_rate": 
4.1218220872990546e-08, "loss": 0.8892, "step": 8933 }, { "epoch": 0.96, "grad_norm": 1.730597087840161, "learning_rate": 4.099542397348444e-08, "loss": 0.8652, "step": 8934 }, { "epoch": 0.96, "grad_norm": 1.6901388846520238, "learning_rate": 4.0773228370379334e-08, "loss": 0.8938, "step": 8935 }, { "epoch": 0.96, "grad_norm": 1.6976235775855437, "learning_rate": 4.055163409061758e-08, "loss": 0.811, "step": 8936 }, { "epoch": 0.96, "grad_norm": 1.6700276766233098, "learning_rate": 4.033064116106766e-08, "loss": 0.8798, "step": 8937 }, { "epoch": 0.96, "grad_norm": 1.8001882379960406, "learning_rate": 4.01102496085265e-08, "loss": 0.9604, "step": 8938 }, { "epoch": 0.96, "grad_norm": 1.8398321897652261, "learning_rate": 3.989045945971659e-08, "loss": 0.9396, "step": 8939 }, { "epoch": 0.96, "grad_norm": 1.7432841475041494, "learning_rate": 3.967127074128885e-08, "loss": 0.8576, "step": 8940 }, { "epoch": 0.96, "grad_norm": 1.72804025308454, "learning_rate": 3.9452683479819783e-08, "loss": 0.9422, "step": 8941 }, { "epoch": 0.96, "grad_norm": 1.7281732295649466, "learning_rate": 3.923469770181543e-08, "loss": 0.9144, "step": 8942 }, { "epoch": 0.96, "grad_norm": 1.7344771298564283, "learning_rate": 3.901731343370574e-08, "loss": 0.8813, "step": 8943 }, { "epoch": 0.96, "grad_norm": 1.8121729413924828, "learning_rate": 3.8800530701849634e-08, "loss": 0.9379, "step": 8944 }, { "epoch": 0.96, "grad_norm": 1.695819052500904, "learning_rate": 3.858434953253332e-08, "loss": 0.9707, "step": 8945 }, { "epoch": 0.96, "grad_norm": 1.7703958874659151, "learning_rate": 3.836876995196914e-08, "loss": 0.8029, "step": 8946 }, { "epoch": 0.96, "grad_norm": 1.659591993290325, "learning_rate": 3.8153791986297315e-08, "loss": 0.9332, "step": 8947 }, { "epoch": 0.96, "grad_norm": 1.7570856516879534, "learning_rate": 3.793941566158421e-08, "loss": 0.8263, "step": 8948 }, { "epoch": 0.96, "grad_norm": 1.7582010964140862, "learning_rate": 3.772564100382348e-08, "loss": 0.8749, "step": 8949 }, { "epoch": 0.96, "grad_norm": 1.810719522043132, "learning_rate": 3.75124680389366e-08, "loss": 0.9397, "step": 8950 }, { "epoch": 0.96, "grad_norm": 1.7139878045010424, "learning_rate": 3.729989679277124e-08, "loss": 0.9454, "step": 8951 }, { "epoch": 0.96, "grad_norm": 1.8472551925009655, "learning_rate": 3.7087927291102335e-08, "loss": 0.9097, "step": 8952 }, { "epoch": 0.96, "grad_norm": 1.653718743968741, "learning_rate": 3.687655955963154e-08, "loss": 0.905, "step": 8953 }, { "epoch": 0.96, "grad_norm": 1.6624409750389941, "learning_rate": 3.666579362398892e-08, "loss": 0.9247, "step": 8954 }, { "epoch": 0.96, "grad_norm": 0.78575587822623, "learning_rate": 3.645562950973014e-08, "loss": 1.0602, "step": 8955 }, { "epoch": 0.96, "grad_norm": 1.6849520477787194, "learning_rate": 3.624606724233759e-08, "loss": 0.8711, "step": 8956 }, { "epoch": 0.96, "grad_norm": 1.7236569883947854, "learning_rate": 3.603710684722317e-08, "loss": 0.8779, "step": 8957 }, { "epoch": 0.96, "grad_norm": 1.673275018096668, "learning_rate": 3.5828748349722164e-08, "loss": 0.8322, "step": 8958 }, { "epoch": 0.96, "grad_norm": 1.726566012505798, "learning_rate": 3.562099177509937e-08, "loss": 0.9682, "step": 8959 }, { "epoch": 0.96, "grad_norm": 1.8254854572908077, "learning_rate": 3.541383714854685e-08, "loss": 0.8792, "step": 8960 }, { "epoch": 0.96, "grad_norm": 0.7636441178940484, "learning_rate": 3.520728449518118e-08, "loss": 1.04, "step": 8961 }, { "epoch": 0.96, "grad_norm": 1.7230886840477964, "learning_rate": 3.5001333840049e-08, 
"loss": 0.9147, "step": 8962 }, { "epoch": 0.96, "grad_norm": 1.8304015594768872, "learning_rate": 3.4795985208122e-08, "loss": 0.903, "step": 8963 }, { "epoch": 0.96, "grad_norm": 1.918036581863025, "learning_rate": 3.4591238624299696e-08, "loss": 0.8819, "step": 8964 }, { "epoch": 0.96, "grad_norm": 0.7421933983926192, "learning_rate": 3.438709411340779e-08, "loss": 1.0517, "step": 8965 }, { "epoch": 0.96, "grad_norm": 1.6327324776391086, "learning_rate": 3.418355170019982e-08, "loss": 0.803, "step": 8966 }, { "epoch": 0.96, "grad_norm": 1.7479503801981549, "learning_rate": 3.3980611409356045e-08, "loss": 0.8941, "step": 8967 }, { "epoch": 0.96, "grad_norm": 1.8155400739378578, "learning_rate": 3.377827326548344e-08, "loss": 0.9456, "step": 8968 }, { "epoch": 0.96, "grad_norm": 0.7506774851964663, "learning_rate": 3.357653729311683e-08, "loss": 1.0565, "step": 8969 }, { "epoch": 0.96, "grad_norm": 1.6666653612872917, "learning_rate": 3.337540351671664e-08, "loss": 0.9143, "step": 8970 }, { "epoch": 0.96, "grad_norm": 1.6774366935344318, "learning_rate": 3.317487196067115e-08, "loss": 0.8973, "step": 8971 }, { "epoch": 0.96, "grad_norm": 1.7062203811175196, "learning_rate": 3.29749426492959e-08, "loss": 0.9158, "step": 8972 }, { "epoch": 0.96, "grad_norm": 1.732725148731328, "learning_rate": 3.277561560683262e-08, "loss": 0.9027, "step": 8973 }, { "epoch": 0.96, "grad_norm": 1.8192702145432427, "learning_rate": 3.257689085745086e-08, "loss": 0.9689, "step": 8974 }, { "epoch": 0.96, "grad_norm": 1.9127811301605193, "learning_rate": 3.237876842524634e-08, "loss": 0.8451, "step": 8975 }, { "epoch": 0.96, "grad_norm": 1.9551643038628586, "learning_rate": 3.2181248334242075e-08, "loss": 0.8868, "step": 8976 }, { "epoch": 0.97, "grad_norm": 1.6531556440667723, "learning_rate": 3.198433060838779e-08, "loss": 0.8514, "step": 8977 }, { "epoch": 0.97, "grad_norm": 1.7617610696275698, "learning_rate": 3.1788015271561054e-08, "loss": 0.9401, "step": 8978 }, { "epoch": 0.97, "grad_norm": 1.7385264135254228, "learning_rate": 3.159230234756561e-08, "loss": 0.8762, "step": 8979 }, { "epoch": 0.97, "grad_norm": 1.6966012306652218, "learning_rate": 3.139719186013246e-08, "loss": 0.8954, "step": 8980 }, { "epoch": 0.97, "grad_norm": 1.7788894648144027, "learning_rate": 3.1202683832918804e-08, "loss": 0.9353, "step": 8981 }, { "epoch": 0.97, "grad_norm": 1.7243292209200727, "learning_rate": 3.100877828950965e-08, "loss": 0.8627, "step": 8982 }, { "epoch": 0.97, "grad_norm": 1.747644751973078, "learning_rate": 3.0815475253417326e-08, "loss": 0.8787, "step": 8983 }, { "epoch": 0.97, "grad_norm": 1.7613750029026443, "learning_rate": 3.062277474807917e-08, "loss": 0.9184, "step": 8984 }, { "epoch": 0.97, "grad_norm": 1.7696073657812026, "learning_rate": 3.043067679686262e-08, "loss": 0.9189, "step": 8985 }, { "epoch": 0.97, "grad_norm": 1.6524156062781945, "learning_rate": 3.0239181423058485e-08, "loss": 0.9444, "step": 8986 }, { "epoch": 0.97, "grad_norm": 2.2027497377659473, "learning_rate": 3.004828864988707e-08, "loss": 0.8853, "step": 8987 }, { "epoch": 0.97, "grad_norm": 1.8070469209104079, "learning_rate": 2.985799850049487e-08, "loss": 0.8894, "step": 8988 }, { "epoch": 0.97, "grad_norm": 0.7739491741500097, "learning_rate": 2.9668310997955085e-08, "loss": 1.0391, "step": 8989 }, { "epoch": 0.97, "grad_norm": 1.627604044868102, "learning_rate": 2.9479226165268216e-08, "loss": 0.9493, "step": 8990 }, { "epoch": 0.97, "grad_norm": 1.6394821025515003, "learning_rate": 2.9290744025360916e-08, "loss": 0.8999, 
"step": 8991 }, { "epoch": 0.97, "grad_norm": 1.753405174210761, "learning_rate": 2.9102864601087133e-08, "loss": 0.9064, "step": 8992 }, { "epoch": 0.97, "grad_norm": 0.7882189206375205, "learning_rate": 2.8915587915228638e-08, "loss": 1.0211, "step": 8993 }, { "epoch": 0.97, "grad_norm": 1.710210630472524, "learning_rate": 2.872891399049338e-08, "loss": 0.8593, "step": 8994 }, { "epoch": 0.97, "grad_norm": 1.8178562948556511, "learning_rate": 2.8542842849515474e-08, "loss": 0.906, "step": 8995 }, { "epoch": 0.97, "grad_norm": 1.8271902199095564, "learning_rate": 2.835737451485687e-08, "loss": 0.8583, "step": 8996 }, { "epoch": 0.97, "grad_norm": 2.126638612163813, "learning_rate": 2.8172509009006808e-08, "loss": 0.857, "step": 8997 }, { "epoch": 0.97, "grad_norm": 1.7935405921507497, "learning_rate": 2.798824635438069e-08, "loss": 0.9019, "step": 8998 }, { "epoch": 0.97, "grad_norm": 1.699933654604272, "learning_rate": 2.780458657332008e-08, "loss": 0.9168, "step": 8999 }, { "epoch": 0.97, "grad_norm": 1.8058365640916076, "learning_rate": 2.7621529688096062e-08, "loss": 0.8817, "step": 9000 }, { "epoch": 0.97, "grad_norm": 1.6488815471621763, "learning_rate": 2.7439075720903653e-08, "loss": 0.8765, "step": 9001 }, { "epoch": 0.97, "grad_norm": 1.7473592009089598, "learning_rate": 2.7257224693866268e-08, "loss": 0.8877, "step": 9002 }, { "epoch": 0.97, "grad_norm": 3.227114455321381, "learning_rate": 2.7075976629033496e-08, "loss": 0.9331, "step": 9003 }, { "epoch": 0.97, "grad_norm": 1.8472669422075099, "learning_rate": 2.6895331548383864e-08, "loss": 0.8964, "step": 9004 }, { "epoch": 0.97, "grad_norm": 1.7208095557403624, "learning_rate": 2.6715289473819296e-08, "loss": 0.8377, "step": 9005 }, { "epoch": 0.97, "grad_norm": 1.7639713919724314, "learning_rate": 2.6535850427171774e-08, "loss": 0.8762, "step": 9006 }, { "epoch": 0.97, "grad_norm": 0.7659523203589909, "learning_rate": 2.6357014430198892e-08, "loss": 1.0693, "step": 9007 }, { "epoch": 0.97, "grad_norm": 1.6515840890174034, "learning_rate": 2.617878150458386e-08, "loss": 0.9298, "step": 9008 }, { "epoch": 0.97, "grad_norm": 1.71372722419767, "learning_rate": 2.6001151671939952e-08, "loss": 0.9311, "step": 9009 }, { "epoch": 0.97, "grad_norm": 1.6879951497225332, "learning_rate": 2.582412495380382e-08, "loss": 0.8909, "step": 9010 }, { "epoch": 0.97, "grad_norm": 1.6746786627703782, "learning_rate": 2.5647701371641075e-08, "loss": 0.9172, "step": 9011 }, { "epoch": 0.97, "grad_norm": 1.8568393935652887, "learning_rate": 2.5471880946844053e-08, "loss": 0.8532, "step": 9012 }, { "epoch": 0.97, "grad_norm": 1.697554459197382, "learning_rate": 2.529666370073125e-08, "loss": 0.9229, "step": 9013 }, { "epoch": 0.97, "grad_norm": 1.7776368607503963, "learning_rate": 2.5122049654547897e-08, "loss": 0.9878, "step": 9014 }, { "epoch": 0.97, "grad_norm": 1.7011011919752606, "learning_rate": 2.4948038829467613e-08, "loss": 0.8909, "step": 9015 }, { "epoch": 0.97, "grad_norm": 1.6866618702751146, "learning_rate": 2.4774631246589075e-08, "loss": 0.8783, "step": 9016 }, { "epoch": 0.97, "grad_norm": 1.671798130577716, "learning_rate": 2.4601826926938245e-08, "loss": 0.8749, "step": 9017 }, { "epoch": 0.97, "grad_norm": 0.7867919363608207, "learning_rate": 2.4429625891468912e-08, "loss": 1.0168, "step": 9018 }, { "epoch": 0.97, "grad_norm": 1.6524951184308798, "learning_rate": 2.4258028161061042e-08, "loss": 0.8569, "step": 9019 }, { "epoch": 0.97, "grad_norm": 1.857310186828111, "learning_rate": 2.408703375652133e-08, "loss": 0.8681, "step": 
9020 }, { "epoch": 0.97, "grad_norm": 1.7614645044924335, "learning_rate": 2.391664269858318e-08, "loss": 0.8711, "step": 9021 }, { "epoch": 0.97, "grad_norm": 1.7526430011833185, "learning_rate": 2.3746855007907854e-08, "loss": 0.8317, "step": 9022 }, { "epoch": 0.97, "grad_norm": 1.8105588881337562, "learning_rate": 2.3577670705081655e-08, "loss": 0.9152, "step": 9023 }, { "epoch": 0.97, "grad_norm": 0.8198491536312023, "learning_rate": 2.3409089810618736e-08, "loss": 1.0323, "step": 9024 }, { "epoch": 0.97, "grad_norm": 1.6356789329134283, "learning_rate": 2.324111234496107e-08, "loss": 0.8006, "step": 9025 }, { "epoch": 0.97, "grad_norm": 1.6402744950731307, "learning_rate": 2.3073738328476258e-08, "loss": 0.8656, "step": 9026 }, { "epoch": 0.97, "grad_norm": 1.712331613595779, "learning_rate": 2.2906967781458066e-08, "loss": 0.9188, "step": 9027 }, { "epoch": 0.97, "grad_norm": 1.695258374561053, "learning_rate": 2.274080072412921e-08, "loss": 0.9149, "step": 9028 }, { "epoch": 0.97, "grad_norm": 1.667315166218039, "learning_rate": 2.2575237176637453e-08, "loss": 0.8553, "step": 9029 }, { "epoch": 0.97, "grad_norm": 1.7941765784662156, "learning_rate": 2.2410277159057858e-08, "loss": 0.954, "step": 9030 }, { "epoch": 0.97, "grad_norm": 1.6972773660947091, "learning_rate": 2.224592069139275e-08, "loss": 0.8544, "step": 9031 }, { "epoch": 0.97, "grad_norm": 0.7603201851553251, "learning_rate": 2.2082167793570642e-08, "loss": 1.0269, "step": 9032 }, { "epoch": 0.97, "grad_norm": 1.7972780395656742, "learning_rate": 2.1919018485446753e-08, "loss": 0.8914, "step": 9033 }, { "epoch": 0.97, "grad_norm": 1.713669507627122, "learning_rate": 2.175647278680415e-08, "loss": 0.8558, "step": 9034 }, { "epoch": 0.97, "grad_norm": 1.7353618726132312, "learning_rate": 2.159453071735207e-08, "loss": 0.8913, "step": 9035 }, { "epoch": 0.97, "grad_norm": 1.7185103497506375, "learning_rate": 2.1433192296725914e-08, "loss": 0.8386, "step": 9036 }, { "epoch": 0.97, "grad_norm": 1.63880072788424, "learning_rate": 2.1272457544488922e-08, "loss": 0.8639, "step": 9037 }, { "epoch": 0.97, "grad_norm": 1.7998498321645506, "learning_rate": 2.1112326480131063e-08, "loss": 0.9624, "step": 9038 }, { "epoch": 0.97, "grad_norm": 1.7336335606768198, "learning_rate": 2.095279912306847e-08, "loss": 0.9058, "step": 9039 }, { "epoch": 0.97, "grad_norm": 1.857660517657548, "learning_rate": 2.0793875492644e-08, "loss": 0.9399, "step": 9040 }, { "epoch": 0.97, "grad_norm": 1.7136757087458103, "learning_rate": 2.0635555608128354e-08, "loss": 0.9564, "step": 9041 }, { "epoch": 0.97, "grad_norm": 1.648849224453651, "learning_rate": 2.0477839488718398e-08, "loss": 0.832, "step": 9042 }, { "epoch": 0.97, "grad_norm": 1.714201623558537, "learning_rate": 2.032072715353717e-08, "loss": 0.9119, "step": 9043 }, { "epoch": 0.97, "grad_norm": 1.7478967296912449, "learning_rate": 2.016421862163498e-08, "loss": 0.9167, "step": 9044 }, { "epoch": 0.97, "grad_norm": 1.7008623821094497, "learning_rate": 2.0008313911989985e-08, "loss": 0.9065, "step": 9045 }, { "epoch": 0.97, "grad_norm": 1.717681618082401, "learning_rate": 1.9853013043504843e-08, "loss": 0.9012, "step": 9046 }, { "epoch": 0.97, "grad_norm": 1.6808209079669376, "learning_rate": 1.969831603501171e-08, "loss": 0.8696, "step": 9047 }, { "epoch": 0.97, "grad_norm": 1.7490170896943678, "learning_rate": 1.954422290526725e-08, "loss": 0.9231, "step": 9048 }, { "epoch": 0.97, "grad_norm": 1.6329333937599875, "learning_rate": 1.939073367295541e-08, "loss": 0.9354, "step": 9049 }, { 
"epoch": 0.97, "grad_norm": 1.8000361868559058, "learning_rate": 1.9237848356688517e-08, "loss": 0.9261, "step": 9050 }, { "epoch": 0.97, "grad_norm": 1.7095769030442776, "learning_rate": 1.9085566975003966e-08, "loss": 0.9077, "step": 9051 }, { "epoch": 0.97, "grad_norm": 0.8001885973775742, "learning_rate": 1.893388954636588e-08, "loss": 1.0631, "step": 9052 }, { "epoch": 0.97, "grad_norm": 1.7488987883584868, "learning_rate": 1.878281608916621e-08, "loss": 0.8623, "step": 9053 }, { "epoch": 0.97, "grad_norm": 1.79115705751056, "learning_rate": 1.8632346621723085e-08, "loss": 0.7667, "step": 9054 }, { "epoch": 0.97, "grad_norm": 1.6895989913953078, "learning_rate": 1.8482481162280797e-08, "loss": 0.8023, "step": 9055 }, { "epoch": 0.97, "grad_norm": 1.6880258206829248, "learning_rate": 1.833321972901203e-08, "loss": 0.7568, "step": 9056 }, { "epoch": 0.97, "grad_norm": 1.6998083083517264, "learning_rate": 1.8184562340014533e-08, "loss": 0.8463, "step": 9057 }, { "epoch": 0.97, "grad_norm": 1.7082504943783157, "learning_rate": 1.8036509013313886e-08, "loss": 0.9124, "step": 9058 }, { "epoch": 0.97, "grad_norm": 1.8395185715589875, "learning_rate": 1.7889059766862392e-08, "loss": 0.868, "step": 9059 }, { "epoch": 0.97, "grad_norm": 1.7232114719760543, "learning_rate": 1.7742214618537978e-08, "loss": 0.893, "step": 9060 }, { "epoch": 0.97, "grad_norm": 1.732287602766679, "learning_rate": 1.7595973586145842e-08, "loss": 0.8948, "step": 9061 }, { "epoch": 0.97, "grad_norm": 1.8373315503179548, "learning_rate": 1.7450336687420132e-08, "loss": 0.8846, "step": 9062 }, { "epoch": 0.97, "grad_norm": 1.6988471940477583, "learning_rate": 1.7305303940017838e-08, "loss": 0.8713, "step": 9063 }, { "epoch": 0.97, "grad_norm": 1.6154185306074311, "learning_rate": 1.716087536152544e-08, "loss": 0.9268, "step": 9064 }, { "epoch": 0.97, "grad_norm": 1.6956034955472434, "learning_rate": 1.701705096945505e-08, "loss": 0.8376, "step": 9065 }, { "epoch": 0.97, "grad_norm": 0.7905778417450919, "learning_rate": 1.6873830781246603e-08, "loss": 1.0475, "step": 9066 }, { "epoch": 0.97, "grad_norm": 1.6930014888452642, "learning_rate": 1.673121481426565e-08, "loss": 0.8943, "step": 9067 }, { "epoch": 0.97, "grad_norm": 1.7385071820795834, "learning_rate": 1.6589203085804473e-08, "loss": 0.8992, "step": 9068 }, { "epoch": 0.97, "grad_norm": 1.807488707910084, "learning_rate": 1.6447795613083185e-08, "loss": 0.9414, "step": 9069 }, { "epoch": 0.98, "grad_norm": 1.8712453961363233, "learning_rate": 1.6306992413247512e-08, "loss": 0.9696, "step": 9070 }, { "epoch": 0.98, "grad_norm": 1.6057756238716798, "learning_rate": 1.616679350337047e-08, "loss": 0.8964, "step": 9071 }, { "epoch": 0.98, "grad_norm": 1.7403326619418713, "learning_rate": 1.602719890045179e-08, "loss": 0.9162, "step": 9072 }, { "epoch": 0.98, "grad_norm": 1.742054600595073, "learning_rate": 1.5888208621417377e-08, "loss": 0.9154, "step": 9073 }, { "epoch": 0.98, "grad_norm": 1.7853893303960242, "learning_rate": 1.574982268312042e-08, "loss": 0.9125, "step": 9074 }, { "epoch": 0.98, "grad_norm": 1.83761020856176, "learning_rate": 1.561204110234138e-08, "loss": 0.9192, "step": 9075 }, { "epoch": 0.98, "grad_norm": 1.7089640134701365, "learning_rate": 1.547486389578523e-08, "loss": 0.8141, "step": 9076 }, { "epoch": 0.98, "grad_norm": 1.7827373941860172, "learning_rate": 1.5338291080086444e-08, "loss": 0.8751, "step": 9077 }, { "epoch": 0.98, "grad_norm": 1.861494262388883, "learning_rate": 1.5202322671805104e-08, "loss": 0.913, "step": 9078 }, { "epoch": 
0.98, "grad_norm": 1.7785025146223936, "learning_rate": 1.5066958687426915e-08, "loss": 0.9149, "step": 9079 }, { "epoch": 0.98, "grad_norm": 1.8103344790437834, "learning_rate": 1.493219914336541e-08, "loss": 0.8377, "step": 9080 }, { "epoch": 0.98, "grad_norm": 0.7982179672632662, "learning_rate": 1.4798044055961414e-08, "loss": 1.0349, "step": 9081 }, { "epoch": 0.98, "grad_norm": 2.9191032215125934, "learning_rate": 1.4664493441480798e-08, "loss": 0.9087, "step": 9082 }, { "epoch": 0.98, "grad_norm": 1.8226221196107963, "learning_rate": 1.4531547316117833e-08, "loss": 0.8079, "step": 9083 }, { "epoch": 0.98, "grad_norm": 1.7944971360217326, "learning_rate": 1.4399205695991847e-08, "loss": 0.9084, "step": 9084 }, { "epoch": 0.98, "grad_norm": 1.723446214042162, "learning_rate": 1.4267468597150558e-08, "loss": 0.902, "step": 9085 }, { "epoch": 0.98, "grad_norm": 1.758819129178057, "learning_rate": 1.4136336035566744e-08, "loss": 0.8155, "step": 9086 }, { "epoch": 0.98, "grad_norm": 1.690446515285022, "learning_rate": 1.4005808027141576e-08, "loss": 0.8442, "step": 9087 }, { "epoch": 0.98, "grad_norm": 1.7178650777181188, "learning_rate": 1.3875884587700727e-08, "loss": 0.9156, "step": 9088 }, { "epoch": 0.98, "grad_norm": 1.8440204890789689, "learning_rate": 1.3746565732999372e-08, "loss": 0.911, "step": 9089 }, { "epoch": 0.98, "grad_norm": 1.8831473392886526, "learning_rate": 1.3617851478716637e-08, "loss": 0.88, "step": 9090 }, { "epoch": 0.98, "grad_norm": 1.738609579311347, "learning_rate": 1.3489741840460591e-08, "loss": 0.8092, "step": 9091 }, { "epoch": 0.98, "grad_norm": 1.7699042220963042, "learning_rate": 1.3362236833763809e-08, "loss": 0.9627, "step": 9092 }, { "epoch": 0.98, "grad_norm": 1.7119957305818998, "learning_rate": 1.3235336474087812e-08, "loss": 0.89, "step": 9093 }, { "epoch": 0.98, "grad_norm": 1.7283917105327136, "learning_rate": 1.3109040776819181e-08, "loss": 0.9074, "step": 9094 }, { "epoch": 0.98, "grad_norm": 1.7809673917823186, "learning_rate": 1.2983349757271779e-08, "loss": 1.0231, "step": 9095 }, { "epoch": 0.98, "grad_norm": 1.9068419779083436, "learning_rate": 1.285826343068619e-08, "loss": 0.8768, "step": 9096 }, { "epoch": 0.98, "grad_norm": 1.8644712209673977, "learning_rate": 1.2733781812229728e-08, "loss": 0.9349, "step": 9097 }, { "epoch": 0.98, "grad_norm": 1.825391201906082, "learning_rate": 1.2609904916995319e-08, "loss": 0.9274, "step": 9098 }, { "epoch": 0.98, "grad_norm": 1.8177325574104135, "learning_rate": 1.248663276000428e-08, "loss": 0.8698, "step": 9099 }, { "epoch": 0.98, "grad_norm": 1.5993057887215836, "learning_rate": 1.23639653562041e-08, "loss": 0.9245, "step": 9100 }, { "epoch": 0.98, "grad_norm": 0.7717911843510411, "learning_rate": 1.2241902720467325e-08, "loss": 1.07, "step": 9101 }, { "epoch": 0.98, "grad_norm": 1.6821630528082228, "learning_rate": 1.2120444867596003e-08, "loss": 0.8692, "step": 9102 }, { "epoch": 0.98, "grad_norm": 1.7746723796187034, "learning_rate": 1.1999591812316136e-08, "loss": 0.9126, "step": 9103 }, { "epoch": 0.98, "grad_norm": 1.8492099322470268, "learning_rate": 1.1879343569282109e-08, "loss": 0.9184, "step": 9104 }, { "epoch": 0.98, "grad_norm": 1.7623791065358076, "learning_rate": 1.1759700153073928e-08, "loss": 0.9777, "step": 9105 }, { "epoch": 0.98, "grad_norm": 1.7654244386832343, "learning_rate": 1.1640661578199985e-08, "loss": 0.8186, "step": 9106 }, { "epoch": 0.98, "grad_norm": 1.7304830298380984, "learning_rate": 1.1522227859092627e-08, "loss": 0.9392, "step": 9107 }, { "epoch": 0.98, 
"grad_norm": 1.690768074159443, "learning_rate": 1.1404399010113698e-08, "loss": 0.8725, "step": 9108 }, { "epoch": 0.98, "grad_norm": 1.7280855868652891, "learning_rate": 1.1287175045548993e-08, "loss": 0.881, "step": 9109 }, { "epoch": 0.98, "grad_norm": 1.6950658892271546, "learning_rate": 1.117055597961325e-08, "loss": 0.8178, "step": 9110 }, { "epoch": 0.98, "grad_norm": 1.7151001594030106, "learning_rate": 1.105454182644683e-08, "loss": 0.8519, "step": 9111 }, { "epoch": 0.98, "grad_norm": 1.7004414943541661, "learning_rate": 1.0939132600116808e-08, "loss": 0.9615, "step": 9112 }, { "epoch": 0.98, "grad_norm": 1.606747081021208, "learning_rate": 1.0824328314616995e-08, "loss": 0.8498, "step": 9113 }, { "epoch": 0.98, "grad_norm": 1.6274400127430584, "learning_rate": 1.0710128983867364e-08, "loss": 0.8736, "step": 9114 }, { "epoch": 0.98, "grad_norm": 1.6272621288648101, "learning_rate": 1.0596534621715171e-08, "loss": 0.9128, "step": 9115 }, { "epoch": 0.98, "grad_norm": 1.7189807665646388, "learning_rate": 1.0483545241934401e-08, "loss": 0.8904, "step": 9116 }, { "epoch": 0.98, "grad_norm": 1.8173224840161402, "learning_rate": 1.037116085822576e-08, "loss": 0.9057, "step": 9117 }, { "epoch": 0.98, "grad_norm": 1.7270116196563388, "learning_rate": 1.0259381484215014e-08, "loss": 0.8728, "step": 9118 }, { "epoch": 0.98, "grad_norm": 1.7185204396194957, "learning_rate": 1.0148207133456877e-08, "loss": 0.8975, "step": 9119 }, { "epoch": 0.98, "grad_norm": 1.6422698095934387, "learning_rate": 1.0037637819431123e-08, "loss": 0.9116, "step": 9120 }, { "epoch": 0.98, "grad_norm": 1.7174578228824693, "learning_rate": 9.927673555544804e-09, "loss": 0.8597, "step": 9121 }, { "epoch": 0.98, "grad_norm": 1.7745232725329894, "learning_rate": 9.818314355131697e-09, "loss": 0.8474, "step": 9122 }, { "epoch": 0.98, "grad_norm": 1.799376446663371, "learning_rate": 9.709560231451198e-09, "loss": 0.8533, "step": 9123 }, { "epoch": 0.98, "grad_norm": 1.7464341523903155, "learning_rate": 9.60141119769109e-09, "loss": 0.9681, "step": 9124 }, { "epoch": 0.98, "grad_norm": 1.9377358995752976, "learning_rate": 9.493867266964219e-09, "loss": 0.8759, "step": 9125 }, { "epoch": 0.98, "grad_norm": 1.6861470075195868, "learning_rate": 9.386928452310707e-09, "loss": 0.8306, "step": 9126 }, { "epoch": 0.98, "grad_norm": 1.70984096912081, "learning_rate": 9.280594766697959e-09, "loss": 0.8176, "step": 9127 }, { "epoch": 0.98, "grad_norm": 1.6742680743120448, "learning_rate": 9.174866223018441e-09, "loss": 0.7947, "step": 9128 }, { "epoch": 0.98, "grad_norm": 1.9394316850888744, "learning_rate": 9.069742834092454e-09, "loss": 0.9579, "step": 9129 }, { "epoch": 0.98, "grad_norm": 1.7993923890408288, "learning_rate": 8.965224612665912e-09, "loss": 0.866, "step": 9130 }, { "epoch": 0.98, "grad_norm": 1.7447403404168589, "learning_rate": 8.861311571413122e-09, "loss": 0.8641, "step": 9131 }, { "epoch": 0.98, "grad_norm": 0.7850976512753851, "learning_rate": 8.758003722933451e-09, "loss": 1.0539, "step": 9132 }, { "epoch": 0.98, "grad_norm": 1.7658521117293198, "learning_rate": 8.655301079752986e-09, "loss": 0.8853, "step": 9133 }, { "epoch": 0.98, "grad_norm": 1.6773223351139095, "learning_rate": 8.553203654325104e-09, "loss": 0.8556, "step": 9134 }, { "epoch": 0.98, "grad_norm": 0.8132855294922835, "learning_rate": 8.451711459029343e-09, "loss": 1.04, "step": 9135 }, { "epoch": 0.98, "grad_norm": 1.6863286563636097, "learning_rate": 8.350824506172528e-09, "loss": 0.8706, "step": 9136 }, { "epoch": 0.98, "grad_norm": 
1.65650098637169, "learning_rate": 8.250542807986538e-09, "loss": 0.8251, "step": 9137 }, { "epoch": 0.98, "grad_norm": 1.741410215587374, "learning_rate": 8.15086637663165e-09, "loss": 0.8967, "step": 9138 }, { "epoch": 0.98, "grad_norm": 1.982185707419509, "learning_rate": 8.051795224193748e-09, "loss": 0.8117, "step": 9139 }, { "epoch": 0.98, "grad_norm": 1.7479311987933726, "learning_rate": 7.953329362685447e-09, "loss": 0.8447, "step": 9140 }, { "epoch": 0.98, "grad_norm": 1.7109635207171099, "learning_rate": 7.855468804046085e-09, "loss": 0.8834, "step": 9141 }, { "epoch": 0.98, "grad_norm": 1.682281990730583, "learning_rate": 7.758213560141726e-09, "loss": 0.8489, "step": 9142 }, { "epoch": 0.98, "grad_norm": 1.784424990257666, "learning_rate": 7.661563642765158e-09, "loss": 0.9257, "step": 9143 }, { "epoch": 0.98, "grad_norm": 1.75055616743191, "learning_rate": 7.565519063634785e-09, "loss": 0.9217, "step": 9144 }, { "epoch": 0.98, "grad_norm": 1.7264691161134966, "learning_rate": 7.470079834396849e-09, "loss": 0.9823, "step": 9145 }, { "epoch": 0.98, "grad_norm": 1.7127240808791075, "learning_rate": 7.375245966623757e-09, "loss": 0.8854, "step": 9146 }, { "epoch": 0.98, "grad_norm": 1.7692033642720633, "learning_rate": 7.281017471814089e-09, "loss": 0.9256, "step": 9147 }, { "epoch": 0.98, "grad_norm": 1.654493168379517, "learning_rate": 7.187394361393707e-09, "loss": 0.9111, "step": 9148 }, { "epoch": 0.98, "grad_norm": 1.829072790970889, "learning_rate": 7.09437664671464e-09, "loss": 0.9651, "step": 9149 }, { "epoch": 0.98, "grad_norm": 1.8457075067193203, "learning_rate": 7.001964339055644e-09, "loss": 0.8753, "step": 9150 }, { "epoch": 0.98, "grad_norm": 1.820544486890795, "learning_rate": 6.910157449621646e-09, "loss": 0.9111, "step": 9151 }, { "epoch": 0.98, "grad_norm": 1.6908804980856922, "learning_rate": 6.818955989545406e-09, "loss": 0.8361, "step": 9152 }, { "epoch": 0.98, "grad_norm": 1.7754779786135602, "learning_rate": 6.728359969884191e-09, "loss": 0.8911, "step": 9153 }, { "epoch": 0.98, "grad_norm": 1.6739641787319068, "learning_rate": 6.638369401624212e-09, "loss": 0.914, "step": 9154 }, { "epoch": 0.98, "grad_norm": 1.578525368276803, "learning_rate": 6.548984295676741e-09, "loss": 0.8516, "step": 9155 }, { "epoch": 0.98, "grad_norm": 1.6925381320760478, "learning_rate": 6.4602046628797765e-09, "loss": 0.9197, "step": 9156 }, { "epoch": 0.98, "grad_norm": 1.685470067717621, "learning_rate": 6.37203051399804e-09, "loss": 0.8734, "step": 9157 }, { "epoch": 0.98, "grad_norm": 0.7825497932004423, "learning_rate": 6.284461859723534e-09, "loss": 1.0306, "step": 9158 }, { "epoch": 0.98, "grad_norm": 1.6005891106138797, "learning_rate": 6.197498710673877e-09, "loss": 0.83, "step": 9159 }, { "epoch": 0.98, "grad_norm": 1.6659898542888691, "learning_rate": 6.111141077393967e-09, "loss": 0.8889, "step": 9160 }, { "epoch": 0.98, "grad_norm": 1.7843773793192037, "learning_rate": 6.025388970354873e-09, "loss": 0.9343, "step": 9161 }, { "epoch": 0.98, "grad_norm": 1.709312990622651, "learning_rate": 5.940242399953833e-09, "loss": 0.8766, "step": 9162 }, { "epoch": 0.99, "grad_norm": 1.7987757016052872, "learning_rate": 5.855701376515921e-09, "loss": 0.8605, "step": 9163 }, { "epoch": 0.99, "grad_norm": 1.7417187056297736, "learning_rate": 5.771765910291272e-09, "loss": 0.8628, "step": 9164 }, { "epoch": 0.99, "grad_norm": 1.6695044749664891, "learning_rate": 5.688436011457854e-09, "loss": 0.8849, "step": 9165 }, { "epoch": 0.99, "grad_norm": 1.7464852151295442, 
"learning_rate": 5.605711690119808e-09, "loss": 0.8689, "step": 9166 }, { "epoch": 0.99, "grad_norm": 1.7433133950511028, "learning_rate": 5.5235929563074446e-09, "loss": 0.9274, "step": 9167 }, { "epoch": 0.99, "grad_norm": 0.7687967092378002, "learning_rate": 5.442079819977797e-09, "loss": 1.0319, "step": 9168 }, { "epoch": 0.99, "grad_norm": 0.7533186276697215, "learning_rate": 5.361172291014627e-09, "loss": 1.0757, "step": 9169 }, { "epoch": 0.99, "grad_norm": 1.803827272520431, "learning_rate": 5.280870379228975e-09, "loss": 0.9688, "step": 9170 }, { "epoch": 0.99, "grad_norm": 1.8319383936184692, "learning_rate": 5.201174094356942e-09, "loss": 0.8795, "step": 9171 }, { "epoch": 0.99, "grad_norm": 0.7917142422047312, "learning_rate": 5.122083446062464e-09, "loss": 1.0606, "step": 9172 }, { "epoch": 0.99, "grad_norm": 1.7444879660423172, "learning_rate": 5.043598443935094e-09, "loss": 0.9081, "step": 9173 }, { "epoch": 0.99, "grad_norm": 1.5490749042177736, "learning_rate": 4.965719097491661e-09, "loss": 0.8728, "step": 9174 }, { "epoch": 0.99, "grad_norm": 1.6796411304409415, "learning_rate": 4.888445416175725e-09, "loss": 0.8342, "step": 9175 }, { "epoch": 0.99, "grad_norm": 1.6459189189222507, "learning_rate": 4.8117774093559e-09, "loss": 0.9059, "step": 9176 }, { "epoch": 0.99, "grad_norm": 1.8093140156129675, "learning_rate": 4.7357150863291955e-09, "loss": 0.863, "step": 9177 }, { "epoch": 0.99, "grad_norm": 1.7868077251150232, "learning_rate": 4.660258456318789e-09, "loss": 0.8175, "step": 9178 }, { "epoch": 0.99, "grad_norm": 1.7996668198164496, "learning_rate": 4.5854075284729184e-09, "loss": 0.9129, "step": 9179 }, { "epoch": 0.99, "grad_norm": 1.5685924330054664, "learning_rate": 4.5111623118687666e-09, "loss": 0.8879, "step": 9180 }, { "epoch": 0.99, "grad_norm": 1.7122473341977509, "learning_rate": 4.437522815508577e-09, "loss": 0.9346, "step": 9181 }, { "epoch": 0.99, "grad_norm": 1.7153049102960125, "learning_rate": 4.3644890483202086e-09, "loss": 0.8689, "step": 9182 }, { "epoch": 0.99, "grad_norm": 1.6551719665955429, "learning_rate": 4.292061019160465e-09, "loss": 0.8898, "step": 9183 }, { "epoch": 0.99, "grad_norm": 1.6802026216778958, "learning_rate": 4.2202387368112105e-09, "loss": 0.8848, "step": 9184 }, { "epoch": 0.99, "grad_norm": 1.7901308050121532, "learning_rate": 4.149022209981035e-09, "loss": 0.9608, "step": 9185 }, { "epoch": 0.99, "grad_norm": 1.7099148401147537, "learning_rate": 4.078411447305253e-09, "loss": 0.8323, "step": 9186 }, { "epoch": 0.99, "grad_norm": 1.6697862061336277, "learning_rate": 4.008406457345904e-09, "loss": 0.8961, "step": 9187 }, { "epoch": 0.99, "grad_norm": 1.7393976808328753, "learning_rate": 3.939007248590643e-09, "loss": 0.8644, "step": 9188 }, { "epoch": 0.99, "grad_norm": 1.8592704895006116, "learning_rate": 3.870213829454961e-09, "loss": 0.9447, "step": 9189 }, { "epoch": 0.99, "grad_norm": 1.752231416735354, "learning_rate": 3.802026208279963e-09, "loss": 0.9468, "step": 9190 }, { "epoch": 0.99, "grad_norm": 1.7003641237682152, "learning_rate": 3.734444393334036e-09, "loss": 0.8536, "step": 9191 }, { "epoch": 0.99, "grad_norm": 1.7829653479416725, "learning_rate": 3.66746839281118e-09, "loss": 0.8838, "step": 9192 }, { "epoch": 0.99, "grad_norm": 0.7774107316254032, "learning_rate": 3.601098214833232e-09, "loss": 0.986, "step": 9193 }, { "epoch": 0.99, "grad_norm": 1.724201884433632, "learning_rate": 3.5353338674470883e-09, "loss": 0.9272, "step": 9194 }, { "epoch": 0.99, "grad_norm": 1.791735323561752, "learning_rate": 
3.4701753586269262e-09, "loss": 0.8899, "step": 9195 }, { "epoch": 0.99, "grad_norm": 0.8046994858282555, "learning_rate": 3.4056226962742024e-09, "loss": 1.0684, "step": 9196 }, { "epoch": 0.99, "grad_norm": 1.6199620216039676, "learning_rate": 3.3416758882154344e-09, "loss": 0.8862, "step": 9197 }, { "epoch": 0.99, "grad_norm": 1.694106765170152, "learning_rate": 3.2783349422044197e-09, "loss": 0.8702, "step": 9198 }, { "epoch": 0.99, "grad_norm": 1.7415696415325952, "learning_rate": 3.2155998659211265e-09, "loss": 0.8989, "step": 9199 }, { "epoch": 0.99, "grad_norm": 1.6724028420362715, "learning_rate": 3.153470666973357e-09, "loss": 0.8568, "step": 9200 }, { "epoch": 0.99, "grad_norm": 1.741978350469708, "learning_rate": 3.091947352893976e-09, "loss": 0.864, "step": 9201 }, { "epoch": 0.99, "grad_norm": 1.8347308123330335, "learning_rate": 3.031029931143126e-09, "loss": 0.9434, "step": 9202 }, { "epoch": 0.99, "grad_norm": 1.7361159595619056, "learning_rate": 2.9707184091071206e-09, "loss": 0.888, "step": 9203 }, { "epoch": 0.99, "grad_norm": 1.6745277667295864, "learning_rate": 2.9110127940984447e-09, "loss": 0.7828, "step": 9204 }, { "epoch": 0.99, "grad_norm": 1.8624210834481494, "learning_rate": 2.8519130933574168e-09, "loss": 0.8727, "step": 9205 }, { "epoch": 0.99, "grad_norm": 1.9420242658487823, "learning_rate": 2.7934193140499723e-09, "loss": 0.9323, "step": 9206 }, { "epoch": 0.99, "grad_norm": 1.668156707492051, "learning_rate": 2.73553146326766e-09, "loss": 0.8295, "step": 9207 }, { "epoch": 0.99, "grad_norm": 1.75298747884214, "learning_rate": 2.6782495480309757e-09, "loss": 0.8856, "step": 9208 }, { "epoch": 0.99, "grad_norm": 1.7328517413471247, "learning_rate": 2.621573575284919e-09, "loss": 0.8712, "step": 9209 }, { "epoch": 0.99, "grad_norm": 0.7675900322948337, "learning_rate": 2.565503551901216e-09, "loss": 1.034, "step": 9210 }, { "epoch": 0.99, "grad_norm": 1.7147983652449887, "learning_rate": 2.5100394846794272e-09, "loss": 0.8474, "step": 9211 }, { "epoch": 0.99, "grad_norm": 1.8194909961322767, "learning_rate": 2.455181380344174e-09, "loss": 0.8328, "step": 9212 }, { "epoch": 0.99, "grad_norm": 1.6677750753663276, "learning_rate": 2.4009292455468013e-09, "loss": 0.8632, "step": 9213 }, { "epoch": 0.99, "grad_norm": 1.8015452141650738, "learning_rate": 2.347283086867047e-09, "loss": 0.884, "step": 9214 }, { "epoch": 0.99, "grad_norm": 1.7053056673751479, "learning_rate": 2.294242910808042e-09, "loss": 0.8569, "step": 9215 }, { "epoch": 0.99, "grad_norm": 0.7870679862855685, "learning_rate": 2.2418087238018638e-09, "loss": 1.0511, "step": 9216 }, { "epoch": 0.99, "grad_norm": 1.7725984634506644, "learning_rate": 2.189980532206759e-09, "loss": 0.8602, "step": 9217 }, { "epoch": 0.99, "grad_norm": 1.7162118703527607, "learning_rate": 2.1387583423060354e-09, "loss": 0.8651, "step": 9218 }, { "epoch": 0.99, "grad_norm": 1.7869090857516297, "learning_rate": 2.0881421603113904e-09, "loss": 0.8646, "step": 9219 }, { "epoch": 0.99, "grad_norm": 1.8112580336366757, "learning_rate": 2.038131992359582e-09, "loss": 0.84, "step": 9220 }, { "epoch": 0.99, "grad_norm": 1.7552453134607935, "learning_rate": 1.9887278445152037e-09, "loss": 0.9001, "step": 9221 }, { "epoch": 0.99, "grad_norm": 0.770616304493373, "learning_rate": 1.9399297227684634e-09, "loss": 1.0231, "step": 9222 }, { "epoch": 0.99, "grad_norm": 1.7410290288123016, "learning_rate": 1.8917376330357395e-09, "loss": 0.8115, "step": 9223 }, { "epoch": 0.99, "grad_norm": 1.783793368000215, "learning_rate": 
1.8441515811612465e-09, "loss": 0.9071, "step": 9224 }, { "epoch": 0.99, "grad_norm": 1.718071172083732, "learning_rate": 1.7971715729148131e-09, "loss": 0.8739, "step": 9225 }, { "epoch": 0.99, "grad_norm": 1.689541569774579, "learning_rate": 1.7507976139924388e-09, "loss": 0.845, "step": 9226 }, { "epoch": 0.99, "grad_norm": 1.5944375630462957, "learning_rate": 1.7050297100174029e-09, "loss": 0.8899, "step": 9227 }, { "epoch": 0.99, "grad_norm": 1.8896529716412072, "learning_rate": 1.6598678665397106e-09, "loss": 0.9372, "step": 9228 }, { "epoch": 0.99, "grad_norm": 1.7883531679192002, "learning_rate": 1.6153120890344266e-09, "loss": 0.8629, "step": 9229 }, { "epoch": 0.99, "grad_norm": 1.688139275768094, "learning_rate": 1.5713623829050063e-09, "loss": 0.8625, "step": 9230 }, { "epoch": 0.99, "grad_norm": 1.769601675892439, "learning_rate": 1.52801875347941e-09, "loss": 0.9351, "step": 9231 }, { "epoch": 0.99, "grad_norm": 1.686950453235554, "learning_rate": 1.4852812060145438e-09, "loss": 0.8068, "step": 9232 }, { "epoch": 0.99, "grad_norm": 1.6115900019069742, "learning_rate": 1.4431497456918186e-09, "loss": 0.8802, "step": 9233 }, { "epoch": 0.99, "grad_norm": 1.685260222683556, "learning_rate": 1.4016243776193705e-09, "loss": 0.8976, "step": 9234 }, { "epoch": 0.99, "grad_norm": 1.7676793964300495, "learning_rate": 1.3607051068331712e-09, "loss": 0.884, "step": 9235 }, { "epoch": 0.99, "grad_norm": 1.6721572400314098, "learning_rate": 1.320391938294252e-09, "loss": 0.8607, "step": 9236 }, { "epoch": 0.99, "grad_norm": 1.7288428051240505, "learning_rate": 1.2806848768909252e-09, "loss": 0.9181, "step": 9237 }, { "epoch": 0.99, "grad_norm": 1.7248314516585734, "learning_rate": 1.241583927437673e-09, "loss": 0.7547, "step": 9238 }, { "epoch": 0.99, "grad_norm": 1.8152562821450142, "learning_rate": 1.2030890946757024e-09, "loss": 0.9568, "step": 9239 }, { "epoch": 0.99, "grad_norm": 1.7129744543587961, "learning_rate": 1.1652003832729464e-09, "loss": 0.862, "step": 9240 }, { "epoch": 0.99, "grad_norm": 1.6826210880891392, "learning_rate": 1.1279177978229527e-09, "loss": 0.7989, "step": 9241 }, { "epoch": 0.99, "grad_norm": 1.8414224128793462, "learning_rate": 1.0912413428471046e-09, "loss": 0.9147, "step": 9242 }, { "epoch": 0.99, "grad_norm": 1.6660388927677798, "learning_rate": 1.0551710227912904e-09, "loss": 0.8029, "step": 9243 }, { "epoch": 0.99, "grad_norm": 1.7712005816402376, "learning_rate": 1.0197068420308986e-09, "loss": 0.9259, "step": 9244 }, { "epoch": 0.99, "grad_norm": 1.7562717522451998, "learning_rate": 9.84848804864713e-10, "loss": 0.8605, "step": 9245 }, { "epoch": 0.99, "grad_norm": 1.7626233880234385, "learning_rate": 9.505969155193528e-10, "loss": 0.8751, "step": 9246 }, { "epoch": 0.99, "grad_norm": 0.7657631848052613, "learning_rate": 9.169511781492724e-10, "loss": 1.0402, "step": 9247 }, { "epoch": 0.99, "grad_norm": 1.6212134503198508, "learning_rate": 8.83911596832876e-10, "loss": 0.8869, "step": 9248 }, { "epoch": 0.99, "grad_norm": 1.8614988222888011, "learning_rate": 8.514781755769586e-10, "loss": 0.8919, "step": 9249 }, { "epoch": 0.99, "grad_norm": 1.803076439215676, "learning_rate": 8.196509183139301e-10, "loss": 0.9004, "step": 9250 }, { "epoch": 0.99, "grad_norm": 1.6903965231622837, "learning_rate": 7.884298289029258e-10, "loss": 0.8651, "step": 9251 }, { "epoch": 0.99, "grad_norm": 1.810782489416566, "learning_rate": 7.578149111298061e-10, "loss": 0.9026, "step": 9252 }, { "epoch": 0.99, "grad_norm": 2.114020211513277, "learning_rate": 
7.278061687066018e-10, "loss": 0.9181, "step": 9253 }, { "epoch": 0.99, "grad_norm": 1.7613803320428254, "learning_rate": 6.984036052720688e-10, "loss": 0.8922, "step": 9254 }, { "epoch": 0.99, "grad_norm": 1.691582899743085, "learning_rate": 6.696072243911333e-10, "loss": 0.9479, "step": 9255 }, { "epoch": 1.0, "grad_norm": 1.618488145780062, "learning_rate": 6.414170295560019e-10, "loss": 0.8667, "step": 9256 }, { "epoch": 1.0, "grad_norm": 1.6461631390311573, "learning_rate": 6.138330241839408e-10, "loss": 0.8966, "step": 9257 }, { "epoch": 1.0, "grad_norm": 1.6940069990940216, "learning_rate": 5.868552116206072e-10, "loss": 0.837, "step": 9258 }, { "epoch": 1.0, "grad_norm": 1.712907037219412, "learning_rate": 5.604835951367183e-10, "loss": 0.9584, "step": 9259 }, { "epoch": 1.0, "grad_norm": 1.80865466947985, "learning_rate": 5.347181779297162e-10, "loss": 0.8905, "step": 9260 }, { "epoch": 1.0, "grad_norm": 1.6943266445550123, "learning_rate": 5.095589631237686e-10, "loss": 0.7743, "step": 9261 }, { "epoch": 1.0, "grad_norm": 1.6516714029218185, "learning_rate": 4.850059537697682e-10, "loss": 0.9337, "step": 9262 }, { "epoch": 1.0, "grad_norm": 1.6778305443568975, "learning_rate": 4.610591528447783e-10, "loss": 0.9002, "step": 9263 }, { "epoch": 1.0, "grad_norm": 1.7855790963533185, "learning_rate": 4.37718563252032e-10, "loss": 0.9147, "step": 9264 }, { "epoch": 1.0, "grad_norm": 1.7179218980857869, "learning_rate": 4.14984187822598e-10, "loss": 0.9487, "step": 9265 }, { "epoch": 1.0, "grad_norm": 1.7832879426405934, "learning_rate": 3.928560293120498e-10, "loss": 0.8127, "step": 9266 }, { "epoch": 1.0, "grad_norm": 1.6660065315784636, "learning_rate": 3.7133409040435165e-10, "loss": 0.8945, "step": 9267 }, { "epoch": 1.0, "grad_norm": 1.8032130897953331, "learning_rate": 3.5041837370852763e-10, "loss": 0.8849, "step": 9268 }, { "epoch": 1.0, "grad_norm": 1.7439199649405395, "learning_rate": 3.3010888176088217e-10, "loss": 0.9488, "step": 9269 }, { "epoch": 1.0, "grad_norm": 1.6366513913097684, "learning_rate": 3.1040561702444517e-10, "loss": 0.8299, "step": 9270 }, { "epoch": 1.0, "grad_norm": 1.7554527824213058, "learning_rate": 2.913085818873063e-10, "loss": 0.9064, "step": 9271 }, { "epoch": 1.0, "grad_norm": 1.7596989810085981, "learning_rate": 2.7281777866594584e-10, "loss": 0.9059, "step": 9272 }, { "epoch": 1.0, "grad_norm": 1.614690178679153, "learning_rate": 2.5493320960190413e-10, "loss": 0.8463, "step": 9273 }, { "epoch": 1.0, "grad_norm": 1.7325704202541454, "learning_rate": 2.376548768640019e-10, "loss": 0.8454, "step": 9274 }, { "epoch": 1.0, "grad_norm": 1.71311509657776, "learning_rate": 2.2098278254722993e-10, "loss": 0.7916, "step": 9275 }, { "epoch": 1.0, "grad_norm": 1.6898908948729612, "learning_rate": 2.0491692867330438e-10, "loss": 0.9123, "step": 9276 }, { "epoch": 1.0, "grad_norm": 1.6649349260250577, "learning_rate": 1.8945731719011152e-10, "loss": 0.9142, "step": 9277 }, { "epoch": 1.0, "grad_norm": 1.5917590118855425, "learning_rate": 1.7460394997170783e-10, "loss": 0.8329, "step": 9278 }, { "epoch": 1.0, "grad_norm": 1.6164112088879237, "learning_rate": 1.6035682881998526e-10, "loss": 0.8622, "step": 9279 }, { "epoch": 1.0, "grad_norm": 1.7845646856090713, "learning_rate": 1.4671595546245087e-10, "loss": 0.872, "step": 9280 }, { "epoch": 1.0, "grad_norm": 1.795501175954851, "learning_rate": 1.3368133155222673e-10, "loss": 0.8339, "step": 9281 }, { "epoch": 1.0, "grad_norm": 1.7800859391995763, "learning_rate": 1.2125295867027043e-10, "loss": 0.9319, 
"step": 9282 }, { "epoch": 1.0, "grad_norm": 1.7562846578273954, "learning_rate": 1.0943083832370971e-10, "loss": 0.8798, "step": 9283 }, { "epoch": 1.0, "grad_norm": 1.750762004855234, "learning_rate": 9.821497194584251e-11, "loss": 0.8722, "step": 9284 }, { "epoch": 1.0, "grad_norm": 1.737622442790977, "learning_rate": 8.760536089724713e-11, "loss": 0.8604, "step": 9285 }, { "epoch": 1.0, "grad_norm": 1.6217067216408663, "learning_rate": 7.760200646300675e-11, "loss": 0.8714, "step": 9286 }, { "epoch": 1.0, "grad_norm": 1.7702148068771453, "learning_rate": 6.820490985715022e-11, "loss": 0.8923, "step": 9287 }, { "epoch": 1.0, "grad_norm": 0.7590042620966585, "learning_rate": 5.941407221932149e-11, "loss": 1.0117, "step": 9288 }, { "epoch": 1.0, "grad_norm": 0.7517794323985483, "learning_rate": 5.122949461422444e-11, "loss": 1.0238, "step": 9289 }, { "epoch": 1.0, "grad_norm": 1.78198599112025, "learning_rate": 4.365117803550867e-11, "loss": 0.8927, "step": 9290 }, { "epoch": 1.0, "grad_norm": 1.8005681139348992, "learning_rate": 3.667912340132862e-11, "loss": 0.8684, "step": 9291 }, { "epoch": 1.0, "grad_norm": 1.8270837844191012, "learning_rate": 3.031333155767424e-11, "loss": 0.9145, "step": 9292 }, { "epoch": 1.0, "grad_norm": 1.7498665486363356, "learning_rate": 2.4553803275595422e-11, "loss": 0.8724, "step": 9293 }, { "epoch": 1.0, "grad_norm": 1.6695096817029753, "learning_rate": 1.9400539253977558e-11, "loss": 0.9088, "step": 9294 }, { "epoch": 1.0, "grad_norm": 1.6716326178141856, "learning_rate": 1.4853540117321098e-11, "loss": 0.9037, "step": 9295 }, { "epoch": 1.0, "grad_norm": 1.6850984595928384, "learning_rate": 1.0912806417961997e-11, "loss": 0.8323, "step": 9296 }, { "epoch": 1.0, "grad_norm": 1.734363032129179, "learning_rate": 7.578338632741045e-12, "loss": 0.8643, "step": 9297 }, { "epoch": 1.0, "grad_norm": 0.7727923476558051, "learning_rate": 4.850137165779423e-12, "loss": 1.0634, "step": 9298 }, { "epoch": 1.0, "grad_norm": 1.7799045450905264, "learning_rate": 2.728202349033815e-12, "loss": 0.8718, "step": 9299 }, { "epoch": 1.0, "grad_norm": 1.7925661467446528, "learning_rate": 1.2125344384106285e-12, "loss": 0.9312, "step": 9300 }, { "epoch": 1.0, "grad_norm": 1.7025758032701872, "learning_rate": 3.031336187619971e-13, "loss": 0.9755, "step": 9301 }, { "epoch": 1.0, "grad_norm": 1.4419020241733236, "learning_rate": 0.0, "loss": 0.9002, "step": 9302 }, { "epoch": 1.0, "step": 9302, "total_flos": 4294892380667904.0, "train_loss": 0.9256457089672958, "train_runtime": 61794.9627, "train_samples_per_second": 38.538, "train_steps_per_second": 0.151 } ], "logging_steps": 1.0, "max_steps": 9302, "num_input_tokens_seen": 0, "num_train_epochs": 1, "save_steps": 1000, "total_flos": 4294892380667904.0, "train_batch_size": 2, "trial_name": null, "trial_params": null }