|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.0, |
|
"eval_steps": 200, |
|
"global_step": 668, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0029940119760479044, |
|
"grad_norm": 2.2040421447560568, |
|
"learning_rate": 9.999944704978835e-06, |
|
"loss": 0.6026, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.005988023952095809, |
|
"grad_norm": 1.706829217782298, |
|
"learning_rate": 9.999778821138357e-06, |
|
"loss": 0.5602, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.008982035928143712, |
|
"grad_norm": 1.3201917385546982, |
|
"learning_rate": 9.999502352147583e-06, |
|
"loss": 0.5501, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.011976047904191617, |
|
"grad_norm": 1.4521425277487803, |
|
"learning_rate": 9.999115304121459e-06, |
|
"loss": 0.5781, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.014970059880239521, |
|
"grad_norm": 1.2061198483240583, |
|
"learning_rate": 9.998617685620715e-06, |
|
"loss": 0.4422, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.017964071856287425, |
|
"grad_norm": 1.2429550198024126, |
|
"learning_rate": 9.998009507651683e-06, |
|
"loss": 0.4971, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.020958083832335328, |
|
"grad_norm": 1.1286011870112085, |
|
"learning_rate": 9.997290783666048e-06, |
|
"loss": 0.4521, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.023952095808383235, |
|
"grad_norm": 1.1605139800048332, |
|
"learning_rate": 9.996461529560553e-06, |
|
"loss": 0.4348, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02694610778443114, |
|
"grad_norm": 1.271946656514232, |
|
"learning_rate": 9.995521763676645e-06, |
|
"loss": 0.4915, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.029940119760479042, |
|
"grad_norm": 1.2493769539888613, |
|
"learning_rate": 9.994471506800078e-06, |
|
"loss": 0.4464, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03293413173652695, |
|
"grad_norm": 1.064126963343827, |
|
"learning_rate": 9.993310782160439e-06, |
|
"loss": 0.4809, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.03592814371257485, |
|
"grad_norm": 0.9959531195978738, |
|
"learning_rate": 9.992039615430648e-06, |
|
"loss": 0.4447, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.038922155688622756, |
|
"grad_norm": 0.9543314290660774, |
|
"learning_rate": 9.99065803472638e-06, |
|
"loss": 0.4216, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.041916167664670656, |
|
"grad_norm": 0.9850485715677441, |
|
"learning_rate": 9.989166070605447e-06, |
|
"loss": 0.4095, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04491017964071856, |
|
"grad_norm": 0.9009609876478459, |
|
"learning_rate": 9.98756375606713e-06, |
|
"loss": 0.395, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04790419161676647, |
|
"grad_norm": 1.0123732065428233, |
|
"learning_rate": 9.985851126551428e-06, |
|
"loss": 0.4286, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.05089820359281437, |
|
"grad_norm": 0.7433848076880695, |
|
"learning_rate": 9.9840282199383e-06, |
|
"loss": 0.3522, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.05389221556886228, |
|
"grad_norm": 0.7868585864748767, |
|
"learning_rate": 9.982095076546806e-06, |
|
"loss": 0.3991, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.05688622754491018, |
|
"grad_norm": 0.9349585272092751, |
|
"learning_rate": 9.980051739134235e-06, |
|
"loss": 0.4102, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.059880239520958084, |
|
"grad_norm": 0.9536042984951868, |
|
"learning_rate": 9.977898252895133e-06, |
|
"loss": 0.4178, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.06287425149700598, |
|
"grad_norm": 0.8474430314327542, |
|
"learning_rate": 9.975634665460333e-06, |
|
"loss": 0.3971, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.0658682634730539, |
|
"grad_norm": 0.9216923558311936, |
|
"learning_rate": 9.973261026895878e-06, |
|
"loss": 0.3639, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.0688622754491018, |
|
"grad_norm": 0.8986215167386543, |
|
"learning_rate": 9.970777389701927e-06, |
|
"loss": 0.449, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.0718562874251497, |
|
"grad_norm": 0.8226404900460093, |
|
"learning_rate": 9.968183808811586e-06, |
|
"loss": 0.3538, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.0748502994011976, |
|
"grad_norm": 0.9630832199680331, |
|
"learning_rate": 9.965480341589702e-06, |
|
"loss": 0.4195, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.07784431137724551, |
|
"grad_norm": 0.826812637560728, |
|
"learning_rate": 9.962667047831585e-06, |
|
"loss": 0.4146, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.08083832335329341, |
|
"grad_norm": 0.8668455064895977, |
|
"learning_rate": 9.95974398976169e-06, |
|
"loss": 0.3661, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.08383233532934131, |
|
"grad_norm": 0.9200527883911036, |
|
"learning_rate": 9.95671123203224e-06, |
|
"loss": 0.4069, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.08682634730538923, |
|
"grad_norm": 0.9692196219649082, |
|
"learning_rate": 9.953568841721796e-06, |
|
"loss": 0.4349, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.08982035928143713, |
|
"grad_norm": 0.775987435979951, |
|
"learning_rate": 9.950316888333775e-06, |
|
"loss": 0.3542, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.09281437125748503, |
|
"grad_norm": 0.8178557241224351, |
|
"learning_rate": 9.946955443794908e-06, |
|
"loss": 0.4216, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.09580838323353294, |
|
"grad_norm": 0.9389607587057774, |
|
"learning_rate": 9.943484582453653e-06, |
|
"loss": 0.4044, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.09880239520958084, |
|
"grad_norm": 0.7794272282393406, |
|
"learning_rate": 9.939904381078553e-06, |
|
"loss": 0.3491, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.10179640718562874, |
|
"grad_norm": 0.7020980849021461, |
|
"learning_rate": 9.93621491885653e-06, |
|
"loss": 0.3271, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.10479041916167664, |
|
"grad_norm": 0.9131827948904151, |
|
"learning_rate": 9.932416277391144e-06, |
|
"loss": 0.3765, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.10778443113772455, |
|
"grad_norm": 0.8839063900324204, |
|
"learning_rate": 9.928508540700775e-06, |
|
"loss": 0.3889, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.11077844311377245, |
|
"grad_norm": 0.864348313953269, |
|
"learning_rate": 9.924491795216777e-06, |
|
"loss": 0.4243, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.11377245508982035, |
|
"grad_norm": 0.8862059070465331, |
|
"learning_rate": 9.920366129781564e-06, |
|
"loss": 0.3956, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.11676646706586827, |
|
"grad_norm": 0.7131817643346815, |
|
"learning_rate": 9.916131635646635e-06, |
|
"loss": 0.3475, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.11976047904191617, |
|
"grad_norm": 0.8942423501970304, |
|
"learning_rate": 9.91178840647057e-06, |
|
"loss": 0.4226, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.12275449101796407, |
|
"grad_norm": 0.8130987873905524, |
|
"learning_rate": 9.907336538316946e-06, |
|
"loss": 0.3674, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.12574850299401197, |
|
"grad_norm": 0.7581674275510644, |
|
"learning_rate": 9.902776129652223e-06, |
|
"loss": 0.3211, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.12874251497005987, |
|
"grad_norm": 0.835347490776918, |
|
"learning_rate": 9.898107281343557e-06, |
|
"loss": 0.3332, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.1317365269461078, |
|
"grad_norm": 0.7603515058761441, |
|
"learning_rate": 9.893330096656576e-06, |
|
"loss": 0.3115, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.1347305389221557, |
|
"grad_norm": 0.8306219963480352, |
|
"learning_rate": 9.888444681253087e-06, |
|
"loss": 0.3832, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.1377245508982036, |
|
"grad_norm": 0.8958770540240665, |
|
"learning_rate": 9.883451143188753e-06, |
|
"loss": 0.3802, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.1407185628742515, |
|
"grad_norm": 0.8066887289512282, |
|
"learning_rate": 9.878349592910694e-06, |
|
"loss": 0.3172, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.1437125748502994, |
|
"grad_norm": 0.8836303821107435, |
|
"learning_rate": 9.873140143255035e-06, |
|
"loss": 0.406, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.1467065868263473, |
|
"grad_norm": 0.8297081568246096, |
|
"learning_rate": 9.867822909444435e-06, |
|
"loss": 0.3981, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.1497005988023952, |
|
"grad_norm": 0.845789689226809, |
|
"learning_rate": 9.862398009085511e-06, |
|
"loss": 0.3701, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.15269461077844312, |
|
"grad_norm": 0.818065405374004, |
|
"learning_rate": 9.856865562166256e-06, |
|
"loss": 0.3493, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.15568862275449102, |
|
"grad_norm": 0.7540749538935108, |
|
"learning_rate": 9.851225691053382e-06, |
|
"loss": 0.3184, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.15868263473053892, |
|
"grad_norm": 0.8856186820008544, |
|
"learning_rate": 9.8454785204896e-06, |
|
"loss": 0.3891, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.16167664670658682, |
|
"grad_norm": 0.7122037264181366, |
|
"learning_rate": 9.83962417759088e-06, |
|
"loss": 0.3317, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.16467065868263472, |
|
"grad_norm": 0.9025984473960144, |
|
"learning_rate": 9.833662791843628e-06, |
|
"loss": 0.334, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.16766467065868262, |
|
"grad_norm": 0.8296926651602788, |
|
"learning_rate": 9.827594495101824e-06, |
|
"loss": 0.3502, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.17065868263473055, |
|
"grad_norm": 0.8368614929206053, |
|
"learning_rate": 9.821419421584108e-06, |
|
"loss": 0.3657, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.17365269461077845, |
|
"grad_norm": 0.8024299882215358, |
|
"learning_rate": 9.815137707870806e-06, |
|
"loss": 0.3932, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.17664670658682635, |
|
"grad_norm": 0.9355249953810083, |
|
"learning_rate": 9.808749492900917e-06, |
|
"loss": 0.3546, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.17964071856287425, |
|
"grad_norm": 0.7741694345084122, |
|
"learning_rate": 9.802254917969033e-06, |
|
"loss": 0.3698, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.18263473053892215, |
|
"grad_norm": 0.909380338511485, |
|
"learning_rate": 9.795654126722218e-06, |
|
"loss": 0.362, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.18562874251497005, |
|
"grad_norm": 0.7781447173070738, |
|
"learning_rate": 9.788947265156828e-06, |
|
"loss": 0.3441, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.18862275449101795, |
|
"grad_norm": 0.8417092598843253, |
|
"learning_rate": 9.782134481615282e-06, |
|
"loss": 0.3191, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.19161676646706588, |
|
"grad_norm": 0.824159668729467, |
|
"learning_rate": 9.775215926782788e-06, |
|
"loss": 0.3858, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.19461077844311378, |
|
"grad_norm": 0.8778743074955353, |
|
"learning_rate": 9.768191753683997e-06, |
|
"loss": 0.3467, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.19760479041916168, |
|
"grad_norm": 0.8724861938818232, |
|
"learning_rate": 9.761062117679632e-06, |
|
"loss": 0.3438, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.20059880239520958, |
|
"grad_norm": 0.8197526384638352, |
|
"learning_rate": 9.75382717646304e-06, |
|
"loss": 0.3246, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.20359281437125748, |
|
"grad_norm": 0.9092970904697121, |
|
"learning_rate": 9.746487090056712e-06, |
|
"loss": 0.4084, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.20658682634730538, |
|
"grad_norm": 0.7222055598812208, |
|
"learning_rate": 9.739042020808746e-06, |
|
"loss": 0.3102, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.20958083832335328, |
|
"grad_norm": 0.8524180894036101, |
|
"learning_rate": 9.73149213338924e-06, |
|
"loss": 0.3904, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.2125748502994012, |
|
"grad_norm": 0.8555322805223762, |
|
"learning_rate": 9.72383759478667e-06, |
|
"loss": 0.3869, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.2155688622754491, |
|
"grad_norm": 0.8536310984136708, |
|
"learning_rate": 9.71607857430419e-06, |
|
"loss": 0.3024, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.218562874251497, |
|
"grad_norm": 0.8936249808386063, |
|
"learning_rate": 9.708215243555875e-06, |
|
"loss": 0.379, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.2215568862275449, |
|
"grad_norm": 0.9789467528433705, |
|
"learning_rate": 9.700247776462944e-06, |
|
"loss": 0.3625, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.2245508982035928, |
|
"grad_norm": 0.9208323679997393, |
|
"learning_rate": 9.6921763492499e-06, |
|
"loss": 0.3526, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.2275449101796407, |
|
"grad_norm": 0.7732918942210613, |
|
"learning_rate": 9.68400114044064e-06, |
|
"loss": 0.2822, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.23053892215568864, |
|
"grad_norm": 0.8819282042071043, |
|
"learning_rate": 9.6757223308545e-06, |
|
"loss": 0.3537, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.23353293413173654, |
|
"grad_norm": 0.8614139255257659, |
|
"learning_rate": 9.667340103602263e-06, |
|
"loss": 0.3994, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.23652694610778444, |
|
"grad_norm": 0.7935921391382667, |
|
"learning_rate": 9.658854644082099e-06, |
|
"loss": 0.3196, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.23952095808383234, |
|
"grad_norm": 1.0594181520845294, |
|
"learning_rate": 9.650266139975474e-06, |
|
"loss": 0.3903, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.24251497005988024, |
|
"grad_norm": 0.9369633580310035, |
|
"learning_rate": 9.641574781242999e-06, |
|
"loss": 0.3442, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.24550898203592814, |
|
"grad_norm": 0.8687848684448142, |
|
"learning_rate": 9.632780760120217e-06, |
|
"loss": 0.3542, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.24850299401197604, |
|
"grad_norm": 0.8617960569259269, |
|
"learning_rate": 9.62388427111336e-06, |
|
"loss": 0.3313, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.25149700598802394, |
|
"grad_norm": 0.7778268159117772, |
|
"learning_rate": 9.614885510995047e-06, |
|
"loss": 0.2961, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.25449101796407186, |
|
"grad_norm": 0.8919894825077804, |
|
"learning_rate": 9.605784678799934e-06, |
|
"loss": 0.3389, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.25748502994011974, |
|
"grad_norm": 0.9124988709452522, |
|
"learning_rate": 9.596581975820304e-06, |
|
"loss": 0.3487, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.26047904191616766, |
|
"grad_norm": 0.9751886763505696, |
|
"learning_rate": 9.587277605601617e-06, |
|
"loss": 0.3731, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.2634730538922156, |
|
"grad_norm": 0.960018724281691, |
|
"learning_rate": 9.577871773938013e-06, |
|
"loss": 0.3887, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.26646706586826346, |
|
"grad_norm": 0.9001216026084041, |
|
"learning_rate": 9.568364688867757e-06, |
|
"loss": 0.4347, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.2694610778443114, |
|
"grad_norm": 0.8628325506806731, |
|
"learning_rate": 9.558756560668637e-06, |
|
"loss": 0.3325, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.27245508982035926, |
|
"grad_norm": 0.9403070905201559, |
|
"learning_rate": 9.549047601853313e-06, |
|
"loss": 0.3424, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.2754491017964072, |
|
"grad_norm": 0.9757818110255363, |
|
"learning_rate": 9.539238027164618e-06, |
|
"loss": 0.3676, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.27844311377245506, |
|
"grad_norm": 0.8919717072992648, |
|
"learning_rate": 9.52932805357081e-06, |
|
"loss": 0.3521, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.281437125748503, |
|
"grad_norm": 0.8693087618050774, |
|
"learning_rate": 9.519317900260769e-06, |
|
"loss": 0.3806, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.2844311377245509, |
|
"grad_norm": 0.8634269398769101, |
|
"learning_rate": 9.509207788639148e-06, |
|
"loss": 0.3694, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.2874251497005988, |
|
"grad_norm": 0.8463285500330672, |
|
"learning_rate": 9.498997942321484e-06, |
|
"loss": 0.367, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.2904191616766467, |
|
"grad_norm": 0.7635633808551265, |
|
"learning_rate": 9.488688587129243e-06, |
|
"loss": 0.3852, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.2934131736526946, |
|
"grad_norm": 0.9900918694741316, |
|
"learning_rate": 9.47827995108483e-06, |
|
"loss": 0.3355, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.2964071856287425, |
|
"grad_norm": 0.996999918546389, |
|
"learning_rate": 9.467772264406545e-06, |
|
"loss": 0.3528, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.2994011976047904, |
|
"grad_norm": 0.7864973715714452, |
|
"learning_rate": 9.457165759503492e-06, |
|
"loss": 0.3191, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.3023952095808383, |
|
"grad_norm": 0.8771419186101935, |
|
"learning_rate": 9.446460670970436e-06, |
|
"loss": 0.3286, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.30538922155688625, |
|
"grad_norm": 0.866728406911566, |
|
"learning_rate": 9.435657235582616e-06, |
|
"loss": 0.2821, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.3083832335329341, |
|
"grad_norm": 0.9688078291261988, |
|
"learning_rate": 9.424755692290507e-06, |
|
"loss": 0.3543, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.31137724550898205, |
|
"grad_norm": 0.9286852806387357, |
|
"learning_rate": 9.413756282214538e-06, |
|
"loss": 0.3818, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.3143712574850299, |
|
"grad_norm": 0.8009276506295196, |
|
"learning_rate": 9.402659248639749e-06, |
|
"loss": 0.3415, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.31736526946107785, |
|
"grad_norm": 0.8759097193857428, |
|
"learning_rate": 9.391464837010428e-06, |
|
"loss": 0.3349, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.3203592814371258, |
|
"grad_norm": 0.9227104063780066, |
|
"learning_rate": 9.380173294924661e-06, |
|
"loss": 0.3866, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.32335329341317365, |
|
"grad_norm": 0.8649217141295237, |
|
"learning_rate": 9.368784872128877e-06, |
|
"loss": 0.3449, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.3263473053892216, |
|
"grad_norm": 0.8417111018584573, |
|
"learning_rate": 9.357299820512305e-06, |
|
"loss": 0.3312, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.32934131736526945, |
|
"grad_norm": 0.8645182101127776, |
|
"learning_rate": 9.345718394101412e-06, |
|
"loss": 0.3341, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.3323353293413174, |
|
"grad_norm": 0.8915971937099609, |
|
"learning_rate": 9.334040849054288e-06, |
|
"loss": 0.3727, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.33532934131736525, |
|
"grad_norm": 0.8962847413894931, |
|
"learning_rate": 9.322267443654974e-06, |
|
"loss": 0.3439, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.3383233532934132, |
|
"grad_norm": 0.8992657693893839, |
|
"learning_rate": 9.310398438307747e-06, |
|
"loss": 0.3907, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.3413173652694611, |
|
"grad_norm": 0.8250515120084873, |
|
"learning_rate": 9.29843409553137e-06, |
|
"loss": 0.3188, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.344311377245509, |
|
"grad_norm": 0.7732709676855889, |
|
"learning_rate": 9.286374679953278e-06, |
|
"loss": 0.3221, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.3473053892215569, |
|
"grad_norm": 0.8340614618224027, |
|
"learning_rate": 9.274220458303727e-06, |
|
"loss": 0.3033, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.3502994011976048, |
|
"grad_norm": 0.9081519043317762, |
|
"learning_rate": 9.261971699409893e-06, |
|
"loss": 0.3596, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.3532934131736527, |
|
"grad_norm": 0.9804451115483439, |
|
"learning_rate": 9.249628674189928e-06, |
|
"loss": 0.3401, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.3562874251497006, |
|
"grad_norm": 0.9048286976335761, |
|
"learning_rate": 9.237191655646972e-06, |
|
"loss": 0.3683, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.3592814371257485, |
|
"grad_norm": 0.9481229645975885, |
|
"learning_rate": 9.224660918863104e-06, |
|
"loss": 0.3976, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.36227544910179643, |
|
"grad_norm": 0.9306034331841647, |
|
"learning_rate": 9.212036740993265e-06, |
|
"loss": 0.3466, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.3652694610778443, |
|
"grad_norm": 0.9448818501487511, |
|
"learning_rate": 9.199319401259132e-06, |
|
"loss": 0.3531, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.36826347305389223, |
|
"grad_norm": 0.9092421622239264, |
|
"learning_rate": 9.186509180942928e-06, |
|
"loss": 0.3278, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.3712574850299401, |
|
"grad_norm": 0.8789641383722142, |
|
"learning_rate": 9.173606363381218e-06, |
|
"loss": 0.39, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.37425149700598803, |
|
"grad_norm": 0.9024575753731743, |
|
"learning_rate": 9.16061123395863e-06, |
|
"loss": 0.3488, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.3772455089820359, |
|
"grad_norm": 0.8869529616369447, |
|
"learning_rate": 9.147524080101543e-06, |
|
"loss": 0.3438, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.38023952095808383, |
|
"grad_norm": 1.0018425815907332, |
|
"learning_rate": 9.134345191271742e-06, |
|
"loss": 0.397, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.38323353293413176, |
|
"grad_norm": 0.8884698339357936, |
|
"learning_rate": 9.121074858959997e-06, |
|
"loss": 0.3779, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.38622754491017963, |
|
"grad_norm": 0.8022167109366266, |
|
"learning_rate": 9.107713376679634e-06, |
|
"loss": 0.3489, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.38922155688622756, |
|
"grad_norm": 0.9648398323735261, |
|
"learning_rate": 9.094261039960028e-06, |
|
"loss": 0.3578, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.39221556886227543, |
|
"grad_norm": 1.021544334412822, |
|
"learning_rate": 9.08071814634008e-06, |
|
"loss": 0.4094, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.39520958083832336, |
|
"grad_norm": 0.9388222677297999, |
|
"learning_rate": 9.067084995361623e-06, |
|
"loss": 0.3568, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.39820359281437123, |
|
"grad_norm": 0.9213582496355888, |
|
"learning_rate": 9.053361888562807e-06, |
|
"loss": 0.3911, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.40119760479041916, |
|
"grad_norm": 0.9070931123497717, |
|
"learning_rate": 9.039549129471423e-06, |
|
"loss": 0.3355, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.4041916167664671, |
|
"grad_norm": 0.7663830916226176, |
|
"learning_rate": 9.025647023598196e-06, |
|
"loss": 0.3138, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.40718562874251496, |
|
"grad_norm": 0.8506065165395951, |
|
"learning_rate": 9.011655878430018e-06, |
|
"loss": 0.3186, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.4101796407185629, |
|
"grad_norm": 1.013195726903395, |
|
"learning_rate": 8.99757600342316e-06, |
|
"loss": 0.3842, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.41317365269461076, |
|
"grad_norm": 0.9269440305974377, |
|
"learning_rate": 8.983407709996415e-06, |
|
"loss": 0.3682, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.4161676646706587, |
|
"grad_norm": 0.9287493222992738, |
|
"learning_rate": 8.969151311524215e-06, |
|
"loss": 0.3393, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.41916167664670656, |
|
"grad_norm": 0.9911779016436961, |
|
"learning_rate": 8.954807123329703e-06, |
|
"loss": 0.3674, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.4221556886227545, |
|
"grad_norm": 0.7616393001115792, |
|
"learning_rate": 8.940375462677758e-06, |
|
"loss": 0.2976, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.4251497005988024, |
|
"grad_norm": 0.8535317280369027, |
|
"learning_rate": 8.92585664876797e-06, |
|
"loss": 0.3382, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.4281437125748503, |
|
"grad_norm": 0.8078148654373282, |
|
"learning_rate": 8.911251002727588e-06, |
|
"loss": 0.3328, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.4311377245508982, |
|
"grad_norm": 0.8016111522171929, |
|
"learning_rate": 8.896558847604414e-06, |
|
"loss": 0.317, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.4341317365269461, |
|
"grad_norm": 0.8485527935175589, |
|
"learning_rate": 8.881780508359661e-06, |
|
"loss": 0.4161, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.437125748502994, |
|
"grad_norm": 1.0868267443809434, |
|
"learning_rate": 8.86691631186076e-06, |
|
"loss": 0.3443, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.44011976047904194, |
|
"grad_norm": 0.7355685063239832, |
|
"learning_rate": 8.851966586874138e-06, |
|
"loss": 0.3052, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.4431137724550898, |
|
"grad_norm": 0.8723074459225714, |
|
"learning_rate": 8.836931664057935e-06, |
|
"loss": 0.3301, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.44610778443113774, |
|
"grad_norm": 0.9422935022179557, |
|
"learning_rate": 8.821811875954705e-06, |
|
"loss": 0.354, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.4491017964071856, |
|
"grad_norm": 0.8642038045560563, |
|
"learning_rate": 8.806607556984045e-06, |
|
"loss": 0.2997, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.45209580838323354, |
|
"grad_norm": 0.8407656972433648, |
|
"learning_rate": 8.791319043435213e-06, |
|
"loss": 0.3402, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.4550898203592814, |
|
"grad_norm": 0.8018946809646184, |
|
"learning_rate": 8.775946673459682e-06, |
|
"loss": 0.3465, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.45808383233532934, |
|
"grad_norm": 0.8780480684120829, |
|
"learning_rate": 8.76049078706366e-06, |
|
"loss": 0.3181, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.46107784431137727, |
|
"grad_norm": 0.8621829816704457, |
|
"learning_rate": 8.744951726100572e-06, |
|
"loss": 0.3073, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.46407185628742514, |
|
"grad_norm": 0.9745447580705036, |
|
"learning_rate": 8.729329834263503e-06, |
|
"loss": 0.3351, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.46706586826347307, |
|
"grad_norm": 0.8994048708150972, |
|
"learning_rate": 8.713625457077585e-06, |
|
"loss": 0.3602, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.47005988023952094, |
|
"grad_norm": 0.8571469075006177, |
|
"learning_rate": 8.697838941892371e-06, |
|
"loss": 0.3206, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.47305389221556887, |
|
"grad_norm": 0.9010847756525124, |
|
"learning_rate": 8.681970637874131e-06, |
|
"loss": 0.3716, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.47604790419161674, |
|
"grad_norm": 0.9208985296526275, |
|
"learning_rate": 8.666020895998154e-06, |
|
"loss": 0.3451, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.47904191616766467, |
|
"grad_norm": 0.9327590019141727, |
|
"learning_rate": 8.64999006904096e-06, |
|
"loss": 0.3555, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.4820359281437126, |
|
"grad_norm": 0.936434139540589, |
|
"learning_rate": 8.63387851157252e-06, |
|
"loss": 0.3603, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.48502994011976047, |
|
"grad_norm": 0.8929422740312684, |
|
"learning_rate": 8.617686579948396e-06, |
|
"loss": 0.3036, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.4880239520958084, |
|
"grad_norm": 0.946236181581822, |
|
"learning_rate": 8.60141463230187e-06, |
|
"loss": 0.316, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.49101796407185627, |
|
"grad_norm": 0.9407961833230595, |
|
"learning_rate": 8.585063028536015e-06, |
|
"loss": 0.3996, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.4940119760479042, |
|
"grad_norm": 0.8738064243187453, |
|
"learning_rate": 8.568632130315747e-06, |
|
"loss": 0.3424, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.49700598802395207, |
|
"grad_norm": 0.8777689262622684, |
|
"learning_rate": 8.552122301059807e-06, |
|
"loss": 0.334, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.865110328632691, |
|
"learning_rate": 8.535533905932739e-06, |
|
"loss": 0.3227, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.5029940119760479, |
|
"grad_norm": 0.8794443278528441, |
|
"learning_rate": 8.518867311836808e-06, |
|
"loss": 0.3664, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.5059880239520959, |
|
"grad_norm": 0.9340014915251268, |
|
"learning_rate": 8.502122887403882e-06, |
|
"loss": 0.3759, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.5089820359281437, |
|
"grad_norm": 0.9445323902311572, |
|
"learning_rate": 8.485301002987285e-06, |
|
"loss": 0.2994, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.5119760479041916, |
|
"grad_norm": 0.8936966398490986, |
|
"learning_rate": 8.468402030653598e-06, |
|
"loss": 0.3841, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.5149700598802395, |
|
"grad_norm": 0.8302390316603054, |
|
"learning_rate": 8.451426344174433e-06, |
|
"loss": 0.3266, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.5179640718562875, |
|
"grad_norm": 0.9604064259018522, |
|
"learning_rate": 8.434374319018165e-06, |
|
"loss": 0.3952, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.5209580838323353, |
|
"grad_norm": 0.8232906684450247, |
|
"learning_rate": 8.417246332341638e-06, |
|
"loss": 0.3225, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.5239520958083832, |
|
"grad_norm": 0.8116589790256981, |
|
"learning_rate": 8.4000427629818e-06, |
|
"loss": 0.3385, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.5269461077844312, |
|
"grad_norm": 0.886421773500398, |
|
"learning_rate": 8.382763991447344e-06, |
|
"loss": 0.366, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.5299401197604791, |
|
"grad_norm": 0.9279916258861699, |
|
"learning_rate": 8.365410399910287e-06, |
|
"loss": 0.3463, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.5329341317365269, |
|
"grad_norm": 1.0192430861819075, |
|
"learning_rate": 8.347982372197515e-06, |
|
"loss": 0.3398, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.5359281437125748, |
|
"grad_norm": 0.8664905568270995, |
|
"learning_rate": 8.33048029378229e-06, |
|
"loss": 0.352, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.5389221556886228, |
|
"grad_norm": 0.7921524482429922, |
|
"learning_rate": 8.312904551775731e-06, |
|
"loss": 0.3078, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.5419161676646707, |
|
"grad_norm": 0.83331553442875, |
|
"learning_rate": 8.295255534918249e-06, |
|
"loss": 0.3445, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.5449101796407185, |
|
"grad_norm": 1.0257469835323392, |
|
"learning_rate": 8.277533633570948e-06, |
|
"loss": 0.3982, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.5479041916167665, |
|
"grad_norm": 0.8470156833032758, |
|
"learning_rate": 8.25973923970699e-06, |
|
"loss": 0.3648, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.5508982035928144, |
|
"grad_norm": 0.7975668659718067, |
|
"learning_rate": 8.241872746902934e-06, |
|
"loss": 0.3994, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.5538922155688623, |
|
"grad_norm": 0.8638109946726131, |
|
"learning_rate": 8.223934550330015e-06, |
|
"loss": 0.3167, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.5568862275449101, |
|
"grad_norm": 0.9154610778281713, |
|
"learning_rate": 8.20592504674542e-06, |
|
"loss": 0.3722, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.5598802395209581, |
|
"grad_norm": 0.7882458173582081, |
|
"learning_rate": 8.187844634483495e-06, |
|
"loss": 0.3761, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.562874251497006, |
|
"grad_norm": 1.0180358902255366, |
|
"learning_rate": 8.16969371344696e-06, |
|
"loss": 0.4305, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.5658682634730539, |
|
"grad_norm": 0.7709323802887873, |
|
"learning_rate": 8.151472685098037e-06, |
|
"loss": 0.3438, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.5688622754491018, |
|
"grad_norm": 0.8344500598340894, |
|
"learning_rate": 8.13318195244958e-06, |
|
"loss": 0.3228, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.5718562874251497, |
|
"grad_norm": 0.8352211070158856, |
|
"learning_rate": 8.114821920056177e-06, |
|
"loss": 0.3426, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.5748502994011976, |
|
"grad_norm": 0.8098442304130882, |
|
"learning_rate": 8.096392994005177e-06, |
|
"loss": 0.3657, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.5778443113772455, |
|
"grad_norm": 0.7430042712253846, |
|
"learning_rate": 8.077895581907719e-06, |
|
"loss": 0.3395, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.5808383233532934, |
|
"grad_norm": 0.7394722412885261, |
|
"learning_rate": 8.059330092889724e-06, |
|
"loss": 0.2955, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.5838323353293413, |
|
"grad_norm": 0.8250904355210983, |
|
"learning_rate": 8.040696937582833e-06, |
|
"loss": 0.3353, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.5868263473053892, |
|
"grad_norm": 0.9012937573479463, |
|
"learning_rate": 8.021996528115335e-06, |
|
"loss": 0.3136, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.5898203592814372, |
|
"grad_norm": 0.8755756618422285, |
|
"learning_rate": 8.003229278103044e-06, |
|
"loss": 0.3239, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.592814371257485, |
|
"grad_norm": 0.8659340320635494, |
|
"learning_rate": 7.984395602640153e-06, |
|
"loss": 0.3241, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.5958083832335329, |
|
"grad_norm": 0.9477354999471376, |
|
"learning_rate": 7.96549591829006e-06, |
|
"loss": 0.3421, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.5988023952095808, |
|
"grad_norm": 0.7900942009412749, |
|
"learning_rate": 7.946530643076138e-06, |
|
"loss": 0.2907, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.5988023952095808, |
|
"eval_loss": 0.33467286825180054, |
|
"eval_runtime": 3.5746, |
|
"eval_samples_per_second": 15.106, |
|
"eval_steps_per_second": 3.916, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.6017964071856288, |
|
"grad_norm": 0.79312094170382, |
|
"learning_rate": 7.927500196472506e-06, |
|
"loss": 0.3338, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.6047904191616766, |
|
"grad_norm": 0.9034217570817616, |
|
"learning_rate": 7.908404999394747e-06, |
|
"loss": 0.2964, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.6077844311377245, |
|
"grad_norm": 0.8586808639213538, |
|
"learning_rate": 7.889245474190588e-06, |
|
"loss": 0.332, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.6107784431137725, |
|
"grad_norm": 1.0051484162950264, |
|
"learning_rate": 7.870022044630569e-06, |
|
"loss": 0.3651, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.6137724550898204, |
|
"grad_norm": 0.8213608666013195, |
|
"learning_rate": 7.85073513589867e-06, |
|
"loss": 0.3368, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.6167664670658682, |
|
"grad_norm": 1.0557670504490637, |
|
"learning_rate": 7.831385174582901e-06, |
|
"loss": 0.3467, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.6197604790419161, |
|
"grad_norm": 0.8305844226434226, |
|
"learning_rate": 7.81197258866587e-06, |
|
"loss": 0.3415, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.6227544910179641, |
|
"grad_norm": 0.8810023832436372, |
|
"learning_rate": 7.792497807515317e-06, |
|
"loss": 0.304, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.625748502994012, |
|
"grad_norm": 0.8855634435213009, |
|
"learning_rate": 7.772961261874615e-06, |
|
"loss": 0.2946, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.6287425149700598, |
|
"grad_norm": 0.9067453911759643, |
|
"learning_rate": 7.75336338385325e-06, |
|
"loss": 0.3253, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.6317365269461078, |
|
"grad_norm": 0.8898244780755367, |
|
"learning_rate": 7.733704606917248e-06, |
|
"loss": 0.3478, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.6347305389221557, |
|
"grad_norm": 0.8738916048358085, |
|
"learning_rate": 7.713985365879607e-06, |
|
"loss": 0.3054, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.6377245508982036, |
|
"grad_norm": 0.8500188297265527, |
|
"learning_rate": 7.694206096890667e-06, |
|
"loss": 0.3169, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.6407185628742516, |
|
"grad_norm": 0.844256505268219, |
|
"learning_rate": 7.674367237428467e-06, |
|
"loss": 0.3071, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.6437125748502994, |
|
"grad_norm": 0.8379775205343643, |
|
"learning_rate": 7.654469226289068e-06, |
|
"loss": 0.2913, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.6467065868263473, |
|
"grad_norm": 0.8767560769911583, |
|
"learning_rate": 7.63451250357685e-06, |
|
"loss": 0.3501, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.6497005988023952, |
|
"grad_norm": 0.896172615000714, |
|
"learning_rate": 7.614497510694774e-06, |
|
"loss": 0.3558, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.6526946107784432, |
|
"grad_norm": 0.9346507675555815, |
|
"learning_rate": 7.5944246903346204e-06, |
|
"loss": 0.3217, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.655688622754491, |
|
"grad_norm": 0.8375284268531287, |
|
"learning_rate": 7.574294486467204e-06, |
|
"loss": 0.3219, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.6586826347305389, |
|
"grad_norm": 0.8616776383120752, |
|
"learning_rate": 7.55410734433254e-06, |
|
"loss": 0.3532, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.6616766467065869, |
|
"grad_norm": 0.9431676744062615, |
|
"learning_rate": 7.533863710430011e-06, |
|
"loss": 0.3687, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.6646706586826348, |
|
"grad_norm": 0.8488729393047406, |
|
"learning_rate": 7.513564032508484e-06, |
|
"loss": 0.309, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.6676646706586826, |
|
"grad_norm": 0.9355238777931589, |
|
"learning_rate": 7.493208759556406e-06, |
|
"loss": 0.3665, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.6706586826347305, |
|
"grad_norm": 0.9566226952897219, |
|
"learning_rate": 7.472798341791877e-06, |
|
"loss": 0.3868, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.6736526946107785, |
|
"grad_norm": 0.8072744247437709, |
|
"learning_rate": 7.452333230652688e-06, |
|
"loss": 0.3211, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.6766467065868264, |
|
"grad_norm": 0.7841398004064947, |
|
"learning_rate": 7.431813878786343e-06, |
|
"loss": 0.3181, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.6796407185628742, |
|
"grad_norm": 0.8723747151692307, |
|
"learning_rate": 7.4112407400400395e-06, |
|
"loss": 0.3509, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.6826347305389222, |
|
"grad_norm": 0.8260131752149777, |
|
"learning_rate": 7.390614269450633e-06, |
|
"loss": 0.3255, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.6856287425149701, |
|
"grad_norm": 0.7681373368953692, |
|
"learning_rate": 7.369934923234577e-06, |
|
"loss": 0.3091, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.688622754491018, |
|
"grad_norm": 0.9535284481872933, |
|
"learning_rate": 7.349203158777826e-06, |
|
"loss": 0.3794, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.6916167664670658, |
|
"grad_norm": 0.916802050557078, |
|
"learning_rate": 7.32841943462572e-06, |
|
"loss": 0.3423, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.6946107784431138, |
|
"grad_norm": 0.9287242102831487, |
|
"learning_rate": 7.3075842104728445e-06, |
|
"loss": 0.3495, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.6976047904191617, |
|
"grad_norm": 0.8792795000931635, |
|
"learning_rate": 7.286697947152868e-06, |
|
"loss": 0.3601, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.7005988023952096, |
|
"grad_norm": 0.8669066098912112, |
|
"learning_rate": 7.265761106628338e-06, |
|
"loss": 0.3235, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.7035928143712575, |
|
"grad_norm": 0.8135169115612799, |
|
"learning_rate": 7.244774151980466e-06, |
|
"loss": 0.3155, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.7065868263473054, |
|
"grad_norm": 1.0900128075304087, |
|
"learning_rate": 7.223737547398898e-06, |
|
"loss": 0.3287, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.7095808383233533, |
|
"grad_norm": 0.8782965885774511, |
|
"learning_rate": 7.20265175817143e-06, |
|
"loss": 0.3342, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.7125748502994012, |
|
"grad_norm": 0.9486594176816813, |
|
"learning_rate": 7.181517250673729e-06, |
|
"loss": 0.3711, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.7155688622754491, |
|
"grad_norm": 0.9032070160794724, |
|
"learning_rate": 7.1603344923590065e-06, |
|
"loss": 0.3279, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.718562874251497, |
|
"grad_norm": 0.8552847592774973, |
|
"learning_rate": 7.139103951747694e-06, |
|
"loss": 0.3598, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.7215568862275449, |
|
"grad_norm": 0.8463195466291924, |
|
"learning_rate": 7.1178260984170675e-06, |
|
"loss": 0.3046, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.7245508982035929, |
|
"grad_norm": 0.8371218456503741, |
|
"learning_rate": 7.0965014029908654e-06, |
|
"loss": 0.3036, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.7275449101796407, |
|
"grad_norm": 0.7820672325449471, |
|
"learning_rate": 7.075130337128883e-06, |
|
"loss": 0.3335, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.7305389221556886, |
|
"grad_norm": 0.9192843234012044, |
|
"learning_rate": 7.053713373516538e-06, |
|
"loss": 0.3542, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.7335329341317365, |
|
"grad_norm": 0.8379640798614526, |
|
"learning_rate": 7.03225098585441e-06, |
|
"loss": 0.3584, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.7365269461077845, |
|
"grad_norm": 0.7781508272722515, |
|
"learning_rate": 7.0107436488477694e-06, |
|
"loss": 0.3443, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.7395209580838323, |
|
"grad_norm": 0.8834077256780015, |
|
"learning_rate": 6.989191838196083e-06, |
|
"loss": 0.3627, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.7425149700598802, |
|
"grad_norm": 0.8544321583408758, |
|
"learning_rate": 6.9675960305824785e-06, |
|
"loss": 0.3155, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.7455089820359282, |
|
"grad_norm": 0.9527967928819479, |
|
"learning_rate": 6.945956703663212e-06, |
|
"loss": 0.3541, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.7485029940119761, |
|
"grad_norm": 0.8271194232057484, |
|
"learning_rate": 6.9242743360570985e-06, |
|
"loss": 0.3229, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.7514970059880239, |
|
"grad_norm": 0.9601665766412376, |
|
"learning_rate": 6.9025494073349284e-06, |
|
"loss": 0.3676, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.7544910179640718, |
|
"grad_norm": 0.7929259708210948, |
|
"learning_rate": 6.880782398008862e-06, |
|
"loss": 0.3347, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.7574850299401198, |
|
"grad_norm": 0.887569273022383, |
|
"learning_rate": 6.858973789521792e-06, |
|
"loss": 0.3027, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.7604790419161677, |
|
"grad_norm": 0.9319346859656158, |
|
"learning_rate": 6.837124064236709e-06, |
|
"loss": 0.3641, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.7634730538922155, |
|
"grad_norm": 0.7943563622003768, |
|
"learning_rate": 6.815233705426019e-06, |
|
"loss": 0.3319, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.7664670658682635, |
|
"grad_norm": 0.9084023668834192, |
|
"learning_rate": 6.7933031972608644e-06, |
|
"loss": 0.3609, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.7694610778443114, |
|
"grad_norm": 0.8153357455537337, |
|
"learning_rate": 6.771333024800411e-06, |
|
"loss": 0.3456, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.7724550898203593, |
|
"grad_norm": 1.032838951696525, |
|
"learning_rate": 6.74932367398112e-06, |
|
"loss": 0.3308, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.7754491017964071, |
|
"grad_norm": 0.8492434450017045, |
|
"learning_rate": 6.727275631605996e-06, |
|
"loss": 0.2931, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.7784431137724551, |
|
"grad_norm": 0.9059912918790543, |
|
"learning_rate": 6.70518938533383e-06, |
|
"loss": 0.3325, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.781437125748503, |
|
"grad_norm": 0.9460828916774942, |
|
"learning_rate": 6.683065423668403e-06, |
|
"loss": 0.3502, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.7844311377245509, |
|
"grad_norm": 0.9364179318985469, |
|
"learning_rate": 6.660904235947687e-06, |
|
"loss": 0.3335, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.7874251497005988, |
|
"grad_norm": 0.8603373377298055, |
|
"learning_rate": 6.638706312333018e-06, |
|
"loss": 0.3519, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.7904191616766467, |
|
"grad_norm": 0.8914430701461508, |
|
"learning_rate": 6.61647214379826e-06, |
|
"loss": 0.3857, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.7934131736526946, |
|
"grad_norm": 0.9617998429624773, |
|
"learning_rate": 6.594202222118941e-06, |
|
"loss": 0.3498, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.7964071856287425, |
|
"grad_norm": 0.9106443714228667, |
|
"learning_rate": 6.571897039861377e-06, |
|
"loss": 0.3341, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.7994011976047904, |
|
"grad_norm": 0.8730506251318221, |
|
"learning_rate": 6.549557090371775e-06, |
|
"loss": 0.345, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.8023952095808383, |
|
"grad_norm": 0.801950451184709, |
|
"learning_rate": 6.527182867765333e-06, |
|
"loss": 0.2552, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.8053892215568862, |
|
"grad_norm": 0.935679283867518, |
|
"learning_rate": 6.504774866915291e-06, |
|
"loss": 0.3341, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.8083832335329342, |
|
"grad_norm": 0.9778825623559328, |
|
"learning_rate": 6.482333583442002e-06, |
|
"loss": 0.3018, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.811377245508982, |
|
"grad_norm": 0.7869918395866576, |
|
"learning_rate": 6.459859513701967e-06, |
|
"loss": 0.3345, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.8143712574850299, |
|
"grad_norm": 1.0031750015830145, |
|
"learning_rate": 6.437353154776848e-06, |
|
"loss": 0.3393, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.8173652694610778, |
|
"grad_norm": 0.9085710799739674, |
|
"learning_rate": 6.414815004462483e-06, |
|
"loss": 0.3687, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.8203592814371258, |
|
"grad_norm": 0.8621398941933154, |
|
"learning_rate": 6.3922455612578715e-06, |
|
"loss": 0.3623, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.8233532934131736, |
|
"grad_norm": 0.855228401264576, |
|
"learning_rate": 6.369645324354149e-06, |
|
"loss": 0.2856, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.8263473053892215, |
|
"grad_norm": 0.8076193648394628, |
|
"learning_rate": 6.3470147936235485e-06, |
|
"loss": 0.3406, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.8293413173652695, |
|
"grad_norm": 0.7245911322472699, |
|
"learning_rate": 6.3243544696083355e-06, |
|
"loss": 0.3002, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.8323353293413174, |
|
"grad_norm": 0.8280158469680207, |
|
"learning_rate": 6.301664853509755e-06, |
|
"loss": 0.2891, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.8353293413173652, |
|
"grad_norm": 0.7854270076254557, |
|
"learning_rate": 6.278946447176924e-06, |
|
"loss": 0.3257, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.8383233532934131, |
|
"grad_norm": 0.7761777582444528, |
|
"learning_rate": 6.256199753095745e-06, |
|
"loss": 0.3063, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.8413173652694611, |
|
"grad_norm": 0.842302593081833, |
|
"learning_rate": 6.233425274377793e-06, |
|
"loss": 0.3318, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.844311377245509, |
|
"grad_norm": 1.0077950393614565, |
|
"learning_rate": 6.21062351474918e-06, |
|
"loss": 0.3674, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.8473053892215568, |
|
"grad_norm": 0.8701693556750275, |
|
"learning_rate": 6.18779497853942e-06, |
|
"loss": 0.3545, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.8502994011976048, |
|
"grad_norm": 0.8772047033068309, |
|
"learning_rate": 6.164940170670266e-06, |
|
"loss": 0.3602, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.8532934131736527, |
|
"grad_norm": 0.7301435777944242, |
|
"learning_rate": 6.142059596644557e-06, |
|
"loss": 0.2833, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.8562874251497006, |
|
"grad_norm": 0.8481337976116274, |
|
"learning_rate": 6.11915376253502e-06, |
|
"loss": 0.3102, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.8592814371257484, |
|
"grad_norm": 0.8863490665161842, |
|
"learning_rate": 6.096223174973091e-06, |
|
"loss": 0.3412, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.8622754491017964, |
|
"grad_norm": 0.7692810095706181, |
|
"learning_rate": 6.073268341137694e-06, |
|
"loss": 0.2856, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.8652694610778443, |
|
"grad_norm": 0.8876856270528781, |
|
"learning_rate": 6.050289768744042e-06, |
|
"loss": 0.2781, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.8682634730538922, |
|
"grad_norm": 0.9655511225084505, |
|
"learning_rate": 6.0272879660323936e-06, |
|
"loss": 0.3611, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.8712574850299402, |
|
"grad_norm": 0.8801553905843293, |
|
"learning_rate": 6.004263441756815e-06, |
|
"loss": 0.3386, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.874251497005988, |
|
"grad_norm": 0.8644727105745302, |
|
"learning_rate": 5.98121670517393e-06, |
|
"loss": 0.3101, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.8772455089820359, |
|
"grad_norm": 0.7541522693012973, |
|
"learning_rate": 5.958148266031654e-06, |
|
"loss": 0.2861, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.8802395209580839, |
|
"grad_norm": 0.8045210692490931, |
|
"learning_rate": 5.935058634557917e-06, |
|
"loss": 0.3023, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.8832335329341318, |
|
"grad_norm": 0.7439806112500583, |
|
"learning_rate": 5.911948321449384e-06, |
|
"loss": 0.3294, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.8862275449101796, |
|
"grad_norm": 0.8858437389929406, |
|
"learning_rate": 5.8888178378601565e-06, |
|
"loss": 0.3423, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.8892215568862275, |
|
"grad_norm": 0.9221571927934884, |
|
"learning_rate": 5.865667695390468e-06, |
|
"loss": 0.3243, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.8922155688622755, |
|
"grad_norm": 0.8235617800073382, |
|
"learning_rate": 5.842498406075363e-06, |
|
"loss": 0.3161, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.8952095808383234, |
|
"grad_norm": 0.8975683741325864, |
|
"learning_rate": 5.819310482373381e-06, |
|
"loss": 0.3295, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.8982035928143712, |
|
"grad_norm": 0.849503595809189, |
|
"learning_rate": 5.796104437155213e-06, |
|
"loss": 0.3629, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.9011976047904192, |
|
"grad_norm": 0.8681035836027373, |
|
"learning_rate": 5.772880783692363e-06, |
|
"loss": 0.309, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.9041916167664671, |
|
"grad_norm": 0.940434193489446, |
|
"learning_rate": 5.749640035645798e-06, |
|
"loss": 0.3, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.907185628742515, |
|
"grad_norm": 0.9053734789495154, |
|
"learning_rate": 5.726382707054578e-06, |
|
"loss": 0.3286, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.9101796407185628, |
|
"grad_norm": 0.9221578596023046, |
|
"learning_rate": 5.703109312324493e-06, |
|
"loss": 0.3232, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.9131736526946108, |
|
"grad_norm": 0.9396431013868058, |
|
"learning_rate": 5.679820366216684e-06, |
|
"loss": 0.3874, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.9161676646706587, |
|
"grad_norm": 0.7994743476501736, |
|
"learning_rate": 5.656516383836263e-06, |
|
"loss": 0.2814, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.9191616766467066, |
|
"grad_norm": 0.8469001159251874, |
|
"learning_rate": 5.6331978806209044e-06, |
|
"loss": 0.3352, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.9221556886227545, |
|
"grad_norm": 0.884067560153009, |
|
"learning_rate": 5.609865372329461e-06, |
|
"loss": 0.3084, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.9251497005988024, |
|
"grad_norm": 0.8169845922748498, |
|
"learning_rate": 5.586519375030549e-06, |
|
"loss": 0.2773, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.9281437125748503, |
|
"grad_norm": 0.7863783585710435, |
|
"learning_rate": 5.5631604050911354e-06, |
|
"loss": 0.3033, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.9311377245508982, |
|
"grad_norm": 0.948623009920141, |
|
"learning_rate": 5.539788979165115e-06, |
|
"loss": 0.2917, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.9341317365269461, |
|
"grad_norm": 0.779757317416599, |
|
"learning_rate": 5.516405614181883e-06, |
|
"loss": 0.2739, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.937125748502994, |
|
"grad_norm": 0.7695694632358886, |
|
"learning_rate": 5.4930108273349034e-06, |
|
"loss": 0.306, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.9401197604790419, |
|
"grad_norm": 0.8522371811136099, |
|
"learning_rate": 5.4696051360702725e-06, |
|
"loss": 0.2925, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.9431137724550899, |
|
"grad_norm": 0.9537835504530565, |
|
"learning_rate": 5.446189058075265e-06, |
|
"loss": 0.3472, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.9461077844311377, |
|
"grad_norm": 0.8724112927980736, |
|
"learning_rate": 5.4227631112668955e-06, |
|
"loss": 0.3496, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.9491017964071856, |
|
"grad_norm": 0.9143153514381644, |
|
"learning_rate": 5.39932781378045e-06, |
|
"loss": 0.3519, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.9520958083832335, |
|
"grad_norm": 0.7615328536144775, |
|
"learning_rate": 5.375883683958041e-06, |
|
"loss": 0.3098, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.9550898203592815, |
|
"grad_norm": 0.8649703731317651, |
|
"learning_rate": 5.3524312403371255e-06, |
|
"loss": 0.325, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.9580838323353293, |
|
"grad_norm": 0.9723484494302708, |
|
"learning_rate": 5.328971001639054e-06, |
|
"loss": 0.291, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.9610778443113772, |
|
"grad_norm": 0.8490247110632723, |
|
"learning_rate": 5.3055034867575825e-06, |
|
"loss": 0.2935, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.9640718562874252, |
|
"grad_norm": 0.9988398704436575, |
|
"learning_rate": 5.282029214747404e-06, |
|
"loss": 0.3491, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.9670658682634731, |
|
"grad_norm": 0.8543590517207817, |
|
"learning_rate": 5.258548704812667e-06, |
|
"loss": 0.3286, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.9700598802395209, |
|
"grad_norm": 0.8500233833887514, |
|
"learning_rate": 5.235062476295488e-06, |
|
"loss": 0.3151, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.9730538922155688, |
|
"grad_norm": 0.8765244333310814, |
|
"learning_rate": 5.211571048664469e-06, |
|
"loss": 0.2808, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.9760479041916168, |
|
"grad_norm": 0.9333931872302441, |
|
"learning_rate": 5.188074941503203e-06, |
|
"loss": 0.3391, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.9790419161676647, |
|
"grad_norm": 0.8773054912858921, |
|
"learning_rate": 5.164574674498788e-06, |
|
"loss": 0.3265, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.9820359281437125, |
|
"grad_norm": 0.9665585824211138, |
|
"learning_rate": 5.141070767430331e-06, |
|
"loss": 0.3117, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.9850299401197605, |
|
"grad_norm": 0.8395082187919094, |
|
"learning_rate": 5.117563740157444e-06, |
|
"loss": 0.3105, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.9880239520958084, |
|
"grad_norm": 0.9568305242413174, |
|
"learning_rate": 5.094054112608758e-06, |
|
"loss": 0.3738, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.9910179640718563, |
|
"grad_norm": 0.7907755067956981, |
|
"learning_rate": 5.070542404770413e-06, |
|
"loss": 0.3541, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.9940119760479041, |
|
"grad_norm": 0.7841586952208952, |
|
"learning_rate": 5.047029136674563e-06, |
|
"loss": 0.2886, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.9970059880239521, |
|
"grad_norm": 0.9757677527538441, |
|
"learning_rate": 5.023514828387868e-06, |
|
"loss": 0.4342, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.843376725475, |
|
"learning_rate": 5e-06, |
|
"loss": 0.2437, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.0029940119760479, |
|
"grad_norm": 0.9097444729914339, |
|
"learning_rate": 4.976485171612134e-06, |
|
"loss": 0.204, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.0059880239520957, |
|
"grad_norm": 0.7545799997722877, |
|
"learning_rate": 4.95297086332544e-06, |
|
"loss": 0.2173, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.0089820359281436, |
|
"grad_norm": 0.8688981009379331, |
|
"learning_rate": 4.9294575952295896e-06, |
|
"loss": 0.2283, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.0119760479041917, |
|
"grad_norm": 0.8948621203651169, |
|
"learning_rate": 4.905945887391242e-06, |
|
"loss": 0.2547, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.0149700598802396, |
|
"grad_norm": 0.8942361934728625, |
|
"learning_rate": 4.882436259842556e-06, |
|
"loss": 0.2305, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.0179640718562875, |
|
"grad_norm": 0.8198085683609914, |
|
"learning_rate": 4.858929232569671e-06, |
|
"loss": 0.2231, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.0209580838323353, |
|
"grad_norm": 0.8621368121736944, |
|
"learning_rate": 4.835425325501214e-06, |
|
"loss": 0.2397, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.0239520958083832, |
|
"grad_norm": 0.7630809423349001, |
|
"learning_rate": 4.811925058496799e-06, |
|
"loss": 0.1838, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.026946107784431, |
|
"grad_norm": 0.9879337199392489, |
|
"learning_rate": 4.788428951335534e-06, |
|
"loss": 0.2453, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.029940119760479, |
|
"grad_norm": 0.7847842378029675, |
|
"learning_rate": 4.7649375237045135e-06, |
|
"loss": 0.2138, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.032934131736527, |
|
"grad_norm": 0.8485728075701262, |
|
"learning_rate": 4.741451295187333e-06, |
|
"loss": 0.1931, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.035928143712575, |
|
"grad_norm": 0.8454857994536729, |
|
"learning_rate": 4.717970785252596e-06, |
|
"loss": 0.2547, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.0389221556886228, |
|
"grad_norm": 0.8807215076735381, |
|
"learning_rate": 4.694496513242418e-06, |
|
"loss": 0.2497, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.0419161676646707, |
|
"grad_norm": 0.9629588475475359, |
|
"learning_rate": 4.671028998360947e-06, |
|
"loss": 0.2017, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.0449101796407185, |
|
"grad_norm": 0.9289149674759557, |
|
"learning_rate": 4.647568759662876e-06, |
|
"loss": 0.2513, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 1.0479041916167664, |
|
"grad_norm": 0.8762206020145612, |
|
"learning_rate": 4.624116316041962e-06, |
|
"loss": 0.2309, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.0508982035928143, |
|
"grad_norm": 0.8520308484249168, |
|
"learning_rate": 4.600672186219551e-06, |
|
"loss": 0.2362, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.0538922155688624, |
|
"grad_norm": 0.9933722610372858, |
|
"learning_rate": 4.5772368887331044e-06, |
|
"loss": 0.2676, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.0568862275449102, |
|
"grad_norm": 0.9040467210370616, |
|
"learning_rate": 4.553810941924735e-06, |
|
"loss": 0.2599, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.0598802395209581, |
|
"grad_norm": 0.9016868832745932, |
|
"learning_rate": 4.530394863929728e-06, |
|
"loss": 0.2274, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.062874251497006, |
|
"grad_norm": 0.900854863732707, |
|
"learning_rate": 4.506989172665097e-06, |
|
"loss": 0.2122, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.0658682634730539, |
|
"grad_norm": 0.9050002807028359, |
|
"learning_rate": 4.483594385818119e-06, |
|
"loss": 0.2121, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.0688622754491017, |
|
"grad_norm": 0.9886008436717765, |
|
"learning_rate": 4.460211020834887e-06, |
|
"loss": 0.234, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.0718562874251496, |
|
"grad_norm": 0.9669754306017594, |
|
"learning_rate": 4.436839594908866e-06, |
|
"loss": 0.2816, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.0748502994011977, |
|
"grad_norm": 0.9573444964703198, |
|
"learning_rate": 4.4134806249694514e-06, |
|
"loss": 0.2493, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.0778443113772456, |
|
"grad_norm": 0.9676254717641087, |
|
"learning_rate": 4.39013462767054e-06, |
|
"loss": 0.2713, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.0808383233532934, |
|
"grad_norm": 0.8349788466453513, |
|
"learning_rate": 4.366802119379098e-06, |
|
"loss": 0.1791, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.0838323353293413, |
|
"grad_norm": 0.8154007400233115, |
|
"learning_rate": 4.34348361616374e-06, |
|
"loss": 0.2653, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.0868263473053892, |
|
"grad_norm": 1.0043377603516084, |
|
"learning_rate": 4.3201796337833165e-06, |
|
"loss": 0.2542, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.089820359281437, |
|
"grad_norm": 1.0642550736938516, |
|
"learning_rate": 4.29689068767551e-06, |
|
"loss": 0.2753, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.092814371257485, |
|
"grad_norm": 0.8601084168362063, |
|
"learning_rate": 4.273617292945425e-06, |
|
"loss": 0.2268, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.095808383233533, |
|
"grad_norm": 0.8241752489029223, |
|
"learning_rate": 4.250359964354203e-06, |
|
"loss": 0.2391, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.098802395209581, |
|
"grad_norm": 0.963344944682232, |
|
"learning_rate": 4.227119216307637e-06, |
|
"loss": 0.2378, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.1017964071856288, |
|
"grad_norm": 0.8948262875622087, |
|
"learning_rate": 4.203895562844789e-06, |
|
"loss": 0.2707, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.1047904191616766, |
|
"grad_norm": 0.9738213077981859, |
|
"learning_rate": 4.18068951762662e-06, |
|
"loss": 0.2665, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.1077844311377245, |
|
"grad_norm": 0.8637469614510346, |
|
"learning_rate": 4.157501593924638e-06, |
|
"loss": 0.2149, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.1107784431137724, |
|
"grad_norm": 0.8459074254573029, |
|
"learning_rate": 4.134332304609533e-06, |
|
"loss": 0.2534, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.1137724550898203, |
|
"grad_norm": 0.7160677188052542, |
|
"learning_rate": 4.111182162139844e-06, |
|
"loss": 0.2256, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.1167664670658684, |
|
"grad_norm": 0.9583456205987185, |
|
"learning_rate": 4.088051678550617e-06, |
|
"loss": 0.2191, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.1197604790419162, |
|
"grad_norm": 0.9081389134137872, |
|
"learning_rate": 4.064941365442084e-06, |
|
"loss": 0.2371, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.122754491017964, |
|
"grad_norm": 1.0314125965352072, |
|
"learning_rate": 4.041851733968348e-06, |
|
"loss": 0.2726, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.125748502994012, |
|
"grad_norm": 0.9534911581960199, |
|
"learning_rate": 4.018783294826071e-06, |
|
"loss": 0.255, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.1287425149700598, |
|
"grad_norm": 0.9956615113691805, |
|
"learning_rate": 3.995736558243186e-06, |
|
"loss": 0.2696, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.1317365269461077, |
|
"grad_norm": 0.9006877062415116, |
|
"learning_rate": 3.972712033967608e-06, |
|
"loss": 0.2354, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.1347305389221556, |
|
"grad_norm": 0.9292170195044273, |
|
"learning_rate": 3.949710231255961e-06, |
|
"loss": 0.2796, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.1377245508982037, |
|
"grad_norm": 1.0526665863211797, |
|
"learning_rate": 3.926731658862307e-06, |
|
"loss": 0.2413, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.1407185628742516, |
|
"grad_norm": 0.9004024526743142, |
|
"learning_rate": 3.903776825026912e-06, |
|
"loss": 0.2643, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.1437125748502994, |
|
"grad_norm": 0.9130898278860868, |
|
"learning_rate": 3.8808462374649805e-06, |
|
"loss": 0.2489, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.1467065868263473, |
|
"grad_norm": 0.8928230542508337, |
|
"learning_rate": 3.857940403355444e-06, |
|
"loss": 0.2127, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.1497005988023952, |
|
"grad_norm": 0.9327282858724195, |
|
"learning_rate": 3.8350598293297345e-06, |
|
"loss": 0.2618, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.152694610778443, |
|
"grad_norm": 0.9281682264649125, |
|
"learning_rate": 3.8122050214605822e-06, |
|
"loss": 0.2226, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.1556886227544911, |
|
"grad_norm": 0.8822237439841015, |
|
"learning_rate": 3.7893764852508207e-06, |
|
"loss": 0.2491, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.158682634730539, |
|
"grad_norm": 0.8388865603712542, |
|
"learning_rate": 3.766574725622208e-06, |
|
"loss": 0.2168, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.1616766467065869, |
|
"grad_norm": 0.848960789892097, |
|
"learning_rate": 3.7438002469042567e-06, |
|
"loss": 0.2351, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.1646706586826348, |
|
"grad_norm": 0.9716696256710403, |
|
"learning_rate": 3.721053552823078e-06, |
|
"loss": 0.2413, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.1676646706586826, |
|
"grad_norm": 0.9487172677318534, |
|
"learning_rate": 3.698335146490246e-06, |
|
"loss": 0.2326, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.1706586826347305, |
|
"grad_norm": 1.0407935865632547, |
|
"learning_rate": 3.675645530391665e-06, |
|
"loss": 0.2244, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.1736526946107784, |
|
"grad_norm": 0.925397327810438, |
|
"learning_rate": 3.652985206376455e-06, |
|
"loss": 0.2125, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.1766467065868262, |
|
"grad_norm": 0.7389453949674737, |
|
"learning_rate": 3.630354675645853e-06, |
|
"loss": 0.2058, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.1796407185628743, |
|
"grad_norm": 0.9303186413078484, |
|
"learning_rate": 3.6077544387421293e-06, |
|
"loss": 0.2497, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.1826347305389222, |
|
"grad_norm": 0.9429611529622871, |
|
"learning_rate": 3.5851849955375177e-06, |
|
"loss": 0.2042, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.18562874251497, |
|
"grad_norm": 0.9401869819479897, |
|
"learning_rate": 3.5626468452231534e-06, |
|
"loss": 0.2209, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.188622754491018, |
|
"grad_norm": 0.8992896025621585, |
|
"learning_rate": 3.540140486298035e-06, |
|
"loss": 0.1849, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.1916167664670658, |
|
"grad_norm": 0.8423693426005414, |
|
"learning_rate": 3.517666416557999e-06, |
|
"loss": 0.2663, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.1946107784431137, |
|
"grad_norm": 0.9083761337895103, |
|
"learning_rate": 3.495225133084712e-06, |
|
"loss": 0.2327, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.1976047904191618, |
|
"grad_norm": 0.8230200891995576, |
|
"learning_rate": 3.472817132234669e-06, |
|
"loss": 0.2535, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.1976047904191618, |
|
"eval_loss": 0.33559077978134155, |
|
"eval_runtime": 3.5782, |
|
"eval_samples_per_second": 15.091, |
|
"eval_steps_per_second": 3.913, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.2005988023952097, |
|
"grad_norm": 0.8934802983025646, |
|
"learning_rate": 3.4504429096282246e-06, |
|
"loss": 0.2663, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.2035928143712575, |
|
"grad_norm": 0.9089212009147788, |
|
"learning_rate": 3.428102960138625e-06, |
|
"loss": 0.2562, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.2065868263473054, |
|
"grad_norm": 0.9694772874115569, |
|
"learning_rate": 3.405797777881059e-06, |
|
"loss": 0.2645, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.2095808383233533, |
|
"grad_norm": 0.9510679768838551, |
|
"learning_rate": 3.3835278562017405e-06, |
|
"loss": 0.2195, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.2125748502994012, |
|
"grad_norm": 0.8638789626469725, |
|
"learning_rate": 3.3612936876669834e-06, |
|
"loss": 0.2236, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.215568862275449, |
|
"grad_norm": 0.8136320048040392, |
|
"learning_rate": 3.3390957640523147e-06, |
|
"loss": 0.2199, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.218562874251497, |
|
"grad_norm": 0.8399432142688728, |
|
"learning_rate": 3.3169345763315986e-06, |
|
"loss": 0.236, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.221556886227545, |
|
"grad_norm": 0.895132007721184, |
|
"learning_rate": 3.29481061466617e-06, |
|
"loss": 0.2506, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.2245508982035929, |
|
"grad_norm": 1.0315751441949677, |
|
"learning_rate": 3.2727243683940045e-06, |
|
"loss": 0.2336, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.2275449101796407, |
|
"grad_norm": 0.9294994902369398, |
|
"learning_rate": 3.2506763260188824e-06, |
|
"loss": 0.2569, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.2305389221556886, |
|
"grad_norm": 0.856843897399255, |
|
"learning_rate": 3.2286669751995905e-06, |
|
"loss": 0.2375, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.2335329341317365, |
|
"grad_norm": 0.981487886083542, |
|
"learning_rate": 3.2066968027391377e-06, |
|
"loss": 0.2418, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.2365269461077844, |
|
"grad_norm": 0.8363219451535354, |
|
"learning_rate": 3.1847662945739833e-06, |
|
"loss": 0.2281, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.2395209580838324, |
|
"grad_norm": 0.9293679889082452, |
|
"learning_rate": 3.1628759357632943e-06, |
|
"loss": 0.2467, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.2425149700598803, |
|
"grad_norm": 1.0280751031518225, |
|
"learning_rate": 3.1410262104782086e-06, |
|
"loss": 0.2172, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.2455089820359282, |
|
"grad_norm": 0.9800325945674049, |
|
"learning_rate": 3.119217601991139e-06, |
|
"loss": 0.2333, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.248502994011976, |
|
"grad_norm": 0.9889659524131217, |
|
"learning_rate": 3.0974505926650724e-06, |
|
"loss": 0.209, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.251497005988024, |
|
"grad_norm": 0.8401195322703149, |
|
"learning_rate": 3.0757256639429027e-06, |
|
"loss": 0.2328, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.2544910179640718, |
|
"grad_norm": 0.9467402067032646, |
|
"learning_rate": 3.0540432963367907e-06, |
|
"loss": 0.2626, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.2574850299401197, |
|
"grad_norm": 0.9149526790168448, |
|
"learning_rate": 3.032403969417523e-06, |
|
"loss": 0.2266, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.2604790419161676, |
|
"grad_norm": 0.8399102905235053, |
|
"learning_rate": 3.010808161803917e-06, |
|
"loss": 0.2182, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.2634730538922156, |
|
"grad_norm": 0.8572625991470963, |
|
"learning_rate": 2.9892563511522305e-06, |
|
"loss": 0.2541, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.2664670658682635, |
|
"grad_norm": 0.8878535847205529, |
|
"learning_rate": 2.9677490141455915e-06, |
|
"loss": 0.2121, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.2694610778443114, |
|
"grad_norm": 0.8984661284376178, |
|
"learning_rate": 2.946286626483463e-06, |
|
"loss": 0.2653, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.2724550898203593, |
|
"grad_norm": 1.0442695833692566, |
|
"learning_rate": 2.924869662871117e-06, |
|
"loss": 0.2377, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.2754491017964071, |
|
"grad_norm": 0.9062910588057472, |
|
"learning_rate": 2.903498597009136e-06, |
|
"loss": 0.2467, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.278443113772455, |
|
"grad_norm": 0.9501269806327622, |
|
"learning_rate": 2.8821739015829338e-06, |
|
"loss": 0.2475, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.281437125748503, |
|
"grad_norm": 0.8847493600244284, |
|
"learning_rate": 2.8608960482523058e-06, |
|
"loss": 0.2335, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.284431137724551, |
|
"grad_norm": 0.89853078091105, |
|
"learning_rate": 2.839665507640992e-06, |
|
"loss": 0.2241, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.2874251497005988, |
|
"grad_norm": 0.8345458778848835, |
|
"learning_rate": 2.818482749326272e-06, |
|
"loss": 0.2693, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.2904191616766467, |
|
"grad_norm": 0.8373650842866908, |
|
"learning_rate": 2.797348241828569e-06, |
|
"loss": 0.2321, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.2934131736526946, |
|
"grad_norm": 0.8089985643452108, |
|
"learning_rate": 2.776262452601104e-06, |
|
"loss": 0.2538, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.2964071856287425, |
|
"grad_norm": 0.929042165229628, |
|
"learning_rate": 2.7552258480195348e-06, |
|
"loss": 0.2315, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.2994011976047903, |
|
"grad_norm": 0.8300901799503175, |
|
"learning_rate": 2.734238893371667e-06, |
|
"loss": 0.2249, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.3023952095808382, |
|
"grad_norm": 0.8909248470484058, |
|
"learning_rate": 2.7133020528471322e-06, |
|
"loss": 0.23, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.3053892215568863, |
|
"grad_norm": 0.835317522371157, |
|
"learning_rate": 2.6924157895271563e-06, |
|
"loss": 0.2403, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.3083832335329342, |
|
"grad_norm": 0.8647395787653299, |
|
"learning_rate": 2.671580565374282e-06, |
|
"loss": 0.2601, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.311377245508982, |
|
"grad_norm": 0.928093889607222, |
|
"learning_rate": 2.6507968412221763e-06, |
|
"loss": 0.2044, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.31437125748503, |
|
"grad_norm": 1.0316703220893109, |
|
"learning_rate": 2.6300650767654234e-06, |
|
"loss": 0.2279, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.3173652694610778, |
|
"grad_norm": 0.8900386821386082, |
|
"learning_rate": 2.6093857305493666e-06, |
|
"loss": 0.2345, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.3203592814371259, |
|
"grad_norm": 0.9135592098541239, |
|
"learning_rate": 2.588759259959962e-06, |
|
"loss": 0.2182, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.3233532934131738, |
|
"grad_norm": 0.8823808480287963, |
|
"learning_rate": 2.568186121213658e-06, |
|
"loss": 0.2539, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.3263473053892216, |
|
"grad_norm": 0.9188576761856679, |
|
"learning_rate": 2.547666769347312e-06, |
|
"loss": 0.2455, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.3293413173652695, |
|
"grad_norm": 0.8970132200420725, |
|
"learning_rate": 2.5272016582081236e-06, |
|
"loss": 0.2945, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.3323353293413174, |
|
"grad_norm": 0.931708535253249, |
|
"learning_rate": 2.5067912404435952e-06, |
|
"loss": 0.2493, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.3353293413173652, |
|
"grad_norm": 0.913645709906956, |
|
"learning_rate": 2.486435967491516e-06, |
|
"loss": 0.2289, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.3383233532934131, |
|
"grad_norm": 0.8625374739508064, |
|
"learning_rate": 2.4661362895699903e-06, |
|
"loss": 0.2193, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.341317365269461, |
|
"grad_norm": 0.9150737245396889, |
|
"learning_rate": 2.445892655667462e-06, |
|
"loss": 0.299, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.3443113772455089, |
|
"grad_norm": 0.9739809489305545, |
|
"learning_rate": 2.425705513532798e-06, |
|
"loss": 0.2499, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.347305389221557, |
|
"grad_norm": 0.8383301908743397, |
|
"learning_rate": 2.4055753096653795e-06, |
|
"loss": 0.2601, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.3502994011976048, |
|
"grad_norm": 0.8327173176755049, |
|
"learning_rate": 2.3855024893052286e-06, |
|
"loss": 0.2129, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.3532934131736527, |
|
"grad_norm": 0.8647591015863577, |
|
"learning_rate": 2.365487496423152e-06, |
|
"loss": 0.2297, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.3562874251497006, |
|
"grad_norm": 0.7814006048796464, |
|
"learning_rate": 2.3455307737109338e-06, |
|
"loss": 0.1944, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.3592814371257484, |
|
"grad_norm": 0.8359729141244253, |
|
"learning_rate": 2.3256327625715345e-06, |
|
"loss": 0.1983, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.3622754491017965, |
|
"grad_norm": 0.9349340845802615, |
|
"learning_rate": 2.3057939031093346e-06, |
|
"loss": 0.2217, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.3652694610778444, |
|
"grad_norm": 0.8081200603562876, |
|
"learning_rate": 2.2860146341203936e-06, |
|
"loss": 0.2141, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.3682634730538923, |
|
"grad_norm": 0.9324708547933994, |
|
"learning_rate": 2.2662953930827546e-06, |
|
"loss": 0.2882, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.3712574850299402, |
|
"grad_norm": 0.9370627368817129, |
|
"learning_rate": 2.2466366161467528e-06, |
|
"loss": 0.2324, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.374251497005988, |
|
"grad_norm": 0.9775784913621385, |
|
"learning_rate": 2.227038738125385e-06, |
|
"loss": 0.2129, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.377245508982036, |
|
"grad_norm": 1.0153366345964563, |
|
"learning_rate": 2.207502192484685e-06, |
|
"loss": 0.2278, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.3802395209580838, |
|
"grad_norm": 0.8432754551394376, |
|
"learning_rate": 2.188027411334131e-06, |
|
"loss": 0.2294, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.3832335329341316, |
|
"grad_norm": 0.9453714951431974, |
|
"learning_rate": 2.1686148254171012e-06, |
|
"loss": 0.2225, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.3862275449101795, |
|
"grad_norm": 1.0293017957323893, |
|
"learning_rate": 2.1492648641013305e-06, |
|
"loss": 0.2123, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.3892215568862276, |
|
"grad_norm": 0.7973395829106387, |
|
"learning_rate": 2.1299779553694323e-06, |
|
"loss": 0.2174, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.3922155688622755, |
|
"grad_norm": 0.9822796163667018, |
|
"learning_rate": 2.1107545258094135e-06, |
|
"loss": 0.226, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.3952095808383234, |
|
"grad_norm": 0.9048202494674366, |
|
"learning_rate": 2.0915950006052555e-06, |
|
"loss": 0.189, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.3982035928143712, |
|
"grad_norm": 0.8482559459307193, |
|
"learning_rate": 2.0724998035274947e-06, |
|
"loss": 0.2216, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.401197604790419, |
|
"grad_norm": 0.9006129225448194, |
|
"learning_rate": 2.053469356923865e-06, |
|
"loss": 0.1872, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.4041916167664672, |
|
"grad_norm": 0.9113520816336781, |
|
"learning_rate": 2.0345040817099433e-06, |
|
"loss": 0.2328, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.407185628742515, |
|
"grad_norm": 0.7994080695465668, |
|
"learning_rate": 2.0156043973598475e-06, |
|
"loss": 0.2083, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.410179640718563, |
|
"grad_norm": 0.9704106274414475, |
|
"learning_rate": 1.996770721896957e-06, |
|
"loss": 0.2377, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.4131736526946108, |
|
"grad_norm": 0.950125781508677, |
|
"learning_rate": 1.9780034718846653e-06, |
|
"loss": 0.2245, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.4161676646706587, |
|
"grad_norm": 0.8578663798472137, |
|
"learning_rate": 1.9593030624171683e-06, |
|
"loss": 0.253, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.4191616766467066, |
|
"grad_norm": 0.9856060337714755, |
|
"learning_rate": 1.9406699071102774e-06, |
|
"loss": 0.2569, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.4221556886227544, |
|
"grad_norm": 0.8365055783466058, |
|
"learning_rate": 1.9221044180922833e-06, |
|
"loss": 0.2523, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.4251497005988023, |
|
"grad_norm": 0.9166241040139455, |
|
"learning_rate": 1.9036070059948253e-06, |
|
"loss": 0.2392, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.4281437125748502, |
|
"grad_norm": 1.0362533778177407, |
|
"learning_rate": 1.885178079943823e-06, |
|
"loss": 0.2679, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.4311377245508983, |
|
"grad_norm": 0.9816009419764054, |
|
"learning_rate": 1.866818047550419e-06, |
|
"loss": 0.3026, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.4341317365269461, |
|
"grad_norm": 0.8347456795332021, |
|
"learning_rate": 1.8485273149019655e-06, |
|
"loss": 0.2185, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.437125748502994, |
|
"grad_norm": 0.9091218473568945, |
|
"learning_rate": 1.8303062865530407e-06, |
|
"loss": 0.2165, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.4401197604790419, |
|
"grad_norm": 0.9154078383701513, |
|
"learning_rate": 1.8121553655165058e-06, |
|
"loss": 0.2112, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.4431137724550898, |
|
"grad_norm": 0.9032829778128162, |
|
"learning_rate": 1.7940749532545832e-06, |
|
"loss": 0.265, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.4461077844311379, |
|
"grad_norm": 1.0080697760970814, |
|
"learning_rate": 1.7760654496699876e-06, |
|
"loss": 0.2227, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.4491017964071857, |
|
"grad_norm": 0.8957733113522632, |
|
"learning_rate": 1.7581272530970666e-06, |
|
"loss": 0.24, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.4520958083832336, |
|
"grad_norm": 0.8335261496122127, |
|
"learning_rate": 1.7402607602930106e-06, |
|
"loss": 0.2291, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.4550898203592815, |
|
"grad_norm": 0.9849458838757417, |
|
"learning_rate": 1.7224663664290537e-06, |
|
"loss": 0.2744, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.4580838323353293, |
|
"grad_norm": 0.8911713531788786, |
|
"learning_rate": 1.7047444650817518e-06, |
|
"loss": 0.2288, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.4610778443113772, |
|
"grad_norm": 1.0137646108138194, |
|
"learning_rate": 1.6870954482242707e-06, |
|
"loss": 0.2237, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.464071856287425, |
|
"grad_norm": 0.8871791355821369, |
|
"learning_rate": 1.669519706217711e-06, |
|
"loss": 0.191, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.467065868263473, |
|
"grad_norm": 0.9401099300088629, |
|
"learning_rate": 1.652017627802487e-06, |
|
"loss": 0.2461, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.4700598802395208, |
|
"grad_norm": 0.8048016136208538, |
|
"learning_rate": 1.6345896000897122e-06, |
|
"loss": 0.2149, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.473053892215569, |
|
"grad_norm": 0.8176445572539465, |
|
"learning_rate": 1.6172360085526567e-06, |
|
"loss": 0.2508, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.4760479041916168, |
|
"grad_norm": 0.8101280568087744, |
|
"learning_rate": 1.5999572370182016e-06, |
|
"loss": 0.2446, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.4790419161676647, |
|
"grad_norm": 0.9448625335762181, |
|
"learning_rate": 1.5827536676583643e-06, |
|
"loss": 0.2471, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.4820359281437125, |
|
"grad_norm": 0.8586513112675779, |
|
"learning_rate": 1.5656256809818343e-06, |
|
"loss": 0.2255, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.4850299401197604, |
|
"grad_norm": 0.9768673466679008, |
|
"learning_rate": 1.54857365582557e-06, |
|
"loss": 0.1983, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.4880239520958085, |
|
"grad_norm": 0.9006140040891589, |
|
"learning_rate": 1.5315979693464039e-06, |
|
"loss": 0.24, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.4910179640718564, |
|
"grad_norm": 1.037317719805374, |
|
"learning_rate": 1.5146989970127158e-06, |
|
"loss": 0.2038, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.4940119760479043, |
|
"grad_norm": 0.9516617265953464, |
|
"learning_rate": 1.4978771125961177e-06, |
|
"loss": 0.2466, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.4970059880239521, |
|
"grad_norm": 0.81133512200921, |
|
"learning_rate": 1.4811326881631937e-06, |
|
"loss": 0.2389, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.8712368300259022, |
|
"learning_rate": 1.4644660940672628e-06, |
|
"loss": 0.2383, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.5029940119760479, |
|
"grad_norm": 0.8912224547341503, |
|
"learning_rate": 1.4478776989401949e-06, |
|
"loss": 0.2418, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.5059880239520957, |
|
"grad_norm": 0.9081742780158918, |
|
"learning_rate": 1.4313678696842559e-06, |
|
"loss": 0.2255, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.5089820359281436, |
|
"grad_norm": 0.8912235673005359, |
|
"learning_rate": 1.4149369714639856e-06, |
|
"loss": 0.22, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.5119760479041915, |
|
"grad_norm": 0.9123351850607487, |
|
"learning_rate": 1.3985853676981316e-06, |
|
"loss": 0.2372, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.5149700598802394, |
|
"grad_norm": 0.9163696266134105, |
|
"learning_rate": 1.3823134200516043e-06, |
|
"loss": 0.2237, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.5179640718562875, |
|
"grad_norm": 1.0110038661336658, |
|
"learning_rate": 1.366121488427481e-06, |
|
"loss": 0.2338, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.5209580838323353, |
|
"grad_norm": 0.9496680513346542, |
|
"learning_rate": 1.3500099309590397e-06, |
|
"loss": 0.2433, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.5239520958083832, |
|
"grad_norm": 0.9310276996335521, |
|
"learning_rate": 1.3339791040018479e-06, |
|
"loss": 0.2742, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.5269461077844313, |
|
"grad_norm": 1.0624769483110188, |
|
"learning_rate": 1.3180293621258694e-06, |
|
"loss": 0.2313, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.5299401197604792, |
|
"grad_norm": 0.9347386553606367, |
|
"learning_rate": 1.3021610581076316e-06, |
|
"loss": 0.1973, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.532934131736527, |
|
"grad_norm": 1.0200035540914512, |
|
"learning_rate": 1.2863745429224145e-06, |
|
"loss": 0.2325, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.535928143712575, |
|
"grad_norm": 0.8820492629345995, |
|
"learning_rate": 1.270670165736499e-06, |
|
"loss": 0.2369, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.5389221556886228, |
|
"grad_norm": 1.0546310430066739, |
|
"learning_rate": 1.2550482738994284e-06, |
|
"loss": 0.278, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.5419161676646707, |
|
"grad_norm": 0.9206426588636809, |
|
"learning_rate": 1.239509212936343e-06, |
|
"loss": 0.2285, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.5449101796407185, |
|
"grad_norm": 0.9554122664290174, |
|
"learning_rate": 1.22405332654032e-06, |
|
"loss": 0.2137, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.5479041916167664, |
|
"grad_norm": 0.7881244236343145, |
|
"learning_rate": 1.2086809565647877e-06, |
|
"loss": 0.2107, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.5508982035928143, |
|
"grad_norm": 0.9112851247315867, |
|
"learning_rate": 1.1933924430159571e-06, |
|
"loss": 0.231, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.5538922155688621, |
|
"grad_norm": 0.9191239398127155, |
|
"learning_rate": 1.1781881240452958e-06, |
|
"loss": 0.2251, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.55688622754491, |
|
"grad_norm": 0.9436054168566522, |
|
"learning_rate": 1.1630683359420653e-06, |
|
"loss": 0.2413, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.5598802395209581, |
|
"grad_norm": 0.9167771695271543, |
|
"learning_rate": 1.1480334131258626e-06, |
|
"loss": 0.1854, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.562874251497006, |
|
"grad_norm": 0.986854753217078, |
|
"learning_rate": 1.1330836881392405e-06, |
|
"loss": 0.2214, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.5658682634730539, |
|
"grad_norm": 0.915423363290195, |
|
"learning_rate": 1.11821949164034e-06, |
|
"loss": 0.2459, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.568862275449102, |
|
"grad_norm": 1.0260199812872717, |
|
"learning_rate": 1.103441152395588e-06, |
|
"loss": 0.2862, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.5718562874251498, |
|
"grad_norm": 0.9303227103245472, |
|
"learning_rate": 1.088748997272414e-06, |
|
"loss": 0.2462, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.5748502994011977, |
|
"grad_norm": 0.9160752971250262, |
|
"learning_rate": 1.0741433512320316e-06, |
|
"loss": 0.1854, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.5778443113772456, |
|
"grad_norm": 1.0599240939257424, |
|
"learning_rate": 1.0596245373222424e-06, |
|
"loss": 0.2206, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.5808383233532934, |
|
"grad_norm": 0.899009303584087, |
|
"learning_rate": 1.045192876670298e-06, |
|
"loss": 0.253, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.5838323353293413, |
|
"grad_norm": 0.8727433242418636, |
|
"learning_rate": 1.0308486884757868e-06, |
|
"loss": 0.261, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.5868263473053892, |
|
"grad_norm": 0.8286399656871455, |
|
"learning_rate": 1.0165922900035886e-06, |
|
"loss": 0.2353, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.589820359281437, |
|
"grad_norm": 0.8764735752385893, |
|
"learning_rate": 1.0024239965768417e-06, |
|
"loss": 0.2229, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.592814371257485, |
|
"grad_norm": 0.9219440935456223, |
|
"learning_rate": 9.883441215699824e-07, |
|
"loss": 0.1751, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.5958083832335328, |
|
"grad_norm": 1.080747710783597, |
|
"learning_rate": 9.74352976401805e-07, |
|
"loss": 0.2792, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.5988023952095807, |
|
"grad_norm": 1.0084037044161969, |
|
"learning_rate": 9.604508705285765e-07, |
|
"loss": 0.2342, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.6017964071856288, |
|
"grad_norm": 0.9488711476032898, |
|
"learning_rate": 9.466381114371942e-07, |
|
"loss": 0.2358, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.6047904191616766, |
|
"grad_norm": 0.9213491807397138, |
|
"learning_rate": 9.329150046383773e-07, |
|
"loss": 0.2299, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.6077844311377245, |
|
"grad_norm": 0.9871797150588095, |
|
"learning_rate": 9.192818536599213e-07, |
|
"loss": 0.243, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.6107784431137726, |
|
"grad_norm": 0.8941476823598501, |
|
"learning_rate": 9.057389600399719e-07, |
|
"loss": 0.243, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.6137724550898205, |
|
"grad_norm": 0.8562245477543875, |
|
"learning_rate": 8.922866233203681e-07, |
|
"loss": 0.2187, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.6167664670658684, |
|
"grad_norm": 0.7452803883355023, |
|
"learning_rate": 8.789251410400024e-07, |
|
"loss": 0.1749, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.6197604790419162, |
|
"grad_norm": 0.7355325160042411, |
|
"learning_rate": 8.65654808728259e-07, |
|
"loss": 0.2249, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.622754491017964, |
|
"grad_norm": 1.0984577194709262, |
|
"learning_rate": 8.524759198984567e-07, |
|
"loss": 0.2139, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.625748502994012, |
|
"grad_norm": 0.896782748741311, |
|
"learning_rate": 8.393887660413719e-07, |
|
"loss": 0.2276, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.6287425149700598, |
|
"grad_norm": 1.075942766285842, |
|
"learning_rate": 8.263936366187825e-07, |
|
"loss": 0.2716, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.6317365269461077, |
|
"grad_norm": 0.8861486842740338, |
|
"learning_rate": 8.134908190570723e-07, |
|
"loss": 0.2502, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.6347305389221556, |
|
"grad_norm": 0.7935283743028442, |
|
"learning_rate": 8.006805987408705e-07, |
|
"loss": 0.1715, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.6377245508982035, |
|
"grad_norm": 0.9590256087574384, |
|
"learning_rate": 7.879632590067354e-07, |
|
"loss": 0.2354, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.6407185628742516, |
|
"grad_norm": 0.9975426487033544, |
|
"learning_rate": 7.753390811368972e-07, |
|
"loss": 0.2635, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.6437125748502994, |
|
"grad_norm": 0.9145620885422897, |
|
"learning_rate": 7.628083443530287e-07, |
|
"loss": 0.2333, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.6467065868263473, |
|
"grad_norm": 1.0373271576164917, |
|
"learning_rate": 7.503713258100726e-07, |
|
"loss": 0.2545, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.6497005988023952, |
|
"grad_norm": 0.8593826234585763, |
|
"learning_rate": 7.380283005901084e-07, |
|
"loss": 0.2391, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.6526946107784433, |
|
"grad_norm": 0.8320133036949128, |
|
"learning_rate": 7.257795416962754e-07, |
|
"loss": 0.2127, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.6556886227544911, |
|
"grad_norm": 0.8847180137824004, |
|
"learning_rate": 7.136253200467231e-07, |
|
"loss": 0.2297, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.658682634730539, |
|
"grad_norm": 0.9185850186489214, |
|
"learning_rate": 7.015659044686307e-07, |
|
"loss": 0.2274, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.6616766467065869, |
|
"grad_norm": 0.9169111137668451, |
|
"learning_rate": 6.896015616922535e-07, |
|
"loss": 0.2291, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.6646706586826348, |
|
"grad_norm": 0.856353739728233, |
|
"learning_rate": 6.777325563450282e-07, |
|
"loss": 0.2679, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.6676646706586826, |
|
"grad_norm": 0.9043875388097713, |
|
"learning_rate": 6.659591509457125e-07, |
|
"loss": 0.2271, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.6706586826347305, |
|
"grad_norm": 0.9650530076188746, |
|
"learning_rate": 6.542816058985896e-07, |
|
"loss": 0.2354, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.6736526946107784, |
|
"grad_norm": 0.954636182956266, |
|
"learning_rate": 6.427001794876974e-07, |
|
"loss": 0.2277, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.6766467065868262, |
|
"grad_norm": 0.8374283590322238, |
|
"learning_rate": 6.312151278711237e-07, |
|
"loss": 0.2476, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.6796407185628741, |
|
"grad_norm": 0.8399538250559132, |
|
"learning_rate": 6.198267050753387e-07, |
|
"loss": 0.2284, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.6826347305389222, |
|
"grad_norm": 0.9345900962466387, |
|
"learning_rate": 6.085351629895736e-07, |
|
"loss": 0.2277, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.68562874251497, |
|
"grad_norm": 0.9966297559348759, |
|
"learning_rate": 5.973407513602514e-07, |
|
"loss": 0.2054, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.688622754491018, |
|
"grad_norm": 0.9308977097223865, |
|
"learning_rate": 5.862437177854629e-07, |
|
"loss": 0.249, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.6916167664670658, |
|
"grad_norm": 1.0515595335934342, |
|
"learning_rate": 5.752443077094927e-07, |
|
"loss": 0.2098, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.694610778443114, |
|
"grad_norm": 0.9038962722445684, |
|
"learning_rate": 5.643427644173838e-07, |
|
"loss": 0.2294, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.6976047904191618, |
|
"grad_norm": 0.8065933884675035, |
|
"learning_rate": 5.535393290295643e-07, |
|
"loss": 0.2081, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.7005988023952097, |
|
"grad_norm": 0.9019474894135322, |
|
"learning_rate": 5.428342404965076e-07, |
|
"loss": 0.2085, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.7035928143712575, |
|
"grad_norm": 0.918436592444138, |
|
"learning_rate": 5.322277355934557e-07, |
|
"loss": 0.2602, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.7065868263473054, |
|
"grad_norm": 0.8691317529476447, |
|
"learning_rate": 5.217200489151714e-07, |
|
"loss": 0.2372, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.7095808383233533, |
|
"grad_norm": 0.8343949227696129, |
|
"learning_rate": 5.113114128707592e-07, |
|
"loss": 0.1918, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.7125748502994012, |
|
"grad_norm": 0.953652304613386, |
|
"learning_rate": 5.010020576785174e-07, |
|
"loss": 0.2491, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.715568862275449, |
|
"grad_norm": 0.8124988839406792, |
|
"learning_rate": 4.907922113608532e-07, |
|
"loss": 0.1852, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.718562874251497, |
|
"grad_norm": 0.922773122795006, |
|
"learning_rate": 4.806820997392325e-07, |
|
"loss": 0.2289, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.7215568862275448, |
|
"grad_norm": 0.8340937152958176, |
|
"learning_rate": 4.7067194642919036e-07, |
|
"loss": 0.2223, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.7245508982035929, |
|
"grad_norm": 0.95537765114133, |
|
"learning_rate": 4.607619728353818e-07, |
|
"loss": 0.2138, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.7275449101796407, |
|
"grad_norm": 0.8369921763931705, |
|
"learning_rate": 4.50952398146689e-07, |
|
"loss": 0.2271, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.7305389221556886, |
|
"grad_norm": 0.8468425311718798, |
|
"learning_rate": 4.4124343933136525e-07, |
|
"loss": 0.2205, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.7335329341317365, |
|
"grad_norm": 0.8289388386941247, |
|
"learning_rate": 4.3163531113224466e-07, |
|
"loss": 0.1981, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.7365269461077846, |
|
"grad_norm": 0.9146570918536742, |
|
"learning_rate": 4.221282260619891e-07, |
|
"loss": 0.2398, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.7395209580838324, |
|
"grad_norm": 0.8297604996586672, |
|
"learning_rate": 4.127223943983849e-07, |
|
"loss": 0.2154, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.7425149700598803, |
|
"grad_norm": 0.837589147748755, |
|
"learning_rate": 4.03418024179697e-07, |
|
"loss": 0.2152, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.7455089820359282, |
|
"grad_norm": 0.8644075202235623, |
|
"learning_rate": 3.9421532120006544e-07, |
|
"loss": 0.2036, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.748502994011976, |
|
"grad_norm": 0.8659250328332493, |
|
"learning_rate": 3.851144890049535e-07, |
|
"loss": 0.2218, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.751497005988024, |
|
"grad_norm": 0.9583473617849272, |
|
"learning_rate": 3.761157288866418e-07, |
|
"loss": 0.279, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.7544910179640718, |
|
"grad_norm": 0.9245215526376561, |
|
"learning_rate": 3.672192398797858e-07, |
|
"loss": 0.2244, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.7574850299401197, |
|
"grad_norm": 0.9649161709724987, |
|
"learning_rate": 3.58425218757002e-07, |
|
"loss": 0.22, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.7604790419161676, |
|
"grad_norm": 0.894173288282072, |
|
"learning_rate": 3.497338600245254e-07, |
|
"loss": 0.2162, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.7634730538922154, |
|
"grad_norm": 0.9065861440899793, |
|
"learning_rate": 3.4114535591790233e-07, |
|
"loss": 0.2059, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.7664670658682635, |
|
"grad_norm": 0.9686267954709409, |
|
"learning_rate": 3.326598963977395e-07, |
|
"loss": 0.2085, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.7694610778443114, |
|
"grad_norm": 1.0006343137554319, |
|
"learning_rate": 3.242776691455013e-07, |
|
"loss": 0.2438, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.7724550898203593, |
|
"grad_norm": 0.7332242297499415, |
|
"learning_rate": 3.159988595593616e-07, |
|
"loss": 0.1991, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.7754491017964071, |
|
"grad_norm": 0.8878633844075399, |
|
"learning_rate": 3.078236507501015e-07, |
|
"loss": 0.1988, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.7784431137724552, |
|
"grad_norm": 0.8631090766938814, |
|
"learning_rate": 2.9975222353705757e-07, |
|
"loss": 0.2387, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.781437125748503, |
|
"grad_norm": 0.8850786727974973, |
|
"learning_rate": 2.917847564441256e-07, |
|
"loss": 0.2561, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.784431137724551, |
|
"grad_norm": 0.9310837214986993, |
|
"learning_rate": 2.839214256958106e-07, |
|
"loss": 0.2194, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.7874251497005988, |
|
"grad_norm": 0.9377101843539455, |
|
"learning_rate": 2.7616240521332884e-07, |
|
"loss": 0.217, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.7904191616766467, |
|
"grad_norm": 0.8239091250636275, |
|
"learning_rate": 2.6850786661076047e-07, |
|
"loss": 0.2331, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.7934131736526946, |
|
"grad_norm": 0.9118979148964735, |
|
"learning_rate": 2.6095797919125533e-07, |
|
"loss": 0.2103, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.7964071856287425, |
|
"grad_norm": 0.8534667314237735, |
|
"learning_rate": 2.5351290994328703e-07, |
|
"loss": 0.2395, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.7964071856287425, |
|
"eval_loss": 0.32940974831581116, |
|
"eval_runtime": 3.5774, |
|
"eval_samples_per_second": 15.095, |
|
"eval_steps_per_second": 3.913, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.7994011976047903, |
|
"grad_norm": 0.8119494414606993, |
|
"learning_rate": 2.4617282353696093e-07, |
|
"loss": 0.2028, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.8023952095808382, |
|
"grad_norm": 0.9678902123635491, |
|
"learning_rate": 2.3893788232036807e-07, |
|
"loss": 0.2794, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.805389221556886, |
|
"grad_norm": 0.9184998292750141, |
|
"learning_rate": 2.318082463160032e-07, |
|
"loss": 0.2605, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.8083832335329342, |
|
"grad_norm": 0.8258050559975173, |
|
"learning_rate": 2.2478407321721295e-07, |
|
"loss": 0.2173, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.811377245508982, |
|
"grad_norm": 0.8162702386622641, |
|
"learning_rate": 2.1786551838471892e-07, |
|
"loss": 0.2233, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.81437125748503, |
|
"grad_norm": 0.995800718266279, |
|
"learning_rate": 2.1105273484317402e-07, |
|
"loss": 0.1913, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.8173652694610778, |
|
"grad_norm": 0.8339609053360876, |
|
"learning_rate": 2.043458732777831e-07, |
|
"loss": 0.1993, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.8203592814371259, |
|
"grad_norm": 0.9485170803779748, |
|
"learning_rate": 1.9774508203096843e-07, |
|
"loss": 0.2336, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.8233532934131738, |
|
"grad_norm": 0.8176409009358556, |
|
"learning_rate": 1.9125050709908388e-07, |
|
"loss": 0.1949, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.8263473053892216, |
|
"grad_norm": 0.9188908452030895, |
|
"learning_rate": 1.8486229212919482e-07, |
|
"loss": 0.2364, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.8293413173652695, |
|
"grad_norm": 0.8255676677137985, |
|
"learning_rate": 1.7858057841589281e-07, |
|
"loss": 0.2234, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.8323353293413174, |
|
"grad_norm": 1.0315162352494296, |
|
"learning_rate": 1.7240550489817652e-07, |
|
"loss": 0.2477, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.8353293413173652, |
|
"grad_norm": 1.0211294700959108, |
|
"learning_rate": 1.66337208156373e-07, |
|
"loss": 0.2168, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.8383233532934131, |
|
"grad_norm": 0.9026794164390517, |
|
"learning_rate": 1.6037582240912175e-07, |
|
"loss": 0.2368, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.841317365269461, |
|
"grad_norm": 0.8776064474161454, |
|
"learning_rate": 1.5452147951040165e-07, |
|
"loss": 0.2323, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.8443113772455089, |
|
"grad_norm": 1.009690999308888, |
|
"learning_rate": 1.4877430894662037e-07, |
|
"loss": 0.2649, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.8473053892215567, |
|
"grad_norm": 0.8955062054578431, |
|
"learning_rate": 1.4313443783374405e-07, |
|
"loss": 0.21, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.8502994011976048, |
|
"grad_norm": 0.8223285759016997, |
|
"learning_rate": 1.3760199091449045e-07, |
|
"loss": 0.1883, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.8532934131736527, |
|
"grad_norm": 0.8040628257364225, |
|
"learning_rate": 1.3217709055556638e-07, |
|
"loss": 0.2, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.8562874251497006, |
|
"grad_norm": 0.858235644570802, |
|
"learning_rate": 1.268598567449647e-07, |
|
"loss": 0.2267, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.8592814371257484, |
|
"grad_norm": 0.9641435650840584, |
|
"learning_rate": 1.2165040708930763e-07, |
|
"loss": 0.2226, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.8622754491017965, |
|
"grad_norm": 0.7799819481928281, |
|
"learning_rate": 1.1654885681124661e-07, |
|
"loss": 0.221, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.8652694610778444, |
|
"grad_norm": 0.9100257311468898, |
|
"learning_rate": 1.1155531874691372e-07, |
|
"loss": 0.2527, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.8682634730538923, |
|
"grad_norm": 0.8770022413908396, |
|
"learning_rate": 1.0666990334342708e-07, |
|
"loss": 0.2032, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.8712574850299402, |
|
"grad_norm": 0.8674898686200286, |
|
"learning_rate": 1.0189271865644445e-07, |
|
"loss": 0.228, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.874251497005988, |
|
"grad_norm": 1.1343808792109478, |
|
"learning_rate": 9.722387034777847e-08, |
|
"loss": 0.2698, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.877245508982036, |
|
"grad_norm": 0.9294444437721019, |
|
"learning_rate": 9.266346168305518e-08, |
|
"loss": 0.2618, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.8802395209580838, |
|
"grad_norm": 0.8206454566517795, |
|
"learning_rate": 8.821159352943142e-08, |
|
"loss": 0.2211, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.8832335329341316, |
|
"grad_norm": 0.9086609286270629, |
|
"learning_rate": 8.38683643533661e-08, |
|
"loss": 0.2339, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.8862275449101795, |
|
"grad_norm": 0.9799968800296519, |
|
"learning_rate": 7.963387021843683e-08, |
|
"loss": 0.2335, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.8892215568862274, |
|
"grad_norm": 0.928853130443077, |
|
"learning_rate": 7.550820478322285e-08, |
|
"loss": 0.2521, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.8922155688622755, |
|
"grad_norm": 0.8598786368119536, |
|
"learning_rate": 7.149145929922607e-08, |
|
"loss": 0.1957, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.8952095808383234, |
|
"grad_norm": 0.915068110335021, |
|
"learning_rate": 6.758372260885714e-08, |
|
"loss": 0.2442, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.8982035928143712, |
|
"grad_norm": 1.0142945468007212, |
|
"learning_rate": 6.378508114346982e-08, |
|
"loss": 0.2215, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.9011976047904193, |
|
"grad_norm": 1.0404226389663245, |
|
"learning_rate": 6.009561892144744e-08, |
|
"loss": 0.2193, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.9041916167664672, |
|
"grad_norm": 0.9059426399430126, |
|
"learning_rate": 5.651541754634726e-08, |
|
"loss": 0.204, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.907185628742515, |
|
"grad_norm": 0.9607271314104048, |
|
"learning_rate": 5.304455620509297e-08, |
|
"loss": 0.2397, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.910179640718563, |
|
"grad_norm": 0.8903471611935083, |
|
"learning_rate": 4.968311166622553e-08, |
|
"loss": 0.2259, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.9131736526946108, |
|
"grad_norm": 0.9739038012140303, |
|
"learning_rate": 4.643115827820399e-08, |
|
"loss": 0.2012, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.9161676646706587, |
|
"grad_norm": 0.9671923210116596, |
|
"learning_rate": 4.328876796776071e-08, |
|
"loss": 0.2611, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.9191616766467066, |
|
"grad_norm": 0.9810030299257178, |
|
"learning_rate": 4.0256010238310936e-08, |
|
"loss": 0.2237, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.9221556886227544, |
|
"grad_norm": 0.859877438692762, |
|
"learning_rate": 3.733295216841626e-08, |
|
"loss": 0.2419, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.9251497005988023, |
|
"grad_norm": 0.9007188955813349, |
|
"learning_rate": 3.451965841029914e-08, |
|
"loss": 0.2246, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.9281437125748502, |
|
"grad_norm": 0.8394254102020391, |
|
"learning_rate": 3.181619118841517e-08, |
|
"loss": 0.2666, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.931137724550898, |
|
"grad_norm": 1.0110764984923726, |
|
"learning_rate": 2.9222610298074717e-08, |
|
"loss": 0.2124, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.9341317365269461, |
|
"grad_norm": 0.9137239418802343, |
|
"learning_rate": 2.673897310412288e-08, |
|
"loss": 0.2583, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.937125748502994, |
|
"grad_norm": 0.7529780732128938, |
|
"learning_rate": 2.4365334539667717e-08, |
|
"loss": 0.1931, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.9401197604790419, |
|
"grad_norm": 0.8730933596034636, |
|
"learning_rate": 2.210174710486679e-08, |
|
"loss": 0.2138, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.94311377245509, |
|
"grad_norm": 0.8365012170520347, |
|
"learning_rate": 1.99482608657664e-08, |
|
"loss": 0.211, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.9461077844311379, |
|
"grad_norm": 0.9272064842770233, |
|
"learning_rate": 1.7904923453193056e-08, |
|
"loss": 0.2508, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.9491017964071857, |
|
"grad_norm": 0.8486071913399128, |
|
"learning_rate": 1.5971780061701524e-08, |
|
"loss": 0.2129, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.9520958083832336, |
|
"grad_norm": 0.9006672267111283, |
|
"learning_rate": 1.4148873448573408e-08, |
|
"loss": 0.2009, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.9550898203592815, |
|
"grad_norm": 0.8278135537668184, |
|
"learning_rate": 1.2436243932872349e-08, |
|
"loss": 0.2337, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.9580838323353293, |
|
"grad_norm": 0.8876450837281434, |
|
"learning_rate": 1.0833929394552523e-08, |
|
"loss": 0.1976, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.9610778443113772, |
|
"grad_norm": 0.8423533893800599, |
|
"learning_rate": 9.341965273621522e-09, |
|
"loss": 0.2421, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.964071856287425, |
|
"grad_norm": 0.9317422471032494, |
|
"learning_rate": 7.96038456935322e-09, |
|
"loss": 0.2387, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.967065868263473, |
|
"grad_norm": 0.916442743765092, |
|
"learning_rate": 6.6892178395611125e-09, |
|
"loss": 0.2567, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.9700598802395208, |
|
"grad_norm": 1.1599640798849271, |
|
"learning_rate": 5.528493199922769e-09, |
|
"loss": 0.2543, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.9730538922155687, |
|
"grad_norm": 0.8906115635398465, |
|
"learning_rate": 4.478236323355312e-09, |
|
"loss": 0.2055, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.9760479041916168, |
|
"grad_norm": 0.986996432255188, |
|
"learning_rate": 3.538470439448105e-09, |
|
"loss": 0.2812, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.9790419161676647, |
|
"grad_norm": 0.8660770888742138, |
|
"learning_rate": 2.709216333952602e-09, |
|
"loss": 0.2395, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.9820359281437125, |
|
"grad_norm": 0.8200642556953853, |
|
"learning_rate": 1.9904923483171632e-09, |
|
"loss": 0.1981, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.9850299401197606, |
|
"grad_norm": 0.9519722214351811, |
|
"learning_rate": 1.3823143792851545e-09, |
|
"loss": 0.2791, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.9880239520958085, |
|
"grad_norm": 1.0220351222216668, |
|
"learning_rate": 8.846958785418969e-10, |
|
"loss": 0.2629, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.9910179640718564, |
|
"grad_norm": 0.896429038531988, |
|
"learning_rate": 4.97647852417682e-10, |
|
"loss": 0.2034, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.9940119760479043, |
|
"grad_norm": 0.8392074572982221, |
|
"learning_rate": 2.2117886164407797e-10, |
|
"loss": 0.2096, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.9970059880239521, |
|
"grad_norm": 0.7640885896999279, |
|
"learning_rate": 5.529502116519148e-11, |
|
"loss": 0.1963, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.8428548981366851, |
|
"learning_rate": 0.0, |
|
"loss": 0.1478, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 668, |
|
"total_flos": 35357291446272.0, |
|
"train_loss": 0.2912723551877958, |
|
"train_runtime": 1590.1983, |
|
"train_samples_per_second": 6.702, |
|
"train_steps_per_second": 0.42 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 668, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 5000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 35357291446272.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|