{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9996362019483405,
  "eval_steps": 500,
  "global_step": 23190,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.002587008367355188, "grad_norm": 3.117795467376709, "learning_rate": 8.624407072013799e-08, "loss": 3.7293, "step": 20},
    {"epoch": 0.005174016734710376, "grad_norm": 3.1848440170288086, "learning_rate": 1.7248814144027598e-07, "loss": 3.7345, "step": 40},
    {"epoch": 0.007761025102065565, "grad_norm": 3.073965072631836, "learning_rate": 2.5873221216041403e-07, "loss": 3.7232, "step": 60},
    {"epoch": 0.010348033469420752, "grad_norm": 2.366281032562256, "learning_rate": 3.4497628288055197e-07, "loss": 3.6733, "step": 80},
    {"epoch": 0.012935041836775941, "grad_norm": 1.8071706295013428, "learning_rate": 4.3122035360069e-07, "loss": 3.5801, "step": 100},
    {"epoch": 0.01552205020413113, "grad_norm": 1.3455595970153809, "learning_rate": 5.174644243208281e-07, "loss": 3.4895, "step": 120},
    {"epoch": 0.018109058571486317, "grad_norm": 0.9497399926185608, "learning_rate": 6.03708495040966e-07, "loss": 3.3785, "step": 140},
    {"epoch": 0.020696066938841504, "grad_norm": 0.9015834331512451, "learning_rate": 6.899525657611039e-07, "loss": 3.3095, "step": 160},
    {"epoch": 0.023283075306196693, "grad_norm": 0.8537250757217407, "learning_rate": 7.761966364812419e-07, "loss": 3.1901, "step": 180},
    {"epoch": 0.025870083673551883, "grad_norm": 0.8126162886619568, "learning_rate": 8.6244070720138e-07, "loss": 3.1323, "step": 200},
    {"epoch": 0.02845709204090707, "grad_norm": 0.8042150139808655, "learning_rate": 9.48684777921518e-07, "loss": 3.0969, "step": 220},
    {"epoch": 0.03104410040826226, "grad_norm": 0.7180504202842712, "learning_rate": 1.0349288486416561e-06, "loss": 3.0284, "step": 240},
    {"epoch": 0.033631108775617445, "grad_norm": 0.7540543675422668, "learning_rate": 1.1211729193617941e-06, "loss": 3.043, "step": 260},
    {"epoch": 0.036218117142972635, "grad_norm": 0.720626175403595, "learning_rate": 1.207416990081932e-06, "loss": 3.002, "step": 280},
    {"epoch": 0.038805125510327824, "grad_norm": 0.761584997177124, "learning_rate": 1.29366106080207e-06, "loss": 3.0019, "step": 300},
    {"epoch": 0.04139213387768301, "grad_norm": 0.7120524644851685, "learning_rate": 1.3799051315222079e-06, "loss": 2.9614, "step": 320},
    {"epoch": 0.0439791422450382, "grad_norm": 0.6938987374305725, "learning_rate": 1.4661492022423459e-06, "loss": 2.9435, "step": 340},
    {"epoch": 0.04656615061239339, "grad_norm": 0.7532190680503845, "learning_rate": 1.5523932729624839e-06, "loss": 2.9194, "step": 360},
    {"epoch": 0.049153158979748576, "grad_norm": 0.7144018411636353, "learning_rate": 1.6386373436826219e-06, "loss": 2.909, "step": 380},
    {"epoch": 0.051740167347103766, "grad_norm": 0.7019342184066772, "learning_rate": 1.72488141440276e-06, "loss": 2.8771, "step": 400},
    {"epoch": 0.05432717571445895, "grad_norm": 0.7003944516181946, "learning_rate": 1.8111254851228978e-06, "loss": 2.892, "step": 420},
    {"epoch": 0.05691418408181414, "grad_norm": 0.7309603095054626, "learning_rate": 1.897369555843036e-06, "loss": 2.8603, "step": 440},
    {"epoch": 0.05950119244916933, "grad_norm": 0.6825790405273438, "learning_rate": 1.983613626563174e-06, "loss": 2.8596, "step": 460},
    {"epoch": 0.06208820081652452, "grad_norm": 0.6874191164970398, "learning_rate": 2.0698576972833122e-06, "loss": 2.8482, "step": 480},
    {"epoch": 0.0646752091838797, "grad_norm": 0.7430766224861145, "learning_rate": 2.15610176800345e-06, "loss": 2.8446, "step": 500},
    {"epoch": 0.06726221755123489, "grad_norm": 0.7108047008514404, "learning_rate": 2.2423458387235882e-06, "loss": 2.828, "step": 520},
    {"epoch": 0.06984922591859008, "grad_norm": 0.7121191620826721, "learning_rate": 2.328589909443726e-06, "loss": 2.8002, "step": 540},
    {"epoch": 0.07243623428594527, "grad_norm": 0.7068054676055908, "learning_rate": 2.414833980163864e-06, "loss": 2.8119, "step": 560},
    {"epoch": 0.07502324265330046, "grad_norm": 0.6924973726272583, "learning_rate": 2.501078050884002e-06, "loss": 2.7846, "step": 580},
    {"epoch": 0.07761025102065565, "grad_norm": 0.7206343412399292, "learning_rate": 2.58732212160414e-06, "loss": 2.7896, "step": 600},
    {"epoch": 0.08019725938801084, "grad_norm": 0.7203959822654724, "learning_rate": 2.673566192324278e-06, "loss": 2.7793, "step": 620},
    {"epoch": 0.08278426775536601, "grad_norm": 0.7108203172683716, "learning_rate": 2.7598102630444157e-06, "loss": 2.7642, "step": 640},
    {"epoch": 0.0853712761227212, "grad_norm": 0.7053371667861938, "learning_rate": 2.846054333764554e-06, "loss": 2.7588, "step": 660},
    {"epoch": 0.0879582844900764, "grad_norm": 0.6986986398696899, "learning_rate": 2.9322984044846917e-06, "loss": 2.7453, "step": 680},
    {"epoch": 0.09054529285743158, "grad_norm": 0.7130378484725952, "learning_rate": 3.01854247520483e-06, "loss": 2.7249, "step": 700},
    {"epoch": 0.09313230122478677, "grad_norm": 0.7068546414375305, "learning_rate": 3.1047865459249677e-06, "loss": 2.7134, "step": 720},
    {"epoch": 0.09571930959214196, "grad_norm": 0.7224396467208862, "learning_rate": 3.191030616645106e-06, "loss": 2.718, "step": 740},
    {"epoch": 0.09830631795949715, "grad_norm": 0.7299512624740601, "learning_rate": 3.2772746873652437e-06, "loss": 2.7049, "step": 760},
    {"epoch": 0.10089332632685234, "grad_norm": 0.7126713991165161, "learning_rate": 3.363518758085382e-06, "loss": 2.7199, "step": 780},
    {"epoch": 0.10348033469420753, "grad_norm": 0.6644526720046997, "learning_rate": 3.44976282880552e-06, "loss": 2.7046, "step": 800},
    {"epoch": 0.10606734306156272, "grad_norm": 0.7410451769828796, "learning_rate": 3.536006899525658e-06, "loss": 2.7012, "step": 820},
    {"epoch": 0.1086543514289179, "grad_norm": 0.7218043208122253, "learning_rate": 3.6222509702457957e-06, "loss": 2.71, "step": 840},
    {"epoch": 0.11124135979627309, "grad_norm": 0.6941894292831421, "learning_rate": 3.708495040965934e-06, "loss": 2.7035, "step": 860},
    {"epoch": 0.11382836816362828, "grad_norm": 0.6824482679367065, "learning_rate": 3.794739111686072e-06, "loss": 2.6741, "step": 880},
    {"epoch": 0.11641537653098347, "grad_norm": 0.7253878712654114, "learning_rate": 3.88098318240621e-06, "loss": 2.6831, "step": 900},
    {"epoch": 0.11900238489833866, "grad_norm": 0.6949134469032288, "learning_rate": 3.967227253126348e-06, "loss": 2.6674, "step": 920},
    {"epoch": 0.12158939326569385, "grad_norm": 0.7096832394599915, "learning_rate": 4.053471323846485e-06, "loss": 2.674, "step": 940},
    {"epoch": 0.12417640163304904, "grad_norm": 0.6938119530677795, "learning_rate": 4.1397153945666245e-06, "loss": 2.6533, "step": 960},
    {"epoch": 0.12676341000040423, "grad_norm": 0.6781166195869446, "learning_rate": 4.225959465286762e-06, "loss": 2.6426, "step": 980},
    {"epoch": 0.1293504183677594, "grad_norm": 0.7600920796394348, "learning_rate": 4.3122035360069e-06, "loss": 2.6454, "step": 1000},
    {"epoch": 0.1319374267351146, "grad_norm": 0.6884315609931946, "learning_rate": 4.398447606727037e-06, "loss": 2.6324, "step": 1020},
    {"epoch": 0.13452443510246978, "grad_norm": 0.7092069387435913, "learning_rate": 4.4846916774471764e-06, "loss": 2.6181, "step": 1040},
    {"epoch": 0.13711144346982498, "grad_norm": 0.729020357131958, "learning_rate": 4.570935748167314e-06, "loss": 2.6217, "step": 1060},
    {"epoch": 0.13969845183718016, "grad_norm": 0.7104108333587646, "learning_rate": 4.657179818887452e-06, "loss": 2.6152, "step": 1080},
    {"epoch": 0.14228546020453534, "grad_norm": 0.8092931509017944, "learning_rate": 4.743423889607589e-06, "loss": 2.621, "step": 1100},
    {"epoch": 0.14487246857189054, "grad_norm": 0.6620950102806091, "learning_rate": 4.829667960327728e-06, "loss": 2.6166, "step": 1120},
    {"epoch": 0.14745947693924571, "grad_norm": 0.6813467741012573, "learning_rate": 4.915912031047866e-06, "loss": 2.6171, "step": 1140},
    {"epoch": 0.15004648530660092, "grad_norm": 0.7140293121337891, "learning_rate": 5.002156101768004e-06, "loss": 2.5911, "step": 1160},
    {"epoch": 0.1526334936739561, "grad_norm": 0.7278040051460266, "learning_rate": 5.088400172488141e-06, "loss": 2.6055, "step": 1180},
    {"epoch": 0.1552205020413113, "grad_norm": 0.7283148169517517, "learning_rate": 5.17464424320828e-06, "loss": 2.6054, "step": 1200},
    {"epoch": 0.15780751040866647, "grad_norm": 0.7109535932540894, "learning_rate": 5.260888313928419e-06, "loss": 2.56, "step": 1220},
    {"epoch": 0.16039451877602168, "grad_norm": 0.7203260064125061, "learning_rate": 5.347132384648556e-06, "loss": 2.578, "step": 1240},
    {"epoch": 0.16298152714337685, "grad_norm": 0.7385180592536926, "learning_rate": 5.433376455368694e-06, "loss": 2.5829, "step": 1260},
    {"epoch": 0.16556853551073203, "grad_norm": 0.7511777281761169, "learning_rate": 5.5196205260888315e-06, "loss": 2.5703, "step": 1280},
    {"epoch": 0.16815554387808723, "grad_norm": 0.7461130619049072, "learning_rate": 5.60586459680897e-06, "loss": 2.5891, "step": 1300},
    {"epoch": 0.1707425522454424, "grad_norm": 0.7192751169204712, "learning_rate": 5.692108667529108e-06, "loss": 2.552, "step": 1320},
    {"epoch": 0.1733295606127976, "grad_norm": 0.7672246694564819, "learning_rate": 5.778352738249245e-06, "loss": 2.5451, "step": 1340},
    {"epoch": 0.1759165689801528, "grad_norm": 0.8286859393119812, "learning_rate": 5.8645968089693835e-06, "loss": 2.5691, "step": 1360},
    {"epoch": 0.178503577347508, "grad_norm": 0.8903458714485168, "learning_rate": 5.9508408796895225e-06, "loss": 2.5717, "step": 1380},
    {"epoch": 0.18109058571486317, "grad_norm": 0.7192072868347168, "learning_rate": 6.03708495040966e-06, "loss": 2.547, "step": 1400},
    {"epoch": 0.18367759408221837, "grad_norm": 0.7470182776451111, "learning_rate": 6.123329021129798e-06, "loss": 2.5309, "step": 1420},
    {"epoch": 0.18626460244957355, "grad_norm": 0.7186440229415894, "learning_rate": 6.2095730918499354e-06, "loss": 2.5433, "step": 1440},
    {"epoch": 0.18885161081692872, "grad_norm": 0.7359221577644348, "learning_rate": 6.295817162570074e-06, "loss": 2.5482, "step": 1460},
    {"epoch": 0.19143861918428393, "grad_norm": 0.7859694957733154, "learning_rate": 6.382061233290212e-06, "loss": 2.5371, "step": 1480},
    {"epoch": 0.1940256275516391, "grad_norm": 0.7339861392974854, "learning_rate": 6.468305304010349e-06, "loss": 2.5217, "step": 1500},
    {"epoch": 0.1966126359189943, "grad_norm": 0.7527260780334473, "learning_rate": 6.554549374730487e-06, "loss": 2.5183, "step": 1520},
    {"epoch": 0.19919964428634948, "grad_norm": 0.7345518469810486, "learning_rate": 6.6407934454506265e-06, "loss": 2.5249, "step": 1540},
    {"epoch": 0.20178665265370468, "grad_norm": 0.736298680305481, "learning_rate": 6.727037516170764e-06, "loss": 2.5184, "step": 1560},
    {"epoch": 0.20437366102105986, "grad_norm": 0.7677698135375977, "learning_rate": 6.813281586890902e-06, "loss": 2.5134, "step": 1580},
    {"epoch": 0.20696066938841506, "grad_norm": 0.7627900838851929, "learning_rate": 6.89952565761104e-06, "loss": 2.5064, "step": 1600},
    {"epoch": 0.20954767775577024, "grad_norm": 0.7290985584259033, "learning_rate": 6.985769728331178e-06, "loss": 2.5124, "step": 1620},
    {"epoch": 0.21213468612312544, "grad_norm": 0.7347148060798645, "learning_rate": 7.072013799051316e-06, "loss": 2.4901, "step": 1640},
    {"epoch": 0.21472169449048062, "grad_norm": 0.7257357239723206, "learning_rate": 7.158257869771453e-06, "loss": 2.4884, "step": 1660},
    {"epoch": 0.2173087028578358, "grad_norm": 0.7803710699081421, "learning_rate": 7.244501940491591e-06, "loss": 2.4899, "step": 1680},
    {"epoch": 0.219895711225191, "grad_norm": 0.6987377405166626, "learning_rate": 7.33074601121173e-06, "loss": 2.5001, "step": 1700},
    {"epoch": 0.22248271959254617, "grad_norm": 0.6989637017250061, "learning_rate": 7.416990081931868e-06, "loss": 2.4941, "step": 1720},
    {"epoch": 0.22506972795990138, "grad_norm": 0.7391577363014221, "learning_rate": 7.503234152652006e-06, "loss": 2.4811, "step": 1740},
    {"epoch": 0.22765673632725655, "grad_norm": 0.7664337754249573, "learning_rate": 7.589478223372144e-06, "loss": 2.4806, "step": 1760},
    {"epoch": 0.23024374469461176, "grad_norm": 0.7150381207466125, "learning_rate": 7.675722294092282e-06, "loss": 2.4674, "step": 1780},
    {"epoch": 0.23283075306196693, "grad_norm": 0.7978541254997253, "learning_rate": 7.76196636481242e-06, "loss": 2.4633, "step": 1800},
    {"epoch": 0.23541776142932214, "grad_norm": 0.7218653559684753, "learning_rate": 7.848210435532557e-06, "loss": 2.4589, "step": 1820},
    {"epoch": 0.2380047697966773, "grad_norm": 0.723008930683136, "learning_rate": 7.934454506252696e-06, "loss": 2.467, "step": 1840},
    {"epoch": 0.2405917781640325, "grad_norm": 0.7375757098197937, "learning_rate": 8.020698576972833e-06, "loss": 2.4812, "step": 1860},
    {"epoch": 0.2431787865313877, "grad_norm": 0.7501986026763916, "learning_rate": 8.10694264769297e-06, "loss": 2.4587, "step": 1880},
    {"epoch": 0.24576579489874287, "grad_norm": 0.7394606471061707, "learning_rate": 8.19318671841311e-06, "loss": 2.4514, "step": 1900},
    {"epoch": 0.24835280326609807, "grad_norm": 0.7856109738349915, "learning_rate": 8.279430789133249e-06, "loss": 2.45, "step": 1920},
    {"epoch": 0.25093981163345325, "grad_norm": 0.7603466510772705, "learning_rate": 8.365674859853386e-06, "loss": 2.4341, "step": 1940},
    {"epoch": 0.25352682000080845, "grad_norm": 0.7223484516143799, "learning_rate": 8.451918930573524e-06, "loss": 2.4349, "step": 1960},
    {"epoch": 0.25611382836816365, "grad_norm": 0.7488518357276917, "learning_rate": 8.538163001293663e-06, "loss": 2.4417, "step": 1980},
    {"epoch": 0.2587008367355188, "grad_norm": 0.83389812707901, "learning_rate": 8.6244070720138e-06, "loss": 2.4266, "step": 2000},
    {"epoch": 0.261287845102874, "grad_norm": 0.7574110627174377, "learning_rate": 8.710651142733937e-06, "loss": 2.4471, "step": 2020},
    {"epoch": 0.2638748534702292, "grad_norm": 0.7482550740242004, "learning_rate": 8.796895213454075e-06, "loss": 2.4368, "step": 2040},
    {"epoch": 0.26646186183758436, "grad_norm": 0.7341257333755493, "learning_rate": 8.883139284174214e-06, "loss": 2.4303, "step": 2060},
    {"epoch": 0.26904887020493956, "grad_norm": 0.764855682849884, "learning_rate": 8.969383354894353e-06, "loss": 2.4176, "step": 2080},
    {"epoch": 0.27163587857229476, "grad_norm": 0.7559799551963806, "learning_rate": 9.055627425614489e-06, "loss": 2.4088, "step": 2100},
    {"epoch": 0.27422288693964997, "grad_norm": 0.7435436844825745, "learning_rate": 9.141871496334628e-06, "loss": 2.4241, "step": 2120},
    {"epoch": 0.2768098953070051, "grad_norm": 0.7100109457969666, "learning_rate": 9.228115567054767e-06, "loss": 2.4122, "step": 2140},
    {"epoch": 0.2793969036743603, "grad_norm": 0.7357873320579529, "learning_rate": 9.314359637774904e-06, "loss": 2.425, "step": 2160},
    {"epoch": 0.2819839120417155, "grad_norm": 0.7281599640846252, "learning_rate": 9.400603708495041e-06, "loss": 2.4242, "step": 2180},
    {"epoch": 0.28457092040907067, "grad_norm": 0.7816882729530334, "learning_rate": 9.486847779215179e-06, "loss": 2.4115, "step": 2200},
    {"epoch": 0.2871579287764259, "grad_norm": 0.7402174472808838, "learning_rate": 9.573091849935318e-06, "loss": 2.4253, "step": 2220},
    {"epoch": 0.2897449371437811, "grad_norm": 0.7378434538841248, "learning_rate": 9.659335920655457e-06, "loss": 2.4015, "step": 2240},
    {"epoch": 0.2923319455111363, "grad_norm": 0.7792133688926697, "learning_rate": 9.745579991375592e-06, "loss": 2.3947, "step": 2260},
    {"epoch": 0.29491895387849143, "grad_norm": 0.7180085778236389, "learning_rate": 9.831824062095732e-06, "loss": 2.3975, "step": 2280},
    {"epoch": 0.29750596224584663, "grad_norm": 0.7536414861679077, "learning_rate": 9.91806813281587e-06, "loss": 2.4052, "step": 2300},
    {"epoch": 0.30009297061320184, "grad_norm": 0.7593511939048767, "learning_rate": 9.999999943356089e-06, "loss": 2.3856, "step": 2320},
    {"epoch": 0.30267997898055704, "grad_norm": 0.6879323720932007, "learning_rate": 9.999975020055695e-06, "loss": 2.4107, "step": 2340},
    {"epoch": 0.3052669873479122, "grad_norm": 0.7193155884742737, "learning_rate": 9.999904781886476e-06, "loss": 2.3872, "step": 2360},
    {"epoch": 0.3078539957152674, "grad_norm": 0.7621170282363892, "learning_rate": 9.999789229485002e-06, "loss": 2.3844, "step": 2380},
    {"epoch": 0.3104410040826226, "grad_norm": 0.7357332110404968, "learning_rate": 9.999628363898525e-06, "loss": 2.3863, "step": 2400},
    {"epoch": 0.31302801244997774, "grad_norm": 0.753011167049408, "learning_rate": 9.999422186584978e-06, "loss": 2.3807, "step": 2420},
    {"epoch": 0.31561502081733295, "grad_norm": 0.7547248005867004, "learning_rate": 9.999170699412942e-06, "loss": 2.3691, "step": 2440},
    {"epoch": 0.31820202918468815, "grad_norm": 0.7450150847434998, "learning_rate": 9.998873904661655e-06, "loss": 2.3495, "step": 2460},
    {"epoch": 0.32078903755204335, "grad_norm": 0.7460102438926697, "learning_rate": 9.998531805020974e-06, "loss": 2.3582, "step": 2480},
    {"epoch": 0.3233760459193985, "grad_norm": 0.7281093597412109, "learning_rate": 9.998144403591352e-06, "loss": 2.3559, "step": 2500},
    {"epoch": 0.3259630542867537, "grad_norm": 0.7635049223899841, "learning_rate": 9.99771170388382e-06, "loss": 2.3668, "step": 2520},
    {"epoch": 0.3285500626541089, "grad_norm": 0.7504300475120544, "learning_rate": 9.997233709819935e-06, "loss": 2.3642, "step": 2540},
    {"epoch": 0.33113707102146406, "grad_norm": 0.73244309425354, "learning_rate": 9.996710425731776e-06, "loss": 2.3634, "step": 2560},
    {"epoch": 0.33372407938881926, "grad_norm": 0.816100537776947, "learning_rate": 9.996141856361871e-06, "loss": 2.3405, "step": 2580},
    {"epoch": 0.33631108775617446, "grad_norm": 0.7125614285469055, "learning_rate": 9.995528006863175e-06, "loss": 2.3594, "step": 2600},
    {"epoch": 0.33889809612352967, "grad_norm": 0.7901700735092163, "learning_rate": 9.994868882799022e-06, "loss": 2.3636, "step": 2620},
    {"epoch": 0.3414851044908848, "grad_norm": 0.7499126195907593, "learning_rate": 9.994164490143062e-06, "loss": 2.36, "step": 2640},
    {"epoch": 0.34407211285824, "grad_norm": 0.78279048204422, "learning_rate": 9.99341483527922e-06, "loss": 2.3685, "step": 2660},
    {"epoch": 0.3466591212255952, "grad_norm": 0.7683995366096497, "learning_rate": 9.992619925001632e-06, "loss": 2.3532, "step": 2680},
    {"epoch": 0.3492461295929504, "grad_norm": 0.7231781482696533, "learning_rate": 9.991779766514586e-06, "loss": 2.377, "step": 2700},
    {"epoch": 0.3518331379603056, "grad_norm": 0.7408224940299988, "learning_rate": 9.990894367432453e-06, "loss": 2.3406, "step": 2720},
    {"epoch": 0.3544201463276608, "grad_norm": 0.7654145956039429, "learning_rate": 9.989963735779623e-06, "loss": 2.3627, "step": 2740},
    {"epoch": 0.357007154695016, "grad_norm": 0.7166858315467834, "learning_rate": 9.988987879990428e-06, "loss": 2.3343, "step": 2760},
    {"epoch": 0.35959416306237113, "grad_norm": 0.79310142993927, "learning_rate": 9.987966808909069e-06, "loss": 2.3521, "step": 2780},
    {"epoch": 0.36218117142972633, "grad_norm": 0.7726064324378967, "learning_rate": 9.98690053178953e-06, "loss": 2.3408, "step": 2800},
    {"epoch": 0.36476817979708154, "grad_norm": 0.7428059577941895, "learning_rate": 9.985789058295501e-06, "loss": 2.3238, "step": 2820},
    {"epoch": 0.36735518816443674, "grad_norm": 0.7808260321617126, "learning_rate": 9.984632398500289e-06, "loss": 2.3164, "step": 2840},
    {"epoch": 0.3699421965317919, "grad_norm": 0.7835705280303955, "learning_rate": 9.983430562886723e-06, "loss": 2.343, "step": 2860},
    {"epoch": 0.3725292048991471, "grad_norm": 0.7703275680541992, "learning_rate": 9.982183562347063e-06, "loss": 2.3354, "step": 2880},
    {"epoch": 0.3751162132665023, "grad_norm": 0.7307262420654297, "learning_rate": 9.980891408182897e-06, "loss": 2.3445, "step": 2900},
    {"epoch": 0.37770322163385744, "grad_norm": 0.6830443143844604, "learning_rate": 9.979554112105045e-06, "loss": 2.3437, "step": 2920},
    {"epoch": 0.38029023000121265, "grad_norm": 0.7646154761314392, "learning_rate": 9.978171686233445e-06, "loss": 2.3316, "step": 2940},
    {"epoch": 0.38287723836856785, "grad_norm": 0.7598294615745544, "learning_rate": 9.97674414309705e-06, "loss": 2.3135, "step": 2960},
    {"epoch": 0.38546424673592306, "grad_norm": 0.7368634343147278, "learning_rate": 9.975271495633709e-06, "loss": 2.3189, "step": 2980},
    {"epoch": 0.3880512551032782, "grad_norm": 0.7349269390106201, "learning_rate": 9.973753757190057e-06, "loss": 2.307, "step": 3000},
    {"epoch": 0.3906382634706334, "grad_norm": 0.7512723803520203, "learning_rate": 9.972190941521382e-06, "loss": 2.3195, "step": 3020},
    {"epoch": 0.3932252718379886, "grad_norm": 0.7430227398872375, "learning_rate": 9.970583062791517e-06, "loss": 2.3087, "step": 3040},
    {"epoch": 0.3958122802053438, "grad_norm": 0.7576317191123962, "learning_rate": 9.968930135572694e-06, "loss": 2.3, "step": 3060},
    {"epoch": 0.39839928857269896, "grad_norm": 0.7394402623176575, "learning_rate": 9.967232174845426e-06, "loss": 2.3164, "step": 3080},
    {"epoch": 0.40098629694005417, "grad_norm": 0.7061188220977783, "learning_rate": 9.965489195998363e-06, "loss": 2.3187, "step": 3100},
    {"epoch": 0.40357330530740937, "grad_norm": 0.7332024574279785, "learning_rate": 9.963701214828154e-06, "loss": 2.3085, "step": 3120},
    {"epoch": 0.4061603136747645, "grad_norm": 0.663925051689148, "learning_rate": 9.961868247539308e-06, "loss": 2.2978, "step": 3140},
    {"epoch": 0.4087473220421197, "grad_norm": 0.7256314158439636, "learning_rate": 9.959990310744042e-06, "loss": 2.3209, "step": 3160},
    {"epoch": 0.4113343304094749, "grad_norm": 0.711919903755188, "learning_rate": 9.958067421462133e-06, "loss": 2.3041, "step": 3180},
    {"epoch": 0.41392133877683013, "grad_norm": 0.714450478553772, "learning_rate": 9.956099597120762e-06, "loss": 2.2789, "step": 3200},
    {"epoch": 0.4165083471441853, "grad_norm": 0.7216628193855286, "learning_rate": 9.95408685555436e-06, "loss": 2.3056, "step": 3220},
    {"epoch": 0.4190953555115405, "grad_norm": 0.6629658341407776, "learning_rate": 9.952029215004441e-06, "loss": 2.3001, "step": 3240},
    {"epoch": 0.4216823638788957, "grad_norm": 0.8131959438323975, "learning_rate": 9.949926694119443e-06, "loss": 2.2881, "step": 3260},
    {"epoch": 0.4242693722462509, "grad_norm": 0.7133468389511108, "learning_rate": 9.94777931195455e-06, "loss": 2.2902, "step": 3280},
    {"epoch": 0.42685638061360603, "grad_norm": 0.7825130224227905, "learning_rate": 9.945587087971529e-06, "loss": 2.2829, "step": 3300},
    {"epoch": 0.42944338898096124, "grad_norm": 0.6878073811531067, "learning_rate": 9.943350042038545e-06, "loss": 2.2844, "step": 3320},
    {"epoch": 0.43203039734831644, "grad_norm": 0.7324455976486206, "learning_rate": 9.941068194429992e-06, "loss": 2.2932, "step": 3340},
    {"epoch": 0.4346174057156716, "grad_norm": 0.7330353856086731, "learning_rate": 9.938741565826295e-06, "loss": 2.2611, "step": 3360},
    {"epoch": 0.4372044140830268, "grad_norm": 0.8474377393722534, "learning_rate": 9.936370177313737e-06, "loss": 2.2938, "step": 3380},
    {"epoch": 0.439791422450382, "grad_norm": 0.776228666305542, "learning_rate": 9.933954050384253e-06, "loss": 2.292, "step": 3400},
    {"epoch": 0.4423784308177372, "grad_norm": 0.7257934212684631, "learning_rate": 9.931493206935249e-06, "loss": 2.2964, "step": 3420},
    {"epoch": 0.44496543918509235, "grad_norm": 0.7738561034202576, "learning_rate": 9.928987669269397e-06, "loss": 2.2585, "step": 3440},
    {"epoch": 0.44755244755244755, "grad_norm": 0.6888708472251892, "learning_rate": 9.926437460094431e-06, "loss": 2.281, "step": 3460},
    {"epoch": 0.45013945591980276, "grad_norm": 0.7120715975761414, "learning_rate": 9.923842602522948e-06, "loss": 2.2861, "step": 3480},
    {"epoch": 0.4527264642871579, "grad_norm": 0.770353376865387, "learning_rate": 9.92120312007219e-06, "loss": 2.271, "step": 3500},
    {"epoch": 0.4553134726545131, "grad_norm": 0.753852367401123, "learning_rate": 9.918519036663835e-06, "loss": 2.2592, "step": 3520},
    {"epoch": 0.4579004810218683, "grad_norm": 0.7479904294013977, "learning_rate": 9.915790376623785e-06, "loss": 2.2691, "step": 3540},
    {"epoch": 0.4604874893892235, "grad_norm": 0.7162041068077087, "learning_rate": 9.913017164681936e-06, "loss": 2.2637, "step": 3560},
    {"epoch": 0.46307449775657866, "grad_norm": 0.6859399080276489, "learning_rate": 9.91019942597196e-06, "loss": 2.2424, "step": 3580},
    {"epoch": 0.46566150612393387, "grad_norm": 0.7681860327720642, "learning_rate": 9.907337186031078e-06, "loss": 2.2758, "step": 3600},
    {"epoch": 0.46824851449128907, "grad_norm": 0.7416828274726868, "learning_rate": 9.904430470799826e-06, "loss": 2.2715, "step": 3620},
    {"epoch": 0.4708355228586443, "grad_norm": 0.7957201600074768, "learning_rate": 9.901479306621818e-06, "loss": 2.277, "step": 3640},
    {"epoch": 0.4734225312259994, "grad_norm": 0.71152663230896, "learning_rate": 9.89848372024351e-06, "loss": 2.2831, "step": 3660},
    {"epoch": 0.4760095395933546, "grad_norm": 0.6950436234474182, "learning_rate": 9.89544373881396e-06, "loss": 2.2678, "step": 3680},
    {"epoch": 0.47859654796070983, "grad_norm": 0.7226356267929077, "learning_rate": 9.89235938988458e-06, "loss": 2.2549, "step": 3700},
    {"epoch": 0.481183556328065, "grad_norm": 0.7032837867736816, "learning_rate": 9.88923070140888e-06, "loss": 2.2669, "step": 3720},
    {"epoch": 0.4837705646954202, "grad_norm": 0.749229371547699, "learning_rate": 9.886057701742222e-06, "loss": 2.2421, "step": 3740},
    {"epoch": 0.4863575730627754, "grad_norm": 0.7166919112205505, "learning_rate": 9.882840419641566e-06, "loss": 2.245, "step": 3760},
    {"epoch": 0.4889445814301306, "grad_norm": 0.7276502847671509, "learning_rate": 9.879578884265198e-06, "loss": 2.2542, "step": 3780},
    {"epoch": 0.49153158979748574, "grad_norm": 0.7321860194206238, "learning_rate": 9.876273125172476e-06, "loss": 2.2395, "step": 3800},
    {"epoch": 0.49411859816484094, "grad_norm": 0.6959764361381531, "learning_rate": 9.872923172323559e-06, "loss": 2.2588, "step": 3820},
    {"epoch": 0.49670560653219614, "grad_norm": 0.7237563729286194, "learning_rate": 9.869529056079133e-06, "loss": 2.2463, "step": 3840},
    {"epoch": 0.4992926148995513, "grad_norm": 0.7536144256591797, "learning_rate": 9.866090807200135e-06, "loss": 2.2394, "step": 3860},
    {"epoch": 0.5018796232669065, "grad_norm": 0.6835715770721436, "learning_rate": 9.862608456847484e-06, "loss": 2.2447, "step": 3880},
    {"epoch": 0.5044666316342616, "grad_norm": 0.7627236247062683, "learning_rate": 9.859082036581787e-06, "loss": 2.2727, "step": 3900},
    {"epoch": 0.5070536400016169, "grad_norm": 0.7260853052139282, "learning_rate": 9.855511578363057e-06, "loss": 2.2373, "step": 3920},
    {"epoch": 0.509640648368972, "grad_norm": 0.7366577386856079, "learning_rate": 9.851897114550423e-06, "loss": 2.2583, "step": 3940},
    {"epoch": 0.5122276567363273, "grad_norm": 0.718189001083374, "learning_rate": 9.848238677901844e-06, "loss": 2.2376, "step": 3960},
    {"epoch": 0.5148146651036825, "grad_norm": 0.6988586783409119, "learning_rate": 9.844536301573798e-06, "loss": 2.2413, "step": 3980},
    {"epoch": 0.5174016734710376, "grad_norm": 0.752648115158081, "learning_rate": 9.840790019120993e-06, "loss": 2.2346, "step": 4000},
    {"epoch": 0.5199886818383929, "grad_norm": 0.8000548481941223, "learning_rate": 9.836999864496058e-06, "loss": 2.2365, "step": 4020},
    {"epoch": 0.522575690205748, "grad_norm": 0.7324941158294678, "learning_rate": 9.833165872049235e-06, "loss": 2.2294, "step": 4040},
    {"epoch": 0.5251626985731032, "grad_norm": 0.765869140625, "learning_rate": 9.829288076528071e-06, "loss": 2.2418, "step": 4060},
    {"epoch": 0.5277497069404584, "grad_norm": 0.7222108244895935, "learning_rate": 9.825366513077104e-06, "loss": 2.2292, "step": 4080},
    {"epoch": 0.5303367153078136, "grad_norm": 0.7359380722045898, "learning_rate": 9.821401217237535e-06, "loss": 2.2491, "step": 4100},
    {"epoch": 0.5329237236751687, "grad_norm": 0.7288945317268372, "learning_rate": 9.817392224946916e-06, "loss": 2.2404, "step": 4120},
    {"epoch": 0.535510732042524, "grad_norm": 0.7518214583396912, "learning_rate": 9.813339572538822e-06, "loss": 2.2381, "step": 4140},
    {"epoch": 0.5380977404098791, "grad_norm": 0.753818154335022, "learning_rate": 9.809243296742516e-06, "loss": 2.2236, "step": 4160},
    {"epoch": 0.5406847487772344, "grad_norm": 0.7281918525695801, "learning_rate": 9.805103434682628e-06, "loss": 2.2142, "step": 4180},
    {"epoch": 0.5432717571445895, "grad_norm": 0.7605194449424744, "learning_rate": 9.800920023878803e-06, "loss": 2.2326, "step": 4200},
    {"epoch": 0.5458587655119447, "grad_norm": 0.7266237139701843, "learning_rate": 9.796693102245376e-06, "loss": 2.2144, "step": 4220},
    {"epoch": 0.5484457738792999, "grad_norm": 0.7575150728225708, "learning_rate": 9.792422708091014e-06, "loss": 2.2282, "step": 4240},
    {"epoch": 0.5510327822466551, "grad_norm": 0.7097237706184387, "learning_rate": 9.788108880118383e-06, "loss": 2.2139, "step": 4260},
    {"epoch": 0.5536197906140102, "grad_norm": 0.7074447870254517, "learning_rate": 9.783751657423787e-06, "loss": 2.2169, "step": 4280},
    {"epoch": 0.5562067989813655, "grad_norm": 0.7193735241889954, "learning_rate": 9.779351079496821e-06, "loss": 2.2435, "step": 4300},
    {"epoch": 0.5587938073487206, "grad_norm": 0.7104501128196716, "learning_rate": 9.774907186220005e-06, "loss": 2.198, "step": 4320},
    {"epoch": 0.5613808157160758, "grad_norm": 0.6711876392364502, "learning_rate": 9.770420017868426e-06, "loss": 2.1927, "step": 4340},
    {"epoch": 0.563967824083431, "grad_norm": 0.7272083163261414, "learning_rate": 9.765889615109379e-06, "loss": 2.2437, "step": 4360},
    {"epoch": 0.5665548324507862, "grad_norm": 0.7311263680458069, "learning_rate": 9.761316019001991e-06, "loss": 2.2106, "step": 4380},
    {"epoch": 0.5691418408181413, "grad_norm": 0.7337877154350281, "learning_rate": 9.756699270996848e-06, "loss": 2.2257, "step": 4400},
    {"epoch": 0.5717288491854966, "grad_norm": 0.7337206602096558, "learning_rate": 9.752039412935627e-06, "loss": 2.2066, "step": 4420},
    {"epoch": 0.5743158575528517, "grad_norm": 0.8056983947753906, "learning_rate": 9.74733648705071e-06, "loss": 2.2049, "step": 4440},
    {"epoch": 0.576902865920207, "grad_norm": 0.7203987240791321, "learning_rate": 9.742590535964805e-06, "loss": 2.2279, "step": 4460},
    {"epoch": 0.5794898742875622, "grad_norm": 0.7495117783546448, "learning_rate": 9.737801602690554e-06, "loss": 2.1986, "step": 4480},
    {"epoch": 0.5820768826549173, "grad_norm": 0.7343236207962036, "learning_rate": 9.732969730630153e-06, "loss": 2.2233, "step": 4500},
    {"epoch": 0.5846638910222726, "grad_norm": 0.7727257609367371, "learning_rate": 9.728094963574948e-06, "loss": 2.2042, "step": 4520},
    {"epoch": 0.5872508993896277, "grad_norm": 0.7064406275749207, "learning_rate": 9.723177345705048e-06, "loss": 2.2295, "step": 4540},
    {"epoch": 0.5898379077569829, "grad_norm": 0.7306511998176575, "learning_rate": 9.71821692158892e-06, "loss": 2.1982, "step": 4560},
    {"epoch": 0.5924249161243381, "grad_norm": 0.7717193365097046, "learning_rate": 9.713213736182976e-06, "loss": 2.206, "step": 4580},
    {"epoch": 0.5950119244916933, "grad_norm": 0.7180883288383484, "learning_rate": 9.708167834831183e-06, "loss": 2.2044, "step": 4600},
    {"epoch": 0.5975989328590484, "grad_norm": 0.7017503380775452, "learning_rate": 9.703079263264643e-06, "loss": 2.192, "step": 4620},
    {"epoch": 0.6001859412264037, "grad_norm": 0.7421537637710571, "learning_rate": 9.697948067601176e-06, "loss": 2.199, "step": 4640},
    {"epoch": 0.6027729495937588, "grad_norm": 0.7729679942131042, "learning_rate": 9.692774294344905e-06, "loss": 2.2073, "step": 4660},
    {"epoch": 0.6053599579611141, "grad_norm": 0.7517857551574707, "learning_rate": 9.687557990385836e-06, "loss": 2.1942, "step": 4680},
    {"epoch": 0.6079469663284692, "grad_norm": 0.7258435487747192, "learning_rate": 9.682299202999433e-06, "loss": 2.1916, "step": 4700},
    {"epoch": 0.6105339746958244, "grad_norm": 0.7256997227668762, "learning_rate": 9.676997979846183e-06, "loss": 2.1986, "step": 4720},
    {"epoch": 0.6131209830631796, "grad_norm": 0.7594742774963379, "learning_rate": 9.671654368971176e-06, "loss": 2.1867, "step": 4740},
    {"epoch": 0.6157079914305348, "grad_norm": 0.7546527981758118, "learning_rate": 9.666268418803655e-06, "loss": 2.1999, "step": 4760},
    {"epoch": 0.6182949997978899, "grad_norm": 0.7679339051246643, "learning_rate": 9.660840178156592e-06, "loss": 2.1964, "step": 4780},
    {"epoch": 0.6208820081652452, "grad_norm": 0.7719926834106445, "learning_rate": 9.655369696226235e-06, "loss": 2.1829, "step": 4800},
    {"epoch": 0.6234690165326003, "grad_norm": 0.7456310987472534, "learning_rate": 9.649857022591664e-06, "loss": 2.1725, "step": 4820},
    {"epoch": 0.6260560248999555, "grad_norm": 0.720140814781189, "learning_rate": 9.644302207214346e-06, "loss": 2.1806, "step": 4840},
    {"epoch": 0.6286430332673107, "grad_norm": 0.7672199010848999, "learning_rate": 9.63870530043768e-06, "loss": 2.1921, "step": 4860},
    {"epoch": 0.6312300416346659, "grad_norm": 0.7888000011444092, "learning_rate": 9.633066352986538e-06, "loss": 2.198, "step": 4880},
    {"epoch": 0.6338170500020212, "grad_norm": 0.7024748921394348, "learning_rate": 9.627385415966807e-06, "loss": 2.1579, "step": 4900},
    {"epoch": 0.6364040583693763, "grad_norm": 0.7126362919807434, "learning_rate": 9.62166254086493e-06, "loss": 2.1882, "step": 4920},
    {"epoch": 0.6389910667367315, "grad_norm": 0.6748985052108765, "learning_rate": 9.61589777954743e-06, "loss": 2.1931, "step": 4940},
    {"epoch": 0.6415780751040867, "grad_norm": 0.7744324207305908, "learning_rate": 9.61009118426045e-06, "loss": 2.1823, "step": 4960},
    {"epoch": 0.6441650834714419, "grad_norm": 0.6999046802520752, "learning_rate": 9.604242807629275e-06, "loss": 2.1598, "step": 4980},
    {"epoch": 0.646752091838797, "grad_norm": 0.7126173973083496, "learning_rate": 9.59835270265785e-06, "loss": 2.1661, "step": 5000},
    {"epoch": 0.6493391002061523, "grad_norm": 0.7425235509872437, "learning_rate": 9.592420922728312e-06, "loss": 2.1729, "step": 5020},
    {"epoch": 0.6519261085735074, "grad_norm": 0.735068142414093, "learning_rate": 9.586447521600496e-06, "loss": 2.1734, "step": 5040},
    {"epoch": 0.6545131169408626, "grad_norm": 0.772686779499054, "learning_rate": 9.580432553411446e-06, "loss": 2.1943, "step": 5060},
    {"epoch": 0.6571001253082178, "grad_norm": 0.8012945652008057, "learning_rate": 9.574376072674936e-06, "loss": 2.1678, "step": 5080},
    {"epoch": 0.659687133675573, "grad_norm": 0.7072063684463501, "learning_rate": 9.568278134280966e-06, "loss": 2.1749, "step": 5100},
    {"epoch": 0.6622741420429281, "grad_norm": 0.687140941619873, "learning_rate": 9.562138793495268e-06, "loss": 2.1844, "step": 5120},
    {"epoch": 0.6648611504102834, "grad_norm": 0.7809275984764099, "learning_rate": 9.555958105958805e-06, "loss": 2.1719, "step": 5140},
    {"epoch": 0.6674481587776385, "grad_norm": 0.7361642122268677, "learning_rate": 9.549736127687265e-06, "loss": 2.1706, "step": 5160},
    {"epoch": 0.6700351671449938, "grad_norm": 0.7150685787200928, "learning_rate": 9.543472915070555e-06, "loss": 2.1648, "step": 5180},
    {"epoch": 0.6726221755123489, "grad_norm": 0.7491324543952942, "learning_rate": 9.537168524872292e-06, "loss": 2.156, "step": 5200},
    {"epoch": 0.6752091838797041, "grad_norm": 0.7028157711029053, "learning_rate": 9.530823014229283e-06, "loss": 2.1588, "step": 5220},
    {"epoch": 0.6777961922470593, "grad_norm": 0.7949670553207397, "learning_rate": 9.52443644065101e-06, "loss": 2.1788, "step": 5240},
    {"epoch": 0.6803832006144145, "grad_norm": 0.7003277540206909, "learning_rate": 9.518008862019116e-06, "loss": 2.1917, "step": 5260},
    {"epoch": 0.6829702089817696, "grad_norm": 0.7252909541130066, "learning_rate": 9.511540336586864e-06, "loss": 2.1709, "step": 5280},
    {"epoch": 0.6855572173491249, "grad_norm": 0.7193975448608398, "learning_rate": 9.505030922978626e-06, "loss": 2.185, "step": 5300},
    {"epoch": 0.68814422571648, "grad_norm": 0.7502670288085938, "learning_rate": 9.49848068018934e-06, "loss": 2.1787, "step": 5320},
    {"epoch": 0.6907312340838352, "grad_norm": 0.7460989952087402, "learning_rate": 9.49188966758398e-06, "loss": 2.1557, "step": 5340},
    {"epoch": 0.6933182424511904, "grad_norm": 0.710442066192627, "learning_rate": 9.485257944897021e-06, "loss": 2.1547, "step": 5360},
    {"epoch": 0.6959052508185456, "grad_norm": 0.7363094091415405, "learning_rate": 9.478585572231891e-06, "loss": 2.1473, "step": 5380},
    {"epoch": 0.6984922591859009, "grad_norm": 0.7203119397163391, "learning_rate": 9.47187261006043e-06, "loss": 2.1743, "step": 5400},
    {"epoch": 0.701079267553256, "grad_norm": 0.715248167514801, "learning_rate": 9.465119119222346e-06, "loss": 2.1626, "step": 5420},
    {"epoch": 0.7036662759206112, "grad_norm": 0.7098533511161804, "learning_rate": 9.458325160924648e-06, "loss": 2.1807, "step": 5440},
    {"epoch": 0.7062532842879664, "grad_norm": 0.7714352607727051, "learning_rate": 9.451490796741117e-06, "loss": 2.1517, "step": 5460},
    {"epoch": 0.7088402926553216, "grad_norm": 0.720448911190033, "learning_rate": 9.444616088611718e-06, "loss": 2.1775, "step": 5480},
    {"epoch": 0.7114273010226767, "grad_norm": 0.7580015659332275, "learning_rate": 9.437701098842067e-06, "loss": 2.1529, "step": 5500},
    {"epoch": 0.714014309390032, "grad_norm": 0.727325439453125, "learning_rate": 9.430745890102849e-06, "loss": 2.1654, "step": 5520},
    {"epoch": 0.7166013177573871, "grad_norm": 0.7433022260665894, "learning_rate": 9.423750525429248e-06, "loss": 2.1565, "step": 5540},
    {"epoch": 0.7191883261247423, "grad_norm": 0.728881299495697, "learning_rate": 9.416715068220393e-06, "loss": 2.1734, "step": 5560},
    {"epoch": 0.7217753344920975, "grad_norm": 0.7467840909957886, "learning_rate": 9.409639582238761e-06, "loss": 2.1502, "step": 5580},
    {"epoch": 0.7243623428594527, "grad_norm": 0.7820594310760498, "learning_rate": 9.40252413160962e-06, "loss": 2.1725, "step": 5600},
    {"epoch": 0.7269493512268079, "grad_norm": 0.7077836394309998, "learning_rate": 9.395368780820433e-06, "loss": 2.1508, "step": 5620},
    {"epoch": 0.7295363595941631, "grad_norm": 0.7221850752830505, "learning_rate": 9.388173594720283e-06, "loss": 2.1649, "step": 5640},
    {"epoch": 0.7321233679615182, "grad_norm": 0.7874971628189087, "learning_rate": 9.380938638519274e-06, "loss": 2.158, "step": 5660},
    {"epoch": 0.7347103763288735, "grad_norm": 0.7347155213356018, "learning_rate": 9.373663977787956e-06, "loss": 2.1486, "step": 5680},
    {"epoch": 0.7372973846962286, "grad_norm": 0.7876786589622498, "learning_rate": 9.366349678456717e-06, "loss": 2.1501, "step": 5700},
    {"epoch": 0.7398843930635838, "grad_norm": 0.678352952003479, "learning_rate": 9.35899580681519e-06, "loss": 2.1644, "step": 5720},
    {"epoch": 0.742471401430939, "grad_norm": 0.7279735803604126, "learning_rate": 9.351602429511655e-06, "loss": 2.1508, "step": 5740},
    {"epoch": 0.7450584097982942, "grad_norm": 0.7041738629341125, "learning_rate": 9.344169613552428e-06, "loss": 2.1617, "step": 5760},
    {"epoch": 0.7476454181656493, "grad_norm": 0.750642716884613, "learning_rate": 9.336697426301267e-06, "loss": 2.1409, "step": 5780},
    {"epoch": 0.7502324265330046, "grad_norm": 0.7688580751419067, "learning_rate": 9.329185935478741e-06, "loss": 2.1459, "step": 5800},
    {"epoch": 0.7528194349003597, "grad_norm": 0.704578697681427, "learning_rate": 9.321635209161642e-06, "loss": 2.1417, "step": 5820},
    {"epoch": 0.7554064432677149, "grad_norm": 0.733323872089386, "learning_rate": 9.314045315782339e-06, "loss": 2.1516, "step": 5840},
    {"epoch": 0.7579934516350701, "grad_norm": 0.771930992603302, "learning_rate": 9.306416324128184e-06, "loss": 2.1256, "step": 5860},
    {"epoch": 0.7605804600024253, "grad_norm": 0.6910899877548218, "learning_rate": 9.298748303340871e-06, "loss": 2.1421, "step": 5880},
    {"epoch": 0.7631674683697806, "grad_norm": 0.7119818329811096, "learning_rate": 9.291041322915824e-06, "loss": 2.1631, "step": 5900},
    {"epoch": 0.7657544767371357, "grad_norm": 0.6817770600318909, "learning_rate": 9.283295452701549e-06, "loss": 2.143, "step": 5920},
    {"epoch": 0.7683414851044909, "grad_norm": 0.714541494846344, "learning_rate": 9.275510762899016e-06, "loss": 2.1546, "step": 5940},
    {"epoch": 0.7709284934718461, "grad_norm": 0.6969371438026428, "learning_rate": 9.267687324061016e-06, "loss": 2.1186, "step": 5960},
    {"epoch": 0.7735155018392013, "grad_norm": 0.7325617671012878, "learning_rate": 9.259825207091526e-06, "loss": 2.1363, "step": 5980},
    {"epoch": 0.7761025102065564, "grad_norm": 0.743165135383606, "learning_rate": 9.25192448324506e-06, "loss": 2.1269, "step": 6000},
    {"epoch": 0.7786895185739117, "grad_norm": 0.7538695931434631, "learning_rate": 9.243985224126031e-06, "loss": 2.1237, "step": 6020},
    {"epoch": 0.7812765269412668, "grad_norm": 0.7549970746040344, "learning_rate": 9.236007501688094e-06, "loss": 2.1343, "step": 6040},
    {"epoch": 0.783863535308622, "grad_norm": 0.732627272605896, "learning_rate": 9.2279913882335e-06, "loss": 2.1465, "step": 6060},
    {"epoch": 0.7864505436759772, "grad_norm": 0.7083766460418701, "learning_rate": 9.219936956412436e-06, "loss": 2.1394, "step": 6080},
    {"epoch": 0.7890375520433324, "grad_norm": 0.7239031195640564, "learning_rate": 9.211844279222376e-06, "loss": 2.1386, "step": 6100},
    {"epoch": 0.7916245604106876, "grad_norm": 0.7016878128051758, "learning_rate": 9.2037134300074e-06, "loss": 2.1313, "step": 6120},
    {"epoch": 0.7942115687780428, "grad_norm": 0.6846844553947449, "learning_rate": 9.195544482457555e-06, "loss": 2.1383, "step": 6140},
    {"epoch": 0.7967985771453979, "grad_norm": 0.7442426085472107, "learning_rate": 9.187337510608168e-06, "loss": 2.1177, "step": 6160},
    {"epoch": 0.7993855855127532, "grad_norm": 0.7443544268608093, "learning_rate": 9.179092588839178e-06, "loss": 2.1391, "step": 6180},
    {"epoch": 0.8019725938801083, "grad_norm": 0.7053080201148987, "learning_rate": 9.170809791874468e-06, "loss": 2.1221, "step": 6200},
    {"epoch": 0.8045596022474635, "grad_norm": 0.7665020227432251, "learning_rate": 9.16248919478119e-06, "loss": 2.1116, "step": 6220},
    {"epoch": 0.8071466106148187, "grad_norm": 0.7906709909439087, "learning_rate": 9.154130872969067e-06, "loss": 2.1363, "step": 6240},
    {"epoch": 0.8097336189821739, "grad_norm": 0.6853694915771484, "learning_rate": 9.145734902189733e-06, "loss": 2.149, "step": 6260},
    {"epoch": 0.812320627349529, "grad_norm": 0.7411865592002869, "learning_rate": 9.137301358536032e-06, "loss": 2.1355, "step": 6280},
    {"epoch": 0.8149076357168843, "grad_norm": 0.8013186454772949, "learning_rate": 9.128830318441327e-06, "loss": 2.1175, "step": 6300},
    {"epoch": 0.8174946440842394, "grad_norm": 0.7919278144836426, "learning_rate": 9.120321858678817e-06, "loss": 2.128, "step": 6320},
    {"epoch": 0.8200816524515947, "grad_norm": 0.7355955243110657, "learning_rate": 9.111776056360838e-06, "loss": 2.1253, "step": 6340},
    {"epoch": 0.8226686608189498, "grad_norm": 0.7750183343887329, "learning_rate": 9.103192988938155e-06, "loss": 2.1225, "step": 6360},
    {"epoch": 0.825255669186305, "grad_norm": 0.7022154927253723, "learning_rate": 9.094572734199271e-06, "loss": 2.1193, "step": 6380},
    {"epoch": 0.8278426775536603, "grad_norm": 0.7672535181045532, "learning_rate": 9.085915370269723e-06, "loss": 2.1188, "step": 6400},
    {"epoch": 0.8304296859210154, "grad_norm": 0.7107143998146057, "learning_rate": 9.077220975611363e-06, "loss": 2.1278, "step": 6420},
    {"epoch": 0.8330166942883706, "grad_norm": 0.7380732297897339, "learning_rate": 9.068489629021655e-06, "loss": 2.1374, "step": 6440},
    {"epoch": 0.8356037026557258, "grad_norm": 0.6959198117256165, "learning_rate": 9.05972140963296e-06, "loss": 2.129, "step": 6460},
    {"epoch": 0.838190711023081, "grad_norm": 0.742127001285553, "learning_rate": 9.050916396911818e-06, "loss": 2.1285, "step": 6480},
    {"epoch": 0.8407777193904361, "grad_norm": 0.6720155477523804, "learning_rate": 9.042074670658223e-06, "loss": 2.1172, "step": 6500},
    {"epoch": 0.8433647277577914, "grad_norm": 0.7020736932754517, "learning_rate": 9.033196311004915e-06, "loss": 2.1036, "step": 6520},
    {"epoch": 0.8459517361251465, "grad_norm": 0.746281087398529, "learning_rate": 9.024281398416632e-06, "loss": 2.1183, "step": 6540},
    {"epoch": 0.8485387444925018, "grad_norm": 0.7673128843307495, "learning_rate": 9.015330013689396e-06, "loss": 2.1435, "step": 6560},
    {"epoch": 0.8511257528598569, "grad_norm": 0.7635971903800964, "learning_rate": 9.006342237949782e-06, "loss": 2.1018, "step": 6580},
    {"epoch": 0.8537127612272121, "grad_norm": 0.7365761995315552, "learning_rate": 8.997318152654167e-06, "loss": 2.116, "step": 6600},
    {"epoch": 0.8562997695945673, "grad_norm": 0.7582866549491882, "learning_rate": 8.988257839588011e-06, "loss": 2.1146, "step": 6620},
    {"epoch": 0.8588867779619225, "grad_norm": 0.7569854259490967, "learning_rate": 8.979161380865104e-06, "loss": 2.1156, "step": 6640},
    {"epoch": 0.8614737863292776, "grad_norm": 0.6821621656417847, "learning_rate": 8.970028858926825e-06, "loss": 2.1134, "step": 6660},
    {"epoch": 0.8640607946966329, "grad_norm": 0.7307649850845337, "learning_rate": 8.96086035654139e-06, "loss": 2.124, "step": 6680},
    {"epoch": 0.866647803063988, "grad_norm": 0.7775338292121887, "learning_rate": 8.951655956803118e-06, "loss": 2.1052, "step": 6700},
    {"epoch": 0.8692348114313432, "grad_norm": 0.7425001859664917, "learning_rate": 8.942415743131651e-06, "loss": 2.106, "step": 6720},
    {"epoch": 0.8718218197986984, "grad_norm": 0.7379507422447205, "learning_rate": 8.933139799271229e-06, "loss": 2.0974, "step": 6740},
    {"epoch": 0.8744088281660536, "grad_norm": 0.7932276129722595, "learning_rate": 8.923828209289904e-06, "loss": 2.1257, "step": 6760},
    {"epoch": 0.8769958365334087, "grad_norm": 0.7237013578414917, "learning_rate": 8.914481057578791e-06, "loss": 2.1033, "step": 6780},
    {"epoch": 0.879582844900764, "grad_norm": 0.7310687899589539, "learning_rate": 8.905098428851309e-06, "loss": 2.1194, "step": 6800},
    {"epoch": 0.8821698532681191, "grad_norm": 0.6933685541152954, "learning_rate": 8.8956804081424e-06, "loss": 2.0891, "step": 6820},
    {"epoch": 0.8847568616354744, "grad_norm": 0.7352440357208252, "learning_rate": 8.886227080807762e-06, "loss": 2.1022, "step": 6840},
    {"epoch": 0.8873438700028295, "grad_norm": 0.7073638439178467, "learning_rate": 8.876738532523081e-06, "loss": 2.1065, "step": 6860},
    {"epoch": 0.8899308783701847, "grad_norm": 0.7440693974494934, "learning_rate": 8.867214849283252e-06, "loss": 2.1079, "step": 6880},
    {"epoch": 0.89251788673754, "grad_norm": 0.7651025056838989, "learning_rate": 8.8576561174016e-06, "loss": 2.121, "step": 6900},
    {"epoch": 0.8951048951048951, "grad_norm": 0.7150782346725464, "learning_rate": 8.84806242350909e-06, "loss": 2.1025, "step": 6920},
    {"epoch": 0.8976919034722503, "grad_norm": 0.7346411943435669, "learning_rate": 8.838433854553555e-06, "loss": 2.0959, "step": 6940},
    {"epoch": 0.9002789118396055, "grad_norm": 0.7531387209892273, "learning_rate": 8.828770497798897e-06, "loss": 2.0901, "step": 6960},
    {"epoch": 0.9028659202069607, "grad_norm": 0.7870126366615295, "learning_rate": 8.819072440824303e-06, "loss": 2.1034, "step": 6980},
    {"epoch": 0.9054529285743158, "grad_norm": 0.7796097993850708, "learning_rate": 8.80933977152345e-06, "loss": 2.0825, "step": 7000},
    {"epoch": 0.9080399369416711, "grad_norm": 0.7178963422775269, "learning_rate": 8.799572578103703e-06, "loss": 2.1252, "step": 7020},
    {"epoch": 0.9106269453090262, "grad_norm": 0.7850649952888489, "learning_rate": 8.789770949085321e-06, "loss": 2.0886, "step": 7040},
    {"epoch": 0.9132139536763815, "grad_norm": 0.7312331795692444, "learning_rate": 8.779934973300657e-06, "loss": 2.1015, "step": 7060},
    {"epoch": 0.9158009620437366, "grad_norm": 0.7429481148719788, "learning_rate": 8.770064739893346e-06, "loss": 2.0791, "step": 7080},
    {"epoch": 0.9183879704110918, "grad_norm": 0.70722895860672, "learning_rate": 8.7601603383175e-06, "loss": 2.0992, "step": 7100},
    {"epoch": 0.920974978778447, "grad_norm": 0.7041098475456238, "learning_rate": 8.750221858336902e-06, "loss": 2.1027, "step": 7120},
    {"epoch": 0.9235619871458022, "grad_norm": 0.7348500490188599, "learning_rate": 8.740249390024183e-06, "loss": 2.0899, "step": 7140},
    {"epoch": 0.9261489955131573, "grad_norm": 0.7186647653579712, "learning_rate": 8.730243023760012e-06, "loss": 2.1059, "step": 7160},
    {"epoch": 0.9287360038805126, "grad_norm": 0.7586842775344849, "learning_rate": 8.720202850232281e-06, "loss": 2.0929, "step": 7180},
    {"epoch": 0.9313230122478677, "grad_norm": 0.7751206159591675, "learning_rate": 8.710128960435271e-06, "loss": 2.0961, "step": 7200},
    {"epoch": 0.9339100206152229, "grad_norm": 0.7438105344772339, "learning_rate": 8.700021445668839e-06, "loss": 2.1013, "step": 7220},
    {"epoch": 0.9364970289825781, "grad_norm": 0.7343020439147949, "learning_rate": 8.68988039753758e-06, "loss": 2.0904, "step": 7240},
    {"epoch": 0.9390840373499333, "grad_norm": 0.7382264733314514, "learning_rate": 8.67970590795001e-06, "loss": 2.1016, "step": 7260},
    {"epoch": 0.9416710457172885, "grad_norm": 0.720942497253418, "learning_rate": 8.669498069117721e-06, "loss": 2.1167, "step": 7280},
    {"epoch": 0.9442580540846437, "grad_norm": 0.7112380266189575, "learning_rate": 8.65925697355455e-06, "loss": 2.0981, "step": 7300},
    {"epoch": 0.9468450624519988, "grad_norm": 0.7193992137908936, "learning_rate": 8.648982714075743e-06, "loss": 2.0853, "step": 7320},
    {"epoch": 0.9494320708193541, "grad_norm": 0.7250093221664429, "learning_rate": 8.638675383797106e-06, "loss": 2.0936, "step": 7340},
    {"epoch": 0.9520190791867092, "grad_norm": 0.727990448474884, "learning_rate": 8.628335076134173,
|
"loss": 2.0882, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.9546060875540644, |
|
"grad_norm": 0.7733245491981506, |
|
"learning_rate": 8.617961884801346e-06, |
|
"loss": 2.0799, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.9571930959214197, |
|
"grad_norm": 0.7614827752113342, |
|
"learning_rate": 8.60755590381106e-06, |
|
"loss": 2.0935, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.9597801042887748, |
|
"grad_norm": 0.7480348348617554, |
|
"learning_rate": 8.597117227472915e-06, |
|
"loss": 2.0889, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.96236711265613, |
|
"grad_norm": 0.7127954959869385, |
|
"learning_rate": 8.586645950392835e-06, |
|
"loss": 2.0981, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.9649541210234852, |
|
"grad_norm": 0.6996986865997314, |
|
"learning_rate": 8.576142167472204e-06, |
|
"loss": 2.0856, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.9675411293908404, |
|
"grad_norm": 0.757079005241394, |
|
"learning_rate": 8.565605973907006e-06, |
|
"loss": 2.0873, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.9701281377581955, |
|
"grad_norm": 0.7550200819969177, |
|
"learning_rate": 8.555037465186962e-06, |
|
"loss": 2.0817, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.9727151461255508, |
|
"grad_norm": 0.7152219414710999, |
|
"learning_rate": 8.544436737094672e-06, |
|
"loss": 2.0962, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 0.9753021544929059, |
|
"grad_norm": 0.774013876914978, |
|
"learning_rate": 8.533803885704732e-06, |
|
"loss": 2.0944, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 0.9778891628602612, |
|
"grad_norm": 0.721843421459198, |
|
"learning_rate": 8.523139007382881e-06, |
|
"loss": 2.0849, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 0.9804761712276163, |
|
"grad_norm": 0.7735098600387573, |
|
"learning_rate": 8.51244219878511e-06, |
|
"loss": 2.0906, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 0.9830631795949715, |
|
"grad_norm": 0.7077389359474182, |
|
"learning_rate": 8.501713556856803e-06, |
|
"loss": 2.0665, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 0.9856501879623267, |
|
"grad_norm": 0.7230038046836853, |
|
"learning_rate": 8.490953178831846e-06, |
|
"loss": 2.0766, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 0.9882371963296819, |
|
"grad_norm": 0.7460753321647644, |
|
"learning_rate": 8.480161162231747e-06, |
|
"loss": 2.0556, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 0.990824204697037, |
|
"grad_norm": 0.7653132081031799, |
|
"learning_rate": 8.469337604864759e-06, |
|
"loss": 2.0821, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 0.9934112130643923, |
|
"grad_norm": 0.7243251204490662, |
|
"learning_rate": 8.458482604824988e-06, |
|
"loss": 2.0926, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.9959982214317474, |
|
"grad_norm": 0.7776849865913391, |
|
"learning_rate": 8.447596260491508e-06, |
|
"loss": 2.0925, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 0.9985852297991026, |
|
"grad_norm": 0.7500579953193665, |
|
"learning_rate": 8.436678670527463e-06, |
|
"loss": 2.0736, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 1.0011722381664578, |
|
"grad_norm": 0.8922491669654846, |
|
"learning_rate": 8.42572993387918e-06, |
|
"loss": 2.0694, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 1.003759246533813, |
|
"grad_norm": 0.7387255430221558, |
|
"learning_rate": 8.414750149775269e-06, |
|
"loss": 2.0478, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 1.0063462549011681, |
|
"grad_norm": 0.7570685148239136, |
|
"learning_rate": 8.403739417725718e-06, |
|
"loss": 2.0594, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 1.0089332632685233, |
|
"grad_norm": 0.7447026371955872, |
|
"learning_rate": 8.392697837521006e-06, |
|
"loss": 2.0535, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 1.0115202716358787, |
|
"grad_norm": 0.7841023802757263, |
|
"learning_rate": 8.381625509231185e-06, |
|
"loss": 2.0437, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 1.0141072800032338, |
|
"grad_norm": 0.7826112508773804, |
|
"learning_rate": 8.37052253320497e-06, |
|
"loss": 2.052, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 1.016694288370589, |
|
"grad_norm": 0.7537607550621033, |
|
"learning_rate": 8.359389010068852e-06, |
|
"loss": 2.0726, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 1.019281296737944, |
|
"grad_norm": 0.730627715587616, |
|
"learning_rate": 8.34822504072616e-06, |
|
"loss": 2.0649, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 1.0218683051052992, |
|
"grad_norm": 0.7026150822639465, |
|
"learning_rate": 8.337030726356162e-06, |
|
"loss": 2.0518, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 1.0244553134726546, |
|
"grad_norm": 0.7492315173149109, |
|
"learning_rate": 8.32580616841314e-06, |
|
"loss": 2.0681, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 1.0270423218400098, |
|
"grad_norm": 0.7300956845283508, |
|
"learning_rate": 8.314551468625478e-06, |
|
"loss": 2.0589, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 1.029629330207365, |
|
"grad_norm": 0.7457550764083862, |
|
"learning_rate": 8.303266728994736e-06, |
|
"loss": 2.0464, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 1.03221633857472, |
|
"grad_norm": 0.8049952387809753, |
|
"learning_rate": 8.291952051794721e-06, |
|
"loss": 2.0398, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 1.0348033469420752, |
|
"grad_norm": 0.7218441963195801, |
|
"learning_rate": 8.28060753957057e-06, |
|
"loss": 2.0454, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 1.0373903553094304, |
|
"grad_norm": 0.7822824716567993, |
|
"learning_rate": 8.269233295137811e-06, |
|
"loss": 2.0577, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 1.0399773636767857, |
|
"grad_norm": 0.7328481674194336, |
|
"learning_rate": 8.257829421581442e-06, |
|
"loss": 2.0363, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 1.0425643720441409, |
|
"grad_norm": 0.7289636135101318, |
|
"learning_rate": 8.246396022254983e-06, |
|
"loss": 2.0604, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 1.045151380411496, |
|
"grad_norm": 0.7583228349685669, |
|
"learning_rate": 8.23493320077955e-06, |
|
"loss": 2.0358, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 1.0477383887788512, |
|
"grad_norm": 0.7434881329536438, |
|
"learning_rate": 8.223441061042914e-06, |
|
"loss": 2.042, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 1.0503253971462063, |
|
"grad_norm": 0.7682307362556458, |
|
"learning_rate": 8.211919707198555e-06, |
|
"loss": 2.0631, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 1.0529124055135617, |
|
"grad_norm": 0.7827048897743225, |
|
"learning_rate": 8.200369243664722e-06, |
|
"loss": 2.0541, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 1.0554994138809168, |
|
"grad_norm": 0.7634522914886475, |
|
"learning_rate": 8.188789775123487e-06, |
|
"loss": 2.0401, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 1.058086422248272, |
|
"grad_norm": 0.7598258852958679, |
|
"learning_rate": 8.177181406519794e-06, |
|
"loss": 2.0394, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 1.0606734306156271, |
|
"grad_norm": 0.7735849022865295, |
|
"learning_rate": 8.165544243060507e-06, |
|
"loss": 2.0442, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 1.0632604389829823, |
|
"grad_norm": 0.742406964302063, |
|
"learning_rate": 8.153878390213464e-06, |
|
"loss": 2.0365, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 1.0658474473503374, |
|
"grad_norm": 0.7898341417312622, |
|
"learning_rate": 8.142183953706506e-06, |
|
"loss": 2.0591, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 1.0684344557176928, |
|
"grad_norm": 0.7791109681129456, |
|
"learning_rate": 8.130461039526538e-06, |
|
"loss": 2.0352, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 1.071021464085048, |
|
"grad_norm": 0.6895206570625305, |
|
"learning_rate": 8.118709753918553e-06, |
|
"loss": 2.0434, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 1.073608472452403, |
|
"grad_norm": 0.7232005000114441, |
|
"learning_rate": 8.106930203384675e-06, |
|
"loss": 2.029, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 1.0761954808197582, |
|
"grad_norm": 0.7908313870429993, |
|
"learning_rate": 8.095122494683196e-06, |
|
"loss": 2.0635, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 1.0787824891871134, |
|
"grad_norm": 0.7397485971450806, |
|
"learning_rate": 8.083286734827605e-06, |
|
"loss": 2.0378, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 1.0813694975544688, |
|
"grad_norm": 0.7352006435394287, |
|
"learning_rate": 8.07142303108562e-06, |
|
"loss": 2.0416, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 1.083956505921824, |
|
"grad_norm": 0.7397055625915527, |
|
"learning_rate": 8.059531490978212e-06, |
|
"loss": 2.0494, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 1.086543514289179, |
|
"grad_norm": 0.7299455404281616, |
|
"learning_rate": 8.047612222278639e-06, |
|
"loss": 2.0602, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 1.0891305226565342, |
|
"grad_norm": 0.7375478744506836, |
|
"learning_rate": 8.035665333011453e-06, |
|
"loss": 2.0331, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 1.0917175310238894, |
|
"grad_norm": 0.7608847618103027, |
|
"learning_rate": 8.023690931451546e-06, |
|
"loss": 2.0488, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 1.0943045393912445, |
|
"grad_norm": 0.7570823431015015, |
|
"learning_rate": 8.011689126123145e-06, |
|
"loss": 2.0438, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 1.0968915477585999, |
|
"grad_norm": 0.71818608045578, |
|
"learning_rate": 7.99966002579884e-06, |
|
"loss": 2.0358, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 1.099478556125955, |
|
"grad_norm": 0.7376600503921509, |
|
"learning_rate": 7.987603739498595e-06, |
|
"loss": 2.0506, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 1.1020655644933102, |
|
"grad_norm": 0.7798555493354797, |
|
"learning_rate": 7.975520376488764e-06, |
|
"loss": 2.0435, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 1.1046525728606653, |
|
"grad_norm": 0.7355314493179321, |
|
"learning_rate": 7.963410046281097e-06, |
|
"loss": 2.0353, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 1.1072395812280205, |
|
"grad_norm": 0.7146715521812439, |
|
"learning_rate": 7.951272858631745e-06, |
|
"loss": 2.0435, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 1.1098265895953756, |
|
"grad_norm": 0.7580558657646179, |
|
"learning_rate": 7.939108923540272e-06, |
|
"loss": 2.0284, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 1.112413597962731, |
|
"grad_norm": 0.7931268811225891, |
|
"learning_rate": 7.926918351248654e-06, |
|
"loss": 2.0384, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 1.1150006063300861, |
|
"grad_norm": 0.7423412203788757, |
|
"learning_rate": 7.914701252240278e-06, |
|
"loss": 2.0302, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 1.1175876146974413, |
|
"grad_norm": 0.7477630972862244, |
|
"learning_rate": 7.902457737238946e-06, |
|
"loss": 2.0271, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 1.1201746230647964, |
|
"grad_norm": 0.7558584809303284, |
|
"learning_rate": 7.89018791720787e-06, |
|
"loss": 2.0456, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 1.1227616314321516, |
|
"grad_norm": 0.7822209596633911, |
|
"learning_rate": 7.877891903348658e-06, |
|
"loss": 2.0122, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 1.125348639799507, |
|
"grad_norm": 0.7526381611824036, |
|
"learning_rate": 7.865569807100321e-06, |
|
"loss": 2.0487, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 1.127935648166862, |
|
"grad_norm": 0.7704318761825562, |
|
"learning_rate": 7.85322174013825e-06, |
|
"loss": 2.0251, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 1.1305226565342172, |
|
"grad_norm": 0.725405752658844, |
|
"learning_rate": 7.840847814373213e-06, |
|
"loss": 2.0538, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 1.1331096649015724, |
|
"grad_norm": 0.7347708940505981, |
|
"learning_rate": 7.828448141950334e-06, |
|
"loss": 2.0307, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 1.1356966732689275, |
|
"grad_norm": 0.7331801652908325, |
|
"learning_rate": 7.81602283524808e-06, |
|
"loss": 2.0442, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 1.138283681636283, |
|
"grad_norm": 0.7498618364334106, |
|
"learning_rate": 7.803572006877241e-06, |
|
"loss": 2.0394, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 1.140870690003638, |
|
"grad_norm": 0.8074409365653992, |
|
"learning_rate": 7.791095769679915e-06, |
|
"loss": 2.0308, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 1.1434576983709932, |
|
"grad_norm": 0.7502855062484741, |
|
"learning_rate": 7.778594236728474e-06, |
|
"loss": 2.0259, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 1.1460447067383484, |
|
"grad_norm": 0.7721311450004578, |
|
"learning_rate": 7.766067521324552e-06, |
|
"loss": 2.0348, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 1.1486317151057035, |
|
"grad_norm": 0.8015813231468201, |
|
"learning_rate": 7.753515736998008e-06, |
|
"loss": 2.0174, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 1.1512187234730586, |
|
"grad_norm": 0.7582475543022156, |
|
"learning_rate": 7.740938997505902e-06, |
|
"loss": 2.0228, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 1.153805731840414, |
|
"grad_norm": 0.7484523057937622, |
|
"learning_rate": 7.728337416831462e-06, |
|
"loss": 2.0324, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 1.1563927402077692, |
|
"grad_norm": 0.7139847874641418, |
|
"learning_rate": 7.715711109183057e-06, |
|
"loss": 2.0391, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 1.1589797485751243, |
|
"grad_norm": 0.7828614711761475, |
|
"learning_rate": 7.703060188993152e-06, |
|
"loss": 2.0075, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 1.1615667569424795, |
|
"grad_norm": 0.7387328743934631, |
|
"learning_rate": 7.690384770917279e-06, |
|
"loss": 2.0245, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 1.1641537653098346, |
|
"grad_norm": 0.7702174186706543, |
|
"learning_rate": 7.677684969832991e-06, |
|
"loss": 2.0268, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 1.1667407736771898, |
|
"grad_norm": 0.7316907048225403, |
|
"learning_rate": 7.664960900838827e-06, |
|
"loss": 2.0245, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 1.1693277820445451, |
|
"grad_norm": 0.7085281610488892, |
|
"learning_rate": 7.65221267925327e-06, |
|
"loss": 2.0175, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 1.1719147904119003, |
|
"grad_norm": 0.7116014361381531, |
|
"learning_rate": 7.639440420613691e-06, |
|
"loss": 2.0249, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 1.1745017987792554, |
|
"grad_norm": 0.7048114538192749, |
|
"learning_rate": 7.626644240675316e-06, |
|
"loss": 2.0059, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 1.1770888071466106, |
|
"grad_norm": 0.7138361930847168, |
|
"learning_rate": 7.613824255410165e-06, |
|
"loss": 2.0408, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 1.1796758155139657, |
|
"grad_norm": 0.7742594480514526, |
|
"learning_rate": 7.6009805810060126e-06, |
|
"loss": 2.0221, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 1.182262823881321, |
|
"grad_norm": 0.8150427341461182, |
|
"learning_rate": 7.5881133338653215e-06, |
|
"loss": 2.0189, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 1.1848498322486762, |
|
"grad_norm": 0.7460334300994873, |
|
"learning_rate": 7.575222630604198e-06, |
|
"loss": 2.0397, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 1.1874368406160314, |
|
"grad_norm": 0.7613769769668579, |
|
"learning_rate": 7.56230858805133e-06, |
|
"loss": 2.0386, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 1.1900238489833865, |
|
"grad_norm": 0.7651441693305969, |
|
"learning_rate": 7.549371323246934e-06, |
|
"loss": 2.0433, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 1.1926108573507417, |
|
"grad_norm": 0.810110330581665, |
|
"learning_rate": 7.536410953441685e-06, |
|
"loss": 2.0286, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 1.195197865718097, |
|
"grad_norm": 0.7386390566825867, |
|
"learning_rate": 7.523427596095663e-06, |
|
"loss": 2.0062, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 1.1977848740854522, |
|
"grad_norm": 0.7521648406982422, |
|
"learning_rate": 7.510421368877281e-06, |
|
"loss": 2.0072, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 1.2003718824528073, |
|
"grad_norm": 0.7483949661254883, |
|
"learning_rate": 7.497392389662225e-06, |
|
"loss": 2.0281, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 1.2029588908201625, |
|
"grad_norm": 0.7583929300308228, |
|
"learning_rate": 7.484340776532382e-06, |
|
"loss": 2.0402, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 1.2055458991875176, |
|
"grad_norm": 0.8147648572921753, |
|
"learning_rate": 7.471266647774771e-06, |
|
"loss": 2.013, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 1.2081329075548728, |
|
"grad_norm": 0.7282470464706421, |
|
"learning_rate": 7.458170121880468e-06, |
|
"loss": 2.0124, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 1.210719915922228, |
|
"grad_norm": 0.7271668314933777, |
|
"learning_rate": 7.445051317543538e-06, |
|
"loss": 2.0059, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 1.2133069242895833, |
|
"grad_norm": 0.7665330171585083, |
|
"learning_rate": 7.4319103536599536e-06, |
|
"loss": 2.0307, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 1.2158939326569385, |
|
"grad_norm": 0.7498813271522522, |
|
"learning_rate": 7.418747349326523e-06, |
|
"loss": 2.0086, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 1.2184809410242936, |
|
"grad_norm": 0.7305240035057068, |
|
"learning_rate": 7.4055624238398014e-06, |
|
"loss": 2.0019, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 1.2210679493916488, |
|
"grad_norm": 0.7301153540611267, |
|
"learning_rate": 7.392355696695023e-06, |
|
"loss": 2.0062, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 1.223654957759004, |
|
"grad_norm": 0.740593433380127, |
|
"learning_rate": 7.379127287585005e-06, |
|
"loss": 2.0055, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 1.2262419661263593, |
|
"grad_norm": 0.7132686972618103, |
|
"learning_rate": 7.365877316399072e-06, |
|
"loss": 2.0229, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 1.2288289744937144, |
|
"grad_norm": 0.7279082536697388, |
|
"learning_rate": 7.3526059032219655e-06, |
|
"loss": 2.009, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 1.2314159828610696, |
|
"grad_norm": 0.7989427447319031, |
|
"learning_rate": 7.339313168332756e-06, |
|
"loss": 2.0126, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 1.2340029912284247, |
|
"grad_norm": 0.7120375633239746, |
|
"learning_rate": 7.3259992322037525e-06, |
|
"loss": 2.0057, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 1.2365899995957799, |
|
"grad_norm": 0.8398416638374329, |
|
"learning_rate": 7.312664215499411e-06, |
|
"loss": 2.0028, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 1.2391770079631352, |
|
"grad_norm": 0.7388201355934143, |
|
"learning_rate": 7.299308239075243e-06, |
|
"loss": 2.0111, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 1.2417640163304904, |
|
"grad_norm": 0.707332193851471, |
|
"learning_rate": 7.285931423976717e-06, |
|
"loss": 1.9917, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 1.2443510246978455, |
|
"grad_norm": 0.7708361744880676, |
|
"learning_rate": 7.272533891438164e-06, |
|
"loss": 2.0042, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 1.2469380330652007, |
|
"grad_norm": 0.7365394830703735, |
|
"learning_rate": 7.259115762881673e-06, |
|
"loss": 2.0061, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 1.2495250414325558, |
|
"grad_norm": 0.7249080538749695, |
|
"learning_rate": 7.245677159916002e-06, |
|
"loss": 2.014, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 1.2521120497999112, |
|
"grad_norm": 0.7352914810180664, |
|
"learning_rate": 7.232218204335465e-06, |
|
"loss": 2.0171, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 1.2546990581672661, |
|
"grad_norm": 0.7830759286880493, |
|
"learning_rate": 7.218739018118831e-06, |
|
"loss": 2.0057, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 1.2572860665346215, |
|
"grad_norm": 0.730114758014679, |
|
"learning_rate": 7.205239723428224e-06, |
|
"loss": 2.0264, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 1.2598730749019766, |
|
"grad_norm": 0.7489242553710938, |
|
"learning_rate": 7.1917204426080065e-06, |
|
"loss": 1.9886, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 1.2624600832693318, |
|
"grad_norm": 0.7818061709403992, |
|
"learning_rate": 7.17818129818368e-06, |
|
"loss": 2.0001, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 1.265047091636687, |
|
"grad_norm": 0.733417809009552, |
|
"learning_rate": 7.164622412860766e-06, |
|
"loss": 2.012, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 1.267634100004042, |
|
"grad_norm": 0.76729816198349, |
|
"learning_rate": 7.151043909523702e-06, |
|
"loss": 2.0066, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 1.2702211083713975, |
|
"grad_norm": 0.814164936542511, |
|
"learning_rate": 7.137445911234724e-06, |
|
"loss": 2.0132, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 1.2728081167387526, |
|
"grad_norm": 0.750637948513031, |
|
"learning_rate": 7.1238285412327456e-06, |
|
"loss": 2.0161, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 1.2753951251061078, |
|
"grad_norm": 0.7567455768585205, |
|
"learning_rate": 7.110191922932252e-06, |
|
"loss": 2.0201, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 1.277982133473463, |
|
"grad_norm": 0.7351866364479065, |
|
"learning_rate": 7.0965361799221756e-06, |
|
"loss": 2.0183, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 1.280569141840818, |
|
"grad_norm": 0.7858896851539612, |
|
"learning_rate": 7.082861435964774e-06, |
|
"loss": 2.016, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 1.2831561502081734, |
|
"grad_norm": 0.7436255216598511, |
|
"learning_rate": 7.069167814994515e-06, |
|
"loss": 2.0116, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 1.2857431585755286, |
|
"grad_norm": 0.7635684609413147, |
|
"learning_rate": 7.0554554411169414e-06, |
|
"loss": 2.0172, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 1.2883301669428837, |
|
"grad_norm": 0.759364902973175, |
|
"learning_rate": 7.041724438607562e-06, |
|
"loss": 1.9951, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 1.2909171753102389, |
|
"grad_norm": 0.7556828856468201, |
|
"learning_rate": 7.027974931910714e-06, |
|
"loss": 2.0098, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 1.293504183677594, |
|
"grad_norm": 0.7476949095726013, |
|
"learning_rate": 7.014207045638437e-06, |
|
"loss": 2.0051, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 1.2960911920449494, |
|
"grad_norm": 0.7407810688018799, |
|
"learning_rate": 7.000420904569348e-06, |
|
"loss": 2.0138, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 1.2986782004123045, |
|
"grad_norm": 0.7346110343933105, |
|
"learning_rate": 6.986616633647505e-06, |
|
"loss": 2.0077, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 1.3012652087796597, |
|
"grad_norm": 0.7659201622009277, |
|
"learning_rate": 6.9727943579812785e-06, |
|
"loss": 1.9878, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 1.3038522171470148, |
|
"grad_norm": 0.7628219723701477, |
|
"learning_rate": 6.958954202842216e-06, |
|
"loss": 1.9814, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 1.30643922551437, |
|
"grad_norm": 0.7482396960258484, |
|
"learning_rate": 6.945096293663908e-06, |
|
"loss": 2.0132, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 1.3090262338817253, |
|
"grad_norm": 0.7802220582962036, |
|
"learning_rate": 6.931220756040848e-06, |
|
"loss": 2.0065, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 1.3116132422490803, |
|
"grad_norm": 0.8256868124008179, |
|
"learning_rate": 6.917327715727299e-06, |
|
"loss": 1.9969, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 1.3142002506164356, |
|
"grad_norm": 0.7396267652511597, |
|
"learning_rate": 6.903417298636148e-06, |
|
"loss": 1.9911, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 1.3167872589837908, |
|
"grad_norm": 0.7678589224815369, |
|
"learning_rate": 6.889489630837772e-06, |
|
"loss": 2.009, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 1.319374267351146, |
|
"grad_norm": 0.7238781452178955, |
|
"learning_rate": 6.875544838558888e-06, |
|
"loss": 2.0097, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 1.321961275718501, |
|
"grad_norm": 0.748767614364624, |
|
"learning_rate": 6.861583048181416e-06, |
|
"loss": 1.9943, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 1.3245482840858562, |
|
"grad_norm": 0.7102675437927246, |
|
"learning_rate": 6.8476043862413255e-06, |
|
"loss": 1.9959, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 1.3271352924532116, |
|
"grad_norm": 0.7931022047996521, |
|
"learning_rate": 6.833608979427497e-06, |
|
"loss": 2.0084, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 1.3297223008205667, |
|
"grad_norm": 0.7266056537628174, |
|
"learning_rate": 6.8195969545805704e-06, |
|
"loss": 1.9942, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 1.332309309187922, |
|
"grad_norm": 0.7871178984642029, |
|
"learning_rate": 6.805568438691792e-06, |
|
"loss": 2.02, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 1.334896317555277, |
|
"grad_norm": 0.7746707201004028, |
|
"learning_rate": 6.791523558901869e-06, |
|
"loss": 2.003, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 1.3374833259226322, |
|
"grad_norm": 0.7147629261016846, |
|
"learning_rate": 6.777462442499814e-06, |
|
"loss": 1.9881, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 1.3400703342899876, |
|
"grad_norm": 0.75360107421875, |
|
"learning_rate": 6.763385216921794e-06, |
|
"loss": 2.0068, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 1.3426573426573427, |
|
"grad_norm": 0.7157790660858154, |
|
"learning_rate": 6.749292009749971e-06, |
|
"loss": 1.9808, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 1.3452443510246979, |
|
"grad_norm": 0.7485711574554443, |
|
"learning_rate": 6.73518294871135e-06, |
|
"loss": 2.0038, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 1.347831359392053, |
|
"grad_norm": 0.7876157760620117, |
|
"learning_rate": 6.721058161676621e-06, |
|
"loss": 1.9937, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 1.3504183677594082, |
|
"grad_norm": 0.7066450119018555, |
|
"learning_rate": 6.706917776658997e-06, |
|
"loss": 1.9937, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 1.3530053761267635, |
|
"grad_norm": 0.7409518957138062, |
|
"learning_rate": 6.692761921813057e-06, |
|
"loss": 1.9938, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 1.3555923844941187, |
|
"grad_norm": 0.7196327447891235, |
|
"learning_rate": 6.678590725433583e-06, |
|
"loss": 1.9705, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 1.3581793928614738, |
|
"grad_norm": 0.7340644598007202, |
|
"learning_rate": 6.664404315954397e-06, |
|
"loss": 1.9975, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 1.360766401228829, |
|
"grad_norm": 0.7806727886199951, |
|
"learning_rate": 6.6502028219472e-06, |
|
"loss": 1.9754, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 1.3633534095961841, |
|
"grad_norm": 0.7246034741401672, |
|
"learning_rate": 6.635986372120401e-06, |
|
"loss": 1.9734, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 1.3659404179635395, |
|
"grad_norm": 0.795344352722168, |
|
"learning_rate": 6.621755095317957e-06, |
|
"loss": 1.9765, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 1.3685274263308944, |
|
"grad_norm": 0.7736918330192566, |
|
"learning_rate": 6.6075091205181986e-06, |
|
"loss": 1.9954, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 1.3711144346982498, |
|
"grad_norm": 0.7415809035301208, |
|
"learning_rate": 6.59324857683267e-06, |
|
"loss": 1.9701, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 1.373701443065605, |
|
"grad_norm": 0.764735758304596, |
|
"learning_rate": 6.578973593504947e-06, |
|
"loss": 1.9956, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 1.37628845143296, |
|
"grad_norm": 0.7368749976158142, |
|
"learning_rate": 6.564684299909476e-06, |
|
"loss": 1.9679, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 1.3788754598003152, |
|
"grad_norm": 0.7555897235870361, |
|
"learning_rate": 6.550380825550397e-06, |
|
"loss": 1.9974, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 1.3814624681676704, |
|
"grad_norm": 0.7471804618835449, |
|
"learning_rate": 6.536063300060371e-06, |
|
"loss": 2.0033, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 1.3840494765350257, |
|
"grad_norm": 0.7835204005241394, |
|
"learning_rate": 6.521731853199406e-06, |
|
"loss": 1.9812, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 1.386636484902381, |
|
"grad_norm": 0.8036173582077026, |
|
"learning_rate": 6.507386614853674e-06, |
|
"loss": 2.0038, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 1.389223493269736, |
|
"grad_norm": 0.7817917466163635, |
|
"learning_rate": 6.493027715034346e-06, |
|
"loss": 1.9794, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 1.3918105016370912, |
|
"grad_norm": 0.7393909692764282, |
|
"learning_rate": 6.478655283876404e-06, |
|
"loss": 1.9752, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 1.3943975100044463, |
|
"grad_norm": 0.7540735006332397, |
|
"learning_rate": 6.464269451637464e-06, |
|
"loss": 1.9719, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 1.3969845183718017, |
|
"grad_norm": 0.7721074819564819, |
|
"learning_rate": 6.449870348696599e-06, |
|
"loss": 1.99, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 1.3995715267391569, |
|
"grad_norm": 0.746705949306488, |
|
"learning_rate": 6.435458105553154e-06, |
|
"loss": 1.9967, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 1.402158535106512, |
|
"grad_norm": 0.7043789029121399, |
|
"learning_rate": 6.421032852825561e-06, |
|
"loss": 1.9606, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 1.4047455434738672, |
|
"grad_norm": 0.7369928359985352, |
|
"learning_rate": 6.4065947212501614e-06, |
|
"loss": 1.9975, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 1.4073325518412223, |
|
"grad_norm": 0.7037030458450317, |
|
"learning_rate": 6.392143841680016e-06, |
|
"loss": 1.9852, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 1.4099195602085777, |
|
"grad_norm": 0.7750017642974854, |
|
"learning_rate": 6.377680345083721e-06, |
|
"loss": 1.9737, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 1.4125065685759328, |
|
"grad_norm": 0.7891290783882141, |
|
"learning_rate": 6.3632043625442244e-06, |
|
"loss": 1.9869, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 1.415093576943288, |
|
"grad_norm": 0.7484495043754578, |
|
"learning_rate": 6.3487160252576275e-06, |
|
"loss": 1.9903, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 1.4176805853106431, |
|
"grad_norm": 0.7297314405441284, |
|
"learning_rate": 6.33421546453201e-06, |
|
"loss": 1.9951, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 1.4202675936779983, |
|
"grad_norm": 0.7458155751228333, |
|
"learning_rate": 6.319702811786228e-06, |
|
"loss": 1.9877, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 1.4228546020453534, |
|
"grad_norm": 0.7403327822685242, |
|
"learning_rate": 6.305178198548732e-06, |
|
"loss": 1.9762, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 1.4254416104127086, |
|
"grad_norm": 0.7428018450737, |
|
"learning_rate": 6.290641756456365e-06, |
|
"loss": 1.992, |
|
"step": 11020 |
|
}, |
|
{ |
|
"epoch": 1.428028618780064, |
|
"grad_norm": 0.7310479283332825, |
|
"learning_rate": 6.2760936172531815e-06, |
|
"loss": 1.9693, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 1.430615627147419, |
|
"grad_norm": 0.7689222693443298, |
|
"learning_rate": 6.261533912789244e-06, |
|
"loss": 1.9612, |
|
"step": 11060 |
|
}, |
|
{ |
|
"epoch": 1.4332026355147742, |
|
"grad_norm": 0.7754133343696594, |
|
"learning_rate": 6.246962775019431e-06, |
|
"loss": 1.9787, |
|
"step": 11080 |
|
}, |
|
{ |
|
"epoch": 1.4357896438821294, |
|
"grad_norm": 0.7548643946647644, |
|
"learning_rate": 6.232380336002241e-06, |
|
"loss": 1.9772, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 1.4383766522494845, |
|
"grad_norm": 0.8060590624809265, |
|
"learning_rate": 6.217786727898595e-06, |
|
"loss": 1.9632, |
|
"step": 11120 |
|
}, |
|
{ |
|
"epoch": 1.44096366061684, |
|
"grad_norm": 0.7445610165596008, |
|
"learning_rate": 6.203182082970643e-06, |
|
"loss": 1.9693, |
|
"step": 11140 |
|
}, |
|
{ |
|
"epoch": 1.443550668984195, |
|
"grad_norm": 0.84158855676651, |
|
"learning_rate": 6.1885665335805614e-06, |
|
"loss": 1.9776, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 1.4461376773515502, |
|
"grad_norm": 0.7844957113265991, |
|
"learning_rate": 6.173940212189351e-06, |
|
"loss": 1.9969, |
|
"step": 11180 |
|
}, |
|
{ |
|
"epoch": 1.4487246857189053, |
|
"grad_norm": 0.7566760182380676, |
|
"learning_rate": 6.159303251355641e-06, |
|
"loss": 1.9723, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 1.4513116940862605, |
|
"grad_norm": 0.7104907035827637, |
|
"learning_rate": 6.144655783734485e-06, |
|
"loss": 1.9766, |
|
"step": 11220 |
|
}, |
|
{ |
|
"epoch": 1.4538987024536159, |
|
"grad_norm": 0.6929414868354797, |
|
"learning_rate": 6.129997942076164e-06, |
|
"loss": 1.9835, |
|
"step": 11240 |
|
}, |
|
{ |
|
"epoch": 1.456485710820971, |
|
"grad_norm": 0.7387101054191589, |
|
"learning_rate": 6.115329859224974e-06, |
|
"loss": 1.9799, |
|
"step": 11260 |
|
}, |
|
{ |
|
"epoch": 1.4590727191883262, |
|
"grad_norm": 0.7652817964553833, |
|
"learning_rate": 6.100651668118029e-06, |
|
"loss": 1.9742, |
|
"step": 11280 |
|
}, |
|
{ |
|
"epoch": 1.4616597275556813, |
|
"grad_norm": 0.7251682877540588, |
|
"learning_rate": 6.085963501784057e-06, |
|
"loss": 1.9761, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 1.4642467359230364, |
|
"grad_norm": 0.7064483761787415, |
|
"learning_rate": 6.071265493342188e-06, |
|
"loss": 1.9839, |
|
"step": 11320 |
|
}, |
|
{ |
|
"epoch": 1.4668337442903918, |
|
"grad_norm": 0.759384036064148, |
|
"learning_rate": 6.056557776000751e-06, |
|
"loss": 1.9927, |
|
"step": 11340 |
|
}, |
|
{ |
|
"epoch": 1.4694207526577467, |
|
"grad_norm": 0.7314841747283936, |
|
"learning_rate": 6.041840483056068e-06, |
|
"loss": 1.9805, |
|
"step": 11360 |
|
}, |
|
{ |
|
"epoch": 1.4720077610251021, |
|
"grad_norm": 0.7411664128303528, |
|
"learning_rate": 6.0271137478912475e-06, |
|
"loss": 1.966, |
|
"step": 11380 |
|
}, |
|
{ |
|
"epoch": 1.4745947693924573, |
|
"grad_norm": 0.725292444229126, |
|
"learning_rate": 6.01237770397497e-06, |
|
"loss": 1.9778, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 1.4771817777598124, |
|
"grad_norm": 0.7571858763694763, |
|
"learning_rate": 5.997632484860281e-06, |
|
"loss": 1.9805, |
|
"step": 11420 |
|
}, |
|
{ |
|
"epoch": 1.4797687861271676, |
|
"grad_norm": 0.7663134932518005, |
|
"learning_rate": 5.982878224183384e-06, |
|
"loss": 1.9793, |
|
"step": 11440 |
|
}, |
|
{ |
|
"epoch": 1.4823557944945227, |
|
"grad_norm": 0.8204968571662903, |
|
"learning_rate": 5.968115055662424e-06, |
|
"loss": 1.9687, |
|
"step": 11460 |
|
}, |
|
{ |
|
"epoch": 1.484942802861878, |
|
"grad_norm": 0.757935643196106, |
|
"learning_rate": 5.953343113096277e-06, |
|
"loss": 1.9599, |
|
"step": 11480 |
|
}, |
|
{ |
|
"epoch": 1.4875298112292332, |
|
"grad_norm": 0.768320620059967, |
|
"learning_rate": 5.938562530363341e-06, |
|
"loss": 1.9853, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 1.4901168195965884, |
|
"grad_norm": 0.7116501331329346, |
|
"learning_rate": 5.9237734414203185e-06, |
|
"loss": 1.973, |
|
"step": 11520 |
|
}, |
|
{ |
|
"epoch": 1.4927038279639435, |
|
"grad_norm": 0.7462672591209412, |
|
"learning_rate": 5.9089759803010035e-06, |
|
"loss": 1.97, |
|
"step": 11540 |
|
}, |
|
{ |
|
"epoch": 1.4952908363312987, |
|
"grad_norm": 0.7260856628417969, |
|
"learning_rate": 5.894170281115071e-06, |
|
"loss": 1.9761, |
|
"step": 11560 |
|
}, |
|
{ |
|
"epoch": 1.497877844698654, |
|
"grad_norm": 0.754158079624176, |
|
"learning_rate": 5.879356478046849e-06, |
|
"loss": 1.9823, |
|
"step": 11580 |
|
}, |
|
{ |
|
"epoch": 1.5004648530660092, |
|
"grad_norm": 0.7565730810165405, |
|
"learning_rate": 5.864534705354123e-06, |
|
"loss": 1.9782, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 1.5030518614333643, |
|
"grad_norm": 0.7671797275543213, |
|
"learning_rate": 5.849705097366898e-06, |
|
"loss": 1.9611, |
|
"step": 11620 |
|
}, |
|
{ |
|
"epoch": 1.5056388698007195, |
|
"grad_norm": 0.7473678588867188, |
|
"learning_rate": 5.834867788486194e-06, |
|
"loss": 1.9544, |
|
"step": 11640 |
|
}, |
|
{ |
|
"epoch": 1.5082258781680746, |
|
"grad_norm": 0.7111749649047852, |
|
"learning_rate": 5.8200229131828225e-06, |
|
"loss": 1.9661, |
|
"step": 11660 |
|
}, |
|
{ |
|
"epoch": 1.51081288653543, |
|
"grad_norm": 0.7859391570091248, |
|
"learning_rate": 5.805170605996173e-06, |
|
"loss": 1.9493, |
|
"step": 11680 |
|
}, |
|
{ |
|
"epoch": 1.513399894902785, |
|
"grad_norm": 0.7364257574081421, |
|
"learning_rate": 5.7903110015329886e-06, |
|
"loss": 1.9703, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 1.5159869032701403, |
|
"grad_norm": 0.7579001784324646, |
|
"learning_rate": 5.775444234466145e-06, |
|
"loss": 1.9656, |
|
"step": 11720 |
|
}, |
|
{ |
|
"epoch": 1.5185739116374954, |
|
"grad_norm": 0.7483280897140503, |
|
"learning_rate": 5.76057043953344e-06, |
|
"loss": 1.9785, |
|
"step": 11740 |
|
}, |
|
{ |
|
"epoch": 1.5211609200048506, |
|
"grad_norm": 0.7101817727088928, |
|
"learning_rate": 5.745689751536355e-06, |
|
"loss": 1.9794, |
|
"step": 11760 |
|
}, |
|
{ |
|
"epoch": 1.523747928372206, |
|
"grad_norm": 0.7399246096611023, |
|
"learning_rate": 5.730802305338852e-06, |
|
"loss": 1.9798, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 1.5263349367395609, |
|
"grad_norm": 0.7751443982124329, |
|
"learning_rate": 5.715908235866139e-06, |
|
"loss": 1.9586, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 1.5289219451069163, |
|
"grad_norm": 0.7285624742507935, |
|
"learning_rate": 5.701007678103449e-06, |
|
"loss": 1.9712, |
|
"step": 11820 |
|
}, |
|
{ |
|
"epoch": 1.5315089534742714, |
|
"grad_norm": 0.72426837682724, |
|
"learning_rate": 5.686100767094826e-06, |
|
"loss": 1.9655, |
|
"step": 11840 |
|
}, |
|
{ |
|
"epoch": 1.5340959618416266, |
|
"grad_norm": 0.8364124894142151, |
|
"learning_rate": 5.671187637941882e-06, |
|
"loss": 1.9664, |
|
"step": 11860 |
|
}, |
|
{ |
|
"epoch": 1.536682970208982, |
|
"grad_norm": 0.6998101472854614, |
|
"learning_rate": 5.656268425802594e-06, |
|
"loss": 1.9811, |
|
"step": 11880 |
|
}, |
|
{ |
|
"epoch": 1.5392699785763369, |
|
"grad_norm": 0.7216737270355225, |
|
"learning_rate": 5.641343265890065e-06, |
|
"loss": 1.9686, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 1.5418569869436922, |
|
"grad_norm": 0.7465617060661316, |
|
"learning_rate": 5.626412293471304e-06, |
|
"loss": 1.9642, |
|
"step": 11920 |
|
}, |
|
{ |
|
"epoch": 1.5444439953110474, |
|
"grad_norm": 0.7474328279495239, |
|
"learning_rate": 5.6114756438659966e-06, |
|
"loss": 1.9698, |
|
"step": 11940 |
|
}, |
|
{ |
|
"epoch": 1.5470310036784025, |
|
"grad_norm": 0.709169328212738, |
|
"learning_rate": 5.5965334524452856e-06, |
|
"loss": 1.9629, |
|
"step": 11960 |
|
}, |
|
{ |
|
"epoch": 1.5496180120457577, |
|
"grad_norm": 0.778994083404541, |
|
"learning_rate": 5.5815858546305345e-06, |
|
"loss": 1.9922, |
|
"step": 11980 |
|
}, |
|
{ |
|
"epoch": 1.5522050204131128, |
|
"grad_norm": 0.7840653657913208, |
|
"learning_rate": 5.566632985892107e-06, |
|
"loss": 1.9659, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 1.5547920287804682, |
|
"grad_norm": 0.7363440990447998, |
|
"learning_rate": 5.551674981748137e-06, |
|
"loss": 1.9739, |
|
"step": 12020 |
|
}, |
|
{ |
|
"epoch": 1.557379037147823, |
|
"grad_norm": 0.74187833070755, |
|
"learning_rate": 5.5367119777633025e-06, |
|
"loss": 1.9654, |
|
"step": 12040 |
|
}, |
|
{ |
|
"epoch": 1.5599660455151785, |
|
"grad_norm": 0.7796049118041992, |
|
"learning_rate": 5.521744109547592e-06, |
|
"loss": 1.9442, |
|
"step": 12060 |
|
}, |
|
{ |
|
"epoch": 1.5625530538825336, |
|
"grad_norm": 0.7324556708335876, |
|
"learning_rate": 5.506771512755083e-06, |
|
"loss": 1.9703, |
|
"step": 12080 |
|
}, |
|
{ |
|
"epoch": 1.5651400622498888, |
|
"grad_norm": 0.7358458638191223, |
|
"learning_rate": 5.491794323082701e-06, |
|
"loss": 1.9563, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 1.5677270706172441, |
|
"grad_norm": 0.725487232208252, |
|
"learning_rate": 5.476812676269003e-06, |
|
"loss": 1.9715, |
|
"step": 12120 |
|
}, |
|
{ |
|
"epoch": 1.570314078984599, |
|
"grad_norm": 0.7749314308166504, |
|
"learning_rate": 5.46182670809294e-06, |
|
"loss": 1.9812, |
|
"step": 12140 |
|
}, |
|
{ |
|
"epoch": 1.5729010873519544, |
|
"grad_norm": 0.7413734793663025, |
|
"learning_rate": 5.446836554372626e-06, |
|
"loss": 1.9621, |
|
"step": 12160 |
|
}, |
|
{ |
|
"epoch": 1.5754880957193096, |
|
"grad_norm": 0.782210111618042, |
|
"learning_rate": 5.431842350964111e-06, |
|
"loss": 1.9639, |
|
"step": 12180 |
|
}, |
|
{ |
|
"epoch": 1.5780751040866647, |
|
"grad_norm": 0.7391452193260193, |
|
"learning_rate": 5.416844233760145e-06, |
|
"loss": 1.9654, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 1.58066211245402, |
|
"grad_norm": 0.755551815032959, |
|
"learning_rate": 5.401842338688951e-06, |
|
"loss": 1.9746, |
|
"step": 12220 |
|
}, |
|
{ |
|
"epoch": 1.583249120821375, |
|
"grad_norm": 0.8115268349647522, |
|
"learning_rate": 5.386836801712988e-06, |
|
"loss": 1.9772, |
|
"step": 12240 |
|
}, |
|
{ |
|
"epoch": 1.5858361291887304, |
|
"grad_norm": 0.7625905275344849, |
|
"learning_rate": 5.371827758827723e-06, |
|
"loss": 1.9654, |
|
"step": 12260 |
|
}, |
|
{ |
|
"epoch": 1.5884231375560856, |
|
"grad_norm": 0.735562264919281, |
|
"learning_rate": 5.356815346060401e-06, |
|
"loss": 1.9701, |
|
"step": 12280 |
|
}, |
|
{ |
|
"epoch": 1.5910101459234407, |
|
"grad_norm": 0.7974105477333069, |
|
"learning_rate": 5.3417996994688015e-06, |
|
"loss": 1.9849, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 1.593597154290796, |
|
"grad_norm": 0.7090020179748535, |
|
"learning_rate": 5.326780955140019e-06, |
|
"loss": 1.9872, |
|
"step": 12320 |
|
}, |
|
{ |
|
"epoch": 1.596184162658151, |
|
"grad_norm": 0.80815589427948, |
|
"learning_rate": 5.311759249189213e-06, |
|
"loss": 1.9791, |
|
"step": 12340 |
|
}, |
|
{ |
|
"epoch": 1.5987711710255064, |
|
"grad_norm": 0.8091129660606384, |
|
"learning_rate": 5.296734717758397e-06, |
|
"loss": 1.9725, |
|
"step": 12360 |
|
}, |
|
{ |
|
"epoch": 1.6013581793928615, |
|
"grad_norm": 0.7238413691520691, |
|
"learning_rate": 5.2817074970151815e-06, |
|
"loss": 1.9716, |
|
"step": 12380 |
|
}, |
|
{ |
|
"epoch": 1.6039451877602167, |
|
"grad_norm": 0.7404330372810364, |
|
"learning_rate": 5.266677723151558e-06, |
|
"loss": 1.9559, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 1.6065321961275718, |
|
"grad_norm": 0.7491504549980164, |
|
"learning_rate": 5.251645532382649e-06, |
|
"loss": 1.9582, |
|
"step": 12420 |
|
}, |
|
{ |
|
"epoch": 1.609119204494927, |
|
"grad_norm": 0.7146170139312744, |
|
"learning_rate": 5.236611060945488e-06, |
|
"loss": 1.957, |
|
"step": 12440 |
|
}, |
|
{ |
|
"epoch": 1.6117062128622823, |
|
"grad_norm": 0.7770988345146179, |
|
"learning_rate": 5.221574445097776e-06, |
|
"loss": 1.975, |
|
"step": 12460 |
|
}, |
|
{ |
|
"epoch": 1.6142932212296373, |
|
"grad_norm": 0.736663281917572, |
|
"learning_rate": 5.2065358211166486e-06, |
|
"loss": 1.9719, |
|
"step": 12480 |
|
}, |
|
{ |
|
"epoch": 1.6168802295969926, |
|
"grad_norm": 0.7258009910583496, |
|
"learning_rate": 5.191495325297441e-06, |
|
"loss": 1.9491, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 1.6194672379643478, |
|
"grad_norm": 0.739658534526825, |
|
"learning_rate": 5.1764530939524524e-06, |
|
"loss": 1.936, |
|
"step": 12520 |
|
}, |
|
{ |
|
"epoch": 1.622054246331703, |
|
"grad_norm": 0.7602417469024658, |
|
"learning_rate": 5.1614092634097146e-06, |
|
"loss": 1.9631, |
|
"step": 12540 |
|
}, |
|
{ |
|
"epoch": 1.6246412546990583, |
|
"grad_norm": 0.7500277161598206, |
|
"learning_rate": 5.14636397001175e-06, |
|
"loss": 1.9678, |
|
"step": 12560 |
|
}, |
|
{ |
|
"epoch": 1.6272282630664132, |
|
"grad_norm": 0.8021513223648071, |
|
"learning_rate": 5.131317350114335e-06, |
|
"loss": 1.9597, |
|
"step": 12580 |
|
}, |
|
{ |
|
"epoch": 1.6298152714337686, |
|
"grad_norm": 0.7306881546974182, |
|
"learning_rate": 5.116269540085277e-06, |
|
"loss": 1.956, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 1.6324022798011237, |
|
"grad_norm": 0.786392867565155, |
|
"learning_rate": 5.101220676303161e-06, |
|
"loss": 1.961, |
|
"step": 12620 |
|
}, |
|
{ |
|
"epoch": 1.6349892881684789, |
|
"grad_norm": 0.7665414810180664, |
|
"learning_rate": 5.08617089515613e-06, |
|
"loss": 1.9689, |
|
"step": 12640 |
|
}, |
|
{ |
|
"epoch": 1.6375762965358343, |
|
"grad_norm": 0.7925981283187866, |
|
"learning_rate": 5.0711203330406334e-06, |
|
"loss": 1.965, |
|
"step": 12660 |
|
}, |
|
{ |
|
"epoch": 1.6401633049031892, |
|
"grad_norm": 0.7429537177085876, |
|
"learning_rate": 5.056069126360206e-06, |
|
"loss": 1.9493, |
|
"step": 12680 |
|
}, |
|
{ |
|
"epoch": 1.6427503132705445, |
|
"grad_norm": 0.7412150502204895, |
|
"learning_rate": 5.041017411524217e-06, |
|
"loss": 1.9598, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 1.6453373216378997, |
|
"grad_norm": 0.7570027112960815, |
|
"learning_rate": 5.025965324946646e-06, |
|
"loss": 1.962, |
|
"step": 12720 |
|
}, |
|
{ |
|
"epoch": 1.6479243300052548, |
|
"grad_norm": 0.7325958013534546, |
|
"learning_rate": 5.010913003044842e-06, |
|
"loss": 1.9606, |
|
"step": 12740 |
|
}, |
|
{ |
|
"epoch": 1.6505113383726102, |
|
"grad_norm": 0.7462648153305054, |
|
"learning_rate": 4.995860582238284e-06, |
|
"loss": 1.943, |
|
"step": 12760 |
|
}, |
|
{ |
|
"epoch": 1.6530983467399651, |
|
"grad_norm": 0.733491063117981, |
|
"learning_rate": 4.980808198947347e-06, |
|
"loss": 1.9519, |
|
"step": 12780 |
|
}, |
|
{ |
|
"epoch": 1.6556853551073205, |
|
"grad_norm": 0.7238721251487732, |
|
"learning_rate": 4.965755989592069e-06, |
|
"loss": 1.9709, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 1.6582723634746757, |
|
"grad_norm": 0.7215867042541504, |
|
"learning_rate": 4.950704090590908e-06, |
|
"loss": 1.9739, |
|
"step": 12820 |
|
}, |
|
{ |
|
"epoch": 1.6608593718420308, |
|
"grad_norm": 0.7542194128036499, |
|
"learning_rate": 4.935652638359514e-06, |
|
"loss": 1.9684, |
|
"step": 12840 |
|
}, |
|
{ |
|
"epoch": 1.663446380209386, |
|
"grad_norm": 0.7353436350822449, |
|
"learning_rate": 4.920601769309484e-06, |
|
"loss": 1.9678, |
|
"step": 12860 |
|
}, |
|
{ |
|
"epoch": 1.666033388576741, |
|
"grad_norm": 0.7446224689483643, |
|
"learning_rate": 4.9055516198471295e-06, |
|
"loss": 1.9686, |
|
"step": 12880 |
|
}, |
|
{ |
|
"epoch": 1.6686203969440965, |
|
"grad_norm": 0.7474517226219177, |
|
"learning_rate": 4.890502326372244e-06, |
|
"loss": 1.9715, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 1.6712074053114514, |
|
"grad_norm": 0.7246785163879395, |
|
"learning_rate": 4.87545402527686e-06, |
|
"loss": 1.9641, |
|
"step": 12920 |
|
}, |
|
{ |
|
"epoch": 1.6737944136788068, |
|
"grad_norm": 0.746099591255188, |
|
"learning_rate": 4.8604068529440165e-06, |
|
"loss": 1.984, |
|
"step": 12940 |
|
}, |
|
{ |
|
"epoch": 1.676381422046162, |
|
"grad_norm": 0.7616404294967651, |
|
"learning_rate": 4.845360945746523e-06, |
|
"loss": 1.9708, |
|
"step": 12960 |
|
}, |
|
{ |
|
"epoch": 1.678968430413517, |
|
"grad_norm": 0.7588489055633545, |
|
"learning_rate": 4.830316440045723e-06, |
|
"loss": 1.9497, |
|
"step": 12980 |
|
}, |
|
{ |
|
"epoch": 1.6815554387808724, |
|
"grad_norm": 0.7502963542938232, |
|
"learning_rate": 4.815273472190259e-06, |
|
"loss": 1.9742, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 1.6841424471482274, |
|
"grad_norm": 0.7508452534675598, |
|
"learning_rate": 4.800232178514834e-06, |
|
"loss": 1.966, |
|
"step": 13020 |
|
}, |
|
{ |
|
"epoch": 1.6867294555155827, |
|
"grad_norm": 0.7516492605209351, |
|
"learning_rate": 4.78519269533898e-06, |
|
"loss": 1.9499, |
|
"step": 13040 |
|
}, |
|
{ |
|
"epoch": 1.6893164638829379, |
|
"grad_norm": 0.7797842025756836, |
|
"learning_rate": 4.770155158965818e-06, |
|
"loss": 1.954, |
|
"step": 13060 |
|
}, |
|
{ |
|
"epoch": 1.691903472250293, |
|
"grad_norm": 0.7264002561569214, |
|
"learning_rate": 4.7551197056808275e-06, |
|
"loss": 1.9431, |
|
"step": 13080 |
|
}, |
|
{ |
|
"epoch": 1.6944904806176484, |
|
"grad_norm": 0.7275776267051697, |
|
"learning_rate": 4.740086471750608e-06, |
|
"loss": 1.9382, |
|
"step": 13100 |
|
}, |
|
{ |
|
"epoch": 1.6970774889850033, |
|
"grad_norm": 0.745893657207489, |
|
"learning_rate": 4.7250555934216446e-06, |
|
"loss": 1.961, |
|
"step": 13120 |
|
}, |
|
{ |
|
"epoch": 1.6996644973523587, |
|
"grad_norm": 0.7499802112579346, |
|
"learning_rate": 4.7100272069190735e-06, |
|
"loss": 1.9462, |
|
"step": 13140 |
|
}, |
|
{ |
|
"epoch": 1.7022515057197138, |
|
"grad_norm": 0.7486584782600403, |
|
"learning_rate": 4.69500144844545e-06, |
|
"loss": 1.9543, |
|
"step": 13160 |
|
}, |
|
{ |
|
"epoch": 1.704838514087069, |
|
"grad_norm": 0.715093195438385, |
|
"learning_rate": 4.679978454179507e-06, |
|
"loss": 1.9484, |
|
"step": 13180 |
|
}, |
|
{ |
|
"epoch": 1.7074255224544241, |
|
"grad_norm": 0.7148420810699463, |
|
"learning_rate": 4.664958360274927e-06, |
|
"loss": 1.9584, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 1.7100125308217793, |
|
"grad_norm": 0.7597442269325256, |
|
"learning_rate": 4.649941302859109e-06, |
|
"loss": 1.9651, |
|
"step": 13220 |
|
}, |
|
{ |
|
"epoch": 1.7125995391891347, |
|
"grad_norm": 0.7285702228546143, |
|
"learning_rate": 4.63492741803193e-06, |
|
"loss": 1.9514, |
|
"step": 13240 |
|
}, |
|
{ |
|
"epoch": 1.7151865475564898, |
|
"grad_norm": 0.7763605117797852, |
|
"learning_rate": 4.619916841864513e-06, |
|
"loss": 1.9697, |
|
"step": 13260 |
|
}, |
|
{ |
|
"epoch": 1.717773555923845, |
|
"grad_norm": 0.7495293617248535, |
|
"learning_rate": 4.604909710397997e-06, |
|
"loss": 1.9471, |
|
"step": 13280 |
|
}, |
|
{ |
|
"epoch": 1.7203605642912, |
|
"grad_norm": 0.7468627691268921, |
|
"learning_rate": 4.5899061596423e-06, |
|
"loss": 1.9835, |
|
"step": 13300 |
|
}, |
|
{ |
|
"epoch": 1.7229475726585552, |
|
"grad_norm": 0.7560572624206543, |
|
"learning_rate": 4.574906325574888e-06, |
|
"loss": 1.945, |
|
"step": 13320 |
|
}, |
|
{ |
|
"epoch": 1.7255345810259106, |
|
"grad_norm": 0.7571983337402344, |
|
"learning_rate": 4.5599103441395415e-06, |
|
"loss": 1.9437, |
|
"step": 13340 |
|
}, |
|
{ |
|
"epoch": 1.7281215893932655, |
|
"grad_norm": 0.7818071842193604, |
|
"learning_rate": 4.544918351245128e-06, |
|
"loss": 1.9482, |
|
"step": 13360 |
|
}, |
|
{ |
|
"epoch": 1.730708597760621, |
|
"grad_norm": 0.764826238155365, |
|
"learning_rate": 4.529930482764362e-06, |
|
"loss": 1.9408, |
|
"step": 13380 |
|
}, |
|
{ |
|
"epoch": 1.733295606127976, |
|
"grad_norm": 0.7344059348106384, |
|
"learning_rate": 4.514946874532584e-06, |
|
"loss": 1.9487, |
|
"step": 13400 |
|
}, |
|
{ |
|
"epoch": 1.7358826144953312, |
|
"grad_norm": 0.7779365181922913, |
|
"learning_rate": 4.499967662346519e-06, |
|
"loss": 1.9417, |
|
"step": 13420 |
|
}, |
|
{ |
|
"epoch": 1.7384696228626866, |
|
"grad_norm": 0.7346351146697998, |
|
"learning_rate": 4.484992981963053e-06, |
|
"loss": 1.95, |
|
"step": 13440 |
|
}, |
|
{ |
|
"epoch": 1.7410566312300415, |
|
"grad_norm": 0.7388240098953247, |
|
"learning_rate": 4.4700229690979985e-06, |
|
"loss": 1.9404, |
|
"step": 13460 |
|
}, |
|
{ |
|
"epoch": 1.7436436395973969, |
|
"grad_norm": 0.7134066224098206, |
|
"learning_rate": 4.455057759424868e-06, |
|
"loss": 1.9515, |
|
"step": 13480 |
|
}, |
|
{ |
|
"epoch": 1.746230647964752, |
|
"grad_norm": 0.7144001722335815, |
|
"learning_rate": 4.440097488573642e-06, |
|
"loss": 1.9579, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 1.7488176563321072, |
|
"grad_norm": 0.7423569560050964, |
|
"learning_rate": 4.425142292129539e-06, |
|
"loss": 1.9642, |
|
"step": 13520 |
|
}, |
|
{ |
|
"epoch": 1.7514046646994625, |
|
"grad_norm": 0.7508745789527893, |
|
"learning_rate": 4.410192305631791e-06, |
|
"loss": 1.9495, |
|
"step": 13540 |
|
}, |
|
{ |
|
"epoch": 1.7539916730668175, |
|
"grad_norm": 0.788547933101654, |
|
"learning_rate": 4.395247664572409e-06, |
|
"loss": 1.9626, |
|
"step": 13560 |
|
}, |
|
{ |
|
"epoch": 1.7565786814341728, |
|
"grad_norm": 0.7150464057922363, |
|
"learning_rate": 4.3803085043949585e-06, |
|
"loss": 1.9405, |
|
"step": 13580 |
|
}, |
|
{ |
|
"epoch": 1.759165689801528, |
|
"grad_norm": 0.7496698498725891, |
|
"learning_rate": 4.365374960493335e-06, |
|
"loss": 1.9256, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 1.7617526981688831, |
|
"grad_norm": 0.72135990858078, |
|
"learning_rate": 4.35044716821053e-06, |
|
"loss": 1.9226, |
|
"step": 13620 |
|
}, |
|
{ |
|
"epoch": 1.7643397065362383, |
|
"grad_norm": 0.717047929763794, |
|
"learning_rate": 4.335525262837409e-06, |
|
"loss": 1.9551, |
|
"step": 13640 |
|
}, |
|
{ |
|
"epoch": 1.7669267149035934, |
|
"grad_norm": 0.8064849376678467, |
|
"learning_rate": 4.3206093796114875e-06, |
|
"loss": 1.9284, |
|
"step": 13660 |
|
}, |
|
{ |
|
"epoch": 1.7695137232709488, |
|
"grad_norm": 0.778425931930542, |
|
"learning_rate": 4.305699653715695e-06, |
|
"loss": 1.9557, |
|
"step": 13680 |
|
}, |
|
{ |
|
"epoch": 1.7721007316383037, |
|
"grad_norm": 0.742351770401001, |
|
"learning_rate": 4.290796220277162e-06, |
|
"loss": 1.9541, |
|
"step": 13700 |
|
}, |
|
{ |
|
"epoch": 1.774687740005659, |
|
"grad_norm": 0.7197045087814331, |
|
"learning_rate": 4.275899214365991e-06, |
|
"loss": 1.962, |
|
"step": 13720 |
|
}, |
|
{ |
|
"epoch": 1.7772747483730142, |
|
"grad_norm": 0.7143533825874329, |
|
"learning_rate": 4.26100877099403e-06, |
|
"loss": 1.9533, |
|
"step": 13740 |
|
}, |
|
{ |
|
"epoch": 1.7798617567403694, |
|
"grad_norm": 0.7439367175102234, |
|
"learning_rate": 4.24612502511365e-06, |
|
"loss": 1.9479, |
|
"step": 13760 |
|
}, |
|
{ |
|
"epoch": 1.7824487651077248, |
|
"grad_norm": 0.7582443952560425, |
|
"learning_rate": 4.231248111616525e-06, |
|
"loss": 1.9495, |
|
"step": 13780 |
|
}, |
|
{ |
|
"epoch": 1.7850357734750797, |
|
"grad_norm": 0.7773938775062561, |
|
"learning_rate": 4.216378165332404e-06, |
|
"loss": 1.9526, |
|
"step": 13800 |
|
}, |
|
{ |
|
"epoch": 1.787622781842435, |
|
"grad_norm": 0.7457807660102844, |
|
"learning_rate": 4.201515321027894e-06, |
|
"loss": 1.9511, |
|
"step": 13820 |
|
}, |
|
{ |
|
"epoch": 1.7902097902097902, |
|
"grad_norm": 0.708568811416626, |
|
"learning_rate": 4.186659713405235e-06, |
|
"loss": 1.9399, |
|
"step": 13840 |
|
}, |
|
{ |
|
"epoch": 1.7927967985771454, |
|
"grad_norm": 0.8040826320648193, |
|
"learning_rate": 4.171811477101083e-06, |
|
"loss": 1.9546, |
|
"step": 13860 |
|
}, |
|
{ |
|
"epoch": 1.7953838069445007, |
|
"grad_norm": 0.8364230990409851, |
|
"learning_rate": 4.156970746685286e-06, |
|
"loss": 1.9466, |
|
"step": 13880 |
|
}, |
|
{ |
|
"epoch": 1.7979708153118557, |
|
"grad_norm": 0.7208890914916992, |
|
"learning_rate": 4.142137656659665e-06, |
|
"loss": 1.9477, |
|
"step": 13900 |
|
}, |
|
{ |
|
"epoch": 1.800557823679211, |
|
"grad_norm": 0.7639915943145752, |
|
"learning_rate": 4.1273123414568e-06, |
|
"loss": 1.9493, |
|
"step": 13920 |
|
}, |
|
{ |
|
"epoch": 1.8031448320465662, |
|
"grad_norm": 0.710559606552124, |
|
"learning_rate": 4.112494935438803e-06, |
|
"loss": 1.9613, |
|
"step": 13940 |
|
}, |
|
{ |
|
"epoch": 1.8057318404139213, |
|
"grad_norm": 0.7264363765716553, |
|
"learning_rate": 4.097685572896107e-06, |
|
"loss": 1.9369, |
|
"step": 13960 |
|
}, |
|
{ |
|
"epoch": 1.8083188487812767, |
|
"grad_norm": 0.7411283254623413, |
|
"learning_rate": 4.082884388046248e-06, |
|
"loss": 1.9311, |
|
"step": 13980 |
|
}, |
|
{ |
|
"epoch": 1.8109058571486316, |
|
"grad_norm": 0.7250226140022278, |
|
"learning_rate": 4.068091515032645e-06, |
|
"loss": 1.9455, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 1.813492865515987, |
|
"grad_norm": 0.7740228772163391, |
|
"learning_rate": 4.053307087923389e-06, |
|
"loss": 1.9459, |
|
"step": 14020 |
|
}, |
|
{ |
|
"epoch": 1.8160798738833421, |
|
"grad_norm": 0.7399852871894836, |
|
"learning_rate": 4.038531240710024e-06, |
|
"loss": 1.937, |
|
"step": 14040 |
|
}, |
|
{ |
|
"epoch": 1.8186668822506973, |
|
"grad_norm": 0.764217734336853, |
|
"learning_rate": 4.0237641073063335e-06, |
|
"loss": 1.9334, |
|
"step": 14060 |
|
}, |
|
{ |
|
"epoch": 1.8212538906180524, |
|
"grad_norm": 0.68044114112854, |
|
"learning_rate": 4.009005821547131e-06, |
|
"loss": 1.9374, |
|
"step": 14080 |
|
}, |
|
{ |
|
"epoch": 1.8238408989854076, |
|
"grad_norm": 0.7502301335334778, |
|
"learning_rate": 3.994256517187038e-06, |
|
"loss": 1.9443, |
|
"step": 14100 |
|
}, |
|
{ |
|
"epoch": 1.826427907352763, |
|
"grad_norm": 0.8162171840667725, |
|
"learning_rate": 3.979516327899285e-06, |
|
"loss": 1.9696, |
|
"step": 14120 |
|
}, |
|
{ |
|
"epoch": 1.8290149157201179, |
|
"grad_norm": 0.7348920106887817, |
|
"learning_rate": 3.964785387274485e-06, |
|
"loss": 1.9489, |
|
"step": 14140 |
|
}, |
|
{ |
|
"epoch": 1.8316019240874732, |
|
"grad_norm": 0.707190752029419, |
|
"learning_rate": 3.950063828819437e-06, |
|
"loss": 1.9545, |
|
"step": 14160 |
|
}, |
|
{ |
|
"epoch": 1.8341889324548284, |
|
"grad_norm": 0.7493889331817627, |
|
"learning_rate": 3.935351785955901e-06, |
|
"loss": 1.9414, |
|
"step": 14180 |
|
}, |
|
{ |
|
"epoch": 1.8367759408221835, |
|
"grad_norm": 0.730453372001648, |
|
"learning_rate": 3.920649392019404e-06, |
|
"loss": 1.933, |
|
"step": 14200 |
|
}, |
|
{ |
|
"epoch": 1.839362949189539, |
|
"grad_norm": 0.7539755702018738, |
|
"learning_rate": 3.9059567802580205e-06, |
|
"loss": 1.9684, |
|
"step": 14220 |
|
}, |
|
{ |
|
"epoch": 1.8419499575568938, |
|
"grad_norm": 0.7118528485298157, |
|
"learning_rate": 3.8912740838311735e-06, |
|
"loss": 1.9399, |
|
"step": 14240 |
|
}, |
|
{ |
|
"epoch": 1.8445369659242492, |
|
"grad_norm": 0.7369790077209473, |
|
"learning_rate": 3.876601435808419e-06, |
|
"loss": 1.9586, |
|
"step": 14260 |
|
}, |
|
{ |
|
"epoch": 1.8471239742916044, |
|
"grad_norm": 0.7587118744850159, |
|
"learning_rate": 3.861938969168245e-06, |
|
"loss": 1.9373, |
|
"step": 14280 |
|
}, |
|
{ |
|
"epoch": 1.8497109826589595, |
|
"grad_norm": 0.7134782075881958, |
|
"learning_rate": 3.847286816796867e-06, |
|
"loss": 1.936, |
|
"step": 14300 |
|
}, |
|
{ |
|
"epoch": 1.8522979910263149, |
|
"grad_norm": 0.7343664169311523, |
|
"learning_rate": 3.8326451114870204e-06, |
|
"loss": 1.9177, |
|
"step": 14320 |
|
}, |
|
{ |
|
"epoch": 1.8548849993936698, |
|
"grad_norm": 0.779914915561676, |
|
"learning_rate": 3.818013985936759e-06, |
|
"loss": 1.9347, |
|
"step": 14340 |
|
}, |
|
{ |
|
"epoch": 1.8574720077610252, |
|
"grad_norm": 0.7164252400398254, |
|
"learning_rate": 3.8033935727482522e-06, |
|
"loss": 1.9449, |
|
"step": 14360 |
|
}, |
|
{ |
|
"epoch": 1.8600590161283803, |
|
"grad_norm": 0.7624450325965881, |
|
"learning_rate": 3.788784004426584e-06, |
|
"loss": 1.9288, |
|
"step": 14380 |
|
}, |
|
{ |
|
"epoch": 1.8626460244957355, |
|
"grad_norm": 0.6874932646751404, |
|
"learning_rate": 3.7741854133785493e-06, |
|
"loss": 1.9348, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 1.8652330328630906, |
|
"grad_norm": 0.7472041845321655, |
|
"learning_rate": 3.759597931911458e-06, |
|
"loss": 1.9471, |
|
"step": 14420 |
|
}, |
|
{ |
|
"epoch": 1.8678200412304458, |
|
"grad_norm": 0.7028324007987976, |
|
"learning_rate": 3.7450216922319317e-06, |
|
"loss": 1.9283, |
|
"step": 14440 |
|
}, |
|
{ |
|
"epoch": 1.8704070495978011, |
|
"grad_norm": 0.689389169216156, |
|
"learning_rate": 3.730456826444707e-06, |
|
"loss": 1.924, |
|
"step": 14460 |
|
}, |
|
{ |
|
"epoch": 1.8729940579651563, |
|
"grad_norm": 0.7389649748802185, |
|
"learning_rate": 3.715903466551442e-06, |
|
"loss": 1.9498, |
|
"step": 14480 |
|
}, |
|
{ |
|
"epoch": 1.8755810663325114, |
|
"grad_norm": 0.7347127199172974, |
|
"learning_rate": 3.7013617444495136e-06, |
|
"loss": 1.9788, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 1.8781680746998666, |
|
"grad_norm": 0.7470489144325256, |
|
"learning_rate": 3.686831791930826e-06, |
|
"loss": 1.9372, |
|
"step": 14520 |
|
}, |
|
{ |
|
"epoch": 1.8807550830672217, |
|
"grad_norm": 0.7183331847190857, |
|
"learning_rate": 3.6723137406806162e-06, |
|
"loss": 1.9303, |
|
"step": 14540 |
|
}, |
|
{ |
|
"epoch": 1.883342091434577, |
|
"grad_norm": 0.7698735594749451, |
|
"learning_rate": 3.6578077222762587e-06, |
|
"loss": 1.947, |
|
"step": 14560 |
|
}, |
|
{ |
|
"epoch": 1.885929099801932, |
|
"grad_norm": 0.7268314957618713, |
|
"learning_rate": 3.643313868186075e-06, |
|
"loss": 1.9219, |
|
"step": 14580 |
|
}, |
|
{ |
|
"epoch": 1.8885161081692874, |
|
"grad_norm": 0.6923518776893616, |
|
"learning_rate": 3.62883230976814e-06, |
|
"loss": 1.9196, |
|
"step": 14600 |
|
}, |
|
{ |
|
"epoch": 1.8911031165366425, |
|
"grad_norm": 0.7996921539306641, |
|
"learning_rate": 3.6143631782690936e-06, |
|
"loss": 1.95, |
|
"step": 14620 |
|
}, |
|
{ |
|
"epoch": 1.8936901249039977, |
|
"grad_norm": 0.7068451642990112, |
|
"learning_rate": 3.5999066048229512e-06, |
|
"loss": 1.9423, |
|
"step": 14640 |
|
}, |
|
{ |
|
"epoch": 1.896277133271353, |
|
"grad_norm": 0.7754804491996765, |
|
"learning_rate": 3.5854627204499114e-06, |
|
"loss": 1.9398, |
|
"step": 14660 |
|
}, |
|
{ |
|
"epoch": 1.898864141638708, |
|
"grad_norm": 0.7194542288780212, |
|
"learning_rate": 3.5710316560551743e-06, |
|
"loss": 1.9494, |
|
"step": 14680 |
|
}, |
|
{ |
|
"epoch": 1.9014511500060634, |
|
"grad_norm": 0.7717624306678772, |
|
"learning_rate": 3.5566135424277514e-06, |
|
"loss": 1.9385, |
|
"step": 14700 |
|
}, |
|
{ |
|
"epoch": 1.9040381583734185, |
|
"grad_norm": 0.750825822353363, |
|
"learning_rate": 3.542208510239281e-06, |
|
"loss": 1.9404, |
|
"step": 14720 |
|
}, |
|
{ |
|
"epoch": 1.9066251667407736, |
|
"grad_norm": 0.7245256900787354, |
|
"learning_rate": 3.5278166900428435e-06, |
|
"loss": 1.9395, |
|
"step": 14740 |
|
}, |
|
{ |
|
"epoch": 1.909212175108129, |
|
"grad_norm": 0.7693631649017334, |
|
"learning_rate": 3.513438212271781e-06, |
|
"loss": 1.9468, |
|
"step": 14760 |
|
}, |
|
{ |
|
"epoch": 1.911799183475484, |
|
"grad_norm": 0.7538326382637024, |
|
"learning_rate": 3.4990732072385096e-06, |
|
"loss": 1.9473, |
|
"step": 14780 |
|
}, |
|
{ |
|
"epoch": 1.9143861918428393, |
|
"grad_norm": 0.7545143961906433, |
|
"learning_rate": 3.4847218051333447e-06, |
|
"loss": 1.9373, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 1.9169732002101945, |
|
"grad_norm": 0.7644726037979126, |
|
"learning_rate": 3.4703841360233166e-06, |
|
"loss": 1.9446, |
|
"step": 14820 |
|
}, |
|
{ |
|
"epoch": 1.9195602085775496, |
|
"grad_norm": 0.7512372732162476, |
|
"learning_rate": 3.456060329850993e-06, |
|
"loss": 1.9187, |
|
"step": 14840 |
|
}, |
|
{ |
|
"epoch": 1.9221472169449048, |
|
"grad_norm": 0.7338785529136658, |
|
"learning_rate": 3.441750516433304e-06, |
|
"loss": 1.9368, |
|
"step": 14860 |
|
}, |
|
{ |
|
"epoch": 1.92473422531226, |
|
"grad_norm": 0.6979427337646484, |
|
"learning_rate": 3.427454825460358e-06, |
|
"loss": 1.9341, |
|
"step": 14880 |
|
}, |
|
{ |
|
"epoch": 1.9273212336796153, |
|
"grad_norm": 0.7081001996994019, |
|
"learning_rate": 3.4131733864942755e-06, |
|
"loss": 1.9467, |
|
"step": 14900 |
|
}, |
|
{ |
|
"epoch": 1.9299082420469704, |
|
"grad_norm": 0.726633608341217, |
|
"learning_rate": 3.398906328968011e-06, |
|
"loss": 1.9608, |
|
"step": 14920 |
|
}, |
|
{ |
|
"epoch": 1.9324952504143256, |
|
"grad_norm": 0.7110878229141235, |
|
"learning_rate": 3.384653782184178e-06, |
|
"loss": 1.9337, |
|
"step": 14940 |
|
}, |
|
{ |
|
"epoch": 1.9350822587816807, |
|
"grad_norm": 0.7285667061805725, |
|
"learning_rate": 3.3704158753138795e-06, |
|
"loss": 1.9488, |
|
"step": 14960 |
|
}, |
|
{ |
|
"epoch": 1.9376692671490359, |
|
"grad_norm": 0.7054875493049622, |
|
"learning_rate": 3.356192737395536e-06, |
|
"loss": 1.9491, |
|
"step": 14980 |
|
}, |
|
{ |
|
"epoch": 1.9402562755163912, |
|
"grad_norm": 0.7593070268630981, |
|
"learning_rate": 3.3419844973337178e-06, |
|
"loss": 1.9456, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 1.9428432838837462, |
|
"grad_norm": 0.7805582880973816, |
|
"learning_rate": 3.327791283897975e-06, |
|
"loss": 1.9399, |
|
"step": 15020 |
|
}, |
|
{ |
|
"epoch": 1.9454302922511015, |
|
"grad_norm": 0.6953914761543274, |
|
"learning_rate": 3.313613225721672e-06, |
|
"loss": 1.9398, |
|
"step": 15040 |
|
}, |
|
{ |
|
"epoch": 1.9480173006184567, |
|
"grad_norm": 0.7649853229522705, |
|
"learning_rate": 3.2994504513008185e-06, |
|
"loss": 1.9202, |
|
"step": 15060 |
|
}, |
|
{ |
|
"epoch": 1.9506043089858118, |
|
"grad_norm": 0.7447147965431213, |
|
"learning_rate": 3.2853030889929105e-06, |
|
"loss": 1.9252, |
|
"step": 15080 |
|
}, |
|
{ |
|
"epoch": 1.9531913173531672, |
|
"grad_norm": 0.7669313549995422, |
|
"learning_rate": 3.2711712670157613e-06, |
|
"loss": 1.9228, |
|
"step": 15100 |
|
}, |
|
{ |
|
"epoch": 1.9557783257205221, |
|
"grad_norm": 0.7048401236534119, |
|
"learning_rate": 3.2570551134463426e-06, |
|
"loss": 1.9316, |
|
"step": 15120 |
|
}, |
|
{ |
|
"epoch": 1.9583653340878775, |
|
"grad_norm": 0.7256927490234375, |
|
"learning_rate": 3.242954756219623e-06, |
|
"loss": 1.9201, |
|
"step": 15140 |
|
}, |
|
{ |
|
"epoch": 1.9609523424552326, |
|
"grad_norm": 0.7202728390693665, |
|
"learning_rate": 3.228870323127409e-06, |
|
"loss": 1.9283, |
|
"step": 15160 |
|
}, |
|
{ |
|
"epoch": 1.9635393508225878, |
|
"grad_norm": 0.7406941056251526, |
|
"learning_rate": 3.214801941817185e-06, |
|
"loss": 1.9384, |
|
"step": 15180 |
|
}, |
|
{ |
|
"epoch": 1.9661263591899432, |
|
"grad_norm": 0.7298773527145386, |
|
"learning_rate": 3.2007497397909588e-06, |
|
"loss": 1.9366, |
|
"step": 15200 |
|
}, |
|
{ |
|
"epoch": 1.968713367557298, |
|
"grad_norm": 0.7127735018730164, |
|
"learning_rate": 3.186713844404105e-06, |
|
"loss": 1.919, |
|
"step": 15220 |
|
}, |
|
{ |
|
"epoch": 1.9713003759246535, |
|
"grad_norm": 0.7574893832206726, |
|
"learning_rate": 3.172694382864212e-06, |
|
"loss": 1.9286, |
|
"step": 15240 |
|
}, |
|
{ |
|
"epoch": 1.9738873842920086, |
|
"grad_norm": 0.7244542241096497, |
|
"learning_rate": 3.158691482229925e-06, |
|
"loss": 1.9195, |
|
"step": 15260 |
|
}, |
|
{ |
|
"epoch": 1.9764743926593638, |
|
"grad_norm": 0.7580679059028625, |
|
"learning_rate": 3.1447052694098006e-06, |
|
"loss": 1.9422, |
|
"step": 15280 |
|
}, |
|
{ |
|
"epoch": 1.979061401026719, |
|
"grad_norm": 0.7484192848205566, |
|
"learning_rate": 3.130735871161151e-06, |
|
"loss": 1.9322, |
|
"step": 15300 |
|
}, |
|
{ |
|
"epoch": 1.981648409394074, |
|
"grad_norm": 0.7437182068824768, |
|
"learning_rate": 3.1167834140888992e-06, |
|
"loss": 1.9283, |
|
"step": 15320 |
|
}, |
|
{ |
|
"epoch": 1.9842354177614294, |
|
"grad_norm": 0.7359150052070618, |
|
"learning_rate": 3.1028480246444293e-06, |
|
"loss": 1.9465, |
|
"step": 15340 |
|
}, |
|
{ |
|
"epoch": 1.9868224261287843, |
|
"grad_norm": 0.7357231378555298, |
|
"learning_rate": 3.08892982912444e-06, |
|
"loss": 1.9216, |
|
"step": 15360 |
|
}, |
|
{ |
|
"epoch": 1.9894094344961397, |
|
"grad_norm": 0.8238598704338074, |
|
"learning_rate": 3.075028953669804e-06, |
|
"loss": 1.9368, |
|
"step": 15380 |
|
}, |
|
{ |
|
"epoch": 1.9919964428634949, |
|
"grad_norm": 0.7543041110038757, |
|
"learning_rate": 3.0611455242644193e-06, |
|
"loss": 1.9318, |
|
"step": 15400 |
|
}, |
|
{ |
|
"epoch": 1.99458345123085, |
|
"grad_norm": 0.7391118407249451, |
|
"learning_rate": 3.047279666734072e-06, |
|
"loss": 1.9231, |
|
"step": 15420 |
|
}, |
|
{ |
|
"epoch": 1.9971704595982054, |
|
"grad_norm": 0.7640807628631592, |
|
"learning_rate": 3.0334315067452914e-06, |
|
"loss": 1.932, |
|
"step": 15440 |
|
}, |
|
{ |
|
"epoch": 1.9997574679655603, |
|
"grad_norm": 0.7693647742271423, |
|
"learning_rate": 3.019601169804216e-06, |
|
"loss": 1.9367, |
|
"step": 15460 |
|
}, |
|
{ |
|
"epoch": 2.0023444763329157, |
|
"grad_norm": 0.7293151617050171, |
|
"learning_rate": 3.0057887812554532e-06, |
|
"loss": 1.9103, |
|
"step": 15480 |
|
}, |
|
{ |
|
"epoch": 2.004931484700271, |
|
"grad_norm": 0.7756637930870056, |
|
"learning_rate": 2.991994466280943e-06, |
|
"loss": 1.9104, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 2.007518493067626, |
|
"grad_norm": 0.7553788423538208, |
|
"learning_rate": 2.9782183498988253e-06, |
|
"loss": 1.8962, |
|
"step": 15520 |
|
}, |
|
{ |
|
"epoch": 2.0101055014349813, |
|
"grad_norm": 0.7243167757987976, |
|
"learning_rate": 2.9644605569623046e-06, |
|
"loss": 1.9072, |
|
"step": 15540 |
|
}, |
|
{ |
|
"epoch": 2.0126925098023363, |
|
"grad_norm": 0.7398019433021545, |
|
"learning_rate": 2.9507212121585204e-06, |
|
"loss": 1.9038, |
|
"step": 15560 |
|
}, |
|
{ |
|
"epoch": 2.0152795181696916, |
|
"grad_norm": 0.8055486679077148, |
|
"learning_rate": 2.9370004400074165e-06, |
|
"loss": 1.9088, |
|
"step": 15580 |
|
}, |
|
{ |
|
"epoch": 2.0178665265370466, |
|
"grad_norm": 0.7211863398551941, |
|
"learning_rate": 2.923298364860612e-06, |
|
"loss": 1.9145, |
|
"step": 15600 |
|
}, |
|
{ |
|
"epoch": 2.020453534904402, |
|
"grad_norm": 0.7466139793395996, |
|
"learning_rate": 2.909615110900276e-06, |
|
"loss": 1.9115, |
|
"step": 15620 |
|
}, |
|
{ |
|
"epoch": 2.0230405432717573, |
|
"grad_norm": 0.7326526641845703, |
|
"learning_rate": 2.895950802137999e-06, |
|
"loss": 1.9158, |
|
"step": 15640 |
|
}, |
|
{ |
|
"epoch": 2.0256275516391122, |
|
"grad_norm": 0.7385395765304565, |
|
"learning_rate": 2.882305562413669e-06, |
|
"loss": 1.9107, |
|
"step": 15660 |
|
}, |
|
{ |
|
"epoch": 2.0282145600064676, |
|
"grad_norm": 0.715105414390564, |
|
"learning_rate": 2.8686795153943574e-06, |
|
"loss": 1.9281, |
|
"step": 15680 |
|
}, |
|
{ |
|
"epoch": 2.0308015683738225, |
|
"grad_norm": 0.7056469321250916, |
|
"learning_rate": 2.8550727845731846e-06, |
|
"loss": 1.9172, |
|
"step": 15700 |
|
}, |
|
{ |
|
"epoch": 2.033388576741178, |
|
"grad_norm": 0.7649030089378357, |
|
"learning_rate": 2.8414854932682147e-06, |
|
"loss": 1.9152, |
|
"step": 15720 |
|
}, |
|
{ |
|
"epoch": 2.0359755851085333, |
|
"grad_norm": 0.7351984977722168, |
|
"learning_rate": 2.827917764621323e-06, |
|
"loss": 1.898, |
|
"step": 15740 |
|
}, |
|
{ |
|
"epoch": 2.038562593475888, |
|
"grad_norm": 0.7420974969863892, |
|
"learning_rate": 2.814369721597098e-06, |
|
"loss": 1.8888, |
|
"step": 15760 |
|
}, |
|
{ |
|
"epoch": 2.0411496018432436, |
|
"grad_norm": 0.7377281188964844, |
|
"learning_rate": 2.800841486981708e-06, |
|
"loss": 1.9011, |
|
"step": 15780 |
|
}, |
|
{ |
|
"epoch": 2.0437366102105985, |
|
"grad_norm": 0.6988097429275513, |
|
"learning_rate": 2.7873331833817994e-06, |
|
"loss": 1.9062, |
|
"step": 15800 |
|
}, |
|
{ |
|
"epoch": 2.046323618577954, |
|
"grad_norm": 0.731208860874176, |
|
"learning_rate": 2.773844933223391e-06, |
|
"loss": 1.9038, |
|
"step": 15820 |
|
}, |
|
{ |
|
"epoch": 2.0489106269453092, |
|
"grad_norm": 0.7454236745834351, |
|
"learning_rate": 2.760376858750744e-06, |
|
"loss": 1.9045, |
|
"step": 15840 |
|
}, |
|
{ |
|
"epoch": 2.051497635312664, |
|
"grad_norm": 0.7510747909545898, |
|
"learning_rate": 2.7469290820252786e-06, |
|
"loss": 1.9081, |
|
"step": 15860 |
|
}, |
|
{ |
|
"epoch": 2.0540846436800195, |
|
"grad_norm": 0.8188992142677307, |
|
"learning_rate": 2.7335017249244484e-06, |
|
"loss": 1.9172, |
|
"step": 15880 |
|
}, |
|
{ |
|
"epoch": 2.0566716520473745, |
|
"grad_norm": 0.7091829180717468, |
|
"learning_rate": 2.72009490914065e-06, |
|
"loss": 1.9068, |
|
"step": 15900 |
|
}, |
|
{ |
|
"epoch": 2.05925866041473, |
|
"grad_norm": 0.7576590776443481, |
|
"learning_rate": 2.7067087561801053e-06, |
|
"loss": 1.891, |
|
"step": 15920 |
|
}, |
|
{ |
|
"epoch": 2.0618456687820848, |
|
"grad_norm": 0.7395625710487366, |
|
"learning_rate": 2.693343387361779e-06, |
|
"loss": 1.9226, |
|
"step": 15940 |
|
}, |
|
{ |
|
"epoch": 2.06443267714944, |
|
"grad_norm": 0.760321855545044, |
|
"learning_rate": 2.6799989238162583e-06, |
|
"loss": 1.9065, |
|
"step": 15960 |
|
}, |
|
{ |
|
"epoch": 2.0670196855167955, |
|
"grad_norm": 0.74191814661026, |
|
"learning_rate": 2.6666754864846744e-06, |
|
"loss": 1.914, |
|
"step": 15980 |
|
}, |
|
{ |
|
"epoch": 2.0696066938841504, |
|
"grad_norm": 0.7157737612724304, |
|
"learning_rate": 2.6533731961175902e-06, |
|
"loss": 1.895, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 2.072193702251506, |
|
"grad_norm": 0.7235260605812073, |
|
"learning_rate": 2.6400921732739193e-06, |
|
"loss": 1.893, |
|
"step": 16020 |
|
}, |
|
{ |
|
"epoch": 2.0747807106188607, |
|
"grad_norm": 0.7163159251213074, |
|
"learning_rate": 2.6268325383198223e-06, |
|
"loss": 1.9073, |
|
"step": 16040 |
|
}, |
|
{ |
|
"epoch": 2.077367718986216, |
|
"grad_norm": 0.749591052532196, |
|
"learning_rate": 2.6135944114276263e-06, |
|
"loss": 1.9073, |
|
"step": 16060 |
|
}, |
|
{ |
|
"epoch": 2.0799547273535715, |
|
"grad_norm": 0.7129009366035461, |
|
"learning_rate": 2.600377912574724e-06, |
|
"loss": 1.9067, |
|
"step": 16080 |
|
}, |
|
{ |
|
"epoch": 2.0825417357209264, |
|
"grad_norm": 0.7254235744476318, |
|
"learning_rate": 2.587183161542499e-06, |
|
"loss": 1.8991, |
|
"step": 16100 |
|
}, |
|
{ |
|
"epoch": 2.0851287440882817, |
|
"grad_norm": 0.7272098064422607, |
|
"learning_rate": 2.574010277915231e-06, |
|
"loss": 1.9026, |
|
"step": 16120 |
|
}, |
|
{ |
|
"epoch": 2.0877157524556367, |
|
"grad_norm": 0.7584415674209595, |
|
"learning_rate": 2.560859381079014e-06, |
|
"loss": 1.8804, |
|
"step": 16140 |
|
}, |
|
{ |
|
"epoch": 2.090302760822992, |
|
"grad_norm": 0.7325422763824463, |
|
"learning_rate": 2.5477305902206745e-06, |
|
"loss": 1.8983, |
|
"step": 16160 |
|
}, |
|
{ |
|
"epoch": 2.0928897691903474, |
|
"grad_norm": 0.7427433729171753, |
|
"learning_rate": 2.5346240243266982e-06, |
|
"loss": 1.9124, |
|
"step": 16180 |
|
}, |
|
{ |
|
"epoch": 2.0954767775577023, |
|
"grad_norm": 0.7172392010688782, |
|
"learning_rate": 2.521539802182138e-06, |
|
"loss": 1.9223, |
|
"step": 16200 |
|
}, |
|
{ |
|
"epoch": 2.0980637859250577, |
|
"grad_norm": 0.767247200012207, |
|
"learning_rate": 2.508478042369551e-06, |
|
"loss": 1.9028, |
|
"step": 16220 |
|
}, |
|
{ |
|
"epoch": 2.1006507942924126, |
|
"grad_norm": 0.7402515411376953, |
|
"learning_rate": 2.4954388632679126e-06, |
|
"loss": 1.9139, |
|
"step": 16240 |
|
}, |
|
{ |
|
"epoch": 2.103237802659768, |
|
"grad_norm": 0.7129824757575989, |
|
"learning_rate": 2.4824223830515544e-06, |
|
"loss": 1.9072, |
|
"step": 16260 |
|
}, |
|
{ |
|
"epoch": 2.1058248110271234, |
|
"grad_norm": 0.7000627517700195, |
|
"learning_rate": 2.469428719689081e-06, |
|
"loss": 1.9122, |
|
"step": 16280 |
|
}, |
|
{ |
|
"epoch": 2.1084118193944783, |
|
"grad_norm": 0.708697497844696, |
|
"learning_rate": 2.4564579909423144e-06, |
|
"loss": 1.9046, |
|
"step": 16300 |
|
}, |
|
{ |
|
"epoch": 2.1109988277618337, |
|
"grad_norm": 0.7344934344291687, |
|
"learning_rate": 2.443510314365213e-06, |
|
"loss": 1.909, |
|
"step": 16320 |
|
}, |
|
{ |
|
"epoch": 2.1135858361291886, |
|
"grad_norm": 0.7752679586410522, |
|
"learning_rate": 2.430585807302819e-06, |
|
"loss": 1.9077, |
|
"step": 16340 |
|
}, |
|
{ |
|
"epoch": 2.116172844496544, |
|
"grad_norm": 0.7711694240570068, |
|
"learning_rate": 2.4176845868901833e-06, |
|
"loss": 1.8833, |
|
"step": 16360 |
|
}, |
|
{ |
|
"epoch": 2.118759852863899, |
|
"grad_norm": 0.6925134062767029, |
|
"learning_rate": 2.404806770051315e-06, |
|
"loss": 1.916, |
|
"step": 16380 |
|
}, |
|
{ |
|
"epoch": 2.1213468612312543, |
|
"grad_norm": 0.7323116064071655, |
|
"learning_rate": 2.3919524734981104e-06, |
|
"loss": 1.8966, |
|
"step": 16400 |
|
}, |
|
{ |
|
"epoch": 2.1239338695986096, |
|
"grad_norm": 0.6923350691795349, |
|
"learning_rate": 2.3791218137293077e-06, |
|
"loss": 1.8859, |
|
"step": 16420 |
|
}, |
|
{ |
|
"epoch": 2.1265208779659646, |
|
"grad_norm": 0.7274029850959778, |
|
"learning_rate": 2.3663149070294163e-06, |
|
"loss": 1.904, |
|
"step": 16440 |
|
}, |
|
{ |
|
"epoch": 2.12910788633332, |
|
"grad_norm": 0.7480029463768005, |
|
"learning_rate": 2.3535318694676794e-06, |
|
"loss": 1.9303, |
|
"step": 16460 |
|
}, |
|
{ |
|
"epoch": 2.131694894700675, |
|
"grad_norm": 0.7643582224845886, |
|
"learning_rate": 2.340772816897007e-06, |
|
"loss": 1.9203, |
|
"step": 16480 |
|
}, |
|
{ |
|
"epoch": 2.1342819030680302, |
|
"grad_norm": 0.7323663830757141, |
|
"learning_rate": 2.328037864952938e-06, |
|
"loss": 1.9312, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 2.1368689114353856, |
|
"grad_norm": 0.6983356475830078, |
|
"learning_rate": 2.3153271290525814e-06, |
|
"loss": 1.901, |
|
"step": 16520 |
|
}, |
|
{ |
|
"epoch": 2.1394559198027405, |
|
"grad_norm": 0.7338689565658569, |
|
"learning_rate": 2.302640724393583e-06, |
|
"loss": 1.9059, |
|
"step": 16540 |
|
}, |
|
{ |
|
"epoch": 2.142042928170096, |
|
"grad_norm": 0.7170989513397217, |
|
"learning_rate": 2.289978765953065e-06, |
|
"loss": 1.9043, |
|
"step": 16560 |
|
}, |
|
{ |
|
"epoch": 2.144629936537451, |
|
"grad_norm": 0.7386534214019775, |
|
"learning_rate": 2.277341368486602e-06, |
|
"loss": 1.9012, |
|
"step": 16580 |
|
}, |
|
{ |
|
"epoch": 2.147216944904806, |
|
"grad_norm": 0.7534947991371155, |
|
"learning_rate": 2.264728646527166e-06, |
|
"loss": 1.9173, |
|
"step": 16600 |
|
}, |
|
{ |
|
"epoch": 2.1498039532721616, |
|
"grad_norm": 0.7366383075714111, |
|
"learning_rate": 2.252140714384096e-06, |
|
"loss": 1.9062, |
|
"step": 16620 |
|
}, |
|
{ |
|
"epoch": 2.1523909616395165, |
|
"grad_norm": 0.7437723875045776, |
|
"learning_rate": 2.23957768614206e-06, |
|
"loss": 1.9256, |
|
"step": 16640 |
|
}, |
|
{ |
|
"epoch": 2.154977970006872, |
|
"grad_norm": 0.7748574018478394, |
|
"learning_rate": 2.227039675660025e-06, |
|
"loss": 1.9139, |
|
"step": 16660 |
|
}, |
|
{ |
|
"epoch": 2.157564978374227, |
|
"grad_norm": 0.7447261810302734, |
|
"learning_rate": 2.214526796570215e-06, |
|
"loss": 1.8861, |
|
"step": 16680 |
|
}, |
|
{ |
|
"epoch": 2.160151986741582, |
|
"grad_norm": 0.7543417811393738, |
|
"learning_rate": 2.202039162277095e-06, |
|
"loss": 1.915, |
|
"step": 16700 |
|
}, |
|
{ |
|
"epoch": 2.1627389951089375, |
|
"grad_norm": 0.7496261596679688, |
|
"learning_rate": 2.18957688595633e-06, |
|
"loss": 1.8901, |
|
"step": 16720 |
|
}, |
|
{ |
|
"epoch": 2.1653260034762924, |
|
"grad_norm": 0.7722936272621155, |
|
"learning_rate": 2.1771400805537702e-06, |
|
"loss": 1.9219, |
|
"step": 16740 |
|
}, |
|
{ |
|
"epoch": 2.167913011843648, |
|
"grad_norm": 0.7838595509529114, |
|
"learning_rate": 2.164728858784416e-06, |
|
"loss": 1.8892, |
|
"step": 16760 |
|
}, |
|
{ |
|
"epoch": 2.1705000202110027, |
|
"grad_norm": 0.7225658893585205, |
|
"learning_rate": 2.1523433331314113e-06, |
|
"loss": 1.8933, |
|
"step": 16780 |
|
}, |
|
{ |
|
"epoch": 2.173087028578358, |
|
"grad_norm": 0.7566482424736023, |
|
"learning_rate": 2.1399836158450066e-06, |
|
"loss": 1.8978, |
|
"step": 16800 |
|
}, |
|
{ |
|
"epoch": 2.175674036945713, |
|
"grad_norm": 0.7426904439926147, |
|
"learning_rate": 2.1276498189415583e-06, |
|
"loss": 1.9037, |
|
"step": 16820 |
|
}, |
|
{ |
|
"epoch": 2.1782610453130684, |
|
"grad_norm": 0.7302761673927307, |
|
"learning_rate": 2.1153420542025e-06, |
|
"loss": 1.91, |
|
"step": 16840 |
|
}, |
|
{ |
|
"epoch": 2.180848053680424, |
|
"grad_norm": 0.7780102491378784, |
|
"learning_rate": 2.1030604331733412e-06, |
|
"loss": 1.8922, |
|
"step": 16860 |
|
}, |
|
{ |
|
"epoch": 2.1834350620477787, |
|
"grad_norm": 0.6786608099937439, |
|
"learning_rate": 2.090805067162643e-06, |
|
"loss": 1.8948, |
|
"step": 16880 |
|
}, |
|
{ |
|
"epoch": 2.186022070415134, |
|
"grad_norm": 0.7358562350273132, |
|
"learning_rate": 2.078576067241025e-06, |
|
"loss": 1.8864, |
|
"step": 16900 |
|
}, |
|
{ |
|
"epoch": 2.188609078782489, |
|
"grad_norm": 0.7617294192314148, |
|
"learning_rate": 2.0663735442401423e-06, |
|
"loss": 1.9153, |
|
"step": 16920 |
|
}, |
|
{ |
|
"epoch": 2.1911960871498444, |
|
"grad_norm": 0.7564135193824768, |
|
"learning_rate": 2.0541976087516972e-06, |
|
"loss": 1.9139, |
|
"step": 16940 |
|
}, |
|
{ |
|
"epoch": 2.1937830955171997, |
|
"grad_norm": 0.7490710020065308, |
|
"learning_rate": 2.042048371126422e-06, |
|
"loss": 1.8835, |
|
"step": 16960 |
|
}, |
|
{ |
|
"epoch": 2.1963701038845547, |
|
"grad_norm": 0.7540954947471619, |
|
"learning_rate": 2.0299259414730914e-06, |
|
"loss": 1.9067, |
|
"step": 16980 |
|
}, |
|
{ |
|
"epoch": 2.19895711225191, |
|
"grad_norm": 0.7352563142776489, |
|
"learning_rate": 2.017830429657513e-06, |
|
"loss": 1.9058, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 2.201544120619265, |
|
"grad_norm": 0.7499025464057922, |
|
"learning_rate": 2.0057619453015436e-06, |
|
"loss": 1.9248, |
|
"step": 17020 |
|
}, |
|
{ |
|
"epoch": 2.2041311289866203, |
|
"grad_norm": 0.6992461085319519, |
|
"learning_rate": 1.993720597782086e-06, |
|
"loss": 1.9128, |
|
"step": 17040 |
|
}, |
|
{ |
|
"epoch": 2.2067181373539757, |
|
"grad_norm": 0.7088660001754761, |
|
"learning_rate": 1.9817064962300998e-06, |
|
"loss": 1.9172, |
|
"step": 17060 |
|
}, |
|
{ |
|
"epoch": 2.2093051457213306, |
|
"grad_norm": 0.7242149114608765, |
|
"learning_rate": 1.9697197495296196e-06, |
|
"loss": 1.8821, |
|
"step": 17080 |
|
}, |
|
{ |
|
"epoch": 2.211892154088686, |
|
"grad_norm": 0.7427524328231812, |
|
"learning_rate": 1.957760466316757e-06, |
|
"loss": 1.9101, |
|
"step": 17100 |
|
}, |
|
{ |
|
"epoch": 2.214479162456041, |
|
"grad_norm": 0.7300739288330078, |
|
"learning_rate": 1.945828754978722e-06, |
|
"loss": 1.8993, |
|
"step": 17120 |
|
}, |
|
{ |
|
"epoch": 2.2170661708233963, |
|
"grad_norm": 0.7250590324401855, |
|
"learning_rate": 1.933924723652846e-06, |
|
"loss": 1.8905, |
|
"step": 17140 |
|
}, |
|
{ |
|
"epoch": 2.2196531791907512, |
|
"grad_norm": 0.6951375007629395, |
|
"learning_rate": 1.922048480225587e-06, |
|
"loss": 1.9272, |
|
"step": 17160 |
|
}, |
|
{ |
|
"epoch": 2.2222401875581066, |
|
"grad_norm": 0.7401907444000244, |
|
"learning_rate": 1.9102001323315693e-06, |
|
"loss": 1.9049, |
|
"step": 17180 |
|
}, |
|
{ |
|
"epoch": 2.224827195925462, |
|
"grad_norm": 0.7611769437789917, |
|
"learning_rate": 1.8983797873525916e-06, |
|
"loss": 1.8718, |
|
"step": 17200 |
|
}, |
|
{ |
|
"epoch": 2.227414204292817, |
|
"grad_norm": 0.7140897512435913, |
|
"learning_rate": 1.88658755241667e-06, |
|
"loss": 1.8931, |
|
"step": 17220 |
|
}, |
|
{ |
|
"epoch": 2.2300012126601723, |
|
"grad_norm": 0.7314221262931824, |
|
"learning_rate": 1.87482353439705e-06, |
|
"loss": 1.9083, |
|
"step": 17240 |
|
}, |
|
{ |
|
"epoch": 2.232588221027527, |
|
"grad_norm": 0.7419768571853638, |
|
"learning_rate": 1.8630878399112546e-06, |
|
"loss": 1.8963, |
|
"step": 17260 |
|
}, |
|
{ |
|
"epoch": 2.2351752293948826, |
|
"grad_norm": 0.7217703461647034, |
|
"learning_rate": 1.8513805753201026e-06, |
|
"loss": 1.9173, |
|
"step": 17280 |
|
}, |
|
{ |
|
"epoch": 2.237762237762238, |
|
"grad_norm": 0.7205759882926941, |
|
"learning_rate": 1.8397018467267603e-06, |
|
"loss": 1.889, |
|
"step": 17300 |
|
}, |
|
{ |
|
"epoch": 2.240349246129593, |
|
"grad_norm": 0.741363525390625, |
|
"learning_rate": 1.828051759975763e-06, |
|
"loss": 1.9146, |
|
"step": 17320 |
|
}, |
|
{ |
|
"epoch": 2.2429362544969482, |
|
"grad_norm": 0.6861704587936401, |
|
"learning_rate": 1.8164304206520728e-06, |
|
"loss": 1.8988, |
|
"step": 17340 |
|
}, |
|
{ |
|
"epoch": 2.245523262864303, |
|
"grad_norm": 0.7358688116073608, |
|
"learning_rate": 1.8048379340801075e-06, |
|
"loss": 1.9278, |
|
"step": 17360 |
|
}, |
|
{ |
|
"epoch": 2.2481102712316585, |
|
"grad_norm": 0.7279689311981201, |
|
"learning_rate": 1.7932744053227912e-06, |
|
"loss": 1.9001, |
|
"step": 17380 |
|
}, |
|
{ |
|
"epoch": 2.250697279599014, |
|
"grad_norm": 0.7442336678504944, |
|
"learning_rate": 1.78173993918061e-06, |
|
"loss": 1.8908, |
|
"step": 17400 |
|
}, |
|
{ |
|
"epoch": 2.253284287966369, |
|
"grad_norm": 0.7495877742767334, |
|
"learning_rate": 1.7702346401906456e-06, |
|
"loss": 1.9, |
|
"step": 17420 |
|
}, |
|
{ |
|
"epoch": 2.255871296333724, |
|
"grad_norm": 0.696934700012207, |
|
"learning_rate": 1.7587586126256463e-06, |
|
"loss": 1.9013, |
|
"step": 17440 |
|
}, |
|
{ |
|
"epoch": 2.258458304701079, |
|
"grad_norm": 0.7488421201705933, |
|
"learning_rate": 1.7473119604930638e-06, |
|
"loss": 1.9161, |
|
"step": 17460 |
|
}, |
|
{ |
|
"epoch": 2.2610453130684345, |
|
"grad_norm": 0.746863067150116, |
|
"learning_rate": 1.7358947875341293e-06, |
|
"loss": 1.8806, |
|
"step": 17480 |
|
}, |
|
{ |
|
"epoch": 2.2636323214357894, |
|
"grad_norm": 0.6953650712966919, |
|
"learning_rate": 1.7245071972228939e-06, |
|
"loss": 1.8927, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 2.2662193298031448, |
|
"grad_norm": 0.7111936807632446, |
|
"learning_rate": 1.7131492927653098e-06, |
|
"loss": 1.9135, |
|
"step": 17520 |
|
}, |
|
{ |
|
"epoch": 2.2688063381705, |
|
"grad_norm": 0.7535796761512756, |
|
"learning_rate": 1.7018211770982772e-06, |
|
"loss": 1.9041, |
|
"step": 17540 |
|
}, |
|
{ |
|
"epoch": 2.271393346537855, |
|
"grad_norm": 0.6831175088882446, |
|
"learning_rate": 1.6905229528887286e-06, |
|
"loss": 1.9107, |
|
"step": 17560 |
|
}, |
|
{ |
|
"epoch": 2.2739803549052104, |
|
"grad_norm": 0.7117632627487183, |
|
"learning_rate": 1.6792547225326827e-06, |
|
"loss": 1.9053, |
|
"step": 17580 |
|
}, |
|
{ |
|
"epoch": 2.276567363272566, |
|
"grad_norm": 0.7121444344520569, |
|
"learning_rate": 1.6680165881543243e-06, |
|
"loss": 1.9158, |
|
"step": 17600 |
|
}, |
|
{ |
|
"epoch": 2.2791543716399207, |
|
"grad_norm": 0.758989691734314, |
|
"learning_rate": 1.6568086516050823e-06, |
|
"loss": 1.8869, |
|
"step": 17620 |
|
}, |
|
{ |
|
"epoch": 2.281741380007276, |
|
"grad_norm": 0.745327889919281, |
|
"learning_rate": 1.645631014462698e-06, |
|
"loss": 1.9016, |
|
"step": 17640 |
|
}, |
|
{ |
|
"epoch": 2.284328388374631, |
|
"grad_norm": 0.7991662621498108, |
|
"learning_rate": 1.6344837780303075e-06, |
|
"loss": 1.8991, |
|
"step": 17660 |
|
}, |
|
{ |
|
"epoch": 2.2869153967419864, |
|
"grad_norm": 0.7338636517524719, |
|
"learning_rate": 1.623367043335531e-06, |
|
"loss": 1.9291, |
|
"step": 17680 |
|
}, |
|
{ |
|
"epoch": 2.2895024051093413, |
|
"grad_norm": 0.7263846397399902, |
|
"learning_rate": 1.6122809111295424e-06, |
|
"loss": 1.8736, |
|
"step": 17700 |
|
}, |
|
{ |
|
"epoch": 2.2920894134766967, |
|
"grad_norm": 0.7372823357582092, |
|
"learning_rate": 1.6012254818861729e-06, |
|
"loss": 1.9053, |
|
"step": 17720 |
|
}, |
|
{ |
|
"epoch": 2.294676421844052, |
|
"grad_norm": 0.7427831292152405, |
|
"learning_rate": 1.5902008558009851e-06, |
|
"loss": 1.9035, |
|
"step": 17740 |
|
}, |
|
{ |
|
"epoch": 2.297263430211407, |
|
"grad_norm": 0.6984022259712219, |
|
"learning_rate": 1.579207132790378e-06, |
|
"loss": 1.8995, |
|
"step": 17760 |
|
}, |
|
{ |
|
"epoch": 2.2998504385787624, |
|
"grad_norm": 0.7570576667785645, |
|
"learning_rate": 1.56824441249067e-06, |
|
"loss": 1.8999, |
|
"step": 17780 |
|
}, |
|
{ |
|
"epoch": 2.3024374469461173, |
|
"grad_norm": 0.7958073616027832, |
|
"learning_rate": 1.5573127942572064e-06, |
|
"loss": 1.8975, |
|
"step": 17800 |
|
}, |
|
{ |
|
"epoch": 2.3050244553134727, |
|
"grad_norm": 0.7173560857772827, |
|
"learning_rate": 1.5464123771634488e-06, |
|
"loss": 1.9026, |
|
"step": 17820 |
|
}, |
|
{ |
|
"epoch": 2.307611463680828, |
|
"grad_norm": 0.7174249291419983, |
|
"learning_rate": 1.5355432600000874e-06, |
|
"loss": 1.94, |
|
"step": 17840 |
|
}, |
|
{ |
|
"epoch": 2.310198472048183, |
|
"grad_norm": 0.727505087852478, |
|
"learning_rate": 1.5247055412741357e-06, |
|
"loss": 1.9025, |
|
"step": 17860 |
|
}, |
|
{ |
|
"epoch": 2.3127854804155383, |
|
"grad_norm": 0.7467734217643738, |
|
"learning_rate": 1.5138993192080476e-06, |
|
"loss": 1.9112, |
|
"step": 17880 |
|
}, |
|
{ |
|
"epoch": 2.3153724887828933, |
|
"grad_norm": 0.7557691931724548, |
|
"learning_rate": 1.5031246917388164e-06, |
|
"loss": 1.9124, |
|
"step": 17900 |
|
}, |
|
{ |
|
"epoch": 2.3179594971502486, |
|
"grad_norm": 0.7691644430160522, |
|
"learning_rate": 1.492381756517099e-06, |
|
"loss": 1.9019, |
|
"step": 17920 |
|
}, |
|
{ |
|
"epoch": 2.320546505517604, |
|
"grad_norm": 0.7507399320602417, |
|
"learning_rate": 1.4816706109063194e-06, |
|
"loss": 1.9176, |
|
"step": 17940 |
|
}, |
|
{ |
|
"epoch": 2.323133513884959, |
|
"grad_norm": 0.7393172979354858, |
|
"learning_rate": 1.4709913519817964e-06, |
|
"loss": 1.9091, |
|
"step": 17960 |
|
}, |
|
{ |
|
"epoch": 2.3257205222523143, |
|
"grad_norm": 0.7320428490638733, |
|
"learning_rate": 1.460344076529855e-06, |
|
"loss": 1.8969, |
|
"step": 17980 |
|
}, |
|
{ |
|
"epoch": 2.328307530619669, |
|
"grad_norm": 0.7323318719863892, |
|
"learning_rate": 1.449728881046958e-06, |
|
"loss": 1.8922, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 2.3308945389870246, |
|
"grad_norm": 0.7763849496841431, |
|
"learning_rate": 1.4391458617388204e-06, |
|
"loss": 1.9011, |
|
"step": 18020 |
|
}, |
|
{ |
|
"epoch": 2.3334815473543795, |
|
"grad_norm": 0.7710480690002441, |
|
"learning_rate": 1.428595114519552e-06, |
|
"loss": 1.8881, |
|
"step": 18040 |
|
}, |
|
{ |
|
"epoch": 2.336068555721735, |
|
"grad_norm": 0.7429326772689819, |
|
"learning_rate": 1.4180767350107732e-06, |
|
"loss": 1.8887, |
|
"step": 18060 |
|
}, |
|
{ |
|
"epoch": 2.3386555640890903, |
|
"grad_norm": 0.7290446758270264, |
|
"learning_rate": 1.4075908185407588e-06, |
|
"loss": 1.8949, |
|
"step": 18080 |
|
}, |
|
{ |
|
"epoch": 2.341242572456445, |
|
"grad_norm": 0.7395162582397461, |
|
"learning_rate": 1.3971374601435673e-06, |
|
"loss": 1.9096, |
|
"step": 18100 |
|
}, |
|
{ |
|
"epoch": 2.3438295808238006, |
|
"grad_norm": 0.7031149864196777, |
|
"learning_rate": 1.3867167545581884e-06, |
|
"loss": 1.8768, |
|
"step": 18120 |
|
}, |
|
{ |
|
"epoch": 2.3464165891911555, |
|
"grad_norm": 0.7703959941864014, |
|
"learning_rate": 1.3763287962276718e-06, |
|
"loss": 1.9192, |
|
"step": 18140 |
|
}, |
|
{ |
|
"epoch": 2.349003597558511, |
|
"grad_norm": 0.7346245050430298, |
|
"learning_rate": 1.3659736792982842e-06, |
|
"loss": 1.8983, |
|
"step": 18160 |
|
}, |
|
{ |
|
"epoch": 2.351590605925866, |
|
"grad_norm": 0.763273298740387, |
|
"learning_rate": 1.3556514976186459e-06, |
|
"loss": 1.8667, |
|
"step": 18180 |
|
}, |
|
{ |
|
"epoch": 2.354177614293221, |
|
"grad_norm": 0.71233731508255, |
|
"learning_rate": 1.3453623447388874e-06, |
|
"loss": 1.878, |
|
"step": 18200 |
|
}, |
|
{ |
|
"epoch": 2.3567646226605765, |
|
"grad_norm": 0.7411478757858276, |
|
"learning_rate": 1.3351063139097958e-06, |
|
"loss": 1.9045, |
|
"step": 18220 |
|
}, |
|
{ |
|
"epoch": 2.3593516310279314, |
|
"grad_norm": 0.7366203665733337, |
|
"learning_rate": 1.3248834980819764e-06, |
|
"loss": 1.8799, |
|
"step": 18240 |
|
}, |
|
{ |
|
"epoch": 2.361938639395287, |
|
"grad_norm": 0.7252218127250671, |
|
"learning_rate": 1.3146939899049999e-06, |
|
"loss": 1.8993, |
|
"step": 18260 |
|
}, |
|
{ |
|
"epoch": 2.364525647762642, |
|
"grad_norm": 0.7742691040039062, |
|
"learning_rate": 1.3045378817265764e-06, |
|
"loss": 1.9056, |
|
"step": 18280 |
|
}, |
|
{ |
|
"epoch": 2.367112656129997, |
|
"grad_norm": 0.802935004234314, |
|
"learning_rate": 1.2944152655917053e-06, |
|
"loss": 1.8976, |
|
"step": 18300 |
|
}, |
|
{ |
|
"epoch": 2.3696996644973525, |
|
"grad_norm": 0.786408007144928, |
|
"learning_rate": 1.2843262332418515e-06, |
|
"loss": 1.9044, |
|
"step": 18320 |
|
}, |
|
{ |
|
"epoch": 2.3722866728647074, |
|
"grad_norm": 0.7235627174377441, |
|
"learning_rate": 1.274270876114105e-06, |
|
"loss": 1.8944, |
|
"step": 18340 |
|
}, |
|
{ |
|
"epoch": 2.3748736812320628, |
|
"grad_norm": 0.6925949454307556, |
|
"learning_rate": 1.2642492853403614e-06, |
|
"loss": 1.9076, |
|
"step": 18360 |
|
}, |
|
{ |
|
"epoch": 2.3774606895994177, |
|
"grad_norm": 0.7337120175361633, |
|
"learning_rate": 1.2542615517464863e-06, |
|
"loss": 1.893, |
|
"step": 18380 |
|
}, |
|
{ |
|
"epoch": 2.380047697966773, |
|
"grad_norm": 0.7587104439735413, |
|
"learning_rate": 1.2443077658515023e-06, |
|
"loss": 1.9071, |
|
"step": 18400 |
|
}, |
|
{ |
|
"epoch": 2.3826347063341284, |
|
"grad_norm": 0.7254394292831421, |
|
"learning_rate": 1.2343880178667572e-06, |
|
"loss": 1.9028, |
|
"step": 18420 |
|
}, |
|
{ |
|
"epoch": 2.3852217147014834, |
|
"grad_norm": 0.6892567873001099, |
|
"learning_rate": 1.2245023976951186e-06, |
|
"loss": 1.8888, |
|
"step": 18440 |
|
}, |
|
{ |
|
"epoch": 2.3878087230688387, |
|
"grad_norm": 0.7421708106994629, |
|
"learning_rate": 1.2146509949301488e-06, |
|
"loss": 1.8796, |
|
"step": 18460 |
|
}, |
|
{ |
|
"epoch": 2.390395731436194, |
|
"grad_norm": 0.6860731840133667, |
|
"learning_rate": 1.2048338988552998e-06, |
|
"loss": 1.907, |
|
"step": 18480 |
|
}, |
|
{ |
|
"epoch": 2.392982739803549, |
|
"grad_norm": 0.7742668390274048, |
|
"learning_rate": 1.195051198443099e-06, |
|
"loss": 1.9043, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 2.3955697481709044, |
|
"grad_norm": 0.7003529071807861, |
|
"learning_rate": 1.1853029823543483e-06, |
|
"loss": 1.9085, |
|
"step": 18520 |
|
}, |
|
{ |
|
"epoch": 2.3981567565382593, |
|
"grad_norm": 0.7493278980255127, |
|
"learning_rate": 1.1755893389373136e-06, |
|
"loss": 1.8932, |
|
"step": 18540 |
|
}, |
|
{ |
|
"epoch": 2.4007437649056147, |
|
"grad_norm": 0.7520459294319153, |
|
"learning_rate": 1.1659103562269341e-06, |
|
"loss": 1.9133, |
|
"step": 18560 |
|
}, |
|
{ |
|
"epoch": 2.4033307732729696, |
|
"grad_norm": 0.7517881393432617, |
|
"learning_rate": 1.1562661219440086e-06, |
|
"loss": 1.9106, |
|
"step": 18580 |
|
}, |
|
{ |
|
"epoch": 2.405917781640325, |
|
"grad_norm": 0.7111313939094543, |
|
"learning_rate": 1.1466567234944203e-06, |
|
"loss": 1.8939, |
|
"step": 18600 |
|
}, |
|
{ |
|
"epoch": 2.4085047900076804, |
|
"grad_norm": 0.7109440565109253, |
|
"learning_rate": 1.137082247968328e-06, |
|
"loss": 1.8876, |
|
"step": 18620 |
|
}, |
|
{ |
|
"epoch": 2.4110917983750353, |
|
"grad_norm": 0.7710434794425964, |
|
"learning_rate": 1.1275427821393881e-06, |
|
"loss": 1.8838, |
|
"step": 18640 |
|
}, |
|
{ |
|
"epoch": 2.4136788067423907, |
|
"grad_norm": 0.7403404116630554, |
|
"learning_rate": 1.1180384124639586e-06, |
|
"loss": 1.8942, |
|
"step": 18660 |
|
}, |
|
{ |
|
"epoch": 2.4162658151097456, |
|
"grad_norm": 0.7132770419120789, |
|
"learning_rate": 1.1085692250803253e-06, |
|
"loss": 1.89, |
|
"step": 18680 |
|
}, |
|
{ |
|
"epoch": 2.418852823477101, |
|
"grad_norm": 0.7404444813728333, |
|
"learning_rate": 1.0991353058079113e-06, |
|
"loss": 1.9129, |
|
"step": 18700 |
|
}, |
|
{ |
|
"epoch": 2.421439831844456, |
|
"grad_norm": 0.7412508130073547, |
|
"learning_rate": 1.0897367401465098e-06, |
|
"loss": 1.884, |
|
"step": 18720 |
|
}, |
|
{ |
|
"epoch": 2.4240268402118113, |
|
"grad_norm": 0.6984736919403076, |
|
"learning_rate": 1.0803736132754966e-06, |
|
"loss": 1.8799, |
|
"step": 18740 |
|
}, |
|
{ |
|
"epoch": 2.4266138485791666, |
|
"grad_norm": 0.7330641150474548, |
|
"learning_rate": 1.0710460100530717e-06, |
|
"loss": 1.8967, |
|
"step": 18760 |
|
}, |
|
{ |
|
"epoch": 2.4292008569465215, |
|
"grad_norm": 0.7019391059875488, |
|
"learning_rate": 1.0617540150154792e-06, |
|
"loss": 1.8959, |
|
"step": 18780 |
|
}, |
|
{ |
|
"epoch": 2.431787865313877, |
|
"grad_norm": 0.7014403939247131, |
|
"learning_rate": 1.0524977123762487e-06, |
|
"loss": 1.8727, |
|
"step": 18800 |
|
}, |
|
{ |
|
"epoch": 2.4343748736812323, |
|
"grad_norm": 0.7545526623725891, |
|
"learning_rate": 1.0432771860254242e-06, |
|
"loss": 1.8853, |
|
"step": 18820 |
|
}, |
|
{ |
|
"epoch": 2.436961882048587, |
|
"grad_norm": 0.7624025940895081, |
|
"learning_rate": 1.0340925195288158e-06, |
|
"loss": 1.8764, |
|
"step": 18840 |
|
}, |
|
{ |
|
"epoch": 2.4395488904159426, |
|
"grad_norm": 0.7201825380325317, |
|
"learning_rate": 1.0249437961272273e-06, |
|
"loss": 1.91, |
|
"step": 18860 |
|
}, |
|
{ |
|
"epoch": 2.4421358987832975, |
|
"grad_norm": 0.7349814772605896, |
|
"learning_rate": 1.0158310987357157e-06, |
|
"loss": 1.8925, |
|
"step": 18880 |
|
}, |
|
{ |
|
"epoch": 2.444722907150653, |
|
"grad_norm": 0.7258120775222778, |
|
"learning_rate": 1.00675450994283e-06, |
|
"loss": 1.8917, |
|
"step": 18900 |
|
}, |
|
{ |
|
"epoch": 2.447309915518008, |
|
"grad_norm": 0.7066252827644348, |
|
"learning_rate": 9.977141120098655e-07, |
|
"loss": 1.892, |
|
"step": 18920 |
|
}, |
|
{ |
|
"epoch": 2.449896923885363, |
|
"grad_norm": 0.8023087382316589, |
|
"learning_rate": 9.887099868701239e-07, |
|
"loss": 1.8958, |
|
"step": 18940 |
|
}, |
|
{ |
|
"epoch": 2.4524839322527185, |
|
"grad_norm": 0.7593100666999817, |
|
"learning_rate": 9.7974221612816e-07, |
|
"loss": 1.8976, |
|
"step": 18960 |
|
}, |
|
{ |
|
"epoch": 2.4550709406200735, |
|
"grad_norm": 0.7671648859977722, |
|
"learning_rate": 9.70810881059054e-07, |
|
"loss": 1.8987, |
|
"step": 18980 |
|
}, |
|
{ |
|
"epoch": 2.457657948987429, |
|
"grad_norm": 0.7328961491584778, |
|
"learning_rate": 9.619160626076617e-07, |
|
"loss": 1.9158, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 2.4602449573547838, |
|
"grad_norm": 0.7224782109260559, |
|
"learning_rate": 9.530578413878961e-07, |
|
"loss": 1.8975, |
|
"step": 19020 |
|
}, |
|
{ |
|
"epoch": 2.462831965722139, |
|
"grad_norm": 0.7289260029792786, |
|
"learning_rate": 9.44236297681983e-07, |
|
"loss": 1.8897, |
|
"step": 19040 |
|
}, |
|
{ |
|
"epoch": 2.4654189740894945, |
|
"grad_norm": 0.7315030097961426, |
|
"learning_rate": 9.354515114397394e-07, |
|
"loss": 1.8947, |
|
"step": 19060 |
|
}, |
|
{ |
|
"epoch": 2.4680059824568494, |
|
"grad_norm": 0.7496863603591919, |
|
"learning_rate": 9.267035622778537e-07, |
|
"loss": 1.8774, |
|
"step": 19080 |
|
}, |
|
{ |
|
"epoch": 2.470592990824205, |
|
"grad_norm": 0.7712503671646118, |
|
"learning_rate": 9.179925294791525e-07, |
|
"loss": 1.9028, |
|
"step": 19100 |
|
}, |
|
{ |
|
"epoch": 2.4731799991915597, |
|
"grad_norm": 0.7254078388214111, |
|
"learning_rate": 9.093184919918946e-07, |
|
"loss": 1.89, |
|
"step": 19120 |
|
}, |
|
{ |
|
"epoch": 2.475767007558915, |
|
"grad_norm": 0.7153763175010681, |
|
"learning_rate": 9.006815284290443e-07, |
|
"loss": 1.9134, |
|
"step": 19140 |
|
}, |
|
{ |
|
"epoch": 2.4783540159262705, |
|
"grad_norm": 0.7105687856674194, |
|
"learning_rate": 8.9208171706757e-07, |
|
"loss": 1.8847, |
|
"step": 19160 |
|
}, |
|
{ |
|
"epoch": 2.4809410242936254, |
|
"grad_norm": 0.7396937608718872, |
|
"learning_rate": 8.835191358477235e-07, |
|
"loss": 1.8773, |
|
"step": 19180 |
|
}, |
|
{ |
|
"epoch": 2.4835280326609808, |
|
"grad_norm": 0.7564929723739624, |
|
"learning_rate": 8.749938623723415e-07, |
|
"loss": 1.8849, |
|
"step": 19200 |
|
}, |
|
{ |
|
"epoch": 2.4861150410283357, |
|
"grad_norm": 0.7557162642478943, |
|
"learning_rate": 8.665059739061415e-07, |
|
"loss": 1.8897, |
|
"step": 19220 |
|
}, |
|
{ |
|
"epoch": 2.488702049395691, |
|
"grad_norm": 0.7343015074729919, |
|
"learning_rate": 8.580555473750163e-07, |
|
"loss": 1.8965, |
|
"step": 19240 |
|
}, |
|
{ |
|
"epoch": 2.491289057763046, |
|
"grad_norm": 0.7815112471580505, |
|
"learning_rate": 8.49642659365344e-07, |
|
"loss": 1.9111, |
|
"step": 19260 |
|
}, |
|
{ |
|
"epoch": 2.4938760661304014, |
|
"grad_norm": 0.7031825184822083, |
|
"learning_rate": 8.412673861232878e-07, |
|
"loss": 1.8853, |
|
"step": 19280 |
|
}, |
|
{ |
|
"epoch": 2.4964630744977567, |
|
"grad_norm": 0.7057119607925415, |
|
"learning_rate": 8.3292980355411e-07, |
|
"loss": 1.8777, |
|
"step": 19300 |
|
}, |
|
{ |
|
"epoch": 2.4990500828651117, |
|
"grad_norm": 0.7549076676368713, |
|
"learning_rate": 8.24629987221478e-07, |
|
"loss": 1.8944, |
|
"step": 19320 |
|
}, |
|
{ |
|
"epoch": 2.501637091232467, |
|
"grad_norm": 0.7640539407730103, |
|
"learning_rate": 8.163680123467876e-07, |
|
"loss": 1.8973, |
|
"step": 19340 |
|
}, |
|
{ |
|
"epoch": 2.5042240995998224, |
|
"grad_norm": 0.7702010273933411, |
|
"learning_rate": 8.081439538084723e-07, |
|
"loss": 1.8835, |
|
"step": 19360 |
|
}, |
|
{ |
|
"epoch": 2.5068111079671773, |
|
"grad_norm": 0.6983287930488586, |
|
"learning_rate": 7.999578861413338e-07, |
|
"loss": 1.9018, |
|
"step": 19380 |
|
}, |
|
{ |
|
"epoch": 2.5093981163345322, |
|
"grad_norm": 0.73322594165802, |
|
"learning_rate": 7.918098835358567e-07, |
|
"loss": 1.899, |
|
"step": 19400 |
|
}, |
|
{ |
|
"epoch": 2.5119851247018876, |
|
"grad_norm": 0.7135536074638367, |
|
"learning_rate": 7.83700019837546e-07, |
|
"loss": 1.8904, |
|
"step": 19420 |
|
}, |
|
{ |
|
"epoch": 2.514572133069243, |
|
"grad_norm": 0.7201595902442932, |
|
"learning_rate": 7.756283685462485e-07, |
|
"loss": 1.8957, |
|
"step": 19440 |
|
}, |
|
{ |
|
"epoch": 2.517159141436598, |
|
"grad_norm": 0.6984549164772034, |
|
"learning_rate": 7.675950028154955e-07, |
|
"loss": 1.8826, |
|
"step": 19460 |
|
}, |
|
{ |
|
"epoch": 2.5197461498039533, |
|
"grad_norm": 0.766971230506897, |
|
"learning_rate": 7.595999954518323e-07, |
|
"loss": 1.8885, |
|
"step": 19480 |
|
}, |
|
{ |
|
"epoch": 2.5223331581713087, |
|
"grad_norm": 0.7196653485298157, |
|
"learning_rate": 7.516434189141647e-07, |
|
"loss": 1.8853, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 2.5249201665386636, |
|
"grad_norm": 0.7349998354911804, |
|
"learning_rate": 7.437253453130944e-07, |
|
"loss": 1.898, |
|
"step": 19520 |
|
}, |
|
{ |
|
"epoch": 2.527507174906019, |
|
"grad_norm": 0.720393180847168, |
|
"learning_rate": 7.35845846410278e-07, |
|
"loss": 1.9014, |
|
"step": 19540 |
|
}, |
|
{ |
|
"epoch": 2.530094183273374, |
|
"grad_norm": 0.728197455406189, |
|
"learning_rate": 7.280049936177586e-07, |
|
"loss": 1.872, |
|
"step": 19560 |
|
}, |
|
{ |
|
"epoch": 2.5326811916407292, |
|
"grad_norm": 0.7547232508659363, |
|
"learning_rate": 7.20202857997338e-07, |
|
"loss": 1.9024, |
|
"step": 19580 |
|
}, |
|
{ |
|
"epoch": 2.535268200008084, |
|
"grad_norm": 0.7494405508041382, |
|
"learning_rate": 7.124395102599175e-07, |
|
"loss": 1.8909, |
|
"step": 19600 |
|
}, |
|
{ |
|
"epoch": 2.5378552083754395, |
|
"grad_norm": 0.7292266488075256, |
|
"learning_rate": 7.047150207648684e-07, |
|
"loss": 1.8873, |
|
"step": 19620 |
|
}, |
|
{ |
|
"epoch": 2.540442216742795, |
|
"grad_norm": 0.7228005528450012, |
|
"learning_rate": 6.970294595193833e-07, |
|
"loss": 1.8854, |
|
"step": 19640 |
|
}, |
|
{ |
|
"epoch": 2.54302922511015, |
|
"grad_norm": 0.7293912172317505, |
|
"learning_rate": 6.893828961778521e-07, |
|
"loss": 1.8978, |
|
"step": 19660 |
|
}, |
|
{ |
|
"epoch": 2.545616233477505, |
|
"grad_norm": 0.7395282983779907, |
|
"learning_rate": 6.817754000412219e-07, |
|
"loss": 1.901, |
|
"step": 19680 |
|
}, |
|
{ |
|
"epoch": 2.5482032418448606, |
|
"grad_norm": 0.7036247849464417, |
|
"learning_rate": 6.742070400563783e-07, |
|
"loss": 1.8967, |
|
"step": 19700 |
|
}, |
|
{ |
|
"epoch": 2.5507902502122155, |
|
"grad_norm": 0.7170465588569641, |
|
"learning_rate": 6.666778848155081e-07, |
|
"loss": 1.8928, |
|
"step": 19720 |
|
}, |
|
{ |
|
"epoch": 2.553377258579571, |
|
"grad_norm": 0.7328929901123047, |
|
"learning_rate": 6.591880025554925e-07, |
|
"loss": 1.8919, |
|
"step": 19740 |
|
}, |
|
{ |
|
"epoch": 2.555964266946926, |
|
"grad_norm": 0.7401189804077148, |
|
"learning_rate": 6.517374611572736e-07, |
|
"loss": 1.9026, |
|
"step": 19760 |
|
}, |
|
{ |
|
"epoch": 2.558551275314281, |
|
"grad_norm": 0.7038640379905701, |
|
"learning_rate": 6.443263281452522e-07, |
|
"loss": 1.8732, |
|
"step": 19780 |
|
}, |
|
{ |
|
"epoch": 2.561138283681636, |
|
"grad_norm": 0.7266473770141602, |
|
"learning_rate": 6.36954670686667e-07, |
|
"loss": 1.885, |
|
"step": 19800 |
|
}, |
|
{ |
|
"epoch": 2.5637252920489915, |
|
"grad_norm": 0.736526370048523, |
|
"learning_rate": 6.296225555909907e-07, |
|
"loss": 1.8892, |
|
"step": 19820 |
|
}, |
|
{ |
|
"epoch": 2.566312300416347, |
|
"grad_norm": 0.708966851234436, |
|
"learning_rate": 6.223300493093203e-07, |
|
"loss": 1.9103, |
|
"step": 19840 |
|
}, |
|
{ |
|
"epoch": 2.5688993087837018, |
|
"grad_norm": 0.6924936175346375, |
|
"learning_rate": 6.150772179337821e-07, |
|
"loss": 1.8836, |
|
"step": 19860 |
|
}, |
|
{ |
|
"epoch": 2.571486317151057, |
|
"grad_norm": 0.7396383881568909, |
|
"learning_rate": 6.078641271969215e-07, |
|
"loss": 1.8733, |
|
"step": 19880 |
|
}, |
|
{ |
|
"epoch": 2.574073325518412, |
|
"grad_norm": 0.725099503993988, |
|
"learning_rate": 6.0069084247112e-07, |
|
"loss": 1.8965, |
|
"step": 19900 |
|
}, |
|
{ |
|
"epoch": 2.5766603338857674, |
|
"grad_norm": 0.7220191359519958, |
|
"learning_rate": 5.935574287679918e-07, |
|
"loss": 1.8797, |
|
"step": 19920 |
|
}, |
|
{ |
|
"epoch": 2.5792473422531224, |
|
"grad_norm": 0.691255509853363, |
|
"learning_rate": 5.864639507378029e-07, |
|
"loss": 1.9024, |
|
"step": 19940 |
|
}, |
|
{ |
|
"epoch": 2.5818343506204777, |
|
"grad_norm": 0.7219594717025757, |
|
"learning_rate": 5.794104726688777e-07, |
|
"loss": 1.8747, |
|
"step": 19960 |
|
}, |
|
{ |
|
"epoch": 2.584421358987833, |
|
"grad_norm": 0.7724885940551758, |
|
"learning_rate": 5.723970584870247e-07, |
|
"loss": 1.8878, |
|
"step": 19980 |
|
}, |
|
{ |
|
"epoch": 2.587008367355188, |
|
"grad_norm": 0.6879153251647949, |
|
"learning_rate": 5.654237717549477e-07, |
|
"loss": 1.8906, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 2.5895953757225434, |
|
"grad_norm": 0.6801023483276367, |
|
"learning_rate": 5.584906756716802e-07, |
|
"loss": 1.8773, |
|
"step": 20020 |
|
}, |
|
{ |
|
"epoch": 2.5921823840898988, |
|
"grad_norm": 0.6910654902458191, |
|
"learning_rate": 5.515978330720001e-07, |
|
"loss": 1.8824, |
|
"step": 20040 |
|
}, |
|
{ |
|
"epoch": 2.5947693924572537, |
|
"grad_norm": 0.7275547981262207, |
|
"learning_rate": 5.44745306425874e-07, |
|
"loss": 1.9134, |
|
"step": 20060 |
|
}, |
|
{ |
|
"epoch": 2.597356400824609, |
|
"grad_norm": 0.720346987247467, |
|
"learning_rate": 5.379331578378799e-07, |
|
"loss": 1.884, |
|
"step": 20080 |
|
}, |
|
{ |
|
"epoch": 2.599943409191964, |
|
"grad_norm": 0.701957643032074, |
|
"learning_rate": 5.311614490466515e-07, |
|
"loss": 1.8899, |
|
"step": 20100 |
|
}, |
|
{ |
|
"epoch": 2.6025304175593194, |
|
"grad_norm": 0.7209474444389343, |
|
"learning_rate": 5.244302414243136e-07, |
|
"loss": 1.8712, |
|
"step": 20120 |
|
}, |
|
{ |
|
"epoch": 2.6051174259266743, |
|
"grad_norm": 0.7242286205291748, |
|
"learning_rate": 5.177395959759296e-07, |
|
"loss": 1.902, |
|
"step": 20140 |
|
}, |
|
{ |
|
"epoch": 2.6077044342940296, |
|
"grad_norm": 0.704704225063324, |
|
"learning_rate": 5.110895733389459e-07, |
|
"loss": 1.8811, |
|
"step": 20160 |
|
}, |
|
{ |
|
"epoch": 2.610291442661385, |
|
"grad_norm": 0.7373464703559875, |
|
"learning_rate": 5.044802337826454e-07, |
|
"loss": 1.8687, |
|
"step": 20180 |
|
}, |
|
{ |
|
"epoch": 2.61287845102874, |
|
"grad_norm": 0.7121253609657288, |
|
"learning_rate": 4.979116372075971e-07, |
|
"loss": 1.9037, |
|
"step": 20200 |
|
}, |
|
{ |
|
"epoch": 2.6154654593960953, |
|
"grad_norm": 0.710035502910614, |
|
"learning_rate": 4.913838431451185e-07, |
|
"loss": 1.8841, |
|
"step": 20220 |
|
}, |
|
{ |
|
"epoch": 2.6180524677634507, |
|
"grad_norm": 0.7388237714767456, |
|
"learning_rate": 4.848969107567292e-07, |
|
"loss": 1.8786, |
|
"step": 20240 |
|
}, |
|
{ |
|
"epoch": 2.6206394761308056, |
|
"grad_norm": 0.7122521996498108, |
|
"learning_rate": 4.784508988336234e-07, |
|
"loss": 1.8869, |
|
"step": 20260 |
|
}, |
|
{ |
|
"epoch": 2.6232264844981605, |
|
"grad_norm": 0.7517115473747253, |
|
"learning_rate": 4.720458657961274e-07, |
|
"loss": 1.8865, |
|
"step": 20280 |
|
}, |
|
{ |
|
"epoch": 2.625813492865516, |
|
"grad_norm": 0.7097986936569214, |
|
"learning_rate": 4.656818696931809e-07, |
|
"loss": 1.8914, |
|
"step": 20300 |
|
}, |
|
{ |
|
"epoch": 2.6284005012328713, |
|
"grad_norm": 0.790813684463501, |
|
"learning_rate": 4.5935896820179794e-07, |
|
"loss": 1.8979, |
|
"step": 20320 |
|
}, |
|
{ |
|
"epoch": 2.630987509600226, |
|
"grad_norm": 0.730335533618927, |
|
"learning_rate": 4.5307721862655863e-07, |
|
"loss": 1.8907, |
|
"step": 20340 |
|
}, |
|
{ |
|
"epoch": 2.6335745179675816, |
|
"grad_norm": 0.7112900614738464, |
|
"learning_rate": 4.4683667789907625e-07, |
|
"loss": 1.8954, |
|
"step": 20360 |
|
}, |
|
{ |
|
"epoch": 2.636161526334937, |
|
"grad_norm": 0.7568684816360474, |
|
"learning_rate": 4.406374025774923e-07, |
|
"loss": 1.9078, |
|
"step": 20380 |
|
}, |
|
{ |
|
"epoch": 2.638748534702292, |
|
"grad_norm": 0.7176088094711304, |
|
"learning_rate": 4.344794488459553e-07, |
|
"loss": 1.8742, |
|
"step": 20400 |
|
}, |
|
{ |
|
"epoch": 2.6413355430696472, |
|
"grad_norm": 0.7279245257377625, |
|
"learning_rate": 4.2836287251411924e-07, |
|
"loss": 1.8936, |
|
"step": 20420 |
|
}, |
|
{ |
|
"epoch": 2.643922551437002, |
|
"grad_norm": 0.7371872663497925, |
|
"learning_rate": 4.222877290166305e-07, |
|
"loss": 1.9029, |
|
"step": 20440 |
|
}, |
|
{ |
|
"epoch": 2.6465095598043575, |
|
"grad_norm": 0.7651500105857849, |
|
"learning_rate": 4.162540734126297e-07, |
|
"loss": 1.8744, |
|
"step": 20460 |
|
}, |
|
{ |
|
"epoch": 2.6490965681717125, |
|
"grad_norm": 0.7438062429428101, |
|
"learning_rate": 4.1026196038525423e-07, |
|
"loss": 1.8921, |
|
"step": 20480 |
|
}, |
|
{ |
|
"epoch": 2.651683576539068, |
|
"grad_norm": 0.7692246437072754, |
|
"learning_rate": 4.043114442411372e-07, |
|
"loss": 1.8851, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 2.654270584906423, |
|
"grad_norm": 0.6949123740196228, |
|
"learning_rate": 3.984025789099205e-07, |
|
"loss": 1.8709, |
|
"step": 20520 |
|
}, |
|
{ |
|
"epoch": 2.656857593273778, |
|
"grad_norm": 0.764893651008606, |
|
"learning_rate": 3.9253541794376347e-07, |
|
"loss": 1.8924, |
|
"step": 20540 |
|
}, |
|
{ |
|
"epoch": 2.6594446016411335, |
|
"grad_norm": 0.6654639840126038, |
|
"learning_rate": 3.8671001451685564e-07, |
|
"loss": 1.8983, |
|
"step": 20560 |
|
}, |
|
{ |
|
"epoch": 2.662031610008489, |
|
"grad_norm": 0.7003082036972046, |
|
"learning_rate": 3.80926421424942e-07, |
|
"loss": 1.8793, |
|
"step": 20580 |
|
}, |
|
{ |
|
"epoch": 2.664618618375844, |
|
"grad_norm": 0.7128967046737671, |
|
"learning_rate": 3.7518469108483445e-07, |
|
"loss": 1.9077, |
|
"step": 20600 |
|
}, |
|
{ |
|
"epoch": 2.6672056267431987, |
|
"grad_norm": 0.6979056596755981, |
|
"learning_rate": 3.6948487553394585e-07, |
|
"loss": 1.8935, |
|
"step": 20620 |
|
}, |
|
{ |
|
"epoch": 2.669792635110554, |
|
"grad_norm": 0.7059783935546875, |
|
"learning_rate": 3.638270264298127e-07, |
|
"loss": 1.9103, |
|
"step": 20640 |
|
}, |
|
{ |
|
"epoch": 2.6723796434779095, |
|
"grad_norm": 0.7218994498252869, |
|
"learning_rate": 3.5821119504962975e-07, |
|
"loss": 1.8794, |
|
"step": 20660 |
|
}, |
|
{ |
|
"epoch": 2.6749666518452644, |
|
"grad_norm": 0.7997236847877502, |
|
"learning_rate": 3.5263743228978184e-07, |
|
"loss": 1.893, |
|
"step": 20680 |
|
}, |
|
{ |
|
"epoch": 2.6775536602126198, |
|
"grad_norm": 0.7285518646240234, |
|
"learning_rate": 3.4710578866539e-07, |
|
"loss": 1.8923, |
|
"step": 20700 |
|
}, |
|
{ |
|
"epoch": 2.680140668579975, |
|
"grad_norm": 0.7284007668495178, |
|
"learning_rate": 3.416163143098439e-07, |
|
"loss": 1.9175, |
|
"step": 20720 |
|
}, |
|
{ |
|
"epoch": 2.68272767694733, |
|
"grad_norm": 0.7764323949813843, |
|
"learning_rate": 3.3616905897435327e-07, |
|
"loss": 1.8964, |
|
"step": 20740 |
|
}, |
|
{ |
|
"epoch": 2.6853146853146854, |
|
"grad_norm": 0.7270144820213318, |
|
"learning_rate": 3.3076407202749906e-07, |
|
"loss": 1.8903, |
|
"step": 20760 |
|
}, |
|
{ |
|
"epoch": 2.6879016936820403, |
|
"grad_norm": 0.7056100964546204, |
|
"learning_rate": 3.254014024547786e-07, |
|
"loss": 1.8873, |
|
"step": 20780 |
|
}, |
|
{ |
|
"epoch": 2.6904887020493957, |
|
"grad_norm": 0.7765107750892639, |
|
"learning_rate": 3.2008109885817096e-07, |
|
"loss": 1.8964, |
|
"step": 20800 |
|
}, |
|
{ |
|
"epoch": 2.6930757104167506, |
|
"grad_norm": 0.7091467976570129, |
|
"learning_rate": 3.1480320945568687e-07, |
|
"loss": 1.8871, |
|
"step": 20820 |
|
}, |
|
{ |
|
"epoch": 2.695662718784106, |
|
"grad_norm": 0.7203754186630249, |
|
"learning_rate": 3.0956778208094007e-07, |
|
"loss": 1.8742, |
|
"step": 20840 |
|
}, |
|
{ |
|
"epoch": 2.6982497271514614, |
|
"grad_norm": 0.7383440732955933, |
|
"learning_rate": 3.043748641827066e-07, |
|
"loss": 1.8908, |
|
"step": 20860 |
|
}, |
|
{ |
|
"epoch": 2.7008367355188163, |
|
"grad_norm": 0.7114757299423218, |
|
"learning_rate": 2.9922450282450275e-07, |
|
"loss": 1.8787, |
|
"step": 20880 |
|
}, |
|
{ |
|
"epoch": 2.7034237438861717, |
|
"grad_norm": 0.7058377861976624, |
|
"learning_rate": 2.941167446841492e-07, |
|
"loss": 1.9043, |
|
"step": 20900 |
|
}, |
|
{ |
|
"epoch": 2.706010752253527, |
|
"grad_norm": 0.7250728607177734, |
|
"learning_rate": 2.8905163605335684e-07, |
|
"loss": 1.8839, |
|
"step": 20920 |
|
}, |
|
{ |
|
"epoch": 2.708597760620882, |
|
"grad_norm": 0.6688072681427002, |
|
"learning_rate": 2.840292228373004e-07, |
|
"loss": 1.8946, |
|
"step": 20940 |
|
}, |
|
{ |
|
"epoch": 2.7111847689882373, |
|
"grad_norm": 0.7553097605705261, |
|
"learning_rate": 2.790495505542085e-07, |
|
"loss": 1.891, |
|
"step": 20960 |
|
}, |
|
{ |
|
"epoch": 2.7137717773555923, |
|
"grad_norm": 0.6968030333518982, |
|
"learning_rate": 2.7411266433494344e-07, |
|
"loss": 1.8976, |
|
"step": 20980 |
|
}, |
|
{ |
|
"epoch": 2.7163587857229476, |
|
"grad_norm": 0.6912530660629272, |
|
"learning_rate": 2.692186089226007e-07, |
|
"loss": 1.8808, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 2.7189457940903026, |
|
"grad_norm": 0.7388253808021545, |
|
"learning_rate": 2.6436742867209763e-07, |
|
"loss": 1.8894, |
|
"step": 21020 |
|
}, |
|
{ |
|
"epoch": 2.721532802457658, |
|
"grad_norm": 0.7009615898132324, |
|
"learning_rate": 2.5955916754977216e-07, |
|
"loss": 1.87, |
|
"step": 21040 |
|
}, |
|
{ |
|
"epoch": 2.7241198108250133, |
|
"grad_norm": 0.7631382346153259, |
|
"learning_rate": 2.547938691329871e-07, |
|
"loss": 1.8842, |
|
"step": 21060 |
|
}, |
|
{ |
|
"epoch": 2.7267068191923682, |
|
"grad_norm": 0.7229073643684387, |
|
"learning_rate": 2.500715766097328e-07, |
|
"loss": 1.8858, |
|
"step": 21080 |
|
}, |
|
{ |
|
"epoch": 2.7292938275597236, |
|
"grad_norm": 0.7307857871055603, |
|
"learning_rate": 2.453923327782359e-07, |
|
"loss": 1.9071, |
|
"step": 21100 |
|
}, |
|
{ |
|
"epoch": 2.731880835927079, |
|
"grad_norm": 0.7224897146224976, |
|
"learning_rate": 2.4075618004657295e-07, |
|
"loss": 1.8939, |
|
"step": 21120 |
|
}, |
|
{ |
|
"epoch": 2.734467844294434, |
|
"grad_norm": 0.7270713448524475, |
|
"learning_rate": 2.3616316043228395e-07, |
|
"loss": 1.8881, |
|
"step": 21140 |
|
}, |
|
{ |
|
"epoch": 2.737054852661789, |
|
"grad_norm": 0.7002483606338501, |
|
"learning_rate": 2.3161331556199374e-07, |
|
"loss": 1.8873, |
|
"step": 21160 |
|
}, |
|
{ |
|
"epoch": 2.739641861029144, |
|
"grad_norm": 0.7371177673339844, |
|
"learning_rate": 2.2710668667103196e-07, |
|
"loss": 1.8979, |
|
"step": 21180 |
|
}, |
|
{ |
|
"epoch": 2.7422288693964996, |
|
"grad_norm": 0.7020816206932068, |
|
"learning_rate": 2.226433146030632e-07, |
|
"loss": 1.8991, |
|
"step": 21200 |
|
}, |
|
{ |
|
"epoch": 2.7448158777638545, |
|
"grad_norm": 0.7210609912872314, |
|
"learning_rate": 2.1822323980971228e-07, |
|
"loss": 1.8773, |
|
"step": 21220 |
|
}, |
|
{ |
|
"epoch": 2.74740288613121, |
|
"grad_norm": 0.7082483172416687, |
|
"learning_rate": 2.1384650235020343e-07, |
|
"loss": 1.8869, |
|
"step": 21240 |
|
}, |
|
{ |
|
"epoch": 2.7499898944985652, |
|
"grad_norm": 0.7483168840408325, |
|
"learning_rate": 2.0951314189098905e-07, |
|
"loss": 1.89, |
|
"step": 21260 |
|
}, |
|
{ |
|
"epoch": 2.75257690286592, |
|
"grad_norm": 0.7489413619041443, |
|
"learning_rate": 2.0522319770539867e-07, |
|
"loss": 1.8889, |
|
"step": 21280 |
|
}, |
|
{ |
|
"epoch": 2.7551639112332755, |
|
"grad_norm": 0.6907543540000916, |
|
"learning_rate": 2.0097670867327834e-07, |
|
"loss": 1.884, |
|
"step": 21300 |
|
}, |
|
{ |
|
"epoch": 2.7577509196006305, |
|
"grad_norm": 0.7174752354621887, |
|
"learning_rate": 1.967737132806391e-07, |
|
"loss": 1.8895, |
|
"step": 21320 |
|
}, |
|
{ |
|
"epoch": 2.760337927967986, |
|
"grad_norm": 0.6935007572174072, |
|
"learning_rate": 1.926142496193073e-07, |
|
"loss": 1.8832, |
|
"step": 21340 |
|
}, |
|
{ |
|
"epoch": 2.7629249363353408, |
|
"grad_norm": 0.6918604969978333, |
|
"learning_rate": 1.8849835538658212e-07, |
|
"loss": 1.9011, |
|
"step": 21360 |
|
}, |
|
{ |
|
"epoch": 2.765511944702696, |
|
"grad_norm": 0.7105856537818909, |
|
"learning_rate": 1.8442606788489137e-07, |
|
"loss": 1.8822, |
|
"step": 21380 |
|
}, |
|
{ |
|
"epoch": 2.7680989530700515, |
|
"grad_norm": 0.710788369178772, |
|
"learning_rate": 1.80397424021454e-07, |
|
"loss": 1.8969, |
|
"step": 21400 |
|
}, |
|
{ |
|
"epoch": 2.7706859614374064, |
|
"grad_norm": 0.737083375453949, |
|
"learning_rate": 1.764124603079459e-07, |
|
"loss": 1.9079, |
|
"step": 21420 |
|
}, |
|
{ |
|
"epoch": 2.773272969804762, |
|
"grad_norm": 0.7629084587097168, |
|
"learning_rate": 1.7247121286016965e-07, |
|
"loss": 1.8971, |
|
"step": 21440 |
|
}, |
|
{ |
|
"epoch": 2.775859978172117, |
|
"grad_norm": 0.7207373380661011, |
|
"learning_rate": 1.6857371739772476e-07, |
|
"loss": 1.8884, |
|
"step": 21460 |
|
}, |
|
{ |
|
"epoch": 2.778446986539472, |
|
"grad_norm": 0.7131302356719971, |
|
"learning_rate": 1.6472000924368904e-07, |
|
"loss": 1.9053, |
|
"step": 21480 |
|
}, |
|
{ |
|
"epoch": 2.781033994906827, |
|
"grad_norm": 0.7214493155479431, |
|
"learning_rate": 1.6091012332429045e-07, |
|
"loss": 1.9069, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 2.7836210032741824, |
|
"grad_norm": 0.7579489946365356, |
|
"learning_rate": 1.5714409416859923e-07, |
|
"loss": 1.888, |
|
"step": 21520 |
|
}, |
|
{ |
|
"epoch": 2.7862080116415378, |
|
"grad_norm": 0.7312991619110107, |
|
"learning_rate": 1.5342195590820784e-07, |
|
"loss": 1.8992, |
|
"step": 21540 |
|
}, |
|
{ |
|
"epoch": 2.7887950200088927, |
|
"grad_norm": 0.6886746287345886, |
|
"learning_rate": 1.4974374227692767e-07, |
|
"loss": 1.8749, |
|
"step": 21560 |
|
}, |
|
{ |
|
"epoch": 2.791382028376248, |
|
"grad_norm": 0.7241831421852112, |
|
"learning_rate": 1.4610948661047842e-07, |
|
"loss": 1.8823, |
|
"step": 21580 |
|
}, |
|
{ |
|
"epoch": 2.7939690367436034, |
|
"grad_norm": 0.7255502343177795, |
|
"learning_rate": 1.425192218461885e-07, |
|
"loss": 1.887, |
|
"step": 21600 |
|
}, |
|
{ |
|
"epoch": 2.7965560451109583, |
|
"grad_norm": 0.683193564414978, |
|
"learning_rate": 1.3897298052269526e-07, |
|
"loss": 1.9, |
|
"step": 21620 |
|
}, |
|
{ |
|
"epoch": 2.7991430534783137, |
|
"grad_norm": 0.7224968075752258, |
|
"learning_rate": 1.354707947796524e-07, |
|
"loss": 1.8884, |
|
"step": 21640 |
|
}, |
|
{ |
|
"epoch": 2.8017300618456686, |
|
"grad_norm": 0.7157533764839172, |
|
"learning_rate": 1.3201269635743474e-07, |
|
"loss": 1.9025, |
|
"step": 21660 |
|
}, |
|
{ |
|
"epoch": 2.804317070213024, |
|
"grad_norm": 0.7283643484115601, |
|
"learning_rate": 1.2859871659685496e-07, |
|
"loss": 1.8751, |
|
"step": 21680 |
|
}, |
|
{ |
|
"epoch": 2.806904078580379, |
|
"grad_norm": 0.7149679660797119, |
|
"learning_rate": 1.2522888643887675e-07, |
|
"loss": 1.8898, |
|
"step": 21700 |
|
}, |
|
{ |
|
"epoch": 2.8094910869477343, |
|
"grad_norm": 0.7097463011741638, |
|
"learning_rate": 1.2190323642433443e-07, |
|
"loss": 1.8876, |
|
"step": 21720 |
|
}, |
|
{ |
|
"epoch": 2.8120780953150897, |
|
"grad_norm": 0.6992908716201782, |
|
"learning_rate": 1.1862179669365814e-07, |
|
"loss": 1.892, |
|
"step": 21740 |
|
}, |
|
{ |
|
"epoch": 2.8146651036824446, |
|
"grad_norm": 0.7636439204216003, |
|
"learning_rate": 1.1538459698659798e-07, |
|
"loss": 1.8777, |
|
"step": 21760 |
|
}, |
|
{ |
|
"epoch": 2.8172521120498, |
|
"grad_norm": 0.725791335105896, |
|
"learning_rate": 1.1219166664195646e-07, |
|
"loss": 1.9016, |
|
"step": 21780 |
|
}, |
|
{ |
|
"epoch": 2.8198391204171553, |
|
"grad_norm": 0.7035162448883057, |
|
"learning_rate": 1.0904303459732313e-07, |
|
"loss": 1.8749, |
|
"step": 21800 |
|
}, |
|
{ |
|
"epoch": 2.8224261287845103, |
|
"grad_norm": 0.7429187893867493, |
|
"learning_rate": 1.0593872938880867e-07, |
|
"loss": 1.8994, |
|
"step": 21820 |
|
}, |
|
{ |
|
"epoch": 2.8250131371518656, |
|
"grad_norm": 0.7356532216072083, |
|
"learning_rate": 1.028787791507918e-07, |
|
"loss": 1.8881, |
|
"step": 21840 |
|
}, |
|
{ |
|
"epoch": 2.8276001455192206, |
|
"grad_norm": 0.7449535727500916, |
|
"learning_rate": 9.986321161565893e-08, |
|
"loss": 1.8885, |
|
"step": 21860 |
|
}, |
|
{ |
|
"epoch": 2.830187153886576, |
|
"grad_norm": 0.764678418636322, |
|
"learning_rate": 9.689205411355595e-08, |
|
"loss": 1.888, |
|
"step": 21880 |
|
}, |
|
{ |
|
"epoch": 2.832774162253931, |
|
"grad_norm": 0.7606685757637024, |
|
"learning_rate": 9.396533357214022e-08, |
|
"loss": 1.8768, |
|
"step": 21900 |
|
}, |
|
{ |
|
"epoch": 2.8353611706212862, |
|
"grad_norm": 0.7346524596214294, |
|
"learning_rate": 9.108307651633619e-08, |
|
"loss": 1.8942, |
|
"step": 21920 |
|
}, |
|
{ |
|
"epoch": 2.8379481789886416, |
|
"grad_norm": 0.7407386898994446, |
|
"learning_rate": 8.824530906809347e-08, |
|
"loss": 1.897, |
|
"step": 21940 |
|
}, |
|
{ |
|
"epoch": 2.8405351873559965, |
|
"grad_norm": 0.7275553941726685, |
|
"learning_rate": 8.545205694615311e-08, |
|
"loss": 1.8833, |
|
"step": 21960 |
|
}, |
|
{ |
|
"epoch": 2.843122195723352, |
|
"grad_norm": 0.7282954454421997, |
|
"learning_rate": 8.270334546581215e-08, |
|
"loss": 1.8765, |
|
"step": 21980 |
|
}, |
|
{ |
|
"epoch": 2.845709204090707, |
|
"grad_norm": 0.7188981175422668, |
|
"learning_rate": 7.999919953869561e-08, |
|
"loss": 1.8933, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 2.848296212458062, |
|
"grad_norm": 0.7398064732551575, |
|
"learning_rate": 7.733964367252822e-08, |
|
"loss": 1.877, |
|
"step": 22020 |
|
}, |
|
{ |
|
"epoch": 2.850883220825417, |
|
"grad_norm": 0.680725634098053, |
|
"learning_rate": 7.47247019709163e-08, |
|
"loss": 1.8816, |
|
"step": 22040 |
|
}, |
|
{ |
|
"epoch": 2.8534702291927725, |
|
"grad_norm": 0.7518890500068665, |
|
"learning_rate": 7.21543981331263e-08, |
|
"loss": 1.907, |
|
"step": 22060 |
|
}, |
|
{ |
|
"epoch": 2.856057237560128, |
|
"grad_norm": 0.6944283843040466, |
|
"learning_rate": 6.962875545387104e-08, |
|
"loss": 1.8936, |
|
"step": 22080 |
|
}, |
|
{ |
|
"epoch": 2.858644245927483, |
|
"grad_norm": 0.7511395812034607, |
|
"learning_rate": 6.714779682309713e-08, |
|
"loss": 1.8883, |
|
"step": 22100 |
|
}, |
|
{ |
|
"epoch": 2.861231254294838, |
|
"grad_norm": 0.7431060075759888, |
|
"learning_rate": 6.471154472578179e-08, |
|
"loss": 1.9062, |
|
"step": 22120 |
|
}, |
|
{ |
|
"epoch": 2.8638182626621935, |
|
"grad_norm": 0.7315056324005127, |
|
"learning_rate": 6.232002124172187e-08, |
|
"loss": 1.8785, |
|
"step": 22140 |
|
}, |
|
{ |
|
"epoch": 2.8664052710295485, |
|
"grad_norm": 0.71298748254776, |
|
"learning_rate": 5.997324804534243e-08, |
|
"loss": 1.8926, |
|
"step": 22160 |
|
}, |
|
{ |
|
"epoch": 2.868992279396904, |
|
"grad_norm": 0.7103483080863953, |
|
"learning_rate": 5.7671246405491824e-08, |
|
"loss": 1.9011, |
|
"step": 22180 |
|
}, |
|
{ |
|
"epoch": 2.8715792877642587, |
|
"grad_norm": 0.7365880012512207, |
|
"learning_rate": 5.541403718525629e-08, |
|
"loss": 1.8843, |
|
"step": 22200 |
|
}, |
|
{ |
|
"epoch": 2.874166296131614, |
|
"grad_norm": 0.6919415593147278, |
|
"learning_rate": 5.3201640841765157e-08, |
|
"loss": 1.8899, |
|
"step": 22220 |
|
}, |
|
{ |
|
"epoch": 2.876753304498969, |
|
"grad_norm": 0.7034029364585876, |
|
"learning_rate": 5.103407742600874e-08, |
|
"loss": 1.8802, |
|
"step": 22240 |
|
}, |
|
{ |
|
"epoch": 2.8793403128663244, |
|
"grad_norm": 0.7135015726089478, |
|
"learning_rate": 4.891136658265516e-08, |
|
"loss": 1.8821, |
|
"step": 22260 |
|
}, |
|
{ |
|
"epoch": 2.88192732123368, |
|
"grad_norm": 0.701713502407074, |
|
"learning_rate": 4.6833527549873804e-08, |
|
"loss": 1.8835, |
|
"step": 22280 |
|
}, |
|
{ |
|
"epoch": 2.8845143296010347, |
|
"grad_norm": 0.7160483598709106, |
|
"learning_rate": 4.4800579159159364e-08, |
|
"loss": 1.8898, |
|
"step": 22300 |
|
}, |
|
{ |
|
"epoch": 2.88710133796839, |
|
"grad_norm": 0.7640059590339661, |
|
"learning_rate": 4.2812539835161426e-08, |
|
"loss": 1.8979, |
|
"step": 22320 |
|
}, |
|
{ |
|
"epoch": 2.8896883463357455, |
|
"grad_norm": 0.7294439673423767, |
|
"learning_rate": 4.086942759551904e-08, |
|
"loss": 1.8819, |
|
"step": 22340 |
|
}, |
|
{ |
|
"epoch": 2.8922753547031004, |
|
"grad_norm": 0.7006838917732239, |
|
"learning_rate": 3.897126005069418e-08, |
|
"loss": 1.8888, |
|
"step": 22360 |
|
}, |
|
{ |
|
"epoch": 2.8948623630704553, |
|
"grad_norm": 0.7375736236572266, |
|
"learning_rate": 3.7118054403816326e-08, |
|
"loss": 1.9001, |
|
"step": 22380 |
|
}, |
|
{ |
|
"epoch": 2.8974493714378107, |
|
"grad_norm": 0.7070913910865784, |
|
"learning_rate": 3.530982745052314e-08, |
|
"loss": 1.9025, |
|
"step": 22400 |
|
}, |
|
{ |
|
"epoch": 2.900036379805166, |
|
"grad_norm": 0.719930112361908, |
|
"learning_rate": 3.354659557881057e-08, |
|
"loss": 1.9003, |
|
"step": 22420 |
|
}, |
|
{ |
|
"epoch": 2.902623388172521, |
|
"grad_norm": 0.7053143978118896, |
|
"learning_rate": 3.1828374768881895e-08, |
|
"loss": 1.8887, |
|
"step": 22440 |
|
}, |
|
{ |
|
"epoch": 2.9052103965398763, |
|
"grad_norm": 0.718190610408783, |
|
"learning_rate": 3.0155180593005595e-08, |
|
"loss": 1.9046, |
|
"step": 22460 |
|
}, |
|
{ |
|
"epoch": 2.9077974049072317, |
|
"grad_norm": 0.7384104132652283, |
|
"learning_rate": 2.8527028215372676e-08, |
|
"loss": 1.8983, |
|
"step": 22480 |
|
}, |
|
{ |
|
"epoch": 2.9103844132745866, |
|
"grad_norm": 0.7297363877296448, |
|
"learning_rate": 2.6943932391957916e-08, |
|
"loss": 1.9072, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 2.912971421641942, |
|
"grad_norm": 0.7329489588737488, |
|
"learning_rate": 2.54059074703894e-08, |
|
"loss": 1.8991, |
|
"step": 22520 |
|
}, |
|
{ |
|
"epoch": 2.915558430009297, |
|
"grad_norm": 0.7540693879127502, |
|
"learning_rate": 2.3912967389816967e-08, |
|
"loss": 1.8849, |
|
"step": 22540 |
|
}, |
|
{ |
|
"epoch": 2.9181454383766523, |
|
"grad_norm": 0.7132096290588379, |
|
"learning_rate": 2.2465125680784517e-08, |
|
"loss": 1.8883, |
|
"step": 22560 |
|
}, |
|
{ |
|
"epoch": 2.9207324467440072, |
|
"grad_norm": 0.752459704875946, |
|
"learning_rate": 2.1062395465108465e-08, |
|
"loss": 1.8754, |
|
"step": 22580 |
|
}, |
|
{ |
|
"epoch": 2.9233194551113626, |
|
"grad_norm": 0.6799810528755188, |
|
"learning_rate": 1.9704789455760598e-08, |
|
"loss": 1.8841, |
|
"step": 22600 |
|
}, |
|
{ |
|
"epoch": 2.925906463478718, |
|
"grad_norm": 0.6982004046440125, |
|
"learning_rate": 1.8392319956749283e-08, |
|
"loss": 1.8966, |
|
"step": 22620 |
|
}, |
|
{ |
|
"epoch": 2.928493471846073, |
|
"grad_norm": 0.6723421216011047, |
|
"learning_rate": 1.7124998863010668e-08, |
|
"loss": 1.885, |
|
"step": 22640 |
|
}, |
|
{ |
|
"epoch": 2.9310804802134283, |
|
"grad_norm": 0.6991791725158691, |
|
"learning_rate": 1.5902837660299873e-08, |
|
"loss": 1.8717, |
|
"step": 22660 |
|
}, |
|
{ |
|
"epoch": 2.9336674885807836, |
|
"grad_norm": 0.7259694933891296, |
|
"learning_rate": 1.4725847425087758e-08, |
|
"loss": 1.888, |
|
"step": 22680 |
|
}, |
|
{ |
|
"epoch": 2.9362544969481386, |
|
"grad_norm": 0.7207104563713074, |
|
"learning_rate": 1.3594038824458755e-08, |
|
"loss": 1.8777, |
|
"step": 22700 |
|
}, |
|
{ |
|
"epoch": 2.9388415053154935, |
|
"grad_norm": 0.7092163562774658, |
|
"learning_rate": 1.250742211601541e-08, |
|
"loss": 1.8765, |
|
"step": 22720 |
|
}, |
|
{ |
|
"epoch": 2.941428513682849, |
|
"grad_norm": 0.7252078652381897, |
|
"learning_rate": 1.146600714778623e-08, |
|
"loss": 1.8977, |
|
"step": 22740 |
|
}, |
|
{ |
|
"epoch": 2.9440155220502042, |
|
"grad_norm": 0.7089459300041199, |
|
"learning_rate": 1.0469803358135189e-08, |
|
"loss": 1.8732, |
|
"step": 22760 |
|
}, |
|
{ |
|
"epoch": 2.946602530417559, |
|
"grad_norm": 0.7141158580780029, |
|
"learning_rate": 9.518819775675147e-09, |
|
"loss": 1.8839, |
|
"step": 22780 |
|
}, |
|
{ |
|
"epoch": 2.9491895387849145, |
|
"grad_norm": 0.6566777229309082, |
|
"learning_rate": 8.613065019189571e-09, |
|
"loss": 1.9018, |
|
"step": 22800 |
|
}, |
|
{ |
|
"epoch": 2.95177654715227, |
|
"grad_norm": 0.6906895637512207, |
|
"learning_rate": 7.752547297549818e-09, |
|
"loss": 1.9024, |
|
"step": 22820 |
|
}, |
|
{ |
|
"epoch": 2.954363555519625, |
|
"grad_norm": 0.6949741244316101, |
|
"learning_rate": 6.937274409645756e-09, |
|
"loss": 1.8903, |
|
"step": 22840 |
|
}, |
|
{ |
|
"epoch": 2.95695056388698, |
|
"grad_norm": 0.723513662815094, |
|
"learning_rate": 6.167253744310264e-09, |
|
"loss": 1.8843, |
|
"step": 22860 |
|
}, |
|
{ |
|
"epoch": 2.959537572254335, |
|
"grad_norm": 0.7561472058296204, |
|
"learning_rate": 5.442492280255396e-09, |
|
"loss": 1.8926, |
|
"step": 22880 |
|
}, |
|
{ |
|
"epoch": 2.9621245806216905, |
|
"grad_norm": 0.7553021907806396, |
|
"learning_rate": 4.762996586007984e-09, |
|
"loss": 1.8811, |
|
"step": 22900 |
|
}, |
|
{ |
|
"epoch": 2.9647115889890454, |
|
"grad_norm": 0.6656256914138794, |
|
"learning_rate": 4.128772819850802e-09, |
|
"loss": 1.8929, |
|
"step": 22920 |
|
}, |
|
{ |
|
"epoch": 2.967298597356401, |
|
"grad_norm": 0.7597754001617432, |
|
"learning_rate": 3.5398267297653833e-09, |
|
"loss": 1.8858, |
|
"step": 22940 |
|
}, |
|
{ |
|
"epoch": 2.969885605723756, |
|
"grad_norm": 0.7134349942207336, |
|
"learning_rate": 2.996163653382067e-09, |
|
"loss": 1.889, |
|
"step": 22960 |
|
}, |
|
{ |
|
"epoch": 2.972472614091111, |
|
"grad_norm": 0.7336997389793396, |
|
"learning_rate": 2.497788517929478e-09, |
|
"loss": 1.8983, |
|
"step": 22980 |
|
}, |
|
{ |
|
"epoch": 2.9750596224584664, |
|
"grad_norm": 0.7686519622802734, |
|
"learning_rate": 2.0447058401906748e-09, |
|
"loss": 1.8844, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 2.977646630825822, |
|
"grad_norm": 0.7396214008331299, |
|
"learning_rate": 1.6369197264626268e-09, |
|
"loss": 1.8808, |
|
"step": 23020 |
|
}, |
|
{ |
|
"epoch": 2.9802336391931767, |
|
"grad_norm": 0.738203763961792, |
|
"learning_rate": 1.2744338725190208e-09, |
|
"loss": 1.8966, |
|
"step": 23040 |
|
}, |
|
{ |
|
"epoch": 2.982820647560532, |
|
"grad_norm": 0.724272608757019, |
|
"learning_rate": 9.5725156357529e-10, |
|
"loss": 1.8984, |
|
"step": 23060 |
|
}, |
|
{ |
|
"epoch": 2.985407655927887, |
|
"grad_norm": 0.7144853472709656, |
|
"learning_rate": 6.853756742614126e-10, |
|
"loss": 1.884, |
|
"step": 23080 |
|
}, |
|
{ |
|
"epoch": 2.9879946642952424, |
|
"grad_norm": 0.6902337074279785, |
|
"learning_rate": 4.588086685936022e-10, |
|
"loss": 1.8841, |
|
"step": 23100 |
|
}, |
|
{ |
|
"epoch": 2.9905816726625973, |
|
"grad_norm": 0.6939364671707153, |
|
"learning_rate": 2.775525999532125e-10, |
|
"loss": 1.8764, |
|
"step": 23120 |
|
}, |
|
{ |
|
"epoch": 2.9931686810299527, |
|
"grad_norm": 0.7078965306282043, |
|
"learning_rate": 1.416091110667539e-10, |
|
"loss": 1.895, |
|
"step": 23140 |
|
}, |
|
{ |
|
"epoch": 2.995755689397308, |
|
"grad_norm": 0.7176749110221863, |
|
"learning_rate": 5.097943399312577e-11, |
|
"loss": 1.8926, |
|
"step": 23160 |
|
}, |
|
{ |
|
"epoch": 2.998342697764663, |
|
"grad_norm": 0.6923559904098511, |
|
"learning_rate": 5.66439011084885e-12, |
|
"loss": 1.8958, |
|
"step": 23180 |
|
} |
  ],
  "logging_steps": 20,
  "max_steps": 23190,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.1889186481892858e+20,
  "train_batch_size": 12,
  "trial_name": null,
  "trial_params": null
}