{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.0,
  "eval_steps": 50000,
  "global_step": 2364,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01692047377326565,
      "grad_norm": 4.428616046905518,
      "learning_rate": 7.042253521126762e-07,
      "loss": 0.6706,
      "step": 10
    },
    {
      "epoch": 0.0338409475465313,
      "grad_norm": 6.486809730529785,
      "learning_rate": 1.4084507042253523e-06,
      "loss": 0.6483,
      "step": 20
    },
    {
      "epoch": 0.050761421319796954,
      "grad_norm": 3.8806605339050293,
      "learning_rate": 2.1126760563380285e-06,
      "loss": 0.5231,
      "step": 30
    },
    {
      "epoch": 0.0676818950930626,
      "grad_norm": 2.814377784729004,
      "learning_rate": 2.8169014084507046e-06,
      "loss": 0.3641,
      "step": 40
    },
    {
      "epoch": 0.08460236886632826,
      "grad_norm": 2.3236453533172607,
      "learning_rate": 3.5211267605633804e-06,
      "loss": 0.2923,
      "step": 50
    },
    {
      "epoch": 0.10152284263959391,
      "grad_norm": 1.732696294784546,
      "learning_rate": 4.225352112676057e-06,
      "loss": 0.2632,
      "step": 60
    },
    {
      "epoch": 0.11844331641285956,
      "grad_norm": 1.71094810962677,
      "learning_rate": 4.929577464788733e-06,
      "loss": 0.2672,
      "step": 70
    },
    {
      "epoch": 0.1353637901861252,
      "grad_norm": 2.0413691997528076,
      "learning_rate": 4.999809944183634e-06,
      "loss": 0.2208,
      "step": 80
    },
    {
      "epoch": 0.15228426395939088,
      "grad_norm": 2.169725179672241,
      "learning_rate": 4.999152998213973e-06,
      "loss": 0.252,
      "step": 90
    },
    {
      "epoch": 0.1692047377326565,
      "grad_norm": 1.3290756940841675,
      "learning_rate": 4.998026939008151e-06,
      "loss": 0.2468,
      "step": 100
    },
    {
      "epoch": 0.18612521150592218,
      "grad_norm": 2.0767831802368164,
      "learning_rate": 4.996431977937491e-06,
      "loss": 0.2468,
      "step": 110
    },
    {
      "epoch": 0.20304568527918782,
      "grad_norm": 1.3648345470428467,
      "learning_rate": 4.994368414390368e-06,
      "loss": 0.2068,
      "step": 120
    },
    {
      "epoch": 0.21996615905245348,
      "grad_norm": 1.9443793296813965,
      "learning_rate": 4.991836635716003e-06,
      "loss": 0.233,
      "step": 130
    },
    {
      "epoch": 0.23688663282571912,
      "grad_norm": 1.4086923599243164,
      "learning_rate": 4.988837117151762e-06,
      "loss": 0.2153,
      "step": 140
    },
    {
      "epoch": 0.25380710659898476,
      "grad_norm": 1.4785760641098022,
      "learning_rate": 4.985370421733948e-06,
      "loss": 0.2316,
      "step": 150
    },
    {
      "epoch": 0.2707275803722504,
      "grad_norm": 1.52463960647583,
      "learning_rate": 4.981437200192111e-06,
      "loss": 0.215,
      "step": 160
    },
    {
      "epoch": 0.2876480541455161,
      "grad_norm": 1.5714839696884155,
      "learning_rate": 4.9770381908269005e-06,
      "loss": 0.2366,
      "step": 170
    },
    {
      "epoch": 0.30456852791878175,
      "grad_norm": 1.8277614116668701,
      "learning_rate": 4.972174219371484e-06,
      "loss": 0.2443,
      "step": 180
    },
    {
      "epoch": 0.32148900169204736,
      "grad_norm": 1.175654411315918,
      "learning_rate": 4.966846198836546e-06,
      "loss": 0.2226,
      "step": 190
    },
    {
      "epoch": 0.338409475465313,
      "grad_norm": 1.5629938840866089,
      "learning_rate": 4.96105512933891e-06,
      "loss": 0.2231,
      "step": 200
    },
    {
      "epoch": 0.3553299492385787,
      "grad_norm": 1.6523264646530151,
      "learning_rate": 4.954802097913804e-06,
      "loss": 0.2193,
      "step": 210
    },
    {
      "epoch": 0.37225042301184436,
      "grad_norm": 1.222960114479065,
      "learning_rate": 4.94808827831082e-06,
      "loss": 0.2534,
      "step": 220
    },
    {
      "epoch": 0.38917089678510997,
      "grad_norm": 1.26102614402771,
      "learning_rate": 4.9409149307735835e-06,
      "loss": 0.2283,
      "step": 230
    },
    {
      "epoch": 0.40609137055837563,
      "grad_norm": 1.4353593587875366,
      "learning_rate": 4.9332834018032025e-06,
      "loss": 0.2594,
      "step": 240
    },
    {
      "epoch": 0.4230118443316413,
      "grad_norm": 1.1220771074295044,
      "learning_rate": 4.925195123905513e-06,
      "loss": 0.2262,
      "step": 250
    },
    {
      "epoch": 0.43993231810490696,
      "grad_norm": 1.7100379467010498,
      "learning_rate": 4.916651615322182e-06,
      "loss": 0.2093,
      "step": 260
    },
    {
      "epoch": 0.45685279187817257,
      "grad_norm": 1.975794792175293,
      "learning_rate": 4.907654479745726e-06,
      "loss": 0.2101,
      "step": 270
    },
    {
      "epoch": 0.47377326565143824,
      "grad_norm": 1.3200404644012451,
      "learning_rate": 4.89820540601848e-06,
      "loss": 0.2462,
      "step": 280
    },
    {
      "epoch": 0.4906937394247039,
      "grad_norm": 1.254960298538208,
      "learning_rate": 4.8883061678155905e-06,
      "loss": 0.2051,
      "step": 290
    },
    {
      "epoch": 0.5076142131979695,
      "grad_norm": 1.7000998258590698,
      "learning_rate": 4.877958623312076e-06,
      "loss": 0.2531,
      "step": 300
    },
    {
      "epoch": 0.5245346869712352,
      "grad_norm": 1.2153050899505615,
      "learning_rate": 4.8671647148340304e-06,
      "loss": 0.2484,
      "step": 310
    },
    {
      "epoch": 0.5414551607445008,
      "grad_norm": 1.4881746768951416,
      "learning_rate": 4.855926468494039e-06,
      "loss": 0.2121,
      "step": 320
    },
    {
      "epoch": 0.5583756345177665,
      "grad_norm": 1.3982009887695312,
      "learning_rate": 4.844245993810852e-06,
      "loss": 0.237,
      "step": 330
    },
    {
      "epoch": 0.5752961082910322,
      "grad_norm": 1.4024406671524048,
      "learning_rate": 4.832125483313411e-06,
      "loss": 0.2126,
      "step": 340
    },
    {
      "epoch": 0.5922165820642978,
      "grad_norm": 1.4790270328521729,
      "learning_rate": 4.819567212129292e-06,
      "loss": 0.2491,
      "step": 350
    },
    {
      "epoch": 0.6091370558375635,
      "grad_norm": 1.5148818492889404,
      "learning_rate": 4.806573537557643e-06,
      "loss": 0.2359,
      "step": 360
    },
    {
      "epoch": 0.626057529610829,
      "grad_norm": 1.7280242443084717,
      "learning_rate": 4.7931468986267014e-06,
      "loss": 0.2363,
      "step": 370
    },
    {
      "epoch": 0.6429780033840947,
      "grad_norm": 1.5051395893096924,
      "learning_rate": 4.779289815635961e-06,
      "loss": 0.2132,
      "step": 380
    },
    {
      "epoch": 0.6598984771573604,
      "grad_norm": 2.0735015869140625,
      "learning_rate": 4.765004889683096e-06,
      "loss": 0.2307,
      "step": 390
    },
    {
      "epoch": 0.676818950930626,
      "grad_norm": 1.4238276481628418,
      "learning_rate": 4.750294802175703e-06,
      "loss": 0.2237,
      "step": 400
    },
    {
      "epoch": 0.6937394247038917,
      "grad_norm": 1.283726453781128,
      "learning_rate": 4.735162314327987e-06,
      "loss": 0.2299,
      "step": 410
    },
    {
      "epoch": 0.7106598984771574,
      "grad_norm": 1.8388890027999878,
      "learning_rate": 4.71961026664245e-06,
      "loss": 0.23,
      "step": 420
    },
    {
      "epoch": 0.727580372250423,
      "grad_norm": 1.456938624382019,
      "learning_rate": 4.703641578376706e-06,
      "loss": 0.1979,
      "step": 430
    },
    {
      "epoch": 0.7445008460236887,
      "grad_norm": 1.3884906768798828,
      "learning_rate": 4.687259246995512e-06,
      "loss": 0.2358,
      "step": 440
    },
    {
      "epoch": 0.7614213197969543,
      "grad_norm": 1.8143761157989502,
      "learning_rate": 4.670466347608109e-06,
      "loss": 0.2382,
      "step": 450
    },
    {
      "epoch": 0.7783417935702199,
      "grad_norm": 1.3720914125442505,
      "learning_rate": 4.65326603239101e-06,
      "loss": 0.1786,
      "step": 460
    },
    {
      "epoch": 0.7952622673434856,
      "grad_norm": 1.378342866897583,
      "learning_rate": 4.6356615299963e-06,
      "loss": 0.2109,
      "step": 470
    },
    {
      "epoch": 0.8121827411167513,
      "grad_norm": 1.6520991325378418,
      "learning_rate": 4.617656144945584e-06,
      "loss": 0.2296,
      "step": 480
    },
    {
      "epoch": 0.8291032148900169,
      "grad_norm": 1.617244839668274,
      "learning_rate": 4.599253257009716e-06,
      "loss": 0.21,
      "step": 490
    },
    {
      "epoch": 0.8460236886632826,
      "grad_norm": 1.8561798334121704,
      "learning_rate": 4.580456320574367e-06,
      "loss": 0.1935,
      "step": 500
    },
    {
      "epoch": 0.8629441624365483,
      "grad_norm": 2.1917858123779297,
      "learning_rate": 4.561268863991618e-06,
      "loss": 0.2246,
      "step": 510
    },
    {
      "epoch": 0.8798646362098139,
      "grad_norm": 1.4701884984970093,
      "learning_rate": 4.541694488917654e-06,
      "loss": 0.2234,
      "step": 520
    },
    {
      "epoch": 0.8967851099830795,
      "grad_norm": 1.3482675552368164,
      "learning_rate": 4.521736869636699e-06,
      "loss": 0.2423,
      "step": 530
    },
    {
      "epoch": 0.9137055837563451,
      "grad_norm": 1.618618130683899,
      "learning_rate": 4.501399752371323e-06,
      "loss": 0.2384,
      "step": 540
    },
    {
      "epoch": 0.9306260575296108,
      "grad_norm": 1.7225276231765747,
      "learning_rate": 4.480686954579241e-06,
      "loss": 0.2208,
      "step": 550
    },
    {
      "epoch": 0.9475465313028765,
      "grad_norm": 1.5978412628173828,
      "learning_rate": 4.459602364236743e-06,
      "loss": 0.2421,
      "step": 560
    },
    {
      "epoch": 0.9644670050761421,
      "grad_norm": 1.34050452709198,
      "learning_rate": 4.438149939108887e-06,
      "loss": 0.2117,
      "step": 570
    },
    {
      "epoch": 0.9813874788494078,
      "grad_norm": 1.7396200895309448,
      "learning_rate": 4.4163337060065895e-06,
      "loss": 0.2395,
      "step": 580
    },
    {
      "epoch": 0.9983079526226735,
      "grad_norm": 1.5188558101654053,
      "learning_rate": 4.394157760030756e-06,
      "loss": 0.2414,
      "step": 590
    },
    {
      "epoch": 1.015228426395939,
      "grad_norm": 1.3064625263214111,
      "learning_rate": 4.371626263803601e-06,
      "loss": 0.1639,
      "step": 600
    },
    {
      "epoch": 1.0321489001692048,
      "grad_norm": 1.2497655153274536,
      "learning_rate": 4.348743446687279e-06,
      "loss": 0.1606,
      "step": 610
    },
    {
      "epoch": 1.0490693739424704,
      "grad_norm": 1.3260619640350342,
      "learning_rate": 4.325513603989998e-06,
      "loss": 0.1373,
      "step": 620
    },
    {
      "epoch": 1.0659898477157361,
      "grad_norm": 1.7090791463851929,
      "learning_rate": 4.301941096159757e-06,
      "loss": 0.151,
      "step": 630
    },
    {
      "epoch": 1.0829103214890017,
      "grad_norm": 1.4625272750854492,
      "learning_rate": 4.278030347965842e-06,
      "loss": 0.1567,
      "step": 640
    },
    {
      "epoch": 1.0998307952622675,
      "grad_norm": 1.374337911605835,
      "learning_rate": 4.253785847668264e-06,
      "loss": 0.1412,
      "step": 650
    },
    {
      "epoch": 1.116751269035533,
      "grad_norm": 1.326503038406372,
      "learning_rate": 4.229212146175273e-06,
      "loss": 0.1249,
      "step": 660
    },
    {
      "epoch": 1.1336717428087986,
      "grad_norm": 1.2745444774627686,
      "learning_rate": 4.2043138561891086e-06,
      "loss": 0.1697,
      "step": 670
    },
    {
      "epoch": 1.1505922165820643,
      "grad_norm": 1.3895198106765747,
      "learning_rate": 4.179095651340155e-06,
      "loss": 0.1429,
      "step": 680
    },
    {
      "epoch": 1.16751269035533,
      "grad_norm": 1.331308126449585,
      "learning_rate": 4.153562265309662e-06,
      "loss": 0.1196,
      "step": 690
    },
    {
      "epoch": 1.1844331641285957,
      "grad_norm": 1.7868833541870117,
      "learning_rate": 4.1277184909411885e-06,
      "loss": 0.1512,
      "step": 700
    },
    {
      "epoch": 1.2013536379018612,
      "grad_norm": 1.2682002782821655,
      "learning_rate": 4.101569179340946e-06,
      "loss": 0.1795,
      "step": 710
    },
    {
      "epoch": 1.218274111675127,
      "grad_norm": 1.1160831451416016,
      "learning_rate": 4.075119238967196e-06,
      "loss": 0.1353,
      "step": 720
    },
    {
      "epoch": 1.2351945854483926,
      "grad_norm": 1.2749055624008179,
      "learning_rate": 4.048373634708899e-06,
      "loss": 0.1452,
      "step": 730
    },
    {
      "epoch": 1.252115059221658,
      "grad_norm": 1.6326930522918701,
      "learning_rate": 4.0213373869537504e-06,
      "loss": 0.1498,
      "step": 740
    },
    {
      "epoch": 1.2690355329949239,
      "grad_norm": 1.2723865509033203,
      "learning_rate": 3.994015570645818e-06,
      "loss": 0.1221,
      "step": 750
    },
    {
      "epoch": 1.2859560067681894,
      "grad_norm": 1.4743432998657227,
      "learning_rate": 3.966413314332924e-06,
      "loss": 0.1436,
      "step": 760
    },
    {
      "epoch": 1.3028764805414552,
      "grad_norm": 1.3993955850601196,
      "learning_rate": 3.938535799203971e-06,
      "loss": 0.1456,
      "step": 770
    },
    {
      "epoch": 1.3197969543147208,
      "grad_norm": 1.7814866304397583,
      "learning_rate": 3.91038825811639e-06,
      "loss": 0.1329,
      "step": 780
    },
    {
      "epoch": 1.3367174280879865,
      "grad_norm": 1.551604151725769,
      "learning_rate": 3.881975974613888e-06,
      "loss": 0.1636,
      "step": 790
    },
    {
      "epoch": 1.353637901861252,
      "grad_norm": 1.244645118713379,
      "learning_rate": 3.85330428193467e-06,
      "loss": 0.1291,
      "step": 800
    },
    {
      "epoch": 1.3705583756345177,
      "grad_norm": 1.368085503578186,
      "learning_rate": 3.824378562010358e-06,
      "loss": 0.1292,
      "step": 810
    },
    {
      "epoch": 1.3874788494077834,
      "grad_norm": 1.6551761627197266,
      "learning_rate": 3.7952042444557396e-06,
      "loss": 0.158,
      "step": 820
    },
    {
      "epoch": 1.404399323181049,
      "grad_norm": 1.4006320238113403,
      "learning_rate": 3.7657868055495917e-06,
      "loss": 0.146,
      "step": 830
    },
    {
      "epoch": 1.4213197969543148,
      "grad_norm": 1.8334702253341675,
      "learning_rate": 3.736131767206727e-06,
      "loss": 0.1572,
      "step": 840
    },
    {
      "epoch": 1.4382402707275803,
      "grad_norm": 1.3605976104736328,
      "learning_rate": 3.706244695941489e-06,
      "loss": 0.1274,
      "step": 850
    },
    {
      "epoch": 1.455160744500846,
      "grad_norm": 1.306801199913025,
      "learning_rate": 3.6761312018228597e-06,
      "loss": 0.1582,
      "step": 860
    },
    {
      "epoch": 1.4720812182741116,
      "grad_norm": 1.4271026849746704,
      "learning_rate": 3.645796937421406e-06,
      "loss": 0.1197,
      "step": 870
    },
    {
      "epoch": 1.4890016920473772,
      "grad_norm": 1.3257379531860352,
      "learning_rate": 3.6152475967482393e-06,
      "loss": 0.1392,
      "step": 880
    },
    {
      "epoch": 1.505922165820643,
      "grad_norm": 1.4457297325134277,
      "learning_rate": 3.5844889141861977e-06,
      "loss": 0.1334,
      "step": 890
    },
    {
      "epoch": 1.5228426395939088,
      "grad_norm": 1.5230379104614258,
      "learning_rate": 3.5535266634134556e-06,
      "loss": 0.1325,
      "step": 900
    },
    {
      "epoch": 1.5397631133671743,
      "grad_norm": 1.1343902349472046,
      "learning_rate": 3.522366656319748e-06,
      "loss": 0.1315,
      "step": 910
    },
    {
      "epoch": 1.5566835871404399,
      "grad_norm": 1.58329439163208,
      "learning_rate": 3.4910147419154306e-06,
      "loss": 0.1464,
      "step": 920
    },
    {
      "epoch": 1.5736040609137056,
      "grad_norm": 1.3953927755355835,
      "learning_rate": 3.459476805233567e-06,
      "loss": 0.132,
      "step": 930
    },
    {
      "epoch": 1.5905245346869712,
      "grad_norm": 1.5068899393081665,
      "learning_rate": 3.427758766225257e-06,
      "loss": 0.1348,
      "step": 940
    },
    {
      "epoch": 1.6074450084602367,
      "grad_norm": 1.7132457494735718,
      "learning_rate": 3.3958665786484063e-06,
      "loss": 0.1343,
      "step": 950
    },
    {
      "epoch": 1.6243654822335025,
      "grad_norm": 1.1419628858566284,
      "learning_rate": 3.3638062289501584e-06,
      "loss": 0.1381,
      "step": 960
    },
    {
      "epoch": 1.6412859560067683,
      "grad_norm": 1.8477777242660522,
      "learning_rate": 3.331583735143179e-06,
      "loss": 0.1642,
      "step": 970
    },
    {
      "epoch": 1.6582064297800339,
      "grad_norm": 1.5605595111846924,
      "learning_rate": 3.2992051456760242e-06,
      "loss": 0.1479,
      "step": 980
    },
    {
      "epoch": 1.6751269035532994,
      "grad_norm": 1.643684983253479,
      "learning_rate": 3.266676538297797e-06,
      "loss": 0.1564,
      "step": 990
    },
    {
      "epoch": 1.6920473773265652,
      "grad_norm": 1.6141108274459839,
      "learning_rate": 3.234004018917293e-06,
      "loss": 0.1496,
      "step": 1000
    },
    {
      "epoch": 1.708967851099831,
      "grad_norm": 1.8253343105316162,
      "learning_rate": 3.20119372045687e-06,
      "loss": 0.1535,
      "step": 1010
    },
    {
      "epoch": 1.7258883248730963,
      "grad_norm": 1.3858433961868286,
      "learning_rate": 3.1682518017012452e-06,
      "loss": 0.1273,
      "step": 1020
    },
    {
      "epoch": 1.742808798646362,
      "grad_norm": 1.9302154779434204,
      "learning_rate": 3.1351844461414348e-06,
      "loss": 0.15,
      "step": 1030
    },
    {
      "epoch": 1.7597292724196278,
      "grad_norm": 1.4370014667510986,
      "learning_rate": 3.1019978608140584e-06,
      "loss": 0.1387,
      "step": 1040
    },
    {
      "epoch": 1.7766497461928934,
      "grad_norm": 1.0620018243789673,
      "learning_rate": 3.0686982751362234e-06,
      "loss": 0.1261,
      "step": 1050
    },
    {
      "epoch": 1.793570219966159,
      "grad_norm": 1.401853084564209,
      "learning_rate": 3.0352919397362064e-06,
      "loss": 0.1492,
      "step": 1060
    },
    {
      "epoch": 1.8104906937394247,
      "grad_norm": 1.4132765531539917,
      "learning_rate": 3.0017851252801574e-06,
      "loss": 0.1401,
      "step": 1070
    },
    {
      "epoch": 1.8274111675126905,
      "grad_norm": 1.540830373764038,
      "learning_rate": 2.968184121295038e-06,
      "loss": 0.1327,
      "step": 1080
    },
    {
      "epoch": 1.844331641285956,
      "grad_norm": 1.434097170829773,
      "learning_rate": 2.93449523498802e-06,
      "loss": 0.1501,
      "step": 1090
    },
    {
      "epoch": 1.8612521150592216,
      "grad_norm": 1.495847463607788,
      "learning_rate": 2.900724790062571e-06,
      "loss": 0.1294,
      "step": 1100
    },
    {
      "epoch": 1.8781725888324874,
      "grad_norm": 1.7926485538482666,
      "learning_rate": 2.866879125531437e-06,
      "loss": 0.1286,
      "step": 1110
    },
    {
      "epoch": 1.895093062605753,
      "grad_norm": 1.778939962387085,
      "learning_rate": 2.832964594526748e-06,
      "loss": 0.1617,
      "step": 1120
    },
    {
      "epoch": 1.9120135363790185,
      "grad_norm": 1.9225724935531616,
      "learning_rate": 2.798987563107488e-06,
      "loss": 0.1467,
      "step": 1130
    },
    {
      "epoch": 1.9289340101522843,
      "grad_norm": 1.0806710720062256,
      "learning_rate": 2.7649544090645226e-06,
      "loss": 0.156,
      "step": 1140
    },
    {
      "epoch": 1.94585448392555,
      "grad_norm": 1.5917881727218628,
      "learning_rate": 2.7308715207234326e-06,
      "loss": 0.1377,
      "step": 1150
    },
    {
      "epoch": 1.9627749576988156,
      "grad_norm": 1.3541003465652466,
      "learning_rate": 2.69674529574537e-06,
      "loss": 0.141,
      "step": 1160
    },
    {
      "epoch": 1.9796954314720812,
      "grad_norm": 1.592405915260315,
      "learning_rate": 2.6625821399261562e-06,
      "loss": 0.1578,
      "step": 1170
    },
    {
      "epoch": 1.996615905245347,
      "grad_norm": 1.7678178548812866,
      "learning_rate": 2.628388465993864e-06,
      "loss": 0.172,
      "step": 1180
    },
    {
      "epoch": 2.0135363790186127,
      "grad_norm": 0.7831181287765503,
      "learning_rate": 2.594170692405083e-06,
      "loss": 0.1007,
      "step": 1190
    },
    {
      "epoch": 2.030456852791878,
      "grad_norm": 1.5708684921264648,
      "learning_rate": 2.559935242140125e-06,
      "loss": 0.0863,
      "step": 1200
    },
    {
      "epoch": 2.047377326565144,
      "grad_norm": 1.2178500890731812,
      "learning_rate": 2.5256885414973715e-06,
      "loss": 0.0815,
      "step": 1210
    },
    {
      "epoch": 2.0642978003384096,
      "grad_norm": 1.4404493570327759,
      "learning_rate": 2.491437018886998e-06,
      "loss": 0.0924,
      "step": 1220
    },
    {
      "epoch": 2.081218274111675,
      "grad_norm": 1.701210618019104,
      "learning_rate": 2.457187103624308e-06,
      "loss": 0.0843,
      "step": 1230
    },
    {
      "epoch": 2.0981387478849407,
      "grad_norm": 1.3683816194534302,
      "learning_rate": 2.4229452247228895e-06,
      "loss": 0.0712,
      "step": 1240
    },
    {
      "epoch": 2.1150592216582065,
      "grad_norm": 2.7988007068634033,
      "learning_rate": 2.3887178096878363e-06,
      "loss": 0.0883,
      "step": 1250
    },
    {
      "epoch": 2.1319796954314723,
      "grad_norm": 1.0649851560592651,
      "learning_rate": 2.354511283309244e-06,
      "loss": 0.0706,
      "step": 1260
    },
    {
      "epoch": 2.1489001692047376,
      "grad_norm": 2.3365893363952637,
      "learning_rate": 2.320332066456224e-06,
      "loss": 0.077,
      "step": 1270
    },
    {
      "epoch": 2.1658206429780034,
      "grad_norm": 1.3339556455612183,
      "learning_rate": 2.2861865748716448e-06,
      "loss": 0.0864,
      "step": 1280
    },
    {
      "epoch": 2.182741116751269,
      "grad_norm": 1.6249768733978271,
      "learning_rate": 2.2520812179678422e-06,
      "loss": 0.0783,
      "step": 1290
    },
    {
      "epoch": 2.199661590524535,
      "grad_norm": 1.3000891208648682,
      "learning_rate": 2.218022397623517e-06,
      "loss": 0.075,
      "step": 1300
    },
    {
      "epoch": 2.2165820642978002,
      "grad_norm": 1.5419507026672363,
      "learning_rate": 2.1840165069820434e-06,
      "loss": 0.0745,
      "step": 1310
    },
    {
      "epoch": 2.233502538071066,
      "grad_norm": 1.4701499938964844,
      "learning_rate": 2.150069929251419e-06,
      "loss": 0.0652,
      "step": 1320
    },
    {
      "epoch": 2.250423011844332,
      "grad_norm": 1.1285984516143799,
      "learning_rate": 2.1161890365060838e-06,
      "loss": 0.0842,
      "step": 1330
    },
    {
      "epoch": 2.267343485617597,
      "grad_norm": 1.860626459121704,
      "learning_rate": 2.082380188490817e-06,
      "loss": 0.0824,
      "step": 1340
    },
    {
      "epoch": 2.284263959390863,
      "grad_norm": 1.9373877048492432,
      "learning_rate": 2.048649731426965e-06,
      "loss": 0.0861,
      "step": 1350
    },
    {
      "epoch": 2.3011844331641287,
      "grad_norm": 1.4356682300567627,
      "learning_rate": 2.0150039968211958e-06,
      "loss": 0.0793,
      "step": 1360
    },
    {
      "epoch": 2.3181049069373945,
      "grad_norm": 1.4798492193222046,
      "learning_rate": 1.9814493002770186e-06,
      "loss": 0.0761,
      "step": 1370
    },
    {
      "epoch": 2.33502538071066,
      "grad_norm": 2.065126895904541,
      "learning_rate": 1.9479919403092863e-06,
      "loss": 0.0802,
      "step": 1380
    },
    {
      "epoch": 2.3519458544839256,
      "grad_norm": 1.5476558208465576,
      "learning_rate": 1.914638197161914e-06,
      "loss": 0.0804,
      "step": 1390
    },
    {
      "epoch": 2.3688663282571913,
      "grad_norm": 2.891502618789673,
      "learning_rate": 1.8813943316290177e-06,
      "loss": 0.08,
      "step": 1400
    },
    {
      "epoch": 2.3857868020304567,
      "grad_norm": 2.407071113586426,
      "learning_rate": 1.8482665838797087e-06,
      "loss": 0.0958,
      "step": 1410
    },
    {
      "epoch": 2.4027072758037225,
      "grad_norm": 2.067172050476074,
      "learning_rate": 1.8152611722867575e-06,
      "loss": 0.0772,
      "step": 1420
    },
    {
      "epoch": 2.4196277495769882,
      "grad_norm": 1.3346515893936157,
      "learning_rate": 1.78238429225935e-06,
      "loss": 0.0794,
      "step": 1430
    },
    {
      "epoch": 2.436548223350254,
      "grad_norm": 1.4311262369155884,
      "learning_rate": 1.7496421150801547e-06,
      "loss": 0.1038,
      "step": 1440
    },
    {
      "epoch": 2.4534686971235193,
      "grad_norm": 1.4051713943481445,
      "learning_rate": 1.7170407867469185e-06,
      "loss": 0.0854,
      "step": 1450
    },
    {
      "epoch": 2.470389170896785,
      "grad_norm": 1.4341652393341064,
      "learning_rate": 1.6845864268188073e-06,
      "loss": 0.0722,
      "step": 1460
    },
    {
      "epoch": 2.487309644670051,
      "grad_norm": 2.1416544914245605,
      "learning_rate": 1.6522851272677126e-06,
      "loss": 0.0662,
      "step": 1470
    },
    {
      "epoch": 2.504230118443316,
      "grad_norm": 1.5771617889404297,
      "learning_rate": 1.6201429513347275e-06,
      "loss": 0.0949,
      "step": 1480
    },
    {
      "epoch": 2.521150592216582,
      "grad_norm": 1.555201768875122,
      "learning_rate": 1.5881659323920329e-06,
      "loss": 0.069,
      "step": 1490
    },
    {
      "epoch": 2.5380710659898478,
      "grad_norm": 2.297422170639038,
      "learning_rate": 1.556360072810371e-06,
      "loss": 0.0822,
      "step": 1500
    },
    {
      "epoch": 2.5549915397631136,
      "grad_norm": 1.4423418045043945,
      "learning_rate": 1.5247313428323521e-06,
      "loss": 0.0739,
      "step": 1510
    },
    {
      "epoch": 2.571912013536379,
      "grad_norm": 1.7135895490646362,
      "learning_rate": 1.4932856794517828e-06,
      "loss": 0.0701,
      "step": 1520
    },
    {
      "epoch": 2.5888324873096447,
      "grad_norm": 1.2873613834381104,
      "learning_rate": 1.4620289852992436e-06,
      "loss": 0.0765,
      "step": 1530
    },
    {
      "epoch": 2.6057529610829104,
      "grad_norm": 1.5120519399642944,
      "learning_rate": 1.4309671275341115e-06,
      "loss": 0.0733,
      "step": 1540
    },
    {
      "epoch": 2.6226734348561758,
      "grad_norm": 1.4765515327453613,
      "learning_rate": 1.4001059367432387e-06,
      "loss": 0.0759,
      "step": 1550
    },
    {
      "epoch": 2.6395939086294415,
      "grad_norm": 1.6146334409713745,
      "learning_rate": 1.3694512058465064e-06,
      "loss": 0.0731,
      "step": 1560
    },
    {
      "epoch": 2.6565143824027073,
      "grad_norm": 2.312690258026123,
      "learning_rate": 1.3390086890094346e-06,
      "loss": 0.0826,
      "step": 1570
    },
    {
      "epoch": 2.673434856175973,
      "grad_norm": 1.315255880355835,
      "learning_rate": 1.3087841005630774e-06,
      "loss": 0.0753,
      "step": 1580
    },
    {
      "epoch": 2.6903553299492384,
      "grad_norm": 1.6095589399337769,
      "learning_rate": 1.2787831139313957e-06,
      "loss": 0.0942,
      "step": 1590
    },
    {
      "epoch": 2.707275803722504,
      "grad_norm": 2.9680864810943604,
      "learning_rate": 1.249011360566301e-06,
      "loss": 0.083,
      "step": 1600
    },
    {
      "epoch": 2.72419627749577,
      "grad_norm": 1.3204880952835083,
      "learning_rate": 1.2194744288905807e-06,
      "loss": 0.087,
      "step": 1610
    },
    {
      "epoch": 2.7411167512690353,
      "grad_norm": 1.4213435649871826,
      "learning_rate": 1.1901778632489037e-06,
      "loss": 0.0793,
      "step": 1620
    },
    {
      "epoch": 2.758037225042301,
      "grad_norm": 2.258690595626831,
      "learning_rate": 1.1611271628671e-06,
      "loss": 0.0802,
      "step": 1630
    },
    {
      "epoch": 2.774957698815567,
      "grad_norm": 1.5296716690063477,
      "learning_rate": 1.132327780819901e-06,
      "loss": 0.0789,
      "step": 1640
    },
    {
      "epoch": 2.7918781725888326,
      "grad_norm": 1.8829957246780396,
      "learning_rate": 1.1037851230073563e-06,
      "loss": 0.0779,
      "step": 1650
    },
    {
      "epoch": 2.808798646362098,
      "grad_norm": 1.4642276763916016,
      "learning_rate": 1.0755045471400897e-06,
      "loss": 0.0813,
      "step": 1660
    },
    {
      "epoch": 2.8257191201353637,
      "grad_norm": 1.2218215465545654,
      "learning_rate": 1.0474913617336184e-06,
      "loss": 0.0763,
      "step": 1670
    },
    {
      "epoch": 2.8426395939086295,
      "grad_norm": 2.2098135948181152,
      "learning_rate": 1.0197508251118892e-06,
      "loss": 0.0731,
      "step": 1680
    },
    {
      "epoch": 2.859560067681895,
      "grad_norm": 1.3853759765625,
      "learning_rate": 9.922881444202514e-07,
      "loss": 0.0861,
      "step": 1690
    },
    {
      "epoch": 2.8764805414551606,
      "grad_norm": 1.280692458152771,
      "learning_rate": 9.651084746480229e-07,
      "loss": 0.078,
      "step": 1700
    },
    {
      "epoch": 2.8934010152284264,
      "grad_norm": 1.077873945236206,
      "learning_rate": 9.382169176608568e-07,
      "loss": 0.0807,
      "step": 1710
    },
    {
      "epoch": 2.910321489001692,
      "grad_norm": 1.4427660703659058,
      "learning_rate": 9.116185212430765e-07,
      "loss": 0.0866,
      "step": 1720
    },
    {
      "epoch": 2.927241962774958,
      "grad_norm": 1.9770188331604004,
      "learning_rate": 8.853182781501598e-07,
      "loss": 0.0728,
      "step": 1730
    },
    {
      "epoch": 2.9441624365482233,
      "grad_norm": 1.3886785507202148,
      "learning_rate": 8.593211251715533e-07,
      "loss": 0.0726,
      "step": 1740
    },
    {
      "epoch": 2.961082910321489,
      "grad_norm": 1.6050931215286255,
      "learning_rate": 8.336319422039945e-07,
      "loss": 0.0806,
      "step": 1750
    },
    {
      "epoch": 2.9780033840947544,
      "grad_norm": 2.0447897911071777,
      "learning_rate": 8.082555513355148e-07,
      "loss": 0.0809,
      "step": 1760
    },
    {
      "epoch": 2.99492385786802,
      "grad_norm": 1.4558676481246948,
      "learning_rate": 7.831967159402837e-07,
      "loss": 0.0871,
      "step": 1770
    },
    {
      "epoch": 3.011844331641286,
      "grad_norm": 1.2891719341278076,
      "learning_rate": 7.584601397844882e-07,
      "loss": 0.0494,
      "step": 1780
    },
    {
      "epoch": 3.0287648054145517,
      "grad_norm": 1.3743082284927368,
      "learning_rate": 7.340504661433918e-07,
      "loss": 0.0507,
      "step": 1790
    },
    {
      "epoch": 3.045685279187817,
      "grad_norm": 1.0931307077407837,
      "learning_rate": 7.099722769297445e-07,
      "loss": 0.0465,
      "step": 1800
    },
    {
      "epoch": 3.062605752961083,
      "grad_norm": 1.310681939125061,
      "learning_rate": 6.862300918337275e-07,
      "loss": 0.0527,
      "step": 1810
    },
    {
      "epoch": 3.0795262267343486,
      "grad_norm": 1.3822815418243408,
      "learning_rate": 6.628283674745608e-07,
      "loss": 0.0412,
      "step": 1820
    },
    {
      "epoch": 3.0964467005076144,
      "grad_norm": 1.4022243022918701,
      "learning_rate": 6.397714965639556e-07,
      "loss": 0.0557,
      "step": 1830
    },
    {
      "epoch": 3.1133671742808797,
      "grad_norm": 0.8449741005897522,
      "learning_rate": 6.170638070815668e-07,
      "loss": 0.0465,
      "step": 1840
    },
    {
      "epoch": 3.1302876480541455,
      "grad_norm": 1.6387944221496582,
      "learning_rate": 5.947095614625894e-07,
      "loss": 0.0408,
      "step": 1850
    },
    {
      "epoch": 3.1472081218274113,
      "grad_norm": 1.1685737371444702,
      "learning_rate": 5.727129557976632e-07,
      "loss": 0.0369,
      "step": 1860
    },
    {
      "epoch": 3.164128595600677,
      "grad_norm": 1.1949836015701294,
      "learning_rate": 5.510781190452299e-07,
      "loss": 0.0413,
      "step": 1870
    },
    {
      "epoch": 3.1810490693739424,
      "grad_norm": 1.5283178091049194,
      "learning_rate": 5.298091122564911e-07,
      "loss": 0.0394,
      "step": 1880
    },
    {
      "epoch": 3.197969543147208,
      "grad_norm": 1.7606452703475952,
      "learning_rate": 5.089099278131079e-07,
      "loss": 0.0539,
      "step": 1890
    },
    {
      "epoch": 3.214890016920474,
      "grad_norm": 1.3933011293411255,
      "learning_rate": 4.883844886778028e-07,
      "loss": 0.0437,
      "step": 1900
    },
    {
      "epoch": 3.2318104906937393,
      "grad_norm": 1.65366530418396,
      "learning_rate": 4.6823664765798136e-07,
      "loss": 0.0472,
      "step": 1910
    },
    {
      "epoch": 3.248730964467005,
      "grad_norm": 1.2451823949813843,
      "learning_rate": 4.484701866825231e-07,
      "loss": 0.0408,
      "step": 1920
    },
    {
      "epoch": 3.265651438240271,
      "grad_norm": 0.8365059494972229,
      "learning_rate": 4.290888160918855e-07,
      "loss": 0.0417,
      "step": 1930
    },
    {
      "epoch": 3.2825719120135366,
      "grad_norm": 1.6369590759277344,
      "learning_rate": 4.1009617394163566e-07,
      "loss": 0.0483,
      "step": 1940
    },
    {
      "epoch": 3.299492385786802,
      "grad_norm": 1.3412925004959106,
      "learning_rate": 3.914958253195561e-07,
      "loss": 0.0422,
      "step": 1950
    },
    {
      "epoch": 3.3164128595600677,
      "grad_norm": 1.2426025867462158,
      "learning_rate": 3.732912616764478e-07,
      "loss": 0.0374,
      "step": 1960
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 1.1421136856079102,
      "learning_rate": 3.554859001707522e-07,
      "loss": 0.0427,
      "step": 1970
    },
    {
      "epoch": 3.350253807106599,
      "grad_norm": 1.1082905530929565,
      "learning_rate": 3.3808308302712007e-07,
      "loss": 0.0485,
      "step": 1980
    },
    {
      "epoch": 3.3671742808798646,
      "grad_norm": 1.8885458707809448,
      "learning_rate": 3.2108607690904595e-07,
      "loss": 0.0465,
      "step": 1990
    },
    {
      "epoch": 3.3840947546531304,
      "grad_norm": 1.5490193367004395,
      "learning_rate": 3.044980723056884e-07,
      "loss": 0.0424,
      "step": 2000
    },
    {
      "epoch": 3.401015228426396,
      "grad_norm": 1.3184294700622559,
      "learning_rate": 2.883221829329857e-07,
      "loss": 0.0469,
      "step": 2010
    },
    {
      "epoch": 3.4179357021996615,
      "grad_norm": 2.039376974105835,
      "learning_rate": 2.7256144514918204e-07,
      "loss": 0.0564,
      "step": 2020
    },
    {
      "epoch": 3.4348561759729273,
      "grad_norm": 1.3883781433105469,
      "learning_rate": 2.5721881738487776e-07,
      "loss": 0.0479,
      "step": 2030
    },
    {
      "epoch": 3.451776649746193,
      "grad_norm": 1.4026315212249756,
      "learning_rate": 2.4229717958770685e-07,
      "loss": 0.0469,
      "step": 2040
    },
    {
      "epoch": 3.4686971235194584,
      "grad_norm": 1.3182621002197266,
      "learning_rate": 2.277993326817421e-07,
      "loss": 0.0733,
      "step": 2050
    },
    {
      "epoch": 3.485617597292724,
      "grad_norm": 1.252341866493225,
      "learning_rate": 2.1372799804173894e-07,
      "loss": 0.0401,
      "step": 2060
    },
    {
      "epoch": 3.50253807106599,
      "grad_norm": 1.346508502960205,
      "learning_rate": 2.0008581698231e-07,
      "loss": 0.0486,
      "step": 2070
    },
    {
      "epoch": 3.5194585448392557,
      "grad_norm": 0.9947476983070374,
      "learning_rate": 1.8687535026212206e-07,
      "loss": 0.0448,
      "step": 2080
    },
    {
      "epoch": 3.536379018612521,
      "grad_norm": 1.5827783346176147,
      "learning_rate": 1.740990776032256e-07,
      "loss": 0.0462,
      "step": 2090
    },
    {
      "epoch": 3.553299492385787,
      "grad_norm": 1.36147940158844,
      "learning_rate": 1.617593972255846e-07,
      "loss": 0.0474,
      "step": 2100
    },
    {
      "epoch": 3.5702199661590526,
      "grad_norm": 1.3989553451538086,
      "learning_rate": 1.4985862539691033e-07,
      "loss": 0.0506,
      "step": 2110
    },
    {
      "epoch": 3.587140439932318,
      "grad_norm": 2.2407867908477783,
      "learning_rate": 1.3839899599787655e-07,
      "loss": 0.0563,
      "step": 2120
    },
    {
      "epoch": 3.6040609137055837,
      "grad_norm": 1.7094990015029907,
      "learning_rate": 1.2738266010280275e-07,
      "loss": 0.0455,
      "step": 2130
    },
    {
      "epoch": 3.6209813874788495,
      "grad_norm": 2.9613707065582275,
      "learning_rate": 1.168116855758747e-07,
      "loss": 0.0462,
      "step": 2140
    },
    {
      "epoch": 3.6379018612521152,
      "grad_norm": 1.730281949043274,
      "learning_rate": 1.066880566829917e-07,
      "loss": 0.0478,
      "step": 2150
    },
    {
      "epoch": 3.6548223350253806,
      "grad_norm": 1.7777799367904663,
      "learning_rate": 9.701367371930059e-08,
      "loss": 0.0514,
      "step": 2160
    },
    {
      "epoch": 3.6717428087986463,
      "grad_norm": 1.5566476583480835,
      "learning_rate": 8.779035265249064e-08,
      "loss": 0.0509,
      "step": 2170
    },
    {
      "epoch": 3.688663282571912,
      "grad_norm": 1.7991079092025757,
      "learning_rate": 7.901982478192572e-08,
      "loss": 0.0432,
      "step": 2180
    },
    {
      "epoch": 3.7055837563451774,
      "grad_norm": 1.1896286010742188,
      "learning_rate": 7.07037364136609e-08,
      "loss": 0.0468,
      "step": 2190
    },
    {
      "epoch": 3.7225042301184432,
      "grad_norm": 1.1462163925170898,
      "learning_rate": 6.2843648551415e-08,
      "loss": 0.0451,
      "step": 2200
    },
    {
      "epoch": 3.739424703891709,
      "grad_norm": 1.4667885303497314,
      "learning_rate": 5.544103660355987e-08,
      "loss": 0.0416,
      "step": 2210
    },
    {
      "epoch": 3.7563451776649748,
      "grad_norm": 1.2911655902862549,
      "learning_rate": 4.849729010616949e-08,
      "loss": 0.0494,
      "step": 2220
    },
    {
      "epoch": 3.77326565143824,
      "grad_norm": 1.1728025674819946,
      "learning_rate": 4.201371246219388e-08,
      "loss": 0.0411,
      "step": 2230
    },
    {
      "epoch": 3.790186125211506,
      "grad_norm": 1.3054083585739136,
      "learning_rate": 3.599152069679812e-08,
      "loss": 0.04,
      "step": 2240
    },
    {
      "epoch": 3.8071065989847717,
      "grad_norm": 1.867836594581604,
      "learning_rate": 3.043184522891679e-08,
      "loss": 0.0567,
      "step": 2250
    },
    {
      "epoch": 3.824027072758037,
      "grad_norm": 1.3285189867019653,
      "learning_rate": 2.5335729659062002e-08,
      "loss": 0.0401,
      "step": 2260
    },
    {
      "epoch": 3.8409475465313028,
      "grad_norm": 1.121762752532959,
      "learning_rate": 2.07041305734329e-08,
      "loss": 0.0408,
      "step": 2270
    },
    {
      "epoch": 3.8578680203045685,
      "grad_norm": 0.9373975992202759,
      "learning_rate": 1.6537917364354838e-08,
      "loss": 0.0379,
      "step": 2280
    },
    {
      "epoch": 3.8747884940778343,
      "grad_norm": 1.4871065616607666,
      "learning_rate": 1.2837872067085477e-08,
      "loss": 0.0454,
      "step": 2290
    },
    {
      "epoch": 3.8917089678511,
      "grad_norm": 1.1789824962615967,
      "learning_rate": 9.604689213022767e-09,
      "loss": 0.0558,
      "step": 2300
    },
    {
      "epoch": 3.9086294416243654,
      "grad_norm": 1.1937594413757324,
      "learning_rate": 6.838975699332007e-09,
      "loss": 0.0447,
      "step": 2310
    },
    {
      "epoch": 3.925549915397631,
      "grad_norm": 1.5232540369033813,
      "learning_rate": 4.541250675028363e-09,
      "loss": 0.0535,
      "step": 2320
    },
    {
      "epoch": 3.9424703891708965,
      "grad_norm": 1.3633273839950562,
      "learning_rate": 2.711945443526209e-09,
      "loss": 0.047,
      "step": 2330
    },
    {
      "epoch": 3.9593908629441623,
      "grad_norm": 1.0834922790527344,
      "learning_rate": 1.351403381680827e-09,
      "loss": 0.0456,
      "step": 2340
    },
    {
      "epoch": 3.976311336717428,
      "grad_norm": 1.0907598733901978,
      "learning_rate": 4.598798753330247e-10,
      "loss": 0.0413,
      "step": 2350
    },
    {
      "epoch": 3.993231810490694,
      "grad_norm": 1.6505200862884521,
      "learning_rate": 3.754227137081623e-11,
      "loss": 0.046,
      "step": 2360
    },
    {
      "epoch": 4.0,
      "step": 2364,
      "total_flos": 1.5782755510340813e+17,
      "train_loss": 0.13063472979615384,
      "train_runtime": 2788.0875,
      "train_samples_per_second": 13.558,
      "train_steps_per_second": 0.848
    }
  ],
  "logging_steps": 10,
  "max_steps": 2364,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.5782755510340813e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}