|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 3982, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 3.71875, |
|
"learning_rate": 5.0125313283208025e-08, |
|
"loss": 2.0023, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 2.506265664160401e-07, |
|
"loss": 1.9652, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 3.484375, |
|
"learning_rate": 5.012531328320802e-07, |
|
"loss": 1.9643, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 3.40625, |
|
"learning_rate": 7.518796992481203e-07, |
|
"loss": 1.9965, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.40625, |
|
"learning_rate": 1.0025062656641603e-06, |
|
"loss": 1.9539, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 1.2531328320802005e-06, |
|
"loss": 1.9449, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.4375, |
|
"learning_rate": 1.5037593984962406e-06, |
|
"loss": 1.9467, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.546875, |
|
"learning_rate": 1.7543859649122807e-06, |
|
"loss": 1.9068, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 2.0050125313283207e-06, |
|
"loss": 2.0021, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 2.255639097744361e-06, |
|
"loss": 1.9756, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 2.506265664160401e-06, |
|
"loss": 1.9301, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 2.7568922305764413e-06, |
|
"loss": 1.8914, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 3.007518796992481e-06, |
|
"loss": 1.9242, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 3.2581453634085216e-06, |
|
"loss": 1.8973, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 3.5087719298245615e-06, |
|
"loss": 1.9371, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 3.7593984962406014e-06, |
|
"loss": 1.8551, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.010025062656641e-06, |
|
"loss": 1.8906, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 4.260651629072682e-06, |
|
"loss": 1.8873, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 4.511278195488722e-06, |
|
"loss": 1.8961, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 4.761904761904762e-06, |
|
"loss": 1.7845, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.125, |
|
"learning_rate": 5.012531328320802e-06, |
|
"loss": 1.8556, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 5.263157894736842e-06, |
|
"loss": 1.8533, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 5.5137844611528826e-06, |
|
"loss": 1.8227, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 5.764411027568922e-06, |
|
"loss": 1.7946, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.96875, |
|
"learning_rate": 6.015037593984962e-06, |
|
"loss": 1.8398, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.9609375, |
|
"learning_rate": 6.265664160401003e-06, |
|
"loss": 1.7794, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.9921875, |
|
"learning_rate": 6.516290726817043e-06, |
|
"loss": 1.8295, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.9765625, |
|
"learning_rate": 6.766917293233083e-06, |
|
"loss": 1.7832, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.9140625, |
|
"learning_rate": 7.017543859649123e-06, |
|
"loss": 1.7952, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.9140625, |
|
"learning_rate": 7.268170426065163e-06, |
|
"loss": 1.7996, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.8984375, |
|
"learning_rate": 7.518796992481203e-06, |
|
"loss": 1.7931, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.921875, |
|
"learning_rate": 7.769423558897243e-06, |
|
"loss": 1.7922, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 8.020050125313283e-06, |
|
"loss": 1.7766, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.125, |
|
"learning_rate": 8.270676691729324e-06, |
|
"loss": 1.7967, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.8828125, |
|
"learning_rate": 8.521303258145363e-06, |
|
"loss": 1.7879, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.875, |
|
"learning_rate": 8.771929824561405e-06, |
|
"loss": 1.7487, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 9.022556390977444e-06, |
|
"loss": 1.8029, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.984375, |
|
"learning_rate": 9.273182957393484e-06, |
|
"loss": 1.7654, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 9.523809523809525e-06, |
|
"loss": 1.7712, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 9.774436090225564e-06, |
|
"loss": 1.7906, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.0025062656641604e-05, |
|
"loss": 1.7939, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 1.0275689223057645e-05, |
|
"loss": 1.8455, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 1.0526315789473684e-05, |
|
"loss": 1.7572, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 1.0776942355889726e-05, |
|
"loss": 1.7663, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.8984375, |
|
"learning_rate": 1.1027568922305765e-05, |
|
"loss": 1.7555, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.8828125, |
|
"learning_rate": 1.1278195488721806e-05, |
|
"loss": 1.742, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.875, |
|
"learning_rate": 1.1528822055137844e-05, |
|
"loss": 1.7503, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 1.1779448621553885e-05, |
|
"loss": 1.7498, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 1.2030075187969925e-05, |
|
"loss": 1.7433, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.984375, |
|
"learning_rate": 1.2280701754385966e-05, |
|
"loss": 1.7346, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.2531328320802006e-05, |
|
"loss": 1.7372, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 1.2781954887218047e-05, |
|
"loss": 1.7331, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.875, |
|
"learning_rate": 1.3032581453634086e-05, |
|
"loss": 1.7573, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.9296875, |
|
"learning_rate": 1.3283208020050127e-05, |
|
"loss": 1.7163, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 1.3533834586466165e-05, |
|
"loss": 1.6598, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.9296875, |
|
"learning_rate": 1.3784461152882206e-05, |
|
"loss": 1.7192, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 1.4035087719298246e-05, |
|
"loss": 1.7514, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.953125, |
|
"learning_rate": 1.4285714285714287e-05, |
|
"loss": 1.7656, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.4536340852130327e-05, |
|
"loss": 1.7618, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.953125, |
|
"learning_rate": 1.4786967418546368e-05, |
|
"loss": 1.7497, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 1.5037593984962406e-05, |
|
"loss": 1.737, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.734375, |
|
"learning_rate": 1.5288220551378447e-05, |
|
"loss": 1.7304, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.9140625, |
|
"learning_rate": 1.5538847117794486e-05, |
|
"loss": 1.7251, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 1.578947368421053e-05, |
|
"loss": 1.7399, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 1.6040100250626565e-05, |
|
"loss": 1.7211, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 1.6290726817042608e-05, |
|
"loss": 1.676, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.875, |
|
"learning_rate": 1.6541353383458648e-05, |
|
"loss": 1.7265, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 1.6791979949874687e-05, |
|
"loss": 1.7135, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 1.7042606516290727e-05, |
|
"loss": 1.7441, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 1.729323308270677e-05, |
|
"loss": 1.7039, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 1.754385964912281e-05, |
|
"loss": 1.6405, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 1.779448621553885e-05, |
|
"loss": 1.7578, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.9453125, |
|
"learning_rate": 1.8045112781954888e-05, |
|
"loss": 1.7595, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 1.8295739348370928e-05, |
|
"loss": 1.7058, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.875, |
|
"learning_rate": 1.8546365914786967e-05, |
|
"loss": 1.6768, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.8671875, |
|
"learning_rate": 1.879699248120301e-05, |
|
"loss": 1.7218, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.6796875, |
|
"learning_rate": 1.904761904761905e-05, |
|
"loss": 1.6976, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.929824561403509e-05, |
|
"loss": 1.7352, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 1.954887218045113e-05, |
|
"loss": 1.6723, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.6796875, |
|
"learning_rate": 1.9799498746867168e-05, |
|
"loss": 1.719, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.8828125, |
|
"learning_rate": 1.9999996156064378e-05, |
|
"loss": 1.6525, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 1.9999861618627885e-05, |
|
"loss": 1.6935, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 1.999953488736543e-05, |
|
"loss": 1.6571, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.734375, |
|
"learning_rate": 1.999901596855667e-05, |
|
"loss": 1.7119, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.6796875, |
|
"learning_rate": 1.9998304872175046e-05, |
|
"loss": 1.7224, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.9997401611887574e-05, |
|
"loss": 1.7085, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 1.9996306205054605e-05, |
|
"loss": 1.7252, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 1.999501867272947e-05, |
|
"loss": 1.7133, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 1.999353903965809e-05, |
|
"loss": 1.6672, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 1.9991867334278497e-05, |
|
"loss": 1.6256, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 1.999000358872027e-05, |
|
"loss": 1.6789, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.703125, |
|
"learning_rate": 1.9987947838803955e-05, |
|
"loss": 1.68, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.625, |
|
"learning_rate": 1.9985700124040336e-05, |
|
"loss": 1.6815, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 1.9983260487629698e-05, |
|
"loss": 1.6655, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 1.9980628976461004e-05, |
|
"loss": 1.6622, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.9977805641110968e-05, |
|
"loss": 1.6893, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 1.997479053584311e-05, |
|
"loss": 1.7168, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 1.997158371860668e-05, |
|
"loss": 1.6251, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 1.9968185251035594e-05, |
|
"loss": 1.7348, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 1.9964595198447194e-05, |
|
"loss": 1.6948, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.996081362984103e-05, |
|
"loss": 1.6973, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.995684061789753e-05, |
|
"loss": 1.6703, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 1.9952676238976574e-05, |
|
"loss": 1.6736, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 1.9948320573116073e-05, |
|
"loss": 1.6812, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 1.9943773704030387e-05, |
|
"loss": 1.6962, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 1.993903571910875e-05, |
|
"loss": 1.6456, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 1.9934106709413555e-05, |
|
"loss": 1.6697, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 1.9928986769678643e-05, |
|
"loss": 1.6567, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 1.992367599830746e-05, |
|
"loss": 1.685, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 1.9918174497371157e-05, |
|
"loss": 1.6425, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 1.9912482372606656e-05, |
|
"loss": 1.6892, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 1.990659973341459e-05, |
|
"loss": 1.7066, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.990052669285722e-05, |
|
"loss": 1.7015, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 1.989426336765625e-05, |
|
"loss": 1.6728, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.9887809878190584e-05, |
|
"loss": 1.6693, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 1.988116634849403e-05, |
|
"loss": 1.6836, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.9874332906252888e-05, |
|
"loss": 1.6245, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 1.9867309682803518e-05, |
|
"loss": 1.6658, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 1.986009681312981e-05, |
|
"loss": 1.6716, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 1.985269443586058e-05, |
|
"loss": 1.6758, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 1.984510269326692e-05, |
|
"loss": 1.633, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 1.9837321731259452e-05, |
|
"loss": 1.6552, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.982935169938553e-05, |
|
"loss": 1.6836, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.671875, |
|
"learning_rate": 1.9821192750826373e-05, |
|
"loss": 1.6429, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 1.9812845042394095e-05, |
|
"loss": 1.6634, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.980430873452873e-05, |
|
"loss": 1.6964, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.9795583991295104e-05, |
|
"loss": 1.7073, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.9786670980379724e-05, |
|
"loss": 1.6364, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 1.977756987308752e-05, |
|
"loss": 1.7014, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 1.976828084433858e-05, |
|
"loss": 1.6339, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 1.9758804072664773e-05, |
|
"loss": 1.6574, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 1.9749139740206318e-05, |
|
"loss": 1.6337, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.973928803270829e-05, |
|
"loss": 1.6727, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 1.9729249139517045e-05, |
|
"loss": 1.6769, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 1.971902325357658e-05, |
|
"loss": 1.6548, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 1.9708610571424825e-05, |
|
"loss": 1.6741, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 1.9698011293189884e-05, |
|
"loss": 1.6463, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 1.968722562258615e-05, |
|
"loss": 1.6643, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.967625376691042e-05, |
|
"loss": 1.6968, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.966509593703791e-05, |
|
"loss": 1.6512, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 1.965375234741819e-05, |
|
"loss": 1.648, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.964222321607106e-05, |
|
"loss": 1.6885, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 1.9630508764582374e-05, |
|
"loss": 1.6735, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.9618609218099773e-05, |
|
"loss": 1.6813, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 1.960652480532836e-05, |
|
"loss": 1.682, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 1.9594255758526305e-05, |
|
"loss": 1.6807, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 1.9581802313500367e-05, |
|
"loss": 1.6265, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.9569164709601388e-05, |
|
"loss": 1.6634, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.9556343189719675e-05, |
|
"loss": 1.6635, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 1.954333800028033e-05, |
|
"loss": 1.6683, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.953014939123853e-05, |
|
"loss": 1.6442, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.95167776160747e-05, |
|
"loss": 1.6648, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.9503222931789656e-05, |
|
"loss": 1.6437, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.9489485598899673e-05, |
|
"loss": 1.6726, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.9475565881431458e-05, |
|
"loss": 1.6561, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 1.9461464046917092e-05, |
|
"loss": 1.6713, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.944718036638888e-05, |
|
"loss": 1.6848, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.9432715114374133e-05, |
|
"loss": 1.6622, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 1.9418068568889918e-05, |
|
"loss": 1.7046, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.940324101143769e-05, |
|
"loss": 1.6199, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 1.938823272699789e-05, |
|
"loss": 1.6755, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.937304400402448e-05, |
|
"loss": 1.6476, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.9357675134439366e-05, |
|
"loss": 1.6813, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 1.9342126413626825e-05, |
|
"loss": 1.6479, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.9326398140427806e-05, |
|
"loss": 1.668, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.9310490617134196e-05, |
|
"loss": 1.6647, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 1.9294404149482996e-05, |
|
"loss": 1.637, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.9278139046650463e-05, |
|
"loss": 1.6636, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 1.9261695621246156e-05, |
|
"loss": 1.6229, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.924507418930694e-05, |
|
"loss": 1.6384, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.9228275070290885e-05, |
|
"loss": 1.6909, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.9211298587071165e-05, |
|
"loss": 1.6706, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 1.919414506592981e-05, |
|
"loss": 1.6457, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.9176814836551478e-05, |
|
"loss": 1.6747, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.9159308232017076e-05, |
|
"loss": 1.6199, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.9141625588797404e-05, |
|
"loss": 1.7087, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.912376724674664e-05, |
|
"loss": 1.7007, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.910573354909585e-05, |
|
"loss": 1.6024, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.9087524842446357e-05, |
|
"loss": 1.6815, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.9069141476763108e-05, |
|
"loss": 1.6368, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.9050583805367932e-05, |
|
"loss": 1.6118, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.903185218493275e-05, |
|
"loss": 1.6702, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.901294697547272e-05, |
|
"loss": 1.6367, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.899386854033933e-05, |
|
"loss": 1.6737, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.8974617246213392e-05, |
|
"loss": 1.6564, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.8955193463098017e-05, |
|
"loss": 1.6926, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.8935597564311493e-05, |
|
"loss": 1.6554, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.8915829926480103e-05, |
|
"loss": 1.6878, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.8895890929530903e-05, |
|
"loss": 1.658, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.8875780956684403e-05, |
|
"loss": 1.6548, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.8855500394447214e-05, |
|
"loss": 1.698, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.8835049632604618e-05, |
|
"loss": 1.6291, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.881442906421307e-05, |
|
"loss": 1.6031, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.8793639085592645e-05, |
|
"loss": 1.6021, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.877268009631943e-05, |
|
"loss": 1.5901, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.8751552499217825e-05, |
|
"loss": 1.6181, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.873025670035283e-05, |
|
"loss": 1.6659, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.870879310902221e-05, |
|
"loss": 1.6853, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.8687162137748647e-05, |
|
"loss": 1.6478, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.866536420227181e-05, |
|
"loss": 1.647, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 1.864339972154036e-05, |
|
"loss": 1.651, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 1.8621269117703887e-05, |
|
"loss": 1.6311, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.8598972816104833e-05, |
|
"loss": 1.6365, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.8576511245270266e-05, |
|
"loss": 1.6289, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.8553884836903695e-05, |
|
"loss": 1.6246, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 1.853109402587673e-05, |
|
"loss": 1.6639, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.8508139250220753e-05, |
|
"loss": 1.6381, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 1.8485020951118478e-05, |
|
"loss": 1.6427, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.8461739572895497e-05, |
|
"loss": 1.6531, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.8438295563011713e-05, |
|
"loss": 1.633, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.841468937205276e-05, |
|
"loss": 1.6537, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.839092145372133e-05, |
|
"loss": 1.6309, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.8366992264828463e-05, |
|
"loss": 1.6552, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.8342902265284755e-05, |
|
"loss": 1.6517, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.8318651918091535e-05, |
|
"loss": 1.6566, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.829424168933195e-05, |
|
"loss": 1.6815, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.826967204816202e-05, |
|
"loss": 1.5931, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.8244943466801615e-05, |
|
"loss": 1.6596, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.8220056420525378e-05, |
|
"loss": 1.6963, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 1.819501138765359e-05, |
|
"loss": 1.6305, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.8169808849542986e-05, |
|
"loss": 1.6534, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.814444929057748e-05, |
|
"loss": 1.6411, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.8118933198158893e-05, |
|
"loss": 1.6309, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.8093261062697553e-05, |
|
"loss": 1.5924, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 1.806743337760288e-05, |
|
"loss": 1.6534, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.8041450639273902e-05, |
|
"loss": 1.6357, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.8015313347089723e-05, |
|
"loss": 1.6311, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.798902200339991e-05, |
|
"loss": 1.6626, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.7962577113514846e-05, |
|
"loss": 1.6675, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.7935979185696027e-05, |
|
"loss": 1.6463, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.7909228731146278e-05, |
|
"loss": 1.612, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.7882326263999927e-05, |
|
"loss": 1.653, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.625, |
|
"learning_rate": 1.785527230131295e-05, |
|
"loss": 1.6389, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 1.7828067363052984e-05, |
|
"loss": 1.6481, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.78007119720894e-05, |
|
"loss": 1.6829, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.7773206654183187e-05, |
|
"loss": 1.5923, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.7745551937976897e-05, |
|
"loss": 1.6356, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.771774835498445e-05, |
|
"loss": 1.6434, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.7689796439580942e-05, |
|
"loss": 1.6394, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.766169672899236e-05, |
|
"loss": 1.6625, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.7633449763285275e-05, |
|
"loss": 1.6714, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.760505608535643e-05, |
|
"loss": 1.68, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.7576516240922344e-05, |
|
"loss": 1.6307, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.75478307785088e-05, |
|
"loss": 1.6423, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.7519000249440293e-05, |
|
"loss": 1.6374, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.7490025207829466e-05, |
|
"loss": 1.6324, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.7460906210566435e-05, |
|
"loss": 1.6392, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.7431643817308097e-05, |
|
"loss": 1.6503, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.740223859046736e-05, |
|
"loss": 1.6207, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.7372691095202355e-05, |
|
"loss": 1.5971, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.7343001899405553e-05, |
|
"loss": 1.6922, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.7313171573692867e-05, |
|
"loss": 1.6626, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.7283200691392674e-05, |
|
"loss": 1.6359, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.7253089828534794e-05, |
|
"loss": 1.5985, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.7222839563839432e-05, |
|
"loss": 1.6303, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.7192450478706045e-05, |
|
"loss": 1.6414, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.7161923157202168e-05, |
|
"loss": 1.6167, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.7131258186052195e-05, |
|
"loss": 1.6023, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.710045615462609e-05, |
|
"loss": 1.6223, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.7069517654928073e-05, |
|
"loss": 1.6865, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.7038443281585238e-05, |
|
"loss": 1.6107, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.7007233631836115e-05, |
|
"loss": 1.6117, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.697588930551921e-05, |
|
"loss": 1.6448, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.6944410905061454e-05, |
|
"loss": 1.6116, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.691279903546664e-05, |
|
"loss": 1.6388, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 1.68810543043038e-05, |
|
"loss": 1.6123, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.6849177321695505e-05, |
|
"loss": 1.6201, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.6817168700306154e-05, |
|
"loss": 1.6428, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.678502905533021e-05, |
|
"loss": 1.6362, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.675275900448035e-05, |
|
"loss": 1.6151, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.672035916797561e-05, |
|
"loss": 1.6055, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.6687830168529458e-05, |
|
"loss": 1.6513, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.6655172631337836e-05, |
|
"loss": 1.5987, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.6622387184067134e-05, |
|
"loss": 1.5954, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.6589474456842117e-05, |
|
"loss": 1.6548, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.6556435082233843e-05, |
|
"loss": 1.5937, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.6523269695247482e-05, |
|
"loss": 1.6298, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.648997893331011e-05, |
|
"loss": 1.6436, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.6456563436258475e-05, |
|
"loss": 1.647, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.6423023846326686e-05, |
|
"loss": 1.6254, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.6389360808133877e-05, |
|
"loss": 1.6002, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.6355574968671807e-05, |
|
"loss": 1.6898, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.6321666977292442e-05, |
|
"loss": 1.6357, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 1.6287637485695452e-05, |
|
"loss": 1.6306, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.6253487147915718e-05, |
|
"loss": 1.6559, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.6219216620310717e-05, |
|
"loss": 1.5824, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.6184826561547955e-05, |
|
"loss": 1.6891, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.6150317632592272e-05, |
|
"loss": 1.604, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.6115690496693156e-05, |
|
"loss": 1.6402, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.608094581937199e-05, |
|
"loss": 1.5887, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.6046084268409255e-05, |
|
"loss": 1.6338, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.601110651383172e-05, |
|
"loss": 1.6046, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.5976013227899527e-05, |
|
"loss": 1.6425, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.59408050850933e-05, |
|
"loss": 1.6547, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.5905482762101173e-05, |
|
"loss": 1.6704, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.587004693780578e-05, |
|
"loss": 1.6419, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.5834498293271202e-05, |
|
"loss": 1.6424, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.5798837511729907e-05, |
|
"loss": 1.6314, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.5763065278569572e-05, |
|
"loss": 1.6517, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.572718228131996e-05, |
|
"loss": 1.5977, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.569118920963965e-05, |
|
"loss": 1.6342, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.565508675530284e-05, |
|
"loss": 1.6403, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.561887561218602e-05, |
|
"loss": 1.5749, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.5582556476254627e-05, |
|
"loss": 1.6235, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.5546130045549687e-05, |
|
"loss": 1.635, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.5509597020174404e-05, |
|
"loss": 1.6273, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.5472958102280683e-05, |
|
"loss": 1.6525, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.543621399605565e-05, |
|
"loss": 1.6509, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.5399365407708113e-05, |
|
"loss": 1.6284, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.536241304545499e-05, |
|
"loss": 1.594, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.53253576195077e-05, |
|
"loss": 1.6252, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.5288199842058497e-05, |
|
"loss": 1.6424, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.5250940427266818e-05, |
|
"loss": 1.6434, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.5213580091245513e-05, |
|
"loss": 1.6102, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.517611955204711e-05, |
|
"loss": 1.6414, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.5138559529650014e-05, |
|
"loss": 1.6163, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.5100900745944649e-05, |
|
"loss": 1.6522, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.5063143924719603e-05, |
|
"loss": 1.621, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.5025289791647714e-05, |
|
"loss": 1.6983, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.4987339074272116e-05, |
|
"loss": 1.6299, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.494929250199226e-05, |
|
"loss": 1.605, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.4911150806049887e-05, |
|
"loss": 1.6289, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.4872914719514998e-05, |
|
"loss": 1.6703, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.4834584977271735e-05, |
|
"loss": 1.6258, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.479616231600427e-05, |
|
"loss": 1.615, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.4757647474182657e-05, |
|
"loss": 1.6475, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.4719041192048613e-05, |
|
"loss": 1.6328, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.4680344211601313e-05, |
|
"loss": 1.6485, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.4641557276583126e-05, |
|
"loss": 1.6417, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.460268113246531e-05, |
|
"loss": 1.6082, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.456371652643369e-05, |
|
"loss": 1.6131, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.4524664207374306e-05, |
|
"loss": 1.632, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.4485524925858998e-05, |
|
"loss": 1.6555, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.4446299434131011e-05, |
|
"loss": 1.6406, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.44069884860905e-05, |
|
"loss": 1.6423, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.4367592837280077e-05, |
|
"loss": 1.6825, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.4328113244870267e-05, |
|
"loss": 1.6249, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 1.4288550467644961e-05, |
|
"loss": 1.659, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.4248905265986835e-05, |
|
"loss": 1.6215, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.4209178401862724e-05, |
|
"loss": 1.6452, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.4169370638809004e-05, |
|
"loss": 1.5952, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.4129482741916889e-05, |
|
"loss": 1.5953, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.4089515477817737e-05, |
|
"loss": 1.5673, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.4049469614668318e-05, |
|
"loss": 1.644, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.4009345922136048e-05, |
|
"loss": 1.6232, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.3969145171384197e-05, |
|
"loss": 1.6291, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.392886813505707e-05, |
|
"loss": 1.652, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.3888515587265144e-05, |
|
"loss": 1.628, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.3848088303570209e-05, |
|
"loss": 1.628, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.3807587060970449e-05, |
|
"loss": 1.6372, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.3767012637885513e-05, |
|
"loss": 1.5894, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.3726365814141554e-05, |
|
"loss": 1.631, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.3685647370956241e-05, |
|
"loss": 1.5791, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.3644858090923741e-05, |
|
"loss": 1.6218, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.3603998757999676e-05, |
|
"loss": 1.5844, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.3563070157486076e-05, |
|
"loss": 1.6515, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.3522073076016247e-05, |
|
"loss": 1.6189, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.3481008301539698e-05, |
|
"loss": 1.6147, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.3439876623306956e-05, |
|
"loss": 1.6187, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.3398678831854417e-05, |
|
"loss": 1.6313, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.3357415718989163e-05, |
|
"loss": 1.5335, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.3316088077773722e-05, |
|
"loss": 1.621, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.3274696702510825e-05, |
|
"loss": 1.6076, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.3233242388728167e-05, |
|
"loss": 1.6324, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.3191725933163087e-05, |
|
"loss": 1.6292, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.3150148133747269e-05, |
|
"loss": 1.577, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.3108509789591407e-05, |
|
"loss": 1.6413, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.3066811700969844e-05, |
|
"loss": 1.6291, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.3025054669305187e-05, |
|
"loss": 1.6541, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.2983239497152905e-05, |
|
"loss": 1.6797, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 1.2941366988185919e-05, |
|
"loss": 1.5779, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.2899437947179135e-05, |
|
"loss": 1.6196, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.285745317999398e-05, |
|
"loss": 1.6481, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.2815413493562927e-05, |
|
"loss": 1.6075, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.2773319695873977e-05, |
|
"loss": 1.6253, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.2731172595955125e-05, |
|
"loss": 1.6332, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.2688973003858824e-05, |
|
"loss": 1.6095, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.2646721730646398e-05, |
|
"loss": 1.6107, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.260441958837247e-05, |
|
"loss": 1.611, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.2562067390069353e-05, |
|
"loss": 1.617, |
|
"step": 1895 |
|
}, |
|
    {
      "epoch": 0.48,
      "grad_norm": 1.4453125,
      "learning_rate": 1.2519665949731405e-05,
      "loss": 1.6319,
      "step": 1900
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.453125,
      "learning_rate": 1.2477216082299419e-05,
      "loss": 1.6473,
      "step": 1905
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.4140625,
      "learning_rate": 1.243471860364492e-05,
      "loss": 1.6115,
      "step": 1910
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.515625,
      "learning_rate": 1.2392174330554515e-05,
      "loss": 1.6295,
      "step": 1915
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.4453125,
      "learning_rate": 1.234958408071418e-05,
      "loss": 1.6129,
      "step": 1920
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.4453125,
      "learning_rate": 1.2306948672693555e-05,
      "loss": 1.6285,
      "step": 1925
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.3828125,
      "learning_rate": 1.226426892593019e-05,
      "loss": 1.6052,
      "step": 1930
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.4453125,
      "learning_rate": 1.2221545660713825e-05,
      "loss": 1.6283,
      "step": 1935
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.515625,
      "learning_rate": 1.2178779698170597e-05,
      "loss": 1.6261,
      "step": 1940
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.53125,
      "learning_rate": 1.2135971860247282e-05,
      "loss": 1.6627,
      "step": 1945
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.4453125,
      "learning_rate": 1.2093122969695476e-05,
      "loss": 1.6299,
      "step": 1950
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.484375,
      "learning_rate": 1.2050233850055798e-05,
      "loss": 1.6575,
      "step": 1955
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.4375,
      "learning_rate": 1.2007305325642055e-05,
      "loss": 1.6312,
      "step": 1960
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.40625,
      "learning_rate": 1.1964338221525401e-05,
      "loss": 1.6781,
      "step": 1965
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.4765625,
      "learning_rate": 1.1921333363518474e-05,
      "loss": 1.6375,
      "step": 1970
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.3984375,
      "learning_rate": 1.1878291578159536e-05,
      "loss": 1.6452,
      "step": 1975
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.46875,
      "learning_rate": 1.183521369269657e-05,
      "loss": 1.6708,
      "step": 1980
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.4453125,
      "learning_rate": 1.1792100535071396e-05,
      "loss": 1.6155,
      "step": 1985
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.3984375,
      "learning_rate": 1.1748952933903754e-05,
      "loss": 1.6023,
      "step": 1990
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.421875,
      "learning_rate": 1.1705771718475372e-05,
      "loss": 1.6147,
      "step": 1995
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.4296875,
      "learning_rate": 1.1662557718714039e-05,
      "loss": 1.6652,
      "step": 2000
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.390625,
      "learning_rate": 1.1619311765177639e-05,
      "loss": 1.6058,
      "step": 2005
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.4375,
      "learning_rate": 1.1576034689038201e-05,
      "loss": 1.5889,
      "step": 2010
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.4453125,
      "learning_rate": 1.1532727322065919e-05,
      "loss": 1.626,
      "step": 2015
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.4453125,
      "learning_rate": 1.1489390496613174e-05,
      "loss": 1.5881,
      "step": 2020
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.5234375,
      "learning_rate": 1.144602504559851e-05,
      "loss": 1.6013,
      "step": 2025
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.4609375,
      "learning_rate": 1.1402631802490662e-05,
      "loss": 1.6822,
      "step": 2030
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.4453125,
      "learning_rate": 1.1359211601292507e-05,
      "loss": 1.6418,
      "step": 2035
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.5390625,
      "learning_rate": 1.1315765276525061e-05,
      "loss": 1.6366,
      "step": 2040
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.4609375,
      "learning_rate": 1.1272293663211412e-05,
      "loss": 1.6232,
      "step": 2045
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.4140625,
      "learning_rate": 1.1228797596860686e-05,
      "loss": 1.6611,
      "step": 2050
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.453125,
      "learning_rate": 1.1185277913451997e-05,
      "loss": 1.6388,
      "step": 2055
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.453125,
      "learning_rate": 1.1141735449418368e-05,
      "loss": 1.5679,
      "step": 2060
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.4296875,
      "learning_rate": 1.109817104163065e-05,
      "loss": 1.6797,
      "step": 2065
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.4296875,
      "learning_rate": 1.1054585527381453e-05,
      "loss": 1.6294,
      "step": 2070
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.4140625,
      "learning_rate": 1.101097974436904e-05,
      "loss": 1.6137,
      "step": 2075
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.4140625,
      "learning_rate": 1.0967354530681243e-05,
      "loss": 1.6225,
      "step": 2080
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.3828125,
      "learning_rate": 1.0923710724779331e-05,
      "loss": 1.6253,
      "step": 2085
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.4140625,
      "learning_rate": 1.0880049165481916e-05,
      "loss": 1.6296,
      "step": 2090
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.4453125,
      "learning_rate": 1.0836370691948826e-05,
      "loss": 1.6128,
      "step": 2095
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.421875,
      "learning_rate": 1.079267614366497e-05,
      "loss": 1.651,
      "step": 2100
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.453125,
      "learning_rate": 1.0748966360424212e-05,
      "loss": 1.6071,
      "step": 2105
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.453125,
      "learning_rate": 1.0705242182313222e-05,
      "loss": 1.6355,
      "step": 2110
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.3671875,
      "learning_rate": 1.0661504449695337e-05,
      "loss": 1.6353,
      "step": 2115
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.3984375,
      "learning_rate": 1.0617754003194406e-05,
      "loss": 1.6562,
      "step": 2120
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.484375,
      "learning_rate": 1.057399168367864e-05,
      "loss": 1.599,
      "step": 2125
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.390625,
      "learning_rate": 1.0530218332244435e-05,
      "loss": 1.6245,
      "step": 2130
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.4296875,
      "learning_rate": 1.0486434790200226e-05,
      "loss": 1.6382,
      "step": 2135
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.46875,
      "learning_rate": 1.04426418990503e-05,
      "loss": 1.6523,
      "step": 2140
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.453125,
      "learning_rate": 1.0398840500478635e-05,
      "loss": 1.6372,
      "step": 2145
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.453125,
      "learning_rate": 1.0355031436332724e-05,
      "loss": 1.6227,
      "step": 2150
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.5,
      "learning_rate": 1.0311215548607373e-05,
      "loss": 1.6283,
      "step": 2155
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.375,
      "learning_rate": 1.0267393679428554e-05,
      "loss": 1.6355,
      "step": 2160
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.4296875,
      "learning_rate": 1.022356667103719e-05,
      "loss": 1.5956,
      "step": 2165
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.4375,
      "learning_rate": 1.0179735365772978e-05,
      "loss": 1.6589,
      "step": 2170
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.453125,
      "learning_rate": 1.01359006060582e-05,
      "loss": 1.6513,
      "step": 2175
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.4296875,
      "learning_rate": 1.0092063234381536e-05,
      "loss": 1.668,
      "step": 2180
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.421875,
      "learning_rate": 1.0048224093281862e-05,
      "loss": 1.6202,
      "step": 2185
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.4140625,
      "learning_rate": 1.0004384025332063e-05,
      "loss": 1.6059,
      "step": 2190
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.5234375,
      "learning_rate": 9.960543873122836e-06,
      "loss": 1.6166,
      "step": 2195
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.421875,
      "learning_rate": 9.916704479246505e-06,
      "loss": 1.6034,
      "step": 2200
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.4296875,
      "learning_rate": 9.872866686280809e-06,
      "loss": 1.629,
      "step": 2205
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.4453125,
      "learning_rate": 9.829031336772719e-06,
      "loss": 1.6198,
      "step": 2210
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.421875,
      "learning_rate": 9.785199273222254e-06,
      "loss": 1.6402,
      "step": 2215
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.40625,
      "learning_rate": 9.74137133806627e-06,
      "loss": 1.6183,
      "step": 2220
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.4296875,
      "learning_rate": 9.697548373662272e-06,
      "loss": 1.6332,
      "step": 2225
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.3984375,
      "learning_rate": 9.653731222272242e-06,
      "loss": 1.6507,
      "step": 2230
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.4609375,
      "learning_rate": 9.609920726046424e-06,
      "loss": 1.6473,
      "step": 2235
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.4140625,
      "learning_rate": 9.566117727007165e-06,
      "loss": 1.5653,
      "step": 2240
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.4609375,
      "learning_rate": 9.52232306703271e-06,
      "loss": 1.6348,
      "step": 2245
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.5,
      "learning_rate": 9.478537587841028e-06,
      "loss": 1.6096,
      "step": 2250
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.4375,
      "learning_rate": 9.434762130973646e-06,
      "loss": 1.6153,
      "step": 2255
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.359375,
      "learning_rate": 9.390997537779462e-06,
      "loss": 1.6025,
      "step": 2260
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.4609375,
      "learning_rate": 9.347244649398573e-06,
      "loss": 1.64,
      "step": 2265
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.3984375,
      "learning_rate": 9.30350430674612e-06,
      "loss": 1.621,
      "step": 2270
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.453125,
      "learning_rate": 9.259777350496116e-06,
      "loss": 1.5929,
      "step": 2275
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.4140625,
      "learning_rate": 9.21606462106529e-06,
      "loss": 1.5973,
      "step": 2280
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.421875,
      "learning_rate": 9.172366958596943e-06,
      "loss": 1.6234,
      "step": 2285
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.390625,
      "learning_rate": 9.128685202944786e-06,
      "loss": 1.5659,
      "step": 2290
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.421875,
      "learning_rate": 9.085020193656817e-06,
      "loss": 1.6311,
      "step": 2295
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.4609375,
      "learning_rate": 9.041372769959166e-06,
      "loss": 1.6372,
      "step": 2300
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.4375,
      "learning_rate": 8.997743770739974e-06,
      "loss": 1.6044,
      "step": 2305
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.4296875,
      "learning_rate": 8.95413403453328e-06,
      "loss": 1.6755,
      "step": 2310
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.390625,
      "learning_rate": 8.91054439950288e-06,
      "loss": 1.64,
      "step": 2315
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.453125,
      "learning_rate": 8.866975703426244e-06,
      "loss": 1.6352,
      "step": 2320
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.4375,
      "learning_rate": 8.823428783678402e-06,
      "loss": 1.6373,
      "step": 2325
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.421875,
      "learning_rate": 8.77990447721584e-06,
      "loss": 1.6642,
      "step": 2330
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.390625,
      "learning_rate": 8.73640362056044e-06,
      "loss": 1.6322,
      "step": 2335
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.4453125,
      "learning_rate": 8.692927049783372e-06,
      "loss": 1.6259,
      "step": 2340
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.4140625,
      "learning_rate": 8.649475600489052e-06,
      "loss": 1.5984,
      "step": 2345
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.4375,
      "learning_rate": 8.606050107799063e-06,
      "loss": 1.6185,
      "step": 2350
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.328125,
      "learning_rate": 8.56265140633611e-06,
      "loss": 1.6013,
      "step": 2355
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.4609375,
      "learning_rate": 8.51928033020799e-06,
      "loss": 1.6586,
      "step": 2360
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.4453125,
      "learning_rate": 8.475937712991534e-06,
      "loss": 1.6162,
      "step": 2365
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.4296875,
      "learning_rate": 8.432624387716625e-06,
      "loss": 1.6052,
      "step": 2370
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.421875,
      "learning_rate": 8.389341186850144e-06,
      "loss": 1.6685,
      "step": 2375
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.3828125,
      "learning_rate": 8.346088942280003e-06,
      "loss": 1.5978,
      "step": 2380
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.46875,
      "learning_rate": 8.30286848529914e-06,
      "loss": 1.6138,
      "step": 2385
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.4609375,
      "learning_rate": 8.259680646589553e-06,
      "loss": 1.645,
      "step": 2390
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.375,
      "learning_rate": 8.216526256206322e-06,
      "loss": 1.6061,
      "step": 2395
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.5078125,
      "learning_rate": 8.173406143561661e-06,
      "loss": 1.7134,
      "step": 2400
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.53125,
      "learning_rate": 8.130321137408985e-06,
      "loss": 1.625,
      "step": 2405
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.3671875,
      "learning_rate": 8.087272065826967e-06,
      "loss": 1.6675,
      "step": 2410
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.4296875,
      "learning_rate": 8.044259756203639e-06,
      "loss": 1.6193,
      "step": 2415
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.3671875,
      "learning_rate": 8.001285035220472e-06,
      "loss": 1.577,
      "step": 2420
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.4140625,
      "learning_rate": 7.958348728836499e-06,
      "loss": 1.6533,
      "step": 2425
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.3984375,
      "learning_rate": 7.915451662272442e-06,
      "loss": 1.6036,
      "step": 2430
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.390625,
      "learning_rate": 7.872594659994842e-06,
      "loss": 1.5943,
      "step": 2435
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.4453125,
      "learning_rate": 7.829778545700225e-06,
      "loss": 1.6511,
      "step": 2440
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.390625,
      "learning_rate": 7.787004142299258e-06,
      "loss": 1.6339,
      "step": 2445
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.4296875,
      "learning_rate": 7.744272271900939e-06,
      "loss": 1.6608,
      "step": 2450
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.53125,
      "learning_rate": 7.701583755796804e-06,
      "loss": 1.6451,
      "step": 2455
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.4375,
      "learning_rate": 7.658939414445127e-06,
      "loss": 1.6154,
      "step": 2460
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.421875,
      "learning_rate": 7.616340067455165e-06,
      "loss": 1.6226,
      "step": 2465
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.4453125,
      "learning_rate": 7.57378653357139e-06,
      "loss": 1.6419,
      "step": 2470
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.390625,
      "learning_rate": 7.531279630657776e-06,
      "loss": 1.6246,
      "step": 2475
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.4140625,
      "learning_rate": 7.488820175682054e-06,
      "loss": 1.6596,
      "step": 2480
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.40625,
      "learning_rate": 7.446408984700029e-06,
      "loss": 1.6198,
      "step": 2485
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.3984375,
      "learning_rate": 7.404046872839886e-06,
      "loss": 1.6242,
      "step": 2490
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.4296875,
      "learning_rate": 7.361734654286524e-06,
      "loss": 1.6338,
      "step": 2495
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.421875,
      "learning_rate": 7.3194731422659185e-06,
      "loss": 1.5629,
      "step": 2500
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.453125,
      "learning_rate": 7.277263149029472e-06,
      "loss": 1.5845,
      "step": 2505
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.4765625,
      "learning_rate": 7.235105485838427e-06,
      "loss": 1.6297,
      "step": 2510
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.4375,
      "learning_rate": 7.193000962948252e-06,
      "loss": 1.6451,
      "step": 2515
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.40625,
      "learning_rate": 7.1509503895930765e-06,
      "loss": 1.6632,
      "step": 2520
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.4140625,
      "learning_rate": 7.108954573970146e-06,
      "loss": 1.612,
      "step": 2525
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.421875,
      "learning_rate": 7.067014323224273e-06,
      "loss": 1.6612,
      "step": 2530
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.4921875,
      "learning_rate": 7.0251304434323445e-06,
      "loss": 1.6252,
      "step": 2535
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.4296875,
      "learning_rate": 6.983303739587806e-06,
      "loss": 1.6268,
      "step": 2540
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.375,
      "learning_rate": 6.941535015585203e-06,
      "loss": 1.566,
      "step": 2545
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.46875,
      "learning_rate": 6.899825074204739e-06,
      "loss": 1.6449,
      "step": 2550
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.453125,
      "learning_rate": 6.858174717096821e-06,
      "loss": 1.5949,
      "step": 2555
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.40625,
      "learning_rate": 6.816584744766683e-06,
      "loss": 1.623,
      "step": 2560
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.421875,
      "learning_rate": 6.775055956558971e-06,
      "loss": 1.6131,
      "step": 2565
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.421875,
      "learning_rate": 6.7335891506424e-06,
      "loss": 1.6714,
      "step": 2570
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.4453125,
      "learning_rate": 6.69218512399441e-06,
      "loss": 1.6585,
      "step": 2575
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.40625,
      "learning_rate": 6.6508446723858355e-06,
      "loss": 1.6033,
      "step": 2580
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.40625,
      "learning_rate": 6.609568590365636e-06,
      "loss": 1.6234,
      "step": 2585
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.4296875,
      "learning_rate": 6.568357671245598e-06,
      "loss": 1.6487,
      "step": 2590
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.3984375,
      "learning_rate": 6.5272127070851035e-06,
      "loss": 1.6592,
      "step": 2595
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.4140625,
      "learning_rate": 6.486134488675904e-06,
      "loss": 1.6651,
      "step": 2600
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.4140625,
      "learning_rate": 6.445123805526922e-06,
      "loss": 1.6068,
      "step": 2605
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.4296875,
      "learning_rate": 6.404181445849073e-06,
      "loss": 1.6203,
      "step": 2610
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.453125,
      "learning_rate": 6.363308196540121e-06,
      "loss": 1.6255,
      "step": 2615
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.390625,
      "learning_rate": 6.3225048431695564e-06,
      "loss": 1.6608,
      "step": 2620
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.421875,
      "learning_rate": 6.281772169963488e-06,
      "loss": 1.5833,
      "step": 2625
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.40625,
      "learning_rate": 6.241110959789583e-06,
      "loss": 1.6541,
      "step": 2630
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.4375,
      "learning_rate": 6.200521994142006e-06,
      "loss": 1.6032,
      "step": 2635
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.375,
      "learning_rate": 6.160006053126415e-06,
      "loss": 1.6192,
      "step": 2640
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.4296875,
      "learning_rate": 6.1195639154449594e-06,
      "loss": 1.5958,
      "step": 2645
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.40625,
      "learning_rate": 6.079196358381307e-06,
      "loss": 1.6035,
      "step": 2650
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.421875,
      "learning_rate": 6.038904157785725e-06,
      "loss": 1.5928,
      "step": 2655
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.3671875,
      "learning_rate": 5.998688088060144e-06,
      "loss": 1.6468,
      "step": 2660
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.484375,
      "learning_rate": 5.958548922143289e-06,
      "loss": 1.6246,
      "step": 2665
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.4609375,
      "learning_rate": 5.918487431495828e-06,
      "loss": 1.6489,
      "step": 2670
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.390625,
      "learning_rate": 5.878504386085526e-06,
      "loss": 1.648,
      "step": 2675
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.40625,
      "learning_rate": 5.838600554372469e-06,
      "loss": 1.5877,
      "step": 2680
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.4609375,
      "learning_rate": 5.798776703294273e-06,
      "loss": 1.6025,
      "step": 2685
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.421875,
      "learning_rate": 5.759033598251367e-06,
      "loss": 1.6379,
      "step": 2690
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.4375,
      "learning_rate": 5.7193720030922605e-06,
      "loss": 1.6229,
      "step": 2695
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.421875,
      "learning_rate": 5.679792680098869e-06,
      "loss": 1.6149,
      "step": 2700
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.4296875,
      "learning_rate": 5.6402963899718775e-06,
      "loss": 1.6051,
      "step": 2705
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.4765625,
      "learning_rate": 5.6008838918161e-06,
      "loss": 1.5936,
      "step": 2710
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.421875,
      "learning_rate": 5.561555943125898e-06,
      "loss": 1.5987,
      "step": 2715
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.484375,
      "learning_rate": 5.522313299770624e-06,
      "loss": 1.6739,
      "step": 2720
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.375,
      "learning_rate": 5.483156715980089e-06,
      "loss": 1.5944,
      "step": 2725
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.40625,
      "learning_rate": 5.4440869443300804e-06,
      "loss": 1.5757,
      "step": 2730
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.4453125,
      "learning_rate": 5.405104735727873e-06,
      "loss": 1.5867,
      "step": 2735
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.3671875,
      "learning_rate": 5.366210839397816e-06,
      "loss": 1.5878,
      "step": 2740
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.4609375,
      "learning_rate": 5.327406002866932e-06,
      "loss": 1.6447,
      "step": 2745
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.3671875,
      "learning_rate": 5.288690971950536e-06,
      "loss": 1.5847,
      "step": 2750
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.46875,
      "learning_rate": 5.2500664907379174e-06,
      "loss": 1.5775,
      "step": 2755
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.359375,
      "learning_rate": 5.211533301578036e-06,
      "loss": 1.6567,
      "step": 2760
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.34375,
      "learning_rate": 5.1730921450652404e-06,
      "loss": 1.6168,
      "step": 2765
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.4921875,
      "learning_rate": 5.1347437600250515e-06,
      "loss": 1.6286,
      "step": 2770
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.375,
      "learning_rate": 5.0964888834999526e-06,
      "loss": 1.6167,
      "step": 2775
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.4609375,
      "learning_rate": 5.0583282507352205e-06,
      "loss": 1.6098,
      "step": 2780
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.421875,
      "learning_rate": 5.020262595164814e-06,
      "loss": 1.5851,
      "step": 2785
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.4453125,
      "learning_rate": 4.982292648397252e-06,
      "loss": 1.6563,
      "step": 2790
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.3984375,
      "learning_rate": 4.944419140201564e-06,
      "loss": 1.6066,
      "step": 2795
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.484375,
      "learning_rate": 4.906642798493275e-06,
      "loss": 1.6134,
      "step": 2800
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.4609375,
      "learning_rate": 4.868964349320391e-06,
      "loss": 1.5946,
      "step": 2805
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.421875,
      "learning_rate": 4.831384516849461e-06,
      "loss": 1.6384,
      "step": 2810
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.4296875,
      "learning_rate": 4.793904023351665e-06,
      "loss": 1.6232,
      "step": 2815
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.375,
      "learning_rate": 4.756523589188913e-06,
      "loss": 1.5959,
      "step": 2820
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.3984375,
      "learning_rate": 4.719243932800012e-06,
      "loss": 1.6354,
      "step": 2825
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.4765625,
      "learning_rate": 4.682065770686855e-06,
      "loss": 1.6081,
      "step": 2830
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.4296875,
      "learning_rate": 4.644989817400651e-06,
      "loss": 1.6048,
      "step": 2835
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.46875,
      "learning_rate": 4.608016785528198e-06,
      "loss": 1.6491,
      "step": 2840
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.3828125,
      "learning_rate": 4.5711473856781706e-06,
      "loss": 1.5842,
      "step": 2845
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.4453125,
      "learning_rate": 4.534382326467477e-06,
      "loss": 1.6355,
      "step": 2850
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.5078125,
      "learning_rate": 4.497722314507636e-06,
      "loss": 1.6665,
      "step": 2855
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.4375,
      "learning_rate": 4.461168054391187e-06,
      "loss": 1.6266,
      "step": 2860
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.40625,
      "learning_rate": 4.4247202486781735e-06,
      "loss": 1.5948,
      "step": 2865
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.375,
      "learning_rate": 4.388379597882606e-06,
      "loss": 1.5851,
      "step": 2870
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.4296875,
      "learning_rate": 4.352146800459023e-06,
      "loss": 1.5952,
      "step": 2875
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.375,
      "learning_rate": 4.3160225527890556e-06,
      "loss": 1.6181,
      "step": 2880
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.3984375,
      "learning_rate": 4.280007549168046e-06,
      "loss": 1.64,
      "step": 2885
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.453125,
      "learning_rate": 4.244102481791713e-06,
      "loss": 1.6083,
      "step": 2890
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.46875,
      "learning_rate": 4.208308040742831e-06,
      "loss": 1.6524,
      "step": 2895
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.421875,
      "learning_rate": 4.172624913977973e-06,
      "loss": 1.6263,
      "step": 2900
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.3359375,
      "learning_rate": 4.1370537873143025e-06,
      "loss": 1.6471,
      "step": 2905
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.4765625,
      "learning_rate": 4.101595344416368e-06,
      "loss": 1.6364,
      "step": 2910
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.4140625,
      "learning_rate": 4.066250266782979e-06,
      "loss": 1.6406,
      "step": 2915
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.4609375,
      "learning_rate": 4.0310192337341106e-06,
      "loss": 1.6156,
      "step": 2920
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.46875,
      "learning_rate": 3.995902922397834e-06,
      "loss": 1.65,
      "step": 2925
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.421875,
      "learning_rate": 3.960902007697314e-06,
      "loss": 1.6574,
      "step": 2930
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.4140625,
      "learning_rate": 3.926017162337826e-06,
      "loss": 1.6318,
      "step": 2935
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.3984375,
      "learning_rate": 3.891249056793839e-06,
      "loss": 1.625,
      "step": 2940
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.4296875,
      "learning_rate": 3.856598359296126e-06,
      "loss": 1.6285,
      "step": 2945
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.421875,
      "learning_rate": 3.822065735818914e-06,
      "loss": 1.6158,
      "step": 2950
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.3984375,
      "learning_rate": 3.78765185006709e-06,
      "loss": 1.6165,
      "step": 2955
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.4140625,
      "learning_rate": 3.753357363463442e-06,
      "loss": 1.584,
      "step": 2960
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.4375,
      "learning_rate": 3.7191829351359487e-06,
      "loss": 1.6093,
      "step": 2965
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.40625,
      "learning_rate": 3.685129221905117e-06,
      "loss": 1.6096,
      "step": 2970
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.4296875,
      "learning_rate": 3.6511968782713468e-06,
      "loss": 1.6229,
      "step": 2975
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.375,
      "learning_rate": 3.6173865564023557e-06,
      "loss": 1.6342,
      "step": 2980
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.4296875,
      "learning_rate": 3.5836989061206473e-06,
      "loss": 1.6376,
      "step": 2985
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.421875,
      "learning_rate": 3.5501345748910223e-06,
      "loss": 1.6727,
      "step": 2990
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.390625,
      "learning_rate": 3.5166942078081266e-06,
      "loss": 1.6045,
      "step": 2995
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.4296875,
      "learning_rate": 3.4833784475840705e-06,
      "loss": 1.6253,
      "step": 3000
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.3984375,
      "learning_rate": 3.4501879345360534e-06,
      "loss": 1.6315,
      "step": 3005
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.3828125,
      "learning_rate": 3.4171233065740684e-06,
      "loss": 1.6292,
      "step": 3010
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.40625,
      "learning_rate": 3.384185199188652e-06,
      "loss": 1.6187,
      "step": 3015
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.3671875,
      "learning_rate": 3.351374245438649e-06,
      "loss": 1.6295,
      "step": 3020
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.3671875,
      "learning_rate": 3.3186910759390546e-06,
      "loss": 1.5787,
      "step": 3025
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.5,
      "learning_rate": 3.2861363188489083e-06,
      "loss": 1.684,
      "step": 3030
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.453125,
      "learning_rate": 3.2537105998591966e-06,
      "loss": 1.6018,
      "step": 3035
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.4375,
      "learning_rate": 3.221414542180844e-06,
      "loss": 1.6337,
      "step": 3040
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.4609375,
      "learning_rate": 3.1892487665327265e-06,
      "loss": 1.5943,
      "step": 3045
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.3984375,
      "learning_rate": 3.157213891129747e-06,
      "loss": 1.601,
      "step": 3050
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.4296875,
      "learning_rate": 3.1253105316709574e-06,
      "loss": 1.6267,
      "step": 3055
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.421875,
      "learning_rate": 3.0935393013277127e-06,
      "loss": 1.6177,
      "step": 3060
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.4375,
      "learning_rate": 3.0619008107318924e-06,
      "loss": 1.6532,
      "step": 3065
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.390625,
      "learning_rate": 3.0303956679641677e-06,
      "loss": 1.6221,
      "step": 3070
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.4453125,
      "learning_rate": 2.9990244785423074e-06,
      "loss": 1.6616,
      "step": 3075
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.359375,
      "learning_rate": 2.967787845409553e-06,
      "loss": 1.6482,
      "step": 3080
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.4296875,
      "learning_rate": 2.936686368923013e-06,
      "loss": 1.6737,
      "step": 3085
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.375,
      "learning_rate": 2.905720646842135e-06,
      "loss": 1.6354,
      "step": 3090
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.40625,
      "learning_rate": 2.8748912743172152e-06,
      "loss": 1.6529,
      "step": 3095
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.4296875,
      "learning_rate": 2.8441988438779543e-06,
      "loss": 1.6664,
      "step": 3100
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.4140625,
      "learning_rate": 2.8136439454220864e-06,
      "loss": 1.627,
      "step": 3105
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.390625,
      "learning_rate": 2.7832271662040176e-06,
      "loss": 1.6276,
      "step": 3110
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.453125,
      "learning_rate": 2.7529490908235523e-06,
      "loss": 1.626,
      "step": 3115
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.4375,
      "learning_rate": 2.7228103012146657e-06,
      "loss": 1.5861,
      "step": 3120
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.3671875,
      "learning_rate": 2.692811376634297e-06,
      "loss": 1.6465,
      "step": 3125
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.375,
      "learning_rate": 2.6629528936512306e-06,
      "loss": 1.5498,
      "step": 3130
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.3828125,
      "learning_rate": 2.6332354261350247e-06,
      "loss": 1.6333,
      "step": 3135
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.4375,
      "learning_rate": 2.6036595452449554e-06,
      "loss": 1.6199,
      "step": 3140
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.3828125,
      "learning_rate": 2.5742258194190607e-06,
      "loss": 1.6126,
      "step": 3145
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.4453125,
      "learning_rate": 2.544934814363206e-06,
      "loss": 1.6537,
      "step": 3150
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.4296875,
      "learning_rate": 2.515787093040212e-06,
      "loss": 1.6169,
      "step": 3155
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.34375,
      "learning_rate": 2.4867832156590444e-06,
      "loss": 1.5844,
      "step": 3160
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.4375,
      "learning_rate": 2.457923739664031e-06,
      "loss": 1.6596,
      "step": 3165
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.390625,
      "learning_rate": 2.4292092197241577e-06,
      "loss": 1.629,
      "step": 3170
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.40625,
      "learning_rate": 2.4006402077224067e-06,
      "loss": 1.6467,
      "step": 3175
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.4140625,
      "learning_rate": 2.372217252745145e-06,
      "loss": 1.6157,
      "step": 3180
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.40625,
      "learning_rate": 2.343940901071584e-06,
      "loss": 1.5702,
      "step": 3185
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.5078125,
      "learning_rate": 2.3158116961632626e-06,
      "loss": 1.7161,
      "step": 3190
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.421875,
      "learning_rate": 2.2878301786536137e-06,
      "loss": 1.6483,
      "step": 3195
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.5078125,
      "learning_rate": 2.259996886337571e-06,
      "loss": 1.669,
      "step": 3200
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.4140625,
      "learning_rate": 2.232312354161226e-06,
      "loss": 1.6202,
      "step": 3205
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.3828125,
      "learning_rate": 2.204777114211565e-06,
      "loss": 1.6383,
      "step": 3210
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.3828125,
      "learning_rate": 2.177391695706219e-06,
      "loss": 1.6728,
      "step": 3215
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.4765625,
      "learning_rate": 2.1501566249833082e-06,
      "loss": 1.5633,
      "step": 3220
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.390625,
      "learning_rate": 2.1230724254913184e-06,
      "loss": 1.5665,
      "step": 3225
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.3984375,
      "learning_rate": 2.0961396177790494e-06,
      "loss": 1.6195,
      "step": 3230
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.453125,
      "learning_rate": 2.0693587194855967e-06,
      "loss": 1.6128,
      "step": 3235
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.3984375,
      "learning_rate": 2.0427302453304143e-06,
      "loss": 1.6486,
      "step": 3240
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.3828125,
      "learning_rate": 2.0162547071034187e-06,
      "loss": 1.5957,
      "step": 3245
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.4140625,
      "learning_rate": 1.98993261365515e-06,
      "loss": 1.6555,
      "step": 3250
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.421875,
      "learning_rate": 1.963764470886992e-06,
      "loss": 1.6126,
      "step": 3255
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.40625,
      "learning_rate": 1.9377507817414532e-06,
      "loss": 1.5952,
      "step": 3260
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.453125,
      "learning_rate": 1.9118920461924938e-06,
      "loss": 1.6249,
      "step": 3265
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.3984375,
      "learning_rate": 1.8861887612359286e-06,
      "loss": 1.6511,
      "step": 3270
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.4609375,
      "learning_rate": 1.8606414208798596e-06,
      "loss": 1.6364,
      "step": 3275
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.40625,
      "learning_rate": 1.8352505161351874e-06,
      "loss": 1.6138,
      "step": 3280
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.453125,
      "learning_rate": 1.8100165350061816e-06,
      "loss": 1.6323,
      "step": 3285
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.3984375,
      "learning_rate": 1.7849399624810848e-06,
      "loss": 1.6198,
      "step": 3290
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.4453125,
      "learning_rate": 1.7600212805228156e-06,
      "loss": 1.6198,
      "step": 3295
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.453125,
      "learning_rate": 1.735260968059681e-06,
      "loss": 1.5782,
      "step": 3300
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.4296875,
      "learning_rate": 1.710659500976185e-06,
      "loss": 1.6216,
      "step": 3305
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.4375,
      "learning_rate": 1.6862173521038783e-06,
      "loss": 1.6247,
      "step": 3310
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.40625,
      "learning_rate": 1.6619349912122695e-06,
      "loss": 1.6394,
      "step": 3315
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.3828125,
      "learning_rate": 1.6378128849998065e-06,
      "loss": 1.5938,
      "step": 3320
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.4609375,
      "learning_rate": 1.6138514970848918e-06,
      "loss": 1.588,
      "step": 3325
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.3828125,
      "learning_rate": 1.5900512879969765e-06,
      "loss": 1.6692,
      "step": 3330
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.390625,
      "learning_rate": 1.56641271516772e-06,
      "loss": 1.6308,
      "step": 3335
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.3671875,
      "learning_rate": 1.542936232922182e-06,
      "loss": 1.6051,
      "step": 3340
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.4765625,
      "learning_rate": 1.5196222924700988e-06,
      "loss": 1.6803,
      "step": 3345
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.390625,
      "learning_rate": 1.4964713418972166e-06,
      "loss": 1.5859,
      "step": 3350
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.40625,
      "learning_rate": 1.4734838261566676e-06,
      "loss": 1.6232,
      "step": 3355
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.3828125,
      "learning_rate": 1.4506601870604253e-06,
      "loss": 1.6776,
      "step": 3360
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.4140625,
      "learning_rate": 1.428000863270812e-06,
      "loss": 1.5972,
      "step": 3365
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.5,
      "learning_rate": 1.4055062902920657e-06,
      "loss": 1.598,
      "step": 3370
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.453125,
      "learning_rate": 1.3831769004619766e-06,
      "loss": 1.64,
      "step": 3375
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.3984375,
      "learning_rate": 1.3610131229435696e-06,
      "loss": 1.6315,
      "step": 3380
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.4765625,
      "learning_rate": 1.3390153837168573e-06,
      "loss": 1.6181,
      "step": 3385
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.4453125,
      "learning_rate": 1.3171841055706591e-06,
      "loss": 1.5947,
      "step": 3390
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.3984375,
      "learning_rate": 1.2955197080944648e-06,
      "loss": 1.6375,
      "step": 3395
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.4765625,
      "learning_rate": 1.2740226076703844e-06,
      "loss": 1.6416,
      "step": 3400
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.390625,
      "learning_rate": 1.2526932174651307e-06,
      "loss": 1.6436,
      "step": 3405
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.375,
      "learning_rate": 1.23153194742209e-06,
      "loss": 1.6326,
      "step": 3410
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.4296875,
      "learning_rate": 1.2105392042534325e-06,
      "loss": 1.5622,
      "step": 3415
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.421875,
      "learning_rate": 1.1897153914323034e-06,
      "loss": 1.6367,
      "step": 3420
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.4140625,
      "learning_rate": 1.1690609091850713e-06,
      "loss": 1.636,
      "step": 3425
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.390625,
      "learning_rate": 1.1485761544836238e-06,
      "loss": 1.5795,
      "step": 3430
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.390625,
      "learning_rate": 1.1282615210377456e-06,
      "loss": 1.6616,
      "step": 3435
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.4140625,
      "learning_rate": 1.108117399287555e-06,
      "loss": 1.6382,
      "step": 3440
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.4296875,
      "learning_rate": 1.088144176395992e-06,
      "loss": 1.6355,
      "step": 3445
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.4296875,
      "learning_rate": 1.0683422362413797e-06,
      "loss": 1.6833,
      "step": 3450
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.3984375,
      "learning_rate": 1.048711959410048e-06,
      "loss": 1.6637,
      "step": 3455
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.4296875,
      "learning_rate": 1.029253723189022e-06,
      "loss": 1.5878,
      "step": 3460
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.375,
      "learning_rate": 1.0099679015587616e-06,
      "loss": 1.614,
      "step": 3465
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.4453125,
      "learning_rate": 9.908548651859807e-07,
      "loss": 1.5783,
      "step": 3470
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.4453125,
      "learning_rate": 9.719149814165219e-07,
      "loss": 1.6576,
      "step": 3475
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.421875,
      "learning_rate": 9.531486142682922e-07,
      "loss": 1.6059,
      "step": 3480
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.375,
      "learning_rate": 9.345561244242773e-07,
      "loss": 1.6049,
      "step": 3485
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.453125,
      "learning_rate": 9.161378692255963e-07,
      "loss": 1.6458,
      "step": 3490
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.46875,
      "learning_rate": 8.978942026646409e-07,
      "loss": 1.5813,
      "step": 3495
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.3984375,
      "learning_rate": 8.798254753782698e-07,
      "loss": 1.6261,
      "step": 3500
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.3671875,
      "learning_rate": 8.61932034641072e-07,
      "loss": 1.6255,
      "step": 3505
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.40625,
      "learning_rate": 8.442142243586904e-07,
      "loss": 1.5771,
      "step": 3510
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.421875,
      "learning_rate": 8.266723850612124e-07,
      "loss": 1.6067,
      "step": 3515
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.421875,
      "learning_rate": 8.093068538966242e-07,
      "loss": 1.6403,
      "step": 3520
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.484375,
      "learning_rate": 7.921179646243315e-07,
      "loss": 1.6435,
      "step": 3525
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.4765625,
      "learning_rate": 7.751060476087435e-07,
      "loss": 1.6065,
      "step": 3530
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.4296875,
      "learning_rate": 7.582714298129302e-07,
      "loss": 1.6341,
      "step": 3535
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.375,
      "learning_rate": 7.416144347923271e-07,
      "loss": 1.5997,
      "step": 3540
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.390625,
      "learning_rate": 7.251353826885232e-07,
      "loss": 1.6263,
      "step": 3545
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.46875,
      "learning_rate": 7.08834590223112e-07,
      "loss": 1.6525,
      "step": 3550
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.3671875,
      "learning_rate": 6.92712370691594e-07,
      "loss": 1.6177,
      "step": 3555
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.453125,
      "learning_rate": 6.767690339573607e-07,
      "loss": 1.6611,
      "step": 3560
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.4296875,
      "learning_rate": 6.610048864457452e-07,
      "loss": 1.6394,
      "step": 3565
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.3828125,
      "learning_rate": 6.454202311381197e-07,
      "loss": 1.6174,
      "step": 3570
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.3828125,
      "learning_rate": 6.300153675660836e-07,
      "loss": 1.6583,
      "step": 3575
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.4375,
      "learning_rate": 6.147905918057007e-07,
      "loss": 1.6458,
      "step": 3580
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.40625,
      "learning_rate": 5.997461964718076e-07,
      "loss": 1.6303,
      "step": 3585
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.390625,
      "learning_rate": 5.84882470712399e-07,
      "loss": 1.6162,
      "step": 3590
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.4375,
      "learning_rate": 5.701997002030602e-07,
      "loss": 1.6281,
      "step": 3595
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.4140625,
      "learning_rate": 5.556981671414785e-07,
      "loss": 1.6293,
      "step": 3600
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.4296875,
      "learning_rate": 5.413781502420234e-07,
      "loss": 1.6084,
      "step": 3605
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.421875,
      "learning_rate": 5.272399247303861e-07,
      "loss": 1.6464,
      "step": 3610
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.453125,
      "learning_rate": 5.132837623382947e-07,
      "loss": 1.6099,
      "step": 3615
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.40625,
      "learning_rate": 4.995099312982843e-07,
      "loss": 1.6073,
      "step": 3620
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.421875,
      "learning_rate": 4.859186963385465e-07,
      "loss": 1.6105,
      "step": 3625
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.46875,
      "learning_rate": 4.725103186778424e-07,
      "loss": 1.6115,
      "step": 3630
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.421875,
      "learning_rate": 4.5928505602047426e-07,
      "loss": 1.6123,
      "step": 3635
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.390625,
      "learning_rate": 4.462431625513475e-07,
      "loss": 1.6208,
      "step": 3640
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.40625,
      "learning_rate": 4.3338488893106876e-07,
      "loss": 1.6154,
      "step": 3645
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.375,
      "learning_rate": 4.207104822911368e-07,
      "loss": 1.6232,
      "step": 3650
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.4375,
      "learning_rate": 4.082201862291957e-07,
      "loss": 1.6444,
      "step": 3655
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.4609375,
      "learning_rate": 3.959142408043448e-07,
      "loss": 1.6303,
      "step": 3660
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.3828125,
      "learning_rate": 3.837928825325299e-07,
      "loss": 1.5837,
      "step": 3665
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.3984375,
      "learning_rate": 3.718563443819989e-07,
      "loss": 1.622,
      "step": 3670
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.4375,
      "learning_rate": 3.60104855768818e-07,
      "loss": 1.5705,
      "step": 3675
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.4140625,
      "learning_rate": 3.485386425524695e-07,
      "loss": 1.6096,
      "step": 3680
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.40625,
      "learning_rate": 3.371579270315073e-07,
      "loss": 1.6356,
      "step": 3685
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.421875,
      "learning_rate": 3.2596292793928174e-07,
      "loss": 1.5869,
      "step": 3690
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.390625,
      "learning_rate": 3.1495386043974173e-07,
      "loss": 1.6391,
      "step": 3695
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.4765625,
      "learning_rate": 3.0413093612329805e-07,
      "loss": 1.6174,
      "step": 3700
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.3984375,
      "learning_rate": 2.934943630027509e-07,
      "loss": 1.604,
      "step": 3705
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.4296875,
      "learning_rate": 2.830443455092957e-07,
      "loss": 1.628,
      "step": 3710
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.40625,
      "learning_rate": 2.727810844885992e-07,
      "loss": 1.6104,
      "step": 3715
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.453125,
      "learning_rate": 2.6270477719692823e-07,
      "loss": 1.6315,
      "step": 3720
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.4375,
      "learning_rate": 2.528156172973706e-07,
      "loss": 1.6311,
      "step": 3725
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.40625,
      "learning_rate": 2.431137948561024e-07,
      "loss": 1.6617,
      "step": 3730
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.3828125,
      "learning_rate": 2.3359949633874224e-07,
      "loss": 1.5982,
      "step": 3735
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.390625,
      "learning_rate": 2.2427290460676378e-07,
      "loss": 1.6156,
      "step": 3740
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.3984375,
      "learning_rate": 2.1513419891398103e-07,
      "loss": 1.6412,
      "step": 3745
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.3984375,
      "learning_rate": 2.061835549031077e-07,
      "loss": 1.5988,
      "step": 3750
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.390625,
      "learning_rate": 1.9742114460237439e-07,
      "loss": 1.6496,
      "step": 3755
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.3984375,
      "learning_rate": 1.8884713642222773e-07,
      "loss": 1.5901,
      "step": 3760
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.4375,
      "learning_rate": 1.8046169515209432e-07,
      "loss": 1.616,
      "step": 3765
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.3984375,
      "learning_rate": 1.722649819572053e-07,
      "loss": 1.6194,
      "step": 3770
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.4140625,
      "learning_rate": 1.6425715437550783e-07,
      "loss": 1.5966,
      "step": 3775
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.3671875,
      "learning_rate": 1.5643836631463626e-07,
      "loss": 1.5756,
      "step": 3780
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.453125,
      "learning_rate": 1.4880876804894807e-07,
      "loss": 1.6281,
      "step": 3785
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.40625,
      "learning_rate": 1.4136850621663922e-07,
      "loss": 1.6399,
      "step": 3790
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.421875,
      "learning_rate": 1.3411772381692888e-07,
      "loss": 1.6618,
      "step": 3795
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.4375,
      "learning_rate": 1.2705656020730484e-07,
      "loss": 1.5982,
      "step": 3800
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.4296875,
      "learning_rate": 1.2018515110085227e-07,
      "loss": 1.6163,
      "step": 3805
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.3984375,
      "learning_rate": 1.1350362856363817e-07,
      "loss": 1.6177,
      "step": 3810
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.390625,
      "learning_rate": 1.0701212101217772e-07,
      "loss": 1.6262,
      "step": 3815
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.390625,
      "learning_rate": 1.0071075321096524e-07,
      "loss": 1.6355,
      "step": 3820
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.4140625,
      "learning_rate": 9.459964627007378e-08,
      "loss": 1.6219,
      "step": 3825
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.390625,
      "learning_rate": 8.867891764283154e-08,
      "loss": 1.5891,
      "step": 3830
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.4140625,
      "learning_rate": 8.294868112356358e-08,
      "loss": 1.5979,
      "step": 3835
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.40625,
      "learning_rate": 7.74090468454014e-08,
      "loss": 1.6252,
      "step": 3840
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.4140625,
      "learning_rate": 7.206012127816686e-08,
      "loss": 1.6028,
      "step": 3845
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.421875,
      "learning_rate": 6.690200722633156e-08,
      "loss": 1.6106,
      "step": 3850
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.4296875,
      "learning_rate": 6.19348038270351e-08,
      "loss": 1.6085,
      "step": 3855
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.4375,
      "learning_rate": 5.715860654817995e-08,
      "loss": 1.6112,
      "step": 3860
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.375,
      "learning_rate": 5.2573507186599595e-08,
      "loss": 1.639,
      "step": 3865
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.4375,
      "learning_rate": 4.817959386629212e-08,
      "loss": 1.6482,
      "step": 3870
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.390625,
      "learning_rate": 4.397695103672828e-08,
      "loss": 1.6347,
      "step": 3875
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.4609375,
      "learning_rate": 3.996565947122499e-08,
      "loss": 1.6203,
      "step": 3880
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.4375,
      "learning_rate": 3.6145796265397716e-08,
      "loss": 1.6375,
      "step": 3885
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.3984375,
      "learning_rate": 3.2517434835674936e-08,
      "loss": 1.6312,
      "step": 3890
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.4140625,
      "learning_rate": 2.908064491788709e-08,
      "loss": 1.6125,
      "step": 3895
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.5546875,
      "learning_rate": 2.5835492565928766e-08,
      "loss": 1.652,
      "step": 3900
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.390625,
      "learning_rate": 2.278204015048746e-08,
      "loss": 1.6038,
      "step": 3905
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.421875,
      "learning_rate": 1.9920346357846787e-08,
      "loss": 1.6156,
      "step": 3910
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.46875,
      "learning_rate": 1.7250466188754035e-08,
      "loss": 1.6265,
      "step": 3915
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.4296875,
      "learning_rate": 1.4772450957368788e-08,
      "loss": 1.6656,
      "step": 3920
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.3984375,
      "learning_rate": 1.2486348290273731e-08,
      "loss": 1.606,
      "step": 3925
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.3984375,
      "learning_rate": 1.0392202125556473e-08,
      "loss": 1.6396,
      "step": 3930
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.3984375,
      "learning_rate": 8.490052711972452e-09,
      "loss": 1.6434,
      "step": 3935
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.4609375,
      "learning_rate": 6.77993660816334e-09,
      "loss": 1.6536,
      "step": 3940
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.3515625,
      "learning_rate": 5.261886681959816e-09,
      "loss": 1.6093,
      "step": 3945
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.453125,
      "learning_rate": 3.935932109749852e-09,
      "loss": 1.6293,
      "step": 3950
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.4140625,
      "learning_rate": 2.8020983759124985e-09,
      "loss": 1.6311,
      "step": 3955
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.4375,
      "learning_rate": 1.8604072723338306e-09,
      "loss": 1.6457,
      "step": 3960
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.3671875,
      "learning_rate": 1.1108768979861685e-09,
      "loss": 1.5786,
      "step": 3965
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.4765625,
      "learning_rate": 5.5352165857947e-10,
      "loss": 1.5546,
      "step": 3970
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.421875,
      "learning_rate": 1.8835226628710623e-10,
      "loss": 1.6215,
      "step": 3975
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.3828125,
      "learning_rate": 1.5375739534917445e-11,
      "loss": 1.5669,
      "step": 3980
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.628558874130249,
      "eval_runtime": 653.8126,
      "eval_samples_per_second": 21.564,
      "eval_steps_per_second": 1.349,
      "step": 3982
    },
    {
      "epoch": 1.0,
      "step": 3982,
      "total_flos": 2.7700535054303232e+17,
      "train_loss": 1.6493343958478666,
      "train_runtime": 16072.2247,
      "train_samples_per_second": 7.928,
      "train_steps_per_second": 0.248
    }
  ],
  "logging_steps": 5,
  "max_steps": 3982,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 2.7700535054303232e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}