|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.6510342497163959,
  "eval_steps": 1000,
  "global_step": 8991,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0,
      "eval_loss": 7.394782543182373,
      "eval_runtime": 2.5063,
      "eval_samples_per_second": 4.788,
      "eval_steps_per_second": 1.197,
      "step": 0
    },
|
{ |
|
"epoch": 0.000724095484057831, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 1.2048192771084338e-06, |
|
"loss": 7.2474, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.001448190968115662, |
|
"grad_norm": 4.28125, |
|
"learning_rate": 2.4096385542168676e-06, |
|
"loss": 7.2559, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0021722864521734934, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 3.614457831325301e-06, |
|
"loss": 7.2363, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.002896381936231324, |
|
"grad_norm": 1.9921875, |
|
"learning_rate": 4.819277108433735e-06, |
|
"loss": 7.2322, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0036204774202891555, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 6.024096385542169e-06, |
|
"loss": 7.1863, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.004344572904346987, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 7.228915662650602e-06, |
|
"loss": 7.1754, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.005068668388404818, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 8.433734939759036e-06, |
|
"loss": 7.1641, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.005792763872462648, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 9.63855421686747e-06, |
|
"loss": 7.1237, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.00651685935652048, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 1.0843373493975904e-05, |
|
"loss": 7.0891, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.007240954840578311, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.2048192771084338e-05, |
|
"loss": 7.0901, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.007965050324636142, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.3253012048192772e-05, |
|
"loss": 7.0337, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.008689145808693973, |
|
"grad_norm": 1.953125, |
|
"learning_rate": 1.4457831325301205e-05, |
|
"loss": 7.0387, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.009413241292751805, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.566265060240964e-05, |
|
"loss": 7.018, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.010137336776809636, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 1.6867469879518073e-05, |
|
"loss": 6.9834, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.010861432260867467, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.8072289156626505e-05, |
|
"loss": 6.9829, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.011585527744925297, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 1.927710843373494e-05, |
|
"loss": 6.9724, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.012309623228983128, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 2.0481927710843373e-05, |
|
"loss": 6.9671, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.01303371871304096, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 2.168674698795181e-05, |
|
"loss": 6.9373, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.01375781419709879, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 2.289156626506024e-05, |
|
"loss": 6.9478, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.014481909681156622, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 2.4096385542168677e-05, |
|
"loss": 6.9258, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.015206005165214453, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 2.530120481927711e-05, |
|
"loss": 6.9085, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.015930100649272284, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 2.6506024096385545e-05, |
|
"loss": 6.9023, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.016654196133330114, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.7710843373493977e-05, |
|
"loss": 6.9096, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.017378291617387947, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 2.891566265060241e-05, |
|
"loss": 6.8828, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.018102387101445776, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 3.012048192771085e-05, |
|
"loss": 6.8763, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.01882648258550361, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.132530120481928e-05, |
|
"loss": 6.875, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.01955057806956144, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 3.253012048192771e-05, |
|
"loss": 6.8641, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.020274673553619272, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 3.3734939759036146e-05, |
|
"loss": 6.8531, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.0209987690376771, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 3.4939759036144585e-05, |
|
"loss": 6.8722, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.021722864521734934, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 3.614457831325301e-05, |
|
"loss": 6.8478, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.022446960005792764, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 3.734939759036144e-05, |
|
"loss": 6.8374, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.023171055489850594, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 3.855421686746988e-05, |
|
"loss": 6.8136, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.023895150973908427, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 3.9759036144578314e-05, |
|
"loss": 6.833, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.024619246457966256, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 4.0963855421686746e-05, |
|
"loss": 6.8183, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.02534334194202409, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 4.2168674698795186e-05, |
|
"loss": 6.8071, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.02606743742608192, |
|
"grad_norm": 1.921875, |
|
"learning_rate": 4.337349397590362e-05, |
|
"loss": 6.8177, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.02679153291013975, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 4.457831325301205e-05, |
|
"loss": 6.797, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.02751562839419758, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.578313253012048e-05, |
|
"loss": 6.7979, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.028239723878255414, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 4.698795180722892e-05, |
|
"loss": 6.7899, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.028963819362313244, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 4.8192771084337354e-05, |
|
"loss": 6.7614, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.029687914846371073, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 4.9397590361445786e-05, |
|
"loss": 6.8134, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.030412010330428906, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 4.999998281045896e-05, |
|
"loss": 6.7845, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.031136105814486736, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 4.99998452942724e-05, |
|
"loss": 6.7727, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.03186020129854457, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 4.9999570262655714e-05, |
|
"loss": 6.7537, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.0325842967826024, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 4.9999157717121745e-05, |
|
"loss": 6.7486, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.03330839226666023, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.9998607659939787e-05, |
|
"loss": 6.7585, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.034032487750718064, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.999792009413549e-05, |
|
"loss": 6.7467, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.034756583234775894, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.999709502349091e-05, |
|
"loss": 6.7446, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.03548067871883372, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 4.9996132452544484e-05, |
|
"loss": 6.7382, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.03620477420289155, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.9995032386590986e-05, |
|
"loss": 6.7563, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.03692886968694938, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.999379483168148e-05, |
|
"loss": 6.7614, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.03765296517100722, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 4.999241979462333e-05, |
|
"loss": 6.7422, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.03837706065506505, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.999090728298014e-05, |
|
"loss": 6.7343, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.03910115613912288, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.9989257305071704e-05, |
|
"loss": 6.7052, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.03982525162318071, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.998746986997397e-05, |
|
"loss": 6.7377, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.040549347107238544, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 4.9985544987519e-05, |
|
"loss": 6.7441, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.04127344259129637, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.9983482668294905e-05, |
|
"loss": 6.7001, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.0419975380753542, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.998128292364578e-05, |
|
"loss": 6.7151, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.04272163355941203, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 4.997894576567166e-05, |
|
"loss": 6.7119, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.04344572904346987, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.9976471207228426e-05, |
|
"loss": 6.7282, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.0441698245275277, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.9973859261927755e-05, |
|
"loss": 6.726, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.04489392001158553, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.997110994413707e-05, |
|
"loss": 6.7266, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.04561801549564336, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.996822326897939e-05, |
|
"loss": 6.7016, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.04634211097970119, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.9965199252333315e-05, |
|
"loss": 6.6972, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.047066206463759024, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.996203791083291e-05, |
|
"loss": 6.6964, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.04779030194781685, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.99587392618676e-05, |
|
"loss": 6.6926, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.04851439743187468, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.995530332358211e-05, |
|
"loss": 6.7038, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.04923849291593251, |
|
"grad_norm": 1.375, |
|
"learning_rate": 4.995173011487635e-05, |
|
"loss": 6.7012, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.04996258839999035, |
|
"grad_norm": 1.375, |
|
"learning_rate": 4.994801965540527e-05, |
|
"loss": 6.7019, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.05068668388404818, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 4.9944171965578836e-05, |
|
"loss": 6.6656, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.05141077936810601, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.9940187066561825e-05, |
|
"loss": 6.6811, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.05213487485216384, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.99360649802738e-05, |
|
"loss": 6.6885, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.05285897033622167, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.9931805729388916e-05, |
|
"loss": 6.69, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.0535830658202795, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.992740933733581e-05, |
|
"loss": 6.675, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.05430716130433733, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.992287582829751e-05, |
|
"loss": 6.6997, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.05503125678839516, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.991820522721128e-05, |
|
"loss": 6.6923, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.05575535227245299, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.991339755976845e-05, |
|
"loss": 6.6936, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.05647944775651083, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.990845285241434e-05, |
|
"loss": 6.6684, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.05720354324056866, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.9903371132348046e-05, |
|
"loss": 6.6656, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.05792763872462649, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.9898152427522334e-05, |
|
"loss": 6.6807, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.05865173420868432, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.9892796766643503e-05, |
|
"loss": 6.6901, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.059375829692742146, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.988730417917115e-05, |
|
"loss": 6.6851, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.06009992517679998, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 4.98816746953181e-05, |
|
"loss": 6.6608, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.06082402066085781, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.987590834605017e-05, |
|
"loss": 6.6617, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.06154811614491564, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.9870005163086045e-05, |
|
"loss": 6.6574, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.06227221162897347, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.986396517889706e-05, |
|
"loss": 6.6633, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.06299630711303131, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.985778842670707e-05, |
|
"loss": 6.6807, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.06372040259708914, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.9851474940492256e-05, |
|
"loss": 6.6425, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.06444449808114697, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.9845024754980876e-05, |
|
"loss": 6.658, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.0651685935652048, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.983843790565316e-05, |
|
"loss": 6.6633, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.06589268904926263, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.983171442874106e-05, |
|
"loss": 6.6828, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.06661678453332046, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.982485436122809e-05, |
|
"loss": 6.6251, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.06734088001737829, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.981785774084909e-05, |
|
"loss": 6.6573, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.06806497550143613, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.981072460609002e-05, |
|
"loss": 6.6677, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.06878907098549396, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.980345499618778e-05, |
|
"loss": 6.6577, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.06951316646955179, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.9796048951129936e-05, |
|
"loss": 6.6628, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.07023726195360962, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.978850651165458e-05, |
|
"loss": 6.6443, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.07096135743766745, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.9780827719250035e-05, |
|
"loss": 6.6617, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.07168545292172528, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.977301261615465e-05, |
|
"loss": 6.6679, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.0724095484057831, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.9765061245356594e-05, |
|
"loss": 6.6385, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.0724095484057831, |
|
"eval_loss": 6.789700984954834, |
|
"eval_runtime": 1.6435, |
|
"eval_samples_per_second": 7.302, |
|
"eval_steps_per_second": 1.825, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.07313364388984094, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.9756973650593583e-05, |
|
"loss": 6.6351, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.07385773937389876, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.9748749876352655e-05, |
|
"loss": 6.6602, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.07458183485795661, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.974038996786991e-05, |
|
"loss": 6.653, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.07530593034201444, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.9731893971130306e-05, |
|
"loss": 6.6532, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.07603002582607227, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.972326193286736e-05, |
|
"loss": 6.6297, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.0767541213101301, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.971449390056289e-05, |
|
"loss": 6.6379, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.07747821679418793, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.970558992244679e-05, |
|
"loss": 6.6478, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.07820231227824576, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.969655004749674e-05, |
|
"loss": 6.6616, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.07892640776230359, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.968737432543794e-05, |
|
"loss": 6.6314, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.07965050324636141, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 4.967806280674283e-05, |
|
"loss": 6.6465, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.08037459873041924, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 4.9668615542630836e-05, |
|
"loss": 6.6517, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.08109869421447709, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.965903258506806e-05, |
|
"loss": 6.6278, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.08182278969853492, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.964931398676701e-05, |
|
"loss": 6.6246, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.08254688518259275, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.9639459801186306e-05, |
|
"loss": 6.616, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.08327098066665058, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.962947008253038e-05, |
|
"loss": 6.6492, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.0839950761507084, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.96193448857492e-05, |
|
"loss": 6.6096, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.08471917163476624, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.960908426653793e-05, |
|
"loss": 6.6282, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.08544326711882406, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.959868828133667e-05, |
|
"loss": 6.6053, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.0861673626028819, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.95881569873301e-05, |
|
"loss": 6.6031, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.08689145808693974, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.95774904424472e-05, |
|
"loss": 6.609, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.08761555357099757, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.9566688705360915e-05, |
|
"loss": 6.6291, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.0883396490550554, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.955575183548783e-05, |
|
"loss": 6.6252, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.08906374453911323, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.9544679892987855e-05, |
|
"loss": 6.6246, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.08978784002317106, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.9533472938763884e-05, |
|
"loss": 6.6155, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.09051193550722889, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.9522131034461464e-05, |
|
"loss": 6.6263, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.09123603099128672, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.951065424246845e-05, |
|
"loss": 6.6219, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.09196012647534454, |
|
"grad_norm": 1.375, |
|
"learning_rate": 4.9499042625914674e-05, |
|
"loss": 6.6144, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.09268422195940237, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.9487296248671586e-05, |
|
"loss": 6.6153, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.09340831744346022, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 4.9475415175351926e-05, |
|
"loss": 6.6136, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.09413241292751805, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.946339947130933e-05, |
|
"loss": 6.586, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.09485650841157588, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.945124920263797e-05, |
|
"loss": 6.6185, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.0955806038956337, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.9438964436172264e-05, |
|
"loss": 6.6248, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.09630469937969154, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.9426545239486424e-05, |
|
"loss": 6.605, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.09702879486374937, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.94139916808941e-05, |
|
"loss": 6.6525, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.0977528903478072, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 4.9401303829448075e-05, |
|
"loss": 6.6178, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.09847698583186502, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.938848175493976e-05, |
|
"loss": 6.5978, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.09920108131592285, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.937552552789894e-05, |
|
"loss": 6.6059, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.0999251767999807, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.936243521959329e-05, |
|
"loss": 6.6078, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.10064927228403853, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.934921090202803e-05, |
|
"loss": 6.6135, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.10137336776809636, |
|
"grad_norm": 1.375, |
|
"learning_rate": 4.933585264794552e-05, |
|
"loss": 6.6068, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.10209746325215419, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.932236053082486e-05, |
|
"loss": 6.6026, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.10282155873621202, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.930873462488148e-05, |
|
"loss": 6.6064, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.10354565422026984, |
|
"grad_norm": 1.375, |
|
"learning_rate": 4.9294975005066734e-05, |
|
"loss": 6.6075, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.10426974970432767, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 4.9281081747067494e-05, |
|
"loss": 6.5982, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.1049938451883855, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 4.926705492730572e-05, |
|
"loss": 6.6109, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.10571794067244333, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.925289462293807e-05, |
|
"loss": 6.596, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.10644203615650118, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.923860091185542e-05, |
|
"loss": 6.606, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.107166131640559, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.922417387268251e-05, |
|
"loss": 6.6, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.10789022712461684, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.9209613584777445e-05, |
|
"loss": 6.5953, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.10861432260867467, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.919492012823128e-05, |
|
"loss": 6.5907, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.1093384180927325, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.918009358386761e-05, |
|
"loss": 6.6144, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.11006251357679032, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.916513403324207e-05, |
|
"loss": 6.5911, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.11078660906084815, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.9150041558641947e-05, |
|
"loss": 6.6071, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.11151070454490598, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 4.913481624308566e-05, |
|
"loss": 6.6028, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.11223480002896381, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.911945817032238e-05, |
|
"loss": 6.6121, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.11295889551302166, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.91039674248315e-05, |
|
"loss": 6.6003, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.11368299099707949, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.908834409182221e-05, |
|
"loss": 6.632, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.11440708648113732, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.907258825723301e-05, |
|
"loss": 6.5953, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.11513118196519515, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.905670000773126e-05, |
|
"loss": 6.6028, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.11585527744925297, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.904067943071268e-05, |
|
"loss": 6.592, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.1165793729333108, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.902452661430087e-05, |
|
"loss": 6.6015, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.11730346841736863, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.9008241647346836e-05, |
|
"loss": 6.5767, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.11802756390142646, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.899182461942851e-05, |
|
"loss": 6.5894, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.11875165938548429, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.897527562085024e-05, |
|
"loss": 6.5856, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.11947575486954214, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.895859474264229e-05, |
|
"loss": 6.5761, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.12019985035359997, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.894178207656035e-05, |
|
"loss": 6.5964, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.1209239458376578, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 4.892483771508504e-05, |
|
"loss": 6.5657, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.12164804132171562, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.890776175142138e-05, |
|
"loss": 6.5946, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.12237213680577345, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.889055427949829e-05, |
|
"loss": 6.5916, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.12309623228983128, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.8873215393968074e-05, |
|
"loss": 6.594, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.12382032777388911, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.885574519020589e-05, |
|
"loss": 6.566, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.12454442325794694, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.883814376430924e-05, |
|
"loss": 6.5752, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.1252685187420048, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.882041121309743e-05, |
|
"loss": 6.6015, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.12599261422606262, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.880254763411103e-05, |
|
"loss": 6.5655, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.12671670971012045, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.878455312561138e-05, |
|
"loss": 6.5883, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.12744080519417827, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.8766427786579964e-05, |
|
"loss": 6.6061, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.1281649006782361, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.874817171671798e-05, |
|
"loss": 6.5878, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.12888899616229393, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 4.872978501644569e-05, |
|
"loss": 6.5837, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.12961309164635176, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.8711267786901914e-05, |
|
"loss": 6.5884, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.1303371871304096, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 4.869262012994348e-05, |
|
"loss": 6.5864, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.13106128261446742, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.867384214814465e-05, |
|
"loss": 6.5655, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.13178537809852525, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 4.8654933944796546e-05, |
|
"loss": 6.5747, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.13250947358258308, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.86358956239066e-05, |
|
"loss": 6.5689, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.1332335690666409, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.861672729019797e-05, |
|
"loss": 6.6059, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.13395766455069874, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.859742904910899e-05, |
|
"loss": 6.5985, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.13468176003475657, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.8578001006792544e-05, |
|
"loss": 6.6006, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.13540585551881443, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.8558443270115524e-05, |
|
"loss": 6.5779, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.13612995100287226, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.853875594665821e-05, |
|
"loss": 6.566, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.1368540464869301, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.85189391447137e-05, |
|
"loss": 6.6019, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.13757814197098792, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.8498992973287324e-05, |
|
"loss": 6.5758, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.13830223745504575, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.8478917542096005e-05, |
|
"loss": 6.5754, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.13902633293910358, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.845871296156769e-05, |
|
"loss": 6.593, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.1397504284231614, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.843837934284072e-05, |
|
"loss": 6.5754, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.14047452390721923, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.841791679776326e-05, |
|
"loss": 6.5659, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.14119861939127706, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.839732543889263e-05, |
|
"loss": 6.5821, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.1419227148753349, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.83766053794947e-05, |
|
"loss": 6.58, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.14264681035939272, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.83557567335433e-05, |
|
"loss": 6.5662, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.14337090584345055, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 4.833477961571956e-05, |
|
"loss": 6.5675, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.14409500132750838, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 4.831367414141129e-05, |
|
"loss": 6.5784, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.1448190968115662, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.829244042671235e-05, |
|
"loss": 6.5707, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.1448190968115662, |
|
"eval_loss": 6.703906536102295, |
|
"eval_runtime": 1.4135, |
|
"eval_samples_per_second": 8.489, |
|
"eval_steps_per_second": 2.122, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.14554319229562404, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.827107858842198e-05, |
|
"loss": 6.577, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.14626728777968187, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.824958874404422e-05, |
|
"loss": 6.5519, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.1469913832637397, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.8227971011787196e-05, |
|
"loss": 6.5635, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.14771547874779753, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.820622551056252e-05, |
|
"loss": 6.566, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.1484395742318554, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.818435235998461e-05, |
|
"loss": 6.5532, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.14916366971591322, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.8162351680370044e-05, |
|
"loss": 6.5784, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.14988776519997105, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.8140223592736885e-05, |
|
"loss": 6.5798, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.15061186068402888, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.811796821880402e-05, |
|
"loss": 6.5747, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.1513359561680867, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.809558568099051e-05, |
|
"loss": 6.5855, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.15206005165214453, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.807307610241488e-05, |
|
"loss": 6.5657, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.15278414713620236, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.805043960689447e-05, |
|
"loss": 6.5535, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.1535082426202602, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.8027676318944756e-05, |
|
"loss": 6.5728, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.15423233810431802, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.800478636377865e-05, |
|
"loss": 6.5564, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.15495643358837585, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.798176986730579e-05, |
|
"loss": 6.5745, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.15568052907243368, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.795862695613192e-05, |
|
"loss": 6.5697, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.1564046245564915, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.793535775755811e-05, |
|
"loss": 6.5961, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.15712872004054934, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.791196239958011e-05, |
|
"loss": 6.5581, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.15785281552460717, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.7888441010887625e-05, |
|
"loss": 6.554, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.158576911008665, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.786479372086361e-05, |
|
"loss": 6.5572, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.15930100649272283, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 4.784102065958356e-05, |
|
"loss": 6.5765, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.16002510197678066, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.781712195781479e-05, |
|
"loss": 6.5703, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.1607491974608385, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.779309774701574e-05, |
|
"loss": 6.5712, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.16147329294489635, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.77689481593352e-05, |
|
"loss": 6.5733, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.16219738842895418, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.7744673327611625e-05, |
|
"loss": 6.558, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.162921483913012, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.7720273385372416e-05, |
|
"loss": 6.5623, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.16364557939706983, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.769574846683313e-05, |
|
"loss": 6.5813, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.16436967488112766, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 4.76710987068968e-05, |
|
"loss": 6.5531, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.1650937703651855, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.764632424115315e-05, |
|
"loss": 6.5368, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.16581786584924332, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.762142520587789e-05, |
|
"loss": 6.5631, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.16654196133330115, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.7596401738031916e-05, |
|
"loss": 6.5436, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.16726605681735898, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.75712539752606e-05, |
|
"loss": 6.5749, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.1679901523014168, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.754598205589303e-05, |
|
"loss": 6.5359, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.16871424778547464, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.752058611894119e-05, |
|
"loss": 6.5382, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.16943834326953247, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.7495066304099294e-05, |
|
"loss": 6.5609, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.1701624387535903, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.7469422751742944e-05, |
|
"loss": 6.5635, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.17088653423764813, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.744365560292837e-05, |
|
"loss": 6.5632, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.17161062972170596, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.741776499939168e-05, |
|
"loss": 6.5634, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.1723347252057638, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.739175108354804e-05, |
|
"loss": 6.5638, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.17305882068982162, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.7365613998490944e-05, |
|
"loss": 6.5594, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.17378291617387948, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.733935388799137e-05, |
|
"loss": 6.5716, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.1745070116579373, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.731297089649703e-05, |
|
"loss": 6.5383, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.17523110714199513, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.728646516913157e-05, |
|
"loss": 6.5484, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.17595520262605296, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.725983685169374e-05, |
|
"loss": 6.5473, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.1766792981101108, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.723308609065663e-05, |
|
"loss": 6.5431, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.17740339359416862, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.720621303316685e-05, |
|
"loss": 6.5569, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.17812748907822645, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.717921782704371e-05, |
|
"loss": 6.5659, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.17885158456228428, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.715210062077844e-05, |
|
"loss": 6.5329, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.1795756800463421, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 4.712486156353331e-05, |
|
"loss": 6.5778, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.18029977553039994, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 4.7097500805140894e-05, |
|
"loss": 6.5461, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.18102387101445777, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 4.7070018496103176e-05, |
|
"loss": 6.5448, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.1817479664985156, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.704241478759076e-05, |
|
"loss": 6.5525, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.18247206198257343, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.7014689831442004e-05, |
|
"loss": 6.5503, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.18319615746663126, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.698684378016222e-05, |
|
"loss": 6.5854, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.1839202529506891, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.695887678692286e-05, |
|
"loss": 6.5733, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.18464434843474692, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.693078900556056e-05, |
|
"loss": 6.5334, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.18536844391880475, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.690258059057643e-05, |
|
"loss": 6.5561, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.18609253940286258, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.687425169713512e-05, |
|
"loss": 6.5502, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.18681663488692044, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.684580248106399e-05, |
|
"loss": 6.543, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.18754073037097826, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.6817233098852255e-05, |
|
"loss": 6.5674, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.1882648258550361, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.6788543707650124e-05, |
|
"loss": 6.5527, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.18898892133909392, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.675973446526793e-05, |
|
"loss": 6.5686, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.18971301682315175, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.673080553017528e-05, |
|
"loss": 6.5365, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.19043711230720958, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 4.670175706150015e-05, |
|
"loss": 6.5584, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.1911612077912674, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.667258921902803e-05, |
|
"loss": 6.5567, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.19188530327532524, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.664330216320105e-05, |
|
"loss": 6.5569, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.19260939875938307, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.661389605511709e-05, |
|
"loss": 6.5434, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.1933334942434409, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.65843710565289e-05, |
|
"loss": 6.5305, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.19405758972749873, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.65547273298432e-05, |
|
"loss": 6.542, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.19478168521155656, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.6524965038119786e-05, |
|
"loss": 6.5415, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.1955057806956144, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.6495084345070656e-05, |
|
"loss": 6.5114, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.19622987617967222, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.646508541505907e-05, |
|
"loss": 6.5511, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.19695397166373005, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 4.643496841309868e-05, |
|
"loss": 6.5372, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.19767806714778788, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.640473350485261e-05, |
|
"loss": 6.5409, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.1984021626318457, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 4.637438085663257e-05, |
|
"loss": 6.5408, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.19912625811590354, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.634391063539786e-05, |
|
"loss": 6.5488, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.1998503535999614, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.631332300875455e-05, |
|
"loss": 6.5651, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.20057444908401922, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.6282618144954516e-05, |
|
"loss": 6.536, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.20129854456807705, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.6251796212894486e-05, |
|
"loss": 6.5438, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.20202264005213488, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.622085738211518e-05, |
|
"loss": 6.5701, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.2027467355361927, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.618980182280031e-05, |
|
"loss": 6.5484, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.20347083102025054, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 4.615862970577567e-05, |
|
"loss": 6.5297, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.20419492650430837, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.612734120250821e-05, |
|
"loss": 6.5334, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.2049190219883662, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.609593648510507e-05, |
|
"loss": 6.5359, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.20564311747242403, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.606441572631266e-05, |
|
"loss": 6.534, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.20636721295648186, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.603277909951566e-05, |
|
"loss": 6.5344, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.2070913084405397, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.600102677873614e-05, |
|
"loss": 6.5294, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.20781540392459752, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.596915893863253e-05, |
|
"loss": 6.5422, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.20853949940865535, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.593717575449872e-05, |
|
"loss": 6.5522, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.20926359489271318, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.5905077402263034e-05, |
|
"loss": 6.5464, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.209987690376771, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.587286405848732e-05, |
|
"loss": 6.5167, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.21071178586082884, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.584053590036594e-05, |
|
"loss": 6.5263, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.21143588134488667, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.580809310572484e-05, |
|
"loss": 6.547, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.2121599768289445, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.57755358530205e-05, |
|
"loss": 6.5113, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.21288407231300235, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.574286432133901e-05, |
|
"loss": 6.5458, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.21360816779706018, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.571007869039509e-05, |
|
"loss": 6.5221, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.214332263281118, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.567717914053103e-05, |
|
"loss": 6.5321, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.21505635876517584, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.564416585271582e-05, |
|
"loss": 6.5424, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.21578045424923367, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.561103900854401e-05, |
|
"loss": 6.5315, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.2165045497332915, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.557779879023483e-05, |
|
"loss": 6.5202, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.21722864521734933, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.554444538063113e-05, |
|
"loss": 6.5246, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.21722864521734933, |
|
"eval_loss": 6.682950973510742, |
|
"eval_runtime": 1.4758, |
|
"eval_samples_per_second": 8.131, |
|
"eval_steps_per_second": 2.033, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.21795274070140716, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.551097896319838e-05, |
|
"loss": 6.5144, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.218676836185465, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.5477399722023674e-05, |
|
"loss": 6.5295, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.21940093166952282, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.5443707841814695e-05, |
|
"loss": 6.5278, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.22012502715358065, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.540990350789875e-05, |
|
"loss": 6.5584, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.22084912263763848, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.537598690622167e-05, |
|
"loss": 6.5528, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.2215732181216963, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.534195822334686e-05, |
|
"loss": 6.5392, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.22229731360575414, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.530781764645422e-05, |
|
"loss": 6.5403, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.22302140908981197, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.5273565363339185e-05, |
|
"loss": 6.5396, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.2237455045738698, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.52392015624116e-05, |
|
"loss": 6.5249, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.22446960005792763, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.520472643269477e-05, |
|
"loss": 6.5199, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.22519369554198548, |
|
"grad_norm": 1.375, |
|
"learning_rate": 4.517014016382432e-05, |
|
"loss": 6.5302, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.2259177910260433, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.5135442946047294e-05, |
|
"loss": 6.5274, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.22664188651010114, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 4.5100634970220967e-05, |
|
"loss": 6.5431, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.22736598199415897, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 4.5065716427811874e-05, |
|
"loss": 6.5362, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.2280900774782168, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.503068751089474e-05, |
|
"loss": 6.5307, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.22881417296227463, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.499554841215143e-05, |
|
"loss": 6.5172, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.22953826844633246, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.496029932486986e-05, |
|
"loss": 6.521, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.2302623639303903, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 4.492494044294297e-05, |
|
"loss": 6.5346, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.23098645941444812, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.4889471960867635e-05, |
|
"loss": 6.5422, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.23171055489850595, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.485389407374361e-05, |
|
"loss": 6.5498, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.23243465038256378, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.481820697727244e-05, |
|
"loss": 6.523, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.2331587458666216, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 4.47824108677564e-05, |
|
"loss": 6.5477, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.23388284135067944, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.47465059420974e-05, |
|
"loss": 6.5379, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.23460693683473727, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.471049239779592e-05, |
|
"loss": 6.5389, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.2353310323187951, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.4674370432949905e-05, |
|
"loss": 6.5552, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.23605512780285293, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.463814024625368e-05, |
|
"loss": 6.5114, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.23677922328691076, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.460180203699688e-05, |
|
"loss": 6.5101, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.23750331877096859, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 4.4565356005063304e-05, |
|
"loss": 6.5051, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.23822741425502644, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.452880235092987e-05, |
|
"loss": 6.5213, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.23895150973908427, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.449214127566549e-05, |
|
"loss": 6.5246, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.2396756052231421, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.4455372980929935e-05, |
|
"loss": 6.5309, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.24039970070719993, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.4418497668972785e-05, |
|
"loss": 6.5349, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.24112379619125776, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 4.4381515542632274e-05, |
|
"loss": 6.5232, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.2418478916753156, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.434442680533417e-05, |
|
"loss": 6.518, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.24257198715937342, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.430723166109069e-05, |
|
"loss": 6.5465, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.24329608264343125, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.426993031449934e-05, |
|
"loss": 6.5353, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.24402017812748908, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.423252297074183e-05, |
|
"loss": 6.5499, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.2447442736115469, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.41950098355829e-05, |
|
"loss": 6.5203, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.24546836909560474, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.415739111536924e-05, |
|
"loss": 6.5392, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.24619246457966257, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.4119667017028297e-05, |
|
"loss": 6.5197, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.2469165600637204, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.4081837748067186e-05, |
|
"loss": 6.4986, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.24764065554777823, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.404390351657153e-05, |
|
"loss": 6.5238, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.24836475103183606, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.4005864531204285e-05, |
|
"loss": 6.5473, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.24908884651589389, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 4.396772100120466e-05, |
|
"loss": 6.5189, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.24981294199995172, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.39294731363869e-05, |
|
"loss": 6.5228, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.2505370374840096, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.389112114713918e-05, |
|
"loss": 6.5224, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.2512611329680674, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.385266524442241e-05, |
|
"loss": 6.5229, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.25198522845212523, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.3814105639769106e-05, |
|
"loss": 6.54, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.25270932393618306, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.37754425452822e-05, |
|
"loss": 6.5226, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.2534334194202409, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.373667617363389e-05, |
|
"loss": 6.5276, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.2541575149042987, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.369780673806447e-05, |
|
"loss": 6.534, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.25488161038835655, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.365883445238116e-05, |
|
"loss": 6.5317, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.2556057058724144, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.361975953095689e-05, |
|
"loss": 6.5119, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.2563298013564722, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.358058218872918e-05, |
|
"loss": 6.5281, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.25705389684053004, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.354130264119894e-05, |
|
"loss": 6.5284, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.25777799232458787, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.350192110442926e-05, |
|
"loss": 6.5324, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.2585020878086457, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 4.346243779504421e-05, |
|
"loss": 6.523, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.2592261832927035, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.342285293022775e-05, |
|
"loss": 6.5267, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.25995027877676136, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 4.338316672772238e-05, |
|
"loss": 6.5263, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.2606743742608192, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.334337940582808e-05, |
|
"loss": 6.5438, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.261398469744877, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.330349118340102e-05, |
|
"loss": 6.5315, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.26212256522893485, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.326350227985241e-05, |
|
"loss": 6.5183, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.2628466607129927, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.3223412915147254e-05, |
|
"loss": 6.5214, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.2635707561970505, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.318322330980317e-05, |
|
"loss": 6.531, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.26429485168110833, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.314293368488915e-05, |
|
"loss": 6.5133, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.26501894716516616, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.3102544262024394e-05, |
|
"loss": 6.5288, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.265743042649224, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 4.3062055263377e-05, |
|
"loss": 6.4998, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.2664671381332818, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.302146691166286e-05, |
|
"loss": 6.5302, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.26719123361733965, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.298077943014431e-05, |
|
"loss": 6.5053, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.2679153291013975, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.293999304262902e-05, |
|
"loss": 6.5242, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.2686394245854553, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.289910797346868e-05, |
|
"loss": 6.5061, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.26936352006951314, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.285812444755779e-05, |
|
"loss": 6.5257, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.27008761555357097, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 4.281704269033242e-05, |
|
"loss": 6.5269, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.27081171103762885, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.2775862927769025e-05, |
|
"loss": 6.4974, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.2715358065216867, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.2734585386383086e-05, |
|
"loss": 6.4876, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.2722599020057445, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.269321029322797e-05, |
|
"loss": 6.5099, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.27298399748980234, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.265173787589364e-05, |
|
"loss": 6.5222, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.2737080929738602, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 4.2610168362505395e-05, |
|
"loss": 6.5077, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.274432188457918, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.256850198172263e-05, |
|
"loss": 6.5283, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.27515628394197583, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 4.252673896273758e-05, |
|
"loss": 6.5135, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.27588037942603366, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.248487953527404e-05, |
|
"loss": 6.5123, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.2766044749100915, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.244292392958613e-05, |
|
"loss": 6.5163, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.2773285703941493, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.2400872376457e-05, |
|
"loss": 6.5063, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.27805266587820715, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.2358725107197576e-05, |
|
"loss": 6.5214, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.278776761362265, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.231648235364529e-05, |
|
"loss": 6.5375, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.2795008568463228, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.227414434816279e-05, |
|
"loss": 6.5003, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.28022495233038064, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.22317113236367e-05, |
|
"loss": 6.5425, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.28094904781443847, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 4.218918351347626e-05, |
|
"loss": 6.5073, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.2816731432984963, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.214656115161215e-05, |
|
"loss": 6.5127, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.2823972387825541, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 4.210384447249509e-05, |
|
"loss": 6.5198, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.28312133426661196, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.2061033711094655e-05, |
|
"loss": 6.5206, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.2838454297506698, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.2018129102897904e-05, |
|
"loss": 6.4952, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.2845695252347276, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 4.197513088390813e-05, |
|
"loss": 6.5065, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.28529362071878545, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.193203929064353e-05, |
|
"loss": 6.4917, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.2860177162028433, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.1888854560135934e-05, |
|
"loss": 6.5252, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.2867418116869011, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.1845576929929486e-05, |
|
"loss": 6.4974, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.28746590717095893, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.180220663807934e-05, |
|
"loss": 6.5253, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.28819000265501676, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.175874392315033e-05, |
|
"loss": 6.4986, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.2889140981390746, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.1715189024215716e-05, |
|
"loss": 6.5147, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.2896381936231324, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.1671542180855796e-05, |
|
"loss": 6.5102, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.2896381936231324, |
|
"eval_loss": 6.658437252044678, |
|
"eval_runtime": 1.632, |
|
"eval_samples_per_second": 7.353, |
|
"eval_steps_per_second": 1.838, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.29036228910719025, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 4.162780363315662e-05, |
|
"loss": 6.5095, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.2910863845912481, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.158397362170871e-05, |
|
"loss": 6.4945, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.2918104800753059, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.154005238760565e-05, |
|
"loss": 6.4935, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.29253457555936374, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.149604017244285e-05, |
|
"loss": 6.5137, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.29325867104342157, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.145193721831615e-05, |
|
"loss": 6.5045, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.2939827665274794, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.1407743767820504e-05, |
|
"loss": 6.5433, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.29470686201153723, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.136346006404869e-05, |
|
"loss": 6.5018, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.29543095749559506, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.1319086350589916e-05, |
|
"loss": 6.5069, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.29615505297965294, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.127462287152849e-05, |
|
"loss": 6.5224, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.2968791484637108, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.1230069871442525e-05, |
|
"loss": 6.5215, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.2976032439477686, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.118542759540251e-05, |
|
"loss": 6.5162, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.29832733943182643, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 4.114069628897006e-05, |
|
"loss": 6.4849, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.29905143491588426, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 4.109587619819648e-05, |
|
"loss": 6.5217, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.2997755303999421, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.1050967569621465e-05, |
|
"loss": 6.5011, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.3004996258839999, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 4.1005970650271705e-05, |
|
"loss": 6.4931, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.30122372136805775, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 4.096088568765958e-05, |
|
"loss": 6.5058, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.3019478168521156, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.091571292978173e-05, |
|
"loss": 6.4999, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.3026719123361734, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.087045262511775e-05, |
|
"loss": 6.5029, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.30339600782023124, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.082510502262879e-05, |
|
"loss": 6.4853, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.30412010330428907, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.0779670371756184e-05, |
|
"loss": 6.4962, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.3048441987883469, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.073414892242011e-05, |
|
"loss": 6.5212, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.30556829427240473, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.068854092501819e-05, |
|
"loss": 6.5144, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.30629238975646256, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.0642846630424114e-05, |
|
"loss": 6.5349, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.3070164852405204, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.059706628998625e-05, |
|
"loss": 6.4897, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.3077405807245782, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 4.055120015552628e-05, |
|
"loss": 6.5038, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.30846467620863605, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 4.050524847933783e-05, |
|
"loss": 6.4993, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.3091887716926939, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.045921151418504e-05, |
|
"loss": 6.5261, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.3099128671767517, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.041308951330119e-05, |
|
"loss": 6.5095, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.31063696266080953, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 4.036688273038734e-05, |
|
"loss": 6.5032, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.31136105814486736, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.032059141961086e-05, |
|
"loss": 6.4758, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.3120851536289252, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 4.027421583560414e-05, |
|
"loss": 6.4878, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.312809249112983, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.022775623346306e-05, |
|
"loss": 6.5128, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.31353334459704085, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.018121286874571e-05, |
|
"loss": 6.5149, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.3142574400810987, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.013458599747091e-05, |
|
"loss": 6.5083, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.3149815355651565, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 4.00878758761168e-05, |
|
"loss": 6.516, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.31570563104921434, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 4.0041082761619476e-05, |
|
"loss": 6.4992, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.31642972653327217, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.9994206911371544e-05, |
|
"loss": 6.492, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.31715382201733, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.994724858322071e-05, |
|
"loss": 6.5087, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.31787791750138783, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.990020803546835e-05, |
|
"loss": 6.485, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.31860201298544566, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 3.985308552686812e-05, |
|
"loss": 6.4696, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.3193261084695035, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.9805881316624506e-05, |
|
"loss": 6.4946, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.3200502039535613, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 3.9758595664391396e-05, |
|
"loss": 6.5153, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.32077429943761915, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.9711228830270684e-05, |
|
"loss": 6.5096, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.321498394921677, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 3.9663781074810796e-05, |
|
"loss": 6.52, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.32222249040573486, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.961625265900529e-05, |
|
"loss": 6.4891, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.3229465858897927, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.956864384429142e-05, |
|
"loss": 6.4985, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.3236706813738505, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.952095489254864e-05, |
|
"loss": 6.5256, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.32439477685790835, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.947318606609727e-05, |
|
"loss": 6.4904, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.3251188723419662, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.942533762769695e-05, |
|
"loss": 6.4722, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.325842967826024, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 3.937740984054526e-05, |
|
"loss": 6.4933, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.32656706331008184, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.932940296827624e-05, |
|
"loss": 6.5028, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.32729115879413967, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 3.928131727495895e-05, |
|
"loss": 6.5001, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.3280152542781975, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.923315302509602e-05, |
|
"loss": 6.488, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.32873934976225533, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 3.918491048362219e-05, |
|
"loss": 6.4929, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.32946344524631316, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 3.9136589915902856e-05, |
|
"loss": 6.5015, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.330187540730371, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 3.9088191587732606e-05, |
|
"loss": 6.4959, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.3309116362144288, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.9039715765333766e-05, |
|
"loss": 6.4823, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.33163573169848665, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.8991162715354936e-05, |
|
"loss": 6.4936, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.3323598271825445, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 3.894253270486951e-05, |
|
"loss": 6.5096, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.3330839226666023, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.88938260013742e-05, |
|
"loss": 6.4944, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.33380801815066014, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 3.884504287278761e-05, |
|
"loss": 6.5183, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.33453211363471796, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.879618358744871e-05, |
|
"loss": 6.4898, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.3352562091187758, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.874724841411539e-05, |
|
"loss": 6.4888, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.3359803046028336, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.8698237621962974e-05, |
|
"loss": 6.504, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.33670440008689145, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.8649151480582745e-05, |
|
"loss": 6.514, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.3374284955709493, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 3.8599990259980435e-05, |
|
"loss": 6.5113, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.3381525910550071, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 3.855075423057477e-05, |
|
"loss": 6.494, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.33887668653906494, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.850144366319599e-05, |
|
"loss": 6.4887, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.33960078202312277, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 3.845205882908432e-05, |
|
"loss": 6.4935, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.3403248775071806, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.8402599999888514e-05, |
|
"loss": 6.5083, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.34104897299123843, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.835306744766434e-05, |
|
"loss": 6.4812, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.34177306847529626, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.830346144487309e-05, |
|
"loss": 6.4605, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.3424971639593541, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 3.825378226438009e-05, |
|
"loss": 6.4931, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.3432212594434119, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.820403017945319e-05, |
|
"loss": 6.4958, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.34394535492746975, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.815420546376124e-05, |
|
"loss": 6.4939, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.3446694504115276, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 3.8104308391372646e-05, |
|
"loss": 6.494, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.3453935458955854, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.805433923675379e-05, |
|
"loss": 6.4772, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.34611764137964324, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.800429827476757e-05, |
|
"loss": 6.497, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.34684173686370107, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.795418578067185e-05, |
|
"loss": 6.4894, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.34756583234775895, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.7904002030118e-05, |
|
"loss": 6.5089, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.3482899278318168, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 3.7853747299149325e-05, |
|
"loss": 6.5015, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.3490140233158746, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.7803421864199576e-05, |
|
"loss": 6.4898, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.34973811879993244, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.775302600209141e-05, |
|
"loss": 6.4994, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.35046221428399027, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.7702559990034906e-05, |
|
"loss": 6.4958, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.3511863097680481, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 3.765202410562598e-05, |
|
"loss": 6.5129, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.35191040525210593, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 3.760141862684492e-05, |
|
"loss": 6.5063, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.35263450073616376, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.75507438320548e-05, |
|
"loss": 6.4901, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.3533585962202216, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 6.5077, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.3540826917042794, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 3.7449187409804655e-05, |
|
"loss": 6.4971, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.35480678718833725, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.739830634097109e-05, |
|
"loss": 6.4796, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.3555308826723951, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.734735707337831e-05, |
|
"loss": 6.4935, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.3562549781564529, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.729633988728049e-05, |
|
"loss": 6.4704, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.35697907364051074, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.724525506330536e-05, |
|
"loss": 6.51, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.35770316912456857, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 3.719410288245273e-05, |
|
"loss": 6.4905, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.3584272646086264, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 3.7142883626092914e-05, |
|
"loss": 6.4786, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.3591513600926842, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 3.709159757596516e-05, |
|
"loss": 6.4824, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.35987545557674205, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.704024501417615e-05, |
|
"loss": 6.5007, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.3605995510607999, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 3.6988826223198425e-05, |
|
"loss": 6.4778, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.3613236465448577, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.693734148586882e-05, |
|
"loss": 6.4788, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.36204774202891554, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.6885791085386905e-05, |
|
"loss": 6.5081, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.36204774202891554, |
|
"eval_loss": 6.645383834838867, |
|
"eval_runtime": 1.5549, |
|
"eval_samples_per_second": 7.717, |
|
"eval_steps_per_second": 1.929, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.36277183751297337, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.683417530531348e-05, |
|
"loss": 6.4752, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.3634959329970312, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 3.678249442956892e-05, |
|
"loss": 6.4981, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.36422002848108903, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 3.673074874243172e-05, |
|
"loss": 6.4765, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.36494412396514686, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.667893852853685e-05, |
|
"loss": 6.4939, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.3656682194492047, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.662706407287424e-05, |
|
"loss": 6.4985, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.3663923149332625, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.6575125660787156e-05, |
|
"loss": 6.4764, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.36711641041732035, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.6523123577970694e-05, |
|
"loss": 6.4896, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.3678405059013782, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 3.64710581104702e-05, |
|
"loss": 6.4858, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.368564601385436, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 3.6418929544679635e-05, |
|
"loss": 6.4957, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.36928869686949384, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.6366738167340055e-05, |
|
"loss": 6.5179, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.37001279235355167, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 3.631448426553803e-05, |
|
"loss": 6.489, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.3707368878376095, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 3.626216812670405e-05, |
|
"loss": 6.4871, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.3714609833216673, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.620979003861095e-05, |
|
"loss": 6.4833, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.37218507880572516, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 3.615735028937232e-05, |
|
"loss": 6.4712, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.372909174289783, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.610484916744094e-05, |
|
"loss": 6.49, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.37363326977384087, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.6052286961607166e-05, |
|
"loss": 6.4859, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.3743573652578987, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.599966396099737e-05, |
|
"loss": 6.4697, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.37508146074195653, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.594698045507231e-05, |
|
"loss": 6.4875, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.37580555622601436, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.5894236733625594e-05, |
|
"loss": 6.4719, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.3765296517100722, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 3.584143308678203e-05, |
|
"loss": 6.4674, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.37725374719413, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.578856980499606e-05, |
|
"loss": 6.4724, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.37797784267818785, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.573564717905017e-05, |
|
"loss": 6.4982, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.3787019381622457, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 3.568266550005327e-05, |
|
"loss": 6.496, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.3794260336463035, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.5629625059439084e-05, |
|
"loss": 6.4764, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.38015012913036134, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.557652614896459e-05, |
|
"loss": 6.5149, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.38087422461441917, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.552336906070838e-05, |
|
"loss": 6.489, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.381598320098477, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.547015408706904e-05, |
|
"loss": 6.455, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.3823224155825348, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.54168815207636e-05, |
|
"loss": 6.481, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.38304651106659265, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 3.536355165482586e-05, |
|
"loss": 6.466, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.3837706065506505, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 3.531016478260481e-05, |
|
"loss": 6.4746, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.3844947020347083, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.525672119776302e-05, |
|
"loss": 6.5012, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.38521879751876614, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.5203221194275e-05, |
|
"loss": 6.4712, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.385942893002824, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.5149665066425636e-05, |
|
"loss": 6.4787, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.3866669884868818, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.5096053108808486e-05, |
|
"loss": 6.4847, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.38739108397093963, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 3.504238561632424e-05, |
|
"loss": 6.4859, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.38811517945499746, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.498866288417906e-05, |
|
"loss": 6.482, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.3888392749390553, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 3.493488520788295e-05, |
|
"loss": 6.4888, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.3895633704231131, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.4881052883248155e-05, |
|
"loss": 6.49, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.39028746590717095, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.482716620638753e-05, |
|
"loss": 6.4738, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.3910115613912288, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.477322547371288e-05, |
|
"loss": 6.4959, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.3917356568752866, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.4719230981933366e-05, |
|
"loss": 6.5087, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.39245975235934444, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.466518302805386e-05, |
|
"loss": 6.4963, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.39318384784340227, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.46110819093733e-05, |
|
"loss": 6.4831, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.3939079433274601, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.455692792348307e-05, |
|
"loss": 6.4731, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.3946320388115179, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.450272136826537e-05, |
|
"loss": 6.4805, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.39535613429557576, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 3.444846254189156e-05, |
|
"loss": 6.4445, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.3960802297796336, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 3.439415174282049e-05, |
|
"loss": 6.4815, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.3968043252636914, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.4339789269796964e-05, |
|
"loss": 6.479, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.39752842074774924, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 3.4285375421849954e-05, |
|
"loss": 6.4897, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.3982525162318071, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.423091049829108e-05, |
|
"loss": 6.4966, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.39897661171586496, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 3.4176394798712886e-05, |
|
"loss": 6.5001, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.3997007071999228, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.412182862298723e-05, |
|
"loss": 6.4965, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.4004248026839806, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 3.406721227126361e-05, |
|
"loss": 6.4859, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.40114889816803845, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 3.401254604396751e-05, |
|
"loss": 6.4876, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.4018729936520963, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.3957830241798807e-05, |
|
"loss": 6.493, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.4025970891361541, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 3.390306516573004e-05, |
|
"loss": 6.4825, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.40332118462021194, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 3.3848251117004786e-05, |
|
"loss": 6.4948, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.40404528010426977, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 3.379338839713601e-05, |
|
"loss": 6.465, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.4047693755883276, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.373847730790442e-05, |
|
"loss": 6.4826, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.4054934710723854, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.368351815135674e-05, |
|
"loss": 6.4876, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.40621756655644325, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 3.362851122980413e-05, |
|
"loss": 6.4724, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.4069416620405011, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.35734568458205e-05, |
|
"loss": 6.4725, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.4076657575245589, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 3.351835530224079e-05, |
|
"loss": 6.4914, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.40838985300861674, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 3.3463206902159395e-05, |
|
"loss": 6.4716, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.4091139484926746, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 3.3408011948928415e-05, |
|
"loss": 6.4575, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.4098380439767324, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.335277074615605e-05, |
|
"loss": 6.5197, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.41056213946079023, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.329748359770488e-05, |
|
"loss": 6.4902, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.41128623494484806, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 3.3242150807690234e-05, |
|
"loss": 6.481, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.4120103304289059, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.3186772680478496e-05, |
|
"loss": 6.4396, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.4127344259129637, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 3.313134952068543e-05, |
|
"loss": 6.4679, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.41345852139702155, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.307588163317449e-05, |
|
"loss": 6.4912, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.4141826168810794, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 3.30203693230552e-05, |
|
"loss": 6.4688, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.4149067123651372, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 3.296481289568141e-05, |
|
"loss": 6.494, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.41563080784919504, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.290921265664966e-05, |
|
"loss": 6.458, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.41635490333325287, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.2853568911797465e-05, |
|
"loss": 6.4874, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.4170789988173107, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.279788196720167e-05, |
|
"loss": 6.4784, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.4178030943013685, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.274215212917672e-05, |
|
"loss": 6.4754, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.41852718978542636, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.268637970427302e-05, |
|
"loss": 6.4881, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.4192512852694842, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 3.263056499927524e-05, |
|
"loss": 6.4943, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.419975380753542, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.2574708321200606e-05, |
|
"loss": 6.4685, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.42069947623759985, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 3.251880997729722e-05, |
|
"loss": 6.4982, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.4214235717216577, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 3.246287027504237e-05, |
|
"loss": 6.4781, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.4221476672057155, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.2406889522140856e-05, |
|
"loss": 6.4914, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.42287176268977333, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.235086802652327e-05, |
|
"loss": 6.4663, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.42359585817383116, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.2294806096344324e-05, |
|
"loss": 6.4821, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.424319953657889, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.223870403998116e-05, |
|
"loss": 6.4862, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.4250440491419469, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.2182562166031606e-05, |
|
"loss": 6.4912, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.4257681446260047, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 3.2126380783312525e-05, |
|
"loss": 6.4966, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.42649224011006254, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.2070160200858136e-05, |
|
"loss": 6.4732, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.42721633559412037, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.2013900727918236e-05, |
|
"loss": 6.4787, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.4279404310781782, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.195760267395656e-05, |
|
"loss": 6.4929, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.428664526562236, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.190126634864908e-05, |
|
"loss": 6.4908, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.42938862204629386, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.184489206188228e-05, |
|
"loss": 6.4957, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.4301127175303517, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.1788480123751426e-05, |
|
"loss": 6.4702, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.4308368130144095, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 3.1732030844558936e-05, |
|
"loss": 6.4674, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.43156090849846734, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 3.167554453481261e-05, |
|
"loss": 6.4738, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.4322850039825252, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.1619021505223936e-05, |
|
"loss": 6.4568, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.433009099466583, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.156246206670637e-05, |
|
"loss": 6.443, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.43373319495064083, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 3.150586653037367e-05, |
|
"loss": 6.4858, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.43445729043469866, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 3.1449235207538155e-05, |
|
"loss": 6.4682, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.43445729043469866, |
|
"eval_loss": 6.624475955963135, |
|
"eval_runtime": 1.4312, |
|
"eval_samples_per_second": 8.385, |
|
"eval_steps_per_second": 2.096, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.4351813859187565, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.139256840970897e-05, |
|
"loss": 6.4852, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.4359054814028143, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 3.13358664485904e-05, |
|
"loss": 6.4892, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.43662957688687215, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 3.127912963608016e-05, |
|
"loss": 6.4724, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.43735367237093, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.1222358284267685e-05, |
|
"loss": 6.4783, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.4380777678549878, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.116555270543236e-05, |
|
"loss": 6.4949, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.43880186333904564, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.110871321204188e-05, |
|
"loss": 6.458, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.43952595882310347, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 3.105184011675047e-05, |
|
"loss": 6.4576, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.4402500543071613, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.099493373239719e-05, |
|
"loss": 6.4929, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.4409741497912191, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 3.093799437200421e-05, |
|
"loss": 6.462, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.44169824527527696, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 3.0881022348775104e-05, |
|
"loss": 6.4752, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.4424223407593348, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 3.082401797609309e-05, |
|
"loss": 6.4974, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.4431464362433926, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 3.0766981567519346e-05, |
|
"loss": 6.4688, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.44387053172745045, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.070991343679127e-05, |
|
"loss": 6.4683, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.4445946272115083, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.065281389782075e-05, |
|
"loss": 6.4841, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.4453187226955661, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 3.059568326469242e-05, |
|
"loss": 6.4724, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.44604281817962393, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 3.0538521851661986e-05, |
|
"loss": 6.4475, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.44676691366368176, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.0481329973154427e-05, |
|
"loss": 6.4788, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.4474910091477396, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 3.0424107943762333e-05, |
|
"loss": 6.4613, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.4482151046317974, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 3.036685607824412e-05, |
|
"loss": 6.4948, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.44893920011585525, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 3.030957469152235e-05, |
|
"loss": 6.4943, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.4496632955999131, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.0252264098681947e-05, |
|
"loss": 6.4713, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.45038739108397097, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.0194924614968495e-05, |
|
"loss": 6.4844, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.4511114865680288, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 3.013755655578649e-05, |
|
"loss": 6.4724, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.4518355820520866, |
|
"grad_norm": 1.25, |
|
"learning_rate": 3.008016023669763e-05, |
|
"loss": 6.4964, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.45255967753614446, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 3.002273597341903e-05, |
|
"loss": 6.4579, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.4532837730202023, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 2.996528408182156e-05, |
|
"loss": 6.4774, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 0.4540078685042601, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.990780487792802e-05, |
|
"loss": 6.485, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.45473196398831794, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.9850298677911475e-05, |
|
"loss": 6.4758, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 0.4554560594723758, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 2.9792765798093465e-05, |
|
"loss": 6.479, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 0.4561801549564336, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.97352065549423e-05, |
|
"loss": 6.4656, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.45690425044049143, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.9677621265071313e-05, |
|
"loss": 6.4761, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 0.45762834592454926, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.9620010245237094e-05, |
|
"loss": 6.4661, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 0.4583524414086071, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 2.956237381233777e-05, |
|
"loss": 6.467, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.4590765368926649, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.9504712283411256e-05, |
|
"loss": 6.4593, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 0.45980063237672275, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 2.9447025975633517e-05, |
|
"loss": 6.4832, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.4605247278607806, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.938931520631681e-05, |
|
"loss": 6.4839, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.4612488233448384, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.933158029290795e-05, |
|
"loss": 6.4612, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 0.46197291882889624, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 2.927382155298657e-05, |
|
"loss": 6.4639, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 0.46269701431295407, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 2.921603930426333e-05, |
|
"loss": 6.4811, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.4634211097970119, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.9158233864578254e-05, |
|
"loss": 6.4671, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.46414520528106973, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.9100405551898896e-05, |
|
"loss": 6.4636, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 0.46486930076512756, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.9042554684318636e-05, |
|
"loss": 6.4711, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.4655933962491854, |
|
"grad_norm": 1.125, |
|
"learning_rate": 2.8984681580054917e-05, |
|
"loss": 6.4705, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 0.4663174917332432, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 2.892678655744751e-05, |
|
"loss": 6.4933, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 0.46704158721730105, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.886886993495674e-05, |
|
"loss": 6.4649, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.4677656827013589, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 2.8810932031161742e-05, |
|
"loss": 6.4719, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 0.4684897781854167, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.875297316475873e-05, |
|
"loss": 6.4689, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 0.46921387366947453, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.869499365455921e-05, |
|
"loss": 6.4806, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.46993796915353236, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.863699381948824e-05, |
|
"loss": 6.4796, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 0.4706620646375902, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.8578973978582695e-05, |
|
"loss": 6.4612, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.471386160121648, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.8520934450989485e-05, |
|
"loss": 6.4452, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.47211025560570585, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 2.8462875555963797e-05, |
|
"loss": 6.4682, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 0.4728343510897637, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.840479761286738e-05, |
|
"loss": 6.4683, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 0.4735584465738215, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.8346700941166725e-05, |
|
"loss": 6.4716, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.47428254205787934, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 2.8288585860431387e-05, |
|
"loss": 6.4644, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.47500663754193717, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.8230452690332132e-05, |
|
"loss": 6.4855, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 0.475730733025995, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.8172301750639274e-05, |
|
"loss": 6.4506, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.4764548285100529, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.8114133361220845e-05, |
|
"loss": 6.4714, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 0.4771789239941107, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 2.8055947842040862e-05, |
|
"loss": 6.4555, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 0.47790301947816854, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.7997745513157582e-05, |
|
"loss": 6.4689, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.4786271149622264, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.793952669472172e-05, |
|
"loss": 6.4681, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 0.4793512104462842, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 2.7881291706974676e-05, |
|
"loss": 6.4709, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 0.48007530593034203, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.782304087024682e-05, |
|
"loss": 6.4703, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.48079940141439986, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.7764774504955694e-05, |
|
"loss": 6.4614, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 0.4815234968984577, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.7706492931604238e-05, |
|
"loss": 6.4632, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.4822475923825155, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 2.7648196470779075e-05, |
|
"loss": 6.4594, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.48297168786657335, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.75898854431487e-05, |
|
"loss": 6.4725, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 0.4836957833506312, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.753156016946174e-05, |
|
"loss": 6.4721, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 0.484419878834689, |
|
"grad_norm": 1.25, |
|
"learning_rate": 2.7473220970545182e-05, |
|
"loss": 6.4438, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.48514397431874684, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 2.7414868167302614e-05, |
|
"loss": 6.4764, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.48586806980280467, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 2.735650208071246e-05, |
|
"loss": 6.4539, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 0.4865921652868625, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.72981230318262e-05, |
|
"loss": 6.4654, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.48731626077092033, |
|
"grad_norm": 1.25, |
|
"learning_rate": 2.723973134176662e-05, |
|
"loss": 6.4338, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 0.48804035625497816, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 2.7181327331726058e-05, |
|
"loss": 6.4534, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 0.488764451739036, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 2.7122911322964594e-05, |
|
"loss": 6.4736, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.4894885472230938, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.7064483636808313e-05, |
|
"loss": 6.4707, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 0.49021264270715165, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.7006044594647543e-05, |
|
"loss": 6.4474, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 0.4909367381912095, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 2.6947594517935083e-05, |
|
"loss": 6.4672, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.4916608336752673, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.688913372818442e-05, |
|
"loss": 6.4718, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 0.49238492915932514, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.683066254696795e-05, |
|
"loss": 6.4976, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.49310902464338296, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 2.6772181295915272e-05, |
|
"loss": 6.4536, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.4938331201274408, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.6713690296711346e-05, |
|
"loss": 6.4684, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.4945572156114986, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.665518987109475e-05, |
|
"loss": 6.4794, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 0.49528131109555645, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.6596680340855935e-05, |
|
"loss": 6.477, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.4960054065796143, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.6538162027835416e-05, |
|
"loss": 6.4537, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.4967295020636721, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.6479635253922018e-05, |
|
"loss": 6.4503, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 0.49745359754772994, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 2.6421100341051125e-05, |
|
"loss": 6.4848, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.49817769303178777, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.636255761120287e-05, |
|
"loss": 6.4634, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 0.4989017885158456, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.6304007386400398e-05, |
|
"loss": 6.4821, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 0.49962588399990343, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.6245449988708066e-05, |
|
"loss": 6.4534, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.5003499794839613, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 2.618688574022971e-05, |
|
"loss": 6.4562, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 0.5010740749680191, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 2.6128314963106832e-05, |
|
"loss": 6.4691, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 0.501798170452077, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.6069737979516845e-05, |
|
"loss": 6.4536, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.5025222659361348, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.601115511167131e-05, |
|
"loss": 6.4474, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 0.5032463614201926, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 2.5952566681814156e-05, |
|
"loss": 6.4382, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.5039704569042505, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.589397301221989e-05, |
|
"loss": 6.465, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.5046945523883083, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 2.5835374425191866e-05, |
|
"loss": 6.4716, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 0.5054186478723661, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.5776771243060472e-05, |
|
"loss": 6.4443, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 0.506142743356424, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 2.5718163788181375e-05, |
|
"loss": 6.4442, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.5068668388404818, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.5659552382933732e-05, |
|
"loss": 6.4818, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.5068668388404818, |
|
"eval_loss": 6.613395690917969, |
|
"eval_runtime": 1.5015, |
|
"eval_samples_per_second": 7.992, |
|
"eval_steps_per_second": 1.998, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.5075909343245396, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.5600937349718462e-05, |
|
"loss": 6.4476, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 0.5083150298085974, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.554231901095641e-05, |
|
"loss": 6.459, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.5090391252926553, |
|
"grad_norm": 1.25, |
|
"learning_rate": 2.5483697689086615e-05, |
|
"loss": 6.4414, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 0.5097632207767131, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.5425073706564528e-05, |
|
"loss": 6.4691, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 0.5104873162607709, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.5366447385860225e-05, |
|
"loss": 6.4414, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.5112114117448288, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.5307819049456655e-05, |
|
"loss": 6.478, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.5119355072288866, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.524918901984784e-05, |
|
"loss": 6.4605, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.5126596027129444, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.519055761953714e-05, |
|
"loss": 6.4395, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.5133836981970022, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.5131925171035438e-05, |
|
"loss": 6.4684, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.5141077936810601, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.5073291996859372e-05, |
|
"loss": 6.4657, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.5148318891651179, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.501465841952959e-05, |
|
"loss": 6.4963, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.5155559846491757, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.4956024761568965e-05, |
|
"loss": 6.4635, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.5162800801332336, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 2.4897391345500777e-05, |
|
"loss": 6.4686, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.5170041756172914, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.483875849384702e-05, |
|
"loss": 6.4515, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.5177282711013492, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.4780126529126555e-05, |
|
"loss": 6.4655, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.518452366585407, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.4721495773853366e-05, |
|
"loss": 6.457, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.5191764620694649, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.4662866550534798e-05, |
|
"loss": 6.4679, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.5199005575535227, |
|
"grad_norm": 1.25, |
|
"learning_rate": 2.460423918166975e-05, |
|
"loss": 6.4634, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.5206246530375805, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 2.4545613989746953e-05, |
|
"loss": 6.4335, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.5213487485216384, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.4486991297243123e-05, |
|
"loss": 6.4535, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.5220728440056962, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.4428371426621248e-05, |
|
"loss": 6.4829, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.522796939489754, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.4369754700328805e-05, |
|
"loss": 6.4657, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.5235210349738119, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.4311141440795953e-05, |
|
"loss": 6.4412, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.5242451304578697, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.4252531970433786e-05, |
|
"loss": 6.4693, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.5249692259419275, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 2.419392661163257e-05, |
|
"loss": 6.46, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.5256933214259853, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.4135325686759942e-05, |
|
"loss": 6.4613, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.5264174169100432, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.4076729518159146e-05, |
|
"loss": 6.463, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 0.527141512394101, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 2.401813842814728e-05, |
|
"loss": 6.4323, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 0.5278656078781588, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 2.395955273901349e-05, |
|
"loss": 6.4312, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.5285897033622167, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.390097277301722e-05, |
|
"loss": 6.4483, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.5293137988462745, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.3842398852386434e-05, |
|
"loss": 6.468, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 0.5300378943303323, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 2.3783831299315845e-05, |
|
"loss": 6.4574, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.5307619898143902, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.3725270435965123e-05, |
|
"loss": 6.4518, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 0.531486085298448, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.3666716584457174e-05, |
|
"loss": 6.4598, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 0.5322101807825058, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.36081700668763e-05, |
|
"loss": 6.4511, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.5329342762665636, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 2.3549631205266482e-05, |
|
"loss": 6.477, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.5336583717506215, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 2.3491100321629573e-05, |
|
"loss": 6.4593, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 0.5343824672346793, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 2.3432577737923564e-05, |
|
"loss": 6.4507, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.5351065627187371, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 2.337406377606077e-05, |
|
"loss": 6.4771, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 0.535830658202795, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.3315558757906087e-05, |
|
"loss": 6.4647, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.5365547536868528, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 2.325706300527521e-05, |
|
"loss": 6.4545, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.5372788491709106, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.3198576839932893e-05, |
|
"loss": 6.4633, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.5380029446549685, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 2.3140100583591124e-05, |
|
"loss": 6.4553, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 0.5387270401390263, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.3081634557907388e-05, |
|
"loss": 6.4596, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.5394511356230841, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.3023179084482916e-05, |
|
"loss": 6.4385, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 0.5401752311071419, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.296473448486087e-05, |
|
"loss": 6.4597, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.5408993265911999, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 2.2906301080524614e-05, |
|
"loss": 6.4686, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.5416234220752577, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 2.2847879192895922e-05, |
|
"loss": 6.4299, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.5423475175593155, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 2.278946914333323e-05, |
|
"loss": 6.4546, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 0.5430716130433734, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 2.2731071253129844e-05, |
|
"loss": 6.4454, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.5437957085274312, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 2.2672685843512187e-05, |
|
"loss": 6.471, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 0.544519804011489, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 2.2614313235638047e-05, |
|
"loss": 6.4412, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 0.5452438994955469, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.2555953750594777e-05, |
|
"loss": 6.4591, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 0.5459679949796047, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 2.2497607709397543e-05, |
|
"loss": 6.4524, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 0.5466920904636625, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.2439275432987585e-05, |
|
"loss": 6.4796, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 0.5474161859477203, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 2.23809572422304e-05, |
|
"loss": 6.4641, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 0.5481402814317782, |
|
"grad_norm": 1.125, |
|
"learning_rate": 2.2322653457914014e-05, |
|
"loss": 6.4564, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 0.548864376915836, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.2264364400747226e-05, |
|
"loss": 6.4588, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 0.5495884723998938, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 2.2206090391357797e-05, |
|
"loss": 6.4472, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 0.5503125678839517, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.2147831750290732e-05, |
|
"loss": 6.4609, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 0.5510366633680095, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.20895887980065e-05, |
|
"loss": 6.4575, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 0.5517607588520673, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 2.203136185487927e-05, |
|
"loss": 6.4575, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 0.5524848543361252, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.1973151241195143e-05, |
|
"loss": 6.4774, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 0.553208949820183, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.191495727715041e-05, |
|
"loss": 6.4593, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 0.5539330453042408, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.1856780282849765e-05, |
|
"loss": 6.4552, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 0.5546571407882986, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 2.1798620578304575e-05, |
|
"loss": 6.4388, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 0.5553812362723565, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.1740478483431088e-05, |
|
"loss": 6.4495, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 0.5561053317564143, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 2.1682354318048676e-05, |
|
"loss": 6.4629, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.5568294272404721, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.1624248401878123e-05, |
|
"loss": 6.4558, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 0.55755352272453, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 2.1566161054539798e-05, |
|
"loss": 6.4757, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 0.5582776182085878, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 2.1508092595551938e-05, |
|
"loss": 6.4555, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 0.5590017136926456, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.1450043344328892e-05, |
|
"loss": 6.4687, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 0.5597258091767034, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.1392013620179337e-05, |
|
"loss": 6.4549, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 0.5604499046607613, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.133400374230455e-05, |
|
"loss": 6.4584, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 0.5611740001448191, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 2.127601402979665e-05, |
|
"loss": 6.4676, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 0.5618980956288769, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 2.1218044801636805e-05, |
|
"loss": 6.4536, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 0.5626221911129348, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.116009637669353e-05, |
|
"loss": 6.4334, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 0.5633462865969926, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.1102169073720894e-05, |
|
"loss": 6.4507, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 0.5640703820810504, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 2.1044263211356797e-05, |
|
"loss": 6.436, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 0.5647944775651083, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 2.098637910812119e-05, |
|
"loss": 6.4561, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 0.5655185730491661, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.0928517082414335e-05, |
|
"loss": 6.454, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 0.5662426685332239, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 2.0870677452515057e-05, |
|
"loss": 6.4499, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 0.5669667640172817, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.081286053657899e-05, |
|
"loss": 6.4552, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 0.5676908595013396, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 2.0755066652636817e-05, |
|
"loss": 6.4605, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 0.5684149549853974, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 2.0697296118592553e-05, |
|
"loss": 6.4492, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 0.5691390504694552, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 2.0639549252221745e-05, |
|
"loss": 6.4476, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 0.5698631459535131, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 2.058182637116976e-05, |
|
"loss": 6.4568, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 0.5705872414375709, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 2.0524127792950056e-05, |
|
"loss": 6.4512, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 0.5713113369216287, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 2.046645383494237e-05, |
|
"loss": 6.4383, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 0.5720354324056866, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 2.0408804814391043e-05, |
|
"loss": 6.4694, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 0.5727595278897444, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 2.0351181048403227e-05, |
|
"loss": 6.4535, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 0.5734836233738022, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.0293582853947164e-05, |
|
"loss": 6.4736, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 0.57420771885786, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 2.0236010547850432e-05, |
|
"loss": 6.4463, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 0.5749318143419179, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 2.0178464446798206e-05, |
|
"loss": 6.462, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 0.5756559098259757, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 2.0120944867331522e-05, |
|
"loss": 6.4542, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 0.5763800053100335, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 2.0063452125845522e-05, |
|
"loss": 6.4458, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 0.5771041007940914, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 2.0005986538587735e-05, |
|
"loss": 6.4731, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 0.5778281962781492, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.9948548421656303e-05, |
|
"loss": 6.4686, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 0.578552291762207, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.9891138090998285e-05, |
|
"loss": 6.4521, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 0.5792763872462648, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.983375586240789e-05, |
|
"loss": 6.4232, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.5792763872462648, |
|
"eval_loss": 6.610226154327393, |
|
"eval_runtime": 1.4712, |
|
"eval_samples_per_second": 8.157, |
|
"eval_steps_per_second": 2.039, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.5800004827303227, |
|
"grad_norm": 1.25, |
|
"learning_rate": 1.9776402051524724e-05, |
|
"loss": 6.4368, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 0.5807245782143805, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.971907697383213e-05, |
|
"loss": 6.4723, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 0.5814486736984383, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.9661780944655345e-05, |
|
"loss": 6.4643, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 0.5821727691824962, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.9604514279159843e-05, |
|
"loss": 6.4756, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 0.582896864666554, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.95472772923496e-05, |
|
"loss": 6.4411, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 0.5836209601506118, |
|
"grad_norm": 1.25, |
|
"learning_rate": 1.9490070299065293e-05, |
|
"loss": 6.4344, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 0.5843450556346697, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.943289361398266e-05, |
|
"loss": 6.4451, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 0.5850691511187275, |
|
"grad_norm": 1.25, |
|
"learning_rate": 1.9375747551610688e-05, |
|
"loss": 6.4609, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 0.5857932466027853, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.931863242628995e-05, |
|
"loss": 6.4627, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 0.5865173420868431, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.9261548552190827e-05, |
|
"loss": 6.4676, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 0.587241437570901, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.920449624331179e-05, |
|
"loss": 6.457, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 0.5879655330549588, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.9147475813477718e-05, |
|
"loss": 6.4659, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 0.5886896285390166, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.909048757633809e-05, |
|
"loss": 6.4445, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 0.5894137240230745, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.9033531845365334e-05, |
|
"loss": 6.4637, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 0.5901378195071323, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.897660893385305e-05, |
|
"loss": 6.4351, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 0.5908619149911901, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.891971915491434e-05, |
|
"loss": 6.4607, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 0.591586010475248, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.8862862821480025e-05, |
|
"loss": 6.4635, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 0.5923101059593059, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 1.8806040246296966e-05, |
|
"loss": 6.4431, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 0.5930342014433637, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.8749251741926326e-05, |
|
"loss": 6.4586, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 0.5937582969274215, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.8692497620741884e-05, |
|
"loss": 6.4613, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 0.5944823924114794, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 1.8635778194928246e-05, |
|
"loss": 6.4493, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 0.5952064878955372, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.85790937764792e-05, |
|
"loss": 6.4421, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 0.595930583379595, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 1.8522444677195965e-05, |
|
"loss": 6.4376, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 0.5966546788636529, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.8465831208685476e-05, |
|
"loss": 6.4449, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 0.5973787743477107, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.8409253682358678e-05, |
|
"loss": 6.4748, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 0.5981028698317685, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.835271240942882e-05, |
|
"loss": 6.4677, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 0.5988269653158264, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.8296207700909717e-05, |
|
"loss": 6.4513, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 0.5995510607998842, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.823973986761406e-05, |
|
"loss": 6.4442, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 0.600275156283942, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 1.8183309220151717e-05, |
|
"loss": 6.4554, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 0.6009992517679998, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.8126916068928e-05, |
|
"loss": 6.4365, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 0.6017233472520577, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 1.807056072414195e-05, |
|
"loss": 6.441, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 0.6024474427361155, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.801424349578469e-05, |
|
"loss": 6.4688, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 0.6031715382201733, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.795796469363763e-05, |
|
"loss": 6.4426, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 0.6038956337042312, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.7901724627270833e-05, |
|
"loss": 6.4416, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 0.604619729188289, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.784552360604131e-05, |
|
"loss": 6.4545, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 0.6053438246723468, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.778936193909126e-05, |
|
"loss": 6.422, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 0.6060679201564046, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.773323993534644e-05, |
|
"loss": 6.4194, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.6067920156404625, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.7677157903514412e-05, |
|
"loss": 6.4644, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 0.6075161111245203, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.762111615208287e-05, |
|
"loss": 6.4715, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 0.6082402066085781, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.756511498931795e-05, |
|
"loss": 6.4393, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 0.608964302092636, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.750915472326252e-05, |
|
"loss": 6.4506, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 0.6096883975766938, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.7453235661734478e-05, |
|
"loss": 6.4369, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 0.6104124930607516, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.7397358112325085e-05, |
|
"loss": 6.4276, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 0.6111365885448095, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.7341522382397256e-05, |
|
"loss": 6.4483, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 0.6118606840288673, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.7285728779083862e-05, |
|
"loss": 6.4641, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 0.6125847795129251, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.7229977609286062e-05, |
|
"loss": 6.4545, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 0.613308874996983, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.7174269179671593e-05, |
|
"loss": 6.454, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 0.6140329704810408, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.7118603796673102e-05, |
|
"loss": 6.4447, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 0.6147570659650986, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.7062981766486437e-05, |
|
"loss": 6.4641, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 0.6154811614491564, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.7007403395069005e-05, |
|
"loss": 6.4613, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.6162052569332143, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 1.695186898813803e-05, |
|
"loss": 6.4616, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 0.6169293524172721, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.6896378851168914e-05, |
|
"loss": 6.4392, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 0.6176534479013299, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.6840933289393564e-05, |
|
"loss": 6.4388, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 0.6183775433853878, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.6785532607798664e-05, |
|
"loss": 6.4475, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 0.6191016388694456, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.673017711112405e-05, |
|
"loss": 6.4399, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 0.6198257343535034, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.6674867103861e-05, |
|
"loss": 6.4491, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 0.6205498298375612, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.6619602890250565e-05, |
|
"loss": 6.4558, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 0.6212739253216191, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.656438477428193e-05, |
|
"loss": 6.4644, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 0.6219980208056769, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.6509213059690683e-05, |
|
"loss": 6.4744, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 0.6227221162897347, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.645408804995719e-05, |
|
"loss": 6.4486, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 0.6234462117737926, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.6399010048304897e-05, |
|
"loss": 6.4742, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 0.6241703072578504, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.6343979357698687e-05, |
|
"loss": 6.4486, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 0.6248944027419082, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.6288996280843188e-05, |
|
"loss": 6.4436, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 0.625618498225966, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 1.6234061120181142e-05, |
|
"loss": 6.4286, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 0.6263425937100239, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.6179174177891703e-05, |
|
"loss": 6.4383, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 0.6270666891940817, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.6124335755888797e-05, |
|
"loss": 6.4773, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 0.6277907846781395, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.6069546155819464e-05, |
|
"loss": 6.415, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 0.6285148801621974, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.6014805679062185e-05, |
|
"loss": 6.4114, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 0.6292389756462552, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.596011462672522e-05, |
|
"loss": 6.4498, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 0.629963071130313, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.590547329964499e-05, |
|
"loss": 6.4754, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 0.6306871666143709, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.585088199838437e-05, |
|
"loss": 6.4334, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 0.6314112620984287, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.5796341023231066e-05, |
|
"loss": 6.4284, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 0.6321353575824865, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 1.5741850674195975e-05, |
|
"loss": 6.4493, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 0.6328594530665443, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.5687411251011487e-05, |
|
"loss": 6.4531, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 0.6335835485506022, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.563302305312989e-05, |
|
"loss": 6.4532, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 0.63430764403466, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.5578686379721698e-05, |
|
"loss": 6.4465, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 0.6350317395187178, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.5524401529673994e-05, |
|
"loss": 6.4508, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 0.6357558350027757, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.5470168801588805e-05, |
|
"loss": 6.4562, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 0.6364799304868335, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 1.541598849378145e-05, |
|
"loss": 6.4499, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 0.6372040259708913, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.5361860904278927e-05, |
|
"loss": 6.4544, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 0.6379281214549491, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.53077863308182e-05, |
|
"loss": 6.4484, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 0.638652216939007, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.525376507084466e-05, |
|
"loss": 6.4392, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 0.6393763124230648, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.5199797421510415e-05, |
|
"loss": 6.4545, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 0.6401004079071226, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.514588367967268e-05, |
|
"loss": 6.4504, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 0.6408245033911805, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.5092024141892142e-05, |
|
"loss": 6.4662, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 0.6415485988752383, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.5038219104431334e-05, |
|
"loss": 6.4296, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 0.6422726943592961, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 1.498446886325301e-05, |
|
"loss": 6.4654, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 0.642996789843354, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.49307737140185e-05, |
|
"loss": 6.4722, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 0.6437208853274119, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.4877133952086089e-05, |
|
"loss": 6.4471, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 0.6444449808114697, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.4823549872509396e-05, |
|
"loss": 6.4602, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 0.6451690762955276, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.4770021770035767e-05, |
|
"loss": 6.4455, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 0.6458931717795854, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.4716549939104618e-05, |
|
"loss": 6.4532, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 0.6466172672636432, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.4663134673845838e-05, |
|
"loss": 6.4466, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 0.647341362747701, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.4609776268078184e-05, |
|
"loss": 6.439, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 0.6480654582317589, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.455647501530763e-05, |
|
"loss": 6.4401, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 0.6487895537158167, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.4503231208725781e-05, |
|
"loss": 6.4521, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 0.6495136491998745, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.4450045141208234e-05, |
|
"loss": 6.452, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 0.6502377446839324, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.4396917105313018e-05, |
|
"loss": 6.481, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 0.6509618401679902, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.4343847393278925e-05, |
|
"loss": 6.474, |
|
"step": 8990 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 13810, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 999, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.935261929621055e+19, |
|
"train_batch_size": 30, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|